Merge branch 'main' into mcp-codex

This commit is contained in:
Agus Zubiaga 2025-07-22 19:24:10 -03:00
commit 1e5625c4b4
97 changed files with 3234 additions and 1599 deletions

18
Cargo.lock generated
View file

@ -211,6 +211,7 @@ dependencies = [
"chrono",
"client",
"collections",
"command_palette_hooks",
"component",
"context_server",
"db",
@ -232,6 +233,7 @@ dependencies = [
"jsonschema",
"language",
"language_model",
"language_models",
"languages",
"log",
"lsp",
@ -270,6 +272,7 @@ dependencies = [
"time_format",
"tree-sitter-md",
"ui",
"ui_input",
"unindent",
"urlencoding",
"util",
@ -1870,9 +1873,7 @@ version = "0.1.0"
dependencies = [
"aws-smithy-runtime-api",
"aws-smithy-types",
"futures 0.3.31",
"http_client",
"tokio",
"workspace-hack",
]
@ -6359,6 +6360,7 @@ dependencies = [
"buffer_diff",
"call",
"chrono",
"client",
"collections",
"command_palette_hooks",
"component",
@ -7400,9 +7402,9 @@ dependencies = [
[[package]]
name = "grid"
version = "0.14.0"
version = "0.17.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "be136d9dacc2a13cc70bb6c8f902b414fb2641f8db1314637c6b7933411a8f82"
checksum = "71b01d27060ad58be4663b9e4ac9e2d4806918e8876af8912afbddd1a91d5eaa"
[[package]]
name = "group"
@ -7854,6 +7856,7 @@ dependencies = [
"derive_more 0.99.19",
"futures 0.3.31",
"http 1.3.1",
"http-body 1.0.1",
"log",
"serde",
"serde_json",
@ -9098,11 +9101,11 @@ dependencies = [
"client",
"collections",
"component",
"convert_case 0.8.0",
"copilot",
"credentials_provider",
"deepseek",
"editor",
"fs",
"futures 0.3.31",
"google_ai",
"gpui",
@ -15956,13 +15959,12 @@ dependencies = [
[[package]]
name = "taffy"
version = "0.5.1"
version = "0.8.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e8b61630cba2afd2c851821add2e1bb1b7851a2436e839ab73b56558b009035e"
checksum = "7aaef0ac998e6527d6d0d5582f7e43953bb17221ac75bb8eb2fcc2db3396db1c"
dependencies = [
"arrayvec",
"grid",
"num-traits",
"serde",
"slotmap",
]

View file

@ -482,6 +482,7 @@ heed = { version = "0.21.0", features = ["read-txn-no-tls"] }
hex = "0.4.3"
html5ever = "0.27.0"
http = "1.1"
http-body = "1.0"
hyper = "0.14"
ignore = "0.4.22"
image = "0.25.1"

View file

@ -0,0 +1,4 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M7.25669 0.999943C8.27509 0.993825 9.24655 1.42125 9.9227 2.17279C11.4427 1.85079 12.9991 2.53518 13.7733 3.86518C14.159 4.5149 14.3171 5.26409 14.2372 5.99994H13.2967C13.3789 5.42185 13.265 4.8321 12.9686 4.32514C12.2353 3.06961 10.6088 2.63919 9.33676 3.36322L6.48032 4.98822C6.46926 4.99697 6.46284 5.01135 6.46372 5.02533V6.38568L9.91294 4.42084C10.0565 4.33818 10.2336 4.33823 10.3768 4.42084L13.1502 5.99994H11.2948L9.88364 5.19623C9.87034 5.19054 9.85459 5.19128 9.84262 5.19916L8.64926 5.87983L8.8602 5.99994H7.99985C6.89539 6.00004 5.99988 6.89547 5.99985 7.99994V9.34955L3.90219 8.15522C3.75815 8.07431 3.66897 7.92228 3.66977 7.75873V4.53803C3.66977 4.50828 3.67172 4.4654 3.67172 4.44135C3.08836 4.65262 2.59832 5.0599 2.28794 5.59174C1.55635 6.84647 1.99122 8.44936 3.26059 9.17475L5.99985 10.7363V11.6162C5.87564 11.6568 5.73827 11.6456 5.6229 11.579L2.7977 9.96869C2.77156 9.95382 2.73449 9.9311 2.71372 9.91889C2.60687 10.5231 2.7194 11.1466 3.0311 11.6777C3.6435 12.7209 4.87159 13.1902 5.99985 12.9023V13.8398C4.50443 14.1233 2.98758 13.4424 2.22641 12.1347C1.71174 11.2677 1.60096 10.2237 1.9227 9.27045C0.880739 8.13295 0.703328 6.46023 1.48325 5.13373C1.98739 4.26024 2.84863 3.64401 3.84653 3.44233C4.3245 1.9837 5.70306 0.996447 7.25669 0.999943ZM7.25766 1.91498C5.78932 1.9143 4.59839 3.08914 4.59751 4.53803V7.79193C4.59926 7.80578 4.60735 7.81796 4.61997 7.82416L5.8143 8.50483L5.81626 4.57611C5.81537 4.41216 5.90431 4.2606 6.04868 4.17963L8.87387 2.56928C8.89868 2.55441 8.93612 2.53379 8.95786 2.5224C8.48035 2.13046 7.8788 1.91498 7.25766 1.91498Z" fill="black"/>
<path d="M13.5 6C14.6046 6 15.5 6.89543 15.5 8V13.5C15.5 14.6046 14.6046 15.5 13.5 15.5H8C6.89543 15.5 6 14.6046 6 13.5V8C6 6.89543 6.89543 6 8 6H13.5ZM10.8916 8.02539C10.0563 8.02539 9.33453 8.27982 8.81934 8.76562C8.30213 9.25335 8.02547 9.94371 8.02539 10.748C8.02539 11.557 8.29852 12.2492 8.81543 12.7373C9.33013 13.2232 10.0521 13.4746 10.8916 13.4746C11.9865 13.4745 12.8545 13.1022 13.3076 12.3525C13.3894 12.2176 13.4521 12.0693 13.4521 11.8857C13.4521 11.4795 13.0933 11.2773 12.7842 11.2773C12.6604 11.2774 12.5292 11.3025 12.4072 11.3779C12.2862 11.4529 12.2058 11.5586 12.1494 11.666L12.1475 11.6689C11.9677 12.0213 11.5535 12.246 10.8955 12.2461C10.4219 12.2461 10.0667 12.0932 9.83008 11.8506C9.59255 11.607 9.44141 11.2389 9.44141 10.748C9.44148 10.264 9.59319 9.89628 9.83203 9.65137C10.0702 9.40725 10.4255 9.25391 10.8916 9.25391C11.4912 9.25399 11.9415 9.50614 12.1289 9.8916V9.89062C12.1888 10.0157 12.276 10.1311 12.4023 10.2129C12.5303 10.2956 12.6724 10.3271 12.8115 10.3271C12.9661 10.3271 13.1303 10.2857 13.2627 10.1758C13.4018 10.0603 13.4746 9.89383 13.4746 9.71582C13.4746 9.61857 13.4542 9.52036 13.4199 9.42773L13.3818 9.33691C12.9749 8.49175 11.9927 8.02548 10.8916 8.02539ZM10.3203 8.97852L10.1494 9.03516C10.2095 9.01178 10.2716 8.99089 10.3359 8.97363C10.3307 8.97505 10.3256 8.97706 10.3203 8.97852ZM10.4814 8.94141C10.4969 8.9385 10.5126 8.93616 10.5283 8.93359C10.5126 8.93617 10.4969 8.9385 10.4814 8.94141ZM10.6709 8.91504C10.6819 8.91399 10.693 8.913 10.7041 8.91211C10.693 8.913 10.6819 8.91399 10.6709 8.91504Z" fill="black" fill-opacity="0.95"/>
</svg>

After

Width:  |  Height:  |  Size: 3.2 KiB

View file

@ -15,7 +15,7 @@
{
"context": "Editor && vim_mode == insert && !menu",
"bindings": {
// "j k": "vim::SwitchToNormalMode"
// "j k": "vim::NormalBefore"
}
}
]

View file

@ -6,7 +6,7 @@
}
},
{
"context": "Editor",
"context": "Editor && mode == full",
"bindings": {
"cmd-l": "go_to_line::Toggle",
"ctrl-shift-d": "editor::DuplicateLineDown",
@ -15,7 +15,12 @@
"cmd-enter": "editor::NewlineBelow",
"cmd-alt-enter": "editor::NewlineAbove",
"cmd-shift-l": "editor::SelectLine",
"cmd-shift-t": "outline::Toggle",
"cmd-shift-t": "outline::Toggle"
}
},
{
"context": "Editor",
"bindings": {
"alt-backspace": "editor::DeleteToPreviousWordStart",
"alt-shift-backspace": "editor::DeleteToNextWordEnd",
"alt-delete": "editor::DeleteToNextWordEnd",
@ -39,10 +44,6 @@
"ctrl-_": "editor::ConvertToSnakeCase"
}
},
{
"context": "Editor && mode == full",
"bindings": {}
},
{
"context": "BufferSearchBar",
"bindings": {

View file

@ -1076,6 +1076,10 @@
// Send anonymized usage data like what languages you're using Zed with.
"metrics": true
},
// Whether to disable all AI features in Zed.
//
// Default: false
"disable_ai": false,
// Automatically update Zed. This setting may be ignored on Linux if
// installed through a package manager.
"auto_update": true,
@ -1712,6 +1716,7 @@
"openai": {
"api_url": "https://api.openai.com/v1"
},
"openai_compatible": {},
"open_router": {
"api_url": "https://openrouter.ai/api/v1"
},

View file

@ -15,13 +15,15 @@
"adapter": "JavaScript",
"program": "$ZED_FILE",
"request": "launch",
"cwd": "$ZED_WORKTREE_ROOT"
"cwd": "$ZED_WORKTREE_ROOT",
"type": "pwa-node"
},
{
"label": "JavaScript debug terminal",
"adapter": "JavaScript",
"request": "launch",
"cwd": "$ZED_WORKTREE_ROOT",
"console": "integratedTerminal"
"console": "integratedTerminal",
"type": "pwa-node"
}
]

View file

@ -47,7 +47,7 @@ use std::{
time::{Duration, Instant},
};
use thiserror::Error;
use util::{ResultExt as _, debug_panic, post_inc};
use util::{ResultExt as _, post_inc};
use uuid::Uuid;
use zed_llm_client::{CompletionIntent, CompletionRequestStatus, UsageLimit};
@ -1582,20 +1582,18 @@ impl Thread {
model: Arc<dyn LanguageModel>,
cx: &mut App,
) -> Option<PendingToolUse> {
let action_log = self.action_log.read(cx);
// Represent notification as a simulated `project_notifications` tool call
let tool_name = Arc::from("project_notifications");
let tool = self.tools.read(cx).tool(&tool_name, cx)?;
if !action_log.has_unnotified_user_edits() {
if !self.profile.is_tool_enabled(tool.source(), tool.name(), cx) {
return None;
}
// Represent notification as a simulated `project_notifications` tool call
let tool_name = Arc::from("project_notifications");
let Some(tool) = self.tools.read(cx).tool(&tool_name, cx) else {
debug_panic!("`project_notifications` tool not found");
return None;
};
if !self.profile.is_tool_enabled(tool.source(), tool.name(), cx) {
if self
.action_log
.update(cx, |log, cx| log.unnotified_user_edits(cx).is_none())
{
return None;
}
@ -5492,7 +5490,7 @@ fn main() {{
let thread = thread_store.update(cx, |store, cx| store.create_thread(cx));
let context_store = cx.new(|_cx| ContextStore::new(project.downgrade(), None));
let provider = Arc::new(FakeLanguageModelProvider);
let provider = Arc::new(FakeLanguageModelProvider::default());
let model = provider.test_model();
let model: Arc<dyn LanguageModel> = Arc::new(model);

View file

@ -434,10 +434,6 @@ pub struct EditToolParams {
pub new_text: String,
}
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
pub struct EditToolResponse;
#[derive(Deserialize, JsonSchema, Debug)]
pub struct ReadToolParams {
/// The absolute path to the file to read.
@ -450,12 +446,6 @@ pub struct ReadToolParams {
pub limit: Option<u32>,
}
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
pub struct ReadToolResponse {
pub content: String,
}
#[derive(Deserialize, JsonSchema, Debug)]
pub struct WriteToolParams {
/// Absolute path for new file

View file

@ -14,11 +14,7 @@ use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use util::debug_panic;
// todo! use shared tool inference?
use crate::{
claude::tools::{ClaudeTool, EditToolParams, ReadToolParams},
tools::{EditToolResponse, ReadToolResponse},
};
use crate::claude::tools::{ClaudeTool, EditToolParams, ReadToolParams};
pub struct ZedMcpServer {
server: context_server::listener::McpServer,
@ -196,11 +192,9 @@ impl ZedMcpServer {
let input =
serde_json::from_value(request.arguments.context("Arguments required")?)?;
let result = Self::handle_read_tool_call(input, delegate, cx).await?;
let content = Self::handle_read_tool_call(input, delegate, cx).await?;
Ok(CallToolResponse {
content: vec![ToolResponseContent::Text {
text: serde_json::to_string(&result)?,
}],
content,
is_error: None,
meta: None,
})
@ -208,11 +202,9 @@ impl ZedMcpServer {
let input =
serde_json::from_value(request.arguments.context("Arguments required")?)?;
let result = Self::handle_edit_tool_call(input, delegate, cx).await?;
Self::handle_edit_tool_call(input, delegate, cx).await?;
Ok(CallToolResponse {
content: vec![ToolResponseContent::Text {
text: serde_json::to_string(&result)?,
}],
content: vec![],
is_error: None,
meta: None,
})
@ -226,7 +218,7 @@ impl ZedMcpServer {
params: ReadToolParams,
delegate: AcpClientDelegate,
cx: &AsyncApp,
) -> Task<Result<ReadToolResponse>> {
) -> Task<Result<Vec<ToolResponseContent>>> {
cx.foreground_executor().spawn(async move {
let response = delegate
.read_text_file(ReadTextFileParams {
@ -236,9 +228,9 @@ impl ZedMcpServer {
})
.await?;
Ok(ReadToolResponse {
content: response.content,
})
Ok(vec![ToolResponseContent::Text {
text: response.content,
}])
})
}
@ -246,7 +238,7 @@ impl ZedMcpServer {
params: EditToolParams,
delegate: AcpClientDelegate,
cx: &AsyncApp,
) -> Task<Result<EditToolResponse>> {
) -> Task<Result<()>> {
cx.foreground_executor().spawn(async move {
let response = delegate
.read_text_file_reusing_snapshot(ReadTextFileParams {
@ -268,7 +260,7 @@ impl ZedMcpServer {
})
.await?;
Ok(EditToolResponse)
Ok(())
})
}

View file

@ -32,6 +32,7 @@ buffer_diff.workspace = true
chrono.workspace = true
client.workspace = true
collections.workspace = true
command_palette_hooks.workspace = true
component.workspace = true
context_server.workspace = true
db.workspace = true
@ -53,6 +54,7 @@ itertools.workspace = true
jsonschema.workspace = true
language.workspace = true
language_model.workspace = true
language_models.workspace = true
log.workspace = true
lsp.workspace = true
markdown.workspace = true
@ -87,6 +89,7 @@ theme.workspace = true
time.workspace = true
time_format.workspace = true
ui.workspace = true
ui_input.workspace = true
urlencoding.workspace = true
util.workspace = true
uuid.workspace = true

View file

@ -3895,7 +3895,7 @@ mod tests {
LanguageModelRegistry::global(cx).update(cx, |registry, cx| {
registry.set_default_model(
Some(ConfiguredModel {
provider: Arc::new(FakeLanguageModelProvider),
provider: Arc::new(FakeLanguageModelProvider::default()),
model,
}),
cx,
@ -3979,7 +3979,7 @@ mod tests {
LanguageModelRegistry::global(cx).update(cx, |registry, cx| {
registry.set_default_model(
Some(ConfiguredModel {
provider: Arc::new(FakeLanguageModelProvider),
provider: Arc::new(FakeLanguageModelProvider::default()),
model: model.clone(),
}),
cx,

View file

@ -1,3 +1,4 @@
mod add_llm_provider_modal;
mod configure_context_server_modal;
mod manage_profiles_modal;
mod tool_picker;
@ -28,7 +29,7 @@ use proto::Plan;
use settings::{Settings, update_settings_file};
use ui::{
Chip, ContextMenu, Disclosure, Divider, DividerColor, ElevationIndex, Indicator, PopoverMenu,
Scrollbar, ScrollbarState, Switch, SwitchColor, Tooltip, prelude::*,
Scrollbar, ScrollbarState, Switch, SwitchColor, SwitchField, Tooltip, prelude::*,
};
use util::ResultExt as _;
use workspace::Workspace;
@ -37,7 +38,10 @@ use zed_actions::ExtensionCategoryFilter;
pub(crate) use configure_context_server_modal::ConfigureContextServerModal;
pub(crate) use manage_profiles_modal::ManageProfilesModal;
use crate::AddContextServer;
use crate::{
AddContextServer,
agent_configuration::add_llm_provider_modal::{AddLlmProviderModal, LlmCompatibleProvider},
};
pub struct AgentConfiguration {
fs: Arc<dyn Fs>,
@ -304,11 +308,15 @@ impl AgentConfiguration {
v_flex()
.child(
v_flex()
h_flex()
.p(DynamicSpacing::Base16.rems(cx))
.pr(DynamicSpacing::Base20.rems(cx))
.pb_0()
.mb_2p5()
.items_start()
.justify_between()
.child(
v_flex()
.gap_0p5()
.child(Headline::new("LLM Providers"))
.child(
@ -316,6 +324,41 @@ impl AgentConfiguration {
.color(Color::Muted),
),
)
.child(
PopoverMenu::new("add-provider-popover")
.trigger(
Button::new("add-provider", "Add Provider")
.icon_position(IconPosition::Start)
.icon(IconName::Plus)
.icon_size(IconSize::Small)
.icon_color(Color::Muted)
.label_size(LabelSize::Small),
)
.anchor(gpui::Corner::TopRight)
.menu({
let workspace = self.workspace.clone();
move |window, cx| {
Some(ContextMenu::build(window, cx, |menu, _window, _cx| {
menu.header("Compatible APIs").entry("OpenAI", None, {
let workspace = workspace.clone();
move |window, cx| {
workspace
.update(cx, |workspace, cx| {
AddLlmProviderModal::toggle(
LlmCompatibleProvider::OpenAi,
workspace,
window,
cx,
);
})
.log_err();
}
})
}))
}
}),
),
)
.child(
div()
.pl(DynamicSpacing::Base08.rems(cx))
@ -330,118 +373,73 @@ impl AgentConfiguration {
fn render_command_permission(&mut self, cx: &mut Context<Self>) -> impl IntoElement {
let always_allow_tool_actions = AgentSettings::get_global(cx).always_allow_tool_actions;
h_flex()
.gap_4()
.justify_between()
.flex_wrap()
.child(
v_flex()
.gap_0p5()
.max_w_5_6()
.child(Label::new("Allow running editing tools without asking for confirmation"))
.child(
Label::new(
"The agent can perform potentially destructive actions without asking for your confirmation.",
)
.color(Color::Muted),
),
)
.child(
Switch::new(
"always-allow-tool-actions-switch",
always_allow_tool_actions.into(),
)
.color(SwitchColor::Accent)
.on_click({
let fs = self.fs.clone();
SwitchField::new(
"single-file-review",
"Enable single-file agent reviews",
"Agent edits are also displayed in single-file editors for review.",
always_allow_tool_actions,
move |state, _window, cx| {
let allow = state == &ToggleState::Selected;
update_settings_file::<AgentSettings>(
fs.clone(),
cx,
move |settings, _| {
update_settings_file::<AgentSettings>(fs.clone(), cx, move |settings, _| {
settings.set_always_allow_tool_actions(allow);
});
},
);
}
}),
)
}
fn render_single_file_review(&mut self, cx: &mut Context<Self>) -> impl IntoElement {
let single_file_review = AgentSettings::get_global(cx).single_file_review;
h_flex()
.gap_4()
.justify_between()
.flex_wrap()
.child(
v_flex()
.gap_0p5()
.max_w_5_6()
.child(Label::new("Enable single-file agent reviews"))
.child(
Label::new(
"Agent edits are also displayed in single-file editors for review.",
)
.color(Color::Muted),
),
)
.child(
Switch::new("single-file-review-switch", single_file_review.into())
.color(SwitchColor::Accent)
.on_click({
let fs = self.fs.clone();
SwitchField::new(
"single-file-review",
"Enable single-file agent reviews",
"Agent edits are also displayed in single-file editors for review.",
single_file_review,
move |state, _window, cx| {
let allow = state == &ToggleState::Selected;
update_settings_file::<AgentSettings>(
fs.clone(),
cx,
move |settings, _| {
update_settings_file::<AgentSettings>(fs.clone(), cx, move |settings, _| {
settings.set_single_file_review(allow);
});
},
);
}
}),
)
}
fn render_sound_notification(&mut self, cx: &mut Context<Self>) -> impl IntoElement {
let play_sound_when_agent_done = AgentSettings::get_global(cx).play_sound_when_agent_done;
h_flex()
.gap_4()
.justify_between()
.flex_wrap()
.child(
v_flex()
.gap_0p5()
.max_w_5_6()
.child(Label::new("Play sound when finished generating"))
.child(
Label::new(
"Hear a notification sound when the agent is done generating changes or needs your input.",
)
.color(Color::Muted),
),
)
.child(
Switch::new("play-sound-notification-switch", play_sound_when_agent_done.into())
.color(SwitchColor::Accent)
.on_click({
let fs = self.fs.clone();
SwitchField::new(
"sound-notification",
"Play sound when finished generating",
"Hear a notification sound when the agent is done generating changes or needs your input.",
play_sound_when_agent_done,
move |state, _window, cx| {
let allow = state == &ToggleState::Selected;
update_settings_file::<AgentSettings>(
fs.clone(),
cx,
move |settings, _| {
update_settings_file::<AgentSettings>(fs.clone(), cx, move |settings, _| {
settings.set_play_sound_when_agent_done(allow);
});
},
);
)
}
}),
fn render_modifier_to_send(&mut self, cx: &mut Context<Self>) -> impl IntoElement {
let use_modifier_to_send = AgentSettings::get_global(cx).use_modifier_to_send;
let fs = self.fs.clone();
SwitchField::new(
"modifier-send",
"Use modifier to submit a message",
"Make a modifier (cmd-enter on macOS, ctrl-enter on Linux) required to send messages.",
use_modifier_to_send,
move |state, _window, cx| {
let allow = state == &ToggleState::Selected;
update_settings_file::<AgentSettings>(fs.clone(), cx, move |settings, _| {
settings.set_use_modifier_to_send(allow);
});
},
)
}
@ -456,6 +454,7 @@ impl AgentConfiguration {
.child(self.render_command_permission(cx))
.child(self.render_single_file_review(cx))
.child(self.render_sound_notification(cx))
.child(self.render_modifier_to_send(cx))
}
fn render_zed_plan_info(&self, plan: Option<Plan>, cx: &mut Context<Self>) -> impl IntoElement {

View file

@ -0,0 +1,639 @@
use std::sync::Arc;
use anyhow::Result;
use collections::HashSet;
use fs::Fs;
use gpui::{DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, Render, Task};
use language_model::LanguageModelRegistry;
use language_models::{
AllLanguageModelSettings, OpenAiCompatibleSettingsContent,
provider::open_ai_compatible::AvailableModel,
};
use settings::update_settings_file;
use ui::{Banner, KeyBinding, Modal, ModalFooter, ModalHeader, Section, prelude::*};
use ui_input::SingleLineInput;
use workspace::{ModalView, Workspace};
#[derive(Clone, Copy)]
pub enum LlmCompatibleProvider {
OpenAi,
}
impl LlmCompatibleProvider {
fn name(&self) -> &'static str {
match self {
LlmCompatibleProvider::OpenAi => "OpenAI",
}
}
fn api_url(&self) -> &'static str {
match self {
LlmCompatibleProvider::OpenAi => "https://api.openai.com/v1",
}
}
}
struct AddLlmProviderInput {
provider_name: Entity<SingleLineInput>,
api_url: Entity<SingleLineInput>,
api_key: Entity<SingleLineInput>,
models: Vec<ModelInput>,
}
impl AddLlmProviderInput {
fn new(provider: LlmCompatibleProvider, window: &mut Window, cx: &mut App) -> Self {
let provider_name = single_line_input("Provider Name", provider.name(), None, window, cx);
let api_url = single_line_input("API URL", provider.api_url(), None, window, cx);
let api_key = single_line_input(
"API Key",
"000000000000000000000000000000000000000000000000",
None,
window,
cx,
);
Self {
provider_name,
api_url,
api_key,
models: vec![ModelInput::new(window, cx)],
}
}
fn add_model(&mut self, window: &mut Window, cx: &mut App) {
self.models.push(ModelInput::new(window, cx));
}
fn remove_model(&mut self, index: usize) {
self.models.remove(index);
}
}
struct ModelInput {
name: Entity<SingleLineInput>,
max_completion_tokens: Entity<SingleLineInput>,
max_output_tokens: Entity<SingleLineInput>,
max_tokens: Entity<SingleLineInput>,
}
impl ModelInput {
fn new(window: &mut Window, cx: &mut App) -> Self {
let model_name = single_line_input(
"Model Name",
"e.g. gpt-4o, claude-opus-4, gemini-2.5-pro",
None,
window,
cx,
);
let max_completion_tokens = single_line_input(
"Max Completion Tokens",
"200000",
Some("200000"),
window,
cx,
);
let max_output_tokens = single_line_input(
"Max Output Tokens",
"Max Output Tokens",
Some("32000"),
window,
cx,
);
let max_tokens = single_line_input("Max Tokens", "Max Tokens", Some("200000"), window, cx);
Self {
name: model_name,
max_completion_tokens,
max_output_tokens,
max_tokens,
}
}
fn parse(&self, cx: &App) -> Result<AvailableModel, SharedString> {
let name = self.name.read(cx).text(cx);
if name.is_empty() {
return Err(SharedString::from("Model Name cannot be empty"));
}
Ok(AvailableModel {
name,
display_name: None,
max_completion_tokens: Some(
self.max_completion_tokens
.read(cx)
.text(cx)
.parse::<u64>()
.map_err(|_| SharedString::from("Max Completion Tokens must be a number"))?,
),
max_output_tokens: Some(
self.max_output_tokens
.read(cx)
.text(cx)
.parse::<u64>()
.map_err(|_| SharedString::from("Max Output Tokens must be a number"))?,
),
max_tokens: self
.max_tokens
.read(cx)
.text(cx)
.parse::<u64>()
.map_err(|_| SharedString::from("Max Tokens must be a number"))?,
})
}
}
fn single_line_input(
label: impl Into<SharedString>,
placeholder: impl Into<SharedString>,
text: Option<&str>,
window: &mut Window,
cx: &mut App,
) -> Entity<SingleLineInput> {
cx.new(|cx| {
let input = SingleLineInput::new(window, cx, placeholder).label(label);
if let Some(text) = text {
input
.editor()
.update(cx, |editor, cx| editor.set_text(text, window, cx));
}
input
})
}
fn save_provider_to_settings(
input: &AddLlmProviderInput,
cx: &mut App,
) -> Task<Result<(), SharedString>> {
let provider_name: Arc<str> = input.provider_name.read(cx).text(cx).into();
if provider_name.is_empty() {
return Task::ready(Err("Provider Name cannot be empty".into()));
}
if LanguageModelRegistry::read_global(cx)
.providers()
.iter()
.any(|provider| {
provider.id().0.as_ref() == provider_name.as_ref()
|| provider.name().0.as_ref() == provider_name.as_ref()
})
{
return Task::ready(Err(
"Provider Name is already taken by another provider".into()
));
}
let api_url = input.api_url.read(cx).text(cx);
if api_url.is_empty() {
return Task::ready(Err("API URL cannot be empty".into()));
}
let api_key = input.api_key.read(cx).text(cx);
if api_key.is_empty() {
return Task::ready(Err("API Key cannot be empty".into()));
}
let mut models = Vec::new();
let mut model_names: HashSet<String> = HashSet::default();
for model in &input.models {
match model.parse(cx) {
Ok(model) => {
if !model_names.insert(model.name.clone()) {
return Task::ready(Err("Model Names must be unique".into()));
}
models.push(model)
}
Err(err) => return Task::ready(Err(err)),
}
}
let fs = <dyn Fs>::global(cx);
let task = cx.write_credentials(&api_url, "Bearer", api_key.as_bytes());
cx.spawn(async move |cx| {
task.await
.map_err(|_| "Failed to write API key to keychain")?;
cx.update(|cx| {
update_settings_file::<AllLanguageModelSettings>(fs, cx, |settings, _cx| {
settings.openai_compatible.get_or_insert_default().insert(
provider_name,
OpenAiCompatibleSettingsContent {
api_url,
available_models: models,
},
);
});
})
.ok();
Ok(())
})
}
pub struct AddLlmProviderModal {
provider: LlmCompatibleProvider,
input: AddLlmProviderInput,
focus_handle: FocusHandle,
last_error: Option<SharedString>,
}
impl AddLlmProviderModal {
pub fn toggle(
provider: LlmCompatibleProvider,
workspace: &mut Workspace,
window: &mut Window,
cx: &mut Context<Workspace>,
) {
workspace.toggle_modal(window, cx, |window, cx| Self::new(provider, window, cx));
}
fn new(provider: LlmCompatibleProvider, window: &mut Window, cx: &mut Context<Self>) -> Self {
Self {
input: AddLlmProviderInput::new(provider, window, cx),
provider,
last_error: None,
focus_handle: cx.focus_handle(),
}
}
fn confirm(&mut self, _: &menu::Confirm, _: &mut Window, cx: &mut Context<Self>) {
let task = save_provider_to_settings(&self.input, cx);
cx.spawn(async move |this, cx| {
let result = task.await;
this.update(cx, |this, cx| match result {
Ok(_) => {
cx.emit(DismissEvent);
}
Err(error) => {
this.last_error = Some(error);
cx.notify();
}
})
})
.detach_and_log_err(cx);
}
fn cancel(&mut self, _: &menu::Cancel, _: &mut Window, cx: &mut Context<Self>) {
cx.emit(DismissEvent);
}
fn render_section(&self) -> Section {
Section::new()
.child(self.input.provider_name.clone())
.child(self.input.api_url.clone())
.child(self.input.api_key.clone())
}
fn render_model_section(&self, cx: &mut Context<Self>) -> Section {
Section::new().child(
v_flex()
.gap_2()
.child(
h_flex()
.justify_between()
.child(Label::new("Models").size(LabelSize::Small))
.child(
Button::new("add-model", "Add Model")
.icon(IconName::Plus)
.icon_position(IconPosition::Start)
.icon_size(IconSize::XSmall)
.icon_color(Color::Muted)
.label_size(LabelSize::Small)
.on_click(cx.listener(|this, _, window, cx| {
this.input.add_model(window, cx);
cx.notify();
})),
),
)
.children(
self.input
.models
.iter()
.enumerate()
.map(|(ix, _)| self.render_model(ix, cx)),
),
)
}
fn render_model(&self, ix: usize, cx: &mut Context<Self>) -> impl IntoElement + use<> {
let has_more_than_one_model = self.input.models.len() > 1;
let model = &self.input.models[ix];
v_flex()
.p_2()
.gap_2()
.rounded_sm()
.border_1()
.border_dashed()
.border_color(cx.theme().colors().border.opacity(0.6))
.bg(cx.theme().colors().element_active.opacity(0.15))
.child(model.name.clone())
.child(
h_flex()
.gap_2()
.child(model.max_completion_tokens.clone())
.child(model.max_output_tokens.clone()),
)
.child(model.max_tokens.clone())
.when(has_more_than_one_model, |this| {
this.child(
Button::new(("remove-model", ix), "Remove Model")
.icon(IconName::Trash)
.icon_position(IconPosition::Start)
.icon_size(IconSize::XSmall)
.icon_color(Color::Muted)
.label_size(LabelSize::Small)
.style(ButtonStyle::Outlined)
.full_width()
.on_click(cx.listener(move |this, _, _window, cx| {
this.input.remove_model(ix);
cx.notify();
})),
)
})
}
}
impl EventEmitter<DismissEvent> for AddLlmProviderModal {}
impl Focusable for AddLlmProviderModal {
fn focus_handle(&self, _cx: &App) -> FocusHandle {
self.focus_handle.clone()
}
}
impl ModalView for AddLlmProviderModal {}
impl Render for AddLlmProviderModal {
fn render(&mut self, window: &mut ui::Window, cx: &mut ui::Context<Self>) -> impl IntoElement {
let focus_handle = self.focus_handle(cx);
div()
.id("add-llm-provider-modal")
.key_context("AddLlmProviderModal")
.w(rems(34.))
.elevation_3(cx)
.on_action(cx.listener(Self::cancel))
.capture_any_mouse_down(cx.listener(|this, _, window, cx| {
this.focus_handle(cx).focus(window);
}))
.child(
Modal::new("configure-context-server", None)
.header(ModalHeader::new().headline("Add LLM Provider").description(
match self.provider {
LlmCompatibleProvider::OpenAi => {
"This provider will use an OpenAI compatible API."
}
},
))
.when_some(self.last_error.clone(), |this, error| {
this.section(
Section::new().child(
Banner::new()
.severity(ui::Severity::Warning)
.child(div().text_xs().child(error)),
),
)
})
.child(
v_flex()
.id("modal_content")
.max_h_128()
.overflow_y_scroll()
.gap_2()
.child(self.render_section())
.child(self.render_model_section(cx)),
)
.footer(
ModalFooter::new().end_slot(
h_flex()
.gap_1()
.child(
Button::new("cancel", "Cancel")
.key_binding(
KeyBinding::for_action_in(
&menu::Cancel,
&focus_handle,
window,
cx,
)
.map(|kb| kb.size(rems_from_px(12.))),
)
.on_click(cx.listener(|this, _event, window, cx| {
this.cancel(&menu::Cancel, window, cx)
})),
)
.child(
Button::new("save-server", "Save Provider")
.key_binding(
KeyBinding::for_action_in(
&menu::Confirm,
&focus_handle,
window,
cx,
)
.map(|kb| kb.size(rems_from_px(12.))),
)
.on_click(cx.listener(|this, _event, window, cx| {
this.confirm(&menu::Confirm, window, cx)
})),
),
),
),
)
}
}
#[cfg(test)]
mod tests {
use super::*;
use editor::EditorSettings;
use fs::FakeFs;
use gpui::{TestAppContext, VisualTestContext};
use language::language_settings;
use language_model::{
LanguageModelProviderId, LanguageModelProviderName,
fake_provider::FakeLanguageModelProvider,
};
use project::Project;
use settings::{Settings as _, SettingsStore};
use util::path;
#[gpui::test]
async fn test_save_provider_invalid_inputs(cx: &mut TestAppContext) {
let cx = setup_test(cx).await;
assert_eq!(
save_provider_validation_errors("", "someurl", "somekey", vec![], cx,).await,
Some("Provider Name cannot be empty".into())
);
assert_eq!(
save_provider_validation_errors("someprovider", "", "somekey", vec![], cx,).await,
Some("API URL cannot be empty".into())
);
assert_eq!(
save_provider_validation_errors("someprovider", "someurl", "", vec![], cx,).await,
Some("API Key cannot be empty".into())
);
assert_eq!(
save_provider_validation_errors(
"someprovider",
"someurl",
"somekey",
vec![("", "200000", "200000", "32000")],
cx,
)
.await,
Some("Model Name cannot be empty".into())
);
assert_eq!(
save_provider_validation_errors(
"someprovider",
"someurl",
"somekey",
vec![("somemodel", "abc", "200000", "32000")],
cx,
)
.await,
Some("Max Tokens must be a number".into())
);
assert_eq!(
save_provider_validation_errors(
"someprovider",
"someurl",
"somekey",
vec![("somemodel", "200000", "abc", "32000")],
cx,
)
.await,
Some("Max Completion Tokens must be a number".into())
);
assert_eq!(
save_provider_validation_errors(
"someprovider",
"someurl",
"somekey",
vec![("somemodel", "200000", "200000", "abc")],
cx,
)
.await,
Some("Max Output Tokens must be a number".into())
);
assert_eq!(
save_provider_validation_errors(
"someprovider",
"someurl",
"somekey",
vec![
("somemodel", "200000", "200000", "32000"),
("somemodel", "200000", "200000", "32000"),
],
cx,
)
.await,
Some("Model Names must be unique".into())
);
}
#[gpui::test]
async fn test_save_provider_name_conflict(cx: &mut TestAppContext) {
let cx = setup_test(cx).await;
cx.update(|_window, cx| {
LanguageModelRegistry::global(cx).update(cx, |registry, cx| {
registry.register_provider(
FakeLanguageModelProvider::new(
LanguageModelProviderId::new("someprovider"),
LanguageModelProviderName::new("Some Provider"),
),
cx,
);
});
});
assert_eq!(
save_provider_validation_errors(
"someprovider",
"someurl",
"someapikey",
vec![("somemodel", "200000", "200000", "32000")],
cx,
)
.await,
Some("Provider Name is already taken by another provider".into())
);
}
async fn setup_test(cx: &mut TestAppContext) -> &mut VisualTestContext {
cx.update(|cx| {
let store = SettingsStore::test(cx);
cx.set_global(store);
workspace::init_settings(cx);
Project::init_settings(cx);
theme::init(theme::LoadThemes::JustBase, cx);
language_settings::init(cx);
EditorSettings::register(cx);
language_model::init_settings(cx);
language_models::init_settings(cx);
});
let fs = FakeFs::new(cx.executor());
cx.update(|cx| <dyn Fs>::set_global(fs.clone(), cx));
let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
let (_, cx) =
cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx));
cx
}
/// Fills out the "add LLM provider" form with the given values, attempts to
/// save it, and returns the validation error message, if any.
///
/// `models` is a list of `(name, max_tokens, max_completion_tokens,
/// max_output_tokens)` tuples; extra `ModelInput` rows are appended to the
/// form as needed. Returns `None` when the save succeeds.
async fn save_provider_validation_errors(
    provider_name: &str,
    api_url: &str,
    api_key: &str,
    models: Vec<(&str, &str, &str, &str)>,
    cx: &mut VisualTestContext,
) -> Option<SharedString> {
    // Helper: replace the text of a single-line input's underlying editor.
    fn set_text(
        input: &Entity<SingleLineInput>,
        text: &str,
        window: &mut Window,
        cx: &mut App,
    ) {
        input.update(cx, |input, cx| {
            input.editor().update(cx, |editor, cx| {
                editor.set_text(text, window, cx);
            });
        });
    }
    let task = cx.update(|window, cx| {
        let mut input = AddLlmProviderInput::new(LlmCompatibleProvider::OpenAi, window, cx);
        set_text(&input.provider_name, provider_name, window, cx);
        set_text(&input.api_url, api_url, window, cx);
        set_text(&input.api_key, api_key, window, cx);
        for (i, (name, max_tokens, max_completion_tokens, max_output_tokens)) in
            models.iter().enumerate()
        {
            // The form may start with fewer rows than `models`; grow it lazily.
            if i >= input.models.len() {
                input.models.push(ModelInput::new(window, cx));
            }
            let model = &mut input.models[i];
            set_text(&model.name, name, window, cx);
            set_text(&model.max_tokens, max_tokens, window, cx);
            set_text(
                &model.max_completion_tokens,
                max_completion_tokens,
                window,
                cx,
            );
            set_text(&model.max_output_tokens, max_output_tokens, window, cx);
        }
        save_provider_to_settings(&input, cx)
    });
    // A successful save resolves to `Ok(())`; surface only the error message.
    task.await.err()
}
}

View file

@ -43,7 +43,7 @@ use anyhow::{Result, anyhow};
use assistant_context::{AssistantContext, ContextEvent, ContextSummary};
use assistant_slash_command::SlashCommandWorkingSet;
use assistant_tool::ToolWorkingSet;
use client::{UserStore, zed_urls};
use client::{DisableAiSettings, UserStore, zed_urls};
use editor::{Anchor, AnchorRangeExt as _, Editor, EditorEvent, MultiBuffer};
use feature_flags::{self, FeatureFlagAppExt};
use fs::Fs;
@ -744,6 +744,7 @@ impl AgentPanel {
if workspace
.panel::<Self>(cx)
.is_some_and(|panel| panel.read(cx).enabled(cx))
&& !DisableAiSettings::get_global(cx).disable_ai
{
workspace.toggle_panel_focus::<Self>(window, cx);
}
@ -1665,7 +1666,10 @@ impl Panel for AgentPanel {
}
fn icon(&self, _window: &Window, cx: &App) -> Option<IconName> {
(self.enabled(cx) && AgentSettings::get_global(cx).button).then_some(IconName::ZedAssistant)
(self.enabled(cx)
&& AgentSettings::get_global(cx).button
&& !DisableAiSettings::get_global(cx).disable_ai)
.then_some(IconName::ZedAssistant)
}
fn icon_tooltip(&self, _window: &Window, _cx: &App) -> Option<&'static str> {

View file

@ -31,7 +31,8 @@ use std::sync::Arc;
use agent::{Thread, ThreadId};
use agent_settings::{AgentProfileId, AgentSettings, LanguageModelSelection};
use assistant_slash_command::SlashCommandRegistry;
use client::Client;
use client::{Client, DisableAiSettings};
use command_palette_hooks::CommandPaletteFilter;
use feature_flags::FeatureFlagAppExt as _;
use fs::Fs;
use gpui::{Action, App, Entity, actions};
@ -43,6 +44,7 @@ use prompt_store::PromptBuilder;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::{Settings as _, SettingsStore};
use std::any::TypeId;
pub use crate::active_thread::ActiveThread;
use crate::agent_configuration::{ConfigureContextServerModal, ManageProfilesModal};
@ -52,6 +54,7 @@ use crate::slash_command_settings::SlashCommandSettings;
pub use agent_diff::{AgentDiffPane, AgentDiffToolbar};
pub use text_thread_editor::{AgentPanelDelegate, TextThreadEditor};
pub use ui::preview::{all_agent_previews, get_agent_preview};
use zed_actions;
actions!(
agent,
@ -243,6 +246,66 @@ pub fn init(
})
.detach();
cx.observe_new(ManageProfilesModal::register).detach();
// Update command palette filter based on AI settings
update_command_palette_filter(cx);
// Watch for settings changes
cx.observe_global::<SettingsStore>(|app_cx| {
// When settings change, update the command palette filter
update_command_palette_filter(app_cx);
})
.detach();
}
/// Synchronizes the command palette with the `disable_ai` setting: when AI is
/// disabled, every AI-related namespace and edit-prediction action is hidden;
/// when it is enabled again, they are re-shown.
///
/// Fix: the original duplicated the `use` list and the `TypeId` arrays in both
/// branches; the shared data is hoisted so hide/show cannot drift apart.
fn update_command_palette_filter(cx: &mut App) {
    use editor::actions::{
        AcceptEditPrediction, AcceptPartialEditPrediction, NextEditPrediction,
        PreviousEditPrediction, ShowEditPrediction, ToggleEditPrediction,
    };

    // Command palette namespaces that only contain AI functionality.
    const AI_NAMESPACES: [&str; 4] = [
        "agent",
        "assistant",
        "zed_predict_onboarding",
        "edit_prediction",
    ];

    let disable_ai = DisableAiSettings::get_global(cx).disable_ai;
    CommandPaletteFilter::update_global(cx, |filter, _| {
        // Individual AI-related actions that live outside the namespaces above.
        let ai_action_types = [
            TypeId::of::<AcceptEditPrediction>(),
            TypeId::of::<AcceptPartialEditPrediction>(),
            TypeId::of::<ShowEditPrediction>(),
            TypeId::of::<NextEditPrediction>(),
            TypeId::of::<PreviousEditPrediction>(),
            TypeId::of::<ToggleEditPrediction>(),
            TypeId::of::<zed_actions::OpenZedPredictOnboarding>(),
        ];
        if disable_ai {
            for namespace in AI_NAMESPACES {
                filter.hide_namespace(namespace);
            }
            filter.hide_action_types(&ai_action_types);
        } else {
            for namespace in AI_NAMESPACES {
                filter.show_namespace(namespace);
            }
            filter.show_action_types(ai_action_types.iter());
        }
    });
}
fn init_language_model_settings(cx: &mut App) {

View file

@ -16,7 +16,7 @@ use agent::{
};
use agent_settings::AgentSettings;
use anyhow::{Context as _, Result};
use client::telemetry::Telemetry;
use client::{DisableAiSettings, telemetry::Telemetry};
use collections::{HashMap, HashSet, VecDeque, hash_map};
use editor::SelectionEffects;
use editor::{
@ -57,6 +57,17 @@ pub fn init(
cx: &mut App,
) {
cx.set_global(InlineAssistant::new(fs, prompt_builder, telemetry));
cx.observe_global::<SettingsStore>(|cx| {
if DisableAiSettings::get_global(cx).disable_ai {
// Hide any active inline assist UI when AI is disabled
InlineAssistant::update_global(cx, |assistant, cx| {
assistant.cancel_all_active_completions(cx);
});
}
})
.detach();
cx.observe_new(|_workspace: &mut Workspace, window, cx| {
let Some(window) = window else {
return;
@ -141,6 +152,26 @@ impl InlineAssistant {
.detach();
}
/// Hides all active inline assists when AI is disabled.
///
/// Walks every editor that currently has an inline assist registered and, if
/// the editor still exists and has an active inline completion, cancels it.
pub fn cancel_all_active_completions(&mut self, cx: &mut App) {
    // Cancel all active completions in editors
    for (editor_handle, _) in self.assists_by_editor.iter() {
        // The editor may have been dropped since the assist was registered.
        if let Some(editor) = editor_handle.upgrade() {
            let windows = cx.windows();
            if !windows.is_empty() {
                // NOTE(review): only the first window is consulted here; an
                // editor living in a different window may not be reached —
                // confirm whether the editor's own window should be resolved.
                let window = windows[0];
                // Errors from updating a closed window are intentionally
                // ignored (best-effort cleanup).
                let _ = window.update(cx, |_, window, cx| {
                    editor.update(cx, |editor, cx| {
                        if editor.has_active_inline_completion() {
                            editor.cancel(&Default::default(), window, cx);
                        }
                    });
                });
            }
        }
    }
}
fn handle_workspace_event(
&mut self,
workspace: Entity<Workspace>,
@ -176,7 +207,7 @@ impl InlineAssistant {
window: &mut Window,
cx: &mut App,
) {
let is_assistant2_enabled = true;
let is_assistant2_enabled = !DisableAiSettings::get_global(cx).disable_ai;
if let Some(editor) = item.act_as::<Editor>(cx) {
editor.update(cx, |editor, cx| {
@ -199,6 +230,13 @@ impl InlineAssistant {
cx,
);
if DisableAiSettings::get_global(cx).disable_ai {
// Cancel any active completions
if editor.has_active_inline_completion() {
editor.cancel(&Default::default(), window, cx);
}
}
// Remove the Assistant1 code action provider, as it still might be registered.
editor.remove_code_action_provider("assistant".into(), window, cx);
} else {
@ -219,7 +257,7 @@ impl InlineAssistant {
cx: &mut Context<Workspace>,
) {
let settings = AgentSettings::get_global(cx);
if !settings.enabled {
if !settings.enabled || DisableAiSettings::get_global(cx).disable_ai {
return;
}

View file

@ -38,10 +38,6 @@ impl ApiKeysWithProviders {
.map(|provider| (provider.icon(), provider.name().0.clone()))
.collect()
}
pub fn has_providers(&self) -> bool {
!self.configured_providers.is_empty()
}
}
impl Render for ApiKeysWithProviders {
@ -53,11 +49,10 @@ impl Render for ApiKeysWithProviders {
.map(|(icon, name)| {
h_flex()
.gap_1p5()
.child(Icon::new(icon).size(IconSize::Small).color(Color::Muted))
.child(Icon::new(icon).size(IconSize::XSmall).color(Color::Muted))
.child(Label::new(name))
});
h_flex()
div()
.mx_2p5()
.p_1()
.pb_0()
@ -85,8 +80,24 @@ impl Render for ApiKeysWithProviders {
.border_x_1()
.border_color(cx.theme().colors().border)
.bg(cx.theme().colors().panel_background)
.child(Icon::new(IconName::Info).size(IconSize::XSmall).color(Color::Muted))
.child(Label::new("Or start now using API keys from your environment for the following providers:").color(Color::Muted))
.child(
h_flex()
.min_w_0()
.gap_2()
.child(
Icon::new(IconName::Info)
.size(IconSize::XSmall)
.color(Color::Muted)
)
.child(
div()
.w_full()
.child(
Label::new("Start now using API keys from your environment for the following providers:")
.color(Color::Muted)
)
)
)
.children(configured_providers_list)
)
}
@ -118,7 +129,7 @@ impl RenderOnce for ApiKeysWithoutProviders {
.child(Divider::horizontal()),
)
.child(List::new().child(BulletItem::new(
"You can also use AI in Zed by bringing your own API keys",
"Add your own keys to use AI without signing in.",
)))
.child(
Button::new("configure-providers", "Configure Providers")

View file

@ -141,22 +141,18 @@ impl ZedAiOnboarding {
)
.child(
List::new()
.child(BulletItem::new("50 prompts per month with Claude models"))
.child(BulletItem::new(
"50 prompts per month with the Claude models",
))
.child(BulletItem::new(
"2000 accepted edit predictions using our open-source Zeta model",
"2,000 accepted edit predictions with Zeta, our open-source model",
)),
)
}
fn pro_trial_definition(&self) -> impl IntoElement {
List::new()
.child(BulletItem::new("150 prompts with Claude models"))
.child(BulletItem::new(
"150 prompts per month with the Claude models",
))
.child(BulletItem::new(
"Unlimited accepted edit predictions using our open-source Zeta model",
"Unlimited accepted edit predictions with Zeta, our open-source model",
))
}
@ -178,12 +174,12 @@ impl ZedAiOnboarding {
List::new()
.child(BulletItem::new("500 prompts per month with Claude models"))
.child(BulletItem::new(
"Unlimited accepted edit predictions using our open-source Zeta model",
"Unlimited accepted edit predictions with Zeta, our open-source model",
))
.child(BulletItem::new("USD $20 per month")),
.child(BulletItem::new("$20 USD per month")),
)
.child(
Button::new("pro", "Start with Pro")
Button::new("pro", "Get Started")
.full_width()
.style(ButtonStyle::Tinted(ui::TintColor::Accent))
.on_click(move |_, _window, cx| {
@ -206,11 +202,11 @@ impl ZedAiOnboarding {
List::new()
.child(self.pro_trial_definition())
.child(BulletItem::new(
"Try it out for 14 days with no charge and no credit card required",
"Try it out for 14 days for free, no credit card required",
)),
)
.child(
Button::new("pro", "Start Pro Trial")
Button::new("pro", "Start Free Trial")
.full_width()
.style(ButtonStyle::Tinted(ui::TintColor::Accent))
.on_click(move |_, _window, cx| {
@ -225,14 +221,14 @@ impl ZedAiOnboarding {
v_flex()
.gap_1()
.w_full()
.child(Headline::new("Before starting…"))
.child(Headline::new("Accept Terms of Service"))
.child(
Label::new("Make sure you have read and accepted Zed AI's terms of service.")
Label::new("We dont sell your data, track you across the web, or compromise your privacy.")
.color(Color::Muted)
.mb_2(),
)
.child(
Button::new("terms_of_service", "View and Read the Terms of Service")
Button::new("terms_of_service", "Review Terms of Service")
.full_width()
.style(ButtonStyle::Outlined)
.icon(IconName::ArrowUpRight)
@ -241,7 +237,7 @@ impl ZedAiOnboarding {
.on_click(move |_, _window, cx| cx.open_url(&zed_urls::terms_of_service(cx))),
)
.child(
Button::new("accept_terms", "I've read it and accept it")
Button::new("accept_terms", "Accept")
.full_width()
.style(ButtonStyle::Tinted(TintColor::Accent))
.on_click({
@ -259,13 +255,13 @@ impl ZedAiOnboarding {
.gap_1()
.child(Headline::new("Welcome to Zed AI"))
.child(
Label::new("Sign in to start using AI in Zed with a free trial of the Pro plan, which includes:")
Label::new("Sign in to try Zed Pro for 14 days, no credit card required.")
.color(Color::Muted)
.mb_2(),
)
.child(self.pro_trial_definition())
.child(
Button::new("sign_in", "Sign in to Start Trial")
Button::new("sign_in", "Try Zed Pro for Free")
.disabled(signing_in)
.full_width()
.style(ButtonStyle::Tinted(ui::TintColor::Accent))
@ -284,11 +280,6 @@ impl ZedAiOnboarding {
.relative()
.gap_1()
.child(Headline::new("Welcome to Zed AI"))
.child(
Label::new("Choose how you want to start.")
.color(Color::Muted)
.mb_2(),
)
.map(|this| {
if self.account_too_young {
this.child(young_account_banner)
@ -318,7 +309,7 @@ impl ZedAiOnboarding {
v_flex()
.relative()
.gap_1()
.child(Headline::new("Welcome to the Zed Pro free trial"))
.child(Headline::new("Welcome to the Zed Pro Trial"))
.child(
Label::new("Here's what you get for the next 14 days:")
.color(Color::Muted)

View file

@ -6,7 +6,7 @@ pub struct YoungAccountBanner;
impl RenderOnce for YoungAccountBanner {
fn render(self, _window: &mut Window, cx: &mut App) -> impl IntoElement {
const YOUNG_ACCOUNT_DISCLAIMER: &str = "To prevent abuse of our service, we cannot offer plans to GitHub accounts created fewer than 30 days ago. To request an exception, reach out to billing@zed.dev.";
const YOUNG_ACCOUNT_DISCLAIMER: &str = "To prevent abuse of our service, we cannot offer plans to GitHub accounts created fewer than 30 days ago. To request an exception, reach out to billing-support@zed.dev.";
let label = div()
.w_full()

View file

@ -1323,7 +1323,7 @@ fn setup_context_editor_with_fake_model(
) -> (Entity<AssistantContext>, Arc<FakeLanguageModel>) {
let registry = Arc::new(LanguageRegistry::test(cx.executor().clone()));
let fake_provider = Arc::new(FakeLanguageModelProvider);
let fake_provider = Arc::new(FakeLanguageModelProvider::default());
let fake_model = Arc::new(fake_provider.test_model());
cx.update(|cx| {

View file

@ -51,23 +51,13 @@ impl ActionLog {
Some(self.tracked_buffers.get(buffer)?.snapshot.clone())
}
pub fn has_unnotified_user_edits(&self) -> bool {
self.tracked_buffers
.values()
.any(|tracked| tracked.has_unnotified_user_edits)
}
/// Return a unified diff patch with user edits made since last read or notification
pub fn unnotified_user_edits(&self, cx: &Context<Self>) -> Option<String> {
if !self.has_unnotified_user_edits() {
return None;
}
let unified_diff = self
let diffs = self
.tracked_buffers
.values()
.filter_map(|tracked| {
if !tracked.has_unnotified_user_edits {
if !tracked.may_have_unnotified_user_edits {
return None;
}
@ -95,9 +85,13 @@ impl ActionLog {
Some(result)
})
.collect::<Vec<_>>()
.join("\n\n");
.collect::<Vec<_>>();
if diffs.is_empty() {
return None;
}
let unified_diff = diffs.join("\n\n");
Some(unified_diff)
}
@ -106,7 +100,7 @@ impl ActionLog {
pub fn flush_unnotified_user_edits(&mut self, cx: &Context<Self>) -> Option<String> {
let patch = self.unnotified_user_edits(cx);
self.tracked_buffers.values_mut().for_each(|tracked| {
tracked.has_unnotified_user_edits = false;
tracked.may_have_unnotified_user_edits = false;
tracked.last_seen_base = tracked.diff_base.clone();
});
patch
@ -185,7 +179,7 @@ impl ActionLog {
version: buffer.read(cx).version(),
diff,
diff_update: diff_update_tx,
has_unnotified_user_edits: false,
may_have_unnotified_user_edits: false,
_open_lsp_handle: open_lsp_handle,
_maintain_diff: cx.spawn({
let buffer = buffer.clone();
@ -337,27 +331,34 @@ impl ActionLog {
let new_snapshot = buffer_snapshot.clone();
let unreviewed_edits = tracked_buffer.unreviewed_edits.clone();
let edits = diff_snapshots(&old_snapshot, &new_snapshot);
if let ChangeAuthor::User = author
&& !edits.is_empty()
{
tracked_buffer.has_unnotified_user_edits = true;
}
let mut has_user_changes = false;
async move {
if let ChangeAuthor::User = author {
apply_non_conflicting_edits(
has_user_changes = apply_non_conflicting_edits(
&unreviewed_edits,
edits,
&mut base_text,
new_snapshot.as_rope(),
);
}
(Arc::new(base_text.to_string()), base_text)
(Arc::new(base_text.to_string()), base_text, has_user_changes)
}
});
anyhow::Ok(rebase)
})??;
let (new_base_text, new_diff_base) = rebase.await;
let (new_base_text, new_diff_base, has_user_changes) = rebase.await;
this.update(cx, |this, _| {
let tracked_buffer = this
.tracked_buffers
.get_mut(buffer)
.context("buffer not tracked")
.unwrap();
tracked_buffer.may_have_unnotified_user_edits |= has_user_changes;
})?;
Self::update_diff(
this,
buffer,
@ -829,11 +830,12 @@ fn apply_non_conflicting_edits(
edits: Vec<Edit<u32>>,
old_text: &mut Rope,
new_text: &Rope,
) {
) -> bool {
let mut old_edits = patch.edits().iter().cloned().peekable();
let mut new_edits = edits.into_iter().peekable();
let mut applied_delta = 0i32;
let mut rebased_delta = 0i32;
let mut has_made_changes = false;
while let Some(mut new_edit) = new_edits.next() {
let mut conflict = false;
@ -883,8 +885,10 @@ fn apply_non_conflicting_edits(
&new_text.chunks_in_range(new_bytes).collect::<String>(),
);
applied_delta += new_edit.new_len() as i32 - new_edit.old_len() as i32;
has_made_changes = true;
}
}
has_made_changes
}
fn diff_snapshots(
@ -958,7 +962,7 @@ struct TrackedBuffer {
diff: Entity<BufferDiff>,
snapshot: text::BufferSnapshot,
diff_update: mpsc::UnboundedSender<(ChangeAuthor, text::BufferSnapshot)>,
has_unnotified_user_edits: bool,
may_have_unnotified_user_edits: bool,
_open_lsp_handle: OpenLspBufferHandle,
_maintain_diff: Task<()>,
_subscription: Subscription,

View file

@ -20,6 +20,7 @@ anyhow.workspace = true
assistant_tool.workspace = true
buffer_diff.workspace = true
chrono.workspace = true
client.workspace = true
collections.workspace = true
component.workspace = true
derive_more.workspace = true

View file

@ -20,14 +20,13 @@ mod thinking_tool;
mod ui;
mod web_search_tool;
use std::sync::Arc;
use assistant_tool::ToolRegistry;
use copy_path_tool::CopyPathTool;
use gpui::{App, Entity};
use http_client::HttpClientWithUrl;
use language_model::LanguageModelRegistry;
use move_path_tool::MovePathTool;
use std::sync::Arc;
use web_search_tool::WebSearchTool;
pub(crate) use templates::*;

View file

@ -278,6 +278,9 @@ impl Tool for EditFileTool {
.unwrap_or(false);
if format_on_save_enabled {
action_log.update(cx, |log, cx| {
log.buffer_edited(buffer.clone(), cx);
})?;
let format_task = project.update(cx, |project, cx| {
project.format(
HashSet::from_iter([buffer.clone()]),

View file

@ -200,7 +200,7 @@ mod tests {
// Run the tool before any changes
let tool = Arc::new(ProjectNotificationsTool);
let provider = Arc::new(FakeLanguageModelProvider);
let provider = Arc::new(FakeLanguageModelProvider::default());
let model: Arc<dyn LanguageModel> = Arc::new(provider.test_model());
let request = Arc::new(LanguageModelRequest::default());
let tool_input = json!({});

View file

@ -17,7 +17,5 @@ default = []
[dependencies]
aws-smithy-runtime-api.workspace = true
aws-smithy-types.workspace = true
futures.workspace = true
http_client.workspace = true
tokio = { workspace = true, features = ["rt", "rt-multi-thread"] }
workspace-hack.workspace = true

View file

@ -11,14 +11,11 @@ use aws_smithy_runtime_api::client::result::ConnectorError;
use aws_smithy_runtime_api::client::runtime_components::RuntimeComponents;
use aws_smithy_runtime_api::http::{Headers, StatusCode};
use aws_smithy_types::body::SdkBody;
use futures::AsyncReadExt;
use http_client::{AsyncBody, Inner};
use http_client::AsyncBody;
use http_client::{HttpClient, Request};
use tokio::runtime::Handle;
struct AwsHttpConnector {
client: Arc<dyn HttpClient>,
handle: Handle,
}
impl std::fmt::Debug for AwsHttpConnector {
@ -42,18 +39,17 @@ impl AwsConnector for AwsHttpConnector {
.client
.send(Request::from_parts(parts, convert_to_async_body(body)));
let handle = self.handle.clone();
HttpConnectorFuture::new(async move {
let response = match response.await {
Ok(response) => response,
Err(err) => return Err(ConnectorError::other(err.into(), None)),
};
let (parts, body) = response.into_parts();
let body = convert_to_sdk_body(body, handle).await;
let mut response =
HttpResponse::new(StatusCode::try_from(parts.status.as_u16()).unwrap(), body);
let mut response = HttpResponse::new(
StatusCode::try_from(parts.status.as_u16()).unwrap(),
convert_to_sdk_body(body),
);
let headers = match Headers::try_from(parts.headers) {
Ok(headers) => headers,
@ -70,7 +66,6 @@ impl AwsConnector for AwsHttpConnector {
#[derive(Clone)]
pub struct AwsHttpClient {
client: Arc<dyn HttpClient>,
handler: Handle,
}
impl std::fmt::Debug for AwsHttpClient {
@ -80,11 +75,8 @@ impl std::fmt::Debug for AwsHttpClient {
}
impl AwsHttpClient {
pub fn new(client: Arc<dyn HttpClient>, handle: Handle) -> Self {
Self {
client,
handler: handle,
}
pub fn new(client: Arc<dyn HttpClient>) -> Self {
Self { client }
}
}
@ -96,25 +88,12 @@ impl AwsClient for AwsHttpClient {
) -> SharedHttpConnector {
SharedHttpConnector::new(AwsHttpConnector {
client: self.client.clone(),
handle: self.handler.clone(),
})
}
}
pub async fn convert_to_sdk_body(body: AsyncBody, handle: Handle) -> SdkBody {
match body.0 {
Inner::Empty => SdkBody::empty(),
Inner::Bytes(bytes) => SdkBody::from(bytes.into_inner()),
Inner::AsyncReader(mut reader) => {
let buffer = handle.spawn(async move {
let mut buffer = Vec::new();
let _ = reader.read_to_end(&mut buffer).await;
buffer
});
SdkBody::from(buffer.await.unwrap_or_default())
}
}
/// Converts an `http_client::AsyncBody` into an AWS SDK `SdkBody` via the
/// SDK's `http-body` 1.x adapter, so the body is forwarded rather than
/// buffered up front.
pub fn convert_to_sdk_body(body: AsyncBody) -> SdkBody {
    SdkBody::from_body_1_x(body)
}
pub fn convert_to_async_body(body: SdkBody) -> AsyncBody {

View file

@ -343,8 +343,7 @@ impl BufferDiffInner {
..
} in hunks.iter().cloned()
{
let preceding_pending_hunks =
old_pending_hunks.slice(&buffer_range.start, Bias::Left, buffer);
let preceding_pending_hunks = old_pending_hunks.slice(&buffer_range.start, Bias::Left);
pending_hunks.append(preceding_pending_hunks, buffer);
// Skip all overlapping or adjacent old pending hunks
@ -355,7 +354,7 @@ impl BufferDiffInner {
.cmp(&buffer_range.end, buffer)
.is_le()
}) {
old_pending_hunks.next(buffer);
old_pending_hunks.next();
}
if (stage && secondary_status == DiffHunkSecondaryStatus::NoSecondaryHunk)
@ -379,10 +378,10 @@ impl BufferDiffInner {
);
}
// append the remainder
pending_hunks.append(old_pending_hunks.suffix(buffer), buffer);
pending_hunks.append(old_pending_hunks.suffix(), buffer);
let mut unstaged_hunk_cursor = unstaged_diff.hunks.cursor::<DiffHunkSummary>(buffer);
unstaged_hunk_cursor.next(buffer);
unstaged_hunk_cursor.next();
// then, iterate over all pending hunks (both new ones and the existing ones) and compute the edits
let mut prev_unstaged_hunk_buffer_end = 0;
@ -397,8 +396,7 @@ impl BufferDiffInner {
}) = pending_hunks_iter.next()
{
// Advance unstaged_hunk_cursor to skip unstaged hunks before current hunk
let skipped_unstaged =
unstaged_hunk_cursor.slice(&buffer_range.start, Bias::Left, buffer);
let skipped_unstaged = unstaged_hunk_cursor.slice(&buffer_range.start, Bias::Left);
if let Some(unstaged_hunk) = skipped_unstaged.last() {
prev_unstaged_hunk_base_text_end = unstaged_hunk.diff_base_byte_range.end;
@ -425,7 +423,7 @@ impl BufferDiffInner {
buffer_offset_range.end =
buffer_offset_range.end.max(unstaged_hunk_offset_range.end);
unstaged_hunk_cursor.next(buffer);
unstaged_hunk_cursor.next();
continue;
}
}
@ -514,7 +512,7 @@ impl BufferDiffInner {
});
let anchor_iter = iter::from_fn(move || {
cursor.next(buffer);
cursor.next();
cursor.item()
})
.flat_map(move |hunk| {
@ -531,12 +529,12 @@ impl BufferDiffInner {
});
let mut pending_hunks_cursor = self.pending_hunks.cursor::<DiffHunkSummary>(buffer);
pending_hunks_cursor.next(buffer);
pending_hunks_cursor.next();
let mut secondary_cursor = None;
if let Some(secondary) = secondary.as_ref() {
let mut cursor = secondary.hunks.cursor::<DiffHunkSummary>(buffer);
cursor.next(buffer);
cursor.next();
secondary_cursor = Some(cursor);
}
@ -564,7 +562,7 @@ impl BufferDiffInner {
.cmp(&pending_hunks_cursor.start().buffer_range.start, buffer)
.is_gt()
{
pending_hunks_cursor.seek_forward(&start_anchor, Bias::Left, buffer);
pending_hunks_cursor.seek_forward(&start_anchor, Bias::Left);
}
if let Some(pending_hunk) = pending_hunks_cursor.item() {
@ -590,7 +588,7 @@ impl BufferDiffInner {
.cmp(&secondary_cursor.start().buffer_range.start, buffer)
.is_gt()
{
secondary_cursor.seek_forward(&start_anchor, Bias::Left, buffer);
secondary_cursor.seek_forward(&start_anchor, Bias::Left);
}
if let Some(secondary_hunk) = secondary_cursor.item() {
@ -635,7 +633,7 @@ impl BufferDiffInner {
});
iter::from_fn(move || {
cursor.prev(buffer);
cursor.prev();
let hunk = cursor.item()?;
let range = hunk.buffer_range.to_point(buffer);
@ -653,8 +651,8 @@ impl BufferDiffInner {
fn compare(&self, old: &Self, new_snapshot: &text::BufferSnapshot) -> Option<Range<Anchor>> {
let mut new_cursor = self.hunks.cursor::<()>(new_snapshot);
let mut old_cursor = old.hunks.cursor::<()>(new_snapshot);
old_cursor.next(new_snapshot);
new_cursor.next(new_snapshot);
old_cursor.next();
new_cursor.next();
let mut start = None;
let mut end = None;
@ -669,7 +667,7 @@ impl BufferDiffInner {
Ordering::Less => {
start.get_or_insert(new_hunk.buffer_range.start);
end.replace(new_hunk.buffer_range.end);
new_cursor.next(new_snapshot);
new_cursor.next();
}
Ordering::Equal => {
if new_hunk != old_hunk {
@ -686,25 +684,25 @@ impl BufferDiffInner {
}
}
new_cursor.next(new_snapshot);
old_cursor.next(new_snapshot);
new_cursor.next();
old_cursor.next();
}
Ordering::Greater => {
start.get_or_insert(old_hunk.buffer_range.start);
end.replace(old_hunk.buffer_range.end);
old_cursor.next(new_snapshot);
old_cursor.next();
}
}
}
(Some(new_hunk), None) => {
start.get_or_insert(new_hunk.buffer_range.start);
end.replace(new_hunk.buffer_range.end);
new_cursor.next(new_snapshot);
new_cursor.next();
}
(None, Some(old_hunk)) => {
start.get_or_insert(old_hunk.buffer_range.start);
end.replace(old_hunk.buffer_range.end);
old_cursor.next(new_snapshot);
old_cursor.next();
}
(None, None) => break,
}

View file

@ -333,7 +333,7 @@ impl ChannelChat {
if first_id <= message_id {
let mut cursor = chat.messages.cursor::<(ChannelMessageId, Count)>(&());
let message_id = ChannelMessageId::Saved(message_id);
cursor.seek(&message_id, Bias::Left, &());
cursor.seek(&message_id, Bias::Left);
return ControlFlow::Break(
if cursor
.item()
@ -499,7 +499,7 @@ impl ChannelChat {
pub fn message(&self, ix: usize) -> &ChannelMessage {
let mut cursor = self.messages.cursor::<Count>(&());
cursor.seek(&Count(ix), Bias::Right, &());
cursor.seek(&Count(ix), Bias::Right);
cursor.item().unwrap()
}
@ -516,13 +516,13 @@ impl ChannelChat {
pub fn messages_in_range(&self, range: Range<usize>) -> impl Iterator<Item = &ChannelMessage> {
let mut cursor = self.messages.cursor::<Count>(&());
cursor.seek(&Count(range.start), Bias::Right, &());
cursor.seek(&Count(range.start), Bias::Right);
cursor.take(range.len())
}
pub fn pending_messages(&self) -> impl Iterator<Item = &ChannelMessage> {
let mut cursor = self.messages.cursor::<ChannelMessageId>(&());
cursor.seek(&ChannelMessageId::Pending(0), Bias::Left, &());
cursor.seek(&ChannelMessageId::Pending(0), Bias::Left);
cursor
}
@ -588,9 +588,9 @@ impl ChannelChat {
.collect::<HashSet<_>>();
let mut old_cursor = self.messages.cursor::<(ChannelMessageId, Count)>(&());
let mut new_messages = old_cursor.slice(&first_message.id, Bias::Left, &());
let mut new_messages = old_cursor.slice(&first_message.id, Bias::Left);
let start_ix = old_cursor.start().1.0;
let removed_messages = old_cursor.slice(&last_message.id, Bias::Right, &());
let removed_messages = old_cursor.slice(&last_message.id, Bias::Right);
let removed_count = removed_messages.summary().count;
let new_count = messages.summary().count;
let end_ix = start_ix + removed_count;
@ -599,10 +599,10 @@ impl ChannelChat {
let mut ranges = Vec::<Range<usize>>::new();
if new_messages.last().unwrap().is_pending() {
new_messages.append(old_cursor.suffix(&()), &());
new_messages.append(old_cursor.suffix(), &());
} else {
new_messages.append(
old_cursor.slice(&ChannelMessageId::Pending(0), Bias::Left, &()),
old_cursor.slice(&ChannelMessageId::Pending(0), Bias::Left),
&(),
);
@ -617,7 +617,7 @@ impl ChannelChat {
} else {
new_messages.push(message.clone(), &());
}
old_cursor.next(&());
old_cursor.next();
}
}
@ -641,12 +641,12 @@ impl ChannelChat {
fn message_removed(&mut self, id: u64, cx: &mut Context<Self>) {
let mut cursor = self.messages.cursor::<ChannelMessageId>(&());
let mut messages = cursor.slice(&ChannelMessageId::Saved(id), Bias::Left, &());
let mut messages = cursor.slice(&ChannelMessageId::Saved(id), Bias::Left);
if let Some(item) = cursor.item() {
if item.id == ChannelMessageId::Saved(id) {
let deleted_message_ix = messages.summary().count;
cursor.next(&());
messages.append(cursor.suffix(&()), &());
cursor.next();
messages.append(cursor.suffix(), &());
drop(cursor);
self.messages = messages;
@ -680,7 +680,7 @@ impl ChannelChat {
cx: &mut Context<Self>,
) {
let mut cursor = self.messages.cursor::<ChannelMessageId>(&());
let mut messages = cursor.slice(&id, Bias::Left, &());
let mut messages = cursor.slice(&id, Bias::Left);
let ix = messages.summary().count;
if let Some(mut message_to_update) = cursor.item().cloned() {
@ -688,10 +688,10 @@ impl ChannelChat {
message_to_update.mentions = mentions;
message_to_update.edited_at = edited_at;
messages.push(message_to_update, &());
cursor.next(&());
cursor.next();
}
messages.append(cursor.suffix(&()), &());
messages.append(cursor.suffix(), &());
drop(cursor);
self.messages = messages;

View file

@ -151,6 +151,7 @@ impl Settings for ProxySettings {
pub fn init_settings(cx: &mut App) {
TelemetrySettings::register(cx);
DisableAiSettings::register(cx);
ClientSettings::register(cx);
ProxySettings::register(cx);
}
@ -548,6 +549,33 @@ impl settings::Settings for TelemetrySettings {
}
}
/// Whether to disable all AI features in Zed.
///
/// Default: false
#[derive(Copy, Clone, Debug)]
pub struct DisableAiSettings {
    // True when AI features should be turned off globally.
    pub disable_ai: bool,
}

impl settings::Settings for DisableAiSettings {
    // Read from the top-level `"disable_ai"` key in settings files.
    const KEY: Option<&'static str> = Some("disable_ai");

    type FileContent = Option<bool>;

    fn load(sources: SettingsSources<Self::FileContent>, _: &mut App) -> Result<Self> {
        Ok(Self {
            // Precedence: user setting first, then server-provided setting,
            // falling back to the shipped default (missing default is an error).
            disable_ai: sources
                .user
                .or(sources.server)
                .copied()
                .flatten()
                .unwrap_or(sources.default.ok_or_else(Self::missing_default)?),
        })
    }

    // There is no equivalent VS Code setting, so nothing is imported.
    fn import_from_vscode(_vscode: &settings::VsCodeSettings, _current: &mut Self::FileContent) {}
}
impl Client {
pub fn new(
clock: Arc<dyn SystemClock>,

View file

@ -17,7 +17,7 @@ use crate::stripe_client::{
StripeCreateCheckoutSessionSubscriptionData, StripeCreateMeterEventParams,
StripeCreateMeterEventPayload, StripeCreateSubscriptionItems, StripeCreateSubscriptionParams,
StripeCustomerId, StripeCustomerUpdate, StripeCustomerUpdateAddress, StripeCustomerUpdateName,
StripeMeter, StripePrice, StripePriceId, StripeSubscription, StripeSubscriptionId,
StripePrice, StripePriceId, StripeSubscription, StripeSubscriptionId,
StripeSubscriptionTrialSettings, StripeSubscriptionTrialSettingsEndBehavior,
StripeSubscriptionTrialSettingsEndBehaviorMissingPaymentMethod, StripeTaxIdCollection,
UpdateSubscriptionItems, UpdateSubscriptionParams,
@ -30,8 +30,6 @@ pub struct StripeBilling {
#[derive(Default)]
struct StripeBillingState {
meters_by_event_name: HashMap<String, StripeMeter>,
price_ids_by_meter_id: HashMap<String, StripePriceId>,
prices_by_lookup_key: HashMap<String, StripePrice>,
}
@ -60,24 +58,11 @@ impl StripeBilling {
let mut state = self.state.write().await;
let (meters, prices) =
futures::try_join!(self.client.list_meters(), self.client.list_prices())?;
for meter in meters {
state
.meters_by_event_name
.insert(meter.event_name.clone(), meter);
}
let prices = self.client.list_prices().await?;
for price in prices {
if let Some(lookup_key) = price.lookup_key.clone() {
state.prices_by_lookup_key.insert(lookup_key, price.clone());
}
if let Some(recurring) = price.recurring {
if let Some(meter) = recurring.meter {
state.price_ids_by_meter_id.insert(meter, price.id);
}
state.prices_by_lookup_key.insert(lookup_key, price);
}
}

View file

@ -6,6 +6,7 @@ mod sign_in;
use crate::sign_in::initiate_sign_in_within_workspace;
use ::fs::Fs;
use anyhow::{Context as _, Result, anyhow};
use client::DisableAiSettings;
use collections::{HashMap, HashSet};
use command_palette_hooks::CommandPaletteFilter;
use futures::{Future, FutureExt, TryFutureExt, channel::oneshot, future::Shared};
@ -25,6 +26,7 @@ use node_runtime::NodeRuntime;
use parking_lot::Mutex;
use request::StatusNotification;
use serde_json::json;
use settings::Settings;
use settings::SettingsStore;
use sign_in::{reinstall_and_sign_in_within_workspace, sign_out_within_workspace};
use std::collections::hash_map::Entry;
@ -93,8 +95,15 @@ pub fn init(
let copilot_auth_action_types = [TypeId::of::<SignOut>()];
let copilot_no_auth_action_types = [TypeId::of::<SignIn>()];
let status = handle.read(cx).status();
let is_ai_disabled = DisableAiSettings::get_global(cx).disable_ai;
let filter = CommandPaletteFilter::global_mut(cx);
if is_ai_disabled {
filter.hide_action_types(&copilot_action_types);
filter.hide_action_types(&copilot_auth_action_types);
filter.hide_action_types(&copilot_no_auth_action_types);
} else {
match status {
Status::Disabled => {
filter.hide_action_types(&copilot_action_types);
@ -115,6 +124,7 @@ pub fn init(
filter.show_action_types(copilot_no_auth_action_types.iter());
}
}
}
})
.detach();

View file

@ -524,10 +524,10 @@ impl BlockMap {
// * Isomorphic transforms that end *at* the start of the edit
// * Below blocks that end at the start of the edit
// However, if we hit a replace block that ends at the start of the edit we want to reconstruct it.
new_transforms.append(cursor.slice(&old_start, Bias::Left, &()), &());
new_transforms.append(cursor.slice(&old_start, Bias::Left), &());
if let Some(transform) = cursor.item() {
if transform.summary.input_rows > 0
&& cursor.end(&()) == old_start
&& cursor.end() == old_start
&& transform
.block
.as_ref()
@ -535,13 +535,13 @@ impl BlockMap {
{
// Preserve the transform (push and next)
new_transforms.push(transform.clone(), &());
cursor.next(&());
cursor.next();
// Preserve below blocks at end of edit
while let Some(transform) = cursor.item() {
if transform.block.as_ref().map_or(false, |b| b.place_below()) {
new_transforms.push(transform.clone(), &());
cursor.next(&());
cursor.next();
} else {
break;
}
@ -579,8 +579,8 @@ impl BlockMap {
let mut new_end = WrapRow(edit.new.end);
loop {
// Seek to the transform starting at or after the end of the edit
cursor.seek(&old_end, Bias::Left, &());
cursor.next(&());
cursor.seek(&old_end, Bias::Left);
cursor.next();
// Extend edit to the end of the discarded transform so it is reconstructed in full
let transform_rows_after_edit = cursor.start().0 - old_end.0;
@ -592,8 +592,8 @@ impl BlockMap {
if next_edit.old.start <= cursor.start().0 {
old_end = WrapRow(next_edit.old.end);
new_end = WrapRow(next_edit.new.end);
cursor.seek(&old_end, Bias::Left, &());
cursor.next(&());
cursor.seek(&old_end, Bias::Left);
cursor.next();
edits.next();
} else {
break;
@ -608,7 +608,7 @@ impl BlockMap {
// Discard below blocks at the end of the edit. They'll be reconstructed.
while let Some(transform) = cursor.item() {
if transform.block.as_ref().map_or(false, |b| b.place_below()) {
cursor.next(&());
cursor.next();
} else {
break;
}
@ -720,7 +720,7 @@ impl BlockMap {
push_isomorphic(&mut new_transforms, rows_after_last_block, wrap_snapshot);
}
new_transforms.append(cursor.suffix(&()), &());
new_transforms.append(cursor.suffix(), &());
debug_assert_eq!(
new_transforms.summary().input_rows,
wrap_snapshot.max_point().row() + 1
@ -971,7 +971,7 @@ impl BlockMapReader<'_> {
);
let mut cursor = self.transforms.cursor::<(WrapRow, BlockRow)>(&());
cursor.seek(&start_wrap_row, Bias::Left, &());
cursor.seek(&start_wrap_row, Bias::Left);
while let Some(transform) = cursor.item() {
if cursor.start().0 > end_wrap_row {
break;
@ -982,7 +982,7 @@ impl BlockMapReader<'_> {
return Some(cursor.start().1);
}
}
cursor.next(&());
cursor.next();
}
None
@ -1293,7 +1293,7 @@ impl BlockSnapshot {
let max_output_row = cmp::min(rows.end, self.transforms.summary().output_rows);
let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(&());
cursor.seek(&BlockRow(rows.start), Bias::Right, &());
cursor.seek(&BlockRow(rows.start), Bias::Right);
let transform_output_start = cursor.start().0.0;
let transform_input_start = cursor.start().1.0;
@ -1325,7 +1325,7 @@ impl BlockSnapshot {
pub(super) fn row_infos(&self, start_row: BlockRow) -> BlockRows<'_> {
let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(&());
cursor.seek(&start_row, Bias::Right, &());
cursor.seek(&start_row, Bias::Right);
let (output_start, input_start) = cursor.start();
let overshoot = if cursor
.item()
@ -1346,9 +1346,9 @@ impl BlockSnapshot {
pub fn blocks_in_range(&self, rows: Range<u32>) -> impl Iterator<Item = (u32, &Block)> {
let mut cursor = self.transforms.cursor::<BlockRow>(&());
cursor.seek(&BlockRow(rows.start), Bias::Left, &());
while cursor.start().0 < rows.start && cursor.end(&()).0 <= rows.start {
cursor.next(&());
cursor.seek(&BlockRow(rows.start), Bias::Left);
while cursor.start().0 < rows.start && cursor.end().0 <= rows.start {
cursor.next();
}
std::iter::from_fn(move || {
@ -1364,10 +1364,10 @@ impl BlockSnapshot {
break;
}
if let Some(block) = &transform.block {
cursor.next(&());
cursor.next();
return Some((start_row, block));
} else {
cursor.next(&());
cursor.next();
}
}
None
@ -1377,7 +1377,7 @@ impl BlockSnapshot {
pub fn sticky_header_excerpt(&self, position: f32) -> Option<StickyHeaderExcerpt<'_>> {
let top_row = position as u32;
let mut cursor = self.transforms.cursor::<BlockRow>(&());
cursor.seek(&BlockRow(top_row), Bias::Right, &());
cursor.seek(&BlockRow(top_row), Bias::Right);
while let Some(transform) = cursor.item() {
match &transform.block {
@ -1386,7 +1386,7 @@ impl BlockSnapshot {
}
Some(block) if block.is_buffer_header() => return None,
_ => {
cursor.prev(&());
cursor.prev();
continue;
}
}
@ -1414,7 +1414,7 @@ impl BlockSnapshot {
let wrap_row = WrapRow(wrap_point.row());
let mut cursor = self.transforms.cursor::<WrapRow>(&());
cursor.seek(&wrap_row, Bias::Left, &());
cursor.seek(&wrap_row, Bias::Left);
while let Some(transform) = cursor.item() {
if let Some(block) = transform.block.as_ref() {
@ -1425,7 +1425,7 @@ impl BlockSnapshot {
break;
}
cursor.next(&());
cursor.next();
}
None
@ -1442,7 +1442,7 @@ impl BlockSnapshot {
pub fn longest_row_in_range(&self, range: Range<BlockRow>) -> BlockRow {
let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(&());
cursor.seek(&range.start, Bias::Right, &());
cursor.seek(&range.start, Bias::Right);
let mut longest_row = range.start;
let mut longest_row_chars = 0;
@ -1453,7 +1453,7 @@ impl BlockSnapshot {
let wrap_start_row = input_start.0 + overshoot;
let wrap_end_row = cmp::min(
input_start.0 + (range.end.0 - output_start.0),
cursor.end(&()).1.0,
cursor.end().1.0,
);
let summary = self
.wrap_snapshot
@ -1461,12 +1461,12 @@ impl BlockSnapshot {
longest_row = BlockRow(range.start.0 + summary.longest_row);
longest_row_chars = summary.longest_row_chars;
}
cursor.next(&());
cursor.next();
}
let cursor_start_row = cursor.start().0;
if range.end > cursor_start_row {
let summary = cursor.summary::<_, TransformSummary>(&range.end, Bias::Right, &());
let summary = cursor.summary::<_, TransformSummary>(&range.end, Bias::Right);
if summary.longest_row_chars > longest_row_chars {
longest_row = BlockRow(cursor_start_row.0 + summary.longest_row);
longest_row_chars = summary.longest_row_chars;
@ -1493,7 +1493,7 @@ impl BlockSnapshot {
pub(super) fn line_len(&self, row: BlockRow) -> u32 {
let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(&());
cursor.seek(&BlockRow(row.0), Bias::Right, &());
cursor.seek(&BlockRow(row.0), Bias::Right);
if let Some(transform) = cursor.item() {
let (output_start, input_start) = cursor.start();
let overshoot = row.0 - output_start.0;
@ -1511,13 +1511,13 @@ impl BlockSnapshot {
pub(super) fn is_block_line(&self, row: BlockRow) -> bool {
let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(&());
cursor.seek(&row, Bias::Right, &());
cursor.seek(&row, Bias::Right);
cursor.item().map_or(false, |t| t.block.is_some())
}
pub(super) fn is_folded_buffer_header(&self, row: BlockRow) -> bool {
let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(&());
cursor.seek(&row, Bias::Right, &());
cursor.seek(&row, Bias::Right);
let Some(transform) = cursor.item() else {
return false;
};
@ -1529,7 +1529,7 @@ impl BlockSnapshot {
.wrap_snapshot
.make_wrap_point(Point::new(row.0, 0), Bias::Left);
let mut cursor = self.transforms.cursor::<(WrapRow, BlockRow)>(&());
cursor.seek(&WrapRow(wrap_point.row()), Bias::Right, &());
cursor.seek(&WrapRow(wrap_point.row()), Bias::Right);
cursor.item().map_or(false, |transform| {
transform
.block
@ -1540,17 +1540,17 @@ impl BlockSnapshot {
pub fn clip_point(&self, point: BlockPoint, bias: Bias) -> BlockPoint {
let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(&());
cursor.seek(&BlockRow(point.row), Bias::Right, &());
cursor.seek(&BlockRow(point.row), Bias::Right);
let max_input_row = WrapRow(self.transforms.summary().input_rows);
let mut search_left =
(bias == Bias::Left && cursor.start().1.0 > 0) || cursor.end(&()).1 == max_input_row;
(bias == Bias::Left && cursor.start().1.0 > 0) || cursor.end().1 == max_input_row;
let mut reversed = false;
loop {
if let Some(transform) = cursor.item() {
let (output_start_row, input_start_row) = cursor.start();
let (output_end_row, input_end_row) = cursor.end(&());
let (output_end_row, input_end_row) = cursor.end();
let output_start = Point::new(output_start_row.0, 0);
let input_start = Point::new(input_start_row.0, 0);
let input_end = Point::new(input_end_row.0, 0);
@ -1584,23 +1584,23 @@ impl BlockSnapshot {
}
if search_left {
cursor.prev(&());
cursor.prev();
} else {
cursor.next(&());
cursor.next();
}
} else if reversed {
return self.max_point();
} else {
reversed = true;
search_left = !search_left;
cursor.seek(&BlockRow(point.row), Bias::Right, &());
cursor.seek(&BlockRow(point.row), Bias::Right);
}
}
}
pub fn to_block_point(&self, wrap_point: WrapPoint) -> BlockPoint {
let mut cursor = self.transforms.cursor::<(WrapRow, BlockRow)>(&());
cursor.seek(&WrapRow(wrap_point.row()), Bias::Right, &());
cursor.seek(&WrapRow(wrap_point.row()), Bias::Right);
if let Some(transform) = cursor.item() {
if transform.block.is_some() {
BlockPoint::new(cursor.start().1.0, 0)
@ -1618,7 +1618,7 @@ impl BlockSnapshot {
pub fn to_wrap_point(&self, block_point: BlockPoint, bias: Bias) -> WrapPoint {
let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(&());
cursor.seek(&BlockRow(block_point.row), Bias::Right, &());
cursor.seek(&BlockRow(block_point.row), Bias::Right);
if let Some(transform) = cursor.item() {
match transform.block.as_ref() {
Some(block) => {
@ -1630,7 +1630,7 @@ impl BlockSnapshot {
} else if bias == Bias::Left {
WrapPoint::new(cursor.start().1.0, 0)
} else {
let wrap_row = cursor.end(&()).1.0 - 1;
let wrap_row = cursor.end().1.0 - 1;
WrapPoint::new(wrap_row, self.wrap_snapshot.line_len(wrap_row))
}
}
@ -1650,14 +1650,14 @@ impl BlockChunks<'_> {
/// Go to the next transform
fn advance(&mut self) {
self.input_chunk = Chunk::default();
self.transforms.next(&());
self.transforms.next();
while let Some(transform) = self.transforms.item() {
if transform
.block
.as_ref()
.map_or(false, |block| block.height() == 0)
{
self.transforms.next(&());
self.transforms.next();
} else {
break;
}
@ -1672,7 +1672,7 @@ impl BlockChunks<'_> {
let start_output_row = self.transforms.start().0.0;
if start_output_row < self.max_output_row {
let end_input_row = cmp::min(
self.transforms.end(&()).1.0,
self.transforms.end().1.0,
start_input_row + (self.max_output_row - start_output_row),
);
self.input_chunks.seek(start_input_row..end_input_row);
@ -1696,7 +1696,7 @@ impl<'a> Iterator for BlockChunks<'a> {
let transform = self.transforms.item()?;
if transform.block.is_some() {
let block_start = self.transforms.start().0.0;
let mut block_end = self.transforms.end(&()).0.0;
let mut block_end = self.transforms.end().0.0;
self.advance();
if self.transforms.item().is_none() {
block_end -= 1;
@ -1731,7 +1731,7 @@ impl<'a> Iterator for BlockChunks<'a> {
}
}
let transform_end = self.transforms.end(&()).0.0;
let transform_end = self.transforms.end().0.0;
let (prefix_rows, prefix_bytes) =
offset_for_row(self.input_chunk.text, transform_end - self.output_row);
self.output_row += prefix_rows;
@ -1770,15 +1770,15 @@ impl Iterator for BlockRows<'_> {
self.started = true;
}
if self.output_row.0 >= self.transforms.end(&()).0.0 {
self.transforms.next(&());
if self.output_row.0 >= self.transforms.end().0.0 {
self.transforms.next();
while let Some(transform) = self.transforms.item() {
if transform
.block
.as_ref()
.map_or(false, |block| block.height() == 0)
{
self.transforms.next(&());
self.transforms.next();
} else {
break;
}

View file

@ -52,15 +52,15 @@ impl CreaseSnapshot {
) -> Option<&'a Crease<Anchor>> {
let start = snapshot.anchor_before(Point::new(row.0, 0));
let mut cursor = self.creases.cursor::<ItemSummary>(snapshot);
cursor.seek(&start, Bias::Left, snapshot);
cursor.seek(&start, Bias::Left);
while let Some(item) = cursor.item() {
match Ord::cmp(&item.crease.range().start.to_point(snapshot).row, &row.0) {
Ordering::Less => cursor.next(snapshot),
Ordering::Less => cursor.next(),
Ordering::Equal => {
if item.crease.range().start.is_valid(snapshot) {
return Some(&item.crease);
} else {
cursor.next(snapshot);
cursor.next();
}
}
Ordering::Greater => break,
@ -76,11 +76,11 @@ impl CreaseSnapshot {
) -> impl 'a + Iterator<Item = &'a Crease<Anchor>> {
let start = snapshot.anchor_before(Point::new(range.start.0, 0));
let mut cursor = self.creases.cursor::<ItemSummary>(snapshot);
cursor.seek(&start, Bias::Left, snapshot);
cursor.seek(&start, Bias::Left);
std::iter::from_fn(move || {
while let Some(item) = cursor.item() {
cursor.next(snapshot);
cursor.next();
let crease_range = item.crease.range();
let crease_start = crease_range.start.to_point(snapshot);
let crease_end = crease_range.end.to_point(snapshot);
@ -102,13 +102,13 @@ impl CreaseSnapshot {
let mut cursor = self.creases.cursor::<ItemSummary>(snapshot);
let mut results = Vec::new();
cursor.next(snapshot);
cursor.next();
while let Some(item) = cursor.item() {
let crease_range = item.crease.range();
let start_point = crease_range.start.to_point(snapshot);
let end_point = crease_range.end.to_point(snapshot);
results.push((item.id, start_point..end_point));
cursor.next(snapshot);
cursor.next();
}
results
@ -298,7 +298,7 @@ impl CreaseMap {
let mut cursor = self.snapshot.creases.cursor::<ItemSummary>(snapshot);
for crease in creases {
let crease_range = crease.range().clone();
new_creases.append(cursor.slice(&crease_range, Bias::Left, snapshot), snapshot);
new_creases.append(cursor.slice(&crease_range, Bias::Left), snapshot);
let id = self.next_id;
self.next_id.0 += 1;
@ -306,7 +306,7 @@ impl CreaseMap {
new_creases.push(CreaseItem { crease, id }, snapshot);
new_ids.push(id);
}
new_creases.append(cursor.suffix(snapshot), snapshot);
new_creases.append(cursor.suffix(), snapshot);
new_creases
};
new_ids
@ -332,9 +332,9 @@ impl CreaseMap {
let mut cursor = self.snapshot.creases.cursor::<ItemSummary>(snapshot);
for (id, range) in &removals {
new_creases.append(cursor.slice(range, Bias::Left, snapshot), snapshot);
new_creases.append(cursor.slice(range, Bias::Left), snapshot);
while let Some(item) = cursor.item() {
cursor.next(snapshot);
cursor.next();
if item.id == *id {
break;
} else {
@ -343,7 +343,7 @@ impl CreaseMap {
}
}
new_creases.append(cursor.suffix(snapshot), snapshot);
new_creases.append(cursor.suffix(), snapshot);
new_creases
};

View file

@ -99,7 +99,7 @@ impl FoldPoint {
pub fn to_inlay_point(self, snapshot: &FoldSnapshot) -> InlayPoint {
let mut cursor = snapshot.transforms.cursor::<(FoldPoint, InlayPoint)>(&());
cursor.seek(&self, Bias::Right, &());
cursor.seek(&self, Bias::Right);
let overshoot = self.0 - cursor.start().0.0;
InlayPoint(cursor.start().1.0 + overshoot)
}
@ -108,7 +108,7 @@ impl FoldPoint {
let mut cursor = snapshot
.transforms
.cursor::<(FoldPoint, TransformSummary)>(&());
cursor.seek(&self, Bias::Right, &());
cursor.seek(&self, Bias::Right);
let overshoot = self.0 - cursor.start().1.output.lines;
let mut offset = cursor.start().1.output.len;
if !overshoot.is_zero() {
@ -187,10 +187,10 @@ impl FoldMapWriter<'_> {
width: None,
},
);
new_tree.append(cursor.slice(&fold.range, Bias::Right, buffer), buffer);
new_tree.append(cursor.slice(&fold.range, Bias::Right), buffer);
new_tree.push(fold, buffer);
}
new_tree.append(cursor.suffix(buffer), buffer);
new_tree.append(cursor.suffix(), buffer);
new_tree
};
@ -252,7 +252,7 @@ impl FoldMapWriter<'_> {
fold_ixs_to_delete.push(*folds_cursor.start());
self.0.snapshot.fold_metadata_by_id.remove(&fold.id);
}
folds_cursor.next(buffer);
folds_cursor.next();
}
}
@ -263,10 +263,10 @@ impl FoldMapWriter<'_> {
let mut cursor = self.0.snapshot.folds.cursor::<usize>(buffer);
let mut folds = SumTree::new(buffer);
for fold_ix in fold_ixs_to_delete {
folds.append(cursor.slice(&fold_ix, Bias::Right, buffer), buffer);
cursor.next(buffer);
folds.append(cursor.slice(&fold_ix, Bias::Right), buffer);
cursor.next();
}
folds.append(cursor.suffix(buffer), buffer);
folds.append(cursor.suffix(), buffer);
folds
};
@ -412,7 +412,7 @@ impl FoldMap {
let mut new_transforms = SumTree::<Transform>::default();
let mut cursor = self.snapshot.transforms.cursor::<InlayOffset>(&());
cursor.seek(&InlayOffset(0), Bias::Right, &());
cursor.seek(&InlayOffset(0), Bias::Right);
while let Some(mut edit) = inlay_edits_iter.next() {
if let Some(item) = cursor.item() {
@ -421,19 +421,19 @@ impl FoldMap {
|transform| {
if !transform.is_fold() {
transform.summary.add_summary(&item.summary, &());
cursor.next(&());
cursor.next();
}
},
&(),
);
}
}
new_transforms.append(cursor.slice(&edit.old.start, Bias::Left, &()), &());
new_transforms.append(cursor.slice(&edit.old.start, Bias::Left), &());
edit.new.start -= edit.old.start - *cursor.start();
edit.old.start = *cursor.start();
cursor.seek(&edit.old.end, Bias::Right, &());
cursor.next(&());
cursor.seek(&edit.old.end, Bias::Right);
cursor.next();
let mut delta = edit.new_len().0 as isize - edit.old_len().0 as isize;
loop {
@ -449,8 +449,8 @@ impl FoldMap {
if next_edit.old.end >= edit.old.end {
edit.old.end = next_edit.old.end;
cursor.seek(&edit.old.end, Bias::Right, &());
cursor.next(&());
cursor.seek(&edit.old.end, Bias::Right);
cursor.next();
}
} else {
break;
@ -467,11 +467,7 @@ impl FoldMap {
.snapshot
.folds
.cursor::<FoldRange>(&inlay_snapshot.buffer);
folds_cursor.seek(
&FoldRange(anchor..Anchor::max()),
Bias::Left,
&inlay_snapshot.buffer,
);
folds_cursor.seek(&FoldRange(anchor..Anchor::max()), Bias::Left);
let mut folds = iter::from_fn({
let inlay_snapshot = &inlay_snapshot;
@ -485,7 +481,7 @@ impl FoldMap {
..inlay_snapshot.to_inlay_offset(buffer_end),
)
});
folds_cursor.next(&inlay_snapshot.buffer);
folds_cursor.next();
item
}
})
@ -558,7 +554,7 @@ impl FoldMap {
}
}
new_transforms.append(cursor.suffix(&()), &());
new_transforms.append(cursor.suffix(), &());
if new_transforms.is_empty() {
let text_summary = inlay_snapshot.text_summary();
push_isomorphic(&mut new_transforms, text_summary);
@ -575,31 +571,31 @@ impl FoldMap {
let mut new_transforms = new_transforms.cursor::<(InlayOffset, FoldOffset)>(&());
for mut edit in inlay_edits {
old_transforms.seek(&edit.old.start, Bias::Left, &());
old_transforms.seek(&edit.old.start, Bias::Left);
if old_transforms.item().map_or(false, |t| t.is_fold()) {
edit.old.start = old_transforms.start().0;
}
let old_start =
old_transforms.start().1.0 + (edit.old.start - old_transforms.start().0).0;
old_transforms.seek_forward(&edit.old.end, Bias::Right, &());
old_transforms.seek_forward(&edit.old.end, Bias::Right);
if old_transforms.item().map_or(false, |t| t.is_fold()) {
old_transforms.next(&());
old_transforms.next();
edit.old.end = old_transforms.start().0;
}
let old_end =
old_transforms.start().1.0 + (edit.old.end - old_transforms.start().0).0;
new_transforms.seek(&edit.new.start, Bias::Left, &());
new_transforms.seek(&edit.new.start, Bias::Left);
if new_transforms.item().map_or(false, |t| t.is_fold()) {
edit.new.start = new_transforms.start().0;
}
let new_start =
new_transforms.start().1.0 + (edit.new.start - new_transforms.start().0).0;
new_transforms.seek_forward(&edit.new.end, Bias::Right, &());
new_transforms.seek_forward(&edit.new.end, Bias::Right);
if new_transforms.item().map_or(false, |t| t.is_fold()) {
new_transforms.next(&());
new_transforms.next();
edit.new.end = new_transforms.start().0;
}
let new_end =
@ -656,10 +652,10 @@ impl FoldSnapshot {
let mut summary = TextSummary::default();
let mut cursor = self.transforms.cursor::<(FoldPoint, InlayPoint)>(&());
cursor.seek(&range.start, Bias::Right, &());
cursor.seek(&range.start, Bias::Right);
if let Some(transform) = cursor.item() {
let start_in_transform = range.start.0 - cursor.start().0.0;
let end_in_transform = cmp::min(range.end, cursor.end(&()).0).0 - cursor.start().0.0;
let end_in_transform = cmp::min(range.end, cursor.end().0).0 - cursor.start().0.0;
if let Some(placeholder) = transform.placeholder.as_ref() {
summary = TextSummary::from(
&placeholder.text
@ -678,10 +674,10 @@ impl FoldSnapshot {
}
}
if range.end > cursor.end(&()).0 {
cursor.next(&());
if range.end > cursor.end().0 {
cursor.next();
summary += &cursor
.summary::<_, TransformSummary>(&range.end, Bias::Right, &())
.summary::<_, TransformSummary>(&range.end, Bias::Right)
.output;
if let Some(transform) = cursor.item() {
let end_in_transform = range.end.0 - cursor.start().0.0;
@ -705,19 +701,16 @@ impl FoldSnapshot {
pub fn to_fold_point(&self, point: InlayPoint, bias: Bias) -> FoldPoint {
let mut cursor = self.transforms.cursor::<(InlayPoint, FoldPoint)>(&());
cursor.seek(&point, Bias::Right, &());
cursor.seek(&point, Bias::Right);
if cursor.item().map_or(false, |t| t.is_fold()) {
if bias == Bias::Left || point == cursor.start().0 {
cursor.start().1
} else {
cursor.end(&()).1
cursor.end().1
}
} else {
let overshoot = point.0 - cursor.start().0.0;
FoldPoint(cmp::min(
cursor.start().1.0 + overshoot,
cursor.end(&()).1.0,
))
FoldPoint(cmp::min(cursor.start().1.0 + overshoot, cursor.end().1.0))
}
}
@ -742,7 +735,7 @@ impl FoldSnapshot {
let fold_point = FoldPoint::new(start_row, 0);
let mut cursor = self.transforms.cursor::<(FoldPoint, InlayPoint)>(&());
cursor.seek(&fold_point, Bias::Left, &());
cursor.seek(&fold_point, Bias::Left);
let overshoot = fold_point.0 - cursor.start().0.0;
let inlay_point = InlayPoint(cursor.start().1.0 + overshoot);
@ -773,7 +766,7 @@ impl FoldSnapshot {
let mut folds = intersecting_folds(&self.inlay_snapshot, &self.folds, range, false);
iter::from_fn(move || {
let item = folds.item();
folds.next(&self.inlay_snapshot.buffer);
folds.next();
item
})
}
@ -785,7 +778,7 @@ impl FoldSnapshot {
let buffer_offset = offset.to_offset(&self.inlay_snapshot.buffer);
let inlay_offset = self.inlay_snapshot.to_inlay_offset(buffer_offset);
let mut cursor = self.transforms.cursor::<InlayOffset>(&());
cursor.seek(&inlay_offset, Bias::Right, &());
cursor.seek(&inlay_offset, Bias::Right);
cursor.item().map_or(false, |t| t.placeholder.is_some())
}
@ -794,7 +787,7 @@ impl FoldSnapshot {
.inlay_snapshot
.to_inlay_point(Point::new(buffer_row.0, 0));
let mut cursor = self.transforms.cursor::<InlayPoint>(&());
cursor.seek(&inlay_point, Bias::Right, &());
cursor.seek(&inlay_point, Bias::Right);
loop {
match cursor.item() {
Some(transform) => {
@ -808,11 +801,11 @@ impl FoldSnapshot {
None => return false,
}
if cursor.end(&()).row() == inlay_point.row() {
cursor.next(&());
if cursor.end().row() == inlay_point.row() {
cursor.next();
} else {
inlay_point.0 += Point::new(1, 0);
cursor.seek(&inlay_point, Bias::Right, &());
cursor.seek(&inlay_point, Bias::Right);
}
}
}
@ -824,14 +817,14 @@ impl FoldSnapshot {
highlights: Highlights<'a>,
) -> FoldChunks<'a> {
let mut transform_cursor = self.transforms.cursor::<(FoldOffset, InlayOffset)>(&());
transform_cursor.seek(&range.start, Bias::Right, &());
transform_cursor.seek(&range.start, Bias::Right);
let inlay_start = {
let overshoot = range.start.0 - transform_cursor.start().0.0;
transform_cursor.start().1 + InlayOffset(overshoot)
};
let transform_end = transform_cursor.end(&());
let transform_end = transform_cursor.end();
let inlay_end = if transform_cursor
.item()
@ -879,14 +872,14 @@ impl FoldSnapshot {
pub fn clip_point(&self, point: FoldPoint, bias: Bias) -> FoldPoint {
let mut cursor = self.transforms.cursor::<(FoldPoint, InlayPoint)>(&());
cursor.seek(&point, Bias::Right, &());
cursor.seek(&point, Bias::Right);
if let Some(transform) = cursor.item() {
let transform_start = cursor.start().0.0;
if transform.placeholder.is_some() {
if point.0 == transform_start || matches!(bias, Bias::Left) {
FoldPoint(transform_start)
} else {
FoldPoint(cursor.end(&()).0.0)
FoldPoint(cursor.end().0.0)
}
} else {
let overshoot = InlayPoint(point.0 - transform_start);
@ -945,7 +938,7 @@ fn intersecting_folds<'a>(
start_cmp == Ordering::Less && end_cmp == Ordering::Greater
}
});
cursor.next(buffer);
cursor.next();
cursor
}
@ -1211,7 +1204,7 @@ pub struct FoldRows<'a> {
impl FoldRows<'_> {
pub(crate) fn seek(&mut self, row: u32) {
let fold_point = FoldPoint::new(row, 0);
self.cursor.seek(&fold_point, Bias::Left, &());
self.cursor.seek(&fold_point, Bias::Left);
let overshoot = fold_point.0 - self.cursor.start().0.0;
let inlay_point = InlayPoint(self.cursor.start().1.0 + overshoot);
self.input_rows.seek(inlay_point.row());
@ -1224,8 +1217,8 @@ impl Iterator for FoldRows<'_> {
fn next(&mut self) -> Option<Self::Item> {
let mut traversed_fold = false;
while self.fold_point > self.cursor.end(&()).0 {
self.cursor.next(&());
while self.fold_point > self.cursor.end().0 {
self.cursor.next();
traversed_fold = true;
if self.cursor.item().is_none() {
break;
@ -1330,14 +1323,14 @@ pub struct FoldChunks<'a> {
impl FoldChunks<'_> {
pub(crate) fn seek(&mut self, range: Range<FoldOffset>) {
self.transform_cursor.seek(&range.start, Bias::Right, &());
self.transform_cursor.seek(&range.start, Bias::Right);
let inlay_start = {
let overshoot = range.start.0 - self.transform_cursor.start().0.0;
self.transform_cursor.start().1 + InlayOffset(overshoot)
};
let transform_end = self.transform_cursor.end(&());
let transform_end = self.transform_cursor.end();
let inlay_end = if self
.transform_cursor
@ -1376,10 +1369,10 @@ impl<'a> Iterator for FoldChunks<'a> {
self.inlay_chunk.take();
self.inlay_offset += InlayOffset(transform.summary.input.len);
while self.inlay_offset >= self.transform_cursor.end(&()).1
while self.inlay_offset >= self.transform_cursor.end().1
&& self.transform_cursor.item().is_some()
{
self.transform_cursor.next(&());
self.transform_cursor.next();
}
self.output_offset.0 += placeholder.text.len();
@ -1396,7 +1389,7 @@ impl<'a> Iterator for FoldChunks<'a> {
&& self.inlay_chunks.offset() != self.inlay_offset
{
let transform_start = self.transform_cursor.start();
let transform_end = self.transform_cursor.end(&());
let transform_end = self.transform_cursor.end();
let inlay_end = if self.max_output_offset < transform_end.0 {
let overshoot = self.max_output_offset.0 - transform_start.0.0;
transform_start.1 + InlayOffset(overshoot)
@ -1417,14 +1410,14 @@ impl<'a> Iterator for FoldChunks<'a> {
if let Some((buffer_chunk_start, mut inlay_chunk)) = self.inlay_chunk.clone() {
let chunk = &mut inlay_chunk.chunk;
let buffer_chunk_end = buffer_chunk_start + InlayOffset(chunk.text.len());
let transform_end = self.transform_cursor.end(&()).1;
let transform_end = self.transform_cursor.end().1;
let chunk_end = buffer_chunk_end.min(transform_end);
chunk.text = &chunk.text
[(self.inlay_offset - buffer_chunk_start).0..(chunk_end - buffer_chunk_start).0];
if chunk_end == transform_end {
self.transform_cursor.next(&());
self.transform_cursor.next();
} else if chunk_end == buffer_chunk_end {
self.inlay_chunk.take();
}
@ -1456,7 +1449,7 @@ impl FoldOffset {
let mut cursor = snapshot
.transforms
.cursor::<(FoldOffset, TransformSummary)>(&());
cursor.seek(&self, Bias::Right, &());
cursor.seek(&self, Bias::Right);
let overshoot = if cursor.item().map_or(true, |t| t.is_fold()) {
Point::new(0, (self.0 - cursor.start().0.0) as u32)
} else {
@ -1470,7 +1463,7 @@ impl FoldOffset {
#[cfg(test)]
pub fn to_inlay_offset(self, snapshot: &FoldSnapshot) -> InlayOffset {
let mut cursor = snapshot.transforms.cursor::<(FoldOffset, InlayOffset)>(&());
cursor.seek(&self, Bias::Right, &());
cursor.seek(&self, Bias::Right);
let overshoot = self.0 - cursor.start().0.0;
InlayOffset(cursor.start().1.0 + overshoot)
}

View file

@ -263,7 +263,7 @@ pub struct InlayChunk<'a> {
impl InlayChunks<'_> {
pub fn seek(&mut self, new_range: Range<InlayOffset>) {
self.transforms.seek(&new_range.start, Bias::Right, &());
self.transforms.seek(&new_range.start, Bias::Right);
let buffer_range = self.snapshot.to_buffer_offset(new_range.start)
..self.snapshot.to_buffer_offset(new_range.end);
@ -296,12 +296,12 @@ impl<'a> Iterator for InlayChunks<'a> {
*chunk = self.buffer_chunks.next().unwrap();
}
let desired_bytes = self.transforms.end(&()).0.0 - self.output_offset.0;
let desired_bytes = self.transforms.end().0.0 - self.output_offset.0;
// If we're already at the transform boundary, skip to the next transform
if desired_bytes == 0 {
self.inlay_chunks = None;
self.transforms.next(&());
self.transforms.next();
return self.next();
}
@ -397,7 +397,7 @@ impl<'a> Iterator for InlayChunks<'a> {
let inlay_chunks = self.inlay_chunks.get_or_insert_with(|| {
let start = offset_in_inlay;
let end = cmp::min(self.max_output_offset, self.transforms.end(&()).0)
let end = cmp::min(self.max_output_offset, self.transforms.end().0)
- self.transforms.start().0;
inlay.text.chunks_in_range(start.0..end.0)
});
@ -441,9 +441,9 @@ impl<'a> Iterator for InlayChunks<'a> {
}
};
if self.output_offset >= self.transforms.end(&()).0 {
if self.output_offset >= self.transforms.end().0 {
self.inlay_chunks = None;
self.transforms.next(&());
self.transforms.next();
}
Some(chunk)
@ -453,7 +453,7 @@ impl<'a> Iterator for InlayChunks<'a> {
impl InlayBufferRows<'_> {
pub fn seek(&mut self, row: u32) {
let inlay_point = InlayPoint::new(row, 0);
self.transforms.seek(&inlay_point, Bias::Left, &());
self.transforms.seek(&inlay_point, Bias::Left);
let mut buffer_point = self.transforms.start().1;
let buffer_row = MultiBufferRow(if row == 0 {
@ -487,7 +487,7 @@ impl Iterator for InlayBufferRows<'_> {
self.inlay_row += 1;
self.transforms
.seek_forward(&InlayPoint::new(self.inlay_row, 0), Bias::Left, &());
.seek_forward(&InlayPoint::new(self.inlay_row, 0), Bias::Left);
Some(buffer_row)
}
@ -556,18 +556,18 @@ impl InlayMap {
let mut cursor = snapshot.transforms.cursor::<(usize, InlayOffset)>(&());
let mut buffer_edits_iter = buffer_edits.iter().peekable();
while let Some(buffer_edit) = buffer_edits_iter.next() {
new_transforms.append(cursor.slice(&buffer_edit.old.start, Bias::Left, &()), &());
new_transforms.append(cursor.slice(&buffer_edit.old.start, Bias::Left), &());
if let Some(Transform::Isomorphic(transform)) = cursor.item() {
if cursor.end(&()).0 == buffer_edit.old.start {
if cursor.end().0 == buffer_edit.old.start {
push_isomorphic(&mut new_transforms, *transform);
cursor.next(&());
cursor.next();
}
}
// Remove all the inlays and transforms contained by the edit.
let old_start =
cursor.start().1 + InlayOffset(buffer_edit.old.start - cursor.start().0);
cursor.seek(&buffer_edit.old.end, Bias::Right, &());
cursor.seek(&buffer_edit.old.end, Bias::Right);
let old_end =
cursor.start().1 + InlayOffset(buffer_edit.old.end - cursor.start().0);
@ -625,20 +625,20 @@ impl InlayMap {
// we can push its remainder.
if buffer_edits_iter
.peek()
.map_or(true, |edit| edit.old.start >= cursor.end(&()).0)
.map_or(true, |edit| edit.old.start >= cursor.end().0)
{
let transform_start = new_transforms.summary().input.len;
let transform_end =
buffer_edit.new.end + (cursor.end(&()).0 - buffer_edit.old.end);
buffer_edit.new.end + (cursor.end().0 - buffer_edit.old.end);
push_isomorphic(
&mut new_transforms,
buffer_snapshot.text_summary_for_range(transform_start..transform_end),
);
cursor.next(&());
cursor.next();
}
}
new_transforms.append(cursor.suffix(&()), &());
new_transforms.append(cursor.suffix(), &());
if new_transforms.is_empty() {
new_transforms.push(Transform::Isomorphic(Default::default()), &());
}
@ -773,7 +773,7 @@ impl InlaySnapshot {
let mut cursor = self
.transforms
.cursor::<(InlayOffset, (InlayPoint, usize))>(&());
cursor.seek(&offset, Bias::Right, &());
cursor.seek(&offset, Bias::Right);
let overshoot = offset.0 - cursor.start().0.0;
match cursor.item() {
Some(Transform::Isomorphic(_)) => {
@ -803,7 +803,7 @@ impl InlaySnapshot {
let mut cursor = self
.transforms
.cursor::<(InlayPoint, (InlayOffset, Point))>(&());
cursor.seek(&point, Bias::Right, &());
cursor.seek(&point, Bias::Right);
let overshoot = point.0 - cursor.start().0.0;
match cursor.item() {
Some(Transform::Isomorphic(_)) => {
@ -822,7 +822,7 @@ impl InlaySnapshot {
}
pub fn to_buffer_point(&self, point: InlayPoint) -> Point {
let mut cursor = self.transforms.cursor::<(InlayPoint, Point)>(&());
cursor.seek(&point, Bias::Right, &());
cursor.seek(&point, Bias::Right);
match cursor.item() {
Some(Transform::Isomorphic(_)) => {
let overshoot = point.0 - cursor.start().0.0;
@ -834,7 +834,7 @@ impl InlaySnapshot {
}
pub fn to_buffer_offset(&self, offset: InlayOffset) -> usize {
let mut cursor = self.transforms.cursor::<(InlayOffset, usize)>(&());
cursor.seek(&offset, Bias::Right, &());
cursor.seek(&offset, Bias::Right);
match cursor.item() {
Some(Transform::Isomorphic(_)) => {
let overshoot = offset - cursor.start().0;
@ -847,19 +847,19 @@ impl InlaySnapshot {
pub fn to_inlay_offset(&self, offset: usize) -> InlayOffset {
let mut cursor = self.transforms.cursor::<(usize, InlayOffset)>(&());
cursor.seek(&offset, Bias::Left, &());
cursor.seek(&offset, Bias::Left);
loop {
match cursor.item() {
Some(Transform::Isomorphic(_)) => {
if offset == cursor.end(&()).0 {
if offset == cursor.end().0 {
while let Some(Transform::Inlay(inlay)) = cursor.next_item() {
if inlay.position.bias() == Bias::Right {
break;
} else {
cursor.next(&());
cursor.next();
}
}
return cursor.end(&()).1;
return cursor.end().1;
} else {
let overshoot = offset - cursor.start().0;
return InlayOffset(cursor.start().1.0 + overshoot);
@ -867,7 +867,7 @@ impl InlaySnapshot {
}
Some(Transform::Inlay(inlay)) => {
if inlay.position.bias() == Bias::Left {
cursor.next(&());
cursor.next();
} else {
return cursor.start().1;
}
@ -880,19 +880,19 @@ impl InlaySnapshot {
}
pub fn to_inlay_point(&self, point: Point) -> InlayPoint {
let mut cursor = self.transforms.cursor::<(Point, InlayPoint)>(&());
cursor.seek(&point, Bias::Left, &());
cursor.seek(&point, Bias::Left);
loop {
match cursor.item() {
Some(Transform::Isomorphic(_)) => {
if point == cursor.end(&()).0 {
if point == cursor.end().0 {
while let Some(Transform::Inlay(inlay)) = cursor.next_item() {
if inlay.position.bias() == Bias::Right {
break;
} else {
cursor.next(&());
cursor.next();
}
}
return cursor.end(&()).1;
return cursor.end().1;
} else {
let overshoot = point - cursor.start().0;
return InlayPoint(cursor.start().1.0 + overshoot);
@ -900,7 +900,7 @@ impl InlaySnapshot {
}
Some(Transform::Inlay(inlay)) => {
if inlay.position.bias() == Bias::Left {
cursor.next(&());
cursor.next();
} else {
return cursor.start().1;
}
@ -914,7 +914,7 @@ impl InlaySnapshot {
pub fn clip_point(&self, mut point: InlayPoint, mut bias: Bias) -> InlayPoint {
let mut cursor = self.transforms.cursor::<(InlayPoint, Point)>(&());
cursor.seek(&point, Bias::Left, &());
cursor.seek(&point, Bias::Left);
loop {
match cursor.item() {
Some(Transform::Isomorphic(transform)) => {
@ -923,7 +923,7 @@ impl InlaySnapshot {
if inlay.position.bias() == Bias::Left {
return point;
} else if bias == Bias::Left {
cursor.prev(&());
cursor.prev();
} else if transform.first_line_chars == 0 {
point.0 += Point::new(1, 0);
} else {
@ -932,12 +932,12 @@ impl InlaySnapshot {
} else {
return point;
}
} else if cursor.end(&()).0 == point {
} else if cursor.end().0 == point {
if let Some(Transform::Inlay(inlay)) = cursor.next_item() {
if inlay.position.bias() == Bias::Right {
return point;
} else if bias == Bias::Right {
cursor.next(&());
cursor.next();
} else if point.0.column == 0 {
point.0.row -= 1;
point.0.column = self.line_len(point.0.row);
@ -970,7 +970,7 @@ impl InlaySnapshot {
}
_ => return point,
}
} else if point == cursor.end(&()).0 && inlay.position.bias() == Bias::Left {
} else if point == cursor.end().0 && inlay.position.bias() == Bias::Left {
match cursor.next_item() {
Some(Transform::Inlay(inlay)) => {
if inlay.position.bias() == Bias::Right {
@ -983,9 +983,9 @@ impl InlaySnapshot {
if bias == Bias::Left {
point = cursor.start().0;
cursor.prev(&());
cursor.prev();
} else {
cursor.next(&());
cursor.next();
point = cursor.start().0;
}
}
@ -993,9 +993,9 @@ impl InlaySnapshot {
bias = bias.invert();
if bias == Bias::Left {
point = cursor.start().0;
cursor.prev(&());
cursor.prev();
} else {
cursor.next(&());
cursor.next();
point = cursor.start().0;
}
}
@ -1011,7 +1011,7 @@ impl InlaySnapshot {
let mut summary = TextSummary::default();
let mut cursor = self.transforms.cursor::<(InlayOffset, usize)>(&());
cursor.seek(&range.start, Bias::Right, &());
cursor.seek(&range.start, Bias::Right);
let overshoot = range.start.0 - cursor.start().0.0;
match cursor.item() {
@ -1019,22 +1019,22 @@ impl InlaySnapshot {
let buffer_start = cursor.start().1;
let suffix_start = buffer_start + overshoot;
let suffix_end =
buffer_start + (cmp::min(cursor.end(&()).0, range.end).0 - cursor.start().0.0);
buffer_start + (cmp::min(cursor.end().0, range.end).0 - cursor.start().0.0);
summary = self.buffer.text_summary_for_range(suffix_start..suffix_end);
cursor.next(&());
cursor.next();
}
Some(Transform::Inlay(inlay)) => {
let suffix_start = overshoot;
let suffix_end = cmp::min(cursor.end(&()).0, range.end).0 - cursor.start().0.0;
let suffix_end = cmp::min(cursor.end().0, range.end).0 - cursor.start().0.0;
summary = inlay.text.cursor(suffix_start).summary(suffix_end);
cursor.next(&());
cursor.next();
}
None => {}
}
if range.end > cursor.start().0 {
summary += cursor
.summary::<_, TransformSummary>(&range.end, Bias::Right, &())
.summary::<_, TransformSummary>(&range.end, Bias::Right)
.output;
let overshoot = range.end.0 - cursor.start().0.0;
@ -1060,7 +1060,7 @@ impl InlaySnapshot {
pub fn row_infos(&self, row: u32) -> InlayBufferRows<'_> {
let mut cursor = self.transforms.cursor::<(InlayPoint, Point)>(&());
let inlay_point = InlayPoint::new(row, 0);
cursor.seek(&inlay_point, Bias::Left, &());
cursor.seek(&inlay_point, Bias::Left);
let max_buffer_row = self.buffer.max_row();
let mut buffer_point = cursor.start().1;
@ -1101,7 +1101,7 @@ impl InlaySnapshot {
highlights: Highlights<'a>,
) -> InlayChunks<'a> {
let mut cursor = self.transforms.cursor::<(InlayOffset, usize)>(&());
cursor.seek(&range.start, Bias::Right, &());
cursor.seek(&range.start, Bias::Right);
let buffer_range = self.to_buffer_offset(range.start)..self.to_buffer_offset(range.end);
let buffer_chunks = CustomHighlightsChunks::new(

View file

@ -72,7 +72,7 @@ pub struct WrapRows<'a> {
impl WrapRows<'_> {
pub(crate) fn seek(&mut self, start_row: u32) {
self.transforms
.seek(&WrapPoint::new(start_row, 0), Bias::Left, &());
.seek(&WrapPoint::new(start_row, 0), Bias::Left);
let mut input_row = self.transforms.start().1.row();
if self.transforms.item().map_or(false, |t| t.is_isomorphic()) {
input_row += start_row - self.transforms.start().0.row();
@ -340,7 +340,7 @@ impl WrapSnapshot {
let mut tab_edits_iter = tab_edits.iter().peekable();
new_transforms =
old_cursor.slice(&tab_edits_iter.peek().unwrap().old.start, Bias::Right, &());
old_cursor.slice(&tab_edits_iter.peek().unwrap().old.start, Bias::Right);
while let Some(edit) = tab_edits_iter.next() {
if edit.new.start > TabPoint::from(new_transforms.summary().input.lines) {
@ -356,31 +356,29 @@ impl WrapSnapshot {
));
}
old_cursor.seek_forward(&edit.old.end, Bias::Right, &());
old_cursor.seek_forward(&edit.old.end, Bias::Right);
if let Some(next_edit) = tab_edits_iter.peek() {
if next_edit.old.start > old_cursor.end(&()) {
if old_cursor.end(&()) > edit.old.end {
if next_edit.old.start > old_cursor.end() {
if old_cursor.end() > edit.old.end {
let summary = self
.tab_snapshot
.text_summary_for_range(edit.old.end..old_cursor.end(&()));
.text_summary_for_range(edit.old.end..old_cursor.end());
new_transforms.push_or_extend(Transform::isomorphic(summary));
}
old_cursor.next(&());
new_transforms.append(
old_cursor.slice(&next_edit.old.start, Bias::Right, &()),
&(),
);
old_cursor.next();
new_transforms
.append(old_cursor.slice(&next_edit.old.start, Bias::Right), &());
}
} else {
if old_cursor.end(&()) > edit.old.end {
if old_cursor.end() > edit.old.end {
let summary = self
.tab_snapshot
.text_summary_for_range(edit.old.end..old_cursor.end(&()));
.text_summary_for_range(edit.old.end..old_cursor.end());
new_transforms.push_or_extend(Transform::isomorphic(summary));
}
old_cursor.next(&());
new_transforms.append(old_cursor.suffix(&()), &());
old_cursor.next();
new_transforms.append(old_cursor.suffix(), &());
}
}
}
@ -441,7 +439,6 @@ impl WrapSnapshot {
new_transforms = old_cursor.slice(
&TabPoint::new(row_edits.peek().unwrap().old_rows.start, 0),
Bias::Right,
&(),
);
while let Some(edit) = row_edits.next() {
@ -516,34 +513,31 @@ impl WrapSnapshot {
}
new_transforms.extend(edit_transforms, &());
old_cursor.seek_forward(&TabPoint::new(edit.old_rows.end, 0), Bias::Right, &());
old_cursor.seek_forward(&TabPoint::new(edit.old_rows.end, 0), Bias::Right);
if let Some(next_edit) = row_edits.peek() {
if next_edit.old_rows.start > old_cursor.end(&()).row() {
if old_cursor.end(&()) > TabPoint::new(edit.old_rows.end, 0) {
if next_edit.old_rows.start > old_cursor.end().row() {
if old_cursor.end() > TabPoint::new(edit.old_rows.end, 0) {
let summary = self.tab_snapshot.text_summary_for_range(
TabPoint::new(edit.old_rows.end, 0)..old_cursor.end(&()),
TabPoint::new(edit.old_rows.end, 0)..old_cursor.end(),
);
new_transforms.push_or_extend(Transform::isomorphic(summary));
}
old_cursor.next(&());
old_cursor.next();
new_transforms.append(
old_cursor.slice(
&TabPoint::new(next_edit.old_rows.start, 0),
Bias::Right,
&(),
),
old_cursor
.slice(&TabPoint::new(next_edit.old_rows.start, 0), Bias::Right),
&(),
);
}
} else {
if old_cursor.end(&()) > TabPoint::new(edit.old_rows.end, 0) {
if old_cursor.end() > TabPoint::new(edit.old_rows.end, 0) {
let summary = self.tab_snapshot.text_summary_for_range(
TabPoint::new(edit.old_rows.end, 0)..old_cursor.end(&()),
TabPoint::new(edit.old_rows.end, 0)..old_cursor.end(),
);
new_transforms.push_or_extend(Transform::isomorphic(summary));
}
old_cursor.next(&());
new_transforms.append(old_cursor.suffix(&()), &());
old_cursor.next();
new_transforms.append(old_cursor.suffix(), &());
}
}
}
@ -570,19 +564,19 @@ impl WrapSnapshot {
tab_edit.new.start.0.column = 0;
tab_edit.new.end.0 += Point::new(1, 0);
old_cursor.seek(&tab_edit.old.start, Bias::Right, &());
old_cursor.seek(&tab_edit.old.start, Bias::Right);
let mut old_start = old_cursor.start().output.lines;
old_start += tab_edit.old.start.0 - old_cursor.start().input.lines;
old_cursor.seek(&tab_edit.old.end, Bias::Right, &());
old_cursor.seek(&tab_edit.old.end, Bias::Right);
let mut old_end = old_cursor.start().output.lines;
old_end += tab_edit.old.end.0 - old_cursor.start().input.lines;
new_cursor.seek(&tab_edit.new.start, Bias::Right, &());
new_cursor.seek(&tab_edit.new.start, Bias::Right);
let mut new_start = new_cursor.start().output.lines;
new_start += tab_edit.new.start.0 - new_cursor.start().input.lines;
new_cursor.seek(&tab_edit.new.end, Bias::Right, &());
new_cursor.seek(&tab_edit.new.end, Bias::Right);
let mut new_end = new_cursor.start().output.lines;
new_end += tab_edit.new.end.0 - new_cursor.start().input.lines;
@ -605,7 +599,7 @@ impl WrapSnapshot {
let output_start = WrapPoint::new(rows.start, 0);
let output_end = WrapPoint::new(rows.end, 0);
let mut transforms = self.transforms.cursor::<(WrapPoint, TabPoint)>(&());
transforms.seek(&output_start, Bias::Right, &());
transforms.seek(&output_start, Bias::Right);
let mut input_start = TabPoint(transforms.start().1.0);
if transforms.item().map_or(false, |t| t.is_isomorphic()) {
input_start.0 += output_start.0 - transforms.start().0.0;
@ -633,7 +627,7 @@ impl WrapSnapshot {
pub fn line_len(&self, row: u32) -> u32 {
let mut cursor = self.transforms.cursor::<(WrapPoint, TabPoint)>(&());
cursor.seek(&WrapPoint::new(row + 1, 0), Bias::Left, &());
cursor.seek(&WrapPoint::new(row + 1, 0), Bias::Left);
if cursor
.item()
.map_or(false, |transform| transform.is_isomorphic())
@ -658,10 +652,10 @@ impl WrapSnapshot {
let end = WrapPoint::new(rows.end, 0);
let mut cursor = self.transforms.cursor::<(WrapPoint, TabPoint)>(&());
cursor.seek(&start, Bias::Right, &());
cursor.seek(&start, Bias::Right);
if let Some(transform) = cursor.item() {
let start_in_transform = start.0 - cursor.start().0.0;
let end_in_transform = cmp::min(end, cursor.end(&()).0).0 - cursor.start().0.0;
let end_in_transform = cmp::min(end, cursor.end().0).0 - cursor.start().0.0;
if transform.is_isomorphic() {
let tab_start = TabPoint(cursor.start().1.0 + start_in_transform);
let tab_end = TabPoint(cursor.start().1.0 + end_in_transform);
@ -678,12 +672,12 @@ impl WrapSnapshot {
};
}
cursor.next(&());
cursor.next();
}
if rows.end > cursor.start().0.row() {
summary += &cursor
.summary::<_, TransformSummary>(&WrapPoint::new(rows.end, 0), Bias::Right, &())
.summary::<_, TransformSummary>(&WrapPoint::new(rows.end, 0), Bias::Right)
.output;
if let Some(transform) = cursor.item() {
@ -712,7 +706,7 @@ impl WrapSnapshot {
pub fn soft_wrap_indent(&self, row: u32) -> Option<u32> {
let mut cursor = self.transforms.cursor::<WrapPoint>(&());
cursor.seek(&WrapPoint::new(row + 1, 0), Bias::Right, &());
cursor.seek(&WrapPoint::new(row + 1, 0), Bias::Right);
cursor.item().and_then(|transform| {
if transform.is_isomorphic() {
None
@ -728,7 +722,7 @@ impl WrapSnapshot {
pub fn row_infos(&self, start_row: u32) -> WrapRows<'_> {
let mut transforms = self.transforms.cursor::<(WrapPoint, TabPoint)>(&());
transforms.seek(&WrapPoint::new(start_row, 0), Bias::Left, &());
transforms.seek(&WrapPoint::new(start_row, 0), Bias::Left);
let mut input_row = transforms.start().1.row();
if transforms.item().map_or(false, |t| t.is_isomorphic()) {
input_row += start_row - transforms.start().0.row();
@ -748,7 +742,7 @@ impl WrapSnapshot {
pub fn to_tab_point(&self, point: WrapPoint) -> TabPoint {
let mut cursor = self.transforms.cursor::<(WrapPoint, TabPoint)>(&());
cursor.seek(&point, Bias::Right, &());
cursor.seek(&point, Bias::Right);
let mut tab_point = cursor.start().1.0;
if cursor.item().map_or(false, |t| t.is_isomorphic()) {
tab_point += point.0 - cursor.start().0.0;
@ -766,14 +760,14 @@ impl WrapSnapshot {
pub fn tab_point_to_wrap_point(&self, point: TabPoint) -> WrapPoint {
let mut cursor = self.transforms.cursor::<(TabPoint, WrapPoint)>(&());
cursor.seek(&point, Bias::Right, &());
cursor.seek(&point, Bias::Right);
WrapPoint(cursor.start().1.0 + (point.0 - cursor.start().0.0))
}
pub fn clip_point(&self, mut point: WrapPoint, bias: Bias) -> WrapPoint {
if bias == Bias::Left {
let mut cursor = self.transforms.cursor::<WrapPoint>(&());
cursor.seek(&point, Bias::Right, &());
cursor.seek(&point, Bias::Right);
if cursor.item().map_or(false, |t| !t.is_isomorphic()) {
point = *cursor.start();
*point.column_mut() -= 1;
@ -791,16 +785,16 @@ impl WrapSnapshot {
*point.column_mut() = 0;
let mut cursor = self.transforms.cursor::<(WrapPoint, TabPoint)>(&());
cursor.seek(&point, Bias::Right, &());
cursor.seek(&point, Bias::Right);
if cursor.item().is_none() {
cursor.prev(&());
cursor.prev();
}
while let Some(transform) = cursor.item() {
if transform.is_isomorphic() && cursor.start().1.column() == 0 {
return cmp::min(cursor.end(&()).0.row(), point.row());
return cmp::min(cursor.end().0.row(), point.row());
} else {
cursor.prev(&());
cursor.prev();
}
}
@ -811,12 +805,12 @@ impl WrapSnapshot {
point.0 += Point::new(1, 0);
let mut cursor = self.transforms.cursor::<(WrapPoint, TabPoint)>(&());
cursor.seek(&point, Bias::Right, &());
cursor.seek(&point, Bias::Right);
while let Some(transform) = cursor.item() {
if transform.is_isomorphic() && cursor.start().1.column() == 0 {
return Some(cmp::max(cursor.start().0.row(), point.row()));
} else {
cursor.next(&());
cursor.next();
}
}
@ -889,7 +883,7 @@ impl WrapChunks<'_> {
pub(crate) fn seek(&mut self, rows: Range<u32>) {
let output_start = WrapPoint::new(rows.start, 0);
let output_end = WrapPoint::new(rows.end, 0);
self.transforms.seek(&output_start, Bias::Right, &());
self.transforms.seek(&output_start, Bias::Right);
let mut input_start = TabPoint(self.transforms.start().1.0);
if self.transforms.item().map_or(false, |t| t.is_isomorphic()) {
input_start.0 += output_start.0 - self.transforms.start().0.0;
@ -930,7 +924,7 @@ impl<'a> Iterator for WrapChunks<'a> {
}
self.output_position.0 += summary;
self.transforms.next(&());
self.transforms.next();
return Some(Chunk {
text: &display_text[start_ix..end_ix],
..Default::default()
@ -942,7 +936,7 @@ impl<'a> Iterator for WrapChunks<'a> {
}
let mut input_len = 0;
let transform_end = self.transforms.end(&()).0;
let transform_end = self.transforms.end().0;
for c in self.input_chunk.text.chars() {
let char_len = c.len_utf8();
input_len += char_len;
@ -954,7 +948,7 @@ impl<'a> Iterator for WrapChunks<'a> {
}
if self.output_position >= transform_end {
self.transforms.next(&());
self.transforms.next();
break;
}
}
@ -982,7 +976,7 @@ impl Iterator for WrapRows<'_> {
self.output_row += 1;
self.transforms
.seek_forward(&WrapPoint::new(self.output_row, 0), Bias::Left, &());
.seek_forward(&WrapPoint::new(self.output_row, 0), Bias::Left);
if self.transforms.item().map_or(false, |t| t.is_isomorphic()) {
self.input_buffer_row = self.input_buffer_rows.next().unwrap();
self.soft_wrapped = false;

View file

@ -9570,6 +9570,74 @@ async fn test_document_format_during_save(cx: &mut TestAppContext) {
}
}
#[gpui::test]
async fn test_redo_after_noop_format(cx: &mut TestAppContext) {
init_test(cx, |settings| {
settings.defaults.ensure_final_newline_on_save = Some(false);
});
let fs = FakeFs::new(cx.executor());
fs.insert_file(path!("/file.txt"), "foo".into()).await;
let project = Project::test(fs, [path!("/file.txt").as_ref()], cx).await;
let buffer = project
.update(cx, |project, cx| {
project.open_local_buffer(path!("/file.txt"), cx)
})
.await
.unwrap();
let buffer = cx.new(|cx| MultiBuffer::singleton(buffer, cx));
let (editor, cx) = cx.add_window_view(|window, cx| {
build_editor_with_project(project.clone(), buffer, window, cx)
});
editor.update_in(cx, |editor, window, cx| {
editor.change_selections(SelectionEffects::default(), window, cx, |s| {
s.select_ranges([0..0])
});
});
assert!(!cx.read(|cx| editor.is_dirty(cx)));
editor.update_in(cx, |editor, window, cx| {
editor.handle_input("\n", window, cx)
});
cx.run_until_parked();
save(&editor, &project, cx).await;
assert_eq!("\nfoo", editor.read_with(cx, |editor, cx| editor.text(cx)));
editor.update_in(cx, |editor, window, cx| {
editor.undo(&Default::default(), window, cx);
});
save(&editor, &project, cx).await;
assert_eq!("foo", editor.read_with(cx, |editor, cx| editor.text(cx)));
editor.update_in(cx, |editor, window, cx| {
editor.redo(&Default::default(), window, cx);
});
cx.run_until_parked();
assert_eq!("\nfoo", editor.read_with(cx, |editor, cx| editor.text(cx)));
async fn save(editor: &Entity<Editor>, project: &Entity<Project>, cx: &mut VisualTestContext) {
let save = editor
.update_in(cx, |editor, window, cx| {
editor.save(
SaveOptions {
format: true,
autosave: false,
},
project.clone(),
window,
cx,
)
})
.unwrap();
cx.executor().start_waiting();
save.await;
assert!(!cx.read(|cx| editor.is_dirty(cx)));
}
}
#[gpui::test]
async fn test_multibuffer_format_during_save(cx: &mut TestAppContext) {
init_test(cx, |_| {});
@ -22708,7 +22776,7 @@ pub(crate) fn init_test(cx: &mut TestAppContext, f: fn(&mut AllLanguageSettingsC
workspace::init_settings(cx);
crate::init(cx);
});
zlog::init_test();
update_test_language_settings(cx, f);
}

View file

@ -296,7 +296,7 @@ impl GitBlame {
let row = info
.buffer_row
.filter(|_| info.buffer_id == Some(buffer_id))?;
cursor.seek_forward(&row, Bias::Right, &());
cursor.seek_forward(&row, Bias::Right);
cursor.item()?.blame.clone()
})
}
@ -389,7 +389,7 @@ impl GitBlame {
}
}
new_entries.append(cursor.slice(&edit.old.start, Bias::Right, &()), &());
new_entries.append(cursor.slice(&edit.old.start, Bias::Right), &());
if edit.new.start > new_entries.summary().rows {
new_entries.push(
@ -401,7 +401,7 @@ impl GitBlame {
);
}
cursor.seek(&edit.old.end, Bias::Right, &());
cursor.seek(&edit.old.end, Bias::Right);
if !edit.new.is_empty() {
new_entries.push(
GitBlameEntry {
@ -412,7 +412,7 @@ impl GitBlame {
);
}
let old_end = cursor.end(&());
let old_end = cursor.end();
if row_edits
.peek()
.map_or(true, |next_edit| next_edit.old.start >= old_end)
@ -421,18 +421,18 @@ impl GitBlame {
if old_end > edit.old.end {
new_entries.push(
GitBlameEntry {
rows: cursor.end(&()) - edit.old.end,
rows: cursor.end() - edit.old.end,
blame: entry.blame.clone(),
},
&(),
);
}
cursor.next(&());
cursor.next();
}
}
}
new_entries.append(cursor.suffix(&()), &());
new_entries.append(cursor.suffix(), &());
drop(cursor);
self.buffer_snapshot = new_snapshot;

View file

@ -23,6 +23,7 @@ askpass.workspace = true
buffer_diff.workspace = true
call.workspace = true
chrono.workspace = true
client.workspace = true
collections.workspace = true
command_palette_hooks.workspace = true
component.workspace = true

View file

@ -1,8 +1,10 @@
use crate::branch_picker::{self, BranchList};
use crate::git_panel::{GitPanel, commit_message_editor};
use client::DisableAiSettings;
use git::repository::CommitOptions;
use git::{Amend, Commit, GenerateCommitMessage, Signoff};
use panel::{panel_button, panel_editor_style};
use settings::Settings;
use ui::{
ContextMenu, KeybindingHint, PopoverMenu, PopoverMenuHandle, SplitButton, Tooltip, prelude::*,
};
@ -569,11 +571,13 @@ impl Render for CommitModal {
.on_action(cx.listener(Self::dismiss))
.on_action(cx.listener(Self::commit))
.on_action(cx.listener(Self::amend))
.on_action(cx.listener(|this, _: &GenerateCommitMessage, _, cx| {
.when(!DisableAiSettings::get_global(cx).disable_ai, |this| {
this.on_action(cx.listener(|this, _: &GenerateCommitMessage, _, cx| {
this.git_panel.update(cx, |panel, cx| {
panel.generate_commit_message(cx);
})
}))
})
.on_action(
cx.listener(|this, _: &zed_actions::git::Branch, window, cx| {
this.toggle_branch_selector(window, cx);

View file

@ -12,6 +12,7 @@ use crate::{
use agent_settings::AgentSettings;
use anyhow::Context as _;
use askpass::AskPassDelegate;
use client::DisableAiSettings;
use db::kvp::KEY_VALUE_STORE;
use editor::{
Editor, EditorElement, EditorMode, EditorSettings, MultiBuffer, ShowScrollbar,
@ -53,7 +54,7 @@ use project::{
git_store::{GitStoreEvent, Repository},
};
use serde::{Deserialize, Serialize};
use settings::{Settings as _, SettingsStore};
use settings::{Settings, SettingsStore};
use std::future::Future;
use std::ops::Range;
use std::path::{Path, PathBuf};
@ -464,9 +465,14 @@ impl GitPanel {
};
let mut assistant_enabled = AgentSettings::get_global(cx).enabled;
let mut was_ai_disabled = DisableAiSettings::get_global(cx).disable_ai;
let _settings_subscription = cx.observe_global::<SettingsStore>(move |_, cx| {
if assistant_enabled != AgentSettings::get_global(cx).enabled {
let is_ai_disabled = DisableAiSettings::get_global(cx).disable_ai;
if assistant_enabled != AgentSettings::get_global(cx).enabled
|| was_ai_disabled != is_ai_disabled
{
assistant_enabled = AgentSettings::get_global(cx).enabled;
was_ai_disabled = is_ai_disabled;
cx.notify();
}
});
@ -1806,7 +1812,7 @@ impl GitPanel {
/// Generates a commit message using an LLM.
pub fn generate_commit_message(&mut self, cx: &mut Context<Self>) {
if !self.can_commit() {
if !self.can_commit() || DisableAiSettings::get_global(cx).disable_ai {
return;
}
@ -4305,8 +4311,10 @@ impl GitPanel {
}
fn current_language_model(cx: &Context<'_, GitPanel>) -> Option<Arc<dyn LanguageModel>> {
agent_settings::AgentSettings::get_global(cx)
.enabled
let is_enabled = agent_settings::AgentSettings::get_global(cx).enabled
&& !DisableAiSettings::get_global(cx).disable_ai;
is_enabled
.then(|| {
let ConfiguredModel { provider, model } =
LanguageModelRegistry::read_global(cx).commit_message_model()?;
@ -5037,6 +5045,7 @@ mod tests {
language::init(cx);
editor::init(cx);
Project::init_settings(cx);
client::DisableAiSettings::register(cx);
crate::init(cx);
});
}

View file

@ -501,7 +501,7 @@ mod remote_button {
)
.into_any_element();
SplitButton { left, right }
SplitButton::new(left, right)
}
}

View file

@ -121,7 +121,7 @@ smallvec.workspace = true
smol.workspace = true
strum.workspace = true
sum_tree.workspace = true
taffy = "=0.5.1"
taffy = "=0.8.3"
thiserror.workspace = true
util.workspace = true
uuid.workspace = true

View file

@ -249,8 +249,8 @@ impl ListState {
let state = &mut *self.0.borrow_mut();
let mut old_items = state.items.cursor::<Count>(&());
let mut new_items = old_items.slice(&Count(old_range.start), Bias::Right, &());
old_items.seek_forward(&Count(old_range.end), Bias::Right, &());
let mut new_items = old_items.slice(&Count(old_range.start), Bias::Right);
old_items.seek_forward(&Count(old_range.end), Bias::Right);
let mut spliced_count = 0;
new_items.extend(
@ -260,7 +260,7 @@ impl ListState {
}),
&(),
);
new_items.append(old_items.suffix(&()), &());
new_items.append(old_items.suffix(), &());
drop(old_items);
state.items = new_items;
@ -300,14 +300,14 @@ impl ListState {
let current_offset = self.logical_scroll_top();
let state = &mut *self.0.borrow_mut();
let mut cursor = state.items.cursor::<ListItemSummary>(&());
cursor.seek(&Count(current_offset.item_ix), Bias::Right, &());
cursor.seek(&Count(current_offset.item_ix), Bias::Right);
let start_pixel_offset = cursor.start().height + current_offset.offset_in_item;
let new_pixel_offset = (start_pixel_offset + distance).max(px(0.));
if new_pixel_offset > start_pixel_offset {
cursor.seek_forward(&Height(new_pixel_offset), Bias::Right, &());
cursor.seek_forward(&Height(new_pixel_offset), Bias::Right);
} else {
cursor.seek(&Height(new_pixel_offset), Bias::Right, &());
cursor.seek(&Height(new_pixel_offset), Bias::Right);
}
state.logical_scroll_top = Some(ListOffset {
@ -343,11 +343,11 @@ impl ListState {
scroll_top.offset_in_item = px(0.);
} else {
let mut cursor = state.items.cursor::<ListItemSummary>(&());
cursor.seek(&Count(ix + 1), Bias::Right, &());
cursor.seek(&Count(ix + 1), Bias::Right);
let bottom = cursor.start().height + padding.top;
let goal_top = px(0.).max(bottom - height + padding.bottom);
cursor.seek(&Height(goal_top), Bias::Left, &());
cursor.seek(&Height(goal_top), Bias::Left);
let start_ix = cursor.start().count;
let start_item_top = cursor.start().height;
@ -372,11 +372,11 @@ impl ListState {
}
let mut cursor = state.items.cursor::<(Count, Height)>(&());
cursor.seek(&Count(scroll_top.item_ix), Bias::Right, &());
cursor.seek(&Count(scroll_top.item_ix), Bias::Right);
let scroll_top = cursor.start().1.0 + scroll_top.offset_in_item;
cursor.seek_forward(&Count(ix), Bias::Right, &());
cursor.seek_forward(&Count(ix), Bias::Right);
if let Some(&ListItem::Measured { size, .. }) = cursor.item() {
let &(Count(count), Height(top)) = cursor.start();
if count == ix {
@ -431,7 +431,7 @@ impl ListState {
let mut cursor = state.items.cursor::<ListItemSummary>(&());
let summary: ListItemSummary =
cursor.summary(&Count(logical_scroll_top.item_ix), Bias::Right, &());
cursor.summary(&Count(logical_scroll_top.item_ix), Bias::Right);
let content_height = state.items.summary().height;
let drag_offset =
// if dragging the scrollbar, we want to offset the point if the height changed
@ -450,9 +450,9 @@ impl ListState {
impl StateInner {
fn visible_range(&self, height: Pixels, scroll_top: &ListOffset) -> Range<usize> {
let mut cursor = self.items.cursor::<ListItemSummary>(&());
cursor.seek(&Count(scroll_top.item_ix), Bias::Right, &());
cursor.seek(&Count(scroll_top.item_ix), Bias::Right);
let start_y = cursor.start().height + scroll_top.offset_in_item;
cursor.seek_forward(&Height(start_y + height), Bias::Left, &());
cursor.seek_forward(&Height(start_y + height), Bias::Left);
scroll_top.item_ix..cursor.start().count + 1
}
@ -482,7 +482,7 @@ impl StateInner {
self.logical_scroll_top = None;
} else {
let mut cursor = self.items.cursor::<ListItemSummary>(&());
cursor.seek(&Height(new_scroll_top), Bias::Right, &());
cursor.seek(&Height(new_scroll_top), Bias::Right);
let item_ix = cursor.start().count;
let offset_in_item = new_scroll_top - cursor.start().height;
self.logical_scroll_top = Some(ListOffset {
@ -523,7 +523,7 @@ impl StateInner {
fn scroll_top(&self, logical_scroll_top: &ListOffset) -> Pixels {
let mut cursor = self.items.cursor::<ListItemSummary>(&());
cursor.seek(&Count(logical_scroll_top.item_ix), Bias::Right, &());
cursor.seek(&Count(logical_scroll_top.item_ix), Bias::Right);
cursor.start().height + logical_scroll_top.offset_in_item
}
@ -553,7 +553,7 @@ impl StateInner {
let mut cursor = old_items.cursor::<Count>(&());
// Render items after the scroll top, including those in the trailing overdraw
cursor.seek(&Count(scroll_top.item_ix), Bias::Right, &());
cursor.seek(&Count(scroll_top.item_ix), Bias::Right);
for (ix, item) in cursor.by_ref().enumerate() {
let visible_height = rendered_height - scroll_top.offset_in_item;
if visible_height >= available_height + self.overdraw {
@ -592,13 +592,13 @@ impl StateInner {
rendered_height += padding.bottom;
// Prepare to start walking upward from the item at the scroll top.
cursor.seek(&Count(scroll_top.item_ix), Bias::Right, &());
cursor.seek(&Count(scroll_top.item_ix), Bias::Right);
// If the rendered items do not fill the visible region, then adjust
// the scroll top upward.
if rendered_height - scroll_top.offset_in_item < available_height {
while rendered_height < available_height {
cursor.prev(&());
cursor.prev();
if let Some(item) = cursor.item() {
let item_index = cursor.start().0;
let mut element = (self.render_item)(item_index, window, cx);
@ -645,7 +645,7 @@ impl StateInner {
// Measure items in the leading overdraw
let mut leading_overdraw = scroll_top.offset_in_item;
while leading_overdraw < self.overdraw {
cursor.prev(&());
cursor.prev();
if let Some(item) = cursor.item() {
let size = if let ListItem::Measured { size, .. } = item {
*size
@ -666,10 +666,10 @@ impl StateInner {
let measured_range = cursor.start().0..(cursor.start().0 + measured_items.len());
let mut cursor = old_items.cursor::<Count>(&());
let mut new_items = cursor.slice(&Count(measured_range.start), Bias::Right, &());
let mut new_items = cursor.slice(&Count(measured_range.start), Bias::Right);
new_items.extend(measured_items, &());
cursor.seek(&Count(measured_range.end), Bias::Right, &());
new_items.append(cursor.suffix(&()), &());
cursor.seek(&Count(measured_range.end), Bias::Right);
new_items.append(cursor.suffix(), &());
self.items = new_items;
// If none of the visible items are focused, check if an off-screen item is focused
@ -679,7 +679,7 @@ impl StateInner {
let mut cursor = self
.items
.filter::<_, Count>(&(), |summary| summary.has_focus_handles);
cursor.next(&());
cursor.next();
while let Some(item) = cursor.item() {
if item.contains_focused(window, cx) {
let item_index = cursor.start().0;
@ -692,7 +692,7 @@ impl StateInner {
});
break;
}
cursor.next(&());
cursor.next();
}
}
@ -741,7 +741,7 @@ impl StateInner {
});
} else if autoscroll_bounds.bottom() > bounds.bottom() {
let mut cursor = self.items.cursor::<Count>(&());
cursor.seek(&Count(item.index), Bias::Right, &());
cursor.seek(&Count(item.index), Bias::Right);
let mut height = bounds.size.height - padding.top - padding.bottom;
// Account for the height of the element down until the autoscroll bottom.
@ -749,7 +749,7 @@ impl StateInner {
// Keep decreasing the scroll top until we fill all the available space.
while height > Pixels::ZERO {
cursor.prev(&());
cursor.prev();
let Some(item) = cursor.item() else { break };
let size = item.size().unwrap_or_else(|| {
@ -806,7 +806,7 @@ impl StateInner {
self.logical_scroll_top = None;
} else {
let mut cursor = self.items.cursor::<ListItemSummary>(&());
cursor.seek(&Height(new_scroll_top), Bias::Right, &());
cursor.seek(&Height(new_scroll_top), Bias::Right);
let item_ix = cursor.start().count;
let offset_in_item = new_scroll_top - cursor.start().height;

View file

@ -50,8 +50,8 @@
/// KeyBinding::new("cmd-k left", pane::SplitLeft, Some("Pane"))
///
use crate::{
Action, ActionRegistry, App, BindingIndex, DispatchPhase, EntityId, FocusId, KeyBinding,
KeyContext, Keymap, Keystroke, ModifiersChangedEvent, Window,
Action, ActionRegistry, App, DispatchPhase, EntityId, FocusId, KeyBinding, KeyContext, Keymap,
Keystroke, ModifiersChangedEvent, Window,
};
use collections::FxHashMap;
use smallvec::SmallVec;
@ -406,16 +406,11 @@ impl DispatchTree {
// methods, but this can't be done very cleanly since keymap must be borrowed.
let keymap = self.keymap.borrow();
keymap
.bindings_for_action_with_indices(action)
.filter(|(binding_index, binding)| {
Self::binding_matches_predicate_and_not_shadowed(
&keymap,
*binding_index,
&binding.keystrokes,
context_stack,
)
.bindings_for_action(action)
.filter(|binding| {
Self::binding_matches_predicate_and_not_shadowed(&keymap, &binding, context_stack)
})
.map(|(_, binding)| binding.clone())
.cloned()
.collect()
}
@ -428,28 +423,22 @@ impl DispatchTree {
) -> Option<KeyBinding> {
let keymap = self.keymap.borrow();
keymap
.bindings_for_action_with_indices(action)
.bindings_for_action(action)
.rev()
.find_map(|(binding_index, binding)| {
let found = Self::binding_matches_predicate_and_not_shadowed(
&keymap,
binding_index,
&binding.keystrokes,
context_stack,
);
if found { Some(binding.clone()) } else { None }
.find(|binding| {
Self::binding_matches_predicate_and_not_shadowed(&keymap, &binding, context_stack)
})
.cloned()
}
fn binding_matches_predicate_and_not_shadowed(
keymap: &Keymap,
binding_index: BindingIndex,
keystrokes: &[Keystroke],
binding: &KeyBinding,
context_stack: &[KeyContext],
) -> bool {
let (bindings, _) = keymap.bindings_for_input_with_indices(&keystrokes, context_stack);
if let Some((highest_precedence_index, _)) = bindings.iter().next() {
binding_index == *highest_precedence_index
let (bindings, _) = keymap.bindings_for_input(&binding.keystrokes, context_stack);
if let Some(found) = bindings.iter().next() {
found.action.partial_eq(binding.action.as_ref())
} else {
false
}

View file

@ -5,7 +5,7 @@ pub use binding::*;
pub use context::*;
use crate::{Action, Keystroke, is_no_action};
use collections::HashMap;
use collections::{HashMap, HashSet};
use smallvec::SmallVec;
use std::any::TypeId;
@ -77,15 +77,6 @@ impl Keymap {
&'a self,
action: &'a dyn Action,
) -> impl 'a + DoubleEndedIterator<Item = &'a KeyBinding> {
self.bindings_for_action_with_indices(action)
.map(|(_, binding)| binding)
}
/// Like `bindings_for_action_with_indices`, but also returns the binding indices.
pub fn bindings_for_action_with_indices<'a>(
&'a self,
action: &'a dyn Action,
) -> impl 'a + DoubleEndedIterator<Item = (BindingIndex, &'a KeyBinding)> {
let action_id = action.type_id();
let binding_indices = self
.binding_indices_by_action_id
@ -118,7 +109,7 @@ impl Keymap {
}
}
Some((BindingIndex(*ix), binding))
Some(binding)
})
}
@ -153,90 +144,53 @@ impl Keymap {
input: &[Keystroke],
context_stack: &[KeyContext],
) -> (SmallVec<[KeyBinding; 1]>, bool) {
let (bindings, pending) = self.bindings_for_input_with_indices(input, context_stack);
let bindings = bindings
.into_iter()
.map(|(_, binding)| binding)
.collect::<SmallVec<[KeyBinding; 1]>>();
(bindings, pending)
let mut matched_bindings = SmallVec::<[(usize, BindingIndex, &KeyBinding); 1]>::new();
let mut pending_bindings = SmallVec::<[(BindingIndex, &KeyBinding); 1]>::new();
for (ix, binding) in self.bindings().enumerate().rev() {
let Some(depth) = self.binding_enabled(binding, &context_stack) else {
continue;
};
let Some(pending) = binding.match_keystrokes(input) else {
continue;
};
if !pending {
matched_bindings.push((depth, BindingIndex(ix), binding));
} else {
pending_bindings.push((BindingIndex(ix), binding));
}
}
/// Like `bindings_for_input`, but also returns the binding indices.
pub fn bindings_for_input_with_indices(
&self,
input: &[Keystroke],
context_stack: &[KeyContext],
) -> (SmallVec<[(BindingIndex, KeyBinding); 1]>, bool) {
let mut possibilities = self
.bindings()
.enumerate()
.rev()
.filter_map(|(ix, binding)| {
let depth = self.binding_enabled(binding, &context_stack)?;
let pending = binding.match_keystrokes(input)?;
Some((depth, BindingIndex(ix), binding, pending))
})
.collect::<Vec<_>>();
possibilities.sort_by(|(depth_a, ix_a, _, _), (depth_b, ix_b, _, _)| {
matched_bindings.sort_by(|(depth_a, ix_a, _), (depth_b, ix_b, _)| {
depth_b.cmp(depth_a).then(ix_b.cmp(ix_a))
});
let mut bindings: SmallVec<[(BindingIndex, KeyBinding, usize); 1]> = SmallVec::new();
// (pending, is_no_action, depth, keystrokes)
let mut pending_info_opt: Option<(bool, bool, usize, &[Keystroke])> = None;
'outer: for (depth, binding_index, binding, pending) in possibilities {
let is_no_action = is_no_action(&*binding.action);
// We only want to consider a binding pending if it has an action
// This, however, means that if we have both a NoAction binding and a binding
// with an action at the same depth, we should still set is_pending to true.
if let Some(pending_info) = pending_info_opt.as_mut() {
let (already_pending, pending_is_no_action, pending_depth, pending_keystrokes) =
*pending_info;
// We only want to change the pending status if it's not already pending AND if
// the existing pending status was set by a NoAction binding. This avoids a NoAction
// binding erroneously setting the pending status to true when a binding with an action
// already set it to false
//
// We also want to change the pending status if the keystrokes don't match,
// meaning it's different keystrokes than the NoAction that set pending to false
if pending
&& !already_pending
&& pending_is_no_action
&& (pending_depth == depth || pending_keystrokes != binding.keystrokes())
{
pending_info.0 = !is_no_action;
}
} else {
pending_info_opt = Some((
pending && !is_no_action,
is_no_action,
depth,
binding.keystrokes(),
));
}
if !pending {
bindings.push((binding_index, binding.clone(), depth));
continue 'outer;
}
}
// sort by descending depth
bindings.sort_by(|a, b| a.2.cmp(&b.2).reverse());
let bindings = bindings
.into_iter()
.map_while(|(binding_index, binding, _)| {
let mut bindings: SmallVec<[_; 1]> = SmallVec::new();
let mut first_binding_index = None;
for (_, ix, binding) in matched_bindings {
if is_no_action(&*binding.action) {
None
} else {
Some((binding_index, binding))
break;
}
bindings.push(binding.clone());
first_binding_index.get_or_insert(ix);
}
})
.collect();
(bindings, pending_info_opt.unwrap_or_default().0)
let mut pending = HashSet::default();
for (ix, binding) in pending_bindings.into_iter().rev() {
if let Some(binding_ix) = first_binding_index
&& binding_ix > ix
{
continue;
}
if is_no_action(&*binding.action) {
pending.remove(&&binding.keystrokes);
continue;
}
pending.insert(&binding.keystrokes);
}
(bindings, !pending.is_empty())
}
/// Check if the given binding is enabled, given a certain key context.
@ -302,6 +256,30 @@ mod tests {
);
}
#[test]
fn test_depth_precedence() {
let bindings = [
KeyBinding::new("ctrl-a", ActionBeta {}, Some("pane")),
KeyBinding::new("ctrl-a", ActionGamma {}, Some("editor")),
];
let mut keymap = Keymap::default();
keymap.add_bindings(bindings.clone());
let (result, pending) = keymap.bindings_for_input(
&[Keystroke::parse("ctrl-a").unwrap()],
&[
KeyContext::parse("pane").unwrap(),
KeyContext::parse("editor").unwrap(),
],
);
assert!(!pending);
assert_eq!(result.len(), 2);
assert!(result[0].action.partial_eq(&ActionGamma {}));
assert!(result[1].action.partial_eq(&ActionBeta {}));
}
#[test]
fn test_keymap_disabled() {
let bindings = [
@ -453,6 +431,193 @@ mod tests {
assert_eq!(space_editor.1, true);
}
#[test]
fn test_override_multikey() {
let bindings = [
KeyBinding::new("ctrl-w left", ActionAlpha {}, Some("editor")),
KeyBinding::new("ctrl-w", NoAction {}, Some("editor")),
];
let mut keymap = Keymap::default();
keymap.add_bindings(bindings.clone());
// Ensure `space` results in pending input on the workspace, but not editor
let (result, pending) = keymap.bindings_for_input(
&[Keystroke::parse("ctrl-w").unwrap()],
&[KeyContext::parse("editor").unwrap()],
);
assert!(result.is_empty());
assert_eq!(pending, true);
let bindings = [
KeyBinding::new("ctrl-w left", ActionAlpha {}, Some("editor")),
KeyBinding::new("ctrl-w", ActionBeta {}, Some("editor")),
];
let mut keymap = Keymap::default();
keymap.add_bindings(bindings.clone());
// Ensure `space` results in pending input on the workspace, but not editor
let (result, pending) = keymap.bindings_for_input(
&[Keystroke::parse("ctrl-w").unwrap()],
&[KeyContext::parse("editor").unwrap()],
);
assert_eq!(result.len(), 1);
assert_eq!(pending, false);
}
#[test]
fn test_simple_disable() {
let bindings = [
KeyBinding::new("ctrl-x", ActionAlpha {}, Some("editor")),
KeyBinding::new("ctrl-x", NoAction {}, Some("editor")),
];
let mut keymap = Keymap::default();
keymap.add_bindings(bindings.clone());
// Ensure `space` results in pending input on the workspace, but not editor
let (result, pending) = keymap.bindings_for_input(
&[Keystroke::parse("ctrl-x").unwrap()],
&[KeyContext::parse("editor").unwrap()],
);
assert!(result.is_empty());
assert_eq!(pending, false);
}
#[test]
fn test_fail_to_disable() {
// disabled at the wrong level
let bindings = [
KeyBinding::new("ctrl-x", ActionAlpha {}, Some("editor")),
KeyBinding::new("ctrl-x", NoAction {}, Some("workspace")),
];
let mut keymap = Keymap::default();
keymap.add_bindings(bindings.clone());
// Ensure `space` results in pending input on the workspace, but not editor
let (result, pending) = keymap.bindings_for_input(
&[Keystroke::parse("ctrl-x").unwrap()],
&[
KeyContext::parse("workspace").unwrap(),
KeyContext::parse("editor").unwrap(),
],
);
assert_eq!(result.len(), 1);
assert_eq!(pending, false);
}
#[test]
fn test_disable_deeper() {
let bindings = [
KeyBinding::new("ctrl-x", ActionAlpha {}, Some("workspace")),
KeyBinding::new("ctrl-x", NoAction {}, Some("editor")),
];
let mut keymap = Keymap::default();
keymap.add_bindings(bindings.clone());
// Ensure `space` results in pending input on the workspace, but not editor
let (result, pending) = keymap.bindings_for_input(
&[Keystroke::parse("ctrl-x").unwrap()],
&[
KeyContext::parse("workspace").unwrap(),
KeyContext::parse("editor").unwrap(),
],
);
assert_eq!(result.len(), 0);
assert_eq!(pending, false);
}
#[test]
fn test_pending_match_enabled() {
let bindings = [
KeyBinding::new("ctrl-x", ActionBeta, Some("vim_mode == normal")),
KeyBinding::new("ctrl-x 0", ActionAlpha, Some("Workspace")),
];
let mut keymap = Keymap::default();
keymap.add_bindings(bindings.clone());
let matched = keymap.bindings_for_input(
&[Keystroke::parse("ctrl-x")].map(Result::unwrap),
&[
KeyContext::parse("Workspace"),
KeyContext::parse("Pane"),
KeyContext::parse("Editor vim_mode=normal"),
]
.map(Result::unwrap),
);
assert_eq!(matched.0.len(), 1);
assert!(matched.0[0].action.partial_eq(&ActionBeta));
assert!(matched.1);
}
#[test]
fn test_pending_match_enabled_extended() {
let bindings = [
KeyBinding::new("ctrl-x", ActionBeta, Some("vim_mode == normal")),
KeyBinding::new("ctrl-x 0", NoAction, Some("Workspace")),
];
let mut keymap = Keymap::default();
keymap.add_bindings(bindings.clone());
let matched = keymap.bindings_for_input(
&[Keystroke::parse("ctrl-x")].map(Result::unwrap),
&[
KeyContext::parse("Workspace"),
KeyContext::parse("Pane"),
KeyContext::parse("Editor vim_mode=normal"),
]
.map(Result::unwrap),
);
assert_eq!(matched.0.len(), 1);
assert!(matched.0[0].action.partial_eq(&ActionBeta));
assert!(!matched.1);
let bindings = [
KeyBinding::new("ctrl-x", ActionBeta, Some("Workspace")),
KeyBinding::new("ctrl-x 0", NoAction, Some("vim_mode == normal")),
];
let mut keymap = Keymap::default();
keymap.add_bindings(bindings.clone());
let matched = keymap.bindings_for_input(
&[Keystroke::parse("ctrl-x")].map(Result::unwrap),
&[
KeyContext::parse("Workspace"),
KeyContext::parse("Pane"),
KeyContext::parse("Editor vim_mode=normal"),
]
.map(Result::unwrap),
);
assert_eq!(matched.0.len(), 1);
assert!(matched.0[0].action.partial_eq(&ActionBeta));
assert!(!matched.1);
}
#[test]
fn test_overriding_prefix() {
let bindings = [
KeyBinding::new("ctrl-x 0", ActionAlpha, Some("Workspace")),
KeyBinding::new("ctrl-x", ActionBeta, Some("vim_mode == normal")),
];
let mut keymap = Keymap::default();
keymap.add_bindings(bindings.clone());
let matched = keymap.bindings_for_input(
&[Keystroke::parse("ctrl-x")].map(Result::unwrap),
&[
KeyContext::parse("Workspace"),
KeyContext::parse("Pane"),
KeyContext::parse("Editor vim_mode=normal"),
]
.map(Result::unwrap),
);
assert_eq!(matched.0.len(), 1);
assert!(matched.0[0].action.partial_eq(&ActionBeta));
assert!(!matched.1);
}
#[test]
fn test_bindings_for_action() {
let bindings = [

View file

@ -283,7 +283,7 @@ impl ToTaffy<taffy::style::LengthPercentageAuto> for Length {
fn to_taffy(&self, rem_size: Pixels) -> taffy::prelude::LengthPercentageAuto {
match self {
Length::Definite(length) => length.to_taffy(rem_size),
Length::Auto => taffy::prelude::LengthPercentageAuto::Auto,
Length::Auto => taffy::prelude::LengthPercentageAuto::auto(),
}
}
}
@ -292,7 +292,7 @@ impl ToTaffy<taffy::style::Dimension> for Length {
fn to_taffy(&self, rem_size: Pixels) -> taffy::prelude::Dimension {
match self {
Length::Definite(length) => length.to_taffy(rem_size),
Length::Auto => taffy::prelude::Dimension::Auto,
Length::Auto => taffy::prelude::Dimension::auto(),
}
}
}
@ -302,14 +302,14 @@ impl ToTaffy<taffy::style::LengthPercentage> for DefiniteLength {
match self {
DefiniteLength::Absolute(length) => match length {
AbsoluteLength::Pixels(pixels) => {
taffy::style::LengthPercentage::Length(pixels.into())
taffy::style::LengthPercentage::length(pixels.into())
}
AbsoluteLength::Rems(rems) => {
taffy::style::LengthPercentage::Length((*rems * rem_size).into())
taffy::style::LengthPercentage::length((*rems * rem_size).into())
}
},
DefiniteLength::Fraction(fraction) => {
taffy::style::LengthPercentage::Percent(*fraction)
taffy::style::LengthPercentage::percent(*fraction)
}
}
}
@ -320,14 +320,14 @@ impl ToTaffy<taffy::style::LengthPercentageAuto> for DefiniteLength {
match self {
DefiniteLength::Absolute(length) => match length {
AbsoluteLength::Pixels(pixels) => {
taffy::style::LengthPercentageAuto::Length(pixels.into())
taffy::style::LengthPercentageAuto::length(pixels.into())
}
AbsoluteLength::Rems(rems) => {
taffy::style::LengthPercentageAuto::Length((*rems * rem_size).into())
taffy::style::LengthPercentageAuto::length((*rems * rem_size).into())
}
},
DefiniteLength::Fraction(fraction) => {
taffy::style::LengthPercentageAuto::Percent(*fraction)
taffy::style::LengthPercentageAuto::percent(*fraction)
}
}
}
@ -337,12 +337,12 @@ impl ToTaffy<taffy::style::Dimension> for DefiniteLength {
fn to_taffy(&self, rem_size: Pixels) -> taffy::style::Dimension {
match self {
DefiniteLength::Absolute(length) => match length {
AbsoluteLength::Pixels(pixels) => taffy::style::Dimension::Length(pixels.into()),
AbsoluteLength::Pixels(pixels) => taffy::style::Dimension::length(pixels.into()),
AbsoluteLength::Rems(rems) => {
taffy::style::Dimension::Length((*rems * rem_size).into())
taffy::style::Dimension::length((*rems * rem_size).into())
}
},
DefiniteLength::Fraction(fraction) => taffy::style::Dimension::Percent(*fraction),
DefiniteLength::Fraction(fraction) => taffy::style::Dimension::percent(*fraction),
}
}
}
@ -350,9 +350,9 @@ impl ToTaffy<taffy::style::Dimension> for DefiniteLength {
impl ToTaffy<taffy::style::LengthPercentage> for AbsoluteLength {
fn to_taffy(&self, rem_size: Pixels) -> taffy::style::LengthPercentage {
match self {
AbsoluteLength::Pixels(pixels) => taffy::style::LengthPercentage::Length(pixels.into()),
AbsoluteLength::Pixels(pixels) => taffy::style::LengthPercentage::length(pixels.into()),
AbsoluteLength::Rems(rems) => {
taffy::style::LengthPercentage::Length((*rems * rem_size).into())
taffy::style::LengthPercentage::length((*rems * rem_size).into())
}
}
}

View file

@ -21,6 +21,7 @@ anyhow.workspace = true
derive_more.workspace = true
futures.workspace = true
http.workspace = true
http-body.workspace = true
log.workspace = true
serde.workspace = true
serde_json.workspace = true

View file

@ -6,6 +6,7 @@ use std::{
use bytes::Bytes;
use futures::AsyncRead;
use http_body::{Body, Frame};
/// Based on the implementation of AsyncBody in
/// <https://github.com/sagebind/isahc/blob/5c533f1ef4d6bdf1fd291b5103c22110f41d0bf0/src/body/mod.rs>.
@ -114,3 +115,24 @@ impl futures::AsyncRead for AsyncBody {
}
}
}
impl Body for AsyncBody {
type Data = Bytes;
type Error = std::io::Error;
fn poll_frame(
mut self: Pin<&mut Self>,
cx: &mut std::task::Context<'_>,
) -> Poll<Option<Result<Frame<Self::Data>, Self::Error>>> {
let mut buffer = vec![0; 8192];
match AsyncRead::poll_read(self.as_mut(), cx, &mut buffer) {
Poll::Ready(Ok(0)) => Poll::Ready(None),
Poll::Ready(Ok(n)) => {
let data = Bytes::copy_from_slice(&buffer[..n]);
Poll::Ready(Some(Ok(Frame::data(data))))
}
Poll::Ready(Err(e)) => Poll::Ready(Some(Err(e))),
Poll::Pending => Poll::Pending,
}
}
}

View file

@ -20,6 +20,7 @@ pub enum IconName {
AiMistral,
AiOllama,
AiOpenAi,
AiOpenAiCompat,
AiOpenRouter,
AiVZero,
AiXAi,

View file

@ -1,5 +1,5 @@
use anyhow::Result;
use client::{UserStore, zed_urls};
use client::{DisableAiSettings, UserStore, zed_urls};
use copilot::{Copilot, Status};
use editor::{
Editor, SelectionEffects,
@ -72,6 +72,11 @@ enum SupermavenButtonStatus {
impl Render for InlineCompletionButton {
fn render(&mut self, _: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
// Return empty div if AI is disabled
if DisableAiSettings::get_global(cx).disable_ai {
return div();
}
let all_language_settings = all_language_settings(None, cx);
match all_language_settings.edit_predictions.provider {

View file

@ -2072,6 +2072,21 @@ impl Buffer {
self.text.push_transaction(transaction, now);
}
/// Differs from `push_transaction` in that it does not clear the redo
/// stack. Intended to be used to create a parent transaction to merge
/// potential child transactions into.
///
/// The caller is responsible for removing it from the undo history using
/// `forget_transaction` if no edits are merged into it. Otherwise, if edits
/// are merged into this transaction, the caller is responsible for ensuring
/// the redo stack is cleared. The easiest way to ensure the redo stack is
/// cleared is to create transactions with the usual `start_transaction` and
/// `end_transaction` methods and merging the resulting transactions into
/// the transaction created by this method
pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
self.text.push_empty_transaction(now)
}
/// Prevent the last transaction from being grouped with any subsequent transactions,
/// even if they occur with the buffer's undo grouping duration.
pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {

View file

@ -158,17 +158,17 @@ impl DiagnosticSet {
});
if reversed {
cursor.prev(buffer);
cursor.prev();
} else {
cursor.next(buffer);
cursor.next();
}
iter::from_fn({
move || {
if let Some(diagnostic) = cursor.item() {
if reversed {
cursor.prev(buffer);
cursor.prev();
} else {
cursor.next(buffer);
cursor.next();
}
Some(diagnostic.resolve(buffer))
} else {

View file

@ -297,10 +297,10 @@ impl SyntaxSnapshot {
let mut first_edit_ix_for_depth = 0;
let mut prev_depth = 0;
let mut cursor = self.layers.cursor::<SyntaxLayerSummary>(text);
cursor.next(text);
cursor.next();
'outer: loop {
let depth = cursor.end(text).max_depth;
let depth = cursor.end().max_depth;
if depth > prev_depth {
first_edit_ix_for_depth = 0;
prev_depth = depth;
@ -313,7 +313,7 @@ impl SyntaxSnapshot {
position: edit_range.start,
};
if target.cmp(cursor.start(), text).is_gt() {
let slice = cursor.slice(&target, Bias::Left, text);
let slice = cursor.slice(&target, Bias::Left);
layers.append(slice, text);
}
}
@ -327,7 +327,6 @@ impl SyntaxSnapshot {
language: None,
},
Bias::Left,
text,
);
layers.append(slice, text);
continue;
@ -394,10 +393,10 @@ impl SyntaxSnapshot {
}
layers.push(layer, text);
cursor.next(text);
cursor.next();
}
layers.append(cursor.suffix(text), text);
layers.append(cursor.suffix(), text);
drop(cursor);
self.layers = layers;
}
@ -420,7 +419,7 @@ impl SyntaxSnapshot {
let mut cursor = self
.layers
.filter::<_, ()>(text, |summary| summary.contains_unknown_injections);
cursor.next(text);
cursor.next();
while let Some(layer) = cursor.item() {
let SyntaxLayerContent::Pending { language_name } = &layer.content else {
unreachable!()
@ -436,7 +435,7 @@ impl SyntaxSnapshot {
resolved_injection_ranges.push(range);
}
cursor.next(text);
cursor.next();
}
drop(cursor);
@ -469,7 +468,7 @@ impl SyntaxSnapshot {
let max_depth = self.layers.summary().max_depth;
let mut cursor = self.layers.cursor::<SyntaxLayerSummary>(text);
cursor.next(text);
cursor.next();
let mut layers = SumTree::new(text);
let mut changed_regions = ChangeRegionSet::default();
@ -514,7 +513,7 @@ impl SyntaxSnapshot {
};
let mut done = cursor.item().is_none();
while !done && position.cmp(&cursor.end(text), text).is_gt() {
while !done && position.cmp(&cursor.end(), text).is_gt() {
done = true;
let bounded_position = SyntaxLayerPositionBeforeChange {
@ -522,16 +521,16 @@ impl SyntaxSnapshot {
change: changed_regions.start_position(),
};
if bounded_position.cmp(cursor.start(), text).is_gt() {
let slice = cursor.slice(&bounded_position, Bias::Left, text);
let slice = cursor.slice(&bounded_position, Bias::Left);
if !slice.is_empty() {
layers.append(slice, text);
if changed_regions.prune(cursor.end(text), text) {
if changed_regions.prune(cursor.end(), text) {
done = false;
}
}
}
while position.cmp(&cursor.end(text), text).is_gt() {
while position.cmp(&cursor.end(), text).is_gt() {
let Some(layer) = cursor.item() else { break };
if changed_regions.intersects(layer, text) {
@ -555,8 +554,8 @@ impl SyntaxSnapshot {
layers.push(layer.clone(), text);
}
cursor.next(text);
if changed_regions.prune(cursor.end(text), text) {
cursor.next();
if changed_regions.prune(cursor.end(), text) {
done = false;
}
}
@ -572,7 +571,7 @@ impl SyntaxSnapshot {
if layer.range.to_offset(text) == (step_start_byte..step_end_byte)
&& layer.content.language_id() == step.language.id()
{
cursor.next(text);
cursor.next();
} else {
old_layer = None;
}
@ -918,7 +917,7 @@ impl SyntaxSnapshot {
}
});
cursor.next(buffer);
cursor.next();
iter::from_fn(move || {
while let Some(layer) = cursor.item() {
let mut info = None;
@ -940,7 +939,7 @@ impl SyntaxSnapshot {
});
}
}
cursor.next(buffer);
cursor.next();
if info.is_some() {
return info;
}

View file

@ -10,25 +10,21 @@ use http_client::Result;
use parking_lot::Mutex;
use std::sync::Arc;
pub fn language_model_id() -> LanguageModelId {
LanguageModelId::from("fake".to_string())
#[derive(Clone)]
pub struct FakeLanguageModelProvider {
id: LanguageModelProviderId,
name: LanguageModelProviderName,
}
pub fn language_model_name() -> LanguageModelName {
LanguageModelName::from("Fake".to_string())
impl Default for FakeLanguageModelProvider {
fn default() -> Self {
Self {
id: LanguageModelProviderId::from("fake".to_string()),
name: LanguageModelProviderName::from("Fake".to_string()),
}
pub fn provider_id() -> LanguageModelProviderId {
LanguageModelProviderId::from("fake".to_string())
}
pub fn provider_name() -> LanguageModelProviderName {
LanguageModelProviderName::from("Fake".to_string())
}
#[derive(Clone, Default)]
pub struct FakeLanguageModelProvider;
impl LanguageModelProviderState for FakeLanguageModelProvider {
type ObservableEntity = ();
@ -39,11 +35,11 @@ impl LanguageModelProviderState for FakeLanguageModelProvider {
impl LanguageModelProvider for FakeLanguageModelProvider {
fn id(&self) -> LanguageModelProviderId {
provider_id()
self.id.clone()
}
fn name(&self) -> LanguageModelProviderName {
provider_name()
self.name.clone()
}
fn default_model(&self, _cx: &App) -> Option<Arc<dyn LanguageModel>> {
@ -76,6 +72,10 @@ impl LanguageModelProvider for FakeLanguageModelProvider {
}
impl FakeLanguageModelProvider {
pub fn new(id: LanguageModelProviderId, name: LanguageModelProviderName) -> Self {
Self { id, name }
}
pub fn test_model(&self) -> FakeLanguageModel {
FakeLanguageModel::default()
}
@ -89,11 +89,22 @@ pub struct ToolUseRequest {
pub schema: serde_json::Value,
}
#[derive(Default)]
pub struct FakeLanguageModel {
provider_id: LanguageModelProviderId,
provider_name: LanguageModelProviderName,
current_completion_txs: Mutex<Vec<(LanguageModelRequest, mpsc::UnboundedSender<String>)>>,
}
impl Default for FakeLanguageModel {
fn default() -> Self {
Self {
provider_id: LanguageModelProviderId::from("fake".to_string()),
provider_name: LanguageModelProviderName::from("Fake".to_string()),
current_completion_txs: Mutex::new(Vec::new()),
}
}
}
impl FakeLanguageModel {
pub fn pending_completions(&self) -> Vec<LanguageModelRequest> {
self.current_completion_txs
@ -138,19 +149,19 @@ impl FakeLanguageModel {
impl LanguageModel for FakeLanguageModel {
fn id(&self) -> LanguageModelId {
language_model_id()
LanguageModelId::from("fake".to_string())
}
fn name(&self) -> LanguageModelName {
language_model_name()
LanguageModelName::from("Fake".to_string())
}
fn provider_id(&self) -> LanguageModelProviderId {
provider_id()
self.provider_id.clone()
}
fn provider_name(&self) -> LanguageModelProviderName {
provider_name()
self.provider_name.clone()
}
fn supports_tools(&self) -> bool {

View file

@ -735,6 +735,18 @@ impl From<String> for LanguageModelProviderName {
}
}
impl From<Arc<str>> for LanguageModelProviderId {
fn from(value: Arc<str>) -> Self {
Self(SharedString::from(value))
}
}
impl From<Arc<str>> for LanguageModelProviderName {
fn from(value: Arc<str>) -> Self {
Self(SharedString::from(value))
}
}
#[cfg(test)]
mod tests {
use super::*;

View file

@ -125,7 +125,7 @@ impl LanguageModelRegistry {
#[cfg(any(test, feature = "test-support"))]
pub fn test(cx: &mut App) -> crate::fake_provider::FakeLanguageModelProvider {
let fake_provider = crate::fake_provider::FakeLanguageModelProvider;
let fake_provider = crate::fake_provider::FakeLanguageModelProvider::default();
let registry = cx.new(|cx| {
let mut registry = Self::default();
registry.register_provider(fake_provider.clone(), cx);
@ -403,16 +403,17 @@ mod tests {
fn test_register_providers(cx: &mut App) {
let registry = cx.new(|_| LanguageModelRegistry::default());
let provider = FakeLanguageModelProvider::default();
registry.update(cx, |registry, cx| {
registry.register_provider(FakeLanguageModelProvider, cx);
registry.register_provider(provider.clone(), cx);
});
let providers = registry.read(cx).providers();
assert_eq!(providers.len(), 1);
assert_eq!(providers[0].id(), crate::fake_provider::provider_id());
assert_eq!(providers[0].id(), provider.id());
registry.update(cx, |registry, cx| {
registry.unregister_provider(crate::fake_provider::provider_id(), cx);
registry.unregister_provider(provider.id(), cx);
});
let providers = registry.read(cx).providers();

View file

@ -26,10 +26,10 @@ client.workspace = true
collections.workspace = true
component.workspace = true
credentials_provider.workspace = true
convert_case.workspace = true
copilot.workspace = true
deepseek = { workspace = true, features = ["schemars"] }
editor.workspace = true
fs.workspace = true
futures.workspace = true
google_ai = { workspace = true, features = ["schemars"] }
gpui.workspace = true

View file

@ -1,8 +1,10 @@
use std::sync::Arc;
use ::settings::{Settings, SettingsStore};
use client::{Client, UserStore};
use collections::HashSet;
use gpui::{App, Context, Entity};
use language_model::LanguageModelRegistry;
use language_model::{LanguageModelProviderId, LanguageModelRegistry};
use provider::deepseek::DeepSeekLanguageModelProvider;
pub mod provider;
@ -18,17 +20,81 @@ use crate::provider::lmstudio::LmStudioLanguageModelProvider;
use crate::provider::mistral::MistralLanguageModelProvider;
use crate::provider::ollama::OllamaLanguageModelProvider;
use crate::provider::open_ai::OpenAiLanguageModelProvider;
use crate::provider::open_ai_compatible::OpenAiCompatibleLanguageModelProvider;
use crate::provider::open_router::OpenRouterLanguageModelProvider;
use crate::provider::vercel::VercelLanguageModelProvider;
use crate::provider::x_ai::XAiLanguageModelProvider;
pub use crate::settings::*;
pub fn init(user_store: Entity<UserStore>, client: Arc<Client>, cx: &mut App) {
crate::settings::init(cx);
crate::settings::init_settings(cx);
let registry = LanguageModelRegistry::global(cx);
registry.update(cx, |registry, cx| {
register_language_model_providers(registry, user_store, client, cx);
register_language_model_providers(registry, user_store, client.clone(), cx);
});
let mut openai_compatible_providers = AllLanguageModelSettings::get_global(cx)
.openai_compatible
.keys()
.cloned()
.collect::<HashSet<_>>();
registry.update(cx, |registry, cx| {
register_openai_compatible_providers(
registry,
&HashSet::default(),
&openai_compatible_providers,
client.clone(),
cx,
);
});
cx.observe_global::<SettingsStore>(move |cx| {
let openai_compatible_providers_new = AllLanguageModelSettings::get_global(cx)
.openai_compatible
.keys()
.cloned()
.collect::<HashSet<_>>();
if openai_compatible_providers_new != openai_compatible_providers {
registry.update(cx, |registry, cx| {
register_openai_compatible_providers(
registry,
&openai_compatible_providers,
&openai_compatible_providers_new,
client.clone(),
cx,
);
});
openai_compatible_providers = openai_compatible_providers_new;
}
})
.detach();
}
fn register_openai_compatible_providers(
registry: &mut LanguageModelRegistry,
old: &HashSet<Arc<str>>,
new: &HashSet<Arc<str>>,
client: Arc<Client>,
cx: &mut Context<LanguageModelRegistry>,
) {
for provider_id in old {
if !new.contains(provider_id) {
registry.unregister_provider(LanguageModelProviderId::from(provider_id.clone()), cx);
}
}
for provider_id in new {
if !old.contains(provider_id) {
registry.register_provider(
OpenAiCompatibleLanguageModelProvider::new(
provider_id.clone(),
client.http_client(),
cx,
),
cx,
);
}
}
}
fn register_language_model_providers(

View file

@ -8,6 +8,7 @@ pub mod lmstudio;
pub mod mistral;
pub mod ollama;
pub mod open_ai;
pub mod open_ai_compatible;
pub mod open_router;
pub mod vercel;
pub mod x_ai;

View file

@ -243,7 +243,7 @@ impl State {
pub struct BedrockLanguageModelProvider {
http_client: AwsHttpClient,
handler: tokio::runtime::Handle,
handle: tokio::runtime::Handle,
state: gpui::Entity<State>,
}
@ -258,13 +258,9 @@ impl BedrockLanguageModelProvider {
}),
});
let tokio_handle = Tokio::handle(cx);
let coerced_client = AwsHttpClient::new(http_client.clone(), tokio_handle.clone());
Self {
http_client: coerced_client,
handler: tokio_handle.clone(),
http_client: AwsHttpClient::new(http_client.clone()),
handle: Tokio::handle(cx),
state,
}
}
@ -274,7 +270,7 @@ impl BedrockLanguageModelProvider {
id: LanguageModelId::from(model.id().to_string()),
model,
http_client: self.http_client.clone(),
handler: self.handler.clone(),
handle: self.handle.clone(),
state: self.state.clone(),
client: OnceCell::new(),
request_limiter: RateLimiter::new(4),
@ -375,7 +371,7 @@ struct BedrockModel {
id: LanguageModelId,
model: Model,
http_client: AwsHttpClient,
handler: tokio::runtime::Handle,
handle: tokio::runtime::Handle,
client: OnceCell<BedrockClient>,
state: gpui::Entity<State>,
request_limiter: RateLimiter,
@ -447,7 +443,7 @@ impl BedrockModel {
}
}
let config = self.handler.block_on(config_builder.load());
let config = self.handle.block_on(config_builder.load());
anyhow::Ok(BedrockClient::new(&config))
})
.context("initializing Bedrock client")?;

View file

@ -2,7 +2,6 @@ use anyhow::{Context as _, Result, anyhow};
use collections::{BTreeMap, HashMap};
use credentials_provider::CredentialsProvider;
use fs::Fs;
use futures::Stream;
use futures::{FutureExt, StreamExt, future::BoxFuture};
use gpui::{AnyView, App, AsyncApp, Context, Entity, Subscription, Task, Window};
@ -18,7 +17,7 @@ use menu;
use open_ai::{ImageUrl, Model, ResponseStreamEvent, stream_completion};
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::{Settings, SettingsStore, update_settings_file};
use settings::{Settings, SettingsStore};
use std::pin::Pin;
use std::str::FromStr as _;
use std::sync::Arc;
@ -28,7 +27,6 @@ use ui::{ElevationIndex, List, Tooltip, prelude::*};
use ui_input::SingleLineInput;
use util::ResultExt;
use crate::OpenAiSettingsContent;
use crate::{AllLanguageModelSettings, ui::InstructionListItem};
const PROVIDER_ID: LanguageModelProviderId = language_model::OPEN_AI_PROVIDER_ID;
@ -621,13 +619,10 @@ struct RawToolCall {
arguments: String,
}
pub fn count_open_ai_tokens(
pub(crate) fn collect_tiktoken_messages(
request: LanguageModelRequest,
model: Model,
cx: &App,
) -> BoxFuture<'static, Result<u64>> {
cx.background_spawn(async move {
let messages = request
) -> Vec<tiktoken_rs::ChatCompletionRequestMessage> {
request
.messages
.into_iter()
.map(|message| tiktoken_rs::ChatCompletionRequestMessage {
@ -640,7 +635,16 @@ pub fn count_open_ai_tokens(
name: None,
function_call: None,
})
.collect::<Vec<_>>();
.collect::<Vec<_>>()
}
pub fn count_open_ai_tokens(
request: LanguageModelRequest,
model: Model,
cx: &App,
) -> BoxFuture<'static, Result<u64>> {
cx.background_spawn(async move {
let messages = collect_tiktoken_messages(request);
match model {
Model::Custom { max_tokens, .. } => {
@ -678,7 +682,6 @@ pub fn count_open_ai_tokens(
struct ConfigurationView {
api_key_editor: Entity<SingleLineInput>,
api_url_editor: Entity<SingleLineInput>,
state: gpui::Entity<State>,
load_credentials_task: Option<Task<()>>,
}
@ -691,23 +694,6 @@ impl ConfigurationView {
cx,
"sk-000000000000000000000000000000000000000000000000",
)
.label("API key")
});
let api_url = AllLanguageModelSettings::get_global(cx)
.openai
.api_url
.clone();
let api_url_editor = cx.new(|cx| {
let input = SingleLineInput::new(window, cx, open_ai::OPEN_AI_API_URL).label("API URL");
if !api_url.is_empty() {
input.editor.update(cx, |editor, cx| {
editor.set_text(&*api_url, window, cx);
});
}
input
});
cx.observe(&state, |_, _, cx| {
@ -735,7 +721,6 @@ impl ConfigurationView {
Self {
api_key_editor,
api_url_editor,
state,
load_credentials_task,
}
@ -783,57 +768,6 @@ impl ConfigurationView {
cx.notify();
}
fn save_api_url(&mut self, cx: &mut Context<Self>) {
let api_url = self
.api_url_editor
.read(cx)
.editor()
.read(cx)
.text(cx)
.trim()
.to_string();
let current_url = AllLanguageModelSettings::get_global(cx)
.openai
.api_url
.clone();
let effective_current_url = if current_url.is_empty() {
open_ai::OPEN_AI_API_URL
} else {
&current_url
};
if !api_url.is_empty() && api_url != effective_current_url {
let fs = <dyn Fs>::global(cx);
update_settings_file::<AllLanguageModelSettings>(fs, cx, move |settings, _| {
if let Some(settings) = settings.openai.as_mut() {
settings.api_url = Some(api_url.clone());
} else {
settings.openai = Some(OpenAiSettingsContent {
api_url: Some(api_url.clone()),
available_models: None,
});
}
});
}
}
fn reset_api_url(&mut self, window: &mut Window, cx: &mut Context<Self>) {
self.api_url_editor.update(cx, |input, cx| {
input.editor.update(cx, |editor, cx| {
editor.set_text("", window, cx);
});
});
let fs = <dyn Fs>::global(cx);
update_settings_file::<AllLanguageModelSettings>(fs, cx, |settings, _cx| {
if let Some(settings) = settings.openai.as_mut() {
settings.api_url = None;
}
});
cx.notify();
}
fn should_render_editor(&self, cx: &mut Context<Self>) -> bool {
!self.state.read(cx).is_authenticated()
}
@ -846,7 +780,6 @@ impl Render for ConfigurationView {
let api_key_section = if self.should_render_editor(cx) {
v_flex()
.on_action(cx.listener(Self::save_api_key))
.child(Label::new("To use Zed's assistant with OpenAI, you need to add an API key. Follow these steps:"))
.child(
List::new()
@ -910,59 +843,34 @@ impl Render for ConfigurationView {
.into_any()
};
let custom_api_url_set =
AllLanguageModelSettings::get_global(cx).openai.api_url != open_ai::OPEN_AI_API_URL;
let api_url_section = if custom_api_url_set {
h_flex()
.mt_1()
.p_1()
.justify_between()
.rounded_md()
.border_1()
.border_color(cx.theme().colors().border)
.bg(cx.theme().colors().background)
.child(
h_flex()
.gap_1()
.child(Icon::new(IconName::Check).color(Color::Success))
.child(Label::new("Custom API URL configured.")),
)
.child(
Button::new("reset-api-url", "Reset API URL")
.label_size(LabelSize::Small)
.icon(IconName::Undo)
.icon_size(IconSize::Small)
.icon_position(IconPosition::Start)
.layer(ElevationIndex::ModalSurface)
.on_click(
cx.listener(|this, _, window, cx| this.reset_api_url(window, cx)),
),
)
.into_any()
} else {
v_flex()
.on_action(cx.listener(|this, _: &menu::Confirm, _window, cx| {
this.save_api_url(cx);
cx.notify();
}))
.mt_2()
.pt_2()
let compatible_api_section = h_flex()
.mt_1p5()
.gap_0p5()
.flex_wrap()
.when(self.should_render_editor(cx), |this| {
this.pt_1p5()
.border_t_1()
.border_color(cx.theme().colors().border_variant)
.gap_1()
})
.child(
List::new()
.child(InstructionListItem::text_only(
"Optionally, you can change the base URL for the OpenAI API request.",
))
.child(InstructionListItem::text_only(
"Paste the new API endpoint below and hit enter",
)),
h_flex()
.gap_2()
.child(
Icon::new(IconName::Info)
.size(IconSize::XSmall)
.color(Color::Muted),
)
.child(self.api_url_editor.clone())
.into_any()
};
.child(Label::new("Zed also supports OpenAI-compatible models.")),
)
.child(
Button::new("docs", "Learn More")
.icon(IconName::ArrowUpRight)
.icon_size(IconSize::XSmall)
.icon_color(Color::Muted)
.on_click(move |_, _window, cx| {
cx.open_url("https://zed.dev/docs/ai/configuration#openai-api-compatible")
}),
);
if self.load_credentials_task.is_some() {
div().child(Label::new("Loading credentials…")).into_any()
@ -970,7 +878,7 @@ impl Render for ConfigurationView {
v_flex()
.size_full()
.child(api_key_section)
.child(api_url_section)
.child(compatible_api_section)
.into_any()
}
}

View file

@ -0,0 +1,522 @@
use anyhow::{Context as _, Result, anyhow};
use credentials_provider::CredentialsProvider;
use convert_case::{Case, Casing};
use futures::{FutureExt, StreamExt, future::BoxFuture};
use gpui::{AnyView, App, AsyncApp, Context, Entity, Subscription, Task, Window};
use http_client::HttpClient;
use language_model::{
AuthenticateError, LanguageModel, LanguageModelCompletionError, LanguageModelCompletionEvent,
LanguageModelId, LanguageModelName, LanguageModelProvider, LanguageModelProviderId,
LanguageModelProviderName, LanguageModelProviderState, LanguageModelRequest,
LanguageModelToolChoice, RateLimiter,
};
use menu;
use open_ai::{ResponseStreamEvent, stream_completion};
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::{Settings, SettingsStore};
use std::sync::Arc;
use ui::{ElevationIndex, Tooltip, prelude::*};
use ui_input::SingleLineInput;
use util::ResultExt;
use crate::AllLanguageModelSettings;
use crate::provider::open_ai::{OpenAiEventMapper, into_open_ai};
/// Settings for a single user-configured OpenAI-compatible provider
/// (one entry under the `openai_compatible` settings map).
#[derive(Default, Clone, Debug, PartialEq)]
pub struct OpenAiCompatibleSettings {
    /// Base URL of the OpenAI-compatible API endpoint.
    pub api_url: String,
    /// Models the user has declared for this provider.
    pub available_models: Vec<AvailableModel>,
}
/// A user-declared model served by an OpenAI-compatible endpoint,
/// deserialized from settings.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema)]
pub struct AvailableModel {
    /// Model identifier sent to the API (also used as the `LanguageModelId`).
    pub name: String,
    /// Optional human-readable name shown in the UI; falls back to `name`.
    pub display_name: Option<String>,
    /// Context window size, reported via `LanguageModel::max_token_count`.
    pub max_tokens: u64,
    /// Optional cap on output tokens, reported via `max_output_tokens`.
    pub max_output_tokens: Option<u64>,
    // NOTE(review): declared for settings compatibility; not read anywhere in
    // this file — presumably consumed by request construction elsewhere. Verify.
    pub max_completion_tokens: Option<u64>,
}
/// Language-model provider backed by an arbitrary OpenAI-compatible HTTP API.
/// The provider id/name both come from the settings key the user chose.
pub struct OpenAiCompatibleLanguageModelProvider {
    id: LanguageModelProviderId,
    name: LanguageModelProviderName,
    http_client: Arc<dyn HttpClient>,
    state: gpui::Entity<State>,
}
/// Observable per-provider state: credentials plus the latest settings,
/// kept in sync with the global settings store via `_subscription`.
pub struct State {
    /// Settings key identifying this provider.
    id: Arc<str>,
    /// Environment variable checked for the API key, derived from `id`
    /// (e.g. `MY_PROVIDER_API_KEY`).
    env_var_name: Arc<str>,
    /// API key, if authenticated.
    api_key: Option<String>,
    /// True when `api_key` came from the environment variable rather than
    /// the credential store (affects how the UI offers to reset it).
    api_key_from_env: bool,
    settings: OpenAiCompatibleSettings,
    _subscription: Subscription,
}
impl State {
    /// Whether an API key is currently available.
    fn is_authenticated(&self) -> bool {
        self.api_key.is_some()
    }
    /// Deletes the credentials stored under this provider's API URL and
    /// clears the in-memory key.
    fn reset_api_key(&self, cx: &mut Context<Self>) -> Task<Result<()>> {
        let credentials_provider = <dyn CredentialsProvider>::global(cx);
        let api_url = self.settings.api_url.clone();
        cx.spawn(async move |this, cx| {
            // Best-effort delete; a failure is logged but does not abort
            // clearing the in-memory state.
            credentials_provider
                .delete_credentials(&api_url, &cx)
                .await
                .log_err();
            this.update(cx, |this, cx| {
                this.api_key = None;
                this.api_key_from_env = false;
                cx.notify();
            })
        })
    }
    /// Persists `api_key` in the credential store (keyed by the API URL)
    /// and records it in memory.
    fn set_api_key(&mut self, api_key: String, cx: &mut Context<Self>) -> Task<Result<()>> {
        let credentials_provider = <dyn CredentialsProvider>::global(cx);
        let api_url = self.settings.api_url.clone();
        cx.spawn(async move |this, cx| {
            // Best-effort write; even if persisting fails the key is kept
            // in memory for this session.
            credentials_provider
                .write_credentials(&api_url, "Bearer", api_key.as_bytes(), &cx)
                .await
                .log_err();
            this.update(cx, |this, cx| {
                this.api_key = Some(api_key);
                cx.notify();
            })
        })
    }
    /// Loads an API key, preferring the provider's environment variable and
    /// falling back to the credential store. No-op if already authenticated.
    fn authenticate(&self, cx: &mut Context<Self>) -> Task<Result<(), AuthenticateError>> {
        if self.is_authenticated() {
            return Task::ready(Ok(()));
        }
        let credentials_provider = <dyn CredentialsProvider>::global(cx);
        let env_var_name = self.env_var_name.clone();
        let api_url = self.settings.api_url.clone();
        cx.spawn(async move |this, cx| {
            let (api_key, from_env) = if let Ok(api_key) = std::env::var(env_var_name.as_ref()) {
                (api_key, true)
            } else {
                let (_, api_key) = credentials_provider
                    .read_credentials(&api_url, &cx)
                    .await?
                    .ok_or(AuthenticateError::CredentialsNotFound)?;
                (
                    // Fix: the message previously contained the literal text
                    // "{PROVIDER_NAME}" — this is a plain &str, not a format
                    // string, and no PROVIDER_NAME constant exists here.
                    String::from_utf8(api_key)
                        .with_context(|| format!("invalid API key stored for {api_url}"))?,
                    false,
                )
            };
            this.update(cx, |this, cx| {
                this.api_key = Some(api_key);
                this.api_key_from_env = from_env;
                cx.notify();
            })?;
            Ok(())
        })
    }
}
impl OpenAiCompatibleLanguageModelProvider {
    /// Creates a provider for the settings entry named `id`, seeding its
    /// state from current settings and subscribing to settings changes.
    pub fn new(id: Arc<str>, http_client: Arc<dyn HttpClient>, cx: &mut App) -> Self {
        // Looks up this provider's entry in the `openai_compatible` settings map.
        fn resolve_settings<'a>(id: &'a str, cx: &'a App) -> Option<&'a OpenAiCompatibleSettings> {
            AllLanguageModelSettings::get_global(cx)
                .openai_compatible
                .get(id)
        }
        let state = cx.new(|cx| State {
            id: id.clone(),
            // Derive the env var from the id, upper-snake-cased:
            // e.g. "my-provider" -> "MY_PROVIDER_API_KEY".
            env_var_name: format!("{}_API_KEY", id).to_case(Case::Constant).into(),
            settings: resolve_settings(&id, cx).cloned().unwrap_or_default(),
            api_key: None,
            api_key_from_env: false,
            // Keep `settings` current; only notify observers on actual change.
            _subscription: cx.observe_global::<SettingsStore>(|this: &mut State, cx| {
                let Some(settings) = resolve_settings(&this.id, cx) else {
                    return;
                };
                if &this.settings != settings {
                    this.settings = settings.clone();
                    cx.notify();
                }
            }),
        });
        Self {
            id: id.clone().into(),
            name: id.into(),
            http_client,
            state,
        }
    }
    /// Wraps a settings-declared model in a `LanguageModel` handle sharing
    /// this provider's state and HTTP client.
    fn create_language_model(&self, model: AvailableModel) -> Arc<dyn LanguageModel> {
        Arc::new(OpenAiCompatibleLanguageModel {
            id: LanguageModelId::from(model.name.clone()),
            provider_id: self.id.clone(),
            provider_name: self.name.clone(),
            model,
            state: self.state.clone(),
            http_client: self.http_client.clone(),
            request_limiter: RateLimiter::new(4),
        })
    }
}
impl LanguageModelProviderState for OpenAiCompatibleLanguageModelProvider {
    type ObservableEntity = State;
    /// Exposes the provider's state entity so the registry can observe
    /// credential/settings changes.
    fn observable_entity(&self) -> Option<gpui::Entity<Self::ObservableEntity>> {
        Some(self.state.clone())
    }
}
impl LanguageModelProvider for OpenAiCompatibleLanguageModelProvider {
    fn id(&self) -> LanguageModelProviderId {
        self.id.clone()
    }
    fn name(&self) -> LanguageModelProviderName {
        self.name.clone()
    }
    fn icon(&self) -> IconName {
        IconName::AiOpenAiCompat
    }
    /// The first model declared in settings is treated as the default.
    fn default_model(&self, cx: &App) -> Option<Arc<dyn LanguageModel>> {
        self.state
            .read(cx)
            .settings
            .available_models
            .first()
            .map(|model| self.create_language_model(model.clone()))
    }
    /// No distinguished "fast" model for compatible providers.
    fn default_fast_model(&self, _cx: &App) -> Option<Arc<dyn LanguageModel>> {
        None
    }
    /// All models declared in settings, in declaration order.
    fn provided_models(&self, cx: &App) -> Vec<Arc<dyn LanguageModel>> {
        self.state
            .read(cx)
            .settings
            .available_models
            .iter()
            .map(|model| self.create_language_model(model.clone()))
            .collect()
    }
    fn is_authenticated(&self, cx: &App) -> bool {
        self.state.read(cx).is_authenticated()
    }
    fn authenticate(&self, cx: &mut App) -> Task<Result<(), AuthenticateError>> {
        self.state.update(cx, |state, cx| state.authenticate(cx))
    }
    fn configuration_view(&self, window: &mut Window, cx: &mut App) -> AnyView {
        cx.new(|cx| ConfigurationView::new(self.state.clone(), window, cx))
            .into()
    }
    fn reset_credentials(&self, cx: &mut App) -> Task<Result<()>> {
        self.state.update(cx, |state, cx| state.reset_api_key(cx))
    }
}
/// A single model served by an OpenAI-compatible endpoint. Holds a handle to
/// the provider's shared `State` for credentials and the current API URL.
pub struct OpenAiCompatibleLanguageModel {
    id: LanguageModelId,
    provider_id: LanguageModelProviderId,
    provider_name: LanguageModelProviderName,
    model: AvailableModel,
    state: gpui::Entity<State>,
    http_client: Arc<dyn HttpClient>,
    /// Caps concurrent in-flight requests for this model.
    request_limiter: RateLimiter,
}
impl OpenAiCompatibleLanguageModel {
    /// Streams a raw completion from the provider's endpoint, reading the
    /// API key and URL from shared state and going through the rate limiter.
    fn stream_completion(
        &self,
        request: open_ai::Request,
        cx: &AsyncApp,
    ) -> BoxFuture<'static, Result<futures::stream::BoxStream<'static, Result<ResponseStreamEvent>>>>
    {
        let http_client = self.http_client.clone();
        // Snapshot credentials/URL now; fails only if the app is shutting down.
        let Ok((api_key, api_url)) = cx.read_entity(&self.state, |state, _| {
            (state.api_key.clone(), state.settings.api_url.clone())
        }) else {
            return futures::future::ready(Err(anyhow!("App state dropped"))).boxed();
        };
        let provider = self.provider_name.clone();
        let future = self.request_limiter.stream(async move {
            let Some(api_key) = api_key else {
                return Err(LanguageModelCompletionError::NoApiKey { provider });
            };
            let request = stream_completion(http_client.as_ref(), &api_url, &api_key, request);
            let response = request.await?;
            Ok(response)
        });
        async move { Ok(future.await?.boxed()) }.boxed()
    }
}
impl LanguageModel for OpenAiCompatibleLanguageModel {
    fn id(&self) -> LanguageModelId {
        self.id.clone()
    }
    /// Display name from settings, falling back to the raw model name.
    fn name(&self) -> LanguageModelName {
        LanguageModelName::from(
            self.model
                .display_name
                .clone()
                .unwrap_or_else(|| self.model.name.clone()),
        )
    }
    fn provider_id(&self) -> LanguageModelProviderId {
        self.provider_id.clone()
    }
    fn provider_name(&self) -> LanguageModelProviderName {
        self.provider_name.clone()
    }
    fn supports_tools(&self) -> bool {
        true
    }
    fn supports_images(&self) -> bool {
        false
    }
    fn supports_tool_choice(&self, choice: LanguageModelToolChoice) -> bool {
        match choice {
            LanguageModelToolChoice::Auto => true,
            LanguageModelToolChoice::Any => true,
            LanguageModelToolChoice::None => true,
        }
    }
    // NOTE(review): uses the "openai/" prefix even for compatible providers —
    // presumably intentional for telemetry grouping; confirm.
    fn telemetry_id(&self) -> String {
        format!("openai/{}", self.model.name)
    }
    fn max_token_count(&self) -> u64 {
        self.model.max_tokens
    }
    fn max_output_tokens(&self) -> Option<u64> {
        self.model.max_output_tokens
    }
    /// Estimates token usage with tiktoken, picking a tokenizer by context
    /// window size since the true tokenizer of a compatible model is unknown.
    fn count_tokens(
        &self,
        request: LanguageModelRequest,
        cx: &App,
    ) -> BoxFuture<'static, Result<u64>> {
        let max_token_count = self.max_token_count();
        cx.background_spawn(async move {
            let messages = super::open_ai::collect_tiktoken_messages(request);
            let model = if max_token_count >= 100_000 {
                // If the max tokens is 100k or more, it is likely the o200k_base tokenizer from gpt4o
                "gpt-4o"
            } else {
                // Otherwise fallback to gpt-4, since only cl100k_base and o200k_base are
                // supported with this tiktoken method
                "gpt-4"
            };
            tiktoken_rs::num_tokens_from_messages(model, &messages).map(|tokens| tokens as u64)
        })
        .boxed()
    }
    /// Converts the request into OpenAI wire format, streams it, and maps the
    /// raw response events into `LanguageModelCompletionEvent`s.
    fn stream_completion(
        &self,
        request: LanguageModelRequest,
        cx: &AsyncApp,
    ) -> BoxFuture<
        'static,
        Result<
            futures::stream::BoxStream<
                'static,
                Result<LanguageModelCompletionEvent, LanguageModelCompletionError>,
            >,
            LanguageModelCompletionError,
        >,
    > {
        let request = into_open_ai(request, &self.model.name, true, self.max_output_tokens());
        let completions = self.stream_completion(request, cx);
        async move {
            let mapper = OpenAiEventMapper::new();
            Ok(mapper.map_stream(completions.await?).boxed())
        }
        .boxed()
    }
}
/// Settings-panel view for entering/resetting the provider's API key.
struct ConfigurationView {
    api_key_editor: Entity<SingleLineInput>,
    state: gpui::Entity<State>,
    /// Present while initial authentication is in flight; the view shows a
    /// loading message until it completes.
    load_credentials_task: Option<Task<()>>,
}
impl ConfigurationView {
    /// Builds the view and kicks off a background authentication attempt so
    /// existing credentials are picked up without user action.
    fn new(state: gpui::Entity<State>, window: &mut Window, cx: &mut Context<Self>) -> Self {
        let api_key_editor = cx.new(|cx| {
            SingleLineInput::new(
                window,
                cx,
                "000000000000000000000000000000000000000000000000000",
            )
        });
        // Re-render whenever the provider state changes.
        cx.observe(&state, |_, _, cx| {
            cx.notify();
        })
        .detach();
        let load_credentials_task = Some(cx.spawn_in(window, {
            let state = state.clone();
            async move |this, cx| {
                if let Some(task) = state
                    .update(cx, |state, cx| state.authenticate(cx))
                    .log_err()
                {
                    // We don't log an error, because "not signed in" is also an error.
                    let _ = task.await;
                }
                // Clear the loading indicator regardless of the outcome.
                this.update(cx, |this, cx| {
                    this.load_credentials_task = None;
                    cx.notify();
                })
                .log_err();
            }
        }));
        Self {
            api_key_editor,
            state,
            load_credentials_task,
        }
    }
    /// Saves the key typed into the editor (triggered by `menu::Confirm`).
    fn save_api_key(&mut self, _: &menu::Confirm, window: &mut Window, cx: &mut Context<Self>) {
        let api_key = self
            .api_key_editor
            .read(cx)
            .editor()
            .read(cx)
            .text(cx)
            .trim()
            .to_string();
        // Don't proceed if no API key is provided and we're not authenticated
        if api_key.is_empty() && !self.state.read(cx).is_authenticated() {
            return;
        }
        let state = self.state.clone();
        cx.spawn_in(window, async move |_, cx| {
            state
                .update(cx, |state, cx| state.set_api_key(api_key, cx))?
                .await
        })
        .detach_and_log_err(cx);
        cx.notify();
    }
    /// Clears the editor and deletes the stored key.
    fn reset_api_key(&mut self, window: &mut Window, cx: &mut Context<Self>) {
        self.api_key_editor.update(cx, |input, cx| {
            input.editor.update(cx, |editor, cx| {
                editor.set_text("", window, cx);
            });
        });
        let state = self.state.clone();
        cx.spawn_in(window, async move |_, cx| {
            state.update(cx, |state, cx| state.reset_api_key(cx))?.await
        })
        .detach_and_log_err(cx);
        cx.notify();
    }
    /// Show the key editor only while unauthenticated.
    fn should_render_editor(&self, cx: &mut Context<Self>) -> bool {
        !self.state.read(cx).is_authenticated()
    }
}
impl Render for ConfigurationView {
    /// Renders either the key-entry form (when unauthenticated) or a
    /// confirmation row with a reset button, plus a loading placeholder
    /// while credentials are being looked up.
    fn render(&mut self, _: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
        let env_var_set = self.state.read(cx).api_key_from_env;
        let env_var_name = self.state.read(cx).env_var_name.clone();
        let api_key_section = if self.should_render_editor(cx) {
            // Unauthenticated: instructions + editor + env-var hint.
            v_flex()
                .on_action(cx.listener(Self::save_api_key))
                .child(Label::new("To use Zed's assistant with an OpenAI compatible provider, you need to add an API key."))
                .child(
                    div()
                        .pt(DynamicSpacing::Base04.rems(cx))
                        .child(self.api_key_editor.clone())
                )
                .child(
                    Label::new(
                        format!("You can also assign the {env_var_name} environment variable and restart Zed."),
                    )
                    .size(LabelSize::Small).color(Color::Muted),
                )
                .into_any()
        } else {
            // Authenticated: status row with a reset button. When the key
            // came from the environment, the button only shows a tooltip
            // explaining how to unset it.
            h_flex()
                .mt_1()
                .p_1()
                .justify_between()
                .rounded_md()
                .border_1()
                .border_color(cx.theme().colors().border)
                .bg(cx.theme().colors().background)
                .child(
                    h_flex()
                        .gap_1()
                        .child(Icon::new(IconName::Check).color(Color::Success))
                        .child(Label::new(if env_var_set {
                            format!("API key set in {env_var_name} environment variable.")
                        } else {
                            "API key configured.".to_string()
                        })),
                )
                .child(
                    Button::new("reset-api-key", "Reset API Key")
                        .label_size(LabelSize::Small)
                        .icon(IconName::Undo)
                        .icon_size(IconSize::Small)
                        .icon_position(IconPosition::Start)
                        .layer(ElevationIndex::ModalSurface)
                        .when(env_var_set, |this| {
                            this.tooltip(Tooltip::text(format!("To reset your API key, unset the {env_var_name} environment variable.")))
                        })
                        .on_click(cx.listener(|this, _, window, cx| this.reset_api_key(window, cx))),
                )
                .into_any()
        };
        if self.load_credentials_task.is_some() {
            div().child(Label::new("Loading credentials…")).into_any()
        } else {
            v_flex().size_full().child(api_key_section).into_any()
        }
    }
}

View file

@ -1,4 +1,7 @@
use std::sync::Arc;
use anyhow::Result;
use collections::HashMap;
use gpui::App;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
@ -15,13 +18,14 @@ use crate::provider::{
mistral::MistralSettings,
ollama::OllamaSettings,
open_ai::OpenAiSettings,
open_ai_compatible::OpenAiCompatibleSettings,
open_router::OpenRouterSettings,
vercel::VercelSettings,
x_ai::XAiSettings,
};
/// Initializes the language model settings.
pub fn init(cx: &mut App) {
pub fn init_settings(cx: &mut App) {
AllLanguageModelSettings::register(cx);
}
@ -36,6 +40,7 @@ pub struct AllLanguageModelSettings {
pub ollama: OllamaSettings,
pub open_router: OpenRouterSettings,
pub openai: OpenAiSettings,
pub openai_compatible: HashMap<Arc<str>, OpenAiCompatibleSettings>,
pub vercel: VercelSettings,
pub x_ai: XAiSettings,
pub zed_dot_dev: ZedDotDevSettings,
@ -52,6 +57,7 @@ pub struct AllLanguageModelSettingsContent {
pub ollama: Option<OllamaSettingsContent>,
pub open_router: Option<OpenRouterSettingsContent>,
pub openai: Option<OpenAiSettingsContent>,
pub openai_compatible: Option<HashMap<Arc<str>, OpenAiCompatibleSettingsContent>>,
pub vercel: Option<VercelSettingsContent>,
pub x_ai: Option<XAiSettingsContent>,
#[serde(rename = "zed.dev")]
@ -103,6 +109,12 @@ pub struct OpenAiSettingsContent {
pub available_models: Option<Vec<provider::open_ai::AvailableModel>>,
}
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct OpenAiCompatibleSettingsContent {
pub api_url: String,
pub available_models: Vec<provider::open_ai_compatible::AvailableModel>,
}
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct VercelSettingsContent {
pub api_url: Option<String>,
@ -226,6 +238,19 @@ impl settings::Settings for AllLanguageModelSettings {
openai.as_ref().and_then(|s| s.available_models.clone()),
);
// OpenAI Compatible
if let Some(openai_compatible) = value.openai_compatible.clone() {
for (id, openai_compatible_settings) in openai_compatible {
settings.openai_compatible.insert(
id,
OpenAiCompatibleSettings {
api_url: openai_compatible_settings.api_url,
available_models: openai_compatible_settings.available_models,
},
);
}
}
// Vercel
let vercel = value.vercel.clone();
merge(

View file

@ -1211,7 +1211,7 @@ impl MultiBuffer {
let buffer = buffer_state.buffer.read(cx);
for range in buffer.edited_ranges_for_transaction_id::<D>(*buffer_transaction) {
for excerpt_id in &buffer_state.excerpts {
cursor.seek(excerpt_id, Bias::Left, &());
cursor.seek(excerpt_id, Bias::Left);
if let Some(excerpt) = cursor.item() {
if excerpt.locator == *excerpt_id {
let excerpt_buffer_start =
@ -1322,7 +1322,7 @@ impl MultiBuffer {
let start_locator = snapshot.excerpt_locator_for_id(selection.start.excerpt_id);
let end_locator = snapshot.excerpt_locator_for_id(selection.end.excerpt_id);
cursor.seek(&Some(start_locator), Bias::Left, &());
cursor.seek(&Some(start_locator), Bias::Left);
while let Some(excerpt) = cursor.item() {
if excerpt.locator > *end_locator {
break;
@ -1347,7 +1347,7 @@ impl MultiBuffer {
goal: selection.goal,
});
cursor.next(&());
cursor.next();
}
}
@ -1769,13 +1769,13 @@ impl MultiBuffer {
let mut next_excerpt_id = move || ExcerptId(post_inc(&mut next_excerpt_id));
let mut excerpts_cursor = snapshot.excerpts.cursor::<Option<&Locator>>(&());
excerpts_cursor.next(&());
excerpts_cursor.next();
loop {
let new = new_iter.peek();
let existing = if let Some(existing_id) = existing_iter.peek() {
let locator = snapshot.excerpt_locator_for_id(*existing_id);
excerpts_cursor.seek_forward(&Some(locator), Bias::Left, &());
excerpts_cursor.seek_forward(&Some(locator), Bias::Left);
if let Some(excerpt) = excerpts_cursor.item() {
if excerpt.buffer_id != buffer_snapshot.remote_id() {
to_remove.push(*existing_id);
@ -1970,7 +1970,7 @@ impl MultiBuffer {
let mut prev_locator = snapshot.excerpt_locator_for_id(prev_excerpt_id).clone();
let mut new_excerpt_ids = mem::take(&mut snapshot.excerpt_ids);
let mut cursor = snapshot.excerpts.cursor::<Option<&Locator>>(&());
let mut new_excerpts = cursor.slice(&prev_locator, Bias::Right, &());
let mut new_excerpts = cursor.slice(&prev_locator, Bias::Right);
prev_locator = cursor.start().unwrap_or(Locator::min_ref()).clone();
let edit_start = ExcerptOffset::new(new_excerpts.summary().text.len);
@ -2019,7 +2019,7 @@ impl MultiBuffer {
let edit_end = ExcerptOffset::new(new_excerpts.summary().text.len);
let suffix = cursor.suffix(&());
let suffix = cursor.suffix();
let changed_trailing_excerpt = suffix.is_empty();
new_excerpts.append(suffix, &());
drop(cursor);
@ -2104,7 +2104,7 @@ impl MultiBuffer {
.into_iter()
.flatten()
{
cursor.seek_forward(&Some(locator), Bias::Left, &());
cursor.seek_forward(&Some(locator), Bias::Left);
if let Some(excerpt) = cursor.item() {
if excerpt.locator == *locator {
excerpts.push((excerpt.id, excerpt.range.clone()));
@ -2124,25 +2124,25 @@ impl MultiBuffer {
let mut diff_transforms = snapshot
.diff_transforms
.cursor::<(ExcerptDimension<Point>, OutputDimension<Point>)>(&());
diff_transforms.next(&());
diff_transforms.next();
let locators = buffers
.get(&buffer_id)
.into_iter()
.flat_map(|state| &state.excerpts);
let mut result = Vec::new();
for locator in locators {
excerpts.seek_forward(&Some(locator), Bias::Left, &());
excerpts.seek_forward(&Some(locator), Bias::Left);
if let Some(excerpt) = excerpts.item() {
if excerpt.locator == *locator {
let excerpt_start = excerpts.start().1.clone();
let excerpt_end =
ExcerptDimension(excerpt_start.0 + excerpt.text_summary.lines);
diff_transforms.seek_forward(&excerpt_start, Bias::Left, &());
diff_transforms.seek_forward(&excerpt_start, Bias::Left);
let overshoot = excerpt_start.0 - diff_transforms.start().0.0;
let start = diff_transforms.start().1.0 + overshoot;
diff_transforms.seek_forward(&excerpt_end, Bias::Right, &());
diff_transforms.seek_forward(&excerpt_end, Bias::Right);
let overshoot = excerpt_end.0 - diff_transforms.start().0.0;
let end = diff_transforms.start().1.0 + overshoot;
@ -2290,7 +2290,7 @@ impl MultiBuffer {
self.paths_by_excerpt.remove(&excerpt_id);
// Seek to the next excerpt to remove, preserving any preceding excerpts.
let locator = snapshot.excerpt_locator_for_id(excerpt_id);
new_excerpts.append(cursor.slice(&Some(locator), Bias::Left, &()), &());
new_excerpts.append(cursor.slice(&Some(locator), Bias::Left), &());
if let Some(mut excerpt) = cursor.item() {
if excerpt.id != excerpt_id {
@ -2311,7 +2311,7 @@ impl MultiBuffer {
removed_buffer_ids.push(excerpt.buffer_id);
}
}
cursor.next(&());
cursor.next();
// Skip over any subsequent excerpts that are also removed.
if let Some(&next_excerpt_id) = excerpt_ids.peek() {
@ -2344,7 +2344,7 @@ impl MultiBuffer {
});
}
}
let suffix = cursor.suffix(&());
let suffix = cursor.suffix();
let changed_trailing_excerpt = suffix.is_empty();
new_excerpts.append(suffix, &());
drop(cursor);
@ -2493,7 +2493,7 @@ impl MultiBuffer {
let mut cursor = snapshot
.excerpts
.cursor::<(Option<&Locator>, ExcerptOffset)>(&());
cursor.seek_forward(&Some(locator), Bias::Left, &());
cursor.seek_forward(&Some(locator), Bias::Left);
if let Some(excerpt) = cursor.item() {
if excerpt.locator == *locator {
let excerpt_buffer_range = excerpt.range.context.to_offset(&excerpt.buffer);
@ -2724,7 +2724,7 @@ impl MultiBuffer {
let snapshot = self.read(cx);
let mut cursor = snapshot.diff_transforms.cursor::<usize>(&());
let offset_range = range.to_offset(&snapshot);
cursor.seek(&offset_range.start, Bias::Left, &());
cursor.seek(&offset_range.start, Bias::Left);
while let Some(item) = cursor.item() {
if *cursor.start() >= offset_range.end && *cursor.start() > offset_range.start {
break;
@ -2732,7 +2732,7 @@ impl MultiBuffer {
if item.hunk_info().is_some() {
return true;
}
cursor.next(&());
cursor.next();
}
false
}
@ -2746,7 +2746,7 @@ impl MultiBuffer {
let end = snapshot.point_to_offset(Point::new(range.end.row + 1, 0));
let start = start.saturating_sub(1);
let end = snapshot.len().min(end + 1);
cursor.seek(&start, Bias::Right, &());
cursor.seek(&start, Bias::Right);
while let Some(item) = cursor.item() {
if *cursor.start() >= end {
break;
@ -2754,7 +2754,7 @@ impl MultiBuffer {
if item.hunk_info().is_some() {
return true;
}
cursor.next(&());
cursor.next();
}
}
false
@ -2848,7 +2848,7 @@ impl MultiBuffer {
.cursor::<(Option<&Locator>, ExcerptOffset)>(&());
let mut edits = Vec::<Edit<ExcerptOffset>>::new();
let prefix = cursor.slice(&Some(locator), Bias::Left, &());
let prefix = cursor.slice(&Some(locator), Bias::Left);
new_excerpts.append(prefix, &());
let mut excerpt = cursor.item().unwrap().clone();
@ -2883,9 +2883,9 @@ impl MultiBuffer {
new_excerpts.push(excerpt, &());
cursor.next(&());
cursor.next();
new_excerpts.append(cursor.suffix(&()), &());
new_excerpts.append(cursor.suffix(), &());
drop(cursor);
snapshot.excerpts = new_excerpts;
@ -2925,7 +2925,7 @@ impl MultiBuffer {
let mut edits = Vec::<Edit<ExcerptOffset>>::new();
for locator in &locators {
let prefix = cursor.slice(&Some(locator), Bias::Left, &());
let prefix = cursor.slice(&Some(locator), Bias::Left);
new_excerpts.append(prefix, &());
let mut excerpt = cursor.item().unwrap().clone();
@ -2987,10 +2987,10 @@ impl MultiBuffer {
new_excerpts.push(excerpt, &());
cursor.next(&());
cursor.next();
}
new_excerpts.append(cursor.suffix(&()), &());
new_excerpts.append(cursor.suffix(), &());
drop(cursor);
snapshot.excerpts = new_excerpts;
@ -3070,7 +3070,7 @@ impl MultiBuffer {
.cursor::<(Option<&Locator>, ExcerptOffset)>(&());
for (locator, buffer, buffer_edited) in excerpts_to_edit {
new_excerpts.append(cursor.slice(&Some(locator), Bias::Left, &()), &());
new_excerpts.append(cursor.slice(&Some(locator), Bias::Left), &());
let old_excerpt = cursor.item().unwrap();
let buffer = buffer.read(cx);
let buffer_id = buffer.remote_id();
@ -3112,9 +3112,9 @@ impl MultiBuffer {
}
new_excerpts.push(new_excerpt, &());
cursor.next(&());
cursor.next();
}
new_excerpts.append(cursor.suffix(&()), &());
new_excerpts.append(cursor.suffix(), &());
drop(cursor);
snapshot.excerpts = new_excerpts;
@ -3145,23 +3145,22 @@ impl MultiBuffer {
let mut excerpt_edits = excerpt_edits.into_iter().peekable();
while let Some(edit) = excerpt_edits.next() {
excerpts.seek_forward(&edit.new.start, Bias::Right, &());
excerpts.seek_forward(&edit.new.start, Bias::Right);
if excerpts.item().is_none() && *excerpts.start() == edit.new.start {
excerpts.prev(&());
excerpts.prev();
}
// Keep any transforms that are before the edit.
if at_transform_boundary {
at_transform_boundary = false;
let transforms_before_edit =
old_diff_transforms.slice(&edit.old.start, Bias::Left, &());
let transforms_before_edit = old_diff_transforms.slice(&edit.old.start, Bias::Left);
self.append_diff_transforms(&mut new_diff_transforms, transforms_before_edit);
if let Some(transform) = old_diff_transforms.item() {
if old_diff_transforms.end(&()).0 == edit.old.start
if old_diff_transforms.end().0 == edit.old.start
&& old_diff_transforms.start().0 < edit.old.start
{
self.push_diff_transform(&mut new_diff_transforms, transform.clone());
old_diff_transforms.next(&());
old_diff_transforms.next();
}
}
}
@ -3203,7 +3202,7 @@ impl MultiBuffer {
// then recreate the content up to the end of this transform, to prepare
// for reusing additional slices of the old transforms.
if excerpt_edits.peek().map_or(true, |next_edit| {
next_edit.old.start >= old_diff_transforms.end(&()).0
next_edit.old.start >= old_diff_transforms.end().0
}) {
let keep_next_old_transform = (old_diff_transforms.start().0 >= edit.old.end)
&& match old_diff_transforms.item() {
@ -3218,8 +3217,8 @@ impl MultiBuffer {
let mut excerpt_offset = edit.new.end;
if !keep_next_old_transform {
excerpt_offset += old_diff_transforms.end(&()).0 - edit.old.end;
old_diff_transforms.next(&());
excerpt_offset += old_diff_transforms.end().0 - edit.old.end;
old_diff_transforms.next();
}
old_expanded_hunks.clear();
@ -3234,7 +3233,7 @@ impl MultiBuffer {
}
// Keep any transforms that are after the last edit.
self.append_diff_transforms(&mut new_diff_transforms, old_diff_transforms.suffix(&()));
self.append_diff_transforms(&mut new_diff_transforms, old_diff_transforms.suffix());
// Ensure there's always at least one buffer content transform.
if new_diff_transforms.is_empty() {
@ -3283,10 +3282,10 @@ impl MultiBuffer {
);
old_expanded_hunks.insert(hunk_info);
}
if old_diff_transforms.end(&()).0 > edit.old.end {
if old_diff_transforms.end().0 > edit.old.end {
break;
}
old_diff_transforms.next(&());
old_diff_transforms.next();
}
// Avoid querying diff hunks if there's no possibility of hunks being expanded.
@ -3413,8 +3412,8 @@ impl MultiBuffer {
}
}
if excerpts.end(&()) <= edit.new.end {
excerpts.next(&());
if excerpts.end() <= edit.new.end {
excerpts.next();
} else {
break;
}
@ -3439,9 +3438,9 @@ impl MultiBuffer {
*summary,
) {
let mut cursor = subtree.cursor::<()>(&());
cursor.next(&());
cursor.next(&());
new_transforms.append(cursor.suffix(&()), &());
cursor.next();
cursor.next();
new_transforms.append(cursor.suffix(), &());
return;
}
}
@ -4715,14 +4714,14 @@ impl MultiBufferSnapshot {
{
let range = range.start.to_offset(self)..range.end.to_offset(self);
let mut cursor = self.diff_transforms.cursor::<(usize, ExcerptOffset)>(&());
cursor.seek(&range.start, Bias::Right, &());
cursor.seek(&range.start, Bias::Right);
let Some(first_transform) = cursor.item() else {
return D::from_text_summary(&TextSummary::default());
};
let diff_transform_start = cursor.start().0;
let diff_transform_end = cursor.end(&()).0;
let diff_transform_end = cursor.end().0;
let diff_start = range.start;
let start_overshoot = diff_start - diff_transform_start;
let end_overshoot = std::cmp::min(range.end, diff_transform_end) - diff_transform_start;
@ -4765,12 +4764,10 @@ impl MultiBufferSnapshot {
return result;
}
cursor.next(&());
result.add_assign(&D::from_text_summary(&cursor.summary(
&range.end,
Bias::Right,
&(),
)));
cursor.next();
result.add_assign(&D::from_text_summary(
&cursor.summary(&range.end, Bias::Right),
));
let Some(last_transform) = cursor.item() else {
return result;
@ -4813,9 +4810,9 @@ impl MultiBufferSnapshot {
// let mut range = range.start..range.end;
let mut summary = D::zero(&());
let mut cursor = self.excerpts.cursor::<ExcerptOffset>(&());
cursor.seek(&range.start, Bias::Right, &());
cursor.seek(&range.start, Bias::Right);
if let Some(excerpt) = cursor.item() {
let mut end_before_newline = cursor.end(&());
let mut end_before_newline = cursor.end();
if excerpt.has_trailing_newline {
end_before_newline -= ExcerptOffset::new(1);
}
@ -4834,13 +4831,13 @@ impl MultiBufferSnapshot {
summary.add_assign(&D::from_text_summary(&TextSummary::from("\n")));
}
cursor.next(&());
cursor.next();
}
if range.end > *cursor.start() {
summary.add_assign(
&cursor
.summary::<_, ExcerptDimension<D>>(&range.end, Bias::Right, &())
.summary::<_, ExcerptDimension<D>>(&range.end, Bias::Right)
.0,
);
if let Some(excerpt) = cursor.item() {
@ -4876,11 +4873,11 @@ impl MultiBufferSnapshot {
D: TextDimension + Ord + Sub<D, Output = D>,
{
loop {
let transform_end_position = diff_transforms.end(&()).0.0;
let transform_end_position = diff_transforms.end().0.0;
let at_transform_end =
excerpt_position == transform_end_position && diff_transforms.item().is_some();
if at_transform_end && anchor.text_anchor.bias == Bias::Right {
diff_transforms.next(&());
diff_transforms.next();
continue;
}
@ -4906,7 +4903,7 @@ impl MultiBufferSnapshot {
);
position.add_assign(&position_in_hunk);
} else if at_transform_end {
diff_transforms.next(&());
diff_transforms.next();
continue;
}
}
@ -4915,7 +4912,7 @@ impl MultiBufferSnapshot {
}
_ => {
if at_transform_end && anchor.diff_base_anchor.is_some() {
diff_transforms.next(&());
diff_transforms.next();
continue;
}
let overshoot = excerpt_position - diff_transforms.start().0.0;
@ -4933,9 +4930,9 @@ impl MultiBufferSnapshot {
.cursor::<(Option<&Locator>, ExcerptOffset)>(&());
let locator = self.excerpt_locator_for_id(anchor.excerpt_id);
cursor.seek(&Some(locator), Bias::Left, &());
cursor.seek(&Some(locator), Bias::Left);
if cursor.item().is_none() {
cursor.next(&());
cursor.next();
}
let mut position = cursor.start().1;
@ -4975,7 +4972,7 @@ impl MultiBufferSnapshot {
let mut diff_transforms_cursor = self
.diff_transforms
.cursor::<(ExcerptDimension<D>, OutputDimension<D>)>(&());
diff_transforms_cursor.next(&());
diff_transforms_cursor.next();
let mut summaries = Vec::new();
while let Some(anchor) = anchors.peek() {
@ -4990,9 +4987,9 @@ impl MultiBufferSnapshot {
});
let locator = self.excerpt_locator_for_id(excerpt_id);
cursor.seek_forward(locator, Bias::Left, &());
cursor.seek_forward(locator, Bias::Left);
if cursor.item().is_none() {
cursor.next(&());
cursor.next();
}
let excerpt_start_position = D::from_text_summary(&cursor.start().text);
@ -5022,11 +5019,8 @@ impl MultiBufferSnapshot {
}
if position > diff_transforms_cursor.start().0.0 {
diff_transforms_cursor.seek_forward(
&ExcerptDimension(position),
Bias::Left,
&(),
);
diff_transforms_cursor
.seek_forward(&ExcerptDimension(position), Bias::Left);
}
summaries.push(self.resolve_summary_for_anchor(
@ -5036,11 +5030,8 @@ impl MultiBufferSnapshot {
));
}
} else {
diff_transforms_cursor.seek_forward(
&ExcerptDimension(excerpt_start_position),
Bias::Left,
&(),
);
diff_transforms_cursor
.seek_forward(&ExcerptDimension(excerpt_start_position), Bias::Left);
let position = self.resolve_summary_for_anchor(
&Anchor::max(),
excerpt_start_position,
@ -5099,7 +5090,7 @@ impl MultiBufferSnapshot {
{
let mut anchors = anchors.into_iter().enumerate().peekable();
let mut cursor = self.excerpts.cursor::<Option<&Locator>>(&());
cursor.next(&());
cursor.next();
let mut result = Vec::new();
@ -5108,10 +5099,10 @@ impl MultiBufferSnapshot {
// Find the location where this anchor's excerpt should be.
let old_locator = self.excerpt_locator_for_id(old_excerpt_id);
cursor.seek_forward(&Some(old_locator), Bias::Left, &());
cursor.seek_forward(&Some(old_locator), Bias::Left);
if cursor.item().is_none() {
cursor.next(&());
cursor.next();
}
let next_excerpt = cursor.item();
@ -5211,13 +5202,13 @@ impl MultiBufferSnapshot {
// Find the given position in the diff transforms. Determine the corresponding
// offset in the excerpts, and whether the position is within a deleted hunk.
let mut diff_transforms = self.diff_transforms.cursor::<(usize, ExcerptOffset)>(&());
diff_transforms.seek(&offset, Bias::Right, &());
diff_transforms.seek(&offset, Bias::Right);
if offset == diff_transforms.start().0 && bias == Bias::Left {
if let Some(prev_item) = diff_transforms.prev_item() {
match prev_item {
DiffTransform::DeletedHunk { .. } => {
diff_transforms.prev(&());
diff_transforms.prev();
}
_ => {}
}
@ -5260,13 +5251,13 @@ impl MultiBufferSnapshot {
let mut excerpts = self
.excerpts
.cursor::<(ExcerptOffset, Option<ExcerptId>)>(&());
excerpts.seek(&excerpt_offset, Bias::Right, &());
excerpts.seek(&excerpt_offset, Bias::Right);
if excerpts.item().is_none() && excerpt_offset == excerpts.start().0 && bias == Bias::Left {
excerpts.prev(&());
excerpts.prev();
}
if let Some(excerpt) = excerpts.item() {
let mut overshoot = excerpt_offset.saturating_sub(excerpts.start().0).value;
if excerpt.has_trailing_newline && excerpt_offset == excerpts.end(&()).0 {
if excerpt.has_trailing_newline && excerpt_offset == excerpts.end().0 {
overshoot -= 1;
bias = Bias::Right;
}
@ -5297,7 +5288,7 @@ impl MultiBufferSnapshot {
let excerpt_id = self.latest_excerpt_id(excerpt_id);
let locator = self.excerpt_locator_for_id(excerpt_id);
let mut cursor = self.excerpts.cursor::<Option<&Locator>>(&());
cursor.seek(locator, Bias::Left, &());
cursor.seek(locator, Bias::Left);
if let Some(excerpt) = cursor.item() {
if excerpt.id == excerpt_id {
let text_anchor = excerpt.clip_anchor(text_anchor);
@ -5351,13 +5342,13 @@ impl MultiBufferSnapshot {
let mut excerpts = self
.excerpts
.cursor::<(Option<&Locator>, ExcerptDimension<usize>)>(&());
excerpts.seek(&Some(start_locator), Bias::Left, &());
excerpts.prev(&());
excerpts.seek(&Some(start_locator), Bias::Left);
excerpts.prev();
let mut diff_transforms = self.diff_transforms.cursor::<DiffTransforms<usize>>(&());
diff_transforms.seek(&excerpts.start().1, Bias::Left, &());
if diff_transforms.end(&()).excerpt_dimension < excerpts.start().1 {
diff_transforms.next(&());
diff_transforms.seek(&excerpts.start().1, Bias::Left);
if diff_transforms.end().excerpt_dimension < excerpts.start().1 {
diff_transforms.next();
}
let excerpt = excerpts.item()?;
@ -6193,7 +6184,7 @@ impl MultiBufferSnapshot {
Locator::max_ref()
} else {
let mut cursor = self.excerpt_ids.cursor::<ExcerptId>(&());
cursor.seek(&id, Bias::Left, &());
cursor.seek(&id, Bias::Left);
if let Some(entry) = cursor.item() {
if entry.id == id {
return &entry.locator;
@ -6229,7 +6220,7 @@ impl MultiBufferSnapshot {
let mut cursor = self.excerpt_ids.cursor::<ExcerptId>(&());
for id in sorted_ids {
if cursor.seek_forward(&id, Bias::Left, &()) {
if cursor.seek_forward(&id, Bias::Left) {
locators.push(cursor.item().unwrap().locator.clone());
} else {
panic!("invalid excerpt id {:?}", id);
@ -6253,16 +6244,16 @@ impl MultiBufferSnapshot {
.excerpts
.cursor::<(Option<&Locator>, ExcerptDimension<Point>)>(&());
let locator = self.excerpt_locator_for_id(excerpt_id);
if cursor.seek(&Some(locator), Bias::Left, &()) {
if cursor.seek(&Some(locator), Bias::Left) {
let start = cursor.start().1.clone();
let end = cursor.end(&()).1;
let end = cursor.end().1;
let mut diff_transforms = self
.diff_transforms
.cursor::<(ExcerptDimension<Point>, OutputDimension<Point>)>(&());
diff_transforms.seek(&start, Bias::Left, &());
diff_transforms.seek(&start, Bias::Left);
let overshoot = start.0 - diff_transforms.start().0.0;
let start = diff_transforms.start().1.0 + overshoot;
diff_transforms.seek(&end, Bias::Right, &());
diff_transforms.seek(&end, Bias::Right);
let overshoot = end.0 - diff_transforms.start().0.0;
let end = diff_transforms.start().1.0 + overshoot;
Some(start..end)
@ -6274,7 +6265,7 @@ impl MultiBufferSnapshot {
pub fn buffer_range_for_excerpt(&self, excerpt_id: ExcerptId) -> Option<Range<text::Anchor>> {
let mut cursor = self.excerpts.cursor::<Option<&Locator>>(&());
let locator = self.excerpt_locator_for_id(excerpt_id);
if cursor.seek(&Some(locator), Bias::Left, &()) {
if cursor.seek(&Some(locator), Bias::Left) {
if let Some(excerpt) = cursor.item() {
return Some(excerpt.range.context.clone());
}
@ -6285,7 +6276,7 @@ impl MultiBufferSnapshot {
fn excerpt(&self, excerpt_id: ExcerptId) -> Option<&Excerpt> {
let mut cursor = self.excerpts.cursor::<Option<&Locator>>(&());
let locator = self.excerpt_locator_for_id(excerpt_id);
cursor.seek(&Some(locator), Bias::Left, &());
cursor.seek(&Some(locator), Bias::Left);
if let Some(excerpt) = cursor.item() {
if excerpt.id == excerpt_id {
return Some(excerpt);
@ -6333,7 +6324,7 @@ impl MultiBufferSnapshot {
let mut cursor = self.excerpts.cursor::<ExcerptSummary>(&());
let start_locator = self.excerpt_locator_for_id(range.start.excerpt_id);
let end_locator = self.excerpt_locator_for_id(range.end.excerpt_id);
cursor.seek(start_locator, Bias::Left, &());
cursor.seek(start_locator, Bias::Left);
cursor
.take_while(move |excerpt| excerpt.locator <= *end_locator)
.flat_map(move |excerpt| {
@ -6472,11 +6463,11 @@ where
fn seek(&mut self, position: &D) {
self.cached_region.take();
self.diff_transforms
.seek(&OutputDimension(*position), Bias::Right, &());
.seek(&OutputDimension(*position), Bias::Right);
if self.diff_transforms.item().is_none()
&& *position == self.diff_transforms.start().output_dimension.0
{
self.diff_transforms.prev(&());
self.diff_transforms.prev();
}
let mut excerpt_position = self.diff_transforms.start().excerpt_dimension.0;
@ -6486,20 +6477,20 @@ where
}
self.excerpts
.seek(&ExcerptDimension(excerpt_position), Bias::Right, &());
.seek(&ExcerptDimension(excerpt_position), Bias::Right);
if self.excerpts.item().is_none() && excerpt_position == self.excerpts.start().0 {
self.excerpts.prev(&());
self.excerpts.prev();
}
}
fn seek_forward(&mut self, position: &D) {
self.cached_region.take();
self.diff_transforms
.seek_forward(&OutputDimension(*position), Bias::Right, &());
.seek_forward(&OutputDimension(*position), Bias::Right);
if self.diff_transforms.item().is_none()
&& *position == self.diff_transforms.start().output_dimension.0
{
self.diff_transforms.prev(&());
self.diff_transforms.prev();
}
let overshoot = *position - self.diff_transforms.start().output_dimension.0;
@ -6509,31 +6500,30 @@ where
}
self.excerpts
.seek_forward(&ExcerptDimension(excerpt_position), Bias::Right, &());
.seek_forward(&ExcerptDimension(excerpt_position), Bias::Right);
if self.excerpts.item().is_none() && excerpt_position == self.excerpts.start().0 {
self.excerpts.prev(&());
self.excerpts.prev();
}
}
fn next_excerpt(&mut self) {
self.excerpts.next(&());
self.excerpts.next();
self.seek_to_start_of_current_excerpt();
}
fn prev_excerpt(&mut self) {
self.excerpts.prev(&());
self.excerpts.prev();
self.seek_to_start_of_current_excerpt();
}
fn seek_to_start_of_current_excerpt(&mut self) {
self.cached_region.take();
self.diff_transforms
.seek(self.excerpts.start(), Bias::Left, &());
if self.diff_transforms.end(&()).excerpt_dimension == *self.excerpts.start()
self.diff_transforms.seek(self.excerpts.start(), Bias::Left);
if self.diff_transforms.end().excerpt_dimension == *self.excerpts.start()
&& self.diff_transforms.start().excerpt_dimension < *self.excerpts.start()
&& self.diff_transforms.next_item().is_some()
{
self.diff_transforms.next(&());
self.diff_transforms.next();
}
}
@ -6541,18 +6531,18 @@ where
self.cached_region.take();
match self
.diff_transforms
.end(&())
.end()
.excerpt_dimension
.cmp(&self.excerpts.end(&()))
.cmp(&self.excerpts.end())
{
cmp::Ordering::Less => self.diff_transforms.next(&()),
cmp::Ordering::Greater => self.excerpts.next(&()),
cmp::Ordering::Less => self.diff_transforms.next(),
cmp::Ordering::Greater => self.excerpts.next(),
cmp::Ordering::Equal => {
self.diff_transforms.next(&());
if self.diff_transforms.end(&()).excerpt_dimension > self.excerpts.end(&())
self.diff_transforms.next();
if self.diff_transforms.end().excerpt_dimension > self.excerpts.end()
|| self.diff_transforms.item().is_none()
{
self.excerpts.next(&());
self.excerpts.next();
} else if let Some(DiffTransform::DeletedHunk { hunk_info, .. }) =
self.diff_transforms.item()
{
@ -6561,7 +6551,7 @@ where
.item()
.map_or(false, |excerpt| excerpt.id != hunk_info.excerpt_id)
{
self.excerpts.next(&());
self.excerpts.next();
}
}
}
@ -6576,14 +6566,14 @@ where
.excerpt_dimension
.cmp(self.excerpts.start())
{
cmp::Ordering::Less => self.excerpts.prev(&()),
cmp::Ordering::Greater => self.diff_transforms.prev(&()),
cmp::Ordering::Less => self.excerpts.prev(),
cmp::Ordering::Greater => self.diff_transforms.prev(),
cmp::Ordering::Equal => {
self.diff_transforms.prev(&());
self.diff_transforms.prev();
if self.diff_transforms.start().excerpt_dimension < *self.excerpts.start()
|| self.diff_transforms.item().is_none()
{
self.excerpts.prev(&());
self.excerpts.prev();
}
}
}
@ -6603,9 +6593,9 @@ where
return true;
}
self.diff_transforms.prev(&());
self.diff_transforms.prev();
let prev_transform = self.diff_transforms.item();
self.diff_transforms.next(&());
self.diff_transforms.next();
prev_transform.map_or(true, |next_transform| {
matches!(next_transform, DiffTransform::BufferContent { .. })
@ -6613,9 +6603,9 @@ where
}
fn is_at_end_of_excerpt(&mut self) -> bool {
if self.diff_transforms.end(&()).excerpt_dimension < self.excerpts.end(&()) {
if self.diff_transforms.end().excerpt_dimension < self.excerpts.end() {
return false;
} else if self.diff_transforms.end(&()).excerpt_dimension > self.excerpts.end(&())
} else if self.diff_transforms.end().excerpt_dimension > self.excerpts.end()
|| self.diff_transforms.item().is_none()
{
return true;
@ -6636,7 +6626,7 @@ where
let buffer = &excerpt.buffer;
let buffer_context_start = excerpt.range.context.start.summary::<D>(buffer);
let mut buffer_start = buffer_context_start;
let overshoot = self.diff_transforms.end(&()).excerpt_dimension.0 - self.excerpts.start().0;
let overshoot = self.diff_transforms.end().excerpt_dimension.0 - self.excerpts.start().0;
buffer_start.add_assign(&overshoot);
Some(buffer_start)
}
@ -6659,7 +6649,7 @@ where
let mut buffer_end = buffer_start;
buffer_end.add_assign(&buffer_range_len);
let start = self.diff_transforms.start().output_dimension.0;
let end = self.diff_transforms.end(&()).output_dimension.0;
let end = self.diff_transforms.end().output_dimension.0;
return Some(MultiBufferRegion {
buffer,
excerpt,
@ -6693,16 +6683,16 @@ where
let mut end;
let mut buffer_end;
let has_trailing_newline;
if self.diff_transforms.end(&()).excerpt_dimension.0 < self.excerpts.end(&()).0 {
if self.diff_transforms.end().excerpt_dimension.0 < self.excerpts.end().0 {
let overshoot =
self.diff_transforms.end(&()).excerpt_dimension.0 - self.excerpts.start().0;
end = self.diff_transforms.end(&()).output_dimension.0;
self.diff_transforms.end().excerpt_dimension.0 - self.excerpts.start().0;
end = self.diff_transforms.end().output_dimension.0;
buffer_end = buffer_context_start;
buffer_end.add_assign(&overshoot);
has_trailing_newline = false;
} else {
let overshoot =
self.excerpts.end(&()).0 - self.diff_transforms.start().excerpt_dimension.0;
self.excerpts.end().0 - self.diff_transforms.start().excerpt_dimension.0;
end = self.diff_transforms.start().output_dimension.0;
end.add_assign(&overshoot);
buffer_end = excerpt.range.context.end.summary::<D>(buffer);
@ -7086,11 +7076,11 @@ impl<'a> MultiBufferExcerpt<'a> {
/// Maps a range within the [`MultiBuffer`] to a range within the [`Buffer`]
pub fn map_range_to_buffer(&mut self, range: Range<usize>) -> Range<usize> {
self.diff_transforms
.seek(&OutputDimension(range.start), Bias::Right, &());
.seek(&OutputDimension(range.start), Bias::Right);
let start = self.map_offset_to_buffer_internal(range.start);
let end = if range.end > range.start {
self.diff_transforms
.seek_forward(&OutputDimension(range.end), Bias::Right, &());
.seek_forward(&OutputDimension(range.end), Bias::Right);
self.map_offset_to_buffer_internal(range.end)
} else {
start
@ -7123,7 +7113,7 @@ impl<'a> MultiBufferExcerpt<'a> {
}
let overshoot = buffer_range.start - self.buffer_offset;
let excerpt_offset = ExcerptDimension(self.excerpt_offset.0 + overshoot);
self.diff_transforms.seek(&excerpt_offset, Bias::Right, &());
self.diff_transforms.seek(&excerpt_offset, Bias::Right);
if excerpt_offset.0 < self.diff_transforms.start().excerpt_dimension.0 {
log::warn!(
"Attempting to map a range from a buffer offset that starts before the current buffer offset"
@ -7137,7 +7127,7 @@ impl<'a> MultiBufferExcerpt<'a> {
let overshoot = buffer_range.end - self.buffer_offset;
let excerpt_offset = ExcerptDimension(self.excerpt_offset.0 + overshoot);
self.diff_transforms
.seek_forward(&excerpt_offset, Bias::Right, &());
.seek_forward(&excerpt_offset, Bias::Right);
let overshoot = excerpt_offset.0 - self.diff_transforms.start().excerpt_dimension.0;
self.diff_transforms.start().output_dimension.0 + overshoot
} else {
@ -7509,7 +7499,7 @@ impl Iterator for MultiBufferRows<'_> {
if let Some(next_region) = self.cursor.region() {
region = next_region;
} else {
if self.point == self.cursor.diff_transforms.end(&()).output_dimension.0 {
if self.point == self.cursor.diff_transforms.end().output_dimension.0 {
let multibuffer_row = MultiBufferRow(self.point.row);
let last_excerpt = self
.cursor
@ -7615,14 +7605,14 @@ impl<'a> MultiBufferChunks<'a> {
}
pub fn seek(&mut self, range: Range<usize>) {
self.diff_transforms.seek(&range.end, Bias::Right, &());
self.diff_transforms.seek(&range.end, Bias::Right);
let mut excerpt_end = self.diff_transforms.start().1;
if let Some(DiffTransform::BufferContent { .. }) = self.diff_transforms.item() {
let overshoot = range.end - self.diff_transforms.start().0;
excerpt_end.value += overshoot;
}
self.diff_transforms.seek(&range.start, Bias::Right, &());
self.diff_transforms.seek(&range.start, Bias::Right);
let mut excerpt_start = self.diff_transforms.start().1;
if let Some(DiffTransform::BufferContent { .. }) = self.diff_transforms.item() {
let overshoot = range.start - self.diff_transforms.start().0;
@ -7636,7 +7626,7 @@ impl<'a> MultiBufferChunks<'a> {
fn seek_to_excerpt_offset_range(&mut self, new_range: Range<ExcerptOffset>) {
self.excerpt_offset_range = new_range.clone();
self.excerpts.seek(&new_range.start, Bias::Right, &());
self.excerpts.seek(&new_range.start, Bias::Right);
if let Some(excerpt) = self.excerpts.item() {
let excerpt_start = *self.excerpts.start();
if let Some(excerpt_chunks) = self
@ -7669,7 +7659,7 @@ impl<'a> MultiBufferChunks<'a> {
self.excerpt_offset_range.start.value += chunk.text.len();
return Some(chunk);
} else {
self.excerpts.next(&());
self.excerpts.next();
let excerpt = self.excerpts.item()?;
self.excerpt_chunks = Some(excerpt.chunks_in_range(
0..(self.excerpt_offset_range.end - *self.excerpts.start()).value,
@ -7712,12 +7702,12 @@ impl<'a> Iterator for MultiBufferChunks<'a> {
if self.range.start >= self.range.end {
return None;
}
if self.range.start == self.diff_transforms.end(&()).0 {
self.diff_transforms.next(&());
if self.range.start == self.diff_transforms.end().0 {
self.diff_transforms.next();
}
let diff_transform_start = self.diff_transforms.start().0;
let diff_transform_end = self.diff_transforms.end(&()).0;
let diff_transform_end = self.diff_transforms.end().0;
debug_assert!(self.range.start < diff_transform_end);
let diff_transform = self.diff_transforms.item()?;

View file

@ -132,12 +132,12 @@ impl NotificationStore {
}
let ix = count - 1 - ix;
let mut cursor = self.notifications.cursor::<Count>(&());
cursor.seek(&Count(ix), Bias::Right, &());
cursor.seek(&Count(ix), Bias::Right);
cursor.item()
}
pub fn notification_for_id(&self, id: u64) -> Option<&NotificationEntry> {
let mut cursor = self.notifications.cursor::<NotificationId>(&());
cursor.seek(&NotificationId(id), Bias::Left, &());
cursor.seek(&NotificationId(id), Bias::Left);
if let Some(item) = cursor.item() {
if item.id == id {
return Some(item);
@ -365,7 +365,7 @@ impl NotificationStore {
let mut old_range = 0..0;
for (i, (id, new_notification)) in notifications.into_iter().enumerate() {
new_notifications.append(cursor.slice(&NotificationId(id), Bias::Left, &()), &());
new_notifications.append(cursor.slice(&NotificationId(id), Bias::Left), &());
if i == 0 {
old_range.start = cursor.start().1.0;
@ -374,7 +374,7 @@ impl NotificationStore {
let old_notification = cursor.item();
if let Some(old_notification) = old_notification {
if old_notification.id == id {
cursor.next(&());
cursor.next();
if let Some(new_notification) = &new_notification {
if new_notification.is_read {
@ -403,7 +403,7 @@ impl NotificationStore {
old_range.end = cursor.start().1.0;
let new_count = new_notifications.summary().count - old_range.start;
new_notifications.append(cursor.suffix(&()), &());
new_notifications.append(cursor.suffix(), &());
drop(cursor);
self.notifications = new_notifications;

View file

@ -1 +1 @@
../../../LICENSE-GPL
../../LICENSE-GPL

View file

@ -4279,7 +4279,7 @@ impl Repository {
for (repo_path, status) in &*statuses.entries {
changed_paths.remove(repo_path);
if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left, &()) {
if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left) {
if cursor.item().is_some_and(|entry| entry.status == *status) {
continue;
}
@ -4292,7 +4292,7 @@ impl Repository {
}
let mut cursor = prev_statuses.cursor::<PathProgress>(&());
for path in changed_paths.into_iter() {
if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left, &()) {
if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
changed_path_statuses.push(Edit::Remove(PathKey(path.0)));
}
}

View file

@ -72,14 +72,13 @@ impl<'a> GitTraversal<'a> {
if entry.is_dir() {
let mut statuses = statuses.clone();
statuses.seek_forward(&PathTarget::Path(repo_path.as_ref()), Bias::Left, &());
let summary =
statuses.summary(&PathTarget::Successor(repo_path.as_ref()), Bias::Left, &());
statuses.seek_forward(&PathTarget::Path(repo_path.as_ref()), Bias::Left);
let summary = statuses.summary(&PathTarget::Successor(repo_path.as_ref()), Bias::Left);
self.current_entry_summary = Some(summary);
} else if entry.is_file() {
// For a file entry, park the cursor on the corresponding status
if statuses.seek_forward(&PathTarget::Path(repo_path.as_ref()), Bias::Left, &()) {
if statuses.seek_forward(&PathTarget::Path(repo_path.as_ref()), Bias::Left) {
// TODO: Investigate statuses.item() being None here.
self.current_entry_summary = statuses.item().map(|item| item.status.into());
} else {

View file

@ -1274,15 +1274,11 @@ impl LocalLspStore {
// grouped with the previous transaction in the history
// based on the transaction group interval
buffer.finalize_last_transaction();
let transaction_id = buffer
buffer
.start_transaction()
.context("transaction already open")?;
let transaction = buffer
.get_transaction(transaction_id)
.expect("transaction started")
.clone();
buffer.end_transaction(cx);
buffer.push_transaction(transaction, cx.background_executor().now());
let transaction_id = buffer.push_empty_transaction(cx.background_executor().now());
buffer.finalize_last_transaction();
anyhow::Ok(transaction_id)
})??;

View file

@ -45,12 +45,6 @@ impl SearchHistory {
}
pub fn add(&mut self, cursor: &mut SearchHistoryCursor, search_string: String) {
if let Some(selected_ix) = cursor.selection {
if self.history.get(selected_ix) == Some(&search_string) {
return;
}
}
if self.insertion_behavior == QueryInsertionBehavior::ReplacePreviousIfContains {
if let Some(previously_searched) = self.history.back_mut() {
if search_string.contains(previously_searched.as_str()) {
@ -144,6 +138,14 @@ mod tests {
);
assert_eq!(search_history.current(&cursor), Some("rustlang"));
// add item when it equals to current item if it's not the last one
search_history.add(&mut cursor, "php".to_string());
search_history.previous(&mut cursor);
assert_eq!(search_history.current(&cursor), Some("rustlang"));
search_history.add(&mut cursor, "rustlang".to_string());
assert_eq!(search_history.history.len(), 3, "Should add item");
assert_eq!(search_history.current(&cursor), Some("rustlang"));
// push enough items to test SEARCH_HISTORY_LIMIT
for i in 0..MAX_HISTORY_LEN * 2 {
search_history.add(&mut cursor, format!("item{i}"));

View file

@ -41,9 +41,9 @@ impl Rope {
self.push_chunk(chunk.as_slice());
let mut chunks = rope.chunks.cursor::<()>(&());
chunks.next(&());
chunks.next(&());
self.chunks.append(chunks.suffix(&()), &());
chunks.next();
chunks.next();
self.chunks.append(chunks.suffix(), &());
self.check_invariants();
return;
}
@ -283,7 +283,7 @@ impl Rope {
return self.summary().len_utf16;
}
let mut cursor = self.chunks.cursor::<(usize, OffsetUtf16)>(&());
cursor.seek(&offset, Bias::Left, &());
cursor.seek(&offset, Bias::Left);
let overshoot = offset - cursor.start().0;
cursor.start().1
+ cursor.item().map_or(Default::default(), |chunk| {
@ -296,7 +296,7 @@ impl Rope {
return self.summary().len;
}
let mut cursor = self.chunks.cursor::<(OffsetUtf16, usize)>(&());
cursor.seek(&offset, Bias::Left, &());
cursor.seek(&offset, Bias::Left);
let overshoot = offset - cursor.start().0;
cursor.start().1
+ cursor.item().map_or(Default::default(), |chunk| {
@ -309,7 +309,7 @@ impl Rope {
return self.summary().lines;
}
let mut cursor = self.chunks.cursor::<(usize, Point)>(&());
cursor.seek(&offset, Bias::Left, &());
cursor.seek(&offset, Bias::Left);
let overshoot = offset - cursor.start().0;
cursor.start().1
+ cursor.item().map_or(Point::zero(), |chunk| {
@ -322,7 +322,7 @@ impl Rope {
return self.summary().lines_utf16();
}
let mut cursor = self.chunks.cursor::<(usize, PointUtf16)>(&());
cursor.seek(&offset, Bias::Left, &());
cursor.seek(&offset, Bias::Left);
let overshoot = offset - cursor.start().0;
cursor.start().1
+ cursor.item().map_or(PointUtf16::zero(), |chunk| {
@ -335,7 +335,7 @@ impl Rope {
return self.summary().lines_utf16();
}
let mut cursor = self.chunks.cursor::<(Point, PointUtf16)>(&());
cursor.seek(&point, Bias::Left, &());
cursor.seek(&point, Bias::Left);
let overshoot = point - cursor.start().0;
cursor.start().1
+ cursor.item().map_or(PointUtf16::zero(), |chunk| {
@ -348,7 +348,7 @@ impl Rope {
return self.summary().len;
}
let mut cursor = self.chunks.cursor::<(Point, usize)>(&());
cursor.seek(&point, Bias::Left, &());
cursor.seek(&point, Bias::Left);
let overshoot = point - cursor.start().0;
cursor.start().1
+ cursor
@ -369,7 +369,7 @@ impl Rope {
return self.summary().len;
}
let mut cursor = self.chunks.cursor::<(PointUtf16, usize)>(&());
cursor.seek(&point, Bias::Left, &());
cursor.seek(&point, Bias::Left);
let overshoot = point - cursor.start().0;
cursor.start().1
+ cursor.item().map_or(0, |chunk| {
@ -382,7 +382,7 @@ impl Rope {
return self.summary().lines;
}
let mut cursor = self.chunks.cursor::<(PointUtf16, Point)>(&());
cursor.seek(&point.0, Bias::Left, &());
cursor.seek(&point.0, Bias::Left);
let overshoot = Unclipped(point.0 - cursor.start().0);
cursor.start().1
+ cursor.item().map_or(Point::zero(), |chunk| {
@ -392,7 +392,7 @@ impl Rope {
pub fn clip_offset(&self, mut offset: usize, bias: Bias) -> usize {
let mut cursor = self.chunks.cursor::<usize>(&());
cursor.seek(&offset, Bias::Left, &());
cursor.seek(&offset, Bias::Left);
if let Some(chunk) = cursor.item() {
let mut ix = offset - cursor.start();
while !chunk.text.is_char_boundary(ix) {
@ -415,7 +415,7 @@ impl Rope {
pub fn clip_offset_utf16(&self, offset: OffsetUtf16, bias: Bias) -> OffsetUtf16 {
let mut cursor = self.chunks.cursor::<OffsetUtf16>(&());
cursor.seek(&offset, Bias::Right, &());
cursor.seek(&offset, Bias::Right);
if let Some(chunk) = cursor.item() {
let overshoot = offset - cursor.start();
*cursor.start() + chunk.as_slice().clip_offset_utf16(overshoot, bias)
@ -426,7 +426,7 @@ impl Rope {
pub fn clip_point(&self, point: Point, bias: Bias) -> Point {
let mut cursor = self.chunks.cursor::<Point>(&());
cursor.seek(&point, Bias::Right, &());
cursor.seek(&point, Bias::Right);
if let Some(chunk) = cursor.item() {
let overshoot = point - cursor.start();
*cursor.start() + chunk.as_slice().clip_point(overshoot, bias)
@ -437,7 +437,7 @@ impl Rope {
pub fn clip_point_utf16(&self, point: Unclipped<PointUtf16>, bias: Bias) -> PointUtf16 {
let mut cursor = self.chunks.cursor::<PointUtf16>(&());
cursor.seek(&point.0, Bias::Right, &());
cursor.seek(&point.0, Bias::Right);
if let Some(chunk) = cursor.item() {
let overshoot = Unclipped(point.0 - cursor.start());
*cursor.start() + chunk.as_slice().clip_point_utf16(overshoot, bias)
@ -450,10 +450,6 @@ impl Rope {
self.clip_point(Point::new(row, u32::MAX), Bias::Left)
.column
}
pub fn ptr_eq(&self, other: &Self) -> bool {
self.chunks.ptr_eq(&other.chunks)
}
}
impl<'a> From<&'a str> for Rope {
@ -514,7 +510,7 @@ pub struct Cursor<'a> {
impl<'a> Cursor<'a> {
pub fn new(rope: &'a Rope, offset: usize) -> Self {
let mut chunks = rope.chunks.cursor(&());
chunks.seek(&offset, Bias::Right, &());
chunks.seek(&offset, Bias::Right);
Self {
rope,
chunks,
@ -525,7 +521,7 @@ impl<'a> Cursor<'a> {
pub fn seek_forward(&mut self, end_offset: usize) {
debug_assert!(end_offset >= self.offset);
self.chunks.seek_forward(&end_offset, Bias::Right, &());
self.chunks.seek_forward(&end_offset, Bias::Right);
self.offset = end_offset;
}
@ -540,14 +536,14 @@ impl<'a> Cursor<'a> {
let mut slice = Rope::new();
if let Some(start_chunk) = self.chunks.item() {
let start_ix = self.offset - self.chunks.start();
let end_ix = cmp::min(end_offset, self.chunks.end(&())) - self.chunks.start();
let end_ix = cmp::min(end_offset, self.chunks.end()) - self.chunks.start();
slice.push_chunk(start_chunk.slice(start_ix..end_ix));
}
if end_offset > self.chunks.end(&()) {
self.chunks.next(&());
if end_offset > self.chunks.end() {
self.chunks.next();
slice.append(Rope {
chunks: self.chunks.slice(&end_offset, Bias::Right, &()),
chunks: self.chunks.slice(&end_offset, Bias::Right),
});
if let Some(end_chunk) = self.chunks.item() {
let end_ix = end_offset - self.chunks.start();
@ -565,13 +561,13 @@ impl<'a> Cursor<'a> {
let mut summary = D::zero(&());
if let Some(start_chunk) = self.chunks.item() {
let start_ix = self.offset - self.chunks.start();
let end_ix = cmp::min(end_offset, self.chunks.end(&())) - self.chunks.start();
let end_ix = cmp::min(end_offset, self.chunks.end()) - self.chunks.start();
summary.add_assign(&D::from_chunk(start_chunk.slice(start_ix..end_ix)));
}
if end_offset > self.chunks.end(&()) {
self.chunks.next(&());
summary.add_assign(&self.chunks.summary(&end_offset, Bias::Right, &()));
if end_offset > self.chunks.end() {
self.chunks.next();
summary.add_assign(&self.chunks.summary(&end_offset, Bias::Right));
if let Some(end_chunk) = self.chunks.item() {
let end_ix = end_offset - self.chunks.start();
summary.add_assign(&D::from_chunk(end_chunk.slice(0..end_ix)));
@ -603,10 +599,10 @@ impl<'a> Chunks<'a> {
pub fn new(rope: &'a Rope, range: Range<usize>, reversed: bool) -> Self {
let mut chunks = rope.chunks.cursor(&());
let offset = if reversed {
chunks.seek(&range.end, Bias::Left, &());
chunks.seek(&range.end, Bias::Left);
range.end
} else {
chunks.seek(&range.start, Bias::Right, &());
chunks.seek(&range.start, Bias::Right);
range.start
};
Self {
@ -642,10 +638,10 @@ impl<'a> Chunks<'a> {
Bias::Right
};
if offset >= self.chunks.end(&()) {
self.chunks.seek_forward(&offset, bias, &());
if offset >= self.chunks.end() {
self.chunks.seek_forward(&offset, bias);
} else {
self.chunks.seek(&offset, bias, &());
self.chunks.seek(&offset, bias);
}
self.offset = offset;
@ -674,25 +670,25 @@ impl<'a> Chunks<'a> {
found = self.offset <= self.range.end;
} else {
self.chunks
.search_forward(|summary| summary.text.lines.row > 0, &());
.search_forward(|summary| summary.text.lines.row > 0);
self.offset = *self.chunks.start();
if let Some(newline_ix) = self.peek().and_then(|chunk| chunk.find('\n')) {
self.offset += newline_ix + 1;
found = self.offset <= self.range.end;
} else {
self.offset = self.chunks.end(&());
self.offset = self.chunks.end();
}
}
if self.offset == self.chunks.end(&()) {
if self.offset == self.chunks.end() {
self.next();
}
}
if self.offset > self.range.end {
self.offset = cmp::min(self.offset, self.range.end);
self.chunks.seek(&self.offset, Bias::Right, &());
self.chunks.seek(&self.offset, Bias::Right);
}
found
@ -711,7 +707,7 @@ impl<'a> Chunks<'a> {
let initial_offset = self.offset;
if self.offset == *self.chunks.start() {
self.chunks.prev(&());
self.chunks.prev();
}
if let Some(chunk) = self.chunks.item() {
@ -729,14 +725,14 @@ impl<'a> Chunks<'a> {
}
self.chunks
.search_backward(|summary| summary.text.lines.row > 0, &());
.search_backward(|summary| summary.text.lines.row > 0);
self.offset = *self.chunks.start();
if let Some(chunk) = self.chunks.item() {
if let Some(newline_ix) = chunk.text.rfind('\n') {
self.offset += newline_ix + 1;
if self.offset_is_valid() {
if self.offset == self.chunks.end(&()) {
self.chunks.next(&());
if self.offset == self.chunks.end() {
self.chunks.next();
}
return true;
@ -746,7 +742,7 @@ impl<'a> Chunks<'a> {
if !self.offset_is_valid() || self.chunks.item().is_none() {
self.offset = self.range.start;
self.chunks.seek(&self.offset, Bias::Right, &());
self.chunks.seek(&self.offset, Bias::Right);
}
self.offset < initial_offset && self.offset == 0
@ -765,7 +761,7 @@ impl<'a> Chunks<'a> {
slice_start..slice_end
} else {
let slice_start = self.offset - chunk_start;
let slice_end = cmp::min(self.chunks.end(&()), self.range.end) - chunk_start;
let slice_end = cmp::min(self.chunks.end(), self.range.end) - chunk_start;
slice_start..slice_end
};
@ -825,12 +821,12 @@ impl<'a> Iterator for Chunks<'a> {
if self.reversed {
self.offset -= chunk.len();
if self.offset <= *self.chunks.start() {
self.chunks.prev(&());
self.chunks.prev();
}
} else {
self.offset += chunk.len();
if self.offset >= self.chunks.end(&()) {
self.chunks.next(&());
if self.offset >= self.chunks.end() {
self.chunks.next();
}
}
@ -848,9 +844,9 @@ impl<'a> Bytes<'a> {
pub fn new(rope: &'a Rope, range: Range<usize>, reversed: bool) -> Self {
let mut chunks = rope.chunks.cursor(&());
if reversed {
chunks.seek(&range.end, Bias::Left, &());
chunks.seek(&range.end, Bias::Left);
} else {
chunks.seek(&range.start, Bias::Right, &());
chunks.seek(&range.start, Bias::Right);
}
Self {
chunks,
@ -861,7 +857,7 @@ impl<'a> Bytes<'a> {
pub fn peek(&self) -> Option<&'a [u8]> {
let chunk = self.chunks.item()?;
if self.reversed && self.range.start >= self.chunks.end(&()) {
if self.reversed && self.range.start >= self.chunks.end() {
return None;
}
let chunk_start = *self.chunks.start();
@ -881,9 +877,9 @@ impl<'a> Iterator for Bytes<'a> {
let result = self.peek();
if result.is_some() {
if self.reversed {
self.chunks.prev(&());
self.chunks.prev();
} else {
self.chunks.next(&());
self.chunks.next();
}
}
result
@ -905,9 +901,9 @@ impl io::Read for Bytes<'_> {
if len == chunk.len() {
if self.reversed {
self.chunks.prev(&());
self.chunks.prev();
} else {
self.chunks.next(&());
self.chunks.next();
}
}
Ok(len)

View file

@ -2784,6 +2784,7 @@ impl KeystrokeInput {
else {
log::trace!("No keybinding to stop recording keystrokes in keystroke input");
self.close_keystrokes.take();
self.close_keystrokes_start.take();
return CloseKeystrokeResult::None;
};
let action_keystrokes = keybind_for_close_action.keystrokes();
@ -2976,7 +2977,9 @@ impl KeystrokeInput {
return;
}
window.focus(&self.outer_focus_handle);
if let Some(close_keystrokes_start) = self.close_keystrokes_start.take() {
if let Some(close_keystrokes_start) = self.close_keystrokes_start.take()
&& close_keystrokes_start < self.keystrokes.len()
{
self.keystrokes.drain(close_keystrokes_start..);
}
self.close_keystrokes.take();

View file

@ -25,6 +25,7 @@ pub struct Cursor<'a, T: Item, D> {
position: D,
did_seek: bool,
at_end: bool,
cx: &'a <T::Summary as Summary>::Context,
}
impl<T: Item + fmt::Debug, D: fmt::Debug> fmt::Debug for Cursor<'_, T, D>
@ -52,21 +53,22 @@ where
T: Item,
D: Dimension<'a, T::Summary>,
{
pub fn new(tree: &'a SumTree<T>, cx: &<T::Summary as Summary>::Context) -> Self {
pub fn new(tree: &'a SumTree<T>, cx: &'a <T::Summary as Summary>::Context) -> Self {
Self {
tree,
stack: ArrayVec::new(),
position: D::zero(cx),
did_seek: false,
at_end: tree.is_empty(),
cx,
}
}
fn reset(&mut self, cx: &<T::Summary as Summary>::Context) {
fn reset(&mut self) {
self.did_seek = false;
self.at_end = self.tree.is_empty();
self.stack.truncate(0);
self.position = D::zero(cx);
self.position = D::zero(self.cx);
}
pub fn start(&self) -> &D {
@ -74,10 +76,10 @@ where
}
#[track_caller]
pub fn end(&self, cx: &<T::Summary as Summary>::Context) -> D {
pub fn end(&self) -> D {
if let Some(item_summary) = self.item_summary() {
let mut end = self.start().clone();
end.add_summary(item_summary, cx);
end.add_summary(item_summary, self.cx);
end
} else {
self.start().clone()
@ -202,12 +204,12 @@ where
}
#[track_caller]
pub fn prev(&mut self, cx: &<T::Summary as Summary>::Context) {
self.search_backward(|_| true, cx)
pub fn prev(&mut self) {
self.search_backward(|_| true)
}
#[track_caller]
pub fn search_backward<F>(&mut self, mut filter_node: F, cx: &<T::Summary as Summary>::Context)
pub fn search_backward<F>(&mut self, mut filter_node: F)
where
F: FnMut(&T::Summary) -> bool,
{
@ -217,13 +219,13 @@ where
}
if self.at_end {
self.position = D::zero(cx);
self.position = D::zero(self.cx);
self.at_end = self.tree.is_empty();
if !self.tree.is_empty() {
self.stack.push(StackEntry {
tree: self.tree,
index: self.tree.0.child_summaries().len(),
position: D::from_summary(self.tree.summary(), cx),
position: D::from_summary(self.tree.summary(), self.cx),
});
}
}
@ -233,7 +235,7 @@ where
if let Some(StackEntry { position, .. }) = self.stack.iter().rev().nth(1) {
self.position = position.clone();
} else {
self.position = D::zero(cx);
self.position = D::zero(self.cx);
}
let entry = self.stack.last_mut().unwrap();
@ -247,7 +249,7 @@ where
}
for summary in &entry.tree.0.child_summaries()[..entry.index] {
self.position.add_summary(summary, cx);
self.position.add_summary(summary, self.cx);
}
entry.position = self.position.clone();
@ -257,7 +259,7 @@ where
if descending {
let tree = &child_trees[entry.index];
self.stack.push(StackEntry {
position: D::zero(cx),
position: D::zero(self.cx),
tree,
index: tree.0.child_summaries().len() - 1,
})
@ -273,12 +275,12 @@ where
}
#[track_caller]
pub fn next(&mut self, cx: &<T::Summary as Summary>::Context) {
self.search_forward(|_| true, cx)
pub fn next(&mut self) {
self.search_forward(|_| true)
}
#[track_caller]
pub fn search_forward<F>(&mut self, mut filter_node: F, cx: &<T::Summary as Summary>::Context)
pub fn search_forward<F>(&mut self, mut filter_node: F)
where
F: FnMut(&T::Summary) -> bool,
{
@ -289,7 +291,7 @@ where
self.stack.push(StackEntry {
tree: self.tree,
index: 0,
position: D::zero(cx),
position: D::zero(self.cx),
});
descend = true;
}
@ -316,8 +318,8 @@ where
break;
} else {
entry.index += 1;
entry.position.add_summary(next_summary, cx);
self.position.add_summary(next_summary, cx);
entry.position.add_summary(next_summary, self.cx);
self.position.add_summary(next_summary, self.cx);
}
}
@ -327,8 +329,8 @@ where
if !descend {
let item_summary = &item_summaries[entry.index];
entry.index += 1;
entry.position.add_summary(item_summary, cx);
self.position.add_summary(item_summary, cx);
entry.position.add_summary(item_summary, self.cx);
self.position.add_summary(item_summary, self.cx);
}
loop {
@ -337,8 +339,8 @@ where
return;
} else {
entry.index += 1;
entry.position.add_summary(next_item_summary, cx);
self.position.add_summary(next_item_summary, cx);
entry.position.add_summary(next_item_summary, self.cx);
self.position.add_summary(next_item_summary, self.cx);
}
} else {
break None;
@ -380,71 +382,51 @@ where
D: Dimension<'a, T::Summary>,
{
#[track_caller]
pub fn seek<Target>(
&mut self,
pos: &Target,
bias: Bias,
cx: &<T::Summary as Summary>::Context,
) -> bool
pub fn seek<Target>(&mut self, pos: &Target, bias: Bias) -> bool
where
Target: SeekTarget<'a, T::Summary, D>,
{
self.reset(cx);
self.seek_internal(pos, bias, &mut (), cx)
self.reset();
self.seek_internal(pos, bias, &mut ())
}
#[track_caller]
pub fn seek_forward<Target>(
&mut self,
pos: &Target,
bias: Bias,
cx: &<T::Summary as Summary>::Context,
) -> bool
pub fn seek_forward<Target>(&mut self, pos: &Target, bias: Bias) -> bool
where
Target: SeekTarget<'a, T::Summary, D>,
{
self.seek_internal(pos, bias, &mut (), cx)
self.seek_internal(pos, bias, &mut ())
}
/// Advances the cursor and returns traversed items as a tree.
#[track_caller]
pub fn slice<Target>(
&mut self,
end: &Target,
bias: Bias,
cx: &<T::Summary as Summary>::Context,
) -> SumTree<T>
pub fn slice<Target>(&mut self, end: &Target, bias: Bias) -> SumTree<T>
where
Target: SeekTarget<'a, T::Summary, D>,
{
let mut slice = SliceSeekAggregate {
tree: SumTree::new(cx),
tree: SumTree::new(self.cx),
leaf_items: ArrayVec::new(),
leaf_item_summaries: ArrayVec::new(),
leaf_summary: <T::Summary as Summary>::zero(cx),
leaf_summary: <T::Summary as Summary>::zero(self.cx),
};
self.seek_internal(end, bias, &mut slice, cx);
self.seek_internal(end, bias, &mut slice);
slice.tree
}
#[track_caller]
pub fn suffix(&mut self, cx: &<T::Summary as Summary>::Context) -> SumTree<T> {
self.slice(&End::new(), Bias::Right, cx)
pub fn suffix(&mut self) -> SumTree<T> {
self.slice(&End::new(), Bias::Right)
}
#[track_caller]
pub fn summary<Target, Output>(
&mut self,
end: &Target,
bias: Bias,
cx: &<T::Summary as Summary>::Context,
) -> Output
pub fn summary<Target, Output>(&mut self, end: &Target, bias: Bias) -> Output
where
Target: SeekTarget<'a, T::Summary, D>,
Output: Dimension<'a, T::Summary>,
{
let mut summary = SummarySeekAggregate(Output::zero(cx));
self.seek_internal(end, bias, &mut summary, cx);
let mut summary = SummarySeekAggregate(Output::zero(self.cx));
self.seek_internal(end, bias, &mut summary);
summary.0
}
@ -455,10 +437,9 @@ where
target: &dyn SeekTarget<'a, T::Summary, D>,
bias: Bias,
aggregate: &mut dyn SeekAggregate<'a, T>,
cx: &<T::Summary as Summary>::Context,
) -> bool {
assert!(
target.cmp(&self.position, cx) >= Ordering::Equal,
target.cmp(&self.position, self.cx) >= Ordering::Equal,
"cannot seek backward",
);
@ -467,7 +448,7 @@ where
self.stack.push(StackEntry {
tree: self.tree,
index: 0,
position: D::zero(cx),
position: D::zero(self.cx),
});
}
@ -489,14 +470,14 @@ where
.zip(&child_summaries[entry.index..])
{
let mut child_end = self.position.clone();
child_end.add_summary(child_summary, cx);
child_end.add_summary(child_summary, self.cx);
let comparison = target.cmp(&child_end, cx);
let comparison = target.cmp(&child_end, self.cx);
if comparison == Ordering::Greater
|| (comparison == Ordering::Equal && bias == Bias::Right)
{
self.position = child_end;
aggregate.push_tree(child_tree, child_summary, cx);
aggregate.push_tree(child_tree, child_summary, self.cx);
entry.index += 1;
entry.position = self.position.clone();
} else {
@ -522,22 +503,22 @@ where
.zip(&item_summaries[entry.index..])
{
let mut child_end = self.position.clone();
child_end.add_summary(item_summary, cx);
child_end.add_summary(item_summary, self.cx);
let comparison = target.cmp(&child_end, cx);
let comparison = target.cmp(&child_end, self.cx);
if comparison == Ordering::Greater
|| (comparison == Ordering::Equal && bias == Bias::Right)
{
self.position = child_end;
aggregate.push_item(item, item_summary, cx);
aggregate.push_item(item, item_summary, self.cx);
entry.index += 1;
} else {
aggregate.end_leaf(cx);
aggregate.end_leaf(self.cx);
break 'outer;
}
}
aggregate.end_leaf(cx);
aggregate.end_leaf(self.cx);
}
}
@ -551,11 +532,11 @@ where
let mut end = self.position.clone();
if bias == Bias::Left {
if let Some(summary) = self.item_summary() {
end.add_summary(summary, cx);
end.add_summary(summary, self.cx);
}
}
target.cmp(&end, cx) == Ordering::Equal
target.cmp(&end, self.cx) == Ordering::Equal
}
}
@ -624,21 +605,19 @@ impl<'a, T: Item> Iterator for Iter<'a, T> {
}
}
impl<'a, T, S, D> Iterator for Cursor<'a, T, D>
impl<'a, T: Item, D> Iterator for Cursor<'a, T, D>
where
T: Item<Summary = S>,
S: Summary<Context = ()>,
D: Dimension<'a, T::Summary>,
{
type Item = &'a T;
fn next(&mut self) -> Option<Self::Item> {
if !self.did_seek {
self.next(&());
self.next();
}
if let Some(item) = self.item() {
self.next(&());
self.next();
Some(item)
} else {
None
@ -651,7 +630,7 @@ pub struct FilterCursor<'a, F, T: Item, D> {
filter_node: F,
}
impl<'a, F, T, D> FilterCursor<'a, F, T, D>
impl<'a, F, T: Item, D> FilterCursor<'a, F, T, D>
where
F: FnMut(&T::Summary) -> bool,
T: Item,
@ -659,7 +638,7 @@ where
{
pub fn new(
tree: &'a SumTree<T>,
cx: &<T::Summary as Summary>::Context,
cx: &'a <T::Summary as Summary>::Context,
filter_node: F,
) -> Self {
let cursor = tree.cursor::<D>(cx);
@ -673,8 +652,8 @@ where
self.cursor.start()
}
pub fn end(&self, cx: &<T::Summary as Summary>::Context) -> D {
self.cursor.end(cx)
pub fn end(&self) -> D {
self.cursor.end()
}
pub fn item(&self) -> Option<&'a T> {
@ -685,31 +664,29 @@ where
self.cursor.item_summary()
}
pub fn next(&mut self, cx: &<T::Summary as Summary>::Context) {
self.cursor.search_forward(&mut self.filter_node, cx);
pub fn next(&mut self) {
self.cursor.search_forward(&mut self.filter_node);
}
pub fn prev(&mut self, cx: &<T::Summary as Summary>::Context) {
self.cursor.search_backward(&mut self.filter_node, cx);
pub fn prev(&mut self) {
self.cursor.search_backward(&mut self.filter_node);
}
}
impl<'a, F, T, S, U> Iterator for FilterCursor<'a, F, T, U>
impl<'a, F, T: Item, U> Iterator for FilterCursor<'a, F, T, U>
where
F: FnMut(&T::Summary) -> bool,
T: Item<Summary = S>,
S: Summary<Context = ()>, //Context for the summary must be unit type, as .next() doesn't take arguments
U: Dimension<'a, T::Summary>,
{
type Item = &'a T;
fn next(&mut self) -> Option<Self::Item> {
if !self.cursor.did_seek {
self.next(&());
self.next();
}
if let Some(item) = self.item() {
self.cursor.search_forward(&mut self.filter_node, &());
self.cursor.search_forward(&mut self.filter_node);
Some(item)
} else {
None
@ -795,3 +772,23 @@ where
self.0.add_summary(summary, cx);
}
}
struct End<D>(PhantomData<D>);
impl<D> End<D> {
fn new() -> Self {
Self(PhantomData)
}
}
impl<'a, S: Summary, D: Dimension<'a, S>> SeekTarget<'a, S, D> for End<D> {
fn cmp(&self, _: &D, _: &S::Context) -> Ordering {
Ordering::Greater
}
}
impl<D> fmt::Debug for End<D> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_tuple("End").finish()
}
}

View file

@ -38,7 +38,6 @@ pub trait Summary: Clone {
type Context;
fn zero(cx: &Self::Context) -> Self;
fn add_summary(&mut self, summary: &Self, cx: &Self::Context);
}
@ -138,26 +137,6 @@ where
}
}
struct End<D>(PhantomData<D>);
impl<D> End<D> {
fn new() -> Self {
Self(PhantomData)
}
}
impl<'a, S: Summary, D: Dimension<'a, S>> SeekTarget<'a, S, D> for End<D> {
fn cmp(&self, _: &D, _: &S::Context) -> Ordering {
Ordering::Greater
}
}
impl<D> fmt::Debug for End<D> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_tuple("End").finish()
}
}
/// Bias is used to settle ambiguities when determining positions in an ordered sequence.
///
/// The primary use case is for text, where Bias influences
@ -372,10 +351,10 @@ impl<T: Item> SumTree<T> {
pub fn items(&self, cx: &<T::Summary as Summary>::Context) -> Vec<T> {
let mut items = Vec::new();
let mut cursor = self.cursor::<()>(cx);
cursor.next(cx);
cursor.next();
while let Some(item) = cursor.item() {
items.push(item.clone());
cursor.next(cx);
cursor.next();
}
items
}
@ -384,7 +363,7 @@ impl<T: Item> SumTree<T> {
Iter::new(self)
}
pub fn cursor<'a, S>(&'a self, cx: &<T::Summary as Summary>::Context) -> Cursor<'a, T, S>
pub fn cursor<'a, S>(&'a self, cx: &'a <T::Summary as Summary>::Context) -> Cursor<'a, T, S>
where
S: Dimension<'a, T::Summary>,
{
@ -395,7 +374,7 @@ impl<T: Item> SumTree<T> {
/// that is returned cannot be used with Rust's iterators.
pub fn filter<'a, F, U>(
&'a self,
cx: &<T::Summary as Summary>::Context,
cx: &'a <T::Summary as Summary>::Context,
filter_node: F,
) -> FilterCursor<'a, F, T, U>
where
@ -525,10 +504,6 @@ impl<T: Item> SumTree<T> {
}
}
pub fn ptr_eq(&self, other: &Self) -> bool {
Arc::ptr_eq(&self.0, &other.0)
}
fn push_tree_recursive(
&mut self,
other: SumTree<T>,
@ -686,11 +661,6 @@ impl<T: Item> SumTree<T> {
} => child_trees.last().unwrap().rightmost_leaf(),
}
}
#[cfg(debug_assertions)]
pub fn _debug_entries(&self) -> Vec<&T> {
self.iter().collect::<Vec<_>>()
}
}
impl<T: Item + PartialEq> PartialEq for SumTree<T> {
@ -710,15 +680,15 @@ impl<T: KeyedItem> SumTree<T> {
let mut replaced = None;
*self = {
let mut cursor = self.cursor::<T::Key>(cx);
let mut new_tree = cursor.slice(&item.key(), Bias::Left, cx);
let mut new_tree = cursor.slice(&item.key(), Bias::Left);
if let Some(cursor_item) = cursor.item() {
if cursor_item.key() == item.key() {
replaced = Some(cursor_item.clone());
cursor.next(cx);
cursor.next();
}
}
new_tree.push(item, cx);
new_tree.append(cursor.suffix(cx), cx);
new_tree.append(cursor.suffix(), cx);
new_tree
};
replaced
@ -728,14 +698,14 @@ impl<T: KeyedItem> SumTree<T> {
let mut removed = None;
*self = {
let mut cursor = self.cursor::<T::Key>(cx);
let mut new_tree = cursor.slice(key, Bias::Left, cx);
let mut new_tree = cursor.slice(key, Bias::Left);
if let Some(item) = cursor.item() {
if item.key() == *key {
removed = Some(item.clone());
cursor.next(cx);
cursor.next();
}
}
new_tree.append(cursor.suffix(cx), cx);
new_tree.append(cursor.suffix(), cx);
new_tree
};
removed
@ -758,7 +728,7 @@ impl<T: KeyedItem> SumTree<T> {
let mut new_tree = SumTree::new(cx);
let mut buffered_items = Vec::new();
cursor.seek(&T::Key::zero(cx), Bias::Left, cx);
cursor.seek(&T::Key::zero(cx), Bias::Left);
for edit in edits {
let new_key = edit.key();
let mut old_item = cursor.item();
@ -768,7 +738,7 @@ impl<T: KeyedItem> SumTree<T> {
.map_or(false, |old_item| old_item.key() < new_key)
{
new_tree.extend(buffered_items.drain(..), cx);
let slice = cursor.slice(&new_key, Bias::Left, cx);
let slice = cursor.slice(&new_key, Bias::Left);
new_tree.append(slice, cx);
old_item = cursor.item();
}
@ -776,7 +746,7 @@ impl<T: KeyedItem> SumTree<T> {
if let Some(old_item) = old_item {
if old_item.key() == new_key {
removed.push(old_item.clone());
cursor.next(cx);
cursor.next();
}
}
@ -789,70 +759,25 @@ impl<T: KeyedItem> SumTree<T> {
}
new_tree.extend(buffered_items, cx);
new_tree.append(cursor.suffix(cx), cx);
new_tree.append(cursor.suffix(), cx);
new_tree
};
removed
}
pub fn get(&self, key: &T::Key, cx: &<T::Summary as Summary>::Context) -> Option<&T> {
pub fn get<'a>(
&'a self,
key: &T::Key,
cx: &'a <T::Summary as Summary>::Context,
) -> Option<&'a T> {
let mut cursor = self.cursor::<T::Key>(cx);
if cursor.seek(key, Bias::Left, cx) {
if cursor.seek(key, Bias::Left) {
cursor.item()
} else {
None
}
}
#[inline]
pub fn contains(&self, key: &T::Key, cx: &<T::Summary as Summary>::Context) -> bool {
self.get(key, cx).is_some()
}
pub fn update<F, R>(
&mut self,
key: &T::Key,
cx: &<T::Summary as Summary>::Context,
f: F,
) -> Option<R>
where
F: FnOnce(&mut T) -> R,
{
let mut cursor = self.cursor::<T::Key>(cx);
let mut new_tree = cursor.slice(key, Bias::Left, cx);
let mut result = None;
if Ord::cmp(key, &cursor.end(cx)) == Ordering::Equal {
let mut updated = cursor.item().unwrap().clone();
result = Some(f(&mut updated));
new_tree.push(updated, cx);
cursor.next(cx);
}
new_tree.append(cursor.suffix(cx), cx);
drop(cursor);
*self = new_tree;
result
}
pub fn retain<F: FnMut(&T) -> bool>(
&mut self,
cx: &<T::Summary as Summary>::Context,
mut predicate: F,
) {
let mut new_map = SumTree::new(cx);
let mut cursor = self.cursor::<T::Key>(cx);
cursor.next(cx);
while let Some(item) = cursor.item() {
if predicate(&item) {
new_map.push(item.clone(), cx);
}
cursor.next(cx);
}
drop(cursor);
*self = new_map;
}
}
impl<T, S> Default for SumTree<T>
@ -1061,14 +986,14 @@ mod tests {
tree = {
let mut cursor = tree.cursor::<Count>(&());
let mut new_tree = cursor.slice(&Count(splice_start), Bias::Right, &());
let mut new_tree = cursor.slice(&Count(splice_start), Bias::Right);
if rng.r#gen() {
new_tree.extend(new_items, &());
} else {
new_tree.par_extend(new_items, &());
}
cursor.seek(&Count(splice_end), Bias::Right, &());
new_tree.append(cursor.slice(&tree_end, Bias::Right, &()), &());
cursor.seek(&Count(splice_end), Bias::Right);
new_tree.append(cursor.slice(&tree_end, Bias::Right), &());
new_tree
};
@ -1090,10 +1015,10 @@ mod tests {
.collect::<Vec<_>>();
let mut item_ix = if rng.r#gen() {
filter_cursor.next(&());
filter_cursor.next();
0
} else {
filter_cursor.prev(&());
filter_cursor.prev();
expected_filtered_items.len().saturating_sub(1)
};
while item_ix < expected_filtered_items.len() {
@ -1103,19 +1028,19 @@ mod tests {
assert_eq!(actual_item, &reference_item);
assert_eq!(filter_cursor.start().0, reference_index);
log::info!("next");
filter_cursor.next(&());
filter_cursor.next();
item_ix += 1;
while item_ix > 0 && rng.gen_bool(0.2) {
log::info!("prev");
filter_cursor.prev(&());
filter_cursor.prev();
item_ix -= 1;
if item_ix == 0 && rng.gen_bool(0.2) {
filter_cursor.prev(&());
filter_cursor.prev();
assert_eq!(filter_cursor.item(), None);
assert_eq!(filter_cursor.start().0, 0);
filter_cursor.next(&());
filter_cursor.next();
}
}
}
@ -1124,9 +1049,9 @@ mod tests {
let mut before_start = false;
let mut cursor = tree.cursor::<Count>(&());
let start_pos = rng.gen_range(0..=reference_items.len());
cursor.seek(&Count(start_pos), Bias::Right, &());
cursor.seek(&Count(start_pos), Bias::Right);
let mut pos = rng.gen_range(start_pos..=reference_items.len());
cursor.seek_forward(&Count(pos), Bias::Right, &());
cursor.seek_forward(&Count(pos), Bias::Right);
for i in 0..10 {
assert_eq!(cursor.start().0, pos);
@ -1152,13 +1077,13 @@ mod tests {
}
if i < 5 {
cursor.next(&());
cursor.next();
if pos < reference_items.len() {
pos += 1;
before_start = false;
}
} else {
cursor.prev(&());
cursor.prev();
if pos == 0 {
before_start = true;
}
@ -1174,11 +1099,11 @@ mod tests {
let end_bias = if rng.r#gen() { Bias::Left } else { Bias::Right };
let mut cursor = tree.cursor::<Count>(&());
cursor.seek(&Count(start), start_bias, &());
let slice = cursor.slice(&Count(end), end_bias, &());
cursor.seek(&Count(start), start_bias);
let slice = cursor.slice(&Count(end), end_bias);
cursor.seek(&Count(start), start_bias, &());
let summary = cursor.summary::<_, Sum>(&Count(end), end_bias, &());
cursor.seek(&Count(start), start_bias);
let summary = cursor.summary::<_, Sum>(&Count(end), end_bias);
assert_eq!(summary.0, slice.summary().sum);
}
@ -1191,19 +1116,19 @@ mod tests {
let tree = SumTree::<u8>::default();
let mut cursor = tree.cursor::<IntegersSummary>(&());
assert_eq!(
cursor.slice(&Count(0), Bias::Right, &()).items(&()),
cursor.slice(&Count(0), Bias::Right).items(&()),
Vec::<u8>::new()
);
assert_eq!(cursor.item(), None);
assert_eq!(cursor.prev_item(), None);
assert_eq!(cursor.next_item(), None);
assert_eq!(cursor.start().sum, 0);
cursor.prev(&());
cursor.prev();
assert_eq!(cursor.item(), None);
assert_eq!(cursor.prev_item(), None);
assert_eq!(cursor.next_item(), None);
assert_eq!(cursor.start().sum, 0);
cursor.next(&());
cursor.next();
assert_eq!(cursor.item(), None);
assert_eq!(cursor.prev_item(), None);
assert_eq!(cursor.next_item(), None);
@ -1214,7 +1139,7 @@ mod tests {
tree.extend(vec![1], &());
let mut cursor = tree.cursor::<IntegersSummary>(&());
assert_eq!(
cursor.slice(&Count(0), Bias::Right, &()).items(&()),
cursor.slice(&Count(0), Bias::Right).items(&()),
Vec::<u8>::new()
);
assert_eq!(cursor.item(), Some(&1));
@ -1222,29 +1147,29 @@ mod tests {
assert_eq!(cursor.next_item(), None);
assert_eq!(cursor.start().sum, 0);
cursor.next(&());
cursor.next();
assert_eq!(cursor.item(), None);
assert_eq!(cursor.prev_item(), Some(&1));
assert_eq!(cursor.next_item(), None);
assert_eq!(cursor.start().sum, 1);
cursor.prev(&());
cursor.prev();
assert_eq!(cursor.item(), Some(&1));
assert_eq!(cursor.prev_item(), None);
assert_eq!(cursor.next_item(), None);
assert_eq!(cursor.start().sum, 0);
let mut cursor = tree.cursor::<IntegersSummary>(&());
assert_eq!(cursor.slice(&Count(1), Bias::Right, &()).items(&()), [1]);
assert_eq!(cursor.slice(&Count(1), Bias::Right).items(&()), [1]);
assert_eq!(cursor.item(), None);
assert_eq!(cursor.prev_item(), Some(&1));
assert_eq!(cursor.next_item(), None);
assert_eq!(cursor.start().sum, 1);
cursor.seek(&Count(0), Bias::Right, &());
cursor.seek(&Count(0), Bias::Right);
assert_eq!(
cursor
.slice(&tree.extent::<Count>(&()), Bias::Right, &())
.slice(&tree.extent::<Count>(&()), Bias::Right)
.items(&()),
[1]
);
@ -1258,80 +1183,80 @@ mod tests {
tree.extend(vec![1, 2, 3, 4, 5, 6], &());
let mut cursor = tree.cursor::<IntegersSummary>(&());
assert_eq!(cursor.slice(&Count(2), Bias::Right, &()).items(&()), [1, 2]);
assert_eq!(cursor.slice(&Count(2), Bias::Right).items(&()), [1, 2]);
assert_eq!(cursor.item(), Some(&3));
assert_eq!(cursor.prev_item(), Some(&2));
assert_eq!(cursor.next_item(), Some(&4));
assert_eq!(cursor.start().sum, 3);
cursor.next(&());
cursor.next();
assert_eq!(cursor.item(), Some(&4));
assert_eq!(cursor.prev_item(), Some(&3));
assert_eq!(cursor.next_item(), Some(&5));
assert_eq!(cursor.start().sum, 6);
cursor.next(&());
cursor.next();
assert_eq!(cursor.item(), Some(&5));
assert_eq!(cursor.prev_item(), Some(&4));
assert_eq!(cursor.next_item(), Some(&6));
assert_eq!(cursor.start().sum, 10);
cursor.next(&());
cursor.next();
assert_eq!(cursor.item(), Some(&6));
assert_eq!(cursor.prev_item(), Some(&5));
assert_eq!(cursor.next_item(), None);
assert_eq!(cursor.start().sum, 15);
cursor.next(&());
cursor.next(&());
cursor.next();
cursor.next();
assert_eq!(cursor.item(), None);
assert_eq!(cursor.prev_item(), Some(&6));
assert_eq!(cursor.next_item(), None);
assert_eq!(cursor.start().sum, 21);
cursor.prev(&());
cursor.prev();
assert_eq!(cursor.item(), Some(&6));
assert_eq!(cursor.prev_item(), Some(&5));
assert_eq!(cursor.next_item(), None);
assert_eq!(cursor.start().sum, 15);
cursor.prev(&());
cursor.prev();
assert_eq!(cursor.item(), Some(&5));
assert_eq!(cursor.prev_item(), Some(&4));
assert_eq!(cursor.next_item(), Some(&6));
assert_eq!(cursor.start().sum, 10);
cursor.prev(&());
cursor.prev();
assert_eq!(cursor.item(), Some(&4));
assert_eq!(cursor.prev_item(), Some(&3));
assert_eq!(cursor.next_item(), Some(&5));
assert_eq!(cursor.start().sum, 6);
cursor.prev(&());
cursor.prev();
assert_eq!(cursor.item(), Some(&3));
assert_eq!(cursor.prev_item(), Some(&2));
assert_eq!(cursor.next_item(), Some(&4));
assert_eq!(cursor.start().sum, 3);
cursor.prev(&());
cursor.prev();
assert_eq!(cursor.item(), Some(&2));
assert_eq!(cursor.prev_item(), Some(&1));
assert_eq!(cursor.next_item(), Some(&3));
assert_eq!(cursor.start().sum, 1);
cursor.prev(&());
cursor.prev();
assert_eq!(cursor.item(), Some(&1));
assert_eq!(cursor.prev_item(), None);
assert_eq!(cursor.next_item(), Some(&2));
assert_eq!(cursor.start().sum, 0);
cursor.prev(&());
cursor.prev();
assert_eq!(cursor.item(), None);
assert_eq!(cursor.prev_item(), None);
assert_eq!(cursor.next_item(), Some(&1));
assert_eq!(cursor.start().sum, 0);
cursor.next(&());
cursor.next();
assert_eq!(cursor.item(), Some(&1));
assert_eq!(cursor.prev_item(), None);
assert_eq!(cursor.next_item(), Some(&2));
@ -1340,7 +1265,7 @@ mod tests {
let mut cursor = tree.cursor::<IntegersSummary>(&());
assert_eq!(
cursor
.slice(&tree.extent::<Count>(&()), Bias::Right, &())
.slice(&tree.extent::<Count>(&()), Bias::Right)
.items(&()),
tree.items(&())
);
@ -1349,10 +1274,10 @@ mod tests {
assert_eq!(cursor.next_item(), None);
assert_eq!(cursor.start().sum, 21);
cursor.seek(&Count(3), Bias::Right, &());
cursor.seek(&Count(3), Bias::Right);
assert_eq!(
cursor
.slice(&tree.extent::<Count>(&()), Bias::Right, &())
.slice(&tree.extent::<Count>(&()), Bias::Right)
.items(&()),
[4, 5, 6]
);
@ -1362,25 +1287,16 @@ mod tests {
assert_eq!(cursor.start().sum, 21);
// Seeking can bias left or right
cursor.seek(&Count(1), Bias::Left, &());
cursor.seek(&Count(1), Bias::Left);
assert_eq!(cursor.item(), Some(&1));
cursor.seek(&Count(1), Bias::Right, &());
cursor.seek(&Count(1), Bias::Right);
assert_eq!(cursor.item(), Some(&2));
// Slicing without resetting starts from where the cursor is parked at.
cursor.seek(&Count(1), Bias::Right, &());
assert_eq!(
cursor.slice(&Count(3), Bias::Right, &()).items(&()),
vec![2, 3]
);
assert_eq!(
cursor.slice(&Count(6), Bias::Left, &()).items(&()),
vec![4, 5]
);
assert_eq!(
cursor.slice(&Count(6), Bias::Right, &()).items(&()),
vec![6]
);
cursor.seek(&Count(1), Bias::Right);
assert_eq!(cursor.slice(&Count(3), Bias::Right).items(&()), vec![2, 3]);
assert_eq!(cursor.slice(&Count(6), Bias::Left).items(&()), vec![4, 5]);
assert_eq!(cursor.slice(&Count(6), Bias::Right).items(&()), vec![6]);
}
#[test]

View file

@ -54,7 +54,7 @@ impl<K: Clone + Ord, V: Clone> TreeMap<K, V> {
pub fn get(&self, key: &K) -> Option<&V> {
let mut cursor = self.0.cursor::<MapKeyRef<'_, K>>(&());
cursor.seek(&MapKeyRef(Some(key)), Bias::Left, &());
cursor.seek(&MapKeyRef(Some(key)), Bias::Left);
if let Some(item) = cursor.item() {
if Some(key) == item.key().0.as_ref() {
Some(&item.value)
@ -86,12 +86,12 @@ impl<K: Clone + Ord, V: Clone> TreeMap<K, V> {
let mut removed = None;
let mut cursor = self.0.cursor::<MapKeyRef<'_, K>>(&());
let key = MapKeyRef(Some(key));
let mut new_tree = cursor.slice(&key, Bias::Left, &());
if key.cmp(&cursor.end(&()), &()) == Ordering::Equal {
let mut new_tree = cursor.slice(&key, Bias::Left);
if key.cmp(&cursor.end(), &()) == Ordering::Equal {
removed = Some(cursor.item().unwrap().value.clone());
cursor.next(&());
cursor.next();
}
new_tree.append(cursor.suffix(&()), &());
new_tree.append(cursor.suffix(), &());
drop(cursor);
self.0 = new_tree;
removed
@ -101,9 +101,9 @@ impl<K: Clone + Ord, V: Clone> TreeMap<K, V> {
let start = MapSeekTargetAdaptor(start);
let end = MapSeekTargetAdaptor(end);
let mut cursor = self.0.cursor::<MapKeyRef<'_, K>>(&());
let mut new_tree = cursor.slice(&start, Bias::Left, &());
cursor.seek(&end, Bias::Left, &());
new_tree.append(cursor.suffix(&()), &());
let mut new_tree = cursor.slice(&start, Bias::Left);
cursor.seek(&end, Bias::Left);
new_tree.append(cursor.suffix(), &());
drop(cursor);
self.0 = new_tree;
}
@ -112,15 +112,15 @@ impl<K: Clone + Ord, V: Clone> TreeMap<K, V> {
pub fn closest(&self, key: &K) -> Option<(&K, &V)> {
let mut cursor = self.0.cursor::<MapKeyRef<'_, K>>(&());
let key = MapKeyRef(Some(key));
cursor.seek(&key, Bias::Right, &());
cursor.prev(&());
cursor.seek(&key, Bias::Right);
cursor.prev();
cursor.item().map(|item| (&item.key, &item.value))
}
pub fn iter_from<'a>(&'a self, from: &K) -> impl Iterator<Item = (&'a K, &'a V)> + 'a {
let mut cursor = self.0.cursor::<MapKeyRef<'_, K>>(&());
let from_key = MapKeyRef(Some(from));
cursor.seek(&from_key, Bias::Left, &());
cursor.seek(&from_key, Bias::Left);
cursor.map(|map_entry| (&map_entry.key, &map_entry.value))
}
@ -131,15 +131,15 @@ impl<K: Clone + Ord, V: Clone> TreeMap<K, V> {
{
let mut cursor = self.0.cursor::<MapKeyRef<'_, K>>(&());
let key = MapKeyRef(Some(key));
let mut new_tree = cursor.slice(&key, Bias::Left, &());
let mut new_tree = cursor.slice(&key, Bias::Left);
let mut result = None;
if key.cmp(&cursor.end(&()), &()) == Ordering::Equal {
if key.cmp(&cursor.end(), &()) == Ordering::Equal {
let mut updated = cursor.item().unwrap().clone();
result = Some(f(&mut updated.value));
new_tree.push(updated, &());
cursor.next(&());
cursor.next();
}
new_tree.append(cursor.suffix(&()), &());
new_tree.append(cursor.suffix(), &());
drop(cursor);
self.0 = new_tree;
result
@ -149,12 +149,12 @@ impl<K: Clone + Ord, V: Clone> TreeMap<K, V> {
let mut new_map = SumTree::<MapEntry<K, V>>::default();
let mut cursor = self.0.cursor::<MapKeyRef<'_, K>>(&());
cursor.next(&());
cursor.next();
while let Some(item) = cursor.item() {
if predicate(&item.key, &item.value) {
new_map.push(item.clone(), &());
}
cursor.next(&());
cursor.next();
}
drop(cursor);

View file

@ -101,7 +101,7 @@ impl Anchor {
} else {
let fragment_id = buffer.fragment_id_for_anchor(self);
let mut fragment_cursor = buffer.fragments.cursor::<(Option<&Locator>, usize)>(&None);
fragment_cursor.seek(&Some(fragment_id), Bias::Left, &None);
fragment_cursor.seek(&Some(fragment_id), Bias::Left);
fragment_cursor
.item()
.map_or(false, |fragment| fragment.visible)

View file

@ -320,7 +320,39 @@ impl History {
last_edit_at: now,
suppress_grouping: false,
});
self.redo_stack.clear();
}
/// Differs from `push_transaction` in that it does not clear the redo
/// stack. Intended to be used to create a parent transaction to merge
/// potential child transactions into.
///
/// The caller is responsible for removing it from the undo history using
/// `forget_transaction` if no edits are merged into it. Otherwise, if edits
/// are merged into this transaction, the caller is responsible for ensuring
/// the redo stack is cleared. The easiest way to ensure the redo stack is
/// cleared is to create transactions with the usual `start_transaction` and
/// `end_transaction` methods and merging the resulting transactions into
/// the transaction created by this method
fn push_empty_transaction(
&mut self,
start: clock::Global,
now: Instant,
clock: &mut clock::Lamport,
) -> TransactionId {
assert_eq!(self.transaction_depth, 0);
let id = clock.tick();
let transaction = Transaction {
id,
start,
edit_ids: Vec::new(),
};
self.undo_stack.push(HistoryEntry {
transaction,
first_edit_at: now,
last_edit_at: now,
suppress_grouping: false,
});
id
}
fn push_undo(&mut self, op_id: clock::Lamport) {
@ -824,14 +856,13 @@ impl Buffer {
let mut new_ropes =
RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0));
let mut old_fragments = self.fragments.cursor::<FragmentTextSummary>(&None);
let mut new_fragments =
old_fragments.slice(&edits.peek().unwrap().0.start, Bias::Right, &None);
let mut new_fragments = old_fragments.slice(&edits.peek().unwrap().0.start, Bias::Right);
new_ropes.append(new_fragments.summary().text);
let mut fragment_start = old_fragments.start().visible;
for (range, new_text) in edits {
let new_text = LineEnding::normalize_arc(new_text.into());
let fragment_end = old_fragments.end(&None).visible;
let fragment_end = old_fragments.end().visible;
// If the current fragment ends before this range, then jump ahead to the first fragment
// that extends past the start of this range, reusing any intervening fragments.
@ -847,10 +878,10 @@ impl Buffer {
new_ropes.push_fragment(&suffix, suffix.visible);
new_fragments.push(suffix, &None);
}
old_fragments.next(&None);
old_fragments.next();
}
let slice = old_fragments.slice(&range.start, Bias::Right, &None);
let slice = old_fragments.slice(&range.start, Bias::Right);
new_ropes.append(slice.summary().text);
new_fragments.append(slice, &None);
fragment_start = old_fragments.start().visible;
@ -903,7 +934,7 @@ impl Buffer {
// portions as deleted.
while fragment_start < range.end {
let fragment = old_fragments.item().unwrap();
let fragment_end = old_fragments.end(&None).visible;
let fragment_end = old_fragments.end().visible;
let mut intersection = fragment.clone();
let intersection_end = cmp::min(range.end, fragment_end);
if fragment.visible {
@ -930,7 +961,7 @@ impl Buffer {
fragment_start = intersection_end;
}
if fragment_end <= range.end {
old_fragments.next(&None);
old_fragments.next();
}
}
@ -942,7 +973,7 @@ impl Buffer {
// If the current fragment has been partially consumed, then consume the rest of it
// and advance to the next fragment before slicing.
if fragment_start > old_fragments.start().visible {
let fragment_end = old_fragments.end(&None).visible;
let fragment_end = old_fragments.end().visible;
if fragment_end > fragment_start {
let mut suffix = old_fragments.item().unwrap().clone();
suffix.len = fragment_end - fragment_start;
@ -951,10 +982,10 @@ impl Buffer {
new_ropes.push_fragment(&suffix, suffix.visible);
new_fragments.push(suffix, &None);
}
old_fragments.next(&None);
old_fragments.next();
}
let suffix = old_fragments.suffix(&None);
let suffix = old_fragments.suffix();
new_ropes.append(suffix.summary().text);
new_fragments.append(suffix, &None);
let (visible_text, deleted_text) = new_ropes.finish();
@ -1041,16 +1072,13 @@ impl Buffer {
let mut new_ropes =
RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0));
let mut old_fragments = self.fragments.cursor::<(VersionedFullOffset, usize)>(&cx);
let mut new_fragments = old_fragments.slice(
&VersionedFullOffset::Offset(ranges[0].start),
Bias::Left,
&cx,
);
let mut new_fragments =
old_fragments.slice(&VersionedFullOffset::Offset(ranges[0].start), Bias::Left);
new_ropes.append(new_fragments.summary().text);
let mut fragment_start = old_fragments.start().0.full_offset();
for (range, new_text) in edits {
let fragment_end = old_fragments.end(&cx).0.full_offset();
let fragment_end = old_fragments.end().0.full_offset();
// If the current fragment ends before this range, then jump ahead to the first fragment
// that extends past the start of this range, reusing any intervening fragments.
@ -1067,18 +1095,18 @@ impl Buffer {
new_ropes.push_fragment(&suffix, suffix.visible);
new_fragments.push(suffix, &None);
}
old_fragments.next(&cx);
old_fragments.next();
}
let slice =
old_fragments.slice(&VersionedFullOffset::Offset(range.start), Bias::Left, &cx);
old_fragments.slice(&VersionedFullOffset::Offset(range.start), Bias::Left);
new_ropes.append(slice.summary().text);
new_fragments.append(slice, &None);
fragment_start = old_fragments.start().0.full_offset();
}
// If we are at the end of a non-concurrent fragment, advance to the next one.
let fragment_end = old_fragments.end(&cx).0.full_offset();
let fragment_end = old_fragments.end().0.full_offset();
if fragment_end == range.start && fragment_end > fragment_start {
let mut fragment = old_fragments.item().unwrap().clone();
fragment.len = fragment_end.0 - fragment_start.0;
@ -1086,7 +1114,7 @@ impl Buffer {
new_insertions.push(InsertionFragment::insert_new(&fragment));
new_ropes.push_fragment(&fragment, fragment.visible);
new_fragments.push(fragment, &None);
old_fragments.next(&cx);
old_fragments.next();
fragment_start = old_fragments.start().0.full_offset();
}
@ -1096,7 +1124,7 @@ impl Buffer {
if fragment_start == range.start && fragment.timestamp > timestamp {
new_ropes.push_fragment(fragment, fragment.visible);
new_fragments.push(fragment.clone(), &None);
old_fragments.next(&cx);
old_fragments.next();
debug_assert_eq!(fragment_start, range.start);
} else {
break;
@ -1152,7 +1180,7 @@ impl Buffer {
// portions as deleted.
while fragment_start < range.end {
let fragment = old_fragments.item().unwrap();
let fragment_end = old_fragments.end(&cx).0.full_offset();
let fragment_end = old_fragments.end().0.full_offset();
let mut intersection = fragment.clone();
let intersection_end = cmp::min(range.end, fragment_end);
if fragment.was_visible(version, &self.undo_map) {
@ -1181,7 +1209,7 @@ impl Buffer {
fragment_start = intersection_end;
}
if fragment_end <= range.end {
old_fragments.next(&cx);
old_fragments.next();
}
}
}
@ -1189,7 +1217,7 @@ impl Buffer {
// If the current fragment has been partially consumed, then consume the rest of it
// and advance to the next fragment before slicing.
if fragment_start > old_fragments.start().0.full_offset() {
let fragment_end = old_fragments.end(&cx).0.full_offset();
let fragment_end = old_fragments.end().0.full_offset();
if fragment_end > fragment_start {
let mut suffix = old_fragments.item().unwrap().clone();
suffix.len = fragment_end.0 - fragment_start.0;
@ -1198,10 +1226,10 @@ impl Buffer {
new_ropes.push_fragment(&suffix, suffix.visible);
new_fragments.push(suffix, &None);
}
old_fragments.next(&cx);
old_fragments.next();
}
let suffix = old_fragments.suffix(&cx);
let suffix = old_fragments.suffix();
new_ropes.append(suffix.summary().text);
new_fragments.append(suffix, &None);
let (visible_text, deleted_text) = new_ropes.finish();
@ -1250,7 +1278,6 @@ impl Buffer {
split_offset: insertion_slice.range.start,
},
Bias::Left,
&(),
);
}
while let Some(item) = insertions_cursor.item() {
@ -1260,7 +1287,7 @@ impl Buffer {
break;
}
fragment_ids.push(&item.fragment_id);
insertions_cursor.next(&());
insertions_cursor.next();
}
}
fragment_ids.sort_unstable();
@ -1277,7 +1304,7 @@ impl Buffer {
RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0));
for fragment_id in self.fragment_ids_for_edits(undo.counts.keys()) {
let preceding_fragments = old_fragments.slice(&Some(fragment_id), Bias::Left, &None);
let preceding_fragments = old_fragments.slice(&Some(fragment_id), Bias::Left);
new_ropes.append(preceding_fragments.summary().text);
new_fragments.append(preceding_fragments, &None);
@ -1304,11 +1331,11 @@ impl Buffer {
new_ropes.push_fragment(&fragment, fragment_was_visible);
new_fragments.push(fragment, &None);
old_fragments.next(&None);
old_fragments.next();
}
}
let suffix = old_fragments.suffix(&None);
let suffix = old_fragments.suffix();
new_ropes.append(suffix.summary().text);
new_fragments.append(suffix, &None);
@ -1495,6 +1522,24 @@ impl Buffer {
self.history.push_transaction(transaction, now);
}
/// Differs from `push_transaction` in that it does not clear the redo stack.
/// The caller responsible for
/// Differs from `push_transaction` in that it does not clear the redo
/// stack. Intended to be used to create a parent transaction to merge
/// potential child transactions into.
///
/// The caller is responsible for removing it from the undo history using
/// `forget_transaction` if no edits are merged into it. Otherwise, if edits
/// are merged into this transaction, the caller is responsible for ensuring
/// the redo stack is cleared. The easiest way to ensure the redo stack is
/// cleared is to create transactions with the usual `start_transaction` and
/// `end_transaction` methods and merging the resulting transactions into
/// the transaction created by this method
pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
self.history
.push_empty_transaction(self.version.clone(), now, &mut self.lamport_clock)
}
pub fn edited_ranges_for_transaction_id<D>(
&self,
transaction_id: TransactionId,
@ -1521,7 +1566,7 @@ impl Buffer {
.fragment_ids_for_edits(edit_ids.into_iter())
.into_iter()
.filter_map(move |fragment_id| {
cursor.seek_forward(&Some(fragment_id), Bias::Left, &None);
cursor.seek_forward(&Some(fragment_id), Bias::Left);
let fragment = cursor.item()?;
let start_offset = cursor.start().1;
let end_offset = start_offset + if fragment.visible { fragment.len } else { 0 };
@ -1743,7 +1788,7 @@ impl Buffer {
let mut cursor = self.snapshot.fragments.cursor::<Option<&Locator>>(&None);
for insertion_fragment in self.snapshot.insertions.cursor::<()>(&()) {
cursor.seek(&Some(&insertion_fragment.fragment_id), Bias::Left, &None);
cursor.seek(&Some(&insertion_fragment.fragment_id), Bias::Left);
let fragment = cursor.item().unwrap();
assert_eq!(insertion_fragment.fragment_id, fragment.id);
assert_eq!(insertion_fragment.split_offset, fragment.insertion_offset);
@ -1862,7 +1907,7 @@ impl BufferSnapshot {
.filter::<_, FragmentTextSummary>(&None, move |summary| {
!version.observed_all(&summary.max_version)
});
cursor.next(&None);
cursor.next();
let mut visible_cursor = self.visible_text.cursor(0);
let mut deleted_cursor = self.deleted_text.cursor(0);
@ -1875,18 +1920,18 @@ impl BufferSnapshot {
if fragment.was_visible(version, &self.undo_map) {
if fragment.visible {
let text = visible_cursor.slice(cursor.end(&None).visible);
let text = visible_cursor.slice(cursor.end().visible);
rope.append(text);
} else {
deleted_cursor.seek_forward(cursor.start().deleted);
let text = deleted_cursor.slice(cursor.end(&None).deleted);
let text = deleted_cursor.slice(cursor.end().deleted);
rope.append(text);
}
} else if fragment.visible {
visible_cursor.seek_forward(cursor.end(&None).visible);
visible_cursor.seek_forward(cursor.end().visible);
}
cursor.next(&None);
cursor.next();
}
if cursor.start().visible > visible_cursor.offset() {
@ -2202,7 +2247,7 @@ impl BufferSnapshot {
timestamp: anchor.timestamp,
split_offset: anchor.offset,
};
insertion_cursor.seek(&anchor_key, anchor.bias, &());
insertion_cursor.seek(&anchor_key, anchor.bias);
if let Some(insertion) = insertion_cursor.item() {
let comparison = sum_tree::KeyedItem::key(insertion).cmp(&anchor_key);
if comparison == Ordering::Greater
@ -2210,15 +2255,15 @@ impl BufferSnapshot {
&& comparison == Ordering::Equal
&& anchor.offset > 0)
{
insertion_cursor.prev(&());
insertion_cursor.prev();
}
} else {
insertion_cursor.prev(&());
insertion_cursor.prev();
}
let insertion = insertion_cursor.item().expect("invalid insertion");
assert_eq!(insertion.timestamp, anchor.timestamp, "invalid insertion");
fragment_cursor.seek_forward(&Some(&insertion.fragment_id), Bias::Left, &None);
fragment_cursor.seek_forward(&Some(&insertion.fragment_id), Bias::Left);
let fragment = fragment_cursor.item().unwrap();
let mut fragment_offset = fragment_cursor.start().1;
if fragment.visible {
@ -2249,7 +2294,7 @@ impl BufferSnapshot {
split_offset: anchor.offset,
};
let mut insertion_cursor = self.insertions.cursor::<InsertionFragmentKey>(&());
insertion_cursor.seek(&anchor_key, anchor.bias, &());
insertion_cursor.seek(&anchor_key, anchor.bias);
if let Some(insertion) = insertion_cursor.item() {
let comparison = sum_tree::KeyedItem::key(insertion).cmp(&anchor_key);
if comparison == Ordering::Greater
@ -2257,10 +2302,10 @@ impl BufferSnapshot {
&& comparison == Ordering::Equal
&& anchor.offset > 0)
{
insertion_cursor.prev(&());
insertion_cursor.prev();
}
} else {
insertion_cursor.prev(&());
insertion_cursor.prev();
}
let Some(insertion) = insertion_cursor
@ -2274,7 +2319,7 @@ impl BufferSnapshot {
};
let mut fragment_cursor = self.fragments.cursor::<(Option<&Locator>, usize)>(&None);
fragment_cursor.seek(&Some(&insertion.fragment_id), Bias::Left, &None);
fragment_cursor.seek(&Some(&insertion.fragment_id), Bias::Left);
let fragment = fragment_cursor.item().unwrap();
let mut fragment_offset = fragment_cursor.start().1;
if fragment.visible {
@ -2295,7 +2340,7 @@ impl BufferSnapshot {
split_offset: anchor.offset,
};
let mut insertion_cursor = self.insertions.cursor::<InsertionFragmentKey>(&());
insertion_cursor.seek(&anchor_key, anchor.bias, &());
insertion_cursor.seek(&anchor_key, anchor.bias);
if let Some(insertion) = insertion_cursor.item() {
let comparison = sum_tree::KeyedItem::key(insertion).cmp(&anchor_key);
if comparison == Ordering::Greater
@ -2303,10 +2348,10 @@ impl BufferSnapshot {
&& comparison == Ordering::Equal
&& anchor.offset > 0)
{
insertion_cursor.prev(&());
insertion_cursor.prev();
}
} else {
insertion_cursor.prev(&());
insertion_cursor.prev();
}
let Some(insertion) = insertion_cursor.item().filter(|insertion| {
@ -2345,7 +2390,7 @@ impl BufferSnapshot {
Anchor::MAX
} else {
let mut fragment_cursor = self.fragments.cursor::<usize>(&None);
fragment_cursor.seek(&offset, bias, &None);
fragment_cursor.seek(&offset, bias);
let fragment = fragment_cursor.item().unwrap();
let overshoot = offset - *fragment_cursor.start();
Anchor {
@ -2425,7 +2470,7 @@ impl BufferSnapshot {
let mut cursor = self.fragments.filter(&None, move |summary| {
!since.observed_all(&summary.max_version)
});
cursor.next(&None);
cursor.next();
Some(cursor)
};
let mut cursor = self
@ -2433,7 +2478,7 @@ impl BufferSnapshot {
.cursor::<(Option<&Locator>, FragmentTextSummary)>(&None);
let start_fragment_id = self.fragment_id_for_anchor(&range.start);
cursor.seek(&Some(start_fragment_id), Bias::Left, &None);
cursor.seek(&Some(start_fragment_id), Bias::Left);
let mut visible_start = cursor.start().1.visible;
let mut deleted_start = cursor.start().1.deleted;
if let Some(fragment) = cursor.item() {
@ -2466,7 +2511,7 @@ impl BufferSnapshot {
let mut cursor = self.fragments.filter::<_, usize>(&None, move |summary| {
!since.observed_all(&summary.max_version)
});
cursor.next(&None);
cursor.next();
while let Some(fragment) = cursor.item() {
if fragment.id > *end_fragment_id {
break;
@ -2478,7 +2523,7 @@ impl BufferSnapshot {
return true;
}
}
cursor.next(&None);
cursor.next();
}
}
false
@ -2489,14 +2534,14 @@ impl BufferSnapshot {
let mut cursor = self.fragments.filter::<_, usize>(&None, move |summary| {
!since.observed_all(&summary.max_version)
});
cursor.next(&None);
cursor.next();
while let Some(fragment) = cursor.item() {
let was_visible = fragment.was_visible(since, &self.undo_map);
let is_visible = fragment.visible;
if was_visible != is_visible {
return true;
}
cursor.next(&None);
cursor.next();
}
}
false
@ -2601,7 +2646,7 @@ impl<D: TextDimension + Ord, F: FnMut(&FragmentSummary) -> bool> Iterator for Ed
while let Some(fragment) = cursor.item() {
if fragment.id < *self.range.start.0 {
cursor.next(&None);
cursor.next();
continue;
} else if fragment.id > *self.range.end.0 {
break;
@ -2634,7 +2679,7 @@ impl<D: TextDimension + Ord, F: FnMut(&FragmentSummary) -> bool> Iterator for Ed
};
if !fragment.was_visible(self.since, self.undos) && fragment.visible {
let mut visible_end = cursor.end(&None).visible;
let mut visible_end = cursor.end().visible;
if fragment.id == *self.range.end.0 {
visible_end = cmp::min(
visible_end,
@ -2660,7 +2705,7 @@ impl<D: TextDimension + Ord, F: FnMut(&FragmentSummary) -> bool> Iterator for Ed
self.new_end = new_end;
} else if fragment.was_visible(self.since, self.undos) && !fragment.visible {
let mut deleted_end = cursor.end(&None).deleted;
let mut deleted_end = cursor.end().deleted;
if fragment.id == *self.range.end.0 {
deleted_end = cmp::min(
deleted_end,
@ -2690,7 +2735,7 @@ impl<D: TextDimension + Ord, F: FnMut(&FragmentSummary) -> bool> Iterator for Ed
self.old_end = old_end;
}
cursor.next(&None);
cursor.next();
}
pending_edit

View file

@ -74,7 +74,6 @@ impl UndoMap {
undo_id: Default::default(),
},
Bias::Left,
&(),
);
let mut undo_count = 0;
@ -99,7 +98,6 @@ impl UndoMap {
undo_id: Default::default(),
},
Bias::Left,
&(),
);
let mut undo_count = 0;

View file

@ -11,8 +11,8 @@ use gpui::{App, Task, Window, actions};
use rpc::proto::{self};
use theme::ActiveTheme;
use ui::{
Avatar, AvatarAudioStatusIndicator, ContextMenu, ContextMenuItem, Facepile, PopoverMenu,
SplitButton, TintColor, Tooltip, prelude::*,
Avatar, AvatarAudioStatusIndicator, ContextMenu, ContextMenuItem, Divider, Facepile,
PopoverMenu, SplitButton, SplitButtonStyle, TintColor, Tooltip, prelude::*,
};
use util::maybe;
use workspace::notifications::DetachAndPromptErr;
@ -383,6 +383,7 @@ impl TitleBar {
.detach_and_log_err(cx);
}),
)
.child(Divider::vertical())
.into_any_element(),
);
@ -497,6 +498,7 @@ impl TitleBar {
trigger.render(window, cx),
self.render_screen_list().into_any_element(),
)
.style(SplitButtonStyle::Outlined)
.into_any_element(),
);
}
@ -547,10 +549,17 @@ impl TitleBar {
entry_render: Box::new(move |_, _| {
h_flex()
.gap_2()
.child(Icon::new(IconName::Screen).when(
active_screenshare_id == Some(meta.id),
|this| this.color(Color::Accent),
))
.child(
Icon::new(IconName::Screen)
.size(IconSize::XSmall)
.map(|this| {
if active_screenshare_id == Some(meta.id) {
this.color(Color::Accent)
} else {
this.color(Color::Muted)
}
}),
)
.child(Label::new(label.clone()))
.child(
Label::new(resolution.clone())

View file

@ -1,6 +1,6 @@
use gpui::{
AnyElement, App, BoxShadow, IntoElement, ParentElement, RenderOnce, Styled, Window, div, hsla,
point, px,
point, prelude::FluentBuilder, px,
};
use theme::ActiveTheme;
@ -8,6 +8,12 @@ use crate::{ElevationIndex, h_flex};
use super::ButtonLike;
#[derive(Clone, Copy, PartialEq)]
pub enum SplitButtonStyle {
Filled,
Outlined,
}
/// /// A button with two parts: a primary action on the left and a secondary action on the right.
///
/// The left side is a [`ButtonLike`] with the main action, while the right side can contain
@ -18,11 +24,21 @@ use super::ButtonLike;
pub struct SplitButton {
pub left: ButtonLike,
pub right: AnyElement,
style: SplitButtonStyle,
}
impl SplitButton {
pub fn new(left: ButtonLike, right: AnyElement) -> Self {
Self { left, right }
Self {
left,
right,
style: SplitButtonStyle::Filled,
}
}
pub fn style(mut self, style: SplitButtonStyle) -> Self {
self.style = style;
self
}
}
@ -31,21 +47,23 @@ impl RenderOnce for SplitButton {
h_flex()
.rounded_sm()
.border_1()
.border_color(cx.theme().colors().text_muted.alpha(0.12))
.border_color(cx.theme().colors().border.opacity(0.5))
.child(div().flex_grow().child(self.left))
.child(
div()
.h_full()
.w_px()
.bg(cx.theme().colors().text_muted.alpha(0.16)),
.bg(cx.theme().colors().border.opacity(0.5)),
)
.child(self.right)
.bg(ElevationIndex::Surface.on_elevation_bg(cx))
.when(self.style == SplitButtonStyle::Filled, |this| {
this.bg(ElevationIndex::Surface.on_elevation_bg(cx))
.shadow(vec![BoxShadow {
color: hsla(0.0, 0.0, 0.0, 0.16),
offset: point(px(0.), px(1.)),
blur_radius: px(0.),
spread_radius: px(0.),
}])
})
}
}

View file

@ -84,7 +84,9 @@ impl RenderOnce for List {
(false, _) => this.children(self.children),
(true, Some(false)) => this,
(true, _) => match self.empty_message {
EmptyMessage::Text(text) => this.child(Label::new(text).color(Color::Muted)),
EmptyMessage::Text(text) => {
this.px_2().child(Label::new(text).color(Color::Muted))
}
EmptyMessage::Element(element) => this.child(element),
},
})

View file

@ -93,6 +93,7 @@ impl RenderOnce for Modal {
#[derive(IntoElement)]
pub struct ModalHeader {
headline: Option<SharedString>,
description: Option<SharedString>,
children: SmallVec<[AnyElement; 2]>,
show_dismiss_button: bool,
show_back_button: bool,
@ -108,6 +109,7 @@ impl ModalHeader {
pub fn new() -> Self {
Self {
headline: None,
description: None,
children: SmallVec::new(),
show_dismiss_button: false,
show_back_button: false,
@ -123,6 +125,11 @@ impl ModalHeader {
self
}
pub fn description(mut self, description: impl Into<SharedString>) -> Self {
self.description = Some(description.into());
self
}
pub fn show_dismiss_button(mut self, show: bool) -> Self {
self.show_dismiss_button = show;
self
@ -171,7 +178,14 @@ impl RenderOnce for ModalHeader {
}),
)
})
.child(div().flex_1().children(children))
.child(
v_flex().flex_1().children(children).when_some(
self.description,
|this, description| {
this.child(Label::new(description).color(Color::Muted).mb_2())
},
),
)
.when(self.show_dismiss_button, |this| {
this.child(
IconButton::new("dismiss", IconName::Close)

View file

@ -588,7 +588,7 @@ impl SwitchField {
toggle_state: toggle_state.into(),
on_click: Arc::new(on_click),
disabled: false,
color: SwitchColor::default(),
color: SwitchColor::Accent,
}
}
@ -634,6 +634,15 @@ impl RenderOnce for SwitchField {
}
}),
)
.when(!self.disabled, |this| {
this.on_click({
let on_click = self.on_click.clone();
let toggle_state = self.toggle_state;
move |_click, window, cx| {
(on_click)(&toggle_state.inverse(), window, cx);
}
})
})
}
}

View file

@ -97,6 +97,10 @@ impl SingleLineInput {
pub fn editor(&self) -> &Entity<Editor> {
&self.editor
}
pub fn text(&self, cx: &App) -> String {
self.editor().read(cx).text(cx)
}
}
impl Render for SingleLineInput {

View file

@ -1,4 +1,4 @@
use client::{TelemetrySettings, telemetry::Telemetry};
use client::{DisableAiSettings, TelemetrySettings, telemetry::Telemetry};
use db::kvp::KEY_VALUE_STORE;
use gpui::{
Action, App, Context, Entity, EventEmitter, FocusHandle, Focusable, InteractiveElement,
@ -174,9 +174,10 @@ impl Render for WelcomePage {
.ok();
})),
)
.child(
.when(!DisableAiSettings::get_global(cx).disable_ai, |parent| {
parent.child(
Button::new(
"try-zed-edit-prediction",
"edit_prediction_onboarding",
edit_prediction_label,
)
.disabled(edit_prediction_provider_is_zed)
@ -191,6 +192,7 @@ impl Render for WelcomePage {
}),
),
)
})
.child(
Button::new("edit settings", "Edit Settings")
.icon(IconName::Settings)

View file

@ -242,6 +242,7 @@ struct PanelEntry {
pub struct PanelButtons {
dock: Entity<Dock>,
_settings_subscription: Subscription,
}
impl Dock {
@ -373,6 +374,12 @@ impl Dock {
})
}
pub fn first_enabled_panel_idx_excluding(&self, exclude_name: &str, cx: &App) -> Option<usize> {
self.panel_entries.iter().position(|entry| {
entry.panel.persistent_name() != exclude_name && entry.panel.enabled(cx)
})
}
fn active_panel_entry(&self) -> Option<&PanelEntry> {
self.active_panel_index
.and_then(|index| self.panel_entries.get(index))
@ -833,7 +840,11 @@ impl Render for Dock {
impl PanelButtons {
pub fn new(dock: Entity<Dock>, cx: &mut Context<Self>) -> Self {
cx.observe(&dock, |_, _, cx| cx.notify()).detach();
Self { dock }
let settings_subscription = cx.observe_global::<SettingsStore>(|_, cx| cx.notify());
Self {
dock,
_settings_subscription: settings_subscription,
}
}
}

View file

@ -3239,10 +3239,13 @@ impl Pane {
split_direction = None;
}
if let Ok(open_task) = workspace.update_in(cx, |workspace, window, cx| {
if let Ok((open_task, to_pane)) =
workspace.update_in(cx, |workspace, window, cx| {
if let Some(split_direction) = split_direction {
to_pane = workspace.split_pane(to_pane, split_direction, window, cx);
to_pane =
workspace.split_pane(to_pane, split_direction, window, cx);
}
(
workspace.open_paths(
paths,
OpenOptions {
@ -3252,15 +3255,21 @@ impl Pane {
Some(to_pane.downgrade()),
window,
cx,
),
to_pane,
)
}) {
})
{
let opened_items: Vec<_> = open_task.await;
_ = workspace.update(cx, |workspace, cx| {
_ = workspace.update_in(cx, |workspace, window, cx| {
for item in opened_items.into_iter().flatten() {
if let Err(e) = item {
workspace.show_error(&e, cx);
}
}
if to_pane.read(cx).items_len() == 0 {
workspace.remove_pane(to_pane, None, window, cx);
}
});
}
})

View file

@ -2454,16 +2454,16 @@ impl Snapshot {
self.entries_by_path = {
let mut cursor = self.entries_by_path.cursor::<TraversalProgress>(&());
let mut new_entries_by_path =
cursor.slice(&TraversalTarget::path(&removed_entry.path), Bias::Left, &());
cursor.slice(&TraversalTarget::path(&removed_entry.path), Bias::Left);
while let Some(entry) = cursor.item() {
if entry.path.starts_with(&removed_entry.path) {
self.entries_by_id.remove(&entry.id, &());
cursor.next(&());
cursor.next();
} else {
break;
}
}
new_entries_by_path.append(cursor.suffix(&()), &());
new_entries_by_path.append(cursor.suffix(), &());
new_entries_by_path
};
@ -2576,7 +2576,6 @@ impl Snapshot {
include_ignored,
},
Bias::Right,
&(),
);
Traversal {
snapshot: self,
@ -2632,7 +2631,7 @@ impl Snapshot {
options: ChildEntriesOptions,
) -> ChildEntriesIter<'a> {
let mut cursor = self.entries_by_path.cursor(&());
cursor.seek(&TraversalTarget::path(parent_path), Bias::Right, &());
cursor.seek(&TraversalTarget::path(parent_path), Bias::Right);
let traversal = Traversal {
snapshot: self,
cursor,
@ -3056,9 +3055,9 @@ impl BackgroundScannerState {
.snapshot
.entries_by_path
.cursor::<TraversalProgress>(&());
new_entries = cursor.slice(&TraversalTarget::path(path), Bias::Left, &());
removed_entries = cursor.slice(&TraversalTarget::successor(path), Bias::Left, &());
new_entries.append(cursor.suffix(&()), &());
new_entries = cursor.slice(&TraversalTarget::path(path), Bias::Left);
removed_entries = cursor.slice(&TraversalTarget::successor(path), Bias::Left);
new_entries.append(cursor.suffix(), &());
}
self.snapshot.entries_by_path = new_entries;
@ -4925,15 +4924,15 @@ fn build_diff(
let mut old_paths = old_snapshot.entries_by_path.cursor::<PathKey>(&());
let mut new_paths = new_snapshot.entries_by_path.cursor::<PathKey>(&());
let mut last_newly_loaded_dir_path = None;
old_paths.next(&());
new_paths.next(&());
old_paths.next();
new_paths.next();
for path in event_paths {
let path = PathKey(path.clone());
if old_paths.item().map_or(false, |e| e.path < path.0) {
old_paths.seek_forward(&path, Bias::Left, &());
old_paths.seek_forward(&path, Bias::Left);
}
if new_paths.item().map_or(false, |e| e.path < path.0) {
new_paths.seek_forward(&path, Bias::Left, &());
new_paths.seek_forward(&path, Bias::Left);
}
loop {
match (old_paths.item(), new_paths.item()) {
@ -4949,7 +4948,7 @@ fn build_diff(
match Ord::cmp(&old_entry.path, &new_entry.path) {
Ordering::Less => {
changes.push((old_entry.path.clone(), old_entry.id, Removed));
old_paths.next(&());
old_paths.next();
}
Ordering::Equal => {
if phase == EventsReceivedDuringInitialScan {
@ -4975,8 +4974,8 @@ fn build_diff(
changes.push((new_entry.path.clone(), new_entry.id, Updated));
}
}
old_paths.next(&());
new_paths.next(&());
old_paths.next();
new_paths.next();
}
Ordering::Greater => {
let is_newly_loaded = phase == InitialScan
@ -4988,13 +4987,13 @@ fn build_diff(
new_entry.id,
if is_newly_loaded { Loaded } else { Added },
));
new_paths.next(&());
new_paths.next();
}
}
}
(Some(old_entry), None) => {
changes.push((old_entry.path.clone(), old_entry.id, Removed));
old_paths.next(&());
old_paths.next();
}
(None, Some(new_entry)) => {
let is_newly_loaded = phase == InitialScan
@ -5006,7 +5005,7 @@ fn build_diff(
new_entry.id,
if is_newly_loaded { Loaded } else { Added },
));
new_paths.next(&());
new_paths.next();
}
(None, None) => break,
}
@ -5255,7 +5254,7 @@ impl<'a> Traversal<'a> {
start_path: &Path,
) -> Self {
let mut cursor = snapshot.entries_by_path.cursor(&());
cursor.seek(&TraversalTarget::path(start_path), Bias::Left, &());
cursor.seek(&TraversalTarget::path(start_path), Bias::Left);
let mut traversal = Self {
snapshot,
cursor,
@ -5282,14 +5281,13 @@ impl<'a> Traversal<'a> {
include_ignored: self.include_ignored,
},
Bias::Left,
&(),
)
}
pub fn advance_to_sibling(&mut self) -> bool {
while let Some(entry) = self.cursor.item() {
self.cursor
.seek_forward(&TraversalTarget::successor(&entry.path), Bias::Left, &());
.seek_forward(&TraversalTarget::successor(&entry.path), Bias::Left);
if let Some(entry) = self.cursor.item() {
if (self.include_files || !entry.is_file())
&& (self.include_dirs || !entry.is_dir())
@ -5307,7 +5305,7 @@ impl<'a> Traversal<'a> {
return false;
};
self.cursor
.seek(&TraversalTarget::path(parent_path), Bias::Left, &())
.seek(&TraversalTarget::path(parent_path), Bias::Left)
}
pub fn entry(&self) -> Option<&'a Entry> {
@ -5326,7 +5324,7 @@ impl<'a> Traversal<'a> {
pub fn end_offset(&self) -> usize {
self.cursor
.end(&())
.end()
.count(self.include_files, self.include_dirs, self.include_ignored)
}
}

View file

@ -554,6 +554,7 @@ pub fn main() {
supermaven::init(app_state.client.clone(), cx);
language_model::init(app_state.client.clone(), cx);
language_models::init(app_state.user_store.clone(), app_state.client.clone(), cx);
agent_settings::init(cx);
agent_servers::init(cx);
web_search::init(cx);
web_search_providers::init(app_state.client.clone(), cx);

View file

@ -145,15 +145,15 @@ pub fn app_menus() -> Vec<Menu> {
items: vec![
MenuItem::action(
"Zoom In",
zed_actions::IncreaseBufferFontSize { persist: true },
zed_actions::IncreaseBufferFontSize { persist: false },
),
MenuItem::action(
"Zoom Out",
zed_actions::DecreaseBufferFontSize { persist: true },
zed_actions::DecreaseBufferFontSize { persist: false },
),
MenuItem::action(
"Reset Zoom",
zed_actions::ResetBufferFontSize { persist: true },
zed_actions::ResetBufferFontSize { persist: false },
),
MenuItem::separator(),
MenuItem::action("Toggle Left Dock", workspace::ToggleLeftDock),

View file

@ -2,6 +2,7 @@ mod preview;
mod repl_menu;
use agent_settings::AgentSettings;
use client::DisableAiSettings;
use editor::actions::{
AddSelectionAbove, AddSelectionBelow, CodeActionSource, DuplicateLineDown, GoToDiagnostic,
GoToHunk, GoToPreviousDiagnostic, GoToPreviousHunk, MoveLineDown, MoveLineUp, SelectAll,
@ -32,6 +33,7 @@ const MAX_CODE_ACTION_MENU_LINES: u32 = 16;
pub struct QuickActionBar {
_inlay_hints_enabled_subscription: Option<Subscription>,
_ai_settings_subscription: Subscription,
active_item: Option<Box<dyn ItemHandle>>,
buffer_search_bar: Entity<BufferSearchBar>,
show: bool,
@ -46,8 +48,28 @@ impl QuickActionBar {
workspace: &Workspace,
cx: &mut Context<Self>,
) -> Self {
let mut was_ai_disabled = DisableAiSettings::get_global(cx).disable_ai;
let mut was_agent_enabled = AgentSettings::get_global(cx).enabled;
let mut was_agent_button = AgentSettings::get_global(cx).button;
let ai_settings_subscription = cx.observe_global::<SettingsStore>(move |_, cx| {
let is_ai_disabled = DisableAiSettings::get_global(cx).disable_ai;
let agent_settings = AgentSettings::get_global(cx);
if was_ai_disabled != is_ai_disabled
|| was_agent_enabled != agent_settings.enabled
|| was_agent_button != agent_settings.button
{
was_ai_disabled = is_ai_disabled;
was_agent_enabled = agent_settings.enabled;
was_agent_button = agent_settings.button;
cx.notify();
}
});
let mut this = Self {
_inlay_hints_enabled_subscription: None,
_ai_settings_subscription: ai_settings_subscription,
active_item: None,
buffer_search_bar,
show: true,
@ -575,7 +597,9 @@ impl Render for QuickActionBar {
.children(self.render_preview_button(self.workspace.clone(), cx))
.children(search_button)
.when(
AgentSettings::get_global(cx).enabled && AgentSettings::get_global(cx).button,
AgentSettings::get_global(cx).enabled
&& AgentSettings::get_global(cx).button
&& !DisableAiSettings::get_global(cx).disable_ai,
|bar| bar.child(assistant_button),
)
.children(code_actions_dropdown)

View file

@ -1,10 +1,11 @@
use std::any::{Any, TypeId};
use client::DisableAiSettings;
use command_palette_hooks::CommandPaletteFilter;
use feature_flags::{FeatureFlagAppExt as _, PredictEditsRateCompletionsFeatureFlag};
use gpui::actions;
use language::language_settings::{AllLanguageSettings, EditPredictionProvider};
use settings::update_settings_file;
use settings::{Settings, SettingsStore, update_settings_file};
use ui::App;
use workspace::Workspace;
@ -21,6 +22,8 @@ actions!(
);
pub fn init(cx: &mut App) {
feature_gate_predict_edits_actions(cx);
cx.observe_new(move |workspace: &mut Workspace, _, _cx| {
workspace.register_action(|workspace, _: &RateCompletions, window, cx| {
if cx.has_flag::<PredictEditsRateCompletionsFeatureFlag>() {
@ -53,19 +56,48 @@ pub fn init(cx: &mut App) {
});
})
.detach();
feature_gate_predict_edits_rating_actions(cx);
}
fn feature_gate_predict_edits_rating_actions(cx: &mut App) {
fn feature_gate_predict_edits_actions(cx: &mut App) {
let rate_completion_action_types = [TypeId::of::<RateCompletions>()];
let reset_onboarding_action_types = [TypeId::of::<ResetOnboarding>()];
let zeta_all_action_types = [
TypeId::of::<RateCompletions>(),
TypeId::of::<ResetOnboarding>(),
zed_actions::OpenZedPredictOnboarding.type_id(),
TypeId::of::<crate::ClearHistory>(),
TypeId::of::<crate::ThumbsUpActiveCompletion>(),
TypeId::of::<crate::ThumbsDownActiveCompletion>(),
TypeId::of::<crate::NextEdit>(),
TypeId::of::<crate::PreviousEdit>(),
];
CommandPaletteFilter::update_global(cx, |filter, _cx| {
filter.hide_action_types(&rate_completion_action_types);
filter.hide_action_types(&reset_onboarding_action_types);
filter.hide_action_types(&[zed_actions::OpenZedPredictOnboarding.type_id()]);
});
cx.observe_global::<SettingsStore>(move |cx| {
let is_ai_disabled = DisableAiSettings::get_global(cx).disable_ai;
let has_feature_flag = cx.has_flag::<PredictEditsRateCompletionsFeatureFlag>();
CommandPaletteFilter::update_global(cx, |filter, _cx| {
if is_ai_disabled {
filter.hide_action_types(&zeta_all_action_types);
} else {
if has_feature_flag {
filter.show_action_types(rate_completion_action_types.iter());
} else {
filter.hide_action_types(&rate_completion_action_types);
}
}
});
})
.detach();
cx.observe_flag::<PredictEditsRateCompletionsFeatureFlag, _>(move |is_enabled, cx| {
if !DisableAiSettings::get_global(cx).disable_ai {
if is_enabled {
CommandPaletteFilter::update_global(cx, |filter, _cx| {
filter.show_action_types(rate_completion_action_types.iter());
@ -75,6 +107,7 @@ fn feature_gate_predict_edits_rating_actions(cx: &mut App) {
filter.hide_action_types(&rate_completion_action_types);
});
}
}
})
.detach();
}

View file

@ -444,14 +444,17 @@ Custom models will be listed in the model dropdown in the Agent Panel.
### OpenAI API Compatible {#openai-api-compatible}
Zed supports using OpenAI compatible APIs by specifying a custom `endpoint` and `available_models` for the OpenAI provider.
Zed supports using [OpenAI compatible APIs](https://platform.openai.com/docs/api-reference/chat) by specifying a custom `api_url` and `available_models` for the OpenAI provider. This is useful for connecting to other hosted services (like Together AI, Anyscale, etc.) or local models.
Zed supports using OpenAI compatible APIs by specifying a custom `api_url` and `available_models` for the OpenAI provider. This is useful for connecting to other hosted services (like Together AI, Anyscale, etc.) or local models.
To configure a compatible API, you can add a custom API URL for OpenAI either via the UI (currently available only in Preview) or by editing your `settings.json`.
To configure a compatible API, you can add a custom API URL for OpenAI either via the UI or by editing your `settings.json`. For example, to connect to [Together AI](https://www.together.ai/):
For example, to connect to [Together AI](https://www.together.ai/) via the UI:
1. Get an API key from your [Together AI account](https://api.together.ai/settings/api-keys).
2. Add the following to your `settings.json`:
2. Go to the Agent Panel's settings view, click on the "Add Provider" button, and then on the "OpenAI" menu item
3. Add the requested fields, such as `api_url`, `api_key`, available models, and others
Alternatively, you can also add it via the `settings.json`:
```json
{

View file

@ -39,7 +39,7 @@ CRATE_PATH="crates/$CRATE_NAME"
mkdir -p "$CRATE_PATH/src"
# Symlink the license
ln -sf "../../../$LICENSE_FILE" "$CRATE_PATH/$LICENSE_FILE"
ln -sf "../../$LICENSE_FILE" "$CRATE_PATH/$LICENSE_FILE"
CARGO_TOML_TEMPLATE=$(cat << 'EOF'
[package]