Compare commits

...
Sign in to create a new pull request.

15 commits

Author SHA1 Message Date
Joseph T. Lyons
cf60642f39 Remove unnecessary identifier 2023-12-01 11:49:55 -05:00
Joseph T. Lyons
ee869baea6 zed 0.114.4 2023-12-01 11:38:05 -05:00
Joseph T. Lyons
2915ac787d Fix bug preventing spaces from being used in filename 2023-12-01 11:37:32 -05:00
Max Brunsfeld
dd54724aee zed 0.114.3 2023-11-30 11:49:10 -08:00
Max Brunsfeld
662994e5f6 Bump Tree-sitter to fix another crash triggered by a markdown file (#3466)
Bumps Tree-sitter for
https://github.com/tree-sitter/tree-sitter/pull/2802

This fixes a regression introduced in the last Tree-sitter upgrade.

Release Notes:

- Fixed a crash that occurred when editing certain Markdown files.
2023-11-30 11:48:14 -08:00
Kirill Bulatov
a61726e8ce zed 0.114.2 2023-11-30 17:12:28 +02:00
Kirill Bulatov
b6eadc9af8 Fix TypeScript diagnostics (#3457)
Deals with https://github.com/zed-industries/community/issues/2124

* sends more ClientCapabilities LSP data, diagnostics capabilities in
particular: those are now required by typescript-language-server LSP to
start publishing diagnostics
* sends more parameters during eslint workspace initialization, so it is
able to correctly look up project's typescript config
Presumably, it's not enough and some convoluted project setups may
still break, but let's wait for examples and feedback.

Release Notes:

- Fixed typescript-language-server diagnostics not appearing for newer
server versions; fixed eslint diagnostics using wrong directory for
typescript config lookup
2023-11-30 13:26:11 +02:00
Joseph T. Lyons
9b37b9a047 v0.114.x stable 2023-11-29 11:08:11 -05:00
Max Brunsfeld
6c78458573 Upgrade Tree-sitter for stack-overflow bugfix (#3413)
Fixes https://github.com/zed-industries/community/issues/2290

This PR bumps Tree-sitter for
https://github.com/tree-sitter/tree-sitter/pull/2788.

Release Notes:

- Fixed a crash that could happen when opening certain large markdown
files.
2023-11-27 15:15:34 -08:00
Kirill Bulatov
3397cdb4be zed 0.114.1 2023-11-24 13:23:07 +02:00
Kirill Bulatov
e5c999e66b Ignore excluded entries' children FS events (#3400)
Deals with https://github.com/zed-industries/community/issues/2295 and
https://github.com/zed-industries/community/issues/2296

Release Notes:

- Fixed excluded .git files appearing in worktree after FS events
2023-11-24 13:18:43 +02:00
Joseph T. Lyons
69704c8c40 Add app close events (#3399)
Release Notes:

- N/A
2023-11-23 21:25:02 -05:00
Joseph T. Lyons
010d43b17f Add app events (#3372)
Adds app events (`first open` and `open`). For the time being, I'm
abandoning trying to add `close`, after running into many issues
trying. The code is in place for me to continue on that work, but at the
moment, we require having the telemetry settings in hand when calling
any of the methods that log an event, so we can honor the user's
preference for sending telemetry or not, but when running the
`on_app_close` method, to send off an app `close` event, the settings
are no longer available (probably the order of teardown?), which causes
some tests to end up failing. I'm not sure how to solve this. Maybe we
keep the settings on the telemetry struct and update it each time any
event is logged, then, on app shutdown, when logging the app `close`
event, we can use the stored version (idk).

Release Notes:

- N/A
2023-11-23 09:04:44 -05:00
Julia
02d737ae73 zed1: Cancel completion resolution when new list (#3389)
Release Notes:

- Fixed a bug where Zed would continue to request documentation for
completion lists which were stale or no longer visible.
2023-11-22 14:25:08 -05:00
Joseph T. Lyons
843846da2e v0.114.x preview 2023-11-22 12:57:14 -05:00
41 changed files with 761 additions and 252 deletions

4
Cargo.lock generated
View file

@ -9925,7 +9925,7 @@ dependencies = [
[[package]] [[package]]
name = "tree-sitter" name = "tree-sitter"
version = "0.20.10" version = "0.20.10"
source = "git+https://github.com/tree-sitter/tree-sitter?rev=35a6052fbcafc5e5fc0f9415b8652be7dcaf7222#35a6052fbcafc5e5fc0f9415b8652be7dcaf7222" source = "git+https://github.com/tree-sitter/tree-sitter?rev=b5f461a69bf3df7298b1903574d506179e6390b0#b5f461a69bf3df7298b1903574d506179e6390b0"
dependencies = [ dependencies = [
"cc", "cc",
"regex", "regex",
@ -11493,7 +11493,7 @@ dependencies = [
[[package]] [[package]]
name = "zed" name = "zed"
version = "0.114.0" version = "0.114.4"
dependencies = [ dependencies = [
"activity_indicator", "activity_indicator",
"ai", "ai",

View file

@ -195,8 +195,9 @@ tree-sitter-lua = "0.0.14"
tree-sitter-nix = { git = "https://github.com/nix-community/tree-sitter-nix", rev = "66e3e9ce9180ae08fc57372061006ef83f0abde7" } tree-sitter-nix = { git = "https://github.com/nix-community/tree-sitter-nix", rev = "66e3e9ce9180ae08fc57372061006ef83f0abde7" }
tree-sitter-nu = { git = "https://github.com/nushell/tree-sitter-nu", rev = "786689b0562b9799ce53e824cb45a1a2a04dc673"} tree-sitter-nu = { git = "https://github.com/nushell/tree-sitter-nu", rev = "786689b0562b9799ce53e824cb45a1a2a04dc673"}
tree-sitter-vue = {git = "https://github.com/zed-industries/tree-sitter-vue", rev = "9b6cb221ccb8d0b956fcb17e9a1efac2feefeb58"} tree-sitter-vue = {git = "https://github.com/zed-industries/tree-sitter-vue", rev = "9b6cb221ccb8d0b956fcb17e9a1efac2feefeb58"}
[patch.crates-io] [patch.crates-io]
tree-sitter = { git = "https://github.com/tree-sitter/tree-sitter", rev = "35a6052fbcafc5e5fc0f9415b8652be7dcaf7222" } tree-sitter = { git = "https://github.com/tree-sitter/tree-sitter", rev = "b5f461a69bf3df7298b1903574d506179e6390b0" }
async-task = { git = "https://github.com/zed-industries/async-task", rev = "341b57d6de98cdfd7b418567b8de2022ca993a6e" } async-task = { git = "https://github.com/zed-industries/async-task", rev = "341b57d6de98cdfd7b418567b8de2022ca993a6e" }
# TODO - Remove when a version is released with this PR: https://github.com/servo/core-foundation-rs/pull/457 # TODO - Remove when a version is released with this PR: https://github.com/servo/core-foundation-rs/pull/457

View file

@ -530,12 +530,17 @@
"alt-cmd-shift-c": "project_panel::CopyRelativePath", "alt-cmd-shift-c": "project_panel::CopyRelativePath",
"f2": "project_panel::Rename", "f2": "project_panel::Rename",
"enter": "project_panel::Rename", "enter": "project_panel::Rename",
"space": "project_panel::Open",
"backspace": "project_panel::Delete", "backspace": "project_panel::Delete",
"alt-cmd-r": "project_panel::RevealInFinder", "alt-cmd-r": "project_panel::RevealInFinder",
"alt-shift-f": "project_panel::NewSearchInDirectory" "alt-shift-f": "project_panel::NewSearchInDirectory"
} }
}, },
{
"context": "ProjectPanel && not_editing",
"bindings": {
"space": "project_panel::Open"
}
},
{ {
"context": "CollabPanel && not_editing", "context": "CollabPanel && not_editing",
"bindings": { "bindings": {

View file

@ -464,7 +464,7 @@ impl ActiveCall {
&self.pending_invites &self.pending_invites
} }
pub fn report_call_event(&self, operation: &'static str, cx: &AppContext) { pub fn report_call_event(&self, operation: &'static str, cx: &mut AppContext) {
if let Some(room) = self.room() { if let Some(room) = self.room() {
let room = room.read(cx); let room = room.read(cx);
report_call_event_for_room(operation, room.id(), room.channel_id(), &self.client, cx); report_call_event_for_room(operation, room.id(), room.channel_id(), &self.client, cx);
@ -477,7 +477,7 @@ pub fn report_call_event_for_room(
room_id: u64, room_id: u64,
channel_id: Option<u64>, channel_id: Option<u64>,
client: &Arc<Client>, client: &Arc<Client>,
cx: &AppContext, cx: &mut AppContext,
) { ) {
let telemetry = client.telemetry(); let telemetry = client.telemetry();
let telemetry_settings = *TelemetrySettings::get_global(cx); let telemetry_settings = *TelemetrySettings::get_global(cx);

View file

@ -109,6 +109,10 @@ pub enum ClickhouseEvent {
virtual_memory_in_bytes: u64, virtual_memory_in_bytes: u64,
milliseconds_since_first_event: i64, milliseconds_since_first_event: i64,
}, },
App {
operation: &'static str,
milliseconds_since_first_event: i64,
},
} }
#[cfg(debug_assertions)] #[cfg(debug_assertions)]
@ -168,13 +172,8 @@ impl Telemetry {
let mut state = self.state.lock(); let mut state = self.state.lock();
state.installation_id = installation_id.map(|id| id.into()); state.installation_id = installation_id.map(|id| id.into());
state.session_id = Some(session_id.into()); state.session_id = Some(session_id.into());
let has_clickhouse_events = !state.clickhouse_events_queue.is_empty();
drop(state); drop(state);
if has_clickhouse_events {
self.flush_clickhouse_events();
}
let this = self.clone(); let this = self.clone();
cx.spawn(|mut cx| async move { cx.spawn(|mut cx| async move {
// Avoiding calling `System::new_all()`, as there have been crashes related to it // Avoiding calling `System::new_all()`, as there have been crashes related to it
@ -256,7 +255,7 @@ impl Telemetry {
milliseconds_since_first_event: self.milliseconds_since_first_event(), milliseconds_since_first_event: self.milliseconds_since_first_event(),
}; };
self.report_clickhouse_event(event, telemetry_settings) self.report_clickhouse_event(event, telemetry_settings, false)
} }
pub fn report_copilot_event( pub fn report_copilot_event(
@ -273,7 +272,7 @@ impl Telemetry {
milliseconds_since_first_event: self.milliseconds_since_first_event(), milliseconds_since_first_event: self.milliseconds_since_first_event(),
}; };
self.report_clickhouse_event(event, telemetry_settings) self.report_clickhouse_event(event, telemetry_settings, false)
} }
pub fn report_assistant_event( pub fn report_assistant_event(
@ -290,7 +289,7 @@ impl Telemetry {
milliseconds_since_first_event: self.milliseconds_since_first_event(), milliseconds_since_first_event: self.milliseconds_since_first_event(),
}; };
self.report_clickhouse_event(event, telemetry_settings) self.report_clickhouse_event(event, telemetry_settings, false)
} }
pub fn report_call_event( pub fn report_call_event(
@ -307,7 +306,7 @@ impl Telemetry {
milliseconds_since_first_event: self.milliseconds_since_first_event(), milliseconds_since_first_event: self.milliseconds_since_first_event(),
}; };
self.report_clickhouse_event(event, telemetry_settings) self.report_clickhouse_event(event, telemetry_settings, false)
} }
pub fn report_cpu_event( pub fn report_cpu_event(
@ -322,7 +321,7 @@ impl Telemetry {
milliseconds_since_first_event: self.milliseconds_since_first_event(), milliseconds_since_first_event: self.milliseconds_since_first_event(),
}; };
self.report_clickhouse_event(event, telemetry_settings) self.report_clickhouse_event(event, telemetry_settings, false)
} }
pub fn report_memory_event( pub fn report_memory_event(
@ -337,7 +336,21 @@ impl Telemetry {
milliseconds_since_first_event: self.milliseconds_since_first_event(), milliseconds_since_first_event: self.milliseconds_since_first_event(),
}; };
self.report_clickhouse_event(event, telemetry_settings) self.report_clickhouse_event(event, telemetry_settings, false)
}
// app_events are called at app open and app close, so flush is set to immediately send
pub fn report_app_event(
self: &Arc<Self>,
telemetry_settings: TelemetrySettings,
operation: &'static str,
) {
let event = ClickhouseEvent::App {
operation,
milliseconds_since_first_event: self.milliseconds_since_first_event(),
};
self.report_clickhouse_event(event, telemetry_settings, true)
} }
fn milliseconds_since_first_event(&self) -> i64 { fn milliseconds_since_first_event(&self) -> i64 {
@ -358,6 +371,7 @@ impl Telemetry {
self: &Arc<Self>, self: &Arc<Self>,
event: ClickhouseEvent, event: ClickhouseEvent,
telemetry_settings: TelemetrySettings, telemetry_settings: TelemetrySettings,
immediate_flush: bool,
) { ) {
if !telemetry_settings.metrics { if !telemetry_settings.metrics {
return; return;
@ -370,7 +384,7 @@ impl Telemetry {
.push(ClickhouseEventWrapper { signed_in, event }); .push(ClickhouseEventWrapper { signed_in, event });
if state.installation_id.is_some() { if state.installation_id.is_some() {
if state.clickhouse_events_queue.len() >= MAX_QUEUE_LEN { if immediate_flush || state.clickhouse_events_queue.len() >= MAX_QUEUE_LEN {
drop(state); drop(state);
self.flush_clickhouse_events(); self.flush_clickhouse_events();
} else { } else {

View file

@ -382,7 +382,7 @@ impl settings::Settings for TelemetrySettings {
} }
impl Client { impl Client {
pub fn new(http: Arc<dyn HttpClient>, cx: &AppContext) -> Arc<Self> { pub fn new(http: Arc<dyn HttpClient>, cx: &mut AppContext) -> Arc<Self> {
Arc::new(Self { Arc::new(Self {
id: AtomicU64::new(0), id: AtomicU64::new(0),
peer: Peer::new(0), peer: Peer::new(0),

View file

@ -1,5 +1,6 @@
use crate::{TelemetrySettings, ZED_SECRET_CLIENT_TOKEN, ZED_SERVER_URL}; use crate::{TelemetrySettings, ZED_SECRET_CLIENT_TOKEN, ZED_SERVER_URL};
use chrono::{DateTime, Utc}; use chrono::{DateTime, Utc};
use futures::Future;
use gpui::{serde_json, AppContext, AppMetadata, BackgroundExecutor, Task}; use gpui::{serde_json, AppContext, AppMetadata, BackgroundExecutor, Task};
use lazy_static::lazy_static; use lazy_static::lazy_static;
use parking_lot::Mutex; use parking_lot::Mutex;
@ -107,6 +108,10 @@ pub enum ClickhouseEvent {
virtual_memory_in_bytes: u64, virtual_memory_in_bytes: u64,
milliseconds_since_first_event: i64, milliseconds_since_first_event: i64,
}, },
App {
operation: &'static str,
milliseconds_since_first_event: i64,
},
} }
#[cfg(debug_assertions)] #[cfg(debug_assertions)]
@ -122,12 +127,13 @@ const DEBOUNCE_INTERVAL: Duration = Duration::from_secs(1);
const DEBOUNCE_INTERVAL: Duration = Duration::from_secs(30); const DEBOUNCE_INTERVAL: Duration = Duration::from_secs(30);
impl Telemetry { impl Telemetry {
pub fn new(client: Arc<dyn HttpClient>, cx: &AppContext) -> Arc<Self> { pub fn new(client: Arc<dyn HttpClient>, cx: &mut AppContext) -> Arc<Self> {
let release_channel = if cx.has_global::<ReleaseChannel>() { let release_channel = if cx.has_global::<ReleaseChannel>() {
Some(cx.global::<ReleaseChannel>().display_name()) Some(cx.global::<ReleaseChannel>().display_name())
} else { } else {
None None
}; };
// TODO: Replace all hardware stuff with nested SystemSpecs json // TODO: Replace all hardware stuff with nested SystemSpecs json
let this = Arc::new(Self { let this = Arc::new(Self {
http_client: client, http_client: client,
@ -147,9 +153,30 @@ impl Telemetry {
}), }),
}); });
// We should only ever have one instance of Telemetry, leak the subscription to keep it alive
// rather than store in TelemetryState, complicating spawn as subscriptions are not Send
std::mem::forget(cx.on_app_quit({
let this = this.clone();
move |cx| this.shutdown_telemetry(cx)
}));
this this
} }
#[cfg(any(test, feature = "test-support"))]
fn shutdown_telemetry(self: &Arc<Self>, _: &mut AppContext) -> impl Future<Output = ()> {
Task::ready(())
}
// Skip calling this function in tests.
// TestAppContext ends up calling this function on shutdown and it panics when trying to find the TelemetrySettings
#[cfg(not(any(test, feature = "test-support")))]
fn shutdown_telemetry(self: &Arc<Self>, cx: &mut AppContext) -> impl Future<Output = ()> {
let telemetry_settings = TelemetrySettings::get_global(cx).clone();
self.report_app_event(telemetry_settings, "close");
Task::ready(())
}
pub fn log_file_path(&self) -> Option<PathBuf> { pub fn log_file_path(&self) -> Option<PathBuf> {
Some(self.state.lock().log_file.as_ref()?.path().to_path_buf()) Some(self.state.lock().log_file.as_ref()?.path().to_path_buf())
} }
@ -163,13 +190,8 @@ impl Telemetry {
let mut state = self.state.lock(); let mut state = self.state.lock();
state.installation_id = installation_id.map(|id| id.into()); state.installation_id = installation_id.map(|id| id.into());
state.session_id = Some(session_id.into()); state.session_id = Some(session_id.into());
let has_clickhouse_events = !state.clickhouse_events_queue.is_empty();
drop(state); drop(state);
if has_clickhouse_events {
self.flush_clickhouse_events();
}
let this = self.clone(); let this = self.clone();
cx.spawn(|cx| async move { cx.spawn(|cx| async move {
// Avoiding calling `System::new_all()`, as there have been crashes related to it // Avoiding calling `System::new_all()`, as there have been crashes related to it
@ -257,7 +279,7 @@ impl Telemetry {
milliseconds_since_first_event: self.milliseconds_since_first_event(), milliseconds_since_first_event: self.milliseconds_since_first_event(),
}; };
self.report_clickhouse_event(event, telemetry_settings) self.report_clickhouse_event(event, telemetry_settings, false)
} }
pub fn report_copilot_event( pub fn report_copilot_event(
@ -274,7 +296,7 @@ impl Telemetry {
milliseconds_since_first_event: self.milliseconds_since_first_event(), milliseconds_since_first_event: self.milliseconds_since_first_event(),
}; };
self.report_clickhouse_event(event, telemetry_settings) self.report_clickhouse_event(event, telemetry_settings, false)
} }
pub fn report_assistant_event( pub fn report_assistant_event(
@ -291,7 +313,7 @@ impl Telemetry {
milliseconds_since_first_event: self.milliseconds_since_first_event(), milliseconds_since_first_event: self.milliseconds_since_first_event(),
}; };
self.report_clickhouse_event(event, telemetry_settings) self.report_clickhouse_event(event, telemetry_settings, false)
} }
pub fn report_call_event( pub fn report_call_event(
@ -308,7 +330,7 @@ impl Telemetry {
milliseconds_since_first_event: self.milliseconds_since_first_event(), milliseconds_since_first_event: self.milliseconds_since_first_event(),
}; };
self.report_clickhouse_event(event, telemetry_settings) self.report_clickhouse_event(event, telemetry_settings, false)
} }
pub fn report_cpu_event( pub fn report_cpu_event(
@ -323,7 +345,7 @@ impl Telemetry {
milliseconds_since_first_event: self.milliseconds_since_first_event(), milliseconds_since_first_event: self.milliseconds_since_first_event(),
}; };
self.report_clickhouse_event(event, telemetry_settings) self.report_clickhouse_event(event, telemetry_settings, false)
} }
pub fn report_memory_event( pub fn report_memory_event(
@ -338,7 +360,21 @@ impl Telemetry {
milliseconds_since_first_event: self.milliseconds_since_first_event(), milliseconds_since_first_event: self.milliseconds_since_first_event(),
}; };
self.report_clickhouse_event(event, telemetry_settings) self.report_clickhouse_event(event, telemetry_settings, false)
}
// app_events are called at app open and app close, so flush is set to immediately send
pub fn report_app_event(
self: &Arc<Self>,
telemetry_settings: TelemetrySettings,
operation: &'static str,
) {
let event = ClickhouseEvent::App {
operation,
milliseconds_since_first_event: self.milliseconds_since_first_event(),
};
self.report_clickhouse_event(event, telemetry_settings, true)
} }
fn milliseconds_since_first_event(&self) -> i64 { fn milliseconds_since_first_event(&self) -> i64 {
@ -359,6 +395,7 @@ impl Telemetry {
self: &Arc<Self>, self: &Arc<Self>,
event: ClickhouseEvent, event: ClickhouseEvent,
telemetry_settings: TelemetrySettings, telemetry_settings: TelemetrySettings,
immediate_flush: bool,
) { ) {
if !telemetry_settings.metrics { if !telemetry_settings.metrics {
return; return;
@ -371,7 +408,7 @@ impl Telemetry {
.push(ClickhouseEventWrapper { signed_in, event }); .push(ClickhouseEventWrapper { signed_in, event });
if state.installation_id.is_some() { if state.installation_id.is_some() {
if state.clickhouse_events_queue.len() >= MAX_QUEUE_LEN { if immediate_flush || state.clickhouse_events_queue.len() >= MAX_QUEUE_LEN {
drop(state); drop(state);
self.flush_clickhouse_events(); self.flush_clickhouse_events();
} else { } else {

View file

@ -149,7 +149,7 @@ impl TestServer {
.user_id .user_id
}; };
let client_name = name.to_string(); let client_name = name.to_string();
let mut client = cx.read(|cx| Client::new(http.clone(), cx)); let mut client = cx.update(|cx| Client::new(http.clone(), cx));
let server = self.server.clone(); let server = self.server.clone();
let db = self.app_state.db.clone(); let db = self.app_state.db.clone();
let connection_killers = self.connection_killers.clone(); let connection_killers = self.connection_killers.clone();

View file

@ -1001,17 +1001,18 @@ impl CompletionsMenu {
fn pre_resolve_completion_documentation( fn pre_resolve_completion_documentation(
&self, &self,
project: Option<ModelHandle<Project>>, editor: &Editor,
cx: &mut ViewContext<Editor>, cx: &mut ViewContext<Editor>,
) { ) -> Option<Task<()>> {
let settings = settings::get::<EditorSettings>(cx); let settings = settings::get::<EditorSettings>(cx);
if !settings.show_completion_documentation { if !settings.show_completion_documentation {
return; return None;
} }
let Some(project) = project else { let Some(project) = editor.project.clone() else {
return; return None;
}; };
let client = project.read(cx).client(); let client = project.read(cx).client();
let language_registry = project.read(cx).languages().clone(); let language_registry = project.read(cx).languages().clone();
@ -1021,7 +1022,7 @@ impl CompletionsMenu {
let completions = self.completions.clone(); let completions = self.completions.clone();
let completion_indices: Vec<_> = self.matches.iter().map(|m| m.candidate_id).collect(); let completion_indices: Vec<_> = self.matches.iter().map(|m| m.candidate_id).collect();
cx.spawn(move |this, mut cx| async move { Some(cx.spawn(move |this, mut cx| async move {
if is_remote { if is_remote {
let Some(project_id) = project_id else { let Some(project_id) = project_id else {
log::error!("Remote project without remote_id"); log::error!("Remote project without remote_id");
@ -1083,8 +1084,7 @@ impl CompletionsMenu {
_ = this.update(&mut cx, |_, cx| cx.notify()); _ = this.update(&mut cx, |_, cx| cx.notify());
} }
} }
}) }))
.detach();
} }
fn attempt_resolve_selected_completion_documentation( fn attempt_resolve_selected_completion_documentation(
@ -3580,7 +3580,8 @@ impl Editor {
let id = post_inc(&mut self.next_completion_id); let id = post_inc(&mut self.next_completion_id);
let task = cx.spawn(|this, mut cx| { let task = cx.spawn(|this, mut cx| {
async move { async move {
let menu = if let Some(completions) = completions.await.log_err() { let completions = completions.await.log_err();
let (menu, pre_resolve_task) = if let Some(completions) = completions {
let mut menu = CompletionsMenu { let mut menu = CompletionsMenu {
id, id,
initial_position: position, initial_position: position,
@ -3601,21 +3602,26 @@ impl Editor {
selected_item: 0, selected_item: 0,
list: Default::default(), list: Default::default(),
}; };
menu.filter(query.as_deref(), cx.background()).await; menu.filter(query.as_deref(), cx.background()).await;
if menu.matches.is_empty() { if menu.matches.is_empty() {
None (None, None)
} else { } else {
_ = this.update(&mut cx, |editor, cx| { let pre_resolve_task = this
menu.pre_resolve_completion_documentation(editor.project.clone(), cx); .update(&mut cx, |editor, cx| {
}); menu.pre_resolve_completion_documentation(editor, cx)
Some(menu) })
.ok()
.flatten();
(Some(menu), pre_resolve_task)
} }
} else { } else {
None (None, None)
}; };
this.update(&mut cx, |this, cx| { this.update(&mut cx, |this, cx| {
this.completion_tasks.retain(|(task_id, _)| *task_id > id); this.completion_tasks.retain(|(task_id, _)| *task_id >= id);
let mut context_menu = this.context_menu.write(); let mut context_menu = this.context_menu.write();
match context_menu.as_ref() { match context_menu.as_ref() {
@ -3636,10 +3642,10 @@ impl Editor {
drop(context_menu); drop(context_menu);
this.discard_copilot_suggestion(cx); this.discard_copilot_suggestion(cx);
cx.notify(); cx.notify();
} else if this.completion_tasks.is_empty() { } else if this.completion_tasks.len() <= 1 {
// If there are no more completion tasks and the last menu was // If there are no more completion tasks (omitting ourself) and
// empty, we should hide it. If it was already hidden, we should // the last menu was empty, we should hide it. If it was already
// also show the copilot suggestion when available. // hidden, we should also show the copilot suggestion when available.
drop(context_menu); drop(context_menu);
if this.hide_context_menu(cx).is_none() { if this.hide_context_menu(cx).is_none() {
this.update_visible_copilot_suggestion(cx); this.update_visible_copilot_suggestion(cx);
@ -3647,10 +3653,15 @@ impl Editor {
} }
})?; })?;
if let Some(pre_resolve_task) = pre_resolve_task {
pre_resolve_task.await;
}
Ok::<_, anyhow::Error>(()) Ok::<_, anyhow::Error>(())
} }
.log_err() .log_err()
}); });
self.completion_tasks.push((id, task)); self.completion_tasks.push((id, task));
} }

View file

@ -10,6 +10,7 @@ pub use entity_map::*;
pub use model_context::*; pub use model_context::*;
use refineable::Refineable; use refineable::Refineable;
use smallvec::SmallVec; use smallvec::SmallVec;
use smol::future::FutureExt;
#[cfg(any(test, feature = "test-support"))] #[cfg(any(test, feature = "test-support"))]
pub use test_context::*; pub use test_context::*;
@ -983,6 +984,22 @@ impl AppContext {
pub fn all_action_names(&self) -> &[SharedString] { pub fn all_action_names(&self) -> &[SharedString] {
self.actions.all_action_names() self.actions.all_action_names()
} }
pub fn on_app_quit<Fut>(
&mut self,
mut on_quit: impl FnMut(&mut AppContext) -> Fut + 'static,
) -> Subscription
where
Fut: 'static + Future<Output = ()>,
{
self.quit_observers.insert(
(),
Box::new(move |cx| {
let future = on_quit(cx);
async move { future.await }.boxed_local()
}),
)
}
} }
impl Context for AppContext { impl Context for AppContext {

View file

@ -11,7 +11,7 @@ pub struct HighlightId(pub u32);
const DEFAULT_SYNTAX_HIGHLIGHT_ID: HighlightId = HighlightId(u32::MAX); const DEFAULT_SYNTAX_HIGHLIGHT_ID: HighlightId = HighlightId(u32::MAX);
impl HighlightMap { impl HighlightMap {
pub fn new(capture_names: &[String], theme: &SyntaxTheme) -> Self { pub fn new(capture_names: &[&str], theme: &SyntaxTheme) -> Self {
// For each capture name in the highlight query, find the longest // For each capture name in the highlight query, find the longest
// key in the theme's syntax styles that matches all of the // key in the theme's syntax styles that matches all of the
// dot-separated components of the capture name. // dot-separated components of the capture name.
@ -98,9 +98,9 @@ mod tests {
); );
let capture_names = &[ let capture_names = &[
"function.special".to_string(), "function.special",
"function.async.rust".to_string(), "function.async.rust",
"variable.builtin.self".to_string(), "variable.builtin.self",
]; ];
let map = HighlightMap::new(capture_names, &theme); let map = HighlightMap::new(capture_names, &theme);

View file

@ -197,8 +197,12 @@ impl CachedLspAdapter {
self.adapter.code_action_kinds() self.adapter.code_action_kinds()
} }
pub fn workspace_configuration(&self, cx: &mut AppContext) -> BoxFuture<'static, Value> { pub fn workspace_configuration(
self.adapter.workspace_configuration(cx) &self,
workspace_root: &Path,
cx: &mut AppContext,
) -> BoxFuture<'static, Value> {
self.adapter.workspace_configuration(workspace_root, cx)
} }
pub fn process_diagnostics(&self, params: &mut lsp::PublishDiagnosticsParams) { pub fn process_diagnostics(&self, params: &mut lsp::PublishDiagnosticsParams) {
@ -312,7 +316,7 @@ pub trait LspAdapter: 'static + Send + Sync {
None None
} }
fn workspace_configuration(&self, _: &mut AppContext) -> BoxFuture<'static, Value> { fn workspace_configuration(&self, _: &Path, _: &mut AppContext) -> BoxFuture<'static, Value> {
futures::future::ready(serde_json::json!({})).boxed() futures::future::ready(serde_json::json!({})).boxed()
} }
@ -1383,7 +1387,7 @@ impl Language {
let query = Query::new(self.grammar_mut().ts_language, source)?; let query = Query::new(self.grammar_mut().ts_language, source)?;
let mut override_configs_by_id = HashMap::default(); let mut override_configs_by_id = HashMap::default();
for (ix, name) in query.capture_names().iter().enumerate() { for (ix, name) in query.capture_names().iter().copied().enumerate() {
if !name.starts_with('_') { if !name.starts_with('_') {
let value = self.config.overrides.remove(name).unwrap_or_default(); let value = self.config.overrides.remove(name).unwrap_or_default();
for server_name in &value.opt_into_language_servers { for server_name in &value.opt_into_language_servers {
@ -1396,7 +1400,7 @@ impl Language {
} }
} }
override_configs_by_id.insert(ix as u32, (name.clone(), value)); override_configs_by_id.insert(ix as u32, (name.into(), value));
} }
} }

View file

@ -1300,7 +1300,7 @@ fn assert_capture_ranges(
.collect::<Vec<_>>(); .collect::<Vec<_>>();
for capture in captures { for capture in captures {
let name = &queries[capture.grammar_index].capture_names()[capture.index as usize]; let name = &queries[capture.grammar_index].capture_names()[capture.index as usize];
if highlight_query_capture_names.contains(&name.as_str()) { if highlight_query_capture_names.contains(&name) {
actual_ranges.push(capture.node.byte_range()); actual_ranges.push(capture.node.byte_range());
} }
} }

View file

@ -11,7 +11,7 @@ pub struct HighlightId(pub u32);
const DEFAULT_SYNTAX_HIGHLIGHT_ID: HighlightId = HighlightId(u32::MAX); const DEFAULT_SYNTAX_HIGHLIGHT_ID: HighlightId = HighlightId(u32::MAX);
impl HighlightMap { impl HighlightMap {
pub fn new(capture_names: &[String], theme: &SyntaxTheme) -> Self { pub fn new(capture_names: &[&str], theme: &SyntaxTheme) -> Self {
// For each capture name in the highlight query, find the longest // For each capture name in the highlight query, find the longest
// key in the theme's syntax styles that matches all of the // key in the theme's syntax styles that matches all of the
// dot-separated components of the capture name. // dot-separated components of the capture name.
@ -100,9 +100,9 @@ mod tests {
}; };
let capture_names = &[ let capture_names = &[
"function.special".to_string(), "function.special",
"function.async.rust".to_string(), "function.async.rust",
"variable.builtin.self".to_string(), "variable.builtin.self",
]; ];
let map = HighlightMap::new(capture_names, &theme); let map = HighlightMap::new(capture_names, &theme);

View file

@ -200,8 +200,12 @@ impl CachedLspAdapter {
self.adapter.code_action_kinds() self.adapter.code_action_kinds()
} }
pub fn workspace_configuration(&self, cx: &mut AppContext) -> BoxFuture<'static, Value> { pub fn workspace_configuration(
self.adapter.workspace_configuration(cx) &self,
workspace_root: &Path,
cx: &mut AppContext,
) -> BoxFuture<'static, Value> {
self.adapter.workspace_configuration(workspace_root, cx)
} }
pub fn process_diagnostics(&self, params: &mut lsp::PublishDiagnosticsParams) { pub fn process_diagnostics(&self, params: &mut lsp::PublishDiagnosticsParams) {
@ -315,7 +319,7 @@ pub trait LspAdapter: 'static + Send + Sync {
None None
} }
fn workspace_configuration(&self, _: &mut AppContext) -> BoxFuture<'static, Value> { fn workspace_configuration(&self, _: &Path, _: &mut AppContext) -> BoxFuture<'static, Value> {
futures::future::ready(serde_json::json!({})).boxed() futures::future::ready(serde_json::json!({})).boxed()
} }
@ -1391,7 +1395,7 @@ impl Language {
let mut override_configs_by_id = HashMap::default(); let mut override_configs_by_id = HashMap::default();
for (ix, name) in query.capture_names().iter().enumerate() { for (ix, name) in query.capture_names().iter().enumerate() {
if !name.starts_with('_') { if !name.starts_with('_') {
let value = self.config.overrides.remove(name).unwrap_or_default(); let value = self.config.overrides.remove(*name).unwrap_or_default();
for server_name in &value.opt_into_language_servers { for server_name in &value.opt_into_language_servers {
if !self if !self
.config .config
@ -1402,7 +1406,7 @@ impl Language {
} }
} }
override_configs_by_id.insert(ix as u32, (name.clone(), value)); override_configs_by_id.insert(ix as u32, (name.to_string(), value));
} }
} }

View file

@ -1300,7 +1300,7 @@ fn assert_capture_ranges(
.collect::<Vec<_>>(); .collect::<Vec<_>>();
for capture in captures { for capture in captures {
let name = &queries[capture.grammar_index].capture_names()[capture.index as usize]; let name = &queries[capture.grammar_index].capture_names()[capture.index as usize];
if highlight_query_capture_names.contains(&name.as_str()) { if highlight_query_capture_names.contains(&name) {
actual_ranges.push(capture.node.byte_range()); actual_ranges.push(capture.node.byte_range());
} }
} }

View file

@ -429,8 +429,8 @@ impl LanguageServer {
let root_uri = Url::from_file_path(&self.root_path).unwrap(); let root_uri = Url::from_file_path(&self.root_path).unwrap();
#[allow(deprecated)] #[allow(deprecated)]
let params = InitializeParams { let params = InitializeParams {
process_id: Default::default(), process_id: None,
root_path: Default::default(), root_path: None,
root_uri: Some(root_uri.clone()), root_uri: Some(root_uri.clone()),
initialization_options: options, initialization_options: options,
capabilities: ClientCapabilities { capabilities: ClientCapabilities {
@ -451,12 +451,15 @@ impl LanguageServer {
inlay_hint: Some(InlayHintWorkspaceClientCapabilities { inlay_hint: Some(InlayHintWorkspaceClientCapabilities {
refresh_support: Some(true), refresh_support: Some(true),
}), }),
diagnostic: Some(DiagnosticWorkspaceClientCapabilities {
refresh_support: None,
}),
..Default::default() ..Default::default()
}), }),
text_document: Some(TextDocumentClientCapabilities { text_document: Some(TextDocumentClientCapabilities {
definition: Some(GotoCapability { definition: Some(GotoCapability {
link_support: Some(true), link_support: Some(true),
..Default::default() dynamic_registration: None,
}), }),
code_action: Some(CodeActionClientCapabilities { code_action: Some(CodeActionClientCapabilities {
code_action_literal_support: Some(CodeActionLiteralSupport { code_action_literal_support: Some(CodeActionLiteralSupport {
@ -501,7 +504,7 @@ impl LanguageServer {
}), }),
hover: Some(HoverClientCapabilities { hover: Some(HoverClientCapabilities {
content_format: Some(vec![MarkupKind::Markdown]), content_format: Some(vec![MarkupKind::Markdown]),
..Default::default() dynamic_registration: None,
}), }),
inlay_hint: Some(InlayHintClientCapabilities { inlay_hint: Some(InlayHintClientCapabilities {
resolve_support: Some(InlayHintResolveClientCapabilities { resolve_support: Some(InlayHintResolveClientCapabilities {
@ -515,6 +518,20 @@ impl LanguageServer {
}), }),
dynamic_registration: Some(false), dynamic_registration: Some(false),
}), }),
publish_diagnostics: Some(PublishDiagnosticsClientCapabilities {
related_information: Some(true),
..Default::default()
}),
formatting: Some(DynamicRegistrationClientCapabilities {
dynamic_registration: None,
}),
on_type_formatting: Some(DynamicRegistrationClientCapabilities {
dynamic_registration: None,
}),
diagnostic: Some(DiagnosticClientCapabilities {
related_document_support: Some(true),
dynamic_registration: None,
}),
..Default::default() ..Default::default()
}), }),
experimental: Some(json!({ experimental: Some(json!({
@ -524,15 +541,15 @@ impl LanguageServer {
work_done_progress: Some(true), work_done_progress: Some(true),
..Default::default() ..Default::default()
}), }),
..Default::default() general: None,
}, },
trace: Default::default(), trace: None,
workspace_folders: Some(vec![WorkspaceFolder { workspace_folders: Some(vec![WorkspaceFolder {
uri: root_uri, uri: root_uri,
name: Default::default(), name: Default::default(),
}]), }]),
client_info: Default::default(), client_info: None,
locale: Default::default(), locale: None,
}; };
let response = self.request::<request::Initialize>(params).await?; let response = self.request::<request::Initialize>(params).await?;

View file

@ -434,8 +434,8 @@ impl LanguageServer {
let root_uri = Url::from_file_path(&self.root_path).unwrap(); let root_uri = Url::from_file_path(&self.root_path).unwrap();
#[allow(deprecated)] #[allow(deprecated)]
let params = InitializeParams { let params = InitializeParams {
process_id: Default::default(), process_id: None,
root_path: Default::default(), root_path: None,
root_uri: Some(root_uri.clone()), root_uri: Some(root_uri.clone()),
initialization_options: options, initialization_options: options,
capabilities: ClientCapabilities { capabilities: ClientCapabilities {
@ -456,12 +456,15 @@ impl LanguageServer {
inlay_hint: Some(InlayHintWorkspaceClientCapabilities { inlay_hint: Some(InlayHintWorkspaceClientCapabilities {
refresh_support: Some(true), refresh_support: Some(true),
}), }),
diagnostic: Some(DiagnosticWorkspaceClientCapabilities {
refresh_support: None,
}),
..Default::default() ..Default::default()
}), }),
text_document: Some(TextDocumentClientCapabilities { text_document: Some(TextDocumentClientCapabilities {
definition: Some(GotoCapability { definition: Some(GotoCapability {
link_support: Some(true), link_support: Some(true),
..Default::default() dynamic_registration: None,
}), }),
code_action: Some(CodeActionClientCapabilities { code_action: Some(CodeActionClientCapabilities {
code_action_literal_support: Some(CodeActionLiteralSupport { code_action_literal_support: Some(CodeActionLiteralSupport {
@ -503,7 +506,7 @@ impl LanguageServer {
}), }),
hover: Some(HoverClientCapabilities { hover: Some(HoverClientCapabilities {
content_format: Some(vec![MarkupKind::Markdown]), content_format: Some(vec![MarkupKind::Markdown]),
..Default::default() dynamic_registration: None,
}), }),
inlay_hint: Some(InlayHintClientCapabilities { inlay_hint: Some(InlayHintClientCapabilities {
resolve_support: Some(InlayHintResolveClientCapabilities { resolve_support: Some(InlayHintResolveClientCapabilities {
@ -517,6 +520,20 @@ impl LanguageServer {
}), }),
dynamic_registration: Some(false), dynamic_registration: Some(false),
}), }),
publish_diagnostics: Some(PublishDiagnosticsClientCapabilities {
related_information: Some(true),
..Default::default()
}),
formatting: Some(DynamicRegistrationClientCapabilities {
dynamic_registration: None,
}),
on_type_formatting: Some(DynamicRegistrationClientCapabilities {
dynamic_registration: None,
}),
diagnostic: Some(DiagnosticClientCapabilities {
related_document_support: Some(true),
dynamic_registration: None,
}),
..Default::default() ..Default::default()
}), }),
experimental: Some(json!({ experimental: Some(json!({
@ -526,15 +543,15 @@ impl LanguageServer {
work_done_progress: Some(true), work_done_progress: Some(true),
..Default::default() ..Default::default()
}), }),
..Default::default() general: None,
}, },
trace: Default::default(), trace: None,
workspace_folders: Some(vec![WorkspaceFolder { workspace_folders: Some(vec![WorkspaceFolder {
uri: root_uri, uri: root_uri,
name: Default::default(), name: Default::default(),
}]), }]),
client_info: Default::default(), client_info: None,
locale: Default::default(), locale: None,
}; };
let response = self.request::<request::Initialize>(params).await?; let response = self.request::<request::Initialize>(params).await?;

View file

@ -2629,8 +2629,9 @@ impl Project {
}); });
for (adapter, server) in servers { for (adapter, server) in servers {
let workspace_config = let workspace_config = cx
cx.update(|cx| adapter.workspace_configuration(cx)).await; .update(|cx| adapter.workspace_configuration(server.root_path(), cx))
.await;
server server
.notify::<lsp::notification::DidChangeConfiguration>( .notify::<lsp::notification::DidChangeConfiguration>(
lsp::DidChangeConfigurationParams { lsp::DidChangeConfigurationParams {
@ -2738,7 +2739,7 @@ impl Project {
stderr_capture.clone(), stderr_capture.clone(),
language.clone(), language.clone(),
adapter.clone(), adapter.clone(),
worktree_path, Arc::clone(&worktree_path),
ProjectLspAdapterDelegate::new(self, cx), ProjectLspAdapterDelegate::new(self, cx),
cx, cx,
) { ) {
@ -2761,6 +2762,7 @@ impl Project {
cx.spawn_weak(|this, mut cx| async move { cx.spawn_weak(|this, mut cx| async move {
let result = Self::setup_and_insert_language_server( let result = Self::setup_and_insert_language_server(
this, this,
&worktree_path,
override_options, override_options,
pending_server, pending_server,
adapter.clone(), adapter.clone(),
@ -2876,6 +2878,7 @@ impl Project {
async fn setup_and_insert_language_server( async fn setup_and_insert_language_server(
this: WeakModelHandle<Self>, this: WeakModelHandle<Self>,
worktree_path: &Path,
override_initialization_options: Option<serde_json::Value>, override_initialization_options: Option<serde_json::Value>,
pending_server: PendingLanguageServer, pending_server: PendingLanguageServer,
adapter: Arc<CachedLspAdapter>, adapter: Arc<CachedLspAdapter>,
@ -2888,6 +2891,7 @@ impl Project {
this, this,
override_initialization_options, override_initialization_options,
pending_server, pending_server,
worktree_path,
adapter.clone(), adapter.clone(),
server_id, server_id,
cx, cx,
@ -2917,11 +2921,14 @@ impl Project {
this: WeakModelHandle<Self>, this: WeakModelHandle<Self>,
override_options: Option<serde_json::Value>, override_options: Option<serde_json::Value>,
pending_server: PendingLanguageServer, pending_server: PendingLanguageServer,
worktree_path: &Path,
adapter: Arc<CachedLspAdapter>, adapter: Arc<CachedLspAdapter>,
server_id: LanguageServerId, server_id: LanguageServerId,
cx: &mut AsyncAppContext, cx: &mut AsyncAppContext,
) -> Result<Arc<LanguageServer>> { ) -> Result<Arc<LanguageServer>> {
let workspace_config = cx.update(|cx| adapter.workspace_configuration(cx)).await; let workspace_config = cx
.update(|cx| adapter.workspace_configuration(worktree_path, cx))
.await;
let language_server = pending_server.task.await?; let language_server = pending_server.task.await?;
language_server language_server
@ -2949,11 +2956,14 @@ impl Project {
language_server language_server
.on_request::<lsp::request::WorkspaceConfiguration, _, _>({ .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
let adapter = adapter.clone(); let adapter = adapter.clone();
let worktree_path = worktree_path.to_path_buf();
move |params, mut cx| { move |params, mut cx| {
let adapter = adapter.clone(); let adapter = adapter.clone();
let worktree_path = worktree_path.clone();
async move { async move {
let workspace_config = let workspace_config = cx
cx.update(|cx| adapter.workspace_configuration(cx)).await; .update(|cx| adapter.workspace_configuration(&worktree_path, cx))
.await;
Ok(params Ok(params
.items .items
.into_iter() .into_iter()

View file

@ -2226,7 +2226,7 @@ impl LocalSnapshot {
paths paths
} }
fn is_abs_path_excluded(&self, abs_path: &Path) -> bool { fn is_path_excluded(&self, abs_path: &Path) -> bool {
self.file_scan_exclusions self.file_scan_exclusions
.iter() .iter()
.any(|exclude_matcher| exclude_matcher.is_match(abs_path)) .any(|exclude_matcher| exclude_matcher.is_match(abs_path))
@ -2399,26 +2399,9 @@ impl BackgroundScannerState {
self.snapshot.check_invariants(false); self.snapshot.check_invariants(false);
} }
fn reload_repositories(&mut self, changed_paths: &[Arc<Path>], fs: &dyn Fs) { fn reload_repositories(&mut self, dot_git_dirs_to_reload: &HashSet<PathBuf>, fs: &dyn Fs) {
let scan_id = self.snapshot.scan_id; let scan_id = self.snapshot.scan_id;
for dot_git_dir in dot_git_dirs_to_reload {
// Find each of the .git directories that contain any of the given paths.
let mut prev_dot_git_dir = None;
for changed_path in changed_paths {
let Some(dot_git_dir) = changed_path
.ancestors()
.find(|ancestor| ancestor.file_name() == Some(&*DOT_GIT))
else {
continue;
};
// Avoid processing the same repository multiple times, if multiple paths
// within it have changed.
if prev_dot_git_dir == Some(dot_git_dir) {
continue;
}
prev_dot_git_dir = Some(dot_git_dir);
// If there is already a repository for this .git directory, reload // If there is already a repository for this .git directory, reload
// the status for all of its files. // the status for all of its files.
let repository = self let repository = self
@ -2430,7 +2413,7 @@ impl BackgroundScannerState {
}); });
match repository { match repository {
None => { None => {
self.build_git_repository(dot_git_dir.into(), fs); self.build_git_repository(Arc::from(dot_git_dir.as_path()), fs);
} }
Some((entry_id, repository)) => { Some((entry_id, repository)) => {
if repository.git_dir_scan_id == scan_id { if repository.git_dir_scan_id == scan_id {
@ -2444,7 +2427,7 @@ impl BackgroundScannerState {
continue; continue;
}; };
log::info!("reload git repository {:?}", dot_git_dir); log::info!("reload git repository {dot_git_dir:?}");
let repository = repository.repo_ptr.lock(); let repository = repository.repo_ptr.lock();
let branch = repository.branch_name(); let branch = repository.branch_name();
repository.reload_index(); repository.reload_index();
@ -2475,7 +2458,9 @@ impl BackgroundScannerState {
ids_to_preserve.insert(work_directory_id); ids_to_preserve.insert(work_directory_id);
} else { } else {
let git_dir_abs_path = snapshot.abs_path().join(&entry.git_dir_path); let git_dir_abs_path = snapshot.abs_path().join(&entry.git_dir_path);
if snapshot.is_abs_path_excluded(&git_dir_abs_path) let git_dir_excluded = snapshot.is_path_excluded(&entry.git_dir_path)
|| snapshot.is_path_excluded(&git_dir_abs_path);
if git_dir_excluded
&& !matches!(smol::block_on(fs.metadata(&git_dir_abs_path)), Ok(None)) && !matches!(smol::block_on(fs.metadata(&git_dir_abs_path)), Ok(None))
{ {
ids_to_preserve.insert(work_directory_id); ids_to_preserve.insert(work_directory_id);
@ -3314,11 +3299,26 @@ impl BackgroundScanner {
}; };
let mut relative_paths = Vec::with_capacity(abs_paths.len()); let mut relative_paths = Vec::with_capacity(abs_paths.len());
let mut dot_git_paths_to_reload = HashSet::default();
abs_paths.sort_unstable(); abs_paths.sort_unstable();
abs_paths.dedup_by(|a, b| a.starts_with(&b)); abs_paths.dedup_by(|a, b| a.starts_with(&b));
abs_paths.retain(|abs_path| { abs_paths.retain(|abs_path| {
let snapshot = &self.state.lock().snapshot; let snapshot = &self.state.lock().snapshot;
{ {
let mut is_git_related = false;
if let Some(dot_git_dir) = abs_path
.ancestors()
.find(|ancestor| ancestor.file_name() == Some(&*DOT_GIT))
{
let dot_git_path = dot_git_dir
.strip_prefix(&root_canonical_path)
.ok()
.map(|path| path.to_path_buf())
.unwrap_or_else(|| dot_git_dir.to_path_buf());
dot_git_paths_to_reload.insert(dot_git_path.to_path_buf());
is_git_related = true;
}
let relative_path: Arc<Path> = let relative_path: Arc<Path> =
if let Ok(path) = abs_path.strip_prefix(&root_canonical_path) { if let Ok(path) = abs_path.strip_prefix(&root_canonical_path) {
path.into() path.into()
@ -3328,23 +3328,30 @@ impl BackgroundScanner {
); );
return false; return false;
}; };
let parent_dir_is_loaded = relative_path.parent().map_or(true, |parent| {
snapshot
.entry_for_path(parent)
.map_or(false, |entry| entry.kind == EntryKind::Dir)
});
if !parent_dir_is_loaded {
log::debug!("ignoring event {relative_path:?} within unloaded directory");
return false;
}
if !is_git_related(&abs_path) { // FS events may come for files which parent directory is excluded, need to check ignore those.
let parent_dir_is_loaded = relative_path.parent().map_or(true, |parent| { let mut path_to_test = abs_path.clone();
snapshot let mut excluded_file_event = snapshot.is_path_excluded(abs_path)
.entry_for_path(parent) || snapshot.is_path_excluded(&relative_path);
.map_or(false, |entry| entry.kind == EntryKind::Dir) while !excluded_file_event && path_to_test.pop() {
}); if snapshot.is_path_excluded(&path_to_test) {
if !parent_dir_is_loaded { excluded_file_event = true;
log::debug!("ignoring event {relative_path:?} within unloaded directory");
return false;
} }
if snapshot.is_abs_path_excluded(abs_path) { }
log::debug!( if excluded_file_event {
"ignoring FS event for path {relative_path:?} within excluded directory" if !is_git_related {
); log::debug!("ignoring FS event for excluded path {relative_path:?}");
return false;
} }
return false;
} }
relative_paths.push(relative_path); relative_paths.push(relative_path);
@ -3352,31 +3359,39 @@ impl BackgroundScanner {
} }
}); });
if relative_paths.is_empty() { if dot_git_paths_to_reload.is_empty() && relative_paths.is_empty() {
return; return;
} }
log::debug!("received fs events {:?}", relative_paths); if !relative_paths.is_empty() {
log::debug!("received fs events {:?}", relative_paths);
let (scan_job_tx, scan_job_rx) = channel::unbounded(); let (scan_job_tx, scan_job_rx) = channel::unbounded();
self.reload_entries_for_paths( self.reload_entries_for_paths(
root_path, root_path,
root_canonical_path, root_canonical_path,
&relative_paths, &relative_paths,
abs_paths, abs_paths,
Some(scan_job_tx.clone()), Some(scan_job_tx.clone()),
) )
.await; .await;
drop(scan_job_tx); drop(scan_job_tx);
self.scan_dirs(false, scan_job_rx).await; self.scan_dirs(false, scan_job_rx).await;
let (scan_job_tx, scan_job_rx) = channel::unbounded(); let (scan_job_tx, scan_job_rx) = channel::unbounded();
self.update_ignore_statuses(scan_job_tx).await; self.update_ignore_statuses(scan_job_tx).await;
self.scan_dirs(false, scan_job_rx).await; self.scan_dirs(false, scan_job_rx).await;
}
{ {
let mut state = self.state.lock(); let mut state = self.state.lock();
state.reload_repositories(&relative_paths, self.fs.as_ref()); if !dot_git_paths_to_reload.is_empty() {
if relative_paths.is_empty() {
state.snapshot.scan_id += 1;
}
log::debug!("reloading repositories: {dot_git_paths_to_reload:?}");
state.reload_repositories(&dot_git_paths_to_reload, self.fs.as_ref());
}
state.snapshot.completed_scan_id = state.snapshot.scan_id; state.snapshot.completed_scan_id = state.snapshot.scan_id;
for (_, entry_id) in mem::take(&mut state.removed_entry_ids) { for (_, entry_id) in mem::take(&mut state.removed_entry_ids) {
state.scanned_dirs.remove(&entry_id); state.scanned_dirs.remove(&entry_id);
@ -3516,7 +3531,7 @@ impl BackgroundScanner {
let state = self.state.lock(); let state = self.state.lock();
let snapshot = &state.snapshot; let snapshot = &state.snapshot;
root_abs_path = snapshot.abs_path().clone(); root_abs_path = snapshot.abs_path().clone();
if snapshot.is_abs_path_excluded(&job.abs_path) { if snapshot.is_path_excluded(&job.abs_path) {
log::error!("skipping excluded directory {:?}", job.path); log::error!("skipping excluded directory {:?}", job.path);
return Ok(()); return Ok(());
} }
@ -3588,7 +3603,7 @@ impl BackgroundScanner {
{ {
let mut state = self.state.lock(); let mut state = self.state.lock();
if state.snapshot.is_abs_path_excluded(&child_abs_path) { if state.snapshot.is_path_excluded(&child_abs_path) {
let relative_path = job.path.join(child_name); let relative_path = job.path.join(child_name);
log::debug!("skipping excluded child entry {relative_path:?}"); log::debug!("skipping excluded child entry {relative_path:?}");
state.remove_path(&relative_path); state.remove_path(&relative_path);
@ -4130,12 +4145,6 @@ impl BackgroundScanner {
} }
} }
fn is_git_related(abs_path: &Path) -> bool {
abs_path
.components()
.any(|c| c.as_os_str() == *DOT_GIT || c.as_os_str() == *GITIGNORE)
}
fn char_bag_for_path(root_char_bag: CharBag, path: &Path) -> CharBag { fn char_bag_for_path(root_char_bag: CharBag, path: &Path) -> CharBag {
let mut result = root_char_bag; let mut result = root_char_bag;
result.extend( result.extend(

View file

@ -990,6 +990,145 @@ async fn test_file_scan_exclusions(cx: &mut TestAppContext) {
}); });
} }
#[gpui::test]
async fn test_fs_events_in_exclusions(cx: &mut TestAppContext) {
init_test(cx);
let dir = temp_tree(json!({
".git": {
"HEAD": "ref: refs/heads/main\n",
"foo": "bar",
},
".gitignore": "**/target\n/node_modules\ntest_output\n",
"target": {
"index": "blah2"
},
"node_modules": {
".DS_Store": "",
"prettier": {
"package.json": "{}",
},
},
"src": {
".DS_Store": "",
"foo": {
"foo.rs": "mod another;\n",
"another.rs": "// another",
},
"bar": {
"bar.rs": "// bar",
},
"lib.rs": "mod foo;\nmod bar;\n",
},
".DS_Store": "",
}));
cx.update(|cx| {
cx.update_global::<SettingsStore, _, _>(|store, cx| {
store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
project_settings.file_scan_exclusions = Some(vec![
"**/.git".to_string(),
"node_modules/".to_string(),
"build_output".to_string(),
]);
});
});
});
let tree = Worktree::local(
build_client(cx),
dir.path(),
true,
Arc::new(RealFs),
Default::default(),
&mut cx.to_async(),
)
.await
.unwrap();
cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
.await;
tree.flush_fs_events(cx).await;
tree.read_with(cx, |tree, _| {
check_worktree_entries(
tree,
&[
".git/HEAD",
".git/foo",
"node_modules/.DS_Store",
"node_modules/prettier",
"node_modules/prettier/package.json",
],
&["target", "node_modules"],
&[
".DS_Store",
"src/.DS_Store",
"src/lib.rs",
"src/foo/foo.rs",
"src/foo/another.rs",
"src/bar/bar.rs",
".gitignore",
],
)
});
let new_excluded_dir = dir.path().join("build_output");
let new_ignored_dir = dir.path().join("test_output");
std::fs::create_dir_all(&new_excluded_dir)
.unwrap_or_else(|e| panic!("Failed to create a {new_excluded_dir:?} directory: {e}"));
std::fs::create_dir_all(&new_ignored_dir)
.unwrap_or_else(|e| panic!("Failed to create a {new_ignored_dir:?} directory: {e}"));
let node_modules_dir = dir.path().join("node_modules");
let dot_git_dir = dir.path().join(".git");
let src_dir = dir.path().join("src");
for existing_dir in [&node_modules_dir, &dot_git_dir, &src_dir] {
assert!(
existing_dir.is_dir(),
"Expect {existing_dir:?} to be present in the FS already"
);
}
for directory_for_new_file in [
new_excluded_dir,
new_ignored_dir,
node_modules_dir,
dot_git_dir,
src_dir,
] {
std::fs::write(directory_for_new_file.join("new_file"), "new file contents")
.unwrap_or_else(|e| {
panic!("Failed to create in {directory_for_new_file:?} a new file: {e}")
});
}
tree.flush_fs_events(cx).await;
tree.read_with(cx, |tree, _| {
check_worktree_entries(
tree,
&[
".git/HEAD",
".git/foo",
".git/new_file",
"node_modules/.DS_Store",
"node_modules/prettier",
"node_modules/prettier/package.json",
"node_modules/new_file",
"build_output",
"build_output/new_file",
"test_output/new_file",
],
&["target", "node_modules", "test_output"],
&[
".DS_Store",
"src/.DS_Store",
"src/lib.rs",
"src/foo/foo.rs",
"src/foo/another.rs",
"src/bar/bar.rs",
"src/new_file",
".gitignore",
],
)
});
}
#[gpui::test(iterations = 30)] #[gpui::test(iterations = 30)]
async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) { async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) {
init_test(cx); init_test(cx);

View file

@ -2667,8 +2667,9 @@ impl Project {
})?; })?;
for (adapter, server) in servers { for (adapter, server) in servers {
let workspace_config = let workspace_config = cx
cx.update(|cx| adapter.workspace_configuration(cx))?.await; .update(|cx| adapter.workspace_configuration(server.root_path(), cx))?
.await;
server server
.notify::<lsp::notification::DidChangeConfiguration>( .notify::<lsp::notification::DidChangeConfiguration>(
lsp::DidChangeConfigurationParams { lsp::DidChangeConfigurationParams {
@ -2777,7 +2778,7 @@ impl Project {
stderr_capture.clone(), stderr_capture.clone(),
language.clone(), language.clone(),
adapter.clone(), adapter.clone(),
worktree_path, Arc::clone(&worktree_path),
ProjectLspAdapterDelegate::new(self, cx), ProjectLspAdapterDelegate::new(self, cx),
cx, cx,
) { ) {
@ -2809,6 +2810,7 @@ impl Project {
cx.spawn(move |this, mut cx| async move { cx.spawn(move |this, mut cx| async move {
let result = Self::setup_and_insert_language_server( let result = Self::setup_and_insert_language_server(
this.clone(), this.clone(),
&worktree_path,
initialization_options, initialization_options,
pending_server, pending_server,
adapter.clone(), adapter.clone(),
@ -2929,6 +2931,7 @@ impl Project {
async fn setup_and_insert_language_server( async fn setup_and_insert_language_server(
this: WeakModel<Self>, this: WeakModel<Self>,
worktree_path: &Path,
initialization_options: Option<serde_json::Value>, initialization_options: Option<serde_json::Value>,
pending_server: PendingLanguageServer, pending_server: PendingLanguageServer,
adapter: Arc<CachedLspAdapter>, adapter: Arc<CachedLspAdapter>,
@ -2941,6 +2944,7 @@ impl Project {
this.clone(), this.clone(),
initialization_options, initialization_options,
pending_server, pending_server,
worktree_path,
adapter.clone(), adapter.clone(),
server_id, server_id,
cx, cx,
@ -2970,11 +2974,14 @@ impl Project {
this: WeakModel<Self>, this: WeakModel<Self>,
initialization_options: Option<serde_json::Value>, initialization_options: Option<serde_json::Value>,
pending_server: PendingLanguageServer, pending_server: PendingLanguageServer,
worktree_path: &Path,
adapter: Arc<CachedLspAdapter>, adapter: Arc<CachedLspAdapter>,
server_id: LanguageServerId, server_id: LanguageServerId,
cx: &mut AsyncAppContext, cx: &mut AsyncAppContext,
) -> Result<Arc<LanguageServer>> { ) -> Result<Arc<LanguageServer>> {
let workspace_config = cx.update(|cx| adapter.workspace_configuration(cx))?.await; let workspace_config = cx
.update(|cx| adapter.workspace_configuration(worktree_path, cx))?
.await;
let language_server = pending_server.task.await?; let language_server = pending_server.task.await?;
language_server language_server
@ -3003,11 +3010,14 @@ impl Project {
language_server language_server
.on_request::<lsp::request::WorkspaceConfiguration, _, _>({ .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
let adapter = adapter.clone(); let adapter = adapter.clone();
let worktree_path = worktree_path.to_path_buf();
move |params, cx| { move |params, cx| {
let adapter = adapter.clone(); let adapter = adapter.clone();
let worktree_path = worktree_path.clone();
async move { async move {
let workspace_config = let workspace_config = cx
cx.update(|cx| adapter.workspace_configuration(cx))?.await; .update(|cx| adapter.workspace_configuration(&worktree_path, cx))?
.await;
Ok(params Ok(params
.items .items
.into_iter() .into_iter()

View file

@ -2222,7 +2222,7 @@ impl LocalSnapshot {
paths paths
} }
fn is_abs_path_excluded(&self, abs_path: &Path) -> bool { fn is_path_excluded(&self, abs_path: &Path) -> bool {
self.file_scan_exclusions self.file_scan_exclusions
.iter() .iter()
.any(|exclude_matcher| exclude_matcher.is_match(abs_path)) .any(|exclude_matcher| exclude_matcher.is_match(abs_path))
@ -2395,26 +2395,10 @@ impl BackgroundScannerState {
self.snapshot.check_invariants(false); self.snapshot.check_invariants(false);
} }
fn reload_repositories(&mut self, changed_paths: &[Arc<Path>], fs: &dyn Fs) { fn reload_repositories(&mut self, dot_git_dirs_to_reload: &HashSet<PathBuf>, fs: &dyn Fs) {
let scan_id = self.snapshot.scan_id; let scan_id = self.snapshot.scan_id;
// Find each of the .git directories that contain any of the given paths. for dot_git_dir in dot_git_dirs_to_reload {
let mut prev_dot_git_dir = None;
for changed_path in changed_paths {
let Some(dot_git_dir) = changed_path
.ancestors()
.find(|ancestor| ancestor.file_name() == Some(&*DOT_GIT))
else {
continue;
};
// Avoid processing the same repository multiple times, if multiple paths
// within it have changed.
if prev_dot_git_dir == Some(dot_git_dir) {
continue;
}
prev_dot_git_dir = Some(dot_git_dir);
// If there is already a repository for this .git directory, reload // If there is already a repository for this .git directory, reload
// the status for all of its files. // the status for all of its files.
let repository = self let repository = self
@ -2426,7 +2410,7 @@ impl BackgroundScannerState {
}); });
match repository { match repository {
None => { None => {
self.build_git_repository(dot_git_dir.into(), fs); self.build_git_repository(Arc::from(dot_git_dir.as_path()), fs);
} }
Some((entry_id, repository)) => { Some((entry_id, repository)) => {
if repository.git_dir_scan_id == scan_id { if repository.git_dir_scan_id == scan_id {
@ -2440,7 +2424,7 @@ impl BackgroundScannerState {
continue; continue;
}; };
log::info!("reload git repository {:?}", dot_git_dir); log::info!("reload git repository {dot_git_dir:?}");
let repository = repository.repo_ptr.lock(); let repository = repository.repo_ptr.lock();
let branch = repository.branch_name(); let branch = repository.branch_name();
repository.reload_index(); repository.reload_index();
@ -2471,7 +2455,9 @@ impl BackgroundScannerState {
ids_to_preserve.insert(work_directory_id); ids_to_preserve.insert(work_directory_id);
} else { } else {
let git_dir_abs_path = snapshot.abs_path().join(&entry.git_dir_path); let git_dir_abs_path = snapshot.abs_path().join(&entry.git_dir_path);
if snapshot.is_abs_path_excluded(&git_dir_abs_path) let git_dir_excluded = snapshot.is_path_excluded(&entry.git_dir_path)
|| snapshot.is_path_excluded(&git_dir_abs_path);
if git_dir_excluded
&& !matches!(smol::block_on(fs.metadata(&git_dir_abs_path)), Ok(None)) && !matches!(smol::block_on(fs.metadata(&git_dir_abs_path)), Ok(None))
{ {
ids_to_preserve.insert(work_directory_id); ids_to_preserve.insert(work_directory_id);
@ -3303,11 +3289,26 @@ impl BackgroundScanner {
}; };
let mut relative_paths = Vec::with_capacity(abs_paths.len()); let mut relative_paths = Vec::with_capacity(abs_paths.len());
let mut dot_git_paths_to_reload = HashSet::default();
abs_paths.sort_unstable(); abs_paths.sort_unstable();
abs_paths.dedup_by(|a, b| a.starts_with(&b)); abs_paths.dedup_by(|a, b| a.starts_with(&b));
abs_paths.retain(|abs_path| { abs_paths.retain(|abs_path| {
let snapshot = &self.state.lock().snapshot; let snapshot = &self.state.lock().snapshot;
{ {
let mut is_git_related = false;
if let Some(dot_git_dir) = abs_path
.ancestors()
.find(|ancestor| ancestor.file_name() == Some(&*DOT_GIT))
{
let dot_git_path = dot_git_dir
.strip_prefix(&root_canonical_path)
.ok()
.map(|path| path.to_path_buf())
.unwrap_or_else(|| dot_git_dir.to_path_buf());
dot_git_paths_to_reload.insert(dot_git_path.to_path_buf());
is_git_related = true;
}
let relative_path: Arc<Path> = let relative_path: Arc<Path> =
if let Ok(path) = abs_path.strip_prefix(&root_canonical_path) { if let Ok(path) = abs_path.strip_prefix(&root_canonical_path) {
path.into() path.into()
@ -3318,22 +3319,30 @@ impl BackgroundScanner {
return false; return false;
}; };
if !is_git_related(&abs_path) { let parent_dir_is_loaded = relative_path.parent().map_or(true, |parent| {
let parent_dir_is_loaded = relative_path.parent().map_or(true, |parent| { snapshot
snapshot .entry_for_path(parent)
.entry_for_path(parent) .map_or(false, |entry| entry.kind == EntryKind::Dir)
.map_or(false, |entry| entry.kind == EntryKind::Dir) });
}); if !parent_dir_is_loaded {
if !parent_dir_is_loaded { log::debug!("ignoring event {relative_path:?} within unloaded directory");
log::debug!("ignoring event {relative_path:?} within unloaded directory"); return false;
return false; }
// FS events may come for files which parent directory is excluded, need to check ignore those.
let mut path_to_test = abs_path.clone();
let mut excluded_file_event = snapshot.is_path_excluded(abs_path)
|| snapshot.is_path_excluded(&relative_path);
while !excluded_file_event && path_to_test.pop() {
if snapshot.is_path_excluded(&path_to_test) {
excluded_file_event = true;
} }
if snapshot.is_abs_path_excluded(abs_path) { }
log::debug!( if excluded_file_event {
"ignoring FS event for path {relative_path:?} within excluded directory" if !is_git_related {
); log::debug!("ignoring FS event for excluded path {relative_path:?}");
return false;
} }
return false;
} }
relative_paths.push(relative_path); relative_paths.push(relative_path);
@ -3341,31 +3350,39 @@ impl BackgroundScanner {
} }
}); });
if relative_paths.is_empty() { if dot_git_paths_to_reload.is_empty() && relative_paths.is_empty() {
return; return;
} }
log::debug!("received fs events {:?}", relative_paths); if !relative_paths.is_empty() {
log::debug!("received fs events {:?}", relative_paths);
let (scan_job_tx, scan_job_rx) = channel::unbounded(); let (scan_job_tx, scan_job_rx) = channel::unbounded();
self.reload_entries_for_paths( self.reload_entries_for_paths(
root_path, root_path,
root_canonical_path, root_canonical_path,
&relative_paths, &relative_paths,
abs_paths, abs_paths,
Some(scan_job_tx.clone()), Some(scan_job_tx.clone()),
) )
.await; .await;
drop(scan_job_tx); drop(scan_job_tx);
self.scan_dirs(false, scan_job_rx).await; self.scan_dirs(false, scan_job_rx).await;
let (scan_job_tx, scan_job_rx) = channel::unbounded(); let (scan_job_tx, scan_job_rx) = channel::unbounded();
self.update_ignore_statuses(scan_job_tx).await; self.update_ignore_statuses(scan_job_tx).await;
self.scan_dirs(false, scan_job_rx).await; self.scan_dirs(false, scan_job_rx).await;
}
{ {
let mut state = self.state.lock(); let mut state = self.state.lock();
state.reload_repositories(&relative_paths, self.fs.as_ref()); if !dot_git_paths_to_reload.is_empty() {
if relative_paths.is_empty() {
state.snapshot.scan_id += 1;
}
log::debug!("reloading repositories: {dot_git_paths_to_reload:?}");
state.reload_repositories(&dot_git_paths_to_reload, self.fs.as_ref());
}
state.snapshot.completed_scan_id = state.snapshot.scan_id; state.snapshot.completed_scan_id = state.snapshot.scan_id;
for (_, entry_id) in mem::take(&mut state.removed_entry_ids) { for (_, entry_id) in mem::take(&mut state.removed_entry_ids) {
state.scanned_dirs.remove(&entry_id); state.scanned_dirs.remove(&entry_id);
@ -3505,7 +3522,7 @@ impl BackgroundScanner {
let state = self.state.lock(); let state = self.state.lock();
let snapshot = &state.snapshot; let snapshot = &state.snapshot;
root_abs_path = snapshot.abs_path().clone(); root_abs_path = snapshot.abs_path().clone();
if snapshot.is_abs_path_excluded(&job.abs_path) { if snapshot.is_path_excluded(&job.abs_path) {
log::error!("skipping excluded directory {:?}", job.path); log::error!("skipping excluded directory {:?}", job.path);
return Ok(()); return Ok(());
} }
@ -3577,7 +3594,7 @@ impl BackgroundScanner {
{ {
let mut state = self.state.lock(); let mut state = self.state.lock();
if state.snapshot.is_abs_path_excluded(&child_abs_path) { if state.snapshot.is_path_excluded(&child_abs_path) {
let relative_path = job.path.join(child_name); let relative_path = job.path.join(child_name);
log::debug!("skipping excluded child entry {relative_path:?}"); log::debug!("skipping excluded child entry {relative_path:?}");
state.remove_path(&relative_path); state.remove_path(&relative_path);
@ -4119,12 +4136,6 @@ impl BackgroundScanner {
} }
} }
fn is_git_related(abs_path: &Path) -> bool {
abs_path
.components()
.any(|c| c.as_os_str() == *DOT_GIT || c.as_os_str() == *GITIGNORE)
}
fn char_bag_for_path(root_char_bag: CharBag, path: &Path) -> CharBag { fn char_bag_for_path(root_char_bag: CharBag, path: &Path) -> CharBag {
let mut result = root_char_bag; let mut result = root_char_bag;
result.extend( result.extend(

View file

@ -992,6 +992,146 @@ async fn test_file_scan_exclusions(cx: &mut TestAppContext) {
}); });
} }
#[gpui::test]
async fn test_fs_events_in_exclusions(cx: &mut TestAppContext) {
init_test(cx);
cx.executor().allow_parking();
let dir = temp_tree(json!({
".git": {
"HEAD": "ref: refs/heads/main\n",
"foo": "bar",
},
".gitignore": "**/target\n/node_modules\ntest_output\n",
"target": {
"index": "blah2"
},
"node_modules": {
".DS_Store": "",
"prettier": {
"package.json": "{}",
},
},
"src": {
".DS_Store": "",
"foo": {
"foo.rs": "mod another;\n",
"another.rs": "// another",
},
"bar": {
"bar.rs": "// bar",
},
"lib.rs": "mod foo;\nmod bar;\n",
},
".DS_Store": "",
}));
cx.update(|cx| {
cx.update_global::<SettingsStore, _>(|store, cx| {
store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
project_settings.file_scan_exclusions = Some(vec![
"**/.git".to_string(),
"node_modules/".to_string(),
"build_output".to_string(),
]);
});
});
});
let tree = Worktree::local(
build_client(cx),
dir.path(),
true,
Arc::new(RealFs),
Default::default(),
&mut cx.to_async(),
)
.await
.unwrap();
cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
.await;
tree.flush_fs_events(cx).await;
tree.read_with(cx, |tree, _| {
check_worktree_entries(
tree,
&[
".git/HEAD",
".git/foo",
"node_modules/.DS_Store",
"node_modules/prettier",
"node_modules/prettier/package.json",
],
&["target", "node_modules"],
&[
".DS_Store",
"src/.DS_Store",
"src/lib.rs",
"src/foo/foo.rs",
"src/foo/another.rs",
"src/bar/bar.rs",
".gitignore",
],
)
});
let new_excluded_dir = dir.path().join("build_output");
let new_ignored_dir = dir.path().join("test_output");
std::fs::create_dir_all(&new_excluded_dir)
.unwrap_or_else(|e| panic!("Failed to create a {new_excluded_dir:?} directory: {e}"));
std::fs::create_dir_all(&new_ignored_dir)
.unwrap_or_else(|e| panic!("Failed to create a {new_ignored_dir:?} directory: {e}"));
let node_modules_dir = dir.path().join("node_modules");
let dot_git_dir = dir.path().join(".git");
let src_dir = dir.path().join("src");
for existing_dir in [&node_modules_dir, &dot_git_dir, &src_dir] {
assert!(
existing_dir.is_dir(),
"Expect {existing_dir:?} to be present in the FS already"
);
}
for directory_for_new_file in [
new_excluded_dir,
new_ignored_dir,
node_modules_dir,
dot_git_dir,
src_dir,
] {
std::fs::write(directory_for_new_file.join("new_file"), "new file contents")
.unwrap_or_else(|e| {
panic!("Failed to create in {directory_for_new_file:?} a new file: {e}")
});
}
tree.flush_fs_events(cx).await;
tree.read_with(cx, |tree, _| {
check_worktree_entries(
tree,
&[
".git/HEAD",
".git/foo",
".git/new_file",
"node_modules/.DS_Store",
"node_modules/prettier",
"node_modules/prettier/package.json",
"node_modules/new_file",
"build_output",
"build_output/new_file",
"test_output/new_file",
],
&["target", "node_modules", "test_output"],
&[
".DS_Store",
"src/.DS_Store",
"src/lib.rs",
"src/foo/foo.rs",
"src/foo/another.rs",
"src/bar/bar.rs",
"src/new_file",
".gitignore",
],
)
});
}
#[gpui::test(iterations = 30)] #[gpui::test(iterations = 30)]
async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) { async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) {
init_test(cx); init_test(cx);
@ -1056,7 +1196,7 @@ async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) {
async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) { async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
init_test(cx); init_test(cx);
cx.executor().allow_parking(); cx.executor().allow_parking();
let client_fake = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx)); let client_fake = cx.update(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
let fs_fake = FakeFs::new(cx.background_executor.clone()); let fs_fake = FakeFs::new(cx.background_executor.clone());
fs_fake fs_fake
@ -1096,7 +1236,7 @@ async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
assert!(tree.entry_for_path("a/b/").unwrap().is_dir()); assert!(tree.entry_for_path("a/b/").unwrap().is_dir());
}); });
let client_real = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx)); let client_real = cx.update(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
let fs_real = Arc::new(RealFs); let fs_real = Arc::new(RealFs);
let temp_root = temp_tree(json!({ let temp_root = temp_tree(json!({
@ -2181,7 +2321,7 @@ async fn test_propagate_git_statuses(cx: &mut TestAppContext) {
fn build_client(cx: &mut TestAppContext) -> Arc<Client> { fn build_client(cx: &mut TestAppContext) -> Arc<Client> {
let http_client = FakeHttpClient::with_404_response(); let http_client = FakeHttpClient::with_404_response();
cx.read(|cx| Client::new(http_client, cx)) cx.update(|cx| Client::new(http_client, cx))
} }
#[track_caller] #[track_caller]

View file

@ -1627,9 +1627,17 @@ impl View for ProjectPanel {
} }
} }
fn update_keymap_context(&self, keymap: &mut KeymapContext, _: &AppContext) { fn update_keymap_context(&self, keymap: &mut KeymapContext, cx: &AppContext) {
Self::reset_to_default_keymap_context(keymap); Self::reset_to_default_keymap_context(keymap);
keymap.add_identifier("menu"); keymap.add_identifier("menu");
if let Some(window) = cx.active_window() {
window.read_with(cx, |cx| {
if !self.filename_editor.is_focused(cx) {
keymap.add_identifier("not_editing");
};
});
}
} }
fn focus_in(&mut self, _: gpui::AnyViewHandle, cx: &mut ViewContext<Self>) { fn focus_in(&mut self, _: gpui::AnyViewHandle, cx: &mut ViewContext<Self>) {

View file

@ -1659,13 +1659,13 @@ fn elixir_lang() -> Arc<Language> {
target: (identifier) @name) target: (identifier) @name)
operator: "when") operator: "when")
]) ])
(#match? @name "^(def|defp|defdelegate|defguard|defguardp|defmacro|defmacrop|defn|defnp)$")) @item (#any-match? @name "^(def|defp|defdelegate|defguard|defguardp|defmacro|defmacrop|defn|defnp)$")) @item
) )
(call (call
target: (identifier) @name target: (identifier) @name
(arguments (alias) @name) (arguments (alias) @name)
(#match? @name "^(defmodule|defprotocol)$")) @item (#any-match? @name "^(defmodule|defprotocol)$")) @item
"#, "#,
) )
.unwrap(), .unwrap(),

View file

@ -3,7 +3,7 @@ authors = ["Nathan Sobo <nathansobo@gmail.com>"]
description = "The fast, collaborative code editor." description = "The fast, collaborative code editor."
edition = "2021" edition = "2021"
name = "zed" name = "zed"
version = "0.114.0" version = "0.114.4"
publish = false publish = false
[lib] [lib]

View file

@ -1 +1 @@
dev stable

View file

@ -18,10 +18,10 @@
target: (identifier) @name) target: (identifier) @name)
operator: "when") operator: "when")
]) ])
(#match? @name "^(def|defp|defdelegate|defguard|defguardp|defmacro|defmacrop|defn|defnp)$")) @item (#any-match? @name "^(def|defp|defdelegate|defguard|defguardp|defmacro|defmacrop|defn|defnp)$")) @item
) )
(call (call
target: (identifier) @name target: (identifier) @name
(arguments (alias) @name) (arguments (alias) @name)
(#match? @name "^(defmodule|defprotocol)$")) @item (#any-match? @name "^(defmodule|defprotocol)$")) @item

View file

@ -105,6 +105,7 @@ impl LspAdapter for JsonLspAdapter {
fn workspace_configuration( fn workspace_configuration(
&self, &self,
_workspace_root: &Path,
cx: &mut AppContext, cx: &mut AppContext,
) -> BoxFuture<'static, serde_json::Value> { ) -> BoxFuture<'static, serde_json::Value> {
let action_names = cx.all_action_names().collect::<Vec<_>>(); let action_names = cx.all_action_names().collect::<Vec<_>>();

View file

@ -29,7 +29,6 @@ pub struct IntelephenseLspAdapter {
impl IntelephenseLspAdapter { impl IntelephenseLspAdapter {
const SERVER_PATH: &'static str = "node_modules/intelephense/lib/intelephense.js"; const SERVER_PATH: &'static str = "node_modules/intelephense/lib/intelephense.js";
#[allow(unused)]
pub fn new(node: Arc<dyn NodeRuntime>) -> Self { pub fn new(node: Arc<dyn NodeRuntime>) -> Self {
Self { node } Self { node }
} }

View file

@ -107,7 +107,11 @@ impl LspAdapter for TailwindLspAdapter {
})) }))
} }
fn workspace_configuration(&self, _: &mut AppContext) -> BoxFuture<'static, Value> { fn workspace_configuration(
&self,
_workspace_root: &Path,
_: &mut AppContext,
) -> BoxFuture<'static, Value> {
future::ready(json!({ future::ready(json!({
"tailwindCSS": { "tailwindCSS": {
"emmetCompletions": true, "emmetCompletions": true,

View file

@ -205,7 +205,6 @@ pub struct EsLintLspAdapter {
impl EsLintLspAdapter { impl EsLintLspAdapter {
const SERVER_PATH: &'static str = "vscode-eslint/server/out/eslintServer.js"; const SERVER_PATH: &'static str = "vscode-eslint/server/out/eslintServer.js";
#[allow(unused)]
pub fn new(node: Arc<dyn NodeRuntime>) -> Self { pub fn new(node: Arc<dyn NodeRuntime>) -> Self {
EsLintLspAdapter { node } EsLintLspAdapter { node }
} }
@ -213,13 +212,23 @@ impl EsLintLspAdapter {
#[async_trait] #[async_trait]
impl LspAdapter for EsLintLspAdapter { impl LspAdapter for EsLintLspAdapter {
fn workspace_configuration(&self, _: &mut AppContext) -> BoxFuture<'static, Value> { fn workspace_configuration(
&self,
workspace_root: &Path,
_: &mut AppContext,
) -> BoxFuture<'static, Value> {
future::ready(json!({ future::ready(json!({
"": { "": {
"validate": "on", "validate": "on",
"rulesCustomizations": [], "rulesCustomizations": [],
"run": "onType", "run": "onType",
"nodePath": null, "nodePath": null,
"workingDirectory": {"mode": "auto"},
"workspaceFolder": {
"uri": workspace_root,
"name": workspace_root.file_name()
.unwrap_or_else(|| workspace_root.as_os_str()),
},
} }
})) }))
.boxed() .boxed()

View file

@ -93,7 +93,11 @@ impl LspAdapter for YamlLspAdapter {
) -> Option<LanguageServerBinary> { ) -> Option<LanguageServerBinary> {
get_cached_server_binary(container_dir, &*self.node).await get_cached_server_binary(container_dir, &*self.node).await
} }
fn workspace_configuration(&self, cx: &mut AppContext) -> BoxFuture<'static, Value> { fn workspace_configuration(
&self,
_workspace_root: &Path,
cx: &mut AppContext,
) -> BoxFuture<'static, Value> {
let tab_size = all_language_settings(None, cx) let tab_size = all_language_settings(None, cx)
.language(Some("YAML")) .language(Some("YAML"))
.tab_size; .tab_size;

View file

@ -65,7 +65,8 @@ fn main() {
log::info!("========== starting zed =========="); log::info!("========== starting zed ==========");
let mut app = gpui::App::new(Assets).unwrap(); let mut app = gpui::App::new(Assets).unwrap();
let installation_id = app.background().block(installation_id()).ok(); let (installation_id, existing_installation_id_found) =
app.background().block(installation_id()).ok().unzip();
let session_id = Uuid::new_v4().to_string(); let session_id = Uuid::new_v4().to_string();
init_panic_hook(&app, installation_id.clone(), session_id.clone()); init_panic_hook(&app, installation_id.clone(), session_id.clone());
@ -166,6 +167,14 @@ fn main() {
.detach(); .detach();
client.telemetry().start(installation_id, session_id, cx); client.telemetry().start(installation_id, session_id, cx);
let telemetry_settings = *settings::get::<TelemetrySettings>(cx);
let event_operation = match existing_installation_id_found {
Some(false) => "first open",
_ => "open",
};
client
.telemetry()
.report_app_event(telemetry_settings, event_operation);
let app_state = Arc::new(AppState { let app_state = Arc::new(AppState {
languages, languages,
@ -317,11 +326,11 @@ async fn authenticate(client: Arc<Client>, cx: &AsyncAppContext) -> Result<()> {
Ok::<_, anyhow::Error>(()) Ok::<_, anyhow::Error>(())
} }
async fn installation_id() -> Result<String> { async fn installation_id() -> Result<(String, bool)> {
let legacy_key_name = "device_id"; let legacy_key_name = "device_id";
if let Ok(Some(installation_id)) = KEY_VALUE_STORE.read_kvp(legacy_key_name) { if let Ok(Some(installation_id)) = KEY_VALUE_STORE.read_kvp(legacy_key_name) {
Ok(installation_id) Ok((installation_id, true))
} else { } else {
let installation_id = Uuid::new_v4().to_string(); let installation_id = Uuid::new_v4().to_string();
@ -329,7 +338,7 @@ async fn installation_id() -> Result<String> {
.write_kvp(legacy_key_name.to_string(), installation_id.clone()) .write_kvp(legacy_key_name.to_string(), installation_id.clone())
.await?; .await?;
Ok(installation_id) Ok((installation_id, false))
} }
} }

View file

@ -105,6 +105,7 @@ impl LspAdapter for JsonLspAdapter {
fn workspace_configuration( fn workspace_configuration(
&self, &self,
_workspace_root: &Path,
cx: &mut AppContext, cx: &mut AppContext,
) -> BoxFuture<'static, serde_json::Value> { ) -> BoxFuture<'static, serde_json::Value> {
let action_names = cx.all_action_names(); let action_names = cx.all_action_names();

View file

@ -29,7 +29,6 @@ pub struct IntelephenseLspAdapter {
impl IntelephenseLspAdapter { impl IntelephenseLspAdapter {
const SERVER_PATH: &'static str = "node_modules/intelephense/lib/intelephense.js"; const SERVER_PATH: &'static str = "node_modules/intelephense/lib/intelephense.js";
#[allow(unused)]
pub fn new(node: Arc<dyn NodeRuntime>) -> Self { pub fn new(node: Arc<dyn NodeRuntime>) -> Self {
Self { node } Self { node }
} }

View file

@ -107,7 +107,11 @@ impl LspAdapter for TailwindLspAdapter {
})) }))
} }
fn workspace_configuration(&self, _: &mut AppContext) -> BoxFuture<'static, Value> { fn workspace_configuration(
&self,
_workspace_root: &Path,
_: &mut AppContext,
) -> BoxFuture<'static, Value> {
future::ready(json!({ future::ready(json!({
"tailwindCSS": { "tailwindCSS": {
"emmetCompletions": true, "emmetCompletions": true,

View file

@ -205,7 +205,6 @@ pub struct EsLintLspAdapter {
impl EsLintLspAdapter { impl EsLintLspAdapter {
const SERVER_PATH: &'static str = "vscode-eslint/server/out/eslintServer.js"; const SERVER_PATH: &'static str = "vscode-eslint/server/out/eslintServer.js";
#[allow(unused)]
pub fn new(node: Arc<dyn NodeRuntime>) -> Self { pub fn new(node: Arc<dyn NodeRuntime>) -> Self {
EsLintLspAdapter { node } EsLintLspAdapter { node }
} }
@ -213,13 +212,23 @@ impl EsLintLspAdapter {
#[async_trait] #[async_trait]
impl LspAdapter for EsLintLspAdapter { impl LspAdapter for EsLintLspAdapter {
fn workspace_configuration(&self, _: &mut AppContext) -> BoxFuture<'static, Value> { fn workspace_configuration(
&self,
workspace_root: &Path,
_: &mut AppContext,
) -> BoxFuture<'static, Value> {
future::ready(json!({ future::ready(json!({
"": { "": {
"validate": "on", "validate": "on",
"rulesCustomizations": [], "rulesCustomizations": [],
"run": "onType", "run": "onType",
"nodePath": null, "nodePath": null,
"workingDirectory": {"mode": "auto"},
"workspaceFolder": {
"uri": workspace_root,
"name": workspace_root.file_name()
.unwrap_or_else(|| workspace_root.as_os_str()),
},
} }
})) }))
.boxed() .boxed()

View file

@ -93,7 +93,11 @@ impl LspAdapter for YamlLspAdapter {
) -> Option<LanguageServerBinary> { ) -> Option<LanguageServerBinary> {
get_cached_server_binary(container_dir, &*self.node).await get_cached_server_binary(container_dir, &*self.node).await
} }
fn workspace_configuration(&self, cx: &mut AppContext) -> BoxFuture<'static, Value> { fn workspace_configuration(
&self,
_workspace_root: &Path,
cx: &mut AppContext,
) -> BoxFuture<'static, Value> {
let tab_size = all_language_settings(None, cx) let tab_size = all_language_settings(None, cx)
.language(Some("YAML")) .language(Some("YAML"))
.tab_size; .tab_size;

View file

@ -71,7 +71,11 @@ fn main() {
log::info!("========== starting zed =========="); log::info!("========== starting zed ==========");
let app = App::production(Arc::new(Assets)); let app = App::production(Arc::new(Assets));
let installation_id = app.background_executor().block(installation_id()).ok(); let (installation_id, existing_installation_id_found) = app
.background_executor()
.block(installation_id())
.ok()
.unzip();
let session_id = Uuid::new_v4().to_string(); let session_id = Uuid::new_v4().to_string();
init_panic_hook(&app, installation_id.clone(), session_id.clone()); init_panic_hook(&app, installation_id.clone(), session_id.clone());
@ -173,6 +177,14 @@ fn main() {
// .detach(); // .detach();
client.telemetry().start(installation_id, session_id, cx); client.telemetry().start(installation_id, session_id, cx);
let telemetry_settings = *client::TelemetrySettings::get_global(cx);
let event_operation = match existing_installation_id_found {
Some(false) => "first open",
_ => "open",
};
client
.telemetry()
.report_app_event(telemetry_settings, event_operation);
let app_state = Arc::new(AppState { let app_state = Arc::new(AppState {
languages, languages,
@ -333,11 +345,11 @@ fn main() {
// Ok::<_, anyhow::Error>(()) // Ok::<_, anyhow::Error>(())
// } // }
async fn installation_id() -> Result<String> { async fn installation_id() -> Result<(String, bool)> {
let legacy_key_name = "device_id"; let legacy_key_name = "device_id";
if let Ok(Some(installation_id)) = KEY_VALUE_STORE.read_kvp(legacy_key_name) { if let Ok(Some(installation_id)) = KEY_VALUE_STORE.read_kvp(legacy_key_name) {
Ok(installation_id) Ok((installation_id, true))
} else { } else {
let installation_id = Uuid::new_v4().to_string(); let installation_id = Uuid::new_v4().to_string();
@ -345,7 +357,7 @@ async fn installation_id() -> Result<String> {
.write_kvp(legacy_key_name.to_string(), installation_id.clone()) .write_kvp(legacy_key_name.to_string(), installation_id.clone())
.await?; .await?;
Ok(installation_id) Ok((installation_id, false))
} }
} }