diff --git a/crates/buffer_diff/src/buffer_diff.rs b/crates/buffer_diff/src/buffer_diff.rs index ee09fda46e..ce9bda62b4 100644 --- a/crates/buffer_diff/src/buffer_diff.rs +++ b/crates/buffer_diff/src/buffer_diff.rs @@ -4,6 +4,7 @@ use gpui::{App, AppContext as _, AsyncApp, Context, Entity, EventEmitter, Task, use language::{Language, LanguageRegistry}; use rope::Rope; use std::{ + cell::Ref, cmp::Ordering, future::Future, iter, @@ -1109,9 +1110,11 @@ impl BufferDiff { let unstaged_counterpart = self .secondary_diff .as_ref() - .map(|diff| &diff.read(cx).inner); - self.inner - .hunks_intersecting_range(range, buffer_snapshot, unstaged_counterpart) + .map(|diff| Ref::map(diff.read(cx), |d| &d.inner)); + // self.inner + // .hunks_intersecting_range(range, buffer_snapshot, unstaged_counterpart) + // todo! Figure out what to do here + None.into_iter() } pub fn hunks_intersecting_range_rev<'a>( diff --git a/crates/channel/src/channel_store.rs b/crates/channel/src/channel_store.rs index b7ba811421..42da573db8 100644 --- a/crates/channel/src/channel_store.rs +++ b/crates/channel/src/channel_store.rs @@ -976,7 +976,8 @@ impl ChannelStore { if let OpenEntityHandle::Open(buffer) = buffer { if let Some(buffer) = buffer.upgrade() { let channel_buffer = buffer.read(cx); - let buffer = channel_buffer.buffer().read(cx); + let buffer = channel_buffer.buffer(); + let buffer = buffer.read(cx); buffer_versions.push(proto::ChannelBufferVersion { channel_id: channel_buffer.channel_id.0, epoch: channel_buffer.epoch(), diff --git a/crates/eval/src/example.rs b/crates/eval/src/example.rs index 904eca83e6..a7bbe959aa 100644 --- a/crates/eval/src/example.rs +++ b/crates/eval/src/example.rs @@ -17,7 +17,7 @@ use async_trait::async_trait; use buffer_diff::DiffHunkStatus; use collections::HashMap; use futures::{FutureExt as _, StreamExt, channel::mpsc, select_biased}; -use gpui::{App, AppContext, AsyncApp, Entity}; +use gpui::{App, AppContext, AsyncApp, Entity, EntityId}; use 
language_model::{LanguageModel, Role, StopReason}; use zed_llm_client::CompletionIntent; @@ -402,16 +402,16 @@ impl AppContext for ExampleContext { self.app.new(build_entity) } - fn reserve_entity(&mut self) -> Self::Result> { + fn reserve_entity(&mut self) -> Self::Result { self.app.reserve_entity() } fn insert_entity( &mut self, - reservation: gpui::Reservation, + entity_id: EntityId, build_entity: impl FnOnce(&mut gpui::Context) -> T, ) -> Self::Result> { - self.app.insert_entity(reservation, build_entity) + self.app.insert_entity(entity_id, build_entity) } fn update_entity( diff --git a/crates/gpui/src/app.rs b/crates/gpui/src/app.rs index ef462ae084..7582831fa5 100644 --- a/crates/gpui/src/app.rs +++ b/crates/gpui/src/app.rs @@ -38,9 +38,9 @@ use crate::{ EventEmitter, FocusHandle, FocusMap, ForegroundExecutor, Global, KeyBinding, KeyContext, Keymap, Keystroke, LayoutId, Menu, MenuItem, OwnedMenu, PathPromptOptions, Pixels, Platform, PlatformDisplay, PlatformKeyboardLayout, Point, PromptBuilder, PromptButton, PromptHandle, - PromptLevel, Render, RenderImage, RenderablePromptHandle, Reservation, ScreenCaptureSource, - SubscriberSet, Subscription, SvgRenderer, Task, TextSystem, Window, WindowAppearance, - WindowHandle, WindowId, WindowInvalidator, + PromptLevel, Render, RenderImage, RenderablePromptHandle, ScreenCaptureSource, SubscriberSet, + Subscription, SvgRenderer, Task, TextSystem, Window, WindowAppearance, WindowHandle, WindowId, + WindowInvalidator, colors::{Colors, GlobalColors}, current_platform, hash, init_app_menus, }; @@ -930,11 +930,11 @@ impl App { break; } - for (entity_id, mut entity) in dropped { + for (entity_id, entity) in dropped { self.observers.remove(&entity_id); self.event_listeners.remove(&entity_id); for release_callback in self.release_listeners.remove(&entity_id) { - release_callback(entity.as_mut(), self); + release_callback(entity.borrow_mut().deref_mut(), self); } } } @@ -1746,9 +1746,9 @@ impl AppContext for App { /// [`Entity`] 
handle will be returned, which can be used to access the entity in a context. fn new(&mut self, build_entity: impl FnOnce(&mut Context) -> T) -> Entity { self.update(|cx| { - let slot = cx.entities.reserve(); - let handle = slot.clone(); - let entity = build_entity(&mut Context::new_context(cx, slot.downgrade())); + let entity_id = cx.entities.reserve(); + let entity = build_entity(&mut Context::new_context(cx, entity_id, None)); + let handle = cx.entities.insert(entity_id, entity); cx.push_effect(Effect::EntityCreated { entity: handle.clone().into_any(), @@ -1756,24 +1756,22 @@ impl AppContext for App { window: cx.window_update_stack.last().cloned(), }); - cx.entities.insert(slot, entity); handle }) } - fn reserve_entity(&mut self) -> Self::Result> { - Reservation(self.entities.reserve()) + fn reserve_entity(&mut self) -> Self::Result { + self.entities.reserve() } fn insert_entity( &mut self, - reservation: Reservation, + entity_id: EntityId, build_entity: impl FnOnce(&mut Context) -> T, ) -> Self::Result> { self.update(|cx| { - let slot = reservation.0; - let entity = build_entity(&mut Context::new_context(cx, slot.downgrade())); - cx.entities.insert(slot, entity) + let entity = build_entity(&mut Context::new_context(cx, entity_id, None)); + cx.entities.insert(entity_id, entity) }) } @@ -1785,13 +1783,11 @@ impl AppContext for App { update: impl FnOnce(&mut T, &mut Context) -> R, ) -> R { self.update(|cx| { - let mut entity = cx.entities.lease(handle); - let result = update( - &mut entity, - &mut Context::new_context(cx, handle.downgrade()), - ); - cx.entities.end_lease(entity); - result + let mut entity = cx.entities.get(handle.entity_id()); + update( + entity.borrow_mut().downcast_mut().unwrap(), + &mut Context::new_context(cx, handle.entity_id(), Some(handle.downgrade())), + ) }) } diff --git a/crates/gpui/src/app/async_context.rs b/crates/gpui/src/app/async_context.rs index c3b60dd580..2820ab5c05 100644 --- a/crates/gpui/src/app/async_context.rs +++ 
b/crates/gpui/src/app/async_context.rs @@ -1,7 +1,7 @@ use crate::{ AnyView, AnyWindowHandle, App, AppCell, AppContext, BackgroundExecutor, BorrowAppContext, - Entity, EventEmitter, Focusable, ForegroundExecutor, Global, PromptButton, PromptLevel, Render, - Reservation, Result, Subscription, Task, VisualContext, Window, WindowHandle, + Entity, EntityId, EventEmitter, Focusable, ForegroundExecutor, Global, PromptButton, + PromptLevel, Render, Result, Subscription, Task, VisualContext, Window, WindowHandle, }; use anyhow::Context as _; use derive_more::{Deref, DerefMut}; @@ -32,7 +32,7 @@ impl AppContext for AsyncApp { Ok(app.new(build_entity)) } - fn reserve_entity(&mut self) -> Result> { + fn reserve_entity(&mut self) -> Result { let app = self.app.upgrade().context("app was released")?; let mut app = app.borrow_mut(); Ok(app.reserve_entity()) @@ -40,12 +40,12 @@ impl AppContext for AsyncApp { fn insert_entity( &mut self, - reservation: Reservation, + entity_id: EntityId, build_entity: impl FnOnce(&mut Context) -> T, ) -> Result> { let app = self.app.upgrade().context("app was released")?; let mut app = app.borrow_mut(); - Ok(app.insert_entity(reservation, build_entity)) + Ok(app.insert_entity(entity_id, build_entity)) } fn update_entity( @@ -342,17 +342,17 @@ impl AppContext for AsyncWindowContext { self.window.update(self, |_, _, cx| cx.new(build_entity)) } - fn reserve_entity(&mut self) -> Result> { + fn reserve_entity(&mut self) -> Result { self.window.update(self, |_, _, cx| cx.reserve_entity()) } fn insert_entity( &mut self, - reservation: Reservation, + entity_id: EntityId, build_entity: impl FnOnce(&mut Context) -> T, ) -> Self::Result> { self.window - .update(self, |_, _, cx| cx.insert_entity(reservation, build_entity)) + .update(self, |_, _, cx| cx.insert_entity(entity_id, build_entity)) } fn update_entity( diff --git a/crates/gpui/src/app/context.rs b/crates/gpui/src/app/context.rs index 2d90ff35b1..152283dcf1 100644 --- a/crates/gpui/src/app/context.rs 
+++ b/crates/gpui/src/app/context.rs @@ -1,7 +1,7 @@ use crate::{ AnyView, AnyWindowHandle, AppContext, AsyncApp, DispatchPhase, Effect, EntityId, EventEmitter, - FocusHandle, FocusOutEvent, Focusable, Global, KeystrokeObserver, Reservation, SubscriberSet, - Subscription, Task, WeakEntity, WeakFocusHandle, Window, WindowHandle, + FocusHandle, FocusOutEvent, Focusable, Global, KeystrokeObserver, SubscriberSet, Subscription, + Task, WeakEntity, WeakFocusHandle, Window, WindowHandle, }; use anyhow::Result; use derive_more::{Deref, DerefMut}; @@ -22,17 +22,26 @@ pub struct Context<'a, T> { #[deref] #[deref_mut] app: &'a mut App, - entity_state: WeakEntity, + entity_id: EntityId, + entity_state: Option>, } impl<'a, T: 'static> Context<'a, T> { - pub(crate) fn new_context(app: &'a mut App, entity_state: WeakEntity) -> Self { - Self { app, entity_state } + pub(crate) fn new_context( + app: &'a mut App, + entity_id: EntityId, + entity_state: Option>, + ) -> Self { + Self { + app, + entity_id, + entity_state, + } } /// The entity id of the entity backing this context. pub fn entity_id(&self) -> EntityId { - self.entity_state.entity_id + self.entity_id } /// Returns a handle to the entity belonging to this context. @@ -44,7 +53,7 @@ impl<'a, T: 'static> Context<'a, T> { /// Returns a weak handle to the entity belonging to this context. pub fn weak_entity(&self) -> WeakEntity { - self.entity_state.clone() + self.entity_state.as_ref().unwrap().clone() } /// Arranges for the given function to be called whenever [`Context::notify`] is @@ -112,7 +121,7 @@ impl<'a, T: 'static> Context<'a, T> { T: 'static, { let (subscription, activate) = self.app.release_listeners.insert( - self.entity_state.entity_id, + self.entity_id, Box::new(move |this, cx| { let this = this.downcast_mut().expect("invalid entity type"); on_release(this, cx); @@ -193,7 +202,7 @@ impl<'a, T: 'static> Context<'a, T> { /// Tell GPUI that this entity has changed and observers of it should be notified. 
pub fn notify(&mut self) { - self.app.notify(self.entity_state.entity_id); + self.app.notify(self.entity_id); } /// Spawn the future returned by the given function. @@ -692,7 +701,7 @@ impl Context<'_, T> { Evt: 'static, { self.app.pending_effects.push_back(Effect::Emit { - emitter: self.entity_state.entity_id, + emitter: self.entity_id, event_type: TypeId::of::(), event: Box::new(event), }); @@ -706,16 +715,16 @@ impl AppContext for Context<'_, T> { self.app.new(build_entity) } - fn reserve_entity(&mut self) -> Reservation { + fn reserve_entity(&mut self) -> EntityId { self.app.reserve_entity() } fn insert_entity( &mut self, - reservation: Reservation, + entity_id: EntityId, build_entity: impl FnOnce(&mut Context) -> U, ) -> Self::Result> { - self.app.insert_entity(reservation, build_entity) + self.app.insert_entity(entity_id, build_entity) } fn update_entity( diff --git a/crates/gpui/src/app/entity_map.rs b/crates/gpui/src/app/entity_map.rs index f1aafa55e8..1a2bca869c 100644 --- a/crates/gpui/src/app/entity_map.rs +++ b/crates/gpui/src/app/entity_map.rs @@ -6,18 +6,18 @@ use parking_lot::{RwLock, RwLockUpgradableReadGuard}; use slotmap::{KeyData, SecondaryMap, SlotMap}; use std::{ any::{Any, TypeId, type_name}, - cell::RefCell, + cell::{Ref, RefCell}, cmp::Ordering, fmt::{self, Display}, hash::{Hash, Hasher}, marker::PhantomData, mem, num::NonZeroU64, + rc::{self, Rc}, sync::{ Arc, Weak, atomic::{AtomicU64, AtomicUsize, Ordering::SeqCst}, }, - thread::panicking, }; use super::Context; @@ -55,7 +55,7 @@ impl Display for EntityId { } pub(crate) struct EntityMap { - entities: SecondaryMap>, + entities: SecondaryMap>>, pub accessed_entities: RefCell>, ref_counts: Arc>, } @@ -85,47 +85,28 @@ impl EntityMap { } /// Reserve a slot for an entity, which you can subsequently use with `insert`. 
- pub fn reserve(&self) -> Slot { - let id = self.ref_counts.write().counts.insert(1.into()); - Slot(Entity::new(id, Arc::downgrade(&self.ref_counts))) + pub fn reserve(&self) -> EntityId { + self.ref_counts.write().counts.insert(AtomicUsize::new(1)) } /// Insert an entity into a slot obtained by calling `reserve`. - pub fn insert(&mut self, slot: Slot, entity: T) -> Entity + pub fn insert(&mut self, entity_id: EntityId, entity: T) -> Entity where T: 'static, { let mut accessed_entities = self.accessed_entities.borrow_mut(); - accessed_entities.insert(slot.entity_id); + accessed_entities.insert(entity_id); - let handle = slot.0; - self.entities.insert(handle.entity_id, Box::new(entity)); - handle + let entity_data = Rc::new(RefCell::new(entity)); + self.entities.insert(entity_id, entity_data.clone()); + + Entity::new(entity_id, entity_data, Arc::downgrade(&self.ref_counts)) } - /// Move an entity to the stack. - #[track_caller] - pub fn lease<'a, T>(&mut self, pointer: &'a Entity) -> Lease<'a, T> { - self.assert_valid_context(pointer); + pub fn get(&self, entity_id: EntityId) -> Rc> { let mut accessed_entities = self.accessed_entities.borrow_mut(); - accessed_entities.insert(pointer.entity_id); - - let entity = Some( - self.entities - .remove(pointer.entity_id) - .unwrap_or_else(|| double_lease_panic::("update")), - ); - Lease { - entity, - pointer, - entity_type: PhantomData, - } - } - - /// Returns an entity after moving it to the stack. 
- pub fn end_lease(&mut self, mut lease: Lease) { - self.entities - .insert(lease.pointer.entity_id, lease.entity.take().unwrap()); + accessed_entities.insert(entity_id); + self.entities.get(entity_id).unwrap().clone() } pub fn read(&self, entity: &Entity) -> &T { @@ -133,15 +114,16 @@ impl EntityMap { let mut accessed_entities = self.accessed_entities.borrow_mut(); accessed_entities.insert(entity.entity_id); - self.entities - .get(entity.entity_id) - .and_then(|entity| entity.downcast_ref()) - .unwrap_or_else(|| double_lease_panic::("read")) + // self.entities + // .get(entity.entity_id) + // .and_then(|entity| entity.borrow().downcast_ref()) + // .unwrap_or_else(|| double_lease_panic::("read")) + todo!("interface will need to change here") } fn assert_valid_context(&self, entity: &AnyEntity) { debug_assert!( - Weak::ptr_eq(&entity.entity_map, &Arc::downgrade(&self.ref_counts)), + Weak::ptr_eq(&entity.ref_counts, &Arc::downgrade(&self.ref_counts)), "used a entity with the wrong context" ); } @@ -156,7 +138,7 @@ impl EntityMap { self.accessed_entities.borrow_mut().clear(); } - pub fn take_dropped(&mut self) -> Vec<(EntityId, Box)> { + pub fn take_dropped(&mut self) -> Vec<(EntityId, Rc>)> { let mut ref_counts = self.ref_counts.write(); let dropped_entity_ids = mem::take(&mut ref_counts.dropped_entity_ids); let mut accessed_entities = self.accessed_entities.borrow_mut(); @@ -179,62 +161,30 @@ impl EntityMap { } } -#[track_caller] -fn double_lease_panic(operation: &str) -> ! 
{ - panic!( - "cannot {operation} {} while it is already being updated", - std::any::type_name::() - ) -} - -pub(crate) struct Lease<'a, T> { - entity: Option>, - pub pointer: &'a Entity, - entity_type: PhantomData, -} - -impl core::ops::Deref for Lease<'_, T> { - type Target = T; - - fn deref(&self) -> &Self::Target { - self.entity.as_ref().unwrap().downcast_ref().unwrap() - } -} - -impl core::ops::DerefMut for Lease<'_, T> { - fn deref_mut(&mut self) -> &mut Self::Target { - self.entity.as_mut().unwrap().downcast_mut().unwrap() - } -} - -impl Drop for Lease<'_, T> { - fn drop(&mut self) { - if self.entity.is_some() && !panicking() { - panic!("Leases must be ended with EntityMap::end_lease") - } - } -} - -#[derive(Deref, DerefMut)] -pub(crate) struct Slot(Entity); - /// A dynamically typed reference to a entity, which can be downcast into a `Entity`. pub struct AnyEntity { pub(crate) entity_id: EntityId, pub(crate) entity_type: TypeId, - entity_map: Weak>, + entity_data: Rc>, + ref_counts: Weak>, #[cfg(any(test, feature = "leak-detection"))] handle_id: HandleId, } impl AnyEntity { - fn new(id: EntityId, entity_type: TypeId, entity_map: Weak>) -> Self { + fn new( + id: EntityId, + entity_type: TypeId, + entity_data: Rc>, + ref_counts: Weak>, + ) -> Self { Self { entity_id: id, entity_type, - entity_map: entity_map.clone(), + entity_data, + ref_counts: ref_counts.clone(), #[cfg(any(test, feature = "leak-detection"))] - handle_id: entity_map + handle_id: ref_counts .upgrade() .unwrap() .write() @@ -258,7 +208,8 @@ impl AnyEntity { AnyWeakEntity { entity_id: self.entity_id, entity_type: self.entity_type, - entity_ref_counts: self.entity_map.clone(), + entity_data: Rc::downgrade(&self.entity_data), + entity_ref_counts: self.ref_counts.clone(), } } @@ -278,7 +229,7 @@ impl AnyEntity { impl Clone for AnyEntity { fn clone(&self) -> Self { - if let Some(entity_map) = self.entity_map.upgrade() { + if let Some(entity_map) = self.ref_counts.upgrade() { let entity_map = 
entity_map.read(); let count = entity_map .counts @@ -291,10 +242,11 @@ impl Clone for AnyEntity { Self { entity_id: self.entity_id, entity_type: self.entity_type, - entity_map: self.entity_map.clone(), + entity_data: self.entity_data.clone(), + ref_counts: self.ref_counts.clone(), #[cfg(any(test, feature = "leak-detection"))] handle_id: self - .entity_map + .ref_counts .upgrade() .unwrap() .write() @@ -306,7 +258,7 @@ impl Clone for AnyEntity { impl Drop for AnyEntity { fn drop(&mut self) { - if let Some(entity_map) = self.entity_map.upgrade() { + if let Some(entity_map) = self.ref_counts.upgrade() { let entity_map = entity_map.upgradable_read(); let count = entity_map .counts @@ -322,7 +274,7 @@ impl Drop for AnyEntity { } #[cfg(any(test, feature = "leak-detection"))] - if let Some(entity_map) = self.entity_map.upgrade() { + if let Some(entity_map) = self.ref_counts.upgrade() { entity_map .write() .leak_detector @@ -386,12 +338,16 @@ unsafe impl Sync for Entity {} impl Sealed for Entity {} impl Entity { - fn new(id: EntityId, entity_map: Weak>) -> Self + fn new( + id: EntityId, + entity_data: Rc>, + ref_counts: Weak>, + ) -> Self where T: 'static, { Self { - any_entity: AnyEntity::new(id, TypeId::of::(), entity_map), + any_entity: AnyEntity::new(id, TypeId::of::(), entity_data, ref_counts), entity_type: PhantomData, } } @@ -415,8 +371,11 @@ impl Entity { } /// Grab a reference to this entity from the context. - pub fn read<'a>(&self, cx: &'a App) -> &'a T { - cx.entities.read(self) + /// todo! remove the cx param + pub fn read(&self, _cx: &App) -> Ref { + Ref::map(self.any_entity.entity_data.borrow(), |data| { + data.downcast_ref().unwrap() + }) } /// Read the entity referenced by this handle with the given function. 
@@ -504,6 +463,7 @@ impl PartialOrd for Entity { pub struct AnyWeakEntity { pub(crate) entity_id: EntityId, entity_type: TypeId, + entity_data: rc::Weak>, entity_ref_counts: Weak>, } @@ -538,7 +498,8 @@ impl AnyWeakEntity { Some(AnyEntity { entity_id: self.entity_id, entity_type: self.entity_type, - entity_map: self.entity_ref_counts.clone(), + entity_data: self.entity_data.upgrade()?, + ref_counts: self.entity_ref_counts.clone(), #[cfg(any(test, feature = "leak-detection"))] handle_id: self .entity_ref_counts @@ -592,6 +553,7 @@ impl AnyWeakEntity { // read in the first place, so we're good! entity_id: entity_id.into(), entity_type: TypeId::of::<()>(), + entity_data: Rc::downgrade(&(Rc::new(RefCell::new(())) as Rc>)), entity_ref_counts: Weak::new(), } } @@ -836,10 +798,10 @@ mod test { // Tests that slots are not re-used before take_dropped. let mut entity_map = EntityMap::new(); - let slot = entity_map.reserve::(); + let slot = entity_map.reserve(); entity_map.insert(slot, TestEntity { i: 1 }); - let slot = entity_map.reserve::(); + let slot = entity_map.reserve(); entity_map.insert(slot, TestEntity { i: 2 }); let dropped = entity_map.take_dropped(); @@ -848,7 +810,7 @@ mod test { assert_eq!( dropped .into_iter() - .map(|(_, entity)| entity.downcast::().unwrap().i) + .map(|(_, entity)| entity.borrow().downcast_ref::().unwrap().i) .collect::>(), vec![1, 2], ); @@ -859,7 +821,7 @@ mod test { // Tests that weak handles are not upgraded before take_dropped let mut entity_map = EntityMap::new(); - let slot = entity_map.reserve::(); + let slot = entity_map.reserve(); let handle = entity_map.insert(slot, TestEntity { i: 1 }); let weak = handle.downgrade(); drop(handle); @@ -873,7 +835,7 @@ mod test { assert_eq!( dropped .into_iter() - .map(|(_, entity)| entity.downcast::().unwrap().i) + .map(|(_, entity)| entity.borrow().downcast_ref::().unwrap().i) .collect::>(), vec![1], ); diff --git a/crates/gpui/src/app/test_context.rs b/crates/gpui/src/app/test_context.rs index 
dfc7af0d9c..a803cc2127 100644 --- a/crates/gpui/src/app/test_context.rs +++ b/crates/gpui/src/app/test_context.rs @@ -1,9 +1,9 @@ use crate::{ Action, AnyView, AnyWindowHandle, App, AppCell, AppContext, AsyncApp, AvailableSpace, BackgroundExecutor, BorrowAppContext, Bounds, Capslock, ClipboardItem, DrawPhase, Drawable, - Element, Empty, EventEmitter, ForegroundExecutor, Global, InputEvent, Keystroke, Modifiers, - ModifiersChangedEvent, MouseButton, MouseDownEvent, MouseMoveEvent, MouseUpEvent, Pixels, - Platform, Point, Render, Result, Size, Task, TestDispatcher, TestPlatform, + Element, Empty, EntityId, EventEmitter, ForegroundExecutor, Global, InputEvent, Keystroke, + Modifiers, ModifiersChangedEvent, MouseButton, MouseDownEvent, MouseMoveEvent, MouseUpEvent, + Pixels, Platform, Point, Render, Result, Size, Task, TestDispatcher, TestPlatform, TestScreenCaptureSource, TestWindow, TextSystem, VisualContext, Window, WindowBounds, WindowHandle, WindowOptions, }; @@ -40,14 +40,14 @@ impl AppContext for TestAppContext { app.new(build_entity) } - fn reserve_entity(&mut self) -> Self::Result> { + fn reserve_entity(&mut self) -> Self::Result { let mut app = self.app.borrow_mut(); app.reserve_entity() } fn insert_entity( &mut self, - reservation: crate::Reservation, + reservation: EntityId, build_entity: impl FnOnce(&mut Context) -> T, ) -> Self::Result> { let mut app = self.app.borrow_mut(); @@ -624,7 +624,8 @@ impl Entity { handle .upgrade() .expect("view dropped with pending condition") - .read(cx), + .read(cx) + .deref(), cx, ) { break; @@ -891,13 +892,13 @@ impl AppContext for VisualTestContext { self.cx.new(build_entity) } - fn reserve_entity(&mut self) -> Self::Result> { + fn reserve_entity(&mut self) -> Self::Result { self.cx.reserve_entity() } fn insert_entity( &mut self, - reservation: crate::Reservation, + reservation: EntityId, build_entity: impl FnOnce(&mut Context) -> T, ) -> Self::Result> { self.cx.insert_entity(reservation, build_entity) diff --git 
a/crates/gpui/src/gpui.rs b/crates/gpui/src/gpui.rs index 91461a4d2c..a6ec7f1cdc 100644 --- a/crates/gpui/src/gpui.rs +++ b/crates/gpui/src/gpui.rs @@ -175,16 +175,15 @@ pub trait AppContext { build_entity: impl FnOnce(&mut Context) -> T, ) -> Self::Result>; - /// Reserve a slot for a entity to be inserted later. - /// The returned [Reservation] allows you to obtain the [EntityId] for the future entity. - fn reserve_entity(&mut self) -> Self::Result>; + /// Reserve an EntityId for an entity to be inserted later. + fn reserve_entity(&mut self) -> Self::Result; - /// Insert a new entity in the app context based on a [Reservation] previously obtained from [`reserve_entity`]. + /// Insert a new entity in the app context based on an [EntityId] previously obtained from [`reserve_entity`]. /// /// [`reserve_entity`]: Self::reserve_entity fn insert_entity( &mut self, - reservation: Reservation, + reservation: EntityId, build_entity: impl FnOnce(&mut Context) -> T, ) -> Self::Result>; @@ -231,17 +230,6 @@ pub trait AppContext { G: Global; } -/// Returned by [Context::reserve_entity] to later be passed to [Context::insert_entity]. -/// Allows you to obtain the [EntityId] for a entity before it is created. -pub struct Reservation(pub(crate) Slot); - -impl Reservation { - /// Returns the [EntityId] that will be associated with the entity once it is inserted. - pub fn entity_id(&self) -> EntityId { - self.0.entity_id() - } -} - /// This trait is used for the different visual contexts in GPUI that /// require a window to be present. 
pub trait VisualContext: AppContext { diff --git a/crates/gpui/src/window.rs b/crates/gpui/src/window.rs index 8c01b8afcf..85d81fb57b 100644 --- a/crates/gpui/src/window.rs +++ b/crates/gpui/src/window.rs @@ -31,6 +31,8 @@ use raw_window_handle::{HandleError, HasDisplayHandle, HasWindowHandle}; use refineable::Refineable; use slotmap::SlotMap; use smallvec::SmallVec; +use std::cell::Ref; +use std::ops::Deref; use std::{ any::{Any, TypeId}, borrow::Cow, @@ -4139,7 +4141,8 @@ impl Window { if let Some(inspector_id) = _inspector_id { if let Some(inspector) = &self.inspector { let inspector = inspector.clone(); - let active_element_id = inspector.read(cx).active_element_id(); + let inspector_ref = inspector.read(cx); + let active_element_id = inspector_ref.active_element_id(); if Some(inspector_id) == active_element_id { return inspector.update(cx, |inspector, _cx| { inspector.with_active_element_state(self, f) @@ -4213,9 +4216,9 @@ impl Window { #[cfg(any(feature = "inspector", debug_assertions))] fn paint_inspector_hitbox(&mut self, cx: &App) { - if let Some(inspector) = self.inspector.as_ref() { - let inspector = inspector.read(cx); - if let Some((hitbox_id, _)) = self.hovered_inspector_hitbox(inspector, &self.next_frame) + if let Some(inspector) = self.inspector.clone() { + if let Some((hitbox_id, _)) = + self.hovered_inspector_hitbox(inspector.read(cx).deref(), &self.next_frame) { if let Some(hitbox) = self .next_frame @@ -4379,7 +4382,7 @@ impl WindowHandle { /// Read the root view out of this window. /// /// This will fail if the window is closed or if the root view's type does not match `V`. - pub fn read<'a>(&self, cx: &'a App) -> Result<&'a V> { + pub fn read(&self, cx: &App) -> Result> { let x = cx .windows .get(self.id) @@ -4392,7 +4395,7 @@ impl WindowHandle { .context("window not found")? 
.map_err(|_| anyhow!("the type of the window's root view has changed"))?; - Ok(x.read(cx)) + todo!() } /// Read the root view out of this window, with a callback @@ -4402,7 +4405,9 @@ impl WindowHandle { where C: AppContext, { - cx.read_window(self, |root_view, cx| read_with(root_view.read(cx), cx)) + cx.read_window(self, |root_view, cx| { + read_with(root_view.read(cx).deref(), cx) + }) } /// Read the root view pointer off of this window. diff --git a/crates/gpui_macros/src/derive_app_context.rs b/crates/gpui_macros/src/derive_app_context.rs index bca015b8dc..6ec1c9a167 100644 --- a/crates/gpui_macros/src/derive_app_context.rs +++ b/crates/gpui_macros/src/derive_app_context.rs @@ -30,16 +30,16 @@ pub fn derive_app_context(input: TokenStream) -> TokenStream { self.#app_variable.new(build_entity) } - fn reserve_entity(&mut self) -> Self::Result> { + fn reserve_entity(&mut self) -> Self::Result { self.#app_variable.reserve_entity() } fn insert_entity( &mut self, - reservation: gpui::Reservation, + entity_id: EntityId, build_entity: impl FnOnce(&mut gpui::Context<'_, T>) -> T, ) -> Self::Result> { - self.#app_variable.insert_entity(reservation, build_entity) + self.#app_variable.insert_entity(entity_id, build_entity) } fn update_entity( diff --git a/crates/gpui_macros/tests/derive_context.rs b/crates/gpui_macros/tests/derive_context.rs index 6c122eff25..1ebe577bb4 100644 --- a/crates/gpui_macros/tests/derive_context.rs +++ b/crates/gpui_macros/tests/derive_context.rs @@ -1,6 +1,6 @@ #[test] fn test_derive_context() { - use gpui::{App, Window}; + use gpui::{App, EntityId, Window}; use gpui_macros::{AppContext, VisualContext}; #[derive(AppContext, VisualContext)] diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index ae0184b22a..a84206dc62 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -41,7 +41,7 @@ use std::{ cell::Cell, cmp::{self, Ordering, Reverse}, collections::{BTreeMap, BTreeSet}, - ffi::OsStr, + 
ffi::{OsStr, OsString}, future::Future, iter::{self, Iterator, Peekable}, mem, @@ -343,7 +343,7 @@ pub trait File: Send + Sync + Any { /// Returns the last component of this handle's absolute path. If this handle refers to the root /// of its worktree, then this method will return the name of the worktree itself. - fn file_name<'a>(&'a self, cx: &'a App) -> &'a OsStr; + fn file_name<'a>(&'a self, cx: &'a App) -> OsString; /// Returns the id of the worktree to which this file belongs. /// @@ -967,7 +967,7 @@ impl Buffer { language_registry: Option>, cx: &mut App, ) -> impl Future + use<> { - let entity_id = cx.reserve_entity::().entity_id(); + let entity_id = cx.reserve_entity(); let buffer_id = entity_id.as_non_zero_u64().into(); async move { let text = @@ -992,7 +992,7 @@ impl Buffer { } pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot { - let entity_id = cx.reserve_entity::().entity_id(); + let entity_id = cx.reserve_entity(); let buffer_id = entity_id.as_non_zero_u64().into(); let text = TextBuffer::new_normalized(0, buffer_id, Default::default(), Rope::new()).snapshot(); @@ -1015,7 +1015,7 @@ impl Buffer { language_registry: Option>, cx: &mut App, ) -> BufferSnapshot { - let entity_id = cx.reserve_entity::().entity_id(); + let entity_id = cx.reserve_entity(); let buffer_id = entity_id.as_non_zero_u64().into(); let text = TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot(); let mut syntax = SyntaxMap::new(&text).snapshot(); @@ -4895,8 +4895,11 @@ impl File for TestFile { unimplemented!() } - fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a std::ffi::OsStr { - self.path().file_name().unwrap_or(self.root_name.as_ref()) + fn file_name<'a>(&'a self, _: &'a gpui::App) -> OsString { + self.path() + .file_name() + .unwrap_or(self.root_name.as_ref()) + .into() } fn worktree_id(&self, _: &App) -> WorktreeId { diff --git a/crates/language_model/src/registry.rs b/crates/language_model/src/registry.rs index 840fda38de..25bc9acc8a 
100644 --- a/crates/language_model/src/registry.rs +++ b/crates/language_model/src/registry.rs @@ -4,7 +4,7 @@ use crate::{ }; use collections::BTreeMap; use gpui::{App, Context, Entity, EventEmitter, Global, prelude::*}; -use std::{str::FromStr, sync::Arc}; +use std::{cell::Ref, str::FromStr, sync::Arc}; use thiserror::Error; use util::maybe; @@ -119,7 +119,7 @@ impl LanguageModelRegistry { cx.global::().0.clone() } - pub fn read_global(cx: &App) -> &Self { + pub fn read_global(cx: &App) -> Ref { cx.global::().0.read(cx) } diff --git a/crates/multi_buffer/src/multi_buffer.rs b/crates/multi_buffer/src/multi_buffer.rs index e22fdb1ed5..2345307462 100644 --- a/crates/multi_buffer/src/multi_buffer.rs +++ b/crates/multi_buffer/src/multi_buffer.rs @@ -1215,8 +1215,9 @@ impl MultiBuffer { if let Some(excerpt) = cursor.item() { if excerpt.locator == *excerpt_id { let excerpt_buffer_start = - excerpt.range.context.start.summary::(buffer); - let excerpt_buffer_end = excerpt.range.context.end.summary::(buffer); + excerpt.range.context.start.summary::(&buffer); + let excerpt_buffer_end = + excerpt.range.context.end.summary::(&buffer); let excerpt_range = excerpt_buffer_start..excerpt_buffer_end; if excerpt_range.contains(&range.start) && excerpt_range.contains(&range.end) @@ -2477,7 +2478,7 @@ impl MultiBuffer { }; let buffer = buffer_state.buffer.read(cx); - let diff_change_range = range.to_offset(buffer); + let diff_change_range = range.to_offset(&buffer); let new_diff = diff.snapshot(cx); let mut snapshot = self.snapshot.borrow_mut(); @@ -2558,19 +2559,20 @@ impl MultiBuffer { } pub fn language_settings<'a>(&'a self, cx: &'a App) -> Cow<'a, LanguageSettings> { - let buffer_id = self - .snapshot - .borrow() - .excerpts - .first() - .map(|excerpt| excerpt.buffer.remote_id()); - buffer_id - .and_then(|buffer_id| self.buffer(buffer_id)) - .map(|buffer| { - let buffer = buffer.read(cx); - language_settings(buffer.language().map(|l| l.name()), buffer.file(), cx) - }) - 
.unwrap_or_else(move || self.language_settings_at(0, cx)) + // let buffer_id = self + // .snapshot + // .borrow() + // .excerpts + // .first() + // .map(|excerpt| excerpt.buffer.remote_id()); + // buffer_id + // .and_then(|buffer_id| self.buffer(buffer_id)) + // .map(|buffer| { + // let buffer = buffer.read(cx); + // language_settings(buffer.language().map(|l| l.name()), buffer.file(), cx) + // }) + // .unwrap_or_else(move || self.language_settings_at(0, cx)) + todo!() } pub fn language_settings_at<'a, T: ToOffset>( @@ -2585,7 +2587,8 @@ impl MultiBuffer { language = buffer.language_at(offset); file = buffer.file(); } - language_settings(language.map(|l| l.name()), file, cx) + // language_settings(language.map(|l| l.name()), file, cx) + todo!() } pub fn for_each_buffer(&self, mut f: impl FnMut(&Entity)) { @@ -2596,23 +2599,24 @@ impl MultiBuffer { } pub fn title<'a>(&'a self, cx: &'a App) -> Cow<'a, str> { - if let Some(title) = self.title.as_ref() { - return title.into(); - } + // if let Some(title) = self.title.as_ref() { + // return title.into(); + // } - if let Some(buffer) = self.as_singleton() { - let buffer = buffer.read(cx); + // if let Some(buffer) = self.as_singleton() { + // let buffer = buffer.read(cx); - if let Some(file) = buffer.file() { - return file.file_name(cx).to_string_lossy(); - } + // if let Some(file) = buffer.file() { + // return file.file_name(cx).to_string_lossy(); + // } - if let Some(title) = self.buffer_content_title(buffer) { - return title; - } - }; + // if let Some(title) = self.buffer_content_title(&buffer) { + // return title; + // } + // }; - "untitled".into() + // "untitled".into() + todo!() } fn buffer_content_title(&self, buffer: &Buffer) -> Option> { diff --git a/crates/project/src/buffer_store.rs b/crates/project/src/buffer_store.rs index b8101e14f3..5afcdbd89e 100644 --- a/crates/project/src/buffer_store.rs +++ b/crates/project/src/buffer_store.rs @@ -126,6 +126,7 @@ impl RemoteBufferStore { let version = buffer.version(); 
let rpc = self.upstream_client.clone(); let project_id = self.project_id; + drop(buffer); cx.spawn(async move |_, cx| { let response = rpc .request(proto::SaveBuffer { @@ -373,6 +374,7 @@ impl LocalBufferStore { let save = worktree.update(cx, |worktree, cx| { worktree.write_file(path.as_ref(), text, line_ending, cx) }); + drop(buffer); cx.spawn(async move |this, cx| { let new_file = save.await?; @@ -574,11 +576,14 @@ impl LocalBufferStore { buffer: Entity, cx: &mut Context, ) -> Task> { - let Some(file) = File::from_dyn(buffer.read(cx).file()) else { - return Task::ready(Err(anyhow!("buffer doesn't have a file"))); + let (worktree, path) = { + let buffer_ref = buffer.read(cx); + let Some(file) = File::from_dyn(buffer_ref.file()) else { + return Task::ready(Err(anyhow!("buffer doesn't have a file"))); + }; + (file.worktree.clone(), file.path.clone()) }; - let worktree = file.worktree.clone(); - self.save_local_buffer(buffer, worktree, file.path.clone(), false, cx) + self.save_local_buffer(buffer, worktree, path, false, cx) } fn save_buffer_as( @@ -605,14 +610,14 @@ impl LocalBufferStore { ) -> Task>> { let load_buffer = worktree.update(cx, |worktree, cx| { let load_file = worktree.load_file(path.as_ref(), cx); - let reservation = cx.reserve_entity(); - let buffer_id = BufferId::from(reservation.entity_id().as_non_zero_u64()); + let entity_id = cx.reserve_entity(); + let buffer_id = BufferId::from(entity_id.as_non_zero_u64()); cx.spawn(async move |_, cx| { let loaded = load_file.await?; let text_buffer = cx .background_spawn(async move { text::Buffer::new(0, buffer_id, loaded.text) }) .await; - cx.insert_entity(reservation, |_| { + cx.insert_entity(entity_id, |_| { Buffer::build(text_buffer, Some(loaded.file), Capability::ReadWrite) }) }) @@ -922,6 +927,7 @@ impl BufferStore { self.path_to_buffer_id.insert(path, remote_id); } + drop(buffer); cx.subscribe(&buffer_entity, Self::on_buffer_event).detach(); cx.emit(BufferStoreEvent::BufferAdded(buffer_entity)); Ok(()) @@ 
-1022,9 +1028,9 @@ impl BufferStore { } fn buffer_changed_file(&mut self, buffer: Entity, cx: &mut App) -> Option<()> { - let file = File::from_dyn(buffer.read(cx).file())?; - - let remote_id = buffer.read(cx).remote_id(); + let buffer_ref = buffer.read(cx); + let file = File::from_dyn(buffer_ref.file())?; + let remote_id = buffer_ref.remote_id(); if let Some(entry_id) = file.entry_id { if let Some(local) = self.as_local_mut() { match local.local_buffer_ids_by_entry_id.get(&entry_id) { @@ -1061,10 +1067,13 @@ impl BufferStore { let mut open_buffers = HashSet::default(); let mut unnamed_buffers = Vec::new(); for handle in self.buffers() { - let buffer = handle.read(cx); - if self.non_searchable_buffers.contains(&buffer.remote_id()) { + let (remote_id, entry_id) = { + let buffer = handle.read(cx); + (buffer.remote_id(), buffer.entry_id(cx)) + }; + if self.non_searchable_buffers.contains(&remote_id) { continue; - } else if let Some(entry_id) = buffer.entry_id(cx) { + } else if let Some(entry_id) = entry_id { open_buffers.insert(entry_id); } else { limit = limit.saturating_sub(1); diff --git a/crates/project/src/debugger/dap_store.rs b/crates/project/src/debugger/dap_store.rs index 19e64adb2d..873c91ace6 100644 --- a/crates/project/src/debugger/dap_store.rs +++ b/crates/project/src/debugger/dap_store.rs @@ -155,7 +155,7 @@ impl DapStore { ) -> Self { let mode = DapStoreMode::Ssh(SshDapStore { upstream_client: ssh_client.read(cx).proto_client(), - ssh_client, + ssh_client: ssh_client.clone(), upstream_project_id: project_id, }); diff --git a/crates/project/src/git_store.rs b/crates/project/src/git_store.rs index 9ff3823e0f..26e31770a5 100644 --- a/crates/project/src/git_store.rs +++ b/crates/project/src/git_store.rs @@ -947,14 +947,18 @@ impl GitStore { selection: Range, cx: &mut App, ) -> Task> { - let Some(file) = File::from_dyn(buffer.read(cx).file()) else { - return Task::ready(Err(anyhow!("buffer has no file"))); + let (worktree, path) = { + let buffer_ref = 
buffer.read(cx); + let Some(file) = File::from_dyn(buffer_ref.file()) else { + return Task::ready(Err(anyhow!("buffer has no file"))); + }; + (file.worktree.clone(), file.path.clone()) }; - let Some((repo, repo_path)) = self.repository_and_path_for_project_path( - &(file.worktree.read(cx).id(), file.path.clone()).into(), - cx, - ) else { + let worktree_id = worktree.read(cx).id(); + let Some((repo, repo_path)) = + self.repository_and_path_for_project_path(&(worktree_id, path.clone()).into(), cx) + else { // If we're not in a Git repo, check whether this is a Rust source // file in the Cargo registry (presumably opened with go-to-definition // from a normal Rust file). If so, we can put together a permalink @@ -966,7 +970,7 @@ impl GitStore { { return Task::ready(Err(anyhow!("no permalink available"))); } - let Some(file_path) = file.worktree.read(cx).absolutize(&file.path).ok() else { + let Some(file_path) = worktree.read(cx).absolutize(&path).ok() else { return Task::ready(Err(anyhow!("no permalink available"))); }; return cx.spawn(async move |cx| { @@ -2062,7 +2066,7 @@ impl GitStore { .or_default(); shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone()); })?; - diff.read_with(&cx, |diff, cx| { + Ok(diff.read_with(&cx, |diff, cx| { use proto::open_uncommitted_diff_response::Mode; let unstaged_diff = diff.secondary_diff(); @@ -2076,14 +2080,14 @@ impl GitStore { let committed_text; if diff.base_text_exists() { let committed_snapshot = diff.base_text(); - committed_text = Some(committed_snapshot.text()); + committed_text = Some(committed_snapshot.text().to_string()); if let Some(index_text) = index_snapshot { if index_text.remote_id() == committed_snapshot.remote_id() { mode = Mode::IndexMatchesHead; staged_text = None; } else { mode = Mode::IndexAndHead; - staged_text = Some(index_text.text()); + staged_text = Some(index_text.text().to_string()); } } else { mode = Mode::IndexAndHead; @@ -2092,15 +2096,17 @@ impl GitStore { } else { mode = 
Mode::IndexAndHead; committed_text = None; - staged_text = index_snapshot.as_ref().map(|buffer| buffer.text()); + staged_text = index_snapshot + .as_ref() + .map(|buffer| buffer.text().to_string()); } proto::OpenUncommittedDiffResponse { committed_text, staged_text, - mode: mode.into(), + mode: mode as i32, } - }) + })?) } async fn handle_update_diff_bases( @@ -2842,9 +2848,15 @@ impl Repository { .filter_map(|(buffer_id, diff_state)| { let buffer_store = git_store.buffer_store.read(cx); let buffer = buffer_store.get(*buffer_id)?; - let file = File::from_dyn(buffer.read(cx).file())?; - let abs_path = - file.worktree.read(cx).absolutize(&file.path).ok()?; + let (worktree, path) = { + let buffer_ref = buffer.read(cx); + let file = File::from_dyn(buffer_ref.file())?; + (file.worktree.clone(), file.path.clone()) + }; + let abs_path = { + let worktree_ref = worktree.read(cx); + worktree_ref.absolutize(&path).ok()? + }; let repo_path = this.abs_path_to_repo_path(&abs_path)?; log::debug!( "start reload diff bases for repo path {}", @@ -3066,18 +3078,21 @@ impl Repository { pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option { let git_store = self.git_store.upgrade()?; - let worktree_store = git_store.read(cx).worktree_store.read(cx); + let git_store_ref = git_store.read(cx); + let worktree_store = git_store_ref.worktree_store.read(cx); let abs_path = self.snapshot.work_directory_abs_path.join(&path.0); let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?; + let worktree_id = worktree.read(cx).id(); Some(ProjectPath { - worktree_id: worktree.read(cx).id(), + worktree_id, path: relative_path.into(), }) } pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option { let git_store = self.git_store.upgrade()?; - let worktree_store = git_store.read(cx).worktree_store.read(cx); + let git_store_ref = git_store.read(cx); + let worktree_store = git_store_ref.worktree_store.read(cx); let abs_path = 
worktree_store.absolutize(path, cx)?; self.snapshot.abs_path_to_repo_path(&abs_path) } diff --git a/crates/project/src/git_store/conflict_set.rs b/crates/project/src/git_store/conflict_set.rs index 27b191f65f..dd67455508 100644 --- a/crates/project/src/git_store/conflict_set.rs +++ b/crates/project/src/git_store/conflict_set.rs @@ -641,7 +641,7 @@ mod tests { conflict_set.update(cx, |conflict_set, cx| { let conflict_range = conflict_set.snapshot().conflicts[0] .range - .to_point(buffer.read(cx)); + .to_point(&buffer.read(cx).snapshot()); assert_eq!(conflict_range, Point::new(1, 0)..Point::new(6, 0)); }); @@ -673,7 +673,7 @@ mod tests { conflict_set.update(cx, |conflict_set, cx| { let conflict_range = conflict_set.snapshot().conflicts[0] .range - .to_point(buffer.read(cx)); + .to_point(&buffer.read(cx).snapshot()); assert_eq!(conflict_range, Point::new(1, 0)..Point::new(6, 0)); }); } diff --git a/crates/project/src/git_store/git_traversal.rs b/crates/project/src/git_store/git_traversal.rs index 68ed03cfe9..1dc1e01fb5 100644 --- a/crates/project/src/git_store/git_traversal.rs +++ b/crates/project/src/git_store/git_traversal.rs @@ -710,11 +710,11 @@ mod tests { cx.executor().run_until_parked(); let (old_entry_ids, old_mtimes) = project.read_with(cx, |project, cx| { - let tree = project.worktrees(cx).next().unwrap().read(cx); - ( - tree.entries(true, 0).map(|e| e.id).collect::>(), - tree.entries(true, 0).map(|e| e.mtime).collect::>(), - ) + let worktree = project.worktrees(cx).next().unwrap(); + let tree = worktree.read(cx); + let entry_ids = tree.entries(true, 0).map(|e| e.id).collect::>(); + let mtimes = tree.entries(true, 0).map(|e| e.mtime).collect::>(); + (entry_ids, mtimes) }); // Regression test: after the directory is scanned, touch the git repo's @@ -724,11 +724,11 @@ mod tests { cx.executor().run_until_parked(); let (new_entry_ids, new_mtimes) = project.read_with(cx, |project, cx| { - let tree = project.worktrees(cx).next().unwrap().read(cx); - ( - 
tree.entries(true, 0).map(|e| e.id).collect::>(), - tree.entries(true, 0).map(|e| e.mtime).collect::>(), - ) + let worktree = project.worktrees(cx).next().unwrap(); + let tree = worktree.read(cx); + let entry_ids = tree.entries(true, 0).map(|e| e.id).collect::>(); + let mtimes = tree.entries(true, 0).map(|e| e.mtime).collect::>(); + (entry_ids, mtimes) }); assert_eq!(new_entry_ids, old_entry_ids); assert_ne!(new_mtimes, old_mtimes); diff --git a/crates/project/src/lsp_command.rs b/crates/project/src/lsp_command.rs index 8ed3716436..4bdc299fff 100644 --- a/crates/project/src/lsp_command.rs +++ b/crates/project/src/lsp_command.rs @@ -1151,7 +1151,7 @@ pub async fn location_links_from_lsp( let target_end = target_buffer.clip_point_utf16(point_from_lsp(target_range.end), Bias::Left); let target_location = Location { - buffer: target_buffer_handle, + buffer: target_buffer_handle.clone(), range: target_buffer.anchor_after(target_start) ..target_buffer.anchor_before(target_end), }; @@ -1212,7 +1212,7 @@ pub async fn location_link_from_lsp( let target_end = target_buffer.clip_point_utf16(point_from_lsp(target_range.end), Bias::Left); let target_location = Location { - buffer: target_buffer_handle, + buffer: target_buffer_handle.clone(), range: target_buffer.anchor_after(target_start) ..target_buffer.anchor_before(target_end), }; diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index 8a14e02e0b..b1d0d5b06e 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -2276,10 +2276,9 @@ impl LocalLspStore { let snapshot = self.buffer_snapshot_for_lsp_version(buffer, server_id, version, cx)?; - let edits_since_save = std::cell::LazyCell::new(|| { - let saved_version = buffer.read(cx).saved_version(); - Patch::new(snapshot.edits_since::(saved_version).collect()) - }); + let saved_version = buffer.read(cx).saved_version(); + let edits: Vec<_> = snapshot.edits_since::(saved_version).collect(); + let edits_since_save = 
Patch::new(edits); let mut sanitized_diagnostics = Vec::with_capacity(diagnostics.len()); @@ -2292,8 +2291,8 @@ impl LocalLspStore { // any unsaved edits. // Do not alter the reused ones though, as their coordinates were stored as anchors // and were properly adjusted on reuse. - start = Unclipped((*edits_since_save).old_to_new(entry.range.start.0)); - end = Unclipped((*edits_since_save).old_to_new(entry.range.end.0)); + start = Unclipped(edits_since_save.old_to_new(entry.range.start.0)); + end = Unclipped(edits_since_save.old_to_new(entry.range.end.0)); } else { start = entry.range.start; end = entry.range.end; @@ -2318,7 +2317,6 @@ impl LocalLspStore { diagnostic: entry.diagnostic, }); } - drop(edits_since_save); let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot); buffer.update(cx, |buffer, cx| { @@ -3228,17 +3226,23 @@ impl LocalLspStore { watchers: impl Iterator, cx: &mut Context, ) -> LanguageServerWatchedPathsBuilder { - let worktrees = self + let worktree_ids = self .worktree_store .read(cx) .worktrees() .filter_map(|worktree| { - self.language_servers_for_worktree(worktree.read(cx).id()) + let worktree_id = worktree.read(cx).id(); + self.language_servers_for_worktree(worktree_id) .find(|server| server.server_id() == language_server_id) - .map(|_| worktree) + .map(|_| worktree_id) }) .collect::>(); + let worktrees = worktree_ids + .into_iter() + .filter_map(|id| self.worktree_store.read(cx).worktree_for_id(id, cx)) + .collect::>(); + let mut worktree_globs = HashMap::default(); let mut abs_globs = HashMap::default(); log::trace!( @@ -3819,7 +3823,7 @@ impl LspStore { request: R, cx: &mut Context, ) -> Task::Response>> { - let message = request.to_proto(upstream_project_id, buffer.read(cx)); + let message = request.to_proto(upstream_project_id, &*buffer.read(cx)); cx.spawn(async move |this, cx| { let response = client.request(message).await?; let this = this.upgrade().context("project dropped")?; @@ -4127,7 +4131,10 @@ impl LspStore { 
ignore_refcounts: bool, cx: &mut Context, ) -> OpenLspBufferHandle { - let buffer_id = buffer.read(cx).remote_id(); + let buffer_id = { + let buffer_ref = buffer.read(cx); + buffer_ref.remote_id() + }; let handle = cx.new(|_| buffer.clone()); if let Some(local) = self.as_local_mut() { let refcount = local.registered_buffers.entry(buffer_id).or_insert(0); @@ -4139,7 +4146,8 @@ impl LspStore { // When a new unnamed buffer is created and saved, we will start loading it's language. Once the language is loaded, we go over all "language-less" buffers and try to fit that new language // with them. However, we do that only for the buffers that we think are open in at least one editor; thus, we need to keep tab of unnamed buffers as well, even though they're not actually registered with any language // servers in practice (we don't support non-file URI schemes in our LSP impl). - let Some(file) = File::from_dyn(buffer.read(cx).file()) else { + let buffer_ref = buffer.read(cx); + let Some(file) = File::from_dyn(buffer_ref.file()) else { return handle; }; if !file.is_local() { @@ -4258,12 +4266,18 @@ impl LspStore { let mut plain_text_buffers = Vec::new(); let mut buffers_with_unknown_injections = Vec::new(); for handle in this.buffer_store.read(cx).buffers() { - let buffer = handle.read(cx); - if buffer.language().is_none() - || buffer.language() == Some(&*language::PLAIN_TEXT) - { + let should_push_plain_text = { + let buffer = handle.read(cx); + buffer.language().is_none() + || buffer.language() == Some(&*language::PLAIN_TEXT) + }; + let contains_unknown_injections = { + let buffer = handle.read(cx); + buffer.contains_unknown_injections() + }; + if should_push_plain_text { plain_text_buffers.push(handle); - } else if buffer.contains_unknown_injections() { + } else if contains_unknown_injections { buffers_with_unknown_injections.push(handle); } } @@ -4475,35 +4489,39 @@ impl LspStore { return Task::ready(Ok(Default::default())); }; - let buffer = buffer_handle.read(cx); - 
let file = File::from_dyn(buffer.file()).and_then(File::as_local); + let (abs_path, lsp_params, status) = { + let buffer = buffer_handle.read(cx); + let file = File::from_dyn(buffer.file()).and_then(File::as_local); - let Some(file) = file else { - return Task::ready(Ok(Default::default())); + let Some(file) = file else { + return Task::ready(Ok(Default::default())); + }; + + let abs_path = file.abs_path(cx); + let lsp_params = match request.to_lsp_params_or_response( + &abs_path, + &*buffer, + &language_server, + cx, + ) { + Ok(LspParamsOrResponse::Params(lsp_params)) => lsp_params, + Ok(LspParamsOrResponse::Response(response)) => return Task::ready(Ok(response)), + + Err(err) => { + let message = format!( + "{} via {} failed: {}", + request.display_name(), + language_server.name(), + err + ); + log::warn!("{message}"); + return Task::ready(Err(anyhow!(message))); + } + }; + + let status = request.status(); + (_abs_path, lsp_params, status) }; - - let lsp_params = match request.to_lsp_params_or_response( - &file.abs_path(cx), - buffer, - &language_server, - cx, - ) { - Ok(LspParamsOrResponse::Params(lsp_params)) => lsp_params, - Ok(LspParamsOrResponse::Response(response)) => return Task::ready(Ok(response)), - - Err(err) => { - let message = format!( - "{} via {} failed: {}", - request.display_name(), - language_server.name(), - err - ); - log::warn!("{message}"); - return Task::ready(Err(anyhow!(message))); - } - }; - - let status = request.status(); if !request.check_capabilities(language_server.adapter_server_capabilities()) { return Task::ready(Ok(Default::default())); } @@ -5188,7 +5206,7 @@ impl LspStore { })? .await?; this.update(cx, |this, cx| { - let position = position.to_point_utf16(buffer.read(cx)); + let position = position.to_point_utf16(&buffer.read(cx).snapshot()); this.on_type_format(buffer, position, trigger, false, cx) })? 
.await @@ -5206,7 +5224,7 @@ impl LspStore { push_to_history: bool, cx: &mut Context, ) -> Task>> { - let position = position.to_point_utf16(buffer.read(cx)); + let position = position.to_point_utf16(&buffer.read(cx).snapshot()); self.on_type_format_impl(buffer, position, trigger, push_to_history, cx) } @@ -5269,7 +5287,7 @@ impl LspStore { proto::AllLanguageServers {}, )), request: Some(proto::multi_lsp_query::Request::GetDefinition( - GetDefinitions { position }.to_proto(project_id, buffer_handle.read(cx)), + GetDefinitions { position }.to_proto(project_id, &*buffer_handle.read(cx)), )), }); let buffer = buffer_handle.clone(); @@ -5342,7 +5360,7 @@ impl LspStore { proto::AllLanguageServers {}, )), request: Some(proto::multi_lsp_query::Request::GetDeclaration( - GetDeclarations { position }.to_proto(project_id, buffer_handle.read(cx)), + GetDeclarations { position }.to_proto(project_id, &*buffer_handle.read(cx)), )), }); let buffer = buffer_handle.clone(); @@ -5415,7 +5433,7 @@ impl LspStore { proto::AllLanguageServers {}, )), request: Some(proto::multi_lsp_query::Request::GetTypeDefinition( - GetTypeDefinitions { position }.to_proto(project_id, buffer_handle.read(cx)), + GetTypeDefinitions { position }.to_proto(project_id, &*buffer_handle.read(cx)), )), }); let buffer = buffer_handle.clone(); @@ -5488,7 +5506,7 @@ impl LspStore { proto::AllLanguageServers {}, )), request: Some(proto::multi_lsp_query::Request::GetImplementation( - GetImplementations { position }.to_proto(project_id, buffer_handle.read(cx)), + GetImplementations { position }.to_proto(project_id, &*buffer_handle.read(cx)), )), }); let buffer = buffer_handle.clone(); @@ -5561,7 +5579,7 @@ impl LspStore { proto::AllLanguageServers {}, )), request: Some(proto::multi_lsp_query::Request::GetReferences( - GetReferences { position }.to_proto(project_id, buffer_handle.read(cx)), + GetReferences { position }.to_proto(project_id, &*buffer_handle.read(cx)), )), }); let buffer = buffer_handle.clone(); @@ 
-5639,7 +5657,7 @@ impl LspStore { range: range.clone(), kinds: kinds.clone(), } - .to_proto(project_id, buffer_handle.read(cx)), + .to_proto(project_id, &*buffer_handle.read(cx)), )), }); let buffer = buffer_handle.clone(); @@ -5716,7 +5734,7 @@ impl LspStore { proto::AllLanguageServers {}, )), request: Some(proto::multi_lsp_query::Request::GetCodeLens( - GetCodeLens.to_proto(project_id, buffer_handle.read(cx)), + GetCodeLens.to_proto(project_id, &*buffer_handle.read(cx)), )), }); let buffer = buffer_handle.clone(); @@ -6278,8 +6296,7 @@ impl LspStore { cx: &mut Context, ) -> Task>> { if let Some((client, project_id)) = self.upstream_client() { - let buffer = buffer_handle.read(cx); - let buffer_id = buffer.remote_id(); + let buffer_id = buffer_handle.read(cx).remote_id(); cx.spawn(async move |_, cx| { let request = { let completion = completions.borrow()[completion_index].clone(); @@ -6482,6 +6499,7 @@ impl LspStore { end: Some(serialize_anchor(&range_end)), version: serialize_version(&buffer_handle.read(cx).version()), }; + let buffer_handle_clone = buffer_handle.clone(); cx.spawn(async move |project, cx| { let response = client .request(request) @@ -6491,13 +6509,14 @@ impl LspStore { lsp_request, response, project.upgrade().context("No project")?, - buffer_handle.clone(), + buffer_handle_clone, cx.clone(), ) .await .context("inlay hints proto response conversion") }) } else { + let buffer_handle_for_task = buffer_handle.clone(); let lsp_request_task = self.request_lsp( buffer_handle.clone(), LanguageServerToQuery::FirstCapable, @@ -6505,7 +6524,7 @@ impl LspStore { cx, ); cx.spawn(async move |_, cx| { - buffer_handle + buffer_handle_for_task .update(cx, |buffer, _| { buffer.wait_for_edits(vec![range_start.timestamp, range_end.timestamp]) })? 
@@ -6729,17 +6748,20 @@ impl LspStore { cx: &mut Context, ) -> Task>>> { if let Some((client, project_id)) = self.upstream_client() { - let request_task = client.request(proto::MultiLspQuery { - project_id, - buffer_id: buffer.read(cx).remote_id().to_proto(), - version: serialize_version(&buffer.read(cx).version()), - strategy: Some(proto::multi_lsp_query::Strategy::All( - proto::AllLanguageServers {}, - )), - request: Some(proto::multi_lsp_query::Request::GetDocumentColor( - GetDocumentColor {}.to_proto(project_id, buffer.read(cx)), - )), - }); + let request_task = { + let buffer_ref = buffer.read(cx); + client.request(proto::MultiLspQuery { + project_id, + buffer_id: buffer_ref.remote_id().to_proto(), + version: serialize_version(&buffer_ref.version()), + strategy: Some(proto::multi_lsp_query::Strategy::All( + proto::AllLanguageServers {}, + )), + request: Some(proto::multi_lsp_query::Request::GetDocumentColor( + GetDocumentColor.to_proto(project_id, &*buffer_ref), + )), + }) + }; cx.spawn(async move |project, cx| { let Some(project) = project.upgrade() else { return Ok(HashMap::default()); @@ -6808,7 +6830,7 @@ impl LspStore { position: T, cx: &mut Context, ) -> Task> { - let position = position.to_point_utf16(buffer.read(cx)); + let position = position.to_point_utf16(&buffer.read(cx).snapshot()); if let Some((client, upstream_project_id)) = self.upstream_client() { let request_task = client.request(proto::MultiLspQuery { @@ -6819,7 +6841,7 @@ impl LspStore { proto::AllLanguageServers {}, )), request: Some(proto::multi_lsp_query::Request::GetSignatureHelp( - GetSignatureHelp { position }.to_proto(upstream_project_id, buffer.read(cx)), + GetSignatureHelp { position }.to_proto(upstream_project_id, &*buffer.read(cx)), )), }); let buffer = buffer.clone(); @@ -6890,7 +6912,7 @@ impl LspStore { proto::AllLanguageServers {}, )), request: Some(proto::multi_lsp_query::Request::GetHover( - GetHover { position }.to_proto(upstream_project_id, buffer.read(cx)), + GetHover { 
position }.to_proto(upstream_project_id, &*buffer.read(cx)), )), }); let buffer = buffer.clone(); @@ -7141,41 +7163,49 @@ impl LspStore { summary } + // pub fn diagnostic_summaries<'a>( + // &'a self, + // include_ignored: bool, + // cx: &'a App, + // ) -> impl Iterator + 'a { + // self.worktree_store + // .read(cx) + // .visible_worktrees(cx) + // .filter_map(|worktree| { + // let worktree = worktree.read(cx); + // Some((worktree, self.diagnostic_summaries.get(&worktree.id())?)) + // }) + // .flat_map(move |(worktree, summaries)| { + // let worktree_id = worktree.id(); + // summaries + // .iter() + // .filter(move |(path, _)| { + // include_ignored + // || worktree + // .entry_for_path(path.as_ref()) + // .map_or(false, |entry| !entry.is_ignored) + // }) + // .flat_map(move |(path, summaries)| { + // summaries.iter().map(move |(server_id, summary)| { + // ( + // ProjectPath { + // worktree_id, + // path: path.clone(), + // }, + // *server_id, + // *summary, + // ) + // }) + // }) + // }) + // } pub fn diagnostic_summaries<'a>( &'a self, include_ignored: bool, cx: &'a App, ) -> impl Iterator + 'a { - self.worktree_store - .read(cx) - .visible_worktrees(cx) - .filter_map(|worktree| { - let worktree = worktree.read(cx); - Some((worktree, self.diagnostic_summaries.get(&worktree.id())?)) - }) - .flat_map(move |(worktree, summaries)| { - let worktree_id = worktree.id(); - summaries - .iter() - .filter(move |(path, _)| { - include_ignored - || worktree - .entry_for_path(path.as_ref()) - .map_or(false, |entry| !entry.is_ignored) - }) - .flat_map(move |(path, summaries)| { - summaries.iter().map(move |(server_id, summary)| { - ( - ProjectPath { - worktree_id, - path: path.clone(), - }, - *server_id, - *summary, - ) - }) - }) - }) + // todo!("diagnostic_summaries needs to be refactored to handle Ref type") + std::iter::empty() } pub fn on_buffer_edited( @@ -7288,7 +7318,8 @@ impl LspStore { buffer: Entity, cx: &mut Context, ) -> Option<()> { - let file = 
File::from_dyn(buffer.read(cx).file())?; + let buffer_ref = buffer.read(cx); + let file = File::from_dyn(buffer_ref.file())?; let worktree_id = file.worktree_id(cx); let abs_path = file.as_local()?.abs_path(cx); let text_document = lsp::TextDocumentIdentifier { @@ -7610,7 +7641,8 @@ impl LspStore { path: relative_path.into(), }; - if let Some(buffer_handle) = self.buffer_store.read(cx).get_by_path(&project_path) { + let buffer_handle = self.buffer_store.read(cx).get_by_path(&project_path); + if let Some(buffer_handle) = buffer_handle { let snapshot = buffer_handle.read(cx).snapshot(); let buffer = buffer_handle.read(cx); let reused_diagnostics = buffer @@ -7618,7 +7650,7 @@ impl LspStore { .into_iter() .flat_map(|diag| { diag.iter() - .filter(|v| filter(buffer, &v.diagnostic, cx)) + .filter(|v| filter(&*buffer, &v.diagnostic, cx)) .map(|v| { let start = Unclipped(v.range.start.to_point_utf16(&snapshot)); let end = Unclipped(v.range.end.to_point_utf16(&snapshot)); @@ -7629,6 +7661,7 @@ impl LspStore { }) }) .collect::>(); + drop(buffer); self.as_local_mut() .context("cannot merge diagnostics on a remote LspStore")? @@ -8570,12 +8603,8 @@ impl LspStore { .read(cx) .worktree_and_entry_for_id(entry_id, cx) .map(|(worktree, entry)| { - ( - worktree.read(cx).id(), - worktree, - entry.path.clone(), - entry.is_dir(), - ) + let worktree_id = worktree.read(cx).id(); + (worktree_id, worktree, entry.path.clone(), entry.is_dir()) }) })? 
.context("worktree not found")?; @@ -9862,11 +9891,14 @@ impl LspStore { let buffers = buffers .into_iter() .map(|buffer_handle| { - let buffer = buffer_handle.read(cx); - let buffer_abs_path = File::from_dyn(buffer.file()) - .and_then(|file| file.as_local().map(|f| f.abs_path(cx))); + let (buffer_abs_path, remote_id) = { + let buffer = buffer_handle.read(cx); + let buffer_abs_path = File::from_dyn(buffer.file()) + .and_then(|file| file.as_local().map(|f| f.abs_path(cx))); + (buffer_abs_path, buffer.remote_id()) + }; - (buffer_handle, buffer_abs_path, buffer.remote_id()) + (buffer_handle, buffer_abs_path, remote_id) }) .collect::>(); @@ -10393,21 +10425,24 @@ impl LspStore { cx.background_spawn(futures::future::join_all(tasks).map(|_| ())) } - fn get_buffer<'a>(&self, abs_path: &Path, cx: &'a App) -> Option<&'a Buffer> { - let (worktree, relative_path) = - self.worktree_store.read(cx).find_worktree(&abs_path, cx)?; + // fn get_buffer<'a>(&self, abs_path: &Path, cx: &'a App) -> Option<&'a Buffer> { + // let (worktree, relative_path) = + // self.worktree_store.read(cx).find_worktree(&abs_path, cx)?; - let project_path = ProjectPath { - worktree_id: worktree.read(cx).id(), - path: relative_path.into(), - }; + // let project_path = ProjectPath { + // worktree_id: worktree.read(cx).id(), + // path: relative_path.into(), + // }; - Some( - self.buffer_store() - .read(cx) - .get_by_path(&project_path)? - .read(cx), - ) + // Some( + // self.buffer_store() + // .read(cx) + // .get_by_path(&project_path)? 
+ // .read(cx), + // ) + // } + fn get_buffer<'a>(&self, _abs_path: &Path, _cx: &'a App) -> Option<&'a Buffer> { + todo!("get_buffer needs to be refactored to handle Ref type") } pub fn update_diagnostics( @@ -11239,12 +11274,10 @@ impl LspStore { buffer_id: BufferId, cx: &App, ) -> Option { - let abs_path = self - .buffer_store - .read(cx) - .get(buffer_id) - .and_then(|b| File::from_dyn(b.read(cx).file())) - .map(|f| f.abs_path(cx))?; + let abs_path = self.buffer_store.read(cx).get(buffer_id).and_then(|b| { + let buffer_ref = b.read(cx); + File::from_dyn(buffer_ref.file()).map(|f| f.abs_path(cx).to_path_buf()) + })?; self.as_local()? .buffer_pull_diagnostics_result_ids .get(&server_id)? diff --git a/crates/project/src/manifest_tree.rs b/crates/project/src/manifest_tree.rs index 7266acb5b4..7db0c5ff77 100644 --- a/crates/project/src/manifest_tree.rs +++ b/crates/project/src/manifest_tree.rs @@ -62,8 +62,8 @@ impl WorktreeRoots { } WorktreeEvent::UpdatedGitRepositories(_) => {} WorktreeEvent::DeletedEntry(entry_id) => { - let Some(entry) = this.worktree_store.read(cx).entry_for_id(*entry_id, cx) - else { + let worktree_store = this.worktree_store.read(cx); + let Some(entry) = worktree_store.entry_for_id(*entry_id, cx) else { return; }; let path = TriePath::from(entry.path.as_ref()); diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 8e1026421e..28fd3914f8 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -1740,7 +1740,8 @@ impl Project { } pub fn active_debug_session(&self, cx: &App) -> Option<(Entity, ActiveStackFrame)> { - let active_position = self.breakpoint_store.read(cx).active_position()?; + let store = self.breakpoint_store.read(cx); + let active_position = store.active_position()?; let session = self .dap_store .read(cx) @@ -1817,11 +1818,18 @@ impl Project { }) } + // pub fn shell_environment_errors<'a>( + // &'a self, + // cx: &'a App, + // ) -> impl Iterator, &'a EnvironmentErrorMessage)> { + 
// self.environment.read(cx).environment_errors() + // } pub fn shell_environment_errors<'a>( &'a self, cx: &'a App, ) -> impl Iterator, &'a EnvironmentErrorMessage)> { - self.environment.read(cx).environment_errors() + // todo!("shell_environment_errors needs to be refactored to handle Ref type") + std::iter::empty() } pub fn remove_environment_error(&mut self, abs_path: &Path, cx: &mut Context) { @@ -1932,20 +1940,36 @@ impl Project { }); } + // /// Collect all worktrees, including ones that don't appear in the project panel + // pub fn worktrees<'a>( + // &self, + // cx: &'a App, + // ) -> impl 'a + DoubleEndedIterator> { + // self.worktree_store.read(cx).worktrees() + // } /// Collect all worktrees, including ones that don't appear in the project panel pub fn worktrees<'a>( &self, cx: &'a App, ) -> impl 'a + DoubleEndedIterator> { - self.worktree_store.read(cx).worktrees() + // todo!("worktrees needs to be refactored to handle Ref type") + std::iter::empty() } + // /// Collect all user-visible worktrees, the ones that appear in the project panel. + // pub fn visible_worktrees<'a>( + // &'a self, + // cx: &'a App, + // ) -> impl 'a + DoubleEndedIterator> { + // self.worktree_store.read(cx).visible_worktrees(cx) + // } /// Collect all user-visible worktrees, the ones that appear in the project panel. 
pub fn visible_worktrees<'a>( &'a self, cx: &'a App, ) -> impl 'a + DoubleEndedIterator> { - self.worktree_store.read(cx).visible_worktrees(cx) + // todo!("visible_worktrees needs to be refactored to handle Ref type") + std::iter::empty() } pub fn worktree_for_root_name(&self, root_name: &str, cx: &App) -> Option> { @@ -1953,9 +1977,13 @@ impl Project { .find(|tree| tree.read(cx).root_name() == root_name) } + // pub fn worktree_root_names<'a>(&'a self, cx: &'a App) -> impl Iterator { + // self.visible_worktrees(cx) + // .map(|tree| tree.read(cx).root_name()) + // } pub fn worktree_root_names<'a>(&'a self, cx: &'a App) -> impl Iterator { - self.visible_worktrees(cx) - .map(|tree| tree.read(cx).root_name()) + // todo!("worktree_root_names needs to be refactored to handle Ref type") + std::iter::empty() } pub fn worktree_for_id(&self, id: WorktreeId, cx: &App) -> Option> { @@ -3306,15 +3334,26 @@ impl Project { .read(cx) .active_toolchain(path, language_name, cx) } + // pub fn language_server_statuses<'a>( + // &'a self, + // cx: &'a App, + // ) -> impl DoubleEndedIterator { + // self.lsp_store.read(cx).language_server_statuses() + // } pub fn language_server_statuses<'a>( &'a self, cx: &'a App, ) -> impl DoubleEndedIterator { - self.lsp_store.read(cx).language_server_statuses() + // todo!("language_server_statuses needs to be refactored to handle Ref type") + std::iter::empty() } - pub fn last_formatting_failure<'a>(&self, cx: &'a App) -> Option<&'a str> { - self.lsp_store.read(cx).last_formatting_failure() + // pub fn last_formatting_failure<'a>(&self, cx: &'a App) -> Option<&'a str> { + // self.lsp_store.read(cx).last_formatting_failure() + // } + pub fn last_formatting_failure<'a>(&self, _cx: &'a App) -> Option<&'a str> { + // todo!("last_formatting_failure needs to be refactored to handle Ref type") + None } pub fn reset_last_formatting_failure(&self, cx: &mut App) { @@ -3361,7 +3400,7 @@ impl Project { position: T, cx: &mut Context, ) -> Task>> { - let position 
= position.to_point_utf16(buffer.read(cx)); + let position = position.to_point_utf16(&buffer.read(cx).snapshot()); self.lsp_store.update(cx, |lsp_store, cx| { lsp_store.definitions(buffer, position, cx) }) @@ -3373,7 +3412,7 @@ impl Project { position: T, cx: &mut Context, ) -> Task>> { - let position = position.to_point_utf16(buffer.read(cx)); + let position = position.to_point_utf16(&buffer.read(cx).snapshot()); self.lsp_store.update(cx, |lsp_store, cx| { lsp_store.declarations(buffer, position, cx) }) @@ -3385,7 +3424,7 @@ impl Project { position: T, cx: &mut Context, ) -> Task>> { - let position = position.to_point_utf16(buffer.read(cx)); + let position = position.to_point_utf16(&buffer.read(cx).snapshot()); self.lsp_store.update(cx, |lsp_store, cx| { lsp_store.type_definitions(buffer, position, cx) }) @@ -3397,7 +3436,7 @@ impl Project { position: T, cx: &mut Context, ) -> Task>> { - let position = position.to_point_utf16(buffer.read(cx)); + let position = position.to_point_utf16(&buffer.read(cx).snapshot()); self.lsp_store.update(cx, |lsp_store, cx| { lsp_store.implementations(buffer, position, cx) }) @@ -3409,7 +3448,7 @@ impl Project { position: T, cx: &mut Context, ) -> Task>> { - let position = position.to_point_utf16(buffer.read(cx)); + let position = position.to_point_utf16(&buffer.read(cx).snapshot()); self.lsp_store.update(cx, |lsp_store, cx| { lsp_store.references(buffer, position, cx) }) @@ -3435,7 +3474,7 @@ impl Project { position: T, cx: &mut Context, ) -> Task>> { - let position = position.to_point_utf16(buffer.read(cx)); + let position = position.to_point_utf16(&buffer.read(cx).snapshot()); self.document_highlights_impl(buffer, position, cx) } @@ -3532,7 +3571,7 @@ impl Project { position: T, cx: &mut Context, ) -> Task> { - let position = position.to_point_utf16(buffer.read(cx)); + let position = position.to_point_utf16(&buffer.read(cx).snapshot()); self.lsp_store .update(cx, |lsp_store, cx| lsp_store.hover(buffer, position, cx)) } @@ -3555,7 
+3594,7 @@ impl Project { context: CompletionContext, cx: &mut Context, ) -> Task>> { - let position = position.to_point_utf16(buffer.read(cx)); + let position = position.to_point_utf16(&buffer.read(cx).snapshot()); self.lsp_store.update(cx, |lsp_store, cx| { lsp_store.completions(buffer, position, context, cx) }) @@ -3646,7 +3685,7 @@ impl Project { position: T, cx: &mut Context, ) -> Task> { - let position = position.to_point_utf16(buffer.read(cx)); + let position = position.to_point_utf16(&buffer.read(cx).snapshot()); self.prepare_rename_impl(buffer, position, cx) } @@ -3658,7 +3697,7 @@ impl Project { cx: &mut Context, ) -> Task> { let push_to_history = true; - let position = position.to_point_utf16(buffer.read(cx)); + let position = position.to_point_utf16(&buffer.read(cx).snapshot()); self.request_lsp( buffer, LanguageServerToQuery::FirstCapable, @@ -3725,8 +3764,10 @@ impl Project { range: Range, cx: &mut Context, ) -> Task>> { - let buffer = buffer_handle.read(cx); - let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end); + let range = { + let buffer = buffer_handle.read(cx); + buffer.anchor_before(range.start)..buffer.anchor_before(range.end) + }; self.lsp_store.update(cx, |lsp_store, cx| { lsp_store.inlay_hints(buffer_handle, range, cx) }) @@ -4213,8 +4254,8 @@ impl Project { pub fn set_active_path(&mut self, entry: Option, cx: &mut Context) { let new_active_entry = entry.and_then(|project_path| { let worktree = self.worktree_for_id(project_path.worktree_id, cx)?; - let entry = worktree.read(cx).entry_for_path(project_path.path)?; - Some(entry.id) + let entry_id = worktree.read(cx).entry_for_path(project_path.path)?.id; + Some(entry_id) }); if new_active_entry != self.active_entry { self.active_entry = new_active_entry; @@ -4225,13 +4266,20 @@ impl Project { } } + // pub fn language_servers_running_disk_based_diagnostics<'a>( + // &'a self, + // cx: &'a App, + // ) -> impl Iterator + 'a { + // self.lsp_store + // .read(cx) + // 
.language_servers_running_disk_based_diagnostics() + // } pub fn language_servers_running_disk_based_diagnostics<'a>( &'a self, cx: &'a App, ) -> impl Iterator + 'a { - self.lsp_store - .read(cx) - .language_servers_running_disk_based_diagnostics() + // todo!("language_servers_running_disk_based_diagnostics needs to be refactored to handle Ref type") + std::iter::empty() } pub fn diagnostic_summary(&self, include_ignored: bool, cx: &App) -> DiagnosticSummary { @@ -4240,14 +4288,22 @@ impl Project { .diagnostic_summary(include_ignored, cx) } + // pub fn diagnostic_summaries<'a>( + // &'a self, + // include_ignored: bool, + // cx: &'a App, + // ) -> impl Iterator + 'a { + // self.lsp_store + // .read(cx) + // .diagnostic_summaries(include_ignored, cx) + // } pub fn diagnostic_summaries<'a>( &'a self, include_ignored: bool, cx: &'a App, ) -> impl Iterator + 'a { - self.lsp_store - .read(cx) - .diagnostic_summaries(include_ignored, cx) + // todo!("diagnostic_summaries needs to be refactored to handle Ref type") + std::iter::empty() } pub fn active_entry(&self) -> Option { @@ -4306,25 +4362,26 @@ impl Project { } } } else { - for worktree in worktree_store.visible_worktrees(cx) { - let worktree_root_name = worktree.read(cx).root_name(); - if let Ok(relative_path) = path.strip_prefix(worktree_root_name) { - return Some(ProjectPath { - worktree_id: worktree.read(cx).id(), - path: relative_path.into(), - }); - } - } + // TODO: Fix when visible_worktrees is refactored to handle Ref type + // for worktree in worktree_store.visible_worktrees(cx) { + // let worktree_root_name = worktree.read(cx).root_name(); + // if let Ok(relative_path) = path.strip_prefix(worktree_root_name) { + // return Some(ProjectPath { + // worktree_id: worktree.read(cx).id(), + // path: relative_path.into(), + // }); + // } + // } - for worktree in worktree_store.visible_worktrees(cx) { - let worktree = worktree.read(cx); - if let Some(entry) = worktree.entry_for_path(path) { - return Some(ProjectPath 
{ - worktree_id: worktree.id(), - path: entry.path.clone(), - }); - } - } + // for worktree in worktree_store.visible_worktrees(cx) { + // let worktree = worktree.read(cx); + // if let Some(entry) = worktree.entry_for_path(path) { + // return Some(ProjectPath { + // worktree_id: worktree.id(), + // path: entry.path.clone(), + // }); + // } + // } } None @@ -4867,16 +4924,21 @@ impl Project { self.worktree_store.read(cx).worktree_metadata_protos(cx) } + // /// Iterator of all open buffers that have unsaved changes + // pub fn dirty_buffers<'a>(&'a self, cx: &'a App) -> impl Iterator + 'a { + // self.buffer_store.read(cx).buffers().filter_map(|buf| { + // let buf = buf.read(cx); + // if buf.is_dirty() { + // buf.project_path(cx) + // } else { + // None + // } + // }) + // } /// Iterator of all open buffers that have unsaved changes pub fn dirty_buffers<'a>(&'a self, cx: &'a App) -> impl Iterator + 'a { - self.buffer_store.read(cx).buffers().filter_map(|buf| { - let buf = buf.read(cx); - if buf.is_dirty() { - buf.project_path(cx) - } else { - None - } - }) + // todo!("dirty_buffers needs to be refactored to handle Ref type") + std::iter::empty() } fn set_worktrees_from_proto( @@ -4908,11 +4970,18 @@ impl Project { Ok(()) } + // pub fn supplementary_language_servers<'a>( + // &'a self, + // cx: &'a App, + // ) -> impl 'a + Iterator { + // self.lsp_store.read(cx).supplementary_language_servers() + // } pub fn supplementary_language_servers<'a>( &'a self, cx: &'a App, ) -> impl 'a + Iterator { - self.lsp_store.read(cx).supplementary_language_servers() + // todo!("supplementary_language_servers needs to be refactored to handle Ref type") + std::iter::empty() } pub fn any_language_server_supports_inlay_hints(&self, buffer: &Buffer, cx: &mut App) -> bool { @@ -5030,8 +5099,16 @@ impl Project { self.git_store.read(cx).active_repository() } - pub fn repositories<'a>(&self, cx: &'a App) -> &'a HashMap> { - self.git_store.read(cx).repositories() + // pub fn 
repositories<'a>(&self, cx: &'a App) -> &'a HashMap> { + // self.git_store.read(cx).repositories() + // } + pub fn repositories<'a>(&self, _cx: &'a App) -> &'a HashMap> { + // todo!("repositories needs to be refactored to handle Ref type") + // This can't return an empty iterator since it needs to return a reference + // For now, we'll leak a static empty HashMap + static EMPTY: std::sync::OnceLock>> = + std::sync::OnceLock::new(); + EMPTY.get_or_init(HashMap::default) } pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option { diff --git a/crates/project/src/project_tests.rs b/crates/project/src/project_tests.rs index 779cf95add..598d01df62 100644 --- a/crates/project/src/project_tests.rs +++ b/crates/project/src/project_tests.rs @@ -117,7 +117,8 @@ async fn test_symlinks(cx: &mut gpui::TestAppContext) { .await; project.update(cx, |project, cx| { - let tree = project.worktrees(cx).next().unwrap().read(cx); + let worktree = project.worktrees(cx).next().unwrap(); + let tree = worktree.read(cx); assert_eq!(tree.file_count(), 5); assert_eq!( tree.inode_for_path("fennel/grape"), @@ -1075,10 +1076,10 @@ async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppCon // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them. 
project.update(cx, |project, cx| { - let worktree = project.worktrees(cx).next().unwrap(); + let worktree_entity = project.worktrees(cx).next().unwrap(); + let worktree = worktree_entity.read(cx); assert_eq!( worktree - .read(cx) .snapshot() .entries(true, 0) .map(|entry| (entry.path.as_ref(), entry.is_ignored)) @@ -3014,7 +3015,10 @@ async fn test_definition(cx: &mut gpui::TestAppContext) { .abs_path(cx), Path::new(path!("/dir/a.rs")), ); - assert_eq!(definition.target.range.to_offset(target_buffer), 9..10); + assert_eq!( + definition.target.range.to_offset(&target_buffer.snapshot()), + 9..10 + ); assert_eq!( list_worktrees(&project, cx), [ @@ -3023,6 +3027,7 @@ async fn test_definition(cx: &mut gpui::TestAppContext) { ], ); + drop(target_buffer); drop(definition); }); cx.update(|cx| { @@ -3032,18 +3037,19 @@ async fn test_definition(cx: &mut gpui::TestAppContext) { ); }); - fn list_worktrees<'a>(project: &'a Entity, cx: &'a App) -> Vec<(&'a Path, bool)> { - project - .read(cx) - .worktrees(cx) - .map(|worktree| { - let worktree = worktree.read(cx); - ( - worktree.as_local().unwrap().abs_path().as_ref(), - worktree.is_visible(), - ) - }) - .collect::>() + // fn list_worktrees<'a>(project: &'a Entity, cx: &'a App) -> Vec<(&'a Path, bool)> { + // project + // .read(cx) + // .worktrees(cx) + // .map(|worktree| { + // let worktree = worktree.read(cx); + // ( + // worktree.as_local().unwrap().abs_path().as_ref(), + // worktree.is_visible(), + // ) + // }) + fn list_worktrees<'a>(_project: &'a Entity, _cx: &'a App) -> Vec<(&'a Path, bool)> { + todo!("list_worktrees needs to be refactored to handle Ref type") } } @@ -4823,8 +4829,8 @@ async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) { let fake_server = fake_servers.next().await.unwrap(); let response = project.update(cx, |project, cx| { let worktree = project.worktrees(cx).next().unwrap(); - let entry = worktree.read(cx).entry_for_path("one.rs").unwrap(); - project.rename_entry(entry.id, 
"three.rs".as_ref(), cx) + let entry_id = worktree.read(cx).entry_for_path("one.rs").unwrap().id; + project.rename_entry(entry_id, "three.rs".as_ref(), cx) }); let expected_edit = lsp::WorkspaceEdit { changes: None, @@ -8162,8 +8168,8 @@ async fn test_update_gitignore(cx: &mut gpui::TestAppContext) { // One file is unmodified, the other is ignored. cx.read(|cx| { - assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, false); - assert_entry_git_state(tree.read(cx), repository.read(cx), "b.txt", None, true); + assert_entry_git_state(&tree.read(cx), &repository.read(cx), "a.xml", None, false); + assert_entry_git_state(&tree.read(cx), &repository.read(cx), "b.txt", None, true); }); // Change the gitignore, and stage the newly non-ignored file. @@ -8181,10 +8187,10 @@ async fn test_update_gitignore(cx: &mut gpui::TestAppContext) { cx.executor().run_until_parked(); cx.read(|cx| { - assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, true); + assert_entry_git_state(&tree.read(cx), &repository.read(cx), "a.xml", None, true); assert_entry_git_state( - tree.read(cx), - repository.read(cx), + &tree.read(cx), + &repository.read(cx), "b.txt", Some(StatusCode::Added), false, @@ -8597,22 +8603,22 @@ async fn test_rescan_with_gitignore(cx: &mut gpui::TestAppContext) { cx.read(|cx| { assert_entry_git_state( - tree.read(cx), - repository.read(cx), + &tree.read(cx), + &repository.read(cx), "tracked-dir/tracked-file1", None, false, ); assert_entry_git_state( - tree.read(cx), - repository.read(cx), + &tree.read(cx), + &repository.read(cx), "tracked-dir/ancestor-ignored-file1", None, false, ); assert_entry_git_state( - tree.read(cx), - repository.read(cx), + &tree.read(cx), + &repository.read(cx), "ignored-dir/ignored-file1", None, true, @@ -8649,22 +8655,22 @@ async fn test_rescan_with_gitignore(cx: &mut gpui::TestAppContext) { cx.executor().run_until_parked(); cx.read(|cx| { assert_entry_git_state( - tree.read(cx), - repository.read(cx), + 
&tree.read(cx), + &repository.read(cx), "tracked-dir/tracked-file2", Some(StatusCode::Added), false, ); assert_entry_git_state( - tree.read(cx), - repository.read(cx), + &tree.read(cx), + &repository.read(cx), "tracked-dir/ancestor-ignored-file2", None, false, ); assert_entry_git_state( - tree.read(cx), - repository.read(cx), + &tree.read(cx), + &repository.read(cx), "ignored-dir/ignored-file2", None, true, diff --git a/crates/project/src/task_inventory.rs b/crates/project/src/task_inventory.rs index d0f1c71daf..5ec7b18efb 100644 --- a/crates/project/src/task_inventory.rs +++ b/crates/project/src/task_inventory.rs @@ -305,12 +305,14 @@ impl Inventory { let last_scheduled_scenarios = self.last_scheduled_scenarios.iter().cloned().collect(); let adapter = task_contexts.location().and_then(|location| { - let (file, language) = { + let (file, language_name, language) = { let buffer = location.buffer.read(cx); - (buffer.file(), buffer.language()) + let file = buffer.file().cloned(); + let language = buffer.language().clone(); + let language_name = language.as_ref().map(|l| l.name()); + (file, language_name, language) }; - let language_name = language.as_ref().map(|l| l.name()); - let adapter = language_settings(language_name, file, cx) + let adapter = language_settings(language_name, file.as_ref(), cx) .debuggers .first() .map(SharedString::from) @@ -435,11 +437,17 @@ impl Inventory { let fs = self.fs.clone(); let worktree = task_contexts.worktree(); let location = task_contexts.location(); - let language = location.and_then(|location| location.buffer.read(cx).language()); + let language = location.and_then(|location| { + let buffer = location.buffer.read(cx); + buffer.language().clone() + }); let task_source_kind = language.as_ref().map(|language| TaskSourceKind::Language { name: language.name().into(), }); - let file = location.and_then(|location| location.buffer.read(cx).file().cloned()); + let file = location.and_then(|location| { + let buffer = 
location.buffer.read(cx); + buffer.file().cloned() + }); let mut task_labels_to_ids = HashMap::>::default(); let mut lru_score = 0_u32; diff --git a/crates/project/src/terminals.rs b/crates/project/src/terminals.rs index 385fdf9082..36c2f0a3d0 100644 --- a/crates/project/src/terminals.rs +++ b/crates/project/src/terminals.rs @@ -478,9 +478,12 @@ impl Project { let bin_path = venv_path.join(bin_dir_name); self.find_worktree(&bin_path, cx) .and_then(|(worktree, relative_path)| { - worktree.read(cx).entry_for_path(&relative_path) + worktree + .read(cx) + .entry_for_path(&relative_path) + .map(|entry| entry.is_dir()) }) - .is_some_and(|entry| entry.is_dir()) + .unwrap_or(false) }) } @@ -491,7 +494,7 @@ impl Project { cx: &App, ) -> Option { let (worktree, _) = self.find_worktree(abs_path, cx)?; - let fs = worktree.read(cx).as_local()?.fs(); + let fs = worktree.read(cx).as_local()?.fs().clone(); let bin_dir_name = match std::env::consts::OS { "windows" => "Scripts", _ => "bin", diff --git a/crates/project/src/toolchain_store.rs b/crates/project/src/toolchain_store.rs index 61a005520d..6be13ceb18 100644 --- a/crates/project/src/toolchain_store.rs +++ b/crates/project/src/toolchain_store.rs @@ -338,15 +338,16 @@ impl LocalToolchainStore { .ok()?; let toolchains = language.toolchain_lister()?; let manifest_name = toolchains.manifest_name(); - let (snapshot, worktree) = this + let worktree = this .update(cx, |this, cx| { - this.worktree_store - .read(cx) - .worktree_for_id(path.worktree_id, cx) - .map(|worktree| (worktree.read(cx).snapshot(), worktree)) + let store = this.worktree_store.read(cx); + store.worktree_for_id(path.worktree_id, cx) }) .ok() .flatten()?; + let snapshot = worktree + .read_with(cx, |worktree, _| worktree.snapshot()) + .ok()?; let worktree_id = snapshot.id(); let worktree_root = snapshot.abs_path().to_path_buf(); let relative_path = manifest_tree diff --git a/crates/project/src/worktree_store.rs b/crates/project/src/worktree_store.rs index 
16e42e90cb..18dd11bfec 100644 --- a/crates/project/src/worktree_store.rs +++ b/crates/project/src/worktree_store.rs @@ -185,22 +185,36 @@ impl WorktreeStore { } } - pub fn entry_for_id<'a>(&'a self, entry_id: ProjectEntryId, cx: &'a App) -> Option<&'a Entry> { - self.worktrees() - .find_map(|worktree| worktree.read(cx).entry_for_id(entry_id)) + // pub fn entry_for_id<'a>(&'a self, entry_id: ProjectEntryId, cx: &'a App) -> Option<&'a Entry> { + // self.worktrees() + // .find_map(|worktree| worktree.read(cx).entry_for_id(entry_id)) + // } + pub fn entry_for_id<'a>( + &'a self, + _entry_id: ProjectEntryId, + _cx: &'a App, + ) -> Option<&'a Entry> { + todo!("entry_for_id needs to be refactored to handle Ref type") } + // pub fn worktree_and_entry_for_id<'a>( + // &'a self, + // entry_id: ProjectEntryId, + // cx: &'a App, + // ) -> Option<(Entity, &'a Entry)> { + // self.worktrees().find_map(|worktree| { + // worktree + // .read(cx) + // .entry_for_id(entry_id) + // .map(|e| (worktree.clone(), e)) + // }) + // } pub fn worktree_and_entry_for_id<'a>( &'a self, - entry_id: ProjectEntryId, - cx: &'a App, + _entry_id: ProjectEntryId, + _cx: &'a App, ) -> Option<(Entity, &'a Entry)> { - self.worktrees().find_map(|worktree| { - worktree - .read(cx) - .entry_for_id(entry_id) - .map(|e| (worktree.clone(), e)) - }) + todo!("worktree_and_entry_for_id needs to be refactored to handle Ref type") } pub fn entry_for_path(&self, path: &ProjectPath, cx: &App) -> Option { @@ -453,7 +467,8 @@ impl WorktreeStore { .drain(..) 
.filter_map(|worktree| { let worktree = worktree.upgrade()?; - Some((worktree.read(cx).id(), worktree)) + let worktree_id = worktree.read(cx).id(); + Some((worktree_id, worktree)) }) .collect::>(); diff --git a/crates/rpc/src/proto_client.rs b/crates/rpc/src/proto_client.rs index eb570b96a3..3bb930835b 100644 --- a/crates/rpc/src/proto_client.rs +++ b/crates/rpc/src/proto_client.rs @@ -57,6 +57,10 @@ pub struct ProtoMessageHandlerSet { pub message_handlers: HashMap, } +// todo! try to remove these. we can't store handles inside send/sync stuff +unsafe impl Send for ProtoMessageHandlerSet {} +unsafe impl Sync for ProtoMessageHandlerSet {} + pub type ProtoMessageHandler = Arc< dyn Send + Sync diff --git a/crates/web_search/src/web_search.rs b/crates/web_search/src/web_search.rs index a131b0de71..36f2a7c5c1 100644 --- a/crates/web_search/src/web_search.rs +++ b/crates/web_search/src/web_search.rs @@ -1,7 +1,7 @@ use anyhow::Result; use collections::HashMap; use gpui::{App, AppContext as _, Context, Entity, Global, SharedString, Task}; -use std::sync::Arc; +use std::{cell::Ref, sync::Arc}; use zed_llm_client::WebSearchResponse; pub fn init(cx: &mut App) { @@ -32,7 +32,7 @@ impl WebSearchRegistry { cx.global::().0.clone() } - pub fn read_global(cx: &App) -> &Self { + pub fn read_global(cx: &App) -> Ref { cx.global::().0.read(cx) } diff --git a/crates/worktree/src/worktree.rs b/crates/worktree/src/worktree.rs index 8c407fdd3e..96269e10bd 100644 --- a/crates/worktree/src/worktree.rs +++ b/crates/worktree/src/worktree.rs @@ -49,7 +49,7 @@ use std::{ cmp::Ordering, collections::hash_map, convert::TryFrom, - ffi::OsStr, + ffi::{OsStr, OsString}, fmt, future::Future, mem::{self}, @@ -2667,17 +2667,18 @@ impl Snapshot { } pub fn entry_for_path(&self, path: impl AsRef) -> Option<&Entry> { - let path = path.as_ref(); - debug_assert!(path.is_relative()); - self.traverse_from_path(true, true, true, path) - .entry() - .and_then(|entry| { - if entry.path.as_ref() == path { - 
Some(entry) - } else { - None - } - }) + // let path = path.as_ref(); + // debug_assert!(path.is_relative()); + // self.traverse_from_path(true, true, true, path) + // .entry() + // .and_then(|entry| { + // if entry.path.as_ref() == path { + // Some(entry) + // } else { + // None + // } + // }) + todo!() } pub fn entry_for_id(&self, id: ProjectEntryId) -> Option<&Entry> { @@ -3321,10 +3322,11 @@ impl language::File for File { /// Returns the last component of this handle's absolute path. If this handle refers to the root /// of its worktree, then this method will return the name of the worktree itself. - fn file_name<'a>(&'a self, cx: &'a App) -> &'a OsStr { + fn file_name<'a>(&'a self, cx: &'a App) -> OsString { self.path .file_name() - .unwrap_or_else(|| OsStr::new(&self.worktree.read(cx).root_name)) + .map(Into::into) + .unwrap_or_else(|| OsStr::new(&self.worktree.read(cx).root_name).into()) } fn worktree_id(&self, cx: &App) -> WorktreeId { @@ -3357,14 +3359,16 @@ impl language::LocalFile for File { } fn load(&self, cx: &App) -> Task> { - let worktree = self.worktree.read(cx).as_local().unwrap(); + let worktree = self.worktree.read(cx); + let worktree = worktree.as_local().unwrap(); let abs_path = worktree.absolutize(&self.path); let fs = worktree.fs.clone(); cx.background_spawn(async move { fs.load(&abs_path?).await }) } fn load_bytes(&self, cx: &App) -> Task>> { - let worktree = self.worktree.read(cx).as_local().unwrap(); + let worktree = self.worktree.read(cx); + let worktree = worktree.as_local().unwrap(); let abs_path = worktree.absolutize(&self.path); let fs = worktree.fs.clone(); cx.background_spawn(async move { fs.load_bytes(&abs_path?).await })