gpui: Improve Global ergonomics (#11923)

This PR adds some ergonomic improvements when working with GPUI
`Global`s.

Two new traits have been added—`ReadGlobal` and `UpdateGlobal`—that
provide associated functions on any type that implements `Global` for
accessing and updating the global without needing to call the methods on
the `cx` directly (which generally involves qualifying the type).
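
For a sense of the API shape, here is a minimal sketch, assuming default methods plus a blanket impl over every `Global` type; gpui's actual definitions are generic over the various context types and may differ in detail:

```rust
use gpui::{AppContext, Global};

// Sketch only, not gpui's exact code: default methods plus a blanket
// impl give every `Global` type these associated functions.
pub trait ReadGlobal: Global + Sized {
    /// Returns a reference to the global of this type.
    fn global(cx: &AppContext) -> &Self {
        cx.global::<Self>()
    }
}

pub trait UpdateGlobal: Global + Sized {
    /// Mutates the global of this type via the given closure.
    fn update_global<R>(
        cx: &mut AppContext,
        update: impl FnOnce(&mut Self, &mut AppContext) -> R,
    ) -> R {
        // Delegates to the existing context method.
        cx.update_global(update)
    }
}

impl<T: Global> ReadGlobal for T {}
impl<T: Global> UpdateGlobal for T {}
```

With something like this in scope, `CompletionProvider::global(cx)` in the hunks below resolves without spelling out `cx.global::<CompletionProvider>()` at every call site.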

I looked into adding `ObserveGlobal` as well, but this seems a bit
trickier to implement as the signatures of `cx.observe_global` vary
slightly between the different contexts.

Release Notes:

- N/A
Marshall Bowers 2024-05-16 12:47:43 -04:00 committed by GitHub
parent 1b261608c6
commit c1e291bc96
8 changed files with 81 additions and 33 deletions

@@ -24,7 +24,7 @@ use fs::Fs;
 use futures::{future::join_all, StreamExt};
 use gpui::{
     list, AnyElement, AppContext, AsyncWindowContext, ClickEvent, EventEmitter, FocusHandle,
-    FocusableView, ListAlignment, ListState, Model, Render, Task, View, WeakView,
+    FocusableView, ListAlignment, ListState, Model, ReadGlobal, Render, Task, View, WeakView,
 };
 use language::{language_settings::SoftWrap, LanguageRegistry};
 use markdown::{Markdown, MarkdownStyle};
@@ -288,7 +288,7 @@ impl AssistantChat {
         workspace: WeakView<Workspace>,
         cx: &mut ViewContext<Self>,
     ) -> Self {
-        let model = CompletionProvider::get(cx).default_model();
+        let model = CompletionProvider::global(cx).default_model();
         let view = cx.view().downgrade();
         let list_state = ListState::new(
             0,
@@ -550,7 +550,7 @@ impl AssistantChat {
 
             let messages = messages.await?;
             let completion = cx.update(|cx| {
-                CompletionProvider::get(cx).complete(
+                CompletionProvider::global(cx).complete(
                     model_name,
                     messages,
                     Vec::new(),

@@ -2,7 +2,7 @@ use anyhow::Result;
 use assistant_tooling::ToolFunctionDefinition;
 use client::{proto, Client};
 use futures::{future::BoxFuture, stream::BoxStream, FutureExt, StreamExt};
-use gpui::{AppContext, Global};
+use gpui::Global;
 use std::sync::Arc;
 
 pub use open_ai::RequestMessage as CompletionMessage;
@@ -11,10 +11,6 @@ pub use open_ai::RequestMessage as CompletionMessage;
 pub struct CompletionProvider(Arc<dyn CompletionProviderBackend>);
 
 impl CompletionProvider {
-    pub fn get(cx: &AppContext) -> &Self {
-        cx.global::<CompletionProvider>()
-    }
-
     pub fn new(backend: impl CompletionProviderBackend) -> Self {
         Self(Arc::new(backend))
     }
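
Since `ReadGlobal` supplies `global()` for every `Global` type, the hand-rolled `get` above can simply be deleted. The hunks in this commit only exercise the read side; purely as an illustration, a hypothetical `UpdateGlobal` call site (the helper below is an assumption, not code from this PR, and presumes it lives alongside `CompletionProvider`) might look like:

```rust
use gpui::{AppContext, UpdateGlobal};

// Hypothetical helper, not part of this PR: swap the completion backend
// by mutating the `CompletionProvider` global in place.
fn swap_backend(cx: &mut AppContext, backend: impl CompletionProviderBackend) {
    CompletionProvider::update_global(cx, |provider, _cx| {
        *provider = CompletionProvider::new(backend);
    });
}
```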

@@ -3,7 +3,7 @@ use crate::{
     AssistantChat, CompletionProvider,
 };
 use editor::{Editor, EditorElement, EditorStyle};
-use gpui::{AnyElement, FontStyle, FontWeight, TextStyle, View, WeakView, WhiteSpace};
+use gpui::{AnyElement, FontStyle, FontWeight, ReadGlobal, TextStyle, View, WeakView, WhiteSpace};
 use settings::Settings;
 use theme::ThemeSettings;
 use ui::{popover_menu, prelude::*, ButtonLike, ContextMenu, Divider, TextSize, Tooltip};
@@ -139,7 +139,7 @@ impl RenderOnce for ModelSelector {
         popover_menu("model-switcher")
             .menu(move |cx| {
                 ContextMenu::build(cx, |mut menu, cx| {
-                    for model in CompletionProvider::get(cx).available_models() {
+                    for model in CompletionProvider::global(cx).available_models() {
                         menu = menu.custom_entry(
                             {
                                 let model = model.clone();