Fix a bug where a GPUI macro still used `ModelContext`

Rename `AsyncAppContext` -> `AsyncApp`

Rename `update_model`, `read_model`, `insert_model`, and `reserve_model` to `update_entity`, `read_entity`, `insert_entity`, and `reserve_entity`

Release Notes:

- N/A
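For reference, a minimal before/after sketch of what the rename means for calling code. `ApiState` and `fetch_key` are hypothetical names used only for illustration and do not appear in this commit; gpui's exact generic bounds and closure signatures are elided, with the call shape following the `read_entity` usage in the hunks below.

```rust
// Hedged sketch of the caller-side migration. `ApiState` and `fetch_key` are
// invented for illustration; only the renamed types/methods come from this commit.
use anyhow::Result;
use gpui::{AsyncApp, Entity};

struct ApiState {
    api_key: String,
}

// Before this commit (shown for comparison):
//
//     fn fetch_key(state: &Entity<ApiState>, cx: &AsyncAppContext) -> Result<String> {
//         cx.read_model(state, |state, _cx| state.api_key.clone())
//     }

// After this commit: the async context type is `AsyncApp` and the accessor is
// `read_entity`, mirroring the `read_model` -> `read_entity` rename above.
fn fetch_key(state: &Entity<ApiState>, cx: &AsyncApp) -> Result<String> {
    cx.read_entity(state, |state, _cx| state.api_key.clone())
}
```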
parent 83141d07e9
commit a6b1514246

118 changed files with 708 additions and 757 deletions
@@ -3,8 +3,7 @@ use collections::BTreeMap;
 use editor::{Editor, EditorElement, EditorStyle};
 use futures::{future::BoxFuture, FutureExt, StreamExt};
 use gpui::{
-    AnyView, App, AsyncAppContext, Context, Entity, FontStyle, Subscription, Task, TextStyle,
-    WhiteSpace,
+    AnyView, App, AsyncApp, Context, Entity, FontStyle, Subscription, Task, TextStyle, WhiteSpace,
 };
 use http_client::HttpClient;
 use language_model::{
@@ -224,11 +223,11 @@ impl OpenAiLanguageModel {
     fn stream_completion(
         &self,
         request: open_ai::Request,
-        cx: &AsyncAppContext,
+        cx: &AsyncApp,
     ) -> BoxFuture<'static, Result<futures::stream::BoxStream<'static, Result<ResponseStreamEvent>>>>
     {
         let http_client = self.http_client.clone();
-        let Ok((api_key, api_url)) = cx.read_model(&self.state, |state, cx| {
+        let Ok((api_key, api_url)) = cx.read_entity(&self.state, |state, cx| {
             let settings = &AllLanguageModelSettings::get_global(cx).openai;
             (state.api_key.clone(), settings.api_url.clone())
         }) else {
@@ -286,7 +285,7 @@ impl LanguageModel for OpenAiLanguageModel {
     fn stream_completion(
         &self,
         request: LanguageModelRequest,
-        cx: &AsyncAppContext,
+        cx: &AsyncApp,
     ) -> BoxFuture<
         'static,
         Result<futures::stream::BoxStream<'static, Result<LanguageModelCompletionEvent>>>,
@@ -307,7 +306,7 @@ impl LanguageModel for OpenAiLanguageModel {
         tool_name: String,
         tool_description: String,
         schema: serde_json::Value,
-        cx: &AsyncAppContext,
+        cx: &AsyncApp,
     ) -> BoxFuture<'static, Result<futures::stream::BoxStream<'static, Result<String>>>> {
         let mut request = request.into_open_ai(self.model.id().into(), self.max_output_tokens());
         request.tool_choice = Some(ToolChoice::Other(ToolDefinition::Function {