Remove Assistant::open_ai_request_messages

Antonio Scandurra 2023-06-19 14:35:33 +02:00
parent dc9231d5a3
commit 9191a82447

@@ -543,7 +543,7 @@ impl Assistant {
     fn count_remaining_tokens(&mut self, cx: &mut ModelContext<Self>) {
         let messages = self
-            .open_ai_request_messages(cx)
+            .messages(cx)
             .into_iter()
             .filter_map(|message| {
                 Some(tiktoken_rs::ChatCompletionRequestMessage {
@@ -552,7 +552,7 @@ impl Assistant {
                         Role::Assistant => "assistant".into(),
                         Role::System => "system".into(),
                     },
-                    content: message.content,
+                    content: self.buffer.read(cx).text_for_range(message.range).collect(),
                     name: None,
                 })
             })
@@ -596,7 +596,10 @@ impl Assistant {
     ) -> Option<(MessageAnchor, MessageAnchor)> {
         let request = OpenAIRequest {
             model: self.model.clone(),
-            messages: self.open_ai_request_messages(cx),
+            messages: self
+                .messages(cx)
+                .map(|message| message.to_open_ai_message(self.buffer.read(cx)))
+                .collect(),
             stream: true,
         };
@@ -841,16 +844,19 @@ impl Assistant {
         if self.message_anchors.len() >= 2 && self.summary.is_none() {
             let api_key = self.api_key.borrow().clone();
             if let Some(api_key) = api_key {
-                let mut messages = self.open_ai_request_messages(cx);
-                messages.truncate(2);
-                messages.push(RequestMessage {
-                    role: Role::User,
-                    content: "Summarize the conversation into a short title without punctuation"
-                        .into(),
-                });
+                let messages = self
+                    .messages(cx)
+                    .take(2)
+                    .map(|message| message.to_open_ai_message(self.buffer.read(cx)))
+                    .chain(Some(RequestMessage {
+                        role: Role::User,
+                        content:
+                            "Summarize the conversation into a short title without punctuation"
+                                .into(),
+                    }));
                 let request = OpenAIRequest {
                     model: self.model.clone(),
-                    messages,
+                    messages: messages.collect(),
                     stream: true,
                 };
@@ -878,16 +884,6 @@ impl Assistant {
         }
     }

-    fn open_ai_request_messages(&self, cx: &AppContext) -> Vec<RequestMessage> {
-        let buffer = self.buffer.read(cx);
-        self.messages(cx)
-            .map(|message| RequestMessage {
-                role: message.role,
-                content: buffer.text_for_range(message.range).collect(),
-            })
-            .collect()
-    }
-
     fn message_for_offset<'a>(&'a self, offset: usize, cx: &'a AppContext) -> Option<Message> {
         let mut messages = self.messages(cx).peekable();
         while let Some(message) = messages.next() {
@@ -1446,6 +1442,15 @@ pub struct Message {
     error: Option<Arc<str>>,
 }

+impl Message {
+    fn to_open_ai_message(&self, buffer: &Buffer) -> RequestMessage {
+        RequestMessage {
+            role: self.role,
+            content: buffer.text_for_range(self.range.clone()).collect(),
+        }
+    }
+}
+
 async fn stream_completion(
     api_key: String,
     executor: Arc<Background>,
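
For reference, below is a minimal, self-contained sketch of the shape of this refactor. The Buffer, RequestMessage, Role, and Message types here are simplified stand-ins for illustration only, not the real Zed definitions; the point is just that the conversion to an OpenAI request message now lives on Message itself, so each call site maps over messages(cx) instead of going through the removed Assistant::open_ai_request_messages helper.

// Minimal stand-ins for illustration only; the real Buffer, Role,
// RequestMessage, and Message types live in the Zed crates.
#[derive(Clone, Copy)]
enum Role {
    User,
    Assistant,
    System,
}

struct RequestMessage {
    role: Role,
    content: String,
}

struct Buffer {
    text: String,
}

impl Buffer {
    // Stand-in for Buffer::text_for_range, which yields text chunks
    // that can be collected into a String.
    fn text_for_range<'a>(
        &'a self,
        range: std::ops::Range<usize>,
    ) -> impl Iterator<Item = &'a str> {
        std::iter::once(&self.text[range])
    }
}

struct Message {
    role: Role,
    range: std::ops::Range<usize>,
}

impl Message {
    // The per-message conversion introduced by this commit: each message
    // reads its own text out of the buffer.
    fn to_open_ai_message(&self, buffer: &Buffer) -> RequestMessage {
        RequestMessage {
            role: self.role,
            content: buffer.text_for_range(self.range.clone()).collect(),
        }
    }
}

fn main() {
    let buffer = Buffer {
        text: "Hello, assistant!".into(),
    };
    let messages = vec![Message {
        role: Role::User,
        range: 0..buffer.text.len(),
    }];

    // Call sites now map over the messages themselves instead of calling
    // a central Assistant::open_ai_request_messages helper.
    let request_messages: Vec<RequestMessage> = messages
        .iter()
        .map(|message| message.to_open_ai_message(&buffer))
        .collect();

    assert_eq!(request_messages[0].content, "Hello, assistant!");
}

Moving the conversion onto Message lets each call site shape its own request: the summary request above takes only the first two messages with .take(2) and chains an extra instruction message, rather than truncating and pushing into a pre-built Vec<RequestMessage>.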