
Co-authored-by: Antonio <antonio@zed.dev> Resurrected this from some assistant work I did in Spring of 2023. - [x] Resurrect streaming responses - [x] Use streaming responses to enable AI via Zed's servers by default (but preserve API key option for now) - [x] Simplify protobuf - [x] Proxy to OpenAI on zed.dev - [x] Proxy to Gemini on zed.dev - [x] Improve UX for switching between openAI and google models - We currently disallow cycling when setting a custom model, but we need a better solution to keep OpenAI models available while testing the google ones - [x] Show remaining tokens correctly for Google models - [x] Remove semantic index - [x] Delete `ai` crate - [x] Cloud front so we can ban abuse - [x] Rate-limiting - [x] Fix panic when using inline assistant - [x] Double check the upgraded `AssistantSettings` are backwards-compatible - [x] Add hosted LLM interaction behind a `language-models` feature flag. Release Notes: - We are temporarily removing the semantic index in order to redesign it from scratch. --------- Co-authored-by: Antonio <antonio@zed.dev> Co-authored-by: Antonio Scandurra <me@as-cii.com> Co-authored-by: Thorsten <thorsten@zed.dev> Co-authored-by: Max <max@zed.dev>
95 lines
2.6 KiB
Rust
95 lines
2.6 KiB
Rust
use language::BufferSnapshot;
|
|
use std::{fmt::Write, ops::Range};
|
|
|
|
pub fn generate_content_prompt(
|
|
user_prompt: String,
|
|
language_name: Option<&str>,
|
|
buffer: BufferSnapshot,
|
|
range: Range<usize>,
|
|
project_name: Option<String>,
|
|
) -> anyhow::Result<String> {
|
|
let mut prompt = String::new();
|
|
|
|
let content_type = match language_name {
|
|
None | Some("Markdown" | "Plain Text") => {
|
|
writeln!(prompt, "You are an expert engineer.")?;
|
|
"Text"
|
|
}
|
|
Some(language_name) => {
|
|
writeln!(prompt, "You are an expert {language_name} engineer.")?;
|
|
writeln!(
|
|
prompt,
|
|
"Your answer MUST always and only be valid {}.",
|
|
language_name
|
|
)?;
|
|
"Code"
|
|
}
|
|
};
|
|
|
|
if let Some(project_name) = project_name {
|
|
writeln!(
|
|
prompt,
|
|
"You are currently working inside the '{project_name}' project in code editor Zed."
|
|
)?;
|
|
}
|
|
|
|
// Include file content.
|
|
for chunk in buffer.text_for_range(0..range.start) {
|
|
prompt.push_str(chunk);
|
|
}
|
|
|
|
if range.is_empty() {
|
|
prompt.push_str("<|START|>");
|
|
} else {
|
|
prompt.push_str("<|START|");
|
|
}
|
|
|
|
for chunk in buffer.text_for_range(range.clone()) {
|
|
prompt.push_str(chunk);
|
|
}
|
|
|
|
if !range.is_empty() {
|
|
prompt.push_str("|END|>");
|
|
}
|
|
|
|
for chunk in buffer.text_for_range(range.end..buffer.len()) {
|
|
prompt.push_str(chunk);
|
|
}
|
|
|
|
prompt.push('\n');
|
|
|
|
if range.is_empty() {
|
|
writeln!(
|
|
prompt,
|
|
"Assume the cursor is located where the `<|START|>` span is."
|
|
)
|
|
.unwrap();
|
|
writeln!(
|
|
prompt,
|
|
"{content_type} can't be replaced, so assume your answer will be inserted at the cursor.",
|
|
)
|
|
.unwrap();
|
|
writeln!(
|
|
prompt,
|
|
"Generate {content_type} based on the users prompt: {user_prompt}",
|
|
)
|
|
.unwrap();
|
|
} else {
|
|
writeln!(prompt, "Modify the user's selected {content_type} based upon the users prompt: '{user_prompt}'").unwrap();
|
|
writeln!(prompt, "You must reply with only the adjusted {content_type} (within the '<|START|' and '|END|>' spans) not the entire file.").unwrap();
|
|
writeln!(
|
|
prompt,
|
|
"Double check that you only return code and not the '<|START|' and '|END|'> spans"
|
|
)
|
|
.unwrap();
|
|
}
|
|
|
|
writeln!(prompt, "Never make remarks about the output.").unwrap();
|
|
writeln!(
|
|
prompt,
|
|
"Do not return anything else, except the generated {content_type}."
|
|
)
|
|
.unwrap();
|
|
|
|
Ok(prompt)
|
|
}
|