rename templates to prompts in ai crate

KCaverly 2023-10-22 13:46:49 +02:00
parent 0b57ab7303
commit a62baf34f2
9 changed files with 13 additions and 13 deletions

View file

@@ -1,4 +1,4 @@
 pub mod completion;
 pub mod embedding;
 pub mod models;
-pub mod templates;
+pub mod prompts;

View file

@@ -6,7 +6,7 @@ use language::BufferSnapshot;
 use util::ResultExt;
 use crate::models::LanguageModel;
-use crate::templates::repository_context::PromptCodeSnippet;
+use crate::prompts::repository_context::PromptCodeSnippet;
 pub(crate) enum PromptFileType {
     Text,

View file

@@ -4,8 +4,8 @@ use language::ToOffset;
 use crate::models::LanguageModel;
 use crate::models::TruncationDirection;
-use crate::templates::base::PromptArguments;
-use crate::templates::base::PromptTemplate;
+use crate::prompts::base::PromptArguments;
+use crate::prompts::base::PromptTemplate;
 use std::fmt::Write;
 use std::ops::Range;
 use std::sync::Arc;

View file

@@ -1,4 +1,4 @@
-use crate::templates::base::{PromptArguments, PromptFileType, PromptTemplate};
+use crate::prompts::base::{PromptArguments, PromptFileType, PromptTemplate};
 use anyhow::anyhow;
 use std::fmt::Write;

View file

@@ -1,4 +1,4 @@
-use crate::templates::base::{PromptArguments, PromptFileType, PromptTemplate};
+use crate::prompts::base::{PromptArguments, PromptFileType, PromptTemplate};
 use std::fmt::Write;
 pub struct EngineerPreamble {}

View file

@@ -1,4 +1,4 @@
-use crate::templates::base::{PromptArguments, PromptTemplate};
+use crate::prompts::base::{PromptArguments, PromptTemplate};
 use std::fmt::Write;
 use std::{ops::Range, path::PathBuf};

View file

@@ -9,7 +9,7 @@ use ai::{
     completion::{
         stream_completion, OpenAICompletionProvider, OpenAIRequest, RequestMessage, OPENAI_API_URL,
     },
-    templates::repository_context::PromptCodeSnippet,
+    prompts::repository_context::PromptCodeSnippet,
 };
 use anyhow::{anyhow, Result};
 use chrono::{DateTime, Local};

View file

@@ -1,9 +1,9 @@
 use ai::models::{LanguageModel, OpenAILanguageModel};
-use ai::templates::base::{PromptArguments, PromptChain, PromptPriority, PromptTemplate};
-use ai::templates::file_context::FileContext;
-use ai::templates::generate::GenerateInlineContent;
-use ai::templates::preamble::EngineerPreamble;
-use ai::templates::repository_context::{PromptCodeSnippet, RepositoryContext};
+use ai::prompts::base::{PromptArguments, PromptChain, PromptPriority, PromptTemplate};
+use ai::prompts::file_context::FileContext;
+use ai::prompts::generate::GenerateInlineContent;
+use ai::prompts::preamble::EngineerPreamble;
+use ai::prompts::repository_context::{PromptCodeSnippet, RepositoryContext};
 use language::{BufferSnapshot, OffsetRangeExt, ToOffset};
 use std::cmp::{self, Reverse};
 use std::ops::Range;
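
For downstream code the change is a pure path rename: everything previously imported from ai::templates::* now lives under ai::prompts::*, with no type or signature changes in this diff. Below is a minimal usage sketch of how the renamed modules might be wired together by a caller; only the import paths are confirmed by the diff above, while the PromptChain::new constructor, the generate(truncate) method, the PromptPriority::Mandatory variant, and the EngineerPreamble/PromptTemplate relationship are assumptions made for illustration.

// Hypothetical sketch (not part of this commit): assembling a prompt from the
// renamed `ai::prompts` modules. Constructor, method, and variant names are
// assumptions; only the module paths come from the diff above.
use ai::prompts::base::{PromptArguments, PromptChain, PromptPriority, PromptTemplate};
use ai::prompts::preamble::EngineerPreamble;

fn build_prompt(args: PromptArguments) -> anyhow::Result<String> {
    // Assumed: a chain is a prioritized list of boxed PromptTemplate implementors,
    // and EngineerPreamble implements PromptTemplate.
    let templates: Vec<(PromptPriority, Box<dyn PromptTemplate>)> = vec![
        (PromptPriority::Mandatory, Box::new(EngineerPreamble {})),
    ];
    // Assumed constructor and generate(truncate) signature: `true` would truncate
    // the rendered prompt to fit the model's context window.
    let chain = PromptChain::new(args, templates);
    let (prompt, _token_count) = chain.generate(true)?;
    Ok(prompt)
}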