chore: Fix warnings for Rust 1.89 (#32378)
Closes #ISSUE

Release Notes:

- N/A
commit 72bcb0beb7
parent 4ff41ba62e
32 changed files with 102 additions and 95 deletions
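
For context, the warnings silenced here come from Rust 1.89's stricter treatment of elided lifetimes in return types (as I understand it, the `mismatched_lifetime_syntaxes` lint that is warn-by-default as of that release): when a function borrows from `&self` but its return type hides the resulting lifetime parameter, the compiler now asks for an explicit `'_`. A minimal sketch of the before/after pattern, using a hypothetical `Holder` type rather than any of the Zed types touched below:

use std::cell::{Ref, RefCell};

// Hypothetical type for illustration only; not part of this commit.
pub struct Holder(RefCell<String>);

impl Holder {
    // Before (warns on Rust 1.89, because the returned `Ref` borrows from
    // `&self` but its lifetime parameter is hidden):
    //
    //     pub fn value(&self) -> Ref<String> { self.0.borrow() }
    //
    // After, spelling the elided lifetime as `'_`, which is the fix applied
    // throughout the diff below:
    pub fn value(&self) -> Ref<'_, String> {
        self.0.borrow()
    }
}

fn main() {
    let holder = Holder(RefCell::new("hello".into()));
    println!("{}", *holder.value());
}
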
@@ -89,7 +89,7 @@ pub fn init(cx: &mut App) {
 pub struct SharedProjectContext(Rc<RefCell<Option<ProjectContext>>>);

 impl SharedProjectContext {
-    pub fn borrow(&self) -> Ref<Option<ProjectContext>> {
+    pub fn borrow(&self) -> Ref<'_, Option<ProjectContext>> {
         self.0.borrow()
     }
 }
@@ -919,7 +919,7 @@ impl ThreadsDatabase {

     fn bytes_encode(
         item: &Self::EItem,
-    ) -> Result<std::borrow::Cow<[u8]>, heed::BoxedError> {
+    ) -> Result<std::borrow::Cow<'_, [u8]>, heed::BoxedError> {
         serde_json::to_vec(&item.0)
             .map(std::borrow::Cow::Owned)
             .map_err(Into::into)
@@ -111,7 +111,7 @@ pub struct ChannelMembership {
     pub role: proto::ChannelRole,
 }
 impl ChannelMembership {
-    pub fn sort_key(&self) -> MembershipSortKey {
+    pub fn sort_key(&self) -> MembershipSortKey<'_> {
         MembershipSortKey {
             role_order: match self.role {
                 proto::ChannelRole::Admin => 0,
@@ -32,7 +32,7 @@ impl ChannelIndex {
             .retain(|channel_id| !channels.contains(channel_id));
     }

-    pub fn bulk_insert(&mut self) -> ChannelPathsInsertGuard {
+    pub fn bulk_insert(&mut self) -> ChannelPathsInsertGuard<'_> {
         ChannelPathsInsertGuard {
             channels_ordered: &mut self.channels_ordered,
             channels_by_id: &mut self.channels_by_id,
@@ -39,7 +39,7 @@ enum ProxyType<'t> {
     HttpProxy(HttpProxyType<'t>),
 }

-fn parse_proxy_type(proxy: &Url) -> Option<((String, u16), ProxyType)> {
+fn parse_proxy_type(proxy: &Url) -> Option<((String, u16), ProxyType<'_>)> {
     let scheme = proxy.scheme();
     let host = proxy.host()?.to_string();
     let port = proxy.port_or_known_default()?;
@@ -464,7 +464,7 @@ impl BlockMap {
         map
     }

-    pub fn read(&self, wrap_snapshot: WrapSnapshot, edits: Patch<u32>) -> BlockMapReader {
+    pub fn read(&self, wrap_snapshot: WrapSnapshot, edits: Patch<u32>) -> BlockMapReader<'_> {
         self.sync(&wrap_snapshot, edits);
         *self.wrap_snapshot.borrow_mut() = wrap_snapshot.clone();
         BlockMapReader {
@@ -479,7 +479,7 @@ impl BlockMap {
         }
     }

-    pub fn write(&mut self, wrap_snapshot: WrapSnapshot, edits: Patch<u32>) -> BlockMapWriter {
+    pub fn write(&mut self, wrap_snapshot: WrapSnapshot, edits: Patch<u32>) -> BlockMapWriter<'_> {
         self.sync(&wrap_snapshot, edits);
         *self.wrap_snapshot.borrow_mut() = wrap_snapshot;
         BlockMapWriter(self)
@@ -1327,7 +1327,7 @@ impl BlockSnapshot {
         }
     }

-    pub(super) fn row_infos(&self, start_row: BlockRow) -> BlockRows {
+    pub(super) fn row_infos(&self, start_row: BlockRow) -> BlockRows<'_> {
         let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(&());
         cursor.seek(&start_row, Bias::Right, &());
         let (output_start, input_start) = cursor.start();
@@ -357,7 +357,7 @@ impl FoldMap {
         &mut self,
         inlay_snapshot: InlaySnapshot,
         edits: Vec<InlayEdit>,
-    ) -> (FoldMapWriter, FoldSnapshot, Vec<FoldEdit>) {
+    ) -> (FoldMapWriter<'_>, FoldSnapshot, Vec<FoldEdit>) {
         let (snapshot, edits) = self.read(inlay_snapshot, edits);
         (FoldMapWriter(self), snapshot, edits)
     }
@@ -730,7 +730,7 @@ impl FoldSnapshot {
         (line_end - line_start) as u32
     }

-    pub fn row_infos(&self, start_row: u32) -> FoldRows {
+    pub fn row_infos(&self, start_row: u32) -> FoldRows<'_> {
         if start_row > self.transforms.summary().output.lines.row {
             panic!("invalid display row {}", start_row);
         }
@@ -726,7 +726,7 @@ impl WrapSnapshot {
         self.transforms.summary().output.longest_row
     }

-    pub fn row_infos(&self, start_row: u32) -> WrapRows {
+    pub fn row_infos(&self, start_row: u32) -> WrapRows<'_> {
         let mut transforms = self.transforms.cursor::<(WrapPoint, TabPoint)>(&());
         transforms.seek(&WrapPoint::new(start_row, 0), Bias::Left, &());
         let mut input_row = transforms.start().1.row();
@@ -411,7 +411,7 @@ impl<'a> MutableSelectionsCollection<'a> {
         self.collection.display_map(self.cx)
     }

-    pub fn buffer(&self) -> Ref<MultiBufferSnapshot> {
+    pub fn buffer(&self) -> Ref<'_, MultiBufferSnapshot> {
         self.collection.buffer(self.cx)
     }

@@ -724,7 +724,7 @@ impl IncrementalCompilationCache {
 }

 impl CacheStore for IncrementalCompilationCache {
-    fn get(&self, key: &[u8]) -> Option<Cow<[u8]>> {
+    fn get(&self, key: &[u8]) -> Option<Cow<'_, [u8]>> {
         self.cache.get(key).map(|v| v.into())
     }

@@ -323,7 +323,7 @@ pub trait GitRepository: Send + Sync {
     /// Resolve a list of refs to SHAs.
     fn revparse_batch(&self, revs: Vec<String>) -> BoxFuture<Result<Vec<Option<String>>>>;

-    fn head_sha(&self) -> BoxFuture<Option<String>> {
+    fn head_sha(&self) -> BoxFuture<'_, Option<String>> {
         async move {
             self.revparse_batch(vec!["HEAD".into()])
                 .await
@@ -525,7 +525,7 @@ impl GitRepository for RealGitRepository {
         repo.commondir().into()
     }

-    fn show(&self, commit: String) -> BoxFuture<Result<CommitDetails>> {
+    fn show(&self, commit: String) -> BoxFuture<'_, Result<CommitDetails>> {
         let working_directory = self.working_directory();
         self.executor
             .spawn(async move {
@@ -561,7 +561,7 @@ impl GitRepository for RealGitRepository {
             .boxed()
     }

-    fn load_commit(&self, commit: String, cx: AsyncApp) -> BoxFuture<Result<CommitDiff>> {
+    fn load_commit(&self, commit: String, cx: AsyncApp) -> BoxFuture<'_, Result<CommitDiff>> {
         let Some(working_directory) = self.repository.lock().workdir().map(ToOwned::to_owned)
         else {
             return future::ready(Err(anyhow!("no working directory"))).boxed();
@@ -668,7 +668,7 @@ impl GitRepository for RealGitRepository {
         commit: String,
         mode: ResetMode,
         env: Arc<HashMap<String, String>>,
-    ) -> BoxFuture<Result<()>> {
+    ) -> BoxFuture<'_, Result<()>> {
         async move {
             let working_directory = self.working_directory();

@@ -698,7 +698,7 @@ impl GitRepository for RealGitRepository {
         commit: String,
         paths: Vec<RepoPath>,
         env: Arc<HashMap<String, String>>,
-    ) -> BoxFuture<Result<()>> {
+    ) -> BoxFuture<'_, Result<()>> {
         let working_directory = self.working_directory();
         let git_binary_path = self.git_binary_path.clone();
         async move {
@@ -723,7 +723,7 @@ impl GitRepository for RealGitRepository {
             .boxed()
     }

-    fn load_index_text(&self, path: RepoPath) -> BoxFuture<Option<String>> {
+    fn load_index_text(&self, path: RepoPath) -> BoxFuture<'_, Option<String>> {
         // https://git-scm.com/book/en/v2/Git-Internals-Git-Objects
         const GIT_MODE_SYMLINK: u32 = 0o120000;

@@ -756,7 +756,7 @@ impl GitRepository for RealGitRepository {
             .boxed()
     }

-    fn load_committed_text(&self, path: RepoPath) -> BoxFuture<Option<String>> {
+    fn load_committed_text(&self, path: RepoPath) -> BoxFuture<'_, Option<String>> {
         let repo = self.repository.clone();
         self.executor
             .spawn(async move {
@@ -777,7 +777,7 @@ impl GitRepository for RealGitRepository {
         path: RepoPath,
         content: Option<String>,
         env: Arc<HashMap<String, String>>,
-    ) -> BoxFuture<anyhow::Result<()>> {
+    ) -> BoxFuture<'_, anyhow::Result<()>> {
         let working_directory = self.working_directory();
         let git_binary_path = self.git_binary_path.clone();
         self.executor
@@ -841,7 +841,7 @@ impl GitRepository for RealGitRepository {
         remote.url().map(|url| url.to_string())
     }

-    fn revparse_batch(&self, revs: Vec<String>) -> BoxFuture<Result<Vec<Option<String>>>> {
+    fn revparse_batch(&self, revs: Vec<String>) -> BoxFuture<'_, Result<Vec<Option<String>>>> {
         let working_directory = self.working_directory();
         self.executor
             .spawn(async move {
@@ -891,14 +891,14 @@ impl GitRepository for RealGitRepository {
             .boxed()
     }

-    fn merge_message(&self) -> BoxFuture<Option<String>> {
+    fn merge_message(&self) -> BoxFuture<'_, Option<String>> {
         let path = self.path().join("MERGE_MSG");
         self.executor
             .spawn(async move { std::fs::read_to_string(&path).ok() })
             .boxed()
     }

-    fn status(&self, path_prefixes: &[RepoPath]) -> BoxFuture<Result<GitStatus>> {
+    fn status(&self, path_prefixes: &[RepoPath]) -> BoxFuture<'_, Result<GitStatus>> {
         let git_binary_path = self.git_binary_path.clone();
         let working_directory = self.working_directory();
         let path_prefixes = path_prefixes.to_owned();
@@ -919,7 +919,7 @@ impl GitRepository for RealGitRepository {
             .boxed()
     }

-    fn branches(&self) -> BoxFuture<Result<Vec<Branch>>> {
+    fn branches(&self) -> BoxFuture<'_, Result<Vec<Branch>>> {
         let working_directory = self.working_directory();
         let git_binary_path = self.git_binary_path.clone();
         self.executor
@@ -986,7 +986,7 @@ impl GitRepository for RealGitRepository {
             .boxed()
     }

-    fn change_branch(&self, name: String) -> BoxFuture<Result<()>> {
+    fn change_branch(&self, name: String) -> BoxFuture<'_, Result<()>> {
         let repo = self.repository.clone();
         self.executor
             .spawn(async move {
@@ -1018,7 +1018,7 @@ impl GitRepository for RealGitRepository {
             .boxed()
     }

-    fn create_branch(&self, name: String) -> BoxFuture<Result<()>> {
+    fn create_branch(&self, name: String) -> BoxFuture<'_, Result<()>> {
         let repo = self.repository.clone();
         self.executor
             .spawn(async move {
@@ -1030,7 +1030,7 @@ impl GitRepository for RealGitRepository {
             .boxed()
     }

-    fn blame(&self, path: RepoPath, content: Rope) -> BoxFuture<Result<crate::blame::Blame>> {
+    fn blame(&self, path: RepoPath, content: Rope) -> BoxFuture<'_, Result<crate::blame::Blame>> {
         let working_directory = self.working_directory();
         let git_binary_path = self.git_binary_path.clone();

@@ -1052,7 +1052,7 @@ impl GitRepository for RealGitRepository {
             .boxed()
     }

-    fn diff(&self, diff: DiffType) -> BoxFuture<Result<String>> {
+    fn diff(&self, diff: DiffType) -> BoxFuture<'_, Result<String>> {
         let working_directory = self.working_directory();
         let git_binary_path = self.git_binary_path.clone();
         self.executor
@@ -1083,7 +1083,7 @@ impl GitRepository for RealGitRepository {
         &self,
         paths: Vec<RepoPath>,
         env: Arc<HashMap<String, String>>,
-    ) -> BoxFuture<Result<()>> {
+    ) -> BoxFuture<'_, Result<()>> {
         let working_directory = self.working_directory();
         let git_binary_path = self.git_binary_path.clone();
         self.executor
@@ -1111,7 +1111,7 @@ impl GitRepository for RealGitRepository {
         &self,
         paths: Vec<RepoPath>,
         env: Arc<HashMap<String, String>>,
-    ) -> BoxFuture<Result<()>> {
+    ) -> BoxFuture<'_, Result<()>> {
         let working_directory = self.working_directory();
         let git_binary_path = self.git_binary_path.clone();

@@ -1143,7 +1143,7 @@ impl GitRepository for RealGitRepository {
         name_and_email: Option<(SharedString, SharedString)>,
         options: CommitOptions,
         env: Arc<HashMap<String, String>>,
-    ) -> BoxFuture<Result<()>> {
+    ) -> BoxFuture<'_, Result<()>> {
         let working_directory = self.working_directory();
         self.executor
             .spawn(async move {
@@ -1182,7 +1182,7 @@ impl GitRepository for RealGitRepository {
         ask_pass: AskPassDelegate,
         env: Arc<HashMap<String, String>>,
         cx: AsyncApp,
-    ) -> BoxFuture<Result<RemoteCommandOutput>> {
+    ) -> BoxFuture<'_, Result<RemoteCommandOutput>> {
         let working_directory = self.working_directory();
         let executor = cx.background_executor().clone();
         async move {
@@ -1214,7 +1214,7 @@ impl GitRepository for RealGitRepository {
         ask_pass: AskPassDelegate,
         env: Arc<HashMap<String, String>>,
         cx: AsyncApp,
-    ) -> BoxFuture<Result<RemoteCommandOutput>> {
+    ) -> BoxFuture<'_, Result<RemoteCommandOutput>> {
         let working_directory = self.working_directory();
         let executor = cx.background_executor().clone();
         async move {
@@ -1239,7 +1239,7 @@ impl GitRepository for RealGitRepository {
         ask_pass: AskPassDelegate,
         env: Arc<HashMap<String, String>>,
         cx: AsyncApp,
-    ) -> BoxFuture<Result<RemoteCommandOutput>> {
+    ) -> BoxFuture<'_, Result<RemoteCommandOutput>> {
         let working_directory = self.working_directory();
         let remote_name = format!("{}", fetch_options);
         let executor = cx.background_executor().clone();
@@ -1257,7 +1257,7 @@ impl GitRepository for RealGitRepository {
         .boxed()
     }

-    fn get_remotes(&self, branch_name: Option<String>) -> BoxFuture<Result<Vec<Remote>>> {
+    fn get_remotes(&self, branch_name: Option<String>) -> BoxFuture<'_, Result<Vec<Remote>>> {
         let working_directory = self.working_directory();
         let git_binary_path = self.git_binary_path.clone();
         self.executor
@@ -1303,7 +1303,7 @@ impl GitRepository for RealGitRepository {
             .boxed()
     }

-    fn check_for_pushed_commit(&self) -> BoxFuture<Result<Vec<SharedString>>> {
+    fn check_for_pushed_commit(&self) -> BoxFuture<'_, Result<Vec<SharedString>>> {
         let working_directory = self.working_directory();
         let git_binary_path = self.git_binary_path.clone();
         self.executor
@@ -1396,7 +1396,7 @@ impl GitRepository for RealGitRepository {
             .boxed()
     }

-    fn restore_checkpoint(&self, checkpoint: GitRepositoryCheckpoint) -> BoxFuture<Result<()>> {
+    fn restore_checkpoint(&self, checkpoint: GitRepositoryCheckpoint) -> BoxFuture<'_, Result<()>> {
         let working_directory = self.working_directory();
         let git_binary_path = self.git_binary_path.clone();

@@ -1435,7 +1435,7 @@ impl GitRepository for RealGitRepository {
         &self,
         left: GitRepositoryCheckpoint,
         right: GitRepositoryCheckpoint,
-    ) -> BoxFuture<Result<bool>> {
+    ) -> BoxFuture<'_, Result<bool>> {
         let working_directory = self.working_directory();
         let git_binary_path = self.git_binary_path.clone();

@@ -1474,7 +1474,7 @@ impl GitRepository for RealGitRepository {
         &self,
         base_checkpoint: GitRepositoryCheckpoint,
         target_checkpoint: GitRepositoryCheckpoint,
-    ) -> BoxFuture<Result<String>> {
+    ) -> BoxFuture<'_, Result<String>> {
         let working_directory = self.working_directory();
         let git_binary_path = self.git_binary_path.clone();

@@ -64,7 +64,7 @@ pub struct AppCell {
 impl AppCell {
     #[doc(hidden)]
     #[track_caller]
-    pub fn borrow(&self) -> AppRef {
+    pub fn borrow(&self) -> AppRef<'_> {
         if option_env!("TRACK_THREAD_BORROWS").is_some() {
             let thread_id = std::thread::current().id();
             eprintln!("borrowed {thread_id:?}");
@@ -74,7 +74,7 @@ impl AppCell {

     #[doc(hidden)]
     #[track_caller]
-    pub fn borrow_mut(&self) -> AppRefMut {
+    pub fn borrow_mut(&self) -> AppRefMut<'_> {
         if option_env!("TRACK_THREAD_BORROWS").is_some() {
             let thread_id = std::thread::current().id();
             eprintln!("borrowed {thread_id:?}");
@@ -84,7 +84,7 @@ impl AppCell {

     #[doc(hidden)]
     #[track_caller]
-    pub fn try_borrow_mut(&self) -> Result<AppRefMut, BorrowMutError> {
+    pub fn try_borrow_mut(&self) -> Result<AppRefMut<'_>, BorrowMutError> {
         if option_env!("TRACK_THREAD_BORROWS").is_some() {
             let thread_id = std::thread::current().id();
             eprintln!("borrowed {thread_id:?}");
@@ -718,7 +718,7 @@ impl<T> ops::Index<usize> for AtlasTextureList<T> {

 impl<T> AtlasTextureList<T> {
     #[allow(unused)]
-    fn drain(&mut self) -> std::vec::Drain<Option<T>> {
+    fn drain(&mut self) -> std::vec::Drain<'_, Option<T>> {
         self.free_list.clear();
         self.textures.drain(..)
     }
@@ -25,7 +25,7 @@ pub(crate) const ESCAPE_KEY: u16 = 0x1b;
 const TAB_KEY: u16 = 0x09;
 const SHIFT_TAB_KEY: u16 = 0x19;

-pub fn key_to_native(key: &str) -> Cow<str> {
+pub fn key_to_native(key: &str) -> Cow<'_, str> {
     use cocoa::appkit::*;
     let code = match key {
         "space" => SPACE_KEY,
@@ -149,7 +149,7 @@ impl Scene {
         ),
         allow(dead_code)
     )]
-    pub(crate) fn batches(&self) -> impl Iterator<Item = PrimitiveBatch> {
+    pub(crate) fn batches(&self) -> impl Iterator<Item = PrimitiveBatch<'_>> {
         BatchIterator {
             shadows: &self.shadows,
             shadows_start: 0,
@@ -616,7 +616,7 @@ impl Hash for (dyn AsCacheKeyRef + '_) {
 }

 impl AsCacheKeyRef for CacheKey {
-    fn as_cache_key_ref(&self) -> CacheKeyRef {
+    fn as_cache_key_ref(&self) -> CacheKeyRef<'_> {
         CacheKeyRef {
             text: &self.text,
             font_size: self.font_size,
@@ -645,7 +645,7 @@ impl<'a> Borrow<dyn AsCacheKeyRef + 'a> for Arc<CacheKey> {
 }

 impl AsCacheKeyRef for CacheKeyRef<'_> {
-    fn as_cache_key_ref(&self) -> CacheKeyRef {
+    fn as_cache_key_ref(&self) -> CacheKeyRef<'_> {
         *self
     }
 }
@@ -3127,7 +3127,7 @@ impl BufferSnapshot {
         None
     }

-    fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures, Vec<HighlightMap>) {
+    fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures<'_>, Vec<HighlightMap>) {
         let captures = self.syntax.captures(range, &self.text, |grammar| {
             grammar.highlights_query.as_ref()
         });
@@ -3143,7 +3143,7 @@ impl BufferSnapshot {
     /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
     /// returned in chunks where each chunk has a single syntax highlighting style and
     /// diagnostic status.
-    pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks {
+    pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks<'_> {
         let range = range.start.to_offset(self)..range.end.to_offset(self);

         let mut syntax = None;
@@ -3192,12 +3192,12 @@ impl BufferSnapshot {
     }

     /// Iterates over every [`SyntaxLayer`] in the buffer.
-    pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer> + '_ {
+    pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
         self.syntax
             .layers_for_range(0..self.len(), &self.text, true)
     }

-    pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer> {
+    pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer<'_>> {
         let offset = position.to_offset(self);
         self.syntax
             .layers_for_range(offset..offset, &self.text, false)
@@ -3208,7 +3208,7 @@ impl BufferSnapshot {
     pub fn smallest_syntax_layer_containing<D: ToOffset>(
         &self,
         range: Range<D>,
-    ) -> Option<SyntaxLayer> {
+    ) -> Option<SyntaxLayer<'_>> {
         let range = range.to_offset(self);
         return self
             .syntax
@@ -3426,7 +3426,7 @@ impl BufferSnapshot {
     }

     /// Returns the root syntax node within the given row
-    pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node> {
+    pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node<'_>> {
         let start_offset = position.to_offset(self);

         let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
@@ -3763,7 +3763,7 @@ impl BufferSnapshot {
         &self,
         range: Range<usize>,
         query: fn(&Grammar) -> Option<&tree_sitter::Query>,
-    ) -> SyntaxMapMatches {
+    ) -> SyntaxMapMatches<'_> {
         self.syntax.matches(range, self, query)
     }

@@ -1126,7 +1126,7 @@ impl<'a> SyntaxMapMatches<'a> {
         &self.grammars
     }

-    pub fn peek(&self) -> Option<SyntaxMapMatch> {
+    pub fn peek(&self) -> Option<SyntaxMapMatch<'_>> {
         let layer = self.layers.first()?;

         if !layer.has_next {
@@ -1550,7 +1550,7 @@ fn insert_newlines_between_ranges(

 impl OwnedSyntaxLayer {
     /// Returns the root syntax node for this layer.
-    pub fn node(&self) -> Node {
+    pub fn node(&self) -> Node<'_> {
         self.tree
             .root_node_with_offset(self.offset.0, self.offset.1)
     }
@@ -412,7 +412,7 @@ impl libwebrtc::native::audio_mixer::AudioMixerSource for AudioMixerSource {
         self.sample_rate
     }

-    fn get_audio_frame_with_info<'a>(&self, target_sample_rate: u32) -> Option<AudioFrame> {
+    fn get_audio_frame_with_info<'a>(&self, target_sample_rate: u32) -> Option<AudioFrame<'_>> {
         assert_eq!(self.sample_rate, target_sample_rate);
         let buf = self.buffer.lock().pop_front()?;
         Some(AudioFrame {
@@ -231,7 +231,7 @@ impl Markdown {
         &self.parsed_markdown
     }

-    pub fn escape(s: &str) -> Cow<str> {
+    pub fn escape(s: &str) -> Cow<'_, str> {
         // Valid to use bytes since multi-byte UTF-8 doesn't use ASCII chars.
         let count = s
             .bytes()
@@ -72,25 +72,25 @@ impl<'a> MarkdownParser<'a> {
         self.cursor >= self.tokens.len() - 1
     }

-    fn peek(&self, steps: usize) -> Option<&(Event, Range<usize>)> {
+    fn peek(&self, steps: usize) -> Option<&(Event<'_>, Range<usize>)> {
         if self.eof() || (steps + self.cursor) >= self.tokens.len() {
             return self.tokens.last();
         }
         return self.tokens.get(self.cursor + steps);
     }

-    fn previous(&self) -> Option<&(Event, Range<usize>)> {
+    fn previous(&self) -> Option<&(Event<'_>, Range<usize>)> {
         if self.cursor == 0 || self.cursor > self.tokens.len() {
             return None;
         }
         return self.tokens.get(self.cursor - 1);
     }

-    fn current(&self) -> Option<&(Event, Range<usize>)> {
+    fn current(&self) -> Option<&(Event<'_>, Range<usize>)> {
         return self.peek(0);
     }

-    fn current_event(&self) -> Option<&Event> {
+    fn current_event(&self) -> Option<&Event<'_>> {
         return self.current().map(|(event, _)| event);
     }

@@ -728,7 +728,7 @@ impl MultiBuffer {
         self.snapshot.borrow().clone()
     }

-    pub fn read(&self, cx: &App) -> Ref<MultiBufferSnapshot> {
+    pub fn read(&self, cx: &App) -> Ref<'_, MultiBufferSnapshot> {
         self.sync(cx);
         self.snapshot.borrow()
     }
@@ -2615,7 +2615,7 @@ impl MultiBuffer {
         "untitled".into()
     }

-    fn buffer_based_title(&self, buffer: &Buffer) -> Option<Cow<str>> {
+    fn buffer_based_title(&self, buffer: &Buffer) -> Option<Cow<'_, str>> {
         let mut is_leading_whitespace = true;
         let mut count = 0;
         let mut prev_was_space = false;
@@ -3779,7 +3779,7 @@ impl MultiBufferSnapshot {
             .flat_map(|c| c.chars().rev())
     }

-    fn reversed_chunks_in_range(&self, range: Range<usize>) -> ReversedMultiBufferChunks {
+    fn reversed_chunks_in_range(&self, range: Range<usize>) -> ReversedMultiBufferChunks<'_> {
         let mut cursor = self.cursor::<usize>();
         cursor.seek(&range.end);
         let current_chunks = cursor.region().as_ref().map(|region| {
@@ -4294,7 +4294,7 @@ impl MultiBufferSnapshot {
         self.excerpts.summary().widest_line_number + 1
     }

-    pub fn bytes_in_range<T: ToOffset>(&self, range: Range<T>) -> MultiBufferBytes {
+    pub fn bytes_in_range<T: ToOffset>(&self, range: Range<T>) -> MultiBufferBytes<'_> {
         let range = range.start.to_offset(self)..range.end.to_offset(self);
         let mut excerpts = self.cursor::<usize>();
         excerpts.seek(&range.start);
@@ -4333,7 +4333,7 @@ impl MultiBufferSnapshot {
     pub fn reversed_bytes_in_range<T: ToOffset>(
         &self,
         range: Range<T>,
-    ) -> ReversedMultiBufferBytes {
+    ) -> ReversedMultiBufferBytes<'_> {
         let range = range.start.to_offset(self)..range.end.to_offset(self);
         let mut chunks = self.reversed_chunks_in_range(range.clone());
         let chunk = chunks.next().map_or(&[][..], |c| c.as_bytes());
@@ -4344,7 +4344,7 @@ impl MultiBufferSnapshot {
         }
     }

-    pub fn row_infos(&self, start_row: MultiBufferRow) -> MultiBufferRows {
+    pub fn row_infos(&self, start_row: MultiBufferRow) -> MultiBufferRows<'_> {
         let mut cursor = self.cursor::<Point>();
         cursor.seek(&Point::new(start_row.0, 0));
         let mut result = MultiBufferRows {
@@ -4357,7 +4357,11 @@ impl MultiBufferSnapshot {
         result
     }

-    pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> MultiBufferChunks {
+    pub fn chunks<T: ToOffset>(
+        &self,
+        range: Range<T>,
+        language_aware: bool,
+    ) -> MultiBufferChunks<'_> {
         let mut chunks = MultiBufferChunks {
             excerpt_offset_range: ExcerptOffset::new(0)..ExcerptOffset::new(0),
             range: 0..0,
@@ -5318,7 +5322,7 @@ impl MultiBufferSnapshot {
             .map(|excerpt| (excerpt.id, &excerpt.buffer, excerpt.range.clone()))
     }

-    fn cursor<D: TextDimension + Default>(&self) -> MultiBufferCursor<D> {
+    fn cursor<D: TextDimension + Default>(&self) -> MultiBufferCursor<'_, D> {
         let excerpts = self.excerpts.cursor(&());
         let diff_transforms = self.diff_transforms.cursor(&());
         MultiBufferCursor {
@@ -6081,7 +6085,7 @@ impl MultiBufferSnapshot {
     pub fn syntax_ancestor<T: ToOffset>(
         &self,
         range: Range<T>,
-    ) -> Option<(tree_sitter::Node, MultiOrSingleBufferOffsetRange)> {
+    ) -> Option<(tree_sitter::Node<'_>, MultiOrSingleBufferOffsetRange)> {
         let range = range.start.to_offset(self)..range.end.to_offset(self);
         let mut excerpt = self.excerpt_containing(range.clone())?;
         let node = excerpt
@@ -6279,7 +6283,10 @@ impl MultiBufferSnapshot {
     }

     /// Returns the excerpt containing range and its offset start within the multibuffer or none if `range` spans multiple excerpts
-    pub fn excerpt_containing<T: ToOffset>(&self, range: Range<T>) -> Option<MultiBufferExcerpt> {
+    pub fn excerpt_containing<T: ToOffset>(
+        &self,
+        range: Range<T>,
+    ) -> Option<MultiBufferExcerpt<'_>> {
         let range = range.start.to_offset(self)..range.end.to_offset(self);
         let mut cursor = self.cursor::<usize>();
         cursor.seek(&range.start);
@@ -6933,7 +6940,7 @@ impl Excerpt {
         }
     }

-    fn chunks_in_range(&self, range: Range<usize>, language_aware: bool) -> ExcerptChunks {
+    fn chunks_in_range(&self, range: Range<usize>, language_aware: bool) -> ExcerptChunks<'_> {
         let content_start = self.range.context.start.to_offset(&self.buffer);
         let chunks_start = content_start + range.start;
         let chunks_end = content_start + cmp::min(range.end, self.text_summary.len);
@@ -211,7 +211,7 @@ pub struct GitEntry {
 }

 impl GitEntry {
-    pub fn to_ref(&self) -> GitEntryRef {
+    pub fn to_ref(&self) -> GitEntryRef<'_> {
         GitEntryRef {
             entry: &self.entry,
             git_summary: self.git_summary,
@@ -467,7 +467,7 @@ impl CompletionSource {
         }
     }

-    pub fn lsp_completion(&self, apply_defaults: bool) -> Option<Cow<lsp::CompletionItem>> {
+    pub fn lsp_completion(&self, apply_defaults: bool) -> Option<Cow<'_, lsp::CompletionItem>> {
         if let Self::Lsp {
             lsp_completion,
             lsp_defaults,
@@ -3227,7 +3227,7 @@ impl ProjectPanel {
         None
     }

-    fn entry_at_index(&self, index: usize) -> Option<(WorktreeId, GitEntryRef)> {
+    fn entry_at_index(&self, index: usize) -> Option<(WorktreeId, GitEntryRef<'_>)> {
         let mut offset = 0;
         for (worktree_id, visible_worktree_entries, _) in &self.visible_entries {
             if visible_worktree_entries.len() > offset + index {
@@ -281,7 +281,7 @@ impl RemoteEntry {
         matches!(self, Self::Project { .. })
     }

-    fn connection(&self) -> Cow<SshConnection> {
+    fn connection(&self) -> Cow<'_, SshConnection> {
         match self {
             Self::Project { connection, .. } => Cow::Borrowed(connection),
             Self::SshConfig { host, .. } => Cow::Owned(SshConnection {
@@ -53,7 +53,7 @@ impl Chunk {
     }

     #[inline(always)]
-    pub fn as_slice(&self) -> ChunkSlice {
+    pub fn as_slice(&self) -> ChunkSlice<'_> {
         ChunkSlice {
             chars: self.chars,
             chars_utf16: self.chars_utf16,
@@ -64,7 +64,7 @@ impl Chunk {
     }

     #[inline(always)]
-    pub fn slice(&self, range: Range<usize>) -> ChunkSlice {
+    pub fn slice(&self, range: Range<usize>) -> ChunkSlice<'_> {
         self.as_slice().slice(range)
     }
 }
@@ -241,7 +241,7 @@ impl Rope {
         self.chunks.extent(&())
     }

-    pub fn cursor(&self, offset: usize) -> Cursor {
+    pub fn cursor(&self, offset: usize) -> Cursor<'_> {
         Cursor::new(self, offset)
     }

@@ -258,23 +258,23 @@ impl Rope {
             .flat_map(|chunk| chunk.chars().rev())
     }

-    pub fn bytes_in_range(&self, range: Range<usize>) -> Bytes {
+    pub fn bytes_in_range(&self, range: Range<usize>) -> Bytes<'_> {
         Bytes::new(self, range, false)
     }

-    pub fn reversed_bytes_in_range(&self, range: Range<usize>) -> Bytes {
+    pub fn reversed_bytes_in_range(&self, range: Range<usize>) -> Bytes<'_> {
         Bytes::new(self, range, true)
     }

-    pub fn chunks(&self) -> Chunks {
+    pub fn chunks(&self) -> Chunks<'_> {
         self.chunks_in_range(0..self.len())
     }

-    pub fn chunks_in_range(&self, range: Range<usize>) -> Chunks {
+    pub fn chunks_in_range(&self, range: Range<usize>) -> Chunks<'_> {
         Chunks::new(self, range, false)
     }

-    pub fn reversed_chunks_in_range(&self, range: Range<usize>) -> Chunks {
+    pub fn reversed_chunks_in_range(&self, range: Range<usize>) -> Chunks<'_> {
         Chunks::new(self, range, true)
     }

@@ -380,7 +380,7 @@ impl<T: Item> SumTree<T> {
         items
     }

-    pub fn iter(&self) -> Iter<T> {
+    pub fn iter(&self) -> Iter<'_, T> {
         Iter::new(self)
     }

@@ -103,7 +103,7 @@ pub struct VenvSettingsContent<'a> {
 }

 impl VenvSettings {
-    pub fn as_option(&self) -> Option<VenvSettingsContent> {
+    pub fn as_option(&self) -> Option<VenvSettingsContent<'_>> {
         match self {
             VenvSettings::Off => None,
             VenvSettings::On {
@@ -2049,7 +2049,7 @@ impl BufferSnapshot {
         self.visible_text.reversed_chars_at(offset)
     }

-    pub fn reversed_chunks_in_range<T: ToOffset>(&self, range: Range<T>) -> rope::Chunks {
+    pub fn reversed_chunks_in_range<T: ToOffset>(&self, range: Range<T>) -> rope::Chunks<'_> {
         let range = range.start.to_offset(self)..range.end.to_offset(self);
         self.visible_text.reversed_chunks_in_range(range)
     }
@@ -150,7 +150,7 @@ impl<T> Drop for Changed<'_, T> {
 }

 impl<T> Receiver<T> {
-    pub fn borrow(&mut self) -> parking_lot::MappedRwLockReadGuard<T> {
+    pub fn borrow(&mut self) -> parking_lot::MappedRwLockReadGuard<'_, T> {
         let state = self.state.read();
         self.version = state.version;
         RwLockReadGuard::map(state, |state| &state.value)
@@ -2566,7 +2566,7 @@ impl Snapshot {
         include_dirs: bool,
         include_ignored: bool,
         start_offset: usize,
-    ) -> Traversal {
+    ) -> Traversal<'_> {
         let mut cursor = self.entries_by_path.cursor(&());
         cursor.seek(
             &TraversalTarget::Count {
@@ -2593,19 +2593,19 @@ impl Snapshot {
         include_dirs: bool,
         include_ignored: bool,
         path: &Path,
-    ) -> Traversal {
+    ) -> Traversal<'_> {
         Traversal::new(self, include_files, include_dirs, include_ignored, path)
     }

-    pub fn files(&self, include_ignored: bool, start: usize) -> Traversal {
+    pub fn files(&self, include_ignored: bool, start: usize) -> Traversal<'_> {
         self.traverse_from_offset(true, false, include_ignored, start)
     }

-    pub fn directories(&self, include_ignored: bool, start: usize) -> Traversal {
+    pub fn directories(&self, include_ignored: bool, start: usize) -> Traversal<'_> {
         self.traverse_from_offset(false, true, include_ignored, start)
     }

-    pub fn entries(&self, include_ignored: bool, start: usize) -> Traversal {
+    pub fn entries(&self, include_ignored: bool, start: usize) -> Traversal<'_> {
         self.traverse_from_offset(true, true, include_ignored, start)
     }
