Add git blame (#8889)
This adds a new action to the editor: `editor: toggle git blame`. When used, it turns on a sidebar containing `git blame` information for the currently open buffer. The git blame information is updated when the buffer changes. It handles additions, deletions, modifications, changes to the underlying git data (new commits, changed commits, ...), and file saves. It also handles folded and wrapped lines correctly.

When the user hovers over a commit, a tooltip displays information for the commit that introduced the line. If the repository has a remote named `origin` configured, clicking on a blame entry opens the permalink to the commit on the code host. Users can right-click a blame entry to get a context menu that allows them to copy the commit's SHA.

The feature also works on shared projects: when collaborating, a peer can request `git blame` data. As of this PR, Zed comes bundled with a `git` binary, so users don't need `git` installed locally to use this feature.

### Screenshots

### TODOs

- [x] Bundling `git` binary

### Release Notes

Release Notes:

- Added `editor: toggle git blame` command that toggles a sidebar with git blame information for the current buffer.

---------

Co-authored-by: Antonio <antonio@zed.dev>
Co-authored-by: Piotr <piotr@zed.dev>
Co-authored-by: Bennet <bennetbo@gmx.de>
Co-authored-by: Mikayla <mikayla@zed.dev>
parent e2d6b0deba
commit 7f54935324
39 changed files with 3760 additions and 157 deletions
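For orientation, here is a minimal sketch of how a caller on the host side might drive the new `Project::blame_buffer` API shown in the diff below. Only the `blame_buffer` signature is taken from the diff; the `request_blame` helper and the crate paths for `Project` and `Buffer` are illustrative assumptions, not part of this PR.

```rust
// Sketch only: `request_blame` is a hypothetical helper; the crate paths for
// `Project` and `Buffer` are assumed, while `blame_buffer`'s signature matches
// the diff below.
use anyhow::Result;
use git::blame::Blame;
use gpui::{AppContext, Model, Task};
use language::Buffer;
use project::Project;

fn request_blame(
    project: &Model<Project>,
    buffer: &Model<Buffer>,
    cx: &mut AppContext,
) -> Task<Result<Blame>> {
    project.update(cx, |project, cx| {
        // `version: None` blames the buffer's current contents. Remote collaborators
        // instead send the buffer version they observed, and the host waits for it in
        // `handle_blame_buffer` before running `git blame`.
        project.blame_buffer(buffer, None, cx)
    })
}
```

On a local project the returned `Task` resolves by running blame against the worktree's repository on the background executor; on a shared project the same call becomes a `proto::BlameBuffer` request to the host, whose response is decoded by `deserialize_blame_buffer_response` (see the last hunk of the diff).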
```diff
@@ -30,6 +30,7 @@ use futures::{
     stream::FuturesUnordered,
     AsyncWriteExt, Future, FutureExt, StreamExt, TryFutureExt,
 };
+use git::blame::Blame;
 use globset::{Glob, GlobSet, GlobSetBuilder};
 use gpui::{
     AnyModel, AppContext, AsyncAppContext, BackgroundExecutor, BorrowAppContext, Context, Entity,
@@ -83,7 +84,7 @@ use std::{
     ops::Range,
     path::{self, Component, Path, PathBuf},
     process::Stdio,
-    str,
+    str::{self, FromStr},
     sync::{
         atomic::{AtomicUsize, Ordering::SeqCst},
         Arc,
@@ -95,7 +96,7 @@ use terminals::Terminals;
 use text::{Anchor, BufferId, RopeFingerprint};
 use util::{
     debug_panic, defer,
-    http::HttpClient,
+    http::{HttpClient, Url},
     maybe, merge_json_value_into,
     paths::{
         LOCAL_SETTINGS_RELATIVE_PATH, LOCAL_TASKS_RELATIVE_PATH, LOCAL_VSCODE_TASKS_RELATIVE_PATH,
@@ -304,6 +305,7 @@ pub enum Event {
     WorktreeAdded,
     WorktreeRemoved(WorktreeId),
     WorktreeUpdatedEntries(WorktreeId, UpdatedEntriesSet),
+    WorktreeUpdatedGitRepositories,
     DiskBasedDiagnosticsStarted {
         language_server_id: LanguageServerId,
     },
@@ -593,6 +595,7 @@ impl Project {
         client.add_model_request_handler(Self::handle_save_buffer);
         client.add_model_message_handler(Self::handle_update_diff_base);
         client.add_model_request_handler(Self::handle_lsp_command::<lsp_ext_command::ExpandMacro>);
+        client.add_model_request_handler(Self::handle_blame_buffer);
     }
 
     pub fn local(
@@ -6746,8 +6749,13 @@ impl Project {
                 }
                 worktree::Event::UpdatedGitRepositories(updated_repos) => {
                     if is_local {
-                        this.update_local_worktree_buffers_git_repos(worktree, updated_repos, cx)
+                        this.update_local_worktree_buffers_git_repos(
+                            worktree.clone(),
+                            updated_repos,
+                            cx,
+                        )
                     }
+                    cx.emit(Event::WorktreeUpdatedGitRepositories);
                 }
             }
         })
@@ -7036,9 +7044,10 @@ impl Project {
             .filter_map(|(buffer, path)| {
                 let (work_directory, repo) =
                     snapshot.repository_and_work_directory_for_path(&path)?;
-                let repo = snapshot.get_local_repo(&repo)?;
+                let repo_entry = snapshot.get_local_repo(&repo)?;
                 let relative_path = path.strip_prefix(&work_directory).ok()?;
-                let base_text = repo.load_index_text(relative_path);
+                let base_text = repo_entry.repo().lock().load_index_text(relative_path);
+
                 Some((buffer, base_text))
             })
            .collect::<Vec<_>>()
@@ -7315,6 +7324,19 @@ impl Project {
         })
     }
 
+    pub fn get_workspace_root(
+        &self,
+        project_path: &ProjectPath,
+        cx: &AppContext,
+    ) -> Option<PathBuf> {
+        Some(
+            self.worktree_for_id(project_path.worktree_id, cx)?
+                .read(cx)
+                .abs_path()
+                .to_path_buf(),
+        )
+    }
+
     pub fn get_repo(
         &self,
         project_path: &ProjectPath,
@@ -7327,8 +7349,107 @@ impl Project {
             .local_git_repo(&project_path.path)
     }
 
+    pub fn blame_buffer(
+        &self,
+        buffer: &Model<Buffer>,
+        version: Option<clock::Global>,
+        cx: &AppContext,
+    ) -> Task<Result<Blame>> {
+        if self.is_local() {
+            let blame_params = maybe!({
+                let buffer = buffer.read(cx);
+                let buffer_project_path = buffer
+                    .project_path(cx)
+                    .context("failed to get buffer project path")?;
+
+                let worktree = self
+                    .worktree_for_id(buffer_project_path.worktree_id, cx)
+                    .context("failed to get worktree")?
+                    .read(cx)
+                    .as_local()
+                    .context("worktree was not local")?
+                    .snapshot();
+                let (work_directory, repo) = worktree
+                    .repository_and_work_directory_for_path(&buffer_project_path.path)
+                    .context("failed to get repo for blamed buffer")?;
+
+                let repo_entry = worktree
+                    .get_local_repo(&repo)
+                    .context("failed to get repo for blamed buffer")?;
+
+                let relative_path = buffer_project_path
+                    .path
+                    .strip_prefix(&work_directory)?
+                    .to_path_buf();
+
+                let content = match version {
+                    Some(version) => buffer.rope_for_version(&version).clone(),
+                    None => buffer.as_rope().clone(),
+                };
+                let repo = repo_entry.repo().clone();
+
+                anyhow::Ok((repo, relative_path, content))
+            });
+
+            cx.background_executor().spawn(async move {
+                let (repo, relative_path, content) = blame_params?;
+                let lock = repo.lock();
+                lock.blame(&relative_path, content)
+            })
+        } else {
+            let project_id = self.remote_id();
+            let buffer_id = buffer.read(cx).remote_id();
+            let client = self.client.clone();
+            let version = buffer.read(cx).version();
+
+            cx.spawn(|_| async move {
+                let project_id = project_id.context("unable to get project id for buffer")?;
+                let response = client
+                    .request(proto::BlameBuffer {
+                        project_id,
+                        buffer_id: buffer_id.into(),
+                        version: serialize_version(&version),
+                    })
+                    .await?;
+
+                Ok(deserialize_blame_buffer_response(response))
+            })
+        }
+    }
+
     // RPC message handlers
 
+    async fn handle_blame_buffer(
+        this: Model<Self>,
+        envelope: TypedEnvelope<proto::BlameBuffer>,
+        _: Arc<Client>,
+        mut cx: AsyncAppContext,
+    ) -> Result<proto::BlameBufferResponse> {
+        let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
+        let version = deserialize_version(&envelope.payload.version);
+
+        let buffer = this.update(&mut cx, |this, _cx| {
+            this.opened_buffers
+                .get(&buffer_id)
+                .and_then(|buffer| buffer.upgrade())
+                .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
+        })??;
+
+        buffer
+            .update(&mut cx, |buffer, _| {
+                buffer.wait_for_version(version.clone())
+            })?
+            .await?;
+
+        let blame = this
+            .update(&mut cx, |this, cx| {
+                this.blame_buffer(&buffer, Some(version), cx)
+            })?
+            .await?;
+
+        Ok(serialize_blame_buffer_response(blame))
+    }
+
     async fn handle_unshare_project(
         this: Model<Self>,
         _: TypedEnvelope<proto::UnshareProject>,
@@ -9768,3 +9889,99 @@ async fn load_shell_environment(dir: &Path) -> Result<HashMap<String, String>> {
     }
     Ok(parsed_env)
 }
+
+fn serialize_blame_buffer_response(blame: git::blame::Blame) -> proto::BlameBufferResponse {
+    let entries = blame
+        .entries
+        .into_iter()
+        .map(|entry| proto::BlameEntry {
+            sha: entry.sha.as_bytes().into(),
+            start_line: entry.range.start,
+            end_line: entry.range.end,
+            original_line_number: entry.original_line_number,
+            author: entry.author.clone(),
+            author_mail: entry.author_mail.clone(),
+            author_time: entry.author_time,
+            author_tz: entry.author_tz.clone(),
+            committer: entry.committer.clone(),
+            committer_mail: entry.committer_mail.clone(),
+            committer_time: entry.committer_time,
+            committer_tz: entry.committer_tz.clone(),
+            summary: entry.summary.clone(),
+            previous: entry.previous.clone(),
+            filename: entry.filename.clone(),
+        })
+        .collect::<Vec<_>>();
+
+    let messages = blame
+        .messages
+        .into_iter()
+        .map(|(oid, message)| proto::CommitMessage {
+            oid: oid.as_bytes().into(),
+            message,
+        })
+        .collect::<Vec<_>>();
+
+    let permalinks = blame
+        .permalinks
+        .into_iter()
+        .map(|(oid, url)| proto::CommitPermalink {
+            oid: oid.as_bytes().into(),
+            permalink: url.to_string(),
+        })
+        .collect::<Vec<_>>();
+
+    proto::BlameBufferResponse {
+        entries,
+        messages,
+        permalinks,
+    }
+}
+
+fn deserialize_blame_buffer_response(response: proto::BlameBufferResponse) -> git::blame::Blame {
+    let entries = response
+        .entries
+        .into_iter()
+        .filter_map(|entry| {
+            Some(git::blame::BlameEntry {
+                sha: git::Oid::from_bytes(&entry.sha).ok()?,
+                range: entry.start_line..entry.end_line,
+                original_line_number: entry.original_line_number,
+                committer: entry.committer,
+                committer_time: entry.committer_time,
+                committer_tz: entry.committer_tz,
+                committer_mail: entry.committer_mail,
+                author: entry.author,
+                author_mail: entry.author_mail,
+                author_time: entry.author_time,
+                author_tz: entry.author_tz,
+                summary: entry.summary,
+                previous: entry.previous,
+                filename: entry.filename,
+            })
+        })
+        .collect::<Vec<_>>();
+
+    let messages = response
+        .messages
+        .into_iter()
+        .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
+        .collect::<HashMap<_, _>>();
+
+    let permalinks = response
+        .permalinks
+        .into_iter()
+        .filter_map(|permalink| {
+            Some((
+                git::Oid::from_bytes(&permalink.oid).ok()?,
+                Url::from_str(&permalink.permalink).ok()?,
+            ))
+        })
+        .collect::<HashMap<_, _>>();
+
+    Blame {
+        entries,
+        permalinks,
+        messages,
+    }
+}
```
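The `proto::CommitPermalink` entries serialized above pair each commit `Oid` with a URL on the code host, which is what clicking a blame entry opens when an `origin` remote is configured. The permalink construction itself lives in other files of this PR; the sketch below is only a rough, GitHub-only approximation of the idea, using a hypothetical `commit_permalink` helper that is not part of the change.

```rust
/// Rough, illustrative approximation only (not Zed's implementation): derive a
/// commit permalink from an `origin` remote URL and a commit SHA, GitHub-style.
fn commit_permalink(origin_url: &str, sha: &str) -> Option<String> {
    // Accept both SSH (`git@github.com:owner/repo.git`) and HTTPS remotes.
    let path = origin_url
        .strip_prefix("git@github.com:")
        .or_else(|| origin_url.strip_prefix("https://github.com/"))?;
    let repo = path.strip_suffix(".git").unwrap_or(path);
    Some(format!("https://github.com/{repo}/commit/{sha}"))
}

#[cfg(test)]
mod tests {
    use super::commit_permalink;

    #[test]
    fn builds_permalink_from_ssh_remote() {
        assert_eq!(
            commit_permalink("git@github.com:zed-industries/zed.git", "7f54935324").as_deref(),
            Some("https://github.com/zed-industries/zed/commit/7f54935324")
        );
    }
}
```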