Auto-fix clippy::collapsible_if violations (#36428)
Release Notes:

- N/A
parent 9e8ec72bd5
commit 8f567383e4
281 changed files with 6628 additions and 7089 deletions
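The pattern applied throughout is the same: nested `if`/`if let` blocks that clippy's `collapsible_if` lint flags are merged into a single condition using let-chains (`&&` joining `let` bindings and boolean tests), removing one level of nesting without changing behavior. A minimal before/after sketch, using made-up names rather than code from this diff (the changes themselves were presumably produced by `cargo clippy --fix`):

// Illustrative sketch only; `demo` and `value` are hypothetical, not from this commit.
fn demo(value: Option<&str>) {
    // Before: the nested form that clippy::collapsible_if warns about.
    if let Some(text) = value {
        if !text.is_empty() {
            println!("non-empty: {text}");
        }
    }

    // After: the collapsed form using a let-chain (requires the Rust 2024 edition).
    if let Some(text) = value
        && !text.is_empty()
    {
        println!("non-empty: {text}");
    }
}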
@@ -23,22 +23,20 @@ fn main() {
         "cargo:rustc-env=TARGET={}",
         std::env::var("TARGET").unwrap()
     );
-    if let Ok(output) = Command::new("git").args(["rev-parse", "HEAD"]).output() {
-        if output.status.success() {
-            let git_sha = String::from_utf8_lossy(&output.stdout);
-            let git_sha = git_sha.trim();
+    if let Ok(output) = Command::new("git").args(["rev-parse", "HEAD"]).output()
+        && output.status.success()
+    {
+        let git_sha = String::from_utf8_lossy(&output.stdout);
+        let git_sha = git_sha.trim();
 
-            println!("cargo:rustc-env=ZED_COMMIT_SHA={git_sha}");
+        println!("cargo:rustc-env=ZED_COMMIT_SHA={git_sha}");
 
-            if let Ok(build_profile) = std::env::var("PROFILE") {
-                if build_profile == "release" {
-                    // This is currently the best way to make `cargo build ...`'s build script
-                    // to print something to stdout without extra verbosity.
-                    println!(
-                        "cargo:warning=Info: using '{git_sha}' hash for ZED_COMMIT_SHA env var"
-                    );
-                }
-            }
-        }
+        if let Ok(build_profile) = std::env::var("PROFILE")
+            && build_profile == "release"
+        {
+            // This is currently the best way to make `cargo build ...`'s build script
+            // to print something to stdout without extra verbosity.
+            println!("cargo:warning=Info: using '{git_sha}' hash for ZED_COMMIT_SHA env var");
+        }
     }
@@ -1026,18 +1026,18 @@ async fn restore_or_create_workspace(app_state: Arc<AppState>, cx: &mut AsyncApp
         // Try to find an active workspace to show the toast
         let toast_shown = cx
             .update(|cx| {
-                if let Some(window) = cx.active_window() {
-                    if let Some(workspace) = window.downcast::<Workspace>() {
-                        workspace
-                            .update(cx, |workspace, _, cx| {
-                                workspace.show_toast(
-                                    Toast::new(NotificationId::unique::<()>(), message),
-                                    cx,
-                                )
-                            })
-                            .ok();
-                        return true;
-                    }
+                if let Some(window) = cx.active_window()
+                    && let Some(workspace) = window.downcast::<Workspace>()
+                {
+                    workspace
+                        .update(cx, |workspace, _, cx| {
+                            workspace.show_toast(
+                                Toast::new(NotificationId::unique::<()>(), message),
+                                cx,
+                            )
+                        })
+                        .ok();
+                    return true;
                 }
                 false
             })
@@ -1117,10 +1117,8 @@ pub(crate) async fn restorable_workspace_locations(
 
     // Since last_session_window_order returns the windows ordered front-to-back
     // we need to open the window that was frontmost last.
-    if ordered {
-        if let Some(locations) = locations.as_mut() {
-            locations.reverse();
-        }
+    if ordered && let Some(locations) = locations.as_mut() {
+        locations.reverse();
     }
 
     locations
@@ -1290,21 +1288,21 @@ fn eager_load_active_theme_and_icon_theme(fs: Arc<dyn Fs>, cx: &App) {
 
     if let Some(theme_selection) = theme_settings.theme_selection.as_ref() {
         let theme_name = theme_selection.theme(appearance);
-        if matches!(theme_registry.get(theme_name), Err(ThemeNotFoundError(_))) {
-            if let Some(theme_path) = extension_store.read(cx).path_to_extension_theme(theme_name) {
-                cx.spawn({
-                    let theme_registry = theme_registry.clone();
-                    let fs = fs.clone();
-                    async move |cx| {
-                        theme_registry.load_user_theme(&theme_path, fs).await?;
+        if matches!(theme_registry.get(theme_name), Err(ThemeNotFoundError(_)))
+            && let Some(theme_path) = extension_store.read(cx).path_to_extension_theme(theme_name)
+        {
+            cx.spawn({
+                let theme_registry = theme_registry.clone();
+                let fs = fs.clone();
+                async move |cx| {
+                    theme_registry.load_user_theme(&theme_path, fs).await?;
 
-                        cx.update(|cx| {
-                            ThemeSettings::reload_current_theme(cx);
-                        })
-                    }
-                })
-                .detach_and_log_err(cx);
-            }
+                    cx.update(|cx| {
+                        ThemeSettings::reload_current_theme(cx);
+                    })
+                }
+            })
+            .detach_and_log_err(cx);
         }
     }
@@ -1313,26 +1311,24 @@ fn eager_load_active_theme_and_icon_theme(fs: Arc<dyn Fs>, cx: &App) {
         if matches!(
             theme_registry.get_icon_theme(icon_theme_name),
             Err(IconThemeNotFoundError(_))
-        ) {
-            if let Some((icon_theme_path, icons_root_path)) = extension_store
-                .read(cx)
-                .path_to_extension_icon_theme(icon_theme_name)
-            {
-                cx.spawn({
-                    let theme_registry = theme_registry.clone();
-                    let fs = fs.clone();
-                    async move |cx| {
-                        theme_registry
-                            .load_icon_theme(&icon_theme_path, &icons_root_path, fs)
-                            .await?;
+        ) && let Some((icon_theme_path, icons_root_path)) = extension_store
+            .read(cx)
+            .path_to_extension_icon_theme(icon_theme_name)
+        {
+            cx.spawn({
+                let theme_registry = theme_registry.clone();
+                let fs = fs.clone();
+                async move |cx| {
+                    theme_registry
+                        .load_icon_theme(&icon_theme_path, &icons_root_path, fs)
+                        .await?;
 
-                        cx.update(|cx| {
-                            ThemeSettings::reload_current_icon_theme(cx);
-                        })
-                    }
-                })
-                .detach_and_log_err(cx);
-            }
+                    cx.update(|cx| {
+                        ThemeSettings::reload_current_icon_theme(cx);
+                    })
+                }
+            })
+            .detach_and_log_err(cx);
         }
     }
@@ -1381,18 +1377,15 @@ fn watch_themes(fs: Arc<dyn fs::Fs>, cx: &mut App) {
 
         while let Some(paths) = events.next().await {
            for event in paths {
-                if fs.metadata(&event.path).await.ok().flatten().is_some() {
-                    if let Some(theme_registry) =
-                        cx.update(|cx| ThemeRegistry::global(cx).clone()).log_err()
-                    {
-                        if let Some(()) = theme_registry
-                            .load_user_theme(&event.path, fs.clone())
-                            .await
-                            .log_err()
-                        {
-                            cx.update(ThemeSettings::reload_current_theme).log_err();
-                        }
-                    }
-                }
+                if fs.metadata(&event.path).await.ok().flatten().is_some()
+                    && let Some(theme_registry) =
+                        cx.update(|cx| ThemeRegistry::global(cx).clone()).log_err()
+                    && let Some(()) = theme_registry
+                        .load_user_theme(&event.path, fs.clone())
+                        .await
+                        .log_err()
+                {
+                    cx.update(ThemeSettings::reload_current_theme).log_err();
+                }
             }
         }
@@ -146,19 +146,17 @@ pub fn init_panic_hook(
         }
         zlog::flush();
 
-        if !is_pty {
-            if let Some(panic_data_json) = serde_json::to_string(&panic_data).log_err() {
-                let timestamp = chrono::Utc::now().format("%Y_%m_%d %H_%M_%S").to_string();
-                let panic_file_path = paths::logs_dir().join(format!("zed-{timestamp}.panic"));
-                let panic_file = fs::OpenOptions::new()
-                    .write(true)
-                    .create_new(true)
-                    .open(&panic_file_path)
-                    .log_err();
-                if let Some(mut panic_file) = panic_file {
-                    writeln!(&mut panic_file, "{panic_data_json}").log_err();
-                    panic_file.flush().log_err();
-                }
-            }
+        if !is_pty && let Some(panic_data_json) = serde_json::to_string(&panic_data).log_err() {
+            let timestamp = chrono::Utc::now().format("%Y_%m_%d %H_%M_%S").to_string();
+            let panic_file_path = paths::logs_dir().join(format!("zed-{timestamp}.panic"));
+            let panic_file = fs::OpenOptions::new()
+                .write(true)
+                .create_new(true)
+                .open(&panic_file_path)
+                .log_err();
+            if let Some(mut panic_file) = panic_file {
+                writeln!(&mut panic_file, "{panic_data_json}").log_err();
+                panic_file.flush().log_err();
+            }
         }
 
@@ -459,10 +457,10 @@ pub fn monitor_main_thread_hangs(
                     continue;
                 };
 
-                if let Some(response) = http_client.send(request).await.log_err() {
-                    if response.status() != 200 {
-                        log::error!("Failed to send hang report: HTTP {:?}", response.status());
-                    }
-                }
+                if let Some(response) = http_client.send(request).await.log_err()
+                    && response.status() != 200
+                {
+                    log::error!("Failed to send hang report: HTTP {:?}", response.status());
+                }
             }
         }
@@ -563,8 +561,8 @@ pub async fn upload_previous_minidumps(http: Arc<HttpClientWithUrl>) -> anyhow::
         }
         let mut json_path = child_path.clone();
         json_path.set_extension("json");
-        if let Ok(metadata) = serde_json::from_slice(&smol::fs::read(&json_path).await?) {
-            if upload_minidump(
+        if let Ok(metadata) = serde_json::from_slice(&smol::fs::read(&json_path).await?)
+            && upload_minidump(
                 http.clone(),
                 minidump_endpoint,
                 smol::fs::read(&child_path)
@@ -575,10 +573,9 @@ pub async fn upload_previous_minidumps(http: Arc<HttpClientWithUrl>) -> anyhow::
             .await
             .log_err()
             .is_some()
-            {
-                fs::remove_file(child_path).ok();
-                fs::remove_file(json_path).ok();
-            }
-        }
+        {
+            fs::remove_file(child_path).ok();
+            fs::remove_file(json_path).ok();
+        }
     }
     Ok(())
@@ -1054,27 +1054,25 @@ fn quit(_: &Quit, cx: &mut App) {
         })
         .log_err();
 
-        if should_confirm {
-            if let Some(workspace) = workspace_windows.first() {
-                let answer = workspace
-                    .update(cx, |_, window, cx| {
-                        window.prompt(
-                            PromptLevel::Info,
-                            "Are you sure you want to quit?",
-                            None,
-                            &["Quit", "Cancel"],
-                            cx,
-                        )
-                    })
-                    .log_err();
+        if should_confirm && let Some(workspace) = workspace_windows.first() {
+            let answer = workspace
+                .update(cx, |_, window, cx| {
+                    window.prompt(
+                        PromptLevel::Info,
+                        "Are you sure you want to quit?",
+                        None,
+                        &["Quit", "Cancel"],
+                        cx,
+                    )
+                })
+                .log_err();
 
-                if let Some(answer) = answer {
-                    WAITING_QUIT_CONFIRMATION.store(true, atomic::Ordering::Release);
-                    let answer = answer.await.ok();
-                    WAITING_QUIT_CONFIRMATION.store(false, atomic::Ordering::Release);
-                    if answer != Some(0) {
-                        return Ok(());
-                    }
-                }
-            }
+            if let Some(answer) = answer {
+                WAITING_QUIT_CONFIRMATION.store(true, atomic::Ordering::Release);
+                let answer = answer.await.ok();
+                WAITING_QUIT_CONFIRMATION.store(false, atomic::Ordering::Release);
+                if answer != Some(0) {
+                    return Ok(());
+                }
+            }
         }
@@ -1086,10 +1084,9 @@ fn quit(_: &Quit, cx: &mut App) {
                 workspace.prepare_to_close(CloseIntent::Quit, window, cx)
             })
             .log_err()
-        {
-            if !should_close.await? {
-                return Ok(());
-            }
-        }
+            && !should_close.await?
+        {
+            return Ok(());
+        }
     }
     cx.update(|cx| cx.quit())?;
@@ -1633,15 +1630,15 @@ fn open_local_file(
     };
 
     if !file_exists {
-        if let Some(dir_path) = settings_relative_path.parent() {
-            if worktree.read_with(cx, |tree, _| tree.entry_for_path(dir_path).is_none())? {
-                project
-                    .update(cx, |project, cx| {
-                        project.create_entry((tree_id, dir_path), true, cx)
-                    })?
-                    .await
-                    .context("worktree was removed")?;
-            }
+        if let Some(dir_path) = settings_relative_path.parent()
+            && worktree.read_with(cx, |tree, _| tree.entry_for_path(dir_path).is_none())?
+        {
+            project
+                .update(cx, |project, cx| {
+                    project.create_entry((tree_id, dir_path), true, cx)
+                })?
+                .await
+                .context("worktree was removed")?;
         }
 
         if worktree.read_with(cx, |tree, _| {
@@ -1667,12 +1664,12 @@ fn open_local_file(
                 editor
                     .downgrade()
                     .update(cx, |editor, cx| {
-                        if let Some(buffer) = editor.buffer().read(cx).as_singleton() {
-                            if buffer.read(cx).is_empty() {
-                                buffer.update(cx, |buffer, cx| {
-                                    buffer.edit([(0..0, initial_contents)], None, cx)
-                                });
-                            }
-                        }
+                        if let Some(buffer) = editor.buffer().read(cx).as_singleton()
+                            && buffer.read(cx).is_empty()
+                        {
+                            buffer.update(cx, |buffer, cx| {
+                                buffer.edit([(0..0, initial_contents)], None, cx)
+                            });
+                        }
                     })
                     .ok();
@@ -318,25 +318,25 @@ impl ComponentPreview {
             let lowercase_scope = scope_name.to_lowercase();
             let lowercase_desc = description.to_lowercase();
 
-            if lowercase_scopeless.contains(&lowercase_filter) {
-                if let Some(index) = lowercase_scopeless.find(&lowercase_filter) {
-                    let end = index + lowercase_filter.len();
+            if lowercase_scopeless.contains(&lowercase_filter)
+                && let Some(index) = lowercase_scopeless.find(&lowercase_filter)
+            {
+                let end = index + lowercase_filter.len();
 
-                    if end <= scopeless_name.len() {
-                        let mut positions = Vec::new();
-                        for i in index..end {
-                            if scopeless_name.is_char_boundary(i) {
-                                positions.push(i);
-                            }
-                        }
+                if end <= scopeless_name.len() {
+                    let mut positions = Vec::new();
+                    for i in index..end {
+                        if scopeless_name.is_char_boundary(i) {
+                            positions.push(i);
+                        }
+                    }
 
-                        if !positions.is_empty() {
-                            scope_groups
-                                .entry(component.scope())
-                                .or_insert_with(Vec::new)
-                                .push((component.clone(), Some(positions)));
-                            continue;
-                        }
-                    }
-                }
-            }
+                    if !positions.is_empty() {
+                        scope_groups
+                            .entry(component.scope())
+                            .or_insert_with(Vec::new)
+                            .push((component.clone(), Some(positions)));
+                        continue;
+                    }
+                }
+            }
@@ -372,32 +372,32 @@ impl ComponentPreview {
         scopes.sort_by_key(|s| s.to_string());
 
         for scope in scopes {
-            if let Some(components) = scope_groups.remove(&scope) {
-                if !components.is_empty() {
-                    entries.push(PreviewEntry::Separator);
-                    entries.push(PreviewEntry::SectionHeader(scope.to_string().into()));
+            if let Some(components) = scope_groups.remove(&scope)
+                && !components.is_empty()
+            {
+                entries.push(PreviewEntry::Separator);
+                entries.push(PreviewEntry::SectionHeader(scope.to_string().into()));
 
-                    let mut sorted_components = components;
-                    sorted_components.sort_by_key(|(component, _)| component.sort_name());
+                let mut sorted_components = components;
+                sorted_components.sort_by_key(|(component, _)| component.sort_name());
 
-                    for (component, positions) in sorted_components {
-                        entries.push(PreviewEntry::Component(component, positions));
-                    }
-                }
+                for (component, positions) in sorted_components {
+                    entries.push(PreviewEntry::Component(component, positions));
+                }
             }
         }
 
         // Add uncategorized components last
-        if let Some(components) = scope_groups.get(&ComponentScope::None) {
-            if !components.is_empty() {
-                entries.push(PreviewEntry::Separator);
-                entries.push(PreviewEntry::SectionHeader("Uncategorized".into()));
-                let mut sorted_components = components.clone();
-                sorted_components.sort_by_key(|(c, _)| c.sort_name());
+        if let Some(components) = scope_groups.get(&ComponentScope::None)
+            && !components.is_empty()
+        {
+            entries.push(PreviewEntry::Separator);
+            entries.push(PreviewEntry::SectionHeader("Uncategorized".into()));
+            let mut sorted_components = components.clone();
+            sorted_components.sort_by_key(|(c, _)| c.sort_name());
 
-            for (component, positions) in sorted_components {
-                entries.push(PreviewEntry::Component(component, positions));
-            }
+            for (component, positions) in sorted_components {
+                entries.push(PreviewEntry::Component(component, positions));
+            }
         }
 
@@ -415,19 +415,20 @@ impl ComponentPreview {
 
         let filtered_components = self.filtered_components();
 
-        if !self.filter_text.is_empty() && !matches!(self.active_page, PreviewPage::AllComponents) {
-            if let PreviewPage::Component(ref component_id) = self.active_page {
-                let component_still_visible = filtered_components
-                    .iter()
-                    .any(|component| component.id() == *component_id);
+        if !self.filter_text.is_empty()
+            && !matches!(self.active_page, PreviewPage::AllComponents)
+            && let PreviewPage::Component(ref component_id) = self.active_page
+        {
+            let component_still_visible = filtered_components
+                .iter()
+                .any(|component| component.id() == *component_id);
 
-                if !component_still_visible {
-                    if !filtered_components.is_empty() {
-                        let first_component = &filtered_components[0];
-                        self.set_active_page(PreviewPage::Component(first_component.id()), cx);
-                    } else {
-                        self.set_active_page(PreviewPage::AllComponents, cx);
-                    }
-                }
+            if !component_still_visible {
+                if !filtered_components.is_empty() {
+                    let first_component = &filtered_components[0];
+                    self.set_active_page(PreviewPage::Component(first_component.id()), cx);
+                } else {
+                    self.set_active_page(PreviewPage::AllComponents, cx);
+                }
+            }
         }
 
@@ -204,12 +204,12 @@ fn assign_edit_prediction_provider(
         }
         EditPredictionProvider::Copilot => {
             if let Some(copilot) = Copilot::global(cx) {
-                if let Some(buffer) = singleton_buffer {
-                    if buffer.read(cx).file().is_some() {
-                        copilot.update(cx, |copilot, cx| {
-                            copilot.register_buffer(&buffer, cx);
-                        });
-                    }
-                }
+                if let Some(buffer) = singleton_buffer
+                    && buffer.read(cx).file().is_some()
+                {
+                    copilot.update(cx, |copilot, cx| {
+                        copilot.register_buffer(&buffer, cx);
+                    });
+                }
                 let provider = cx.new(|_| CopilotCompletionProvider::new(copilot));
                 editor.set_edit_prediction_provider(Some(provider), window, cx);
@@ -225,15 +225,15 @@ fn assign_edit_prediction_provider(
             if user_store.read(cx).current_user().is_some() {
                 let mut worktree = None;
 
-                if let Some(buffer) = &singleton_buffer {
-                    if let Some(file) = buffer.read(cx).file() {
-                        let id = file.worktree_id(cx);
-                        if let Some(inner_worktree) = editor
-                            .project()
-                            .and_then(|project| project.read(cx).worktree_for_id(id, cx))
-                        {
-                            worktree = Some(inner_worktree);
-                        }
-                    }
-                }
+                if let Some(buffer) = &singleton_buffer
+                    && let Some(file) = buffer.read(cx).file()
+                {
+                    let id = file.worktree_id(cx);
+                    if let Some(inner_worktree) = editor
+                        .project()
+                        .and_then(|project| project.read(cx).worktree_for_id(id, cx))
+                    {
+                        worktree = Some(inner_worktree);
+                    }
+                }
 
@@ -245,12 +245,12 @@ fn assign_edit_prediction_provider(
                 let zeta =
                     zeta::Zeta::register(workspace, worktree, client.clone(), user_store, cx);
 
-                if let Some(buffer) = &singleton_buffer {
-                    if buffer.read(cx).file().is_some() {
-                        zeta.update(cx, |zeta, cx| {
-                            zeta.register_buffer(buffer, cx);
-                        });
-                    }
-                }
+                if let Some(buffer) = &singleton_buffer
+                    && buffer.read(cx).file().is_some()
+                {
+                    zeta.update(cx, |zeta, cx| {
+                        zeta.register_buffer(buffer, cx);
+                    });
+                }
 
                 let data_collection =
@@ -37,20 +37,19 @@ fn address() -> SocketAddr {
     let mut user_port = port;
     let mut sys = System::new_all();
     sys.refresh_all();
-    if let Ok(current_pid) = sysinfo::get_current_pid() {
-        if let Some(uid) = sys
+    if let Ok(current_pid) = sysinfo::get_current_pid()
+        && let Some(uid) = sys
             .process(current_pid)
             .and_then(|process| process.user_id())
-        {
-            let uid_u32 = get_uid_as_u32(uid);
-            // Ensure that the user ID is not too large to avoid overflow when
-            // calculating the port number. This seems unlikely but it doesn't
-            // hurt to be safe.
-            let max_port = 65535;
-            let max_uid: u32 = max_port - port as u32;
-            let wrapped_uid: u16 = (uid_u32 % max_uid) as u16;
-            user_port += wrapped_uid;
-        }
-    }
+    {
+        let uid_u32 = get_uid_as_u32(uid);
+        // Ensure that the user ID is not too large to avoid overflow when
+        // calculating the port number. This seems unlikely but it doesn't
+        // hurt to be safe.
+        let max_port = 65535;
+        let max_uid: u32 = max_port - port as u32;
+        let wrapped_uid: u16 = (uid_u32 % max_uid) as u16;
+        user_port += wrapped_uid;
+    }
 
     SocketAddr::V4(SocketAddrV4::new(LOCALHOST, user_port))
@@ -123,26 +123,24 @@ impl OpenRequest {
 
     fn parse_request_path(&mut self, request_path: &str) -> Result<()> {
         let mut parts = request_path.split('/');
-        if parts.next() == Some("channel") {
-            if let Some(slug) = parts.next() {
-                if let Some(id_str) = slug.split('-').next_back() {
-                    if let Ok(channel_id) = id_str.parse::<u64>() {
-                        let Some(next) = parts.next() else {
-                            self.join_channel = Some(channel_id);
-                            return Ok(());
-                        };
+        if parts.next() == Some("channel")
+            && let Some(slug) = parts.next()
+            && let Some(id_str) = slug.split('-').next_back()
+            && let Ok(channel_id) = id_str.parse::<u64>()
+        {
+            let Some(next) = parts.next() else {
+                self.join_channel = Some(channel_id);
+                return Ok(());
+            };
 
-                        if let Some(heading) = next.strip_prefix("notes#") {
-                            self.open_channel_notes
-                                .push((channel_id, Some(heading.to_string())));
-                            return Ok(());
-                        }
-                        if next == "notes" {
-                            self.open_channel_notes.push((channel_id, None));
-                            return Ok(());
-                        }
-                    }
-                }
-            }
+            if let Some(heading) = next.strip_prefix("notes#") {
+                self.open_channel_notes
+                    .push((channel_id, Some(heading.to_string())));
+                return Ok(());
+            }
+            if next == "notes" {
+                self.open_channel_notes.push((channel_id, None));
+                return Ok(());
+            }
         }
         anyhow::bail!("invalid zed url: {request_path}")
@@ -181,10 +179,10 @@ pub fn listen_for_cli_connections(opener: OpenListener) -> Result<()> {
 
     let sock_path = paths::data_dir().join(format!("zed-{}.sock", *RELEASE_CHANNEL_NAME));
     // remove the socket if the process listening on it has died
-    if let Err(e) = UnixDatagram::unbound()?.connect(&sock_path) {
-        if e.kind() == std::io::ErrorKind::ConnectionRefused {
-            std::fs::remove_file(&sock_path)?;
-        }
-    }
+    if let Err(e) = UnixDatagram::unbound()?.connect(&sock_path)
+        && e.kind() == std::io::ErrorKind::ConnectionRefused
+    {
+        std::fs::remove_file(&sock_path)?;
+    }
     let listener = UnixDatagram::bind(&sock_path)?;
     thread::spawn(move || {
@@ -244,12 +242,12 @@ pub async fn open_paths_with_positions(
         .iter()
         .map(|path_with_position| {
             let path = path_with_position.path.clone();
-            if let Some(row) = path_with_position.row {
-                if path.is_file() {
-                    let row = row.saturating_sub(1);
-                    let col = path_with_position.column.unwrap_or(0).saturating_sub(1);
-                    caret_positions.insert(path.clone(), Point::new(row, col));
-                }
-            }
+            if let Some(row) = path_with_position.row
+                && path.is_file()
+            {
+                let row = row.saturating_sub(1);
+                let col = path_with_position.column.unwrap_or(0).saturating_sub(1);
+                caret_positions.insert(path.clone(), Point::new(row, col));
+            }
             path
         })
@@ -264,10 +262,9 @@ pub async fn open_paths_with_positions(
             let new_path = Path::new(&diff_pair[1]).canonicalize()?;
             if let Ok(diff_view) = workspace.update(cx, |workspace, window, cx| {
                 FileDiffView::open(old_path, new_path, workspace, window, cx)
-            }) {
-                if let Some(diff_view) = diff_view.await.log_err() {
-                    items.push(Some(Ok(Box::new(diff_view))))
-                }
-            }
+            }) && let Some(diff_view) = diff_view.await.log_err()
+            {
+                items.push(Some(Ok(Box::new(diff_view))))
+            }
         }
 