Merge branch 'main' into in-app-feedback
commit f2a5a4d0fd
135 changed files with 3316 additions and 2821 deletions
@@ -25,6 +25,7 @@ use postage::watch;
use rand::prelude::*;
|
||||
use rpc::proto::{AnyTypedEnvelope, EntityMessage, EnvelopedMessage, PeerId, RequestMessage};
|
||||
use serde::Deserialize;
|
||||
use settings::{Settings, TelemetrySettings};
|
||||
use std::{
|
||||
any::TypeId,
|
||||
collections::HashMap,
|
||||
@@ -423,7 +424,9 @@ impl Client {
}));
|
||||
}
|
||||
Status::SignedOut | Status::UpgradeRequired => {
|
||||
self.telemetry.set_authenticated_user_info(None, false);
|
||||
let telemetry_settings = cx.read(|cx| cx.global::<Settings>().telemetry());
|
||||
self.telemetry
|
||||
.set_authenticated_user_info(None, false, telemetry_settings);
|
||||
state._reconnect_task.take();
|
||||
}
|
||||
_ => {}
|
||||
@@ -706,7 +709,13 @@ impl Client {
credentials = read_credentials_from_keychain(cx);
|
||||
read_from_keychain = credentials.is_some();
|
||||
if read_from_keychain {
|
||||
self.report_event("read credentials from keychain", Default::default());
|
||||
cx.read(|cx| {
|
||||
self.report_event(
|
||||
"read credentials from keychain",
|
||||
Default::default(),
|
||||
cx.global::<Settings>().telemetry(),
|
||||
);
|
||||
});
|
||||
}
|
||||
}
|
||||
if credentials.is_none() {
|
||||
@@ -997,6 +1006,8 @@ impl Client {
let executor = cx.background();
|
||||
let telemetry = self.telemetry.clone();
|
||||
let http = self.http.clone();
|
||||
let metrics_enabled = cx.read(|cx| cx.global::<Settings>().telemetry());
|
||||
|
||||
executor.clone().spawn(async move {
|
||||
// Generate a pair of asymmetric encryption keys. The public key will be used by the
|
||||
// zed server to encrypt the user's access token, so that it can't be intercepted by
|
||||
@@ -1079,7 +1090,11 @@ impl Client {
.context("failed to decrypt access token")?;
|
||||
platform.activate(true);
|
||||
|
||||
telemetry.report_event("authenticate with browser", Default::default());
|
||||
telemetry.report_event(
|
||||
"authenticate with browser",
|
||||
Default::default(),
|
||||
metrics_enabled,
|
||||
);
|
||||
|
||||
Ok(Credentials {
|
||||
user_id: user_id.parse()?,
|
||||
@@ -1287,8 +1302,14 @@ impl Client {
self.telemetry.start();
|
||||
}
|
||||
|
||||
pub fn report_event(&self, kind: &str, properties: Value) {
|
||||
self.telemetry.report_event(kind, properties.clone());
|
||||
pub fn report_event(
|
||||
&self,
|
||||
kind: &str,
|
||||
properties: Value,
|
||||
telemetry_settings: TelemetrySettings,
|
||||
) {
|
||||
self.telemetry
|
||||
.report_event(kind, properties.clone(), telemetry_settings);
|
||||
}
|
||||
|
||||
pub fn telemetry_log_file_path(&self) -> Option<PathBuf> {
|
||||
|
|
@@ -10,6 +10,7 @@ use lazy_static::lazy_static;
use parking_lot::Mutex;
|
||||
use serde::Serialize;
|
||||
use serde_json::json;
|
||||
use settings::TelemetrySettings;
|
||||
use std::{
|
||||
io::Write,
|
||||
mem,
|
||||
@@ -184,11 +185,18 @@ impl Telemetry {
.detach();
|
||||
}
|
||||
|
||||
/// This method takes the entire TelemetrySettings struct in order to force client code
|
||||
/// to pull the struct out of the settings global. Do not remove!
|
||||
pub fn set_authenticated_user_info(
|
||||
self: &Arc<Self>,
|
||||
metrics_id: Option<String>,
|
||||
is_staff: bool,
|
||||
telemetry_settings: TelemetrySettings,
|
||||
) {
|
||||
if !telemetry_settings.metrics() {
|
||||
return;
|
||||
}
|
||||
|
||||
let this = self.clone();
|
||||
let mut state = self.state.lock();
|
||||
let device_id = state.device_id.clone();
|
||||
@@ -221,7 +229,16 @@ impl Telemetry {
}
|
||||
}
|
||||
|
||||
pub fn report_event(self: &Arc<Self>, kind: &str, properties: Value) {
|
||||
pub fn report_event(
|
||||
self: &Arc<Self>,
|
||||
kind: &str,
|
||||
properties: Value,
|
||||
telemetry_settings: TelemetrySettings,
|
||||
) {
|
||||
if !telemetry_settings.metrics() {
|
||||
return;
|
||||
}
|
||||
|
||||
let mut state = self.state.lock();
|
||||
let event = MixpanelEvent {
|
||||
event: kind.to_string(),
|
||||
|
|
@@ -5,6 +5,7 @@ use futures::{channel::mpsc, future, AsyncReadExt, Future, StreamExt};
use gpui::{AsyncAppContext, Entity, ImageData, ModelContext, ModelHandle, Task};
|
||||
use postage::{sink::Sink, watch};
|
||||
use rpc::proto::{RequestMessage, UsersResponse};
|
||||
use settings::Settings;
|
||||
use std::sync::{Arc, Weak};
|
||||
use util::TryFutureExt as _;
|
||||
|
||||
@@ -141,14 +142,11 @@ impl UserStore {
let fetch_metrics_id =
|
||||
client.request(proto::GetPrivateUserInfo {}).log_err();
|
||||
let (user, info) = futures::join!(fetch_user, fetch_metrics_id);
|
||||
if let Some(info) = info {
|
||||
client.telemetry.set_authenticated_user_info(
|
||||
Some(info.metrics_id.clone()),
|
||||
info.staff,
|
||||
);
|
||||
} else {
|
||||
client.telemetry.set_authenticated_user_info(None, false);
|
||||
}
|
||||
client.telemetry.set_authenticated_user_info(
|
||||
info.as_ref().map(|info| info.metrics_id.clone()),
|
||||
info.as_ref().map(|info| info.staff).unwrap_or(false),
|
||||
cx.read(|cx| cx.global::<Settings>().telemetry()),
|
||||
);
|
||||
|
||||
current_user_tx.send(user).await.ok();
|
||||
}
|
||||
|
|
@@ -3,7 +3,7 @@ authors = ["Nathan Sobo <nathan@zed.dev>"]
default-run = "collab"
|
||||
edition = "2021"
|
||||
name = "collab"
|
||||
version = "0.4.2"
|
||||
version = "0.5.3"
|
||||
|
||||
[[bin]]
|
||||
name = "collab"
|
||||
|
|
@@ -57,6 +57,7 @@ CREATE TABLE "worktrees" (
"abs_path" VARCHAR NOT NULL,
|
||||
"visible" BOOL NOT NULL,
|
||||
"scan_id" INTEGER NOT NULL,
|
||||
"is_complete" BOOL NOT NULL DEFAULT FALSE,
|
||||
"completed_scan_id" INTEGER NOT NULL,
|
||||
PRIMARY KEY(project_id, id)
|
||||
);
|
||||
|
|
@@ -1,3 +1,3 @@
ALTER TABLE worktrees
|
||||
DROP COLUMN is_complete,
|
||||
ALTER COLUMN is_complete SET DEFAULT FALSE,
|
||||
ADD COLUMN completed_scan_id INT8;
|
||||
|
|
@@ -353,6 +353,8 @@ pub struct CreateInviteFromCodeParams {
invite_code: String,
|
||||
email_address: String,
|
||||
device_id: Option<String>,
|
||||
#[serde(default)]
|
||||
added_to_mailing_list: bool,
|
||||
}
|
||||
|
||||
async fn create_invite_from_code(
|
||||
@@ -365,6 +367,7 @@ async fn create_invite_from_code(
&params.invite_code,
|
||||
&params.email_address,
|
||||
params.device_id.as_deref(),
|
||||
params.added_to_mailing_list,
|
||||
)
|
||||
.await?,
|
||||
))
|
||||
|
|
@@ -882,6 +882,7 @@ impl Database {
code: &str,
|
||||
email_address: &str,
|
||||
device_id: Option<&str>,
|
||||
added_to_mailing_list: bool,
|
||||
) -> Result<Invite> {
|
||||
self.transaction(|tx| async move {
|
||||
let existing_user = user::Entity::find()
|
||||
@@ -933,6 +934,7 @@ impl Database {
platform_windows: ActiveValue::set(false),
|
||||
platform_unknown: ActiveValue::set(true),
|
||||
device_id: ActiveValue::set(device_id.map(|device_id| device_id.into())),
|
||||
added_to_mailing_list: ActiveValue::set(added_to_mailing_list),
|
||||
..Default::default()
|
||||
})
|
||||
.on_conflict(
|
||||
|
|
@@ -567,7 +567,12 @@ async fn test_invite_codes() {
|
||||
// User 2 redeems the invite code and becomes a contact of user 1.
|
||||
let user2_invite = db
|
||||
.create_invite_from_code(&invite_code, "user2@example.com", Some("user-2-device-id"))
|
||||
.create_invite_from_code(
|
||||
&invite_code,
|
||||
"user2@example.com",
|
||||
Some("user-2-device-id"),
|
||||
true,
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
let NewUserResult {
|
||||
@@ -617,7 +622,7 @@ async fn test_invite_codes() {
|
||||
// User 3 redeems the invite code and becomes a contact of user 1.
|
||||
let user3_invite = db
|
||||
.create_invite_from_code(&invite_code, "user3@example.com", None)
|
||||
.create_invite_from_code(&invite_code, "user3@example.com", None, true)
|
||||
.await
|
||||
.unwrap();
|
||||
let NewUserResult {
|
||||
@@ -672,9 +677,14 @@ async fn test_invite_codes() {
);
|
||||
|
||||
// Trying to redeem the code for the third time results in an error.
|
||||
db.create_invite_from_code(&invite_code, "user4@example.com", Some("user-4-device-id"))
|
||||
.await
|
||||
.unwrap_err();
|
||||
db.create_invite_from_code(
|
||||
&invite_code,
|
||||
"user4@example.com",
|
||||
Some("user-4-device-id"),
|
||||
true,
|
||||
)
|
||||
.await
|
||||
.unwrap_err();
|
||||
|
||||
// Invite count can be updated after the code has been created.
|
||||
db.set_invite_count_for_user(user1, 2).await.unwrap();
|
||||
@@ -684,7 +694,12 @@ async fn test_invite_codes() {
|
||||
// User 4 can now redeem the invite code and becomes a contact of user 1.
|
||||
let user4_invite = db
|
||||
.create_invite_from_code(&invite_code, "user4@example.com", Some("user-4-device-id"))
|
||||
.create_invite_from_code(
|
||||
&invite_code,
|
||||
"user4@example.com",
|
||||
Some("user-4-device-id"),
|
||||
true,
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
let user4 = db
|
||||
@@ -739,9 +754,14 @@ async fn test_invite_codes() {
);
|
||||
|
||||
// An existing user cannot redeem invite codes.
|
||||
db.create_invite_from_code(&invite_code, "user2@example.com", Some("user-2-device-id"))
|
||||
.await
|
||||
.unwrap_err();
|
||||
db.create_invite_from_code(
|
||||
&invite_code,
|
||||
"user2@example.com",
|
||||
Some("user-2-device-id"),
|
||||
true,
|
||||
)
|
||||
.await
|
||||
.unwrap_err();
|
||||
let (_, invite_count) = db.get_invite_code_for_user(user1).await.unwrap().unwrap();
|
||||
assert_eq!(invite_count, 1);
|
||||
|
||||
@@ -763,7 +783,7 @@ async fn test_invite_codes() {
db.set_invite_count_for_user(user5, 5).await.unwrap();
|
||||
let (user5_invite_code, _) = db.get_invite_code_for_user(user5).await.unwrap().unwrap();
|
||||
let user5_invite_to_user1 = db
|
||||
.create_invite_from_code(&user5_invite_code, "user1@different.com", None)
|
||||
.create_invite_from_code(&user5_invite_code, "user1@different.com", None, true)
|
||||
.await
|
||||
.unwrap();
|
||||
let user1_2 = db
|
||||
|
|
@@ -101,10 +101,7 @@ impl TestServer {
async fn create_client(&mut self, cx: &mut TestAppContext, name: &str) -> TestClient {
|
||||
cx.update(|cx| {
|
||||
cx.set_global(HomeDir(Path::new("/tmp/").to_path_buf()));
|
||||
|
||||
let mut settings = Settings::test(cx);
|
||||
settings.projects_online_by_default = false;
|
||||
cx.set_global(settings);
|
||||
cx.set_global(Settings::test(cx));
|
||||
});
|
||||
|
||||
let http = FakeHttpClient::with_404_response();
|
||||
|
|
@@ -32,7 +32,9 @@ use std::{
sync::Arc,
|
||||
};
|
||||
use unindent::Unindent as _;
|
||||
use workspace::{item::Item, shared_screen::SharedScreen, SplitDirection, ToggleFollow, Workspace};
|
||||
use workspace::{
|
||||
item::ItemHandle as _, shared_screen::SharedScreen, SplitDirection, ToggleFollow, Workspace,
|
||||
};
|
||||
|
||||
#[ctor::ctor]
|
||||
fn init_logger() {
|
||||
@@ -5602,7 +5604,7 @@ async fn test_following(
});
|
||||
assert!(cx_b.read(|cx| editor_b2.is_focused(cx)));
|
||||
assert_eq!(
|
||||
editor_b2.read_with(cx_b, |editor, cx| editor.project_path(cx)),
|
||||
cx_b.read(|cx| editor_b2.project_path(cx)),
|
||||
Some((worktree_id, "2.txt").into())
|
||||
);
|
||||
assert_eq!(
|
||||
|
|
@@ -10,7 +10,7 @@ use collections::BTreeMap;
use fs::{FakeFs, Fs as _};
|
||||
use futures::StreamExt as _;
|
||||
use gpui::{executor::Deterministic, ModelHandle, TestAppContext};
|
||||
use language::{range_to_lsp, FakeLspAdapter, Language, LanguageConfig, PointUtf16};
|
||||
use language::{range_to_lsp, FakeLspAdapter, Language, LanguageConfig, PointUtf16, Rope};
|
||||
use lsp::FakeLanguageServer;
|
||||
use parking_lot::Mutex;
|
||||
use project::{search::SearchQuery, Project};
|
||||
@@ -18,7 +18,13 @@ use rand::{
distributions::{Alphanumeric, DistString},
|
||||
prelude::*,
|
||||
};
|
||||
use std::{env, ffi::OsStr, path::PathBuf, sync::Arc};
|
||||
use settings::Settings;
|
||||
use std::{
|
||||
env,
|
||||
ffi::OsStr,
|
||||
path::{Path, PathBuf},
|
||||
sync::Arc,
|
||||
};
|
||||
|
||||
#[gpui::test(iterations = 100)]
|
||||
async fn test_random_collaboration(
|
||||
@@ -104,6 +110,8 @@ async fn test_random_collaboration(
cx.function_name.clone(),
|
||||
);
|
||||
|
||||
client_cx.update(|cx| cx.set_global(Settings::test(cx)));
|
||||
|
||||
let op_start_signal = futures::channel::mpsc::unbounded();
|
||||
let client = server.create_client(&mut client_cx, &username).await;
|
||||
user_ids.push(client.current_user_id(&client_cx));
|
||||
@@ -173,6 +181,7 @@ async fn test_random_collaboration(
available_users.push((removed_user_id, client.username.clone()));
|
||||
client_cx.update(|cx| {
|
||||
cx.clear_globals();
|
||||
cx.set_global(Settings::test(cx));
|
||||
drop(client);
|
||||
});
|
||||
|
||||
@@ -394,6 +403,33 @@ async fn test_random_collaboration(
let guest_diff_base = guest_buffer
|
||||
.read_with(client_cx, |b, _| b.diff_base().map(ToString::to_string));
|
||||
assert_eq!(guest_diff_base, host_diff_base);
|
||||
|
||||
let host_saved_version =
|
||||
host_buffer.read_with(host_cx, |b, _| b.saved_version().clone());
|
||||
let guest_saved_version =
|
||||
guest_buffer.read_with(client_cx, |b, _| b.saved_version().clone());
|
||||
assert_eq!(guest_saved_version, host_saved_version);
|
||||
|
||||
let host_saved_version_fingerprint =
|
||||
host_buffer.read_with(host_cx, |b, _| b.saved_version_fingerprint());
|
||||
let guest_saved_version_fingerprint =
|
||||
guest_buffer.read_with(client_cx, |b, _| b.saved_version_fingerprint());
|
||||
assert_eq!(
|
||||
guest_saved_version_fingerprint,
|
||||
host_saved_version_fingerprint
|
||||
);
|
||||
|
||||
let host_saved_mtime = host_buffer.read_with(host_cx, |b, _| b.saved_mtime());
|
||||
let guest_saved_mtime = guest_buffer.read_with(client_cx, |b, _| b.saved_mtime());
|
||||
assert_eq!(guest_saved_mtime, host_saved_mtime);
|
||||
|
||||
let host_is_dirty = host_buffer.read_with(host_cx, |b, _| b.is_dirty());
|
||||
let guest_is_dirty = guest_buffer.read_with(client_cx, |b, _| b.is_dirty());
|
||||
assert_eq!(guest_is_dirty, host_is_dirty);
|
||||
|
||||
let host_has_conflict = host_buffer.read_with(host_cx, |b, _| b.has_conflict());
|
||||
let guest_has_conflict = guest_buffer.read_with(client_cx, |b, _| b.has_conflict());
|
||||
assert_eq!(guest_has_conflict, host_has_conflict);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -401,6 +437,7 @@ async fn test_random_collaboration(
for (client, mut cx) in clients {
|
||||
cx.update(|cx| {
|
||||
cx.clear_globals();
|
||||
cx.set_global(Settings::test(cx));
|
||||
drop(client);
|
||||
});
|
||||
}
|
||||
@@ -633,14 +670,7 @@ async fn randomly_mutate_git(client: &mut TestClient, rng: &Mutex<StdRng>) {
client.fs.create_dir(&git_dir_path).await.unwrap();
|
||||
}
|
||||
|
||||
let mut child_paths = client.fs.read_dir(&dir_path).await.unwrap();
|
||||
let mut child_file_paths = Vec::new();
|
||||
while let Some(child_path) = child_paths.next().await {
|
||||
let child_path = child_path.unwrap();
|
||||
if client.fs.is_file(&child_path).await {
|
||||
child_file_paths.push(child_path);
|
||||
}
|
||||
}
|
||||
let mut child_file_paths = child_file_paths(client, &dir_path).await;
|
||||
let count = rng.lock().gen_range(0..=child_file_paths.len());
|
||||
child_file_paths.shuffle(&mut *rng.lock());
|
||||
child_file_paths.truncate(count);
|
||||
@@ -664,26 +694,63 @@ async fn randomly_mutate_git(client: &mut TestClient, rng: &Mutex<StdRng>) {
}
|
||||
|
||||
async fn randomly_mutate_fs(client: &mut TestClient, rng: &Mutex<StdRng>) {
|
||||
let is_dir = rng.lock().gen::<bool>();
|
||||
let mut new_path = client
|
||||
let parent_dir_path = client
|
||||
.fs
|
||||
.directories()
|
||||
.await
|
||||
.choose(&mut *rng.lock())
|
||||
.unwrap()
|
||||
.clone();
|
||||
new_path.push(gen_file_name(rng));
|
||||
|
||||
let is_dir = rng.lock().gen::<bool>();
|
||||
if is_dir {
|
||||
log::info!("{}: creating local dir at {:?}", client.username, new_path);
|
||||
client.fs.create_dir(&new_path).await.unwrap();
|
||||
let mut dir_path = parent_dir_path.clone();
|
||||
dir_path.push(gen_file_name(rng));
|
||||
log::info!("{}: creating local dir at {:?}", client.username, dir_path);
|
||||
client.fs.create_dir(&dir_path).await.unwrap();
|
||||
} else {
|
||||
new_path.set_extension("rs");
|
||||
log::info!("{}: creating local file at {:?}", client.username, new_path);
|
||||
client
|
||||
.fs
|
||||
.create_file(&new_path, Default::default())
|
||||
.await
|
||||
.unwrap();
|
||||
let child_file_paths = child_file_paths(client, &parent_dir_path).await;
|
||||
let create_new_file = child_file_paths.is_empty() || rng.lock().gen();
|
||||
let text = Alphanumeric.sample_string(&mut *rng.lock(), 16);
|
||||
if create_new_file {
|
||||
let mut file_path = parent_dir_path.clone();
|
||||
file_path.push(gen_file_name(rng));
|
||||
file_path.set_extension("rs");
|
||||
log::info!(
|
||||
"{}: creating local file at {:?}",
|
||||
client.username,
|
||||
file_path
|
||||
);
|
||||
client
|
||||
.fs
|
||||
.create_file(&file_path, Default::default())
|
||||
.await
|
||||
.unwrap();
|
||||
log::info!(
|
||||
"{}: setting local file {:?} text to {:?}",
|
||||
client.username,
|
||||
file_path,
|
||||
text
|
||||
);
|
||||
client
|
||||
.fs
|
||||
.save(&file_path, &Rope::from(text.as_str()), fs::LineEnding::Unix)
|
||||
.await
|
||||
.unwrap();
|
||||
} else {
|
||||
let file_path = child_file_paths.choose(&mut *rng.lock()).unwrap();
|
||||
log::info!(
|
||||
"{}: setting local file {:?} text to {:?}",
|
||||
client.username,
|
||||
file_path,
|
||||
text
|
||||
);
|
||||
client
|
||||
.fs
|
||||
.save(file_path, &Rope::from(text.as_str()), fs::LineEnding::Unix)
|
||||
.await
|
||||
.unwrap();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1149,3 +1216,15 @@ fn gen_file_name(rng: &Mutex<StdRng>) -> String {
}
|
||||
name
|
||||
}
|
||||
|
||||
async fn child_file_paths(client: &TestClient, dir_path: &Path) -> Vec<PathBuf> {
|
||||
let mut child_paths = client.fs.read_dir(dir_path).await.unwrap();
|
||||
let mut child_file_paths = Vec::new();
|
||||
while let Some(child_path) = child_paths.next().await {
|
||||
let child_path = child_path.unwrap();
|
||||
if client.fs.is_file(&child_path).await {
|
||||
child_file_paths.push(child_path);
|
||||
}
|
||||
}
|
||||
child_file_paths
|
||||
}
|
||||
|
|
@@ -48,6 +48,7 @@ pub fn init(cx: &mut MutableAppContext) {
},
|
||||
|_| IncomingCallNotification::new(incoming_call.clone()),
|
||||
);
|
||||
|
||||
notification_windows.push(window_id);
|
||||
}
|
||||
}
|
||||
@@ -225,6 +226,7 @@ impl View for IncomingCallNotification {
.theme
|
||||
.incoming_call_notification
|
||||
.background;
|
||||
|
||||
Flex::row()
|
||||
.with_child(self.render_caller(cx))
|
||||
.with_child(self.render_buttons(cx))
|
||||
|
|
@@ -21,7 +21,6 @@ use language::{
use project::{DiagnosticSummary, Project, ProjectPath};
|
||||
use serde_json::json;
|
||||
use settings::Settings;
|
||||
use smallvec::SmallVec;
|
||||
use std::{
|
||||
any::{Any, TypeId},
|
||||
cmp::Ordering,
|
||||
@@ -521,12 +520,8 @@ impl Item for ProjectDiagnosticsEditor {
)
|
||||
}
|
||||
|
||||
fn project_path(&self, _: &AppContext) -> Option<project::ProjectPath> {
|
||||
None
|
||||
}
|
||||
|
||||
fn project_entry_ids(&self, cx: &AppContext) -> SmallVec<[project::ProjectEntryId; 3]> {
|
||||
self.editor.project_entry_ids(cx)
|
||||
fn for_each_project_item(&self, cx: &AppContext, f: &mut dyn FnMut(usize, &dyn project::Item)) {
|
||||
self.editor.for_each_project_item(cx, f)
|
||||
}
|
||||
|
||||
fn is_singleton(&self, _: &AppContext) -> bool {
|
||||
|
|
@@ -44,7 +44,7 @@ use gpui::{
ViewContext, ViewHandle, WeakViewHandle,
|
||||
};
|
||||
use highlight_matching_bracket::refresh_matching_bracket_highlights;
|
||||
use hover_popover::{hide_hover, HoverState};
|
||||
use hover_popover::{hide_hover, HideHover, HoverState};
|
||||
pub use items::MAX_TAB_TITLE_LEN;
|
||||
use itertools::Itertools;
|
||||
pub use language::{char_kind, CharKind};
|
||||
@@ -62,7 +62,7 @@ pub use multi_buffer::{
};
|
||||
use multi_buffer::{MultiBufferChunks, ToOffsetUtf16};
|
||||
use ordered_float::OrderedFloat;
|
||||
use project::{FormatTrigger, LocationLink, Project, ProjectPath, ProjectTransaction};
|
||||
use project::{FormatTrigger, Location, LocationLink, Project, ProjectPath, ProjectTransaction};
|
||||
use scroll::{
|
||||
autoscroll::Autoscroll, OngoingScroll, ScrollAnchor, ScrollManager, ScrollbarAutoHide,
|
||||
};
|
||||
@@ -1095,6 +1095,8 @@ impl Editor {
|
||||
let blink_manager = cx.add_model(|cx| BlinkManager::new(CURSOR_BLINK_INTERVAL, cx));
|
||||
|
||||
let soft_wrap_mode_override =
|
||||
(mode == EditorMode::SingleLine).then(|| settings::SoftWrap::None);
|
||||
let mut this = Self {
|
||||
handle: cx.weak_handle(),
|
||||
buffer: buffer.clone(),
|
||||
@@ -1110,7 +1112,7 @@ impl Editor {
select_larger_syntax_node_stack: Vec::new(),
|
||||
ime_transaction: Default::default(),
|
||||
active_diagnostics: None,
|
||||
soft_wrap_mode_override: None,
|
||||
soft_wrap_mode_override,
|
||||
get_field_editor_theme,
|
||||
project,
|
||||
focused: false,
|
||||
@@ -1328,7 +1330,7 @@ impl Editor {
}
|
||||
}
|
||||
|
||||
hide_hover(self, cx);
|
||||
hide_hover(self, &HideHover, cx);
|
||||
|
||||
if old_cursor_position.to_display_point(&display_map).row()
|
||||
!= new_cursor_position.to_display_point(&display_map).row()
|
||||
@@ -1703,7 +1705,7 @@ impl Editor {
return;
|
||||
}
|
||||
|
||||
if hide_hover(self, cx) {
|
||||
if hide_hover(self, &HideHover, cx) {
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -1744,7 +1746,7 @@ impl Editor {
for (selection, autoclose_region) in
|
||||
self.selections_with_autoclose_regions(selections, &snapshot)
|
||||
{
|
||||
if let Some(language) = snapshot.language_at(selection.head()) {
|
||||
if let Some(language) = snapshot.language_scope_at(selection.head()) {
|
||||
// Determine if the inserted text matches the opening or closing
|
||||
// bracket of any of this language's bracket pairs.
|
||||
let mut bracket_pair = None;
|
||||
@@ -1905,7 +1907,7 @@ impl Editor {
let end = selection.end;
|
||||
|
||||
let mut insert_extra_newline = false;
|
||||
if let Some(language) = buffer.language_at(start) {
|
||||
if let Some(language) = buffer.language_scope_at(start) {
|
||||
let leading_whitespace_len = buffer
|
||||
.reversed_chars_at(start)
|
||||
.take_while(|c| c.is_whitespace() && *c != '\n')
|
||||
@@ -2029,7 +2031,9 @@ impl Editor {
old_selections
|
||||
.iter()
|
||||
.map(|s| (s.start..s.end, text.clone())),
|
||||
Some(AutoindentMode::EachLine),
|
||||
Some(AutoindentMode::Block {
|
||||
original_indent_columns: Vec::new(),
|
||||
}),
|
||||
cx,
|
||||
);
|
||||
anchors
|
||||
@@ -3638,9 +3642,7 @@ impl Editor {
}
|
||||
|
||||
pub fn undo(&mut self, _: &Undo, cx: &mut ViewContext<Self>) {
|
||||
dbg!("undo");
|
||||
if let Some(tx_id) = self.buffer.update(cx, |buffer, cx| buffer.undo(cx)) {
|
||||
dbg!(tx_id);
|
||||
if let Some((selections, _)) = self.selection_history.transaction(tx_id).cloned() {
|
||||
self.change_selections(None, cx, |s| {
|
||||
s.select_anchors(selections.to_vec());
|
||||
@@ -4540,7 +4542,10 @@ impl Editor {
|
||||
// TODO: Handle selections that cross excerpts
|
||||
for selection in &mut selections {
|
||||
let language = if let Some(language) = snapshot.language_at(selection.start) {
|
||||
let start_column = snapshot.indent_size_for_line(selection.start.row).len;
|
||||
let language = if let Some(language) =
|
||||
snapshot.language_scope_at(Point::new(selection.start.row, start_column))
|
||||
{
|
||||
language
|
||||
} else {
|
||||
continue;
|
||||
@@ -4810,7 +4815,7 @@ impl Editor {
if let Some(popover) = self.hover_state.diagnostic_popover.as_ref() {
|
||||
let (group_id, jump_to) = popover.activation_info();
|
||||
if self.activate_diagnostics(group_id, cx) {
|
||||
self.change_selections(Some(Autoscroll::center()), cx, |s| {
|
||||
self.change_selections(Some(Autoscroll::fit()), cx, |s| {
|
||||
let mut new_selection = s.newest_anchor().clone();
|
||||
new_selection.collapse_to(jump_to, SelectionGoal::None);
|
||||
s.select_anchors(vec![new_selection.clone()]);
|
||||
@@ -4856,7 +4861,7 @@ impl Editor {
|
||||
if let Some((primary_range, group_id)) = group {
|
||||
if self.activate_diagnostics(group_id, cx) {
|
||||
self.change_selections(Some(Autoscroll::center()), cx, |s| {
|
||||
self.change_selections(Some(Autoscroll::fit()), cx, |s| {
|
||||
s.select(vec![Selection {
|
||||
id: selection.id,
|
||||
start: primary_range.start,
|
||||
@@ -4931,7 +4936,7 @@ impl Editor {
.dedup();
|
||||
|
||||
if let Some(hunk) = hunks.next() {
|
||||
this.change_selections(Some(Autoscroll::center()), cx, |s| {
|
||||
this.change_selections(Some(Autoscroll::fit()), cx, |s| {
|
||||
let row = hunk.start_display_row();
|
||||
let point = DisplayPoint::new(row, 0);
|
||||
s.select_display_ranges([point..point]);
|
||||
@@ -5016,25 +5021,49 @@ impl Editor {
cx: &mut ViewContext<Workspace>,
|
||||
) {
|
||||
let pane = workspace.active_pane().clone();
|
||||
for definition in definitions {
|
||||
// If there is one definition, just open it directly
|
||||
if let [definition] = definitions.as_slice() {
|
||||
let range = definition
|
||||
.target
|
||||
.range
|
||||
.to_offset(definition.target.buffer.read(cx));
|
||||
|
||||
let target_editor_handle = workspace.open_project_item(definition.target.buffer, cx);
|
||||
let target_editor_handle =
|
||||
workspace.open_project_item(definition.target.buffer.clone(), cx);
|
||||
target_editor_handle.update(cx, |target_editor, cx| {
|
||||
// When selecting a definition in a different buffer, disable the nav history
|
||||
// to avoid creating a history entry at the previous cursor location.
|
||||
if editor_handle != target_editor_handle {
|
||||
pane.update(cx, |pane, _| pane.disable_history());
|
||||
}
|
||||
target_editor.change_selections(Some(Autoscroll::center()), cx, |s| {
|
||||
target_editor.change_selections(Some(Autoscroll::fit()), cx, |s| {
|
||||
s.select_ranges([range]);
|
||||
});
|
||||
|
||||
pane.update(cx, |pane, _| pane.enable_history());
|
||||
});
|
||||
} else {
|
||||
let replica_id = editor_handle.read(cx).replica_id(cx);
|
||||
let title = definitions
|
||||
.iter()
|
||||
.find(|definition| definition.origin.is_some())
|
||||
.and_then(|definition| {
|
||||
definition.origin.as_ref().map(|origin| {
|
||||
let buffer = origin.buffer.read(cx);
|
||||
format!(
|
||||
"Definitions for {}",
|
||||
buffer
|
||||
.text_for_range(origin.range.clone())
|
||||
.collect::<String>()
|
||||
)
|
||||
})
|
||||
})
|
||||
.unwrap_or("Definitions".to_owned());
|
||||
let locations = definitions
|
||||
.into_iter()
|
||||
.map(|definition| definition.target)
|
||||
.collect();
|
||||
Self::open_locations_in_multibuffer(workspace, locations, replica_id, title, cx)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -5055,64 +5084,87 @@ impl Editor {
let project = workspace.project().clone();
|
||||
let references = project.update(cx, |project, cx| project.references(&buffer, head, cx));
|
||||
Some(cx.spawn(|workspace, mut cx| async move {
|
||||
let mut locations = references.await?;
|
||||
let locations = references.await?;
|
||||
if locations.is_empty() {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
locations.sort_by_key(|location| location.buffer.id());
|
||||
let mut locations = locations.into_iter().peekable();
|
||||
let mut ranges_to_highlight = Vec::new();
|
||||
|
||||
let excerpt_buffer = cx.add_model(|cx| {
|
||||
let mut symbol_name = None;
|
||||
let mut multibuffer = MultiBuffer::new(replica_id);
|
||||
while let Some(location) = locations.next() {
|
||||
let buffer = location.buffer.read(cx);
|
||||
let mut ranges_for_buffer = Vec::new();
|
||||
let range = location.range.to_offset(buffer);
|
||||
ranges_for_buffer.push(range.clone());
|
||||
if symbol_name.is_none() {
|
||||
symbol_name = Some(buffer.text_for_range(range).collect::<String>());
|
||||
}
|
||||
|
||||
while let Some(next_location) = locations.peek() {
|
||||
if next_location.buffer == location.buffer {
|
||||
ranges_for_buffer.push(next_location.range.to_offset(buffer));
|
||||
locations.next();
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
ranges_for_buffer.sort_by_key(|range| (range.start, Reverse(range.end)));
|
||||
ranges_to_highlight.extend(multibuffer.push_excerpts_with_context_lines(
|
||||
location.buffer.clone(),
|
||||
ranges_for_buffer,
|
||||
1,
|
||||
cx,
|
||||
));
|
||||
}
|
||||
multibuffer.with_title(format!("References to `{}`", symbol_name.unwrap()))
|
||||
});
|
||||
|
||||
workspace.update(&mut cx, |workspace, cx| {
|
||||
let editor =
|
||||
cx.add_view(|cx| Editor::for_multibuffer(excerpt_buffer, Some(project), cx));
|
||||
editor.update(cx, |editor, cx| {
|
||||
editor.highlight_background::<Self>(
|
||||
ranges_to_highlight,
|
||||
|theme| theme.editor.highlighted_line_background,
|
||||
cx,
|
||||
);
|
||||
});
|
||||
workspace.add_item(Box::new(editor), cx);
|
||||
let title = locations
|
||||
.first()
|
||||
.as_ref()
|
||||
.map(|location| {
|
||||
let buffer = location.buffer.read(cx);
|
||||
format!(
|
||||
"References to `{}`",
|
||||
buffer
|
||||
.text_for_range(location.range.clone())
|
||||
.collect::<String>()
|
||||
)
|
||||
})
|
||||
.unwrap();
|
||||
Self::open_locations_in_multibuffer(workspace, locations, replica_id, title, cx);
|
||||
});
|
||||
|
||||
Ok(())
|
||||
}))
|
||||
}
|
||||
|
||||
/// Opens a multibuffer with the given project locations in it
|
||||
pub fn open_locations_in_multibuffer(
|
||||
workspace: &mut Workspace,
|
||||
mut locations: Vec<Location>,
|
||||
replica_id: ReplicaId,
|
||||
title: String,
|
||||
cx: &mut ViewContext<Workspace>,
|
||||
) {
|
||||
// If there are multiple definitions, open them in a multibuffer
|
||||
locations.sort_by_key(|location| location.buffer.id());
|
||||
let mut locations = locations.into_iter().peekable();
|
||||
let mut ranges_to_highlight = Vec::new();
|
||||
|
||||
let excerpt_buffer = cx.add_model(|cx| {
|
||||
let mut multibuffer = MultiBuffer::new(replica_id);
|
||||
while let Some(location) = locations.next() {
|
||||
let buffer = location.buffer.read(cx);
|
||||
let mut ranges_for_buffer = Vec::new();
|
||||
let range = location.range.to_offset(buffer);
|
||||
ranges_for_buffer.push(range.clone());
|
||||
|
||||
while let Some(next_location) = locations.peek() {
|
||||
if next_location.buffer == location.buffer {
|
||||
ranges_for_buffer.push(next_location.range.to_offset(buffer));
|
||||
locations.next();
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
ranges_for_buffer.sort_by_key(|range| (range.start, Reverse(range.end)));
|
||||
ranges_to_highlight.extend(multibuffer.push_excerpts_with_context_lines(
|
||||
location.buffer.clone(),
|
||||
ranges_for_buffer,
|
||||
1,
|
||||
cx,
|
||||
))
|
||||
}
|
||||
|
||||
multibuffer.with_title(title)
|
||||
});
|
||||
|
||||
let editor = cx.add_view(|cx| {
|
||||
Editor::for_multibuffer(excerpt_buffer, Some(workspace.project().clone()), cx)
|
||||
});
|
||||
editor.update(cx, |editor, cx| {
|
||||
editor.highlight_background::<Self>(
|
||||
ranges_to_highlight,
|
||||
|theme| theme.editor.highlighted_line_background,
|
||||
cx,
|
||||
);
|
||||
});
|
||||
workspace.add_item(Box::new(editor), cx);
|
||||
}
|
||||
|
||||
pub fn rename(&mut self, _: &Rename, cx: &mut ViewContext<Self>) -> Option<Task<Result<()>>> {
|
||||
use language::ToOffset as _;
|
||||
|
||||
@@ -6098,10 +6150,11 @@ impl Editor {
let extension = Path::new(file.file_name(cx))
|
||||
.extension()
|
||||
.and_then(|e| e.to_str());
|
||||
project
|
||||
.read(cx)
|
||||
.client()
|
||||
.report_event(name, json!({ "File Extension": extension }));
|
||||
project.read(cx).client().report_event(
|
||||
name,
|
||||
json!({ "File Extension": extension }),
|
||||
cx.global::<Settings>().telemetry(),
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -6182,7 +6235,7 @@ impl View for Editor {
cx.defer(move |cx| {
|
||||
if let Some(editor) = handle.upgrade(cx) {
|
||||
editor.update(cx, |editor, cx| {
|
||||
hide_hover(editor, cx);
|
||||
hide_hover(editor, &HideHover, cx);
|
||||
hide_link_definition(editor, cx);
|
||||
})
|
||||
}
|
||||
@@ -6231,7 +6284,7 @@ impl View for Editor {
self.buffer
|
||||
.update(cx, |buffer, cx| buffer.remove_active_selections(cx));
|
||||
self.hide_context_menu(cx);
|
||||
hide_hover(self, cx);
|
||||
hide_hover(self, &HideHover, cx);
|
||||
cx.emit(Event::Blurred);
|
||||
cx.notify();
|
||||
}
|
||||
|
|
@@ -7,7 +7,7 @@ use crate::{
display_map::{BlockStyle, DisplaySnapshot, TransformBlock},
|
||||
git::{diff_hunk_to_display, DisplayDiffHunk},
|
||||
hover_popover::{
|
||||
HoverAt, HOVER_POPOVER_GAP, MIN_POPOVER_CHARACTER_WIDTH, MIN_POPOVER_LINE_HEIGHT,
|
||||
HideHover, HoverAt, HOVER_POPOVER_GAP, MIN_POPOVER_CHARACTER_WIDTH, MIN_POPOVER_LINE_HEIGHT,
|
||||
},
|
||||
link_go_to_definition::{
|
||||
GoToFetchedDefinition, GoToFetchedTypeDefinition, UpdateGoToDefinitionLink,
|
||||
@@ -114,6 +114,7 @@ impl EditorElement {
fn attach_mouse_handlers(
|
||||
view: &WeakViewHandle<Editor>,
|
||||
position_map: &Arc<PositionMap>,
|
||||
has_popovers: bool,
|
||||
visible_bounds: RectF,
|
||||
text_bounds: RectF,
|
||||
gutter_bounds: RectF,
|
||||
@@ -190,6 +191,11 @@ impl EditorElement {
}
|
||||
}
|
||||
})
|
||||
.on_move_out(move |_, cx| {
|
||||
if has_popovers {
|
||||
cx.dispatch_action(HideHover);
|
||||
}
|
||||
})
|
||||
.on_scroll({
|
||||
let position_map = position_map.clone();
|
||||
move |e, cx| {
|
||||
@@ -1870,6 +1876,7 @@ impl Element for EditorElement {
Self::attach_mouse_handlers(
|
||||
&self.view,
|
||||
&layout.position_map,
|
||||
layout.hover_popovers.is_some(),
|
||||
visible_bounds,
|
||||
text_bounds,
|
||||
gutter_bounds,
|
||||
|
|
@@ -29,12 +29,16 @@ pub struct HoverAt {
pub point: Option<DisplayPoint>,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, PartialEq)]
|
||||
pub struct HideHover;
|
||||
|
||||
actions!(editor, [Hover]);
|
||||
impl_internal_actions!(editor, [HoverAt]);
|
||||
impl_internal_actions!(editor, [HoverAt, HideHover]);
|
||||
|
||||
pub fn init(cx: &mut MutableAppContext) {
|
||||
cx.add_action(hover);
|
||||
cx.add_action(hover_at);
|
||||
cx.add_action(hide_hover);
|
||||
}
|
||||
|
||||
/// Bindable action which uses the most recent selection head to trigger a hover
|
||||
@@ -50,7 +54,7 @@ pub fn hover_at(editor: &mut Editor, action: &HoverAt, cx: &mut ViewContext<Edit
if let Some(point) = action.point {
|
||||
show_hover(editor, point, false, cx);
|
||||
} else {
|
||||
hide_hover(editor, cx);
|
||||
hide_hover(editor, &HideHover, cx);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -58,7 +62,7 @@ pub fn hover_at(editor: &mut Editor, action: &HoverAt, cx: &mut ViewContext<Edit
/// Hides the type information popup.
|
||||
/// Triggered by the `Hover` action when the cursor is not over a symbol or when the
|
||||
/// selections changed.
|
||||
pub fn hide_hover(editor: &mut Editor, cx: &mut ViewContext<Editor>) -> bool {
|
||||
pub fn hide_hover(editor: &mut Editor, _: &HideHover, cx: &mut ViewContext<Editor>) -> bool {
|
||||
let did_hide = editor.hover_state.info_popover.take().is_some()
|
||||
| editor.hover_state.diagnostic_popover.take().is_some();
|
||||
|
||||
@@ -67,6 +71,10 @@ pub fn hide_hover(editor: &mut Editor, cx: &mut ViewContext<Editor>) -> bool {
|
||||
editor.clear_background_highlights::<HoverState>(cx);
|
||||
|
||||
if did_hide {
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
did_hide
|
||||
}
|
||||
|
||||
@@ -121,7 +129,7 @@ fn show_hover(
// Hover triggered from same location as last time. Don't show again.
|
||||
return;
|
||||
} else {
|
||||
hide_hover(editor, cx);
|
||||
hide_hover(editor, &HideHover, cx);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@@ -12,12 +12,13 @@ use gpui::{
elements::*, geometry::vector::vec2f, AppContext, Entity, ModelHandle, MutableAppContext,
|
||||
RenderContext, Subscription, Task, View, ViewContext, ViewHandle, WeakViewHandle,
|
||||
};
|
||||
use language::proto::serialize_anchor as serialize_text_anchor;
|
||||
use language::{Bias, Buffer, File as _, OffsetRangeExt, Point, SelectionGoal};
|
||||
use project::{File, FormatTrigger, Project, ProjectEntryId, ProjectPath};
|
||||
use language::{
|
||||
proto::serialize_anchor as serialize_text_anchor, Bias, Buffer, OffsetRangeExt, Point,
|
||||
SelectionGoal,
|
||||
};
|
||||
use project::{FormatTrigger, Item as _, Project, ProjectPath};
|
||||
use rpc::proto::{self, update_view};
|
||||
use settings::Settings;
|
||||
use smallvec::SmallVec;
|
||||
use std::{
|
||||
borrow::Cow,
|
||||
cmp::{self, Ordering},
|
||||
@@ -554,22 +555,10 @@ impl Item for Editor {
.boxed()
|
||||
}
|
||||
|
||||
fn project_path(&self, cx: &AppContext) -> Option<ProjectPath> {
|
||||
let buffer = self.buffer.read(cx).as_singleton()?;
|
||||
let file = buffer.read(cx).file();
|
||||
File::from_dyn(file).map(|file| ProjectPath {
|
||||
worktree_id: file.worktree_id(cx),
|
||||
path: file.path().clone(),
|
||||
})
|
||||
}
|
||||
|
||||
fn project_entry_ids(&self, cx: &AppContext) -> SmallVec<[ProjectEntryId; 3]> {
|
||||
fn for_each_project_item(&self, cx: &AppContext, f: &mut dyn FnMut(usize, &dyn project::Item)) {
|
||||
self.buffer
|
||||
.read(cx)
|
||||
.files(cx)
|
||||
.into_iter()
|
||||
.filter_map(|file| File::from_dyn(Some(file))?.project_entry_id(cx))
|
||||
.collect()
|
||||
.for_each_buffer(|buffer| f(buffer.id(), buffer.read(cx)));
|
||||
}
|
||||
|
||||
fn is_singleton(&self, cx: &AppContext) -> bool {
|
||||
@@ -606,7 +595,12 @@ impl Item for Editor {
}
|
||||
|
||||
fn can_save(&self, cx: &AppContext) -> bool {
|
||||
!self.buffer().read(cx).is_singleton() || self.project_path(cx).is_some()
|
||||
let buffer = &self.buffer().read(cx);
|
||||
if let Some(buffer) = buffer.as_singleton() {
|
||||
buffer.read(cx).project_path(cx).is_some()
|
||||
} else {
|
||||
true
|
||||
}
|
||||
}
|
||||
|
||||
fn save(
|
||||
@@ -765,6 +759,7 @@ impl Item for Editor {
fn added_to_workspace(&mut self, workspace: &mut Workspace, cx: &mut ViewContext<Self>) {
|
||||
let workspace_id = workspace.database_id();
|
||||
let item_id = cx.view_id();
|
||||
self.workspace_id = Some(workspace_id);
|
||||
|
||||
fn serialize(
|
||||
buffer: ModelHandle<Buffer>,
|
||||
@@ -836,7 +831,11 @@ impl Item for Editor {
.context("Project item at stored path was not a buffer")?;
|
||||
|
||||
Ok(cx.update(|cx| {
|
||||
cx.add_view(pane, |cx| Editor::for_buffer(buffer, Some(project), cx))
|
||||
cx.add_view(pane, |cx| {
|
||||
let mut editor = Editor::for_buffer(buffer, Some(project), cx);
|
||||
editor.read_scroll_position_from_db(item_id, workspace_id, cx);
|
||||
editor
|
||||
})
|
||||
}))
|
||||
})
|
||||
})
|
||||
@@ -1159,9 +1158,11 @@ fn path_for_file<'a>(
mod tests {
|
||||
use super::*;
|
||||
use gpui::MutableAppContext;
|
||||
use language::RopeFingerprint;
|
||||
use std::{
|
||||
path::{Path, PathBuf},
|
||||
sync::Arc,
|
||||
time::SystemTime,
|
||||
};
|
||||
|
||||
#[gpui::test]
|
||||
@@ -1191,7 +1192,7 @@ mod tests {
todo!()
|
||||
}
|
||||
|
||||
fn mtime(&self) -> std::time::SystemTime {
|
||||
fn mtime(&self) -> SystemTime {
|
||||
todo!()
|
||||
}
|
||||
|
||||
@@ -1210,7 +1211,7 @@ mod tests {
_: clock::Global,
|
||||
_: project::LineEnding,
|
||||
_: &mut MutableAppContext,
|
||||
) -> gpui::Task<anyhow::Result<(clock::Global, String, std::time::SystemTime)>> {
|
||||
) -> gpui::Task<anyhow::Result<(clock::Global, RopeFingerprint, SystemTime)>> {
|
||||
todo!()
|
||||
}
|
||||
|
||||
|
|
@@ -352,6 +352,29 @@ pub fn surrounding_word(map: &DisplaySnapshot, position: DisplayPoint) -> Range<
start..end
|
||||
}
|
||||
|
||||
pub fn split_display_range_by_lines(
|
||||
map: &DisplaySnapshot,
|
||||
range: Range<DisplayPoint>,
|
||||
) -> Vec<Range<DisplayPoint>> {
|
||||
let mut result = Vec::new();
|
||||
|
||||
let mut start = range.start;
|
||||
// Loop over all the covered rows until the one containing the range end
|
||||
for row in range.start.row()..range.end.row() {
|
||||
let row_end_column = map.line_len(row);
|
||||
let end = map.clip_point(DisplayPoint::new(row, row_end_column), Bias::Left);
|
||||
if start != end {
|
||||
result.push(start..end);
|
||||
}
|
||||
start = map.clip_point(DisplayPoint::new(row + 1, 0), Bias::Left);
|
||||
}
|
||||
|
||||
// Add the final range from the start of the last end to the original range end.
|
||||
result.push(start..range.end);
|
||||
|
||||
result
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
|
@@ -4,16 +4,16 @@ pub use anchor::{Anchor, AnchorRangeExt};
use anyhow::Result;
|
||||
use clock::ReplicaId;
|
||||
use collections::{BTreeMap, Bound, HashMap, HashSet};
|
||||
use futures::{channel::mpsc, SinkExt};
|
||||
use git::diff::DiffHunk;
|
||||
use gpui::{AppContext, Entity, ModelContext, ModelHandle, Task};
|
||||
pub use language::Completion;
|
||||
use language::{
|
||||
char_kind, AutoindentMode, Buffer, BufferChunks, BufferSnapshot, CharKind, Chunk, CursorShape,
|
||||
DiagnosticEntry, File, IndentSize, Language, OffsetRangeExt, OffsetUtf16, Outline, OutlineItem,
|
||||
Point, PointUtf16, Selection, TextDimension, ToOffset as _, ToOffsetUtf16 as _, ToPoint as _,
|
||||
ToPointUtf16 as _, TransactionId, Unclipped,
|
||||
DiagnosticEntry, IndentSize, Language, LanguageScope, OffsetRangeExt, OffsetUtf16, Outline,
|
||||
OutlineItem, Point, PointUtf16, Selection, TextDimension, ToOffset as _, ToOffsetUtf16 as _,
|
||||
ToPoint as _, ToPointUtf16 as _, TransactionId, Unclipped,
|
||||
};
|
||||
use smallvec::SmallVec;
|
||||
use std::{
|
||||
borrow::Cow,
|
||||
cell::{Ref, RefCell},
|
||||
@@ -764,6 +764,63 @@ impl MultiBuffer {
None
|
||||
}
|
||||
|
||||
pub fn stream_excerpts_with_context_lines(
|
||||
&mut self,
|
||||
excerpts: Vec<(ModelHandle<Buffer>, Vec<Range<text::Anchor>>)>,
|
||||
context_line_count: u32,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) -> (Task<()>, mpsc::Receiver<Range<Anchor>>) {
|
||||
let (mut tx, rx) = mpsc::channel(256);
|
||||
let task = cx.spawn(|this, mut cx| async move {
|
||||
for (buffer, ranges) in excerpts {
|
||||
let buffer_id = buffer.id();
|
||||
let buffer_snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
|
||||
|
||||
let mut excerpt_ranges = Vec::new();
|
||||
let mut range_counts = Vec::new();
|
||||
cx.background()
|
||||
.scoped(|scope| {
|
||||
scope.spawn(async {
|
||||
let (ranges, counts) =
|
||||
build_excerpt_ranges(&buffer_snapshot, &ranges, context_line_count);
|
||||
excerpt_ranges = ranges;
|
||||
range_counts = counts;
|
||||
});
|
||||
})
|
||||
.await;
|
||||
|
||||
let mut ranges = ranges.into_iter();
|
||||
let mut range_counts = range_counts.into_iter();
|
||||
for excerpt_ranges in excerpt_ranges.chunks(100) {
|
||||
let excerpt_ids = this.update(&mut cx, |this, cx| {
|
||||
this.push_excerpts(buffer.clone(), excerpt_ranges.iter().cloned(), cx)
|
||||
});
|
||||
|
||||
for (excerpt_id, range_count) in
|
||||
excerpt_ids.into_iter().zip(range_counts.by_ref())
|
||||
{
|
||||
for range in ranges.by_ref().take(range_count) {
|
||||
let start = Anchor {
|
||||
buffer_id: Some(buffer_id),
|
||||
excerpt_id: excerpt_id.clone(),
|
||||
text_anchor: range.start,
|
||||
};
|
||||
let end = Anchor {
|
||||
buffer_id: Some(buffer_id),
|
||||
excerpt_id: excerpt_id.clone(),
|
||||
text_anchor: range.end,
|
||||
};
|
||||
if tx.send(start..end).await.is_err() {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
(task, rx)
|
||||
}
|
||||
|
||||
pub fn push_excerpts<O>(
|
||||
&mut self,
|
||||
buffer: ModelHandle<Buffer>,
|
||||
@@ -788,39 +845,8 @@ impl MultiBuffer {
{
|
||||
let buffer_id = buffer.id();
|
||||
let buffer_snapshot = buffer.read(cx).snapshot();
|
||||
let max_point = buffer_snapshot.max_point();
|
||||
|
||||
let mut range_counts = Vec::new();
|
||||
let mut excerpt_ranges = Vec::new();
|
||||
let mut range_iter = ranges
|
||||
.iter()
|
||||
.map(|range| {
|
||||
range.start.to_point(&buffer_snapshot)..range.end.to_point(&buffer_snapshot)
|
||||
})
|
||||
.peekable();
|
||||
while let Some(range) = range_iter.next() {
|
||||
let excerpt_start = Point::new(range.start.row.saturating_sub(context_line_count), 0);
|
||||
let mut excerpt_end =
|
||||
Point::new(range.end.row + 1 + context_line_count, 0).min(max_point);
|
||||
let mut ranges_in_excerpt = 1;
|
||||
|
||||
while let Some(next_range) = range_iter.peek() {
|
||||
if next_range.start.row <= excerpt_end.row + context_line_count {
|
||||
excerpt_end =
|
||||
Point::new(next_range.end.row + 1 + context_line_count, 0).min(max_point);
|
||||
ranges_in_excerpt += 1;
|
||||
range_iter.next();
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
excerpt_ranges.push(ExcerptRange {
|
||||
context: excerpt_start..excerpt_end,
|
||||
primary: Some(range),
|
||||
});
|
||||
range_counts.push(ranges_in_excerpt);
|
||||
}
|
||||
let (excerpt_ranges, range_counts) =
|
||||
build_excerpt_ranges(&buffer_snapshot, &ranges, context_line_count);
|
||||
|
||||
let excerpt_ids = self.push_excerpts(buffer, excerpt_ranges, cx);
|
||||
|
||||
@@ -1311,12 +1337,11 @@ impl MultiBuffer {
.and_then(|(buffer, offset)| buffer.read(cx).language_at(offset))
|
||||
}
|
||||
|
||||
pub fn files<'a>(&'a self, cx: &'a AppContext) -> SmallVec<[&'a Arc<dyn File>; 2]> {
|
||||
let buffers = self.buffers.borrow();
|
||||
buffers
|
||||
pub fn for_each_buffer(&self, mut f: impl FnMut(&ModelHandle<Buffer>)) {
|
||||
self.buffers
|
||||
.borrow()
|
||||
.values()
|
||||
.filter_map(|buffer| buffer.buffer.read(cx).file())
|
||||
.collect()
|
||||
.for_each(|state| f(&state.buffer))
|
||||
}
|
||||
|
||||
pub fn title<'a>(&'a self, cx: &'a AppContext) -> Cow<'a, str> {
|
||||
@@ -2666,6 +2691,11 @@ impl MultiBufferSnapshot {
.and_then(|(buffer, offset)| buffer.language_at(offset))
|
||||
}
|
||||
|
||||
pub fn language_scope_at<'a, T: ToOffset>(&'a self, point: T) -> Option<LanguageScope> {
|
||||
self.point_to_buffer_offset(point)
|
||||
.and_then(|(buffer, offset)| buffer.language_scope_at(offset))
|
||||
}
|
||||
|
||||
pub fn is_dirty(&self) -> bool {
|
||||
self.is_dirty
|
||||
}
|
||||
@@ -3605,9 +3635,51 @@ impl ToPointUtf16 for PointUtf16 {
}
|
||||
}
|
||||
|
||||
fn build_excerpt_ranges<T>(
|
||||
buffer: &BufferSnapshot,
|
||||
ranges: &[Range<T>],
|
||||
context_line_count: u32,
|
||||
) -> (Vec<ExcerptRange<Point>>, Vec<usize>)
|
||||
where
|
||||
T: text::ToPoint,
|
||||
{
|
||||
let max_point = buffer.max_point();
|
||||
let mut range_counts = Vec::new();
|
||||
let mut excerpt_ranges = Vec::new();
|
||||
let mut range_iter = ranges
|
||||
.iter()
|
||||
.map(|range| range.start.to_point(buffer)..range.end.to_point(buffer))
|
||||
.peekable();
|
||||
while let Some(range) = range_iter.next() {
|
||||
let excerpt_start = Point::new(range.start.row.saturating_sub(context_line_count), 0);
|
||||
let mut excerpt_end = Point::new(range.end.row + 1 + context_line_count, 0).min(max_point);
|
||||
let mut ranges_in_excerpt = 1;
|
||||
|
||||
while let Some(next_range) = range_iter.peek() {
|
||||
if next_range.start.row <= excerpt_end.row + context_line_count {
|
||||
excerpt_end =
|
||||
Point::new(next_range.end.row + 1 + context_line_count, 0).min(max_point);
|
||||
ranges_in_excerpt += 1;
|
||||
range_iter.next();
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
excerpt_ranges.push(ExcerptRange {
|
||||
context: excerpt_start..excerpt_end,
|
||||
primary: Some(range),
|
||||
});
|
||||
range_counts.push(ranges_in_excerpt);
|
||||
}
|
||||
|
||||
(excerpt_ranges, range_counts)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use futures::StreamExt;
|
||||
use gpui::{MutableAppContext, TestAppContext};
|
||||
use language::{Buffer, Rope};
|
||||
use rand::prelude::*;
|
||||
@@ -4012,6 +4084,44 @@ mod tests {
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_stream_excerpts_with_context_lines(cx: &mut TestAppContext) {
|
||||
let buffer = cx.add_model(|cx| Buffer::new(0, sample_text(20, 3, 'a'), cx));
|
||||
let multibuffer = cx.add_model(|_| MultiBuffer::new(0));
|
||||
let (task, anchor_ranges) = multibuffer.update(cx, |multibuffer, cx| {
|
||||
let snapshot = buffer.read(cx);
|
||||
let ranges = vec![
|
||||
snapshot.anchor_before(Point::new(3, 2))..snapshot.anchor_before(Point::new(4, 2)),
|
||||
snapshot.anchor_before(Point::new(7, 1))..snapshot.anchor_before(Point::new(7, 3)),
|
||||
snapshot.anchor_before(Point::new(15, 0))
|
||||
..snapshot.anchor_before(Point::new(15, 0)),
|
||||
];
|
||||
multibuffer.stream_excerpts_with_context_lines(vec![(buffer.clone(), ranges)], 2, cx)
|
||||
});
|
||||
|
||||
let anchor_ranges = anchor_ranges.collect::<Vec<_>>().await;
|
||||
// Ensure task is finished when stream completes.
|
||||
task.await;
|
||||
|
||||
let snapshot = multibuffer.read_with(cx, |multibuffer, cx| multibuffer.snapshot(cx));
|
||||
assert_eq!(
|
||||
snapshot.text(),
|
||||
"bbb\nccc\nddd\neee\nfff\nggg\nhhh\niii\njjj\n\nnnn\nooo\nppp\nqqq\nrrr\n"
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
anchor_ranges
|
||||
.iter()
|
||||
.map(|range| range.to_point(&snapshot))
|
||||
.collect::<Vec<_>>(),
|
||||
vec![
|
||||
Point::new(2, 2)..Point::new(3, 2),
|
||||
Point::new(6, 1)..Point::new(6, 3),
|
||||
Point::new(12, 0)..Point::new(12, 0)
|
||||
]
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
fn test_empty_multibuffer(cx: &mut MutableAppContext) {
|
||||
let multibuffer = cx.add_model(|_| MultiBuffer::new(0));
|
||||
|
|
@@ -2,9 +2,19 @@ use std::path::PathBuf;
|
||||
use db::sqlez_macros::sql;
|
||||
use db::{define_connection, query};
|
||||
|
||||
use workspace::{ItemId, WorkspaceDb, WorkspaceId};
|
||||
|
||||
define_connection!(
|
||||
// Current table shape using pseudo-rust syntax:
|
||||
// editors(
|
||||
// item_id: usize,
|
||||
// workspace_id: usize,
|
||||
// path: PathBuf,
|
||||
// scroll_top_row: usize,
|
||||
// scroll_vertical_offset: f32,
|
||||
// scroll_horizontal_offset: f32,
|
||||
// )
|
||||
pub static ref DB: EditorDb<WorkspaceDb> =
|
||||
&[sql! (
|
||||
CREATE TABLE editors(
|
||||
@@ -15,8 +25,13 @@ define_connection!(
FOREIGN KEY(workspace_id) REFERENCES workspaces(workspace_id)
|
||||
ON DELETE CASCADE
|
||||
ON UPDATE CASCADE
|
||||
) STRICT;
|
||||
)];
|
||||
) STRICT;
|
||||
),
|
||||
sql! (
|
||||
ALTER TABLE editors ADD COLUMN scroll_top_row INTEGER NOT NULL DEFAULT 0;
|
||||
ALTER TABLE editors ADD COLUMN scroll_horizontal_offset REAL NOT NULL DEFAULT 0;
|
||||
ALTER TABLE editors ADD COLUMN scroll_vertical_offset REAL NOT NULL DEFAULT 0;
|
||||
)];
|
||||
);
|
||||
|
||||
impl EditorDb {
|
||||
@@ -29,8 +44,40 @@ impl EditorDb {
|
||||
query! {
|
||||
pub async fn save_path(item_id: ItemId, workspace_id: WorkspaceId, path: PathBuf) -> Result<()> {
|
||||
INSERT OR REPLACE INTO editors(item_id, workspace_id, path)
|
||||
VALUES (?, ?, ?)
|
||||
INSERT INTO editors
|
||||
(item_id, workspace_id, path)
|
||||
VALUES
|
||||
(?1, ?2, ?3)
|
||||
ON CONFLICT DO UPDATE SET
|
||||
item_id = ?1,
|
||||
workspace_id = ?2,
|
||||
path = ?3
|
||||
}
|
||||
}
|
||||
|
||||
// Returns the scroll top row, and offset
|
||||
query! {
|
||||
pub fn get_scroll_position(item_id: ItemId, workspace_id: WorkspaceId) -> Result<Option<(u32, f32, f32)>> {
|
||||
SELECT scroll_top_row, scroll_horizontal_offset, scroll_vertical_offset
|
||||
FROM editors
|
||||
WHERE item_id = ? AND workspace_id = ?
|
||||
}
|
||||
}
|
||||
|
||||
query! {
|
||||
pub async fn save_scroll_position(
|
||||
item_id: ItemId,
|
||||
workspace_id: WorkspaceId,
|
||||
top_row: u32,
|
||||
vertical_offset: f32,
|
||||
horizontal_offset: f32
|
||||
) -> Result<()> {
|
||||
UPDATE OR IGNORE editors
|
||||
SET
|
||||
scroll_top_row = ?3,
|
||||
scroll_horizontal_offset = ?4,
|
||||
scroll_vertical_offset = ?5
|
||||
WHERE item_id = ?1 AND workspace_id = ?2
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@@ -11,11 +11,14 @@ use gpui::{
geometry::vector::{vec2f, Vector2F},
|
||||
Axis, MutableAppContext, Task, ViewContext,
|
||||
};
|
||||
use language::Bias;
|
||||
use language::{Bias, Point};
|
||||
use util::ResultExt;
|
||||
use workspace::WorkspaceId;
|
||||
|
||||
use crate::{
|
||||
display_map::{DisplaySnapshot, ToDisplayPoint},
|
||||
hover_popover::hide_hover,
|
||||
hover_popover::{hide_hover, HideHover},
|
||||
persistence::DB,
|
||||
Anchor, DisplayPoint, Editor, EditorMode, Event, MultiBufferSnapshot, ToPoint,
|
||||
};
|
||||
|
||||
@@ -170,37 +173,68 @@ impl ScrollManager {
scroll_position: Vector2F,
|
||||
map: &DisplaySnapshot,
|
||||
local: bool,
|
||||
workspace_id: Option<i64>,
|
||||
cx: &mut ViewContext<Editor>,
|
||||
) {
|
||||
let new_anchor = if scroll_position.y() <= 0. {
|
||||
ScrollAnchor {
|
||||
top_anchor: Anchor::min(),
|
||||
offset: scroll_position.max(vec2f(0., 0.)),
|
||||
}
|
||||
let (new_anchor, top_row) = if scroll_position.y() <= 0. {
|
||||
(
|
||||
ScrollAnchor {
|
||||
top_anchor: Anchor::min(),
|
||||
offset: scroll_position.max(vec2f(0., 0.)),
|
||||
},
|
||||
0,
|
||||
)
|
||||
} else {
|
||||
let scroll_top_buffer_offset =
|
||||
DisplayPoint::new(scroll_position.y() as u32, 0).to_offset(&map, Bias::Right);
|
||||
let scroll_top_buffer_point =
|
||||
DisplayPoint::new(scroll_position.y() as u32, 0).to_point(&map);
|
||||
let top_anchor = map
|
||||
.buffer_snapshot
|
||||
.anchor_at(scroll_top_buffer_offset, Bias::Right);
|
||||
.anchor_at(scroll_top_buffer_point, Bias::Right);
|
||||
|
||||
ScrollAnchor {
|
||||
top_anchor,
|
||||
offset: vec2f(
|
||||
scroll_position.x(),
|
||||
scroll_position.y() - top_anchor.to_display_point(&map).row() as f32,
|
||||
),
|
||||
}
|
||||
(
|
||||
ScrollAnchor {
|
||||
top_anchor,
|
||||
offset: vec2f(
|
||||
scroll_position.x(),
|
||||
scroll_position.y() - top_anchor.to_display_point(&map).row() as f32,
|
||||
),
|
||||
},
|
||||
scroll_top_buffer_point.row,
|
||||
)
|
||||
};
|
||||
|
||||
self.set_anchor(new_anchor, local, cx);
|
||||
self.set_anchor(new_anchor, top_row, local, workspace_id, cx);
|
||||
}
|
||||
|
||||
fn set_anchor(&mut self, anchor: ScrollAnchor, local: bool, cx: &mut ViewContext<Editor>) {
|
||||
fn set_anchor(
|
||||
&mut self,
|
||||
anchor: ScrollAnchor,
|
||||
top_row: u32,
|
||||
local: bool,
|
||||
workspace_id: Option<i64>,
|
||||
cx: &mut ViewContext<Editor>,
|
||||
) {
|
||||
self.anchor = anchor;
|
||||
cx.emit(Event::ScrollPositionChanged { local });
|
||||
self.show_scrollbar(cx);
|
||||
self.autoscroll_request.take();
|
||||
if let Some(workspace_id) = workspace_id {
|
||||
let item_id = cx.view_id();
|
||||
|
||||
cx.background()
|
||||
.spawn(async move {
|
||||
DB.save_scroll_position(
|
||||
item_id,
|
||||
workspace_id,
|
||||
top_row,
|
||||
anchor.offset.x(),
|
||||
anchor.offset.y(),
|
||||
)
|
||||
.await
|
||||
.log_err()
|
||||
})
|
||||
.detach()
|
||||
}
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
@@ -273,9 +307,14 @@ impl Editor {
) {
|
||||
let map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
|
||||
|
||||
hide_hover(self, cx);
|
||||
self.scroll_manager
|
||||
.set_scroll_position(scroll_position, &map, local, cx);
|
||||
hide_hover(self, &HideHover, cx);
|
||||
self.scroll_manager.set_scroll_position(
|
||||
scroll_position,
|
||||
&map,
|
||||
local,
|
||||
self.workspace_id,
|
||||
cx,
|
||||
);
|
||||
}
|
||||
|
||||
pub fn scroll_position(&self, cx: &mut ViewContext<Self>) -> Vector2F {
|
||||
@@ -284,8 +323,13 @@ impl Editor {
}
|
||||
|
||||
pub fn set_scroll_anchor(&mut self, scroll_anchor: ScrollAnchor, cx: &mut ViewContext<Self>) {
|
||||
hide_hover(self, cx);
|
||||
self.scroll_manager.set_anchor(scroll_anchor, true, cx);
|
||||
hide_hover(self, &HideHover, cx);
|
||||
let top_row = scroll_anchor
|
||||
.top_anchor
|
||||
.to_point(&self.buffer().read(cx).snapshot(cx))
|
||||
.row;
|
||||
self.scroll_manager
|
||||
.set_anchor(scroll_anchor, top_row, true, self.workspace_id, cx);
|
||||
}
|
||||
|
||||
pub(crate) fn set_scroll_anchor_remote(
|
||||
@@ -293,8 +337,13 @@ impl Editor {
scroll_anchor: ScrollAnchor,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) {
|
||||
hide_hover(self, cx);
|
||||
self.scroll_manager.set_anchor(scroll_anchor, false, cx);
|
||||
hide_hover(self, &HideHover, cx);
|
||||
let top_row = scroll_anchor
|
||||
.top_anchor
|
||||
.to_point(&self.buffer().read(cx).snapshot(cx))
|
||||
.row;
|
||||
self.scroll_manager
|
||||
.set_anchor(scroll_anchor, top_row, false, self.workspace_id, cx);
|
||||
}
|
||||
|
||||
pub fn scroll_screen(&mut self, amount: &ScrollAmount, cx: &mut ViewContext<Self>) {
|
||||
|
@ -345,4 +394,25 @@ impl Editor {

Ordering::Greater
}

pub fn read_scroll_position_from_db(
&mut self,
item_id: usize,
workspace_id: WorkspaceId,
cx: &mut ViewContext<Editor>,
) {
let scroll_position = DB.get_scroll_position(item_id, workspace_id);
if let Ok(Some((top_row, x, y))) = scroll_position {
let top_anchor = self
.buffer()
.read(cx)
.snapshot(cx)
.anchor_at(Point::new(top_row as u32, 0), Bias::Left);
let scroll_anchor = ScrollAnchor {
offset: Vector2F::new(x, y),
top_anchor,
};
self.set_scroll_anchor(scroll_anchor, cx);
}
}
}

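read_scroll_position_from_db is the counterpart of the DB.save_scroll_position call in set_anchor: positions are keyed by item and workspace and stored as (top_row, x, y). A rough in-memory stand-in for that save/get contract (the real store is the workspace database; everything below is a hypothetical sketch):

    use std::collections::HashMap;

    type ItemId = usize;
    type WorkspaceId = i64;

    // Sketch of the save/get contract used above, backed by a HashMap
    // instead of the real workspace database.
    #[derive(Default)]
    struct ScrollPositionStore {
        positions: HashMap<(ItemId, WorkspaceId), (u32, f32, f32)>,
    }

    impl ScrollPositionStore {
        fn save_scroll_position(&mut self, item: ItemId, workspace: WorkspaceId, top_row: u32, x: f32, y: f32) {
            self.positions.insert((item, workspace), (top_row, x, y));
        }

        fn get_scroll_position(&self, item: ItemId, workspace: WorkspaceId) -> Option<(u32, f32, f32)> {
            self.positions.get(&(item, workspace)).copied()
        }
    }

    fn main() {
        let mut store = ScrollPositionStore::default();
        store.save_scroll_position(7, 1, 42, 0.0, 3.25);
        assert_eq!(store.get_scroll_position(7, 1), Some((42, 0.0, 3.25)));
        assert_eq!(store.get_scroll_position(7, 2), None);
    }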
@ -45,7 +45,6 @@ smallvec = { version = "1.6", features = ["union"] }
smol = "1.2"
time = { version = "0.3", features = ["serde", "serde-well-known"] }
tiny-skia = "0.5"
tree-sitter = "0.20"
usvg = "0.14"
waker-fn = "1.1.0"

@ -6,7 +6,6 @@ use std::{

fn main() {
generate_dispatch_bindings();
compile_context_predicate_parser();
compile_metal_shaders();
generate_shader_bindings();
}

@ -30,17 +29,6 @@ fn generate_dispatch_bindings() {
.expect("couldn't write dispatch bindings");
}

fn compile_context_predicate_parser() {
let dir = PathBuf::from("./grammars/context-predicate/src");
let parser_c = dir.join("parser.c");

println!("cargo:rerun-if-changed={}", &parser_c.to_str().unwrap());
cc::Build::new()
.include(&dir)
.file(parser_c)
.compile("tree_sitter_context_predicate");
}

const SHADER_HEADER_PATH: &str = "./src/platform/mac/shaders/shaders.h";

fn compile_metal_shaders() {

@ -1,2 +0,0 @@
|
|||
/node_modules
|
||||
/build
|
|
@ -1,20 +0,0 @@
|
|||
[package]
|
||||
name = "tree-sitter-context-predicate"
|
||||
description = "context-predicate grammar for the tree-sitter parsing library"
|
||||
version = "0.0.1"
|
||||
keywords = ["incremental", "parsing", "context-predicate"]
|
||||
categories = ["parsing", "text-editors"]
|
||||
repository = "https://github.com/tree-sitter/tree-sitter-javascript"
|
||||
edition = "2021"
|
||||
license = "MIT"
|
||||
build = "bindings/rust/build.rs"
|
||||
include = ["bindings/rust/*", "grammar.js", "queries/*", "src/*"]
|
||||
|
||||
[lib]
|
||||
path = "bindings/rust/lib.rs"
|
||||
|
||||
[dependencies]
|
||||
tree-sitter = "0.20"
|
||||
|
||||
[build-dependencies]
|
||||
cc = "1.0"
|
|
@ -1,18 +0,0 @@
|
|||
{
|
||||
"targets": [
|
||||
{
|
||||
"target_name": "tree_sitter_context_predicate_binding",
|
||||
"include_dirs": [
|
||||
"<!(node -e \"require('nan')\")",
|
||||
"src"
|
||||
],
|
||||
"sources": [
|
||||
"src/parser.c",
|
||||
"bindings/node/binding.cc"
|
||||
],
|
||||
"cflags_c": [
|
||||
"-std=c99",
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
|
@ -1,30 +0,0 @@
|
|||
#include "nan.h"
|
||||
#include "tree_sitter/parser.h"
|
||||
#include <node.h>
|
||||
|
||||
using namespace v8;
|
||||
|
||||
extern "C" TSLanguage *tree_sitter_context_predicate();
|
||||
|
||||
namespace {
|
||||
|
||||
NAN_METHOD(New) {}
|
||||
|
||||
void Init(Local<Object> exports, Local<Object> module) {
|
||||
Local<FunctionTemplate> tpl = Nan::New<FunctionTemplate>(New);
|
||||
tpl->SetClassName(Nan::New("Language").ToLocalChecked());
|
||||
tpl->InstanceTemplate()->SetInternalFieldCount(1);
|
||||
|
||||
Local<Function> constructor = Nan::GetFunction(tpl).ToLocalChecked();
|
||||
Local<Object> instance =
|
||||
constructor->NewInstance(Nan::GetCurrentContext()).ToLocalChecked();
|
||||
Nan::SetInternalFieldPointer(instance, 0, tree_sitter_context_predicate());
|
||||
|
||||
Nan::Set(instance, Nan::New("name").ToLocalChecked(),
|
||||
Nan::New("context_predicate").ToLocalChecked());
|
||||
Nan::Set(module, Nan::New("exports").ToLocalChecked(), instance);
|
||||
}
|
||||
|
||||
NODE_MODULE(tree_sitter_context_predicate_binding, Init)
|
||||
|
||||
} // namespace
|
|
@ -1,19 +0,0 @@
|
|||
try {
|
||||
module.exports = require("../../build/Release/tree_sitter_context_predicate_binding");
|
||||
} catch (error1) {
|
||||
if (error1.code !== 'MODULE_NOT_FOUND') {
|
||||
throw error1;
|
||||
}
|
||||
try {
|
||||
module.exports = require("../../build/Debug/tree_sitter_context_predicate_binding");
|
||||
} catch (error2) {
|
||||
if (error2.code !== 'MODULE_NOT_FOUND') {
|
||||
throw error2;
|
||||
}
|
||||
throw error1
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
module.exports.nodeTypeInfo = require("../../src/node-types.json");
|
||||
} catch (_) {}
|
|
@ -1,40 +0,0 @@
|
|||
fn main() {
|
||||
let src_dir = std::path::Path::new("src");
|
||||
|
||||
let mut c_config = cc::Build::new();
|
||||
c_config.include(&src_dir);
|
||||
c_config
|
||||
.flag_if_supported("-Wno-unused-parameter")
|
||||
.flag_if_supported("-Wno-unused-but-set-variable")
|
||||
.flag_if_supported("-Wno-trigraphs");
|
||||
let parser_path = src_dir.join("parser.c");
|
||||
c_config.file(&parser_path);
|
||||
|
||||
// If your language uses an external scanner written in C,
|
||||
// then include this block of code:
|
||||
|
||||
/*
|
||||
let scanner_path = src_dir.join("scanner.c");
|
||||
c_config.file(&scanner_path);
|
||||
println!("cargo:rerun-if-changed={}", scanner_path.to_str().unwrap());
|
||||
*/
|
||||
|
||||
c_config.compile("parser");
|
||||
println!("cargo:rerun-if-changed={}", parser_path.to_str().unwrap());
|
||||
|
||||
// If your language uses an external scanner written in C++,
|
||||
// then include this block of code:
|
||||
|
||||
/*
|
||||
let mut cpp_config = cc::Build::new();
|
||||
cpp_config.cpp(true);
|
||||
cpp_config.include(&src_dir);
|
||||
cpp_config
|
||||
.flag_if_supported("-Wno-unused-parameter")
|
||||
.flag_if_supported("-Wno-unused-but-set-variable");
|
||||
let scanner_path = src_dir.join("scanner.cc");
|
||||
cpp_config.file(&scanner_path);
|
||||
cpp_config.compile("scanner");
|
||||
println!("cargo:rerun-if-changed={}", scanner_path.to_str().unwrap());
|
||||
*/
|
||||
}
|
|
@ -1,52 +0,0 @@
|
|||
//! This crate provides context_predicate language support for the [tree-sitter][] parsing library.
|
||||
//!
|
||||
//! Typically, you will use the [language][language func] function to add this language to a
|
||||
//! tree-sitter [Parser][], and then use the parser to parse some code:
|
||||
//!
|
||||
//! ```
|
||||
//! let code = "";
|
||||
//! let mut parser = tree_sitter::Parser::new();
|
||||
//! parser.set_language(tree_sitter_context_predicate::language()).expect("Error loading context_predicate grammar");
|
||||
//! let tree = parser.parse(code, None).unwrap();
|
||||
//! ```
|
||||
//!
|
||||
//! [Language]: https://docs.rs/tree-sitter/*/tree_sitter/struct.Language.html
|
||||
//! [language func]: fn.language.html
|
||||
//! [Parser]: https://docs.rs/tree-sitter/*/tree_sitter/struct.Parser.html
|
||||
//! [tree-sitter]: https://tree-sitter.github.io/
|
||||
|
||||
use tree_sitter::Language;
|
||||
|
||||
extern "C" {
|
||||
fn tree_sitter_context_predicate() -> Language;
|
||||
}
|
||||
|
||||
/// Get the tree-sitter [Language][] for this grammar.
|
||||
///
|
||||
/// [Language]: https://docs.rs/tree-sitter/*/tree_sitter/struct.Language.html
|
||||
pub fn language() -> Language {
|
||||
unsafe { tree_sitter_context_predicate() }
|
||||
}
|
||||
|
||||
/// The content of the [`node-types.json`][] file for this grammar.
|
||||
///
|
||||
/// [`node-types.json`]: https://tree-sitter.github.io/tree-sitter/using-parsers#static-node-types
|
||||
pub const NODE_TYPES: &'static str = include_str!("../../src/node-types.json");
|
||||
|
||||
// Uncomment these to include any queries that this grammar contains
|
||||
|
||||
// pub const HIGHLIGHTS_QUERY: &'static str = include_str!("../../queries/highlights.scm");
|
||||
// pub const INJECTIONS_QUERY: &'static str = include_str!("../../queries/injections.scm");
|
||||
// pub const LOCALS_QUERY: &'static str = include_str!("../../queries/locals.scm");
|
||||
// pub const TAGS_QUERY: &'static str = include_str!("../../queries/tags.scm");
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
#[test]
|
||||
fn test_can_load_grammar() {
|
||||
let mut parser = tree_sitter::Parser::new();
|
||||
parser
|
||||
.set_language(super::language())
|
||||
.expect("Error loading context_predicate language");
|
||||
}
|
||||
}
|
|
@ -1,49 +0,0 @@
|
|||
==================
|
||||
Identifiers
|
||||
==================
|
||||
|
||||
abc12
|
||||
|
||||
---
|
||||
|
||||
(source (identifier))
|
||||
|
||||
==================
|
||||
Negation
|
||||
==================
|
||||
|
||||
!abc
|
||||
|
||||
---
|
||||
|
||||
(source (not (identifier)))
|
||||
|
||||
==================
|
||||
And/Or
|
||||
==================
|
||||
|
||||
a || b && c && d
|
||||
|
||||
---
|
||||
|
||||
(source
|
||||
(or
|
||||
(identifier)
|
||||
(and
|
||||
(and (identifier) (identifier))
|
||||
(identifier))))
|
||||
|
||||
==================
|
||||
Expressions
|
||||
==================
|
||||
|
||||
a && (b == c || d != e)
|
||||
|
||||
---
|
||||
|
||||
(source
|
||||
(and
|
||||
(identifier)
|
||||
(parenthesized (or
|
||||
(equal (identifier) (identifier))
|
||||
(not_equal (identifier) (identifier))))))
|
|
@ -1,31 +0,0 @@
|
|||
module.exports = grammar({
|
||||
name: 'context_predicate',
|
||||
|
||||
rules: {
|
||||
source: $ => $._expression,
|
||||
|
||||
_expression: $ => choice(
|
||||
$.identifier,
|
||||
$.not,
|
||||
$.and,
|
||||
$.or,
|
||||
$.equal,
|
||||
$.not_equal,
|
||||
$.parenthesized,
|
||||
),
|
||||
|
||||
identifier: $ => /[A-Za-z0-9_-]+/,
|
||||
|
||||
not: $ => prec(3, seq("!", field("expression", $._expression))),
|
||||
|
||||
and: $ => prec.left(2, seq(field("left", $._expression), "&&", field("right", $._expression))),
|
||||
|
||||
or: $ => prec.left(1, seq(field("left", $._expression), "||", field("right", $._expression))),
|
||||
|
||||
equal: $ => seq(field("left", $.identifier), "==", field("right", $.identifier)),
|
||||
|
||||
not_equal: $ => seq(field("left", $.identifier), "!=", field("right", $.identifier)),
|
||||
|
||||
parenthesized: $ => seq("(", field("expression", $._expression), ")"),
|
||||
}
|
||||
});
|
|
@ -1,44 +0,0 @@
|
|||
{
|
||||
"name": "tree-sitter-context-predicate",
|
||||
"lockfileVersion": 2,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "tree-sitter-context-predicate",
|
||||
"dependencies": {
|
||||
"nan": "^2.14.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"tree-sitter-cli": "^0.19.5"
|
||||
}
|
||||
},
|
||||
"node_modules/nan": {
|
||||
"version": "2.14.2",
|
||||
"resolved": "https://registry.npmjs.org/nan/-/nan-2.14.2.tgz",
|
||||
"integrity": "sha512-M2ufzIiINKCuDfBSAUr1vWQ+vuVcA9kqx8JJUsbQi6yf1uGRyb7HfpdfUr5qLXf3B/t8dPvcjhKMmlfnP47EzQ=="
|
||||
},
|
||||
"node_modules/tree-sitter-cli": {
|
||||
"version": "0.19.5",
|
||||
"resolved": "https://registry.npmjs.org/tree-sitter-cli/-/tree-sitter-cli-0.19.5.tgz",
|
||||
"integrity": "sha512-kRzKrUAwpDN9AjA3b0tPBwT1hd8N2oQvvvHup2OEsX6mdsSMLmAvR+NSqK9fe05JrRbVvG8mbteNUQsxlMQohQ==",
|
||||
"dev": true,
|
||||
"hasInstallScript": true,
|
||||
"bin": {
|
||||
"tree-sitter": "cli.js"
|
||||
}
|
||||
}
|
||||
},
|
||||
"dependencies": {
|
||||
"nan": {
|
||||
"version": "2.14.2",
|
||||
"resolved": "https://registry.npmjs.org/nan/-/nan-2.14.2.tgz",
|
||||
"integrity": "sha512-M2ufzIiINKCuDfBSAUr1vWQ+vuVcA9kqx8JJUsbQi6yf1uGRyb7HfpdfUr5qLXf3B/t8dPvcjhKMmlfnP47EzQ=="
|
||||
},
|
||||
"tree-sitter-cli": {
|
||||
"version": "0.19.5",
|
||||
"resolved": "https://registry.npmjs.org/tree-sitter-cli/-/tree-sitter-cli-0.19.5.tgz",
|
||||
"integrity": "sha512-kRzKrUAwpDN9AjA3b0tPBwT1hd8N2oQvvvHup2OEsX6mdsSMLmAvR+NSqK9fe05JrRbVvG8mbteNUQsxlMQohQ==",
|
||||
"dev": true
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,10 +0,0 @@
|
|||
{
|
||||
"name": "tree-sitter-context-predicate",
|
||||
"main": "bindings/node",
|
||||
"devDependencies": {
|
||||
"tree-sitter-cli": "^0.19.5"
|
||||
},
|
||||
"dependencies": {
|
||||
"nan": "^2.14.0"
|
||||
}
|
||||
}
|
|
@ -1,208 +0,0 @@
|
|||
{
|
||||
"name": "context_predicate",
|
||||
"rules": {
|
||||
"source": {
|
||||
"type": "SYMBOL",
|
||||
"name": "_expression"
|
||||
},
|
||||
"_expression": {
|
||||
"type": "CHOICE",
|
||||
"members": [
|
||||
{
|
||||
"type": "SYMBOL",
|
||||
"name": "identifier"
|
||||
},
|
||||
{
|
||||
"type": "SYMBOL",
|
||||
"name": "not"
|
||||
},
|
||||
{
|
||||
"type": "SYMBOL",
|
||||
"name": "and"
|
||||
},
|
||||
{
|
||||
"type": "SYMBOL",
|
||||
"name": "or"
|
||||
},
|
||||
{
|
||||
"type": "SYMBOL",
|
||||
"name": "equal"
|
||||
},
|
||||
{
|
||||
"type": "SYMBOL",
|
||||
"name": "not_equal"
|
||||
},
|
||||
{
|
||||
"type": "SYMBOL",
|
||||
"name": "parenthesized"
|
||||
}
|
||||
]
|
||||
},
|
||||
"identifier": {
|
||||
"type": "PATTERN",
|
||||
"value": "[A-Za-z0-9_-]+"
|
||||
},
|
||||
"not": {
|
||||
"type": "PREC",
|
||||
"value": 3,
|
||||
"content": {
|
||||
"type": "SEQ",
|
||||
"members": [
|
||||
{
|
||||
"type": "STRING",
|
||||
"value": "!"
|
||||
},
|
||||
{
|
||||
"type": "FIELD",
|
||||
"name": "expression",
|
||||
"content": {
|
||||
"type": "SYMBOL",
|
||||
"name": "_expression"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"and": {
|
||||
"type": "PREC_LEFT",
|
||||
"value": 2,
|
||||
"content": {
|
||||
"type": "SEQ",
|
||||
"members": [
|
||||
{
|
||||
"type": "FIELD",
|
||||
"name": "left",
|
||||
"content": {
|
||||
"type": "SYMBOL",
|
||||
"name": "_expression"
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "STRING",
|
||||
"value": "&&"
|
||||
},
|
||||
{
|
||||
"type": "FIELD",
|
||||
"name": "right",
|
||||
"content": {
|
||||
"type": "SYMBOL",
|
||||
"name": "_expression"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"or": {
|
||||
"type": "PREC_LEFT",
|
||||
"value": 1,
|
||||
"content": {
|
||||
"type": "SEQ",
|
||||
"members": [
|
||||
{
|
||||
"type": "FIELD",
|
||||
"name": "left",
|
||||
"content": {
|
||||
"type": "SYMBOL",
|
||||
"name": "_expression"
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "STRING",
|
||||
"value": "||"
|
||||
},
|
||||
{
|
||||
"type": "FIELD",
|
||||
"name": "right",
|
||||
"content": {
|
||||
"type": "SYMBOL",
|
||||
"name": "_expression"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"equal": {
|
||||
"type": "SEQ",
|
||||
"members": [
|
||||
{
|
||||
"type": "FIELD",
|
||||
"name": "left",
|
||||
"content": {
|
||||
"type": "SYMBOL",
|
||||
"name": "identifier"
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "STRING",
|
||||
"value": "=="
|
||||
},
|
||||
{
|
||||
"type": "FIELD",
|
||||
"name": "right",
|
||||
"content": {
|
||||
"type": "SYMBOL",
|
||||
"name": "identifier"
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
"not_equal": {
|
||||
"type": "SEQ",
|
||||
"members": [
|
||||
{
|
||||
"type": "FIELD",
|
||||
"name": "left",
|
||||
"content": {
|
||||
"type": "SYMBOL",
|
||||
"name": "identifier"
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "STRING",
|
||||
"value": "!="
|
||||
},
|
||||
{
|
||||
"type": "FIELD",
|
||||
"name": "right",
|
||||
"content": {
|
||||
"type": "SYMBOL",
|
||||
"name": "identifier"
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
"parenthesized": {
|
||||
"type": "SEQ",
|
||||
"members": [
|
||||
{
|
||||
"type": "STRING",
|
||||
"value": "("
|
||||
},
|
||||
{
|
||||
"type": "FIELD",
|
||||
"name": "expression",
|
||||
"content": {
|
||||
"type": "SYMBOL",
|
||||
"name": "_expression"
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "STRING",
|
||||
"value": ")"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"extras": [
|
||||
{
|
||||
"type": "PATTERN",
|
||||
"value": "\\s"
|
||||
}
|
||||
],
|
||||
"conflicts": [],
|
||||
"precedences": [],
|
||||
"externals": [],
|
||||
"inline": [],
|
||||
"supertypes": []
|
||||
}
|
||||
|
|
@ -1,353 +0,0 @@
|
|||
[
|
||||
{
|
||||
"type": "and",
|
||||
"named": true,
|
||||
"fields": {
|
||||
"left": {
|
||||
"multiple": false,
|
||||
"required": true,
|
||||
"types": [
|
||||
{
|
||||
"type": "and",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "equal",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "identifier",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "not",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "not_equal",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "or",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "parenthesized",
|
||||
"named": true
|
||||
}
|
||||
]
|
||||
},
|
||||
"right": {
|
||||
"multiple": false,
|
||||
"required": true,
|
||||
"types": [
|
||||
{
|
||||
"type": "and",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "equal",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "identifier",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "not",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "not_equal",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "or",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "parenthesized",
|
||||
"named": true
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "equal",
|
||||
"named": true,
|
||||
"fields": {
|
||||
"left": {
|
||||
"multiple": false,
|
||||
"required": true,
|
||||
"types": [
|
||||
{
|
||||
"type": "identifier",
|
||||
"named": true
|
||||
}
|
||||
]
|
||||
},
|
||||
"right": {
|
||||
"multiple": false,
|
||||
"required": true,
|
||||
"types": [
|
||||
{
|
||||
"type": "identifier",
|
||||
"named": true
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "not",
|
||||
"named": true,
|
||||
"fields": {
|
||||
"expression": {
|
||||
"multiple": false,
|
||||
"required": true,
|
||||
"types": [
|
||||
{
|
||||
"type": "and",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "equal",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "identifier",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "not",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "not_equal",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "or",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "parenthesized",
|
||||
"named": true
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "not_equal",
|
||||
"named": true,
|
||||
"fields": {
|
||||
"left": {
|
||||
"multiple": false,
|
||||
"required": true,
|
||||
"types": [
|
||||
{
|
||||
"type": "identifier",
|
||||
"named": true
|
||||
}
|
||||
]
|
||||
},
|
||||
"right": {
|
||||
"multiple": false,
|
||||
"required": true,
|
||||
"types": [
|
||||
{
|
||||
"type": "identifier",
|
||||
"named": true
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "or",
|
||||
"named": true,
|
||||
"fields": {
|
||||
"left": {
|
||||
"multiple": false,
|
||||
"required": true,
|
||||
"types": [
|
||||
{
|
||||
"type": "and",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "equal",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "identifier",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "not",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "not_equal",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "or",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "parenthesized",
|
||||
"named": true
|
||||
}
|
||||
]
|
||||
},
|
||||
"right": {
|
||||
"multiple": false,
|
||||
"required": true,
|
||||
"types": [
|
||||
{
|
||||
"type": "and",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "equal",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "identifier",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "not",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "not_equal",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "or",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "parenthesized",
|
||||
"named": true
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "parenthesized",
|
||||
"named": true,
|
||||
"fields": {
|
||||
"expression": {
|
||||
"multiple": false,
|
||||
"required": true,
|
||||
"types": [
|
||||
{
|
||||
"type": "and",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "equal",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "identifier",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "not",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "not_equal",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "or",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "parenthesized",
|
||||
"named": true
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "source",
|
||||
"named": true,
|
||||
"fields": {},
|
||||
"children": {
|
||||
"multiple": false,
|
||||
"required": true,
|
||||
"types": [
|
||||
{
|
||||
"type": "and",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "equal",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "identifier",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "not",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "not_equal",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "or",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "parenthesized",
|
||||
"named": true
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "!",
|
||||
"named": false
|
||||
},
|
||||
{
|
||||
"type": "!=",
|
||||
"named": false
|
||||
},
|
||||
{
|
||||
"type": "&&",
|
||||
"named": false
|
||||
},
|
||||
{
|
||||
"type": "(",
|
||||
"named": false
|
||||
},
|
||||
{
|
||||
"type": ")",
|
||||
"named": false
|
||||
},
|
||||
{
|
||||
"type": "==",
|
||||
"named": false
|
||||
},
|
||||
{
|
||||
"type": "identifier",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "||",
|
||||
"named": false
|
||||
}
|
||||
]
|
|
@ -1,584 +0,0 @@
|
|||
#include <tree_sitter/parser.h>
|
||||
|
||||
#if defined(__GNUC__) || defined(__clang__)
|
||||
#pragma GCC diagnostic push
|
||||
#pragma GCC diagnostic ignored "-Wmissing-field-initializers"
|
||||
#endif
|
||||
|
||||
#define LANGUAGE_VERSION 13
|
||||
#define STATE_COUNT 18
|
||||
#define LARGE_STATE_COUNT 6
|
||||
#define SYMBOL_COUNT 17
|
||||
#define ALIAS_COUNT 0
|
||||
#define TOKEN_COUNT 9
|
||||
#define EXTERNAL_TOKEN_COUNT 0
|
||||
#define FIELD_COUNT 3
|
||||
#define MAX_ALIAS_SEQUENCE_LENGTH 3
|
||||
#define PRODUCTION_ID_COUNT 3
|
||||
|
||||
enum {
|
||||
sym_identifier = 1,
|
||||
anon_sym_BANG = 2,
|
||||
anon_sym_AMP_AMP = 3,
|
||||
anon_sym_PIPE_PIPE = 4,
|
||||
anon_sym_EQ_EQ = 5,
|
||||
anon_sym_BANG_EQ = 6,
|
||||
anon_sym_LPAREN = 7,
|
||||
anon_sym_RPAREN = 8,
|
||||
sym_source = 9,
|
||||
sym__expression = 10,
|
||||
sym_not = 11,
|
||||
sym_and = 12,
|
||||
sym_or = 13,
|
||||
sym_equal = 14,
|
||||
sym_not_equal = 15,
|
||||
sym_parenthesized = 16,
|
||||
};
|
||||
|
||||
static const char *const ts_symbol_names[] = {
|
||||
[ts_builtin_sym_end] = "end",
|
||||
[sym_identifier] = "identifier",
|
||||
[anon_sym_BANG] = "!",
|
||||
[anon_sym_AMP_AMP] = "&&",
|
||||
[anon_sym_PIPE_PIPE] = "||",
|
||||
[anon_sym_EQ_EQ] = "==",
|
||||
[anon_sym_BANG_EQ] = "!=",
|
||||
[anon_sym_LPAREN] = "(",
|
||||
[anon_sym_RPAREN] = ")",
|
||||
[sym_source] = "source",
|
||||
[sym__expression] = "_expression",
|
||||
[sym_not] = "not",
|
||||
[sym_and] = "and",
|
||||
[sym_or] = "or",
|
||||
[sym_equal] = "equal",
|
||||
[sym_not_equal] = "not_equal",
|
||||
[sym_parenthesized] = "parenthesized",
|
||||
};
|
||||
|
||||
static const TSSymbol ts_symbol_map[] = {
|
||||
[ts_builtin_sym_end] = ts_builtin_sym_end,
|
||||
[sym_identifier] = sym_identifier,
|
||||
[anon_sym_BANG] = anon_sym_BANG,
|
||||
[anon_sym_AMP_AMP] = anon_sym_AMP_AMP,
|
||||
[anon_sym_PIPE_PIPE] = anon_sym_PIPE_PIPE,
|
||||
[anon_sym_EQ_EQ] = anon_sym_EQ_EQ,
|
||||
[anon_sym_BANG_EQ] = anon_sym_BANG_EQ,
|
||||
[anon_sym_LPAREN] = anon_sym_LPAREN,
|
||||
[anon_sym_RPAREN] = anon_sym_RPAREN,
|
||||
[sym_source] = sym_source,
|
||||
[sym__expression] = sym__expression,
|
||||
[sym_not] = sym_not,
|
||||
[sym_and] = sym_and,
|
||||
[sym_or] = sym_or,
|
||||
[sym_equal] = sym_equal,
|
||||
[sym_not_equal] = sym_not_equal,
|
||||
[sym_parenthesized] = sym_parenthesized,
|
||||
};
|
||||
|
||||
static const TSSymbolMetadata ts_symbol_metadata[] = {
|
||||
[ts_builtin_sym_end] =
|
||||
{
|
||||
.visible = false,
|
||||
.named = true,
|
||||
},
|
||||
[sym_identifier] =
|
||||
{
|
||||
.visible = true,
|
||||
.named = true,
|
||||
},
|
||||
[anon_sym_BANG] =
|
||||
{
|
||||
.visible = true,
|
||||
.named = false,
|
||||
},
|
||||
[anon_sym_AMP_AMP] =
|
||||
{
|
||||
.visible = true,
|
||||
.named = false,
|
||||
},
|
||||
[anon_sym_PIPE_PIPE] =
|
||||
{
|
||||
.visible = true,
|
||||
.named = false,
|
||||
},
|
||||
[anon_sym_EQ_EQ] =
|
||||
{
|
||||
.visible = true,
|
||||
.named = false,
|
||||
},
|
||||
[anon_sym_BANG_EQ] =
|
||||
{
|
||||
.visible = true,
|
||||
.named = false,
|
||||
},
|
||||
[anon_sym_LPAREN] =
|
||||
{
|
||||
.visible = true,
|
||||
.named = false,
|
||||
},
|
||||
[anon_sym_RPAREN] =
|
||||
{
|
||||
.visible = true,
|
||||
.named = false,
|
||||
},
|
||||
[sym_source] =
|
||||
{
|
||||
.visible = true,
|
||||
.named = true,
|
||||
},
|
||||
[sym__expression] =
|
||||
{
|
||||
.visible = false,
|
||||
.named = true,
|
||||
},
|
||||
[sym_not] =
|
||||
{
|
||||
.visible = true,
|
||||
.named = true,
|
||||
},
|
||||
[sym_and] =
|
||||
{
|
||||
.visible = true,
|
||||
.named = true,
|
||||
},
|
||||
[sym_or] =
|
||||
{
|
||||
.visible = true,
|
||||
.named = true,
|
||||
},
|
||||
[sym_equal] =
|
||||
{
|
||||
.visible = true,
|
||||
.named = true,
|
||||
},
|
||||
[sym_not_equal] =
|
||||
{
|
||||
.visible = true,
|
||||
.named = true,
|
||||
},
|
||||
[sym_parenthesized] =
|
||||
{
|
||||
.visible = true,
|
||||
.named = true,
|
||||
},
|
||||
};
|
||||
|
||||
enum {
|
||||
field_expression = 1,
|
||||
field_left = 2,
|
||||
field_right = 3,
|
||||
};
|
||||
|
||||
static const char *const ts_field_names[] = {
|
||||
[0] = NULL,
|
||||
[field_expression] = "expression",
|
||||
[field_left] = "left",
|
||||
[field_right] = "right",
|
||||
};
|
||||
|
||||
static const TSFieldMapSlice ts_field_map_slices[PRODUCTION_ID_COUNT] = {
|
||||
[1] = {.index = 0, .length = 1},
|
||||
[2] = {.index = 1, .length = 2},
|
||||
};
|
||||
|
||||
static const TSFieldMapEntry ts_field_map_entries[] = {
|
||||
[0] = {field_expression, 1},
|
||||
[1] = {field_left, 0},
|
||||
{field_right, 2},
|
||||
};
|
||||
|
||||
static const TSSymbol ts_alias_sequences[PRODUCTION_ID_COUNT]
|
||||
[MAX_ALIAS_SEQUENCE_LENGTH] = {
|
||||
[0] = {0},
|
||||
};
|
||||
|
||||
static const uint16_t ts_non_terminal_alias_map[] = {
|
||||
0,
|
||||
};
|
||||
|
||||
static bool ts_lex(TSLexer *lexer, TSStateId state) {
|
||||
START_LEXER();
|
||||
eof = lexer->eof(lexer);
|
||||
switch (state) {
|
||||
case 0:
|
||||
if (eof)
|
||||
ADVANCE(7);
|
||||
if (lookahead == '!')
|
||||
ADVANCE(10);
|
||||
if (lookahead == '&')
|
||||
ADVANCE(2);
|
||||
if (lookahead == '(')
|
||||
ADVANCE(15);
|
||||
if (lookahead == ')')
|
||||
ADVANCE(16);
|
||||
if (lookahead == '=')
|
||||
ADVANCE(4);
|
||||
if (lookahead == '|')
|
||||
ADVANCE(5);
|
||||
if (lookahead == '\t' || lookahead == '\n' || lookahead == '\r' ||
|
||||
lookahead == ' ')
|
||||
SKIP(0)
|
||||
if (lookahead == '-' || ('0' <= lookahead && lookahead <= '9') ||
|
||||
('A' <= lookahead && lookahead <= 'Z') || lookahead == '_' ||
|
||||
('a' <= lookahead && lookahead <= 'z'))
|
||||
ADVANCE(8);
|
||||
END_STATE();
|
||||
case 1:
|
||||
if (lookahead == '!')
|
||||
ADVANCE(9);
|
||||
if (lookahead == '(')
|
||||
ADVANCE(15);
|
||||
if (lookahead == '\t' || lookahead == '\n' || lookahead == '\r' ||
|
||||
lookahead == ' ')
|
||||
SKIP(1)
|
||||
if (lookahead == '-' || ('0' <= lookahead && lookahead <= '9') ||
|
||||
('A' <= lookahead && lookahead <= 'Z') || lookahead == '_' ||
|
||||
('a' <= lookahead && lookahead <= 'z'))
|
||||
ADVANCE(8);
|
||||
END_STATE();
|
||||
case 2:
|
||||
if (lookahead == '&')
|
||||
ADVANCE(11);
|
||||
END_STATE();
|
||||
case 3:
|
||||
if (lookahead == '=')
|
||||
ADVANCE(14);
|
||||
END_STATE();
|
||||
case 4:
|
||||
if (lookahead == '=')
|
||||
ADVANCE(13);
|
||||
END_STATE();
|
||||
case 5:
|
||||
if (lookahead == '|')
|
||||
ADVANCE(12);
|
||||
END_STATE();
|
||||
case 6:
|
||||
if (eof)
|
||||
ADVANCE(7);
|
||||
if (lookahead == '!')
|
||||
ADVANCE(3);
|
||||
if (lookahead == '&')
|
||||
ADVANCE(2);
|
||||
if (lookahead == ')')
|
||||
ADVANCE(16);
|
||||
if (lookahead == '=')
|
||||
ADVANCE(4);
|
||||
if (lookahead == '|')
|
||||
ADVANCE(5);
|
||||
if (lookahead == '\t' || lookahead == '\n' || lookahead == '\r' ||
|
||||
lookahead == ' ')
|
||||
SKIP(6)
|
||||
END_STATE();
|
||||
case 7:
|
||||
ACCEPT_TOKEN(ts_builtin_sym_end);
|
||||
END_STATE();
|
||||
case 8:
|
||||
ACCEPT_TOKEN(sym_identifier);
|
||||
if (lookahead == '-' || ('0' <= lookahead && lookahead <= '9') ||
|
||||
('A' <= lookahead && lookahead <= 'Z') || lookahead == '_' ||
|
||||
('a' <= lookahead && lookahead <= 'z'))
|
||||
ADVANCE(8);
|
||||
END_STATE();
|
||||
case 9:
|
||||
ACCEPT_TOKEN(anon_sym_BANG);
|
||||
END_STATE();
|
||||
case 10:
|
||||
ACCEPT_TOKEN(anon_sym_BANG);
|
||||
if (lookahead == '=')
|
||||
ADVANCE(14);
|
||||
END_STATE();
|
||||
case 11:
|
||||
ACCEPT_TOKEN(anon_sym_AMP_AMP);
|
||||
END_STATE();
|
||||
case 12:
|
||||
ACCEPT_TOKEN(anon_sym_PIPE_PIPE);
|
||||
END_STATE();
|
||||
case 13:
|
||||
ACCEPT_TOKEN(anon_sym_EQ_EQ);
|
||||
END_STATE();
|
||||
case 14:
|
||||
ACCEPT_TOKEN(anon_sym_BANG_EQ);
|
||||
END_STATE();
|
||||
case 15:
|
||||
ACCEPT_TOKEN(anon_sym_LPAREN);
|
||||
END_STATE();
|
||||
case 16:
|
||||
ACCEPT_TOKEN(anon_sym_RPAREN);
|
||||
END_STATE();
|
||||
default:
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
static const TSLexMode ts_lex_modes[STATE_COUNT] = {
|
||||
[0] = {.lex_state = 0}, [1] = {.lex_state = 1}, [2] = {.lex_state = 1},
|
||||
[3] = {.lex_state = 1}, [4] = {.lex_state = 1}, [5] = {.lex_state = 1},
|
||||
[6] = {.lex_state = 6}, [7] = {.lex_state = 0}, [8] = {.lex_state = 0},
|
||||
[9] = {.lex_state = 0}, [10] = {.lex_state = 0}, [11] = {.lex_state = 0},
|
||||
[12] = {.lex_state = 0}, [13] = {.lex_state = 0}, [14] = {.lex_state = 0},
|
||||
[15] = {.lex_state = 0}, [16] = {.lex_state = 0}, [17] = {.lex_state = 0},
|
||||
};
|
||||
|
||||
static const uint16_t ts_parse_table[LARGE_STATE_COUNT][SYMBOL_COUNT] = {
|
||||
[0] =
|
||||
{
|
||||
[ts_builtin_sym_end] = ACTIONS(1),
|
||||
[sym_identifier] = ACTIONS(1),
|
||||
[anon_sym_BANG] = ACTIONS(1),
|
||||
[anon_sym_AMP_AMP] = ACTIONS(1),
|
||||
[anon_sym_PIPE_PIPE] = ACTIONS(1),
|
||||
[anon_sym_EQ_EQ] = ACTIONS(1),
|
||||
[anon_sym_BANG_EQ] = ACTIONS(1),
|
||||
[anon_sym_LPAREN] = ACTIONS(1),
|
||||
[anon_sym_RPAREN] = ACTIONS(1),
|
||||
},
|
||||
[1] =
|
||||
{
|
||||
[sym_source] = STATE(15),
|
||||
[sym__expression] = STATE(13),
|
||||
[sym_not] = STATE(13),
|
||||
[sym_and] = STATE(13),
|
||||
[sym_or] = STATE(13),
|
||||
[sym_equal] = STATE(13),
|
||||
[sym_not_equal] = STATE(13),
|
||||
[sym_parenthesized] = STATE(13),
|
||||
[sym_identifier] = ACTIONS(3),
|
||||
[anon_sym_BANG] = ACTIONS(5),
|
||||
[anon_sym_LPAREN] = ACTIONS(7),
|
||||
},
|
||||
[2] =
|
||||
{
|
||||
[sym__expression] = STATE(7),
|
||||
[sym_not] = STATE(7),
|
||||
[sym_and] = STATE(7),
|
||||
[sym_or] = STATE(7),
|
||||
[sym_equal] = STATE(7),
|
||||
[sym_not_equal] = STATE(7),
|
||||
[sym_parenthesized] = STATE(7),
|
||||
[sym_identifier] = ACTIONS(3),
|
||||
[anon_sym_BANG] = ACTIONS(5),
|
||||
[anon_sym_LPAREN] = ACTIONS(7),
|
||||
},
|
||||
[3] =
|
||||
{
|
||||
[sym__expression] = STATE(14),
|
||||
[sym_not] = STATE(14),
|
||||
[sym_and] = STATE(14),
|
||||
[sym_or] = STATE(14),
|
||||
[sym_equal] = STATE(14),
|
||||
[sym_not_equal] = STATE(14),
|
||||
[sym_parenthesized] = STATE(14),
|
||||
[sym_identifier] = ACTIONS(3),
|
||||
[anon_sym_BANG] = ACTIONS(5),
|
||||
[anon_sym_LPAREN] = ACTIONS(7),
|
||||
},
|
||||
[4] =
|
||||
{
|
||||
[sym__expression] = STATE(11),
|
||||
[sym_not] = STATE(11),
|
||||
[sym_and] = STATE(11),
|
||||
[sym_or] = STATE(11),
|
||||
[sym_equal] = STATE(11),
|
||||
[sym_not_equal] = STATE(11),
|
||||
[sym_parenthesized] = STATE(11),
|
||||
[sym_identifier] = ACTIONS(3),
|
||||
[anon_sym_BANG] = ACTIONS(5),
|
||||
[anon_sym_LPAREN] = ACTIONS(7),
|
||||
},
|
||||
[5] =
|
||||
{
|
||||
[sym__expression] = STATE(12),
|
||||
[sym_not] = STATE(12),
|
||||
[sym_and] = STATE(12),
|
||||
[sym_or] = STATE(12),
|
||||
[sym_equal] = STATE(12),
|
||||
[sym_not_equal] = STATE(12),
|
||||
[sym_parenthesized] = STATE(12),
|
||||
[sym_identifier] = ACTIONS(3),
|
||||
[anon_sym_BANG] = ACTIONS(5),
|
||||
[anon_sym_LPAREN] = ACTIONS(7),
|
||||
},
|
||||
};
|
||||
|
||||
static const uint16_t ts_small_parse_table[] = {
|
||||
[0] = 3,
|
||||
ACTIONS(11),
|
||||
1,
|
||||
anon_sym_EQ_EQ,
|
||||
ACTIONS(13),
|
||||
1,
|
||||
anon_sym_BANG_EQ,
|
||||
ACTIONS(9),
|
||||
4,
|
||||
ts_builtin_sym_end,
|
||||
anon_sym_AMP_AMP,
|
||||
anon_sym_PIPE_PIPE,
|
||||
anon_sym_RPAREN,
|
||||
[13] = 1,
|
||||
ACTIONS(15),
|
||||
4,
|
||||
ts_builtin_sym_end,
|
||||
anon_sym_AMP_AMP,
|
||||
anon_sym_PIPE_PIPE,
|
||||
anon_sym_RPAREN,
|
||||
[20] = 1,
|
||||
ACTIONS(17),
|
||||
4,
|
||||
ts_builtin_sym_end,
|
||||
anon_sym_AMP_AMP,
|
||||
anon_sym_PIPE_PIPE,
|
||||
anon_sym_RPAREN,
|
||||
[27] = 1,
|
||||
ACTIONS(19),
|
||||
4,
|
||||
ts_builtin_sym_end,
|
||||
anon_sym_AMP_AMP,
|
||||
anon_sym_PIPE_PIPE,
|
||||
anon_sym_RPAREN,
|
||||
[34] = 1,
|
||||
ACTIONS(21),
|
||||
4,
|
||||
ts_builtin_sym_end,
|
||||
anon_sym_AMP_AMP,
|
||||
anon_sym_PIPE_PIPE,
|
||||
anon_sym_RPAREN,
|
||||
[41] = 1,
|
||||
ACTIONS(23),
|
||||
4,
|
||||
ts_builtin_sym_end,
|
||||
anon_sym_AMP_AMP,
|
||||
anon_sym_PIPE_PIPE,
|
||||
anon_sym_RPAREN,
|
||||
[48] = 2,
|
||||
ACTIONS(27),
|
||||
1,
|
||||
anon_sym_AMP_AMP,
|
||||
ACTIONS(25),
|
||||
3,
|
||||
ts_builtin_sym_end,
|
||||
anon_sym_PIPE_PIPE,
|
||||
anon_sym_RPAREN,
|
||||
[57] = 3,
|
||||
ACTIONS(27),
|
||||
1,
|
||||
anon_sym_AMP_AMP,
|
||||
ACTIONS(29),
|
||||
1,
|
||||
ts_builtin_sym_end,
|
||||
ACTIONS(31),
|
||||
1,
|
||||
anon_sym_PIPE_PIPE,
|
||||
[67] = 3,
|
||||
ACTIONS(27),
|
||||
1,
|
||||
anon_sym_AMP_AMP,
|
||||
ACTIONS(31),
|
||||
1,
|
||||
anon_sym_PIPE_PIPE,
|
||||
ACTIONS(33),
|
||||
1,
|
||||
anon_sym_RPAREN,
|
||||
[77] = 1,
|
||||
ACTIONS(35),
|
||||
1,
|
||||
ts_builtin_sym_end,
|
||||
[81] = 1,
|
||||
ACTIONS(37),
|
||||
1,
|
||||
sym_identifier,
|
||||
[85] = 1,
|
||||
ACTIONS(39),
|
||||
1,
|
||||
sym_identifier,
|
||||
};
|
||||
|
||||
static const uint32_t ts_small_parse_table_map[] = {
|
||||
[SMALL_STATE(6)] = 0, [SMALL_STATE(7)] = 13, [SMALL_STATE(8)] = 20,
|
||||
[SMALL_STATE(9)] = 27, [SMALL_STATE(10)] = 34, [SMALL_STATE(11)] = 41,
|
||||
[SMALL_STATE(12)] = 48, [SMALL_STATE(13)] = 57, [SMALL_STATE(14)] = 67,
|
||||
[SMALL_STATE(15)] = 77, [SMALL_STATE(16)] = 81, [SMALL_STATE(17)] = 85,
|
||||
};
|
||||
|
||||
static const TSParseActionEntry ts_parse_actions[] = {
|
||||
[0] = {.entry = {.count = 0, .reusable = false}},
|
||||
[1] = {.entry = {.count = 1, .reusable = false}},
|
||||
RECOVER(),
|
||||
[3] = {.entry = {.count = 1, .reusable = true}},
|
||||
SHIFT(6),
|
||||
[5] = {.entry = {.count = 1, .reusable = true}},
|
||||
SHIFT(2),
|
||||
[7] = {.entry = {.count = 1, .reusable = true}},
|
||||
SHIFT(3),
|
||||
[9] = {.entry = {.count = 1, .reusable = true}},
|
||||
REDUCE(sym__expression, 1),
|
||||
[11] = {.entry = {.count = 1, .reusable = true}},
|
||||
SHIFT(16),
|
||||
[13] = {.entry = {.count = 1, .reusable = true}},
|
||||
SHIFT(17),
|
||||
[15] = {.entry = {.count = 1, .reusable = true}},
|
||||
REDUCE(sym_not, 2, .production_id = 1),
|
||||
[17] = {.entry = {.count = 1, .reusable = true}},
|
||||
REDUCE(sym_equal, 3, .production_id = 2),
|
||||
[19] = {.entry = {.count = 1, .reusable = true}},
|
||||
REDUCE(sym_not_equal, 3, .production_id = 2),
|
||||
[21] = {.entry = {.count = 1, .reusable = true}},
|
||||
REDUCE(sym_parenthesized, 3, .production_id = 1),
|
||||
[23] = {.entry = {.count = 1, .reusable = true}},
|
||||
REDUCE(sym_and, 3, .production_id = 2),
|
||||
[25] = {.entry = {.count = 1, .reusable = true}},
|
||||
REDUCE(sym_or, 3, .production_id = 2),
|
||||
[27] = {.entry = {.count = 1, .reusable = true}},
|
||||
SHIFT(4),
|
||||
[29] = {.entry = {.count = 1, .reusable = true}},
|
||||
REDUCE(sym_source, 1),
|
||||
[31] = {.entry = {.count = 1, .reusable = true}},
|
||||
SHIFT(5),
|
||||
[33] = {.entry = {.count = 1, .reusable = true}},
|
||||
SHIFT(10),
|
||||
[35] = {.entry = {.count = 1, .reusable = true}},
|
||||
ACCEPT_INPUT(),
|
||||
[37] = {.entry = {.count = 1, .reusable = true}},
|
||||
SHIFT(8),
|
||||
[39] = {.entry = {.count = 1, .reusable = true}},
|
||||
SHIFT(9),
|
||||
};
|
||||
|
||||
#ifdef __cplusplus
|
||||
extern "C" {
|
||||
#endif
|
||||
#ifdef _WIN32
|
||||
#define extern __declspec(dllexport)
|
||||
#endif
|
||||
|
||||
extern const TSLanguage *tree_sitter_context_predicate(void) {
|
||||
static const TSLanguage language = {
|
||||
.version = LANGUAGE_VERSION,
|
||||
.symbol_count = SYMBOL_COUNT,
|
||||
.alias_count = ALIAS_COUNT,
|
||||
.token_count = TOKEN_COUNT,
|
||||
.external_token_count = EXTERNAL_TOKEN_COUNT,
|
||||
.state_count = STATE_COUNT,
|
||||
.large_state_count = LARGE_STATE_COUNT,
|
||||
.production_id_count = PRODUCTION_ID_COUNT,
|
||||
.field_count = FIELD_COUNT,
|
||||
.max_alias_sequence_length = MAX_ALIAS_SEQUENCE_LENGTH,
|
||||
.parse_table = &ts_parse_table[0][0],
|
||||
.small_parse_table = ts_small_parse_table,
|
||||
.small_parse_table_map = ts_small_parse_table_map,
|
||||
.parse_actions = ts_parse_actions,
|
||||
.symbol_names = ts_symbol_names,
|
||||
.field_names = ts_field_names,
|
||||
.field_map_slices = ts_field_map_slices,
|
||||
.field_map_entries = ts_field_map_entries,
|
||||
.symbol_metadata = ts_symbol_metadata,
|
||||
.public_symbol_map = ts_symbol_map,
|
||||
.alias_map = ts_non_terminal_alias_map,
|
||||
.alias_sequences = &ts_alias_sequences[0][0],
|
||||
.lex_modes = ts_lex_modes,
|
||||
.lex_fn = ts_lex,
|
||||
};
|
||||
return &language;
|
||||
}
|
||||
#ifdef __cplusplus
|
||||
}
|
||||
#endif
|
|
@ -1,223 +0,0 @@
|
|||
#ifndef TREE_SITTER_PARSER_H_
|
||||
#define TREE_SITTER_PARSER_H_
|
||||
|
||||
#ifdef __cplusplus
|
||||
extern "C" {
|
||||
#endif
|
||||
|
||||
#include <stdbool.h>
|
||||
#include <stdint.h>
|
||||
#include <stdlib.h>
|
||||
|
||||
#define ts_builtin_sym_error ((TSSymbol)-1)
|
||||
#define ts_builtin_sym_end 0
|
||||
#define TREE_SITTER_SERIALIZATION_BUFFER_SIZE 1024
|
||||
|
||||
typedef uint16_t TSStateId;
|
||||
|
||||
#ifndef TREE_SITTER_API_H_
|
||||
typedef uint16_t TSSymbol;
|
||||
typedef uint16_t TSFieldId;
|
||||
typedef struct TSLanguage TSLanguage;
|
||||
#endif
|
||||
|
||||
typedef struct {
|
||||
TSFieldId field_id;
|
||||
uint8_t child_index;
|
||||
bool inherited;
|
||||
} TSFieldMapEntry;
|
||||
|
||||
typedef struct {
|
||||
uint16_t index;
|
||||
uint16_t length;
|
||||
} TSFieldMapSlice;
|
||||
|
||||
typedef struct {
|
||||
bool visible;
|
||||
bool named;
|
||||
bool supertype;
|
||||
} TSSymbolMetadata;
|
||||
|
||||
typedef struct TSLexer TSLexer;
|
||||
|
||||
struct TSLexer {
|
||||
int32_t lookahead;
|
||||
TSSymbol result_symbol;
|
||||
void (*advance)(TSLexer *, bool);
|
||||
void (*mark_end)(TSLexer *);
|
||||
uint32_t (*get_column)(TSLexer *);
|
||||
bool (*is_at_included_range_start)(const TSLexer *);
|
||||
bool (*eof)(const TSLexer *);
|
||||
};
|
||||
|
||||
typedef enum {
|
||||
TSParseActionTypeShift,
|
||||
TSParseActionTypeReduce,
|
||||
TSParseActionTypeAccept,
|
||||
TSParseActionTypeRecover,
|
||||
} TSParseActionType;
|
||||
|
||||
typedef union {
|
||||
struct {
|
||||
uint8_t type;
|
||||
TSStateId state;
|
||||
bool extra;
|
||||
bool repetition;
|
||||
} shift;
|
||||
struct {
|
||||
uint8_t type;
|
||||
uint8_t child_count;
|
||||
TSSymbol symbol;
|
||||
int16_t dynamic_precedence;
|
||||
uint16_t production_id;
|
||||
} reduce;
|
||||
uint8_t type;
|
||||
} TSParseAction;
|
||||
|
||||
typedef struct {
|
||||
uint16_t lex_state;
|
||||
uint16_t external_lex_state;
|
||||
} TSLexMode;
|
||||
|
||||
typedef union {
|
||||
TSParseAction action;
|
||||
struct {
|
||||
uint8_t count;
|
||||
bool reusable;
|
||||
} entry;
|
||||
} TSParseActionEntry;
|
||||
|
||||
struct TSLanguage {
|
||||
uint32_t version;
|
||||
uint32_t symbol_count;
|
||||
uint32_t alias_count;
|
||||
uint32_t token_count;
|
||||
uint32_t external_token_count;
|
||||
uint32_t state_count;
|
||||
uint32_t large_state_count;
|
||||
uint32_t production_id_count;
|
||||
uint32_t field_count;
|
||||
uint16_t max_alias_sequence_length;
|
||||
const uint16_t *parse_table;
|
||||
const uint16_t *small_parse_table;
|
||||
const uint32_t *small_parse_table_map;
|
||||
const TSParseActionEntry *parse_actions;
|
||||
const char * const *symbol_names;
|
||||
const char * const *field_names;
|
||||
const TSFieldMapSlice *field_map_slices;
|
||||
const TSFieldMapEntry *field_map_entries;
|
||||
const TSSymbolMetadata *symbol_metadata;
|
||||
const TSSymbol *public_symbol_map;
|
||||
const uint16_t *alias_map;
|
||||
const TSSymbol *alias_sequences;
|
||||
const TSLexMode *lex_modes;
|
||||
bool (*lex_fn)(TSLexer *, TSStateId);
|
||||
bool (*keyword_lex_fn)(TSLexer *, TSStateId);
|
||||
TSSymbol keyword_capture_token;
|
||||
struct {
|
||||
const bool *states;
|
||||
const TSSymbol *symbol_map;
|
||||
void *(*create)(void);
|
||||
void (*destroy)(void *);
|
||||
bool (*scan)(void *, TSLexer *, const bool *symbol_whitelist);
|
||||
unsigned (*serialize)(void *, char *);
|
||||
void (*deserialize)(void *, const char *, unsigned);
|
||||
} external_scanner;
|
||||
};
|
||||
|
||||
/*
|
||||
* Lexer Macros
|
||||
*/
|
||||
|
||||
#define START_LEXER() \
|
||||
bool result = false; \
|
||||
bool skip = false; \
|
||||
bool eof = false; \
|
||||
int32_t lookahead; \
|
||||
goto start; \
|
||||
next_state: \
|
||||
lexer->advance(lexer, skip); \
|
||||
start: \
|
||||
skip = false; \
|
||||
lookahead = lexer->lookahead;
|
||||
|
||||
#define ADVANCE(state_value) \
|
||||
{ \
|
||||
state = state_value; \
|
||||
goto next_state; \
|
||||
}
|
||||
|
||||
#define SKIP(state_value) \
|
||||
{ \
|
||||
skip = true; \
|
||||
state = state_value; \
|
||||
goto next_state; \
|
||||
}
|
||||
|
||||
#define ACCEPT_TOKEN(symbol_value) \
|
||||
result = true; \
|
||||
lexer->result_symbol = symbol_value; \
|
||||
lexer->mark_end(lexer);
|
||||
|
||||
#define END_STATE() return result;
|
||||
|
||||
/*
|
||||
* Parse Table Macros
|
||||
*/
|
||||
|
||||
#define SMALL_STATE(id) id - LARGE_STATE_COUNT
|
||||
|
||||
#define STATE(id) id
|
||||
|
||||
#define ACTIONS(id) id
|
||||
|
||||
#define SHIFT(state_value) \
|
||||
{{ \
|
||||
.shift = { \
|
||||
.type = TSParseActionTypeShift, \
|
||||
.state = state_value \
|
||||
} \
|
||||
}}
|
||||
|
||||
#define SHIFT_REPEAT(state_value) \
|
||||
{{ \
|
||||
.shift = { \
|
||||
.type = TSParseActionTypeShift, \
|
||||
.state = state_value, \
|
||||
.repetition = true \
|
||||
} \
|
||||
}}
|
||||
|
||||
#define SHIFT_EXTRA() \
|
||||
{{ \
|
||||
.shift = { \
|
||||
.type = TSParseActionTypeShift, \
|
||||
.extra = true \
|
||||
} \
|
||||
}}
|
||||
|
||||
#define REDUCE(symbol_val, child_count_val, ...) \
|
||||
{{ \
|
||||
.reduce = { \
|
||||
.type = TSParseActionTypeReduce, \
|
||||
.symbol = symbol_val, \
|
||||
.child_count = child_count_val, \
|
||||
__VA_ARGS__ \
|
||||
}, \
|
||||
}}
|
||||
|
||||
#define RECOVER() \
|
||||
{{ \
|
||||
.type = TSParseActionTypeRecover \
|
||||
}}
|
||||
|
||||
#define ACCEPT_INPUT() \
|
||||
{{ \
|
||||
.type = TSParseActionTypeAccept \
|
||||
}}
|
||||
|
||||
#ifdef __cplusplus
|
||||
}
|
||||
#endif
|
||||
|
||||
#endif // TREE_SITTER_PARSER_H_
|
|
@ -989,7 +989,7 @@ impl MutableAppContext {
window.toggle_full_screen();
}

fn prompt(
pub fn prompt(
&self,
window_id: usize,
level: PromptLevel,

@ -1349,21 +1349,24 @@ impl MutableAppContext {

/// Return keystrokes that would dispatch the given action closest to the focused view, if there are any.
pub(crate) fn keystrokes_for_action(
&self,
&mut self,
window_id: usize,
dispatch_path: &[usize],
view_stack: &[usize],
action: &dyn Action,
) -> Option<SmallVec<[Keystroke; 2]>> {
for view_id in dispatch_path.iter().rev() {
self.keystroke_matcher.contexts.clear();
for view_id in view_stack.iter().rev() {
let view = self
.cx
.views
.get(&(window_id, *view_id))
.expect("view in responder chain does not exist");
let keymap_context = view.keymap_context(self.as_ref());
self.keystroke_matcher
.contexts
.push(view.keymap_context(self.as_ref()));
let keystrokes = self
.keystroke_matcher
.keystrokes_for_action(action, &keymap_context);
.keystrokes_for_action(action, &self.keystroke_matcher.contexts);
if keystrokes.is_some() {
return keystrokes;
}

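keystrokes_for_action now clears the matcher's contexts, walks the view stack from the focused view outward, and pushes each view's keymap context before querying the matcher, so the lookup can see ancestor contexts as well. In simplified form (plain string sets instead of gpui's KeymapContext):

    use std::collections::HashSet;

    // Simplified stand-in for a view's keymap context.
    type Context = HashSet<&'static str>;

    // Walk the view stack innermost-first and accumulate contexts, the
    // same shape the matcher is handed above.
    fn contexts_for_stack(view_stack: &[(usize, Context)]) -> Vec<Context> {
        let mut contexts = Vec::new();
        for (_id, context) in view_stack.iter().rev() {
            contexts.push(context.clone());
        }
        contexts
    }

    fn main() {
        let stack = vec![
            (1, HashSet::from(["Workspace"])),
            (2, HashSet::from(["Pane"])),
            (3, HashSet::from(["Editor"])),
        ];
        let contexts = contexts_for_stack(&stack);
        // Index 0 is the focused (innermost) view; ancestors follow.
        assert!(contexts[0].contains("Editor"));
        assert!(contexts[2].contains("Workspace"));
    }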
@ -6681,7 +6684,7 @@ mod tests {
|
|||
view_3
|
||||
});
|
||||
|
||||
// This keymap's only binding dispatches an action on view 2 because that view will have
|
||||
// This binding only dispatches an action on view 2 because that view will have
|
||||
// "a" and "b" in its context, but not "c".
|
||||
cx.add_bindings(vec![Binding::new(
|
||||
"a",
|
||||
|
@ -6691,16 +6694,31 @@ mod tests {
|
|||
|
||||
cx.add_bindings(vec![Binding::new("b", Action("b".to_string()), None)]);
|
||||
|
||||
// This binding only dispatches an action on views 2 and 3, because they have
|
||||
// a parent view with a in its context
|
||||
cx.add_bindings(vec![Binding::new(
|
||||
"c",
|
||||
Action("c".to_string()),
|
||||
Some("b > c"),
|
||||
)]);
|
||||
|
||||
// This binding only dispatches an action on view 2, because they have
|
||||
// a parent view with a in its context
|
||||
cx.add_bindings(vec![Binding::new(
|
||||
"d",
|
||||
Action("d".to_string()),
|
||||
Some("a && !b > b"),
|
||||
)]);
|
||||
|
||||
let actions = Rc::new(RefCell::new(Vec::new()));
|
||||
cx.add_action({
|
||||
let actions = actions.clone();
|
||||
move |view: &mut View, action: &Action, cx| {
|
||||
if action.0 == "a" {
|
||||
actions.borrow_mut().push(format!("{} a", view.id));
|
||||
} else {
|
||||
actions
|
||||
.borrow_mut()
|
||||
.push(format!("{} {}", view.id, action.0));
|
||||
actions
|
||||
.borrow_mut()
|
||||
.push(format!("{} {}", view.id, action.0));
|
||||
|
||||
if action.0 == "b" {
|
||||
cx.propagate_action();
|
||||
}
|
||||
}
|
||||
|
@ -6714,14 +6732,20 @@ mod tests {
|
|||
});
|
||||
|
||||
cx.dispatch_keystroke(window_id, &Keystroke::parse("a").unwrap());
|
||||
|
||||
assert_eq!(&*actions.borrow(), &["2 a"]);
|
||||
|
||||
actions.borrow_mut().clear();
|
||||
|
||||
cx.dispatch_keystroke(window_id, &Keystroke::parse("b").unwrap());
|
||||
|
||||
assert_eq!(&*actions.borrow(), &["3 b", "2 b", "1 b", "global b"]);
|
||||
actions.borrow_mut().clear();
|
||||
|
||||
cx.dispatch_keystroke(window_id, &Keystroke::parse("c").unwrap());
|
||||
assert_eq!(&*actions.borrow(), &["3 c"]);
|
||||
actions.borrow_mut().clear();
|
||||
|
||||
cx.dispatch_keystroke(window_id, &Keystroke::parse("d").unwrap());
|
||||
assert_eq!(&*actions.borrow(), &["2 d"]);
|
||||
actions.borrow_mut().clear();
|
||||
}
|
||||
|
||||
#[crate::test(self)]
|
||||
|
|
|
@ -7,7 +7,7 @@ use crate::{
platform::CursorStyle,
scene::{
CursorRegion, HandlerSet, MouseClick, MouseDown, MouseDownOut, MouseDrag, MouseHover,
MouseMove, MouseScrollWheel, MouseUp, MouseUpOut,
MouseMove, MouseMoveOut, MouseScrollWheel, MouseUp, MouseUpOut,
},
DebugContext, Element, ElementBox, EventContext, LayoutContext, MeasurementContext,
MouseButton, MouseRegion, MouseState, PaintContext, RenderContext, SizeConstraint, View,

@ -82,6 +82,14 @@ impl<Tag> MouseEventHandler<Tag> {
self
}

pub fn on_move_out(
mut self,
handler: impl Fn(MouseMoveOut, &mut EventContext) + 'static,
) -> Self {
self.handlers = self.handlers.on_move_out(handler);
self
}

pub fn on_down(
mut self,
button: MouseButton,

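on_move_out follows the same builder shape as the surrounding handlers: take self by value, record a boxed callback, and return self so registrations chain. A stripped-down sketch of that pattern (placeholder types, not the real gpui ones):

    // Minimal builder illustrating chained handler registration.
    struct MouseMoveOut; // placeholder event payload

    #[derive(Default)]
    struct Handlers {
        move_out: Vec<Box<dyn Fn(&MouseMoveOut)>>,
    }

    #[derive(Default)]
    struct EventHandler {
        handlers: Handlers,
    }

    impl EventHandler {
        fn on_move_out(mut self, handler: impl Fn(&MouseMoveOut) + 'static) -> Self {
            self.handlers.move_out.push(Box::new(handler));
            self
        }

        fn emit_move_out(&self, event: &MouseMoveOut) {
            for handler in &self.handlers.move_out {
                handler(event);
            }
        }
    }

    fn main() {
        let region = EventHandler::default().on_move_out(|_event| println!("pointer left the region"));
        region.emit_move_out(&MouseMoveOut);
    }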
@ -25,6 +25,7 @@ pub struct KeyPressed {
impl_actions!(gpui, [KeyPressed]);

pub struct KeymapMatcher {
pub contexts: Vec<KeymapContext>,
pending_views: HashMap<usize, KeymapContext>,
pending_keystrokes: Vec<Keystroke>,
keymap: Keymap,

@ -33,6 +34,7 @@ pub struct KeymapMatcher {
impl KeymapMatcher {
pub fn new(keymap: Keymap) -> Self {
Self {
contexts: Vec::new(),
pending_views: Default::default(),
pending_keystrokes: Vec::new(),
keymap,

@ -70,7 +72,7 @@ impl KeymapMatcher {
pub fn push_keystroke(
&mut self,
keystroke: Keystroke,
dispatch_path: Vec<(usize, KeymapContext)>,
mut dispatch_path: Vec<(usize, KeymapContext)>,
) -> MatchResult {
let mut any_pending = false;
let mut matched_bindings: Vec<(usize, Box<dyn Action>)> = Vec::new();

@ -78,7 +80,11 @@ impl KeymapMatcher {
let first_keystroke = self.pending_keystrokes.is_empty();
self.pending_keystrokes.push(keystroke.clone());

for (view_id, context) in dispatch_path {
self.contexts.clear();
self.contexts
.extend(dispatch_path.iter_mut().map(|e| std::mem::take(&mut e.1)));

for (i, (view_id, _)) in dispatch_path.into_iter().enumerate() {
// Don't require pending view entry if there are no pending keystrokes
if !first_keystroke && !self.pending_views.contains_key(&view_id) {
continue;

@ -87,14 +93,15 @@ impl KeymapMatcher {
// If there is a previous view context, invalidate that view if it
// has changed
if let Some(previous_view_context) = self.pending_views.remove(&view_id) {
if previous_view_context != context {
if previous_view_context != self.contexts[i] {
continue;
}
}

// Find the bindings which map the pending keystrokes and current context
for binding in self.keymap.bindings().iter().rev() {
match binding.match_keys_and_context(&self.pending_keystrokes, &context) {
match binding.match_keys_and_context(&self.pending_keystrokes, &self.contexts[i..])
{
BindingMatchResult::Complete(mut action) => {
// Swap in keystroke for special KeyPressed action
if action.name() == "KeyPressed" && action.namespace() == "gpui" {

@ -105,7 +112,7 @@ impl KeymapMatcher {
matched_bindings.push((view_id, action))
}
BindingMatchResult::Partial => {
self.pending_views.insert(view_id, context.clone());
self.pending_views.insert(view_id, self.contexts[i].clone());
any_pending = true;
}
_ => {}

@ -129,13 +136,13 @@ impl KeymapMatcher {
pub fn keystrokes_for_action(
&self,
action: &dyn Action,
context: &KeymapContext,
contexts: &[KeymapContext],
) -> Option<SmallVec<[Keystroke; 2]>> {
self.keymap
.bindings()
.iter()
.rev()
.find_map(|binding| binding.keystrokes_for_action(action, context))
.find_map(|binding| binding.keystrokes_for_action(action, contexts))
}
}

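push_keystroke now snapshots every context on the dispatch path into self.contexts and matches each binding against the suffix &self.contexts[i..], so a binding evaluated for one view can also inspect that view's ancestors. A toy version of that suffix matching (plain string sets instead of KeymapContext; only an immediate-parent requirement is modeled):

    use std::collections::HashSet;

    type Context = HashSet<&'static str>;

    // A binding "matches" a view when its own requirement holds for that
    // view's context and its (optional) parent requirement holds for the
    // context immediately above it in the stack.
    fn matches(own: &str, parent: Option<&str>, contexts: &[Context]) -> bool {
        let Some(first) = contexts.first() else { return false };
        first.contains(own)
            && parent.map_or(true, |p| {
                contexts.get(1).map_or(false, |ancestor| ancestor.contains(p))
            })
    }

    fn main() {
        // contexts[0] is the deepest (focused) view, ancestors follow.
        let contexts = vec![
            HashSet::from(["Editor"]),
            HashSet::from(["Pane"]),
            HashSet::from(["Workspace"]),
        ];
        assert!(matches("Editor", Some("Pane"), &contexts));
        assert!(!matches("Editor", Some("Workspace"), &contexts));
        // Checking the suffix that starts at the Pane sees Workspace as its parent.
        assert!(matches("Pane", Some("Workspace"), &contexts[1..]));
    }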
@ -349,27 +356,70 @@ mod tests {
|
|||
}
|
||||
|
||||
#[test]
|
||||
fn test_context_predicate_eval() -> Result<()> {
|
||||
let predicate = KeymapContextPredicate::parse("a && b || c == d")?;
|
||||
fn test_context_predicate_eval() {
|
||||
let predicate = KeymapContextPredicate::parse("a && b || c == d").unwrap();
|
||||
|
||||
let mut context = KeymapContext::default();
|
||||
context.set.insert("a".into());
|
||||
assert!(!predicate.eval(&context));
|
||||
assert!(!predicate.eval(&[context]));
|
||||
|
||||
let mut context = KeymapContext::default();
|
||||
context.set.insert("a".into());
|
||||
context.set.insert("b".into());
|
||||
assert!(predicate.eval(&context));
|
||||
assert!(predicate.eval(&[context]));
|
||||
|
||||
context.set.remove("b");
|
||||
let mut context = KeymapContext::default();
|
||||
context.set.insert("a".into());
|
||||
context.map.insert("c".into(), "x".into());
|
||||
assert!(!predicate.eval(&context));
|
||||
assert!(!predicate.eval(&[context]));
|
||||
|
||||
let mut context = KeymapContext::default();
|
||||
context.set.insert("a".into());
|
||||
context.map.insert("c".into(), "d".into());
|
||||
assert!(predicate.eval(&context));
|
||||
assert!(predicate.eval(&[context]));
|
||||
|
||||
let predicate = KeymapContextPredicate::parse("!a")?;
|
||||
assert!(predicate.eval(&KeymapContext::default()));
|
||||
let predicate = KeymapContextPredicate::parse("!a").unwrap();
|
||||
assert!(predicate.eval(&[KeymapContext::default()]));
|
||||
}
|
||||
|
||||
Ok(())
|
||||
#[test]
|
||||
fn test_context_child_predicate_eval() {
|
||||
let predicate = KeymapContextPredicate::parse("a && b > c").unwrap();
|
||||
let contexts = [
|
||||
context_set(&["e", "f"]),
|
||||
context_set(&["c", "d"]), // match this context
|
||||
context_set(&["a", "b"]),
|
||||
];
|
||||
|
||||
assert!(!predicate.eval(&contexts[0..]));
|
||||
assert!(predicate.eval(&contexts[1..]));
|
||||
assert!(!predicate.eval(&contexts[2..]));
|
||||
|
||||
let predicate = KeymapContextPredicate::parse("a && b > c && !d > e").unwrap();
|
||||
let contexts = [
|
||||
context_set(&["f"]),
|
||||
context_set(&["e"]), // only match this context
|
||||
context_set(&["c"]),
|
||||
context_set(&["a", "b"]),
|
||||
context_set(&["e"]),
|
||||
context_set(&["c", "d"]),
|
||||
context_set(&["a", "b"]),
|
||||
];
|
||||
|
||||
assert!(!predicate.eval(&contexts[0..]));
|
||||
assert!(predicate.eval(&contexts[1..]));
|
||||
assert!(!predicate.eval(&contexts[2..]));
|
||||
assert!(!predicate.eval(&contexts[3..]));
|
||||
assert!(!predicate.eval(&contexts[4..]));
|
||||
assert!(!predicate.eval(&contexts[5..]));
|
||||
assert!(!predicate.eval(&contexts[6..]));
|
||||
|
||||
fn context_set(names: &[&str]) -> KeymapContext {
|
||||
KeymapContext {
|
||||
set: names.iter().copied().map(str::to_string).collect(),
|
||||
..Default::default()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
|
|
@ -41,24 +41,24 @@ impl Binding {
})
}

fn match_context(&self, context: &KeymapContext) -> bool {
fn match_context(&self, contexts: &[KeymapContext]) -> bool {
self.context_predicate
.as_ref()
.map(|predicate| predicate.eval(context))
.map(|predicate| predicate.eval(contexts))
.unwrap_or(true)
}

pub fn match_keys_and_context(
&self,
pending_keystrokes: &Vec<Keystroke>,
context: &KeymapContext,
contexts: &[KeymapContext],
) -> BindingMatchResult {
if self
.keystrokes
.as_ref()
.map(|keystrokes| keystrokes.starts_with(&pending_keystrokes))
.unwrap_or(true)
&& self.match_context(context)
&& self.match_context(contexts)
{
// If the binding is completed, push it onto the matches list
if self

@ -79,9 +79,9 @@ impl Binding {
pub fn keystrokes_for_action(
&self,
action: &dyn Action,
context: &KeymapContext,
contexts: &[KeymapContext],
) -> Option<SmallVec<[Keystroke; 2]>> {
if self.action.eq(action) && self.match_context(context) {
if self.action.eq(action) && self.match_context(contexts) {
self.keystrokes.clone()
} else {
None

@ -43,7 +43,7 @@ impl Keymap {
pub(crate) fn add_bindings<T: IntoIterator<Item = Binding>>(&mut self, bindings: T) {
for binding in bindings {
self.binding_indices_by_action_type
.entry(binding.action().type_id())
.entry(binding.action().as_any().type_id())
.or_default()
.push(self.bindings.len());
self.bindings.push(binding);

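The one-line change above swaps binding.action().type_id() for binding.action().as_any().type_id(). Calling type_id() directly on a &dyn Action resolves through the blanket Any impl for the trait-object type, so every action reports the same TypeId and the per-action-type index collapses into a single bucket; going through as_any() reaches the concrete type. A self-contained illustration (Action here is a stand-in trait, not gpui's):

    use std::any::{Any, TypeId};

    // Stand-in trait with the same as_any escape hatch; not gpui's Action.
    trait Action: 'static {
        fn as_any(&self) -> &dyn Any;
    }

    struct CopyText;
    struct PasteText;

    impl Action for CopyText {
        fn as_any(&self) -> &dyn Any {
            self
        }
    }
    impl Action for PasteText {
        fn as_any(&self) -> &dyn Any {
            self
        }
    }

    fn main() {
        let actions: Vec<Box<dyn Action>> = vec![Box::new(CopyText), Box::new(PasteText)];
        let copy: &dyn Action = &*actions[0];
        let paste: &dyn Action = &*actions[1];

        // Without as_any, both calls resolve statically against the
        // trait-object type, so two different actions report the same TypeId.
        assert_eq!(copy.type_id(), paste.type_id());
        assert_ne!(copy.type_id(), TypeId::of::<CopyText>());

        // Through as_any() the call reaches the concrete type.
        assert_eq!(copy.as_any().type_id(), TypeId::of::<CopyText>());
        assert_ne!(copy.as_any().type_id(), paste.as_any().type_id());
    }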
@@ -1,11 +1,5 @@
use anyhow::anyhow;

use anyhow::{anyhow, Result};
use collections::{HashMap, HashSet};
use tree_sitter::{Language, Node, Parser};

extern "C" {
fn tree_sitter_context_predicate() -> Language;
}

#[derive(Clone, Debug, Default, Eq, PartialEq)]
pub struct KeymapContext {
@@ -29,80 +23,25 @@ pub enum KeymapContextPredicate {
Identifier(String),
Equal(String, String),
NotEqual(String, String),
Child(Box<KeymapContextPredicate>, Box<KeymapContextPredicate>),
Not(Box<KeymapContextPredicate>),
And(Box<KeymapContextPredicate>, Box<KeymapContextPredicate>),
Or(Box<KeymapContextPredicate>, Box<KeymapContextPredicate>),
}

impl KeymapContextPredicate {
pub fn parse(source: &str) -> anyhow::Result<Self> {
let mut parser = Parser::new();
let language = unsafe { tree_sitter_context_predicate() };
parser.set_language(language).unwrap();
let source = source.as_bytes();
let tree = parser.parse(source, None).unwrap();
Self::from_node(tree.root_node(), source)
}

fn from_node(node: Node, source: &[u8]) -> anyhow::Result<Self> {
let parse_error = "error parsing context predicate";
let kind = node.kind();

match kind {
"source" => Self::from_node(node.child(0).ok_or_else(|| anyhow!(parse_error))?, source),
"identifier" => Ok(Self::Identifier(node.utf8_text(source)?.into())),
"not" => {
let child = Self::from_node(
node.child_by_field_name("expression")
.ok_or_else(|| anyhow!(parse_error))?,
source,
)?;
Ok(Self::Not(Box::new(child)))
}
"and" | "or" => {
let left = Box::new(Self::from_node(
node.child_by_field_name("left")
.ok_or_else(|| anyhow!(parse_error))?,
source,
)?);
let right = Box::new(Self::from_node(
node.child_by_field_name("right")
.ok_or_else(|| anyhow!(parse_error))?,
source,
)?);
if kind == "and" {
Ok(Self::And(left, right))
} else {
Ok(Self::Or(left, right))
}
}
"equal" | "not_equal" => {
let left = node
.child_by_field_name("left")
.ok_or_else(|| anyhow!(parse_error))?
.utf8_text(source)?
.into();
let right = node
.child_by_field_name("right")
.ok_or_else(|| anyhow!(parse_error))?
.utf8_text(source)?
.into();
if kind == "equal" {
Ok(Self::Equal(left, right))
} else {
Ok(Self::NotEqual(left, right))
}
}
"parenthesized" => Self::from_node(
node.child_by_field_name("expression")
.ok_or_else(|| anyhow!(parse_error))?,
source,
),
_ => Err(anyhow!(parse_error)),
pub fn parse(source: &str) -> Result<Self> {
let source = Self::skip_whitespace(source);
let (predicate, rest) = Self::parse_expr(source, 0)?;
if let Some(next) = rest.chars().next() {
Err(anyhow!("unexpected character {next:?}"))
} else {
Ok(predicate)
}
}

pub fn eval(&self, context: &KeymapContext) -> bool {
pub fn eval(&self, contexts: &[KeymapContext]) -> bool {
let Some(context) = contexts.first() else { return false };
match self {
Self::Identifier(name) => context.set.contains(name.as_str()),
Self::Equal(left, right) => context
@@ -115,9 +54,245 @@ impl KeymapContextPredicate {
.get(left)
.map(|value| value != right)
.unwrap_or(true),
Self::Not(pred) => !pred.eval(context),
Self::And(left, right) => left.eval(context) && right.eval(context),
Self::Or(left, right) => left.eval(context) || right.eval(context),
Self::Not(pred) => !pred.eval(contexts),
Self::Child(parent, child) => parent.eval(&contexts[1..]) && child.eval(contexts),
Self::And(left, right) => left.eval(contexts) && right.eval(contexts),
Self::Or(left, right) => left.eval(contexts) || right.eval(contexts),
}
}

fn parse_expr(mut source: &str, min_precedence: u32) -> anyhow::Result<(Self, &str)> {
type Op =
fn(KeymapContextPredicate, KeymapContextPredicate) -> Result<KeymapContextPredicate>;

let (mut predicate, rest) = Self::parse_primary(source)?;
source = rest;

'parse: loop {
for (operator, precedence, constructor) in [
(">", PRECEDENCE_CHILD, Self::new_child as Op),
("&&", PRECEDENCE_AND, Self::new_and as Op),
("||", PRECEDENCE_OR, Self::new_or as Op),
("==", PRECEDENCE_EQ, Self::new_eq as Op),
("!=", PRECEDENCE_EQ, Self::new_neq as Op),
] {
if source.starts_with(operator) && precedence >= min_precedence {
source = Self::skip_whitespace(&source[operator.len()..]);
let (right, rest) = Self::parse_expr(source, precedence + 1)?;
predicate = constructor(predicate, right)?;
source = rest;
continue 'parse;
}
}
break;
}

Ok((predicate, source))
}

fn parse_primary(mut source: &str) -> anyhow::Result<(Self, &str)> {
let next = source
.chars()
.next()
.ok_or_else(|| anyhow!("unexpected eof"))?;
match next {
'(' => {
source = Self::skip_whitespace(&source[1..]);
let (predicate, rest) = Self::parse_expr(source, 0)?;
if rest.starts_with(')') {
source = Self::skip_whitespace(&rest[1..]);
Ok((predicate, source))
} else {
Err(anyhow!("expected a ')'"))
}
}
'!' => {
let source = Self::skip_whitespace(&source[1..]);
let (predicate, source) = Self::parse_expr(&source, PRECEDENCE_NOT)?;
Ok((KeymapContextPredicate::Not(Box::new(predicate)), source))
}
_ if next.is_alphanumeric() || next == '_' => {
let len = source
.find(|c: char| !(c.is_alphanumeric() || c == '_'))
.unwrap_or(source.len());
let (identifier, rest) = source.split_at(len);
source = Self::skip_whitespace(rest);
Ok((
KeymapContextPredicate::Identifier(identifier.into()),
source,
))
}
_ => Err(anyhow!("unexpected character {next:?}")),
}
}

fn skip_whitespace(source: &str) -> &str {
let len = source
.find(|c: char| !c.is_whitespace())
.unwrap_or(source.len());
&source[len..]
}

fn new_or(self, other: Self) -> Result<Self> {
Ok(Self::Or(Box::new(self), Box::new(other)))
}

fn new_and(self, other: Self) -> Result<Self> {
Ok(Self::And(Box::new(self), Box::new(other)))
}

fn new_child(self, other: Self) -> Result<Self> {
Ok(Self::Child(Box::new(self), Box::new(other)))
}

fn new_eq(self, other: Self) -> Result<Self> {
if let (Self::Identifier(left), Self::Identifier(right)) = (self, other) {
Ok(Self::Equal(left, right))
} else {
Err(anyhow!("operands must be identifiers"))
}
}

fn new_neq(self, other: Self) -> Result<Self> {
if let (Self::Identifier(left), Self::Identifier(right)) = (self, other) {
Ok(Self::NotEqual(left, right))
} else {
Err(anyhow!("operands must be identifiers"))
}
}
}

const PRECEDENCE_CHILD: u32 = 1;
const PRECEDENCE_OR: u32 = 2;
const PRECEDENCE_AND: u32 = 3;
const PRECEDENCE_EQ: u32 = 4;
const PRECEDENCE_NOT: u32 = 5;

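A quick worked example (not in the diff) of how these precedence constants group a mixed expression: "&&" (precedence 3) binds tighter than the child operator ">" (precedence 1), so "a && b > c" parses as Child(And(a, b), c), i.e. "a && b" must hold in a parent context with "c" on top of the stack. With the enum variants imported as in the tests below:

    let predicate = KeymapContextPredicate::parse("a && b > c").unwrap();
    assert_eq!(
        predicate,
        Child(
            Box::new(And(
                Box::new(Identifier("a".into())),
                Box::new(Identifier("b".into())),
            )),
            Box::new(Identifier("c".into())),
        )
    );
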
#[cfg(test)]
mod tests {
use super::KeymapContextPredicate::{self, *};

#[test]
fn test_parse_identifiers() {
// Identifiers
assert_eq!(
KeymapContextPredicate::parse("abc12").unwrap(),
Identifier("abc12".into())
);
assert_eq!(
KeymapContextPredicate::parse("_1a").unwrap(),
Identifier("_1a".into())
);
}

#[test]
fn test_parse_negations() {
assert_eq!(
KeymapContextPredicate::parse("!abc").unwrap(),
Not(Box::new(Identifier("abc".into())))
);
assert_eq!(
KeymapContextPredicate::parse(" ! ! abc").unwrap(),
Not(Box::new(Not(Box::new(Identifier("abc".into())))))
);
}

#[test]
fn test_parse_equality_operators() {
assert_eq!(
KeymapContextPredicate::parse("a == b").unwrap(),
Equal("a".into(), "b".into())
);
assert_eq!(
KeymapContextPredicate::parse("c!=d").unwrap(),
NotEqual("c".into(), "d".into())
);
assert_eq!(
KeymapContextPredicate::parse("c == !d")
.unwrap_err()
.to_string(),
"operands must be identifiers"
);
}

#[test]
fn test_parse_boolean_operators() {
assert_eq!(
KeymapContextPredicate::parse("a || b").unwrap(),
Or(
Box::new(Identifier("a".into())),
Box::new(Identifier("b".into()))
)
);
assert_eq!(
KeymapContextPredicate::parse("a || !b && c").unwrap(),
Or(
Box::new(Identifier("a".into())),
Box::new(And(
Box::new(Not(Box::new(Identifier("b".into())))),
Box::new(Identifier("c".into()))
))
)
);
assert_eq!(
KeymapContextPredicate::parse("a && b || c&&d").unwrap(),
Or(
Box::new(And(
Box::new(Identifier("a".into())),
Box::new(Identifier("b".into()))
)),
Box::new(And(
Box::new(Identifier("c".into())),
Box::new(Identifier("d".into()))
))
)
);
assert_eq!(
KeymapContextPredicate::parse("a == b && c || d == e && f").unwrap(),
Or(
Box::new(And(
Box::new(Equal("a".into(), "b".into())),
Box::new(Identifier("c".into()))
)),
Box::new(And(
Box::new(Equal("d".into(), "e".into())),
Box::new(Identifier("f".into()))
))
)
);
assert_eq!(
KeymapContextPredicate::parse("a && b && c && d").unwrap(),
And(
Box::new(And(
Box::new(And(
Box::new(Identifier("a".into())),
Box::new(Identifier("b".into()))
)),
Box::new(Identifier("c".into())),
)),
Box::new(Identifier("d".into()))
),
);
}

#[test]
fn test_parse_parenthesized_expressions() {
assert_eq!(
KeymapContextPredicate::parse("a && (b == c || d != e)").unwrap(),
And(
Box::new(Identifier("a".into())),
Box::new(Or(
Box::new(Equal("b".into(), "c".into())),
Box::new(NotEqual("d".into(), "e".into())),
)),
),
);
assert_eq!(
KeymapContextPredicate::parse(" ( a || b ) ").unwrap(),
Or(
Box::new(Identifier("a".into())),
Box::new(Identifier("b".into())),
)
);
}
}

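One more illustration (a sketch reusing the context_set helper from the keymap_matcher tests earlier in this diff): eval treats element 0 of the slice as the innermost context and later elements as its ancestors, so a child predicate only matches when the whole stack lines up.

    let contexts = [context_set(&["menu"]), context_set(&["workspace"])];
    let predicate = KeymapContextPredicate::parse("workspace > menu").unwrap();
    assert!(predicate.eval(&contexts));
    // With the stack truncated there is no "menu" context on top, so it fails.
    assert!(!predicate.eval(&contexts[1..]));
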
@@ -179,7 +179,7 @@ impl Default for Appearance {
}
}

#[derive(Copy, Clone, Debug)]
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum WindowKind {
Normal,
PopUp,
@@ -178,6 +178,21 @@ impl MouseMovedEvent {
}
}

#[derive(Clone, Copy, Debug, Default)]
pub struct MouseExitedEvent {
pub position: Vector2F,
pub pressed_button: Option<MouseButton>,
pub modifiers: Modifiers,
}

impl Deref for MouseExitedEvent {
type Target = Modifiers;

fn deref(&self) -> &Self::Target {
&self.modifiers
}
}

#[derive(Clone, Debug)]
pub enum Event {
KeyDown(KeyDownEvent),
@@ -186,6 +201,7 @@ pub enum Event {
MouseDown(MouseButtonEvent),
MouseUp(MouseButtonEvent),
MouseMoved(MouseMovedEvent),
MouseExited(MouseExitedEvent),
ScrollWheel(ScrollWheelEvent),
}

@@ -197,6 +213,7 @@ impl Event {
Event::ModifiersChanged { .. } => None,
Event::MouseDown(event) | Event::MouseUp(event) => Some(event.position),
Event::MouseMoved(event) => Some(event.position),
Event::MouseExited(event) => Some(event.position),
Event::ScrollWheel(event) => Some(event.position),
}
}
@@ -3,7 +3,7 @@ use crate::{
keymap_matcher::Keystroke,
platform::{Event, NavigationDirection},
KeyDownEvent, KeyUpEvent, Modifiers, ModifiersChangedEvent, MouseButton, MouseButtonEvent,
MouseMovedEvent, ScrollDelta, ScrollWheelEvent, TouchPhase,
MouseExitedEvent, MouseMovedEvent, ScrollDelta, ScrollWheelEvent, TouchPhase,
};
use cocoa::{
appkit::{NSEvent, NSEventModifierFlags, NSEventPhase, NSEventType},
@@ -221,6 +221,16 @@ impl Event {
modifiers: read_modifiers(native_event),
})
}),
NSEventType::NSMouseExited => window_height.map(|window_height| {
Self::MouseExited(MouseExitedEvent {
position: vec2f(
native_event.locationInWindow().x as f32,
window_height - native_event.locationInWindow().y as f32,
),
pressed_button: None,
modifiers: read_modifiers(native_event),
})
}),
_ => None,
}
}
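
An aside on the arithmetic above (not part of the diff): locationInWindow is reported in AppKit's bottom-left-origin coordinates, while the event positions here use a top-left origin, hence the window_height - y flip.

    // Convert a bottom-left-origin y coordinate to a top-left-origin one.
    fn flip_y(window_height: f32, y_from_bottom: f32) -> f32 {
        window_height - y_from_bottom
    }

    fn main() {
        // A point 10px above the bottom of a 600px-tall window sits 590px from the top.
        assert_eq!(flip_y(600.0, 10.0), 590.0);
    }
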
@@ -66,6 +66,8 @@ const NSNormalWindowLevel: NSInteger = 0;
|
|||
#[allow(non_upper_case_globals)]
|
||||
const NSPopUpWindowLevel: NSInteger = 101;
|
||||
#[allow(non_upper_case_globals)]
|
||||
const NSTrackingMouseEnteredAndExited: NSUInteger = 0x01;
|
||||
#[allow(non_upper_case_globals)]
|
||||
const NSTrackingMouseMoved: NSUInteger = 0x02;
|
||||
#[allow(non_upper_case_globals)]
|
||||
const NSTrackingActiveAlways: NSUInteger = 0x80;
|
||||
|
@@ -170,6 +172,10 @@ unsafe fn build_classes() {
|
|||
sel!(mouseMoved:),
|
||||
handle_view_event as extern "C" fn(&Object, Sel, id),
|
||||
);
|
||||
decl.add_method(
|
||||
sel!(mouseExited:),
|
||||
handle_view_event as extern "C" fn(&Object, Sel, id),
|
||||
);
|
||||
decl.add_method(
|
||||
sel!(mouseDragged:),
|
||||
handle_view_event as extern "C" fn(&Object, Sel, id),
|
||||
|
@@ -252,6 +258,11 @@ unsafe fn build_classes() {
|
|||
do_command_by_selector as extern "C" fn(&Object, Sel, Sel),
|
||||
);
|
||||
|
||||
decl.add_method(
|
||||
sel!(acceptsFirstMouse:),
|
||||
accepts_first_mouse as extern "C" fn(&Object, Sel, id) -> BOOL,
|
||||
);
|
||||
|
||||
decl.register()
|
||||
};
|
||||
}
|
||||
|
@@ -317,6 +328,7 @@ enum ImeState {
|
|||
struct WindowState {
|
||||
id: usize,
|
||||
native_window: id,
|
||||
kind: WindowKind,
|
||||
event_callback: Option<Box<dyn FnMut(Event) -> bool>>,
|
||||
activate_callback: Option<Box<dyn FnMut(bool)>>,
|
||||
resize_callback: Option<Box<dyn FnMut()>>,
|
||||
|
@@ -422,6 +434,7 @@ impl Window {
|
|||
let window = Self(Rc::new(RefCell::new(WindowState {
|
||||
id,
|
||||
native_window,
|
||||
kind: options.kind,
|
||||
event_callback: None,
|
||||
resize_callback: None,
|
||||
should_close_callback: None,
|
||||
|
@@ -469,16 +482,6 @@ impl Window {
|
|||
native_window.setTitlebarAppearsTransparent_(YES);
|
||||
}
|
||||
|
||||
let tracking_area: id = msg_send![class!(NSTrackingArea), alloc];
|
||||
let _: () = msg_send![
|
||||
tracking_area,
|
||||
initWithRect: NSRect::new(NSPoint::new(0., 0.), NSSize::new(0., 0.))
|
||||
options: NSTrackingMouseMoved | NSTrackingActiveAlways | NSTrackingInVisibleRect
|
||||
owner: native_view
|
||||
userInfo: nil
|
||||
];
|
||||
let _: () = msg_send![native_view, addTrackingArea: tracking_area.autorelease()];
|
||||
|
||||
native_view.setAutoresizingMask_(NSViewWidthSizable | NSViewHeightSizable);
|
||||
native_view.setWantsBestResolutionOpenGLSurface_(YES);
|
||||
|
||||
|
@@ -501,8 +504,25 @@ impl Window {
|
|||
}
|
||||
|
||||
match options.kind {
|
||||
WindowKind::Normal => native_window.setLevel_(NSNormalWindowLevel),
|
||||
WindowKind::Normal => {
|
||||
native_window.setLevel_(NSNormalWindowLevel);
|
||||
native_window.setAcceptsMouseMovedEvents_(YES);
|
||||
}
|
||||
WindowKind::PopUp => {
|
||||
// Use a tracking area to allow receiving MouseMoved events even when
|
||||
// the window or application aren't active, which is often the case
|
||||
// e.g. for notification windows.
|
||||
let tracking_area: id = msg_send![class!(NSTrackingArea), alloc];
|
||||
let _: () = msg_send![
|
||||
tracking_area,
|
||||
initWithRect: NSRect::new(NSPoint::new(0., 0.), NSSize::new(0., 0.))
|
||||
options: NSTrackingMouseEnteredAndExited | NSTrackingMouseMoved | NSTrackingActiveAlways | NSTrackingInVisibleRect
|
||||
owner: native_view
|
||||
userInfo: nil
|
||||
];
|
||||
let _: () =
|
||||
msg_send![native_view, addTrackingArea: tracking_area.autorelease()];
|
||||
|
||||
native_window.setLevel_(NSPopUpWindowLevel);
|
||||
let _: () = msg_send![
|
||||
native_window,
|
||||
|
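For reference (a sketch, not part of the diff): the tracking-area options are a plain bitmask built from the constants added near the top of this file; the pop-up configuration also ORs in cocoa's NSTrackingInVisibleRect, whose value is not shown in these hunks.

    const NS_TRACKING_MOUSE_ENTERED_AND_EXITED: u64 = 0x01;
    const NS_TRACKING_MOUSE_MOVED: u64 = 0x02;
    const NS_TRACKING_ACTIVE_ALWAYS: u64 = 0x80;

    fn main() {
        // The three options requested for pop-up windows combine to 0x83.
        assert_eq!(
            NS_TRACKING_MOUSE_ENTERED_AND_EXITED | NS_TRACKING_MOUSE_MOVED | NS_TRACKING_ACTIVE_ALWAYS,
            0x83
        );
    }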
@@ -873,11 +893,10 @@ extern "C" fn handle_key_down(this: &Object, _: Sel, native_event: id) {
|
|||
|
||||
extern "C" fn handle_key_event(this: &Object, native_event: id, key_equivalent: bool) -> BOOL {
|
||||
let window_state = unsafe { get_window_state(this) };
|
||||
|
||||
let mut window_state_borrow = window_state.as_ref().borrow_mut();
|
||||
|
||||
let event =
|
||||
unsafe { Event::from_native(native_event, Some(window_state_borrow.content_size().y())) };
|
||||
let window_height = window_state_borrow.content_size().y();
|
||||
let event = unsafe { Event::from_native(native_event, Some(window_height)) };
|
||||
|
||||
if let Some(event) = event {
|
||||
if key_equivalent {
|
||||
|
@@ -902,6 +921,7 @@ extern "C" fn handle_key_event(this: &Object, native_event: id, key_equivalent:
|
|||
function_is_held = event.keystroke.function;
|
||||
Some((event, None))
|
||||
}
|
||||
|
||||
_ => return NO,
|
||||
};
|
||||
|
||||
|
@@ -968,9 +988,10 @@ extern "C" fn handle_view_event(this: &Object, _: Sel, native_event: id) {
|
|||
let window_state = unsafe { get_window_state(this) };
|
||||
let weak_window_state = Rc::downgrade(&window_state);
|
||||
let mut window_state_borrow = window_state.as_ref().borrow_mut();
|
||||
let is_active = unsafe { window_state_borrow.native_window.isKeyWindow() == YES };
|
||||
|
||||
let event =
|
||||
unsafe { Event::from_native(native_event, Some(window_state_borrow.content_size().y())) };
|
||||
let window_height = window_state_borrow.content_size().y();
|
||||
let event = unsafe { Event::from_native(native_event, Some(window_height)) };
|
||||
if let Some(event) = event {
|
||||
match &event {
|
||||
Event::MouseMoved(
|
||||
|
@@ -989,12 +1010,20 @@ extern "C" fn handle_view_event(this: &Object, _: Sel, native_event: id) {
|
|||
))
|
||||
.detach();
|
||||
}
|
||||
|
||||
Event::MouseMoved(_)
|
||||
if !(is_active || window_state_borrow.kind == WindowKind::PopUp) =>
|
||||
{
|
||||
return
|
||||
}
|
||||
|
||||
Event::MouseUp(MouseButtonEvent {
|
||||
button: MouseButton::Left,
|
||||
..
|
||||
}) => {
|
||||
window_state_borrow.synthetic_drag_counter += 1;
|
||||
}
|
||||
|
||||
Event::ModifiersChanged(ModifiersChangedEvent { modifiers }) => {
|
||||
// Only raise modifiers changed event when they have actually changed
|
||||
if let Some(Event::ModifiersChanged(ModifiersChangedEvent {
|
||||
|
@@ -1008,6 +1037,7 @@ extern "C" fn handle_view_event(this: &Object, _: Sel, native_event: id) {
|
|||
|
||||
window_state_borrow.previous_modifiers_changed_event = Some(event.clone());
|
||||
}
|
||||
|
||||
_ => {}
|
||||
}
|
||||
|
||||
|
@@ -1404,6 +1434,18 @@ extern "C" fn view_did_change_effective_appearance(this: &Object, _: Sel) {
|
|||
}
|
||||
}
|
||||
|
||||
extern "C" fn accepts_first_mouse(this: &Object, _: Sel, _: id) -> BOOL {
|
||||
unsafe {
|
||||
let state = get_window_state(this);
|
||||
let state_borrow = state.as_ref().borrow();
|
||||
return if state_borrow.kind == WindowKind::PopUp {
|
||||
YES
|
||||
} else {
|
||||
NO
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
async fn synthetic_drag(
|
||||
window_state: Weak<RefCell<WindowState>>,
|
||||
drag_id: usize,
|
||||
|
|
|
@@ -8,7 +8,7 @@ use crate::{
|
|||
platform::{CursorStyle, Event},
|
||||
scene::{
|
||||
CursorRegion, MouseClick, MouseDown, MouseDownOut, MouseDrag, MouseEvent, MouseHover,
|
||||
MouseMove, MouseScrollWheel, MouseUp, MouseUpOut, Scene,
|
||||
MouseMove, MouseMoveOut, MouseScrollWheel, MouseUp, MouseUpOut, Scene,
|
||||
},
|
||||
text_layout::TextLayoutCache,
|
||||
Action, AnyModelHandle, AnyViewHandle, AnyWeakModelHandle, AnyWeakViewHandle, Appearance,
|
||||
|
@@ -156,6 +156,7 @@ impl Presenter {
|
|||
self.cursor_regions = scene.cursor_regions();
|
||||
self.mouse_regions = scene.mouse_regions();
|
||||
|
||||
// window.is_topmost for the mouse moved event's postion?
|
||||
if cx.window_is_active(self.window_id) {
|
||||
if let Some(event) = self.last_mouse_moved_event.clone() {
|
||||
self.dispatch_event(event, true, cx);
|
||||
|
@@ -245,8 +246,11 @@ impl Presenter {
|
|||
// -> Also updates mouse-related state
|
||||
match &event {
|
||||
Event::KeyDown(e) => return cx.dispatch_key_down(self.window_id, e),
|
||||
|
||||
Event::KeyUp(e) => return cx.dispatch_key_up(self.window_id, e),
|
||||
|
||||
Event::ModifiersChanged(e) => return cx.dispatch_modifiers_changed(self.window_id, e),
|
||||
|
||||
Event::MouseDown(e) => {
|
||||
// Click events are weird because they can be fired after a drag event.
|
||||
// MDN says that browsers handle this by starting from 'the most
|
||||
|
@@ -279,6 +283,7 @@ impl Presenter {
|
|||
platform_event: e.clone(),
|
||||
}));
|
||||
}
|
||||
|
||||
Event::MouseUp(e) => {
|
||||
// NOTE: The order of event pushes is important! MouseUp events MUST be fired
|
||||
// before click events, and so the MouseUp events need to be pushed before
|
||||
|
@@ -296,6 +301,7 @@ impl Presenter {
|
|||
platform_event: e.clone(),
|
||||
}));
|
||||
}
|
||||
|
||||
Event::MouseMoved(
|
||||
e @ MouseMovedEvent {
|
||||
position,
|
||||
|
@@ -347,9 +353,28 @@ impl Presenter {
|
|||
platform_event: e.clone(),
|
||||
started: false,
|
||||
}));
|
||||
mouse_events.push(MouseEvent::MoveOut(MouseMoveOut {
|
||||
region: Default::default(),
|
||||
}));
|
||||
|
||||
self.last_mouse_moved_event = Some(event.clone());
|
||||
}
|
||||
|
||||
Event::MouseExited(event) => {
|
||||
// When the platform sends a MouseExited event, synthesize
|
||||
// a MouseMoved event whose position is outside the window's
|
||||
// bounds so that hover and cursor state can be updated.
|
||||
return self.dispatch_event(
|
||||
Event::MouseMoved(MouseMovedEvent {
|
||||
position: event.position,
|
||||
pressed_button: event.pressed_button,
|
||||
modifiers: event.modifiers,
|
||||
}),
|
||||
event_reused,
|
||||
cx,
|
||||
);
|
||||
}
|
||||
|
||||
Event::ScrollWheel(e) => mouse_events.push(MouseEvent::ScrollWheel(MouseScrollWheel {
|
||||
region: Default::default(),
|
||||
platform_event: e.clone(),
|
||||
|
@@ -407,6 +432,7 @@ impl Presenter {
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
MouseEvent::Down(_) | MouseEvent::Up(_) => {
|
||||
for (region, _) in self.mouse_regions.iter().rev() {
|
||||
if region.bounds.contains_point(self.mouse_position) {
|
||||
|
@@ -417,6 +443,7 @@ impl Presenter {
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
MouseEvent::Click(e) => {
|
||||
// Only raise click events if the released button is the same as the one stored
|
||||
if self
|
||||
|
@@ -439,6 +466,7 @@ impl Presenter {
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
MouseEvent::Drag(_) => {
|
||||
for (mouse_region, _) in self.mouse_regions.iter().rev() {
|
||||
if self.clicked_region_ids.contains(&mouse_region.id()) {
|
||||
|
@@ -447,7 +475,7 @@ impl Presenter {
|
|||
}
|
||||
}
|
||||
|
||||
MouseEvent::UpOut(_) | MouseEvent::DownOut(_) => {
|
||||
MouseEvent::MoveOut(_) | MouseEvent::UpOut(_) | MouseEvent::DownOut(_) => {
|
||||
for (mouse_region, _) in self.mouse_regions.iter().rev() {
|
||||
// NOT contains
|
||||
if !mouse_region.bounds.contains_point(self.mouse_position) {
|
||||
|
@@ -455,6 +483,7 @@ impl Presenter {
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
_ => {
|
||||
for (mouse_region, _) in self.mouse_regions.iter().rev() {
|
||||
// Contains
|
||||
|
@@ -573,7 +602,7 @@ pub struct LayoutContext<'a> {
|
|||
|
||||
impl<'a> LayoutContext<'a> {
|
||||
pub(crate) fn keystrokes_for_action(
|
||||
&self,
|
||||
&mut self,
|
||||
action: &dyn Action,
|
||||
) -> Option<SmallVec<[Keystroke; 2]>> {
|
||||
self.app
|
||||
|
|
|
@@ -21,6 +21,11 @@ impl Deref for MouseMove {
|
|||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Default, Clone)]
|
||||
pub struct MouseMoveOut {
|
||||
pub region: RectF,
|
||||
}
|
||||
|
||||
#[derive(Debug, Default, Clone)]
|
||||
pub struct MouseDrag {
|
||||
pub region: RectF,
|
||||
|
@@ -138,6 +143,7 @@ impl Deref for MouseScrollWheel {
|
|||
#[derive(Debug, Clone)]
|
||||
pub enum MouseEvent {
|
||||
Move(MouseMove),
|
||||
MoveOut(MouseMoveOut),
|
||||
Drag(MouseDrag),
|
||||
Hover(MouseHover),
|
||||
Down(MouseDown),
|
||||
|
@@ -152,6 +158,7 @@ impl MouseEvent {
|
|||
pub fn set_region(&mut self, region: RectF) {
|
||||
match self {
|
||||
MouseEvent::Move(r) => r.region = region,
|
||||
MouseEvent::MoveOut(r) => r.region = region,
|
||||
MouseEvent::Drag(r) => r.region = region,
|
||||
MouseEvent::Hover(r) => r.region = region,
|
||||
MouseEvent::Down(r) => r.region = region,
|
||||
|
@@ -168,6 +175,7 @@ impl MouseEvent {
|
|||
pub fn is_capturable(&self) -> bool {
|
||||
match self {
|
||||
MouseEvent::Move(_) => true,
|
||||
MouseEvent::MoveOut(_) => false,
|
||||
MouseEvent::Drag(_) => true,
|
||||
MouseEvent::Hover(_) => false,
|
||||
MouseEvent::Down(_) => true,
|
||||
|
@@ -185,6 +193,10 @@ impl MouseEvent {
|
|||
discriminant(&MouseEvent::Move(Default::default()))
|
||||
}
|
||||
|
||||
pub fn move_out_disc() -> Discriminant<MouseEvent> {
|
||||
discriminant(&MouseEvent::MoveOut(Default::default()))
|
||||
}
|
||||
|
||||
pub fn drag_disc() -> Discriminant<MouseEvent> {
|
||||
discriminant(&MouseEvent::Drag(Default::default()))
|
||||
}
|
||||
|
@@ -220,6 +232,7 @@ impl MouseEvent {
|
|||
pub fn handler_key(&self) -> HandlerKey {
|
||||
match self {
|
||||
MouseEvent::Move(_) => HandlerKey::new(Self::move_disc(), None),
|
||||
MouseEvent::MoveOut(_) => HandlerKey::new(Self::move_out_disc(), None),
|
||||
MouseEvent::Drag(e) => HandlerKey::new(Self::drag_disc(), e.pressed_button),
|
||||
MouseEvent::Hover(_) => HandlerKey::new(Self::hover_disc(), None),
|
||||
MouseEvent::Down(e) => HandlerKey::new(Self::down_disc(), Some(e.button)),
|
||||
|
|
|
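A standalone illustration (assumed demo types, not the gpui ones) of the discriminant-keyed dispatch used above: std::mem::discriminant identifies the variant while ignoring its payload, which is what lets every MoveOut event share one handler slot.

    use std::mem::{discriminant, Discriminant};

    #[derive(Debug, Default)]
    struct Payload(u32);

    #[derive(Debug)]
    enum Demo {
        Move(Payload),
        MoveOut(Payload),
    }

    fn key(event: &Demo) -> Discriminant<Demo> {
        discriminant(event)
    }

    fn main() {
        // Same variant, different payloads: same key.
        assert_eq!(key(&Demo::MoveOut(Payload(1))), key(&Demo::MoveOut(Payload(2))));
        // Different variants: different keys.
        assert_ne!(key(&Demo::Move(Payload(1))), key(&Demo::MoveOut(Payload(1))));
    }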
@@ -12,7 +12,7 @@ use super::{
|
|||
MouseClick, MouseDown, MouseDownOut, MouseDrag, MouseEvent, MouseHover, MouseMove, MouseUp,
|
||||
MouseUpOut,
|
||||
},
|
||||
MouseScrollWheel,
|
||||
MouseMoveOut, MouseScrollWheel,
|
||||
};
|
||||
|
||||
#[derive(Clone)]
|
||||
|
@@ -124,6 +124,14 @@ impl MouseRegion {
|
|||
self
|
||||
}
|
||||
|
||||
pub fn on_move_out(
|
||||
mut self,
|
||||
handler: impl Fn(MouseMoveOut, &mut EventContext) + 'static,
|
||||
) -> Self {
|
||||
self.handlers = self.handlers.on_move_out(handler);
|
||||
self
|
||||
}
|
||||
|
||||
pub fn on_scroll(
|
||||
mut self,
|
||||
handler: impl Fn(MouseScrollWheel, &mut EventContext) + 'static,
|
||||
|
@@ -289,6 +297,23 @@ impl HandlerSet {
|
|||
self
|
||||
}
|
||||
|
||||
pub fn on_move_out(
|
||||
mut self,
|
||||
handler: impl Fn(MouseMoveOut, &mut EventContext) + 'static,
|
||||
) -> Self {
|
||||
self.insert(MouseEvent::move_out_disc(), None,
|
||||
Rc::new(move |region_event, cx| {
|
||||
if let MouseEvent::MoveOut(e) = region_event {
|
||||
handler(e, cx);
|
||||
} else {
|
||||
panic!(
|
||||
"Mouse Region Event incorrectly called with mismatched event type. Expected MouseRegionEvent::MoveOut, found {:?}",
|
||||
region_event);
|
||||
}
|
||||
}));
|
||||
self
|
||||
}
|
||||
|
||||
pub fn on_down(
|
||||
mut self,
|
||||
button: MouseButton,
|
||||
|
|
|
@@ -9,7 +9,7 @@ use crate::{
|
|||
syntax_map::{
|
||||
SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxSnapshot, ToTreeSitterPoint,
|
||||
},
|
||||
CodeLabel, Outline,
|
||||
CodeLabel, LanguageScope, Outline,
|
||||
};
|
||||
use anyhow::{anyhow, Result};
|
||||
use clock::ReplicaId;
|
||||
|
@@ -60,7 +60,7 @@ pub struct Buffer {
|
|||
git_diff_status: GitDiffStatus,
|
||||
file: Option<Arc<dyn File>>,
|
||||
saved_version: clock::Global,
|
||||
saved_version_fingerprint: String,
|
||||
saved_version_fingerprint: RopeFingerprint,
|
||||
saved_mtime: SystemTime,
|
||||
transaction_depth: usize,
|
||||
was_dirty_before_starting_transaction: Option<bool>,
|
||||
|
@@ -221,7 +221,7 @@ pub trait File: Send + Sync {
|
|||
version: clock::Global,
|
||||
line_ending: LineEnding,
|
||||
cx: &mut MutableAppContext,
|
||||
) -> Task<Result<(clock::Global, String, SystemTime)>>;
|
||||
) -> Task<Result<(clock::Global, RopeFingerprint, SystemTime)>>;
|
||||
|
||||
fn as_any(&self) -> &dyn Any;
|
||||
|
||||
|
@@ -238,7 +238,7 @@ pub trait LocalFile: File {
|
|||
&self,
|
||||
buffer_id: u64,
|
||||
version: &clock::Global,
|
||||
fingerprint: String,
|
||||
fingerprint: RopeFingerprint,
|
||||
line_ending: LineEnding,
|
||||
mtime: SystemTime,
|
||||
cx: &mut MutableAppContext,
|
||||
|
@@ -282,6 +282,7 @@ struct AutoindentRequestEntry {
|
|||
struct IndentSuggestion {
|
||||
basis_row: u32,
|
||||
delta: Ordering,
|
||||
within_error: bool,
|
||||
}
|
||||
|
||||
struct BufferChunkHighlights<'a> {
|
||||
|
@@ -385,6 +386,13 @@ impl Buffer {
|
|||
rpc::proto::LineEnding::from_i32(message.line_ending)
|
||||
.ok_or_else(|| anyhow!("missing line_ending"))?,
|
||||
));
|
||||
this.saved_version = proto::deserialize_version(message.saved_version);
|
||||
this.saved_version_fingerprint =
|
||||
proto::deserialize_fingerprint(&message.saved_version_fingerprint)?;
|
||||
this.saved_mtime = message
|
||||
.saved_mtime
|
||||
.ok_or_else(|| anyhow!("invalid saved_mtime"))?
|
||||
.into();
|
||||
Ok(this)
|
||||
}
|
||||
|
||||
|
@@ -395,6 +403,9 @@ impl Buffer {
|
|||
base_text: self.base_text().to_string(),
|
||||
diff_base: self.diff_base.as_ref().map(|h| h.to_string()),
|
||||
line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
|
||||
saved_version: proto::serialize_version(&self.saved_version),
|
||||
saved_version_fingerprint: proto::serialize_fingerprint(self.saved_version_fingerprint),
|
||||
saved_mtime: Some(self.saved_mtime.into()),
|
||||
}
|
||||
}
|
||||
|
||||
|
@@ -521,7 +532,7 @@ impl Buffer {
|
|||
pub fn save(
|
||||
&mut self,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) -> Task<Result<(clock::Global, String, SystemTime)>> {
|
||||
) -> Task<Result<(clock::Global, RopeFingerprint, SystemTime)>> {
|
||||
let file = if let Some(file) = self.file.as_ref() {
|
||||
file
|
||||
} else {
|
||||
|
@@ -539,7 +550,7 @@ impl Buffer {
|
|||
cx.spawn(|this, mut cx| async move {
|
||||
let (version, fingerprint, mtime) = save.await?;
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.did_save(version.clone(), fingerprint.clone(), mtime, None, cx);
|
||||
this.did_save(version.clone(), fingerprint, mtime, None, cx);
|
||||
});
|
||||
Ok((version, fingerprint, mtime))
|
||||
})
|
||||
|
@@ -549,6 +560,14 @@ impl Buffer {
|
|||
&self.saved_version
|
||||
}
|
||||
|
||||
pub fn saved_version_fingerprint(&self) -> RopeFingerprint {
|
||||
self.saved_version_fingerprint
|
||||
}
|
||||
|
||||
pub fn saved_mtime(&self) -> SystemTime {
|
||||
self.saved_mtime
|
||||
}
|
||||
|
||||
pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut ModelContext<Self>) {
|
||||
self.syntax_map.lock().clear();
|
||||
self.language = language;
|
||||
|
@@ -564,7 +583,7 @@ impl Buffer {
|
|||
pub fn did_save(
|
||||
&mut self,
|
||||
version: clock::Global,
|
||||
fingerprint: String,
|
||||
fingerprint: RopeFingerprint,
|
||||
mtime: SystemTime,
|
||||
new_file: Option<Arc<dyn File>>,
|
||||
cx: &mut ModelContext<Self>,
|
||||
|
@@ -613,7 +632,7 @@ impl Buffer {
|
|||
pub fn did_reload(
|
||||
&mut self,
|
||||
version: clock::Global,
|
||||
fingerprint: String,
|
||||
fingerprint: RopeFingerprint,
|
||||
line_ending: LineEnding,
|
||||
mtime: SystemTime,
|
||||
cx: &mut ModelContext<Self>,
|
||||
|
@@ -626,7 +645,7 @@ impl Buffer {
|
|||
file.buffer_reloaded(
|
||||
self.remote_id(),
|
||||
&self.saved_version,
|
||||
self.saved_version_fingerprint.clone(),
|
||||
self.saved_version_fingerprint,
|
||||
self.line_ending(),
|
||||
self.saved_mtime,
|
||||
cx,
|
||||
|
@@ -919,7 +938,7 @@ impl Buffer {
|
|||
// Build a map containing the suggested indentation for each of the edited lines
|
||||
// with respect to the state of the buffer before these edits. This map is keyed
|
||||
// by the rows for these lines in the current state of the buffer.
|
||||
let mut old_suggestions = BTreeMap::<u32, IndentSize>::default();
|
||||
let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
|
||||
let old_edited_ranges =
|
||||
contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
|
||||
let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
|
||||
|
@@ -945,14 +964,17 @@ impl Buffer {
|
|||
|
||||
let suggested_indent = old_to_new_rows
|
||||
.get(&suggestion.basis_row)
|
||||
.and_then(|from_row| old_suggestions.get(from_row).copied())
|
||||
.and_then(|from_row| {
|
||||
Some(old_suggestions.get(from_row).copied()?.0)
|
||||
})
|
||||
.unwrap_or_else(|| {
|
||||
request
|
||||
.before_edit
|
||||
.indent_size_for_line(suggestion.basis_row)
|
||||
})
|
||||
.with_delta(suggestion.delta, language_indent_size);
|
||||
old_suggestions.insert(new_row, suggested_indent);
|
||||
old_suggestions
|
||||
.insert(new_row, (suggested_indent, suggestion.within_error));
|
||||
}
|
||||
}
|
||||
yield_now().await;
|
||||
|
@@ -998,12 +1020,13 @@ impl Buffer {
|
|||
snapshot.indent_size_for_line(suggestion.basis_row)
|
||||
})
|
||||
.with_delta(suggestion.delta, language_indent_size);
|
||||
if old_suggestions
|
||||
.get(&new_row)
|
||||
.map_or(true, |old_indentation| {
|
||||
if old_suggestions.get(&new_row).map_or(
|
||||
true,
|
||||
|(old_indentation, was_within_error)| {
|
||||
suggested_indent != *old_indentation
|
||||
})
|
||||
{
|
||||
&& (!suggestion.within_error || *was_within_error)
|
||||
},
|
||||
) {
|
||||
indent_sizes.insert(new_row, suggested_indent);
|
||||
}
|
||||
}
|
||||
|
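The acceptance rule in the hunk above, restated on its own (a sketch; u32 stands in for IndentSize): a freshly computed suggestion is applied only when it differs from the pre-edit suggestion and is not newly inside a syntax error.

    fn should_apply_suggestion(
        new_suggestion: u32,
        new_within_error: bool,
        old: Option<(u32, bool)>, // (pre-edit suggestion, was it within an error?)
    ) -> bool {
        match old {
            None => true,
            Some((old_suggestion, was_within_error)) => {
                new_suggestion != old_suggestion && (!new_within_error || was_within_error)
            }
        }
    }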
@@ -1332,13 +1355,6 @@ impl Buffer {
|
|||
let edit_id = edit_operation.local_timestamp();
|
||||
|
||||
if let Some((before_edit, mode)) = autoindent_request {
|
||||
let (start_columns, is_block_mode) = match mode {
|
||||
AutoindentMode::Block {
|
||||
original_indent_columns: start_columns,
|
||||
} => (start_columns, true),
|
||||
AutoindentMode::EachLine => (Default::default(), false),
|
||||
};
|
||||
|
||||
let mut delta = 0isize;
|
||||
let entries = edits
|
||||
.into_iter()
|
||||
|
@@ -1352,7 +1368,7 @@ impl Buffer {
|
|||
|
||||
let mut range_of_insertion_to_indent = 0..new_text_len;
|
||||
let mut first_line_is_new = false;
|
||||
let mut start_column = None;
|
||||
let mut original_indent_column = None;
|
||||
|
||||
// When inserting an entire line at the beginning of an existing line,
|
||||
// treat the insertion as new.
|
||||
|
@@ -1364,14 +1380,23 @@ impl Buffer {
|
|||
|
||||
// When inserting text starting with a newline, avoid auto-indenting the
|
||||
// previous line.
|
||||
if new_text[range_of_insertion_to_indent.clone()].starts_with('\n') {
|
||||
if new_text.starts_with('\n') {
|
||||
range_of_insertion_to_indent.start += 1;
|
||||
first_line_is_new = true;
|
||||
}
|
||||
|
||||
// Avoid auto-indenting after the insertion.
|
||||
if is_block_mode {
|
||||
start_column = start_columns.get(ix).copied();
|
||||
if let AutoindentMode::Block {
|
||||
original_indent_columns,
|
||||
} = &mode
|
||||
{
|
||||
original_indent_column =
|
||||
Some(original_indent_columns.get(ix).copied().unwrap_or_else(|| {
|
||||
indent_size_for_text(
|
||||
new_text[range_of_insertion_to_indent.clone()].chars(),
|
||||
)
|
||||
.len
|
||||
}));
|
||||
if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
|
||||
range_of_insertion_to_indent.end -= 1;
|
||||
}
|
||||
|
@@ -1379,7 +1404,7 @@ impl Buffer {
|
|||
|
||||
AutoindentRequestEntry {
|
||||
first_line_is_new,
|
||||
original_indent_column: start_column,
|
||||
original_indent_column,
|
||||
indent_size: before_edit.language_indent_size_at(range.start, cx),
|
||||
range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
|
||||
..self.anchor_after(new_start + range_of_insertion_to_indent.end),
|
||||
|
@@ -1390,7 +1415,7 @@ impl Buffer {
|
|||
self.autoindent_requests.push(Arc::new(AutoindentRequest {
|
||||
before_edit,
|
||||
entries,
|
||||
is_block_mode,
|
||||
is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
|
||||
}));
|
||||
}
|
||||
|
||||
|
@@ -1641,6 +1666,16 @@ impl Buffer {
|
|||
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
impl Buffer {
|
||||
pub fn edit_via_marked_text(
|
||||
&mut self,
|
||||
marked_string: &str,
|
||||
autoindent_mode: Option<AutoindentMode>,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) {
|
||||
let edits = self.edits_for_marked_text(marked_string);
|
||||
self.edit(edits, autoindent_mode, cx);
|
||||
}
|
||||
|
||||
pub fn set_group_interval(&mut self, group_interval: Duration) {
|
||||
self.text.set_group_interval(group_interval);
|
||||
}
|
||||
|
@@ -1759,7 +1794,7 @@ impl BufferSnapshot {
|
|||
let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
|
||||
let end = Point::new(row_range.end, 0);
|
||||
let range = (start..end).to_offset(&self.text);
|
||||
let mut matches = self.syntax.matches(range, &self.text, |grammar| {
|
||||
let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
|
||||
Some(&grammar.indents_config.as_ref()?.query)
|
||||
});
|
||||
let indent_configs = matches
|
||||
|
@@ -1805,6 +1840,30 @@ impl BufferSnapshot {
|
|||
}
|
||||
}
|
||||
|
||||
let mut error_ranges = Vec::<Range<Point>>::new();
|
||||
let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
|
||||
Some(&grammar.error_query)
|
||||
});
|
||||
while let Some(mat) = matches.peek() {
|
||||
let node = mat.captures[0].node;
|
||||
let start = Point::from_ts_point(node.start_position());
|
||||
let end = Point::from_ts_point(node.end_position());
|
||||
let range = start..end;
|
||||
let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
|
||||
Ok(ix) | Err(ix) => ix,
|
||||
};
|
||||
let mut end_ix = ix;
|
||||
while let Some(existing_range) = error_ranges.get(end_ix) {
|
||||
if existing_range.end < end {
|
||||
end_ix += 1;
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
error_ranges.splice(ix..end_ix, [range]);
|
||||
matches.advance();
|
||||
}
|
||||
|
||||
outdent_positions.sort();
|
||||
for outdent_position in outdent_positions {
|
||||
// find the innermost indent range containing this outdent_position
|
||||
|
@@ -1882,33 +1941,42 @@ impl BufferSnapshot {
|
|||
}
|
||||
}
|
||||
|
||||
let within_error = error_ranges
|
||||
.iter()
|
||||
.any(|e| e.start.row < row && e.end > row_start);
|
||||
|
||||
let suggestion = if outdent_to_row == prev_row
|
||||
|| (outdent_from_prev_row && indent_from_prev_row)
|
||||
{
|
||||
Some(IndentSuggestion {
|
||||
basis_row: prev_row,
|
||||
delta: Ordering::Equal,
|
||||
within_error,
|
||||
})
|
||||
} else if indent_from_prev_row {
|
||||
Some(IndentSuggestion {
|
||||
basis_row: prev_row,
|
||||
delta: Ordering::Greater,
|
||||
within_error,
|
||||
})
|
||||
} else if outdent_to_row < prev_row {
|
||||
Some(IndentSuggestion {
|
||||
basis_row: outdent_to_row,
|
||||
delta: Ordering::Equal,
|
||||
within_error,
|
||||
})
|
||||
} else if outdent_from_prev_row {
|
||||
Some(IndentSuggestion {
|
||||
basis_row: prev_row,
|
||||
delta: Ordering::Less,
|
||||
within_error,
|
||||
})
|
||||
} else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
|
||||
{
|
||||
Some(IndentSuggestion {
|
||||
basis_row: prev_row,
|
||||
delta: Ordering::Equal,
|
||||
within_error,
|
||||
})
|
||||
} else {
|
||||
None
|
||||
|
@@ -1995,6 +2063,27 @@ impl BufferSnapshot {
|
|||
.or(self.language.as_ref())
|
||||
}
|
||||
|
||||
pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
|
||||
let offset = position.to_offset(self);
|
||||
|
||||
if let Some(layer_info) = self
|
||||
.syntax
|
||||
.layers_for_range(offset..offset, &self.text)
|
||||
.filter(|l| l.node.end_byte() > offset)
|
||||
.last()
|
||||
{
|
||||
Some(LanguageScope {
|
||||
language: layer_info.language.clone(),
|
||||
override_id: layer_info.override_id(offset, &self.text),
|
||||
})
|
||||
} else {
|
||||
self.language.clone().map(|language| LanguageScope {
|
||||
language,
|
||||
override_id: None,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
pub fn surrounding_word<T: ToOffset>(&self, start: T) -> (Range<usize>, Option<CharKind>) {
|
||||
let mut start = start.to_offset(self);
|
||||
let mut end = start;
|
||||
|
@@ -2149,8 +2238,6 @@ impl BufferSnapshot {
|
|||
continue;
|
||||
}
|
||||
|
||||
// TODO - move later, after processing captures
|
||||
|
||||
let mut text = String::new();
|
||||
let mut name_ranges = Vec::new();
|
||||
let mut highlight_ranges = Vec::new();
|
||||
|
@@ -2164,7 +2251,13 @@ impl BufferSnapshot {
|
|||
continue;
|
||||
}
|
||||
|
||||
let range = capture.node.start_byte()..capture.node.end_byte();
|
||||
let mut range = capture.node.start_byte()..capture.node.end_byte();
|
||||
let start = capture.node.start_position();
|
||||
if capture.node.end_position().row > start.row {
|
||||
range.end =
|
||||
range.start + self.line_len(start.row as u32) as usize - start.column;
|
||||
}
|
||||
|
||||
if !text.is_empty() {
|
||||
text.push(' ');
|
||||
}
|
||||
|
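The range truncation above, stated on its own (line_len is a stand-in for BufferSnapshot::line_len): a multi-line outline capture keeps only the remainder of its first line, which is what the new test_outline_nodes_with_newlines test below relies on.

    use std::ops::Range;

    // Clip a capture starting at `start_column` of a row of length `line_len`
    // so it ends at the end of that row instead of spanning further lines.
    fn clip_to_first_line(range: Range<usize>, start_column: usize, line_len: usize) -> Range<usize> {
        range.start..range.start + line_len - start_column
    }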
@@ -2397,7 +2490,7 @@ impl BufferSnapshot {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
|
||||
fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
|
||||
indent_size_for_text(text.chars_at(Point::new(row, 0)))
|
||||
}
|
||||
|
||||
|
|
|
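Aside on the String -> RopeFingerprint change running through the hunks above: the fingerprint becomes a small value type that must be Copy (saved_version_fingerprint() returns it by value out of &self), so the old .clone() calls disappear. A rough stand-in for the idea; the real type lives in the rope crate and its layout here is an assumption:

    #[derive(Clone, Copy, PartialEq, Eq, Debug, Default)]
    struct Fingerprint([u8; 16]); // hypothetical digest representation

    fn record_save(saved: &mut Fingerprint, new_fingerprint: Fingerprint) {
        // Copied, not cloned; no allocation as with the old String fingerprints.
        *saved = new_fingerprint;
    }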
@@ -455,6 +455,32 @@ async fn test_outline(cx: &mut gpui::TestAppContext) {
|
|||
}
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_outline_nodes_with_newlines(cx: &mut gpui::TestAppContext) {
|
||||
let text = r#"
|
||||
impl A for B<
|
||||
C
|
||||
> {
|
||||
};
|
||||
"#
|
||||
.unindent();
|
||||
|
||||
let buffer =
|
||||
cx.add_model(|cx| Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx));
|
||||
let outline = buffer
|
||||
.read_with(cx, |buffer, _| buffer.snapshot().outline(None))
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(
|
||||
outline
|
||||
.items
|
||||
.iter()
|
||||
.map(|item| (item.text.as_str(), item.depth))
|
||||
.collect::<Vec<_>>(),
|
||||
&[("impl A for B<", 0)]
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_symbols_containing(cx: &mut gpui::TestAppContext) {
|
||||
let text = r#"
|
||||
|
@@ -774,23 +800,29 @@ fn test_autoindent_does_not_adjust_lines_with_unchanged_suggestion(cx: &mut Muta
|
|||
cx.set_global(settings);
|
||||
|
||||
cx.add_model(|cx| {
|
||||
let text = "
|
||||
let mut buffer = Buffer::new(
|
||||
0,
|
||||
"
|
||||
fn a() {
|
||||
c;
|
||||
d;
|
||||
}
|
||||
"
|
||||
.unindent();
|
||||
|
||||
let mut buffer = Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx);
|
||||
"
|
||||
.unindent(),
|
||||
cx,
|
||||
)
|
||||
.with_language(Arc::new(rust_lang()), cx);
|
||||
|
||||
// Lines 2 and 3 don't match the indentation suggestion. When editing these lines,
|
||||
// their indentation is not adjusted.
|
||||
buffer.edit(
|
||||
[
|
||||
(empty(Point::new(1, 1)), "()"),
|
||||
(empty(Point::new(2, 1)), "()"),
|
||||
],
|
||||
buffer.edit_via_marked_text(
|
||||
&"
|
||||
fn a() {
|
||||
c«()»;
|
||||
d«()»;
|
||||
}
|
||||
"
|
||||
.unindent(),
|
||||
Some(AutoindentMode::EachLine),
|
||||
cx,
|
||||
);
|
||||
|
@@ -807,14 +839,22 @@ fn test_autoindent_does_not_adjust_lines_with_unchanged_suggestion(cx: &mut Muta
|
|||
|
||||
// When appending new content after these lines, the indentation is based on the
|
||||
// preceding lines' actual indentation.
|
||||
buffer.edit(
|
||||
[
|
||||
(empty(Point::new(1, 1)), "\n.f\n.g"),
|
||||
(empty(Point::new(2, 1)), "\n.f\n.g"),
|
||||
],
|
||||
buffer.edit_via_marked_text(
|
||||
&"
|
||||
fn a() {
|
||||
c«
|
||||
.f
|
||||
.g()»;
|
||||
d«
|
||||
.f
|
||||
.g()»;
|
||||
}
|
||||
"
|
||||
.unindent(),
|
||||
Some(AutoindentMode::EachLine),
|
||||
cx,
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
buffer.text(),
|
||||
"
|
||||
|
@@ -833,20 +873,27 @@ fn test_autoindent_does_not_adjust_lines_with_unchanged_suggestion(cx: &mut Muta
|
|||
});
|
||||
|
||||
cx.add_model(|cx| {
|
||||
let text = "
|
||||
let mut buffer = Buffer::new(
|
||||
0,
|
||||
"
|
||||
fn a() {
|
||||
{
|
||||
b()?
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
"
|
||||
.unindent();
|
||||
let mut buffer = Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx);
|
||||
b();
|
||||
|
|
||||
"
|
||||
.replace("|", "") // marker to preserve trailing whitespace
|
||||
.unindent(),
|
||||
cx,
|
||||
)
|
||||
.with_language(Arc::new(rust_lang()), cx);
|
||||
|
||||
// Delete a closing curly brace changes the suggested indent for the line.
|
||||
buffer.edit(
|
||||
[(Point::new(3, 4)..Point::new(3, 5), "")],
|
||||
// Insert a closing brace. It is outdented.
|
||||
buffer.edit_via_marked_text(
|
||||
&"
|
||||
fn a() {
|
||||
b();
|
||||
«}»
|
||||
"
|
||||
.unindent(),
|
||||
Some(AutoindentMode::EachLine),
|
||||
cx,
|
||||
);
|
||||
|
@@ -854,19 +901,20 @@ fn test_autoindent_does_not_adjust_lines_with_unchanged_suggestion(cx: &mut Muta
|
|||
buffer.text(),
|
||||
"
|
||||
fn a() {
|
||||
{
|
||||
b()?
|
||||
|
|
||||
Ok(())
|
||||
b();
|
||||
}
|
||||
"
|
||||
.replace('|', "") // included in the string to preserve trailing whites
|
||||
.unindent()
|
||||
);
|
||||
|
||||
// Manually editing the leading whitespace
|
||||
buffer.edit(
|
||||
[(Point::new(3, 0)..Point::new(3, 12), "")],
|
||||
// Manually edit the leading whitespace. The edit is preserved.
|
||||
buffer.edit_via_marked_text(
|
||||
&"
|
||||
fn a() {
|
||||
b();
|
||||
« »}
|
||||
"
|
||||
.unindent(),
|
||||
Some(AutoindentMode::EachLine),
|
||||
cx,
|
||||
);
|
||||
|
@@ -874,11 +922,8 @@ fn test_autoindent_does_not_adjust_lines_with_unchanged_suggestion(cx: &mut Muta
|
|||
buffer.text(),
|
||||
"
|
||||
fn a() {
|
||||
{
|
||||
b()?
|
||||
|
||||
Ok(())
|
||||
}
|
||||
b();
|
||||
}
|
||||
"
|
||||
.unindent()
|
||||
);
|
||||
|
@@ -887,30 +932,108 @@ fn test_autoindent_does_not_adjust_lines_with_unchanged_suggestion(cx: &mut Muta
|
|||
}
|
||||
|
||||
#[gpui::test]
|
||||
fn test_autoindent_adjusts_lines_when_only_text_changes(cx: &mut MutableAppContext) {
|
||||
cx.set_global(Settings::test(cx));
|
||||
fn test_autoindent_does_not_adjust_lines_within_newly_created_errors(cx: &mut MutableAppContext) {
|
||||
let settings = Settings::test(cx);
|
||||
cx.set_global(settings);
|
||||
|
||||
cx.add_model(|cx| {
|
||||
let text = "
|
||||
fn a() {}
|
||||
"
|
||||
.unindent();
|
||||
let mut buffer = Buffer::new(
|
||||
0,
|
||||
"
|
||||
fn a() {
|
||||
i
|
||||
}
|
||||
"
|
||||
.unindent(),
|
||||
cx,
|
||||
)
|
||||
.with_language(Arc::new(rust_lang()), cx);
|
||||
|
||||
let mut buffer = Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx);
|
||||
|
||||
buffer.edit([(5..5, "\nb")], Some(AutoindentMode::EachLine), cx);
|
||||
// Regression test: line does not get outdented due to syntax error
|
||||
buffer.edit_via_marked_text(
|
||||
&"
|
||||
fn a() {
|
||||
i«f let Some(x) = y»
|
||||
}
|
||||
"
|
||||
.unindent(),
|
||||
Some(AutoindentMode::EachLine),
|
||||
cx,
|
||||
);
|
||||
assert_eq!(
|
||||
buffer.text(),
|
||||
"
|
||||
fn a(
|
||||
b) {}
|
||||
fn a() {
|
||||
if let Some(x) = y
|
||||
}
|
||||
"
|
||||
.unindent()
|
||||
);
|
||||
|
||||
buffer.edit_via_marked_text(
|
||||
&"
|
||||
fn a() {
|
||||
if let Some(x) = y« {»
|
||||
}
|
||||
"
|
||||
.unindent(),
|
||||
Some(AutoindentMode::EachLine),
|
||||
cx,
|
||||
);
|
||||
assert_eq!(
|
||||
buffer.text(),
|
||||
"
|
||||
fn a() {
|
||||
if let Some(x) = y {
|
||||
}
|
||||
"
|
||||
.unindent()
|
||||
);
|
||||
|
||||
buffer
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
fn test_autoindent_adjusts_lines_when_only_text_changes(cx: &mut MutableAppContext) {
|
||||
cx.set_global(Settings::test(cx));
|
||||
cx.add_model(|cx| {
|
||||
let mut buffer = Buffer::new(
|
||||
0,
|
||||
"
|
||||
fn a() {}
|
||||
"
|
||||
.unindent(),
|
||||
cx,
|
||||
)
|
||||
.with_language(Arc::new(rust_lang()), cx);
|
||||
|
||||
buffer.edit_via_marked_text(
|
||||
&"
|
||||
fn a(«
|
||||
b») {}
|
||||
"
|
||||
.unindent(),
|
||||
Some(AutoindentMode::EachLine),
|
||||
cx,
|
||||
);
|
||||
assert_eq!(
|
||||
buffer.text(),
|
||||
"
|
||||
fn a(
|
||||
b) {}
|
||||
"
|
||||
.unindent()
|
||||
);
|
||||
|
||||
// The indentation suggestion changed because `@end` node (a close paren)
|
||||
// is now at the beginning of the line.
|
||||
buffer.edit(
|
||||
[(Point::new(1, 4)..Point::new(1, 5), "")],
|
||||
buffer.edit_via_marked_text(
|
||||
&"
|
||||
fn a(
|
||||
ˇ) {}
|
||||
"
|
||||
.unindent(),
|
||||
Some(AutoindentMode::EachLine),
|
||||
cx,
|
||||
);
|
||||
|
@ -995,12 +1118,17 @@ fn test_autoindent_block_mode(cx: &mut MutableAppContext) {
|
|||
.unindent();
|
||||
let mut buffer = Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx);
|
||||
|
||||
// When this text was copied, both of the quotation marks were at the same
|
||||
// indent level, but the indentation of the first line was not included in
|
||||
// the copied text. This information is retained in the
|
||||
// 'original_indent_columns' vector.
|
||||
let original_indent_columns = vec![4];
|
||||
let inserted_text = r#"
|
||||
"
|
||||
c
|
||||
d
|
||||
e
|
||||
"
|
||||
c
|
||||
d
|
||||
e
|
||||
"
|
||||
"#
|
||||
.unindent();
|
||||
|
||||
|
@@ -1009,7 +1137,7 @@ fn test_autoindent_block_mode(cx: &mut MutableAppContext) {
|
|||
buffer.edit(
|
||||
[(Point::new(2, 0)..Point::new(2, 0), inserted_text.clone())],
|
||||
Some(AutoindentMode::Block {
|
||||
original_indent_columns: vec![0],
|
||||
original_indent_columns: original_indent_columns.clone(),
|
||||
}),
|
||||
cx,
|
||||
);
|
||||
|
@@ -1037,7 +1165,7 @@ fn test_autoindent_block_mode(cx: &mut MutableAppContext) {
|
|||
buffer.edit(
|
||||
[(Point::new(2, 8)..Point::new(2, 8), inserted_text)],
|
||||
Some(AutoindentMode::Block {
|
||||
original_indent_columns: vec![0],
|
||||
original_indent_columns: original_indent_columns.clone(),
|
||||
}),
|
||||
cx,
|
||||
);
|
||||
|
@@ -1060,6 +1188,84 @@ fn test_autoindent_block_mode(cx: &mut MutableAppContext) {
|
|||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
fn test_autoindent_block_mode_without_original_indent_columns(cx: &mut MutableAppContext) {
|
||||
cx.set_global(Settings::test(cx));
|
||||
cx.add_model(|cx| {
|
||||
let text = r#"
|
||||
fn a() {
|
||||
if b() {
|
||||
|
||||
}
|
||||
}
|
||||
"#
|
||||
.unindent();
|
||||
let mut buffer = Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx);
|
||||
|
||||
// The original indent columns are not known, so this text is
|
||||
// auto-indented in a block as if the first line was copied in
|
||||
// its entirety.
|
||||
let original_indent_columns = Vec::new();
|
||||
let inserted_text = " c\n .d()\n .e();";
|
||||
|
||||
// Insert the block at column zero. The entire block is indented
|
||||
// so that the first line matches the previous line's indentation.
|
||||
buffer.edit(
|
||||
[(Point::new(2, 0)..Point::new(2, 0), inserted_text.clone())],
|
||||
Some(AutoindentMode::Block {
|
||||
original_indent_columns: original_indent_columns.clone(),
|
||||
}),
|
||||
cx,
|
||||
);
|
||||
assert_eq!(
|
||||
buffer.text(),
|
||||
r#"
|
||||
fn a() {
|
||||
if b() {
|
||||
c
|
||||
.d()
|
||||
.e();
|
||||
}
|
||||
}
|
||||
"#
|
||||
.unindent()
|
||||
);
|
||||
|
||||
// Grouping is disabled in tests, so we need 2 undos
|
||||
buffer.undo(cx); // Undo the auto-indent
|
||||
buffer.undo(cx); // Undo the original edit
|
||||
|
||||
// Insert the block at a deeper indent level. The entire block is outdented.
|
||||
buffer.edit(
|
||||
[(Point::new(2, 0)..Point::new(2, 0), " ".repeat(12))],
|
||||
None,
|
||||
cx,
|
||||
);
|
||||
buffer.edit(
|
||||
[(Point::new(2, 12)..Point::new(2, 12), inserted_text)],
|
||||
Some(AutoindentMode::Block {
|
||||
original_indent_columns: Vec::new(),
|
||||
}),
|
||||
cx,
|
||||
);
|
||||
assert_eq!(
|
||||
buffer.text(),
|
||||
r#"
|
||||
fn a() {
|
||||
if b() {
|
||||
c
|
||||
.d()
|
||||
.e();
|
||||
}
|
||||
}
|
||||
"#
|
||||
.unindent()
|
||||
);
|
||||
|
||||
buffer
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
fn test_autoindent_language_without_indents_query(cx: &mut MutableAppContext) {
|
||||
cx.set_global(Settings::test(cx));
|
||||
|
@ -1260,6 +1466,89 @@ fn test_autoindent_query_with_outdent_captures(cx: &mut MutableAppContext) {
|
|||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
fn test_language_config_at(cx: &mut MutableAppContext) {
|
||||
cx.set_global(Settings::test(cx));
|
||||
cx.add_model(|cx| {
|
||||
let language = Language::new(
|
||||
LanguageConfig {
|
||||
name: "JavaScript".into(),
|
||||
line_comment: Some("// ".into()),
|
||||
brackets: vec![
|
||||
BracketPair {
|
||||
start: "{".into(),
|
||||
end: "}".into(),
|
||||
close: true,
|
||||
newline: false,
|
||||
},
|
||||
BracketPair {
|
||||
start: "'".into(),
|
||||
end: "'".into(),
|
||||
close: true,
|
||||
newline: false,
|
||||
},
|
||||
],
|
||||
overrides: [
|
||||
(
|
||||
"element".into(),
|
||||
LanguageConfigOverride {
|
||||
line_comment: Override::Remove { remove: true },
|
||||
block_comment: Override::Set(("{/*".into(), "*/}".into())),
|
||||
..Default::default()
|
||||
},
|
||||
),
|
||||
(
|
||||
"string".into(),
|
||||
LanguageConfigOverride {
|
||||
brackets: Override::Set(vec![BracketPair {
|
||||
start: "{".into(),
|
||||
end: "}".into(),
|
||||
close: true,
|
||||
newline: false,
|
||||
}]),
|
||||
..Default::default()
|
||||
},
|
||||
),
|
||||
]
|
||||
.into_iter()
|
||||
.collect(),
|
||||
..Default::default()
|
||||
},
|
||||
Some(tree_sitter_javascript::language()),
|
||||
)
|
||||
.with_override_query(
|
||||
r#"
|
||||
(jsx_element) @element
|
||||
(string) @string
|
||||
"#,
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
let text = r#"a["b"] = <C d="e"></C>;"#;
|
||||
|
||||
let buffer = Buffer::new(0, text, cx).with_language(Arc::new(language), cx);
|
||||
let snapshot = buffer.snapshot();
|
||||
|
||||
let config = snapshot.language_scope_at(0).unwrap();
|
||||
assert_eq!(config.line_comment_prefix().unwrap().as_ref(), "// ");
|
||||
assert_eq!(config.brackets().len(), 2);
|
||||
|
||||
let string_config = snapshot.language_scope_at(3).unwrap();
|
||||
assert_eq!(config.line_comment_prefix().unwrap().as_ref(), "// ");
|
||||
assert_eq!(string_config.brackets().len(), 1);
|
||||
|
||||
let element_config = snapshot.language_scope_at(10).unwrap();
|
||||
assert_eq!(element_config.line_comment_prefix(), None);
|
||||
assert_eq!(
|
||||
element_config.block_comment_delimiters(),
|
||||
Some((&"{/*".into(), &"*/}".into()))
|
||||
);
|
||||
assert_eq!(element_config.brackets().len(), 2);
|
||||
|
||||
buffer
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
fn test_serialization(cx: &mut gpui::MutableAppContext) {
|
||||
let mut now = Instant::now();
|
||||
|
@ -1702,7 +1991,3 @@ fn get_tree_sexp(buffer: &ModelHandle<Buffer>, cx: &gpui::TestAppContext) -> Str
|
|||
layers[0].node.to_sexp()
|
||||
})
|
||||
}
|
||||
|
||||
fn empty(point: Point) -> Range<Point> {
|
||||
point..point
|
||||
}
|
||||
|
|
|
@@ -243,6 +243,47 @@ pub struct LanguageConfig {
|
|||
pub line_comment: Option<Arc<str>>,
|
||||
#[serde(default)]
|
||||
pub block_comment: Option<(Arc<str>, Arc<str>)>,
|
||||
#[serde(default)]
|
||||
pub overrides: HashMap<String, LanguageConfigOverride>,
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct LanguageScope {
|
||||
language: Arc<Language>,
|
||||
override_id: Option<u32>,
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Default, Debug)]
|
||||
pub struct LanguageConfigOverride {
|
||||
#[serde(default)]
|
||||
pub line_comment: Override<Arc<str>>,
|
||||
#[serde(default)]
|
||||
pub block_comment: Override<(Arc<str>, Arc<str>)>,
|
||||
#[serde(default)]
|
||||
pub brackets: Override<Vec<BracketPair>>,
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Debug)]
|
||||
#[serde(untagged)]
|
||||
pub enum Override<T> {
|
||||
Remove { remove: bool },
|
||||
Set(T),
|
||||
}
|
||||
|
||||
impl<T> Default for Override<T> {
|
||||
fn default() -> Self {
|
||||
Override::Remove { remove: false }
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> Override<T> {
|
||||
fn as_option<'a>(this: Option<&'a Self>, original: Option<&'a T>) -> Option<&'a T> {
|
||||
match this {
|
||||
Some(Self::Set(value)) => Some(value),
|
||||
Some(Self::Remove { remove: true }) => None,
|
||||
Some(Self::Remove { remove: false }) | None => original,
|
||||
}
|
||||
}
|
||||
}
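// Illustrative sketch (not part of the original change) of how
// `Override::as_option` resolves a scope-specific override against the base
// config value. The values are made up; `as_option` is private to this module.
#[test]
fn override_as_option_sketch() {
    let base = Some(1u32);
    // An explicit override replaces the base value.
    assert_eq!(
        Override::as_option(Some(&Override::Set(2)), base.as_ref()),
        Some(&2)
    );
    // `remove: true` suppresses the base value entirely.
    assert_eq!(
        Override::as_option(Some(&Override::Remove { remove: true }), base.as_ref()),
        None
    );
    // No override (or `remove: false`) falls back to the base value.
    assert_eq!(Override::as_option(None, base.as_ref()), Some(&1));
}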
|
||||
|
||||
impl Default for LanguageConfig {
|
||||
|
@ -257,6 +298,7 @@ impl Default for LanguageConfig {
|
|||
autoclose_before: Default::default(),
|
||||
line_comment: Default::default(),
|
||||
block_comment: Default::default(),
|
||||
overrides: Default::default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -306,11 +348,13 @@ pub struct Language {
|
|||
pub struct Grammar {
|
||||
id: usize,
|
||||
pub(crate) ts_language: tree_sitter::Language,
|
||||
pub(crate) error_query: Query,
|
||||
pub(crate) highlights_query: Option<Query>,
|
||||
pub(crate) brackets_config: Option<BracketConfig>,
|
||||
pub(crate) indents_config: Option<IndentConfig>,
|
||||
pub(crate) outline_config: Option<OutlineConfig>,
|
||||
pub(crate) injection_config: Option<InjectionConfig>,
|
||||
pub(crate) override_config: Option<OverrideConfig>,
|
||||
pub(crate) highlight_map: Mutex<HighlightMap>,
|
||||
}
|
||||
|
||||
|
@ -336,6 +380,11 @@ struct InjectionConfig {
|
|||
patterns: Vec<InjectionPatternConfig>,
|
||||
}
|
||||
|
||||
struct OverrideConfig {
|
||||
query: Query,
|
||||
values: HashMap<u32, LanguageConfigOverride>,
|
||||
}
|
||||
|
||||
#[derive(Default, Clone)]
|
||||
struct InjectionPatternConfig {
|
||||
language: Option<Box<str>>,
|
||||
|
@ -635,6 +684,8 @@ impl Language {
|
|||
outline_config: None,
|
||||
indents_config: None,
|
||||
injection_config: None,
|
||||
override_config: None,
|
||||
error_query: Query::new(ts_language, "(ERROR) @error").unwrap(),
|
||||
ts_language,
|
||||
highlight_map: Default::default(),
|
||||
})
|
||||
|
@ -775,6 +826,34 @@ impl Language {
|
|||
Ok(self)
|
||||
}
|
||||
|
||||
pub fn with_override_query(mut self, source: &str) -> Result<Self> {
|
||||
let query = Query::new(self.grammar_mut().ts_language, source)?;
|
||||
|
||||
let mut values = HashMap::default();
|
||||
for (ix, name) in query.capture_names().iter().enumerate() {
|
||||
if !name.starts_with('_') {
|
||||
let value = self.config.overrides.remove(name).ok_or_else(|| {
|
||||
anyhow!(
|
||||
"language {:?} has override in query but not in config: {name:?}",
|
||||
self.config.name
|
||||
)
|
||||
})?;
|
||||
values.insert(ix as u32, value);
|
||||
}
|
||||
}
|
||||
|
||||
if !self.config.overrides.is_empty() {
|
||||
let keys = self.config.overrides.keys().collect::<Vec<_>>();
|
||||
Err(anyhow!(
|
||||
"language {:?} has overrides in config not in query: {keys:?}",
|
||||
self.config.name
|
||||
))?;
|
||||
}
|
||||
|
||||
self.grammar_mut().override_config = Some(OverrideConfig { query, values });
|
||||
Ok(self)
|
||||
}
|
||||
|
||||
fn grammar_mut(&mut self) -> &mut Grammar {
|
||||
Arc::get_mut(self.grammar.as_mut().unwrap()).unwrap()
|
||||
}
|
||||
|
@ -800,17 +879,6 @@ impl Language {
|
|||
self.config.name.clone()
|
||||
}
|
||||
|
||||
pub fn line_comment_prefix(&self) -> Option<&Arc<str>> {
|
||||
self.config.line_comment.as_ref()
|
||||
}
|
||||
|
||||
pub fn block_comment_delimiters(&self) -> Option<(&Arc<str>, &Arc<str>)> {
|
||||
self.config
|
||||
.block_comment
|
||||
.as_ref()
|
||||
.map(|(start, end)| (start, end))
|
||||
}
|
||||
|
||||
pub async fn disk_based_diagnostic_sources(&self) -> &[String] {
|
||||
match self.adapter.as_ref() {
|
||||
Some(adapter) => &adapter.disk_based_diagnostic_sources,
|
||||
|
@ -886,10 +954,6 @@ impl Language {
|
|||
result
|
||||
}
|
||||
|
||||
pub fn brackets(&self) -> &[BracketPair] {
|
||||
&self.config.brackets
|
||||
}
|
||||
|
||||
pub fn path_suffixes(&self) -> &[String] {
|
||||
&self.config.path_suffixes
|
||||
}
|
||||
|
@ -912,6 +976,42 @@ impl Language {
|
|||
}
|
||||
}
|
||||
|
||||
impl LanguageScope {
|
||||
pub fn line_comment_prefix(&self) -> Option<&Arc<str>> {
|
||||
Override::as_option(
|
||||
self.config_override().map(|o| &o.line_comment),
|
||||
self.language.config.line_comment.as_ref(),
|
||||
)
|
||||
}
|
||||
|
||||
pub fn block_comment_delimiters(&self) -> Option<(&Arc<str>, &Arc<str>)> {
|
||||
Override::as_option(
|
||||
self.config_override().map(|o| &o.block_comment),
|
||||
self.language.config.block_comment.as_ref(),
|
||||
)
|
||||
.map(|e| (&e.0, &e.1))
|
||||
}
|
||||
|
||||
pub fn brackets(&self) -> &[BracketPair] {
|
||||
Override::as_option(
|
||||
self.config_override().map(|o| &o.brackets),
|
||||
Some(&self.language.config.brackets),
|
||||
)
|
||||
.map_or(&[], Vec::as_slice)
|
||||
}
|
||||
|
||||
pub fn should_autoclose_before(&self, c: char) -> bool {
|
||||
c.is_whitespace() || self.language.config.autoclose_before.contains(c)
|
||||
}
|
||||
|
||||
fn config_override(&self) -> Option<&LanguageConfigOverride> {
|
||||
let id = self.override_id?;
|
||||
let grammar = self.language.grammar.as_ref()?;
|
||||
let override_config = grammar.override_config.as_ref()?;
|
||||
override_config.values.get(&id)
|
||||
}
|
||||
}
|
||||
|
||||
impl Hash for Language {
|
||||
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
|
||||
self.id().hash(state)
|
||||
|
|
|
@ -11,6 +11,15 @@ use text::*;
|
|||
|
||||
pub use proto::{BufferState, Operation};
|
||||
|
||||
pub fn serialize_fingerprint(fingerprint: RopeFingerprint) -> String {
|
||||
fingerprint.to_hex()
|
||||
}
|
||||
|
||||
pub fn deserialize_fingerprint(fingerprint: &str) -> Result<RopeFingerprint> {
|
||||
RopeFingerprint::from_hex(fingerprint)
|
||||
.map_err(|error| anyhow!("invalid fingerprint: {}", error))
|
||||
}
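// Illustrative round-trip for the two helpers above (not part of the original
// change). It relies only on APIs visible in this diff and assumes `Rope` is
// re-exported here alongside `RopeFingerprint`.
#[test]
fn fingerprint_roundtrip_sketch() -> Result<()> {
    let fingerprint = Rope::default().fingerprint();
    let hex = serialize_fingerprint(fingerprint.clone());
    assert_eq!(deserialize_fingerprint(&hex)?, fingerprint);
    Ok(())
}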
|
||||
|
||||
pub fn deserialize_line_ending(message: proto::LineEnding) -> fs::LineEnding {
|
||||
match message {
|
||||
proto::LineEnding::Unix => fs::LineEnding::Unix,
|
||||
|
|
|
@ -1127,6 +1127,41 @@ fn splice_included_ranges(
|
|||
ranges
|
||||
}
|
||||
|
||||
impl<'a> SyntaxLayerInfo<'a> {
|
||||
pub(crate) fn override_id(&self, offset: usize, text: &text::BufferSnapshot) -> Option<u32> {
|
||||
let text = TextProvider(text.as_rope());
|
||||
let config = self.language.grammar.as_ref()?.override_config.as_ref()?;
|
||||
|
||||
let mut query_cursor = QueryCursorHandle::new();
|
||||
query_cursor.set_byte_range(offset..offset);
|
||||
|
||||
let mut smallest_match: Option<(u32, Range<usize>)> = None;
|
||||
for mat in query_cursor.matches(&config.query, self.node, text) {
|
||||
for capture in mat.captures {
|
||||
if !config.values.contains_key(&capture.index) {
|
||||
continue;
|
||||
}
|
||||
|
||||
let range = capture.node.byte_range();
|
||||
if offset <= range.start || offset >= range.end {
|
||||
continue;
|
||||
}
|
||||
|
||||
if let Some((_, smallest_range)) = &smallest_match {
|
||||
if range.len() < smallest_range.len() {
|
||||
smallest_match = Some((capture.index, range))
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
smallest_match = Some((capture.index, range));
|
||||
}
|
||||
}
|
||||
|
||||
smallest_match.map(|(index, _)| index)
|
||||
}
|
||||
}
|
||||
|
||||
impl std::ops::Deref for SyntaxMap {
|
||||
type Target = SyntaxSnapshot;
|
||||
|
||||
|
@ -2227,7 +2262,7 @@ mod tests {
|
|||
mutated_syntax_map.reparse(language.clone(), &buffer);
|
||||
|
||||
for (i, marked_string) in steps.into_iter().enumerate() {
|
||||
edit_buffer(&mut buffer, &marked_string.unindent());
|
||||
buffer.edit_via_marked_text(&marked_string.unindent());
|
||||
|
||||
// Reparse the syntax map
|
||||
mutated_syntax_map.interpolate(&buffer);
|
||||
|
@ -2417,52 +2452,6 @@ mod tests {
|
|||
assert_eq!(actual_ranges, expected_ranges);
|
||||
}
|
||||
|
||||
fn edit_buffer(buffer: &mut Buffer, marked_string: &str) {
|
||||
let old_text = buffer.text();
|
||||
let (new_text, mut ranges) = marked_text_ranges(marked_string, false);
|
||||
if ranges.is_empty() {
|
||||
ranges.push(0..new_text.len());
|
||||
}
|
||||
|
||||
assert_eq!(
|
||||
old_text[..ranges[0].start],
|
||||
new_text[..ranges[0].start],
|
||||
"invalid edit"
|
||||
);
|
||||
|
||||
let mut delta = 0;
|
||||
let mut edits = Vec::new();
|
||||
let mut ranges = ranges.into_iter().peekable();
|
||||
|
||||
while let Some(inserted_range) = ranges.next() {
|
||||
let new_start = inserted_range.start;
|
||||
let old_start = (new_start as isize - delta) as usize;
|
||||
|
||||
let following_text = if let Some(next_range) = ranges.peek() {
|
||||
&new_text[inserted_range.end..next_range.start]
|
||||
} else {
|
||||
&new_text[inserted_range.end..]
|
||||
};
|
||||
|
||||
let inserted_len = inserted_range.len();
|
||||
let deleted_len = old_text[old_start..]
|
||||
.find(following_text)
|
||||
.expect("invalid edit");
|
||||
|
||||
let old_range = old_start..old_start + deleted_len;
|
||||
edits.push((old_range, new_text[inserted_range].to_string()));
|
||||
delta += inserted_len as isize - deleted_len as isize;
|
||||
}
|
||||
|
||||
assert_eq!(
|
||||
old_text.len() as isize + delta,
|
||||
new_text.len() as isize,
|
||||
"invalid edit"
|
||||
);
|
||||
|
||||
buffer.edit(edits);
|
||||
}
|
||||
|
||||
pub fn string_contains_sequence(text: &str, parts: &[&str]) -> bool {
|
||||
let mut last_part_end = 0;
|
||||
for part in parts {
|
||||
|
|
|
@ -22,8 +22,8 @@ use gpui::{
|
|||
use language::{
|
||||
point_to_lsp,
|
||||
proto::{
|
||||
deserialize_anchor, deserialize_line_ending, deserialize_version, serialize_anchor,
|
||||
serialize_version,
|
||||
deserialize_anchor, deserialize_fingerprint, deserialize_line_ending, deserialize_version,
|
||||
serialize_anchor, serialize_version,
|
||||
},
|
||||
range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CachedLspAdapter, CharKind, CodeAction,
|
||||
CodeLabel, Completion, Diagnostic, DiagnosticEntry, DiagnosticSet, Event as BufferEvent,
|
||||
|
@ -67,8 +67,9 @@ use util::{debug_panic, defer, post_inc, ResultExt, TryFutureExt as _};
|
|||
pub use fs::*;
|
||||
pub use worktree::*;
|
||||
|
||||
pub trait Item: Entity {
|
||||
pub trait Item {
|
||||
fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
|
||||
fn project_path(&self, cx: &AppContext) -> Option<ProjectPath>;
|
||||
}
|
||||
|
||||
// Language server state is stored across 3 collections:
|
||||
|
@ -2081,6 +2082,7 @@ impl Project {
|
|||
.buffer_snapshots
|
||||
.entry(buffer.remote_id())
|
||||
.or_insert_with(|| vec![(0, buffer.text_snapshot())]);
|
||||
|
||||
let (version, initial_snapshot) = versions.last().unwrap();
|
||||
let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
|
||||
language_server
|
||||
|
@ -2617,6 +2619,7 @@ impl Project {
|
|||
worktree_id: worktree.read(cx).id(),
|
||||
path: relative_path.into(),
|
||||
};
|
||||
|
||||
if let Some(buffer) = self.get_open_buffer(&project_path, cx) {
|
||||
self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
|
||||
}
|
||||
|
@ -5121,7 +5124,7 @@ impl Project {
|
|||
buffer_id,
|
||||
version: serialize_version(&saved_version),
|
||||
mtime: Some(mtime.into()),
|
||||
fingerprint,
|
||||
fingerprint: language::proto::serialize_fingerprint(fingerprint),
|
||||
})
|
||||
}
|
||||
|
||||
|
@ -5207,6 +5210,21 @@ impl Project {
|
|||
})
|
||||
.log_err();
|
||||
|
||||
client
|
||||
.send(proto::BufferReloaded {
|
||||
project_id,
|
||||
buffer_id,
|
||||
version: language::proto::serialize_version(buffer.saved_version()),
|
||||
mtime: Some(buffer.saved_mtime().into()),
|
||||
fingerprint: language::proto::serialize_fingerprint(
|
||||
buffer.saved_version_fingerprint(),
|
||||
),
|
||||
line_ending: language::proto::serialize_line_ending(
|
||||
buffer.line_ending(),
|
||||
) as i32,
|
||||
})
|
||||
.log_err();
|
||||
|
||||
cx.background()
|
||||
.spawn(
|
||||
async move {
|
||||
|
@ -5955,6 +5973,7 @@ impl Project {
|
|||
_: Arc<Client>,
|
||||
mut cx: AsyncAppContext,
|
||||
) -> Result<()> {
|
||||
let fingerprint = deserialize_fingerprint(&envelope.payload.fingerprint)?;
|
||||
let version = deserialize_version(envelope.payload.version);
|
||||
let mtime = envelope
|
||||
.payload
|
||||
|
@ -5969,7 +5988,7 @@ impl Project {
|
|||
.and_then(|buffer| buffer.upgrade(cx));
|
||||
if let Some(buffer) = buffer {
|
||||
buffer.update(cx, |buffer, cx| {
|
||||
buffer.did_save(version, envelope.payload.fingerprint, mtime, None, cx);
|
||||
buffer.did_save(version, fingerprint, mtime, None, cx);
|
||||
});
|
||||
}
|
||||
Ok(())
|
||||
|
@ -5984,6 +6003,7 @@ impl Project {
|
|||
) -> Result<()> {
|
||||
let payload = envelope.payload;
|
||||
let version = deserialize_version(payload.version);
|
||||
let fingerprint = deserialize_fingerprint(&payload.fingerprint)?;
|
||||
let line_ending = deserialize_line_ending(
|
||||
proto::LineEnding::from_i32(payload.line_ending)
|
||||
.ok_or_else(|| anyhow!("missing line ending"))?,
|
||||
|
@ -5999,7 +6019,7 @@ impl Project {
|
|||
.and_then(|buffer| buffer.upgrade(cx));
|
||||
if let Some(buffer) = buffer {
|
||||
buffer.update(cx, |buffer, cx| {
|
||||
buffer.did_reload(version, payload.fingerprint, line_ending, mtime, cx);
|
||||
buffer.did_reload(version, fingerprint, line_ending, mtime, cx);
|
||||
});
|
||||
}
|
||||
Ok(())
|
||||
|
@ -6124,25 +6144,20 @@ impl Project {
|
|||
.buffer_snapshots
|
||||
.get_mut(&buffer_id)
|
||||
.ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
|
||||
let mut found_snapshot = None;
|
||||
snapshots.retain(|(snapshot_version, snapshot)| {
|
||||
if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
|
||||
false
|
||||
} else {
|
||||
if *snapshot_version == version {
|
||||
found_snapshot = Some(snapshot.clone());
|
||||
}
|
||||
true
|
||||
}
|
||||
let found_snapshot = snapshots
|
||||
.binary_search_by_key(&version, |e| e.0)
|
||||
.map(|ix| snapshots[ix].1.clone())
|
||||
.map_err(|_| {
|
||||
anyhow!(
|
||||
"snapshot not found for buffer {} at version {}",
|
||||
buffer_id,
|
||||
version
|
||||
)
|
||||
})?;
|
||||
snapshots.retain(|(snapshot_version, _)| {
|
||||
snapshot_version + OLD_VERSIONS_TO_RETAIN >= version
|
||||
});
|
||||
|
||||
found_snapshot.ok_or_else(|| {
|
||||
anyhow!(
|
||||
"snapshot not found for buffer {} at version {}",
|
||||
buffer_id,
|
||||
version
|
||||
)
|
||||
})
|
||||
Ok(found_snapshot)
|
||||
} else {
|
||||
Ok((buffer.read(cx)).text_snapshot())
|
||||
}
|
||||
|
@ -6391,4 +6406,11 @@ impl Item for Buffer {
|
|||
fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
|
||||
File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
|
||||
}
|
||||
|
||||
fn project_path(&self, cx: &AppContext) -> Option<ProjectPath> {
|
||||
File::from_dyn(self.file()).map(|file| ProjectPath {
|
||||
worktree_id: file.worktree_id(cx),
|
||||
path: file.path().clone(),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
|
|
@ -806,6 +806,55 @@ async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppC
|
|||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
|
||||
cx.foreground().forbid_parking();
|
||||
|
||||
let mut language = Language::new(
|
||||
LanguageConfig {
|
||||
path_suffixes: vec!["rs".to_string()],
|
||||
..Default::default()
|
||||
},
|
||||
None,
|
||||
);
|
||||
let mut fake_servers = language
|
||||
.set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
|
||||
name: "the-lsp",
|
||||
..Default::default()
|
||||
}))
|
||||
.await;
|
||||
|
||||
let fs = FakeFs::new(cx.background());
|
||||
fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
|
||||
|
||||
let project = Project::test(fs, ["/dir".as_ref()], cx).await;
|
||||
project.update(cx, |project, _| project.languages.add(Arc::new(language)));
|
||||
|
||||
let buffer = project
|
||||
.update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
// Before restarting the server, report diagnostics with an unknown buffer version.
|
||||
let fake_server = fake_servers.next().await.unwrap();
|
||||
fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
|
||||
uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
|
||||
version: Some(10000),
|
||||
diagnostics: Vec::new(),
|
||||
});
|
||||
cx.foreground().run_until_parked();
|
||||
|
||||
project.update(cx, |project, cx| {
|
||||
project.restart_language_servers_for_buffers([buffer.clone()], cx);
|
||||
});
|
||||
let mut fake_server = fake_servers.next().await.unwrap();
|
||||
let notification = fake_server
|
||||
.receive_notification::<lsp::notification::DidOpenTextDocument>()
|
||||
.await
|
||||
.text_document;
|
||||
assert_eq!(notification.version, 0);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_toggling_enable_language_server(
|
||||
deterministic: Arc<Deterministic>,
|
||||
|
|
|
@ -20,10 +20,12 @@ use gpui::{
|
|||
executor, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext,
|
||||
Task,
|
||||
};
|
||||
use language::Unclipped;
|
||||
use language::{
|
||||
proto::{deserialize_version, serialize_line_ending, serialize_version},
|
||||
Buffer, DiagnosticEntry, PointUtf16, Rope,
|
||||
proto::{
|
||||
deserialize_fingerprint, deserialize_version, serialize_fingerprint, serialize_line_ending,
|
||||
serialize_version,
|
||||
},
|
||||
Buffer, DiagnosticEntry, PointUtf16, Rope, RopeFingerprint, Unclipped,
|
||||
};
|
||||
use parking_lot::Mutex;
|
||||
use postage::{
|
||||
|
@ -1863,7 +1865,7 @@ impl language::File for File {
|
|||
version: clock::Global,
|
||||
line_ending: LineEnding,
|
||||
cx: &mut MutableAppContext,
|
||||
) -> Task<Result<(clock::Global, String, SystemTime)>> {
|
||||
) -> Task<Result<(clock::Global, RopeFingerprint, SystemTime)>> {
|
||||
self.worktree.update(cx, |worktree, cx| match worktree {
|
||||
Worktree::Local(worktree) => {
|
||||
let rpc = worktree.client.clone();
|
||||
|
@ -1878,7 +1880,7 @@ impl language::File for File {
|
|||
buffer_id,
|
||||
version: serialize_version(&version),
|
||||
mtime: Some(entry.mtime.into()),
|
||||
fingerprint: fingerprint.clone(),
|
||||
fingerprint: serialize_fingerprint(fingerprint),
|
||||
})?;
|
||||
}
|
||||
Ok((version, fingerprint, entry.mtime))
|
||||
|
@ -1896,11 +1898,12 @@ impl language::File for File {
|
|||
})
|
||||
.await?;
|
||||
let version = deserialize_version(response.version);
|
||||
let fingerprint = deserialize_fingerprint(&response.fingerprint)?;
|
||||
let mtime = response
|
||||
.mtime
|
||||
.ok_or_else(|| anyhow!("missing mtime"))?
|
||||
.into();
|
||||
Ok((version, response.fingerprint, mtime))
|
||||
Ok((version, fingerprint, mtime))
|
||||
})
|
||||
}
|
||||
})
|
||||
|
@ -1943,7 +1946,7 @@ impl language::LocalFile for File {
|
|||
&self,
|
||||
buffer_id: u64,
|
||||
version: &clock::Global,
|
||||
fingerprint: String,
|
||||
fingerprint: RopeFingerprint,
|
||||
line_ending: LineEnding,
|
||||
mtime: SystemTime,
|
||||
cx: &mut MutableAppContext,
|
||||
|
@ -1957,7 +1960,7 @@ impl language::LocalFile for File {
|
|||
buffer_id,
|
||||
version: serialize_version(version),
|
||||
mtime: Some(mtime.into()),
|
||||
fingerprint,
|
||||
fingerprint: serialize_fingerprint(fingerprint),
|
||||
line_ending: serialize_line_ending(line_ending) as i32,
|
||||
})
|
||||
.log_err();
|
||||
|
|
|
@ -763,7 +763,6 @@ impl ProjectPanel {
|
|||
ix += 1;
|
||||
}
|
||||
|
||||
self.clipboard_entry.take();
|
||||
if clipboard_entry.is_cut() {
|
||||
if let Some(task) = self.project.update(cx, |project, cx| {
|
||||
project.rename_entry(clipboard_entry.entry_id(), new_path, cx)
|
||||
|
@ -1176,13 +1175,15 @@ impl ProjectPanel {
|
|||
)
|
||||
})
|
||||
.on_click(MouseButton::Left, move |e, cx| {
|
||||
if kind == EntryKind::Dir {
|
||||
cx.dispatch_action(ToggleExpanded(entry_id))
|
||||
} else {
|
||||
cx.dispatch_action(Open {
|
||||
entry_id,
|
||||
change_focus: e.click_count > 1,
|
||||
})
|
||||
if !show_editor {
|
||||
if kind == EntryKind::Dir {
|
||||
cx.dispatch_action(ToggleExpanded(entry_id))
|
||||
} else {
|
||||
cx.dispatch_action(Open {
|
||||
entry_id,
|
||||
change_focus: e.click_count > 1,
|
||||
})
|
||||
}
|
||||
}
|
||||
})
|
||||
.on_down(MouseButton::Right, move |e, cx| {
|
||||
|
|
|
@ -7,7 +7,7 @@ edition = "2021"
|
|||
path = "src/rope.rs"
|
||||
|
||||
[dependencies]
|
||||
bromberg_sl2 = "0.6"
|
||||
bromberg_sl2 = { git = "https://github.com/zed-industries/bromberg_sl2", rev = "dac565a90e8f9245f48ff46225c915dc50f76920" }
|
||||
smallvec = { version = "1.6", features = ["union"] }
|
||||
sum_tree = { path = "../sum_tree" }
|
||||
arrayvec = "0.7.1"
|
||||
|
|
|
@ -4,7 +4,7 @@ mod point_utf16;
|
|||
mod unclipped;
|
||||
|
||||
use arrayvec::ArrayString;
|
||||
use bromberg_sl2::{DigestString, HashMatrix};
|
||||
use bromberg_sl2::HashMatrix;
|
||||
use smallvec::SmallVec;
|
||||
use std::{
|
||||
cmp, fmt, io, mem,
|
||||
|
@ -25,6 +25,12 @@ const CHUNK_BASE: usize = 6;
|
|||
#[cfg(not(test))]
|
||||
const CHUNK_BASE: usize = 16;
|
||||
|
||||
/// Type alias to [HashMatrix], an implementation of a homomorphic hash function. Two [Rope] instances
|
||||
/// containing the same text will produce the same fingerprint. This hash function is special in that
|
||||
/// it allows us to hash individual chunks and aggregate them up the [Rope]'s tree, with the resulting
|
||||
/// hash being equivalent to hashing all the text contained in the [Rope] at once.
|
||||
pub type RopeFingerprint = HashMatrix;
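// Sketch of the property described above (not part of the original change):
// equal text yields equal fingerprints regardless of how it was pushed in.
// Assumes `Rope::push(&str)`, which this crate provides but this hunk does not show.
#[test]
fn fingerprint_ignores_chunking_sketch() {
    let mut a = Rope::default();
    a.push("hello ");
    a.push("world");

    let mut b = Rope::default();
    b.push("hello world");

    // Same text, different push sequence: the fingerprints still agree.
    assert_eq!(a.fingerprint(), b.fingerprint());
}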
|
||||
|
||||
#[derive(Clone, Default, Debug)]
|
||||
pub struct Rope {
|
||||
chunks: SumTree<Chunk>,
|
||||
|
@ -361,8 +367,8 @@ impl Rope {
|
|||
.column
|
||||
}
|
||||
|
||||
pub fn fingerprint(&self) -> String {
|
||||
self.chunks.summary().fingerprint.to_hex()
|
||||
pub fn fingerprint(&self) -> RopeFingerprint {
|
||||
self.chunks.summary().fingerprint
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -856,7 +862,7 @@ impl sum_tree::Item for Chunk {
|
|||
#[derive(Clone, Debug, Default, Eq, PartialEq)]
|
||||
pub struct ChunkSummary {
|
||||
text: TextSummary,
|
||||
fingerprint: HashMatrix,
|
||||
fingerprint: RopeFingerprint,
|
||||
}
|
||||
|
||||
impl<'a> From<&'a str> for ChunkSummary {
|
||||
|
|
|
@ -975,6 +975,9 @@ message BufferState {
|
|||
string base_text = 3;
|
||||
optional string diff_base = 4;
|
||||
LineEnding line_ending = 5;
|
||||
repeated VectorClockEntry saved_version = 6;
|
||||
string saved_version_fingerprint = 7;
|
||||
Timestamp saved_mtime = 8;
|
||||
}
|
||||
|
||||
message BufferChunk {
|
||||
|
|
|
@ -6,4 +6,4 @@ pub use conn::Connection;
|
|||
pub use peer::*;
|
||||
mod macros;
|
||||
|
||||
pub const PROTOCOL_VERSION: u32 = 44;
|
||||
pub const PROTOCOL_VERSION: u32 = 46;
|
||||
|
|
|
@ -19,10 +19,12 @@ theme = { path = "../theme" }
|
|||
util = { path = "../util" }
|
||||
workspace = { path = "../workspace" }
|
||||
anyhow = "1.0"
|
||||
futures = "0.3"
|
||||
log = { version = "0.4.16", features = ["kv_unstable_serde"] }
|
||||
postage = { version = "0.4.1", features = ["futures-traits"] }
|
||||
serde = { version = "1.0", features = ["derive", "rc"] }
|
||||
smallvec = { version = "1.6", features = ["union"] }
|
||||
smol = "1.2"
|
||||
|
||||
[dev-dependencies]
|
||||
editor = { path = "../editor", features = ["test-support"] }
|
||||
|
|
|
@ -7,6 +7,7 @@ use editor::{
|
|||
items::active_match_index, scroll::autoscroll::Autoscroll, Anchor, Editor, MultiBuffer,
|
||||
SelectAll, MAX_TAB_TITLE_LEN,
|
||||
};
|
||||
use futures::StreamExt;
|
||||
use gpui::{
|
||||
actions, elements::*, platform::CursorStyle, Action, AnyViewHandle, AppContext, ElementBox,
|
||||
Entity, ModelContext, ModelHandle, MouseButton, MutableAppContext, RenderContext, Subscription,
|
||||
|
@ -15,9 +16,9 @@ use gpui::{
|
|||
use menu::Confirm;
|
||||
use project::{search::SearchQuery, Project};
|
||||
use settings::Settings;
|
||||
use smallvec::SmallVec;
|
||||
use std::{
|
||||
any::{Any, TypeId},
|
||||
mem,
|
||||
ops::Range,
|
||||
path::PathBuf,
|
||||
sync::Arc,
|
||||
|
@ -67,6 +68,7 @@ struct ProjectSearch {
|
|||
pending_search: Option<Task<Option<()>>>,
|
||||
match_ranges: Vec<Range<Anchor>>,
|
||||
active_query: Option<SearchQuery>,
|
||||
search_id: usize,
|
||||
}
|
||||
|
||||
pub struct ProjectSearchView {
|
||||
|
@ -78,6 +80,7 @@ pub struct ProjectSearchView {
|
|||
regex: bool,
|
||||
query_contains_error: bool,
|
||||
active_match_index: Option<usize>,
|
||||
search_id: usize,
|
||||
}
|
||||
|
||||
pub struct ProjectSearchBar {
|
||||
|
@ -98,6 +101,7 @@ impl ProjectSearch {
|
|||
pending_search: Default::default(),
|
||||
match_ranges: Default::default(),
|
||||
active_query: None,
|
||||
search_id: 0,
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -110,6 +114,7 @@ impl ProjectSearch {
|
|||
pending_search: Default::default(),
|
||||
match_ranges: self.match_ranges.clone(),
|
||||
active_query: self.active_query.clone(),
|
||||
search_id: self.search_id,
|
||||
})
|
||||
}
|
||||
|
||||
|
@ -117,32 +122,37 @@ impl ProjectSearch {
|
|||
let search = self
|
||||
.project
|
||||
.update(cx, |project, cx| project.search(query.clone(), cx));
|
||||
self.search_id += 1;
|
||||
self.active_query = Some(query);
|
||||
self.match_ranges.clear();
|
||||
self.pending_search = Some(cx.spawn_weak(|this, mut cx| async move {
|
||||
let matches = search.await.log_err()?;
|
||||
if let Some(this) = this.upgrade(&cx) {
|
||||
let this = this.upgrade(&cx)?;
|
||||
let mut matches = matches.into_iter().collect::<Vec<_>>();
|
||||
let (_task, mut match_ranges) = this.update(&mut cx, |this, cx| {
|
||||
this.match_ranges.clear();
|
||||
matches.sort_by_key(|(buffer, _)| buffer.read(cx).file().map(|file| file.path()));
|
||||
this.excerpts.update(cx, |excerpts, cx| {
|
||||
excerpts.clear(cx);
|
||||
excerpts.stream_excerpts_with_context_lines(matches, 1, cx)
|
||||
})
|
||||
});
|
||||
|
||||
while let Some(match_range) = match_ranges.next().await {
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.match_ranges.clear();
|
||||
let mut matches = matches.into_iter().collect::<Vec<_>>();
|
||||
matches
|
||||
.sort_by_key(|(buffer, _)| buffer.read(cx).file().map(|file| file.path()));
|
||||
this.excerpts.update(cx, |excerpts, cx| {
|
||||
excerpts.clear(cx);
|
||||
for (buffer, buffer_matches) in matches {
|
||||
let ranges_to_highlight = excerpts.push_excerpts_with_context_lines(
|
||||
buffer,
|
||||
buffer_matches.clone(),
|
||||
1,
|
||||
cx,
|
||||
);
|
||||
this.match_ranges.extend(ranges_to_highlight);
|
||||
}
|
||||
});
|
||||
this.pending_search.take();
|
||||
this.match_ranges.push(match_range);
|
||||
while let Ok(Some(match_range)) = match_ranges.try_next() {
|
||||
this.match_ranges.push(match_range);
|
||||
}
|
||||
cx.notify();
|
||||
});
|
||||
}
|
||||
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.pending_search.take();
|
||||
cx.notify();
|
||||
});
|
||||
|
||||
None
|
||||
}));
|
||||
cx.notify();
|
||||
|
@ -264,12 +274,8 @@ impl Item for ProjectSearchView {
|
|||
.boxed()
|
||||
}
|
||||
|
||||
fn project_path(&self, _: &gpui::AppContext) -> Option<project::ProjectPath> {
|
||||
None
|
||||
}
|
||||
|
||||
fn project_entry_ids(&self, cx: &AppContext) -> SmallVec<[project::ProjectEntryId; 3]> {
|
||||
self.results_editor.project_entry_ids(cx)
|
||||
fn for_each_project_item(&self, cx: &AppContext, f: &mut dyn FnMut(usize, &dyn project::Item)) {
|
||||
self.results_editor.for_each_project_item(cx, f)
|
||||
}
|
||||
|
||||
fn is_singleton(&self, _: &AppContext) -> bool {
|
||||
|
@ -398,7 +404,7 @@ impl ProjectSearchView {
|
|||
whole_word = active_query.whole_word();
|
||||
}
|
||||
}
|
||||
cx.observe(&model, |this, _, cx| this.model_changed(true, cx))
|
||||
cx.observe(&model, |this, _, cx| this.model_changed(cx))
|
||||
.detach();
|
||||
|
||||
let query_editor = cx.add_view(|cx| {
|
||||
|
@ -433,6 +439,7 @@ impl ProjectSearchView {
|
|||
.detach();
|
||||
|
||||
let mut this = ProjectSearchView {
|
||||
search_id: model.read(cx).search_id,
|
||||
model,
|
||||
query_editor,
|
||||
results_editor,
|
||||
|
@ -442,7 +449,7 @@ impl ProjectSearchView {
|
|||
query_contains_error: false,
|
||||
active_match_index: None,
|
||||
};
|
||||
this.model_changed(false, cx);
|
||||
this.model_changed(cx);
|
||||
this
|
||||
}
|
||||
|
||||
|
@ -562,11 +569,13 @@ impl ProjectSearchView {
|
|||
cx.focus(&self.results_editor);
|
||||
}
|
||||
|
||||
fn model_changed(&mut self, reset_selections: bool, cx: &mut ViewContext<Self>) {
|
||||
fn model_changed(&mut self, cx: &mut ViewContext<Self>) {
|
||||
let match_ranges = self.model.read(cx).match_ranges.clone();
|
||||
if match_ranges.is_empty() {
|
||||
self.active_match_index = None;
|
||||
} else {
|
||||
let prev_search_id = mem::replace(&mut self.search_id, self.model.read(cx).search_id);
|
||||
let reset_selections = self.search_id != prev_search_id;
|
||||
self.results_editor.update(cx, |editor, cx| {
|
||||
if reset_selections {
|
||||
editor.change_selections(Some(Autoscroll::fit()), cx, |s| {
|
||||
|
@ -940,13 +949,13 @@ impl ToolbarItemView for ProjectSearchBar {
|
|||
mod tests {
|
||||
use super::*;
|
||||
use editor::DisplayPoint;
|
||||
use gpui::{color::Color, TestAppContext};
|
||||
use gpui::{color::Color, executor::Deterministic, TestAppContext};
|
||||
use project::FakeFs;
|
||||
use serde_json::json;
|
||||
use std::sync::Arc;
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_project_search(cx: &mut TestAppContext) {
|
||||
async fn test_project_search(deterministic: Arc<Deterministic>, cx: &mut TestAppContext) {
|
||||
let fonts = cx.font_cache();
|
||||
let mut theme = gpui::fonts::with_font_cache(fonts.clone(), theme::Theme::default);
|
||||
theme.search.match_background = Color::red();
|
||||
|
@ -978,7 +987,7 @@ mod tests {
|
|||
.update(cx, |query_editor, cx| query_editor.set_text("TWO", cx));
|
||||
search_view.search(cx);
|
||||
});
|
||||
search_view.next_notification(cx).await;
|
||||
deterministic.run_until_parked();
|
||||
search_view.update(cx, |search_view, cx| {
|
||||
assert_eq!(
|
||||
search_view
|
||||
|
|
|
@ -28,12 +28,12 @@ pub use keymap_file::{keymap_file_json_schema, KeymapFileContent};
|
|||
#[derive(Clone)]
|
||||
pub struct Settings {
|
||||
pub experiments: FeatureFlags,
|
||||
pub projects_online_by_default: bool,
|
||||
pub buffer_font_family: FamilyId,
|
||||
pub default_buffer_font_size: f32,
|
||||
pub buffer_font_size: f32,
|
||||
pub active_pane_magnification: f32,
|
||||
pub cursor_blink: bool,
|
||||
pub confirm_quit: bool,
|
||||
pub hover_popover_enabled: bool,
|
||||
pub show_completions_on_input: bool,
|
||||
pub vim_mode: bool,
|
||||
|
@ -51,9 +51,26 @@ pub struct Settings {
|
|||
pub language_overrides: HashMap<Arc<str>, EditorSettings>,
|
||||
pub lsp: HashMap<Arc<str>, LspSettings>,
|
||||
pub theme: Arc<Theme>,
|
||||
pub telemetry_defaults: TelemetrySettings,
|
||||
pub telemetry_overrides: TelemetrySettings,
|
||||
pub staff_mode: bool,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, JsonSchema)]
|
||||
pub struct TelemetrySettings {
|
||||
diagnostics: Option<bool>,
|
||||
metrics: Option<bool>,
|
||||
}
|
||||
|
||||
impl TelemetrySettings {
|
||||
pub fn metrics(&self) -> bool {
|
||||
self.metrics.unwrap()
|
||||
}
|
||||
pub fn diagnostics(&self) -> bool {
|
||||
self.diagnostics.unwrap()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, JsonSchema)]
|
||||
pub struct FeatureFlags {
|
||||
pub experimental_themes: bool,
|
||||
|
@ -277,6 +294,8 @@ pub struct SettingsFileContent {
|
|||
#[serde(default)]
|
||||
pub cursor_blink: Option<bool>,
|
||||
#[serde(default)]
|
||||
pub confirm_quit: Option<bool>,
|
||||
#[serde(default)]
|
||||
pub hover_popover_enabled: Option<bool>,
|
||||
#[serde(default)]
|
||||
pub show_completions_on_input: Option<bool>,
|
||||
|
@ -302,6 +321,8 @@ pub struct SettingsFileContent {
|
|||
#[serde(default)]
|
||||
pub theme: Option<String>,
|
||||
#[serde(default)]
|
||||
pub telemetry: TelemetrySettings,
|
||||
#[serde(default)]
|
||||
pub staff_mode: Option<bool>,
|
||||
}
|
||||
|
||||
|
@ -312,6 +333,7 @@ pub struct LspSettings {
|
|||
}
|
||||
|
||||
impl Settings {
|
||||
/// Fill out the settings corresponding to the default.json file; overrides will be set later.
|
||||
pub fn defaults(
|
||||
assets: impl AssetSource,
|
||||
font_cache: &FontCache,
|
||||
|
@ -336,10 +358,10 @@ impl Settings {
|
|||
buffer_font_size: defaults.buffer_font_size.unwrap(),
|
||||
active_pane_magnification: defaults.active_pane_magnification.unwrap(),
|
||||
default_buffer_font_size: defaults.buffer_font_size.unwrap(),
|
||||
confirm_quit: defaults.confirm_quit.unwrap(),
|
||||
cursor_blink: defaults.cursor_blink.unwrap(),
|
||||
hover_popover_enabled: defaults.hover_popover_enabled.unwrap(),
|
||||
show_completions_on_input: defaults.show_completions_on_input.unwrap(),
|
||||
projects_online_by_default: defaults.projects_online_by_default.unwrap(),
|
||||
vim_mode: defaults.vim_mode.unwrap(),
|
||||
autosave: defaults.autosave.unwrap(),
|
||||
default_dock_anchor: defaults.default_dock_anchor.unwrap(),
|
||||
|
@ -363,11 +385,13 @@ impl Settings {
|
|||
language_overrides: Default::default(),
|
||||
lsp: defaults.lsp.clone(),
|
||||
theme: themes.get(&defaults.theme.unwrap()).unwrap(),
|
||||
|
||||
telemetry_defaults: defaults.telemetry,
|
||||
telemetry_overrides: Default::default(),
|
||||
staff_mode: false,
|
||||
}
|
||||
}
|
||||
|
||||
// Fill out the override settings and other fields from the user's settings.json.
|
||||
pub fn set_user_settings(
|
||||
&mut self,
|
||||
data: SettingsFileContent,
|
||||
|
@ -385,10 +409,6 @@ impl Settings {
|
|||
}
|
||||
}
|
||||
|
||||
merge(
|
||||
&mut self.projects_online_by_default,
|
||||
data.projects_online_by_default,
|
||||
);
|
||||
merge(&mut self.buffer_font_size, data.buffer_font_size);
|
||||
merge(
|
||||
&mut self.active_pane_magnification,
|
||||
|
@ -396,6 +416,7 @@ impl Settings {
|
|||
);
|
||||
merge(&mut self.default_buffer_font_size, data.buffer_font_size);
|
||||
merge(&mut self.cursor_blink, data.cursor_blink);
|
||||
merge(&mut self.confirm_quit, data.confirm_quit);
|
||||
merge(&mut self.hover_popover_enabled, data.hover_popover_enabled);
|
||||
merge(
|
||||
&mut self.show_completions_on_input,
|
||||
|
@ -419,6 +440,7 @@ impl Settings {
|
|||
self.terminal_overrides.copy_on_select = data.terminal.copy_on_select;
|
||||
self.terminal_overrides = data.terminal;
|
||||
self.language_overrides = data.languages;
|
||||
self.telemetry_overrides = data.telemetry;
|
||||
self.lsp = data.lsp;
|
||||
}
|
||||
|
||||
|
@ -489,6 +511,27 @@ impl Settings {
|
|||
.unwrap_or_else(|| R::default())
|
||||
}
|
||||
|
||||
pub fn telemetry(&self) -> TelemetrySettings {
|
||||
TelemetrySettings {
|
||||
diagnostics: Some(self.telemetry_diagnostics()),
|
||||
metrics: Some(self.telemetry_metrics()),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn telemetry_diagnostics(&self) -> bool {
|
||||
self.telemetry_overrides
|
||||
.diagnostics
|
||||
.or(self.telemetry_defaults.diagnostics)
|
||||
.expect("missing default")
|
||||
}
|
||||
|
||||
pub fn telemetry_metrics(&self) -> bool {
|
||||
self.telemetry_overrides
|
||||
.metrics
|
||||
.or(self.telemetry_defaults.metrics)
|
||||
.expect("missing default")
|
||||
}
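// Resolution sketch (not part of the original change): a user override wins
// when set, otherwise the default applies; the defaults are expected to always
// be present, as the `expect`s above assume. Field values here are made up.
#[test]
fn telemetry_override_resolution_sketch() {
    let defaults = TelemetrySettings {
        diagnostics: Some(true),
        metrics: Some(true),
    };
    let overrides = TelemetrySettings {
        metrics: Some(false),
        ..Default::default()
    };
    assert_eq!(overrides.metrics.or(defaults.metrics), Some(false));
    assert_eq!(overrides.diagnostics.or(defaults.diagnostics), Some(true));
}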
|
||||
|
||||
pub fn terminal_scroll(&self) -> AlternateScroll {
|
||||
self.terminal_setting(|terminal_setting| terminal_setting.alternate_scroll.as_ref())
|
||||
}
|
||||
|
@ -513,6 +556,7 @@ impl Settings {
|
|||
buffer_font_size: 14.,
|
||||
active_pane_magnification: 1.,
|
||||
default_buffer_font_size: 14.,
|
||||
confirm_quit: false,
|
||||
cursor_blink: true,
|
||||
hover_popover_enabled: true,
|
||||
show_completions_on_input: true,
|
||||
|
@ -538,8 +582,12 @@ impl Settings {
|
|||
language_defaults: Default::default(),
|
||||
language_overrides: Default::default(),
|
||||
lsp: Default::default(),
|
||||
projects_online_by_default: true,
|
||||
theme: gpui::fonts::with_font_cache(cx.font_cache().clone(), Default::default),
|
||||
telemetry_defaults: TelemetrySettings {
|
||||
diagnostics: Some(true),
|
||||
metrics: Some(true),
|
||||
},
|
||||
telemetry_overrides: Default::default(),
|
||||
staff_mode: false,
|
||||
}
|
||||
}
|
||||
|
|
|
@ -89,6 +89,26 @@ impl Column for f64 {
|
|||
}
|
||||
}
|
||||
|
||||
impl Bind for f32 {
|
||||
fn bind(&self, statement: &Statement, start_index: i32) -> Result<i32> {
|
||||
statement
|
||||
.bind_double(start_index, *self as f64)
|
||||
.with_context(|| format!("Failed to bind f64 at index {start_index}"))?;
|
||||
Ok(start_index + 1)
|
||||
}
|
||||
}
|
||||
|
||||
impl Column for f32 {
|
||||
fn column(statement: &mut Statement, start_index: i32) -> Result<(Self, i32)> {
|
||||
let result = statement
|
||||
.column_double(start_index)
|
||||
.with_context(|| format!("Failed to parse f32 at index {start_index}"))?
|
||||
as f32;
|
||||
|
||||
Ok((result, start_index + 1))
|
||||
}
|
||||
}
|
||||
|
||||
impl Bind for i32 {
|
||||
fn bind(&self, statement: &Statement, start_index: i32) -> Result<i32> {
|
||||
statement
|
||||
|
@ -122,6 +142,21 @@ impl Column for i64 {
|
|||
}
|
||||
}
|
||||
|
||||
impl Bind for u32 {
|
||||
fn bind(&self, statement: &Statement, start_index: i32) -> Result<i32> {
|
||||
(*self as i64)
|
||||
.bind(statement, start_index)
|
||||
.with_context(|| format!("Failed to bind u32 at index {start_index}"))
|
||||
}
|
||||
}
|
||||
|
||||
impl Column for u32 {
|
||||
fn column(statement: &mut Statement, start_index: i32) -> Result<(Self, i32)> {
|
||||
let result = statement.column_int64(start_index)?;
|
||||
Ok((result as u32, start_index + 1))
|
||||
}
|
||||
}
|
||||
|
||||
impl Bind for usize {
|
||||
fn bind(&self, statement: &Statement, start_index: i32) -> Result<i32> {
|
||||
(*self as i64)
|
||||
|
|
|
@ -93,36 +93,77 @@ impl Connection {
|
|||
let sql_start = remaining_sql.as_ptr();
|
||||
|
||||
unsafe {
|
||||
let mut alter_table = None;
|
||||
while {
|
||||
let remaining_sql_str = remaining_sql.to_str().unwrap().trim();
|
||||
remaining_sql_str != ";" && !remaining_sql_str.is_empty()
|
||||
let any_remaining_sql = remaining_sql_str != ";" && !remaining_sql_str.is_empty();
|
||||
if any_remaining_sql {
|
||||
alter_table = parse_alter_table(remaining_sql_str);
|
||||
}
|
||||
any_remaining_sql
|
||||
} {
|
||||
let mut raw_statement = ptr::null_mut::<sqlite3_stmt>();
|
||||
let mut remaining_sql_ptr = ptr::null();
|
||||
sqlite3_prepare_v2(
|
||||
self.sqlite3,
|
||||
remaining_sql.as_ptr(),
|
||||
-1,
|
||||
&mut raw_statement,
|
||||
&mut remaining_sql_ptr,
|
||||
);
|
||||
|
||||
let res = sqlite3_errcode(self.sqlite3);
|
||||
let offset = sqlite3_error_offset(self.sqlite3);
|
||||
let message = sqlite3_errmsg(self.sqlite3);
|
||||
let (res, offset, message, _conn) = if let Some(table_to_alter) = alter_table {
|
||||
// ALTER TABLE is a weird statement. When preparing the statement the table's
|
||||
// existence is checked *before* syntax checking any other part of the statement.
|
||||
// Therefore, we need to make sure that the table has been created before calling
|
||||
// prepare. As we don't want to trash whatever database this is connected to, we
|
||||
// create a new in-memory DB to test.
|
||||
|
||||
let temp_connection = Connection::open_memory(None);
|
||||
// This should always succeed; if it doesn't, you really should know about it.
|
||||
temp_connection
|
||||
.exec(&format!(
|
||||
"CREATE TABLE {table_to_alter}(__place_holder_column_for_syntax_checking)"
|
||||
))
|
||||
.unwrap()()
|
||||
.unwrap();
|
||||
|
||||
sqlite3_prepare_v2(
|
||||
temp_connection.sqlite3,
|
||||
remaining_sql.as_ptr(),
|
||||
-1,
|
||||
&mut raw_statement,
|
||||
&mut remaining_sql_ptr,
|
||||
);
|
||||
|
||||
(
|
||||
sqlite3_errcode(temp_connection.sqlite3),
|
||||
sqlite3_error_offset(temp_connection.sqlite3),
|
||||
sqlite3_errmsg(temp_connection.sqlite3),
|
||||
Some(temp_connection),
|
||||
)
|
||||
} else {
|
||||
sqlite3_prepare_v2(
|
||||
self.sqlite3,
|
||||
remaining_sql.as_ptr(),
|
||||
-1,
|
||||
&mut raw_statement,
|
||||
&mut remaining_sql_ptr,
|
||||
);
|
||||
(
|
||||
sqlite3_errcode(self.sqlite3),
|
||||
sqlite3_error_offset(self.sqlite3),
|
||||
sqlite3_errmsg(self.sqlite3),
|
||||
None,
|
||||
)
|
||||
};
|
||||
|
||||
sqlite3_finalize(raw_statement);
|
||||
|
||||
if res == 1 && offset >= 0 {
|
||||
let sub_statement_correction =
|
||||
remaining_sql.as_ptr() as usize - sql_start as usize;
|
||||
let err_msg =
|
||||
String::from_utf8_lossy(CStr::from_ptr(message as *const _).to_bytes())
|
||||
.into_owned();
|
||||
let sub_statement_correction =
|
||||
remaining_sql.as_ptr() as usize - sql_start as usize;
|
||||
|
||||
return Some((err_msg, offset as usize + sub_statement_correction));
|
||||
}
|
||||
remaining_sql = CStr::from_ptr(remaining_sql_ptr);
|
||||
alter_table = None;
|
||||
}
|
||||
}
|
||||
None
|
||||
|
@ -162,6 +203,25 @@ impl Connection {
|
|||
}
|
||||
}
|
||||
|
||||
fn parse_alter_table(remaining_sql_str: &str) -> Option<String> {
|
||||
let remaining_sql_str = remaining_sql_str.to_lowercase();
|
||||
if remaining_sql_str.starts_with("alter") {
|
||||
if let Some(table_offset) = remaining_sql_str.find("table") {
|
||||
let after_table_offset = table_offset + "table".len();
|
||||
let table_to_alter = remaining_sql_str
|
||||
.chars()
|
||||
.skip(after_table_offset)
|
||||
.skip_while(|c| c.is_whitespace())
|
||||
.take_while(|c| !c.is_whitespace())
|
||||
.collect::<String>();
|
||||
if !table_to_alter.is_empty() {
|
||||
return Some(table_to_alter);
|
||||
}
|
||||
}
|
||||
}
|
||||
None
|
||||
}
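// Behavior sketch for the helper above (not part of the original change); the
// statements are made up and the helper is private to this module.
#[test]
fn parse_alter_table_sketch() {
    assert_eq!(
        parse_alter_table("ALTER TABLE terminals ADD COLUMN foo TEXT"),
        Some("terminals".to_string())
    );
    assert_eq!(parse_alter_table("CREATE TABLE terminals(x)"), None);
}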
|
||||
|
||||
impl Drop for Connection {
|
||||
fn drop(&mut self) {
|
||||
unsafe { sqlite3_close(self.sqlite3) };
|
||||
|
@ -331,4 +391,17 @@ mod test {
|
|||
|
||||
assert_eq!(res, Some(first_stmt.len() + second_offset + 1));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_alter_table_syntax() {
|
||||
let connection = Connection::open_memory(Some("test_alter_table_syntax"));
|
||||
|
||||
assert!(connection
|
||||
.sql_has_syntax_error("ALTER TABLE test ADD x TEXT")
|
||||
.is_none());
|
||||
|
||||
assert!(connection
|
||||
.sql_has_syntax_error("ALTER TABLE test AAD x TEXT")
|
||||
.is_some());
|
||||
}
|
||||
}
|
||||
|
|
|
@ -42,10 +42,10 @@ impl TerminalDb {
|
|||
}
|
||||
|
||||
query! {
|
||||
pub async fn take_working_directory(item_id: ItemId, workspace_id: WorkspaceId) -> Result<Option<PathBuf>> {
|
||||
DELETE FROM terminals
|
||||
pub fn get_working_directory(item_id: ItemId, workspace_id: WorkspaceId) -> Result<Option<PathBuf>> {
|
||||
SELECT working_directory
|
||||
FROM terminals
|
||||
WHERE item_id = ? AND workspace_id = ?
|
||||
RETURNING working_directory
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -18,10 +18,9 @@ use gpui::{
|
|||
AnyViewHandle, AppContext, Element, ElementBox, Entity, ModelHandle, MutableAppContext, Task,
|
||||
View, ViewContext, ViewHandle, WeakViewHandle,
|
||||
};
|
||||
use project::{LocalWorktree, Project, ProjectPath};
|
||||
use project::{LocalWorktree, Project};
|
||||
use serde::Deserialize;
|
||||
use settings::{Settings, TerminalBlink, WorkingDirectory};
|
||||
use smallvec::SmallVec;
|
||||
use smol::Timer;
|
||||
use terminal::{
|
||||
alacritty_terminal::{
|
||||
|
@ -616,13 +615,7 @@ impl Item for TerminalView {
|
|||
None
|
||||
}
|
||||
|
||||
fn project_path(&self, _cx: &gpui::AppContext) -> Option<ProjectPath> {
|
||||
None
|
||||
}
|
||||
|
||||
fn project_entry_ids(&self, _cx: &gpui::AppContext) -> SmallVec<[project::ProjectEntryId; 3]> {
|
||||
SmallVec::new()
|
||||
}
|
||||
fn for_each_project_item(&self, _: &AppContext, _: &mut dyn FnMut(usize, &dyn project::Item)) {}
|
||||
|
||||
fn is_singleton(&self, _cx: &gpui::AppContext) -> bool {
|
||||
false
|
||||
|
@ -706,8 +699,7 @@ impl Item for TerminalView {
|
|||
let window_id = cx.window_id();
|
||||
cx.spawn(|pane, mut cx| async move {
|
||||
let cwd = TERMINAL_DB
|
||||
.take_working_directory(item_id, workspace_id)
|
||||
.await
|
||||
.get_working_directory(item_id, workspace_id)
|
||||
.log_err()
|
||||
.flatten();
|
||||
|
||||
|
|
|
@ -1372,6 +1372,57 @@ impl Buffer {
|
|||
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
impl Buffer {
|
||||
pub fn edit_via_marked_text(&mut self, marked_string: &str) {
|
||||
let edits = self.edits_for_marked_text(marked_string);
|
||||
self.edit(edits);
|
||||
}
|
||||
|
||||
pub fn edits_for_marked_text(&self, marked_string: &str) -> Vec<(Range<usize>, String)> {
|
||||
let old_text = self.text();
|
||||
let (new_text, mut ranges) = util::test::marked_text_ranges(marked_string, false);
|
||||
if ranges.is_empty() {
|
||||
ranges.push(0..new_text.len());
|
||||
}
|
||||
|
||||
assert_eq!(
|
||||
old_text[..ranges[0].start],
|
||||
new_text[..ranges[0].start],
|
||||
"invalid edit"
|
||||
);
|
||||
|
||||
let mut delta = 0;
|
||||
let mut edits = Vec::new();
|
||||
let mut ranges = ranges.into_iter().peekable();
|
||||
|
||||
while let Some(inserted_range) = ranges.next() {
|
||||
let new_start = inserted_range.start;
|
||||
let old_start = (new_start as isize - delta) as usize;
|
||||
|
||||
let following_text = if let Some(next_range) = ranges.peek() {
|
||||
&new_text[inserted_range.end..next_range.start]
|
||||
} else {
|
||||
&new_text[inserted_range.end..]
|
||||
};
|
||||
|
||||
let inserted_len = inserted_range.len();
|
||||
let deleted_len = old_text[old_start..]
|
||||
.find(following_text)
|
||||
.expect("invalid edit");
|
||||
|
||||
let old_range = old_start..old_start + deleted_len;
|
||||
edits.push((old_range, new_text[inserted_range].to_string()));
|
||||
delta += inserted_len as isize - deleted_len as isize;
|
||||
}
|
||||
|
||||
assert_eq!(
|
||||
old_text.len() as isize + delta,
|
||||
new_text.len() as isize,
|
||||
"invalid edit"
|
||||
);
|
||||
|
||||
edits
|
||||
}
|
||||
|
||||
pub fn check_invariants(&self) {
|
||||
// Ensure every fragment is ordered by locator in the fragment tree and corresponds
|
||||
// to an insertion fragment in the insertions tree.
|
||||
|
|
|
@ -6,12 +6,11 @@ use gpui::{
|
|||
Padding, ParentElement,
|
||||
},
|
||||
fonts::TextStyle,
|
||||
Border, Element, Entity, ModelHandle, MutableAppContext, Quad, RenderContext, Task, View,
|
||||
ViewContext, ViewHandle, WeakViewHandle,
|
||||
AppContext, Border, Element, Entity, ModelHandle, MutableAppContext, Quad, RenderContext, Task,
|
||||
View, ViewContext, ViewHandle, WeakViewHandle,
|
||||
};
|
||||
use project::{Project, ProjectEntryId, ProjectPath};
|
||||
use project::Project;
|
||||
use settings::Settings;
|
||||
use smallvec::SmallVec;
|
||||
use theme::{ColorScheme, Layer, Style, StyleSet};
|
||||
use workspace::{
|
||||
item::{Item, ItemEvent},
|
||||
|
@ -306,7 +305,7 @@ impl Item for ThemeTestbench {
|
|||
&self,
|
||||
_: Option<usize>,
|
||||
style: &theme::Tab,
|
||||
_: &gpui::AppContext,
|
||||
_: &AppContext,
|
||||
) -> gpui::ElementBox {
|
||||
Label::new("Theme Testbench".into(), style.label.clone())
|
||||
.aligned()
|
||||
|
@ -314,21 +313,15 @@ impl Item for ThemeTestbench {
|
|||
.boxed()
|
||||
}
|
||||
|
||||
fn project_path(&self, _: &gpui::AppContext) -> Option<ProjectPath> {
|
||||
None
|
||||
}
|
||||
fn for_each_project_item(&self, _: &AppContext, _: &mut dyn FnMut(usize, &dyn project::Item)) {}
|
||||
|
||||
fn project_entry_ids(&self, _: &gpui::AppContext) -> SmallVec<[ProjectEntryId; 3]> {
|
||||
SmallVec::new()
|
||||
}
|
||||
|
||||
fn is_singleton(&self, _: &gpui::AppContext) -> bool {
|
||||
fn is_singleton(&self, _: &AppContext) -> bool {
|
||||
false
|
||||
}
|
||||
|
||||
fn set_nav_history(&mut self, _: workspace::ItemNavHistory, _: &mut ViewContext<Self>) {}
|
||||
|
||||
fn can_save(&self, _: &gpui::AppContext) -> bool {
|
||||
fn can_save(&self, _: &AppContext) -> bool {
|
||||
false
|
||||
}
|
||||
|
||||
|
|
|
@ -37,9 +37,7 @@ fn editor_focused(EditorFocused(editor): &EditorFocused, cx: &mut MutableAppCont
|
|||
let editor_mode = editor.mode();
|
||||
let newest_selection_empty = editor.selections.newest::<usize>(cx).is_empty();
|
||||
|
||||
if editor_mode != EditorMode::Full {
|
||||
vim.switch_mode(Mode::Insert, true, cx);
|
||||
} else if !newest_selection_empty {
|
||||
if editor_mode == EditorMode::Full && !newest_selection_empty {
|
||||
vim.switch_mode(Mode::Visual { line: false }, true, cx);
|
||||
}
|
||||
});
|
||||
|
|
|
@ -3,7 +3,7 @@ use editor::{
|
|||
display_map::{DisplaySnapshot, ToDisplayPoint},
|
||||
movement, Bias, CharKind, DisplayPoint,
|
||||
};
|
||||
use gpui::{actions, impl_actions, keymap_matcher::KeyPressed, MutableAppContext};
|
||||
use gpui::{actions, impl_actions, MutableAppContext};
|
||||
use language::{Point, Selection, SelectionGoal};
|
||||
use serde::Deserialize;
|
||||
use workspace::Workspace;
|
||||
|
@ -109,27 +109,6 @@ pub fn init(cx: &mut MutableAppContext) {
|
|||
&PreviousWordStart { ignore_punctuation }: &PreviousWordStart,
|
||||
cx: _| { motion(Motion::PreviousWordStart { ignore_punctuation }, cx) },
|
||||
);
|
||||
cx.add_action(
|
||||
|_: &mut Workspace, KeyPressed { keystroke }: &KeyPressed, cx| match Vim::read(cx)
|
||||
.active_operator()
|
||||
{
|
||||
Some(Operator::FindForward { before }) => motion(
|
||||
Motion::FindForward {
|
||||
before,
|
||||
character: keystroke.key.chars().next().unwrap(),
|
||||
},
|
||||
cx,
|
||||
),
|
||||
Some(Operator::FindBackward { after }) => motion(
|
||||
Motion::FindBackward {
|
||||
after,
|
||||
character: keystroke.key.chars().next().unwrap(),
|
||||
},
|
||||
cx,
|
||||
),
|
||||
_ => cx.propagate_action(),
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
pub(crate) fn motion(motion: Motion, cx: &mut MutableAppContext) {
|
||||
|
|
|
@ -424,6 +424,53 @@ fn scroll(editor: &mut Editor, amount: &ScrollAmount, cx: &mut ViewContext<Edito
|
|||
}
|
||||
}
|
||||
|
||||
pub(crate) fn normal_replace(text: &str, cx: &mut MutableAppContext) {
|
||||
Vim::update(cx, |vim, cx| {
|
||||
vim.update_active_editor(cx, |editor, cx| {
|
||||
editor.transact(cx, |editor, cx| {
|
||||
editor.set_clip_at_line_ends(false, cx);
|
||||
let (map, display_selections) = editor.selections.all_display(cx);
|
||||
// Selections are biased right at the start. So we need to store
|
||||
// anchors that are biased left so that we can restore the selections
|
||||
// after the change
|
||||
let stable_anchors = editor
|
||||
.selections
|
||||
.disjoint_anchors()
|
||||
.into_iter()
|
||||
.map(|selection| {
|
||||
let start = selection.start.bias_left(&map.buffer_snapshot);
|
||||
start..start
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let edits = display_selections
|
||||
.into_iter()
|
||||
.map(|selection| {
|
||||
let mut range = selection.range();
|
||||
*range.end.column_mut() += 1;
|
||||
range.end = map.clip_point(range.end, Bias::Right);
|
||||
|
||||
(
|
||||
range.start.to_offset(&map, Bias::Left)
|
||||
..range.end.to_offset(&map, Bias::Left),
|
||||
text,
|
||||
)
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
editor.buffer().update(cx, |buffer, cx| {
|
||||
buffer.edit(edits, None, cx);
|
||||
});
|
||||
editor.set_clip_at_line_ends(true, cx);
|
||||
editor.change_selections(None, cx, |s| {
|
||||
s.select_anchor_ranges(stable_anchors);
|
||||
});
|
||||
});
|
||||
});
|
||||
vim.pop_operator(cx)
|
||||
});
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use indoc::indoc;
|
||||
|
@ -468,6 +515,16 @@ mod test {
|
|||
.await;
|
||||
}
|
||||
|
||||
// #[gpui::test]
|
||||
// async fn test_enter(cx: &mut gpui::TestAppContext) {
|
||||
// let mut cx = NeovimBackedTestContext::new(cx).await.binding(["enter"]);
|
||||
// cx.assert_all(indoc! {"
|
||||
// ˇThe qˇuick broˇwn
|
||||
// ˇfox jumps"
|
||||
// })
|
||||
// .await;
|
||||
// }
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_k(cx: &mut gpui::TestAppContext) {
|
||||
let mut cx = NeovimBackedTestContext::new(cx).await.binding(["k"]);
|
||||
|
|
|
@ -28,6 +28,7 @@ pub enum Operator {
|
|||
Change,
|
||||
Delete,
|
||||
Yank,
|
||||
Replace,
|
||||
Object { around: bool },
|
||||
FindForward { before: bool },
|
||||
FindBackward { after: bool },
|
||||
|
@ -117,6 +118,7 @@ impl Operator {
|
|||
Operator::Change => "c",
|
||||
Operator::Delete => "d",
|
||||
Operator::Yank => "y",
|
||||
Operator::Replace => "r",
|
||||
Operator::FindForward { before: false } => "f",
|
||||
Operator::FindForward { before: true } => "t",
|
||||
Operator::FindBackward { after: false } => "F",
|
||||
|
@ -127,7 +129,9 @@ impl Operator {
|
|||
pub fn context_flags(&self) -> &'static [&'static str] {
|
||||
match self {
|
||||
Operator::Object { .. } => &["VimObject"],
|
||||
Operator::FindForward { .. } | Operator::FindBackward { .. } => &["VimWaiting"],
|
||||
Operator::FindForward { .. } | Operator::FindBackward { .. } | Operator::Replace => {
|
||||
&["VimWaiting"]
|
||||
}
|
||||
_ => &[],
|
||||
}
|
||||
}
|
||||
|
|
|
@ -84,9 +84,6 @@ async fn test_buffer_search(cx: &mut gpui::TestAppContext) {
|
|||
);
|
||||
cx.simulate_keystroke("/");
|
||||
|
||||
// We now use a weird insert mode with a selection when jumping to a single-line editor.
|
||||
assert_eq!(cx.mode(), Mode::Insert);
|
||||
|
||||
let search_bar = cx.workspace(|workspace, cx| {
|
||||
workspace
|
||||
.active_pane()
|
||||
|
|
|
@ -12,12 +12,19 @@ mod visual;
|
|||
|
||||
use collections::HashMap;
|
||||
use command_palette::CommandPaletteFilter;
|
||||
use editor::{Bias, Cancel, Editor};
|
||||
use gpui::{impl_actions, MutableAppContext, Subscription, ViewContext, WeakViewHandle};
|
||||
use editor::{Bias, Cancel, Editor, EditorMode};
|
||||
use gpui::{
|
||||
impl_actions,
|
||||
keymap_matcher::{KeyPressed, Keystroke},
|
||||
MutableAppContext, Subscription, ViewContext, WeakViewHandle,
|
||||
};
|
||||
use language::CursorShape;
|
||||
use motion::Motion;
|
||||
use normal::normal_replace;
|
||||
use serde::Deserialize;
|
||||
use settings::Settings;
|
||||
use state::{Mode, Operator, VimState};
|
||||
use visual::visual_replace;
|
||||
use workspace::{self, Workspace};
|
||||
|
||||
#[derive(Clone, Deserialize, PartialEq)]
|
||||
|
@ -51,6 +58,11 @@ pub fn init(cx: &mut MutableAppContext) {
|
|||
cx.add_action(|_: &mut Workspace, n: &Number, cx: _| {
|
||||
Vim::update(cx, |vim, cx| vim.push_number(n, cx));
|
||||
});
|
||||
cx.add_action(
|
||||
|_: &mut Workspace, KeyPressed { keystroke }: &KeyPressed, cx| {
|
||||
Vim::key_pressed(keystroke, cx);
|
||||
},
|
||||
);
|
||||
|
||||
// Editor Actions
|
||||
cx.add_action(|_: &mut Editor, _: &Cancel, cx| {
|
||||
|
@ -208,6 +220,27 @@ impl Vim {
|
|||
self.state.operator_stack.last().copied()
|
||||
}
|
||||
|
||||
fn key_pressed(keystroke: &Keystroke, cx: &mut ViewContext<Workspace>) {
|
||||
match Vim::read(cx).active_operator() {
|
||||
Some(Operator::FindForward { before }) => {
|
||||
if let Some(character) = keystroke.key.chars().next() {
|
||||
motion::motion(Motion::FindForward { before, character }, cx)
|
||||
}
|
||||
}
|
||||
Some(Operator::FindBackward { after }) => {
|
||||
if let Some(character) = keystroke.key.chars().next() {
|
||||
motion::motion(Motion::FindBackward { after, character }, cx)
|
||||
}
|
||||
}
|
||||
Some(Operator::Replace) => match Vim::read(cx).state.mode {
|
||||
Mode::Normal => normal_replace(&keystroke.key, cx),
|
||||
Mode::Visual { line } => visual_replace(&keystroke.key, line, cx),
|
||||
_ => Vim::update(cx, |vim, cx| vim.clear_operator(cx)),
|
||||
},
|
||||
_ => cx.propagate_action(),
|
||||
}
|
||||
}
|
||||
|
||||
fn set_enabled(&mut self, enabled: bool, cx: &mut MutableAppContext) {
|
||||
if self.enabled != enabled {
|
||||
self.enabled = enabled;
|
||||
|
@ -234,7 +267,7 @@ impl Vim {
|
|||
for editor in self.editors.values() {
|
||||
if let Some(editor) = editor.upgrade(cx) {
|
||||
editor.update(cx, |editor, cx| {
|
||||
if self.enabled {
|
||||
if self.enabled && editor.mode() == EditorMode::Full {
|
||||
editor.set_cursor_shape(cursor_shape, cx);
|
||||
editor.set_clip_at_line_ends(state.clip_at_line_end(), cx);
|
||||
editor.set_input_enabled(!state.vim_controlled());
|
||||
|
|
|
@ -2,7 +2,7 @@ use std::borrow::Cow;
|
|||
|
||||
use collections::HashMap;
|
||||
use editor::{
|
||||
display_map::ToDisplayPoint, scroll::autoscroll::Autoscroll, Bias, ClipboardSelection,
|
||||
display_map::ToDisplayPoint, movement, scroll::autoscroll::Autoscroll, Bias, ClipboardSelection,
|
||||
};
|
||||
use gpui::{actions, MutableAppContext, ViewContext};
|
||||
use language::{AutoindentMode, SelectionGoal};
|
||||
|
@@ -313,6 +313,55 @@ pub fn paste(_: &mut Workspace, _: &VisualPaste, cx: &mut ViewContext<Workspace>
    });
}

pub(crate) fn visual_replace(text: &str, line: bool, cx: &mut MutableAppContext) {
    Vim::update(cx, |vim, cx| {
        vim.update_active_editor(cx, |editor, cx| {
            editor.transact(cx, |editor, cx| {
                let (display_map, selections) = editor.selections.all_adjusted_display(cx);

                // Selections are biased right at the start. So we need to store
                // anchors that are biased left so that we can restore the selections
                // after the change
                let stable_anchors = editor
                    .selections
                    .disjoint_anchors()
                    .into_iter()
                    .map(|selection| {
                        let start = selection.start.bias_left(&display_map.buffer_snapshot);
                        start..start
                    })
                    .collect::<Vec<_>>();

                let mut edits = Vec::new();
                for selection in selections.iter() {
                    let mut selection = selection.clone();
                    if !line && !selection.reversed {
                        // Head is at the end of the selection. Adjust the end position
                        // to include the character under the cursor.
                        *selection.end.column_mut() = selection.end.column() + 1;
                        selection.end = display_map.clip_point(selection.end, Bias::Right);
                    }

                    for row_range in
                        movement::split_display_range_by_lines(&display_map, selection.range())
                    {
                        let range = row_range.start.to_offset(&display_map, Bias::Right)
                            ..row_range.end.to_offset(&display_map, Bias::Right);
                        let text = text.repeat(range.len());
                        edits.push((range, text));
                    }
                }

                editor.buffer().update(cx, |buffer, cx| {
                    buffer.edit(edits, None, cx);
                });
                editor.change_selections(None, cx, |s| s.select_ranges(stable_anchors));
            });
        });
        vim.switch_mode(Mode::Normal, false, cx);
    });
}

#[cfg(test)]
mod test {
    use indoc::indoc;
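The new `visual_replace` above works on display-map ranges and buffer anchors. As a loose, standalone sketch of the same idea — replace every selected character, keep line breaks, and collapse each selection back to its original start — here is a toy version over plain byte ranges (assumes ASCII input; none of this is Zed's actual API):

```rust
// Illustrative sketch only: visual-mode "replace" over plain byte ranges,
// not Zed's DisplayMap/anchor machinery.
fn visual_replace(text: &mut String, selections: &[std::ops::Range<usize>], ch: char) -> Vec<usize> {
    let mut restored_cursors = Vec::new();
    // Apply edits back-to-front so earlier ranges keep their offsets.
    for range in selections.iter().rev() {
        let replacement: String = text[range.clone()]
            .chars()
            .map(|c| if c == '\n' { '\n' } else { ch }) // keep line breaks, like line-wise replace
            .collect();
        text.replace_range(range.clone(), &replacement);
        restored_cursors.push(range.start); // collapse the selection back to its start
    }
    restored_cursors.reverse();
    restored_cursors
}

fn main() {
    let mut buffer = String::from("hello\nworld");
    let cursors = visual_replace(&mut buffer, &[0..5, 6..11], 'x');
    assert_eq!(buffer, "xxxxx\nxxxxx");
    assert_eq!(cursors, vec![0, 6]);
}
```

The real implementation uses left-biased anchors instead of raw offsets so the restored cursors stay valid even when the replacement changes byte lengths.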
@@ -2,8 +2,10 @@ use collections::HashMap;
use gpui::{
    actions,
    elements::{ChildView, Container, Empty, MouseEventHandler, ParentElement, Side, Stack, Svg},
    geometry::vector::Vector2F,
    impl_internal_actions, Border, CursorStyle, Element, ElementBox, Entity, MouseButton,
-   MutableAppContext, RenderContext, View, ViewContext, ViewHandle, WeakViewHandle,
+   MutableAppContext, RenderContext, SizeConstraint, View, ViewContext, ViewHandle,
+   WeakViewHandle,
};
use serde::Deserialize;
use settings::{DockAnchor, Settings};
@@ -312,7 +314,27 @@ impl Dock {
                }
            });

-           resizable.flex(5., false).boxed()
+           if anchor == DockAnchor::Right {
+               resizable
+                   .constrained()
+                   .dynamically(|constraint, cx| {
+                       SizeConstraint::new(
+                           Vector2F::new(20., constraint.min.y()),
+                           Vector2F::new(cx.window_size.x() * 0.8, constraint.max.y()),
+                       )
+                   })
+                   .boxed()
+           } else {
+               resizable
+                   .constrained()
+                   .dynamically(|constraint, cx| {
+                       SizeConstraint::new(
+                           Vector2F::new(constraint.min.x(), 50.),
+                           Vector2F::new(constraint.max.x(), cx.window_size.y() * 0.8),
+                       )
+                   })
+                   .boxed()
+           }
        }
        DockAnchor::Expanded => {
            enum ExpandedDockWash {}
@ -470,7 +492,7 @@ mod tests {
|
|||
use super::*;
|
||||
use crate::{
|
||||
dock,
|
||||
item::test::TestItem,
|
||||
item::{self, test::TestItem},
|
||||
persistence::model::{
|
||||
SerializedItem, SerializedPane, SerializedPaneGroup, SerializedWorkspace,
|
||||
},
|
||||
|
@ -492,7 +514,7 @@ mod tests {
|
|||
Settings::test_async(cx);
|
||||
|
||||
cx.update(|cx| {
|
||||
register_deserializable_item::<TestItem>(cx);
|
||||
register_deserializable_item::<item::test::TestItem>(cx);
|
||||
});
|
||||
|
||||
let serialized_workspace = SerializedWorkspace {
|
||||
|
@ -508,7 +530,7 @@ mod tests {
|
|||
children: vec![SerializedItem {
|
||||
active: true,
|
||||
item_id: 0,
|
||||
kind: "test".into(),
|
||||
kind: "TestItem".into(),
|
||||
}],
|
||||
},
|
||||
left_sidebar_open: false,
|
||||
|
@@ -623,6 +645,20 @@ mod tests {
        cx.assert_dock_pane_active();
    }

+   #[gpui::test]
+   async fn test_activate_next_and_prev_pane(cx: &mut TestAppContext) {
+       let mut cx = DockTestContext::new(cx).await;
+
+       cx.move_dock(DockAnchor::Right);
+       cx.assert_dock_pane_active();
+
+       cx.update_workspace(|workspace, cx| workspace.activate_next_pane(cx));
+       cx.assert_dock_pane_active();
+
+       cx.update_workspace(|workspace, cx| workspace.activate_previous_pane(cx));
+       cx.assert_dock_pane_active();
+   }

    struct DockTestContext<'a> {
        pub cx: &'a mut TestAppContext,
        pub window_id: usize,
||||
|
|
|
@ -49,8 +49,7 @@ pub trait Item: View {
|
|||
}
|
||||
fn tab_content(&self, detail: Option<usize>, style: &theme::Tab, cx: &AppContext)
|
||||
-> ElementBox;
|
||||
fn project_path(&self, cx: &AppContext) -> Option<ProjectPath>;
|
||||
fn project_entry_ids(&self, cx: &AppContext) -> SmallVec<[ProjectEntryId; 3]>;
|
||||
fn for_each_project_item(&self, _: &AppContext, _: &mut dyn FnMut(usize, &dyn project::Item));
|
||||
fn is_singleton(&self, cx: &AppContext) -> bool;
|
||||
fn set_nav_history(&mut self, _: ItemNavHistory, _: &mut ViewContext<Self>);
|
||||
fn clone_on_split(&self, _workspace_id: WorkspaceId, _: &mut ViewContext<Self>) -> Option<Self>
|
||||
|
@ -147,6 +146,8 @@ pub trait ItemHandle: 'static + fmt::Debug {
|
|||
-> ElementBox;
|
||||
fn project_path(&self, cx: &AppContext) -> Option<ProjectPath>;
|
||||
fn project_entry_ids(&self, cx: &AppContext) -> SmallVec<[ProjectEntryId; 3]>;
|
||||
fn project_item_model_ids(&self, cx: &AppContext) -> SmallVec<[usize; 3]>;
|
||||
fn for_each_project_item(&self, _: &AppContext, _: &mut dyn FnMut(usize, &dyn project::Item));
|
||||
fn is_singleton(&self, cx: &AppContext) -> bool;
|
||||
fn boxed_clone(&self) -> Box<dyn ItemHandle>;
|
||||
fn clone_on_split(
|
||||
|
@ -240,11 +241,36 @@ impl<T: Item> ItemHandle for ViewHandle<T> {
|
|||
}
|
||||
|
||||
fn project_path(&self, cx: &AppContext) -> Option<ProjectPath> {
|
||||
self.read(cx).project_path(cx)
|
||||
let this = self.read(cx);
|
||||
let mut result = None;
|
||||
if this.is_singleton(cx) {
|
||||
this.for_each_project_item(cx, &mut |_, item| {
|
||||
result = item.project_path(cx);
|
||||
});
|
||||
}
|
||||
result
|
||||
}
|
||||
|
||||
fn project_entry_ids(&self, cx: &AppContext) -> SmallVec<[ProjectEntryId; 3]> {
|
||||
self.read(cx).project_entry_ids(cx)
|
||||
let mut result = SmallVec::new();
|
||||
self.read(cx).for_each_project_item(cx, &mut |_, item| {
|
||||
if let Some(id) = item.entry_id(cx) {
|
||||
result.push(id);
|
||||
}
|
||||
});
|
||||
result
|
||||
}
|
||||
|
||||
fn project_item_model_ids(&self, cx: &AppContext) -> SmallVec<[usize; 3]> {
|
||||
let mut result = SmallVec::new();
|
||||
self.read(cx).for_each_project_item(cx, &mut |id, _| {
|
||||
result.push(id);
|
||||
});
|
||||
result
|
||||
}
|
||||
|
||||
fn for_each_project_item(&self, cx: &AppContext, f: &mut dyn FnMut(usize, &dyn project::Item)) {
|
||||
self.read(cx).for_each_project_item(cx, f)
|
||||
}
|
||||
|
||||
fn is_singleton(&self, cx: &AppContext) -> bool {
|
||||
|
@ -582,7 +608,7 @@ impl<T: Item> WeakItemHandle for WeakViewHandle<T> {
|
|||
}
|
||||
|
||||
pub trait ProjectItem: Item {
|
||||
type Item: project::Item;
|
||||
type Item: project::Item + gpui::Entity;
|
||||
|
||||
fn for_project_item(
|
||||
project: ModelHandle<Project>,
|
||||
|
@ -690,18 +716,19 @@ impl<T: FollowableItem> FollowableItemHandle for ViewHandle<T> {
|
|||
|
||||
#[cfg(test)]
|
||||
pub(crate) mod test {
|
||||
use std::{any::Any, borrow::Cow, cell::Cell};
|
||||
|
||||
use gpui::{
|
||||
elements::Empty, AppContext, Element, ElementBox, Entity, ModelHandle, RenderContext, Task,
|
||||
View, ViewContext, ViewHandle, WeakViewHandle,
|
||||
};
|
||||
use project::{Project, ProjectEntryId, ProjectPath};
|
||||
use smallvec::SmallVec;
|
||||
|
||||
use crate::{sidebar::SidebarItem, ItemId, ItemNavHistory, Pane, Workspace, WorkspaceId};
|
||||
|
||||
use super::{Item, ItemEvent};
|
||||
use crate::{sidebar::SidebarItem, ItemId, ItemNavHistory, Pane, Workspace, WorkspaceId};
|
||||
use gpui::{
|
||||
elements::Empty, AppContext, Element, ElementBox, Entity, ModelHandle, MutableAppContext,
|
||||
RenderContext, Task, View, ViewContext, ViewHandle, WeakViewHandle,
|
||||
};
|
||||
use project::{Project, ProjectEntryId, ProjectPath, WorktreeId};
|
||||
use std::{any::Any, borrow::Cow, cell::Cell, path::Path};
|
||||
|
||||
pub struct TestProjectItem {
|
||||
pub entry_id: Option<ProjectEntryId>,
|
||||
pub project_path: Option<ProjectPath>,
|
||||
}
|
||||
|
||||
pub struct TestItem {
|
||||
pub workspace_id: WorkspaceId,
|
||||
|
@ -713,13 +740,26 @@ pub(crate) mod test {
|
|||
pub is_dirty: bool,
|
||||
pub is_singleton: bool,
|
||||
pub has_conflict: bool,
|
||||
pub project_entry_ids: Vec<ProjectEntryId>,
|
||||
pub project_path: Option<ProjectPath>,
|
||||
pub project_items: Vec<ModelHandle<TestProjectItem>>,
|
||||
pub nav_history: Option<ItemNavHistory>,
|
||||
pub tab_descriptions: Option<Vec<&'static str>>,
|
||||
pub tab_detail: Cell<Option<usize>>,
|
||||
}
|
||||
|
||||
impl Entity for TestProjectItem {
|
||||
type Event = ();
|
||||
}
|
||||
|
||||
impl project::Item for TestProjectItem {
|
||||
fn entry_id(&self, _: &AppContext) -> Option<ProjectEntryId> {
|
||||
self.entry_id
|
||||
}
|
||||
|
||||
fn project_path(&self, _: &AppContext) -> Option<ProjectPath> {
|
||||
self.project_path.clone()
|
||||
}
|
||||
}
|
||||
|
||||
pub enum TestItemEvent {
|
||||
Edit,
|
||||
}
|
||||
|
@ -735,8 +775,7 @@ pub(crate) mod test {
|
|||
is_dirty: self.is_dirty,
|
||||
is_singleton: self.is_singleton,
|
||||
has_conflict: self.has_conflict,
|
||||
project_entry_ids: self.project_entry_ids.clone(),
|
||||
project_path: self.project_path.clone(),
|
||||
project_items: self.project_items.clone(),
|
||||
nav_history: None,
|
||||
tab_descriptions: None,
|
||||
tab_detail: Default::default(),
|
||||
|
@ -745,6 +784,27 @@ pub(crate) mod test {
|
|||
}
|
||||
}
|
||||
|
||||
impl TestProjectItem {
|
||||
pub fn new(id: u64, path: &str, cx: &mut MutableAppContext) -> ModelHandle<Self> {
|
||||
let entry_id = Some(ProjectEntryId::from_proto(id));
|
||||
let project_path = Some(ProjectPath {
|
||||
worktree_id: WorktreeId::from_usize(0),
|
||||
path: Path::new(path).into(),
|
||||
});
|
||||
cx.add_model(|_| Self {
|
||||
entry_id,
|
||||
project_path,
|
||||
})
|
||||
}
|
||||
|
||||
pub fn new_untitled(cx: &mut MutableAppContext) -> ModelHandle<Self> {
|
||||
cx.add_model(|_| Self {
|
||||
project_path: None,
|
||||
entry_id: None,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl TestItem {
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
|
@ -755,8 +815,7 @@ pub(crate) mod test {
|
|||
reload_count: 0,
|
||||
is_dirty: false,
|
||||
has_conflict: false,
|
||||
project_entry_ids: Vec::new(),
|
||||
project_path: None,
|
||||
project_items: Vec::new(),
|
||||
is_singleton: true,
|
||||
nav_history: None,
|
||||
tab_descriptions: None,
|
||||
|
@ -781,13 +840,19 @@ pub(crate) mod test {
|
|||
self
|
||||
}
|
||||
|
||||
pub fn with_project_entry_ids(mut self, project_entry_ids: &[u64]) -> Self {
|
||||
self.project_entry_ids.extend(
|
||||
project_entry_ids
|
||||
.iter()
|
||||
.copied()
|
||||
.map(ProjectEntryId::from_proto),
|
||||
);
|
||||
pub fn with_dirty(mut self, dirty: bool) -> Self {
|
||||
self.is_dirty = dirty;
|
||||
self
|
||||
}
|
||||
|
||||
pub fn with_conflict(mut self, has_conflict: bool) -> Self {
|
||||
self.has_conflict = has_conflict;
|
||||
self
|
||||
}
|
||||
|
||||
pub fn with_project_items(mut self, items: &[ModelHandle<TestProjectItem>]) -> Self {
|
||||
self.project_items.clear();
|
||||
self.project_items.extend(items.iter().cloned());
|
||||
self
|
||||
}
|
||||
|
||||
|
@ -830,12 +895,14 @@ pub(crate) mod test {
|
|||
Empty::new().boxed()
|
||||
}
|
||||
|
||||
fn project_path(&self, _: &AppContext) -> Option<ProjectPath> {
|
||||
self.project_path.clone()
|
||||
}
|
||||
|
||||
fn project_entry_ids(&self, _: &AppContext) -> SmallVec<[ProjectEntryId; 3]> {
|
||||
self.project_entry_ids.iter().copied().collect()
|
||||
fn for_each_project_item(
|
||||
&self,
|
||||
cx: &AppContext,
|
||||
f: &mut dyn FnMut(usize, &dyn project::Item),
|
||||
) {
|
||||
self.project_items
|
||||
.iter()
|
||||
.for_each(|item| f(item.id(), item.read(cx)))
|
||||
}
|
||||
|
||||
fn is_singleton(&self, _: &AppContext) -> bool {
|
||||
|
@ -879,8 +946,12 @@ pub(crate) mod test {
|
|||
self.has_conflict
|
||||
}
|
||||
|
||||
fn can_save(&self, _: &AppContext) -> bool {
|
||||
!self.project_entry_ids.is_empty()
|
||||
fn can_save(&self, cx: &AppContext) -> bool {
|
||||
!self.project_items.is_empty()
|
||||
&& self
|
||||
.project_items
|
||||
.iter()
|
||||
.all(|item| item.read(cx).entry_id.is_some())
|
||||
}
|
||||
|
||||
fn save(
|
||||
|
@ -919,7 +990,7 @@ pub(crate) mod test {
|
|||
}
|
||||
|
||||
fn serialized_item_kind() -> Option<&'static str> {
|
||||
None
|
||||
Some("TestItem")
|
||||
}
|
||||
|
||||
fn deserialize(
|
||||
|
|
|
@ -488,7 +488,7 @@ impl Pane {
|
|||
) -> Box<dyn ItemHandle> {
|
||||
let existing_item = pane.update(cx, |pane, cx| {
|
||||
for (index, item) in pane.items.iter().enumerate() {
|
||||
if item.project_path(cx).is_some()
|
||||
if item.is_singleton(cx)
|
||||
&& item.project_entry_ids(cx).as_slice() == [project_entry_id]
|
||||
{
|
||||
let item = item.boxed_clone();
|
||||
|
@@ -810,13 +810,13 @@ impl Pane {
        items_to_close.sort_by_key(|item| !item.is_singleton(cx));

        cx.spawn(|workspace, mut cx| async move {
-           let mut saved_project_entry_ids = HashSet::default();
+           let mut saved_project_items_ids = HashSet::default();
            for item in items_to_close.clone() {
-               // Find the item's current index and its set of project entries. Avoid
+               // Find the item's current index and its set of project item models. Avoid
                // storing these in advance, in case they have changed since this task
                // was started.
-               let (item_ix, mut project_entry_ids) = pane.read_with(&cx, |pane, cx| {
-                   (pane.index_for_item(&*item), item.project_entry_ids(cx))
+               let (item_ix, mut project_item_ids) = pane.read_with(&cx, |pane, cx| {
+                   (pane.index_for_item(&*item), item.project_item_model_ids(cx))
                });
                let item_ix = if let Some(ix) = item_ix {
                    ix

@@ -824,30 +824,23 @@ impl Pane {
                    continue;
                };

-               // If an item hasn't yet been associated with a project entry, then always
-               // prompt to save it before closing it. Otherwise, check if the item has
-               // any project entries that are not open anywhere else in the workspace,
-               // AND that the user has not already been prompted to save. If there are
-               // any such project entries, prompt the user to save this item.
-               let should_save = if project_entry_ids.is_empty() {
-                   true
-               } else {
-                   workspace.read_with(&cx, |workspace, cx| {
-                       for item in workspace.items(cx) {
-                           if !items_to_close
-                               .iter()
-                               .any(|item_to_close| item_to_close.id() == item.id())
-                           {
-                               let other_project_entry_ids = item.project_entry_ids(cx);
-                               project_entry_ids
-                                   .retain(|id| !other_project_entry_ids.contains(id));
-                           }
-                       }
-                   });
-                   project_entry_ids
-                       .iter()
-                       .any(|id| saved_project_entry_ids.insert(*id))
-               };
+               // Check if this view has any project items that are not open anywhere else
+               // in the workspace, AND that the user has not already been prompted to save.
+               // If there are any such project entries, prompt the user to save this item.
+               workspace.read_with(&cx, |workspace, cx| {
+                   for item in workspace.items(cx) {
+                       if !items_to_close
+                           .iter()
+                           .any(|item_to_close| item_to_close.id() == item.id())
+                       {
+                           let other_project_item_ids = item.project_item_model_ids(cx);
+                           project_item_ids.retain(|id| !other_project_item_ids.contains(id));
+                       }
+                   }
+               });
+               let should_save = project_item_ids
+                   .iter()
+                   .any(|id| saved_project_items_ids.insert(*id));

                if should_save
                    && !Self::save_item(project.clone(), &pane, item_ix, &*item, true, &mut cx)
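The rewritten close loop above prompts to save at most once per underlying project item. A minimal standalone sketch of that bookkeeping (a `HashSet` of already-handled ids; the names and the `(name, ids)` tuples here are illustrative stand-ins, not Zed's API):

```rust
use std::collections::HashSet;

// Illustrative sketch: prompt to save at most once per underlying project item id.
// `(name, ids)` stands in for a workspace item and the project item models it references.
fn items_needing_prompt<'a>(items: &[(&'a str, Vec<u64>)]) -> Vec<&'a str> {
    let mut saved_project_item_ids = HashSet::new();
    let mut to_prompt = Vec::new();
    for (name, ids) in items {
        // An item needs a prompt if it references at least one id we haven't handled yet.
        let should_save = ids.iter().any(|id| saved_project_item_ids.insert(*id));
        if should_save {
            to_prompt.push(*name);
        }
    }
    to_prompt
}

fn main() {
    let items = vec![
        ("buffer 1", vec![1]),
        ("multibuffer 1+2", vec![1, 2]),   // id 2 is new, so this still prompts
        ("multibuffer 1 again", vec![1]),  // nothing new: closed without a prompt
    ];
    assert_eq!(
        items_needing_prompt(&items),
        vec!["buffer 1", "multibuffer 1+2"]
    );
}
```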
||||
|
@ -1458,7 +1451,11 @@ impl View for Pane {
|
|||
0,
|
||||
self.active_item_index + 1,
|
||||
false,
|
||||
Some(100.),
|
||||
if self.docked.is_some() {
|
||||
None
|
||||
} else {
|
||||
Some(100.)
|
||||
},
|
||||
cx,
|
||||
{
|
||||
let toolbar = self.toolbar.clone();
|
||||
|
@ -1679,7 +1676,7 @@ mod tests {
|
|||
use std::sync::Arc;
|
||||
|
||||
use super::*;
|
||||
use crate::item::test::TestItem;
|
||||
use crate::item::test::{TestItem, TestProjectItem};
|
||||
use gpui::{executor::Deterministic, TestAppContext};
|
||||
use project::FakeFs;
|
||||
|
||||
|
@ -1868,7 +1865,7 @@ mod tests {
|
|||
let item = TestItem::new()
|
||||
.with_singleton(true)
|
||||
.with_label("buffer 1")
|
||||
.with_project_entry_ids(&[1]);
|
||||
.with_project_items(&[TestProjectItem::new(1, "one.txt", cx)]);
|
||||
|
||||
Pane::add_item(
|
||||
workspace,
|
||||
|
@ -1887,7 +1884,7 @@ mod tests {
|
|||
let item = TestItem::new()
|
||||
.with_singleton(true)
|
||||
.with_label("buffer 1")
|
||||
.with_project_entry_ids(&[1]);
|
||||
.with_project_items(&[TestProjectItem::new(1, "1.txt", cx)]);
|
||||
|
||||
Pane::add_item(
|
||||
workspace,
|
||||
|
@ -1906,7 +1903,7 @@ mod tests {
|
|||
let item = TestItem::new()
|
||||
.with_singleton(true)
|
||||
.with_label("buffer 2")
|
||||
.with_project_entry_ids(&[2]);
|
||||
.with_project_items(&[TestProjectItem::new(2, "2.txt", cx)]);
|
||||
|
||||
Pane::add_item(
|
||||
workspace,
|
||||
|
@ -1925,7 +1922,7 @@ mod tests {
|
|||
let item = TestItem::new()
|
||||
.with_singleton(false)
|
||||
.with_label("multibuffer 1")
|
||||
.with_project_entry_ids(&[1]);
|
||||
.with_project_items(&[TestProjectItem::new(1, "1.txt", cx)]);
|
||||
|
||||
Pane::add_item(
|
||||
workspace,
|
||||
|
@ -1944,7 +1941,7 @@ mod tests {
|
|||
let item = TestItem::new()
|
||||
.with_singleton(false)
|
||||
.with_label("multibuffer 1b")
|
||||
.with_project_entry_ids(&[1]);
|
||||
.with_project_items(&[TestProjectItem::new(1, "1.txt", cx)]);
|
||||
|
||||
Pane::add_item(
|
||||
workspace,
|
||||
|
|
|
@ -216,7 +216,9 @@ impl WorkspaceDb {
|
|||
let mut result = Vec::new();
|
||||
let mut delete_tasks = Vec::new();
|
||||
for (id, location) in self.recent_workspaces()? {
|
||||
if location.paths().iter().all(|path| path.exists()) {
|
||||
if location.paths().iter().all(|path| path.exists())
|
||||
&& location.paths().iter().any(|path| path.is_dir())
|
||||
{
|
||||
result.push((id, location));
|
||||
} else {
|
||||
delete_tasks.push(self.delete_stale_workspace(id));
|
||||
|
@@ -227,14 +229,13 @@ impl WorkspaceDb {
        Ok(result)
    }

-   query! {
-       pub fn last_workspace() -> Result<Option<WorkspaceLocation>> {
-           SELECT workspace_location
-           FROM workspaces
-           WHERE workspace_location IS NOT NULL
-           ORDER BY timestamp DESC
-           LIMIT 1
-       }
-   }
+   pub async fn last_workspace(&self) -> Result<Option<WorkspaceLocation>> {
+       Ok(self
+           .recent_workspaces_on_disk()
+           .await?
+           .into_iter()
+           .next()
+           .map(|(_, location)| location))
+   }

    fn get_center_pane_group(&self, workspace_id: WorkspaceId) -> Result<SerializedPaneGroup> {
||||
|
|
|
@ -8,12 +8,11 @@ use futures::StreamExt;
|
|||
use gpui::{
|
||||
elements::*,
|
||||
geometry::{rect::RectF, vector::vec2f},
|
||||
Entity, ModelHandle, MouseButton, RenderContext, Task, View, ViewContext, ViewHandle,
|
||||
WeakViewHandle,
|
||||
AppContext, Entity, ModelHandle, MouseButton, RenderContext, Task, View, ViewContext,
|
||||
ViewHandle, WeakViewHandle,
|
||||
};
|
||||
use project::Project;
|
||||
use settings::Settings;
|
||||
use smallvec::SmallVec;
|
||||
use std::{
|
||||
path::PathBuf,
|
||||
sync::{Arc, Weak},
|
||||
|
@ -106,7 +105,7 @@ impl Item for SharedScreen {
|
|||
&self,
|
||||
_: Option<usize>,
|
||||
style: &theme::Tab,
|
||||
_: &gpui::AppContext,
|
||||
_: &AppContext,
|
||||
) -> gpui::ElementBox {
|
||||
Flex::row()
|
||||
.with_child(
|
||||
|
@ -130,15 +129,9 @@ impl Item for SharedScreen {
|
|||
.boxed()
|
||||
}
|
||||
|
||||
fn project_path(&self, _: &gpui::AppContext) -> Option<project::ProjectPath> {
|
||||
Default::default()
|
||||
}
|
||||
fn for_each_project_item(&self, _: &AppContext, _: &mut dyn FnMut(usize, &dyn project::Item)) {}
|
||||
|
||||
fn project_entry_ids(&self, _: &gpui::AppContext) -> SmallVec<[project::ProjectEntryId; 3]> {
|
||||
Default::default()
|
||||
}
|
||||
|
||||
fn is_singleton(&self, _: &gpui::AppContext) -> bool {
|
||||
fn is_singleton(&self, _: &AppContext) -> bool {
|
||||
false
|
||||
}
|
||||
|
||||
|
@ -155,7 +148,7 @@ impl Item for SharedScreen {
|
|||
Some(Self::new(&track, self.peer_id, self.user.clone(), cx))
|
||||
}
|
||||
|
||||
fn can_save(&self, _: &gpui::AppContext) -> bool {
|
||||
fn can_save(&self, _: &AppContext) -> bool {
|
||||
false
|
||||
}
|
||||
|
||||
|
|
|
@ -32,18 +32,20 @@ use futures::{
|
|||
use gpui::{
|
||||
actions,
|
||||
elements::*,
|
||||
geometry::vector::Vector2F,
|
||||
impl_actions, impl_internal_actions,
|
||||
keymap_matcher::KeymapContext,
|
||||
platform::{CursorStyle, WindowOptions},
|
||||
AnyModelHandle, AnyViewHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
|
||||
MouseButton, MutableAppContext, PathPromptOptions, PromptLevel, RenderContext, Task, View,
|
||||
ViewContext, ViewHandle, WeakViewHandle,
|
||||
MouseButton, MutableAppContext, PathPromptOptions, PromptLevel, RenderContext, SizeConstraint,
|
||||
Task, View, ViewContext, ViewHandle, WeakViewHandle,
|
||||
};
|
||||
use item::{FollowableItem, FollowableItemHandle, Item, ItemHandle, ProjectItem};
|
||||
use language::LanguageRegistry;
|
||||
use std::{
|
||||
any::TypeId,
|
||||
borrow::Cow,
|
||||
cmp,
|
||||
future::Future,
|
||||
path::{Path, PathBuf},
|
||||
sync::Arc,
|
||||
|
@@ -98,6 +100,7 @@ actions!(
        NewTerminal,
        NewSearch,
        Feedback
+       ShowNotif,
    ]
);
||||
|
||||
|
@ -231,54 +234,8 @@ pub fn init(app_state: Arc<AppState>, cx: &mut MutableAppContext) {
|
|||
workspace.toggle_sidebar(SidebarSide::Right, cx);
|
||||
});
|
||||
cx.add_action(Workspace::activate_pane_at_index);
|
||||
cx.add_action(
|
||||
|workspace: &mut Workspace,
|
||||
SplitWithItem {
|
||||
from,
|
||||
pane_to_split,
|
||||
item_id_to_move,
|
||||
split_direction,
|
||||
}: &_,
|
||||
cx| {
|
||||
workspace.split_pane_with_item(
|
||||
from.clone(),
|
||||
pane_to_split.clone(),
|
||||
*item_id_to_move,
|
||||
*split_direction,
|
||||
cx,
|
||||
)
|
||||
},
|
||||
);
|
||||
|
||||
cx.add_async_action(
|
||||
|workspace: &mut Workspace,
|
||||
SplitWithProjectEntry {
|
||||
pane_to_split,
|
||||
split_direction,
|
||||
project_entry,
|
||||
}: &_,
|
||||
cx| {
|
||||
pane_to_split.upgrade(cx).and_then(|pane_to_split| {
|
||||
let new_pane = workspace.add_pane(cx);
|
||||
workspace
|
||||
.center
|
||||
.split(&pane_to_split, &new_pane, *split_direction)
|
||||
.unwrap();
|
||||
|
||||
workspace
|
||||
.project
|
||||
.read(cx)
|
||||
.path_for_entry(*project_entry, cx)
|
||||
.map(|path| {
|
||||
let task = workspace.open_path(path, Some(new_pane.downgrade()), true, cx);
|
||||
cx.foreground().spawn(async move {
|
||||
task.await?;
|
||||
Ok(())
|
||||
})
|
||||
})
|
||||
})
|
||||
},
|
||||
);
|
||||
cx.add_action(Workspace::split_pane_with_item);
|
||||
cx.add_action(Workspace::split_pane_with_project_entry);
|
||||
|
||||
cx.add_async_action(
|
||||
|workspace: &mut Workspace,
|
||||
|
@@ -1416,29 +1373,21 @@ impl Workspace {
    }

    pub fn activate_next_pane(&mut self, cx: &mut ViewContext<Self>) {
-       let next_pane = {
-           let panes = self.center.panes();
-           let ix = panes
-               .iter()
-               .position(|pane| **pane == self.active_pane)
-               .unwrap();
+       let panes = self.center.panes();
+       if let Some(ix) = panes.iter().position(|pane| **pane == self.active_pane) {
            let next_ix = (ix + 1) % panes.len();
-           panes[next_ix].clone()
-       };
-       cx.focus(next_pane);
+           let next_pane = panes[next_ix].clone();
+           cx.focus(next_pane);
+       }
    }

    pub fn activate_previous_pane(&mut self, cx: &mut ViewContext<Self>) {
-       let prev_pane = {
-           let panes = self.center.panes();
-           let ix = panes
-               .iter()
-               .position(|pane| **pane == self.active_pane)
-               .unwrap();
-           let prev_ix = if ix == 0 { panes.len() - 1 } else { ix - 1 };
-           panes[prev_ix].clone()
-       };
-       cx.focus(prev_pane);
+       let panes = self.center.panes();
+       if let Some(ix) = panes.iter().position(|pane| **pane == self.active_pane) {
+           let prev_ix = cmp::min(ix.wrapping_sub(1), panes.len() - 1);
+           let prev_pane = panes[prev_ix].clone();
+           cx.focus(prev_pane);
+       }
    }

    fn handle_pane_focused(&mut self, pane: ViewHandle<Pane>, cx: &mut ViewContext<Self>) {
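The pane-cycling rewrite above drops the `unwrap()` on the active-pane index and wraps around with `%` for the next pane and `wrapping_sub`/`min` for the previous one. A tiny standalone sketch of that index math (plain functions, no workspace types):

```rust
// Illustrative sketch of the wrap-around index math used above.
fn next_index(active: usize, len: usize) -> Option<usize> {
    if len == 0 {
        return None;
    }
    Some((active + 1) % len)
}

fn previous_index(active: usize, len: usize) -> Option<usize> {
    if len == 0 {
        return None;
    }
    // `wrapping_sub` turns 0 - 1 into usize::MAX; `min` clamps it back to the last index.
    Some(std::cmp::min(active.wrapping_sub(1), len - 1))
}

fn main() {
    assert_eq!(next_index(2, 3), Some(0));     // wraps forward
    assert_eq!(previous_index(0, 3), Some(2)); // wraps backward
    assert_eq!(previous_index(2, 3), Some(1));
}
```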
|
||||
|
@ -1533,38 +1482,64 @@ impl Workspace {
|
|||
return None;
|
||||
}
|
||||
|
||||
pane.read(cx).active_item().map(|item| {
|
||||
let new_pane = self.add_pane(cx);
|
||||
if let Some(clone) = item.clone_on_split(self.database_id(), cx.as_mut()) {
|
||||
Pane::add_item(self, &new_pane, clone, true, true, None, cx);
|
||||
}
|
||||
self.center.split(&pane, &new_pane, direction).unwrap();
|
||||
cx.notify();
|
||||
new_pane
|
||||
})
|
||||
let item = pane.read(cx).active_item()?;
|
||||
let new_pane = self.add_pane(cx);
|
||||
if let Some(clone) = item.clone_on_split(self.database_id(), cx.as_mut()) {
|
||||
Pane::add_item(self, &new_pane, clone, true, true, None, cx);
|
||||
}
|
||||
self.center.split(&pane, &new_pane, direction).unwrap();
|
||||
cx.notify();
|
||||
Some(new_pane)
|
||||
}
|
||||
|
||||
pub fn split_pane_with_item(
|
||||
&mut self,
|
||||
from: WeakViewHandle<Pane>,
|
||||
pane_to_split: WeakViewHandle<Pane>,
|
||||
item_id_to_move: usize,
|
||||
split_direction: SplitDirection,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) {
|
||||
if let Some((pane_to_split, from)) = pane_to_split.upgrade(cx).zip(from.upgrade(cx)) {
|
||||
if &pane_to_split == self.dock_pane() {
|
||||
warn!("Can't split dock pane.");
|
||||
return;
|
||||
}
|
||||
|
||||
let new_pane = self.add_pane(cx);
|
||||
Pane::move_item(self, from.clone(), new_pane.clone(), item_id_to_move, 0, cx);
|
||||
self.center
|
||||
.split(&pane_to_split, &new_pane, split_direction)
|
||||
.unwrap();
|
||||
cx.notify();
|
||||
pub fn split_pane_with_item(&mut self, action: &SplitWithItem, cx: &mut ViewContext<Self>) {
|
||||
let Some(pane_to_split) = action.pane_to_split.upgrade(cx) else { return; };
|
||||
let Some(from) = action.from.upgrade(cx) else { return; };
|
||||
if &pane_to_split == self.dock_pane() {
|
||||
warn!("Can't split dock pane.");
|
||||
return;
|
||||
}
|
||||
|
||||
let new_pane = self.add_pane(cx);
|
||||
Pane::move_item(
|
||||
self,
|
||||
from.clone(),
|
||||
new_pane.clone(),
|
||||
action.item_id_to_move,
|
||||
0,
|
||||
cx,
|
||||
);
|
||||
self.center
|
||||
.split(&pane_to_split, &new_pane, action.split_direction)
|
||||
.unwrap();
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
pub fn split_pane_with_project_entry(
|
||||
&mut self,
|
||||
action: &SplitWithProjectEntry,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) -> Option<Task<Result<()>>> {
|
||||
let pane_to_split = action.pane_to_split.upgrade(cx)?;
|
||||
if &pane_to_split == self.dock_pane() {
|
||||
warn!("Can't split dock pane.");
|
||||
return None;
|
||||
}
|
||||
|
||||
let new_pane = self.add_pane(cx);
|
||||
self.center
|
||||
.split(&pane_to_split, &new_pane, action.split_direction)
|
||||
.unwrap();
|
||||
|
||||
let path = self
|
||||
.project
|
||||
.read(cx)
|
||||
.path_for_entry(action.project_entry, cx)?;
|
||||
let task = self.open_path(path, Some(new_pane.downgrade()), true, cx);
|
||||
Some(cx.foreground().spawn(async move {
|
||||
task.await?;
|
||||
Ok(())
|
||||
}))
|
||||
}
|
||||
|
||||
fn remove_pane(&mut self, pane: ViewHandle<Pane>, cx: &mut ViewContext<Self>) {
|
||||
|
@ -1650,6 +1625,7 @@ impl Workspace {
|
|||
project_id,
|
||||
leader_id: Some(leader_id),
|
||||
});
|
||||
|
||||
Some(cx.spawn_weak(|this, mut cx| async move {
|
||||
let response = request.await?;
|
||||
if let Some(this) = this.upgrade(&cx) {
|
||||
|
@ -1746,6 +1722,10 @@ impl Workspace {
|
|||
self.follower_states_by_leader.contains_key(&peer_id)
|
||||
}
|
||||
|
||||
pub fn is_followed(&self, peer_id: PeerId) -> bool {
|
||||
self.leader_state.followers.contains(&peer_id)
|
||||
}
|
||||
|
||||
fn render_titlebar(&self, theme: &Theme, cx: &mut RenderContext<Self>) -> ElementBox {
|
||||
let project = &self.project.read(cx);
|
||||
let mut worktree_root_names = String::new();
|
||||
|
@ -1923,6 +1903,9 @@ impl Workspace {
|
|||
.to_proto(),
|
||||
)
|
||||
});
|
||||
|
||||
cx.notify();
|
||||
|
||||
Ok(proto::FollowResponse {
|
||||
active_view_id,
|
||||
views: this
|
||||
|
@ -1955,10 +1938,11 @@ impl Workspace {
|
|||
_: Arc<Client>,
|
||||
mut cx: AsyncAppContext,
|
||||
) -> Result<()> {
|
||||
this.update(&mut cx, |this, _| {
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.leader_state
|
||||
.followers
|
||||
.remove(&envelope.original_sender_id()?);
|
||||
cx.notify();
|
||||
Ok(())
|
||||
})
|
||||
}
|
||||
|
@ -2499,7 +2483,16 @@ impl View for Workspace {
|
|||
if self.left_sidebar.read(cx).active_item().is_some() {
|
||||
Some(
|
||||
ChildView::new(&self.left_sidebar, cx)
|
||||
.flex(0.8, false)
|
||||
.constrained()
|
||||
.dynamically(|constraint, cx| {
|
||||
SizeConstraint::new(
|
||||
Vector2F::new(20., constraint.min.y()),
|
||||
Vector2F::new(
|
||||
cx.window_size.x() * 0.8,
|
||||
constraint.max.y(),
|
||||
),
|
||||
)
|
||||
})
|
||||
.boxed(),
|
||||
)
|
||||
} else {
|
||||
|
@ -2536,7 +2529,16 @@ impl View for Workspace {
|
|||
if self.right_sidebar.read(cx).active_item().is_some() {
|
||||
Some(
|
||||
ChildView::new(&self.right_sidebar, cx)
|
||||
.flex(0.8, false)
|
||||
.constrained()
|
||||
.dynamically(|constraint, cx| {
|
||||
SizeConstraint::new(
|
||||
Vector2F::new(20., constraint.min.y()),
|
||||
Vector2F::new(
|
||||
cx.window_size.x() * 0.8,
|
||||
constraint.max.y(),
|
||||
),
|
||||
)
|
||||
})
|
||||
.boxed(),
|
||||
)
|
||||
} else {
|
||||
|
@@ -2681,8 +2683,8 @@ pub fn activate_workspace_for_project(
    None
}

-pub fn last_opened_workspace_paths() -> Option<WorkspaceLocation> {
-   DB.last_workspace().log_err().flatten()
+pub async fn last_opened_workspace_paths() -> Option<WorkspaceLocation> {
+   DB.last_workspace().await.log_err().flatten()
}

#[allow(clippy::type_complexity)]
||||
|
@ -2755,7 +2757,7 @@ pub fn open_new(app_state: &Arc<AppState>, cx: &mut MutableAppContext) -> Task<(
|
|||
mod tests {
|
||||
use std::{cell::RefCell, rc::Rc};
|
||||
|
||||
use crate::item::test::{TestItem, TestItemEvent};
|
||||
use crate::item::test::{TestItem, TestItemEvent, TestProjectItem};
|
||||
|
||||
use super::*;
|
||||
use fs::FakeFs;
|
||||
|
@ -2862,15 +2864,11 @@ mod tests {
|
|||
project.worktrees(cx).next().unwrap().read(cx).id()
|
||||
});
|
||||
|
||||
let item1 = cx.add_view(&workspace, |_| {
|
||||
let mut item = TestItem::new();
|
||||
item.project_path = Some((worktree_id, "one.txt").into());
|
||||
item
|
||||
let item1 = cx.add_view(&workspace, |cx| {
|
||||
TestItem::new().with_project_items(&[TestProjectItem::new(1, "one.txt", cx)])
|
||||
});
|
||||
let item2 = cx.add_view(&workspace, |_| {
|
||||
let mut item = TestItem::new();
|
||||
item.project_path = Some((worktree_id, "two.txt").into());
|
||||
item
|
||||
let item2 = cx.add_view(&workspace, |cx| {
|
||||
TestItem::new().with_project_items(&[TestProjectItem::new(2, "two.txt", cx)])
|
||||
});
|
||||
|
||||
// Add an item to an empty pane
|
||||
|
@ -2971,16 +2969,11 @@ mod tests {
|
|||
|
||||
// When there are dirty untitled items, prompt to save each one. If the user
|
||||
// cancels any prompt, then abort.
|
||||
let item2 = cx.add_view(&workspace, |_| {
|
||||
let mut item = TestItem::new();
|
||||
item.is_dirty = true;
|
||||
item
|
||||
});
|
||||
let item3 = cx.add_view(&workspace, |_| {
|
||||
let mut item = TestItem::new();
|
||||
item.is_dirty = true;
|
||||
item.project_entry_ids = vec![ProjectEntryId::from_proto(1)];
|
||||
item
|
||||
let item2 = cx.add_view(&workspace, |_| TestItem::new().with_dirty(true));
|
||||
let item3 = cx.add_view(&workspace, |cx| {
|
||||
TestItem::new()
|
||||
.with_dirty(true)
|
||||
.with_project_items(&[TestProjectItem::new(1, "1.txt", cx)])
|
||||
});
|
||||
workspace.update(cx, |w, cx| {
|
||||
w.add_item(Box::new(item2.clone()), cx);
|
||||
|
@ -3005,30 +2998,27 @@ mod tests {
|
|||
Workspace::new(Default::default(), 0, project, default_item_factory, cx)
|
||||
});
|
||||
|
||||
let item1 = cx.add_view(&workspace, |_| {
|
||||
let mut item = TestItem::new();
|
||||
item.is_dirty = true;
|
||||
item.project_entry_ids = vec![ProjectEntryId::from_proto(1)];
|
||||
item
|
||||
let item1 = cx.add_view(&workspace, |cx| {
|
||||
TestItem::new()
|
||||
.with_dirty(true)
|
||||
.with_project_items(&[TestProjectItem::new(1, "1.txt", cx)])
|
||||
});
|
||||
let item2 = cx.add_view(&workspace, |_| {
|
||||
let mut item = TestItem::new();
|
||||
item.is_dirty = true;
|
||||
item.has_conflict = true;
|
||||
item.project_entry_ids = vec![ProjectEntryId::from_proto(2)];
|
||||
item
|
||||
let item2 = cx.add_view(&workspace, |cx| {
|
||||
TestItem::new()
|
||||
.with_dirty(true)
|
||||
.with_conflict(true)
|
||||
.with_project_items(&[TestProjectItem::new(2, "2.txt", cx)])
|
||||
});
|
||||
let item3 = cx.add_view(&workspace, |_| {
|
||||
let mut item = TestItem::new();
|
||||
item.is_dirty = true;
|
||||
item.has_conflict = true;
|
||||
item.project_entry_ids = vec![ProjectEntryId::from_proto(3)];
|
||||
item
|
||||
let item3 = cx.add_view(&workspace, |cx| {
|
||||
TestItem::new()
|
||||
.with_dirty(true)
|
||||
.with_conflict(true)
|
||||
.with_project_items(&[TestProjectItem::new(3, "3.txt", cx)])
|
||||
});
|
||||
let item4 = cx.add_view(&workspace, |_| {
|
||||
let mut item = TestItem::new();
|
||||
item.is_dirty = true;
|
||||
item
|
||||
let item4 = cx.add_view(&workspace, |cx| {
|
||||
TestItem::new()
|
||||
.with_dirty(true)
|
||||
.with_project_items(&[TestProjectItem::new_untitled(cx)])
|
||||
});
|
||||
let pane = workspace.update(cx, |workspace, cx| {
|
||||
workspace.add_item(Box::new(item1.clone()), cx);
|
||||
|
@ -3051,15 +3041,20 @@ mod tests {
|
|||
[item1_id, item3_id, item4_id].contains(&id)
|
||||
})
|
||||
});
|
||||
|
||||
cx.foreground().run_until_parked();
|
||||
|
||||
// There's a prompt to save item 1.
|
||||
pane.read_with(cx, |pane, _| {
|
||||
assert_eq!(pane.items_len(), 4);
|
||||
assert_eq!(pane.active_item().unwrap().id(), item1.id());
|
||||
});
|
||||
assert!(cx.has_pending_prompt(window_id));
|
||||
|
||||
// Confirm saving item 1.
|
||||
cx.simulate_prompt_answer(window_id, 0);
|
||||
cx.foreground().run_until_parked();
|
||||
|
||||
// Item 1 is saved. There's a prompt to save item 3.
|
||||
pane.read_with(cx, |pane, cx| {
|
||||
assert_eq!(item1.read(cx).save_count, 1);
|
||||
assert_eq!(item1.read(cx).save_as_count, 0);
|
||||
|
@ -3067,9 +3062,13 @@ mod tests {
|
|||
assert_eq!(pane.items_len(), 3);
|
||||
assert_eq!(pane.active_item().unwrap().id(), item3.id());
|
||||
});
|
||||
assert!(cx.has_pending_prompt(window_id));
|
||||
|
||||
// Cancel saving item 3.
|
||||
cx.simulate_prompt_answer(window_id, 1);
|
||||
cx.foreground().run_until_parked();
|
||||
|
||||
// Item 3 is reloaded. There's a prompt to save item 4.
|
||||
pane.read_with(cx, |pane, cx| {
|
||||
assert_eq!(item3.read(cx).save_count, 0);
|
||||
assert_eq!(item3.read(cx).save_as_count, 0);
|
||||
|
@ -3077,11 +3076,17 @@ mod tests {
|
|||
assert_eq!(pane.items_len(), 2);
|
||||
assert_eq!(pane.active_item().unwrap().id(), item4.id());
|
||||
});
|
||||
assert!(cx.has_pending_prompt(window_id));
|
||||
|
||||
// Confirm saving item 4.
|
||||
cx.simulate_prompt_answer(window_id, 0);
|
||||
cx.foreground().run_until_parked();
|
||||
|
||||
// There's a prompt for a path for item 4.
|
||||
cx.simulate_new_path_selection(|_| Some(Default::default()));
|
||||
close_items.await.unwrap();
|
||||
|
||||
// The requested items are closed.
|
||||
pane.read_with(cx, |pane, cx| {
|
||||
assert_eq!(item4.read(cx).save_count, 0);
|
||||
assert_eq!(item4.read(cx).save_as_count, 1);
|
||||
|
@ -3106,29 +3111,35 @@ mod tests {
|
|||
// workspace items with multiple project entries.
|
||||
let single_entry_items = (0..=4)
|
||||
.map(|project_entry_id| {
|
||||
let mut item = TestItem::new();
|
||||
item.is_dirty = true;
|
||||
item.project_entry_ids = vec![ProjectEntryId::from_proto(project_entry_id)];
|
||||
item.is_singleton = true;
|
||||
item
|
||||
cx.add_view(&workspace, |cx| {
|
||||
TestItem::new()
|
||||
.with_dirty(true)
|
||||
.with_project_items(&[TestProjectItem::new(
|
||||
project_entry_id,
|
||||
&format!("{project_entry_id}.txt"),
|
||||
cx,
|
||||
)])
|
||||
})
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
let item_2_3 = {
|
||||
let mut item = TestItem::new();
|
||||
item.is_dirty = true;
|
||||
item.is_singleton = false;
|
||||
item.project_entry_ids =
|
||||
vec![ProjectEntryId::from_proto(2), ProjectEntryId::from_proto(3)];
|
||||
item
|
||||
};
|
||||
let item_3_4 = {
|
||||
let mut item = TestItem::new();
|
||||
item.is_dirty = true;
|
||||
item.is_singleton = false;
|
||||
item.project_entry_ids =
|
||||
vec![ProjectEntryId::from_proto(3), ProjectEntryId::from_proto(4)];
|
||||
item
|
||||
};
|
||||
let item_2_3 = cx.add_view(&workspace, |cx| {
|
||||
TestItem::new()
|
||||
.with_dirty(true)
|
||||
.with_singleton(false)
|
||||
.with_project_items(&[
|
||||
single_entry_items[2].read(cx).project_items[0].clone(),
|
||||
single_entry_items[3].read(cx).project_items[0].clone(),
|
||||
])
|
||||
});
|
||||
let item_3_4 = cx.add_view(&workspace, |cx| {
|
||||
TestItem::new()
|
||||
.with_dirty(true)
|
||||
.with_singleton(false)
|
||||
.with_project_items(&[
|
||||
single_entry_items[3].read(cx).project_items[0].clone(),
|
||||
single_entry_items[4].read(cx).project_items[0].clone(),
|
||||
])
|
||||
});
|
||||
|
||||
// Create two panes that contain the following project entries:
|
||||
// left pane:
|
||||
|
@ -3139,9 +3150,9 @@ mod tests {
|
|||
// multi-entry items: (3, 4)
|
||||
let left_pane = workspace.update(cx, |workspace, cx| {
|
||||
let left_pane = workspace.active_pane().clone();
|
||||
workspace.add_item(Box::new(cx.add_view(|_| item_2_3.clone())), cx);
|
||||
for item in &single_entry_items {
|
||||
workspace.add_item(Box::new(cx.add_view(|_| item.clone())), cx);
|
||||
workspace.add_item(Box::new(item_2_3.clone()), cx);
|
||||
for item in single_entry_items {
|
||||
workspace.add_item(Box::new(item), cx);
|
||||
}
|
||||
left_pane.update(cx, |pane, cx| {
|
||||
pane.activate_item(2, true, true, cx);
|
||||
|
@ -3156,7 +3167,7 @@ mod tests {
|
|||
|
||||
//Need to cause an effect flush in order to respect new focus
|
||||
workspace.update(cx, |workspace, cx| {
|
||||
workspace.add_item(Box::new(cx.add_view(|_| item_3_4.clone())), cx);
|
||||
workspace.add_item(Box::new(item_3_4.clone()), cx);
|
||||
cx.focus(left_pane.clone());
|
||||
});
|
||||
|
||||
|
@ -3205,10 +3216,8 @@ mod tests {
|
|||
Workspace::new(Default::default(), 0, project, default_item_factory, cx)
|
||||
});
|
||||
|
||||
let item = cx.add_view(&workspace, |_| {
|
||||
let mut item = TestItem::new();
|
||||
item.project_entry_ids = vec![ProjectEntryId::from_proto(1)];
|
||||
item
|
||||
let item = cx.add_view(&workspace, |cx| {
|
||||
TestItem::new().with_project_items(&[TestProjectItem::new(1, "1.txt", cx)])
|
||||
});
|
||||
let item_id = item.id();
|
||||
workspace.update(cx, |workspace, cx| {
|
||||
|
@ -3293,7 +3302,9 @@ mod tests {
|
|||
workspace.add_item(Box::new(item.clone()), cx);
|
||||
});
|
||||
item.update(cx, |item, cx| {
|
||||
item.project_entry_ids = Default::default();
|
||||
item.project_items[0].update(cx, |item, _| {
|
||||
item.entry_id = None;
|
||||
});
|
||||
item.is_dirty = true;
|
||||
cx.blur();
|
||||
});
|
||||
|
@ -3324,10 +3335,8 @@ mod tests {
|
|||
Workspace::new(Default::default(), 0, project, default_item_factory, cx)
|
||||
});
|
||||
|
||||
let item = cx.add_view(&workspace, |_| {
|
||||
let mut item = TestItem::new();
|
||||
item.project_entry_ids = vec![ProjectEntryId::from_proto(1)];
|
||||
item
|
||||
let item = cx.add_view(&workspace, |cx| {
|
||||
TestItem::new().with_project_items(&[TestProjectItem::new(1, "1.txt", cx)])
|
||||
});
|
||||
let pane = workspace.read_with(cx, |workspace, _| workspace.active_pane().clone());
|
||||
let toolbar = pane.read_with(cx, |pane, _| pane.toolbar().clone());
|
||||
|
|
|
@@ -3,7 +3,7 @@ authors = ["Nathan Sobo <nathansobo@gmail.com>"]
description = "The fast, collaborative code editor."
edition = "2021"
name = "zed"
-version = "0.69.0"
+version = "0.71.0"

[lib]
name = "zed"
||||
|
|
|
@@ -1,3 +1,4 @@
+use anyhow::Context;
use gpui::executor::Background;
pub use language::*;
use lazy_static::lazy_static;

@@ -145,7 +146,9 @@ pub(crate) fn language(
            .unwrap()
            .data,
    )
+   .with_context(|| format!("failed to load config.toml for language {name:?}"))
    .unwrap();

    let mut language = Language::new(config, Some(grammar));

    if let Some(query) = load_query(name, "/highlights") {

@@ -173,6 +176,11 @@
            .with_injection_query(query.as_ref())
            .expect("failed to load injection query");
    }
+   if let Some(query) = load_query(name, "/overrides") {
+       language = language
+           .with_override_query(query.as_ref())
+           .expect("failed to load override query");
+   }
    if let Some(lsp_adapter) = lsp_adapter {
        language = language.with_lsp_adapter(lsp_adapter)
    }
||||
|
|
|
@@ -7,5 +7,20 @@ brackets = [
  { start = "[", end = "]", close = true, newline = true },
  { start = "(", end = ")", close = true, newline = true },
  { start = "\"", end = "\"", close = true, newline = false },
  { start = "'", end = "'", close = true, newline = false },
  { start = "/*", end = " */", close = true, newline = false },
]

+[overrides.comment]
+brackets = [
+  { start = "{", end = "}", close = true, newline = true },
+  { start = "[", end = "]", close = true, newline = true },
+  { start = "(", end = ")", close = true, newline = true },
+]
+
+[overrides.string]
+brackets = [
+  { start = "{", end = "}", close = true, newline = true },
+  { start = "[", end = "]", close = true, newline = true },
+  { start = "(", end = ")", close = true, newline = true },
+]

crates/zed/src/languages/c/overrides.scm (new file)
@@ -0,0 +1,2 @@
+(comment) @comment
+(string_literal) @string

@@ -7,5 +7,20 @@ brackets = [
  { start = "[", end = "]", close = true, newline = true },
  { start = "(", end = ")", close = true, newline = true },
  { start = "\"", end = "\"", close = true, newline = false },
  { start = "'", end = "'", close = true, newline = false },
  { start = "/*", end = " */", close = true, newline = false },
]

+[overrides.comment]
+brackets = [
+  { start = "{", end = "}", close = true, newline = true },
+  { start = "[", end = "]", close = true, newline = true },
+  { start = "(", end = ")", close = true, newline = true },
+]
+
+[overrides.string]
+brackets = [
+  { start = "{", end = "}", close = true, newline = true },
+  { start = "[", end = "]", close = true, newline = true },
+  { start = "(", end = ")", close = true, newline = true },
+]

crates/zed/src/languages/cpp/overrides.scm (new file)
@@ -0,0 +1,2 @@
+(comment) @comment
+(string_literal) @string

@@ -5,5 +5,20 @@ brackets = [
  { start = "{", end = "}", close = true, newline = true },
  { start = "[", end = "]", close = true, newline = true },
  { start = "(", end = ")", close = true, newline = true },
-  { start = "\"", end = "\"", close = true, newline = false }
+  { start = "\"", end = "\"", close = true, newline = false },
+  { start = "'", end = "'", close = true, newline = false },
]

+[overrides.comment]
+brackets = [
+  { start = "{", end = "}", close = true, newline = true },
+  { start = "[", end = "]", close = true, newline = true },
+  { start = "(", end = ")", close = true, newline = true },
+]
+
+[overrides.string]
+brackets = [
+  { start = "{", end = "}", close = true, newline = true },
+  { start = "[", end = "]", close = true, newline = true },
+  { start = "(", end = ")", close = true, newline = true },
+]

crates/zed/src/languages/css/overrides.scm (new file)
@@ -0,0 +1,2 @@
+(comment) @comment
+(string_value) @string

@@ -6,5 +6,20 @@ brackets = [
  { start = "{", end = "}", close = true, newline = true },
  { start = "[", end = "]", close = true, newline = true },
  { start = "(", end = ")", close = true, newline = true },
-  { start = "\"", end = "\"", close = true, newline = false }
+  { start = "\"", end = "\"", close = true, newline = false },
+  { start = "'", end = "'", close = true, newline = false },
]

+[overrides.comment]
+brackets = [
+  { start = "{", end = "}", close = true, newline = true },
+  { start = "[", end = "]", close = true, newline = true },
+  { start = "(", end = ")", close = true, newline = true },
+]
+
+[overrides.string]
+brackets = [
+  { start = "{", end = "}", close = true, newline = true },
+  { start = "[", end = "]", close = true, newline = true },
+  { start = "(", end = ")", close = true, newline = true },
+]

crates/zed/src/languages/elixir/overrides.scm (new file)
@@ -0,0 +1,2 @@
+(comment) @comment
+[(string) (charlist)] @string

@@ -4,5 +4,4 @@ autoclose_before = ">})"
brackets = [
  { start = "<", end = ">", close = true, newline = true },
]

-block_comment = ["<%#", "%>"]
+block_comment = ["<%#", "%>"]

@@ -7,5 +7,20 @@ brackets = [
  { start = "[", end = "]", close = true, newline = true },
  { start = "(", end = ")", close = true, newline = true },
  { start = "\"", end = "\"", close = true, newline = false },
  { start = "'", end = "'", close = true, newline = false },
  { start = "/*", end = " */", close = true, newline = false },
]

+[overrides.comment]
+brackets = [
+  { start = "{", end = "}", close = true, newline = true },
+  { start = "[", end = "]", close = true, newline = true },
+  { start = "(", end = ")", close = true, newline = true },
+]
+
+[overrides.string]
+brackets = [
+  { start = "{", end = "}", close = true, newline = true },
+  { start = "[", end = "]", close = true, newline = true },
+  { start = "(", end = ")", close = true, newline = true },
+]

@@ -40,5 +40,4 @@
    ")" @context)) @item

(field_declaration
-   name: (_) @name
-   type: (_) @context) @item
+   name: (_) @name) @item
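The config sections above pair each new `overrides.scm` query (which maps comment and string nodes to named scopes) with a per-scope bracket list, so quote characters are not auto-closed inside comments and strings. A rough standalone sketch of that lookup-with-fallback shape (plain Rust with std only; `BracketConfig` and friends are illustrative, not Zed's actual types):

```rust
use std::collections::HashMap;

// Rough sketch (not Zed's implementation): per-scope bracket overrides with a
// fallback to the language's default bracket pairs.
struct BracketPair {
    start: &'static str,
    end: &'static str,
}

struct BracketConfig {
    default: Vec<BracketPair>,
    // Keyed by scope name captured in overrides.scm, e.g. "comment", "string".
    overrides: HashMap<&'static str, Vec<BracketPair>>,
}

impl BracketConfig {
    fn brackets_for_scope(&self, scope: Option<&str>) -> &[BracketPair] {
        scope
            .and_then(|name| self.overrides.get(name))
            .map(Vec::as_slice)
            .unwrap_or(&self.default)
    }
}

fn main() {
    let pair = |start, end| BracketPair { start, end };
    let config = BracketConfig {
        default: vec![pair("{", "}"), pair("(", ")"), pair("\"", "\"")],
        overrides: HashMap::from([("string", vec![pair("{", "}"), pair("(", ")")])]),
    };
    // Inside a string scope, the quote pair is not auto-closed.
    assert_eq!(config.brackets_for_scope(Some("string")).len(), 2);
    // Outside any overridden scope, the defaults apply.
    assert_eq!(config.brackets_for_scope(None).len(), 3);
}
```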