commit 9c25d37dfc

Merge branch 'main' into randomized-tests-operation-script

In the randomized integration test, incorporate random updates of existing files into the test's new structure.

203 changed files with 4139 additions and 2989 deletions
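
The heart of this change is in the randomized collaboration test further down: the old CreateFsEntry operation becomes WriteFsEntry, which either creates a brand-new file or overwrites an existing one with freshly generated content. A minimal standalone Rust sketch of that selection step follows; the names plan_write and PlannedWrite are illustrative stand-ins, not types from the real TestPlan.

    use rand::{distributions::{Alphanumeric, DistString}, seq::SliceRandom, Rng};
    use std::path::PathBuf;

    #[derive(Debug)]
    struct PlannedWrite {
        path: PathBuf,
        is_dir: bool,
        content: String,
    }

    fn plan_write(rng: &mut impl Rng, dirs: &[PathBuf], files: &[PathBuf]) -> PlannedWrite {
        let is_dir = rng.gen::<bool>();
        if is_dir {
            // Directories are always created fresh under some existing directory.
            let mut path = dirs.choose(rng).expect("at least one dir").clone();
            path.push(format!("dir-{}", rng.gen::<u16>()));
            PlannedWrite { path, is_dir, content: String::new() }
        } else {
            let content = Alphanumeric.sample_string(rng, 16);
            // Half the time (or whenever no files exist yet) create a new file,
            // otherwise overwrite an existing one.
            let path = if files.is_empty() || rng.gen_bool(0.5) {
                let mut path = dirs.choose(rng).expect("at least one dir").clone();
                path.push(format!("file-{}.rs", rng.gen::<u16>()));
                path
            } else {
                files.choose(rng).unwrap().clone()
            };
            PlannedWrite { path, is_dir, content }
        }
    }

    fn main() {
        let mut rng = rand::thread_rng();
        let dirs = vec![PathBuf::from("/project")];
        let files = vec![PathBuf::from("/project/existing.rs")];
        println!("{:?}", plan_write(&mut rng, &dirs, &files));
    }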
.github/workflows/ci.yml (vendored): 10 changes

@@ -41,16 +41,19 @@ jobs:
      with:
        clean: false
        submodules: 'recursive'

      - name: Run tests
        run: cargo test --workspace --no-fail-fast

      - name: Build collab
        run: cargo build -p collab

      - name: Build other binaries
        run: cargo build --workspace --bins --all-features

      - name: Generate license file
        run: script/generate-licenses

  bundle:
    name: Bundle app
    runs-on:

@@ -109,6 +112,9 @@ jobs:
          exit 1
        fi

      - name: Generate license file
        run: script/generate-licenses

      - name: Create app bundle
        run: script/bundle
.github/workflows/release_actions.yml (vendored): 9 changes

@@ -21,6 +21,15 @@ jobs:

            ${{ github.event.release.body }}
            ```
  discourse_release:
    runs-on: ubuntu-latest
    steps:
      - name: Install Node
        uses: actions/setup-node@v2
        if: ${{ ! github.event.release.prerelease }}
        with:
          node-version: '16'
      - run: script/discourse_release ${{ secrets.DISCOURSE_RELEASES_API_KEY }} ${{ github.event.release.tag_name }} ${{ github.event.release.body }}
  mixpanel_release:
    runs-on: ubuntu-latest
    steps:
.gitignore (vendored): 1 change

@@ -9,6 +9,7 @@
/assets/themes/*.json
/assets/themes/Internal/*.json
/assets/themes/Experiments/*.json
/assets/licenses.md
**/venv
.build
Packages
Cargo.lock (generated): 45 changes

@@ -739,8 +739,7 @@ dependencies = [
[[package]]
name = "bromberg_sl2"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2ed88064f69518b7e3ea50ecfc1b61d43f19248618a377b95ae5c8b611134d4d"
source = "git+https://github.com/zed-industries/bromberg_sl2?rev=dac565a90e8f9245f48ff46225c915dc50f76920#dac565a90e8f9245f48ff46225c915dc50f76920"
dependencies = [
 "digest 0.9.0",
 "lazy_static",

@@ -1133,7 +1132,7 @@ dependencies = [

[[package]]
name = "collab"
version = "0.4.2"
version = "0.5.3"
dependencies = [
 "anyhow",
 "async-tungstenite",

@@ -2022,6 +2021,33 @@ dependencies = [
 "instant",
]

[[package]]
name = "feedback"
version = "0.1.0"
dependencies = [
 "anyhow",
 "client",
 "editor",
 "futures 0.3.25",
 "gpui",
 "human_bytes",
 "isahc",
 "language",
 "lazy_static",
 "log",
 "postage",
 "project",
 "search",
 "serde",
 "settings",
 "sysinfo",
 "theme",
 "tree-sitter-markdown",
 "urlencoding",
 "util",
 "workspace",
]

[[package]]
name = "file-per-thread-logger"
version = "0.1.5"

@@ -2563,7 +2589,6 @@ dependencies = [
 "sum_tree",
 "time 0.3.17",
 "tiny-skia",
 "tree-sitter",
 "usvg",
 "util",
 "waker-fn",

@@ -5512,6 +5537,7 @@ dependencies = [
 "anyhow",
 "collections",
 "editor",
 "futures 0.3.25",
 "gpui",
 "language",
 "log",

@@ -5522,6 +5548,7 @@ dependencies = [
 "serde_json",
 "settings",
 "smallvec",
 "smol",
 "theme",
 "unindent",
 "util",

@@ -6239,9 +6266,9 @@ dependencies = [

[[package]]
name = "sysinfo"
version = "0.27.1"
version = "0.27.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ccb297c0afb439440834b4bcf02c5c9da8ec2e808e70f36b0d8e815ff403bd24"
checksum = "1620f9573034c573376acc550f3b9a2be96daeb08abb3c12c8523e1cee06e80f"
dependencies = [
 "cfg-if 1.0.0",
 "core-foundation-sys",

@@ -8187,7 +8214,7 @@ checksum = "09041cd90cf85f7f8b2df60c646f853b7f535ce68f85244eb6731cf89fa498ec"

[[package]]
name = "zed"
version = "0.69.0"
version = "0.71.0"
dependencies = [
 "activity_indicator",
 "anyhow",

@@ -8212,6 +8239,7 @@ dependencies = [
 "easy-parallel",
 "editor",
 "env_logger",
 "feedback",
 "file_finder",
 "fs",
 "fsevent",

@@ -8219,7 +8247,6 @@ dependencies = [
 "fuzzy",
 "go_to_line",
 "gpui",
 "human_bytes",
 "ignore",
 "image",
 "indexmap",

@@ -8253,7 +8280,6 @@ dependencies = [
 "smallvec",
 "smol",
 "sum_tree",
 "sysinfo",
 "tempdir",
 "terminal_view",
 "text",

@@ -8282,7 +8308,6 @@ dependencies = [
 "tree-sitter-typescript",
 "unindent",
 "url",
 "urlencoding",
 "util",
 "vim",
 "workspace",
@@ -17,6 +17,7 @@ members = [
    "crates/diagnostics",
    "crates/drag_and_drop",
    "crates/editor",
    "crates/feedback",
    "crates/file_finder",
    "crates/fs",
    "crates/fsevent",
README.md: 53 changes

@@ -83,56 +83,3 @@ rustup target add wasm32-wasi
```

Plugins can be found in the `plugins` folder in the root. For more information about how plugins work, check the [Plugin Guide](./crates/plugin_runtime/README.md) in `crates/plugin_runtime/README.md`.

## Roadmap

We will organize our efforts around the following major milestones. We'll create tracking issues for each of these milestones to detail the individual tasks that comprise them.

### Minimal text editor

[Tracking issue](https://github.com/zed-industries/zed/issues/2)

Ship a minimal text editor to investors and other insiders. It should be extremely fast and stable, but all it can do is open, edit, and save text files, making it potentially useful for basic editing but not for real coding.

Establish basic infrastructure for building the app bundle and uploading an artifact. Once this is released, we should regularly distribute updates as features land.

### Collaborative code editor for internal use

[Tracking issue](https://github.com/zed-industries/zed/issues/6)

Turn the minimal text editor into a collaborative _code_ editor. This will include the minimal features that the Zed team needs to collaborate in Zed to build Zed without net loss in developer productivity. This includes productivity-critical features such as:

- Syntax highlighting and syntax-aware editing and navigation
- The ability to see and edit non-local working copies of a repository
- Language server support for Rust code navigation, refactoring, diagnostics, etc.
- Project browsing and project-wide search and replace

We want to tackle collaboration fairly early so that the rest of the design of the product can flow around that assumption. We could probably produce a single-player code editor more quickly, but at the risk of having collaboration feel more "bolted on" when we eventually add it.

### Private alpha for Rust teams on macOS

The "minimal" milestones were about getting Zed to a point where the Zed team could use Zed productively to build Zed. What features are required for someone outside the company to use Zed to productively work on another project that is also written in Rust?

This includes infrastructure like auto-updates, error reporting, and metrics collection. It also includes some amount of polish to make the tool more discoverable for someone that didn't write it, such as a UI for updating settings and key bindings. We may also need to enhance the server to support user authentication and related concerns.

The initial target audience is like us: a small team working in Rust that's potentially interested in collaborating. As the alpha proceeds, we can work with teams of different sizes.

### Private beta for Rust teams on macOS

Once we're getting sufficiently positive feedback from our initial alpha users, we widen the audience by letting people share invites. Now may be a good time to get Zed running on the web, so that it's extremely easy for a Zed user to share a link and be collaborating in seconds. Once someone is using Zed on the Web, we'll let them register for the private beta and download the native binary if they're on macOS.

### Expand to other languages

Depending on how the Rust beta is going, focus hard on dominating another niche language such as Elixir or getting a foothold within a niche of a larger language, such as React/Typescript. Alternatively, go wide at this point and add decent support for several widely-used languages such as Python, Ruby, Typescript, etc. This would entail taking 1-2 weeks per language and making sure we ship a solid experience based on a publicly-available language server. Each language has slightly different development practices, so we need to make sure Zed's UX meshes well with those practices.

### Future directions

Each of these sections could probably be broken into multiple milestones, but this part of the roadmap is too far in the future to go into that level of detail at this point.

#### Expand to other platforms

Support Linux and Windows. We'll probably want to hire at least one person that prefers to work on each respective platform and have them spearhead the effort to port Zed to that platform. Once they've done so, they can join the general development effort while ensuring the user experience stays good on that platform.

#### Expand on collaboration

To start with, we'll focus on synchronous collaboration because that's where we're most differentiated, but there's no reason we have to limit ourselves to that. How can our tool facilitate collaboration generally, whether it's sync or async? What would it take for a team to go 100% Zed and collaborate fully within the tool? If we haven't added it already, basic Git support would be nice.
@@ -186,10 +186,10 @@
      }
    },
    {
      "context": "BufferSearchBar",
      "context": "BufferSearchBar > Editor",
      "bindings": {
        "escape": "buffer_search::Dismiss",
        "cmd-f": "buffer_search::FocusEditor",
        "tab": "buffer_search::FocusEditor",
        "enter": "search::SelectNextMatch",
        "shift-enter": "search::SelectPrevMatch"
      }
@@ -209,6 +209,10 @@
      "ctrl-e": [
        "vim::Scroll",
        "LineDown"
      ],
      "r": [
        "vim::PushOperator",
        "Replace"
      ]
    }
  },

@@ -294,7 +298,11 @@
      "d": "vim::VisualDelete",
      "x": "vim::VisualDelete",
      "y": "vim::VisualYank",
      "p": "vim::VisualPaste"
      "p": "vim::VisualPaste",
      "r": [
        "vim::PushOperator",
        "Replace"
      ]
    }
  },
  {
@@ -13,6 +13,8 @@
  // Whether to show the informational hover box when moving the mouse
  // over symbols in the editor.
  "hover_popover_enabled": true,
  // Whether to confirm before quitting Zed.
  "confirm_quit": false,
  // Whether the cursor blinks in the editor.
  "cursor_blink": true,
  // Whether to pop the completions menu while typing in an editor without

@@ -79,6 +81,13 @@
  "hard_tabs": false,
  // How many columns a tab should occupy.
  "tab_size": 4,
  // Control what info Zed sends to our servers
  "telemetry": {
    // Send debug info like crash reports.
    "diagnostics": true,
    // Send anonymized usage data like what languages you're using Zed with.
    "metrics": true
  },
  // Git gutter behavior configuration.
  "git": {
    // Control whether the git gutter is shown. May take 2 values:
@@ -2,6 +2,7 @@
name = "activity_indicator"
version = "0.1.0"
edition = "2021"
publish = false

[lib]
path = "src/activity_indicator.rs"

@@ -2,6 +2,7 @@
name = "assets"
version = "0.1.0"
edition = "2021"
publish = false

[lib]
path = "src/assets.rs"

@@ -2,6 +2,7 @@
name = "auto_update"
version = "0.1.0"
edition = "2021"
publish = false

[lib]
path = "src/auto_update.rs"

@@ -2,6 +2,7 @@
name = "breadcrumbs"
version = "0.1.0"
edition = "2021"
publish = false

[lib]
path = "src/breadcrumbs.rs"

@@ -2,6 +2,7 @@
name = "call"
version = "0.1.0"
edition = "2021"
publish = false

[lib]
path = "src/call.rs"

@@ -2,6 +2,7 @@
name = "cli"
version = "0.1.0"
edition = "2021"
publish = false

[lib]
path = "src/cli.rs"
|
@ -9,7 +9,13 @@ use core_foundation::{
|
|||
use core_services::{kLSLaunchDefaults, LSLaunchURLSpec, LSOpenFromURLSpec, TCFType};
|
||||
use ipc_channel::ipc::{IpcOneShotServer, IpcReceiver, IpcSender};
|
||||
use serde::Deserialize;
|
||||
use std::{ffi::OsStr, fs, path::PathBuf, ptr};
|
||||
use std::{
|
||||
ffi::OsStr,
|
||||
fs::{self, OpenOptions},
|
||||
io,
|
||||
path::{Path, PathBuf},
|
||||
ptr,
|
||||
};
|
||||
|
||||
#[derive(Parser)]
|
||||
#[clap(name = "zed", global_setting(clap::AppSettings::NoAutoVersion))]
|
||||
|
@ -54,6 +60,12 @@ fn main() -> Result<()> {
|
|||
return Ok(());
|
||||
}
|
||||
|
||||
for path in args.paths.iter() {
|
||||
if !path.exists() {
|
||||
touch(path.as_path())?;
|
||||
}
|
||||
}
|
||||
|
||||
let (tx, rx) = launch_app(bundle_path)?;
|
||||
|
||||
tx.send(CliRequest::Open {
|
||||
|
@ -77,6 +89,13 @@ fn main() -> Result<()> {
|
|||
Ok(())
|
||||
}
|
||||
|
||||
fn touch(path: &Path) -> io::Result<()> {
|
||||
match OpenOptions::new().create(true).write(true).open(path) {
|
||||
Ok(_) => Ok(()),
|
||||
Err(e) => Err(e),
|
||||
}
|
||||
}
|
||||
|
||||
fn locate_bundle() -> Result<PathBuf> {
|
||||
let cli_path = std::env::current_exe()?.canonicalize()?;
|
||||
let mut app_path = cli_path.clone();
|
||||
|
|
|
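
As an aside, the touch helper added above can be expressed more compactly; this equivalent sketch (not the code in the diff) relies on OpenOptions::create(true) creating the file only when it is missing and otherwise leaving its contents alone:

    use std::{fs::OpenOptions, io, path::Path};

    fn touch(path: &Path) -> io::Result<()> {
        // Discard the File handle; opening with create(true) + write(true) is enough.
        OpenOptions::new().create(true).write(true).open(path).map(|_| ())
    }

    fn main() -> io::Result<()> {
        touch(&std::env::temp_dir().join("zed-cli-touch-example"))
    }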
@@ -2,6 +2,7 @@
name = "client"
version = "0.1.0"
edition = "2021"
publish = false

[lib]
path = "src/client.rs"
@@ -25,6 +25,7 @@ use postage::watch;
use rand::prelude::*;
use rpc::proto::{AnyTypedEnvelope, EntityMessage, EnvelopedMessage, PeerId, RequestMessage};
use serde::Deserialize;
use settings::{Settings, TelemetrySettings};
use std::{
    any::TypeId,
    collections::HashMap,

@@ -423,7 +424,9 @@ impl Client {
                }));
            }
            Status::SignedOut | Status::UpgradeRequired => {
                self.telemetry.set_authenticated_user_info(None, false);
                let telemetry_settings = cx.read(|cx| cx.global::<Settings>().telemetry());
                self.telemetry
                    .set_authenticated_user_info(None, false, telemetry_settings);
                state._reconnect_task.take();
            }
            _ => {}

@@ -706,7 +709,13 @@ impl Client {
            credentials = read_credentials_from_keychain(cx);
            read_from_keychain = credentials.is_some();
            if read_from_keychain {
                self.report_event("read credentials from keychain", Default::default());
                cx.read(|cx| {
                    self.report_event(
                        "read credentials from keychain",
                        Default::default(),
                        cx.global::<Settings>().telemetry(),
                    );
                });
            }
        }
        if credentials.is_none() {

@@ -997,6 +1006,8 @@ impl Client {
        let executor = cx.background();
        let telemetry = self.telemetry.clone();
        let http = self.http.clone();
        let metrics_enabled = cx.read(|cx| cx.global::<Settings>().telemetry());

        executor.clone().spawn(async move {
            // Generate a pair of asymmetric encryption keys. The public key will be used by the
            // zed server to encrypt the user's access token, so that it can't be intercepted by

@@ -1079,7 +1090,11 @@ impl Client {
            .context("failed to decrypt access token")?;
        platform.activate(true);

        telemetry.report_event("authenticate with browser", Default::default());
        telemetry.report_event(
            "authenticate with browser",
            Default::default(),
            metrics_enabled,
        );

        Ok(Credentials {
            user_id: user_id.parse()?,

@@ -1287,13 +1302,23 @@ impl Client {
        self.telemetry.start();
    }

    pub fn report_event(&self, kind: &str, properties: Value) {
        self.telemetry.report_event(kind, properties.clone());
    pub fn report_event(
        &self,
        kind: &str,
        properties: Value,
        telemetry_settings: TelemetrySettings,
    ) {
        self.telemetry
            .report_event(kind, properties.clone(), telemetry_settings);
    }

    pub fn telemetry_log_file_path(&self) -> Option<PathBuf> {
        self.telemetry.log_file_path()
    }

    pub fn metrics_id(&self) -> Option<Arc<str>> {
        self.telemetry.metrics_id()
    }
}

impl WeakSubscriber {
@@ -10,6 +10,7 @@ use lazy_static::lazy_static;
use parking_lot::Mutex;
use serde::Serialize;
use serde_json::json;
use settings::TelemetrySettings;
use std::{
    io::Write,
    mem,

@@ -184,11 +185,18 @@ impl Telemetry {
        .detach();
    }

    /// This method takes the entire TelemetrySettings struct in order to force client code
    /// to pull the struct out of the settings global. Do not remove!
    pub fn set_authenticated_user_info(
        self: &Arc<Self>,
        metrics_id: Option<String>,
        is_staff: bool,
        telemetry_settings: TelemetrySettings,
    ) {
        if !telemetry_settings.metrics() {
            return;
        }

        let this = self.clone();
        let mut state = self.state.lock();
        let device_id = state.device_id.clone();

@@ -221,7 +229,16 @@ impl Telemetry {
        }
    }

    pub fn report_event(self: &Arc<Self>, kind: &str, properties: Value) {
    pub fn report_event(
        self: &Arc<Self>,
        kind: &str,
        properties: Value,
        telemetry_settings: TelemetrySettings,
    ) {
        if !telemetry_settings.metrics() {
            return;
        }

        let mut state = self.state.lock();
        let event = MixpanelEvent {
            event: kind.to_string(),

@@ -261,6 +278,10 @@ impl Telemetry {
        }
    }

    pub fn metrics_id(self: &Arc<Self>) -> Option<Arc<str>> {
        self.state.lock().metrics_id.clone()
    }

    fn flush(self: &Arc<Self>) {
        let mut state = self.state.lock();
        let mut events = mem::take(&mut state.queue);
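
The doc comment above states the design intent: report_event and set_authenticated_user_info now take the whole TelemetrySettings value so every caller is forced to read it out of the settings global at the call site. A small self-contained sketch of that pattern, using stand-in types rather than Zed's actual Telemetry and Settings:

    // Stand-in types; only the shape of the pattern matches the diff above.
    #[derive(Clone, Copy)]
    struct TelemetrySettings {
        metrics: bool,
    }

    impl TelemetrySettings {
        fn metrics(&self) -> bool {
            self.metrics
        }
    }

    struct Telemetry {
        queue: std::cell::RefCell<Vec<String>>,
    }

    impl Telemetry {
        fn report_event(&self, kind: &str, settings: TelemetrySettings) {
            // Drop the event early if the user has metrics disabled.
            if !settings.metrics() {
                return;
            }
            self.queue.borrow_mut().push(kind.to_string());
        }
    }

    fn main() {
        let telemetry = Telemetry { queue: Default::default() };
        // In real code this value would be read from the settings global at the call site.
        let settings = TelemetrySettings { metrics: false };
        telemetry.report_event("authenticate with browser", settings);
        assert!(telemetry.queue.borrow().is_empty());
    }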
@@ -5,6 +5,7 @@ use futures::{channel::mpsc, future, AsyncReadExt, Future, StreamExt};
use gpui::{AsyncAppContext, Entity, ImageData, ModelContext, ModelHandle, Task};
use postage::{sink::Sink, watch};
use rpc::proto::{RequestMessage, UsersResponse};
use settings::Settings;
use std::sync::{Arc, Weak};
use util::TryFutureExt as _;

@@ -141,14 +142,11 @@ impl UserStore {
                let fetch_metrics_id =
                    client.request(proto::GetPrivateUserInfo {}).log_err();
                let (user, info) = futures::join!(fetch_user, fetch_metrics_id);
                if let Some(info) = info {
                    client.telemetry.set_authenticated_user_info(
                        Some(info.metrics_id.clone()),
                        info.staff,
                    );
                } else {
                    client.telemetry.set_authenticated_user_info(None, false);
                }
                client.telemetry.set_authenticated_user_info(
                    info.as_ref().map(|info| info.metrics_id.clone()),
                    info.as_ref().map(|info| info.staff).unwrap_or(false),
                    cx.read(|cx| cx.global::<Settings>().telemetry()),
                );

                current_user_tx.send(user).await.ok();
            }
@@ -2,6 +2,7 @@
name = "clock"
version = "0.1.0"
edition = "2021"
publish = false

[lib]
path = "src/clock.rs"
@ -3,7 +3,8 @@ authors = ["Nathan Sobo <nathan@zed.dev>"]
|
|||
default-run = "collab"
|
||||
edition = "2021"
|
||||
name = "collab"
|
||||
version = "0.4.2"
|
||||
version = "0.5.3"
|
||||
publish = false
|
||||
|
||||
[[bin]]
|
||||
name = "collab"
|
||||
|
|
|
@@ -57,6 +57,7 @@ CREATE TABLE "worktrees" (
    "abs_path" VARCHAR NOT NULL,
    "visible" BOOL NOT NULL,
    "scan_id" INTEGER NOT NULL,
    "is_complete" BOOL NOT NULL DEFAULT FALSE,
    "completed_scan_id" INTEGER NOT NULL,
    PRIMARY KEY(project_id, id)
);
@@ -1,3 +1,3 @@
ALTER TABLE worktrees
DROP COLUMN is_complete,
ALTER COLUMN is_complete SET DEFAULT FALSE,
ADD COLUMN completed_scan_id INT8;
@@ -353,6 +353,8 @@ pub struct CreateInviteFromCodeParams {
    invite_code: String,
    email_address: String,
    device_id: Option<String>,
    #[serde(default)]
    added_to_mailing_list: bool,
}

async fn create_invite_from_code(

@@ -365,6 +367,7 @@ async fn create_invite_from_code(
            &params.invite_code,
            &params.email_address,
            params.device_id.as_deref(),
            params.added_to_mailing_list,
        )
        .await?,
    ))
@@ -882,6 +882,7 @@ impl Database {
        code: &str,
        email_address: &str,
        device_id: Option<&str>,
        added_to_mailing_list: bool,
    ) -> Result<Invite> {
        self.transaction(|tx| async move {
            let existing_user = user::Entity::find()

@@ -933,6 +934,7 @@ impl Database {
                platform_windows: ActiveValue::set(false),
                platform_unknown: ActiveValue::set(true),
                device_id: ActiveValue::set(device_id.map(|device_id| device_id.into())),
                added_to_mailing_list: ActiveValue::set(added_to_mailing_list),
                ..Default::default()
            })
            .on_conflict(
@@ -567,7 +567,12 @@ async fn test_invite_codes() {

    // User 2 redeems the invite code and becomes a contact of user 1.
    let user2_invite = db
        .create_invite_from_code(&invite_code, "user2@example.com", Some("user-2-device-id"))
        .create_invite_from_code(
            &invite_code,
            "user2@example.com",
            Some("user-2-device-id"),
            true,
        )
        .await
        .unwrap();
    let NewUserResult {

@@ -617,7 +622,7 @@ async fn test_invite_codes() {

    // User 3 redeems the invite code and becomes a contact of user 1.
    let user3_invite = db
        .create_invite_from_code(&invite_code, "user3@example.com", None)
        .create_invite_from_code(&invite_code, "user3@example.com", None, true)
        .await
        .unwrap();
    let NewUserResult {

@@ -672,9 +677,14 @@ async fn test_invite_codes() {
    );

    // Trying to redeem the code for the third time results in an error.
    db.create_invite_from_code(&invite_code, "user4@example.com", Some("user-4-device-id"))
        .await
        .unwrap_err();
    db.create_invite_from_code(
        &invite_code,
        "user4@example.com",
        Some("user-4-device-id"),
        true,
    )
    .await
    .unwrap_err();

    // Invite count can be updated after the code has been created.
    db.set_invite_count_for_user(user1, 2).await.unwrap();

@@ -684,7 +694,12 @@ async fn test_invite_codes() {

    // User 4 can now redeem the invite code and becomes a contact of user 1.
    let user4_invite = db
        .create_invite_from_code(&invite_code, "user4@example.com", Some("user-4-device-id"))
        .create_invite_from_code(
            &invite_code,
            "user4@example.com",
            Some("user-4-device-id"),
            true,
        )
        .await
        .unwrap();
    let user4 = db

@@ -739,9 +754,14 @@ async fn test_invite_codes() {
    );

    // An existing user cannot redeem invite codes.
    db.create_invite_from_code(&invite_code, "user2@example.com", Some("user-2-device-id"))
        .await
        .unwrap_err();
    db.create_invite_from_code(
        &invite_code,
        "user2@example.com",
        Some("user-2-device-id"),
        true,
    )
    .await
    .unwrap_err();
    let (_, invite_count) = db.get_invite_code_for_user(user1).await.unwrap().unwrap();
    assert_eq!(invite_count, 1);

@@ -763,7 +783,7 @@ async fn test_invite_codes() {
    db.set_invite_count_for_user(user5, 5).await.unwrap();
    let (user5_invite_code, _) = db.get_invite_code_for_user(user5).await.unwrap().unwrap();
    let user5_invite_to_user1 = db
        .create_invite_from_code(&user5_invite_code, "user1@different.com", None)
        .create_invite_from_code(&user5_invite_code, "user1@different.com", None, true)
        .await
        .unwrap();
    let user1_2 = db
@@ -102,10 +102,7 @@ impl TestServer {
    async fn create_client(&mut self, cx: &mut TestAppContext, name: &str) -> TestClient {
        cx.update(|cx| {
            cx.set_global(HomeDir(Path::new("/tmp/").to_path_buf()));

            let mut settings = Settings::test(cx);
            settings.projects_online_by_default = false;
            cx.set_global(settings);
            cx.set_global(Settings::test(cx));
        });

        let http = FakeHttpClient::with_404_response();
@@ -32,7 +32,9 @@ use std::{
    sync::Arc,
};
use unindent::Unindent as _;
use workspace::{item::Item, shared_screen::SharedScreen, SplitDirection, ToggleFollow, Workspace};
use workspace::{
    item::ItemHandle as _, shared_screen::SharedScreen, SplitDirection, ToggleFollow, Workspace,
};

#[ctor::ctor]
fn init_logger() {

@@ -5602,7 +5604,7 @@ async fn test_following(
    });
    assert!(cx_b.read(|cx| editor_b2.is_focused(cx)));
    assert_eq!(
        editor_b2.read_with(cx_b, |editor, cx| editor.project_path(cx)),
        cx_b.read(|cx| editor_b2.project_path(cx)),
        Some((worktree_id, "2.txt").into())
    );
    assert_eq!(
@@ -20,6 +20,7 @@ use rand::{
    prelude::*,
};
use serde::{Deserialize, Serialize};
use settings::Settings;
use std::{
    env,
    ops::Range,

@@ -307,6 +308,33 @@ async fn test_random_collaboration(
            let guest_diff_base = guest_buffer
                .read_with(client_cx, |b, _| b.diff_base().map(ToString::to_string));
            assert_eq!(guest_diff_base, host_diff_base);

            let host_saved_version =
                host_buffer.read_with(host_cx, |b, _| b.saved_version().clone());
            let guest_saved_version =
                guest_buffer.read_with(client_cx, |b, _| b.saved_version().clone());
            assert_eq!(guest_saved_version, host_saved_version);

            let host_saved_version_fingerprint =
                host_buffer.read_with(host_cx, |b, _| b.saved_version_fingerprint());
            let guest_saved_version_fingerprint =
                guest_buffer.read_with(client_cx, |b, _| b.saved_version_fingerprint());
            assert_eq!(
                guest_saved_version_fingerprint,
                host_saved_version_fingerprint
            );

            let host_saved_mtime = host_buffer.read_with(host_cx, |b, _| b.saved_mtime());
            let guest_saved_mtime = guest_buffer.read_with(client_cx, |b, _| b.saved_mtime());
            assert_eq!(guest_saved_mtime, host_saved_mtime);

            let host_is_dirty = host_buffer.read_with(host_cx, |b, _| b.is_dirty());
            let guest_is_dirty = guest_buffer.read_with(client_cx, |b, _| b.is_dirty());
            assert_eq!(guest_is_dirty, host_is_dirty);

            let host_has_conflict = host_buffer.read_with(host_cx, |b, _| b.has_conflict());
            let guest_has_conflict = guest_buffer.read_with(client_cx, |b, _| b.has_conflict());
            assert_eq!(guest_has_conflict, host_has_conflict);
        }
    }
}

@@ -314,6 +342,7 @@ async fn test_random_collaboration(
    for (client, mut cx) in clients {
        cx.update(|cx| {
            cx.clear_globals();
            cx.set_global(Settings::test(cx));
            drop(client);
        });
    }

@@ -883,26 +912,28 @@ async fn apply_client_operation(
            }
        }

        ClientOperation::CreateFsEntry { path, is_dir } => {
        ClientOperation::WriteFsEntry {
            path,
            is_dir,
            content,
        } => {
            client
                .fs
                .metadata(&path.parent().unwrap())
                .await?
                .ok_or(TestError::Inapplicable)?;

            log::info!(
                "{}: creating {} at {:?}",
                client.username,
                if is_dir { "dir" } else { "file" },
                path
            );

            if is_dir {
                log::info!("{}: creating dir at {:?}", client.username, path);
                client.fs.create_dir(&path).await.unwrap();
            } else {
                let exists = client.fs.metadata(&path).await?.is_some();
                let verb = if exists { "updating" } else { "creating" };
                log::info!("{}: {} file at {:?}", verb, client.username, path);

                client
                    .fs
                    .create_file(&path, Default::default())
                    .save(&path, &content.as_str().into(), fs::LineEnding::Unix)
                    .await
                    .unwrap();
            }

@@ -1059,9 +1090,10 @@ enum ClientOperation {
        full_path: PathBuf,
        is_dir: bool,
    },
    CreateFsEntry {
    WriteFsEntry {
        path: PathBuf,
        is_dir: bool,
        content: String,
    },
    WriteGitIndex {
        repo_path: PathBuf,

@@ -1614,20 +1646,35 @@ impl TestPlan {
                };
            }

            // Create a file or directory
            // Create or update a file or directory
            96.. => {
                let is_dir = self.rng.gen::<bool>();
                let mut path = cx
                    .background()
                    .block(client.fs.directories())
                    .choose(&mut self.rng)
                    .unwrap()
                    .clone();
                path.push(gen_file_name(&mut self.rng));
                if !is_dir {
                    path.set_extension("rs");
                let content;
                let mut path;
                let dir_paths = cx.background().block(client.fs.directories());

                if is_dir {
                    content = String::new();
                    path = dir_paths.choose(&mut self.rng).unwrap().clone();
                    path.push(gen_file_name(&mut self.rng));
                } else {
                    content = Alphanumeric.sample_string(&mut self.rng, 16);

                    // Create a new file or overwrite an existing file
                    let file_paths = cx.background().block(client.fs.files());
                    if file_paths.is_empty() || self.rng.gen_bool(0.5) {
                        path = dir_paths.choose(&mut self.rng).unwrap().clone();
                        path.push(gen_file_name(&mut self.rng));
                        path.set_extension("rs");
                    } else {
                        path = file_paths.choose(&mut self.rng).unwrap().clone()
                    };
                }
                break ClientOperation::CreateFsEntry { path, is_dir };
                break ClientOperation::WriteFsEntry {
                    path,
                    is_dir,
                    content,
                };
            }
        }
    })

@@ -1925,3 +1972,15 @@ fn path_env_var(name: &str) -> Option<PathBuf> {
    }
    Some(path)
}

async fn child_file_paths(client: &TestClient, dir_path: &Path) -> Vec<PathBuf> {
    let mut child_paths = client.fs.read_dir(dir_path).await.unwrap();
    let mut child_file_paths = Vec::new();
    while let Some(child_path) = child_paths.next().await {
        let child_path = child_path.unwrap();
        if client.fs.is_file(&child_path).await {
            child_file_paths.push(child_path);
        }
    }
    child_file_paths
}
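
To make the WriteFsEntry semantics concrete, here is a standalone sketch of applying such an operation against a real filesystem. The test itself goes through its fake fs and the save call shown above; this version uses std::fs and is purely illustrative:

    use std::{fs, io, path::Path};

    fn apply_write(path: &Path, is_dir: bool, content: &str) -> io::Result<()> {
        if is_dir {
            fs::create_dir_all(path)?;
        } else {
            let existed = path.exists();
            // fs::write truncates and overwrites, covering both "create" and "update".
            fs::write(path, content)?;
            println!("{} file at {:?}", if existed { "updated" } else { "created" }, path);
        }
        Ok(())
    }

    fn main() -> io::Result<()> {
        let dir = std::env::temp_dir().join("write-fs-entry-demo");
        apply_write(&dir, true, "")?;
        apply_write(&dir.join("a.rs"), false, "fn main() {}")?; // create
        apply_write(&dir.join("a.rs"), false, "fn main() { }")?; // update in place
        Ok(())
    }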
@@ -2,6 +2,7 @@
name = "collab_ui"
version = "0.1.0"
edition = "2021"
publish = false

[lib]
path = "src/collab_ui.rs"
@@ -48,6 +48,7 @@ pub fn init(cx: &mut MutableAppContext) {
                },
                |_| IncomingCallNotification::new(incoming_call.clone()),
            );

            notification_windows.push(window_id);
        }
    }

@@ -225,6 +226,7 @@ impl View for IncomingCallNotification {
            .theme
            .incoming_call_notification
            .background;

        Flex::row()
            .with_child(self.render_caller(cx))
            .with_child(self.render_buttons(cx))
@@ -2,6 +2,7 @@
name = "collections"
version = "0.1.0"
edition = "2021"
publish = false

[lib]
path = "src/collections.rs"

@@ -2,6 +2,7 @@
name = "command_palette"
version = "0.1.0"
edition = "2021"
publish = false

[lib]
path = "src/command_palette.rs"

@@ -2,6 +2,7 @@
name = "context_menu"
version = "0.1.0"
edition = "2021"
publish = false

[lib]
path = "src/context_menu.rs"

@@ -2,6 +2,7 @@
name = "db"
version = "0.1.0"
edition = "2021"
publish = false

[lib]
path = "src/db.rs"

@@ -2,6 +2,7 @@
name = "diagnostics"
version = "0.1.0"
edition = "2021"
publish = false

[lib]
path = "src/diagnostics.rs"
@@ -21,7 +21,6 @@ use language::{
use project::{DiagnosticSummary, Project, ProjectPath};
use serde_json::json;
use settings::Settings;
use smallvec::SmallVec;
use std::{
    any::{Any, TypeId},
    cmp::Ordering,

@@ -521,12 +520,8 @@ impl Item for ProjectDiagnosticsEditor {
        )
    }

    fn project_path(&self, _: &AppContext) -> Option<project::ProjectPath> {
        None
    }

    fn project_entry_ids(&self, cx: &AppContext) -> SmallVec<[project::ProjectEntryId; 3]> {
        self.editor.project_entry_ids(cx)
    fn for_each_project_item(&self, cx: &AppContext, f: &mut dyn FnMut(usize, &dyn project::Item)) {
        self.editor.for_each_project_item(cx, f)
    }

    fn is_singleton(&self, _: &AppContext) -> bool {
@@ -2,6 +2,7 @@
name = "drag_and_drop"
version = "0.1.0"
edition = "2021"
publish = false

[lib]
path = "src/drag_and_drop.rs"

@@ -2,6 +2,7 @@
name = "editor"
version = "0.1.0"
edition = "2021"
publish = false

[lib]
path = "src/editor.rs"
@ -44,7 +44,7 @@ use gpui::{
|
|||
ViewContext, ViewHandle, WeakViewHandle,
|
||||
};
|
||||
use highlight_matching_bracket::refresh_matching_bracket_highlights;
|
||||
use hover_popover::{hide_hover, HoverState};
|
||||
use hover_popover::{hide_hover, HideHover, HoverState};
|
||||
pub use items::MAX_TAB_TITLE_LEN;
|
||||
use itertools::Itertools;
|
||||
pub use language::{char_kind, CharKind};
|
||||
|
@ -62,7 +62,7 @@ pub use multi_buffer::{
|
|||
};
|
||||
use multi_buffer::{MultiBufferChunks, ToOffsetUtf16};
|
||||
use ordered_float::OrderedFloat;
|
||||
use project::{FormatTrigger, LocationLink, Project, ProjectPath, ProjectTransaction};
|
||||
use project::{FormatTrigger, Location, LocationLink, Project, ProjectPath, ProjectTransaction};
|
||||
use scroll::{
|
||||
autoscroll::Autoscroll, OngoingScroll, ScrollAnchor, ScrollManager, ScrollbarAutoHide,
|
||||
};
|
||||
|
@ -1008,6 +1008,15 @@ impl Editor {
|
|||
Self::new(EditorMode::SingleLine, buffer, None, field_editor_style, cx)
|
||||
}
|
||||
|
||||
pub fn multi_line(
|
||||
field_editor_style: Option<Arc<GetFieldEditorTheme>>,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) -> Self {
|
||||
let buffer = cx.add_model(|cx| Buffer::new(0, String::new(), cx));
|
||||
let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
|
||||
Self::new(EditorMode::Full, buffer, None, field_editor_style, cx)
|
||||
}
|
||||
|
||||
pub fn auto_height(
|
||||
max_lines: usize,
|
||||
field_editor_style: Option<Arc<GetFieldEditorTheme>>,
|
||||
|
@ -1086,6 +1095,8 @@ impl Editor {
|
|||
|
||||
let blink_manager = cx.add_model(|cx| BlinkManager::new(CURSOR_BLINK_INTERVAL, cx));
|
||||
|
||||
let soft_wrap_mode_override =
|
||||
(mode == EditorMode::SingleLine).then(|| settings::SoftWrap::None);
|
||||
let mut this = Self {
|
||||
handle: cx.weak_handle(),
|
||||
buffer: buffer.clone(),
|
||||
|
@ -1101,7 +1112,7 @@ impl Editor {
|
|||
select_larger_syntax_node_stack: Vec::new(),
|
||||
ime_transaction: Default::default(),
|
||||
active_diagnostics: None,
|
||||
soft_wrap_mode_override: None,
|
||||
soft_wrap_mode_override,
|
||||
get_field_editor_theme,
|
||||
project,
|
||||
focused: false,
|
||||
|
@ -1319,7 +1330,7 @@ impl Editor {
|
|||
}
|
||||
}
|
||||
|
||||
hide_hover(self, cx);
|
||||
hide_hover(self, &HideHover, cx);
|
||||
|
||||
if old_cursor_position.to_display_point(&display_map).row()
|
||||
!= new_cursor_position.to_display_point(&display_map).row()
|
||||
|
@ -1694,7 +1705,7 @@ impl Editor {
|
|||
return;
|
||||
}
|
||||
|
||||
if hide_hover(self, cx) {
|
||||
if hide_hover(self, &HideHover, cx) {
|
||||
return;
|
||||
}
|
||||
|
||||
|
@ -1735,7 +1746,7 @@ impl Editor {
|
|||
for (selection, autoclose_region) in
|
||||
self.selections_with_autoclose_regions(selections, &snapshot)
|
||||
{
|
||||
if let Some(language) = snapshot.language_at(selection.head()) {
|
||||
if let Some(language) = snapshot.language_scope_at(selection.head()) {
|
||||
// Determine if the inserted text matches the opening or closing
|
||||
// bracket of any of this language's bracket pairs.
|
||||
let mut bracket_pair = None;
|
||||
|
@ -1896,7 +1907,7 @@ impl Editor {
|
|||
let end = selection.end;
|
||||
|
||||
let mut insert_extra_newline = false;
|
||||
if let Some(language) = buffer.language_at(start) {
|
||||
if let Some(language) = buffer.language_scope_at(start) {
|
||||
let leading_whitespace_len = buffer
|
||||
.reversed_chars_at(start)
|
||||
.take_while(|c| c.is_whitespace() && *c != '\n')
|
||||
|
@ -2020,7 +2031,9 @@ impl Editor {
|
|||
old_selections
|
||||
.iter()
|
||||
.map(|s| (s.start..s.end, text.clone())),
|
||||
Some(AutoindentMode::EachLine),
|
||||
Some(AutoindentMode::Block {
|
||||
original_indent_columns: Vec::new(),
|
||||
}),
|
||||
cx,
|
||||
);
|
||||
anchors
|
||||
|
@ -3629,9 +3642,7 @@ impl Editor {
|
|||
}
|
||||
|
||||
pub fn undo(&mut self, _: &Undo, cx: &mut ViewContext<Self>) {
|
||||
dbg!("undo");
|
||||
if let Some(tx_id) = self.buffer.update(cx, |buffer, cx| buffer.undo(cx)) {
|
||||
dbg!(tx_id);
|
||||
if let Some((selections, _)) = self.selection_history.transaction(tx_id).cloned() {
|
||||
self.change_selections(None, cx, |s| {
|
||||
s.select_anchors(selections.to_vec());
|
||||
|
@ -4531,7 +4542,10 @@ impl Editor {
|
|||
|
||||
// TODO: Handle selections that cross excerpts
|
||||
for selection in &mut selections {
|
||||
let language = if let Some(language) = snapshot.language_at(selection.start) {
|
||||
let start_column = snapshot.indent_size_for_line(selection.start.row).len;
|
||||
let language = if let Some(language) =
|
||||
snapshot.language_scope_at(Point::new(selection.start.row, start_column))
|
||||
{
|
||||
language
|
||||
} else {
|
||||
continue;
|
||||
|
@ -4801,7 +4815,7 @@ impl Editor {
|
|||
if let Some(popover) = self.hover_state.diagnostic_popover.as_ref() {
|
||||
let (group_id, jump_to) = popover.activation_info();
|
||||
if self.activate_diagnostics(group_id, cx) {
|
||||
self.change_selections(Some(Autoscroll::center()), cx, |s| {
|
||||
self.change_selections(Some(Autoscroll::fit()), cx, |s| {
|
||||
let mut new_selection = s.newest_anchor().clone();
|
||||
new_selection.collapse_to(jump_to, SelectionGoal::None);
|
||||
s.select_anchors(vec![new_selection.clone()]);
|
||||
|
@ -4847,7 +4861,7 @@ impl Editor {
|
|||
|
||||
if let Some((primary_range, group_id)) = group {
|
||||
if self.activate_diagnostics(group_id, cx) {
|
||||
self.change_selections(Some(Autoscroll::center()), cx, |s| {
|
||||
self.change_selections(Some(Autoscroll::fit()), cx, |s| {
|
||||
s.select(vec![Selection {
|
||||
id: selection.id,
|
||||
start: primary_range.start,
|
||||
|
@ -4922,7 +4936,7 @@ impl Editor {
|
|||
.dedup();
|
||||
|
||||
if let Some(hunk) = hunks.next() {
|
||||
this.change_selections(Some(Autoscroll::center()), cx, |s| {
|
||||
this.change_selections(Some(Autoscroll::fit()), cx, |s| {
|
||||
let row = hunk.start_display_row();
|
||||
let point = DisplayPoint::new(row, 0);
|
||||
s.select_display_ranges([point..point]);
|
||||
|
@ -5007,25 +5021,49 @@ impl Editor {
|
|||
cx: &mut ViewContext<Workspace>,
|
||||
) {
|
||||
let pane = workspace.active_pane().clone();
|
||||
for definition in definitions {
|
||||
// If there is one definition, just open it directly
|
||||
if let [definition] = definitions.as_slice() {
|
||||
let range = definition
|
||||
.target
|
||||
.range
|
||||
.to_offset(definition.target.buffer.read(cx));
|
||||
|
||||
let target_editor_handle = workspace.open_project_item(definition.target.buffer, cx);
|
||||
let target_editor_handle =
|
||||
workspace.open_project_item(definition.target.buffer.clone(), cx);
|
||||
target_editor_handle.update(cx, |target_editor, cx| {
|
||||
// When selecting a definition in a different buffer, disable the nav history
|
||||
// to avoid creating a history entry at the previous cursor location.
|
||||
if editor_handle != target_editor_handle {
|
||||
pane.update(cx, |pane, _| pane.disable_history());
|
||||
}
|
||||
target_editor.change_selections(Some(Autoscroll::center()), cx, |s| {
|
||||
target_editor.change_selections(Some(Autoscroll::fit()), cx, |s| {
|
||||
s.select_ranges([range]);
|
||||
});
|
||||
|
||||
pane.update(cx, |pane, _| pane.enable_history());
|
||||
});
|
||||
} else {
|
||||
let replica_id = editor_handle.read(cx).replica_id(cx);
|
||||
let title = definitions
|
||||
.iter()
|
||||
.find(|definition| definition.origin.is_some())
|
||||
.and_then(|definition| {
|
||||
definition.origin.as_ref().map(|origin| {
|
||||
let buffer = origin.buffer.read(cx);
|
||||
format!(
|
||||
"Definitions for {}",
|
||||
buffer
|
||||
.text_for_range(origin.range.clone())
|
||||
.collect::<String>()
|
||||
)
|
||||
})
|
||||
})
|
||||
.unwrap_or("Definitions".to_owned());
|
||||
let locations = definitions
|
||||
.into_iter()
|
||||
.map(|definition| definition.target)
|
||||
.collect();
|
||||
Self::open_locations_in_multibuffer(workspace, locations, replica_id, title, cx)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -5046,64 +5084,87 @@ impl Editor {
|
|||
let project = workspace.project().clone();
|
||||
let references = project.update(cx, |project, cx| project.references(&buffer, head, cx));
|
||||
Some(cx.spawn(|workspace, mut cx| async move {
|
||||
let mut locations = references.await?;
|
||||
let locations = references.await?;
|
||||
if locations.is_empty() {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
locations.sort_by_key(|location| location.buffer.id());
|
||||
let mut locations = locations.into_iter().peekable();
|
||||
let mut ranges_to_highlight = Vec::new();
|
||||
|
||||
let excerpt_buffer = cx.add_model(|cx| {
|
||||
let mut symbol_name = None;
|
||||
let mut multibuffer = MultiBuffer::new(replica_id);
|
||||
while let Some(location) = locations.next() {
|
||||
let buffer = location.buffer.read(cx);
|
||||
let mut ranges_for_buffer = Vec::new();
|
||||
let range = location.range.to_offset(buffer);
|
||||
ranges_for_buffer.push(range.clone());
|
||||
if symbol_name.is_none() {
|
||||
symbol_name = Some(buffer.text_for_range(range).collect::<String>());
|
||||
}
|
||||
|
||||
while let Some(next_location) = locations.peek() {
|
||||
if next_location.buffer == location.buffer {
|
||||
ranges_for_buffer.push(next_location.range.to_offset(buffer));
|
||||
locations.next();
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
ranges_for_buffer.sort_by_key(|range| (range.start, Reverse(range.end)));
|
||||
ranges_to_highlight.extend(multibuffer.push_excerpts_with_context_lines(
|
||||
location.buffer.clone(),
|
||||
ranges_for_buffer,
|
||||
1,
|
||||
cx,
|
||||
));
|
||||
}
|
||||
multibuffer.with_title(format!("References to `{}`", symbol_name.unwrap()))
|
||||
});
|
||||
|
||||
workspace.update(&mut cx, |workspace, cx| {
|
||||
let editor =
|
||||
cx.add_view(|cx| Editor::for_multibuffer(excerpt_buffer, Some(project), cx));
|
||||
editor.update(cx, |editor, cx| {
|
||||
editor.highlight_background::<Self>(
|
||||
ranges_to_highlight,
|
||||
|theme| theme.editor.highlighted_line_background,
|
||||
cx,
|
||||
);
|
||||
});
|
||||
workspace.add_item(Box::new(editor), cx);
|
||||
let title = locations
|
||||
.first()
|
||||
.as_ref()
|
||||
.map(|location| {
|
||||
let buffer = location.buffer.read(cx);
|
||||
format!(
|
||||
"References to `{}`",
|
||||
buffer
|
||||
.text_for_range(location.range.clone())
|
||||
.collect::<String>()
|
||||
)
|
||||
})
|
||||
.unwrap();
|
||||
Self::open_locations_in_multibuffer(workspace, locations, replica_id, title, cx);
|
||||
});
|
||||
|
||||
Ok(())
|
||||
}))
|
||||
}
|
||||
|
||||
/// Opens a multibuffer with the given project locations in it
|
||||
pub fn open_locations_in_multibuffer(
|
||||
workspace: &mut Workspace,
|
||||
mut locations: Vec<Location>,
|
||||
replica_id: ReplicaId,
|
||||
title: String,
|
||||
cx: &mut ViewContext<Workspace>,
|
||||
) {
|
||||
// If there are multiple definitions, open them in a multibuffer
|
||||
locations.sort_by_key(|location| location.buffer.id());
|
||||
let mut locations = locations.into_iter().peekable();
|
||||
let mut ranges_to_highlight = Vec::new();
|
||||
|
||||
let excerpt_buffer = cx.add_model(|cx| {
|
||||
let mut multibuffer = MultiBuffer::new(replica_id);
|
||||
while let Some(location) = locations.next() {
|
||||
let buffer = location.buffer.read(cx);
|
||||
let mut ranges_for_buffer = Vec::new();
|
||||
let range = location.range.to_offset(buffer);
|
||||
ranges_for_buffer.push(range.clone());
|
||||
|
||||
while let Some(next_location) = locations.peek() {
|
||||
if next_location.buffer == location.buffer {
|
||||
ranges_for_buffer.push(next_location.range.to_offset(buffer));
|
||||
locations.next();
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
ranges_for_buffer.sort_by_key(|range| (range.start, Reverse(range.end)));
|
||||
ranges_to_highlight.extend(multibuffer.push_excerpts_with_context_lines(
|
||||
location.buffer.clone(),
|
||||
ranges_for_buffer,
|
||||
1,
|
||||
cx,
|
||||
))
|
||||
}
|
||||
|
||||
multibuffer.with_title(title)
|
||||
});
|
||||
|
||||
let editor = cx.add_view(|cx| {
|
||||
Editor::for_multibuffer(excerpt_buffer, Some(workspace.project().clone()), cx)
|
||||
});
|
||||
editor.update(cx, |editor, cx| {
|
||||
editor.highlight_background::<Self>(
|
||||
ranges_to_highlight,
|
||||
|theme| theme.editor.highlighted_line_background,
|
||||
cx,
|
||||
);
|
||||
});
|
||||
workspace.add_item(Box::new(editor), cx);
|
||||
}
|
||||
|
||||
pub fn rename(&mut self, _: &Rename, cx: &mut ViewContext<Self>) -> Option<Task<Result<()>>> {
|
||||
use language::ToOffset as _;
|
||||
|
||||
|
@ -6089,10 +6150,11 @@ impl Editor {
|
|||
let extension = Path::new(file.file_name(cx))
|
||||
.extension()
|
||||
.and_then(|e| e.to_str());
|
||||
project
|
||||
.read(cx)
|
||||
.client()
|
||||
.report_event(name, json!({ "File Extension": extension }));
|
||||
project.read(cx).client().report_event(
|
||||
name,
|
||||
json!({ "File Extension": extension }),
|
||||
cx.global::<Settings>().telemetry(),
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -6173,7 +6235,7 @@ impl View for Editor {
|
|||
cx.defer(move |cx| {
|
||||
if let Some(editor) = handle.upgrade(cx) {
|
||||
editor.update(cx, |editor, cx| {
|
||||
hide_hover(editor, cx);
|
||||
hide_hover(editor, &HideHover, cx);
|
||||
hide_link_definition(editor, cx);
|
||||
})
|
||||
}
|
||||
|
@ -6222,7 +6284,7 @@ impl View for Editor {
|
|||
self.buffer
|
||||
.update(cx, |buffer, cx| buffer.remove_active_selections(cx));
|
||||
self.hide_context_menu(cx);
|
||||
hide_hover(self, cx);
|
||||
hide_hover(self, &HideHover, cx);
|
||||
cx.emit(Event::Blurred);
|
||||
cx.notify();
|
||||
}
|
||||
|
|
|
@ -7,7 +7,7 @@ use crate::{
|
|||
display_map::{BlockStyle, DisplaySnapshot, TransformBlock},
|
||||
git::{diff_hunk_to_display, DisplayDiffHunk},
|
||||
hover_popover::{
|
||||
HoverAt, HOVER_POPOVER_GAP, MIN_POPOVER_CHARACTER_WIDTH, MIN_POPOVER_LINE_HEIGHT,
|
||||
HideHover, HoverAt, HOVER_POPOVER_GAP, MIN_POPOVER_CHARACTER_WIDTH, MIN_POPOVER_LINE_HEIGHT,
|
||||
},
|
||||
link_go_to_definition::{
|
||||
GoToFetchedDefinition, GoToFetchedTypeDefinition, UpdateGoToDefinitionLink,
|
||||
|
@ -114,6 +114,7 @@ impl EditorElement {
|
|||
fn attach_mouse_handlers(
|
||||
view: &WeakViewHandle<Editor>,
|
||||
position_map: &Arc<PositionMap>,
|
||||
has_popovers: bool,
|
||||
visible_bounds: RectF,
|
||||
text_bounds: RectF,
|
||||
gutter_bounds: RectF,
|
||||
|
@ -190,6 +191,11 @@ impl EditorElement {
|
|||
}
|
||||
}
|
||||
})
|
||||
.on_move_out(move |_, cx| {
|
||||
if has_popovers {
|
||||
cx.dispatch_action(HideHover);
|
||||
}
|
||||
})
|
||||
.on_scroll({
|
||||
let position_map = position_map.clone();
|
||||
move |e, cx| {
|
||||
|
@ -1870,6 +1876,7 @@ impl Element for EditorElement {
|
|||
Self::attach_mouse_handlers(
|
||||
&self.view,
|
||||
&layout.position_map,
|
||||
layout.hover_popovers.is_some(),
|
||||
visible_bounds,
|
||||
text_bounds,
|
||||
gutter_bounds,
|
||||
|
|
|
@ -29,12 +29,16 @@ pub struct HoverAt {
|
|||
pub point: Option<DisplayPoint>,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, PartialEq)]
|
||||
pub struct HideHover;
|
||||
|
||||
actions!(editor, [Hover]);
|
||||
impl_internal_actions!(editor, [HoverAt]);
|
||||
impl_internal_actions!(editor, [HoverAt, HideHover]);
|
||||
|
||||
pub fn init(cx: &mut MutableAppContext) {
|
||||
cx.add_action(hover);
|
||||
cx.add_action(hover_at);
|
||||
cx.add_action(hide_hover);
|
||||
}
|
||||
|
||||
/// Bindable action which uses the most recent selection head to trigger a hover
|
||||
|
@ -50,7 +54,7 @@ pub fn hover_at(editor: &mut Editor, action: &HoverAt, cx: &mut ViewContext<Edit
|
|||
if let Some(point) = action.point {
|
||||
show_hover(editor, point, false, cx);
|
||||
} else {
|
||||
hide_hover(editor, cx);
|
||||
hide_hover(editor, &HideHover, cx);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -58,7 +62,7 @@ pub fn hover_at(editor: &mut Editor, action: &HoverAt, cx: &mut ViewContext<Edit
|
|||
/// Hides the type information popup.
|
||||
/// Triggered by the `Hover` action when the cursor is not over a symbol or when the
|
||||
/// selections changed.
|
||||
pub fn hide_hover(editor: &mut Editor, cx: &mut ViewContext<Editor>) -> bool {
|
||||
pub fn hide_hover(editor: &mut Editor, _: &HideHover, cx: &mut ViewContext<Editor>) -> bool {
|
||||
let did_hide = editor.hover_state.info_popover.take().is_some()
|
||||
| editor.hover_state.diagnostic_popover.take().is_some();
|
||||
|
||||
|
@ -67,6 +71,10 @@ pub fn hide_hover(editor: &mut Editor, cx: &mut ViewContext<Editor>) -> bool {
|
|||
|
||||
editor.clear_background_highlights::<HoverState>(cx);
|
||||
|
||||
if did_hide {
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
did_hide
|
||||
}
|
||||
|
||||
|
@ -121,7 +129,7 @@ fn show_hover(
|
|||
// Hover triggered from same location as last time. Don't show again.
|
||||
return;
|
||||
} else {
|
||||
hide_hover(editor, cx);
|
||||
hide_hover(editor, &HideHover, cx);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -12,12 +12,13 @@ use gpui::{
|
|||
elements::*, geometry::vector::vec2f, AppContext, Entity, ModelHandle, MutableAppContext,
|
||||
RenderContext, Subscription, Task, View, ViewContext, ViewHandle, WeakViewHandle,
|
||||
};
|
||||
use language::proto::serialize_anchor as serialize_text_anchor;
|
||||
use language::{Bias, Buffer, File as _, OffsetRangeExt, Point, SelectionGoal};
|
||||
use project::{File, FormatTrigger, Project, ProjectEntryId, ProjectPath};
|
||||
use language::{
|
||||
proto::serialize_anchor as serialize_text_anchor, Bias, Buffer, OffsetRangeExt, Point,
|
||||
SelectionGoal,
|
||||
};
|
||||
use project::{FormatTrigger, Item as _, Project, ProjectPath};
|
||||
use rpc::proto::{self, update_view};
|
||||
use settings::Settings;
|
||||
use smallvec::SmallVec;
|
||||
use std::{
|
||||
borrow::Cow,
|
||||
cmp::{self, Ordering},
|
||||
|
@ -554,22 +555,10 @@ impl Item for Editor {
|
|||
.boxed()
|
||||
}
|
||||
|
||||
fn project_path(&self, cx: &AppContext) -> Option<ProjectPath> {
|
||||
let buffer = self.buffer.read(cx).as_singleton()?;
|
||||
let file = buffer.read(cx).file();
|
||||
File::from_dyn(file).map(|file| ProjectPath {
|
||||
worktree_id: file.worktree_id(cx),
|
||||
path: file.path().clone(),
|
||||
})
|
||||
}
|
||||
|
||||
fn project_entry_ids(&self, cx: &AppContext) -> SmallVec<[ProjectEntryId; 3]> {
|
||||
fn for_each_project_item(&self, cx: &AppContext, f: &mut dyn FnMut(usize, &dyn project::Item)) {
|
||||
self.buffer
|
||||
.read(cx)
|
||||
.files(cx)
|
||||
.into_iter()
|
||||
.filter_map(|file| File::from_dyn(Some(file))?.project_entry_id(cx))
|
||||
.collect()
|
||||
.for_each_buffer(|buffer| f(buffer.id(), buffer.read(cx)));
|
||||
}
|
||||
|
||||
fn is_singleton(&self, cx: &AppContext) -> bool {
|
||||
|
@ -606,7 +595,12 @@ impl Item for Editor {
|
|||
}
|
||||
|
||||
fn can_save(&self, cx: &AppContext) -> bool {
|
||||
!self.buffer().read(cx).is_singleton() || self.project_path(cx).is_some()
|
||||
let buffer = &self.buffer().read(cx);
|
||||
if let Some(buffer) = buffer.as_singleton() {
|
||||
buffer.read(cx).project_path(cx).is_some()
|
||||
} else {
|
||||
true
|
||||
}
|
||||
}
|
||||
|
||||
fn save(
|
||||
|
@ -765,6 +759,7 @@ impl Item for Editor {
|
|||
fn added_to_workspace(&mut self, workspace: &mut Workspace, cx: &mut ViewContext<Self>) {
|
||||
let workspace_id = workspace.database_id();
|
||||
let item_id = cx.view_id();
|
||||
self.workspace_id = Some(workspace_id);
|
||||
|
||||
fn serialize(
|
||||
buffer: ModelHandle<Buffer>,
|
||||
|
@ -836,7 +831,11 @@ impl Item for Editor {
|
|||
.context("Project item at stored path was not a buffer")?;
|
||||
|
||||
Ok(cx.update(|cx| {
|
||||
cx.add_view(pane, |cx| Editor::for_buffer(buffer, Some(project), cx))
|
||||
cx.add_view(pane, |cx| {
|
||||
let mut editor = Editor::for_buffer(buffer, Some(project), cx);
|
||||
editor.read_scroll_position_from_db(item_id, workspace_id, cx);
|
||||
editor
|
||||
})
|
||||
}))
|
||||
})
|
||||
})
|
||||
|
@ -1159,9 +1158,11 @@ fn path_for_file<'a>(
|
|||
mod tests {
|
||||
use super::*;
|
||||
use gpui::MutableAppContext;
|
||||
use language::RopeFingerprint;
|
||||
use std::{
|
||||
path::{Path, PathBuf},
|
||||
sync::Arc,
|
||||
time::SystemTime,
|
||||
};
|
||||
|
||||
#[gpui::test]
|
||||
|
@ -1191,7 +1192,7 @@ mod tests {
|
|||
todo!()
|
||||
}
|
||||
|
||||
fn mtime(&self) -> std::time::SystemTime {
|
||||
fn mtime(&self) -> SystemTime {
|
||||
todo!()
|
||||
}
|
||||
|
||||
|
@ -1210,7 +1211,7 @@ mod tests {
|
|||
_: clock::Global,
|
||||
_: project::LineEnding,
|
||||
_: &mut MutableAppContext,
|
||||
) -> gpui::Task<anyhow::Result<(clock::Global, String, std::time::SystemTime)>> {
|
||||
) -> gpui::Task<anyhow::Result<(clock::Global, RopeFingerprint, SystemTime)>> {
|
||||
todo!()
|
||||
}
|
||||
|
||||
|
|
|
@ -352,6 +352,29 @@ pub fn surrounding_word(map: &DisplaySnapshot, position: DisplayPoint) -> Range<
    start..end
}

pub fn split_display_range_by_lines(
    map: &DisplaySnapshot,
    range: Range<DisplayPoint>,
) -> Vec<Range<DisplayPoint>> {
    let mut result = Vec::new();

    let mut start = range.start;
    // Loop over all the covered rows until the one containing the range end.
    for row in range.start.row()..range.end.row() {
        let row_end_column = map.line_len(row);
        let end = map.clip_point(DisplayPoint::new(row, row_end_column), Bias::Left);
        if start != end {
            result.push(start..end);
        }
        start = map.clip_point(DisplayPoint::new(row + 1, 0), Bias::Left);
    }

    // Add the final range, from the last row boundary to the original range end.
    result.push(start..range.end);

    result
}
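split_display_range_by_lines walks every display row covered by a range and emits one sub-range per row. As a rough illustration of that splitting (not Zed's actual types), here is a self-contained sketch over plain (row, column) pairs; the line_lens table stands in for map.line_len, and the clip_point adjustments are omitted.

// Illustrative only: a point is (row, column) and `line_lens[row]` plays the
// role of `map.line_len(row)` in the function above.
fn split_range_by_lines(
    line_lens: &[u32],
    range: std::ops::Range<(usize, u32)>,
) -> Vec<std::ops::Range<(usize, u32)>> {
    let mut result = Vec::new();
    let mut start = range.start;
    // One sub-range per covered row, ending at that row's last column.
    for row in range.start.0..range.end.0 {
        let end = (row, line_lens[row]);
        if start != end {
            result.push(start..end);
        }
        start = (row + 1, 0);
    }
    // Final piece on the last row, up to the original end.
    result.push(start..range.end);
    result
}

fn main() {
    let line_lens = [5, 3, 7, 4];
    let pieces = split_range_by_lines(&line_lens, (0, 2)..(2, 4));
    assert_eq!(pieces, vec![(0, 2)..(0, 5), (1, 0)..(1, 3), (2, 0)..(2, 4)]);
    println!("{pieces:?}");
}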

#[cfg(test)]
mod tests {
    use super::*;
@ -4,16 +4,16 @@ pub use anchor::{Anchor, AnchorRangeExt};
use anyhow::Result;
use clock::ReplicaId;
use collections::{BTreeMap, Bound, HashMap, HashSet};
use futures::{channel::mpsc, SinkExt};
use git::diff::DiffHunk;
use gpui::{AppContext, Entity, ModelContext, ModelHandle, Task};
pub use language::Completion;
use language::{
    char_kind, AutoindentMode, Buffer, BufferChunks, BufferSnapshot, CharKind, Chunk, CursorShape,
-   DiagnosticEntry, File, IndentSize, Language, OffsetRangeExt, OffsetUtf16, Outline, OutlineItem,
-   Point, PointUtf16, Selection, TextDimension, ToOffset as _, ToOffsetUtf16 as _, ToPoint as _,
-   ToPointUtf16 as _, TransactionId, Unclipped,
+   DiagnosticEntry, IndentSize, Language, LanguageScope, OffsetRangeExt, OffsetUtf16, Outline,
+   OutlineItem, Point, PointUtf16, Selection, TextDimension, ToOffset as _, ToOffsetUtf16 as _,
+   ToPoint as _, ToPointUtf16 as _, TransactionId, Unclipped,
};
use smallvec::SmallVec;
use std::{
    borrow::Cow,
    cell::{Ref, RefCell},
@ -764,6 +764,63 @@ impl MultiBuffer {
        None
    }

    pub fn stream_excerpts_with_context_lines(
        &mut self,
        excerpts: Vec<(ModelHandle<Buffer>, Vec<Range<text::Anchor>>)>,
        context_line_count: u32,
        cx: &mut ModelContext<Self>,
    ) -> (Task<()>, mpsc::Receiver<Range<Anchor>>) {
        let (mut tx, rx) = mpsc::channel(256);
        let task = cx.spawn(|this, mut cx| async move {
            for (buffer, ranges) in excerpts {
                let buffer_id = buffer.id();
                let buffer_snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());

                let mut excerpt_ranges = Vec::new();
                let mut range_counts = Vec::new();
                cx.background()
                    .scoped(|scope| {
                        scope.spawn(async {
                            let (ranges, counts) =
                                build_excerpt_ranges(&buffer_snapshot, &ranges, context_line_count);
                            excerpt_ranges = ranges;
                            range_counts = counts;
                        });
                    })
                    .await;

                let mut ranges = ranges.into_iter();
                let mut range_counts = range_counts.into_iter();
                for excerpt_ranges in excerpt_ranges.chunks(100) {
                    let excerpt_ids = this.update(&mut cx, |this, cx| {
                        this.push_excerpts(buffer.clone(), excerpt_ranges.iter().cloned(), cx)
                    });

                    for (excerpt_id, range_count) in
                        excerpt_ids.into_iter().zip(range_counts.by_ref())
                    {
                        for range in ranges.by_ref().take(range_count) {
                            let start = Anchor {
                                buffer_id: Some(buffer_id),
                                excerpt_id: excerpt_id.clone(),
                                text_anchor: range.start,
                            };
                            let end = Anchor {
                                buffer_id: Some(buffer_id),
                                excerpt_id: excerpt_id.clone(),
                                text_anchor: range.end,
                            };
                            if tx.send(start..end).await.is_err() {
                                break;
                            }
                        }
                    }
                }
            }
        });
        (task, rx)
    }
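The method above combines two patterns: the heavy range computation is pushed onto a background scope, and the resulting anchors are streamed to the caller through a bounded futures::channel::mpsc channel, so the consumer can start rendering excerpts before all of them exist. A minimal, self-contained sketch of that producer/consumer shape (independent of MultiBuffer, using only the futures crate) looks like this:

use futures::{channel::mpsc, executor::block_on, SinkExt, StreamExt};

fn main() {
    // Bounded channel: the producer awaits when the consumer lags,
    // mirroring the `mpsc::channel(256)` used above.
    let (mut tx, mut rx) = mpsc::channel::<usize>(4);

    let producer = async move {
        let items: Vec<usize> = (0..20).collect();
        // Produce in small batches, the way excerpts are pushed in chunks.
        for batch in items.chunks(5) {
            for &item in batch {
                // `send` fails once the receiver is dropped; stop producing then.
                if tx.send(item).await.is_err() {
                    return;
                }
            }
        }
    };

    let consumer = async move {
        let mut total = 0;
        while let Some(item) = rx.next().await {
            total += item;
        }
        total
    };

    let (_, total) = block_on(futures::future::join(producer, consumer));
    assert_eq!(total, (0..20).sum::<usize>());
    println!("received sum: {total}");
}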
|
||||
|
||||
pub fn push_excerpts<O>(
|
||||
&mut self,
|
||||
buffer: ModelHandle<Buffer>,
|
||||
|
@ -788,39 +845,8 @@ impl MultiBuffer {
|
|||
{
|
||||
let buffer_id = buffer.id();
|
||||
let buffer_snapshot = buffer.read(cx).snapshot();
|
||||
let max_point = buffer_snapshot.max_point();
|
||||
|
||||
let mut range_counts = Vec::new();
|
||||
let mut excerpt_ranges = Vec::new();
|
||||
let mut range_iter = ranges
|
||||
.iter()
|
||||
.map(|range| {
|
||||
range.start.to_point(&buffer_snapshot)..range.end.to_point(&buffer_snapshot)
|
||||
})
|
||||
.peekable();
|
||||
while let Some(range) = range_iter.next() {
|
||||
let excerpt_start = Point::new(range.start.row.saturating_sub(context_line_count), 0);
|
||||
let mut excerpt_end =
|
||||
Point::new(range.end.row + 1 + context_line_count, 0).min(max_point);
|
||||
let mut ranges_in_excerpt = 1;
|
||||
|
||||
while let Some(next_range) = range_iter.peek() {
|
||||
if next_range.start.row <= excerpt_end.row + context_line_count {
|
||||
excerpt_end =
|
||||
Point::new(next_range.end.row + 1 + context_line_count, 0).min(max_point);
|
||||
ranges_in_excerpt += 1;
|
||||
range_iter.next();
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
excerpt_ranges.push(ExcerptRange {
|
||||
context: excerpt_start..excerpt_end,
|
||||
primary: Some(range),
|
||||
});
|
||||
range_counts.push(ranges_in_excerpt);
|
||||
}
|
||||
let (excerpt_ranges, range_counts) =
|
||||
build_excerpt_ranges(&buffer_snapshot, &ranges, context_line_count);
|
||||
|
||||
let excerpt_ids = self.push_excerpts(buffer, excerpt_ranges, cx);
|
||||
|
||||
|
@ -1311,12 +1337,11 @@ impl MultiBuffer {
            .and_then(|(buffer, offset)| buffer.read(cx).language_at(offset))
    }

-   pub fn files<'a>(&'a self, cx: &'a AppContext) -> SmallVec<[&'a Arc<dyn File>; 2]> {
-       let buffers = self.buffers.borrow();
-       buffers
+   pub fn for_each_buffer(&self, mut f: impl FnMut(&ModelHandle<Buffer>)) {
+       self.buffers
+           .borrow()
            .values()
-           .filter_map(|buffer| buffer.buffer.read(cx).file())
-           .collect()
+           .for_each(|state| f(&state.buffer))
    }

    pub fn title<'a>(&'a self, cx: &'a AppContext) -> Cow<'a, str> {
@ -2666,6 +2691,11 @@ impl MultiBufferSnapshot {
            .and_then(|(buffer, offset)| buffer.language_at(offset))
    }

    pub fn language_scope_at<'a, T: ToOffset>(&'a self, point: T) -> Option<LanguageScope> {
        self.point_to_buffer_offset(point)
            .and_then(|(buffer, offset)| buffer.language_scope_at(offset))
    }

    pub fn is_dirty(&self) -> bool {
        self.is_dirty
    }
@ -3605,9 +3635,51 @@ impl ToPointUtf16 for PointUtf16 {
    }
}

fn build_excerpt_ranges<T>(
    buffer: &BufferSnapshot,
    ranges: &[Range<T>],
    context_line_count: u32,
) -> (Vec<ExcerptRange<Point>>, Vec<usize>)
where
    T: text::ToPoint,
{
    let max_point = buffer.max_point();
    let mut range_counts = Vec::new();
    let mut excerpt_ranges = Vec::new();
    let mut range_iter = ranges
        .iter()
        .map(|range| range.start.to_point(buffer)..range.end.to_point(buffer))
        .peekable();
    while let Some(range) = range_iter.next() {
        let excerpt_start = Point::new(range.start.row.saturating_sub(context_line_count), 0);
        let mut excerpt_end = Point::new(range.end.row + 1 + context_line_count, 0).min(max_point);
        let mut ranges_in_excerpt = 1;

        while let Some(next_range) = range_iter.peek() {
            if next_range.start.row <= excerpt_end.row + context_line_count {
                excerpt_end =
                    Point::new(next_range.end.row + 1 + context_line_count, 0).min(max_point);
                ranges_in_excerpt += 1;
                range_iter.next();
            } else {
                break;
            }
        }

        excerpt_ranges.push(ExcerptRange {
            context: excerpt_start..excerpt_end,
            primary: Some(range),
        });
        range_counts.push(ranges_in_excerpt);
    }

    (excerpt_ranges, range_counts)
}
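The key behavior in build_excerpt_ranges is the merge rule in the inner loop: two requested ranges land in a single excerpt whenever the next range starts within context_line_count rows of the previous excerpt's end. A self-contained sketch of just that grouping decision over plain row numbers (no BufferSnapshot or Point involved, purely illustrative):

/// Group (start_row, end_row) ranges the way the function above does,
/// returning (excerpt_start_row, excerpt_end_row, ranges_in_excerpt).
fn group_ranges(ranges: &[(u32, u32)], context: u32, max_row: u32) -> Vec<(u32, u32, usize)> {
    let mut result = Vec::new();
    let mut iter = ranges.iter().copied().peekable();
    while let Some((start_row, end_row)) = iter.next() {
        let excerpt_start = start_row.saturating_sub(context);
        let mut excerpt_end = (end_row + 1 + context).min(max_row);
        let mut count = 1;
        while let Some(&(next_start, next_end)) = iter.peek() {
            // Merge if the next range begins within `context` rows of this excerpt's end.
            if next_start <= excerpt_end + context {
                excerpt_end = (next_end + 1 + context).min(max_row);
                count += 1;
                iter.next();
            } else {
                break;
            }
        }
        result.push((excerpt_start, excerpt_end, count));
    }
    result
}

fn main() {
    // Ranges on rows 3-4 and 7 merge (7 <= 7 + 2); row 15 stays separate.
    let grouped = group_ranges(&[(3, 4), (7, 7), (15, 15)], 2, 20);
    assert_eq!(grouped, vec![(1, 10, 2), (13, 18, 1)]);
    println!("{grouped:?}");
}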

#[cfg(test)]
mod tests {
    use super::*;
    use futures::StreamExt;
    use gpui::{MutableAppContext, TestAppContext};
    use language::{Buffer, Rope};
    use rand::prelude::*;
@ -4012,6 +4084,44 @@ mod tests {
|
|||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_stream_excerpts_with_context_lines(cx: &mut TestAppContext) {
|
||||
let buffer = cx.add_model(|cx| Buffer::new(0, sample_text(20, 3, 'a'), cx));
|
||||
let multibuffer = cx.add_model(|_| MultiBuffer::new(0));
|
||||
let (task, anchor_ranges) = multibuffer.update(cx, |multibuffer, cx| {
|
||||
let snapshot = buffer.read(cx);
|
||||
let ranges = vec![
|
||||
snapshot.anchor_before(Point::new(3, 2))..snapshot.anchor_before(Point::new(4, 2)),
|
||||
snapshot.anchor_before(Point::new(7, 1))..snapshot.anchor_before(Point::new(7, 3)),
|
||||
snapshot.anchor_before(Point::new(15, 0))
|
||||
..snapshot.anchor_before(Point::new(15, 0)),
|
||||
];
|
||||
multibuffer.stream_excerpts_with_context_lines(vec![(buffer.clone(), ranges)], 2, cx)
|
||||
});
|
||||
|
||||
let anchor_ranges = anchor_ranges.collect::<Vec<_>>().await;
|
||||
// Ensure task is finished when stream completes.
|
||||
task.await;
|
||||
|
||||
let snapshot = multibuffer.read_with(cx, |multibuffer, cx| multibuffer.snapshot(cx));
|
||||
assert_eq!(
|
||||
snapshot.text(),
|
||||
"bbb\nccc\nddd\neee\nfff\nggg\nhhh\niii\njjj\n\nnnn\nooo\nppp\nqqq\nrrr\n"
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
anchor_ranges
|
||||
.iter()
|
||||
.map(|range| range.to_point(&snapshot))
|
||||
.collect::<Vec<_>>(),
|
||||
vec![
|
||||
Point::new(2, 2)..Point::new(3, 2),
|
||||
Point::new(6, 1)..Point::new(6, 3),
|
||||
Point::new(12, 0)..Point::new(12, 0)
|
||||
]
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
fn test_empty_multibuffer(cx: &mut MutableAppContext) {
|
||||
let multibuffer = cx.add_model(|_| MultiBuffer::new(0));
|
||||
|
|
|
@ -2,9 +2,19 @@ use std::path::PathBuf;

use db::sqlez_macros::sql;
use db::{define_connection, query};

use workspace::{ItemId, WorkspaceDb, WorkspaceId};

define_connection!(
    // Current table shape using pseudo-rust syntax:
    // editors(
    //   item_id: usize,
    //   workspace_id: usize,
    //   path: PathBuf,
    //   scroll_top_row: usize,
    //   scroll_vertical_offset: f32,
    //   scroll_horizontal_offset: f32,
    // )
    pub static ref DB: EditorDb<WorkspaceDb> =
        &[sql! (
            CREATE TABLE editors(

@ -15,8 +25,13 @@ define_connection!(
                FOREIGN KEY(workspace_id) REFERENCES workspaces(workspace_id)
                ON DELETE CASCADE
                ON UPDATE CASCADE
-           ) STRICT;
-       )];
+           ) STRICT;
+       ),
+       sql! (
+           ALTER TABLE editors ADD COLUMN scroll_top_row INTEGER NOT NULL DEFAULT 0;
+           ALTER TABLE editors ADD COLUMN scroll_horizontal_offset REAL NOT NULL DEFAULT 0;
+           ALTER TABLE editors ADD COLUMN scroll_vertical_offset REAL NOT NULL DEFAULT 0;
+       )];
);

impl EditorDb {
@ -29,8 +44,40 @@ impl EditorDb {

    query! {
        pub async fn save_path(item_id: ItemId, workspace_id: WorkspaceId, path: PathBuf) -> Result<()> {
-           INSERT OR REPLACE INTO editors(item_id, workspace_id, path)
-           VALUES (?, ?, ?)
+           INSERT INTO editors
+               (item_id, workspace_id, path)
+           VALUES
+               (?1, ?2, ?3)
+           ON CONFLICT DO UPDATE SET
+               item_id = ?1,
+               workspace_id = ?2,
+               path = ?3
        }
    }

    // Returns the scroll top row and offsets.
    query! {
        pub fn get_scroll_position(item_id: ItemId, workspace_id: WorkspaceId) -> Result<Option<(u32, f32, f32)>> {
            SELECT scroll_top_row, scroll_horizontal_offset, scroll_vertical_offset
            FROM editors
            WHERE item_id = ? AND workspace_id = ?
        }
    }

    query! {
        pub async fn save_scroll_position(
            item_id: ItemId,
            workspace_id: WorkspaceId,
            top_row: u32,
            vertical_offset: f32,
            horizontal_offset: f32
        ) -> Result<()> {
            UPDATE OR IGNORE editors
            SET
                scroll_top_row = ?3,
                scroll_horizontal_offset = ?4,
                scroll_vertical_offset = ?5
            WHERE item_id = ?1 AND workspace_id = ?2
        }
    }
}
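A rough picture of what these queries do at the SQLite level, written against rusqlite and an in-memory database purely for illustration; the real code goes through the workspace's sqlez/define_connection machinery, not rusqlite, and the composite primary key below is an assumption made so the upsert has a uniqueness constraint to land on.

use rusqlite::{params, Connection};

fn main() -> rusqlite::Result<()> {
    // Simplified stand-in for the `editors` table defined above.
    let conn = Connection::open_in_memory()?;
    conn.execute_batch(
        "CREATE TABLE editors(
            item_id INTEGER,
            workspace_id INTEGER,
            path TEXT,
            scroll_top_row INTEGER NOT NULL DEFAULT 0,
            scroll_horizontal_offset REAL NOT NULL DEFAULT 0,
            scroll_vertical_offset REAL NOT NULL DEFAULT 0,
            PRIMARY KEY(item_id, workspace_id)
        );",
    )?;

    // save_path: upsert keyed on (item_id, workspace_id).
    conn.execute(
        "INSERT INTO editors (item_id, workspace_id, path) VALUES (?1, ?2, ?3)
         ON CONFLICT(item_id, workspace_id) DO UPDATE SET path = excluded.path",
        params![1, 42, "src/main.rs"],
    )?;

    // save_scroll_position: only touches a row that already exists.
    conn.execute(
        "UPDATE OR IGNORE editors
         SET scroll_top_row = ?3, scroll_horizontal_offset = ?4, scroll_vertical_offset = ?5
         WHERE item_id = ?1 AND workspace_id = ?2",
        params![1, 42, 100, 0.0, 3.5],
    )?;

    // get_scroll_position: read the stored position back.
    let (top_row, x, y): (i64, f64, f64) = conn.query_row(
        "SELECT scroll_top_row, scroll_horizontal_offset, scroll_vertical_offset
         FROM editors WHERE item_id = ?1 AND workspace_id = ?2",
        params![1, 42],
        |row| Ok((row.get(0)?, row.get(1)?, row.get(2)?)),
    )?;
    assert_eq!((top_row, x, y), (100, 0.0, 3.5));
    Ok(())
}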
|
|
@ -11,11 +11,14 @@ use gpui::{
    geometry::vector::{vec2f, Vector2F},
    Axis, MutableAppContext, Task, ViewContext,
};
-use language::Bias;
+use language::{Bias, Point};
use util::ResultExt;
use workspace::WorkspaceId;

use crate::{
    display_map::{DisplaySnapshot, ToDisplayPoint},
-   hover_popover::hide_hover,
+   hover_popover::{hide_hover, HideHover},
    persistence::DB,
    Anchor, DisplayPoint, Editor, EditorMode, Event, MultiBufferSnapshot, ToPoint,
};
|
||||
|
@ -170,37 +173,68 @@ impl ScrollManager {
        scroll_position: Vector2F,
        map: &DisplaySnapshot,
        local: bool,
        workspace_id: Option<i64>,
        cx: &mut ViewContext<Editor>,
    ) {
-       let new_anchor = if scroll_position.y() <= 0. {
-           ScrollAnchor {
-               top_anchor: Anchor::min(),
-               offset: scroll_position.max(vec2f(0., 0.)),
-           }
+       let (new_anchor, top_row) = if scroll_position.y() <= 0. {
+           (
+               ScrollAnchor {
+                   top_anchor: Anchor::min(),
+                   offset: scroll_position.max(vec2f(0., 0.)),
+               },
+               0,
+           )
        } else {
-           let scroll_top_buffer_offset =
-               DisplayPoint::new(scroll_position.y() as u32, 0).to_offset(&map, Bias::Right);
+           let scroll_top_buffer_point =
+               DisplayPoint::new(scroll_position.y() as u32, 0).to_point(&map);
            let top_anchor = map
                .buffer_snapshot
-               .anchor_at(scroll_top_buffer_offset, Bias::Right);
+               .anchor_at(scroll_top_buffer_point, Bias::Right);

-           ScrollAnchor {
-               top_anchor,
-               offset: vec2f(
-                   scroll_position.x(),
-                   scroll_position.y() - top_anchor.to_display_point(&map).row() as f32,
-               ),
-           }
+           (
+               ScrollAnchor {
+                   top_anchor,
+                   offset: vec2f(
+                       scroll_position.x(),
+                       scroll_position.y() - top_anchor.to_display_point(&map).row() as f32,
+                   ),
+               },
+               scroll_top_buffer_point.row,
+           )
        };

-       self.set_anchor(new_anchor, local, cx);
+       self.set_anchor(new_anchor, top_row, local, workspace_id, cx);
    }

-   fn set_anchor(&mut self, anchor: ScrollAnchor, local: bool, cx: &mut ViewContext<Editor>) {
+   fn set_anchor(
+       &mut self,
+       anchor: ScrollAnchor,
+       top_row: u32,
+       local: bool,
+       workspace_id: Option<i64>,
+       cx: &mut ViewContext<Editor>,
+   ) {
        self.anchor = anchor;
        cx.emit(Event::ScrollPositionChanged { local });
        self.show_scrollbar(cx);
        self.autoscroll_request.take();
+       if let Some(workspace_id) = workspace_id {
+           let item_id = cx.view_id();
+
+           cx.background()
+               .spawn(async move {
+                   DB.save_scroll_position(
+                       item_id,
+                       workspace_id,
+                       top_row,
+                       anchor.offset.x(),
+                       anchor.offset.y(),
+                   )
+                   .await
+                   .log_err()
+               })
+               .detach()
+       }
        cx.notify();
    }
||||
|
||||
|
@ -273,9 +307,14 @@ impl Editor {
|
|||
) {
|
||||
let map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
|
||||
|
||||
hide_hover(self, cx);
|
||||
self.scroll_manager
|
||||
.set_scroll_position(scroll_position, &map, local, cx);
|
||||
hide_hover(self, &HideHover, cx);
|
||||
self.scroll_manager.set_scroll_position(
|
||||
scroll_position,
|
||||
&map,
|
||||
local,
|
||||
self.workspace_id,
|
||||
cx,
|
||||
);
|
||||
}
|
||||
|
||||
pub fn scroll_position(&self, cx: &mut ViewContext<Self>) -> Vector2F {
|
||||
|
@ -284,8 +323,13 @@ impl Editor {
|
|||
}
|
||||
|
||||
pub fn set_scroll_anchor(&mut self, scroll_anchor: ScrollAnchor, cx: &mut ViewContext<Self>) {
|
||||
hide_hover(self, cx);
|
||||
self.scroll_manager.set_anchor(scroll_anchor, true, cx);
|
||||
hide_hover(self, &HideHover, cx);
|
||||
let top_row = scroll_anchor
|
||||
.top_anchor
|
||||
.to_point(&self.buffer().read(cx).snapshot(cx))
|
||||
.row;
|
||||
self.scroll_manager
|
||||
.set_anchor(scroll_anchor, top_row, true, self.workspace_id, cx);
|
||||
}
|
||||
|
||||
pub(crate) fn set_scroll_anchor_remote(
|
||||
|
@ -293,8 +337,13 @@ impl Editor {
|
|||
scroll_anchor: ScrollAnchor,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) {
|
||||
hide_hover(self, cx);
|
||||
self.scroll_manager.set_anchor(scroll_anchor, false, cx);
|
||||
hide_hover(self, &HideHover, cx);
|
||||
let top_row = scroll_anchor
|
||||
.top_anchor
|
||||
.to_point(&self.buffer().read(cx).snapshot(cx))
|
||||
.row;
|
||||
self.scroll_manager
|
||||
.set_anchor(scroll_anchor, top_row, false, self.workspace_id, cx);
|
||||
}
|
||||
|
||||
pub fn scroll_screen(&mut self, amount: &ScrollAmount, cx: &mut ViewContext<Self>) {
|
||||
|
@ -345,4 +394,25 @@ impl Editor {
|
|||
|
||||
Ordering::Greater
|
||||
}
|
||||
|
||||
pub fn read_scroll_position_from_db(
|
||||
&mut self,
|
||||
item_id: usize,
|
||||
workspace_id: WorkspaceId,
|
||||
cx: &mut ViewContext<Editor>,
|
||||
) {
|
||||
let scroll_position = DB.get_scroll_position(item_id, workspace_id);
|
||||
if let Ok(Some((top_row, x, y))) = scroll_position {
|
||||
let top_anchor = self
|
||||
.buffer()
|
||||
.read(cx)
|
||||
.snapshot(cx)
|
||||
.anchor_at(Point::new(top_row as u32, 0), Bias::Left);
|
||||
let scroll_anchor = ScrollAnchor {
|
||||
offset: Vector2F::new(x, y),
|
||||
top_anchor,
|
||||
};
|
||||
self.set_scroll_anchor(scroll_anchor, cx);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
34
crates/feedback/Cargo.toml
Normal file
|
@ -0,0 +1,34 @@
|
|||
[package]
|
||||
name = "feedback"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
publish = false
|
||||
|
||||
[lib]
|
||||
path = "src/feedback.rs"
|
||||
|
||||
[features]
|
||||
test-support = []
|
||||
|
||||
[dependencies]
|
||||
anyhow = "1.0.38"
|
||||
client = { path = "../client" }
|
||||
editor = { path = "../editor" }
|
||||
language = { path = "../language" }
|
||||
log = "0.4"
|
||||
futures = "0.3"
|
||||
gpui = { path = "../gpui" }
|
||||
human_bytes = "0.4.1"
|
||||
isahc = "1.7"
|
||||
lazy_static = "1.4.0"
|
||||
postage = { version = "0.4", features = ["futures-traits"] }
|
||||
project = { path = "../project" }
|
||||
search = { path = "../search" }
|
||||
serde = { version = "1.0", features = ["derive", "rc"] }
|
||||
settings = { path = "../settings" }
|
||||
sysinfo = "0.27.1"
|
||||
theme = { path = "../theme" }
|
||||
tree-sitter-markdown = { git = "https://github.com/MDeiml/tree-sitter-markdown", rev = "330ecab87a3e3a7211ac69bbadc19eabecdb1cca" }
|
||||
urlencoding = "2.1.2"
|
||||
util = { path = "../util" }
|
||||
workspace = { path = "../workspace" }
|
61
crates/feedback/src/feedback.rs
Normal file
|
@ -0,0 +1,61 @@
|
|||
use std::sync::Arc;
|
||||
|
||||
pub mod feedback_editor;
|
||||
mod system_specs;
|
||||
use gpui::{actions, impl_actions, ClipboardItem, ViewContext};
|
||||
use serde::Deserialize;
|
||||
use system_specs::SystemSpecs;
|
||||
use workspace::Workspace;
|
||||
|
||||
#[derive(Deserialize, Clone, PartialEq)]
|
||||
pub struct OpenBrowser {
|
||||
pub url: Arc<str>,
|
||||
}
|
||||
|
||||
impl_actions!(zed, [OpenBrowser]);
|
||||
|
||||
actions!(
|
||||
zed,
|
||||
[CopySystemSpecsIntoClipboard, FileBugReport, RequestFeature,]
|
||||
);
|
||||
|
||||
pub fn init(cx: &mut gpui::MutableAppContext) {
|
||||
feedback_editor::init(cx);
|
||||
|
||||
cx.add_global_action(move |action: &OpenBrowser, cx| cx.platform().open_url(&action.url));
|
||||
|
||||
cx.add_action(
|
||||
|_: &mut Workspace, _: &CopySystemSpecsIntoClipboard, cx: &mut ViewContext<Workspace>| {
|
||||
let system_specs = SystemSpecs::new(cx).to_string();
|
||||
let item = ClipboardItem::new(system_specs.clone());
|
||||
cx.prompt(
|
||||
gpui::PromptLevel::Info,
|
||||
&format!("Copied into clipboard:\n\n{system_specs}"),
|
||||
&["OK"],
|
||||
);
|
||||
cx.write_to_clipboard(item);
|
||||
},
|
||||
);
|
||||
|
||||
cx.add_action(
|
||||
|_: &mut Workspace, _: &RequestFeature, cx: &mut ViewContext<Workspace>| {
|
||||
let url = "https://github.com/zed-industries/feedback/issues/new?assignees=&labels=enhancement%2Ctriage&template=0_feature_request.yml";
|
||||
cx.dispatch_action(OpenBrowser {
|
||||
url: url.into(),
|
||||
});
|
||||
},
|
||||
);
|
||||
|
||||
cx.add_action(
|
||||
|_: &mut Workspace, _: &FileBugReport, cx: &mut ViewContext<Workspace>| {
|
||||
let system_specs_text = SystemSpecs::new(cx).to_string();
|
||||
let url = format!(
|
||||
"https://github.com/zed-industries/feedback/issues/new?assignees=&labels=defect%2Ctriage&template=2_bug_report.yml&environment={}",
|
||||
urlencoding::encode(&system_specs_text)
|
||||
);
|
||||
cx.dispatch_action(OpenBrowser {
|
||||
url: url.into(),
|
||||
});
|
||||
},
|
||||
);
|
||||
}
|
417
crates/feedback/src/feedback_editor.rs
Normal file
|
@ -0,0 +1,417 @@
|
|||
use std::{ops::Range, sync::Arc};
|
||||
|
||||
use anyhow::bail;
|
||||
use client::{Client, ZED_SECRET_CLIENT_TOKEN};
|
||||
use editor::{Anchor, Editor};
|
||||
use futures::AsyncReadExt;
|
||||
use gpui::{
|
||||
actions,
|
||||
elements::{ChildView, Flex, Label, MouseEventHandler, ParentElement, Stack, Text},
|
||||
serde_json, AnyViewHandle, AppContext, CursorStyle, Element, ElementBox, Entity, ModelHandle,
|
||||
MouseButton, MutableAppContext, PromptLevel, RenderContext, Task, View, ViewContext,
|
||||
ViewHandle,
|
||||
};
|
||||
use isahc::Request;
|
||||
use language::Buffer;
|
||||
use postage::prelude::Stream;
|
||||
|
||||
use lazy_static::lazy_static;
|
||||
use project::Project;
|
||||
use serde::Serialize;
|
||||
use settings::Settings;
|
||||
use workspace::{
|
||||
item::{Item, ItemHandle},
|
||||
searchable::{SearchableItem, SearchableItemHandle},
|
||||
StatusItemView, Workspace,
|
||||
};
|
||||
|
||||
use crate::system_specs::SystemSpecs;
|
||||
|
||||
lazy_static! {
|
||||
pub static ref ZED_SERVER_URL: String =
|
||||
std::env::var("ZED_SERVER_URL").unwrap_or_else(|_| "https://zed.dev".to_string());
|
||||
}
|
||||
|
||||
const FEEDBACK_CHAR_COUNT_RANGE: Range<usize> = Range {
|
||||
start: 10,
|
||||
end: 1000,
|
||||
};
|
||||
|
||||
const FEEDBACK_PLACEHOLDER_TEXT: &str = "Thanks for spending time with Zed. Enter your feedback here as Markdown. Save the tab to submit your feedback.";
|
||||
const FEEDBACK_SUBMISSION_ERROR_TEXT: &str =
|
||||
"Feedback failed to submit, see error log for details.";
|
||||
|
||||
actions!(feedback, [SubmitFeedback, GiveFeedback, DeployFeedback]);
|
||||
|
||||
pub fn init(cx: &mut MutableAppContext) {
|
||||
cx.add_action(FeedbackEditor::deploy);
|
||||
}
|
||||
|
||||
pub struct FeedbackButton;
|
||||
|
||||
impl Entity for FeedbackButton {
|
||||
type Event = ();
|
||||
}
|
||||
|
||||
impl View for FeedbackButton {
|
||||
fn ui_name() -> &'static str {
|
||||
"FeedbackButton"
|
||||
}
|
||||
|
||||
fn render(&mut self, cx: &mut RenderContext<'_, Self>) -> ElementBox {
|
||||
Stack::new()
|
||||
.with_child(
|
||||
MouseEventHandler::<Self>::new(0, cx, |state, cx| {
|
||||
let theme = &cx.global::<Settings>().theme;
|
||||
let theme = &theme.workspace.status_bar.feedback;
|
||||
|
||||
Text::new(
|
||||
"Give Feedback".to_string(),
|
||||
theme.style_for(state, true).clone(),
|
||||
)
|
||||
.boxed()
|
||||
})
|
||||
.with_cursor_style(CursorStyle::PointingHand)
|
||||
.on_click(MouseButton::Left, |_, cx| cx.dispatch_action(GiveFeedback))
|
||||
.boxed(),
|
||||
)
|
||||
.boxed()
|
||||
}
|
||||
}
|
||||
|
||||
impl StatusItemView for FeedbackButton {
|
||||
fn set_active_pane_item(
|
||||
&mut self,
|
||||
_: Option<&dyn ItemHandle>,
|
||||
_: &mut gpui::ViewContext<Self>,
|
||||
) {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
struct FeedbackRequestBody<'a> {
|
||||
feedback_text: &'a str,
|
||||
metrics_id: Option<Arc<str>>,
|
||||
system_specs: SystemSpecs,
|
||||
token: &'a str,
|
||||
}
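To make the wire format concrete, here is roughly what serializing a body of this shape with serde_json produces. The field values are made up, and both structs below are standalone simplified copies (owned Strings, a two-field SystemSpecs stand-in) so the snippet compiles on its own.

use serde::Serialize;

#[derive(Serialize)]
struct FeedbackRequestBody {
    feedback_text: String,
    metrics_id: Option<String>,
    system_specs: SystemSpecs,
    token: String,
}

// Stand-in for the real `SystemSpecs`; only the shape matters here.
#[derive(Serialize)]
struct SystemSpecs {
    app_version: &'static str,
    os_name: &'static str,
}

fn main() -> serde_json::Result<()> {
    let body = FeedbackRequestBody {
        feedback_text: "The new feedback editor works nicely".into(),
        metrics_id: None,
        system_specs: SystemSpecs { app_version: "0.0.0", os_name: "macOS" },
        token: "<placeholder token>".into(),
    };
    println!("{}", serde_json::to_string_pretty(&body)?);
    // {
    //   "feedback_text": "The new feedback editor works nicely",
    //   "metrics_id": null,
    //   "system_specs": {
    //     "app_version": "0.0.0",
    //     "os_name": "macOS"
    //   },
    //   "token": "<placeholder token>"
    // }
    Ok(())
}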
|
||||
|
||||
#[derive(Clone)]
|
||||
struct FeedbackEditor {
|
||||
editor: ViewHandle<Editor>,
|
||||
project: ModelHandle<Project>,
|
||||
}
|
||||
|
||||
impl FeedbackEditor {
|
||||
fn new_with_buffer(
|
||||
project: ModelHandle<Project>,
|
||||
buffer: ModelHandle<Buffer>,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) -> Self {
|
||||
let editor = cx.add_view(|cx| {
|
||||
let mut editor = Editor::for_buffer(buffer, Some(project.clone()), cx);
|
||||
editor.set_vertical_scroll_margin(5, cx);
|
||||
editor.set_placeholder_text(FEEDBACK_PLACEHOLDER_TEXT, cx);
|
||||
editor
|
||||
});
|
||||
|
||||
cx.subscribe(&editor, |_, _, e, cx| cx.emit(e.clone()))
|
||||
.detach();
|
||||
|
||||
let this = Self { editor, project };
|
||||
this
|
||||
}
|
||||
|
||||
fn new(project: ModelHandle<Project>, cx: &mut ViewContext<Self>) -> Self {
|
||||
let markdown_language = project.read(cx).languages().get_language("Markdown");
|
||||
|
||||
let buffer = project
|
||||
.update(cx, |project, cx| {
|
||||
project.create_buffer("", markdown_language, cx)
|
||||
})
|
||||
.expect("creating buffers on a local workspace always succeeds");
|
||||
|
||||
Self::new_with_buffer(project, buffer, cx)
|
||||
}
|
||||
|
||||
fn handle_save(
|
||||
&mut self,
|
||||
_: gpui::ModelHandle<Project>,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) -> Task<anyhow::Result<()>> {
|
||||
let feedback_text_length = self.editor.read(cx).buffer().read(cx).len(cx);
|
||||
|
||||
if feedback_text_length <= FEEDBACK_CHAR_COUNT_RANGE.start {
|
||||
cx.prompt(
|
||||
PromptLevel::Critical,
|
||||
&format!(
|
||||
"Feedback must be longer than {} characters",
|
||||
FEEDBACK_CHAR_COUNT_RANGE.start
|
||||
),
|
||||
&["OK"],
|
||||
);
|
||||
|
||||
return Task::ready(Ok(()));
|
||||
}
|
||||
|
||||
let mut answer = cx.prompt(
|
||||
PromptLevel::Info,
|
||||
"Ready to submit your feedback?",
|
||||
&["Yes, Submit!", "No"],
|
||||
);
|
||||
|
||||
let this = cx.handle();
|
||||
let client = cx.global::<Arc<Client>>().clone();
|
||||
let feedback_text = self.editor.read(cx).text(cx);
|
||||
let specs = SystemSpecs::new(cx);
|
||||
|
||||
cx.spawn(|_, mut cx| async move {
|
||||
let answer = answer.recv().await;
|
||||
|
||||
if answer == Some(0) {
|
||||
match FeedbackEditor::submit_feedback(&feedback_text, client, specs).await {
|
||||
Ok(_) => {
|
||||
cx.update(|cx| {
|
||||
this.update(cx, |_, cx| {
|
||||
cx.dispatch_action(workspace::CloseActiveItem);
|
||||
})
|
||||
});
|
||||
}
|
||||
Err(error) => {
|
||||
log::error!("{}", error);
|
||||
|
||||
cx.update(|cx| {
|
||||
this.update(cx, |_, cx| {
|
||||
cx.prompt(
|
||||
PromptLevel::Critical,
|
||||
FEEDBACK_SUBMISSION_ERROR_TEXT,
|
||||
&["OK"],
|
||||
);
|
||||
})
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
.detach();
|
||||
|
||||
Task::ready(Ok(()))
|
||||
}
|
||||
|
||||
async fn submit_feedback(
|
||||
feedback_text: &str,
|
||||
zed_client: Arc<Client>,
|
||||
system_specs: SystemSpecs,
|
||||
) -> anyhow::Result<()> {
|
||||
let feedback_endpoint = format!("{}/api/feedback", *ZED_SERVER_URL);
|
||||
|
||||
let metrics_id = zed_client.metrics_id();
|
||||
let http_client = zed_client.http_client();
|
||||
|
||||
let request = FeedbackRequestBody {
|
||||
feedback_text: &feedback_text,
|
||||
metrics_id,
|
||||
system_specs,
|
||||
token: ZED_SECRET_CLIENT_TOKEN,
|
||||
};
|
||||
|
||||
let json_bytes = serde_json::to_vec(&request)?;
|
||||
|
||||
let request = Request::post(feedback_endpoint)
|
||||
.header("content-type", "application/json")
|
||||
.body(json_bytes.into())?;
|
||||
|
||||
let mut response = http_client.send(request).await?;
|
||||
let mut body = String::new();
|
||||
response.body_mut().read_to_string(&mut body).await?;
|
||||
|
||||
let response_status = response.status();
|
||||
|
||||
if !response_status.is_success() {
|
||||
bail!("Feedback API failed with error: {}", response_status)
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl FeedbackEditor {
|
||||
pub fn deploy(workspace: &mut Workspace, _: &GiveFeedback, cx: &mut ViewContext<Workspace>) {
|
||||
let feedback_editor =
|
||||
cx.add_view(|cx| FeedbackEditor::new(workspace.project().clone(), cx));
|
||||
workspace.add_item(Box::new(feedback_editor), cx);
|
||||
}
|
||||
}
|
||||
|
||||
impl View for FeedbackEditor {
|
||||
fn ui_name() -> &'static str {
|
||||
"FeedbackEditor"
|
||||
}
|
||||
|
||||
fn render(&mut self, cx: &mut RenderContext<Self>) -> ElementBox {
|
||||
ChildView::new(&self.editor, cx).boxed()
|
||||
}
|
||||
|
||||
fn focus_in(&mut self, _: AnyViewHandle, cx: &mut ViewContext<Self>) {
|
||||
if cx.is_self_focused() {
|
||||
cx.focus(&self.editor);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Entity for FeedbackEditor {
|
||||
type Event = editor::Event;
|
||||
}
|
||||
|
||||
impl Item for FeedbackEditor {
|
||||
fn tab_content(
|
||||
&self,
|
||||
_: Option<usize>,
|
||||
style: &theme::Tab,
|
||||
_: &gpui::AppContext,
|
||||
) -> ElementBox {
|
||||
Flex::row()
|
||||
.with_child(
|
||||
Label::new("Feedback".to_string(), style.label.clone())
|
||||
.aligned()
|
||||
.contained()
|
||||
.boxed(),
|
||||
)
|
||||
.boxed()
|
||||
}
|
||||
|
||||
fn for_each_project_item(&self, cx: &AppContext, f: &mut dyn FnMut(usize, &dyn project::Item)) {
|
||||
self.editor.for_each_project_item(cx, f)
|
||||
}
|
||||
|
||||
fn to_item_events(_: &Self::Event) -> Vec<workspace::item::ItemEvent> {
|
||||
Vec::new()
|
||||
}
|
||||
|
||||
fn is_singleton(&self, _: &gpui::AppContext) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn set_nav_history(&mut self, _: workspace::ItemNavHistory, _: &mut ViewContext<Self>) {}
|
||||
|
||||
fn can_save(&self, _: &gpui::AppContext) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn save(
|
||||
&mut self,
|
||||
project: gpui::ModelHandle<Project>,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) -> Task<anyhow::Result<()>> {
|
||||
self.handle_save(project, cx)
|
||||
}
|
||||
|
||||
fn save_as(
|
||||
&mut self,
|
||||
project: gpui::ModelHandle<Project>,
|
||||
_: std::path::PathBuf,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) -> Task<anyhow::Result<()>> {
|
||||
self.handle_save(project, cx)
|
||||
}
|
||||
|
||||
fn reload(
|
||||
&mut self,
|
||||
_: gpui::ModelHandle<Project>,
|
||||
_: &mut ViewContext<Self>,
|
||||
) -> Task<anyhow::Result<()>> {
|
||||
unreachable!("reload should not have been called")
|
||||
}
|
||||
|
||||
fn clone_on_split(
|
||||
&self,
|
||||
_workspace_id: workspace::WorkspaceId,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) -> Option<Self>
|
||||
where
|
||||
Self: Sized,
|
||||
{
|
||||
let buffer = self
|
||||
.editor
|
||||
.read(cx)
|
||||
.buffer()
|
||||
.read(cx)
|
||||
.as_singleton()
|
||||
.expect("Feedback buffer is only ever singleton");
|
||||
|
||||
Some(Self::new_with_buffer(
|
||||
self.project.clone(),
|
||||
buffer.clone(),
|
||||
cx,
|
||||
))
|
||||
}
|
||||
|
||||
fn serialized_item_kind() -> Option<&'static str> {
|
||||
None
|
||||
}
|
||||
|
||||
fn deserialize(
|
||||
_: gpui::ModelHandle<Project>,
|
||||
_: gpui::WeakViewHandle<Workspace>,
|
||||
_: workspace::WorkspaceId,
|
||||
_: workspace::ItemId,
|
||||
_: &mut ViewContext<workspace::Pane>,
|
||||
) -> Task<anyhow::Result<ViewHandle<Self>>> {
|
||||
unreachable!()
|
||||
}
|
||||
|
||||
fn as_searchable(&self, handle: &ViewHandle<Self>) -> Option<Box<dyn SearchableItemHandle>> {
|
||||
Some(Box::new(handle.clone()))
|
||||
}
|
||||
}
|
||||
|
||||
impl SearchableItem for FeedbackEditor {
|
||||
type Match = Range<Anchor>;
|
||||
|
||||
fn to_search_event(event: &Self::Event) -> Option<workspace::searchable::SearchEvent> {
|
||||
Editor::to_search_event(event)
|
||||
}
|
||||
|
||||
fn clear_matches(&mut self, cx: &mut ViewContext<Self>) {
|
||||
self.editor
|
||||
.update(cx, |editor, cx| editor.clear_matches(cx))
|
||||
}
|
||||
|
||||
fn update_matches(&mut self, matches: Vec<Self::Match>, cx: &mut ViewContext<Self>) {
|
||||
self.editor
|
||||
.update(cx, |editor, cx| editor.update_matches(matches, cx))
|
||||
}
|
||||
|
||||
fn query_suggestion(&mut self, cx: &mut ViewContext<Self>) -> String {
|
||||
self.editor
|
||||
.update(cx, |editor, cx| editor.query_suggestion(cx))
|
||||
}
|
||||
|
||||
fn activate_match(
|
||||
&mut self,
|
||||
index: usize,
|
||||
matches: Vec<Self::Match>,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) {
|
||||
self.editor
|
||||
.update(cx, |editor, cx| editor.activate_match(index, matches, cx))
|
||||
}
|
||||
|
||||
fn find_matches(
|
||||
&mut self,
|
||||
query: project::search::SearchQuery,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) -> Task<Vec<Self::Match>> {
|
||||
self.editor
|
||||
.update(cx, |editor, cx| editor.find_matches(query, cx))
|
||||
}
|
||||
|
||||
fn active_match_index(
|
||||
&mut self,
|
||||
matches: Vec<Self::Match>,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) -> Option<usize> {
|
||||
self.editor
|
||||
.update(cx, |editor, cx| editor.active_match_index(matches, cx))
|
||||
}
|
||||
}
|
|
@ -2,9 +2,11 @@ use std::{env, fmt::Display};

use gpui::AppContext;
use human_bytes::human_bytes;
use serde::Serialize;
use sysinfo::{System, SystemExt};
use util::channel::ReleaseChannel;

#[derive(Debug, Serialize)]
pub struct SystemSpecs {
    app_version: &'static str,
    release_channel: &'static str,

@ -40,7 +42,7 @@ impl Display for SystemSpecs {
            None => format!("OS: {}", self.os_name),
        };
        let system_specs = [
-           format!("Zed: {} ({})", self.app_version, self.release_channel),
+           format!("Zed: v{} ({})", self.app_version, self.release_channel),
            os_information,
            format!("Memory: {}", human_bytes(self.memory as f64)),
            format!("Architecture: {}", self.architecture),
|
@ -2,6 +2,7 @@
|
|||
name = "file_finder"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
publish = false
|
||||
|
||||
[lib]
|
||||
path = "src/file_finder.rs"
|
||||
|
|
|
@ -2,6 +2,7 @@
|
|||
name = "fs"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
publish = false
|
||||
|
||||
[lib]
|
||||
path = "src/fs.rs"
|
||||
|
|
|
@ -3,6 +3,7 @@ name = "fsevent"
|
|||
version = "2.0.2"
|
||||
license = "MIT"
|
||||
edition = "2021"
|
||||
publish = false
|
||||
|
||||
[lib]
|
||||
path = "src/fsevent.rs"
|
||||
|
|
|
@ -2,6 +2,7 @@
|
|||
name = "fuzzy"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
publish = false
|
||||
|
||||
[lib]
|
||||
path = "src/fuzzy.rs"
|
||||
|
|
|
@ -2,6 +2,7 @@
|
|||
name = "git"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
publish = false
|
||||
|
||||
[lib]
|
||||
path = "src/git.rs"
|
||||
|
|
|
@ -2,6 +2,7 @@
|
|||
name = "go_to_line"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
publish = false
|
||||
|
||||
[lib]
|
||||
path = "src/go_to_line.rs"
|
||||
|
|
|
@ -4,6 +4,7 @@ edition = "2021"
|
|||
name = "gpui"
|
||||
version = "0.1.0"
|
||||
description = "A GPU-accelerated UI framework"
|
||||
publish = false
|
||||
|
||||
[lib]
|
||||
path = "src/gpui.rs"
|
||||
|
@ -45,7 +46,6 @@ smallvec = { version = "1.6", features = ["union"] }
|
|||
smol = "1.2"
|
||||
time = { version = "0.3", features = ["serde", "serde-well-known"] }
|
||||
tiny-skia = "0.5"
|
||||
tree-sitter = "0.20"
|
||||
usvg = "0.14"
|
||||
waker-fn = "1.1.0"
|
||||
|
||||
|
|
|
@ -6,7 +6,6 @@ use std::{
|
|||
|
||||
fn main() {
|
||||
generate_dispatch_bindings();
|
||||
compile_context_predicate_parser();
|
||||
compile_metal_shaders();
|
||||
generate_shader_bindings();
|
||||
}
|
||||
|
@ -30,17 +29,6 @@ fn generate_dispatch_bindings() {
|
|||
.expect("couldn't write dispatch bindings");
|
||||
}
|
||||
|
||||
fn compile_context_predicate_parser() {
|
||||
let dir = PathBuf::from("./grammars/context-predicate/src");
|
||||
let parser_c = dir.join("parser.c");
|
||||
|
||||
println!("cargo:rerun-if-changed={}", &parser_c.to_str().unwrap());
|
||||
cc::Build::new()
|
||||
.include(&dir)
|
||||
.file(parser_c)
|
||||
.compile("tree_sitter_context_predicate");
|
||||
}
|
||||
|
||||
const SHADER_HEADER_PATH: &str = "./src/platform/mac/shaders/shaders.h";
|
||||
|
||||
fn compile_metal_shaders() {
|
||||
|
|
|
@ -1,2 +0,0 @@
|
|||
/node_modules
|
||||
/build
|
|
@ -1,20 +0,0 @@
|
|||
[package]
|
||||
name = "tree-sitter-context-predicate"
|
||||
description = "context-predicate grammar for the tree-sitter parsing library"
|
||||
version = "0.0.1"
|
||||
keywords = ["incremental", "parsing", "context-predicate"]
|
||||
categories = ["parsing", "text-editors"]
|
||||
repository = "https://github.com/tree-sitter/tree-sitter-javascript"
|
||||
edition = "2021"
|
||||
license = "MIT"
|
||||
build = "bindings/rust/build.rs"
|
||||
include = ["bindings/rust/*", "grammar.js", "queries/*", "src/*"]
|
||||
|
||||
[lib]
|
||||
path = "bindings/rust/lib.rs"
|
||||
|
||||
[dependencies]
|
||||
tree-sitter = "0.20"
|
||||
|
||||
[build-dependencies]
|
||||
cc = "1.0"
|
|
@ -1,18 +0,0 @@
|
|||
{
|
||||
"targets": [
|
||||
{
|
||||
"target_name": "tree_sitter_context_predicate_binding",
|
||||
"include_dirs": [
|
||||
"<!(node -e \"require('nan')\")",
|
||||
"src"
|
||||
],
|
||||
"sources": [
|
||||
"src/parser.c",
|
||||
"bindings/node/binding.cc"
|
||||
],
|
||||
"cflags_c": [
|
||||
"-std=c99",
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
|
@ -1,30 +0,0 @@
|
|||
#include "nan.h"
|
||||
#include "tree_sitter/parser.h"
|
||||
#include <node.h>
|
||||
|
||||
using namespace v8;
|
||||
|
||||
extern "C" TSLanguage *tree_sitter_context_predicate();
|
||||
|
||||
namespace {
|
||||
|
||||
NAN_METHOD(New) {}
|
||||
|
||||
void Init(Local<Object> exports, Local<Object> module) {
|
||||
Local<FunctionTemplate> tpl = Nan::New<FunctionTemplate>(New);
|
||||
tpl->SetClassName(Nan::New("Language").ToLocalChecked());
|
||||
tpl->InstanceTemplate()->SetInternalFieldCount(1);
|
||||
|
||||
Local<Function> constructor = Nan::GetFunction(tpl).ToLocalChecked();
|
||||
Local<Object> instance =
|
||||
constructor->NewInstance(Nan::GetCurrentContext()).ToLocalChecked();
|
||||
Nan::SetInternalFieldPointer(instance, 0, tree_sitter_context_predicate());
|
||||
|
||||
Nan::Set(instance, Nan::New("name").ToLocalChecked(),
|
||||
Nan::New("context_predicate").ToLocalChecked());
|
||||
Nan::Set(module, Nan::New("exports").ToLocalChecked(), instance);
|
||||
}
|
||||
|
||||
NODE_MODULE(tree_sitter_context_predicate_binding, Init)
|
||||
|
||||
} // namespace
|
|
@ -1,19 +0,0 @@
|
|||
try {
|
||||
module.exports = require("../../build/Release/tree_sitter_context_predicate_binding");
|
||||
} catch (error1) {
|
||||
if (error1.code !== 'MODULE_NOT_FOUND') {
|
||||
throw error1;
|
||||
}
|
||||
try {
|
||||
module.exports = require("../../build/Debug/tree_sitter_context_predicate_binding");
|
||||
} catch (error2) {
|
||||
if (error2.code !== 'MODULE_NOT_FOUND') {
|
||||
throw error2;
|
||||
}
|
||||
throw error1
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
module.exports.nodeTypeInfo = require("../../src/node-types.json");
|
||||
} catch (_) {}
|
|
@ -1,40 +0,0 @@
|
|||
fn main() {
|
||||
let src_dir = std::path::Path::new("src");
|
||||
|
||||
let mut c_config = cc::Build::new();
|
||||
c_config.include(&src_dir);
|
||||
c_config
|
||||
.flag_if_supported("-Wno-unused-parameter")
|
||||
.flag_if_supported("-Wno-unused-but-set-variable")
|
||||
.flag_if_supported("-Wno-trigraphs");
|
||||
let parser_path = src_dir.join("parser.c");
|
||||
c_config.file(&parser_path);
|
||||
|
||||
// If your language uses an external scanner written in C,
|
||||
// then include this block of code:
|
||||
|
||||
/*
|
||||
let scanner_path = src_dir.join("scanner.c");
|
||||
c_config.file(&scanner_path);
|
||||
println!("cargo:rerun-if-changed={}", scanner_path.to_str().unwrap());
|
||||
*/
|
||||
|
||||
c_config.compile("parser");
|
||||
println!("cargo:rerun-if-changed={}", parser_path.to_str().unwrap());
|
||||
|
||||
// If your language uses an external scanner written in C++,
|
||||
// then include this block of code:
|
||||
|
||||
/*
|
||||
let mut cpp_config = cc::Build::new();
|
||||
cpp_config.cpp(true);
|
||||
cpp_config.include(&src_dir);
|
||||
cpp_config
|
||||
.flag_if_supported("-Wno-unused-parameter")
|
||||
.flag_if_supported("-Wno-unused-but-set-variable");
|
||||
let scanner_path = src_dir.join("scanner.cc");
|
||||
cpp_config.file(&scanner_path);
|
||||
cpp_config.compile("scanner");
|
||||
println!("cargo:rerun-if-changed={}", scanner_path.to_str().unwrap());
|
||||
*/
|
||||
}
|
|
@ -1,52 +0,0 @@
|
|||
//! This crate provides context_predicate language support for the [tree-sitter][] parsing library.
|
||||
//!
|
||||
//! Typically, you will use the [language][language func] function to add this language to a
|
||||
//! tree-sitter [Parser][], and then use the parser to parse some code:
|
||||
//!
|
||||
//! ```
|
||||
//! let code = "";
|
||||
//! let mut parser = tree_sitter::Parser::new();
|
||||
//! parser.set_language(tree_sitter_context_predicate::language()).expect("Error loading context_predicate grammar");
|
||||
//! let tree = parser.parse(code, None).unwrap();
|
||||
//! ```
|
||||
//!
|
||||
//! [Language]: https://docs.rs/tree-sitter/*/tree_sitter/struct.Language.html
|
||||
//! [language func]: fn.language.html
|
||||
//! [Parser]: https://docs.rs/tree-sitter/*/tree_sitter/struct.Parser.html
|
||||
//! [tree-sitter]: https://tree-sitter.github.io/
|
||||
|
||||
use tree_sitter::Language;
|
||||
|
||||
extern "C" {
|
||||
fn tree_sitter_context_predicate() -> Language;
|
||||
}
|
||||
|
||||
/// Get the tree-sitter [Language][] for this grammar.
|
||||
///
|
||||
/// [Language]: https://docs.rs/tree-sitter/*/tree_sitter/struct.Language.html
|
||||
pub fn language() -> Language {
|
||||
unsafe { tree_sitter_context_predicate() }
|
||||
}
|
||||
|
||||
/// The content of the [`node-types.json`][] file for this grammar.
|
||||
///
|
||||
/// [`node-types.json`]: https://tree-sitter.github.io/tree-sitter/using-parsers#static-node-types
|
||||
pub const NODE_TYPES: &'static str = include_str!("../../src/node-types.json");
|
||||
|
||||
// Uncomment these to include any queries that this grammar contains
|
||||
|
||||
// pub const HIGHLIGHTS_QUERY: &'static str = include_str!("../../queries/highlights.scm");
|
||||
// pub const INJECTIONS_QUERY: &'static str = include_str!("../../queries/injections.scm");
|
||||
// pub const LOCALS_QUERY: &'static str = include_str!("../../queries/locals.scm");
|
||||
// pub const TAGS_QUERY: &'static str = include_str!("../../queries/tags.scm");
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
#[test]
|
||||
fn test_can_load_grammar() {
|
||||
let mut parser = tree_sitter::Parser::new();
|
||||
parser
|
||||
.set_language(super::language())
|
||||
.expect("Error loading context_predicate language");
|
||||
}
|
||||
}
|
|
@ -1,49 +0,0 @@
|
|||
==================
|
||||
Identifiers
|
||||
==================
|
||||
|
||||
abc12
|
||||
|
||||
---
|
||||
|
||||
(source (identifier))
|
||||
|
||||
==================
|
||||
Negation
|
||||
==================
|
||||
|
||||
!abc
|
||||
|
||||
---
|
||||
|
||||
(source (not (identifier)))
|
||||
|
||||
==================
|
||||
And/Or
|
||||
==================
|
||||
|
||||
a || b && c && d
|
||||
|
||||
---
|
||||
|
||||
(source
|
||||
(or
|
||||
(identifier)
|
||||
(and
|
||||
(and (identifier) (identifier))
|
||||
(identifier))))
|
||||
|
||||
==================
|
||||
Expressions
|
||||
==================
|
||||
|
||||
a && (b == c || d != e)
|
||||
|
||||
---
|
||||
|
||||
(source
|
||||
(and
|
||||
(identifier)
|
||||
(parenthesized (or
|
||||
(equal (identifier) (identifier))
|
||||
(not_equal (identifier) (identifier))))))
|
|
@ -1,31 +0,0 @@
|
|||
module.exports = grammar({
|
||||
name: 'context_predicate',
|
||||
|
||||
rules: {
|
||||
source: $ => $._expression,
|
||||
|
||||
_expression: $ => choice(
|
||||
$.identifier,
|
||||
$.not,
|
||||
$.and,
|
||||
$.or,
|
||||
$.equal,
|
||||
$.not_equal,
|
||||
$.parenthesized,
|
||||
),
|
||||
|
||||
identifier: $ => /[A-Za-z0-9_-]+/,
|
||||
|
||||
not: $ => prec(3, seq("!", field("expression", $._expression))),
|
||||
|
||||
and: $ => prec.left(2, seq(field("left", $._expression), "&&", field("right", $._expression))),
|
||||
|
||||
or: $ => prec.left(1, seq(field("left", $._expression), "||", field("right", $._expression))),
|
||||
|
||||
equal: $ => seq(field("left", $.identifier), "==", field("right", $.identifier)),
|
||||
|
||||
not_equal: $ => seq(field("left", $.identifier), "!=", field("right", $.identifier)),
|
||||
|
||||
parenthesized: $ => seq("(", field("expression", $._expression), ")"),
|
||||
}
|
||||
});
|
|
@ -1,44 +0,0 @@
|
|||
{
|
||||
"name": "tree-sitter-context-predicate",
|
||||
"lockfileVersion": 2,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "tree-sitter-context-predicate",
|
||||
"dependencies": {
|
||||
"nan": "^2.14.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"tree-sitter-cli": "^0.19.5"
|
||||
}
|
||||
},
|
||||
"node_modules/nan": {
|
||||
"version": "2.14.2",
|
||||
"resolved": "https://registry.npmjs.org/nan/-/nan-2.14.2.tgz",
|
||||
"integrity": "sha512-M2ufzIiINKCuDfBSAUr1vWQ+vuVcA9kqx8JJUsbQi6yf1uGRyb7HfpdfUr5qLXf3B/t8dPvcjhKMmlfnP47EzQ=="
|
||||
},
|
||||
"node_modules/tree-sitter-cli": {
|
||||
"version": "0.19.5",
|
||||
"resolved": "https://registry.npmjs.org/tree-sitter-cli/-/tree-sitter-cli-0.19.5.tgz",
|
||||
"integrity": "sha512-kRzKrUAwpDN9AjA3b0tPBwT1hd8N2oQvvvHup2OEsX6mdsSMLmAvR+NSqK9fe05JrRbVvG8mbteNUQsxlMQohQ==",
|
||||
"dev": true,
|
||||
"hasInstallScript": true,
|
||||
"bin": {
|
||||
"tree-sitter": "cli.js"
|
||||
}
|
||||
}
|
||||
},
|
||||
"dependencies": {
|
||||
"nan": {
|
||||
"version": "2.14.2",
|
||||
"resolved": "https://registry.npmjs.org/nan/-/nan-2.14.2.tgz",
|
||||
"integrity": "sha512-M2ufzIiINKCuDfBSAUr1vWQ+vuVcA9kqx8JJUsbQi6yf1uGRyb7HfpdfUr5qLXf3B/t8dPvcjhKMmlfnP47EzQ=="
|
||||
},
|
||||
"tree-sitter-cli": {
|
||||
"version": "0.19.5",
|
||||
"resolved": "https://registry.npmjs.org/tree-sitter-cli/-/tree-sitter-cli-0.19.5.tgz",
|
||||
"integrity": "sha512-kRzKrUAwpDN9AjA3b0tPBwT1hd8N2oQvvvHup2OEsX6mdsSMLmAvR+NSqK9fe05JrRbVvG8mbteNUQsxlMQohQ==",
|
||||
"dev": true
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,10 +0,0 @@
|
|||
{
|
||||
"name": "tree-sitter-context-predicate",
|
||||
"main": "bindings/node",
|
||||
"devDependencies": {
|
||||
"tree-sitter-cli": "^0.19.5"
|
||||
},
|
||||
"dependencies": {
|
||||
"nan": "^2.14.0"
|
||||
}
|
||||
}
|
|
@ -1,208 +0,0 @@
|
|||
{
|
||||
"name": "context_predicate",
|
||||
"rules": {
|
||||
"source": {
|
||||
"type": "SYMBOL",
|
||||
"name": "_expression"
|
||||
},
|
||||
"_expression": {
|
||||
"type": "CHOICE",
|
||||
"members": [
|
||||
{
|
||||
"type": "SYMBOL",
|
||||
"name": "identifier"
|
||||
},
|
||||
{
|
||||
"type": "SYMBOL",
|
||||
"name": "not"
|
||||
},
|
||||
{
|
||||
"type": "SYMBOL",
|
||||
"name": "and"
|
||||
},
|
||||
{
|
||||
"type": "SYMBOL",
|
||||
"name": "or"
|
||||
},
|
||||
{
|
||||
"type": "SYMBOL",
|
||||
"name": "equal"
|
||||
},
|
||||
{
|
||||
"type": "SYMBOL",
|
||||
"name": "not_equal"
|
||||
},
|
||||
{
|
||||
"type": "SYMBOL",
|
||||
"name": "parenthesized"
|
||||
}
|
||||
]
|
||||
},
|
||||
"identifier": {
|
||||
"type": "PATTERN",
|
||||
"value": "[A-Za-z0-9_-]+"
|
||||
},
|
||||
"not": {
|
||||
"type": "PREC",
|
||||
"value": 3,
|
||||
"content": {
|
||||
"type": "SEQ",
|
||||
"members": [
|
||||
{
|
||||
"type": "STRING",
|
||||
"value": "!"
|
||||
},
|
||||
{
|
||||
"type": "FIELD",
|
||||
"name": "expression",
|
||||
"content": {
|
||||
"type": "SYMBOL",
|
||||
"name": "_expression"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"and": {
|
||||
"type": "PREC_LEFT",
|
||||
"value": 2,
|
||||
"content": {
|
||||
"type": "SEQ",
|
||||
"members": [
|
||||
{
|
||||
"type": "FIELD",
|
||||
"name": "left",
|
||||
"content": {
|
||||
"type": "SYMBOL",
|
||||
"name": "_expression"
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "STRING",
|
||||
"value": "&&"
|
||||
},
|
||||
{
|
||||
"type": "FIELD",
|
||||
"name": "right",
|
||||
"content": {
|
||||
"type": "SYMBOL",
|
||||
"name": "_expression"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"or": {
|
||||
"type": "PREC_LEFT",
|
||||
"value": 1,
|
||||
"content": {
|
||||
"type": "SEQ",
|
||||
"members": [
|
||||
{
|
||||
"type": "FIELD",
|
||||
"name": "left",
|
||||
"content": {
|
||||
"type": "SYMBOL",
|
||||
"name": "_expression"
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "STRING",
|
||||
"value": "||"
|
||||
},
|
||||
{
|
||||
"type": "FIELD",
|
||||
"name": "right",
|
||||
"content": {
|
||||
"type": "SYMBOL",
|
||||
"name": "_expression"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"equal": {
|
||||
"type": "SEQ",
|
||||
"members": [
|
||||
{
|
||||
"type": "FIELD",
|
||||
"name": "left",
|
||||
"content": {
|
||||
"type": "SYMBOL",
|
||||
"name": "identifier"
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "STRING",
|
||||
"value": "=="
|
||||
},
|
||||
{
|
||||
"type": "FIELD",
|
||||
"name": "right",
|
||||
"content": {
|
||||
"type": "SYMBOL",
|
||||
"name": "identifier"
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
"not_equal": {
|
||||
"type": "SEQ",
|
||||
"members": [
|
||||
{
|
||||
"type": "FIELD",
|
||||
"name": "left",
|
||||
"content": {
|
||||
"type": "SYMBOL",
|
||||
"name": "identifier"
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "STRING",
|
||||
"value": "!="
|
||||
},
|
||||
{
|
||||
"type": "FIELD",
|
||||
"name": "right",
|
||||
"content": {
|
||||
"type": "SYMBOL",
|
||||
"name": "identifier"
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
"parenthesized": {
|
||||
"type": "SEQ",
|
||||
"members": [
|
||||
{
|
||||
"type": "STRING",
|
||||
"value": "("
|
||||
},
|
||||
{
|
||||
"type": "FIELD",
|
||||
"name": "expression",
|
||||
"content": {
|
||||
"type": "SYMBOL",
|
||||
"name": "_expression"
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "STRING",
|
||||
"value": ")"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"extras": [
|
||||
{
|
||||
"type": "PATTERN",
|
||||
"value": "\\s"
|
||||
}
|
||||
],
|
||||
"conflicts": [],
|
||||
"precedences": [],
|
||||
"externals": [],
|
||||
"inline": [],
|
||||
"supertypes": []
|
||||
}
|
||||
|
|
@ -1,353 +0,0 @@
|
|||
[
|
||||
{
|
||||
"type": "and",
|
||||
"named": true,
|
||||
"fields": {
|
||||
"left": {
|
||||
"multiple": false,
|
||||
"required": true,
|
||||
"types": [
|
||||
{
|
||||
"type": "and",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "equal",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "identifier",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "not",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "not_equal",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "or",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "parenthesized",
|
||||
"named": true
|
||||
}
|
||||
]
|
||||
},
|
||||
"right": {
|
||||
"multiple": false,
|
||||
"required": true,
|
||||
"types": [
|
||||
{
|
||||
"type": "and",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "equal",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "identifier",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "not",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "not_equal",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "or",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "parenthesized",
|
||||
"named": true
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "equal",
|
||||
"named": true,
|
||||
"fields": {
|
||||
"left": {
|
||||
"multiple": false,
|
||||
"required": true,
|
||||
"types": [
|
||||
{
|
||||
"type": "identifier",
|
||||
"named": true
|
||||
}
|
||||
]
|
||||
},
|
||||
"right": {
|
||||
"multiple": false,
|
||||
"required": true,
|
||||
"types": [
|
||||
{
|
||||
"type": "identifier",
|
||||
"named": true
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "not",
|
||||
"named": true,
|
||||
"fields": {
|
||||
"expression": {
|
||||
"multiple": false,
|
||||
"required": true,
|
||||
"types": [
|
||||
{
|
||||
"type": "and",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "equal",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "identifier",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "not",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "not_equal",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "or",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "parenthesized",
|
||||
"named": true
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "not_equal",
|
||||
"named": true,
|
||||
"fields": {
|
||||
"left": {
|
||||
"multiple": false,
|
||||
"required": true,
|
||||
"types": [
|
||||
{
|
||||
"type": "identifier",
|
||||
"named": true
|
||||
}
|
||||
]
|
||||
},
|
||||
"right": {
|
||||
"multiple": false,
|
||||
"required": true,
|
||||
"types": [
|
||||
{
|
||||
"type": "identifier",
|
||||
"named": true
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "or",
|
||||
"named": true,
|
||||
"fields": {
|
||||
"left": {
|
||||
"multiple": false,
|
||||
"required": true,
|
||||
"types": [
|
||||
{
|
||||
"type": "and",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "equal",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "identifier",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "not",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "not_equal",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "or",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "parenthesized",
|
||||
"named": true
|
||||
}
|
||||
]
|
||||
},
|
||||
"right": {
|
||||
"multiple": false,
|
||||
"required": true,
|
||||
"types": [
|
||||
{
|
||||
"type": "and",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "equal",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "identifier",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "not",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "not_equal",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "or",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "parenthesized",
|
||||
"named": true
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "parenthesized",
|
||||
"named": true,
|
||||
"fields": {
|
||||
"expression": {
|
||||
"multiple": false,
|
||||
"required": true,
|
||||
"types": [
|
||||
{
|
||||
"type": "and",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "equal",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "identifier",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "not",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "not_equal",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "or",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "parenthesized",
|
||||
"named": true
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "source",
|
||||
"named": true,
|
||||
"fields": {},
|
||||
"children": {
|
||||
"multiple": false,
|
||||
"required": true,
|
||||
"types": [
|
||||
{
|
||||
"type": "and",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "equal",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "identifier",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "not",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "not_equal",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "or",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "parenthesized",
|
||||
"named": true
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "!",
|
||||
"named": false
|
||||
},
|
||||
{
|
||||
"type": "!=",
|
||||
"named": false
|
||||
},
|
||||
{
|
||||
"type": "&&",
|
||||
"named": false
|
||||
},
|
||||
{
|
||||
"type": "(",
|
||||
"named": false
|
||||
},
|
||||
{
|
||||
"type": ")",
|
||||
"named": false
|
||||
},
|
||||
{
|
||||
"type": "==",
|
||||
"named": false
|
||||
},
|
||||
{
|
||||
"type": "identifier",
|
||||
"named": true
|
||||
},
|
||||
{
|
||||
"type": "||",
|
||||
"named": false
|
||||
}
|
||||
]
|
|
@ -1,584 +0,0 @@
|
|||
#include <tree_sitter/parser.h>
|
||||
|
||||
#if defined(__GNUC__) || defined(__clang__)
|
||||
#pragma GCC diagnostic push
|
||||
#pragma GCC diagnostic ignored "-Wmissing-field-initializers"
|
||||
#endif
|
||||
|
||||
#define LANGUAGE_VERSION 13
|
||||
#define STATE_COUNT 18
|
||||
#define LARGE_STATE_COUNT 6
|
||||
#define SYMBOL_COUNT 17
|
||||
#define ALIAS_COUNT 0
|
||||
#define TOKEN_COUNT 9
|
||||
#define EXTERNAL_TOKEN_COUNT 0
|
||||
#define FIELD_COUNT 3
|
||||
#define MAX_ALIAS_SEQUENCE_LENGTH 3
|
||||
#define PRODUCTION_ID_COUNT 3
|
||||
|
||||
enum {
|
||||
sym_identifier = 1,
|
||||
anon_sym_BANG = 2,
|
||||
anon_sym_AMP_AMP = 3,
|
||||
anon_sym_PIPE_PIPE = 4,
|
||||
anon_sym_EQ_EQ = 5,
|
||||
anon_sym_BANG_EQ = 6,
|
||||
anon_sym_LPAREN = 7,
|
||||
anon_sym_RPAREN = 8,
|
||||
sym_source = 9,
|
||||
sym__expression = 10,
|
||||
sym_not = 11,
|
||||
sym_and = 12,
|
||||
sym_or = 13,
|
||||
sym_equal = 14,
|
||||
sym_not_equal = 15,
|
||||
sym_parenthesized = 16,
|
||||
};
|
||||
|
||||
static const char *const ts_symbol_names[] = {
|
||||
[ts_builtin_sym_end] = "end",
|
||||
[sym_identifier] = "identifier",
|
||||
[anon_sym_BANG] = "!",
|
||||
[anon_sym_AMP_AMP] = "&&",
|
||||
[anon_sym_PIPE_PIPE] = "||",
|
||||
[anon_sym_EQ_EQ] = "==",
|
||||
[anon_sym_BANG_EQ] = "!=",
|
||||
[anon_sym_LPAREN] = "(",
|
||||
[anon_sym_RPAREN] = ")",
|
||||
[sym_source] = "source",
|
||||
[sym__expression] = "_expression",
|
||||
[sym_not] = "not",
|
||||
[sym_and] = "and",
|
||||
[sym_or] = "or",
|
||||
[sym_equal] = "equal",
|
||||
[sym_not_equal] = "not_equal",
|
||||
[sym_parenthesized] = "parenthesized",
|
||||
};
|
||||
|
||||
static const TSSymbol ts_symbol_map[] = {
|
||||
[ts_builtin_sym_end] = ts_builtin_sym_end,
|
||||
[sym_identifier] = sym_identifier,
|
||||
[anon_sym_BANG] = anon_sym_BANG,
|
||||
[anon_sym_AMP_AMP] = anon_sym_AMP_AMP,
|
||||
[anon_sym_PIPE_PIPE] = anon_sym_PIPE_PIPE,
|
||||
[anon_sym_EQ_EQ] = anon_sym_EQ_EQ,
|
||||
[anon_sym_BANG_EQ] = anon_sym_BANG_EQ,
|
||||
[anon_sym_LPAREN] = anon_sym_LPAREN,
|
||||
[anon_sym_RPAREN] = anon_sym_RPAREN,
|
||||
[sym_source] = sym_source,
|
||||
[sym__expression] = sym__expression,
|
||||
[sym_not] = sym_not,
|
||||
[sym_and] = sym_and,
|
||||
[sym_or] = sym_or,
|
||||
[sym_equal] = sym_equal,
|
||||
[sym_not_equal] = sym_not_equal,
|
||||
[sym_parenthesized] = sym_parenthesized,
|
||||
};
|
||||
|
||||
static const TSSymbolMetadata ts_symbol_metadata[] = {
|
||||
[ts_builtin_sym_end] =
|
||||
{
|
||||
.visible = false,
|
||||
.named = true,
|
||||
},
|
||||
[sym_identifier] =
|
||||
{
|
||||
.visible = true,
|
||||
.named = true,
|
||||
},
|
||||
[anon_sym_BANG] =
|
||||
{
|
||||
.visible = true,
|
||||
.named = false,
|
||||
},
|
||||
[anon_sym_AMP_AMP] =
|
||||
{
|
||||
.visible = true,
|
||||
.named = false,
|
||||
},
|
||||
[anon_sym_PIPE_PIPE] =
|
||||
{
|
||||
.visible = true,
|
||||
.named = false,
|
||||
},
|
||||
[anon_sym_EQ_EQ] =
|
||||
{
|
||||
.visible = true,
|
||||
.named = false,
|
||||
},
|
||||
[anon_sym_BANG_EQ] =
|
||||
{
|
||||
.visible = true,
|
||||
.named = false,
|
||||
},
|
||||
[anon_sym_LPAREN] =
|
||||
{
|
||||
.visible = true,
|
||||
.named = false,
|
||||
},
|
||||
[anon_sym_RPAREN] =
|
||||
{
|
||||
.visible = true,
|
||||
.named = false,
|
||||
},
|
||||
[sym_source] =
|
||||
{
|
||||
.visible = true,
|
||||
.named = true,
|
||||
},
|
||||
[sym__expression] =
|
||||
{
|
||||
.visible = false,
|
||||
.named = true,
|
||||
},
|
||||
[sym_not] =
|
||||
{
|
||||
.visible = true,
|
||||
.named = true,
|
||||
},
|
||||
[sym_and] =
|
||||
{
|
||||
.visible = true,
|
||||
.named = true,
|
||||
},
|
||||
[sym_or] =
|
||||
{
|
||||
.visible = true,
|
||||
.named = true,
|
||||
},
|
||||
[sym_equal] =
|
||||
{
|
||||
.visible = true,
|
||||
.named = true,
|
||||
},
|
||||
[sym_not_equal] =
|
||||
{
|
||||
.visible = true,
|
||||
.named = true,
|
||||
},
|
||||
[sym_parenthesized] =
|
||||
{
|
||||
.visible = true,
|
||||
.named = true,
|
||||
},
|
||||
};
|
||||
|
||||
enum {
|
||||
field_expression = 1,
|
||||
field_left = 2,
|
||||
field_right = 3,
|
||||
};
|
||||
|
||||
static const char *const ts_field_names[] = {
|
||||
[0] = NULL,
|
||||
[field_expression] = "expression",
|
||||
[field_left] = "left",
|
||||
[field_right] = "right",
|
||||
};
|
||||
|
||||
static const TSFieldMapSlice ts_field_map_slices[PRODUCTION_ID_COUNT] = {
|
||||
[1] = {.index = 0, .length = 1},
|
||||
[2] = {.index = 1, .length = 2},
|
||||
};
|
||||
|
||||
static const TSFieldMapEntry ts_field_map_entries[] = {
|
||||
[0] = {field_expression, 1},
|
||||
[1] = {field_left, 0},
|
||||
{field_right, 2},
|
||||
};
|
||||
|
||||
static const TSSymbol ts_alias_sequences[PRODUCTION_ID_COUNT]
|
||||
[MAX_ALIAS_SEQUENCE_LENGTH] = {
|
||||
[0] = {0},
|
||||
};
|
||||
|
||||
static const uint16_t ts_non_terminal_alias_map[] = {
|
||||
0,
|
||||
};
|
||||
|
||||
static bool ts_lex(TSLexer *lexer, TSStateId state) {
|
||||
START_LEXER();
|
||||
eof = lexer->eof(lexer);
|
||||
switch (state) {
|
||||
case 0:
|
||||
if (eof)
|
||||
ADVANCE(7);
|
||||
if (lookahead == '!')
|
||||
ADVANCE(10);
|
||||
if (lookahead == '&')
|
||||
ADVANCE(2);
|
||||
if (lookahead == '(')
|
||||
ADVANCE(15);
|
||||
if (lookahead == ')')
|
||||
ADVANCE(16);
|
||||
if (lookahead == '=')
|
||||
ADVANCE(4);
|
||||
if (lookahead == '|')
|
||||
ADVANCE(5);
|
||||
if (lookahead == '\t' || lookahead == '\n' || lookahead == '\r' ||
|
||||
lookahead == ' ')
|
||||
SKIP(0)
|
||||
if (lookahead == '-' || ('0' <= lookahead && lookahead <= '9') ||
|
||||
('A' <= lookahead && lookahead <= 'Z') || lookahead == '_' ||
|
||||
('a' <= lookahead && lookahead <= 'z'))
|
||||
ADVANCE(8);
|
||||
END_STATE();
|
||||
case 1:
|
||||
if (lookahead == '!')
|
||||
ADVANCE(9);
|
||||
if (lookahead == '(')
|
||||
ADVANCE(15);
|
||||
if (lookahead == '\t' || lookahead == '\n' || lookahead == '\r' ||
|
||||
lookahead == ' ')
|
||||
SKIP(1)
|
||||
if (lookahead == '-' || ('0' <= lookahead && lookahead <= '9') ||
|
||||
('A' <= lookahead && lookahead <= 'Z') || lookahead == '_' ||
|
||||
('a' <= lookahead && lookahead <= 'z'))
|
||||
ADVANCE(8);
|
||||
END_STATE();
|
||||
case 2:
|
||||
if (lookahead == '&')
|
||||
ADVANCE(11);
|
||||
END_STATE();
|
||||
case 3:
|
||||
if (lookahead == '=')
|
||||
ADVANCE(14);
|
||||
END_STATE();
|
||||
case 4:
|
||||
if (lookahead == '=')
|
||||
ADVANCE(13);
|
||||
END_STATE();
|
||||
case 5:
|
||||
if (lookahead == '|')
|
||||
ADVANCE(12);
|
||||
END_STATE();
|
||||
case 6:
|
||||
if (eof)
|
||||
ADVANCE(7);
|
||||
if (lookahead == '!')
|
||||
ADVANCE(3);
|
||||
if (lookahead == '&')
|
||||
ADVANCE(2);
|
||||
if (lookahead == ')')
|
||||
ADVANCE(16);
|
||||
if (lookahead == '=')
|
||||
ADVANCE(4);
|
||||
if (lookahead == '|')
|
||||
ADVANCE(5);
|
||||
if (lookahead == '\t' || lookahead == '\n' || lookahead == '\r' ||
|
||||
lookahead == ' ')
|
||||
SKIP(6)
|
||||
END_STATE();
|
||||
case 7:
|
||||
ACCEPT_TOKEN(ts_builtin_sym_end);
|
||||
END_STATE();
|
||||
case 8:
|
||||
ACCEPT_TOKEN(sym_identifier);
|
||||
if (lookahead == '-' || ('0' <= lookahead && lookahead <= '9') ||
|
||||
('A' <= lookahead && lookahead <= 'Z') || lookahead == '_' ||
|
||||
('a' <= lookahead && lookahead <= 'z'))
|
||||
ADVANCE(8);
|
||||
END_STATE();
|
||||
case 9:
|
||||
ACCEPT_TOKEN(anon_sym_BANG);
|
||||
END_STATE();
|
||||
case 10:
|
||||
ACCEPT_TOKEN(anon_sym_BANG);
|
||||
if (lookahead == '=')
|
||||
ADVANCE(14);
|
||||
END_STATE();
|
||||
case 11:
|
||||
ACCEPT_TOKEN(anon_sym_AMP_AMP);
|
||||
END_STATE();
|
||||
case 12:
|
||||
ACCEPT_TOKEN(anon_sym_PIPE_PIPE);
|
||||
END_STATE();
|
||||
case 13:
|
||||
ACCEPT_TOKEN(anon_sym_EQ_EQ);
|
||||
END_STATE();
|
||||
case 14:
|
||||
ACCEPT_TOKEN(anon_sym_BANG_EQ);
|
||||
END_STATE();
|
||||
case 15:
|
||||
ACCEPT_TOKEN(anon_sym_LPAREN);
|
||||
END_STATE();
|
||||
case 16:
|
||||
ACCEPT_TOKEN(anon_sym_RPAREN);
|
||||
END_STATE();
|
||||
default:
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
static const TSLexMode ts_lex_modes[STATE_COUNT] = {
|
||||
[0] = {.lex_state = 0}, [1] = {.lex_state = 1}, [2] = {.lex_state = 1},
|
||||
[3] = {.lex_state = 1}, [4] = {.lex_state = 1}, [5] = {.lex_state = 1},
|
||||
[6] = {.lex_state = 6}, [7] = {.lex_state = 0}, [8] = {.lex_state = 0},
|
||||
[9] = {.lex_state = 0}, [10] = {.lex_state = 0}, [11] = {.lex_state = 0},
|
||||
[12] = {.lex_state = 0}, [13] = {.lex_state = 0}, [14] = {.lex_state = 0},
|
||||
[15] = {.lex_state = 0}, [16] = {.lex_state = 0}, [17] = {.lex_state = 0},
|
||||
};
|
||||
|
||||
static const uint16_t ts_parse_table[LARGE_STATE_COUNT][SYMBOL_COUNT] = {
|
||||
[0] =
|
||||
{
|
||||
[ts_builtin_sym_end] = ACTIONS(1),
|
||||
[sym_identifier] = ACTIONS(1),
|
||||
[anon_sym_BANG] = ACTIONS(1),
|
||||
[anon_sym_AMP_AMP] = ACTIONS(1),
|
||||
[anon_sym_PIPE_PIPE] = ACTIONS(1),
|
||||
[anon_sym_EQ_EQ] = ACTIONS(1),
|
||||
[anon_sym_BANG_EQ] = ACTIONS(1),
|
||||
[anon_sym_LPAREN] = ACTIONS(1),
|
||||
[anon_sym_RPAREN] = ACTIONS(1),
|
||||
},
|
||||
[1] =
|
||||
{
|
||||
[sym_source] = STATE(15),
|
||||
[sym__expression] = STATE(13),
|
||||
[sym_not] = STATE(13),
|
||||
[sym_and] = STATE(13),
|
||||
[sym_or] = STATE(13),
|
||||
[sym_equal] = STATE(13),
|
||||
[sym_not_equal] = STATE(13),
|
||||
[sym_parenthesized] = STATE(13),
|
||||
[sym_identifier] = ACTIONS(3),
|
||||
[anon_sym_BANG] = ACTIONS(5),
|
||||
[anon_sym_LPAREN] = ACTIONS(7),
|
||||
},
|
||||
[2] =
|
||||
{
|
||||
[sym__expression] = STATE(7),
|
||||
[sym_not] = STATE(7),
|
||||
[sym_and] = STATE(7),
|
||||
[sym_or] = STATE(7),
|
||||
[sym_equal] = STATE(7),
|
||||
[sym_not_equal] = STATE(7),
|
||||
[sym_parenthesized] = STATE(7),
|
||||
[sym_identifier] = ACTIONS(3),
|
||||
[anon_sym_BANG] = ACTIONS(5),
|
||||
[anon_sym_LPAREN] = ACTIONS(7),
|
||||
},
|
||||
[3] =
|
||||
{
|
||||
[sym__expression] = STATE(14),
|
||||
[sym_not] = STATE(14),
|
||||
[sym_and] = STATE(14),
|
||||
[sym_or] = STATE(14),
|
||||
[sym_equal] = STATE(14),
|
||||
[sym_not_equal] = STATE(14),
|
||||
[sym_parenthesized] = STATE(14),
|
||||
[sym_identifier] = ACTIONS(3),
|
||||
[anon_sym_BANG] = ACTIONS(5),
|
||||
[anon_sym_LPAREN] = ACTIONS(7),
|
||||
},
|
||||
[4] =
|
||||
{
|
||||
[sym__expression] = STATE(11),
|
||||
[sym_not] = STATE(11),
|
||||
[sym_and] = STATE(11),
|
||||
[sym_or] = STATE(11),
|
||||
[sym_equal] = STATE(11),
|
||||
[sym_not_equal] = STATE(11),
|
||||
[sym_parenthesized] = STATE(11),
|
||||
[sym_identifier] = ACTIONS(3),
|
||||
[anon_sym_BANG] = ACTIONS(5),
|
||||
[anon_sym_LPAREN] = ACTIONS(7),
|
||||
},
|
||||
[5] =
|
||||
{
|
||||
[sym__expression] = STATE(12),
|
||||
[sym_not] = STATE(12),
|
||||
[sym_and] = STATE(12),
|
||||
[sym_or] = STATE(12),
|
||||
[sym_equal] = STATE(12),
|
||||
[sym_not_equal] = STATE(12),
|
||||
[sym_parenthesized] = STATE(12),
|
||||
[sym_identifier] = ACTIONS(3),
|
||||
[anon_sym_BANG] = ACTIONS(5),
|
||||
[anon_sym_LPAREN] = ACTIONS(7),
|
||||
},
|
||||
};
|
||||
|
||||
static const uint16_t ts_small_parse_table[] = {
|
||||
[0] = 3,
|
||||
ACTIONS(11),
|
||||
1,
|
||||
anon_sym_EQ_EQ,
|
||||
ACTIONS(13),
|
||||
1,
|
||||
anon_sym_BANG_EQ,
|
||||
ACTIONS(9),
|
||||
4,
|
||||
ts_builtin_sym_end,
|
||||
anon_sym_AMP_AMP,
|
||||
anon_sym_PIPE_PIPE,
|
||||
anon_sym_RPAREN,
|
||||
[13] = 1,
|
||||
ACTIONS(15),
|
||||
4,
|
||||
ts_builtin_sym_end,
|
||||
anon_sym_AMP_AMP,
|
||||
anon_sym_PIPE_PIPE,
|
||||
anon_sym_RPAREN,
|
||||
[20] = 1,
|
||||
ACTIONS(17),
|
||||
4,
|
||||
ts_builtin_sym_end,
|
||||
anon_sym_AMP_AMP,
|
||||
anon_sym_PIPE_PIPE,
|
||||
anon_sym_RPAREN,
|
||||
[27] = 1,
|
||||
ACTIONS(19),
|
||||
4,
|
||||
ts_builtin_sym_end,
|
||||
anon_sym_AMP_AMP,
|
||||
anon_sym_PIPE_PIPE,
|
||||
anon_sym_RPAREN,
|
||||
[34] = 1,
|
||||
ACTIONS(21),
|
||||
4,
|
||||
ts_builtin_sym_end,
|
||||
anon_sym_AMP_AMP,
|
||||
anon_sym_PIPE_PIPE,
|
||||
anon_sym_RPAREN,
|
||||
[41] = 1,
|
||||
ACTIONS(23),
|
||||
4,
|
||||
ts_builtin_sym_end,
|
||||
anon_sym_AMP_AMP,
|
||||
anon_sym_PIPE_PIPE,
|
||||
anon_sym_RPAREN,
|
||||
[48] = 2,
|
||||
ACTIONS(27),
|
||||
1,
|
||||
anon_sym_AMP_AMP,
|
||||
ACTIONS(25),
|
||||
3,
|
||||
ts_builtin_sym_end,
|
||||
anon_sym_PIPE_PIPE,
|
||||
anon_sym_RPAREN,
|
||||
[57] = 3,
|
||||
ACTIONS(27),
|
||||
1,
|
||||
anon_sym_AMP_AMP,
|
||||
ACTIONS(29),
|
||||
1,
|
||||
ts_builtin_sym_end,
|
||||
ACTIONS(31),
|
||||
1,
|
||||
anon_sym_PIPE_PIPE,
|
||||
[67] = 3,
|
||||
ACTIONS(27),
|
||||
1,
|
||||
anon_sym_AMP_AMP,
|
||||
ACTIONS(31),
|
||||
1,
|
||||
anon_sym_PIPE_PIPE,
|
||||
ACTIONS(33),
|
||||
1,
|
||||
anon_sym_RPAREN,
|
||||
[77] = 1,
|
||||
ACTIONS(35),
|
||||
1,
|
||||
ts_builtin_sym_end,
|
||||
[81] = 1,
|
||||
ACTIONS(37),
|
||||
1,
|
||||
sym_identifier,
|
||||
[85] = 1,
|
||||
ACTIONS(39),
|
||||
1,
|
||||
sym_identifier,
|
||||
};
|
||||
|
||||
static const uint32_t ts_small_parse_table_map[] = {
|
||||
[SMALL_STATE(6)] = 0, [SMALL_STATE(7)] = 13, [SMALL_STATE(8)] = 20,
|
||||
[SMALL_STATE(9)] = 27, [SMALL_STATE(10)] = 34, [SMALL_STATE(11)] = 41,
|
||||
[SMALL_STATE(12)] = 48, [SMALL_STATE(13)] = 57, [SMALL_STATE(14)] = 67,
|
||||
[SMALL_STATE(15)] = 77, [SMALL_STATE(16)] = 81, [SMALL_STATE(17)] = 85,
|
||||
};
|
||||
|
||||
static const TSParseActionEntry ts_parse_actions[] = {
|
||||
[0] = {.entry = {.count = 0, .reusable = false}},
|
||||
[1] = {.entry = {.count = 1, .reusable = false}},
|
||||
RECOVER(),
|
||||
[3] = {.entry = {.count = 1, .reusable = true}},
|
||||
SHIFT(6),
|
||||
[5] = {.entry = {.count = 1, .reusable = true}},
|
||||
SHIFT(2),
|
||||
[7] = {.entry = {.count = 1, .reusable = true}},
|
||||
SHIFT(3),
|
||||
[9] = {.entry = {.count = 1, .reusable = true}},
|
||||
REDUCE(sym__expression, 1),
|
||||
[11] = {.entry = {.count = 1, .reusable = true}},
|
||||
SHIFT(16),
|
||||
[13] = {.entry = {.count = 1, .reusable = true}},
|
||||
SHIFT(17),
|
||||
[15] = {.entry = {.count = 1, .reusable = true}},
|
||||
REDUCE(sym_not, 2, .production_id = 1),
|
||||
[17] = {.entry = {.count = 1, .reusable = true}},
|
||||
REDUCE(sym_equal, 3, .production_id = 2),
|
||||
[19] = {.entry = {.count = 1, .reusable = true}},
|
||||
REDUCE(sym_not_equal, 3, .production_id = 2),
|
||||
[21] = {.entry = {.count = 1, .reusable = true}},
|
||||
REDUCE(sym_parenthesized, 3, .production_id = 1),
|
||||
[23] = {.entry = {.count = 1, .reusable = true}},
|
||||
REDUCE(sym_and, 3, .production_id = 2),
|
||||
[25] = {.entry = {.count = 1, .reusable = true}},
|
||||
REDUCE(sym_or, 3, .production_id = 2),
|
||||
[27] = {.entry = {.count = 1, .reusable = true}},
|
||||
SHIFT(4),
|
||||
[29] = {.entry = {.count = 1, .reusable = true}},
|
||||
REDUCE(sym_source, 1),
|
||||
[31] = {.entry = {.count = 1, .reusable = true}},
|
||||
SHIFT(5),
|
||||
[33] = {.entry = {.count = 1, .reusable = true}},
|
||||
SHIFT(10),
|
||||
[35] = {.entry = {.count = 1, .reusable = true}},
|
||||
ACCEPT_INPUT(),
|
||||
[37] = {.entry = {.count = 1, .reusable = true}},
|
||||
SHIFT(8),
|
||||
[39] = {.entry = {.count = 1, .reusable = true}},
|
||||
SHIFT(9),
|
||||
};
|
||||
|
||||
#ifdef __cplusplus
|
||||
extern "C" {
|
||||
#endif
|
||||
#ifdef _WIN32
|
||||
#define extern __declspec(dllexport)
|
||||
#endif
|
||||
|
||||
extern const TSLanguage *tree_sitter_context_predicate(void) {
|
||||
static const TSLanguage language = {
|
||||
.version = LANGUAGE_VERSION,
|
||||
.symbol_count = SYMBOL_COUNT,
|
||||
.alias_count = ALIAS_COUNT,
|
||||
.token_count = TOKEN_COUNT,
|
||||
.external_token_count = EXTERNAL_TOKEN_COUNT,
|
||||
.state_count = STATE_COUNT,
|
||||
.large_state_count = LARGE_STATE_COUNT,
|
||||
.production_id_count = PRODUCTION_ID_COUNT,
|
||||
.field_count = FIELD_COUNT,
|
||||
.max_alias_sequence_length = MAX_ALIAS_SEQUENCE_LENGTH,
|
||||
.parse_table = &ts_parse_table[0][0],
|
||||
.small_parse_table = ts_small_parse_table,
|
||||
.small_parse_table_map = ts_small_parse_table_map,
|
||||
.parse_actions = ts_parse_actions,
|
||||
.symbol_names = ts_symbol_names,
|
||||
.field_names = ts_field_names,
|
||||
.field_map_slices = ts_field_map_slices,
|
||||
.field_map_entries = ts_field_map_entries,
|
||||
.symbol_metadata = ts_symbol_metadata,
|
||||
.public_symbol_map = ts_symbol_map,
|
||||
.alias_map = ts_non_terminal_alias_map,
|
||||
.alias_sequences = &ts_alias_sequences[0][0],
|
||||
.lex_modes = ts_lex_modes,
|
||||
.lex_fn = ts_lex,
|
||||
};
|
||||
return &language;
|
||||
}
|
||||
#ifdef __cplusplus
|
||||
}
|
||||
#endif
|
|
@@ -1,223 +0,0 @@
|
|||
#ifndef TREE_SITTER_PARSER_H_
|
||||
#define TREE_SITTER_PARSER_H_
|
||||
|
||||
#ifdef __cplusplus
|
||||
extern "C" {
|
||||
#endif
|
||||
|
||||
#include <stdbool.h>
|
||||
#include <stdint.h>
|
||||
#include <stdlib.h>
|
||||
|
||||
#define ts_builtin_sym_error ((TSSymbol)-1)
|
||||
#define ts_builtin_sym_end 0
|
||||
#define TREE_SITTER_SERIALIZATION_BUFFER_SIZE 1024
|
||||
|
||||
typedef uint16_t TSStateId;
|
||||
|
||||
#ifndef TREE_SITTER_API_H_
|
||||
typedef uint16_t TSSymbol;
|
||||
typedef uint16_t TSFieldId;
|
||||
typedef struct TSLanguage TSLanguage;
|
||||
#endif
|
||||
|
||||
typedef struct {
|
||||
TSFieldId field_id;
|
||||
uint8_t child_index;
|
||||
bool inherited;
|
||||
} TSFieldMapEntry;
|
||||
|
||||
typedef struct {
|
||||
uint16_t index;
|
||||
uint16_t length;
|
||||
} TSFieldMapSlice;
|
||||
|
||||
typedef struct {
|
||||
bool visible;
|
||||
bool named;
|
||||
bool supertype;
|
||||
} TSSymbolMetadata;
|
||||
|
||||
typedef struct TSLexer TSLexer;
|
||||
|
||||
struct TSLexer {
|
||||
int32_t lookahead;
|
||||
TSSymbol result_symbol;
|
||||
void (*advance)(TSLexer *, bool);
|
||||
void (*mark_end)(TSLexer *);
|
||||
uint32_t (*get_column)(TSLexer *);
|
||||
bool (*is_at_included_range_start)(const TSLexer *);
|
||||
bool (*eof)(const TSLexer *);
|
||||
};
|
||||
|
||||
typedef enum {
|
||||
TSParseActionTypeShift,
|
||||
TSParseActionTypeReduce,
|
||||
TSParseActionTypeAccept,
|
||||
TSParseActionTypeRecover,
|
||||
} TSParseActionType;
|
||||
|
||||
typedef union {
|
||||
struct {
|
||||
uint8_t type;
|
||||
TSStateId state;
|
||||
bool extra;
|
||||
bool repetition;
|
||||
} shift;
|
||||
struct {
|
||||
uint8_t type;
|
||||
uint8_t child_count;
|
||||
TSSymbol symbol;
|
||||
int16_t dynamic_precedence;
|
||||
uint16_t production_id;
|
||||
} reduce;
|
||||
uint8_t type;
|
||||
} TSParseAction;
|
||||
|
||||
typedef struct {
|
||||
uint16_t lex_state;
|
||||
uint16_t external_lex_state;
|
||||
} TSLexMode;
|
||||
|
||||
typedef union {
|
||||
TSParseAction action;
|
||||
struct {
|
||||
uint8_t count;
|
||||
bool reusable;
|
||||
} entry;
|
||||
} TSParseActionEntry;
|
||||
|
||||
struct TSLanguage {
|
||||
uint32_t version;
|
||||
uint32_t symbol_count;
|
||||
uint32_t alias_count;
|
||||
uint32_t token_count;
|
||||
uint32_t external_token_count;
|
||||
uint32_t state_count;
|
||||
uint32_t large_state_count;
|
||||
uint32_t production_id_count;
|
||||
uint32_t field_count;
|
||||
uint16_t max_alias_sequence_length;
|
||||
const uint16_t *parse_table;
|
||||
const uint16_t *small_parse_table;
|
||||
const uint32_t *small_parse_table_map;
|
||||
const TSParseActionEntry *parse_actions;
|
||||
const char * const *symbol_names;
|
||||
const char * const *field_names;
|
||||
const TSFieldMapSlice *field_map_slices;
|
||||
const TSFieldMapEntry *field_map_entries;
|
||||
const TSSymbolMetadata *symbol_metadata;
|
||||
const TSSymbol *public_symbol_map;
|
||||
const uint16_t *alias_map;
|
||||
const TSSymbol *alias_sequences;
|
||||
const TSLexMode *lex_modes;
|
||||
bool (*lex_fn)(TSLexer *, TSStateId);
|
||||
bool (*keyword_lex_fn)(TSLexer *, TSStateId);
|
||||
TSSymbol keyword_capture_token;
|
||||
struct {
|
||||
const bool *states;
|
||||
const TSSymbol *symbol_map;
|
||||
void *(*create)(void);
|
||||
void (*destroy)(void *);
|
||||
bool (*scan)(void *, TSLexer *, const bool *symbol_whitelist);
|
||||
unsigned (*serialize)(void *, char *);
|
||||
void (*deserialize)(void *, const char *, unsigned);
|
||||
} external_scanner;
|
||||
};
|
||||
|
||||
/*
|
||||
* Lexer Macros
|
||||
*/
|
||||
|
||||
#define START_LEXER() \
|
||||
bool result = false; \
|
||||
bool skip = false; \
|
||||
bool eof = false; \
|
||||
int32_t lookahead; \
|
||||
goto start; \
|
||||
next_state: \
|
||||
lexer->advance(lexer, skip); \
|
||||
start: \
|
||||
skip = false; \
|
||||
lookahead = lexer->lookahead;
|
||||
|
||||
#define ADVANCE(state_value) \
|
||||
{ \
|
||||
state = state_value; \
|
||||
goto next_state; \
|
||||
}
|
||||
|
||||
#define SKIP(state_value) \
|
||||
{ \
|
||||
skip = true; \
|
||||
state = state_value; \
|
||||
goto next_state; \
|
||||
}
|
||||
|
||||
#define ACCEPT_TOKEN(symbol_value) \
|
||||
result = true; \
|
||||
lexer->result_symbol = symbol_value; \
|
||||
lexer->mark_end(lexer);
|
||||
|
||||
#define END_STATE() return result;
|
||||
|
||||
/*
|
||||
* Parse Table Macros
|
||||
*/
|
||||
|
||||
#define SMALL_STATE(id) id - LARGE_STATE_COUNT
|
||||
|
||||
#define STATE(id) id
|
||||
|
||||
#define ACTIONS(id) id
|
||||
|
||||
#define SHIFT(state_value) \
|
||||
{{ \
|
||||
.shift = { \
|
||||
.type = TSParseActionTypeShift, \
|
||||
.state = state_value \
|
||||
} \
|
||||
}}
|
||||
|
||||
#define SHIFT_REPEAT(state_value) \
|
||||
{{ \
|
||||
.shift = { \
|
||||
.type = TSParseActionTypeShift, \
|
||||
.state = state_value, \
|
||||
.repetition = true \
|
||||
} \
|
||||
}}
|
||||
|
||||
#define SHIFT_EXTRA() \
|
||||
{{ \
|
||||
.shift = { \
|
||||
.type = TSParseActionTypeShift, \
|
||||
.extra = true \
|
||||
} \
|
||||
}}
|
||||
|
||||
#define REDUCE(symbol_val, child_count_val, ...) \
|
||||
{{ \
|
||||
.reduce = { \
|
||||
.type = TSParseActionTypeReduce, \
|
||||
.symbol = symbol_val, \
|
||||
.child_count = child_count_val, \
|
||||
__VA_ARGS__ \
|
||||
}, \
|
||||
}}
|
||||
|
||||
#define RECOVER() \
|
||||
{{ \
|
||||
.type = TSParseActionTypeRecover \
|
||||
}}
|
||||
|
||||
#define ACCEPT_INPUT() \
|
||||
{{ \
|
||||
.type = TSParseActionTypeAccept \
|
||||
}}
|
||||
|
||||
#ifdef __cplusplus
|
||||
}
|
||||
#endif
|
||||
|
||||
#endif // TREE_SITTER_PARSER_H_
|
|
@@ -989,7 +989,7 @@ impl MutableAppContext {
        window.toggle_full_screen();
    }

-    fn prompt(
+    pub fn prompt(
        &self,
        window_id: usize,
        level: PromptLevel,

@@ -1349,21 +1349,24 @@ impl MutableAppContext {

    /// Return keystrokes that would dispatch the given action closest to the focused view, if there are any.
    pub(crate) fn keystrokes_for_action(
-        &self,
+        &mut self,
        window_id: usize,
-        dispatch_path: &[usize],
+        view_stack: &[usize],
        action: &dyn Action,
    ) -> Option<SmallVec<[Keystroke; 2]>> {
-        for view_id in dispatch_path.iter().rev() {
+        self.keystroke_matcher.contexts.clear();
+        for view_id in view_stack.iter().rev() {
            let view = self
                .cx
                .views
                .get(&(window_id, *view_id))
                .expect("view in responder chain does not exist");
-            let keymap_context = view.keymap_context(self.as_ref());
+            self.keystroke_matcher
+                .contexts
+                .push(view.keymap_context(self.as_ref()));
            let keystrokes = self
                .keystroke_matcher
-                .keystrokes_for_action(action, &keymap_context);
+                .keystrokes_for_action(action, &self.keystroke_matcher.contexts);
            if keystrokes.is_some() {
                return keystrokes;
            }

@ -6681,7 +6684,7 @@ mod tests {
|
|||
view_3
|
||||
});
|
||||
|
||||
// This keymap's only binding dispatches an action on view 2 because that view will have
|
||||
// This binding only dispatches an action on view 2 because that view will have
|
||||
// "a" and "b" in its context, but not "c".
|
||||
cx.add_bindings(vec![Binding::new(
|
||||
"a",
|
||||
|
@ -6691,16 +6694,31 @@ mod tests {
|
|||
|
||||
cx.add_bindings(vec![Binding::new("b", Action("b".to_string()), None)]);
|
||||
|
||||
// This binding only dispatches an action on views 2 and 3, because they have
|
||||
// a parent view with a in its context
|
||||
cx.add_bindings(vec![Binding::new(
|
||||
"c",
|
||||
Action("c".to_string()),
|
||||
Some("b > c"),
|
||||
)]);
|
||||
|
||||
// This binding only dispatches an action on view 2, because it has
|
||||
// a parent view with a in its context
|
||||
cx.add_bindings(vec![Binding::new(
|
||||
"d",
|
||||
Action("d".to_string()),
|
||||
Some("a && !b > b"),
|
||||
)]);
|
||||
|
||||
let actions = Rc::new(RefCell::new(Vec::new()));
|
||||
cx.add_action({
|
||||
let actions = actions.clone();
|
||||
move |view: &mut View, action: &Action, cx| {
|
||||
if action.0 == "a" {
|
||||
actions.borrow_mut().push(format!("{} a", view.id));
|
||||
} else {
|
||||
actions
|
||||
.borrow_mut()
|
||||
.push(format!("{} {}", view.id, action.0));
|
||||
actions
|
||||
.borrow_mut()
|
||||
.push(format!("{} {}", view.id, action.0));
|
||||
|
||||
if action.0 == "b" {
|
||||
cx.propagate_action();
|
||||
}
|
||||
}
|
||||
|
@ -6714,14 +6732,20 @@ mod tests {
|
|||
});
|
||||
|
||||
cx.dispatch_keystroke(window_id, &Keystroke::parse("a").unwrap());
|
||||
|
||||
assert_eq!(&*actions.borrow(), &["2 a"]);
|
||||
|
||||
actions.borrow_mut().clear();
|
||||
|
||||
cx.dispatch_keystroke(window_id, &Keystroke::parse("b").unwrap());
|
||||
|
||||
assert_eq!(&*actions.borrow(), &["3 b", "2 b", "1 b", "global b"]);
|
||||
actions.borrow_mut().clear();
|
||||
|
||||
cx.dispatch_keystroke(window_id, &Keystroke::parse("c").unwrap());
|
||||
assert_eq!(&*actions.borrow(), &["3 c"]);
|
||||
actions.borrow_mut().clear();
|
||||
|
||||
cx.dispatch_keystroke(window_id, &Keystroke::parse("d").unwrap());
|
||||
assert_eq!(&*actions.borrow(), &["2 d"]);
|
||||
actions.borrow_mut().clear();
|
||||
}
|
||||
|
||||
#[crate::test(self)]
|
||||
|
|
|
@ -7,7 +7,7 @@ use crate::{
|
|||
platform::CursorStyle,
|
||||
scene::{
|
||||
CursorRegion, HandlerSet, MouseClick, MouseDown, MouseDownOut, MouseDrag, MouseHover,
|
||||
MouseMove, MouseScrollWheel, MouseUp, MouseUpOut,
|
||||
MouseMove, MouseMoveOut, MouseScrollWheel, MouseUp, MouseUpOut,
|
||||
},
|
||||
DebugContext, Element, ElementBox, EventContext, LayoutContext, MeasurementContext,
|
||||
MouseButton, MouseRegion, MouseState, PaintContext, RenderContext, SizeConstraint, View,
|
||||
|
@ -82,6 +82,14 @@ impl<Tag> MouseEventHandler<Tag> {
|
|||
self
|
||||
}
|
||||
|
||||
pub fn on_move_out(
|
||||
mut self,
|
||||
handler: impl Fn(MouseMoveOut, &mut EventContext) + 'static,
|
||||
) -> Self {
|
||||
self.handlers = self.handlers.on_move_out(handler);
|
||||
self
|
||||
}
|
||||
|
||||
pub fn on_down(
|
||||
mut self,
|
||||
button: MouseButton,
|
||||
|
|
|
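
For context, a minimal usage sketch of the new `on_move_out` builder method added above; only the signature shown in this diff is assumed, and the closure body is illustrative, not part of the change:

// Sketch only: `handler` is an existing MouseEventHandler<Tag>. The closure
// fires when the pointer leaves the region, mirroring on_move's shape.
let handler = handler.on_move_out(|_event: MouseMoveOut, _cx: &mut EventContext| {
    // e.g. clear a hover flag kept by the view (illustrative)
});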
@ -25,6 +25,7 @@ pub struct KeyPressed {
|
|||
impl_actions!(gpui, [KeyPressed]);
|
||||
|
||||
pub struct KeymapMatcher {
|
||||
pub contexts: Vec<KeymapContext>,
|
||||
pending_views: HashMap<usize, KeymapContext>,
|
||||
pending_keystrokes: Vec<Keystroke>,
|
||||
keymap: Keymap,
|
||||
|
@ -33,6 +34,7 @@ pub struct KeymapMatcher {
|
|||
impl KeymapMatcher {
|
||||
pub fn new(keymap: Keymap) -> Self {
|
||||
Self {
|
||||
contexts: Vec::new(),
|
||||
pending_views: Default::default(),
|
||||
pending_keystrokes: Vec::new(),
|
||||
keymap,
|
||||
|
@ -70,7 +72,7 @@ impl KeymapMatcher {
|
|||
pub fn push_keystroke(
|
||||
&mut self,
|
||||
keystroke: Keystroke,
|
||||
dispatch_path: Vec<(usize, KeymapContext)>,
|
||||
mut dispatch_path: Vec<(usize, KeymapContext)>,
|
||||
) -> MatchResult {
|
||||
let mut any_pending = false;
|
||||
let mut matched_bindings: Vec<(usize, Box<dyn Action>)> = Vec::new();
|
||||
|
@ -78,7 +80,11 @@ impl KeymapMatcher {
|
|||
let first_keystroke = self.pending_keystrokes.is_empty();
|
||||
self.pending_keystrokes.push(keystroke.clone());
|
||||
|
||||
for (view_id, context) in dispatch_path {
|
||||
self.contexts.clear();
|
||||
self.contexts
|
||||
.extend(dispatch_path.iter_mut().map(|e| std::mem::take(&mut e.1)));
|
||||
|
||||
for (i, (view_id, _)) in dispatch_path.into_iter().enumerate() {
|
||||
// Don't require pending view entry if there are no pending keystrokes
|
||||
if !first_keystroke && !self.pending_views.contains_key(&view_id) {
|
||||
continue;
|
||||
|
@ -87,14 +93,15 @@ impl KeymapMatcher {
|
|||
// If there is a previous view context, invalidate that view if it
|
||||
// has changed
|
||||
if let Some(previous_view_context) = self.pending_views.remove(&view_id) {
|
||||
if previous_view_context != context {
|
||||
if previous_view_context != self.contexts[i] {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
// Find the bindings which map the pending keystrokes and current context
|
||||
for binding in self.keymap.bindings().iter().rev() {
|
||||
match binding.match_keys_and_context(&self.pending_keystrokes, &context) {
|
||||
match binding.match_keys_and_context(&self.pending_keystrokes, &self.contexts[i..])
|
||||
{
|
||||
BindingMatchResult::Complete(mut action) => {
|
||||
// Swap in keystroke for special KeyPressed action
|
||||
if action.name() == "KeyPressed" && action.namespace() == "gpui" {
|
||||
|
@ -105,7 +112,7 @@ impl KeymapMatcher {
|
|||
matched_bindings.push((view_id, action))
|
||||
}
|
||||
BindingMatchResult::Partial => {
|
||||
self.pending_views.insert(view_id, context.clone());
|
||||
self.pending_views.insert(view_id, self.contexts[i].clone());
|
||||
any_pending = true;
|
||||
}
|
||||
_ => {}
|
||||
|
@ -129,13 +136,13 @@ impl KeymapMatcher {
|
|||
pub fn keystrokes_for_action(
|
||||
&self,
|
||||
action: &dyn Action,
|
||||
context: &KeymapContext,
|
||||
contexts: &[KeymapContext],
|
||||
) -> Option<SmallVec<[Keystroke; 2]>> {
|
||||
self.keymap
|
||||
.bindings()
|
||||
.iter()
|
||||
.rev()
|
||||
.find_map(|binding| binding.keystrokes_for_action(action, context))
|
||||
.find_map(|binding| binding.keystrokes_for_action(action, contexts))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -349,27 +356,70 @@ mod tests {
|
|||
}
|
||||
|
||||
#[test]
|
||||
fn test_context_predicate_eval() -> Result<()> {
|
||||
let predicate = KeymapContextPredicate::parse("a && b || c == d")?;
|
||||
fn test_context_predicate_eval() {
|
||||
let predicate = KeymapContextPredicate::parse("a && b || c == d").unwrap();
|
||||
|
||||
let mut context = KeymapContext::default();
|
||||
context.set.insert("a".into());
|
||||
assert!(!predicate.eval(&context));
|
||||
assert!(!predicate.eval(&[context]));
|
||||
|
||||
let mut context = KeymapContext::default();
|
||||
context.set.insert("a".into());
|
||||
context.set.insert("b".into());
|
||||
assert!(predicate.eval(&context));
|
||||
assert!(predicate.eval(&[context]));
|
||||
|
||||
context.set.remove("b");
|
||||
let mut context = KeymapContext::default();
|
||||
context.set.insert("a".into());
|
||||
context.map.insert("c".into(), "x".into());
|
||||
assert!(!predicate.eval(&context));
|
||||
assert!(!predicate.eval(&[context]));
|
||||
|
||||
let mut context = KeymapContext::default();
|
||||
context.set.insert("a".into());
|
||||
context.map.insert("c".into(), "d".into());
|
||||
assert!(predicate.eval(&context));
|
||||
assert!(predicate.eval(&[context]));
|
||||
|
||||
let predicate = KeymapContextPredicate::parse("!a")?;
|
||||
assert!(predicate.eval(&KeymapContext::default()));
|
||||
let predicate = KeymapContextPredicate::parse("!a").unwrap();
|
||||
assert!(predicate.eval(&[KeymapContext::default()]));
|
||||
}
|
||||
|
||||
Ok(())
|
||||
#[test]
|
||||
fn test_context_child_predicate_eval() {
|
||||
let predicate = KeymapContextPredicate::parse("a && b > c").unwrap();
|
||||
let contexts = [
|
||||
context_set(&["e", "f"]),
|
||||
context_set(&["c", "d"]), // match this context
|
||||
context_set(&["a", "b"]),
|
||||
];
|
||||
|
||||
assert!(!predicate.eval(&contexts[0..]));
|
||||
assert!(predicate.eval(&contexts[1..]));
|
||||
assert!(!predicate.eval(&contexts[2..]));
|
||||
|
||||
let predicate = KeymapContextPredicate::parse("a && b > c && !d > e").unwrap();
|
||||
let contexts = [
|
||||
context_set(&["f"]),
|
||||
context_set(&["e"]), // only match this context
|
||||
context_set(&["c"]),
|
||||
context_set(&["a", "b"]),
|
||||
context_set(&["e"]),
|
||||
context_set(&["c", "d"]),
|
||||
context_set(&["a", "b"]),
|
||||
];
|
||||
|
||||
assert!(!predicate.eval(&contexts[0..]));
|
||||
assert!(predicate.eval(&contexts[1..]));
|
||||
assert!(!predicate.eval(&contexts[2..]));
|
||||
assert!(!predicate.eval(&contexts[3..]));
|
||||
assert!(!predicate.eval(&contexts[4..]));
|
||||
assert!(!predicate.eval(&contexts[5..]));
|
||||
assert!(!predicate.eval(&contexts[6..]));
|
||||
|
||||
fn context_set(names: &[&str]) -> KeymapContext {
|
||||
KeymapContext {
|
||||
set: names.iter().copied().map(str::to_string).collect(),
|
||||
..Default::default()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
|
|
@ -41,24 +41,24 @@ impl Binding {
|
|||
})
|
||||
}
|
||||
|
||||
fn match_context(&self, context: &KeymapContext) -> bool {
|
||||
fn match_context(&self, contexts: &[KeymapContext]) -> bool {
|
||||
self.context_predicate
|
||||
.as_ref()
|
||||
.map(|predicate| predicate.eval(context))
|
||||
.map(|predicate| predicate.eval(contexts))
|
||||
.unwrap_or(true)
|
||||
}
|
||||
|
||||
pub fn match_keys_and_context(
|
||||
&self,
|
||||
pending_keystrokes: &Vec<Keystroke>,
|
||||
context: &KeymapContext,
|
||||
contexts: &[KeymapContext],
|
||||
) -> BindingMatchResult {
|
||||
if self
|
||||
.keystrokes
|
||||
.as_ref()
|
||||
.map(|keystrokes| keystrokes.starts_with(&pending_keystrokes))
|
||||
.unwrap_or(true)
|
||||
&& self.match_context(context)
|
||||
&& self.match_context(contexts)
|
||||
{
|
||||
// If the binding is completed, push it onto the matches list
|
||||
if self
|
||||
|
@ -79,9 +79,9 @@ impl Binding {
|
|||
pub fn keystrokes_for_action(
|
||||
&self,
|
||||
action: &dyn Action,
|
||||
context: &KeymapContext,
|
||||
contexts: &[KeymapContext],
|
||||
) -> Option<SmallVec<[Keystroke; 2]>> {
|
||||
if self.action.eq(action) && self.match_context(context) {
|
||||
if self.action.eq(action) && self.match_context(contexts) {
|
||||
self.keystrokes.clone()
|
||||
} else {
|
||||
None
|
||||
|
|
|
@ -43,7 +43,7 @@ impl Keymap {
|
|||
pub(crate) fn add_bindings<T: IntoIterator<Item = Binding>>(&mut self, bindings: T) {
|
||||
for binding in bindings {
|
||||
self.binding_indices_by_action_type
|
||||
.entry(binding.action().type_id())
|
||||
.entry(binding.action().as_any().type_id())
|
||||
.or_default()
|
||||
.push(self.bindings.len());
|
||||
self.bindings.push(binding);
|
||||
|
|
|
@ -1,11 +1,5 @@
|
|||
use anyhow::anyhow;
|
||||
|
||||
use anyhow::{anyhow, Result};
|
||||
use collections::{HashMap, HashSet};
|
||||
use tree_sitter::{Language, Node, Parser};
|
||||
|
||||
extern "C" {
|
||||
fn tree_sitter_context_predicate() -> Language;
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Default, Eq, PartialEq)]
|
||||
pub struct KeymapContext {
|
||||
|
@ -29,80 +23,25 @@ pub enum KeymapContextPredicate {
|
|||
Identifier(String),
|
||||
Equal(String, String),
|
||||
NotEqual(String, String),
|
||||
Child(Box<KeymapContextPredicate>, Box<KeymapContextPredicate>),
|
||||
Not(Box<KeymapContextPredicate>),
|
||||
And(Box<KeymapContextPredicate>, Box<KeymapContextPredicate>),
|
||||
Or(Box<KeymapContextPredicate>, Box<KeymapContextPredicate>),
|
||||
}
|
||||
|
||||
impl KeymapContextPredicate {
|
||||
pub fn parse(source: &str) -> anyhow::Result<Self> {
|
||||
let mut parser = Parser::new();
|
||||
let language = unsafe { tree_sitter_context_predicate() };
|
||||
parser.set_language(language).unwrap();
|
||||
let source = source.as_bytes();
|
||||
let tree = parser.parse(source, None).unwrap();
|
||||
Self::from_node(tree.root_node(), source)
|
||||
}
|
||||
|
||||
fn from_node(node: Node, source: &[u8]) -> anyhow::Result<Self> {
|
||||
let parse_error = "error parsing context predicate";
|
||||
let kind = node.kind();
|
||||
|
||||
match kind {
|
||||
"source" => Self::from_node(node.child(0).ok_or_else(|| anyhow!(parse_error))?, source),
|
||||
"identifier" => Ok(Self::Identifier(node.utf8_text(source)?.into())),
|
||||
"not" => {
|
||||
let child = Self::from_node(
|
||||
node.child_by_field_name("expression")
|
||||
.ok_or_else(|| anyhow!(parse_error))?,
|
||||
source,
|
||||
)?;
|
||||
Ok(Self::Not(Box::new(child)))
|
||||
}
|
||||
"and" | "or" => {
|
||||
let left = Box::new(Self::from_node(
|
||||
node.child_by_field_name("left")
|
||||
.ok_or_else(|| anyhow!(parse_error))?,
|
||||
source,
|
||||
)?);
|
||||
let right = Box::new(Self::from_node(
|
||||
node.child_by_field_name("right")
|
||||
.ok_or_else(|| anyhow!(parse_error))?,
|
||||
source,
|
||||
)?);
|
||||
if kind == "and" {
|
||||
Ok(Self::And(left, right))
|
||||
} else {
|
||||
Ok(Self::Or(left, right))
|
||||
}
|
||||
}
|
||||
"equal" | "not_equal" => {
|
||||
let left = node
|
||||
.child_by_field_name("left")
|
||||
.ok_or_else(|| anyhow!(parse_error))?
|
||||
.utf8_text(source)?
|
||||
.into();
|
||||
let right = node
|
||||
.child_by_field_name("right")
|
||||
.ok_or_else(|| anyhow!(parse_error))?
|
||||
.utf8_text(source)?
|
||||
.into();
|
||||
if kind == "equal" {
|
||||
Ok(Self::Equal(left, right))
|
||||
} else {
|
||||
Ok(Self::NotEqual(left, right))
|
||||
}
|
||||
}
|
||||
"parenthesized" => Self::from_node(
|
||||
node.child_by_field_name("expression")
|
||||
.ok_or_else(|| anyhow!(parse_error))?,
|
||||
source,
|
||||
),
|
||||
_ => Err(anyhow!(parse_error)),
|
||||
+    pub fn parse(source: &str) -> Result<Self> {
+        let source = Self::skip_whitespace(source);
+        let (predicate, rest) = Self::parse_expr(source, 0)?;
+        if let Some(next) = rest.chars().next() {
+            Err(anyhow!("unexpected character {next:?}"))
+        } else {
+            Ok(predicate)
+        }
+    }

-    pub fn eval(&self, context: &KeymapContext) -> bool {
+    pub fn eval(&self, contexts: &[KeymapContext]) -> bool {
+        let Some(context) = contexts.first() else { return false };
        match self {
            Self::Identifier(name) => context.set.contains(name.as_str()),
            Self::Equal(left, right) => context
@@ -115,9 +54,245 @@ impl KeymapContextPredicate {
                .get(left)
                .map(|value| value != right)
                .unwrap_or(true),
-            Self::Not(pred) => !pred.eval(context),
-            Self::And(left, right) => left.eval(context) && right.eval(context),
-            Self::Or(left, right) => left.eval(context) || right.eval(context),
+            Self::Not(pred) => !pred.eval(contexts),
+            Self::Child(parent, child) => parent.eval(&contexts[1..]) && child.eval(contexts),
+            Self::And(left, right) => left.eval(contexts) && right.eval(contexts),
+            Self::Or(left, right) => left.eval(contexts) || right.eval(contexts),
        }
    }
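
A minimal sketch of how parse and eval are exercised, written in the style of the module's own tests further down; the context-stack ordering (the focused view's context first, ancestors after) follows from the `Child` arm above, and "a", "b", "c" are placeholder context names, not part of the change:

// Sketch only: mirrors the module tests below.
let predicate = KeymapContextPredicate::parse("a && b > c").unwrap();

let mut parent = KeymapContext::default();
parent.set.insert("a".into());
parent.set.insert("b".into());

let mut child = KeymapContext::default();
child.set.insert("c".into());

// Matches: "c" holds in the first (deepest) context and "a && b" holds one
// level up, which is exactly what the Child arm above checks.
assert!(predicate.eval(&[child, parent.clone()]));

// With only the parent on the stack, "c" is not in the focused context and
// there is no ancestor left for "a && b", so the predicate does not match.
assert!(!predicate.eval(&[parent]));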

+    fn parse_expr(mut source: &str, min_precedence: u32) -> anyhow::Result<(Self, &str)> {
+        type Op =
+            fn(KeymapContextPredicate, KeymapContextPredicate) -> Result<KeymapContextPredicate>;
+
+        let (mut predicate, rest) = Self::parse_primary(source)?;
+        source = rest;
+
+        'parse: loop {
+            for (operator, precedence, constructor) in [
+                (">", PRECEDENCE_CHILD, Self::new_child as Op),
+                ("&&", PRECEDENCE_AND, Self::new_and as Op),
+                ("||", PRECEDENCE_OR, Self::new_or as Op),
+                ("==", PRECEDENCE_EQ, Self::new_eq as Op),
+                ("!=", PRECEDENCE_EQ, Self::new_neq as Op),
+            ] {
+                if source.starts_with(operator) && precedence >= min_precedence {
+                    source = Self::skip_whitespace(&source[operator.len()..]);
+                    let (right, rest) = Self::parse_expr(source, precedence + 1)?;
+                    predicate = constructor(predicate, right)?;
+                    source = rest;
+                    continue 'parse;
+                }
+            }
+            break;
+        }
+
+        Ok((predicate, source))
+    }
+
+    fn parse_primary(mut source: &str) -> anyhow::Result<(Self, &str)> {
+        let next = source
+            .chars()
+            .next()
+            .ok_or_else(|| anyhow!("unexpected eof"))?;
+        match next {
+            '(' => {
+                source = Self::skip_whitespace(&source[1..]);
+                let (predicate, rest) = Self::parse_expr(source, 0)?;
+                if rest.starts_with(')') {
+                    source = Self::skip_whitespace(&rest[1..]);
+                    Ok((predicate, source))
+                } else {
+                    Err(anyhow!("expected a ')'"))
+                }
+            }
+            '!' => {
+                let source = Self::skip_whitespace(&source[1..]);
+                let (predicate, source) = Self::parse_expr(&source, PRECEDENCE_NOT)?;
+                Ok((KeymapContextPredicate::Not(Box::new(predicate)), source))
+            }
+            _ if next.is_alphanumeric() || next == '_' => {
+                let len = source
+                    .find(|c: char| !(c.is_alphanumeric() || c == '_'))
+                    .unwrap_or(source.len());
+                let (identifier, rest) = source.split_at(len);
+                source = Self::skip_whitespace(rest);
+                Ok((
+                    KeymapContextPredicate::Identifier(identifier.into()),
+                    source,
+                ))
+            }
+            _ => Err(anyhow!("unexpected character {next:?}")),
+        }
+    }
+
+    fn skip_whitespace(source: &str) -> &str {
+        let len = source
+            .find(|c: char| !c.is_whitespace())
+            .unwrap_or(source.len());
+        &source[len..]
+    }
+
+    fn new_or(self, other: Self) -> Result<Self> {
+        Ok(Self::Or(Box::new(self), Box::new(other)))
+    }
+
+    fn new_and(self, other: Self) -> Result<Self> {
+        Ok(Self::And(Box::new(self), Box::new(other)))
+    }
+
+    fn new_child(self, other: Self) -> Result<Self> {
+        Ok(Self::Child(Box::new(self), Box::new(other)))
+    }
+
+    fn new_eq(self, other: Self) -> Result<Self> {
+        if let (Self::Identifier(left), Self::Identifier(right)) = (self, other) {
+            Ok(Self::Equal(left, right))
+        } else {
+            Err(anyhow!("operands must be identifiers"))
+        }
+    }
+
+    fn new_neq(self, other: Self) -> Result<Self> {
+        if let (Self::Identifier(left), Self::Identifier(right)) = (self, other) {
+            Ok(Self::NotEqual(left, right))
+        } else {
+            Err(anyhow!("operands must be identifiers"))
+        }
+    }
}

+const PRECEDENCE_CHILD: u32 = 1;
+const PRECEDENCE_OR: u32 = 2;
+const PRECEDENCE_AND: u32 = 3;
+const PRECEDENCE_EQ: u32 = 4;
+const PRECEDENCE_NOT: u32 = 5;

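Given these precedences (`>` binds loosest, `!` tightest), mixed expressions group as follows; a short sketch in the style of the parser tests below, with the second assertion restating a case from those tests:

use KeymapContextPredicate::*;

// "&&" (precedence 3) binds tighter than ">" (precedence 1), so the parent
// side of the child operator is the whole "a && b".
assert_eq!(
    KeymapContextPredicate::parse("a && b > c").unwrap(),
    Child(
        Box::new(And(
            Box::new(Identifier("a".into())),
            Box::new(Identifier("b".into()))
        )),
        Box::new(Identifier("c".into()))
    )
);

// "!" binds tighter than "&&", which binds tighter than "||" (as also
// covered by test_parse_boolean_operators below).
assert_eq!(
    KeymapContextPredicate::parse("a || !b && c").unwrap(),
    Or(
        Box::new(Identifier("a".into())),
        Box::new(And(
            Box::new(Not(Box::new(Identifier("b".into())))),
            Box::new(Identifier("c".into()))
        ))
    )
);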
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::KeymapContextPredicate::{self, *};
|
||||
|
||||
#[test]
|
||||
fn test_parse_identifiers() {
|
||||
// Identifiers
|
||||
assert_eq!(
|
||||
KeymapContextPredicate::parse("abc12").unwrap(),
|
||||
Identifier("abc12".into())
|
||||
);
|
||||
assert_eq!(
|
||||
KeymapContextPredicate::parse("_1a").unwrap(),
|
||||
Identifier("_1a".into())
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_negations() {
|
||||
assert_eq!(
|
||||
KeymapContextPredicate::parse("!abc").unwrap(),
|
||||
Not(Box::new(Identifier("abc".into())))
|
||||
);
|
||||
assert_eq!(
|
||||
KeymapContextPredicate::parse(" ! ! abc").unwrap(),
|
||||
Not(Box::new(Not(Box::new(Identifier("abc".into())))))
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_equality_operators() {
|
||||
assert_eq!(
|
||||
KeymapContextPredicate::parse("a == b").unwrap(),
|
||||
Equal("a".into(), "b".into())
|
||||
);
|
||||
assert_eq!(
|
||||
KeymapContextPredicate::parse("c!=d").unwrap(),
|
||||
NotEqual("c".into(), "d".into())
|
||||
);
|
||||
assert_eq!(
|
||||
KeymapContextPredicate::parse("c == !d")
|
||||
.unwrap_err()
|
||||
.to_string(),
|
||||
"operands must be identifiers"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_boolean_operators() {
|
||||
assert_eq!(
|
||||
KeymapContextPredicate::parse("a || b").unwrap(),
|
||||
Or(
|
||||
Box::new(Identifier("a".into())),
|
||||
Box::new(Identifier("b".into()))
|
||||
)
|
||||
);
|
||||
assert_eq!(
|
||||
KeymapContextPredicate::parse("a || !b && c").unwrap(),
|
||||
Or(
|
||||
Box::new(Identifier("a".into())),
|
||||
Box::new(And(
|
||||
Box::new(Not(Box::new(Identifier("b".into())))),
|
||||
Box::new(Identifier("c".into()))
|
||||
))
|
||||
)
|
||||
);
|
||||
assert_eq!(
|
||||
KeymapContextPredicate::parse("a && b || c&&d").unwrap(),
|
||||
Or(
|
||||
Box::new(And(
|
||||
Box::new(Identifier("a".into())),
|
||||
Box::new(Identifier("b".into()))
|
||||
)),
|
||||
Box::new(And(
|
||||
Box::new(Identifier("c".into())),
|
||||
Box::new(Identifier("d".into()))
|
||||
))
|
||||
)
|
||||
);
|
||||
assert_eq!(
|
||||
KeymapContextPredicate::parse("a == b && c || d == e && f").unwrap(),
|
||||
Or(
|
||||
Box::new(And(
|
||||
Box::new(Equal("a".into(), "b".into())),
|
||||
Box::new(Identifier("c".into()))
|
||||
)),
|
||||
Box::new(And(
|
||||
Box::new(Equal("d".into(), "e".into())),
|
||||
Box::new(Identifier("f".into()))
|
||||
))
|
||||
)
|
||||
);
|
||||
assert_eq!(
|
||||
KeymapContextPredicate::parse("a && b && c && d").unwrap(),
|
||||
And(
|
||||
Box::new(And(
|
||||
Box::new(And(
|
||||
Box::new(Identifier("a".into())),
|
||||
Box::new(Identifier("b".into()))
|
||||
)),
|
||||
Box::new(Identifier("c".into())),
|
||||
)),
|
||||
Box::new(Identifier("d".into()))
|
||||
),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_parenthesized_expressions() {
|
||||
assert_eq!(
|
||||
KeymapContextPredicate::parse("a && (b == c || d != e)").unwrap(),
|
||||
And(
|
||||
Box::new(Identifier("a".into())),
|
||||
Box::new(Or(
|
||||
Box::new(Equal("b".into(), "c".into())),
|
||||
Box::new(NotEqual("d".into(), "e".into())),
|
||||
)),
|
||||
),
|
||||
);
|
||||
assert_eq!(
|
||||
KeymapContextPredicate::parse(" ( a || b ) ").unwrap(),
|
||||
Or(
|
||||
Box::new(Identifier("a".into())),
|
||||
Box::new(Identifier("b".into())),
|
||||
)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -179,7 +179,7 @@ impl Default for Appearance {
|
|||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug)]
|
||||
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
|
||||
pub enum WindowKind {
|
||||
Normal,
|
||||
PopUp,
|
||||
|
|
|
@ -178,6 +178,21 @@ impl MouseMovedEvent {
|
|||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug, Default)]
|
||||
pub struct MouseExitedEvent {
|
||||
pub position: Vector2F,
|
||||
pub pressed_button: Option<MouseButton>,
|
||||
pub modifiers: Modifiers,
|
||||
}
|
||||
|
||||
impl Deref for MouseExitedEvent {
|
||||
type Target = Modifiers;
|
||||
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.modifiers
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub enum Event {
|
||||
KeyDown(KeyDownEvent),
|
||||
|
@ -186,6 +201,7 @@ pub enum Event {
|
|||
MouseDown(MouseButtonEvent),
|
||||
MouseUp(MouseButtonEvent),
|
||||
MouseMoved(MouseMovedEvent),
|
||||
MouseExited(MouseExitedEvent),
|
||||
ScrollWheel(ScrollWheelEvent),
|
||||
}
|
||||
|
||||
|
@ -197,6 +213,7 @@ impl Event {
|
|||
Event::ModifiersChanged { .. } => None,
|
||||
Event::MouseDown(event) | Event::MouseUp(event) => Some(event.position),
|
||||
Event::MouseMoved(event) => Some(event.position),
|
||||
Event::MouseExited(event) => Some(event.position),
|
||||
Event::ScrollWheel(event) => Some(event.position),
|
||||
}
|
||||
}
|
||||
|
|
|
@ -3,7 +3,7 @@ use crate::{
|
|||
keymap_matcher::Keystroke,
|
||||
platform::{Event, NavigationDirection},
|
||||
KeyDownEvent, KeyUpEvent, Modifiers, ModifiersChangedEvent, MouseButton, MouseButtonEvent,
|
||||
MouseMovedEvent, ScrollDelta, ScrollWheelEvent, TouchPhase,
|
||||
MouseExitedEvent, MouseMovedEvent, ScrollDelta, ScrollWheelEvent, TouchPhase,
|
||||
};
|
||||
use cocoa::{
|
||||
appkit::{NSEvent, NSEventModifierFlags, NSEventPhase, NSEventType},
|
||||
|
@ -221,6 +221,16 @@ impl Event {
|
|||
modifiers: read_modifiers(native_event),
|
||||
})
|
||||
}),
|
||||
NSEventType::NSMouseExited => window_height.map(|window_height| {
|
||||
Self::MouseExited(MouseExitedEvent {
|
||||
position: vec2f(
|
||||
native_event.locationInWindow().x as f32,
|
||||
window_height - native_event.locationInWindow().y as f32,
|
||||
),
|
||||
pressed_button: None,
|
||||
modifiers: read_modifiers(native_event),
|
||||
})
|
||||
}),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
|
|
@ -66,6 +66,8 @@ const NSNormalWindowLevel: NSInteger = 0;
|
|||
#[allow(non_upper_case_globals)]
|
||||
const NSPopUpWindowLevel: NSInteger = 101;
|
||||
#[allow(non_upper_case_globals)]
|
||||
const NSTrackingMouseEnteredAndExited: NSUInteger = 0x01;
|
||||
#[allow(non_upper_case_globals)]
|
||||
const NSTrackingMouseMoved: NSUInteger = 0x02;
|
||||
#[allow(non_upper_case_globals)]
|
||||
const NSTrackingActiveAlways: NSUInteger = 0x80;
|
||||
|
@ -170,6 +172,10 @@ unsafe fn build_classes() {
|
|||
sel!(mouseMoved:),
|
||||
handle_view_event as extern "C" fn(&Object, Sel, id),
|
||||
);
|
||||
decl.add_method(
|
||||
sel!(mouseExited:),
|
||||
handle_view_event as extern "C" fn(&Object, Sel, id),
|
||||
);
|
||||
decl.add_method(
|
||||
sel!(mouseDragged:),
|
||||
handle_view_event as extern "C" fn(&Object, Sel, id),
|
||||
|
@ -252,6 +258,11 @@ unsafe fn build_classes() {
|
|||
do_command_by_selector as extern "C" fn(&Object, Sel, Sel),
|
||||
);
|
||||
|
||||
decl.add_method(
|
||||
sel!(acceptsFirstMouse:),
|
||||
accepts_first_mouse as extern "C" fn(&Object, Sel, id) -> BOOL,
|
||||
);
|
||||
|
||||
decl.register()
|
||||
};
|
||||
}
|
||||
|
@ -317,6 +328,7 @@ enum ImeState {
|
|||
struct WindowState {
|
||||
id: usize,
|
||||
native_window: id,
|
||||
kind: WindowKind,
|
||||
event_callback: Option<Box<dyn FnMut(Event) -> bool>>,
|
||||
activate_callback: Option<Box<dyn FnMut(bool)>>,
|
||||
resize_callback: Option<Box<dyn FnMut()>>,
|
||||
|
@ -422,6 +434,7 @@ impl Window {
|
|||
let window = Self(Rc::new(RefCell::new(WindowState {
|
||||
id,
|
||||
native_window,
|
||||
kind: options.kind,
|
||||
event_callback: None,
|
||||
resize_callback: None,
|
||||
should_close_callback: None,
|
||||
|
@ -469,16 +482,6 @@ impl Window {
|
|||
native_window.setTitlebarAppearsTransparent_(YES);
|
||||
}
|
||||
|
||||
let tracking_area: id = msg_send![class!(NSTrackingArea), alloc];
|
||||
let _: () = msg_send![
|
||||
tracking_area,
|
||||
initWithRect: NSRect::new(NSPoint::new(0., 0.), NSSize::new(0., 0.))
|
||||
options: NSTrackingMouseMoved | NSTrackingActiveAlways | NSTrackingInVisibleRect
|
||||
owner: native_view
|
||||
userInfo: nil
|
||||
];
|
||||
let _: () = msg_send![native_view, addTrackingArea: tracking_area.autorelease()];
|
||||
|
||||
native_view.setAutoresizingMask_(NSViewWidthSizable | NSViewHeightSizable);
|
||||
native_view.setWantsBestResolutionOpenGLSurface_(YES);
|
||||
|
||||
|
@ -501,8 +504,25 @@ impl Window {
|
|||
}
|
||||
|
||||
match options.kind {
|
||||
WindowKind::Normal => native_window.setLevel_(NSNormalWindowLevel),
|
||||
WindowKind::Normal => {
|
||||
native_window.setLevel_(NSNormalWindowLevel);
|
||||
native_window.setAcceptsMouseMovedEvents_(YES);
|
||||
}
|
||||
WindowKind::PopUp => {
|
||||
// Use a tracking area to allow receiving MouseMoved events even when
|
||||
// the window or application aren't active, which is often the case
|
||||
// e.g. for notification windows.
|
||||
let tracking_area: id = msg_send![class!(NSTrackingArea), alloc];
|
||||
let _: () = msg_send![
|
||||
tracking_area,
|
||||
initWithRect: NSRect::new(NSPoint::new(0., 0.), NSSize::new(0., 0.))
|
||||
options: NSTrackingMouseEnteredAndExited | NSTrackingMouseMoved | NSTrackingActiveAlways | NSTrackingInVisibleRect
|
||||
owner: native_view
|
||||
userInfo: nil
|
||||
];
|
||||
let _: () =
|
||||
msg_send![native_view, addTrackingArea: tracking_area.autorelease()];
|
||||
|
||||
native_window.setLevel_(NSPopUpWindowLevel);
|
||||
let _: () = msg_send![
|
||||
native_window,
|
||||
|
@ -873,11 +893,10 @@ extern "C" fn handle_key_down(this: &Object, _: Sel, native_event: id) {
|
|||
|
||||
extern "C" fn handle_key_event(this: &Object, native_event: id, key_equivalent: bool) -> BOOL {
|
||||
let window_state = unsafe { get_window_state(this) };
|
||||
|
||||
let mut window_state_borrow = window_state.as_ref().borrow_mut();
|
||||
|
||||
let event =
|
||||
unsafe { Event::from_native(native_event, Some(window_state_borrow.content_size().y())) };
|
||||
let window_height = window_state_borrow.content_size().y();
|
||||
let event = unsafe { Event::from_native(native_event, Some(window_height)) };
|
||||
|
||||
if let Some(event) = event {
|
||||
if key_equivalent {
|
||||
|
@ -902,6 +921,7 @@ extern "C" fn handle_key_event(this: &Object, native_event: id, key_equivalent:
|
|||
function_is_held = event.keystroke.function;
|
||||
Some((event, None))
|
||||
}
|
||||
|
||||
_ => return NO,
|
||||
};
|
||||
|
||||
|
@ -968,9 +988,10 @@ extern "C" fn handle_view_event(this: &Object, _: Sel, native_event: id) {
|
|||
let window_state = unsafe { get_window_state(this) };
|
||||
let weak_window_state = Rc::downgrade(&window_state);
|
||||
let mut window_state_borrow = window_state.as_ref().borrow_mut();
|
||||
let is_active = unsafe { window_state_borrow.native_window.isKeyWindow() == YES };
|
||||
|
||||
let event =
|
||||
unsafe { Event::from_native(native_event, Some(window_state_borrow.content_size().y())) };
|
||||
let window_height = window_state_borrow.content_size().y();
|
||||
let event = unsafe { Event::from_native(native_event, Some(window_height)) };
|
||||
if let Some(event) = event {
|
||||
match &event {
|
||||
Event::MouseMoved(
|
||||
|
@ -989,12 +1010,20 @@ extern "C" fn handle_view_event(this: &Object, _: Sel, native_event: id) {
|
|||
))
|
||||
.detach();
|
||||
}
|
||||
|
||||
Event::MouseMoved(_)
|
||||
if !(is_active || window_state_borrow.kind == WindowKind::PopUp) =>
|
||||
{
|
||||
return
|
||||
}
|
||||
|
||||
Event::MouseUp(MouseButtonEvent {
|
||||
button: MouseButton::Left,
|
||||
..
|
||||
}) => {
|
||||
window_state_borrow.synthetic_drag_counter += 1;
|
||||
}
|
||||
|
||||
Event::ModifiersChanged(ModifiersChangedEvent { modifiers }) => {
|
||||
// Only raise modifiers changed event when they have actually changed
|
||||
if let Some(Event::ModifiersChanged(ModifiersChangedEvent {
|
||||
|
@ -1008,6 +1037,7 @@ extern "C" fn handle_view_event(this: &Object, _: Sel, native_event: id) {
|
|||
|
||||
window_state_borrow.previous_modifiers_changed_event = Some(event.clone());
|
||||
}
|
||||
|
||||
_ => {}
|
||||
}
|
||||
|
||||
|
@ -1404,6 +1434,18 @@ extern "C" fn view_did_change_effective_appearance(this: &Object, _: Sel) {
|
|||
}
|
||||
}
|
||||
|
||||
extern "C" fn accepts_first_mouse(this: &Object, _: Sel, _: id) -> BOOL {
|
||||
unsafe {
|
||||
let state = get_window_state(this);
|
||||
let state_borrow = state.as_ref().borrow();
|
||||
return if state_borrow.kind == WindowKind::PopUp {
|
||||
YES
|
||||
} else {
|
||||
NO
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
async fn synthetic_drag(
|
||||
window_state: Weak<RefCell<WindowState>>,
|
||||
drag_id: usize,
|
||||
|
|
|
@ -8,7 +8,7 @@ use crate::{
|
|||
platform::{CursorStyle, Event},
|
||||
scene::{
|
||||
CursorRegion, MouseClick, MouseDown, MouseDownOut, MouseDrag, MouseEvent, MouseHover,
|
||||
MouseMove, MouseScrollWheel, MouseUp, MouseUpOut, Scene,
|
||||
MouseMove, MouseMoveOut, MouseScrollWheel, MouseUp, MouseUpOut, Scene,
|
||||
},
|
||||
text_layout::TextLayoutCache,
|
||||
Action, AnyModelHandle, AnyViewHandle, AnyWeakModelHandle, AnyWeakViewHandle, Appearance,
|
||||
|
@ -156,6 +156,7 @@ impl Presenter {
|
|||
self.cursor_regions = scene.cursor_regions();
|
||||
self.mouse_regions = scene.mouse_regions();
|
||||
|
||||
// window.is_topmost for the mouse moved event's position?
|
||||
if cx.window_is_active(self.window_id) {
|
||||
if let Some(event) = self.last_mouse_moved_event.clone() {
|
||||
self.dispatch_event(event, true, cx);
|
||||
|
@ -245,8 +246,11 @@ impl Presenter {
|
|||
// -> Also updates mouse-related state
|
||||
match &event {
|
||||
Event::KeyDown(e) => return cx.dispatch_key_down(self.window_id, e),
|
||||
|
||||
Event::KeyUp(e) => return cx.dispatch_key_up(self.window_id, e),
|
||||
|
||||
Event::ModifiersChanged(e) => return cx.dispatch_modifiers_changed(self.window_id, e),
|
||||
|
||||
Event::MouseDown(e) => {
|
||||
// Click events are weird because they can be fired after a drag event.
|
||||
// MDN says that browsers handle this by starting from 'the most
|
||||
|
@ -279,6 +283,7 @@ impl Presenter {
|
|||
platform_event: e.clone(),
|
||||
}));
|
||||
}
|
||||
|
||||
Event::MouseUp(e) => {
|
||||
// NOTE: The order of event pushes is important! MouseUp events MUST be fired
|
||||
// before click events, and so the MouseUp events need to be pushed before
|
||||
|
@ -296,6 +301,7 @@ impl Presenter {
|
|||
platform_event: e.clone(),
|
||||
}));
|
||||
}
|
||||
|
||||
Event::MouseMoved(
|
||||
e @ MouseMovedEvent {
|
||||
position,
|
||||
|
@ -347,9 +353,28 @@ impl Presenter {
|
|||
platform_event: e.clone(),
|
||||
started: false,
|
||||
}));
|
||||
mouse_events.push(MouseEvent::MoveOut(MouseMoveOut {
|
||||
region: Default::default(),
|
||||
}));
|
||||
|
||||
self.last_mouse_moved_event = Some(event.clone());
|
||||
}
|
||||
|
||||
Event::MouseExited(event) => {
|
||||
// When the platform sends a MouseExited event, synthesize
|
||||
// a MouseMoved event whose position is outside the window's
|
||||
// bounds so that hover and cursor state can be updated.
|
||||
return self.dispatch_event(
|
||||
Event::MouseMoved(MouseMovedEvent {
|
||||
position: event.position,
|
||||
pressed_button: event.pressed_button,
|
||||
modifiers: event.modifiers,
|
||||
}),
|
||||
event_reused,
|
||||
cx,
|
||||
);
|
||||
}
|
||||
|
||||
Event::ScrollWheel(e) => mouse_events.push(MouseEvent::ScrollWheel(MouseScrollWheel {
|
||||
region: Default::default(),
|
||||
platform_event: e.clone(),
|
||||
|
@ -407,6 +432,7 @@ impl Presenter {
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
MouseEvent::Down(_) | MouseEvent::Up(_) => {
|
||||
for (region, _) in self.mouse_regions.iter().rev() {
|
||||
if region.bounds.contains_point(self.mouse_position) {
|
||||
|
@ -417,6 +443,7 @@ impl Presenter {
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
MouseEvent::Click(e) => {
|
||||
// Only raise click events if the released button is the same as the one stored
|
||||
if self
|
||||
|
@ -439,6 +466,7 @@ impl Presenter {
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
MouseEvent::Drag(_) => {
|
||||
for (mouse_region, _) in self.mouse_regions.iter().rev() {
|
||||
if self.clicked_region_ids.contains(&mouse_region.id()) {
|
||||
|
@ -447,7 +475,7 @@ impl Presenter {
|
|||
}
|
||||
}
|
||||
|
||||
MouseEvent::UpOut(_) | MouseEvent::DownOut(_) => {
|
||||
MouseEvent::MoveOut(_) | MouseEvent::UpOut(_) | MouseEvent::DownOut(_) => {
|
||||
for (mouse_region, _) in self.mouse_regions.iter().rev() {
|
||||
// NOT contains
|
||||
if !mouse_region.bounds.contains_point(self.mouse_position) {
|
||||
|
@ -455,6 +483,7 @@ impl Presenter {
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
_ => {
|
||||
for (mouse_region, _) in self.mouse_regions.iter().rev() {
|
||||
// Contains
|
||||
|
@ -573,7 +602,7 @@ pub struct LayoutContext<'a> {
|
|||
|
||||
impl<'a> LayoutContext<'a> {
|
||||
pub(crate) fn keystrokes_for_action(
|
||||
&self,
|
||||
&mut self,
|
||||
action: &dyn Action,
|
||||
) -> Option<SmallVec<[Keystroke; 2]>> {
|
||||
self.app
|
||||
|
|
|
@ -21,6 +21,11 @@ impl Deref for MouseMove {
|
|||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Default, Clone)]
|
||||
pub struct MouseMoveOut {
|
||||
pub region: RectF,
|
||||
}
|
||||
|
||||
#[derive(Debug, Default, Clone)]
|
||||
pub struct MouseDrag {
|
||||
pub region: RectF,
|
||||
|
@ -138,6 +143,7 @@ impl Deref for MouseScrollWheel {
|
|||
#[derive(Debug, Clone)]
|
||||
pub enum MouseEvent {
|
||||
Move(MouseMove),
|
||||
MoveOut(MouseMoveOut),
|
||||
Drag(MouseDrag),
|
||||
Hover(MouseHover),
|
||||
Down(MouseDown),
|
||||
|
@ -152,6 +158,7 @@ impl MouseEvent {
|
|||
pub fn set_region(&mut self, region: RectF) {
|
||||
match self {
|
||||
MouseEvent::Move(r) => r.region = region,
|
||||
MouseEvent::MoveOut(r) => r.region = region,
|
||||
MouseEvent::Drag(r) => r.region = region,
|
||||
MouseEvent::Hover(r) => r.region = region,
|
||||
MouseEvent::Down(r) => r.region = region,
|
||||
|
@ -168,6 +175,7 @@ impl MouseEvent {
|
|||
pub fn is_capturable(&self) -> bool {
|
||||
match self {
|
||||
MouseEvent::Move(_) => true,
|
||||
MouseEvent::MoveOut(_) => false,
|
||||
MouseEvent::Drag(_) => true,
|
||||
MouseEvent::Hover(_) => false,
|
||||
MouseEvent::Down(_) => true,
|
||||
|
@ -185,6 +193,10 @@ impl MouseEvent {
|
|||
discriminant(&MouseEvent::Move(Default::default()))
|
||||
}
|
||||
|
||||
pub fn move_out_disc() -> Discriminant<MouseEvent> {
|
||||
discriminant(&MouseEvent::MoveOut(Default::default()))
|
||||
}
|
||||
|
||||
pub fn drag_disc() -> Discriminant<MouseEvent> {
|
||||
discriminant(&MouseEvent::Drag(Default::default()))
|
||||
}
|
||||
|
@ -220,6 +232,7 @@ impl MouseEvent {
|
|||
pub fn handler_key(&self) -> HandlerKey {
|
||||
match self {
|
||||
MouseEvent::Move(_) => HandlerKey::new(Self::move_disc(), None),
|
||||
MouseEvent::MoveOut(_) => HandlerKey::new(Self::move_out_disc(), None),
|
||||
MouseEvent::Drag(e) => HandlerKey::new(Self::drag_disc(), e.pressed_button),
|
||||
MouseEvent::Hover(_) => HandlerKey::new(Self::hover_disc(), None),
|
||||
MouseEvent::Down(e) => HandlerKey::new(Self::down_disc(), Some(e.button)),
|
||||
|
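The hunk above routes each mouse event through `handler_key`, keyed on `std::mem::discriminant`, so handlers are looked up by event kind rather than by payload. A minimal, self-contained sketch of that pattern in plain Rust (not gpui's actual types; `MouseEvent` and `Handler` here are illustrative):

use std::collections::HashMap;
use std::mem::{discriminant, Discriminant};

#[derive(Debug)]
enum MouseEvent {
    Move { x: f32, y: f32 },
    MoveOut,
}

type Handler = Box<dyn Fn(&MouseEvent)>;

fn main() {
    // Register a handler for the MoveOut *kind*; the discriminant ignores the payload.
    let mut handlers: HashMap<Discriminant<MouseEvent>, Handler> = HashMap::new();
    handlers.insert(
        discriminant(&MouseEvent::MoveOut),
        Box::new(|event| println!("left a region: {event:?}")),
    );

    // Dispatch: look the incoming event up by its discriminant.
    let event = MouseEvent::MoveOut;
    if let Some(handler) = handlers.get(&discriminant(&event)) {
        handler(&event);
    }

    // An event kind with no registered handler is simply ignored.
    let unhandled = MouseEvent::Move { x: 1.0, y: 2.0 };
    assert!(handlers.get(&discriminant(&unhandled)).is_none());
}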
|
|
@ -12,7 +12,7 @@ use super::{
|
|||
MouseClick, MouseDown, MouseDownOut, MouseDrag, MouseEvent, MouseHover, MouseMove, MouseUp,
|
||||
MouseUpOut,
|
||||
},
|
||||
MouseScrollWheel,
|
||||
MouseMoveOut, MouseScrollWheel,
|
||||
};
|
||||
|
||||
#[derive(Clone)]
|
||||
|
@ -124,6 +124,14 @@ impl MouseRegion {
|
|||
self
|
||||
}
|
||||
|
||||
pub fn on_move_out(
|
||||
mut self,
|
||||
handler: impl Fn(MouseMoveOut, &mut EventContext) + 'static,
|
||||
) -> Self {
|
||||
self.handlers = self.handlers.on_move_out(handler);
|
||||
self
|
||||
}
|
||||
|
||||
pub fn on_scroll(
|
||||
mut self,
|
||||
handler: impl Fn(MouseScrollWheel, &mut EventContext) + 'static,
|
||||
|
@ -289,6 +297,23 @@ impl HandlerSet {
|
|||
self
|
||||
}
|
||||
|
||||
pub fn on_move_out(
|
||||
mut self,
|
||||
handler: impl Fn(MouseMoveOut, &mut EventContext) + 'static,
|
||||
) -> Self {
|
||||
self.insert(MouseEvent::move_out_disc(), None,
|
||||
Rc::new(move |region_event, cx| {
|
||||
if let MouseEvent::MoveOut(e) = region_event {
|
||||
handler(e, cx);
|
||||
} else {
|
||||
panic!(
|
||||
"Mouse Region Event incorrectly called with mismatched event type. Expected MouseRegionEvent::MoveOut, found {:?}",
|
||||
region_event);
|
||||
}
|
||||
}));
|
||||
self
|
||||
}
|
||||
|
||||
pub fn on_down(
|
||||
mut self,
|
||||
button: MouseButton,
|
||||
|
|
|
@ -2,6 +2,7 @@
|
|||
name = "gpui_macros"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
publish = false
|
||||
|
||||
[lib]
|
||||
path = "src/gpui_macros.rs"
|
||||
|
|
|
@ -2,6 +2,7 @@
|
|||
name = "journal"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
publish = false
|
||||
|
||||
[lib]
|
||||
path = "src/journal.rs"
|
||||
|
|
|
@ -2,6 +2,7 @@
|
|||
name = "language"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
publish = false
|
||||
|
||||
[lib]
|
||||
path = "src/language.rs"
|
||||
|
|
|
@ -9,7 +9,7 @@ use crate::{
|
|||
syntax_map::{
|
||||
SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxSnapshot, ToTreeSitterPoint,
|
||||
},
|
||||
CodeLabel, Outline,
|
||||
CodeLabel, LanguageScope, Outline,
|
||||
};
|
||||
use anyhow::{anyhow, Result};
|
||||
use clock::ReplicaId;
|
||||
|
@ -60,7 +60,7 @@ pub struct Buffer {
|
|||
git_diff_status: GitDiffStatus,
|
||||
file: Option<Arc<dyn File>>,
|
||||
saved_version: clock::Global,
|
||||
saved_version_fingerprint: String,
|
||||
saved_version_fingerprint: RopeFingerprint,
|
||||
saved_mtime: SystemTime,
|
||||
transaction_depth: usize,
|
||||
was_dirty_before_starting_transaction: Option<bool>,
|
||||
|
@ -221,7 +221,7 @@ pub trait File: Send + Sync {
|
|||
version: clock::Global,
|
||||
line_ending: LineEnding,
|
||||
cx: &mut MutableAppContext,
|
||||
) -> Task<Result<(clock::Global, String, SystemTime)>>;
|
||||
) -> Task<Result<(clock::Global, RopeFingerprint, SystemTime)>>;
|
||||
|
||||
fn as_any(&self) -> &dyn Any;
|
||||
|
||||
|
@ -238,7 +238,7 @@ pub trait LocalFile: File {
|
|||
&self,
|
||||
buffer_id: u64,
|
||||
version: &clock::Global,
|
||||
fingerprint: String,
|
||||
fingerprint: RopeFingerprint,
|
||||
line_ending: LineEnding,
|
||||
mtime: SystemTime,
|
||||
cx: &mut MutableAppContext,
|
||||
|
@ -282,6 +282,7 @@ struct AutoindentRequestEntry {
|
|||
struct IndentSuggestion {
|
||||
basis_row: u32,
|
||||
delta: Ordering,
|
||||
within_error: bool,
|
||||
}
|
||||
|
||||
struct BufferChunkHighlights<'a> {
|
||||
|
@ -385,6 +386,13 @@ impl Buffer {
|
|||
rpc::proto::LineEnding::from_i32(message.line_ending)
|
||||
.ok_or_else(|| anyhow!("missing line_ending"))?,
|
||||
));
|
||||
this.saved_version = proto::deserialize_version(message.saved_version);
|
||||
this.saved_version_fingerprint =
|
||||
proto::deserialize_fingerprint(&message.saved_version_fingerprint)?;
|
||||
this.saved_mtime = message
|
||||
.saved_mtime
|
||||
.ok_or_else(|| anyhow!("invalid saved_mtime"))?
|
||||
.into();
|
||||
Ok(this)
|
||||
}
|
||||
|
||||
|
@ -395,6 +403,9 @@ impl Buffer {
|
|||
base_text: self.base_text().to_string(),
|
||||
diff_base: self.diff_base.as_ref().map(|h| h.to_string()),
|
||||
line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
|
||||
saved_version: proto::serialize_version(&self.saved_version),
|
||||
saved_version_fingerprint: proto::serialize_fingerprint(self.saved_version_fingerprint),
|
||||
saved_mtime: Some(self.saved_mtime.into()),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -521,7 +532,7 @@ impl Buffer {
|
|||
pub fn save(
|
||||
&mut self,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) -> Task<Result<(clock::Global, String, SystemTime)>> {
|
||||
) -> Task<Result<(clock::Global, RopeFingerprint, SystemTime)>> {
|
||||
let file = if let Some(file) = self.file.as_ref() {
|
||||
file
|
||||
} else {
|
||||
|
@ -539,7 +550,7 @@ impl Buffer {
|
|||
cx.spawn(|this, mut cx| async move {
|
||||
let (version, fingerprint, mtime) = save.await?;
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.did_save(version.clone(), fingerprint.clone(), mtime, None, cx);
|
||||
this.did_save(version.clone(), fingerprint, mtime, None, cx);
|
||||
});
|
||||
Ok((version, fingerprint, mtime))
|
||||
})
|
||||
|
@ -549,6 +560,14 @@ impl Buffer {
|
|||
&self.saved_version
|
||||
}
|
||||
|
||||
pub fn saved_version_fingerprint(&self) -> RopeFingerprint {
|
||||
self.saved_version_fingerprint
|
||||
}
|
||||
|
||||
pub fn saved_mtime(&self) -> SystemTime {
|
||||
self.saved_mtime
|
||||
}
|
||||
|
||||
pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut ModelContext<Self>) {
|
||||
self.syntax_map.lock().clear();
|
||||
self.language = language;
|
||||
|
@ -564,7 +583,7 @@ impl Buffer {
|
|||
pub fn did_save(
|
||||
&mut self,
|
||||
version: clock::Global,
|
||||
fingerprint: String,
|
||||
fingerprint: RopeFingerprint,
|
||||
mtime: SystemTime,
|
||||
new_file: Option<Arc<dyn File>>,
|
||||
cx: &mut ModelContext<Self>,
|
||||
|
@ -613,7 +632,7 @@ impl Buffer {
|
|||
pub fn did_reload(
|
||||
&mut self,
|
||||
version: clock::Global,
|
||||
fingerprint: String,
|
||||
fingerprint: RopeFingerprint,
|
||||
line_ending: LineEnding,
|
||||
mtime: SystemTime,
|
||||
cx: &mut ModelContext<Self>,
|
||||
|
@ -626,7 +645,7 @@ impl Buffer {
|
|||
file.buffer_reloaded(
|
||||
self.remote_id(),
|
||||
&self.saved_version,
|
||||
self.saved_version_fingerprint.clone(),
|
||||
self.saved_version_fingerprint,
|
||||
self.line_ending(),
|
||||
self.saved_mtime,
|
||||
cx,
|
||||
|
@ -783,6 +802,29 @@ impl Buffer {
|
|||
self.sync_parse_timeout = timeout;
|
||||
}
|
||||
|
||||
/// Called after an edit to synchronize the buffer's main parse tree with
|
||||
/// the buffer's new underlying state.
|
||||
///
|
||||
/// Locks the syntax map and interpolates the edits since the last reparse
|
||||
/// into the foreground syntax tree.
|
||||
///
|
||||
/// Then takes a stable snapshot of the syntax map before unlocking it.
|
||||
/// The snapshot with the interpolated edits is sent to a background thread,
|
||||
/// where we ask Tree-sitter to perform an incremental parse.
|
||||
///
|
||||
/// Meanwhile, in the foreground, we block the main thread for up to 1ms
|
||||
/// waiting on the parse to complete. As soon as it completes, we proceed
|
||||
/// synchronously, unless a 1ms timeout elapses.
|
||||
///
|
||||
/// If we time out waiting on the parse, we spawn a second task that waits
|
||||
/// until the parse does complete, and we return with the interpolated tree
|
||||
/// still in the foreground. When the background parse completes, we call back
|
||||
/// into the main thread and assign the foreground parse state.
|
||||
///
|
||||
/// If the buffer or grammar changed since the start of the background parse,
|
||||
/// we initiate an additional reparse recursively. To avoid concurrent parses
|
||||
/// for the same buffer, we only initiate a new parse if we are not already
|
||||
/// parsing in the background.
|
||||
fn reparse(&mut self, cx: &mut ModelContext<Self>) {
|
||||
if self.parsing_in_background {
|
||||
return;
|
||||
|
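The doc comment in the hunk above describes the reparse strategy: interpolate edits into the current tree, hand a snapshot to a background parse, block the foreground for up to 1ms, and otherwise finish asynchronously. A rough, self-contained sketch of that "block briefly, then go async" shape, using plain std threads and channels rather than gpui's executor (all names here are illustrative):

use std::sync::mpsc;
use std::thread;
use std::time::Duration;

// Stand-in for the incremental Tree-sitter parse done on the background thread.
fn background_parse(text: String) -> String {
    thread::sleep(Duration::from_millis(5));
    format!("syntax tree for {} bytes", text.len())
}

fn main() {
    let (tx, rx) = mpsc::channel();
    let text = "fn main() {}".to_string();
    thread::spawn(move || {
        let _ = tx.send(background_parse(text));
    });

    // Block the foreground for up to 1ms, mirroring `sync_parse_timeout`.
    match rx.recv_timeout(Duration::from_millis(1)) {
        Ok(tree) => println!("parse finished synchronously: {tree}"),
        Err(mpsc::RecvTimeoutError::Timeout) => {
            // The real code would keep using the interpolated tree and assign
            // the background result once it arrives; here we just wait for it.
            let tree = rx.recv().expect("background parse completed");
            println!("parse finished asynchronously: {tree}");
        }
        Err(mpsc::RecvTimeoutError::Disconnected) => panic!("background parse dropped"),
    }
}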
@ -919,7 +961,7 @@ impl Buffer {
|
|||
// Build a map containing the suggested indentation for each of the edited lines
|
||||
// with respect to the state of the buffer before these edits. This map is keyed
|
||||
// by the rows for these lines in the current state of the buffer.
|
||||
let mut old_suggestions = BTreeMap::<u32, IndentSize>::default();
|
||||
let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
|
||||
let old_edited_ranges =
|
||||
contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
|
||||
let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
|
||||
|
@ -945,14 +987,17 @@ impl Buffer {
|
|||
|
||||
let suggested_indent = old_to_new_rows
|
||||
.get(&suggestion.basis_row)
|
||||
.and_then(|from_row| old_suggestions.get(from_row).copied())
|
||||
.and_then(|from_row| {
|
||||
Some(old_suggestions.get(from_row).copied()?.0)
|
||||
})
|
||||
.unwrap_or_else(|| {
|
||||
request
|
||||
.before_edit
|
||||
.indent_size_for_line(suggestion.basis_row)
|
||||
})
|
||||
.with_delta(suggestion.delta, language_indent_size);
|
||||
old_suggestions.insert(new_row, suggested_indent);
|
||||
old_suggestions
|
||||
.insert(new_row, (suggested_indent, suggestion.within_error));
|
||||
}
|
||||
}
|
||||
yield_now().await;
|
||||
|
@ -998,12 +1043,13 @@ impl Buffer {
|
|||
snapshot.indent_size_for_line(suggestion.basis_row)
|
||||
})
|
||||
.with_delta(suggestion.delta, language_indent_size);
|
||||
if old_suggestions
|
||||
.get(&new_row)
|
||||
.map_or(true, |old_indentation| {
|
||||
if old_suggestions.get(&new_row).map_or(
|
||||
true,
|
||||
|(old_indentation, was_within_error)| {
|
||||
suggested_indent != *old_indentation
|
||||
})
|
||||
{
|
||||
&& (!suggestion.within_error || *was_within_error)
|
||||
},
|
||||
) {
|
||||
indent_sizes.insert(new_row, suggested_indent);
|
||||
}
|
||||
}
|
||||
|
@ -1332,13 +1378,6 @@ impl Buffer {
|
|||
let edit_id = edit_operation.local_timestamp();
|
||||
|
||||
if let Some((before_edit, mode)) = autoindent_request {
|
||||
let (start_columns, is_block_mode) = match mode {
|
||||
AutoindentMode::Block {
|
||||
original_indent_columns: start_columns,
|
||||
} => (start_columns, true),
|
||||
AutoindentMode::EachLine => (Default::default(), false),
|
||||
};
|
||||
|
||||
let mut delta = 0isize;
|
||||
let entries = edits
|
||||
.into_iter()
|
||||
|
@ -1352,7 +1391,7 @@ impl Buffer {
|
|||
|
||||
let mut range_of_insertion_to_indent = 0..new_text_len;
|
||||
let mut first_line_is_new = false;
|
||||
let mut start_column = None;
|
||||
let mut original_indent_column = None;
|
||||
|
||||
// When inserting an entire line at the beginning of an existing line,
|
||||
// treat the insertion as new.
|
||||
|
@ -1364,14 +1403,23 @@ impl Buffer {
|
|||
|
||||
// When inserting text starting with a newline, avoid auto-indenting the
|
||||
// previous line.
|
||||
if new_text[range_of_insertion_to_indent.clone()].starts_with('\n') {
|
||||
if new_text.starts_with('\n') {
|
||||
range_of_insertion_to_indent.start += 1;
|
||||
first_line_is_new = true;
|
||||
}
|
||||
|
||||
// Avoid auto-indenting after the insertion.
|
||||
if is_block_mode {
|
||||
start_column = start_columns.get(ix).copied();
|
||||
if let AutoindentMode::Block {
|
||||
original_indent_columns,
|
||||
} = &mode
|
||||
{
|
||||
original_indent_column =
|
||||
Some(original_indent_columns.get(ix).copied().unwrap_or_else(|| {
|
||||
indent_size_for_text(
|
||||
new_text[range_of_insertion_to_indent.clone()].chars(),
|
||||
)
|
||||
.len
|
||||
}));
|
||||
if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
|
||||
range_of_insertion_to_indent.end -= 1;
|
||||
}
|
||||
|
@ -1379,7 +1427,7 @@ impl Buffer {
|
|||
|
||||
AutoindentRequestEntry {
|
||||
first_line_is_new,
|
||||
original_indent_column: start_column,
|
||||
original_indent_column,
|
||||
indent_size: before_edit.language_indent_size_at(range.start, cx),
|
||||
range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
|
||||
..self.anchor_after(new_start + range_of_insertion_to_indent.end),
|
||||
|
@ -1390,7 +1438,7 @@ impl Buffer {
|
|||
self.autoindent_requests.push(Arc::new(AutoindentRequest {
|
||||
before_edit,
|
||||
entries,
|
||||
is_block_mode,
|
||||
is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
|
||||
}));
|
||||
}
|
||||
|
||||
|
@ -1641,6 +1689,16 @@ impl Buffer {
|
|||
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
impl Buffer {
|
||||
pub fn edit_via_marked_text(
|
||||
&mut self,
|
||||
marked_string: &str,
|
||||
autoindent_mode: Option<AutoindentMode>,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) {
|
||||
let edits = self.edits_for_marked_text(marked_string);
|
||||
self.edit(edits, autoindent_mode, cx);
|
||||
}
|
||||
|
||||
pub fn set_group_interval(&mut self, group_interval: Duration) {
|
||||
self.text.set_group_interval(group_interval);
|
||||
}
|
||||
|
@ -1759,7 +1817,7 @@ impl BufferSnapshot {
|
|||
let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
|
||||
let end = Point::new(row_range.end, 0);
|
||||
let range = (start..end).to_offset(&self.text);
|
||||
let mut matches = self.syntax.matches(range, &self.text, |grammar| {
|
||||
let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
|
||||
Some(&grammar.indents_config.as_ref()?.query)
|
||||
});
|
||||
let indent_configs = matches
|
||||
|
@ -1805,6 +1863,30 @@ impl BufferSnapshot {
|
|||
}
|
||||
}
|
||||
|
||||
let mut error_ranges = Vec::<Range<Point>>::new();
|
||||
let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
|
||||
Some(&grammar.error_query)
|
||||
});
|
||||
while let Some(mat) = matches.peek() {
|
||||
let node = mat.captures[0].node;
|
||||
let start = Point::from_ts_point(node.start_position());
|
||||
let end = Point::from_ts_point(node.end_position());
|
||||
let range = start..end;
|
||||
let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
|
||||
Ok(ix) | Err(ix) => ix,
|
||||
};
|
||||
let mut end_ix = ix;
|
||||
while let Some(existing_range) = error_ranges.get(end_ix) {
|
||||
if existing_range.end < end {
|
||||
end_ix += 1;
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
error_ranges.splice(ix..end_ix, [range]);
|
||||
matches.advance();
|
||||
}
|
||||
|
||||
outdent_positions.sort();
|
||||
for outdent_position in outdent_positions {
|
||||
// find the innermost indent range containing this outdent_position
|
||||
|
@ -1882,33 +1964,42 @@ impl BufferSnapshot {
|
|||
}
|
||||
}
|
||||
|
||||
let within_error = error_ranges
|
||||
.iter()
|
||||
.any(|e| e.start.row < row && e.end > row_start);
|
||||
|
||||
let suggestion = if outdent_to_row == prev_row
|
||||
|| (outdent_from_prev_row && indent_from_prev_row)
|
||||
{
|
||||
Some(IndentSuggestion {
|
||||
basis_row: prev_row,
|
||||
delta: Ordering::Equal,
|
||||
within_error,
|
||||
})
|
||||
} else if indent_from_prev_row {
|
||||
Some(IndentSuggestion {
|
||||
basis_row: prev_row,
|
||||
delta: Ordering::Greater,
|
||||
within_error,
|
||||
})
|
||||
} else if outdent_to_row < prev_row {
|
||||
Some(IndentSuggestion {
|
||||
basis_row: outdent_to_row,
|
||||
delta: Ordering::Equal,
|
||||
within_error,
|
||||
})
|
||||
} else if outdent_from_prev_row {
|
||||
Some(IndentSuggestion {
|
||||
basis_row: prev_row,
|
||||
delta: Ordering::Less,
|
||||
within_error,
|
||||
})
|
||||
} else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
|
||||
{
|
||||
Some(IndentSuggestion {
|
||||
basis_row: prev_row,
|
||||
delta: Ordering::Equal,
|
||||
within_error,
|
||||
})
|
||||
} else {
|
||||
None
|
||||
|
@ -1995,6 +2086,27 @@ impl BufferSnapshot {
|
|||
.or(self.language.as_ref())
|
||||
}
|
||||
|
||||
pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
|
||||
let offset = position.to_offset(self);
|
||||
|
||||
if let Some(layer_info) = self
|
||||
.syntax
|
||||
.layers_for_range(offset..offset, &self.text)
|
||||
.filter(|l| l.node.end_byte() > offset)
|
||||
.last()
|
||||
{
|
||||
Some(LanguageScope {
|
||||
language: layer_info.language.clone(),
|
||||
override_id: layer_info.override_id(offset, &self.text),
|
||||
})
|
||||
} else {
|
||||
self.language.clone().map(|language| LanguageScope {
|
||||
language,
|
||||
override_id: None,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
pub fn surrounding_word<T: ToOffset>(&self, start: T) -> (Range<usize>, Option<CharKind>) {
|
||||
let mut start = start.to_offset(self);
|
||||
let mut end = start;
|
||||
|
@ -2149,8 +2261,6 @@ impl BufferSnapshot {
|
|||
continue;
|
||||
}
|
||||
|
||||
// TODO - move later, after processing captures
|
||||
|
||||
let mut text = String::new();
|
||||
let mut name_ranges = Vec::new();
|
||||
let mut highlight_ranges = Vec::new();
|
||||
|
@ -2164,7 +2274,13 @@ impl BufferSnapshot {
|
|||
continue;
|
||||
}
|
||||
|
||||
let range = capture.node.start_byte()..capture.node.end_byte();
|
||||
let mut range = capture.node.start_byte()..capture.node.end_byte();
|
||||
let start = capture.node.start_position();
|
||||
if capture.node.end_position().row > start.row {
|
||||
range.end =
|
||||
range.start + self.line_len(start.row as u32) as usize - start.column;
|
||||
}
|
||||
|
||||
if !text.is_empty() {
|
||||
text.push(' ');
|
||||
}
|
||||
|
@ -2397,7 +2513,7 @@ impl BufferSnapshot {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
|
||||
fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
|
||||
indent_size_for_text(text.chars_at(Point::new(row, 0)))
|
||||
}
|
||||
|
||||
|
|
|
@ -455,6 +455,32 @@ async fn test_outline(cx: &mut gpui::TestAppContext) {
|
|||
}
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_outline_nodes_with_newlines(cx: &mut gpui::TestAppContext) {
|
||||
let text = r#"
|
||||
impl A for B<
|
||||
C
|
||||
> {
|
||||
};
|
||||
"#
|
||||
.unindent();
|
||||
|
||||
let buffer =
|
||||
cx.add_model(|cx| Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx));
|
||||
let outline = buffer
|
||||
.read_with(cx, |buffer, _| buffer.snapshot().outline(None))
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(
|
||||
outline
|
||||
.items
|
||||
.iter()
|
||||
.map(|item| (item.text.as_str(), item.depth))
|
||||
.collect::<Vec<_>>(),
|
||||
&[("impl A for B<", 0)]
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_symbols_containing(cx: &mut gpui::TestAppContext) {
|
||||
let text = r#"
|
||||
|
@ -774,23 +800,29 @@ fn test_autoindent_does_not_adjust_lines_with_unchanged_suggestion(cx: &mut Muta
|
|||
cx.set_global(settings);
|
||||
|
||||
cx.add_model(|cx| {
|
||||
let text = "
|
||||
let mut buffer = Buffer::new(
|
||||
0,
|
||||
"
|
||||
fn a() {
|
||||
c;
|
||||
d;
|
||||
}
|
||||
"
|
||||
.unindent();
|
||||
|
||||
let mut buffer = Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx);
|
||||
"
|
||||
.unindent(),
|
||||
cx,
|
||||
)
|
||||
.with_language(Arc::new(rust_lang()), cx);
|
||||
|
||||
// Lines 2 and 3 don't match the indentation suggestion. When editing these lines,
|
||||
// their indentation is not adjusted.
|
||||
buffer.edit(
|
||||
[
|
||||
(empty(Point::new(1, 1)), "()"),
|
||||
(empty(Point::new(2, 1)), "()"),
|
||||
],
|
||||
buffer.edit_via_marked_text(
|
||||
&"
|
||||
fn a() {
|
||||
c«()»;
|
||||
d«()»;
|
||||
}
|
||||
"
|
||||
.unindent(),
|
||||
Some(AutoindentMode::EachLine),
|
||||
cx,
|
||||
);
|
||||
|
@ -807,14 +839,22 @@ fn test_autoindent_does_not_adjust_lines_with_unchanged_suggestion(cx: &mut Muta
|
|||
|
||||
// When appending new content after these lines, the indentation is based on the
|
||||
// preceding lines' actual indentation.
|
||||
buffer.edit(
|
||||
[
|
||||
(empty(Point::new(1, 1)), "\n.f\n.g"),
|
||||
(empty(Point::new(2, 1)), "\n.f\n.g"),
|
||||
],
|
||||
buffer.edit_via_marked_text(
|
||||
&"
|
||||
fn a() {
|
||||
c«
|
||||
.f
|
||||
.g()»;
|
||||
d«
|
||||
.f
|
||||
.g()»;
|
||||
}
|
||||
"
|
||||
.unindent(),
|
||||
Some(AutoindentMode::EachLine),
|
||||
cx,
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
buffer.text(),
|
||||
"
|
||||
|
@ -833,20 +873,27 @@ fn test_autoindent_does_not_adjust_lines_with_unchanged_suggestion(cx: &mut Muta
|
|||
});
|
||||
|
||||
cx.add_model(|cx| {
|
||||
let text = "
|
||||
let mut buffer = Buffer::new(
|
||||
0,
|
||||
"
|
||||
fn a() {
|
||||
{
|
||||
b()?
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
"
|
||||
.unindent();
|
||||
let mut buffer = Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx);
|
||||
b();
|
||||
|
|
||||
"
|
||||
.replace("|", "") // marker to preserve trailing whitespace
|
||||
.unindent(),
|
||||
cx,
|
||||
)
|
||||
.with_language(Arc::new(rust_lang()), cx);
|
||||
|
||||
// Deleting a closing curly brace changes the suggested indent for the line.
|
||||
buffer.edit(
|
||||
[(Point::new(3, 4)..Point::new(3, 5), "")],
|
||||
// Insert a closing brace. It is outdented.
|
||||
buffer.edit_via_marked_text(
|
||||
&"
|
||||
fn a() {
|
||||
b();
|
||||
«}»
|
||||
"
|
||||
.unindent(),
|
||||
Some(AutoindentMode::EachLine),
|
||||
cx,
|
||||
);
|
||||
|
@ -854,19 +901,20 @@ fn test_autoindent_does_not_adjust_lines_with_unchanged_suggestion(cx: &mut Muta
|
|||
buffer.text(),
|
||||
"
|
||||
fn a() {
|
||||
{
|
||||
b()?
|
||||
|
|
||||
Ok(())
|
||||
b();
|
||||
}
|
||||
"
|
||||
.replace('|', "") // included in the string to preserve trailing whitespace
|
||||
.unindent()
|
||||
);
|
||||
|
||||
// Manually editing the leading whitespace
|
||||
buffer.edit(
|
||||
[(Point::new(3, 0)..Point::new(3, 12), "")],
|
||||
// Manually edit the leading whitespace. The edit is preserved.
|
||||
buffer.edit_via_marked_text(
|
||||
&"
|
||||
fn a() {
|
||||
b();
|
||||
« »}
|
||||
"
|
||||
.unindent(),
|
||||
Some(AutoindentMode::EachLine),
|
||||
cx,
|
||||
);
|
||||
|
@ -874,11 +922,8 @@ fn test_autoindent_does_not_adjust_lines_with_unchanged_suggestion(cx: &mut Muta
|
|||
buffer.text(),
|
||||
"
|
||||
fn a() {
|
||||
{
|
||||
b()?
|
||||
|
||||
Ok(())
|
||||
}
|
||||
b();
|
||||
}
|
||||
"
|
||||
.unindent()
|
||||
);
|
||||
|
@ -887,30 +932,108 @@ fn test_autoindent_does_not_adjust_lines_with_unchanged_suggestion(cx: &mut Muta
|
|||
}
|
||||
|
||||
#[gpui::test]
|
||||
fn test_autoindent_adjusts_lines_when_only_text_changes(cx: &mut MutableAppContext) {
|
||||
cx.set_global(Settings::test(cx));
|
||||
fn test_autoindent_does_not_adjust_lines_within_newly_created_errors(cx: &mut MutableAppContext) {
|
||||
let settings = Settings::test(cx);
|
||||
cx.set_global(settings);
|
||||
|
||||
cx.add_model(|cx| {
|
||||
let text = "
|
||||
fn a() {}
|
||||
"
|
||||
.unindent();
|
||||
let mut buffer = Buffer::new(
|
||||
0,
|
||||
"
|
||||
fn a() {
|
||||
i
|
||||
}
|
||||
"
|
||||
.unindent(),
|
||||
cx,
|
||||
)
|
||||
.with_language(Arc::new(rust_lang()), cx);
|
||||
|
||||
let mut buffer = Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx);
|
||||
|
||||
buffer.edit([(5..5, "\nb")], Some(AutoindentMode::EachLine), cx);
|
||||
// Regression test: line does not get outdented due to syntax error
|
||||
buffer.edit_via_marked_text(
|
||||
&"
|
||||
fn a() {
|
||||
i«f let Some(x) = y»
|
||||
}
|
||||
"
|
||||
.unindent(),
|
||||
Some(AutoindentMode::EachLine),
|
||||
cx,
|
||||
);
|
||||
assert_eq!(
|
||||
buffer.text(),
|
||||
"
|
||||
fn a(
|
||||
b) {}
|
||||
fn a() {
|
||||
if let Some(x) = y
|
||||
}
|
||||
"
|
||||
.unindent()
|
||||
);
|
||||
|
||||
buffer.edit_via_marked_text(
|
||||
&"
|
||||
fn a() {
|
||||
if let Some(x) = y« {»
|
||||
}
|
||||
"
|
||||
.unindent(),
|
||||
Some(AutoindentMode::EachLine),
|
||||
cx,
|
||||
);
|
||||
assert_eq!(
|
||||
buffer.text(),
|
||||
"
|
||||
fn a() {
|
||||
if let Some(x) = y {
|
||||
}
|
||||
"
|
||||
.unindent()
|
||||
);
|
||||
|
||||
buffer
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
fn test_autoindent_adjusts_lines_when_only_text_changes(cx: &mut MutableAppContext) {
|
||||
cx.set_global(Settings::test(cx));
|
||||
cx.add_model(|cx| {
|
||||
let mut buffer = Buffer::new(
|
||||
0,
|
||||
"
|
||||
fn a() {}
|
||||
"
|
||||
.unindent(),
|
||||
cx,
|
||||
)
|
||||
.with_language(Arc::new(rust_lang()), cx);
|
||||
|
||||
buffer.edit_via_marked_text(
|
||||
&"
|
||||
fn a(«
|
||||
b») {}
|
||||
"
|
||||
.unindent(),
|
||||
Some(AutoindentMode::EachLine),
|
||||
cx,
|
||||
);
|
||||
assert_eq!(
|
||||
buffer.text(),
|
||||
"
|
||||
fn a(
|
||||
b) {}
|
||||
"
|
||||
.unindent()
|
||||
);
|
||||
|
||||
// The indentation suggestion changed because the `@end` node (a close paren)
|
||||
// is now at the beginning of the line.
|
||||
buffer.edit(
|
||||
[(Point::new(1, 4)..Point::new(1, 5), "")],
|
||||
buffer.edit_via_marked_text(
|
||||
&"
|
||||
fn a(
|
||||
ˇ) {}
|
||||
"
|
||||
.unindent(),
|
||||
Some(AutoindentMode::EachLine),
|
||||
cx,
|
||||
);
|
||||
|
@ -995,12 +1118,17 @@ fn test_autoindent_block_mode(cx: &mut MutableAppContext) {
|
|||
.unindent();
|
||||
let mut buffer = Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx);
|
||||
|
||||
// When this text was copied, both of the quotation marks were at the same
|
||||
// indent level, but the indentation of the first line was not included in
|
||||
// the copied text. This information is retained in the
|
||||
// 'original_indent_columns' vector.
|
||||
let original_indent_columns = vec![4];
|
||||
let inserted_text = r#"
|
||||
"
|
||||
c
|
||||
d
|
||||
e
|
||||
"
|
||||
c
|
||||
d
|
||||
e
|
||||
"
|
||||
"#
|
||||
.unindent();
|
||||
|
||||
|
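The comment in the hunk above explains that block-mode autoindent keeps the pasted lines' indentation relative to each other, shifting the whole block by the difference between the indent column the first line originally had (`original_indent_columns`) and the indent of the paste location. A simplified sketch of that shift (not the buffer's actual implementation; `block_adjust` is a made-up helper):

// Shift every non-blank line of `pasted` by the difference between the column
// the first line was copied from and the column it is being pasted at.
fn block_adjust(pasted: &str, original_indent_column: usize, target_indent_column: usize) -> String {
    pasted
        .lines()
        .map(|line| {
            if line.trim().is_empty() {
                line.to_string()
            } else if target_indent_column >= original_indent_column {
                let extra = target_indent_column - original_indent_column;
                format!("{}{}", " ".repeat(extra), line)
            } else {
                // Outdent, but never strip more than the line's own leading spaces.
                let leading = line.len() - line.trim_start_matches(' ').len();
                let strip = (original_indent_column - target_indent_column).min(leading);
                line[strip..].to_string()
            }
        })
        .collect::<Vec<_>>()
        .join("\n")
}

fn main() {
    // Copied from column 4, pasted where the suggested indent is column 8:
    // every line of the block moves right by 4 columns.
    let pasted = "\"\n    c\n    d\n    e\n\"";
    println!("{}", block_adjust(pasted, 4, 8));
}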
@ -1009,7 +1137,7 @@ fn test_autoindent_block_mode(cx: &mut MutableAppContext) {
|
|||
buffer.edit(
|
||||
[(Point::new(2, 0)..Point::new(2, 0), inserted_text.clone())],
|
||||
Some(AutoindentMode::Block {
|
||||
original_indent_columns: vec![0],
|
||||
original_indent_columns: original_indent_columns.clone(),
|
||||
}),
|
||||
cx,
|
||||
);
|
||||
|
@ -1037,7 +1165,7 @@ fn test_autoindent_block_mode(cx: &mut MutableAppContext) {
|
|||
buffer.edit(
|
||||
[(Point::new(2, 8)..Point::new(2, 8), inserted_text)],
|
||||
Some(AutoindentMode::Block {
|
||||
original_indent_columns: vec![0],
|
||||
original_indent_columns: original_indent_columns.clone(),
|
||||
}),
|
||||
cx,
|
||||
);
|
||||
|
@ -1060,6 +1188,84 @@ fn test_autoindent_block_mode(cx: &mut MutableAppContext) {
|
|||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
fn test_autoindent_block_mode_without_original_indent_columns(cx: &mut MutableAppContext) {
|
||||
cx.set_global(Settings::test(cx));
|
||||
cx.add_model(|cx| {
|
||||
let text = r#"
|
||||
fn a() {
|
||||
if b() {
|
||||
|
||||
}
|
||||
}
|
||||
"#
|
||||
.unindent();
|
||||
let mut buffer = Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx);
|
||||
|
||||
// The original indent columns are not known, so this text is
|
||||
// auto-indented in a block as if the first line was copied in
|
||||
// its entirety.
|
||||
let original_indent_columns = Vec::new();
|
||||
let inserted_text = " c\n .d()\n .e();";
|
||||
|
||||
// Insert the block at column zero. The entire block is indented
|
||||
// so that the first line matches the previous line's indentation.
|
||||
buffer.edit(
|
||||
[(Point::new(2, 0)..Point::new(2, 0), inserted_text.clone())],
|
||||
Some(AutoindentMode::Block {
|
||||
original_indent_columns: original_indent_columns.clone(),
|
||||
}),
|
||||
cx,
|
||||
);
|
||||
assert_eq!(
|
||||
buffer.text(),
|
||||
r#"
|
||||
fn a() {
|
||||
if b() {
|
||||
c
|
||||
.d()
|
||||
.e();
|
||||
}
|
||||
}
|
||||
"#
|
||||
.unindent()
|
||||
);
|
||||
|
||||
// Grouping is disabled in tests, so we need 2 undos
|
||||
buffer.undo(cx); // Undo the auto-indent
|
||||
buffer.undo(cx); // Undo the original edit
|
||||
|
||||
// Insert the block at a deeper indent level. The entire block is outdented.
|
||||
buffer.edit(
|
||||
[(Point::new(2, 0)..Point::new(2, 0), " ".repeat(12))],
|
||||
None,
|
||||
cx,
|
||||
);
|
||||
buffer.edit(
|
||||
[(Point::new(2, 12)..Point::new(2, 12), inserted_text)],
|
||||
Some(AutoindentMode::Block {
|
||||
original_indent_columns: Vec::new(),
|
||||
}),
|
||||
cx,
|
||||
);
|
||||
assert_eq!(
|
||||
buffer.text(),
|
||||
r#"
|
||||
fn a() {
|
||||
if b() {
|
||||
c
|
||||
.d()
|
||||
.e();
|
||||
}
|
||||
}
|
||||
"#
|
||||
.unindent()
|
||||
);
|
||||
|
||||
buffer
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
fn test_autoindent_language_without_indents_query(cx: &mut MutableAppContext) {
|
||||
cx.set_global(Settings::test(cx));
|
||||
|
@ -1260,6 +1466,89 @@ fn test_autoindent_query_with_outdent_captures(cx: &mut MutableAppContext) {
|
|||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
fn test_language_config_at(cx: &mut MutableAppContext) {
|
||||
cx.set_global(Settings::test(cx));
|
||||
cx.add_model(|cx| {
|
||||
let language = Language::new(
|
||||
LanguageConfig {
|
||||
name: "JavaScript".into(),
|
||||
line_comment: Some("// ".into()),
|
||||
brackets: vec![
|
||||
BracketPair {
|
||||
start: "{".into(),
|
||||
end: "}".into(),
|
||||
close: true,
|
||||
newline: false,
|
||||
},
|
||||
BracketPair {
|
||||
start: "'".into(),
|
||||
end: "'".into(),
|
||||
close: true,
|
||||
newline: false,
|
||||
},
|
||||
],
|
||||
overrides: [
|
||||
(
|
||||
"element".into(),
|
||||
LanguageConfigOverride {
|
||||
line_comment: Override::Remove { remove: true },
|
||||
block_comment: Override::Set(("{/*".into(), "*/}".into())),
|
||||
..Default::default()
|
||||
},
|
||||
),
|
||||
(
|
||||
"string".into(),
|
||||
LanguageConfigOverride {
|
||||
brackets: Override::Set(vec![BracketPair {
|
||||
start: "{".into(),
|
||||
end: "}".into(),
|
||||
close: true,
|
||||
newline: false,
|
||||
}]),
|
||||
..Default::default()
|
||||
},
|
||||
),
|
||||
]
|
||||
.into_iter()
|
||||
.collect(),
|
||||
..Default::default()
|
||||
},
|
||||
Some(tree_sitter_javascript::language()),
|
||||
)
|
||||
.with_override_query(
|
||||
r#"
|
||||
(jsx_element) @element
|
||||
(string) @string
|
||||
"#,
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
let text = r#"a["b"] = <C d="e"></C>;"#;
|
||||
|
||||
let buffer = Buffer::new(0, text, cx).with_language(Arc::new(language), cx);
|
||||
let snapshot = buffer.snapshot();
|
||||
|
||||
let config = snapshot.language_scope_at(0).unwrap();
|
||||
assert_eq!(config.line_comment_prefix().unwrap().as_ref(), "// ");
|
||||
assert_eq!(config.brackets().len(), 2);
|
||||
|
||||
let string_config = snapshot.language_scope_at(3).unwrap();
|
||||
assert_eq!(string_config.line_comment_prefix().unwrap().as_ref(), "// ");
|
||||
assert_eq!(string_config.brackets().len(), 1);
|
||||
|
||||
let element_config = snapshot.language_scope_at(10).unwrap();
|
||||
assert_eq!(element_config.line_comment_prefix(), None);
|
||||
assert_eq!(
|
||||
element_config.block_comment_delimiters(),
|
||||
Some((&"{/*".into(), &"*/}".into()))
|
||||
);
|
||||
assert_eq!(element_config.brackets().len(), 2);
|
||||
|
||||
buffer
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
fn test_serialization(cx: &mut gpui::MutableAppContext) {
|
||||
let mut now = Instant::now();
|
||||
|
@ -1702,7 +1991,3 @@ fn get_tree_sexp(buffer: &ModelHandle<Buffer>, cx: &gpui::TestAppContext) -> Str
|
|||
layers[0].node.to_sexp()
|
||||
})
|
||||
}
|
||||
|
||||
fn empty(point: Point) -> Range<Point> {
|
||||
point..point
|
||||
}
|
||||
|
|
|
@ -243,6 +243,47 @@ pub struct LanguageConfig {
|
|||
pub line_comment: Option<Arc<str>>,
|
||||
#[serde(default)]
|
||||
pub block_comment: Option<(Arc<str>, Arc<str>)>,
|
||||
#[serde(default)]
|
||||
pub overrides: HashMap<String, LanguageConfigOverride>,
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct LanguageScope {
|
||||
language: Arc<Language>,
|
||||
override_id: Option<u32>,
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Default, Debug)]
|
||||
pub struct LanguageConfigOverride {
|
||||
#[serde(default)]
|
||||
pub line_comment: Override<Arc<str>>,
|
||||
#[serde(default)]
|
||||
pub block_comment: Override<(Arc<str>, Arc<str>)>,
|
||||
#[serde(default)]
|
||||
pub brackets: Override<Vec<BracketPair>>,
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Debug)]
|
||||
#[serde(untagged)]
|
||||
pub enum Override<T> {
|
||||
Remove { remove: bool },
|
||||
Set(T),
|
||||
}
|
||||
|
||||
impl<T> Default for Override<T> {
|
||||
fn default() -> Self {
|
||||
Override::Remove { remove: false }
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> Override<T> {
|
||||
fn as_option<'a>(this: Option<&'a Self>, original: Option<&'a T>) -> Option<&'a T> {
|
||||
match this {
|
||||
Some(Self::Set(value)) => Some(value),
|
||||
Some(Self::Remove { remove: true }) => None,
|
||||
Some(Self::Remove { remove: false }) | None => original,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for LanguageConfig {
|
||||
|
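The untagged `Override` enum added above lets a language override either replace an inherited value (`Set`) or explicitly drop it (`remove = true`), with `as_option` resolving the final value. A small sketch of how the untagged representation deserializes, assuming `serde` and `serde_json` are available as dependencies (it mirrors the enum above but is not the crate's own test):

use serde::Deserialize;

#[derive(Deserialize, Debug)]
#[serde(untagged)]
enum Override<T> {
    Remove { remove: bool },
    Set(T),
}

fn main() {
    // A plain value deserializes as `Set`, overriding the inherited config.
    let set: Override<String> = serde_json::from_str(r#""// ""#).unwrap();
    // `{ "remove": true }` deserializes as `Remove { remove: true }`, which
    // `as_option` would turn into `None`, dropping the inherited value.
    let removed: Override<String> = serde_json::from_str(r#"{ "remove": true }"#).unwrap();
    println!("{set:?} {removed:?}");
}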
@ -257,6 +298,7 @@ impl Default for LanguageConfig {
|
|||
autoclose_before: Default::default(),
|
||||
line_comment: Default::default(),
|
||||
block_comment: Default::default(),
|
||||
overrides: Default::default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -306,11 +348,13 @@ pub struct Language {
|
|||
pub struct Grammar {
|
||||
id: usize,
|
||||
pub(crate) ts_language: tree_sitter::Language,
|
||||
pub(crate) error_query: Query,
|
||||
pub(crate) highlights_query: Option<Query>,
|
||||
pub(crate) brackets_config: Option<BracketConfig>,
|
||||
pub(crate) indents_config: Option<IndentConfig>,
|
||||
pub(crate) outline_config: Option<OutlineConfig>,
|
||||
pub(crate) injection_config: Option<InjectionConfig>,
|
||||
pub(crate) override_config: Option<OverrideConfig>,
|
||||
pub(crate) highlight_map: Mutex<HighlightMap>,
|
||||
}
|
||||
|
||||
|
@ -336,6 +380,11 @@ struct InjectionConfig {
|
|||
patterns: Vec<InjectionPatternConfig>,
|
||||
}
|
||||
|
||||
struct OverrideConfig {
|
||||
query: Query,
|
||||
values: HashMap<u32, LanguageConfigOverride>,
|
||||
}
|
||||
|
||||
#[derive(Default, Clone)]
|
||||
struct InjectionPatternConfig {
|
||||
language: Option<Box<str>>,
|
||||
|
@ -635,6 +684,8 @@ impl Language {
|
|||
outline_config: None,
|
||||
indents_config: None,
|
||||
injection_config: None,
|
||||
override_config: None,
|
||||
error_query: Query::new(ts_language, "(ERROR) @error").unwrap(),
|
||||
ts_language,
|
||||
highlight_map: Default::default(),
|
||||
})
|
||||
|
@ -775,6 +826,34 @@ impl Language {
|
|||
Ok(self)
|
||||
}
|
||||
|
||||
pub fn with_override_query(mut self, source: &str) -> Result<Self> {
|
||||
let query = Query::new(self.grammar_mut().ts_language, source)?;
|
||||
|
||||
let mut values = HashMap::default();
|
||||
for (ix, name) in query.capture_names().iter().enumerate() {
|
||||
if !name.starts_with('_') {
|
||||
let value = self.config.overrides.remove(name).ok_or_else(|| {
|
||||
anyhow!(
|
||||
"language {:?} has override in query but not in config: {name:?}",
|
||||
self.config.name
|
||||
)
|
||||
})?;
|
||||
values.insert(ix as u32, value);
|
||||
}
|
||||
}
|
||||
|
||||
if !self.config.overrides.is_empty() {
|
||||
let keys = self.config.overrides.keys().collect::<Vec<_>>();
|
||||
Err(anyhow!(
|
||||
"language {:?} has overrides in config not in query: {keys:?}",
|
||||
self.config.name
|
||||
))?;
|
||||
}
|
||||
|
||||
self.grammar_mut().override_config = Some(OverrideConfig { query, values });
|
||||
Ok(self)
|
||||
}
|
||||
|
||||
fn grammar_mut(&mut self) -> &mut Grammar {
|
||||
Arc::get_mut(self.grammar.as_mut().unwrap()).unwrap()
|
||||
}
|
||||
|
@ -800,17 +879,6 @@ impl Language {
|
|||
self.config.name.clone()
|
||||
}
|
||||
|
||||
pub fn line_comment_prefix(&self) -> Option<&Arc<str>> {
|
||||
self.config.line_comment.as_ref()
|
||||
}
|
||||
|
||||
pub fn block_comment_delimiters(&self) -> Option<(&Arc<str>, &Arc<str>)> {
|
||||
self.config
|
||||
.block_comment
|
||||
.as_ref()
|
||||
.map(|(start, end)| (start, end))
|
||||
}
|
||||
|
||||
pub async fn disk_based_diagnostic_sources(&self) -> &[String] {
|
||||
match self.adapter.as_ref() {
|
||||
Some(adapter) => &adapter.disk_based_diagnostic_sources,
|
||||
|
@ -886,10 +954,6 @@ impl Language {
|
|||
result
|
||||
}
|
||||
|
||||
pub fn brackets(&self) -> &[BracketPair] {
|
||||
&self.config.brackets
|
||||
}
|
||||
|
||||
pub fn path_suffixes(&self) -> &[String] {
|
||||
&self.config.path_suffixes
|
||||
}
|
||||
|
@ -912,6 +976,42 @@ impl Language {
|
|||
}
|
||||
}
|
||||
|
||||
impl LanguageScope {
|
||||
pub fn line_comment_prefix(&self) -> Option<&Arc<str>> {
|
||||
Override::as_option(
|
||||
self.config_override().map(|o| &o.line_comment),
|
||||
self.language.config.line_comment.as_ref(),
|
||||
)
|
||||
}
|
||||
|
||||
pub fn block_comment_delimiters(&self) -> Option<(&Arc<str>, &Arc<str>)> {
|
||||
Override::as_option(
|
||||
self.config_override().map(|o| &o.block_comment),
|
||||
self.language.config.block_comment.as_ref(),
|
||||
)
|
||||
.map(|e| (&e.0, &e.1))
|
||||
}
|
||||
|
||||
pub fn brackets(&self) -> &[BracketPair] {
|
||||
Override::as_option(
|
||||
self.config_override().map(|o| &o.brackets),
|
||||
Some(&self.language.config.brackets),
|
||||
)
|
||||
.map_or(&[], Vec::as_slice)
|
||||
}
|
||||
|
||||
pub fn should_autoclose_before(&self, c: char) -> bool {
|
||||
c.is_whitespace() || self.language.config.autoclose_before.contains(c)
|
||||
}
|
||||
|
||||
fn config_override(&self) -> Option<&LanguageConfigOverride> {
|
||||
let id = self.override_id?;
|
||||
let grammar = self.language.grammar.as_ref()?;
|
||||
let override_config = grammar.override_config.as_ref()?;
|
||||
override_config.values.get(&id)
|
||||
}
|
||||
}
|
||||
|
||||
impl Hash for Language {
|
||||
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
|
||||
self.id().hash(state)
|
||||
|
|
|
@ -11,6 +11,15 @@ use text::*;
|
|||
|
||||
pub use proto::{BufferState, Operation};
|
||||
|
||||
pub fn serialize_fingerprint(fingerprint: RopeFingerprint) -> String {
|
||||
fingerprint.to_hex()
|
||||
}
|
||||
|
||||
pub fn deserialize_fingerprint(fingerprint: &str) -> Result<RopeFingerprint> {
|
||||
RopeFingerprint::from_hex(fingerprint)
|
||||
.map_err(|error| anyhow!("invalid fingerprint: {}", error))
|
||||
}
|
||||
|
||||
pub fn deserialize_line_ending(message: proto::LineEnding) -> fs::LineEnding {
|
||||
match message {
|
||||
proto::LineEnding::Unix => fs::LineEnding::Unix,
|
||||
|
|
|
@ -1127,6 +1127,41 @@ fn splice_included_ranges(
|
|||
ranges
|
||||
}
|
||||
|
||||
impl<'a> SyntaxLayerInfo<'a> {
|
||||
pub(crate) fn override_id(&self, offset: usize, text: &text::BufferSnapshot) -> Option<u32> {
|
||||
let text = TextProvider(text.as_rope());
|
||||
let config = self.language.grammar.as_ref()?.override_config.as_ref()?;
|
||||
|
||||
let mut query_cursor = QueryCursorHandle::new();
|
||||
query_cursor.set_byte_range(offset..offset);
|
||||
|
||||
let mut smallest_match: Option<(u32, Range<usize>)> = None;
|
||||
for mat in query_cursor.matches(&config.query, self.node, text) {
|
||||
for capture in mat.captures {
|
||||
if !config.values.contains_key(&capture.index) {
|
||||
continue;
|
||||
}
|
||||
|
||||
let range = capture.node.byte_range();
|
||||
if offset <= range.start || offset >= range.end {
|
||||
continue;
|
||||
}
|
||||
|
||||
if let Some((_, smallest_range)) = &smallest_match {
|
||||
if range.len() < smallest_range.len() {
|
||||
smallest_match = Some((capture.index, range))
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
smallest_match = Some((capture.index, range));
|
||||
}
|
||||
}
|
||||
|
||||
smallest_match.map(|(index, _)| index)
|
||||
}
|
||||
}
|
||||
|
||||
impl std::ops::Deref for SyntaxMap {
|
||||
type Target = SyntaxSnapshot;
|
||||
|
||||
|
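`override_id` above keeps, among all override captures whose range strictly contains the offset, the one with the smallest range, so the innermost scope wins. The same rule, reduced to a standalone sketch (illustrative only, not the syntax map's code):

use std::ops::Range;

// Return the id of the smallest range that strictly contains `offset`.
fn innermost(captures: &[(u32, Range<usize>)], offset: usize) -> Option<u32> {
    captures
        .iter()
        .filter(|(_, range)| range.start < offset && offset < range.end)
        .min_by_key(|(_, range)| range.len())
        .map(|(id, _)| *id)
}

fn main() {
    let captures = vec![(0, 0..40), (1, 10..30), (2, 12..20)];
    assert_eq!(innermost(&captures, 15), Some(2)); // innermost scope wins
    assert_eq!(innermost(&captures, 5), Some(0));
    assert_eq!(innermost(&captures, 40), None); // boundaries are exclusive
}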
@ -2227,7 +2262,7 @@ mod tests {
|
|||
mutated_syntax_map.reparse(language.clone(), &buffer);
|
||||
|
||||
for (i, marked_string) in steps.into_iter().enumerate() {
|
||||
edit_buffer(&mut buffer, &marked_string.unindent());
|
||||
buffer.edit_via_marked_text(&marked_string.unindent());
|
||||
|
||||
// Reparse the syntax map
|
||||
mutated_syntax_map.interpolate(&buffer);
|
||||
|
@ -2417,52 +2452,6 @@ mod tests {
|
|||
assert_eq!(actual_ranges, expected_ranges);
|
||||
}
|
||||
|
||||
fn edit_buffer(buffer: &mut Buffer, marked_string: &str) {
|
||||
let old_text = buffer.text();
|
||||
let (new_text, mut ranges) = marked_text_ranges(marked_string, false);
|
||||
if ranges.is_empty() {
|
||||
ranges.push(0..new_text.len());
|
||||
}
|
||||
|
||||
assert_eq!(
|
||||
old_text[..ranges[0].start],
|
||||
new_text[..ranges[0].start],
|
||||
"invalid edit"
|
||||
);
|
||||
|
||||
let mut delta = 0;
|
||||
let mut edits = Vec::new();
|
||||
let mut ranges = ranges.into_iter().peekable();
|
||||
|
||||
while let Some(inserted_range) = ranges.next() {
|
||||
let new_start = inserted_range.start;
|
||||
let old_start = (new_start as isize - delta) as usize;
|
||||
|
||||
let following_text = if let Some(next_range) = ranges.peek() {
|
||||
&new_text[inserted_range.end..next_range.start]
|
||||
} else {
|
||||
&new_text[inserted_range.end..]
|
||||
};
|
||||
|
||||
let inserted_len = inserted_range.len();
|
||||
let deleted_len = old_text[old_start..]
|
||||
.find(following_text)
|
||||
.expect("invalid edit");
|
||||
|
||||
let old_range = old_start..old_start + deleted_len;
|
||||
edits.push((old_range, new_text[inserted_range].to_string()));
|
||||
delta += inserted_len as isize - deleted_len as isize;
|
||||
}
|
||||
|
||||
assert_eq!(
|
||||
old_text.len() as isize + delta,
|
||||
new_text.len() as isize,
|
||||
"invalid edit"
|
||||
);
|
||||
|
||||
buffer.edit(edits);
|
||||
}
|
||||
|
||||
pub fn string_contains_sequence(text: &str, parts: &[&str]) -> bool {
|
||||
let mut last_part_end = 0;
|
||||
for part in parts {
|
||||
|
|
|
@ -3,6 +3,7 @@ name = "live_kit_client"
|
|||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
description = "Bindings to LiveKit Swift client SDK"
|
||||
publish = false
|
||||
|
||||
[lib]
|
||||
path = "src/live_kit_client.rs"
|
||||
|
|
|
@ -3,6 +3,7 @@ name = "live_kit_server"
|
|||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
description = "SDK for the LiveKit server API"
|
||||
publish = false
|
||||
|
||||
[lib]
|
||||
path = "src/live_kit_server.rs"
|
||||
|
|
|
@ -2,6 +2,7 @@
|
|||
name = "lsp"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
publish = false
|
||||
|
||||
[lib]
|
||||
path = "src/lsp.rs"
|
||||
|
|
|
@ -2,6 +2,7 @@
|
|||
name = "media"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
publish = false
|
||||
|
||||
[lib]
|
||||
path = "src/media.rs"
|
||||
|
|
Some files were not shown because too many files have changed in this diff.