Merge branch 'main' into reconnections-2
commit 969c314315
78 changed files with 2759 additions and 1281 deletions
.github/workflows/ci.yml (vendored, 2 changes)

@@ -49,7 +49,7 @@ jobs:
         run: cargo build -p collab

       - name: Build other binaries
-        run: cargo build --bins --all-features
+        run: cargo build --workspace --bins --all-features

   bundle:
     name: Bundle app
.github/workflows/release_actions.yml (vendored, 4 changes)

@@ -14,10 +14,10 @@ jobs:
           content: |
             📣 Zed ${{ github.event.release.tag_name }} was just released!

-            Restart your Zed or head to https://zed.dev/releases to grab it.
+            Restart your Zed or head to https://zed.dev/releases/latest to grab it.

             ```md
-            ### Changelog
+            # Changelog

             ${{ github.event.release.body }}
             ```
Cargo.lock (generated, 21 changes)

@@ -1121,7 +1121,7 @@ dependencies = [

 [[package]]
 name = "collab"
-version = "0.2.2"
+version = "0.2.5"
 dependencies = [
  "anyhow",
  "async-tungstenite",

@@ -3125,6 +3125,7 @@ dependencies = [
  "text",
  "theme",
  "tree-sitter",
+ "tree-sitter-embedded-template",
  "tree-sitter-html",
  "tree-sitter-javascript",
  "tree-sitter-json 0.19.0",

@@ -6729,8 +6730,8 @@ dependencies = [

 [[package]]
 name = "tree-sitter"
-version = "0.20.8"
-source = "git+https://github.com/tree-sitter/tree-sitter?rev=366210ae925d7ea0891bc7a0c738f60c77c04d7b#366210ae925d7ea0891bc7a0c738f60c77c04d7b"
+version = "0.20.9"
+source = "git+https://github.com/tree-sitter/tree-sitter?rev=36b5b6c89e55ad1a502f8b3234bb3e12ec83a5da#36b5b6c89e55ad1a502f8b3234bb3e12ec83a5da"
 dependencies = [
  "cc",
  "regex",

@@ -6774,6 +6775,16 @@ dependencies = [
  "tree-sitter",
 ]

+[[package]]
+name = "tree-sitter-embedded-template"
+version = "0.20.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "33817ade928c73a32d4f904a602321e09de9fc24b71d106f3b4b3f8ab30dcc38"
+dependencies = [
+ "cc",
+ "tree-sitter",
+]
+
 [[package]]
 name = "tree-sitter-go"
 version = "0.19.1"

@@ -7100,6 +7111,7 @@ name = "util"
 version = "0.1.0"
 dependencies = [
  "anyhow",
+ "backtrace",
  "futures 0.3.24",
  "git2",
  "lazy_static",

@@ -7989,7 +8001,7 @@ dependencies = [

 [[package]]
 name = "zed"
-version = "0.65.0"
+version = "0.67.0"
 dependencies = [
  "activity_indicator",
  "anyhow",

@@ -8068,6 +8080,7 @@ dependencies = [
  "tree-sitter-cpp",
  "tree-sitter-css",
  "tree-sitter-elixir",
+ "tree-sitter-embedded-template",
  "tree-sitter-go",
  "tree-sitter-html",
  "tree-sitter-json 0.20.0",
@@ -65,7 +65,7 @@ serde_json = { version = "1.0", features = ["preserve_order", "raw_value"] }
 rand = { version = "0.8" }

 [patch.crates-io]
-tree-sitter = { git = "https://github.com/tree-sitter/tree-sitter", rev = "366210ae925d7ea0891bc7a0c738f60c77c04d7b" }
+tree-sitter = { git = "https://github.com/tree-sitter/tree-sitter", rev = "36b5b6c89e55ad1a502f8b3234bb3e12ec83a5da" }
 async-task = { git = "https://github.com/zed-industries/async-task", rev = "341b57d6de98cdfd7b418567b8de2022ca993a6e" }
 sqlx = { git = "https://github.com/launchbadge/sqlx", rev = "4b7053807c705df312bcb9b6281e184bf7534eb3" }
@@ -472,6 +472,15 @@
         "terminal::SendText",
         "\u0001"
       ],
+      // Terminal.app compatability
+      "alt-left": [
+        "terminal::SendText",
+        "\u001bb"
+      ],
+      "alt-right": [
+        "terminal::SendText",
+        "\u001bf"
+      ],
       // There are conflicting bindings for these keys in the global context.
       // these bindings override them, remove at your own risk:
       "up": [
@@ -1,230 +1,233 @@
 {
   // The name of the Zed theme to use for the UI
   "theme": "One Dark",
   // The name of a font to use for rendering text in the editor
   "buffer_font_family": "Zed Mono",
   // The default font size for text in the editor
   "buffer_font_size": 15,
+  // The factor to grow the active pane by. Defaults to 1.0
+  // which gives the same size as all other panes.
+  "active_pane_magnification": 1.0,
   // Whether to enable vim modes and key bindings
   "vim_mode": false,
   // Whether to show the informational hover box when moving the mouse
   // over symbols in the editor.
   "hover_popover_enabled": true,
   // Whether the cursor blinks in the editor.
   "cursor_blink": true,
   // Whether to pop the completions menu while typing in an editor without
   // explicitly requesting it.
   "show_completions_on_input": true,
   // Whether new projects should start out 'online'. Online projects
   // appear in the contacts panel under your name, so that your contacts
   // can see which projects you are working on. Regardless of this
   // setting, projects keep their last online status when you reopen them.
   "projects_online_by_default": true,
   // Whether to use language servers to provide code intelligence.
   "enable_language_server": true,
 [... the remaining ~200 lines of this hunk (autosave, dock, formatter, soft-wrap, git, journal, terminal, languages, and lsp settings) are unchanged context; only the three "active_pane_magnification" lines above were added ...]
@@ -70,7 +70,14 @@ pub fn init(db: project::Db, http_client: Arc<dyn HttpClient>, cx: &mut MutableA
         }
     });
     cx.add_global_action(move |_: &ViewReleaseNotes, cx| {
-        cx.platform().open_url(&format!("{server_url}/releases"));
+        let latest_release_url = if cx.has_global::<ReleaseChannel>()
+            && *cx.global::<ReleaseChannel>() == ReleaseChannel::Preview
+        {
+            format!("{server_url}/releases/preview/latest")
+        } else {
+            format!("{server_url}/releases/latest")
+        };
+        cx.platform().open_url(&latest_release_url);
     });
     cx.add_action(UpdateNotification::dismiss);
 }
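The auto-update hunk above makes ViewReleaseNotes open a channel-specific URL instead of always opening /releases. A small self-contained sketch of that selection logic; the ReleaseChannel enum and the function name below are illustrative stand-ins, not the exact items from the Zed crates:

```rust
// Hypothetical stand-in for Zed's ReleaseChannel global.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum ReleaseChannel {
    Dev,
    Preview,
    Stable,
}

/// Pick the "latest release" URL, preferring the preview channel's page
/// when the running app is itself a preview build.
fn latest_release_url(server_url: &str, channel: Option<ReleaseChannel>) -> String {
    if channel == Some(ReleaseChannel::Preview) {
        format!("{server_url}/releases/preview/latest")
    } else {
        format!("{server_url}/releases/latest")
    }
}

fn main() {
    assert_eq!(
        latest_release_url("https://zed.dev", Some(ReleaseChannel::Preview)),
        "https://zed.dev/releases/preview/latest"
    );
    assert_eq!(
        latest_release_url("https://zed.dev", Some(ReleaseChannel::Stable)),
        "https://zed.dev/releases/latest"
    );
}
```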
@@ -150,7 +150,6 @@ impl UserStore {
                 client.telemetry.set_authenticated_user_info(None, false);
             }

-            client.telemetry.report_event("sign in", Default::default());
             current_user_tx.send(user).await.ok();
         }
     }
@@ -3,7 +3,7 @@ authors = ["Nathan Sobo <nathan@zed.dev>"]
 default-run = "collab"
 edition = "2021"
 name = "collab"
-version = "0.2.2"
+version = "0.2.5"

 [[bin]]
 name = "collab"
@@ -0,0 +1,2 @@
+ALTER TABLE "signups"
+ADD "added_to_mailing_list" BOOLEAN NOT NULL DEFAULT FALSE;
@@ -338,7 +338,7 @@ async fn create_signup(
     Json(params): Json<NewSignup>,
     Extension(app): Extension<Arc<AppState>>,
 ) -> Result<()> {
-    app.db.create_signup(params).await?;
+    app.db.create_signup(&params).await?;
     Ok(())
 }

@@ -57,16 +57,14 @@ async fn main() {
         }
     }

-    let mut zed_user_ids = Vec::<UserId>::new();
     for (github_user, admin) in zed_users {
-        if let Some(user) = db
+        if db
             .get_user_by_github_account(&github_user.login, Some(github_user.id))
             .await
             .expect("failed to fetch user")
+            .is_none()
         {
-            zed_user_ids.push(user.id);
-        } else if let Some(email) = &github_user.email {
-            zed_user_ids.push(
+            if let Some(email) = &github_user.email {
                 db.create_user(
                     email,
                     admin,

@@ -77,11 +75,8 @@ async fn main() {
                     },
                 )
                 .await
-                .expect("failed to insert user")
-                .user_id,
-                );
-            } else if admin {
-                zed_user_ids.push(
+                .expect("failed to insert user");
+            } else if admin {
                 db.create_user(
                     &format!("{}@zed.dev", github_user.login),
                     admin,

@@ -92,9 +87,8 @@ async fn main() {
                     },
                 )
                 .await
-                .expect("failed to insert user")
-                .user_id,
-                );
+                .expect("failed to insert user");
+            }
         }
     }
 }
@@ -660,9 +660,9 @@ impl Database {

     // signups

-    pub async fn create_signup(&self, signup: NewSignup) -> Result<()> {
+    pub async fn create_signup(&self, signup: &NewSignup) -> Result<()> {
         self.transact(|tx| async {
-            signup::ActiveModel {
+            signup::Entity::insert(signup::ActiveModel {
                 email_address: ActiveValue::set(signup.email_address.clone()),
                 email_confirmation_code: ActiveValue::set(random_email_confirmation_code()),
                 email_confirmation_sent: ActiveValue::set(false),

@@ -673,9 +673,15 @@ impl Database {
                 editor_features: ActiveValue::set(Some(signup.editor_features.clone())),
                 programming_languages: ActiveValue::set(Some(signup.programming_languages.clone())),
                 device_id: ActiveValue::set(signup.device_id.clone()),
+                added_to_mailing_list: ActiveValue::set(signup.added_to_mailing_list),
                 ..Default::default()
-            }
-            .insert(&tx)
+            })
+            .on_conflict(
+                OnConflict::column(signup::Column::EmailAddress)
+                    .update_column(signup::Column::EmailAddress)
+                    .to_owned(),
+            )
+            .exec(&tx)
             .await?;
             tx.commit().await?;
             Ok(())
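The create_signup hunks above switch from a plain insert to an upsert keyed on the email address, so re-submitting a signup no longer fails on the unique constraint. A minimal sketch of the same SeaORM on_conflict pattern, assuming an illustrative signups entity; the table, column, and function names here are not Zed's real ones:

```rust
use sea_orm::{
    entity::prelude::*, sea_query::OnConflict, ActiveValue, DatabaseConnection, DbErr,
};

// Illustrative entity; the real `signup` entity has many more columns.
#[derive(Clone, Debug, PartialEq, DeriveEntityModel)]
#[sea_orm(table_name = "signups")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub id: i32,
    // Assumed to have a UNIQUE index created by a migration.
    pub email_address: String,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}

impl ActiveModelBehavior for ActiveModel {}

/// Insert a signup, treating a duplicate email address as a harmless no-op
/// update (roughly `INSERT ... ON CONFLICT (email_address) DO UPDATE ...`).
async fn upsert_signup(db: &DatabaseConnection, email: &str) -> Result<(), DbErr> {
    Entity::insert(ActiveModel {
        email_address: ActiveValue::set(email.to_string()),
        ..Default::default()
    })
    .on_conflict(
        OnConflict::column(Column::EmailAddress)
            // Updating the conflict column to itself keeps the existing row
            // unchanged while still letting the statement succeed.
            .update_column(Column::EmailAddress)
            .to_owned(),
    )
    .exec(db)
    .await?;
    Ok(())
}
```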
@@ -746,6 +752,7 @@ impl Database {
                     .or(signup::Column::PlatformUnknown.eq(true)),
                 ),
             )
+            .order_by_asc(signup::Column::CreatedAt)
             .limit(count as u64)
             .into_model()
             .all(&tx)

@@ -772,32 +779,41 @@ impl Database {
             Err(anyhow!("email address is already in use"))?;
         }

-        let inviter = match user::Entity::find()
-            .filter(user::Column::InviteCode.eq(code))
+        let inviting_user_with_invites = match user::Entity::find()
+            .filter(
+                user::Column::InviteCode
+                    .eq(code)
+                    .and(user::Column::InviteCount.gt(0)),
+            )
             .one(&tx)
             .await?
         {
-            Some(inviter) => inviter,
+            Some(inviting_user) => inviting_user,
             None => {
                 return Err(Error::Http(
-                    StatusCode::NOT_FOUND,
-                    "invite code not found".to_string(),
+                    StatusCode::UNAUTHORIZED,
+                    "unable to find an invite code with invites remaining".to_string(),
                 ))?
             }
         };
-        if inviter.invite_count == 0 {
-            Err(Error::Http(
-                StatusCode::UNAUTHORIZED,
-                "no invites remaining".to_string(),
-            ))?;
-        }
+        user::Entity::update_many()
+            .filter(
+                user::Column::Id
+                    .eq(inviting_user_with_invites.id)
+                    .and(user::Column::InviteCount.gt(0)),
+            )
+            .col_expr(
+                user::Column::InviteCount,
+                Expr::col(user::Column::InviteCount).sub(1),
+            )
+            .exec(&tx)
+            .await?;

         let signup = signup::Entity::insert(signup::ActiveModel {
             email_address: ActiveValue::set(email_address.into()),
             email_confirmation_code: ActiveValue::set(random_email_confirmation_code()),
             email_confirmation_sent: ActiveValue::set(false),
-            inviting_user_id: ActiveValue::set(Some(inviter.id)),
+            inviting_user_id: ActiveValue::set(Some(inviting_user_with_invites.id)),
             platform_linux: ActiveValue::set(false),
             platform_mac: ActiveValue::set(false),
             platform_windows: ActiveValue::set(false),

@@ -873,26 +889,6 @@ impl Database {
             let signup = signup.update(&tx).await?;

             if let Some(inviting_user_id) = signup.inviting_user_id {
-                let result = user::Entity::update_many()
-                    .filter(
-                        user::Column::Id
-                            .eq(inviting_user_id)
-                            .and(user::Column::InviteCount.gt(0)),
-                    )
-                    .col_expr(
-                        user::Column::InviteCount,
-                        Expr::col(user::Column::InviteCount).sub(1),
-                    )
-                    .exec(&tx)
-                    .await?;
-
-                if result.rows_affected == 0 {
-                    Err(Error::Http(
-                        StatusCode::UNAUTHORIZED,
-                        "no invites remaining".to_string(),
-                    ))?;
-                }
-
                 contact::Entity::insert(contact::ActiveModel {
                     user_id_a: ActiveValue::set(inviting_user_id),
                     user_id_b: ActiveValue::set(user.id),
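Taken together, the invite hunks above fold the "any invites left?" check into the same UPDATE that decrements the count when an invite is created, and drop the later decrement in create_user_from_invite. Because the check and the decrement live in one statement, two concurrent requests cannot both consume the last invite. A distilled sketch of that guarded-decrement pattern with SeaORM, using an illustrative users entity rather than Zed's real one:

```rust
use sea_orm::{entity::prelude::*, sea_query::Expr, DatabaseConnection, DbErr};

// Illustrative entity; the real `user` entity has many more columns.
#[derive(Clone, Debug, PartialEq, DeriveEntityModel)]
#[sea_orm(table_name = "users")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub id: i32,
    pub invite_count: i32,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}

impl ActiveModelBehavior for ActiveModel {}

/// Atomically consume one invite, roughly:
///   UPDATE users SET invite_count = invite_count - 1
///   WHERE id = $1 AND invite_count > 0
/// Returns false when the user had no invites left (no row matched).
async fn consume_invite(db: &DatabaseConnection, user_id: i32) -> Result<bool, DbErr> {
    let result = Entity::update_many()
        .filter(Column::Id.eq(user_id).and(Column::InviteCount.gt(0)))
        .col_expr(
            Column::InviteCount,
            Expr::col(Column::InviteCount).sub(1),
        )
        .exec(db)
        .await?;
    Ok(result.rows_affected > 0)
}
```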
@@ -20,6 +20,7 @@ pub struct Model {
     pub platform_unknown: bool,
     pub editor_features: Option<Vec<String>>,
     pub programming_languages: Option<Vec<String>>,
+    pub added_to_mailing_list: bool,
 }

 #[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]

@@ -27,7 +28,7 @@ pub enum Relation {}

 impl ActiveModelBehavior for ActiveModel {}

-#[derive(Debug, PartialEq, Eq, FromQueryResult, Serialize, Deserialize)]
+#[derive(Clone, Debug, PartialEq, Eq, FromQueryResult, Serialize, Deserialize)]
 pub struct Invite {
     pub email_address: String,
     pub email_confirmation_code: String,

@@ -42,6 +43,7 @@ pub struct NewSignup {
     pub editor_features: Vec<String>,
     pub programming_languages: Vec<String>,
     pub device_id: Option<String>,
+    pub added_to_mailing_list: bool,
 }

 #[derive(Clone, Debug, PartialEq, Deserialize, Serialize, FromQueryResult)]
@@ -667,19 +667,29 @@ async fn test_signups() {
     let test_db = TestDb::postgres(build_background_executor());
     let db = test_db.db();

-    // people sign up on the waitlist
-    for i in 0..8 {
-        db.create_signup(NewSignup {
-            email_address: format!("person-{i}@example.com"),
+    let usernames = (0..8).map(|i| format!("person-{i}")).collect::<Vec<_>>();
+
+    let all_signups = usernames
+        .iter()
+        .enumerate()
+        .map(|(i, username)| NewSignup {
+            email_address: format!("{username}@example.com"),
             platform_mac: true,
             platform_linux: i % 2 == 0,
             platform_windows: i % 4 == 0,
             editor_features: vec!["speed".into()],
             programming_languages: vec!["rust".into(), "c".into()],
             device_id: Some(format!("device_id_{i}")),
+            added_to_mailing_list: i != 0, // One user failed to subscribe
         })
-        .await
-        .unwrap();
+        .collect::<Vec<NewSignup>>();
+
+    // people sign up on the waitlist
+    for signup in &all_signups {
+        // users can sign up multiple times without issues
+        for _ in 0..2 {
+            db.create_signup(&signup).await.unwrap();
+        }
     }

     assert_eq!(

@@ -702,9 +712,9 @@ async fn test_signups() {
     assert_eq!(
         addresses,
         &[
-            "person-0@example.com",
-            "person-1@example.com",
-            "person-2@example.com"
+            all_signups[0].email_address.as_str(),
+            all_signups[1].email_address.as_str(),
+            all_signups[2].email_address.as_str()
         ]
     );
     assert_ne!(

@@ -728,9 +738,9 @@ async fn test_signups() {
     assert_eq!(
         addresses,
         &[
-            "person-3@example.com",
-            "person-4@example.com",
-            "person-5@example.com"
+            all_signups[3].email_address.as_str(),
+            all_signups[4].email_address.as_str(),
+            all_signups[5].email_address.as_str()
         ]
     );

@@ -756,11 +766,10 @@ async fn test_signups() {
     } = db
         .create_user_from_invite(
             &Invite {
-                email_address: signups_batch1[0].email_address.clone(),
-                email_confirmation_code: signups_batch1[0].email_confirmation_code.clone(),
+                ..signups_batch1[0].clone()
             },
             NewUserParams {
-                github_login: "person-0".into(),
+                github_login: usernames[0].clone(),
                 github_user_id: 0,
                 invite_count: 5,
             },

@@ -770,8 +779,11 @@ async fn test_signups() {
         .unwrap();
     let user = db.get_user_by_id(user_id).await.unwrap().unwrap();
     assert!(inviting_user_id.is_none());
-    assert_eq!(user.github_login, "person-0");
-    assert_eq!(user.email_address.as_deref(), Some("person-0@example.com"));
+    assert_eq!(user.github_login, usernames[0]);
+    assert_eq!(
+        user.email_address,
+        Some(all_signups[0].email_address.clone())
+    );
     assert_eq!(user.invite_count, 5);
     assert_eq!(signup_device_id.unwrap(), "device_id_0");

@@ -799,7 +811,7 @@ async fn test_signups() {
             email_confirmation_code: "the-wrong-code".to_string(),
         },
         NewUserParams {
-            github_login: "person-1".into(),
+            github_login: usernames[1].clone(),
             github_user_id: 2,
             invite_count: 5,
         },
@@ -5566,6 +5566,13 @@ async fn test_random_collaboration(
                 guest_client.username,
                 id
             );
+            assert_eq!(
+                guest_snapshot.abs_path(),
+                host_snapshot.abs_path(),
+                "{} has different abs path than the host for worktree {}",
+                guest_client.username,
+                id
+            );
             assert_eq!(
                 guest_snapshot.entries(false).collect::<Vec<_>>(),
                 host_snapshot.entries(false).collect::<Vec<_>>(),
@@ -322,7 +322,7 @@ impl ProjectDiagnosticsEditor {
                     );
                     let excerpt_id = excerpts
                         .insert_excerpts_after(
-                            &prev_excerpt_id,
+                            prev_excerpt_id,
                             buffer.clone(),
                             [ExcerptRange {
                                 context: excerpt_start..excerpt_end,

@@ -384,7 +384,7 @@ impl ProjectDiagnosticsEditor {

                 groups_to_add.push(group_state);
             } else if let Some((group_ix, group_state)) = to_remove {
-                excerpts.remove_excerpts(group_state.excerpts.iter(), excerpts_cx);
+                excerpts.remove_excerpts(group_state.excerpts.iter().copied(), excerpts_cx);
                 group_ixs_to_remove.push(group_ix);
                 blocks_to_remove.extend(group_state.blocks.iter().copied());
             } else if let Some((_, group)) = to_keep {

@@ -457,10 +457,15 @@ impl ProjectDiagnosticsEditor {
             }

             // If any selection has lost its position, move it to start of the next primary diagnostic.
+            let snapshot = editor.snapshot(cx);
             for selection in &mut selections {
                 if let Some(new_excerpt_id) = new_excerpt_ids_by_selection_id.get(&selection.id) {
                     let group_ix = match groups.binary_search_by(|probe| {
-                        probe.excerpts.last().unwrap().cmp(new_excerpt_id)
+                        probe
+                            .excerpts
+                            .last()
+                            .unwrap()
+                            .cmp(new_excerpt_id, &snapshot.buffer_snapshot)
                     }) {
                         Ok(ix) | Err(ix) => ix,
                     };

@@ -738,7 +743,7 @@ mod tests {
         DisplayPoint,
     };
     use gpui::TestAppContext;
-    use language::{Diagnostic, DiagnosticEntry, DiagnosticSeverity, PointUtf16};
+    use language::{Diagnostic, DiagnosticEntry, DiagnosticSeverity, PointUtf16, Unclipped};
     use serde_json::json;
     use unindent::Unindent as _;
     use workspace::AppState;

@@ -788,7 +793,7 @@ mod tests {
                 None,
                 vec![
                     DiagnosticEntry {
-                        range: PointUtf16::new(1, 8)..PointUtf16::new(1, 9),
+                        range: Unclipped(PointUtf16::new(1, 8))..Unclipped(PointUtf16::new(1, 9)),
                         diagnostic: Diagnostic {
                             message:
                                 "move occurs because `x` has type `Vec<char>`, which does not implement the `Copy` trait"

@@ -801,7 +806,7 @@ mod tests {
                         },
                     },
                     DiagnosticEntry {
-                        range: PointUtf16::new(2, 8)..PointUtf16::new(2, 9),
+                        range: Unclipped(PointUtf16::new(2, 8))..Unclipped(PointUtf16::new(2, 9)),
                         diagnostic: Diagnostic {
                             message:
                                 "move occurs because `y` has type `Vec<char>`, which does not implement the `Copy` trait"

@@ -814,7 +819,7 @@ mod tests {
                         },
                     },
                     DiagnosticEntry {
-                        range: PointUtf16::new(3, 6)..PointUtf16::new(3, 7),
+                        range: Unclipped(PointUtf16::new(3, 6))..Unclipped(PointUtf16::new(3, 7)),
                         diagnostic: Diagnostic {
                             message: "value moved here".to_string(),
                             severity: DiagnosticSeverity::INFORMATION,

@@ -825,7 +830,7 @@ mod tests {
                         },
                     },
                     DiagnosticEntry {
-                        range: PointUtf16::new(4, 6)..PointUtf16::new(4, 7),
+                        range: Unclipped(PointUtf16::new(4, 6))..Unclipped(PointUtf16::new(4, 7)),
                         diagnostic: Diagnostic {
                             message: "value moved here".to_string(),
                             severity: DiagnosticSeverity::INFORMATION,

@@ -836,7 +841,7 @@ mod tests {
                         },
                     },
                     DiagnosticEntry {
-                        range: PointUtf16::new(7, 6)..PointUtf16::new(7, 7),
+                        range: Unclipped(PointUtf16::new(7, 6))..Unclipped(PointUtf16::new(7, 7)),
                         diagnostic: Diagnostic {
                             message: "use of moved value\nvalue used here after move".to_string(),
                             severity: DiagnosticSeverity::ERROR,

@@ -847,7 +852,7 @@ mod tests {
                         },
                     },
                     DiagnosticEntry {
-                        range: PointUtf16::new(8, 6)..PointUtf16::new(8, 7),
+                        range: Unclipped(PointUtf16::new(8, 6))..Unclipped(PointUtf16::new(8, 7)),
                         diagnostic: Diagnostic {
                             message: "use of moved value\nvalue used here after move".to_string(),
                             severity: DiagnosticSeverity::ERROR,

@@ -939,7 +944,7 @@ mod tests {
                 PathBuf::from("/test/consts.rs"),
                 None,
                 vec![DiagnosticEntry {
-                    range: PointUtf16::new(0, 15)..PointUtf16::new(0, 15),
+                    range: Unclipped(PointUtf16::new(0, 15))..Unclipped(PointUtf16::new(0, 15)),
                     diagnostic: Diagnostic {
                         message: "mismatched types\nexpected `usize`, found `char`".to_string(),
                         severity: DiagnosticSeverity::ERROR,

@@ -1040,7 +1045,8 @@ mod tests {
                 None,
                 vec![
                     DiagnosticEntry {
-                        range: PointUtf16::new(0, 15)..PointUtf16::new(0, 15),
+                        range: Unclipped(PointUtf16::new(0, 15))
+                            ..Unclipped(PointUtf16::new(0, 15)),
                         diagnostic: Diagnostic {
                             message: "mismatched types\nexpected `usize`, found `char`"
                                 .to_string(),

@@ -1052,7 +1058,8 @@ mod tests {
                         },
                     },
                     DiagnosticEntry {
-                        range: PointUtf16::new(1, 15)..PointUtf16::new(1, 15),
+                        range: Unclipped(PointUtf16::new(1, 15))
+                            ..Unclipped(PointUtf16::new(1, 15)),
                         diagnostic: Diagnostic {
                             message: "unresolved name `c`".to_string(),
                             severity: DiagnosticSeverity::ERROR,
@@ -12,4 +12,4 @@ collections = { path = "../collections" }
 gpui = { path = "../gpui" }

 [dev-dependencies]
 gpui = { path = "../gpui", features = ["test-support"] }
@@ -4,12 +4,16 @@ use collections::HashSet;
 use gpui::{
     elements::{Empty, MouseEventHandler, Overlay},
     geometry::{rect::RectF, vector::Vector2F},
-    scene::MouseDrag,
+    scene::{MouseDown, MouseDrag},
     CursorStyle, Element, ElementBox, EventContext, MouseButton, MutableAppContext, RenderContext,
     View, WeakViewHandle,
 };

 enum State<V: View> {
+    Down {
+        region_offset: Vector2F,
+        region: RectF,
+    },
     Dragging {
         window_id: usize,
         position: Vector2F,

@@ -24,6 +28,13 @@ enum State<V: View> {
 impl<V: View> Clone for State<V> {
     fn clone(&self) -> Self {
         match self {
+            &State::Down {
+                region_offset,
+                region,
+            } => State::Down {
+                region_offset,
+                region,
+            },
             State::Dragging {
                 window_id,
                 position,

@@ -87,6 +98,15 @@ impl<V: View> DragAndDrop<V> {
         })
     }

+    pub fn drag_started(event: MouseDown, cx: &mut EventContext) {
+        cx.update_global(|this: &mut Self, _| {
+            this.currently_dragged = Some(State::Down {
+                region_offset: event.region.origin() - event.position,
+                region: event.region,
+            });
+        })
+    }
+
     pub fn dragging<T: Any>(
         event: MouseDrag,
         payload: Rc<T>,

@@ -94,37 +114,32 @@ impl<V: View> DragAndDrop<V> {
         render: Rc<impl 'static + Fn(&T, &mut RenderContext<V>) -> ElementBox>,
     ) {
         let window_id = cx.window_id();
-        cx.update_global::<Self, _, _>(|this, cx| {
+        cx.update_global(|this: &mut Self, cx| {
             this.notify_containers_for_window(window_id, cx);

-            if matches!(this.currently_dragged, Some(State::Canceled)) {
-                return;
+            match this.currently_dragged.as_ref() {
+                Some(&State::Down {
+                    region_offset,
+                    region,
+                })
+                | Some(&State::Dragging {
+                    region_offset,
+                    region,
+                    ..
+                }) => {
+                    this.currently_dragged = Some(State::Dragging {
+                        window_id,
+                        region_offset,
+                        region,
+                        position: event.position,
+                        payload,
+                        render: Rc::new(move |payload, cx| {
+                            render(payload.downcast_ref::<T>().unwrap(), cx)
+                        }),
+                    });
+                }
+                _ => {}
             }
-
-            let (region_offset, region) = if let Some(State::Dragging {
-                region_offset,
-                region,
-                ..
-            }) = this.currently_dragged.as_ref()
-            {
-                (*region_offset, *region)
-            } else {
-                (
-                    event.region.origin() - event.prev_mouse_position,
-                    event.region,
-                )
-            };
-
-            this.currently_dragged = Some(State::Dragging {
-                window_id,
-                region_offset,
-                region,
-                position: event.position,
-                payload,
-                render: Rc::new(move |payload, cx| {
-                    render(payload.downcast_ref::<T>().unwrap(), cx)
-                }),
-            });
         });
     }

@@ -135,6 +150,7 @@ impl<V: View> DragAndDrop<V> {
             .clone()
             .and_then(|state| {
                 match state {
+                    State::Down { .. } => None,
                     State::Dragging {
                         window_id,
                         region_offset,

@@ -263,7 +279,11 @@ impl<Tag> Draggable for MouseEventHandler<Tag> {
     {
         let payload = Rc::new(payload);
         let render = Rc::new(render);
-        self.on_drag(MouseButton::Left, move |e, cx| {
+        self.on_down(MouseButton::Left, move |e, cx| {
+            cx.propagate_event();
+            DragAndDrop::<V>::drag_started(e, cx);
+        })
+        .on_drag(MouseButton::Left, move |e, cx| {
             let payload = payload.clone();
             let render = render.clone();
             DragAndDrop::<V>::dragging(e, payload, cx, render)
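Distilled from the drag-and-drop change above: a drag now begins in a Down state captured on mouse-down and is only promoted to Dragging once drag events arrive, carrying the offset of the originally pressed region. A simplified stand-alone sketch of that state machine; the vector and rectangle types are trivial stand-ins for gpui's Vector2F and RectF:

```rust
// Simplified 2D stand-ins for gpui's Vector2F and RectF.
#[derive(Clone, Copy, Debug, PartialEq)]
struct Vec2 { x: f32, y: f32 }

#[derive(Clone, Copy, Debug)]
struct Rect { origin: Vec2 }

#[derive(Clone, Copy, Debug)]
enum DragState {
    /// Mouse button pressed on a draggable region, but no drag movement yet.
    Down { region_offset: Vec2, region: Rect },
    /// Actively dragging; keeps the offset captured at mouse-down time.
    Dragging { region_offset: Vec2, region: Rect, position: Vec2 },
}

fn on_mouse_down(region: Rect, mouse_position: Vec2) -> DragState {
    DragState::Down {
        region_offset: Vec2 {
            x: region.origin.x - mouse_position.x,
            y: region.origin.y - mouse_position.y,
        },
        region,
    }
}

/// A drag event only promotes an existing Down/Dragging state; if the mouse
/// was never pressed on a draggable region, the event is ignored.
fn on_drag(state: Option<DragState>, position: Vec2) -> Option<DragState> {
    match state {
        Some(DragState::Down { region_offset, region })
        | Some(DragState::Dragging { region_offset, region, .. }) => {
            Some(DragState::Dragging { region_offset, region, position })
        }
        None => None,
    }
}

fn main() {
    let region = Rect { origin: Vec2 { x: 10.0, y: 10.0 } };
    let state = Some(on_mouse_down(region, Vec2 { x: 12.0, y: 14.0 }));
    let dragged = on_drag(state, Vec2 { x: 30.0, y: 40.0 });
    assert!(matches!(dragged, Some(DragState::Dragging { .. })));
    println!("{dragged:?}");
}
```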
@@ -2,7 +2,7 @@ use super::{
     wrap_map::{self, WrapEdit, WrapPoint, WrapSnapshot},
     TextHighlights,
 };
-use crate::{Anchor, ExcerptRange, ToPoint as _};
+use crate::{Anchor, ExcerptId, ExcerptRange, ToPoint as _};
 use collections::{Bound, HashMap, HashSet};
 use gpui::{ElementBox, RenderContext};
 use language::{BufferSnapshot, Chunk, Patch, Point};

@@ -107,7 +107,7 @@ struct Transform {
 pub enum TransformBlock {
     Custom(Arc<Block>),
     ExcerptHeader {
-        key: usize,
+        id: ExcerptId,
         buffer: BufferSnapshot,
         range: ExcerptRange<text::Anchor>,
         height: u8,

@@ -371,7 +371,7 @@ impl BlockMap {
                     .make_wrap_point(Point::new(excerpt_boundary.row, 0), Bias::Left)
                     .row(),
                 TransformBlock::ExcerptHeader {
-                    key: excerpt_boundary.key,
+                    id: excerpt_boundary.id,
                     buffer: excerpt_boundary.buffer,
                     range: excerpt_boundary.range,
                     height: if excerpt_boundary.starts_new_buffer {
@@ -73,6 +73,7 @@ use std::{
 mem,
 num::NonZeroU32,
 ops::{Deref, DerefMut, Range, RangeInclusive},
+path::Path,
 sync::Arc,
 time::{Duration, Instant},
 };

@@ -1161,7 +1162,7 @@ impl Editor {
 });
 clone.selections.set_state(&self.selections);
 clone.scroll_position = self.scroll_position;
-clone.scroll_top_anchor = self.scroll_top_anchor.clone();
+clone.scroll_top_anchor = self.scroll_top_anchor;
 clone.searchable = self.searchable;
 clone
 }

@@ -1304,7 +1305,7 @@ impl Editor {
 display_snapshot: self.display_map.update(cx, |map, cx| map.snapshot(cx)),
 ongoing_scroll: self.ongoing_scroll,
 scroll_position: self.scroll_position,
-scroll_top_anchor: self.scroll_top_anchor.clone(),
+scroll_top_anchor: self.scroll_top_anchor,
 placeholder_text: self.placeholder_text.clone(),
 is_focused: self
 .handle

@@ -1790,17 +1791,15 @@ impl Editor {
 .pending_anchor()
 .expect("extend_selection not called with pending selection");
 if position >= tail {
-pending_selection.start = tail_anchor.clone();
+pending_selection.start = tail_anchor;
 } else {
-pending_selection.end = tail_anchor.clone();
+pending_selection.end = tail_anchor;
 pending_selection.reversed = true;
 }

 let mut pending_mode = self.selections.pending_mode().unwrap();
 match &mut pending_mode {
-SelectMode::Word(range) | SelectMode::Line(range) => {
-*range = tail_anchor.clone()..tail_anchor
-}
+SelectMode::Word(range) | SelectMode::Line(range) => *range = tail_anchor..tail_anchor,
 _ => {}
 }

@@ -2144,10 +2143,9 @@ impl Editor {
 ));
 if following_text_allows_autoclose && preceding_text_matches_prefix {
 let anchor = snapshot.anchor_before(selection.end);
-new_selections
-.push((selection.map(|_| anchor.clone()), text.len()));
+new_selections.push((selection.map(|_| anchor), text.len()));
 new_autoclose_regions.push((
-anchor.clone(),
+anchor,
 text.len(),
 selection.id,
 bracket_pair.clone(),

@@ -2168,10 +2166,8 @@ impl Editor {
 && text.as_ref() == region.pair.end.as_str();
 if should_skip {
 let anchor = snapshot.anchor_after(selection.end);
-new_selections.push((
-selection.map(|_| anchor.clone()),
-region.pair.end.len(),
-));
+new_selections
+.push((selection.map(|_| anchor), region.pair.end.len()));
 continue;
 }
 }

@@ -2203,7 +2199,7 @@ impl Editor {
 // text with the given input and move the selection to the end of the
 // newly inserted text.
 let anchor = snapshot.anchor_after(selection.end);
-new_selections.push((selection.map(|_| anchor.clone()), 0));
+new_selections.push((selection.map(|_| anchor), 0));
 edits.push((selection.start..selection.end, text.clone()));
 }

@@ -2305,7 +2301,7 @@ impl Editor {
 }

 let anchor = buffer.anchor_after(end);
-let new_selection = selection.map(|_| anchor.clone());
+let new_selection = selection.map(|_| anchor);
 (
 (start..end, new_text),
 (insert_extra_newline, new_selection),

@@ -2385,7 +2381,7 @@ impl Editor {
 .iter()
 .map(|s| {
 let anchor = snapshot.anchor_after(s.end);
-s.map(|_| anchor.clone())
+s.map(|_| anchor)
 })
 .collect::<Vec<_>>()
 };

@@ -3649,7 +3645,7 @@ impl Editor {
 String::new(),
 ));
 let insertion_anchor = buffer.anchor_after(insertion_point);
-edits.push((insertion_anchor.clone()..insertion_anchor, text));
+edits.push((insertion_anchor..insertion_anchor, text));

 let row_delta = range_to_move.start.row - insertion_point.row + 1;

@@ -3754,7 +3750,7 @@ impl Editor {
 String::new(),
 ));
 let insertion_anchor = buffer.anchor_after(insertion_point);
-edits.push((insertion_anchor.clone()..insertion_anchor, text));
+edits.push((insertion_anchor..insertion_anchor, text));

 let row_delta = insertion_point.row - range_to_move.end.row + 1;

@@ -4624,7 +4620,7 @@ impl Editor {
 cursor_anchor: position,
 cursor_position: point,
 scroll_position: self.scroll_position,
-scroll_top_anchor: self.scroll_top_anchor.clone(),
+scroll_top_anchor: self.scroll_top_anchor,
 scroll_top_row,
 }),
 cx,

@@ -6536,15 +6532,13 @@ impl Editor {
 .as_singleton()
 .and_then(|b| b.read(cx).file()),
 ) {
-project.read(cx).client().report_event(
-name,
-json!({
-"File Extension": file
-.path()
-.extension()
-.and_then(|e| e.to_str())
-}),
-);
+let extension = Path::new(file.file_name(cx))
+.extension()
+.and_then(|e| e.to_str());
+project
+.read(cx)
+.client()
+.report_event(name, json!({ "File Extension": extension }));
 }
 }
 }

@@ -542,7 +542,7 @@ fn test_navigation_history(cx: &mut gpui::MutableAppContext) {
 // Set scroll position to check later
 editor.set_scroll_position(Vector2F::new(5.5, 5.5), cx);
 let original_scroll_position = editor.scroll_position;
-let original_scroll_top_anchor = editor.scroll_top_anchor.clone();
+let original_scroll_top_anchor = editor.scroll_top_anchor;

 // Jump to the end of the document and adjust scroll
 editor.move_to_end(&MoveToEnd, cx);

@@ -556,12 +556,12 @@ fn test_navigation_history(cx: &mut gpui::MutableAppContext) {
 assert_eq!(editor.scroll_top_anchor, original_scroll_top_anchor);

 // Ensure we don't panic when navigation data contains invalid anchors *and* points.
-let mut invalid_anchor = editor.scroll_top_anchor.clone();
+let mut invalid_anchor = editor.scroll_top_anchor;
 invalid_anchor.text_anchor.buffer_id = Some(999);
 let invalid_point = Point::new(9999, 0);
 editor.navigate(
 Box::new(NavigationData {
-cursor_anchor: invalid_anchor.clone(),
+cursor_anchor: invalid_anchor,
 cursor_position: invalid_point,
 scroll_top_anchor: invalid_anchor,
 scroll_top_row: invalid_point.row,

@@ -4146,14 +4146,26 @@ async fn test_completion(cx: &mut gpui::TestAppContext) {

 handle_resolve_completion_request(
 &mut cx,
-Some((
-indoc! {"
-one.second_completion
-two
-threeˇ
-"},
-"\nadditional edit",
-)),
+Some(vec![
+(
+//This overlaps with the primary completion edit which is
+//misbehavior from the LSP spec, test that we filter it out
+indoc! {"
+one.second_ˇcompletion
+two
+threeˇ
+"},
+"overlapping aditional edit",
+),
+(
+indoc! {"
+one.second_completion
+two
+threeˇ
+"},
+"\nadditional edit",
+),
+]),
 )
 .await;
 apply_additional_edits.await.unwrap();

@@ -4303,19 +4315,24 @@ async fn test_completion(cx: &mut gpui::TestAppContext) {

 async fn handle_resolve_completion_request<'a>(
 cx: &mut EditorLspTestContext<'a>,
-edit: Option<(&'static str, &'static str)>,
+edits: Option<Vec<(&'static str, &'static str)>>,
 ) {
-let edit = edit.map(|(marked_string, new_text)| {
-let (_, marked_ranges) = marked_text_ranges(marked_string, false);
-let replace_range = cx.to_lsp_range(marked_ranges[0].clone());
-vec![lsp::TextEdit::new(replace_range, new_text.to_string())]
+let edits = edits.map(|edits| {
+edits
+.iter()
+.map(|(marked_string, new_text)| {
+let (_, marked_ranges) = marked_text_ranges(marked_string, false);
+let replace_range = cx.to_lsp_range(marked_ranges[0].clone());
+lsp::TextEdit::new(replace_range, new_text.to_string())
+})
+.collect::<Vec<_>>()
 });

 cx.handle_request::<lsp::request::ResolveCompletionItem, _, _>(move |_, _, _| {
-let edit = edit.clone();
+let edits = edits.clone();
 async move {
 Ok(lsp::CompletionItem {
-additional_text_edits: edit,
+additional_text_edits: edits,
 ..Default::default()
 })
 }

@@ -4701,9 +4718,7 @@ fn test_refresh_selections(cx: &mut gpui::MutableAppContext) {

 // Refreshing selections is a no-op when excerpts haven't changed.
 editor.update(cx, |editor, cx| {
-editor.change_selections(None, cx, |s| {
-s.refresh();
-});
+editor.change_selections(None, cx, |s| s.refresh());
 assert_eq!(
 editor.selections.ranges(cx),
 [

@@ -4714,7 +4729,7 @@ fn test_refresh_selections(cx: &mut gpui::MutableAppContext) {
 });

 multibuffer.update(cx, |multibuffer, cx| {
-multibuffer.remove_excerpts([&excerpt1_id.unwrap()], cx);
+multibuffer.remove_excerpts([excerpt1_id.unwrap()], cx);
 });
 editor.update(cx, |editor, cx| {
 // Removing an excerpt causes the first selection to become degenerate.

@@ -4728,9 +4743,7 @@ fn test_refresh_selections(cx: &mut gpui::MutableAppContext) {

 // Refreshing selections will relocate the first selection to the original buffer
 // location.
-editor.change_selections(None, cx, |s| {
-s.refresh();
-});
+editor.change_selections(None, cx, |s| s.refresh());
 assert_eq!(
 editor.selections.ranges(cx),
 [

@@ -4784,7 +4797,7 @@ fn test_refresh_selections_while_selecting_with_mouse(cx: &mut gpui::MutableAppC
 });

 multibuffer.update(cx, |multibuffer, cx| {
-multibuffer.remove_excerpts([&excerpt1_id.unwrap()], cx);
+multibuffer.remove_excerpts([excerpt1_id.unwrap()], cx);
 });
 editor.update(cx, |editor, cx| {
 assert_eq!(

@@ -4793,9 +4806,7 @@ fn test_refresh_selections_while_selecting_with_mouse(cx: &mut gpui::MutableAppC
 );

 // Ensure we don't panic when selections are refreshed and that the pending selection is finalized.
-editor.change_selections(None, cx, |s| {
-s.refresh();
-});
+editor.change_selections(None, cx, |s| s.refresh());
 assert_eq!(
 editor.selections.ranges(cx),
 [Point::new(0, 3)..Point::new(0, 3)]

@@ -192,8 +192,14 @@ impl EditorElement {
 .on_scroll({
 let position_map = position_map.clone();
 move |e, cx| {
-if !Self::scroll(e.position, e.delta, e.precise, &position_map, bounds, cx)
-{
+if !Self::scroll(
+e.position,
+*e.delta.raw(),
+e.delta.precise(),
+&position_map,
+bounds,
+cx,
+) {
 cx.propagate_event()
 }
 }

@@ -1328,12 +1334,13 @@ impl EditorElement {
 })
 }
 TransformBlock::ExcerptHeader {
-key,
+id,
 buffer,
 range,
 starts_new_buffer,
 ..
 } => {
+let id = *id;
 let jump_icon = project::File::from_dyn(buffer.file()).map(|file| {
 let jump_position = range
 .primary

@@ -1350,7 +1357,7 @@ impl EditorElement {

 enum JumpIcon {}
 cx.render(&editor, |_, cx| {
-MouseEventHandler::<JumpIcon>::new(*key, cx, |state, _| {
+MouseEventHandler::<JumpIcon>::new(id.into(), cx, |state, _| {
 let style = style.jump_icon.style_for(state, false);
 Svg::new("icons/arrow_up_right_8.svg")
 .with_color(style.color)

@@ -1369,7 +1376,7 @@ impl EditorElement {
 cx.dispatch_action(jump_action.clone())
 })
 .with_tooltip::<JumpIcon, _>(
-*key,
+id.into(),
 "Jump to Buffer".to_string(),
 Some(Box::new(crate::OpenExcerpts)),
 tooltip_style.clone(),

@@ -1600,16 +1607,13 @@ impl Element for EditorElement {

 highlighted_rows = view.highlighted_rows();
 let theme = cx.global::<Settings>().theme.as_ref();
-highlighted_ranges = view.background_highlights_in_range(
-start_anchor.clone()..end_anchor.clone(),
-&display_map,
-theme,
-);
+highlighted_ranges =
+view.background_highlights_in_range(start_anchor..end_anchor, &display_map, theme);

 let mut remote_selections = HashMap::default();
 for (replica_id, line_mode, cursor_shape, selection) in display_map
 .buffer_snapshot
-.remote_selections_in_range(&(start_anchor.clone()..end_anchor.clone()))
+.remote_selections_in_range(&(start_anchor..end_anchor))
 {
 // The local selections match the leader's selections.
 if Some(replica_id) == view.leader_replica_id {

@@ -221,7 +221,7 @@ fn show_hover(

 start..end
 } else {
-anchor.clone()..anchor.clone()
+anchor..anchor
 };

 Some(InfoPopover {

@@ -819,11 +819,20 @@ impl StatusItemView for CursorPosition {

 fn path_for_buffer<'a>(
 buffer: &ModelHandle<MultiBuffer>,
-mut height: usize,
+height: usize,
 include_filename: bool,
 cx: &'a AppContext,
 ) -> Option<Cow<'a, Path>> {
 let file = buffer.read(cx).as_singleton()?.read(cx).file()?;
+path_for_file(file, height, include_filename, cx)
+}
+
+fn path_for_file<'a>(
+file: &'a dyn language::File,
+mut height: usize,
+include_filename: bool,
+cx: &'a AppContext,
+) -> Option<Cow<'a, Path>> {
 // Ensure we always render at least the filename.
 height += 1;

@@ -845,13 +854,82 @@ fn path_for_buffer<'a>(
 if include_filename {
 Some(full_path.into())
 } else {
-Some(full_path.parent().unwrap().to_path_buf().into())
+Some(full_path.parent()?.to_path_buf().into())
 }
 } else {
-let mut path = file.path().strip_prefix(prefix).unwrap();
+let mut path = file.path().strip_prefix(prefix).ok()?;
 if !include_filename {
-path = path.parent().unwrap();
+path = path.parent()?;
 }
 Some(path.into())
 }
 }
+
+#[cfg(test)]
+mod tests {
+use super::*;
+use gpui::MutableAppContext;
+use std::{
+path::{Path, PathBuf},
+sync::Arc,
+};
+
+#[gpui::test]
+fn test_path_for_file(cx: &mut MutableAppContext) {
+let file = TestFile {
+path: Path::new("").into(),
+full_path: PathBuf::from(""),
+};
+assert_eq!(path_for_file(&file, 0, false, cx), None);
+}
+
+struct TestFile {
+path: Arc<Path>,
+full_path: PathBuf,
+}
+
+impl language::File for TestFile {
+fn path(&self) -> &Arc<Path> {
+&self.path
+}
+
+fn full_path(&self, _: &gpui::AppContext) -> PathBuf {
+self.full_path.clone()
+}
+
+fn as_local(&self) -> Option<&dyn language::LocalFile> {
+todo!()
+}
+
+fn mtime(&self) -> std::time::SystemTime {
+todo!()
+}
+
+fn file_name<'a>(&'a self, _: &'a gpui::AppContext) -> &'a std::ffi::OsStr {
+todo!()
+}
+
+fn is_deleted(&self) -> bool {
+todo!()
+}
+
+fn save(
+&self,
+_: u64,
+_: language::Rope,
+_: clock::Global,
+_: project::LineEnding,
+_: &mut MutableAppContext,
+) -> gpui::Task<anyhow::Result<(clock::Global, String, std::time::SystemTime)>> {
+todo!()
+}
+
+fn as_any(&self) -> &dyn std::any::Any {
+todo!()
+}
+
+fn to_proto(&self) -> rpc::proto::File {
+todo!()
+}
+}
+}

@@ -11,7 +11,7 @@ use language::{
 char_kind, AutoindentMode, Buffer, BufferChunks, BufferSnapshot, CharKind, Chunk, CursorShape,
 DiagnosticEntry, Event, File, IndentSize, Language, OffsetRangeExt, OffsetUtf16, Outline,
 OutlineItem, Point, PointUtf16, Selection, TextDimension, ToOffset as _, ToOffsetUtf16 as _,
-ToPoint as _, ToPointUtf16 as _, TransactionId,
+ToPoint as _, ToPointUtf16 as _, TransactionId, Unclipped,
 };
 use smallvec::SmallVec;
 use std::{

@@ -36,13 +36,13 @@ use util::post_inc;

 const NEWLINES: &[u8] = &[b'\n'; u8::MAX as usize];

-pub type ExcerptId = Locator;
+#[derive(Debug, Default, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
+pub struct ExcerptId(usize);

 pub struct MultiBuffer {
 snapshot: RefCell<MultiBufferSnapshot>,
 buffers: RefCell<HashMap<usize, BufferState>>,
-used_excerpt_ids: SumTree<ExcerptId>,
-next_excerpt_key: usize,
+next_excerpt_id: usize,
 subscriptions: Topic,
 singleton: bool,
 replica_id: ReplicaId,

@@ -92,7 +92,7 @@ struct BufferState {
 last_diagnostics_update_count: usize,
 last_file_update_count: usize,
 last_git_diff_update_count: usize,
-excerpts: Vec<ExcerptId>,
+excerpts: Vec<Locator>,
 _subscriptions: [gpui::Subscription; 2],
 }

@@ -100,6 +100,7 @@ struct BufferState {
 pub struct MultiBufferSnapshot {
 singleton: bool,
 excerpts: SumTree<Excerpt>,
+excerpt_ids: SumTree<ExcerptIdMapping>,
 parse_count: usize,
 diagnostics_update_count: usize,
 trailing_excerpt_update_count: usize,

@@ -111,7 +112,6 @@ pub struct MultiBufferSnapshot {

 pub struct ExcerptBoundary {
 pub id: ExcerptId,
-pub key: usize,
 pub row: u32,
 pub buffer: BufferSnapshot,
 pub range: ExcerptRange<text::Anchor>,

@@ -121,7 +121,7 @@ pub struct ExcerptBoundary {
 #[derive(Clone)]
 struct Excerpt {
 id: ExcerptId,
-key: usize,
+locator: Locator,
 buffer_id: usize,
 buffer: BufferSnapshot,
 range: ExcerptRange<text::Anchor>,

@@ -130,6 +130,12 @@ struct Excerpt {
 has_trailing_newline: bool,
 }

+#[derive(Clone, Debug)]
+struct ExcerptIdMapping {
+id: ExcerptId,
+locator: Locator,
+}
+
 #[derive(Clone, Debug, Eq, PartialEq)]
 pub struct ExcerptRange<T> {
 pub context: Range<T>,

|
||||||
#[derive(Clone, Debug, Default)]
|
#[derive(Clone, Debug, Default)]
|
||||||
struct ExcerptSummary {
|
struct ExcerptSummary {
|
||||||
excerpt_id: ExcerptId,
|
excerpt_id: ExcerptId,
|
||||||
|
excerpt_locator: Locator,
|
||||||
max_buffer_row: u32,
|
max_buffer_row: u32,
|
||||||
text: TextSummary,
|
text: TextSummary,
|
||||||
}
|
}
|
||||||
|
@ -178,8 +185,7 @@ impl MultiBuffer {
|
||||||
Self {
|
Self {
|
||||||
snapshot: Default::default(),
|
snapshot: Default::default(),
|
||||||
buffers: Default::default(),
|
buffers: Default::default(),
|
||||||
used_excerpt_ids: Default::default(),
|
next_excerpt_id: 1,
|
||||||
next_excerpt_key: Default::default(),
|
|
||||||
subscriptions: Default::default(),
|
subscriptions: Default::default(),
|
||||||
singleton: false,
|
singleton: false,
|
||||||
replica_id,
|
replica_id,
|
||||||
|
@ -218,8 +224,7 @@ impl MultiBuffer {
|
||||||
Self {
|
Self {
|
||||||
snapshot: RefCell::new(self.snapshot.borrow().clone()),
|
snapshot: RefCell::new(self.snapshot.borrow().clone()),
|
||||||
buffers: RefCell::new(buffers),
|
buffers: RefCell::new(buffers),
|
||||||
used_excerpt_ids: self.used_excerpt_ids.clone(),
|
next_excerpt_id: 1,
|
||||||
next_excerpt_key: self.next_excerpt_key,
|
|
||||||
subscriptions: Default::default(),
|
subscriptions: Default::default(),
|
||||||
singleton: self.singleton,
|
singleton: self.singleton,
|
||||||
replica_id: self.replica_id,
|
replica_id: self.replica_id,
|
||||||
|
@ -610,11 +615,14 @@ impl MultiBuffer {
|
||||||
let mut selections_by_buffer: HashMap<usize, Vec<Selection<text::Anchor>>> =
|
let mut selections_by_buffer: HashMap<usize, Vec<Selection<text::Anchor>>> =
|
||||||
Default::default();
|
Default::default();
|
||||||
let snapshot = self.read(cx);
|
let snapshot = self.read(cx);
|
||||||
let mut cursor = snapshot.excerpts.cursor::<Option<&ExcerptId>>();
|
let mut cursor = snapshot.excerpts.cursor::<Option<&Locator>>();
|
||||||
for selection in selections {
|
for selection in selections {
|
||||||
cursor.seek(&Some(&selection.start.excerpt_id), Bias::Left, &());
|
let start_locator = snapshot.excerpt_locator_for_id(selection.start.excerpt_id);
|
||||||
|
let end_locator = snapshot.excerpt_locator_for_id(selection.end.excerpt_id);
|
||||||
|
|
||||||
|
cursor.seek(&Some(start_locator), Bias::Left, &());
|
||||||
while let Some(excerpt) = cursor.item() {
|
while let Some(excerpt) = cursor.item() {
|
||||||
if excerpt.id > selection.end.excerpt_id {
|
if excerpt.locator > *end_locator {
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -745,7 +753,7 @@ impl MultiBuffer {
|
||||||
where
|
where
|
||||||
O: text::ToOffset,
|
O: text::ToOffset,
|
||||||
{
|
{
|
||||||
self.insert_excerpts_after(&ExcerptId::max(), buffer, ranges, cx)
|
self.insert_excerpts_after(ExcerptId::max(), buffer, ranges, cx)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn push_excerpts_with_context_lines<O>(
|
pub fn push_excerpts_with_context_lines<O>(
|
||||||
|
@ -818,7 +826,7 @@ impl MultiBuffer {
|
||||||
|
|
||||||
pub fn insert_excerpts_after<O>(
|
pub fn insert_excerpts_after<O>(
|
||||||
&mut self,
|
&mut self,
|
||||||
prev_excerpt_id: &ExcerptId,
|
prev_excerpt_id: ExcerptId,
|
||||||
buffer: ModelHandle<Buffer>,
|
buffer: ModelHandle<Buffer>,
|
||||||
ranges: impl IntoIterator<Item = ExcerptRange<O>>,
|
ranges: impl IntoIterator<Item = ExcerptRange<O>>,
|
||||||
cx: &mut ModelContext<Self>,
|
cx: &mut ModelContext<Self>,
|
||||||
|
@ -854,8 +862,12 @@ impl MultiBuffer {
|
||||||
});
|
});
|
||||||
|
|
||||||
let mut snapshot = self.snapshot.borrow_mut();
|
let mut snapshot = self.snapshot.borrow_mut();
|
||||||
let mut cursor = snapshot.excerpts.cursor::<Option<&ExcerptId>>();
|
|
||||||
let mut new_excerpts = cursor.slice(&Some(prev_excerpt_id), Bias::Right, &());
|
let mut prev_locator = snapshot.excerpt_locator_for_id(prev_excerpt_id).clone();
|
||||||
|
let mut new_excerpt_ids = mem::take(&mut snapshot.excerpt_ids);
|
||||||
|
let mut cursor = snapshot.excerpts.cursor::<Option<&Locator>>();
|
||||||
|
let mut new_excerpts = cursor.slice(&prev_locator, Bias::Right, &());
|
||||||
|
prev_locator = cursor.start().unwrap_or(Locator::min_ref()).clone();
|
||||||
|
|
||||||
let edit_start = new_excerpts.summary().text.len;
|
let edit_start = new_excerpts.summary().text.len;
|
||||||
new_excerpts.update_last(
|
new_excerpts.update_last(
|
||||||
|
@ -865,25 +877,17 @@ impl MultiBuffer {
|
||||||
&(),
|
&(),
|
||||||
);
|
);
|
||||||
|
|
||||||
let mut used_cursor = self.used_excerpt_ids.cursor::<Locator>();
|
let next_locator = if let Some(excerpt) = cursor.item() {
|
||||||
used_cursor.seek(prev_excerpt_id, Bias::Right, &());
|
excerpt.locator.clone()
|
||||||
let mut prev_id = if let Some(excerpt_id) = used_cursor.prev_item() {
|
|
||||||
excerpt_id.clone()
|
|
||||||
} else {
|
} else {
|
||||||
ExcerptId::min()
|
Locator::max()
|
||||||
};
|
};
|
||||||
let next_id = if let Some(excerpt_id) = used_cursor.item() {
|
|
||||||
excerpt_id.clone()
|
|
||||||
} else {
|
|
||||||
ExcerptId::max()
|
|
||||||
};
|
|
||||||
drop(used_cursor);
|
|
||||||
|
|
||||||
let mut ids = Vec::new();
|
let mut ids = Vec::new();
|
||||||
while let Some(range) = ranges.next() {
|
while let Some(range) = ranges.next() {
|
||||||
let id = ExcerptId::between(&prev_id, &next_id);
|
let locator = Locator::between(&prev_locator, &next_locator);
|
||||||
if let Err(ix) = buffer_state.excerpts.binary_search(&id) {
|
if let Err(ix) = buffer_state.excerpts.binary_search(&locator) {
|
||||||
buffer_state.excerpts.insert(ix, id.clone());
|
buffer_state.excerpts.insert(ix, locator.clone());
|
||||||
}
|
}
|
||||||
let range = ExcerptRange {
|
let range = ExcerptRange {
|
||||||
context: buffer_snapshot.anchor_before(&range.context.start)
|
context: buffer_snapshot.anchor_before(&range.context.start)
|
||||||
|
@@ -893,22 +897,20 @@ impl MultiBuffer {
 ..buffer_snapshot.anchor_after(&primary.end)
 }),
 };
+let id = ExcerptId(post_inc(&mut self.next_excerpt_id));
 let excerpt = Excerpt::new(
-id.clone(),
-post_inc(&mut self.next_excerpt_key),
+id,
+locator.clone(),
 buffer_id,
 buffer_snapshot.clone(),
 range,
 ranges.peek().is_some() || cursor.item().is_some(),
 );
 new_excerpts.push(excerpt, &());
-prev_id = id.clone();
+prev_locator = locator.clone();
+new_excerpt_ids.push(ExcerptIdMapping { id, locator }, &());
 ids.push(id);
 }
-self.used_excerpt_ids.edit(
-ids.iter().cloned().map(sum_tree::Edit::Insert).collect(),
-&(),
-);

 let edit_end = new_excerpts.summary().text.len;

@@ -917,6 +919,7 @@ impl MultiBuffer {
 new_excerpts.push_tree(suffix, &());
 drop(cursor);
 snapshot.excerpts = new_excerpts;
+snapshot.excerpt_ids = new_excerpt_ids;
 if changed_trailing_excerpt {
 snapshot.trailing_excerpt_update_count += 1;
 }

@@ -956,16 +959,16 @@ impl MultiBuffer {
 let mut excerpts = Vec::new();
 let snapshot = self.read(cx);
 let buffers = self.buffers.borrow();
-let mut cursor = snapshot.excerpts.cursor::<Option<&ExcerptId>>();
-for excerpt_id in buffers
+let mut cursor = snapshot.excerpts.cursor::<Option<&Locator>>();
+for locator in buffers
 .get(&buffer.id())
 .map(|state| &state.excerpts)
 .into_iter()
 .flatten()
 {
-cursor.seek_forward(&Some(excerpt_id), Bias::Left, &());
+cursor.seek_forward(&Some(locator), Bias::Left, &());
 if let Some(excerpt) = cursor.item() {
-if excerpt.id == *excerpt_id {
+if excerpt.locator == *locator {
 excerpts.push((excerpt.id.clone(), excerpt.range.clone()));
 }
 }

@@ -975,10 +978,11 @@ impl MultiBuffer {
 }

 pub fn excerpt_ids(&self) -> Vec<ExcerptId> {
-self.buffers
+self.snapshot
 .borrow()
-.values()
-.flat_map(|state| state.excerpts.iter().cloned())
+.excerpts
+.iter()
+.map(|entry| entry.id)
 .collect()
 }

@@ -1061,32 +1065,34 @@ impl MultiBuffer {
 result
 }

-pub fn remove_excerpts<'a>(
+pub fn remove_excerpts(
 &mut self,
-excerpt_ids: impl IntoIterator<Item = &'a ExcerptId>,
+excerpt_ids: impl IntoIterator<Item = ExcerptId>,
 cx: &mut ModelContext<Self>,
 ) {
 self.sync(cx);
 let mut buffers = self.buffers.borrow_mut();
 let mut snapshot = self.snapshot.borrow_mut();
 let mut new_excerpts = SumTree::new();
-let mut cursor = snapshot.excerpts.cursor::<(Option<&ExcerptId>, usize)>();
+let mut cursor = snapshot.excerpts.cursor::<(Option<&Locator>, usize)>();
 let mut edits = Vec::new();
 let mut excerpt_ids = excerpt_ids.into_iter().peekable();

-while let Some(mut excerpt_id) = excerpt_ids.next() {
+while let Some(excerpt_id) = excerpt_ids.next() {
 // Seek to the next excerpt to remove, preserving any preceding excerpts.
-new_excerpts.push_tree(cursor.slice(&Some(excerpt_id), Bias::Left, &()), &());
+let locator = snapshot.excerpt_locator_for_id(excerpt_id);
+new_excerpts.push_tree(cursor.slice(&Some(locator), Bias::Left, &()), &());

 if let Some(mut excerpt) = cursor.item() {
-if excerpt.id != *excerpt_id {
+if excerpt.id != excerpt_id {
 continue;
 }
 let mut old_start = cursor.start().1;

 // Skip over the removed excerpt.
-loop {
+'remove_excerpts: loop {
 if let Some(buffer_state) = buffers.get_mut(&excerpt.buffer_id) {
-buffer_state.excerpts.retain(|id| id != excerpt_id);
+buffer_state.excerpts.retain(|l| l != &excerpt.locator);
 if buffer_state.excerpts.is_empty() {
 buffers.remove(&excerpt.buffer_id);
 }

@@ -1094,14 +1100,16 @@ impl MultiBuffer {
 cursor.next(&());

 // Skip over any subsequent excerpts that are also removed.
-if let Some(&next_excerpt_id) = excerpt_ids.peek() {
+while let Some(&next_excerpt_id) = excerpt_ids.peek() {
+let next_locator = snapshot.excerpt_locator_for_id(next_excerpt_id);
 if let Some(next_excerpt) = cursor.item() {
-if next_excerpt.id == *next_excerpt_id {
+if next_excerpt.locator == *next_locator {
+excerpt_ids.next();
 excerpt = next_excerpt;
-excerpt_id = excerpt_ids.next().unwrap();
-continue;
+continue 'remove_excerpts;
 }
 }
+break;
 }

 break;

@@ -1128,6 +1136,7 @@ impl MultiBuffer {
 new_excerpts.push_tree(suffix, &());
 drop(cursor);
 snapshot.excerpts = new_excerpts;

 if changed_trailing_excerpt {
 snapshot.trailing_excerpt_update_count += 1;
 }

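`remove_excerpts` now takes its ids by value and drives removal with a labeled loop, so the inner peek at the next id can restart the outer skip step directly instead of threading a `mut excerpt_id` through. A stripped-down sketch of that control flow, with plain integers standing in for excerpts and ids (a hypothetical helper, not the real cursor-based code):

```rust
// Minimal sketch of the labeled-loop pattern used above: consume a sorted list of
// ids to remove while walking an ordered sequence of excerpts, restarting the
// "skip removed excerpts" step whenever the next id matches the next excerpt.
fn skip_removed(excerpts: &[usize], ids_to_remove: &[usize]) -> Vec<usize> {
    let mut kept = Vec::new();
    let mut ids = ids_to_remove.iter().copied().peekable();
    let mut excerpts = excerpts.iter().copied().peekable();

    while let Some(id) = ids.next() {
        // Keep everything before the excerpt being removed.
        while let Some(&excerpt) = excerpts.peek() {
            if excerpt == id {
                break;
            }
            kept.push(excerpt);
            excerpts.next();
        }

        'remove_excerpts: loop {
            excerpts.next(); // drop the removed excerpt
            // Also drop any immediately following excerpts that are next in the removal list.
            while let Some(&next_id) = ids.peek() {
                if excerpts.peek() == Some(&next_id) {
                    ids.next();
                    continue 'remove_excerpts;
                }
                break;
            }
            break;
        }
    }
    kept.extend(excerpts);
    kept
}

fn main() {
    assert_eq!(skip_removed(&[1, 2, 3, 4, 5], &[2, 3, 5]), vec![1, 4]);
}
```
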
@@ -1307,7 +1316,7 @@ impl MultiBuffer {
 buffer_state
 .excerpts
 .iter()
-.map(|excerpt_id| (excerpt_id, buffer_state.buffer.clone(), buffer_edited)),
+.map(|locator| (locator, buffer_state.buffer.clone(), buffer_edited)),
 );
 }

@@ -1333,14 +1342,14 @@ impl MultiBuffer {
 snapshot.is_dirty = is_dirty;
 snapshot.has_conflict = has_conflict;

-excerpts_to_edit.sort_unstable_by_key(|(excerpt_id, _, _)| *excerpt_id);
+excerpts_to_edit.sort_unstable_by_key(|(locator, _, _)| *locator);

 let mut edits = Vec::new();
 let mut new_excerpts = SumTree::new();
-let mut cursor = snapshot.excerpts.cursor::<(Option<&ExcerptId>, usize)>();
+let mut cursor = snapshot.excerpts.cursor::<(Option<&Locator>, usize)>();

-for (id, buffer, buffer_edited) in excerpts_to_edit {
-new_excerpts.push_tree(cursor.slice(&Some(id), Bias::Left, &()), &());
+for (locator, buffer, buffer_edited) in excerpts_to_edit {
+new_excerpts.push_tree(cursor.slice(&Some(locator), Bias::Left, &()), &());
 let old_excerpt = cursor.item().unwrap();
 let buffer_id = buffer.id();
 let buffer = buffer.read(cx);

@@ -1365,8 +1374,8 @@ impl MultiBuffer {
 );

 new_excerpt = Excerpt::new(
-id.clone(),
-old_excerpt.key,
+old_excerpt.id,
+locator.clone(),
 buffer_id,
 buffer.snapshot(),
 old_excerpt.range.clone(),

@@ -1467,13 +1476,7 @@ impl MultiBuffer {
 continue;
 }

-let excerpt_ids = self
-.buffers
-.borrow()
-.values()
-.flat_map(|b| &b.excerpts)
-.cloned()
-.collect::<Vec<_>>();
+let excerpt_ids = self.excerpt_ids();
 if excerpt_ids.is_empty() || (rng.gen() && excerpt_ids.len() < max_excerpts) {
 let buffer_handle = if rng.gen() || self.buffers.borrow().is_empty() {
 let text = RandomCharIter::new(&mut *rng).take(10).collect::<String>();

@@ -1511,24 +1514,26 @@ impl MultiBuffer {
 log::info!(
 "Inserting excerpts from buffer {} and ranges {:?}: {:?}",
 buffer_handle.id(),
-ranges,
+ranges.iter().map(|r| &r.context).collect::<Vec<_>>(),
 ranges
 .iter()
-.map(|range| &buffer_text[range.context.clone()])
+.map(|r| &buffer_text[r.context.clone()])
 .collect::<Vec<_>>()
 );

 let excerpt_id = self.push_excerpts(buffer_handle.clone(), ranges, cx);
-log::info!("Inserted with id: {:?}", excerpt_id);
+log::info!("Inserted with ids: {:?}", excerpt_id);
 } else {
 let remove_count = rng.gen_range(1..=excerpt_ids.len());
 let mut excerpts_to_remove = excerpt_ids
 .choose_multiple(rng, remove_count)
 .cloned()
 .collect::<Vec<_>>();
-excerpts_to_remove.sort();
+let snapshot = self.snapshot.borrow();
+excerpts_to_remove.sort_unstable_by(|a, b| a.cmp(b, &*snapshot));
+drop(snapshot);
 log::info!("Removing excerpts {:?}", excerpts_to_remove);
-self.remove_excerpts(&excerpts_to_remove, cx);
+self.remove_excerpts(excerpts_to_remove, cx);
 }
 }
 }

@@ -1563,6 +1568,38 @@ impl MultiBuffer {
 } else {
 self.randomly_edit_excerpts(rng, mutation_count, cx);
 }

+self.check_invariants(cx);
+}
+
+fn check_invariants(&self, cx: &mut ModelContext<Self>) {
+let snapshot = self.read(cx);
+let excerpts = snapshot.excerpts.items(&());
+let excerpt_ids = snapshot.excerpt_ids.items(&());
+
+for (ix, excerpt) in excerpts.iter().enumerate() {
+if ix == 0 {
+if excerpt.locator <= Locator::min() {
+panic!("invalid first excerpt locator {:?}", excerpt.locator);
+}
+} else {
+if excerpt.locator <= excerpts[ix - 1].locator {
+panic!("excerpts are out-of-order: {:?}", excerpts);
+}
+}
+}
+
+for (ix, entry) in excerpt_ids.iter().enumerate() {
+if ix == 0 {
+if entry.id.cmp(&ExcerptId::min(), &*snapshot).is_le() {
+panic!("invalid first excerpt id {:?}", entry.id);
+}
+} else {
+if entry.id <= excerpt_ids[ix - 1].id {
+panic!("excerpt ids are out-of-order: {:?}", excerpt_ids);
+}
+}
+}
 }
 }

@@ -1749,20 +1786,20 @@ impl MultiBufferSnapshot {
 *cursor.start() + overshoot
 }

-pub fn clip_point_utf16(&self, point: PointUtf16, bias: Bias) -> PointUtf16 {
+pub fn clip_point_utf16(&self, point: Unclipped<PointUtf16>, bias: Bias) -> PointUtf16 {
 if let Some((_, _, buffer)) = self.as_singleton() {
 return buffer.clip_point_utf16(point, bias);
 }

 let mut cursor = self.excerpts.cursor::<PointUtf16>();
-cursor.seek(&point, Bias::Right, &());
+cursor.seek(&point.0, Bias::Right, &());
 let overshoot = if let Some(excerpt) = cursor.item() {
 let excerpt_start = excerpt
 .buffer
 .offset_to_point_utf16(excerpt.range.context.start.to_offset(&excerpt.buffer));
 let buffer_point = excerpt
 .buffer
-.clip_point_utf16(excerpt_start + (point - cursor.start()), bias);
+.clip_point_utf16(Unclipped(excerpt_start + (point.0 - cursor.start())), bias);
 buffer_point.saturating_sub(excerpt_start)
 } else {
 PointUtf16::zero()

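`clip_point_utf16` now takes `Unclipped<PointUtf16>`, so "possibly out-of-range coordinate" is visible in the signature. The diff only shows call sites, not the wrapper itself; a minimal newtype along these lines (an assumption about its shape, not the crate's actual definition) is enough to see the pattern:

```rust
// Sketch of an "unclipped" wrapper: a coordinate that may not lie on a valid
// position until it has been clipped against a snapshot. The real text crate's
// Unclipped type is assumed to look roughly like this.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub struct Unclipped<T>(pub T);

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub struct PointUtf16 {
    pub row: u32,
    pub column: u32,
}

struct Line {
    len_utf16: u32,
}

struct Snapshot {
    // Assumed non-empty for this sketch.
    lines: Vec<Line>,
}

impl Snapshot {
    // Accepting Unclipped<PointUtf16> documents that the input may be out of range;
    // the plain PointUtf16 return type guarantees the output is a real position.
    fn clip_point_utf16(&self, point: Unclipped<PointUtf16>) -> PointUtf16 {
        let row = point.0.row.min(self.lines.len().saturating_sub(1) as u32);
        let column = point.0.column.min(self.lines[row as usize].len_utf16);
        PointUtf16 { row, column }
    }
}

fn main() {
    let snapshot = Snapshot {
        lines: vec![Line { len_utf16: 5 }, Line { len_utf16: 3 }],
    };
    let clipped = snapshot.clip_point_utf16(Unclipped(PointUtf16 { row: 9, column: 9 }));
    assert_eq!(clipped, PointUtf16 { row: 1, column: 3 });
}
```

The payoff is that a bare `PointUtf16` in a return position can be trusted to be valid, while anything user- or wire-supplied arrives wrapped.
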
@@ -2151,7 +2188,9 @@ impl MultiBufferSnapshot {
 D: TextDimension + Ord + Sub<D, Output = D>,
 {
 let mut cursor = self.excerpts.cursor::<ExcerptSummary>();
-cursor.seek(&Some(&anchor.excerpt_id), Bias::Left, &());
+let locator = self.excerpt_locator_for_id(anchor.excerpt_id);
+
+cursor.seek(locator, Bias::Left, &());
 if cursor.item().is_none() {
 cursor.next(&());
 }

@@ -2189,24 +2228,25 @@ impl MultiBufferSnapshot {
 let mut cursor = self.excerpts.cursor::<ExcerptSummary>();
 let mut summaries = Vec::new();
 while let Some(anchor) = anchors.peek() {
-let excerpt_id = &anchor.excerpt_id;
+let excerpt_id = anchor.excerpt_id;
 let excerpt_anchors = iter::from_fn(|| {
 let anchor = anchors.peek()?;
-if anchor.excerpt_id == *excerpt_id {
+if anchor.excerpt_id == excerpt_id {
 Some(&anchors.next().unwrap().text_anchor)
 } else {
 None
 }
 });

-cursor.seek_forward(&Some(excerpt_id), Bias::Left, &());
+let locator = self.excerpt_locator_for_id(excerpt_id);
+cursor.seek_forward(locator, Bias::Left, &());
 if cursor.item().is_none() {
 cursor.next(&());
 }

 let position = D::from_text_summary(&cursor.start().text);
 if let Some(excerpt) = cursor.item() {
-if excerpt.id == *excerpt_id {
+if excerpt.id == excerpt_id {
 let excerpt_buffer_start =
 excerpt.range.context.start.summary::<D>(&excerpt.buffer);
 let excerpt_buffer_end =

@@ -2240,13 +2280,18 @@ impl MultiBufferSnapshot {
 I: 'a + IntoIterator<Item = &'a Anchor>,
 {
 let mut anchors = anchors.into_iter().enumerate().peekable();
-let mut cursor = self.excerpts.cursor::<Option<&ExcerptId>>();
+let mut cursor = self.excerpts.cursor::<Option<&Locator>>();
+cursor.next(&());
+
 let mut result = Vec::new();

 while let Some((_, anchor)) = anchors.peek() {
-let old_excerpt_id = &anchor.excerpt_id;
+let old_excerpt_id = anchor.excerpt_id;

 // Find the location where this anchor's excerpt should be.
-cursor.seek_forward(&Some(old_excerpt_id), Bias::Left, &());
+let old_locator = self.excerpt_locator_for_id(old_excerpt_id);
+cursor.seek_forward(&Some(old_locator), Bias::Left, &());
+
 if cursor.item().is_none() {
 cursor.next(&());
 }

@@ -2256,27 +2301,22 @@ impl MultiBufferSnapshot {

 // Process all of the anchors for this excerpt.
 while let Some((_, anchor)) = anchors.peek() {
-if anchor.excerpt_id != *old_excerpt_id {
+if anchor.excerpt_id != old_excerpt_id {
 break;
 }
-let mut kept_position = false;
 let (anchor_ix, anchor) = anchors.next().unwrap();
-let mut anchor = anchor.clone();
+let mut anchor = *anchor;

-let id_invalid =
-*old_excerpt_id == ExcerptId::max() || *old_excerpt_id == ExcerptId::min();
-let still_exists = next_excerpt.map_or(false, |excerpt| {
-excerpt.id == *old_excerpt_id && excerpt.contains(&anchor)
-});

 // Leave min and max anchors unchanged if invalid or
 // if the old excerpt still exists at this location
-if id_invalid || still_exists {
-kept_position = true;
-}
+let mut kept_position = next_excerpt
+.map_or(false, |e| e.id == old_excerpt_id && e.contains(&anchor))
+|| old_excerpt_id == ExcerptId::max()
+|| old_excerpt_id == ExcerptId::min();

 // If the old excerpt no longer exists at this location, then attempt to
 // find an equivalent position for this anchor in an adjacent excerpt.
-else {
+if !kept_position {
 for excerpt in [next_excerpt, prev_excerpt].iter().filter_map(|e| *e) {
 if excerpt.contains(&anchor) {
 anchor.excerpt_id = excerpt.id.clone();

@@ -2285,6 +2325,7 @@ impl MultiBufferSnapshot {
 }
 }
 }

 // If there's no adjacent excerpt that contains the anchor's position,
 // then report that the anchor has lost its position.
 if !kept_position {

@@ -2354,7 +2395,7 @@ impl MultiBufferSnapshot {
 };
 }

-let mut cursor = self.excerpts.cursor::<(usize, Option<&ExcerptId>)>();
+let mut cursor = self.excerpts.cursor::<(usize, Option<ExcerptId>)>();
 cursor.seek(&offset, Bias::Right, &());
 if cursor.item().is_none() && offset == cursor.start().0 && bias == Bias::Left {
 cursor.prev(&());

@@ -2382,8 +2423,9 @@ impl MultiBufferSnapshot {
 }

 pub fn anchor_in_excerpt(&self, excerpt_id: ExcerptId, text_anchor: text::Anchor) -> Anchor {
-let mut cursor = self.excerpts.cursor::<Option<&ExcerptId>>();
-cursor.seek(&Some(&excerpt_id), Bias::Left, &());
+let locator = self.excerpt_locator_for_id(excerpt_id);
+let mut cursor = self.excerpts.cursor::<Option<&Locator>>();
+cursor.seek(locator, Bias::Left, &());
 if let Some(excerpt) = cursor.item() {
 if excerpt.id == excerpt_id {
 let text_anchor = excerpt.clip_anchor(text_anchor);

@@ -2401,7 +2443,7 @@ impl MultiBufferSnapshot {
 pub fn can_resolve(&self, anchor: &Anchor) -> bool {
 if anchor.excerpt_id == ExcerptId::min() || anchor.excerpt_id == ExcerptId::max() {
 true
-} else if let Some(excerpt) = self.excerpt(&anchor.excerpt_id) {
+} else if let Some(excerpt) = self.excerpt(anchor.excerpt_id) {
 excerpt.buffer.can_resolve(&anchor.text_anchor)
 } else {
 false

@@ -2456,7 +2498,6 @@ impl MultiBufferSnapshot {
 let starts_new_buffer = Some(excerpt.buffer_id) != prev_buffer_id;
 let boundary = ExcerptBoundary {
 id: excerpt.id.clone(),
-key: excerpt.key,
 row: cursor.start().1.row,
 buffer: excerpt.buffer.clone(),
 range: excerpt.range.clone(),

@@ -2678,8 +2719,8 @@ impl MultiBufferSnapshot {
 .flatten()
 .map(|item| OutlineItem {
 depth: item.depth,
-range: self.anchor_in_excerpt(excerpt_id.clone(), item.range.start)
-..self.anchor_in_excerpt(excerpt_id.clone(), item.range.end),
+range: self.anchor_in_excerpt(excerpt_id, item.range.start)
+..self.anchor_in_excerpt(excerpt_id, item.range.end),
 text: item.text,
 highlight_ranges: item.highlight_ranges,
 name_ranges: item.name_ranges,

@@ -2688,11 +2729,29 @@ impl MultiBufferSnapshot {
 ))
 }

-fn excerpt<'a>(&'a self, excerpt_id: &'a ExcerptId) -> Option<&'a Excerpt> {
-let mut cursor = self.excerpts.cursor::<Option<&ExcerptId>>();
-cursor.seek(&Some(excerpt_id), Bias::Left, &());
+fn excerpt_locator_for_id<'a>(&'a self, id: ExcerptId) -> &'a Locator {
+if id == ExcerptId::min() {
+Locator::min_ref()
+} else if id == ExcerptId::max() {
+Locator::max_ref()
+} else {
+let mut cursor = self.excerpt_ids.cursor::<ExcerptId>();
+cursor.seek(&id, Bias::Left, &());
+if let Some(entry) = cursor.item() {
+if entry.id == id {
+return &entry.locator;
+}
+}
+panic!("invalid excerpt id {:?}", id)
+}
+}
+
+fn excerpt<'a>(&'a self, excerpt_id: ExcerptId) -> Option<&'a Excerpt> {
+let mut cursor = self.excerpts.cursor::<Option<&Locator>>();
+let locator = self.excerpt_locator_for_id(excerpt_id);
+cursor.seek(&Some(locator), Bias::Left, &());
 if let Some(excerpt) = cursor.item() {
-if excerpt.id == *excerpt_id {
+if excerpt.id == excerpt_id {
 return Some(excerpt);
 }
 }

|
||||||
&'a self,
|
&'a self,
|
||||||
range: &'a Range<Anchor>,
|
range: &'a Range<Anchor>,
|
||||||
) -> impl 'a + Iterator<Item = (ReplicaId, bool, CursorShape, Selection<Anchor>)> {
|
) -> impl 'a + Iterator<Item = (ReplicaId, bool, CursorShape, Selection<Anchor>)> {
|
||||||
let mut cursor = self.excerpts.cursor::<Option<&ExcerptId>>();
|
let mut cursor = self.excerpts.cursor::<ExcerptSummary>();
|
||||||
cursor.seek(&Some(&range.start.excerpt_id), Bias::Left, &());
|
let start_locator = self.excerpt_locator_for_id(range.start.excerpt_id);
|
||||||
|
let end_locator = self.excerpt_locator_for_id(range.end.excerpt_id);
|
||||||
|
cursor.seek(start_locator, Bias::Left, &());
|
||||||
cursor
|
cursor
|
||||||
.take_while(move |excerpt| excerpt.id <= range.end.excerpt_id)
|
.take_while(move |excerpt| excerpt.locator <= *end_locator)
|
||||||
.flat_map(move |excerpt| {
|
.flat_map(move |excerpt| {
|
||||||
let mut query_range = excerpt.range.context.start..excerpt.range.context.end;
|
let mut query_range = excerpt.range.context.start..excerpt.range.context.end;
|
||||||
if excerpt.id == range.start.excerpt_id {
|
if excerpt.id == range.start.excerpt_id {
|
||||||
|
@ -2916,7 +2977,7 @@ impl History {
|
||||||
impl Excerpt {
|
impl Excerpt {
|
||||||
fn new(
|
fn new(
|
||||||
id: ExcerptId,
|
id: ExcerptId,
|
||||||
key: usize,
|
locator: Locator,
|
||||||
buffer_id: usize,
|
buffer_id: usize,
|
||||||
buffer: BufferSnapshot,
|
buffer: BufferSnapshot,
|
||||||
range: ExcerptRange<text::Anchor>,
|
range: ExcerptRange<text::Anchor>,
|
||||||
|
@ -2924,7 +2985,7 @@ impl Excerpt {
|
||||||
) -> Self {
|
) -> Self {
|
||||||
Excerpt {
|
Excerpt {
|
||||||
id,
|
id,
|
||||||
key,
|
locator,
|
||||||
max_buffer_row: range.context.end.to_point(&buffer).row,
|
max_buffer_row: range.context.end.to_point(&buffer).row,
|
||||||
text_summary: buffer
|
text_summary: buffer
|
||||||
.text_summary_for_range::<TextSummary, _>(range.context.to_offset(&buffer)),
|
.text_summary_for_range::<TextSummary, _>(range.context.to_offset(&buffer)),
|
||||||
|
@ -3010,10 +3071,33 @@ impl Excerpt {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
impl ExcerptId {
|
||||||
|
pub fn min() -> Self {
|
||||||
|
Self(0)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn max() -> Self {
|
||||||
|
Self(usize::MAX)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn cmp(&self, other: &Self, snapshot: &MultiBufferSnapshot) -> cmp::Ordering {
|
||||||
|
let a = snapshot.excerpt_locator_for_id(*self);
|
||||||
|
let b = snapshot.excerpt_locator_for_id(*other);
|
||||||
|
a.cmp(&b).then_with(|| self.0.cmp(&other.0))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Into<usize> for ExcerptId {
|
||||||
|
fn into(self) -> usize {
|
||||||
|
self.0
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
impl fmt::Debug for Excerpt {
|
impl fmt::Debug for Excerpt {
|
||||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||||
f.debug_struct("Excerpt")
|
f.debug_struct("Excerpt")
|
||||||
.field("id", &self.id)
|
.field("id", &self.id)
|
||||||
|
.field("locator", &self.locator)
|
||||||
.field("buffer_id", &self.buffer_id)
|
.field("buffer_id", &self.buffer_id)
|
||||||
.field("range", &self.range)
|
.field("range", &self.range)
|
||||||
.field("text_summary", &self.text_summary)
|
.field("text_summary", &self.text_summary)
|
||||||
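Since ids are handed out in creation order while locators track position order, comparing two `ExcerptId`s now requires the snapshot, as the new `ExcerptId::cmp` above shows. A small illustration of that rule, with a `HashMap` standing in for the locator lookup (all names here are hypothetical):

```rust
// Sketch only: order by the snapshot's locator for each id, then fall back to
// the raw id as a tiebreaker, mirroring `a.cmp(&b).then_with(...)` above.
use std::cmp::Ordering;
use std::collections::HashMap;

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct ExcerptId(usize);

struct Snapshot {
    locator_by_id: HashMap<ExcerptId, u64>, // stand-in for the locator sum-tree
}

impl ExcerptId {
    fn cmp(&self, other: &Self, snapshot: &Snapshot) -> Ordering {
        let a = snapshot.locator_by_id[self];
        let b = snapshot.locator_by_id[other];
        a.cmp(&b).then_with(|| self.0.cmp(&other.0))
    }
}

fn main() {
    // An excerpt created later (id 5) can still precede an older one (id 2)
    // if it was inserted earlier in the multi-buffer.
    let snapshot = Snapshot {
        locator_by_id: HashMap::from([(ExcerptId(5), 10), (ExcerptId(2), 20)]),
    };
    assert_eq!(ExcerptId(5).cmp(&ExcerptId(2), &snapshot), Ordering::Less);
}
```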
@@ -3031,19 +3115,44 @@ impl sum_tree::Item for Excerpt {
text += TextSummary::from("\n");
}
ExcerptSummary {
-excerpt_id: self.id.clone(),
+excerpt_id: self.id,
+excerpt_locator: self.locator.clone(),
max_buffer_row: self.max_buffer_row,
text,
}
}
}

+impl sum_tree::Item for ExcerptIdMapping {
+type Summary = ExcerptId;
+
+fn summary(&self) -> Self::Summary {
+self.id
+}
+}
+
+impl sum_tree::KeyedItem for ExcerptIdMapping {
+type Key = ExcerptId;
+
+fn key(&self) -> Self::Key {
+self.id
+}
+}
+
+impl sum_tree::Summary for ExcerptId {
+type Context = ();
+
+fn add_summary(&mut self, other: &Self, _: &()) {
+*self = *other;
+}
+}
+
impl sum_tree::Summary for ExcerptSummary {
type Context = ();

fn add_summary(&mut self, summary: &Self, _: &()) {
-debug_assert!(summary.excerpt_id > self.excerpt_id);
+debug_assert!(summary.excerpt_locator > self.excerpt_locator);
-self.excerpt_id = summary.excerpt_id.clone();
+self.excerpt_locator = summary.excerpt_locator.clone();
self.text.add_summary(&summary.text, &());
self.max_buffer_row = cmp::max(self.max_buffer_row, summary.max_buffer_row);
}
@@ -3067,9 +3176,15 @@ impl<'a> sum_tree::SeekTarget<'a, ExcerptSummary, ExcerptSummary> for usize {
}
}

-impl<'a> sum_tree::SeekTarget<'a, ExcerptSummary, ExcerptSummary> for Option<&'a ExcerptId> {
+impl<'a> sum_tree::SeekTarget<'a, ExcerptSummary, Option<&'a Locator>> for Locator {
+fn cmp(&self, cursor_location: &Option<&'a Locator>, _: &()) -> cmp::Ordering {
+Ord::cmp(&Some(self), cursor_location)
+}
+}
+
+impl<'a> sum_tree::SeekTarget<'a, ExcerptSummary, ExcerptSummary> for Locator {
fn cmp(&self, cursor_location: &ExcerptSummary, _: &()) -> cmp::Ordering {
-Ord::cmp(self, &Some(&cursor_location.excerpt_id))
+Ord::cmp(self, &cursor_location.excerpt_locator)
}
}

@@ -3091,9 +3206,15 @@ impl<'a> sum_tree::Dimension<'a, ExcerptSummary> for PointUtf16 {
}
}

-impl<'a> sum_tree::Dimension<'a, ExcerptSummary> for Option<&'a ExcerptId> {
+impl<'a> sum_tree::Dimension<'a, ExcerptSummary> for Option<&'a Locator> {
fn add_summary(&mut self, summary: &'a ExcerptSummary, _: &()) {
-*self = Some(&summary.excerpt_id);
+*self = Some(&summary.excerpt_locator);
+}
+}
+
+impl<'a> sum_tree::Dimension<'a, ExcerptSummary> for Option<ExcerptId> {
+fn add_summary(&mut self, summary: &'a ExcerptSummary, _: &()) {
+*self = Some(summary.excerpt_id);
}
}

@@ -3274,12 +3395,6 @@ impl ToOffset for Point {
}
}

-impl ToOffset for PointUtf16 {
-fn to_offset<'a>(&self, snapshot: &MultiBufferSnapshot) -> usize {
-snapshot.point_utf16_to_offset(*self)
-}
-}
-
impl ToOffset for usize {
fn to_offset<'a>(&self, snapshot: &MultiBufferSnapshot) -> usize {
assert!(*self <= snapshot.len(), "offset is out of range");
@@ -3293,6 +3408,12 @@ impl ToOffset for OffsetUtf16 {
}
}

+impl ToOffset for PointUtf16 {
+fn to_offset<'a>(&self, snapshot: &MultiBufferSnapshot) -> usize {
+snapshot.point_utf16_to_offset(*self)
+}
+}
+
impl ToOffsetUtf16 for OffsetUtf16 {
fn to_offset_utf16(&self, _snapshot: &MultiBufferSnapshot) -> OffsetUtf16 {
*self
@@ -3591,7 +3712,7 @@ mod tests {
let snapshot = multibuffer.update(cx, |multibuffer, cx| {
let (buffer_2_excerpt_id, _) =
multibuffer.excerpts_for_buffer(&buffer_2, cx)[0].clone();
-multibuffer.remove_excerpts(&[buffer_2_excerpt_id], cx);
+multibuffer.remove_excerpts([buffer_2_excerpt_id], cx);
multibuffer.snapshot(cx)
});

@@ -3780,7 +3901,7 @@ mod tests {

// Replace the buffer 1 excerpt with new excerpts from buffer 2.
let (excerpt_id_2, excerpt_id_3) = multibuffer.update(cx, |multibuffer, cx| {
-multibuffer.remove_excerpts([&excerpt_id_1], cx);
+multibuffer.remove_excerpts([excerpt_id_1], cx);
let mut ids = multibuffer
.push_excerpts(
buffer_2.clone(),
@@ -3810,9 +3931,8 @@ mod tests {
assert_ne!(excerpt_id_2, excerpt_id_1);

// Resolve some anchors from the previous snapshot in the new snapshot.
-// Although there is still an excerpt with the same id, it is for
+// The current excerpts are from a different buffer, so we don't attempt to
-// a different buffer, so we don't attempt to resolve the old text
+// resolve the old text anchor in the new buffer.
-// anchor in the new buffer.
assert_eq!(
snapshot_2.summary_for_anchor::<usize>(&snapshot_1.anchor_before(2)),
0
@@ -3824,6 +3944,9 @@ mod tests {
]),
vec![0, 0]
);

+// Refresh anchors from the old snapshot. The return value indicates that both
+// anchors lost their original excerpt.
let refresh =
snapshot_2.refresh_anchors(&[snapshot_1.anchor_before(2), snapshot_1.anchor_after(3)]);
assert_eq!(
@@ -3837,10 +3960,10 @@ mod tests {
// Replace the middle excerpt with a smaller excerpt in buffer 2,
// that intersects the old excerpt.
let excerpt_id_5 = multibuffer.update(cx, |multibuffer, cx| {
-multibuffer.remove_excerpts([&excerpt_id_3], cx);
+multibuffer.remove_excerpts([excerpt_id_3], cx);
multibuffer
.insert_excerpts_after(
-&excerpt_id_3,
+excerpt_id_2,
buffer_2.clone(),
[ExcerptRange {
context: 5..8,
@@ -3857,8 +3980,8 @@ mod tests {
assert_ne!(excerpt_id_5, excerpt_id_3);

// Resolve some anchors from the previous snapshot in the new snapshot.
-// The anchor in the middle excerpt snaps to the beginning of the
+// The third anchor can't be resolved, since its excerpt has been removed,
-// excerpt, since it is not
+// so it resolves to the same position as its predecessor.
let anchors = [
snapshot_2.anchor_before(0),
snapshot_2.anchor_after(2),
@@ -3867,7 +3990,7 @@ mod tests {
];
assert_eq!(
snapshot_3.summaries_for_anchors::<usize, _>(&anchors),
-&[0, 2, 5, 13]
+&[0, 2, 9, 13]
);

let new_anchors = snapshot_3.refresh_anchors(&anchors);
@@ -3889,7 +4012,7 @@ mod tests {

let mut buffers: Vec<ModelHandle<Buffer>> = Vec::new();
let multibuffer = cx.add_model(|_| MultiBuffer::new(0));
-let mut excerpt_ids = Vec::new();
+let mut excerpt_ids = Vec::<ExcerptId>::new();
let mut expected_excerpts = Vec::<(ModelHandle<Buffer>, Range<text::Anchor>)>::new();
let mut anchors = Vec::new();
let mut old_versions = Vec::new();
@@ -3919,9 +4042,11 @@ mod tests {
.collect::<String>(),
);
}
-ids_to_remove.sort_unstable();
+let snapshot = multibuffer.read(cx).read(cx);
+ids_to_remove.sort_unstable_by(|a, b| a.cmp(&b, &snapshot));
+drop(snapshot);
multibuffer.update(cx, |multibuffer, cx| {
-multibuffer.remove_excerpts(&ids_to_remove, cx)
+multibuffer.remove_excerpts(ids_to_remove, cx)
});
}
30..=39 if !expected_excerpts.is_empty() => {
@@ -3945,7 +4070,6 @@ mod tests {
// Ensure the newly-refreshed anchors point to a valid excerpt and don't
// overshoot its boundaries.
assert_eq!(anchors.len(), prev_len);
-let mut cursor = multibuffer.excerpts.cursor::<Option<&ExcerptId>>();
for anchor in &anchors {
if anchor.excerpt_id == ExcerptId::min()
|| anchor.excerpt_id == ExcerptId::max()
@@ -3953,8 +4077,7 @@ mod tests {
continue;
}

-cursor.seek_forward(&Some(&anchor.excerpt_id), Bias::Left, &());
+let excerpt = multibuffer.excerpt(anchor.excerpt_id).unwrap();
-let excerpt = cursor.item().unwrap();
assert_eq!(excerpt.id, anchor.excerpt_id);
assert!(excerpt.contains(anchor));
}
@@ -3994,7 +4117,7 @@ mod tests {
let excerpt_id = multibuffer.update(cx, |multibuffer, cx| {
multibuffer
.insert_excerpts_after(
-&prev_excerpt_id,
+prev_excerpt_id,
buffer_handle.clone(),
[ExcerptRange {
context: start_ix..end_ix,
@@ -4158,12 +4281,14 @@ mod tests {
}

for _ in 0..ch.len_utf16() {
-let left_point_utf16 = snapshot.clip_point_utf16(point_utf16, Bias::Left);
+let left_point_utf16 =
-let right_point_utf16 = snapshot.clip_point_utf16(point_utf16, Bias::Right);
+snapshot.clip_point_utf16(Unclipped(point_utf16), Bias::Left);
+let right_point_utf16 =
+snapshot.clip_point_utf16(Unclipped(point_utf16), Bias::Right);
let buffer_left_point_utf16 =
-buffer.clip_point_utf16(buffer_point_utf16, Bias::Left);
+buffer.clip_point_utf16(Unclipped(buffer_point_utf16), Bias::Left);
let buffer_right_point_utf16 =
-buffer.clip_point_utf16(buffer_point_utf16, Bias::Right);
+buffer.clip_point_utf16(Unclipped(buffer_point_utf16), Bias::Right);
assert_eq!(
left_point_utf16,
excerpt_start.lines_utf16()

@@ -6,7 +6,7 @@ use std::{
};
use sum_tree::Bias;

-#[derive(Clone, Eq, PartialEq, Debug, Hash)]
+#[derive(Clone, Copy, Eq, PartialEq, Debug, Hash)]
pub struct Anchor {
pub(crate) buffer_id: Option<usize>,
pub(crate) excerpt_id: ExcerptId,
@@ -30,16 +30,16 @@ impl Anchor {
}
}

-pub fn excerpt_id(&self) -> &ExcerptId {
+pub fn excerpt_id(&self) -> ExcerptId {
-&self.excerpt_id
+self.excerpt_id
}

pub fn cmp(&self, other: &Anchor, snapshot: &MultiBufferSnapshot) -> Ordering {
-let excerpt_id_cmp = self.excerpt_id.cmp(&other.excerpt_id);
+let excerpt_id_cmp = self.excerpt_id.cmp(&other.excerpt_id, snapshot);
if excerpt_id_cmp.is_eq() {
if self.excerpt_id == ExcerptId::min() || self.excerpt_id == ExcerptId::max() {
Ordering::Equal
-} else if let Some(excerpt) = snapshot.excerpt(&self.excerpt_id) {
+} else if let Some(excerpt) = snapshot.excerpt(self.excerpt_id) {
self.text_anchor.cmp(&other.text_anchor, &excerpt.buffer)
} else {
Ordering::Equal
@@ -51,7 +51,7 @@ impl Anchor {

pub fn bias_left(&self, snapshot: &MultiBufferSnapshot) -> Anchor {
if self.text_anchor.bias != Bias::Left {
-if let Some(excerpt) = snapshot.excerpt(&self.excerpt_id) {
+if let Some(excerpt) = snapshot.excerpt(self.excerpt_id) {
return Self {
buffer_id: self.buffer_id,
excerpt_id: self.excerpt_id.clone(),
@@ -64,7 +64,7 @@ impl Anchor {

pub fn bias_right(&self, snapshot: &MultiBufferSnapshot) -> Anchor {
if self.text_anchor.bias != Bias::Right {
-if let Some(excerpt) = snapshot.excerpt(&self.excerpt_id) {
+if let Some(excerpt) = snapshot.excerpt(self.excerpt_id) {
return Self {
buffer_id: self.buffer_id,
excerpt_id: self.excerpt_id.clone(),

@@ -544,11 +544,21 @@ impl<'a> MutableSelectionsCollection<'a> {
T: ToOffset,
{
let buffer = self.buffer.read(self.cx).snapshot(self.cx);
+let ranges = ranges
+.into_iter()
+.map(|range| range.start.to_offset(&buffer)..range.end.to_offset(&buffer));
+self.select_offset_ranges(ranges);
+}
+
+fn select_offset_ranges<I>(&mut self, ranges: I)
+where
+I: IntoIterator<Item = Range<usize>>,
+{
let selections = ranges
.into_iter()
.map(|range| {
-let mut start = range.start.to_offset(&buffer);
+let mut start = range.start;
-let mut end = range.end.to_offset(&buffer);
+let mut end = range.end;
let reversed = if start > end {
mem::swap(&mut start, &mut end);
true

@@ -257,17 +257,19 @@ impl Element for Flex {
let axis = self.axis;
move |e, cx| {
if remaining_space < 0. {
+let scroll_delta = e.delta.raw();
+
let mut delta = match axis {
Axis::Horizontal => {
-if e.delta.x().abs() >= e.delta.y().abs() {
+if scroll_delta.x().abs() >= scroll_delta.y().abs() {
-e.delta.x()
+scroll_delta.x()
} else {
-e.delta.y()
+scroll_delta.y()
}
}
-Axis::Vertical => e.delta.y(),
+Axis::Vertical => scroll_delta.y(),
};
-if !e.precise {
+if !e.delta.precise() {
delta *= 20.;
}

@@ -258,8 +258,8 @@ impl Element for List {
state.0.borrow_mut().scroll(
&scroll_top,
height,
-e.platform_event.delta,
+*e.platform_event.delta.raw(),
-e.platform_event.precise,
+e.platform_event.delta.precise(),
cx,
)
}

@@ -295,15 +295,19 @@ impl Element for UniformList {
move |MouseScrollWheel {
platform_event:
ScrollWheelEvent {
-position,
+position, delta, ..
-delta,
-precise,
-..
},
..
},
cx| {
-if !Self::scroll(state.clone(), position, delta, precise, scroll_max, cx) {
+if !Self::scroll(
+state.clone(),
+position,
+*delta.raw(),
+delta.precise(),
+scroll_max,
+cx,
+) {
cx.propagate_event();
}
}

@@ -1,5 +1,7 @@
use std::ops::Deref;

+use pathfinder_geometry::vector::vec2f;
+
use crate::{geometry::vector::Vector2F, keymap::Keystroke};

#[derive(Clone, Debug)]
@@ -44,11 +46,45 @@ pub enum TouchPhase {
Ended,
}

+#[derive(Clone, Copy, Debug)]
+pub enum ScrollDelta {
+Pixels(Vector2F),
+Lines(Vector2F),
+}
+
+impl Default for ScrollDelta {
+fn default() -> Self {
+Self::Lines(Default::default())
+}
+}
+
+impl ScrollDelta {
+pub fn raw(&self) -> &Vector2F {
+match self {
+ScrollDelta::Pixels(v) => v,
+ScrollDelta::Lines(v) => v,
+}
+}
+
+pub fn precise(&self) -> bool {
+match self {
+ScrollDelta::Pixels(_) => true,
+ScrollDelta::Lines(_) => false,
+}
+}
+
+pub fn pixel_delta(&self, line_height: f32) -> Vector2F {
+match self {
+ScrollDelta::Pixels(delta) => *delta,
+ScrollDelta::Lines(delta) => vec2f(delta.x() * line_height, delta.y() * line_height),
+}
+}
+}
+
#[derive(Clone, Copy, Debug, Default)]
pub struct ScrollWheelEvent {
pub position: Vector2F,
-pub delta: Vector2F,
+pub delta: ScrollDelta,
-pub precise: bool,
pub modifiers: Modifiers,
/// If the platform supports returning the phase of a scroll wheel event, it will be stored here
pub phase: Option<TouchPhase>,
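The new `ScrollDelta` distinguishes precise pixel deltas from coarse line deltas and converts between them, which is what the `raw`, `precise`, and `pixel_delta` calls in the hunks above rely on. A self-contained sketch of the same idea, with a plain `(f32, f32)` tuple standing in for `Vector2F`:

```rust
// Sketch of the two scroll-delta flavors and the pixel conversion.
#[derive(Clone, Copy, Debug)]
enum ScrollDelta {
    Pixels(f32, f32), // precise deltas, e.g. from a trackpad
    Lines(f32, f32),  // coarse deltas, e.g. from a clicky scroll wheel
}

impl ScrollDelta {
    fn precise(&self) -> bool {
        matches!(self, ScrollDelta::Pixels(..))
    }

    // Convert to pixels so callers can scroll in a single unit.
    fn pixel_delta(&self, line_height: f32) -> (f32, f32) {
        match *self {
            ScrollDelta::Pixels(x, y) => (x, y),
            ScrollDelta::Lines(x, y) => (x * line_height, y * line_height),
        }
    }
}

fn main() {
    let wheel = ScrollDelta::Lines(0.0, 3.0);
    assert!(!wheel.precise());
    assert_eq!(wheel.pixel_delta(20.0), (0.0, 60.0));

    let trackpad = ScrollDelta::Pixels(0.0, -12.5);
    assert!(trackpad.precise());
}
```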
@@ -3,7 +3,7 @@ use crate::{
keymap::Keystroke,
platform::{Event, NavigationDirection},
KeyDownEvent, KeyUpEvent, Modifiers, ModifiersChangedEvent, MouseButton, MouseButtonEvent,
-MouseMovedEvent, ScrollWheelEvent, TouchPhase,
+MouseMovedEvent, ScrollDelta, ScrollWheelEvent, TouchPhase,
};
use cocoa::{
appkit::{NSEvent, NSEventModifierFlags, NSEventPhase, NSEventType},
@@ -164,17 +164,24 @@ impl Event {
_ => Some(TouchPhase::Moved),
};

+let raw_data = vec2f(
+native_event.scrollingDeltaX() as f32,
+native_event.scrollingDeltaY() as f32,
+);
+
+let delta = if native_event.hasPreciseScrollingDeltas() == YES {
+ScrollDelta::Pixels(raw_data)
+} else {
+ScrollDelta::Lines(raw_data)
+};
+
Self::ScrollWheel(ScrollWheelEvent {
position: vec2f(
native_event.locationInWindow().x as f32,
window_height - native_event.locationInWindow().y as f32,
),
-delta: vec2f(
+delta,
-native_event.scrollingDeltaX() as f32,
-native_event.scrollingDeltaY() as f32,
-),
phase,
-precise: native_event.hasPreciseScrollingDeltas() == YES,
modifiers: read_modifiers(native_event),
})
}),

@@ -475,27 +475,35 @@ impl Presenter {
if let MouseEvent::Down(e) = &mouse_event {
if valid_region
.handlers
-.contains_handler(MouseEvent::click_disc(), Some(e.button))
+.contains(MouseEvent::click_disc(), Some(e.button))
|| valid_region
.handlers
-.contains_handler(MouseEvent::drag_disc(), Some(e.button))
+.contains(MouseEvent::drag_disc(), Some(e.button))
{
event_cx.handled = true;
}
}

-if let Some(callback) = valid_region.handlers.get(&mouse_event.handler_key()) {
+// `event_consumed` should only be true if there are any handlers for this event.
-event_cx.handled = true;
+let mut event_consumed = event_cx.handled;
-event_cx.with_current_view(valid_region.id().view_id(), {
+if let Some(callbacks) = valid_region.handlers.get(&mouse_event.handler_key()) {
-let region_event = mouse_event.clone();
+event_consumed = true;
-|cx| callback(region_event, cx)
+for callback in callbacks {
-});
+event_cx.handled = true;
+event_cx.with_current_view(valid_region.id().view_id(), {
+let region_event = mouse_event.clone();
+|cx| callback(region_event, cx)
+});
+event_consumed &= event_cx.handled;
+any_event_handled |= event_cx.handled;
+}
}

-any_event_handled = any_event_handled || event_cx.handled;
+any_event_handled |= event_cx.handled;
-// For bubbling events, if the event was handled, don't continue dispatching
-// This only makes sense for local events.
+// For bubbling events, if the event was handled, don't continue dispatching.
-if event_cx.handled && mouse_event.is_capturable() {
+// This only makes sense for local events which return false from is_capturable.
+if event_consumed && mouse_event.is_capturable() {
break;
}
}

@@ -5,7 +5,7 @@ use std::{

use pathfinder_geometry::{rect::RectF, vector::Vector2F};

-use crate::{MouseButton, MouseButtonEvent, MouseMovedEvent, ScrollWheelEvent};
+use crate::{scene::mouse_region::HandlerKey, MouseButtonEvent, MouseMovedEvent, ScrollWheelEvent};

#[derive(Debug, Default, Clone)]
pub struct MouseMove {
@@ -217,17 +217,17 @@ impl MouseEvent {
discriminant(&MouseEvent::ScrollWheel(Default::default()))
}

-pub fn handler_key(&self) -> (Discriminant<MouseEvent>, Option<MouseButton>) {
+pub fn handler_key(&self) -> HandlerKey {
match self {
-MouseEvent::Move(_) => (Self::move_disc(), None),
+MouseEvent::Move(_) => HandlerKey::new(Self::move_disc(), None),
-MouseEvent::Drag(e) => (Self::drag_disc(), e.pressed_button),
+MouseEvent::Drag(e) => HandlerKey::new(Self::drag_disc(), e.pressed_button),
-MouseEvent::Hover(_) => (Self::hover_disc(), None),
+MouseEvent::Hover(_) => HandlerKey::new(Self::hover_disc(), None),
-MouseEvent::Down(e) => (Self::down_disc(), Some(e.button)),
+MouseEvent::Down(e) => HandlerKey::new(Self::down_disc(), Some(e.button)),
-MouseEvent::Up(e) => (Self::up_disc(), Some(e.button)),
+MouseEvent::Up(e) => HandlerKey::new(Self::up_disc(), Some(e.button)),
-MouseEvent::Click(e) => (Self::click_disc(), Some(e.button)),
+MouseEvent::Click(e) => HandlerKey::new(Self::click_disc(), Some(e.button)),
-MouseEvent::UpOut(e) => (Self::up_out_disc(), Some(e.button)),
+MouseEvent::UpOut(e) => HandlerKey::new(Self::up_out_disc(), Some(e.button)),
-MouseEvent::DownOut(e) => (Self::down_out_disc(), Some(e.button)),
+MouseEvent::DownOut(e) => HandlerKey::new(Self::down_out_disc(), Some(e.button)),
-MouseEvent::ScrollWheel(_) => (Self::scroll_wheel_disc(), None),
+MouseEvent::ScrollWheel(_) => HandlerKey::new(Self::scroll_wheel_disc(), None),
}
}
}

@@ -3,6 +3,7 @@ use std::{any::TypeId, fmt::Debug, mem::Discriminant, rc::Rc};
use collections::HashMap;

use pathfinder_geometry::rect::RectF;
+use smallvec::SmallVec;

use crate::{EventContext, MouseButton};

@@ -177,61 +178,105 @@ impl MouseRegionId {
}
}

+pub type HandlerCallback = Rc<dyn Fn(MouseEvent, &mut EventContext)>;
+
+#[derive(Clone, PartialEq, Eq, Hash)]
+pub struct HandlerKey {
+event_kind: Discriminant<MouseEvent>,
+button: Option<MouseButton>,
+}
+
+impl HandlerKey {
+pub fn new(event_kind: Discriminant<MouseEvent>, button: Option<MouseButton>) -> HandlerKey {
+HandlerKey { event_kind, button }
+}
+}
+
#[derive(Clone, Default)]
pub struct HandlerSet {
-#[allow(clippy::type_complexity)]
+set: HashMap<HandlerKey, SmallVec<[HandlerCallback; 1]>>,
-pub set: HashMap<
-(Discriminant<MouseEvent>, Option<MouseButton>),
-Rc<dyn Fn(MouseEvent, &mut EventContext)>,
->,
}

impl HandlerSet {
pub fn capture_all() -> Self {
-#[allow(clippy::type_complexity)]
+let mut set: HashMap<HandlerKey, SmallVec<[HandlerCallback; 1]>> = HashMap::default();
-let mut set: HashMap<
-(Discriminant<MouseEvent>, Option<MouseButton>),
-Rc<dyn Fn(MouseEvent, &mut EventContext)>,
-> = Default::default();

-set.insert((MouseEvent::move_disc(), None), Rc::new(|_, _| {}));
+set.insert(
-set.insert((MouseEvent::hover_disc(), None), Rc::new(|_, _| {}));
+HandlerKey::new(MouseEvent::move_disc(), None),
+SmallVec::from_buf([Rc::new(|_, _| {})]),
+);
+set.insert(
+HandlerKey::new(MouseEvent::hover_disc(), None),
+SmallVec::from_buf([Rc::new(|_, _| {})]),
+);
for button in MouseButton::all() {
-set.insert((MouseEvent::drag_disc(), Some(button)), Rc::new(|_, _| {}));
-set.insert((MouseEvent::down_disc(), Some(button)), Rc::new(|_, _| {}));
-set.insert((MouseEvent::up_disc(), Some(button)), Rc::new(|_, _| {}));
-set.insert((MouseEvent::click_disc(), Some(button)), Rc::new(|_, _| {}));
set.insert(
-(MouseEvent::down_out_disc(), Some(button)),
+HandlerKey::new(MouseEvent::drag_disc(), Some(button)),
-Rc::new(|_, _| {}),
+SmallVec::from_buf([Rc::new(|_, _| {})]),
);
set.insert(
-(MouseEvent::up_out_disc(), Some(button)),
+HandlerKey::new(MouseEvent::down_disc(), Some(button)),
-Rc::new(|_, _| {}),
+SmallVec::from_buf([Rc::new(|_, _| {})]),
+);
+set.insert(
+HandlerKey::new(MouseEvent::up_disc(), Some(button)),
+SmallVec::from_buf([Rc::new(|_, _| {})]),
+);
+set.insert(
+HandlerKey::new(MouseEvent::click_disc(), Some(button)),
+SmallVec::from_buf([Rc::new(|_, _| {})]),
+);
+set.insert(
+HandlerKey::new(MouseEvent::down_out_disc(), Some(button)),
+SmallVec::from_buf([Rc::new(|_, _| {})]),
+);
+set.insert(
+HandlerKey::new(MouseEvent::up_out_disc(), Some(button)),
+SmallVec::from_buf([Rc::new(|_, _| {})]),
);
}
-set.insert((MouseEvent::scroll_wheel_disc(), None), Rc::new(|_, _| {}));
+set.insert(
+HandlerKey::new(MouseEvent::scroll_wheel_disc(), None),
+SmallVec::from_buf([Rc::new(|_, _| {})]),
+);
+
HandlerSet { set }
}

-pub fn get(
+pub fn get(&self, key: &HandlerKey) -> Option<&[HandlerCallback]> {
-&self,
+self.set.get(key).map(|vec| vec.as_slice())
-key: &(Discriminant<MouseEvent>, Option<MouseButton>),
-) -> Option<Rc<dyn Fn(MouseEvent, &mut EventContext)>> {
-self.set.get(key).cloned()
}

-pub fn contains_handler(
+pub fn contains(
&self,
-event: Discriminant<MouseEvent>,
+discriminant: Discriminant<MouseEvent>,
button: Option<MouseButton>,
) -> bool {
-self.set.contains_key(&(event, button))
+self.set
+.contains_key(&HandlerKey::new(discriminant, button))
+}
+
+fn insert(
+&mut self,
+event_kind: Discriminant<MouseEvent>,
+button: Option<MouseButton>,
+callback: HandlerCallback,
+) {
+use std::collections::hash_map::Entry;
+
+match self.set.entry(HandlerKey::new(event_kind, button)) {
+Entry::Occupied(mut vec) => {
+vec.get_mut().push(callback);
+}
+
+Entry::Vacant(entry) => {
+entry.insert(SmallVec::from_buf([callback]));
+}
+}
}

pub fn on_move(mut self, handler: impl Fn(MouseMove, &mut EventContext) + 'static) -> Self {
-self.set.insert((MouseEvent::move_disc(), None),
+self.insert(MouseEvent::move_disc(), None,
Rc::new(move |region_event, cx| {
if let MouseEvent::Move(e) = region_event {
handler(e, cx);
@@ -249,7 +294,7 @@ impl HandlerSet {
button: MouseButton,
handler: impl Fn(MouseDown, &mut EventContext) + 'static,
) -> Self {
-self.set.insert((MouseEvent::down_disc(), Some(button)),
+self.insert(MouseEvent::down_disc(), Some(button),
Rc::new(move |region_event, cx| {
if let MouseEvent::Down(e) = region_event {
handler(e, cx);
@@ -267,7 +312,7 @@ impl HandlerSet {
button: MouseButton,
handler: impl Fn(MouseUp, &mut EventContext) + 'static,
) -> Self {
-self.set.insert((MouseEvent::up_disc(), Some(button)),
+self.insert(MouseEvent::up_disc(), Some(button),
Rc::new(move |region_event, cx| {
if let MouseEvent::Up(e) = region_event {
handler(e, cx);
@@ -285,7 +330,7 @@ impl HandlerSet {
button: MouseButton,
handler: impl Fn(MouseClick, &mut EventContext) + 'static,
) -> Self {
-self.set.insert((MouseEvent::click_disc(), Some(button)),
+self.insert(MouseEvent::click_disc(), Some(button),
Rc::new(move |region_event, cx| {
if let MouseEvent::Click(e) = region_event {
handler(e, cx);
@@ -303,7 +348,7 @@ impl HandlerSet {
button: MouseButton,
handler: impl Fn(MouseDownOut, &mut EventContext) + 'static,
) -> Self {
-self.set.insert((MouseEvent::down_out_disc(), Some(button)),
+self.insert(MouseEvent::down_out_disc(), Some(button),
Rc::new(move |region_event, cx| {
if let MouseEvent::DownOut(e) = region_event {
handler(e, cx);
@@ -321,7 +366,7 @@ impl HandlerSet {
button: MouseButton,
handler: impl Fn(MouseUpOut, &mut EventContext) + 'static,
) -> Self {
-self.set.insert((MouseEvent::up_out_disc(), Some(button)),
+self.insert(MouseEvent::up_out_disc(), Some(button),
Rc::new(move |region_event, cx| {
if let MouseEvent::UpOut(e) = region_event {
handler(e, cx);
@@ -339,7 +384,7 @@ impl HandlerSet {
button: MouseButton,
handler: impl Fn(MouseDrag, &mut EventContext) + 'static,
) -> Self {
-self.set.insert((MouseEvent::drag_disc(), Some(button)),
+self.insert(MouseEvent::drag_disc(), Some(button),
Rc::new(move |region_event, cx| {
if let MouseEvent::Drag(e) = region_event {
handler(e, cx);
@@ -353,7 +398,7 @@ impl HandlerSet {
}

pub fn on_hover(mut self, handler: impl Fn(MouseHover, &mut EventContext) + 'static) -> Self {
-self.set.insert((MouseEvent::hover_disc(), None),
+self.insert(MouseEvent::hover_disc(), None,
Rc::new(move |region_event, cx| {
if let MouseEvent::Hover(e) = region_event {
handler(e, cx);
@@ -370,7 +415,7 @@ impl HandlerSet {
mut self,
handler: impl Fn(MouseScrollWheel, &mut EventContext) + 'static,
) -> Self {
-self.set.insert((MouseEvent::scroll_wheel_disc(), None),
+self.insert(MouseEvent::scroll_wheel_disc(), None,
Rc::new(move |region_event, cx| {
if let MouseEvent::ScrollWheel(e) = region_event {
handler(e, cx);
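With `HandlerKey` keys and `SmallVec` values, a mouse region can now register several callbacks for the same event kind, and the new private `insert` appends instead of overwriting. The sketch below shows the map shape and the entry-based append; a `Vec` of boxed closures stands in for `SmallVec<[Rc<dyn Fn(...)>; 1]>`, and the string key is a stand-in for the real `(Discriminant<MouseEvent>, Option<MouseButton>)` pair:

```rust
// Sketch of a multi-handler map: each key holds a list of callbacks.
use std::collections::hash_map::{Entry, HashMap};

type Key = (&'static str, Option<u8>); // hypothetical stand-in for HandlerKey
type Callback = Box<dyn Fn(&str)>;

#[derive(Default)]
struct HandlerSet {
    set: HashMap<Key, Vec<Callback>>,
}

impl HandlerSet {
    fn insert(&mut self, key: Key, callback: Callback) {
        // Append to an existing list or start a new one, as the Entry match above does.
        match self.set.entry(key) {
            Entry::Occupied(mut entry) => entry.get_mut().push(callback),
            Entry::Vacant(entry) => {
                entry.insert(vec![callback]);
            }
        }
    }

    fn get(&self, key: &Key) -> Option<&[Callback]> {
        self.set.get(key).map(|callbacks| callbacks.as_slice())
    }
}

fn main() {
    let mut handlers = HandlerSet::default();
    handlers.insert(("click", Some(0)), Box::new(|e| println!("first handler: {e}")));
    handlers.insert(("click", Some(0)), Box::new(|e| println!("second handler: {e}")));
    // Dispatch runs every registered callback for the key, as the presenter loop now does.
    for callback in handlers.get(&("click", Some(0))).unwrap_or(&[]) {
        callback("left click");
    }
}
```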
@@ -16,4 +16,4 @@ chrono = "0.4"
dirs = "4.0"
log = { version = "0.4.16", features = ["kv_unstable_serde"] }
settings = { path = "../settings" }
shellexpand = "2.1.0"

@@ -72,4 +72,5 @@ tree-sitter-rust = "*"
tree-sitter-python = "*"
tree-sitter-typescript = "*"
tree-sitter-ruby = "*"
+tree-sitter-embedded-template = "*"
unindent = "0.1.7"

@@ -2225,11 +2225,12 @@ impl BufferSnapshot {
range: Range<T>,
) -> Option<(Range<usize>, Range<usize>)> {
// Find bracket pairs that *inclusively* contain the given range.
-let range = range.start.to_offset(self).saturating_sub(1)
+let range = range.start.to_offset(self)..range.end.to_offset(self);
-..self.len().min(range.end.to_offset(self) + 1);
+let mut matches = self.syntax.matches(
-let mut matches = self.syntax.matches(range, &self.text, |grammar| {
+range.start.saturating_sub(1)..self.len().min(range.end + 1),
-grammar.brackets_config.as_ref().map(|c| &c.query)
+&self.text,
-});
+|grammar| grammar.brackets_config.as_ref().map(|c| &c.query),
+);
let configs = matches
.grammars()
.iter()
@@ -2252,18 +2253,20 @@ impl BufferSnapshot {

matches.advance();

-if let Some((open, close)) = open.zip(close) {
+let Some((open, close)) = open.zip(close) else { continue };
-let len = close.end - open.start;
+if open.start > range.start || close.end < range.end {
+continue;
-if let Some((existing_open, existing_close)) = &result {
-let existing_len = existing_close.end - existing_open.start;
-if len > existing_len {
-continue;
-}
-}
-
-result = Some((open, close));
}
+let len = close.end - open.start;
+
+if let Some((existing_open, existing_close)) = &result {
+let existing_len = existing_close.end - existing_open.start;
+if len > existing_len {
+continue;
+}
+}
+
+result = Some((open, close));
}

result
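The rewritten loop above first rejects any bracket pair that does not inclusively contain the queried range, and only then keeps the smallest remaining pair. A standalone sketch of that selection rule over plain byte offsets (the candidate ranges are invented for the example):

```rust
// Sketch: pick the tightest pair whose open..close span contains `range`.
use std::ops::Range;

fn enclosing_pair(
    candidates: &[(Range<usize>, Range<usize>)], // (open bracket, close bracket)
    range: Range<usize>,
) -> Option<(Range<usize>, Range<usize>)> {
    let mut result: Option<(Range<usize>, Range<usize>)> = None;
    for (open, close) in candidates {
        // Skip pairs that do not contain the range; this is the new early `continue`.
        if open.start > range.start || close.end < range.end {
            continue;
        }
        // Among the remaining pairs, prefer the smallest span.
        let len = close.end - open.start;
        if let Some((existing_open, existing_close)) = &result {
            if len > existing_close.end - existing_open.start {
                continue;
            }
        }
        result = Some((open.clone(), close.clone()));
    }
    result
}

fn main() {
    // Hypothetical offsets: parens at 4..5 / 11..12 and braces at 13..14 / 18..19.
    let candidates = [(4..5, 11..12), (13..14, 18..19)];
    // A caret inside the braces but past the parens only matches the braces.
    assert_eq!(enclosing_pair(&candidates, 15..15), Some((13..14, 18..19)));
    // A range spanning both pairs is contained by neither, so nothing matches.
    assert_eq!(enclosing_pair(&candidates, 3..19), None);
}
```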
@@ -573,14 +573,72 @@ fn test_enclosing_bracket_ranges(cx: &mut MutableAppContext) {
))
);

-// Regression test: avoid crash when querying at the end of the buffer.
assert_eq!(
-buffer.enclosing_bracket_point_ranges(buffer.len() - 1..buffer.len()),
+buffer.enclosing_bracket_point_ranges(Point::new(4, 1)..Point::new(4, 1)),
Some((
Point::new(0, 6)..Point::new(0, 7),
Point::new(4, 0)..Point::new(4, 1)
))
);

+// Regression test: avoid crash when querying at the end of the buffer.
+assert_eq!(
+buffer.enclosing_bracket_point_ranges(Point::new(4, 1)..Point::new(5, 0)),
+None
+);
+}
+
+#[gpui::test]
+fn test_enclosing_bracket_ranges_where_brackets_are_not_outermost_children(
+cx: &mut MutableAppContext,
+) {
+let javascript_language = Arc::new(
+Language::new(
+LanguageConfig {
+name: "JavaScript".into(),
+..Default::default()
+},
+Some(tree_sitter_javascript::language()),
+)
+.with_brackets_query(
+r#"
+("{" @open "}" @close)
+("(" @open ")" @close)
+"#,
+)
+.unwrap(),
+);
+
+cx.set_global(Settings::test(cx));
+let buffer = cx.add_model(|cx| {
+let text = "
+for (const a in b) {
+// a comment that's longer than the for-loop header
+}
+"
+.unindent();
+Buffer::new(0, text, cx).with_language(javascript_language, cx)
+});
+
+let buffer = buffer.read(cx);
+assert_eq!(
+buffer.enclosing_bracket_point_ranges(Point::new(0, 18)..Point::new(0, 18)),
+Some((
+Point::new(0, 4)..Point::new(0, 5),
+Point::new(0, 17)..Point::new(0, 18)
+))
+);
+
+// Regression test: even though the parent node of the parentheses (the for loop) does
+// intersect the given range, the parentheses themselves do not contain the range, so
+// they should not be returned. Only the curly braces contain the range.
+assert_eq!(
+buffer.enclosing_bracket_point_ranges(Point::new(0, 20)..Point::new(0, 20)),
+Some((
+Point::new(0, 19)..Point::new(0, 20),
+Point::new(2, 0)..Point::new(2, 1)
+))
+);
}

#[gpui::test]
@@ -1337,6 +1395,7 @@ fn test_random_collaboration(cx: &mut MutableAppContext, mut rng: StdRng) {
(0..entry_count).map(|_| {
let range = buffer.random_byte_range(0, &mut rng);
let range = range.to_point_utf16(buffer);
+let range = range.start..range.end;
DiagnosticEntry {
range,
diagnostic: Diagnostic {

@@ -71,7 +71,7 @@ impl DiagnosticSet {
diagnostics: SumTree::from_iter(
entries.into_iter().map(|entry| DiagnosticEntry {
range: buffer.anchor_before(entry.range.start)
-..buffer.anchor_after(entry.range.end),
+..buffer.anchor_before(entry.range.end),
diagnostic: entry.diagnostic,
}),
buffer,

@@ -28,6 +28,7 @@ use std::{
any::Any,
cell::RefCell,
fmt::Debug,
+hash::Hash,
mem,
ops::Range,
path::{Path, PathBuf},
@@ -134,6 +135,10 @@ impl CachedLspAdapter {
self.adapter.process_diagnostics(params).await
}

+pub async fn process_completion(&self, completion_item: &mut lsp::CompletionItem) {
+self.adapter.process_completion(completion_item).await
+}
+
pub async fn label_for_completion(
&self,
completion_item: &lsp::CompletionItem,
@@ -174,6 +179,8 @@ pub trait LspAdapter: 'static + Send + Sync {

async fn process_diagnostics(&self, _: &mut lsp::PublishDiagnosticsParams) {}

+async fn process_completion(&self, _: &mut lsp::CompletionItem) {}
+
async fn label_for_completion(
&self,
_: &lsp::CompletionItem,
@@ -326,7 +333,13 @@ struct InjectionConfig {
query: Query,
content_capture_ix: u32,
language_capture_ix: Option<u32>,
-languages_by_pattern_ix: Vec<Option<Box<str>>>,
+patterns: Vec<InjectionPatternConfig>,
+}
+
+#[derive(Default, Clone)]
+struct InjectionPatternConfig {
+language: Option<Box<str>>,
+combined: bool,
}

struct BracketConfig {
@@ -637,6 +650,10 @@ impl Language {
self.adapter.clone()
}

+pub fn id(&self) -> Option<usize> {
+self.grammar.as_ref().map(|g| g.id)
+}
+
pub fn with_highlights_query(mut self, source: &str) -> Result<Self> {
let grammar = self.grammar_mut();
grammar.highlights_query = Some(Query::new(grammar.ts_language, source)?);
@@ -730,15 +747,21 @@ impl Language {
("content", &mut content_capture_ix),
],
);
-let languages_by_pattern_ix = (0..query.pattern_count())
+let patterns = (0..query.pattern_count())
.map(|ix| {
-query.property_settings(ix).iter().find_map(|setting| {
+let mut config = InjectionPatternConfig::default();
-if setting.key.as_ref() == "language" {
+for setting in query.property_settings(ix) {
-return setting.value.clone();
+match setting.key.as_ref() {
-} else {
+"language" => {
-None
+config.language = setting.value.clone();
+}
+"combined" => {
+config.combined = true;
+}
+_ => {}
}
-})
+}
+config
})
.collect();
if let Some(content_capture_ix) = content_capture_ix {
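Each injection pattern's `#set!` properties are now folded into an `InjectionPatternConfig`, so a pattern can carry both a `language` name and a `combined` flag. A rough sketch of that folding step, with simple key/value pairs standing in for tree-sitter's query property values:

```rust
// Sketch of collecting per-pattern settings into one config struct.
#[derive(Default, Debug, PartialEq, Clone)]
struct InjectionPatternConfig {
    language: Option<String>,
    combined: bool,
}

fn config_for_pattern(settings: &[(&str, Option<&str>)]) -> InjectionPatternConfig {
    let mut config = InjectionPatternConfig::default();
    for &(key, value) in settings {
        match key {
            // e.g. (#set! language "html")
            "language" => config.language = value.map(|value| value.to_string()),
            // e.g. (#set! combined), for templates that parse as one combined document
            "combined" => config.combined = true,
            _ => {}
        }
    }
    config
}

fn main() {
    let config = config_for_pattern(&[("language", Some("html")), ("combined", None)]);
    assert_eq!(
        config,
        InjectionPatternConfig { language: Some("html".to_string()), combined: true }
    );
}
```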
@@ -746,7 +769,7 @@ impl Language {
query,
language_capture_ix,
content_capture_ix,
-languages_by_pattern_ix,
+patterns,
});
}
Ok(self)
@@ -809,6 +832,12 @@ impl Language {
}
}

+pub async fn process_completion(self: &Arc<Self>, completion: &mut lsp::CompletionItem) {
+if let Some(adapter) = self.adapter.as_ref() {
+adapter.process_completion(completion).await;
+}
+}
+
pub async fn label_for_completion(
self: &Arc<Self>,
completion: &lsp::CompletionItem,
@@ -883,6 +912,20 @@ impl Language {
}
}

+impl Hash for Language {
+fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
+self.id().hash(state)
+}
+}
+
+impl PartialEq for Language {
+fn eq(&self, other: &Self) -> bool {
+self.id().eq(&other.id())
+}
+}
+
+impl Eq for Language {}
+
impl Debug for Language {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("Language")
@@ -1010,8 +1053,8 @@ pub fn point_to_lsp(point: PointUtf16) -> lsp::Position {
lsp::Position::new(point.row, point.column)
}

-pub fn point_from_lsp(point: lsp::Position) -> PointUtf16 {
+pub fn point_from_lsp(point: lsp::Position) -> Unclipped<PointUtf16> {
-PointUtf16::new(point.line, point.character)
+Unclipped(PointUtf16::new(point.line, point.character))
}

pub fn range_to_lsp(range: Range<PointUtf16>) -> lsp::Range {
@@ -1021,7 +1064,7 @@ pub fn range_to_lsp(range: Range<PointUtf16>) -> lsp::Range {
}
}

-pub fn range_from_lsp(range: lsp::Range) -> Range<PointUtf16> {
+pub fn range_from_lsp(range: lsp::Range) -> Range<Unclipped<PointUtf16>> {
let mut start = point_from_lsp(range.start);
let mut end = point_from_lsp(range.end);
if start > end {
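`point_from_lsp` and `range_from_lsp` now return `Unclipped<PointUtf16>`, marking positions that came from a language server and may lie outside the buffer until they are clipped. A small sketch of why the wrapper exists; the `Buffer` below is a toy stand-in for the real snapshot, not Zed's rope:

```rust
// Sketch: server-supplied points are tagged Unclipped and clamped before use.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
struct PointUtf16 {
    row: u32,
    column: u32,
}

#[derive(Clone, Copy, Debug)]
struct Unclipped<T>(T);

struct Buffer {
    line_lens: Vec<u32>, // line lengths in UTF-16 code units
}

impl Buffer {
    fn clip_point_utf16(&self, point: Unclipped<PointUtf16>) -> PointUtf16 {
        // Clamp the row to the last line and the column to that line's length.
        let row = point.0.row.min(self.line_lens.len() as u32 - 1);
        let column = point.0.column.min(self.line_lens[row as usize]);
        PointUtf16 { row, column }
    }
}

fn point_from_lsp(line: u32, character: u32) -> Unclipped<PointUtf16> {
    Unclipped(PointUtf16 { row: line, column: character })
}

fn main() {
    let buffer = Buffer { line_lens: vec![10, 4] };
    // The server reports column 99 on line 1; clipping pulls it back to column 4.
    let lsp_point = point_from_lsp(1, 99);
    assert_eq!(buffer.clip_point_utf16(lsp_point), PointUtf16 { row: 1, column: 4 });
}
```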
@ -426,10 +426,11 @@ pub async fn deserialize_completion(
|
||||||
.and_then(deserialize_anchor)
|
.and_then(deserialize_anchor)
|
||||||
.ok_or_else(|| anyhow!("invalid old end"))?;
|
.ok_or_else(|| anyhow!("invalid old end"))?;
|
||||||
let lsp_completion = serde_json::from_slice(&completion.lsp_completion)?;
|
let lsp_completion = serde_json::from_slice(&completion.lsp_completion)?;
|
||||||
let label = match language {
|
|
||||||
Some(l) => l.label_for_completion(&lsp_completion).await,
|
let mut label = None;
|
||||||
None => None,
|
if let Some(language) = language {
|
||||||
};
|
label = language.label_for_completion(&lsp_completion).await;
|
||||||
|
}
|
||||||
|
|
||||||
Ok(Completion {
|
Ok(Completion {
|
||||||
old_range: old_start..old_end,
|
old_range: old_start..old_end,
|
||||||
|
|
File diff suppressed because it is too large
Load diff
|
@ -128,8 +128,8 @@ impl LspCommand for PrepareRename {
|
||||||
) = message
|
) = message
|
||||||
{
|
{
|
||||||
let Range { start, end } = range_from_lsp(range);
|
let Range { start, end } = range_from_lsp(range);
|
||||||
if buffer.clip_point_utf16(start, Bias::Left) == start
|
if buffer.clip_point_utf16(start, Bias::Left) == start.0
|
||||||
&& buffer.clip_point_utf16(end, Bias::Left) == end
|
&& buffer.clip_point_utf16(end, Bias::Left) == end.0
|
||||||
{
|
{
|
||||||
return Ok(Some(buffer.anchor_after(start)..buffer.anchor_before(end)));
|
return Ok(Some(buffer.anchor_after(start)..buffer.anchor_before(end)));
|
||||||
}
|
}
|
||||||
|
|
|
@@ -30,6 +30,7 @@ use language::{
    CodeLabel, Completion, Diagnostic, DiagnosticEntry, DiagnosticSet, Event as BufferEvent,
    File as _, Language, LanguageRegistry, LanguageServerName, LocalFile, OffsetRangeExt,
    Operation, Patch, PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction,
+    Unclipped,
 };
 use lsp::{
    DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer, LanguageString,

@@ -251,7 +252,7 @@ pub struct Symbol {
    pub label: CodeLabel,
    pub name: String,
    pub kind: lsp::SymbolKind,
-    pub range: Range<PointUtf16>,
+    pub range: Range<Unclipped<PointUtf16>>,
    pub signature: [u8; 32],
 }

@@ -2582,7 +2583,7 @@ impl Project {
        language_server_id: usize,
        abs_path: PathBuf,
        version: Option<i32>,
-        diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
+        diagnostics: Vec<DiagnosticEntry<Unclipped<PointUtf16>>>,
        cx: &mut ModelContext<Project>,
    ) -> Result<(), anyhow::Error> {
        let (worktree, relative_path) = self

@@ -2620,7 +2621,7 @@ impl Project {
    fn update_buffer_diagnostics(
        &mut self,
        buffer: &ModelHandle<Buffer>,
-        mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
+        mut diagnostics: Vec<DiagnosticEntry<Unclipped<PointUtf16>>>,
        version: Option<i32>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {

@@ -2644,7 +2645,7 @@ impl Project {
        let mut sanitized_diagnostics = Vec::new();
        let edits_since_save = Patch::new(
            snapshot
-                .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
+                .edits_since::<Unclipped<PointUtf16>>(buffer.read(cx).saved_version())
                .collect(),
        );
        for entry in diagnostics {
|
||||||
let mut range = snapshot.clip_point_utf16(start, Bias::Left)
|
let mut range = snapshot.clip_point_utf16(start, Bias::Left)
|
||||||
..snapshot.clip_point_utf16(end, Bias::Right);
|
..snapshot.clip_point_utf16(end, Bias::Right);
|
||||||
|
|
||||||
// Expand empty ranges by one character
|
// Expand empty ranges by one codepoint
|
||||||
if range.start == range.end {
|
if range.start == range.end {
|
||||||
|
// This will be go to the next boundary when being clipped
|
||||||
range.end.column += 1;
|
range.end.column += 1;
|
||||||
range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
|
range.end = snapshot.clip_point_utf16(Unclipped(range.end), Bias::Right);
|
||||||
if range.start == range.end && range.end.column > 0 {
|
if range.start == range.end && range.end.column > 0 {
|
||||||
range.start.column -= 1;
|
range.start.column -= 1;
|
||||||
range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
|
range.end = snapshot.clip_point_utf16(Unclipped(range.end), Bias::Left);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
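A rough standalone sketch of the "expand empty ranges by one codepoint" step above, using plain column numbers instead of the real `Unclipped<PointUtf16>` and snapshot; the `clip` closure here is a stand-in assumption for the snapshot's clipping:

```rust
/// Expand an empty diagnostic range so the underline covers one codepoint,
/// falling back to the previous codepoint when the range sits at the end of a line.
fn expand_empty_range(line_len: u32, start: u32, end: u32) -> (u32, u32) {
    let clip = |col: u32| col.min(line_len); // stand-in for snapshot clipping
    let (mut start, mut end) = (start, end);
    if start == end {
        end = clip(end + 1); // try to cover the next codepoint
        if start == end && end > 0 {
            start = clip(start - 1); // otherwise cover the previous one
        }
    }
    (start, end)
}

fn main() {
    assert_eq!(expand_empty_range(10, 4, 4), (4, 5));   // mid-line: extend right
    assert_eq!(expand_empty_range(10, 10, 10), (9, 10)); // end of line: extend left
}
```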
@@ -3273,7 +3275,7 @@ impl Project {
            return Task::ready(Ok(Default::default()));
        };

-        let position = position.to_point_utf16(source_buffer);
+        let position = Unclipped(position.to_point_utf16(source_buffer));
        let anchor = source_buffer.anchor_after(position);

        if worktree.read(cx).as_local().is_some() {

@@ -3292,7 +3294,7 @@ impl Project {
                    lsp::TextDocumentIdentifier::new(
                        lsp::Url::from_file_path(buffer_abs_path).unwrap(),
                    ),
-                    point_to_lsp(position),
+                    point_to_lsp(position.0),
                ),
                context: Default::default(),
                work_done_progress_params: Default::default(),

@@ -3314,88 +3316,91 @@ impl Project {
            let snapshot = this.snapshot();
            let clipped_position = this.clip_point_utf16(position, Bias::Left);
            let mut range_for_token = None;
-            completions.into_iter().filter_map(move |lsp_completion| {
+            completions
+                .into_iter()
+                .filter_map(move |mut lsp_completion| {
                    // For now, we can only handle additional edits if they are returned
                    // when resolving the completion, not if they are present initially.
                    if lsp_completion
                        .additional_text_edits
                        .as_ref()
                        .map_or(false, |edits| !edits.is_empty())
                    {
                        return None;
                    }

                    let (old_range, mut new_text) = match lsp_completion.text_edit.as_ref() {
                        // If the language server provides a range to overwrite, then
                        // check that the range is valid.
                        Some(lsp::CompletionTextEdit::Edit(edit)) => {
                            let range = range_from_lsp(edit.range);
                            let start = snapshot.clip_point_utf16(range.start, Bias::Left);
                            let end = snapshot.clip_point_utf16(range.end, Bias::Left);
-                            if start != range.start || end != range.end {
+                            if start != range.start.0 || end != range.end.0 {
                                log::info!("completion out of expected range");
                                return None;
                            }
                            (
                                snapshot.anchor_before(start)..snapshot.anchor_after(end),
                                edit.new_text.clone(),
                            )
                        }
                        // If the language server does not provide a range, then infer
                        // the range based on the syntax tree.
                        None => {
-                            if position != clipped_position {
+                            if position.0 != clipped_position {
                                log::info!("completion out of expected range");
                                return None;
                            }
                            let Range { start, end } = range_for_token
                                .get_or_insert_with(|| {
                                    let offset = position.to_offset(&snapshot);
                                    let (range, kind) = snapshot.surrounding_word(offset);
                                    if kind == Some(CharKind::Word) {
                                        range
                                    } else {
                                        offset..offset
                                    }
                                })
                                .clone();
                            let text = lsp_completion
                                .insert_text
                                .as_ref()
                                .unwrap_or(&lsp_completion.label)
                                .clone();
                            (
                                snapshot.anchor_before(start)..snapshot.anchor_after(end),
                                text,
                            )
                        }
                        Some(lsp::CompletionTextEdit::InsertAndReplace(_)) => {
                            log::info!("unsupported insert/replace completion");
                            return None;
                        }
                    };

                    LineEnding::normalize(&mut new_text);
                    let language = language.clone();
                    Some(async move {
-                        let label = if let Some(language) = language {
-                            language.label_for_completion(&lsp_completion).await
-                        } else {
-                            None
-                        };
+                        let mut label = None;
+                        if let Some(language) = language {
+                            language.process_completion(&mut lsp_completion).await;
+                            label = language.label_for_completion(&lsp_completion).await;
+                        }
                        Completion {
                            old_range,
                            new_text,
                            label: label.unwrap_or_else(|| {
                                CodeLabel::plain(
                                    lsp_completion.label.clone(),
                                    lsp_completion.filter_text.as_deref(),
                                )
                            }),
                            lsp_completion,
                        }
                    })
                })
        });

        Ok(futures::future::join_all(completions).await)
@@ -3448,29 +3453,41 @@ impl Project {
        let buffer_id = buffer.remote_id();

        if self.is_local() {
-            let lang_server = if let Some((_, server)) = self.language_server_for_buffer(buffer, cx)
-            {
-                server.clone()
-            } else {
-                return Task::ready(Ok(Default::default()));
+            let lang_server = match self.language_server_for_buffer(buffer, cx) {
+                Some((_, server)) => server.clone(),
+                _ => return Task::ready(Ok(Default::default())),
            };

            cx.spawn(|this, mut cx| async move {
                let resolved_completion = lang_server
                    .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
                    .await?;

                if let Some(edits) = resolved_completion.additional_text_edits {
                    let edits = this
                        .update(&mut cx, |this, cx| {
                            this.edits_from_lsp(&buffer_handle, edits, None, cx)
                        })
                        .await?;

                    buffer_handle.update(&mut cx, |buffer, cx| {
                        buffer.finalize_last_transaction();
                        buffer.start_transaction();

                        for (range, text) in edits {
-                            buffer.edit([(range, text)], None, cx);
+                            let primary = &completion.old_range;
+                            let start_within = primary.start.cmp(&range.start, buffer).is_le()
+                                && primary.end.cmp(&range.start, buffer).is_ge();
+                            let end_within = range.start.cmp(&primary.end, buffer).is_le()
+                                && range.end.cmp(&primary.end, buffer).is_ge();
+
+                            // Skip additional edits which overlap with the primary completion edit
+                            // https://github.com/zed-industries/zed/pull/1871
+                            if !start_within && !end_within {
+                                buffer.edit([(range, text)], None, cx);
+                            }
                        }

                        let transaction = if buffer.end_transaction(cx).is_some() {
                            let transaction = buffer.finalize_last_transaction().unwrap().clone();
                            if !push_to_history {
@ -3568,7 +3585,13 @@ impl Project {
|
||||||
partial_result_params: Default::default(),
|
partial_result_params: Default::default(),
|
||||||
context: lsp::CodeActionContext {
|
context: lsp::CodeActionContext {
|
||||||
diagnostics: relevant_diagnostics,
|
diagnostics: relevant_diagnostics,
|
||||||
only: None,
|
only: Some(vec![
|
||||||
|
lsp::CodeActionKind::EMPTY,
|
||||||
|
lsp::CodeActionKind::QUICKFIX,
|
||||||
|
lsp::CodeActionKind::REFACTOR,
|
||||||
|
lsp::CodeActionKind::REFACTOR_EXTRACT,
|
||||||
|
lsp::CodeActionKind::SOURCE,
|
||||||
|
]),
|
||||||
},
|
},
|
||||||
})
|
})
|
||||||
.await?
|
.await?
|
||||||
|
@ -5111,22 +5134,30 @@ impl Project {
|
||||||
_: Arc<Client>,
|
_: Arc<Client>,
|
||||||
mut cx: AsyncAppContext,
|
mut cx: AsyncAppContext,
|
||||||
) -> Result<proto::GetCompletionsResponse> {
|
) -> Result<proto::GetCompletionsResponse> {
|
||||||
let position = envelope
|
|
||||||
.payload
|
|
||||||
.position
|
|
||||||
.and_then(language::proto::deserialize_anchor)
|
|
||||||
.ok_or_else(|| anyhow!("invalid position"))?;
|
|
||||||
let version = deserialize_version(envelope.payload.version);
|
|
||||||
let buffer = this.read_with(&cx, |this, cx| {
|
let buffer = this.read_with(&cx, |this, cx| {
|
||||||
this.opened_buffers
|
this.opened_buffers
|
||||||
.get(&envelope.payload.buffer_id)
|
.get(&envelope.payload.buffer_id)
|
||||||
.and_then(|buffer| buffer.upgrade(cx))
|
.and_then(|buffer| buffer.upgrade(cx))
|
||||||
.ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
|
.ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
|
||||||
})?;
|
})?;
|
||||||
|
|
||||||
|
let position = envelope
|
||||||
|
.payload
|
||||||
|
.position
|
||||||
|
.and_then(language::proto::deserialize_anchor)
|
||||||
|
.map(|p| {
|
||||||
|
buffer.read_with(&cx, |buffer, _| {
|
||||||
|
buffer.clip_point_utf16(Unclipped(p.to_point_utf16(buffer)), Bias::Left)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
.ok_or_else(|| anyhow!("invalid position"))?;
|
||||||
|
|
||||||
|
let version = deserialize_version(envelope.payload.version);
|
||||||
buffer
|
buffer
|
||||||
.update(&mut cx, |buffer, _| buffer.wait_for_version(version))
|
.update(&mut cx, |buffer, _| buffer.wait_for_version(version))
|
||||||
.await;
|
.await;
|
||||||
let version = buffer.read_with(&cx, |buffer, _| buffer.version());
|
let version = buffer.read_with(&cx, |buffer, _| buffer.version());
|
||||||
|
|
||||||
let completions = this
|
let completions = this
|
||||||
.update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
|
.update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
|
||||||
.await?;
|
.await?;
|
||||||
|
@@ -5613,8 +5644,8 @@ impl Project {
            },

            name: serialized_symbol.name,
-            range: PointUtf16::new(start.row, start.column)
-                ..PointUtf16::new(end.row, end.column),
+            range: Unclipped(PointUtf16::new(start.row, start.column))
+                ..Unclipped(PointUtf16::new(end.row, end.column)),
            kind,
            signature: serialized_symbol
                .signature
|
@ -5700,10 +5731,10 @@ impl Project {
|
||||||
|
|
||||||
let mut lsp_edits = lsp_edits.into_iter().peekable();
|
let mut lsp_edits = lsp_edits.into_iter().peekable();
|
||||||
let mut edits = Vec::new();
|
let mut edits = Vec::new();
|
||||||
while let Some((mut range, mut new_text)) = lsp_edits.next() {
|
while let Some((range, mut new_text)) = lsp_edits.next() {
|
||||||
// Clip invalid ranges provided by the language server.
|
// Clip invalid ranges provided by the language server.
|
||||||
range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
|
let mut range = snapshot.clip_point_utf16(range.start, Bias::Left)
|
||||||
range.end = snapshot.clip_point_utf16(range.end, Bias::Left);
|
..snapshot.clip_point_utf16(range.end, Bias::Left);
|
||||||
|
|
||||||
// Combine any LSP edits that are adjacent.
|
// Combine any LSP edits that are adjacent.
|
||||||
//
|
//
|
||||||
|
@ -5715,11 +5746,11 @@ impl Project {
|
||||||
// In order for the diffing logic below to work properly, any edits that
|
// In order for the diffing logic below to work properly, any edits that
|
||||||
// cancel each other out must be combined into one.
|
// cancel each other out must be combined into one.
|
||||||
while let Some((next_range, next_text)) = lsp_edits.peek() {
|
while let Some((next_range, next_text)) = lsp_edits.peek() {
|
||||||
if next_range.start > range.end {
|
if next_range.start.0 > range.end {
|
||||||
if next_range.start.row > range.end.row + 1
|
if next_range.start.0.row > range.end.row + 1
|
||||||
|| next_range.start.column > 0
|
|| next_range.start.0.column > 0
|
||||||
|| snapshot.clip_point_utf16(
|
|| snapshot.clip_point_utf16(
|
||||||
PointUtf16::new(range.end.row, u32::MAX),
|
Unclipped(PointUtf16::new(range.end.row, u32::MAX)),
|
||||||
Bias::Left,
|
Bias::Left,
|
||||||
) > range.end
|
) > range.end
|
||||||
{
|
{
|
||||||
|
@ -5727,7 +5758,7 @@ impl Project {
|
||||||
}
|
}
|
||||||
new_text.push('\n');
|
new_text.push('\n');
|
||||||
}
|
}
|
||||||
range.end = next_range.end;
|
range.end = snapshot.clip_point_utf16(next_range.end, Bias::Left);
|
||||||
new_text.push_str(next_text);
|
new_text.push_str(next_text);
|
||||||
lsp_edits.next();
|
lsp_edits.next();
|
||||||
}
|
}
|
||||||
|
@ -6000,13 +6031,13 @@ fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
|
||||||
path: symbol.path.path.to_string_lossy().to_string(),
|
path: symbol.path.path.to_string_lossy().to_string(),
|
||||||
name: symbol.name.clone(),
|
name: symbol.name.clone(),
|
||||||
kind: unsafe { mem::transmute(symbol.kind) },
|
kind: unsafe { mem::transmute(symbol.kind) },
|
||||||
start: Some(proto::Point {
|
start: Some(proto::PointUtf16 {
|
||||||
row: symbol.range.start.row,
|
row: symbol.range.start.0.row,
|
||||||
column: symbol.range.start.column,
|
column: symbol.range.start.0.column,
|
||||||
}),
|
}),
|
||||||
end: Some(proto::Point {
|
end: Some(proto::PointUtf16 {
|
||||||
row: symbol.range.end.row,
|
row: symbol.range.end.0.row,
|
||||||
column: symbol.range.end.column,
|
column: symbol.range.end.0.column,
|
||||||
}),
|
}),
|
||||||
signature: symbol.signature.to_vec(),
|
signature: symbol.signature.to_vec(),
|
||||||
}
|
}
|
||||||
|
|
|
@ -1239,7 +1239,7 @@ async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
|
||||||
&buffer,
|
&buffer,
|
||||||
vec![
|
vec![
|
||||||
DiagnosticEntry {
|
DiagnosticEntry {
|
||||||
range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
|
range: Unclipped(PointUtf16::new(0, 10))..Unclipped(PointUtf16::new(0, 10)),
|
||||||
diagnostic: Diagnostic {
|
diagnostic: Diagnostic {
|
||||||
severity: DiagnosticSeverity::ERROR,
|
severity: DiagnosticSeverity::ERROR,
|
||||||
message: "syntax error 1".to_string(),
|
message: "syntax error 1".to_string(),
|
||||||
|
@ -1247,7 +1247,7 @@ async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
DiagnosticEntry {
|
DiagnosticEntry {
|
||||||
range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
|
range: Unclipped(PointUtf16::new(1, 10))..Unclipped(PointUtf16::new(1, 10)),
|
||||||
diagnostic: Diagnostic {
|
diagnostic: Diagnostic {
|
||||||
severity: DiagnosticSeverity::ERROR,
|
severity: DiagnosticSeverity::ERROR,
|
||||||
message: "syntax error 2".to_string(),
|
message: "syntax error 2".to_string(),
|
||||||
|
|
|
@@ -20,6 +20,7 @@ use gpui::{
    executor, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext,
    Task,
 };
+use language::Unclipped;
 use language::{
    proto::{deserialize_version, serialize_line_ending, serialize_version},
    Buffer, DiagnosticEntry, PointUtf16, Rope,

@@ -64,7 +65,7 @@ pub struct LocalWorktree {
    _background_scanner_task: Option<Task<()>>,
    poll_task: Option<Task<()>>,
    share: Option<ShareState>,
-    diagnostics: HashMap<Arc<Path>, Vec<DiagnosticEntry<PointUtf16>>>,
+    diagnostics: HashMap<Arc<Path>, Vec<DiagnosticEntry<Unclipped<PointUtf16>>>>,
    diagnostic_summaries: TreeMap<PathKey, DiagnosticSummary>,
    client: Arc<Client>,
    fs: Arc<dyn Fs>,

@@ -502,7 +503,10 @@ impl LocalWorktree {
        })
    }

-    pub fn diagnostics_for_path(&self, path: &Path) -> Option<Vec<DiagnosticEntry<PointUtf16>>> {
+    pub fn diagnostics_for_path(
+        &self,
+        path: &Path,
+    ) -> Option<Vec<DiagnosticEntry<Unclipped<PointUtf16>>>> {
        self.diagnostics.get(path).cloned()
    }

@@ -510,7 +514,7 @@ impl LocalWorktree {
        &mut self,
        language_server_id: usize,
        worktree_path: Arc<Path>,
-        diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
+        diagnostics: Vec<DiagnosticEntry<Unclipped<PointUtf16>>>,
        _: &mut ModelContext<Worktree>,
    ) -> Result<bool> {
        self.diagnostics.remove(&worktree_path);
@ -1168,6 +1172,10 @@ impl Snapshot {
|
||||||
self.id
|
self.id
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn abs_path(&self) -> &Arc<Path> {
|
||||||
|
&self.abs_path
|
||||||
|
}
|
||||||
|
|
||||||
pub fn contains_entry(&self, entry_id: ProjectEntryId) -> bool {
|
pub fn contains_entry(&self, entry_id: ProjectEntryId) -> bool {
|
||||||
self.entries_by_id.get(&entry_id, &()).is_some()
|
self.entries_by_id.get(&entry_id, &()).is_some()
|
||||||
}
|
}
|
||||||
|
@ -1359,10 +1367,6 @@ impl Snapshot {
|
||||||
}
|
}
|
||||||
|
|
||||||
impl LocalSnapshot {
|
impl LocalSnapshot {
|
||||||
pub fn abs_path(&self) -> &Arc<Path> {
|
|
||||||
&self.abs_path
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn extension_counts(&self) -> &HashMap<OsString, usize> {
|
pub fn extension_counts(&self) -> &HashMap<OsString, usize> {
|
||||||
&self.extension_counts
|
&self.extension_counts
|
||||||
}
|
}
|
||||||
|
|
|
@ -43,6 +43,7 @@ pub struct ProjectPanel {
|
||||||
filename_editor: ViewHandle<Editor>,
|
filename_editor: ViewHandle<Editor>,
|
||||||
clipboard_entry: Option<ClipboardEntry>,
|
clipboard_entry: Option<ClipboardEntry>,
|
||||||
context_menu: ViewHandle<ContextMenu>,
|
context_menu: ViewHandle<ContextMenu>,
|
||||||
|
dragged_entry_destination: Option<Arc<Path>>,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Copy, Clone)]
|
#[derive(Copy, Clone)]
|
||||||
|
@ -95,6 +96,13 @@ pub struct Open {
|
||||||
pub change_focus: bool,
|
pub change_focus: bool,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, PartialEq)]
|
||||||
|
pub struct MoveProjectEntry {
|
||||||
|
pub entry_to_move: ProjectEntryId,
|
||||||
|
pub destination: ProjectEntryId,
|
||||||
|
pub destination_is_file: bool,
|
||||||
|
}
|
||||||
|
|
||||||
#[derive(Clone, PartialEq)]
|
#[derive(Clone, PartialEq)]
|
||||||
pub struct DeployContextMenu {
|
pub struct DeployContextMenu {
|
||||||
pub position: Vector2F,
|
pub position: Vector2F,
|
||||||
|
@ -117,7 +125,10 @@ actions!(
|
||||||
ToggleFocus
|
ToggleFocus
|
||||||
]
|
]
|
||||||
);
|
);
|
||||||
impl_internal_actions!(project_panel, [Open, ToggleExpanded, DeployContextMenu]);
|
impl_internal_actions!(
|
||||||
|
project_panel,
|
||||||
|
[Open, ToggleExpanded, DeployContextMenu, MoveProjectEntry]
|
||||||
|
);
|
||||||
|
|
||||||
pub fn init(cx: &mut MutableAppContext) {
|
pub fn init(cx: &mut MutableAppContext) {
|
||||||
cx.add_action(ProjectPanel::deploy_context_menu);
|
cx.add_action(ProjectPanel::deploy_context_menu);
|
||||||
|
@ -141,6 +152,7 @@ pub fn init(cx: &mut MutableAppContext) {
|
||||||
this.paste(action, cx);
|
this.paste(action, cx);
|
||||||
},
|
},
|
||||||
);
|
);
|
||||||
|
cx.add_action(ProjectPanel::move_entry);
|
||||||
}
|
}
|
||||||
|
|
||||||
pub enum Event {
|
pub enum Event {
|
||||||
|
@ -219,6 +231,7 @@ impl ProjectPanel {
|
||||||
filename_editor,
|
filename_editor,
|
||||||
clipboard_entry: None,
|
clipboard_entry: None,
|
||||||
context_menu: cx.add_view(ContextMenu::new),
|
context_menu: cx.add_view(ContextMenu::new),
|
||||||
|
dragged_entry_destination: None,
|
||||||
};
|
};
|
||||||
this.update_visible_entries(None, cx);
|
this.update_visible_entries(None, cx);
|
||||||
this
|
this
|
||||||
|
@ -774,6 +787,39 @@ impl ProjectPanel {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn move_entry(
|
||||||
|
&mut self,
|
||||||
|
&MoveProjectEntry {
|
||||||
|
entry_to_move,
|
||||||
|
destination,
|
||||||
|
destination_is_file,
|
||||||
|
}: &MoveProjectEntry,
|
||||||
|
cx: &mut ViewContext<Self>,
|
||||||
|
) {
|
||||||
|
let destination_worktree = self.project.update(cx, |project, cx| {
|
||||||
|
let entry_path = project.path_for_entry(entry_to_move, cx)?;
|
||||||
|
let destination_entry_path = project.path_for_entry(destination, cx)?.path.clone();
|
||||||
|
|
||||||
|
let mut destination_path = destination_entry_path.as_ref();
|
||||||
|
if destination_is_file {
|
||||||
|
destination_path = destination_path.parent()?;
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut new_path = destination_path.to_path_buf();
|
||||||
|
new_path.push(entry_path.path.file_name()?);
|
||||||
|
if new_path != entry_path.path.as_ref() {
|
||||||
|
let task = project.rename_entry(entry_to_move, new_path, cx)?;
|
||||||
|
cx.foreground().spawn(task).detach_and_log_err(cx);
|
||||||
|
}
|
||||||
|
|
||||||
|
Some(project.worktree_id_for_entry(destination, cx)?)
|
||||||
|
});
|
||||||
|
|
||||||
|
if let Some(destination_worktree) = destination_worktree {
|
||||||
|
self.expand_entry(destination_worktree, destination, cx);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
fn index_for_selection(&self, selection: Selection) -> Option<(usize, usize, usize)> {
|
fn index_for_selection(&self, selection: Selection) -> Option<(usize, usize, usize)> {
|
||||||
let mut entry_index = 0;
|
let mut entry_index = 0;
|
||||||
let mut visible_entries_index = 0;
|
let mut visible_entries_index = 0;
|
||||||
|
@ -1079,10 +1125,13 @@ impl ProjectPanel {
|
||||||
entry_id: ProjectEntryId,
|
entry_id: ProjectEntryId,
|
||||||
details: EntryDetails,
|
details: EntryDetails,
|
||||||
editor: &ViewHandle<Editor>,
|
editor: &ViewHandle<Editor>,
|
||||||
|
dragged_entry_destination: &mut Option<Arc<Path>>,
|
||||||
theme: &theme::ProjectPanel,
|
theme: &theme::ProjectPanel,
|
||||||
cx: &mut RenderContext<Self>,
|
cx: &mut RenderContext<Self>,
|
||||||
) -> ElementBox {
|
) -> ElementBox {
|
||||||
|
let this = cx.handle();
|
||||||
let kind = details.kind;
|
let kind = details.kind;
|
||||||
|
let path = details.path.clone();
|
||||||
let padding = theme.container.padding.left + details.depth as f32 * theme.indent_width;
|
let padding = theme.container.padding.left + details.depth as f32 * theme.indent_width;
|
||||||
|
|
||||||
let entry_style = if details.is_cut {
|
let entry_style = if details.is_cut {
|
||||||
|
@ -1096,7 +1145,20 @@ impl ProjectPanel {
|
||||||
let show_editor = details.is_editing && !details.is_processing;
|
let show_editor = details.is_editing && !details.is_processing;
|
||||||
|
|
||||||
MouseEventHandler::<Self>::new(entry_id.to_usize(), cx, |state, cx| {
|
MouseEventHandler::<Self>::new(entry_id.to_usize(), cx, |state, cx| {
|
||||||
let style = entry_style.style_for(state, details.is_selected).clone();
|
let mut style = entry_style.style_for(state, details.is_selected).clone();
|
||||||
|
|
||||||
|
if cx
|
||||||
|
.global::<DragAndDrop<Workspace>>()
|
||||||
|
.currently_dragged::<ProjectEntryId>(cx.window_id())
|
||||||
|
.is_some()
|
||||||
|
&& dragged_entry_destination
|
||||||
|
.as_ref()
|
||||||
|
.filter(|destination| details.path.starts_with(destination))
|
||||||
|
.is_some()
|
||||||
|
{
|
||||||
|
style = entry_style.active.clone().unwrap();
|
||||||
|
}
|
||||||
|
|
||||||
let row_container_style = if show_editor {
|
let row_container_style = if show_editor {
|
||||||
theme.filename_editor.container
|
theme.filename_editor.container
|
||||||
} else {
|
} else {
|
||||||
|
@ -1128,6 +1190,35 @@ impl ProjectPanel {
|
||||||
position: e.position,
|
position: e.position,
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
.on_up(MouseButton::Left, move |_, cx| {
|
||||||
|
if let Some((_, dragged_entry)) = cx
|
||||||
|
.global::<DragAndDrop<Workspace>>()
|
||||||
|
.currently_dragged::<ProjectEntryId>(cx.window_id())
|
||||||
|
{
|
||||||
|
cx.dispatch_action(MoveProjectEntry {
|
||||||
|
entry_to_move: *dragged_entry,
|
||||||
|
destination: entry_id,
|
||||||
|
destination_is_file: matches!(details.kind, EntryKind::File(_)),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.on_move(move |_, cx| {
|
||||||
|
if cx
|
||||||
|
.global::<DragAndDrop<Workspace>>()
|
||||||
|
.currently_dragged::<ProjectEntryId>(cx.window_id())
|
||||||
|
.is_some()
|
||||||
|
{
|
||||||
|
if let Some(this) = this.upgrade(cx.app) {
|
||||||
|
this.update(cx.app, |this, _| {
|
||||||
|
this.dragged_entry_destination = if matches!(kind, EntryKind::File(_)) {
|
||||||
|
path.parent().map(|parent| Arc::from(parent))
|
||||||
|
} else {
|
||||||
|
Some(path.clone())
|
||||||
|
};
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
})
|
||||||
.as_draggable(entry_id, {
|
.as_draggable(entry_id, {
|
||||||
let row_container_style = theme.dragged_entry.container;
|
let row_container_style = theme.dragged_entry.container;
|
||||||
|
|
||||||
|
@ -1154,14 +1245,15 @@ impl View for ProjectPanel {
|
||||||
}
|
}
|
||||||
|
|
||||||
fn render(&mut self, cx: &mut gpui::RenderContext<'_, Self>) -> gpui::ElementBox {
|
fn render(&mut self, cx: &mut gpui::RenderContext<'_, Self>) -> gpui::ElementBox {
|
||||||
enum Tag {}
|
enum ProjectPanel {}
|
||||||
let theme = &cx.global::<Settings>().theme.project_panel;
|
let theme = &cx.global::<Settings>().theme.project_panel;
|
||||||
let mut container_style = theme.container;
|
let mut container_style = theme.container;
|
||||||
let padding = std::mem::take(&mut container_style.padding);
|
let padding = std::mem::take(&mut container_style.padding);
|
||||||
let last_worktree_root_id = self.last_worktree_root_id;
|
let last_worktree_root_id = self.last_worktree_root_id;
|
||||||
|
|
||||||
Stack::new()
|
Stack::new()
|
||||||
.with_child(
|
.with_child(
|
||||||
MouseEventHandler::<Tag>::new(0, cx, |_, cx| {
|
MouseEventHandler::<ProjectPanel>::new(0, cx, |_, cx| {
|
||||||
UniformList::new(
|
UniformList::new(
|
||||||
self.list.clone(),
|
self.list.clone(),
|
||||||
self.visible_entries
|
self.visible_entries
|
||||||
|
@ -1171,15 +1263,19 @@ impl View for ProjectPanel {
|
||||||
cx,
|
cx,
|
||||||
move |this, range, items, cx| {
|
move |this, range, items, cx| {
|
||||||
let theme = cx.global::<Settings>().theme.clone();
|
let theme = cx.global::<Settings>().theme.clone();
|
||||||
|
let mut dragged_entry_destination =
|
||||||
|
this.dragged_entry_destination.clone();
|
||||||
this.for_each_visible_entry(range, cx, |id, details, cx| {
|
this.for_each_visible_entry(range, cx, |id, details, cx| {
|
||||||
items.push(Self::render_entry(
|
items.push(Self::render_entry(
|
||||||
id,
|
id,
|
||||||
details,
|
details,
|
||||||
&this.filename_editor,
|
&this.filename_editor,
|
||||||
|
&mut dragged_entry_destination,
|
||||||
&theme.project_panel,
|
&theme.project_panel,
|
||||||
cx,
|
cx,
|
||||||
));
|
));
|
||||||
});
|
});
|
||||||
|
this.dragged_entry_destination = dragged_entry_destination;
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
.with_padding_top(padding.top)
|
.with_padding_top(padding.top)
|
||||||
|
|
|
@ -28,4 +28,4 @@ settings = { path = "../settings", features = ["test-support"] }
|
||||||
gpui = { path = "../gpui", features = ["test-support"] }
|
gpui = { path = "../gpui", features = ["test-support"] }
|
||||||
language = { path = "../language", features = ["test-support"] }
|
language = { path = "../language", features = ["test-support"] }
|
||||||
lsp = { path = "../lsp", features = ["test-support"] }
|
lsp = { path = "../lsp", features = ["test-support"] }
|
||||||
project = { path = "../project", features = ["test-support"] }
|
project = { path = "../project", features = ["test-support"] }
|
||||||
|
|
|
@ -12,7 +12,7 @@ smallvec = { version = "1.6", features = ["union"] }
|
||||||
sum_tree = { path = "../sum_tree" }
|
sum_tree = { path = "../sum_tree" }
|
||||||
arrayvec = "0.7.1"
|
arrayvec = "0.7.1"
|
||||||
log = { version = "0.4.16", features = ["kv_unstable_serde"] }
|
log = { version = "0.4.16", features = ["kv_unstable_serde"] }
|
||||||
|
util = { path = "../util" }
|
||||||
|
|
||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
rand = "0.8.3"
|
rand = "0.8.3"
|
||||||
|
|
|
@@ -1,16 +1,23 @@
 mod offset_utf16;
 mod point;
 mod point_utf16;
+mod unclipped;

 use arrayvec::ArrayString;
 use bromberg_sl2::{DigestString, HashMatrix};
 use smallvec::SmallVec;
-use std::{cmp, fmt, io, mem, ops::Range, str};
+use std::{
+    cmp, fmt, io, mem,
+    ops::{AddAssign, Range},
+    str,
+};
 use sum_tree::{Bias, Dimension, SumTree};
+use util::debug_panic;

 pub use offset_utf16::OffsetUtf16;
 pub use point::Point;
 pub use point_utf16::PointUtf16;
+pub use unclipped::Unclipped;

 #[cfg(test)]
 const CHUNK_BASE: usize = 6;

@@ -260,6 +267,14 @@ impl Rope {
    }

    pub fn point_utf16_to_offset(&self, point: PointUtf16) -> usize {
+        self.point_utf16_to_offset_impl(point, false)
+    }
+
+    pub fn unclipped_point_utf16_to_offset(&self, point: Unclipped<PointUtf16>) -> usize {
+        self.point_utf16_to_offset_impl(point.0, true)
+    }
+
+    fn point_utf16_to_offset_impl(&self, point: PointUtf16, clip: bool) -> usize {
        if point >= self.summary().lines_utf16() {
            return self.summary().len;
        }
|
||||||
cursor.start().1
|
cursor.start().1
|
||||||
+ cursor
|
+ cursor
|
||||||
.item()
|
.item()
|
||||||
.map_or(0, |chunk| chunk.point_utf16_to_offset(overshoot))
|
.map_or(0, |chunk| chunk.point_utf16_to_offset(overshoot, clip))
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn point_utf16_to_point(&self, point: PointUtf16) -> Point {
|
pub fn unclipped_point_utf16_to_point(&self, point: Unclipped<PointUtf16>) -> Point {
|
||||||
if point >= self.summary().lines_utf16() {
|
if point.0 >= self.summary().lines_utf16() {
|
||||||
return self.summary().lines;
|
return self.summary().lines;
|
||||||
}
|
}
|
||||||
let mut cursor = self.chunks.cursor::<(PointUtf16, Point)>();
|
let mut cursor = self.chunks.cursor::<(PointUtf16, Point)>();
|
||||||
cursor.seek(&point, Bias::Left, &());
|
cursor.seek(&point.0, Bias::Left, &());
|
||||||
let overshoot = point - cursor.start().0;
|
let overshoot = Unclipped(point.0 - cursor.start().0);
|
||||||
cursor.start().1
|
cursor.start().1
|
||||||
+ cursor
|
+ cursor.item().map_or(Point::zero(), |chunk| {
|
||||||
.item()
|
chunk.unclipped_point_utf16_to_point(overshoot)
|
||||||
.map_or(Point::zero(), |chunk| chunk.point_utf16_to_point(overshoot))
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn clip_offset(&self, mut offset: usize, bias: Bias) -> usize {
|
pub fn clip_offset(&self, mut offset: usize, bias: Bias) -> usize {
|
||||||
|
@@ -330,11 +345,11 @@ impl Rope {
        }
    }

-    pub fn clip_point_utf16(&self, point: PointUtf16, bias: Bias) -> PointUtf16 {
+    pub fn clip_point_utf16(&self, point: Unclipped<PointUtf16>, bias: Bias) -> PointUtf16 {
        let mut cursor = self.chunks.cursor::<PointUtf16>();
-        cursor.seek(&point, Bias::Right, &());
+        cursor.seek(&point.0, Bias::Right, &());
        if let Some(chunk) = cursor.item() {
-            let overshoot = point - cursor.start();
+            let overshoot = Unclipped(point.0 - cursor.start());
            *cursor.start() + chunk.clip_point_utf16(overshoot, bias)
        } else {
            self.summary().lines_utf16()
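For intuition about the bias argument in the clipping paths above, here is a toy, single-line version of column clipping in UTF-16 code units. It is a sketch of the observable behavior (snap to the nearest character boundary, left or right), not the crate's chunk-based implementation:

```rust
#[derive(Clone, Copy)]
enum Bias { Left, Right }

/// Clamp `column` (in UTF-16 code units) to the nearest character boundary of `line`.
fn clip_column(line: &str, column: usize, bias: Bias) -> usize {
    let mut boundary = 0;
    for ch in line.chars() {
        if column <= boundary {
            return boundary;
        }
        let next = boundary + ch.len_utf16();
        if column < next {
            // `column` falls inside this character: snap according to the bias.
            return match bias {
                Bias::Left => boundary,
                Bias::Right => next,
            };
        }
        boundary = next;
    }
    boundary // past the end of the line: clamp to its length
}

fn main() {
    let line = "a🏀b"; // the basketball occupies two UTF-16 code units (columns 1..3)
    assert_eq!(clip_column(line, 2, Bias::Left), 1);
    assert_eq!(clip_column(line, 2, Bias::Right), 3);
    assert_eq!(clip_column(line, 99, Bias::Left), 4); // out of range clamps to line length
}
```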
@@ -665,28 +680,33 @@ impl Chunk {
    fn point_to_offset(&self, target: Point) -> usize {
        let mut offset = 0;
        let mut point = Point::new(0, 0);

        for ch in self.0.chars() {
            if point >= target {
                if point > target {
-                    panic!("point {:?} is inside of character {:?}", target, ch);
+                    debug_panic!("point {target:?} is inside of character {ch:?}");
                }
                break;
            }

            if ch == '\n' {
                point.row += 1;
-                if point.row > target.row {
-                    panic!(
-                        "point {:?} is beyond the end of a line with length {}",
-                        target, point.column
-                    );
-                }
                point.column = 0;
+
+                if point.row > target.row {
+                    debug_panic!(
+                        "point {target:?} is beyond the end of a line with length {}",
+                        point.column
+                    );
+                    break;
+                }
            } else {
                point.column += ch.len_utf8() as u32;
            }

            offset += ch.len_utf8();
        }

        offset
    }
@@ -711,45 +731,62 @@ impl Chunk {
        point_utf16
    }

-    fn point_utf16_to_offset(&self, target: PointUtf16) -> usize {
+    fn point_utf16_to_offset(&self, target: PointUtf16, clip: bool) -> usize {
        let mut offset = 0;
        let mut point = PointUtf16::new(0, 0);

        for ch in self.0.chars() {
-            if point >= target {
-                if point > target {
-                    panic!("point {:?} is inside of character {:?}", target, ch);
-                }
+            if point == target {
                break;
            }

            if ch == '\n' {
                point.row += 1;
-                if point.row > target.row {
-                    panic!(
-                        "point {:?} is beyond the end of a line with length {}",
-                        target, point.column
-                    );
-                }
                point.column = 0;
+
+                if point.row > target.row {
+                    if !clip {
+                        debug_panic!(
+                            "point {target:?} is beyond the end of a line with length {}",
+                            point.column
+                        );
+                    }
+                    // Return the offset of the newline
+                    return offset;
+                }
            } else {
                point.column += ch.len_utf16() as u32;
            }

+            if point > target {
+                if !clip {
+                    debug_panic!("point {target:?} is inside of codepoint {ch:?}");
+                }
+                // Return the offset of the codepoint which we have landed within, bias left
+                return offset;
+            }
+
            offset += ch.len_utf8();
        }

        offset
    }

-    fn point_utf16_to_point(&self, target: PointUtf16) -> Point {
+    fn unclipped_point_utf16_to_point(&self, target: Unclipped<PointUtf16>) -> Point {
        let mut point = Point::zero();
        let mut point_utf16 = PointUtf16::zero();

        for ch in self.0.chars() {
-            if point_utf16 >= target {
-                if point_utf16 > target {
-                    panic!("point {:?} is inside of character {:?}", target, ch);
-                }
+            if point_utf16 == target.0 {
                break;
            }

+            if point_utf16 > target.0 {
+                // If the point is past the end of a line or inside of a code point,
+                // return the last valid point before the target.
+                return point;
+            }
+
            if ch == '\n' {
                point_utf16 += PointUtf16::new(1, 0);
                point += Point::new(1, 0);

@@ -758,6 +795,7 @@ impl Chunk {
                point += Point::new(0, ch.len_utf8() as u32);
            }
        }

        point
    }

@@ -777,11 +815,11 @@ impl Chunk {
        unreachable!()
    }

-    fn clip_point_utf16(&self, target: PointUtf16, bias: Bias) -> PointUtf16 {
+    fn clip_point_utf16(&self, target: Unclipped<PointUtf16>, bias: Bias) -> PointUtf16 {
        for (row, line) in self.0.split('\n').enumerate() {
-            if row == target.row as usize {
+            if row == target.0.row as usize {
                let mut code_units = line.encode_utf16();
-                let mut column = code_units.by_ref().take(target.column as usize).count();
+                let mut column = code_units.by_ref().take(target.0.column as usize).count();
                if char::decode_utf16(code_units).next().transpose().is_err() {
                    match bias {
                        Bias::Left => column -= 1,
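The `clip: bool` paths above only fire `debug_panic!` when clipping was not requested. The macro itself comes from the `util` crate and is not shown in this diff; the snippet below is only an assumption about its intent (fail loudly in debug builds, degrade to a log line in release builds), written as a self-contained sketch:

```rust
/// A sketch of a `debug_panic!`-style macro: hard-fail in debug builds,
/// degrade to an error log in release builds. This is an assumption about the
/// intent of `util::debug_panic`, not its actual definition.
macro_rules! debug_panic {
    ($($arg:tt)*) => {{
        if cfg!(debug_assertions) {
            panic!($($arg)*);
        } else {
            eprintln!("error: {}", format_args!($($arg)*));
        }
    }};
}

fn main() {
    // In a release build this only logs; in a debug build it panics.
    debug_panic!("point {:?} is inside of codepoint {:?}", (0, 2), '🏀');
}
```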
|
@ -917,7 +955,7 @@ impl std::ops::Add<Self> for TextSummary {
|
||||||
type Output = Self;
|
type Output = Self;
|
||||||
|
|
||||||
fn add(mut self, rhs: Self) -> Self::Output {
|
fn add(mut self, rhs: Self) -> Self::Output {
|
||||||
self.add_assign(&rhs);
|
AddAssign::add_assign(&mut self, &rhs);
|
||||||
self
|
self
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -1114,15 +1152,15 @@ mod tests {
|
||||||
);
|
);
|
||||||
|
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
rope.clip_point_utf16(PointUtf16::new(0, 1), Bias::Left),
|
rope.clip_point_utf16(Unclipped(PointUtf16::new(0, 1)), Bias::Left),
|
||||||
PointUtf16::new(0, 0)
|
PointUtf16::new(0, 0)
|
||||||
);
|
);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
rope.clip_point_utf16(PointUtf16::new(0, 1), Bias::Right),
|
rope.clip_point_utf16(Unclipped(PointUtf16::new(0, 1)), Bias::Right),
|
||||||
PointUtf16::new(0, 2)
|
PointUtf16::new(0, 2)
|
||||||
);
|
);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
rope.clip_point_utf16(PointUtf16::new(0, 3), Bias::Right),
|
rope.clip_point_utf16(Unclipped(PointUtf16::new(0, 3)), Bias::Right),
|
||||||
PointUtf16::new(0, 2)
|
PointUtf16::new(0, 2)
|
||||||
);
|
);
|
||||||
|
|
||||||
|
@ -1238,7 +1276,7 @@ mod tests {
|
||||||
}
|
}
|
||||||
|
|
||||||
let mut offset_utf16 = OffsetUtf16(0);
|
let mut offset_utf16 = OffsetUtf16(0);
|
||||||
let mut point_utf16 = PointUtf16::zero();
|
let mut point_utf16 = Unclipped(PointUtf16::zero());
|
||||||
for unit in expected.encode_utf16() {
|
for unit in expected.encode_utf16() {
|
||||||
let left_offset = actual.clip_offset_utf16(offset_utf16, Bias::Left);
|
let left_offset = actual.clip_offset_utf16(offset_utf16, Bias::Left);
|
||||||
let right_offset = actual.clip_offset_utf16(offset_utf16, Bias::Right);
|
let right_offset = actual.clip_offset_utf16(offset_utf16, Bias::Right);
|
||||||
|
@ -1250,15 +1288,15 @@ mod tests {
|
||||||
let left_point = actual.clip_point_utf16(point_utf16, Bias::Left);
|
let left_point = actual.clip_point_utf16(point_utf16, Bias::Left);
|
||||||
let right_point = actual.clip_point_utf16(point_utf16, Bias::Right);
|
let right_point = actual.clip_point_utf16(point_utf16, Bias::Right);
|
||||||
assert!(right_point >= left_point);
|
assert!(right_point >= left_point);
|
||||||
// Ensure translating UTF-16 points to offsets doesn't panic.
|
// Ensure translating valid UTF-16 points to offsets doesn't panic.
|
||||||
actual.point_utf16_to_offset(left_point);
|
actual.point_utf16_to_offset(left_point);
|
||||||
actual.point_utf16_to_offset(right_point);
|
actual.point_utf16_to_offset(right_point);
|
||||||
|
|
||||||
offset_utf16.0 += 1;
|
offset_utf16.0 += 1;
|
||||||
if unit == b'\n' as u16 {
|
if unit == b'\n' as u16 {
|
||||||
point_utf16 += PointUtf16::new(1, 0);
|
point_utf16.0 += PointUtf16::new(1, 0);
|
||||||
} else {
|
} else {
|
||||||
point_utf16 += PointUtf16::new(0, 1);
|
point_utf16.0 += PointUtf16::new(0, 1);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
57  crates/rope/src/unclipped.rs  (new file)

@@ -0,0 +1,57 @@
+use crate::{ChunkSummary, TextDimension, TextSummary};
+use std::ops::{Add, AddAssign, Sub, SubAssign};
+
+#[derive(Debug, Default, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub struct Unclipped<T>(pub T);
+
+impl<T> From<T> for Unclipped<T> {
+    fn from(value: T) -> Self {
+        Unclipped(value)
+    }
+}
+
+impl<'a, T: sum_tree::Dimension<'a, ChunkSummary>> sum_tree::Dimension<'a, ChunkSummary>
+    for Unclipped<T>
+{
+    fn add_summary(&mut self, summary: &'a ChunkSummary, _: &()) {
+        self.0.add_summary(summary, &());
+    }
+}
+
+impl<T: TextDimension> TextDimension for Unclipped<T> {
+    fn from_text_summary(summary: &TextSummary) -> Self {
+        Unclipped(T::from_text_summary(summary))
+    }
+
+    fn add_assign(&mut self, other: &Self) {
+        TextDimension::add_assign(&mut self.0, &other.0);
+    }
+}
+
+impl<T: Add<T, Output = T>> Add<Unclipped<T>> for Unclipped<T> {
+    type Output = Unclipped<T>;
+
+    fn add(self, rhs: Unclipped<T>) -> Self::Output {
+        Unclipped(self.0 + rhs.0)
+    }
+}
+
+impl<T: Sub<T, Output = T>> Sub<Unclipped<T>> for Unclipped<T> {
+    type Output = Unclipped<T>;
+
+    fn sub(self, rhs: Unclipped<T>) -> Self::Output {
+        Unclipped(self.0 - rhs.0)
+    }
+}
+
+impl<T: AddAssign<T>> AddAssign<Unclipped<T>> for Unclipped<T> {
+    fn add_assign(&mut self, rhs: Unclipped<T>) {
+        self.0 += rhs.0;
+    }
+}
+
+impl<T: SubAssign<T>> SubAssign<Unclipped<T>> for Unclipped<T> {
+    fn sub_assign(&mut self, rhs: Unclipped<T>) {
+        self.0 -= rhs.0;
+    }
+}
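For reference, `Unclipped<T>` is a transparent marker that a coordinate has not yet been validated against a buffer, so APIs can demand an explicit clipping step before the value is used as a real `PointUtf16`. The snippet below re-implements a tiny version of that idea as a usage sketch; the `Buffer` type and its `clip` method here are illustrative, not the crate code above:

```rust
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct PointUtf16 { row: u32, column: u32 }

/// Marker for coordinates that may point past the end of a line or into the
/// middle of a code point (e.g. anything freshly deserialized from an LSP message).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct Unclipped<T>(pub T);

struct Buffer { line_lens: Vec<u32> }

impl Buffer {
    /// The only way to turn an untrusted point into a trusted one.
    fn clip(&self, point: Unclipped<PointUtf16>) -> PointUtf16 {
        let row = point.0.row.min(self.line_lens.len() as u32 - 1);
        let column = point.0.column.min(self.line_lens[row as usize]);
        PointUtf16 { row, column }
    }
}

fn main() {
    let buffer = Buffer { line_lens: vec![4, 2] };
    let from_lsp = Unclipped(PointUtf16 { row: 1, column: 10 });
    assert_eq!(buffer.clip(from_lsp), PointUtf16 { row: 1, column: 2 });
}
```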
@@ -403,8 +403,10 @@ message Symbol {
    string name = 4;
    int32 kind = 5;
    string path = 6;
-    Point start = 7;
-    Point end = 8;
+    // Cannot generate anchors for unopened files,
+    // so we are forced to use point coords instead
+    PointUtf16 start = 7;
+    PointUtf16 end = 8;
    bytes signature = 9;
 }

@@ -1033,7 +1035,7 @@ message Range {
    uint64 end = 2;
 }

-message Point {
+message PointUtf16 {
    uint32 row = 1;
    uint32 column = 2;
 }
@ -28,6 +28,7 @@ pub struct Settings {
|
||||||
pub buffer_font_family: FamilyId,
|
pub buffer_font_family: FamilyId,
|
||||||
pub default_buffer_font_size: f32,
|
pub default_buffer_font_size: f32,
|
||||||
pub buffer_font_size: f32,
|
pub buffer_font_size: f32,
|
||||||
|
pub active_pane_magnification: f32,
|
||||||
pub cursor_blink: bool,
|
pub cursor_blink: bool,
|
||||||
pub hover_popover_enabled: bool,
|
pub hover_popover_enabled: bool,
|
||||||
pub show_completions_on_input: bool,
|
pub show_completions_on_input: bool,
|
||||||
|
@ -253,6 +254,8 @@ pub struct SettingsFileContent {
|
||||||
#[serde(default)]
|
#[serde(default)]
|
||||||
pub buffer_font_size: Option<f32>,
|
pub buffer_font_size: Option<f32>,
|
||||||
#[serde(default)]
|
#[serde(default)]
|
||||||
|
pub active_pane_magnification: Option<f32>,
|
||||||
|
#[serde(default)]
|
||||||
pub cursor_blink: Option<bool>,
|
pub cursor_blink: Option<bool>,
|
||||||
#[serde(default)]
|
#[serde(default)]
|
||||||
pub hover_popover_enabled: Option<bool>,
|
pub hover_popover_enabled: Option<bool>,
|
||||||
|
@ -312,6 +315,7 @@ impl Settings {
|
||||||
.load_family(&[defaults.buffer_font_family.as_ref().unwrap()])
|
.load_family(&[defaults.buffer_font_family.as_ref().unwrap()])
|
||||||
.unwrap(),
|
.unwrap(),
|
||||||
buffer_font_size: defaults.buffer_font_size.unwrap(),
|
buffer_font_size: defaults.buffer_font_size.unwrap(),
|
||||||
|
active_pane_magnification: defaults.active_pane_magnification.unwrap(),
|
||||||
default_buffer_font_size: defaults.buffer_font_size.unwrap(),
|
default_buffer_font_size: defaults.buffer_font_size.unwrap(),
|
||||||
cursor_blink: defaults.cursor_blink.unwrap(),
|
cursor_blink: defaults.cursor_blink.unwrap(),
|
||||||
hover_popover_enabled: defaults.hover_popover_enabled.unwrap(),
|
hover_popover_enabled: defaults.hover_popover_enabled.unwrap(),
|
||||||
|
@ -367,6 +371,10 @@ impl Settings {
|
||||||
data.projects_online_by_default,
|
data.projects_online_by_default,
|
||||||
);
|
);
|
||||||
merge(&mut self.buffer_font_size, data.buffer_font_size);
|
merge(&mut self.buffer_font_size, data.buffer_font_size);
|
||||||
|
merge(
|
||||||
|
&mut self.active_pane_magnification,
|
||||||
|
data.active_pane_magnification,
|
||||||
|
);
|
||||||
merge(&mut self.default_buffer_font_size, data.buffer_font_size);
|
merge(&mut self.default_buffer_font_size, data.buffer_font_size);
|
||||||
merge(&mut self.cursor_blink, data.cursor_blink);
|
merge(&mut self.cursor_blink, data.cursor_blink);
|
||||||
merge(&mut self.hover_popover_enabled, data.hover_popover_enabled);
|
merge(&mut self.hover_popover_enabled, data.hover_popover_enabled);
|
||||||
|
@ -458,6 +466,7 @@ impl Settings {
|
||||||
experiments: FeatureFlags::default(),
|
experiments: FeatureFlags::default(),
|
||||||
buffer_font_family: cx.font_cache().load_family(&["Monaco"]).unwrap(),
|
buffer_font_family: cx.font_cache().load_family(&["Monaco"]).unwrap(),
|
||||||
buffer_font_size: 14.,
|
buffer_font_size: 14.,
|
||||||
|
active_pane_magnification: 1.,
|
||||||
default_buffer_font_size: 14.,
|
default_buffer_font_size: 14.,
|
||||||
cursor_blink: true,
|
cursor_blink: true,
|
||||||
hover_popover_enabled: true,
|
hover_popover_enabled: true,
|
||||||
|
|
|
@ -36,18 +36,6 @@ impl Modifiers {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
///This function checks if to_esc_str would work, assuming all terminal settings are off.
|
|
||||||
///Note that this function is conservative. It can fail in cases where the actual to_esc_str succeeds.
|
|
||||||
///This is unavoidable for our use case. GPUI cannot wait until we acquire the terminal
|
|
||||||
///lock to determine whether we could actually send the keystroke with the current settings. Therefore,
|
|
||||||
///This conservative guess is used instead. Note that in practice the case where this method
|
|
||||||
///Returns false when the actual terminal would consume the keystroke never happens. All keystrokes
|
|
||||||
///that depend on terminal modes also have a mapping that doesn't depend on the terminal mode.
|
|
||||||
///This is fragile, but as these mappings are locked up in legacy compatibility, it's probably good enough
|
|
||||||
pub fn might_convert(keystroke: &Keystroke) -> bool {
|
|
||||||
to_esc_str(keystroke, &TermMode::NONE, false).is_some()
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn to_esc_str(keystroke: &Keystroke, mode: &TermMode, alt_is_meta: bool) -> Option<String> {
|
pub fn to_esc_str(keystroke: &Keystroke, mode: &TermMode, alt_is_meta: bool) -> Option<String> {
|
||||||
let modifiers = Modifiers::new(keystroke);
|
let modifiers = Modifiers::new(keystroke);
|
||||||
|
|
||||||
|
|
|
@@ -97,7 +97,7 @@ impl MouseButton {
     }
 
     fn from_scroll(e: &ScrollWheelEvent) -> Self {
-        if e.delta.y() > 0. {
+        if e.delta.raw().y() > 0. {
             MouseButton::ScrollUp
         } else {
             MouseButton::ScrollDown
@@ -407,13 +407,18 @@ impl TerminalBuilder {
            'outer: loop {
                let mut events = vec![];
                let mut timer = cx.background().timer(Duration::from_millis(4)).fuse();
+               let mut wakeup = false;
                loop {
                    futures::select_biased! {
                        _ = timer => break,
                        event = self.events_rx.next() => {
                            if let Some(event) = event {
-                               events.push(event);
+                               if matches!(event, AlacTermEvent::Wakeup) {
+                                   wakeup = true;
+                               } else {
+                                   events.push(event);
+                               }
 
                                if events.len() > 100 {
                                    break;
                                }
@@ -424,11 +429,15 @@ impl TerminalBuilder {
                        }
                    }
 
-               if events.is_empty() {
+               if events.is_empty() && wakeup == false {
                    smol::future::yield_now().await;
                    break 'outer;
                } else {
                    this.upgrade(&cx)?.update(&mut cx, |this, cx| {
+                       if wakeup {
+                           this.process_event(&AlacTermEvent::Wakeup, cx);
+                       }
 
                        for event in events {
                            this.process_event(&event, cx);
                        }
@@ -627,7 +636,7 @@ impl Terminal {
        term.grid_mut().reset_region(..cursor.line);
 
        // Copy the current line up
-       let line = term.grid()[cursor.line][..cursor.column]
+       let line = term.grid()[cursor.line][..Column(term.grid().columns())]
            .iter()
            .cloned()
            .enumerate()
@@ -1136,7 +1145,7 @@ impl Terminal {
 
    fn determine_scroll_lines(&mut self, e: &MouseScrollWheel, mouse_mode: bool) -> Option<i32> {
        let scroll_multiplier = if mouse_mode { 1. } else { SCROLL_MULTIPLIER };
+       let line_height = self.last_content.size.line_height;
        match e.phase {
            /* Reset scroll state on started */
            Some(gpui::TouchPhase::Started) => {
@@ -1145,11 +1154,11 @@ impl Terminal {
            }
            /* Calculate the appropriate scroll lines */
            Some(gpui::TouchPhase::Moved) => {
-               let old_offset = (self.scroll_px / self.last_content.size.line_height) as i32;
+               let old_offset = (self.scroll_px / line_height) as i32;
 
-               self.scroll_px += e.delta.y() * scroll_multiplier;
+               self.scroll_px += e.delta.pixel_delta(line_height).y() * scroll_multiplier;
 
-               let new_offset = (self.scroll_px / self.last_content.size.line_height) as i32;
+               let new_offset = (self.scroll_px / line_height) as i32;
 
                // Whenever we hit the edges, reset our stored scroll to 0
                // so we can respond to changes in direction quickly
@@ -1159,7 +1168,7 @@
            }
            /* Fall back to delta / line_height */
            None => Some(
-               ((e.delta.y() * scroll_multiplier) / self.last_content.size.line_height) as i32,
+               ((e.delta.pixel_delta(line_height).y() * scroll_multiplier) / line_height) as i32,
            ),
            _ => None,
        }
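The comments above describe the scroll accumulator: pixel deltas are summed into `scroll_px`, the terminal scrolls by whole lines, and the remainder is kept (or reset at the edges) so direction changes respond quickly. A small worked example of that arithmetic with hypothetical numbers, not taken from the diff:

    fn main() {
        // Hypothetical values: line_height = 20 px, one 30 px wheel delta, multiplier 1.0.
        let line_height = 20.0_f32;
        let mut scroll_px = 0.0_f32;

        let old_offset = (scroll_px / line_height) as i32; // 0
        scroll_px += 30.0 * 1.0;
        let new_offset = (scroll_px / line_height) as i32; // 1

        // The terminal scrolls by the whole-line difference; the 10 px remainder stays
        // in scroll_px so a later delta (or an edge hit, which resets it) decides the rest.
        assert_eq!(new_offset - old_offset, 1);
    }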
@@ -3,8 +3,8 @@ use smallvec::{smallvec, SmallVec};
 use std::iter;
 
 lazy_static! {
-    pub static ref MIN: Locator = Locator::min();
-    pub static ref MAX: Locator = Locator::max();
+    static ref MIN: Locator = Locator::min();
+    static ref MAX: Locator = Locator::max();
 }
 
 #[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
@@ -19,6 +19,14 @@ impl Locator {
         Self(smallvec![u64::MAX])
     }
 
+    pub fn min_ref() -> &'static Self {
+        &*MIN
+    }
+
+    pub fn max_ref() -> &'static Self {
+        &*MAX
+    }
+
     pub fn assign(&mut self, other: &Self) {
         self.0.resize(other.0.len(), 0);
         self.0.copy_from_slice(&other.0);
@@ -1594,8 +1594,12 @@ impl BufferSnapshot {
         self.visible_text.point_utf16_to_offset(point)
     }
 
-    pub fn point_utf16_to_point(&self, point: PointUtf16) -> Point {
-        self.visible_text.point_utf16_to_point(point)
+    pub fn unclipped_point_utf16_to_offset(&self, point: Unclipped<PointUtf16>) -> usize {
+        self.visible_text.unclipped_point_utf16_to_offset(point)
+    }
+
+    pub fn unclipped_point_utf16_to_point(&self, point: Unclipped<PointUtf16>) -> Point {
+        self.visible_text.unclipped_point_utf16_to_point(point)
     }
 
     pub fn offset_utf16_to_offset(&self, offset: OffsetUtf16) -> usize {
@@ -1766,9 +1770,9 @@
 
     fn fragment_id_for_anchor(&self, anchor: &Anchor) -> &Locator {
         if *anchor == Anchor::MIN {
-            &locator::MIN
+            Locator::min_ref()
         } else if *anchor == Anchor::MAX {
-            &locator::MAX
+            Locator::max_ref()
         } else {
             let anchor_key = InsertionFragmentKey {
                 timestamp: anchor.timestamp,
@@ -1803,7 +1807,10 @@ impl BufferSnapshot {
     }
 
     pub fn anchor_at<T: ToOffset>(&self, position: T, bias: Bias) -> Anchor {
-        let offset = position.to_offset(self);
+        self.anchor_at_offset(position.to_offset(self), bias)
+    }
+
+    fn anchor_at_offset(&self, offset: usize, bias: Bias) -> Anchor {
         if bias == Bias::Left && offset == 0 {
             Anchor::MIN
         } else if bias == Bias::Right && offset == self.len() {
@@ -1840,7 +1847,7 @@ impl BufferSnapshot {
         self.visible_text.clip_offset_utf16(offset, bias)
     }
 
-    pub fn clip_point_utf16(&self, point: PointUtf16, bias: Bias) -> PointUtf16 {
+    pub fn clip_point_utf16(&self, point: Unclipped<PointUtf16>, bias: Bias) -> PointUtf16 {
         self.visible_text.clip_point_utf16(point, bias)
     }
 
@@ -2354,32 +2361,20 @@ pub trait ToOffset {
 }
 
 impl ToOffset for Point {
-    fn to_offset<'a>(&self, snapshot: &BufferSnapshot) -> usize {
+    fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
         snapshot.point_to_offset(*self)
     }
 }
 
-impl ToOffset for PointUtf16 {
-    fn to_offset<'a>(&self, snapshot: &BufferSnapshot) -> usize {
-        snapshot.point_utf16_to_offset(*self)
-    }
-}
-
 impl ToOffset for usize {
-    fn to_offset<'a>(&self, snapshot: &BufferSnapshot) -> usize {
+    fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
         assert!(*self <= snapshot.len(), "offset {self} is out of range");
         *self
     }
 }
 
-impl ToOffset for OffsetUtf16 {
-    fn to_offset<'a>(&self, snapshot: &BufferSnapshot) -> usize {
-        snapshot.offset_utf16_to_offset(*self)
-    }
-}
-
 impl ToOffset for Anchor {
-    fn to_offset<'a>(&self, snapshot: &BufferSnapshot) -> usize {
+    fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
         snapshot.summary_for_anchor(self)
     }
 }
@@ -2390,31 +2385,43 @@ impl<'a, T: ToOffset> ToOffset for &'a T {
     }
 }
 
+impl ToOffset for PointUtf16 {
+    fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
+        snapshot.point_utf16_to_offset(*self)
+    }
+}
+
+impl ToOffset for Unclipped<PointUtf16> {
+    fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
+        snapshot.unclipped_point_utf16_to_offset(*self)
+    }
+}
+
 pub trait ToPoint {
     fn to_point(&self, snapshot: &BufferSnapshot) -> Point;
 }
 
 impl ToPoint for Anchor {
-    fn to_point<'a>(&self, snapshot: &BufferSnapshot) -> Point {
+    fn to_point(&self, snapshot: &BufferSnapshot) -> Point {
         snapshot.summary_for_anchor(self)
     }
 }
 
 impl ToPoint for usize {
-    fn to_point<'a>(&self, snapshot: &BufferSnapshot) -> Point {
+    fn to_point(&self, snapshot: &BufferSnapshot) -> Point {
         snapshot.offset_to_point(*self)
     }
 }
 
-impl ToPoint for PointUtf16 {
-    fn to_point<'a>(&self, snapshot: &BufferSnapshot) -> Point {
-        snapshot.point_utf16_to_point(*self)
+impl ToPoint for Point {
+    fn to_point(&self, _: &BufferSnapshot) -> Point {
+        *self
     }
 }
 
-impl ToPoint for Point {
-    fn to_point<'a>(&self, _: &BufferSnapshot) -> Point {
-        *self
+impl ToPoint for Unclipped<PointUtf16> {
+    fn to_point(&self, snapshot: &BufferSnapshot) -> Point {
+        snapshot.unclipped_point_utf16_to_point(*self)
     }
 }
 
@@ -2423,25 +2430,25 @@ pub trait ToPointUtf16 {
 }
 
 impl ToPointUtf16 for Anchor {
-    fn to_point_utf16<'a>(&self, snapshot: &BufferSnapshot) -> PointUtf16 {
+    fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> PointUtf16 {
         snapshot.summary_for_anchor(self)
     }
 }
 
 impl ToPointUtf16 for usize {
-    fn to_point_utf16<'a>(&self, snapshot: &BufferSnapshot) -> PointUtf16 {
+    fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> PointUtf16 {
         snapshot.offset_to_point_utf16(*self)
     }
 }
 
 impl ToPointUtf16 for PointUtf16 {
-    fn to_point_utf16<'a>(&self, _: &BufferSnapshot) -> PointUtf16 {
+    fn to_point_utf16(&self, _: &BufferSnapshot) -> PointUtf16 {
         *self
     }
 }
 
 impl ToPointUtf16 for Point {
-    fn to_point_utf16<'a>(&self, snapshot: &BufferSnapshot) -> PointUtf16 {
+    fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> PointUtf16 {
         snapshot.point_to_point_utf16(*self)
     }
 }
@@ -2451,45 +2458,23 @@ pub trait ToOffsetUtf16 {
 }
 
 impl ToOffsetUtf16 for Anchor {
-    fn to_offset_utf16<'a>(&self, snapshot: &BufferSnapshot) -> OffsetUtf16 {
+    fn to_offset_utf16(&self, snapshot: &BufferSnapshot) -> OffsetUtf16 {
         snapshot.summary_for_anchor(self)
     }
 }
 
 impl ToOffsetUtf16 for usize {
-    fn to_offset_utf16<'a>(&self, snapshot: &BufferSnapshot) -> OffsetUtf16 {
+    fn to_offset_utf16(&self, snapshot: &BufferSnapshot) -> OffsetUtf16 {
         snapshot.offset_to_offset_utf16(*self)
     }
 }
 
 impl ToOffsetUtf16 for OffsetUtf16 {
-    fn to_offset_utf16<'a>(&self, _snapshot: &BufferSnapshot) -> OffsetUtf16 {
+    fn to_offset_utf16(&self, _snapshot: &BufferSnapshot) -> OffsetUtf16 {
         *self
     }
 }
 
-pub trait Clip {
-    fn clip(&self, bias: Bias, snapshot: &BufferSnapshot) -> Self;
-}
-
-impl Clip for usize {
-    fn clip(&self, bias: Bias, snapshot: &BufferSnapshot) -> Self {
-        snapshot.clip_offset(*self, bias)
-    }
-}
-
-impl Clip for Point {
-    fn clip(&self, bias: Bias, snapshot: &BufferSnapshot) -> Self {
-        snapshot.clip_point(*self, bias)
-    }
-}
-
-impl Clip for PointUtf16 {
-    fn clip(&self, bias: Bias, snapshot: &BufferSnapshot) -> Self {
-        snapshot.clip_point_utf16(*self, bias)
-    }
-}
-
 pub trait FromAnchor {
     fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self;
 }
@@ -15,4 +15,4 @@ settings = { path = "../settings" }
 workspace = { path = "../workspace" }
 project = { path = "../project" }
 
 smallvec = { version = "1.6", features = ["union"] }
@@ -11,6 +11,7 @@ test-support = ["serde_json", "tempdir", "git2"]
 
 [dependencies]
 anyhow = "1.0.38"
+backtrace = "0.3"
 futures = "0.3"
 log = { version = "0.4.16", features = ["kv_unstable_serde"] }
 lazy_static = "1.4.0"
@@ -1,6 +1,7 @@
 #[cfg(any(test, feature = "test-support"))]
 pub mod test;
 
+pub use backtrace::Backtrace;
 use futures::Future;
 use rand::{seq::SliceRandom, Rng};
 use std::{
@@ -10,6 +11,18 @@ use std::{
     task::{Context, Poll},
 };
 
+#[macro_export]
+macro_rules! debug_panic {
+    ( $($fmt_arg:tt)* ) => {
+        if cfg!(debug_assertions) {
+            panic!( $($fmt_arg)* );
+        } else {
+            let backtrace = $crate::Backtrace::new();
+            log::error!("{}\n{:?}", format_args!($($fmt_arg)*), backtrace);
+        }
+    };
+}
+
 pub fn truncate(s: &str, max_chars: usize) -> &str {
     match s.char_indices().nth(max_chars) {
         None => s,
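A short usage sketch for the `debug_panic!` macro added above: it panics in debug builds and only logs the message plus a captured backtrace in release builds. The call site, message, and imports below are illustrative assumptions (they require the `util` and `log` crates in the caller's dependencies):

    use util::debug_panic;

    fn apply_update(index: usize, len: usize) {
        if index >= len {
            // Debug builds: panics with the formatted message.
            // Release builds: log::error! with a Backtrace, then execution continues.
            debug_panic!("update index {} out of range (len {})", index, len);
            return;
        }
        // ... apply the update ...
    }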
@@ -42,4 +42,4 @@ language = { path = "../language", features = ["test-support"] }
 project = { path = "../project", features = ["test-support"] }
 util = { path = "../util", features = ["test-support"] }
 settings = { path = "../settings" }
 workspace = { path = "../workspace", features = ["test-support"] }
@@ -114,12 +114,12 @@ pub fn change(_: &mut Workspace, _: &VisualChange, cx: &mut ViewContext<Workspac
            };
 
            edits.push((expanded_range, "\n"));
-           new_selections.push(selection.map(|_| anchor.clone()));
+           new_selections.push(selection.map(|_| anchor));
        } else {
            let range = selection.map(|p| p.to_point(map)).range();
            let anchor = map.buffer_snapshot.anchor_after(range.end);
            edits.push((range, ""));
-           new_selections.push(selection.map(|_| anchor.clone()));
+           new_selections.push(selection.map(|_| anchor));
        }
        selection.goal = SelectionGoal::None;
    });
@@ -46,4 +46,4 @@ client = { path = "../client", features = ["test-support"] }
 gpui = { path = "../gpui", features = ["test-support"] }
 project = { path = "../project", features = ["test-support"] }
 settings = { path = "../settings", features = ["test-support"] }
 fs = { path = "../fs", features = ["test-support"] }
@@ -118,6 +118,7 @@ pub fn handle_dropped_item(
    {
        Action::Open(*project_entry)
    } else {
+       cx.propagate_event();
        return;
    };
 
@@ -8,6 +8,7 @@ use gpui::{
 };
 use project::Project;
 use serde::Deserialize;
+use settings::Settings;
 use theme::Theme;
 
 #[derive(Clone, Debug, Eq, PartialEq)]
@@ -63,10 +64,17 @@ impl PaneGroup {
        theme: &Theme,
        follower_states: &FollowerStatesByLeader,
        active_call: Option<&ModelHandle<ActiveCall>>,
+       active_pane: &ViewHandle<Pane>,
        cx: &mut RenderContext<Workspace>,
    ) -> ElementBox {
-       self.root
-           .render(project, theme, follower_states, active_call, cx)
+       self.root.render(
+           project,
+           theme,
+           follower_states,
+           active_call,
+           active_pane,
+           cx,
+       )
    }
 
    pub(crate) fn panes(&self) -> Vec<&ViewHandle<Pane>> {
@@ -104,12 +112,20 @@ impl Member {
        Member::Axis(PaneAxis { axis, members })
    }
 
+   fn contains(&self, needle: &ViewHandle<Pane>) -> bool {
+       match self {
+           Member::Axis(axis) => axis.members.iter().any(|member| member.contains(needle)),
+           Member::Pane(pane) => pane == needle,
+       }
+   }
+
    pub fn render(
        &self,
        project: &ModelHandle<Project>,
        theme: &Theme,
        follower_states: &FollowerStatesByLeader,
        active_call: Option<&ModelHandle<ActiveCall>>,
+       active_pane: &ViewHandle<Pane>,
        cx: &mut RenderContext<Workspace>,
    ) -> ElementBox {
        enum FollowIntoExternalProject {}
@@ -236,7 +252,14 @@ impl Member {
                .with_children(prompt)
                .boxed()
            }
-           Member::Axis(axis) => axis.render(project, theme, follower_states, active_call, cx),
+           Member::Axis(axis) => axis.render(
+               project,
+               theme,
+               follower_states,
+               active_call,
+               active_pane,
+               cx,
+           ),
        }
    }
 
@@ -337,12 +360,19 @@ impl PaneAxis {
        theme: &Theme,
        follower_state: &FollowerStatesByLeader,
        active_call: Option<&ModelHandle<ActiveCall>>,
+       active_pane: &ViewHandle<Pane>,
        cx: &mut RenderContext<Workspace>,
    ) -> ElementBox {
        let last_member_ix = self.members.len() - 1;
        Flex::new(self.axis)
            .with_children(self.members.iter().enumerate().map(|(ix, member)| {
-               let mut member = member.render(project, theme, follower_state, active_call, cx);
+               let mut flex = 1.0;
+               if member.contains(active_pane) {
+                   flex = cx.global::<Settings>().active_pane_magnification;
+               }
+
+               let mut member =
+                   member.render(project, theme, follower_state, active_call, active_pane, cx);
                if ix < last_member_ix {
                    let mut border = theme.workspace.pane_divider;
                    border.left = false;
@@ -356,7 +386,7 @@ impl PaneAxis {
                    member = Container::new(member).with_border(border).boxed();
                }
 
-               FlexItem::new(member).flex(1.0, true).boxed()
+               FlexItem::new(member).flex(flex, true).boxed()
            }))
            .boxed()
    }
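The pane-layout change above gives each member a flex weight of 1.0, except the subtree containing the active pane, which gets the `active_pane_magnification` value from settings. Assuming the flex weights divide the axis proportionally (which is what the `FlexItem::flex` call implies), a hypothetical magnification of 1.5 over two panes works out as follows; the numbers are illustrative only:

    fn main() {
        // Two members on one axis; the subtree holding the active pane is magnified.
        let magnification = 1.5_f32; // hypothetical settings value
        let weights = [magnification, 1.0];
        let total: f32 = weights.iter().sum();
        // 1.5 : 1.0 -> roughly a 60% / 40% split of the axis.
        assert!((weights[0] / total - 0.6).abs() < 1e-6);
    }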
@@ -2784,6 +2784,7 @@ impl View for Workspace {
                    &theme,
                    &self.follower_states_by_leader,
                    self.active_call(),
+                   self.active_pane(),
                    cx,
                ))
                .flex(1., true)
@@ -3,7 +3,7 @@ authors = ["Nathan Sobo <nathansobo@gmail.com>"]
 description = "The fast, collaborative code editor."
 edition = "2021"
 name = "zed"
-version = "0.65.0"
+version = "0.67.0"
 
 [lib]
 name = "zed"
@@ -95,6 +95,7 @@ tree-sitter-c = "0.20.1"
 tree-sitter-cpp = "0.20.0"
 tree-sitter-css = { git = "https://github.com/tree-sitter/tree-sitter-css", rev = "769203d0f9abe1a9a691ac2b9fe4bb4397a73c51" }
 tree-sitter-elixir = { git = "https://github.com/elixir-lang/tree-sitter-elixir", rev = "05e3631c6a0701c1fa518b0fee7be95a2ceef5e2" }
+tree-sitter-embedded-template = "0.20.0"
 tree-sitter-go = { git = "https://github.com/tree-sitter/tree-sitter-go", rev = "aeb2f33b366fd78d5789ff104956ce23508b85db" }
 tree-sitter-json = { git = "https://github.com/tree-sitter/tree-sitter-json", rev = "137e1ce6a02698fc246cdb9c6b886ed1de9a1ed8" }
 tree-sitter-rust = "0.20.3"
@@ -12,6 +12,7 @@ mod installation;
 mod json;
 mod language_plugin;
 mod python;
+mod ruby;
 mod rust;
 mod typescript;
 
@@ -116,7 +117,16 @@ pub async fn init(languages: Arc<LanguageRegistry>, _executor: Arc<Background>)
            tree_sitter_html::language(),
            Some(CachedLspAdapter::new(html::HtmlLspAdapter).await),
        ),
-       ("ruby", tree_sitter_ruby::language(), None),
+       (
+           "ruby",
+           tree_sitter_ruby::language(),
+           Some(CachedLspAdapter::new(ruby::RubyLanguageServer).await),
+       ),
+       (
+           "erb",
+           tree_sitter_embedded_template::language(),
+           Some(CachedLspAdapter::new(ruby::RubyLanguageServer).await),
+       ),
    ] {
        languages.add(language(name, grammar, lsp_adapter));
    }
8
crates/zed/src/languages/erb/config.toml
Normal file
@@ -0,0 +1,8 @@
+name = "ERB"
+path_suffixes = ["erb"]
+autoclose_before = ">})"
+brackets = [
+    { start = "<", end = ">", close = true, newline = true },
+]
+
+block_comment = ["<%#", "%>"]
12
crates/zed/src/languages/erb/highlights.scm
Normal file
@@ -0,0 +1,12 @@
+(comment_directive) @comment
+
+[
+  "<%#"
+  "<%"
+  "<%="
+  "<%_"
+  "<%-"
+  "%>"
+  "-%>"
+  "_%>"
+] @keyword
7
crates/zed/src/languages/erb/injections.scm
Normal file
@@ -0,0 +1,7 @@
+((code) @content
+ (#set! "language" "ruby")
+ (#set! "combined"))
+
+((content) @content
+ (#set! "language" "html")
+ (#set! "combined"))
@@ -87,6 +87,25 @@ impl LspAdapter for PythonLspAdapter {
            .log_err()
    }
 
+   async fn process_completion(&self, item: &mut lsp::CompletionItem) {
+       // Pyright assigns each completion item a `sortText` of the form `XX.YYYY.name`.
+       // Where `XX` is the sorting category, `YYYY` is based on most recent usage,
+       // and `name` is the symbol name itself.
+       //
+       // Because the the symbol name is included, there generally are not ties when
+       // sorting by the `sortText`, so the symbol's fuzzy match score is not taken
+       // into account. Here, we remove the symbol name from the sortText in order
+       // to allow our own fuzzy score to be used to break ties.
+       //
+       // see https://github.com/microsoft/pyright/blob/95ef4e103b9b2f129c9320427e51b73ea7cf78bd/packages/pyright-internal/src/languageService/completionProvider.ts#LL2873
+       let Some(sort_text) = &mut item.sort_text else { return };
+       let mut parts = sort_text.split('.');
+       let Some(first) = parts.next() else { return };
+       let Some(second) = parts.next() else { return };
+       let Some(_) = parts.next() else { return };
+       sort_text.replace_range(first.len() + second.len() + 1.., "");
+   }
+
    async fn label_for_completion(
        &self,
        item: &lsp::CompletionItem,
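A worked example of the `process_completion` trimming added above, using a made-up Pyright `sortText` (the values are illustrative, not taken from the diff):

    fn main() {
        // "05.0001.to_string": category "05", recency "0001", symbol name "to_string".
        let mut sort_text = String::from("05.0001.to_string");
        let mut parts = sort_text.split('.');
        let first = parts.next().unwrap(); // "05"
        let second = parts.next().unwrap(); // "0001"
        assert!(parts.next().is_some()); // a name part exists, so there is something to trim
        let keep = first.len() + second.len() + 1; // keep "05" + "." + "0001"
        sort_text.replace_range(keep.., "");
        assert_eq!(sort_text, "05.0001"); // ties now fall back to the editor's own fuzzy score
    }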
145
crates/zed/src/languages/ruby.rs
Normal file
@@ -0,0 +1,145 @@
+use anyhow::{anyhow, Result};
+use async_trait::async_trait;
+use client::http::HttpClient;
+use language::{LanguageServerName, LspAdapter};
+use std::{any::Any, path::PathBuf, sync::Arc};
+
+pub struct RubyLanguageServer;
+
+#[async_trait]
+impl LspAdapter for RubyLanguageServer {
+    async fn name(&self) -> LanguageServerName {
+        LanguageServerName("solargraph".into())
+    }
+
+    async fn server_args(&self) -> Vec<String> {
+        vec!["stdio".into()]
+    }
+
+    async fn fetch_latest_server_version(
+        &self,
+        _: Arc<dyn HttpClient>,
+    ) -> Result<Box<dyn 'static + Any + Send>> {
+        Ok(Box::new(()))
+    }
+
+    async fn fetch_server_binary(
+        &self,
+        _version: Box<dyn 'static + Send + Any>,
+        _: Arc<dyn HttpClient>,
+        _container_dir: PathBuf,
+    ) -> Result<PathBuf> {
+        Err(anyhow!("solargraph must be installed manually"))
+    }
+
+    async fn cached_server_binary(&self, _container_dir: PathBuf) -> Option<PathBuf> {
+        Some("solargraph".into())
+    }
+
+    async fn label_for_completion(
+        &self,
+        item: &lsp::CompletionItem,
+        language: &Arc<language::Language>,
+    ) -> Option<language::CodeLabel> {
+        let label = &item.label;
+        let grammar = language.grammar()?;
+        let highlight_id = match item.kind? {
+            lsp::CompletionItemKind::METHOD => grammar.highlight_id_for_name("function.method")?,
+            lsp::CompletionItemKind::CONSTANT => grammar.highlight_id_for_name("constant")?,
+            lsp::CompletionItemKind::CLASS | lsp::CompletionItemKind::MODULE => {
+                grammar.highlight_id_for_name("type")?
+            }
+            lsp::CompletionItemKind::KEYWORD => {
+                if label.starts_with(":") {
+                    grammar.highlight_id_for_name("string.special.symbol")?
+                } else {
+                    grammar.highlight_id_for_name("keyword")?
+                }
+            }
+            lsp::CompletionItemKind::VARIABLE => {
+                if label.starts_with("@") {
+                    grammar.highlight_id_for_name("property")?
+                } else {
+                    return None;
+                }
+            }
+            _ => return None,
+        };
+        Some(language::CodeLabel {
+            text: label.clone(),
+            runs: vec![(0..label.len(), highlight_id)],
+            filter_range: 0..label.len(),
+        })
+    }
+
+    async fn label_for_symbol(
+        &self,
+        label: &str,
+        kind: lsp::SymbolKind,
+        language: &Arc<language::Language>,
+    ) -> Option<language::CodeLabel> {
+        let grammar = language.grammar()?;
+        match kind {
+            lsp::SymbolKind::METHOD => {
+                let mut parts = label.split('#');
+                let classes = parts.next()?;
+                let method = parts.next()?;
+                if parts.next().is_some() {
+                    return None;
+                }
+
+                let class_id = grammar.highlight_id_for_name("type")?;
+                let method_id = grammar.highlight_id_for_name("function.method")?;
+
+                let mut ix = 0;
+                let mut runs = Vec::new();
+                for (i, class) in classes.split("::").enumerate() {
+                    if i > 0 {
+                        ix += 2;
+                    }
+                    let end_ix = ix + class.len();
+                    runs.push((ix..end_ix, class_id));
+                    ix = end_ix;
+                }
+
+                ix += 1;
+                let end_ix = ix + method.len();
+                runs.push((ix..end_ix, method_id));
+                Some(language::CodeLabel {
+                    text: label.to_string(),
+                    runs,
+                    filter_range: 0..label.len(),
+                })
+            }
+            lsp::SymbolKind::CONSTANT => {
+                let constant_id = grammar.highlight_id_for_name("constant")?;
+                Some(language::CodeLabel {
+                    text: label.to_string(),
+                    runs: vec![(0..label.len(), constant_id)],
+                    filter_range: 0..label.len(),
+                })
+            }
+            lsp::SymbolKind::CLASS | lsp::SymbolKind::MODULE => {
+                let class_id = grammar.highlight_id_for_name("type")?;
+
+                let mut ix = 0;
+                let mut runs = Vec::new();
+                for (i, class) in label.split("::").enumerate() {
+                    if i > 0 {
+                        ix += "::".len();
+                    }
+                    let end_ix = ix + class.len();
+                    runs.push((ix..end_ix, class_id));
+                    ix = end_ix;
+                }
+
+                Some(language::CodeLabel {
+                    text: label.to_string(),
+                    runs,
+                    filter_range: 0..label.len(),
+                })
+            }
+            _ => return None,
+        }
+    }
+}
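A worked trace of the run layout computed by `label_for_symbol` above, for a hypothetical Solargraph method symbol (the label is an example, not taken from the diff):

    fn main() {
        // label = "Foo::Bar#baz" -> classes = "Foo::Bar", method = "baz"
        //   "Foo" -> 0..3   (type)            "::" skipped by advancing ix += 2
        //   "Bar" -> 5..8   (type)            "#"  skipped by advancing ix += 1
        //   "baz" -> 9..12  (function.method)
        let label = "Foo::Bar#baz";
        let runs = [(0..3, "type"), (5..8, "type"), (9..12, "function.method")];
        assert!(runs.iter().all(|(range, _)| range.end <= label.len()));
    }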
@@ -211,21 +211,6 @@ fn init_paths() {
     std::fs::create_dir_all(&*zed::paths::LANGUAGES_DIR).expect("could not create languages path");
     std::fs::create_dir_all(&*zed::paths::DB_DIR).expect("could not create database path");
     std::fs::create_dir_all(&*zed::paths::LOGS_DIR).expect("could not create logs path");
-
-    // Copy setting files from legacy locations. TODO: remove this after a few releases.
-    thread::spawn(|| {
-        if std::fs::metadata(&*zed::paths::legacy::SETTINGS).is_ok()
-            && std::fs::metadata(&*zed::paths::SETTINGS).is_err()
-        {
-            std::fs::copy(&*zed::paths::legacy::SETTINGS, &*zed::paths::SETTINGS).log_err();
-        }
-
-        if std::fs::metadata(&*zed::paths::legacy::KEYMAP).is_ok()
-            && std::fs::metadata(&*zed::paths::KEYMAP).is_err()
-        {
-            std::fs::copy(&*zed::paths::legacy::KEYMAP, &*zed::paths::KEYMAP).log_err();
-        }
-    });
 }
 
 fn init_logger() {
@@ -13,6 +13,7 @@ if [[ -n $(git status --short --untracked-files=no) ]]; then
 fi
 
 which cargo-set-version > /dev/null || cargo install cargo-edit
+which jq > /dev/null || brew install jq
 cargo set-version --package $package --bump $version_increment
 cargo check --quiet
 