Merge branch 'main' into typescript
commit 4805cfe48c

7 changed files with 227 additions and 62 deletions
@@ -141,6 +141,7 @@ action!(ToggleCodeActions, bool);
 action!(ConfirmCompletion, Option<usize>);
 action!(ConfirmCodeAction, Option<usize>);
 action!(OpenExcerpts);
+action!(RestartLanguageServer);
 
 enum DocumentHighlightRead {}
 enum DocumentHighlightWrite {}
@@ -302,6 +303,7 @@ pub fn init(cx: &mut MutableAppContext) {
         Binding::new("ctrl-space", ShowCompletions, Some("Editor")),
         Binding::new("cmd-.", ToggleCodeActions(false), Some("Editor")),
         Binding::new("alt-enter", OpenExcerpts, Some("Editor")),
+        Binding::new("cmd-f10", RestartLanguageServer, Some("Editor")),
     ]);
 
     cx.add_action(Editor::open_new);
@@ -377,6 +379,7 @@ pub fn init(cx: &mut MutableAppContext) {
     cx.add_action(Editor::show_completions);
     cx.add_action(Editor::toggle_code_actions);
     cx.add_action(Editor::open_excerpts);
+    cx.add_action(Editor::restart_language_server);
     cx.add_async_action(Editor::confirm_completion);
     cx.add_async_action(Editor::confirm_code_action);
     cx.add_async_action(Editor::rename);
@@ -4867,6 +4870,16 @@ impl Editor {
         self.pending_rename.as_ref()
     }
 
+    fn restart_language_server(&mut self, _: &RestartLanguageServer, cx: &mut ViewContext<Self>) {
+        if let Some(project) = self.project.clone() {
+            self.buffer.update(cx, |multi_buffer, cx| {
+                project.update(cx, |project, cx| {
+                    project.restart_language_servers_for_buffers(multi_buffer.all_buffers(), cx);
+                });
+            })
+        }
+    }
+
     fn refresh_active_diagnostics(&mut self, cx: &mut ViewContext<Editor>) {
         if let Some(active_diagnostics) = self.active_diagnostics.as_mut() {
             let buffer = self.buffer.read(cx).snapshot(cx);
@@ -259,6 +259,7 @@ impl LanguageRegistry {
 
     pub fn start_language_server(
         self: &Arc<Self>,
+        server_id: usize,
         language: Arc<Language>,
         root_path: Arc<Path>,
         http_client: Arc<dyn HttpClient>,
@@ -328,6 +329,7 @@ impl LanguageRegistry {
         let server_binary_path = server_binary_path.await?;
         let server_args = adapter.server_args();
         let server = lsp::LanguageServer::new(
+            server_id,
             &server_binary_path,
             server_args,
             &root_path,
@@ -34,6 +34,7 @@ type NotificationHandler =
 type ResponseHandler = Box<dyn Send + FnOnce(Result<&str, Error>)>;
 
 pub struct LanguageServer {
+    server_id: usize,
     next_id: AtomicUsize,
     outbound_tx: channel::Sender<Vec<u8>>,
     name: String,
@@ -113,6 +114,7 @@ struct Error {
 
 impl LanguageServer {
     pub fn new(
+        server_id: usize,
         binary_path: &Path,
         args: &[&str],
         root_path: &Path,
@@ -133,7 +135,8 @@ impl LanguageServer {
             .spawn()?;
         let stdin = server.stdin.take().unwrap();
         let stdout = server.stdout.take().unwrap();
-        let mut server = Self::new_internal(stdin, stdout, root_path, options, background);
+        let mut server =
+            Self::new_internal(server_id, stdin, stdout, root_path, options, background);
         if let Some(name) = binary_path.file_name() {
             server.name = name.to_string_lossy().to_string();
         }
@@ -141,6 +144,7 @@ impl LanguageServer {
     }
 
     fn new_internal<Stdin, Stdout>(
+        server_id: usize,
         stdin: Stdin,
         stdout: Stdout,
         root_path: &Path,
@@ -240,6 +244,7 @@ impl LanguageServer {
         });
 
         Self {
+            server_id,
             notification_handlers,
             response_handlers,
             name: Default::default(),
@@ -446,6 +451,10 @@ impl LanguageServer {
         &self.capabilities
     }
 
+    pub fn server_id(&self) -> usize {
+        self.server_id
+    }
+
     pub fn request<T: request::Request>(
         self: &Arc<Self>,
         params: T::Params,
@@ -606,8 +615,14 @@ impl LanguageServer {
         });
 
         let executor = cx.background().clone();
-        let server =
-            Self::new_internal(stdin_writer, stdout_reader, Path::new("/"), None, executor);
+        let server = Self::new_internal(
+            0,
+            stdin_writer,
+            stdout_reader,
+            Path::new("/"),
+            None,
+            executor,
+        );
         (server, fake)
     }
 }
@@ -666,17 +681,13 @@ impl FakeLanguageServer {
         let output_task = cx.background().spawn(async move {
             let mut stdout = smol::io::BufWriter::new(stdout);
             while let Some(message) = outgoing_rx.next().await {
-                stdout
-                    .write_all(CONTENT_LEN_HEADER.as_bytes())
-                    .await
-                    .unwrap();
+                stdout.write_all(CONTENT_LEN_HEADER.as_bytes()).await?;
                 stdout
                     .write_all((format!("{}", message.len())).as_bytes())
-                    .await
-                    .unwrap();
-                stdout.write_all("\r\n\r\n".as_bytes()).await.unwrap();
-                stdout.write_all(&message).await.unwrap();
-                stdout.flush().await.unwrap();
+                    .await?;
+                stdout.write_all("\r\n\r\n".as_bytes()).await?;
+                stdout.write_all(&message).await?;
+                stdout.flush().await?;
             }
             Ok(())
         });
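Each message written by the loop above is framed per the LSP base protocol: a Content-Length header, a blank line, then the JSON body. A minimal, self-contained sketch of that framing, not part of the commit (the `frame` helper is hypothetical and stands in for the buffered writes in the diff; CONTENT_LEN_HEADER is assumed to be the standard "Content-Length: " prefix):

    fn frame(message: &[u8]) -> Vec<u8> {
        let mut framed = Vec::new();
        // Header, blank line, then the body -- the same sequence the output task writes.
        framed.extend_from_slice(b"Content-Length: ");
        framed.extend_from_slice(message.len().to_string().as_bytes());
        framed.extend_from_slice(b"\r\n\r\n");
        framed.extend_from_slice(message);
        framed
    }

    fn main() {
        let body = br#"{"jsonrpc":"2.0","method":"initialized"}"#;
        let framed = frame(body);
        let expected_prefix = format!("Content-Length: {}\r\n\r\n", body.len());
        assert!(framed.starts_with(expected_prefix.as_bytes()));
    }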
@@ -1317,6 +1317,7 @@ impl Project {
             .or_insert_with(|| {
                 let server_id = post_inc(&mut self.next_language_server_id);
                 let language_server = self.languages.start_language_server(
+                    server_id,
                     language.clone(),
                     worktree_path,
                     self.client.http_client(),
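The `server_id` threaded through this call comes from the unchanged `post_inc(&mut self.next_language_server_id)` line just above it. A small sketch of that allocation pattern, assuming `post_inc` returns the previous value and then increments the counter (as the name suggests; the project's real helper is not reproduced here):

    fn post_inc(value: &mut usize) -> usize {
        let prev = *value;
        *value += 1;
        prev
    }

    fn main() {
        let mut next_language_server_id = 0;
        // Each started language server receives a distinct, monotonically increasing id.
        assert_eq!(post_inc(&mut next_language_server_id), 0);
        assert_eq!(post_inc(&mut next_language_server_id), 1);
        assert_eq!(next_language_server_id, 2);
    }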
@@ -1519,6 +1520,64 @@ impl Project {
         });
     }
 
+    pub fn restart_language_servers_for_buffers(
+        &mut self,
+        buffers: impl IntoIterator<Item = ModelHandle<Buffer>>,
+        cx: &mut ModelContext<Self>,
+    ) -> Option<()> {
+        let language_server_lookup_info: HashSet<(WorktreeId, Arc<Path>, PathBuf)> = buffers
+            .into_iter()
+            .filter_map(|buffer| {
+                let file = File::from_dyn(buffer.read(cx).file())?;
+                let worktree = file.worktree.read(cx).as_local()?;
+                let worktree_id = worktree.id();
+                let worktree_abs_path = worktree.abs_path().clone();
+                let full_path = file.full_path(cx);
+                Some((worktree_id, worktree_abs_path, full_path))
+            })
+            .collect();
+        for (worktree_id, worktree_abs_path, full_path) in language_server_lookup_info {
+            let language = self.languages.select_language(&full_path)?;
+            self.restart_language_server(worktree_id, worktree_abs_path, language, cx);
+        }
+
+        None
+    }
+
+    fn restart_language_server(
+        &mut self,
+        worktree_id: WorktreeId,
+        worktree_path: Arc<Path>,
+        language: Arc<Language>,
+        cx: &mut ModelContext<Self>,
+    ) {
+        let adapter = if let Some(adapter) = language.lsp_adapter() {
+            adapter
+        } else {
+            return;
+        };
+        let key = (worktree_id, adapter.name());
+        let server_to_shutdown = self.language_servers.remove(&key);
+        self.started_language_servers.remove(&key);
+        server_to_shutdown
+            .as_ref()
+            .map(|(_, server)| self.language_server_statuses.remove(&server.server_id()));
+        cx.spawn_weak(|this, mut cx| async move {
+            if let Some(this) = this.upgrade(&cx) {
+                if let Some((_, server_to_shutdown)) = server_to_shutdown {
+                    if let Some(shutdown_task) = server_to_shutdown.shutdown() {
+                        shutdown_task.await;
+                    }
+                }
+
+                this.update(&mut cx, |this, cx| {
+                    this.start_language_server(worktree_id, worktree_path, language, cx);
+                });
+            }
+        })
+        .detach();
+    }
+
     fn on_lsp_event(
         &mut self,
         language_server_id: usize,
@@ -4604,7 +4663,7 @@ impl Item for Buffer {
 mod tests {
     use super::{Event, *};
     use fs::RealFs;
-    use futures::StreamExt;
+    use futures::{future, StreamExt};
     use gpui::test::subscribe;
     use language::{
         tree_sitter_rust, Diagnostic, FakeLspAdapter, LanguageConfig, OffsetRangeExt, Point,
@@ -4614,7 +4673,7 @@ mod tests {
     use serde_json::json;
     use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc};
     use unindent::Unindent as _;
-    use util::test::temp_tree;
+    use util::{assert_set_eq, test::temp_tree};
     use worktree::WorktreeHandle as _;
 
     #[gpui::test]
@@ -4813,8 +4872,7 @@ mod tests {
         .await
         .unwrap();
 
-        // Another language server is started up, and it is notified about
-        // all three open buffers.
+        // A json language server is started up and is only notified about the json buffer.
         let mut fake_json_server = fake_json_servers.next().await.unwrap();
         assert_eq!(
             fake_json_server
@@ -4888,6 +4946,65 @@ mod tests {
             )
         );
 
+        // Restart language servers
+        project.update(cx, |project, cx| {
+            project.restart_language_servers_for_buffers(
+                vec![rust_buffer.clone(), json_buffer.clone()],
+                cx,
+            );
+        });
+
+        let mut rust_shutdown_requests = fake_rust_server
+            .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(()));
+        let mut json_shutdown_requests = fake_json_server
+            .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(()));
+        futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
+
+        let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
+        let mut fake_json_server = fake_json_servers.next().await.unwrap();
+
+        // Ensure both rust documents are reopened in new rust language server without worrying about order
+        assert_set_eq!(
+            [
+                fake_rust_server
+                    .receive_notification::<lsp::notification::DidOpenTextDocument>()
+                    .await
+                    .text_document,
+                fake_rust_server
+                    .receive_notification::<lsp::notification::DidOpenTextDocument>()
+                    .await
+                    .text_document,
+            ],
+            [
+                lsp::TextDocumentItem {
+                    uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
+                    version: 1,
+                    text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
+                    language_id: Default::default()
+                },
+                lsp::TextDocumentItem {
+                    uri: lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
+                    version: 1,
+                    text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
+                    language_id: Default::default()
+                },
+            ]
+        );
+
+        // Ensure json document is reopened in new json language server
+        assert_eq!(
+            fake_json_server
+                .receive_notification::<lsp::notification::DidOpenTextDocument>()
+                .await
+                .text_document,
+            lsp::TextDocumentItem {
+                uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
+                version: 0,
+                text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
+                language_id: Default::default()
+            }
+        );
+
         // Close notifications are reported only to servers matching the buffer's language.
         cx.update(|_| drop(json_buffer));
         let close_message = lsp::DidCloseTextDocumentParams {
@@ -1,10 +1,12 @@
-use std::{
-    collections::HashMap,
-    ops::Range,
-    path::{Path, PathBuf},
-};
+mod assertions;
+mod marked_text;
+
+use std::path::{Path, PathBuf};
 use tempdir::TempDir;
 
+pub use assertions::*;
+pub use marked_text::*;
+
 pub fn temp_tree(tree: serde_json::Value) -> TempDir {
     let dir = TempDir::new("").unwrap();
     write_tree(dir.path(), tree);
@@ -52,44 +54,3 @@ pub fn sample_text(rows: usize, cols: usize, start_char: char) -> String {
     }
     text
 }
-
-pub fn marked_text_by(
-    marked_text: &str,
-    markers: Vec<char>,
-) -> (String, HashMap<char, Vec<usize>>) {
-    let mut extracted_markers: HashMap<char, Vec<usize>> = Default::default();
-    let mut unmarked_text = String::new();
-
-    for char in marked_text.chars() {
-        if markers.contains(&char) {
-            let char_offsets = extracted_markers.entry(char).or_insert(Vec::new());
-            char_offsets.push(unmarked_text.len());
-        } else {
-            unmarked_text.push(char);
-        }
-    }
-
-    (unmarked_text, extracted_markers)
-}
-
-pub fn marked_text(marked_text: &str) -> (String, Vec<usize>) {
-    let (unmarked_text, mut markers) = marked_text_by(marked_text, vec!['|']);
-    (unmarked_text, markers.remove(&'|').unwrap_or_else(Vec::new))
-}
-
-pub fn marked_text_ranges(marked_text: &str) -> (String, Vec<Range<usize>>) {
-    let (unmarked_text, mut markers) = marked_text_by(marked_text, vec!['[', ']']);
-    let opens = markers.remove(&'[').unwrap_or_default();
-    let closes = markers.remove(&']').unwrap_or_default();
-    assert_eq!(opens.len(), closes.len(), "marked ranges are unbalanced");
-
-    let ranges = opens
-        .into_iter()
-        .zip(closes)
-        .map(|(open, close)| {
-            assert!(close >= open, "marked ranges must be disjoint");
-            open..close
-        })
-        .collect();
-    (unmarked_text, ranges)
-}
crates/util/src/test/assertions.rs (new file, 19 lines)
@@ -0,0 +1,19 @@
+#[macro_export]
+macro_rules! assert_set_eq {
+    ($left:expr,$right:expr) => {{
+        let left = $left;
+        let right = $right;
+
+        for left_value in left.iter() {
+            if !right.contains(left_value) {
+                panic!("assertion failed: `(left == right)`\n left: {:?}\nright: {:?}\nright does not contain {:?}", left, right, left_value);
+            }
+        }
+
+        for right_value in right.iter() {
+            if !left.contains(right_value) {
+                panic!("assertion failed: `(left == right)`\n left: {:?}\nright: {:?}\nleft does not contain {:?}", left, right, right_value);
+            }
+        }
+    }};
+}
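A quick usage sketch of the new macro, assuming it is in scope via `use util::assert_set_eq;` (the import added to the project tests above): the assertion passes when both collections contain the same elements regardless of order, and panics with a message naming the missing element otherwise.

    fn main() {
        // Order does not matter, only membership in both directions.
        assert_set_eq!(vec![1, 2, 3], vec![3, 1, 2]);

        // This would panic with "right does not contain 4":
        // assert_set_eq!(vec![1, 4], vec![1, 2]);
    }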
crates/util/src/test/marked_text.rs (new file, 42 lines)
@@ -0,0 +1,42 @@
+use std::{collections::HashMap, ops::Range};
+
+pub fn marked_text_by(
+    marked_text: &str,
+    markers: Vec<char>,
+) -> (String, HashMap<char, Vec<usize>>) {
+    let mut extracted_markers: HashMap<char, Vec<usize>> = Default::default();
+    let mut unmarked_text = String::new();
+
+    for char in marked_text.chars() {
+        if markers.contains(&char) {
+            let char_offsets = extracted_markers.entry(char).or_insert(Vec::new());
+            char_offsets.push(unmarked_text.len());
+        } else {
+            unmarked_text.push(char);
+        }
+    }
+
+    (unmarked_text, extracted_markers)
+}
+
+pub fn marked_text(marked_text: &str) -> (String, Vec<usize>) {
+    let (unmarked_text, mut markers) = marked_text_by(marked_text, vec!['|']);
+    (unmarked_text, markers.remove(&'|').unwrap_or_else(Vec::new))
+}
+
+pub fn marked_text_ranges(marked_text: &str) -> (String, Vec<Range<usize>>) {
+    let (unmarked_text, mut markers) = marked_text_by(marked_text, vec!['[', ']']);
+    let opens = markers.remove(&'[').unwrap_or_default();
+    let closes = markers.remove(&']').unwrap_or_default();
+    assert_eq!(opens.len(), closes.len(), "marked ranges are unbalanced");
+
+    let ranges = opens
+        .into_iter()
+        .zip(closes)
+        .map(|(open, close)| {
+            assert!(close >= open, "marked ranges must be disjoint");
+            open..close
+        })
+        .collect();
+    (unmarked_text, ranges)
+}
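A short usage sketch of the relocated helpers, assuming they are in scope (e.g. `use util::test::{marked_text, marked_text_ranges};`, given that test.rs re-exports this module): `|` markers become offsets into the unmarked text, and `[` / `]` pairs become ranges.

    fn main() {
        // '|' markers are stripped and reported as byte offsets into the remaining text.
        let (text, offsets) = marked_text("a|bc|");
        assert_eq!(text, "abc");
        assert_eq!(offsets, vec![1, 3]);

        // '[' and ']' pairs are stripped and reported as ranges.
        let (text, ranges) = marked_text_ranges("[ab]c[d]");
        assert_eq!(text, "abcd");
        assert_eq!(ranges, vec![0..2, 3..4]);
    }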