Use outline queries to chunk files syntactically (#11283)
This chunking strategy uses the existing `outline` query to chunk files. We try to find chunk boundaries that are:

* at the starts or ends of lines
* nested within as few outline items as possible

Release Notes:

- N/A
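As a rough illustration of the boundary heuristic described in the summary above, here is a minimal, self-contained sketch. It is not code from this PR: the `OutlineItem` type, the `pick_boundary` function, and the scoring rule are assumptions made only to show the idea of preferring split points that sit at line boundaries and inside the fewest outline items.

```rust
/// A simplified outline item: a syntax node (e.g. a function) spanning a byte range.
struct OutlineItem {
    range: std::ops::Range<usize>,
}

/// How many outline items contain the given byte offset.
fn nesting_depth(items: &[OutlineItem], offset: usize) -> usize {
    items.iter().filter(|item| item.range.contains(&offset)).count()
}

/// Among candidate split offsets, keep those that fall at the start or end of a
/// line, then pick the one nested inside the fewest outline items.
fn pick_boundary(text: &str, items: &[OutlineItem], candidates: &[usize]) -> Option<usize> {
    candidates
        .iter()
        .copied()
        .filter(|&offset| {
            offset == 0
                || offset == text.len()
                || text.as_bytes()[offset - 1] == b'\n'
                || text.as_bytes().get(offset) == Some(&b'\n')
        })
        .min_by_key(|&offset| nesting_depth(items, offset))
}

fn main() {
    let text = "fn a() {\n    1\n}\n\nfn b() {\n    2\n}\n";
    // Pretend the outline query produced one item per function.
    let items = [OutlineItem { range: 0..17 }, OutlineItem { range: 18..35 }];
    // Candidate split points near the desired chunk size.
    let candidates = [12, 17, 20];
    println!("chosen boundary: {:?}", pick_boundary(text, &items, &candidates));
}
```

With these inputs, the chosen boundary is the blank line between the two functions: it falls at a line start and inside no outline item, while the other candidates are either mid-line or nested inside a function.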
Parent: 1abd58070b
Commit: 43ad470e58

5 changed files with 322 additions and 353 deletions
```diff
@@ -55,10 +55,10 @@ use std::{
         Arc,
     },
 };
-use syntax_map::SyntaxSnapshot;
+use syntax_map::{QueryCursorHandle, SyntaxSnapshot};
 pub use task_context::{BasicContextProvider, ContextProvider, ContextProviderWithTasks};
 use theme::SyntaxTheme;
-use tree_sitter::{self, wasmtime, Query, WasmStore};
+use tree_sitter::{self, wasmtime, Query, QueryCursor, WasmStore};
 use util::http::HttpClient;
 
 pub use buffer::Operation;
```
```diff
@@ -101,6 +101,15 @@ where
     })
 }
 
+pub fn with_query_cursor<F, R>(func: F) -> R
+where
+    F: FnOnce(&mut QueryCursor) -> R,
+{
+    use std::ops::DerefMut;
+    let mut cursor = QueryCursorHandle::new();
+    func(cursor.deref_mut())
+}
+
 lazy_static! {
     static ref NEXT_LANGUAGE_ID: AtomicUsize = Default::default();
     static ref NEXT_GRAMMAR_ID: AtomicUsize = Default::default();
```
```diff
@@ -211,7 +211,7 @@ struct TextProvider<'a>(&'a Rope);
 
 struct ByteChunks<'a>(text::Chunks<'a>);
 
-struct QueryCursorHandle(Option<QueryCursor>);
+pub(crate) struct QueryCursorHandle(Option<QueryCursor>);
 
 impl SyntaxMap {
     pub fn new() -> Self {
```
```diff
@@ -1739,7 +1739,7 @@ impl<'a> Iterator for ByteChunks<'a> {
 }
 
 impl QueryCursorHandle {
-    pub(crate) fn new() -> Self {
+    pub fn new() -> Self {
         let mut cursor = QUERY_CURSORS.lock().pop().unwrap_or_else(QueryCursor::new);
         cursor.set_match_limit(64);
         QueryCursorHandle(Some(cursor))
```
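Taken together, these hunks expose a small pooled-cursor pattern: `QueryCursorHandle::new` checks a `tree_sitter::QueryCursor` out of a shared pool (creating one if the pool is empty) and caps its match limit, and the new `with_query_cursor` helper lends that cursor to a closure. Below is a self-contained, dependency-free sketch of that shape. The `Cursor` stand-in, the pool, and the `Drop` behavior that returns the cursor to the pool are assumptions for illustration only, not the crate's actual types.

```rust
use std::ops::{Deref, DerefMut};
use std::sync::{Mutex, OnceLock};

// Stand-in for tree_sitter::QueryCursor so the sketch needs no external crates.
struct Cursor {
    match_limit: u32,
}

// Shared pool of idle cursors, analogous to the QUERY_CURSORS list in the diff.
static CURSOR_POOL: OnceLock<Mutex<Vec<Cursor>>> = OnceLock::new();

fn pool() -> &'static Mutex<Vec<Cursor>> {
    CURSOR_POOL.get_or_init(|| Mutex::new(Vec::new()))
}

/// RAII handle: pops a cursor from the pool on creation, pushes it back on drop.
struct CursorHandle(Option<Cursor>);

impl CursorHandle {
    fn new() -> Self {
        let mut cursor = pool()
            .lock()
            .unwrap()
            .pop()
            .unwrap_or(Cursor { match_limit: 0 });
        cursor.match_limit = 64; // mirrors `cursor.set_match_limit(64)` in the diff
        CursorHandle(Some(cursor))
    }
}

impl Deref for CursorHandle {
    type Target = Cursor;
    fn deref(&self) -> &Cursor {
        self.0.as_ref().unwrap()
    }
}

impl DerefMut for CursorHandle {
    fn deref_mut(&mut self) -> &mut Cursor {
        self.0.as_mut().unwrap()
    }
}

impl Drop for CursorHandle {
    fn drop(&mut self) {
        // Recycle the cursor instead of discarding it (assumed behavior of the real handle).
        pool().lock().unwrap().push(self.0.take().unwrap());
    }
}

/// Same shape as the `with_query_cursor` helper added in the diff.
fn with_cursor<F, R>(func: F) -> R
where
    F: FnOnce(&mut Cursor) -> R,
{
    let mut cursor = CursorHandle::new();
    func(cursor.deref_mut())
}

fn main() {
    let limit = with_cursor(|cursor| cursor.match_limit);
    println!("match limit on the borrowed cursor: {limit}");
    println!("idle cursors now in the pool: {}", pool().lock().unwrap().len());
}
```

The visibility changes in the last two hunks widen access to the handle and its constructor, presumably so the new outline-based chunking code can borrow a pooled cursor through `with_query_cursor` instead of constructing a fresh `QueryCursor` on every call.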