Move all crates to a top-level crates folder
Co-Authored-By: Max Brunsfeld <maxbrunsfeld@gmail.com>
This commit is contained in:
parent d768224182
commit fdfed3d7db

282 changed files with 195588 additions and 16 deletions
29 crates/buffer/Cargo.toml Normal file
@@ -0,0 +1,29 @@
[package]
name = "buffer"
version = "0.1.0"
edition = "2018"

[features]
test-support = ["rand"]

[dependencies]
anyhow = "1.0.38"
arrayvec = "0.7.1"
clock = { path = "../clock" }
gpui = { path = "../gpui" }
lazy_static = "1.4"
log = "0.4"
parking_lot = "0.11.1"
rand = { version = "0.8.3", optional = true }
seahash = "4.1"
serde = { version = "1", features = ["derive"] }
similar = "1.3"
smallvec = { version = "1.6", features = ["union"] }
sum_tree = { path = "../sum_tree" }
tree-sitter = "0.19.5"
zrpc = { path = "../zrpc" }

[dev-dependencies]
rand = "0.8.3"
tree-sitter-rust = "0.19.0"
unindent = "0.1.7"
77 crates/buffer/src/anchor.rs Normal file
@@ -0,0 +1,77 @@
use super::{Buffer, Content};
use anyhow::Result;
use std::{cmp::Ordering, ops::Range};
use sum_tree::Bias;

#[derive(Clone, Eq, PartialEq, Debug, Hash)]
pub struct Anchor {
    pub offset: usize,
    pub bias: Bias,
    pub version: clock::Global,
}

impl Anchor {
    pub fn min() -> Self {
        Self {
            offset: 0,
            bias: Bias::Left,
            version: Default::default(),
        }
    }

    pub fn max() -> Self {
        Self {
            offset: usize::MAX,
            bias: Bias::Right,
            version: Default::default(),
        }
    }

    pub fn cmp<'a>(&self, other: &Anchor, buffer: impl Into<Content<'a>>) -> Result<Ordering> {
        let buffer = buffer.into();

        if self == other {
            return Ok(Ordering::Equal);
        }

        let offset_comparison = if self.version == other.version {
            self.offset.cmp(&other.offset)
        } else {
            buffer
                .full_offset_for_anchor(self)
                .cmp(&buffer.full_offset_for_anchor(other))
        };

        Ok(offset_comparison.then_with(|| self.bias.cmp(&other.bias)))
    }

    pub fn bias_left(&self, buffer: &Buffer) -> Anchor {
        if self.bias == Bias::Left {
            self.clone()
        } else {
            buffer.anchor_before(self)
        }
    }

    pub fn bias_right(&self, buffer: &Buffer) -> Anchor {
        if self.bias == Bias::Right {
            self.clone()
        } else {
            buffer.anchor_after(self)
        }
    }
}

pub trait AnchorRangeExt {
    fn cmp<'a>(&self, b: &Range<Anchor>, buffer: impl Into<Content<'a>>) -> Result<Ordering>;
}

impl AnchorRangeExt for Range<Anchor> {
    fn cmp<'a>(&self, other: &Range<Anchor>, buffer: impl Into<Content<'a>>) -> Result<Ordering> {
        let buffer = buffer.into();
        Ok(match self.start.cmp(&other.start, &buffer)? {
            Ordering::Equal => other.end.cmp(&self.end, buffer)?,
            ord @ _ => ord,
        })
    }
}
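Usage sketch, not part of this commit: ordering anchor ranges with the AnchorRangeExt trait above. It assumes a buffer: &Buffer from this crate's lib.rs (whose diff is suppressed below) and relies on &Buffer converting into Content, as the methods above and selection.rs already do. Note that when two ranges start at the same position, the impl compares other.end against self.end, so the longer, containing range sorts first.

// Sketch only: sort anchor ranges by their resolved position in `buffer`.
// Assumes `Anchor`, `AnchorRangeExt`, and `Buffer` are in scope.
fn sort_anchor_ranges(ranges: &mut Vec<std::ops::Range<Anchor>>, buffer: &Buffer) {
    ranges.sort_by(|a, b| a.cmp(b, buffer).unwrap());
}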
96 crates/buffer/src/highlight_map.rs Normal file
@@ -0,0 +1,96 @@
use crate::syntax_theme::SyntaxTheme;
use std::sync::Arc;

#[derive(Clone, Debug)]
pub struct HighlightMap(Arc<[HighlightId]>);

#[derive(Clone, Copy, Debug)]
pub struct HighlightId(pub u32);

const DEFAULT_HIGHLIGHT_ID: HighlightId = HighlightId(u32::MAX);

impl HighlightMap {
    pub fn new(capture_names: &[String], theme: &SyntaxTheme) -> Self {
        // For each capture name in the highlight query, find the longest
        // key in the theme's syntax styles that matches all of the
        // dot-separated components of the capture name.
        HighlightMap(
            capture_names
                .iter()
                .map(|capture_name| {
                    theme
                        .highlights
                        .iter()
                        .enumerate()
                        .filter_map(|(i, (key, _))| {
                            let mut len = 0;
                            let capture_parts = capture_name.split('.');
                            for key_part in key.split('.') {
                                if capture_parts.clone().any(|part| part == key_part) {
                                    len += 1;
                                } else {
                                    return None;
                                }
                            }
                            Some((i, len))
                        })
                        .max_by_key(|(_, len)| *len)
                        .map_or(DEFAULT_HIGHLIGHT_ID, |(i, _)| HighlightId(i as u32))
                })
                .collect(),
        )
    }

    pub fn get(&self, capture_id: u32) -> HighlightId {
        self.0
            .get(capture_id as usize)
            .copied()
            .unwrap_or(DEFAULT_HIGHLIGHT_ID)
    }
}

impl Default for HighlightMap {
    fn default() -> Self {
        Self(Arc::new([]))
    }
}

impl Default for HighlightId {
    fn default() -> Self {
        DEFAULT_HIGHLIGHT_ID
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use gpui::color::Color;

    #[test]
    fn test_highlight_map() {
        let theme = SyntaxTheme::new(
            [
                ("function", Color::from_u32(0x100000ff)),
                ("function.method", Color::from_u32(0x200000ff)),
                ("function.async", Color::from_u32(0x300000ff)),
                ("variable.builtin.self.rust", Color::from_u32(0x400000ff)),
                ("variable.builtin", Color::from_u32(0x500000ff)),
                ("variable", Color::from_u32(0x600000ff)),
            ]
            .iter()
            .map(|(name, color)| (name.to_string(), (*color).into()))
            .collect(),
        );

        let capture_names = &[
            "function.special".to_string(),
            "function.async.rust".to_string(),
            "variable.builtin.self".to_string(),
        ];

        let map = HighlightMap::new(capture_names, &theme);
        assert_eq!(theme.highlight_name(map.get(0)), Some("function"));
        assert_eq!(theme.highlight_name(map.get(1)), Some("function.async"));
        assert_eq!(theme.highlight_name(map.get(2)), Some("variable.builtin"));
    }
}
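A small sketch, not part of this commit, of how a HighlightMap might be combined with the SyntaxTheme from syntax_theme.rs once tree-sitter reports a capture index; the function is hypothetical glue, not an API added by this diff.

// Sketch: resolve a tree-sitter capture index to a concrete style. Captures
// with no matching theme key map to DEFAULT_HIGHLIGHT_ID (u32::MAX), which
// falls outside the theme's vector and therefore yields None here.
fn style_for_capture(
    map: &HighlightMap,
    theme: &SyntaxTheme,
    capture_index: u32,
) -> Option<gpui::fonts::HighlightStyle> {
    theme.highlight_style(map.get(capture_index))
}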
135 crates/buffer/src/language.rs Normal file
@@ -0,0 +1,135 @@
use crate::{HighlightMap, SyntaxTheme};
use parking_lot::Mutex;
use serde::Deserialize;
use std::{path::Path, str, sync::Arc};
use tree_sitter::{Language as Grammar, Query};
pub use tree_sitter::{Parser, Tree};

#[derive(Default, Deserialize)]
pub struct LanguageConfig {
    pub name: String,
    pub path_suffixes: Vec<String>,
}

#[derive(Deserialize)]
pub struct BracketPair {
    pub start: String,
    pub end: String,
}

pub struct Language {
    pub config: LanguageConfig,
    pub grammar: Grammar,
    pub highlight_query: Query,
    pub brackets_query: Query,
    pub highlight_map: Mutex<HighlightMap>,
}

#[derive(Default)]
pub struct LanguageRegistry {
    languages: Vec<Arc<Language>>,
}

impl LanguageRegistry {
    pub fn new() -> Self {
        Self::default()
    }

    pub fn add(&mut self, language: Arc<Language>) {
        self.languages.push(language);
    }

    pub fn set_theme(&self, theme: &SyntaxTheme) {
        for language in &self.languages {
            language.set_theme(theme);
        }
    }

    pub fn select_language(&self, path: impl AsRef<Path>) -> Option<&Arc<Language>> {
        let path = path.as_ref();
        let filename = path.file_name().and_then(|name| name.to_str());
        let extension = path.extension().and_then(|name| name.to_str());
        let path_suffixes = [extension, filename];
        self.languages.iter().find(|language| {
            language
                .config
                .path_suffixes
                .iter()
                .any(|suffix| path_suffixes.contains(&Some(suffix.as_str())))
        })
    }
}

impl Language {
    pub fn name(&self) -> &str {
        self.config.name.as_str()
    }

    pub fn highlight_map(&self) -> HighlightMap {
        self.highlight_map.lock().clone()
    }

    pub fn set_theme(&self, theme: &SyntaxTheme) {
        *self.highlight_map.lock() = HighlightMap::new(self.highlight_query.capture_names(), theme);
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_select_language() {
        let grammar = tree_sitter_rust::language();
        let registry = LanguageRegistry {
            languages: vec![
                Arc::new(Language {
                    config: LanguageConfig {
                        name: "Rust".to_string(),
                        path_suffixes: vec!["rs".to_string()],
                        ..Default::default()
                    },
                    grammar,
                    highlight_query: Query::new(grammar, "").unwrap(),
                    brackets_query: Query::new(grammar, "").unwrap(),
                    highlight_map: Default::default(),
                }),
                Arc::new(Language {
                    config: LanguageConfig {
                        name: "Make".to_string(),
                        path_suffixes: vec!["Makefile".to_string(), "mk".to_string()],
                        ..Default::default()
                    },
                    grammar,
                    highlight_query: Query::new(grammar, "").unwrap(),
                    brackets_query: Query::new(grammar, "").unwrap(),
                    highlight_map: Default::default(),
                }),
            ],
        };

        // matching file extension
        assert_eq!(
            registry.select_language("zed/lib.rs").map(|l| l.name()),
            Some("Rust")
        );
        assert_eq!(
            registry.select_language("zed/lib.mk").map(|l| l.name()),
            Some("Make")
        );

        // matching filename
        assert_eq!(
            registry.select_language("zed/Makefile").map(|l| l.name()),
            Some("Make")
        );

        // matching suffix that is not the full file extension or filename
        assert_eq!(registry.select_language("zed/cars").map(|l| l.name()), None);
        assert_eq!(
            registry.select_language("zed/a.cars").map(|l| l.name()),
            None
        );
        assert_eq!(registry.select_language("zed/sumk").map(|l| l.name()), None);
    }
}
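A start-up sketch, not part of this commit, mirroring the test above: register a language, resolve it by path, and push the active theme through the registry so each language rebuilds its HighlightMap via set_theme. The empty query strings are placeholders.

// Sketch: build a one-language registry and wire it to a theme.
fn bootstrap(theme: &SyntaxTheme) -> LanguageRegistry {
    let grammar = tree_sitter_rust::language();
    let mut registry = LanguageRegistry::new();
    registry.add(Arc::new(Language {
        config: LanguageConfig {
            name: "Rust".to_string(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        grammar,
        highlight_query: Query::new(grammar, "").unwrap(),
        brackets_query: Query::new(grammar, "").unwrap(),
        highlight_map: Default::default(),
    }));
    registry.set_theme(theme); // re-derives each language's HighlightMap
    assert!(registry.select_language("src/main.rs").is_some());
    registry
}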
4098 crates/buffer/src/lib.rs Normal file
File diff suppressed because it is too large
127 crates/buffer/src/operation_queue.rs Normal file
@@ -0,0 +1,127 @@
use super::Operation;
use std::{fmt::Debug, ops::Add};
use sum_tree::{Cursor, Dimension, Edit, Item, KeyedItem, SumTree, Summary};

#[derive(Clone, Debug)]
pub struct OperationQueue(SumTree<Operation>);

#[derive(Clone, Copy, Debug, Default, Eq, Ord, PartialEq, PartialOrd)]
pub struct OperationKey(clock::Lamport);

#[derive(Clone, Copy, Debug, Default, Eq, PartialEq)]
pub struct OperationSummary {
    pub key: OperationKey,
    pub len: usize,
}

impl OperationKey {
    pub fn new(timestamp: clock::Lamport) -> Self {
        Self(timestamp)
    }
}

impl OperationQueue {
    pub fn new() -> Self {
        OperationQueue(SumTree::new())
    }

    pub fn len(&self) -> usize {
        self.0.summary().len
    }

    pub fn insert(&mut self, mut ops: Vec<Operation>) {
        ops.sort_by_key(|op| op.lamport_timestamp());
        ops.dedup_by_key(|op| op.lamport_timestamp());
        self.0
            .edit(ops.into_iter().map(Edit::Insert).collect(), &());
    }

    pub fn drain(&mut self) -> Self {
        let clone = self.clone();
        self.0 = SumTree::new();
        clone
    }

    pub fn cursor(&self) -> Cursor<Operation, ()> {
        self.0.cursor()
    }
}

impl Summary for OperationSummary {
    type Context = ();

    fn add_summary(&mut self, other: &Self, _: &()) {
        assert!(self.key < other.key);
        self.key = other.key;
        self.len += other.len;
    }
}

impl<'a> Add<&'a Self> for OperationSummary {
    type Output = Self;

    fn add(self, other: &Self) -> Self {
        assert!(self.key < other.key);
        OperationSummary {
            key: other.key,
            len: self.len + other.len,
        }
    }
}

impl<'a> Dimension<'a, OperationSummary> for OperationKey {
    fn add_summary(&mut self, summary: &OperationSummary, _: &()) {
        assert!(*self <= summary.key);
        *self = summary.key;
    }
}

impl Item for Operation {
    type Summary = OperationSummary;

    fn summary(&self) -> Self::Summary {
        OperationSummary {
            key: OperationKey::new(self.lamport_timestamp()),
            len: 1,
        }
    }
}

impl KeyedItem for Operation {
    type Key = OperationKey;

    fn key(&self) -> Self::Key {
        OperationKey::new(self.lamport_timestamp())
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_len() {
        let mut clock = clock::Lamport::new(0);

        let mut queue = OperationQueue::new();
        assert_eq!(queue.len(), 0);

        queue.insert(vec![
            Operation::Test(clock.tick()),
            Operation::Test(clock.tick()),
        ]);
        assert_eq!(queue.len(), 2);

        queue.insert(vec![Operation::Test(clock.tick())]);
        assert_eq!(queue.len(), 3);

        drop(queue.drain());
        assert_eq!(queue.len(), 0);

        queue.insert(vec![Operation::Test(clock.tick())]);
        assert_eq!(queue.len(), 1);
    }

    #[derive(Clone, Debug, Eq, PartialEq)]
    struct TestOperation(clock::Lamport);
}
129 crates/buffer/src/point.rs Normal file
@@ -0,0 +1,129 @@
use std::{
    cmp::Ordering,
    ops::{Add, AddAssign, Sub},
};

#[derive(Clone, Copy, Default, Eq, PartialEq, Debug, Hash)]
pub struct Point {
    pub row: u32,
    pub column: u32,
}

impl Point {
    pub const MAX: Self = Self {
        row: u32::MAX,
        column: u32::MAX,
    };

    pub fn new(row: u32, column: u32) -> Self {
        Point { row, column }
    }

    pub fn zero() -> Self {
        Point::new(0, 0)
    }

    pub fn is_zero(&self) -> bool {
        self.row == 0 && self.column == 0
    }
}

impl<'a> Add<&'a Self> for Point {
    type Output = Point;

    fn add(self, other: &'a Self) -> Self::Output {
        if other.row == 0 {
            Point::new(self.row, self.column + other.column)
        } else {
            Point::new(self.row + other.row, other.column)
        }
    }
}

impl Add for Point {
    type Output = Point;

    fn add(self, other: Self) -> Self::Output {
        self + &other
    }
}

impl<'a> Sub<&'a Self> for Point {
    type Output = Point;

    fn sub(self, other: &'a Self) -> Self::Output {
        debug_assert!(*other <= self);

        if self.row == other.row {
            Point::new(0, self.column - other.column)
        } else {
            Point::new(self.row - other.row, self.column)
        }
    }
}

impl Sub for Point {
    type Output = Point;

    fn sub(self, other: Self) -> Self::Output {
        self - &other
    }
}

impl<'a> AddAssign<&'a Self> for Point {
    fn add_assign(&mut self, other: &'a Self) {
        *self += *other;
    }
}

impl AddAssign<Self> for Point {
    fn add_assign(&mut self, other: Self) {
        if other.row == 0 {
            self.column += other.column;
        } else {
            self.row += other.row;
            self.column = other.column;
        }
    }
}

impl PartialOrd for Point {
    fn partial_cmp(&self, other: &Point) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}

impl Ord for Point {
    #[cfg(target_pointer_width = "64")]
    fn cmp(&self, other: &Point) -> Ordering {
        let a = (self.row as usize) << 32 | self.column as usize;
        let b = (other.row as usize) << 32 | other.column as usize;
        a.cmp(&b)
    }

    #[cfg(target_pointer_width = "32")]
    fn cmp(&self, other: &Point) -> Ordering {
        match self.row.cmp(&other.row) {
            Ordering::Equal => self.column.cmp(&other.column),
            comparison @ _ => comparison,
        }
    }
}

impl Into<tree_sitter::Point> for Point {
    fn into(self) -> tree_sitter::Point {
        tree_sitter::Point {
            row: self.row as usize,
            column: self.column as usize,
        }
    }
}

impl From<tree_sitter::Point> for Point {
    fn from(point: tree_sitter::Point) -> Self {
        Self {
            row: point.row as u32,
            column: point.column as u32,
        }
    }
}
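The Add and Sub impls above are not component-wise: the right-hand operand is treated as the extent of a block of text appended after the left-hand position, so a multi-line extent replaces the column. A short illustration, not part of this commit:

// Sketch: adding a same-row extent advances the column; adding a multi-row
// extent moves down and takes the extent's final column. Sub is the inverse.
fn point_arithmetic_examples() {
    assert_eq!(Point::new(1, 2) + Point::new(0, 3), Point::new(1, 5));
    assert_eq!(Point::new(1, 2) + Point::new(2, 4), Point::new(3, 4));
    assert_eq!(Point::new(1, 5) - Point::new(1, 2), Point::new(0, 3));
    assert_eq!(Point::new(3, 4) - Point::new(1, 2), Point::new(2, 4));
}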
28 crates/buffer/src/random_char_iter.rs Normal file
@@ -0,0 +1,28 @@
use rand::prelude::*;

pub struct RandomCharIter<T: Rng>(T);

impl<T: Rng> RandomCharIter<T> {
    pub fn new(rng: T) -> Self {
        Self(rng)
    }
}

impl<T: Rng> Iterator for RandomCharIter<T> {
    type Item = char;

    fn next(&mut self) -> Option<Self::Item> {
        match self.0.gen_range(0..100) {
            // whitespace
            0..=19 => [' ', '\n', '\t'].choose(&mut self.0).copied(),
            // two-byte greek letters
            20..=32 => char::from_u32(self.0.gen_range(('α' as u32)..('ω' as u32 + 1))),
            // three-byte characters
            33..=45 => ['✋', '✅', '❌', '❎', '⭐'].choose(&mut self.0).copied(),
            // four-byte characters
            46..=58 => ['🍐', '🏀', '🍗', '🎉'].choose(&mut self.0).copied(),
            // ascii letters
            _ => Some(self.0.gen_range(b'a'..b'z' + 1).into()),
        }
    }
}
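This iterator deliberately mixes 1-, 2-, 3- and 4-byte characters to exercise chunk-boundary handling, presumably behind the optional rand dependency and test-support feature declared in Cargo.toml. A usage sketch, not part of this commit, matching how the rope tests below consume it:

// Sketch: generate a short random string with mixed UTF-8 widths.
fn random_text(rng: &mut impl rand::Rng, len: usize) -> String {
    RandomCharIter::new(rng).take(len).collect()
}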
613 crates/buffer/src/rope.rs Normal file
@@ -0,0 +1,613 @@
use super::Point;
use arrayvec::ArrayString;
use smallvec::SmallVec;
use std::{cmp, ops::Range, str};
use sum_tree::{self, Bias, SumTree};

#[cfg(test)]
const CHUNK_BASE: usize = 6;

#[cfg(not(test))]
const CHUNK_BASE: usize = 16;

#[derive(Clone, Default, Debug)]
pub struct Rope {
    chunks: SumTree<Chunk>,
}

impl Rope {
    pub fn new() -> Self {
        Self::default()
    }

    pub fn append(&mut self, rope: Rope) {
        let mut chunks = rope.chunks.cursor::<()>();
        chunks.next(&());
        if let Some(chunk) = chunks.item() {
            if self.chunks.last().map_or(false, |c| c.0.len() < CHUNK_BASE)
                || chunk.0.len() < CHUNK_BASE
            {
                self.push(&chunk.0);
                chunks.next(&());
            }
        }

        self.chunks.push_tree(chunks.suffix(&()), &());
        self.check_invariants();
    }

    pub fn push(&mut self, text: &str) {
        let mut new_chunks = SmallVec::<[_; 16]>::new();
        let mut new_chunk = ArrayString::new();
        for ch in text.chars() {
            if new_chunk.len() + ch.len_utf8() > 2 * CHUNK_BASE {
                new_chunks.push(Chunk(new_chunk));
                new_chunk = ArrayString::new();
            }
            new_chunk.push(ch);
        }
        if !new_chunk.is_empty() {
            new_chunks.push(Chunk(new_chunk));
        }

        let mut new_chunks = new_chunks.into_iter();
        let mut first_new_chunk = new_chunks.next();
        self.chunks.update_last(
            |last_chunk| {
                if let Some(first_new_chunk_ref) = first_new_chunk.as_mut() {
                    if last_chunk.0.len() + first_new_chunk_ref.0.len() <= 2 * CHUNK_BASE {
                        last_chunk.0.push_str(&first_new_chunk.take().unwrap().0);
                    } else {
                        let mut text = ArrayString::<{ 4 * CHUNK_BASE }>::new();
                        text.push_str(&last_chunk.0);
                        text.push_str(&first_new_chunk_ref.0);
                        let (left, right) = text.split_at(find_split_ix(&text));
                        last_chunk.0.clear();
                        last_chunk.0.push_str(left);
                        first_new_chunk_ref.0.clear();
                        first_new_chunk_ref.0.push_str(right);
                    }
                }
            },
            &(),
        );

        self.chunks
            .extend(first_new_chunk.into_iter().chain(new_chunks), &());
        self.check_invariants();
    }

    fn check_invariants(&self) {
        #[cfg(test)]
        {
            // Ensure all chunks except maybe the last one are not underflowing.
            // Allow some wiggle room for multibyte characters at chunk boundaries.
            let mut chunks = self.chunks.cursor::<()>().peekable();
            while let Some(chunk) = chunks.next() {
                if chunks.peek().is_some() {
                    assert!(chunk.0.len() + 3 >= CHUNK_BASE);
                }
            }
        }
    }

    pub fn summary(&self) -> TextSummary {
        self.chunks.summary()
    }

    pub fn len(&self) -> usize {
        self.chunks.extent(&())
    }

    pub fn max_point(&self) -> Point {
        self.chunks.extent(&())
    }

    pub fn cursor(&self, offset: usize) -> Cursor {
        Cursor::new(self, offset)
    }

    pub fn chars(&self) -> impl Iterator<Item = char> + '_ {
        self.chars_at(0)
    }

    pub fn chars_at(&self, start: usize) -> impl Iterator<Item = char> + '_ {
        self.chunks_in_range(start..self.len()).flat_map(str::chars)
    }

    pub fn chunks<'a>(&'a self) -> Chunks<'a> {
        self.chunks_in_range(0..self.len())
    }

    pub fn chunks_in_range<'a>(&'a self, range: Range<usize>) -> Chunks<'a> {
        Chunks::new(self, range)
    }

    pub fn to_point(&self, offset: usize) -> Point {
        assert!(offset <= self.summary().bytes);
        let mut cursor = self.chunks.cursor::<(usize, Point)>();
        cursor.seek(&offset, Bias::Left, &());
        let overshoot = offset - cursor.start().0;
        cursor.start().1
            + cursor
                .item()
                .map_or(Point::zero(), |chunk| chunk.to_point(overshoot))
    }

    pub fn to_offset(&self, point: Point) -> usize {
        assert!(point <= self.summary().lines);
        let mut cursor = self.chunks.cursor::<(Point, usize)>();
        cursor.seek(&point, Bias::Left, &());
        let overshoot = point - cursor.start().0;
        cursor.start().1 + cursor.item().map_or(0, |chunk| chunk.to_offset(overshoot))
    }

    pub fn clip_offset(&self, mut offset: usize, bias: Bias) -> usize {
        let mut cursor = self.chunks.cursor::<usize>();
        cursor.seek(&offset, Bias::Left, &());
        if let Some(chunk) = cursor.item() {
            let mut ix = offset - cursor.start();
            while !chunk.0.is_char_boundary(ix) {
                match bias {
                    Bias::Left => {
                        ix -= 1;
                        offset -= 1;
                    }
                    Bias::Right => {
                        ix += 1;
                        offset += 1;
                    }
                }
            }
            offset
        } else {
            self.summary().bytes
        }
    }

    pub fn clip_point(&self, point: Point, bias: Bias) -> Point {
        let mut cursor = self.chunks.cursor::<Point>();
        cursor.seek(&point, Bias::Right, &());
        if let Some(chunk) = cursor.item() {
            let overshoot = point - cursor.start();
            *cursor.start() + chunk.clip_point(overshoot, bias)
        } else {
            self.summary().lines
        }
    }
}

impl<'a> From<&'a str> for Rope {
    fn from(text: &'a str) -> Self {
        let mut rope = Self::new();
        rope.push(text);
        rope
    }
}

impl Into<String> for Rope {
    fn into(self) -> String {
        self.chunks().collect()
    }
}

pub struct Cursor<'a> {
    rope: &'a Rope,
    chunks: sum_tree::Cursor<'a, Chunk, usize>,
    offset: usize,
}

impl<'a> Cursor<'a> {
    pub fn new(rope: &'a Rope, offset: usize) -> Self {
        let mut chunks = rope.chunks.cursor();
        chunks.seek(&offset, Bias::Right, &());
        Self {
            rope,
            chunks,
            offset,
        }
    }

    pub fn seek_forward(&mut self, end_offset: usize) {
        debug_assert!(end_offset >= self.offset);

        self.chunks.seek_forward(&end_offset, Bias::Right, &());
        self.offset = end_offset;
    }

    pub fn slice(&mut self, end_offset: usize) -> Rope {
        debug_assert!(
            end_offset >= self.offset,
            "cannot slice backwards from {} to {}",
            self.offset,
            end_offset
        );

        let mut slice = Rope::new();
        if let Some(start_chunk) = self.chunks.item() {
            let start_ix = self.offset - self.chunks.start();
            let end_ix = cmp::min(end_offset, self.chunks.end(&())) - self.chunks.start();
            slice.push(&start_chunk.0[start_ix..end_ix]);
        }

        if end_offset > self.chunks.end(&()) {
            self.chunks.next(&());
            slice.append(Rope {
                chunks: self.chunks.slice(&end_offset, Bias::Right, &()),
            });
            if let Some(end_chunk) = self.chunks.item() {
                let end_ix = end_offset - self.chunks.start();
                slice.push(&end_chunk.0[..end_ix]);
            }
        }

        self.offset = end_offset;
        slice
    }

    pub fn summary(&mut self, end_offset: usize) -> TextSummary {
        debug_assert!(end_offset >= self.offset);

        let mut summary = TextSummary::default();
        if let Some(start_chunk) = self.chunks.item() {
            let start_ix = self.offset - self.chunks.start();
            let end_ix = cmp::min(end_offset, self.chunks.end(&())) - self.chunks.start();
            summary = TextSummary::from(&start_chunk.0[start_ix..end_ix]);
        }

        if end_offset > self.chunks.end(&()) {
            self.chunks.next(&());
            summary += &self.chunks.summary(&end_offset, Bias::Right, &());
            if let Some(end_chunk) = self.chunks.item() {
                let end_ix = end_offset - self.chunks.start();
                summary += TextSummary::from(&end_chunk.0[..end_ix]);
            }
        }

        summary
    }

    pub fn suffix(mut self) -> Rope {
        self.slice(self.rope.chunks.extent(&()))
    }

    pub fn offset(&self) -> usize {
        self.offset
    }
}

pub struct Chunks<'a> {
    chunks: sum_tree::Cursor<'a, Chunk, usize>,
    range: Range<usize>,
}

impl<'a> Chunks<'a> {
    pub fn new(rope: &'a Rope, range: Range<usize>) -> Self {
        let mut chunks = rope.chunks.cursor();
        chunks.seek(&range.start, Bias::Right, &());
        Self { chunks, range }
    }

    pub fn offset(&self) -> usize {
        self.range.start.max(*self.chunks.start())
    }

    pub fn seek(&mut self, offset: usize) {
        if offset >= self.chunks.end(&()) {
            self.chunks.seek_forward(&offset, Bias::Right, &());
        } else {
            self.chunks.seek(&offset, Bias::Right, &());
        }
        self.range.start = offset;
    }

    pub fn peek(&self) -> Option<&'a str> {
        if let Some(chunk) = self.chunks.item() {
            let offset = *self.chunks.start();
            if self.range.end > offset {
                let start = self.range.start.saturating_sub(*self.chunks.start());
                let end = self.range.end - self.chunks.start();
                return Some(&chunk.0[start..chunk.0.len().min(end)]);
            }
        }
        None
    }
}

impl<'a> Iterator for Chunks<'a> {
    type Item = &'a str;

    fn next(&mut self) -> Option<Self::Item> {
        let result = self.peek();
        if result.is_some() {
            self.chunks.next(&());
        }
        result
    }
}

#[derive(Clone, Debug, Default)]
struct Chunk(ArrayString<{ 2 * CHUNK_BASE }>);

impl Chunk {
    fn to_point(&self, target: usize) -> Point {
        let mut offset = 0;
        let mut point = Point::new(0, 0);
        for ch in self.0.chars() {
            if offset >= target {
                break;
            }

            if ch == '\n' {
                point.row += 1;
                point.column = 0;
            } else {
                point.column += ch.len_utf8() as u32;
            }
            offset += ch.len_utf8();
        }
        point
    }

    fn to_offset(&self, target: Point) -> usize {
        let mut offset = 0;
        let mut point = Point::new(0, 0);
        for ch in self.0.chars() {
            if point >= target {
                if point > target {
                    panic!("point {:?} is inside of character {:?}", target, ch);
                }
                break;
            }

            if ch == '\n' {
                point.row += 1;
                point.column = 0;
            } else {
                point.column += ch.len_utf8() as u32;
            }
            offset += ch.len_utf8();
        }
        offset
    }

    fn clip_point(&self, target: Point, bias: Bias) -> Point {
        for (row, line) in self.0.split('\n').enumerate() {
            if row == target.row as usize {
                let mut column = target.column.min(line.len() as u32);
                while !line.is_char_boundary(column as usize) {
                    match bias {
                        Bias::Left => column -= 1,
                        Bias::Right => column += 1,
                    }
                }
                return Point::new(row as u32, column);
            }
        }
        unreachable!()
    }
}

impl sum_tree::Item for Chunk {
    type Summary = TextSummary;

    fn summary(&self) -> Self::Summary {
        TextSummary::from(self.0.as_str())
    }
}

#[derive(Clone, Debug, Default, Eq, PartialEq)]
pub struct TextSummary {
    pub bytes: usize,
    pub lines: Point,
    pub first_line_chars: u32,
    pub last_line_chars: u32,
    pub longest_row: u32,
    pub longest_row_chars: u32,
}

impl<'a> From<&'a str> for TextSummary {
    fn from(text: &'a str) -> Self {
        let mut lines = Point::new(0, 0);
        let mut first_line_chars = 0;
        let mut last_line_chars = 0;
        let mut longest_row = 0;
        let mut longest_row_chars = 0;
        for c in text.chars() {
            if c == '\n' {
                lines.row += 1;
                lines.column = 0;
                last_line_chars = 0;
            } else {
                lines.column += c.len_utf8() as u32;
                last_line_chars += 1;
            }

            if lines.row == 0 {
                first_line_chars = last_line_chars;
            }

            if last_line_chars > longest_row_chars {
                longest_row = lines.row;
                longest_row_chars = last_line_chars;
            }
        }

        TextSummary {
            bytes: text.len(),
            lines,
            first_line_chars,
            last_line_chars,
            longest_row,
            longest_row_chars,
        }
    }
}

impl sum_tree::Summary for TextSummary {
    type Context = ();

    fn add_summary(&mut self, summary: &Self, _: &Self::Context) {
        *self += summary;
    }
}

impl<'a> std::ops::AddAssign<&'a Self> for TextSummary {
    fn add_assign(&mut self, other: &'a Self) {
        let joined_chars = self.last_line_chars + other.first_line_chars;
        if joined_chars > self.longest_row_chars {
            self.longest_row = self.lines.row;
            self.longest_row_chars = joined_chars;
        }
        if other.longest_row_chars > self.longest_row_chars {
            self.longest_row = self.lines.row + other.longest_row;
            self.longest_row_chars = other.longest_row_chars;
        }

        if self.lines.row == 0 {
            self.first_line_chars += other.first_line_chars;
        }

        if other.lines.row == 0 {
            self.last_line_chars += other.first_line_chars;
        } else {
            self.last_line_chars = other.last_line_chars;
        }

        self.bytes += other.bytes;
        self.lines += &other.lines;
    }
}

impl std::ops::AddAssign<Self> for TextSummary {
    fn add_assign(&mut self, other: Self) {
        *self += &other;
    }
}

impl<'a> sum_tree::Dimension<'a, TextSummary> for usize {
    fn add_summary(&mut self, summary: &'a TextSummary, _: &()) {
        *self += summary.bytes;
    }
}

impl<'a> sum_tree::Dimension<'a, TextSummary> for Point {
    fn add_summary(&mut self, summary: &'a TextSummary, _: &()) {
        *self += &summary.lines;
    }
}

fn find_split_ix(text: &str) -> usize {
    let mut ix = text.len() / 2;
    while !text.is_char_boundary(ix) {
        if ix < 2 * CHUNK_BASE {
            ix += 1;
        } else {
            ix = (text.len() / 2) - 1;
            break;
        }
    }
    while !text.is_char_boundary(ix) {
        ix -= 1;
    }

    debug_assert!(ix <= 2 * CHUNK_BASE);
    debug_assert!(text.len() - ix <= 2 * CHUNK_BASE);
    ix
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::random_char_iter::RandomCharIter;
    use rand::prelude::*;
    use std::env;
    use Bias::{Left, Right};

    #[test]
    fn test_all_4_byte_chars() {
        let mut rope = Rope::new();
        let text = "🏀".repeat(256);
        rope.push(&text);
        assert_eq!(rope.text(), text);
    }

    #[gpui::test(iterations = 100)]
    fn test_random(mut rng: StdRng) {
        let operations = env::var("OPERATIONS")
            .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
            .unwrap_or(10);

        let mut expected = String::new();
        let mut actual = Rope::new();
        for _ in 0..operations {
            let end_ix = clip_offset(&expected, rng.gen_range(0..=expected.len()), Right);
            let start_ix = clip_offset(&expected, rng.gen_range(0..=end_ix), Left);
            let len = rng.gen_range(0..=64);
            let new_text: String = RandomCharIter::new(&mut rng).take(len).collect();

            let mut new_actual = Rope::new();
            let mut cursor = actual.cursor(0);
            new_actual.append(cursor.slice(start_ix));
            new_actual.push(&new_text);
            cursor.seek_forward(end_ix);
            new_actual.append(cursor.suffix());
            actual = new_actual;

            expected.replace_range(start_ix..end_ix, &new_text);

            assert_eq!(actual.text(), expected);
            log::info!("text: {:?}", expected);

            for _ in 0..5 {
                let end_ix = clip_offset(&expected, rng.gen_range(0..=expected.len()), Right);
                let start_ix = clip_offset(&expected, rng.gen_range(0..=end_ix), Left);
                assert_eq!(
                    actual.chunks_in_range(start_ix..end_ix).collect::<String>(),
                    &expected[start_ix..end_ix]
                );
            }

            let mut point = Point::new(0, 0);
            for (ix, ch) in expected.char_indices().chain(Some((expected.len(), '\0'))) {
                assert_eq!(actual.to_point(ix), point, "to_point({})", ix);
                assert_eq!(actual.to_offset(point), ix, "to_offset({:?})", point);
                if ch == '\n' {
                    point.row += 1;
                    point.column = 0
                } else {
                    point.column += ch.len_utf8() as u32;
                }
            }

            for _ in 0..5 {
                let end_ix = clip_offset(&expected, rng.gen_range(0..=expected.len()), Right);
                let start_ix = clip_offset(&expected, rng.gen_range(0..=end_ix), Left);
                assert_eq!(
                    actual.cursor(start_ix).summary(end_ix),
                    TextSummary::from(&expected[start_ix..end_ix])
                );
            }
        }
    }

    fn clip_offset(text: &str, mut offset: usize, bias: Bias) -> usize {
        while !text.is_char_boundary(offset) {
            match bias {
                Bias::Left => offset -= 1,
                Bias::Right => offset += 1,
            }
        }
        offset
    }

    impl Rope {
        fn text(&self) -> String {
            let mut text = String::new();
            for chunk in self.chunks.cursor::<()>() {
                text.push_str(&chunk.0);
            }
            text
        }
    }
}
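A compact sketch, not part of this commit, of the editing pattern the randomized test above exercises: an edit is expressed by building a new Rope from slices of the old one via a Cursor, then converting between byte offsets and Points as needed.

// Sketch: replace `range` (byte offsets) in `rope` with `new_text`.
fn splice(rope: &Rope, range: std::ops::Range<usize>, new_text: &str) -> Rope {
    let mut result = Rope::new();
    let mut cursor = rope.cursor(0);
    result.append(cursor.slice(range.start)); // everything before the edit
    result.push(new_text);                    // the replacement text
    cursor.seek_forward(range.end);           // skip the replaced bytes
    result.append(cursor.suffix());           // everything after the edit
    result
}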
75 crates/buffer/src/selection.rs Normal file
@@ -0,0 +1,75 @@
use crate::{Anchor, Buffer, Point, ToOffset as _, ToPoint as _};
use std::{cmp::Ordering, mem, ops::Range};

pub type SelectionSetId = clock::Lamport;
pub type SelectionsVersion = usize;

#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum SelectionGoal {
    None,
    Column(u32),
    ColumnRange { start: u32, end: u32 },
}

#[derive(Clone, Debug, Eq, PartialEq)]
pub struct Selection {
    pub id: usize,
    pub start: Anchor,
    pub end: Anchor,
    pub reversed: bool,
    pub goal: SelectionGoal,
}

impl Selection {
    pub fn head(&self) -> &Anchor {
        if self.reversed {
            &self.start
        } else {
            &self.end
        }
    }

    pub fn set_head(&mut self, buffer: &Buffer, cursor: Anchor) {
        if cursor.cmp(self.tail(), buffer).unwrap() < Ordering::Equal {
            if !self.reversed {
                mem::swap(&mut self.start, &mut self.end);
                self.reversed = true;
            }
            self.start = cursor;
        } else {
            if self.reversed {
                mem::swap(&mut self.start, &mut self.end);
                self.reversed = false;
            }
            self.end = cursor;
        }
    }

    pub fn tail(&self) -> &Anchor {
        if self.reversed {
            &self.end
        } else {
            &self.start
        }
    }

    pub fn point_range(&self, buffer: &Buffer) -> Range<Point> {
        let start = self.start.to_point(buffer);
        let end = self.end.to_point(buffer);
        if self.reversed {
            end..start
        } else {
            start..end
        }
    }

    pub fn offset_range(&self, buffer: &Buffer) -> Range<usize> {
        let start = self.start.to_offset(buffer);
        let end = self.end.to_offset(buffer);
        if self.reversed {
            end..start
        } else {
            start..end
        }
    }
}
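A short sketch, not part of this commit, of the head/tail convention above: the anchors stay ordered (start before end) while reversed records which end carries the cursor.

// Sketch: with `reversed: true`, `head()` is the start anchor and `tail()` the
// end anchor; `set_head()` flips `reversed` as the cursor crosses the tail.
fn reversed_selection_example() {
    let selection = Selection {
        id: 0,
        start: Anchor::min(),
        end: Anchor::max(),
        reversed: true,
        goal: SelectionGoal::None,
    };
    assert!(std::ptr::eq(selection.head(), &selection.start));
    assert!(std::ptr::eq(selection.tail(), &selection.end));
}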
49 crates/buffer/src/syntax_theme.rs Normal file
@@ -0,0 +1,49 @@
use std::collections::HashMap;

use crate::HighlightId;
use gpui::fonts::HighlightStyle;
use serde::Deserialize;

pub struct SyntaxTheme {
    pub(crate) highlights: Vec<(String, HighlightStyle)>,
}

impl SyntaxTheme {
    pub fn new(highlights: Vec<(String, HighlightStyle)>) -> Self {
        Self { highlights }
    }

    pub fn highlight_style(&self, id: HighlightId) -> Option<HighlightStyle> {
        self.highlights
            .get(id.0 as usize)
            .map(|entry| entry.1.clone())
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn highlight_name(&self, id: HighlightId) -> Option<&str> {
        self.highlights.get(id.0 as usize).map(|e| e.0.as_str())
    }
}

impl<'de> Deserialize<'de> for SyntaxTheme {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        let syntax_data: HashMap<String, HighlightStyle> = Deserialize::deserialize(deserializer)?;

        let mut result = Self::new(Vec::new());
        for (key, style) in syntax_data {
            match result
                .highlights
                .binary_search_by(|(needle, _)| needle.cmp(&key))
            {
                Ok(i) | Err(i) => {
                    result.highlights.insert(i, (key, style));
                }
            }
        }

        Ok(result)
    }
}