Get all tests passing
parent 94b034ffc1
commit c56c3dad42
6 changed files with 48 additions and 32 deletions
@@ -1713,6 +1713,7 @@ impl<'a> Iterator for BlockChunks<'a> {
 
             return Some(Chunk {
                 text: unsafe { std::str::from_utf8_unchecked(&NEWLINES[..line_count as usize]) },
+                chars: (1 << line_count) - 1,
                 ..Default::default()
             });
         }
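Note: the new `chars` field appears to be a u128 bitmap in which bit i is set when byte i of the chunk text starts a character; for a run of `line_count` one-byte newlines every byte starts a character, so the low `line_count` bits are all set. A minimal standalone sketch of that arithmetic (not the map code itself):

fn main() {
    // For a chunk of `line_count` one-byte newlines, every byte begins a char,
    // so the "char starts here" bitmap is just the low `line_count` bits.
    let line_count: u32 = 3;
    let chars: u128 = (1 << line_count) - 1; // 0b111
    assert_eq!(chars.count_ones(), line_count);
}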
@@ -1742,17 +1743,26 @@ impl<'a> Iterator for BlockChunks<'a> {
 
         let (mut prefix, suffix) = self.input_chunk.text.split_at(prefix_bytes);
         self.input_chunk.text = suffix;
+        self.input_chunk.tabs >>= prefix_bytes;
+        self.input_chunk.chars >>= prefix_bytes;
 
+        let mut tabs = self.input_chunk.tabs;
+        let mut chars = self.input_chunk.chars;
+
         if self.masked {
             // Not great for multibyte text because to keep cursor math correct we
             // need to have the same number of bytes in the input as output.
-            let chars = prefix.chars().count();
-            let bullet_len = chars;
+            let chars_count = prefix.chars().count();
+            let bullet_len = chars_count;
             prefix = &BULLETS[..bullet_len];
+            chars = (1 << bullet_len) - 1;
+            tabs = 0;
         }
 
         let chunk = Chunk {
             text: prefix,
+            tabs,
+            chars,
             ..self.input_chunk.clone()
         };
 
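Splitting a prefix off the front of a chunk has to realign both per-byte bitmaps with the remaining suffix, which is what the two `>>= prefix_bytes` shifts do; in the masked branch the output becomes `bullet_len` one-byte bullets, so the rebuilt `chars` bitmap is again the low `bullet_len` bits and `tabs` is cleared. A small illustration of the shift under the same bitmap convention (the sample values are mine, not from the source):

fn main() {
    // "ab\tc": tab at byte 2, char starts at bytes 0..=3.
    let mut tabs: u128 = 0b0100;
    let mut chars: u128 = 0b1111;
    let prefix_bytes = 2; // split off "ab"
    tabs >>= prefix_bytes;  // tab now sits at byte 0 of the suffix "\tc"
    chars >>= prefix_bytes; // chars start at bytes 0 and 1 of the suffix
    assert_eq!(tabs, 0b01);
    assert_eq!(chars, 0b11);
}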
@@ -526,6 +526,7 @@ impl FoldMap {
                         },
                         placeholder: Some(TransformPlaceholder {
                             text: ELLIPSIS,
+                            chars: 1,
                             renderer: ChunkRenderer {
                                 id: fold.id,
                                 render: Arc::new(move |cx| {
@@ -1031,6 +1032,7 @@ struct Transform {
 #[derive(Clone, Debug)]
 struct TransformPlaceholder {
     text: &'static str,
+    chars: u128,
     renderer: ChunkRenderer,
 }
 
@@ -1386,6 +1388,7 @@ impl<'a> Iterator for FoldChunks<'a> {
             self.output_offset.0 += placeholder.text.len();
             return Some(Chunk {
                 text: placeholder.text,
+                chars: placeholder.chars,
                 renderer: Some(placeholder.renderer.clone()),
                 ..Default::default()
             });
@@ -1422,8 +1425,16 @@ impl<'a> Iterator for FoldChunks<'a> {
 
             chunk.text = &chunk.text
                 [(self.inlay_offset - buffer_chunk_start).0..(chunk_end - buffer_chunk_start).0];
-            chunk.tabs = chunk.tabs >> (self.inlay_offset - buffer_chunk_start).0;
-            chunk.chars = chunk.chars >> (self.inlay_offset - buffer_chunk_start).0;
+            let bit_end = (chunk_end - buffer_chunk_start).0;
+            let mask = if bit_end >= 128 {
+                u128::MAX
+            } else {
+                (1u128 << bit_end) - 1
+            };
+
+            chunk.tabs = (chunk.tabs >> (self.inlay_offset - buffer_chunk_start).0) & mask;
+            chunk.chars = (chunk.chars >> (self.inlay_offset - buffer_chunk_start).0) & mask;
+
             if chunk_end == transform_end {
                 self.transform_cursor.next(&());
 
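The `bit_end >= 128` guard matters because shifting a `u128` by 128 or more overflows (it panics in debug builds), so a window that covers the whole bitmap has to fall back to `u128::MAX`. The same guard written as a standalone helper (the name is illustrative, not from this change):

/// Mask that keeps the low `n` bits of a u128 bitmap; `n` may be up to 128.
fn low_bits(n: u32) -> u128 {
    if n >= 128 { u128::MAX } else { (1u128 << n) - 1 }
}

fn main() {
    assert_eq!(low_bits(3), 0b111);
    assert_eq!(low_bits(128), u128::MAX);
}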
@@ -258,6 +258,7 @@ impl<'a> Iterator for InlayChunks<'a> {
                     *chunk = self.buffer_chunks.next().unwrap();
                 }
 
+                // todo! create a tabs/chars bitmask here and pass it in chunk
                 let (prefix, suffix) = chunk.text.split_at(
                     chunk
                         .text
@@ -333,6 +334,7 @@ impl<'a> Iterator for InlayChunks<'a> {
 
                 self.output_offset.0 += chunk.len();
 
+                // todo! figure out how to get tabs here
                 Chunk {
                     text: chunk,
                     highlight_style,
@@ -322,20 +322,6 @@ impl TabSnapshot {
                 to_next_stop,
             );
 
-            // let expected = self.test_to_fold_point(output, bias);
-
-            // if result != expected {
-            //     let text = self.buffer_snapshot().text();
-            //     let bias = if bias == Bias::Left { "left" } else { "right" };
-            //     panic!(
-            //         "text: {text}, output: {}, bias: {bias}, result: {:?},{},{}, expected: {expected:?}",
-            //         output.row(),
-            //         result.0,
-            //         result.1,
-            //         result.2
-            //     );
-            // }
-
             result
         }
 
@@ -1030,7 +1016,6 @@ mod tests {
     #[gpui::test]
     fn test_tab_stop_cursor_utf8(cx: &mut gpui::App) {
         let text = "\tfoo\tbarbarbar\t\tbaz\n";
-        let text = "rikR~${H25ao'\\@r/<`&bjrzg(uQG})kl#!^r>Z\\27X$mmh\"tz;fq@F>=<Oi+R4;0Xt09,_!WxDZD&Rs/\"%5o7\\Kr`fIJR(.a]2SQHTZJJ)(^cx,%FfwrGkd,u&00&!;\t";
         let buffer = MultiBuffer::build_simple(text, cx);
         let buffer_snapshot = buffer.read(cx).snapshot(cx);
         let (_, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone());
@@ -1210,28 +1195,28 @@ mod tests {
         );
         let mut cursor = TabStopCursor::new(chunks);
         assert!(cursor.seek(0).is_none());
-        let mut tab_stops = Vec::new();
 
-        let mut all_tab_stops = Vec::new();
+        let mut expected_tab_stops = Vec::new();
         let mut byte_offset = 0;
         let mut char_offset = 0;
-        for ch in buffer.read(cx).snapshot(cx).text().chars() {
-            // byte_offset += ch.len_utf8();
+        for ch in fold_snapshot.chars_at(FoldPoint::new(0, 0)) {
             byte_offset += ch.len_utf8() as u32;
             char_offset += 1;
 
             if ch == '\t' {
-                all_tab_stops.push(TabStop {
+                expected_tab_stops.push(TabStop {
                     byte_offset,
                     char_offset,
                 });
             }
         }
 
+        let mut actual_tab_stops = Vec::new();
         while let Some(tab_stop) = cursor.seek(u32::MAX) {
-            tab_stops.push(tab_stop);
+            actual_tab_stops.push(tab_stop);
         }
-        pretty_assertions::assert_eq!(tab_stops.as_slice(), all_tab_stops.as_slice(),);
+        pretty_assertions::assert_eq!(actual_tab_stops.as_slice(), expected_tab_stops.as_slice(),);
 
         assert_eq!(cursor.byte_offset(), byte_offset);
     }
@@ -1347,19 +1332,19 @@ impl<'a> TabStopCursor<'a> {
             let chunk_distance = chunk.text.len() as u32 - chunk_position;
             if chunk_distance + distance_traversed >= distance {
                 let overshoot = distance_traversed.abs_diff(distance);
-                self.byte_offset += overshoot;
+
+                self.byte_offset += overshoot;
                 self.char_offset += get_char_offset(
                     chunk_position..(chunk_position + overshoot).saturating_sub(1).min(127),
                     chunk.chars,
                 );
 
                 self.current_chunk = Some((chunk, chunk_position + overshoot));
 
                 return None;
             }
 
             self.byte_offset += chunk_distance;
-            // todo! calculate char offset
            self.char_offset += get_char_offset(
                 chunk_position..(chunk_position + chunk_distance).saturating_sub(1).min(127),
                 chunk.chars,
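`get_char_offset` is not shown in this diff; from the call sites it presumably counts how many characters start within a bit range of the `chars` bitmap. A hypothetical sketch of such a helper, treating the range end as inclusive the way the `.saturating_sub(1).min(127)` at the call sites suggests:

use std::ops::Range;

// Hypothetical stand-in for the helper used above: popcount of the `chars`
// bitmap restricted to bits range.start..=range.end (both clamped to 127).
fn get_char_offset(range: Range<u32>, chars: u128) -> u32 {
    let start = range.start.min(127);
    let end = range.end.min(127);
    if end < start {
        return 0;
    }
    let width = end - start + 1; // at most 128
    let mask = if width >= 128 {
        u128::MAX
    } else {
        ((1u128 << width) - 1) << start
    };
    (chars & mask).count_ones()
}

fn main() {
    // chars bitmap 0b1011: chars start at bytes 0, 1 and 3.
    assert_eq!(get_char_offset(0..1, 0b1011), 2); // bytes 0..=1
    assert_eq!(get_char_offset(2..3, 0b1011), 1); // bytes 2..=3
}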
@@ -4597,10 +4597,18 @@ impl<'a> Iterator for BufferChunks<'a> {
                 }
             }
 
+            // todo! write a test for this
            let slice =
                 &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
-            let tabs = tabs >> (chunk_start - self.chunks.offset());
-            let chars_map = chars_map >> (chunk_start - self.chunks.offset());
+            let bit_end = chunk_end - self.chunks.offset();
+            let mask = if bit_end >= 128 {
+                u128::MAX
+            } else {
+                (1u128 << bit_end) - 1
+            };
+
+            let tabs = (tabs >> (chunk_start - self.chunks.offset())) & mask;
+            let chars_map = (chars_map >> (chunk_start - self.chunks.offset())) & mask;
 
             self.range.start = chunk_end;
             if self.range.start == self.chunks.offset() + chunk.len() {
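The `// todo! write a test for this` could be covered along these lines: slice a window out of a chunk, then check that the shifted-and-masked bitmap only keeps bits that fall inside the window. A sketch under the same bitmap convention (the sample values are made up, and this is not the crate's test harness):

fn main() {
    // "a\tb\tc": tabs at bytes 1 and 3; take the window covering bytes 1..4.
    let tabs: u128 = 0b01010;
    let (chunk_start, chunk_end) = (1u32, 4u32);
    let bit_end = chunk_end - chunk_start; // 3 bits fall inside the window
    let mask = if bit_end >= 128 { u128::MAX } else { (1u128 << bit_end) - 1 };
    let window_tabs = (tabs >> chunk_start) & mask;
    // Only the tabs at window-relative bytes 0 and 2 survive.
    assert_eq!(window_tabs, 0b101);
}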
@@ -793,9 +793,9 @@ impl<'a> Chunks<'a> {
 
         // Shift the tabs to align with our slice window
         let shifted_tabs = chunk.tabs >> chunk_start_offset;
-        let shifted_chars_utf16 = chunk.chars();
+        let shifted_chars = chunk.chars() >> chunk_start_offset;
 
-        Some((slice_text, shifted_tabs, shifted_chars_utf16))
+        Some((slice_text, shifted_tabs, shifted_chars))
     }
 
     pub fn lines(self) -> Lines<'a> {
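The fix in this last hunk mirrors the tabs handling one line above it: once the text is sliced at `chunk_start_offset`, the chars bitmap must be shifted by the same amount, otherwise its bits still describe byte positions of the unsliced chunk. In miniature (example string and values are mine):

fn main() {
    // "héllo": char starts at bytes 0, 1, 3, 4, 5 ('é' spans bytes 1..=2).
    let chars: u128 = 0b111011;
    let chunk_start_offset = 3; // slice the chunk down to "llo"
    let shifted_chars = chars >> chunk_start_offset;
    assert_eq!(shifted_chars, 0b111); // every remaining byte starts a char
}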