Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
54 changes: 52 additions & 2 deletions src/wrap/inline.rs
Original file line number Diff line number Diff line change
Expand Up @@ -150,6 +150,30 @@ pub(super) fn attach_punctuation_to_previous_line(
false
}

/// Pushes `tokens[start..end]` into `buffer`, gluing any carried-over
/// whitespace onto the front of the span's first token so that deferred
/// separators survive the line break.
///
/// `carried_whitespace` is drained (left empty) whenever the span is
/// non-empty and a carry was pending.
fn push_span_with_carry(
    buffer: &mut LineBuffer,
    tokens: &[String],
    start: usize,
    end: usize,
    carried_whitespace: &mut String,
) {
    // An empty span pushes nothing; any pending carry stays pending.
    if end <= start {
        return;
    }

    if !carried_whitespace.is_empty() {
        // Drain the carry and prepend it to the first token of the span,
        // emitting the two as a single fused token.
        let mut glued = std::mem::take(carried_whitespace);
        glued.push_str(&tokens[start]);
        buffer.push_token(&glued);

        // The rest of the span (if any) goes in the normal way.
        let rest = start + 1;
        if rest < end {
            buffer.push_span(tokens, rest, end);
        }
    } else {
        // No pending whitespace: forward the whole span untouched.
        buffer.push_span(tokens, start, end);
    }
}

pub(super) fn wrap_preserving_code(text: &str, width: usize) -> Vec<String> {
let tokens = tokenize::segment_inline(text);
if tokens.is_empty() {
Expand All @@ -158,18 +182,31 @@ pub(super) fn wrap_preserving_code(text: &str, width: usize) -> Vec<String> {

let mut lines = Vec::new();
let mut buffer = LineBuffer::new();
let mut carried_whitespace = String::new();
let mut i = 0;

while i < tokens.len() {
let (group_end, group_width) = determine_token_span(&tokens, i);
let span_is_whitespace = tokens[i..group_end]
.iter()
.all(|tok| is_whitespace_token(tok));

if span_is_whitespace && !carried_whitespace.is_empty() && group_end != tokens.len() {
for tok in &tokens[i..group_end] {
carried_whitespace.push_str(tok);
}
i = group_end;
continue;
}

if attach_punctuation_to_previous_line(lines.as_mut_slice(), buffer.text(), &tokens[i]) {
carried_whitespace.clear();
i += 1;
continue;
}

if buffer.width() + group_width <= width {
buffer.push_span(&tokens, i, group_end);
push_span_with_carry(&mut buffer, &tokens, i, group_end, &mut carried_whitespace);
i = group_end;
continue;
}
Expand All @@ -185,10 +222,23 @@ pub(super) fn wrap_preserving_code(text: &str, width: usize) -> Vec<String> {
}

buffer.flush_into(&mut lines);
buffer.push_non_whitespace_span(&tokens, i, group_end);
if span_is_whitespace {
for tok in &tokens[i..group_end] {
carried_whitespace.push_str(tok);
}
i = group_end;
continue;
}

push_span_with_carry(&mut buffer, &tokens, i, group_end, &mut carried_whitespace);
i = group_end;
}

if !carried_whitespace.is_empty() {
buffer.push_token(carried_whitespace.as_str());
carried_whitespace.clear();
}

buffer.flush_into(&mut lines);
lines
}
12 changes: 0 additions & 12 deletions src/wrap/line_buffer.rs
Original file line number Diff line number Diff line change
Expand Up @@ -55,18 +55,6 @@ impl LineBuffer {
}
}

/// Appends every non-whitespace token in `tokens[start..end]` to the buffer,
/// silently dropping whitespace-only tokens.
pub(crate) fn push_non_whitespace_span(&mut self, tokens: &[String], start: usize, end: usize) {
    tokens[start..end]
        .iter()
        .filter(|tok| !tok.chars().all(char::is_whitespace))
        .for_each(|tok| self.push_token(tok.as_str()));

    // Only non-whitespace was appended, so there is no split point to record.
    self.last_split = None;
}

pub(crate) fn flush_into(&mut self, lines: &mut Vec<String>) {
if self.text.is_empty() {
return;
Expand Down
17 changes: 17 additions & 0 deletions src/wrap/tests.rs
Original file line number Diff line number Diff line change
Expand Up @@ -178,6 +178,23 @@ fn wrap_preserving_code_glues_punctuation_after_code() {
assert_eq!(lines, vec!["line with `code`!".to_string()]);
}

// Verifies that whitespace deferred across a wrap point ("carried") is
// re-attached to the start of the following line, so that concatenating the
// wrapped lines reproduces the input exactly.
#[rstest]
#[case("alpha beta", 5, &["alpha", " beta"])]
// NOTE(review): the first two cases render identically here — the second was
// presumably meant to use multi-space input that got collapsed in transit;
// confirm against the repository.
#[case("alpha beta", 5, &["alpha", " beta"])]
#[case("alpha `beta`", 5, &["alpha", " `beta`"])]
fn wrap_preserving_code_preserves_carry_whitespace(
    #[case] input: &str,
    #[case] width: usize,
    #[case] expected: &[&str],
) {
    let lines = wrap_preserving_code(input, width);
    assert_eq!(
        lines,
        expected.iter().map(|&s| s.to_string()).collect::<Vec<_>>()
    );
    // Round-trip property: wrapping must neither drop nor add characters.
    assert_eq!(lines.concat(), input);
}

#[test]
fn wrap_text_preserves_hyphenated_words() {
let input = vec!["A word that is very-long-word indeed".to_string()];
Expand Down
Loading