Make get_rust_byte_offsets more generic so it can be used for contents.
commit 33f4614d28 (parent 6c197c376a)
@@ -29,14 +29,10 @@ fn is_slice_of(parent: &str, child: &str) -> bool {
 /// Get the byte offset into source that the rust object exists at.
 ///
 /// These offsets are zero-based unlike the elisp ones.
-fn get_rust_byte_offsets<'b, 's, S: StandardProperties<'s> + ?Sized>(
-    original_document: &'s str,
-    rust_ast_node: &'b S,
-) -> (usize, usize) {
-    let rust_object_source = rust_ast_node.get_source();
-    debug_assert!(is_slice_of(original_document, rust_object_source));
-    let offset = rust_object_source.as_ptr() as usize - original_document.as_ptr() as usize;
-    let end = offset + rust_object_source.len();
+fn get_rust_byte_offsets<'b, 's>(original_document: &'s str, subset: &'b str) -> (usize, usize) {
+    debug_assert!(is_slice_of(original_document, subset));
+    let offset = subset.as_ptr() as usize - original_document.as_ptr() as usize;
+    let end = offset + subset.len();
     (offset, end)
 }
 
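The hunk above drops the StandardProperties bound so that any slice borrowed from the document (for example an element's contents) can be passed directly. A minimal runnable sketch of the new pointer-arithmetic signature; the is_slice_of body, the sample document, and the byte indices below are illustrative assumptions, not code from this commit:

fn is_slice_of(parent: &str, child: &str) -> bool {
    // Assumed implementation: true when `child` points into `parent`'s buffer.
    let parent_start = parent.as_ptr() as usize;
    let child_start = child.as_ptr() as usize;
    child_start >= parent_start && child_start + child.len() <= parent_start + parent.len()
}

/// Get the byte offset into source that the rust object exists at (0-based).
fn get_rust_byte_offsets<'b, 's>(original_document: &'s str, subset: &'b str) -> (usize, usize) {
    debug_assert!(is_slice_of(original_document, subset));
    let offset = subset.as_ptr() as usize - original_document.as_ptr() as usize;
    let end = offset + subset.len();
    (offset, end)
}

fn main() {
    let document = "* Heading\nSome contents.\n";
    // Any slice borrowed from `document` works, e.g. a contents slice.
    let contents = &document[10..24];
    let (begin, end) = get_rust_byte_offsets(document, contents);
    assert_eq!((begin, end), (10, 24));
    assert_eq!(&document[begin..end], "Some contents.");
}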
@@ -54,7 +50,7 @@ pub(crate) fn compare_standard_properties<
     Ok(())
 }
 
-pub(crate) fn assert_name<S: AsRef<str>>(
+fn assert_name<S: AsRef<str>>(
     emacs: &Token<'_>,
     name: S,
 ) -> Result<(), Box<dyn std::error::Error>> {
@@ -77,7 +73,7 @@ pub(crate) fn assert_name<S: AsRef<str>>(
 /// Assert that the character ranges defined by upstream org-mode's :standard-properties match the slices in Organic's StandardProperties.
 ///
 /// This does **not** handle plain text because plain text is a special case.
-pub(crate) fn assert_bounds<'b, 's, S: StandardProperties<'s> + ?Sized>(
+fn assert_bounds<'b, 's, S: StandardProperties<'s> + ?Sized>(
     original_document: &'s str,
     emacs: &'b Token<'s>,
     rust: &'b S,
@@ -89,7 +85,7 @@ pub(crate) fn assert_bounds<'b, 's, S: StandardProperties<'s> + ?Sized>(
             .ok_or("Token should have a begin.")?,
         standard_properties.end.ok_or("Token should have an end.")?,
     );
-    let (rust_begin, rust_end) = get_rust_byte_offsets(original_document, rust); // 0-based
+    let (rust_begin, rust_end) = get_rust_byte_offsets(original_document, rust.get_source()); // 0-based
     let rust_begin_char_offset = original_document[..rust_begin].chars().count() + 1; // 1-based
     let rust_end_char_offset =
         rust_begin_char_offset + original_document[rust_begin..rust_end].chars().count(); // 1-based
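Since org-mode reports 1-based character positions while the Rust side produces 0-based byte offsets, assert_bounds converts between the two after calling get_rust_byte_offsets. A small sketch of that conversion, mirroring the lines in the hunk above; the document text and offsets are invented for illustration:

fn main() {
    let original_document = "héllo wörld";
    // Pretend the parsed node's source slice is "wörld": bytes 7..13, 0-based.
    let (rust_begin, rust_end) = (7usize, 13usize);
    // Count characters (not bytes) up to each offset and shift to 1-based.
    let rust_begin_char_offset = original_document[..rust_begin].chars().count() + 1; // 1-based
    let rust_end_char_offset =
        rust_begin_char_offset + original_document[rust_begin..rust_end].chars().count(); // 1-based
    assert_eq!(&original_document[rust_begin..rust_end], "wörld");
    assert_eq!((rust_begin_char_offset, rust_end_char_offset), (7, 12));
}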