Merge branch 'clippy'

commit e53140426f

Makefile (8 lines changed)
@@ -37,6 +37,14 @@ clean:
 format:
 > $(MAKE) -C docker/cargo_fmt run

+.PHONY: clippy
+clippy:
+> cargo clippy --no-deps --all-targets --all-features -- -D warnings
+
+.PHONY: clippyfix
+clippyfix:
+> cargo clippy --fix --lib -p organic --all-features
+
 .PHONY: test
 test:
 > cargo test --no-default-features --features compare --no-fail-fast --lib --test test_loader -- --test-threads $(TESTJOBS)
build.rs (4 lines changed)

@@ -14,7 +14,7 @@ use walkdir::WalkDir;
 fn main() {
 let out_dir = env::var("OUT_DIR").unwrap();
 let destination = Path::new(&out_dir).join("tests.rs");
-let mut test_file = File::create(&destination).unwrap();
+let mut test_file = File::create(destination).unwrap();

 // Re-generate the tests if any org-mode files change
 println!("cargo:rerun-if-changed=org_mode_samples");
@@ -51,7 +51,7 @@ fn write_test(test_file: &mut File, test: &walkdir::DirEntry) {
 .to_lowercase()
 .strip_suffix(".org")
 .expect("Should have .org extension")
-.replace("/", "_");
+.replace('/', "_");

 write!(
 test_file,
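The two build.rs fixes above are common clippy suggestions: pass a char rather than a one-character string to replace (single_char_pattern), and drop a borrow the callee does not need because File::create takes AsRef<Path> (needless_borrow). A minimal sketch of both patterns, using made-up values rather than code from this repository:

    fn slug(name: &str) -> String {
        // Before: name.replace("/", "_") — a &str pattern for a single character.
        // After: a char pattern states exactly what is matched.
        name.replace('/', "_").to_lowercase()
    }

    fn main() {
        // File::create(destination) works with an owned PathBuf directly,
        // so File::create(&destination) carried a redundant borrow.
        assert_eq!(slug("org/samples"), "org_samples");
    }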
@@ -15,23 +15,21 @@ mod init_tracing;
 #[cfg(not(feature = "tracing"))]
 fn main() -> Result<(), Box<dyn std::error::Error>> {
 let rt = tokio::runtime::Runtime::new()?;
-let result = rt.block_on(async {
+rt.block_on(async {
 let main_body_result = main_body().await;
 main_body_result
-});
-result
+})
 }

 #[cfg(feature = "tracing")]
 fn main() -> Result<(), Box<dyn std::error::Error>> {
 let rt = tokio::runtime::Runtime::new()?;
-let result = rt.block_on(async {
+rt.block_on(async {
 init_telemetry()?;
 let main_body_result = main_body().await;
 shutdown_telemetry()?;
 main_body_result
-});
-result
+})
 }

 #[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
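Both main() variants above remove a let_and_return: the result of block_on was bound to a variable only to be returned on the next line. A standalone sketch of the lint, with a hypothetical compute() standing in for the project's main_body():

    fn compute() -> i32 {
        42
    }

    // Before: clippy::let_and_return flags the needless binding.
    fn before() -> i32 {
        let result = compute();
        result
    }

    // After: return the expression directly.
    fn after() -> i32 {
        compute()
    }

    fn main() {
        assert_eq!(before(), after());
    }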
@@ -21,23 +21,21 @@ mod init_tracing;
 #[cfg(not(feature = "tracing"))]
 fn main() -> Result<ExitCode, Box<dyn std::error::Error>> {
 let rt = tokio::runtime::Runtime::new()?;
-let result = rt.block_on(async {
+rt.block_on(async {
 let main_body_result = main_body().await;
 main_body_result
-});
-result
+})
 }

 #[cfg(feature = "tracing")]
 fn main() -> Result<ExitCode, Box<dyn std::error::Error>> {
 let rt = tokio::runtime::Runtime::new()?;
-let result = rt.block_on(async {
+rt.block_on(async {
 init_telemetry()?;
 let main_body_result = main_body().await;
 shutdown_telemetry()?;
 main_body_result
-});
-result
+})
 }

 #[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
@@ -108,10 +106,9 @@ fn compare_howard_abrams() -> impl Iterator<Item = TestConfig> {
 let layer = layer.chain(compare_group("clojure-yesql-xp", || {
 compare_all_org_document("/foreign_documents/howardabrams/clojure-yesql-xp")
 }));
-let layer = layer.chain(compare_group("veep", || {
+layer.chain(compare_group("veep", || {
 compare_all_org_document("/foreign_documents/howardabrams/veep")
-}));
-layer
+}))
 }

 fn compare_group<N: Into<String>, F: Fn() -> I, I: Iterator<Item = TestConfig>>(
@@ -195,7 +192,6 @@ struct ResultLayer {
 #[derive(Debug)]
 struct SingleFileResult {
 name: String,
 file_path: PathBuf,
 status: TestStatus,
 }

@@ -225,7 +221,6 @@ impl SingleFile {
 let result = silent_compare_on_file(&self.file_path).await;
 Ok(SingleFileResult {
 name: self.name,
 file_path: self.file_path,
 status: if let Ok(true) = result {
 TestStatus::Pass
 } else {
@@ -56,11 +56,11 @@ impl<'b, 's> ComparePropertiesResult<'b, 's> {
 /// Do no comparison.
 ///
 /// This is for when you want to acknowledge that a field exists in the emacs token, but you do not have any validation for it when using the compare_properties!() macro. Ideally, this should be kept to a minimum since this represents untested values.
-pub(crate) fn compare_noop<'b, 's, 'x, R, RG>(
+pub(crate) fn compare_noop<'b, 's, R, RG>(
 _source: &'s str,
 _emacs: &'b Token<'s>,
 _rust_node: R,
-_emacs_field: &'x str,
+_emacs_field: &str,
 _rust_value_getter: RG,
 ) -> Result<ComparePropertiesResult<'b, 's>, Box<dyn std::error::Error>> {
 Ok(ComparePropertiesResult::NoChange)
@@ -69,18 +69,16 @@ pub(crate) fn compare_noop<'b, 's, 'x, R, RG>(
 /// Do no comparison.
 ///
 /// This is for when you want to acknowledge that a field exists in the emacs token, but you do not have any validation for it when using the compare_properties!() macro. Ideally, this should be kept to a minimum since this represents untested values.
-pub(crate) fn compare_identity() -> () {
-()
-}
+pub(crate) fn compare_identity() {}

 /// Assert that the emacs value is always nil or absent.
 ///
 /// This is usually used for fields which, in my testing, are always nil. Using this compare function instead of simply doing a compare_noop will enable us to be alerted when we finally come across an org-mode document that has a value other than nil for the property.
-pub(crate) fn compare_property_always_nil<'b, 's, 'x, R, RG>(
+pub(crate) fn compare_property_always_nil<'b, 's, R, RG>(
 _source: &'s str,
 emacs: &'b Token<'s>,
 _rust_node: R,
-emacs_field: &'x str,
+emacs_field: &str,
 _rust_value_getter: RG,
 ) -> Result<ComparePropertiesResult<'b, 's>, Box<dyn std::error::Error>> {
 let value = get_property(emacs, emacs_field)?;
@@ -99,7 +97,6 @@ pub(crate) fn compare_property_always_nil<'b, 's, 'x, R, RG>(
 pub(crate) fn compare_property_quoted_string<
 'b,
 's,
-'x,
 R,
 RV: AsRef<str> + std::fmt::Debug,
 RG: Fn(R) -> Option<RV>,
@@ -107,12 +104,12 @@ pub(crate) fn compare_property_quoted_string<
 _source: &'s str,
 emacs: &'b Token<'s>,
 rust_node: R,
-emacs_field: &'x str,
+emacs_field: &str,
 rust_value_getter: RG,
 ) -> Result<ComparePropertiesResult<'b, 's>, Box<dyn std::error::Error>> {
 let value = get_property_quoted_string(emacs, emacs_field)?;
 let rust_value = rust_value_getter(rust_node);
-if rust_value.as_ref().map(|s| s.as_ref()) != value.as_ref().map(String::as_str) {
+if rust_value.as_ref().map(|s| s.as_ref()) != value.as_deref() {
 let this_status = DiffStatus::Bad;
 let message = Some(format!(
 "{} mismatch (emacs != rust) {:?} != {:?}",
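Replacing value.as_ref().map(String::as_str) with value.as_deref() is clippy's option_as_ref_deref suggestion; both produce an Option<&str> view of an Option<String>. A minimal sketch independent of the project's types:

    fn main() {
        let value: Option<String> = Some("CAPTION".to_string());

        // Before: borrow the Option, then convert each String to &str.
        let verbose: Option<&str> = value.as_ref().map(String::as_str);

        // After: as_deref does the same conversion in one call.
        let concise: Option<&str> = value.as_deref();

        assert_eq!(verbose, concise);
    }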
@@ -180,7 +177,6 @@ where
 pub(crate) fn compare_property_list_of_quoted_string<
 'b,
 's,
-'x,
 R,
 RV: AsRef<str> + std::fmt::Debug,
 RI: Iterator<Item = RV>,
@@ -189,7 +185,7 @@ pub(crate) fn compare_property_list_of_quoted_string<
 _source: &'s str,
 emacs: &'b Token<'s>,
 rust_node: R,
-emacs_field: &'x str,
+emacs_field: &str,
 rust_value_getter: RG,
 ) -> Result<ComparePropertiesResult<'b, 's>, Box<dyn std::error::Error>> {
 let value = get_property(emacs, emacs_field)?
@@ -271,10 +267,7 @@ pub(crate) fn compare_property_set_of_quoted_string<
 .map(unquote)
 .collect::<Result<Vec<_>, _>>()?;
 let value: BTreeSet<&str> = value.iter().map(|e| e.as_str()).collect();
-let mismatched: Vec<_> = value
-.symmetric_difference(&rust_value)
-.map(|val| *val)
-.collect();
+let mismatched: Vec<_> = value.symmetric_difference(&rust_value).copied().collect();
 if !mismatched.is_empty() {
 let this_status = DiffStatus::Bad;
 let message = Some(format!(
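Collecting the symmetric difference through .map(|val| *val) only dereferences Copy values, which clippy rewrites as .copied(). A self-contained sketch of the same shape:

    use std::collections::BTreeSet;

    fn main() {
        let left: BTreeSet<&str> = ["a", "b"].into_iter().collect();
        let right: BTreeSet<&str> = ["b", "c"].into_iter().collect();

        // Before: dereference each &&str by hand.
        let verbose: Vec<&str> = left.symmetric_difference(&right).map(|val| *val).collect();

        // After: .copied() expresses the same thing.
        let concise: Vec<&str> = left.symmetric_difference(&right).copied().collect();

        assert_eq!(verbose, concise);
    }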
@ -290,7 +283,6 @@ pub(crate) fn compare_property_set_of_quoted_string<
|
||||
pub(crate) fn compare_property_optional_pair<
|
||||
'b,
|
||||
's,
|
||||
'x,
|
||||
R,
|
||||
RV: AsRef<str> + std::fmt::Debug,
|
||||
ROV: AsRef<str> + std::fmt::Debug,
|
||||
@ -299,7 +291,7 @@ pub(crate) fn compare_property_optional_pair<
|
||||
_source: &'s str,
|
||||
emacs: &'b Token<'s>,
|
||||
rust_node: R,
|
||||
emacs_field: &'x str,
|
||||
emacs_field: &str,
|
||||
rust_value_getter: RG,
|
||||
) -> Result<ComparePropertiesResult<'b, 's>, Box<dyn std::error::Error>> {
|
||||
let value = get_property(emacs, emacs_field)?
|
||||
@ -388,11 +380,11 @@ pub(crate) fn compare_property_optional_pair<
|
||||
Ok(ComparePropertiesResult::NoChange)
|
||||
}
|
||||
|
||||
pub(crate) fn compare_property_boolean<'b, 's, 'x, R, RG: Fn(R) -> bool>(
|
||||
pub(crate) fn compare_property_boolean<'b, 's, R, RG: Fn(R) -> bool>(
|
||||
_source: &'s str,
|
||||
emacs: &'b Token<'s>,
|
||||
rust_node: R,
|
||||
emacs_field: &'x str,
|
||||
emacs_field: &str,
|
||||
rust_value_getter: RG,
|
||||
) -> Result<ComparePropertiesResult<'b, 's>, Box<dyn std::error::Error>> {
|
||||
// get_property already converts nil to None.
|
||||
@ -476,14 +468,7 @@ where
|
||||
match (value, rust_value) {
|
||||
(None, None) => {}
|
||||
(Some(el), None)
|
||||
if el.len() == 1
|
||||
&& el.into_iter().all(|t| {
|
||||
if let Ok(r#""""#) = t.as_atom() {
|
||||
true
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}) => {}
|
||||
if el.len() == 1 && el.iter().all(|t| matches!(t.as_atom(), Ok(r#""""#))) => {}
|
||||
(None, rv @ Some(_)) | (Some(_), rv @ None) => {
|
||||
let this_status = DiffStatus::Bad;
|
||||
let message = Some(format!(
|
||||
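The rewritten match arm uses the matches! macro, clippy's suggestion (match_like_matches_macro) whenever an if let or match only produces true or false. A standalone sketch of the same shape:

    fn is_empty_quoted(atom: Result<&str, ()>) -> bool {
        // Before: if let Ok(r#""""#) = atom { true } else { false }
        // After: matches! collapses the boolean match.
        matches!(atom, Ok(r#""""#))
    }

    fn main() {
        assert!(is_empty_quoted(Ok(r#""""#)));
        assert!(!is_empty_quoted(Ok("foo")));
        assert!(!is_empty_quoted(Err(())));
    }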
@@ -560,13 +545,13 @@ where
 };
 let mut full_status: Vec<DiffEntry<'b, 's>> = Vec::with_capacity(outer_rust_list.len());

-for (kw_e, kw_r) in outer_emacs_list.into_iter().zip(outer_rust_list) {
+for (kw_e, kw_r) in outer_emacs_list.iter().zip(outer_rust_list) {
 let kw_e = kw_e.as_list()?;
 let child_status_length = kw_r.1.len() + kw_r.0.as_ref().map(|opt| opt.len()).unwrap_or(0);
 let mut child_status: Vec<DiffEntry<'b, 's>> = Vec::with_capacity(child_status_length);
 if let Some(or) = &kw_r.0 {
 // if optional value
-let mut kw_e = kw_e.into_iter();
+let mut kw_e = kw_e.iter();
 // First element is a list representing the mandatory value.
 if let Some(val_e) = kw_e.next() {
 let el = val_e.as_list()?;
@@ -578,7 +563,7 @@ where
 ));
 return Ok(ComparePropertiesResult::SelfChange(this_status, message));
 }
-for (e, r) in el.into_iter().zip(kw_r.1.iter()) {
+for (e, r) in el.iter().zip(kw_r.1.iter()) {
 child_status.push(compare_ast_node(source, e, r.into())?);
 }
 } else {
@@ -227,7 +227,7 @@ impl<'b, 's> DiffResult<'b, 's> {
 status_text = status_text,
 name = self.name,
 char_offset = preceding_text.chars().count() + 1,
-message = self.message.as_ref().map(|m| m.as_str()).unwrap_or("")
+message = self.message.as_deref().unwrap_or("")
 );
 for child in self.children.iter() {
 child.print_indented(indentation + 1, original_document)?;
@@ -330,8 +330,8 @@ pub(crate) fn artificial_diff_scope<'b, 's>(
 .into())
 }

-pub(crate) fn artificial_owned_diff_scope<'b, 's, 'x>(
-name: &'x str,
+pub(crate) fn artificial_owned_diff_scope<'b, 's>(
+name: &str,
 children: Vec<DiffEntry<'b, 's>>,
 ) -> Result<DiffEntry<'b, 's>, Box<dyn std::error::Error>> {
 Ok(DiffLayer {
@@ -426,14 +426,9 @@ pub(crate) fn compare_ast_node<'b, 's>(

 // PlainText is a special case because upstream Org-Mode uses relative values for the bounds in plaintext rather than absolute so the below checks do not account for that.
 if let AstNode::PlainText(_) = rust {
-} else {
-match compare_standard_properties(source, emacs, &rust) {
-Err(err) => {
-compare_result.status = DiffStatus::Bad;
-compare_result.message = Some(err.to_string())
-}
-Ok(_) => {}
-}
+} else if let Err(err) = compare_standard_properties(source, emacs, &rust) {
+compare_result.status = DiffStatus::Bad;
+compare_result.message = Some(err.to_string())
 }

 Ok(compare_result.into())
@@ -495,7 +490,7 @@ fn _compare_document<'b, 's>(
 .map(EmacsField::Required),
 (
 EmacsField::Required(":path"),
-|r| r.path.as_ref().map(|p| p.to_str()).flatten(),
+|r| r.path.as_ref().and_then(|p| p.to_str()),
 compare_property_quoted_string
 ),
 (
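The :path getter swaps .map(...).flatten() for .and_then(...), clippy's map_flatten suggestion for chaining a step that itself returns an Option. A minimal sketch:

    fn main() {
        let path: Option<std::path::PathBuf> = Some(std::path::PathBuf::from("/tmp/doc.org"));

        // Before: map yields Option<Option<&str>>, so a flatten is needed.
        let verbose: Option<&str> = path.as_ref().map(|p| p.to_str()).flatten();

        // After: and_then chains the fallible step directly.
        let concise: Option<&str> = path.as_ref().and_then(|p| p.to_str());

        assert_eq!(verbose, concise);
    }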
@@ -1,3 +1,4 @@
+#[allow(clippy::module_inception)]
 mod compare;
 mod compare_field;
 mod diff;
@@ -113,24 +113,21 @@ fn is_slice_of(parent: &str, child: &str) -> bool {
 /// Get a slice of the string that was consumed in a parser using the original input to the parser and the remaining input after the parser.
 fn get_consumed<'s>(input: &'s str, remaining: &'s str) -> &'s str {
 debug_assert!(is_slice_of(input, remaining));
-let source = {
-let offset = remaining.as_ptr() as usize - input.as_ptr() as usize;
-&input[..offset]
-};
-source.into()
+let offset = remaining.as_ptr() as usize - input.as_ptr() as usize;
+&input[..offset]
 }

 pub(crate) fn unquote(text: &str) -> Result<String, Box<dyn std::error::Error>> {
 let mut out: Vec<u8> = Vec::with_capacity(text.len());
-if !text.starts_with(r#"""#) {
+if !text.starts_with('"') {
 return Err("Quoted text does not start with quote.".into());
 }
-if !text.ends_with(r#"""#) {
+if !text.ends_with('"') {
 return Err("Quoted text does not end with quote.".into());
 }
 let interior_text = &text[1..(text.len() - 1)];
 let mut state = ParseState::Normal;
-for current_char in interior_text.bytes().into_iter() {
+for current_char in interior_text.bytes() {
 // Check to see if octal finished
 state = match (state, current_char) {
 (ParseState::Octal(octal), b'0'..=b'7') if octal.len() < MAX_OCTAL_LENGTH => {
@@ -229,11 +226,9 @@ fn atom<'s>(input: &'s str) -> Res<&'s str, Token<'s>> {

 #[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
 fn unquoted_atom<'s>(input: &'s str) -> Res<&'s str, Token<'s>> {
-let (remaining, body) = take_till1(|c| match c {
-' ' | '\t' | '\r' | '\n' | ')' | ']' => true,
-_ => false,
-})(input)?;
-Ok((remaining, Token::Atom(body.into())))
+let (remaining, body) =
+take_till1(|c| matches!(c, ' ' | '\t' | '\r' | '\n' | ')' | ']'))(input)?;
+Ok((remaining, Token::Atom(body)))
 }

 #[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
@@ -264,22 +259,19 @@ fn quoted_atom<'s>(input: &'s str) -> Res<&'s str, Token<'s>> {
 }
 let (remaining, _) = tag(r#"""#)(remaining)?;
 let source = get_consumed(input, remaining);
-Ok((remaining, Token::Atom(source.into())))
+Ok((remaining, Token::Atom(source)))
 }

 #[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
 fn hash_notation<'s>(input: &'s str) -> Res<&'s str, Token<'s>> {
 let (remaining, _) = tag("#<")(input)?;
-let (remaining, _body) = take_till1(|c| match c {
-'>' => true,
-_ => false,
-})(remaining)?;
+let (remaining, _body) = take_till1(|c| matches!(c, '>'))(remaining)?;
 let (remaining, _) = tag(">")(remaining)?;
 let source = get_consumed(input, remaining);
-Ok((remaining, Token::Atom(source.into())))
+Ok((remaining, Token::Atom(source)))
 }

-fn text_with_properties<'s>(input: &'s str) -> Res<&'s str, Token<'s>> {
+fn text_with_properties(input: &str) -> Res<&str, Token<'_>> {
 let (remaining, _) = tag("#(")(input)?;
 let (remaining, (text, props)) = delimited(
 multispace0,
@@ -348,10 +340,7 @@ mod tests {
 let input = r#" (foo "b(a)r" baz ) "#;
 let (remaining, parsed) = sexp(input).expect("Parse the input");
 assert_eq!(remaining, "");
-assert!(match parsed {
-Token::List(_) => true,
-_ => false,
-});
+assert!(matches!(parsed, Token::List(_)));
 let children = match parsed {
 Token::List(children) => children,
 _ => panic!("Should be a list."),
@@ -364,14 +353,14 @@ mod tests {
 r#"foo"#
 );
 assert_eq!(
-match children.iter().nth(1) {
+match children.get(1) {
 Some(Token::Atom(body)) => *body,
 _ => panic!("Second child should be an atom."),
 },
 r#""b(a)r""#
 );
 assert_eq!(
-match children.iter().nth(2) {
+match children.get(2) {
 Some(Token::Atom(body)) => *body,
 _ => panic!("Third child should be an atom."),
 },
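The test assertions above also switch from children.iter().nth(1) to children.get(1), clippy's iter_nth suggestion for indexing a slice or Vec. A tiny sketch:

    fn main() {
        let children = vec!["foo", "bar", "baz"];

        // Before: build an iterator just to reach the second element.
        let via_nth = children.iter().nth(1);

        // After: index directly; the result is the same Option.
        let via_get = children.get(1);

        assert_eq!(via_nth, via_get);
    }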
@@ -54,8 +54,8 @@ pub(crate) fn compare_standard_properties<
 Ok(())
 }

-pub(crate) fn assert_name<'b, 's, S: AsRef<str>>(
-emacs: &'b Token<'s>,
+pub(crate) fn assert_name<S: AsRef<str>>(
+emacs: &Token<'_>,
 name: S,
 ) -> Result<(), Box<dyn std::error::Error>> {
 let name = name.as_ref();
@@ -90,9 +90,9 @@ pub(crate) fn assert_bounds<'b, 's, S: StandardProperties<'s> + ?Sized>(
 standard_properties.end.ok_or("Token should have an end.")?,
 );
 let (rust_begin, rust_end) = get_rust_byte_offsets(original_document, rust); // 0-based
-let rust_begin_char_offset = (&original_document[..rust_begin]).chars().count() + 1; // 1-based
+let rust_begin_char_offset = original_document[..rust_begin].chars().count() + 1; // 1-based
 let rust_end_char_offset =
-rust_begin_char_offset + (&original_document[rust_begin..rust_end]).chars().count(); // 1-based
+rust_begin_char_offset + original_document[rust_begin..rust_end].chars().count(); // 1-based
 if rust_begin_char_offset != begin || rust_end_char_offset != end {
 Err(format!("Rust bounds (in chars) ({rust_begin}, {rust_end}) do not match emacs bounds ({emacs_begin}, {emacs_end})", rust_begin = rust_begin_char_offset, rust_end = rust_end_char_offset, emacs_begin=begin, emacs_end=end))?;
 }
@@ -113,21 +113,18 @@ struct EmacsStandardProperties {
 post_blank: Option<usize>,
 }

-fn get_emacs_standard_properties<'b, 's>(
-emacs: &'b Token<'s>,
+fn get_emacs_standard_properties(
+emacs: &Token<'_>,
 ) -> Result<EmacsStandardProperties, Box<dyn std::error::Error>> {
 let children = emacs.as_list()?;
-let attributes_child = children
-.iter()
-.nth(1)
-.ok_or("Should have an attributes child.")?;
+let attributes_child = children.get(1).ok_or("Should have an attributes child.")?;
 let attributes_map = attributes_child.as_map()?;
 let standard_properties = attributes_map.get(":standard-properties");
 Ok(if standard_properties.is_some() {
 let mut std_props = standard_properties
 .expect("if statement proves its Some")
 .as_vector()?
-.into_iter();
+.iter();
 let begin = maybe_token_to_usize(std_props.next())?;
 let post_affiliated = maybe_token_to_usize(std_props.next())?;
 let contents_begin = maybe_token_to_usize(std_props.next())?;
@@ -143,16 +140,13 @@ fn get_emacs_standard_properties<'b, 's>(
 post_blank,
 }
 } else {
-let begin = maybe_token_to_usize(attributes_map.get(":begin").map(|token| *token))?;
-let end = maybe_token_to_usize(attributes_map.get(":end").map(|token| *token))?;
-let contents_begin =
-maybe_token_to_usize(attributes_map.get(":contents-begin").map(|token| *token))?;
-let contents_end =
-maybe_token_to_usize(attributes_map.get(":contents-end").map(|token| *token))?;
-let post_blank =
-maybe_token_to_usize(attributes_map.get(":post-blank").map(|token| *token))?;
+let begin = maybe_token_to_usize(attributes_map.get(":begin").copied())?;
+let end = maybe_token_to_usize(attributes_map.get(":end").copied())?;
+let contents_begin = maybe_token_to_usize(attributes_map.get(":contents-begin").copied())?;
+let contents_end = maybe_token_to_usize(attributes_map.get(":contents-end").copied())?;
+let post_blank = maybe_token_to_usize(attributes_map.get(":post-blank").copied())?;
 let post_affiliated =
-maybe_token_to_usize(attributes_map.get(":post-affiliated").map(|token| *token))?;
+maybe_token_to_usize(attributes_map.get(":post-affiliated").copied())?;
 EmacsStandardProperties {
 begin,
 post_affiliated,
@@ -170,62 +164,57 @@ fn maybe_token_to_usize(
 Ok(token
 .map(|token| token.as_atom())
 .map_or(Ok(None), |r| r.map(Some))?
-.map(|val| {
+.and_then(|val| {
 if val == "nil" {
 None
 } else {
 Some(val.parse::<usize>())
 }
 })
-.flatten() // Outer option is whether or not the param exists, inner option is whether or not it is nil
 .map_or(Ok(None), |r| r.map(Some))?)
 }

 /// Get a named property from the emacs token.
 ///
 /// Returns Ok(None) if value is nil or absent.
-pub(crate) fn get_property<'b, 's, 'x>(
+pub(crate) fn get_property<'b, 's>(
 emacs: &'b Token<'s>,
-key: &'x str,
+key: &str,
 ) -> Result<Option<&'b Token<'s>>, Box<dyn std::error::Error>> {
 let children = emacs.as_list()?;
-let attributes_child = children
-.iter()
-.nth(1)
-.ok_or("Should have an attributes child.")?;
+let attributes_child = children.get(1).ok_or("Should have an attributes child.")?;
 let attributes_map = attributes_child.as_map()?;
-let prop = attributes_map.get(key).map(|token| *token);
-match prop.map(|token| token.as_atom()) {
-Some(Ok("nil")) => return Ok(None),
-_ => {}
-};
+let prop = attributes_map.get(key).copied();
+if let Some(Ok("nil")) = prop.map(Token::as_atom) {
+return Ok(None);
+}
 Ok(prop)
 }

 /// Get a named property containing an unquoted atom from the emacs token.
 ///
 /// Returns None if key is not found.
-pub(crate) fn get_property_unquoted_atom<'b, 's, 'x>(
-emacs: &'b Token<'s>,
-key: &'x str,
+pub(crate) fn get_property_unquoted_atom<'s>(
+emacs: &Token<'s>,
+key: &str,
 ) -> Result<Option<&'s str>, Box<dyn std::error::Error>> {
-Ok(get_property(emacs, key)?
+get_property(emacs, key)?
 .map(Token::as_atom)
-.map_or(Ok(None), |r| r.map(Some))?)
+.map_or(Ok(None), |r| r.map(Some))
 }

 /// Get a named property containing an quoted string from the emacs token.
 ///
 /// Returns None if key is not found.
-pub(crate) fn get_property_quoted_string<'b, 's, 'x>(
-emacs: &'b Token<'s>,
-key: &'x str,
+pub(crate) fn get_property_quoted_string(
+emacs: &Token<'_>,
+key: &str,
 ) -> Result<Option<String>, Box<dyn std::error::Error>> {
-Ok(get_property(emacs, key)?
+get_property(emacs, key)?
 .map(Token::as_atom)
 .map_or(Ok(None), |r| r.map(Some))?
 .map(unquote)
-.map_or(Ok(None), |r| r.map(Some))?)
+.map_or(Ok(None), |r| r.map(Some))
 }

 /// Get a named property containing an unquoted numeric value.
@@ -302,8 +291,8 @@ where
 Ok(())
 }

-pub(crate) fn assert_no_children<'b, 's>(
-emacs: &'b Token<'s>,
+pub(crate) fn assert_no_children(
+emacs: &Token<'_>,
 this_status: &mut DiffStatus,
 message: &mut Option<String>,
 ) -> Result<(), Box<dyn std::error::Error>> {
@@ -332,7 +321,7 @@ where
 let rust_key = rust_key.as_ref();
 let rust_value = rust_value.as_ref();
 let emacs_value = get_property_quoted_string(emacs, rust_key)?;
-if Some(rust_value) != emacs_value.as_ref().map(String::as_str) {
+if Some(rust_value) != emacs_value.as_deref() {
 let this_status = DiffStatus::Bad;
 let message = Some(format!(
 "{} mismatch (emacs != rust) {:?} != {:?}",
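Several helpers above replace attributes_map.get(key).map(|token| *token) with .copied(), the Option counterpart of the iterator change shown earlier. A small sketch with a plain HashMap standing in for the attribute map:

    use std::collections::HashMap;

    fn main() {
        let mut attributes: HashMap<&str, &str> = HashMap::new();
        attributes.insert(":begin", "1");

        // Before: manually dereference the &&str that get() returns.
        let verbose: Option<&str> = attributes.get(":begin").map(|token| *token);

        // After: Option::copied performs the dereference.
        let concise: Option<&str> = attributes.get(":begin").copied();

        assert_eq!(verbose, concise);
    }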
@@ -1,15 +1,15 @@
 use super::global_settings::EntityDefinition;

-pub(crate) const DEFAULT_ORG_ELEMENT_PARSED_KEYWORDS: [&'static str; 1] = ["CAPTION"];
+pub(crate) const DEFAULT_ORG_ELEMENT_PARSED_KEYWORDS: [&str; 1] = ["CAPTION"];

-pub(crate) const DEFAULT_ORG_ELEMENT_DUAL_KEYWORDS: [&'static str; 2] = ["CAPTION", "RESULTS"];
+pub(crate) const DEFAULT_ORG_ELEMENT_DUAL_KEYWORDS: [&str; 2] = ["CAPTION", "RESULTS"];

-pub(crate) const DEFAULT_ORG_ELEMENT_AFFILIATED_KEYWORDS: [&'static str; 13] = [
+pub(crate) const DEFAULT_ORG_ELEMENT_AFFILIATED_KEYWORDS: [&str; 13] = [
 "CAPTION", "DATA", "HEADER", "HEADERS", "LABEL", "NAME", "PLOT", "RESNAME", "RESULT",
 "RESULTS", "SOURCE", "SRCNAME", "TBLNAME",
 ];

-pub(crate) const DEFAULT_ORG_ELEMENT_KEYWORD_TRANSLATION_ALIST: [(&'static str, &'static str); 8] = [
+pub(crate) const DEFAULT_ORG_ELEMENT_KEYWORD_TRANSLATION_ALIST: [(&str, &str); 8] = [
 ("DATA", "NAME"),
 ("LABEL", "NAME"),
 ("RESNAME", "NAME"),
@@ -20,7 +20,7 @@ pub(crate) const DEFAULT_ORG_ELEMENT_KEYWORD_TRANSLATION_ALIST: [(&'static str,
 ("HEADERS", "HEADER"),
 ];

-pub(crate) const DEFAULT_ORG_LINK_PARAMETERS: [&'static str; 23] = [
+pub(crate) const DEFAULT_ORG_LINK_PARAMETERS: [&str; 23] = [
 "id",
 "eww",
 "rmail",
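Dropping &'static from the constants above is clippy's redundant_static_lifetimes fix: const and static items already have the 'static lifetime, so spelling it out adds nothing. A one-line sketch:

    // Before: const PARSED_KEYWORDS: [&'static str; 1] = ["CAPTION"];
    // After: the lifetime is implied for consts.
    const PARSED_KEYWORDS: [&str; 1] = ["CAPTION"];

    fn main() {
        assert_eq!(PARSED_KEYWORDS[0], "CAPTION");
    }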
@@ -112,23 +112,18 @@ impl<'g, 'r, 's> Context<'g, 'r, 's> {
 let mut current_class_filter = ExitClass::Gamma;
 for current_node in self.iter_context() {
 let context_element = current_node.get_data();
-match context_element {
-ContextElement::ExitMatcherNode(exit_matcher) => {
-if exit_matcher.class as u32 <= current_class_filter as u32 {
-current_class_filter = exit_matcher.class;
-let local_result = (exit_matcher.exit_matcher)(&current_node, i);
-if local_result.is_ok() {
-return local_result;
-}
+if let ContextElement::ExitMatcherNode(exit_matcher) = context_element {
+if exit_matcher.class as u32 <= current_class_filter as u32 {
+current_class_filter = exit_matcher.class;
+let local_result = (exit_matcher.exit_matcher)(&current_node, i);
+if local_result.is_ok() {
+return local_result;
+}
 }
-_ => {}
-};
 }
 }
 // TODO: Make this a specific error instead of just a generic MyError
-return Err(nom::Err::Error(CustomError::MyError(MyError(
-"NoExit".into(),
-))));
+return Err(nom::Err::Error(CustomError::MyError(MyError("NoExit"))));
 }

 /// Indicates if elements should consume the whitespace after them.
@@ -140,11 +135,8 @@ impl<'g, 'r, 's> Context<'g, 'r, 's> {

 fn _should_consume_trailing_whitespace(&self) -> Option<bool> {
 for current_node in self.iter() {
-match current_node {
-ContextElement::ConsumeTrailingWhitespace(should) => {
-return Some(*should);
-}
-_ => {}
+if let ContextElement::ConsumeTrailingWhitespace(should) = current_node {
+return Some(*should);
 }
 }
 None
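Both loops above collapse a match whose only other arm was _ => {} into an if let, per clippy::single_match. A minimal sketch of the rewrite with a simplified enum:

    enum ContextElement {
        ConsumeTrailingWhitespace(bool),
        Other,
    }

    fn find_flag(elements: &[ContextElement]) -> Option<bool> {
        for element in elements {
            // Before:
            // match element {
            //     ContextElement::ConsumeTrailingWhitespace(should) => return Some(*should),
            //     _ => {}
            // }

            // After: if let says the same thing without the empty arm.
            if let ContextElement::ConsumeTrailingWhitespace(should) = element {
                return Some(*should);
            }
        }
        None
    }

    fn main() {
        let elements = [ContextElement::Other, ContextElement::ConsumeTrailingWhitespace(true)];
        assert_eq!(find_flag(&elements), Some(true));
    }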
@@ -20,10 +20,9 @@ impl FileAccessInterface for LocalFileAccessInterface {
 fn read_file(&self, path: &str) -> Result<String, std::io::Error> {
 let final_path = self
 .working_directory
-.as_ref()
-.map(PathBuf::as_path)
+.as_deref()
 .map(|pb| pb.join(path))
 .unwrap_or_else(|| PathBuf::from(path));
-Ok(std::fs::read_to_string(final_path)?)
+std::fs::read_to_string(final_path)
 }
 }
@@ -126,14 +126,10 @@ impl<'g, 's> Default for GlobalSettings<'g, 's> {
 }
 }

-#[derive(Debug, Clone, PartialEq)]
+#[derive(Debug, Clone, PartialEq, Default)]
 pub enum HeadlineLevelFilter {
 Odd,
+
+#[default]
 OddEven,
 }
-
-impl Default for HeadlineLevelFilter {
-fn default() -> Self {
-HeadlineLevelFilter::OddEven
-}
-}
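The HeadlineLevelFilter change replaces a hand-written Default impl with #[derive(Default)] plus a #[default] variant attribute, which is what clippy::derivable_impls points at. The resulting shape, reduced to a runnable sketch:

    #[derive(Debug, Clone, PartialEq, Default)]
    pub enum HeadlineLevelFilter {
        Odd,
        #[default]
        OddEven,
    }

    fn main() {
        assert_eq!(HeadlineLevelFilter::default(), HeadlineLevelFilter::OddEven);
        assert_ne!(HeadlineLevelFilter::Odd, HeadlineLevelFilter::default());
    }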
@@ -50,7 +50,7 @@ impl<'a, T> Iterator for Iter<'a, T> {

 fn next(&mut self) -> Option<Self::Item> {
 let ret = self.next.map(|link| link.get_data());
-self.next = self.next.map(|link| link.get_parent()).flatten();
+self.next = self.next.and_then(|link| link.get_parent());
 ret
 }
 }
@@ -64,7 +64,7 @@ impl<'a, T> Iterator for IterList<'a, T> {

 fn next(&mut self) -> Option<Self::Item> {
 let ret = self.next;
-self.next = self.next.map(|this| this.get_parent()).flatten();
+self.next = self.next.and_then(|link| link.get_parent());
 ret
 }
 }
@@ -2,6 +2,7 @@ use crate::error::Res;
 use crate::parser::OrgSource;

 mod constants;
+#[allow(clippy::module_inception)]
 mod context;
 mod exiting;
 mod file_access_interface;
@@ -30,4 +31,5 @@ pub use global_settings::GlobalSettings;
 pub use global_settings::HeadlineLevelFilter;
 pub use global_settings::DEFAULT_TAB_WIDTH;
 pub(crate) use list::List;
+pub(crate) use parser_with_context::bind_context;
 pub(crate) use parser_with_context::parser_with_context;
@@ -4,3 +4,10 @@ macro_rules! parser_with_context {
 };
 }
 pub(crate) use parser_with_context;
+
+macro_rules! bind_context {
+($target:expr, $context:expr) => {
+|i| $target($context, i)
+};
+}
+pub(crate) use bind_context;
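The new bind_context! macro expands to a closure that fixes the context argument of a parser, and the rest of the diff uses it in place of parser_with_context!(...)(...). A toy sketch of the idea with deliberately simplified types (the real Context and parser signatures are more involved):

    macro_rules! bind_context {
        ($target:expr, $context:expr) => {
            |i| $target($context, i)
        };
    }

    // A stand-in "parser" that needs a context plus its input.
    fn element(context: &str, input: &str) -> (String, usize) {
        (format!("{context}: {input}"), input.len())
    }

    fn main() {
        let context = "document";
        // bind_context! leaves a single-argument closure, ready to hand to combinators.
        let matcher = bind_context!(element, context);
        assert_eq!(matcher("hello"), ("document: hello".to_string(), 5));
    }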
@@ -1,3 +1,4 @@
+#[allow(clippy::module_inception)]
 mod error;
 pub(crate) use error::CustomError;
 pub(crate) use error::MyError;
@@ -5,7 +5,7 @@ use tracing_subscriber::prelude::__tracing_subscriber_SubscriberExt;
 #[cfg(feature = "tracing")]
 use tracing_subscriber::util::SubscriberInitExt;

-const SERVICE_NAME: &'static str = "organic";
+const SERVICE_NAME: &str = "organic";

 // Despite the obvious verbosity that fully-qualifying everything causes, in these functions I am fully-qualifying everything relating to tracing. This is because the tracing feature involves multiple libraries working together and so I think it is beneficial to see which libraries contribute which bits.

@@ -4,6 +4,7 @@
 #![feature(is_sorted)]
 #![feature(test)]
 // TODO: #![warn(missing_docs)]
+#![allow(clippy::bool_assert_comparison)] // Sometimes you want the long form because its easier to see at a glance.

 extern crate test;

src/main.rs (12 lines changed)

@@ -23,13 +23,12 @@ fn main() -> Result<(), Box<dyn std::error::Error>> {
 #[cfg(feature = "tracing")]
 fn main() -> Result<(), Box<dyn std::error::Error>> {
 let rt = tokio::runtime::Runtime::new()?;
-let result = rt.block_on(async {
+rt.block_on(async {
 init_telemetry()?;
 let main_body_result = main_body();
 shutdown_telemetry()?;
 main_body_result
-});
-result
+})
 }

 #[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
@@ -70,10 +69,9 @@ fn run_parse_on_file<P: AsRef<Path>>(org_path: P) -> Result<(), Box<dyn std::err
 let file_access_interface = LocalFileAccessInterface {
 working_directory: Some(parent_directory.to_path_buf()),
 };
-let global_settings = {
-let mut global_settings = GlobalSettings::default();
-global_settings.file_access = &file_access_interface;
-global_settings
+let global_settings = GlobalSettings {
+file_access: &file_access_interface,
+..Default::default()
 };
 let rust_parsed = parse_with_settings(org_contents, &global_settings)?;
 println!("{:#?}", rust_parsed);
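run_parse_on_file now builds GlobalSettings with struct update syntax instead of mutating a default value field by field (clippy::field_reassign_with_default). A reduced sketch with stand-in field types:

    #[derive(Debug, Default)]
    struct GlobalSettings<'a> {
        file_access: Option<&'a str>, // simplified stand-in for the real field
        tab_width: usize,
    }

    fn main() {
        let interface = "local";

        // Before: construct a default value, then overwrite one field.
        let verbose = {
            let mut settings = GlobalSettings::default();
            settings.file_access = Some(interface);
            settings
        };

        // After: name the field once and take the rest from Default.
        let concise = GlobalSettings {
            file_access: Some(interface),
            ..Default::default()
        };

        assert_eq!(verbose.file_access, concise.file_access);
        assert_eq!(verbose.tab_width, concise.tab_width);
    }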
@@ -16,7 +16,7 @@ use nom::sequence::tuple;

 use super::object_parser::standard_set_object;
 use super::util::confine_context;
-use crate::context::parser_with_context;
+use crate::context::bind_context;
 use crate::context::Context;
 use crate::context::ContextElement;
 use crate::context::GlobalSettings;
@@ -64,7 +64,7 @@ where
 eof,
 )),
 |(_, _, objects, _, _)| objects,
-)))(kw.key.into())
+)))(kw.key)
 .expect("Parser should always succeed.");
 ret.insert(
 translated_name,
@@ -85,8 +85,9 @@ where
 map_parser(
 recognize(many_till(anychar, peek(tuple((tag("]"), eof))))),
 confine_context(|i| {
-all_consuming(many0(parser_with_context!(standard_set_object)(
-&initial_context,
+all_consuming(many0(bind_context!(
+standard_set_object,
+&initial_context
 )))(i)
 }),
 ),
@@ -98,11 +99,11 @@ where
 .expect("Object parser should always succeed.");

 // TODO: This should be omitting footnote references
-let (_remaining, objects) =
-all_consuming(many0(parser_with_context!(standard_set_object)(
-&initial_context,
-)))(kw.value.into())
-.expect("Object parser should always succeed.");
+let (_remaining, objects) = all_consuming(many0(bind_context!(
+standard_set_object,
+&initial_context
+)))(kw.value.into())
+.expect("Object parser should always succeed.");

 let entry_per_keyword_list = ret
 .entry(translated_name)
@@ -121,7 +122,7 @@ where

 fn translate_name<'g, 's>(global_settings: &'g GlobalSettings<'g, 's>, name: &'s str) -> String {
 let name_until_optval = name
-.split_once("[")
+.split_once('[')
 .map(|(before, _after)| before)
 .unwrap_or(name);
 for (src, dst) in global_settings.element_keyword_translation_alist {
@@ -66,8 +66,7 @@ where
 }

 let (remaining, _ws) = space0(remaining)?;
-let (remaining, (value, (call, inside_header, arguments, end_header))) =
-consumed(babel_call_value)(remaining)?;
+let (remaining, (value, babel_call_value)) = consumed(babel_call_value)(remaining)?;
 let (remaining, _ws) = tuple((space0, org_line_ending))(remaining)?;

 let (remaining, _trailing_ws) =
@@ -83,33 +82,36 @@ where
 affiliated_keywords,
 ),
 value: Into::<&str>::into(value).trim_end(),
-call: call.map(Into::<&str>::into),
-inside_header: inside_header.map(Into::<&str>::into),
-arguments: arguments.map(Into::<&str>::into),
-end_header: end_header.map(Into::<&str>::into),
+call: babel_call_value.call.map(Into::<&str>::into),
+inside_header: babel_call_value.inside_header.map(Into::<&str>::into),
+arguments: babel_call_value.arguments.map(Into::<&str>::into),
+end_header: babel_call_value.end_header.map(Into::<&str>::into),
 },
 ))
 }

+#[derive(Debug)]
+struct BabelCallValue<'s> {
+call: Option<OrgSource<'s>>,
+inside_header: Option<OrgSource<'s>>,
+arguments: Option<OrgSource<'s>>,
+end_header: Option<OrgSource<'s>>,
+}
+
 #[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
-fn babel_call_value<'s>(
-input: OrgSource<'s>,
-) -> Res<
-OrgSource<'s>,
-(
-Option<OrgSource<'s>>,
-Option<OrgSource<'s>>,
-Option<OrgSource<'s>>,
-Option<OrgSource<'s>>,
-),
-> {
+fn babel_call_value<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, BabelCallValue<'s>> {
 let (remaining, call) = opt(babel_call_call)(input)?;
 let (remaining, inside_header) = opt(inside_header)(remaining)?;
 let (remaining, arguments) = opt(arguments)(remaining)?;
 let (remaining, end_header) = opt(end_header)(remaining)?;
 Ok((
 remaining,
-(call, inside_header, arguments.flatten(), end_header),
+BabelCallValue {
+call,
+inside_header,
+arguments: arguments.flatten(),
+end_header,
+},
 ))
 }

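babel_call_value now returns a named BabelCallValue struct instead of a four-element tuple of identical Option types, the usual response to clippy::type_complexity; the call site reads the fields by name instead of by position. A reduced sketch of the same refactor (the parsing logic here is invented for illustration):

    #[derive(Debug, PartialEq)]
    struct BabelCallValue<'s> {
        call: Option<&'s str>,
        arguments: Option<&'s str>,
    }

    // Before: fn parse(input: &str) -> (Option<&str>, Option<&str>) — the meaning
    // of each tuple slot lives only in the caller's head.
    fn parse(input: &str) -> BabelCallValue<'_> {
        let mut parts = input.splitn(2, ' ');
        BabelCallValue {
            call: parts.next(),
            arguments: parts.next(),
        }
    }

    fn main() {
        let value = parse("square(4)");
        assert_eq!(value.call, Some("square(4)"));
        assert_eq!(value.arguments, None);
    }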
@@ -205,6 +205,7 @@ fn _global_suffix_end<'b, 'g, 'r, 's>(
 #[cfg(test)]
 mod tests {
 use super::*;
+use crate::context::bind_context;
 use crate::context::Context;
 use crate::context::GlobalSettings;
 use crate::context::List;
@@ -219,7 +220,7 @@ mod tests {
 let global_settings = GlobalSettings::default();
 let initial_context = ContextElement::document_context();
 let initial_context = Context::new(&global_settings, List::new(&initial_context));
-let paragraph_matcher = parser_with_context!(element(true))(&initial_context);
+let paragraph_matcher = bind_context!(element(true), &initial_context);
 let (remaining, first_paragraph) = paragraph_matcher(input).expect("Parse first paragraph");
 let first_paragraph = match first_paragraph {
 Element::Paragraph(paragraph) => paragraph,
@@ -200,7 +200,7 @@ where
 let (remaining, output) = inner(input)?;
 if remaining.get_bracket_depth() - pre_bracket_depth != 0 {
 return Err(nom::Err::Error(CustomError::MyError(MyError(
-"UnbalancedBrackets".into(),
+"UnbalancedBrackets",
 ))));
 }
 Ok((remaining, output))
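Throughout the rest of this commit, error sites change MyError("...".into()) to MyError("..."), which suggests the error type now borrows a static message instead of allocating a String. A hedged sketch of that kind of change; the real CustomError/MyError definitions are not shown in this diff:

    // Hypothetical before: struct MyError(String); every construction allocates.
    // Hypothetical after: a &'static str message needs no allocation.
    #[derive(Debug)]
    struct MyError(&'static str);

    fn check(depth: i32) -> Result<(), MyError> {
        if depth != 0 {
            return Err(MyError("UnbalancedBrackets"));
        }
        Ok(())
    }

    fn main() {
        assert!(check(1).is_err());
        assert!(check(0).is_ok());
    }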
@@ -36,7 +36,7 @@ pub(crate) fn comment<'b, 'g, 'r, 's>(
 ) -> Res<OrgSource<'s>, Comment<'s>> {
 if immediate_in_section(context, "comment") {
 return Err(nom::Err::Error(CustomError::MyError(MyError(
-"Cannot nest objects of the same element".into(),
+"Cannot nest objects of the same element",
 ))));
 }
 let parser_context = ContextElement::Context("comment");
@@ -104,6 +104,7 @@ pub(crate) fn detect_comment<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, ()>
 #[cfg(test)]
 mod tests {
 use super::*;
+use crate::context::bind_context;
 use crate::context::Context;
 use crate::context::ContextElement;
 use crate::context::GlobalSettings;
@@ -119,7 +120,7 @@ mod tests {
 let global_settings = GlobalSettings::default();
 let initial_context = ContextElement::document_context();
 let initial_context = Context::new(&global_settings, List::new(&initial_context));
-let comment_matcher = parser_with_context!(comment)(&initial_context);
+let comment_matcher = bind_context!(comment, &initial_context);
 let (remaining, first_comment) = comment_matcher(input).expect("Parse first comment");
 assert_eq!(
 Into::<&str>::into(remaining),
@@ -11,6 +11,7 @@ use super::in_buffer_settings::scan_for_in_buffer_settings;
 use super::org_source::OrgSource;
 use super::section::zeroth_section;
 use super::util::get_consumed;
+use crate::context::bind_context;
 use crate::context::parser_with_context;
 use crate::context::Context;
 use crate::context::ContextElement;
@@ -30,7 +31,7 @@ use crate::types::Object;
 ///
 /// This is a main entry point for Organic. It will parse the full contents of the input string as an org-mode document without an underlying file attached.
 #[allow(dead_code)]
-pub fn parse<'s>(input: &'s str) -> Result<Document<'s>, Box<dyn std::error::Error>> {
+pub fn parse(input: &str) -> Result<Document<'_>, Box<dyn std::error::Error>> {
 parse_file_with_settings::<&Path>(input, &GlobalSettings::default(), None)
 }

@@ -40,10 +41,10 @@ pub fn parse<'s>(input: &'s str) -> Result<Document<'s>, Box<dyn std::error::Err
 ///
 /// file_path is not used for reading the file contents. It is only used for determining the document category and filling in the path attribute on the Document.
 #[allow(dead_code)]
-pub fn parse_file<'s, P: AsRef<Path>>(
-input: &'s str,
+pub fn parse_file<P: AsRef<Path>>(
+input: &str,
 file_path: Option<P>,
-) -> Result<Document<'s>, Box<dyn std::error::Error>> {
+) -> Result<Document<'_>, Box<dyn std::error::Error>> {
 parse_file_with_settings(input, &GlobalSettings::default(), file_path)
 }

@@ -77,7 +78,7 @@ pub fn parse_file_with_settings<'g, 's, P: AsRef<Path>>(
 let initial_context = Context::new(global_settings, List::new(&initial_context));
 let wrapped_input = OrgSource::new(input);
 let mut doc =
-all_consuming(parser_with_context!(document_org_source)(&initial_context))(wrapped_input)
+all_consuming(bind_context!(document_org_source, &initial_context))(wrapped_input)
 .map_err(|err| err.to_string())
 .map(|(_remaining, parsed_document)| parsed_document)?;
 if let Some(file_path) = file_path {
@@ -101,10 +102,7 @@
 ///
 /// This will not prevent additional settings from being learned during parsing, for example when encountering a "#+TODO".
 #[allow(dead_code)]
-fn document<'b, 'g, 'r, 's>(
-context: RefContext<'b, 'g, 'r, 's>,
-input: &'s str,
-) -> Res<&'s str, Document<'s>> {
+fn document<'s>(context: RefContext<'_, '_, '_, 's>, input: &'s str) -> Res<&'s str, Document<'s>> {
 let (remaining, doc) = document_org_source(context, input.into()).map_err(convert_error)?;
 Ok((Into::<&str>::into(remaining), doc))
 }
@@ -137,8 +135,7 @@ fn document_org_source<'b, 'g, 'r, 's>(
 scan_for_in_buffer_settings(setup_file.into()).map_err(|err| {
 eprintln!("{}", err);
 nom::Err::Error(CustomError::MyError(MyError(
-"TODO: make this take an owned string so I can dump err.to_string() into it."
-.into(),
+"TODO: make this take an owned string so I can dump err.to_string() into it.",
 )))
 })?;
 final_settings.extend(setup_file_settings);
@@ -148,8 +145,7 @@ fn document_org_source<'b, 'g, 'r, 's>(
 .map_err(|err| {
 eprintln!("{}", err);
 nom::Err::Error(CustomError::MyError(MyError(
-"TODO: make this take an owned string so I can dump err.to_string() into it."
-.into(),
+"TODO: make this take an owned string so I can dump err.to_string() into it.",
 )))
 })?;
 let new_context = context.with_global_settings(&new_settings);
@@ -49,7 +49,7 @@ where
 {
 if immediate_in_section(context, "drawer") {
 return Err(nom::Err::Error(CustomError::MyError(MyError(
-"Cannot nest objects of the same element".into(),
+"Cannot nest objects of the same element",
 ))));
 }
 start_of_line(remaining)?;
@@ -55,7 +55,7 @@ where
 {
 if immediate_in_section(context, "dynamic block") {
 return Err(nom::Err::Error(CustomError::MyError(MyError(
-"Cannot nest objects of the same element".into(),
+"Cannot nest objects of the same element",
 ))));
 }

@@ -79,10 +79,7 @@ where
 let parser_context = context.with_additional_node(&contexts[0]);
 let parser_context = parser_context.with_additional_node(&contexts[1]);
 let parser_context = parser_context.with_additional_node(&contexts[2]);
-let parameters = match parameters {
-Some((_ws, parameters)) => Some(parameters),
-None => None,
-};
+let parameters = parameters.map(|(_ws, parameters)| parameters);
 let element_matcher = parser_with_context!(element(true))(&parser_context);
 let exit_matcher = parser_with_context!(exit_matcher_parser)(&parser_context);
 not(exit_matcher)(remaining)?;
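The parameters handling above turns a match that only re-wraps the Some case into Option::map, clippy's manual_map suggestion. A tiny sketch:

    fn main() {
        let parameters: Option<(&str, &str)> = Some((" ", ":var x=1"));

        // Before: a match whose arms just mirror Some/None.
        let verbose = match parameters {
            Some((_ws, parameters)) => Some(parameters),
            None => None,
        };

        // After: map expresses the projection directly.
        let concise = parameters.map(|(_ws, parameters)| parameters);

        assert_eq!(verbose, concise);
    }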
@@ -301,9 +301,7 @@ fn _detect_element<'b, 'g, 'r, 's>(
 input
 );

-if let Ok((_, _)) = detect_comment(input) {
-return Ok((input, ()));
-}
+element!(detect_comment, input);

 ak_element!(
 detect_fixed_width_area,
@@ -326,6 +324,6 @@
 }

 Err(nom::Err::Error(CustomError::MyError(MyError(
-"No element detected.".into(),
+"No element detected.",
 ))))
 }
@@ -60,21 +60,16 @@ fn name<'b, 'g, 'r, 's>(
 let result = tuple((
 tag::<_, _, CustomError<_>>(entity.name),
 alt((
-verify(map(tag("{}"), |_| true), |_| !entity.name.ends_with(" ")),
+verify(map(tag("{}"), |_| true), |_| !entity.name.ends_with(' ')),
 map(peek(recognize(entity_end)), |_| false),
 )),
 ))(input);
-match result {
-Ok((remaining, (ent, use_brackets))) => {
-return Ok((remaining, (entity, ent, use_brackets)));
-}
-Err(_) => {}
+if let Ok((remaining, (ent, use_brackets))) = result {
+return Ok((remaining, (entity, ent, use_brackets)));
 }
 }

-Err(nom::Err::Error(CustomError::MyError(MyError(
-"NoEntity".into(),
-))))
+Err(nom::Err::Error(CustomError::MyError(MyError("NoEntity"))))
 }

 #[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
@@ -49,7 +49,7 @@ where
 {
 if immediate_in_section(context, "footnote definition") {
 return Err(nom::Err::Error(CustomError::MyError(MyError(
-"Cannot nest objects of the same element".into(),
+"Cannot nest objects of the same element",
 ))));
 }
 start_of_line(remaining)?;
@@ -157,6 +157,7 @@ where
 #[cfg(test)]
 mod tests {
 use super::*;
+use crate::context::bind_context;
 use crate::context::Context;
 use crate::context::GlobalSettings;
 use crate::context::List;
@@ -174,7 +175,7 @@ line footnote.",
 let global_settings = GlobalSettings::default();
 let initial_context = ContextElement::document_context();
 let initial_context = Context::new(&global_settings, List::new(&initial_context));
-let footnote_definition_matcher = parser_with_context!(element(true))(&initial_context);
+let footnote_definition_matcher = bind_context!(element(true), &initial_context);
 let (remaining, first_footnote_definition) =
 footnote_definition_matcher(input).expect("Parse first footnote_definition");
 let (remaining, second_footnote_definition) =
@@ -211,7 +212,7 @@ not in the footnote.",
 let global_settings = GlobalSettings::default();
 let initial_context = ContextElement::document_context();
 let initial_context = Context::new(&global_settings, List::new(&initial_context));
-let footnote_definition_matcher = parser_with_context!(element(true))(&initial_context);
+let footnote_definition_matcher = bind_context!(element(true), &initial_context);
 let (remaining, first_footnote_definition) =
 footnote_definition_matcher(input).expect("Parse first footnote_definition");
 assert_eq!(Into::<&str>::into(remaining), "not in the footnote.");
@@ -177,7 +177,7 @@ fn _footnote_definition_end<'b, 'g, 'r, 's>(
 if current_depth > 0 {
 // Its impossible for the next character to end the footnote reference definition if we're any amount of brackets deep
 return Err(nom::Err::Error(CustomError::MyError(MyError(
-"NoFootnoteReferenceDefinitionEnd".into(),
+"NoFootnoteReferenceDefinitionEnd",
 ))));
 }
 if current_depth < 0 {
@@ -61,10 +61,10 @@ where
 let (remaining, (_begin, name)) = tuple((
 tag_no_case("#+begin_"),
 verify(name, |name: &OrgSource<'_>| {
-match Into::<&str>::into(name).to_lowercase().as_str() {
-"comment" | "example" | "export" | "src" | "verse" => false,
-_ => true,
-}
+!matches!(
+Into::<&str>::into(name).to_lowercase().as_str(),
+"comment" | "example" | "export" | "src" | "verse",
+)
 }),
 ))(remaining)?;
 let name = Into::<&str>::into(name);
@@ -233,7 +233,7 @@ fn greater_block_body<'c, 'b, 'g, 'r, 's>(
 ) -> Res<OrgSource<'s>, (&'s str, Vec<Element<'s>>)> {
 if in_section(context, context_name) {
 return Err(nom::Err::Error(CustomError::MyError(MyError(
-"Cannot nest objects of the same element".into(),
+"Cannot nest objects of the same element",
 ))));
 }
 let exit_with_name = greater_block_end(name);
@@ -288,7 +288,7 @@ fn parameters<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, OrgSource<'s>> {
 recognize(many_till(anychar, peek(tuple((space0, line_ending)))))(input)
 }

-fn greater_block_end<'c>(name: &'c str) -> impl ContextMatcher + 'c {
+fn greater_block_end(name: &str) -> impl ContextMatcher + '_ {
 move |context, input: OrgSource<'_>| _greater_block_end(context, input, name)
 }

@@ -206,11 +206,7 @@ fn headline<'b, 'g, 'r, 's>(
 .map(|(_, (_, title))| title)
 .unwrap_or(Vec::new()),
 tags: maybe_tags
-.map(|(_ws, tags)| {
-tags.into_iter()
-.map(|single_tag| Into::<&str>::into(single_tag))
-.collect()
-})
+.map(|(_ws, tags)| tags.into_iter().map(Into::<&str>::into).collect())
 .unwrap_or(Vec::new()),
 is_footnote_section,
 },
@@ -265,11 +261,8 @@ fn heading_keyword<'b, 'g, 'r, 's>(
 .map(String::as_str)
 {
 let result = tag::<_, _, CustomError<_>>(todo_keyword)(input);
-match result {
-Ok((remaining, ent)) => {
-return Ok((remaining, (TodoKeywordType::Todo, ent)));
-}
-Err(_) => {}
+if let Ok((remaining, ent)) = result {
+return Ok((remaining, (TodoKeywordType::Todo, ent)));
 }
 }
 for todo_keyword in global_settings
@@ -278,20 +271,17 @@ fn heading_keyword<'b, 'g, 'r, 's>(
 .map(String::as_str)
 {
 let result = tag::<_, _, CustomError<_>>(todo_keyword)(input);
-match result {
-Ok((remaining, ent)) => {
-return Ok((remaining, (TodoKeywordType::Done, ent)));
-}
-Err(_) => {}
+if let Ok((remaining, ent)) = result {
+return Ok((remaining, (TodoKeywordType::Done, ent)));
 }
 }
 Err(nom::Err::Error(CustomError::MyError(MyError(
-"NoTodoKeyword".into(),
+"NoTodoKeyword",
 ))))
 }
 }

-fn priority_cookie<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, PriorityCookie> {
+fn priority_cookie(input: OrgSource<'_>) -> Res<OrgSource<'_>, PriorityCookie> {
 let (remaining, (_, priority_character, _)) = tuple((
 tag("[#"),
 verify(anychar, |c| c.is_alphanumeric()),
@@ -299,7 +289,7 @@ fn priority_cookie<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, PriorityCooki
 ))(input)?;
 let cookie = PriorityCookie::try_from(priority_character).map_err(|_| {
 nom::Err::Error(CustomError::MyError(MyError(
-"Failed to cast priority cookie to number.".into(),
+"Failed to cast priority cookie to number.",
 )))
 })?;
 Ok((remaining, cookie))
@@ -132,7 +132,7 @@ fn _header_end<'b, 'g, 'r, 's>(
 if current_depth > 0 {
 // Its impossible for the next character to end the header if we're any amount of bracket deep
 return Err(nom::Err::Error(CustomError::MyError(MyError(
-"NoHeaderEnd".into(),
+"NoHeaderEnd",
 ))));
 }
 if current_depth < 0 {
@@ -184,7 +184,7 @@ fn _argument_end<'b, 'g, 'r, 's>(
 if current_depth > 0 {
 // Its impossible for the next character to end the argument if we're any amount of parenthesis deep
 return Err(nom::Err::Error(CustomError::MyError(MyError(
-"NoArgumentEnd".into(),
+"NoArgumentEnd",
 ))));
 }
 if current_depth < 0 {
@@ -126,7 +126,7 @@ fn _header_end<'b, 'g, 'r, 's>(
 if current_depth > 0 {
 // Its impossible for the next character to end the header if we're any amount of bracket deep
 return Err(nom::Err::Error(CustomError::MyError(MyError(
-"NoHeaderEnd".into(),
+"NoHeaderEnd",
 ))));
 }
 if current_depth < 0 {
@@ -187,9 +187,7 @@ fn _body_end<'b, 'g, 'r, 's>(
 let current_depth = input.get_brace_depth() - starting_brace_depth;
 if current_depth > 0 {
 // Its impossible for the next character to end the body if we're any amount of brace deep
-return Err(nom::Err::Error(CustomError::MyError(MyError(
-"NoBodyEnd".into(),
-))));
+return Err(nom::Err::Error(CustomError::MyError(MyError("NoBodyEnd"))));
 }
 if current_depth < 0 {
 // This shouldn't be possible because if depth is 0 then a closing brace should end the body.
@@ -50,24 +50,21 @@ fn _filtered_keyword<'s, F: Fn(OrgSource<'s>) -> Res<OrgSource<'s>, OrgSource<'s
 // TODO: When key is a member of org-element-parsed-keywords, value can contain the standard set objects, excluding footnote references.
 let (remaining, (consumed_input, (_, _, parsed_key, _))) =
 consumed(tuple((space0, tag("#+"), key_parser, tag(":"))))(input)?;
-match tuple((
+if let Ok((remaining, _)) = tuple((
 space0::<OrgSource<'_>, CustomError<OrgSource<'_>>>,
 alt((line_ending, eof)),
 ))(remaining)
 {
-Ok((remaining, _)) => {
-return Ok((
-remaining,
-Keyword {
-source: consumed_input.into(),
-affiliated_keywords: AffiliatedKeywords::default(), // To be populated by the caller if this keyword is in a context to support affiliated keywords.
-key: parsed_key.into(),
-value: "".into(),
-},
-));
-}
-Err(_) => {}
-};
+return Ok((
+remaining,
+Keyword {
+source: consumed_input.into(),
+affiliated_keywords: AffiliatedKeywords::default(), // To be populated by the caller if this keyword is in a context to support affiliated keywords.
+key: parsed_key.into(),
+value: "",
+},
+));
+}
 let (remaining, _ws) = space0(remaining)?;
 let (remaining, parsed_value) = recognize(many_till(
 anychar,
@@ -173,16 +170,13 @@ fn plain_affiliated_key<'b, 'g, 'r, 's>(
 )),
 |(key, _)| key,
 )(input);
-match result {
-Ok((remaining, ent)) => {
-return Ok((remaining, ent));
-}
-Err(_) => {}
+if let Ok((remaining, ent)) = result {
+return Ok((remaining, ent));
 }
 }

 Err(nom::Err::Error(CustomError::MyError(MyError(
-"NoKeywordKey".into(),
+"NoKeywordKey",
 ))))
 }

@@ -199,16 +193,13 @@ fn dual_affiliated_key<'b, 'g, 'r, 's>(
 tag("]"),
 peek(tag(":")),
 )))(input);
-match result {
-Ok((remaining, ent)) => {
-return Ok((remaining, ent));
-}
-Err(_) => {}
+if let Ok((remaining, ent)) = result {
+return Ok((remaining, ent));
 }
 }

 Err(nom::Err::Error(CustomError::MyError(MyError(
-"NoKeywordKey".into(),
+"NoKeywordKey",
 ))))
 }

@ -19,7 +19,7 @@ use crate::error::Res;
|
||||
/// Parses the text in the value of a #+TODO keyword.
|
||||
///
|
||||
/// Example input: "foo bar baz | lorem ipsum"
|
||||
pub(crate) fn todo_keywords<'s>(input: &'s str) -> Res<&'s str, (Vec<&'s str>, Vec<&'s str>)> {
|
||||
pub(crate) fn todo_keywords(input: &str) -> Res<&str, (Vec<&str>, Vec<&str>)> {
|
||||
let (remaining, mut before_pipe_words) = separated_list0(space1, todo_keyword_word)(input)?;
|
||||
let (remaining, after_pipe_words) = opt(tuple((
|
||||
tuple((space0, tag("|"), space0)),
|
||||
@ -30,12 +30,9 @@ pub(crate) fn todo_keywords<'s>(input: &'s str) -> Res<&'s str, (Vec<&'s str>, V
|
||||
Ok((remaining, (before_pipe_words, after_pipe_words)))
|
||||
} else if !before_pipe_words.is_empty() {
|
||||
// If there was no pipe, then the last word becomes a completion state instead.
|
||||
let mut after_pipe_words = Vec::with_capacity(1);
|
||||
after_pipe_words.push(
|
||||
before_pipe_words
|
||||
.pop()
|
||||
.expect("If-statement proves this is Some."),
|
||||
);
|
||||
let after_pipe_words = vec![before_pipe_words
|
||||
.pop()
|
||||
.expect("If-statement proves this is Some.")];
|
||||
Ok((remaining, (before_pipe_words, after_pipe_words)))
|
||||
} else {
|
||||
// No words founds
|
||||
@ -43,7 +40,7 @@ pub(crate) fn todo_keywords<'s>(input: &'s str) -> Res<&'s str, (Vec<&'s str>, V
|
||||
}
|
||||
}
|
||||
|
||||
fn todo_keyword_word<'s>(input: &'s str) -> Res<&'s str, &'s str> {
|
||||
fn todo_keyword_word(input: &str) -> Res<&str, &str> {
|
||||
let (remaining, keyword) = verify(take_till(|c| "( \t\r\n|".contains(c)), |result: &str| {
|
||||
!result.is_empty()
|
||||
})(input)?;
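The todo_keywords hunk is clippy's vec_init_then_push fix: a one-element vector built with Vec::with_capacity plus push becomes a single vec! expression. A small sketch under the same assumption that the caller has already checked the list is non-empty (the function name is illustrative):

    fn split_completion_state(mut words: Vec<&str>) -> (Vec<&str>, Vec<&str>) {
        // Before:
        //     let mut after_pipe = Vec::with_capacity(1);
        //     after_pipe.push(words.pop().expect("caller checked non-empty"));
        // After: the vec! macro builds the single-element vector directly.
        let after_pipe = vec![words.pop().expect("caller checked non-empty")];
        (words, after_pipe)
    }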

@ -210,7 +210,7 @@ fn pre<'b, 'g, 'r, 's>(
let preceding_character = input.get_preceding_character();
if let Some('$') = preceding_character {
return Err(nom::Err::Error(CustomError::MyError(MyError(
"Not a valid pre character for dollar char fragment.".into(),
"Not a valid pre character for dollar char fragment.",
))));
}
Ok((input, ()))
@ -284,7 +284,7 @@ fn close_border<'b, 'g, 'r, 's>(
Some(c) if !c.is_whitespace() && !".,;$".contains(c) => Ok((input, ())),
_ => {
return Err(nom::Err::Error(CustomError::MyError(MyError(
"Not a valid pre character for dollar char fragment.".into(),
"Not a valid pre character for dollar char fragment.",
))));
}
}

@ -76,10 +76,7 @@ where
let parser_context = context.with_additional_node(&contexts[0]);
let parser_context = parser_context.with_additional_node(&contexts[1]);
let parser_context = parser_context.with_additional_node(&contexts[2]);
let parameters = match parameters {
Some((_ws, parameters)) => Some(parameters),
None => None,
};
let parameters = parameters.map(|(_ws, parameters)| parameters);

let object_matcher = parser_with_context!(standard_set_object)(&parser_context);
let exit_matcher = parser_with_context!(exit_matcher_parser)(&parser_context);
@ -110,7 +107,7 @@ where
context.get_global_settings(),
affiliated_keywords,
),
data: parameters.map(|parameters| Into::<&str>::into(parameters)),
data: parameters.map(Into::<&str>::into),
children,
},
))
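Two related lints show up in the greater-block hunks: clippy::manual_map (a Some/None match that just rewraps the value) and clippy::redundant_closure (a closure that only forwards to a function). A minimal sketch with an illustrative tuple type, not taken from this repository:

    fn strip_leading_whitespace(parameters: Option<((), &str)>) -> Option<&str> {
        // Before (clippy::manual_map):
        //     match parameters {
        //         Some((_ws, parameters)) => Some(parameters),
        //         None => None,
        //     }
        // After: Option::map expresses the same rewrapping.
        parameters.map(|(_ws, parameters)| parameters)
    }

    fn to_owned_data(data: Option<&str>) -> Option<String> {
        // Before (clippy::redundant_closure): data.map(|d| Into::<String>::into(d))
        data.map(Into::<String>::into)
    }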
@ -295,7 +292,7 @@ where
affiliated_keywords,
),
export_type: export_type.map(Into::<&str>::into),
data: parameters.map(|parameters| Into::<&str>::into(parameters)),
data: parameters.map(Into::<&str>::into),
contents,
},
))
@ -344,7 +341,7 @@ where
let (switches, number_lines, preserve_indent, retain_labels, use_labels, label_format) = {
if let Some(switches) = switches {
(
if switches.source.len() == 0 {
if switches.source.is_empty() {
None
} else {
Some(switches.source)
@ -390,7 +387,7 @@ fn data<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, OrgSource<'s>> {
is_not("\r\n")(input)
}

fn lesser_block_end<'c>(current_name: &'c str) -> impl ContextMatcher + 'c {
fn lesser_block_end(current_name: &str) -> impl ContextMatcher + '_ {
// Since the lesser block names are statically defined in code, we can simply assert that the name is lowercase instead of causing an allocation by converting to lowercase.
debug_assert!(current_name == current_name.to_lowercase());
move |context, input: OrgSource<'_>| _lesser_block_end(context, input, current_name)
@ -420,7 +417,7 @@ fn _lesser_block_end<'b, 'g, 'r, 's, 'c>(
/// Parser for the beginning of a lesser block
///
/// current_name MUST be lowercase. We do not do the conversion ourselves because it is not allowed in a const fn.
const fn lesser_block_begin<'c>(current_name: &'c str) -> impl ContextMatcher + 'c {
const fn lesser_block_begin(current_name: &str) -> impl ContextMatcher + '_ {
// TODO: Since this is a const fn, is there ANY way to "generate" functions at compile time?
move |context, input: OrgSource<'_>| _lesser_block_begin(context, input, current_name)
}
@ -531,11 +528,8 @@ fn _example_src_switches<'s>(
(SwitchState::Normal, "-r") => {
saw_r = true;
use_labels = false;
match retain_labels {
RetainLabels::Yes => {
retain_labels = RetainLabels::No;
}
_ => {}
if let RetainLabels::Yes = retain_labels {
retain_labels = RetainLabels::No;
}
}
(SwitchState::Normal, "-l") => {
@ -675,7 +669,9 @@ pub(crate) fn content<'b, 'g, 'r, 's>(
}

let (remain, (pre_escape_whitespace, line)) = content_line(remaining)?;
pre_escape_whitespace.map(|val| ret.push_str(Into::<&str>::into(val)));
if let Some(val) = pre_escape_whitespace {
ret.push_str(Into::<&str>::into(val));
}
ret.push_str(line.into());
remaining = remain;
}
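The content hunk replaces an Option::map that is only run for its side effect with an explicit if let, which is what clippy::option_map_unit_fn asks for. A minimal sketch (the function and parameter names are illustrative):

    fn append_pre_escape_whitespace(ret: &mut String, pre_escape_whitespace: Option<&str>) {
        // Before: pre_escape_whitespace.map(|val| ret.push_str(val));
        // After: the side effect is explicit and no Option result is discarded.
        if let Some(val) = pre_escape_whitespace {
            ret.push_str(val);
        }
    }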

@ -46,7 +46,7 @@ fn pre<'b, 'g, 'r, 's>(
// If None, we are at the start of the file
None | Some('\\') => {
return Err(nom::Err::Error(CustomError::MyError(MyError(
"Not a valid pre character for line break.".into(),
"Not a valid pre character for line break.",
))));
}
_ => {}
@ -56,7 +56,7 @@ fn pre<'b, 'g, 'r, 's>(
let is_non_empty_line = current_line.chars().any(|c| !c.is_whitespace());
if !is_non_empty_line {
return Err(nom::Err::Error(CustomError::MyError(MyError(
"Not a valid pre line for line break.".into(),
"Not a valid pre line for line break.",
))));
}

@ -138,7 +138,7 @@ pub(crate) fn detect_standard_set_object_sans_plain_text<'b, 'g, 'r, 's>(
}

return Err(nom::Err::Error(CustomError::MyError(MyError(
"No object detected.".into(),
"No object detected.",
))));
}

@ -158,7 +158,7 @@ fn detect_minimal_set_object_sans_plain_text<'b, 'g, 'r, 's>(
}

return Err(nom::Err::Error(CustomError::MyError(MyError(
"No object detected.".into(),
"No object detected.",
))));
}

@ -223,9 +223,9 @@ fn detect_regular_link_description_set_object_sans_plain_text<'b, 'g, 'r, 's>(
return Ok((input, ()));
}

return Err(nom::Err::Error(CustomError::MyError(MyError(
"No object detected.".into(),
))));
Err(nom::Err::Error(CustomError::MyError(MyError(
"No object detected.",
))))
}

#[cfg_attr(
@ -290,6 +290,6 @@ fn detect_table_cell_set_object_sans_plain_text<'b, 'g, 'r, 's>(
}

return Err(nom::Err::Error(CustomError::MyError(MyError(
"No object detected.".into(),
"No object detected.",
))));
}

@ -219,7 +219,7 @@ where
panic!("Attempted to extend past the end of the WrappedInput.")
}
if new_start == self.start && new_end == self.end {
return self.clone();
return *self;
}

let skipped_text = &self.full_source[self.start..new_start];
@ -337,7 +337,7 @@ impl<'s> InputTakeAtPosition for OrgSource<'s> {
P: Fn(Self::Item) -> bool,
{
match Into::<&str>::into(self).position(predicate) {
Some(0) => Err(nom::Err::Error(E::from_error_kind(self.clone(), e))),
Some(0) => Err(nom::Err::Error(E::from_error_kind(*self, e))),
Some(idx) => Ok(self.take_split(idx)),
None => Err(nom::Err::Incomplete(nom::Needed::new(1))),
}
@ -366,11 +366,11 @@ impl<'s> InputTakeAtPosition for OrgSource<'s> {
{
let window = Into::<&str>::into(self);
match window.position(predicate) {
Some(0) => Err(nom::Err::Error(E::from_error_kind(self.clone(), e))),
Some(0) => Err(nom::Err::Error(E::from_error_kind(*self, e))),
Some(n) => Ok(self.take_split(n)),
None => {
if window.input_len() == 0 {
Err(nom::Err::Error(E::from_error_kind(self.clone(), e)))
Err(nom::Err::Error(E::from_error_kind(*self, e)))
} else {
Ok(self.take_split(self.input_len()))
}
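The OrgSource hunks swap self.clone() for *self, which is clippy::clone_on_copy: once a type is Copy, dereferencing is the idiomatic way to duplicate it. A sketch with a hypothetical Window type standing in for OrgSource:

    #[derive(Clone, Copy)]
    struct Window<'s> {
        full_source: &'s str,
        start: usize,
        end: usize,
    }

    impl<'s> Window<'s> {
        fn narrowed(&self, new_start: usize, new_end: usize) -> Window<'s> {
            if new_start == self.start && new_end == self.end {
                // Before (clippy::clone_on_copy): return self.clone();
                return *self;
            }
            Window {
                full_source: self.full_source,
                start: new_start,
                end: new_end,
            }
        }
    }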
@ -398,7 +398,7 @@ pub(crate) fn convert_error<'a, I: Into<CustomError<&'a str>>>(
impl<'s> From<CustomError<OrgSource<'s>>> for CustomError<&'s str> {
fn from(value: CustomError<OrgSource<'s>>) -> Self {
match value {
CustomError::MyError(err) => CustomError::MyError(err.into()),
CustomError::MyError(err) => CustomError::MyError(err),
CustomError::Nom(input, error_kind) => CustomError::Nom(input.into(), error_kind),
CustomError::IO(err) => CustomError::IO(err),
CustomError::BoxedError(err) => CustomError::BoxedError(err),

@ -89,7 +89,7 @@ fn paragraph_end<'b, 'g, 'r, 's>(

#[cfg(test)]
mod tests {
use crate::context::parser_with_context;
use crate::context::bind_context;
use crate::context::Context;
use crate::context::ContextElement;
use crate::context::GlobalSettings;
@ -104,7 +104,7 @@ mod tests {
let global_settings = GlobalSettings::default();
let initial_context = ContextElement::document_context();
let initial_context = Context::new(&global_settings, List::new(&initial_context));
let paragraph_matcher = parser_with_context!(element(true))(&initial_context);
let paragraph_matcher = bind_context!(element(true), &initial_context);
let (remaining, first_paragraph) = paragraph_matcher(input).expect("Parse first paragraph");
let (remaining, second_paragraph) =
paragraph_matcher(remaining).expect("Parse second paragraph.");

@ -96,7 +96,7 @@ fn pre<'b, 'g, 'r, 's>(
Some(_) => {
// Not at start of line, cannot be a heading
return Err(nom::Err::Error(CustomError::MyError(MyError(
"Not a valid pre character for plain link.".into(),
"Not a valid pre character for plain link.",
))));
}
};
@ -263,16 +263,13 @@ pub(crate) fn protocol<'b, 'g, 'r, 's>(
) -> Res<OrgSource<'s>, OrgSource<'s>> {
for link_parameter in context.get_global_settings().link_parameters {
let result = tag_no_case::<_, _, CustomError<_>>(*link_parameter)(input);
match result {
Ok((remaining, ent)) => {
return Ok((remaining, ent));
}
Err(_) => {}
if let Ok((remaining, ent)) = result {
return Ok((remaining, ent));
}
}

Err(nom::Err::Error(CustomError::MyError(MyError(
"NoLinkProtocol".into(),
"NoLinkProtocol",
))))
}

@ -346,7 +343,7 @@ fn impl_path_plain_end<'b, 'g, 'r, 's>(
}

Err(nom::Err::Error(CustomError::MyError(MyError(
"No path plain end".into(),
"No path plain end",
))))
}

@ -435,6 +432,6 @@ fn _path_plain_parenthesis_end<'s>(
}
}
Err(nom::Err::Error(CustomError::MyError(MyError(
"No closing parenthesis".into(),
"No closing parenthesis",
))))
}

@ -71,7 +71,7 @@ where
alt((space1, line_ending, eof)),
)),
|(_start, (indent_level, _), (_bullet_type, bull), _after_whitespace)| {
!Into::<&str>::into(bull).starts_with("*") || *indent_level > 0
!Into::<&str>::into(bull).starts_with('*') || *indent_level > 0
},
)(remaining)
.is_ok()
@ -79,7 +79,7 @@ where
return Ok((input, ()));
}
return Err(nom::Err::Error(CustomError::MyError(MyError(
"No element detected.".into(),
"No element detected.",
))));
}

@ -153,7 +153,7 @@ where
Some(final_child) => final_child,
None => {
return Err(nom::Err::Error(CustomError::MyError(MyError(
"Plain lists require at least one element.".into(),
"Plain lists require at least one element.",
))));
}
};
@ -192,7 +192,7 @@ fn plain_list_item<'b, 'g, 'r, 's>(
let (remaining, (indent_level, _leading_whitespace)) = indentation_level(context, input)?;
let (remaining, (bullet_type, bull)) = verify(
parser_with_context!(bullet)(context),
|(_bullet_type, bull)| !Into::<&str>::into(bull).starts_with("*") || indent_level > 0,
|(_bullet_type, bull)| !Into::<&str>::into(bull).starts_with('*') || indent_level > 0,
)(remaining)?;

let (remaining, maybe_counter_set) =
@ -227,35 +227,32 @@ fn plain_list_item<'b, 'g, 'r, 's>(
let maybe_contentless_item: Res<OrgSource<'_>, ()> = peek(parser_with_context!(
detect_contentless_item_contents
)(&parser_context))(remaining);
match maybe_contentless_item {
Ok((_rem, _ws)) => {
let (remaining, _trailing_ws) = if context.should_consume_trailing_whitespace() {
recognize(alt((recognize(many1(blank_line)), eof)))(remaining)?
} else {
recognize(alt((blank_line, eof)))(remaining)?
};
let source = get_consumed(input, remaining);
return Ok((
remaining,
(
list_type,
PlainListItem {
source: source.into(),
indentation: indent_level,
bullet: bull.into(),
counter: maybe_counter_set,
checkbox: None,
tag: maybe_tag
.map(|(_ws, item_tag)| item_tag)
.unwrap_or(Vec::new()),
pre_blank: 0,
children: Vec::new(),
},
),
));
}
Err(_) => {}
};
if let Ok((_rem, _ws)) = maybe_contentless_item {
let (remaining, _trailing_ws) = if context.should_consume_trailing_whitespace() {
recognize(alt((recognize(many1(blank_line)), eof)))(remaining)?
} else {
recognize(alt((blank_line, eof)))(remaining)?
};
let source = get_consumed(input, remaining);
return Ok((
remaining,
(
list_type,
PlainListItem {
source: source.into(),
indentation: indent_level,
bullet: bull.into(),
counter: maybe_counter_set,
checkbox: None,
tag: maybe_tag
.map(|(_ws, item_tag)| item_tag)
.unwrap_or(Vec::new()),
pre_blank: 0,
children: Vec::new(),
},
),
));
}
let (remaining, pre_blank) = item_tag_post_gap(&parser_context, remaining)?;
let pre_blank = Into::<&str>::into(pre_blank)
.bytes()
@ -552,6 +549,7 @@ fn detect_contentless_item_contents<'b, 'g, 'r, 's>(
#[cfg(test)]
mod tests {
use super::*;
use crate::context::bind_context;
use crate::context::Context;
use crate::context::GlobalSettings;
use crate::context::List;
@ -563,7 +561,7 @@ mod tests {
let global_settings = GlobalSettings::default();
let initial_context = ContextElement::document_context();
let initial_context = Context::new(&global_settings, List::new(&initial_context));
let plain_list_item_matcher = parser_with_context!(plain_list_item)(&initial_context);
let plain_list_item_matcher = bind_context!(plain_list_item, &initial_context);
let (remaining, (_, result)) = plain_list_item_matcher(input).unwrap();
assert_eq!(Into::<&str>::into(remaining), "");
assert_eq!(result.get_standard_properties().get_source(), "1.");
@ -575,7 +573,7 @@ mod tests {
let global_settings = GlobalSettings::default();
let initial_context = ContextElement::document_context();
let initial_context = Context::new(&global_settings, List::new(&initial_context));
let plain_list_item_matcher = parser_with_context!(plain_list_item)(&initial_context);
let plain_list_item_matcher = bind_context!(plain_list_item, &initial_context);
let (remaining, (_, result)) = plain_list_item_matcher(input).unwrap();
assert_eq!(Into::<&str>::into(remaining), "");
assert_eq!(result.get_standard_properties().get_source(), "1. foo");
@ -642,7 +640,7 @@ mod tests {
let global_settings = GlobalSettings::default();
let initial_context = ContextElement::document_context();
let initial_context = Context::new(&global_settings, List::new(&initial_context));
let plain_list_matcher = parser_with_context!(element(true))(&initial_context);
let plain_list_matcher = bind_context!(element(true), &initial_context);
let (remaining, result) =
plain_list_matcher(input).expect("Should parse the plain list successfully.");
assert_eq!(Into::<&str>::into(remaining), " ipsum\n");
@ -670,7 +668,7 @@ baz"#,
let global_settings = GlobalSettings::default();
let initial_context = ContextElement::document_context();
let initial_context = Context::new(&global_settings, List::new(&initial_context));
let plain_list_matcher = parser_with_context!(element(true))(&initial_context);
let plain_list_matcher = bind_context!(element(true), &initial_context);
let (remaining, result) =
plain_list_matcher(input).expect("Should parse the plain list successfully.");
assert_eq!(Into::<&str>::into(remaining), "baz");
@ -703,7 +701,7 @@ dolar"#,
let global_settings = GlobalSettings::default();
let initial_context = ContextElement::document_context();
let initial_context = Context::new(&global_settings, List::new(&initial_context));
let plain_list_matcher = parser_with_context!(element(true))(&initial_context);
let plain_list_matcher = bind_context!(element(true), &initial_context);
let (remaining, result) =
plain_list_matcher(input).expect("Should parse the plain list successfully.");
assert_eq!(Into::<&str>::into(remaining), "dolar");
@ -93,39 +93,33 @@ impl<'x> RematchObject<'x> for PlainText<'x> {
}

let is_not_whitespace = is_not::<&str, &str, CustomError<_>>(" \t\r\n")(goal);
match is_not_whitespace {
Ok((new_goal, payload)) => {
let (new_remaining, _) = tuple((
tag_no_case(payload),
// TODO: Test to see what the REAL condition is. Checking for not-alphabetic works fine for now, but the real criteria might be something like the plain text exit matcher.
peek(alt((
recognize(verify(anychar, |c| !c.is_alphanumeric())),
eof,
))),
))(remaining)?;
remaining = new_remaining;
goal = new_goal;
continue;
}
Err(_) => {}
};
if let Ok((new_goal, payload)) = is_not_whitespace {
let (new_remaining, _) = tuple((
tag_no_case(payload),
// TODO: Test to see what the REAL condition is. Checking for not-alphabetic works fine for now, but the real criteria might be something like the plain text exit matcher.
peek(alt((
recognize(verify(anychar, |c| !c.is_alphanumeric())),
eof,
))),
))(remaining)?;
remaining = new_remaining;
goal = new_goal;
continue;
}

let is_whitespace = recognize(many1(alt((
recognize(one_of::<&str, &str, CustomError<_>>(" \t")),
line_ending,
))))(goal);
match is_whitespace {
Ok((new_goal, _)) => {
let (new_remaining, _) = many1(org_space_or_line_ending)(remaining)?;
remaining = new_remaining;
goal = new_goal;
continue;
}
Err(_) => {}
};
if let Ok((new_goal, _)) = is_whitespace {
let (new_remaining, _) = many1(org_space_or_line_ending)(remaining)?;
remaining = new_remaining;
goal = new_goal;
continue;
}

return Err(nom::Err::Error(CustomError::MyError(MyError(
"Target does not match.".into(),
"Target does not match.",
))));
}

@ -144,6 +138,7 @@ mod tests {
use nom::combinator::map;

use super::*;
use crate::context::bind_context;
use crate::context::Context;
use crate::context::ContextElement;
use crate::context::GlobalSettings;
@ -157,10 +152,14 @@ mod tests {
let global_settings = GlobalSettings::default();
let initial_context = ContextElement::document_context();
let initial_context = Context::new(&global_settings, List::new(&initial_context));
let plain_text_matcher = parser_with_context!(plain_text(
detect_standard_set_object_sans_plain_text
))(&initial_context);
let (remaining, result) = map(plain_text_matcher, Object::PlainText)(input).unwrap();
let (remaining, result) = map(
bind_context!(
plain_text(detect_standard_set_object_sans_plain_text),
&initial_context
),
Object::PlainText,
)(input)
.unwrap();
assert_eq!(Into::<&str>::into(remaining), "");
assert_eq!(
result.get_standard_properties().get_source(),

@ -40,7 +40,7 @@ pub(crate) fn property_drawer<'b, 'g, 'r, 's>(
) -> Res<OrgSource<'s>, PropertyDrawer<'s>> {
if immediate_in_section(context, "property-drawer") {
return Err(nom::Err::Error(CustomError::MyError(MyError(
"Cannot nest objects of the same element".into(),
"Cannot nest objects of the same element",
))));
}
let (

@ -53,7 +53,7 @@ pub(crate) fn radio_link<'b, 'g, 'r, 's>(
}
}
Err(nom::Err::Error(CustomError::MyError(MyError(
"NoRadioLink".into(),
"NoRadioLink",
))))
}

@ -99,7 +99,7 @@ pub(crate) fn rematch_target<'x, 'b, 'g, 'r, 's>(
}
_ => {
return Err(nom::Err::Error(CustomError::MyError(MyError(
"OnlyMinimalSetObjectsAllowed".into(),
"OnlyMinimalSetObjectsAllowed",
))));
}
};
@ -185,15 +185,14 @@ mod tests {
fn plain_text_radio_target() {
let input = OrgSource::new("foo bar baz");
let radio_target_match = vec![Object::PlainText(PlainText { source: "bar" })];
let global_settings = {
let mut global_settings = GlobalSettings::default();
global_settings.radio_targets = vec![&radio_target_match];
global_settings
let global_settings = GlobalSettings {
radio_targets: vec![&radio_target_match],
..Default::default()
};
let initial_context = ContextElement::document_context();
let initial_context = Context::new(&global_settings, List::new(&initial_context));
let paragraph_matcher = parser_with_context!(element(true))(&initial_context);
let (remaining, first_paragraph) = paragraph_matcher(input).expect("Parse first paragraph");
let (remaining, first_paragraph) =
element(true)(&initial_context, input).expect("Parse first paragraph");
let first_paragraph = match first_paragraph {
Element::Paragraph(paragraph) => paragraph,
_ => panic!("Should be a paragraph!"),
@ -212,7 +211,7 @@ mod tests {
&Object::RadioLink(RadioLink {
source: "bar ",
children: vec![Object::PlainText(PlainText { source: "bar" })],
path: "bar".into()
path: "bar"
})
);
}
@ -224,16 +223,15 @@ mod tests {
source: "*bar*",
children: vec![Object::PlainText(PlainText { source: "bar" })],
})];
let global_settings = {
let mut global_settings = GlobalSettings::default();
global_settings.radio_targets = vec![&radio_target_match];
global_settings
let global_settings = GlobalSettings {
radio_targets: vec![&radio_target_match],
..Default::default()
};
let initial_context = ContextElement::document_context();
let initial_context = Context::new(&global_settings, List::new(&initial_context));
let paragraph_matcher = parser_with_context!(element(true))(&initial_context);

let (remaining, first_paragraph) =
paragraph_matcher(input.into()).expect("Parse first paragraph");
element(true)(&initial_context, input).expect("Parse first paragraph");
let first_paragraph = match first_paragraph {
Element::Paragraph(paragraph) => paragraph,
_ => panic!("Should be a paragraph!"),
@ -255,7 +253,7 @@ mod tests {
source: "*bar* ",
children: vec![Object::PlainText(PlainText { source: "bar" })]
})],
path: "*bar* ".into()
path: "*bar* "
})
);
}

@ -139,14 +139,7 @@ fn pathreg<'b, 'g, 'r, 's>(
input: OrgSource<'s>,
) -> Res<OrgSource<'s>, PathReg<'s>> {
let (remaining, path) = map_parser(
escaped(
take_till1(|c| match c {
'\\' | '[' | ']' => true,
_ => false,
}),
'\\',
anychar,
),
escaped(take_till1(|c| matches!(c, '\\' | '[' | ']')), '\\', anychar),
parser_with_context!(parse_path_reg)(context),
)(input)?;
Ok((remaining, path))
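The pathreg hunk is clippy::match_like_matches_macro: a match that only maps patterns to true/false collapses into matches!. A minimal sketch of the escaping predicate, written as a free function for illustration:

    fn needs_escape(c: char) -> bool {
        // Before:
        //     match c {
        //         '\\' | '[' | ']' => true,
        //         _ => false,
        //     }
        matches!(c, '\\' | '[' | ']')
    }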
@ -262,11 +255,8 @@ fn apply_link_templates<'b, 'g, 'r, 's>(
};
}
// Handle lingering state
match state {
ParserState::Percent => {
ret.push('%');
}
_ => {}
if let ParserState::Percent = state {
ret.push('%');
}
if !injected_value {
ret.push_str(inject_value);
@ -494,6 +484,6 @@ fn impl_path_reg_end<'b, 'g, 'r, 's>(
}

Err(nom::Err::Error(CustomError::MyError(MyError(
"No path reg end".into(),
"No path reg end",
))))
}

@ -65,12 +65,12 @@ pub(crate) fn zeroth_section<'b, 'g, 'r, 's>(
},
)(remaining)?;

comment_and_property_drawer_element.map(|(comment, property_drawer, _ws)| {
if let Some((comment, property_drawer, _ws)) = comment_and_property_drawer_element {
children.insert(0, Element::PropertyDrawer(property_drawer));
comment
.map(Element::Comment)
.map(|ele| children.insert(0, ele));
});
if let Some(ele) = comment.map(Element::Comment) {
children.insert(0, ele);
}
}

let (remaining, _trailing_ws) =
maybe_consume_trailing_whitespace_if_not_exiting(context, remaining)?;
@ -121,12 +121,12 @@ pub(crate) fn section<'b, 'g, 'r, 's>(
!children.is_empty() || property_drawer_element.is_some() || planning_element.is_some()
},
)(remaining)?;
property_drawer_element
.map(Element::PropertyDrawer)
.map(|ele| children.insert(0, ele));
planning_element
.map(Element::Planning)
.map(|ele| children.insert(0, ele));
if let Some(ele) = property_drawer_element.map(Element::PropertyDrawer) {
children.insert(0, ele);
}
if let Some(ele) = planning_element.map(Element::Planning) {
children.insert(0, ele)
}

let (remaining, _trailing_ws) =
maybe_consume_trailing_whitespace_if_not_exiting(context, remaining)?;

@ -137,7 +137,7 @@ fn script_body<'b, 'g, 'r, 's>(
ScriptBody::Braceless(body.into())
}),
map(parser_with_context!(script_with_braces)(context), |body| {
ScriptBody::WithBraces(body.into())
ScriptBody::WithBraces(body)
}),
map(
parser_with_context!(script_with_parenthesis)(context),
@ -233,7 +233,7 @@ fn _script_with_braces_end<'b, 'g, 'r, 's>(
if current_depth > 0 {
// Its impossible for the next character to end the subscript or superscript if we're any amount of braces deep
return Err(nom::Err::Error(CustomError::MyError(MyError(
"Not a valid end for subscript or superscript.".into(),
"Not a valid end for subscript or superscript.",
))));
}
if current_depth < 0 {
@ -288,6 +288,6 @@ fn _script_with_parenthesis_end<'s>(
}
}
Err(nom::Err::Error(CustomError::MyError(MyError(
"No script parenthesis end.".into(),
"No script parenthesis end.",
))))
}

@ -197,7 +197,7 @@ fn org_mode_table_cell<'b, 'g, 'r, 's>(
let (remaining, (children, _exit_contents)) = verify(
many_till(table_cell_set_object_matcher, exit_matcher),
|(children, exit_contents)| {
!children.is_empty() || Into::<&str>::into(exit_contents).ends_with("|")
!children.is_empty() || Into::<&str>::into(exit_contents).ends_with('|')
},
)(remaining)?;
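Several hunks (the bullet checks earlier and the table-cell check here) are clippy::single_char_pattern: ends_with("|") becomes ends_with('|'), letting the standard library take the cheaper char comparison path. A tiny illustrative sketch:

    fn is_closed_cell(exit_contents: &str) -> bool {
        // Before: exit_contents.ends_with("|")
        // After: a char pattern avoids the &str machinery for a single character.
        exit_contents.ends_with('|')
    }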

@ -43,7 +43,7 @@ pub(crate) fn target<'b, 'g, 'r, 's>(
.expect("We cannot be at the start of the file because we are inside a target.");
if preceding_character.is_whitespace() {
return Err(nom::Err::Error(CustomError::MyError(MyError(
"Targets cannot end with whitespace.".into(),
"Targets cannot end with whitespace.",
))));
}
let (remaining, _) = tag(">>")(remaining)?;

@ -183,13 +183,13 @@ fn code<'b, 'g, 'r, 's>(
))
}

fn text_markup_object<'c>(
marker_symbol: &'c str,
fn text_markup_object(
marker_symbol: &str,
) -> impl for<'b, 'g, 'r, 's> Fn(
RefContext<'b, 'g, 'r, 's>,
OrgSource<'s>,
) -> Res<OrgSource<'s>, Vec<Object<'s>>>
+ 'c {
+ '_ {
move |context, input: OrgSource<'_>| _text_markup_object(context, input, marker_symbol)
}

@ -235,7 +235,7 @@ fn _text_markup_object<'b, 'g, 'r, 's, 'c>(
let _enter = span.enter();
if exit_matcher_parser(context, remaining).is_ok() {
return Err(nom::Err::Error(CustomError::MyError(MyError(
"Parent exit matcher is triggering.".into(),
"Parent exit matcher is triggering.",
))));
}
}
@ -246,13 +246,13 @@ fn _text_markup_object<'b, 'g, 'r, 's, 'c>(
Ok((remaining, children))
}

fn text_markup_string<'c>(
marker_symbol: &'c str,
fn text_markup_string(
marker_symbol: &str,
) -> impl for<'b, 'g, 'r, 's> Fn(
RefContext<'b, 'g, 'r, 's>,
OrgSource<'s>,
) -> Res<OrgSource<'s>, OrgSource<'s>>
+ 'c {
+ '_ {
move |context, input: OrgSource<'_>| _text_markup_string(context, input, marker_symbol)
}

@ -291,7 +291,7 @@ fn _text_markup_string<'b, 'g, 'r, 's, 'c>(
let _enter = span.enter();
if exit_matcher_parser(context, remaining).is_ok() {
return Err(nom::Err::Error(CustomError::MyError(MyError(
"Parent exit matcher is triggering.".into(),
"Parent exit matcher is triggering.",
))));
}
}
@ -322,7 +322,7 @@ fn pre<'b, 'g, 'r, 's>(
Some('-') | Some('(') | Some('{') | Some('\'') | Some('"') => {}
Some(_) => {
return Err(nom::Err::Error(CustomError::MyError(MyError(
"Not a valid pre character for text markup.".into(),
"Not a valid pre character for text markup.",
))));
}
None => unreachable!(), // None is for start of file, which should already be handled by the start_of_line matcher above.
@ -343,10 +343,7 @@ fn post<'b, 'g, 'r, 's>(
Ok((remaining, ()))
}

fn text_markup_end<'c>(
marker_symbol: &'c str,
contents_start_offset: usize,
) -> impl ContextMatcher + 'c {
fn text_markup_end(marker_symbol: &str, contents_start_offset: usize) -> impl ContextMatcher + '_ {
move |context, input: OrgSource<'_>| {
_text_markup_end(context, input, marker_symbol, contents_start_offset)
}
@ -364,7 +361,7 @@ fn _text_markup_end<'b, 'g, 'r, 's, 'c>(
) -> Res<OrgSource<'s>, OrgSource<'s>> {
if input.get_byte_offset() == contents_start_offset {
return Err(nom::Err::Error(CustomError::MyError(MyError(
"Text markup cannot be empty".into(),
"Text markup cannot be empty",
))));
}
not(preceded_by_whitespace(false))(input)?;
@ -499,7 +496,7 @@ fn _rematch_text_markup_object<'b, 'g, 'r, 's, 'x>(
let _enter = span.enter();
if exit_matcher_parser(context, remaining).is_ok() {
return Err(nom::Err::Error(CustomError::MyError(MyError(
"Parent exit matcher is triggering.".into(),
"Parent exit matcher is triggering.",
))));
}
}

@ -486,7 +486,7 @@ fn dayname_end<'b, 'g, 'r, 's>(
}))(input)
}

const fn time<'c>(
const fn time(
allow_rest: bool,
) -> impl for<'b, 'g, 'r, 's> Fn(RefContext<'b, 'g, 'r, 's>, OrgSource<'s>) -> Res<OrgSource<'s>, Time<'s>>
{
@ -590,6 +590,7 @@ fn time_range_rest_end<'b, 'g, 'r, 's>(
tag("-"),
parser_with_context!(time(true))(&parent_node),
)))(input);
#[allow(clippy::let_and_return)] // otherwise parent_node does not live long enough.
exit_contents
}

@ -28,10 +28,7 @@ pub(crate) const WORD_CONSTITUENT_CHARACTERS: &str =
"abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789";

/// Check if we are below a section of the given section type regardless of depth
pub(crate) fn in_section<'b, 'g, 'r, 's, 'x>(
context: RefContext<'b, 'g, 'r, 's>,
section_name: &'x str,
) -> bool {
pub(crate) fn in_section(context: RefContext<'_, '_, '_, '_>, section_name: &str) -> bool {
for thing in context.iter() {
match thing {
ContextElement::Context(name) if *name == section_name => return true,
@ -42,9 +39,9 @@ pub(crate) fn in_section<'b, 'g, 'r, 's, 'x>(
}

/// Checks if we are currently an immediate child of the given section type
pub(crate) fn immediate_in_section<'b, 'g, 'r, 's, 'x>(
context: RefContext<'b, 'g, 'r, 's>,
section_name: &'x str,
pub(crate) fn immediate_in_section(
context: RefContext<'_, '_, '_, '_>,
section_name: &str,
) -> bool {
for thing in context.iter() {
match thing {
@ -133,7 +130,7 @@ pub(crate) fn start_of_line<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, ()>
Ok((input, ()))
} else {
Err(nom::Err::Error(CustomError::MyError(MyError(
"Not at start of line".into(),
"Not at start of line",
))))
}
}
@ -156,7 +153,7 @@ fn _preceded_by_whitespace<'s>(
.unwrap_or(allow_start_of_file)
{
return Err(nom::Err::Error(CustomError::MyError(MyError(
"Must be preceded by a whitespace character.".into(),
"Must be preceded by a whitespace character.",
))));
}
Ok((input, ()))
@ -198,9 +195,9 @@ pub(crate) fn text_until_exit<'b, 'g, 'r, 's>(

#[allow(dead_code)]
fn not_yet_implemented() -> Res<OrgSource<'static>, ()> {
return Err(nom::Err::Error(CustomError::MyError(MyError(
"Not implemented yet.".into(),
))));
Err(nom::Err::Error(CustomError::MyError(MyError(
"Not implemented yet.",
))))
}

#[allow(dead_code)]
@ -234,29 +231,27 @@ where
/// Match single space or tab.
///
/// In org-mode syntax, spaces and tabs are often (but not always!) interchangeable.
pub(crate) fn org_space<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, char> {
pub(crate) fn org_space(input: OrgSource<'_>) -> Res<OrgSource<'_>, char> {
one_of(" \t")(input)
}

/// Matches a single space, tab, line ending, or end of file.
///
/// In org-mode syntax there are often delimiters that could be any whitespace at all or the end of file.
pub(crate) fn org_space_or_line_ending<'s>(
input: OrgSource<'s>,
) -> Res<OrgSource<'s>, OrgSource<'s>> {
pub(crate) fn org_space_or_line_ending(input: OrgSource<'_>) -> Res<OrgSource<'_>, OrgSource<'_>> {
alt((recognize(org_space), org_line_ending))(input)
}

/// Match a line break or the end of the file.
///
/// In org-mode syntax, the end of the file can serve the same purpose as a line break syntactically.
pub(crate) fn org_line_ending<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, OrgSource<'s>> {
pub(crate) fn org_line_ending(input: OrgSource<'_>) -> Res<OrgSource<'_>, OrgSource<'_>> {
alt((line_ending, eof))(input)
}

/// Match the whitespace at the beginning of a line and give it an indentation level.
pub(crate) fn indentation_level<'b, 'g, 'r, 's>(
context: RefContext<'b, 'g, 'r, 's>,
pub(crate) fn indentation_level<'s>(
context: RefContext<'_, '_, '_, 's>,
input: OrgSource<'s>,
) -> Res<OrgSource<'s>, (IndentationLevel, OrgSource<'s>)> {
let (remaining, leading_whitespace) = space0(input)?;
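The signature rewrites in this file are clippy::needless_lifetimes: when a function has a single borrowed input, the named lifetime can be elided and '_ marks the borrow in the return type. Using the crate's own to_lowercase shape as an example (the else branch is assumed to return the borrowed input unchanged):

    use std::borrow::Cow;

    // Before: pub(crate) fn to_lowercase<'s>(input: &'s str) -> Cow<'s, str>
    pub(crate) fn to_lowercase(input: &str) -> Cow<'_, str> {
        if input.chars().any(|c| !c.is_lowercase()) {
            Cow::Owned(input.to_lowercase())
        } else {
            // Assumed: no allocation when the input is already lowercase.
            Cow::Borrowed(input)
        }
    }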

@ -19,7 +19,7 @@ pub struct AffiliatedKeyword<'s> {
pub value: AffiliatedKeywordValue<'s>,
}

#[derive(Debug)]
#[derive(Debug, Default)]
pub struct AffiliatedKeywords<'s> {
pub(crate) keywords: BTreeMap<String, AffiliatedKeywordValue<'s>>,
}
@ -27,11 +27,3 @@ pub struct AffiliatedKeywords<'s> {
pub trait GetAffiliatedKeywords<'s> {
fn get_affiliated_keywords<'a>(&'a self) -> &'a AffiliatedKeywords<'s>;
}

impl<'s> Default for AffiliatedKeywords<'s> {
fn default() -> Self {
AffiliatedKeywords {
keywords: BTreeMap::new(),
}
}
}
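Dropping the hand-written Default impl in favour of #[derive(Debug, Default)] is clippy::derivable_impls. A reduced sketch of the same shape (the map's value type is simplified here, so this is not the crate's real definition):

    use std::collections::BTreeMap;

    // Before: #[derive(Debug)] plus a manual `impl Default` that only built an empty map.
    #[derive(Debug, Default)]
    pub struct AffiliatedKeywords<'s> {
        pub(crate) keywords: BTreeMap<String, &'s str>,
    }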

@ -101,9 +101,8 @@ impl<'s> Heading<'s> {
_ => None,
})
.flat_map(|section| section.children.iter())
.take_while(|element| match element {
Element::Planning(_) | Element::PropertyDrawer(_) => true,
_ => false,
.take_while(|element| {
matches!(element, Element::Planning(_) | Element::PropertyDrawer(_))
})
.find_map(|element| match element {
Element::PropertyDrawer(property_drawer) => Some(property_drawer),
@ -121,10 +120,8 @@ impl<'s> Document<'s> {
.iter()
.flat_map(|zeroth_section| zeroth_section.children.iter());
let property_drawer = zeroth_section_children
.take_while(|element| match element {
Element::Comment(_) => true,
Element::PropertyDrawer(_) => true,
_ => false,
.take_while(|element| {
matches!(element, Element::Comment(_) | Element::PropertyDrawer(_))
})
.find_map(|element| match element {
Element::PropertyDrawer(property_drawer) => Some(property_drawer),

@ -26,6 +26,7 @@ use super::SetSource;
use super::SpecialBlock;
use super::StandardProperties;

#[allow(clippy::large_enum_variant)]
#[derive(Debug)]
pub enum Element<'s> {
Paragraph(Paragraph<'s>),

@ -170,12 +170,10 @@ impl<'s> Paragraph<'s> {
///
/// This is used for elements that support an "empty" content like greater blocks.
pub(crate) fn of_text(input: &'s str) -> Self {
let mut objects = Vec::with_capacity(1);
objects.push(Object::PlainText(PlainText { source: input }));
Paragraph {
source: input,
affiliated_keywords: AffiliatedKeywords::default(),
children: objects,
children: vec![Object::PlainText(PlainText { source: input })],
}
}
}

@ -263,6 +263,7 @@ pub struct Superscript<'s> {
pub children: Vec<Object<'s>>,
}

// TODO: Perhaps there is an optimization of converting to unix time we can do to shrink this struct. (ref: clippy::large_enum_variant on Element)
#[derive(Debug, PartialEq, Clone)]
pub struct Timestamp<'s> {
pub source: &'s str,
@ -315,7 +316,7 @@ pub struct Minute(MinuteInner);

impl Year {
// TODO: Make a real error type instead of a boxed any error.
pub fn new<'s>(source: &'s str) -> Result<Self, Box<dyn std::error::Error>> {
pub fn new(source: &str) -> Result<Self, Box<dyn std::error::Error>> {
let year = source.parse::<YearInner>()?;
Ok(Year(year))
}
@ -327,9 +328,9 @@ impl Year {

impl Month {
// TODO: Make a real error type instead of a boxed any error.
pub fn new<'s>(source: &'s str) -> Result<Self, Box<dyn std::error::Error>> {
pub fn new(source: &str) -> Result<Self, Box<dyn std::error::Error>> {
let month = source.parse::<MonthInner>()?;
if month < 1 || month > 12 {
if !(1..=12).contains(&month) {
Err("Month exceeds possible range.")?;
}
Ok(Month(month))
@ -342,9 +343,9 @@ impl Month {

impl DayOfMonth {
// TODO: Make a real error type instead of a boxed any error.
pub fn new<'s>(source: &'s str) -> Result<Self, Box<dyn std::error::Error>> {
pub fn new(source: &str) -> Result<Self, Box<dyn std::error::Error>> {
let day_of_month = source.parse::<DayOfMonthInner>()?;
if day_of_month < 1 || day_of_month > 31 {
if !(1..=31).contains(&day_of_month) {
Err("Day of month exceeds possible range.")?;
}
Ok(DayOfMonth(day_of_month))
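The Month and DayOfMonth constructors switch from manual bound checks to RangeInclusive::contains, which is clippy::manual_range_contains. A self-contained sketch of the same validation (the function name is illustrative):

    fn validate_month(month: u8) -> Result<u8, &'static str> {
        // Before: if month < 1 || month > 12 { ... }
        // After: the range states both bounds in one expression.
        if !(1..=12).contains(&month) {
            return Err("Month exceeds possible range.");
        }
        Ok(month)
    }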
@ -357,7 +358,7 @@ impl DayOfMonth {

impl Hour {
// TODO: Make a real error type instead of a boxed any error.
pub fn new<'s>(source: &'s str) -> Result<Self, Box<dyn std::error::Error>> {
pub fn new(source: &str) -> Result<Self, Box<dyn std::error::Error>> {
let hour = source.parse::<HourInner>()?;
if hour > 23 {
Err("Hour exceeds possible range.")?;
@ -372,7 +373,7 @@ impl Hour {

impl Minute {
// TODO: Make a real error type instead of a boxed any error.
pub fn new<'s>(source: &'s str) -> Result<Self, Box<dyn std::error::Error>> {
pub fn new(source: &str) -> Result<Self, Box<dyn std::error::Error>> {
let minute = source.parse::<MinuteInner>()?;
if minute > 59 {
Err("Minute exceeds possible range.")?;
@ -731,21 +732,21 @@ impl<'s> RegularLink<'s> {
/// Coalesce whitespace if the raw_link contains line breaks.
///
/// This corresponds to the output you would get from the upstream emacs org-mode AST.
pub fn get_raw_link<'b>(&'b self) -> Cow<'b, str> {
pub fn get_raw_link(&self) -> Cow<'_, str> {
coalesce_whitespace_if_line_break(&self.raw_link)
}

/// Coalesce whitespace if the path contains line breaks.
///
/// This corresponds to the output you would get from the upstream emacs org-mode AST.
pub fn get_path<'b>(&'b self) -> Cow<'b, str> {
pub fn get_path(&self) -> Cow<'_, str> {
coalesce_whitespace_if_line_break(&self.path)
}

/// Coalesce whitespace if the search_option contains line breaks.
///
/// This corresponds to the output you would get from the upstream emacs org-mode AST.
pub fn get_search_option<'b>(&'b self) -> Option<Cow<'b, str>> {
pub fn get_search_option(&self) -> Option<Cow<'_, str>> {
self.search_option
.as_ref()
.map(|search_option| coalesce_whitespace_if_line_break(search_option.borrow()))
@ -782,7 +783,7 @@ impl<'s> OrgMacro<'s> {
pub fn get_args<'b>(&'b self) -> impl Iterator<Item = Cow<'s, str>> + 'b {
self.args
.iter()
.map(|arg| coalesce_whitespace_escaped('\\', |c| ",".contains(c))(*arg))
.map(|arg| coalesce_whitespace_escaped('\\', |c| ",".contains(c))(arg))
}
}

@ -3,7 +3,7 @@ use std::borrow::Cow;
/// Removes all whitespace from a string if any line breaks are present.
///
/// Example: "foo bar" => "foo bar" but "foo \n bar" => "foobar".
pub(crate) fn remove_whitespace_if_line_break<'s>(input: &'s str) -> Cow<'s, str> {
pub(crate) fn remove_whitespace_if_line_break(input: &str) -> Cow<'_, str> {
let mut state = RemoveWhitespaceIfLineBreakState::Normal;
for (offset, c) in input.char_indices() {
match (&mut state, c) {
@ -49,7 +49,7 @@ enum RemoveWhitespaceIfLineBreakState {
/// Removes all line breaks from a string
///
/// Example: "foo bar" => "foo bar" but "foo \n bar" => "foo bar".
pub(crate) fn remove_line_break<'s>(input: &'s str) -> Cow<'s, str> {
pub(crate) fn remove_line_break(input: &str) -> Cow<'_, str> {
let mut state = RemoveLineBreakState::Normal;
for (offset, c) in input.char_indices() {
match (&mut state, c) {
@ -79,7 +79,7 @@ enum RemoveLineBreakState {
/// Removes all whitespace from a string if any line breaks are present.
///
/// Example: "foo bar" => "foo bar" but "foo \n bar" => "foobar".
pub(crate) fn coalesce_whitespace_if_line_break<'s>(input: &'s str) -> Cow<'s, str> {
pub(crate) fn coalesce_whitespace_if_line_break(input: &str) -> Cow<'_, str> {
let mut state = CoalesceWhitespaceIfLineBreakState::Normal;
for (offset, c) in input.char_indices() {
match (&mut state, c) {
@ -120,15 +120,13 @@ pub(crate) fn coalesce_whitespace_if_line_break<'s>(input: &'s str) -> Cow<'s, s
ret.push(c);
}
// Do nothing if preceding character was whitespace and this character also is whitespace.
} else if c.is_ascii_whitespace() {
// Preceding character was not whitespace but this is.
sub_loop_in_whitespace = true;
ret.push(' ');
} else {
if c.is_ascii_whitespace() {
// Preceding character was not whitespace but this is.
sub_loop_in_whitespace = true;
ret.push(' ');
} else {
// Preceding character was not whitespace and this is not either.
ret.push(c);
}
// Preceding character was not whitespace and this is not either.
ret.push(c);
}
}
if !*in_whitespace {
@ -202,7 +200,7 @@ enum CoalesceWhitespaceIfLineBreakState {
///
/// Example: "foo bar" => "foobar" and "foo \n bar" => "foobar".
#[allow(dead_code)]
pub(crate) fn coalesce_whitespace<'s>(input: &'s str) -> Cow<'s, str> {
pub(crate) fn coalesce_whitespace(input: &str) -> Cow<'_, str> {
let mut state = CoalesceWhitespace::Normal;
for (offset, c) in input.char_indices() {
match (&mut state, c) {
@ -248,18 +246,18 @@ enum CoalesceWhitespace {
/// Removes all whitespace from a string and handle escaping characters.
///
/// Example: "foo bar" => "foobar" and "foo \n bar" => "foobar" but if the escape character is backslash and comma is an escapable character than "foo\,bar" becomes "foo,bar".
pub(crate) fn coalesce_whitespace_escaped<'c, C: Fn(char) -> bool>(
pub(crate) fn coalesce_whitespace_escaped<C: Fn(char) -> bool>(
escape_character: char,
escapable_characters: C,
) -> impl for<'s> Fn(&'s str) -> Cow<'s, str> {
move |input| impl_coalesce_whitespace_escaped(input, escape_character, &escapable_characters)
}

fn impl_coalesce_whitespace_escaped<'s, C: Fn(char) -> bool>(
input: &'s str,
fn impl_coalesce_whitespace_escaped<C: Fn(char) -> bool>(
input: &str,
escape_character: char,
escapable_characters: C,
) -> Cow<'s, str> {
) -> Cow<'_, str> {
let mut state = CoalesceWhitespaceEscaped::Normal {
in_whitespace: false,
};
@ -451,7 +449,7 @@ enum CoalesceWhitespaceEscaped {
},
}

pub(crate) fn to_lowercase<'s>(input: &'s str) -> Cow<'s, str> {
pub(crate) fn to_lowercase(input: &str) -> Cow<'_, str> {
if input.chars().any(|c| !c.is_lowercase()) {
Cow::Owned(input.to_lowercase())
} else {
@ -1,5 +1,3 @@
#![cfg(feature = "compare")]

#[feature(exit_status_error)]

include!(concat!(env!("OUT_DIR"), "/tests.rs"));

@ -14,10 +14,9 @@ async fn autogen_default_{name}() -> Result<(), Box<dyn std::error::Error>> {{
async fn autogen_la_{name}() -> Result<(), Box<dyn std::error::Error>> {{
let org_path = "{path}";
let org_contents = std::fs::read_to_string(org_path).expect("Read org file.");
let global_settings = {{
let mut global_settings = organic::settings::GlobalSettings::default();
global_settings.list_allow_alphabetical = true;
global_settings
let global_settings = organic::settings::GlobalSettings {{
list_allow_alphabetical: true,
..Default::default()
}};
assert!(organic::compare::run_anonymous_compare_with_settings(org_contents.as_str(), &global_settings, false).await?);
Ok(())
@ -28,10 +27,9 @@ async fn autogen_la_{name}() -> Result<(), Box<dyn std::error::Error>> {{
async fn autogen_t1_{name}() -> Result<(), Box<dyn std::error::Error>> {{
let org_path = "{path}";
let org_contents = std::fs::read_to_string(org_path).expect("Read org file.");
let global_settings = {{
let mut global_settings = organic::settings::GlobalSettings::default();
global_settings.tab_width = 1;
global_settings
let global_settings = organic::settings::GlobalSettings {{
tab_width: 1,
..Default::default()
}};
assert!(organic::compare::run_anonymous_compare_with_settings(org_contents.as_str(), &global_settings, false).await?);
Ok(())
@ -42,10 +40,9 @@ async fn autogen_t1_{name}() -> Result<(), Box<dyn std::error::Error>> {{
async fn autogen_t16_{name}() -> Result<(), Box<dyn std::error::Error>> {{
let org_path = "{path}";
let org_contents = std::fs::read_to_string(org_path).expect("Read org file.");
let global_settings = {{
let mut global_settings = organic::settings::GlobalSettings::default();
global_settings.tab_width = 16;
global_settings
let global_settings = organic::settings::GlobalSettings {{
tab_width: 16,
..Default::default()
}};
assert!(organic::compare::run_anonymous_compare_with_settings(org_contents.as_str(), &global_settings, false).await?);
Ok(())
@ -56,10 +53,9 @@ async fn autogen_t16_{name}() -> Result<(), Box<dyn std::error::Error>> {{
async fn autogen_odd_{name}() -> Result<(), Box<dyn std::error::Error>> {{
let org_path = "{path}";
let org_contents = std::fs::read_to_string(org_path).expect("Read org file.");
let global_settings = {{
let mut global_settings = organic::settings::GlobalSettings::default();
global_settings.odd_levels_only = organic::settings::HeadlineLevelFilter::Odd;
global_settings
let global_settings = organic::settings::GlobalSettings {{
odd_levels_only: organic::settings::HeadlineLevelFilter::Odd,
..Default::default()
}};
assert!(organic::compare::run_anonymous_compare_with_settings(org_contents.as_str(), &global_settings, false).await?);
Ok(())