use nom::branch::alt;
use nom::bytes::complete::is_not;
use nom::bytes::complete::tag_no_case;
use nom::bytes::complete::take_until;
use nom::character::complete::anychar;
use nom::character::complete::line_ending;
use nom::character::complete::space0;
use nom::character::complete::space1;
use nom::combinator::eof;
use nom::combinator::map;
use nom::combinator::peek;
use nom::combinator::recognize;
use nom::multi::many_till;
use nom::multi::separated_list0;
use nom::sequence::tuple;

use super::keyword::filtered_keyword;
use super::keyword_todo::todo_keywords;
use super::OrgSource;
use crate::context::HeadlineLevelFilter;
use crate::error::CustomError;
use crate::error::Res;
use crate::settings::GlobalSettings;
use crate::types::AstNode;
use crate::types::Document;
use crate::types::Keyword;

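/// Scan the entire input for in-buffer settings such as `#+todo:` and `#+startup:`.
///
/// Returns the remaining input after the last candidate that was examined, along with
/// the keywords that were recognized. A `#+` candidate is skipped when it is not the
/// first thing on its line or when its key is not a recognized in-buffer setting.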
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
pub(crate) fn scan_for_in_buffer_settings<'s>(
    input: OrgSource<'s>,
) -> Res<OrgSource<'s>, Vec<Keyword<'s>>> {
    // TODO: Write some tests to make sure this is functioning properly.

    let mut keywords = Vec::new();
    let mut remaining = input;
    loop {
        // Skip text until possible in_buffer_setting
        let start_of_pound = take_until::<_, _, CustomError>("#+")(remaining);
        let start_of_pound = if let Ok((start_of_pound, _)) = start_of_pound {
            start_of_pound
        } else {
            break;
        };
        // Go backwards to the start of the line and run the filtered_keyword parser
        let start_of_line = start_of_pound.get_start_of_line();

        let (remain, maybe_kw) = match filtered_keyword(in_buffer_settings_key)(start_of_line) {
            Ok((remain, kw)) => (remain, Some(kw)),
            Err(_) => {
                let end_of_line = take_until::<_, _, CustomError>("\n")(start_of_pound);
                if let Ok((end_of_line, _)) = end_of_line {
                    (end_of_line, None)
                } else {
                    break;
                }
            }
        };

        if let Some(kw) = maybe_kw {
            keywords.push(kw);
        }
        remaining = remain;
    }

    Ok((remaining, keywords))
}

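/// Matches a keyword key (case-insensitive) only if it is one of the keys that act as
/// in-buffer settings.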
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
fn in_buffer_settings_key<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, OrgSource<'s>> {
    alt((
        tag_no_case("archive"),
        tag_no_case("category"),
        tag_no_case("columns"),
        tag_no_case("filetags"),
        tag_no_case("link"),
        tag_no_case("priorities"),
        tag_no_case("property"),
        tag_no_case("seq_todo"),
        tag_no_case("setupfile"),
        tag_no_case("startup"),
        tag_no_case("tags"),
        tag_no_case("todo"),
        tag_no_case("typ_todo"),
    ))(input)
}

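/// Produce a new `GlobalSettings` by layering the parsed in-buffer settings (todo
/// keyword definitions, `#+startup:` options, and `#+link:` templates) on top of
/// `original_settings`.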
#[cfg_attr(
    feature = "tracing",
    tracing::instrument(level = "debug", skip(original_settings))
)]
pub(crate) fn apply_in_buffer_settings<'g, 's, 'sf>(
    keywords: Vec<Keyword<'sf>>,
    original_settings: &'g GlobalSettings<'g, 's>,
) -> Result<GlobalSettings<'g, 's>, CustomError> {
    let mut new_settings = original_settings.clone();

    // Todo Keywords
    for kw in keywords.iter().filter(|kw| {
        kw.key.eq_ignore_ascii_case("todo")
            || kw.key.eq_ignore_ascii_case("seq_todo")
            || kw.key.eq_ignore_ascii_case("typ_todo")
    }) {
        let (_, (in_progress_words, complete_words)) =
            todo_keywords(kw.value).map_err(|err| match err {
                nom::Err::Incomplete(_) => panic!("This parser does not support streaming."),
                nom::Err::Error(e) => e,
                nom::Err::Failure(e) => e,
            })?;
        new_settings
            .in_progress_todo_keywords
            .extend(in_progress_words.into_iter().map(str::to_string));
        new_settings
            .complete_todo_keywords
            .extend(complete_words.into_iter().map(str::to_string));
    }

    // Startup settings
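    // `odd` limits headlines to odd levels only; `oddeven` switches back to allowing
    // both odd and even headline levels.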
    for kw in keywords
        .iter()
        .filter(|kw| kw.key.eq_ignore_ascii_case("startup"))
    {
        let (_remaining, settings) = separated_list0(space1::<&str, CustomError>, is_not(" \t"))(
            kw.value,
        )
        .map_err(|err: nom::Err<_>| match err {
            nom::Err::Incomplete(_) => panic!("This parser does not support streaming."),
            nom::Err::Error(e) => e,
            nom::Err::Failure(e) => e,
        })?;
        if settings.contains(&"odd") {
            new_settings.odd_levels_only = HeadlineLevelFilter::Odd;
        }
        if settings.contains(&"oddeven") {
            new_settings.odd_levels_only = HeadlineLevelFilter::OddEven;
        }
    }

    // Link templates
    for kw in keywords
        .iter()
        .filter(|kw| kw.key.eq_ignore_ascii_case("link"))
    {
        let (_, (link_key, link_value)) = link_template(kw.value).map_err(|err| match err {
            nom::Err::Incomplete(_) => panic!("This parser does not support streaming."),
            nom::Err::Error(e) => e,
            nom::Err::Failure(e) => e,
        })?;
        new_settings
            .link_templates
            .insert(link_key.to_owned(), link_value.to_owned());
    }

    Ok(new_settings)
}

/// Apply in-buffer settings that do not impact parsing and therefore can be applied after parsing.
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
pub(crate) fn apply_post_parse_in_buffer_settings<'g, 's, 'sf>(document: &mut Document<'s>) {
    document.category = Into::<AstNode>::into(&*document)
        .into_iter()
        .filter_map(|ast_node| {
            if let AstNode::Keyword(ast_node) = ast_node {
                if ast_node.key.eq_ignore_ascii_case("category") {
                    return Some(ast_node);
                }
            }
            None
        })
        .last()
        .map(|kw| kw.value.to_owned());
}

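/// Split a `#+link:` keyword value into its key (the link abbreviation) and its
/// replacement template: the first whitespace-delimited token is the key and the rest
/// of the line is the replacement.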
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
fn link_template<'s>(input: &'s str) -> Res<&'s str, (&'s str, &'s str)> {
    let (remaining, key) = map(
        tuple((
            space0,
            recognize(many_till(anychar, peek(alt((space1, line_ending, eof))))),
        )),
        |(_, key)| key,
    )(input)?;

    let (remaining, replacement) = map(
        tuple((
            space1,
            recognize(many_till(
                anychar,
                peek(tuple((space0, alt((line_ending, eof))))),
            )),
        )),
        |(_, replacement)| replacement,
    )(remaining)?;

    Ok((remaining, (key, replacement)))
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn scan_test() -> Result<(), Box<dyn std::error::Error>> {
        let input = OrgSource::new(
            r#"
foo
#+archive: bar

baz #+category: lorem

#+label: ipsum

#+todo: dolar
cat
"#,
        );
        let (remaining, settings) = scan_for_in_buffer_settings(input)?;
        assert_eq!(Into::<&str>::into(remaining), "cat\n");
        let keys: Vec<_> = settings.iter().map(|kw| kw.key).collect();
        // category is skipped because it is not the first non-whitespace on the line.
        //
        // label is skipped because it is not an in-buffer setting.
        assert_eq!(keys, vec!["archive", "todo"]);
        Ok(())
    }
}