Fix clippy issues.
parent 818fca87f2
commit 397d4ea0bc
@@ -62,7 +62,7 @@ async fn get_post_directories(config: &Config) -> Result<Vec<PathBuf>, CustomError

 async fn load_blog_posts(config: &Config) -> Result<Vec<BlogPost>, CustomError> {
     let root_directory = config.get_root_directory().to_owned();
-    let post_directories = get_post_directories(&config).await?;
+    let post_directories = get_post_directories(config).await?;
     let load_jobs = post_directories
         .into_iter()
         .map(|path| tokio::spawn(BlogPost::load_blog_post(root_directory.clone(), path)));
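For context, the one-line change above looks like a fix for clippy's `needless_borrow` lint: `config` is already a `&Config`, so borrowing it again only produces a `&&Config` that is immediately auto-dereferenced. A minimal standalone sketch of the pattern (the names below are illustrative, not taken from this repository):

```rust
struct Config {
    site_title: String,
}

fn describe(config: &Config) -> String {
    format!("site: {}", config.site_title)
}

fn load(config: &Config) {
    // `config` is already a `&Config`; `describe(&config)` would compile via
    // auto-deref, but clippy::needless_borrow asks for `describe(config)`.
    println!("{}", describe(config));
}

fn main() {
    load(&Config {
        site_title: "example".into(),
    });
}
```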
@@ -13,7 +13,7 @@ pub(crate) async fn init_natter_folder(args: InitArgs) -> Result<(), CustomError

     let mut existing_entries = tokio::fs::read_dir(&args.path).await?;
     let first_entry = existing_entries.next_entry().await?;
-    if let Some(_) = first_entry {
+    if first_entry.is_some() {
         return Err("The directory is not empty. Aborting.".into());
     }

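Rewriting `if let Some(_) = first_entry` as `first_entry.is_some()` is the change clippy's `redundant_pattern_matching` lint asks for when a pattern match only checks for presence and discards the value. A small self-contained illustration (hypothetical values, not project code):

```rust
fn main() {
    let first_entry: Option<&str> = Some("index.org");

    // Before: a pattern match whose binding is immediately thrown away.
    if let Some(_) = first_entry {
        println!("directory is not empty");
    }

    // After: the same check stated directly, as clippy suggests.
    if first_entry.is_some() {
        println!("directory is not empty");
    }
}
```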
@@ -47,8 +47,7 @@ impl Config {
     }

     pub(crate) fn get_root_directory(&self) -> &Path {
-        &self
-            .config_path
+        self.config_path
             .parent()
             .expect("Config file must exist inside a directory.")
     }
@@ -86,8 +85,7 @@ impl Config {
         self.raw
             .stream
             .as_ref()
-            .map(|stream| stream.entries_per_page)
-            .flatten()
+            .and_then(|stream| stream.entries_per_page)
             .unwrap_or(5)
     }
 }
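Collapsing `.map(...).flatten()` on an `Option` into `.and_then(...)` (and, later in the diff, `.as_ref().map(String::as_str)` into `.as_deref()`) matches clippy's `map_flatten` and `option_as_ref_deref` lints. A hedged, standalone sketch of the `and_then` form with a made-up config type:

```rust
struct StreamConfig {
    entries_per_page: Option<usize>,
}

fn entries_per_page(stream: Option<&StreamConfig>) -> usize {
    // `.map(|s| s.entries_per_page).flatten()` yields the same Option<usize>,
    // but `.and_then(..)` expresses the chain in one step, as clippy suggests.
    stream
        .and_then(|stream| stream.entries_per_page)
        .unwrap_or(5)
}

fn main() {
    let stream = StreamConfig { entries_per_page: Some(10) };
    assert_eq!(entries_per_page(Some(&stream)), 10);
    assert_eq!(entries_per_page(None), 5); // falls back to the default page size
}
```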
@@ -2,7 +2,7 @@ use serde::Deserialize;
 use serde::Serialize;

 /// This is the struct for the natter.toml config file that ends up in each site's root directory.
-#[derive(Debug, Deserialize, Serialize)]
+#[derive(Debug, Deserialize, Serialize, Default)]
 pub(crate) struct RawConfig {
     pub(super) site_title: Option<String>,
     author: Option<String>,
@@ -12,28 +12,7 @@ pub(crate) struct RawConfig {
     pub(super) stream: Option<RawConfigStream>,
 }

-impl Default for RawConfig {
-    fn default() -> Self {
-        RawConfig {
-            site_title: None,
-            author: None,
-            email: None,
-            use_relative_paths: None,
-            web_root: None,
-            stream: None,
-        }
-    }
-}
-
-#[derive(Debug, Deserialize, Serialize)]
+#[derive(Debug, Deserialize, Serialize, Default)]
 pub(crate) struct RawConfigStream {
     pub(super) entries_per_page: Option<usize>,
 }
-
-impl Default for RawConfigStream {
-    fn default() -> Self {
-        RawConfigStream {
-            entries_per_page: None,
-        }
-    }
-}
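Deleting a hand-written `Default` impl that only sets every field to `None` and deriving `Default` instead is what clippy's `derivable_impls` lint suggests. A minimal sketch with placeholder fields (not the real `RawConfig`):

```rust
// Because every field is an Option, the derived Default (all fields None) is
// identical to the manual impl it replaces, so the derive attribute suffices.
#[derive(Debug, Default)]
struct RawConfigSketch {
    site_title: Option<String>,
    author: Option<String>,
}

fn main() {
    let config = RawConfigSketch::default();
    assert!(config.site_title.is_none() && config.author.is_none());
    println!("{:?}", config);
}
```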
@@ -117,14 +117,14 @@ pub(crate) enum RenderAstNode {
 }

 pub(crate) trait IntoRenderAstNode {
-    fn into_render_ast_node(
+    fn as_render_ast_node(
         &self,
         render_context: RenderContext<'_>,
     ) -> Result<RenderAstNode, CustomError>;
 }

 impl IntoRenderAstNode for IAstNode {
-    fn into_render_ast_node(
+    fn as_render_ast_node(
         &self,
         render_context: RenderContext<'_>,
     ) -> Result<RenderAstNode, CustomError> {
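The `into_render_ast_node` to `as_render_ast_node` rename (and the matching `into_ast_node` to `as_ast_node` rename later in the diff) follows clippy's `wrong_self_convention`: methods starting with `into_` are expected to consume `self`, while methods taking `&self` conventionally use `as_` or `to_`. A standalone sketch of the convention, with an invented type:

```rust
struct Node {
    text: String,
}

impl Node {
    // Borrows self, so a `to_`/`as_` prefix matches the naming convention.
    fn to_html(&self) -> String {
        format!("<p>{}</p>", self.text)
    }

    // Consumes self, so an `into_` prefix is the expected spelling.
    fn into_text(self) -> String {
        self.text
    }
}

fn main() {
    let node = Node { text: "hello".into() };
    println!("{}", node.to_html());
    println!("{}", node.into_text());
}
```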
@@ -14,6 +14,7 @@ use super::RenderDocumentElement;

 #[derive(Debug)]
 pub(crate) struct RenderBlogPostPageInput<'a> {
+    #[allow(dead_code)]
     post: &'a BlogPost,
     page: &'a BlogPostPage,
 }
@@ -85,7 +85,7 @@ render!(

         let children = original
             .original
-            .into_iter()
+            .iter()
             .enumerate()
             .map(|(i, blog_post)| {
                 RenderBlogStreamEntry::new(
@@ -39,7 +39,7 @@ render!(
         let contents = {
             let mut ret = Vec::new();
             for obj in original.contents.iter() {
-                ret.push(obj.into_render_ast_node(render_context.clone())?);
+                ret.push(obj.as_render_ast_node(render_context.clone())?);
             }
             ret
         };
@@ -113,14 +113,14 @@ pub(crate) enum IAstNode {
 }

 pub(crate) trait IntoIAstNode<'parse> {
-    fn into_ast_node<'orig>(
+    fn as_ast_node<'orig>(
         &'orig self,
         intermediate_context: IntermediateContext<'orig, 'parse>,
     ) -> BoxFuture<'orig, Result<IAstNode, CustomError>>;
 }

 impl<'parse> IntoIAstNode<'parse> for organic::types::DocumentElement<'parse> {
-    fn into_ast_node<'orig>(
+    fn as_ast_node<'orig>(
         &'orig self,
         intermediate_context: IntermediateContext<'orig, 'parse>,
     ) -> BoxFuture<'orig, Result<IAstNode, CustomError>> {
@@ -139,7 +139,7 @@ impl<'parse> IntoIAstNode<'parse> for organic::types::DocumentElement<'parse> {
 }

 impl<'parse> IntoIAstNode<'parse> for organic::types::Element<'parse> {
-    fn into_ast_node<'orig>(
+    fn as_ast_node<'orig>(
         &'orig self,
         intermediate_context: IntermediateContext<'orig, 'parse>,
     ) -> BoxFuture<'orig, Result<IAstNode, CustomError>> {
@@ -226,7 +226,7 @@ impl<'parse> IntoIAstNode<'parse> for organic::types::Element<'parse> {
 }

 impl<'parse> IntoIAstNode<'parse> for organic::types::Object<'parse> {
-    fn into_ast_node<'orig>(
+    fn as_ast_node<'orig>(
         &'orig self,
         intermediate_context: IntermediateContext<'orig, 'parse>,
     ) -> BoxFuture<'orig, Result<IAstNode, CustomError>> {
@@ -55,11 +55,10 @@ impl BlogPost {
         // Assign IDs to the targets
         organic::types::AstNode::from(parsed_document)
             .iter_all_ast_nodes()
-            .for_each(|node| match node {
-                organic::types::AstNode::Target(target) => {
+            .for_each(|node| {
+                if let organic::types::AstNode::Target(target) = node {
                     registry.get_target(target.value);
                 }
-                _ => {}
             });

         let registry = Arc::new(Mutex::new(registry));
@@ -96,15 +95,14 @@ impl BlogPost {
     pub(crate) fn get_date(&self) -> Option<&str> {
         let index_page_date = self
             .get_index_page()
-            .map(|index_page| index_page.date.as_ref().map(String::as_str))
-            .flatten();
+            .and_then(|index_page| index_page.date.as_deref());
         if index_page_date.is_some() {
             return index_page_date;
         }

         self.pages
             .iter()
-            .filter_map(|page| page.date.as_ref().map(String::as_str))
+            .filter_map(|page| page.date.as_deref())
             .next()
     }

@@ -21,7 +21,7 @@ pub(crate) fn get_web_path<D: AsRef<Path>, F: AsRef<Path>, P: AsRef<Path>>(
         containing_file_relative_to_output_directory
             .parent()
             .ok_or("File should exist in a folder.")?,
-        path_from_web_root.parent().unwrap_or(&Path::new("")),
+        path_from_web_root.parent().unwrap_or(Path::new("")),
     )
     .collect::<PathBuf>();
     // Subtracting 1 from the depth to "remove" the file name.
@@ -53,7 +53,7 @@ impl IRealFootnoteDefinition {
     pub(crate) fn get_reference_id(&self, id_addition: Option<&str>) -> String {
         let id_addition = id_addition
             .map(|id_addition| format!("sec{}.", id_addition))
-            .unwrap_or(String::default());
+            .unwrap_or_default();

         format!("{}fnr.{}", id_addition, self.get_display_label())
     }
@@ -64,7 +64,7 @@ impl IRealFootnoteDefinition {
     pub(crate) fn get_definition_id(&self, id_addition: Option<&str>) -> String {
         let id_addition = id_addition
             .map(|id_addition| format!("sec{}.", id_addition))
-            .unwrap_or(String::default());
+            .unwrap_or_default();

         format!("{}fn.{}", id_addition, self.get_display_label())
     }
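Swapping `.unwrap_or(String::default())` for `.unwrap_or_default()` here and in the footnote-reference hunks that follow avoids constructing the default string eagerly even when the `Option` is `Some`; this is the shape clippy's `or_fun_call`/`unwrap_or_default` guidance points to (the exact lint name is my assumption). A self-contained sketch of the resulting pattern, with an invented label argument rather than the project's real method:

```rust
// Hypothetical reimplementation of the id-building pattern shown above.
fn reference_id(id_addition: Option<&str>, display_label: &str) -> String {
    let id_addition = id_addition
        .map(|id_addition| format!("sec{}.", id_addition))
        // unwrap_or_default() produces the empty String only when needed.
        .unwrap_or_default();

    format!("{}fnr.{}", id_addition, display_label)
}

fn main() {
    assert_eq!(reference_id(Some("2"), "1"), "sec2.fnr.1");
    assert_eq!(reference_id(None, "1"), "fnr.1");
}
```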
@@ -38,7 +38,7 @@ impl IFootnoteReference {
     pub(crate) fn get_reference_id(&self, id_addition: Option<&str>) -> String {
         let id_addition = id_addition
             .map(|id_addition| format!("sec{}.", id_addition))
-            .unwrap_or(String::default());
+            .unwrap_or_default();

         if self.duplicate_offset == 0 {
             format!("{}fnr.{}", id_addition, self.get_display_label())
@@ -55,7 +55,7 @@ impl IFootnoteReference {
     pub(crate) fn get_definition_id(&self, id_addition: Option<&str>) -> String {
         let id_addition = id_addition
             .map(|id_addition| format!("sec{}.", id_addition))
-            .unwrap_or(String::default());
+            .unwrap_or_default();

         format!("{}fn.{}", id_addition, self.get_display_label())
     }
@@ -16,7 +16,7 @@ intermediate!(
     {
         let value: String = if original.value.starts_with("$$") && original.value.ends_with("$$") {
             format!("\\[{}\\]", &original.value[2..(original.value.len() - 2)])
-        } else if original.value.starts_with("$") && original.value.ends_with("$") {
+        } else if original.value.starts_with('$') && original.value.ends_with('$') {
             format!("\\({}\\)", &original.value[1..(original.value.len() - 1)])
         } else {
             original.value.to_owned()
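Using a `char` pattern (`'$'`) instead of a one-character string (`"$"`) is clippy's `single_char_pattern` fix; the `&str` form works, but a character comparison is more direct. A tiny hedged example, not the project's actual code:

```rust
fn main() {
    let value = "$x + y$";
    // clippy::single_char_pattern prefers the char form for one-character matches.
    if value.starts_with('$') && value.ends_with('$') {
        // Strip the delimiters, mirroring the inline-math handling shown above.
        println!("inline math: \\({}\\)", &value[1..value.len() - 1]);
    }
}
```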
@@ -29,11 +29,11 @@ intermediate!(
         let mut ret = Vec::new();

         // Special case for list items with only paragraphs and sublists as their children. In those cases, the paragraph tags are omitted.
-        let is_simple_list_item = original.children.iter().all(|child| match child {
-            organic::types::Element::Paragraph(_) | organic::types::Element::PlainList(_) => {
-                true
-            }
-            _ => false,
+        let is_simple_list_item = original.children.iter().all(|child| {
+            matches!(
+                child,
+                organic::types::Element::Paragraph(_) | organic::types::Element::PlainList(_)
+            )
         });
         if is_simple_list_item {
             for elem in original.children.iter() {
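A `match` whose arms only produce `true` or `false` is what clippy's `match_like_matches_macro` lint rewrites with `matches!`, as the hunk above does. A standalone sketch using a stand-in enum rather than the organic crate's types:

```rust
enum Element {
    Paragraph,
    PlainList,
    Table,
}

fn is_simple(children: &[Element]) -> bool {
    // matches! collapses the true/false match into a single expression.
    children
        .iter()
        .all(|child| matches!(child, Element::Paragraph | Element::PlainList))
}

fn main() {
    assert!(is_simple(&[Element::Paragraph, Element::PlainList]));
    assert!(!is_simple(&[Element::Paragraph, Element::Table]));
}
```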
@@ -52,9 +52,9 @@ impl<'orig, 'parse> Registry<'orig, 'parse> {
 pub(crate) async fn get_footnote_reference_id<'orig, 'parse>(
     intermediate_context: IntermediateContext<'orig, 'parse>,
     label: Option<&'parse str>,
-    definition: &'orig Vec<Object<'parse>>,
+    definition: &'orig [Object<'parse>],
 ) -> Result<(usize, usize), CustomError> {
-    if let None = label {
+    if label.is_none() {
         // If it has no label then it must always get a new ID.
         let contents = convert_reference_contents(intermediate_context.clone(), definition).await?;
         let pos = {
@@ -148,7 +148,7 @@ pub(crate) async fn register_footnote_definition<'orig, 'parse>(

 async fn convert_reference_contents<'orig, 'parse>(
     intermediate_context: IntermediateContext<'orig, 'parse>,
-    contents: &'orig Vec<Object<'parse>>,
+    contents: &'orig [Object<'parse>],
 ) -> Result<Vec<IAstNode>, CustomError> {
     let children = {
         let mut ret = Vec::new();
@@ -159,23 +159,19 @@ async fn convert_reference_contents<'orig, 'parse>(
     };
     let containing_paragraph =
         IParagraph::artificial(intermediate_context.clone(), children, 0).await?;
-    let contents = {
-        let mut ret = Vec::new();
-        ret.push(IAstNode::Paragraph(containing_paragraph));
-        ret
-    };
+    let contents = vec![IAstNode::Paragraph(containing_paragraph)];

     Ok(contents)
 }

 async fn convert_definition_contents<'orig, 'parse>(
     intermediate_context: IntermediateContext<'orig, 'parse>,
-    contents: &'orig Vec<Element<'parse>>,
+    contents: &'orig [Element<'parse>],
 ) -> Result<Vec<IAstNode>, CustomError> {
     let contents = {
         let mut ret = Vec::new();
         for obj in contents.iter() {
-            ret.push(obj.into_ast_node(intermediate_context.clone()).await?);
+            ret.push(obj.as_ast_node(intermediate_context.clone()).await?);
         }
         ret
     };
@@ -190,8 +186,7 @@ pub(crate) async fn promote_footnote_definition<'orig, 'parse>(
 ) -> Result<(), CustomError> {
     let definition = {
         let mut registry = intermediate_context.registry.lock().unwrap();
-        let definition = registry.on_deck_footnote_ids.remove(label);
-        definition
+        registry.on_deck_footnote_ids.remove(label)
     };
     if let Some(elements) = definition {
         let existing_id = {
@@ -67,10 +67,10 @@ enum GroupIntoSectionsState<'orig, 'parse> {
 }

 fn group_into_sections<'orig, 'parse>(
-    rows: &'orig Vec<organic::types::TableRow<'parse>>,
+    rows: &'orig [organic::types::TableRow<'parse>],
 ) -> Vec<Vec<&'orig organic::types::TableRow<'parse>>> {
     let mut sections = Vec::new();
-    let mut rows = rows.into_iter();
+    let mut rows = rows.iter();
     let mut state = GroupIntoSectionsState::NonSection;
     loop {
         state = match (state, rows.next()) {
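The `&Vec<T>` to `&[T]` parameter changes in this and the preceding footnote hunks follow clippy's `ptr_arg` lint: a slice parameter accepts `Vec`s, arrays, and sub-slices alike, and `.iter()` replaces `.into_iter()` on the reference (clippy's `into_iter_on_ref`). A short sketch with made-up row data:

```rust
// clippy::ptr_arg: &[T] is more general than &Vec<T> and costs nothing for
// callers that already hold a Vec.
fn first_non_empty(rows: &[String]) -> Option<&String> {
    rows.iter().find(|row| !row.is_empty())
}

fn main() {
    let rows = vec![String::new(), "| header |".to_string()];
    assert_eq!(first_non_empty(&rows).map(String::as_str), Some("| header |"));
}
```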
@@ -5,6 +5,7 @@ use organic::types::StandardProperties;
 #[derive(Debug, Clone)]
 pub(crate) struct ITarget {
     pub(crate) id: String,
+    #[allow(dead_code)]
     value: String,
     pub(crate) post_blank: organic::types::PostBlank,
 }
@@ -18,10 +18,7 @@ mod render;

 fn main() -> Result<ExitCode, CustomError> {
     let rt = tokio::runtime::Runtime::new()?;
-    rt.block_on(async {
-        let main_body_result = main_body().await;
-        main_body_result
-    })
+    rt.block_on(async { main_body().await })
 }

 async fn main_body() -> Result<ExitCode, CustomError> {
@@ -19,7 +19,7 @@ impl<'a> DusterRenderer<'a> {

 impl<'a> RendererIntegration<'a> for DusterRenderer<'a> {
     fn load_template(&mut self, name: &'a str, contents: &'a str) -> Result<(), CustomError> {
-        let compiled_template = duster::renderer::compile_template(contents.as_ref())?;
+        let compiled_template = duster::renderer::compile_template(contents)?;
         self.templates.insert(name, compiled_template);
         Ok(())
     }