47 Commits

Author SHA1 Message Date
Tom Alexander
acc29e7977 Publish version 0.1.11.
Some checks failed
rustfmt Build rustfmt has succeeded
clippy Build clippy has succeeded
rust-build Build rust-build has succeeded
rust-foreign-document-test Build rust-foreign-document-test has failed
rust-test Build rust-test has succeeded
2023-10-16 19:53:46 -04:00
Tom Alexander
ebc0a30035 Merge branch 'clippy_ci_job' 2023-10-16 19:50:01 -04:00
Tom Alexander
e2d55e13d3 Fix some clippy errors that didn't appear on my host version of clippy.
Some checks failed
clippy Build clippy has succeeded
rust-foreign-document-test Build rust-foreign-document-test has failed
rust-build Build rust-build has succeeded
rust-test Build rust-test has succeeded
2023-10-16 19:43:15 -04:00
Tom Alexander
e4d9c5f467 Add makefile command to run clippy through docker. 2023-10-16 19:38:45 -04:00
Tom Alexander
d8e3a85ef7 We need to add dependencies so we are now building a container. 2023-10-16 19:34:53 -04:00
Tom Alexander
464685b52b Use a cargo cache for the clippy CI job.
Some checks failed
clippy Build clippy has failed
rust-foreign-document-test Build rust-foreign-document-test has failed
rust-build Build rust-build has succeeded
rust-test Build rust-test has succeeded
2023-10-16 19:27:15 -04:00
Tom Alexander
5fed4e80a7 Add a CI job to run clippy for every push. 2023-10-16 19:22:59 -04:00
Tom Alexander
e53140426f Merge branch 'clippy'
Some checks failed
rustfmt Build rustfmt has succeeded
rust-foreign-document-test Build rust-foreign-document-test has failed
rust-build Build rust-build has succeeded
rust-test Build rust-test has succeeded
2023-10-16 19:14:19 -04:00
Tom Alexander
9a4d290cf8 Apply more suggestions. 2023-10-16 19:12:25 -04:00
Tom Alexander
acd24d6198 Apply more suggestions. 2023-10-16 19:02:34 -04:00
Tom Alexander
880b00ef3f Apply more suggestions. 2023-10-16 18:54:41 -04:00
Tom Alexander
3069711447 Apply more suggestions. 2023-10-16 18:29:21 -04:00
Tom Alexander
4b6c717812 Apply more suggestions. 2023-10-16 17:58:52 -04:00
Tom Alexander
1d329cc310 Apply more suggestions. 2023-10-16 17:28:28 -04:00
Tom Alexander
b4f9a3b9b6 Apply more suggestions. 2023-10-16 17:14:44 -04:00
Tom Alexander
2dd5246506 Apply more suggestions. 2023-10-16 17:03:39 -04:00
Tom Alexander
4ba0e3611b Apply more suggestions. 2023-10-16 17:03:39 -04:00
Tom Alexander
728f79b86c Apply more suggestions. 2023-10-16 17:03:39 -04:00
Tom Alexander
192a4a2891 Remove unnecessary lifetimes. 2023-10-16 17:03:39 -04:00
Tom Alexander
fafd85fb30 Apply some clippy fixes. 2023-10-16 17:03:39 -04:00
Tom Alexander
1c23065329 Add a clippy command to the makefile. 2023-10-16 17:03:39 -04:00
Tom Alexander
ed105b04ad Merge branch 'cargo_bench' 2023-10-16 16:03:56 -04:00
Tom Alexander
f10efec21d No performance change switching affiliated_key to using element macro. 2023-10-16 15:57:18 -04:00
Tom Alexander
72b4cf8e71 Add the first use of the rust benchmark tests. 2023-10-16 15:50:08 -04:00
Tom Alexander
547fc40dbe No measurable performance improvement with native builds over LTO release builds.
Some checks failed
rustfmt Build rustfmt has succeeded
rust-foreign-document-test Build rust-foreign-document-test has failed
rust-build Build rust-build has succeeded
rust-test Build rust-test has succeeded
Leaving the code for this commented out because it involved an unstable cargo feature without showing any benefit. I would like to revisit this later.
2023-10-16 15:21:36 -04:00
Tom Alexander
9f1671658d Merge branch 'object_parser_perf'
Some checks failed
rustfmt Build rustfmt has succeeded
rust-foreign-document-test Build rust-foreign-document-test has failed
rust-build Build rust-build has succeeded
rust-test Build rust-test has succeeded
2023-10-16 15:04:04 -04:00
Tom Alexander
18d0676fad Clean up. 2023-10-16 15:03:23 -04:00
Tom Alexander
7833a58461 Apply a similar optimization to the detect element parser but also unify detection of affiliated keywords. 2023-10-16 14:55:40 -04:00
Tom Alexander
0020d71089 Extend that optimization to more object parsers. 2023-10-16 14:41:12 -04:00
Tom Alexander
cfdf39d1fa Significantly reduce the use of closures in the object parsers. 2023-10-16 14:25:02 -04:00
Tom Alexander
26f1eae9a1 Merge branch 'planning_before_property_drawer'
Some checks failed
rustfmt Build rustfmt has succeeded
rust-foreign-document-test Build rust-foreign-document-test has failed
rust-build Build rust-build has succeeded
rust-test Build rust-test has succeeded
2023-10-16 13:48:03 -04:00
Tom Alexander
3eff85059a Add support for planning before property drawer when calculating additional properties for headlines. 2023-10-16 13:35:03 -04:00
Tom Alexander
d2d0e9e5dd Merge branch 'optval_affiliated_keywords'
Some checks failed
rustfmt Build rustfmt has succeeded
rust-foreign-document-test Build rust-foreign-document-test has failed
rust-build Build rust-build has succeeded
rust-test Build rust-test has succeeded
2023-10-16 13:16:44 -04:00
Tom Alexander
c86d1000c0 Do not clear values in lists of strings.
Some checks failed
rust-foreign-document-test Build rust-foreign-document-test has failed
rust-build Build rust-build has succeeded
rust-test Build rust-test has succeeded
This is a hold-over from when I had a list of a single string, which was a misunderstanding of the optional pair type.
2023-10-16 12:58:20 -04:00
Tom Alexander
911634cb42 Attr_ affiliated keywords should be lists of strings. 2023-10-16 12:55:18 -04:00
Tom Alexander
0aa746fb1e Implement comparison for object tree. 2023-10-16 12:50:53 -04:00
Tom Alexander
33800c4a88 Implement comparison for optional pair. 2023-10-16 12:05:36 -04:00
Tom Alexander
909ccadfa1 Beginning update to compare_affiliated_keywords. 2023-10-16 11:45:54 -04:00
Tom Alexander
e352deb989 Update parse_affiliated_keywords for handling optional pairs. 2023-10-16 11:42:20 -04:00
Tom Alexander
f5a6a26c43 Disable the existing handling of affiliated keywords. 2023-10-15 20:31:14 -04:00
Tom Alexander
dd7184da54 Add analysis from test. 2023-10-15 20:22:46 -04:00
Tom Alexander
1168ddb1fe Start an investigation into affiliated keyword behavior. 2023-10-15 17:38:56 -04:00
Tom Alexander
77ab636e6a Merge branch 'unify_keyword_constants' 2023-10-15 15:59:21 -04:00
Tom Alexander
f5dcacc79d Do not match keyword name if a longer keyword name would match. 2023-10-15 15:55:19 -04:00
Tom Alexander
e7c3c7aab6 Switch the keyword parsers over to using the settings from GlobalSettings. 2023-10-15 15:17:08 -04:00
Tom Alexander
7603b0a1cc Add a test showing we are not handling optval properly. 2023-10-15 15:16:23 -04:00
Tom Alexander
dea3721b1c Fix reporting errors in tests.
Some checks failed
rustfmt Build rustfmt has succeeded
rust-build Build rust-build has succeeded
rust-foreign-document-test Build rust-foreign-document-test has failed
rust-test Build rust-test has succeeded
2023-10-15 15:16:06 -04:00
79 changed files with 1424 additions and 955 deletions

View File

@@ -0,0 +1,191 @@
apiVersion: tekton.dev/v1beta1
kind: PipelineRun
metadata:
name: clippy
spec:
pipelineSpec:
params:
- name: image-name
description: The name for the built image
type: string
- name: path-to-image-context
description: The path to the build context
type: string
- name: path-to-dockerfile
description: The path to the Dockerfile
type: string
- name: GIT_USER_NAME
description: The username for git
type: string
default: "fluxcdbot"
- name: GIT_USER_EMAIL
description: The email for git
type: string
default: "fluxcdbot@users.noreply.github.com"
tasks:
- name: do-stuff
taskSpec:
metadata: {}
stepTemplate:
image: alpine:3.18
name: ""
resources:
requests:
cpu: 10m
memory: 600Mi
workingDir: /workspace/source
steps:
- image: alpine:3.18
name: do-stuff-step
script: |
#!/usr/bin/env sh
echo "hello world"
- name: report-pending
taskRef:
name: gitea-set-status
runAfter:
- fetch-repository
params:
- name: CONTEXT
value: "$(params.JOB_NAME)"
- name: REPO_FULL_NAME
value: "$(params.REPO_OWNER)/$(params.REPO_NAME)"
- name: GITEA_HOST_URL
value: code.fizz.buzz
- name: SHA
value: "$(tasks.fetch-repository.results.commit)"
- name: DESCRIPTION
value: "Build $(params.JOB_NAME) has started"
- name: STATE
value: pending
- name: TARGET_URL
value: "https://tekton.fizz.buzz/#/namespaces/$(context.pipelineRun.namespace)/pipelineruns/$(context.pipelineRun.name)"
- name: fetch-repository
taskRef:
name: git-clone
workspaces:
- name: output
workspace: git-source
params:
- name: url
value: $(params.REPO_URL)
- name: revision
value: $(params.PULL_BASE_SHA)
- name: deleteExisting
value: "true"
- name: build-image
taskRef:
name: kaniko
params:
- name: IMAGE
value: "$(params.image-name):$(tasks.fetch-repository.results.commit)"
- name: CONTEXT
value: $(params.path-to-image-context)
- name: DOCKERFILE
value: $(params.path-to-dockerfile)
- name: BUILDER_IMAGE
value: "gcr.io/kaniko-project/executor:v1.12.1"
- name: EXTRA_ARGS
value:
- --cache=true
- --cache-copy-layers
- --cache-repo=harbor.fizz.buzz/kanikocache/cache
- --use-new-run # Should result in a speed-up
- --reproducible # To remove timestamps so layer caching works.
- --snapshot-mode=redo
- --skip-unused-stages=true
- --registry-mirror=dockerhub.dockerhub.svc.cluster.local
workspaces:
- name: source
workspace: git-source
- name: dockerconfig
workspace: docker-credentials
runAfter:
- fetch-repository
- name: clippy
taskRef:
name: run-docker-image
workspaces:
- name: source
workspace: git-source
- name: cargo-cache
workspace: cargo-cache
runAfter:
- build-image
params:
- name: docker-image
value: "$(params.image-name):$(tasks.fetch-repository.results.commit)"
finally:
- name: report-success
when:
- input: "$(tasks.status)"
operator: in
values: ["Succeeded", "Completed"]
taskRef:
name: gitea-set-status
params:
- name: CONTEXT
value: "$(params.JOB_NAME)"
- name: REPO_FULL_NAME
value: "$(params.REPO_OWNER)/$(params.REPO_NAME)"
- name: GITEA_HOST_URL
value: code.fizz.buzz
- name: SHA
value: "$(tasks.fetch-repository.results.commit)"
- name: DESCRIPTION
value: "Build $(params.JOB_NAME) has succeeded"
- name: STATE
value: success
- name: TARGET_URL
value: "https://tekton.fizz.buzz/#/namespaces/$(context.pipelineRun.namespace)/pipelineruns/$(context.pipelineRun.name)"
- name: report-failure
when:
- input: "$(tasks.status)"
operator: in
values: ["Failed"]
taskRef:
name: gitea-set-status
params:
- name: CONTEXT
value: "$(params.JOB_NAME)"
- name: REPO_FULL_NAME
value: "$(params.REPO_OWNER)/$(params.REPO_NAME)"
- name: GITEA_HOST_URL
value: code.fizz.buzz
- name: SHA
value: "$(tasks.fetch-repository.results.commit)"
- name: DESCRIPTION
value: "Build $(params.JOB_NAME) has failed"
- name: STATE
value: failure
- name: TARGET_URL
value: "https://tekton.fizz.buzz/#/namespaces/$(context.pipelineRun.namespace)/pipelineruns/$(context.pipelineRun.name)"
workspaces:
- name: git-source
- name: docker-credentials
workspaces:
- name: git-source
volumeClaimTemplate:
spec:
storageClassName: "nfs-client"
accessModes:
- ReadWriteOnce
resources:
requests:
storage: 10Gi
subPath: rust-source
- name: cargo-cache
persistentVolumeClaim:
claimName: organic-cargo-cache-clippy
- name: docker-credentials
secret:
secretName: harbor-plain
serviceAccountName: build-bot
timeout: 240h0m0s
params:
- name: image-name
value: "harbor.fizz.buzz/private/organic-clippy"
- name: path-to-image-context
value: docker/organic_clippy/
- name: path-to-dockerfile
value: docker/organic_clippy/Dockerfile

View File

@@ -30,3 +30,10 @@ spec:
skip_branches: skip_branches:
# We already run on every commit, so running when the semver tags get pushed is causing needless double-processing. # We already run on every commit, so running when the semver tags get pushed is causing needless double-processing.
- "^v[0-9]+\\.[0-9]+\\.[0-9]+$" - "^v[0-9]+\\.[0-9]+\\.[0-9]+$"
- name: clippy
source: "pipeline-clippy.yaml"
# Override https-based url from lighthouse events.
clone_uri: "git@code.fizz.buzz:talexander/organic.git"
skip_branches:
# We already run on every commit, so running when the semver tags get pushed is causing needless double-processing.
- "^v[0-9]+\\.[0-9]+\\.[0-9]+$"

View File

@@ -1,6 +1,8 @@
# cargo-features = ["profile-rustflags"]
[package] [package]
name = "organic" name = "organic"
version = "0.1.10" version = "0.1.11"
authors = ["Tom Alexander <tom@fizz.buzz>"] authors = ["Tom Alexander <tom@fizz.buzz>"]
description = "An org-mode parser." description = "An org-mode parser."
edition = "2021" edition = "2021"
@@ -64,6 +66,13 @@ inherits = "release"
lto = true lto = true
strip = "symbols" strip = "symbols"
# Optimized build for local execution.
# [profile.native]
# inherits = "release"
# lto = true
# strip = "symbols"
# rustflags = ["-C", "target-cpu=native"]
# Profile for performance testing with the "perf" tool. Notably keeps debug enabled and does not strip symbols to make reading the perf output easier. # Profile for performance testing with the "perf" tool. Notably keeps debug enabled and does not strip symbols to make reading the perf output easier.
[profile.perf] [profile.perf]
inherits = "release" inherits = "release"

View File

@@ -37,6 +37,18 @@ clean:
format: format:
> $(MAKE) -C docker/cargo_fmt run > $(MAKE) -C docker/cargo_fmt run
.PHONY: dockerclippy
dockerclippy:
> $(MAKE) -C docker/organic_clippy run
.PHONY: clippy
clippy:
> cargo clippy --no-deps --all-targets --all-features -- -D warnings
.PHONY: clippyfix
clippyfix:
> cargo clippy --fix --lib -p organic --all-features
.PHONY: test .PHONY: test
test: test:
> cargo test --no-default-features --features compare --no-fail-fast --lib --test test_loader -- --test-threads $(TESTJOBS) > cargo test --no-default-features --features compare --no-fail-fast --lib --test test_loader -- --test-threads $(TESTJOBS)

View File

@@ -14,7 +14,7 @@ use walkdir::WalkDir;
fn main() { fn main() {
let out_dir = env::var("OUT_DIR").unwrap(); let out_dir = env::var("OUT_DIR").unwrap();
let destination = Path::new(&out_dir).join("tests.rs"); let destination = Path::new(&out_dir).join("tests.rs");
let mut test_file = File::create(&destination).unwrap(); let mut test_file = File::create(destination).unwrap();
// Re-generate the tests if any org-mode files change // Re-generate the tests if any org-mode files change
println!("cargo:rerun-if-changed=org_mode_samples"); println!("cargo:rerun-if-changed=org_mode_samples");
@@ -51,7 +51,7 @@ fn write_test(test_file: &mut File, test: &walkdir::DirEntry) {
.to_lowercase() .to_lowercase()
.strip_suffix(".org") .strip_suffix(".org")
.expect("Should have .org extension") .expect("Should have .org extension")
.replace("/", "_"); .replace('/', "_");
write!( write!(
test_file, test_file,

View File

@@ -0,0 +1,5 @@
FROM rustlang/rust:nightly-alpine3.17
RUN apk add --no-cache musl-dev
ENTRYPOINT ["cargo", "clippy", "--no-deps", "--all-targets", "--all-features", "--", "-D", "warnings"]

View File

@@ -0,0 +1,37 @@
IMAGE_NAME:=organic-clippy
# REMOTE_REPO:=harbor.fizz.buzz/private
.PHONY: all
all: build push
.PHONY: build
build:
docker build -t $(IMAGE_NAME) -f Dockerfile .
.PHONY: push
push:
ifdef REMOTE_REPO
docker tag $(IMAGE_NAME) $(REMOTE_REPO)/$(IMAGE_NAME)
docker push $(REMOTE_REPO)/$(IMAGE_NAME)
else
@echo "REMOTE_REPO not defined, not pushing to a remote repo."
endif
.PHONY: clean
clean:
docker rmi $(IMAGE_NAME)
ifdef REMOTE_REPO
docker rmi $(REMOTE_REPO)/$(IMAGE_NAME)
else
@echo "REMOTE_REPO not defined, not removing from remote repo."
endif
docker volume rm cargo-cache
# NOTE: This target will write to folders underneath the git-root
.PHONY: run
run: build
docker run --rm --init --read-only --mount type=tmpfs,destination=/tmp -v "$$(readlink -f ../../):/source" --workdir=/source --mount source=cargo-cache,target=/usr/local/cargo/registry $(IMAGE_NAME)
.PHONY: shell
shell: build
docker run --rm -i -t --entrypoint /bin/sh --mount type=tmpfs,destination=/tmp -v "$$(readlink -f ../../):/source" --workdir=/source --mount source=cargo-cache,target=/usr/local/cargo/registry $(IMAGE_NAME)

View File

@@ -0,0 +1,42 @@
* Elisp Structure
| Keyword | Single | Double | Single Optval | Double Optval |
|---------+---------------+---------------+---------------+---------------|
| CAPTION | objtree | objtree | objtree | objtree |
| DATA | quoted(:name) | quoted(:name) | - | - |
| HEADER | list(quoted) | list(quoted) | - | - |
| NAME | quoted(:name) | quoted(:name) | - | - |
| PLOT | quoted(:plot) | quoted(:plot) | - | - |
| RESULTS | optional pair | optional pair | optional pair | optional pair |
* types
** objtree
Outer list: 1 per keyword
next list: first entry = list of objects for value. remaining entries = optval
** list(quoted)
List of quoted strings, 1 per keyword
** quoted(NAME)
Quoted string under the NAME property (for example quoted(:name))
** optional pair
When optval is supplied this is an alist with the first value being the real value and the 3rd value being the optval.
#+begin_src elisp
("*f*" . "*bar*")
#+end_src
When optval is not supplied this is a list containing a single string of the last occurrence of this keyword.
#+begin_src elisp
("*c*")
#+end_src
* Default settings
#+begin_src text
org-element-dual-keywords ("CAPTION" "RESULTS")
org-element-parsed-keywords ("CAPTION")
org-element-multiple-keywords ("CAPTION" "HEADER")
org-babel-results-keyword "RESULTS"
#+end_src
* Analysis
We don't have an example of a parsed non-dual keyword
Looks like multiple triggers list 1 per keyword
dual triggers support for optval
parsed triggers objects

View File

@@ -0,0 +1,31 @@
#!/usr/bin/env bash
#
set -euo pipefail
IFS=$'\n\t'
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
file_path="${DIR}/test_document.org"
for TARGET_VARIABLE in RESULTS CAPTION HEADER DATA NAME PLOT; do
INIT_SCRIPT=$(cat <<EOF
(progn
(erase-buffer)
(require 'org)
(defun org-table-align () t)
(setq vc-handled-backends nil)
(find-file "/input/${file_path}")
(org-mode)
(replace-regexp-in-region "foo" "${TARGET_VARIABLE}")
(message "%s" (pp-to-string (org-element-parse-buffer)))
)
EOF
)
docker run --init --rm -i --mount type=tmpfs,destination=/tmp -v "/:/input:ro" -w /input --entrypoint "" organic-test emacs -q --no-site-file --no-splash --batch --eval "$INIT_SCRIPT" 2> "${DIR}/${TARGET_VARIABLE}"
done
# exec docker run --init --rm -i -t --mount type=tmpfs,destination=/tmp -v "/:/input:ro" -w /input --entrypoint "" organic-test emacs -q --no-site-file --no-splash --eval "$INIT_SCRIPT"
# org-element-dual-keywords ("CAPTION" "RESULTS")
# org-element-parsed-keywords ("CAPTION")
# org-element-multiple-keywords ("CAPTION" "HEADER")
# org-babel-results-keyword "RESULTS"

View File

@@ -0,0 +1,25 @@
# Single instance
#+foo: *a*
#+begin_example
#+end_example
# Two instances
#+foo: *b*
#+foo: *c*
#+begin_example
#+end_example
# Single with optval
#+foo[*bar*]: *d*
#+begin_example
#+end_example
# Two with optval
#+foo[*bar*]: *e*
#+foo[*bar*]: *f*
#+begin_example
#+end_example

View File

@@ -0,0 +1,7 @@
** Foo
DEADLINE: <2023-10-16 Mon>
:PROPERTIES:
:foo: *a*
:Bar: *b*
:BAZ: *c*
:END:

View File

@@ -0,0 +1,16 @@
#+results[foo]: bar
#+results[lorem]: ipsum
#+begin_example
baz
#+end_example
#+caption[lorem]: ipsum
#+caption[foo]: bar
#+begin_example
baz
#+end_example
#+header[foo]: bar
#+begin_example
baz
#+end_example

View File

@@ -15,23 +15,21 @@ mod init_tracing;
#[cfg(not(feature = "tracing"))] #[cfg(not(feature = "tracing"))]
fn main() -> Result<(), Box<dyn std::error::Error>> { fn main() -> Result<(), Box<dyn std::error::Error>> {
let rt = tokio::runtime::Runtime::new()?; let rt = tokio::runtime::Runtime::new()?;
let result = rt.block_on(async { rt.block_on(async {
let main_body_result = main_body().await; let main_body_result = main_body().await;
main_body_result main_body_result
}); })
result
} }
#[cfg(feature = "tracing")] #[cfg(feature = "tracing")]
fn main() -> Result<(), Box<dyn std::error::Error>> { fn main() -> Result<(), Box<dyn std::error::Error>> {
let rt = tokio::runtime::Runtime::new()?; let rt = tokio::runtime::Runtime::new()?;
let result = rt.block_on(async { rt.block_on(async {
init_telemetry()?; init_telemetry()?;
let main_body_result = main_body().await; let main_body_result = main_body().await;
shutdown_telemetry()?; shutdown_telemetry()?;
main_body_result main_body_result
}); })
result
} }
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))] #[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]

View File

@@ -21,23 +21,21 @@ mod init_tracing;
#[cfg(not(feature = "tracing"))] #[cfg(not(feature = "tracing"))]
fn main() -> Result<ExitCode, Box<dyn std::error::Error>> { fn main() -> Result<ExitCode, Box<dyn std::error::Error>> {
let rt = tokio::runtime::Runtime::new()?; let rt = tokio::runtime::Runtime::new()?;
let result = rt.block_on(async { rt.block_on(async {
let main_body_result = main_body().await; let main_body_result = main_body().await;
main_body_result main_body_result
}); })
result
} }
#[cfg(feature = "tracing")] #[cfg(feature = "tracing")]
fn main() -> Result<ExitCode, Box<dyn std::error::Error>> { fn main() -> Result<ExitCode, Box<dyn std::error::Error>> {
let rt = tokio::runtime::Runtime::new()?; let rt = tokio::runtime::Runtime::new()?;
let result = rt.block_on(async { rt.block_on(async {
init_telemetry()?; init_telemetry()?;
let main_body_result = main_body().await; let main_body_result = main_body().await;
shutdown_telemetry()?; shutdown_telemetry()?;
main_body_result main_body_result
}); })
result
} }
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))] #[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
@@ -108,10 +106,9 @@ fn compare_howard_abrams() -> impl Iterator<Item = TestConfig> {
let layer = layer.chain(compare_group("clojure-yesql-xp", || { let layer = layer.chain(compare_group("clojure-yesql-xp", || {
compare_all_org_document("/foreign_documents/howardabrams/clojure-yesql-xp") compare_all_org_document("/foreign_documents/howardabrams/clojure-yesql-xp")
})); }));
let layer = layer.chain(compare_group("veep", || { layer.chain(compare_group("veep", || {
compare_all_org_document("/foreign_documents/howardabrams/veep") compare_all_org_document("/foreign_documents/howardabrams/veep")
})); }))
layer
} }
fn compare_group<N: Into<String>, F: Fn() -> I, I: Iterator<Item = TestConfig>>( fn compare_group<N: Into<String>, F: Fn() -> I, I: Iterator<Item = TestConfig>>(
@@ -195,7 +192,6 @@ struct ResultLayer {
#[derive(Debug)] #[derive(Debug)]
struct SingleFileResult { struct SingleFileResult {
name: String, name: String,
file_path: PathBuf,
status: TestStatus, status: TestStatus,
} }
@@ -225,7 +221,6 @@ impl SingleFile {
let result = silent_compare_on_file(&self.file_path).await; let result = silent_compare_on_file(&self.file_path).await;
Ok(SingleFileResult { Ok(SingleFileResult {
name: self.name, name: self.name,
file_path: self.file_path,
status: if let Ok(true) = result { status: if let Ok(true) = result {
TestStatus::Pass TestStatus::Pass
} else { } else {

View File

@@ -16,7 +16,6 @@ use super::util::get_property_unquoted_atom;
use crate::types::AstNode; use crate::types::AstNode;
use crate::types::CharOffsetInLine; use crate::types::CharOffsetInLine;
use crate::types::LineNumber; use crate::types::LineNumber;
use crate::types::Object;
use crate::types::RetainLabels; use crate::types::RetainLabels;
use crate::types::SwitchNumberLines; use crate::types::SwitchNumberLines;
@@ -57,11 +56,11 @@ impl<'b, 's> ComparePropertiesResult<'b, 's> {
/// Do no comparison. /// Do no comparison.
/// ///
/// This is for when you want to acknowledge that a field exists in the emacs token, but you do not have any validation for it when using the compare_properties!() macro. Ideally, this should be kept to a minimum since this represents untested values. /// This is for when you want to acknowledge that a field exists in the emacs token, but you do not have any validation for it when using the compare_properties!() macro. Ideally, this should be kept to a minimum since this represents untested values.
pub(crate) fn compare_noop<'b, 's, 'x, R, RG>( pub(crate) fn compare_noop<'b, 's, R, RG>(
_source: &'s str, _source: &'s str,
_emacs: &'b Token<'s>, _emacs: &'b Token<'s>,
_rust_node: R, _rust_node: R,
_emacs_field: &'x str, _emacs_field: &str,
_rust_value_getter: RG, _rust_value_getter: RG,
) -> Result<ComparePropertiesResult<'b, 's>, Box<dyn std::error::Error>> { ) -> Result<ComparePropertiesResult<'b, 's>, Box<dyn std::error::Error>> {
Ok(ComparePropertiesResult::NoChange) Ok(ComparePropertiesResult::NoChange)
@@ -70,18 +69,16 @@ pub(crate) fn compare_noop<'b, 's, 'x, R, RG>(
/// Do no comparison. /// Do no comparison.
/// ///
/// This is for when you want to acknowledge that a field exists in the emacs token, but you do not have any validation for it when using the compare_properties!() macro. Ideally, this should be kept to a minimum since this represents untested values. /// This is for when you want to acknowledge that a field exists in the emacs token, but you do not have any validation for it when using the compare_properties!() macro. Ideally, this should be kept to a minimum since this represents untested values.
pub(crate) fn compare_identity() -> () { pub(crate) fn compare_identity() {}
()
}
/// Assert that the emacs value is always nil or absent. /// Assert that the emacs value is always nil or absent.
/// ///
/// This is usually used for fields which, in my testing, are always nil. Using this compare function instead of simply doing a compare_noop will enable us to be alerted when we finally come across an org-mode document that has a value other than nil for the property. /// This is usually used for fields which, in my testing, are always nil. Using this compare function instead of simply doing a compare_noop will enable us to be alerted when we finally come across an org-mode document that has a value other than nil for the property.
pub(crate) fn compare_property_always_nil<'b, 's, 'x, R, RG>( pub(crate) fn compare_property_always_nil<'b, 's, R, RG>(
_source: &'s str, _source: &'s str,
emacs: &'b Token<'s>, emacs: &'b Token<'s>,
_rust_node: R, _rust_node: R,
emacs_field: &'x str, emacs_field: &str,
_rust_value_getter: RG, _rust_value_getter: RG,
) -> Result<ComparePropertiesResult<'b, 's>, Box<dyn std::error::Error>> { ) -> Result<ComparePropertiesResult<'b, 's>, Box<dyn std::error::Error>> {
let value = get_property(emacs, emacs_field)?; let value = get_property(emacs, emacs_field)?;
@@ -100,7 +97,6 @@ pub(crate) fn compare_property_always_nil<'b, 's, 'x, R, RG>(
pub(crate) fn compare_property_quoted_string< pub(crate) fn compare_property_quoted_string<
'b, 'b,
's, 's,
'x,
R, R,
RV: AsRef<str> + std::fmt::Debug, RV: AsRef<str> + std::fmt::Debug,
RG: Fn(R) -> Option<RV>, RG: Fn(R) -> Option<RV>,
@@ -108,12 +104,12 @@ pub(crate) fn compare_property_quoted_string<
_source: &'s str, _source: &'s str,
emacs: &'b Token<'s>, emacs: &'b Token<'s>,
rust_node: R, rust_node: R,
emacs_field: &'x str, emacs_field: &str,
rust_value_getter: RG, rust_value_getter: RG,
) -> Result<ComparePropertiesResult<'b, 's>, Box<dyn std::error::Error>> { ) -> Result<ComparePropertiesResult<'b, 's>, Box<dyn std::error::Error>> {
let value = get_property_quoted_string(emacs, emacs_field)?; let value = get_property_quoted_string(emacs, emacs_field)?;
let rust_value = rust_value_getter(rust_node); let rust_value = rust_value_getter(rust_node);
if rust_value.as_ref().map(|s| s.as_ref()) != value.as_ref().map(String::as_str) { if rust_value.as_ref().map(|s| s.as_ref()) != value.as_deref() {
let this_status = DiffStatus::Bad; let this_status = DiffStatus::Bad;
let message = Some(format!( let message = Some(format!(
"{} mismatch (emacs != rust) {:?} != {:?}", "{} mismatch (emacs != rust) {:?} != {:?}",
@@ -181,7 +177,6 @@ where
pub(crate) fn compare_property_list_of_quoted_string< pub(crate) fn compare_property_list_of_quoted_string<
'b, 'b,
's, 's,
'x,
R, R,
RV: AsRef<str> + std::fmt::Debug, RV: AsRef<str> + std::fmt::Debug,
RI: Iterator<Item = RV>, RI: Iterator<Item = RV>,
@@ -190,7 +185,7 @@ pub(crate) fn compare_property_list_of_quoted_string<
_source: &'s str, _source: &'s str,
emacs: &'b Token<'s>, emacs: &'b Token<'s>,
rust_node: R, rust_node: R,
emacs_field: &'x str, emacs_field: &str,
rust_value_getter: RG, rust_value_getter: RG,
) -> Result<ComparePropertiesResult<'b, 's>, Box<dyn std::error::Error>> { ) -> Result<ComparePropertiesResult<'b, 's>, Box<dyn std::error::Error>> {
let value = get_property(emacs, emacs_field)? let value = get_property(emacs, emacs_field)?
@@ -272,10 +267,7 @@ pub(crate) fn compare_property_set_of_quoted_string<
.map(unquote) .map(unquote)
.collect::<Result<Vec<_>, _>>()?; .collect::<Result<Vec<_>, _>>()?;
let value: BTreeSet<&str> = value.iter().map(|e| e.as_str()).collect(); let value: BTreeSet<&str> = value.iter().map(|e| e.as_str()).collect();
let mismatched: Vec<_> = value let mismatched: Vec<_> = value.symmetric_difference(&rust_value).copied().collect();
.symmetric_difference(&rust_value)
.map(|val| *val)
.collect();
if !mismatched.is_empty() { if !mismatched.is_empty() {
let this_status = DiffStatus::Bad; let this_status = DiffStatus::Bad;
let message = Some(format!( let message = Some(format!(
@@ -288,11 +280,111 @@ pub(crate) fn compare_property_set_of_quoted_string<
Ok(ComparePropertiesResult::NoChange) Ok(ComparePropertiesResult::NoChange)
} }
pub(crate) fn compare_property_boolean<'b, 's, 'x, R, RG: Fn(R) -> bool>( pub(crate) fn compare_property_optional_pair<
'b,
's,
R,
RV: AsRef<str> + std::fmt::Debug,
ROV: AsRef<str> + std::fmt::Debug,
RG: Fn(R) -> Option<(Option<ROV>, RV)>,
>(
_source: &'s str, _source: &'s str,
emacs: &'b Token<'s>, emacs: &'b Token<'s>,
rust_node: R, rust_node: R,
emacs_field: &'x str, emacs_field: &str,
rust_value_getter: RG,
) -> Result<ComparePropertiesResult<'b, 's>, Box<dyn std::error::Error>> {
let value = get_property(emacs, emacs_field)?
.map(Token::as_list)
.map_or(Ok(None), |r| r.map(Some))?;
let rust_value = rust_value_getter(rust_node);
match (value, &rust_value) {
(None, None) => {}
(None, Some(_)) | (Some(_), None) => {
let this_status = DiffStatus::Bad;
let message = Some(format!(
"{} mismatch (emacs != rust) {:?} != {:?}",
emacs_field, value, rust_value
));
return Ok(ComparePropertiesResult::SelfChange(this_status, message));
}
(Some(el), Some((Some(_), _))) if el.len() != 3 => {
let this_status = DiffStatus::Bad;
let message = Some(format!(
"{} mismatch (emacs != rust) {:?} != {:?}",
emacs_field, value, rust_value
));
return Ok(ComparePropertiesResult::SelfChange(this_status, message));
}
(Some(el), Some((None, _))) if el.len() != 1 => {
let this_status = DiffStatus::Bad;
let message = Some(format!(
"{} mismatch (emacs != rust) {:?} != {:?}",
emacs_field, value, rust_value
));
return Ok(ComparePropertiesResult::SelfChange(this_status, message));
}
(Some(el), Some((Some(orl), rl))) => {
let e = el
.first()
.map(Token::as_atom)
.map_or(Ok(None), |r| r.map(Some))?
.map(unquote)
.map_or(Ok(None), |r| r.map(Some))?
.expect("Above match proved length to be 3.");
let oe = el
.get(2)
.map(Token::as_atom)
.map_or(Ok(None), |r| r.map(Some))?
.map(unquote)
.map_or(Ok(None), |r| r.map(Some))?
.expect("Above match proved length to be 3.");
let r = rl.as_ref();
let or = orl.as_ref();
if e != r {
let this_status = DiffStatus::Bad;
let message = Some(format!(
"{} mismatch (emacs != rust) {:?} != {:?}. Full list: {:?} != {:?}",
emacs_field, e, r, value, rust_value
));
return Ok(ComparePropertiesResult::SelfChange(this_status, message));
}
if oe != or {
let this_status = DiffStatus::Bad;
let message = Some(format!(
"{} mismatch (emacs != rust) {:?} != {:?}. Full list: {:?} != {:?}",
emacs_field, e, r, value, rust_value
));
return Ok(ComparePropertiesResult::SelfChange(this_status, message));
}
}
(Some(el), Some((None, rl))) => {
let e = el
.first()
.map(Token::as_atom)
.map_or(Ok(None), |r| r.map(Some))?
.map(unquote)
.map_or(Ok(None), |r| r.map(Some))?
.expect("Above match proved length to be 1.");
let r = rl.as_ref();
if e != r {
let this_status = DiffStatus::Bad;
let message = Some(format!(
"{} mismatch (emacs != rust) {:?} != {:?}. Full list: {:?} != {:?}",
emacs_field, e, r, value, rust_value
));
return Ok(ComparePropertiesResult::SelfChange(this_status, message));
}
}
}
Ok(ComparePropertiesResult::NoChange)
}
pub(crate) fn compare_property_boolean<'b, 's, R, RG: Fn(R) -> bool>(
_source: &'s str,
emacs: &'b Token<'s>,
rust_node: R,
emacs_field: &str,
rust_value_getter: RG, rust_value_getter: RG,
) -> Result<ComparePropertiesResult<'b, 's>, Box<dyn std::error::Error>> { ) -> Result<ComparePropertiesResult<'b, 's>, Box<dyn std::error::Error>> {
// get_property already converts nil to None. // get_property already converts nil to None.
@@ -376,14 +468,7 @@ where
match (value, rust_value) { match (value, rust_value) {
(None, None) => {} (None, None) => {}
(Some(el), None) (Some(el), None)
if el.len() == 1 if el.len() == 1 && el.iter().all(|t| matches!(t.as_atom(), Ok(r#""""#))) => {}
&& el.into_iter().all(|t| {
if let Ok(r#""""#) = t.as_atom() {
true
} else {
false
}
}) => {}
(None, rv @ Some(_)) | (Some(_), rv @ None) => { (None, rv @ Some(_)) | (Some(_), rv @ None) => {
let this_status = DiffStatus::Bad; let this_status = DiffStatus::Bad;
let message = Some(format!( let message = Some(format!(
@@ -412,118 +497,133 @@ where
Ok(ComparePropertiesResult::NoChange) Ok(ComparePropertiesResult::NoChange)
} }
/// Special compare used for affiliate keywords that are parsed as objects. pub(crate) fn compare_property_object_tree<
///
/// Org-mode seems to store these as a 3-deep list:
/// - Outer list with 1 element per #+caption keyword (or other parsed keyword).
/// - Middle list which has:
/// - first element is a list of objects representing the value after the colon.
/// - every additional element is a list of objects from inside the square brackets (the optional value).
pub(crate) fn compare_property_list_of_list_of_list_of_ast_nodes<
'b, 'b,
's, 's,
'x, 'x,
R, R,
RG: Fn(R) -> Option<&'b Vec<(Option<Vec<Object<'s>>>, Vec<Object<'s>>)>>, RV: std::fmt::Debug + 'b,
ROV: std::fmt::Debug + 'b,
RI: Iterator<Item = &'b (Option<Vec<ROV>>, Vec<RV>)> + ExactSizeIterator + std::fmt::Debug,
RG: Fn(R) -> Option<RI>,
>( >(
source: &'s str, source: &'s str,
emacs: &'b Token<'s>, emacs: &'b Token<'s>,
rust_node: R, rust_node: R,
emacs_field: &'x str, emacs_field: &'x str,
rust_value_getter: RG, rust_value_getter: RG,
) -> Result<ComparePropertiesResult<'b, 's>, Box<dyn std::error::Error>> { ) -> Result<ComparePropertiesResult<'b, 's>, Box<dyn std::error::Error>>
// TODO: Replace Object<'s> with generics. I hard-coded Object in to make lifetimes easier. where
let rust_value = rust_value_getter(rust_node); AstNode<'b, 's>: From<&'b RV>,
AstNode<'b, 's>: From<&'b ROV>,
{
let value = get_property(emacs, emacs_field)? let value = get_property(emacs, emacs_field)?
.map(Token::as_list) .map(Token::as_list)
.map_or(Ok(None), |r| r.map(Some))?; .map_or(Ok(None), |r| r.map(Some))?;
let (value, rust_value) = match (value, rust_value) { let rust_value = rust_value_getter(rust_node);
let (outer_emacs_list, outer_rust_list) = match (value, rust_value) {
(None, None) => { (None, None) => {
return Ok(ComparePropertiesResult::NoChange); return Ok(ComparePropertiesResult::NoChange);
} }
(None, Some(_)) | (Some(_), None) => { (None, rv @ Some(_)) | (Some(_), rv @ None) => {
let this_status = DiffStatus::Bad; let this_status = DiffStatus::Bad;
let message = Some(format!( let message = Some(format!(
"{} mismatch (emacs != rust) {:?} != {:?}", "{} mismatch (emacs != rust) {:?} != {:?}",
emacs_field, value, rust_value emacs_field, value, rv
)); ));
return Ok(ComparePropertiesResult::SelfChange(this_status, message)); return Ok(ComparePropertiesResult::SelfChange(this_status, message));
} }
(Some(value), Some(rust_value)) if value.len() != rust_value.len() => { (Some(el), Some(rl)) if el.len() != rl.len() => {
let this_status = DiffStatus::Bad; let this_status = DiffStatus::Bad;
let message = Some(format!( let message = Some(format!(
"{} mismatch (emacs != rust) {:?} != {:?}", "{} mismatch (emacs != rust) {:?} != {:?}",
emacs_field, value, rust_value emacs_field, el, rl
)); ));
return Ok(ComparePropertiesResult::SelfChange(this_status, message)); return Ok(ComparePropertiesResult::SelfChange(this_status, message));
} }
(Some(value), Some(rust_value)) => (value, rust_value), (Some(el), Some(rl)) => (el, rl),
}; };
let mut full_status: Vec<DiffEntry<'b, 's>> = Vec::with_capacity(outer_rust_list.len());
let mut full_status: Vec<DiffEntry<'b, 's>> = Vec::with_capacity(rust_value.len()); for (kw_e, kw_r) in outer_emacs_list.iter().zip(outer_rust_list) {
let kw_e = kw_e.as_list()?;
// Iterate the outer lists let child_status_length = kw_r.1.len() + kw_r.0.as_ref().map(|opt| opt.len()).unwrap_or(0);
for (value, (rust_optional, rust_value)) in value.iter().zip(rust_value.iter()) { let mut child_status: Vec<DiffEntry<'b, 's>> = Vec::with_capacity(child_status_length);
let mut middle_value = value.as_list()?.iter(); if let Some(or) = &kw_r.0 {
// First element of middle list is the mandatory value (the value past the colon). // if optional value
let mandatory_value = middle_value.next(); let mut kw_e = kw_e.iter();
let mandatory_value = match mandatory_value { // First element is a list representing the mandatory value.
Some(mandatory_value) => mandatory_value, if let Some(val_e) = kw_e.next() {
None => { let el = val_e.as_list()?;
if el.len() != kw_r.1.len() {
let this_status = DiffStatus::Bad;
let message = Some(format!(
"{} mismatch (emacs != rust) {:?} != {:?}",
emacs_field, kw_e, kw_r
));
return Ok(ComparePropertiesResult::SelfChange(this_status, message));
}
for (e, r) in el.iter().zip(kw_r.1.iter()) {
child_status.push(compare_ast_node(source, e, r.into())?);
}
} else {
let this_status = DiffStatus::Bad; let this_status = DiffStatus::Bad;
let message = Some(format!( let message = Some(format!(
"{} mismatch (emacs != rust) {:?} != {:?}", "{} mismatch (emacs != rust) {:?} != {:?}",
emacs_field, value, rust_value emacs_field, kw_e, kw_r
)); ));
return Ok(ComparePropertiesResult::SelfChange(this_status, message)); return Ok(ComparePropertiesResult::SelfChange(this_status, message));
} }
}; // Remaining elements are the optional value.
if kw_e.len() != or.len() {
// Compare optional value
if let Some(rust_optional) = rust_optional {
let mut child_status: Vec<DiffEntry<'b, 's>> = Vec::with_capacity(rust_value.len());
if rust_optional.len() != middle_value.len() {
let this_status = DiffStatus::Bad; let this_status = DiffStatus::Bad;
let message = Some(format!( let message = Some(format!(
"{} optional value length mismatch (emacs != rust) {} != {} | {:?}", "{} mismatch (emacs != rust) {:?} != {:?}",
emacs_field, emacs_field, kw_e, kw_r
middle_value.len(),
rust_optional.len(),
rust_optional
)); ));
return Ok(ComparePropertiesResult::SelfChange(this_status, message)); return Ok(ComparePropertiesResult::SelfChange(this_status, message));
} }
for (e, r) in middle_value.zip(rust_optional) { for (e, r) in kw_e.zip(or.iter()) {
child_status.push(compare_ast_node(source, e, r.into())?); child_status.push(compare_ast_node(source, e, r.into())?);
} }
if !child_status.is_empty() { } else {
let diff_scope = artificial_diff_scope("optional value", child_status)?; // if no optional value
full_status.push(diff_scope); if !kw_e.len() == 1 {
let this_status = DiffStatus::Bad;
let message = Some(format!(
"{} mismatch (emacs != rust) {:?} != {:?}",
emacs_field, kw_e, kw_r
));
return Ok(ComparePropertiesResult::SelfChange(this_status, message));
} }
}
// Compare mandatory value let e = kw_e
let mut child_status: Vec<DiffEntry<'b, 's>> = Vec::with_capacity(rust_value.len()); .first()
let mandatory_value = mandatory_value.as_list()?; .map(Token::as_list)
if rust_value.len() != mandatory_value.len() { .map_or(Ok(None), |r| r.map(Some))?
let this_status = DiffStatus::Bad; .expect("The above if-statement proves this will be Some.")
let message = Some(format!( .iter();
"{} mandatory value length mismatch (emacs != rust) {} != {} | {:?}", let r = kw_r.1.iter();
emacs_field,
mandatory_value.len(), if e.len() != r.len() {
rust_value.len(), let this_status = DiffStatus::Bad;
rust_value let message = Some(format!(
)); "{} mismatch (emacs != rust) {:?} != {:?}",
return Ok(ComparePropertiesResult::SelfChange(this_status, message)); emacs_field, kw_e, kw_r
} ));
for (e, r) in mandatory_value.iter().zip(rust_value) { return Ok(ComparePropertiesResult::SelfChange(this_status, message));
child_status.push(compare_ast_node(source, e, r.into())?); }
for (e, r) in e.zip(r) {
child_status.push(compare_ast_node(source, e, r.into())?);
}
} }
if !child_status.is_empty() { if !child_status.is_empty() {
let diff_scope = artificial_diff_scope("mandatory value", child_status)?; let diff_scope = artificial_diff_scope("mandatory value", child_status)?;
full_status.push(diff_scope); full_status.push(diff_scope);
} }
} }
if full_status.is_empty() { if full_status.is_empty() {
Ok(ComparePropertiesResult::NoChange) Ok(ComparePropertiesResult::NoChange)
} else { } else {

View File

@@ -227,7 +227,7 @@ impl<'b, 's> DiffResult<'b, 's> {
status_text = status_text, status_text = status_text,
name = self.name, name = self.name,
char_offset = preceding_text.chars().count() + 1, char_offset = preceding_text.chars().count() + 1,
message = self.message.as_ref().map(|m| m.as_str()).unwrap_or("") message = self.message.as_deref().unwrap_or("")
); );
for child in self.children.iter() { for child in self.children.iter() {
child.print_indented(indentation + 1, original_document)?; child.print_indented(indentation + 1, original_document)?;
@@ -330,8 +330,8 @@ pub(crate) fn artificial_diff_scope<'b, 's>(
.into()) .into())
} }
pub(crate) fn artificial_owned_diff_scope<'b, 's, 'x>( pub(crate) fn artificial_owned_diff_scope<'b, 's>(
name: &'x str, name: &str,
children: Vec<DiffEntry<'b, 's>>, children: Vec<DiffEntry<'b, 's>>,
) -> Result<DiffEntry<'b, 's>, Box<dyn std::error::Error>> { ) -> Result<DiffEntry<'b, 's>, Box<dyn std::error::Error>> {
Ok(DiffLayer { Ok(DiffLayer {
@@ -426,14 +426,9 @@ pub(crate) fn compare_ast_node<'b, 's>(
// PlainText is a special case because upstream Org-Mode uses relative values for the bounds in plaintext rather than absolute so the below checks do not account for that. // PlainText is a special case because upstream Org-Mode uses relative values for the bounds in plaintext rather than absolute so the below checks do not account for that.
if let AstNode::PlainText(_) = rust { if let AstNode::PlainText(_) = rust {
} else { } else if let Err(err) = compare_standard_properties(source, emacs, &rust) {
match compare_standard_properties(source, emacs, &rust) { compare_result.status = DiffStatus::Bad;
Err(err) => { compare_result.message = Some(err.to_string())
compare_result.status = DiffStatus::Bad;
compare_result.message = Some(err.to_string())
}
Ok(_) => {}
}
} }
Ok(compare_result.into()) Ok(compare_result.into())
@@ -495,7 +490,7 @@ fn _compare_document<'b, 's>(
.map(EmacsField::Required), .map(EmacsField::Required),
( (
EmacsField::Required(":path"), EmacsField::Required(":path"),
|r| r.path.as_ref().map(|p| p.to_str()).flatten(), |r| r.path.as_ref().and_then(|p| p.to_str()),
compare_property_quoted_string compare_property_quoted_string
), ),
( (

View File

@@ -1,3 +1,4 @@
#[allow(clippy::module_inception)]
mod compare; mod compare;
mod compare_field; mod compare_field;
mod diff; mod diff;

View File

@@ -113,24 +113,21 @@ fn is_slice_of(parent: &str, child: &str) -> bool {
/// Get a slice of the string that was consumed in a parser using the original input to the parser and the remaining input after the parser. /// Get a slice of the string that was consumed in a parser using the original input to the parser and the remaining input after the parser.
fn get_consumed<'s>(input: &'s str, remaining: &'s str) -> &'s str { fn get_consumed<'s>(input: &'s str, remaining: &'s str) -> &'s str {
debug_assert!(is_slice_of(input, remaining)); debug_assert!(is_slice_of(input, remaining));
let source = { let offset = remaining.as_ptr() as usize - input.as_ptr() as usize;
let offset = remaining.as_ptr() as usize - input.as_ptr() as usize; &input[..offset]
&input[..offset]
};
source.into()
} }
pub(crate) fn unquote(text: &str) -> Result<String, Box<dyn std::error::Error>> { pub(crate) fn unquote(text: &str) -> Result<String, Box<dyn std::error::Error>> {
let mut out: Vec<u8> = Vec::with_capacity(text.len()); let mut out: Vec<u8> = Vec::with_capacity(text.len());
if !text.starts_with(r#"""#) { if !text.starts_with('"') {
return Err("Quoted text does not start with quote.".into()); return Err("Quoted text does not start with quote.".into());
} }
if !text.ends_with(r#"""#) { if !text.ends_with('"') {
return Err("Quoted text does not end with quote.".into()); return Err("Quoted text does not end with quote.".into());
} }
let interior_text = &text[1..(text.len() - 1)]; let interior_text = &text[1..(text.len() - 1)];
let mut state = ParseState::Normal; let mut state = ParseState::Normal;
for current_char in interior_text.bytes().into_iter() { for current_char in interior_text.bytes() {
// Check to see if octal finished // Check to see if octal finished
state = match (state, current_char) { state = match (state, current_char) {
(ParseState::Octal(octal), b'0'..=b'7') if octal.len() < MAX_OCTAL_LENGTH => { (ParseState::Octal(octal), b'0'..=b'7') if octal.len() < MAX_OCTAL_LENGTH => {
@@ -229,11 +226,9 @@ fn atom<'s>(input: &'s str) -> Res<&'s str, Token<'s>> {
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))] #[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
fn unquoted_atom<'s>(input: &'s str) -> Res<&'s str, Token<'s>> { fn unquoted_atom<'s>(input: &'s str) -> Res<&'s str, Token<'s>> {
let (remaining, body) = take_till1(|c| match c { let (remaining, body) =
' ' | '\t' | '\r' | '\n' | ')' | ']' => true, take_till1(|c| matches!(c, ' ' | '\t' | '\r' | '\n' | ')' | ']'))(input)?;
_ => false, Ok((remaining, Token::Atom(body)))
})(input)?;
Ok((remaining, Token::Atom(body.into())))
} }
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))] #[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
@@ -264,22 +259,19 @@ fn quoted_atom<'s>(input: &'s str) -> Res<&'s str, Token<'s>> {
} }
let (remaining, _) = tag(r#"""#)(remaining)?; let (remaining, _) = tag(r#"""#)(remaining)?;
let source = get_consumed(input, remaining); let source = get_consumed(input, remaining);
Ok((remaining, Token::Atom(source.into()))) Ok((remaining, Token::Atom(source)))
} }
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))] #[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
fn hash_notation<'s>(input: &'s str) -> Res<&'s str, Token<'s>> { fn hash_notation<'s>(input: &'s str) -> Res<&'s str, Token<'s>> {
let (remaining, _) = tag("#<")(input)?; let (remaining, _) = tag("#<")(input)?;
let (remaining, _body) = take_till1(|c| match c { let (remaining, _body) = take_till1(|c| matches!(c, '>'))(remaining)?;
'>' => true,
_ => false,
})(remaining)?;
let (remaining, _) = tag(">")(remaining)?; let (remaining, _) = tag(">")(remaining)?;
let source = get_consumed(input, remaining); let source = get_consumed(input, remaining);
Ok((remaining, Token::Atom(source.into()))) Ok((remaining, Token::Atom(source)))
} }
fn text_with_properties<'s>(input: &'s str) -> Res<&'s str, Token<'s>> { fn text_with_properties(input: &str) -> Res<&str, Token<'_>> {
let (remaining, _) = tag("#(")(input)?; let (remaining, _) = tag("#(")(input)?;
let (remaining, (text, props)) = delimited( let (remaining, (text, props)) = delimited(
multispace0, multispace0,
@@ -348,10 +340,7 @@ mod tests {
let input = r#" (foo "b(a)r" baz ) "#; let input = r#" (foo "b(a)r" baz ) "#;
let (remaining, parsed) = sexp(input).expect("Parse the input"); let (remaining, parsed) = sexp(input).expect("Parse the input");
assert_eq!(remaining, ""); assert_eq!(remaining, "");
assert!(match parsed { assert!(matches!(parsed, Token::List(_)));
Token::List(_) => true,
_ => false,
});
let children = match parsed { let children = match parsed {
Token::List(children) => children, Token::List(children) => children,
_ => panic!("Should be a list."), _ => panic!("Should be a list."),
@@ -364,14 +353,14 @@ mod tests {
r#"foo"# r#"foo"#
); );
assert_eq!( assert_eq!(
match children.iter().nth(1) { match children.get(1) {
Some(Token::Atom(body)) => *body, Some(Token::Atom(body)) => *body,
_ => panic!("Second child should be an atom."), _ => panic!("Second child should be an atom."),
}, },
r#""b(a)r""# r#""b(a)r""#
); );
assert_eq!( assert_eq!(
match children.iter().nth(2) { match children.get(2) {
Some(Token::Atom(body)) => *body, Some(Token::Atom(body)) => *body,
_ => panic!("Third child should be an atom."), _ => panic!("Third child should be an atom."),
}, },

View File

@@ -1,7 +1,8 @@
use std::str::FromStr; use std::str::FromStr;
use super::compare_field::compare_property_list_of_list_of_list_of_ast_nodes;
use super::compare_field::compare_property_list_of_quoted_string; use super::compare_field::compare_property_list_of_quoted_string;
use super::compare_field::compare_property_object_tree;
use super::compare_field::compare_property_optional_pair;
use super::compare_field::compare_property_quoted_string; use super::compare_field::compare_property_quoted_string;
use super::compare_field::ComparePropertiesResult; use super::compare_field::ComparePropertiesResult;
use super::diff::DiffEntry; use super::diff::DiffEntry;
@@ -53,8 +54,8 @@ pub(crate) fn compare_standard_properties<
Ok(()) Ok(())
} }
pub(crate) fn assert_name<'b, 's, S: AsRef<str>>( pub(crate) fn assert_name<S: AsRef<str>>(
emacs: &'b Token<'s>, emacs: &Token<'_>,
name: S, name: S,
) -> Result<(), Box<dyn std::error::Error>> { ) -> Result<(), Box<dyn std::error::Error>> {
let name = name.as_ref(); let name = name.as_ref();
@@ -89,9 +90,9 @@ pub(crate) fn assert_bounds<'b, 's, S: StandardProperties<'s> + ?Sized>(
standard_properties.end.ok_or("Token should have an end.")?, standard_properties.end.ok_or("Token should have an end.")?,
); );
let (rust_begin, rust_end) = get_rust_byte_offsets(original_document, rust); // 0-based let (rust_begin, rust_end) = get_rust_byte_offsets(original_document, rust); // 0-based
let rust_begin_char_offset = (&original_document[..rust_begin]).chars().count() + 1; // 1-based let rust_begin_char_offset = original_document[..rust_begin].chars().count() + 1; // 1-based
let rust_end_char_offset = let rust_end_char_offset =
rust_begin_char_offset + (&original_document[rust_begin..rust_end]).chars().count(); // 1-based rust_begin_char_offset + original_document[rust_begin..rust_end].chars().count(); // 1-based
if rust_begin_char_offset != begin || rust_end_char_offset != end { if rust_begin_char_offset != begin || rust_end_char_offset != end {
Err(format!("Rust bounds (in chars) ({rust_begin}, {rust_end}) do not match emacs bounds ({emacs_begin}, {emacs_end})", rust_begin = rust_begin_char_offset, rust_end = rust_end_char_offset, emacs_begin=begin, emacs_end=end))?; Err(format!("Rust bounds (in chars) ({rust_begin}, {rust_end}) do not match emacs bounds ({emacs_begin}, {emacs_end})", rust_begin = rust_begin_char_offset, rust_end = rust_end_char_offset, emacs_begin=begin, emacs_end=end))?;
} }
@@ -112,21 +113,18 @@ struct EmacsStandardProperties {
post_blank: Option<usize>, post_blank: Option<usize>,
} }
fn get_emacs_standard_properties<'b, 's>( fn get_emacs_standard_properties(
emacs: &'b Token<'s>, emacs: &Token<'_>,
) -> Result<EmacsStandardProperties, Box<dyn std::error::Error>> { ) -> Result<EmacsStandardProperties, Box<dyn std::error::Error>> {
let children = emacs.as_list()?; let children = emacs.as_list()?;
let attributes_child = children let attributes_child = children.get(1).ok_or("Should have an attributes child.")?;
.iter()
.nth(1)
.ok_or("Should have an attributes child.")?;
let attributes_map = attributes_child.as_map()?; let attributes_map = attributes_child.as_map()?;
let standard_properties = attributes_map.get(":standard-properties"); let standard_properties = attributes_map.get(":standard-properties");
Ok(if standard_properties.is_some() { Ok(if standard_properties.is_some() {
let mut std_props = standard_properties let mut std_props = standard_properties
.expect("if statement proves its Some") .expect("if statement proves its Some")
.as_vector()? .as_vector()?
.into_iter(); .iter();
let begin = maybe_token_to_usize(std_props.next())?; let begin = maybe_token_to_usize(std_props.next())?;
let post_affiliated = maybe_token_to_usize(std_props.next())?; let post_affiliated = maybe_token_to_usize(std_props.next())?;
let contents_begin = maybe_token_to_usize(std_props.next())?; let contents_begin = maybe_token_to_usize(std_props.next())?;
@@ -142,16 +140,13 @@ fn get_emacs_standard_properties<'b, 's>(
post_blank, post_blank,
} }
} else { } else {
let begin = maybe_token_to_usize(attributes_map.get(":begin").map(|token| *token))?; let begin = maybe_token_to_usize(attributes_map.get(":begin").copied())?;
let end = maybe_token_to_usize(attributes_map.get(":end").map(|token| *token))?; let end = maybe_token_to_usize(attributes_map.get(":end").copied())?;
let contents_begin = let contents_begin = maybe_token_to_usize(attributes_map.get(":contents-begin").copied())?;
maybe_token_to_usize(attributes_map.get(":contents-begin").map(|token| *token))?; let contents_end = maybe_token_to_usize(attributes_map.get(":contents-end").copied())?;
let contents_end = let post_blank = maybe_token_to_usize(attributes_map.get(":post-blank").copied())?;
maybe_token_to_usize(attributes_map.get(":contents-end").map(|token| *token))?;
let post_blank =
maybe_token_to_usize(attributes_map.get(":post-blank").map(|token| *token))?;
let post_affiliated = let post_affiliated =
maybe_token_to_usize(attributes_map.get(":post-affiliated").map(|token| *token))?; maybe_token_to_usize(attributes_map.get(":post-affiliated").copied())?;
EmacsStandardProperties { EmacsStandardProperties {
begin, begin,
post_affiliated, post_affiliated,
@@ -169,62 +164,57 @@ fn maybe_token_to_usize(
Ok(token Ok(token
.map(|token| token.as_atom()) .map(|token| token.as_atom())
.map_or(Ok(None), |r| r.map(Some))? .map_or(Ok(None), |r| r.map(Some))?
.map(|val| { .and_then(|val| {
if val == "nil" { if val == "nil" {
None None
} else { } else {
Some(val.parse::<usize>()) Some(val.parse::<usize>())
} }
}) })
.flatten() // Outer option is whether or not the param exists, inner option is whether or not it is nil
.map_or(Ok(None), |r| r.map(Some))?) .map_or(Ok(None), |r| r.map(Some))?)
} }
/// Get a named property from the emacs token. /// Get a named property from the emacs token.
/// ///
/// Returns Ok(None) if value is nil or absent. /// Returns Ok(None) if value is nil or absent.
pub(crate) fn get_property<'b, 's, 'x>( pub(crate) fn get_property<'b, 's>(
emacs: &'b Token<'s>, emacs: &'b Token<'s>,
key: &'x str, key: &str,
) -> Result<Option<&'b Token<'s>>, Box<dyn std::error::Error>> { ) -> Result<Option<&'b Token<'s>>, Box<dyn std::error::Error>> {
let children = emacs.as_list()?; let children = emacs.as_list()?;
let attributes_child = children let attributes_child = children.get(1).ok_or("Should have an attributes child.")?;
.iter()
.nth(1)
.ok_or("Should have an attributes child.")?;
let attributes_map = attributes_child.as_map()?; let attributes_map = attributes_child.as_map()?;
let prop = attributes_map.get(key).map(|token| *token); let prop = attributes_map.get(key).copied();
match prop.map(|token| token.as_atom()) { if let Some(Ok("nil")) = prop.map(Token::as_atom) {
Some(Ok("nil")) => return Ok(None), return Ok(None);
_ => {} }
};
Ok(prop) Ok(prop)
} }
/// Get a named property containing an unquoted atom from the emacs token. /// Get a named property containing an unquoted atom from the emacs token.
/// ///
/// Returns None if key is not found. /// Returns None if key is not found.
pub(crate) fn get_property_unquoted_atom<'b, 's, 'x>( pub(crate) fn get_property_unquoted_atom<'s>(
emacs: &'b Token<'s>, emacs: &Token<'s>,
key: &'x str, key: &str,
) -> Result<Option<&'s str>, Box<dyn std::error::Error>> { ) -> Result<Option<&'s str>, Box<dyn std::error::Error>> {
Ok(get_property(emacs, key)? get_property(emacs, key)?
.map(Token::as_atom) .map(Token::as_atom)
.map_or(Ok(None), |r| r.map(Some))?) .map_or(Ok(None), |r| r.map(Some))
} }
/// Get a named property containing an quoted string from the emacs token. /// Get a named property containing an quoted string from the emacs token.
/// ///
/// Returns None if key is not found. /// Returns None if key is not found.
pub(crate) fn get_property_quoted_string<'b, 's, 'x>( pub(crate) fn get_property_quoted_string(
emacs: &'b Token<'s>, emacs: &Token<'_>,
key: &'x str, key: &str,
) -> Result<Option<String>, Box<dyn std::error::Error>> { ) -> Result<Option<String>, Box<dyn std::error::Error>> {
Ok(get_property(emacs, key)? get_property(emacs, key)?
.map(Token::as_atom) .map(Token::as_atom)
.map_or(Ok(None), |r| r.map(Some))? .map_or(Ok(None), |r| r.map(Some))?
.map(unquote) .map(unquote)
.map_or(Ok(None), |r| r.map(Some))?) .map_or(Ok(None), |r| r.map(Some))
} }
/// Get a named property containing an unquoted numeric value. /// Get a named property containing an unquoted numeric value.
@@ -301,8 +291,8 @@ where
Ok(()) Ok(())
} }
pub(crate) fn assert_no_children<'b, 's>( pub(crate) fn assert_no_children(
emacs: &'b Token<'s>, emacs: &Token<'_>,
this_status: &mut DiffStatus, this_status: &mut DiffStatus,
message: &mut Option<String>, message: &mut Option<String>,
) -> Result<(), Box<dyn std::error::Error>> { ) -> Result<(), Box<dyn std::error::Error>> {
@@ -331,7 +321,7 @@ where
let rust_key = rust_key.as_ref(); let rust_key = rust_key.as_ref();
let rust_value = rust_value.as_ref(); let rust_value = rust_value.as_ref();
let emacs_value = get_property_quoted_string(emacs, rust_key)?; let emacs_value = get_property_quoted_string(emacs, rust_key)?;
if Some(rust_value) != emacs_value.as_ref().map(String::as_str) { if Some(rust_value) != emacs_value.as_deref() {
let this_status = DiffStatus::Bad; let this_status = DiffStatus::Bad;
let message = Some(format!( let message = Some(format!(
"{} mismatch (emacs != rust) {:?} != {:?}", "{} mismatch (emacs != rust) {:?} != {:?}",
@@ -376,13 +366,23 @@ where
)?; )?;
ret.push(diff); ret.push(diff);
} }
AffiliatedKeywordValue::ListOfListsOfObjects(rust_value) => { AffiliatedKeywordValue::OptionalPair { optval, val } => {
let diff = compare_property_list_of_list_of_list_of_ast_nodes( let diff = compare_property_optional_pair(
source, source,
emacs, emacs,
rust, rust,
emacs_property_name.as_str(), emacs_property_name.as_str(),
|_| Some(rust_value), |_| Some((*optval, *val)),
)?;
ret.push(diff);
}
AffiliatedKeywordValue::ObjectTree(rust_value) => {
let diff = compare_property_object_tree(
source,
emacs,
rust,
emacs_property_name.as_str(),
|_| Some(rust_value.iter()),
)?; )?;
ret.push(diff); ret.push(diff);
} }

View File

@@ -1,15 +1,15 @@
use super::global_settings::EntityDefinition; use super::global_settings::EntityDefinition;
pub(crate) const DEFAULT_ORG_ELEMENT_PARSED_KEYWORDS: [&'static str; 1] = ["CAPTION"]; pub(crate) const DEFAULT_ORG_ELEMENT_PARSED_KEYWORDS: [&str; 1] = ["CAPTION"];
pub(crate) const DEFAULT_ORG_ELEMENT_DUAL_KEYWORDS: [&'static str; 2] = ["CAPTION", "RESULTS"]; pub(crate) const DEFAULT_ORG_ELEMENT_DUAL_KEYWORDS: [&str; 2] = ["CAPTION", "RESULTS"];
pub(crate) const DEFAULT_ORG_ELEMENT_AFFILIATED_KEYWORDS: [&'static str; 13] = [ pub(crate) const DEFAULT_ORG_ELEMENT_AFFILIATED_KEYWORDS: [&str; 13] = [
"CAPTION", "DATA", "HEADER", "HEADERS", "LABEL", "NAME", "PLOT", "RESNAME", "RESULT", "CAPTION", "DATA", "HEADER", "HEADERS", "LABEL", "NAME", "PLOT", "RESNAME", "RESULT",
"RESULTS", "SOURCE", "SRCNAME", "TBLNAME", "RESULTS", "SOURCE", "SRCNAME", "TBLNAME",
]; ];
pub(crate) const DEFAULT_ORG_ELEMENT_KEYWORD_TRANSLATION_ALIST: [(&'static str, &'static str); 8] = [ pub(crate) const DEFAULT_ORG_ELEMENT_KEYWORD_TRANSLATION_ALIST: [(&str, &str); 8] = [
("DATA", "NAME"), ("DATA", "NAME"),
("LABEL", "NAME"), ("LABEL", "NAME"),
("RESNAME", "NAME"), ("RESNAME", "NAME"),
@@ -20,7 +20,7 @@ pub(crate) const DEFAULT_ORG_ELEMENT_KEYWORD_TRANSLATION_ALIST: [(&'static str,
("HEADERS", "HEADER"), ("HEADERS", "HEADER"),
]; ];
pub(crate) const DEFAULT_ORG_LINK_PARAMETERS: [&'static str; 23] = [ pub(crate) const DEFAULT_ORG_LINK_PARAMETERS: [&str; 23] = [
"id", "id",
"eww", "eww",
"rmail", "rmail",

View File

@@ -112,23 +112,18 @@ impl<'g, 'r, 's> Context<'g, 'r, 's> {
let mut current_class_filter = ExitClass::Gamma; let mut current_class_filter = ExitClass::Gamma;
for current_node in self.iter_context() { for current_node in self.iter_context() {
let context_element = current_node.get_data(); let context_element = current_node.get_data();
match context_element { if let ContextElement::ExitMatcherNode(exit_matcher) = context_element {
ContextElement::ExitMatcherNode(exit_matcher) => { if exit_matcher.class as u32 <= current_class_filter as u32 {
if exit_matcher.class as u32 <= current_class_filter as u32 { current_class_filter = exit_matcher.class;
current_class_filter = exit_matcher.class; let local_result = (exit_matcher.exit_matcher)(&current_node, i);
let local_result = (exit_matcher.exit_matcher)(&current_node, i); if local_result.is_ok() {
if local_result.is_ok() { return local_result;
return local_result;
}
} }
} }
_ => {} }
};
} }
// TODO: Make this a specific error instead of just a generic MyError // TODO: Make this a specific error instead of just a generic MyError
return Err(nom::Err::Error(CustomError::MyError(MyError( return Err(nom::Err::Error(CustomError::MyError(MyError("NoExit"))));
"NoExit".into(),
))));
} }
/// Indicates if elements should consume the whitespace after them. /// Indicates if elements should consume the whitespace after them.
@@ -140,11 +135,8 @@ impl<'g, 'r, 's> Context<'g, 'r, 's> {
fn _should_consume_trailing_whitespace(&self) -> Option<bool> { fn _should_consume_trailing_whitespace(&self) -> Option<bool> {
for current_node in self.iter() { for current_node in self.iter() {
match current_node { if let ContextElement::ConsumeTrailingWhitespace(should) = current_node {
ContextElement::ConsumeTrailingWhitespace(should) => { return Some(*should);
return Some(*should);
}
_ => {}
} }
} }
None None

View File

@@ -20,10 +20,9 @@ impl FileAccessInterface for LocalFileAccessInterface {
fn read_file(&self, path: &str) -> Result<String, std::io::Error> { fn read_file(&self, path: &str) -> Result<String, std::io::Error> {
let final_path = self let final_path = self
.working_directory .working_directory
.as_ref() .as_deref()
.map(PathBuf::as_path)
.map(|pb| pb.join(path)) .map(|pb| pb.join(path))
.unwrap_or_else(|| PathBuf::from(path)); .unwrap_or_else(|| PathBuf::from(path));
Ok(std::fs::read_to_string(final_path)?) std::fs::read_to_string(final_path)
} }
} }

View File

@@ -126,14 +126,10 @@ impl<'g, 's> Default for GlobalSettings<'g, 's> {
} }
} }
#[derive(Debug, Clone, PartialEq)] #[derive(Debug, Clone, PartialEq, Default)]
pub enum HeadlineLevelFilter { pub enum HeadlineLevelFilter {
Odd, Odd,
#[default]
OddEven, OddEven,
} }
impl Default for HeadlineLevelFilter {
fn default() -> Self {
HeadlineLevelFilter::OddEven
}
}

View File

@@ -50,7 +50,7 @@ impl<'a, T> Iterator for Iter<'a, T> {
fn next(&mut self) -> Option<Self::Item> { fn next(&mut self) -> Option<Self::Item> {
let ret = self.next.map(|link| link.get_data()); let ret = self.next.map(|link| link.get_data());
self.next = self.next.map(|link| link.get_parent()).flatten(); self.next = self.next.and_then(|link| link.get_parent());
ret ret
} }
} }
@@ -64,7 +64,7 @@ impl<'a, T> Iterator for IterList<'a, T> {
fn next(&mut self) -> Option<Self::Item> { fn next(&mut self) -> Option<Self::Item> {
let ret = self.next; let ret = self.next;
self.next = self.next.map(|this| this.get_parent()).flatten(); self.next = self.next.and_then(|link| link.get_parent());
ret ret
} }
} }

View File

@@ -2,6 +2,7 @@ use crate::error::Res;
use crate::parser::OrgSource; use crate::parser::OrgSource;
mod constants; mod constants;
#[allow(clippy::module_inception)]
mod context; mod context;
mod exiting; mod exiting;
mod file_access_interface; mod file_access_interface;
@@ -30,4 +31,5 @@ pub use global_settings::GlobalSettings;
pub use global_settings::HeadlineLevelFilter; pub use global_settings::HeadlineLevelFilter;
pub use global_settings::DEFAULT_TAB_WIDTH; pub use global_settings::DEFAULT_TAB_WIDTH;
pub(crate) use list::List; pub(crate) use list::List;
pub(crate) use parser_with_context::bind_context;
pub(crate) use parser_with_context::parser_with_context; pub(crate) use parser_with_context::parser_with_context;

View File

@@ -4,3 +4,10 @@ macro_rules! parser_with_context {
}; };
} }
pub(crate) use parser_with_context; pub(crate) use parser_with_context;
macro_rules! bind_context {
($target:expr, $context:expr) => {
|i| $target($context, i)
};
}
pub(crate) use bind_context;

View File

@@ -1,3 +1,4 @@
#[allow(clippy::module_inception)]
mod error; mod error;
pub(crate) use error::CustomError; pub(crate) use error::CustomError;
pub(crate) use error::MyError; pub(crate) use error::MyError;

View File

@@ -5,7 +5,7 @@ use tracing_subscriber::prelude::__tracing_subscriber_SubscriberExt;
#[cfg(feature = "tracing")] #[cfg(feature = "tracing")]
use tracing_subscriber::util::SubscriberInitExt; use tracing_subscriber::util::SubscriberInitExt;
const SERVICE_NAME: &'static str = "organic"; const SERVICE_NAME: &str = "organic";
// Despite the obvious verbosity that fully-qualifying everything causes, in these functions I am fully-qualifying everything relating to tracing. This is because the tracing feature involves multiple libraries working together and so I think it is beneficial to see which libraries contribute which bits. // Despite the obvious verbosity that fully-qualifying everything causes, in these functions I am fully-qualifying everything relating to tracing. This is because the tracing feature involves multiple libraries working together and so I think it is beneficial to see which libraries contribute which bits.

View File

@@ -2,7 +2,11 @@
#![feature(trait_alias)] #![feature(trait_alias)]
#![feature(path_file_prefix)] #![feature(path_file_prefix)]
#![feature(is_sorted)] #![feature(is_sorted)]
#![feature(test)]
// TODO: #![warn(missing_docs)] // TODO: #![warn(missing_docs)]
#![allow(clippy::bool_assert_comparison)] // Sometimes you want the long form because its easier to see at a glance.
extern crate test;
#[cfg(feature = "compare")] #[cfg(feature = "compare")]
pub mod compare; pub mod compare;

View File

@@ -23,13 +23,12 @@ fn main() -> Result<(), Box<dyn std::error::Error>> {
#[cfg(feature = "tracing")] #[cfg(feature = "tracing")]
fn main() -> Result<(), Box<dyn std::error::Error>> { fn main() -> Result<(), Box<dyn std::error::Error>> {
let rt = tokio::runtime::Runtime::new()?; let rt = tokio::runtime::Runtime::new()?;
let result = rt.block_on(async { rt.block_on(async {
init_telemetry()?; init_telemetry()?;
let main_body_result = main_body(); let main_body_result = main_body();
shutdown_telemetry()?; shutdown_telemetry()?;
main_body_result main_body_result
}); })
result
} }
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))] #[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
@@ -70,10 +69,9 @@ fn run_parse_on_file<P: AsRef<Path>>(org_path: P) -> Result<(), Box<dyn std::err
let file_access_interface = LocalFileAccessInterface { let file_access_interface = LocalFileAccessInterface {
working_directory: Some(parent_directory.to_path_buf()), working_directory: Some(parent_directory.to_path_buf()),
}; };
let global_settings = { let global_settings = GlobalSettings {
let mut global_settings = GlobalSettings::default(); file_access: &file_access_interface,
global_settings.file_access = &file_access_interface; ..Default::default()
global_settings
}; };
let rust_parsed = parse_with_settings(org_contents, &global_settings)?; let rust_parsed = parse_with_settings(org_contents, &global_settings)?;
println!("{:#?}", rust_parsed); println!("{:#?}", rust_parsed);

View File

@@ -16,7 +16,7 @@ use nom::sequence::tuple;
use super::object_parser::standard_set_object; use super::object_parser::standard_set_object;
use super::util::confine_context; use super::util::confine_context;
use crate::context::parser_with_context; use crate::context::bind_context;
use crate::context::Context; use crate::context::Context;
use crate::context::ContextElement; use crate::context::ContextElement;
use crate::context::GlobalSettings; use crate::context::GlobalSettings;
@@ -35,82 +35,94 @@ where
let mut ret = BTreeMap::new(); let mut ret = BTreeMap::new();
for kw in input { for kw in input {
let translated_name = translate_name(global_settings, kw.key); let translated_name = translate_name(global_settings, kw.key);
if is_single_string_keyword(global_settings, translated_name.as_str()) { let keyword_type = identify_keyword_type(global_settings, translated_name.as_str());
ret.insert( match keyword_type {
translated_name, AffiliatedKeywordType::SingleString => {
AffiliatedKeywordValue::SingleString(kw.value), ret.insert(
); translated_name,
} else if is_list_of_single_string_keyword(global_settings, translated_name.as_str()) { AffiliatedKeywordValue::SingleString(kw.value),
let list_of_strings = ret );
.entry(translated_name)
.or_insert_with(|| AffiliatedKeywordValue::ListOfStrings(Vec::with_capacity(1)));
match list_of_strings {
AffiliatedKeywordValue::ListOfStrings(list_of_strings)
if list_of_strings.is_empty() =>
{
list_of_strings.push(kw.value);
}
AffiliatedKeywordValue::ListOfStrings(list_of_strings) => {
list_of_strings.clear();
list_of_strings.push(kw.value);
}
_ => panic!("Invalid AffiliatedKeywordValue type."),
} }
} else if is_list_of_objects_keyword(global_settings, translated_name.as_str()) { AffiliatedKeywordType::ListOfStrings => {
let initial_context = ContextElement::document_context(); let list_of_strings = ret.entry(translated_name).or_insert_with(|| {
let initial_context = Context::new(global_settings, List::new(&initial_context)); AffiliatedKeywordValue::ListOfStrings(Vec::with_capacity(1))
});
let (_remaining, optional_objects) = opt(all_consuming(map( match list_of_strings {
tuple(( AffiliatedKeywordValue::ListOfStrings(list_of_strings) => {
take_until("["), list_of_strings.push(kw.value);
tag("["), }
map_parser( _ => panic!("Invalid AffiliatedKeywordValue type."),
}
}
AffiliatedKeywordType::OptionalPair => {
let (_remaining, optional_string) = opt(all_consuming(map(
tuple((
take_until::<_, &str, nom::error::Error<_>>("["),
tag("["),
recognize(many_till(anychar, peek(tuple((tag("]"), eof))))), recognize(many_till(anychar, peek(tuple((tag("]"), eof))))),
confine_context(|i| { tag("]"),
all_consuming(many0(parser_with_context!(standard_set_object)( eof,
&initial_context, )),
)))(i) |(_, _, objects, _, _)| objects,
}), )))(kw.key)
), .expect("Parser should always succeed.");
tag("]"), ret.insert(
eof, translated_name,
)), AffiliatedKeywordValue::OptionalPair {
|(_, _, objects, _, _)| objects, optval: optional_string,
)))(kw.key.into()) val: kw.value,
.expect("Object parser should always succeed."); },
);
}
AffiliatedKeywordType::ObjectTree => {
let initial_context = ContextElement::document_context();
let initial_context = Context::new(global_settings, List::new(&initial_context));
// TODO: This should be omitting footnote references let (_remaining, optional_objects) = opt(all_consuming(map(
let (_remaining, objects) = all_consuming(many0(parser_with_context!( tuple((
standard_set_object take_until("["),
)(&initial_context)))(kw.value.into()) tag("["),
.expect("Object parser should always succeed."); map_parser(
let list_of_lists = ret.entry(translated_name).or_insert_with(|| { recognize(many_till(anychar, peek(tuple((tag("]"), eof))))),
AffiliatedKeywordValue::ListOfListsOfObjects(Vec::with_capacity(1)) confine_context(|i| {
}); all_consuming(many0(bind_context!(
match list_of_lists { standard_set_object,
AffiliatedKeywordValue::ListOfListsOfObjects(list_of_lists) => { &initial_context
list_of_lists.push((optional_objects, objects)); )))(i)
}),
),
tag("]"),
eof,
)),
|(_, _, objects, _, _)| objects,
)))(kw.key.into())
.expect("Object parser should always succeed.");
// TODO: This should be omitting footnote references
let (_remaining, objects) = all_consuming(many0(bind_context!(
standard_set_object,
&initial_context
)))(kw.value.into())
.expect("Object parser should always succeed.");
let entry_per_keyword_list = ret
.entry(translated_name)
.or_insert_with(|| AffiliatedKeywordValue::ObjectTree(Vec::with_capacity(1)));
match entry_per_keyword_list {
AffiliatedKeywordValue::ObjectTree(entry_per_keyword_list) => {
entry_per_keyword_list.push((optional_objects, objects));
}
_ => panic!("Invalid AffiliatedKeywordValue type."),
} }
_ => panic!("Invalid AffiliatedKeywordValue type."),
} }
} else { };
let list_of_strings = ret
.entry(translated_name)
.or_insert_with(|| AffiliatedKeywordValue::ListOfStrings(Vec::with_capacity(1)));
match list_of_strings {
AffiliatedKeywordValue::ListOfStrings(list_of_strings) => {
list_of_strings.push(kw.value);
}
_ => panic!("Invalid AffiliatedKeywordValue type."),
}
}
} }
AffiliatedKeywords { keywords: ret } AffiliatedKeywords { keywords: ret }
} }
fn translate_name<'g, 's>(global_settings: &'g GlobalSettings<'g, 's>, name: &'s str) -> String { fn translate_name<'g, 's>(global_settings: &'g GlobalSettings<'g, 's>, name: &'s str) -> String {
let name_until_optval = name let name_until_optval = name
.split_once("[") .split_once('[')
.map(|(before, _after)| before) .map(|(before, _after)| before)
.unwrap_or(name); .unwrap_or(name);
for (src, dst) in global_settings.element_keyword_translation_alist { for (src, dst) in global_settings.element_keyword_translation_alist {
@@ -121,40 +133,37 @@ fn translate_name<'g, 's>(global_settings: &'g GlobalSettings<'g, 's>, name: &'s
name_until_optval.to_lowercase() name_until_optval.to_lowercase()
} }
fn is_single_string_keyword<'g, 's>( enum AffiliatedKeywordType {
_global_settings: &'g GlobalSettings<'g, 's>, SingleString,
name: &'s str, ListOfStrings,
) -> bool { OptionalPair,
// TODO: Is this defined by an elisp variable? ObjectTree,
for single_string_name in ["plot", "name"] {
if name.eq_ignore_ascii_case(single_string_name) {
return true;
}
}
false
} }
fn is_list_of_single_string_keyword<'g, 's>( fn identify_keyword_type<'g, 's>(
_global_settings: &'g GlobalSettings<'g, 's>,
name: &'s str,
) -> bool {
// TODO: Is this defined by an elisp variable?
for single_string_name in ["results"] {
if name.eq_ignore_ascii_case(single_string_name) {
return true;
}
}
false
}
fn is_list_of_objects_keyword<'g, 's>(
global_settings: &'g GlobalSettings<'g, 's>, global_settings: &'g GlobalSettings<'g, 's>,
name: &'s str, name: &'s str,
) -> bool { ) -> AffiliatedKeywordType {
for parsed_keyword in global_settings.element_parsed_keywords { let is_multiple = ["CAPTION", "HEADER"]
if name.eq_ignore_ascii_case(parsed_keyword) { .into_iter()
return true; .any(|candidate| name.eq_ignore_ascii_case(candidate))
} || name.to_lowercase().starts_with("attr_");
let is_parsed = global_settings
.element_parsed_keywords
.iter()
.any(|candidate| name.eq_ignore_ascii_case(candidate));
let can_have_optval = global_settings
.element_dual_keywords
.iter()
.any(|candidate| name.eq_ignore_ascii_case(candidate));
match (is_multiple, is_parsed, can_have_optval) {
(true, true, true) => AffiliatedKeywordType::ObjectTree,
(true, true, false) => unreachable!("Nothing like this exists in upstream org-mode."),
(true, false, true) => unreachable!("Nothing like this exists in upstream org-mode."),
(true, false, false) => AffiliatedKeywordType::ListOfStrings,
(false, true, true) => unreachable!("Nothing like this exists in upstream org-mode."),
(false, true, false) => unreachable!("Nothing like this exists in upstream org-mode."),
(false, false, true) => AffiliatedKeywordType::OptionalPair,
(false, false, false) => AffiliatedKeywordType::SingleString,
} }
false
} }

View File

@@ -66,8 +66,7 @@ where
} }
let (remaining, _ws) = space0(remaining)?; let (remaining, _ws) = space0(remaining)?;
let (remaining, (value, (call, inside_header, arguments, end_header))) = let (remaining, (value, babel_call_value)) = consumed(babel_call_value)(remaining)?;
consumed(babel_call_value)(remaining)?;
let (remaining, _ws) = tuple((space0, org_line_ending))(remaining)?; let (remaining, _ws) = tuple((space0, org_line_ending))(remaining)?;
let (remaining, _trailing_ws) = let (remaining, _trailing_ws) =
@@ -83,33 +82,36 @@ where
affiliated_keywords, affiliated_keywords,
), ),
value: Into::<&str>::into(value).trim_end(), value: Into::<&str>::into(value).trim_end(),
call: call.map(Into::<&str>::into), call: babel_call_value.call.map(Into::<&str>::into),
inside_header: inside_header.map(Into::<&str>::into), inside_header: babel_call_value.inside_header.map(Into::<&str>::into),
arguments: arguments.map(Into::<&str>::into), arguments: babel_call_value.arguments.map(Into::<&str>::into),
end_header: end_header.map(Into::<&str>::into), end_header: babel_call_value.end_header.map(Into::<&str>::into),
}, },
)) ))
} }
#[derive(Debug)]
struct BabelCallValue<'s> {
call: Option<OrgSource<'s>>,
inside_header: Option<OrgSource<'s>>,
arguments: Option<OrgSource<'s>>,
end_header: Option<OrgSource<'s>>,
}
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))] #[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
fn babel_call_value<'s>( fn babel_call_value<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, BabelCallValue<'s>> {
input: OrgSource<'s>,
) -> Res<
OrgSource<'s>,
(
Option<OrgSource<'s>>,
Option<OrgSource<'s>>,
Option<OrgSource<'s>>,
Option<OrgSource<'s>>,
),
> {
let (remaining, call) = opt(babel_call_call)(input)?; let (remaining, call) = opt(babel_call_call)(input)?;
let (remaining, inside_header) = opt(inside_header)(remaining)?; let (remaining, inside_header) = opt(inside_header)(remaining)?;
let (remaining, arguments) = opt(arguments)(remaining)?; let (remaining, arguments) = opt(arguments)(remaining)?;
let (remaining, end_header) = opt(end_header)(remaining)?; let (remaining, end_header) = opt(end_header)(remaining)?;
Ok(( Ok((
remaining, remaining,
(call, inside_header, arguments.flatten(), end_header), BabelCallValue {
call,
inside_header,
arguments: arguments.flatten(),
end_header,
},
)) ))
} }

View File

@@ -205,6 +205,7 @@ fn _global_suffix_end<'b, 'g, 'r, 's>(
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*; use super::*;
use crate::context::bind_context;
use crate::context::Context; use crate::context::Context;
use crate::context::GlobalSettings; use crate::context::GlobalSettings;
use crate::context::List; use crate::context::List;
@@ -219,7 +220,7 @@ mod tests {
let global_settings = GlobalSettings::default(); let global_settings = GlobalSettings::default();
let initial_context = ContextElement::document_context(); let initial_context = ContextElement::document_context();
let initial_context = Context::new(&global_settings, List::new(&initial_context)); let initial_context = Context::new(&global_settings, List::new(&initial_context));
let paragraph_matcher = parser_with_context!(element(true))(&initial_context); let paragraph_matcher = bind_context!(element(true), &initial_context);
let (remaining, first_paragraph) = paragraph_matcher(input).expect("Parse first paragraph"); let (remaining, first_paragraph) = paragraph_matcher(input).expect("Parse first paragraph");
let first_paragraph = match first_paragraph { let first_paragraph = match first_paragraph {
Element::Paragraph(paragraph) => paragraph, Element::Paragraph(paragraph) => paragraph,

View File

@@ -200,7 +200,7 @@ where
let (remaining, output) = inner(input)?; let (remaining, output) = inner(input)?;
if remaining.get_bracket_depth() - pre_bracket_depth != 0 { if remaining.get_bracket_depth() - pre_bracket_depth != 0 {
return Err(nom::Err::Error(CustomError::MyError(MyError( return Err(nom::Err::Error(CustomError::MyError(MyError(
"UnbalancedBrackets".into(), "UnbalancedBrackets",
)))); ))));
} }
Ok((remaining, output)) Ok((remaining, output))

View File

@@ -36,7 +36,7 @@ pub(crate) fn comment<'b, 'g, 'r, 's>(
) -> Res<OrgSource<'s>, Comment<'s>> { ) -> Res<OrgSource<'s>, Comment<'s>> {
if immediate_in_section(context, "comment") { if immediate_in_section(context, "comment") {
return Err(nom::Err::Error(CustomError::MyError(MyError( return Err(nom::Err::Error(CustomError::MyError(MyError(
"Cannot nest objects of the same element".into(), "Cannot nest objects of the same element",
)))); ))));
} }
let parser_context = ContextElement::Context("comment"); let parser_context = ContextElement::Context("comment");
@@ -104,6 +104,7 @@ pub(crate) fn detect_comment<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, ()>
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*; use super::*;
use crate::context::bind_context;
use crate::context::Context; use crate::context::Context;
use crate::context::ContextElement; use crate::context::ContextElement;
use crate::context::GlobalSettings; use crate::context::GlobalSettings;
@@ -119,7 +120,7 @@ mod tests {
let global_settings = GlobalSettings::default(); let global_settings = GlobalSettings::default();
let initial_context = ContextElement::document_context(); let initial_context = ContextElement::document_context();
let initial_context = Context::new(&global_settings, List::new(&initial_context)); let initial_context = Context::new(&global_settings, List::new(&initial_context));
let comment_matcher = parser_with_context!(comment)(&initial_context); let comment_matcher = bind_context!(comment, &initial_context);
let (remaining, first_comment) = comment_matcher(input).expect("Parse first comment"); let (remaining, first_comment) = comment_matcher(input).expect("Parse first comment");
assert_eq!( assert_eq!(
Into::<&str>::into(remaining), Into::<&str>::into(remaining),

View File

@@ -1,11 +1,9 @@
use nom::bytes::complete::is_not; use nom::bytes::complete::is_not;
use nom::bytes::complete::tag; use nom::bytes::complete::tag;
use nom::combinator::recognize; use nom::combinator::recognize;
use nom::multi::many0;
use nom::sequence::tuple; use nom::sequence::tuple;
use super::affiliated_keyword::parse_affiliated_keywords; use super::affiliated_keyword::parse_affiliated_keywords;
use super::keyword::affiliated_keyword;
use super::org_source::OrgSource; use super::org_source::OrgSource;
use super::util::maybe_consume_trailing_whitespace_if_not_exiting; use super::util::maybe_consume_trailing_whitespace_if_not_exiting;
use super::util::org_line_ending; use super::util::org_line_ending;
@@ -49,9 +47,19 @@ where
)) ))
} }
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))] #[cfg_attr(
pub(crate) fn detect_diary_sexp<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, ()> { feature = "tracing",
let (input, _) = many0(affiliated_keyword)(input)?; tracing::instrument(ret, level = "debug", skip(_context, _affiliated_keywords))
tuple((start_of_line, tag("%%(")))(input)?; )]
pub(crate) fn detect_diary_sexp<'b, 'g, 'r, 's, AK>(
_affiliated_keywords: AK,
remaining: OrgSource<'s>,
_context: RefContext<'b, 'g, 'r, 's>,
input: OrgSource<'s>,
) -> Res<OrgSource<'s>, ()>
where
AK: IntoIterator<Item = Keyword<'s>>,
{
tuple((start_of_line, tag("%%(")))(remaining)?;
Ok((input, ())) Ok((input, ()))
} }

View File

@@ -11,6 +11,7 @@ use super::in_buffer_settings::scan_for_in_buffer_settings;
use super::org_source::OrgSource; use super::org_source::OrgSource;
use super::section::zeroth_section; use super::section::zeroth_section;
use super::util::get_consumed; use super::util::get_consumed;
use crate::context::bind_context;
use crate::context::parser_with_context; use crate::context::parser_with_context;
use crate::context::Context; use crate::context::Context;
use crate::context::ContextElement; use crate::context::ContextElement;
@@ -30,7 +31,7 @@ use crate::types::Object;
/// ///
/// This is a main entry point for Organic. It will parse the full contents of the input string as an org-mode document without an underlying file attached. /// This is a main entry point for Organic. It will parse the full contents of the input string as an org-mode document without an underlying file attached.
#[allow(dead_code)] #[allow(dead_code)]
pub fn parse<'s>(input: &'s str) -> Result<Document<'s>, Box<dyn std::error::Error>> { pub fn parse(input: &str) -> Result<Document<'_>, Box<dyn std::error::Error>> {
parse_file_with_settings::<&Path>(input, &GlobalSettings::default(), None) parse_file_with_settings::<&Path>(input, &GlobalSettings::default(), None)
} }
@@ -40,10 +41,10 @@ pub fn parse<'s>(input: &'s str) -> Result<Document<'s>, Box<dyn std::error::Err
/// ///
/// file_path is not used for reading the file contents. It is only used for determining the document category and filling in the path attribute on the Document. /// file_path is not used for reading the file contents. It is only used for determining the document category and filling in the path attribute on the Document.
#[allow(dead_code)] #[allow(dead_code)]
pub fn parse_file<'s, P: AsRef<Path>>( pub fn parse_file<P: AsRef<Path>>(
input: &'s str, input: &str,
file_path: Option<P>, file_path: Option<P>,
) -> Result<Document<'s>, Box<dyn std::error::Error>> { ) -> Result<Document<'_>, Box<dyn std::error::Error>> {
parse_file_with_settings(input, &GlobalSettings::default(), file_path) parse_file_with_settings(input, &GlobalSettings::default(), file_path)
} }
@@ -77,7 +78,7 @@ pub fn parse_file_with_settings<'g, 's, P: AsRef<Path>>(
let initial_context = Context::new(global_settings, List::new(&initial_context)); let initial_context = Context::new(global_settings, List::new(&initial_context));
let wrapped_input = OrgSource::new(input); let wrapped_input = OrgSource::new(input);
let mut doc = let mut doc =
all_consuming(parser_with_context!(document_org_source)(&initial_context))(wrapped_input) all_consuming(bind_context!(document_org_source, &initial_context))(wrapped_input)
.map_err(|err| err.to_string()) .map_err(|err| err.to_string())
.map(|(_remaining, parsed_document)| parsed_document)?; .map(|(_remaining, parsed_document)| parsed_document)?;
if let Some(file_path) = file_path { if let Some(file_path) = file_path {
@@ -101,10 +102,7 @@ pub fn parse_file_with_settings<'g, 's, P: AsRef<Path>>(
/// ///
/// This will not prevent additional settings from being learned during parsing, for example when encountering a "#+TODO". /// This will not prevent additional settings from being learned during parsing, for example when encountering a "#+TODO".
#[allow(dead_code)] #[allow(dead_code)]
fn document<'b, 'g, 'r, 's>( fn document<'s>(context: RefContext<'_, '_, '_, 's>, input: &'s str) -> Res<&'s str, Document<'s>> {
context: RefContext<'b, 'g, 'r, 's>,
input: &'s str,
) -> Res<&'s str, Document<'s>> {
let (remaining, doc) = document_org_source(context, input.into()).map_err(convert_error)?; let (remaining, doc) = document_org_source(context, input.into()).map_err(convert_error)?;
Ok((Into::<&str>::into(remaining), doc)) Ok((Into::<&str>::into(remaining), doc))
} }
@@ -137,8 +135,7 @@ fn document_org_source<'b, 'g, 'r, 's>(
scan_for_in_buffer_settings(setup_file.into()).map_err(|err| { scan_for_in_buffer_settings(setup_file.into()).map_err(|err| {
eprintln!("{}", err); eprintln!("{}", err);
nom::Err::Error(CustomError::MyError(MyError( nom::Err::Error(CustomError::MyError(MyError(
"TODO: make this take an owned string so I can dump err.to_string() into it." "TODO: make this take an owned string so I can dump err.to_string() into it.",
.into(),
))) )))
})?; })?;
final_settings.extend(setup_file_settings); final_settings.extend(setup_file_settings);
@@ -148,8 +145,7 @@ fn document_org_source<'b, 'g, 'r, 's>(
.map_err(|err| { .map_err(|err| {
eprintln!("{}", err); eprintln!("{}", err);
nom::Err::Error(CustomError::MyError(MyError( nom::Err::Error(CustomError::MyError(MyError(
"TODO: make this take an owned string so I can dump err.to_string() into it." "TODO: make this take an owned string so I can dump err.to_string() into it.",
.into(),
))) )))
})?; })?;
let new_context = context.with_global_settings(&new_settings); let new_context = context.with_global_settings(&new_settings);

View File

@@ -49,7 +49,7 @@ where
{ {
if immediate_in_section(context, "drawer") { if immediate_in_section(context, "drawer") {
return Err(nom::Err::Error(CustomError::MyError(MyError( return Err(nom::Err::Error(CustomError::MyError(MyError(
"Cannot nest objects of the same element".into(), "Cannot nest objects of the same element",
)))); ))));
} }
start_of_line(remaining)?; start_of_line(remaining)?;

View File

@@ -55,7 +55,7 @@ where
{ {
if immediate_in_section(context, "dynamic block") { if immediate_in_section(context, "dynamic block") {
return Err(nom::Err::Error(CustomError::MyError(MyError( return Err(nom::Err::Error(CustomError::MyError(MyError(
"Cannot nest objects of the same element".into(), "Cannot nest objects of the same element",
)))); ))));
} }
@@ -79,10 +79,7 @@ where
let parser_context = context.with_additional_node(&contexts[0]); let parser_context = context.with_additional_node(&contexts[0]);
let parser_context = parser_context.with_additional_node(&contexts[1]); let parser_context = parser_context.with_additional_node(&contexts[1]);
let parser_context = parser_context.with_additional_node(&contexts[2]); let parser_context = parser_context.with_additional_node(&contexts[2]);
let parameters = match parameters { let parameters = parameters.map(|(_ws, parameters)| parameters);
Some((_ws, parameters)) => Some(parameters),
None => None,
};
let element_matcher = parser_with_context!(element(true))(&parser_context); let element_matcher = parser_with_context!(element(true))(&parser_context);
let exit_matcher = parser_with_context!(exit_matcher_parser)(&parser_context); let exit_matcher = parser_with_context!(exit_matcher_parser)(&parser_context);
not(exit_matcher)(remaining)?; not(exit_matcher)(remaining)?;

View File

@@ -1,4 +1,3 @@
use nom::branch::alt;
use nom::multi::many0; use nom::multi::many0;
use super::babel_call::babel_call; use super::babel_call::babel_call;
@@ -56,7 +55,8 @@ fn _element<'b, 'g, 'r, 's>(
input: OrgSource<'s>, input: OrgSource<'s>,
can_be_paragraph: bool, can_be_paragraph: bool,
) -> Res<OrgSource<'s>, Element<'s>> { ) -> Res<OrgSource<'s>, Element<'s>> {
let (post_affiliated_keywords_input, affiliated_keywords) = many0(affiliated_keyword)(input)?; let (post_affiliated_keywords_input, affiliated_keywords) =
many0(parser_with_context!(affiliated_keyword)(context))(input)?;
let mut affiliated_keywords = affiliated_keywords.into_iter(); let mut affiliated_keywords = affiliated_keywords.into_iter();
@@ -272,22 +272,58 @@ fn _detect_element<'b, 'g, 'r, 's>(
input: OrgSource<'s>, input: OrgSource<'s>,
can_be_paragraph: bool, can_be_paragraph: bool,
) -> Res<OrgSource<'s>, ()> { ) -> Res<OrgSource<'s>, ()> {
if alt(( let (post_affiliated_keywords_input, affiliated_keywords) =
parser_with_context!(detect_plain_list)(context), many0(parser_with_context!(affiliated_keyword)(context))(input)?;
let mut affiliated_keywords = affiliated_keywords.into_iter();
ak_element!(
detect_plain_list,
&mut affiliated_keywords,
post_affiliated_keywords_input,
context,
input
);
ak_element!(
detect_footnote_definition, detect_footnote_definition,
&mut affiliated_keywords,
post_affiliated_keywords_input,
context,
input
);
ak_element!(
detect_diary_sexp, detect_diary_sexp,
detect_comment, &mut affiliated_keywords,
post_affiliated_keywords_input,
context,
input
);
element!(detect_comment, input);
ak_element!(
detect_fixed_width_area, detect_fixed_width_area,
&mut affiliated_keywords,
post_affiliated_keywords_input,
context,
input
);
ak_element!(
detect_table, detect_table,
))(input) &mut affiliated_keywords,
.is_ok() post_affiliated_keywords_input,
{ context,
return Ok((input, ())); input
} );
if _element(context, input, can_be_paragraph).is_ok() { if _element(context, input, can_be_paragraph).is_ok() {
return Ok((input, ())); return Ok((input, ()));
} }
return Err(nom::Err::Error(CustomError::MyError(MyError(
"No element detected.".into(), Err(nom::Err::Error(CustomError::MyError(MyError(
)))); "No element detected.",
))))
} }

View File

@@ -60,21 +60,16 @@ fn name<'b, 'g, 'r, 's>(
let result = tuple(( let result = tuple((
tag::<_, _, CustomError<_>>(entity.name), tag::<_, _, CustomError<_>>(entity.name),
alt(( alt((
verify(map(tag("{}"), |_| true), |_| !entity.name.ends_with(" ")), verify(map(tag("{}"), |_| true), |_| !entity.name.ends_with(' ')),
map(peek(recognize(entity_end)), |_| false), map(peek(recognize(entity_end)), |_| false),
)), )),
))(input); ))(input);
match result { if let Ok((remaining, (ent, use_brackets))) = result {
Ok((remaining, (ent, use_brackets))) => { return Ok((remaining, (entity, ent, use_brackets)));
return Ok((remaining, (entity, ent, use_brackets)));
}
Err(_) => {}
} }
} }
Err(nom::Err::Error(CustomError::MyError(MyError( Err(nom::Err::Error(CustomError::MyError(MyError("NoEntity"))))
"NoEntity".into(),
))))
} }
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))] #[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]

View File

@@ -10,7 +10,6 @@ use nom::sequence::preceded;
use nom::sequence::tuple; use nom::sequence::tuple;
use super::affiliated_keyword::parse_affiliated_keywords; use super::affiliated_keyword::parse_affiliated_keywords;
use super::keyword::affiliated_keyword;
use super::org_source::OrgSource; use super::org_source::OrgSource;
use super::util::maybe_consume_trailing_whitespace_if_not_exiting; use super::util::maybe_consume_trailing_whitespace_if_not_exiting;
use super::util::org_line_ending; use super::util::org_line_ending;
@@ -89,14 +88,24 @@ fn fixed_width_area_line<'b, 'g, 'r, 's>(
Ok((remaining, value)) Ok((remaining, value))
} }
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))] #[cfg_attr(
pub(crate) fn detect_fixed_width_area<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, ()> { feature = "tracing",
let (input, _) = many0(affiliated_keyword)(input)?; tracing::instrument(ret, level = "debug", skip(_context, _affiliated_keywords))
)]
pub(crate) fn detect_fixed_width_area<'b, 'g, 'r, 's, AK>(
_affiliated_keywords: AK,
remaining: OrgSource<'s>,
_context: RefContext<'b, 'g, 'r, 's>,
input: OrgSource<'s>,
) -> Res<OrgSource<'s>, ()>
where
AK: IntoIterator<Item = Keyword<'s>>,
{
tuple(( tuple((
start_of_line, start_of_line,
space0, space0,
tag(":"), tag(":"),
alt((tag(" "), org_line_ending)), alt((tag(" "), org_line_ending)),
))(input)?; ))(remaining)?;
Ok((input, ())) Ok((input, ()))
} }

View File

@@ -12,7 +12,6 @@ use nom::multi::many_till;
use nom::sequence::tuple; use nom::sequence::tuple;
use super::affiliated_keyword::parse_affiliated_keywords; use super::affiliated_keyword::parse_affiliated_keywords;
use super::keyword::affiliated_keyword;
use super::org_source::OrgSource; use super::org_source::OrgSource;
use super::util::include_input; use super::util::include_input;
use super::util::maybe_consume_trailing_whitespace_if_not_exiting; use super::util::maybe_consume_trailing_whitespace_if_not_exiting;
@@ -50,7 +49,7 @@ where
{ {
if immediate_in_section(context, "footnote definition") { if immediate_in_section(context, "footnote definition") {
return Err(nom::Err::Error(CustomError::MyError(MyError( return Err(nom::Err::Error(CustomError::MyError(MyError(
"Cannot nest objects of the same element".into(), "Cannot nest objects of the same element",
)))); ))));
} }
start_of_line(remaining)?; start_of_line(remaining)?;
@@ -125,7 +124,7 @@ fn footnote_definition_end<'b, 'g, 'r, 's>(
let (remaining, source) = alt(( let (remaining, source) = alt((
recognize(tuple(( recognize(tuple((
parser_with_context!(maybe_consume_trailing_whitespace)(context), parser_with_context!(maybe_consume_trailing_whitespace)(context),
detect_footnote_definition, |i| detect_footnote_definition(std::iter::empty(), i, context, i),
))), ))),
recognize(tuple(( recognize(tuple((
start_of_line, start_of_line,
@@ -138,16 +137,27 @@ fn footnote_definition_end<'b, 'g, 'r, 's>(
Ok((remaining, source)) Ok((remaining, source))
} }
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))] #[cfg_attr(
pub(crate) fn detect_footnote_definition<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, ()> { feature = "tracing",
let (input, _) = many0(affiliated_keyword)(input)?; tracing::instrument(ret, level = "debug", skip(_context, _affiliated_keywords))
tuple((start_of_line, tag_no_case("[fn:"), label, tag("]")))(input)?; )]
pub(crate) fn detect_footnote_definition<'b, 'g, 'r, 's, AK>(
_affiliated_keywords: AK,
remaining: OrgSource<'s>,
_context: RefContext<'b, 'g, 'r, 's>,
input: OrgSource<'s>,
) -> Res<OrgSource<'s>, ()>
where
AK: IntoIterator<Item = Keyword<'s>>,
{
tuple((start_of_line, tag_no_case("[fn:"), label, tag("]")))(remaining)?;
Ok((input, ())) Ok((input, ()))
} }
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*; use super::*;
use crate::context::bind_context;
use crate::context::Context; use crate::context::Context;
use crate::context::GlobalSettings; use crate::context::GlobalSettings;
use crate::context::List; use crate::context::List;
@@ -165,7 +175,7 @@ line footnote.",
let global_settings = GlobalSettings::default(); let global_settings = GlobalSettings::default();
let initial_context = ContextElement::document_context(); let initial_context = ContextElement::document_context();
let initial_context = Context::new(&global_settings, List::new(&initial_context)); let initial_context = Context::new(&global_settings, List::new(&initial_context));
let footnote_definition_matcher = parser_with_context!(element(true))(&initial_context); let footnote_definition_matcher = bind_context!(element(true), &initial_context);
let (remaining, first_footnote_definition) = let (remaining, first_footnote_definition) =
footnote_definition_matcher(input).expect("Parse first footnote_definition"); footnote_definition_matcher(input).expect("Parse first footnote_definition");
let (remaining, second_footnote_definition) = let (remaining, second_footnote_definition) =
@@ -202,7 +212,7 @@ not in the footnote.",
let global_settings = GlobalSettings::default(); let global_settings = GlobalSettings::default();
let initial_context = ContextElement::document_context(); let initial_context = ContextElement::document_context();
let initial_context = Context::new(&global_settings, List::new(&initial_context)); let initial_context = Context::new(&global_settings, List::new(&initial_context));
let footnote_definition_matcher = parser_with_context!(element(true))(&initial_context); let footnote_definition_matcher = bind_context!(element(true), &initial_context);
let (remaining, first_footnote_definition) = let (remaining, first_footnote_definition) =
footnote_definition_matcher(input).expect("Parse first footnote_definition"); footnote_definition_matcher(input).expect("Parse first footnote_definition");
assert_eq!(Into::<&str>::into(remaining), "not in the footnote."); assert_eq!(Into::<&str>::into(remaining), "not in the footnote.");

View File

@@ -177,7 +177,7 @@ fn _footnote_definition_end<'b, 'g, 'r, 's>(
if current_depth > 0 { if current_depth > 0 {
// Its impossible for the next character to end the footnote reference definition if we're any amount of brackets deep // Its impossible for the next character to end the footnote reference definition if we're any amount of brackets deep
return Err(nom::Err::Error(CustomError::MyError(MyError( return Err(nom::Err::Error(CustomError::MyError(MyError(
"NoFootnoteReferenceDefinitionEnd".into(), "NoFootnoteReferenceDefinitionEnd",
)))); ))));
} }
if current_depth < 0 { if current_depth < 0 {

View File

@@ -61,10 +61,10 @@ where
let (remaining, (_begin, name)) = tuple(( let (remaining, (_begin, name)) = tuple((
tag_no_case("#+begin_"), tag_no_case("#+begin_"),
verify(name, |name: &OrgSource<'_>| { verify(name, |name: &OrgSource<'_>| {
match Into::<&str>::into(name).to_lowercase().as_str() { !matches!(
"comment" | "example" | "export" | "src" | "verse" => false, Into::<&str>::into(name).to_lowercase().as_str(),
_ => true, "comment" | "example" | "export" | "src" | "verse",
} )
}), }),
))(remaining)?; ))(remaining)?;
let name = Into::<&str>::into(name); let name = Into::<&str>::into(name);
@@ -233,7 +233,7 @@ fn greater_block_body<'c, 'b, 'g, 'r, 's>(
) -> Res<OrgSource<'s>, (&'s str, Vec<Element<'s>>)> { ) -> Res<OrgSource<'s>, (&'s str, Vec<Element<'s>>)> {
if in_section(context, context_name) { if in_section(context, context_name) {
return Err(nom::Err::Error(CustomError::MyError(MyError( return Err(nom::Err::Error(CustomError::MyError(MyError(
"Cannot nest objects of the same element".into(), "Cannot nest objects of the same element",
)))); ))));
} }
let exit_with_name = greater_block_end(name); let exit_with_name = greater_block_end(name);
@@ -288,7 +288,7 @@ fn parameters<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, OrgSource<'s>> {
recognize(many_till(anychar, peek(tuple((space0, line_ending)))))(input) recognize(many_till(anychar, peek(tuple((space0, line_ending)))))(input)
} }
fn greater_block_end<'c>(name: &'c str) -> impl ContextMatcher + 'c { fn greater_block_end(name: &str) -> impl ContextMatcher + '_ {
move |context, input: OrgSource<'_>| _greater_block_end(context, input, name) move |context, input: OrgSource<'_>| _greater_block_end(context, input, name)
} }

View File

@@ -206,11 +206,7 @@ fn headline<'b, 'g, 'r, 's>(
.map(|(_, (_, title))| title) .map(|(_, (_, title))| title)
.unwrap_or(Vec::new()), .unwrap_or(Vec::new()),
tags: maybe_tags tags: maybe_tags
.map(|(_ws, tags)| { .map(|(_ws, tags)| tags.into_iter().map(Into::<&str>::into).collect())
tags.into_iter()
.map(|single_tag| Into::<&str>::into(single_tag))
.collect()
})
.unwrap_or(Vec::new()), .unwrap_or(Vec::new()),
is_footnote_section, is_footnote_section,
}, },
@@ -265,11 +261,8 @@ fn heading_keyword<'b, 'g, 'r, 's>(
.map(String::as_str) .map(String::as_str)
{ {
let result = tag::<_, _, CustomError<_>>(todo_keyword)(input); let result = tag::<_, _, CustomError<_>>(todo_keyword)(input);
match result { if let Ok((remaining, ent)) = result {
Ok((remaining, ent)) => { return Ok((remaining, (TodoKeywordType::Todo, ent)));
return Ok((remaining, (TodoKeywordType::Todo, ent)));
}
Err(_) => {}
} }
} }
for todo_keyword in global_settings for todo_keyword in global_settings
@@ -278,20 +271,17 @@ fn heading_keyword<'b, 'g, 'r, 's>(
.map(String::as_str) .map(String::as_str)
{ {
let result = tag::<_, _, CustomError<_>>(todo_keyword)(input); let result = tag::<_, _, CustomError<_>>(todo_keyword)(input);
match result { if let Ok((remaining, ent)) = result {
Ok((remaining, ent)) => { return Ok((remaining, (TodoKeywordType::Done, ent)));
return Ok((remaining, (TodoKeywordType::Done, ent)));
}
Err(_) => {}
} }
} }
Err(nom::Err::Error(CustomError::MyError(MyError( Err(nom::Err::Error(CustomError::MyError(MyError(
"NoTodoKeyword".into(), "NoTodoKeyword",
)))) ))))
} }
} }
fn priority_cookie<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, PriorityCookie> { fn priority_cookie(input: OrgSource<'_>) -> Res<OrgSource<'_>, PriorityCookie> {
let (remaining, (_, priority_character, _)) = tuple(( let (remaining, (_, priority_character, _)) = tuple((
tag("[#"), tag("[#"),
verify(anychar, |c| c.is_alphanumeric()), verify(anychar, |c| c.is_alphanumeric()),
@@ -299,7 +289,7 @@ fn priority_cookie<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, PriorityCooki
))(input)?; ))(input)?;
let cookie = PriorityCookie::try_from(priority_character).map_err(|_| { let cookie = PriorityCookie::try_from(priority_character).map_err(|_| {
nom::Err::Error(CustomError::MyError(MyError( nom::Err::Error(CustomError::MyError(MyError(
"Failed to cast priority cookie to number.".into(), "Failed to cast priority cookie to number.",
))) )))
})?; })?;
Ok((remaining, cookie)) Ok((remaining, cookie))

View File

@@ -132,7 +132,7 @@ fn _header_end<'b, 'g, 'r, 's>(
if current_depth > 0 { if current_depth > 0 {
// Its impossible for the next character to end the header if we're any amount of bracket deep // Its impossible for the next character to end the header if we're any amount of bracket deep
return Err(nom::Err::Error(CustomError::MyError(MyError( return Err(nom::Err::Error(CustomError::MyError(MyError(
"NoHeaderEnd".into(), "NoHeaderEnd",
)))); ))));
} }
if current_depth < 0 { if current_depth < 0 {
@@ -184,7 +184,7 @@ fn _argument_end<'b, 'g, 'r, 's>(
if current_depth > 0 { if current_depth > 0 {
// Its impossible for the next character to end the argument if we're any amount of parenthesis deep // Its impossible for the next character to end the argument if we're any amount of parenthesis deep
return Err(nom::Err::Error(CustomError::MyError(MyError( return Err(nom::Err::Error(CustomError::MyError(MyError(
"NoArgumentEnd".into(), "NoArgumentEnd",
)))); ))));
} }
if current_depth < 0 { if current_depth < 0 {

View File

@@ -126,7 +126,7 @@ fn _header_end<'b, 'g, 'r, 's>(
if current_depth > 0 { if current_depth > 0 {
// Its impossible for the next character to end the header if we're any amount of bracket deep // Its impossible for the next character to end the header if we're any amount of bracket deep
return Err(nom::Err::Error(CustomError::MyError(MyError( return Err(nom::Err::Error(CustomError::MyError(MyError(
"NoHeaderEnd".into(), "NoHeaderEnd",
)))); ))));
} }
if current_depth < 0 { if current_depth < 0 {
@@ -187,9 +187,7 @@ fn _body_end<'b, 'g, 'r, 's>(
let current_depth = input.get_brace_depth() - starting_brace_depth; let current_depth = input.get_brace_depth() - starting_brace_depth;
if current_depth > 0 { if current_depth > 0 {
// Its impossible for the next character to end the body if we're any amount of brace deep // Its impossible for the next character to end the body if we're any amount of brace deep
return Err(nom::Err::Error(CustomError::MyError(MyError( return Err(nom::Err::Error(CustomError::MyError(MyError("NoBodyEnd"))));
"NoBodyEnd".into(),
))));
} }
if current_depth < 0 { if current_depth < 0 {
// This shouldn't be possible because if depth is 0 then a closing brace should end the body. // This shouldn't be possible because if depth is 0 then a closing brace should end the body.

View File

@@ -9,6 +9,7 @@ use nom::character::complete::one_of;
use nom::character::complete::space0; use nom::character::complete::space0;
use nom::combinator::consumed; use nom::combinator::consumed;
use nom::combinator::eof; use nom::combinator::eof;
use nom::combinator::map;
use nom::combinator::not; use nom::combinator::not;
use nom::combinator::peek; use nom::combinator::peek;
use nom::combinator::recognize; use nom::combinator::recognize;
@@ -21,24 +22,19 @@ use super::org_source::BracketDepth;
use super::org_source::OrgSource; use super::org_source::OrgSource;
use super::util::get_consumed; use super::util::get_consumed;
use super::util::maybe_consume_trailing_whitespace_if_not_exiting; use super::util::maybe_consume_trailing_whitespace_if_not_exiting;
use crate::context::Matcher; use crate::context::parser_with_context;
use crate::context::RefContext; use crate::context::RefContext;
use crate::error::CustomError; use crate::error::CustomError;
use crate::error::MyError; use crate::error::MyError;
use crate::error::Res; use crate::error::Res;
use crate::parser::macros::element;
use crate::parser::util::start_of_line; use crate::parser::util::start_of_line;
use crate::types::AffiliatedKeywords; use crate::types::AffiliatedKeywords;
use crate::types::Keyword; use crate::types::Keyword;
const ORG_ELEMENT_AFFILIATED_KEYWORDS: [&'static str; 13] = [ pub(crate) fn filtered_keyword<'s, F: Fn(OrgSource<'s>) -> Res<OrgSource<'s>, OrgSource<'s>>>(
"caption", "data", "headers", "header", "label", "name", "plot", "resname", "results",
"result", "source", "srcname", "tblname",
];
const ORG_ELEMENT_DUAL_KEYWORDS: [&'static str; 2] = ["caption", "results"];
pub(crate) fn filtered_keyword<F: Matcher>(
key_parser: F, key_parser: F,
) -> impl for<'s> Fn(OrgSource<'s>) -> Res<OrgSource<'s>, Keyword<'s>> { ) -> impl Fn(OrgSource<'s>) -> Res<OrgSource<'s>, Keyword<'s>> {
move |input| _filtered_keyword(&key_parser, input) move |input| _filtered_keyword(&key_parser, input)
} }
@@ -46,7 +42,7 @@ pub(crate) fn filtered_keyword<F: Matcher>(
feature = "tracing", feature = "tracing",
tracing::instrument(ret, level = "debug", skip(key_parser)) tracing::instrument(ret, level = "debug", skip(key_parser))
)] )]
fn _filtered_keyword<'s, F: Matcher>( fn _filtered_keyword<'s, F: Fn(OrgSource<'s>) -> Res<OrgSource<'s>, OrgSource<'s>>>(
key_parser: F, key_parser: F,
input: OrgSource<'s>, input: OrgSource<'s>,
) -> Res<OrgSource<'s>, Keyword<'s>> { ) -> Res<OrgSource<'s>, Keyword<'s>> {
@@ -54,24 +50,21 @@ fn _filtered_keyword<'s, F: Matcher>(
// TODO: When key is a member of org-element-parsed-keywords, value can contain the standard set objects, excluding footnote references. // TODO: When key is a member of org-element-parsed-keywords, value can contain the standard set objects, excluding footnote references.
let (remaining, (consumed_input, (_, _, parsed_key, _))) = let (remaining, (consumed_input, (_, _, parsed_key, _))) =
consumed(tuple((space0, tag("#+"), key_parser, tag(":"))))(input)?; consumed(tuple((space0, tag("#+"), key_parser, tag(":"))))(input)?;
match tuple(( if let Ok((remaining, _)) = tuple((
space0::<OrgSource<'_>, CustomError<OrgSource<'_>>>, space0::<OrgSource<'_>, CustomError<OrgSource<'_>>>,
alt((line_ending, eof)), alt((line_ending, eof)),
))(remaining) ))(remaining)
{ {
Ok((remaining, _)) => { return Ok((
return Ok(( remaining,
remaining, Keyword {
Keyword { source: consumed_input.into(),
source: consumed_input.into(), affiliated_keywords: AffiliatedKeywords::default(), // To be populated by the caller if this keyword is in a context to support affiliated keywords.
affiliated_keywords: AffiliatedKeywords::default(), // To be populated by the caller if this keyword is in a context to support affiliated keywords. key: parsed_key.into(),
key: parsed_key.into(), value: "",
value: "".into(), },
}, ));
)); }
}
Err(_) => {}
};
let (remaining, _ws) = space0(remaining)?; let (remaining, _ws) = space0(remaining)?;
let (remaining, parsed_value) = recognize(many_till( let (remaining, parsed_value) = recognize(many_till(
anychar, anychar,
@@ -113,8 +106,11 @@ where
} }
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))] #[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
pub(crate) fn affiliated_keyword<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, Keyword<'s>> { pub(crate) fn affiliated_keyword<'b, 'g, 'r, 's>(
filtered_keyword(affiliated_key)(input) context: RefContext<'b, 'g, 'r, 's>,
input: OrgSource<'s>,
) -> Res<OrgSource<'s>, Keyword<'s>> {
filtered_keyword(parser_with_context!(affiliated_key)(context))(input)
} }
#[cfg_attr( #[cfg_attr(
@@ -149,45 +145,61 @@ fn regular_keyword_key<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, OrgSource
} }
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))] #[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
fn affiliated_key<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, OrgSource<'s>> { fn affiliated_key<'b, 'g, 'r, 's>(
alt(( context: RefContext<'b, 'g, 'r, 's>,
recognize(tuple((dual_affiliated_key, tag("["), optval, tag("]")))), input: OrgSource<'s>,
plain_affiliated_key, ) -> Res<OrgSource<'s>, OrgSource<'s>> {
export_keyword, element!(dual_affiliated_key, context, input);
))(input) element!(plain_affiliated_key, context, input);
} element!(export_keyword, input);
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
fn plain_affiliated_key<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, OrgSource<'s>> {
for keyword in ORG_ELEMENT_AFFILIATED_KEYWORDS {
let result = tag_no_case::<_, _, CustomError<_>>(keyword)(input);
match result {
Ok((remaining, ent)) => {
return Ok((remaining, ent));
}
Err(_) => {}
}
}
Err(nom::Err::Error(CustomError::MyError(MyError( Err(nom::Err::Error(CustomError::MyError(MyError(
"NoKeywordKey".into(), "No affiliated key.",
)))) ))))
} }
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))] #[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
fn dual_affiliated_key<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, OrgSource<'s>> { fn plain_affiliated_key<'b, 'g, 'r, 's>(
for keyword in ORG_ELEMENT_DUAL_KEYWORDS { context: RefContext<'b, 'g, 'r, 's>,
let result = tag_no_case::<_, _, CustomError<_>>(keyword)(input); input: OrgSource<'s>,
match result { ) -> Res<OrgSource<'s>, OrgSource<'s>> {
Ok((remaining, ent)) => { for keyword in context.get_global_settings().element_affiliated_keywords {
return Ok((remaining, ent)); let result = map(
} tuple((
Err(_) => {} tag_no_case::<_, _, CustomError<_>>(*keyword),
peek(tag(":")),
)),
|(key, _)| key,
)(input);
if let Ok((remaining, ent)) = result {
return Ok((remaining, ent));
} }
} }
Err(nom::Err::Error(CustomError::MyError(MyError( Err(nom::Err::Error(CustomError::MyError(MyError(
"NoKeywordKey".into(), "NoKeywordKey",
))))
}
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
fn dual_affiliated_key<'b, 'g, 'r, 's>(
context: RefContext<'b, 'g, 'r, 's>,
input: OrgSource<'s>,
) -> Res<OrgSource<'s>, OrgSource<'s>> {
for keyword in context.get_global_settings().element_dual_keywords {
let result = recognize(tuple((
tag_no_case::<_, _, CustomError<_>>(*keyword),
tag("["),
optval,
tag("]"),
peek(tag(":")),
)))(input);
if let Ok((remaining, ent)) = result {
return Ok((remaining, ent));
}
}
Err(nom::Err::Error(CustomError::MyError(MyError(
"NoKeywordKey",
)))) ))))
} }
@@ -231,3 +243,25 @@ fn export_keyword<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, OrgSource<'s>>
take_while1(|c: char| c.is_alphanumeric() || "-_".contains(c)), take_while1(|c: char| c.is_alphanumeric() || "-_".contains(c)),
)))(input) )))(input)
} }
#[cfg(test)]
mod tests {
use test::Bencher;
use super::*;
use crate::context::Context;
use crate::context::ContextElement;
use crate::context::GlobalSettings;
use crate::context::List;
use crate::parser::OrgSource;
#[bench]
fn bench_affiliated_keyword(b: &mut Bencher) {
let input = OrgSource::new("#+CAPTION[*foo*]: bar *baz*");
let global_settings = GlobalSettings::default();
let initial_context = ContextElement::document_context();
let initial_context = Context::new(&global_settings, List::new(&initial_context));
b.iter(|| assert!(affiliated_keyword(&initial_context, input).is_ok()));
}
}

View File

@@ -19,7 +19,7 @@ use crate::error::Res;
/// Parses the text in the value of a #+TODO keyword. /// Parses the text in the value of a #+TODO keyword.
/// ///
/// Example input: "foo bar baz | lorem ipsum" /// Example input: "foo bar baz | lorem ipsum"
pub(crate) fn todo_keywords<'s>(input: &'s str) -> Res<&'s str, (Vec<&'s str>, Vec<&'s str>)> { pub(crate) fn todo_keywords(input: &str) -> Res<&str, (Vec<&str>, Vec<&str>)> {
let (remaining, mut before_pipe_words) = separated_list0(space1, todo_keyword_word)(input)?; let (remaining, mut before_pipe_words) = separated_list0(space1, todo_keyword_word)(input)?;
let (remaining, after_pipe_words) = opt(tuple(( let (remaining, after_pipe_words) = opt(tuple((
tuple((space0, tag("|"), space0)), tuple((space0, tag("|"), space0)),
@@ -30,12 +30,9 @@ pub(crate) fn todo_keywords<'s>(input: &'s str) -> Res<&'s str, (Vec<&'s str>, V
Ok((remaining, (before_pipe_words, after_pipe_words))) Ok((remaining, (before_pipe_words, after_pipe_words)))
} else if !before_pipe_words.is_empty() { } else if !before_pipe_words.is_empty() {
// If there was no pipe, then the last word becomes a completion state instead. // If there was no pipe, then the last word becomes a completion state instead.
let mut after_pipe_words = Vec::with_capacity(1); let after_pipe_words = vec![before_pipe_words
after_pipe_words.push( .pop()
before_pipe_words .expect("If-statement proves this is Some.")];
.pop()
.expect("If-statement proves this is Some."),
);
Ok((remaining, (before_pipe_words, after_pipe_words))) Ok((remaining, (before_pipe_words, after_pipe_words)))
} else { } else {
// No words founds // No words founds
@@ -43,7 +40,7 @@ pub(crate) fn todo_keywords<'s>(input: &'s str) -> Res<&'s str, (Vec<&'s str>, V
} }
} }
fn todo_keyword_word<'s>(input: &'s str) -> Res<&'s str, &'s str> { fn todo_keyword_word(input: &str) -> Res<&str, &str> {
let (remaining, keyword) = verify(take_till(|c| "( \t\r\n|".contains(c)), |result: &str| { let (remaining, keyword) = verify(take_till(|c| "( \t\r\n|".contains(c)), |result: &str| {
!result.is_empty() !result.is_empty()
})(input)?; })(input)?;

View File

@@ -43,13 +43,8 @@ where
let value_start = remaining; let value_start = remaining;
start_of_line(remaining)?; start_of_line(remaining)?;
let (remaining, _leading_whitespace) = space0(remaining)?; let (remaining, _leading_whitespace) = space0(remaining)?;
let (remaining, (_opening, name, _open_close_brace, _ws, _line_ending)) = tuple(( let (remaining, (_opening, name, _open_close_brace, _ws, _line_ending)) =
tag_no_case(r#"\begin{"#), tuple((tag_no_case(r"\begin{"), name, tag("}"), space0, line_ending))(remaining)?;
name,
tag("}"),
space0,
line_ending,
))(remaining)?;
let latex_environment_end_specialized = latex_environment_end(name.into()); let latex_environment_end_specialized = latex_environment_end(name.into());
let parser_context = ContextElement::ExitMatcherNode(ExitMatcherNode { let parser_context = ContextElement::ExitMatcherNode(ExitMatcherNode {
@@ -127,7 +122,7 @@ fn _latex_environment_end<'b, 'g, 'r, 's, 'c>(
start_of_line(input)?; start_of_line(input)?;
let (remaining, _leading_whitespace) = space0(input)?; let (remaining, _leading_whitespace) = space0(input)?;
let (remaining, (_begin, _name, _close_brace, _ws, _line_ending)) = tuple(( let (remaining, (_begin, _name, _close_brace, _ws, _line_ending)) = tuple((
tag_no_case(r#"\end{"#), tag_no_case(r"\end{"),
tag_no_case(current_name_lower), tag_no_case(current_name_lower),
tag("}"), tag("}"),
space0, space0,

View File

@@ -210,7 +210,7 @@ fn pre<'b, 'g, 'r, 's>(
let preceding_character = input.get_preceding_character(); let preceding_character = input.get_preceding_character();
if let Some('$') = preceding_character { if let Some('$') = preceding_character {
return Err(nom::Err::Error(CustomError::MyError(MyError( return Err(nom::Err::Error(CustomError::MyError(MyError(
"Not a valid pre character for dollar char fragment.".into(), "Not a valid pre character for dollar char fragment.",
)))); ))));
} }
Ok((input, ())) Ok((input, ()))
@@ -284,7 +284,7 @@ fn close_border<'b, 'g, 'r, 's>(
Some(c) if !c.is_whitespace() && !".,;$".contains(c) => Ok((input, ())), Some(c) if !c.is_whitespace() && !".,;$".contains(c) => Ok((input, ())),
_ => { _ => {
return Err(nom::Err::Error(CustomError::MyError(MyError( return Err(nom::Err::Error(CustomError::MyError(MyError(
"Not a valid pre character for dollar char fragment.".into(), "Not a valid pre character for dollar char fragment.",
)))); ))));
} }
} }

View File

@@ -76,10 +76,7 @@ where
let parser_context = context.with_additional_node(&contexts[0]); let parser_context = context.with_additional_node(&contexts[0]);
let parser_context = parser_context.with_additional_node(&contexts[1]); let parser_context = parser_context.with_additional_node(&contexts[1]);
let parser_context = parser_context.with_additional_node(&contexts[2]); let parser_context = parser_context.with_additional_node(&contexts[2]);
let parameters = match parameters { let parameters = parameters.map(|(_ws, parameters)| parameters);
Some((_ws, parameters)) => Some(parameters),
None => None,
};
let object_matcher = parser_with_context!(standard_set_object)(&parser_context); let object_matcher = parser_with_context!(standard_set_object)(&parser_context);
let exit_matcher = parser_with_context!(exit_matcher_parser)(&parser_context); let exit_matcher = parser_with_context!(exit_matcher_parser)(&parser_context);
@@ -110,7 +107,7 @@ where
context.get_global_settings(), context.get_global_settings(),
affiliated_keywords, affiliated_keywords,
), ),
data: parameters.map(|parameters| Into::<&str>::into(parameters)), data: parameters.map(Into::<&str>::into),
children, children,
}, },
)) ))
@@ -295,7 +292,7 @@ where
affiliated_keywords, affiliated_keywords,
), ),
export_type: export_type.map(Into::<&str>::into), export_type: export_type.map(Into::<&str>::into),
data: parameters.map(|parameters| Into::<&str>::into(parameters)), data: parameters.map(Into::<&str>::into),
contents, contents,
}, },
)) ))
@@ -344,7 +341,7 @@ where
let (switches, number_lines, preserve_indent, retain_labels, use_labels, label_format) = { let (switches, number_lines, preserve_indent, retain_labels, use_labels, label_format) = {
if let Some(switches) = switches { if let Some(switches) = switches {
( (
if switches.source.len() == 0 { if switches.source.is_empty() {
None None
} else { } else {
Some(switches.source) Some(switches.source)
@@ -390,7 +387,7 @@ fn data<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, OrgSource<'s>> {
is_not("\r\n")(input) is_not("\r\n")(input)
} }
fn lesser_block_end<'c>(current_name: &'c str) -> impl ContextMatcher + 'c { fn lesser_block_end(current_name: &str) -> impl ContextMatcher + '_ {
// Since the lesser block names are statically defined in code, we can simply assert that the name is lowercase instead of causing an allocation by converting to lowercase. // Since the lesser block names are statically defined in code, we can simply assert that the name is lowercase instead of causing an allocation by converting to lowercase.
debug_assert!(current_name == current_name.to_lowercase()); debug_assert!(current_name == current_name.to_lowercase());
move |context, input: OrgSource<'_>| _lesser_block_end(context, input, current_name) move |context, input: OrgSource<'_>| _lesser_block_end(context, input, current_name)
@@ -420,7 +417,7 @@ fn _lesser_block_end<'b, 'g, 'r, 's, 'c>(
/// Parser for the beginning of a lesser block /// Parser for the beginning of a lesser block
/// ///
/// current_name MUST be lowercase. We do not do the conversion ourselves because it is not allowed in a const fn. /// current_name MUST be lowercase. We do not do the conversion ourselves because it is not allowed in a const fn.
const fn lesser_block_begin<'c>(current_name: &'c str) -> impl ContextMatcher + 'c { const fn lesser_block_begin(current_name: &str) -> impl ContextMatcher + '_ {
// TODO: Since this is a const fn, is there ANY way to "generate" functions at compile time? // TODO: Since this is a const fn, is there ANY way to "generate" functions at compile time?
move |context, input: OrgSource<'_>| _lesser_block_begin(context, input, current_name) move |context, input: OrgSource<'_>| _lesser_block_begin(context, input, current_name)
} }
@@ -531,11 +528,8 @@ fn _example_src_switches<'s>(
(SwitchState::Normal, "-r") => { (SwitchState::Normal, "-r") => {
saw_r = true; saw_r = true;
use_labels = false; use_labels = false;
match retain_labels { if let RetainLabels::Yes = retain_labels {
RetainLabels::Yes => { retain_labels = RetainLabels::No;
retain_labels = RetainLabels::No;
}
_ => {}
} }
} }
(SwitchState::Normal, "-l") => { (SwitchState::Normal, "-l") => {
@@ -675,7 +669,9 @@ pub(crate) fn content<'b, 'g, 'r, 's>(
} }
let (remain, (pre_escape_whitespace, line)) = content_line(remaining)?; let (remain, (pre_escape_whitespace, line)) = content_line(remaining)?;
pre_escape_whitespace.map(|val| ret.push_str(Into::<&str>::into(val))); if let Some(val) = pre_escape_whitespace {
ret.push_str(Into::<&str>::into(val));
}
ret.push_str(line.into()); ret.push_str(line.into());
remaining = remain; remaining = remain;
} }

View File

@@ -21,7 +21,7 @@ pub(crate) fn line_break<'b, 'g, 'r, 's>(
input: OrgSource<'s>, input: OrgSource<'s>,
) -> Res<OrgSource<'s>, LineBreak<'s>> { ) -> Res<OrgSource<'s>, LineBreak<'s>> {
let (remaining, _) = pre(context, input)?; let (remaining, _) = pre(context, input)?;
let (remaining, _) = tag(r#"\\"#)(remaining)?; let (remaining, _) = tag(r"\\")(remaining)?;
let (remaining, _) = recognize(many0(one_of(" \t")))(remaining)?; let (remaining, _) = recognize(many0(one_of(" \t")))(remaining)?;
let (remaining, _) = line_ending(remaining)?; let (remaining, _) = line_ending(remaining)?;
let source = get_consumed(input, remaining); let source = get_consumed(input, remaining);
@@ -46,7 +46,7 @@ fn pre<'b, 'g, 'r, 's>(
// If None, we are at the start of the file // If None, we are at the start of the file
None | Some('\\') => { None | Some('\\') => {
return Err(nom::Err::Error(CustomError::MyError(MyError( return Err(nom::Err::Error(CustomError::MyError(MyError(
"Not a valid pre character for line break.".into(), "Not a valid pre character for line break.",
)))); ))));
} }
_ => {} _ => {}
@@ -56,7 +56,7 @@ fn pre<'b, 'g, 'r, 's>(
let is_non_empty_line = current_line.chars().any(|c| !c.is_whitespace()); let is_non_empty_line = current_line.chars().any(|c| !c.is_whitespace());
if !is_non_empty_line { if !is_non_empty_line {
return Err(nom::Err::Error(CustomError::MyError(MyError( return Err(nom::Err::Error(CustomError::MyError(MyError(
"Not a valid pre line for line break.".into(), "Not a valid pre line for line break.",
)))); ))));
} }

View File

@@ -1,6 +1,6 @@
/// Parse an element that has affiliated keywords. /// Parse an element that has affiliated keywords.
macro_rules! ak_element { macro_rules! ak_element {
($parser:ident, $affiliated_keywords:expr, $post_affiliated_keywords_input: expr, $context: expr, $input: expr, $wrapper: expr) => { ($parser:expr, $affiliated_keywords:expr, $post_affiliated_keywords_input: expr, $context: expr, $input: expr, $wrapper: expr) => {
if let Ok((remaining, ele)) = $parser( if let Ok((remaining, ele)) = $parser(
$affiliated_keywords, $affiliated_keywords,
$post_affiliated_keywords_input, $post_affiliated_keywords_input,
@@ -10,7 +10,7 @@ macro_rules! ak_element {
return Ok((remaining, $wrapper(ele))); return Ok((remaining, $wrapper(ele)));
} }
}; };
($parser:ident, $affiliated_keywords:expr, $post_affiliated_keywords_input: expr, $context: expr, $input: expr) => { ($parser:expr, $affiliated_keywords:expr, $post_affiliated_keywords_input: expr, $context: expr, $input: expr) => {
if let Ok((remaining, ele)) = $parser( if let Ok((remaining, ele)) = $parser(
$affiliated_keywords, $affiliated_keywords,
$post_affiliated_keywords_input, $post_affiliated_keywords_input,
@@ -25,16 +25,21 @@ macro_rules! ak_element {
pub(crate) use ak_element; pub(crate) use ak_element;
macro_rules! element { macro_rules! element {
($parser:ident, $context: expr, $input: expr, $wrapper: expr) => { ($parser:expr, $context: expr, $input: expr, $wrapper: expr) => {
if let Ok((remaining, ele)) = $parser($context, $input) { if let Ok((remaining, ele)) = $parser($context, $input) {
return Ok((remaining, $wrapper(ele))); return Ok((remaining, $wrapper(ele)));
} }
}; };
($parser:ident, $context: expr, $input: expr) => { ($parser:expr, $context: expr, $input: expr) => {
if let Ok((remaining, ele)) = $parser($context, $input) { if let Ok((remaining, ele)) = $parser($context, $input) {
return Ok((remaining, ele)); return Ok((remaining, ele));
} }
}; };
($parser:expr, $input: expr) => {
if let Ok((remaining, ele)) = $parser($input) {
return Ok((remaining, ele));
}
};
} }
pub(crate) use element; pub(crate) use element;

View File

@@ -1,11 +1,7 @@
use nom::branch::alt;
use nom::combinator::map;
use super::org_source::OrgSource; use super::org_source::OrgSource;
use super::plain_text::plain_text; use super::plain_text::plain_text;
use super::regular_link::regular_link; use super::regular_link::regular_link;
use super::subscript_and_superscript::detect_subscript_or_superscript; use super::subscript_and_superscript::detect_subscript_or_superscript;
use crate::context::parser_with_context;
use crate::context::RefContext; use crate::context::RefContext;
use crate::error::CustomError; use crate::error::CustomError;
use crate::error::MyError; use crate::error::MyError;
@@ -19,6 +15,7 @@ use crate::parser::inline_babel_call::inline_babel_call;
use crate::parser::inline_source_block::inline_source_block; use crate::parser::inline_source_block::inline_source_block;
use crate::parser::latex_fragment::latex_fragment; use crate::parser::latex_fragment::latex_fragment;
use crate::parser::line_break::line_break; use crate::parser::line_break::line_break;
use crate::parser::macros::element;
use crate::parser::org_macro::org_macro; use crate::parser::org_macro::org_macro;
use crate::parser::plain_link::plain_link; use crate::parser::plain_link::plain_link;
use crate::parser::radio_link::radio_link; use crate::parser::radio_link::radio_link;
@@ -39,14 +36,14 @@ pub(crate) fn standard_set_object<'b, 'g, 'r, 's>(
context: RefContext<'b, 'g, 'r, 's>, context: RefContext<'b, 'g, 'r, 's>,
input: OrgSource<'s>, input: OrgSource<'s>,
) -> Res<OrgSource<'s>, Object<'s>> { ) -> Res<OrgSource<'s>, Object<'s>> {
let (remaining, object) = alt(( element!(standard_set_object_sans_plain_text, context, input);
parser_with_context!(standard_set_object_sans_plain_text)(context), element!(
map( plain_text(detect_standard_set_object_sans_plain_text),
parser_with_context!(plain_text(detect_standard_set_object_sans_plain_text))(context), context,
Object::PlainText, input,
), Object::PlainText
))(input)?; );
Ok((remaining, object)) Err(nom::Err::Error(CustomError::MyError(MyError("No object."))))
} }
#[cfg_attr( #[cfg_attr(
@@ -57,14 +54,14 @@ pub(crate) fn minimal_set_object<'b, 'g, 'r, 's>(
context: RefContext<'b, 'g, 'r, 's>, context: RefContext<'b, 'g, 'r, 's>,
input: OrgSource<'s>, input: OrgSource<'s>,
) -> Res<OrgSource<'s>, Object<'s>> { ) -> Res<OrgSource<'s>, Object<'s>> {
let (remaining, object) = alt(( element!(minimal_set_object_sans_plain_text, context, input);
parser_with_context!(minimal_set_object_sans_plain_text)(context), element!(
map( plain_text(detect_minimal_set_object_sans_plain_text),
parser_with_context!(plain_text(detect_minimal_set_object_sans_plain_text))(context), context,
Object::PlainText, input,
), Object::PlainText
))(input)?; );
Ok((remaining, object)) Err(nom::Err::Error(CustomError::MyError(MyError("No object."))))
} }
#[cfg_attr( #[cfg_attr(
@@ -75,56 +72,38 @@ fn standard_set_object_sans_plain_text<'b, 'g, 'r, 's>(
context: RefContext<'b, 'g, 'r, 's>, context: RefContext<'b, 'g, 'r, 's>,
input: OrgSource<'s>, input: OrgSource<'s>,
) -> Res<OrgSource<'s>, Object<'s>> { ) -> Res<OrgSource<'s>, Object<'s>> {
let (remaining, object) = alt(( element!(timestamp, context, input, Object::Timestamp);
map(parser_with_context!(timestamp)(context), Object::Timestamp), element!(subscript, context, input, Object::Subscript);
map(parser_with_context!(subscript)(context), Object::Subscript), element!(superscript, context, input, Object::Superscript);
map( element!(statistics_cookie, context, input, Object::StatisticsCookie);
parser_with_context!(superscript)(context), element!(target, context, input, Object::Target);
Object::Superscript, element!(line_break, context, input, Object::LineBreak);
), element!(
map( inline_source_block,
parser_with_context!(statistics_cookie)(context), context,
Object::StatisticsCookie, input,
), Object::InlineSourceBlock
map(parser_with_context!(target)(context), Object::Target), );
map(parser_with_context!(line_break)(context), Object::LineBreak), element!(inline_babel_call, context, input, Object::InlineBabelCall);
map( element!(citation, context, input, Object::Citation);
parser_with_context!(inline_source_block)(context), element!(
Object::InlineSourceBlock, footnote_reference,
), context,
map( input,
parser_with_context!(inline_babel_call)(context), Object::FootnoteReference
Object::InlineBabelCall, );
), element!(export_snippet, context, input, Object::ExportSnippet);
map(parser_with_context!(citation)(context), Object::Citation), element!(entity, context, input, Object::Entity);
map( element!(latex_fragment, context, input, Object::LatexFragment);
parser_with_context!(footnote_reference)(context), element!(radio_link, context, input, Object::RadioLink);
Object::FootnoteReference, element!(radio_target, context, input, Object::RadioTarget);
), element!(text_markup, context, input);
map( element!(regular_link, context, input, Object::RegularLink);
parser_with_context!(export_snippet)(context), element!(plain_link, context, input, Object::PlainLink);
Object::ExportSnippet, element!(angle_link, context, input, Object::AngleLink);
), element!(org_macro, context, input, Object::OrgMacro);
map(parser_with_context!(entity)(context), Object::Entity),
map( Err(nom::Err::Error(CustomError::MyError(MyError("No object."))))
parser_with_context!(latex_fragment)(context),
Object::LatexFragment,
),
map(parser_with_context!(radio_link)(context), Object::RadioLink),
map(
parser_with_context!(radio_target)(context),
Object::RadioTarget,
),
parser_with_context!(text_markup)(context),
map(
parser_with_context!(regular_link)(context),
Object::RegularLink,
),
map(parser_with_context!(plain_link)(context), Object::PlainLink),
map(parser_with_context!(angle_link)(context), Object::AngleLink),
map(parser_with_context!(org_macro)(context), Object::OrgMacro),
))(input)?;
Ok((remaining, object))
} }
#[cfg_attr( #[cfg_attr(
@@ -135,20 +114,12 @@ fn minimal_set_object_sans_plain_text<'b, 'g, 'r, 's>(
context: RefContext<'b, 'g, 'r, 's>, context: RefContext<'b, 'g, 'r, 's>,
input: OrgSource<'s>, input: OrgSource<'s>,
) -> Res<OrgSource<'s>, Object<'s>> { ) -> Res<OrgSource<'s>, Object<'s>> {
let (remaining, object) = alt(( element!(subscript, context, input, Object::Subscript);
map(parser_with_context!(subscript)(context), Object::Subscript), element!(superscript, context, input, Object::Superscript);
map( element!(entity, context, input, Object::Entity);
parser_with_context!(superscript)(context), element!(latex_fragment, context, input, Object::LatexFragment);
Object::Superscript, element!(text_markup, context, input);
), Err(nom::Err::Error(CustomError::MyError(MyError("No object."))))
map(parser_with_context!(entity)(context), Object::Entity),
map(
parser_with_context!(latex_fragment)(context),
Object::LatexFragment,
),
parser_with_context!(text_markup)(context),
))(input)?;
Ok((remaining, object))
} }
#[cfg_attr( #[cfg_attr(
@@ -167,7 +138,7 @@ pub(crate) fn detect_standard_set_object_sans_plain_text<'b, 'g, 'r, 's>(
} }
return Err(nom::Err::Error(CustomError::MyError(MyError( return Err(nom::Err::Error(CustomError::MyError(MyError(
"No object detected.".into(), "No object detected.",
)))); ))));
} }
@@ -187,7 +158,7 @@ fn detect_minimal_set_object_sans_plain_text<'b, 'g, 'r, 's>(
} }
return Err(nom::Err::Error(CustomError::MyError(MyError( return Err(nom::Err::Error(CustomError::MyError(MyError(
"No object detected.".into(), "No object detected.",
)))); ))));
} }
@@ -200,16 +171,18 @@ pub(crate) fn regular_link_description_set_object<'b, 'g, 'r, 's>(
input: OrgSource<'s>, input: OrgSource<'s>,
) -> Res<OrgSource<'s>, Object<'s>> { ) -> Res<OrgSource<'s>, Object<'s>> {
// TODO: It can also contain another link, but only when it is a plain or angle link. It can contain square brackets, but not ]] // TODO: It can also contain another link, but only when it is a plain or angle link. It can contain square brackets, but not ]]
let (remaining, object) = alt(( element!(
parser_with_context!(regular_link_description_set_object_sans_plain_text)(context), regular_link_description_set_object_sans_plain_text,
map( context,
parser_with_context!(plain_text( input
detect_regular_link_description_set_object_sans_plain_text );
))(context), element!(
Object::PlainText, plain_text(detect_regular_link_description_set_object_sans_plain_text),
), context,
))(input)?; input,
Ok((remaining, object)) Object::PlainText
);
Err(nom::Err::Error(CustomError::MyError(MyError("No object."))))
} }
#[cfg_attr( #[cfg_attr(
@@ -221,27 +194,18 @@ fn regular_link_description_set_object_sans_plain_text<'b, 'g, 'r, 's>(
input: OrgSource<'s>, input: OrgSource<'s>,
) -> Res<OrgSource<'s>, Object<'s>> { ) -> Res<OrgSource<'s>, Object<'s>> {
// TODO: It can also contain another link, but only when it is a plain or angle link. It can contain square brackets, but not ]] // TODO: It can also contain another link, but only when it is a plain or angle link. It can contain square brackets, but not ]]
let (remaining, object) = alt(( element!(export_snippet, context, input, Object::ExportSnippet);
map( element!(statistics_cookie, context, input, Object::StatisticsCookie);
parser_with_context!(export_snippet)(context), element!(
Object::ExportSnippet, inline_source_block,
), context,
map( input,
parser_with_context!(statistics_cookie)(context), Object::InlineSourceBlock
Object::StatisticsCookie, );
), element!(inline_babel_call, context, input, Object::InlineBabelCall);
map( element!(org_macro, context, input, Object::OrgMacro);
parser_with_context!(inline_source_block)(context), element!(minimal_set_object_sans_plain_text, context, input);
Object::InlineSourceBlock, Err(nom::Err::Error(CustomError::MyError(MyError("No object."))))
),
map(
parser_with_context!(inline_babel_call)(context),
Object::InlineBabelCall,
),
map(parser_with_context!(org_macro)(context), Object::OrgMacro),
parser_with_context!(minimal_set_object_sans_plain_text)(context),
))(input)?;
Ok((remaining, object))
} }
#[cfg_attr( #[cfg_attr(
@@ -259,9 +223,9 @@ fn detect_regular_link_description_set_object_sans_plain_text<'b, 'g, 'r, 's>(
return Ok((input, ())); return Ok((input, ()));
} }
return Err(nom::Err::Error(CustomError::MyError(MyError( Err(nom::Err::Error(CustomError::MyError(MyError(
"No object detected.".into(), "No object detected.",
)))); ))))
} }
#[cfg_attr( #[cfg_attr(
@@ -272,14 +236,14 @@ pub(crate) fn table_cell_set_object<'b, 'g, 'r, 's>(
context: RefContext<'b, 'g, 'r, 's>, context: RefContext<'b, 'g, 'r, 's>,
input: OrgSource<'s>, input: OrgSource<'s>,
) -> Res<OrgSource<'s>, Object<'s>> { ) -> Res<OrgSource<'s>, Object<'s>> {
let (remaining, object) = alt(( element!(table_cell_set_object_sans_plain_text, context, input);
parser_with_context!(table_cell_set_object_sans_plain_text)(context), element!(
map( plain_text(detect_table_cell_set_object_sans_plain_text),
parser_with_context!(plain_text(detect_table_cell_set_object_sans_plain_text))(context), context,
Object::PlainText, input,
), Object::PlainText
))(input)?; );
Ok((remaining, object)) Err(nom::Err::Error(CustomError::MyError(MyError("No object."))))
} }
#[cfg_attr( #[cfg_attr(
@@ -290,33 +254,24 @@ fn table_cell_set_object_sans_plain_text<'b, 'g, 'r, 's>(
context: RefContext<'b, 'g, 'r, 's>, context: RefContext<'b, 'g, 'r, 's>,
input: OrgSource<'s>, input: OrgSource<'s>,
) -> Res<OrgSource<'s>, Object<'s>> { ) -> Res<OrgSource<'s>, Object<'s>> {
let (remaining, object) = alt(( element!(citation, context, input, Object::Citation);
map(parser_with_context!(citation)(context), Object::Citation), element!(export_snippet, context, input, Object::ExportSnippet);
map( element!(
parser_with_context!(export_snippet)(context), footnote_reference,
Object::ExportSnippet, context,
), input,
map( Object::FootnoteReference
parser_with_context!(footnote_reference)(context), );
Object::FootnoteReference, element!(radio_link, context, input, Object::RadioLink);
), element!(regular_link, context, input, Object::RegularLink);
map(parser_with_context!(radio_link)(context), Object::RadioLink), element!(plain_link, context, input, Object::PlainLink);
map( element!(angle_link, context, input, Object::AngleLink);
parser_with_context!(regular_link)(context), element!(org_macro, context, input, Object::OrgMacro);
Object::RegularLink, element!(radio_target, context, input, Object::RadioTarget);
), element!(target, context, input, Object::Target);
map(parser_with_context!(plain_link)(context), Object::PlainLink), element!(timestamp, context, input, Object::Timestamp);
map(parser_with_context!(angle_link)(context), Object::AngleLink), element!(minimal_set_object_sans_plain_text, context, input);
map(parser_with_context!(org_macro)(context), Object::OrgMacro), Err(nom::Err::Error(CustomError::MyError(MyError("No object."))))
map(
parser_with_context!(radio_target)(context),
Object::RadioTarget,
),
map(parser_with_context!(target)(context), Object::Target),
map(parser_with_context!(timestamp)(context), Object::Timestamp),
parser_with_context!(minimal_set_object_sans_plain_text)(context),
))(input)?;
Ok((remaining, object))
} }
#[cfg_attr( #[cfg_attr(
@@ -335,6 +290,6 @@ fn detect_table_cell_set_object_sans_plain_text<'b, 'g, 'r, 's>(
} }
return Err(nom::Err::Error(CustomError::MyError(MyError( return Err(nom::Err::Error(CustomError::MyError(MyError(
"No object detected.".into(), "No object detected.",
)))); ))));
} }

View File

@@ -219,7 +219,7 @@ where
panic!("Attempted to extend past the end of the WrappedInput.") panic!("Attempted to extend past the end of the WrappedInput.")
} }
if new_start == self.start && new_end == self.end { if new_start == self.start && new_end == self.end {
return self.clone(); return *self;
} }
let skipped_text = &self.full_source[self.start..new_start]; let skipped_text = &self.full_source[self.start..new_start];
@@ -337,7 +337,7 @@ impl<'s> InputTakeAtPosition for OrgSource<'s> {
P: Fn(Self::Item) -> bool, P: Fn(Self::Item) -> bool,
{ {
match Into::<&str>::into(self).position(predicate) { match Into::<&str>::into(self).position(predicate) {
Some(0) => Err(nom::Err::Error(E::from_error_kind(self.clone(), e))), Some(0) => Err(nom::Err::Error(E::from_error_kind(*self, e))),
Some(idx) => Ok(self.take_split(idx)), Some(idx) => Ok(self.take_split(idx)),
None => Err(nom::Err::Incomplete(nom::Needed::new(1))), None => Err(nom::Err::Incomplete(nom::Needed::new(1))),
} }
@@ -366,11 +366,11 @@ impl<'s> InputTakeAtPosition for OrgSource<'s> {
{ {
let window = Into::<&str>::into(self); let window = Into::<&str>::into(self);
match window.position(predicate) { match window.position(predicate) {
Some(0) => Err(nom::Err::Error(E::from_error_kind(self.clone(), e))), Some(0) => Err(nom::Err::Error(E::from_error_kind(*self, e))),
Some(n) => Ok(self.take_split(n)), Some(n) => Ok(self.take_split(n)),
None => { None => {
if window.input_len() == 0 { if window.input_len() == 0 {
Err(nom::Err::Error(E::from_error_kind(self.clone(), e))) Err(nom::Err::Error(E::from_error_kind(*self, e)))
} else { } else {
Ok(self.take_split(self.input_len())) Ok(self.take_split(self.input_len()))
} }
@@ -398,7 +398,7 @@ pub(crate) fn convert_error<'a, I: Into<CustomError<&'a str>>>(
impl<'s> From<CustomError<OrgSource<'s>>> for CustomError<&'s str> { impl<'s> From<CustomError<OrgSource<'s>>> for CustomError<&'s str> {
fn from(value: CustomError<OrgSource<'s>>) -> Self { fn from(value: CustomError<OrgSource<'s>>) -> Self {
match value { match value {
CustomError::MyError(err) => CustomError::MyError(err.into()), CustomError::MyError(err) => CustomError::MyError(err),
CustomError::Nom(input, error_kind) => CustomError::Nom(input.into(), error_kind), CustomError::Nom(input, error_kind) => CustomError::Nom(input.into(), error_kind),
CustomError::IO(err) => CustomError::IO(err), CustomError::IO(err) => CustomError::IO(err),
CustomError::BoxedError(err) => CustomError::BoxedError(err), CustomError::BoxedError(err) => CustomError::BoxedError(err),

View File

@@ -89,7 +89,7 @@ fn paragraph_end<'b, 'g, 'r, 's>(
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use crate::context::parser_with_context; use crate::context::bind_context;
use crate::context::Context; use crate::context::Context;
use crate::context::ContextElement; use crate::context::ContextElement;
use crate::context::GlobalSettings; use crate::context::GlobalSettings;
@@ -104,7 +104,7 @@ mod tests {
let global_settings = GlobalSettings::default(); let global_settings = GlobalSettings::default();
let initial_context = ContextElement::document_context(); let initial_context = ContextElement::document_context();
let initial_context = Context::new(&global_settings, List::new(&initial_context)); let initial_context = Context::new(&global_settings, List::new(&initial_context));
let paragraph_matcher = parser_with_context!(element(true))(&initial_context); let paragraph_matcher = bind_context!(element(true), &initial_context);
let (remaining, first_paragraph) = paragraph_matcher(input).expect("Parse first paragraph"); let (remaining, first_paragraph) = paragraph_matcher(input).expect("Parse first paragraph");
let (remaining, second_paragraph) = let (remaining, second_paragraph) =
paragraph_matcher(remaining).expect("Parse second paragraph."); paragraph_matcher(remaining).expect("Parse second paragraph.");

View File

@@ -96,7 +96,7 @@ fn pre<'b, 'g, 'r, 's>(
Some(_) => { Some(_) => {
// Not at start of line, cannot be a heading // Not at start of line, cannot be a heading
return Err(nom::Err::Error(CustomError::MyError(MyError( return Err(nom::Err::Error(CustomError::MyError(MyError(
"Not a valid pre character for plain link.".into(), "Not a valid pre character for plain link.",
)))); ))));
} }
}; };
@@ -263,16 +263,13 @@ pub(crate) fn protocol<'b, 'g, 'r, 's>(
) -> Res<OrgSource<'s>, OrgSource<'s>> { ) -> Res<OrgSource<'s>, OrgSource<'s>> {
for link_parameter in context.get_global_settings().link_parameters { for link_parameter in context.get_global_settings().link_parameters {
let result = tag_no_case::<_, _, CustomError<_>>(*link_parameter)(input); let result = tag_no_case::<_, _, CustomError<_>>(*link_parameter)(input);
match result { if let Ok((remaining, ent)) = result {
Ok((remaining, ent)) => { return Ok((remaining, ent));
return Ok((remaining, ent));
}
Err(_) => {}
} }
} }
Err(nom::Err::Error(CustomError::MyError(MyError( Err(nom::Err::Error(CustomError::MyError(MyError(
"NoLinkProtocol".into(), "NoLinkProtocol",
)))) ))))
} }
@@ -346,7 +343,7 @@ fn impl_path_plain_end<'b, 'g, 'r, 's>(
} }
Err(nom::Err::Error(CustomError::MyError(MyError( Err(nom::Err::Error(CustomError::MyError(MyError(
"No path plain end".into(), "No path plain end",
)))) ))))
} }
@@ -435,6 +432,6 @@ fn _path_plain_parenthesis_end<'s>(
} }
} }
Err(nom::Err::Error(CustomError::MyError(MyError( Err(nom::Err::Error(CustomError::MyError(MyError(
"No closing parenthesis".into(), "No closing parenthesis",
)))) ))))
} }

View File

@@ -20,7 +20,6 @@ use nom::sequence::tuple;
use super::affiliated_keyword::parse_affiliated_keywords; use super::affiliated_keyword::parse_affiliated_keywords;
use super::element_parser::element; use super::element_parser::element;
use super::keyword::affiliated_keyword;
use super::object_parser::standard_set_object; use super::object_parser::standard_set_object;
use super::org_source::OrgSource; use super::org_source::OrgSource;
use super::util::include_input; use super::util::include_input;
@@ -53,13 +52,17 @@ use crate::types::PlainListType;
#[cfg_attr( #[cfg_attr(
feature = "tracing", feature = "tracing",
tracing::instrument(ret, level = "debug", skip(context)) tracing::instrument(ret, level = "debug", skip(context, _affiliated_keywords))
)] )]
pub(crate) fn detect_plain_list<'b, 'g, 'r, 's>( pub(crate) fn detect_plain_list<'b, 'g, 'r, 's, AK>(
_affiliated_keywords: AK,
remaining: OrgSource<'s>,
context: RefContext<'b, 'g, 'r, 's>, context: RefContext<'b, 'g, 'r, 's>,
input: OrgSource<'s>, input: OrgSource<'s>,
) -> Res<OrgSource<'s>, ()> { ) -> Res<OrgSource<'s>, ()>
let (input, _) = many0(affiliated_keyword)(input)?; where
AK: IntoIterator<Item = Keyword<'s>>,
{
if verify( if verify(
tuple(( tuple((
start_of_line, start_of_line,
@@ -68,15 +71,15 @@ pub(crate) fn detect_plain_list<'b, 'g, 'r, 's>(
alt((space1, line_ending, eof)), alt((space1, line_ending, eof)),
)), )),
|(_start, (indent_level, _), (_bullet_type, bull), _after_whitespace)| { |(_start, (indent_level, _), (_bullet_type, bull), _after_whitespace)| {
!Into::<&str>::into(bull).starts_with("*") || *indent_level > 0 !Into::<&str>::into(bull).starts_with('*') || *indent_level > 0
}, },
)(input) )(remaining)
.is_ok() .is_ok()
{ {
return Ok((input, ())); return Ok((input, ()));
} }
return Err(nom::Err::Error(CustomError::MyError(MyError( return Err(nom::Err::Error(CustomError::MyError(MyError(
"No element detected.".into(), "No element detected.",
)))); ))));
} }
@@ -150,7 +153,7 @@ where
Some(final_child) => final_child, Some(final_child) => final_child,
None => { None => {
return Err(nom::Err::Error(CustomError::MyError(MyError( return Err(nom::Err::Error(CustomError::MyError(MyError(
"Plain lists require at least one element.".into(), "Plain lists require at least one element.",
)))); ))));
} }
}; };
@@ -189,7 +192,7 @@ fn plain_list_item<'b, 'g, 'r, 's>(
let (remaining, (indent_level, _leading_whitespace)) = indentation_level(context, input)?; let (remaining, (indent_level, _leading_whitespace)) = indentation_level(context, input)?;
let (remaining, (bullet_type, bull)) = verify( let (remaining, (bullet_type, bull)) = verify(
parser_with_context!(bullet)(context), parser_with_context!(bullet)(context),
|(_bullet_type, bull)| !Into::<&str>::into(bull).starts_with("*") || indent_level > 0, |(_bullet_type, bull)| !Into::<&str>::into(bull).starts_with('*') || indent_level > 0,
)(remaining)?; )(remaining)?;
let (remaining, maybe_counter_set) = let (remaining, maybe_counter_set) =
@@ -224,35 +227,32 @@ fn plain_list_item<'b, 'g, 'r, 's>(
let maybe_contentless_item: Res<OrgSource<'_>, ()> = peek(parser_with_context!( let maybe_contentless_item: Res<OrgSource<'_>, ()> = peek(parser_with_context!(
detect_contentless_item_contents detect_contentless_item_contents
)(&parser_context))(remaining); )(&parser_context))(remaining);
match maybe_contentless_item { if let Ok((_rem, _ws)) = maybe_contentless_item {
Ok((_rem, _ws)) => { let (remaining, _trailing_ws) = if context.should_consume_trailing_whitespace() {
let (remaining, _trailing_ws) = if context.should_consume_trailing_whitespace() { recognize(alt((recognize(many1(blank_line)), eof)))(remaining)?
recognize(alt((recognize(many1(blank_line)), eof)))(remaining)? } else {
} else { recognize(alt((blank_line, eof)))(remaining)?
recognize(alt((blank_line, eof)))(remaining)? };
}; let source = get_consumed(input, remaining);
let source = get_consumed(input, remaining); return Ok((
return Ok(( remaining,
remaining, (
( list_type,
list_type, PlainListItem {
PlainListItem { source: source.into(),
source: source.into(), indentation: indent_level,
indentation: indent_level, bullet: bull.into(),
bullet: bull.into(), counter: maybe_counter_set,
counter: maybe_counter_set, checkbox: None,
checkbox: None, tag: maybe_tag
tag: maybe_tag .map(|(_ws, item_tag)| item_tag)
.map(|(_ws, item_tag)| item_tag) .unwrap_or(Vec::new()),
.unwrap_or(Vec::new()), pre_blank: 0,
pre_blank: 0, children: Vec::new(),
children: Vec::new(), },
}, ),
), ));
)); }
}
Err(_) => {}
};
let (remaining, pre_blank) = item_tag_post_gap(&parser_context, remaining)?; let (remaining, pre_blank) = item_tag_post_gap(&parser_context, remaining)?;
let pre_blank = Into::<&str>::into(pre_blank) let pre_blank = Into::<&str>::into(pre_blank)
.bytes() .bytes()
@@ -549,6 +549,7 @@ fn detect_contentless_item_contents<'b, 'g, 'r, 's>(
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*; use super::*;
use crate::context::bind_context;
use crate::context::Context; use crate::context::Context;
use crate::context::GlobalSettings; use crate::context::GlobalSettings;
use crate::context::List; use crate::context::List;
@@ -560,7 +561,7 @@ mod tests {
let global_settings = GlobalSettings::default(); let global_settings = GlobalSettings::default();
let initial_context = ContextElement::document_context(); let initial_context = ContextElement::document_context();
let initial_context = Context::new(&global_settings, List::new(&initial_context)); let initial_context = Context::new(&global_settings, List::new(&initial_context));
let plain_list_item_matcher = parser_with_context!(plain_list_item)(&initial_context); let plain_list_item_matcher = bind_context!(plain_list_item, &initial_context);
let (remaining, (_, result)) = plain_list_item_matcher(input).unwrap(); let (remaining, (_, result)) = plain_list_item_matcher(input).unwrap();
assert_eq!(Into::<&str>::into(remaining), ""); assert_eq!(Into::<&str>::into(remaining), "");
assert_eq!(result.get_standard_properties().get_source(), "1."); assert_eq!(result.get_standard_properties().get_source(), "1.");
@@ -572,7 +573,7 @@ mod tests {
let global_settings = GlobalSettings::default(); let global_settings = GlobalSettings::default();
let initial_context = ContextElement::document_context(); let initial_context = ContextElement::document_context();
let initial_context = Context::new(&global_settings, List::new(&initial_context)); let initial_context = Context::new(&global_settings, List::new(&initial_context));
let plain_list_item_matcher = parser_with_context!(plain_list_item)(&initial_context); let plain_list_item_matcher = bind_context!(plain_list_item, &initial_context);
let (remaining, (_, result)) = plain_list_item_matcher(input).unwrap(); let (remaining, (_, result)) = plain_list_item_matcher(input).unwrap();
assert_eq!(Into::<&str>::into(remaining), ""); assert_eq!(Into::<&str>::into(remaining), "");
assert_eq!(result.get_standard_properties().get_source(), "1. foo"); assert_eq!(result.get_standard_properties().get_source(), "1. foo");
@@ -639,7 +640,7 @@ mod tests {
let global_settings = GlobalSettings::default(); let global_settings = GlobalSettings::default();
let initial_context = ContextElement::document_context(); let initial_context = ContextElement::document_context();
let initial_context = Context::new(&global_settings, List::new(&initial_context)); let initial_context = Context::new(&global_settings, List::new(&initial_context));
let plain_list_matcher = parser_with_context!(element(true))(&initial_context); let plain_list_matcher = bind_context!(element(true), &initial_context);
let (remaining, result) = let (remaining, result) =
plain_list_matcher(input).expect("Should parse the plain list successfully."); plain_list_matcher(input).expect("Should parse the plain list successfully.");
assert_eq!(Into::<&str>::into(remaining), " ipsum\n"); assert_eq!(Into::<&str>::into(remaining), " ipsum\n");
@@ -667,7 +668,7 @@ baz"#,
let global_settings = GlobalSettings::default(); let global_settings = GlobalSettings::default();
let initial_context = ContextElement::document_context(); let initial_context = ContextElement::document_context();
let initial_context = Context::new(&global_settings, List::new(&initial_context)); let initial_context = Context::new(&global_settings, List::new(&initial_context));
let plain_list_matcher = parser_with_context!(element(true))(&initial_context); let plain_list_matcher = bind_context!(element(true), &initial_context);
let (remaining, result) = let (remaining, result) =
plain_list_matcher(input).expect("Should parse the plain list successfully."); plain_list_matcher(input).expect("Should parse the plain list successfully.");
assert_eq!(Into::<&str>::into(remaining), "baz"); assert_eq!(Into::<&str>::into(remaining), "baz");
@@ -700,7 +701,7 @@ dolar"#,
let global_settings = GlobalSettings::default(); let global_settings = GlobalSettings::default();
let initial_context = ContextElement::document_context(); let initial_context = ContextElement::document_context();
let initial_context = Context::new(&global_settings, List::new(&initial_context)); let initial_context = Context::new(&global_settings, List::new(&initial_context));
let plain_list_matcher = parser_with_context!(element(true))(&initial_context); let plain_list_matcher = bind_context!(element(true), &initial_context);
let (remaining, result) = let (remaining, result) =
plain_list_matcher(input).expect("Should parse the plain list successfully."); plain_list_matcher(input).expect("Should parse the plain list successfully.");
assert_eq!(Into::<&str>::into(remaining), "dolar"); assert_eq!(Into::<&str>::into(remaining), "dolar");
@@ -730,7 +731,7 @@ dolar"#,
let global_settings = GlobalSettings::default(); let global_settings = GlobalSettings::default();
let initial_context = ContextElement::document_context(); let initial_context = ContextElement::document_context();
let initial_context = Context::new(&global_settings, List::new(&initial_context)); let initial_context = Context::new(&global_settings, List::new(&initial_context));
let result = detect_plain_list(&initial_context, input); let result = detect_plain_list(std::iter::empty(), input, &initial_context, input);
assert!(result.is_ok()); assert!(result.is_ok());
} }
@@ -740,7 +741,7 @@ dolar"#,
let global_settings = GlobalSettings::default(); let global_settings = GlobalSettings::default();
let initial_context = ContextElement::document_context(); let initial_context = ContextElement::document_context();
let initial_context = Context::new(&global_settings, List::new(&initial_context)); let initial_context = Context::new(&global_settings, List::new(&initial_context));
let result = detect_plain_list(&initial_context, input); let result = detect_plain_list(std::iter::empty(), input, &initial_context, input);
assert!(result.is_ok()); assert!(result.is_ok());
} }
@@ -750,7 +751,7 @@ dolar"#,
let global_settings = GlobalSettings::default(); let global_settings = GlobalSettings::default();
let initial_context = ContextElement::document_context(); let initial_context = ContextElement::document_context();
let initial_context = Context::new(&global_settings, List::new(&initial_context)); let initial_context = Context::new(&global_settings, List::new(&initial_context));
let result = detect_plain_list(&initial_context, input); let result = detect_plain_list(std::iter::empty(), input, &initial_context, input);
// Since there is no whitespace after the '+' this is a paragraph, not a plain list. // Since there is no whitespace after the '+' this is a paragraph, not a plain list.
assert!(result.is_err()); assert!(result.is_err());
} }
@@ -761,7 +762,7 @@ dolar"#,
let global_settings = GlobalSettings::default(); let global_settings = GlobalSettings::default();
let initial_context = ContextElement::document_context(); let initial_context = ContextElement::document_context();
let initial_context = Context::new(&global_settings, List::new(&initial_context)); let initial_context = Context::new(&global_settings, List::new(&initial_context));
let result = detect_plain_list(&initial_context, input); let result = detect_plain_list(std::iter::empty(), input, &initial_context, input);
assert!(result.is_ok()); assert!(result.is_ok());
} }
} }

View File

@@ -93,39 +93,33 @@ impl<'x> RematchObject<'x> for PlainText<'x> {
} }
let is_not_whitespace = is_not::<&str, &str, CustomError<_>>(" \t\r\n")(goal); let is_not_whitespace = is_not::<&str, &str, CustomError<_>>(" \t\r\n")(goal);
match is_not_whitespace { if let Ok((new_goal, payload)) = is_not_whitespace {
Ok((new_goal, payload)) => { let (new_remaining, _) = tuple((
let (new_remaining, _) = tuple(( tag_no_case(payload),
tag_no_case(payload), // TODO: Test to see what the REAL condition is. Checking for not-alphabetic works fine for now, but the real criteria might be something like the plain text exit matcher.
// TODO: Test to see what the REAL condition is. Checking for not-alphabetic works fine for now, but the real criteria might be something like the plain text exit matcher. peek(alt((
peek(alt(( recognize(verify(anychar, |c| !c.is_alphanumeric())),
recognize(verify(anychar, |c| !c.is_alphanumeric())), eof,
eof, ))),
))), ))(remaining)?;
))(remaining)?; remaining = new_remaining;
remaining = new_remaining; goal = new_goal;
goal = new_goal; continue;
continue; }
}
Err(_) => {}
};
let is_whitespace = recognize(many1(alt(( let is_whitespace = recognize(many1(alt((
recognize(one_of::<&str, &str, CustomError<_>>(" \t")), recognize(one_of::<&str, &str, CustomError<_>>(" \t")),
line_ending, line_ending,
))))(goal); ))))(goal);
match is_whitespace { if let Ok((new_goal, _)) = is_whitespace {
Ok((new_goal, _)) => { let (new_remaining, _) = many1(org_space_or_line_ending)(remaining)?;
let (new_remaining, _) = many1(org_space_or_line_ending)(remaining)?; remaining = new_remaining;
remaining = new_remaining; goal = new_goal;
goal = new_goal; continue;
continue; }
}
Err(_) => {}
};
return Err(nom::Err::Error(CustomError::MyError(MyError( return Err(nom::Err::Error(CustomError::MyError(MyError(
"Target does not match.".into(), "Target does not match.",
)))); ))));
} }
@@ -144,6 +138,7 @@ mod tests {
use nom::combinator::map; use nom::combinator::map;
use super::*; use super::*;
use crate::context::bind_context;
use crate::context::Context; use crate::context::Context;
use crate::context::ContextElement; use crate::context::ContextElement;
use crate::context::GlobalSettings; use crate::context::GlobalSettings;
@@ -157,10 +152,14 @@ mod tests {
let global_settings = GlobalSettings::default(); let global_settings = GlobalSettings::default();
let initial_context = ContextElement::document_context(); let initial_context = ContextElement::document_context();
let initial_context = Context::new(&global_settings, List::new(&initial_context)); let initial_context = Context::new(&global_settings, List::new(&initial_context));
let plain_text_matcher = parser_with_context!(plain_text( let (remaining, result) = map(
detect_standard_set_object_sans_plain_text bind_context!(
))(&initial_context); plain_text(detect_standard_set_object_sans_plain_text),
let (remaining, result) = map(plain_text_matcher, Object::PlainText)(input).unwrap(); &initial_context
),
Object::PlainText,
)(input)
.unwrap();
assert_eq!(Into::<&str>::into(remaining), ""); assert_eq!(Into::<&str>::into(remaining), "");
assert_eq!( assert_eq!(
result.get_standard_properties().get_source(), result.get_standard_properties().get_source(),

View File

@@ -40,7 +40,7 @@ pub(crate) fn property_drawer<'b, 'g, 'r, 's>(
) -> Res<OrgSource<'s>, PropertyDrawer<'s>> { ) -> Res<OrgSource<'s>, PropertyDrawer<'s>> {
if immediate_in_section(context, "property-drawer") { if immediate_in_section(context, "property-drawer") {
return Err(nom::Err::Error(CustomError::MyError(MyError( return Err(nom::Err::Error(CustomError::MyError(MyError(
"Cannot nest objects of the same element".into(), "Cannot nest objects of the same element",
)))); ))));
} }
let ( let (

View File

@@ -53,7 +53,7 @@ pub(crate) fn radio_link<'b, 'g, 'r, 's>(
} }
} }
Err(nom::Err::Error(CustomError::MyError(MyError( Err(nom::Err::Error(CustomError::MyError(MyError(
"NoRadioLink".into(), "NoRadioLink",
)))) ))))
} }
@@ -99,7 +99,7 @@ pub(crate) fn rematch_target<'x, 'b, 'g, 'r, 's>(
} }
_ => { _ => {
return Err(nom::Err::Error(CustomError::MyError(MyError( return Err(nom::Err::Error(CustomError::MyError(MyError(
"OnlyMinimalSetObjectsAllowed".into(), "OnlyMinimalSetObjectsAllowed",
)))); ))));
} }
}; };
@@ -185,15 +185,14 @@ mod tests {
fn plain_text_radio_target() { fn plain_text_radio_target() {
let input = OrgSource::new("foo bar baz"); let input = OrgSource::new("foo bar baz");
let radio_target_match = vec![Object::PlainText(PlainText { source: "bar" })]; let radio_target_match = vec![Object::PlainText(PlainText { source: "bar" })];
let global_settings = { let global_settings = GlobalSettings {
let mut global_settings = GlobalSettings::default(); radio_targets: vec![&radio_target_match],
global_settings.radio_targets = vec![&radio_target_match]; ..Default::default()
global_settings
}; };
let initial_context = ContextElement::document_context(); let initial_context = ContextElement::document_context();
let initial_context = Context::new(&global_settings, List::new(&initial_context)); let initial_context = Context::new(&global_settings, List::new(&initial_context));
let paragraph_matcher = parser_with_context!(element(true))(&initial_context); let (remaining, first_paragraph) =
let (remaining, first_paragraph) = paragraph_matcher(input).expect("Parse first paragraph"); element(true)(&initial_context, input).expect("Parse first paragraph");
let first_paragraph = match first_paragraph { let first_paragraph = match first_paragraph {
Element::Paragraph(paragraph) => paragraph, Element::Paragraph(paragraph) => paragraph,
_ => panic!("Should be a paragraph!"), _ => panic!("Should be a paragraph!"),
@@ -212,7 +211,7 @@ mod tests {
&Object::RadioLink(RadioLink { &Object::RadioLink(RadioLink {
source: "bar ", source: "bar ",
children: vec![Object::PlainText(PlainText { source: "bar" })], children: vec![Object::PlainText(PlainText { source: "bar" })],
path: "bar".into() path: "bar"
}) })
); );
} }
@@ -224,16 +223,15 @@ mod tests {
source: "*bar*", source: "*bar*",
children: vec![Object::PlainText(PlainText { source: "bar" })], children: vec![Object::PlainText(PlainText { source: "bar" })],
})]; })];
let global_settings = { let global_settings = GlobalSettings {
let mut global_settings = GlobalSettings::default(); radio_targets: vec![&radio_target_match],
global_settings.radio_targets = vec![&radio_target_match]; ..Default::default()
global_settings
}; };
let initial_context = ContextElement::document_context(); let initial_context = ContextElement::document_context();
let initial_context = Context::new(&global_settings, List::new(&initial_context)); let initial_context = Context::new(&global_settings, List::new(&initial_context));
let paragraph_matcher = parser_with_context!(element(true))(&initial_context);
let (remaining, first_paragraph) = let (remaining, first_paragraph) =
paragraph_matcher(input.into()).expect("Parse first paragraph"); element(true)(&initial_context, input).expect("Parse first paragraph");
let first_paragraph = match first_paragraph { let first_paragraph = match first_paragraph {
Element::Paragraph(paragraph) => paragraph, Element::Paragraph(paragraph) => paragraph,
_ => panic!("Should be a paragraph!"), _ => panic!("Should be a paragraph!"),
@@ -255,7 +253,7 @@ mod tests {
source: "*bar* ", source: "*bar* ",
children: vec![Object::PlainText(PlainText { source: "bar" })] children: vec![Object::PlainText(PlainText { source: "bar" })]
})], })],
path: "*bar* ".into() path: "*bar* "
}) })
); );
} }

View File

@@ -139,14 +139,7 @@ fn pathreg<'b, 'g, 'r, 's>(
input: OrgSource<'s>, input: OrgSource<'s>,
) -> Res<OrgSource<'s>, PathReg<'s>> { ) -> Res<OrgSource<'s>, PathReg<'s>> {
let (remaining, path) = map_parser( let (remaining, path) = map_parser(
escaped( escaped(take_till1(|c| matches!(c, '\\' | '[' | ']')), '\\', anychar),
take_till1(|c| match c {
'\\' | '[' | ']' => true,
_ => false,
}),
'\\',
anychar,
),
parser_with_context!(parse_path_reg)(context), parser_with_context!(parse_path_reg)(context),
)(input)?; )(input)?;
Ok((remaining, path)) Ok((remaining, path))
@@ -262,11 +255,8 @@ fn apply_link_templates<'b, 'g, 'r, 's>(
}; };
} }
// Handle lingering state // Handle lingering state
match state { if let ParserState::Percent = state {
ParserState::Percent => { ret.push('%');
ret.push('%');
}
_ => {}
} }
if !injected_value { if !injected_value {
ret.push_str(inject_value); ret.push_str(inject_value);
@@ -494,6 +484,6 @@ fn impl_path_reg_end<'b, 'g, 'r, 's>(
} }
Err(nom::Err::Error(CustomError::MyError(MyError( Err(nom::Err::Error(CustomError::MyError(MyError(
"No path reg end".into(), "No path reg end",
)))) ))))
} }

View File

@@ -65,12 +65,12 @@ pub(crate) fn zeroth_section<'b, 'g, 'r, 's>(
}, },
)(remaining)?; )(remaining)?;
comment_and_property_drawer_element.map(|(comment, property_drawer, _ws)| { if let Some((comment, property_drawer, _ws)) = comment_and_property_drawer_element {
children.insert(0, Element::PropertyDrawer(property_drawer)); children.insert(0, Element::PropertyDrawer(property_drawer));
comment if let Some(ele) = comment.map(Element::Comment) {
.map(Element::Comment) children.insert(0, ele);
.map(|ele| children.insert(0, ele)); }
}); }
let (remaining, _trailing_ws) = let (remaining, _trailing_ws) =
maybe_consume_trailing_whitespace_if_not_exiting(context, remaining)?; maybe_consume_trailing_whitespace_if_not_exiting(context, remaining)?;
@@ -121,12 +121,12 @@ pub(crate) fn section<'b, 'g, 'r, 's>(
!children.is_empty() || property_drawer_element.is_some() || planning_element.is_some() !children.is_empty() || property_drawer_element.is_some() || planning_element.is_some()
}, },
)(remaining)?; )(remaining)?;
property_drawer_element if let Some(ele) = property_drawer_element.map(Element::PropertyDrawer) {
.map(Element::PropertyDrawer) children.insert(0, ele);
.map(|ele| children.insert(0, ele)); }
planning_element if let Some(ele) = planning_element.map(Element::Planning) {
.map(Element::Planning) children.insert(0, ele)
.map(|ele| children.insert(0, ele)); }
let (remaining, _trailing_ws) = let (remaining, _trailing_ws) =
maybe_consume_trailing_whitespace_if_not_exiting(context, remaining)?; maybe_consume_trailing_whitespace_if_not_exiting(context, remaining)?;

View File

@@ -137,7 +137,7 @@ fn script_body<'b, 'g, 'r, 's>(
ScriptBody::Braceless(body.into()) ScriptBody::Braceless(body.into())
}), }),
map(parser_with_context!(script_with_braces)(context), |body| { map(parser_with_context!(script_with_braces)(context), |body| {
ScriptBody::WithBraces(body.into()) ScriptBody::WithBraces(body)
}), }),
map( map(
parser_with_context!(script_with_parenthesis)(context), parser_with_context!(script_with_parenthesis)(context),
@@ -175,7 +175,7 @@ fn script_alphanum<'b, 'g, 'r, 's>(
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))] #[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
fn script_alphanum_character<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, OrgSource<'s>> { fn script_alphanum_character<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, OrgSource<'s>> {
recognize(verify(anychar, |c| { recognize(verify(anychar, |c| {
c.is_alphanumeric() || r#",.\"#.contains(*c) c.is_alphanumeric() || r",.\".contains(*c)
}))(input) }))(input)
} }
@@ -183,7 +183,7 @@ fn script_alphanum_character<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, Org
fn end_script_alphanum_character<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, OrgSource<'s>> { fn end_script_alphanum_character<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, OrgSource<'s>> {
let (remaining, final_char) = recognize(verify(anychar, |c| c.is_alphanumeric()))(input)?; let (remaining, final_char) = recognize(verify(anychar, |c| c.is_alphanumeric()))(input)?;
peek(tuple(( peek(tuple((
take_while(|c| r#",.\"#.contains(c)), take_while(|c| r",.\".contains(c)),
not(script_alphanum_character), not(script_alphanum_character),
)))(remaining)?; )))(remaining)?;
Ok((remaining, final_char)) Ok((remaining, final_char))
@@ -233,7 +233,7 @@ fn _script_with_braces_end<'b, 'g, 'r, 's>(
if current_depth > 0 { if current_depth > 0 {
// Its impossible for the next character to end the subscript or superscript if we're any amount of braces deep // Its impossible for the next character to end the subscript or superscript if we're any amount of braces deep
return Err(nom::Err::Error(CustomError::MyError(MyError( return Err(nom::Err::Error(CustomError::MyError(MyError(
"Not a valid end for subscript or superscript.".into(), "Not a valid end for subscript or superscript.",
)))); ))));
} }
if current_depth < 0 { if current_depth < 0 {
@@ -288,6 +288,6 @@ fn _script_with_parenthesis_end<'s>(
} }
} }
Err(nom::Err::Error(CustomError::MyError(MyError( Err(nom::Err::Error(CustomError::MyError(MyError(
"No script parenthesis end.".into(), "No script parenthesis end.",
)))) ))))
} }

View File

@@ -14,7 +14,6 @@ use nom::multi::many_till;
use nom::sequence::tuple; use nom::sequence::tuple;
use super::affiliated_keyword::parse_affiliated_keywords; use super::affiliated_keyword::parse_affiliated_keywords;
use super::keyword::affiliated_keyword;
use super::keyword::table_formula_keyword; use super::keyword::table_formula_keyword;
use super::object_parser::table_cell_set_object; use super::object_parser::table_cell_set_object;
use super::org_source::OrgSource; use super::org_source::OrgSource;
@@ -92,10 +91,20 @@ where
)) ))
} }
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))] #[cfg_attr(
pub(crate) fn detect_table<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, ()> { feature = "tracing",
let (input, _) = many0(affiliated_keyword)(input)?; tracing::instrument(ret, level = "debug", skip(_context, _affiliated_keywords))
tuple((start_of_line, space0, tag("|")))(input)?; )]
pub(crate) fn detect_table<'b, 'g, 'r, 's, AK>(
_affiliated_keywords: AK,
remaining: OrgSource<'s>,
_context: RefContext<'b, 'g, 'r, 's>,
input: OrgSource<'s>,
) -> Res<OrgSource<'s>, ()>
where
AK: IntoIterator<Item = Keyword<'s>>,
{
tuple((start_of_line, space0, tag("|")))(remaining)?;
Ok((input, ())) Ok((input, ()))
} }
@@ -188,7 +197,7 @@ fn org_mode_table_cell<'b, 'g, 'r, 's>(
let (remaining, (children, _exit_contents)) = verify( let (remaining, (children, _exit_contents)) = verify(
many_till(table_cell_set_object_matcher, exit_matcher), many_till(table_cell_set_object_matcher, exit_matcher),
|(children, exit_contents)| { |(children, exit_contents)| {
!children.is_empty() || Into::<&str>::into(exit_contents).ends_with("|") !children.is_empty() || Into::<&str>::into(exit_contents).ends_with('|')
}, },
)(remaining)?; )(remaining)?;

View File

@@ -43,7 +43,7 @@ pub(crate) fn target<'b, 'g, 'r, 's>(
.expect("We cannot be at the start of the file because we are inside a target."); .expect("We cannot be at the start of the file because we are inside a target.");
if preceding_character.is_whitespace() { if preceding_character.is_whitespace() {
return Err(nom::Err::Error(CustomError::MyError(MyError( return Err(nom::Err::Error(CustomError::MyError(MyError(
"Targets cannot end with whitespace.".into(), "Targets cannot end with whitespace.",
)))); ))));
} }
let (remaining, _) = tag(">>")(remaining)?; let (remaining, _) = tag(">>")(remaining)?;

View File

@@ -183,13 +183,13 @@ fn code<'b, 'g, 'r, 's>(
)) ))
} }
fn text_markup_object<'c>( fn text_markup_object(
marker_symbol: &'c str, marker_symbol: &str,
) -> impl for<'b, 'g, 'r, 's> Fn( ) -> impl for<'b, 'g, 'r, 's> Fn(
RefContext<'b, 'g, 'r, 's>, RefContext<'b, 'g, 'r, 's>,
OrgSource<'s>, OrgSource<'s>,
) -> Res<OrgSource<'s>, Vec<Object<'s>>> ) -> Res<OrgSource<'s>, Vec<Object<'s>>>
+ 'c { + '_ {
move |context, input: OrgSource<'_>| _text_markup_object(context, input, marker_symbol) move |context, input: OrgSource<'_>| _text_markup_object(context, input, marker_symbol)
} }
@@ -235,7 +235,7 @@ fn _text_markup_object<'b, 'g, 'r, 's, 'c>(
let _enter = span.enter(); let _enter = span.enter();
if exit_matcher_parser(context, remaining).is_ok() { if exit_matcher_parser(context, remaining).is_ok() {
return Err(nom::Err::Error(CustomError::MyError(MyError( return Err(nom::Err::Error(CustomError::MyError(MyError(
"Parent exit matcher is triggering.".into(), "Parent exit matcher is triggering.",
)))); ))));
} }
} }
@@ -246,13 +246,13 @@ fn _text_markup_object<'b, 'g, 'r, 's, 'c>(
Ok((remaining, children)) Ok((remaining, children))
} }
fn text_markup_string<'c>( fn text_markup_string(
marker_symbol: &'c str, marker_symbol: &str,
) -> impl for<'b, 'g, 'r, 's> Fn( ) -> impl for<'b, 'g, 'r, 's> Fn(
RefContext<'b, 'g, 'r, 's>, RefContext<'b, 'g, 'r, 's>,
OrgSource<'s>, OrgSource<'s>,
) -> Res<OrgSource<'s>, OrgSource<'s>> ) -> Res<OrgSource<'s>, OrgSource<'s>>
+ 'c { + '_ {
move |context, input: OrgSource<'_>| _text_markup_string(context, input, marker_symbol) move |context, input: OrgSource<'_>| _text_markup_string(context, input, marker_symbol)
} }
@@ -291,7 +291,7 @@ fn _text_markup_string<'b, 'g, 'r, 's, 'c>(
let _enter = span.enter(); let _enter = span.enter();
if exit_matcher_parser(context, remaining).is_ok() { if exit_matcher_parser(context, remaining).is_ok() {
return Err(nom::Err::Error(CustomError::MyError(MyError( return Err(nom::Err::Error(CustomError::MyError(MyError(
"Parent exit matcher is triggering.".into(), "Parent exit matcher is triggering.",
)))); ))));
} }
} }
@@ -322,7 +322,7 @@ fn pre<'b, 'g, 'r, 's>(
Some('-') | Some('(') | Some('{') | Some('\'') | Some('"') => {} Some('-') | Some('(') | Some('{') | Some('\'') | Some('"') => {}
Some(_) => { Some(_) => {
return Err(nom::Err::Error(CustomError::MyError(MyError( return Err(nom::Err::Error(CustomError::MyError(MyError(
"Not a valid pre character for text markup.".into(), "Not a valid pre character for text markup.",
)))); ))));
} }
None => unreachable!(), // None is for start of file, which should already be handled by the start_of_line matcher above. None => unreachable!(), // None is for start of file, which should already be handled by the start_of_line matcher above.
@@ -343,10 +343,7 @@ fn post<'b, 'g, 'r, 's>(
Ok((remaining, ())) Ok((remaining, ()))
} }
fn text_markup_end<'c>( fn text_markup_end(marker_symbol: &str, contents_start_offset: usize) -> impl ContextMatcher + '_ {
marker_symbol: &'c str,
contents_start_offset: usize,
) -> impl ContextMatcher + 'c {
move |context, input: OrgSource<'_>| { move |context, input: OrgSource<'_>| {
_text_markup_end(context, input, marker_symbol, contents_start_offset) _text_markup_end(context, input, marker_symbol, contents_start_offset)
} }
@@ -364,7 +361,7 @@ fn _text_markup_end<'b, 'g, 'r, 's, 'c>(
) -> Res<OrgSource<'s>, OrgSource<'s>> { ) -> Res<OrgSource<'s>, OrgSource<'s>> {
if input.get_byte_offset() == contents_start_offset { if input.get_byte_offset() == contents_start_offset {
return Err(nom::Err::Error(CustomError::MyError(MyError( return Err(nom::Err::Error(CustomError::MyError(MyError(
"Text markup cannot be empty".into(), "Text markup cannot be empty",
)))); ))));
} }
not(preceded_by_whitespace(false))(input)?; not(preceded_by_whitespace(false))(input)?;
@@ -499,7 +496,7 @@ fn _rematch_text_markup_object<'b, 'g, 'r, 's, 'x>(
let _enter = span.enter(); let _enter = span.enter();
if exit_matcher_parser(context, remaining).is_ok() { if exit_matcher_parser(context, remaining).is_ok() {
return Err(nom::Err::Error(CustomError::MyError(MyError( return Err(nom::Err::Error(CustomError::MyError(MyError(
"Parent exit matcher is triggering.".into(), "Parent exit matcher is triggering.",
)))); ))));
} }
} }

View File

@@ -486,7 +486,7 @@ fn dayname_end<'b, 'g, 'r, 's>(
}))(input) }))(input)
} }
const fn time<'c>( const fn time(
allow_rest: bool, allow_rest: bool,
) -> impl for<'b, 'g, 'r, 's> Fn(RefContext<'b, 'g, 'r, 's>, OrgSource<'s>) -> Res<OrgSource<'s>, Time<'s>> ) -> impl for<'b, 'g, 'r, 's> Fn(RefContext<'b, 'g, 'r, 's>, OrgSource<'s>) -> Res<OrgSource<'s>, Time<'s>>
{ {
@@ -590,6 +590,7 @@ fn time_range_rest_end<'b, 'g, 'r, 's>(
tag("-"), tag("-"),
parser_with_context!(time(true))(&parent_node), parser_with_context!(time(true))(&parent_node),
)))(input); )))(input);
#[allow(clippy::let_and_return)] // otherwise parent_node does not live long enough.
exit_contents exit_contents
} }

View File

@@ -28,10 +28,7 @@ pub(crate) const WORD_CONSTITUENT_CHARACTERS: &str =
"abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"; "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789";
/// Check if we are below a section of the given section type regardless of depth /// Check if we are below a section of the given section type regardless of depth
pub(crate) fn in_section<'b, 'g, 'r, 's, 'x>( pub(crate) fn in_section(context: RefContext<'_, '_, '_, '_>, section_name: &str) -> bool {
context: RefContext<'b, 'g, 'r, 's>,
section_name: &'x str,
) -> bool {
for thing in context.iter() { for thing in context.iter() {
match thing { match thing {
ContextElement::Context(name) if *name == section_name => return true, ContextElement::Context(name) if *name == section_name => return true,
@@ -42,9 +39,9 @@ pub(crate) fn in_section<'b, 'g, 'r, 's, 'x>(
} }
/// Checks if we are currently an immediate child of the given section type /// Checks if we are currently an immediate child of the given section type
pub(crate) fn immediate_in_section<'b, 'g, 'r, 's, 'x>( pub(crate) fn immediate_in_section(
context: RefContext<'b, 'g, 'r, 's>, context: RefContext<'_, '_, '_, '_>,
section_name: &'x str, section_name: &str,
) -> bool { ) -> bool {
for thing in context.iter() { for thing in context.iter() {
match thing { match thing {
@@ -133,7 +130,7 @@ pub(crate) fn start_of_line<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, ()>
Ok((input, ())) Ok((input, ()))
} else { } else {
Err(nom::Err::Error(CustomError::MyError(MyError( Err(nom::Err::Error(CustomError::MyError(MyError(
"Not at start of line".into(), "Not at start of line",
)))) ))))
} }
} }
@@ -156,7 +153,7 @@ fn _preceded_by_whitespace<'s>(
.unwrap_or(allow_start_of_file) .unwrap_or(allow_start_of_file)
{ {
return Err(nom::Err::Error(CustomError::MyError(MyError( return Err(nom::Err::Error(CustomError::MyError(MyError(
"Must be preceded by a whitespace character.".into(), "Must be preceded by a whitespace character.",
)))); ))));
} }
Ok((input, ())) Ok((input, ()))
@@ -198,9 +195,9 @@ pub(crate) fn text_until_exit<'b, 'g, 'r, 's>(
#[allow(dead_code)] #[allow(dead_code)]
fn not_yet_implemented() -> Res<OrgSource<'static>, ()> { fn not_yet_implemented() -> Res<OrgSource<'static>, ()> {
return Err(nom::Err::Error(CustomError::MyError(MyError( Err(nom::Err::Error(CustomError::MyError(MyError(
"Not implemented yet.".into(), "Not implemented yet.",
)))); ))))
} }
#[allow(dead_code)] #[allow(dead_code)]
@@ -234,29 +231,27 @@ where
/// Match single space or tab. /// Match single space or tab.
/// ///
/// In org-mode syntax, spaces and tabs are often (but not always!) interchangeable. /// In org-mode syntax, spaces and tabs are often (but not always!) interchangeable.
pub(crate) fn org_space<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, char> { pub(crate) fn org_space(input: OrgSource<'_>) -> Res<OrgSource<'_>, char> {
one_of(" \t")(input) one_of(" \t")(input)
} }
/// Matches a single space, tab, line ending, or end of file. /// Matches a single space, tab, line ending, or end of file.
/// ///
/// In org-mode syntax there are often delimiters that could be any whitespace at all or the end of file. /// In org-mode syntax there are often delimiters that could be any whitespace at all or the end of file.
pub(crate) fn org_space_or_line_ending<'s>( pub(crate) fn org_space_or_line_ending(input: OrgSource<'_>) -> Res<OrgSource<'_>, OrgSource<'_>> {
input: OrgSource<'s>,
) -> Res<OrgSource<'s>, OrgSource<'s>> {
alt((recognize(org_space), org_line_ending))(input) alt((recognize(org_space), org_line_ending))(input)
} }
/// Match a line break or the end of the file. /// Match a line break or the end of the file.
/// ///
/// In org-mode syntax, the end of the file can serve the same purpose as a line break syntactically. /// In org-mode syntax, the end of the file can serve the same purpose as a line break syntactically.
pub(crate) fn org_line_ending<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, OrgSource<'s>> { pub(crate) fn org_line_ending(input: OrgSource<'_>) -> Res<OrgSource<'_>, OrgSource<'_>> {
alt((line_ending, eof))(input) alt((line_ending, eof))(input)
} }
/// Match the whitespace at the beginning of a line and give it an indentation level. /// Match the whitespace at the beginning of a line and give it an indentation level.
pub(crate) fn indentation_level<'b, 'g, 'r, 's>( pub(crate) fn indentation_level<'s>(
context: RefContext<'b, 'g, 'r, 's>, context: RefContext<'_, '_, '_, 's>,
input: OrgSource<'s>, input: OrgSource<'s>,
) -> Res<OrgSource<'s>, (IndentationLevel, OrgSource<'s>)> { ) -> Res<OrgSource<'s>, (IndentationLevel, OrgSource<'s>)> {
let (remaining, leading_whitespace) = space0(input)?; let (remaining, leading_whitespace) = space0(input)?;

View File

@@ -6,7 +6,11 @@ use super::Object;
pub enum AffiliatedKeywordValue<'s> { pub enum AffiliatedKeywordValue<'s> {
SingleString(&'s str), SingleString(&'s str),
ListOfStrings(Vec<&'s str>), ListOfStrings(Vec<&'s str>),
ListOfListsOfObjects(Vec<(Option<Vec<Object<'s>>>, Vec<Object<'s>>)>), OptionalPair {
optval: Option<&'s str>,
val: &'s str,
},
ObjectTree(Vec<(Option<Vec<Object<'s>>>, Vec<Object<'s>>)>),
} }
#[derive(Debug)] #[derive(Debug)]
@@ -15,7 +19,7 @@ pub struct AffiliatedKeyword<'s> {
pub value: AffiliatedKeywordValue<'s>, pub value: AffiliatedKeywordValue<'s>,
} }
#[derive(Debug)] #[derive(Debug, Default)]
pub struct AffiliatedKeywords<'s> { pub struct AffiliatedKeywords<'s> {
pub(crate) keywords: BTreeMap<String, AffiliatedKeywordValue<'s>>, pub(crate) keywords: BTreeMap<String, AffiliatedKeywordValue<'s>>,
} }
@@ -23,11 +27,3 @@ pub struct AffiliatedKeywords<'s> {
pub trait GetAffiliatedKeywords<'s> { pub trait GetAffiliatedKeywords<'s> {
fn get_affiliated_keywords<'a>(&'a self) -> &'a AffiliatedKeywords<'s>; fn get_affiliated_keywords<'a>(&'a self) -> &'a AffiliatedKeywords<'s>;
} }
impl<'s> Default for AffiliatedKeywords<'s> {
fn default() -> Self {
AffiliatedKeywords {
keywords: BTreeMap::new(),
}
}
}

View File

@@ -101,11 +101,14 @@ impl<'s> Heading<'s> {
_ => None, _ => None,
}) })
.flat_map(|section| section.children.iter()) .flat_map(|section| section.children.iter())
.take(1) .take_while(|element| {
.filter_map(|element| match element { matches!(element, Element::Planning(_) | Element::PropertyDrawer(_))
})
.find_map(|element| match element {
Element::PropertyDrawer(property_drawer) => Some(property_drawer), Element::PropertyDrawer(property_drawer) => Some(property_drawer),
_ => None, _ => None,
}) })
.into_iter()
.flat_map(|property_drawer| property_drawer.children.iter()) .flat_map(|property_drawer| property_drawer.children.iter())
} }
} }
@@ -117,10 +120,8 @@ impl<'s> Document<'s> {
.iter() .iter()
.flat_map(|zeroth_section| zeroth_section.children.iter()); .flat_map(|zeroth_section| zeroth_section.children.iter());
let property_drawer = zeroth_section_children let property_drawer = zeroth_section_children
.take_while(|element| match element { .take_while(|element| {
Element::Comment(_) => true, matches!(element, Element::Comment(_) | Element::PropertyDrawer(_))
Element::PropertyDrawer(_) => true,
_ => false,
}) })
.find_map(|element| match element { .find_map(|element| match element {
Element::PropertyDrawer(property_drawer) => Some(property_drawer), Element::PropertyDrawer(property_drawer) => Some(property_drawer),

View File

@@ -26,6 +26,7 @@ use super::SetSource;
use super::SpecialBlock; use super::SpecialBlock;
use super::StandardProperties; use super::StandardProperties;
#[allow(clippy::large_enum_variant)]
#[derive(Debug)] #[derive(Debug)]
pub enum Element<'s> { pub enum Element<'s> {
Paragraph(Paragraph<'s>), Paragraph(Paragraph<'s>),

View File

@@ -170,12 +170,10 @@ impl<'s> Paragraph<'s> {
/// ///
/// This is used for elements that support an "empty" content like greater blocks. /// This is used for elements that support an "empty" content like greater blocks.
pub(crate) fn of_text(input: &'s str) -> Self { pub(crate) fn of_text(input: &'s str) -> Self {
let mut objects = Vec::with_capacity(1);
objects.push(Object::PlainText(PlainText { source: input }));
Paragraph { Paragraph {
source: input, source: input,
affiliated_keywords: AffiliatedKeywords::default(), affiliated_keywords: AffiliatedKeywords::default(),
children: objects, children: vec![Object::PlainText(PlainText { source: input })],
} }
} }
} }

View File

@@ -263,6 +263,7 @@ pub struct Superscript<'s> {
pub children: Vec<Object<'s>>, pub children: Vec<Object<'s>>,
} }
// TODO: Perhaps there is an optimization of converting to unix time we can do to shrink this struct. (ref: clippy::large_enum_variant on Element)
#[derive(Debug, PartialEq, Clone)] #[derive(Debug, PartialEq, Clone)]
pub struct Timestamp<'s> { pub struct Timestamp<'s> {
pub source: &'s str, pub source: &'s str,
@@ -315,7 +316,7 @@ pub struct Minute(MinuteInner);
impl Year { impl Year {
// TODO: Make a real error type instead of a boxed any error. // TODO: Make a real error type instead of a boxed any error.
pub fn new<'s>(source: &'s str) -> Result<Self, Box<dyn std::error::Error>> { pub fn new(source: &str) -> Result<Self, Box<dyn std::error::Error>> {
let year = source.parse::<YearInner>()?; let year = source.parse::<YearInner>()?;
Ok(Year(year)) Ok(Year(year))
} }
@@ -327,9 +328,9 @@ impl Year {
impl Month { impl Month {
// TODO: Make a real error type instead of a boxed any error. // TODO: Make a real error type instead of a boxed any error.
pub fn new<'s>(source: &'s str) -> Result<Self, Box<dyn std::error::Error>> { pub fn new(source: &str) -> Result<Self, Box<dyn std::error::Error>> {
let month = source.parse::<MonthInner>()?; let month = source.parse::<MonthInner>()?;
if month < 1 || month > 12 { if !(1..=12).contains(&month) {
Err("Month exceeds possible range.")?; Err("Month exceeds possible range.")?;
} }
Ok(Month(month)) Ok(Month(month))
@@ -342,9 +343,9 @@ impl Month {
impl DayOfMonth { impl DayOfMonth {
// TODO: Make a real error type instead of a boxed any error. // TODO: Make a real error type instead of a boxed any error.
pub fn new<'s>(source: &'s str) -> Result<Self, Box<dyn std::error::Error>> { pub fn new(source: &str) -> Result<Self, Box<dyn std::error::Error>> {
let day_of_month = source.parse::<DayOfMonthInner>()?; let day_of_month = source.parse::<DayOfMonthInner>()?;
if day_of_month < 1 || day_of_month > 31 { if !(1..=31).contains(&day_of_month) {
Err("Day of month exceeds possible range.")?; Err("Day of month exceeds possible range.")?;
} }
Ok(DayOfMonth(day_of_month)) Ok(DayOfMonth(day_of_month))
@@ -357,7 +358,7 @@ impl DayOfMonth {
impl Hour { impl Hour {
// TODO: Make a real error type instead of a boxed any error. // TODO: Make a real error type instead of a boxed any error.
pub fn new<'s>(source: &'s str) -> Result<Self, Box<dyn std::error::Error>> { pub fn new(source: &str) -> Result<Self, Box<dyn std::error::Error>> {
let hour = source.parse::<HourInner>()?; let hour = source.parse::<HourInner>()?;
if hour > 23 { if hour > 23 {
Err("Hour exceeds possible range.")?; Err("Hour exceeds possible range.")?;
@@ -372,7 +373,7 @@ impl Hour {
impl Minute { impl Minute {
// TODO: Make a real error type instead of a boxed any error. // TODO: Make a real error type instead of a boxed any error.
pub fn new<'s>(source: &'s str) -> Result<Self, Box<dyn std::error::Error>> { pub fn new(source: &str) -> Result<Self, Box<dyn std::error::Error>> {
let minute = source.parse::<MinuteInner>()?; let minute = source.parse::<MinuteInner>()?;
if minute > 59 { if minute > 59 {
Err("Minute exceeds possible range.")?; Err("Minute exceeds possible range.")?;
@@ -731,21 +732,21 @@ impl<'s> RegularLink<'s> {
/// Coalesce whitespace if the raw_link contains line breaks. /// Coalesce whitespace if the raw_link contains line breaks.
/// ///
/// This corresponds to the output you would get from the upstream emacs org-mode AST. /// This corresponds to the output you would get from the upstream emacs org-mode AST.
pub fn get_raw_link<'b>(&'b self) -> Cow<'b, str> { pub fn get_raw_link(&self) -> Cow<'_, str> {
coalesce_whitespace_if_line_break(&self.raw_link) coalesce_whitespace_if_line_break(&self.raw_link)
} }
/// Coalesce whitespace if the path contains line breaks. /// Coalesce whitespace if the path contains line breaks.
/// ///
/// This corresponds to the output you would get from the upstream emacs org-mode AST. /// This corresponds to the output you would get from the upstream emacs org-mode AST.
pub fn get_path<'b>(&'b self) -> Cow<'b, str> { pub fn get_path(&self) -> Cow<'_, str> {
coalesce_whitespace_if_line_break(&self.path) coalesce_whitespace_if_line_break(&self.path)
} }
/// Coalesce whitespace if the search_option contains line breaks. /// Coalesce whitespace if the search_option contains line breaks.
/// ///
/// This corresponds to the output you would get from the upstream emacs org-mode AST. /// This corresponds to the output you would get from the upstream emacs org-mode AST.
pub fn get_search_option<'b>(&'b self) -> Option<Cow<'b, str>> { pub fn get_search_option(&self) -> Option<Cow<'_, str>> {
self.search_option self.search_option
.as_ref() .as_ref()
.map(|search_option| coalesce_whitespace_if_line_break(search_option.borrow())) .map(|search_option| coalesce_whitespace_if_line_break(search_option.borrow()))
@@ -782,7 +783,7 @@ impl<'s> OrgMacro<'s> {
pub fn get_args<'b>(&'b self) -> impl Iterator<Item = Cow<'s, str>> + 'b { pub fn get_args<'b>(&'b self) -> impl Iterator<Item = Cow<'s, str>> + 'b {
self.args self.args
.iter() .iter()
.map(|arg| coalesce_whitespace_escaped('\\', |c| ",".contains(c))(*arg)) .map(|arg| coalesce_whitespace_escaped('\\', |c| ",".contains(c))(arg))
} }
} }

View File

@@ -3,7 +3,7 @@ use std::borrow::Cow;
/// Removes all whitespace from a string if any line breaks are present. /// Removes all whitespace from a string if any line breaks are present.
/// ///
/// Example: "foo bar" => "foo bar" but "foo \n bar" => "foobar". /// Example: "foo bar" => "foo bar" but "foo \n bar" => "foobar".
pub(crate) fn remove_whitespace_if_line_break<'s>(input: &'s str) -> Cow<'s, str> { pub(crate) fn remove_whitespace_if_line_break(input: &str) -> Cow<'_, str> {
let mut state = RemoveWhitespaceIfLineBreakState::Normal; let mut state = RemoveWhitespaceIfLineBreakState::Normal;
for (offset, c) in input.char_indices() { for (offset, c) in input.char_indices() {
match (&mut state, c) { match (&mut state, c) {
@@ -49,7 +49,7 @@ enum RemoveWhitespaceIfLineBreakState {
/// Removes all line breaks from a string /// Removes all line breaks from a string
/// ///
/// Example: "foo bar" => "foo bar" but "foo \n bar" => "foo bar". /// Example: "foo bar" => "foo bar" but "foo \n bar" => "foo bar".
pub(crate) fn remove_line_break<'s>(input: &'s str) -> Cow<'s, str> { pub(crate) fn remove_line_break(input: &str) -> Cow<'_, str> {
let mut state = RemoveLineBreakState::Normal; let mut state = RemoveLineBreakState::Normal;
for (offset, c) in input.char_indices() { for (offset, c) in input.char_indices() {
match (&mut state, c) { match (&mut state, c) {
@@ -79,7 +79,7 @@ enum RemoveLineBreakState {
/// Removes all whitespace from a string if any line breaks are present. /// Removes all whitespace from a string if any line breaks are present.
/// ///
/// Example: "foo bar" => "foo bar" but "foo \n bar" => "foobar". /// Example: "foo bar" => "foo bar" but "foo \n bar" => "foobar".
pub(crate) fn coalesce_whitespace_if_line_break<'s>(input: &'s str) -> Cow<'s, str> { pub(crate) fn coalesce_whitespace_if_line_break(input: &str) -> Cow<'_, str> {
let mut state = CoalesceWhitespaceIfLineBreakState::Normal; let mut state = CoalesceWhitespaceIfLineBreakState::Normal;
for (offset, c) in input.char_indices() { for (offset, c) in input.char_indices() {
match (&mut state, c) { match (&mut state, c) {
@@ -120,15 +120,13 @@ pub(crate) fn coalesce_whitespace_if_line_break<'s>(input: &'s str) -> Cow<'s, s
ret.push(c); ret.push(c);
} }
// Do nothing if preceding character was whitespace and this character also is whitespace. // Do nothing if preceding character was whitespace and this character also is whitespace.
} else if c.is_ascii_whitespace() {
// Preceding character was not whitespace but this is.
sub_loop_in_whitespace = true;
ret.push(' ');
} else { } else {
if c.is_ascii_whitespace() { // Preceding character was not whitespace and this is not either.
// Preceding character was not whitespace but this is. ret.push(c);
sub_loop_in_whitespace = true;
ret.push(' ');
} else {
// Preceding character was not whitespace and this is not either.
ret.push(c);
}
} }
} }
if !*in_whitespace { if !*in_whitespace {
@@ -202,7 +200,7 @@ enum CoalesceWhitespaceIfLineBreakState {
/// ///
/// Example: "foo bar" => "foobar" and "foo \n bar" => "foobar". /// Example: "foo bar" => "foobar" and "foo \n bar" => "foobar".
#[allow(dead_code)] #[allow(dead_code)]
pub(crate) fn coalesce_whitespace<'s>(input: &'s str) -> Cow<'s, str> { pub(crate) fn coalesce_whitespace(input: &str) -> Cow<'_, str> {
let mut state = CoalesceWhitespace::Normal; let mut state = CoalesceWhitespace::Normal;
for (offset, c) in input.char_indices() { for (offset, c) in input.char_indices() {
match (&mut state, c) { match (&mut state, c) {
@@ -248,18 +246,18 @@ enum CoalesceWhitespace {
/// Removes all whitespace from a string and handle escaping characters. /// Removes all whitespace from a string and handle escaping characters.
/// ///
/// Example: "foo bar" => "foobar" and "foo \n bar" => "foobar" but if the escape character is backslash and comma is an escapable character than "foo\,bar" becomes "foo,bar". /// Example: "foo bar" => "foobar" and "foo \n bar" => "foobar" but if the escape character is backslash and comma is an escapable character than "foo\,bar" becomes "foo,bar".
pub(crate) fn coalesce_whitespace_escaped<'c, C: Fn(char) -> bool>( pub(crate) fn coalesce_whitespace_escaped<C: Fn(char) -> bool>(
escape_character: char, escape_character: char,
escapable_characters: C, escapable_characters: C,
) -> impl for<'s> Fn(&'s str) -> Cow<'s, str> { ) -> impl for<'s> Fn(&'s str) -> Cow<'s, str> {
move |input| impl_coalesce_whitespace_escaped(input, escape_character, &escapable_characters) move |input| impl_coalesce_whitespace_escaped(input, escape_character, &escapable_characters)
} }
fn impl_coalesce_whitespace_escaped<'s, C: Fn(char) -> bool>( fn impl_coalesce_whitespace_escaped<C: Fn(char) -> bool>(
input: &'s str, input: &str,
escape_character: char, escape_character: char,
escapable_characters: C, escapable_characters: C,
) -> Cow<'s, str> { ) -> Cow<'_, str> {
let mut state = CoalesceWhitespaceEscaped::Normal { let mut state = CoalesceWhitespaceEscaped::Normal {
in_whitespace: false, in_whitespace: false,
}; };
@@ -451,7 +449,7 @@ enum CoalesceWhitespaceEscaped {
}, },
} }
pub(crate) fn to_lowercase<'s>(input: &'s str) -> Cow<'s, str> { pub(crate) fn to_lowercase(input: &str) -> Cow<'_, str> {
if input.chars().any(|c| !c.is_lowercase()) { if input.chars().any(|c| !c.is_lowercase()) {
Cow::Owned(input.to_lowercase()) Cow::Owned(input.to_lowercase())
} else { } else {

View File

@@ -1,5 +1,3 @@
#![cfg(feature = "compare")] #![cfg(feature = "compare")]
#[feature(exit_status_error)]
include!(concat!(env!("OUT_DIR"), "/tests.rs")); include!(concat!(env!("OUT_DIR"), "/tests.rs"));

View File

@@ -5,7 +5,7 @@
async fn autogen_default_{name}() -> Result<(), Box<dyn std::error::Error>> {{ async fn autogen_default_{name}() -> Result<(), Box<dyn std::error::Error>> {{
let org_path = "{path}"; let org_path = "{path}";
let org_contents = std::fs::read_to_string(org_path).expect("Read org file."); let org_contents = std::fs::read_to_string(org_path).expect("Read org file.");
organic::compare::run_anonymous_compare(org_contents.as_str()).await?; assert!(organic::compare::run_anonymous_compare(org_contents.as_str()).await?);
Ok(()) Ok(())
}} }}
@@ -14,12 +14,11 @@ async fn autogen_default_{name}() -> Result<(), Box<dyn std::error::Error>> {{
async fn autogen_la_{name}() -> Result<(), Box<dyn std::error::Error>> {{ async fn autogen_la_{name}() -> Result<(), Box<dyn std::error::Error>> {{
let org_path = "{path}"; let org_path = "{path}";
let org_contents = std::fs::read_to_string(org_path).expect("Read org file."); let org_contents = std::fs::read_to_string(org_path).expect("Read org file.");
let global_settings = {{ let global_settings = organic::settings::GlobalSettings {{
let mut global_settings = organic::settings::GlobalSettings::default(); list_allow_alphabetical: true,
global_settings.list_allow_alphabetical = true; ..Default::default()
global_settings
}}; }};
organic::compare::run_anonymous_compare_with_settings(org_contents.as_str(), &global_settings, false).await?; assert!(organic::compare::run_anonymous_compare_with_settings(org_contents.as_str(), &global_settings, false).await?);
Ok(()) Ok(())
}} }}
@@ -28,12 +27,11 @@ async fn autogen_la_{name}() -> Result<(), Box<dyn std::error::Error>> {{
async fn autogen_t1_{name}() -> Result<(), Box<dyn std::error::Error>> {{ async fn autogen_t1_{name}() -> Result<(), Box<dyn std::error::Error>> {{
let org_path = "{path}"; let org_path = "{path}";
let org_contents = std::fs::read_to_string(org_path).expect("Read org file."); let org_contents = std::fs::read_to_string(org_path).expect("Read org file.");
let global_settings = {{ let global_settings = organic::settings::GlobalSettings {{
let mut global_settings = organic::settings::GlobalSettings::default(); tab_width: 1,
global_settings.tab_width = 1; ..Default::default()
global_settings
}}; }};
organic::compare::run_anonymous_compare_with_settings(org_contents.as_str(), &global_settings, false).await?; assert!(organic::compare::run_anonymous_compare_with_settings(org_contents.as_str(), &global_settings, false).await?);
Ok(()) Ok(())
}} }}
@@ -42,12 +40,11 @@ async fn autogen_t1_{name}() -> Result<(), Box<dyn std::error::Error>> {{
async fn autogen_t16_{name}() -> Result<(), Box<dyn std::error::Error>> {{ async fn autogen_t16_{name}() -> Result<(), Box<dyn std::error::Error>> {{
let org_path = "{path}"; let org_path = "{path}";
let org_contents = std::fs::read_to_string(org_path).expect("Read org file."); let org_contents = std::fs::read_to_string(org_path).expect("Read org file.");
let global_settings = {{ let global_settings = organic::settings::GlobalSettings {{
let mut global_settings = organic::settings::GlobalSettings::default(); tab_width: 16,
global_settings.tab_width = 16; ..Default::default()
global_settings
}}; }};
organic::compare::run_anonymous_compare_with_settings(org_contents.as_str(), &global_settings, false).await?; assert!(organic::compare::run_anonymous_compare_with_settings(org_contents.as_str(), &global_settings, false).await?);
Ok(()) Ok(())
}} }}
@@ -56,11 +53,10 @@ async fn autogen_t16_{name}() -> Result<(), Box<dyn std::error::Error>> {{
async fn autogen_odd_{name}() -> Result<(), Box<dyn std::error::Error>> {{ async fn autogen_odd_{name}() -> Result<(), Box<dyn std::error::Error>> {{
let org_path = "{path}"; let org_path = "{path}";
let org_contents = std::fs::read_to_string(org_path).expect("Read org file."); let org_contents = std::fs::read_to_string(org_path).expect("Read org file.");
let global_settings = {{ let global_settings = organic::settings::GlobalSettings {{
let mut global_settings = organic::settings::GlobalSettings::default(); odd_levels_only: organic::settings::HeadlineLevelFilter::Odd,
global_settings.odd_levels_only = organic::settings::HeadlineLevelFilter::Odd; ..Default::default()
global_settings
}}; }};
organic::compare::run_anonymous_compare_with_settings(org_contents.as_str(), &global_settings, false).await?; assert!(organic::compare::run_anonymous_compare_with_settings(org_contents.as_str(), &global_settings, false).await?);
Ok(()) Ok(())
}} }}