3 Commits

Author SHA1 Message Date
Tom Alexander
fcd63b1231 fixup
All checks were successful
rust-test Build rust-test has succeeded
rust-build Build rust-build has succeeded
2023-08-31 21:58:41 -04:00
Tom Alexander
743b4c9982 fixup
All checks were successful
rust-test Build rust-test has succeeded
rust-build Build rust-build has succeeded
2023-08-31 21:55:20 -04:00
Tom Alexander
7eccf8fccd Remove dependency on run-docker-image task.
Some checks failed
rust-test Build rust-test has failed
rust-build Build rust-build has succeeded
2023-08-31 21:48:06 -04:00
173 changed files with 5038 additions and 10562 deletions

View File

@@ -1,203 +0,0 @@
apiVersion: tekton.dev/v1beta1
kind: PipelineRun
metadata:
name: rust-foreign-document-test
spec:
pipelineSpec:
timeouts:
pipeline: "2h0m0s"
tasks: "1h0m40s"
finally: "0h30m0s"
params:
- name: image-name
description: The name for the built image
type: string
- name: path-to-image-context
description: The path to the build context
type: string
- name: path-to-dockerfile
description: The path to the Dockerfile
type: string
tasks:
- name: do-stuff
taskSpec:
metadata: {}
stepTemplate:
image: alpine:3.18
name: ""
resources:
requests:
cpu: 10m
memory: 600Mi
workingDir: /workspace/source
steps:
- image: alpine:3.18
name: do-stuff-step
script: |
#!/usr/bin/env sh
echo "hello world"
- name: report-pending
taskRef:
name: gitea-set-status
runAfter:
- fetch-repository
params:
- name: CONTEXT
value: "$(params.JOB_NAME)"
- name: REPO_FULL_NAME
value: "$(params.REPO_OWNER)/$(params.REPO_NAME)"
- name: GITEA_HOST_URL
value: code.fizz.buzz
- name: SHA
value: "$(tasks.fetch-repository.results.commit)"
- name: DESCRIPTION
value: "Build $(params.JOB_NAME) has started"
- name: STATE
value: pending
- name: TARGET_URL
value: "https://tekton.fizz.buzz/#/namespaces/$(context.pipelineRun.namespace)/pipelineruns/$(context.pipelineRun.name)"
- name: fetch-repository
taskRef:
name: git-clone
workspaces:
- name: output
workspace: git-source
params:
- name: url
value: $(params.REPO_URL)
- name: revision
value: $(params.PULL_BASE_SHA)
- name: deleteExisting
value: "true"
- name: build-image
taskRef:
name: kaniko
params:
- name: IMAGE
value: "$(params.image-name):$(tasks.fetch-repository.results.commit)"
- name: CONTEXT
value: $(params.path-to-image-context)
- name: DOCKERFILE
value: $(params.path-to-dockerfile)
- name: BUILDER_IMAGE
value: "gcr.io/kaniko-project/executor:v1.12.1"
- name: EXTRA_ARGS
value:
- --target=foreign-document-test
- --cache=true
- --cache-copy-layers
- --cache-repo=harbor.fizz.buzz/kanikocache/cache
- --use-new-run # Should result in a speed-up
- --reproducible # To remove timestamps so layer caching works.
- --snapshot-mode=redo
- --skip-unused-stages=true
- --registry-mirror=dockerhub.dockerhub.svc.cluster.local
workspaces:
- name: source
workspace: git-source
- name: dockerconfig
workspace: docker-credentials
runAfter:
- fetch-repository
- name: run-image
taskRef:
name: run-docker-image
workspaces:
- name: source
workspace: git-source
- name: cargo-cache
workspace: cargo-cache
runAfter:
- build-image
params:
- name: docker-image
value: "$(params.image-name):$(tasks.fetch-repository.results.commit)"
finally:
- name: report-success
when:
- input: "$(tasks.status)"
operator: in
values: ["Succeeded", "Completed"]
taskRef:
name: gitea-set-status
params:
- name: CONTEXT
value: "$(params.JOB_NAME)"
- name: REPO_FULL_NAME
value: "$(params.REPO_OWNER)/$(params.REPO_NAME)"
- name: GITEA_HOST_URL
value: code.fizz.buzz
- name: SHA
value: "$(tasks.fetch-repository.results.commit)"
- name: DESCRIPTION
value: "Build $(params.JOB_NAME) has succeeded"
- name: STATE
value: success
- name: TARGET_URL
value: "https://tekton.fizz.buzz/#/namespaces/$(context.pipelineRun.namespace)/pipelineruns/$(context.pipelineRun.name)"
- name: report-failure
when:
- input: "$(tasks.status)"
operator: in
values: ["Failed"]
taskRef:
name: gitea-set-status
params:
- name: CONTEXT
value: "$(params.JOB_NAME)"
- name: REPO_FULL_NAME
value: "$(params.REPO_OWNER)/$(params.REPO_NAME)"
- name: GITEA_HOST_URL
value: code.fizz.buzz
- name: SHA
value: "$(tasks.fetch-repository.results.commit)"
- name: DESCRIPTION
value: "Build $(params.JOB_NAME) has failed"
- name: STATE
value: failure
- name: TARGET_URL
value: "https://tekton.fizz.buzz/#/namespaces/$(context.pipelineRun.namespace)/pipelineruns/$(context.pipelineRun.name)"
- name: cargo-cache-autoclean
taskRef:
name: run-docker-image
workspaces:
- name: source
workspace: git-source
- name: cargo-cache
workspace: cargo-cache
params:
- name: command
value: [cargo, cache, --autoclean]
- name: args
value: []
- name: docker-image
value: "$(params.image-name):$(tasks.fetch-repository.results.commit)"
workspaces:
- name: git-source
- name: docker-credentials
- name: cargo-cache
workspaces:
- name: git-source
volumeClaimTemplate:
spec:
storageClassName: "nfs-client"
accessModes:
- ReadWriteOnce
resources:
requests:
storage: 10Gi
subPath: rust-source
- name: cargo-cache
persistentVolumeClaim:
claimName: organic-cargo-cache-test-foreign-document
- name: docker-credentials
secret:
secretName: harbor-plain
serviceAccountName: build-bot
params:
- name: image-name
value: "harbor.fizz.buzz/private/organic-test-foreign-document"
- name: path-to-image-context
value: docker/organic_test/
- name: path-to-dockerfile
value: docker/organic_test/Dockerfile

View File

@@ -83,7 +83,6 @@ spec:
value: "gcr.io/kaniko-project/executor:v1.12.1" value: "gcr.io/kaniko-project/executor:v1.12.1"
- name: EXTRA_ARGS - name: EXTRA_ARGS
value: value:
- --target=tester
- --cache=true - --cache=true
- --cache-copy-layers - --cache-copy-layers
- --cache-repo=harbor.fizz.buzz/kanikocache/cache - --cache-repo=harbor.fizz.buzz/kanikocache/cache
@@ -100,29 +99,41 @@ spec:
runAfter: runAfter:
- fetch-repository - fetch-repository
- name: run-image - name: run-image
taskRef: taskSpec:
name: run-docker-image metadata: {}
stepTemplate:
name: ""
workingDir: "$(workspaces.source.path)"
workspaces:
- name: source
mountPath: /source
- name: cargo-cache
mountPath: /usr/local/cargo/registry
optional: true
steps:
- name: run
image: "$(params.IMAGE)"
command: []
args:
[
--no-default-features,
--features,
compare,
--no-fail-fast,
--lib,
--test,
test_loader,
]
workspaces: workspaces:
- name: source - name: source
workspace: git-source workspace: git-source
- name: cargo-cache - name: cargo-cache
workspace: cargo-cache workspace: cargo-cache
params:
- name: IMAGE
value: "$(params.image-name):$(tasks.fetch-repository.results.commit)"
runAfter: runAfter:
- build-image - build-image
params:
- name: args
value:
[
--no-default-features,
--features,
compare,
--no-fail-fast,
--lib,
--test,
test_loader,
]
- name: docker-image
value: "$(params.image-name):$(tasks.fetch-repository.results.commit)"
finally: finally:
- name: report-success - name: report-success
when: when:

View File

@@ -16,13 +16,6 @@ spec:
skip_branches: skip_branches:
# We already run on every commit, so running when the semver tags get pushed is causing needless double-processing. # We already run on every commit, so running when the semver tags get pushed is causing needless double-processing.
- "^v[0-9]+\\.[0-9]+\\.[0-9]+$" - "^v[0-9]+\\.[0-9]+\\.[0-9]+$"
- name: rust-foreign-document-test
source: "pipeline-foreign-document-test.yaml"
# Override https-based url from lighthouse events.
clone_uri: "git@code.fizz.buzz:talexander/organic.git"
skip_branches:
# We already run on every commit, so running when the semver tags get pushed is causing needless double-processing.
- "^v[0-9]+\\.[0-9]+\\.[0-9]+$"
- name: rust-build - name: rust-build
source: "pipeline-rust-build.yaml" source: "pipeline-rust-build.yaml"
# Override https-based url from lighthouse events. # Override https-based url from lighthouse events.

View File

@@ -1,6 +1,6 @@
[package] [package]
name = "organic" name = "organic"
version = "0.1.9" version = "0.1.4"
authors = ["Tom Alexander <tom@fizz.buzz>"] authors = ["Tom Alexander <tom@fizz.buzz>"]
description = "An org-mode parser." description = "An org-mode parser."
edition = "2021" edition = "2021"
@@ -21,15 +21,9 @@ name = "organic"
path = "src/lib.rs" path = "src/lib.rs"
[[bin]] [[bin]]
# This bin exists for development purposes only. The real target of this crate is the library. # This bin exists for development purposes only. The real target of this crate is the library.
name = "parse" name = "compare"
path = "src/main.rs" path = "src/main.rs"
[[bin]]
# This bin exists for development purposes only. The real target of this crate is the library.
name = "compare"
path = "src/bin_compare.rs"
required-features = ["compare"]
[dependencies] [dependencies]
nom = "7.1.1" nom = "7.1.1"

View File

@@ -33,10 +33,6 @@ release:
clean: clean:
> cargo clean > cargo clean
.PHONY: format
format:
> $(MAKE) -C docker/cargo_fmt run
.PHONY: test .PHONY: test
test: test:
> cargo test --no-default-features --features compare --no-fail-fast --lib --test test_loader -- --test-threads $(TESTJOBS) > cargo test --no-default-features --features compare --no-fail-fast --lib --test test_loader -- --test-threads $(TESTJOBS)
@@ -46,10 +42,6 @@ dockertest:
> $(MAKE) -C docker/organic_test > $(MAKE) -C docker/organic_test
> docker run --init --rm -i -t --read-only -v "$$(readlink -f ./):/source:ro" --mount type=tmpfs,destination=/tmp --mount source=cargo-cache,target=/usr/local/cargo/registry --mount source=rust-cache,target=/target --env CARGO_TARGET_DIR=/target -w /source organic-test --no-default-features --features compare --no-fail-fast --lib --test test_loader -- --test-threads $(TESTJOBS) > docker run --init --rm -i -t --read-only -v "$$(readlink -f ./):/source:ro" --mount type=tmpfs,destination=/tmp --mount source=cargo-cache,target=/usr/local/cargo/registry --mount source=rust-cache,target=/target --env CARGO_TARGET_DIR=/target -w /source organic-test --no-default-features --features compare --no-fail-fast --lib --test test_loader -- --test-threads $(TESTJOBS)
.PHONY: foreign_document_test
foreign_document_test:
> $(MAKE) -C docker/organic_test run_foreign_document_test
.PHONY: dockerclean .PHONY: dockerclean
dockerclean: dockerclean:
# Delete volumes created for running the tests in docker. This does not touch anything related to the jaeger docker container. # Delete volumes created for running the tests in docker. This does not touch anything related to the jaeger docker container.

View File

@@ -2,84 +2,12 @@
Organic is an emacs-less implementation of an [org-mode](https://orgmode.org/) parser. Organic is an emacs-less implementation of an [org-mode](https://orgmode.org/) parser.
## Project Status ## Project Status
This project is still under HEAVY development. While the version remains v0.1.x the API will be changing often. Once we hit v0.2.x we will start following semver. This project is a personal learning project to grow my experience in [rust](https://www.rust-lang.org/). It is under development and at this time I would not recommend anyone use this code. The goal is to turn this into a project others can use, at which point more information will appear in this README.
Currently, the parser is able to correctly identify the start/end bounds of all the org-mode objects and elements (except table.el tables, org-mode tables are supported) but many of the interior properties are not yet populated.
### Project Goals
- We aim to provide perfect parity with the emacs org-mode parser. In that regard, any document that parses differently between Emacs and Organic is considered a bug.
- The parser should be fast. We're not doing anything special, but since this is written in Rust and natively compiled we should be able to beat the existing parsers.
- The parser should have minimal dependencies. This should reduce effort w.r.t.: security audits, legal compliance, portability.
- The parser should be usable everywhere. In the interest of getting org-mode used in as many places as possible, this parser should be usable by everyone everywhere. This means:
- It must have a permissive license for use in proprietary code bases.
- We will investigate compiling to WASM. This is an important goal of the project and will definitely happen, but only after the parser has a more stable API.
- We will investigate compiling to a C library for native linking to other code. This is more of a maybe-goal for the project.
### Project Non-Goals
- This project will not include an elisp engine since that would drastically increase the complexity of the code. Any features requiring an elisp engine will not be implemented (for example, Emacs supports embedded eval expressions in documents but this parser will never support that).
- This project is exclusively an org-mode **parser**. This limits its scope to roughly the output of `(org-element-parse-buffer)`. It will not render org-mode documents in other formats like HTML or LaTeX.
### Project Maybe-Goals
- table.el support. Currently we support org-mode tables but org-mode also allows table.el tables. So far, their use in org-mode documents seems rather uncommon so this is a low-priority feature.
- Document editing support. I do not anticipate any advanced editing features to make editing ergonomic, but it should be relatively easy to be able to parse an org-mode document and serialize it back into org-mode. This would enable cool features to be built on top of the library like auto-formatters. To accomplish this feature, We'd have to capture all of the various separators and whitespace that we are currently simply throwing away. This would add many additional fields to the parsed structs and it would add more noise to the parsers themselves, so I do not want to approach this feature until the parser is more complete since it would make modifications and refactoring more difficult.
### Supported Versions
This project targets the version of Emacs and Org-mode that are built into the [organic-test docker image](docker/organic_test/Dockerfile). This is newer than the version of Org-mode that shipped with Emacs 29.1. The parser itself does not depend on Emacs or Org-mode though, so this only matters for development purposes when running the automated tests that compare against upstream Org-mode.
## Using this library
TODO: Add section on using Organic as a library (which is the intended use for this project). This will be added when we have a bit more API stability since currently the library is under heavy development.
## Development
### The parse binary
This program takes org-mode input either streamed in on stdin or as paths to files passed in as arguments. It then parses them using Organic and dumps the result to stdout. This program is intended solely as a development tool. Examples:
```bash
cat /foo/bar.org | cargo run --bin parse
```
```bash
cargo build --profile release-lto
./target/release-lto/parse /foo/bar.org /lorem/ipsum.org
```
### The compare binary
This program takes org-mode input either streamed in on stdin or as paths to files passed in as arguments. It then parses them using Organic and the official Emacs Org-mode parser and compares the parse result. This program is intended solely as a development tool. Since org-mode is a moving target, it is recommended that you run this through docker since we pin the version of org-mode to a specific revision. Examples:
```bash
cat /foo/bar.org | ./scripts/run_docker_compare.bash
```
```bash
./scripts/run_docker_compare.bash /foo/bar.org /lorem/ipsum.org
```
Not recommended since it is not through docker:
```bash
cat /foo/bar.org | cargo run --features compare --bin compare
```
```bash
cargo build --profile release-lto --features compare
./target/release-lto/compare /foo/bar.org /lorem/ipsum.org
```
## Running the tests
There are three levels of tests for this repository: the standard tests, the autogenerated tests, and the foreign document tests.
### The standard tests
These are regular hand-written rust tests. These can be run with:
```bash
make unittest
```
### The auto-generated tests
These tests are automatically generated from the files in the `org_mode_samples` directory and they are still integrated with the rust/cargo testing framework. For each org-mode document in that folder, a test is generated that will parse the document with both Organic and the official Emacs Org-mode parser and then it will compare the parse results. Any deviation is considered a failure. Since org-mode is a moving target, it is recommended that you run these tests inside docker since the `organic-test` docker image is pinned to a specific revision of org-mode. These can be run with:
```bash
make dockertest
```
### The foreign document tests
These tests function the same as the auto-generated tests except they are **not** integrated with the rust/cargo testing framework and they involve comparing the parse of org-mode documents that live outside this repository. This allows us to test against a far greater variety of org-mode input documents without pulling massive sets of org-mode documents into this repository. The recommended way to run these tests is still through docker because it pins org-mode and the test documents to specific git revisions. These can be run with:
```bash
make foreign_document_test
```
## License ## License
This project is released under the public-domain-equivalent [0BSD license](https://www.tldrlegal.com/license/bsd-0-clause-license), however, this project has a couple permissively licensed non-public-domain-equivalent dependencies which require their copyright notices and/or license texts to be included. I am not a lawyer and this is not legal advice but it is my layperson's understanding that if you distribute a binary statically linking this library, you will need to abide by their terms since their code will also be linked in your binary. This project is released under the public-domain-equivalent [0BSD license](https://www.tldrlegal.com/license/bsd-0-clause-license). This license puts no restrictions on the use of this code (you do not even have to include the copyright notice or license text when using it). HOWEVER, this project has a couple permissively licensed dependencies which do require their copyright notices and/or license texts to be included. I am not a lawyer and this is not legal advice but it is my layperson's understanding that if you distribute a binary with this library linked in, you will need to abide by their terms since their code will also be linked in your binary. I try to keep the dependencies to a minimum and the most restrictive dependency I will ever include is a permissively licensed one.

View File

@@ -16,8 +16,7 @@ fn main() {
let destination = Path::new(&out_dir).join("tests.rs"); let destination = Path::new(&out_dir).join("tests.rs");
let mut test_file = File::create(&destination).unwrap(); let mut test_file = File::create(&destination).unwrap();
// Re-generate the tests if any org-mode files change write_header(&mut test_file);
println!("cargo:rerun-if-changed=org_mode_samples");
let test_files = WalkDir::new("org_mode_samples") let test_files = WalkDir::new("org_mode_samples")
.into_iter() .into_iter()
@@ -52,15 +51,32 @@ fn write_test(test_file: &mut File, test: &walkdir::DirEntry) {
.strip_suffix(".org") .strip_suffix(".org")
.expect("Should have .org extension") .expect("Should have .org extension")
.replace("/", "_"); .replace("/", "_");
let test_name = format!("autogen_{}", test_name);
if let Some(_reason) = is_expect_fail(test_name.as_str()) {
write!(test_file, "#[ignore]\n").unwrap();
}
write!( write!(
test_file, test_file,
include_str!("./tests/test_template"), include_str!("./tests/test_template"),
name = test_name, name = test_name,
path = test.path().display(), path = test.path().display()
expect_fail = is_expect_fail(test_name.as_str()) )
.map(|_| "#[ignore]\n") .unwrap();
.unwrap_or("") }
#[cfg(feature = "compare")]
fn write_header(test_file: &mut File) {
write!(
test_file,
r#"
#[feature(exit_status_error)]
use organic::compare_document;
use organic::parser::document;
use organic::emacs_parse_org_document;
use organic::parser::sexp::sexp_with_padding;
"#
) )
.unwrap(); .unwrap();
} }
@@ -68,8 +84,9 @@ fn write_test(test_file: &mut File, test: &walkdir::DirEntry) {
#[cfg(feature = "compare")] #[cfg(feature = "compare")]
fn is_expect_fail(name: &str) -> Option<&str> { fn is_expect_fail(name: &str) -> Option<&str> {
match name { match name {
"greater_element_drawer_drawer_with_headline_inside" => Some("Apparently lines with :end: become their own paragraph. This odd behavior needs to be investigated more."), "autogen_greater_element_drawer_drawer_with_headline_inside" => Some("Apparently lines with :end: become their own paragraph. This odd behavior needs to be investigated more."),
"element_container_priority_footnote_definition_dynamic_block" => Some("Apparently broken begin lines become their own paragraph."), "autogen_element_container_priority_footnote_definition_dynamic_block" => Some("Apparently broken begin lines become their own paragraph."),
"autogen_lesser_element_paragraphs_paragraph_with_backslash_line_breaks" => Some("The text we're getting out of the parse tree is already processed to remove line breaks, so our comparison needs to take that into account."),
_ => None, _ => None,
} }
} }

View File

@@ -6,7 +6,7 @@ all: build push
.PHONY: build .PHONY: build
build: build:
docker build -t $(IMAGE_NAME) -f Dockerfile . docker build -t $(IMAGE_NAME) -f Dockerfile ../../
.PHONY: push .PHONY: push
push: push:

View File

@@ -6,7 +6,7 @@ all: build push
.PHONY: build .PHONY: build
build: build:
docker build -t $(IMAGE_NAME) -f Dockerfile . docker build -t $(IMAGE_NAME) -f Dockerfile ../../
.PHONY: push .PHONY: push
push: push:

View File

@@ -1,5 +1,5 @@
FROM alpine:3.17 AS build FROM alpine:3.17 AS build
RUN apk add --no-cache build-base musl-dev git autoconf make texinfo gnutls-dev ncurses-dev gawk libgccjit-dev RUN apk add --no-cache build-base musl-dev git autoconf make texinfo gnutls-dev ncurses-dev gawk
FROM build AS build-emacs FROM build AS build-emacs
@@ -8,13 +8,13 @@ RUN git clone --depth 1 --branch $EMACS_VERSION https://git.savannah.gnu.org/git
WORKDIR /root/emacs WORKDIR /root/emacs
RUN mkdir /root/dist RUN mkdir /root/dist
RUN ./autogen.sh RUN ./autogen.sh
RUN ./configure --prefix /usr --without-x --without-sound --with-native-compilation=aot RUN ./configure --prefix /usr --without-x --without-sound
RUN make RUN make
RUN make DESTDIR="/root/dist" install RUN make DESTDIR="/root/dist" install
FROM build AS build-org-mode FROM build AS build-org-mode
ARG ORG_VERSION=c703541ffcc14965e3567f928de1683a1c1e33f6 ARG ORG_VERSION=7bdec435ff5d86220d13c431e799c5ed44a57da1
COPY --from=build-emacs /root/dist/ / COPY --from=build-emacs /root/dist/ /
RUN mkdir /root/dist RUN mkdir /root/dist
# Savannah does not allow fetching specific revisions, so we're going to have to put unnecessary load on their server by cloning main and then checking out the revision we want. # Savannah does not allow fetching specific revisions, so we're going to have to put unnecessary load on their server by cloning main and then checking out the revision we want.
@@ -25,83 +25,11 @@ RUN make compile
RUN make DESTDIR="/root/dist" install RUN make DESTDIR="/root/dist" install
FROM rustlang/rust:nightly-alpine3.17 AS tester FROM rustlang/rust:nightly-alpine3.17
ENV LANG=en_US.UTF-8 ENV LANG=en_US.UTF-8
RUN apk add --no-cache musl-dev ncurses gnutls libgccjit RUN apk add --no-cache musl-dev ncurses gnutls
RUN cargo install --locked --no-default-features --features ci-autoclean cargo-cache RUN cargo install --locked --no-default-features --features ci-autoclean cargo-cache
COPY --from=build-emacs /root/dist/ / COPY --from=build-emacs /root/dist/ /
COPY --from=build-org-mode /root/dist/ / COPY --from=build-org-mode /root/dist/ /
ENTRYPOINT ["cargo", "test"] ENTRYPOINT ["cargo", "test"]
FROM build as foreign-document-gather
ARG HOWARD_ABRAMS_DOT_FILES_VERSION=1b54fe75d74670dc7bcbb6b01ea560c45528c628
ARG HOWARD_ABRAMS_DOT_FILES_PATH=/foreign_documents/howardabrams/dot-files
ARG HOWARD_ABRAMS_DOT_FILES_REPO=https://github.com/howardabrams/dot-files.git
RUN mkdir /foreign_documents
RUN mkdir -p $HOWARD_ABRAMS_DOT_FILES_PATH && git -C $HOWARD_ABRAMS_DOT_FILES_PATH init --initial-branch=main && git -C $HOWARD_ABRAMS_DOT_FILES_PATH remote add origin $HOWARD_ABRAMS_DOT_FILES_REPO && git -C $HOWARD_ABRAMS_DOT_FILES_PATH fetch origin $HOWARD_ABRAMS_DOT_FILES_VERSION && git -C $HOWARD_ABRAMS_DOT_FILES_PATH checkout FETCH_HEAD
ARG HOWARD_ABRAMS_HAMACS_VERSION=da51188cc195d41882175d412fe40a8bc5730c5c
ARG HOWARD_ABRAMS_HAMACS_PATH=/foreign_documents/howardabrams/hamacs
ARG HOWARD_ABRAMS_HAMACS_REPO=https://github.com/howardabrams/hamacs.git
RUN mkdir -p $HOWARD_ABRAMS_HAMACS_PATH && git -C $HOWARD_ABRAMS_HAMACS_PATH init --initial-branch=main && git -C $HOWARD_ABRAMS_HAMACS_PATH remote add origin $HOWARD_ABRAMS_HAMACS_REPO && git -C $HOWARD_ABRAMS_HAMACS_PATH fetch origin $HOWARD_ABRAMS_HAMACS_VERSION && git -C $HOWARD_ABRAMS_HAMACS_PATH checkout FETCH_HEAD
ARG HOWARD_ABRAMS_DEMO_IT_VERSION=e399fd7ceb73caeae7cb50b247359bafcaee2a3f
ARG HOWARD_ABRAMS_DEMO_IT_PATH=/foreign_documents/howardabrams/demo-it
ARG HOWARD_ABRAMS_DEMO_IT_REPO=https://github.com/howardabrams/demo-it.git
RUN mkdir -p $HOWARD_ABRAMS_DEMO_IT_PATH && git -C $HOWARD_ABRAMS_DEMO_IT_PATH init --initial-branch=main && git -C $HOWARD_ABRAMS_DEMO_IT_PATH remote add origin $HOWARD_ABRAMS_DEMO_IT_REPO && git -C $HOWARD_ABRAMS_DEMO_IT_PATH fetch origin $HOWARD_ABRAMS_DEMO_IT_VERSION && git -C $HOWARD_ABRAMS_DEMO_IT_PATH checkout FETCH_HEAD
ARG HOWARD_ABRAMS_MAGIT_DEMO_VERSION=59e82f6bc7c18f550478d86a8f680c3f2da66985
ARG HOWARD_ABRAMS_MAGIT_DEMO_PATH=/foreign_documents/howardabrams/magit-demo
ARG HOWARD_ABRAMS_MAGIT_DEMO_REPO=https://github.com/howardabrams/magit-demo.git
RUN mkdir -p $HOWARD_ABRAMS_MAGIT_DEMO_PATH && git -C $HOWARD_ABRAMS_MAGIT_DEMO_PATH init --initial-branch=main && git -C $HOWARD_ABRAMS_MAGIT_DEMO_PATH remote add origin $HOWARD_ABRAMS_MAGIT_DEMO_REPO && git -C $HOWARD_ABRAMS_MAGIT_DEMO_PATH fetch origin $HOWARD_ABRAMS_MAGIT_DEMO_VERSION && git -C $HOWARD_ABRAMS_MAGIT_DEMO_PATH checkout FETCH_HEAD
ARG HOWARD_ABRAMS_PDX_EMACS_HACKERS_VERSION=bfb7bd640fdf0ce3def21f9fc591ed35d776b26d
ARG HOWARD_ABRAMS_PDX_EMACS_HACKERS_PATH=/foreign_documents/howardabrams/pdx-emacs-hackers
ARG HOWARD_ABRAMS_PDX_EMACS_HACKERS_REPO=https://github.com/howardabrams/pdx-emacs-hackers.git
RUN mkdir -p $HOWARD_ABRAMS_PDX_EMACS_HACKERS_PATH && git -C $HOWARD_ABRAMS_PDX_EMACS_HACKERS_PATH init --initial-branch=main && git -C $HOWARD_ABRAMS_PDX_EMACS_HACKERS_PATH remote add origin $HOWARD_ABRAMS_PDX_EMACS_HACKERS_REPO && git -C $HOWARD_ABRAMS_PDX_EMACS_HACKERS_PATH fetch origin $HOWARD_ABRAMS_PDX_EMACS_HACKERS_VERSION && git -C $HOWARD_ABRAMS_PDX_EMACS_HACKERS_PATH checkout FETCH_HEAD
ARG HOWARD_ABRAMS_FLORA_SIMULATOR_VERSION=50de13068722b9e3878f8598b749b7ccd14e7f8e
ARG HOWARD_ABRAMS_FLORA_SIMULATOR_PATH=/foreign_documents/howardabrams/flora-simulator
ARG HOWARD_ABRAMS_FLORA_SIMULATOR_REPO=https://github.com/howardabrams/flora-simulator.git
RUN mkdir -p $HOWARD_ABRAMS_FLORA_SIMULATOR_PATH && git -C $HOWARD_ABRAMS_FLORA_SIMULATOR_PATH init --initial-branch=main && git -C $HOWARD_ABRAMS_FLORA_SIMULATOR_PATH remote add origin $HOWARD_ABRAMS_FLORA_SIMULATOR_REPO && git -C $HOWARD_ABRAMS_FLORA_SIMULATOR_PATH fetch origin $HOWARD_ABRAMS_FLORA_SIMULATOR_VERSION && git -C $HOWARD_ABRAMS_FLORA_SIMULATOR_PATH checkout FETCH_HEAD
ARG HOWARD_ABRAMS_LITERATE_DEVOPS_DEMO_VERSION=2d7a5e41001a1adf7ec24aeb6acc8525a72d7892
ARG HOWARD_ABRAMS_LITERATE_DEVOPS_DEMO_PATH=/foreign_documents/howardabrams/literate-devops-demo
ARG HOWARD_ABRAMS_LITERATE_DEVOPS_DEMO_REPO=https://github.com/howardabrams/literate-devops-demo.git
RUN mkdir -p $HOWARD_ABRAMS_LITERATE_DEVOPS_DEMO_PATH && git -C $HOWARD_ABRAMS_LITERATE_DEVOPS_DEMO_PATH init --initial-branch=main && git -C $HOWARD_ABRAMS_LITERATE_DEVOPS_DEMO_PATH remote add origin $HOWARD_ABRAMS_LITERATE_DEVOPS_DEMO_REPO && git -C $HOWARD_ABRAMS_LITERATE_DEVOPS_DEMO_PATH fetch origin $HOWARD_ABRAMS_LITERATE_DEVOPS_DEMO_VERSION && git -C $HOWARD_ABRAMS_LITERATE_DEVOPS_DEMO_PATH checkout FETCH_HEAD
ARG HOWARD_ABRAMS_CLOJURE_YESQL_XP_VERSION=b651c7f8b47b2710e99fce9652980902bbc1c6c9
ARG HOWARD_ABRAMS_CLOJURE_YESQL_XP_PATH=/foreign_documents/howardabrams/clojure-yesql-xp
ARG HOWARD_ABRAMS_CLOJURE_YESQL_XP_REPO=https://github.com/howardabrams/clojure-yesql-xp.git
RUN mkdir -p $HOWARD_ABRAMS_CLOJURE_YESQL_XP_PATH && git -C $HOWARD_ABRAMS_CLOJURE_YESQL_XP_PATH init --initial-branch=main && git -C $HOWARD_ABRAMS_CLOJURE_YESQL_XP_PATH remote add origin $HOWARD_ABRAMS_CLOJURE_YESQL_XP_REPO && git -C $HOWARD_ABRAMS_CLOJURE_YESQL_XP_PATH fetch origin $HOWARD_ABRAMS_CLOJURE_YESQL_XP_VERSION && git -C $HOWARD_ABRAMS_CLOJURE_YESQL_XP_PATH checkout FETCH_HEAD
ARG HOWARD_ABRAMS_VEEP_VERSION=e37fcf63a5c4a526255735ee34955528b3b280ae
ARG HOWARD_ABRAMS_VEEP_PATH=/foreign_documents/howardabrams/veep
ARG HOWARD_ABRAMS_VEEP_REPO=https://github.com/howardabrams/veep.git
RUN mkdir -p $HOWARD_ABRAMS_VEEP_PATH && git -C $HOWARD_ABRAMS_VEEP_PATH init --initial-branch=main && git -C $HOWARD_ABRAMS_VEEP_PATH remote add origin $HOWARD_ABRAMS_VEEP_REPO && git -C $HOWARD_ABRAMS_VEEP_PATH fetch origin $HOWARD_ABRAMS_VEEP_VERSION && git -C $HOWARD_ABRAMS_VEEP_PATH checkout FETCH_HEAD
ARG DOOMEMACS_VERSION=42d5fd83504f8aa80f3248036006fbcd49222943
ARG DOOMEMACS_PATH=/foreign_documents/doomemacs
ARG DOOMEMACS_REPO=https://github.com/doomemacs/doomemacs.git
RUN mkdir -p $DOOMEMACS_PATH && git -C $DOOMEMACS_PATH init --initial-branch=main && git -C $DOOMEMACS_PATH remote add origin $DOOMEMACS_REPO && git -C $DOOMEMACS_PATH fetch origin $DOOMEMACS_VERSION && git -C $DOOMEMACS_PATH checkout FETCH_HEAD
ARG WORG_VERSION=ba6cda890f200d428a5d68e819eef15b5306055f
ARG WORG_PATH=/foreign_documents/worg
ARG WORG_REPO=https://git.sr.ht/~bzg/worg
RUN mkdir -p $WORG_PATH && git -C $WORG_PATH init --initial-branch=main && git -C $WORG_PATH remote add origin $WORG_REPO && git -C $WORG_PATH fetch origin $WORG_VERSION && git -C $WORG_PATH checkout FETCH_HEAD
FROM tester as foreign-document-test
RUN apk add --no-cache bash coreutils
RUN mkdir /foreign_documents
COPY --from=foreign-document-gather /foreign_documents/howardabrams /foreign_documents/howardabrams
COPY --from=foreign-document-gather /foreign_documents/doomemacs /foreign_documents/doomemacs
COPY --from=foreign-document-gather /foreign_documents/worg /foreign_documents/worg
COPY --from=build-org-mode /root/org-mode /foreign_documents/org-mode
COPY --from=build-emacs /root/emacs /foreign_documents/emacs
COPY foreign_document_test_entrypoint.sh /entrypoint.sh
RUN chmod +x /entrypoint.sh
ENTRYPOINT ["/entrypoint.sh"]

View File

@@ -6,11 +6,7 @@ all: build push
.PHONY: build .PHONY: build
build: build:
docker build -t $(IMAGE_NAME) -f Dockerfile --target tester . docker build -t $(IMAGE_NAME) -f Dockerfile ../../
.PHONY: build_foreign_document_test
build_foreign_document_test:
docker build -t $(IMAGE_NAME)-foreign-document -f Dockerfile --target foreign-document-test .
.PHONY: push .PHONY: push
push: push:
@@ -38,7 +34,3 @@ run: build
.PHONY: shell .PHONY: shell
shell: build shell: build
docker run --rm -i -t --entrypoint /bin/sh --mount type=tmpfs,destination=/tmp -v "$$(readlink -f ../../):/source:ro" --workdir=/source --mount source=cargo-cache,target=/usr/local/cargo/registry --mount source=rust-cache,target=/target --env CARGO_TARGET_DIR=/target $(IMAGE_NAME) docker run --rm -i -t --entrypoint /bin/sh --mount type=tmpfs,destination=/tmp -v "$$(readlink -f ../../):/source:ro" --workdir=/source --mount source=cargo-cache,target=/usr/local/cargo/registry --mount source=rust-cache,target=/target --env CARGO_TARGET_DIR=/target $(IMAGE_NAME)
.PHONY: run_foreign_document_test
run_foreign_document_test: build_foreign_document_test
docker run --rm --init --read-only --mount type=tmpfs,destination=/tmp -v "$$(readlink -f ../../):/source:ro" --workdir=/source --mount source=cargo-cache,target=/usr/local/cargo/registry --mount source=rust-cache,target=/target --env CARGO_TARGET_DIR=/target $(IMAGE_NAME)-foreign-document

View File

@@ -1,149 +0,0 @@
#!/usr/bin/env bash
#
# Run the Organic compare script against a series of documents sourced from exterior places.
set -euo pipefail
IFS=$'\n\t'
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
REALPATH=$(command -v uu-realpath || command -v realpath)
function log {
(>&2 echo "${@}")
}
function die {
local status_code="$1"
shift
(>&2 echo "${@}")
exit "$status_code"
}
function main {
cargo build --no-default-features --features compare --profile release-lto
if [ "${CARGO_TARGET_DIR:-}" = "" ]; then
CARGO_TARGET_DIR=$(realpath target/)
fi
PARSE="${CARGO_TARGET_DIR}/release-lto/compare"
local all_status=0
set +e
(run_compare_function "org-mode" compare_all_org_document "/foreign_documents/org-mode")
if [ "$?" -ne 0 ]; then all_status=1; fi
(run_compare_function "emacs" compare_all_org_document "/foreign_documents/emacs")
if [ "$?" -ne 0 ]; then all_status=1; fi
(run_compare_function "worg" compare_all_org_document "/foreign_documents/worg")
if [ "$?" -ne 0 ]; then all_status=1; fi
(run_compare_function "howard_abrams" compare_howard_abrams)
if [ "$?" -ne 0 ]; then all_status=1; fi
(run_compare_function "doomemacs" compare_all_org_document "/foreign_documents/doomemacs")
if [ "$?" -ne 0 ]; then all_status=1; fi
set -e
if [ "$all_status" -ne 0 ]; then
red_text "Some tests failed."
else
green_text "All tests passed."
fi
return "$all_status"
}
function green_text {
(IFS=' '; printf '\x1b[38;2;0;255;0m%s\x1b[0m' "${*}")
}
function red_text {
(IFS=' '; printf '\x1b[38;2;255;0;0m%s\x1b[0m' "${*}")
}
function yellow_text {
(IFS=' '; printf '\x1b[38;2;255;255;0m%s\x1b[0m' "${*}")
}
function indent {
local depth="$1"
local scaled_depth=$((depth * 2))
shift 1
local prefix
prefix=$(printf -- "%${scaled_depth}s")
while read -r l; do
(IFS=' '; printf -- '%s%s\n' "$prefix" "$l")
done
}
function run_compare_function {
local name="$1"
local stdoutput
shift 1
set +e
stdoutput=$("${@}")
local status=$?
set -e
if [ "$status" -eq 0 ]; then
echo "$(green_text "GOOD") $name"
indent 1 <<<"$stdoutput"
else
echo "$(red_text "FAIL") $name"
indent 1 <<<"$stdoutput"
return 1
fi
}
function compare_all_org_document {
local root_dir="$1"
local target_document
local all_status=0
while read target_document; do
local relative_path
relative_path=$($REALPATH --relative-to "$root_dir" "$target_document")
set +e
(run_compare "$relative_path" "$target_document")
if [ "$?" -ne 0 ]; then all_status=1; fi
set -e
done<<<"$(find "$root_dir" -type f -iname '*.org' | sort)"
return "$all_status"
}
function run_compare {
local name="$1"
local target_document="$2"
set +e
($PARSE "$target_document" &> /dev/null)
local status=$?
set -e
if [ "$status" -eq 0 ]; then
echo "$(green_text "GOOD") $name"
else
echo "$(red_text "FAIL") $name"
return 1
fi
}
function compare_howard_abrams {
local all_status=0
set +e
(run_compare_function "dot-files" compare_all_org_document "/foreign_documents/howardabrams/dot-files")
if [ "$?" -ne 0 ]; then all_status=1; fi
(run_compare_function "hamacs" compare_all_org_document "/foreign_documents/howardabrams/hamacs")
if [ "$?" -ne 0 ]; then all_status=1; fi
(run_compare_function "demo-it" compare_all_org_document "/foreign_documents/howardabrams/demo-it")
if [ "$?" -ne 0 ]; then all_status=1; fi
(run_compare_function "magit-demo" compare_all_org_document "/foreign_documents/howardabrams/magit-demo")
if [ "$?" -ne 0 ]; then all_status=1; fi
(run_compare_function "pdx-emacs-hackers" compare_all_org_document "/foreign_documents/howardabrams/pdx-emacs-hackers")
if [ "$?" -ne 0 ]; then all_status=1; fi
(run_compare_function "flora-simulator" compare_all_org_document "/foreign_documents/howardabrams/flora-simulator")
if [ "$?" -ne 0 ]; then all_status=1; fi
(run_compare_function "literate-devops-demo" compare_all_org_document "/foreign_documents/howardabrams/literate-devops-demo")
if [ "$?" -ne 0 ]; then all_status=1; fi
(run_compare_function "clojure-yesql-xp" compare_all_org_document "/foreign_documents/howardabrams/clojure-yesql-xp")
if [ "$?" -ne 0 ]; then all_status=1; fi
(run_compare_function "veep" compare_all_org_document "/foreign_documents/howardabrams/veep")
if [ "$?" -ne 0 ]; then all_status=1; fi
set -e
return "$all_status"
}
main "${@}"

View File

@@ -25,4 +25,3 @@ This could significantly reduce our calls to exit matchers.
I think targets would break this. I think targets would break this.
The exit matchers are already implicitly building this behavior since they should all exit very early when the starting character is wrong. Putting this logic in a centralized place, far away from where those characters are actually going to be used, is unfortunate for readability. The exit matchers are already implicitly building this behavior since they should all exit very early when the starting character is wrong. Putting this logic in a centralized place, far away from where those characters are actually going to be used, is unfortunate for readability.
** Use exit matcher to cut off trailing whitespace instead of re-matching in plain lists.

View File

@@ -1,7 +0,0 @@
* Autogen tests
The autogen tests are the tests automatically generated to compare the output of Organic vs the upstream Emacs Org-mode parser using the sample documents in the =org_mode_samples= folder. They will have a prefix based on the settings for each test.
- default :: The test is run with the default settings (The upstream Emacs Org-mode determines the default settings)
- la :: Short for "list alphabetic". Enables alphabetic plain lists.
- t# :: Sets the tab-width to # (as in t4 sets the tab-width to 4).
- odd :: Sets the org-odd-levels-only setting to true (meaning "odd" as opposed to "oddeven").

View File

@@ -1 +0,0 @@
This folder contains org-mode documents that get automatically included as tests using build.rs.

View File

@@ -1 +0,0 @@
#+CATEGORY: theory

View File

@@ -1,5 +0,0 @@
#+CATEGORY: foo
#+CATEGORY: bar
#+begin_src text
#+CATEGORY: baz
#+end_src

View File

@@ -1,3 +0,0 @@
#+BEGIN: timestamp :format "%Y-%m-%d %H:%M"
#+END

View File

@@ -1,8 +0,0 @@
* Footnotes
[fn:1]
#+BEGIN_EXAMPLE
baz
#+END_EXAMPLE

View File

@@ -1,5 +0,0 @@
#+begin_quote
foo
#+end_quote

View File

@@ -1,3 +0,0 @@
#+begin_defun foo bar baz
lorem
#+end_defun

View File

@@ -1,22 +0,0 @@
# An ordered list starting at 3
1. [@3] foo
# An ordered list starting at 11
1. [@D] bar
# An ordered list starting at 1 with the contents of "[@kk] baz"
1. [@kk] baz
# A paragraph when org-list-allow-alphabetical is nil
m. lorem
# A paragraph when org-list-allow-alphabetical is nil
m. [@k] ipsum
# An unordered list with :counter set to 3
- [@3] dolar

View File

@@ -1,30 +0,0 @@
# Alphabetic lists larger than 26 elements should become numbered. From M-x describe-variable org-list-allow-alphabetical:
#
# > Lists with more than 26 items will fallback to standard numbering.
a. 1
a. 2
a. 3
a. 4
a. 5
a. 6
a. 7
a. 8
a. 9
a. 10
a. 11
a. 12
a. 13
a. 14
a. 15
a. 16
a. 17
a. 18
a. 19
a. 20
a. 21
a. 22
a. 23
a. 24
a. 25
a. 26
a. 27

View File

@@ -1,3 +0,0 @@
# These are only allowed by configuring org-list-allow-alphabetical which the automated tests are not currently set up to do, so this will parse as a paragraph:
a. foo
b. bar

View File

@@ -1,2 +0,0 @@
3. [@3] foo
4. bar

View File

@@ -1,6 +0,0 @@
- foo ::
- bar ::
baz

View File

@@ -1,2 +1 @@
- {{{foo(bar)}}} :: baz - {{{foo(bar)}}} :: baz
- =foo= :: bar

View File

@@ -1,3 +0,0 @@
- foo :: bar
- foo :: bar
- foo :: bar

View File

@@ -1,2 +0,0 @@
- =foo :: bar= :: baz
- lorem :: ipsum :: dolar

View File

@@ -1,5 +1,3 @@
1. 1.
2. 2.
3. 3.
* headline

View File

@@ -1,3 +0,0 @@
1. foo
- bar
- lorem :: ipsum

View File

@@ -1,2 +0,0 @@
# Since this is an ordered list, the text before the " :: " is NOT parsed as a tag.
1. foo :: bar

View File

@@ -1,5 +0,0 @@
# "lorem" is prefixed by a tab instead of spaces, so the editor's tab-width value determines whether lorem is a sibling of baz (tab-width 8), a sibling of bar (tab-width < 8), or a child of baz (tab-width > 8).
1. foo
1. bar
1. baz
1. lorem

View File

@@ -1,6 +0,0 @@
* Overwrite
:PROPERTIES:
:header-args: :var foo="lorem"
:header-args:emacs-lisp: :var bar="ipsum"
:header-args:emacs-lisp+: :results silent :var baz=7
:END:

View File

@@ -1,6 +0,0 @@
# The STARTUP directive here instructs org-mode to align tables which emacs normally does when opening the file. Since Organic is solely a parser, we have no business editing the org-mode document so Organic does not handle aligning tables, so in order for this test to pass, we have to avoid that behavior in Emacs.
#+STARTUP: align
|foo|bar|
|-
|lorem|ipsum|

View File

@@ -1,6 +0,0 @@
src_elisp{(bar)}
*src_elisp{(bar)}*
| foo *bar* |
| foo src_elisp{(bar)} |
| foo *src_elisp{(bar)}* |

View File

@@ -1,4 +0,0 @@
| Name | Value |
|------+-------|
| foo | bar |
#+tblfm:

View File

@@ -1,8 +0,0 @@
| Name | Price | Quantity | Total |
|------+-------+----------+-------|
| foo | 7 | 4 | 28 |
| bar | 3.5 | 3 | 10.5 |
|------+-------+----------+-------|
| | | 7 | 38.5 |
#+tblfm: $4=$2*$3::@>$4=vsum(@2..@-1)
#+tblfm: @>$3=vsum(@2..@-1)

View File

@@ -1,5 +1,4 @@
# Comment # Comment
#
# indented line # indented line
# At the top of the file # At the top of the file

View File

@@ -1,6 +0,0 @@
%%(foo
)
%%(bar ; baz
lorem

View File

@@ -1,2 +0,0 @@
# Fixed width areas must begin with colon followed by a space, not a tab, so this is not a fixed width area.
: foo

View File

@@ -1,7 +0,0 @@
# This test is to prove that the parser works with affiliated keywords that have both a shorter and longer version.
#+results:
#+result:
#+begin_latex
\foo
#+end_latex

View File

@@ -1 +0,0 @@
#+call: foo(bar="baz")

View File

@@ -1 +0,0 @@
#+title:foo:bar: baz: lorem: ipsum

View File

@@ -1,2 +0,0 @@
#+begin_src
#+end_src

View File

@@ -1,4 +0,0 @@
# There are trailing spaces after the begin and end src lines
#+begin_src
echo "this is a source block."
#+end_src

View File

@@ -1,3 +0,0 @@
*[fn:: /abcdef[fn::ghijklmnopqrstuvw]xyz/ r]*
*[fn:: /abcdef[fn::ghijk *lmnopq* rstuvw]xyz/ r]*

View File

@@ -1,11 +0,0 @@
# Should be a link:
https://en.wikipedia.org/wiki/Shebang_(Unix)
# No closing parenthesis, so link ends at underscore.
https://en.wikipedia.org/wiki/Shebang_(Unix
# Parenthesis only allowed to depth of 2 so link ends at underscore.
https://en.wikipedia.org/wiki/Shebang_(((Unix)))
# Even though they eventually become balanced, we hit negative parenthesis depth so link ends at )
https://en.wikipedia.org/wiki/Shebang)Unix(

View File

@@ -1 +0,0 @@
mailto:foo@bar.baz .

View File

@@ -1,3 +0,0 @@
<<<Foo Bar Baz>>>
foo bar baz

View File

@@ -1,6 +0,0 @@
<<<foo bar baz>>>
foo
bar
baz

View File

@@ -1 +0,0 @@
[[elisp:(local-set-key "\M-\x" 'foo-bar-baz)]]

View File

@@ -1 +0,0 @@
[[https://en.wikipedia.org/wiki/Shebang_(Unix)]]

View File

@@ -1 +0,0 @@
[[[http://foo.bar/baz][lorem]]]

View File

@@ -1,4 +0,0 @@
[/]
[/2]
[3/]
[%]

View File

@@ -1,7 +0,0 @@
# Even though *exporting* honors the setting to require braces for subscript/superscript, the official org-mode parser still parses subscripts and superscripts.
#+OPTIONS: ^:{}
foo_this isn't a subscript when exported due to lack of braces (but its still a subscript during parsing)
bar_{this is a subscript}

View File

@@ -1,13 +0,0 @@
foo_(bar)
foo_(b(ar)
foo_(b{ar)
foo_{b(ar}
foo_(b(a)r)
foo_b(a)r
foo_(b+ar)

View File

@@ -1 +0,0 @@
foo ** bar ** baz

View File

@@ -1 +0,0 @@
foo ~~ bar ~~ baz

View File

@@ -1,4 +0,0 @@
# Since "foos" has an extra "s", this does not match the target.
the foos bar
The <<<foo>>> and stuff.

View File

@@ -1,4 +1 @@
foo ==>bar=. foo ==>bar=.
# This uses a zero-width space to escape the equals signs to make the verbatim not end.
=lorem == ipsum=

View File

@@ -1,7 +0,0 @@
# All the marks for repeater and warning delay
[1970-01-01 Thu 8:15-13:15foo +1h -2h]
[1970-01-01 Thu 8:15-13:15foo ++1d -2d]
[1970-01-01 Thu 8:15-13:15foo .+1w -2w]
[1970-01-01 Thu 8:15-13:15foo +1m --2m]
[1970-01-01 Thu 8:15-13:15foo ++1y --2y]
[1970-01-01 Thu 8:15-13:15foo .+1d --2h]

View File

@@ -2,17 +2,13 @@
<%%(foo bar baz)> <%%(foo bar baz)>
# active # active
<1970-01-01 Thu 8:15rest +1w -1d> <1970-01-01 Thu 8:15rest +1w -1d>
# Any value for "REST" in the first timestamp makes this a regular timestamp rather than a time range.
<1970-01-01 Thu 8:15rest-13:15otherrest +1w -1d>
# inactive # inactive
[1970-01-01 Thu 8:15rest +1w -1d] [1970-01-01 Thu 8:15rest +1w -1d]
# Any value for "REST" in the first timestamp makes this a regular timestamp rather than a time range.
[1970-01-01 Thu 8:15rest-13:15otherrest +1w -1d]
# active date range # active date range
<1970-01-01 Thu 8:15rest +1w -1d>--<1970-01-01 Thu 8:15rest +1w -1d> <1970-01-01 Thu 8:15rest +1w -1d>--<1970-01-01 Thu 8:15rest +1w -1d>
# active time range # active time range
<1970-01-01 Thu 8:15-13:15otherrest +1w -1d> <1970-01-01 Thu 8:15rest-13:15otherrest +1w -1d>
# inactive date range # inactive date range
[1970-01-01 Thu 8:15rest +1w -1d]--[1970-01-01 Thu 8:15rest +1w -1d] [1970-01-01 Thu 8:15rest +1w -1d]--[1970-01-01 Thu 8:15rest +1w -1d]
# inactive time range # inactive time range
[1970-01-01 Thu 8:15-13:15otherrest +1w -1d] [1970-01-01 Thu 8:15rest-13:15otherrest +1w -1d]

View File

@@ -1,2 +0,0 @@
# This should be a malformed timestamp according to the current org-mode documentation but it is accepted anyway (with no repeater).
<1970-01-01 Thu ++y>

View File

@@ -1,2 +0,0 @@
* TODO [#A] COMMENT foo bar
baz

View File

@@ -1,2 +0,0 @@
* DONE
*

View File

@@ -1,6 +0,0 @@
#+TODO: TODO(t) INPROGRESS(i/!) | DONE(d!) CANCELED(c@/!)
# ! : Log changes leading to this state.
# @ : Log changes leading to this state and prompt for a comment to include.
# /! : Log changes leaving this state if and only if to a state that does not log. This can be combined with the above like WAIT(w!/!) or DELAYED(d@/!)
* INPROGRESS
- State "TODO" from "INPROGRESS" [2023-09-14 Thu 02:13]

View File

@@ -1,2 +0,0 @@
<<<Footnotes>>> and stuff
* Footnotes

View File

@@ -1,3 +0,0 @@
* FOOTNOTES
* Footnotes
* footnotes

View File

@@ -1,2 +0,0 @@
* Footnotes
* Footnotes

View File

@@ -1,3 +0,0 @@
* Foo
* Footnotes :foo:bar:
* Footnotes and stuff

View File

@@ -1,8 +0,0 @@
#+STARTUP: odd
* Foo
***** Bar
* Baz
*** Lorem
* Ipsum
**** Dolar
***** Cat

View File

@@ -1,4 +0,0 @@
* DONE foo
DEADLINE: <2023-09-08 Fri>
* DONE bar

View File

@@ -4,23 +4,10 @@ set -euo pipefail
IFS=$'\n\t' IFS=$'\n\t'
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
: ${PROFILE:="perf"} cd "$DIR/../"
function main { RUSTFLAGS="-C opt-level=0" cargo build --no-default-features
local additional_flags=() valgrind --tool=callgrind --callgrind-out-file=callgrind.out target/debug/compare
if [ "$PROFILE" = "dev" ] || [ "$PROFILE" = "debug" ]; then
PROFILE="debug"
else
additional_flags+=(--profile "$PROFILE")
# We have to disable avx512 because valgrind does not yet support it.
export RUSTFLAGS="-C target-feature=-avx512"
fi
(cd "$DIR/../" && RUSTFLAGS="-C target-cpu=x86-64-v3" cargo build --no-default-features "${additional_flags[@]}") echo "You probably want to run:"
valgrind --tool=callgrind --callgrind-out-file="$DIR/../callgrind.out" "$DIR/../target/${PROFILE}/parse" "${@}" echo "callgrind_annotate --auto=yes callgrind.out"
echo "You probably want to run:"
echo "callgrind_annotate --auto=yes '$DIR/../callgrind.out'"
}
main "${@}"

View File

@@ -6,6 +6,8 @@ DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
: ${PROFILE:="perf"} : ${PROFILE:="perf"}
cd "$DIR/../"
function main { function main {
local additional_flags=() local additional_flags=()
if [ "$PROFILE" = "dev" ] || [ "$PROFILE" = "debug" ]; then if [ "$PROFILE" = "dev" ] || [ "$PROFILE" = "debug" ]; then
@@ -13,12 +15,12 @@ function main {
else else
additional_flags+=(--profile "$PROFILE") additional_flags+=(--profile "$PROFILE")
fi fi
(cd "$DIR/../" && cargo build --no-default-features "${additional_flags[@]}") cargo build --no-default-features "${additional_flags[@]}"
perf record --freq=2000 --call-graph dwarf --output="$DIR/../perf.data" "$DIR/../target/${PROFILE}/parse" "${@}" perf record --freq=2000 --call-graph dwarf --output=perf.data target/${PROFILE}/compare
# Convert to a format firefox will read # Convert to a format firefox will read
# flags to consider --show-info # flags to consider --show-info
perf script -F +pid --input "$DIR/../perf.data" > "$DIR/../perf.firefox" perf script -F +pid --input perf.data > perf.firefox
echo "You probably want to go to https://profiler.firefox.com/" echo "You probably want to go to https://profiler.firefox.com/"
echo "Either that or run hotspot" echo "Either that or run hotspot"

View File

@@ -8,29 +8,15 @@ DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
: ${TRACE:="NO"} # or YES to send traces to jaeger : ${TRACE:="NO"} # or YES to send traces to jaeger
: ${BACKTRACE:="NO"} # or YES to print a rust backtrace when panicking : ${BACKTRACE:="NO"} # or YES to print a rust backtrace when panicking
: ${NO_COLOR:=""} # Set to anything to disable color output : ${NO_COLOR:=""} # Set to anything to disable color output
: ${PROFILE:="debug"}
cd "$DIR/../"
REALPATH=$(command -v uu-realpath || command -v realpath) REALPATH=$(command -v uu-realpath || command -v realpath)
MAKE=$(command -v gmake || command -v make) MAKE=$(command -v gmake || command -v make)
############## Setup #########################
function die {
local status_code="$1"
shift
(>&2 echo "${@}")
exit "$status_code"
}
function log {
(>&2 echo "${@}")
}
############## Program #########################
function main { function main {
build_container build_container
launch_container "${@}" launch_container
} }
function build_container { function build_container {
@@ -39,6 +25,7 @@ function build_container {
function launch_container { function launch_container {
local additional_flags=() local additional_flags=()
local additional_args=()
local features=(compare) local features=(compare)
if [ "$NO_COLOR" != "" ]; then if [ "$NO_COLOR" != "" ]; then
@@ -52,8 +39,11 @@ function launch_container {
fi fi
if [ "$SHELL" != "YES" ]; then if [ "$SHELL" != "YES" ]; then
local features_joined=$(IFS=","; echo "${features[*]}")
additional_args+=(cargo run --no-default-features --features "$features_joined")
additional_flags+=(--read-only) additional_flags+=(--read-only)
else else
additional_args+=(/bin/sh)
additional_flags+=(-t) additional_flags+=(-t)
fi fi
@@ -61,51 +51,7 @@ function launch_container {
additional_flags+=(--env RUST_BACKTRACE=full) additional_flags+=(--env RUST_BACKTRACE=full)
fi fi
if [ "$SHELL" = "YES" ]; then docker run "${additional_flags[@]}" --init --rm -i --mount type=tmpfs,destination=/tmp -v "$($REALPATH ./):/source:ro" --mount source=cargo-cache,target=/usr/local/cargo/registry --mount source=rust-cache,target=/target --env CARGO_TARGET_DIR=/target -w /source --entrypoint "" organic-test "${additional_args[@]}"
exec docker run "${additional_flags[@]}" --init --rm -i --mount type=tmpfs,destination=/tmp -v "/:/input:ro" -v "$($REALPATH "$DIR/../"):/source:ro" --mount source=cargo-cache,target=/usr/local/cargo/registry --mount source=rust-cache,target=/target --env CARGO_TARGET_DIR=/target -w /source --entrypoint "" organic-test /bin/sh
fi
local features_joined
features_joined=$(IFS=","; echo "${features[*]}")
local build_flags=()
if [ "$PROFILE" = "dev" ] || [ "$PROFILE" = "debug" ]; then
PROFILE="debug"
else
build_flags+=(--profile "$PROFILE")
fi
if [ $# -gt 0 ]; then
# If we passed in args, we need to forward them along
for path in "${@}"; do
local full_path
full_path=$($REALPATH "$path")
init_script=$(cat <<EOF
set -euo pipefail
IFS=\$'\n\t'
cargo build --bin compare --no-default-features --features "$features_joined" ${build_flags[@]}
exec /target/${PROFILE}/compare "/input${full_path}"
EOF
)
docker run "${additional_flags[@]}" --init --rm -i --mount type=tmpfs,destination=/tmp -v "/:/input:ro" -v "$($REALPATH "$DIR/../"):/source:ro" --mount source=cargo-cache,target=/usr/local/cargo/registry --mount source=rust-cache,target=/target --env CARGO_TARGET_DIR=/target -w /source --entrypoint "" organic-test sh -c "$init_script"
done
else
local current_directory init_script
current_directory=$(pwd)
init_script=$(cat <<EOF
set -euo pipefail
IFS=\$'\n\t'
cargo build --bin compare --no-default-features --features "$features_joined" ${build_flags[@]}
cd /input${current_directory}
exec /target/${PROFILE}/compare
EOF
)
docker run "${additional_flags[@]}" --init --rm -i --mount type=tmpfs,destination=/tmp -v "/:/input:ro" -v "$($REALPATH "$DIR/../"):/source:ro" --mount source=cargo-cache,target=/usr/local/cargo/registry --mount source=rust-cache,target=/target --env CARGO_TARGET_DIR=/target -w /source --entrypoint "" organic-test sh -c "$init_script"
fi
} }
main "${@}" main "${@}"

View File

@@ -1,72 +0,0 @@
#!/usr/bin/env bash
#
# Bisect parsing a file at various line cut-off points to see which line causes the parse to differ from emacs.
set -euo pipefail
IFS=$'\n\t'
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
REALPATH=$(command -v uu-realpath || command -v realpath)
############## Setup #########################
function die {
local status_code="$1"
shift
(>&2 echo "${@}")
exit "$status_code"
}
function log {
(>&2 echo "${@}")
}
############## Program #########################
function main {
local target_full_path
target_full_path=$($REALPATH "$1")
SOURCE_FOLDER=$(dirname "$target_full_path")
TARGET_DOCUMENT=$(basename "$target_full_path")
local good=0
local bad
bad=$(wc -l "$SOURCE_FOLDER/$TARGET_DOCUMENT" | awk '{print $1}')
set +e
(run_parse "$bad")
local status=$?
set -e
if [ $status -eq 0 ]; then
log "Entire file passes."
exit 0
fi
while [[ "$((bad - good))" -gt 1 ]]; do
local next_line=$((((bad - good) / 2) + good))
log "Testing line $next_line"
set +e
run_parse "$next_line" &> /dev/null
local status=$?
set -e
if [ $status -eq 0 ]; then
good="$next_line"
log "Line $next_line good"
else
bad="$next_line"
log "Line $next_line bad"
fi
done
echo "Bad line: $bad"
}
function run_parse {
local lines="$1"
cd "$SOURCE_FOLDER"
head -n "$lines" "$SOURCE_FOLDER/$TARGET_DOCUMENT" | PROFILE=release-lto "${DIR}/run_docker_compare.bash"
local status=$?
return "$status"
}
main "${@}"

View File

@@ -6,6 +6,7 @@ DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
: ${NO_COLOR:=""} # Set to anything to disable color output : ${NO_COLOR:=""} # Set to anything to disable color output
cd "$DIR/../"
REALPATH=$(command -v uu-realpath || command -v realpath) REALPATH=$(command -v uu-realpath || command -v realpath)
MAKE=$(command -v gmake || command -v make) MAKE=$(command -v gmake || command -v make)
@@ -55,7 +56,7 @@ cargo test --no-default-features --features compare --no-fail-fast --lib --test
EOF EOF
) )
docker run "${additional_flags[@]}" --init --rm --read-only --mount type=tmpfs,destination=/tmp -v "$($REALPATH "$DIR/../"):/source:ro" --mount source=cargo-cache,target=/usr/local/cargo/registry --mount source=rust-cache,target=/target --env CARGO_TARGET_DIR=/target -w /source --entrypoint "" organic-test sh -c "$init_script" docker run "${additional_flags[@]}" --init --rm --read-only --mount type=tmpfs,destination=/tmp -v "$($REALPATH ./):/source:ro" --mount source=cargo-cache,target=/usr/local/cargo/registry --mount source=rust-cache,target=/target --env CARGO_TARGET_DIR=/target -w /source --entrypoint "" organic-test sh -c "$init_script"
} }

View File

@@ -4,6 +4,7 @@ set -euo pipefail
IFS=$'\n\t' IFS=$'\n\t'
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
cd "$DIR/../"
REALPATH=$(command -v uu-realpath || command -v realpath) REALPATH=$(command -v uu-realpath || command -v realpath)
function main { function main {
@@ -11,7 +12,7 @@ function main {
local test local test
while read test; do while read test; do
(cd "$DIR/../" && cargo test --no-default-features --features compare --no-fail-fast --test test_loader "$test" -- --show-output) cargo test --no-default-features --features compare --no-fail-fast --test test_loader "$test" -- --show-output
done<<<"$test_names" done<<<"$test_names"
} }

View File

@@ -7,6 +7,8 @@ DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
: ${PROFILE:="release-lto"} : ${PROFILE:="release-lto"}
cd "$DIR/../"
function main { function main {
local additional_flags=() local additional_flags=()
if [ "$PROFILE" = "dev" ] || [ "$PROFILE" = "debug" ]; then if [ "$PROFILE" = "dev" ] || [ "$PROFILE" = "debug" ]; then
@@ -14,8 +16,8 @@ function main {
else else
additional_flags+=(--profile "$PROFILE") additional_flags+=(--profile "$PROFILE")
fi fi
(cd "$DIR/../" && cargo build --no-default-features "${additional_flags[@]}") cargo build --no-default-features "${additional_flags[@]}"
time "$DIR/../target/${PROFILE}/parse" "${@}" time ./target/${PROFILE}/compare
} }
main "${@}" main "${@}"

View File

@@ -1,52 +0,0 @@
#![feature(round_char_boundary)]
#![feature(exact_size_is_empty)]
use std::io::Read;
use organic::compare::run_anonymous_compare;
use organic::compare::run_compare_on_file;
#[cfg(feature = "tracing")]
use crate::init_tracing::init_telemetry;
#[cfg(feature = "tracing")]
use crate::init_tracing::shutdown_telemetry;
#[cfg(feature = "tracing")]
mod init_tracing;
#[cfg(not(feature = "tracing"))]
fn main() -> Result<(), Box<dyn std::error::Error>> {
main_body()
}
#[cfg(feature = "tracing")]
fn main() -> Result<(), Box<dyn std::error::Error>> {
let rt = tokio::runtime::Runtime::new()?;
let result = rt.block_on(async {
init_telemetry()?;
let main_body_result = main_body();
shutdown_telemetry()?;
main_body_result
});
result
}
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
fn main_body() -> Result<(), Box<dyn std::error::Error>> {
let args = std::env::args().skip(1);
if args.is_empty() {
let org_contents = read_stdin_to_string()?;
run_anonymous_compare(org_contents)
} else {
for arg in args {
run_compare_on_file(arg)?
}
Ok(())
}
}
fn read_stdin_to_string() -> Result<String, Box<dyn std::error::Error>> {
let mut stdin_contents = String::new();
std::io::stdin()
.lock()
.read_to_string(&mut stdin_contents)?;
Ok(stdin_contents)
}

View File

@@ -1,95 +0,0 @@
use std::path::Path;
use crate::compare::diff::compare_document;
use crate::compare::parse::emacs_parse_anonymous_org_document;
use crate::compare::parse::emacs_parse_file_org_document;
use crate::compare::parse::get_emacs_version;
use crate::compare::parse::get_org_mode_version;
use crate::compare::sexp::sexp;
use crate::context::GlobalSettings;
use crate::context::LocalFileAccessInterface;
use crate::parser::parse_file_with_settings;
use crate::parser::parse_with_settings;
pub fn run_anonymous_compare<P: AsRef<str>>(
org_contents: P,
) -> Result<(), Box<dyn std::error::Error>> {
run_anonymous_compare_with_settings(org_contents, &GlobalSettings::default())
}
pub fn run_compare_on_file<P: AsRef<Path>>(org_path: P) -> Result<(), Box<dyn std::error::Error>> {
run_compare_on_file_with_settings(org_path, &GlobalSettings::default())
}
pub fn run_anonymous_compare_with_settings<P: AsRef<str>>(
org_contents: P,
global_settings: &GlobalSettings,
) -> Result<(), Box<dyn std::error::Error>> {
// TODO: This is a work-around to pretend that dos line endings do not exist. It would be better to handle the difference in line endings.
let org_contents = org_contents.as_ref().replace("\r\n", "\n");
let org_contents = org_contents.as_str();
print_versions()?;
let rust_parsed = parse_with_settings(org_contents, global_settings)?;
let org_sexp = emacs_parse_anonymous_org_document(org_contents, global_settings)?;
let (_remaining, parsed_sexp) = sexp(org_sexp.as_str()).map_err(|e| e.to_string())?;
println!("{}\n\n\n", org_contents);
println!("{}", org_sexp);
println!("{:#?}", rust_parsed);
// We do the diffing after printing out both parsed forms in case the diffing panics
let diff_result = compare_document(&parsed_sexp, &rust_parsed)?;
diff_result.print(org_contents)?;
if diff_result.is_bad() {
Err("Diff results do not match.")?;
}
Ok(())
}
pub fn run_compare_on_file_with_settings<P: AsRef<Path>>(
org_path: P,
global_settings: &GlobalSettings,
) -> Result<(), Box<dyn std::error::Error>> {
let org_path = org_path.as_ref();
print_versions()?;
let parent_directory = org_path
.parent()
.ok_or("Should be contained inside a directory.")?;
let org_contents = std::fs::read_to_string(org_path)?;
// TODO: This is a work-around to pretend that dos line endings do not exist. It would be better to handle the difference in line endings.
let org_contents = org_contents.replace("\r\n", "\n");
let org_contents = org_contents.as_str();
let file_access_interface = LocalFileAccessInterface {
working_directory: Some(parent_directory.to_path_buf()),
};
let global_settings = {
let mut global_settings = global_settings.clone();
global_settings.file_access = &file_access_interface;
global_settings
};
let rust_parsed = parse_file_with_settings(org_contents, &global_settings, Some(org_path))?;
let org_sexp = emacs_parse_file_org_document(org_path, &global_settings)?;
let (_remaining, parsed_sexp) = sexp(org_sexp.as_str()).map_err(|e| e.to_string())?;
println!("{}\n\n\n", org_contents);
println!("{}", org_sexp);
println!("{:#?}", rust_parsed);
// We do the diffing after printing out both parsed forms in case the diffing panics
let diff_result = compare_document(&parsed_sexp, &rust_parsed)?;
diff_result.print(org_contents)?;
if diff_result.is_bad() {
Err("Diff results do not match.")?;
}
Ok(())
}
fn print_versions() -> Result<(), Box<dyn std::error::Error>> {
eprintln!("Using emacs version: {}", get_emacs_version()?.trim());
eprintln!("Using org-mode version: {}", get_org_mode_version()?.trim());
Ok(())
}

File diff suppressed because it is too large Load Diff

View File

@@ -1,552 +0,0 @@
use std::borrow::Cow;
use crate::types::AngleLink;
use crate::types::AstNode;
use crate::types::BabelCall;
use crate::types::Bold;
use crate::types::CenterBlock;
use crate::types::Citation;
use crate::types::CitationReference;
use crate::types::Clock;
use crate::types::Code;
use crate::types::Comment;
use crate::types::CommentBlock;
use crate::types::DiarySexp;
use crate::types::Document;
use crate::types::Drawer;
use crate::types::DynamicBlock;
use crate::types::Element;
use crate::types::Entity;
use crate::types::ExampleBlock;
use crate::types::ExportBlock;
use crate::types::ExportSnippet;
use crate::types::FixedWidthArea;
use crate::types::FootnoteDefinition;
use crate::types::FootnoteReference;
use crate::types::Heading;
use crate::types::HorizontalRule;
use crate::types::InlineBabelCall;
use crate::types::InlineSourceBlock;
use crate::types::Italic;
use crate::types::Keyword;
use crate::types::LatexEnvironment;
use crate::types::LatexFragment;
use crate::types::LineBreak;
use crate::types::NodeProperty;
use crate::types::Object;
use crate::types::OrgMacro;
use crate::types::Paragraph;
use crate::types::PlainLink;
use crate::types::PlainList;
use crate::types::PlainListItem;
use crate::types::PlainText;
use crate::types::Planning;
use crate::types::PropertyDrawer;
use crate::types::QuoteBlock;
use crate::types::RadioLink;
use crate::types::RadioTarget;
use crate::types::RegularLink;
use crate::types::Section;
use crate::types::SpecialBlock;
use crate::types::SrcBlock;
use crate::types::StatisticsCookie;
use crate::types::StrikeThrough;
use crate::types::Subscript;
use crate::types::Superscript;
use crate::types::Table;
use crate::types::TableCell;
use crate::types::TableRow;
use crate::types::Target;
use crate::types::Timestamp;
use crate::types::Underline;
use crate::types::Verbatim;
use crate::types::VerseBlock;
/// Provides elisp-side facts about an org-mode AST node type.
pub(crate) trait ElispFact<'s> {
    /// Returns the name upstream Emacs org-mode uses for this node kind
    /// in its parse tree (e.g. "headline", "plain-list").
    fn get_elisp_name<'b>(&'b self) -> Cow<'s, str>;
}
/// Uniform accessor for a type-erased `ElispFact`, implemented both by
/// concrete node types (via the blanket impl below) and by the wrapper
/// enums (`AstNode`, `Element`, `Object`) which dispatch to their inner node.
pub(crate) trait GetElispFact<'s> {
    fn get_elisp_fact<'b>(&'b self) -> &'b dyn ElispFact<'s>;
}
// Blanket impl: any concrete node implementing `ElispFact` serves as its
// own fact source.
impl<'s, I: ElispFact<'s>> GetElispFact<'s> for I {
    fn get_elisp_fact<'b>(&'b self) -> &'b dyn ElispFact<'s> {
        self
    }
}
// Dispatch from the `AstNode` wrapper enum to the wrapped node's own
// `ElispFact` impl. Each variant holds a reference, so `*inner` yields the
// reference that unsize-coerces to `&dyn ElispFact`. The match is
// exhaustive: adding an `AstNode` variant forces an arm to be added here.
impl<'r, 's> GetElispFact<'s> for AstNode<'r, 's> {
    fn get_elisp_fact<'b>(&'b self) -> &'b dyn ElispFact<'s> {
        match self {
            AstNode::Document(inner) => *inner,
            AstNode::Heading(inner) => *inner,
            AstNode::Section(inner) => *inner,
            AstNode::Paragraph(inner) => *inner,
            AstNode::PlainList(inner) => *inner,
            AstNode::PlainListItem(inner) => *inner,
            AstNode::CenterBlock(inner) => *inner,
            AstNode::QuoteBlock(inner) => *inner,
            AstNode::SpecialBlock(inner) => *inner,
            AstNode::DynamicBlock(inner) => *inner,
            AstNode::FootnoteDefinition(inner) => *inner,
            AstNode::Comment(inner) => *inner,
            AstNode::Drawer(inner) => *inner,
            AstNode::PropertyDrawer(inner) => *inner,
            AstNode::NodeProperty(inner) => *inner,
            AstNode::Table(inner) => *inner,
            AstNode::TableRow(inner) => *inner,
            AstNode::VerseBlock(inner) => *inner,
            AstNode::CommentBlock(inner) => *inner,
            AstNode::ExampleBlock(inner) => *inner,
            AstNode::ExportBlock(inner) => *inner,
            AstNode::SrcBlock(inner) => *inner,
            AstNode::Clock(inner) => *inner,
            AstNode::DiarySexp(inner) => *inner,
            AstNode::Planning(inner) => *inner,
            AstNode::FixedWidthArea(inner) => *inner,
            AstNode::HorizontalRule(inner) => *inner,
            AstNode::Keyword(inner) => *inner,
            AstNode::BabelCall(inner) => *inner,
            AstNode::LatexEnvironment(inner) => *inner,
            AstNode::Bold(inner) => *inner,
            AstNode::Italic(inner) => *inner,
            AstNode::Underline(inner) => *inner,
            AstNode::StrikeThrough(inner) => *inner,
            AstNode::Code(inner) => *inner,
            AstNode::Verbatim(inner) => *inner,
            AstNode::PlainText(inner) => *inner,
            AstNode::RegularLink(inner) => *inner,
            AstNode::RadioLink(inner) => *inner,
            AstNode::RadioTarget(inner) => *inner,
            AstNode::PlainLink(inner) => *inner,
            AstNode::AngleLink(inner) => *inner,
            AstNode::OrgMacro(inner) => *inner,
            AstNode::Entity(inner) => *inner,
            AstNode::LatexFragment(inner) => *inner,
            AstNode::ExportSnippet(inner) => *inner,
            AstNode::FootnoteReference(inner) => *inner,
            AstNode::Citation(inner) => *inner,
            AstNode::CitationReference(inner) => *inner,
            AstNode::InlineBabelCall(inner) => *inner,
            AstNode::InlineSourceBlock(inner) => *inner,
            AstNode::LineBreak(inner) => *inner,
            AstNode::Target(inner) => *inner,
            AstNode::StatisticsCookie(inner) => *inner,
            AstNode::Subscript(inner) => *inner,
            AstNode::Superscript(inner) => *inner,
            AstNode::TableCell(inner) => *inner,
            AstNode::Timestamp(inner) => *inner,
        }
    }
}
// Dispatch from the `Element` enum (owned variants, unlike `AstNode`) to
// the inner node's `ElispFact` impl; `inner` coerces to `&dyn ElispFact`.
impl<'s> GetElispFact<'s> for Element<'s> {
    fn get_elisp_fact<'b>(&'b self) -> &'b dyn ElispFact<'s> {
        match self {
            Element::Paragraph(inner) => inner,
            Element::PlainList(inner) => inner,
            Element::CenterBlock(inner) => inner,
            Element::QuoteBlock(inner) => inner,
            Element::SpecialBlock(inner) => inner,
            Element::DynamicBlock(inner) => inner,
            Element::FootnoteDefinition(inner) => inner,
            Element::Comment(inner) => inner,
            Element::Drawer(inner) => inner,
            Element::PropertyDrawer(inner) => inner,
            Element::Table(inner) => inner,
            Element::VerseBlock(inner) => inner,
            Element::CommentBlock(inner) => inner,
            Element::ExampleBlock(inner) => inner,
            Element::ExportBlock(inner) => inner,
            Element::SrcBlock(inner) => inner,
            Element::Clock(inner) => inner,
            Element::DiarySexp(inner) => inner,
            Element::Planning(inner) => inner,
            Element::FixedWidthArea(inner) => inner,
            Element::HorizontalRule(inner) => inner,
            Element::Keyword(inner) => inner,
            Element::BabelCall(inner) => inner,
            Element::LatexEnvironment(inner) => inner,
        }
    }
}
// Dispatch from the `Object` enum (inline/object-level nodes) to the inner
// node's `ElispFact` impl; `inner` coerces to `&dyn ElispFact`.
impl<'s> GetElispFact<'s> for Object<'s> {
    fn get_elisp_fact<'b>(&'b self) -> &'b dyn ElispFact<'s> {
        match self {
            Object::Bold(inner) => inner,
            Object::Italic(inner) => inner,
            Object::Underline(inner) => inner,
            Object::StrikeThrough(inner) => inner,
            Object::Code(inner) => inner,
            Object::Verbatim(inner) => inner,
            Object::PlainText(inner) => inner,
            Object::RegularLink(inner) => inner,
            Object::RadioLink(inner) => inner,
            Object::RadioTarget(inner) => inner,
            Object::PlainLink(inner) => inner,
            Object::AngleLink(inner) => inner,
            Object::OrgMacro(inner) => inner,
            Object::Entity(inner) => inner,
            Object::LatexFragment(inner) => inner,
            Object::ExportSnippet(inner) => inner,
            Object::FootnoteReference(inner) => inner,
            Object::Citation(inner) => inner,
            Object::CitationReference(inner) => inner,
            Object::InlineBabelCall(inner) => inner,
            Object::InlineSourceBlock(inner) => inner,
            Object::LineBreak(inner) => inner,
            Object::Target(inner) => inner,
            Object::StatisticsCookie(inner) => inner,
            Object::Subscript(inner) => inner,
            Object::Superscript(inner) => inner,
            Object::Timestamp(inner) => inner,
        }
    }
}
// Elisp parse-tree names for document-structure nodes. The names come from
// upstream org-element; `Cow::Borrowed` makes the zero-allocation intent
// explicit (`"…".into()` produces the same `Cow::Borrowed` value).
impl<'s> ElispFact<'s> for Document<'s> {
    /// The document root is called "org-data" upstream.
    fn get_elisp_name<'a>(&'a self) -> Cow<'s, str> {
        Cow::Borrowed("org-data")
    }
}
impl<'s> ElispFact<'s> for Section<'s> {
    fn get_elisp_name<'a>(&'a self) -> Cow<'s, str> {
        Cow::Borrowed("section")
    }
}
impl<'s> ElispFact<'s> for Heading<'s> {
    /// Upstream uses "headline", not "heading".
    fn get_elisp_name<'a>(&'a self) -> Cow<'s, str> {
        Cow::Borrowed("headline")
    }
}
impl<'s> ElispFact<'s> for PlainList<'s> {
    fn get_elisp_name<'a>(&'a self) -> Cow<'s, str> {
        Cow::Borrowed("plain-list")
    }
}
impl<'s> ElispFact<'s> for PlainListItem<'s> {
    /// Upstream calls a list item simply "item".
    fn get_elisp_name<'a>(&'a self) -> Cow<'s, str> {
        Cow::Borrowed("item")
    }
}
impl<'s> ElispFact<'s> for CenterBlock<'s> {
    fn get_elisp_name<'a>(&'a self) -> Cow<'s, str> {
        Cow::Borrowed("center-block")
    }
}
impl<'s> ElispFact<'s> for QuoteBlock<'s> {
    fn get_elisp_name<'a>(&'a self) -> Cow<'s, str> {
        Cow::Borrowed("quote-block")
    }
}
impl<'s> ElispFact<'s> for SpecialBlock<'s> {
    fn get_elisp_name<'a>(&'a self) -> Cow<'s, str> {
        Cow::Borrowed("special-block")
    }
}
impl<'s> ElispFact<'s> for DynamicBlock<'s> {
    fn get_elisp_name<'a>(&'a self) -> Cow<'s, str> {
        Cow::Borrowed("dynamic-block")
    }
}
impl<'s> ElispFact<'s> for FootnoteDefinition<'s> {
    fn get_elisp_name<'a>(&'a self) -> Cow<'s, str> {
        Cow::Borrowed("footnote-definition")
    }
}
impl<'s> ElispFact<'s> for Drawer<'s> {
    fn get_elisp_name<'a>(&'a self) -> Cow<'s, str> {
        Cow::Borrowed("drawer")
    }
}
impl<'s> ElispFact<'s> for PropertyDrawer<'s> {
    fn get_elisp_name<'a>(&'a self) -> Cow<'s, str> {
        Cow::Borrowed("property-drawer")
    }
}
impl<'s> ElispFact<'s> for NodeProperty<'s> {
    fn get_elisp_name<'a>(&'a self) -> Cow<'s, str> {
        Cow::Borrowed("node-property")
    }
}
impl<'s> ElispFact<'s> for Table<'s> {
    fn get_elisp_name<'a>(&'a self) -> Cow<'s, str> {
        Cow::Borrowed("table")
    }
}
impl<'s> ElispFact<'s> for TableRow<'s> {
    fn get_elisp_name<'a>(&'a self) -> Cow<'s, str> {
        Cow::Borrowed("table-row")
    }
}
// Elisp parse-tree names for the remaining greater/lesser elements.
// `Cow::Borrowed` is written explicitly; it is the exact value
// `"…".into()` would produce for a `&'static str`.
impl<'s> ElispFact<'s> for Paragraph<'s> {
    fn get_elisp_name<'a>(&'a self) -> Cow<'s, str> {
        Cow::Borrowed("paragraph")
    }
}
impl<'s> ElispFact<'s> for TableCell<'s> {
    fn get_elisp_name<'a>(&'a self) -> Cow<'s, str> {
        Cow::Borrowed("table-cell")
    }
}
impl<'s> ElispFact<'s> for Comment<'s> {
    fn get_elisp_name<'a>(&'a self) -> Cow<'s, str> {
        Cow::Borrowed("comment")
    }
}
impl<'s> ElispFact<'s> for VerseBlock<'s> {
    fn get_elisp_name<'a>(&'a self) -> Cow<'s, str> {
        Cow::Borrowed("verse-block")
    }
}
impl<'s> ElispFact<'s> for CommentBlock<'s> {
    fn get_elisp_name<'a>(&'a self) -> Cow<'s, str> {
        Cow::Borrowed("comment-block")
    }
}
impl<'s> ElispFact<'s> for ExampleBlock<'s> {
    fn get_elisp_name<'a>(&'a self) -> Cow<'s, str> {
        Cow::Borrowed("example-block")
    }
}
impl<'s> ElispFact<'s> for ExportBlock<'s> {
    fn get_elisp_name<'a>(&'a self) -> Cow<'s, str> {
        Cow::Borrowed("export-block")
    }
}
impl<'s> ElispFact<'s> for SrcBlock<'s> {
    fn get_elisp_name<'a>(&'a self) -> Cow<'s, str> {
        Cow::Borrowed("src-block")
    }
}
impl<'s> ElispFact<'s> for Clock<'s> {
    fn get_elisp_name<'a>(&'a self) -> Cow<'s, str> {
        Cow::Borrowed("clock")
    }
}
impl<'s> ElispFact<'s> for DiarySexp<'s> {
    fn get_elisp_name<'a>(&'a self) -> Cow<'s, str> {
        Cow::Borrowed("diary-sexp")
    }
}
impl<'s> ElispFact<'s> for Planning<'s> {
    fn get_elisp_name<'a>(&'a self) -> Cow<'s, str> {
        Cow::Borrowed("planning")
    }
}
impl<'s> ElispFact<'s> for FixedWidthArea<'s> {
    /// Upstream uses "fixed-width", not "fixed-width-area".
    fn get_elisp_name<'a>(&'a self) -> Cow<'s, str> {
        Cow::Borrowed("fixed-width")
    }
}
impl<'s> ElispFact<'s> for HorizontalRule<'s> {
    fn get_elisp_name<'a>(&'a self) -> Cow<'s, str> {
        Cow::Borrowed("horizontal-rule")
    }
}
impl<'s> ElispFact<'s> for Keyword<'s> {
    fn get_elisp_name<'a>(&'a self) -> Cow<'s, str> {
        Cow::Borrowed("keyword")
    }
}
impl<'s> ElispFact<'s> for BabelCall<'s> {
    fn get_elisp_name<'a>(&'a self) -> Cow<'s, str> {
        Cow::Borrowed("babel-call")
    }
}
impl<'s> ElispFact<'s> for LatexEnvironment<'s> {
    fn get_elisp_name<'a>(&'a self) -> Cow<'s, str> {
        Cow::Borrowed("latex-environment")
    }
}
// Elisp parse-tree names for text-markup objects and link objects. Note
// that upstream org-element has a single "link" type, so all four of this
// crate's link variants (regular, radio, plain, angle) map to "link".
impl<'s> ElispFact<'s> for Bold<'s> {
    fn get_elisp_name<'a>(&'a self) -> Cow<'s, str> {
        Cow::Borrowed("bold")
    }
}
impl<'s> ElispFact<'s> for Italic<'s> {
    fn get_elisp_name<'a>(&'a self) -> Cow<'s, str> {
        Cow::Borrowed("italic")
    }
}
impl<'s> ElispFact<'s> for Underline<'s> {
    fn get_elisp_name<'a>(&'a self) -> Cow<'s, str> {
        Cow::Borrowed("underline")
    }
}
impl<'s> ElispFact<'s> for StrikeThrough<'s> {
    fn get_elisp_name<'a>(&'a self) -> Cow<'s, str> {
        Cow::Borrowed("strike-through")
    }
}
impl<'s> ElispFact<'s> for Code<'s> {
    fn get_elisp_name<'a>(&'a self) -> Cow<'s, str> {
        Cow::Borrowed("code")
    }
}
impl<'s> ElispFact<'s> for Verbatim<'s> {
    fn get_elisp_name<'a>(&'a self) -> Cow<'s, str> {
        Cow::Borrowed("verbatim")
    }
}
impl<'s> ElispFact<'s> for RegularLink<'s> {
    fn get_elisp_name<'a>(&'a self) -> Cow<'s, str> {
        Cow::Borrowed("link")
    }
}
impl<'s> ElispFact<'s> for RadioLink<'s> {
    fn get_elisp_name<'a>(&'a self) -> Cow<'s, str> {
        Cow::Borrowed("link")
    }
}
impl<'s> ElispFact<'s> for RadioTarget<'s> {
    fn get_elisp_name<'a>(&'a self) -> Cow<'s, str> {
        Cow::Borrowed("radio-target")
    }
}
impl<'s> ElispFact<'s> for PlainLink<'s> {
    fn get_elisp_name<'a>(&'a self) -> Cow<'s, str> {
        Cow::Borrowed("link")
    }
}
impl<'s> ElispFact<'s> for AngleLink<'s> {
    fn get_elisp_name<'a>(&'a self) -> Cow<'s, str> {
        Cow::Borrowed("link")
    }
}
// Elisp parse-tree names for the remaining object-level nodes.
// `Cow::Borrowed` is the explicit form of what `"…".into()` yields.
impl<'s> ElispFact<'s> for OrgMacro<'s> {
    /// Upstream calls this node type "macro".
    fn get_elisp_name<'a>(&'a self) -> Cow<'s, str> {
        Cow::Borrowed("macro")
    }
}
impl<'s> ElispFact<'s> for Entity<'s> {
    fn get_elisp_name<'a>(&'a self) -> Cow<'s, str> {
        Cow::Borrowed("entity")
    }
}
impl<'s> ElispFact<'s> for LatexFragment<'s> {
    fn get_elisp_name<'a>(&'a self) -> Cow<'s, str> {
        Cow::Borrowed("latex-fragment")
    }
}
impl<'s> ElispFact<'s> for ExportSnippet<'s> {
    fn get_elisp_name<'a>(&'a self) -> Cow<'s, str> {
        Cow::Borrowed("export-snippet")
    }
}
impl<'s> ElispFact<'s> for FootnoteReference<'s> {
    fn get_elisp_name<'a>(&'a self) -> Cow<'s, str> {
        Cow::Borrowed("footnote-reference")
    }
}
impl<'s> ElispFact<'s> for Citation<'s> {
    fn get_elisp_name<'a>(&'a self) -> Cow<'s, str> {
        Cow::Borrowed("citation")
    }
}
impl<'s> ElispFact<'s> for CitationReference<'s> {
    fn get_elisp_name<'a>(&'a self) -> Cow<'s, str> {
        Cow::Borrowed("citation-reference")
    }
}
impl<'s> ElispFact<'s> for InlineBabelCall<'s> {
    fn get_elisp_name<'a>(&'a self) -> Cow<'s, str> {
        Cow::Borrowed("inline-babel-call")
    }
}
impl<'s> ElispFact<'s> for InlineSourceBlock<'s> {
    /// Upstream uses "inline-src-block".
    fn get_elisp_name<'a>(&'a self) -> Cow<'s, str> {
        Cow::Borrowed("inline-src-block")
    }
}
impl<'s> ElispFact<'s> for LineBreak<'s> {
    fn get_elisp_name<'a>(&'a self) -> Cow<'s, str> {
        Cow::Borrowed("line-break")
    }
}
impl<'s> ElispFact<'s> for Target<'s> {
    fn get_elisp_name<'a>(&'a self) -> Cow<'s, str> {
        Cow::Borrowed("target")
    }
}
impl<'s> ElispFact<'s> for StatisticsCookie<'s> {
    fn get_elisp_name<'a>(&'a self) -> Cow<'s, str> {
        Cow::Borrowed("statistics-cookie")
    }
}
impl<'s> ElispFact<'s> for Subscript<'s> {
    fn get_elisp_name<'a>(&'a self) -> Cow<'s, str> {
        Cow::Borrowed("subscript")
    }
}
impl<'s> ElispFact<'s> for Superscript<'s> {
    fn get_elisp_name<'a>(&'a self) -> Cow<'s, str> {
        Cow::Borrowed("superscript")
    }
}
impl<'s> ElispFact<'s> for Timestamp<'s> {
    fn get_elisp_name<'a>(&'a self) -> Cow<'s, str> {
        Cow::Borrowed("timestamp")
    }
}
impl<'s> ElispFact<'s> for PlainText<'s> {
    /// Upstream emacs plain text nodes have no name; "plain-text" is a
    /// synthetic label used only to simplify rendering the status diff.
    fn get_elisp_name<'a>(&'a self) -> Cow<'s, str> {
        Cow::Borrowed("plain-text")
    }
}

View File

@@ -1,10 +1,7 @@
mod compare;
mod diff; mod diff;
mod elisp_fact;
mod parse; mod parse;
mod sexp;
mod util; mod util;
pub use compare::run_anonymous_compare; pub use diff::compare_document;
pub use compare::run_anonymous_compare_with_settings; pub use parse::emacs_parse_org_document;
pub use compare::run_compare_on_file; pub use parse::get_emacs_version;
pub use compare::run_compare_on_file_with_settings; pub use parse::get_org_mode_version;

View File

@@ -1,34 +1,6 @@
use std::path::Path;
use std::process::Command; use std::process::Command;
use crate::context::HeadlineLevelFilter; pub fn emacs_parse_org_document<C>(file_contents: C) -> Result<String, Box<dyn std::error::Error>>
use crate::settings::GlobalSettings;
/// Generate elisp to configure org-mode parsing settings
///
/// Currently only org-list-allow-alphabetical is supported.
fn global_settings_elisp(global_settings: &GlobalSettings) -> String {
// This string concatenation is wildly inefficient but its only called in tests 🤷.
let mut ret = "".to_owned();
if global_settings.list_allow_alphabetical {
ret += "(setq org-list-allow-alphabetical t)\n"
}
if global_settings.tab_width != crate::settings::DEFAULT_TAB_WIDTH {
ret += format!("(setq-default tab-width {})", global_settings.tab_width).as_str();
}
if global_settings.odd_levels_only != HeadlineLevelFilter::default() {
ret += match global_settings.odd_levels_only {
HeadlineLevelFilter::Odd => "(setq org-odd-levels-only t)\n",
HeadlineLevelFilter::OddEven => "(setq org-odd-levels-only nil)\n",
};
}
ret
}
pub(crate) fn emacs_parse_anonymous_org_document<C>(
file_contents: C,
global_settings: &GlobalSettings,
) -> Result<String, Box<dyn std::error::Error>>
where where
C: AsRef<str>, C: AsRef<str>,
{ {
@@ -36,68 +8,21 @@ where
let elisp_script = format!( let elisp_script = format!(
r#"(progn r#"(progn
(erase-buffer) (erase-buffer)
(require 'org)
(defun org-table-align () t)
(insert "{escaped_file_contents}") (insert "{escaped_file_contents}")
{global_settings}
(org-mode) (org-mode)
(message "%s" (pp-to-string (org-element-parse-buffer))) (message "%s" (pp-to-string (org-element-parse-buffer)))
)"#, )"#,
escaped_file_contents = escaped_file_contents, escaped_file_contents = escaped_file_contents
global_settings = global_settings_elisp(global_settings)
); );
let mut cmd = Command::new("emacs"); let mut cmd = Command::new("emacs");
let cmd = cmd let proc = cmd
.arg("-q") .arg("-q")
.arg("--no-site-file") .arg("--no-site-file")
.arg("--no-splash") .arg("--no-splash")
.arg("--batch") .arg("--batch")
.arg("--eval") .arg("--eval")
.arg(elisp_script); .arg(elisp_script);
let out = cmd.output()?; let out = proc.output()?;
out.status.exit_ok()?;
let org_sexp = out.stderr;
Ok(String::from_utf8(org_sexp)?)
}
pub(crate) fn emacs_parse_file_org_document<P>(
file_path: P,
global_settings: &GlobalSettings,
) -> Result<String, Box<dyn std::error::Error>>
where
P: AsRef<Path>,
{
let file_path = file_path.as_ref().canonicalize()?;
let containing_directory = file_path.parent().ok_or(format!(
"Failed to get containing directory for path {}",
file_path.display()
))?;
let elisp_script = format!(
r#"(progn
(require 'org)
(defun org-table-align () t)
(setq vc-handled-backends nil)
{global_settings}
(find-file-read-only "{file_path}")
(org-mode)
(message "%s" (pp-to-string (org-element-parse-buffer)))
)"#,
global_settings = global_settings_elisp(global_settings),
file_path = file_path
.as_os_str()
.to_str()
.expect("File name should be valid utf-8.")
);
let mut cmd = Command::new("emacs");
let cmd = cmd
.current_dir(containing_directory)
.arg("-q")
.arg("--no-site-file")
.arg("--no-splash")
.arg("--batch")
.arg("--eval")
.arg(elisp_script);
let out = cmd.output()?;
out.status.exit_ok()?; out.status.exit_ok()?;
let org_sexp = out.stderr; let org_sexp = out.stderr;
Ok(String::from_utf8(org_sexp)?) Ok(String::from_utf8(org_sexp)?)
@@ -130,7 +55,7 @@ pub fn get_emacs_version() -> Result<String, Box<dyn std::error::Error>> {
(message "%s" (version)) (message "%s" (version))
)"#; )"#;
let mut cmd = Command::new("emacs"); let mut cmd = Command::new("emacs");
let cmd = cmd let proc = cmd
.arg("-q") .arg("-q")
.arg("--no-site-file") .arg("--no-site-file")
.arg("--no-splash") .arg("--no-splash")
@@ -138,7 +63,7 @@ pub fn get_emacs_version() -> Result<String, Box<dyn std::error::Error>> {
.arg("--eval") .arg("--eval")
.arg(elisp_script); .arg(elisp_script);
let out = cmd.output()?; let out = proc.output()?;
out.status.exit_ok()?; out.status.exit_ok()?;
Ok(String::from_utf8(out.stderr)?) Ok(String::from_utf8(out.stderr)?)
} }
@@ -149,7 +74,7 @@ pub fn get_org_mode_version() -> Result<String, Box<dyn std::error::Error>> {
(message "%s" (org-version nil t nil)) (message "%s" (org-version nil t nil))
)"#; )"#;
let mut cmd = Command::new("emacs"); let mut cmd = Command::new("emacs");
let cmd = cmd let proc = cmd
.arg("-q") .arg("-q")
.arg("--no-site-file") .arg("--no-site-file")
.arg("--no-splash") .arg("--no-splash")
@@ -157,7 +82,7 @@ pub fn get_org_mode_version() -> Result<String, Box<dyn std::error::Error>> {
.arg("--eval") .arg("--eval")
.arg(elisp_script); .arg(elisp_script);
let out = cmd.output()?; let out = proc.output()?;
out.status.exit_ok()?; out.status.exit_ok()?;
Ok(String::from_utf8(out.stderr)?) Ok(String::from_utf8(out.stderr)?)
} }

View File

@@ -1,10 +1,5 @@
use std::str::FromStr; use crate::parser::sexp::Token;
use crate::parser::Source;
use super::elisp_fact::GetElispFact;
use super::sexp::Token;
use crate::compare::sexp::unquote;
use crate::types::GetStandardProperties;
use crate::types::StandardProperties;
/// Check if the child string slice is a slice of the parent string slice. /// Check if the child string slice is a slice of the parent string slice.
fn is_slice_of(parent: &str, child: &str) -> bool { fn is_slice_of(parent: &str, child: &str) -> bool {
@@ -15,39 +10,18 @@ fn is_slice_of(parent: &str, child: &str) -> bool {
child_start >= parent_start && child_end <= parent_end child_start >= parent_start && child_end <= parent_end
} }
/// Get the byte offset into source that the rust object exists at. /// Get the offset into source that the rust object exists at.
/// ///
/// These offsets are zero-based unlike the elisp ones. /// These offsets are zero-based unlike the elisp ones.
fn get_rust_byte_offsets<'b, 's, S: StandardProperties<'s> + ?Sized>( fn get_offsets<'s, S: Source<'s>>(source: &'s str, rust_object: &'s S) -> (usize, usize) {
original_document: &'s str, let rust_object_source = rust_object.get_source();
rust_ast_node: &'b S, assert!(is_slice_of(source, rust_object_source));
) -> (usize, usize) { let offset = rust_object_source.as_ptr() as usize - source.as_ptr() as usize;
let rust_object_source = rust_ast_node.get_source();
debug_assert!(is_slice_of(original_document, rust_object_source));
let offset = rust_object_source.as_ptr() as usize - original_document.as_ptr() as usize;
let end = offset + rust_object_source.len(); let end = offset + rust_object_source.len();
(offset, end) (offset, end)
} }
pub(crate) fn compare_standard_properties< pub fn assert_name<'s>(emacs: &'s Token<'s>, name: &str) -> Result<(), Box<dyn std::error::Error>> {
'b,
's,
S: GetStandardProperties<'s> + GetElispFact<'s> + ?Sized,
>(
original_document: &'s str,
emacs: &'b Token<'s>,
rust: &'b S,
) -> Result<(), Box<dyn std::error::Error>> {
assert_name(emacs, rust.get_elisp_fact().get_elisp_name())?;
assert_bounds(original_document, emacs, rust.get_standard_properties())?;
Ok(())
}
pub(crate) fn assert_name<'b, 's, S: AsRef<str>>(
emacs: &'b Token<'s>,
name: S,
) -> Result<(), Box<dyn std::error::Error>> {
let name = name.as_ref();
let children = emacs.as_list()?; let children = emacs.as_list()?;
let first_child = children let first_child = children
.first() .first()
@@ -55,7 +29,7 @@ pub(crate) fn assert_name<'b, 's, S: AsRef<str>>(
.as_atom()?; .as_atom()?;
if first_child != name { if first_child != name {
Err(format!( Err(format!(
"AST node name mismatch. Expected a (rust) {expected} cell, but found a (emacs) {found} cell.", "Expected a {expected} cell, but found a {found} cell.",
expected = name, expected = name,
found = first_child found = first_child
))?; ))?;
@@ -63,33 +37,30 @@ pub(crate) fn assert_name<'b, 's, S: AsRef<str>>(
Ok(()) Ok(())
} }
/// Assert that the character ranges defined by upstream org-mode's :standard-properties match the slices in Organic's StandardProperties. pub fn assert_bounds<'s, S: Source<'s>>(
/// source: &'s str,
/// This does **not** handle plain text because plain text is a special case. emacs: &'s Token<'s>,
pub(crate) fn assert_bounds<'b, 's, S: StandardProperties<'s> + ?Sized>( rust: &'s S,
original_document: &'s str,
emacs: &'b Token<'s>,
rust: &'b S,
) -> Result<(), Box<dyn std::error::Error>> { ) -> Result<(), Box<dyn std::error::Error>> {
let standard_properties = get_emacs_standard_properties(emacs)?; // 1-based let standard_properties = get_standard_properties(emacs)?;
let (begin, end) = ( let (begin, end) = (
standard_properties standard_properties
.begin .begin
.ok_or("Token should have a begin.")?, .ok_or("Token should have a begin.")?,
standard_properties.end.ok_or("Token should have an end.")?, standard_properties.end.ok_or("Token should have an end.")?,
); );
let (rust_begin, rust_end) = get_rust_byte_offsets(original_document, rust); // 0-based let (rust_begin, rust_end) = get_offsets(source, rust);
let rust_begin_char_offset = (&original_document[..rust_begin]).chars().count() + 1; // 1-based let rust_begin_char_offset = (&source[..rust_begin]).chars().count();
let rust_end_char_offset = let rust_end_char_offset =
rust_begin_char_offset + (&original_document[rust_begin..rust_end]).chars().count(); // 1-based rust_begin_char_offset + (&source[rust_begin..rust_end]).chars().count();
if rust_begin_char_offset != begin || rust_end_char_offset != end { if (rust_begin_char_offset + 1) != begin || (rust_end_char_offset + 1) != end {
Err(format!("Rust bounds (in chars) ({rust_begin}, {rust_end}) do not match emacs bounds ({emacs_begin}, {emacs_end})", rust_begin = rust_begin_char_offset, rust_end = rust_end_char_offset, emacs_begin=begin, emacs_end=end))?; Err(format!("Rust bounds (in chars) ({rust_begin}, {rust_end}) do not match emacs bounds ({emacs_begin}, {emacs_end})", rust_begin = rust_begin_char_offset + 1, rust_end = rust_end_char_offset + 1, emacs_begin=begin, emacs_end=end))?;
} }
Ok(()) Ok(())
} }
struct EmacsStandardProperties { struct StandardProperties {
begin: Option<usize>, begin: Option<usize>,
#[allow(dead_code)] #[allow(dead_code)]
post_affiliated: Option<usize>, post_affiliated: Option<usize>,
@@ -102,9 +73,9 @@ struct EmacsStandardProperties {
post_blank: Option<usize>, post_blank: Option<usize>,
} }
fn get_emacs_standard_properties<'b, 's>( fn get_standard_properties<'s>(
emacs: &'b Token<'s>, emacs: &'s Token<'s>,
) -> Result<EmacsStandardProperties, Box<dyn std::error::Error>> { ) -> Result<StandardProperties, Box<dyn std::error::Error>> {
let children = emacs.as_list()?; let children = emacs.as_list()?;
let attributes_child = children let attributes_child = children
.iter() .iter()
@@ -123,7 +94,7 @@ fn get_emacs_standard_properties<'b, 's>(
let contents_end = maybe_token_to_usize(std_props.next())?; let contents_end = maybe_token_to_usize(std_props.next())?;
let end = maybe_token_to_usize(std_props.next())?; let end = maybe_token_to_usize(std_props.next())?;
let post_blank = maybe_token_to_usize(std_props.next())?; let post_blank = maybe_token_to_usize(std_props.next())?;
EmacsStandardProperties { StandardProperties {
begin, begin,
post_affiliated, post_affiliated,
contents_begin, contents_begin,
@@ -142,7 +113,7 @@ fn get_emacs_standard_properties<'b, 's>(
maybe_token_to_usize(attributes_map.get(":post-blank").map(|token| *token))?; maybe_token_to_usize(attributes_map.get(":post-blank").map(|token| *token))?;
let post_affiliated = let post_affiliated =
maybe_token_to_usize(attributes_map.get(":post-affiliated").map(|token| *token))?; maybe_token_to_usize(attributes_map.get(":post-affiliated").map(|token| *token))?;
EmacsStandardProperties { StandardProperties {
begin, begin,
post_affiliated, post_affiliated,
contents_begin, contents_begin,
@@ -170,85 +141,22 @@ fn maybe_token_to_usize(
.map_or(Ok(None), |r| r.map(Some))?) .map_or(Ok(None), |r| r.map(Some))?)
} }
/// Get a named property from the emacs token. pub fn get_property<'s, 'x>(
/// emacs: &'s Token<'s>,
/// Returns Ok(None) if value is nil or absent.
pub(crate) fn get_property<'b, 's, 'x>(
emacs: &'b Token<'s>,
key: &'x str, key: &'x str,
) -> Result<Option<&'b Token<'s>>, Box<dyn std::error::Error>> { ) -> Result<Option<&'s Token<'s>>, Box<dyn std::error::Error>> {
let children = emacs.as_list()?; let children = emacs.as_list()?;
let attributes_child = children let attributes_child = children
.iter() .iter()
.nth(1) .nth(1)
.ok_or("Should have an attributes child.")?; .ok_or("Should have an attributes child.")?;
let attributes_map = attributes_child.as_map()?; let attributes_map = attributes_child.as_map()?;
let prop = attributes_map.get(key).map(|token| *token); let prop = attributes_map
match prop.map(|token| token.as_atom()) { .get(key)
Some(Ok("nil")) => return Ok(None), .ok_or(format!("Missing {} attribute.", key))?;
match prop.as_atom() {
Ok("nil") => return Ok(None),
_ => {} _ => {}
}; };
Ok(prop) Ok(Some(*prop))
}
/// Get a named property containing an unquoted atom from the emacs token.
///
/// Returns None if key is not found.
pub(crate) fn get_property_unquoted_atom<'b, 's, 'x>(
emacs: &'b Token<'s>,
key: &'x str,
) -> Result<Option<&'s str>, Box<dyn std::error::Error>> {
Ok(get_property(emacs, key)?
.map(Token::as_atom)
.map_or(Ok(None), |r| r.map(Some))?)
}
/// Get a named property containing an quoted string from the emacs token.
///
/// Returns None if key is not found.
pub(crate) fn get_property_quoted_string<'b, 's, 'x>(
emacs: &'b Token<'s>,
key: &'x str,
) -> Result<Option<String>, Box<dyn std::error::Error>> {
Ok(get_property(emacs, key)?
.map(Token::as_atom)
.map_or(Ok(None), |r| r.map(Some))?
.map(unquote)
.map_or(Ok(None), |r| r.map(Some))?)
}
/// Get a named property containing a boolean value.
///
/// This uses the elisp convention of nil == false, non-nil == true.
///
/// Returns false if key is not found.
pub(crate) fn get_property_boolean<'b, 's, 'x>(
emacs: &'b Token<'s>,
key: &'x str,
) -> Result<bool, Box<dyn std::error::Error>> {
Ok(get_property(emacs, key)?
.map(Token::as_atom)
.map_or(Ok(None), |r| r.map(Some))?
.unwrap_or("nil")
!= "nil")
}
/// Get a named property containing an unquoted numeric value.
///
/// Returns None if key is not found.
pub(crate) fn get_property_numeric<'b, 's, 'x, N: FromStr>(
emacs: &'b Token<'s>,
key: &'x str,
) -> Result<Option<N>, Box<dyn std::error::Error + 's>>
where
<N as FromStr>::Err: std::error::Error,
<N as FromStr>::Err: 's,
{
let unparsed_string = get_property(emacs, key)?
.map(Token::as_atom)
.map_or(Ok(None), |r| r.map(Some))?;
let parsed_number = unparsed_string
.map(|val| val.parse::<N>())
.map_or(Ok(None), |r| r.map(Some))?;
Ok(parsed_number)
} }

View File

@@ -1,185 +0,0 @@
use std::marker::PhantomData;
use nom::combinator::eof;
use nom::IResult;
use super::exiting::ExitClass;
use super::global_settings::GlobalSettings;
use super::list::List;
use super::DynContextMatcher;
use super::RefContext;
use crate::error::CustomError;
use crate::error::MyError;
use crate::error::Res;
use crate::parser::OrgSource;
#[derive(Debug)]
pub(crate) enum ContextElement<'r, 's> {
/// Stores a parser that indicates that children should exit upon matching an exit matcher.
ExitMatcherNode(ExitMatcherNode<'r>),
/// Stores the name of the current element to prevent directly nesting elements of the same type.
Context(&'r str),
/// Stores the name of the current object to prevent directly nesting elements of the same type.
ContextObject(&'r str),
/// Indicates if elements should consume the whitespace after them.
ConsumeTrailingWhitespace(bool),
/// This is just here to use the 's lifetime until I'm sure we can eliminate it from ContextElement.
#[allow(dead_code)]
Placeholder(PhantomData<&'s str>),
}
pub(crate) struct ExitMatcherNode<'r> {
// TODO: Should this be "&'r DynContextMatcher<'c>" ?
pub(crate) exit_matcher: &'r DynContextMatcher<'r>,
pub(crate) class: ExitClass,
}
impl<'r> std::fmt::Debug for ExitMatcherNode<'r> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut formatter = f.debug_struct("ExitMatcherNode");
formatter.field("class", &self.class.to_string());
formatter.finish()
}
}
#[derive(Debug)]
pub(crate) struct Context<'g, 'r, 's> {
global_settings: &'g GlobalSettings<'g, 's>,
tree: List<'r, &'r ContextElement<'r, 's>>,
}
impl<'g, 'r, 's> Context<'g, 'r, 's> {
pub(crate) fn new(
global_settings: &'g GlobalSettings<'g, 's>,
tree: List<'r, &'r ContextElement<'r, 's>>,
) -> Self {
Self {
global_settings,
tree,
}
}
pub(crate) fn with_additional_node(&'r self, new_element: &'r ContextElement<'r, 's>) -> Self {
let new_tree = self.tree.push(new_element);
Self::new(self.global_settings, new_tree)
}
pub(crate) fn iter(&'r self) -> super::list::Iter<'r, &'r ContextElement<'r, 's>> {
self.tree.iter()
}
fn iter_context(&'r self) -> Iter<'g, 'r, 's> {
Iter {
next: self.tree.iter_list(),
global_settings: self.global_settings,
}
}
pub(crate) fn get_parent(&'r self) -> Option<Self> {
self.tree.get_parent().map(|parent_tree| Self {
global_settings: self.global_settings,
tree: parent_tree.clone(),
})
}
fn get_data(&self) -> &ContextElement<'r, 's> {
self.tree.get_data()
}
pub(crate) fn get_global_settings(&self) -> &'g GlobalSettings<'g, 's> {
self.global_settings
}
pub(crate) fn with_global_settings<'gg>(
&self,
new_settings: &'gg GlobalSettings<'gg, 's>,
) -> Context<'gg, 'r, 's> {
Context {
global_settings: new_settings,
tree: self.tree.clone(),
}
}
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
pub(crate) fn check_exit_matcher(
&'r self,
i: OrgSource<'s>,
) -> IResult<OrgSource<'s>, OrgSource<'s>, CustomError<OrgSource<'s>>> {
let mut current_class_filter = ExitClass::Gamma;
for current_node in self.iter_context() {
let context_element = current_node.get_data();
match context_element {
ContextElement::ExitMatcherNode(exit_matcher) => {
if exit_matcher.class as u32 <= current_class_filter as u32 {
current_class_filter = exit_matcher.class;
let local_result = (exit_matcher.exit_matcher)(&current_node, i);
if local_result.is_ok() {
return local_result;
}
}
}
_ => {}
};
}
// TODO: Make this a specific error instead of just a generic MyError
return Err(nom::Err::Error(CustomError::MyError(MyError(
"NoExit".into(),
))));
}
/// Indicates if elements should consume the whitespace after them.
///
/// Defaults to true.
pub(crate) fn should_consume_trailing_whitespace(&self) -> bool {
self._should_consume_trailing_whitespace().unwrap_or(true)
}
fn _should_consume_trailing_whitespace(&self) -> Option<bool> {
for current_node in self.iter() {
match current_node {
ContextElement::ConsumeTrailingWhitespace(should) => {
return Some(*should);
}
_ => {}
}
}
None
}
}
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
fn document_end<'b, 'g, 'r, 's>(
_context: RefContext<'b, 'g, 'r, 's>,
input: OrgSource<'s>,
) -> Res<OrgSource<'s>, OrgSource<'s>> {
eof(input)
}
struct Iter<'g, 'r, 's> {
global_settings: &'g GlobalSettings<'g, 's>,
next: super::list::IterList<'r, &'r ContextElement<'r, 's>>,
}
impl<'g, 'r, 's> Iterator for Iter<'g, 'r, 's> {
type Item = Context<'g, 'r, 's>;
fn next(&mut self) -> Option<Self::Item> {
let next_tree = self.next.next();
let ret =
next_tree.map(|parent_tree| Context::new(self.global_settings, parent_tree.clone()));
ret
}
}
impl<'r, 's> ContextElement<'r, 's> {
pub(crate) fn document_context() -> Self {
Self::ExitMatcherNode(ExitMatcherNode {
exit_matcher: &document_end,
class: ExitClass::Document,
})
}
}

View File

@@ -1,13 +0,0 @@
#[derive(Debug, Copy, Clone)]
pub(crate) enum ExitClass {
Document = 1,
Alpha = 2,
Beta = 3,
Gamma = 4,
}
impl std::fmt::Display for ExitClass {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(f, "{:?}", self)
}
}

View File

@@ -1,23 +0,0 @@
use std::fmt::Debug;
use std::path::PathBuf;
/// Abstraction over reading files so callers are not tied to the local
/// filesystem.
pub trait FileAccessInterface: Debug {
    /// Reads the entire file at `path` into a string.
    fn read_file(&self, path: &str) -> Result<String, std::io::Error>;
}
/// [`FileAccessInterface`] implementation that reads from the local
/// filesystem, optionally rooted at a working directory.
#[derive(Debug, Clone)]
pub struct LocalFileAccessInterface {
    /// Base directory that `path` is joined onto. `None` means paths are
    /// used as given (i.e. relative to the process working directory).
    pub working_directory: Option<PathBuf>,
}
impl FileAccessInterface for LocalFileAccessInterface {
    fn read_file(&self, path: &str) -> Result<String, std::io::Error> {
        let final_path = match self.working_directory.as_deref() {
            Some(base) => base.join(path),
            None => PathBuf::from(path),
        };
        // The error type already matches, so return directly instead of the
        // original `Ok(…?)` round-trip (clippy::needless_question_mark).
        std::fs::read_to_string(final_path)
    }
}

View File

@@ -1,72 +0,0 @@
use std::collections::BTreeSet;
use super::FileAccessInterface;
use super::LocalFileAccessInterface;
use crate::types::IndentationLevel;
use crate::types::Object;
// TODO: Ultimately, I think we'll need most of this: https://orgmode.org/manual/In_002dbuffer-Settings.html
/// Parser-wide settings shared across the parsing of a single document.
#[derive(Debug, Clone)]
pub struct GlobalSettings<'g, 's> {
    /// Registered radio targets; presumably matched against text to produce
    /// radio links — TODO confirm against the radio link parser.
    pub radio_targets: Vec<&'g Vec<Object<'s>>>,
    /// Interface used for any file reads, so file access can be swapped out.
    pub file_access: &'g dyn FileAccessInterface,
    /// Headline TODO keywords treated as in-progress.
    pub in_progress_todo_keywords: BTreeSet<String>,
    /// Headline TODO keywords treated as complete.
    pub complete_todo_keywords: BTreeSet<String>,
    /// Set to true to allow for plain lists using single letters as the bullet in the same way that numbers are used.
    ///
    /// Corresponds to the org-list-allow-alphabetical elisp variable.
    pub list_allow_alphabetical: bool,
    /// How many spaces a tab should be equal to.
    ///
    /// Corresponds to the tab-width elisp variable.
    pub tab_width: IndentationLevel,
    /// Whether to only allow odd headline levels.
    ///
    /// Corresponds to org-odd-levels-only elisp variable.
    pub odd_levels_only: HeadlineLevelFilter,
    /// If a headline title matches this string exactly, then that section will become a "footnote section".
    ///
    /// Corresponds to org-footnote-section elisp variable.
    pub footnote_section: &'g str,
}
/// Default number of spaces a tab is considered equal to.
pub const DEFAULT_TAB_WIDTH: IndentationLevel = 8;
impl<'g, 's> GlobalSettings<'g, 's> {
    /// Builds the default settings (this backs the `Default` impl below).
    fn new() -> GlobalSettings<'g, 's> {
        GlobalSettings {
            radio_targets: Vec::new(),
            // Local filesystem access relative to the process working directory.
            file_access: &LocalFileAccessInterface {
                working_directory: None,
            },
            in_progress_todo_keywords: BTreeSet::new(),
            complete_todo_keywords: BTreeSet::new(),
            list_allow_alphabetical: false,
            tab_width: DEFAULT_TAB_WIDTH,
            odd_levels_only: HeadlineLevelFilter::default(),
            footnote_section: "Footnotes",
        }
    }
}
impl<'g, 's> Default for GlobalSettings<'g, 's> {
    /// Delegates to [`GlobalSettings::new`].
    fn default() -> GlobalSettings<'g, 's> {
        GlobalSettings::new()
    }
}
/// Which headline levels are permitted.
///
/// See the org-odd-levels-only elisp variable.
#[derive(Debug, Clone, PartialEq)]
pub enum HeadlineLevelFilter {
    /// Only odd headline levels are allowed.
    Odd,
    /// Both odd and even headline levels are allowed.
    OddEven,
}
impl Default for HeadlineLevelFilter {
    /// All levels are allowed by default.
    fn default() -> Self {
        HeadlineLevelFilter::OddEven
    }
}

View File

@@ -1,70 +0,0 @@
use std::fmt::Debug;
/// Immutable singly-linked list where each node borrows its parent.
///
/// `push` never mutates: it returns a new head node whose parent is the old
/// head, so the list naturally unwinds as child nodes go out of scope.
#[derive(Debug, Clone)]
pub(crate) struct List<'parent, T> {
    data: T,
    parent: Link<'parent, T>,
}
// TODO: Should I be defining a lifetime for T in the generics here? Ref: https://quinedot.github.io/rust-learning/dyn-elision-advanced.html#iteraction-with-type-aliases
/// Optional reference to a parent node; `None` marks the root.
type Link<'parent, T> = Option<&'parent List<'parent, T>>;
impl<'parent, T> List<'parent, T> {
    /// Creates a single-node (root) list holding `first_item`.
    pub(crate) fn new(first_item: T) -> Self {
        Self {
            data: first_item,
            parent: None,
        }
    }
    /// Returns the payload stored in this node.
    pub(crate) fn get_data(&self) -> &T {
        &self.data
    }
    /// Returns this node's parent, or `None` if this is the root.
    pub(crate) fn get_parent(&'parent self) -> Link<'parent, T> {
        self.parent
    }
    /// Iterates over payloads from this node up to the root.
    pub(crate) fn iter(&self) -> Iter<'_, T> {
        Iter { next: Some(self) }
    }
    /// Iterates over the nodes themselves from this node up to the root.
    pub(crate) fn iter_list(&self) -> IterList<'_, T> {
        IterList { next: Some(self) }
    }
    /// Returns a new head node containing `item`, with `self` as its parent.
    pub(crate) fn push(&'parent self, item: T) -> Self {
        Self {
            data: item,
            parent: Some(self),
        }
    }
}
/// Iterator over the payload of each node from a starting node to the root.
pub(crate) struct Iter<'a, T> {
    next: Link<'a, T>,
}
impl<'a, T> Iterator for Iter<'a, T> {
    type Item = &'a T;
    fn next(&mut self) -> Option<Self::Item> {
        let ret = self.next.map(|link| link.get_data());
        // Advance to the parent; `and_then` replaces the original
        // `map(..).flatten()` (clippy::map_flatten).
        self.next = self.next.and_then(|link| link.get_parent());
        ret
    }
}
/// Iterator over the list nodes themselves from a starting node to the root.
pub(crate) struct IterList<'a, T> {
    next: Link<'a, T>,
}
impl<'a, T> Iterator for IterList<'a, T> {
    type Item = &'a List<'a, T>;
    fn next(&mut self) -> Option<Self::Item> {
        let ret = self.next;
        // Advance to the parent; `and_then` replaces the original
        // `map(..).flatten()` (clippy::map_flatten).
        self.next = self.next.and_then(|this| this.get_parent());
        ret
    }
}

View File

@@ -1,31 +0,0 @@
use crate::error::Res;
use crate::parser::OrgSource;
mod context;
mod exiting;
mod file_access_interface;
mod global_settings;
mod list;
mod parser_with_context;
/// Shorthand for a shared reference to a parser [`Context`].
pub(crate) type RefContext<'b, 'g, 'r, 's> = &'b Context<'g, 'r, 's>;
// NOTE(review): trait aliases are an unstable Rust feature (`trait_alias`),
// so this module presumably requires a nightly toolchain — confirm.
/// A parser that needs the surrounding context in addition to the input.
pub(crate) trait ContextMatcher = for<'b, 'g, 'r, 's> Fn(
    RefContext<'b, 'g, 'r, 's>,
    OrgSource<'s>,
) -> Res<OrgSource<'s>, OrgSource<'s>>;
type DynContextMatcher<'c> = dyn ContextMatcher + 'c;
/// A parser that only needs the input, no context.
pub(crate) trait Matcher = for<'s> Fn(OrgSource<'s>) -> Res<OrgSource<'s>, OrgSource<'s>>;
#[allow(dead_code)]
type DynMatcher<'c> = dyn Matcher + 'c;
// Re-exports forming this module's crate-facing and public surface.
pub(crate) use context::Context;
pub(crate) use context::ContextElement;
pub(crate) use context::ExitMatcherNode;
pub(crate) use exiting::ExitClass;
pub use file_access_interface::FileAccessInterface;
pub use file_access_interface::LocalFileAccessInterface;
pub use global_settings::GlobalSettings;
pub use global_settings::HeadlineLevelFilter;
pub use global_settings::DEFAULT_TAB_WIDTH;
pub(crate) use list::List;
pub(crate) use parser_with_context::parser_with_context;

View File

@@ -2,18 +2,17 @@ use nom::error::ErrorKind;
use nom::error::ParseError; use nom::error::ParseError;
use nom::IResult; use nom::IResult;
pub(crate) type Res<T, U> = IResult<T, U, CustomError<T>>; pub type Res<T, U> = IResult<T, U, CustomError<T>>;
#[derive(Debug)] // TODO: MyError probably shouldn't be based on the same type as the input type since it's used exclusively with static strings right now.
#[derive(Debug, PartialEq)]
pub enum CustomError<I> { pub enum CustomError<I> {
MyError(MyError<&'static str>), MyError(MyError<I>),
Nom(I, ErrorKind), Nom(I, ErrorKind),
IO(std::io::Error),
BoxedError(Box<dyn std::error::Error>),
} }
#[derive(Debug)] #[derive(Debug, PartialEq)]
pub struct MyError<I>(pub(crate) I); pub struct MyError<I>(pub I);
impl<I> ParseError<I> for CustomError<I> { impl<I> ParseError<I> for CustomError<I> {
fn from_error_kind(input: I, kind: ErrorKind) -> Self { fn from_error_kind(input: I, kind: ErrorKind) -> Self {
@@ -25,21 +24,3 @@ impl<I> ParseError<I> for CustomError<I> {
other other
} }
} }
impl<I> From<std::io::Error> for CustomError<I> {
fn from(value: std::io::Error) -> Self {
CustomError::IO(value)
}
}
impl<I> From<&'static str> for CustomError<I> {
fn from(value: &'static str) -> Self {
CustomError::MyError(MyError(value))
}
}
impl<I> From<Box<dyn std::error::Error>> for CustomError<I> {
fn from(value: Box<dyn std::error::Error>) -> Self {
CustomError::BoxedError(value)
}
}

View File

@@ -1,4 +1,4 @@
mod error; mod error;
pub(crate) use error::CustomError; pub use error::CustomError;
pub(crate) use error::MyError; pub use error::MyError;
pub(crate) use error::Res; pub use error::Res;

View File

@@ -10,7 +10,7 @@ const SERVICE_NAME: &'static str = "organic";
// Despite the obvious verbosity that fully-qualifying everything causes, in these functions I am fully-qualifying everything relating to tracing. This is because the tracing feature involves multiple libraries working together and so I think it is beneficial to see which libraries contribute which bits. // Despite the obvious verbosity that fully-qualifying everything causes, in these functions I am fully-qualifying everything relating to tracing. This is because the tracing feature involves multiple libraries working together and so I think it is beneficial to see which libraries contribute which bits.
#[cfg(feature = "tracing")] #[cfg(feature = "tracing")]
pub(crate) fn init_telemetry() -> Result<(), Box<dyn std::error::Error>> { pub fn init_telemetry() -> Result<(), Box<dyn std::error::Error>> {
// by default it will hit http://localhost:4317 with a gRPC payload // by default it will hit http://localhost:4317 with a gRPC payload
// TODO: I think the endpoint can be controlled by the OTEL_EXPORTER_OTLP_TRACES_ENDPOINT env variable instead of hard-coded into this code base. Regardless, I am the only developer right now so I am not too concerned. // TODO: I think the endpoint can be controlled by the OTEL_EXPORTER_OTLP_TRACES_ENDPOINT env variable instead of hard-coded into this code base. Regardless, I am the only developer right now so I am not too concerned.
let exporter = opentelemetry_otlp::new_exporter() let exporter = opentelemetry_otlp::new_exporter()
@@ -55,17 +55,17 @@ pub(crate) fn init_telemetry() -> Result<(), Box<dyn std::error::Error>> {
} }
#[cfg(feature = "tracing")] #[cfg(feature = "tracing")]
pub(crate) fn shutdown_telemetry() -> Result<(), Box<dyn std::error::Error>> { pub fn shutdown_telemetry() -> Result<(), Box<dyn std::error::Error>> {
opentelemetry::global::shutdown_tracer_provider(); opentelemetry::global::shutdown_tracer_provider();
Ok(()) Ok(())
} }
#[cfg(not(feature = "tracing"))] #[cfg(not(feature = "tracing"))]
pub(crate) fn init_telemetry() -> Result<(), Box<dyn std::error::Error>> { pub fn init_telemetry() -> Result<(), Box<dyn std::error::Error>> {
Ok(()) Ok(())
} }
#[cfg(not(feature = "tracing"))] #[cfg(not(feature = "tracing"))]
pub(crate) fn shutdown_telemetry() -> Result<(), Box<dyn std::error::Error>> { pub fn shutdown_telemetry() -> Result<(), Box<dyn std::error::Error>> {
Ok(()) Ok(())
} }

View File

@@ -1,104 +0,0 @@
use std::collections::VecDeque;
use super::ast_node_iter::AstNodeIter;
use crate::types::AstNode;
/// Iterator over an [`AstNode`] and every node beneath it, in breadth-first
/// order (the starting node is yielded first).
pub struct AllAstNodeIter<'r, 's> {
    // The starting node; taken and yielded on the first call to `next`.
    root: Option<AstNode<'r, 's>>,
    // Pending per-node child iterators, drained from the front; newly
    // yielded nodes have their child iterators pushed onto the back.
    queue: VecDeque<AstNodeIter<'r, 's>>,
}
impl<'r, 's> Iterator for AllAstNodeIter<'r, 's> {
    type Item = AstNode<'r, 's>;
    /// Breadth-first traversal: yields the root, then repeatedly drains the
    /// front child-iterator, queueing each yielded node's own children at
    /// the back before returning it.
    fn next(&mut self) -> Option<Self::Item> {
        // First call: yield the root itself and queue its children.
        if let Some(root) = self.root.take() {
            self.queue.push_back(AstNodeIter::from_ast_node(&root));
            return Some(root);
        }
        while let Some(child) = self.queue.front_mut() {
            // Dispatch to the concrete iterator for the node type.
            let next_elem_this_iter = match child {
                AstNodeIter::Document(ref mut i) => i.next(),
                AstNodeIter::Heading(ref mut i) => i.next(),
                AstNodeIter::Section(ref mut i) => i.next(),
                AstNodeIter::Paragraph(ref mut i) => i.next(),
                AstNodeIter::PlainList(ref mut i) => i.next(),
                AstNodeIter::PlainListItem(ref mut i) => i.next(),
                AstNodeIter::CenterBlock(ref mut i) => i.next(),
                AstNodeIter::QuoteBlock(ref mut i) => i.next(),
                AstNodeIter::SpecialBlock(ref mut i) => i.next(),
                AstNodeIter::DynamicBlock(ref mut i) => i.next(),
                AstNodeIter::FootnoteDefinition(ref mut i) => i.next(),
                AstNodeIter::Comment(ref mut i) => i.next(),
                AstNodeIter::Drawer(ref mut i) => i.next(),
                AstNodeIter::PropertyDrawer(ref mut i) => i.next(),
                AstNodeIter::NodeProperty(ref mut i) => i.next(),
                AstNodeIter::Table(ref mut i) => i.next(),
                AstNodeIter::TableRow(ref mut i) => i.next(),
                AstNodeIter::VerseBlock(ref mut i) => i.next(),
                AstNodeIter::CommentBlock(ref mut i) => i.next(),
                AstNodeIter::ExampleBlock(ref mut i) => i.next(),
                AstNodeIter::ExportBlock(ref mut i) => i.next(),
                AstNodeIter::SrcBlock(ref mut i) => i.next(),
                AstNodeIter::Clock(ref mut i) => i.next(),
                AstNodeIter::DiarySexp(ref mut i) => i.next(),
                AstNodeIter::Planning(ref mut i) => i.next(),
                AstNodeIter::FixedWidthArea(ref mut i) => i.next(),
                AstNodeIter::HorizontalRule(ref mut i) => i.next(),
                AstNodeIter::Keyword(ref mut i) => i.next(),
                AstNodeIter::BabelCall(ref mut i) => i.next(),
                AstNodeIter::LatexEnvironment(ref mut i) => i.next(),
                AstNodeIter::Bold(ref mut i) => i.next(),
                AstNodeIter::Italic(ref mut i) => i.next(),
                AstNodeIter::Underline(ref mut i) => i.next(),
                AstNodeIter::StrikeThrough(ref mut i) => i.next(),
                AstNodeIter::Code(ref mut i) => i.next(),
                AstNodeIter::Verbatim(ref mut i) => i.next(),
                AstNodeIter::PlainText(ref mut i) => i.next(),
                AstNodeIter::RegularLink(ref mut i) => i.next(),
                AstNodeIter::RadioLink(ref mut i) => i.next(),
                AstNodeIter::RadioTarget(ref mut i) => i.next(),
                AstNodeIter::PlainLink(ref mut i) => i.next(),
                AstNodeIter::AngleLink(ref mut i) => i.next(),
                AstNodeIter::OrgMacro(ref mut i) => i.next(),
                AstNodeIter::Entity(ref mut i) => i.next(),
                AstNodeIter::LatexFragment(ref mut i) => i.next(),
                AstNodeIter::ExportSnippet(ref mut i) => i.next(),
                AstNodeIter::FootnoteReference(ref mut i) => i.next(),
                AstNodeIter::Citation(ref mut i) => i.next(),
                AstNodeIter::CitationReference(ref mut i) => i.next(),
                AstNodeIter::InlineBabelCall(ref mut i) => i.next(),
                AstNodeIter::InlineSourceBlock(ref mut i) => i.next(),
                AstNodeIter::LineBreak(ref mut i) => i.next(),
                AstNodeIter::Target(ref mut i) => i.next(),
                AstNodeIter::StatisticsCookie(ref mut i) => i.next(),
                AstNodeIter::Subscript(ref mut i) => i.next(),
                AstNodeIter::Superscript(ref mut i) => i.next(),
                AstNodeIter::TableCell(ref mut i) => i.next(),
                AstNodeIter::Timestamp(ref mut i) => i.next(),
            };
            if let Some(next_elem_this_iter) = next_elem_this_iter {
                // Queue the yielded node's own children before returning it.
                self.queue
                    .push_back(AstNodeIter::from_ast_node(&next_elem_this_iter));
                return Some(next_elem_this_iter);
            } else {
                // Front iterator exhausted; drop it and try the next one.
                self.queue.pop_front();
            }
        }
        None
    }
}
impl<'r, 's> IntoIterator for AstNode<'r, 's> {
    type Item = AstNode<'r, 's>;
    type IntoIter = AllAstNodeIter<'r, 's>;
    /// Iterates over this node and all of its descendants (self first).
    fn into_iter(self) -> Self::IntoIter {
        AllAstNodeIter {
            root: Some(self),
            queue: VecDeque::new(),
        }
    }
}

View File

@@ -1,348 +0,0 @@
use std::marker::PhantomData;
use super::macros::children_iter;
use super::macros::empty_iter;
use super::macros::multi_field_iter;
use crate::types::AngleLink;
use crate::types::AstNode;
use crate::types::BabelCall;
use crate::types::Bold;
use crate::types::CenterBlock;
use crate::types::Citation;
use crate::types::CitationReference;
use crate::types::Clock;
use crate::types::Code;
use crate::types::Comment;
use crate::types::CommentBlock;
use crate::types::DiarySexp;
use crate::types::Document;
use crate::types::DocumentElement;
use crate::types::Drawer;
use crate::types::DynamicBlock;
use crate::types::Element;
use crate::types::Entity;
use crate::types::ExampleBlock;
use crate::types::ExportBlock;
use crate::types::ExportSnippet;
use crate::types::FixedWidthArea;
use crate::types::FootnoteDefinition;
use crate::types::FootnoteReference;
use crate::types::Heading;
use crate::types::HorizontalRule;
use crate::types::InlineBabelCall;
use crate::types::InlineSourceBlock;
use crate::types::Italic;
use crate::types::Keyword;
use crate::types::LatexEnvironment;
use crate::types::LatexFragment;
use crate::types::LineBreak;
use crate::types::NodeProperty;
use crate::types::Object;
use crate::types::OrgMacro;
use crate::types::Paragraph;
use crate::types::PlainLink;
use crate::types::PlainList;
use crate::types::PlainListItem;
use crate::types::PlainText;
use crate::types::Planning;
use crate::types::PropertyDrawer;
use crate::types::QuoteBlock;
use crate::types::RadioLink;
use crate::types::RadioTarget;
use crate::types::RegularLink;
use crate::types::Section;
use crate::types::SpecialBlock;
use crate::types::SrcBlock;
use crate::types::StatisticsCookie;
use crate::types::StrikeThrough;
use crate::types::Subscript;
use crate::types::Superscript;
use crate::types::Table;
use crate::types::TableCell;
use crate::types::TableRow;
use crate::types::Target;
use crate::types::Timestamp;
use crate::types::Underline;
use crate::types::Verbatim;
use crate::types::VerseBlock;
/// Iterator over the AST nodes contained within the starting node.
///
/// This only iterates over the children, not the starting node itself. So an AstNodeIter::PlainList would only return PlainListItems, not the PlainList.
///
/// This only iterates over AST nodes, so an AstNodeIter::Heading would iterate over both the title and section contents, but it would not iterate over simple strings like the TODO keyword or priority.
///
/// Construct via [`AstNodeIter::from_ast_node`]; each variant wraps the
/// macro-generated iterator type for the corresponding node type.
pub(crate) enum AstNodeIter<'r, 's> {
    // Document Nodes
    Document(DocumentIter<'r, 's>),
    Heading(HeadingIter<'r, 's>),
    Section(SectionIter<'r, 's>),
    // Elements
    Paragraph(ParagraphIter<'r, 's>),
    PlainList(PlainListIter<'r, 's>),
    PlainListItem(PlainListItemIter<'r, 's>),
    CenterBlock(CenterBlockIter<'r, 's>),
    QuoteBlock(QuoteBlockIter<'r, 's>),
    SpecialBlock(SpecialBlockIter<'r, 's>),
    DynamicBlock(DynamicBlockIter<'r, 's>),
    FootnoteDefinition(FootnoteDefinitionIter<'r, 's>),
    Comment(CommentIter<'r, 's>),
    Drawer(DrawerIter<'r, 's>),
    PropertyDrawer(PropertyDrawerIter<'r, 's>),
    NodeProperty(NodePropertyIter<'r, 's>),
    Table(TableIter<'r, 's>),
    TableRow(TableRowIter<'r, 's>),
    VerseBlock(VerseBlockIter<'r, 's>),
    CommentBlock(CommentBlockIter<'r, 's>),
    ExampleBlock(ExampleBlockIter<'r, 's>),
    ExportBlock(ExportBlockIter<'r, 's>),
    SrcBlock(SrcBlockIter<'r, 's>),
    Clock(ClockIter<'r, 's>),
    DiarySexp(DiarySexpIter<'r, 's>),
    Planning(PlanningIter<'r, 's>),
    FixedWidthArea(FixedWidthAreaIter<'r, 's>),
    HorizontalRule(HorizontalRuleIter<'r, 's>),
    Keyword(KeywordIter<'r, 's>),
    BabelCall(BabelCallIter<'r, 's>),
    LatexEnvironment(LatexEnvironmentIter<'r, 's>),
    // Objects
    Bold(BoldIter<'r, 's>),
    Italic(ItalicIter<'r, 's>),
    Underline(UnderlineIter<'r, 's>),
    StrikeThrough(StrikeThroughIter<'r, 's>),
    Code(CodeIter<'r, 's>),
    Verbatim(VerbatimIter<'r, 's>),
    PlainText(PlainTextIter<'r, 's>),
    RegularLink(RegularLinkIter<'r, 's>),
    RadioLink(RadioLinkIter<'r, 's>),
    RadioTarget(RadioTargetIter<'r, 's>),
    PlainLink(PlainLinkIter<'r, 's>),
    AngleLink(AngleLinkIter<'r, 's>),
    OrgMacro(OrgMacroIter<'r, 's>),
    Entity(EntityIter<'r, 's>),
    LatexFragment(LatexFragmentIter<'r, 's>),
    ExportSnippet(ExportSnippetIter<'r, 's>),
    FootnoteReference(FootnoteReferenceIter<'r, 's>),
    Citation(CitationIter<'r, 's>),
    CitationReference(CitationReferenceIter<'r, 's>),
    InlineBabelCall(InlineBabelCallIter<'r, 's>),
    InlineSourceBlock(InlineSourceBlockIter<'r, 's>),
    LineBreak(LineBreakIter<'r, 's>),
    Target(TargetIter<'r, 's>),
    StatisticsCookie(StatisticsCookieIter<'r, 's>),
    Subscript(SubscriptIter<'r, 's>),
    Superscript(SuperscriptIter<'r, 's>),
    TableCell(TableCellIter<'r, 's>),
    Timestamp(TimestampIter<'r, 's>),
}
impl<'r, 's> AstNodeIter<'r, 's> {
    /// Builds the child iterator matching the concrete type of `node`.
    ///
    /// Purely mechanical dispatch: each arm delegates to the node type's
    /// `IntoIterator` impl generated by the macros in this module.
    pub(crate) fn from_ast_node(node: &AstNode<'r, 's>) -> AstNodeIter<'r, 's> {
        match node {
            AstNode::Document(inner) => AstNodeIter::Document(inner.into_iter()),
            AstNode::Heading(inner) => AstNodeIter::Heading(inner.into_iter()),
            AstNode::Section(inner) => AstNodeIter::Section(inner.into_iter()),
            AstNode::Paragraph(inner) => AstNodeIter::Paragraph(inner.into_iter()),
            AstNode::PlainList(inner) => AstNodeIter::PlainList(inner.into_iter()),
            AstNode::PlainListItem(inner) => AstNodeIter::PlainListItem(inner.into_iter()),
            AstNode::CenterBlock(inner) => AstNodeIter::CenterBlock(inner.into_iter()),
            AstNode::QuoteBlock(inner) => AstNodeIter::QuoteBlock(inner.into_iter()),
            AstNode::SpecialBlock(inner) => AstNodeIter::SpecialBlock(inner.into_iter()),
            AstNode::DynamicBlock(inner) => AstNodeIter::DynamicBlock(inner.into_iter()),
            AstNode::FootnoteDefinition(inner) => {
                AstNodeIter::FootnoteDefinition(inner.into_iter())
            }
            AstNode::Comment(inner) => AstNodeIter::Comment(inner.into_iter()),
            AstNode::Drawer(inner) => AstNodeIter::Drawer(inner.into_iter()),
            AstNode::PropertyDrawer(inner) => AstNodeIter::PropertyDrawer(inner.into_iter()),
            AstNode::NodeProperty(inner) => AstNodeIter::NodeProperty(inner.into_iter()),
            AstNode::Table(inner) => AstNodeIter::Table(inner.into_iter()),
            AstNode::TableRow(inner) => AstNodeIter::TableRow(inner.into_iter()),
            AstNode::VerseBlock(inner) => AstNodeIter::VerseBlock(inner.into_iter()),
            AstNode::CommentBlock(inner) => AstNodeIter::CommentBlock(inner.into_iter()),
            AstNode::ExampleBlock(inner) => AstNodeIter::ExampleBlock(inner.into_iter()),
            AstNode::ExportBlock(inner) => AstNodeIter::ExportBlock(inner.into_iter()),
            AstNode::SrcBlock(inner) => AstNodeIter::SrcBlock(inner.into_iter()),
            AstNode::Clock(inner) => AstNodeIter::Clock(inner.into_iter()),
            AstNode::DiarySexp(inner) => AstNodeIter::DiarySexp(inner.into_iter()),
            AstNode::Planning(inner) => AstNodeIter::Planning(inner.into_iter()),
            AstNode::FixedWidthArea(inner) => AstNodeIter::FixedWidthArea(inner.into_iter()),
            AstNode::HorizontalRule(inner) => AstNodeIter::HorizontalRule(inner.into_iter()),
            AstNode::Keyword(inner) => AstNodeIter::Keyword(inner.into_iter()),
            AstNode::BabelCall(inner) => AstNodeIter::BabelCall(inner.into_iter()),
            AstNode::LatexEnvironment(inner) => AstNodeIter::LatexEnvironment(inner.into_iter()),
            AstNode::Bold(inner) => AstNodeIter::Bold(inner.into_iter()),
            AstNode::Italic(inner) => AstNodeIter::Italic(inner.into_iter()),
            AstNode::Underline(inner) => AstNodeIter::Underline(inner.into_iter()),
            AstNode::StrikeThrough(inner) => AstNodeIter::StrikeThrough(inner.into_iter()),
            AstNode::Code(inner) => AstNodeIter::Code(inner.into_iter()),
            AstNode::Verbatim(inner) => AstNodeIter::Verbatim(inner.into_iter()),
            AstNode::PlainText(inner) => AstNodeIter::PlainText(inner.into_iter()),
            AstNode::RegularLink(inner) => AstNodeIter::RegularLink(inner.into_iter()),
            AstNode::RadioLink(inner) => AstNodeIter::RadioLink(inner.into_iter()),
            AstNode::RadioTarget(inner) => AstNodeIter::RadioTarget(inner.into_iter()),
            AstNode::PlainLink(inner) => AstNodeIter::PlainLink(inner.into_iter()),
            AstNode::AngleLink(inner) => AstNodeIter::AngleLink(inner.into_iter()),
            AstNode::OrgMacro(inner) => AstNodeIter::OrgMacro(inner.into_iter()),
            AstNode::Entity(inner) => AstNodeIter::Entity(inner.into_iter()),
            AstNode::LatexFragment(inner) => AstNodeIter::LatexFragment(inner.into_iter()),
            AstNode::ExportSnippet(inner) => AstNodeIter::ExportSnippet(inner.into_iter()),
            AstNode::FootnoteReference(inner) => AstNodeIter::FootnoteReference(inner.into_iter()),
            AstNode::Citation(inner) => AstNodeIter::Citation(inner.into_iter()),
            AstNode::CitationReference(inner) => AstNodeIter::CitationReference(inner.into_iter()),
            AstNode::InlineBabelCall(inner) => AstNodeIter::InlineBabelCall(inner.into_iter()),
            AstNode::InlineSourceBlock(inner) => AstNodeIter::InlineSourceBlock(inner.into_iter()),
            AstNode::LineBreak(inner) => AstNodeIter::LineBreak(inner.into_iter()),
            AstNode::Target(inner) => AstNodeIter::Target(inner.into_iter()),
            AstNode::StatisticsCookie(inner) => AstNodeIter::StatisticsCookie(inner.into_iter()),
            AstNode::Subscript(inner) => AstNodeIter::Subscript(inner.into_iter()),
            AstNode::Superscript(inner) => AstNodeIter::Superscript(inner.into_iter()),
            AstNode::TableCell(inner) => AstNodeIter::TableCell(inner.into_iter()),
            AstNode::Timestamp(inner) => AstNodeIter::Timestamp(inner.into_iter()),
        }
    }
}
// Document-level nodes.
multi_field_iter!(
    Document<'s>,
    DocumentIter,
    zeroth_section,
    std::option::Iter<'r, Section<'s>>,
    children,
    std::slice::Iter<'r, Heading<'s>>
);
multi_field_iter!(
    Heading<'s>,
    HeadingIter,
    title,
    std::slice::Iter<'r, Object<'s>>,
    children,
    std::slice::Iter<'r, DocumentElement<'s>>
);
children_iter!(Section<'s>, SectionIter, std::slice::Iter<'r, Element<'s>>);
// Elements.
children_iter!(
    Paragraph<'s>,
    ParagraphIter,
    std::slice::Iter<'r, Object<'s>>
);
children_iter!(
    PlainList<'s>,
    PlainListIter,
    std::slice::Iter<'r, PlainListItem<'s>>
);
multi_field_iter!(
    PlainListItem<'s>,
    PlainListItemIter,
    tag,
    std::slice::Iter<'r, Object<'s>>,
    children,
    std::slice::Iter<'r, Element<'s>>
);
children_iter!(
    CenterBlock<'s>,
    CenterBlockIter,
    std::slice::Iter<'r, Element<'s>>
);
children_iter!(
    QuoteBlock<'s>,
    QuoteBlockIter,
    std::slice::Iter<'r, Element<'s>>
);
children_iter!(
    SpecialBlock<'s>,
    SpecialBlockIter,
    std::slice::Iter<'r, Element<'s>>
);
children_iter!(
    DynamicBlock<'s>,
    DynamicBlockIter,
    std::slice::Iter<'r, Element<'s>>
);
children_iter!(
    FootnoteDefinition<'s>,
    FootnoteDefinitionIter,
    std::slice::Iter<'r, Element<'s>>
);
empty_iter!(Comment<'s>, CommentIter);
children_iter!(Drawer<'s>, DrawerIter, std::slice::Iter<'r, Element<'s>>);
children_iter!(
    PropertyDrawer<'s>,
    PropertyDrawerIter,
    std::slice::Iter<'r, NodeProperty<'s>>
);
empty_iter!(NodeProperty<'s>, NodePropertyIter);
children_iter!(Table<'s>, TableIter, std::slice::Iter<'r, TableRow<'s>>);
children_iter!(
    TableRow<'s>,
    TableRowIter,
    std::slice::Iter<'r, TableCell<'s>>
);
children_iter!(
    VerseBlock<'s>,
    VerseBlockIter,
    std::slice::Iter<'r, Object<'s>>
);
empty_iter!(CommentBlock<'s>, CommentBlockIter);
empty_iter!(ExampleBlock<'s>, ExampleBlockIter);
empty_iter!(ExportBlock<'s>, ExportBlockIter);
empty_iter!(SrcBlock<'s>, SrcBlockIter);
empty_iter!(Clock<'s>, ClockIter);
empty_iter!(DiarySexp<'s>, DiarySexpIter);
empty_iter!(Planning<'s>, PlanningIter);
empty_iter!(FixedWidthArea<'s>, FixedWidthAreaIter);
empty_iter!(HorizontalRule<'s>, HorizontalRuleIter);
empty_iter!(Keyword<'s>, KeywordIter);
empty_iter!(BabelCall<'s>, BabelCallIter);
empty_iter!(LatexEnvironment<'s>, LatexEnvironmentIter);
// Objects.
children_iter!(Bold<'s>, BoldIter, std::slice::Iter<'r, Object<'s>>);
children_iter!(Italic<'s>, ItalicIter, std::slice::Iter<'r, Object<'s>>);
children_iter!(
    Underline<'s>,
    UnderlineIter,
    std::slice::Iter<'r, Object<'s>>
);
children_iter!(
    StrikeThrough<'s>,
    StrikeThroughIter,
    std::slice::Iter<'r, Object<'s>>
);
empty_iter!(Code<'s>, CodeIter);
empty_iter!(Verbatim<'s>, VerbatimIter);
empty_iter!(PlainText<'s>, PlainTextIter);
empty_iter!(RegularLink<'s>, RegularLinkIter);
children_iter!(
    RadioLink<'s>,
    RadioLinkIter,
    std::slice::Iter<'r, Object<'s>>
);
children_iter!(
    RadioTarget<'s>,
    RadioTargetIter,
    std::slice::Iter<'r, Object<'s>>
);
empty_iter!(PlainLink<'s>, PlainLinkIter);
empty_iter!(AngleLink<'s>, AngleLinkIter);
empty_iter!(OrgMacro<'s>, OrgMacroIter);
empty_iter!(Entity<'s>, EntityIter);
empty_iter!(LatexFragment<'s>, LatexFragmentIter);
empty_iter!(ExportSnippet<'s>, ExportSnippetIter);
// Only `definition` holds AST children here; the trailing comma satisfies
// the macro's zero-repetition tail.
multi_field_iter!(
    FootnoteReference<'s>,
    FootnoteReferenceIter,
    definition,
    std::slice::Iter<'r, Object<'s>>,
);
empty_iter!(Citation<'s>, CitationIter);
empty_iter!(CitationReference<'s>, CitationReferenceIter);
empty_iter!(InlineBabelCall<'s>, InlineBabelCallIter);
empty_iter!(InlineSourceBlock<'s>, InlineSourceBlockIter);
empty_iter!(LineBreak<'s>, LineBreakIter);
empty_iter!(Target<'s>, TargetIter);
empty_iter!(StatisticsCookie<'s>, StatisticsCookieIter);
empty_iter!(Subscript<'s>, SubscriptIter);
empty_iter!(Superscript<'s>, SuperscriptIter);
children_iter!(
    TableCell<'s>,
    TableCellIter,
    std::slice::Iter<'r, Object<'s>>
);
empty_iter!(Timestamp<'s>, TimestampIter);

View File

@@ -1,101 +0,0 @@
/// Create iterators for ast nodes where it only has to iterate over children
///
/// Expands to an iterator struct wrapping the node's `children` iterator
/// (converting each child into an [`AstNode`]) plus an `IntoIterator` impl
/// for a reference to the node type.
macro_rules! children_iter {
    ($astnodetype:ty, $itertype:ident, $innertype:ty) => {
        pub struct $itertype<'r, 's> {
            next: $innertype,
        }
        impl<'r, 's> Iterator for $itertype<'r, 's> {
            type Item = AstNode<'r, 's>;
            fn next(&mut self) -> Option<Self::Item> {
                self.next.next().map(Into::<AstNode>::into)
            }
        }
        impl<'r, 's> IntoIterator for &'r $astnodetype {
            type Item = AstNode<'r, 's>;
            type IntoIter = $itertype<'r, 's>;
            fn into_iter(self) -> Self::IntoIter {
                $itertype {
                    next: self.children.iter(),
                }
            }
        }
    };
}
pub(crate) use children_iter;
/// Create iterators for ast nodes that do not contain any ast node children.
///
/// Expands to an iterator that immediately returns `None`; the `PhantomData`
/// only ties the generated type to the node reference's lifetime.
macro_rules! empty_iter {
    ($astnodetype:ty, $itertype:ident) => {
        pub struct $itertype<'r, 's> {
            phantom: PhantomData<&'r $astnodetype>,
        }
        impl<'r, 's> Iterator for $itertype<'r, 's> {
            type Item = AstNode<'r, 's>;
            fn next(&mut self) -> Option<Self::Item> {
                None
            }
        }
        impl<'r, 's> IntoIterator for &'r $astnodetype {
            type Item = AstNode<'r, 's>;
            type IntoIter = $itertype<'r, 's>;
            fn into_iter(self) -> Self::IntoIter {
                $itertype {
                    phantom: PhantomData,
                }
            }
        }
    };
}
pub(crate) use empty_iter;
/// Create iterators for ast nodes where it has to iterate over multiple child lists.
///
/// Fields are drained in the order given: the first field's iterator is
/// exhausted before falling back (via `or_else`) to each subsequent field.
///
/// Fix: the expansion repetitions previously used the `,` separator
/// (`$(…),*`) while each repetition already ended in a comma / produced a
/// `.or_else(…)` call. With two or more trailing field pairs this expanded to
/// double commas (struct fields) and comma-separated method calls (the
/// `or_else` chain), which fail to compile. Using `$(…)*` with no separator
/// expands identically for the existing zero- and one-pair invocations and
/// also works for any number of pairs.
macro_rules! multi_field_iter {
    ($astnodetype:ty, $itertype:ident, $firstfieldname: ident, $firstinnertype:ty, $($fieldname: ident, $innertype:ty),*) => {
        pub struct $itertype<'r, 's> {
            $firstfieldname: $firstinnertype,
            $(
                $fieldname: $innertype,
            )*
        }
        impl<'r, 's> Iterator for $itertype<'r, 's> {
            type Item = AstNode<'r, 's>;
            fn next(&mut self) -> Option<Self::Item> {
                self.$firstfieldname.next().map(Into::<AstNode>::into)
                $(
                    .or_else(|| self.$fieldname.next().map(Into::<AstNode>::into))
                )*
            }
        }
        impl<'r, 's> IntoIterator for &'r $astnodetype {
            type Item = AstNode<'r, 's>;
            type IntoIter = $itertype<'r, 's>;
            fn into_iter(self) -> Self::IntoIter {
                $itertype {
                    $firstfieldname: self.$firstfieldname.iter(),
                    $(
                        $fieldname: self.$fieldname.iter(),
                    )*
                }
            }
        }
    };
}
pub(crate) use multi_field_iter;

Some files were not shown because too many files have changed in this diff Show More