Compare commits
241 Commits
feature_ma
...
v0.1.7
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
26f41b83aa | ||
|
|
e4c0e32536 | ||
|
|
37e85158ea | ||
|
|
6589a755a6 | ||
|
|
a651b79e77 | ||
|
|
98de5e4ec5 | ||
|
|
cf383fa394 | ||
|
|
84953c1669 | ||
|
|
7650a9edff | ||
|
|
a74319d381 | ||
|
|
7e57285ea7 | ||
|
|
f103d168d5 | ||
|
|
f54081437a | ||
|
|
aa5988bc2f | ||
|
|
76ca2b9762 | ||
|
|
1561e1e580 | ||
|
|
1f11bfa2ec | ||
|
|
8440a3b256 | ||
|
|
de7ad182b3 | ||
|
|
b75d9f5c91 | ||
|
|
612744ebd0 | ||
|
|
1b4b8b4bdb | ||
|
|
5587e19f16 | ||
|
|
80f7098f9b | ||
|
|
84d2babda9 | ||
|
|
cc56b79683 | ||
|
|
0105b49d0d | ||
|
|
d79035e14d | ||
|
|
7545fb7e1a | ||
|
|
f30069efe7 | ||
|
|
d1fe2f6b09 | ||
|
|
21c60d1036 | ||
|
|
6a1bdd5fee | ||
|
|
5d20d3e99b | ||
|
|
a8fbf01124 | ||
|
|
344ef04453 | ||
|
|
ceb722e476 | ||
|
|
b04341882c | ||
|
|
494fe5cceb | ||
|
|
0110d23387 | ||
|
|
0d7a15bfeb | ||
|
|
352c20d1d8 | ||
|
|
f82d2aada1 | ||
|
|
669da4073e | ||
|
|
0056657b65 | ||
|
|
8780976c15 | ||
|
|
dc8b8d08ab | ||
|
|
93d3d9471f | ||
|
|
c7c0deed74 | ||
|
|
b32c21eb1d | ||
|
|
2e6e6fdd2b | ||
|
|
3cc2294387 | ||
|
|
40f22034da | ||
|
|
ab612f293f | ||
|
|
57c2922e4a | ||
|
|
c2eb1f51c8 | ||
|
|
b0930df788 | ||
|
|
69512f559a | ||
|
|
76a81b73ac | ||
|
|
ba291c6776 | ||
|
|
6b82b46e09 | ||
|
|
6676012eb1 | ||
|
|
facbe716e9 | ||
|
|
827f3e1c98 | ||
|
|
fcea7e5a4b | ||
|
|
dda2b1e69f | ||
|
|
f79d07a7c8 | ||
|
|
45283b48d9 | ||
|
|
08e4c646e5 | ||
|
|
f8b99ed235 | ||
|
|
6fc607cfe0 | ||
|
|
49afcf0db6 | ||
|
|
c4d7e646fc | ||
|
|
3fc3a5d1ef | ||
|
|
6e2fc362ea | ||
|
|
90fa48661c | ||
|
|
5cefcd5fac | ||
|
|
b83a103c17 | ||
|
|
d90ff5891b | ||
|
|
a3c01805b8 | ||
|
|
e3d755317d | ||
|
|
b89607fc8b | ||
|
|
51c4e2b62a | ||
|
|
a6561d37fb | ||
|
|
4e8b3eb422 | ||
|
|
2c31590974 | ||
|
|
28b2d27054 | ||
|
|
84edd10864 | ||
|
|
728a79f9a4 | ||
|
|
ad4ef50669 | ||
|
|
12cbb89861 | ||
|
|
7c471ab32e | ||
|
|
400f53e440 | ||
|
|
028aeb70aa | ||
|
|
70fafd801e | ||
|
|
bdba495f69 | ||
|
|
b0392ad6fb | ||
|
|
1c142b68c6 | ||
|
|
9060f9b26d | ||
|
|
d3c733c5ad | ||
|
|
275b4b53d1 | ||
|
|
d38e198258 | ||
|
|
27cf6c0462 | ||
|
|
c7d5c89a60 | ||
|
|
ee02e07717 | ||
|
|
a7330e38e4 | ||
|
|
08eb59acd3 | ||
|
|
da1ce2717d | ||
|
|
a8f277efe5 | ||
|
|
7f6f22717b | ||
|
|
0ef141d65e | ||
|
|
71180d19fb | ||
|
|
33091112a5 | ||
|
|
5997567233 | ||
|
|
2915a81edc | ||
|
|
df79cbd0b7 | ||
|
|
a7b9eb9db4 | ||
|
|
d262833f9b | ||
|
|
0d438a8e0f | ||
|
|
0b009511ff | ||
|
|
3bdb24ad88 | ||
|
|
fdf35ba23c | ||
|
|
cd69e08516 | ||
|
|
b54c6d366c | ||
|
|
15e8d1ab77 | ||
|
|
8502a8830d | ||
|
|
74a6101de7 | ||
|
|
ba57eb16fd | ||
|
|
c309d14776 | ||
|
|
0d728510d7 | ||
|
|
22e9bc991f | ||
|
|
564104f1e8 | ||
|
|
12ad3b09f0 | ||
|
|
eabffe5ecc | ||
|
|
b47029fdbb | ||
|
|
25b8c80d4e | ||
|
|
54825538e4 | ||
|
|
66d10a7a1b | ||
|
|
acf1205e75 | ||
|
|
2cd2f7570c | ||
|
|
f16a554154 | ||
|
|
a40a504f94 | ||
|
|
80d77ff5d6 | ||
|
|
ee92049e5d | ||
|
|
510985e97c | ||
|
|
949d0989f4 | ||
|
|
2a4d22bdd4 | ||
|
|
7a903acedc | ||
|
|
5171326d63 | ||
|
|
67f79aeb51 | ||
|
|
b2383d9f93 | ||
|
|
9e2a323f6f | ||
|
|
0fcb3f73f9 | ||
|
|
bfc9e7f58d | ||
|
|
b5f0521b56 | ||
|
|
2048d8f0b6 | ||
|
|
466716881e | ||
|
|
eb9c582fa5 | ||
|
|
214e895d85 | ||
|
|
db3086743c | ||
|
|
207a0546b0 | ||
|
|
e9480fd156 | ||
|
|
28aca041f7 | ||
|
|
d82def2a70 | ||
|
|
d471f7178b | ||
|
|
2c5c26c55f | ||
|
|
7944659802 | ||
|
|
58aca53144 | ||
|
|
6f2d90162b | ||
|
|
f170a557ed | ||
|
|
eaa38ce772 | ||
|
|
a6d742a536 | ||
|
|
45be9e7bde | ||
|
|
f6c895319f | ||
|
|
2682779534 | ||
|
|
b48d472546 | ||
|
|
ea6faf728c | ||
|
|
f4ea1b7303 | ||
|
|
80b55fdd45 | ||
|
|
f426e32798 | ||
|
|
66037356c5 | ||
|
|
1bcd1895c0 | ||
|
|
e3d38cfbe2 | ||
|
|
2ba0dc49be | ||
|
|
9df40fb13f | ||
|
|
cc671925db | ||
|
|
950baa9d5d | ||
|
|
56865c68fc | ||
|
|
f592b73ae7 | ||
|
|
3206027b96 | ||
|
|
3e6df7ba78 | ||
|
|
ac313d093e | ||
|
|
f376f1cf8e | ||
|
|
f21385a901 | ||
|
|
1d06d95bb1 | ||
|
|
bfc88c1d1b | ||
|
|
f29720e5b9 | ||
|
|
27a9b5aeb1 | ||
|
|
8051c3d2b7 | ||
|
|
bd97d2f69d | ||
|
|
14b1d0526c | ||
|
|
288350daef | ||
|
|
c683516620 | ||
|
|
e731e8ff6b | ||
|
|
4c2037ec44 | ||
|
|
a46b358549 | ||
|
|
ec813e3b3f | ||
|
|
f11f7bcc73 | ||
|
|
9e0e5f6f0a | ||
|
|
16e788c36c | ||
|
|
b35d785e73 | ||
|
|
1952d175c0 | ||
|
|
20c17c40be | ||
|
|
b6b869df25 | ||
|
|
18a396b7cb | ||
|
|
085490476e | ||
|
|
9c9964c66f | ||
|
|
1a3e26c148 | ||
|
|
e9e6a8ff64 | ||
|
|
b124317f30 | ||
|
|
ad389f0776 | ||
|
|
75dfc7f812 | ||
|
|
c17de8ef5e | ||
|
|
378b6bb391 | ||
|
|
cc86591a6c | ||
|
|
f25dbc1d7c | ||
|
|
daee50c160 | ||
|
|
3e143796f7 | ||
|
|
9cc5e63c1b | ||
|
|
be6197e4c7 | ||
|
|
2d4e54845b | ||
|
|
d5ea650b96 | ||
|
|
60363579b5 | ||
|
|
1b678fe81f | ||
|
|
bfea828e62 | ||
|
|
bc5745a95f | ||
|
|
efa372a9e9 | ||
|
|
2fb57daaec | ||
|
|
3a38f4cd35 | ||
|
|
45e16fea2d | ||
|
|
5134cece7b |
203
.lighthouse/pipeline-foreign-document-test.yaml
Normal file
203
.lighthouse/pipeline-foreign-document-test.yaml
Normal file
@@ -0,0 +1,203 @@
|
||||
apiVersion: tekton.dev/v1beta1
|
||||
kind: PipelineRun
|
||||
metadata:
|
||||
name: rust-foreign-document-test
|
||||
spec:
|
||||
pipelineSpec:
|
||||
timeouts:
|
||||
pipeline: "2h0m0s"
|
||||
tasks: "1h0m40s"
|
||||
finally: "0h30m0s"
|
||||
params:
|
||||
- name: image-name
|
||||
description: The name for the built image
|
||||
type: string
|
||||
- name: path-to-image-context
|
||||
description: The path to the build context
|
||||
type: string
|
||||
- name: path-to-dockerfile
|
||||
description: The path to the Dockerfile
|
||||
type: string
|
||||
tasks:
|
||||
- name: do-stuff
|
||||
taskSpec:
|
||||
metadata: {}
|
||||
stepTemplate:
|
||||
image: alpine:3.18
|
||||
name: ""
|
||||
resources:
|
||||
requests:
|
||||
cpu: 10m
|
||||
memory: 600Mi
|
||||
workingDir: /workspace/source
|
||||
steps:
|
||||
- image: alpine:3.18
|
||||
name: do-stuff-step
|
||||
script: |
|
||||
#!/usr/bin/env sh
|
||||
echo "hello world"
|
||||
- name: report-pending
|
||||
taskRef:
|
||||
name: gitea-set-status
|
||||
runAfter:
|
||||
- fetch-repository
|
||||
params:
|
||||
- name: CONTEXT
|
||||
value: "$(params.JOB_NAME)"
|
||||
- name: REPO_FULL_NAME
|
||||
value: "$(params.REPO_OWNER)/$(params.REPO_NAME)"
|
||||
- name: GITEA_HOST_URL
|
||||
value: code.fizz.buzz
|
||||
- name: SHA
|
||||
value: "$(tasks.fetch-repository.results.commit)"
|
||||
- name: DESCRIPTION
|
||||
value: "Build $(params.JOB_NAME) has started"
|
||||
- name: STATE
|
||||
value: pending
|
||||
- name: TARGET_URL
|
||||
value: "https://tekton.fizz.buzz/#/namespaces/$(context.pipelineRun.namespace)/pipelineruns/$(context.pipelineRun.name)"
|
||||
- name: fetch-repository
|
||||
taskRef:
|
||||
name: git-clone
|
||||
workspaces:
|
||||
- name: output
|
||||
workspace: git-source
|
||||
params:
|
||||
- name: url
|
||||
value: $(params.REPO_URL)
|
||||
- name: revision
|
||||
value: $(params.PULL_BASE_SHA)
|
||||
- name: deleteExisting
|
||||
value: "true"
|
||||
- name: build-image
|
||||
taskRef:
|
||||
name: kaniko
|
||||
params:
|
||||
- name: IMAGE
|
||||
value: "$(params.image-name):$(tasks.fetch-repository.results.commit)"
|
||||
- name: CONTEXT
|
||||
value: $(params.path-to-image-context)
|
||||
- name: DOCKERFILE
|
||||
value: $(params.path-to-dockerfile)
|
||||
- name: BUILDER_IMAGE
|
||||
value: "gcr.io/kaniko-project/executor:v1.12.1"
|
||||
- name: EXTRA_ARGS
|
||||
value:
|
||||
- --target=foreign-document-test
|
||||
- --cache=true
|
||||
- --cache-copy-layers
|
||||
- --cache-repo=harbor.fizz.buzz/kanikocache/cache
|
||||
- --use-new-run # Should result in a speed-up
|
||||
- --reproducible # To remove timestamps so layer caching works.
|
||||
- --snapshot-mode=redo
|
||||
- --skip-unused-stages=true
|
||||
- --registry-mirror=dockerhub.dockerhub.svc.cluster.local
|
||||
workspaces:
|
||||
- name: source
|
||||
workspace: git-source
|
||||
- name: dockerconfig
|
||||
workspace: docker-credentials
|
||||
runAfter:
|
||||
- fetch-repository
|
||||
- name: run-image
|
||||
taskRef:
|
||||
name: run-docker-image
|
||||
workspaces:
|
||||
- name: source
|
||||
workspace: git-source
|
||||
- name: cargo-cache
|
||||
workspace: cargo-cache
|
||||
runAfter:
|
||||
- build-image
|
||||
params:
|
||||
- name: docker-image
|
||||
value: "$(params.image-name):$(tasks.fetch-repository.results.commit)"
|
||||
finally:
|
||||
- name: report-success
|
||||
when:
|
||||
- input: "$(tasks.status)"
|
||||
operator: in
|
||||
values: ["Succeeded", "Completed"]
|
||||
taskRef:
|
||||
name: gitea-set-status
|
||||
params:
|
||||
- name: CONTEXT
|
||||
value: "$(params.JOB_NAME)"
|
||||
- name: REPO_FULL_NAME
|
||||
value: "$(params.REPO_OWNER)/$(params.REPO_NAME)"
|
||||
- name: GITEA_HOST_URL
|
||||
value: code.fizz.buzz
|
||||
- name: SHA
|
||||
value: "$(tasks.fetch-repository.results.commit)"
|
||||
- name: DESCRIPTION
|
||||
value: "Build $(params.JOB_NAME) has succeeded"
|
||||
- name: STATE
|
||||
value: success
|
||||
- name: TARGET_URL
|
||||
value: "https://tekton.fizz.buzz/#/namespaces/$(context.pipelineRun.namespace)/pipelineruns/$(context.pipelineRun.name)"
|
||||
- name: report-failure
|
||||
when:
|
||||
- input: "$(tasks.status)"
|
||||
operator: in
|
||||
values: ["Failed"]
|
||||
taskRef:
|
||||
name: gitea-set-status
|
||||
params:
|
||||
- name: CONTEXT
|
||||
value: "$(params.JOB_NAME)"
|
||||
- name: REPO_FULL_NAME
|
||||
value: "$(params.REPO_OWNER)/$(params.REPO_NAME)"
|
||||
- name: GITEA_HOST_URL
|
||||
value: code.fizz.buzz
|
||||
- name: SHA
|
||||
value: "$(tasks.fetch-repository.results.commit)"
|
||||
- name: DESCRIPTION
|
||||
value: "Build $(params.JOB_NAME) has failed"
|
||||
- name: STATE
|
||||
value: failure
|
||||
- name: TARGET_URL
|
||||
value: "https://tekton.fizz.buzz/#/namespaces/$(context.pipelineRun.namespace)/pipelineruns/$(context.pipelineRun.name)"
|
||||
- name: cargo-cache-autoclean
|
||||
taskRef:
|
||||
name: run-docker-image
|
||||
workspaces:
|
||||
- name: source
|
||||
workspace: git-source
|
||||
- name: cargo-cache
|
||||
workspace: cargo-cache
|
||||
params:
|
||||
- name: command
|
||||
value: [cargo, cache, --autoclean]
|
||||
- name: args
|
||||
value: []
|
||||
- name: docker-image
|
||||
value: "$(params.image-name):$(tasks.fetch-repository.results.commit)"
|
||||
workspaces:
|
||||
- name: git-source
|
||||
- name: docker-credentials
|
||||
- name: cargo-cache
|
||||
workspaces:
|
||||
- name: git-source
|
||||
volumeClaimTemplate:
|
||||
spec:
|
||||
storageClassName: "nfs-client"
|
||||
accessModes:
|
||||
- ReadWriteOnce
|
||||
resources:
|
||||
requests:
|
||||
storage: 10Gi
|
||||
subPath: rust-source
|
||||
- name: cargo-cache
|
||||
persistentVolumeClaim:
|
||||
claimName: organic-cargo-cache-test-foreign-document
|
||||
- name: docker-credentials
|
||||
secret:
|
||||
secretName: harbor-plain
|
||||
serviceAccountName: build-bot
|
||||
params:
|
||||
- name: image-name
|
||||
value: "harbor.fizz.buzz/private/organic-test-foreign-document"
|
||||
- name: path-to-image-context
|
||||
value: docker/organic_test/
|
||||
- name: path-to-dockerfile
|
||||
value: docker/organic_test/Dockerfile
|
||||
@@ -14,10 +14,6 @@ spec:
|
||||
- name: path-to-dockerfile
|
||||
description: The path to the Dockerfile
|
||||
type: string
|
||||
- name: command
|
||||
type: array
|
||||
description: Command to run.
|
||||
default: []
|
||||
tasks:
|
||||
- name: report-pending
|
||||
taskRef:
|
||||
@@ -92,8 +88,6 @@ spec:
|
||||
runAfter:
|
||||
- build-image
|
||||
params:
|
||||
- name: command
|
||||
value: ["$(params.command[*])"]
|
||||
- name: args
|
||||
value: ["--no-default-features"]
|
||||
- name: docker-image
|
||||
@@ -109,8 +103,6 @@ spec:
|
||||
runAfter:
|
||||
- run-image-none
|
||||
params:
|
||||
- name: command
|
||||
value: ["$(params.command[*])"]
|
||||
- name: args
|
||||
value: ["--no-default-features", "--features", "tracing"]
|
||||
- name: docker-image
|
||||
@@ -126,8 +118,6 @@ spec:
|
||||
runAfter:
|
||||
- run-image-tracing
|
||||
params:
|
||||
- name: command
|
||||
value: ["$(params.command[*])"]
|
||||
- name: args
|
||||
value: ["--no-default-features", "--features", "compare"]
|
||||
- name: docker-image
|
||||
@@ -143,8 +133,6 @@ spec:
|
||||
runAfter:
|
||||
- run-image-compare
|
||||
params:
|
||||
- name: command
|
||||
value: ["$(params.command[*])"]
|
||||
- name: args
|
||||
value: []
|
||||
- name: docker-image
|
||||
@@ -160,8 +148,6 @@ spec:
|
||||
runAfter:
|
||||
- run-image-default
|
||||
params:
|
||||
- name: command
|
||||
value: ["$(params.command[*])"]
|
||||
- name: args
|
||||
value: ["--no-default-features", "--features", "tracing,compare"]
|
||||
- name: docker-image
|
||||
@@ -256,5 +242,3 @@ spec:
|
||||
value: docker/organic_build/
|
||||
- name: path-to-dockerfile
|
||||
value: docker/organic_build/Dockerfile
|
||||
- name: command
|
||||
value: [cargo, build]
|
||||
|
||||
@@ -18,14 +18,6 @@ spec:
|
||||
- name: path-to-dockerfile
|
||||
description: The path to the Dockerfile
|
||||
type: string
|
||||
- name: command
|
||||
type: array
|
||||
description: Command to run.
|
||||
default: []
|
||||
- name: args
|
||||
type: array
|
||||
description: Arguments passed to command.
|
||||
default: []
|
||||
tasks:
|
||||
- name: do-stuff
|
||||
taskSpec:
|
||||
@@ -91,6 +83,7 @@ spec:
|
||||
value: "gcr.io/kaniko-project/executor:v1.12.1"
|
||||
- name: EXTRA_ARGS
|
||||
value:
|
||||
- --target=tester
|
||||
- --cache=true
|
||||
- --cache-copy-layers
|
||||
- --cache-repo=harbor.fizz.buzz/kanikocache/cache
|
||||
@@ -117,10 +110,17 @@ spec:
|
||||
runAfter:
|
||||
- build-image
|
||||
params:
|
||||
- name: command
|
||||
value: ["$(params.command[*])"]
|
||||
- name: args
|
||||
value: ["$(params.args[*])"]
|
||||
value:
|
||||
[
|
||||
--no-default-features,
|
||||
--features,
|
||||
compare,
|
||||
--no-fail-fast,
|
||||
--lib,
|
||||
--test,
|
||||
test_loader,
|
||||
]
|
||||
- name: docker-image
|
||||
value: "$(params.image-name):$(tasks.fetch-repository.results.commit)"
|
||||
finally:
|
||||
@@ -212,7 +212,3 @@ spec:
|
||||
value: docker/organic_test/
|
||||
- name: path-to-dockerfile
|
||||
value: docker/organic_test/Dockerfile
|
||||
- name: command
|
||||
value: [cargo, test]
|
||||
- name: args
|
||||
value: [--lib, --test, test_loader]
|
||||
|
||||
@@ -14,14 +14,6 @@ spec:
|
||||
- name: path-to-dockerfile
|
||||
description: The path to the Dockerfile
|
||||
type: string
|
||||
- name: rustfmt-command
|
||||
type: array
|
||||
description: Command to run rustfmt.
|
||||
default: []
|
||||
- name: rustfmt-args
|
||||
type: array
|
||||
description: Arguments passed to rustfmt.
|
||||
default: []
|
||||
- name: GIT_USER_NAME
|
||||
description: The username for git
|
||||
type: string
|
||||
@@ -119,10 +111,6 @@ spec:
|
||||
runAfter:
|
||||
- build-image
|
||||
params:
|
||||
- name: command
|
||||
value: ["$(params.rustfmt-command[*])"]
|
||||
- name: args
|
||||
value: ["$(params.rustfmt-args[*])"]
|
||||
- name: docker-image
|
||||
value: "$(params.image-name):$(tasks.fetch-repository.results.commit)"
|
||||
- name: cargo-fix
|
||||
@@ -240,7 +228,3 @@ spec:
|
||||
value: docker/cargo_fmt/
|
||||
- name: path-to-dockerfile
|
||||
value: docker/cargo_fmt/Dockerfile
|
||||
- name: command
|
||||
value: [cargo, fmt]
|
||||
- name: args
|
||||
value: []
|
||||
|
||||
@@ -16,6 +16,13 @@ spec:
|
||||
skip_branches:
|
||||
# We already run on every commit, so running when the semver tags get pushed is causing needless double-processing.
|
||||
- "^v[0-9]+\\.[0-9]+\\.[0-9]+$"
|
||||
- name: rust-foreign-document-test
|
||||
source: "pipeline-foreign-document-test.yaml"
|
||||
# Override https-based url from lighthouse events.
|
||||
clone_uri: "git@code.fizz.buzz:talexander/organic.git"
|
||||
skip_branches:
|
||||
# We already run on every commit, so running when the semver tags get pushed is causing needless double-processing.
|
||||
- "^v[0-9]+\\.[0-9]+\\.[0-9]+$"
|
||||
- name: rust-build
|
||||
source: "pipeline-rust-build.yaml"
|
||||
# Override https-based url from lighthouse events.
|
||||
|
||||
17
Cargo.toml
17
Cargo.toml
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "organic"
|
||||
version = "0.1.2"
|
||||
version = "0.1.7"
|
||||
authors = ["Tom Alexander <tom@fizz.buzz>"]
|
||||
description = "An org-mode parser."
|
||||
edition = "2021"
|
||||
@@ -13,8 +13,7 @@ resolver = "2"
|
||||
include = [
|
||||
"LICENSE",
|
||||
"**/*.rs",
|
||||
"Cargo.toml",
|
||||
"tests/*"
|
||||
"Cargo.toml"
|
||||
]
|
||||
|
||||
[lib]
|
||||
@@ -23,9 +22,15 @@ path = "src/lib.rs"
|
||||
|
||||
[[bin]]
|
||||
# This bin exists for development purposes only. The real target of this crate is the library.
|
||||
name = "compare"
|
||||
name = "parse"
|
||||
path = "src/main.rs"
|
||||
|
||||
[[bin]]
|
||||
# This bin exists for development purposes only. The real target of this crate is the library.
|
||||
name = "compare"
|
||||
path = "src/bin_compare.rs"
|
||||
required-features = ["compare"]
|
||||
|
||||
[dependencies]
|
||||
nom = "7.1.1"
|
||||
opentelemetry = { version = "0.20.0", optional = true, default-features = false, features = ["trace", "rt-tokio"] }
|
||||
@@ -40,15 +45,17 @@ tracing-subscriber = { version = "0.3.17", optional = true, features = ["env-fil
|
||||
walkdir = "2.3.3"
|
||||
|
||||
[features]
|
||||
default = ["compare"]
|
||||
default = []
|
||||
compare = []
|
||||
tracing = ["dep:opentelemetry", "dep:opentelemetry-otlp", "dep:opentelemetry-semantic-conventions", "dep:tokio", "dep:tracing", "dep:tracing-opentelemetry", "dep:tracing-subscriber"]
|
||||
|
||||
# Optimized build for any sort of release.
|
||||
[profile.release-lto]
|
||||
inherits = "release"
|
||||
lto = true
|
||||
strip = "symbols"
|
||||
|
||||
# Profile for performance testing with the "perf" tool. Notably keeps debug enabled and does not strip symbols to make reading the perf output easier.
|
||||
[profile.perf]
|
||||
inherits = "release"
|
||||
lto = true
|
||||
|
||||
14
Makefile
14
Makefile
@@ -35,12 +35,16 @@ clean:
|
||||
|
||||
.PHONY: test
|
||||
test:
|
||||
> cargo test --no-fail-fast --lib --test test_loader -- --test-threads $(TESTJOBS)
|
||||
> cargo test --no-default-features --features compare --no-fail-fast --lib --test test_loader -- --test-threads $(TESTJOBS)
|
||||
|
||||
.PHONY: dockertest
|
||||
dockertest:
|
||||
> $(MAKE) -C docker/organic_test
|
||||
> docker run --init --rm -i -t -v "$$(readlink -f ./):/source:ro" --mount source=cargo-cache,target=/usr/local/cargo/registry --mount source=rust-cache,target=/target --env CARGO_TARGET_DIR=/target -w /source organic-test cargo test --no-fail-fast --lib --test test_loader -- --test-threads $(TESTJOBS)
|
||||
> docker run --init --rm -i -t --read-only -v "$$(readlink -f ./):/source:ro" --mount type=tmpfs,destination=/tmp --mount source=cargo-cache,target=/usr/local/cargo/registry --mount source=rust-cache,target=/target --env CARGO_TARGET_DIR=/target -w /source organic-test --no-default-features --features compare --no-fail-fast --lib --test test_loader -- --test-threads $(TESTJOBS)
|
||||
|
||||
.PHONY: foreign_document_test
|
||||
foreign_document_test:
|
||||
> $(MAKE) -C docker/organic_test run_foreign_document_test
|
||||
|
||||
.PHONY: dockerclean
|
||||
dockerclean:
|
||||
@@ -49,18 +53,18 @@ dockerclean:
|
||||
|
||||
.PHONY: integrationtest
|
||||
integrationtest:
|
||||
> cargo test --no-fail-fast --test test_loader -- --test-threads $(TESTJOBS)
|
||||
> cargo test --no-default-features --features compare --no-fail-fast --test test_loader -- --test-threads $(TESTJOBS)
|
||||
|
||||
.PHONY: unittest
|
||||
unittest:
|
||||
> cargo test --lib -- --test-threads $(TESTJOBS)
|
||||
> cargo test --no-default-features --lib -- --test-threads $(TESTJOBS)
|
||||
|
||||
.PHONY: jaeger
|
||||
jaeger:
|
||||
# 4317 for OTLP gRPC, 4318 for OTLP HTTP. We currently use gRPC but I forward both ports regardless.
|
||||
#
|
||||
# These flags didn't help even though they seem like they would: --collector.queue-size=20000 --collector.num-workers=100
|
||||
> docker run -d --rm --name organicdocker -p 4317:4317 -p 4318:4318 -p 16686:16686 -e COLLECTOR_OTLP_ENABLED=true jaegertracing/all-in-one:1.47 --collector.grpc-server.max-message-size=20000000 --collector.otlp.grpc.max-message-size=20000000
|
||||
> docker run -d --rm --name organicdocker --read-only -p 4317:4317 -p 4318:4318 -p 16686:16686 -e COLLECTOR_OTLP_ENABLED=true jaegertracing/all-in-one:1.47 --collector.grpc-server.max-message-size=20000000 --collector.otlp.grpc.max-message-size=20000000
|
||||
|
||||
.PHONY: jaegerweb
|
||||
jaegerweb:
|
||||
|
||||
17
build.rs
17
build.rs
@@ -1,10 +1,16 @@
|
||||
#[cfg(feature = "compare")]
|
||||
use std::env;
|
||||
#[cfg(feature = "compare")]
|
||||
use std::fs::File;
|
||||
#[cfg(feature = "compare")]
|
||||
use std::io::Write;
|
||||
#[cfg(feature = "compare")]
|
||||
use std::path::Path;
|
||||
|
||||
#[cfg(feature = "compare")]
|
||||
use walkdir::WalkDir;
|
||||
|
||||
#[cfg(feature = "compare")]
|
||||
fn main() {
|
||||
let out_dir = env::var("OUT_DIR").unwrap();
|
||||
let destination = Path::new(&out_dir).join("tests.rs");
|
||||
@@ -31,6 +37,10 @@ fn main() {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(not(feature = "compare"))]
|
||||
fn main() {}
|
||||
|
||||
#[cfg(feature = "compare")]
|
||||
fn write_test(test_file: &mut File, test: &walkdir::DirEntry) {
|
||||
let test_name = test
|
||||
.path()
|
||||
@@ -55,26 +65,23 @@ fn write_test(test_file: &mut File, test: &walkdir::DirEntry) {
|
||||
.unwrap();
|
||||
}
|
||||
|
||||
#[cfg(feature = "compare")]
|
||||
fn write_header(test_file: &mut File) {
|
||||
write!(
|
||||
test_file,
|
||||
r#"
|
||||
#[feature(exit_status_error)]
|
||||
use organic::compare_document;
|
||||
use organic::parser::document;
|
||||
use organic::emacs_parse_org_document;
|
||||
use organic::parser::sexp::sexp_with_padding;
|
||||
|
||||
"#
|
||||
)
|
||||
.unwrap();
|
||||
}
|
||||
|
||||
#[cfg(feature = "compare")]
|
||||
fn is_expect_fail(name: &str) -> Option<&str> {
|
||||
match name {
|
||||
"autogen_greater_element_drawer_drawer_with_headline_inside" => Some("Apparently lines with :end: become their own paragraph. This odd behavior needs to be investigated more."),
|
||||
"autogen_element_container_priority_footnote_definition_dynamic_block" => Some("Apparently broken begin lines become their own paragraph."),
|
||||
"autogen_lesser_element_paragraphs_paragraph_with_backslash_line_breaks" => Some("The text we're getting out of the parse tree is already processed to remove line breaks, so our comparison needs to take that into account."),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -6,7 +6,7 @@ all: build push
|
||||
|
||||
.PHONY: build
|
||||
build:
|
||||
docker build -t $(IMAGE_NAME) -f Dockerfile ../../
|
||||
docker build -t $(IMAGE_NAME) -f Dockerfile .
|
||||
|
||||
.PHONY: push
|
||||
push:
|
||||
@@ -26,10 +26,11 @@ else
|
||||
@echo "REMOTE_REPO not defined, not removing from remote repo."
|
||||
endif
|
||||
|
||||
# NOTE: This target will write to folders underneath the git-root
|
||||
.PHONY: run
|
||||
run:
|
||||
docker run --rm -i -t $(IMAGE_NAME)
|
||||
run: build
|
||||
docker run --rm --init --read-only --mount type=tmpfs,destination=/tmp -v "$$(readlink -f ../../):/source" --workdir=/source $(IMAGE_NAME)
|
||||
|
||||
.PHONY: shell
|
||||
shell:
|
||||
docker run --rm -i -t --entrypoint /bin/bash $(IMAGE_NAME)
|
||||
shell: build
|
||||
docker run --rm -i -t --entrypoint /bin/sh --mount type=tmpfs,destination=/tmp -v "$$(readlink -f ../../):/source" --workdir=/source $(IMAGE_NAME)
|
||||
|
||||
@@ -2,3 +2,5 @@ FROM rustlang/rust:nightly-alpine3.17
|
||||
|
||||
RUN apk add --no-cache musl-dev
|
||||
RUN cargo install --locked --no-default-features --features ci-autoclean cargo-cache
|
||||
|
||||
ENTRYPOINT ["cargo", "build"]
|
||||
|
||||
@@ -6,7 +6,7 @@ all: build push
|
||||
|
||||
.PHONY: build
|
||||
build:
|
||||
docker build -t $(IMAGE_NAME) -f Dockerfile ../../
|
||||
docker build -t $(IMAGE_NAME) -f Dockerfile .
|
||||
|
||||
.PHONY: push
|
||||
push:
|
||||
@@ -25,11 +25,13 @@ ifdef REMOTE_REPO
|
||||
else
|
||||
@echo "REMOTE_REPO not defined, not removing from remote repo."
|
||||
endif
|
||||
docker volume rm cargo-cache
|
||||
|
||||
# NOTE: This target will write to folders underneath the git-root
|
||||
.PHONY: run
|
||||
run:
|
||||
docker run --rm -i -t $(IMAGE_NAME)
|
||||
run: build
|
||||
docker run --rm --init --read-only --mount type=tmpfs,destination=/tmp -v "$$(readlink -f ../../):/source" --workdir=/source --mount source=cargo-cache,target=/usr/local/cargo/registry $(IMAGE_NAME)
|
||||
|
||||
.PHONY: shell
|
||||
shell:
|
||||
docker run --rm -i -t --entrypoint /bin/bash $(IMAGE_NAME)
|
||||
shell: build
|
||||
docker run --rm -i -t --entrypoint /bin/sh --mount type=tmpfs,destination=/tmp -v "$$(readlink -f ../../):/source" --workdir=/source --mount source=cargo-cache,target=/usr/local/cargo/registry $(IMAGE_NAME)
|
||||
|
||||
@@ -14,7 +14,7 @@ RUN make DESTDIR="/root/dist" install
|
||||
|
||||
|
||||
FROM build AS build-org-mode
|
||||
ARG ORG_VERSION=7bdec435ff5d86220d13c431e799c5ed44a57da1
|
||||
ARG ORG_VERSION=163bafb43dcc2bc94a2c7ccaa77d3d1dd488f1af
|
||||
COPY --from=build-emacs /root/dist/ /
|
||||
RUN mkdir /root/dist
|
||||
# Savannah does not allow fetching specific revisions, so we're going to have to put unnecessary load on their server by cloning main and then checking out the revision we want.
|
||||
@@ -25,8 +25,77 @@ RUN make compile
|
||||
RUN make DESTDIR="/root/dist" install
|
||||
|
||||
|
||||
FROM rustlang/rust:nightly-alpine3.17
|
||||
FROM rustlang/rust:nightly-alpine3.17 AS tester
|
||||
ENV LANG=en_US.UTF-8
|
||||
RUN apk add --no-cache musl-dev ncurses gnutls
|
||||
RUN cargo install --locked --no-default-features --features ci-autoclean cargo-cache
|
||||
COPY --from=build-emacs /root/dist/ /
|
||||
COPY --from=build-org-mode /root/dist/ /
|
||||
|
||||
ENTRYPOINT ["cargo", "test"]
|
||||
|
||||
|
||||
FROM build as foreign-document-gather
|
||||
|
||||
ARG HOWARD_ABRAMS_DOT_FILES_VERSION=1b54fe75d74670dc7bcbb6b01ea560c45528c628
|
||||
ARG HOWARD_ABRAMS_DOT_FILES_PATH=/foreign_documents/howardabrams/dot-files
|
||||
ARG HOWARD_ABRAMS_DOT_FILES_REPO=https://github.com/howardabrams/dot-files.git
|
||||
RUN mkdir /foreign_documents
|
||||
RUN mkdir -p $HOWARD_ABRAMS_DOT_FILES_PATH && git -C $HOWARD_ABRAMS_DOT_FILES_PATH init --initial-branch=main && git -C $HOWARD_ABRAMS_DOT_FILES_PATH remote add origin $HOWARD_ABRAMS_DOT_FILES_REPO && git -C $HOWARD_ABRAMS_DOT_FILES_PATH fetch origin $HOWARD_ABRAMS_DOT_FILES_VERSION && git -C $HOWARD_ABRAMS_DOT_FILES_PATH checkout FETCH_HEAD
|
||||
|
||||
ARG HOWARD_ABRAMS_HAMACS_VERSION=da51188cc195d41882175d412fe40a8bc5730c5c
|
||||
ARG HOWARD_ABRAMS_HAMACS_PATH=/foreign_documents/howardabrams/hamacs
|
||||
ARG HOWARD_ABRAMS_HAMACS_REPO=https://github.com/howardabrams/hamacs.git
|
||||
RUN mkdir -p $HOWARD_ABRAMS_HAMACS_PATH && git -C $HOWARD_ABRAMS_HAMACS_PATH init --initial-branch=main && git -C $HOWARD_ABRAMS_HAMACS_PATH remote add origin $HOWARD_ABRAMS_HAMACS_REPO && git -C $HOWARD_ABRAMS_HAMACS_PATH fetch origin $HOWARD_ABRAMS_HAMACS_VERSION && git -C $HOWARD_ABRAMS_HAMACS_PATH checkout FETCH_HEAD
|
||||
|
||||
ARG HOWARD_ABRAMS_DEMO_IT_VERSION=e399fd7ceb73caeae7cb50b247359bafcaee2a3f
|
||||
ARG HOWARD_ABRAMS_DEMO_IT_PATH=/foreign_documents/howardabrams/demo-it
|
||||
ARG HOWARD_ABRAMS_DEMO_IT_REPO=https://github.com/howardabrams/demo-it.git
|
||||
RUN mkdir -p $HOWARD_ABRAMS_DEMO_IT_PATH && git -C $HOWARD_ABRAMS_DEMO_IT_PATH init --initial-branch=main && git -C $HOWARD_ABRAMS_DEMO_IT_PATH remote add origin $HOWARD_ABRAMS_DEMO_IT_REPO && git -C $HOWARD_ABRAMS_DEMO_IT_PATH fetch origin $HOWARD_ABRAMS_DEMO_IT_VERSION && git -C $HOWARD_ABRAMS_DEMO_IT_PATH checkout FETCH_HEAD
|
||||
|
||||
ARG HOWARD_ABRAMS_MAGIT_DEMO_VERSION=59e82f6bc7c18f550478d86a8f680c3f2da66985
|
||||
ARG HOWARD_ABRAMS_MAGIT_DEMO_PATH=/foreign_documents/howardabrams/magit-demo
|
||||
ARG HOWARD_ABRAMS_MAGIT_DEMO_REPO=https://github.com/howardabrams/magit-demo.git
|
||||
RUN mkdir -p $HOWARD_ABRAMS_MAGIT_DEMO_PATH && git -C $HOWARD_ABRAMS_MAGIT_DEMO_PATH init --initial-branch=main && git -C $HOWARD_ABRAMS_MAGIT_DEMO_PATH remote add origin $HOWARD_ABRAMS_MAGIT_DEMO_REPO && git -C $HOWARD_ABRAMS_MAGIT_DEMO_PATH fetch origin $HOWARD_ABRAMS_MAGIT_DEMO_VERSION && git -C $HOWARD_ABRAMS_MAGIT_DEMO_PATH checkout FETCH_HEAD
|
||||
|
||||
ARG HOWARD_ABRAMS_PDX_EMACS_HACKERS_VERSION=bfb7bd640fdf0ce3def21f9fc591ed35d776b26d
|
||||
ARG HOWARD_ABRAMS_PDX_EMACS_HACKERS_PATH=/foreign_documents/howardabrams/pdx-emacs-hackers
|
||||
ARG HOWARD_ABRAMS_PDX_EMACS_HACKERS_REPO=https://github.com/howardabrams/pdx-emacs-hackers.git
|
||||
RUN mkdir -p $HOWARD_ABRAMS_PDX_EMACS_HACKERS_PATH && git -C $HOWARD_ABRAMS_PDX_EMACS_HACKERS_PATH init --initial-branch=main && git -C $HOWARD_ABRAMS_PDX_EMACS_HACKERS_PATH remote add origin $HOWARD_ABRAMS_PDX_EMACS_HACKERS_REPO && git -C $HOWARD_ABRAMS_PDX_EMACS_HACKERS_PATH fetch origin $HOWARD_ABRAMS_PDX_EMACS_HACKERS_VERSION && git -C $HOWARD_ABRAMS_PDX_EMACS_HACKERS_PATH checkout FETCH_HEAD
|
||||
|
||||
ARG HOWARD_ABRAMS_FLORA_SIMULATOR_VERSION=50de13068722b9e3878f8598b749b7ccd14e7f8e
|
||||
ARG HOWARD_ABRAMS_FLORA_SIMULATOR_PATH=/foreign_documents/howardabrams/flora-simulator
|
||||
ARG HOWARD_ABRAMS_FLORA_SIMULATOR_REPO=https://github.com/howardabrams/flora-simulator.git
|
||||
RUN mkdir -p $HOWARD_ABRAMS_FLORA_SIMULATOR_PATH && git -C $HOWARD_ABRAMS_FLORA_SIMULATOR_PATH init --initial-branch=main && git -C $HOWARD_ABRAMS_FLORA_SIMULATOR_PATH remote add origin $HOWARD_ABRAMS_FLORA_SIMULATOR_REPO && git -C $HOWARD_ABRAMS_FLORA_SIMULATOR_PATH fetch origin $HOWARD_ABRAMS_FLORA_SIMULATOR_VERSION && git -C $HOWARD_ABRAMS_FLORA_SIMULATOR_PATH checkout FETCH_HEAD
|
||||
|
||||
ARG HOWARD_ABRAMS_LITERATE_DEVOPS_DEMO_VERSION=2d7a5e41001a1adf7ec24aeb6acc8525a72d7892
|
||||
ARG HOWARD_ABRAMS_LITERATE_DEVOPS_DEMO_PATH=/foreign_documents/howardabrams/literate-devops-demo
|
||||
ARG HOWARD_ABRAMS_LITERATE_DEVOPS_DEMO_REPO=https://github.com/howardabrams/literate-devops-demo.git
|
||||
RUN mkdir -p $HOWARD_ABRAMS_LITERATE_DEVOPS_DEMO_PATH && git -C $HOWARD_ABRAMS_LITERATE_DEVOPS_DEMO_PATH init --initial-branch=main && git -C $HOWARD_ABRAMS_LITERATE_DEVOPS_DEMO_PATH remote add origin $HOWARD_ABRAMS_LITERATE_DEVOPS_DEMO_REPO && git -C $HOWARD_ABRAMS_LITERATE_DEVOPS_DEMO_PATH fetch origin $HOWARD_ABRAMS_LITERATE_DEVOPS_DEMO_VERSION && git -C $HOWARD_ABRAMS_LITERATE_DEVOPS_DEMO_PATH checkout FETCH_HEAD
|
||||
|
||||
ARG HOWARD_ABRAMS_CLOJURE_YESQL_XP_VERSION=b651c7f8b47b2710e99fce9652980902bbc1c6c9
|
||||
ARG HOWARD_ABRAMS_CLOJURE_YESQL_XP_PATH=/foreign_documents/howardabrams/clojure-yesql-xp
|
||||
ARG HOWARD_ABRAMS_CLOJURE_YESQL_XP_REPO=https://github.com/howardabrams/clojure-yesql-xp.git
|
||||
RUN mkdir -p $HOWARD_ABRAMS_CLOJURE_YESQL_XP_PATH && git -C $HOWARD_ABRAMS_CLOJURE_YESQL_XP_PATH init --initial-branch=main && git -C $HOWARD_ABRAMS_CLOJURE_YESQL_XP_PATH remote add origin $HOWARD_ABRAMS_CLOJURE_YESQL_XP_REPO && git -C $HOWARD_ABRAMS_CLOJURE_YESQL_XP_PATH fetch origin $HOWARD_ABRAMS_CLOJURE_YESQL_XP_VERSION && git -C $HOWARD_ABRAMS_CLOJURE_YESQL_XP_PATH checkout FETCH_HEAD
|
||||
|
||||
ARG HOWARD_ABRAMS_VEEP_VERSION=e37fcf63a5c4a526255735ee34955528b3b280ae
|
||||
ARG HOWARD_ABRAMS_VEEP_PATH=/foreign_documents/howardabrams/veep
|
||||
ARG HOWARD_ABRAMS_VEEP_REPO=https://github.com/howardabrams/veep.git
|
||||
RUN mkdir -p $HOWARD_ABRAMS_VEEP_PATH && git -C $HOWARD_ABRAMS_VEEP_PATH init --initial-branch=main && git -C $HOWARD_ABRAMS_VEEP_PATH remote add origin $HOWARD_ABRAMS_VEEP_REPO && git -C $HOWARD_ABRAMS_VEEP_PATH fetch origin $HOWARD_ABRAMS_VEEP_VERSION && git -C $HOWARD_ABRAMS_VEEP_PATH checkout FETCH_HEAD
|
||||
|
||||
ARG DOOMEMACS_VERSION=42d5fd83504f8aa80f3248036006fbcd49222943
|
||||
ARG DOOMEMACS_PATH=/foreign_documents/doomemacs
|
||||
ARG DOOMEMACS_REPO=https://github.com/doomemacs/doomemacs.git
|
||||
RUN mkdir -p $DOOMEMACS_PATH && git -C $DOOMEMACS_PATH init --initial-branch=main && git -C $DOOMEMACS_PATH remote add origin $DOOMEMACS_REPO && git -C $DOOMEMACS_PATH fetch origin $DOOMEMACS_VERSION && git -C $DOOMEMACS_PATH checkout FETCH_HEAD
|
||||
|
||||
|
||||
FROM tester as foreign-document-test
|
||||
RUN apk add --no-cache bash coreutils
|
||||
RUN mkdir /foreign_documents
|
||||
COPY --from=build-org-mode /root/org-mode /foreign_documents/org-mode
|
||||
COPY --from=build-emacs /root/emacs /foreign_documents/emacs
|
||||
COPY --from=foreign-document-gather /foreign_documents/howardabrams /foreign_documents/howardabrams
|
||||
COPY --from=foreign-document-gather /foreign_documents/doomemacs /foreign_documents/doomemacs
|
||||
COPY foreign_document_test_entrypoint.sh /entrypoint.sh
|
||||
RUN chmod +x /entrypoint.sh
|
||||
ENTRYPOINT ["/entrypoint.sh"]
|
||||
|
||||
@@ -6,7 +6,11 @@ all: build push
|
||||
|
||||
.PHONY: build
|
||||
build:
|
||||
docker build -t $(IMAGE_NAME) -f Dockerfile ../../
|
||||
docker build -t $(IMAGE_NAME) -f Dockerfile --target tester .
|
||||
|
||||
.PHONY: build_foreign_document_test
|
||||
build_foreign_document_test:
|
||||
docker build -t $(IMAGE_NAME)-foreign-document -f Dockerfile --target foreign-document-test .
|
||||
|
||||
.PHONY: push
|
||||
push:
|
||||
@@ -25,11 +29,16 @@ ifdef REMOTE_REPO
|
||||
else
|
||||
@echo "REMOTE_REPO not defined, not removing from remote repo."
|
||||
endif
|
||||
docker volume rm rust-cache cargo-cache
|
||||
|
||||
.PHONY: run
|
||||
run:
|
||||
docker run --rm -i -t $(IMAGE_NAME)
|
||||
run: build
|
||||
docker run --rm --init --read-only --mount type=tmpfs,destination=/tmp -v "$$(readlink -f ../../):/source:ro" --workdir=/source --mount source=cargo-cache,target=/usr/local/cargo/registry --mount source=rust-cache,target=/target --env CARGO_TARGET_DIR=/target $(IMAGE_NAME) --no-default-features --features compare --no-fail-fast --lib --test test_loader
|
||||
|
||||
.PHONY: shell
|
||||
shell:
|
||||
docker run --rm -i -t --entrypoint /bin/bash $(IMAGE_NAME)
|
||||
shell: build
|
||||
docker run --rm -i -t --entrypoint /bin/sh --mount type=tmpfs,destination=/tmp -v "$$(readlink -f ../../):/source:ro" --workdir=/source --mount source=cargo-cache,target=/usr/local/cargo/registry --mount source=rust-cache,target=/target --env CARGO_TARGET_DIR=/target $(IMAGE_NAME)
|
||||
|
||||
.PHONY: run_foreign_document_test
|
||||
run_foreign_document_test: build_foreign_document_test
|
||||
docker run --rm --init --read-only --mount type=tmpfs,destination=/tmp -v "$$(readlink -f ../../):/source:ro" --workdir=/source --mount source=cargo-cache,target=/usr/local/cargo/registry --mount source=rust-cache,target=/target --env CARGO_TARGET_DIR=/target $(IMAGE_NAME)-foreign-document
|
||||
|
||||
145
docker/organic_test/foreign_document_test_entrypoint.sh
Normal file
145
docker/organic_test/foreign_document_test_entrypoint.sh
Normal file
@@ -0,0 +1,145 @@
|
||||
#!/usr/bin/env bash
|
||||
#
|
||||
# Run the Organic compare script against a series of documents sourced from exterior places.
|
||||
set -euo pipefail
|
||||
IFS=$'\n\t'
|
||||
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
|
||||
|
||||
REALPATH=$(command -v uu-realpath || command -v realpath)
|
||||
|
||||
function log {
|
||||
(>&2 echo "${@}")
|
||||
}
|
||||
|
||||
function die {
|
||||
local status_code="$1"
|
||||
shift
|
||||
(>&2 echo "${@}")
|
||||
exit "$status_code"
|
||||
}
|
||||
|
||||
function main {
|
||||
cargo build --no-default-features --features compare --profile release-lto
|
||||
if [ "${CARGO_TARGET_DIR:-}" = "" ]; then
|
||||
CARGO_TARGET_DIR=$(realpath target/)
|
||||
fi
|
||||
PARSE="${CARGO_TARGET_DIR}/release-lto/compare"
|
||||
|
||||
local all_status=0
|
||||
set +e
|
||||
|
||||
(run_compare_function "org-mode" compare_all_org_document "/foreign_documents/org-mode")
|
||||
if [ "$?" -ne 0 ]; then all_status=1; fi
|
||||
(run_compare_function "emacs" compare_all_org_document "/foreign_documents/emacs")
|
||||
if [ "$?" -ne 0 ]; then all_status=1; fi
|
||||
(run_compare_function "howard_abrams" compare_howard_abrams)
|
||||
if [ "$?" -ne 0 ]; then all_status=1; fi
|
||||
(run_compare_function "doomemacs" compare_all_org_document "/foreign_documents/doomemacs")
|
||||
if [ "$?" -ne 0 ]; then all_status=1; fi
|
||||
|
||||
set -e
|
||||
if [ "$all_status" -ne 0 ]; then
|
||||
echo "$(red_text "Some tests failed.")"
|
||||
else
|
||||
echo "$(green_text "All tests passed.")"
|
||||
fi
|
||||
return "$all_status"
|
||||
}
|
||||
|
||||
function green_text {
|
||||
(IFS=' '; printf '\x1b[38;2;0;255;0m%s\x1b[0m' "${*}")
|
||||
}
|
||||
|
||||
function red_text {
|
||||
(IFS=' '; printf '\x1b[38;2;255;0;0m%s\x1b[0m' "${*}")
|
||||
}
|
||||
|
||||
function yellow_text {
|
||||
(IFS=' '; printf '\x1b[38;2;255;255;0m%s\x1b[0m' "${*}")
|
||||
}
|
||||
|
||||
function indent {
|
||||
local depth="$1"
|
||||
local scaled_depth=$((depth * 2))
|
||||
shift 1
|
||||
local prefix=$(printf -- "%${scaled_depth}s")
|
||||
while read l; do
|
||||
(IFS=' '; printf -- '%s%s\n' "$prefix" "$l")
|
||||
done
|
||||
}
|
||||
|
||||
function run_compare_function {
|
||||
local name="$1"
|
||||
local stdoutput
|
||||
shift 1
|
||||
set +e
|
||||
stdoutput=$("${@}")
|
||||
local status=$?
|
||||
set -e
|
||||
if [ "$status" -eq 0 ]; then
|
||||
echo "$(green_text "GOOD") $name"
|
||||
indent 1 <<<"$stdoutput"
|
||||
else
|
||||
echo "$(red_text "FAIL") $name"
|
||||
indent 1 <<<"$stdoutput"
|
||||
return 1
|
||||
fi
|
||||
}
|
||||
|
||||
function compare_all_org_document {
|
||||
local root_dir="$1"
|
||||
local target_document
|
||||
local all_status=0
|
||||
while read target_document; do
|
||||
local relative_path=$($REALPATH --relative-to "$root_dir" "$target_document")
|
||||
set +e
|
||||
(run_compare "$relative_path" "$target_document")
|
||||
if [ "$?" -ne 0 ]; then all_status=1; fi
|
||||
set -e
|
||||
done<<<$(find "$root_dir" -type f -iname '*.org')
|
||||
return "$all_status"
|
||||
}
|
||||
|
||||
function run_compare {
|
||||
local name="$1"
|
||||
local target_document="$2"
|
||||
set +e
|
||||
($PARSE "$target_document" &> /dev/null)
|
||||
local status=$?
|
||||
set -e
|
||||
if [ "$status" -eq 0 ]; then
|
||||
echo "$(green_text "GOOD") $name"
|
||||
else
|
||||
echo "$(red_text "FAIL") $name"
|
||||
return 1
|
||||
fi
|
||||
}
|
||||
|
||||
function compare_howard_abrams {
|
||||
local all_status=0
|
||||
set +e
|
||||
|
||||
(run_compare_function "dot-files" compare_all_org_document "/foreign_documents/howardabrams/dot-files")
|
||||
if [ "$?" -ne 0 ]; then all_status=1; fi
|
||||
(run_compare_function "hamacs" compare_all_org_document "/foreign_documents/howardabrams/hamacs")
|
||||
if [ "$?" -ne 0 ]; then all_status=1; fi
|
||||
(run_compare_function "demo-it" compare_all_org_document "/foreign_documents/howardabrams/demo-it")
|
||||
if [ "$?" -ne 0 ]; then all_status=1; fi
|
||||
(run_compare_function "magit-demo" compare_all_org_document "/foreign_documents/howardabrams/magit-demo")
|
||||
if [ "$?" -ne 0 ]; then all_status=1; fi
|
||||
(run_compare_function "pdx-emacs-hackers" compare_all_org_document "/foreign_documents/howardabrams/pdx-emacs-hackers")
|
||||
if [ "$?" -ne 0 ]; then all_status=1; fi
|
||||
(run_compare_function "flora-simulator" compare_all_org_document "/foreign_documents/howardabrams/flora-simulator")
|
||||
if [ "$?" -ne 0 ]; then all_status=1; fi
|
||||
(run_compare_function "literate-devops-demo" compare_all_org_document "/foreign_documents/howardabrams/literate-devops-demo")
|
||||
if [ "$?" -ne 0 ]; then all_status=1; fi
|
||||
(run_compare_function "clojure-yesql-xp" compare_all_org_document "/foreign_documents/howardabrams/clojure-yesql-xp")
|
||||
if [ "$?" -ne 0 ]; then all_status=1; fi
|
||||
(run_compare_function "veep" compare_all_org_document "/foreign_documents/howardabrams/veep")
|
||||
if [ "$?" -ne 0 ]; then all_status=1; fi
|
||||
|
||||
set -e
|
||||
return "$all_status"
|
||||
}
|
||||
|
||||
main "${@}"
|
||||
1
elisp_snippets/README.md
Normal file
1
elisp_snippets/README.md
Normal file
@@ -0,0 +1 @@
|
||||
This folder is for snippets of elisp that are useful for development.
|
||||
3
elisp_snippets/dump_org_element_affiliated_keywords.el
Normal file
3
elisp_snippets/dump_org_element_affiliated_keywords.el
Normal file
@@ -0,0 +1,3 @@
|
||||
(dolist (var org-element-affiliated-keywords)
|
||||
(message "\"%s\"," (downcase var))
|
||||
)
|
||||
5
elisp_snippets/dump_org_entities.el
Normal file
5
elisp_snippets/dump_org_entities.el
Normal file
@@ -0,0 +1,5 @@
|
||||
(dolist (var org-entities)
|
||||
(when (listp var)
|
||||
(message "\"%s\"," (nth 0 var))
|
||||
)
|
||||
)
|
||||
1
org_mode_samples/README.org
Normal file
1
org_mode_samples/README.org
Normal file
@@ -0,0 +1 @@
|
||||
This folder contains org-mode documents that get automatically included as tests using build.rs.
|
||||
@@ -1,6 +1,7 @@
|
||||
1. foo
|
||||
1. plain-list
|
||||
#+begin_center
|
||||
|
||||
|
||||
#+end_center
|
||||
2. bar
|
||||
|
||||
Is this still in the plain list?
|
||||
|
||||
@@ -1 +0,0 @@
|
||||
This folder is an investigation into whether or not my exit matchers should operate from the top down or bottom up.
|
||||
@@ -1 +0,0 @@
|
||||
foo *bar baz * lorem* ipsum
|
||||
@@ -1,3 +0,0 @@
|
||||
Looks like 2 blank lines always exits the top-level plain list.
|
||||
|
||||
Plain lists do not seem to go inside paragraphs but rather exist beside them.
|
||||
@@ -1,12 +0,0 @@
|
||||
1. foo
|
||||
|
||||
bar
|
||||
|
||||
1. baz
|
||||
|
||||
lorem
|
||||
|
||||
ipsum
|
||||
|
||||
|
||||
dolar
|
||||
@@ -1 +0,0 @@
|
||||
Looks like table cells cannot contain lists but can contain bolds
|
||||
@@ -1,5 +0,0 @@
|
||||
ip *su* m
|
||||
|
||||
| foo | bar |
|
||||
|----------+-----|
|
||||
| 1. lo *re* m | |
|
||||
25
org_mode_samples/greater_element/dynamic_block/simple.org
Normal file
25
org_mode_samples/greater_element/dynamic_block/simple.org
Normal file
@@ -0,0 +1,25 @@
|
||||
#+BEGIN: clocktable :scope file :maxlevel 2
|
||||
#+CAPTION: Clock summary at [2023-08-25 Fri 05:34]
|
||||
| Headline | Time |
|
||||
|--------------+--------|
|
||||
| *Total time* | *0:00* |
|
||||
#+END:
|
||||
|
||||
#+BEGIN: columnview :hlines 1 :id global
|
||||
| ITEM | TODO | PRIORITY | TAGS |
|
||||
|-------+------+----------+------------------------------|
|
||||
| Foo | | B | |
|
||||
|-------+------+----------+------------------------------|
|
||||
| Bar | TODO | B | |
|
||||
|-------+------+----------+------------------------------|
|
||||
| Baz | | B | :thisisatag: |
|
||||
| Lorem | | B | :thisshouldinheritfromabove: |
|
||||
| Ipsum | | B | :multiple:tags: |
|
||||
#+END:
|
||||
* Foo
|
||||
* TODO Bar
|
||||
* Baz :thisisatag:
|
||||
** Lorem :thisshouldinheritfromabove:
|
||||
*** Ipsum :multiple:tags:
|
||||
* Dolar ::
|
||||
* cat :dog: bat
|
||||
@@ -0,0 +1,8 @@
|
||||
* Footnotes
|
||||
|
||||
[fn:1]
|
||||
|
||||
#+BEGIN_EXAMPLE
|
||||
baz
|
||||
#+END_EXAMPLE
|
||||
|
||||
@@ -0,0 +1,18 @@
|
||||
#+begin_defun
|
||||
foo
|
||||
#+begin_lorem
|
||||
,#+begin_center
|
||||
bar
|
||||
,#+end_center
|
||||
ipsum
|
||||
#+end_lorem
|
||||
baz
|
||||
#+end_defun
|
||||
|
||||
#+begin_center
|
||||
#+begin_quote
|
||||
#+begin_center
|
||||
lorem
|
||||
#+end_center
|
||||
#+end_quote
|
||||
#+end_center
|
||||
@@ -0,0 +1,12 @@
|
||||
#+begin_defun
|
||||
foo
|
||||
#+begin_lorem
|
||||
ipsum
|
||||
#+end_lorem
|
||||
bar
|
||||
#+begin_center
|
||||
#+begin_quote
|
||||
baz
|
||||
#+end_quote
|
||||
#+end_center
|
||||
#+end_defun
|
||||
@@ -0,0 +1,5 @@
|
||||
#+begin_defun
|
||||
foo
|
||||
|
||||
{{{bar(baz)}}}
|
||||
#+end_defun
|
||||
@@ -0,0 +1,7 @@
|
||||
1. foo
|
||||
2.
|
||||
bar
|
||||
1.
|
||||
#+begin_center
|
||||
Still in the list
|
||||
#+end_center
|
||||
@@ -0,0 +1,2 @@
|
||||
3. [@3] foo
|
||||
4. bar
|
||||
@@ -0,0 +1,2 @@
|
||||
- foo ::
|
||||
|
||||
@@ -0,0 +1,11 @@
|
||||
- foo :: bar
|
||||
- cat ::
|
||||
dog
|
||||
- lorem
|
||||
:: ipsum
|
||||
-
|
||||
lorem :: ipsum
|
||||
- dolar *bold* foo :: ipsum
|
||||
- big gap ::
|
||||
|
||||
stuff
|
||||
@@ -0,0 +1,2 @@
|
||||
- {{{foo(bar)}}} :: baz
|
||||
- =foo= :: bar
|
||||
@@ -0,0 +1,3 @@
|
||||
- foo :: bar
|
||||
- foo :: bar
|
||||
- foo :: bar
|
||||
@@ -0,0 +1,2 @@
|
||||
- =foo :: bar= :: baz
|
||||
- lorem :: ipsum :: dolar
|
||||
@@ -1,3 +1,5 @@
|
||||
1.
|
||||
2.
|
||||
3.
|
||||
|
||||
* headline
|
||||
|
||||
@@ -0,0 +1,2 @@
|
||||
- foo
|
||||
|
||||
@@ -0,0 +1,6 @@
|
||||
* Overwrite
|
||||
:PROPERTIES:
|
||||
:header-args: :var foo="lorem"
|
||||
:header-args:emacs-lisp: :var bar="ipsum"
|
||||
:header-args:emacs-lisp+: :results silent :var baz=7
|
||||
:END:
|
||||
@@ -0,0 +1,7 @@
|
||||
** foo
|
||||
:PROPERTIES:
|
||||
:DESCRIPTION: lorem
|
||||
:ALT_TITLE: ipsum
|
||||
:END:
|
||||
|
||||
bar
|
||||
@@ -0,0 +1,6 @@
|
||||
src_elisp{(bar)}
|
||||
*src_elisp{(bar)}*
|
||||
|
||||
| foo *bar* |
|
||||
| foo src_elisp{(bar)} |
|
||||
| foo *src_elisp{(bar)}* |
|
||||
8
org_mode_samples/greater_element/table/with_formulas.org
Normal file
8
org_mode_samples/greater_element/table/with_formulas.org
Normal file
@@ -0,0 +1,8 @@
|
||||
| Name | Price | Quantity | Total |
|
||||
|------+-------+----------+-------|
|
||||
| foo | 7 | 4 | 28 |
|
||||
| bar | 3.5 | 3 | 10.5 |
|
||||
|------+-------+----------+-------|
|
||||
| | | 7 | 38.5 |
|
||||
#+tblfm: $4=$2*$3::@>$4=vsum(@2..@-1)
|
||||
#+tblfm: @>$3=vsum(@2..@-1)
|
||||
@@ -0,0 +1,6 @@
|
||||
%%(foo
|
||||
)
|
||||
|
||||
%%(bar ; baz
|
||||
|
||||
lorem
|
||||
@@ -0,0 +1 @@
|
||||
%%(foo bar) ; baz
|
||||
@@ -0,0 +1,7 @@
|
||||
# This test is to prove that the parser works with affiliated keywords that have both a shorter and longer version.
|
||||
|
||||
#+results:
|
||||
#+result:
|
||||
#+begin_latex
|
||||
\foo
|
||||
#+end_latex
|
||||
1
org_mode_samples/lesser_element/keyword/babel_call.org
Normal file
1
org_mode_samples/lesser_element/keyword/babel_call.org
Normal file
@@ -0,0 +1 @@
|
||||
#+call: foo(bar="baz")
|
||||
@@ -0,0 +1,15 @@
|
||||
#+name: foo
|
||||
#+caption: bar
|
||||
#+caption: baz
|
||||
|
||||
[[file:lorem/ipsum.png]]
|
||||
|
||||
#+name: cat
|
||||
#+foo: dog
|
||||
[[file:lorem/ipsum.png]]
|
||||
|
||||
#+name: cat
|
||||
#+foo: dog
|
||||
|
||||
|
||||
foo
|
||||
@@ -0,0 +1 @@
|
||||
#+title:foo:bar: baz: lorem: ipsum
|
||||
@@ -0,0 +1,2 @@
|
||||
#+begin_src
|
||||
#+end_src
|
||||
@@ -0,0 +1,4 @@
|
||||
# There are trailing spaces after the begin and end src lines
|
||||
#+begin_src
|
||||
echo "this is a source block."
|
||||
#+end_src
|
||||
22
org_mode_samples/object/citation/balanced_brackets.org
Normal file
22
org_mode_samples/object/citation/balanced_brackets.org
Normal file
@@ -0,0 +1,22 @@
|
||||
# Extra open
|
||||
[cite/a/b-_/foo:unbalancedglobal[prefix;keyprefix @foo keysuffix;globalsuffix]
|
||||
|
||||
[cite/a/b-_/foo:globalprefix;unbalancedkey[prefix @foo keysuffix;globalsuffix]
|
||||
|
||||
[cite/a/b-_/foo:globalprefix;keyprefix @foo unbalancedkey[suffix;globalsuffix]
|
||||
|
||||
[cite/a/b-_/foo:globalprefix;keyprefix @foo keysuffix;unbalancedglobal[suffix]
|
||||
|
||||
|
||||
# Extra close
|
||||
[cite/a/b-_/foo:unbalancedglobal]prefix;keyprefix @foo keysuffix;globalsuffix]
|
||||
|
||||
[cite/a/b-_/foo:globalprefix;unbalancedkey]prefix @foo keysuffix;globalsuffix]
|
||||
|
||||
[cite/a/b-_/foo:globalprefix;keyprefix @foo unbalancedkey]suffix;globalsuffix]
|
||||
|
||||
[cite/a/b-_/foo:globalprefix;keyprefix @foo keysuffix;unbalancedglobal]suffix]
|
||||
|
||||
|
||||
# balanced:
|
||||
[cite/a/b-_/foo:gl[obalpref]ix;ke[ypref]ix @foo ke[ysuff]ix;gl[obalsuff]ix]
|
||||
@@ -0,0 +1,2 @@
|
||||
[fn:2:This footnote [ has balanced ] brackets inside it]
|
||||
[fn::This footnote does not have balanced [ brackets inside it]
|
||||
@@ -0,0 +1,3 @@
|
||||
*[fn:: /abcdef[fn::ghijklmnopqrstuvw]xyz/ r]*
|
||||
|
||||
*[fn:: /abcdef[fn::ghijk *lmnopq* rstuvw]xyz/ r]*
|
||||
6
org_mode_samples/object/latex_fragment/three_lines.org
Normal file
6
org_mode_samples/object/latex_fragment/three_lines.org
Normal file
@@ -0,0 +1,6 @@
|
||||
$foo
|
||||
bar
|
||||
baz
|
||||
lorem
|
||||
ipsum
|
||||
dolar$
|
||||
52
org_mode_samples/object/plain_link/empty_links.org
Normal file
52
org_mode_samples/object/plain_link/empty_links.org
Normal file
@@ -0,0 +1,52 @@
|
||||
non-link text
|
||||
eww://
|
||||
rmail://
|
||||
mhe://
|
||||
irc://
|
||||
info://
|
||||
gnus://
|
||||
docview://
|
||||
bibtex://
|
||||
bbdb://
|
||||
w3m://
|
||||
doi://
|
||||
file+sys://
|
||||
file+emacs://
|
||||
shell://
|
||||
news://
|
||||
mailto://
|
||||
https://
|
||||
http://
|
||||
ftp://
|
||||
help://
|
||||
file://
|
||||
elisp://
|
||||
randomfakeprotocl://
|
||||
non-link text
|
||||
|
||||
|
||||
non-link text
|
||||
eww:
|
||||
rmail:
|
||||
mhe:
|
||||
irc:
|
||||
info:
|
||||
gnus:
|
||||
docview:
|
||||
bibtex:
|
||||
bbdb:
|
||||
w3m:
|
||||
doi:
|
||||
file+sys:
|
||||
file+emacs:
|
||||
shell:
|
||||
news:
|
||||
mailto:
|
||||
https:
|
||||
http:
|
||||
ftp:
|
||||
help:
|
||||
file:
|
||||
elisp:
|
||||
randomfakeprotocl:
|
||||
non-link text
|
||||
@@ -0,0 +1,3 @@
|
||||
mailto:foo@bar.baz.
|
||||
|
||||
mailto:foo@bar.baz....
|
||||
@@ -0,0 +1 @@
|
||||
mailto:foo@bar.baz .
|
||||
4
org_mode_samples/object/statistics_cookie/empty.org
Normal file
4
org_mode_samples/object/statistics_cookie/empty.org
Normal file
@@ -0,0 +1,4 @@
|
||||
[/]
|
||||
[/2]
|
||||
[3/]
|
||||
[%]
|
||||
17
org_mode_samples/object/text_markup/three_lines.org
Normal file
17
org_mode_samples/object/text_markup/three_lines.org
Normal file
@@ -0,0 +1,17 @@
|
||||
foo *bar
|
||||
baz* lorem
|
||||
|
||||
text *markup
|
||||
can
|
||||
span* more
|
||||
|
||||
than *three
|
||||
lines.
|
||||
foo
|
||||
bar* baz
|
||||
|
||||
foo *bar \\
|
||||
baz \\
|
||||
lorem \\
|
||||
ipsum \\
|
||||
dolar* cat
|
||||
@@ -0,0 +1,4 @@
|
||||
foo ==>bar=.
|
||||
|
||||
# This uses a zero-width space to escape the equals signs to make the verbatim not end.
|
||||
=lorem == ipsum=
|
||||
@@ -0,0 +1,2 @@
|
||||
* TODO [#A] COMMENT foo bar
|
||||
baz
|
||||
9
org_mode_samples/sections_and_headings/empty_section.org
Normal file
9
org_mode_samples/sections_and_headings/empty_section.org
Normal file
@@ -0,0 +1,9 @@
|
||||
* Foo
|
||||
|
||||
* Bar
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
* Baz
|
||||
@@ -0,0 +1,4 @@
|
||||
* DONE foo
|
||||
DEADLINE: <2023-09-08 Fri>
|
||||
|
||||
* DONE bar
|
||||
@@ -0,0 +1 @@
|
||||
* [0/4] foo
|
||||
1
org_mode_samples/unicode/hearts.org
Normal file
1
org_mode_samples/unicode/hearts.org
Normal file
@@ -0,0 +1 @@
|
||||
🧡💛💚💙💜
|
||||
@@ -4,10 +4,10 @@ set -euo pipefail
|
||||
IFS=$'\n\t'
|
||||
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
|
||||
|
||||
cd "$DIR/../"
|
||||
|
||||
RUSTFLAGS="-C opt-level=0" cargo build --no-default-features
|
||||
valgrind --tool=callgrind --callgrind-out-file=callgrind.out target/debug/compare
|
||||
|
||||
(cd "$DIR/../" && RUSTFLAGS="-C opt-level=0" cargo build --no-default-features)
|
||||
valgrind --tool=callgrind --callgrind-out-file="$DIR/../callgrind.out" "$DIR/../target/debug/parse" "${@}"
|
||||
|
||||
echo "You probably want to run:"
|
||||
echo "callgrind_annotate --auto=yes callgrind.out"
|
||||
echo "callgrind_annotate --auto=yes '$DIR/../callgrind.out'"
|
||||
|
||||
@@ -6,13 +6,22 @@ DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
|
||||
|
||||
: ${PROFILE:="perf"}
|
||||
|
||||
cd "$DIR/../"
|
||||
function main {
|
||||
local additional_flags=()
|
||||
if [ "$PROFILE" = "dev" ] || [ "$PROFILE" = "debug" ]; then
|
||||
PROFILE="debug"
|
||||
else
|
||||
additional_flags+=(--profile "$PROFILE")
|
||||
fi
|
||||
(cd "$DIR/../" && cargo build --no-default-features "${additional_flags[@]}")
|
||||
perf record --freq=2000 --call-graph dwarf --output="$DIR/../perf.data" "$DIR/../target/${PROFILE}/parse" "${@}"
|
||||
|
||||
cargo build --profile "$PROFILE" --no-default-features
|
||||
perf record --freq=2000 --call-graph dwarf --output=perf.data target/${PROFILE}/compare
|
||||
# Convert to a format firefox will read
|
||||
# flags to consider --show-info
|
||||
perf script -F +pid --input perf.data > perf.firefox
|
||||
perf script -F +pid --input "$DIR/../perf.data" > "$DIR/../perf.firefox"
|
||||
|
||||
echo "You probably want to go to https://profiler.firefox.com/"
|
||||
echo "Either that or run hotspot"
|
||||
}
|
||||
|
||||
main "${@}"
|
||||
|
||||
@@ -7,14 +7,14 @@ DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
|
||||
: ${SHELL:="NO"} # or YES to launch a shell instead of running the test
|
||||
: ${TRACE:="NO"} # or YES to send traces to jaeger
|
||||
: ${BACKTRACE:="NO"} # or YES to print a rust backtrace when panicking
|
||||
: ${NO_COLOR:=""} # Set to anything to disable color output
|
||||
|
||||
cd "$DIR/../"
|
||||
REALPATH=$(command -v uu-realpath || command -v realpath)
|
||||
MAKE=$(command -v gmake || command -v make)
|
||||
|
||||
function main {
|
||||
build_container
|
||||
launch_container
|
||||
launch_container "${@}"
|
||||
}
|
||||
|
||||
function build_container {
|
||||
@@ -24,23 +24,42 @@ function build_container {
|
||||
function launch_container {
|
||||
local additional_flags=()
|
||||
local additional_args=()
|
||||
local features=(compare)
|
||||
|
||||
if [ "$SHELL" != "YES" ]; then
|
||||
additional_args+=(cargo run)
|
||||
else
|
||||
additional_flags+=(-t)
|
||||
if [ "$NO_COLOR" != "" ]; then
|
||||
additional_flags+=(--env "NO_COLOR=$NO_COLOR")
|
||||
fi
|
||||
|
||||
if [ "$TRACE" = "YES" ]; then
|
||||
# We use the host network so it can talk to jaeger hosted at 127.0.0.1
|
||||
additional_flags+=(--network=host --env RUST_LOG=debug)
|
||||
features+=(tracing)
|
||||
fi
|
||||
|
||||
if [ "$SHELL" != "YES" ]; then
|
||||
local features_joined=$(IFS=","; echo "${features[*]}")
|
||||
additional_args+=(cargo run --bin compare --no-default-features --features "$features_joined")
|
||||
additional_flags+=(--read-only)
|
||||
else
|
||||
additional_args+=(/bin/sh)
|
||||
additional_flags+=(-t)
|
||||
fi
|
||||
|
||||
if [ "$BACKTRACE" = "YES" ]; then
|
||||
additional_flags+=(--env RUST_BACKTRACE=full)
|
||||
fi
|
||||
|
||||
docker run "${additional_flags[@]}" --init --rm -i -v "$($REALPATH ./):/source:ro" --mount source=cargo-cache,target=/usr/local/cargo/registry --mount source=rust-cache,target=/target --env CARGO_TARGET_DIR=/target -w /source organic-test "${additional_args[@]}"
|
||||
if [ $# -gt 0 ]; then
|
||||
# If we passed in args, we need to forward them along
|
||||
for path in "${@}"; do
|
||||
local full_path=$($REALPATH "$path")
|
||||
local containing_folder=$(dirname "$full_path")
|
||||
local file_name=$(basename "$full_path")
|
||||
docker run "${additional_flags[@]}" --init --rm -i --mount type=tmpfs,destination=/tmp -v "${containing_folder}:/input:ro" -v "$($REALPATH "$DIR/../"):/source:ro" --mount source=cargo-cache,target=/usr/local/cargo/registry --mount source=rust-cache,target=/target --env CARGO_TARGET_DIR=/target -w /source --entrypoint "" organic-test "${additional_args[@]}" -- "/input/$file_name"
|
||||
done
|
||||
else
|
||||
docker run "${additional_flags[@]}" --init --rm -i --mount type=tmpfs,destination=/tmp -v "$($REALPATH "$DIR/../"):/source:ro" --mount source=cargo-cache,target=/usr/local/cargo/registry --mount source=rust-cache,target=/target --env CARGO_TARGET_DIR=/target -w /source --entrypoint "" organic-test "${additional_args[@]}"
|
||||
fi
|
||||
}
|
||||
|
||||
main "${@}"
|
||||
|
||||
92
scripts/run_docker_compare_bisect.bash
Executable file
92
scripts/run_docker_compare_bisect.bash
Executable file
@@ -0,0 +1,92 @@
|
||||
#!/usr/bin/env bash
|
||||
#
|
||||
# Bisect parsing a file at various line cut-off points to see which line causes the parse to differ from emacs.
|
||||
set -euo pipefail
|
||||
IFS=$'\n\t'
|
||||
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
|
||||
|
||||
REALPATH=$(command -v uu-realpath || command -v realpath)
|
||||
|
||||
############## Setup #########################
|
||||
|
||||
function cleanup {
|
||||
for f in "${folders[@]}"; do
|
||||
log "Deleting $f"
|
||||
rm -rf "$f"
|
||||
done
|
||||
}
|
||||
folders=()
|
||||
for sig in EXIT INT QUIT HUP TERM; do
|
||||
trap "set +e; cleanup" "$sig"
|
||||
done
|
||||
|
||||
function die {
|
||||
local status_code="$1"
|
||||
shift
|
||||
(>&2 echo "${@}")
|
||||
exit "$status_code"
|
||||
}
|
||||
|
||||
function log {
|
||||
(>&2 echo "${@}")
|
||||
}
|
||||
|
||||
############## Program #########################
|
||||
|
||||
function main {
|
||||
log "Is is recommended that the output of \`mktemp -d -t 'compare_bisect.XXXXXXXX'\` is inside a tmpfs filesystem since this script will make many writes to these folders."
|
||||
|
||||
local target_full_path=$($REALPATH "$1")
|
||||
SOURCE_FOLDER=$(dirname "$target_full_path")
|
||||
TARGET_DOCUMENT=$(basename "$target_full_path")
|
||||
|
||||
|
||||
local good=0
|
||||
local bad=$(wc -l "$SOURCE_FOLDER/$TARGET_DOCUMENT" | awk '{print $1}')
|
||||
|
||||
set +e
|
||||
run_parse "$bad" &> /dev/null
|
||||
local status=$?
|
||||
set -e
|
||||
if [ $status -eq 0 ]; then
|
||||
log "Entire file passes."
|
||||
exit 0
|
||||
fi
|
||||
|
||||
while [[ "$((bad - good))" -gt 1 ]]; do
|
||||
local next_line=$((((bad - good) / 2) + good))
|
||||
log "Testing line $next_line"
|
||||
set +e
|
||||
run_parse "$next_line" &> /dev/null
|
||||
local status=$?
|
||||
set -e
|
||||
if [ $status -eq 0 ]; then
|
||||
good="$next_line"
|
||||
log "Line $next_line good"
|
||||
else
|
||||
bad="$next_line"
|
||||
log "Line $next_line bad"
|
||||
fi
|
||||
done
|
||||
echo "Bad line: $bad"
|
||||
}
|
||||
|
||||
function setup_temp_dir {
|
||||
local temp_dir=$(mktemp -d -t 'compare_bisect.XXXXXXXX')
|
||||
cp -r "$SOURCE_FOLDER/"* "$temp_dir/"
|
||||
echo "$temp_dir"
|
||||
}
|
||||
|
||||
function run_parse {
|
||||
local lines="$1"
|
||||
local temp_dir=$(setup_temp_dir)
|
||||
folders+=("$temp_dir")
|
||||
cat "$SOURCE_FOLDER/$TARGET_DOCUMENT" | head -n "$lines" > "$temp_dir/$TARGET_DOCUMENT"
|
||||
"${DIR}/run_docker_compare.bash" "$temp_dir/$TARGET_DOCUMENT"
|
||||
local status=$?
|
||||
rm -rf "$temp_dir"
|
||||
# TODO: Remove temp_dir from folders
|
||||
return "$status"
|
||||
}
|
||||
|
||||
main "${@}"
|
||||
@@ -4,7 +4,8 @@ set -euo pipefail
|
||||
IFS=$'\n\t'
|
||||
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
|
||||
|
||||
cd "$DIR/../"
|
||||
: ${NO_COLOR:=""} # Set to anything to disable color output
|
||||
|
||||
REALPATH=$(command -v uu-realpath || command -v realpath)
|
||||
MAKE=$(command -v gmake || command -v make)
|
||||
|
||||
@@ -31,7 +32,7 @@ function get_test_names {
|
||||
local test_file_full_path=$($REALPATH "$test_file")
|
||||
local relative_to_samples=$($REALPATH --relative-to "$samples_dir" "$test_file_full_path")
|
||||
local without_extension="${relative_to_samples%.org}"
|
||||
echo "${without_extension/\//_}" | tr '[:upper:]' '[:lower:]'
|
||||
echo "autogen_${without_extension//\//_}" | tr '[:upper:]' '[:lower:]'
|
||||
else
|
||||
echo "$test_file" | tr '[:upper:]' '[:lower:]'
|
||||
fi
|
||||
@@ -40,17 +41,21 @@ function get_test_names {
|
||||
|
||||
function launch_container {
|
||||
local test="$1"
|
||||
local additional_args=()
|
||||
local additional_flags=()
|
||||
|
||||
if [ "$NO_COLOR" != "" ]; then
|
||||
additional_flags+=(--env "NO_COLOR=$NO_COLOR")
|
||||
fi
|
||||
|
||||
local init_script=$(cat <<EOF
|
||||
set -euo pipefail
|
||||
IFS=\$'\n\t'
|
||||
|
||||
cargo test --no-fail-fast --lib --test test_loader "$test" -- --show-output
|
||||
cargo test --no-default-features --features compare --no-fail-fast --lib --test test_loader "$test" -- --show-output
|
||||
EOF
|
||||
)
|
||||
|
||||
docker run --init --rm -v "$($REALPATH ./):/source:ro" --mount source=cargo-cache,target=/usr/local/cargo/registry --mount source=rust-cache,target=/target --env CARGO_TARGET_DIR=/target -w /source organic-test sh -c "$init_script"
|
||||
docker run "${additional_flags[@]}" --init --rm --read-only --mount type=tmpfs,destination=/tmp -v "$($REALPATH "$DIR/../"):/source:ro" --mount source=cargo-cache,target=/usr/local/cargo/registry --mount source=rust-cache,target=/target --env CARGO_TARGET_DIR=/target -w /source --entrypoint "" organic-test sh -c "$init_script"
|
||||
}
|
||||
|
||||
|
||||
|
||||
@@ -4,7 +4,6 @@ set -euo pipefail
|
||||
IFS=$'\n\t'
|
||||
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
|
||||
|
||||
cd "$DIR/../"
|
||||
REALPATH=$(command -v uu-realpath || command -v realpath)
|
||||
|
||||
function main {
|
||||
@@ -12,7 +11,7 @@ function main {
|
||||
|
||||
local test
|
||||
while read test; do
|
||||
cargo test --no-fail-fast --test test_loader "$test" -- --show-output
|
||||
(cd "$DIR/../" && cargo test --no-default-features --features compare --no-fail-fast --test test_loader "$test" -- --show-output)
|
||||
done<<<"$test_names"
|
||||
}
|
||||
|
||||
@@ -25,7 +24,7 @@ function get_test_names {
|
||||
local test_file_full_path=$($REALPATH "$test_file")
|
||||
local relative_to_samples=$($REALPATH --relative-to "$samples_dir" "$test_file_full_path")
|
||||
local without_extension="${relative_to_samples%.org}"
|
||||
echo "${without_extension/\//_}" | tr '[:upper:]' '[:lower:]'
|
||||
echo "${without_extension//\//_}" | tr '[:upper:]' '[:lower:]'
|
||||
else
|
||||
echo "$test_file" | tr '[:upper:]' '[:lower:]'
|
||||
fi
|
||||
|
||||
21
scripts/time_parse.bash
Executable file
21
scripts/time_parse.bash
Executable file
@@ -0,0 +1,21 @@
|
||||
#!/usr/bin/env bash
|
||||
#
|
||||
# Time running a single parse without invoking a compare with emacs.
|
||||
set -euo pipefail
|
||||
IFS=$'\n\t'
|
||||
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
|
||||
|
||||
: ${PROFILE:="release-lto"}
|
||||
|
||||
function main {
|
||||
local additional_flags=()
|
||||
if [ "$PROFILE" = "dev" ] || [ "$PROFILE" = "debug" ]; then
|
||||
PROFILE="debug"
|
||||
else
|
||||
additional_flags+=(--profile "$PROFILE")
|
||||
fi
|
||||
(cd "$DIR/../" && cargo build --no-default-features "${additional_flags[@]}")
|
||||
time "$DIR/../target/${PROFILE}/parse" "${@}"
|
||||
}
|
||||
|
||||
main "${@}"
|
||||
52
src/bin_compare.rs
Normal file
52
src/bin_compare.rs
Normal file
@@ -0,0 +1,52 @@
|
||||
#![feature(round_char_boundary)]
|
||||
#![feature(exact_size_is_empty)]
|
||||
use std::io::Read;
|
||||
|
||||
use organic::compare::run_anonymous_compare;
|
||||
use organic::compare::run_compare_on_file;
|
||||
|
||||
#[cfg(feature = "tracing")]
|
||||
use crate::init_tracing::init_telemetry;
|
||||
#[cfg(feature = "tracing")]
|
||||
use crate::init_tracing::shutdown_telemetry;
|
||||
#[cfg(feature = "tracing")]
|
||||
mod init_tracing;
|
||||
|
||||
#[cfg(not(feature = "tracing"))]
|
||||
fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
main_body()
|
||||
}
|
||||
|
||||
#[cfg(feature = "tracing")]
|
||||
fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
let rt = tokio::runtime::Runtime::new()?;
|
||||
let result = rt.block_on(async {
|
||||
init_telemetry()?;
|
||||
let main_body_result = main_body();
|
||||
shutdown_telemetry()?;
|
||||
main_body_result
|
||||
});
|
||||
result
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
|
||||
fn main_body() -> Result<(), Box<dyn std::error::Error>> {
|
||||
let args = std::env::args().skip(1);
|
||||
if args.is_empty() {
|
||||
let org_contents = read_stdin_to_string()?;
|
||||
run_anonymous_compare(org_contents)
|
||||
} else {
|
||||
for arg in args {
|
||||
run_compare_on_file(arg)?
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
fn read_stdin_to_string() -> Result<String, Box<dyn std::error::Error>> {
|
||||
let mut stdin_contents = String::new();
|
||||
std::io::stdin()
|
||||
.lock()
|
||||
.read_to_string(&mut stdin_contents)?;
|
||||
Ok(stdin_contents)
|
||||
}
|
||||
73
src/compare/compare.rs
Normal file
73
src/compare/compare.rs
Normal file
@@ -0,0 +1,73 @@
|
||||
use std::path::Path;
|
||||
|
||||
use crate::compare::diff::compare_document;
|
||||
use crate::compare::parse::emacs_parse_anonymous_org_document;
|
||||
use crate::compare::parse::emacs_parse_file_org_document;
|
||||
use crate::compare::parse::get_emacs_version;
|
||||
use crate::compare::parse::get_org_mode_version;
|
||||
use crate::compare::sexp::sexp;
|
||||
use crate::parser::parse;
|
||||
use crate::parser::parse_with_settings;
|
||||
use crate::GlobalSettings;
|
||||
use crate::LocalFileAccessInterface;
|
||||
|
||||
pub fn run_anonymous_compare<P: AsRef<str>>(
|
||||
org_contents: P,
|
||||
) -> Result<(), Box<dyn std::error::Error>> {
|
||||
let org_contents = org_contents.as_ref();
|
||||
eprintln!("Using emacs version: {}", get_emacs_version()?.trim());
|
||||
eprintln!("Using org-mode version: {}", get_org_mode_version()?.trim());
|
||||
let rust_parsed = parse(org_contents)?;
|
||||
let org_sexp = emacs_parse_anonymous_org_document(org_contents)?;
|
||||
let (_remaining, parsed_sexp) = sexp(org_sexp.as_str()).map_err(|e| e.to_string())?;
|
||||
|
||||
println!("{}\n\n\n", org_contents);
|
||||
println!("{}", org_sexp);
|
||||
println!("{:#?}", rust_parsed);
|
||||
|
||||
// We do the diffing after printing out both parsed forms in case the diffing panics
|
||||
let diff_result = compare_document(&parsed_sexp, &rust_parsed)?;
|
||||
diff_result.print(org_contents)?;
|
||||
|
||||
if diff_result.is_bad() {
|
||||
Err("Diff results do not match.")?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn run_compare_on_file<P: AsRef<Path>>(org_path: P) -> Result<(), Box<dyn std::error::Error>> {
|
||||
let org_path = org_path.as_ref();
|
||||
eprintln!("Using emacs version: {}", get_emacs_version()?.trim());
|
||||
eprintln!("Using org-mode version: {}", get_org_mode_version()?.trim());
|
||||
let parent_directory = org_path
|
||||
.parent()
|
||||
.ok_or("Should be contained inside a directory.")?;
|
||||
let org_contents = std::fs::read_to_string(org_path)?;
|
||||
let org_contents = org_contents.as_str();
|
||||
let file_access_interface = LocalFileAccessInterface {
|
||||
working_directory: Some(parent_directory.to_path_buf()),
|
||||
};
|
||||
let global_settings = {
|
||||
let mut global_settings = GlobalSettings::default();
|
||||
global_settings.file_access = &file_access_interface;
|
||||
global_settings
|
||||
};
|
||||
let rust_parsed = parse_with_settings(org_contents, &global_settings)?;
|
||||
let org_sexp = emacs_parse_file_org_document(org_path)?;
|
||||
let (_remaining, parsed_sexp) = sexp(org_sexp.as_str()).map_err(|e| e.to_string())?;
|
||||
|
||||
println!("{}\n\n\n", org_contents);
|
||||
println!("{}", org_sexp);
|
||||
println!("{:#?}", rust_parsed);
|
||||
|
||||
// We do the diffing after printing out both parsed forms in case the diffing panics
|
||||
let diff_result = compare_document(&parsed_sexp, &rust_parsed)?;
|
||||
diff_result.print(org_contents)?;
|
||||
|
||||
if diff_result.is_bad() {
|
||||
Err("Diff results do not match.")?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
1585
src/compare/diff.rs
1585
src/compare/diff.rs
File diff suppressed because it is too large
Load Diff
@@ -1,7 +1,7 @@
|
||||
mod compare;
|
||||
mod diff;
|
||||
mod parse;
|
||||
mod sexp;
|
||||
mod util;
|
||||
pub use diff::compare_document;
|
||||
pub use parse::emacs_parse_org_document;
|
||||
pub use parse::get_emacs_version;
|
||||
pub use parse::get_org_mode_version;
|
||||
pub use compare::run_anonymous_compare;
|
||||
pub use compare::run_compare_on_file;
|
||||
|
||||
@@ -1,6 +1,9 @@
|
||||
use std::path::Path;
|
||||
use std::process::Command;
|
||||
|
||||
pub fn emacs_parse_org_document<C>(file_contents: C) -> Result<String, Box<dyn std::error::Error>>
|
||||
pub fn emacs_parse_anonymous_org_document<C>(
|
||||
file_contents: C,
|
||||
) -> Result<String, Box<dyn std::error::Error>>
|
||||
where
|
||||
C: AsRef<str>,
|
||||
{
|
||||
@@ -15,14 +18,46 @@ where
|
||||
escaped_file_contents = escaped_file_contents
|
||||
);
|
||||
let mut cmd = Command::new("emacs");
|
||||
let proc = cmd
|
||||
let cmd = cmd
|
||||
.arg("-q")
|
||||
.arg("--no-site-file")
|
||||
.arg("--no-splash")
|
||||
.arg("--batch")
|
||||
.arg("--eval")
|
||||
.arg(elisp_script);
|
||||
let out = proc.output()?;
|
||||
let out = cmd.output()?;
|
||||
out.status.exit_ok()?;
|
||||
let org_sexp = out.stderr;
|
||||
Ok(String::from_utf8(org_sexp)?)
|
||||
}
|
||||
|
||||
pub fn emacs_parse_file_org_document<P>(file_path: P) -> Result<String, Box<dyn std::error::Error>>
|
||||
where
|
||||
P: AsRef<Path>,
|
||||
{
|
||||
let file_path = file_path.as_ref().canonicalize()?;
|
||||
let containing_directory = file_path.parent().ok_or(format!(
|
||||
"Failed to get containing directory for path {}",
|
||||
file_path.display()
|
||||
))?;
|
||||
let elisp_script = format!(
|
||||
r#"(progn
|
||||
(org-mode)
|
||||
(message "%s" (pp-to-string (org-element-parse-buffer)))
|
||||
)"#
|
||||
);
|
||||
let mut cmd = Command::new("emacs");
|
||||
let cmd = cmd
|
||||
.current_dir(containing_directory)
|
||||
.arg("-q")
|
||||
.arg("--no-site-file")
|
||||
.arg("--no-splash")
|
||||
.arg("--batch")
|
||||
.arg("--insert")
|
||||
.arg(file_path.as_os_str())
|
||||
.arg("--eval")
|
||||
.arg(elisp_script);
|
||||
let out = cmd.output()?;
|
||||
out.status.exit_ok()?;
|
||||
let org_sexp = out.stderr;
|
||||
Ok(String::from_utf8(org_sexp)?)
|
||||
@@ -55,7 +90,7 @@ pub fn get_emacs_version() -> Result<String, Box<dyn std::error::Error>> {
|
||||
(message "%s" (version))
|
||||
)"#;
|
||||
let mut cmd = Command::new("emacs");
|
||||
let proc = cmd
|
||||
let cmd = cmd
|
||||
.arg("-q")
|
||||
.arg("--no-site-file")
|
||||
.arg("--no-splash")
|
||||
@@ -63,7 +98,7 @@ pub fn get_emacs_version() -> Result<String, Box<dyn std::error::Error>> {
|
||||
.arg("--eval")
|
||||
.arg(elisp_script);
|
||||
|
||||
let out = proc.output()?;
|
||||
let out = cmd.output()?;
|
||||
out.status.exit_ok()?;
|
||||
Ok(String::from_utf8(out.stderr)?)
|
||||
}
|
||||
@@ -74,7 +109,7 @@ pub fn get_org_mode_version() -> Result<String, Box<dyn std::error::Error>> {
|
||||
(message "%s" (org-version nil t nil))
|
||||
)"#;
|
||||
let mut cmd = Command::new("emacs");
|
||||
let proc = cmd
|
||||
let cmd = cmd
|
||||
.arg("-q")
|
||||
.arg("--no-site-file")
|
||||
.arg("--no-splash")
|
||||
@@ -82,7 +117,7 @@ pub fn get_org_mode_version() -> Result<String, Box<dyn std::error::Error>> {
|
||||
.arg("--eval")
|
||||
.arg(elisp_script);
|
||||
|
||||
let out = proc.output()?;
|
||||
let out = cmd.output()?;
|
||||
out.status.exit_ok()?;
|
||||
Ok(String::from_utf8(out.stderr)?)
|
||||
}
|
||||
|
||||
@@ -16,9 +16,6 @@ use nom::sequence::delimited;
|
||||
use nom::sequence::preceded;
|
||||
use nom::sequence::tuple;
|
||||
|
||||
use super::org_source::convert_error;
|
||||
use super::org_source::OrgSource;
|
||||
use super::util::get_consumed;
|
||||
use crate::error::Res;
|
||||
|
||||
#[derive(Debug)]
|
||||
@@ -31,20 +28,102 @@ pub enum Token<'s> {
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct TextWithProperties<'s> {
|
||||
pub text: &'s str,
|
||||
pub properties: Vec<Token<'s>>,
|
||||
pub(crate) text: &'s str,
|
||||
pub(crate) properties: Vec<Token<'s>>,
|
||||
}
|
||||
|
||||
impl<'s> TextWithProperties<'s> {
|
||||
pub fn unquote(&self) -> Result<String, Box<dyn std::error::Error>> {
|
||||
let mut out = String::with_capacity(self.text.len());
|
||||
if !self.text.starts_with(r#"""#) {
|
||||
enum ParseState {
|
||||
Normal,
|
||||
Escape,
|
||||
}
|
||||
|
||||
impl<'s> Token<'s> {
|
||||
pub(crate) fn as_vector<'p>(
|
||||
&'p self,
|
||||
) -> Result<&'p Vec<Token<'s>>, Box<dyn std::error::Error>> {
|
||||
Ok(match self {
|
||||
Token::Vector(children) => Ok(children),
|
||||
_ => Err(format!("wrong token type, expected vector: {:?}", self)),
|
||||
}?)
|
||||
}
|
||||
|
||||
pub(crate) fn as_list<'p>(&'p self) -> Result<&'p Vec<Token<'s>>, Box<dyn std::error::Error>> {
|
||||
Ok(match self {
|
||||
Token::List(children) => Ok(children),
|
||||
_ => Err(format!("wrong token type, expected list: {:?}", self)),
|
||||
}?)
|
||||
}
|
||||
|
||||
pub(crate) fn as_atom<'p>(&'p self) -> Result<&'s str, Box<dyn std::error::Error>> {
|
||||
Ok(match self {
|
||||
Token::Atom(body) => Ok(*body),
|
||||
_ => Err(format!("wrong token type, expected atom: {:?}", self)),
|
||||
}?)
|
||||
}
|
||||
|
||||
pub(crate) fn as_text<'p>(
|
||||
&'p self,
|
||||
) -> Result<&'p TextWithProperties<'s>, Box<dyn std::error::Error>> {
|
||||
Ok(match self {
|
||||
Token::TextWithProperties(body) => Ok(body),
|
||||
_ => Err(format!("wrong token type, expected text: {:?}", self)),
|
||||
}?)
|
||||
}
|
||||
|
||||
pub(crate) fn as_map<'p>(
|
||||
&'p self,
|
||||
) -> Result<HashMap<&'s str, &'p Token<'s>>, Box<dyn std::error::Error>> {
|
||||
let mut hashmap = HashMap::new();
|
||||
|
||||
let children = self.as_list()?;
|
||||
if children.len() % 2 != 0 {
|
||||
return Err("Expecting an even number of children".into());
|
||||
}
|
||||
let mut key: Option<&str> = None;
|
||||
for child in children.iter() {
|
||||
match key {
|
||||
None => {
|
||||
key = Some(child.as_atom()?);
|
||||
}
|
||||
Some(key_val) => {
|
||||
key = None;
|
||||
hashmap.insert(key_val, child);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
Ok(hashmap)
|
||||
}
|
||||
}
|
||||
|
||||
/// Check if the child string slice is a slice of the parent string slice.
|
||||
fn is_slice_of(parent: &str, child: &str) -> bool {
|
||||
let parent_start = parent.as_ptr() as usize;
|
||||
let parent_end = parent_start + parent.len();
|
||||
let child_start = child.as_ptr() as usize;
|
||||
let child_end = child_start + child.len();
|
||||
child_start >= parent_start && child_end <= parent_end
|
||||
}
|
||||
|
||||
/// Get a slice of the string that was consumed in a parser using the original input to the parser and the remaining input after the parser.
|
||||
pub fn get_consumed<'s>(input: &'s str, remaining: &'s str) -> &'s str {
|
||||
assert!(is_slice_of(input, remaining));
|
||||
let source = {
|
||||
let offset = remaining.as_ptr() as usize - input.as_ptr() as usize;
|
||||
&input[..offset]
|
||||
};
|
||||
source.into()
|
||||
}
|
||||
|
||||
pub(crate) fn unquote(text: &str) -> Result<String, Box<dyn std::error::Error>> {
|
||||
let mut out = String::with_capacity(text.len());
|
||||
if !text.starts_with(r#"""#) {
|
||||
return Err("Quoted text does not start with quote.".into());
|
||||
}
|
||||
if !self.text.ends_with(r#"""#) {
|
||||
if !text.ends_with(r#"""#) {
|
||||
return Err("Quoted text does not end with quote.".into());
|
||||
}
|
||||
let interior_text = &self.text[1..(self.text.len() - 1)];
|
||||
let interior_text = &text[1..(text.len() - 1)];
|
||||
let mut state = ParseState::Normal;
|
||||
for current_char in interior_text.chars().into_iter() {
|
||||
state = match (state, current_char) {
|
||||
@@ -71,92 +150,22 @@ impl<'s> TextWithProperties<'s> {
|
||||
|
||||
Ok(out)
|
||||
}
|
||||
}
|
||||
|
||||
enum ParseState {
|
||||
Normal,
|
||||
Escape,
|
||||
}
|
||||
|
||||
impl<'s> Token<'s> {
|
||||
pub fn as_vector<'p>(&'p self) -> Result<&'p Vec<Token<'s>>, Box<dyn std::error::Error>> {
|
||||
Ok(match self {
|
||||
Token::Vector(children) => Ok(children),
|
||||
_ => Err(format!("wrong token type {:?}", self)),
|
||||
}?)
|
||||
}
|
||||
|
||||
pub fn as_list<'p>(&'p self) -> Result<&'p Vec<Token<'s>>, Box<dyn std::error::Error>> {
|
||||
Ok(match self {
|
||||
Token::List(children) => Ok(children),
|
||||
_ => Err(format!("wrong token type {:?}", self)),
|
||||
}?)
|
||||
}
|
||||
|
||||
pub fn as_atom<'p>(&'p self) -> Result<&'s str, Box<dyn std::error::Error>> {
|
||||
Ok(match self {
|
||||
Token::Atom(body) => Ok(*body),
|
||||
_ => Err(format!("wrong token type {:?}", self)),
|
||||
}?)
|
||||
}
|
||||
|
||||
pub fn as_text<'p>(&'p self) -> Result<&'p TextWithProperties<'s>, Box<dyn std::error::Error>> {
|
||||
Ok(match self {
|
||||
Token::TextWithProperties(body) => Ok(body),
|
||||
_ => Err(format!("wrong token type {:?}", self)),
|
||||
}?)
|
||||
}
|
||||
|
||||
pub fn as_map<'p>(
|
||||
&'p self,
|
||||
) -> Result<HashMap<&'s str, &'p Token<'s>>, Box<dyn std::error::Error>> {
|
||||
let mut hashmap = HashMap::new();
|
||||
|
||||
let children = self.as_list()?;
|
||||
if children.len() % 2 != 0 {
|
||||
return Err("Expecting an even number of children".into());
|
||||
}
|
||||
let mut key: Option<&str> = None;
|
||||
for child in children.iter() {
|
||||
match key {
|
||||
None => {
|
||||
key = Some(child.as_atom()?);
|
||||
}
|
||||
Some(key_val) => {
|
||||
key = None;
|
||||
hashmap.insert(key_val, child);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
Ok(hashmap)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
|
||||
pub fn sexp_with_padding<'s>(input: &'s str) -> Res<&'s str, Token<'s>> {
|
||||
pub fn sexp<'s>(input: &'s str) -> Res<&'s str, Token<'s>> {
|
||||
let (remaining, _) = multispace0(input)?;
|
||||
let remaining = OrgSource::new(remaining);
|
||||
let (remaining, tkn) = token(remaining)
|
||||
.map(|(rem, out)| (Into::<&str>::into(rem), out))
|
||||
.map_err(convert_error)?;
|
||||
let (remaining, tkn) = token(remaining).map(|(rem, out)| (Into::<&str>::into(rem), out))?;
|
||||
let (remaining, _) = multispace0(remaining)?;
|
||||
Ok((remaining, tkn))
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
|
||||
pub fn sexp<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, Token<'s>> {
|
||||
let (remaining, tkn) = token(input)?;
|
||||
Ok((remaining, tkn))
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
|
||||
fn token<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, Token<'s>> {
|
||||
fn token<'s>(input: &'s str) -> Res<&'s str, Token<'s>> {
|
||||
alt((list, vector, atom))(input)
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
|
||||
fn list<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, Token<'s>> {
|
||||
fn list<'s>(input: &'s str) -> Res<&'s str, Token<'s>> {
|
||||
let (remaining, _) = tag("(")(input)?;
|
||||
let (remaining, children) = delimited(
|
||||
multispace0,
|
||||
@@ -168,7 +177,7 @@ fn list<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, Token<'s>> {
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
|
||||
fn vector<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, Token<'s>> {
|
||||
fn vector<'s>(input: &'s str) -> Res<&'s str, Token<'s>> {
|
||||
let (remaining, _) = tag("[")(input)?;
|
||||
let (remaining, children) = delimited(
|
||||
multispace0,
|
||||
@@ -180,7 +189,7 @@ fn vector<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, Token<'s>> {
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
|
||||
fn atom<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, Token<'s>> {
|
||||
fn atom<'s>(input: &'s str) -> Res<&'s str, Token<'s>> {
|
||||
not(peek(one_of(")]")))(input)?;
|
||||
alt((
|
||||
text_with_properties,
|
||||
@@ -191,7 +200,7 @@ fn atom<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, Token<'s>> {
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
|
||||
fn unquoted_atom<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, Token<'s>> {
|
||||
fn unquoted_atom<'s>(input: &'s str) -> Res<&'s str, Token<'s>> {
|
||||
let (remaining, body) = take_till1(|c| match c {
|
||||
' ' | '\t' | '\r' | '\n' | ')' | ']' => true,
|
||||
_ => false,
|
||||
@@ -200,7 +209,7 @@ fn unquoted_atom<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, Token<'s>> {
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
|
||||
fn quoted_atom<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, Token<'s>> {
|
||||
fn quoted_atom<'s>(input: &'s str) -> Res<&'s str, Token<'s>> {
|
||||
let (remaining, _) = tag(r#"""#)(input)?;
|
||||
let (remaining, _) = escaped(
|
||||
take_till1(|c| match c {
|
||||
@@ -216,7 +225,7 @@ fn quoted_atom<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, Token<'s>> {
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
|
||||
fn hash_notation<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, Token<'s>> {
|
||||
fn hash_notation<'s>(input: &'s str) -> Res<&'s str, Token<'s>> {
|
||||
let (remaining, _) = tag("#<")(input)?;
|
||||
let (remaining, _body) = take_till1(|c| match c {
|
||||
'>' => true,
|
||||
@@ -227,7 +236,7 @@ fn hash_notation<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, Token<'s>> {
|
||||
Ok((remaining, Token::Atom(source.into())))
|
||||
}
|
||||
|
||||
fn text_with_properties<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, Token<'s>> {
|
||||
fn text_with_properties<'s>(input: &'s str) -> Res<&'s str, Token<'s>> {
|
||||
let (remaining, _) = tag("#(")(input)?;
|
||||
let (remaining, (text, props)) = delimited(
|
||||
multispace0,
|
||||
@@ -257,7 +266,7 @@ mod tests {
|
||||
#[test]
|
||||
fn simple() {
|
||||
let input = " (foo bar baz ) ";
|
||||
let (remaining, parsed) = sexp_with_padding(input).expect("Parse the input");
|
||||
let (remaining, parsed) = sexp(input).expect("Parse the input");
|
||||
assert_eq!(remaining, "");
|
||||
assert!(match parsed {
|
||||
Token::Atom(_) => false,
|
||||
@@ -270,7 +279,7 @@ mod tests {
|
||||
#[test]
|
||||
fn quoted() {
|
||||
let input = r#" ("foo" bar baz ) "#;
|
||||
let (remaining, parsed) = sexp_with_padding(input).expect("Parse the input");
|
||||
let (remaining, parsed) = sexp(input).expect("Parse the input");
|
||||
assert_eq!(remaining, "");
|
||||
assert!(match parsed {
|
||||
Token::Atom(_) => false,
|
||||
@@ -294,7 +303,7 @@ mod tests {
|
||||
#[test]
|
||||
fn quoted_containing_paren() {
|
||||
let input = r#" (foo "b(a)r" baz ) "#;
|
||||
let (remaining, parsed) = sexp_with_padding(input).expect("Parse the input");
|
||||
let (remaining, parsed) = sexp(input).expect("Parse the input");
|
||||
assert_eq!(remaining, "");
|
||||
assert!(match parsed {
|
||||
Token::List(_) => true,
|
||||
@@ -330,7 +339,7 @@ mod tests {
|
||||
#[test]
|
||||
fn string_containing_escaped_characters() {
|
||||
let input = r#" (foo "\\( x=2 \\)" bar) "#;
|
||||
let (remaining, parsed) = sexp_with_padding(input).expect("Parse the input");
|
||||
let (remaining, parsed) = sexp(input).expect("Parse the input");
|
||||
assert_eq!(remaining, "");
|
||||
assert!(match parsed {
|
||||
Token::Atom(_) => false,
|
||||
@@ -1,5 +1,5 @@
|
||||
use crate::parser::sexp::Token;
|
||||
use crate::parser::Source;
|
||||
use super::sexp::Token;
|
||||
use crate::types::Source;
|
||||
|
||||
/// Check if the child string slice is a slice of the parent string slice.
|
||||
fn is_slice_of(parent: &str, child: &str) -> bool {
|
||||
@@ -13,7 +13,7 @@ fn is_slice_of(parent: &str, child: &str) -> bool {
|
||||
/// Get the offset into source that the rust object exists at.
|
||||
///
|
||||
/// These offsets are zero-based unlike the elisp ones.
|
||||
pub fn get_offsets<'s, S: Source<'s>>(source: &'s str, rust_object: &'s S) -> (usize, usize) {
|
||||
fn get_offsets<'s, S: Source<'s>>(source: &'s str, rust_object: &'s S) -> (usize, usize) {
|
||||
let rust_object_source = rust_object.get_source();
|
||||
assert!(is_slice_of(source, rust_object_source));
|
||||
let offset = rust_object_source.as_ptr() as usize - source.as_ptr() as usize;
|
||||
@@ -21,7 +21,10 @@ pub fn get_offsets<'s, S: Source<'s>>(source: &'s str, rust_object: &'s S) -> (u
|
||||
(offset, end)
|
||||
}
|
||||
|
||||
pub fn assert_name<'s>(emacs: &'s Token<'s>, name: &str) -> Result<(), Box<dyn std::error::Error>> {
|
||||
pub(crate) fn assert_name<'s>(
|
||||
emacs: &'s Token<'s>,
|
||||
name: &str,
|
||||
) -> Result<(), Box<dyn std::error::Error>> {
|
||||
let children = emacs.as_list()?;
|
||||
let first_child = children
|
||||
.first()
|
||||
@@ -37,7 +40,7 @@ pub fn assert_name<'s>(emacs: &'s Token<'s>, name: &str) -> Result<(), Box<dyn s
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn assert_bounds<'s, S: Source<'s>>(
|
||||
pub(crate) fn assert_bounds<'s, S: Source<'s>>(
|
||||
source: &'s str,
|
||||
emacs: &'s Token<'s>,
|
||||
rust: &'s S,
|
||||
@@ -47,13 +50,14 @@ pub fn assert_bounds<'s, S: Source<'s>>(
|
||||
standard_properties
|
||||
.begin
|
||||
.ok_or("Token should have a begin.")?,
|
||||
standard_properties
|
||||
.end
|
||||
.ok_or("Token should have a begin.")?,
|
||||
standard_properties.end.ok_or("Token should have an end.")?,
|
||||
);
|
||||
let (rust_begin, rust_end) = get_offsets(source, rust);
|
||||
if (rust_begin + 1) != begin || (rust_end + 1) != end {
|
||||
Err(format!("Rust bounds ({rust_begin}, {rust_end}) do not match emacs bounds ({emacs_begin}, {emacs_end})", rust_begin = rust_begin + 1, rust_end = rust_end + 1, emacs_begin=begin, emacs_end=end))?;
|
||||
let rust_begin_char_offset = (&source[..rust_begin]).chars().count();
|
||||
let rust_end_char_offset =
|
||||
rust_begin_char_offset + (&source[rust_begin..rust_end]).chars().count();
|
||||
if (rust_begin_char_offset + 1) != begin || (rust_end_char_offset + 1) != end {
|
||||
Err(format!("Rust bounds (in chars) ({rust_begin}, {rust_end}) do not match emacs bounds ({emacs_begin}, {emacs_end})", rust_begin = rust_begin_char_offset + 1, rust_end = rust_end_char_offset + 1, emacs_begin=begin, emacs_end=end))?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
@@ -139,3 +143,28 @@ fn maybe_token_to_usize(
|
||||
.flatten() // Outer option is whether or not the param exists, inner option is whether or not it is nil
|
||||
.map_or(Ok(None), |r| r.map(Some))?)
|
||||
}
|
||||
|
||||
/// Get a named property from the emacs token.
|
||||
///
|
||||
/// Returns Ok(None) if value is nil.
|
||||
///
|
||||
/// Returns error if the attribute is not specified on the token at all.
|
||||
pub(crate) fn get_property<'s, 'x>(
|
||||
emacs: &'s Token<'s>,
|
||||
key: &'x str,
|
||||
) -> Result<Option<&'s Token<'s>>, Box<dyn std::error::Error>> {
|
||||
let children = emacs.as_list()?;
|
||||
let attributes_child = children
|
||||
.iter()
|
||||
.nth(1)
|
||||
.ok_or("Should have an attributes child.")?;
|
||||
let attributes_map = attributes_child.as_map()?;
|
||||
let prop = attributes_map
|
||||
.get(key)
|
||||
.ok_or(format!("Missing {} attribute.", key))?;
|
||||
match prop.as_atom() {
|
||||
Ok("nil") => return Ok(None),
|
||||
_ => {}
|
||||
};
|
||||
Ok(Some(*prop))
|
||||
}
|
||||
|
||||
185
src/context/context.rs
Normal file
185
src/context/context.rs
Normal file
@@ -0,0 +1,185 @@
|
||||
use std::marker::PhantomData;
|
||||
|
||||
use nom::combinator::eof;
|
||||
use nom::IResult;
|
||||
|
||||
use super::exiting::ExitClass;
|
||||
use super::global_settings::GlobalSettings;
|
||||
use super::list::List;
|
||||
use super::DynContextMatcher;
|
||||
use super::RefContext;
|
||||
use crate::error::CustomError;
|
||||
use crate::error::MyError;
|
||||
use crate::error::Res;
|
||||
use crate::parser::OrgSource;
|
||||
|
||||
#[derive(Debug)]
|
||||
pub(crate) enum ContextElement<'r, 's> {
|
||||
/// Stores a parser that indicates that children should exit upon matching an exit matcher.
|
||||
ExitMatcherNode(ExitMatcherNode<'r>),
|
||||
|
||||
/// Stores the name of the current element to prevent directly nesting elements of the same type.
|
||||
Context(&'r str),
|
||||
|
||||
/// Stores the name of the current object to prevent directly nesting elements of the same type.
|
||||
ContextObject(&'r str),
|
||||
|
||||
/// Indicates if elements should consume the whitespace after them.
|
||||
ConsumeTrailingWhitespace(bool),
|
||||
|
||||
/// This is just here to use the 's lifetime until I'm sure we can eliminate it from ContextElement.
|
||||
#[allow(dead_code)]
|
||||
Placeholder(PhantomData<&'s str>),
|
||||
}
|
||||
|
||||
pub(crate) struct ExitMatcherNode<'r> {
|
||||
// TODO: Should this be "&'r DynContextMatcher<'c>" ?
|
||||
pub(crate) exit_matcher: &'r DynContextMatcher<'r>,
|
||||
pub(crate) class: ExitClass,
|
||||
}
|
||||
|
||||
impl<'r> std::fmt::Debug for ExitMatcherNode<'r> {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
let mut formatter = f.debug_struct("ExitMatcherNode");
|
||||
formatter.field("class", &self.class.to_string());
|
||||
formatter.finish()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub(crate) struct Context<'g, 'r, 's> {
|
||||
global_settings: &'g GlobalSettings<'g, 's>,
|
||||
tree: List<'r, &'r ContextElement<'r, 's>>,
|
||||
}
|
||||
|
||||
impl<'g, 'r, 's> Context<'g, 'r, 's> {
|
||||
pub(crate) fn new(
|
||||
global_settings: &'g GlobalSettings<'g, 's>,
|
||||
tree: List<'r, &'r ContextElement<'r, 's>>,
|
||||
) -> Self {
|
||||
Self {
|
||||
global_settings,
|
||||
tree,
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn with_additional_node(&'r self, new_element: &'r ContextElement<'r, 's>) -> Self {
|
||||
let new_tree = self.tree.push(new_element);
|
||||
Self::new(self.global_settings, new_tree)
|
||||
}
|
||||
|
||||
pub(crate) fn iter(&'r self) -> super::list::Iter<'r, &'r ContextElement<'r, 's>> {
|
||||
self.tree.iter()
|
||||
}
|
||||
|
||||
fn iter_context(&'r self) -> Iter<'g, 'r, 's> {
|
||||
Iter {
|
||||
next: self.tree.iter_list(),
|
||||
global_settings: self.global_settings,
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn get_parent(&'r self) -> Option<Self> {
|
||||
self.tree.get_parent().map(|parent_tree| Self {
|
||||
global_settings: self.global_settings,
|
||||
tree: parent_tree.clone(),
|
||||
})
|
||||
}
|
||||
|
||||
fn get_data(&self) -> &ContextElement<'r, 's> {
|
||||
self.tree.get_data()
|
||||
}
|
||||
|
||||
pub(crate) fn get_global_settings(&self) -> &'g GlobalSettings<'g, 's> {
|
||||
self.global_settings
|
||||
}
|
||||
|
||||
pub(crate) fn with_global_settings<'gg>(
|
||||
&self,
|
||||
new_settings: &'gg GlobalSettings<'gg, 's>,
|
||||
) -> Context<'gg, 'r, 's> {
|
||||
Context {
|
||||
global_settings: new_settings,
|
||||
tree: self.tree.clone(),
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
|
||||
pub(crate) fn check_exit_matcher(
|
||||
&'r self,
|
||||
i: OrgSource<'s>,
|
||||
) -> IResult<OrgSource<'s>, OrgSource<'s>, CustomError<OrgSource<'s>>> {
|
||||
let mut current_class_filter = ExitClass::Gamma;
|
||||
for current_node in self.iter_context() {
|
||||
let context_element = current_node.get_data();
|
||||
match context_element {
|
||||
ContextElement::ExitMatcherNode(exit_matcher) => {
|
||||
if exit_matcher.class as u32 <= current_class_filter as u32 {
|
||||
current_class_filter = exit_matcher.class;
|
||||
let local_result = (exit_matcher.exit_matcher)(¤t_node, i);
|
||||
if local_result.is_ok() {
|
||||
return local_result;
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
};
|
||||
}
|
||||
// TODO: Make this a specific error instead of just a generic MyError
|
||||
return Err(nom::Err::Error(CustomError::MyError(MyError(
|
||||
"NoExit".into(),
|
||||
))));
|
||||
}
|
||||
|
||||
/// Indicates if elements should consume the whitespace after them.
|
||||
///
|
||||
/// Defaults to true.
|
||||
pub(crate) fn should_consume_trailing_whitespace(&self) -> bool {
|
||||
self._should_consume_trailing_whitespace().unwrap_or(true)
|
||||
}
|
||||
|
||||
fn _should_consume_trailing_whitespace(&self) -> Option<bool> {
|
||||
for current_node in self.iter() {
|
||||
match current_node {
|
||||
ContextElement::ConsumeTrailingWhitespace(should) => {
|
||||
return Some(*should);
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
|
||||
fn document_end<'b, 'g, 'r, 's>(
|
||||
_context: RefContext<'b, 'g, 'r, 's>,
|
||||
input: OrgSource<'s>,
|
||||
) -> Res<OrgSource<'s>, OrgSource<'s>> {
|
||||
eof(input)
|
||||
}
|
||||
|
||||
struct Iter<'g, 'r, 's> {
|
||||
global_settings: &'g GlobalSettings<'g, 's>,
|
||||
next: super::list::IterList<'r, &'r ContextElement<'r, 's>>,
|
||||
}
|
||||
|
||||
impl<'g, 'r, 's> Iterator for Iter<'g, 'r, 's> {
|
||||
type Item = Context<'g, 'r, 's>;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
let next_tree = self.next.next();
|
||||
let ret =
|
||||
next_tree.map(|parent_tree| Context::new(self.global_settings, parent_tree.clone()));
|
||||
ret
|
||||
}
|
||||
}
|
||||
|
||||
impl<'r, 's> ContextElement<'r, 's> {
|
||||
pub(crate) fn document_context() -> Self {
|
||||
Self::ExitMatcherNode(ExitMatcherNode {
|
||||
exit_matcher: &document_end,
|
||||
class: ExitClass::Document,
|
||||
})
|
||||
}
|
||||
}
|
||||
13
src/context/exiting.rs
Normal file
13
src/context/exiting.rs
Normal file
@@ -0,0 +1,13 @@
|
||||
/// How strongly an exit matcher binds.
///
/// The discriminants are compared numerically when filtering exit
/// matchers, so the explicit values matter: lower values take precedence.
#[derive(Debug, Copy, Clone)]
pub(crate) enum ExitClass {
    Document = 1,
    Alpha = 2,
    Beta = 3,
    Gamma = 4,
}

impl std::fmt::Display for ExitClass {
    /// Display the variant name by delegating to the Debug representation.
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        std::fmt::Debug::fmt(self, f)
    }
}
|
||||
23
src/context/file_access_interface.rs
Normal file
23
src/context/file_access_interface.rs
Normal file
@@ -0,0 +1,23 @@
|
||||
use std::fmt::Debug;
|
||||
use std::path::PathBuf;
|
||||
|
||||
/// Abstraction over reading files, so parsing does not have to assume a
/// local filesystem.
pub trait FileAccessInterface: Debug {
    fn read_file(&self, path: &str) -> Result<String, std::io::Error>;
}

/// FileAccessInterface backed by the local filesystem, resolving relative
/// paths against an optional working directory.
#[derive(Debug, Clone)]
pub struct LocalFileAccessInterface {
    pub working_directory: Option<PathBuf>,
}

impl FileAccessInterface for LocalFileAccessInterface {
    fn read_file(&self, path: &str) -> Result<String, std::io::Error> {
        // Join onto the working directory when one is set; otherwise use
        // the path exactly as given.
        let final_path = match &self.working_directory {
            Some(dir) => dir.join(path),
            None => PathBuf::from(path),
        };
        std::fs::read_to_string(final_path)
    }
}
|
||||
34
src/context/global_settings.rs
Normal file
34
src/context/global_settings.rs
Normal file
@@ -0,0 +1,34 @@
|
||||
use std::collections::BTreeSet;
|
||||
|
||||
use super::FileAccessInterface;
|
||||
use super::LocalFileAccessInterface;
|
||||
use crate::types::Object;
|
||||
|
||||
// TODO: Ultimately, I think we'll need most of this: https://orgmode.org/manual/In_002dbuffer-Settings.html
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct GlobalSettings<'g, 's> {
|
||||
pub radio_targets: Vec<&'g Vec<Object<'s>>>,
|
||||
pub file_access: &'g dyn FileAccessInterface,
|
||||
pub in_progress_todo_keywords: BTreeSet<String>,
|
||||
pub complete_todo_keywords: BTreeSet<String>,
|
||||
}
|
||||
|
||||
impl<'g, 's> GlobalSettings<'g, 's> {
|
||||
fn new() -> GlobalSettings<'g, 's> {
|
||||
GlobalSettings {
|
||||
radio_targets: Vec::new(),
|
||||
file_access: &LocalFileAccessInterface {
|
||||
working_directory: None,
|
||||
},
|
||||
in_progress_todo_keywords: BTreeSet::new(),
|
||||
complete_todo_keywords: BTreeSet::new(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'g, 's> Default for GlobalSettings<'g, 's> {
|
||||
fn default() -> GlobalSettings<'g, 's> {
|
||||
GlobalSettings::new()
|
||||
}
|
||||
}
|
||||
69
src/context/list.rs
Normal file
69
src/context/list.rs
Normal file
@@ -0,0 +1,69 @@
|
||||
use std::fmt::Debug;
|
||||
|
||||
/// An immutable, parent-linked (cons-style) list.
///
/// Each node borrows its parent, so "pushing" creates a new head on the
/// stack without mutating or copying any existing node.
#[derive(Debug, Clone)]
pub(crate) struct List<'parent, T> {
    data: T,
    parent: Link<'parent, T>,
}

type Link<'parent, T> = Option<&'parent List<'parent, T>>;

impl<'parent, T> List<'parent, T> {
    /// Create a single-node list holding `first_item`.
    pub(crate) fn new(first_item: T) -> Self {
        Self {
            data: first_item,
            parent: None,
        }
    }

    /// The value stored at this node.
    pub(crate) fn get_data(&self) -> &T {
        &self.data
    }

    /// The node this one was pushed onto, if any.
    pub(crate) fn get_parent(&'parent self) -> Link<'parent, T> {
        self.parent
    }

    /// Iterate over the values from this node back to the root.
    pub(crate) fn iter(&self) -> Iter<'_, T> {
        Iter { next: Some(self) }
    }

    /// Iterate over the nodes (not the values) from this node back to the root.
    pub(crate) fn iter_list(&self) -> IterList<'_, T> {
        IterList { next: Some(self) }
    }

    /// Create a new head node containing `item` whose parent is `self`.
    pub(crate) fn push(&'parent self, item: T) -> Self {
        Self {
            data: item,
            parent: Some(self),
        }
    }
}

/// Iterator over the values along a `List`'s parent chain.
pub(crate) struct Iter<'a, T> {
    next: Link<'a, T>,
}

impl<'a, T> Iterator for Iter<'a, T> {
    type Item = &'a T;

    fn next(&mut self) -> Option<Self::Item> {
        let current = self.next?;
        self.next = current.get_parent();
        Some(current.get_data())
    }
}

/// Iterator over the nodes along a `List`'s parent chain.
pub(crate) struct IterList<'a, T> {
    next: Link<'a, T>,
}

impl<'a, T> Iterator for IterList<'a, T> {
    type Item = &'a List<'a, T>;

    fn next(&mut self) -> Option<Self::Item> {
        let current = self.next?;
        self.next = current.get_parent();
        Some(current)
    }
}
|
||||
29
src/context/mod.rs
Normal file
29
src/context/mod.rs
Normal file
@@ -0,0 +1,29 @@
|
||||
use crate::error::Res;
|
||||
use crate::parser::OrgSource;
|
||||
|
||||
mod context;
|
||||
mod exiting;
|
||||
mod file_access_interface;
|
||||
mod global_settings;
|
||||
mod list;
|
||||
mod parser_with_context;
|
||||
|
||||
pub(crate) type RefContext<'b, 'g, 'r, 's> = &'b Context<'g, 'r, 's>;
|
||||
pub(crate) trait ContextMatcher = for<'b, 'g, 'r, 's> Fn(
|
||||
RefContext<'b, 'g, 'r, 's>,
|
||||
OrgSource<'s>,
|
||||
) -> Res<OrgSource<'s>, OrgSource<'s>>;
|
||||
type DynContextMatcher<'c> = dyn ContextMatcher + 'c;
|
||||
pub(crate) trait Matcher = for<'s> Fn(OrgSource<'s>) -> Res<OrgSource<'s>, OrgSource<'s>>;
|
||||
#[allow(dead_code)]
|
||||
type DynMatcher<'c> = dyn Matcher + 'c;
|
||||
|
||||
pub(crate) use context::Context;
|
||||
pub(crate) use context::ContextElement;
|
||||
pub(crate) use context::ExitMatcherNode;
|
||||
pub(crate) use exiting::ExitClass;
|
||||
pub use file_access_interface::FileAccessInterface;
|
||||
pub use file_access_interface::LocalFileAccessInterface;
|
||||
pub use global_settings::GlobalSettings;
|
||||
pub(crate) use list::List;
|
||||
pub(crate) use parser_with_context::parser_with_context;
|
||||
@@ -2,17 +2,18 @@ use nom::error::ErrorKind;
|
||||
use nom::error::ParseError;
|
||||
use nom::IResult;
|
||||
|
||||
pub type Res<T, U> = IResult<T, U, CustomError<T>>;
|
||||
pub(crate) type Res<T, U> = IResult<T, U, CustomError<T>>;
|
||||
|
||||
// TODO: MyError probably shouldn't be based on the same type as the input type since it's used exclusively with static strings right now.
|
||||
#[derive(Debug, PartialEq)]
|
||||
#[derive(Debug)]
|
||||
pub enum CustomError<I> {
|
||||
MyError(MyError<I>),
|
||||
Nom(I, ErrorKind),
|
||||
IO(std::io::Error),
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq)]
|
||||
pub struct MyError<I>(pub I);
|
||||
#[derive(Debug)]
|
||||
pub struct MyError<I>(pub(crate) I);
|
||||
|
||||
impl<I> ParseError<I> for CustomError<I> {
|
||||
fn from_error_kind(input: I, kind: ErrorKind) -> Self {
|
||||
@@ -24,3 +25,9 @@ impl<I> ParseError<I> for CustomError<I> {
|
||||
other
|
||||
}
|
||||
}
|
||||
|
||||
impl<I> From<std::io::Error> for CustomError<I> {
|
||||
fn from(value: std::io::Error) -> Self {
|
||||
CustomError::IO(value)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
mod error;
|
||||
pub use error::CustomError;
|
||||
pub use error::MyError;
|
||||
pub use error::Res;
|
||||
pub(crate) use error::CustomError;
|
||||
pub(crate) use error::MyError;
|
||||
pub(crate) use error::Res;
|
||||
|
||||
@@ -10,7 +10,7 @@ const SERVICE_NAME: &'static str = "organic";
|
||||
// Despite the obvious verbosity that fully-qualifying everything causes, in these functions I am fully-qualifying everything relating to tracing. This is because the tracing feature involves multiple libraries working together and so I think it is beneficial to see which libraries contribute which bits.
|
||||
|
||||
#[cfg(feature = "tracing")]
|
||||
pub fn init_telemetry() -> Result<(), Box<dyn std::error::Error>> {
|
||||
pub(crate) fn init_telemetry() -> Result<(), Box<dyn std::error::Error>> {
|
||||
// by default it will hit http://localhost:4317 with a gRPC payload
|
||||
// TODO: I think the endpoint can be controlled by the OTEL_EXPORTER_OTLP_TRACES_ENDPOINT env variable instead of hard-coded into this code base. Regardless, I am the only developer right now so I am not too concerned.
|
||||
let exporter = opentelemetry_otlp::new_exporter()
|
||||
@@ -55,17 +55,17 @@ pub fn init_telemetry() -> Result<(), Box<dyn std::error::Error>> {
|
||||
}
|
||||
|
||||
#[cfg(feature = "tracing")]
|
||||
pub fn shutdown_telemetry() -> Result<(), Box<dyn std::error::Error>> {
|
||||
pub(crate) fn shutdown_telemetry() -> Result<(), Box<dyn std::error::Error>> {
|
||||
opentelemetry::global::shutdown_tracer_provider();
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg(not(feature = "tracing"))]
|
||||
pub fn init_telemetry() -> Result<(), Box<dyn std::error::Error>> {
|
||||
pub(crate) fn init_telemetry() -> Result<(), Box<dyn std::error::Error>> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg(not(feature = "tracing"))]
|
||||
pub fn shutdown_telemetry() -> Result<(), Box<dyn std::error::Error>> {
|
||||
pub(crate) fn shutdown_telemetry() -> Result<(), Box<dyn std::error::Error>> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
18
src/lib.rs
18
src/lib.rs
@@ -1,16 +1,16 @@
|
||||
#![feature(round_char_boundary)]
|
||||
#![feature(exit_status_error)]
|
||||
#![feature(trait_alias)]
|
||||
// TODO: #![warn(missing_docs)]
|
||||
|
||||
#[cfg(feature = "compare")]
|
||||
mod compare;
|
||||
#[cfg(feature = "compare")]
|
||||
pub use compare::compare_document;
|
||||
#[cfg(feature = "compare")]
|
||||
pub use compare::emacs_parse_org_document;
|
||||
#[cfg(feature = "compare")]
|
||||
pub use compare::get_emacs_version;
|
||||
#[cfg(feature = "compare")]
|
||||
pub use compare::get_org_mode_version;
|
||||
pub mod compare;
|
||||
|
||||
mod context;
|
||||
mod error;
|
||||
pub mod parser;
|
||||
pub mod types;
|
||||
|
||||
pub use context::FileAccessInterface;
|
||||
pub use context::GlobalSettings;
|
||||
pub use context::LocalFileAccessInterface;
|
||||
|
||||
84
src/main.rs
84
src/main.rs
@@ -1,17 +1,12 @@
|
||||
#![feature(round_char_boundary)]
|
||||
#![feature(exact_size_is_empty)]
|
||||
use std::io::Read;
|
||||
use std::path::Path;
|
||||
|
||||
use ::organic::parser::document;
|
||||
#[cfg(feature = "compare")]
|
||||
use organic::compare_document;
|
||||
#[cfg(feature = "compare")]
|
||||
use organic::emacs_parse_org_document;
|
||||
#[cfg(feature = "compare")]
|
||||
use organic::get_emacs_version;
|
||||
#[cfg(feature = "compare")]
|
||||
use organic::get_org_mode_version;
|
||||
#[cfg(feature = "compare")]
|
||||
use organic::parser::sexp::sexp_with_padding;
|
||||
use ::organic::parser::parse;
|
||||
use organic::parser::parse_with_settings;
|
||||
use organic::GlobalSettings;
|
||||
use organic::LocalFileAccessInterface;
|
||||
|
||||
#[cfg(feature = "tracing")]
|
||||
use crate::init_tracing::init_telemetry;
|
||||
@@ -39,8 +34,16 @@ fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
|
||||
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
|
||||
fn main_body() -> Result<(), Box<dyn std::error::Error>> {
|
||||
let args = std::env::args().skip(1);
|
||||
if args.is_empty() {
|
||||
let org_contents = read_stdin_to_string()?;
|
||||
run_compare(org_contents)
|
||||
run_anonymous_parse(org_contents)
|
||||
} else {
|
||||
for arg in args {
|
||||
run_parse_on_file(arg)?
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
fn read_stdin_to_string() -> Result<String, Box<dyn std::error::Error>> {
|
||||
@@ -51,41 +54,28 @@ fn read_stdin_to_string() -> Result<String, Box<dyn std::error::Error>> {
|
||||
Ok(stdin_contents)
|
||||
}
|
||||
|
||||
#[cfg(feature = "compare")]
|
||||
fn run_compare<P: AsRef<str>>(org_contents: P) -> Result<(), Box<dyn std::error::Error>> {
|
||||
let emacs_version = get_emacs_version()?;
|
||||
let org_mode_version = get_org_mode_version()?;
|
||||
eprintln!("Using emacs version: {}", emacs_version.trim());
|
||||
eprintln!("Using org-mode version: {}", org_mode_version.trim());
|
||||
let (remaining, rust_parsed) = document(org_contents.as_ref()).map_err(|e| e.to_string())?;
|
||||
let org_sexp = emacs_parse_org_document(org_contents.as_ref())?;
|
||||
let (_remaining, parsed_sexp) =
|
||||
sexp_with_padding(org_sexp.as_str()).map_err(|e| e.to_string())?;
|
||||
|
||||
println!("{}\n\n\n", org_contents.as_ref());
|
||||
println!("{}", org_sexp);
|
||||
println!("{:#?}", rust_parsed);
|
||||
|
||||
// We do the diffing after printing out both parsed forms in case the diffing panics
|
||||
let diff_result = compare_document(&parsed_sexp, &rust_parsed)?;
|
||||
diff_result.print()?;
|
||||
|
||||
if diff_result.is_bad() {
|
||||
Err("Diff results do not match.")?;
|
||||
}
|
||||
if remaining != "" {
|
||||
Err(format!("There was unparsed text remaining: {}", remaining))?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg(not(feature = "compare"))]
|
||||
fn run_compare<P: AsRef<str>>(org_contents: P) -> Result<(), Box<dyn std::error::Error>> {
|
||||
eprintln!(
|
||||
"This program was built with compare disabled. Only parsing with organic, not comparing."
|
||||
);
|
||||
let (remaining, rust_parsed) = document(org_contents.as_ref()).map_err(|e| e.to_string())?;
|
||||
fn run_anonymous_parse<P: AsRef<str>>(org_contents: P) -> Result<(), Box<dyn std::error::Error>> {
|
||||
let rust_parsed = parse(org_contents.as_ref())?;
|
||||
println!("{:#?}", rust_parsed);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn run_parse_on_file<P: AsRef<Path>>(org_path: P) -> Result<(), Box<dyn std::error::Error>> {
|
||||
let org_path = org_path.as_ref();
|
||||
let parent_directory = org_path
|
||||
.parent()
|
||||
.ok_or("Should be contained inside a directory.")?;
|
||||
let org_contents = std::fs::read_to_string(org_path)?;
|
||||
let org_contents = org_contents.as_str();
|
||||
let file_access_interface = LocalFileAccessInterface {
|
||||
working_directory: Some(parent_directory.to_path_buf()),
|
||||
};
|
||||
let global_settings = {
|
||||
let mut global_settings = GlobalSettings::default();
|
||||
global_settings.file_access = &file_access_interface;
|
||||
global_settings
|
||||
};
|
||||
let rust_parsed = parse_with_settings(org_contents, &global_settings)?;
|
||||
println!("{:#?}", rust_parsed);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -5,20 +5,21 @@ use nom::combinator::recognize;
|
||||
use nom::multi::many_till;
|
||||
|
||||
use super::org_source::OrgSource;
|
||||
use super::Context;
|
||||
use super::util::maybe_consume_object_trailing_whitespace_if_not_exiting;
|
||||
use crate::context::parser_with_context;
|
||||
use crate::context::ContextElement;
|
||||
use crate::context::ExitClass;
|
||||
use crate::context::ExitMatcherNode;
|
||||
use crate::context::RefContext;
|
||||
use crate::error::Res;
|
||||
use crate::parser::exiting::ExitClass;
|
||||
use crate::parser::parser_context::ContextElement;
|
||||
use crate::parser::parser_context::ExitMatcherNode;
|
||||
use crate::parser::parser_with_context::parser_with_context;
|
||||
use crate::parser::plain_link::protocol;
|
||||
use crate::parser::util::exit_matcher_parser;
|
||||
use crate::parser::util::get_consumed;
|
||||
use crate::parser::AngleLink;
|
||||
use crate::types::AngleLink;
|
||||
|
||||
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
|
||||
pub fn angle_link<'r, 's>(
|
||||
context: Context<'r, 's>,
|
||||
pub(crate) fn angle_link<'b, 'g, 'r, 's>(
|
||||
context: RefContext<'b, 'g, 'r, 's>,
|
||||
input: OrgSource<'s>,
|
||||
) -> Res<OrgSource<'s>, AngleLink<'s>> {
|
||||
let (remaining, _) = tag("<")(input)?;
|
||||
@@ -26,6 +27,8 @@ pub fn angle_link<'r, 's>(
|
||||
let (remaining, _separator) = tag(":")(remaining)?;
|
||||
let (remaining, path) = path_angle(context, remaining)?;
|
||||
let (remaining, _) = tag(">")(remaining)?;
|
||||
let (remaining, _trailing_whitespace) =
|
||||
maybe_consume_object_trailing_whitespace_if_not_exiting(context, remaining)?;
|
||||
let source = get_consumed(input, remaining);
|
||||
Ok((
|
||||
remaining,
|
||||
@@ -38,15 +41,15 @@ pub fn angle_link<'r, 's>(
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
|
||||
fn path_angle<'r, 's>(
|
||||
context: Context<'r, 's>,
|
||||
fn path_angle<'b, 'g, 'r, 's>(
|
||||
context: RefContext<'b, 'g, 'r, 's>,
|
||||
input: OrgSource<'s>,
|
||||
) -> Res<OrgSource<'s>, OrgSource<'s>> {
|
||||
let parser_context =
|
||||
context.with_additional_node(ContextElement::ExitMatcherNode(ExitMatcherNode {
|
||||
let parser_context = ContextElement::ExitMatcherNode(ExitMatcherNode {
|
||||
class: ExitClass::Gamma,
|
||||
exit_matcher: &path_angle_end,
|
||||
}));
|
||||
});
|
||||
let parser_context = context.with_additional_node(&parser_context);
|
||||
|
||||
let exit_matcher = parser_with_context!(exit_matcher_parser)(&parser_context);
|
||||
|
||||
@@ -55,8 +58,8 @@ fn path_angle<'r, 's>(
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
|
||||
fn path_angle_end<'r, 's>(
|
||||
_context: Context<'r, 's>,
|
||||
fn path_angle_end<'b, 'g, 'r, 's>(
|
||||
_context: RefContext<'b, 'g, 'r, 's>,
|
||||
input: OrgSource<'s>,
|
||||
) -> Res<OrgSource<'s>, OrgSource<'s>> {
|
||||
tag(">")(input)
|
||||
|
||||
@@ -2,7 +2,6 @@ use nom::branch::alt;
|
||||
use nom::bytes::complete::tag;
|
||||
use nom::bytes::complete::tag_no_case;
|
||||
use nom::character::complete::anychar;
|
||||
use nom::character::complete::space0;
|
||||
use nom::combinator::opt;
|
||||
use nom::combinator::recognize;
|
||||
use nom::combinator::verify;
|
||||
@@ -11,42 +10,47 @@ use nom::multi::many_till;
|
||||
use nom::multi::separated_list1;
|
||||
use nom::sequence::tuple;
|
||||
|
||||
use super::citation_reference::must_balance_bracket;
|
||||
use super::org_source::BracketDepth;
|
||||
use super::org_source::OrgSource;
|
||||
use super::Context;
|
||||
use super::util::maybe_consume_object_trailing_whitespace_if_not_exiting;
|
||||
use crate::context::parser_with_context;
|
||||
use crate::context::ContextElement;
|
||||
use crate::context::ContextMatcher;
|
||||
use crate::context::ExitClass;
|
||||
use crate::context::ExitMatcherNode;
|
||||
use crate::context::RefContext;
|
||||
use crate::error::CustomError;
|
||||
use crate::error::Res;
|
||||
use crate::parser::citation_reference::citation_reference;
|
||||
use crate::parser::citation_reference::citation_reference_key;
|
||||
use crate::parser::citation_reference::get_bracket_depth;
|
||||
use crate::parser::exiting::ExitClass;
|
||||
use crate::parser::object::Citation;
|
||||
use crate::parser::object_parser::standard_set_object;
|
||||
use crate::parser::parser_context::CitationBracket;
|
||||
use crate::parser::parser_context::ContextElement;
|
||||
use crate::parser::parser_context::ExitMatcherNode;
|
||||
use crate::parser::parser_with_context::parser_with_context;
|
||||
use crate::parser::util::exit_matcher_parser;
|
||||
use crate::parser::util::get_consumed;
|
||||
use crate::parser::Object;
|
||||
use crate::types::Citation;
|
||||
use crate::types::Object;
|
||||
|
||||
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
|
||||
pub fn citation<'r, 's>(
|
||||
context: Context<'r, 's>,
|
||||
pub(crate) fn citation<'b, 'g, 'r, 's>(
|
||||
context: RefContext<'b, 'g, 'r, 's>,
|
||||
input: OrgSource<'s>,
|
||||
) -> Res<OrgSource<'s>, Citation<'s>> {
|
||||
// TODO: Despite being a standard object, citations cannot exist inside the global prefix/suffix for other citations because citations must contain something that matches @key which is forbidden inside the global prefix/suffix. This TODO is to evaluate if its worth putting in an explicit check for this (which can be easily accomplished by checking the output of `get_bracket_depth()`). I suspect its not worth it because I expect, outside of intentionally crafted inputs, this parser will exit immediately inside a citation since it is unlikely to find the "[cite" substring inside a citation global prefix/suffix.
|
||||
let (remaining, _) = tag_no_case("[cite")(input)?;
|
||||
let (remaining, _) = opt(citestyle)(remaining)?;
|
||||
let (remaining, _) = tag(":")(remaining)?;
|
||||
let (remaining, _prefix) = opt(parser_with_context!(global_prefix)(context))(remaining)?;
|
||||
let (remaining, _prefix) =
|
||||
must_balance_bracket(opt(parser_with_context!(global_prefix)(context)))(remaining)?;
|
||||
|
||||
let (remaining, _references) =
|
||||
separated_list1(tag(";"), parser_with_context!(citation_reference)(context))(remaining)?;
|
||||
let (remaining, _suffix) = opt(tuple((
|
||||
let (remaining, _suffix) = must_balance_bracket(opt(tuple((
|
||||
tag(";"),
|
||||
parser_with_context!(global_suffix)(context),
|
||||
)))(remaining)?;
|
||||
))))(remaining)?;
|
||||
let (remaining, _) = tag("]")(remaining)?;
|
||||
let (remaining, _) = space0(remaining)?;
|
||||
let (remaining, _trailing_whitespace) =
|
||||
maybe_consume_object_trailing_whitespace_if_not_exiting(context, remaining)?;
|
||||
let source = get_consumed(input, remaining);
|
||||
Ok((
|
||||
remaining,
|
||||
@@ -57,7 +61,7 @@ pub fn citation<'r, 's>(
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
|
||||
fn citestyle<'r, 's>(input: OrgSource<'s>) -> Res<OrgSource<'s>, OrgSource<'s>> {
|
||||
fn citestyle<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, OrgSource<'s>> {
|
||||
let (remaining, _) = tuple((tag("/"), style))(input)?;
|
||||
let (remaining, _) = opt(tuple((tag("/"), variant)))(remaining)?;
|
||||
let source = get_consumed(input, remaining);
|
||||
@@ -65,34 +69,30 @@ fn citestyle<'r, 's>(input: OrgSource<'s>) -> Res<OrgSource<'s>, OrgSource<'s>>
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
|
||||
fn style<'r, 's>(input: OrgSource<'s>) -> Res<OrgSource<'s>, OrgSource<'s>> {
|
||||
fn style<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, OrgSource<'s>> {
|
||||
recognize(many1(verify(anychar, |c| {
|
||||
c.is_alphanumeric() || "_-".contains(*c)
|
||||
})))(input)
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
|
||||
fn variant<'r, 's>(input: OrgSource<'s>) -> Res<OrgSource<'s>, OrgSource<'s>> {
|
||||
fn variant<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, OrgSource<'s>> {
|
||||
recognize(many1(verify(anychar, |c| {
|
||||
c.is_alphanumeric() || "_-/".contains(*c)
|
||||
})))(input)
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
|
||||
fn global_prefix<'r, 's>(
|
||||
context: Context<'r, 's>,
|
||||
fn global_prefix<'b, 'g, 'r, 's>(
|
||||
context: RefContext<'b, 'g, 'r, 's>,
|
||||
input: OrgSource<'s>,
|
||||
) -> Res<OrgSource<'s>, Vec<Object<'s>>> {
|
||||
// TODO: I could insert CitationBracket entries in the context after each matched object to reduce the scanning done for counting brackets which should be more efficient.
|
||||
let parser_context = context
|
||||
.with_additional_node(ContextElement::CitationBracket(CitationBracket {
|
||||
position: input,
|
||||
depth: 0,
|
||||
}))
|
||||
.with_additional_node(ContextElement::ExitMatcherNode(ExitMatcherNode {
|
||||
let exit_with_depth = global_prefix_end(input.get_bracket_depth());
|
||||
let parser_context = ContextElement::ExitMatcherNode(ExitMatcherNode {
|
||||
class: ExitClass::Gamma,
|
||||
exit_matcher: &global_prefix_end,
|
||||
}));
|
||||
exit_matcher: &exit_with_depth,
|
||||
});
|
||||
let parser_context = context.with_additional_node(&parser_context);
|
||||
let (remaining, (children, _exit_contents)) = verify(
|
||||
many_till(
|
||||
parser_with_context!(standard_set_object)(&parser_context),
|
||||
@@ -104,28 +104,20 @@ fn global_prefix<'r, 's>(
|
||||
Ok((remaining, children))
|
||||
}
|
||||
|
||||
fn global_prefix_end(starting_bracket_depth: BracketDepth) -> impl ContextMatcher {
|
||||
move |context, input: OrgSource<'_>| _global_prefix_end(context, input, starting_bracket_depth)
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
|
||||
fn global_prefix_end<'r, 's>(
|
||||
context: Context<'r, 's>,
|
||||
fn _global_prefix_end<'b, 'g, 'r, 's>(
|
||||
context: RefContext<'b, 'g, 'r, 's>,
|
||||
input: OrgSource<'s>,
|
||||
starting_bracket_depth: BracketDepth,
|
||||
) -> Res<OrgSource<'s>, OrgSource<'s>> {
|
||||
let context_depth = get_bracket_depth(context)
|
||||
.expect("This function should only be called from inside a citation.");
|
||||
let text_since_context_entry = get_consumed(context_depth.position, input);
|
||||
let mut current_depth = context_depth.depth;
|
||||
for c in Into::<&str>::into(text_since_context_entry).chars() {
|
||||
match c {
|
||||
'[' => {
|
||||
current_depth += 1;
|
||||
}
|
||||
']' if current_depth == 0 => {
|
||||
panic!("Exceeded citation global prefix bracket depth.")
|
||||
}
|
||||
']' if current_depth > 0 => {
|
||||
current_depth -= 1;
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
let current_depth = input.get_bracket_depth() - starting_bracket_depth;
|
||||
if current_depth < 0 {
|
||||
// This shouldn't be possible because if depth is 0 then a closing bracket should end the citation.
|
||||
unreachable!("Exceeded citation global prefix bracket depth.")
|
||||
}
|
||||
if current_depth == 0 {
|
||||
let close_bracket = tag::<&str, OrgSource<'_>, CustomError<OrgSource<'_>>>("]")(input);
|
||||
@@ -140,20 +132,16 @@ fn global_prefix_end<'r, 's>(
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
|
||||
fn global_suffix<'r, 's>(
|
||||
context: Context<'r, 's>,
|
||||
fn global_suffix<'b, 'g, 'r, 's>(
|
||||
context: RefContext<'b, 'g, 'r, 's>,
|
||||
input: OrgSource<'s>,
|
||||
) -> Res<OrgSource<'s>, Vec<Object<'s>>> {
|
||||
// TODO: I could insert CitationBracket entries in the context after each matched object to reduce the scanning done for counting brackets which should be more efficient.
|
||||
let parser_context = context
|
||||
.with_additional_node(ContextElement::CitationBracket(CitationBracket {
|
||||
position: input,
|
||||
depth: 0,
|
||||
}))
|
||||
.with_additional_node(ContextElement::ExitMatcherNode(ExitMatcherNode {
|
||||
let exit_with_depth = global_suffix_end(input.get_bracket_depth());
|
||||
let parser_context = ContextElement::ExitMatcherNode(ExitMatcherNode {
|
||||
class: ExitClass::Gamma,
|
||||
exit_matcher: &global_suffix_end,
|
||||
}));
|
||||
exit_matcher: &exit_with_depth,
|
||||
});
|
||||
let parser_context = context.with_additional_node(&parser_context);
|
||||
let (remaining, (children, _exit_contents)) = verify(
|
||||
many_till(
|
||||
parser_with_context!(standard_set_object)(&parser_context),
|
||||
@@ -164,28 +152,20 @@ fn global_suffix<'r, 's>(
|
||||
Ok((remaining, children))
|
||||
}
|
||||
|
||||
fn global_suffix_end(starting_bracket_depth: BracketDepth) -> impl ContextMatcher {
|
||||
move |context, input: OrgSource<'_>| _global_suffix_end(context, input, starting_bracket_depth)
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
|
||||
fn global_suffix_end<'r, 's>(
|
||||
context: Context<'r, 's>,
|
||||
fn _global_suffix_end<'b, 'g, 'r, 's>(
|
||||
context: RefContext<'b, 'g, 'r, 's>,
|
||||
input: OrgSource<'s>,
|
||||
starting_bracket_depth: BracketDepth,
|
||||
) -> Res<OrgSource<'s>, OrgSource<'s>> {
|
||||
let context_depth = get_bracket_depth(context)
|
||||
.expect("This function should only be called from inside a citation.");
|
||||
let text_since_context_entry = get_consumed(context_depth.position, input);
|
||||
let mut current_depth = context_depth.depth;
|
||||
for c in Into::<&str>::into(text_since_context_entry).chars() {
|
||||
match c {
|
||||
'[' => {
|
||||
current_depth += 1;
|
||||
}
|
||||
']' if current_depth == 0 => {
|
||||
panic!("Exceeded citation global suffix bracket depth.")
|
||||
}
|
||||
']' if current_depth > 0 => {
|
||||
current_depth -= 1;
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
let current_depth = input.get_bracket_depth() - starting_bracket_depth;
|
||||
if current_depth < 0 {
|
||||
// This shouldn't be possible because if depth is 0 then a closing bracket should end the citation.
|
||||
unreachable!("Exceeded citation global suffix bracket depth.")
|
||||
}
|
||||
if current_depth == 0 {
|
||||
let close_bracket = tag::<&str, OrgSource<'_>, CustomError<OrgSource<'_>>>("]")(input);
|
||||
@@ -202,19 +182,23 @@ fn global_suffix_end<'r, 's>(
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::context::Context;
|
||||
use crate::context::GlobalSettings;
|
||||
use crate::context::List;
|
||||
use crate::parser::element_parser::element;
|
||||
use crate::parser::parser_context::ContextTree;
|
||||
use crate::parser::parser_with_context::parser_with_context;
|
||||
use crate::parser::source::Source;
|
||||
use crate::types::Element;
|
||||
use crate::types::Source;
|
||||
|
||||
#[test]
|
||||
fn citation_simple() {
|
||||
let input = OrgSource::new("[cite:@foo]");
|
||||
let initial_context: ContextTree<'_, '_> = ContextTree::new();
|
||||
let global_settings = GlobalSettings::default();
|
||||
let initial_context = ContextElement::document_context();
|
||||
let initial_context = Context::new(&global_settings, List::new(&initial_context));
|
||||
let paragraph_matcher = parser_with_context!(element(true))(&initial_context);
|
||||
let (remaining, first_paragraph) = paragraph_matcher(input).expect("Parse first paragraph");
|
||||
let first_paragraph = match first_paragraph {
|
||||
crate::parser::Element::Paragraph(paragraph) => paragraph,
|
||||
Element::Paragraph(paragraph) => paragraph,
|
||||
_ => panic!("Should be a paragraph!"),
|
||||
};
|
||||
assert_eq!(Into::<&str>::into(remaining), "");
|
||||
|
||||
@@ -10,30 +10,34 @@ use nom::multi::many_till;
|
||||
use nom::sequence::preceded;
|
||||
use nom::sequence::tuple;
|
||||
|
||||
use super::org_source::BracketDepth;
|
||||
use super::org_source::OrgSource;
|
||||
use super::Context;
|
||||
use crate::context::parser_with_context;
|
||||
use crate::context::ContextElement;
|
||||
use crate::context::ContextMatcher;
|
||||
use crate::context::ExitClass;
|
||||
use crate::context::ExitMatcherNode;
|
||||
use crate::context::RefContext;
|
||||
use crate::error::CustomError;
|
||||
use crate::error::MyError;
|
||||
use crate::error::Res;
|
||||
use crate::parser::exiting::ExitClass;
|
||||
use crate::parser::object::CitationReference;
|
||||
use crate::parser::object_parser::minimal_set_object;
|
||||
use crate::parser::parser_context::CitationBracket;
|
||||
use crate::parser::parser_context::ContextElement;
|
||||
use crate::parser::parser_context::ExitMatcherNode;
|
||||
use crate::parser::parser_with_context::parser_with_context;
|
||||
use crate::parser::util::exit_matcher_parser;
|
||||
use crate::parser::util::get_consumed;
|
||||
use crate::parser::util::WORD_CONSTITUENT_CHARACTERS;
|
||||
use crate::parser::Object;
|
||||
use crate::types::CitationReference;
|
||||
use crate::types::Object;
|
||||
|
||||
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
|
||||
pub fn citation_reference<'r, 's>(
|
||||
context: Context<'r, 's>,
|
||||
pub(crate) fn citation_reference<'b, 'g, 'r, 's>(
|
||||
context: RefContext<'b, 'g, 'r, 's>,
|
||||
input: OrgSource<'s>,
|
||||
) -> Res<OrgSource<'s>, CitationReference<'s>> {
|
||||
let (remaining, _prefix) = opt(parser_with_context!(key_prefix)(context))(input)?;
|
||||
let (remaining, _prefix) =
|
||||
must_balance_bracket(opt(parser_with_context!(key_prefix)(context)))(input)?;
|
||||
let (remaining, _key) = parser_with_context!(citation_reference_key)(context)(remaining)?;
|
||||
let (remaining, _suffix) = opt(parser_with_context!(key_suffix)(context))(remaining)?;
|
||||
let (remaining, _suffix) =
|
||||
must_balance_bracket(opt(parser_with_context!(key_suffix)(context)))(remaining)?;
|
||||
let source = get_consumed(input, remaining);
|
||||
|
||||
Ok((
|
||||
@@ -45,8 +49,8 @@ pub fn citation_reference<'r, 's>(
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
|
||||
pub fn citation_reference_key<'r, 's>(
|
||||
context: Context<'r, 's>,
|
||||
pub(crate) fn citation_reference_key<'b, 'g, 'r, 's>(
|
||||
context: RefContext<'b, 'g, 'r, 's>,
|
||||
input: OrgSource<'s>,
|
||||
) -> Res<OrgSource<'s>, OrgSource<'s>> {
|
||||
let (remaining, source) = recognize(tuple((
|
||||
@@ -65,20 +69,16 @@ pub fn citation_reference_key<'r, 's>(
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
|
||||
fn key_prefix<'r, 's>(
|
||||
context: Context<'r, 's>,
|
||||
fn key_prefix<'b, 'g, 'r, 's>(
|
||||
context: RefContext<'b, 'g, 'r, 's>,
|
||||
input: OrgSource<'s>,
|
||||
) -> Res<OrgSource<'s>, Vec<Object<'s>>> {
|
||||
// TODO: I could insert CitationBracket entries in the context after each matched object to reduce the scanning done for counting brackets which should be more efficient.
|
||||
let parser_context = context
|
||||
.with_additional_node(ContextElement::CitationBracket(CitationBracket {
|
||||
position: input,
|
||||
depth: 0,
|
||||
}))
|
||||
.with_additional_node(ContextElement::ExitMatcherNode(ExitMatcherNode {
|
||||
let exit_with_depth = key_prefix_end(input.get_bracket_depth());
|
||||
let parser_context = ContextElement::ExitMatcherNode(ExitMatcherNode {
|
||||
class: ExitClass::Gamma,
|
||||
exit_matcher: &key_prefix_end,
|
||||
}));
|
||||
exit_matcher: &exit_with_depth,
|
||||
});
|
||||
let parser_context = context.with_additional_node(&parser_context);
|
||||
let (remaining, (children, _exit_contents)) = verify(
|
||||
many_till(
|
||||
parser_with_context!(minimal_set_object)(&parser_context),
|
||||
@@ -90,20 +90,16 @@ fn key_prefix<'r, 's>(
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
|
||||
fn key_suffix<'r, 's>(
|
||||
context: Context<'r, 's>,
|
||||
fn key_suffix<'b, 'g, 'r, 's>(
|
||||
context: RefContext<'b, 'g, 'r, 's>,
|
||||
input: OrgSource<'s>,
|
||||
) -> Res<OrgSource<'s>, Vec<Object<'s>>> {
|
||||
// TODO: I could insert CitationBracket entries in the context after each matched object to reduce the scanning done for counting brackets which should be more efficient.
|
||||
let parser_context = context
|
||||
.with_additional_node(ContextElement::CitationBracket(CitationBracket {
|
||||
position: input,
|
||||
depth: 0,
|
||||
}))
|
||||
.with_additional_node(ContextElement::ExitMatcherNode(ExitMatcherNode {
|
||||
let exit_with_depth = key_suffix_end(input.get_bracket_depth());
|
||||
let parser_context = ContextElement::ExitMatcherNode(ExitMatcherNode {
|
||||
class: ExitClass::Gamma,
|
||||
exit_matcher: &key_suffix_end,
|
||||
}));
|
||||
exit_matcher: &exit_with_depth,
|
||||
});
|
||||
let parser_context = context.with_additional_node(&parser_context);
|
||||
let (remaining, (children, _exit_contents)) = verify(
|
||||
many_till(
|
||||
parser_with_context!(minimal_set_object)(&parser_context),
|
||||
@@ -114,39 +110,20 @@ fn key_suffix<'r, 's>(
|
||||
Ok((remaining, children))
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
|
||||
pub fn get_bracket_depth<'r, 's>(context: Context<'r, 's>) -> Option<&'r CitationBracket<'s>> {
|
||||
for node in context.iter() {
|
||||
match node.get_data() {
|
||||
ContextElement::CitationBracket(depth) => return Some(depth),
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
None
|
||||
fn key_prefix_end(starting_bracket_depth: BracketDepth) -> impl ContextMatcher {
|
||||
move |context, input: OrgSource<'_>| _key_prefix_end(context, input, starting_bracket_depth)
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
|
||||
fn key_prefix_end<'r, 's>(
|
||||
context: Context<'r, 's>,
|
||||
fn _key_prefix_end<'b, 'g, 'r, 's>(
|
||||
context: RefContext<'b, 'g, 'r, 's>,
|
||||
input: OrgSource<'s>,
|
||||
starting_bracket_depth: BracketDepth,
|
||||
) -> Res<OrgSource<'s>, OrgSource<'s>> {
|
||||
let context_depth = get_bracket_depth(context)
|
||||
.expect("This function should only be called from inside a citation reference.");
|
||||
let text_since_context_entry = get_consumed(context_depth.position, input);
|
||||
let mut current_depth = context_depth.depth;
|
||||
for c in Into::<&str>::into(text_since_context_entry).chars() {
|
||||
match c {
|
||||
'[' => {
|
||||
current_depth += 1;
|
||||
}
|
||||
']' if current_depth == 0 => {
|
||||
panic!("Exceeded citation reference key prefix bracket depth.")
|
||||
}
|
||||
']' if current_depth > 0 => {
|
||||
current_depth -= 1;
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
let current_depth = input.get_bracket_depth() - starting_bracket_depth;
|
||||
if current_depth < 0 {
|
||||
// This shouldn't be possible because if depth is 0 then a closing bracket should end the citation.
|
||||
unreachable!("Exceeded citation key prefix bracket depth.")
|
||||
}
|
||||
if current_depth == 0 {
|
||||
let close_bracket = tag::<&str, OrgSource<'_>, CustomError<OrgSource<'_>>>("]")(input);
|
||||
@@ -160,28 +137,20 @@ fn key_prefix_end<'r, 's>(
|
||||
))(input)
|
||||
}
|
||||
|
||||
fn key_suffix_end(starting_bracket_depth: BracketDepth) -> impl ContextMatcher {
|
||||
move |context, input: OrgSource<'_>| _key_suffix_end(context, input, starting_bracket_depth)
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
|
||||
fn key_suffix_end<'r, 's>(
|
||||
context: Context<'r, 's>,
|
||||
fn _key_suffix_end<'b, 'g, 'r, 's>(
|
||||
_context: RefContext<'b, 'g, 'r, 's>,
|
||||
input: OrgSource<'s>,
|
||||
starting_bracket_depth: BracketDepth,
|
||||
) -> Res<OrgSource<'s>, OrgSource<'s>> {
|
||||
let context_depth = get_bracket_depth(context)
|
||||
.expect("This function should only be called from inside a citation reference.");
|
||||
let text_since_context_entry = get_consumed(context_depth.position, input);
|
||||
let mut current_depth = context_depth.depth;
|
||||
for c in Into::<&str>::into(text_since_context_entry).chars() {
|
||||
match c {
|
||||
'[' => {
|
||||
current_depth += 1;
|
||||
}
|
||||
']' if current_depth == 0 => {
|
||||
panic!("Exceeded citation reference key prefix bracket depth.")
|
||||
}
|
||||
']' if current_depth > 0 => {
|
||||
current_depth -= 1;
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
let current_depth = input.get_bracket_depth() - starting_bracket_depth;
|
||||
if current_depth < 0 {
|
||||
// This shouldn't be possible because if depth is 0 then a closing bracket should end the citation.
|
||||
unreachable!("Exceeded citation key suffix bracket depth.")
|
||||
}
|
||||
if current_depth == 0 {
|
||||
let close_bracket = tag::<&str, OrgSource<'_>, CustomError<OrgSource<'_>>>("]")(input);
|
||||
@@ -191,3 +160,21 @@ fn key_suffix_end<'r, 's>(
|
||||
}
|
||||
tag(";")(input)
|
||||
}
|
||||
|
||||
pub(crate) fn must_balance_bracket<'s, F, O>(
|
||||
mut inner: F,
|
||||
) -> impl FnMut(OrgSource<'s>) -> Res<OrgSource<'s>, O>
|
||||
where
|
||||
F: FnMut(OrgSource<'s>) -> Res<OrgSource<'s>, O>,
|
||||
{
|
||||
move |input: OrgSource<'_>| {
|
||||
let pre_bracket_depth = input.get_bracket_depth();
|
||||
let (remaining, output) = inner(input)?;
|
||||
if remaining.get_bracket_depth() - pre_bracket_depth != 0 {
|
||||
return Err(nom::Err::Error(CustomError::MyError(MyError(
|
||||
"UnbalancedBrackets".into(),
|
||||
))));
|
||||
}
|
||||
Ok((remaining, output))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -12,16 +12,16 @@ use nom::combinator::verify;
|
||||
use nom::sequence::tuple;
|
||||
|
||||
use super::org_source::OrgSource;
|
||||
use super::Context;
|
||||
use crate::context::parser_with_context;
|
||||
use crate::context::RefContext;
|
||||
use crate::error::Res;
|
||||
use crate::parser::parser_with_context::parser_with_context;
|
||||
use crate::parser::util::get_consumed;
|
||||
use crate::parser::util::start_of_line;
|
||||
use crate::parser::Clock;
|
||||
use crate::types::Clock;
|
||||
|
||||
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
|
||||
pub fn clock<'r, 's>(
|
||||
context: Context<'r, 's>,
|
||||
pub(crate) fn clock<'b, 'g, 'r, 's>(
|
||||
context: RefContext<'b, 'g, 'r, 's>,
|
||||
input: OrgSource<'s>,
|
||||
) -> Res<OrgSource<'s>, Clock<'s>> {
|
||||
start_of_line(input)?;
|
||||
@@ -44,8 +44,8 @@ pub fn clock<'r, 's>(
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
|
||||
fn inactive_timestamp_range_duration<'r, 's>(
|
||||
_context: Context<'r, 's>,
|
||||
fn inactive_timestamp_range_duration<'b, 'g, 'r, 's>(
|
||||
_context: RefContext<'b, 'g, 'r, 's>,
|
||||
input: OrgSource<'s>,
|
||||
) -> Res<OrgSource<'s>, OrgSource<'s>> {
|
||||
recognize(tuple((
|
||||
@@ -66,8 +66,8 @@ fn inactive_timestamp_range_duration<'r, 's>(
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
|
||||
fn inactive_timestamp<'r, 's>(
|
||||
_context: Context<'r, 's>,
|
||||
fn inactive_timestamp<'b, 'g, 'r, 's>(
|
||||
_context: RefContext<'b, 'g, 'r, 's>,
|
||||
input: OrgSource<'s>,
|
||||
) -> Res<OrgSource<'s>, OrgSource<'s>> {
|
||||
recognize(tuple((
|
||||
|
||||
@@ -13,20 +13,20 @@ use nom::sequence::tuple;
|
||||
|
||||
use super::org_source::OrgSource;
|
||||
use super::util::get_consumed;
|
||||
use super::Context;
|
||||
use crate::context::parser_with_context;
|
||||
use crate::context::ContextElement;
|
||||
use crate::context::RefContext;
|
||||
use crate::error::CustomError;
|
||||
use crate::error::MyError;
|
||||
use crate::error::Res;
|
||||
use crate::parser::parser_context::ContextElement;
|
||||
use crate::parser::parser_with_context::parser_with_context;
|
||||
use crate::parser::util::exit_matcher_parser;
|
||||
use crate::parser::util::immediate_in_section;
|
||||
use crate::parser::util::start_of_line;
|
||||
use crate::parser::Comment;
|
||||
use crate::types::Comment;
|
||||
|
||||
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
|
||||
pub fn comment<'r, 's>(
|
||||
context: Context<'r, 's>,
|
||||
pub(crate) fn comment<'b, 'g, 'r, 's>(
|
||||
context: RefContext<'b, 'g, 'r, 's>,
|
||||
input: OrgSource<'s>,
|
||||
) -> Res<OrgSource<'s>, Comment<'s>> {
|
||||
if immediate_in_section(context, "comment") {
|
||||
@@ -34,7 +34,8 @@ pub fn comment<'r, 's>(
|
||||
"Cannot nest objects of the same element".into(),
|
||||
))));
|
||||
}
|
||||
let parser_context = context.with_additional_node(ContextElement::Context("comment"));
|
||||
let parser_context = ContextElement::Context("comment");
|
||||
let parser_context = context.with_additional_node(&parser_context);
|
||||
let comment_line_matcher = parser_with_context!(comment_line)(&parser_context);
|
||||
let exit_matcher = parser_with_context!(exit_matcher_parser)(&parser_context);
|
||||
let (remaining, _first_line) = comment_line_matcher(input)?;
|
||||
@@ -51,8 +52,8 @@ pub fn comment<'r, 's>(
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
|
||||
fn comment_line<'r, 's>(
|
||||
_context: Context<'r, 's>,
|
||||
fn comment_line<'b, 'g, 'r, 's>(
|
||||
_context: RefContext<'b, 'g, 'r, 's>,
|
||||
input: OrgSource<'s>,
|
||||
) -> Res<OrgSource<'s>, OrgSource<'s>> {
|
||||
start_of_line(input)?;
|
||||
@@ -66,11 +67,24 @@ fn comment_line<'r, 's>(
|
||||
Ok((remaining, source))
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
|
||||
pub(crate) fn detect_comment<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, ()> {
|
||||
tuple((
|
||||
start_of_line,
|
||||
space0,
|
||||
tag("#"),
|
||||
alt((tag(" "), line_ending, eof)),
|
||||
))(input)?;
|
||||
Ok((input, ()))
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::parser::parser_context::ContextTree;
|
||||
use crate::parser::parser_with_context::parser_with_context;
|
||||
use crate::context::Context;
|
||||
use crate::context::ContextElement;
|
||||
use crate::context::GlobalSettings;
|
||||
use crate::context::List;
|
||||
|
||||
#[test]
|
||||
fn require_space_after_hash() {
|
||||
@@ -79,7 +93,9 @@ mod tests {
|
||||
#not a comment
|
||||
# Comment again",
|
||||
);
|
||||
let initial_context: ContextTree<'_, '_> = ContextTree::new();
|
||||
let global_settings = GlobalSettings::default();
|
||||
let initial_context = ContextElement::document_context();
|
||||
let initial_context = Context::new(&global_settings, List::new(&initial_context));
|
||||
let comment_matcher = parser_with_context!(comment)(&initial_context);
|
||||
let (remaining, first_comment) = comment_matcher(input).expect("Parse first comment");
|
||||
assert_eq!(
|
||||
|
||||
@@ -1,31 +1,26 @@
|
||||
use nom::branch::alt;
|
||||
use nom::bytes::complete::is_not;
|
||||
use nom::bytes::complete::tag;
|
||||
use nom::character::complete::line_ending;
|
||||
use nom::character::complete::space0;
|
||||
use nom::combinator::eof;
|
||||
use nom::combinator::recognize;
|
||||
use nom::sequence::tuple;
|
||||
|
||||
use super::org_source::OrgSource;
|
||||
use super::sexp::sexp;
|
||||
use super::Context;
|
||||
use crate::context::RefContext;
|
||||
use crate::error::Res;
|
||||
use crate::parser::util::get_consumed;
|
||||
use crate::parser::util::start_of_line;
|
||||
use crate::parser::DiarySexp;
|
||||
use crate::types::DiarySexp;
|
||||
|
||||
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
|
||||
pub fn diary_sexp<'r, 's>(
|
||||
_context: Context<'r, 's>,
|
||||
pub(crate) fn diary_sexp<'b, 'g, 'r, 's>(
|
||||
_context: RefContext<'b, 'g, 'r, 's>,
|
||||
input: OrgSource<'s>,
|
||||
) -> Res<OrgSource<'s>, DiarySexp<'s>> {
|
||||
start_of_line(input)?;
|
||||
let (remaining, _leading_whitespace) = space0(input)?;
|
||||
let (remaining, _clock) = tag("%%")(remaining)?;
|
||||
let (remaining, _gap_whitespace) = space0(remaining)?;
|
||||
let (remaining, _sexp) = recognize(sexp)(remaining)?;
|
||||
let (remaining, _trailing_whitespace) =
|
||||
recognize(tuple((space0, alt((line_ending, eof)))))(remaining)?;
|
||||
let (remaining, _clock) = tag("%%(")(input)?;
|
||||
let (remaining, _contents) = is_not("\r\n")(remaining)?;
|
||||
let (remaining, _eol) = alt((line_ending, eof))(remaining)?;
|
||||
|
||||
let source = get_consumed(input, remaining);
|
||||
Ok((
|
||||
@@ -35,3 +30,9 @@ pub fn diary_sexp<'r, 's>(
|
||||
},
|
||||
))
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
|
||||
pub(crate) fn detect_diary_sexp<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, ()> {
|
||||
tuple((start_of_line, tag("%%(")))(input)?;
|
||||
Ok((input, ()))
|
||||
}
|
||||
|
||||
@@ -1,106 +1,114 @@
|
||||
use nom::branch::alt;
|
||||
use nom::bytes::complete::tag;
|
||||
use nom::character::complete::line_ending;
|
||||
use nom::character::complete::space1;
|
||||
use nom::combinator::eof;
|
||||
use nom::combinator::map;
|
||||
use nom::combinator::not;
|
||||
use nom::combinator::all_consuming;
|
||||
use nom::combinator::opt;
|
||||
use nom::combinator::recognize;
|
||||
use nom::combinator::verify;
|
||||
use nom::multi::many0;
|
||||
use nom::multi::many1;
|
||||
use nom::multi::many1_count;
|
||||
use nom::multi::many_till;
|
||||
use nom::sequence::tuple;
|
||||
|
||||
use super::element::Element;
|
||||
use super::object::Object;
|
||||
use super::org_source::convert_error;
|
||||
use super::headline::heading;
|
||||
use super::in_buffer_settings::apply_in_buffer_settings;
|
||||
use super::in_buffer_settings::scan_for_in_buffer_settings;
|
||||
use super::org_source::OrgSource;
|
||||
use super::parser_with_context::parser_with_context;
|
||||
use super::source::Source;
|
||||
use super::section::zeroth_section;
|
||||
use super::token::AllTokensIterator;
|
||||
use super::token::Token;
|
||||
use super::util::exit_matcher_parser;
|
||||
use super::util::get_consumed;
|
||||
use super::util::start_of_line;
|
||||
use super::Context;
|
||||
use crate::context::parser_with_context;
|
||||
use crate::context::Context;
|
||||
use crate::context::ContextElement;
|
||||
use crate::context::GlobalSettings;
|
||||
use crate::context::List;
|
||||
use crate::context::RefContext;
|
||||
use crate::error::CustomError;
|
||||
use crate::error::MyError;
|
||||
use crate::error::Res;
|
||||
use crate::parser::comment::comment;
|
||||
use crate::parser::element_parser::element;
|
||||
use crate::parser::exiting::ExitClass;
|
||||
use crate::parser::object_parser::standard_set_object;
|
||||
use crate::parser::parser_context::ContextElement;
|
||||
use crate::parser::parser_context::ContextTree;
|
||||
use crate::parser::parser_context::ExitMatcherNode;
|
||||
use crate::parser::planning::planning;
|
||||
use crate::parser::property_drawer::property_drawer;
|
||||
use crate::parser::org_source::convert_error;
|
||||
use crate::parser::util::blank_line;
|
||||
use crate::parser::util::maybe_consume_trailing_whitespace_if_not_exiting;
|
||||
use crate::types::Document;
|
||||
use crate::types::Object;
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct Document<'s> {
|
||||
pub source: &'s str,
|
||||
pub zeroth_section: Option<Section<'s>>,
|
||||
pub children: Vec<Heading<'s>>,
|
||||
/// Parse a full org-mode document.
|
||||
///
|
||||
/// This is the main entry point for Organic. It will parse the full contents of the input string as an org-mode document.
|
||||
#[allow(dead_code)]
|
||||
pub fn parse<'s>(input: &'s str) -> Result<Document<'s>, Box<dyn std::error::Error>> {
|
||||
parse_with_settings(input, &GlobalSettings::default())
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct Heading<'s> {
|
||||
pub source: &'s str,
|
||||
pub stars: usize,
|
||||
pub title: Vec<Object<'s>>,
|
||||
pub children: Vec<DocumentElement<'s>>,
|
||||
/// Parse a full org-mode document with starting settings.
|
||||
///
|
||||
/// This is the secondary entry point for Organic. It will parse the full contents of the input string as an org-mode document starting with the settings you supplied.
|
||||
///
|
||||
/// This will not prevent additional settings from being learned during parsing, for example when encountering a "#+TODO".
|
||||
#[allow(dead_code)]
|
||||
pub fn parse_with_settings<'g, 's>(
|
||||
input: &'s str,
|
||||
global_settings: &'g GlobalSettings<'g, 's>,
|
||||
) -> Result<Document<'s>, Box<dyn std::error::Error>> {
|
||||
let initial_context = ContextElement::document_context();
|
||||
let initial_context = Context::new(global_settings, List::new(&initial_context));
|
||||
let wrapped_input = OrgSource::new(input);
|
||||
let ret =
|
||||
all_consuming(parser_with_context!(document_org_source)(&initial_context))(wrapped_input)
|
||||
.map_err(|err| err.to_string())
|
||||
.map(|(_remaining, parsed_document)| parsed_document);
|
||||
Ok(ret?)
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct Section<'s> {
|
||||
pub source: &'s str,
|
||||
pub children: Vec<Element<'s>>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum DocumentElement<'s> {
|
||||
Heading(Heading<'s>),
|
||||
Section(Section<'s>),
|
||||
}
|
||||
|
||||
impl<'s> Source<'s> for Document<'s> {
|
||||
fn get_source(&'s self) -> &'s str {
|
||||
self.source
|
||||
}
|
||||
}
|
||||
|
||||
impl<'s> Source<'s> for DocumentElement<'s> {
|
||||
fn get_source(&'s self) -> &'s str {
|
||||
match self {
|
||||
DocumentElement::Heading(obj) => obj.source,
|
||||
DocumentElement::Section(obj) => obj.source,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'s> Source<'s> for Section<'s> {
|
||||
fn get_source(&'s self) -> &'s str {
|
||||
self.source
|
||||
}
|
||||
}
|
||||
|
||||
impl<'s> Source<'s> for Heading<'s> {
|
||||
fn get_source(&'s self) -> &'s str {
|
||||
self.source
|
||||
}
|
||||
/// Parse a full org-mode document.
|
||||
///
|
||||
/// Use this entry point when you want to have direct control over the starting context or if you want to use this integrated with other nom parsers. For general-purpose usage, the `parse` and `parse_with_settings` functions are a lot simpler.
|
||||
///
|
||||
/// This will not prevent additional settings from being learned during parsing, for example when encountering a "#+TODO".
|
||||
#[allow(dead_code)]
|
||||
fn document<'b, 'g, 'r, 's>(
|
||||
context: RefContext<'b, 'g, 'r, 's>,
|
||||
input: &'s str,
|
||||
) -> Res<&'s str, Document<'s>> {
|
||||
let (remaining, doc) = document_org_source(context, input.into()).map_err(convert_error)?;
|
||||
Ok((Into::<&str>::into(remaining), doc))
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
|
||||
#[allow(dead_code)]
|
||||
pub fn document(input: &str) -> Res<&str, Document> {
|
||||
let initial_context: ContextTree<'_, '_> = ContextTree::new();
|
||||
let wrapped_input = OrgSource::new(input);
|
||||
let (remaining, document) = _document(&initial_context, wrapped_input)
|
||||
.map(|(rem, out)| (Into::<&str>::into(rem), out))
|
||||
.map_err(convert_error)?;
|
||||
fn document_org_source<'b, 'g, 'r, 's>(
|
||||
context: RefContext<'b, 'g, 'r, 's>,
|
||||
input: OrgSource<'s>,
|
||||
) -> Res<OrgSource<'s>, Document<'s>> {
|
||||
let mut final_settings = Vec::new();
|
||||
let (_, document_settings) = scan_for_in_buffer_settings(input)?;
|
||||
let setup_files: Vec<String> = document_settings
|
||||
.iter()
|
||||
.filter(|kw| kw.key.eq_ignore_ascii_case("setupfile"))
|
||||
.map(|kw| kw.value)
|
||||
.map(|setup_file| {
|
||||
context
|
||||
.get_global_settings()
|
||||
.file_access
|
||||
.read_file(setup_file)
|
||||
.map_err(|err| nom::Err::<CustomError<OrgSource<'_>>>::Failure(err.into()))
|
||||
})
|
||||
.collect::<Result<Vec<_>, _>>()?;
|
||||
for setup_file in setup_files.iter().map(String::as_str) {
|
||||
let (_, setup_file_settings) =
|
||||
scan_for_in_buffer_settings(setup_file.into()).map_err(|_err| {
|
||||
nom::Err::Error(CustomError::MyError(MyError(
|
||||
"TODO: make this take an owned string so I can dump err.to_string() into it."
|
||||
.into(),
|
||||
)))
|
||||
})?;
|
||||
final_settings.extend(setup_file_settings);
|
||||
}
|
||||
final_settings.extend(document_settings);
|
||||
let new_settings = apply_in_buffer_settings(final_settings, context.get_global_settings())
|
||||
.map_err(|_err| {
|
||||
nom::Err::Error(CustomError::MyError(MyError(
|
||||
"TODO: make this take an owned string so I can dump err.to_string() into it."
|
||||
.into(),
|
||||
)))
|
||||
})?;
|
||||
let new_context = context.with_global_settings(&new_settings);
|
||||
let context = &new_context;
|
||||
|
||||
let (remaining, document) =
|
||||
_document(context, input).map(|(rem, out)| (Into::<&str>::into(rem), out))?;
|
||||
{
|
||||
// If there are radio targets in this document then we need to parse the entire document again with the knowledge of the radio targets.
|
||||
let all_radio_targets: Vec<&Vec<Object<'_>>> = document
|
||||
@@ -116,11 +124,11 @@ pub fn document(input: &str) -> Res<&str, Document> {
|
||||
.map(|rt| &rt.children)
|
||||
.collect();
|
||||
if !all_radio_targets.is_empty() {
|
||||
let initial_context = initial_context
|
||||
.with_additional_node(ContextElement::RadioTarget(all_radio_targets));
|
||||
let (remaining, document) = _document(&initial_context, wrapped_input)
|
||||
.map(|(rem, out)| (Into::<&str>::into(rem), out))
|
||||
.map_err(convert_error)?;
|
||||
let mut new_global_settings = context.get_global_settings().clone();
|
||||
new_global_settings.radio_targets = all_radio_targets;
|
||||
let parser_context = context.with_global_settings(&new_global_settings);
|
||||
let (remaining, document) = _document(&parser_context, input)
|
||||
.map(|(rem, out)| (Into::<&str>::into(rem), out))?;
|
||||
return Ok((remaining.into(), document));
|
||||
}
|
||||
}
|
||||
@@ -128,12 +136,12 @@ pub fn document(input: &str) -> Res<&str, Document> {
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
|
||||
fn _document<'r, 's>(
|
||||
context: Context<'r, 's>,
|
||||
fn _document<'b, 'g, 'r, 's>(
|
||||
context: RefContext<'b, 'g, 'r, 's>,
|
||||
input: OrgSource<'s>,
|
||||
) -> Res<OrgSource<'s>, Document<'s>> {
|
||||
let zeroth_section_matcher = parser_with_context!(zeroth_section)(context);
|
||||
let heading_matcher = parser_with_context!(heading)(context);
|
||||
let heading_matcher = parser_with_context!(heading(0))(context);
|
||||
let (remaining, _blank_lines) = many0(blank_line)(input)?;
|
||||
let (remaining, zeroth_section) = opt(zeroth_section_matcher)(remaining)?;
|
||||
let (remaining, children) = many0(heading_matcher)(remaining)?;
|
||||
@@ -148,179 +156,8 @@ fn _document<'r, 's>(
|
||||
))
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
|
||||
fn zeroth_section<'r, 's>(
|
||||
context: Context<'r, 's>,
|
||||
input: OrgSource<'s>,
|
||||
) -> Res<OrgSource<'s>, Section<'s>> {
|
||||
// TODO: The zeroth section is specialized so it probably needs its own parser
|
||||
let parser_context = context
|
||||
.with_additional_node(ContextElement::ConsumeTrailingWhitespace(true))
|
||||
.with_additional_node(ContextElement::Context("section"))
|
||||
.with_additional_node(ContextElement::ExitMatcherNode(ExitMatcherNode {
|
||||
class: ExitClass::Document,
|
||||
exit_matcher: §ion_end,
|
||||
}));
|
||||
let without_consuming_whitespace_context =
|
||||
parser_context.with_additional_node(ContextElement::ConsumeTrailingWhitespace(false));
|
||||
|
||||
let element_matcher = parser_with_context!(element(true))(&parser_context);
|
||||
let exit_matcher = parser_with_context!(exit_matcher_parser)(&parser_context);
|
||||
|
||||
let (remaining, comment_and_property_drawer_element) = opt(tuple((
|
||||
opt(parser_with_context!(comment)(
|
||||
&without_consuming_whitespace_context,
|
||||
)),
|
||||
parser_with_context!(property_drawer)(context),
|
||||
many0(blank_line),
|
||||
)))(input)?;
|
||||
|
||||
let (remaining, (mut children, _exit_contents)) = verify(
|
||||
many_till(element_matcher, exit_matcher),
|
||||
|(children, _exit_contents)| {
|
||||
!children.is_empty() || comment_and_property_drawer_element.is_some()
|
||||
},
|
||||
)(remaining)?;
|
||||
|
||||
comment_and_property_drawer_element.map(|(comment, property_drawer, _ws)| {
|
||||
children.insert(0, Element::PropertyDrawer(property_drawer));
|
||||
comment
|
||||
.map(Element::Comment)
|
||||
.map(|ele| children.insert(0, ele));
|
||||
});
|
||||
|
||||
let (remaining, _trailing_ws) =
|
||||
maybe_consume_trailing_whitespace_if_not_exiting(context, remaining)?;
|
||||
|
||||
let source = get_consumed(input, remaining);
|
||||
Ok((
|
||||
remaining,
|
||||
Section {
|
||||
source: source.into(),
|
||||
children,
|
||||
},
|
||||
))
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
|
||||
fn section<'r, 's>(
|
||||
context: Context<'r, 's>,
|
||||
mut input: OrgSource<'s>,
|
||||
) -> Res<OrgSource<'s>, Section<'s>> {
|
||||
// TODO: The zeroth section is specialized so it probably needs its own parser
|
||||
let parser_context = context
|
||||
.with_additional_node(ContextElement::ConsumeTrailingWhitespace(true))
|
||||
.with_additional_node(ContextElement::Context("section"))
|
||||
.with_additional_node(ContextElement::ExitMatcherNode(ExitMatcherNode {
|
||||
class: ExitClass::Document,
|
||||
exit_matcher: §ion_end,
|
||||
}));
|
||||
let element_matcher = parser_with_context!(element(true))(&parser_context);
|
||||
let exit_matcher = parser_with_context!(exit_matcher_parser)(&parser_context);
|
||||
let (mut remaining, (planning_element, property_drawer_element)) = tuple((
|
||||
opt(parser_with_context!(planning)(&parser_context)),
|
||||
opt(parser_with_context!(property_drawer)(&parser_context)),
|
||||
))(input)?;
|
||||
if planning_element.is_none() && property_drawer_element.is_none() {
|
||||
let (remain, _ws) = many0(blank_line)(remaining)?;
|
||||
remaining = remain;
|
||||
input = remain;
|
||||
}
|
||||
let (remaining, (mut children, _exit_contents)) = verify(
|
||||
many_till(element_matcher, exit_matcher),
|
||||
|(children, _exit_contents)| {
|
||||
!children.is_empty() || property_drawer_element.is_some() || planning_element.is_some()
|
||||
},
|
||||
)(remaining)?;
|
||||
property_drawer_element
|
||||
.map(Element::PropertyDrawer)
|
||||
.map(|ele| children.insert(0, ele));
|
||||
planning_element
|
||||
.map(Element::Planning)
|
||||
.map(|ele| children.insert(0, ele));
|
||||
|
||||
let (remaining, _trailing_ws) =
|
||||
maybe_consume_trailing_whitespace_if_not_exiting(context, remaining)?;
|
||||
|
||||
let source = get_consumed(input, remaining);
|
||||
Ok((
|
||||
remaining,
|
||||
Section {
|
||||
source: source.into(),
|
||||
children,
|
||||
},
|
||||
))
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
|
||||
fn section_end<'r, 's>(
|
||||
context: Context<'r, 's>,
|
||||
input: OrgSource<'s>,
|
||||
) -> Res<OrgSource<'s>, OrgSource<'s>> {
|
||||
let headline_matcher = parser_with_context!(headline)(context);
|
||||
recognize(headline_matcher)(input)
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
|
||||
fn heading<'r, 's>(
|
||||
context: Context<'r, 's>,
|
||||
input: OrgSource<'s>,
|
||||
) -> Res<OrgSource<'s>, Heading<'s>> {
|
||||
not(|i| context.check_exit_matcher(i))(input)?;
|
||||
let (remaining, (star_count, _ws, title)) = headline(context, input)?;
|
||||
let section_matcher = parser_with_context!(section)(context);
|
||||
let heading_matcher = parser_with_context!(heading)(context);
|
||||
let (remaining, children) = many0(alt((
|
||||
map(
|
||||
verify(heading_matcher, |h| h.stars > star_count),
|
||||
DocumentElement::Heading,
|
||||
),
|
||||
map(section_matcher, DocumentElement::Section),
|
||||
)))(remaining)?;
|
||||
let source = get_consumed(input, remaining);
|
||||
Ok((
|
||||
remaining,
|
||||
Heading {
|
||||
source: source.into(),
|
||||
stars: star_count,
|
||||
title,
|
||||
children,
|
||||
},
|
||||
))
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
|
||||
fn headline<'r, 's>(
|
||||
context: Context<'r, 's>,
|
||||
input: OrgSource<'s>,
|
||||
) -> Res<OrgSource<'s>, (usize, OrgSource<'s>, Vec<Object<'s>>)> {
|
||||
let parser_context =
|
||||
context.with_additional_node(ContextElement::ExitMatcherNode(ExitMatcherNode {
|
||||
class: ExitClass::Document,
|
||||
exit_matcher: &headline_end,
|
||||
}));
|
||||
let standard_set_object_matcher = parser_with_context!(standard_set_object)(&parser_context);
|
||||
|
||||
let (remaining, (_sol, star_count, ws, title, _line_ending)) = tuple((
|
||||
start_of_line,
|
||||
many1_count(tag("*")),
|
||||
space1,
|
||||
many1(standard_set_object_matcher),
|
||||
alt((line_ending, eof)),
|
||||
))(input)?;
|
||||
Ok((remaining, (star_count, ws, title)))
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
|
||||
fn headline_end<'r, 's>(
|
||||
_context: Context<'r, 's>,
|
||||
input: OrgSource<'s>,
|
||||
) -> Res<OrgSource<'s>, OrgSource<'s>> {
|
||||
line_ending(input)
|
||||
}
|
||||
|
||||
impl<'s> Document<'s> {
|
||||
pub fn iter_tokens<'r>(&'r self) -> impl Iterator<Item = Token<'r, 's>> {
|
||||
fn iter_tokens<'r>(&'r self) -> impl Iterator<Item = Token<'r, 's>> {
|
||||
AllTokensIterator::new(Token::Document(self))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -11,29 +11,29 @@ use nom::multi::many_till;
|
||||
use nom::sequence::tuple;
|
||||
|
||||
use super::org_source::OrgSource;
|
||||
use super::Context;
|
||||
use crate::context::parser_with_context;
|
||||
use crate::context::ContextElement;
|
||||
use crate::context::ExitClass;
|
||||
use crate::context::ExitMatcherNode;
|
||||
use crate::context::RefContext;
|
||||
use crate::error::CustomError;
|
||||
use crate::error::MyError;
|
||||
use crate::error::Res;
|
||||
use crate::parser::element_parser::element;
|
||||
use crate::parser::exiting::ExitClass;
|
||||
use crate::parser::parser_context::ContextElement;
|
||||
use crate::parser::parser_context::ExitMatcherNode;
|
||||
use crate::parser::parser_with_context::parser_with_context;
|
||||
use crate::parser::source::SetSource;
|
||||
use crate::parser::util::blank_line;
|
||||
use crate::parser::util::exit_matcher_parser;
|
||||
use crate::parser::util::get_consumed;
|
||||
use crate::parser::util::immediate_in_section;
|
||||
use crate::parser::util::start_of_line;
|
||||
use crate::parser::util::WORD_CONSTITUENT_CHARACTERS;
|
||||
use crate::parser::Drawer;
|
||||
use crate::parser::Element;
|
||||
use crate::parser::Paragraph;
|
||||
use crate::types::Drawer;
|
||||
use crate::types::Element;
|
||||
use crate::types::Paragraph;
|
||||
use crate::types::SetSource;
|
||||
|
||||
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
|
||||
pub fn drawer<'r, 's>(
|
||||
context: Context<'r, 's>,
|
||||
pub(crate) fn drawer<'b, 'g, 'r, 's>(
|
||||
context: RefContext<'b, 'g, 'r, 's>,
|
||||
input: OrgSource<'s>,
|
||||
) -> Res<OrgSource<'s>, Drawer<'s>> {
|
||||
if immediate_in_section(context, "drawer") {
|
||||
@@ -50,13 +50,17 @@ pub fn drawer<'r, 's>(
|
||||
recognize(tuple((space0, line_ending))),
|
||||
))(remaining)?;
|
||||
|
||||
let parser_context = context
|
||||
.with_additional_node(ContextElement::ConsumeTrailingWhitespace(true))
|
||||
.with_additional_node(ContextElement::Context("drawer"))
|
||||
.with_additional_node(ContextElement::ExitMatcherNode(ExitMatcherNode {
|
||||
let contexts = [
|
||||
ContextElement::ConsumeTrailingWhitespace(true),
|
||||
ContextElement::Context("drawer"),
|
||||
ContextElement::ExitMatcherNode(ExitMatcherNode {
|
||||
class: ExitClass::Alpha,
|
||||
exit_matcher: &drawer_end,
|
||||
}));
|
||||
}),
|
||||
];
|
||||
let parser_context = context.with_additional_node(&contexts[0]);
|
||||
let parser_context = parser_context.with_additional_node(&contexts[1]);
|
||||
let parser_context = parser_context.with_additional_node(&contexts[2]);
|
||||
|
||||
let element_matcher = parser_with_context!(element(true))(&parser_context);
|
||||
let exit_matcher = parser_with_context!(exit_matcher_parser)(&parser_context);
|
||||
@@ -98,8 +102,8 @@ fn name<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, OrgSource<'s>> {
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
|
||||
fn drawer_end<'r, 's>(
|
||||
_context: Context<'r, 's>,
|
||||
fn drawer_end<'b, 'g, 'r, 's>(
|
||||
_context: RefContext<'b, 'g, 'r, 's>,
|
||||
input: OrgSource<'s>,
|
||||
) -> Res<OrgSource<'s>, OrgSource<'s>> {
|
||||
start_of_line(input)?;
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user