Compare commits
352 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
e673aa862e | ||
|
|
3b6659c5fd | ||
|
|
68a3f8b87e | ||
|
|
b1244de1dc | ||
|
|
e5a402ee1b | ||
|
|
d4a2ad4a7f | ||
|
|
3d1b2713ed | ||
|
|
60bec4695b | ||
|
|
d992947ff1 | ||
|
|
76fb24d1d1 | ||
|
|
b56318fbe4 | ||
|
|
8169499de3 | ||
|
|
29d9e76545 | ||
|
|
4d356b855e | ||
|
|
ae66d1bd89 | ||
|
|
c551938904 | ||
|
|
0fb80e3fee | ||
|
|
590e7fba0e | ||
|
|
4a72747dc9 | ||
|
|
2352636672 | ||
|
|
36217f5704 | ||
|
|
0654b676f7 | ||
|
|
a80d171e4d | ||
|
|
2e1a946ac9 | ||
|
|
01c2f1bf66 | ||
|
|
be483110ef | ||
|
|
94401dcf00 | ||
|
|
2b5df83956 | ||
|
|
d53b9e1e1f | ||
|
|
5c929ffc13 | ||
|
|
bc3224be7a | ||
|
|
54c66fb4d6 | ||
|
|
6a8ae9d838 | ||
|
|
512432c5f0 | ||
|
|
890cd3e4fd | ||
|
|
9846cde2f0 | ||
|
|
dec3242e72 | ||
|
|
a8a34e2d9c | ||
|
|
c55fae86f8 | ||
|
|
e7ec23af3d | ||
|
|
10ae36a419 | ||
|
|
ecdfd7087f | ||
|
|
3ed9b552e2 | ||
|
|
d04c8c832c | ||
|
|
9575ef30ac | ||
|
|
06ecf41663 | ||
|
|
10d03fd432 | ||
|
|
a62c3fc522 | ||
|
|
25f664e69e | ||
|
|
52e0d305aa | ||
|
|
418c5c1ce8 | ||
|
|
ecd523fa8f | ||
|
|
c0555dec0b | ||
|
|
1b788f3f21 | ||
|
|
b3382c66cd | ||
|
|
2a003b85fd | ||
|
|
270ba53150 | ||
|
|
de5788d8f3 | ||
|
|
5a254392cb | ||
|
|
178894680b | ||
|
|
599b3b8f0a | ||
|
|
d78ce10a0b | ||
|
|
12ab9beada | ||
|
|
186201a4b5 | ||
|
|
d38b0a84f6 | ||
|
|
6ed35f4674 | ||
|
|
846a8b3729 | ||
|
|
896250836b | ||
|
|
6c77586960 | ||
|
|
fc7d4bd949 | ||
|
|
f1e35e317b | ||
|
|
3fb2b5d31c | ||
|
|
d1dac0b8de | ||
|
|
93f1bcd744 | ||
|
|
47674a6907 | ||
|
|
5d1582be4d | ||
|
|
dae10c2eef | ||
|
|
5e127fec11 | ||
|
|
064a4eeee7 | ||
|
|
7727b5ef47 | ||
|
|
967e74c147 | ||
|
|
13697df7ea | ||
|
|
07e11e359a | ||
|
|
0c363c8dd6 | ||
|
|
9a479b33e0 | ||
|
|
7a854838ef | ||
|
|
2012e5a6d5 | ||
|
|
f1261ddce8 | ||
|
|
3a422e6435 | ||
|
|
6670f8c768 | ||
|
|
d7a36c8aca | ||
|
|
f820e27b17 | ||
|
|
a4b1d462c3 | ||
|
|
1b7326eafe | ||
|
|
90433aa55f | ||
|
|
a5b4eb40f6 | ||
|
|
48d550e1fc | ||
|
|
9ce042d5b6 | ||
|
|
8784da5179 | ||
|
|
875a50ae46 | ||
|
|
c4ea3fbf88 | ||
|
|
95fa834420 | ||
|
|
32a7ce3f36 | ||
|
|
d8c52568db | ||
|
|
c5be75ee8d | ||
|
|
282417ee94 | ||
|
|
ab46a9e5c6 | ||
|
|
4359fc9266 | ||
|
|
7419b75d76 | ||
|
|
e4cfc296e5 | ||
|
|
9a1d91ae45 | ||
|
|
df5d699a39 | ||
|
|
9111408d83 | ||
|
|
35f058a354 | ||
|
|
dd91e506bd | ||
|
|
cd781a7dcf | ||
|
|
8cd0e4ec63 | ||
|
|
f9460b88d7 | ||
|
|
0b2a5f4fbf | ||
|
|
6097e4df18 | ||
|
|
d5b1014fe4 | ||
|
|
dd8a8207ce | ||
|
|
b4c985071c | ||
|
|
d4f27ef297 | ||
|
|
f25246556c | ||
|
|
3fe56e9aa3 | ||
|
|
f180412ff3 | ||
|
|
f0e28206ff | ||
|
|
1f64e289a2 | ||
|
|
f7690ff64b | ||
|
|
bd5e50d558 | ||
|
|
de87b7df93 | ||
|
|
a267d13fd7 | ||
|
|
a29973a110 | ||
|
|
31c782499e | ||
|
|
b7c7057095 | ||
|
|
49e3c90a3a | ||
|
|
129228c5c5 | ||
|
|
f0a7493a89 | ||
|
|
dc5695ec9f | ||
|
|
4ff62fbfae | ||
|
|
c892d406c3 | ||
|
|
1a41cfc6c7 | ||
|
|
4f34ab9089 | ||
|
|
9b2348c0ef | ||
|
|
5716cbccea | ||
|
|
124cd50243 | ||
|
|
bac5d6e1d9 | ||
|
|
ba15999534 | ||
|
|
61c3e6c10e | ||
|
|
a7e130838d | ||
|
|
853adadf91 | ||
|
|
7b61329889 | ||
|
|
9bcfb2f1da | ||
|
|
4c8d9a3063 | ||
|
|
48cb3c4a02 | ||
|
|
9e60ff6683 | ||
|
|
c1de001786 | ||
|
|
716af5bb45 | ||
|
|
6137a46231 | ||
|
|
bdd04f4d5c | ||
|
|
36bdc54703 | ||
|
|
3031b6edd4 | ||
|
|
1a704dd312 | ||
|
|
a74ea730f4 | ||
|
|
8450785186 | ||
|
|
d443dbd468 | ||
|
|
c9ce32c881 | ||
|
|
85454a0a27 | ||
|
|
fdebf6dec5 | ||
|
|
444d6758aa | ||
|
|
6c7203410e | ||
|
|
bfe67b1f75 | ||
|
|
fd41ad9c29 | ||
|
|
7f751d4f28 | ||
|
|
52a4dab67c | ||
|
|
3d86e75059 | ||
|
|
ca6fdf1924 | ||
|
|
66d16d89ed | ||
|
|
ee5e0698b1 | ||
|
|
22681b6a58 | ||
|
|
876d33239e | ||
|
|
87941271a4 | ||
|
|
32b19d68d0 | ||
|
|
830097b0a9 | ||
|
|
44e9f708c9 | ||
|
|
fc4ff97c14 | ||
|
|
33372429dd | ||
|
|
ac0db64081 | ||
|
|
b8a4876779 | ||
|
|
925c42c8fb | ||
|
|
7d4100d956 | ||
|
|
53d90a2949 | ||
|
|
26f41b83aa | ||
|
|
e4c0e32536 | ||
|
|
37e85158ea | ||
|
|
6589a755a6 | ||
|
|
a651b79e77 | ||
|
|
98de5e4ec5 | ||
|
|
cf383fa394 | ||
|
|
84953c1669 | ||
|
|
7650a9edff | ||
|
|
a74319d381 | ||
|
|
7e57285ea7 | ||
|
|
f103d168d5 | ||
|
|
f54081437a | ||
|
|
aa5988bc2f | ||
|
|
76ca2b9762 | ||
|
|
1561e1e580 | ||
|
|
1f11bfa2ec | ||
|
|
8440a3b256 | ||
|
|
de7ad182b3 | ||
|
|
b75d9f5c91 | ||
|
|
612744ebd0 | ||
|
|
1b4b8b4bdb | ||
|
|
5587e19f16 | ||
|
|
80f7098f9b | ||
|
|
84d2babda9 | ||
|
|
cc56b79683 | ||
|
|
0105b49d0d | ||
|
|
d79035e14d | ||
|
|
7545fb7e1a | ||
|
|
f30069efe7 | ||
|
|
d1fe2f6b09 | ||
|
|
21c60d1036 | ||
|
|
6a1bdd5fee | ||
|
|
5d20d3e99b | ||
|
|
a8fbf01124 | ||
|
|
344ef04453 | ||
|
|
ceb722e476 | ||
|
|
b04341882c | ||
|
|
494fe5cceb | ||
|
|
0110d23387 | ||
|
|
0d7a15bfeb | ||
|
|
352c20d1d8 | ||
|
|
f82d2aada1 | ||
|
|
669da4073e | ||
|
|
0056657b65 | ||
|
|
8780976c15 | ||
|
|
dc8b8d08ab | ||
|
|
93d3d9471f | ||
|
|
c7c0deed74 | ||
|
|
b32c21eb1d | ||
|
|
2e6e6fdd2b | ||
|
|
3cc2294387 | ||
|
|
40f22034da | ||
|
|
ab612f293f | ||
|
|
57c2922e4a | ||
|
|
c2eb1f51c8 | ||
|
|
b0930df788 | ||
|
|
69512f559a | ||
|
|
76a81b73ac | ||
|
|
ba291c6776 | ||
|
|
6b82b46e09 | ||
|
|
6676012eb1 | ||
|
|
facbe716e9 | ||
|
|
827f3e1c98 | ||
|
|
fcea7e5a4b | ||
|
|
dda2b1e69f | ||
|
|
f79d07a7c8 | ||
|
|
45283b48d9 | ||
|
|
08e4c646e5 | ||
|
|
f8b99ed235 | ||
|
|
6fc607cfe0 | ||
|
|
49afcf0db6 | ||
|
|
c4d7e646fc | ||
|
|
3fc3a5d1ef | ||
|
|
6e2fc362ea | ||
|
|
90fa48661c | ||
|
|
5cefcd5fac | ||
|
|
b83a103c17 | ||
|
|
d90ff5891b | ||
|
|
a3c01805b8 | ||
|
|
e3d755317d | ||
|
|
b89607fc8b | ||
|
|
51c4e2b62a | ||
|
|
a6561d37fb | ||
|
|
4e8b3eb422 | ||
|
|
2c31590974 | ||
|
|
28b2d27054 | ||
|
|
84edd10864 | ||
|
|
728a79f9a4 | ||
|
|
ad4ef50669 | ||
|
|
12cbb89861 | ||
|
|
7c471ab32e | ||
|
|
400f53e440 | ||
|
|
028aeb70aa | ||
|
|
70fafd801e | ||
|
|
bdba495f69 | ||
|
|
b0392ad6fb | ||
|
|
1c142b68c6 | ||
|
|
9060f9b26d | ||
|
|
d3c733c5ad | ||
|
|
275b4b53d1 | ||
|
|
d38e198258 | ||
|
|
27cf6c0462 | ||
|
|
c7d5c89a60 | ||
|
|
ee02e07717 | ||
|
|
a7330e38e4 | ||
|
|
08eb59acd3 | ||
|
|
da1ce2717d | ||
|
|
a8f277efe5 | ||
|
|
7f6f22717b | ||
|
|
0ef141d65e | ||
|
|
71180d19fb | ||
|
|
33091112a5 | ||
|
|
5997567233 | ||
|
|
2915a81edc | ||
|
|
df79cbd0b7 | ||
|
|
a7b9eb9db4 | ||
|
|
d262833f9b | ||
|
|
0d438a8e0f | ||
|
|
0b009511ff | ||
|
|
3bdb24ad88 | ||
|
|
fdf35ba23c | ||
|
|
cd69e08516 | ||
|
|
b54c6d366c | ||
|
|
15e8d1ab77 | ||
|
|
8502a8830d | ||
|
|
74a6101de7 | ||
|
|
ba57eb16fd | ||
|
|
c309d14776 | ||
|
|
0d728510d7 | ||
|
|
22e9bc991f | ||
|
|
564104f1e8 | ||
|
|
12ad3b09f0 | ||
|
|
eabffe5ecc | ||
|
|
b47029fdbb | ||
|
|
25b8c80d4e | ||
|
|
54825538e4 | ||
|
|
66d10a7a1b | ||
|
|
acf1205e75 | ||
|
|
2cd2f7570c | ||
|
|
f16a554154 | ||
|
|
a40a504f94 | ||
|
|
80d77ff5d6 | ||
|
|
ee92049e5d | ||
|
|
510985e97c | ||
|
|
949d0989f4 | ||
|
|
2a4d22bdd4 | ||
|
|
7a903acedc | ||
|
|
5171326d63 | ||
|
|
67f79aeb51 | ||
|
|
b2383d9f93 | ||
|
|
9e2a323f6f | ||
|
|
0fcb3f73f9 | ||
|
|
bfc9e7f58d | ||
|
|
b5f0521b56 | ||
|
|
2048d8f0b6 | ||
|
|
466716881e | ||
|
|
eb9c582fa5 | ||
|
|
214e895d85 |
203
.lighthouse/pipeline-foreign-document-test.yaml
Normal file
203
.lighthouse/pipeline-foreign-document-test.yaml
Normal file
@@ -0,0 +1,203 @@
|
|||||||
|
apiVersion: tekton.dev/v1beta1
|
||||||
|
kind: PipelineRun
|
||||||
|
metadata:
|
||||||
|
name: rust-foreign-document-test
|
||||||
|
spec:
|
||||||
|
pipelineSpec:
|
||||||
|
timeouts:
|
||||||
|
pipeline: "2h0m0s"
|
||||||
|
tasks: "1h0m40s"
|
||||||
|
finally: "0h30m0s"
|
||||||
|
params:
|
||||||
|
- name: image-name
|
||||||
|
description: The name for the built image
|
||||||
|
type: string
|
||||||
|
- name: path-to-image-context
|
||||||
|
description: The path to the build context
|
||||||
|
type: string
|
||||||
|
- name: path-to-dockerfile
|
||||||
|
description: The path to the Dockerfile
|
||||||
|
type: string
|
||||||
|
tasks:
|
||||||
|
- name: do-stuff
|
||||||
|
taskSpec:
|
||||||
|
metadata: {}
|
||||||
|
stepTemplate:
|
||||||
|
image: alpine:3.18
|
||||||
|
name: ""
|
||||||
|
resources:
|
||||||
|
requests:
|
||||||
|
cpu: 10m
|
||||||
|
memory: 600Mi
|
||||||
|
workingDir: /workspace/source
|
||||||
|
steps:
|
||||||
|
- image: alpine:3.18
|
||||||
|
name: do-stuff-step
|
||||||
|
script: |
|
||||||
|
#!/usr/bin/env sh
|
||||||
|
echo "hello world"
|
||||||
|
- name: report-pending
|
||||||
|
taskRef:
|
||||||
|
name: gitea-set-status
|
||||||
|
runAfter:
|
||||||
|
- fetch-repository
|
||||||
|
params:
|
||||||
|
- name: CONTEXT
|
||||||
|
value: "$(params.JOB_NAME)"
|
||||||
|
- name: REPO_FULL_NAME
|
||||||
|
value: "$(params.REPO_OWNER)/$(params.REPO_NAME)"
|
||||||
|
- name: GITEA_HOST_URL
|
||||||
|
value: code.fizz.buzz
|
||||||
|
- name: SHA
|
||||||
|
value: "$(tasks.fetch-repository.results.commit)"
|
||||||
|
- name: DESCRIPTION
|
||||||
|
value: "Build $(params.JOB_NAME) has started"
|
||||||
|
- name: STATE
|
||||||
|
value: pending
|
||||||
|
- name: TARGET_URL
|
||||||
|
value: "https://tekton.fizz.buzz/#/namespaces/$(context.pipelineRun.namespace)/pipelineruns/$(context.pipelineRun.name)"
|
||||||
|
- name: fetch-repository
|
||||||
|
taskRef:
|
||||||
|
name: git-clone
|
||||||
|
workspaces:
|
||||||
|
- name: output
|
||||||
|
workspace: git-source
|
||||||
|
params:
|
||||||
|
- name: url
|
||||||
|
value: $(params.REPO_URL)
|
||||||
|
- name: revision
|
||||||
|
value: $(params.PULL_BASE_SHA)
|
||||||
|
- name: deleteExisting
|
||||||
|
value: "true"
|
||||||
|
- name: build-image
|
||||||
|
taskRef:
|
||||||
|
name: kaniko
|
||||||
|
params:
|
||||||
|
- name: IMAGE
|
||||||
|
value: "$(params.image-name):$(tasks.fetch-repository.results.commit)"
|
||||||
|
- name: CONTEXT
|
||||||
|
value: $(params.path-to-image-context)
|
||||||
|
- name: DOCKERFILE
|
||||||
|
value: $(params.path-to-dockerfile)
|
||||||
|
- name: BUILDER_IMAGE
|
||||||
|
value: "gcr.io/kaniko-project/executor:v1.12.1"
|
||||||
|
- name: EXTRA_ARGS
|
||||||
|
value:
|
||||||
|
- --target=foreign-document-test
|
||||||
|
- --cache=true
|
||||||
|
- --cache-copy-layers
|
||||||
|
- --cache-repo=harbor.fizz.buzz/kanikocache/cache
|
||||||
|
- --use-new-run # Should result in a speed-up
|
||||||
|
- --reproducible # To remove timestamps so layer caching works.
|
||||||
|
- --snapshot-mode=redo
|
||||||
|
- --skip-unused-stages=true
|
||||||
|
- --registry-mirror=dockerhub.dockerhub.svc.cluster.local
|
||||||
|
workspaces:
|
||||||
|
- name: source
|
||||||
|
workspace: git-source
|
||||||
|
- name: dockerconfig
|
||||||
|
workspace: docker-credentials
|
||||||
|
runAfter:
|
||||||
|
- fetch-repository
|
||||||
|
- name: run-image
|
||||||
|
taskRef:
|
||||||
|
name: run-docker-image
|
||||||
|
workspaces:
|
||||||
|
- name: source
|
||||||
|
workspace: git-source
|
||||||
|
- name: cargo-cache
|
||||||
|
workspace: cargo-cache
|
||||||
|
runAfter:
|
||||||
|
- build-image
|
||||||
|
params:
|
||||||
|
- name: docker-image
|
||||||
|
value: "$(params.image-name):$(tasks.fetch-repository.results.commit)"
|
||||||
|
finally:
|
||||||
|
- name: report-success
|
||||||
|
when:
|
||||||
|
- input: "$(tasks.status)"
|
||||||
|
operator: in
|
||||||
|
values: ["Succeeded", "Completed"]
|
||||||
|
taskRef:
|
||||||
|
name: gitea-set-status
|
||||||
|
params:
|
||||||
|
- name: CONTEXT
|
||||||
|
value: "$(params.JOB_NAME)"
|
||||||
|
- name: REPO_FULL_NAME
|
||||||
|
value: "$(params.REPO_OWNER)/$(params.REPO_NAME)"
|
||||||
|
- name: GITEA_HOST_URL
|
||||||
|
value: code.fizz.buzz
|
||||||
|
- name: SHA
|
||||||
|
value: "$(tasks.fetch-repository.results.commit)"
|
||||||
|
- name: DESCRIPTION
|
||||||
|
value: "Build $(params.JOB_NAME) has succeeded"
|
||||||
|
- name: STATE
|
||||||
|
value: success
|
||||||
|
- name: TARGET_URL
|
||||||
|
value: "https://tekton.fizz.buzz/#/namespaces/$(context.pipelineRun.namespace)/pipelineruns/$(context.pipelineRun.name)"
|
||||||
|
- name: report-failure
|
||||||
|
when:
|
||||||
|
- input: "$(tasks.status)"
|
||||||
|
operator: in
|
||||||
|
values: ["Failed"]
|
||||||
|
taskRef:
|
||||||
|
name: gitea-set-status
|
||||||
|
params:
|
||||||
|
- name: CONTEXT
|
||||||
|
value: "$(params.JOB_NAME)"
|
||||||
|
- name: REPO_FULL_NAME
|
||||||
|
value: "$(params.REPO_OWNER)/$(params.REPO_NAME)"
|
||||||
|
- name: GITEA_HOST_URL
|
||||||
|
value: code.fizz.buzz
|
||||||
|
- name: SHA
|
||||||
|
value: "$(tasks.fetch-repository.results.commit)"
|
||||||
|
- name: DESCRIPTION
|
||||||
|
value: "Build $(params.JOB_NAME) has failed"
|
||||||
|
- name: STATE
|
||||||
|
value: failure
|
||||||
|
- name: TARGET_URL
|
||||||
|
value: "https://tekton.fizz.buzz/#/namespaces/$(context.pipelineRun.namespace)/pipelineruns/$(context.pipelineRun.name)"
|
||||||
|
- name: cargo-cache-autoclean
|
||||||
|
taskRef:
|
||||||
|
name: run-docker-image
|
||||||
|
workspaces:
|
||||||
|
- name: source
|
||||||
|
workspace: git-source
|
||||||
|
- name: cargo-cache
|
||||||
|
workspace: cargo-cache
|
||||||
|
params:
|
||||||
|
- name: command
|
||||||
|
value: [cargo, cache, --autoclean]
|
||||||
|
- name: args
|
||||||
|
value: []
|
||||||
|
- name: docker-image
|
||||||
|
value: "$(params.image-name):$(tasks.fetch-repository.results.commit)"
|
||||||
|
workspaces:
|
||||||
|
- name: git-source
|
||||||
|
- name: docker-credentials
|
||||||
|
- name: cargo-cache
|
||||||
|
workspaces:
|
||||||
|
- name: git-source
|
||||||
|
volumeClaimTemplate:
|
||||||
|
spec:
|
||||||
|
storageClassName: "nfs-client"
|
||||||
|
accessModes:
|
||||||
|
- ReadWriteOnce
|
||||||
|
resources:
|
||||||
|
requests:
|
||||||
|
storage: 10Gi
|
||||||
|
subPath: rust-source
|
||||||
|
- name: cargo-cache
|
||||||
|
persistentVolumeClaim:
|
||||||
|
claimName: organic-cargo-cache-test-foreign-document
|
||||||
|
- name: docker-credentials
|
||||||
|
secret:
|
||||||
|
secretName: harbor-plain
|
||||||
|
serviceAccountName: build-bot
|
||||||
|
params:
|
||||||
|
- name: image-name
|
||||||
|
value: "harbor.fizz.buzz/private/organic-test-foreign-document"
|
||||||
|
- name: path-to-image-context
|
||||||
|
value: docker/organic_test/
|
||||||
|
- name: path-to-dockerfile
|
||||||
|
value: docker/organic_test/Dockerfile
|
||||||
@@ -83,6 +83,7 @@ spec:
|
|||||||
value: "gcr.io/kaniko-project/executor:v1.12.1"
|
value: "gcr.io/kaniko-project/executor:v1.12.1"
|
||||||
- name: EXTRA_ARGS
|
- name: EXTRA_ARGS
|
||||||
value:
|
value:
|
||||||
|
- --target=tester
|
||||||
- --cache=true
|
- --cache=true
|
||||||
- --cache-copy-layers
|
- --cache-copy-layers
|
||||||
- --cache-repo=harbor.fizz.buzz/kanikocache/cache
|
- --cache-repo=harbor.fizz.buzz/kanikocache/cache
|
||||||
|
|||||||
@@ -16,6 +16,13 @@ spec:
|
|||||||
skip_branches:
|
skip_branches:
|
||||||
# We already run on every commit, so running when the semver tags get pushed is causing needless double-processing.
|
# We already run on every commit, so running when the semver tags get pushed is causing needless double-processing.
|
||||||
- "^v[0-9]+\\.[0-9]+\\.[0-9]+$"
|
- "^v[0-9]+\\.[0-9]+\\.[0-9]+$"
|
||||||
|
- name: rust-foreign-document-test
|
||||||
|
source: "pipeline-foreign-document-test.yaml"
|
||||||
|
# Override https-based url from lighthouse events.
|
||||||
|
clone_uri: "git@code.fizz.buzz:talexander/organic.git"
|
||||||
|
skip_branches:
|
||||||
|
# We already run on every commit, so running when the semver tags get pushed is causing needless double-processing.
|
||||||
|
- "^v[0-9]+\\.[0-9]+\\.[0-9]+$"
|
||||||
- name: rust-build
|
- name: rust-build
|
||||||
source: "pipeline-rust-build.yaml"
|
source: "pipeline-rust-build.yaml"
|
||||||
# Override https-based url from lighthouse events.
|
# Override https-based url from lighthouse events.
|
||||||
|
|||||||
14
Cargo.toml
14
Cargo.toml
@@ -1,6 +1,6 @@
|
|||||||
[package]
|
[package]
|
||||||
name = "organic"
|
name = "organic"
|
||||||
version = "0.1.4"
|
version = "0.1.9"
|
||||||
authors = ["Tom Alexander <tom@fizz.buzz>"]
|
authors = ["Tom Alexander <tom@fizz.buzz>"]
|
||||||
description = "An org-mode parser."
|
description = "An org-mode parser."
|
||||||
edition = "2021"
|
edition = "2021"
|
||||||
@@ -21,9 +21,15 @@ name = "organic"
|
|||||||
path = "src/lib.rs"
|
path = "src/lib.rs"
|
||||||
|
|
||||||
[[bin]]
|
[[bin]]
|
||||||
# This bin exists for development purposes only. The real target of this crate is the library.
|
# This bin exists for development purposes only. The real target of this crate is the library.
|
||||||
name = "compare"
|
name = "parse"
|
||||||
path = "src/main.rs"
|
path = "src/main.rs"
|
||||||
|
|
||||||
|
[[bin]]
|
||||||
|
# This bin exists for development purposes only. The real target of this crate is the library.
|
||||||
|
name = "compare"
|
||||||
|
path = "src/bin_compare.rs"
|
||||||
|
required-features = ["compare"]
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
nom = "7.1.1"
|
nom = "7.1.1"
|
||||||
|
|||||||
12
Makefile
12
Makefile
@@ -33,6 +33,10 @@ release:
|
|||||||
clean:
|
clean:
|
||||||
> cargo clean
|
> cargo clean
|
||||||
|
|
||||||
|
.PHONY: format
|
||||||
|
format:
|
||||||
|
> $(MAKE) -C docker/cargo_fmt run
|
||||||
|
|
||||||
.PHONY: test
|
.PHONY: test
|
||||||
test:
|
test:
|
||||||
> cargo test --no-default-features --features compare --no-fail-fast --lib --test test_loader -- --test-threads $(TESTJOBS)
|
> cargo test --no-default-features --features compare --no-fail-fast --lib --test test_loader -- --test-threads $(TESTJOBS)
|
||||||
@@ -40,7 +44,11 @@ test:
|
|||||||
.PHONY: dockertest
|
.PHONY: dockertest
|
||||||
dockertest:
|
dockertest:
|
||||||
> $(MAKE) -C docker/organic_test
|
> $(MAKE) -C docker/organic_test
|
||||||
> docker run --init --rm -i -t -v "$$(readlink -f ./):/source:ro" --mount source=cargo-cache,target=/usr/local/cargo/registry --mount source=rust-cache,target=/target --env CARGO_TARGET_DIR=/target -w /source organic-test --no-default-features --features compare --no-fail-fast --lib --test test_loader -- --test-threads $(TESTJOBS)
|
> docker run --init --rm -i -t --read-only -v "$$(readlink -f ./):/source:ro" --mount type=tmpfs,destination=/tmp --mount source=cargo-cache,target=/usr/local/cargo/registry --mount source=rust-cache,target=/target --env CARGO_TARGET_DIR=/target -w /source organic-test --no-default-features --features compare --no-fail-fast --lib --test test_loader -- --test-threads $(TESTJOBS)
|
||||||
|
|
||||||
|
.PHONY: foreign_document_test
|
||||||
|
foreign_document_test:
|
||||||
|
> $(MAKE) -C docker/organic_test run_foreign_document_test
|
||||||
|
|
||||||
.PHONY: dockerclean
|
.PHONY: dockerclean
|
||||||
dockerclean:
|
dockerclean:
|
||||||
@@ -60,7 +68,7 @@ jaeger:
|
|||||||
# 4317 for OTLP gRPC, 4318 for OTLP HTTP. We currently use gRPC but I forward both ports regardless.
|
# 4317 for OTLP gRPC, 4318 for OTLP HTTP. We currently use gRPC but I forward both ports regardless.
|
||||||
#
|
#
|
||||||
# These flags didn't help even though they seem like they would: --collector.queue-size=20000 --collector.num-workers=100
|
# These flags didn't help even though they seem like they would: --collector.queue-size=20000 --collector.num-workers=100
|
||||||
> docker run -d --rm --name organicdocker -p 4317:4317 -p 4318:4318 -p 16686:16686 -e COLLECTOR_OTLP_ENABLED=true jaegertracing/all-in-one:1.47 --collector.grpc-server.max-message-size=20000000 --collector.otlp.grpc.max-message-size=20000000
|
> docker run -d --rm --name organicdocker --read-only -p 4317:4317 -p 4318:4318 -p 16686:16686 -e COLLECTOR_OTLP_ENABLED=true jaegertracing/all-in-one:1.47 --collector.grpc-server.max-message-size=20000000 --collector.otlp.grpc.max-message-size=20000000
|
||||||
|
|
||||||
.PHONY: jaegerweb
|
.PHONY: jaegerweb
|
||||||
jaegerweb:
|
jaegerweb:
|
||||||
|
|||||||
78
README.md
78
README.md
@@ -2,12 +2,84 @@
|
|||||||
|
|
||||||
Organic is an emacs-less implementation of an [org-mode](https://orgmode.org/) parser.
|
Organic is an emacs-less implementation of an [org-mode](https://orgmode.org/) parser.
|
||||||
|
|
||||||
|
|
||||||
## Project Status
|
## Project Status
|
||||||
|
|
||||||
This project is a personal learning project to grow my experience in [rust](https://www.rust-lang.org/). It is under development and at this time I would not recommend anyone use this code. The goal is to turn this into a project others can use, at which point more information will appear in this README.
|
This project is still under HEAVY development. While the version remains v0.1.x the API will be changing often. Once we hit v0.2.x we will start following semver.
|
||||||
|
|
||||||
|
Currently, the parser is able to correctly identify the start/end bounds of all the org-mode objects and elements (except table.el tables, org-mode tables are supported) but many of the interior properties are not yet populated.
|
||||||
|
|
||||||
|
### Project Goals
|
||||||
|
- We aim to provide perfect parity with the emacs org-mode parser. In that regard, any document that parses differently between Emacs and Organic is considered a bug.
|
||||||
|
- The parser should be fast. We're not doing anything special, but since this is written in Rust and natively compiled we should be able to beat the existing parsers.
|
||||||
|
- The parser should have minimal dependencies. This should reduce effort w.r.t.: security audits, legal compliance, portability.
|
||||||
|
- The parser should be usable everywhere. In the interest of getting org-mode used in as many places as possible, this parser should be usable by everyone everywhere. This means:
|
||||||
|
- It must have a permissive license for use in proprietary code bases.
|
||||||
|
- We will investigate compiling to WASM. This is an important goal of the project and will definitely happen, but only after the parser has a more stable API.
|
||||||
|
- We will investigate compiling to a C library for native linking to other code. This is more of a maybe-goal for the project.
|
||||||
|
### Project Non-Goals
|
||||||
|
- This project will not include an elisp engine since that would drastically increase the complexity of the code. Any features requiring an elisp engine will not be implemented (for example, Emacs supports embedded eval expressions in documents but this parser will never support that).
|
||||||
|
- This project is exclusively an org-mode **parser**. This limits its scope to roughly the output of `(org-element-parse-buffer)`. It will not render org-mode documents in other formats like HTML or LaTeX.
|
||||||
|
### Project Maybe-Goals
|
||||||
|
- table.el support. Currently we support org-mode tables but org-mode also allows table.el tables. So far, their use in org-mode documents seems rather uncommon so this is a low-priority feature.
|
||||||
|
- Document editing support. I do not anticipate any advanced editing features to make editing ergonomic, but it should be relatively easy to be able to parse an org-mode document and serialize it back into org-mode. This would enable cool features to be built on top of the library like auto-formatters. To accomplish this feature, We'd have to capture all of the various separators and whitespace that we are currently simply throwing away. This would add many additional fields to the parsed structs and it would add more noise to the parsers themselves, so I do not want to approach this feature until the parser is more complete since it would make modifications and refactoring more difficult.
|
||||||
|
### Supported Versions
|
||||||
|
This project targets the version of Emacs and Org-mode that are built into the [organic-test docker image](docker/organic_test/Dockerfile). This is newer than the version of Org-mode that shipped with Emacs 29.1. The parser itself does not depend on Emacs or Org-mode though, so this only matters for development purposes when running the automated tests that compare against upstream Org-mode.
|
||||||
|
|
||||||
|
## Using this library
|
||||||
|
TODO: Add section on using Organic as a library (which is the intended use for this project). This will be added when we have a bit more API stability since currently the library is under heavy development.
|
||||||
|
|
||||||
|
## Development
|
||||||
|
|
||||||
|
### The parse binary
|
||||||
|
This program takes org-mode input either streamed in on stdin or as paths to files passed in as arguments. It then parses them using Organic and dumps the result to stdout. This program is intended solely as a development tool. Examples:
|
||||||
|
```bash
|
||||||
|
cat /foo/bar.org | cargo run --bin parse
|
||||||
|
```
|
||||||
|
```bash
|
||||||
|
cargo build --profile release-lto
|
||||||
|
./target/release-lto/parse /foo/bar.org /lorem/ipsum.org
|
||||||
|
```
|
||||||
|
|
||||||
|
### The compare binary
|
||||||
|
This program takes org-mode input either streamed in on stdin or as paths to files passed in as arguments. It then parses them using Organic and the official Emacs Org-mode parser and compares the parse result. This program is intended solely as a development tool. Since org-mode is a moving target, it is recommended that you run this through docker since we pin the version of org-mode to a specific revision. Examples:
|
||||||
|
```bash
|
||||||
|
cat /foo/bar.org | ./scripts/run_docker_compare.bash
|
||||||
|
```
|
||||||
|
```bash
|
||||||
|
./scripts/run_docker_compare.bash /foo/bar.org /lorem/ipsum.org
|
||||||
|
```
|
||||||
|
|
||||||
|
Not recommended since it is not through docker:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
cat /foo/bar.org | cargo run --features compare --bin compare
|
||||||
|
```
|
||||||
|
```bash
|
||||||
|
cargo build --profile release-lto --features compare
|
||||||
|
./target/release-lto/compare /foo/bar.org /lorem/ipsum.org
|
||||||
|
```
|
||||||
|
|
||||||
|
## Running the tests
|
||||||
|
There are three levels of tests for this repository: the standard tests, the autogenerated tests, and the foreign document tests.
|
||||||
|
|
||||||
|
### The standard tests
|
||||||
|
These are regular hand-written rust tests. These can be run with:
|
||||||
|
```bash
|
||||||
|
make unittest
|
||||||
|
```
|
||||||
|
|
||||||
|
### The auto-generated tests
|
||||||
|
These tests are automatically generated from the files in the `org_mode_samples` directory and they are still integrated with the rust/cargo testing framework. For each org-mode document in that folder, a test is generated that will parse the document with both Organic and the official Emacs Org-mode parser and then it will compare the parse results. Any deviation is considered a failure. Since org-mode is a moving target, it is recommended that you run these tests inside docker since the `organic-test` docker image is pinned to a specific revision of org-mode. These can be run with:
|
||||||
|
```bash
|
||||||
|
make dockertest
|
||||||
|
```
|
||||||
|
|
||||||
|
### The foreign document tests
|
||||||
|
These tests function the same as the auto-generated tests except they are **not** integrated with the rust/cargo testing framework and they involve comparing the parse of org-mode documents that live outside this repository. This allows us to test against a far greater variety of org-mode input documents without pulling massive sets of org-mode documents into this repository. The recommended way to run these tests is still through docker because it pins org-mode and the test documents to specific git revisions. These can be run with:
|
||||||
|
```bash
|
||||||
|
make foreign_document_test
|
||||||
|
```
|
||||||
|
|
||||||
## License
|
## License
|
||||||
|
|
||||||
This project is released under the public-domain-equivalent [0BSD license](https://www.tldrlegal.com/license/bsd-0-clause-license). This license puts no restrictions on the use of this code (you do not even have to include the copyright notice or license text when using it). HOWEVER, this project has a couple permissively licensed dependencies which do require their copyright notices and/or license texts to be included. I am not a lawyer and this is not legal advice but it is my layperson's understanding that if you distribute a binary with this library linked in, you will need to abide by their terms since their code will also be linked in your binary. I try to keep the dependencies to a minimum and the most restrictive dependency I will ever include is a permissively licensed one.
|
This project is released under the public-domain-equivalent [0BSD license](https://www.tldrlegal.com/license/bsd-0-clause-license), however, this project has a couple permissively licensed non-public-domain-equivalent dependencies which require their copyright notices and/or license texts to be included. I am not a lawyer and this is not legal advice but it is my layperson's understanding that if you distribute a binary statically linking this library, you will need to abide by their terms since their code will also be linked in your binary.
|
||||||
|
|||||||
34
build.rs
34
build.rs
@@ -16,7 +16,8 @@ fn main() {
|
|||||||
let destination = Path::new(&out_dir).join("tests.rs");
|
let destination = Path::new(&out_dir).join("tests.rs");
|
||||||
let mut test_file = File::create(&destination).unwrap();
|
let mut test_file = File::create(&destination).unwrap();
|
||||||
|
|
||||||
write_header(&mut test_file);
|
// Re-generate the tests if any org-mode files change
|
||||||
|
println!("cargo:rerun-if-changed=org_mode_samples");
|
||||||
|
|
||||||
let test_files = WalkDir::new("org_mode_samples")
|
let test_files = WalkDir::new("org_mode_samples")
|
||||||
.into_iter()
|
.into_iter()
|
||||||
@@ -51,32 +52,15 @@ fn write_test(test_file: &mut File, test: &walkdir::DirEntry) {
|
|||||||
.strip_suffix(".org")
|
.strip_suffix(".org")
|
||||||
.expect("Should have .org extension")
|
.expect("Should have .org extension")
|
||||||
.replace("/", "_");
|
.replace("/", "_");
|
||||||
let test_name = format!("autogen_{}", test_name);
|
|
||||||
|
|
||||||
if let Some(_reason) = is_expect_fail(test_name.as_str()) {
|
|
||||||
write!(test_file, "#[ignore]\n").unwrap();
|
|
||||||
}
|
|
||||||
write!(
|
write!(
|
||||||
test_file,
|
test_file,
|
||||||
include_str!("./tests/test_template"),
|
include_str!("./tests/test_template"),
|
||||||
name = test_name,
|
name = test_name,
|
||||||
path = test.path().display()
|
path = test.path().display(),
|
||||||
)
|
expect_fail = is_expect_fail(test_name.as_str())
|
||||||
.unwrap();
|
.map(|_| "#[ignore]\n")
|
||||||
}
|
.unwrap_or("")
|
||||||
|
|
||||||
#[cfg(feature = "compare")]
|
|
||||||
fn write_header(test_file: &mut File) {
|
|
||||||
write!(
|
|
||||||
test_file,
|
|
||||||
r#"
|
|
||||||
#[feature(exit_status_error)]
|
|
||||||
use organic::compare_document;
|
|
||||||
use organic::parser::document;
|
|
||||||
use organic::emacs_parse_org_document;
|
|
||||||
use organic::parser::sexp::sexp_with_padding;
|
|
||||||
|
|
||||||
"#
|
|
||||||
)
|
)
|
||||||
.unwrap();
|
.unwrap();
|
||||||
}
|
}
|
||||||
@@ -84,10 +68,8 @@ use organic::parser::sexp::sexp_with_padding;
|
|||||||
#[cfg(feature = "compare")]
|
#[cfg(feature = "compare")]
|
||||||
fn is_expect_fail(name: &str) -> Option<&str> {
|
fn is_expect_fail(name: &str) -> Option<&str> {
|
||||||
match name {
|
match name {
|
||||||
"autogen_greater_element_drawer_drawer_with_headline_inside" => Some("Apparently lines with :end: become their own paragraph. This odd behavior needs to be investigated more."),
|
"greater_element_drawer_drawer_with_headline_inside" => Some("Apparently lines with :end: become their own paragraph. This odd behavior needs to be investigated more."),
|
||||||
"autogen_element_container_priority_footnote_definition_dynamic_block" => Some("Apparently broken begin lines become their own paragraph."),
|
"element_container_priority_footnote_definition_dynamic_block" => Some("Apparently broken begin lines become their own paragraph."),
|
||||||
"autogen_lesser_element_paragraphs_paragraph_with_backslash_line_breaks" => Some("The text we're getting out of the parse tree is already processed to remove line breaks, so our comparison needs to take that into account."),
|
|
||||||
"autogen_unicode_hearts" => Some("Unicode is coming out of emacs strange."),
|
|
||||||
_ => None,
|
_ => None,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -6,7 +6,7 @@ all: build push
|
|||||||
|
|
||||||
.PHONY: build
|
.PHONY: build
|
||||||
build:
|
build:
|
||||||
docker build -t $(IMAGE_NAME) -f Dockerfile ../../
|
docker build -t $(IMAGE_NAME) -f Dockerfile .
|
||||||
|
|
||||||
.PHONY: push
|
.PHONY: push
|
||||||
push:
|
push:
|
||||||
@@ -29,8 +29,8 @@ endif
|
|||||||
# NOTE: This target will write to folders underneath the git-root
|
# NOTE: This target will write to folders underneath the git-root
|
||||||
.PHONY: run
|
.PHONY: run
|
||||||
run: build
|
run: build
|
||||||
docker run --rm --init -v "$$(readlink -f ../../):/source" --workdir=/source $(IMAGE_NAME)
|
docker run --rm --init --read-only --mount type=tmpfs,destination=/tmp -v "$$(readlink -f ../../):/source" --workdir=/source $(IMAGE_NAME)
|
||||||
|
|
||||||
.PHONY: shell
|
.PHONY: shell
|
||||||
shell: build
|
shell: build
|
||||||
docker run --rm -i -t --entrypoint /bin/sh -v "$$(readlink -f ../../):/source" --workdir=/source $(IMAGE_NAME)
|
docker run --rm -i -t --entrypoint /bin/sh --mount type=tmpfs,destination=/tmp -v "$$(readlink -f ../../):/source" --workdir=/source $(IMAGE_NAME)
|
||||||
|
|||||||
@@ -6,7 +6,7 @@ all: build push
|
|||||||
|
|
||||||
.PHONY: build
|
.PHONY: build
|
||||||
build:
|
build:
|
||||||
docker build -t $(IMAGE_NAME) -f Dockerfile ../../
|
docker build -t $(IMAGE_NAME) -f Dockerfile .
|
||||||
|
|
||||||
.PHONY: push
|
.PHONY: push
|
||||||
push:
|
push:
|
||||||
@@ -30,8 +30,8 @@ endif
|
|||||||
# NOTE: This target will write to folders underneath the git-root
|
# NOTE: This target will write to folders underneath the git-root
|
||||||
.PHONY: run
|
.PHONY: run
|
||||||
run: build
|
run: build
|
||||||
docker run --rm --init -v "$$(readlink -f ../../):/source" --workdir=/source --mount source=cargo-cache,target=/usr/local/cargo/registry $(IMAGE_NAME)
|
docker run --rm --init --read-only --mount type=tmpfs,destination=/tmp -v "$$(readlink -f ../../):/source" --workdir=/source --mount source=cargo-cache,target=/usr/local/cargo/registry $(IMAGE_NAME)
|
||||||
|
|
||||||
.PHONY: shell
|
.PHONY: shell
|
||||||
shell: build
|
shell: build
|
||||||
docker run --rm -i -t --entrypoint /bin/sh -v "$$(readlink -f ../../):/source" --workdir=/source --mount source=cargo-cache,target=/usr/local/cargo/registry $(IMAGE_NAME)
|
docker run --rm -i -t --entrypoint /bin/sh --mount type=tmpfs,destination=/tmp -v "$$(readlink -f ../../):/source" --workdir=/source --mount source=cargo-cache,target=/usr/local/cargo/registry $(IMAGE_NAME)
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
FROM alpine:3.17 AS build
|
FROM alpine:3.17 AS build
|
||||||
RUN apk add --no-cache build-base musl-dev git autoconf make texinfo gnutls-dev ncurses-dev gawk
|
RUN apk add --no-cache build-base musl-dev git autoconf make texinfo gnutls-dev ncurses-dev gawk libgccjit-dev
|
||||||
|
|
||||||
|
|
||||||
FROM build AS build-emacs
|
FROM build AS build-emacs
|
||||||
@@ -8,13 +8,13 @@ RUN git clone --depth 1 --branch $EMACS_VERSION https://git.savannah.gnu.org/git
|
|||||||
WORKDIR /root/emacs
|
WORKDIR /root/emacs
|
||||||
RUN mkdir /root/dist
|
RUN mkdir /root/dist
|
||||||
RUN ./autogen.sh
|
RUN ./autogen.sh
|
||||||
RUN ./configure --prefix /usr --without-x --without-sound
|
RUN ./configure --prefix /usr --without-x --without-sound --with-native-compilation=aot
|
||||||
RUN make
|
RUN make
|
||||||
RUN make DESTDIR="/root/dist" install
|
RUN make DESTDIR="/root/dist" install
|
||||||
|
|
||||||
|
|
||||||
FROM build AS build-org-mode
|
FROM build AS build-org-mode
|
||||||
ARG ORG_VERSION=7bdec435ff5d86220d13c431e799c5ed44a57da1
|
ARG ORG_VERSION=c703541ffcc14965e3567f928de1683a1c1e33f6
|
||||||
COPY --from=build-emacs /root/dist/ /
|
COPY --from=build-emacs /root/dist/ /
|
||||||
RUN mkdir /root/dist
|
RUN mkdir /root/dist
|
||||||
# Savannah does not allow fetching specific revisions, so we're going to have to put unnecessary load on their server by cloning main and then checking out the revision we want.
|
# Savannah does not allow fetching specific revisions, so we're going to have to put unnecessary load on their server by cloning main and then checking out the revision we want.
|
||||||
@@ -25,10 +25,83 @@ RUN make compile
|
|||||||
RUN make DESTDIR="/root/dist" install
|
RUN make DESTDIR="/root/dist" install
|
||||||
|
|
||||||
|
|
||||||
FROM rustlang/rust:nightly-alpine3.17
|
FROM rustlang/rust:nightly-alpine3.17 AS tester
|
||||||
RUN apk add --no-cache musl-dev ncurses gnutls
|
ENV LANG=en_US.UTF-8
|
||||||
|
RUN apk add --no-cache musl-dev ncurses gnutls libgccjit
|
||||||
RUN cargo install --locked --no-default-features --features ci-autoclean cargo-cache
|
RUN cargo install --locked --no-default-features --features ci-autoclean cargo-cache
|
||||||
COPY --from=build-emacs /root/dist/ /
|
COPY --from=build-emacs /root/dist/ /
|
||||||
COPY --from=build-org-mode /root/dist/ /
|
COPY --from=build-org-mode /root/dist/ /
|
||||||
|
|
||||||
ENTRYPOINT ["cargo", "test"]
|
ENTRYPOINT ["cargo", "test"]
|
||||||
|
|
||||||
|
|
||||||
|
FROM build as foreign-document-gather
|
||||||
|
|
||||||
|
ARG HOWARD_ABRAMS_DOT_FILES_VERSION=1b54fe75d74670dc7bcbb6b01ea560c45528c628
|
||||||
|
ARG HOWARD_ABRAMS_DOT_FILES_PATH=/foreign_documents/howardabrams/dot-files
|
||||||
|
ARG HOWARD_ABRAMS_DOT_FILES_REPO=https://github.com/howardabrams/dot-files.git
|
||||||
|
RUN mkdir /foreign_documents
|
||||||
|
RUN mkdir -p $HOWARD_ABRAMS_DOT_FILES_PATH && git -C $HOWARD_ABRAMS_DOT_FILES_PATH init --initial-branch=main && git -C $HOWARD_ABRAMS_DOT_FILES_PATH remote add origin $HOWARD_ABRAMS_DOT_FILES_REPO && git -C $HOWARD_ABRAMS_DOT_FILES_PATH fetch origin $HOWARD_ABRAMS_DOT_FILES_VERSION && git -C $HOWARD_ABRAMS_DOT_FILES_PATH checkout FETCH_HEAD
|
||||||
|
|
||||||
|
ARG HOWARD_ABRAMS_HAMACS_VERSION=da51188cc195d41882175d412fe40a8bc5730c5c
|
||||||
|
ARG HOWARD_ABRAMS_HAMACS_PATH=/foreign_documents/howardabrams/hamacs
|
||||||
|
ARG HOWARD_ABRAMS_HAMACS_REPO=https://github.com/howardabrams/hamacs.git
|
||||||
|
RUN mkdir -p $HOWARD_ABRAMS_HAMACS_PATH && git -C $HOWARD_ABRAMS_HAMACS_PATH init --initial-branch=main && git -C $HOWARD_ABRAMS_HAMACS_PATH remote add origin $HOWARD_ABRAMS_HAMACS_REPO && git -C $HOWARD_ABRAMS_HAMACS_PATH fetch origin $HOWARD_ABRAMS_HAMACS_VERSION && git -C $HOWARD_ABRAMS_HAMACS_PATH checkout FETCH_HEAD
|
||||||
|
|
||||||
|
ARG HOWARD_ABRAMS_DEMO_IT_VERSION=e399fd7ceb73caeae7cb50b247359bafcaee2a3f
|
||||||
|
ARG HOWARD_ABRAMS_DEMO_IT_PATH=/foreign_documents/howardabrams/demo-it
|
||||||
|
ARG HOWARD_ABRAMS_DEMO_IT_REPO=https://github.com/howardabrams/demo-it.git
|
||||||
|
RUN mkdir -p $HOWARD_ABRAMS_DEMO_IT_PATH && git -C $HOWARD_ABRAMS_DEMO_IT_PATH init --initial-branch=main && git -C $HOWARD_ABRAMS_DEMO_IT_PATH remote add origin $HOWARD_ABRAMS_DEMO_IT_REPO && git -C $HOWARD_ABRAMS_DEMO_IT_PATH fetch origin $HOWARD_ABRAMS_DEMO_IT_VERSION && git -C $HOWARD_ABRAMS_DEMO_IT_PATH checkout FETCH_HEAD
|
||||||
|
|
||||||
|
ARG HOWARD_ABRAMS_MAGIT_DEMO_VERSION=59e82f6bc7c18f550478d86a8f680c3f2da66985
|
||||||
|
ARG HOWARD_ABRAMS_MAGIT_DEMO_PATH=/foreign_documents/howardabrams/magit-demo
|
||||||
|
ARG HOWARD_ABRAMS_MAGIT_DEMO_REPO=https://github.com/howardabrams/magit-demo.git
|
||||||
|
RUN mkdir -p $HOWARD_ABRAMS_MAGIT_DEMO_PATH && git -C $HOWARD_ABRAMS_MAGIT_DEMO_PATH init --initial-branch=main && git -C $HOWARD_ABRAMS_MAGIT_DEMO_PATH remote add origin $HOWARD_ABRAMS_MAGIT_DEMO_REPO && git -C $HOWARD_ABRAMS_MAGIT_DEMO_PATH fetch origin $HOWARD_ABRAMS_MAGIT_DEMO_VERSION && git -C $HOWARD_ABRAMS_MAGIT_DEMO_PATH checkout FETCH_HEAD
|
||||||
|
|
||||||
|
ARG HOWARD_ABRAMS_PDX_EMACS_HACKERS_VERSION=bfb7bd640fdf0ce3def21f9fc591ed35d776b26d
|
||||||
|
ARG HOWARD_ABRAMS_PDX_EMACS_HACKERS_PATH=/foreign_documents/howardabrams/pdx-emacs-hackers
|
||||||
|
ARG HOWARD_ABRAMS_PDX_EMACS_HACKERS_REPO=https://github.com/howardabrams/pdx-emacs-hackers.git
|
||||||
|
RUN mkdir -p $HOWARD_ABRAMS_PDX_EMACS_HACKERS_PATH && git -C $HOWARD_ABRAMS_PDX_EMACS_HACKERS_PATH init --initial-branch=main && git -C $HOWARD_ABRAMS_PDX_EMACS_HACKERS_PATH remote add origin $HOWARD_ABRAMS_PDX_EMACS_HACKERS_REPO && git -C $HOWARD_ABRAMS_PDX_EMACS_HACKERS_PATH fetch origin $HOWARD_ABRAMS_PDX_EMACS_HACKERS_VERSION && git -C $HOWARD_ABRAMS_PDX_EMACS_HACKERS_PATH checkout FETCH_HEAD
|
||||||
|
|
||||||
|
ARG HOWARD_ABRAMS_FLORA_SIMULATOR_VERSION=50de13068722b9e3878f8598b749b7ccd14e7f8e
|
||||||
|
ARG HOWARD_ABRAMS_FLORA_SIMULATOR_PATH=/foreign_documents/howardabrams/flora-simulator
|
||||||
|
ARG HOWARD_ABRAMS_FLORA_SIMULATOR_REPO=https://github.com/howardabrams/flora-simulator.git
|
||||||
|
RUN mkdir -p $HOWARD_ABRAMS_FLORA_SIMULATOR_PATH && git -C $HOWARD_ABRAMS_FLORA_SIMULATOR_PATH init --initial-branch=main && git -C $HOWARD_ABRAMS_FLORA_SIMULATOR_PATH remote add origin $HOWARD_ABRAMS_FLORA_SIMULATOR_REPO && git -C $HOWARD_ABRAMS_FLORA_SIMULATOR_PATH fetch origin $HOWARD_ABRAMS_FLORA_SIMULATOR_VERSION && git -C $HOWARD_ABRAMS_FLORA_SIMULATOR_PATH checkout FETCH_HEAD
|
||||||
|
|
||||||
|
ARG HOWARD_ABRAMS_LITERATE_DEVOPS_DEMO_VERSION=2d7a5e41001a1adf7ec24aeb6acc8525a72d7892
|
||||||
|
ARG HOWARD_ABRAMS_LITERATE_DEVOPS_DEMO_PATH=/foreign_documents/howardabrams/literate-devops-demo
|
||||||
|
ARG HOWARD_ABRAMS_LITERATE_DEVOPS_DEMO_REPO=https://github.com/howardabrams/literate-devops-demo.git
|
||||||
|
RUN mkdir -p $HOWARD_ABRAMS_LITERATE_DEVOPS_DEMO_PATH && git -C $HOWARD_ABRAMS_LITERATE_DEVOPS_DEMO_PATH init --initial-branch=main && git -C $HOWARD_ABRAMS_LITERATE_DEVOPS_DEMO_PATH remote add origin $HOWARD_ABRAMS_LITERATE_DEVOPS_DEMO_REPO && git -C $HOWARD_ABRAMS_LITERATE_DEVOPS_DEMO_PATH fetch origin $HOWARD_ABRAMS_LITERATE_DEVOPS_DEMO_VERSION && git -C $HOWARD_ABRAMS_LITERATE_DEVOPS_DEMO_PATH checkout FETCH_HEAD
|
||||||
|
|
||||||
|
ARG HOWARD_ABRAMS_CLOJURE_YESQL_XP_VERSION=b651c7f8b47b2710e99fce9652980902bbc1c6c9
|
||||||
|
ARG HOWARD_ABRAMS_CLOJURE_YESQL_XP_PATH=/foreign_documents/howardabrams/clojure-yesql-xp
|
||||||
|
ARG HOWARD_ABRAMS_CLOJURE_YESQL_XP_REPO=https://github.com/howardabrams/clojure-yesql-xp.git
|
||||||
|
RUN mkdir -p $HOWARD_ABRAMS_CLOJURE_YESQL_XP_PATH && git -C $HOWARD_ABRAMS_CLOJURE_YESQL_XP_PATH init --initial-branch=main && git -C $HOWARD_ABRAMS_CLOJURE_YESQL_XP_PATH remote add origin $HOWARD_ABRAMS_CLOJURE_YESQL_XP_REPO && git -C $HOWARD_ABRAMS_CLOJURE_YESQL_XP_PATH fetch origin $HOWARD_ABRAMS_CLOJURE_YESQL_XP_VERSION && git -C $HOWARD_ABRAMS_CLOJURE_YESQL_XP_PATH checkout FETCH_HEAD
|
||||||
|
|
||||||
|
ARG HOWARD_ABRAMS_VEEP_VERSION=e37fcf63a5c4a526255735ee34955528b3b280ae
|
||||||
|
ARG HOWARD_ABRAMS_VEEP_PATH=/foreign_documents/howardabrams/veep
|
||||||
|
ARG HOWARD_ABRAMS_VEEP_REPO=https://github.com/howardabrams/veep.git
|
||||||
|
RUN mkdir -p $HOWARD_ABRAMS_VEEP_PATH && git -C $HOWARD_ABRAMS_VEEP_PATH init --initial-branch=main && git -C $HOWARD_ABRAMS_VEEP_PATH remote add origin $HOWARD_ABRAMS_VEEP_REPO && git -C $HOWARD_ABRAMS_VEEP_PATH fetch origin $HOWARD_ABRAMS_VEEP_VERSION && git -C $HOWARD_ABRAMS_VEEP_PATH checkout FETCH_HEAD
|
||||||
|
|
||||||
|
ARG DOOMEMACS_VERSION=42d5fd83504f8aa80f3248036006fbcd49222943
|
||||||
|
ARG DOOMEMACS_PATH=/foreign_documents/doomemacs
|
||||||
|
ARG DOOMEMACS_REPO=https://github.com/doomemacs/doomemacs.git
|
||||||
|
RUN mkdir -p $DOOMEMACS_PATH && git -C $DOOMEMACS_PATH init --initial-branch=main && git -C $DOOMEMACS_PATH remote add origin $DOOMEMACS_REPO && git -C $DOOMEMACS_PATH fetch origin $DOOMEMACS_VERSION && git -C $DOOMEMACS_PATH checkout FETCH_HEAD
|
||||||
|
|
||||||
|
ARG WORG_VERSION=ba6cda890f200d428a5d68e819eef15b5306055f
|
||||||
|
ARG WORG_PATH=/foreign_documents/worg
|
||||||
|
ARG WORG_REPO=https://git.sr.ht/~bzg/worg
|
||||||
|
RUN mkdir -p $WORG_PATH && git -C $WORG_PATH init --initial-branch=main && git -C $WORG_PATH remote add origin $WORG_REPO && git -C $WORG_PATH fetch origin $WORG_VERSION && git -C $WORG_PATH checkout FETCH_HEAD
|
||||||
|
|
||||||
|
|
||||||
|
FROM tester as foreign-document-test
|
||||||
|
RUN apk add --no-cache bash coreutils
|
||||||
|
RUN mkdir /foreign_documents
|
||||||
|
COPY --from=foreign-document-gather /foreign_documents/howardabrams /foreign_documents/howardabrams
|
||||||
|
COPY --from=foreign-document-gather /foreign_documents/doomemacs /foreign_documents/doomemacs
|
||||||
|
COPY --from=foreign-document-gather /foreign_documents/worg /foreign_documents/worg
|
||||||
|
COPY --from=build-org-mode /root/org-mode /foreign_documents/org-mode
|
||||||
|
COPY --from=build-emacs /root/emacs /foreign_documents/emacs
|
||||||
|
COPY foreign_document_test_entrypoint.sh /entrypoint.sh
|
||||||
|
RUN chmod +x /entrypoint.sh
|
||||||
|
ENTRYPOINT ["/entrypoint.sh"]
|
||||||
|
|||||||
@@ -6,7 +6,11 @@ all: build push
|
|||||||
|
|
||||||
.PHONY: build
|
.PHONY: build
|
||||||
build:
|
build:
|
||||||
docker build -t $(IMAGE_NAME) -f Dockerfile ../../
|
docker build -t $(IMAGE_NAME) -f Dockerfile --target tester .
|
||||||
|
|
||||||
|
.PHONY: build_foreign_document_test
|
||||||
|
build_foreign_document_test:
|
||||||
|
docker build -t $(IMAGE_NAME)-foreign-document -f Dockerfile --target foreign-document-test .
|
||||||
|
|
||||||
.PHONY: push
|
.PHONY: push
|
||||||
push:
|
push:
|
||||||
@@ -29,8 +33,12 @@ endif
|
|||||||
|
|
||||||
.PHONY: run
|
.PHONY: run
|
||||||
run: build
|
run: build
|
||||||
docker run --rm --init -v "$$(readlink -f ../../):/source:ro" --workdir=/source --mount source=cargo-cache,target=/usr/local/cargo/registry --mount source=rust-cache,target=/target --env CARGO_TARGET_DIR=/target $(IMAGE_NAME) --no-default-features --features compare --no-fail-fast --lib --test test_loader
|
docker run --rm --init --read-only --mount type=tmpfs,destination=/tmp -v "$$(readlink -f ../../):/source:ro" --workdir=/source --mount source=cargo-cache,target=/usr/local/cargo/registry --mount source=rust-cache,target=/target --env CARGO_TARGET_DIR=/target $(IMAGE_NAME) --no-default-features --features compare --no-fail-fast --lib --test test_loader
|
||||||
|
|
||||||
.PHONY: shell
|
.PHONY: shell
|
||||||
shell: build
|
shell: build
|
||||||
docker run --rm -i -t --entrypoint /bin/sh -v "$$(readlink -f ../../):/source:ro" --workdir=/source --mount source=cargo-cache,target=/usr/local/cargo/registry --mount source=rust-cache,target=/target --env CARGO_TARGET_DIR=/target $(IMAGE_NAME)
|
docker run --rm -i -t --entrypoint /bin/sh --mount type=tmpfs,destination=/tmp -v "$$(readlink -f ../../):/source:ro" --workdir=/source --mount source=cargo-cache,target=/usr/local/cargo/registry --mount source=rust-cache,target=/target --env CARGO_TARGET_DIR=/target $(IMAGE_NAME)
|
||||||
|
|
||||||
|
.PHONY: run_foreign_document_test
|
||||||
|
run_foreign_document_test: build_foreign_document_test
|
||||||
|
docker run --rm --init --read-only --mount type=tmpfs,destination=/tmp -v "$$(readlink -f ../../):/source:ro" --workdir=/source --mount source=cargo-cache,target=/usr/local/cargo/registry --mount source=rust-cache,target=/target --env CARGO_TARGET_DIR=/target $(IMAGE_NAME)-foreign-document
|
||||||
|
|||||||
149
docker/organic_test/foreign_document_test_entrypoint.sh
Normal file
149
docker/organic_test/foreign_document_test_entrypoint.sh
Normal file
@@ -0,0 +1,149 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
#
|
||||||
|
# Run the Organic compare script against a series of documents sourced from exterior places.
|
||||||
|
set -euo pipefail
|
||||||
|
IFS=$'\n\t'
|
||||||
|
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
|
||||||
|
|
||||||
|
REALPATH=$(command -v uu-realpath || command -v realpath)
|
||||||
|
|
||||||
|
function log {
|
||||||
|
(>&2 echo "${@}")
|
||||||
|
}
|
||||||
|
|
||||||
|
function die {
|
||||||
|
local status_code="$1"
|
||||||
|
shift
|
||||||
|
(>&2 echo "${@}")
|
||||||
|
exit "$status_code"
|
||||||
|
}
|
||||||
|
|
||||||
|
function main {
|
||||||
|
cargo build --no-default-features --features compare --profile release-lto
|
||||||
|
if [ "${CARGO_TARGET_DIR:-}" = "" ]; then
|
||||||
|
CARGO_TARGET_DIR=$(realpath target/)
|
||||||
|
fi
|
||||||
|
PARSE="${CARGO_TARGET_DIR}/release-lto/compare"
|
||||||
|
|
||||||
|
local all_status=0
|
||||||
|
set +e
|
||||||
|
|
||||||
|
(run_compare_function "org-mode" compare_all_org_document "/foreign_documents/org-mode")
|
||||||
|
if [ "$?" -ne 0 ]; then all_status=1; fi
|
||||||
|
(run_compare_function "emacs" compare_all_org_document "/foreign_documents/emacs")
|
||||||
|
if [ "$?" -ne 0 ]; then all_status=1; fi
|
||||||
|
(run_compare_function "worg" compare_all_org_document "/foreign_documents/worg")
|
||||||
|
if [ "$?" -ne 0 ]; then all_status=1; fi
|
||||||
|
(run_compare_function "howard_abrams" compare_howard_abrams)
|
||||||
|
if [ "$?" -ne 0 ]; then all_status=1; fi
|
||||||
|
(run_compare_function "doomemacs" compare_all_org_document "/foreign_documents/doomemacs")
|
||||||
|
if [ "$?" -ne 0 ]; then all_status=1; fi
|
||||||
|
|
||||||
|
set -e
|
||||||
|
if [ "$all_status" -ne 0 ]; then
|
||||||
|
red_text "Some tests failed."
|
||||||
|
else
|
||||||
|
green_text "All tests passed."
|
||||||
|
fi
|
||||||
|
return "$all_status"
|
||||||
|
}
|
||||||
|
|
||||||
|
function green_text {
|
||||||
|
(IFS=' '; printf '\x1b[38;2;0;255;0m%s\x1b[0m' "${*}")
|
||||||
|
}
|
||||||
|
|
||||||
|
function red_text {
|
||||||
|
(IFS=' '; printf '\x1b[38;2;255;0;0m%s\x1b[0m' "${*}")
|
||||||
|
}
|
||||||
|
|
||||||
|
function yellow_text {
|
||||||
|
(IFS=' '; printf '\x1b[38;2;255;255;0m%s\x1b[0m' "${*}")
|
||||||
|
}
|
||||||
|
|
||||||
|
function indent {
|
||||||
|
local depth="$1"
|
||||||
|
local scaled_depth=$((depth * 2))
|
||||||
|
shift 1
|
||||||
|
local prefix
|
||||||
|
prefix=$(printf -- "%${scaled_depth}s")
|
||||||
|
while read -r l; do
|
||||||
|
(IFS=' '; printf -- '%s%s\n' "$prefix" "$l")
|
||||||
|
done
|
||||||
|
}
|
||||||
|
|
||||||
|
function run_compare_function {
|
||||||
|
local name="$1"
|
||||||
|
local stdoutput
|
||||||
|
shift 1
|
||||||
|
set +e
|
||||||
|
stdoutput=$("${@}")
|
||||||
|
local status=$?
|
||||||
|
set -e
|
||||||
|
if [ "$status" -eq 0 ]; then
|
||||||
|
echo "$(green_text "GOOD") $name"
|
||||||
|
indent 1 <<<"$stdoutput"
|
||||||
|
else
|
||||||
|
echo "$(red_text "FAIL") $name"
|
||||||
|
indent 1 <<<"$stdoutput"
|
||||||
|
return 1
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
function compare_all_org_document {
|
||||||
|
local root_dir="$1"
|
||||||
|
local target_document
|
||||||
|
local all_status=0
|
||||||
|
while read target_document; do
|
||||||
|
local relative_path
|
||||||
|
relative_path=$($REALPATH --relative-to "$root_dir" "$target_document")
|
||||||
|
set +e
|
||||||
|
(run_compare "$relative_path" "$target_document")
|
||||||
|
if [ "$?" -ne 0 ]; then all_status=1; fi
|
||||||
|
set -e
|
||||||
|
done<<<"$(find "$root_dir" -type f -iname '*.org' | sort)"
|
||||||
|
return "$all_status"
|
||||||
|
}
|
||||||
|
|
||||||
|
function run_compare {
|
||||||
|
local name="$1"
|
||||||
|
local target_document="$2"
|
||||||
|
set +e
|
||||||
|
($PARSE "$target_document" &> /dev/null)
|
||||||
|
local status=$?
|
||||||
|
set -e
|
||||||
|
if [ "$status" -eq 0 ]; then
|
||||||
|
echo "$(green_text "GOOD") $name"
|
||||||
|
else
|
||||||
|
echo "$(red_text "FAIL") $name"
|
||||||
|
return 1
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
function compare_howard_abrams {
|
||||||
|
local all_status=0
|
||||||
|
set +e
|
||||||
|
|
||||||
|
(run_compare_function "dot-files" compare_all_org_document "/foreign_documents/howardabrams/dot-files")
|
||||||
|
if [ "$?" -ne 0 ]; then all_status=1; fi
|
||||||
|
(run_compare_function "hamacs" compare_all_org_document "/foreign_documents/howardabrams/hamacs")
|
||||||
|
if [ "$?" -ne 0 ]; then all_status=1; fi
|
||||||
|
(run_compare_function "demo-it" compare_all_org_document "/foreign_documents/howardabrams/demo-it")
|
||||||
|
if [ "$?" -ne 0 ]; then all_status=1; fi
|
||||||
|
(run_compare_function "magit-demo" compare_all_org_document "/foreign_documents/howardabrams/magit-demo")
|
||||||
|
if [ "$?" -ne 0 ]; then all_status=1; fi
|
||||||
|
(run_compare_function "pdx-emacs-hackers" compare_all_org_document "/foreign_documents/howardabrams/pdx-emacs-hackers")
|
||||||
|
if [ "$?" -ne 0 ]; then all_status=1; fi
|
||||||
|
(run_compare_function "flora-simulator" compare_all_org_document "/foreign_documents/howardabrams/flora-simulator")
|
||||||
|
if [ "$?" -ne 0 ]; then all_status=1; fi
|
||||||
|
(run_compare_function "literate-devops-demo" compare_all_org_document "/foreign_documents/howardabrams/literate-devops-demo")
|
||||||
|
if [ "$?" -ne 0 ]; then all_status=1; fi
|
||||||
|
(run_compare_function "clojure-yesql-xp" compare_all_org_document "/foreign_documents/howardabrams/clojure-yesql-xp")
|
||||||
|
if [ "$?" -ne 0 ]; then all_status=1; fi
|
||||||
|
(run_compare_function "veep" compare_all_org_document "/foreign_documents/howardabrams/veep")
|
||||||
|
if [ "$?" -ne 0 ]; then all_status=1; fi
|
||||||
|
|
||||||
|
set -e
|
||||||
|
return "$all_status"
|
||||||
|
}
|
||||||
|
|
||||||
|
main "${@}"
|
||||||
@@ -25,3 +25,4 @@ This could significantly reduce our calls to exit matchers.
|
|||||||
I think targets would break this.
|
I think targets would break this.
|
||||||
|
|
||||||
The exit matchers are already implicitly building this behavior since they should all exit very early when the starting character is wrong. Putting this logic in a centralized place, far away from where those characters are actually going to be used, is unfortunate for readability.
|
The exit matchers are already implicitly building this behavior since they should all exit very early when the starting character is wrong. Putting this logic in a centralized place, far away from where those characters are actually going to be used, is unfortunate for readability.
|
||||||
|
** Use exit matcher to cut off trailing whitespace instead of re-matching in plain lists.
|
||||||
|
|||||||
7
notes/test_names.org
Normal file
7
notes/test_names.org
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
* Autogen tests
|
||||||
|
The autogen tests are the tests automatically generated to compare the output of Organic vs the upstream Emacs Org-mode parser using the sample documents in the =org_mode_samples= folder. They will have a prefix based on the settings for each test.
|
||||||
|
|
||||||
|
- default :: The test is run with the default settings (The upstream Emacs Org-mode determines the default settings)
|
||||||
|
- la :: Short for "list alphabetic". Enables alphabetic plain lists.
|
||||||
|
- t# :: Sets the tab-width to # (as in t4 sets the tab-width to 4).
|
||||||
|
- odd :: Sets the org-odd-levels-only setting to true (meaning "odd" as opposed to "oddeven").
|
||||||
1
org_mode_samples/README.org
Normal file
1
org_mode_samples/README.org
Normal file
@@ -0,0 +1 @@
|
|||||||
|
This folder contains org-mode documents that get automatically included as tests using build.rs.
|
||||||
1
org_mode_samples/document/category.org
Normal file
1
org_mode_samples/document/category.org
Normal file
@@ -0,0 +1 @@
|
|||||||
|
#+CATEGORY: theory
|
||||||
5
org_mode_samples/document/category_multiple.org
Normal file
5
org_mode_samples/document/category_multiple.org
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
#+CATEGORY: foo
|
||||||
|
#+CATEGORY: bar
|
||||||
|
#+begin_src text
|
||||||
|
#+CATEGORY: baz
|
||||||
|
#+end_src
|
||||||
@@ -0,0 +1,3 @@
|
|||||||
|
#+BEGIN: timestamp :format "%Y-%m-%d %H:%M"
|
||||||
|
|
||||||
|
#+END
|
||||||
@@ -0,0 +1,8 @@
|
|||||||
|
* Footnotes
|
||||||
|
|
||||||
|
[fn:1]
|
||||||
|
|
||||||
|
#+BEGIN_EXAMPLE
|
||||||
|
baz
|
||||||
|
#+END_EXAMPLE
|
||||||
|
|
||||||
@@ -0,0 +1,18 @@
|
|||||||
|
#+begin_defun
|
||||||
|
foo
|
||||||
|
#+begin_lorem
|
||||||
|
,#+begin_center
|
||||||
|
bar
|
||||||
|
,#+end_center
|
||||||
|
ipsum
|
||||||
|
#+end_lorem
|
||||||
|
baz
|
||||||
|
#+end_defun
|
||||||
|
|
||||||
|
#+begin_center
|
||||||
|
#+begin_quote
|
||||||
|
#+begin_center
|
||||||
|
lorem
|
||||||
|
#+end_center
|
||||||
|
#+end_quote
|
||||||
|
#+end_center
|
||||||
@@ -0,0 +1,12 @@
|
|||||||
|
#+begin_defun
|
||||||
|
foo
|
||||||
|
#+begin_lorem
|
||||||
|
ipsum
|
||||||
|
#+end_lorem
|
||||||
|
bar
|
||||||
|
#+begin_center
|
||||||
|
#+begin_quote
|
||||||
|
baz
|
||||||
|
#+end_quote
|
||||||
|
#+end_center
|
||||||
|
#+end_defun
|
||||||
@@ -0,0 +1,5 @@
|
|||||||
|
#+begin_quote
|
||||||
|
|
||||||
|
foo
|
||||||
|
|
||||||
|
#+end_quote
|
||||||
@@ -0,0 +1,5 @@
|
|||||||
|
#+begin_defun
|
||||||
|
foo
|
||||||
|
|
||||||
|
{{{bar(baz)}}}
|
||||||
|
#+end_defun
|
||||||
@@ -0,0 +1,3 @@
|
|||||||
|
#+begin_defun foo bar baz
|
||||||
|
lorem
|
||||||
|
#+end_defun
|
||||||
@@ -0,0 +1,22 @@
|
|||||||
|
# An ordered list starting at 3
|
||||||
|
1. [@3] foo
|
||||||
|
|
||||||
|
|
||||||
|
# An ordered list starting at 11
|
||||||
|
1. [@D] bar
|
||||||
|
|
||||||
|
|
||||||
|
# An ordered list starting at 1 with the contents of "[@kk] baz"
|
||||||
|
1. [@kk] baz
|
||||||
|
|
||||||
|
|
||||||
|
# A paragraph when org-list-allow-alphabetical is nil
|
||||||
|
m. lorem
|
||||||
|
|
||||||
|
|
||||||
|
# A paragraph when org-list-allow-alphabetical is nil
|
||||||
|
m. [@k] ipsum
|
||||||
|
|
||||||
|
|
||||||
|
# An unordered list with :counter set to 3
|
||||||
|
- [@3] dolar
|
||||||
@@ -0,0 +1,30 @@
|
|||||||
|
# Alphabetic lists larger than 26 elements should become numbered. From M-x describe-variable org-list-allow-alphabetical:
|
||||||
|
#
|
||||||
|
# > Lists with more than 26 items will fallback to standard numbering.
|
||||||
|
a. 1
|
||||||
|
a. 2
|
||||||
|
a. 3
|
||||||
|
a. 4
|
||||||
|
a. 5
|
||||||
|
a. 6
|
||||||
|
a. 7
|
||||||
|
a. 8
|
||||||
|
a. 9
|
||||||
|
a. 10
|
||||||
|
a. 11
|
||||||
|
a. 12
|
||||||
|
a. 13
|
||||||
|
a. 14
|
||||||
|
a. 15
|
||||||
|
a. 16
|
||||||
|
a. 17
|
||||||
|
a. 18
|
||||||
|
a. 19
|
||||||
|
a. 20
|
||||||
|
a. 21
|
||||||
|
a. 22
|
||||||
|
a. 23
|
||||||
|
a. 24
|
||||||
|
a. 25
|
||||||
|
a. 26
|
||||||
|
a. 27
|
||||||
@@ -0,0 +1,3 @@
|
|||||||
|
# These are only allowed by configuring org-list-allow-alphabetical which the automated tests are not currently set up to do, so this will parse as a paragraph:
|
||||||
|
a. foo
|
||||||
|
b. bar
|
||||||
@@ -0,0 +1,2 @@
|
|||||||
|
3. [@3] foo
|
||||||
|
4. bar
|
||||||
@@ -0,0 +1,6 @@
|
|||||||
|
- foo ::
|
||||||
|
|
||||||
|
- bar ::
|
||||||
|
|
||||||
|
|
||||||
|
baz
|
||||||
@@ -0,0 +1,2 @@
|
|||||||
|
- {{{foo(bar)}}} :: baz
|
||||||
|
- =foo= :: bar
|
||||||
@@ -0,0 +1,3 @@
|
|||||||
|
- foo :: bar
|
||||||
|
- foo :: bar
|
||||||
|
- foo :: bar
|
||||||
@@ -0,0 +1,2 @@
|
|||||||
|
- =foo :: bar= :: baz
|
||||||
|
- lorem :: ipsum :: dolar
|
||||||
@@ -1,3 +1,5 @@
|
|||||||
1.
|
1.
|
||||||
2.
|
2.
|
||||||
3.
|
3.
|
||||||
|
|
||||||
|
* headline
|
||||||
|
|||||||
@@ -0,0 +1,3 @@
|
|||||||
|
1. foo
|
||||||
|
- bar
|
||||||
|
- lorem :: ipsum
|
||||||
@@ -0,0 +1,2 @@
|
|||||||
|
# Since this is an ordered list, the text before the " :: " is NOT parsed as a tag.
|
||||||
|
1. foo :: bar
|
||||||
@@ -0,0 +1,5 @@
|
|||||||
|
# "lorem" is prefixed by a tab instead of spaces, so the editor's tab-width value determines whether lorem is a sibling of baz (tab-width 8), a sibling of bar (tab-width < 8), or a child of baz (tab-width > 8).
|
||||||
|
1. foo
|
||||||
|
1. bar
|
||||||
|
1. baz
|
||||||
|
1. lorem
|
||||||
@@ -0,0 +1,2 @@
|
|||||||
|
- foo
|
||||||
|
|
||||||
@@ -0,0 +1,6 @@
|
|||||||
|
* Overwrite
|
||||||
|
:PROPERTIES:
|
||||||
|
:header-args: :var foo="lorem"
|
||||||
|
:header-args:emacs-lisp: :var bar="ipsum"
|
||||||
|
:header-args:emacs-lisp+: :results silent :var baz=7
|
||||||
|
:END:
|
||||||
@@ -0,0 +1,6 @@
|
|||||||
|
# The STARTUP directive here instructs org-mode to align tables which emacs normally does when opening the file. Since Organic is solely a parser, we have no business editing the org-mode document so Organic does not handle aligning tables, so in order for this test to pass, we have to avoid that behavior in Emacs.
|
||||||
|
#+STARTUP: align
|
||||||
|
|
||||||
|
|foo|bar|
|
||||||
|
|-
|
||||||
|
|lorem|ipsum|
|
||||||
@@ -0,0 +1,6 @@
|
|||||||
|
src_elisp{(bar)}
|
||||||
|
*src_elisp{(bar)}*
|
||||||
|
|
||||||
|
| foo *bar* |
|
||||||
|
| foo src_elisp{(bar)} |
|
||||||
|
| foo *src_elisp{(bar)}* |
|
||||||
4
org_mode_samples/greater_element/table/empty_formula.org
Normal file
4
org_mode_samples/greater_element/table/empty_formula.org
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
| Name | Value |
|
||||||
|
|------+-------|
|
||||||
|
| foo | bar |
|
||||||
|
#+tblfm:
|
||||||
8
org_mode_samples/greater_element/table/with_formulas.org
Normal file
8
org_mode_samples/greater_element/table/with_formulas.org
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
| Name | Price | Quantity | Total |
|
||||||
|
|------+-------+----------+-------|
|
||||||
|
| foo | 7 | 4 | 28 |
|
||||||
|
| bar | 3.5 | 3 | 10.5 |
|
||||||
|
|------+-------+----------+-------|
|
||||||
|
| | | 7 | 38.5 |
|
||||||
|
#+tblfm: $4=$2*$3::@>$4=vsum(@2..@-1)
|
||||||
|
#+tblfm: @>$3=vsum(@2..@-1)
|
||||||
@@ -1,4 +1,5 @@
|
|||||||
# Comment
|
# Comment
|
||||||
|
#
|
||||||
# indented line
|
# indented line
|
||||||
# At the top of the file
|
# At the top of the file
|
||||||
|
|
||||||
|
|||||||
@@ -0,0 +1,6 @@
|
|||||||
|
%%(foo
|
||||||
|
)
|
||||||
|
|
||||||
|
%%(bar ; baz
|
||||||
|
|
||||||
|
lorem
|
||||||
@@ -0,0 +1 @@
|
|||||||
|
%%(foo bar) ; baz
|
||||||
@@ -0,0 +1,2 @@
|
|||||||
|
# Fixed width areas must begin with colon followed by a space, not a tab, so this is not a fixed width area.
|
||||||
|
: foo
|
||||||
@@ -0,0 +1,7 @@
|
|||||||
|
# This test is to prove that the parser works with affiliated keywords that have both a shorter and longer version.
|
||||||
|
|
||||||
|
#+results:
|
||||||
|
#+result:
|
||||||
|
#+begin_latex
|
||||||
|
\foo
|
||||||
|
#+end_latex
|
||||||
1
org_mode_samples/lesser_element/keyword/babel_call.org
Normal file
1
org_mode_samples/lesser_element/keyword/babel_call.org
Normal file
@@ -0,0 +1 @@
|
|||||||
|
#+call: foo(bar="baz")
|
||||||
@@ -0,0 +1 @@
|
|||||||
|
#+title:foo:bar: baz: lorem: ipsum
|
||||||
@@ -0,0 +1,2 @@
|
|||||||
|
#+begin_src
|
||||||
|
#+end_src
|
||||||
@@ -0,0 +1,4 @@
|
|||||||
|
# There are trailing spaces after the begin and end src lines
|
||||||
|
#+begin_src
|
||||||
|
echo "this is a source block."
|
||||||
|
#+end_src
|
||||||
@@ -0,0 +1,3 @@
|
|||||||
|
*[fn:: /abcdef[fn::ghijklmnopqrstuvw]xyz/ r]*
|
||||||
|
|
||||||
|
*[fn:: /abcdef[fn::ghijk *lmnopq* rstuvw]xyz/ r]*
|
||||||
11
org_mode_samples/object/plain_link/with_parenthesis.org
Normal file
11
org_mode_samples/object/plain_link/with_parenthesis.org
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
# Should be a link:
|
||||||
|
https://en.wikipedia.org/wiki/Shebang_(Unix)
|
||||||
|
|
||||||
|
# No closing parenthesis, so link ends at underscore.
|
||||||
|
https://en.wikipedia.org/wiki/Shebang_(Unix
|
||||||
|
|
||||||
|
# Parenthesis only allowed to depth of 2 so link ends at underscore.
|
||||||
|
https://en.wikipedia.org/wiki/Shebang_(((Unix)))
|
||||||
|
|
||||||
|
# Even though they eventually become balanced, we hit negative parenthesis depth so link ends at )
|
||||||
|
https://en.wikipedia.org/wiki/Shebang)Unix(
|
||||||
@@ -0,0 +1 @@
|
|||||||
|
mailto:foo@bar.baz .
|
||||||
3
org_mode_samples/object/radio_link/different_case.org
Normal file
3
org_mode_samples/object/radio_link/different_case.org
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
<<<Foo Bar Baz>>>
|
||||||
|
|
||||||
|
foo bar baz
|
||||||
@@ -0,0 +1,6 @@
|
|||||||
|
<<<foo bar baz>>>
|
||||||
|
|
||||||
|
|
||||||
|
foo
|
||||||
|
bar
|
||||||
|
baz
|
||||||
1
org_mode_samples/object/regular_link/elisp.org
Normal file
1
org_mode_samples/object/regular_link/elisp.org
Normal file
@@ -0,0 +1 @@
|
|||||||
|
[[elisp:(local-set-key "\M-\x" 'foo-bar-baz)]]
|
||||||
@@ -0,0 +1 @@
|
|||||||
|
[[https://en.wikipedia.org/wiki/Shebang_(Unix)]]
|
||||||
@@ -0,0 +1 @@
|
|||||||
|
[[[http://foo.bar/baz][lorem]]]
|
||||||
4
org_mode_samples/object/statistics_cookie/empty.org
Normal file
4
org_mode_samples/object/statistics_cookie/empty.org
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
[/]
|
||||||
|
[/2]
|
||||||
|
[3/]
|
||||||
|
[%]
|
||||||
@@ -0,0 +1,7 @@
|
|||||||
|
# Even though *exporting* honors the setting to require braces for subscript/superscript, the official org-mode parser still parses subscripts and superscripts.
|
||||||
|
|
||||||
|
#+OPTIONS: ^:{}
|
||||||
|
foo_this isn't a subscript when exported due to lack of braces (but its still a subscript during parsing)
|
||||||
|
|
||||||
|
|
||||||
|
bar_{this is a subscript}
|
||||||
@@ -0,0 +1,13 @@
|
|||||||
|
foo_(bar)
|
||||||
|
|
||||||
|
foo_(b(ar)
|
||||||
|
|
||||||
|
foo_(b{ar)
|
||||||
|
|
||||||
|
foo_{b(ar}
|
||||||
|
|
||||||
|
foo_(b(a)r)
|
||||||
|
|
||||||
|
foo_b(a)r
|
||||||
|
|
||||||
|
foo_(b+ar)
|
||||||
1
org_mode_samples/object/text_markup/double_star.org
Normal file
1
org_mode_samples/object/text_markup/double_star.org
Normal file
@@ -0,0 +1 @@
|
|||||||
|
foo ** bar ** baz
|
||||||
1
org_mode_samples/object/text_markup/double_tilde.org
Normal file
1
org_mode_samples/object/text_markup/double_tilde.org
Normal file
@@ -0,0 +1 @@
|
|||||||
|
foo ~~ bar ~~ baz
|
||||||
4
org_mode_samples/object/text_markup/target_substring.org
Normal file
4
org_mode_samples/object/text_markup/target_substring.org
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
# Since "foos" has an extra "s", this does not match the target.
|
||||||
|
the foos bar
|
||||||
|
|
||||||
|
The <<<foo>>> and stuff.
|
||||||
@@ -0,0 +1,4 @@
|
|||||||
|
foo ==>bar=.
|
||||||
|
|
||||||
|
# This uses a zero-width space to escape the equals signs to make the verbatim not end.
|
||||||
|
=lorem == ipsum=
|
||||||
@@ -0,0 +1,7 @@
|
|||||||
|
# All the marks for repeater and warning delay
|
||||||
|
[1970-01-01 Thu 8:15-13:15foo +1h -2h]
|
||||||
|
[1970-01-01 Thu 8:15-13:15foo ++1d -2d]
|
||||||
|
[1970-01-01 Thu 8:15-13:15foo .+1w -2w]
|
||||||
|
[1970-01-01 Thu 8:15-13:15foo +1m --2m]
|
||||||
|
[1970-01-01 Thu 8:15-13:15foo ++1y --2y]
|
||||||
|
[1970-01-01 Thu 8:15-13:15foo .+1d --2h]
|
||||||
@@ -2,13 +2,17 @@
|
|||||||
<%%(foo bar baz)>
|
<%%(foo bar baz)>
|
||||||
# active
|
# active
|
||||||
<1970-01-01 Thu 8:15rest +1w -1d>
|
<1970-01-01 Thu 8:15rest +1w -1d>
|
||||||
|
# Any value for "REST" in the first timestamp makes this a regular timestamp rather than a time range.
|
||||||
|
<1970-01-01 Thu 8:15rest-13:15otherrest +1w -1d>
|
||||||
# inactive
|
# inactive
|
||||||
[1970-01-01 Thu 8:15rest +1w -1d]
|
[1970-01-01 Thu 8:15rest +1w -1d]
|
||||||
|
# Any value for "REST" in the first timestamp makes this a regular timestamp rather than a time range.
|
||||||
|
[1970-01-01 Thu 8:15rest-13:15otherrest +1w -1d]
|
||||||
# active date range
|
# active date range
|
||||||
<1970-01-01 Thu 8:15rest +1w -1d>--<1970-01-01 Thu 8:15rest +1w -1d>
|
<1970-01-01 Thu 8:15rest +1w -1d>--<1970-01-01 Thu 8:15rest +1w -1d>
|
||||||
# active time range
|
# active time range
|
||||||
<1970-01-01 Thu 8:15rest-13:15otherrest +1w -1d>
|
<1970-01-01 Thu 8:15-13:15otherrest +1w -1d>
|
||||||
# inactive date range
|
# inactive date range
|
||||||
[1970-01-01 Thu 8:15rest +1w -1d]--[1970-01-01 Thu 8:15rest +1w -1d]
|
[1970-01-01 Thu 8:15rest +1w -1d]--[1970-01-01 Thu 8:15rest +1w -1d]
|
||||||
# inactive time range
|
# inactive time range
|
||||||
[1970-01-01 Thu 8:15rest-13:15otherrest +1w -1d]
|
[1970-01-01 Thu 8:15-13:15otherrest +1w -1d]
|
||||||
|
|||||||
2
org_mode_samples/object/timestamp/timeless_rest.org
Normal file
2
org_mode_samples/object/timestamp/timeless_rest.org
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
# This should be a malformed timestamp according to the current org-mode documentation but it is accepted anyway (with no repeater).
|
||||||
|
<1970-01-01 Thu ++y>
|
||||||
@@ -0,0 +1,2 @@
|
|||||||
|
* TODO [#A] COMMENT foo bar
|
||||||
|
baz
|
||||||
2
org_mode_samples/sections_and_headings/empty_heading.org
Normal file
2
org_mode_samples/sections_and_headings/empty_heading.org
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
* DONE
|
||||||
|
*
|
||||||
@@ -0,0 +1,6 @@
|
|||||||
|
#+TODO: TODO(t) INPROGRESS(i/!) | DONE(d!) CANCELED(c@/!)
|
||||||
|
# ! : Log changes leading to this state.
|
||||||
|
# @ : Log changes leading to this state and prompt for a comment to include.
|
||||||
|
# /! : Log changes leaving this state if and only if to a state that does not log. This can be combined with the above like WAIT(w!/!) or DELAYED(d@/!)
|
||||||
|
* INPROGRESS
|
||||||
|
- State "TODO" from "INPROGRESS" [2023-09-14 Thu 02:13]
|
||||||
@@ -0,0 +1,2 @@
|
|||||||
|
<<<Footnotes>>> and stuff
|
||||||
|
* Footnotes
|
||||||
@@ -0,0 +1,3 @@
|
|||||||
|
* FOOTNOTES
|
||||||
|
* Footnotes
|
||||||
|
* footnotes
|
||||||
@@ -0,0 +1,2 @@
|
|||||||
|
* Footnotes
|
||||||
|
* Footnotes
|
||||||
@@ -0,0 +1,3 @@
|
|||||||
|
* Foo
|
||||||
|
* Footnotes :foo:bar:
|
||||||
|
* Footnotes and stuff
|
||||||
@@ -0,0 +1,8 @@
|
|||||||
|
#+STARTUP: odd
|
||||||
|
* Foo
|
||||||
|
***** Bar
|
||||||
|
* Baz
|
||||||
|
*** Lorem
|
||||||
|
* Ipsum
|
||||||
|
**** Dolar
|
||||||
|
***** Cat
|
||||||
@@ -0,0 +1,4 @@
|
|||||||
|
* DONE foo
|
||||||
|
DEADLINE: <2023-09-08 Fri>
|
||||||
|
|
||||||
|
* DONE bar
|
||||||
@@ -0,0 +1 @@
|
|||||||
|
* [0/4] foo
|
||||||
@@ -4,10 +4,23 @@ set -euo pipefail
|
|||||||
IFS=$'\n\t'
|
IFS=$'\n\t'
|
||||||
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
|
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
|
||||||
|
|
||||||
cd "$DIR/../"
|
: ${PROFILE:="perf"}
|
||||||
|
|
||||||
RUSTFLAGS="-C opt-level=0" cargo build --no-default-features
|
function main {
|
||||||
valgrind --tool=callgrind --callgrind-out-file=callgrind.out target/debug/compare
|
local additional_flags=()
|
||||||
|
if [ "$PROFILE" = "dev" ] || [ "$PROFILE" = "debug" ]; then
|
||||||
|
PROFILE="debug"
|
||||||
|
else
|
||||||
|
additional_flags+=(--profile "$PROFILE")
|
||||||
|
# We have to disable avx512 because valgrind does not yet support it.
|
||||||
|
export RUSTFLAGS="-C target-feature=-avx512"
|
||||||
|
fi
|
||||||
|
|
||||||
echo "You probably want to run:"
|
(cd "$DIR/../" && RUSTFLAGS="-C target-cpu=x86-64-v3" cargo build --no-default-features "${additional_flags[@]}")
|
||||||
echo "callgrind_annotate --auto=yes callgrind.out"
|
valgrind --tool=callgrind --callgrind-out-file="$DIR/../callgrind.out" "$DIR/../target/${PROFILE}/parse" "${@}"
|
||||||
|
|
||||||
|
echo "You probably want to run:"
|
||||||
|
echo "callgrind_annotate --auto=yes '$DIR/../callgrind.out'"
|
||||||
|
}
|
||||||
|
|
||||||
|
main "${@}"
|
||||||
|
|||||||
@@ -6,8 +6,6 @@ DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
|
|||||||
|
|
||||||
: ${PROFILE:="perf"}
|
: ${PROFILE:="perf"}
|
||||||
|
|
||||||
cd "$DIR/../"
|
|
||||||
|
|
||||||
function main {
|
function main {
|
||||||
local additional_flags=()
|
local additional_flags=()
|
||||||
if [ "$PROFILE" = "dev" ] || [ "$PROFILE" = "debug" ]; then
|
if [ "$PROFILE" = "dev" ] || [ "$PROFILE" = "debug" ]; then
|
||||||
@@ -15,12 +13,12 @@ function main {
|
|||||||
else
|
else
|
||||||
additional_flags+=(--profile "$PROFILE")
|
additional_flags+=(--profile "$PROFILE")
|
||||||
fi
|
fi
|
||||||
cargo build --no-default-features "${additional_flags[@]}"
|
(cd "$DIR/../" && cargo build --no-default-features "${additional_flags[@]}")
|
||||||
perf record --freq=2000 --call-graph dwarf --output=perf.data target/${PROFILE}/compare
|
perf record --freq=2000 --call-graph dwarf --output="$DIR/../perf.data" "$DIR/../target/${PROFILE}/parse" "${@}"
|
||||||
|
|
||||||
# Convert to a format firefox will read
|
# Convert to a format firefox will read
|
||||||
# flags to consider --show-info
|
# flags to consider --show-info
|
||||||
perf script -F +pid --input perf.data > perf.firefox
|
perf script -F +pid --input "$DIR/../perf.data" > "$DIR/../perf.firefox"
|
||||||
|
|
||||||
echo "You probably want to go to https://profiler.firefox.com/"
|
echo "You probably want to go to https://profiler.firefox.com/"
|
||||||
echo "Either that or run hotspot"
|
echo "Either that or run hotspot"
|
||||||
|
|||||||
@@ -8,15 +8,29 @@ DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
|
|||||||
: ${TRACE:="NO"} # or YES to send traces to jaeger
|
: ${TRACE:="NO"} # or YES to send traces to jaeger
|
||||||
: ${BACKTRACE:="NO"} # or YES to print a rust backtrace when panicking
|
: ${BACKTRACE:="NO"} # or YES to print a rust backtrace when panicking
|
||||||
: ${NO_COLOR:=""} # Set to anything to disable color output
|
: ${NO_COLOR:=""} # Set to anything to disable color output
|
||||||
|
: ${PROFILE:="debug"}
|
||||||
|
|
||||||
|
|
||||||
cd "$DIR/../"
|
|
||||||
REALPATH=$(command -v uu-realpath || command -v realpath)
|
REALPATH=$(command -v uu-realpath || command -v realpath)
|
||||||
MAKE=$(command -v gmake || command -v make)
|
MAKE=$(command -v gmake || command -v make)
|
||||||
|
|
||||||
|
############## Setup #########################
|
||||||
|
|
||||||
|
function die {
|
||||||
|
local status_code="$1"
|
||||||
|
shift
|
||||||
|
(>&2 echo "${@}")
|
||||||
|
exit "$status_code"
|
||||||
|
}
|
||||||
|
|
||||||
|
function log {
|
||||||
|
(>&2 echo "${@}")
|
||||||
|
}
|
||||||
|
|
||||||
|
############## Program #########################
|
||||||
|
|
||||||
function main {
|
function main {
|
||||||
build_container
|
build_container
|
||||||
launch_container
|
launch_container "${@}"
|
||||||
}
|
}
|
||||||
|
|
||||||
function build_container {
|
function build_container {
|
||||||
@@ -25,7 +39,6 @@ function build_container {
|
|||||||
|
|
||||||
function launch_container {
|
function launch_container {
|
||||||
local additional_flags=()
|
local additional_flags=()
|
||||||
local additional_args=()
|
|
||||||
local features=(compare)
|
local features=(compare)
|
||||||
|
|
||||||
if [ "$NO_COLOR" != "" ]; then
|
if [ "$NO_COLOR" != "" ]; then
|
||||||
@@ -39,10 +52,8 @@ function launch_container {
|
|||||||
fi
|
fi
|
||||||
|
|
||||||
if [ "$SHELL" != "YES" ]; then
|
if [ "$SHELL" != "YES" ]; then
|
||||||
local features_joined=$(IFS=","; echo "${features[*]}")
|
additional_flags+=(--read-only)
|
||||||
additional_args+=(cargo run --no-default-features --features "$features_joined")
|
|
||||||
else
|
else
|
||||||
additional_args+=(/bin/sh)
|
|
||||||
additional_flags+=(-t)
|
additional_flags+=(-t)
|
||||||
fi
|
fi
|
||||||
|
|
||||||
@@ -50,7 +61,51 @@ function launch_container {
|
|||||||
additional_flags+=(--env RUST_BACKTRACE=full)
|
additional_flags+=(--env RUST_BACKTRACE=full)
|
||||||
fi
|
fi
|
||||||
|
|
||||||
docker run "${additional_flags[@]}" --init --rm -i -v "$($REALPATH ./):/source:ro" --mount source=cargo-cache,target=/usr/local/cargo/registry --mount source=rust-cache,target=/target --env CARGO_TARGET_DIR=/target -w /source --entrypoint "" organic-test "${additional_args[@]}"
|
if [ "$SHELL" = "YES" ]; then
|
||||||
|
exec docker run "${additional_flags[@]}" --init --rm -i --mount type=tmpfs,destination=/tmp -v "/:/input:ro" -v "$($REALPATH "$DIR/../"):/source:ro" --mount source=cargo-cache,target=/usr/local/cargo/registry --mount source=rust-cache,target=/target --env CARGO_TARGET_DIR=/target -w /source --entrypoint "" organic-test /bin/sh
|
||||||
|
fi
|
||||||
|
|
||||||
|
local features_joined
|
||||||
|
features_joined=$(IFS=","; echo "${features[*]}")
|
||||||
|
|
||||||
|
local build_flags=()
|
||||||
|
if [ "$PROFILE" = "dev" ] || [ "$PROFILE" = "debug" ]; then
|
||||||
|
PROFILE="debug"
|
||||||
|
else
|
||||||
|
build_flags+=(--profile "$PROFILE")
|
||||||
|
fi
|
||||||
|
|
||||||
|
|
||||||
|
if [ $# -gt 0 ]; then
|
||||||
|
# If we passed in args, we need to forward them along
|
||||||
|
for path in "${@}"; do
|
||||||
|
local full_path
|
||||||
|
full_path=$($REALPATH "$path")
|
||||||
|
init_script=$(cat <<EOF
|
||||||
|
set -euo pipefail
|
||||||
|
IFS=\$'\n\t'
|
||||||
|
|
||||||
|
cargo build --bin compare --no-default-features --features "$features_joined" ${build_flags[@]}
|
||||||
|
exec /target/${PROFILE}/compare "/input${full_path}"
|
||||||
|
EOF
|
||||||
|
)
|
||||||
|
docker run "${additional_flags[@]}" --init --rm -i --mount type=tmpfs,destination=/tmp -v "/:/input:ro" -v "$($REALPATH "$DIR/../"):/source:ro" --mount source=cargo-cache,target=/usr/local/cargo/registry --mount source=rust-cache,target=/target --env CARGO_TARGET_DIR=/target -w /source --entrypoint "" organic-test sh -c "$init_script"
|
||||||
|
done
|
||||||
|
else
|
||||||
|
local current_directory init_script
|
||||||
|
current_directory=$(pwd)
|
||||||
|
init_script=$(cat <<EOF
|
||||||
|
set -euo pipefail
|
||||||
|
IFS=\$'\n\t'
|
||||||
|
|
||||||
|
cargo build --bin compare --no-default-features --features "$features_joined" ${build_flags[@]}
|
||||||
|
cd /input${current_directory}
|
||||||
|
exec /target/${PROFILE}/compare
|
||||||
|
EOF
|
||||||
|
)
|
||||||
|
|
||||||
|
docker run "${additional_flags[@]}" --init --rm -i --mount type=tmpfs,destination=/tmp -v "/:/input:ro" -v "$($REALPATH "$DIR/../"):/source:ro" --mount source=cargo-cache,target=/usr/local/cargo/registry --mount source=rust-cache,target=/target --env CARGO_TARGET_DIR=/target -w /source --entrypoint "" organic-test sh -c "$init_script"
|
||||||
|
fi
|
||||||
}
|
}
|
||||||
|
|
||||||
main "${@}"
|
main "${@}"
|
||||||
|
|||||||
72
scripts/run_docker_compare_bisect.bash
Executable file
72
scripts/run_docker_compare_bisect.bash
Executable file
@@ -0,0 +1,72 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
#
|
||||||
|
# Bisect parsing a file at various line cut-off points to see which line causes the parse to differ from emacs.
|
||||||
|
set -euo pipefail
|
||||||
|
IFS=$'\n\t'
|
||||||
|
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
|
||||||
|
|
||||||
|
REALPATH=$(command -v uu-realpath || command -v realpath)
|
||||||
|
|
||||||
|
############## Setup #########################
|
||||||
|
|
||||||
|
function die {
|
||||||
|
local status_code="$1"
|
||||||
|
shift
|
||||||
|
(>&2 echo "${@}")
|
||||||
|
exit "$status_code"
|
||||||
|
}
|
||||||
|
|
||||||
|
function log {
|
||||||
|
(>&2 echo "${@}")
|
||||||
|
}
|
||||||
|
|
||||||
|
############## Program #########################
|
||||||
|
|
||||||
|
function main {
|
||||||
|
local target_full_path
|
||||||
|
target_full_path=$($REALPATH "$1")
|
||||||
|
SOURCE_FOLDER=$(dirname "$target_full_path")
|
||||||
|
TARGET_DOCUMENT=$(basename "$target_full_path")
|
||||||
|
|
||||||
|
|
||||||
|
local good=0
|
||||||
|
local bad
|
||||||
|
bad=$(wc -l "$SOURCE_FOLDER/$TARGET_DOCUMENT" | awk '{print $1}')
|
||||||
|
|
||||||
|
set +e
|
||||||
|
(run_parse "$bad")
|
||||||
|
local status=$?
|
||||||
|
set -e
|
||||||
|
if [ $status -eq 0 ]; then
|
||||||
|
log "Entire file passes."
|
||||||
|
exit 0
|
||||||
|
fi
|
||||||
|
|
||||||
|
while [[ "$((bad - good))" -gt 1 ]]; do
|
||||||
|
local next_line=$((((bad - good) / 2) + good))
|
||||||
|
log "Testing line $next_line"
|
||||||
|
set +e
|
||||||
|
run_parse "$next_line" &> /dev/null
|
||||||
|
local status=$?
|
||||||
|
set -e
|
||||||
|
if [ $status -eq 0 ]; then
|
||||||
|
good="$next_line"
|
||||||
|
log "Line $next_line good"
|
||||||
|
else
|
||||||
|
bad="$next_line"
|
||||||
|
log "Line $next_line bad"
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
echo "Bad line: $bad"
|
||||||
|
}
|
||||||
|
|
||||||
|
function run_parse {
|
||||||
|
local lines="$1"
|
||||||
|
|
||||||
|
cd "$SOURCE_FOLDER"
|
||||||
|
head -n "$lines" "$SOURCE_FOLDER/$TARGET_DOCUMENT" | PROFILE=release-lto "${DIR}/run_docker_compare.bash"
|
||||||
|
local status=$?
|
||||||
|
return "$status"
|
||||||
|
}
|
||||||
|
|
||||||
|
main "${@}"
|
||||||
@@ -6,7 +6,6 @@ DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
|
|||||||
|
|
||||||
: ${NO_COLOR:=""} # Set to anything to disable color output
|
: ${NO_COLOR:=""} # Set to anything to disable color output
|
||||||
|
|
||||||
cd "$DIR/../"
|
|
||||||
REALPATH=$(command -v uu-realpath || command -v realpath)
|
REALPATH=$(command -v uu-realpath || command -v realpath)
|
||||||
MAKE=$(command -v gmake || command -v make)
|
MAKE=$(command -v gmake || command -v make)
|
||||||
|
|
||||||
@@ -33,7 +32,7 @@ function get_test_names {
|
|||||||
local test_file_full_path=$($REALPATH "$test_file")
|
local test_file_full_path=$($REALPATH "$test_file")
|
||||||
local relative_to_samples=$($REALPATH --relative-to "$samples_dir" "$test_file_full_path")
|
local relative_to_samples=$($REALPATH --relative-to "$samples_dir" "$test_file_full_path")
|
||||||
local without_extension="${relative_to_samples%.org}"
|
local without_extension="${relative_to_samples%.org}"
|
||||||
echo "${without_extension/\//_}" | tr '[:upper:]' '[:lower:]'
|
echo "autogen_${without_extension//\//_}" | tr '[:upper:]' '[:lower:]'
|
||||||
else
|
else
|
||||||
echo "$test_file" | tr '[:upper:]' '[:lower:]'
|
echo "$test_file" | tr '[:upper:]' '[:lower:]'
|
||||||
fi
|
fi
|
||||||
@@ -56,7 +55,7 @@ cargo test --no-default-features --features compare --no-fail-fast --lib --test
|
|||||||
EOF
|
EOF
|
||||||
)
|
)
|
||||||
|
|
||||||
docker run "${additional_flags[@]}" --init --rm -v "$($REALPATH ./):/source:ro" --mount source=cargo-cache,target=/usr/local/cargo/registry --mount source=rust-cache,target=/target --env CARGO_TARGET_DIR=/target -w /source --entrypoint "" organic-test sh -c "$init_script"
|
docker run "${additional_flags[@]}" --init --rm --read-only --mount type=tmpfs,destination=/tmp -v "$($REALPATH "$DIR/../"):/source:ro" --mount source=cargo-cache,target=/usr/local/cargo/registry --mount source=rust-cache,target=/target --env CARGO_TARGET_DIR=/target -w /source --entrypoint "" organic-test sh -c "$init_script"
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -4,7 +4,6 @@ set -euo pipefail
|
|||||||
IFS=$'\n\t'
|
IFS=$'\n\t'
|
||||||
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
|
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
|
||||||
|
|
||||||
cd "$DIR/../"
|
|
||||||
REALPATH=$(command -v uu-realpath || command -v realpath)
|
REALPATH=$(command -v uu-realpath || command -v realpath)
|
||||||
|
|
||||||
function main {
|
function main {
|
||||||
@@ -12,7 +11,7 @@ function main {
|
|||||||
|
|
||||||
local test
|
local test
|
||||||
while read test; do
|
while read test; do
|
||||||
cargo test --no-default-features --features compare --no-fail-fast --test test_loader "$test" -- --show-output
|
(cd "$DIR/../" && cargo test --no-default-features --features compare --no-fail-fast --test test_loader "$test" -- --show-output)
|
||||||
done<<<"$test_names"
|
done<<<"$test_names"
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -25,7 +24,7 @@ function get_test_names {
|
|||||||
local test_file_full_path=$($REALPATH "$test_file")
|
local test_file_full_path=$($REALPATH "$test_file")
|
||||||
local relative_to_samples=$($REALPATH --relative-to "$samples_dir" "$test_file_full_path")
|
local relative_to_samples=$($REALPATH --relative-to "$samples_dir" "$test_file_full_path")
|
||||||
local without_extension="${relative_to_samples%.org}"
|
local without_extension="${relative_to_samples%.org}"
|
||||||
echo "${without_extension/\//_}" | tr '[:upper:]' '[:lower:]'
|
echo "${without_extension//\//_}" | tr '[:upper:]' '[:lower:]'
|
||||||
else
|
else
|
||||||
echo "$test_file" | tr '[:upper:]' '[:lower:]'
|
echo "$test_file" | tr '[:upper:]' '[:lower:]'
|
||||||
fi
|
fi
|
||||||
|
|||||||
@@ -7,8 +7,6 @@ DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
|
|||||||
|
|
||||||
: ${PROFILE:="release-lto"}
|
: ${PROFILE:="release-lto"}
|
||||||
|
|
||||||
cd "$DIR/../"
|
|
||||||
|
|
||||||
function main {
|
function main {
|
||||||
local additional_flags=()
|
local additional_flags=()
|
||||||
if [ "$PROFILE" = "dev" ] || [ "$PROFILE" = "debug" ]; then
|
if [ "$PROFILE" = "dev" ] || [ "$PROFILE" = "debug" ]; then
|
||||||
@@ -16,8 +14,8 @@ function main {
|
|||||||
else
|
else
|
||||||
additional_flags+=(--profile "$PROFILE")
|
additional_flags+=(--profile "$PROFILE")
|
||||||
fi
|
fi
|
||||||
cargo build --no-default-features "${additional_flags[@]}"
|
(cd "$DIR/../" && cargo build --no-default-features "${additional_flags[@]}")
|
||||||
time ./target/${PROFILE}/compare
|
time "$DIR/../target/${PROFILE}/parse" "${@}"
|
||||||
}
|
}
|
||||||
|
|
||||||
main "${@}"
|
main "${@}"
|
||||||
|
|||||||
52
src/bin_compare.rs
Normal file
52
src/bin_compare.rs
Normal file
@@ -0,0 +1,52 @@
|
|||||||
|
#![feature(round_char_boundary)]
|
||||||
|
#![feature(exact_size_is_empty)]
|
||||||
|
use std::io::Read;
|
||||||
|
|
||||||
|
use organic::compare::run_anonymous_compare;
|
||||||
|
use organic::compare::run_compare_on_file;
|
||||||
|
|
||||||
|
#[cfg(feature = "tracing")]
|
||||||
|
use crate::init_tracing::init_telemetry;
|
||||||
|
#[cfg(feature = "tracing")]
|
||||||
|
use crate::init_tracing::shutdown_telemetry;
|
||||||
|
#[cfg(feature = "tracing")]
|
||||||
|
mod init_tracing;
|
||||||
|
|
||||||
|
#[cfg(not(feature = "tracing"))]
|
||||||
|
fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||||
|
main_body()
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(feature = "tracing")]
|
||||||
|
fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||||
|
let rt = tokio::runtime::Runtime::new()?;
|
||||||
|
let result = rt.block_on(async {
|
||||||
|
init_telemetry()?;
|
||||||
|
let main_body_result = main_body();
|
||||||
|
shutdown_telemetry()?;
|
||||||
|
main_body_result
|
||||||
|
});
|
||||||
|
result
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
|
||||||
|
fn main_body() -> Result<(), Box<dyn std::error::Error>> {
|
||||||
|
let args = std::env::args().skip(1);
|
||||||
|
if args.is_empty() {
|
||||||
|
let org_contents = read_stdin_to_string()?;
|
||||||
|
run_anonymous_compare(org_contents)
|
||||||
|
} else {
|
||||||
|
for arg in args {
|
||||||
|
run_compare_on_file(arg)?
|
||||||
|
}
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn read_stdin_to_string() -> Result<String, Box<dyn std::error::Error>> {
|
||||||
|
let mut stdin_contents = String::new();
|
||||||
|
std::io::stdin()
|
||||||
|
.lock()
|
||||||
|
.read_to_string(&mut stdin_contents)?;
|
||||||
|
Ok(stdin_contents)
|
||||||
|
}
|
||||||
95
src/compare/compare.rs
Normal file
95
src/compare/compare.rs
Normal file
@@ -0,0 +1,95 @@
|
|||||||
|
use std::path::Path;
|
||||||
|
|
||||||
|
use crate::compare::diff::compare_document;
|
||||||
|
use crate::compare::parse::emacs_parse_anonymous_org_document;
|
||||||
|
use crate::compare::parse::emacs_parse_file_org_document;
|
||||||
|
use crate::compare::parse::get_emacs_version;
|
||||||
|
use crate::compare::parse::get_org_mode_version;
|
||||||
|
use crate::compare::sexp::sexp;
|
||||||
|
use crate::context::GlobalSettings;
|
||||||
|
use crate::context::LocalFileAccessInterface;
|
||||||
|
use crate::parser::parse_file_with_settings;
|
||||||
|
use crate::parser::parse_with_settings;
|
||||||
|
|
||||||
|
pub fn run_anonymous_compare<P: AsRef<str>>(
|
||||||
|
org_contents: P,
|
||||||
|
) -> Result<(), Box<dyn std::error::Error>> {
|
||||||
|
run_anonymous_compare_with_settings(org_contents, &GlobalSettings::default())
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn run_compare_on_file<P: AsRef<Path>>(org_path: P) -> Result<(), Box<dyn std::error::Error>> {
|
||||||
|
run_compare_on_file_with_settings(org_path, &GlobalSettings::default())
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn run_anonymous_compare_with_settings<P: AsRef<str>>(
|
||||||
|
org_contents: P,
|
||||||
|
global_settings: &GlobalSettings,
|
||||||
|
) -> Result<(), Box<dyn std::error::Error>> {
|
||||||
|
// TODO: This is a work-around to pretend that dos line endings do not exist. It would be better to handle the difference in line endings.
|
||||||
|
let org_contents = org_contents.as_ref().replace("\r\n", "\n");
|
||||||
|
let org_contents = org_contents.as_str();
|
||||||
|
print_versions()?;
|
||||||
|
let rust_parsed = parse_with_settings(org_contents, global_settings)?;
|
||||||
|
let org_sexp = emacs_parse_anonymous_org_document(org_contents, global_settings)?;
|
||||||
|
let (_remaining, parsed_sexp) = sexp(org_sexp.as_str()).map_err(|e| e.to_string())?;
|
||||||
|
|
||||||
|
println!("{}\n\n\n", org_contents);
|
||||||
|
println!("{}", org_sexp);
|
||||||
|
println!("{:#?}", rust_parsed);
|
||||||
|
|
||||||
|
// We do the diffing after printing out both parsed forms in case the diffing panics
|
||||||
|
let diff_result = compare_document(&parsed_sexp, &rust_parsed)?;
|
||||||
|
diff_result.print(org_contents)?;
|
||||||
|
|
||||||
|
if diff_result.is_bad() {
|
||||||
|
Err("Diff results do not match.")?;
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn run_compare_on_file_with_settings<P: AsRef<Path>>(
|
||||||
|
org_path: P,
|
||||||
|
global_settings: &GlobalSettings,
|
||||||
|
) -> Result<(), Box<dyn std::error::Error>> {
|
||||||
|
let org_path = org_path.as_ref();
|
||||||
|
print_versions()?;
|
||||||
|
let parent_directory = org_path
|
||||||
|
.parent()
|
||||||
|
.ok_or("Should be contained inside a directory.")?;
|
||||||
|
let org_contents = std::fs::read_to_string(org_path)?;
|
||||||
|
// TODO: This is a work-around to pretend that dos line endings do not exist. It would be better to handle the difference in line endings.
|
||||||
|
let org_contents = org_contents.replace("\r\n", "\n");
|
||||||
|
let org_contents = org_contents.as_str();
|
||||||
|
let file_access_interface = LocalFileAccessInterface {
|
||||||
|
working_directory: Some(parent_directory.to_path_buf()),
|
||||||
|
};
|
||||||
|
let global_settings = {
|
||||||
|
let mut global_settings = global_settings.clone();
|
||||||
|
global_settings.file_access = &file_access_interface;
|
||||||
|
global_settings
|
||||||
|
};
|
||||||
|
let rust_parsed = parse_file_with_settings(org_contents, &global_settings, Some(org_path))?;
|
||||||
|
let org_sexp = emacs_parse_file_org_document(org_path, &global_settings)?;
|
||||||
|
let (_remaining, parsed_sexp) = sexp(org_sexp.as_str()).map_err(|e| e.to_string())?;
|
||||||
|
|
||||||
|
println!("{}\n\n\n", org_contents);
|
||||||
|
println!("{}", org_sexp);
|
||||||
|
println!("{:#?}", rust_parsed);
|
||||||
|
|
||||||
|
// We do the diffing after printing out both parsed forms in case the diffing panics
|
||||||
|
let diff_result = compare_document(&parsed_sexp, &rust_parsed)?;
|
||||||
|
diff_result.print(org_contents)?;
|
||||||
|
|
||||||
|
if diff_result.is_bad() {
|
||||||
|
Err("Diff results do not match.")?;
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn print_versions() -> Result<(), Box<dyn std::error::Error>> {
|
||||||
|
eprintln!("Using emacs version: {}", get_emacs_version()?.trim());
|
||||||
|
eprintln!("Using org-mode version: {}", get_org_mode_version()?.trim());
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
3292
src/compare/diff.rs
3292
src/compare/diff.rs
File diff suppressed because it is too large
Load Diff
552
src/compare/elisp_fact.rs
Normal file
552
src/compare/elisp_fact.rs
Normal file
@@ -0,0 +1,552 @@
|
|||||||
|
use std::borrow::Cow;
|
||||||
|
|
||||||
|
use crate::types::AngleLink;
|
||||||
|
use crate::types::AstNode;
|
||||||
|
use crate::types::BabelCall;
|
||||||
|
use crate::types::Bold;
|
||||||
|
use crate::types::CenterBlock;
|
||||||
|
use crate::types::Citation;
|
||||||
|
use crate::types::CitationReference;
|
||||||
|
use crate::types::Clock;
|
||||||
|
use crate::types::Code;
|
||||||
|
use crate::types::Comment;
|
||||||
|
use crate::types::CommentBlock;
|
||||||
|
use crate::types::DiarySexp;
|
||||||
|
use crate::types::Document;
|
||||||
|
use crate::types::Drawer;
|
||||||
|
use crate::types::DynamicBlock;
|
||||||
|
use crate::types::Element;
|
||||||
|
use crate::types::Entity;
|
||||||
|
use crate::types::ExampleBlock;
|
||||||
|
use crate::types::ExportBlock;
|
||||||
|
use crate::types::ExportSnippet;
|
||||||
|
use crate::types::FixedWidthArea;
|
||||||
|
use crate::types::FootnoteDefinition;
|
||||||
|
use crate::types::FootnoteReference;
|
||||||
|
use crate::types::Heading;
|
||||||
|
use crate::types::HorizontalRule;
|
||||||
|
use crate::types::InlineBabelCall;
|
||||||
|
use crate::types::InlineSourceBlock;
|
||||||
|
use crate::types::Italic;
|
||||||
|
use crate::types::Keyword;
|
||||||
|
use crate::types::LatexEnvironment;
|
||||||
|
use crate::types::LatexFragment;
|
||||||
|
use crate::types::LineBreak;
|
||||||
|
use crate::types::NodeProperty;
|
||||||
|
use crate::types::Object;
|
||||||
|
use crate::types::OrgMacro;
|
||||||
|
use crate::types::Paragraph;
|
||||||
|
use crate::types::PlainLink;
|
||||||
|
use crate::types::PlainList;
|
||||||
|
use crate::types::PlainListItem;
|
||||||
|
use crate::types::PlainText;
|
||||||
|
use crate::types::Planning;
|
||||||
|
use crate::types::PropertyDrawer;
|
||||||
|
use crate::types::QuoteBlock;
|
||||||
|
use crate::types::RadioLink;
|
||||||
|
use crate::types::RadioTarget;
|
||||||
|
use crate::types::RegularLink;
|
||||||
|
use crate::types::Section;
|
||||||
|
use crate::types::SpecialBlock;
|
||||||
|
use crate::types::SrcBlock;
|
||||||
|
use crate::types::StatisticsCookie;
|
||||||
|
use crate::types::StrikeThrough;
|
||||||
|
use crate::types::Subscript;
|
||||||
|
use crate::types::Superscript;
|
||||||
|
use crate::types::Table;
|
||||||
|
use crate::types::TableCell;
|
||||||
|
use crate::types::TableRow;
|
||||||
|
use crate::types::Target;
|
||||||
|
use crate::types::Timestamp;
|
||||||
|
use crate::types::Underline;
|
||||||
|
use crate::types::Verbatim;
|
||||||
|
use crate::types::VerseBlock;
|
||||||
|
|
||||||
|
pub(crate) trait ElispFact<'s> {
|
||||||
|
fn get_elisp_name<'b>(&'b self) -> Cow<'s, str>;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) trait GetElispFact<'s> {
|
||||||
|
fn get_elisp_fact<'b>(&'b self) -> &'b dyn ElispFact<'s>;
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'s, I: ElispFact<'s>> GetElispFact<'s> for I {
|
||||||
|
fn get_elisp_fact<'b>(&'b self) -> &'b dyn ElispFact<'s> {
|
||||||
|
self
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'r, 's> GetElispFact<'s> for AstNode<'r, 's> {
|
||||||
|
fn get_elisp_fact<'b>(&'b self) -> &'b dyn ElispFact<'s> {
|
||||||
|
match self {
|
||||||
|
AstNode::Document(inner) => *inner,
|
||||||
|
AstNode::Heading(inner) => *inner,
|
||||||
|
AstNode::Section(inner) => *inner,
|
||||||
|
AstNode::Paragraph(inner) => *inner,
|
||||||
|
AstNode::PlainList(inner) => *inner,
|
||||||
|
AstNode::PlainListItem(inner) => *inner,
|
||||||
|
AstNode::CenterBlock(inner) => *inner,
|
||||||
|
AstNode::QuoteBlock(inner) => *inner,
|
||||||
|
AstNode::SpecialBlock(inner) => *inner,
|
||||||
|
AstNode::DynamicBlock(inner) => *inner,
|
||||||
|
AstNode::FootnoteDefinition(inner) => *inner,
|
||||||
|
AstNode::Comment(inner) => *inner,
|
||||||
|
AstNode::Drawer(inner) => *inner,
|
||||||
|
AstNode::PropertyDrawer(inner) => *inner,
|
||||||
|
AstNode::NodeProperty(inner) => *inner,
|
||||||
|
AstNode::Table(inner) => *inner,
|
||||||
|
AstNode::TableRow(inner) => *inner,
|
||||||
|
AstNode::VerseBlock(inner) => *inner,
|
||||||
|
AstNode::CommentBlock(inner) => *inner,
|
||||||
|
AstNode::ExampleBlock(inner) => *inner,
|
||||||
|
AstNode::ExportBlock(inner) => *inner,
|
||||||
|
AstNode::SrcBlock(inner) => *inner,
|
||||||
|
AstNode::Clock(inner) => *inner,
|
||||||
|
AstNode::DiarySexp(inner) => *inner,
|
||||||
|
AstNode::Planning(inner) => *inner,
|
||||||
|
AstNode::FixedWidthArea(inner) => *inner,
|
||||||
|
AstNode::HorizontalRule(inner) => *inner,
|
||||||
|
AstNode::Keyword(inner) => *inner,
|
||||||
|
AstNode::BabelCall(inner) => *inner,
|
||||||
|
AstNode::LatexEnvironment(inner) => *inner,
|
||||||
|
AstNode::Bold(inner) => *inner,
|
||||||
|
AstNode::Italic(inner) => *inner,
|
||||||
|
AstNode::Underline(inner) => *inner,
|
||||||
|
AstNode::StrikeThrough(inner) => *inner,
|
||||||
|
AstNode::Code(inner) => *inner,
|
||||||
|
AstNode::Verbatim(inner) => *inner,
|
||||||
|
AstNode::PlainText(inner) => *inner,
|
||||||
|
AstNode::RegularLink(inner) => *inner,
|
||||||
|
AstNode::RadioLink(inner) => *inner,
|
||||||
|
AstNode::RadioTarget(inner) => *inner,
|
||||||
|
AstNode::PlainLink(inner) => *inner,
|
||||||
|
AstNode::AngleLink(inner) => *inner,
|
||||||
|
AstNode::OrgMacro(inner) => *inner,
|
||||||
|
AstNode::Entity(inner) => *inner,
|
||||||
|
AstNode::LatexFragment(inner) => *inner,
|
||||||
|
AstNode::ExportSnippet(inner) => *inner,
|
||||||
|
AstNode::FootnoteReference(inner) => *inner,
|
||||||
|
AstNode::Citation(inner) => *inner,
|
||||||
|
AstNode::CitationReference(inner) => *inner,
|
||||||
|
AstNode::InlineBabelCall(inner) => *inner,
|
||||||
|
AstNode::InlineSourceBlock(inner) => *inner,
|
||||||
|
AstNode::LineBreak(inner) => *inner,
|
||||||
|
AstNode::Target(inner) => *inner,
|
||||||
|
AstNode::StatisticsCookie(inner) => *inner,
|
||||||
|
AstNode::Subscript(inner) => *inner,
|
||||||
|
AstNode::Superscript(inner) => *inner,
|
||||||
|
AstNode::TableCell(inner) => *inner,
|
||||||
|
AstNode::Timestamp(inner) => *inner,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'s> GetElispFact<'s> for Element<'s> {
|
||||||
|
fn get_elisp_fact<'b>(&'b self) -> &'b dyn ElispFact<'s> {
|
||||||
|
match self {
|
||||||
|
Element::Paragraph(inner) => inner,
|
||||||
|
Element::PlainList(inner) => inner,
|
||||||
|
Element::CenterBlock(inner) => inner,
|
||||||
|
Element::QuoteBlock(inner) => inner,
|
||||||
|
Element::SpecialBlock(inner) => inner,
|
||||||
|
Element::DynamicBlock(inner) => inner,
|
||||||
|
Element::FootnoteDefinition(inner) => inner,
|
||||||
|
Element::Comment(inner) => inner,
|
||||||
|
Element::Drawer(inner) => inner,
|
||||||
|
Element::PropertyDrawer(inner) => inner,
|
||||||
|
Element::Table(inner) => inner,
|
||||||
|
Element::VerseBlock(inner) => inner,
|
||||||
|
Element::CommentBlock(inner) => inner,
|
||||||
|
Element::ExampleBlock(inner) => inner,
|
||||||
|
Element::ExportBlock(inner) => inner,
|
||||||
|
Element::SrcBlock(inner) => inner,
|
||||||
|
Element::Clock(inner) => inner,
|
||||||
|
Element::DiarySexp(inner) => inner,
|
||||||
|
Element::Planning(inner) => inner,
|
||||||
|
Element::FixedWidthArea(inner) => inner,
|
||||||
|
Element::HorizontalRule(inner) => inner,
|
||||||
|
Element::Keyword(inner) => inner,
|
||||||
|
Element::BabelCall(inner) => inner,
|
||||||
|
Element::LatexEnvironment(inner) => inner,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'s> GetElispFact<'s> for Object<'s> {
|
||||||
|
fn get_elisp_fact<'b>(&'b self) -> &'b dyn ElispFact<'s> {
|
||||||
|
match self {
|
||||||
|
Object::Bold(inner) => inner,
|
||||||
|
Object::Italic(inner) => inner,
|
||||||
|
Object::Underline(inner) => inner,
|
||||||
|
Object::StrikeThrough(inner) => inner,
|
||||||
|
Object::Code(inner) => inner,
|
||||||
|
Object::Verbatim(inner) => inner,
|
||||||
|
Object::PlainText(inner) => inner,
|
||||||
|
Object::RegularLink(inner) => inner,
|
||||||
|
Object::RadioLink(inner) => inner,
|
||||||
|
Object::RadioTarget(inner) => inner,
|
||||||
|
Object::PlainLink(inner) => inner,
|
||||||
|
Object::AngleLink(inner) => inner,
|
||||||
|
Object::OrgMacro(inner) => inner,
|
||||||
|
Object::Entity(inner) => inner,
|
||||||
|
Object::LatexFragment(inner) => inner,
|
||||||
|
Object::ExportSnippet(inner) => inner,
|
||||||
|
Object::FootnoteReference(inner) => inner,
|
||||||
|
Object::Citation(inner) => inner,
|
||||||
|
Object::CitationReference(inner) => inner,
|
||||||
|
Object::InlineBabelCall(inner) => inner,
|
||||||
|
Object::InlineSourceBlock(inner) => inner,
|
||||||
|
Object::LineBreak(inner) => inner,
|
||||||
|
Object::Target(inner) => inner,
|
||||||
|
Object::StatisticsCookie(inner) => inner,
|
||||||
|
Object::Subscript(inner) => inner,
|
||||||
|
Object::Superscript(inner) => inner,
|
||||||
|
Object::Timestamp(inner) => inner,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'s> ElispFact<'s> for Document<'s> {
|
||||||
|
fn get_elisp_name<'b>(&'b self) -> Cow<'s, str> {
|
||||||
|
"org-data".into()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'s> ElispFact<'s> for Section<'s> {
|
||||||
|
fn get_elisp_name<'b>(&'b self) -> Cow<'s, str> {
|
||||||
|
"section".into()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'s> ElispFact<'s> for Heading<'s> {
|
||||||
|
fn get_elisp_name<'b>(&'b self) -> Cow<'s, str> {
|
||||||
|
"headline".into()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'s> ElispFact<'s> for PlainList<'s> {
|
||||||
|
fn get_elisp_name<'b>(&'b self) -> Cow<'s, str> {
|
||||||
|
"plain-list".into()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'s> ElispFact<'s> for PlainListItem<'s> {
|
||||||
|
fn get_elisp_name<'b>(&'b self) -> Cow<'s, str> {
|
||||||
|
"item".into()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'s> ElispFact<'s> for CenterBlock<'s> {
|
||||||
|
fn get_elisp_name<'b>(&'b self) -> Cow<'s, str> {
|
||||||
|
"center-block".into()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'s> ElispFact<'s> for QuoteBlock<'s> {
|
||||||
|
fn get_elisp_name<'b>(&'b self) -> Cow<'s, str> {
|
||||||
|
"quote-block".into()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'s> ElispFact<'s> for SpecialBlock<'s> {
|
||||||
|
fn get_elisp_name<'b>(&'b self) -> Cow<'s, str> {
|
||||||
|
"special-block".into()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'s> ElispFact<'s> for DynamicBlock<'s> {
|
||||||
|
fn get_elisp_name<'b>(&'b self) -> Cow<'s, str> {
|
||||||
|
"dynamic-block".into()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'s> ElispFact<'s> for FootnoteDefinition<'s> {
|
||||||
|
fn get_elisp_name<'b>(&'b self) -> Cow<'s, str> {
|
||||||
|
"footnote-definition".into()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'s> ElispFact<'s> for Drawer<'s> {
|
||||||
|
fn get_elisp_name<'b>(&'b self) -> Cow<'s, str> {
|
||||||
|
"drawer".into()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'s> ElispFact<'s> for PropertyDrawer<'s> {
|
||||||
|
fn get_elisp_name<'b>(&'b self) -> Cow<'s, str> {
|
||||||
|
"property-drawer".into()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'s> ElispFact<'s> for NodeProperty<'s> {
|
||||||
|
fn get_elisp_name<'b>(&'b self) -> Cow<'s, str> {
|
||||||
|
"node-property".into()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'s> ElispFact<'s> for Table<'s> {
|
||||||
|
fn get_elisp_name<'b>(&'b self) -> Cow<'s, str> {
|
||||||
|
"table".into()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'s> ElispFact<'s> for TableRow<'s> {
|
||||||
|
fn get_elisp_name<'b>(&'b self) -> Cow<'s, str> {
|
||||||
|
"table-row".into()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'s> ElispFact<'s> for Paragraph<'s> {
|
||||||
|
fn get_elisp_name<'b>(&'b self) -> Cow<'s, str> {
|
||||||
|
"paragraph".into()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'s> ElispFact<'s> for TableCell<'s> {
|
||||||
|
fn get_elisp_name<'b>(&'b self) -> Cow<'s, str> {
|
||||||
|
"table-cell".into()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'s> ElispFact<'s> for Comment<'s> {
|
||||||
|
fn get_elisp_name<'b>(&'b self) -> Cow<'s, str> {
|
||||||
|
"comment".into()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'s> ElispFact<'s> for VerseBlock<'s> {
|
||||||
|
fn get_elisp_name<'b>(&'b self) -> Cow<'s, str> {
|
||||||
|
"verse-block".into()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
impl<'s> ElispFact<'s> for CommentBlock<'s> {
|
||||||
|
fn get_elisp_name<'b>(&'b self) -> Cow<'s, str> {
|
||||||
|
"comment-block".into()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
impl<'s> ElispFact<'s> for ExampleBlock<'s> {
|
||||||
|
fn get_elisp_name<'b>(&'b self) -> Cow<'s, str> {
|
||||||
|
"example-block".into()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
impl<'s> ElispFact<'s> for ExportBlock<'s> {
|
||||||
|
fn get_elisp_name<'b>(&'b self) -> Cow<'s, str> {
|
||||||
|
"export-block".into()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
impl<'s> ElispFact<'s> for SrcBlock<'s> {
|
||||||
|
fn get_elisp_name<'b>(&'b self) -> Cow<'s, str> {
|
||||||
|
"src-block".into()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'s> ElispFact<'s> for Clock<'s> {
|
||||||
|
fn get_elisp_name<'b>(&'b self) -> Cow<'s, str> {
|
||||||
|
"clock".into()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'s> ElispFact<'s> for DiarySexp<'s> {
|
||||||
|
fn get_elisp_name<'b>(&'b self) -> Cow<'s, str> {
|
||||||
|
"diary-sexp".into()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'s> ElispFact<'s> for Planning<'s> {
|
||||||
|
fn get_elisp_name<'b>(&'b self) -> Cow<'s, str> {
|
||||||
|
"planning".into()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'s> ElispFact<'s> for FixedWidthArea<'s> {
|
||||||
|
fn get_elisp_name<'b>(&'b self) -> Cow<'s, str> {
|
||||||
|
"fixed-width".into()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'s> ElispFact<'s> for HorizontalRule<'s> {
|
||||||
|
fn get_elisp_name<'b>(&'b self) -> Cow<'s, str> {
|
||||||
|
"horizontal-rule".into()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'s> ElispFact<'s> for Keyword<'s> {
|
||||||
|
fn get_elisp_name<'b>(&'b self) -> Cow<'s, str> {
|
||||||
|
"keyword".into()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'s> ElispFact<'s> for BabelCall<'s> {
|
||||||
|
fn get_elisp_name<'b>(&'b self) -> Cow<'s, str> {
|
||||||
|
"babel-call".into()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'s> ElispFact<'s> for LatexEnvironment<'s> {
|
||||||
|
fn get_elisp_name<'b>(&'b self) -> Cow<'s, str> {
|
||||||
|
"latex-environment".into()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'s> ElispFact<'s> for Bold<'s> {
|
||||||
|
fn get_elisp_name<'b>(&'b self) -> Cow<'s, str> {
|
||||||
|
"bold".into()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'s> ElispFact<'s> for Italic<'s> {
|
||||||
|
fn get_elisp_name<'b>(&'b self) -> Cow<'s, str> {
|
||||||
|
"italic".into()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'s> ElispFact<'s> for Underline<'s> {
|
||||||
|
fn get_elisp_name<'b>(&'b self) -> Cow<'s, str> {
|
||||||
|
"underline".into()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'s> ElispFact<'s> for StrikeThrough<'s> {
|
||||||
|
fn get_elisp_name<'b>(&'b self) -> Cow<'s, str> {
|
||||||
|
"strike-through".into()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'s> ElispFact<'s> for Code<'s> {
|
||||||
|
fn get_elisp_name<'b>(&'b self) -> Cow<'s, str> {
|
||||||
|
"code".into()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'s> ElispFact<'s> for Verbatim<'s> {
|
||||||
|
fn get_elisp_name<'b>(&'b self) -> Cow<'s, str> {
|
||||||
|
"verbatim".into()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'s> ElispFact<'s> for RegularLink<'s> {
|
||||||
|
fn get_elisp_name<'b>(&'b self) -> Cow<'s, str> {
|
||||||
|
"link".into()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'s> ElispFact<'s> for RadioLink<'s> {
|
||||||
|
fn get_elisp_name<'b>(&'b self) -> Cow<'s, str> {
|
||||||
|
"link".into()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'s> ElispFact<'s> for RadioTarget<'s> {
|
||||||
|
fn get_elisp_name<'b>(&'b self) -> Cow<'s, str> {
|
||||||
|
"radio-target".into()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'s> ElispFact<'s> for PlainLink<'s> {
|
||||||
|
fn get_elisp_name<'b>(&'b self) -> Cow<'s, str> {
|
||||||
|
"link".into()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'s> ElispFact<'s> for AngleLink<'s> {
|
||||||
|
fn get_elisp_name<'b>(&'b self) -> Cow<'s, str> {
|
||||||
|
"link".into()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'s> ElispFact<'s> for OrgMacro<'s> {
|
||||||
|
fn get_elisp_name<'b>(&'b self) -> Cow<'s, str> {
|
||||||
|
"macro".into()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'s> ElispFact<'s> for Entity<'s> {
|
||||||
|
fn get_elisp_name<'b>(&'b self) -> Cow<'s, str> {
|
||||||
|
"entity".into()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'s> ElispFact<'s> for LatexFragment<'s> {
|
||||||
|
fn get_elisp_name<'b>(&'b self) -> Cow<'s, str> {
|
||||||
|
"latex-fragment".into()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'s> ElispFact<'s> for ExportSnippet<'s> {
|
||||||
|
fn get_elisp_name<'b>(&'b self) -> Cow<'s, str> {
|
||||||
|
"export-snippet".into()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'s> ElispFact<'s> for FootnoteReference<'s> {
|
||||||
|
fn get_elisp_name<'b>(&'b self) -> Cow<'s, str> {
|
||||||
|
"footnote-reference".into()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'s> ElispFact<'s> for Citation<'s> {
|
||||||
|
fn get_elisp_name<'b>(&'b self) -> Cow<'s, str> {
|
||||||
|
"citation".into()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'s> ElispFact<'s> for CitationReference<'s> {
|
||||||
|
fn get_elisp_name<'b>(&'b self) -> Cow<'s, str> {
|
||||||
|
"citation-reference".into()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'s> ElispFact<'s> for InlineBabelCall<'s> {
|
||||||
|
fn get_elisp_name<'b>(&'b self) -> Cow<'s, str> {
|
||||||
|
"inline-babel-call".into()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'s> ElispFact<'s> for InlineSourceBlock<'s> {
|
||||||
|
fn get_elisp_name<'b>(&'b self) -> Cow<'s, str> {
|
||||||
|
"inline-src-block".into()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'s> ElispFact<'s> for LineBreak<'s> {
|
||||||
|
fn get_elisp_name<'b>(&'b self) -> Cow<'s, str> {
|
||||||
|
"line-break".into()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'s> ElispFact<'s> for Target<'s> {
|
||||||
|
fn get_elisp_name<'b>(&'b self) -> Cow<'s, str> {
|
||||||
|
"target".into()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'s> ElispFact<'s> for StatisticsCookie<'s> {
|
||||||
|
fn get_elisp_name<'b>(&'b self) -> Cow<'s, str> {
|
||||||
|
"statistics-cookie".into()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'s> ElispFact<'s> for Subscript<'s> {
|
||||||
|
fn get_elisp_name<'b>(&'b self) -> Cow<'s, str> {
|
||||||
|
"subscript".into()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'s> ElispFact<'s> for Superscript<'s> {
|
||||||
|
fn get_elisp_name<'b>(&'b self) -> Cow<'s, str> {
|
||||||
|
"superscript".into()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'s> ElispFact<'s> for Timestamp<'s> {
|
||||||
|
fn get_elisp_name<'b>(&'b self) -> Cow<'s, str> {
|
||||||
|
"timestamp".into()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'s> ElispFact<'s> for PlainText<'s> {
|
||||||
|
fn get_elisp_name<'b>(&'b self) -> Cow<'s, str> {
|
||||||
|
// plain text from upstream emacs does not actually have a name but this is included here to make rendering the status diff easier.
|
||||||
|
"plain-text".into()
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,7 +1,10 @@
|
|||||||
|
mod compare;
|
||||||
mod diff;
|
mod diff;
|
||||||
|
mod elisp_fact;
|
||||||
mod parse;
|
mod parse;
|
||||||
|
mod sexp;
|
||||||
mod util;
|
mod util;
|
||||||
pub use diff::compare_document;
|
pub use compare::run_anonymous_compare;
|
||||||
pub use parse::emacs_parse_org_document;
|
pub use compare::run_anonymous_compare_with_settings;
|
||||||
pub use parse::get_emacs_version;
|
pub use compare::run_compare_on_file;
|
||||||
pub use parse::get_org_mode_version;
|
pub use compare::run_compare_on_file_with_settings;
|
||||||
|
|||||||
@@ -1,6 +1,34 @@
|
|||||||
|
use std::path::Path;
|
||||||
use std::process::Command;
|
use std::process::Command;
|
||||||
|
|
||||||
pub fn emacs_parse_org_document<C>(file_contents: C) -> Result<String, Box<dyn std::error::Error>>
|
use crate::context::HeadlineLevelFilter;
|
||||||
|
use crate::settings::GlobalSettings;
|
||||||
|
|
||||||
|
/// Generate elisp to configure org-mode parsing settings
|
||||||
|
///
|
||||||
|
/// Currently only org-list-allow-alphabetical is supported.
|
||||||
|
fn global_settings_elisp(global_settings: &GlobalSettings) -> String {
|
||||||
|
// This string concatenation is wildly inefficient but its only called in tests 🤷.
|
||||||
|
let mut ret = "".to_owned();
|
||||||
|
if global_settings.list_allow_alphabetical {
|
||||||
|
ret += "(setq org-list-allow-alphabetical t)\n"
|
||||||
|
}
|
||||||
|
if global_settings.tab_width != crate::settings::DEFAULT_TAB_WIDTH {
|
||||||
|
ret += format!("(setq-default tab-width {})", global_settings.tab_width).as_str();
|
||||||
|
}
|
||||||
|
if global_settings.odd_levels_only != HeadlineLevelFilter::default() {
|
||||||
|
ret += match global_settings.odd_levels_only {
|
||||||
|
HeadlineLevelFilter::Odd => "(setq org-odd-levels-only t)\n",
|
||||||
|
HeadlineLevelFilter::OddEven => "(setq org-odd-levels-only nil)\n",
|
||||||
|
};
|
||||||
|
}
|
||||||
|
ret
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn emacs_parse_anonymous_org_document<C>(
|
||||||
|
file_contents: C,
|
||||||
|
global_settings: &GlobalSettings,
|
||||||
|
) -> Result<String, Box<dyn std::error::Error>>
|
||||||
where
|
where
|
||||||
C: AsRef<str>,
|
C: AsRef<str>,
|
||||||
{
|
{
|
||||||
@@ -8,21 +36,68 @@ where
|
|||||||
let elisp_script = format!(
|
let elisp_script = format!(
|
||||||
r#"(progn
|
r#"(progn
|
||||||
(erase-buffer)
|
(erase-buffer)
|
||||||
|
(require 'org)
|
||||||
|
(defun org-table-align () t)
|
||||||
(insert "{escaped_file_contents}")
|
(insert "{escaped_file_contents}")
|
||||||
|
{global_settings}
|
||||||
(org-mode)
|
(org-mode)
|
||||||
(message "%s" (pp-to-string (org-element-parse-buffer)))
|
(message "%s" (pp-to-string (org-element-parse-buffer)))
|
||||||
)"#,
|
)"#,
|
||||||
escaped_file_contents = escaped_file_contents
|
escaped_file_contents = escaped_file_contents,
|
||||||
|
global_settings = global_settings_elisp(global_settings)
|
||||||
);
|
);
|
||||||
let mut cmd = Command::new("emacs");
|
let mut cmd = Command::new("emacs");
|
||||||
let proc = cmd
|
let cmd = cmd
|
||||||
.arg("-q")
|
.arg("-q")
|
||||||
.arg("--no-site-file")
|
.arg("--no-site-file")
|
||||||
.arg("--no-splash")
|
.arg("--no-splash")
|
||||||
.arg("--batch")
|
.arg("--batch")
|
||||||
.arg("--eval")
|
.arg("--eval")
|
||||||
.arg(elisp_script);
|
.arg(elisp_script);
|
||||||
let out = proc.output()?;
|
let out = cmd.output()?;
|
||||||
|
out.status.exit_ok()?;
|
||||||
|
let org_sexp = out.stderr;
|
||||||
|
Ok(String::from_utf8(org_sexp)?)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn emacs_parse_file_org_document<P>(
|
||||||
|
file_path: P,
|
||||||
|
global_settings: &GlobalSettings,
|
||||||
|
) -> Result<String, Box<dyn std::error::Error>>
|
||||||
|
where
|
||||||
|
P: AsRef<Path>,
|
||||||
|
{
|
||||||
|
let file_path = file_path.as_ref().canonicalize()?;
|
||||||
|
let containing_directory = file_path.parent().ok_or(format!(
|
||||||
|
"Failed to get containing directory for path {}",
|
||||||
|
file_path.display()
|
||||||
|
))?;
|
||||||
|
let elisp_script = format!(
|
||||||
|
r#"(progn
|
||||||
|
(require 'org)
|
||||||
|
(defun org-table-align () t)
|
||||||
|
(setq vc-handled-backends nil)
|
||||||
|
{global_settings}
|
||||||
|
(find-file-read-only "{file_path}")
|
||||||
|
(org-mode)
|
||||||
|
(message "%s" (pp-to-string (org-element-parse-buffer)))
|
||||||
|
)"#,
|
||||||
|
global_settings = global_settings_elisp(global_settings),
|
||||||
|
file_path = file_path
|
||||||
|
.as_os_str()
|
||||||
|
.to_str()
|
||||||
|
.expect("File name should be valid utf-8.")
|
||||||
|
);
|
||||||
|
let mut cmd = Command::new("emacs");
|
||||||
|
let cmd = cmd
|
||||||
|
.current_dir(containing_directory)
|
||||||
|
.arg("-q")
|
||||||
|
.arg("--no-site-file")
|
||||||
|
.arg("--no-splash")
|
||||||
|
.arg("--batch")
|
||||||
|
.arg("--eval")
|
||||||
|
.arg(elisp_script);
|
||||||
|
let out = cmd.output()?;
|
||||||
out.status.exit_ok()?;
|
out.status.exit_ok()?;
|
||||||
let org_sexp = out.stderr;
|
let org_sexp = out.stderr;
|
||||||
Ok(String::from_utf8(org_sexp)?)
|
Ok(String::from_utf8(org_sexp)?)
|
||||||
@@ -55,7 +130,7 @@ pub fn get_emacs_version() -> Result<String, Box<dyn std::error::Error>> {
|
|||||||
(message "%s" (version))
|
(message "%s" (version))
|
||||||
)"#;
|
)"#;
|
||||||
let mut cmd = Command::new("emacs");
|
let mut cmd = Command::new("emacs");
|
||||||
let proc = cmd
|
let cmd = cmd
|
||||||
.arg("-q")
|
.arg("-q")
|
||||||
.arg("--no-site-file")
|
.arg("--no-site-file")
|
||||||
.arg("--no-splash")
|
.arg("--no-splash")
|
||||||
@@ -63,7 +138,7 @@ pub fn get_emacs_version() -> Result<String, Box<dyn std::error::Error>> {
|
|||||||
.arg("--eval")
|
.arg("--eval")
|
||||||
.arg(elisp_script);
|
.arg(elisp_script);
|
||||||
|
|
||||||
let out = proc.output()?;
|
let out = cmd.output()?;
|
||||||
out.status.exit_ok()?;
|
out.status.exit_ok()?;
|
||||||
Ok(String::from_utf8(out.stderr)?)
|
Ok(String::from_utf8(out.stderr)?)
|
||||||
}
|
}
|
||||||
@@ -74,7 +149,7 @@ pub fn get_org_mode_version() -> Result<String, Box<dyn std::error::Error>> {
|
|||||||
(message "%s" (org-version nil t nil))
|
(message "%s" (org-version nil t nil))
|
||||||
)"#;
|
)"#;
|
||||||
let mut cmd = Command::new("emacs");
|
let mut cmd = Command::new("emacs");
|
||||||
let proc = cmd
|
let cmd = cmd
|
||||||
.arg("-q")
|
.arg("-q")
|
||||||
.arg("--no-site-file")
|
.arg("--no-site-file")
|
||||||
.arg("--no-splash")
|
.arg("--no-splash")
|
||||||
@@ -82,7 +157,7 @@ pub fn get_org_mode_version() -> Result<String, Box<dyn std::error::Error>> {
|
|||||||
.arg("--eval")
|
.arg("--eval")
|
||||||
.arg(elisp_script);
|
.arg(elisp_script);
|
||||||
|
|
||||||
let out = proc.output()?;
|
let out = cmd.output()?;
|
||||||
out.status.exit_ok()?;
|
out.status.exit_ok()?;
|
||||||
Ok(String::from_utf8(out.stderr)?)
|
Ok(String::from_utf8(out.stderr)?)
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,9 +1,10 @@
|
|||||||
use std::collections::HashMap;
|
use std::collections::HashMap;
|
||||||
|
|
||||||
use nom::branch::alt;
|
use nom::branch::alt;
|
||||||
use nom::bytes::complete::escaped;
|
|
||||||
use nom::bytes::complete::tag;
|
use nom::bytes::complete::tag;
|
||||||
use nom::bytes::complete::take_till1;
|
use nom::bytes::complete::take_till1;
|
||||||
|
use nom::character::complete::anychar;
|
||||||
|
use nom::character::complete::digit1;
|
||||||
use nom::character::complete::multispace0;
|
use nom::character::complete::multispace0;
|
||||||
use nom::character::complete::multispace1;
|
use nom::character::complete::multispace1;
|
||||||
use nom::character::complete::one_of;
|
use nom::character::complete::one_of;
|
||||||
@@ -11,16 +12,16 @@ use nom::combinator::map;
|
|||||||
use nom::combinator::not;
|
use nom::combinator::not;
|
||||||
use nom::combinator::opt;
|
use nom::combinator::opt;
|
||||||
use nom::combinator::peek;
|
use nom::combinator::peek;
|
||||||
|
use nom::combinator::recognize;
|
||||||
use nom::multi::separated_list1;
|
use nom::multi::separated_list1;
|
||||||
use nom::sequence::delimited;
|
use nom::sequence::delimited;
|
||||||
use nom::sequence::preceded;
|
use nom::sequence::preceded;
|
||||||
use nom::sequence::tuple;
|
use nom::sequence::tuple;
|
||||||
|
|
||||||
use super::org_source::convert_error;
|
|
||||||
use super::org_source::OrgSource;
|
|
||||||
use super::util::get_consumed;
|
|
||||||
use crate::error::Res;
|
use crate::error::Res;
|
||||||
|
|
||||||
|
const MAX_OCTAL_LENGTH: usize = 3;
|
||||||
|
|
||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
pub enum Token<'s> {
|
pub enum Token<'s> {
|
||||||
Atom(&'s str),
|
Atom(&'s str),
|
||||||
@@ -31,45 +32,50 @@ pub enum Token<'s> {
|
|||||||
|
|
||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
pub struct TextWithProperties<'s> {
|
pub struct TextWithProperties<'s> {
|
||||||
pub text: &'s str,
|
pub(crate) text: &'s str,
|
||||||
pub properties: Vec<Token<'s>>,
|
pub(crate) properties: Vec<Token<'s>>,
|
||||||
}
|
}
|
||||||
|
|
||||||
enum ParseState {
|
enum ParseState {
|
||||||
Normal,
|
Normal,
|
||||||
Escape,
|
Escape,
|
||||||
|
Octal(Vec<u8>),
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'s> Token<'s> {
|
impl<'s> Token<'s> {
|
||||||
pub fn as_vector<'p>(&'p self) -> Result<&'p Vec<Token<'s>>, Box<dyn std::error::Error>> {
|
pub(crate) fn as_vector<'p>(
|
||||||
|
&'p self,
|
||||||
|
) -> Result<&'p Vec<Token<'s>>, Box<dyn std::error::Error>> {
|
||||||
Ok(match self {
|
Ok(match self {
|
||||||
Token::Vector(children) => Ok(children),
|
Token::Vector(children) => Ok(children),
|
||||||
_ => Err(format!("wrong token type, expected vector: {:?}", self)),
|
_ => Err(format!("wrong token type, expected vector: {:?}", self)),
|
||||||
}?)
|
}?)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn as_list<'p>(&'p self) -> Result<&'p Vec<Token<'s>>, Box<dyn std::error::Error>> {
|
pub(crate) fn as_list<'p>(&'p self) -> Result<&'p Vec<Token<'s>>, Box<dyn std::error::Error>> {
|
||||||
Ok(match self {
|
Ok(match self {
|
||||||
Token::List(children) => Ok(children),
|
Token::List(children) => Ok(children),
|
||||||
_ => Err(format!("wrong token type, expected list: {:?}", self)),
|
_ => Err(format!("wrong token type, expected list: {:?}", self)),
|
||||||
}?)
|
}?)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn as_atom<'p>(&'p self) -> Result<&'s str, Box<dyn std::error::Error>> {
|
pub(crate) fn as_atom<'p>(&'p self) -> Result<&'s str, Box<dyn std::error::Error>> {
|
||||||
Ok(match self {
|
Ok(match self {
|
||||||
Token::Atom(body) => Ok(*body),
|
Token::Atom(body) => Ok(*body),
|
||||||
_ => Err(format!("wrong token type, expected atom: {:?}", self)),
|
_ => Err(format!("wrong token type, expected atom: {:?}", self)),
|
||||||
}?)
|
}?)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn as_text<'p>(&'p self) -> Result<&'p TextWithProperties<'s>, Box<dyn std::error::Error>> {
|
pub(crate) fn as_text<'p>(
|
||||||
|
&'p self,
|
||||||
|
) -> Result<&'p TextWithProperties<'s>, Box<dyn std::error::Error>> {
|
||||||
Ok(match self {
|
Ok(match self {
|
||||||
Token::TextWithProperties(body) => Ok(body),
|
Token::TextWithProperties(body) => Ok(body),
|
||||||
_ => Err(format!("wrong token type, expected text: {:?}", self)),
|
_ => Err(format!("wrong token type, expected text: {:?}", self)),
|
||||||
}?)
|
}?)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn as_map<'p>(
|
pub(crate) fn as_map<'p>(
|
||||||
&'p self,
|
&'p self,
|
||||||
) -> Result<HashMap<&'s str, &'p Token<'s>>, Box<dyn std::error::Error>> {
|
) -> Result<HashMap<&'s str, &'p Token<'s>>, Box<dyn std::error::Error>> {
|
||||||
let mut hashmap = HashMap::new();
|
let mut hashmap = HashMap::new();
|
||||||
@@ -95,8 +101,27 @@ impl<'s> Token<'s> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn unquote(text: &str) -> Result<String, Box<dyn std::error::Error>> {
|
/// Check if the child string slice is a slice of the parent string slice.
|
||||||
let mut out = String::with_capacity(text.len());
|
fn is_slice_of(parent: &str, child: &str) -> bool {
|
||||||
|
let parent_start = parent.as_ptr() as usize;
|
||||||
|
let parent_end = parent_start + parent.len();
|
||||||
|
let child_start = child.as_ptr() as usize;
|
||||||
|
let child_end = child_start + child.len();
|
||||||
|
child_start >= parent_start && child_end <= parent_end
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get a slice of the string that was consumed in a parser using the original input to the parser and the remaining input after the parser.
|
||||||
|
fn get_consumed<'s>(input: &'s str, remaining: &'s str) -> &'s str {
|
||||||
|
debug_assert!(is_slice_of(input, remaining));
|
||||||
|
let source = {
|
||||||
|
let offset = remaining.as_ptr() as usize - input.as_ptr() as usize;
|
||||||
|
&input[..offset]
|
||||||
|
};
|
||||||
|
source.into()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn unquote(text: &str) -> Result<String, Box<dyn std::error::Error>> {
|
||||||
|
let mut out: Vec<u8> = Vec::with_capacity(text.len());
|
||||||
if !text.starts_with(r#"""#) {
|
if !text.starts_with(r#"""#) {
|
||||||
return Err("Quoted text does not start with quote.".into());
|
return Err("Quoted text does not start with quote.".into());
|
||||||
}
|
}
|
||||||
@@ -105,56 +130,70 @@ pub fn unquote(text: &str) -> Result<String, Box<dyn std::error::Error>> {
|
|||||||
}
|
}
|
||||||
let interior_text = &text[1..(text.len() - 1)];
|
let interior_text = &text[1..(text.len() - 1)];
|
||||||
let mut state = ParseState::Normal;
|
let mut state = ParseState::Normal;
|
||||||
for current_char in interior_text.chars().into_iter() {
|
for current_char in interior_text.bytes().into_iter() {
|
||||||
|
// Check to see if octal finished
|
||||||
state = match (state, current_char) {
|
state = match (state, current_char) {
|
||||||
(ParseState::Normal, '\\') => ParseState::Escape,
|
(ParseState::Octal(octal), b'0'..=b'7') if octal.len() < MAX_OCTAL_LENGTH => {
|
||||||
|
ParseState::Octal(octal)
|
||||||
|
}
|
||||||
|
(ParseState::Octal(octal), _) => {
|
||||||
|
let octal_number_string = String::from_utf8(octal)?;
|
||||||
|
let decoded_byte = u8::from_str_radix(&octal_number_string, 8)?;
|
||||||
|
out.push(decoded_byte);
|
||||||
|
ParseState::Normal
|
||||||
|
}
|
||||||
|
(state, _) => state,
|
||||||
|
};
|
||||||
|
|
||||||
|
state = match (state, current_char) {
|
||||||
|
(ParseState::Normal, b'\\') => ParseState::Escape,
|
||||||
(ParseState::Normal, _) => {
|
(ParseState::Normal, _) => {
|
||||||
out.push(current_char);
|
out.push(current_char);
|
||||||
ParseState::Normal
|
ParseState::Normal
|
||||||
}
|
}
|
||||||
(ParseState::Escape, 'n') => {
|
(ParseState::Escape, b'n') => {
|
||||||
out.push('\n');
|
out.push(b'\n');
|
||||||
ParseState::Normal
|
ParseState::Normal
|
||||||
}
|
}
|
||||||
(ParseState::Escape, '\\') => {
|
(ParseState::Escape, b'\\') => {
|
||||||
out.push('\\');
|
out.push(b'\\');
|
||||||
ParseState::Normal
|
ParseState::Normal
|
||||||
}
|
}
|
||||||
(ParseState::Escape, '"') => {
|
(ParseState::Escape, b'"') => {
|
||||||
out.push('"');
|
out.push(b'"');
|
||||||
ParseState::Normal
|
ParseState::Normal
|
||||||
}
|
}
|
||||||
_ => todo!(),
|
(ParseState::Escape, b'0'..=b'7') => {
|
||||||
|
let mut octal = Vec::with_capacity(MAX_OCTAL_LENGTH);
|
||||||
|
octal.push(current_char);
|
||||||
|
ParseState::Octal(octal)
|
||||||
|
}
|
||||||
|
(ParseState::Octal(mut octal), b'0'..=b'7') => {
|
||||||
|
octal.push(current_char);
|
||||||
|
ParseState::Octal(octal)
|
||||||
|
}
|
||||||
|
_ => panic!("Invalid state unquoting string."),
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
Ok(out)
|
Ok(String::from_utf8(out)?)
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
|
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
|
||||||
pub fn sexp_with_padding<'s>(input: &'s str) -> Res<&'s str, Token<'s>> {
|
pub fn sexp<'s>(input: &'s str) -> Res<&'s str, Token<'s>> {
|
||||||
let (remaining, _) = multispace0(input)?;
|
let (remaining, _) = multispace0(input)?;
|
||||||
let remaining = OrgSource::new(remaining);
|
let (remaining, tkn) = token(remaining).map(|(rem, out)| (Into::<&str>::into(rem), out))?;
|
||||||
let (remaining, tkn) = token(remaining)
|
|
||||||
.map(|(rem, out)| (Into::<&str>::into(rem), out))
|
|
||||||
.map_err(convert_error)?;
|
|
||||||
let (remaining, _) = multispace0(remaining)?;
|
let (remaining, _) = multispace0(remaining)?;
|
||||||
Ok((remaining, tkn))
|
Ok((remaining, tkn))
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
|
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
|
||||||
pub fn sexp<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, Token<'s>> {
|
fn token<'s>(input: &'s str) -> Res<&'s str, Token<'s>> {
|
||||||
let (remaining, tkn) = token(input)?;
|
|
||||||
Ok((remaining, tkn))
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
|
|
||||||
fn token<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, Token<'s>> {
|
|
||||||
alt((list, vector, atom))(input)
|
alt((list, vector, atom))(input)
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
|
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
|
||||||
fn list<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, Token<'s>> {
|
fn list<'s>(input: &'s str) -> Res<&'s str, Token<'s>> {
|
||||||
let (remaining, _) = tag("(")(input)?;
|
let (remaining, _) = tag("(")(input)?;
|
||||||
let (remaining, children) = delimited(
|
let (remaining, children) = delimited(
|
||||||
multispace0,
|
multispace0,
|
||||||
@@ -166,7 +205,7 @@ fn list<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, Token<'s>> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
|
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
|
||||||
fn vector<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, Token<'s>> {
|
fn vector<'s>(input: &'s str) -> Res<&'s str, Token<'s>> {
|
||||||
let (remaining, _) = tag("[")(input)?;
|
let (remaining, _) = tag("[")(input)?;
|
||||||
let (remaining, children) = delimited(
|
let (remaining, children) = delimited(
|
||||||
multispace0,
|
multispace0,
|
||||||
@@ -178,7 +217,7 @@ fn vector<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, Token<'s>> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
|
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
|
||||||
fn atom<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, Token<'s>> {
|
fn atom<'s>(input: &'s str) -> Res<&'s str, Token<'s>> {
|
||||||
not(peek(one_of(")]")))(input)?;
|
not(peek(one_of(")]")))(input)?;
|
||||||
alt((
|
alt((
|
||||||
text_with_properties,
|
text_with_properties,
|
||||||
@@ -189,7 +228,7 @@ fn atom<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, Token<'s>> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
|
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
|
||||||
fn unquoted_atom<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, Token<'s>> {
|
fn unquoted_atom<'s>(input: &'s str) -> Res<&'s str, Token<'s>> {
|
||||||
let (remaining, body) = take_till1(|c| match c {
|
let (remaining, body) = take_till1(|c| match c {
|
||||||
' ' | '\t' | '\r' | '\n' | ')' | ']' => true,
|
' ' | '\t' | '\r' | '\n' | ')' | ']' => true,
|
||||||
_ => false,
|
_ => false,
|
||||||
@@ -198,23 +237,38 @@ fn unquoted_atom<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, Token<'s>> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
|
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
|
||||||
fn quoted_atom<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, Token<'s>> {
|
fn quoted_atom<'s>(input: &'s str) -> Res<&'s str, Token<'s>> {
|
||||||
let (remaining, _) = tag(r#"""#)(input)?;
|
let (mut remaining, _) = tag(r#"""#)(input)?;
|
||||||
let (remaining, _) = escaped(
|
let mut in_escape = false;
|
||||||
take_till1(|c| match c {
|
loop {
|
||||||
'\\' | '"' => true,
|
if in_escape {
|
||||||
_ => false,
|
let (remain, _) = alt((recognize(one_of(r#""n\\"#)), digit1))(remaining)?;
|
||||||
}),
|
remaining = remain;
|
||||||
'\\',
|
in_escape = false;
|
||||||
one_of(r#""n\\"#),
|
} else {
|
||||||
)(remaining)?;
|
let end_quote = tag::<_, _, nom::error::Error<_>>(r#"""#)(remaining);
|
||||||
|
if end_quote.is_ok() {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
let escape_backslash = tag::<_, _, nom::error::Error<_>>("\\")(remaining);
|
||||||
|
if let Ok((remain, _)) = escape_backslash {
|
||||||
|
remaining = remain;
|
||||||
|
in_escape = true;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
let (remain, _) = anychar(remaining)?;
|
||||||
|
remaining = remain;
|
||||||
|
}
|
||||||
|
}
|
||||||
let (remaining, _) = tag(r#"""#)(remaining)?;
|
let (remaining, _) = tag(r#"""#)(remaining)?;
|
||||||
let source = get_consumed(input, remaining);
|
let source = get_consumed(input, remaining);
|
||||||
Ok((remaining, Token::Atom(source.into())))
|
Ok((remaining, Token::Atom(source.into())))
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
|
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
|
||||||
fn hash_notation<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, Token<'s>> {
|
fn hash_notation<'s>(input: &'s str) -> Res<&'s str, Token<'s>> {
|
||||||
let (remaining, _) = tag("#<")(input)?;
|
let (remaining, _) = tag("#<")(input)?;
|
||||||
let (remaining, _body) = take_till1(|c| match c {
|
let (remaining, _body) = take_till1(|c| match c {
|
||||||
'>' => true,
|
'>' => true,
|
||||||
@@ -225,7 +279,7 @@ fn hash_notation<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, Token<'s>> {
|
|||||||
Ok((remaining, Token::Atom(source.into())))
|
Ok((remaining, Token::Atom(source.into())))
|
||||||
}
|
}
|
||||||
|
|
||||||
fn text_with_properties<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, Token<'s>> {
|
fn text_with_properties<'s>(input: &'s str) -> Res<&'s str, Token<'s>> {
|
||||||
let (remaining, _) = tag("#(")(input)?;
|
let (remaining, _) = tag("#(")(input)?;
|
||||||
let (remaining, (text, props)) = delimited(
|
let (remaining, (text, props)) = delimited(
|
||||||
multispace0,
|
multispace0,
|
||||||
@@ -255,7 +309,7 @@ mod tests {
|
|||||||
#[test]
|
#[test]
|
||||||
fn simple() {
|
fn simple() {
|
||||||
let input = " (foo bar baz ) ";
|
let input = " (foo bar baz ) ";
|
||||||
let (remaining, parsed) = sexp_with_padding(input).expect("Parse the input");
|
let (remaining, parsed) = sexp(input).expect("Parse the input");
|
||||||
assert_eq!(remaining, "");
|
assert_eq!(remaining, "");
|
||||||
assert!(match parsed {
|
assert!(match parsed {
|
||||||
Token::Atom(_) => false,
|
Token::Atom(_) => false,
|
||||||
@@ -268,7 +322,7 @@ mod tests {
|
|||||||
#[test]
|
#[test]
|
||||||
fn quoted() {
|
fn quoted() {
|
||||||
let input = r#" ("foo" bar baz ) "#;
|
let input = r#" ("foo" bar baz ) "#;
|
||||||
let (remaining, parsed) = sexp_with_padding(input).expect("Parse the input");
|
let (remaining, parsed) = sexp(input).expect("Parse the input");
|
||||||
assert_eq!(remaining, "");
|
assert_eq!(remaining, "");
|
||||||
assert!(match parsed {
|
assert!(match parsed {
|
||||||
Token::Atom(_) => false,
|
Token::Atom(_) => false,
|
||||||
@@ -292,7 +346,7 @@ mod tests {
|
|||||||
#[test]
|
#[test]
|
||||||
fn quoted_containing_paren() {
|
fn quoted_containing_paren() {
|
||||||
let input = r#" (foo "b(a)r" baz ) "#;
|
let input = r#" (foo "b(a)r" baz ) "#;
|
||||||
let (remaining, parsed) = sexp_with_padding(input).expect("Parse the input");
|
let (remaining, parsed) = sexp(input).expect("Parse the input");
|
||||||
assert_eq!(remaining, "");
|
assert_eq!(remaining, "");
|
||||||
assert!(match parsed {
|
assert!(match parsed {
|
||||||
Token::List(_) => true,
|
Token::List(_) => true,
|
||||||
@@ -328,7 +382,7 @@ mod tests {
|
|||||||
#[test]
|
#[test]
|
||||||
fn string_containing_escaped_characters() {
|
fn string_containing_escaped_characters() {
|
||||||
let input = r#" (foo "\\( x=2 \\)" bar) "#;
|
let input = r#" (foo "\\( x=2 \\)" bar) "#;
|
||||||
let (remaining, parsed) = sexp_with_padding(input).expect("Parse the input");
|
let (remaining, parsed) = sexp(input).expect("Parse the input");
|
||||||
assert_eq!(remaining, "");
|
assert_eq!(remaining, "");
|
||||||
assert!(match parsed {
|
assert!(match parsed {
|
||||||
Token::Atom(_) => false,
|
Token::Atom(_) => false,
|
||||||
@@ -1,5 +1,10 @@
|
|||||||
use crate::parser::sexp::Token;
|
use std::str::FromStr;
|
||||||
use crate::parser::Source;
|
|
||||||
|
use super::elisp_fact::GetElispFact;
|
||||||
|
use super::sexp::Token;
|
||||||
|
use crate::compare::sexp::unquote;
|
||||||
|
use crate::types::GetStandardProperties;
|
||||||
|
use crate::types::StandardProperties;
|
||||||
|
|
||||||
/// Check if the child string slice is a slice of the parent string slice.
|
/// Check if the child string slice is a slice of the parent string slice.
|
||||||
fn is_slice_of(parent: &str, child: &str) -> bool {
|
fn is_slice_of(parent: &str, child: &str) -> bool {
|
||||||
@@ -10,18 +15,39 @@ fn is_slice_of(parent: &str, child: &str) -> bool {
|
|||||||
child_start >= parent_start && child_end <= parent_end
|
child_start >= parent_start && child_end <= parent_end
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Get the offset into source that the rust object exists at.
|
/// Get the byte offset into source that the rust object exists at.
|
||||||
///
|
///
|
||||||
/// These offsets are zero-based unlike the elisp ones.
|
/// These offsets are zero-based unlike the elisp ones.
|
||||||
pub fn get_offsets<'s, S: Source<'s>>(source: &'s str, rust_object: &'s S) -> (usize, usize) {
|
fn get_rust_byte_offsets<'b, 's, S: StandardProperties<'s> + ?Sized>(
|
||||||
let rust_object_source = rust_object.get_source();
|
original_document: &'s str,
|
||||||
assert!(is_slice_of(source, rust_object_source));
|
rust_ast_node: &'b S,
|
||||||
let offset = rust_object_source.as_ptr() as usize - source.as_ptr() as usize;
|
) -> (usize, usize) {
|
||||||
|
let rust_object_source = rust_ast_node.get_source();
|
||||||
|
debug_assert!(is_slice_of(original_document, rust_object_source));
|
||||||
|
let offset = rust_object_source.as_ptr() as usize - original_document.as_ptr() as usize;
|
||||||
let end = offset + rust_object_source.len();
|
let end = offset + rust_object_source.len();
|
||||||
(offset, end)
|
(offset, end)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn assert_name<'s>(emacs: &'s Token<'s>, name: &str) -> Result<(), Box<dyn std::error::Error>> {
|
pub(crate) fn compare_standard_properties<
|
||||||
|
'b,
|
||||||
|
's,
|
||||||
|
S: GetStandardProperties<'s> + GetElispFact<'s> + ?Sized,
|
||||||
|
>(
|
||||||
|
original_document: &'s str,
|
||||||
|
emacs: &'b Token<'s>,
|
||||||
|
rust: &'b S,
|
||||||
|
) -> Result<(), Box<dyn std::error::Error>> {
|
||||||
|
assert_name(emacs, rust.get_elisp_fact().get_elisp_name())?;
|
||||||
|
assert_bounds(original_document, emacs, rust.get_standard_properties())?;
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn assert_name<'b, 's, S: AsRef<str>>(
|
||||||
|
emacs: &'b Token<'s>,
|
||||||
|
name: S,
|
||||||
|
) -> Result<(), Box<dyn std::error::Error>> {
|
||||||
|
let name = name.as_ref();
|
||||||
let children = emacs.as_list()?;
|
let children = emacs.as_list()?;
|
||||||
let first_child = children
|
let first_child = children
|
||||||
.first()
|
.first()
|
||||||
@@ -29,7 +55,7 @@ pub fn assert_name<'s>(emacs: &'s Token<'s>, name: &str) -> Result<(), Box<dyn s
|
|||||||
.as_atom()?;
|
.as_atom()?;
|
||||||
if first_child != name {
|
if first_child != name {
|
||||||
Err(format!(
|
Err(format!(
|
||||||
"Expected a {expected} cell, but found a {found} cell.",
|
"AST node name mismatch. Expected a (rust) {expected} cell, but found a (emacs) {found} cell.",
|
||||||
expected = name,
|
expected = name,
|
||||||
found = first_child
|
found = first_child
|
||||||
))?;
|
))?;
|
||||||
@@ -37,27 +63,33 @@ pub fn assert_name<'s>(emacs: &'s Token<'s>, name: &str) -> Result<(), Box<dyn s
|
|||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn assert_bounds<'s, S: Source<'s>>(
|
/// Assert that the character ranges defined by upstream org-mode's :standard-properties match the slices in Organic's StandardProperties.
|
||||||
source: &'s str,
|
///
|
||||||
emacs: &'s Token<'s>,
|
/// This does **not** handle plain text because plain text is a special case.
|
||||||
rust: &'s S,
|
pub(crate) fn assert_bounds<'b, 's, S: StandardProperties<'s> + ?Sized>(
|
||||||
|
original_document: &'s str,
|
||||||
|
emacs: &'b Token<'s>,
|
||||||
|
rust: &'b S,
|
||||||
) -> Result<(), Box<dyn std::error::Error>> {
|
) -> Result<(), Box<dyn std::error::Error>> {
|
||||||
let standard_properties = get_standard_properties(emacs)?;
|
let standard_properties = get_emacs_standard_properties(emacs)?; // 1-based
|
||||||
let (begin, end) = (
|
let (begin, end) = (
|
||||||
standard_properties
|
standard_properties
|
||||||
.begin
|
.begin
|
||||||
.ok_or("Token should have a begin.")?,
|
.ok_or("Token should have a begin.")?,
|
||||||
standard_properties.end.ok_or("Token should have an end.")?,
|
standard_properties.end.ok_or("Token should have an end.")?,
|
||||||
);
|
);
|
||||||
let (rust_begin, rust_end) = get_offsets(source, rust);
|
let (rust_begin, rust_end) = get_rust_byte_offsets(original_document, rust); // 0-based
|
||||||
if (rust_begin + 1) != begin || (rust_end + 1) != end {
|
let rust_begin_char_offset = (&original_document[..rust_begin]).chars().count() + 1; // 1-based
|
||||||
Err(format!("Rust bounds (in bytes) ({rust_begin}, {rust_end}) do not match emacs bounds ({emacs_begin}, {emacs_end})", rust_begin = rust_begin + 1, rust_end = rust_end + 1, emacs_begin=begin, emacs_end=end))?;
|
let rust_end_char_offset =
|
||||||
|
rust_begin_char_offset + (&original_document[rust_begin..rust_end]).chars().count(); // 1-based
|
||||||
|
if rust_begin_char_offset != begin || rust_end_char_offset != end {
|
||||||
|
Err(format!("Rust bounds (in chars) ({rust_begin}, {rust_end}) do not match emacs bounds ({emacs_begin}, {emacs_end})", rust_begin = rust_begin_char_offset, rust_end = rust_end_char_offset, emacs_begin=begin, emacs_end=end))?;
|
||||||
}
|
}
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
struct StandardProperties {
|
struct EmacsStandardProperties {
|
||||||
begin: Option<usize>,
|
begin: Option<usize>,
|
||||||
#[allow(dead_code)]
|
#[allow(dead_code)]
|
||||||
post_affiliated: Option<usize>,
|
post_affiliated: Option<usize>,
|
||||||
@@ -70,9 +102,9 @@ struct StandardProperties {
|
|||||||
post_blank: Option<usize>,
|
post_blank: Option<usize>,
|
||||||
}
|
}
|
||||||
|
|
||||||
fn get_standard_properties<'s>(
|
fn get_emacs_standard_properties<'b, 's>(
|
||||||
emacs: &'s Token<'s>,
|
emacs: &'b Token<'s>,
|
||||||
) -> Result<StandardProperties, Box<dyn std::error::Error>> {
|
) -> Result<EmacsStandardProperties, Box<dyn std::error::Error>> {
|
||||||
let children = emacs.as_list()?;
|
let children = emacs.as_list()?;
|
||||||
let attributes_child = children
|
let attributes_child = children
|
||||||
.iter()
|
.iter()
|
||||||
@@ -91,7 +123,7 @@ fn get_standard_properties<'s>(
|
|||||||
let contents_end = maybe_token_to_usize(std_props.next())?;
|
let contents_end = maybe_token_to_usize(std_props.next())?;
|
||||||
let end = maybe_token_to_usize(std_props.next())?;
|
let end = maybe_token_to_usize(std_props.next())?;
|
||||||
let post_blank = maybe_token_to_usize(std_props.next())?;
|
let post_blank = maybe_token_to_usize(std_props.next())?;
|
||||||
StandardProperties {
|
EmacsStandardProperties {
|
||||||
begin,
|
begin,
|
||||||
post_affiliated,
|
post_affiliated,
|
||||||
contents_begin,
|
contents_begin,
|
||||||
@@ -110,7 +142,7 @@ fn get_standard_properties<'s>(
|
|||||||
maybe_token_to_usize(attributes_map.get(":post-blank").map(|token| *token))?;
|
maybe_token_to_usize(attributes_map.get(":post-blank").map(|token| *token))?;
|
||||||
let post_affiliated =
|
let post_affiliated =
|
||||||
maybe_token_to_usize(attributes_map.get(":post-affiliated").map(|token| *token))?;
|
maybe_token_to_usize(attributes_map.get(":post-affiliated").map(|token| *token))?;
|
||||||
StandardProperties {
|
EmacsStandardProperties {
|
||||||
begin,
|
begin,
|
||||||
post_affiliated,
|
post_affiliated,
|
||||||
contents_begin,
|
contents_begin,
|
||||||
@@ -138,22 +170,85 @@ fn maybe_token_to_usize(
|
|||||||
.map_or(Ok(None), |r| r.map(Some))?)
|
.map_or(Ok(None), |r| r.map(Some))?)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn get_property<'s, 'x>(
|
/// Get a named property from the emacs token.
|
||||||
emacs: &'s Token<'s>,
|
///
|
||||||
|
/// Returns Ok(None) if value is nil or absent.
|
||||||
|
pub(crate) fn get_property<'b, 's, 'x>(
|
||||||
|
emacs: &'b Token<'s>,
|
||||||
key: &'x str,
|
key: &'x str,
|
||||||
) -> Result<Option<&'s Token<'s>>, Box<dyn std::error::Error>> {
|
) -> Result<Option<&'b Token<'s>>, Box<dyn std::error::Error>> {
|
||||||
let children = emacs.as_list()?;
|
let children = emacs.as_list()?;
|
||||||
let attributes_child = children
|
let attributes_child = children
|
||||||
.iter()
|
.iter()
|
||||||
.nth(1)
|
.nth(1)
|
||||||
.ok_or("Should have an attributes child.")?;
|
.ok_or("Should have an attributes child.")?;
|
||||||
let attributes_map = attributes_child.as_map()?;
|
let attributes_map = attributes_child.as_map()?;
|
||||||
let prop = attributes_map
|
let prop = attributes_map.get(key).map(|token| *token);
|
||||||
.get(key)
|
match prop.map(|token| token.as_atom()) {
|
||||||
.ok_or(format!("Missing {} attribute.", key))?;
|
Some(Ok("nil")) => return Ok(None),
|
||||||
match prop.as_atom() {
|
|
||||||
Ok("nil") => return Ok(None),
|
|
||||||
_ => {}
|
_ => {}
|
||||||
};
|
};
|
||||||
Ok(Some(*prop))
|
Ok(prop)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get a named property containing an unquoted atom from the emacs token.
|
||||||
|
///
|
||||||
|
/// Returns None if key is not found.
|
||||||
|
pub(crate) fn get_property_unquoted_atom<'b, 's, 'x>(
|
||||||
|
emacs: &'b Token<'s>,
|
||||||
|
key: &'x str,
|
||||||
|
) -> Result<Option<&'s str>, Box<dyn std::error::Error>> {
|
||||||
|
Ok(get_property(emacs, key)?
|
||||||
|
.map(Token::as_atom)
|
||||||
|
.map_or(Ok(None), |r| r.map(Some))?)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get a named property containing an quoted string from the emacs token.
|
||||||
|
///
|
||||||
|
/// Returns None if key is not found.
|
||||||
|
pub(crate) fn get_property_quoted_string<'b, 's, 'x>(
|
||||||
|
emacs: &'b Token<'s>,
|
||||||
|
key: &'x str,
|
||||||
|
) -> Result<Option<String>, Box<dyn std::error::Error>> {
|
||||||
|
Ok(get_property(emacs, key)?
|
||||||
|
.map(Token::as_atom)
|
||||||
|
.map_or(Ok(None), |r| r.map(Some))?
|
||||||
|
.map(unquote)
|
||||||
|
.map_or(Ok(None), |r| r.map(Some))?)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get a named property containing a boolean value.
|
||||||
|
///
|
||||||
|
/// This uses the elisp convention of nil == false, non-nil == true.
|
||||||
|
///
|
||||||
|
/// Returns false if key is not found.
|
||||||
|
pub(crate) fn get_property_boolean<'b, 's, 'x>(
|
||||||
|
emacs: &'b Token<'s>,
|
||||||
|
key: &'x str,
|
||||||
|
) -> Result<bool, Box<dyn std::error::Error>> {
|
||||||
|
Ok(get_property(emacs, key)?
|
||||||
|
.map(Token::as_atom)
|
||||||
|
.map_or(Ok(None), |r| r.map(Some))?
|
||||||
|
.unwrap_or("nil")
|
||||||
|
!= "nil")
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get a named property containing an unquoted numeric value.
|
||||||
|
///
|
||||||
|
/// Returns None if key is not found.
|
||||||
|
pub(crate) fn get_property_numeric<'b, 's, 'x, N: FromStr>(
|
||||||
|
emacs: &'b Token<'s>,
|
||||||
|
key: &'x str,
|
||||||
|
) -> Result<Option<N>, Box<dyn std::error::Error + 's>>
|
||||||
|
where
|
||||||
|
<N as FromStr>::Err: std::error::Error,
|
||||||
|
<N as FromStr>::Err: 's,
|
||||||
|
{
|
||||||
|
let unparsed_string = get_property(emacs, key)?
|
||||||
|
.map(Token::as_atom)
|
||||||
|
.map_or(Ok(None), |r| r.map(Some))?;
|
||||||
|
let parsed_number = unparsed_string
|
||||||
|
.map(|val| val.parse::<N>())
|
||||||
|
.map_or(Ok(None), |r| r.map(Some))?;
|
||||||
|
Ok(parsed_number)
|
||||||
}
|
}
|
||||||
|
|||||||
185
src/context/context.rs
Normal file
185
src/context/context.rs
Normal file
@@ -0,0 +1,185 @@
|
|||||||
|
use std::marker::PhantomData;
|
||||||
|
|
||||||
|
use nom::combinator::eof;
|
||||||
|
use nom::IResult;
|
||||||
|
|
||||||
|
use super::exiting::ExitClass;
|
||||||
|
use super::global_settings::GlobalSettings;
|
||||||
|
use super::list::List;
|
||||||
|
use super::DynContextMatcher;
|
||||||
|
use super::RefContext;
|
||||||
|
use crate::error::CustomError;
|
||||||
|
use crate::error::MyError;
|
||||||
|
use crate::error::Res;
|
||||||
|
use crate::parser::OrgSource;
|
||||||
|
|
||||||
|
#[derive(Debug)]
|
||||||
|
pub(crate) enum ContextElement<'r, 's> {
|
||||||
|
/// Stores a parser that indicates that children should exit upon matching an exit matcher.
|
||||||
|
ExitMatcherNode(ExitMatcherNode<'r>),
|
||||||
|
|
||||||
|
/// Stores the name of the current element to prevent directly nesting elements of the same type.
|
||||||
|
Context(&'r str),
|
||||||
|
|
||||||
|
/// Stores the name of the current object to prevent directly nesting elements of the same type.
|
||||||
|
ContextObject(&'r str),
|
||||||
|
|
||||||
|
/// Indicates if elements should consume the whitespace after them.
|
||||||
|
ConsumeTrailingWhitespace(bool),
|
||||||
|
|
||||||
|
/// This is just here to use the 's lifetime until I'm sure we can eliminate it from ContextElement.
|
||||||
|
#[allow(dead_code)]
|
||||||
|
Placeholder(PhantomData<&'s str>),
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) struct ExitMatcherNode<'r> {
|
||||||
|
// TODO: Should this be "&'r DynContextMatcher<'c>" ?
|
||||||
|
pub(crate) exit_matcher: &'r DynContextMatcher<'r>,
|
||||||
|
pub(crate) class: ExitClass,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'r> std::fmt::Debug for ExitMatcherNode<'r> {
|
||||||
|
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||||
|
let mut formatter = f.debug_struct("ExitMatcherNode");
|
||||||
|
formatter.field("class", &self.class.to_string());
|
||||||
|
formatter.finish()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug)]
|
||||||
|
pub(crate) struct Context<'g, 'r, 's> {
|
||||||
|
global_settings: &'g GlobalSettings<'g, 's>,
|
||||||
|
tree: List<'r, &'r ContextElement<'r, 's>>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'g, 'r, 's> Context<'g, 'r, 's> {
|
||||||
|
pub(crate) fn new(
|
||||||
|
global_settings: &'g GlobalSettings<'g, 's>,
|
||||||
|
tree: List<'r, &'r ContextElement<'r, 's>>,
|
||||||
|
) -> Self {
|
||||||
|
Self {
|
||||||
|
global_settings,
|
||||||
|
tree,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn with_additional_node(&'r self, new_element: &'r ContextElement<'r, 's>) -> Self {
|
||||||
|
let new_tree = self.tree.push(new_element);
|
||||||
|
Self::new(self.global_settings, new_tree)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn iter(&'r self) -> super::list::Iter<'r, &'r ContextElement<'r, 's>> {
|
||||||
|
self.tree.iter()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn iter_context(&'r self) -> Iter<'g, 'r, 's> {
|
||||||
|
Iter {
|
||||||
|
next: self.tree.iter_list(),
|
||||||
|
global_settings: self.global_settings,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn get_parent(&'r self) -> Option<Self> {
|
||||||
|
self.tree.get_parent().map(|parent_tree| Self {
|
||||||
|
global_settings: self.global_settings,
|
||||||
|
tree: parent_tree.clone(),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_data(&self) -> &ContextElement<'r, 's> {
|
||||||
|
self.tree.get_data()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn get_global_settings(&self) -> &'g GlobalSettings<'g, 's> {
|
||||||
|
self.global_settings
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn with_global_settings<'gg>(
|
||||||
|
&self,
|
||||||
|
new_settings: &'gg GlobalSettings<'gg, 's>,
|
||||||
|
) -> Context<'gg, 'r, 's> {
|
||||||
|
Context {
|
||||||
|
global_settings: new_settings,
|
||||||
|
tree: self.tree.clone(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
|
||||||
|
pub(crate) fn check_exit_matcher(
|
||||||
|
&'r self,
|
||||||
|
i: OrgSource<'s>,
|
||||||
|
) -> IResult<OrgSource<'s>, OrgSource<'s>, CustomError<OrgSource<'s>>> {
|
||||||
|
let mut current_class_filter = ExitClass::Gamma;
|
||||||
|
for current_node in self.iter_context() {
|
||||||
|
let context_element = current_node.get_data();
|
||||||
|
match context_element {
|
||||||
|
ContextElement::ExitMatcherNode(exit_matcher) => {
|
||||||
|
if exit_matcher.class as u32 <= current_class_filter as u32 {
|
||||||
|
current_class_filter = exit_matcher.class;
|
||||||
|
let local_result = (exit_matcher.exit_matcher)(¤t_node, i);
|
||||||
|
if local_result.is_ok() {
|
||||||
|
return local_result;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
_ => {}
|
||||||
|
};
|
||||||
|
}
|
||||||
|
// TODO: Make this a specific error instead of just a generic MyError
|
||||||
|
return Err(nom::Err::Error(CustomError::MyError(MyError(
|
||||||
|
"NoExit".into(),
|
||||||
|
))));
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Indicates if elements should consume the whitespace after them.
|
||||||
|
///
|
||||||
|
/// Defaults to true.
|
||||||
|
pub(crate) fn should_consume_trailing_whitespace(&self) -> bool {
|
||||||
|
self._should_consume_trailing_whitespace().unwrap_or(true)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn _should_consume_trailing_whitespace(&self) -> Option<bool> {
|
||||||
|
for current_node in self.iter() {
|
||||||
|
match current_node {
|
||||||
|
ContextElement::ConsumeTrailingWhitespace(should) => {
|
||||||
|
return Some(*should);
|
||||||
|
}
|
||||||
|
_ => {}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
None
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
|
||||||
|
fn document_end<'b, 'g, 'r, 's>(
|
||||||
|
_context: RefContext<'b, 'g, 'r, 's>,
|
||||||
|
input: OrgSource<'s>,
|
||||||
|
) -> Res<OrgSource<'s>, OrgSource<'s>> {
|
||||||
|
eof(input)
|
||||||
|
}
|
||||||
|
|
||||||
|
struct Iter<'g, 'r, 's> {
|
||||||
|
global_settings: &'g GlobalSettings<'g, 's>,
|
||||||
|
next: super::list::IterList<'r, &'r ContextElement<'r, 's>>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'g, 'r, 's> Iterator for Iter<'g, 'r, 's> {
|
||||||
|
type Item = Context<'g, 'r, 's>;
|
||||||
|
|
||||||
|
fn next(&mut self) -> Option<Self::Item> {
|
||||||
|
let next_tree = self.next.next();
|
||||||
|
let ret =
|
||||||
|
next_tree.map(|parent_tree| Context::new(self.global_settings, parent_tree.clone()));
|
||||||
|
ret
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'r, 's> ContextElement<'r, 's> {
|
||||||
|
pub(crate) fn document_context() -> Self {
|
||||||
|
Self::ExitMatcherNode(ExitMatcherNode {
|
||||||
|
exit_matcher: &document_end,
|
||||||
|
class: ExitClass::Document,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
13
src/context/exiting.rs
Normal file
13
src/context/exiting.rs
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
#[derive(Debug, Copy, Clone)]
|
||||||
|
pub(crate) enum ExitClass {
|
||||||
|
Document = 1,
|
||||||
|
Alpha = 2,
|
||||||
|
Beta = 3,
|
||||||
|
Gamma = 4,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl std::fmt::Display for ExitClass {
|
||||||
|
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
|
||||||
|
write!(f, "{:?}", self)
|
||||||
|
}
|
||||||
|
}
|
||||||
23
src/context/file_access_interface.rs
Normal file
23
src/context/file_access_interface.rs
Normal file
@@ -0,0 +1,23 @@
|
|||||||
|
use std::fmt::Debug;
|
||||||
|
use std::path::PathBuf;
|
||||||
|
|
||||||
|
pub trait FileAccessInterface: Debug {
|
||||||
|
fn read_file(&self, path: &str) -> Result<String, std::io::Error>;
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone)]
|
||||||
|
pub struct LocalFileAccessInterface {
|
||||||
|
pub working_directory: Option<PathBuf>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl FileAccessInterface for LocalFileAccessInterface {
|
||||||
|
fn read_file(&self, path: &str) -> Result<String, std::io::Error> {
|
||||||
|
let final_path = self
|
||||||
|
.working_directory
|
||||||
|
.as_ref()
|
||||||
|
.map(PathBuf::as_path)
|
||||||
|
.map(|pb| pb.join(path))
|
||||||
|
.unwrap_or_else(|| PathBuf::from(path));
|
||||||
|
Ok(std::fs::read_to_string(final_path)?)
|
||||||
|
}
|
||||||
|
}
|
||||||
72
src/context/global_settings.rs
Normal file
72
src/context/global_settings.rs
Normal file
@@ -0,0 +1,72 @@
|
|||||||
|
use std::collections::BTreeSet;
|
||||||
|
|
||||||
|
use super::FileAccessInterface;
|
||||||
|
use super::LocalFileAccessInterface;
|
||||||
|
use crate::types::IndentationLevel;
|
||||||
|
use crate::types::Object;
|
||||||
|
|
||||||
|
// TODO: Ultimately, I think we'll need most of this: https://orgmode.org/manual/In_002dbuffer-Settings.html
|
||||||
|
|
||||||
|
#[derive(Debug, Clone)]
|
||||||
|
pub struct GlobalSettings<'g, 's> {
|
||||||
|
pub radio_targets: Vec<&'g Vec<Object<'s>>>,
|
||||||
|
pub file_access: &'g dyn FileAccessInterface,
|
||||||
|
pub in_progress_todo_keywords: BTreeSet<String>,
|
||||||
|
pub complete_todo_keywords: BTreeSet<String>,
|
||||||
|
/// Set to true to allow for plain lists using single letters as the bullet in the same way that numbers are used.
|
||||||
|
///
|
||||||
|
/// Corresponds to the org-list-allow-alphabetical elisp variable.
|
||||||
|
pub list_allow_alphabetical: bool,
|
||||||
|
|
||||||
|
/// How many spaces a tab should be equal to.
|
||||||
|
///
|
||||||
|
/// Corresponds to the tab-width elisp variable.
|
||||||
|
pub tab_width: IndentationLevel,
|
||||||
|
|
||||||
|
/// Whether to only allow odd headline levels.
|
||||||
|
///
|
||||||
|
/// Corresponds to org-odd-levels-only elisp variable.
|
||||||
|
pub odd_levels_only: HeadlineLevelFilter,
|
||||||
|
|
||||||
|
/// If a headline title matches this string exactly, then that section will become a "footnote section".
|
||||||
|
///
|
||||||
|
/// Corresponds to org-footnote-section elisp variable.
|
||||||
|
pub footnote_section: &'g str,
|
||||||
|
}
|
||||||
|
|
||||||
|
pub const DEFAULT_TAB_WIDTH: IndentationLevel = 8;
|
||||||
|
|
||||||
|
impl<'g, 's> GlobalSettings<'g, 's> {
|
||||||
|
fn new() -> GlobalSettings<'g, 's> {
|
||||||
|
GlobalSettings {
|
||||||
|
radio_targets: Vec::new(),
|
||||||
|
file_access: &LocalFileAccessInterface {
|
||||||
|
working_directory: None,
|
||||||
|
},
|
||||||
|
in_progress_todo_keywords: BTreeSet::new(),
|
||||||
|
complete_todo_keywords: BTreeSet::new(),
|
||||||
|
list_allow_alphabetical: false,
|
||||||
|
tab_width: DEFAULT_TAB_WIDTH,
|
||||||
|
odd_levels_only: HeadlineLevelFilter::default(),
|
||||||
|
footnote_section: "Footnotes",
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'g, 's> Default for GlobalSettings<'g, 's> {
|
||||||
|
fn default() -> GlobalSettings<'g, 's> {
|
||||||
|
GlobalSettings::new()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Which headline levels the parser recognizes.
///
/// Used by [`GlobalSettings::odd_levels_only`] (the org-odd-levels-only
/// elisp variable).
//
// Improvement: a fieldless two-variant enum is trivially copyable and totally
// comparable, so `Copy`, `Eq`, and `Hash` are derived in addition to the
// original `Debug, Clone, PartialEq` — purely additive, so existing callers
// are unaffected.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum HeadlineLevelFilter {
    /// Only odd headline levels are allowed.
    Odd,
    /// Both odd and even headline levels are allowed (Org's default).
    OddEven,
}

impl Default for HeadlineLevelFilter {
    /// Defaults to allowing every headline level.
    fn default() -> Self {
        HeadlineLevelFilter::OddEven
    }
}
|
||||||
70
src/context/list.rs
Normal file
70
src/context/list.rs
Normal file
@@ -0,0 +1,70 @@
|
|||||||
|
use std::fmt::Debug;
|
||||||
|
|
||||||
|
/// An immutable, parent-linked list node.
///
/// Each `push` returns a new node that *borrows* its parent, so several
/// branches can share a common tail without cloning or heap allocation
/// (a persistent stack living on the callers' stack frames).
#[derive(Debug, Clone)]
pub(crate) struct List<'parent, T> {
    data: T,
    parent: Link<'parent, T>,
}

// TODO: Should I be defining a lifetime for T in the generics here? Ref: https://quinedot.github.io/rust-learning/dyn-elision-advanced.html#iteraction-with-type-aliases
type Link<'parent, T> = Option<&'parent List<'parent, T>>;

impl<'parent, T> List<'parent, T> {
    /// Creates a single-element list (a root node with no parent).
    pub(crate) fn new(first_item: T) -> Self {
        Self {
            data: first_item,
            parent: None,
        }
    }

    /// Borrows the value stored in this node.
    pub(crate) fn get_data(&self) -> &T {
        &self.data
    }

    /// Returns the parent node, or `None` if this is the root.
    pub(crate) fn get_parent(&'parent self) -> Link<'parent, T> {
        self.parent
    }

    /// Iterates over the values from this node up toward the root.
    pub(crate) fn iter(&self) -> Iter<'_, T> {
        Iter { next: Some(self) }
    }

    /// Iterates over the nodes themselves from this node up toward the root.
    pub(crate) fn iter_list(&self) -> IterList<'_, T> {
        IterList { next: Some(self) }
    }

    /// Returns a new node holding `item` whose parent is `self`.
    pub(crate) fn push(&'parent self, item: T) -> Self {
        Self {
            data: item,
            parent: Some(self),
        }
    }
}

/// Iterator over `&T` values, walking parent links toward the root.
pub(crate) struct Iter<'a, T> {
    next: Link<'a, T>,
}

impl<'a, T> Iterator for Iter<'a, T> {
    type Item = &'a T;

    fn next(&mut self) -> Option<Self::Item> {
        let ret = self.next.map(|link| link.get_data());
        // Step to the parent; `and_then` instead of `map(..).flatten()`
        // (clippy::map_flatten).
        self.next = self.next.and_then(|link| link.get_parent());
        ret
    }
}

/// Iterator over the `List` nodes themselves, walking toward the root.
pub(crate) struct IterList<'a, T> {
    next: Link<'a, T>,
}

impl<'a, T> Iterator for IterList<'a, T> {
    type Item = &'a List<'a, T>;

    fn next(&mut self) -> Option<Self::Item> {
        let ret = self.next;
        // Same parent step as `Iter`, without the `map(..).flatten()` detour.
        self.next = self.next.and_then(|this| this.get_parent());
        ret
    }
}
|
||||||
31
src/context/mod.rs
Normal file
31
src/context/mod.rs
Normal file
@@ -0,0 +1,31 @@
|
|||||||
|
use crate::error::Res;
|
||||||
|
use crate::parser::OrgSource;
|
||||||
|
|
||||||
|
mod context;
|
||||||
|
mod exiting;
|
||||||
|
mod file_access_interface;
|
||||||
|
mod global_settings;
|
||||||
|
mod list;
|
||||||
|
mod parser_with_context;
|
||||||
|
|
||||||
|
/// Shorthand for a shared reference to the parse [`Context`], as threaded
/// through the parser functions.
pub(crate) type RefContext<'b, 'g, 'r, 's> = &'b Context<'g, 'r, 's>;
/// A matcher that needs the parse context in addition to the input.
///
/// NOTE(review): trait aliases are a nightly-only feature
/// (`#![feature(trait_alias)]`) — presumably enabled at the crate root;
/// confirm against the crate attributes.
pub(crate) trait ContextMatcher = for<'b, 'g, 'r, 's> Fn(
    RefContext<'b, 'g, 'r, 's>,
    OrgSource<'s>,
) -> Res<OrgSource<'s>, OrgSource<'s>>;
/// `dyn` form of [`ContextMatcher`], for storing matchers behind a reference.
type DynContextMatcher<'c> = dyn ContextMatcher + 'c;
/// A matcher that only needs the input text, not the context.
pub(crate) trait Matcher = for<'s> Fn(OrgSource<'s>) -> Res<OrgSource<'s>, OrgSource<'s>>;
/// `dyn` form of [`Matcher`].
// Currently unused; kept for symmetry with `DynContextMatcher` — TODO(review) confirm.
#[allow(dead_code)]
type DynMatcher<'c> = dyn Matcher + 'c;
|
||||||
|
|
||||||
|
pub(crate) use context::Context;
|
||||||
|
pub(crate) use context::ContextElement;
|
||||||
|
pub(crate) use context::ExitMatcherNode;
|
||||||
|
pub(crate) use exiting::ExitClass;
|
||||||
|
pub use file_access_interface::FileAccessInterface;
|
||||||
|
pub use file_access_interface::LocalFileAccessInterface;
|
||||||
|
pub use global_settings::GlobalSettings;
|
||||||
|
pub use global_settings::HeadlineLevelFilter;
|
||||||
|
pub use global_settings::DEFAULT_TAB_WIDTH;
|
||||||
|
pub(crate) use list::List;
|
||||||
|
pub(crate) use parser_with_context::parser_with_context;
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user