Compare commits
315 Commits
84b8ddb582 113bb5888a bf5fe6920b 4b52ed0d2a d2c558ccfa a01f78b510
d80b473fae e6b4bc3d94 c6cde8db74 841a348dd0 b46fae331b 7223e08df3
8321f83dac bd441a0293 f5a07e0d70 9d750ed5e1 9f111fe445 a4e433dab1
4e9f1e4fac 4dee130873 8e712532e1 4b85236c5f 66f003e6fd b35a2d5f5a
320b5f8568 99b2af6c99 6e71acdb7d 8406d37991 64bb597908 068864ea87
03a3ddbd63 122adee23b 556afecbb8 e4407cbdd1 f57d60dab0 0aa3939a75
52cb81e75e 945121202d f4e0dddd9d 6b62176fd0 44483b4d54 48d3de77fe
680b176501 dc0338e978 ff3e0a50af 03c8c07fe0 3a6fc5b669 d258cdb839
aa5629354e efc4a04829 dd611ea64a 4bd5f3bec7 c2b3509b6a 7f3f5fb889
e0fbf17226 4e18cbafba 46c36d7f3e c46a935cfc f50415cb32 4f1a151e97
c8e3fdba51 4b3fc20c62 3131f8ac64 60a4835590 172d72aa46 b4fcc6500b
ddb6f31562 dc080b30fc 9901e17437 ea000894f0 e7742b529a 8eba0c4923
e0c0070a13 65ce116998 e348e7d4e3 492090470c 3ec900c8df d0a008ed22
f2292f1c07 44392cfcca 110630d230 ebe12d96c1 24c8ac8e21 259ad6e242
dd1f7c7777 c1b471208d 606bab9e6d 0edf5620a2 cdf87641c5 eb2995dd3b
cd6a64c015 a4a83d047d a4414369ce 83e4b72307 34b3e4fa7b c0e879dc1e
fa31b001f4 0897061ff6 28a3e1bc7b 3fd3d20722 90735586b5 78befc7665
ef549d3b19 777c756a7f 037caf369c 54085b5833 2bfa8e59e7 5d31db39a4
adcd0de7e4 c2f9789a64 579cbb5d11 cad2be43bf a0a4f0eb90 9f4f8e79ce
77e0dbb42e eff5cdbf40 eef3571299 f227d8405e 9520e5814b 28ad4fd046
7626a69fa1 121c0ce516 5a64db98fe abfae9c6c0 5272e2f1b4 90d4b11922
d552ef6569 f050e9b6a8 a5e108bc37 58290515b5 423f65046e badeaf8246
d38100581c f4eff5ca56 5b02c21ebf 5f1668702a 1faaeeebf1 20a7c89084
e83417b243 36b80dc093 1812b1a56e 1a70b3d2c0 abf066701e 4984ea4179
3cb251ea6c 4bfea41291 99376515ef 23f4ba4205 55ad136283 c717541099
c2e921c2dc e499169f0e 84c088df67 f210f95f99 17b81c7c72 2911fce7cc
e622d9fa6b 8186fbb8b3 68ccff74fa 9a13cb72c6 65abaa332f 67e5829fd9
995b41e697 eb51bdfe2f bbb9ec637a dc012b49f5 13863a68f7 2962f76c81
b9b3ef6e74 310ab2eab2 53320070da 2d5593681f b3f97dbb40 a48d76321e
59222c58b1 4d95a7f244 5a8159eed7 e24fcb9ded 4b94dc60d2 2046603d01
30412361e1 e846c85188 99b74095e6 6b802d36bf 33ca43ca40 f5280a3090
c28d8ccea4 9690545901 eba4fb94cf 565978225a cce9ca87fa 683c523ece
7a4dc20dc9 022dda06eb 7b88a2d248 fce5b92091 45a506334c e47901a67f
7430daa768 6ce25c8a3b 7b8fa1eb4a ffa5349f25 bb472b63cc 57f566a7a1
2181993246 60d1ecfa75 3962db12a8 f192507cd9 252be3e001 28f12a04f7
d6232dc49c 68a220aa1c 2e7db0f8bd 175ff1e6c4 0b42139393 67a9103b07
f141a4e186 aba29df34c 87ce7d7432 68dccd54b1 4753f4c7c6 13c62bf29f
670209e9fc 4af0d3141f ab281de3c6 d556d28f49 9cfb2fa052 30c03b5529
b943f90766 0108f5b0b1 50145c6cf2 4a8607726c 9bcba4020d 8fd9ff3848
3fb7cb82cd e0ec5c115f f0868ba3ed 425bc12353 03754be71e 70002800c2
281c35677b 92d15c3d91 b1773ac90e 645d9abf9c d2f2bdf88d 90ba17b68c
31406fd520 49bc51ba89 92592104a4 33f4614d28 6c197c376a bcf1b49db2
49f6e70a19 31fb815681 7dfe24ff98 a5627d0cee 93cfa71df2 78320d3265
9e908935f8 b18a703529 ea52dc60be f5699ce830 10aa0956ee 816c164996
ee201e1336 4897952330 e1d85c6dc2 c420ccd029 a880629831 5e2dea1f28
f47d688be4 acfc5e5e68 503db94b2c a4381e5e39 e11de60def b2479e9de8
49d1cef7ae ba72cc1b29 c58b0e7c35 f19d262825 68f3f2e159 269e23c1b1
e111b8b9b8 353ff07420 94dec31130 cf5d3ed745 b0b287cd47 bcdf1f5e9d
17d8e76e05 8db9038c53 a276ba70e0 b7442c1e92 364ba79517 47408763e5
bd187ebfe7 59cb3c2bbf 44f7412a5c 01464057ad 0208020e3e a2f53361eb
17db05c2c7 6139ea328d d20b4a410b 05c64f53b1 f65d0bb82d 50d2831081
bc9bd4f97b 369d3e8c50 7d73eb6bd4 f59f153ee7 20c4a0f8f7 e776a051ad
77e6c22ad8 c9d7251e3b 8417b5fc9d
.gitignore
@@ -2,3 +2,4 @@
target
Cargo.lock
notes/
.lighthouse/
@@ -1,191 +0,0 @@
apiVersion: tekton.dev/v1beta1
kind: PipelineRun
metadata:
  name: clippy
spec:
  pipelineSpec:
    params:
      - name: image-name
        description: The name for the built image
        type: string
      - name: path-to-image-context
        description: The path to the build context
        type: string
      - name: path-to-dockerfile
        description: The path to the Dockerfile
        type: string
      - name: GIT_USER_NAME
        description: The username for git
        type: string
        default: "fluxcdbot"
      - name: GIT_USER_EMAIL
        description: The email for git
        type: string
        default: "fluxcdbot@users.noreply.github.com"
    tasks:
      - name: do-stuff
        taskSpec:
          metadata: {}
          stepTemplate:
            image: alpine:3.18
            name: ""
            resources:
              requests:
                cpu: 10m
                memory: 600Mi
            workingDir: /workspace/source
          steps:
            - image: alpine:3.18
              name: do-stuff-step
              script: |
                #!/usr/bin/env sh
                echo "hello world"
      - name: report-pending
        taskRef:
          name: gitea-set-status
        runAfter:
          - fetch-repository
        params:
          - name: CONTEXT
            value: "$(params.JOB_NAME)"
          - name: REPO_FULL_NAME
            value: "$(params.REPO_OWNER)/$(params.REPO_NAME)"
          - name: GITEA_HOST_URL
            value: code.fizz.buzz
          - name: SHA
            value: "$(tasks.fetch-repository.results.commit)"
          - name: DESCRIPTION
            value: "Build $(params.JOB_NAME) has started"
          - name: STATE
            value: pending
          - name: TARGET_URL
            value: "https://tekton.fizz.buzz/#/namespaces/$(context.pipelineRun.namespace)/pipelineruns/$(context.pipelineRun.name)"
      - name: fetch-repository
        taskRef:
          name: git-clone
        workspaces:
          - name: output
            workspace: git-source
        params:
          - name: url
            value: $(params.REPO_URL)
          - name: revision
            value: $(params.PULL_BASE_SHA)
          - name: deleteExisting
            value: "true"
      - name: build-image
        taskRef:
          name: kaniko
        params:
          - name: IMAGE
            value: "$(params.image-name):$(tasks.fetch-repository.results.commit)"
          - name: CONTEXT
            value: $(params.path-to-image-context)
          - name: DOCKERFILE
            value: $(params.path-to-dockerfile)
          - name: BUILDER_IMAGE
            value: "gcr.io/kaniko-project/executor:v1.12.1"
          - name: EXTRA_ARGS
            value:
              - --cache=true
              - --cache-copy-layers
              - --cache-repo=harbor.fizz.buzz/kanikocache/cache
              - --use-new-run # Should result in a speed-up
              - --reproducible # To remove timestamps so layer caching works.
              - --snapshot-mode=redo
              - --skip-unused-stages=true
              - --registry-mirror=dockerhub.dockerhub.svc.cluster.local
        workspaces:
          - name: source
            workspace: git-source
          - name: dockerconfig
            workspace: docker-credentials
        runAfter:
          - fetch-repository
      - name: clippy
        taskRef:
          name: run-docker-image
        workspaces:
          - name: source
            workspace: git-source
          - name: cargo-cache
            workspace: cargo-cache
        runAfter:
          - build-image
        params:
          - name: docker-image
            value: "$(params.image-name):$(tasks.fetch-repository.results.commit)"
    finally:
      - name: report-success
        when:
          - input: "$(tasks.status)"
            operator: in
            values: ["Succeeded", "Completed"]
        taskRef:
          name: gitea-set-status
        params:
          - name: CONTEXT
            value: "$(params.JOB_NAME)"
          - name: REPO_FULL_NAME
            value: "$(params.REPO_OWNER)/$(params.REPO_NAME)"
          - name: GITEA_HOST_URL
            value: code.fizz.buzz
          - name: SHA
            value: "$(tasks.fetch-repository.results.commit)"
          - name: DESCRIPTION
            value: "Build $(params.JOB_NAME) has succeeded"
          - name: STATE
            value: success
          - name: TARGET_URL
            value: "https://tekton.fizz.buzz/#/namespaces/$(context.pipelineRun.namespace)/pipelineruns/$(context.pipelineRun.name)"
      - name: report-failure
        when:
          - input: "$(tasks.status)"
            operator: in
            values: ["Failed"]
        taskRef:
          name: gitea-set-status
        params:
          - name: CONTEXT
            value: "$(params.JOB_NAME)"
          - name: REPO_FULL_NAME
            value: "$(params.REPO_OWNER)/$(params.REPO_NAME)"
          - name: GITEA_HOST_URL
            value: code.fizz.buzz
          - name: SHA
            value: "$(tasks.fetch-repository.results.commit)"
          - name: DESCRIPTION
            value: "Build $(params.JOB_NAME) has failed"
          - name: STATE
            value: failure
          - name: TARGET_URL
            value: "https://tekton.fizz.buzz/#/namespaces/$(context.pipelineRun.namespace)/pipelineruns/$(context.pipelineRun.name)"
    workspaces:
      - name: git-source
      - name: docker-credentials
  workspaces:
    - name: git-source
      volumeClaimTemplate:
        spec:
          storageClassName: "nfs-client"
          accessModes:
            - ReadWriteOnce
          resources:
            requests:
              storage: 10Gi
      subPath: rust-source
    - name: cargo-cache
      persistentVolumeClaim:
        claimName: organic-cargo-cache-clippy
    - name: docker-credentials
      secret:
        secretName: harbor-plain
  serviceAccountName: build-bot
  timeout: 240h0m0s
  params:
    - name: image-name
      value: "harbor.fizz.buzz/private/organic-clippy"
    - name: path-to-image-context
      value: docker/organic_clippy/
    - name: path-to-dockerfile
      value: docker/organic_clippy/Dockerfile
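Taken together, the deleted clippy pipeline builds the `docker/organic_clippy/` image with kaniko and then runs it against the checkout via the cluster's `run-docker-image` task. A rough local equivalent, assuming the `organic-clippy` image's default command runs clippy over the mounted source (both the task and the image are cluster-specific, so this is only a sketch):

```sh
#!/usr/bin/env sh
# Hypothetical local replay of the clippy pipeline: build the lint image,
# then run it with the repo mounted where the in-cluster task mounts it.
set -eu
docker build -t organic-clippy:local \
  -f docker/organic_clippy/Dockerfile docker/organic_clippy/
docker run --rm \
  -v "$PWD":/workspace/source -w /workspace/source \
  organic-clippy:local
```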
@@ -1,203 +0,0 @@
apiVersion: tekton.dev/v1beta1
kind: PipelineRun
metadata:
  name: rust-foreign-document-test
spec:
  timeouts:
    pipeline: "2h0m0s"
    tasks: "1h0m40s"
    finally: "0h30m0s"
  pipelineSpec:
    params:
      - name: image-name
        description: The name for the built image
        type: string
      - name: path-to-image-context
        description: The path to the build context
        type: string
      - name: path-to-dockerfile
        description: The path to the Dockerfile
        type: string
    tasks:
      - name: do-stuff
        taskSpec:
          metadata: {}
          stepTemplate:
            image: alpine:3.18
            name: ""
            resources:
              requests:
                cpu: 10m
                memory: 600Mi
            workingDir: /workspace/source
          steps:
            - image: alpine:3.18
              name: do-stuff-step
              script: |
                #!/usr/bin/env sh
                echo "hello world"
      - name: report-pending
        taskRef:
          name: gitea-set-status
        runAfter:
          - fetch-repository
        params:
          - name: CONTEXT
            value: "$(params.JOB_NAME)"
          - name: REPO_FULL_NAME
            value: "$(params.REPO_OWNER)/$(params.REPO_NAME)"
          - name: GITEA_HOST_URL
            value: code.fizz.buzz
          - name: SHA
            value: "$(tasks.fetch-repository.results.commit)"
          - name: DESCRIPTION
            value: "Build $(params.JOB_NAME) has started"
          - name: STATE
            value: pending
          - name: TARGET_URL
            value: "https://tekton.fizz.buzz/#/namespaces/$(context.pipelineRun.namespace)/pipelineruns/$(context.pipelineRun.name)"
      - name: fetch-repository
        taskRef:
          name: git-clone
        workspaces:
          - name: output
            workspace: git-source
        params:
          - name: url
            value: $(params.REPO_URL)
          - name: revision
            value: $(params.PULL_BASE_SHA)
          - name: deleteExisting
            value: "true"
      - name: build-image
        taskRef:
          name: kaniko
        params:
          - name: IMAGE
            value: "$(params.image-name):$(tasks.fetch-repository.results.commit)"
          - name: CONTEXT
            value: $(params.path-to-image-context)
          - name: DOCKERFILE
            value: $(params.path-to-dockerfile)
          - name: BUILDER_IMAGE
            value: "gcr.io/kaniko-project/executor:v1.12.1"
          - name: EXTRA_ARGS
            value:
              - --target=foreign-document-test
              - --cache=true
              - --cache-copy-layers
              - --cache-repo=harbor.fizz.buzz/kanikocache/cache
              - --use-new-run # Should result in a speed-up
              - --reproducible # To remove timestamps so layer caching works.
              - --snapshot-mode=redo
              - --skip-unused-stages=true
              - --registry-mirror=dockerhub.dockerhub.svc.cluster.local
        workspaces:
          - name: source
            workspace: git-source
          - name: dockerconfig
            workspace: docker-credentials
        runAfter:
          - fetch-repository
      - name: run-image
        taskRef:
          name: run-docker-image
        workspaces:
          - name: source
            workspace: git-source
          - name: cargo-cache
            workspace: cargo-cache
        runAfter:
          - build-image
        params:
          - name: docker-image
            value: "$(params.image-name):$(tasks.fetch-repository.results.commit)"
    finally:
      - name: report-success
        when:
          - input: "$(tasks.status)"
            operator: in
            values: ["Succeeded", "Completed"]
        taskRef:
          name: gitea-set-status
        params:
          - name: CONTEXT
            value: "$(params.JOB_NAME)"
          - name: REPO_FULL_NAME
            value: "$(params.REPO_OWNER)/$(params.REPO_NAME)"
          - name: GITEA_HOST_URL
            value: code.fizz.buzz
          - name: SHA
            value: "$(tasks.fetch-repository.results.commit)"
          - name: DESCRIPTION
            value: "Build $(params.JOB_NAME) has succeeded"
          - name: STATE
            value: success
          - name: TARGET_URL
            value: "https://tekton.fizz.buzz/#/namespaces/$(context.pipelineRun.namespace)/pipelineruns/$(context.pipelineRun.name)"
      - name: report-failure
        when:
          - input: "$(tasks.status)"
            operator: in
            values: ["Failed"]
        taskRef:
          name: gitea-set-status
        params:
          - name: CONTEXT
            value: "$(params.JOB_NAME)"
          - name: REPO_FULL_NAME
            value: "$(params.REPO_OWNER)/$(params.REPO_NAME)"
          - name: GITEA_HOST_URL
            value: code.fizz.buzz
          - name: SHA
            value: "$(tasks.fetch-repository.results.commit)"
          - name: DESCRIPTION
            value: "Build $(params.JOB_NAME) has failed"
          - name: STATE
            value: failure
          - name: TARGET_URL
            value: "https://tekton.fizz.buzz/#/namespaces/$(context.pipelineRun.namespace)/pipelineruns/$(context.pipelineRun.name)"
      - name: cargo-cache-autoclean
        taskRef:
          name: run-docker-image
        workspaces:
          - name: source
            workspace: git-source
          - name: cargo-cache
            workspace: cargo-cache
        params:
          - name: command
            value: [cargo, cache, --autoclean]
          - name: args
            value: []
          - name: docker-image
            value: "$(params.image-name):$(tasks.fetch-repository.results.commit)"
    workspaces:
      - name: git-source
      - name: docker-credentials
      - name: cargo-cache
  workspaces:
    - name: git-source
      volumeClaimTemplate:
        spec:
          storageClassName: "nfs-client"
          accessModes:
            - ReadWriteOnce
          resources:
            requests:
              storage: 10Gi
      subPath: rust-source
    - name: cargo-cache
      persistentVolumeClaim:
        claimName: organic-cargo-cache-test-foreign-document
    - name: docker-credentials
      secret:
        secretName: harbor-plain
  serviceAccountName: build-bot
  params:
    - name: image-name
      value: "harbor.fizz.buzz/private/organic-test-foreign-document"
    - name: path-to-image-context
      value: docker/organic_test/
    - name: path-to-dockerfile
      value: docker/organic_test/Dockerfile
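The `cargo-cache-autoclean` finally-task above (and in the other pipelines) keeps the persistent registry volume from growing without bound. It runs the `cargo-cache` tool from crates.io inside the build image; the same cleanup outside the cluster looks like:

```sh
# cargo-cache is a third-party cargo subcommand from crates.io.
cargo install cargo-cache   # one-time install
cargo cache --autoclean     # drop extracted sources/checkouts, keep downloaded .crate archives
```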
@@ -1,284 +0,0 @@
apiVersion: tekton.dev/v1beta1
kind: PipelineRun
metadata:
  name: rust-build
spec:
  pipelineSpec:
    params:
      - name: image-name
        description: The name for the built image
        type: string
      - name: path-to-image-context
        description: The path to the build context
        type: string
      - name: path-to-dockerfile
        description: The path to the Dockerfile
        type: string
    tasks:
      - name: report-pending
        taskRef:
          name: gitea-set-status
        runAfter:
          - fetch-repository
        params:
          - name: CONTEXT
            value: "$(params.JOB_NAME)"
          - name: REPO_FULL_NAME
            value: "$(params.REPO_OWNER)/$(params.REPO_NAME)"
          - name: GITEA_HOST_URL
            value: code.fizz.buzz
          - name: SHA
            value: "$(tasks.fetch-repository.results.commit)"
          - name: DESCRIPTION
            value: "Build $(params.JOB_NAME) has started"
          - name: STATE
            value: pending
          - name: TARGET_URL
            value: "https://tekton.fizz.buzz/#/namespaces/$(context.pipelineRun.namespace)/pipelineruns/$(context.pipelineRun.name)"
      - name: fetch-repository
        taskRef:
          name: git-clone
        workspaces:
          - name: output
            workspace: git-source
        params:
          - name: url
            value: $(params.REPO_URL)
          - name: revision
            value: $(params.PULL_BASE_SHA)
          - name: deleteExisting
            value: "true"
      - name: build-image
        taskRef:
          name: kaniko
        params:
          - name: IMAGE
            value: "$(params.image-name):$(tasks.fetch-repository.results.commit)"
          - name: CONTEXT
            value: $(params.path-to-image-context)
          - name: DOCKERFILE
            value: $(params.path-to-dockerfile)
          - name: BUILDER_IMAGE
            value: "gcr.io/kaniko-project/executor:v1.12.1"
          - name: EXTRA_ARGS
            value:
              - --cache=true
              - --cache-copy-layers
              - --cache-repo=harbor.fizz.buzz/kanikocache/cache
              - --use-new-run # Should result in a speed-up
              - --reproducible # To remove timestamps so layer caching works.
              - --snapshot-mode=redo
              - --skip-unused-stages=true
              - --registry-mirror=dockerhub.dockerhub.svc.cluster.local
        workspaces:
          - name: source
            workspace: git-source
          - name: dockerconfig
            workspace: docker-credentials
        runAfter:
          - fetch-repository
      - name: run-image-none
        taskRef:
          name: run-docker-image
        workspaces:
          - name: source
            workspace: git-source
          - name: cargo-cache
            workspace: cargo-cache
        runAfter:
          - build-image
        params:
          - name: args
            value: ["--no-default-features"]
          - name: docker-image
            value: "$(params.image-name):$(tasks.fetch-repository.results.commit)"
      - name: run-image-tracing
        taskRef:
          name: run-docker-image
        workspaces:
          - name: source
            workspace: git-source
          - name: cargo-cache
            workspace: cargo-cache
        runAfter:
          - run-image-none
        params:
          - name: args
            value: ["--no-default-features", "--features", "tracing"]
          - name: docker-image
            value: "$(params.image-name):$(tasks.fetch-repository.results.commit)"
      - name: run-image-compare
        taskRef:
          name: run-docker-image
        workspaces:
          - name: source
            workspace: git-source
          - name: cargo-cache
            workspace: cargo-cache
        runAfter:
          - run-image-tracing
        params:
          - name: args
            value: ["--no-default-features", "--features", "compare"]
          - name: docker-image
            value: "$(params.image-name):$(tasks.fetch-repository.results.commit)"
      - name: run-image-default
        taskRef:
          name: run-docker-image
        workspaces:
          - name: source
            workspace: git-source
          - name: cargo-cache
            workspace: cargo-cache
        runAfter:
          - run-image-compare
        params:
          - name: args
            value: []
          - name: docker-image
            value: "$(params.image-name):$(tasks.fetch-repository.results.commit)"
      - name: run-image-tracing-compare
        taskRef:
          name: run-docker-image
        workspaces:
          - name: source
            workspace: git-source
          - name: cargo-cache
            workspace: cargo-cache
        runAfter:
          - run-image-default
        params:
          - name: args
            value: ["--no-default-features", "--features", "tracing,compare"]
          - name: docker-image
            value: "$(params.image-name):$(tasks.fetch-repository.results.commit)"
      - name: run-image-compare-foreign
        taskRef:
          name: run-docker-image
        workspaces:
          - name: source
            workspace: git-source
          - name: cargo-cache
            workspace: cargo-cache
        runAfter:
          - run-image-tracing-compare
        params:
          - name: args
            value:
              [
                "--no-default-features",
                "--features",
                "compare,foreign_document_test",
              ]
          - name: docker-image
            value: "$(params.image-name):$(tasks.fetch-repository.results.commit)"
      - name: run-image-all
        taskRef:
          name: run-docker-image
        workspaces:
          - name: source
            workspace: git-source
          - name: cargo-cache
            workspace: cargo-cache
        runAfter:
          - run-image-compare-foreign
        params:
          - name: args
            value:
              [
                "--no-default-features",
                "--features",
                "tracing,compare,foreign_document_test",
              ]
          - name: docker-image
            value: "$(params.image-name):$(tasks.fetch-repository.results.commit)"
    finally:
      - name: report-success
        when:
          - input: "$(tasks.status)"
            operator: in
            values: ["Succeeded", "Completed"]
        taskRef:
          name: gitea-set-status
        params:
          - name: CONTEXT
            value: "$(params.JOB_NAME)"
          - name: REPO_FULL_NAME
            value: "$(params.REPO_OWNER)/$(params.REPO_NAME)"
          - name: GITEA_HOST_URL
            value: code.fizz.buzz
          - name: SHA
            value: "$(tasks.fetch-repository.results.commit)"
          - name: DESCRIPTION
            value: "Build $(params.JOB_NAME) has succeeded"
          - name: STATE
            value: success
          - name: TARGET_URL
            value: "https://tekton.fizz.buzz/#/namespaces/$(context.pipelineRun.namespace)/pipelineruns/$(context.pipelineRun.name)"
      - name: report-failure
        when:
          - input: "$(tasks.status)"
            operator: in
            values: ["Failed"]
        taskRef:
          name: gitea-set-status
        params:
          - name: CONTEXT
            value: "$(params.JOB_NAME)"
          - name: REPO_FULL_NAME
            value: "$(params.REPO_OWNER)/$(params.REPO_NAME)"
          - name: GITEA_HOST_URL
            value: code.fizz.buzz
          - name: SHA
            value: "$(tasks.fetch-repository.results.commit)"
          - name: DESCRIPTION
            value: "Build $(params.JOB_NAME) has failed"
          - name: STATE
            value: failure
          - name: TARGET_URL
            value: "https://tekton.fizz.buzz/#/namespaces/$(context.pipelineRun.namespace)/pipelineruns/$(context.pipelineRun.name)"
      - name: cargo-cache-autoclean
        taskRef:
          name: run-docker-image
        workspaces:
          - name: source
            workspace: git-source
          - name: cargo-cache
            workspace: cargo-cache
        params:
          - name: command
            value: [cargo, cache, --autoclean]
          - name: args
            value: []
          - name: docker-image
            value: "$(params.image-name):$(tasks.fetch-repository.results.commit)"
    workspaces:
      - name: git-source
      - name: docker-credentials
      - name: cargo-cache
  workspaces:
    - name: git-source
      volumeClaimTemplate:
        spec:
          storageClassName: "nfs-client"
          accessModes:
            - ReadWriteOnce
          resources:
            requests:
              storage: 10Gi
      subPath: rust-source
    - name: cargo-cache
      persistentVolumeClaim:
        claimName: organic-cargo-cache-build
    - name: docker-credentials
      secret:
        secretName: harbor-plain
  serviceAccountName: build-bot
  timeout: 240h0m0s
  params:
    - name: image-name
      value: "harbor.fizz.buzz/private/organic-build"
    - name: path-to-image-context
      value: docker/organic_build/
    - name: path-to-dockerfile
      value: docker/organic_build/Dockerfile
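The `run-image-*` chain in rust-build walks the crate's feature combinations one task at a time against a shared cargo cache. Assuming the `run-docker-image` task's default command is `cargo build` (the replacement pipeline at the bottom of this diff makes that explicit), the sequence corresponds to:

```sh
#!/usr/bin/env sh
# Feature-matrix sweep performed by run-image-none through run-image-all.
set -eu
cargo build --no-default-features
cargo build --no-default-features --features tracing
cargo build --no-default-features --features compare
cargo build   # default features
cargo build --no-default-features --features tracing,compare
cargo build --no-default-features --features compare,foreign_document_test
cargo build --no-default-features --features tracing,compare,foreign_document_test
```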
@@ -1,214 +0,0 @@
apiVersion: tekton.dev/v1beta1
kind: PipelineRun
metadata:
  name: rust-test
spec:
  timeouts:
    pipeline: "2h0m0s"
    tasks: "1h0m40s"
    finally: "0h30m0s"
  pipelineSpec:
    params:
      - name: image-name
        description: The name for the built image
        type: string
      - name: path-to-image-context
        description: The path to the build context
        type: string
      - name: path-to-dockerfile
        description: The path to the Dockerfile
        type: string
    tasks:
      - name: do-stuff
        taskSpec:
          metadata: {}
          stepTemplate:
            image: alpine:3.18
            name: ""
            resources:
              requests:
                cpu: 10m
                memory: 600Mi
            workingDir: /workspace/source
          steps:
            - image: alpine:3.18
              name: do-stuff-step
              script: |
                #!/usr/bin/env sh
                echo "hello world"
      - name: report-pending
        taskRef:
          name: gitea-set-status
        runAfter:
          - fetch-repository
        params:
          - name: CONTEXT
            value: "$(params.JOB_NAME)"
          - name: REPO_FULL_NAME
            value: "$(params.REPO_OWNER)/$(params.REPO_NAME)"
          - name: GITEA_HOST_URL
            value: code.fizz.buzz
          - name: SHA
            value: "$(tasks.fetch-repository.results.commit)"
          - name: DESCRIPTION
            value: "Build $(params.JOB_NAME) has started"
          - name: STATE
            value: pending
          - name: TARGET_URL
            value: "https://tekton.fizz.buzz/#/namespaces/$(context.pipelineRun.namespace)/pipelineruns/$(context.pipelineRun.name)"
      - name: fetch-repository
        taskRef:
          name: git-clone
        workspaces:
          - name: output
            workspace: git-source
        params:
          - name: url
            value: $(params.REPO_URL)
          - name: revision
            value: $(params.PULL_BASE_SHA)
          - name: deleteExisting
            value: "true"
      - name: build-image
        taskRef:
          name: kaniko
        params:
          - name: IMAGE
            value: "$(params.image-name):$(tasks.fetch-repository.results.commit)"
          - name: CONTEXT
            value: $(params.path-to-image-context)
          - name: DOCKERFILE
            value: $(params.path-to-dockerfile)
          - name: BUILDER_IMAGE
            value: "gcr.io/kaniko-project/executor:v1.12.1"
          - name: EXTRA_ARGS
            value:
              - --target=tester
              - --cache=true
              - --cache-copy-layers
              - --cache-repo=harbor.fizz.buzz/kanikocache/cache
              - --use-new-run # Should result in a speed-up
              - --reproducible # To remove timestamps so layer caching works.
              - --snapshot-mode=redo
              - --skip-unused-stages=true
              - --registry-mirror=dockerhub.dockerhub.svc.cluster.local
        workspaces:
          - name: source
            workspace: git-source
          - name: dockerconfig
            workspace: docker-credentials
        runAfter:
          - fetch-repository
      - name: run-image
        taskRef:
          name: run-docker-image
        workspaces:
          - name: source
            workspace: git-source
          - name: cargo-cache
            workspace: cargo-cache
        runAfter:
          - build-image
        params:
          - name: args
            value:
              [
                --no-default-features,
                --features,
                compare,
                --no-fail-fast,
                --lib,
                --test,
                test_loader,
              ]
          - name: docker-image
            value: "$(params.image-name):$(tasks.fetch-repository.results.commit)"
    finally:
      - name: report-success
        when:
          - input: "$(tasks.status)"
            operator: in
            values: ["Succeeded", "Completed"]
        taskRef:
          name: gitea-set-status
        params:
          - name: CONTEXT
            value: "$(params.JOB_NAME)"
          - name: REPO_FULL_NAME
            value: "$(params.REPO_OWNER)/$(params.REPO_NAME)"
          - name: GITEA_HOST_URL
            value: code.fizz.buzz
          - name: SHA
            value: "$(tasks.fetch-repository.results.commit)"
          - name: DESCRIPTION
            value: "Build $(params.JOB_NAME) has succeeded"
          - name: STATE
            value: success
          - name: TARGET_URL
            value: "https://tekton.fizz.buzz/#/namespaces/$(context.pipelineRun.namespace)/pipelineruns/$(context.pipelineRun.name)"
      - name: report-failure
        when:
          - input: "$(tasks.status)"
            operator: in
            values: ["Failed"]
        taskRef:
          name: gitea-set-status
        params:
          - name: CONTEXT
            value: "$(params.JOB_NAME)"
          - name: REPO_FULL_NAME
            value: "$(params.REPO_OWNER)/$(params.REPO_NAME)"
          - name: GITEA_HOST_URL
            value: code.fizz.buzz
          - name: SHA
            value: "$(tasks.fetch-repository.results.commit)"
          - name: DESCRIPTION
            value: "Build $(params.JOB_NAME) has failed"
          - name: STATE
            value: failure
          - name: TARGET_URL
            value: "https://tekton.fizz.buzz/#/namespaces/$(context.pipelineRun.namespace)/pipelineruns/$(context.pipelineRun.name)"
      - name: cargo-cache-autoclean
        taskRef:
          name: run-docker-image
        workspaces:
          - name: source
            workspace: git-source
          - name: cargo-cache
            workspace: cargo-cache
        params:
          - name: command
            value: [cargo, cache, --autoclean]
          - name: args
            value: []
          - name: docker-image
            value: "$(params.image-name):$(tasks.fetch-repository.results.commit)"
    workspaces:
      - name: git-source
      - name: docker-credentials
      - name: cargo-cache
  workspaces:
    - name: git-source
      volumeClaimTemplate:
        spec:
          storageClassName: "nfs-client"
          accessModes:
            - ReadWriteOnce
          resources:
            requests:
              storage: 10Gi
      subPath: rust-source
    - name: cargo-cache
      persistentVolumeClaim:
        claimName: organic-cargo-cache-test
    - name: docker-credentials
      secret:
        secretName: harbor-plain
  serviceAccountName: build-bot
  params:
    - name: image-name
      value: "harbor.fizz.buzz/private/organic-test"
    - name: path-to-image-context
      value: docker/organic_test/
    - name: path-to-dockerfile
      value: docker/organic_test/Dockerfile
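The rust-test pipeline's single `run-image` task supplies only arguments, so the effective command depends on the image's default entrypoint. Assuming that default is `cargo test`, the invocation is:

```sh
# Assumed effective command inside the organic-test image
# (the pipeline passes args only; `cargo test` is the presumed default command).
cargo test --no-default-features --features compare \
  --no-fail-fast --lib --test test_loader
```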
@@ -1,230 +0,0 @@
apiVersion: tekton.dev/v1beta1
kind: PipelineRun
metadata:
  name: rustfmt
spec:
  pipelineSpec:
    params:
      - name: image-name
        description: The name for the built image
        type: string
      - name: path-to-image-context
        description: The path to the build context
        type: string
      - name: path-to-dockerfile
        description: The path to the Dockerfile
        type: string
      - name: GIT_USER_NAME
        description: The username for git
        type: string
        default: "fluxcdbot"
      - name: GIT_USER_EMAIL
        description: The email for git
        type: string
        default: "fluxcdbot@users.noreply.github.com"
    tasks:
      - name: do-stuff
        taskSpec:
          metadata: {}
          stepTemplate:
            image: alpine:3.18
            name: ""
            resources:
              requests:
                cpu: 10m
                memory: 600Mi
            workingDir: /workspace/source
          steps:
            - image: alpine:3.18
              name: do-stuff-step
              script: |
                #!/usr/bin/env sh
                echo "hello world"
      - name: report-pending
        taskRef:
          name: gitea-set-status
        runAfter:
          - fetch-repository
        params:
          - name: CONTEXT
            value: "$(params.JOB_NAME)"
          - name: REPO_FULL_NAME
            value: "$(params.REPO_OWNER)/$(params.REPO_NAME)"
          - name: GITEA_HOST_URL
            value: code.fizz.buzz
          - name: SHA
            value: "$(tasks.fetch-repository.results.commit)"
          - name: DESCRIPTION
            value: "Build $(params.JOB_NAME) has started"
          - name: STATE
            value: pending
          - name: TARGET_URL
            value: "https://tekton.fizz.buzz/#/namespaces/$(context.pipelineRun.namespace)/pipelineruns/$(context.pipelineRun.name)"
      - name: fetch-repository
        taskRef:
          name: git-clone
        workspaces:
          - name: output
            workspace: git-source
        params:
          - name: url
            value: $(params.REPO_URL)
          - name: revision
            value: $(params.PULL_BASE_SHA)
          - name: deleteExisting
            value: "true"
      - name: build-image
        taskRef:
          name: kaniko
        params:
          - name: IMAGE
            value: "$(params.image-name):$(tasks.fetch-repository.results.commit)"
          - name: CONTEXT
            value: $(params.path-to-image-context)
          - name: DOCKERFILE
            value: $(params.path-to-dockerfile)
          - name: BUILDER_IMAGE
            value: "gcr.io/kaniko-project/executor:v1.12.1"
          - name: EXTRA_ARGS
            value:
              - --cache=true
              - --cache-copy-layers
              - --cache-repo=harbor.fizz.buzz/kanikocache/cache
              - --use-new-run # Should result in a speed-up
              - --reproducible # To remove timestamps so layer caching works.
              - --snapshot-mode=redo
              - --skip-unused-stages=true
              - --registry-mirror=dockerhub.dockerhub.svc.cluster.local
        workspaces:
          - name: source
            workspace: git-source
          - name: dockerconfig
            workspace: docker-credentials
        runAfter:
          - fetch-repository
      - name: rustfmt
        taskRef:
          name: run-docker-image
        workspaces:
          - name: source
            workspace: git-source
        runAfter:
          - build-image
        params:
          - name: docker-image
            value: "$(params.image-name):$(tasks.fetch-repository.results.commit)"
      - name: cargo-fix
        taskRef:
          name: run-docker-image
        workspaces:
          - name: source
            workspace: git-source
          - name: cargo-cache
            workspace: cargo-cache
        runAfter:
          - rustfmt
        params:
          - name: command
            value: ["cargo", "fix"]
          - name: args
            value: ["--allow-dirty"]
          - name: docker-image
            value: "$(params.image-name):$(tasks.fetch-repository.results.commit)"
      - name: commit-changes
        taskRef:
          name: git-cli
        params:
          - name: GIT_USER_NAME
            value: $(params.GIT_USER_NAME)
          - name: GIT_USER_EMAIL
            value: $(params.GIT_USER_EMAIL)
          - name: GIT_SCRIPT
            value: |
              pwd
              git config --global --add safe.directory /workspace/source
              git_status=$(git status --porcelain)
              if [ -n "$git_status" ]; then
                git commit -a -m "CI: autofix rust code."
                git push origin HEAD:$(params.PULL_BASE_REF)
              else
                echo "No changes to commit."
              fi
        workspaces:
          - name: source
            workspace: git-source
        runAfter:
          - cargo-fix
    finally:
      - name: report-success
        when:
          - input: "$(tasks.status)"
            operator: in
            values: ["Succeeded", "Completed"]
        taskRef:
          name: gitea-set-status
        params:
          - name: CONTEXT
            value: "$(params.JOB_NAME)"
          - name: REPO_FULL_NAME
            value: "$(params.REPO_OWNER)/$(params.REPO_NAME)"
          - name: GITEA_HOST_URL
            value: code.fizz.buzz
          - name: SHA
            value: "$(tasks.fetch-repository.results.commit)"
          - name: DESCRIPTION
            value: "Build $(params.JOB_NAME) has succeeded"
          - name: STATE
            value: success
          - name: TARGET_URL
            value: "https://tekton.fizz.buzz/#/namespaces/$(context.pipelineRun.namespace)/pipelineruns/$(context.pipelineRun.name)"
      - name: report-failure
        when:
          - input: "$(tasks.status)"
            operator: in
            values: ["Failed"]
        taskRef:
          name: gitea-set-status
        params:
          - name: CONTEXT
            value: "$(params.JOB_NAME)"
          - name: REPO_FULL_NAME
            value: "$(params.REPO_OWNER)/$(params.REPO_NAME)"
          - name: GITEA_HOST_URL
            value: code.fizz.buzz
          - name: SHA
            value: "$(tasks.fetch-repository.results.commit)"
          - name: DESCRIPTION
            value: "Build $(params.JOB_NAME) has failed"
          - name: STATE
            value: failure
          - name: TARGET_URL
            value: "https://tekton.fizz.buzz/#/namespaces/$(context.pipelineRun.namespace)/pipelineruns/$(context.pipelineRun.name)"
    workspaces:
      - name: git-source
      - name: docker-credentials
  workspaces:
    - name: git-source
      volumeClaimTemplate:
        spec:
          storageClassName: "nfs-client"
          accessModes:
            - ReadWriteOnce
          resources:
            requests:
              storage: 10Gi
      subPath: rust-source
    - name: cargo-cache
      persistentVolumeClaim:
        claimName: organic-cargo-cache-fmt
    - name: docker-credentials
      secret:
        secretName: harbor-plain
  serviceAccountName: build-bot
  timeout: 240h0m0s
  params:
    - name: image-name
      value: "harbor.fizz.buzz/private/organic-fmt"
    - name: path-to-image-context
      value: docker/cargo_fmt/
    - name: path-to-dockerfile
      value: docker/cargo_fmt/Dockerfile
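End to end, the rustfmt pipeline formats the code, applies compiler suggestions, and pushes a fixup commit only when the tree is dirty. A condensed sketch of that flow, assuming the `organic-fmt` image's default command runs `cargo fmt` (the commit logic is taken verbatim from GIT_SCRIPT above):

```sh
#!/usr/bin/env sh
# Condensed autofix flow: format, fix, then commit only if something changed.
set -eu
cargo fmt
cargo fix --allow-dirty
git config --global --add safe.directory /workspace/source
if [ -n "$(git status --porcelain)" ]; then
  git commit -a -m "CI: autofix rust code."
  git push origin "HEAD:$PULL_BASE_REF"  # PULL_BASE_REF is supplied by the webhook event
else
  echo "No changes to commit."
fi
```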
@@ -1,39 +0,0 @@
apiVersion: config.lighthouse.jenkins-x.io/v1alpha1
kind: TriggerConfig
spec:
  postsubmits:
    - name: rustfmt
      source: "pipeline-rustfmt.yaml"
      # Override https-based url from lighthouse events.
      clone_uri: "git@code.fizz.buzz:talexander/organic.git"
      branches:
        - ^main$
        - ^master$
    - name: rust-test
      source: "pipeline-rust-test.yaml"
      # Override https-based url from lighthouse events.
      clone_uri: "git@code.fizz.buzz:talexander/organic.git"
      skip_branches:
        # We already run on every commit, so running when the semver tags get pushed is causing needless double-processing.
        - "^v[0-9]+\\.[0-9]+\\.[0-9]+$"
    - name: rust-foreign-document-test
      source: "pipeline-foreign-document-test.yaml"
      # Override https-based url from lighthouse events.
      clone_uri: "git@code.fizz.buzz:talexander/organic.git"
      skip_branches:
        # We already run on every commit, so running when the semver tags get pushed is causing needless double-processing.
        - "^v[0-9]+\\.[0-9]+\\.[0-9]+$"
    - name: rust-build
      source: "pipeline-rust-build.yaml"
      # Override https-based url from lighthouse events.
      clone_uri: "git@code.fizz.buzz:talexander/organic.git"
      skip_branches:
        # We already run on every commit, so running when the semver tags get pushed is causing needless double-processing.
        - "^v[0-9]+\\.[0-9]+\\.[0-9]+$"
    - name: clippy
      source: "pipeline-clippy.yaml"
      # Override https-based url from lighthouse events.
      clone_uri: "git@code.fizz.buzz:talexander/organic.git"
      skip_branches:
        # We already run on every commit, so running when the semver tags get pushed is causing needless double-processing.
        - "^v[0-9]+\\.[0-9]+\\.[0-9]+$"
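The `skip_branches` pattern matches only bare semver tags (vMAJOR.MINOR.PATCH), so regular branch pushes still trigger the jobs. A quick check of the regex:

```sh
# Only the bare semver tag matches; branches and pre-release tags do not.
printf '%s\n' v1.2.3 main v1.2.3-rc1 | grep -E '^v[0-9]+\.[0-9]+\.[0-9]+$'
# prints: v1.2.3
```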
701
.webhook_bridge/pipeline-build-hash.yaml
Normal file
701
.webhook_bridge/pipeline-build-hash.yaml
Normal file
@ -0,0 +1,701 @@
|
||||
apiVersion: tekton.dev/v1
|
||||
kind: PipelineRun
|
||||
metadata:
|
||||
name: build
|
||||
spec:
|
||||
timeouts:
|
||||
pipeline: "2h0m0s"
|
||||
tasks: "1h0m0s"
|
||||
finally: "0h30m0s"
|
||||
taskRunTemplate:
|
||||
serviceAccountName: build-bot
|
||||
pipelineSpec:
|
||||
params:
|
||||
- name: image-name
|
||||
description: The name for the built image
|
||||
type: string
|
||||
- name: target-name
|
||||
description: The dockerfile target to build
|
||||
type: string
|
||||
- name: path-to-image-context
|
||||
description: The path to the build context
|
||||
type: string
|
||||
- name: path-to-dockerfile
|
||||
description: The path to the Dockerfile
|
||||
type: string
|
||||
tasks:
|
||||
- name: report-pending
|
||||
taskRef:
|
||||
resolver: git
|
||||
params:
|
||||
- name: url
|
||||
value: https://code.fizz.buzz/mirror/catalog.git # mirror of https://github.com/tektoncd/catalog.git
|
||||
- name: revision
|
||||
value: df36b3853a5657fd883015cdbf07ad6466918acf
|
||||
- name: pathInRepo
|
||||
value: task/gitea-set-status/0.1/gitea-set-status.yaml
|
||||
params:
|
||||
- name: CONTEXT
|
||||
value: "$(params.JOB_NAME)"
|
||||
- name: REPO_FULL_NAME
|
||||
value: "$(params.REPO_OWNER)/$(params.REPO_NAME)"
|
||||
- name: GITEA_HOST_URL
|
||||
value: code.fizz.buzz
|
||||
- name: SHA
|
||||
value: "$(tasks.fetch-repository.results.commit)"
|
||||
- name: DESCRIPTION
|
||||
value: "Build $(params.JOB_NAME) has started"
|
||||
- name: STATE
|
||||
value: pending
|
||||
- name: TARGET_URL
|
||||
value: "https://tekton.fizz.buzz/#/namespaces/$(context.pipelineRun.namespace)/pipelineruns/$(context.pipelineRun.name)"
|
||||
- name: fetch-repository
|
||||
taskRef:
|
||||
resolver: git
|
||||
params:
|
||||
- name: url
|
||||
value: https://code.fizz.buzz/mirror/catalog.git # mirror of https://github.com/tektoncd/catalog.git
|
||||
- name: revision
|
||||
value: df36b3853a5657fd883015cdbf07ad6466918acf
|
||||
- name: pathInRepo
|
||||
value: task/git-clone/0.9/git-clone.yaml
|
||||
workspaces:
|
||||
- name: output
|
||||
workspace: git-source
|
||||
params:
|
||||
- name: url
|
||||
value: $(params.REPO_URL)
|
||||
- name: revision
|
||||
value: $(params.PULL_BASE_SHA)
|
||||
- name: deleteExisting
|
||||
value: "true"
|
||||
- name: get-git-commit-time
|
||||
taskSpec:
|
||||
metadata: {}
|
||||
stepTemplate:
|
||||
image: alpine:3.20
|
||||
computeResources:
|
||||
requests:
|
||||
cpu: 10m
|
||||
memory: 600Mi
|
||||
workingDir: "$(workspaces.repo.path)"
|
||||
results:
|
||||
- name: unix-time
|
||||
description: The time of the git commit in unix timestamp format.
|
||||
steps:
|
||||
- image: alpine/git:v2.34.2
|
||||
name: detect-tag-step
|
||||
script: |
|
||||
#!/usr/bin/env sh
|
||||
set -euo pipefail
|
||||
echo -n "$(git log -1 --pretty=%ct)" | tee $(results.unix-time.path)
|
||||
workspaces:
|
||||
- name: repo
|
||||
workspace: git-source
|
||||
runAfter:
|
||||
- fetch-repository
|
||||
- name: build-image
|
||||
taskRef:
|
||||
resolver: git
|
||||
params:
|
||||
- name: url
|
||||
value: https://code.fizz.buzz/talexander/personal_tekton_catalog.git
|
||||
- name: revision
|
||||
value: 7ee31a185243ee6da13dcd26a592c585b64c80e5
|
||||
- name: pathInRepo
|
||||
value: task/buildkit-rootless-daemonless/0.1/buildkit-rootless-daemonless.yaml
|
||||
params:
|
||||
- name: OUTPUT
|
||||
value: >-
|
||||
type=image,"name=$(params.image-name):latest,$(params.image-name):$(tasks.fetch-repository.results.commit)",push=true,compression=zstd,compression-level=22,oci-mediatypes=true
|
||||
- name: CONTEXT
|
||||
value: $(params.path-to-image-context)
|
||||
- name: DOCKERFILE
|
||||
value: $(params.path-to-dockerfile)
|
||||
- name: EXTRA_ARGS
|
||||
value:
|
||||
- "--opt"
|
||||
- "target=$(params.target-name)"
|
||||
- --import-cache
|
||||
- "type=registry,ref=$(params.image-name):buildcache"
|
||||
- --export-cache
|
||||
- "type=registry,ref=$(params.image-name):buildcache,mode=max,compression=zstd,compression-level=22,rewrite-timestamp=true,image-manifest=true,oci-mediatypes=true"
|
||||
- --opt
|
||||
- build-arg:SOURCE_DATE_EPOCH=$(tasks.get-git-commit-time.results.unix-time)
|
||||
- name: BUILDKITD_TOML
|
||||
value: |
|
||||
debug = true
|
||||
[registry."docker.io"]
|
||||
mirrors = ["dockerhub.dockerhub.svc.cluster.local"]
|
||||
[registry."dockerhub.dockerhub.svc.cluster.local"]
|
||||
http = true
|
||||
insecure = true
|
||||
workspaces:
|
||||
- name: source
|
||||
workspace: git-source
|
||||
- name: dockerconfig
|
||||
workspace: docker-credentials
|
||||
runAfter:
|
||||
- fetch-repository
|
||||
#############
|
||||
- name: run-image-none
|
||||
taskSpec:
|
||||
metadata: {}
|
||||
params:
|
||||
- name: docker-image
|
||||
type: string
|
||||
description: Docker image to run.
|
||||
default: alpine:3.18
|
||||
stepTemplate:
|
||||
image: alpine:3.18
|
||||
name: ""
|
||||
resources:
|
||||
requests:
|
||||
cpu: 10m
|
||||
memory: 60Mi
|
||||
workingDir: /workspace/source
|
||||
workspaces:
|
||||
- name: source
|
||||
mountPath: /source
|
||||
- name: cargo-cache
|
||||
mountPath: /usr/local/cargo/registry
|
||||
optional: true
|
||||
steps:
|
||||
- name: run
|
||||
image: $(params.docker-image)
|
||||
workingDir: "$(workspaces.source.path)"
|
||||
command: ["cargo", "build"]
|
||||
args: ["--no-default-features"]
|
||||
env:
|
||||
- name: CARGO_TARGET_DIR
|
||||
value: /target
|
||||
workspaces:
|
||||
- name: source
|
||||
workspace: git-source
|
||||
- name: cargo-cache
|
||||
workspace: cargo-cache
|
||||
subPath: none
|
||||
runAfter:
|
||||
- build-image
|
||||
params:
|
||||
- name: docker-image
|
||||
value: "$(tasks.build-image.results.IMAGE_URL[1])"
|
||||
#############
|
||||
- name: run-image-tracing
|
||||
taskSpec:
|
||||
metadata: {}
|
||||
params:
|
||||
- name: docker-image
|
||||
type: string
|
||||
description: Docker image to run.
|
||||
default: alpine:3.18
|
||||
stepTemplate:
|
||||
image: alpine:3.18
|
||||
name: ""
|
||||
resources:
|
||||
requests:
|
||||
cpu: 10m
|
||||
memory: 60Mi
|
||||
workingDir: /workspace/source
|
||||
workspaces:
|
||||
- name: source
|
||||
mountPath: /source
|
||||
- name: cargo-cache
|
||||
mountPath: /usr/local/cargo/registry
|
||||
optional: true
|
||||
steps:
|
||||
- name: run
|
||||
image: $(params.docker-image)
|
||||
workingDir: "$(workspaces.source.path)"
|
||||
command: ["cargo", "build"]
|
||||
args: ["--no-default-features", "--features", "tracing"]
|
||||
env:
|
||||
- name: CARGO_TARGET_DIR
|
||||
value: /target
|
||||
workspaces:
|
||||
- name: source
|
||||
workspace: git-source
|
||||
- name: cargo-cache
|
||||
workspace: cargo-cache
|
||||
subPath: tracing
|
||||
runAfter:
|
||||
- run-image-none
|
||||
params:
|
||||
- name: docker-image
|
||||
value: "$(tasks.build-image.results.IMAGE_URL[1])"
|
||||
#############
|
||||
- name: run-image-compare
|
||||
taskSpec:
|
||||
metadata: {}
|
||||
params:
|
||||
- name: docker-image
|
||||
type: string
|
||||
description: Docker image to run.
|
||||
default: alpine:3.18
|
||||
stepTemplate:
|
||||
image: alpine:3.18
|
||||
name: ""
|
||||
resources:
|
||||
requests:
|
||||
cpu: 10m
|
||||
memory: 60Mi
|
||||
workingDir: /workspace/source
|
||||
workspaces:
|
||||
- name: source
|
||||
mountPath: /source
|
||||
- name: cargo-cache
|
||||
mountPath: /usr/local/cargo/registry
|
||||
optional: true
|
||||
steps:
|
||||
- name: run
|
||||
image: $(params.docker-image)
|
||||
workingDir: "$(workspaces.source.path)"
|
||||
command: ["cargo", "build"]
|
||||
args: ["--no-default-features", "--features", "compare"]
|
||||
env:
|
||||
- name: CARGO_TARGET_DIR
|
||||
value: /target
|
||||
workspaces:
|
||||
- name: source
|
||||
workspace: git-source
|
||||
- name: cargo-cache
|
||||
workspace: cargo-cache
|
||||
subPath: compare
|
||||
runAfter:
|
||||
- run-image-tracing
|
||||
params:
|
||||
- name: docker-image
|
||||
value: "$(tasks.build-image.results.IMAGE_URL[1])"
|
||||
#############
|
||||
- name: run-image-default
|
||||
taskSpec:
|
||||
metadata: {}
|
||||
params:
|
||||
- name: docker-image
|
||||
type: string
|
||||
description: Docker image to run.
|
||||
default: alpine:3.18
|
||||
stepTemplate:
|
||||
image: alpine:3.18
|
||||
name: ""
|
||||
resources:
|
||||
requests:
|
||||
cpu: 10m
|
||||
memory: 60Mi
|
||||
workingDir: /workspace/source
|
||||
workspaces:
|
||||
- name: source
|
||||
mountPath: /source
|
||||
- name: cargo-cache
|
||||
mountPath: /usr/local/cargo/registry
|
||||
optional: true
|
||||
steps:
|
||||
- name: run
|
||||
image: $(params.docker-image)
|
||||
workingDir: "$(workspaces.source.path)"
|
||||
command: ["cargo", "build"]
|
||||
args: []
|
||||
env:
|
||||
- name: CARGO_TARGET_DIR
|
||||
value: /target
|
||||
workspaces:
|
||||
- name: source
|
||||
workspace: git-source
|
||||
- name: cargo-cache
|
||||
workspace: cargo-cache
|
||||
subPath: default
|
||||
runAfter:
|
||||
- run-image-compare
|
||||
params:
|
||||
- name: docker-image
|
||||
value: "$(tasks.build-image.results.IMAGE_URL[1])"
|
||||
#############
|
||||
- name: run-image-tracing-compare
|
||||
taskSpec:
|
||||
metadata: {}
|
||||
params:
|
||||
- name: docker-image
|
||||
type: string
|
||||
description: Docker image to run.
|
||||
default: alpine:3.18
|
||||
stepTemplate:
|
||||
image: alpine:3.18
|
||||
name: ""
|
||||
resources:
|
||||
requests:
|
||||
cpu: 10m
|
||||
memory: 60Mi
|
||||
workingDir: /workspace/source
|
||||
workspaces:
|
||||
- name: source
|
||||
mountPath: /source
|
||||
- name: cargo-cache
|
||||
mountPath: /usr/local/cargo/registry
|
||||
optional: true
|
||||
steps:
|
||||
- name: run
|
||||
image: $(params.docker-image)
|
||||
workingDir: "$(workspaces.source.path)"
|
||||
command: ["cargo", "build"]
|
||||
args: ["--no-default-features", "--features", "tracing,compare"]
|
||||
env:
|
||||
- name: CARGO_TARGET_DIR
|
||||
value: /target
|
||||
workspaces:
|
||||
- name: source
|
||||
workspace: git-source
|
||||
- name: cargo-cache
|
||||
workspace: cargo-cache
|
||||
subPath: tracing-compare
|
||||
runAfter:
|
||||
- run-image-default
|
||||
params:
|
||||
- name: docker-image
|
||||
value: "$(tasks.build-image.results.IMAGE_URL[1])"
|
||||
#############
|
||||
- name: run-image-compare-foreign
|
||||
taskSpec:
|
||||
metadata: {}
|
||||
params:
|
||||
- name: docker-image
|
||||
type: string
|
||||
description: Docker image to run.
|
||||
default: alpine:3.18
|
||||
stepTemplate:
|
||||
image: alpine:3.18
|
||||
name: ""
|
||||
resources:
|
||||
requests:
|
||||
cpu: 10m
|
||||
memory: 60Mi
|
||||
workingDir: /workspace/source
|
||||
workspaces:
|
||||
- name: source
|
||||
mountPath: /source
|
||||
- name: cargo-cache
|
||||
mountPath: /usr/local/cargo/registry
|
||||
optional: true
|
||||
steps:
|
||||
- name: run
|
||||
image: $(params.docker-image)
|
||||
workingDir: "$(workspaces.source.path)"
|
||||
command: ["cargo", "build"]
|
||||
args:
|
||||
[
|
||||
"--no-default-features",
|
||||
"--features",
|
||||
"compare,foreign_document_test",
|
||||
]
|
||||
env:
|
||||
- name: CARGO_TARGET_DIR
|
||||
value: /target
|
||||
workspaces:
|
||||
- name: source
|
||||
workspace: git-source
|
||||
- name: cargo-cache
|
||||
workspace: cargo-cache
|
||||
subPath: compare-foreign
|
||||
runAfter:
|
||||
- run-image-tracing-compare
|
||||
params:
|
||||
- name: docker-image
|
||||
value: "$(tasks.build-image.results.IMAGE_URL[1])"
|
||||
#############
|
||||
- name: run-image-all
|
||||
taskSpec:
|
||||
metadata: {}
|
||||
params:
|
||||
- name: docker-image
|
||||
type: string
|
||||
description: Docker image to run.
|
||||
default: alpine:3.18
|
||||
stepTemplate:
|
||||
image: alpine:3.18
|
||||
name: ""
|
||||
resources:
|
||||
requests:
|
||||
cpu: 10m
|
||||
memory: 60Mi
|
||||
workingDir: /workspace/source
|
||||
workspaces:
|
||||
- name: source
|
||||
mountPath: /source
|
||||
- name: cargo-cache
|
||||
mountPath: /usr/local/cargo/registry
|
||||
optional: true
|
||||
steps:
|
||||
- name: run
|
||||
image: $(params.docker-image)
|
||||
workingDir: "$(workspaces.source.path)"
|
||||
command: ["cargo", "build"]
|
||||
args:
|
||||
[
|
||||
"--no-default-features",
|
||||
"--features",
|
||||
"tracing,compare,foreign_document_test",
|
||||
]
|
||||
env:
|
||||
- name: CARGO_TARGET_DIR
|
||||
value: /target
|
||||
workspaces:
|
||||
- name: source
|
||||
workspace: git-source
|
||||
- name: cargo-cache
|
||||
workspace: cargo-cache
|
||||
subPath: all
|
||||
runAfter:
|
||||
- run-image-compare-foreign
|
||||
params:
|
||||
- name: docker-image
|
||||
value: "$(tasks.build-image.results.IMAGE_URL[1])"
|
||||
#############
|
      - name: run-image-wasm
        taskSpec:
          metadata: {}
          params:
            - name: docker-image
              type: string
              description: Docker image to run.
              default: alpine:3.18
          stepTemplate:
            image: alpine:3.18
            name: ""
            resources:
              requests:
                cpu: 10m
                memory: 60Mi
            workingDir: /workspace/source
          workspaces:
            - name: source
              mountPath: /source
            - name: cargo-cache
              mountPath: /usr/local/cargo/registry
              optional: true
          steps:
            - name: run
              image: $(params.docker-image)
              workingDir: "$(workspaces.source.path)"
              command: ["cargo", "build"]
              args:
                [
                  "--target",
                  "wasm32-unknown-unknown",
                  "--profile",
                  "wasm",
                  "--bin",
                  "wasm",
                  "--no-default-features",
                  "--features",
                  "wasm",
                ]
              env:
                - name: CARGO_TARGET_DIR
                  value: /target
        workspaces:
          - name: source
            workspace: git-source
          - name: cargo-cache
            workspace: cargo-cache
            subPath: wasm
        runAfter:
          - run-image-all
        params:
          - name: docker-image
            value: "$(tasks.build-image.results.IMAGE_URL[1])"
      #############
      - name: run-image-wasm-test
        taskSpec:
          metadata: {}
          params:
            - name: docker-image
              type: string
              description: Docker image to run.
              default: alpine:3.18
          stepTemplate:
            image: alpine:3.18
            name: ""
            resources:
              requests:
                cpu: 10m
                memory: 60Mi
            workingDir: /workspace/source
          workspaces:
            - name: source
              mountPath: /source
            - name: cargo-cache
              mountPath: /usr/local/cargo/registry
              optional: true
          steps:
            - name: run
              image: $(params.docker-image)
              workingDir: "$(workspaces.source.path)"
              command: ["cargo", "build"]
              args:
                [
                  "--bin",
                  "wasm_test",
                  "--no-default-features",
                  "--features",
                  "wasm_test",
                ]
              env:
                - name: CARGO_TARGET_DIR
                  value: /target
        workspaces:
          - name: source
            workspace: git-source
          - name: cargo-cache
            workspace: cargo-cache
            subPath: wasm-test
        runAfter:
          - run-image-wasm
        params:
          - name: docker-image
            value: "$(tasks.build-image.results.IMAGE_URL[1])"
      #############
    finally:
      - name: report-success
        when:
          - input: "$(tasks.status)"
            operator: in
            values: ["Succeeded", "Completed"]
        taskRef:
          resolver: git
          params:
            - name: url
              value: https://code.fizz.buzz/mirror/catalog.git # mirror of https://github.com/tektoncd/catalog.git
            - name: revision
              value: df36b3853a5657fd883015cdbf07ad6466918acf
            - name: pathInRepo
              value: task/gitea-set-status/0.1/gitea-set-status.yaml
        params:
          - name: CONTEXT
            value: "$(params.JOB_NAME)"
          - name: REPO_FULL_NAME
            value: "$(params.REPO_OWNER)/$(params.REPO_NAME)"
          - name: GITEA_HOST_URL
            value: code.fizz.buzz
          - name: SHA
            value: "$(tasks.fetch-repository.results.commit)"
          - name: DESCRIPTION
            value: "Build $(params.JOB_NAME) has succeeded"
          - name: STATE
            value: success
          - name: TARGET_URL
            value: "https://tekton.fizz.buzz/#/namespaces/$(context.pipelineRun.namespace)/pipelineruns/$(context.pipelineRun.name)"
      - name: report-failure
        when:
          - input: "$(tasks.status)"
            operator: in
            values: ["Failed"]
        taskRef:
          resolver: git
          params:
            - name: url
              value: https://code.fizz.buzz/mirror/catalog.git # mirror of https://github.com/tektoncd/catalog.git
            - name: revision
              value: df36b3853a5657fd883015cdbf07ad6466918acf
            - name: pathInRepo
              value: task/gitea-set-status/0.1/gitea-set-status.yaml
        params:
          - name: CONTEXT
            value: "$(params.JOB_NAME)"
          - name: REPO_FULL_NAME
            value: "$(params.REPO_OWNER)/$(params.REPO_NAME)"
          - name: GITEA_HOST_URL
            value: code.fizz.buzz
          - name: SHA
            value: "$(tasks.fetch-repository.results.commit)"
          - name: DESCRIPTION
            value: "Build $(params.JOB_NAME) has failed"
          - name: STATE
            value: failure
          - name: TARGET_URL
            value: "https://tekton.fizz.buzz/#/namespaces/$(context.pipelineRun.namespace)/pipelineruns/$(context.pipelineRun.name)"
      - name: cargo-cache-autoclean
        taskSpec:
          metadata: {}
          params:
            - name: docker-image
              type: string
              description: Docker image to run.
              default: alpine:3.18
            - name: cache-subdir
              type: string
              description: subPath used in the persistent volume for the cargo cache.
          stepTemplate:
            image: alpine:3.18
            name: ""
            resources:
              requests:
                cpu: 10m
                memory: 60Mi
            workingDir: /workspace/source
          workspaces:
            - name: source
              mountPath: /source
            - name: cargo-cache
              mountPath: /usr/local/cargo/registry
              optional: true
          steps:
            - name: run
              image: $(params.docker-image)
              workingDir: "$(workspaces.source.path)"
              command: ["ls", "$(workspaces.cargo-cache.path)"]
              # command: [echo, $(params.cache-subdir)]
              # command: [cargo, cache, --autoclean]
              args: []
        workspaces:
          - name: source
            workspace: git-source
          - name: cargo-cache
            workspace: cargo-cache
            subPath: $(params.cache-subdir)
        params:
          - name: docker-image
            value: "$(tasks.build-image.results.IMAGE_URL[1])"
          - name: cache-subdir
            value: none
        # matrix:
        #   params:
        #     - name: cache-subdir
        #       value:
        #         - none
        #         - tracing
        #         - compare
        #         - default
        #         - tracing-compare
        #         - compare-foreign
        #         - all
        #         - wasm
        #         - wasm-test
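        # A sketch of what enabling the commented-out matrix above would look
        # like, written inline as comments since it belongs at this exact spot.
        # With `matrix`, Tekton fans the task out into one TaskRun per value,
        # so each cargo-cache subPath would be cleaned in parallel instead of
        # only the hard-coded `none`. This assumes the cluster enables matrix
        # support via the `enable-api-fields` feature flag; the subdir values
        # mirror the subPath names used by the run-image-* tasks above.
        # matrix:
        #   params:
        #     - name: cache-subdir
        #       value: ["none", "tracing", "compare", "default", "tracing-compare", "compare-foreign", "all", "wasm", "wasm-test"]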
    workspaces:
      - name: git-source
      - name: docker-credentials
      - name: cargo-cache
  workspaces:
    - name: git-source
      volumeClaimTemplate:
        spec:
          storageClassName: "nfs-client"
          accessModes:
            - ReadWriteOnce
          resources:
            requests:
              storage: 10Gi
      subPath: rust-source
    - name: cargo-cache
      persistentVolumeClaim:
        claimName: organic-cargo-cache-build
    - name: docker-credentials
      secret:
        secretName: harbor-plain
  params:
    - name: image-name
      value: "harbor.fizz.buzz/private/organic-development-wasm"
    - name: target-name
      value: "wasm"
    - name: path-to-image-context
      value: .
    - name: path-to-dockerfile
      value: docker/organic_development/
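Every run task above consumes `$(tasks.build-image.results.IMAGE_URL[1])`: the buildkit task exposes the image references it pushed as a Tekton array result, and `[1]` indexes into that array, which given the `OUTPUT` value above appears to select the commit-pinned tag rather than `:latest`. A minimal sketch of how an array result is produced and indexed, assuming Tekton's beta array-results support; the task names and `registry.example/app` tags are placeholders, not values from this diff:

```yaml
tasks:
  - name: producer
    taskSpec:
      results:
        - name: IMAGE_URL
          type: array
      steps:
        - name: write
          image: alpine:3.18
          script: |
            # Results of type array are written as a JSON array;
            # entry 0 is the :latest tag, entry 1 the commit-pinned tag.
            echo -n '["registry.example/app:latest","registry.example/app:abc123"]' > $(results.IMAGE_URL.path)
  - name: consumer
    params:
      - name: docker-image
        value: "$(tasks.producer.results.IMAGE_URL[1])" # second array entry
```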
.webhook_bridge/pipeline-foreign-document-test.yaml (new file, 301 lines)
@@ -0,0 +1,301 @@
apiVersion: tekton.dev/v1
kind: PipelineRun
metadata:
  name: foreign-document-test
spec:
  timeouts:
    pipeline: "2h0m0s"
    tasks: "1h0m40s"
    finally: "0h30m0s"
  taskRunTemplate:
    serviceAccountName: build-bot
  pipelineSpec:
    params:
      - name: image-name
        description: The name for the built image
        type: string
      - name: target-name
        description: The dockerfile target to build
        type: string
      - name: path-to-image-context
        description: The path to the build context
        type: string
      - name: path-to-dockerfile
        description: The path to the Dockerfile
        type: string
    tasks:
      - name: report-pending
        taskRef:
          resolver: git
          params:
            - name: url
              value: https://code.fizz.buzz/mirror/catalog.git # mirror of https://github.com/tektoncd/catalog.git
            - name: revision
              value: df36b3853a5657fd883015cdbf07ad6466918acf
            - name: pathInRepo
              value: task/gitea-set-status/0.1/gitea-set-status.yaml
        params:
          - name: CONTEXT
            value: "$(params.JOB_NAME)"
          - name: REPO_FULL_NAME
            value: "$(params.REPO_OWNER)/$(params.REPO_NAME)"
          - name: GITEA_HOST_URL
            value: code.fizz.buzz
          - name: SHA
            value: "$(tasks.fetch-repository.results.commit)"
          - name: DESCRIPTION
            value: "Build $(params.JOB_NAME) has started"
          - name: STATE
            value: pending
          - name: TARGET_URL
            value: "https://tekton.fizz.buzz/#/namespaces/$(context.pipelineRun.namespace)/pipelineruns/$(context.pipelineRun.name)"
      - name: fetch-repository
        taskRef:
          resolver: git
          params:
            - name: url
              value: https://code.fizz.buzz/mirror/catalog.git # mirror of https://github.com/tektoncd/catalog.git
            - name: revision
              value: df36b3853a5657fd883015cdbf07ad6466918acf
            - name: pathInRepo
              value: task/git-clone/0.9/git-clone.yaml
        workspaces:
          - name: output
            workspace: git-source
        params:
          - name: url
            value: $(params.REPO_URL)
          - name: revision
            value: $(params.PULL_BASE_SHA)
          - name: deleteExisting
            value: "true"
      - name: get-git-commit-time
        taskSpec:
          metadata: {}
          stepTemplate:
            image: alpine:3.20
            computeResources:
              requests:
                cpu: 10m
                memory: 600Mi
            workingDir: "$(workspaces.repo.path)"
          results:
            - name: unix-time
              description: The time of the git commit in unix timestamp format.
          steps:
            - image: alpine/git:v2.34.2
              name: detect-tag-step
              script: |
                #!/usr/bin/env sh
                set -euo pipefail
                echo -n "$(git log -1 --pretty=%ct)" | tee $(results.unix-time.path)
        workspaces:
          - name: repo
            workspace: git-source
        runAfter:
          - fetch-repository
      - name: build-image
        taskRef:
          resolver: git
          params:
            - name: url
              value: https://code.fizz.buzz/talexander/personal_tekton_catalog.git
            - name: revision
              value: 7ee31a185243ee6da13dcd26a592c585b64c80e5
            - name: pathInRepo
              value: task/buildkit-rootless-daemonless/0.1/buildkit-rootless-daemonless.yaml
        params:
          - name: OUTPUT
            value: >-
              type=image,"name=$(params.image-name):latest,$(params.image-name):$(tasks.fetch-repository.results.commit)",push=true,compression=zstd,compression-level=22,oci-mediatypes=true
          - name: CONTEXT
            value: $(params.path-to-image-context)
          - name: DOCKERFILE
            value: $(params.path-to-dockerfile)
          - name: EXTRA_ARGS
            value:
              - "--opt"
              - "target=$(params.target-name)"
              - --import-cache
              - "type=registry,ref=$(params.image-name):buildcache"
              - --export-cache
              - "type=registry,ref=$(params.image-name):buildcache,mode=max,compression=zstd,compression-level=22,rewrite-timestamp=true,image-manifest=true,oci-mediatypes=true"
              - --opt
              - build-arg:SOURCE_DATE_EPOCH=$(tasks.get-git-commit-time.results.unix-time)
          - name: BUILDKITD_TOML
            value: |
              debug = true
              [registry."docker.io"]
              mirrors = ["dockerhub.dockerhub.svc.cluster.local"]
              [registry."dockerhub.dockerhub.svc.cluster.local"]
              http = true
              insecure = true
        workspaces:
          - name: source
            workspace: git-source
          - name: dockerconfig
            workspace: docker-credentials
        runAfter:
          - fetch-repository
      - name: run-test
        taskSpec:
          metadata: {}
          params:
            - name: docker-image
              type: string
              description: Docker image to run.
              default: alpine:3.20
          stepTemplate:
            image: alpine:3.20
            computeResources:
              requests:
                cpu: 10m
                memory: 600Mi
            workingDir: /workspace/source
          workspaces:
            - name: source
              mountPath: /source
            - name: cargo-cache
              mountPath: /usr/local/cargo/registry
              optional: true
          steps:
            - name: run
              image: $(params.docker-image)
              workingDir: "$(workspaces.source.path)"
              env:
                - name: CARGO_TARGET_DIR
                  value: /target
        workspaces:
          - name: source
            workspace: git-source
          - name: cargo-cache
            workspace: cargo-cache
        runAfter:
          - build-image
        params:
          - name: docker-image
            value: "$(tasks.build-image.results.IMAGE_URL[1])"
    finally:
      - name: report-success
        when:
          - input: "$(tasks.status)"
            operator: in
            values: ["Succeeded", "Completed"]
        taskRef:
          resolver: git
          params:
            - name: url
              value: https://code.fizz.buzz/mirror/catalog.git # mirror of https://github.com/tektoncd/catalog.git
            - name: revision
              value: df36b3853a5657fd883015cdbf07ad6466918acf
            - name: pathInRepo
              value: task/gitea-set-status/0.1/gitea-set-status.yaml
        params:
          - name: CONTEXT
            value: "$(params.JOB_NAME)"
          - name: REPO_FULL_NAME
            value: "$(params.REPO_OWNER)/$(params.REPO_NAME)"
          - name: GITEA_HOST_URL
            value: code.fizz.buzz
          - name: SHA
            value: "$(tasks.fetch-repository.results.commit)"
          - name: DESCRIPTION
            value: "Build $(params.JOB_NAME) has succeeded"
          - name: STATE
            value: success
          - name: TARGET_URL
            value: "https://tekton.fizz.buzz/#/namespaces/$(context.pipelineRun.namespace)/pipelineruns/$(context.pipelineRun.name)"
      - name: report-failure
        when:
          - input: "$(tasks.status)"
            operator: in
            values: ["Failed"]
        taskRef:
          resolver: git
          params:
            - name: url
              value: https://code.fizz.buzz/mirror/catalog.git # mirror of https://github.com/tektoncd/catalog.git
            - name: revision
              value: df36b3853a5657fd883015cdbf07ad6466918acf
            - name: pathInRepo
              value: task/gitea-set-status/0.1/gitea-set-status.yaml
        params:
          - name: CONTEXT
            value: "$(params.JOB_NAME)"
          - name: REPO_FULL_NAME
            value: "$(params.REPO_OWNER)/$(params.REPO_NAME)"
          - name: GITEA_HOST_URL
            value: code.fizz.buzz
          - name: SHA
            value: "$(tasks.fetch-repository.results.commit)"
          - name: DESCRIPTION
            value: "Build $(params.JOB_NAME) has failed"
          - name: STATE
            value: failure
          - name: TARGET_URL
            value: "https://tekton.fizz.buzz/#/namespaces/$(context.pipelineRun.namespace)/pipelineruns/$(context.pipelineRun.name)"
      - name: cargo-cache-autoclean
        taskSpec:
          metadata: {}
          params:
            - name: docker-image
              type: string
              description: Docker image to run.
              default: alpine:3.20
          stepTemplate:
            image: alpine:3.20
            computeResources:
              requests:
                cpu: 10m
                memory: 600Mi
            workingDir: /workspace/source
          workspaces:
            - name: source
              mountPath: /source
            - name: cargo-cache
              mountPath: /usr/local/cargo/registry
              optional: true
          steps:
            - name: run
              image: $(params.docker-image)
              workingDir: "$(workspaces.source.path)"
              command: [cargo, cache, --autoclean]
              args: []
        workspaces:
          - name: source
            workspace: git-source
          - name: cargo-cache
            workspace: cargo-cache
        params:
          - name: docker-image
            value: "$(tasks.build-image.results.IMAGE_URL[1])"
    workspaces:
      - name: git-source
      - name: docker-credentials
      - name: cargo-cache
  workspaces:
    - name: git-source
      volumeClaimTemplate:
        spec:
          storageClassName: "nfs-client"
          accessModes:
            - ReadWriteOnce
          resources:
            requests:
              storage: 10Gi
      subPath: rust-source
    - name: cargo-cache
      persistentVolumeClaim:
        claimName: organic-cargo-cache-test-foreign-document
    - name: docker-credentials
      secret:
        secretName: harbor-plain
  params:
    - name: image-name
      value: "harbor.fizz.buzz/private/organic-test-foreign-document"
    - name: target-name
      value: "foreign-document"
    - name: path-to-image-context
      value: docker/organic_test/
    - name: path-to-dockerfile
      value: docker/organic_test/
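One difference from the other pipelines: the `run-test` step in this file sets no `command`, so the step runs whatever entrypoint is baked into the test image. If an explicit invocation were ever preferred, it might look like the sketch below; the `--bin foreign_document_test` name is an assumption inferred from the `src/bin_foreign_document_test.rs` target in the Cargo.toml hunk later in this diff, not something the pipeline itself states.

```yaml
steps:
  - name: run
    image: $(params.docker-image)
    workingDir: "$(workspaces.source.path)"
    # Hypothetical explicit command; the real image's entrypoint may already do this.
    command: ["cargo", "run"]
    args:
      [
        "--bin",
        "foreign_document_test",
        "--no-default-features",
        "--features",
        "foreign_document_test",
      ]
    env:
      - name: CARGO_TARGET_DIR
        value: /target
```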
.webhook_bridge/pipeline-format.yaml (new file, 334 lines)
@@ -0,0 +1,334 @@
apiVersion: tekton.dev/v1
kind: PipelineRun
metadata:
  name: rust-format
spec:
  timeouts:
    pipeline: "2h0m0s"
    tasks: "1h0m0s"
    finally: "0h30m0s"
  taskRunTemplate:
    serviceAccountName: build-bot
  pipelineSpec:
    params:
      - name: image-name
        description: The name for the built image
        type: string
      - name: target-name
        description: The dockerfile target to build
        type: string
      - name: path-to-image-context
        description: The path to the build context
        type: string
      - name: path-to-dockerfile
        description: The path to the Dockerfile
        type: string
    tasks:
      - name: report-pending
        taskRef:
          resolver: git
          params:
            - name: url
              value: https://code.fizz.buzz/mirror/catalog.git # mirror of https://github.com/tektoncd/catalog.git
            - name: revision
              value: df36b3853a5657fd883015cdbf07ad6466918acf
            - name: pathInRepo
              value: task/gitea-set-status/0.1/gitea-set-status.yaml
        params:
          - name: CONTEXT
            value: "$(params.JOB_NAME)"
          - name: REPO_FULL_NAME
            value: "$(params.REPO_OWNER)/$(params.REPO_NAME)"
          - name: GITEA_HOST_URL
            value: code.fizz.buzz
          - name: SHA
            value: "$(tasks.fetch-repository.results.commit)"
          - name: DESCRIPTION
            value: "Build $(params.JOB_NAME) has started"
          - name: STATE
            value: pending
          - name: TARGET_URL
            value: "https://tekton.fizz.buzz/#/namespaces/$(context.pipelineRun.namespace)/pipelineruns/$(context.pipelineRun.name)"
      - name: fetch-repository
        taskRef:
          resolver: git
          params:
            - name: url
              value: https://code.fizz.buzz/mirror/catalog.git # mirror of https://github.com/tektoncd/catalog.git
            - name: revision
              value: df36b3853a5657fd883015cdbf07ad6466918acf
            - name: pathInRepo
              value: task/git-clone/0.9/git-clone.yaml
        workspaces:
          - name: output
            workspace: git-source
        params:
          - name: url
            value: $(params.REPO_URL)
          - name: revision
            value: $(params.PULL_BASE_SHA)
          - name: deleteExisting
            value: "true"
      - name: get-git-commit-time
        taskSpec:
          metadata: {}
          stepTemplate:
            image: alpine:3.20
            computeResources:
              requests:
                cpu: 10m
                memory: 600Mi
            workingDir: "$(workspaces.repo.path)"
          results:
            - name: unix-time
              description: The time of the git commit in unix timestamp format.
          steps:
            - image: alpine/git:v2.34.2
              name: detect-tag-step
              script: |
                #!/usr/bin/env sh
                set -euo pipefail
                echo -n "$(git log -1 --pretty=%ct)" | tee $(results.unix-time.path)
        workspaces:
          - name: repo
            workspace: git-source
        runAfter:
          - fetch-repository
      - name: build-image
        taskRef:
          resolver: git
          params:
            - name: url
              value: https://code.fizz.buzz/talexander/personal_tekton_catalog.git
            - name: revision
              value: 7ee31a185243ee6da13dcd26a592c585b64c80e5
            - name: pathInRepo
              value: task/buildkit-rootless-daemonless/0.1/buildkit-rootless-daemonless.yaml
        params:
          - name: OUTPUT
            value: >-
              type=image,"name=$(params.image-name):latest,$(params.image-name):$(tasks.fetch-repository.results.commit)",push=true,compression=zstd,compression-level=22,oci-mediatypes=true
          - name: CONTEXT
            value: $(params.path-to-image-context)
          - name: DOCKERFILE
            value: $(params.path-to-dockerfile)
          - name: EXTRA_ARGS
            value:
              - "--opt"
              - "target=$(params.target-name)"
              - --import-cache
              - "type=registry,ref=$(params.image-name):buildcache"
              - --export-cache
              - "type=registry,ref=$(params.image-name):buildcache,mode=max,compression=zstd,compression-level=22,rewrite-timestamp=true,image-manifest=true,oci-mediatypes=true"
              - --opt
              - build-arg:SOURCE_DATE_EPOCH=$(tasks.get-git-commit-time.results.unix-time)
          - name: BUILDKITD_TOML
            value: |
              debug = true
              [registry."docker.io"]
              mirrors = ["dockerhub.dockerhub.svc.cluster.local"]
              [registry."dockerhub.dockerhub.svc.cluster.local"]
              http = true
              insecure = true
        workspaces:
          - name: source
            workspace: git-source
          - name: dockerconfig
            workspace: docker-credentials
        runAfter:
          - fetch-repository
      - name: run-cargo-fmt
        taskSpec:
          metadata: {}
          params:
            - name: docker-image
              type: string
              description: Docker image to run.
              default: alpine:3.20
          stepTemplate:
            image: alpine:3.20
            computeResources:
              requests:
                cpu: 10m
                memory: 600Mi
            workingDir: /workspace/source
          workspaces:
            - name: source
              mountPath: /source
            - name: cargo-cache
              mountPath: /usr/local/cargo/registry
              optional: true
          steps:
            - name: run
              image: $(params.docker-image)
              workingDir: "$(workspaces.source.path)"
              command: ["cargo", "fmt"]
              args: []
              env:
                - name: CARGO_TARGET_DIR
                  value: /target
        workspaces:
          - name: source
            workspace: git-source
          - name: cargo-cache
            workspace: cargo-cache
        runAfter:
          - build-image
        params:
          - name: docker-image
            value: "$(tasks.build-image.results.IMAGE_URL[1])"
      - name: commit-changes
        taskRef:
          resolver: git
          params:
            - name: url
              value: https://code.fizz.buzz/mirror/catalog.git # mirror of https://github.com/tektoncd/catalog.git
            - name: revision
              value: df36b3853a5657fd883015cdbf07ad6466918acf
            - name: pathInRepo
              value: task/git-cli/0.4/git-cli.yaml
        params:
          - name: GIT_USER_NAME
            value: fluxcdbot
          - name: GIT_USER_EMAIL
            value: "fluxcdbot@users.noreply.github.com"
          - name: GIT_SCRIPT
            value: |
              pwd
              git config --global --add safe.directory /workspace/source
              git_status=$(git status --porcelain)
              if [ -n "$git_status" ]; then
                git commit -a -m "CI: autofix rust code."
                git push origin HEAD:$(params.PULL_BASE_REF)
              else
                echo "No changes to commit."
              fi
        workspaces:
          - name: source
            workspace: git-source
        runAfter:
          - run-cargo-fmt
    finally:
      - name: report-success
        when:
          - input: "$(tasks.status)"
            operator: in
            values: ["Succeeded", "Completed"]
        taskRef:
          resolver: git
          params:
            - name: url
              value: https://code.fizz.buzz/mirror/catalog.git # mirror of https://github.com/tektoncd/catalog.git
            - name: revision
              value: df36b3853a5657fd883015cdbf07ad6466918acf
            - name: pathInRepo
              value: task/gitea-set-status/0.1/gitea-set-status.yaml
        params:
          - name: CONTEXT
            value: "$(params.JOB_NAME)"
          - name: REPO_FULL_NAME
            value: "$(params.REPO_OWNER)/$(params.REPO_NAME)"
          - name: GITEA_HOST_URL
            value: code.fizz.buzz
          - name: SHA
            value: "$(tasks.fetch-repository.results.commit)"
          - name: DESCRIPTION
            value: "Build $(params.JOB_NAME) has succeeded"
          - name: STATE
            value: success
          - name: TARGET_URL
            value: "https://tekton.fizz.buzz/#/namespaces/$(context.pipelineRun.namespace)/pipelineruns/$(context.pipelineRun.name)"
      - name: report-failure
        when:
          - input: "$(tasks.status)"
            operator: in
            values: ["Failed"]
        taskRef:
          resolver: git
          params:
            - name: url
              value: https://code.fizz.buzz/mirror/catalog.git # mirror of https://github.com/tektoncd/catalog.git
            - name: revision
              value: df36b3853a5657fd883015cdbf07ad6466918acf
            - name: pathInRepo
              value: task/gitea-set-status/0.1/gitea-set-status.yaml
        params:
          - name: CONTEXT
            value: "$(params.JOB_NAME)"
          - name: REPO_FULL_NAME
            value: "$(params.REPO_OWNER)/$(params.REPO_NAME)"
          - name: GITEA_HOST_URL
            value: code.fizz.buzz
          - name: SHA
            value: "$(tasks.fetch-repository.results.commit)"
          - name: DESCRIPTION
            value: "Build $(params.JOB_NAME) has failed"
          - name: STATE
            value: failure
          - name: TARGET_URL
            value: "https://tekton.fizz.buzz/#/namespaces/$(context.pipelineRun.namespace)/pipelineruns/$(context.pipelineRun.name)"
      - name: cargo-cache-autoclean
        taskSpec:
          metadata: {}
          params:
            - name: docker-image
              type: string
              description: Docker image to run.
              default: alpine:3.20
          stepTemplate:
            image: alpine:3.20
            computeResources:
              requests:
                cpu: 10m
                memory: 600Mi
            workingDir: /workspace/source
          workspaces:
            - name: source
              mountPath: /source
            - name: cargo-cache
              mountPath: /usr/local/cargo/registry
              optional: true
          steps:
            - name: run
              image: $(params.docker-image)
              workingDir: "$(workspaces.source.path)"
              command: [cargo, cache, --autoclean]
              args: []
        workspaces:
          - name: source
            workspace: git-source
          - name: cargo-cache
            workspace: cargo-cache
        params:
          - name: docker-image
            value: "$(tasks.build-image.results.IMAGE_URL[1])"
    workspaces:
      - name: git-source
      - name: docker-credentials
      - name: cargo-cache
  workspaces:
    - name: git-source
      volumeClaimTemplate:
        spec:
          storageClassName: "nfs-client"
          accessModes:
            - ReadWriteOnce
          resources:
            requests:
              storage: 10Gi
      subPath: rust-source
    - name: cargo-cache
      persistentVolumeClaim:
        claimName: organic-cargo-cache-fmt
    - name: docker-credentials
      secret:
        secretName: harbor-plain
  params:
    - name: image-name
      value: "harbor.fizz.buzz/private/organic-development-format"
    - name: target-name
      value: "format"
    - name: path-to-image-context
      value: docker/organic_development/
    - name: path-to-dockerfile
      value: docker/organic_development/
.webhook_bridge/pipeline-rust-clippy.yaml (new file, 313 lines)
@@ -0,0 +1,313 @@
apiVersion: tekton.dev/v1
kind: PipelineRun
metadata:
  name: rust-clippy
spec:
  taskRunTemplate:
    serviceAccountName: build-bot
  timeouts:
    pipeline: "2h0m0s"
    tasks: "1h0m40s"
    finally: "0h30m0s"
  pipelineSpec:
    params:
      - name: image-name
        description: The name for the built image
        type: string
      - name: target-name
        description: The dockerfile target to build
        type: string
      - name: path-to-image-context
        description: The path to the build context
        type: string
      - name: path-to-dockerfile
        description: The path to the Dockerfile
        type: string
    tasks:
      - name: report-pending
        taskRef:
          resolver: git
          params:
            - name: url
              value: https://code.fizz.buzz/mirror/catalog.git # mirror of https://github.com/tektoncd/catalog.git
            - name: revision
              value: df36b3853a5657fd883015cdbf07ad6466918acf
            - name: pathInRepo
              value: task/gitea-set-status/0.1/gitea-set-status.yaml
        params:
          - name: CONTEXT
            value: "$(params.JOB_NAME)"
          - name: REPO_FULL_NAME
            value: "$(params.REPO_OWNER)/$(params.REPO_NAME)"
          - name: GITEA_HOST_URL
            value: code.fizz.buzz
          - name: SHA
            value: "$(tasks.fetch-repository.results.commit)"
          - name: DESCRIPTION
            value: "Build $(params.JOB_NAME) has started"
          - name: STATE
            value: pending
          - name: TARGET_URL
            value: "https://tekton.fizz.buzz/#/namespaces/$(context.pipelineRun.namespace)/pipelineruns/$(context.pipelineRun.name)"
      - name: fetch-repository
        taskRef:
          resolver: git
          params:
            - name: url
              value: https://code.fizz.buzz/mirror/catalog.git # mirror of https://github.com/tektoncd/catalog.git
            - name: revision
              value: df36b3853a5657fd883015cdbf07ad6466918acf
            - name: pathInRepo
              value: task/git-clone/0.9/git-clone.yaml
        workspaces:
          - name: output
            workspace: git-source
        params:
          - name: url
            value: $(params.REPO_URL)
          - name: revision
            value: $(params.PULL_BASE_SHA)
          - name: deleteExisting
            value: "true"
      - name: get-git-commit-time
        taskSpec:
          metadata: {}
          stepTemplate:
            image: alpine:3.20
            computeResources:
              requests:
                cpu: 10m
                memory: 600Mi
            workingDir: "$(workspaces.repo.path)"
          results:
            - name: unix-time
              description: The time of the git commit in unix timestamp format.
          steps:
            - image: alpine/git:v2.34.2
              name: detect-tag-step
              script: |
                #!/usr/bin/env sh
                set -euo pipefail
                echo -n "$(git log -1 --pretty=%ct)" | tee $(results.unix-time.path)
        workspaces:
          - name: repo
            workspace: git-source
        runAfter:
          - fetch-repository
      - name: build-image
        taskRef:
          resolver: git
          params:
            - name: url
              value: https://code.fizz.buzz/talexander/personal_tekton_catalog.git
            - name: revision
              value: 7ee31a185243ee6da13dcd26a592c585b64c80e5
            - name: pathInRepo
              value: task/buildkit-rootless-daemonless/0.1/buildkit-rootless-daemonless.yaml
        params:
          - name: OUTPUT
            value: >-
              type=image,"name=$(params.image-name):latest,$(params.image-name):$(tasks.fetch-repository.results.commit)",push=true,compression=zstd,compression-level=22,oci-mediatypes=true
          - name: CONTEXT
            value: $(params.path-to-image-context)
          - name: DOCKERFILE
            value: $(params.path-to-dockerfile)
          - name: EXTRA_ARGS
            value:
              - "--opt"
              - "target=$(params.target-name)"
              - --import-cache
              - "type=registry,ref=$(params.image-name):buildcache"
              - --export-cache
              - "type=registry,ref=$(params.image-name):buildcache,mode=max,compression=zstd,compression-level=22,rewrite-timestamp=true,image-manifest=true,oci-mediatypes=true"
              - --opt
              - build-arg:SOURCE_DATE_EPOCH=$(tasks.get-git-commit-time.results.unix-time)
          - name: BUILDKITD_TOML
            value: |
              debug = true
              [registry."docker.io"]
              mirrors = ["dockerhub.dockerhub.svc.cluster.local"]
              [registry."dockerhub.dockerhub.svc.cluster.local"]
              http = true
              insecure = true
        workspaces:
          - name: source
            workspace: git-source
          - name: dockerconfig
            workspace: docker-credentials
        runAfter:
          - fetch-repository
      - name: run-cargo-clippy
        taskSpec:
          metadata: {}
          params:
            - name: docker-image
              type: string
              description: Docker image to run.
              default: alpine:3.20
          stepTemplate:
            image: alpine:3.20
            computeResources:
              requests:
                cpu: 10m
                memory: 600Mi
            workingDir: /workspace/source
          workspaces:
            - name: source
              mountPath: /source
            - name: cargo-cache
              mountPath: /usr/local/cargo/registry
              optional: true
          steps:
            - name: run
              image: $(params.docker-image)
              workingDir: "$(workspaces.source.path)"
              command:
                [
                  "cargo",
                  "clippy",
                  "--no-deps",
                  "--all-targets",
                  "--all-features",
                  "--",
                  "-D",
                  "warnings",
                ]
              args: []
              env:
                - name: CARGO_TARGET_DIR
                  value: /target
        workspaces:
          - name: source
            workspace: git-source
          - name: cargo-cache
            workspace: cargo-cache
        runAfter:
          - build-image
        params:
          - name: docker-image
            value: "$(tasks.build-image.results.IMAGE_URL[1])"
    finally:
      - name: report-success
        when:
          - input: "$(tasks.status)"
            operator: in
            values: ["Succeeded", "Completed"]
        taskRef:
          resolver: git
          params:
            - name: url
              value: https://code.fizz.buzz/mirror/catalog.git # mirror of https://github.com/tektoncd/catalog.git
            - name: revision
              value: df36b3853a5657fd883015cdbf07ad6466918acf
            - name: pathInRepo
              value: task/gitea-set-status/0.1/gitea-set-status.yaml
        params:
          - name: CONTEXT
            value: "$(params.JOB_NAME)"
          - name: REPO_FULL_NAME
            value: "$(params.REPO_OWNER)/$(params.REPO_NAME)"
          - name: GITEA_HOST_URL
            value: code.fizz.buzz
          - name: SHA
            value: "$(tasks.fetch-repository.results.commit)"
          - name: DESCRIPTION
            value: "Build $(params.JOB_NAME) has succeeded"
          - name: STATE
            value: success
          - name: TARGET_URL
            value: "https://tekton.fizz.buzz/#/namespaces/$(context.pipelineRun.namespace)/pipelineruns/$(context.pipelineRun.name)"
      - name: report-failure
        when:
          - input: "$(tasks.status)"
            operator: in
            values: ["Failed"]
        taskRef:
          resolver: git
          params:
            - name: url
              value: https://code.fizz.buzz/mirror/catalog.git # mirror of https://github.com/tektoncd/catalog.git
            - name: revision
              value: df36b3853a5657fd883015cdbf07ad6466918acf
            - name: pathInRepo
              value: task/gitea-set-status/0.1/gitea-set-status.yaml
        params:
          - name: CONTEXT
            value: "$(params.JOB_NAME)"
          - name: REPO_FULL_NAME
            value: "$(params.REPO_OWNER)/$(params.REPO_NAME)"
          - name: GITEA_HOST_URL
            value: code.fizz.buzz
          - name: SHA
            value: "$(tasks.fetch-repository.results.commit)"
          - name: DESCRIPTION
            value: "Build $(params.JOB_NAME) has failed"
          - name: STATE
            value: failure
          - name: TARGET_URL
            value: "https://tekton.fizz.buzz/#/namespaces/$(context.pipelineRun.namespace)/pipelineruns/$(context.pipelineRun.name)"
      - name: cargo-cache-autoclean
        taskSpec:
          metadata: {}
          params:
            - name: docker-image
              type: string
              description: Docker image to run.
              default: alpine:3.20
          stepTemplate:
            image: alpine:3.20
            computeResources:
              requests:
                cpu: 10m
                memory: 600Mi
            workingDir: /workspace/source
          workspaces:
            - name: source
              mountPath: /source
            - name: cargo-cache
              mountPath: /usr/local/cargo/registry
              optional: true
          steps:
            - name: run
              image: $(params.docker-image)
              workingDir: "$(workspaces.source.path)"
              command: [cargo, cache, --autoclean]
              args: []
        workspaces:
          - name: source
            workspace: git-source
          - name: cargo-cache
            workspace: cargo-cache
        params:
          - name: docker-image
            value: "$(tasks.build-image.results.IMAGE_URL[1])"
    workspaces:
      - name: git-source
      - name: docker-credentials
      - name: cargo-cache
  workspaces:
    - name: git-source
      volumeClaimTemplate:
        spec:
          storageClassName: "nfs-client"
          accessModes:
            - ReadWriteOnce
          resources:
            requests:
              storage: 10Gi
      subPath: rust-source
    - name: cargo-cache
      persistentVolumeClaim:
        claimName: organic-cargo-cache-clippy
    - name: docker-credentials
      secret:
        secretName: harbor-plain
  params:
    - name: image-name
      value: "harbor.fizz.buzz/private/organic-development-clippy"
    - name: target-name
      value: "clippy"
    - name: path-to-image-context
      value: docker/organic_development/
    - name: path-to-dockerfile
      value: docker/organic_development/
.webhook_bridge/pipeline-rust-test.yaml (new file, 312 lines)
@@ -0,0 +1,312 @@
apiVersion: tekton.dev/v1
kind: PipelineRun
metadata:
  name: rust-test
spec:
  timeouts:
    pipeline: "2h0m0s"
    tasks: "1h0m40s"
    finally: "0h30m0s"
  taskRunTemplate:
    serviceAccountName: build-bot
  pipelineSpec:
    params:
      - name: image-name
        description: The name for the built image
        type: string
      - name: target-name
        description: The dockerfile target to build
        type: string
      - name: path-to-image-context
        description: The path to the build context
        type: string
      - name: path-to-dockerfile
        description: The path to the Dockerfile
        type: string
    tasks:
      - name: report-pending
        taskRef:
          resolver: git
          params:
            - name: url
              value: https://code.fizz.buzz/mirror/catalog.git # mirror of https://github.com/tektoncd/catalog.git
            - name: revision
              value: df36b3853a5657fd883015cdbf07ad6466918acf
            - name: pathInRepo
              value: task/gitea-set-status/0.1/gitea-set-status.yaml
        params:
          - name: CONTEXT
            value: "$(params.JOB_NAME)"
          - name: REPO_FULL_NAME
            value: "$(params.REPO_OWNER)/$(params.REPO_NAME)"
          - name: GITEA_HOST_URL
            value: code.fizz.buzz
          - name: SHA
            value: "$(tasks.fetch-repository.results.commit)"
          - name: DESCRIPTION
            value: "Build $(params.JOB_NAME) has started"
          - name: STATE
            value: pending
          - name: TARGET_URL
            value: "https://tekton.fizz.buzz/#/namespaces/$(context.pipelineRun.namespace)/pipelineruns/$(context.pipelineRun.name)"
      - name: fetch-repository
        taskRef:
          resolver: git
          params:
            - name: url
              value: https://code.fizz.buzz/mirror/catalog.git # mirror of https://github.com/tektoncd/catalog.git
            - name: revision
              value: df36b3853a5657fd883015cdbf07ad6466918acf
            - name: pathInRepo
              value: task/git-clone/0.9/git-clone.yaml
        workspaces:
          - name: output
            workspace: git-source
        params:
          - name: url
            value: $(params.REPO_URL)
          - name: revision
            value: $(params.PULL_BASE_SHA)
          - name: deleteExisting
            value: "true"
      - name: get-git-commit-time
        taskSpec:
          metadata: {}
          stepTemplate:
            image: alpine:3.20
            computeResources:
              requests:
                cpu: 10m
                memory: 600Mi
            workingDir: "$(workspaces.repo.path)"
          results:
            - name: unix-time
              description: The time of the git commit in unix timestamp format.
          steps:
            - image: alpine/git:v2.34.2
              name: detect-tag-step
              script: |
                #!/usr/bin/env sh
                set -euo pipefail
                echo -n "$(git log -1 --pretty=%ct)" | tee $(results.unix-time.path)
        workspaces:
          - name: repo
            workspace: git-source
        runAfter:
          - fetch-repository
      - name: build-image
        taskRef:
          resolver: git
          params:
            - name: url
              value: https://code.fizz.buzz/talexander/personal_tekton_catalog.git
            - name: revision
              value: 7ee31a185243ee6da13dcd26a592c585b64c80e5
            - name: pathInRepo
              value: task/buildkit-rootless-daemonless/0.1/buildkit-rootless-daemonless.yaml
        params:
          - name: OUTPUT
            value: >-
              type=image,"name=$(params.image-name):latest,$(params.image-name):$(tasks.fetch-repository.results.commit)",push=true,compression=zstd,compression-level=22,oci-mediatypes=true
          - name: CONTEXT
            value: $(params.path-to-image-context)
          - name: DOCKERFILE
            value: $(params.path-to-dockerfile)
          - name: EXTRA_ARGS
            value:
              - "--opt"
              - "target=$(params.target-name)"
              - --import-cache
              - "type=registry,ref=$(params.image-name):buildcache"
              - --export-cache
              - "type=registry,ref=$(params.image-name):buildcache,mode=max,compression=zstd,compression-level=22,rewrite-timestamp=true,image-manifest=true,oci-mediatypes=true"
              - --opt
              - build-arg:SOURCE_DATE_EPOCH=$(tasks.get-git-commit-time.results.unix-time)
          - name: BUILDKITD_TOML
            value: |
              debug = true
              [registry."docker.io"]
              mirrors = ["dockerhub.dockerhub.svc.cluster.local"]
              [registry."dockerhub.dockerhub.svc.cluster.local"]
              http = true
              insecure = true
        workspaces:
          - name: source
            workspace: git-source
          - name: dockerconfig
            workspace: docker-credentials
        runAfter:
          - fetch-repository
      - name: run-cargo-test
        taskSpec:
          metadata: {}
          params:
            - name: docker-image
              type: string
              description: Docker image to run.
              default: alpine:3.20
          stepTemplate:
            image: alpine:3.20
            computeResources:
              requests:
                cpu: 10m
                memory: 600Mi
            workingDir: /workspace/source
          workspaces:
            - name: source
              mountPath: /source
            - name: cargo-cache
              mountPath: /usr/local/cargo/registry
              optional: true
          steps:
            - name: run
              image: $(params.docker-image)
              workingDir: "$(workspaces.source.path)"
              command: [cargo, test]
              args:
                [
                  --no-default-features,
                  --features,
                  "compare,wasm_test",
                  --no-fail-fast,
                  --lib,
                  --test,
                  test_loader,
                ]
              env:
                - name: CARGO_TARGET_DIR
                  value: /target
        workspaces:
          - name: source
            workspace: git-source
          - name: cargo-cache
            workspace: cargo-cache
        runAfter:
          - build-image
        params:
          - name: docker-image
            value: "$(tasks.build-image.results.IMAGE_URL[1])"
    finally:
      - name: report-success
        when:
          - input: "$(tasks.status)"
            operator: in
            values: ["Succeeded", "Completed"]
        taskRef:
          resolver: git
          params:
            - name: url
              value: https://code.fizz.buzz/mirror/catalog.git # mirror of https://github.com/tektoncd/catalog.git
            - name: revision
              value: df36b3853a5657fd883015cdbf07ad6466918acf
            - name: pathInRepo
              value: task/gitea-set-status/0.1/gitea-set-status.yaml
        params:
          - name: CONTEXT
            value: "$(params.JOB_NAME)"
          - name: REPO_FULL_NAME
            value: "$(params.REPO_OWNER)/$(params.REPO_NAME)"
          - name: GITEA_HOST_URL
            value: code.fizz.buzz
          - name: SHA
            value: "$(tasks.fetch-repository.results.commit)"
          - name: DESCRIPTION
            value: "Build $(params.JOB_NAME) has succeeded"
          - name: STATE
            value: success
          - name: TARGET_URL
            value: "https://tekton.fizz.buzz/#/namespaces/$(context.pipelineRun.namespace)/pipelineruns/$(context.pipelineRun.name)"
      - name: report-failure
        when:
          - input: "$(tasks.status)"
            operator: in
            values: ["Failed"]
        taskRef:
          resolver: git
          params:
            - name: url
              value: https://code.fizz.buzz/mirror/catalog.git # mirror of https://github.com/tektoncd/catalog.git
            - name: revision
              value: df36b3853a5657fd883015cdbf07ad6466918acf
            - name: pathInRepo
              value: task/gitea-set-status/0.1/gitea-set-status.yaml
        params:
          - name: CONTEXT
            value: "$(params.JOB_NAME)"
          - name: REPO_FULL_NAME
            value: "$(params.REPO_OWNER)/$(params.REPO_NAME)"
          - name: GITEA_HOST_URL
            value: code.fizz.buzz
          - name: SHA
            value: "$(tasks.fetch-repository.results.commit)"
          - name: DESCRIPTION
            value: "Build $(params.JOB_NAME) has failed"
          - name: STATE
            value: failure
          - name: TARGET_URL
            value: "https://tekton.fizz.buzz/#/namespaces/$(context.pipelineRun.namespace)/pipelineruns/$(context.pipelineRun.name)"
      - name: cargo-cache-autoclean
        taskSpec:
          metadata: {}
          params:
            - name: docker-image
              type: string
              description: Docker image to run.
              default: alpine:3.20
          stepTemplate:
            image: alpine:3.20
            computeResources:
              requests:
                cpu: 10m
                memory: 600Mi
            workingDir: /workspace/source
          workspaces:
            - name: source
              mountPath: /source
            - name: cargo-cache
              mountPath: /usr/local/cargo/registry
              optional: true
          steps:
            - name: run
              image: $(params.docker-image)
              workingDir: "$(workspaces.source.path)"
              command: [cargo, cache, --autoclean]
              args: []
        workspaces:
          - name: source
            workspace: git-source
          - name: cargo-cache
            workspace: cargo-cache
        params:
          - name: docker-image
            value: "$(tasks.build-image.results.IMAGE_URL[1])"
    workspaces:
      - name: git-source
      - name: docker-credentials
      - name: cargo-cache
  workspaces:
    - name: git-source
      volumeClaimTemplate:
        spec:
          storageClassName: "nfs-client"
          accessModes:
            - ReadWriteOnce
          resources:
            requests:
              storage: 10Gi
      subPath: rust-source
    - name: cargo-cache
      persistentVolumeClaim:
        claimName: organic-cargo-cache-test
    - name: docker-credentials
      secret:
        secretName: harbor-plain
  params:
    - name: image-name
      value: "harbor.fizz.buzz/private/organic-test"
    - name: target-name
      value: "tester"
    - name: path-to-image-context
      value: docker/organic_test/
    - name: path-to-dockerfile
      value: docker/organic_test/
.webhook_bridge/webhook_bridge.toml (new file, 31 lines)
@@ -0,0 +1,31 @@
version = "0.0.1"

[[push]]
name = "rust-test"
source = "pipeline-rust-test.yaml"
clone_uri = "git@code.fizz.buzz:talexander/organic.git"
skip_branches = [ "^v[0-9]+\\.[0-9]+\\.[0-9]+$" ]

[[push]]
name = "foreign-document-test"
source = "pipeline-foreign-document-test.yaml"
clone_uri = "git@code.fizz.buzz:talexander/organic.git"
branches = [ "^main$", "^master$" ]

[[push]]
name = "clippy"
source = "pipeline-rust-clippy.yaml"
clone_uri = "git@code.fizz.buzz:talexander/organic.git"
skip_branches = [ "^v[0-9]+\\.[0-9]+\\.[0-9]+$" ]

[[push]]
name = "format"
source = "pipeline-format.yaml"
clone_uri = "git@code.fizz.buzz:talexander/organic.git"
skip_branches = [ "^v[0-9]+\\.[0-9]+\\.[0-9]+$" ]

[[push]]
name = "build"
source = "pipeline-build-hash.yaml"
clone_uri = "git@code.fizz.buzz:talexander/organic.git"
branches = [ "^main$", "^master$" ]
Cargo.toml (34 lines changed)
@@ -1,8 +1,9 @@
# cargo-features = ["profile-rustflags"]
cargo-features = ["codegen-backend"]

[package]
name = "organic"
version = "0.1.11"
version = "0.1.16"
authors = ["Tom Alexander <tom@fizz.buzz>"]
description = "An org-mode parser."
edition = "2021"
@@ -39,17 +40,33 @@ path = "src/lib.rs"
path = "src/bin_foreign_document_test.rs"
required-features = ["foreign_document_test"]

[[bin]]
name = "wasm"
path = "src/bin_wasm.rs"
required-features = ["wasm"]

[[bin]]
# This bin exists for development purposes only. The real target of this crate is the library.
name = "wasm_test"
path = "src/bin_wasm_test.rs"
required-features = ["wasm_test"]

[dependencies]
futures = { version = "0.3.28", optional = true }
gloo-utils = "0.2.0"
nom = "7.1.1"
opentelemetry = { version = "0.20.0", optional = true, default-features = false, features = ["trace", "rt-tokio"] }
opentelemetry-otlp = { version = "0.13.0", optional = true }
opentelemetry-semantic-conventions = { version = "0.12.0", optional = true }
serde = { version = "1.0.193", optional = true, features = ["derive"] }
serde-wasm-bindgen = { version = "0.6.3", optional = true }
serde_json = { version = "1.0.108", optional = true }
tokio = { version = "1.30.0", optional = true, default-features = false, features = ["rt", "rt-multi-thread"] }
tracing = { version = "0.1.37", optional = true }
tracing-opentelemetry = { version = "0.20.0", optional = true }
tracing-subscriber = { version = "0.3.17", optional = true, features = ["env-filter"] }
walkdir = { version = "2.3.3", optional = true }
wasm-bindgen = { version = "0.2.89", optional = true }

[build-dependencies]
walkdir = "2.3.3"
@@ -59,6 +76,9 @@ default = []
compare = ["tokio/process", "tokio/macros"]
foreign_document_test = ["compare", "dep:futures", "tokio/sync", "dep:walkdir", "tokio/process"]
tracing = ["dep:opentelemetry", "dep:opentelemetry-otlp", "dep:opentelemetry-semantic-conventions", "dep:tokio", "dep:tracing", "dep:tracing-opentelemetry", "dep:tracing-subscriber"]
event_count = []
wasm = ["dep:serde", "dep:wasm-bindgen", "dep:serde-wasm-bindgen"]
wasm_test = ["wasm", "dep:serde_json", "tokio/process", "tokio/macros"]

# Optimized build for any sort of release.
[profile.release-lto]
@@ -78,3 +98,15 @@ strip = "symbols"
inherits = "release"
lto = true
debug = true

[profile.wasm]
inherits = "release"
lto = true
strip = true

[profile.dev]
codegen-backend = "cranelift"

[profile.dev.package."*"]
codegen-backend = "llvm"
opt-level = 3
Makefile (69 lines changed)
@@ -7,6 +7,7 @@ MAKEFLAGS += --no-builtin-rules
TESTJOBS := 4
OS:=$(shell uname -s)
RELEASEFLAGS :=
WASMTARGET := bundler # or web

ifeq ($(OS),Linux)
TESTJOBS:=$(shell nproc)
@@ -21,55 +22,83 @@ ifeq ($(origin .RECIPEPREFIX), undefined)
endif
.RECIPEPREFIX = >

.PHONY: help
help: ## List the available make targets.
> @grep -h "##" $(MAKEFILE_LIST) | grep -v grep | sed -E 's/^([^:]*): *## */\1: /'

.PHONY: build
build:
build: ## Make a debug build of the project.
> cargo build

.PHONY: release
release:
release: ## Make an optimized build of the project.
> cargo build --release $(RELEASEFLAGS)

.PHONY: wasm
wasm: ## Build the parser as wasm.
> cargo build --target=wasm32-unknown-unknown --profile wasm --bin wasm --features wasm
> wasm-bindgen --target $(WASMTARGET) --out-dir target/wasm32-unknown-unknown/js target/wasm32-unknown-unknown/wasm/wasm.wasm

.PHONY: clean
clean:
clean: ## Delete the built binaries.
> cargo clean
> $(MAKE) -C docker/organic_development TARGET=builder clean
> $(MAKE) -C docker/organic_development TARGET=format clean
> $(MAKE) -C docker/organic_development TARGET=clippy clean
> $(MAKE) -C docker/organic_development TARGET=wasm clean
> $(MAKE) -C docker/organic_test TARGET=tester build

.PHONY: format
format:
> $(MAKE) -C docker/cargo_fmt run
format: ## Format the code.
> cargo fmt

.PHONY: dockerclippy
dockerclippy:
> $(MAKE) -C docker/organic_clippy run
.PHONY: docker_format
docker_format: ## Format the code using docker.
> $(MAKE) -C docker/organic_development TARGET=format build
> docker run --rm -i -t --mount type=tmpfs,destination=/tmp -v "$(shell readlink -f .):/source" --workdir=/source --env CARGO_TARGET_DIR=/target -v "organic-cargo-registry:/usr/local/cargo/registry" organic-development-format cargo fmt

.PHONY: docker_clippy
docker_clippy: ## Lint the code using docker.
> $(MAKE) -C docker/organic_development TARGET=clippy build
> docker run --rm -i -t --mount type=tmpfs,destination=/tmp -v "$(shell readlink -f .):/source" --workdir=/source --env CARGO_TARGET_DIR=/target -v "organic-cargo-registry:/usr/local/cargo/registry" organic-development-clippy cargo clippy --no-deps --all-targets --all-features -- -D warnings

.PHONY: clippy
clippy:
clippy: ## Lint the code.
> cargo clippy --no-deps --all-targets --all-features -- -D warnings

.PHONY: clippyfix
clippyfix:
> cargo clippy --fix --lib -p organic --all-features

.PHONY: test
test:
test: ## Run the test suite.
> cargo test --no-default-features --features compare --no-fail-fast --lib --test test_loader -- --test-threads $(TESTJOBS)

.PHONY: dockertest
dockertest:
> $(MAKE) -C docker/organic_test
.PHONY: doc
doc: ## Generate documentation.
> cargo doc --no-deps --open --lib --release --all-features

.PHONY: docker_test
docker_test: ## Run the test suite using docker.
> $(MAKE) -C docker/organic_test TARGET=tester build
> docker run --init --rm -i -t --read-only -v "$$(readlink -f ./):/source:ro" --mount type=tmpfs,destination=/tmp --mount source=cargo-cache,target=/usr/local/cargo/registry --mount source=rust-cache,target=/target --env CARGO_TARGET_DIR=/target -w /source organic-test --no-default-features --features compare --no-fail-fast --lib --test test_loader -- --test-threads $(TESTJOBS)

.PHONY: buildtest
buildtest:
.PHONY: docker_wasm_test
docker_wasm_test: ## Run the test suite with wasm tests.
> $(MAKE) -C docker/organic_test TARGET=tester build
> docker run --init --rm -i -t --read-only -v "$$(readlink -f ./):/source:ro" --mount type=tmpfs,destination=/tmp --mount source=cargo-cache,target=/usr/local/cargo/registry --mount source=rust-cache,target=/target --env CARGO_TARGET_DIR=/target -w /source organic-test --no-default-features --features compare,wasm_test --no-fail-fast --lib --test test_loader autogen_wasm_ -- --test-threads $(TESTJOBS)

.PHONY: build_test
build_test:
> cargo build --no-default-features
> cargo build --no-default-features --features compare
> cargo build --no-default-features --features tracing
> cargo build --no-default-features --features compare,tracing
> cargo build --no-default-features --features compare,foreign_document_test
> cargo build --no-default-features --features compare,tracing,foreign_document_test
> cargo build --target wasm32-unknown-unknown --profile wasm --bin wasm --no-default-features --features wasm
> cargo build --bin wasm_test --no-default-features --features wasm_test

.PHONY: foreign_document_test
foreign_document_test:
> $(MAKE) -C docker/organic_test run_foreign_document_test
> $(MAKE) -C docker/organic_test TARGET=foreign-document build
> docker run --init --rm -i -t --read-only -v "$$(readlink -f ./):/source:ro" --mount type=tmpfs,destination=/tmp --mount source=cargo-cache,target=/usr/local/cargo/registry --mount source=rust-cache,target=/target --env CARGO_TARGET_DIR=/target -w /source organic-test-foreign-document

.PHONY: dockerclean
dockerclean:
@@ -10,17 +10,16 @@ Currently, Organic parses most documents the same as the official org-mode parse

### Project Goals
- We aim to provide perfect parity with the emacs org-mode parser. In that regard, any document that parses differently between Emacs and Organic is considered a bug.
- The parser should have minimal dependencies. This should reduce effort w.r.t.: security audits, legal compliance, portability.
- The parser should have minimal dependencies.
- The parser should be usable everywhere. In the interest of getting org-mode used in as many places as possible, this parser should be usable by everyone everywhere. This means:
- The parser should be usable everywhere. In the interest of getting org used in as many places as possible, this parser should be usable by everyone everywhere. This means:
  - It must have a permissive license.
  - We will investigate compiling to WASM. This is an important goal of the project and will definitely happen, but only after the parser has a more stable API.
  - It compiles both natively and to wasm.
  - We will investigate compiling to a C library for native linking to other code. This is more of a maybe-goal for the project.

### Project Non-Goals
- This project will not include an elisp engine since that would drastically increase the complexity of the code. Any features requiring an elisp engine will not be implemented (for example, Emacs supports embedded eval expressions in documents but this parser will never support that).
- This project is exclusively an org-mode **parser**. This limits its scope to roughly the output of `(org-element-parse-buffer)`. It will not render org-mode documents in other formats like HTML or LaTeX.

### Project Maybe-Goals
- table.el support. Currently we support org-mode tables but org-mode also allows table.el tables. So far, their use in org-mode documents seems rather uncommon so this is a low-priority feature.
- Document editing support. I do not anticipate any advanced editing features to make editing ergonomic, but it should be relatively easy to be able to parse an org-mode document and serialize it back into org-mode. This would enable cool features to be built on top of the library like auto-formatters. To accomplish this feature, we'd have to capture all of the various separators and whitespace that we are currently simply throwing away. This would add many additional fields to the parsed structs and it would add more noise to the parsers themselves, so I do not want to approach this feature until the parser is more complete since it would make modifications and refactoring more difficult.

### Supported Versions
This project targets the version of Emacs and Org-mode that are built into the [organic-test docker image](docker/organic_test/Dockerfile). This is newer than the version of Org-mode that shipped with Emacs 29.1. The parser itself does not depend on Emacs or Org-mode though, so this only matters for development purposes when running the automated tests that compare against upstream Org-mode.
10 build.rs
@@ -26,7 +26,7 @@ fn main() {
            dir_entry.file_type().is_file()
                && Path::new(dir_entry.file_name())
                    .extension()
                    .map(|ext| ext.to_ascii_lowercase() == "org")
                    .map(|ext| ext.eq_ignore_ascii_case("org"))
                    .unwrap_or(false)
        }
        Err(_) => true,
@@ -66,10 +66,6 @@ fn write_test(test_file: &mut File, test: &walkdir::DirEntry) {
}

#[cfg(feature = "compare")]
fn is_expect_fail(name: &str) -> Option<&str> {
    match name {
        "greater_element_drawer_drawer_with_headline_inside" => Some("Apparently lines with :end: become their own paragraph. This odd behavior needs to be investigated more."),
        "element_container_priority_footnote_definition_dynamic_block" => Some("Apparently broken begin lines become their own paragraph."),
        _ => None,
    }
fn is_expect_fail(_name: &str) -> Option<&str> {
    None
}
@@ -1,6 +0,0 @@
FROM rustlang/rust:nightly-alpine3.17

RUN apk add --no-cache musl-dev
RUN rustup component add rustfmt

ENTRYPOINT ["cargo", "fmt"]
@@ -1,36 +0,0 @@
IMAGE_NAME:=cargo-fmt
# REMOTE_REPO:=harbor.fizz.buzz/private

.PHONY: all
all: build push

.PHONY: build
build:
	docker build -t $(IMAGE_NAME) -f Dockerfile .

.PHONY: push
push:
ifdef REMOTE_REPO
	docker tag $(IMAGE_NAME) $(REMOTE_REPO)/$(IMAGE_NAME)
	docker push $(REMOTE_REPO)/$(IMAGE_NAME)
else
	@echo "REMOTE_REPO not defined, not pushing to a remote repo."
endif

.PHONY: clean
clean:
	docker rmi $(IMAGE_NAME)
ifdef REMOTE_REPO
	docker rmi $(REMOTE_REPO)/$(IMAGE_NAME)
else
	@echo "REMOTE_REPO not defined, not removing from remote repo."
endif

# NOTE: This target will write to folders underneath the git-root
.PHONY: run
run: build
	docker run --rm --init --read-only --mount type=tmpfs,destination=/tmp -v "$$(readlink -f ../../):/source" --workdir=/source $(IMAGE_NAME)

.PHONY: shell
shell: build
	docker run --rm -i -t --entrypoint /bin/sh --mount type=tmpfs,destination=/tmp -v "$$(readlink -f ../../):/source" --workdir=/source $(IMAGE_NAME)
@@ -1,6 +0,0 @@
FROM rustlang/rust:nightly-alpine3.17

RUN apk add --no-cache musl-dev
RUN cargo install --locked --no-default-features --features ci-autoclean cargo-cache

ENTRYPOINT ["cargo", "build"]
@@ -1,37 +0,0 @@
IMAGE_NAME:=organic-build
# REMOTE_REPO:=harbor.fizz.buzz/private

.PHONY: all
all: build push

.PHONY: build
build:
	docker build -t $(IMAGE_NAME) -f Dockerfile .

.PHONY: push
push:
ifdef REMOTE_REPO
	docker tag $(IMAGE_NAME) $(REMOTE_REPO)/$(IMAGE_NAME)
	docker push $(REMOTE_REPO)/$(IMAGE_NAME)
else
	@echo "REMOTE_REPO not defined, not pushing to a remote repo."
endif

.PHONY: clean
clean:
	docker rmi $(IMAGE_NAME)
ifdef REMOTE_REPO
	docker rmi $(REMOTE_REPO)/$(IMAGE_NAME)
else
	@echo "REMOTE_REPO not defined, not removing from remote repo."
endif
	docker volume rm cargo-cache

# NOTE: This target will write to folders underneath the git-root
.PHONY: run
run: build
	docker run --rm --init --read-only --mount type=tmpfs,destination=/tmp -v "$$(readlink -f ../../):/source" --workdir=/source --mount source=cargo-cache,target=/usr/local/cargo/registry $(IMAGE_NAME)

.PHONY: shell
shell: build
	docker run --rm -i -t --entrypoint /bin/sh --mount type=tmpfs,destination=/tmp -v "$$(readlink -f ../../):/source" --workdir=/source --mount source=cargo-cache,target=/usr/local/cargo/registry $(IMAGE_NAME)
@@ -1,5 +0,0 @@
FROM rustlang/rust:nightly-alpine3.17

RUN apk add --no-cache musl-dev

ENTRYPOINT ["cargo", "clippy", "--no-deps", "--all-targets", "--all-features", "--", "-D", "warnings"]
@@ -1,37 +0,0 @@
IMAGE_NAME:=organic-clippy
# REMOTE_REPO:=harbor.fizz.buzz/private

.PHONY: all
all: build push

.PHONY: build
build:
	docker build -t $(IMAGE_NAME) -f Dockerfile .

.PHONY: push
push:
ifdef REMOTE_REPO
	docker tag $(IMAGE_NAME) $(REMOTE_REPO)/$(IMAGE_NAME)
	docker push $(REMOTE_REPO)/$(IMAGE_NAME)
else
	@echo "REMOTE_REPO not defined, not pushing to a remote repo."
endif

.PHONY: clean
clean:
	docker rmi $(IMAGE_NAME)
ifdef REMOTE_REPO
	docker rmi $(REMOTE_REPO)/$(IMAGE_NAME)
else
	@echo "REMOTE_REPO not defined, not removing from remote repo."
endif
	docker volume rm cargo-cache

# NOTE: This target will write to folders underneath the git-root
.PHONY: run
run: build
	docker run --rm --init --read-only --mount type=tmpfs,destination=/tmp -v "$$(readlink -f ../../):/source" --workdir=/source --mount source=cargo-cache,target=/usr/local/cargo/registry $(IMAGE_NAME)

.PHONY: shell
shell: build
	docker run --rm -i -t --entrypoint /bin/sh --mount type=tmpfs,destination=/tmp -v "$$(readlink -f ../../):/source" --workdir=/source --mount source=cargo-cache,target=/usr/local/cargo/registry $(IMAGE_NAME)
20 docker/organic_development/Dockerfile Normal file
@@ -0,0 +1,20 @@
# syntax=docker/dockerfile:1
ARG ALPINE_VERSION="3.20"

FROM rustlang/rust:nightly-alpine$ALPINE_VERSION AS builder

RUN apk add --no-cache musl-dev
RUN --mount=type=tmpfs,target=/tmp --mount=type=cache,target=/usr/local/cargo/registry,sharing=locked cargo install --locked --no-default-features --features ci-autoclean cargo-cache
RUN rustup component add rustc-codegen-cranelift

FROM builder AS format

RUN rustup component add rustfmt

FROM builder AS clippy

RUN rustup component add clippy

FROM builder AS wasm

RUN rustup target add wasm32-unknown-unknown

36 docker/organic_development/Makefile Normal file
@@ -0,0 +1,36 @@
SHELL := bash
.ONESHELL:
.SHELLFLAGS := -eu -o pipefail -c
.DELETE_ON_ERROR:
MAKEFLAGS += --warn-undefined-variables
MAKEFLAGS += --no-builtin-rules

ifeq ($(origin .RECIPEPREFIX), undefined)
$(error This Make does not support .RECIPEPREFIX. Please use GNU Make 4.0 or later)
endif
.RECIPEPREFIX = >

TARGET := builder
IMAGE_NAME := organic-development
ifneq ($(TARGET),builder)
IMAGE_NAME := $(IMAGE_NAME)-$(TARGET)
endif

.PHONY: help
help:
> @grep -h "##" $(MAKEFILE_LIST) | grep -v grep | sed -E 's/^([^:]*): *## */\1: /'

.PHONY: build
build: ## Build the docker image.
> docker build --tag $(IMAGE_NAME) --target=$(TARGET) --file Dockerfile .
> docker volume create organic-cargo-registry

.PHONY: shell
shell: ## Launch an interactive shell inside the docker image with the source repository mounted at /source.
shell: build
> docker run --rm -i -t --entrypoint /bin/sh --mount type=tmpfs,destination=/tmp -v "$$(readlink -f ../../):/source" --workdir=/source --env CARGO_TARGET_DIR=/target -v "organic-cargo-registry:/usr/local/cargo/registry" $(IMAGE_NAME)

.PHONY: clean
clean: ## Remove the docker image and volume.
> docker rmi $(IMAGE_NAME)
> docker volume rm organic-cargo-registry
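A note on the TARGET pattern above (my inference from the Makefile, not stated anywhere in the diff): overriding TARGET on the command line selects the matching Dockerfile stage and suffixes the image name, so for example `make TARGET=clippy build` builds the clippy stage and tags it organic-development-clippy, while the default `make build` produces the plain organic-development builder image.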
@@ -1,10 +1,25 @@
FROM alpine:3.17 AS build
# syntax=docker/dockerfile:1
ARG ALPINE_VERSION="3.20"

# ARG EMACS_REPO=https://git.savannah.gnu.org/git/emacs.git
ARG EMACS_REPO=https://code.fizz.buzz/mirror/emacs.git

ARG EMACS_VERSION=emacs-29.1

# ARG ORG_MODE_REPO=https://git.savannah.gnu.org/git/emacs/org-mode.git
ARG ORG_MODE_REPO=https://code.fizz.buzz/mirror/org-mode.git

ARG ORG_VERSION=abf5156096c06ee5aa05795c3dc5a065f76ada97


FROM alpine:$ALPINE_VERSION AS build
RUN apk add --no-cache build-base musl-dev git autoconf make texinfo gnutls-dev ncurses-dev gawk libgccjit-dev


FROM build AS build-emacs
ARG EMACS_VERSION=emacs-29.1
RUN git clone --depth 1 --branch $EMACS_VERSION https://git.savannah.gnu.org/git/emacs.git /root/emacs
ARG EMACS_VERSION
ARG EMACS_REPO
RUN git clone --depth 1 --branch $EMACS_VERSION $EMACS_REPO /root/emacs
WORKDIR /root/emacs
RUN mkdir /root/dist
RUN ./autogen.sh
@@ -14,23 +29,25 @@ RUN make DESTDIR="/root/dist" install


FROM build AS build-org-mode
ARG ORG_VERSION=abf5156096c06ee5aa05795c3dc5a065f76ada97
COPY --from=build-emacs /root/dist/ /
ARG ORG_VERSION
ARG ORG_MODE_REPO
COPY --link --from=build-emacs /root/dist/ /
RUN mkdir /root/dist
# Savannah does not allow fetching specific revisions, so we're going to have to put unnecessary load on their server by cloning main and then checking out the revision we want.
RUN git clone https://git.savannah.gnu.org/git/emacs/org-mode.git /root/org-mode && git -C /root/org-mode checkout $ORG_VERSION
# RUN mkdir /root/org-mode && git -C /root/org-mode init --initial-branch=main && git -C /root/org-mode remote add origin https://git.savannah.gnu.org/git/emacs/org-mode.git && git -C /root/org-mode fetch origin $ORG_VERSION && git -C /root/org-mode checkout FETCH_HEAD
RUN git clone $ORG_MODE_REPO /root/org-mode && git -C /root/org-mode checkout $ORG_VERSION
# RUN mkdir /root/org-mode && git -C /root/org-mode init --initial-branch=main && git -C /root/org-mode remote add origin $ORG_REPO && git -C /root/org-mode fetch origin $ORG_VERSION && git -C /root/org-mode checkout FETCH_HEAD
WORKDIR /root/org-mode
RUN make compile
RUN make DESTDIR="/root/dist" install


FROM rustlang/rust:nightly-alpine3.17 AS tester
FROM rustlang/rust:nightly-alpine$ALPINE_VERSION AS tester
ENV LANG=en_US.UTF-8
RUN apk add --no-cache musl-dev ncurses gnutls libgccjit
RUN cargo install --locked --no-default-features --features ci-autoclean cargo-cache
COPY --from=build-emacs /root/dist/ /
COPY --from=build-org-mode /root/dist/ /
RUN --mount=type=tmpfs,target=/tmp --mount=type=cache,target=/usr/local/cargo/registry,sharing=locked cargo install --locked --no-default-features --features ci-autoclean cargo-cache
RUN rustup component add rustc-codegen-cranelift
COPY --link --from=build-emacs /root/dist/ /
COPY --link --from=build-org-mode /root/dist/ /

ENTRYPOINT ["cargo", "test"]

@@ -93,13 +110,20 @@ ARG WORG_PATH=/foreign_documents/worg
ARG WORG_REPO=https://git.sr.ht/~bzg/worg
RUN mkdir -p $WORG_PATH && git -C $WORG_PATH init --initial-branch=main && git -C $WORG_PATH remote add origin $WORG_REPO && git -C $WORG_PATH fetch origin $WORG_VERSION && git -C $WORG_PATH checkout FETCH_HEAD

ARG LITERATE_BUILD_EMACS_VERSION=e3ac1afe1e40af601be7af12c1d13d96308ab209
ARG LITERATE_BUILD_EMACS_PATH=/foreign_documents/literate_build_emacs
ARG LITERATE_BUILD_EMACS_REPO=https://gitlab.com/spudlyo/orgdemo2.git
RUN mkdir -p $LITERATE_BUILD_EMACS_PATH && git -C $LITERATE_BUILD_EMACS_PATH init --initial-branch=main && git -C $LITERATE_BUILD_EMACS_PATH remote add origin $LITERATE_BUILD_EMACS_REPO && git -C $LITERATE_BUILD_EMACS_PATH fetch origin $LITERATE_BUILD_EMACS_VERSION && git -C $LITERATE_BUILD_EMACS_PATH checkout FETCH_HEAD
# unused/aws.org contains invalid paths for setupfile which causes both upstream org-mode and Organic to error out.
RUN rm $LITERATE_BUILD_EMACS_PATH/unused/aws.org

FROM tester as foreign-document-test
FROM tester as foreign-document
RUN apk add --no-cache bash coreutils
RUN mkdir /foreign_documents
COPY --from=foreign-document-gather /foreign_documents/howardabrams /foreign_documents/howardabrams
COPY --from=foreign-document-gather /foreign_documents/doomemacs /foreign_documents/doomemacs
COPY --from=foreign-document-gather /foreign_documents/worg /foreign_documents/worg
COPY --from=build-org-mode /root/org-mode /foreign_documents/org-mode
COPY --from=build-emacs /root/emacs /foreign_documents/emacs
COPY --link --from=foreign-document-gather /foreign_documents/howardabrams /foreign_documents/howardabrams
COPY --link --from=foreign-document-gather /foreign_documents/doomemacs /foreign_documents/doomemacs
COPY --link --from=foreign-document-gather /foreign_documents/worg /foreign_documents/worg
COPY --link --from=foreign-document-gather /foreign_documents/literate_build_emacs /foreign_documents/literate_build_emacs
COPY --link --from=build-org-mode /root/org-mode /foreign_documents/org-mode
COPY --link --from=build-emacs /root/emacs /foreign_documents/emacs
ENTRYPOINT ["cargo", "run", "--bin", "foreign_document_test", "--features", "compare,foreign_document_test", "--profile", "release-lto"]
@@ -1,44 +1,36 @@
IMAGE_NAME:=organic-test
# REMOTE_REPO:=harbor.fizz.buzz/private
SHELL := bash
.ONESHELL:
.SHELLFLAGS := -eu -o pipefail -c
.DELETE_ON_ERROR:
MAKEFLAGS += --warn-undefined-variables
MAKEFLAGS += --no-builtin-rules

.PHONY: all
all: build push
ifeq ($(origin .RECIPEPREFIX), undefined)
$(error This Make does not support .RECIPEPREFIX. Please use GNU Make 4.0 or later)
endif
.RECIPEPREFIX = >

TARGET := tester
IMAGE_NAME := organic-test
ifneq ($(TARGET),tester)
IMAGE_NAME := $(IMAGE_NAME)-$(TARGET)
endif

.PHONY: help
help:
> @grep -h "##" $(MAKEFILE_LIST) | grep -v grep | sed -E 's/^([^:]*): *## */\1: /'

.PHONY: build
build:
	docker build -t $(IMAGE_NAME) -f Dockerfile --target tester .

.PHONY: build_foreign_document_test
build_foreign_document_test:
	docker build -t $(IMAGE_NAME)-foreign-document -f Dockerfile --target foreign-document-test .

.PHONY: push
push:
ifdef REMOTE_REPO
	docker tag $(IMAGE_NAME) $(REMOTE_REPO)/$(IMAGE_NAME)
	docker push $(REMOTE_REPO)/$(IMAGE_NAME)
else
	@echo "REMOTE_REPO not defined, not pushing to a remote repo."
endif

.PHONY: clean
clean:
	docker rmi $(IMAGE_NAME)
ifdef REMOTE_REPO
	docker rmi $(REMOTE_REPO)/$(IMAGE_NAME)
else
	@echo "REMOTE_REPO not defined, not removing from remote repo."
endif
	docker volume rm rust-cache cargo-cache

.PHONY: run
run: build
	docker run --rm --init --read-only --mount type=tmpfs,destination=/tmp -v "$$(readlink -f ../../):/source:ro" --workdir=/source --mount source=cargo-cache,target=/usr/local/cargo/registry --mount source=rust-cache,target=/target --env CARGO_TARGET_DIR=/target $(IMAGE_NAME) --no-default-features --features compare --no-fail-fast --lib --test test_loader
build: ## Build the docker image.
> docker build --tag $(IMAGE_NAME) --target=$(TARGET) --file Dockerfile .
> docker volume create organic-cargo-registry

.PHONY: shell
shell: ## Launch an interactive shell inside the docker image with the source repository mounted at /source.
shell: build
	docker run --rm -i -t --entrypoint /bin/sh --mount type=tmpfs,destination=/tmp -v "$$(readlink -f ../../):/source:ro" --workdir=/source --mount source=cargo-cache,target=/usr/local/cargo/registry --mount source=rust-cache,target=/target --env CARGO_TARGET_DIR=/target $(IMAGE_NAME)
> docker run --rm -i -t --entrypoint /bin/sh --mount type=tmpfs,destination=/tmp -v "$$(readlink -f ../../):/source" --workdir=/source --env CARGO_TARGET_DIR=/target -v "organic-cargo-registry:/usr/local/cargo/registry" $(IMAGE_NAME)

.PHONY: run_foreign_document_test
run_foreign_document_test: build_foreign_document_test
	docker run --rm --init --read-only --mount type=tmpfs,destination=/tmp -v "$$(readlink -f ../../):/source:ro" --workdir=/source --mount source=cargo-cache,target=/usr/local/cargo/registry --mount source=rust-cache,target=/target --env CARGO_TARGET_DIR=/target $(IMAGE_NAME)-foreign-document
.PHONY: clean
clean: ## Remove the docker image and volume.
> docker rmi $(IMAGE_NAME)
> docker volume rm organic-cargo-registry
5 org_mode_samples/affiliated_keyword/empty_caption.org Normal file
@@ -0,0 +1,5 @@
#+caption:
#+caption: *foo*
#+caption[bar]:
#+begin_src bash
#+end_src

@@ -0,0 +1,3 @@
foo
:end:
bar

@@ -0,0 +1,2 @@
foo
:end:

0 org_mode_samples/document/empty.org Normal file

4 org_mode_samples/document/only_line_breaks.org Normal file
@@ -0,0 +1,4 @@




5 org_mode_samples/document/post_blank.org Normal file
@@ -0,0 +1,5 @@
* foo



@@ -1,3 +1,32 @@
* Empty
:PROPERTIES:
:END:
* Single new line
:PROPERTIES:

:END:
* Single line with spaces
:PROPERTIES:

:END:
* Many lines, first line without spaces
:PROPERTIES:




:END:
* Many lines, first line with spaces
:PROPERTIES:




:END:
* Many lines, first line with spaces, later line with spaces
:PROPERTIES:




:END:
@@ -5,3 +5,5 @@
#+call: dolar cat(dog)

#+call: (bat)

#+call:

@@ -0,0 +1,3 @@
: foo
:
: bar

@@ -0,0 +1,6 @@
1. foo
   #+begin_src text

   #+end_src

2. baz

@@ -0,0 +1 @@
[[file:simple.org::2]]

1 org_mode_samples/object/regular_link/tramp_link.org Normal file
@@ -0,0 +1 @@
[[/ssh:admin@test.example:important/file.pdf]]

@@ -1,3 +1,3 @@
foo <<bar>> baz
<<FOO>> bar

lorem << ipsum >> dolar
[[FOO][baz]]

@@ -0,0 +1,5 @@
* foo

** bar

* baz
76 scripts/build_all_feature_flag_combinations.bash Executable file
@@ -0,0 +1,76 @@
#!/usr/bin/env bash
#
# Build the crate with every combination of feature flags.
set -euo pipefail
IFS=$'\n\t'
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"

: ${PROFILE:="debug"}

############## Setup #########################

function cleanup {
    for f in "${folders[@]}"; do
        log "Deleting $f"
        rm -rf "$f"
    done
}
folders=()
for sig in EXIT INT QUIT HUP TERM; do
    trap "set +e; cleanup" "$sig"
done

function die {
    local status_code="$1"
    shift
    (>&2 echo "${@}")
    exit "$status_code"
}

function log {
    (>&2 echo "${@}")
}

############## Program #########################

function main {
    if [ "$#" -gt 0 ]; then
        export CARGO_TARGET_DIR="$1"
    else
        local work_directory=$(mktemp -d -t 'organic.XXXXXX')
        folders+=("$work_directory")
        export CARGO_TARGET_DIR="$work_directory"
    fi
    local features=(compare foreign_document_test tracing event_count wasm wasm_test)
    ENABLED_FEATURES= for_each_combination "${features[@]}"
}

function for_each_combination {
    local additional_flags=()
    if [ "$PROFILE" = "dev" ] || [ "$PROFILE" = "debug" ]; then
        PROFILE="debug"
    else
        additional_flags+=(--profile "$PROFILE")
    fi

    local flag=$1
    shift

    if [ "$#" -gt 0 ]; then
        ENABLED_FEATURES="$ENABLED_FEATURES" for_each_combination "${@}"
    elif [ -z "$ENABLED_FEATURES" ]; then
        (cd "$DIR/../" && printf "\n\n\n========== no features ==========\n\n\n" && set -x && cargo build "${additional_flags[@]}" --no-default-features)
    else
        (cd "$DIR/../" && printf "\n\n\n========== %s ==========\n\n\n" "${ENABLED_FEATURES:1}" && set -x && cargo build "${additional_flags[@]}" --no-default-features --features "${ENABLED_FEATURES:1}")
    fi

    ENABLED_FEATURES="$ENABLED_FEATURES,$flag"
    if [ "$#" -gt 0 ]; then
        ENABLED_FEATURES="$ENABLED_FEATURES" for_each_combination "${@}"
    else
        (cd "$DIR/../" && printf "\n\n\n========== %s ==========\n\n\n" "${ENABLED_FEATURES:1}" && set -x && cargo build "${additional_flags[@]}" --no-default-features --features "${ENABLED_FEATURES:1}")
    fi
}

main "${@}"
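The recursion above enumerates the power set of the feature list: each call recurses once without the current flag and once with it appended, issuing one cargo build per subset (2^6 = 64 builds for the six features listed). For intuition, here is a minimal Rust sketch of the same enumeration using a bitmask; it is illustrative only and not part of the repository:

fn main() {
    let features = [
        "compare",
        "foreign_document_test",
        "tracing",
        "event_count",
        "wasm",
        "wasm_test",
    ];
    // Each bit of `mask` decides whether the corresponding feature is enabled.
    for mask in 0u32..(1 << features.len()) {
        let enabled: Vec<&str> = features
            .iter()
            .enumerate()
            .filter(|&(i, _)| mask & (1 << i) != 0)
            .map(|(_, f)| *f)
            .collect();
        if enabled.is_empty() {
            println!("cargo build --no-default-features");
        } else {
            println!(
                "cargo build --no-default-features --features {}",
                enabled.join(",")
            );
        }
    }
}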
58 scripts/dump_ast.bash Executable file
@@ -0,0 +1,58 @@
#!/usr/bin/env bash
#
# Dump the AST of an org-mode document from emacs
set -euo pipefail
IFS=$'\n\t'
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"

REALPATH=$(command -v uu-realpath || command -v realpath)
MAKE=$(command -v gmake || command -v make)

############## Setup #########################

function die {
    local status_code="$1"
    shift
    (>&2 echo "${@}")
    exit "$status_code"
}

function log {
    (>&2 echo "${@}")
}

############## Program #########################

function main {
    if [ $# -eq 0 ]; then
        dump_ast_stdin "${@}"
    else
        dump_ast_file "${@}"
    fi
}

function dump_ast_stdin {
    # Until we can find a good way to encode stdin as an elisp string in bash, I cannot operate on stdin.
    die 1 "This script only works on files."
}

function dump_ast_file {
    local target_file mounted_file elisp_script
    target_file=$($REALPATH "$1")
    mounted_file="/input${target_file}"
    elisp_script=$(cat <<EOF
(progn
  (erase-buffer)
  (require 'org)
  (defun org-table-align () t)
  (find-file-read-only "${mounted_file}")
  (org-mode)
  (message "%s" (pp-to-string (org-element-parse-buffer)))
)
EOF
)
    exec docker run --init --rm -i --mount type=tmpfs,destination=/tmp -v "/:/input:ro" --entrypoint "" organic-test emacs -q --no-site-file --no-splash --batch --eval "$elisp_script"
}

main "${@}"
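Usage sketch, assuming the organic-test image has already been built (for example with make -C docker/organic_test): `./scripts/dump_ast.bash path/to/document.org` mounts the host filesystem read-only into the container and prints the `(org-element-parse-buffer)` s-expression for that file; because it is emitted with `message`, the output lands on stderr in batch mode. The `(defun org-table-align () t)` override appears to stub out table re-alignment so that visiting the file read-only cannot trigger a buffer modification — my inference, not stated in the script.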
@@ -14,7 +14,7 @@ function main {
        additional_flags+=(--profile "$PROFILE")
    fi
    (cd "$DIR/../" && cargo build --no-default-features "${additional_flags[@]}")
    perf record --freq=2000 --call-graph dwarf --output="$DIR/../perf.data" "$DIR/../target/${PROFILE}/parse" "${@}"
    perf record --freq=70000 --call-graph dwarf --output="$DIR/../perf.data" "$DIR/../target/${PROFILE}/parse" "${@}"

    # Convert to a format firefox will read
    # flags to consider --show-info
111 scripts/run_docker_wasm_compare.bash Executable file
@@ -0,0 +1,111 @@
#!/usr/bin/env bash
#
set -euo pipefail
IFS=$'\n\t'
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"

: ${SHELL:="NO"} # or YES to launch a shell instead of running the test
: ${TRACE:="NO"} # or YES to send traces to jaeger
: ${BACKTRACE:="NO"} # or YES to print a rust backtrace when panicking
: ${NO_COLOR:=""} # Set to anything to disable color output
: ${PROFILE:="debug"}

REALPATH=$(command -v uu-realpath || command -v realpath)
MAKE=$(command -v gmake || command -v make)

############## Setup #########################

function die {
    local status_code="$1"
    shift
    (>&2 echo "${@}")
    exit "$status_code"
}

function log {
    (>&2 echo "${@}")
}

############## Program #########################

function main {
    build_container
    launch_container "${@}"
}

function build_container {
    $MAKE -C "$DIR/../docker/organic_test"
}

function launch_container {
    local additional_flags=()
    local features=(wasm_test)

    if [ "$NO_COLOR" != "" ]; then
        additional_flags+=(--env "NO_COLOR=$NO_COLOR")
    fi

    if [ "$TRACE" = "YES" ]; then
        # We use the host network so it can talk to jaeger hosted at 127.0.0.1
        additional_flags+=(--network=host --env RUST_LOG=debug)
        features+=(tracing)
    fi

    if [ "$SHELL" != "YES" ]; then
        additional_flags+=(--read-only)
    else
        additional_flags+=(-t)
    fi

    if [ "$BACKTRACE" = "YES" ]; then
        additional_flags+=(--env RUST_BACKTRACE=full)
    fi

    if [ "$SHELL" = "YES" ]; then
        exec docker run "${additional_flags[@]}" --init --rm -i --mount type=tmpfs,destination=/tmp -v "/:/input:ro" -v "$($REALPATH "$DIR/../"):/source:ro" --mount source=cargo-cache,target=/usr/local/cargo/registry --mount source=rust-cache,target=/target --env CARGO_TARGET_DIR=/target -w /source --entrypoint "" organic-test /bin/sh
    fi

    local features_joined
    features_joined=$(IFS=","; echo "${features[*]}")

    local build_flags=()
    if [ "$PROFILE" = "dev" ] || [ "$PROFILE" = "debug" ]; then
        PROFILE="debug"
    else
        build_flags+=(--profile "$PROFILE")
    fi

    if [ $# -gt 0 ]; then
        # If we passed in args, we need to forward them along
        for path in "${@}"; do
            local full_path
            full_path=$($REALPATH "$path")
            init_script=$(cat <<EOF
set -euo pipefail
IFS=\$'\n\t'

cargo build --bin wasm_test --no-default-features --features "$features_joined" ${build_flags[@]}
exec /target/${PROFILE}/wasm_test "/input${full_path}"
EOF
)
            docker run "${additional_flags[@]}" --init --rm -i --mount type=tmpfs,destination=/tmp -v "/:/input:ro" -v "$($REALPATH "$DIR/../"):/source:ro" --mount source=cargo-cache,target=/usr/local/cargo/registry --mount source=rust-cache,target=/target --env CARGO_TARGET_DIR=/target -w /source --entrypoint "" organic-test sh -c "$init_script"
        done
    else
        local current_directory init_script
        current_directory=$(pwd)
        init_script=$(cat <<EOF
set -euo pipefail
IFS=\$'\n\t'

cargo build --bin wasm_test --no-default-features --features "$features_joined" ${build_flags[@]}
cd /input${current_directory}
exec /target/${PROFILE}/wasm_test
EOF
)

        docker run "${additional_flags[@]}" --init --rm -i --mount type=tmpfs,destination=/tmp -v "/:/input:ro" -v "$($REALPATH "$DIR/../"):/source:ro" --mount source=cargo-cache,target=/usr/local/cargo/registry --mount source=rust-cache,target=/target --env CARGO_TARGET_DIR=/target -w /source --entrypoint "" organic-test sh -c "$init_script"
    fi
}

main "${@}"
@@ -1,3 +1,4 @@
#![feature(exit_status_error)]
#![feature(round_char_boundary)]
#![feature(exact_size_is_empty)]
use std::io::Read;
@@ -53,6 +53,9 @@ async fn main_body() -> Result<ExitCode, Box<dyn std::error::Error>> {
    let layer = layer.chain(compare_group("doomemacs", || {
        compare_all_org_document("/foreign_documents/doomemacs")
    }));
    let layer = layer.chain(compare_group("literate_build_emacs", || {
        compare_all_org_document("/foreign_documents/literate_build_emacs")
    }));

    let running_tests: Vec<_> = layer.map(|c| tokio::spawn(c.run_test())).collect();
    let mut any_failed = false;
10 src/bin_wasm.rs Normal file
@@ -0,0 +1,10 @@
use wasm_bindgen::prelude::wasm_bindgen;

#[wasm_bindgen]
pub fn parse_org(org_contents: &str) -> wasm_bindgen::JsValue {
    organic::wasm_cli::parse_org(org_contents)
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
    Ok(())
}
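My reading of the stub main above, not documented in the diff: this file is compiled as a --bin target (see the `cargo build --target wasm32-unknown-unknown --profile wasm --bin wasm --no-default-features --features wasm` line in the Makefile earlier in this diff), and a bin target must define main even though consumers only ever call the #[wasm_bindgen]-exported parse_org through the generated JavaScript glue.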
62 src/bin_wasm_test.rs Normal file
@@ -0,0 +1,62 @@
#![feature(exact_size_is_empty)]
#![feature(exit_status_error)]
use std::io::Read;

use organic::wasm_test::wasm_run_anonymous_compare;
use organic::wasm_test::wasm_run_compare_on_file;

#[cfg(feature = "tracing")]
use crate::init_tracing::init_telemetry;
#[cfg(feature = "tracing")]
use crate::init_tracing::shutdown_telemetry;
#[cfg(feature = "tracing")]
mod init_tracing;

#[cfg(not(feature = "tracing"))]
fn main() -> Result<(), Box<dyn std::error::Error>> {
    let rt = tokio::runtime::Runtime::new()?;
    rt.block_on(async {
        let main_body_result = main_body().await;
        main_body_result
    })
}

#[cfg(feature = "tracing")]
fn main() -> Result<(), Box<dyn std::error::Error>> {
    let rt = tokio::runtime::Runtime::new()?;
    rt.block_on(async {
        init_telemetry()?;
        let main_body_result = main_body().await;
        shutdown_telemetry()?;
        main_body_result
    })
}

#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
async fn main_body() -> Result<(), Box<dyn std::error::Error>> {
    let args = std::env::args().skip(1);
    if args.is_empty() {
        let org_contents = read_stdin_to_string()?;
        if wasm_run_anonymous_compare(org_contents).await? {
        } else {
            Err("Diff results do not match.")?;
        }
        Ok(())
    } else {
        for arg in args {
            if wasm_run_compare_on_file(arg).await? {
            } else {
                Err("Diff results do not match.")?;
            }
        }
        Ok(())
    }
}

fn read_stdin_to_string() -> Result<String, Box<dyn std::error::Error>> {
    let mut stdin_contents = String::new();
    std::io::stdin()
        .lock()
        .read_to_string(&mut stdin_contents)?;
    Ok(stdin_contents)
}
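A usage reading of the control flow above, inferred from the code rather than stated in the diff: invoked with no arguments, wasm_test compares a document piped in on stdin (e.g. `wasm_test < document.org`); invoked with file arguments, it compares each named file in turn and fails with "Diff results do not match." on the first mismatch.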
@@ -1,16 +1,16 @@
use std::path::Path;

use crate::compare::diff::compare_document;
use crate::compare::diff::DiffResult;
use crate::compare::parse::emacs_parse_anonymous_org_document;
use crate::compare::parse::emacs_parse_file_org_document;
use crate::compare::parse::get_emacs_version;
use crate::compare::parse::get_org_mode_version;
use crate::compare::sexp::sexp;
use crate::context::GlobalSettings;
use crate::context::LocalFileAccessInterface;
use crate::parser::parse_file_with_settings;
use crate::parser::parse_with_settings;
use crate::util::cli::emacs_parse_anonymous_org_document;
use crate::util::cli::emacs_parse_file_org_document;
use crate::util::cli::print_versions;
use crate::util::elisp::sexp;
use crate::util::terminal::foreground_color;
use crate::util::terminal::reset_color;

pub async fn run_anonymous_compare<P: AsRef<str>>(
    org_contents: P,
@@ -68,8 +68,8 @@ pub async fn run_anonymous_compare_with_settings<'g, 's, P: AsRef<str>>(
    } else if !silent {
        println!(
            "{color}Entire document passes.{reset}",
            color = DiffResult::foreground_color(0, 255, 0),
            reset = DiffResult::reset_color(),
            color = foreground_color(0, 255, 0),
            reset = reset_color(),
        );
    }

@@ -121,19 +121,10 @@ pub async fn run_compare_on_file_with_settings<'g, 's, P: AsRef<Path>>(
    } else if !silent {
        println!(
            "{color}Entire document passes.{reset}",
            color = DiffResult::foreground_color(0, 255, 0),
            reset = DiffResult::reset_color(),
            color = foreground_color(0, 255, 0),
            reset = reset_color(),
        );
    }

    Ok(true)
}

async fn print_versions() -> Result<(), Box<dyn std::error::Error>> {
    eprintln!("Using emacs version: {}", get_emacs_version().await?.trim());
    eprintln!(
        "Using org-mode version: {}",
        get_org_mode_version().await?.trim()
    );
    Ok(())
}
@@ -1,3 +1,5 @@
use std::borrow::Borrow;
use std::borrow::Cow;
use std::collections::BTreeSet;
use std::fmt::Debug;
use std::str::FromStr;
@@ -7,8 +9,6 @@ use super::diff::artificial_owned_diff_scope;
use super::diff::compare_ast_node;
use super::diff::DiffEntry;
use super::diff::DiffStatus;
use super::sexp::unquote;
use super::sexp::Token;
use super::util::get_property;
use super::util::get_property_numeric;
use super::util::get_property_quoted_string;
@@ -18,6 +18,8 @@ use crate::types::CharOffsetInLine;
use crate::types::LineNumber;
use crate::types::RetainLabels;
use crate::types::SwitchNumberLines;
use crate::util::elisp::unquote;
use crate::util::elisp::Token;

#[derive(Debug)]
pub(crate) enum EmacsField<'s> {
@@ -262,11 +264,11 @@ pub(crate) fn compare_property_set_of_quoted_string<
        .iter()
        .map(|e| e.as_atom())
        .collect::<Result<Vec<_>, _>>()?;
    let value: Vec<String> = value
    let value: Vec<Cow<'_, str>> = value
        .into_iter()
        .map(unquote)
        .collect::<Result<Vec<_>, _>>()?;
    let value: BTreeSet<&str> = value.iter().map(|e| e.as_str()).collect();
    let value: BTreeSet<&str> = value.iter().map(|e| e.borrow()).collect();
    let mismatched: Vec<_> = value.symmetric_difference(&rust_value).copied().collect();
    if !mismatched.is_empty() {
        let this_status = DiffStatus::Bad;
@@ -546,6 +548,21 @@ where
    let mut full_status: Vec<DiffEntry<'b, 's>> = Vec::with_capacity(outer_rust_list.len());

    for (kw_e, kw_r) in outer_emacs_list.iter().zip(outer_rust_list) {
        match (kw_e.as_atom(), kw_r) {
            (Ok("nil"), (None, mandatory_value)) if mandatory_value.is_empty() => {
                // If it's an empty keyword then it becomes nil in the elisp.
                continue;
            }
            (Ok("nil"), _) => {
                let this_status = DiffStatus::Bad;
                let message = Some(format!(
                    "{} mismatch (emacs != rust) {:?} != {:?}",
                    emacs_field, kw_e, kw_r
                ));
                return Ok(ComparePropertiesResult::SelfChange(this_status, message));
            }
            _ => {}
        }
        let kw_e = kw_e.as_list()?;
        let child_status_length = kw_r.1.len() + kw_r.0.as_ref().map(|opt| opt.len()).unwrap_or(0);
        let mut child_status: Vec<DiffEntry<'b, 's>> = Vec::with_capacity(child_status_length);
@@ -554,18 +571,31 @@ where
        let mut kw_e = kw_e.iter();
        // First element is a list representing the mandatory value.
        if let Some(val_e) = kw_e.next() {
            let el = val_e.as_list()?;
            if el.len() != kw_r.1.len() {
                let this_status = DiffStatus::Bad;
                let message = Some(format!(
                    "{} mismatch (emacs != rust) {:?} != {:?}",
                    emacs_field, kw_e, kw_r
                ));
                return Ok(ComparePropertiesResult::SelfChange(this_status, message));
            }
            for (e, r) in el.iter().zip(kw_r.1.iter()) {
                child_status.push(compare_ast_node(source, e, r.into())?);
            }
            match (val_e.as_atom(), kw_r) {
                (Ok("nil"), (_, mandatory_value)) if mandatory_value.is_empty() => {}
                (Ok("nil"), _) => {
                    let this_status = DiffStatus::Bad;
                    let message = Some(format!(
                        "{} mismatch (emacs != rust) {:?} != {:?}",
                        emacs_field, kw_e, kw_r
                    ));
                    return Ok(ComparePropertiesResult::SelfChange(this_status, message));
                }
                _ => {
                    let el = val_e.as_list()?;
                    if el.len() != kw_r.1.len() {
                        let this_status = DiffStatus::Bad;
                        let message = Some(format!(
                            "{} mismatch (emacs != rust) {:?} != {:?}",
                            emacs_field, kw_e, kw_r
                        ));
                        return Ok(ComparePropertiesResult::SelfChange(this_status, message));
                    }
                    for (e, r) in el.iter().zip(kw_r.1.iter()) {
                        child_status.push(compare_ast_node(source, e, r.into())?);
                    }
                }
            };
        } else {
            let this_status = DiffStatus::Bad;
            let message = Some(format!(
@@ -653,7 +683,7 @@ pub(crate) fn compare_property_number_lines<
        (Some(number_lines), Some(rust_number_lines)) => {
            let token_list = number_lines.as_list()?;
            let number_type = token_list
                .get(0)
                .first()
                .map(Token::as_atom)
                .map_or(Ok(None), |r| r.map(Some))?
                .ok_or(":number-lines should have a type.")?;
@@ -16,10 +16,6 @@ use super::compare_field::compare_property_retain_labels;
use super::compare_field::compare_property_set_of_quoted_string;
use super::compare_field::compare_property_single_ast_node;
use super::compare_field::compare_property_unquoted_atom;
use super::elisp_fact::ElispFact;
use super::elisp_fact::GetElispFact;
use super::sexp::unquote;
use super::sexp::Token;
use super::util::affiliated_keywords_names;
use super::util::assert_no_children;
use super::util::compare_additional_properties;
@@ -57,7 +53,6 @@ use crate::types::FixedWidthArea;
use crate::types::FootnoteDefinition;
use crate::types::FootnoteReference;
use crate::types::FootnoteReferenceType;
use crate::types::GetStandardProperties;
use crate::types::Heading;
use crate::types::HorizontalRule;
use crate::types::Hour;
@@ -110,6 +105,12 @@ use crate::types::Verbatim;
use crate::types::VerseBlock;
use crate::types::WarningDelayType;
use crate::types::Year;
use crate::util::elisp::unquote;
use crate::util::elisp::Token;
use crate::util::elisp_fact::ElispFact;
use crate::util::elisp_fact::GetElispFact;
use crate::util::terminal::foreground_color;
use crate::util::terminal::reset_color;

#[derive(Debug)]
pub enum DiffEntry<'b, 's> {
@@ -128,7 +129,7 @@ pub struct DiffResult<'b, 's> {
    emacs_token: &'b Token<'s>,
}

#[derive(Debug, PartialEq)]
#[derive(Debug)]
pub(crate) enum DiffStatus {
    Good,
    Bad,
@@ -164,7 +165,7 @@ impl<'b, 's> DiffEntry<'b, 's> {

    fn is_immediately_bad(&self) -> bool {
        match self {
            DiffEntry::DiffResult(diff) => diff.status == DiffStatus::Bad,
            DiffEntry::DiffResult(diff) => matches!(diff.status, DiffStatus::Bad),
            DiffEntry::DiffLayer(_) => false,
        }
    }
@@ -201,21 +202,21 @@ impl<'b, 's> DiffResult<'b, 's> {
                if self.has_bad_children() {
                    format!(
                        "{color}BADCHILD{reset}",
                        color = DiffResult::foreground_color(255, 255, 0),
                        reset = DiffResult::reset_color(),
                        color = foreground_color(255, 255, 0),
                        reset = reset_color(),
                    )
                } else {
                    format!(
                        "{color}GOOD{reset}",
                        color = DiffResult::foreground_color(0, 255, 0),
                        reset = DiffResult::reset_color(),
                        color = foreground_color(0, 255, 0),
                        reset = reset_color(),
                    )
                }
            }
            DiffStatus::Bad => format!(
                "{color}BAD{reset}",
                color = DiffResult::foreground_color(255, 0, 0),
                reset = DiffResult::reset_color(),
                color = foreground_color(255, 0, 0),
                reset = reset_color(),
            ),
        }
    };
@@ -240,45 +241,6 @@ impl<'b, 's> DiffResult<'b, 's> {
            .iter()
            .any(|child| child.is_immediately_bad() || child.has_bad_children())
    }

    pub(crate) fn foreground_color(red: u8, green: u8, blue: u8) -> String {
        if DiffResult::should_use_color() {
            format!(
                "\x1b[38;2;{red};{green};{blue}m",
                red = red,
                green = green,
                blue = blue
            )
        } else {
            String::new()
        }
    }

    #[allow(dead_code)]
    pub(crate) fn background_color(red: u8, green: u8, blue: u8) -> String {
        if DiffResult::should_use_color() {
            format!(
                "\x1b[48;2;{red};{green};{blue}m",
                red = red,
                green = green,
                blue = blue
            )
        } else {
            String::new()
        }
    }

    pub(crate) fn reset_color() -> &'static str {
        if DiffResult::should_use_color() {
            "\x1b[0m"
        } else {
            ""
        }
    }

    fn should_use_color() -> bool {
        !std::env::var("NO_COLOR").is_ok_and(|val| !val.is_empty())
    }
}

impl<'b, 's> DiffLayer<'b, 's> {
@@ -296,14 +258,14 @@ impl<'b, 's> DiffLayer<'b, 's> {
        let status_text = if self.has_bad_children() {
            format!(
                "{color}BADCHILD{reset}",
                color = DiffResult::foreground_color(255, 255, 0),
                reset = DiffResult::reset_color(),
                color = foreground_color(255, 255, 0),
                reset = reset_color(),
            )
        } else {
            format!(
                "{color}GOOD{reset}",
                color = DiffResult::foreground_color(0, 255, 0),
                reset = DiffResult::reset_color(),
                color = foreground_color(0, 255, 0),
                reset = reset_color(),
            )
        };
        println!(
@@ -413,7 +375,7 @@ pub(crate) fn compare_ast_node<'b, 's>(
            name: rust.get_elisp_fact().get_elisp_name(),
            message: Some(e.to_string()),
            children: Vec::new(),
            rust_source: rust.get_standard_properties().get_source(),
            rust_source: rust.get_source(),
            emacs_token: emacs,
        }
        .into()
@@ -1576,7 +1538,7 @@ fn compare_example_block<'b, 's>(
        [],
        (
            EmacsField::Required(":value"),
            |r| Some(r.contents.as_str()),
            |r| Some(r.get_value()),
            compare_property_quoted_string
        ),
        (
@@ -1654,7 +1616,7 @@ fn compare_export_block<'b, 's>(
        ),
        (
            EmacsField::Required(":value"),
            |r| Some(r.contents.as_str()),
            |r| Some(r.get_value()),
            compare_property_quoted_string
        )
    ) {
@@ -1702,7 +1664,7 @@ fn compare_src_block<'b, 's>(
        ),
        (
            EmacsField::Required(":value"),
            |r| Some(r.contents.as_str()),
            |r| Some(r.get_value()),
            compare_property_quoted_string
        ),
        (
@@ -2153,7 +2115,7 @@ fn compare_plain_text<'b, 's>(
    let text = emacs.as_text()?;
    let start_ind: usize = text
        .properties
        .get(0)
        .first()
        .expect("Should have start index.")
        .as_atom()?
        .parse()?;
@@ -2,10 +2,7 @@
mod compare;
mod compare_field;
mod diff;
mod elisp_fact;
mod macros;
mod parse;
mod sexp;
mod util;
pub use compare::run_anonymous_compare;
pub use compare::run_anonymous_compare_with_settings;
@@ -1,3 +1,4 @@
use std::borrow::Cow;
use std::str::FromStr;

use super::compare_field::compare_property_list_of_quoted_string;
@@ -7,15 +8,15 @@ use super::compare_field::compare_property_quoted_string;
use super::compare_field::ComparePropertiesResult;
use super::diff::DiffEntry;
use super::diff::DiffStatus;
use super::elisp_fact::GetElispFact;
use super::sexp::Token;
use crate::compare::diff::compare_ast_node;
use crate::compare::sexp::unquote;
use crate::types::AffiliatedKeywordValue;
use crate::types::AstNode;
use crate::types::GetAffiliatedKeywords;
use crate::types::GetStandardProperties;
use crate::types::StandardProperties;
use crate::util::elisp::get_emacs_standard_properties;
use crate::util::elisp::unquote;
use crate::util::elisp::Token;
use crate::util::elisp_fact::GetElispFact;

/// Check if the child string slice is a slice of the parent string slice.
fn is_slice_of(parent: &str, child: &str) -> bool {
@@ -29,32 +30,29 @@ fn is_slice_of(parent: &str, child: &str) -> bool {
/// Get the byte offset into source that the rust object exists at.
///
/// These offsets are zero-based unlike the elisp ones.
fn get_rust_byte_offsets<'b, 's, S: StandardProperties<'s> + ?Sized>(
    original_document: &'s str,
    rust_ast_node: &'b S,
) -> (usize, usize) {
    let rust_object_source = rust_ast_node.get_source();
    debug_assert!(is_slice_of(original_document, rust_object_source));
    let offset = rust_object_source.as_ptr() as usize - original_document.as_ptr() as usize;
    let end = offset + rust_object_source.len();
fn get_rust_byte_offsets(original_document: &str, subset: &str) -> (usize, usize) {
    debug_assert!(is_slice_of(original_document, subset));
    let offset = subset.as_ptr() as usize - original_document.as_ptr() as usize;
    let end = offset + subset.len();
    (offset, end)
}
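The body of is_slice_of is elided by the hunk above. For readers following along, a pointer-range containment check consistent with how get_rust_byte_offsets uses it could look like the following sketch — an illustration, not necessarily the repository's exact implementation:

fn is_slice_of(parent: &str, child: &str) -> bool {
    // Compare raw address ranges: child must begin and end inside parent.
    let parent_start = parent.as_ptr() as usize;
    let parent_end = parent_start + parent.len();
    let child_start = child.as_ptr() as usize;
    let child_end = child_start + child.len();
    child_start >= parent_start && child_end <= parent_end
}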
pub(crate) fn compare_standard_properties<
    'b,
    's,
    S: GetStandardProperties<'s> + GetElispFact<'s> + ?Sized,
    S: StandardProperties<'s> + GetElispFact<'s> + ?Sized,
>(
    original_document: &'s str,
    emacs: &'b Token<'s>,
    rust: &'b S,
) -> Result<(), Box<dyn std::error::Error>> {
    assert_name(emacs, rust.get_elisp_fact().get_elisp_name())?;
    assert_bounds(original_document, emacs, rust.get_standard_properties())?;
    assert_bounds(original_document, emacs, rust)?;
    assert_post_blank(emacs, rust)?;
    Ok(())
}

pub(crate) fn assert_name<S: AsRef<str>>(
fn assert_name<S: AsRef<str>>(
    emacs: &Token<'_>,
    name: S,
) -> Result<(), Box<dyn std::error::Error>> {
@@ -77,101 +75,75 @@ pub(crate) fn assert_name<S: AsRef<str>>(
/// Assert that the character ranges defined by upstream org-mode's :standard-properties match the slices in Organic's StandardProperties.
///
/// This does **not** handle plain text because plain text is a special case.
pub(crate) fn assert_bounds<'b, 's, S: StandardProperties<'s> + ?Sized>(
fn assert_bounds<'b, 's, S: StandardProperties<'s> + ?Sized>(
    original_document: &'s str,
    emacs: &'b Token<'s>,
    rust: &'b S,
) -> Result<(), Box<dyn std::error::Error>> {
    let standard_properties = get_emacs_standard_properties(emacs)?; // 1-based
    let (begin, end) = (
        standard_properties
            .begin
            .ok_or("Token should have a begin.")?,
        standard_properties.end.ok_or("Token should have an end.")?,
    );
    let (rust_begin, rust_end) = get_rust_byte_offsets(original_document, rust); // 0-based
    let rust_begin_char_offset = original_document[..rust_begin].chars().count() + 1; // 1-based
    let rust_end_char_offset =
        rust_begin_char_offset + original_document[rust_begin..rust_end].chars().count(); // 1-based
    if rust_begin_char_offset != begin || rust_end_char_offset != end {
        Err(format!("Rust bounds (in chars) ({rust_begin}, {rust_end}) do not match emacs bounds ({emacs_begin}, {emacs_end})", rust_begin = rust_begin_char_offset, rust_end = rust_end_char_offset, emacs_begin=begin, emacs_end=end))?;

    // Check begin/end
    {
        let (begin, end) = (
            standard_properties
                .begin
                .ok_or("Token should have a begin.")?,
            standard_properties.end.ok_or("Token should have an end.")?,
        );
        let (rust_begin, rust_end) = get_rust_byte_offsets(original_document, rust.get_source()); // 0-based
        let rust_begin_char_offset = original_document[..rust_begin].chars().count() + 1; // 1-based
        let rust_end_char_offset =
            rust_begin_char_offset + original_document[rust_begin..rust_end].chars().count(); // 1-based
        if rust_begin_char_offset != begin || rust_end_char_offset != end {
            Err(format!("Rust bounds (in chars) ({rust_begin}, {rust_end}) do not match emacs bounds ({emacs_begin}, {emacs_end})", rust_begin = rust_begin_char_offset, rust_end = rust_end_char_offset, emacs_begin=begin, emacs_end=end))?;
        }
    }

    // Check contents-begin/contents-end
    {
        if let Some(rust_contents) = rust.get_contents() {
            let (begin, end) = (
                standard_properties
                    .contents_begin
                    .ok_or("Token should have a contents-begin.")?,
                standard_properties
                    .contents_end
                    .ok_or("Token should have an contents-end.")?,
            );
            let (rust_begin, rust_end) = get_rust_byte_offsets(original_document, rust_contents); // 0-based
            let rust_begin_char_offset = original_document[..rust_begin].chars().count() + 1; // 1-based
            let rust_end_char_offset =
                rust_begin_char_offset + original_document[rust_begin..rust_end].chars().count(); // 1-based
            if rust_begin_char_offset != begin || rust_end_char_offset != end {
                Err(format!("Rust contents bounds (in chars) ({rust_begin}, {rust_end}) do not match emacs contents bounds ({emacs_begin}, {emacs_end})", rust_begin = rust_begin_char_offset, rust_end = rust_end_char_offset, emacs_begin=begin, emacs_end=end))?;
            }
        } else if standard_properties.contents_begin.is_some()
            || standard_properties.contents_end.is_some()
        {
            Err(format!("Rust contents is None but emacs contents bounds are ({emacs_begin:?}, {emacs_end:?})", emacs_begin=standard_properties.contents_begin, emacs_end=standard_properties.contents_end))?;
        }
    }

    Ok(())
}
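A worked example of the byte-offset to 1-based character-offset conversion performed above (illustrative, not part of the diff): in the document "héllo" the subset "llo" begins at byte offset 3, because é occupies two bytes, yet its 1-based character begin is 3 and its end is 6.

fn main() {
    let doc = "héllo";
    let subset = &doc[3..6]; // "llo"; é is two bytes but one char
    let begin_byte = subset.as_ptr() as usize - doc.as_ptr() as usize; // 3
    let begin_char = doc[..begin_byte].chars().count() + 1; // 1-based begin = 3
    let end_char = begin_char + subset.chars().count(); // 1-based end = 6
    assert_eq!((begin_char, end_char), (3, 6));
}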
struct EmacsStandardProperties {
    begin: Option<usize>,
    #[allow(dead_code)]
    post_affiliated: Option<usize>,
    #[allow(dead_code)]
    contents_begin: Option<usize>,
    #[allow(dead_code)]
    contents_end: Option<usize>,
    end: Option<usize>,
    #[allow(dead_code)]
    post_blank: Option<usize>,
}
/// Assert that the post blank matches between emacs and organic.
///
/// This does **not** handle plain text because plain text is a special case.
fn assert_post_blank<'b, 's, S: StandardProperties<'s> + ?Sized>(
    emacs: &'b Token<'s>,
    rust: &'b S,
) -> Result<(), Box<dyn std::error::Error>> {
    let standard_properties = get_emacs_standard_properties(emacs)?; // 1-based
    let rust_post_blank = rust.get_post_blank();
    let emacs_post_blank = standard_properties
        .post_blank
        .ok_or("Token should have a post-blank.")?;
    if rust_post_blank as usize != emacs_post_blank {
        Err(format!("Rust post-blank {rust_post_blank} does not match emacs post-blank ({emacs_post_blank})", rust_post_blank = rust_post_blank, emacs_post_blank = emacs_post_blank))?;
    }

fn get_emacs_standard_properties(
    emacs: &Token<'_>,
) -> Result<EmacsStandardProperties, Box<dyn std::error::Error>> {
    let children = emacs.as_list()?;
    let attributes_child = children.get(1).ok_or("Should have an attributes child.")?;
    let attributes_map = attributes_child.as_map()?;
    let standard_properties = attributes_map.get(":standard-properties");
    Ok(if standard_properties.is_some() {
        let mut std_props = standard_properties
            .expect("if statement proves its Some")
            .as_vector()?
            .iter();
        let begin = maybe_token_to_usize(std_props.next())?;
        let post_affiliated = maybe_token_to_usize(std_props.next())?;
        let contents_begin = maybe_token_to_usize(std_props.next())?;
        let contents_end = maybe_token_to_usize(std_props.next())?;
        let end = maybe_token_to_usize(std_props.next())?;
        let post_blank = maybe_token_to_usize(std_props.next())?;
        EmacsStandardProperties {
            begin,
            post_affiliated,
            contents_begin,
            contents_end,
            end,
            post_blank,
        }
    } else {
        let begin = maybe_token_to_usize(attributes_map.get(":begin").copied())?;
        let end = maybe_token_to_usize(attributes_map.get(":end").copied())?;
        let contents_begin = maybe_token_to_usize(attributes_map.get(":contents-begin").copied())?;
        let contents_end = maybe_token_to_usize(attributes_map.get(":contents-end").copied())?;
        let post_blank = maybe_token_to_usize(attributes_map.get(":post-blank").copied())?;
        let post_affiliated =
            maybe_token_to_usize(attributes_map.get(":post-affiliated").copied())?;
        EmacsStandardProperties {
            begin,
            post_affiliated,
            contents_begin,
            contents_end,
            end,
            post_blank,
        }
    })
}

fn maybe_token_to_usize(
    token: Option<&Token<'_>>,
) -> Result<Option<usize>, Box<dyn std::error::Error>> {
    Ok(token
        .map(|token| token.as_atom())
        .map_or(Ok(None), |r| r.map(Some))?
        .and_then(|val| {
            if val == "nil" {
                None
            } else {
                Some(val.parse::<usize>())
            }
        })
        .map_or(Ok(None), |r| r.map(Some))?)
    Ok(())
}

/// Get a named property from the emacs token.
@@ -206,10 +178,10 @@ pub(crate) fn get_property_unquoted_atom<'s>(
/// Get a named property containing a quoted string from the emacs token.
///
/// Returns None if key is not found.
pub(crate) fn get_property_quoted_string(
    emacs: &Token<'_>,
pub(crate) fn get_property_quoted_string<'s>(
    emacs: &Token<'s>,
    key: &str,
) -> Result<Option<String>, Box<dyn std::error::Error>> {
) -> Result<Option<Cow<'s, str>>, Box<dyn std::error::Error>> {
    get_property(emacs, key)?
        .map(Token::as_atom)
        .map_or(Ok(None), |r| r.map(Some))?
@@ -240,7 +212,7 @@ where
pub(crate) fn compare_children<'b, 's, 'x, RC>(
    source: &'s str,
    emacs: &'b Token<'s>,
    rust_children: &'x Vec<RC>,
    rust_children: &'x [RC],
    child_status: &mut Vec<DiffEntry<'b, 's>>,
    this_status: &mut DiffStatus,
    message: &mut Option<String>,
@ -1,15 +1,27 @@
|
||||
use super::global_settings::EntityDefinition;
|
||||
|
||||
pub(crate) const DEFAULT_ORG_ELEMENT_PARSED_KEYWORDS: [&str; 1] = ["CAPTION"];
|
||||
/// Keywords that contain the standard set of objects (excluding footnote references).
|
||||
///
|
||||
/// Corresponds to org-element-parsed-keywords elisp variable.
|
||||
pub(crate) const ORG_ELEMENT_PARSED_KEYWORDS: [&str; 1] = ["CAPTION"];
|
||||
|
||||
pub(crate) const DEFAULT_ORG_ELEMENT_DUAL_KEYWORDS: [&str; 2] = ["CAPTION", "RESULTS"];
|
||||
/// Keywords that can have a secondary value in square brackets.
|
||||
///
|
||||
/// Corresponds to org-element-dual-keywords elisp variable.
|
||||
pub(crate) const ORG_ELEMENT_DUAL_KEYWORDS: [&str; 2] = ["CAPTION", "RESULTS"];
|
||||
|
||||
pub(crate) const DEFAULT_ORG_ELEMENT_AFFILIATED_KEYWORDS: [&str; 13] = [
|
||||
/// Keywords that can be affiliated with an element.
|
||||
///
|
||||
/// Corresponds to org-element-affiliated-keywords elisp variable.
|
||||
pub(crate) const ORG_ELEMENT_AFFILIATED_KEYWORDS: [&str; 13] = [
|
||||
"CAPTION", "DATA", "HEADER", "HEADERS", "LABEL", "NAME", "PLOT", "RESNAME", "RESULT",
|
||||
"RESULTS", "SOURCE", "SRCNAME", "TBLNAME",
|
||||
];
|
||||
|
||||
pub(crate) const DEFAULT_ORG_ELEMENT_KEYWORD_TRANSLATION_ALIST: [(&str, &str); 8] = [
|
||||
/// Mapping of keyword names.
|
||||
///
|
||||
/// Corresponds to org-element-keyword-translation-alist elisp variable.
|
||||
pub(crate) const ORG_ELEMENT_KEYWORD_TRANSLATION_ALIST: [(&str, &str); 8] = [
|
||||
("DATA", "NAME"),
|
||||
("LABEL", "NAME"),
|
||||
("RESNAME", "NAME"),
|
||||
|
@ -9,11 +9,9 @@ use super::list::List;
|
||||
use super::DynContextMatcher;
|
||||
use super::RefContext;
|
||||
use crate::error::CustomError;
|
||||
use crate::error::MyError;
|
||||
use crate::error::Res;
|
||||
use crate::parser::OrgSource;
|
||||
|
||||
#[derive(Debug)]
|
||||
pub(crate) enum ContextElement<'r, 's> {
|
||||
/// Stores a parser that indicates that children should exit upon matching an exit matcher.
|
||||
ExitMatcherNode(ExitMatcherNode<'r>),
|
||||
@ -35,15 +33,6 @@ pub(crate) struct ExitMatcherNode<'r> {
|
||||
pub(crate) class: ExitClass,
|
||||
}
|
||||
|
||||
impl<'r> std::fmt::Debug for ExitMatcherNode<'r> {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
let mut formatter = f.debug_struct("ExitMatcherNode");
|
||||
formatter.field("class", &self.class.to_string());
|
||||
formatter.finish()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub(crate) struct Context<'g, 'r, 's> {
|
||||
global_settings: &'g GlobalSettings<'g, 's>,
|
||||
tree: List<'r, &'r ContextElement<'r, 's>>,
|
||||
@ -108,7 +97,7 @@ impl<'g, 'r, 's> Context<'g, 'r, 's> {
|
||||
pub(crate) fn check_exit_matcher(
|
||||
&'r self,
|
||||
i: OrgSource<'s>,
|
||||
) -> IResult<OrgSource<'s>, OrgSource<'s>, CustomError<OrgSource<'s>>> {
|
||||
) -> IResult<OrgSource<'s>, OrgSource<'s>, CustomError> {
|
||||
let mut current_class_filter = ExitClass::Gamma;
|
||||
for current_node in self.iter_context() {
|
||||
let context_element = current_node.get_data();
|
||||
@ -123,7 +112,7 @@ impl<'g, 'r, 's> Context<'g, 'r, 's> {
|
||||
}
|
||||
}
|
||||
// TODO: Make this a specific error instead of just a generic MyError
|
||||
return Err(nom::Err::Error(CustomError::MyError(MyError("NoExit"))));
|
||||
return Err(nom::Err::Error(CustomError::Static("NoExit")));
|
||||
}
|
||||
|
||||
/// Indicates if elements should consume the whitespace after them.
|
||||
|
@ -1,13 +1,7 @@
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
#[derive(Copy, Clone)]
|
||||
pub(crate) enum ExitClass {
|
||||
Document = 1,
|
||||
Alpha = 2,
|
||||
Beta = 3,
|
||||
Gamma = 4,
|
||||
}
|
||||
|
||||
impl std::fmt::Display for ExitClass {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
|
||||
write!(f, "{:?}", self)
|
||||
}
|
||||
}
|
||||
|
@ -1,17 +1,16 @@
|
||||
use std::fmt::Debug;
|
||||
use std::path::PathBuf;
|
||||
|
||||
#[cfg(any(feature = "compare", feature = "foreign_document_test"))]
|
||||
pub trait FileAccessInterface: Sync + Debug {
|
||||
pub trait FileAccessInterface: Sync {
|
||||
fn read_file(&self, path: &str) -> Result<String, std::io::Error>;
|
||||
}
|
||||
|
||||
#[cfg(not(any(feature = "compare", feature = "foreign_document_test")))]
|
||||
pub trait FileAccessInterface: Debug {
|
||||
pub trait FileAccessInterface {
|
||||
fn read_file(&self, path: &str) -> Result<String, std::io::Error>;
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
#[derive(Clone)]
|
||||
pub struct LocalFileAccessInterface {
|
||||
pub working_directory: Option<PathBuf>,
|
||||
}
|
||||
|
@ -5,16 +5,12 @@ use super::constants::DEFAULT_ORG_ENTITIES;
|
||||
use super::constants::DEFAULT_ORG_LINK_PARAMETERS;
|
||||
use super::FileAccessInterface;
|
||||
use super::LocalFileAccessInterface;
|
||||
use crate::context::constants::DEFAULT_ORG_ELEMENT_AFFILIATED_KEYWORDS;
|
||||
use crate::context::constants::DEFAULT_ORG_ELEMENT_DUAL_KEYWORDS;
|
||||
use crate::context::constants::DEFAULT_ORG_ELEMENT_KEYWORD_TRANSLATION_ALIST;
|
||||
use crate::context::constants::DEFAULT_ORG_ELEMENT_PARSED_KEYWORDS;
|
||||
use crate::types::IndentationLevel;
|
||||
use crate::types::Object;
|
||||
|
||||
// TODO: Ultimately, I think we'll need most of this: https://orgmode.org/manual/In_002dbuffer-Settings.html
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
#[derive(Clone)]
|
||||
pub struct GlobalSettings<'g, 's> {
|
||||
pub radio_targets: Vec<&'g Vec<Object<'s>>>,
|
||||
pub file_access: &'g dyn FileAccessInterface,
|
||||
@ -58,26 +54,6 @@ pub struct GlobalSettings<'g, 's> {
|
||||
///
|
||||
/// Corresponds to org-entities elisp variable.
|
||||
pub entities: &'g [EntityDefinition<'s>],
|
||||
|
||||
/// Keywords that contain the standard set of objects (excluding footnote references).
|
||||
///
|
||||
/// Corresponds to org-element-parsed-keywords elisp variable.
|
||||
pub element_parsed_keywords: &'g [&'s str],
|
||||
|
||||
/// Keywords that can have a secondary value in square brackets.
|
||||
///
|
||||
/// Corresponds to org-element-dual-keywords elisp variable.
|
||||
pub element_dual_keywords: &'g [&'s str],
|
||||
|
||||
/// Keywords that can be affiliated with an element.
|
||||
///
|
||||
/// Corresponds to org-element-affiliated-keywords elisp variable.
|
||||
pub element_affiliated_keywords: &'g [&'s str],
|
||||
|
||||
/// Mapping of keyword names.
|
||||
///
|
||||
/// Corresponds to org-element-keyword-translation-alist elisp variable.
|
||||
pub element_keyword_translation_alist: &'g [(&'s str, &'s str)],
|
||||
}
|
||||
|
||||
pub const DEFAULT_TAB_WIDTH: IndentationLevel = 8;
|
||||
@ -95,9 +71,7 @@ pub struct EntityDefinition<'a> {
|
||||
|
||||
impl<'g, 's> GlobalSettings<'g, 's> {
|
||||
fn new() -> GlobalSettings<'g, 's> {
|
||||
debug_assert!(
|
||||
DEFAULT_ORG_ENTITIES.is_sorted_by(|a, b| b.name.len().partial_cmp(&a.name.len()))
|
||||
);
|
||||
debug_assert!(DEFAULT_ORG_ENTITIES.is_sorted_by(|a, b| a.name.len() >= b.name.len()));
|
||||
GlobalSettings {
|
||||
radio_targets: Vec::new(),
|
||||
file_access: &LocalFileAccessInterface {
|
||||
@ -112,10 +86,6 @@ impl<'g, 's> GlobalSettings<'g, 's> {
|
||||
link_parameters: &DEFAULT_ORG_LINK_PARAMETERS,
|
||||
link_templates: BTreeMap::new(),
|
||||
entities: &DEFAULT_ORG_ENTITIES,
|
||||
element_parsed_keywords: &DEFAULT_ORG_ELEMENT_PARSED_KEYWORDS,
|
||||
element_dual_keywords: &DEFAULT_ORG_ELEMENT_DUAL_KEYWORDS,
|
||||
element_affiliated_keywords: &DEFAULT_ORG_ELEMENT_AFFILIATED_KEYWORDS,
|
||||
element_keyword_translation_alist: &DEFAULT_ORG_ELEMENT_KEYWORD_TRANSLATION_ALIST,
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -126,7 +96,7 @@ impl<'g, 's> Default for GlobalSettings<'g, 's> {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Default)]
|
||||
#[derive(Clone, PartialEq, Default)]
|
||||
pub enum HeadlineLevelFilter {
|
||||
Odd,
|
||||
|
||||
|
@ -1,7 +1,7 @@
|
||||
use crate::error::Res;
|
||||
use crate::parser::OrgSource;
|
||||
|
||||
mod constants;
|
||||
pub(crate) mod constants;
|
||||
#[allow(clippy::module_inception)]
|
||||
mod context;
|
||||
mod exiting;
|
||||
|
@ -2,22 +2,18 @@ use nom::error::ErrorKind;
|
||||
use nom::error::ParseError;
|
||||
use nom::IResult;
|
||||
|
||||
pub(crate) type Res<T, U> = IResult<T, U, CustomError<T>>;
|
||||
pub(crate) type Res<T, U> = IResult<T, U, CustomError>;
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum CustomError<I> {
|
||||
MyError(MyError<&'static str>),
|
||||
Nom(I, ErrorKind),
|
||||
IO(std::io::Error),
|
||||
BoxedError(Box<dyn std::error::Error>),
|
||||
pub enum CustomError {
|
||||
Static(#[allow(dead_code)] &'static str),
|
||||
IO(#[allow(dead_code)] std::io::Error),
|
||||
Parser(#[allow(dead_code)] ErrorKind),
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct MyError<I>(pub(crate) I);
|
||||
|
||||
impl<I> ParseError<I> for CustomError<I> {
|
||||
fn from_error_kind(input: I, kind: ErrorKind) -> Self {
|
||||
CustomError::Nom(input, kind)
|
||||
impl<I: std::fmt::Debug> ParseError<I> for CustomError {
|
||||
fn from_error_kind(_input: I, kind: ErrorKind) -> Self {
|
||||
CustomError::Parser(kind)
|
||||
}
|
||||
|
||||
fn append(_input: I, _kind: ErrorKind, /*mut*/ other: Self) -> Self {
|
||||
@ -26,20 +22,14 @@ impl<I> ParseError<I> for CustomError<I> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<I> From<std::io::Error> for CustomError<I> {
|
||||
impl From<std::io::Error> for CustomError {
|
||||
fn from(value: std::io::Error) -> Self {
|
||||
CustomError::IO(value)
|
||||
}
|
||||
}
|
||||
|
||||
impl<I> From<&'static str> for CustomError<I> {
|
||||
impl From<&'static str> for CustomError {
|
||||
fn from(value: &'static str) -> Self {
|
||||
CustomError::MyError(MyError(value))
|
||||
}
|
||||
}
|
||||
|
||||
impl<I> From<Box<dyn std::error::Error>> for CustomError<I> {
|
||||
fn from(value: Box<dyn std::error::Error>) -> Self {
|
||||
CustomError::BoxedError(value)
|
||||
CustomError::Static(value)
|
||||
}
|
||||
}
|
||||
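The practical effect of dropping the <I> parameter is that errors no longer borrow the parser input. A minimal sketch of the surviving conversions, using only the impls shown in this hunk:

let from_str: CustomError = "NoExit".into(); // CustomError::Static("NoExit")
let from_io: CustomError = std::io::Error::from(std::io::ErrorKind::NotFound).into(); // CustomError::IO(..)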
|
@ -1,5 +1,4 @@
|
||||
#[allow(clippy::module_inception)]
|
||||
mod error;
|
||||
pub(crate) use error::CustomError;
|
||||
pub(crate) use error::MyError;
|
||||
pub(crate) use error::Res;
|
||||
|
43
src/event_count/database.rs
Normal file
@ -0,0 +1,43 @@
|
||||
use std::collections::HashMap;
|
||||
use std::sync::Mutex;
|
||||
|
||||
use super::EventType;
|
||||
use crate::parser::OrgSource;
|
||||
|
||||
#[derive(Debug, Eq, Hash, PartialEq)]
|
||||
struct EventKey {
|
||||
event_type: EventType,
|
||||
byte_offset: usize,
|
||||
}
|
||||
|
||||
pub(crate) type EventCount = usize;
|
||||
|
||||
static GLOBAL_DATA: Mutex<Option<HashMap<EventKey, EventCount>>> = Mutex::new(None);
|
||||
|
||||
pub(crate) fn record_event(event_type: EventType, input: OrgSource<'_>) {
|
||||
let mut db = GLOBAL_DATA.lock().unwrap();
|
||||
let db = db.get_or_insert_with(HashMap::new);
|
||||
let key = EventKey {
|
||||
event_type,
|
||||
byte_offset: input.get_byte_offset(),
|
||||
};
|
||||
*db.entry(key).or_insert(0) += 1;
|
||||
}
|
||||
|
||||
pub(crate) fn report(original_document: &str) {
|
||||
let mut db = GLOBAL_DATA.lock().unwrap();
|
||||
let db = db.get_or_insert_with(HashMap::new);
|
||||
let mut results: Vec<_> = db.iter().collect();
|
||||
results.sort_by_key(|(_k, v)| *v);
|
||||
// This would put the most common at the top, but that is a pain when there is already a lot of output from the parser.
|
||||
// results.sort_by(|(_ak, av), (_bk, bv)| bv.cmp(av));
|
||||
for (key, count) in results {
|
||||
println!(
|
||||
"{:?} {} character offset: {} byte offset: {}",
|
||||
key.event_type,
|
||||
count,
|
||||
original_document[..key.byte_offset].chars().count() + 1,
|
||||
key.byte_offset
|
||||
)
|
||||
}
|
||||
}
|
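A hedged usage sketch of the two entry points above (the call sites are illustrative; the real ones are gated behind the event_count feature elsewhere in this diff):

// During parsing, at each element start:
record_event(EventType::ElementStart, input);
// After parsing completes, dump counts. Results are sorted ascending, so the
// most frequent events print last, closest to the end of the output:
report(original_document);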
4
src/event_count/event_type.rs
Normal file
@ -0,0 +1,4 @@
|
||||
#[derive(Debug, Eq, Hash, PartialEq)]
|
||||
pub(crate) enum EventType {
|
||||
ElementStart,
|
||||
}
|
6
src/event_count/mod.rs
Normal file
@ -0,0 +1,6 @@
|
||||
mod database;
|
||||
mod event_type;
|
||||
|
||||
pub(crate) use database::record_event;
|
||||
pub(crate) use database::report;
|
||||
pub(crate) use event_type::EventType;
|
@ -90,12 +90,11 @@ impl<'r, 's> Iterator for AllAstNodeIter<'r, 's> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<'r, 's> IntoIterator for AstNode<'r, 's> {
|
||||
type Item = AstNode<'r, 's>;
|
||||
|
||||
type IntoIter = AllAstNodeIter<'r, 's>;
|
||||
|
||||
fn into_iter(self) -> Self::IntoIter {
|
||||
impl<'r, 's> AstNode<'r, 's> {
|
||||
/// Iterate all AST nodes.
|
||||
///
|
||||
/// This is different from the iter/into_iter functions which iterate a single level of the children. This iterates the entire tree including returning the root node itself.
|
||||
pub fn iter_all_ast_nodes(self) -> AllAstNodeIter<'r, 's> {
|
||||
AllAstNodeIter {
|
||||
root: Some(self),
|
||||
queue: VecDeque::new(),
|
||||
|
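A minimal usage sketch of the new method; it assumes the AstNode-from-&Document conversion used elsewhere in this diff, and the input string is made up:

let document = organic::parser::parse("* Heading\nBody text.\n")?;
let root: AstNode = (&document).into();
for node in root.iter_all_ast_nodes() {
    // Visits the Document root first, then every descendant node.
    println!("{:?}", std::mem::discriminant(&node));
}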
14
src/lib.rs
@ -1,8 +1,9 @@
|
||||
#![feature(exit_status_error)]
|
||||
#![feature(trait_alias)]
|
||||
#![feature(path_file_prefix)]
|
||||
#![feature(is_sorted)]
|
||||
#![feature(test)]
|
||||
#![feature(iter_intersperse)]
|
||||
#![feature(exact_size_is_empty)]
|
||||
// TODO: #![warn(missing_docs)]
|
||||
#![allow(clippy::bool_assert_comparison)] // Sometimes you want the long form because it's easier to see at a glance.
|
||||
|
||||
@ -10,9 +11,20 @@ extern crate test;
|
||||
|
||||
#[cfg(feature = "compare")]
|
||||
pub mod compare;
|
||||
pub mod parse_cli;
|
||||
#[cfg(any(feature = "compare", feature = "wasm", feature = "wasm_test"))]
|
||||
mod util;
|
||||
#[cfg(any(feature = "wasm", feature = "wasm_test"))]
|
||||
mod wasm;
|
||||
#[cfg(any(feature = "wasm", feature = "wasm_test"))]
|
||||
pub mod wasm_cli;
|
||||
#[cfg(feature = "wasm_test")]
|
||||
pub mod wasm_test;
|
||||
|
||||
mod context;
|
||||
mod error;
|
||||
#[cfg(feature = "event_count")]
|
||||
mod event_count;
|
||||
mod iter;
|
||||
pub mod parser;
|
||||
pub mod types;
|
||||
|
57
src/main.rs
@ -1,12 +1,4 @@
|
||||
#![feature(round_char_boundary)]
|
||||
#![feature(exact_size_is_empty)]
|
||||
use std::io::Read;
|
||||
use std::path::Path;
|
||||
|
||||
use ::organic::parser::parse;
|
||||
use organic::parser::parse_with_settings;
|
||||
use organic::settings::GlobalSettings;
|
||||
use organic::settings::LocalFileAccessInterface;
|
||||
use organic::parse_cli::main_body;
|
||||
|
||||
#[cfg(feature = "tracing")]
|
||||
use crate::init_tracing::init_telemetry;
|
||||
@ -30,50 +22,3 @@ fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
main_body_result
|
||||
})
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
|
||||
fn main_body() -> Result<(), Box<dyn std::error::Error>> {
|
||||
let args = std::env::args().skip(1);
|
||||
if args.is_empty() {
|
||||
let org_contents = read_stdin_to_string()?;
|
||||
run_anonymous_parse(org_contents)
|
||||
} else {
|
||||
for arg in args {
|
||||
run_parse_on_file(arg)?
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
fn read_stdin_to_string() -> Result<String, Box<dyn std::error::Error>> {
|
||||
let mut stdin_contents = String::new();
|
||||
std::io::stdin()
|
||||
.lock()
|
||||
.read_to_string(&mut stdin_contents)?;
|
||||
Ok(stdin_contents)
|
||||
}
|
||||
|
||||
fn run_anonymous_parse<P: AsRef<str>>(org_contents: P) -> Result<(), Box<dyn std::error::Error>> {
|
||||
let rust_parsed = parse(org_contents.as_ref())?;
|
||||
println!("{:#?}", rust_parsed);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn run_parse_on_file<P: AsRef<Path>>(org_path: P) -> Result<(), Box<dyn std::error::Error>> {
|
||||
let org_path = org_path.as_ref();
|
||||
let parent_directory = org_path
|
||||
.parent()
|
||||
.ok_or("Should be contained inside a directory.")?;
|
||||
let org_contents = std::fs::read_to_string(org_path)?;
|
||||
let org_contents = org_contents.as_str();
|
||||
let file_access_interface = LocalFileAccessInterface {
|
||||
working_directory: Some(parent_directory.to_path_buf()),
|
||||
};
|
||||
let global_settings = GlobalSettings {
|
||||
file_access: &file_access_interface,
|
||||
..Default::default()
|
||||
};
|
||||
let rust_parsed = parse_with_settings(org_contents, &global_settings)?;
|
||||
println!("{:#?}", rust_parsed);
|
||||
Ok(())
|
||||
}
|
||||
|
59
src/parse_cli/mod.rs
Normal file
@ -0,0 +1,59 @@
|
||||
use std::io::Read;
|
||||
use std::path::Path;
|
||||
|
||||
use crate::parser::parse;
|
||||
use crate::parser::parse_with_settings;
|
||||
use crate::settings::GlobalSettings;
|
||||
use crate::settings::LocalFileAccessInterface;
|
||||
|
||||
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
|
||||
pub fn main_body() -> Result<(), Box<dyn std::error::Error>> {
|
||||
let args = std::env::args().skip(1);
|
||||
if args.is_empty() {
|
||||
let org_contents = read_stdin_to_string()?;
|
||||
run_anonymous_parse(org_contents)
|
||||
} else {
|
||||
for arg in args {
|
||||
run_parse_on_file(arg)?
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
fn read_stdin_to_string() -> Result<String, Box<dyn std::error::Error>> {
|
||||
let mut stdin_contents = String::new();
|
||||
std::io::stdin()
|
||||
.lock()
|
||||
.read_to_string(&mut stdin_contents)?;
|
||||
Ok(stdin_contents)
|
||||
}
|
||||
|
||||
fn run_anonymous_parse<P: AsRef<str>>(org_contents: P) -> Result<(), Box<dyn std::error::Error>> {
|
||||
let org_contents = org_contents.as_ref();
|
||||
let rust_parsed = parse(org_contents)?;
|
||||
println!("{:#?}", rust_parsed);
|
||||
#[cfg(feature = "event_count")]
|
||||
crate::event_count::report(org_contents);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn run_parse_on_file<P: AsRef<Path>>(org_path: P) -> Result<(), Box<dyn std::error::Error>> {
|
||||
let org_path = org_path.as_ref();
|
||||
let parent_directory = org_path
|
||||
.parent()
|
||||
.ok_or("Should be contained inside a directory.")?;
|
||||
let org_contents = std::fs::read_to_string(org_path)?;
|
||||
let org_contents = org_contents.as_str();
|
||||
let file_access_interface = LocalFileAccessInterface {
|
||||
working_directory: Some(parent_directory.to_path_buf()),
|
||||
};
|
||||
let global_settings = GlobalSettings {
|
||||
file_access: &file_access_interface,
|
||||
..Default::default()
|
||||
};
|
||||
let rust_parsed = parse_with_settings(org_contents, &global_settings)?;
|
||||
println!("{:#?}", rust_parsed);
|
||||
#[cfg(feature = "event_count")]
|
||||
crate::event_count::report(org_contents);
|
||||
Ok(())
|
||||
}
|
@ -14,17 +14,46 @@ use nom::multi::many0;
|
||||
use nom::multi::many_till;
|
||||
use nom::sequence::tuple;
|
||||
|
||||
use super::keyword::affiliated_keyword;
|
||||
use super::object_parser::standard_set_object;
|
||||
use super::util::confine_context;
|
||||
use super::OrgSource;
|
||||
use crate::context::bind_context;
|
||||
use crate::context::constants::ORG_ELEMENT_DUAL_KEYWORDS;
|
||||
use crate::context::constants::ORG_ELEMENT_KEYWORD_TRANSLATION_ALIST;
|
||||
use crate::context::constants::ORG_ELEMENT_PARSED_KEYWORDS;
|
||||
use crate::context::Context;
|
||||
use crate::context::ContextElement;
|
||||
use crate::context::GlobalSettings;
|
||||
use crate::context::List;
|
||||
use crate::error::Res;
|
||||
use crate::types::AffiliatedKeywordValue;
|
||||
use crate::types::AffiliatedKeywords;
|
||||
use crate::types::Keyword;
|
||||
|
||||
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
|
||||
pub(crate) fn affiliated_keywords<'s>(
|
||||
input: OrgSource<'s>,
|
||||
) -> Res<OrgSource<'s>, Vec<Keyword<'s>>> {
|
||||
let mut ret = Vec::new();
|
||||
let mut remaining = input;
|
||||
|
||||
loop {
|
||||
let result = affiliated_keyword(remaining);
|
||||
match result {
|
||||
Ok((remain, kw)) => {
|
||||
remaining = remain;
|
||||
ret.push(kw);
|
||||
}
|
||||
Err(_) => {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok((remaining, ret))
|
||||
}
|
||||
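An illustrative input for the collector above (org affiliated-keyword syntax; the exact Keyword fields are elided):

// "#+caption: An example.\n#+name: demo\n| a | b |\n"
//   -> ret holds the CAPTION and NAME keywords, and `remaining` starts at
//      the table line, where affiliated_keyword() first fails and the loop breaks.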
|
||||
pub(crate) fn parse_affiliated_keywords<'g, 's, AK>(
|
||||
global_settings: &'g GlobalSettings<'g, 's>,
|
||||
input: AK,
|
||||
@ -34,8 +63,8 @@ where
|
||||
{
|
||||
let mut ret = BTreeMap::new();
|
||||
for kw in input {
|
||||
let translated_name = translate_name(global_settings, kw.key);
|
||||
let keyword_type = identify_keyword_type(global_settings, translated_name.as_str());
|
||||
let translated_name = translate_name(kw.key);
|
||||
let keyword_type = identify_keyword_type(translated_name.as_str());
|
||||
match keyword_type {
|
||||
AffiliatedKeywordType::SingleString => {
|
||||
ret.insert(
|
||||
@ -120,12 +149,12 @@ where
|
||||
AffiliatedKeywords { keywords: ret }
|
||||
}
|
||||
|
||||
fn translate_name<'g, 's>(global_settings: &'g GlobalSettings<'g, 's>, name: &'s str) -> String {
|
||||
fn translate_name(name: &str) -> String {
|
||||
let name_until_optval = name
|
||||
.split_once('[')
|
||||
.map(|(before, _after)| before)
|
||||
.unwrap_or(name);
|
||||
for (src, dst) in global_settings.element_keyword_translation_alist {
|
||||
for (src, dst) in ORG_ELEMENT_KEYWORD_TRANSLATION_ALIST {
|
||||
if name_until_optval.eq_ignore_ascii_case(src) {
|
||||
return dst.to_lowercase();
|
||||
}
|
||||
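Expected behavior of the now-hardcoded translation, per the alist shown earlier in this diff (a sketch; the fallback for unmatched names lies outside this hunk):

assert_eq!(translate_name("DATA"), "name");           // alist entry, lowercased
assert_eq!(translate_name("resname[label]"), "name"); // optional [value] is ignored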
@ -140,20 +169,15 @@ enum AffiliatedKeywordType {
|
||||
ObjectTree,
|
||||
}
|
||||
|
||||
fn identify_keyword_type<'g, 's>(
|
||||
global_settings: &'g GlobalSettings<'g, 's>,
|
||||
name: &'s str,
|
||||
) -> AffiliatedKeywordType {
|
||||
fn identify_keyword_type(name: &str) -> AffiliatedKeywordType {
|
||||
let is_multiple = ["CAPTION", "HEADER"]
|
||||
.into_iter()
|
||||
.any(|candidate| name.eq_ignore_ascii_case(candidate))
|
||||
|| name.to_lowercase().starts_with("attr_");
|
||||
let is_parsed = global_settings
|
||||
.element_parsed_keywords
|
||||
let is_parsed = ORG_ELEMENT_PARSED_KEYWORDS
|
||||
.iter()
|
||||
.any(|candidate| name.eq_ignore_ascii_case(candidate));
|
||||
let can_have_optval = global_settings
|
||||
.element_dual_keywords
|
||||
let can_have_optval = ORG_ELEMENT_DUAL_KEYWORDS
|
||||
.iter()
|
||||
.any(|candidate| name.eq_ignore_ascii_case(candidate));
|
||||
match (is_multiple, is_parsed, can_have_optval) {
|
||||
|
@ -47,7 +47,7 @@ pub(crate) fn angle_link<'b, 'g, 'r, 's>(
|
||||
parser_with_context!(parse_angle_link)(context),
|
||||
))(remaining)?;
|
||||
let (remaining, _) = tag(">")(remaining)?;
|
||||
let (remaining, _trailing_whitespace) =
|
||||
let (remaining, post_blank) =
|
||||
maybe_consume_object_trailing_whitespace_if_not_exiting(context, remaining)?;
|
||||
let source = get_consumed(input, remaining);
|
||||
Ok((
|
||||
@ -59,6 +59,7 @@ pub(crate) fn angle_link<'b, 'g, 'r, 's>(
|
||||
raw_link: raw_link.into(),
|
||||
search_option: parsed_link.search_option,
|
||||
application: parsed_link.application,
|
||||
post_blank: post_blank.map(Into::<&str>::into),
|
||||
},
|
||||
))
|
||||
}
|
||||
|
@ -4,7 +4,6 @@ use nom::bytes::complete::tag_no_case;
|
||||
use nom::character::complete::anychar;
|
||||
use nom::character::complete::one_of;
|
||||
use nom::character::complete::space0;
|
||||
use nom::combinator::consumed;
|
||||
use nom::combinator::opt;
|
||||
use nom::combinator::peek;
|
||||
use nom::combinator::recognize;
|
||||
@ -21,7 +20,6 @@ use super::OrgSource;
|
||||
use crate::context::Matcher;
|
||||
use crate::context::RefContext;
|
||||
use crate::error::CustomError;
|
||||
use crate::error::MyError;
|
||||
use crate::error::Res;
|
||||
use crate::parser::util::get_consumed;
|
||||
use crate::parser::util::org_line_ending;
|
||||
@ -44,32 +42,10 @@ where
|
||||
start_of_line(remaining)?;
|
||||
let (remaining, _) = tuple((space0, tag("#+"), tag_no_case("call"), tag(":")))(remaining)?;
|
||||
|
||||
if let Ok((remaining, (_, line_break))) = tuple((space0, org_line_ending))(remaining) {
|
||||
let (remaining, _trailing_ws) =
|
||||
maybe_consume_trailing_whitespace_if_not_exiting(context, remaining)?;
|
||||
let source = get_consumed(input, remaining);
|
||||
return Ok((
|
||||
remaining,
|
||||
BabelCall {
|
||||
source: Into::<&str>::into(source),
|
||||
affiliated_keywords: parse_affiliated_keywords(
|
||||
context.get_global_settings(),
|
||||
affiliated_keywords,
|
||||
),
|
||||
value: Into::<&str>::into(line_break.take(0)),
|
||||
call: None,
|
||||
inside_header: None,
|
||||
arguments: None,
|
||||
end_header: None,
|
||||
},
|
||||
));
|
||||
}
|
||||
|
||||
let (remaining, _ws) = space0(remaining)?;
|
||||
let (remaining, (value, babel_call_value)) = consumed(babel_call_value)(remaining)?;
|
||||
let (remaining, _ws) = tuple((space0, org_line_ending))(remaining)?;
|
||||
let (remaining, babel_call_value) = babel_call_value(remaining)?;
|
||||
|
||||
let (remaining, _trailing_ws) =
|
||||
let (remaining, post_blank) =
|
||||
maybe_consume_trailing_whitespace_if_not_exiting(context, remaining)?;
|
||||
let source = get_consumed(input, remaining);
|
||||
|
||||
@ -81,17 +57,22 @@ where
|
||||
context.get_global_settings(),
|
||||
affiliated_keywords,
|
||||
),
|
||||
value: Into::<&str>::into(value).trim_end(),
|
||||
value: Into::<&str>::into(babel_call_value.value),
|
||||
call: babel_call_value.call.map(Into::<&str>::into),
|
||||
inside_header: babel_call_value.inside_header.map(Into::<&str>::into),
|
||||
arguments: babel_call_value.arguments.map(Into::<&str>::into),
|
||||
end_header: babel_call_value.end_header.map(Into::<&str>::into),
|
||||
post_blank: post_blank.map(Into::<&str>::into),
|
||||
},
|
||||
))
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
struct BabelCallValue<'s> {
|
||||
/// The entire string to the right of "#+call: " without the trailing line break.
|
||||
value: OrgSource<'s>,
|
||||
|
||||
/// The function name which may contain a line break if there are no headers/arguments.
|
||||
call: Option<OrgSource<'s>>,
|
||||
inside_header: Option<OrgSource<'s>>,
|
||||
arguments: Option<OrgSource<'s>>,
|
||||
@ -100,13 +81,45 @@ struct BabelCallValue<'s> {
|
||||
|
||||
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
|
||||
fn babel_call_value<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, BabelCallValue<'s>> {
|
||||
let (remaining, call) = opt(babel_call_call)(input)?;
|
||||
let (remaining, inside_header) = opt(inside_header)(remaining)?;
|
||||
let (remaining, arguments) = opt(arguments)(remaining)?;
|
||||
let (remaining, end_header) = opt(end_header)(remaining)?;
|
||||
alt((
|
||||
babel_call_value_without_headers,
|
||||
babel_call_value_with_headers,
|
||||
))(input)
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
|
||||
fn babel_call_value_without_headers<'s>(
|
||||
input: OrgSource<'s>,
|
||||
) -> Res<OrgSource<'s>, BabelCallValue<'s>> {
|
||||
let (remaining, value) = babel_call_call_with_headers(input)?;
|
||||
let (remaining, _ws) = tuple((space0, org_line_ending))(remaining)?;
|
||||
let call = get_consumed(input, remaining);
|
||||
Ok((
|
||||
remaining,
|
||||
BabelCallValue {
|
||||
value,
|
||||
call: Some(call),
|
||||
inside_header: None,
|
||||
arguments: None,
|
||||
end_header: None,
|
||||
},
|
||||
))
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
|
||||
fn babel_call_value_with_headers<'s>(
|
||||
input: OrgSource<'s>,
|
||||
) -> Res<OrgSource<'s>, BabelCallValue<'s>> {
|
||||
let (remaining, call) = opt(babel_call_call_with_headers)(input)?;
|
||||
let (remaining, inside_header) = opt(inside_header)(remaining)?;
|
||||
let (remaining, arguments) = opt(arguments)(remaining)?;
|
||||
let (remaining, end_header) = opt(end_header)(remaining)?;
|
||||
let value = get_consumed(input, remaining);
|
||||
let (remaining, _ws) = tuple((space0, org_line_ending))(remaining)?;
|
||||
Ok((
|
||||
remaining,
|
||||
BabelCallValue {
|
||||
value,
|
||||
call,
|
||||
inside_header,
|
||||
arguments: arguments.flatten(),
|
||||
@ -116,14 +129,15 @@ fn babel_call_value<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, BabelCallVal
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
|
||||
fn babel_call_call<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, OrgSource<'s>> {
|
||||
fn babel_call_call_with_headers<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, OrgSource<'s>> {
|
||||
// When a babel call contains no arguments or headers (for example: "#+call: lorem ipsum\n") then the trailing line break is part of the call. Otherwise, it is not.
|
||||
verify(
|
||||
recognize(many_till(
|
||||
anychar,
|
||||
alt((
|
||||
peek(recognize(one_of("[("))),
|
||||
peek(alt((
|
||||
recognize(one_of("[(")),
|
||||
recognize(tuple((space0, org_line_ending))),
|
||||
)),
|
||||
))),
|
||||
)),
|
||||
|s| s.len() > 0,
|
||||
)(input)
|
||||
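A sketch of the two accepted shapes, with made-up values, inside a test returning Result<(), Box<dyn std::error::Error>> (babel_call_value sees only the text after "#+call:"):

let (_rest, bare) = babel_call_value(OrgSource::new("lorem ipsum\n"))?;
assert!(bare.inside_header.is_none() && bare.arguments.is_none());
let (_rest, with_args) = babel_call_value(OrgSource::new("square(x=2)\n"))?;
assert!(with_args.call.is_some() && with_args.arguments.is_some());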
@ -217,9 +231,7 @@ fn impl_balanced_bracket<
|
||||
}
|
||||
|
||||
if fail_parser(remaining).is_ok() {
|
||||
return Err(nom::Err::Error(CustomError::MyError(MyError(
|
||||
"Fail parser matched.",
|
||||
))));
|
||||
return Err(nom::Err::Error(CustomError::Static("Fail parser matched.")));
|
||||
}
|
||||
|
||||
let (remain, _) = anychar(remaining)?;
|
||||
@ -228,26 +240,10 @@ fn impl_balanced_bracket<
|
||||
let contents_end = remaining;
|
||||
|
||||
let (remaining, _) = end_parser(remaining)?;
|
||||
let contents = if contents_start != contents_end {
|
||||
let contents = if Into::<&str>::into(contents_start) != Into::<&str>::into(contents_end) {
|
||||
Some(contents_start.get_until(contents_end))
|
||||
} else {
|
||||
None
|
||||
};
|
||||
Ok((remaining, contents))
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use nom::combinator::opt;
|
||||
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn simple_call() -> Result<(), Box<dyn std::error::Error>> {
|
||||
let input = OrgSource::new("()");
|
||||
let (remaining, call) = opt(babel_call_call)(input)?;
|
||||
assert_eq!(Into::<&str>::into(remaining), "()");
|
||||
assert_eq!(call, None);
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
167
src/parser/bullshitium.rs
Normal file
@ -0,0 +1,167 @@
|
||||
use nom::branch::alt;
|
||||
use nom::bytes::complete::tag_no_case;
|
||||
use nom::character::complete::anychar;
|
||||
use nom::character::complete::space0;
|
||||
use nom::multi::many_till;
|
||||
use nom::sequence::tuple;
|
||||
|
||||
use super::paragraph::paragraph;
|
||||
use super::util::maybe_consume_trailing_whitespace_if_not_exiting;
|
||||
use super::util::org_line_ending;
|
||||
use super::util::start_of_line;
|
||||
use super::OrgSource;
|
||||
use crate::context::bind_context;
|
||||
use crate::context::RefContext;
|
||||
use crate::error::CustomError;
|
||||
use crate::error::Res;
|
||||
use crate::parser::macros::element;
|
||||
use crate::types::Object;
|
||||
use crate::types::Paragraph;
|
||||
|
||||
#[cfg_attr(
|
||||
feature = "tracing",
|
||||
tracing::instrument(ret, level = "debug", skip(context))
|
||||
)]
|
||||
pub(crate) fn bullshitium<'b, 'g, 'r, 's>(
|
||||
context: RefContext<'b, 'g, 'r, 's>,
|
||||
input: OrgSource<'s>,
|
||||
) -> Res<OrgSource<'s>, Paragraph<'s>> {
|
||||
alt((
|
||||
bind_context!(broken_end, context),
|
||||
bind_context!(broken_dynamic_block, context),
|
||||
))(input)
|
||||
}
|
||||
|
||||
#[cfg_attr(
|
||||
feature = "tracing",
|
||||
tracing::instrument(ret, level = "debug", skip(context))
|
||||
)]
|
||||
pub(crate) fn detect_bullshitium<'b, 'g, 'r, 's>(
|
||||
context: RefContext<'b, 'g, 'r, 's>,
|
||||
input: OrgSource<'s>,
|
||||
) -> Res<OrgSource<'s>, ()> {
|
||||
element!(detect_broken_end, context, input);
|
||||
element!(detect_broken_dynamic_block, context, input);
|
||||
Err(nom::Err::Error(CustomError::Static("No bullshitium.")))
|
||||
}
|
||||
|
||||
#[cfg_attr(
|
||||
feature = "tracing",
|
||||
tracing::instrument(ret, level = "debug", skip(context))
|
||||
)]
|
||||
pub(crate) fn broken_end<'b, 'g, 'r, 's>(
|
||||
context: RefContext<'b, 'g, 'r, 's>,
|
||||
input: OrgSource<'s>,
|
||||
) -> Res<OrgSource<'s>, Paragraph<'s>> {
|
||||
start_of_line(input)?;
|
||||
let (remaining, _) = space0(input)?;
|
||||
let (remaining, _) = tag_no_case(":end:")(remaining)?;
|
||||
let (lead_in_remaining, _) = tuple((space0, org_line_ending))(remaining)?;
|
||||
if let Ok((remaining, mut paragraph)) =
|
||||
paragraph(std::iter::empty(), lead_in_remaining, context, input)
|
||||
{
|
||||
match paragraph.children.first_mut() {
|
||||
Some(Object::PlainText(plain_text)) => {
|
||||
plain_text.source = input.get_until_end_of_str(plain_text.source).into();
|
||||
paragraph.contents = Some(input.get_until_end_of_str(plain_text.source).into());
|
||||
}
|
||||
Some(obj) => {
|
||||
panic!("Unhandled first object type inside bullshitium {:?}", obj);
|
||||
}
|
||||
None => {
|
||||
unreachable!("Paragraph must have children.");
|
||||
}
|
||||
};
|
||||
Ok((remaining, paragraph))
|
||||
} else {
|
||||
let (remaining, post_blank) =
|
||||
maybe_consume_trailing_whitespace_if_not_exiting(context, lead_in_remaining)?;
|
||||
|
||||
let body = Into::<&str>::into(input.get_until(lead_in_remaining));
|
||||
|
||||
Ok((
|
||||
remaining,
|
||||
Paragraph::of_text(
|
||||
input.get_until(remaining).into(),
|
||||
body,
|
||||
if !body.is_empty() { Some(body) } else { None },
|
||||
post_blank.map(Into::<&str>::into),
|
||||
),
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(
|
||||
feature = "tracing",
|
||||
tracing::instrument(ret, level = "debug", skip(_context))
|
||||
)]
|
||||
pub(crate) fn detect_broken_end<'b, 'g, 'r, 's>(
|
||||
_context: RefContext<'b, 'g, 'r, 's>,
|
||||
input: OrgSource<'s>,
|
||||
) -> Res<OrgSource<'s>, ()> {
|
||||
start_of_line(input)?;
|
||||
let (remaining, _) = space0(input)?;
|
||||
let (remaining, _) = tag_no_case(":end:")(remaining)?;
|
||||
let (_remaining, _) = tuple((space0, org_line_ending))(remaining)?;
|
||||
Ok((input, ()))
|
||||
}
|
||||
|
||||
#[cfg_attr(
|
||||
feature = "tracing",
|
||||
tracing::instrument(ret, level = "debug", skip(context))
|
||||
)]
|
||||
pub(crate) fn broken_dynamic_block<'b, 'g, 'r, 's>(
|
||||
context: RefContext<'b, 'g, 'r, 's>,
|
||||
input: OrgSource<'s>,
|
||||
) -> Res<OrgSource<'s>, Paragraph<'s>> {
|
||||
start_of_line(input)?;
|
||||
let (remaining, _) = space0(input)?;
|
||||
let (remaining, _) = tag_no_case("#+BEGIN:")(remaining)?;
|
||||
let (lead_in_remaining, _) = many_till(anychar, org_line_ending)(remaining)?;
|
||||
if let Ok((remaining, mut paragraph)) =
|
||||
paragraph(std::iter::empty(), lead_in_remaining, context, input)
|
||||
{
|
||||
match paragraph.children.first_mut() {
|
||||
Some(Object::PlainText(plain_text)) => {
|
||||
plain_text.source = input.get_until_end_of_str(plain_text.source).into();
|
||||
paragraph.contents = Some(input.get_until_end_of_str(plain_text.source).into());
|
||||
}
|
||||
Some(obj) => {
|
||||
panic!("Unhandled first object type inside bullshitium {:?}", obj);
|
||||
}
|
||||
None => {
|
||||
unreachable!("Paragraph must have children.");
|
||||
}
|
||||
};
|
||||
Ok((remaining, paragraph))
|
||||
} else {
|
||||
let (remaining, post_blank) =
|
||||
maybe_consume_trailing_whitespace_if_not_exiting(context, lead_in_remaining)?;
|
||||
|
||||
let body = Into::<&str>::into(input.get_until(lead_in_remaining));
|
||||
|
||||
Ok((
|
||||
remaining,
|
||||
Paragraph::of_text(
|
||||
input.get_until(remaining).into(),
|
||||
body,
|
||||
if !body.is_empty() { Some(body) } else { None },
|
||||
post_blank.map(Into::<&str>::into),
|
||||
),
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(
|
||||
feature = "tracing",
|
||||
tracing::instrument(ret, level = "debug", skip(_context))
|
||||
)]
|
||||
pub(crate) fn detect_broken_dynamic_block<'b, 'g, 'r, 's>(
|
||||
_context: RefContext<'b, 'g, 'r, 's>,
|
||||
input: OrgSource<'s>,
|
||||
) -> Res<OrgSource<'s>, ()> {
|
||||
start_of_line(input)?;
|
||||
let (remaining, _) = space0(input)?;
|
||||
let (_remaining, _) = tag_no_case("#+BEGIN:")(remaining)?;
|
||||
Ok((input, ()))
|
||||
}
|
@ -46,16 +46,22 @@ pub(crate) fn citation<'b, 'g, 'r, 's>(
|
||||
let (remaining, prefix) =
|
||||
must_balance_bracket(opt(parser_with_context!(global_prefix)(context)))(remaining)?;
|
||||
|
||||
let contents_begin = remaining;
|
||||
let (remaining, references) =
|
||||
separated_list1(tag(";"), parser_with_context!(citation_reference)(context))(remaining)?;
|
||||
let contents_end = {
|
||||
let (rem, _) = opt(tag(";"))(remaining)?;
|
||||
rem
|
||||
};
|
||||
let (remaining, suffix) = must_balance_bracket(opt(map(
|
||||
tuple((tag(";"), parser_with_context!(global_suffix)(context))),
|
||||
|(_, suffix)| suffix,
|
||||
)))(remaining)?;
|
||||
let (remaining, _) = tag("]")(remaining)?;
|
||||
let (remaining, _trailing_whitespace) =
|
||||
let (remaining, post_blank) =
|
||||
maybe_consume_object_trailing_whitespace_if_not_exiting(context, remaining)?;
|
||||
let source = get_consumed(input, remaining);
|
||||
let contents = contents_begin.get_until(contents_end);
|
||||
Ok((
|
||||
remaining,
|
||||
Citation {
|
||||
@ -64,6 +70,8 @@ pub(crate) fn citation<'b, 'g, 'r, 's>(
|
||||
prefix: prefix.unwrap_or(Vec::new()),
|
||||
suffix: suffix.unwrap_or(Vec::new()),
|
||||
children: references,
|
||||
contents: Into::<&str>::into(contents),
|
||||
post_blank: post_blank.map(Into::<&str>::into),
|
||||
},
|
||||
))
|
||||
}
|
||||
@ -137,7 +145,7 @@ fn _global_prefix_end<'b, 'g, 'r, 's>(
|
||||
unreachable!("Exceeded citation global prefix bracket depth.")
|
||||
}
|
||||
if current_depth == 0 {
|
||||
let close_bracket = tag::<&str, OrgSource<'_>, CustomError<OrgSource<'_>>>("]")(input);
|
||||
let close_bracket = tag::<_, _, CustomError>("]")(input);
|
||||
if close_bracket.is_ok() {
|
||||
return close_bracket;
|
||||
}
|
||||
@ -191,7 +199,7 @@ fn _global_suffix_end<'b, 'g, 'r, 's>(
|
||||
unreachable!("Exceeded citation global suffix bracket depth.")
|
||||
}
|
||||
if current_depth == 0 {
|
||||
let close_bracket = tag::<&str, OrgSource<'_>, CustomError<OrgSource<'_>>>("]")(input);
|
||||
let close_bracket = tag::<_, _, CustomError>("]")(input);
|
||||
if close_bracket.is_ok() {
|
||||
return close_bracket;
|
||||
}
|
||||
@ -210,12 +218,11 @@ mod tests {
|
||||
use crate::context::GlobalSettings;
|
||||
use crate::context::List;
|
||||
use crate::parser::element_parser::element;
|
||||
use crate::types::CitationReference;
|
||||
use crate::types::Element;
|
||||
use crate::types::GetStandardProperties;
|
||||
use crate::types::StandardProperties;
|
||||
|
||||
#[test]
|
||||
fn citation_simple() {
|
||||
fn citation_simple() -> Result<(), Box<dyn std::error::Error>> {
|
||||
let input = OrgSource::new("[cite:@foo]");
|
||||
let global_settings = GlobalSettings::default();
|
||||
let initial_context = ContextElement::document_context();
|
||||
@ -227,28 +234,33 @@ mod tests {
|
||||
_ => panic!("Should be a paragraph!"),
|
||||
};
|
||||
assert_eq!(Into::<&str>::into(remaining), "");
|
||||
assert_eq!(
|
||||
first_paragraph.get_standard_properties().get_source(),
|
||||
"[cite:@foo]"
|
||||
);
|
||||
assert_eq!(first_paragraph.get_source(), "[cite:@foo]");
|
||||
assert_eq!(first_paragraph.children.len(), 1);
|
||||
assert_eq!(
|
||||
first_paragraph
|
||||
.children
|
||||
.get(0)
|
||||
.expect("Len already asserted to be 1"),
|
||||
&Object::Citation(Citation {
|
||||
source: "[cite:@foo]",
|
||||
style: None,
|
||||
prefix: vec![],
|
||||
suffix: vec![],
|
||||
children: vec![CitationReference {
|
||||
source: "@foo",
|
||||
key: "foo",
|
||||
prefix: vec![],
|
||||
suffix: vec![]
|
||||
}]
|
||||
})
|
||||
);
|
||||
|
||||
match first_paragraph
|
||||
.children
|
||||
.first()
|
||||
.expect("Len already asserted to be 1.")
|
||||
{
|
||||
Object::Citation(inner) => {
|
||||
assert_eq!(inner.get_source(), "[cite:@foo]");
|
||||
assert_eq!(inner.children.len(), 1);
|
||||
assert!(inner.prefix.is_empty());
|
||||
assert!(inner.suffix.is_empty());
|
||||
assert!(inner.style.is_none());
|
||||
let citation_reference = inner
|
||||
.children
|
||||
.first()
|
||||
.expect("Len already asserted to be 1.");
|
||||
assert_eq!(citation_reference.get_source(), "@foo");
|
||||
assert_eq!(citation_reference.key, "foo");
|
||||
assert!(citation_reference.prefix.is_empty());
|
||||
assert!(citation_reference.suffix.is_empty());
|
||||
}
|
||||
_ => {
|
||||
return Err("Child should be a citation.".into());
|
||||
}
|
||||
};
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
@ -20,7 +20,6 @@ use crate::context::ExitClass;
|
||||
use crate::context::ExitMatcherNode;
|
||||
use crate::context::RefContext;
|
||||
use crate::error::CustomError;
|
||||
use crate::error::MyError;
|
||||
use crate::error::Res;
|
||||
use crate::parser::object_parser::minimal_set_object;
|
||||
use crate::parser::util::exit_matcher_parser;
|
||||
@ -151,7 +150,7 @@ fn _key_prefix_end<'b, 'g, 'r, 's>(
|
||||
unreachable!("Exceeded citation key prefix bracket depth.")
|
||||
}
|
||||
if current_depth == 0 {
|
||||
let close_bracket = tag::<&str, OrgSource<'_>, CustomError<OrgSource<'_>>>("]")(input);
|
||||
let close_bracket = tag::<_, _, CustomError>("]")(input);
|
||||
if close_bracket.is_ok() {
|
||||
return close_bracket;
|
||||
}
|
||||
@ -181,7 +180,7 @@ fn _key_suffix_end<'b, 'g, 'r, 's>(
|
||||
unreachable!("Exceeded citation key suffix bracket depth.")
|
||||
}
|
||||
if current_depth == 0 {
|
||||
let close_bracket = tag::<&str, OrgSource<'_>, CustomError<OrgSource<'_>>>("]")(input);
|
||||
let close_bracket = tag::<_, _, CustomError>("]")(input);
|
||||
if close_bracket.is_ok() {
|
||||
return close_bracket;
|
||||
}
|
||||
@ -199,9 +198,7 @@ where
|
||||
let pre_bracket_depth = input.get_bracket_depth();
|
||||
let (remaining, output) = inner(input)?;
|
||||
if remaining.get_bracket_depth() - pre_bracket_depth != 0 {
|
||||
return Err(nom::Err::Error(CustomError::MyError(MyError(
|
||||
"UnbalancedBrackets",
|
||||
))));
|
||||
return Err(nom::Err::Error(CustomError::Static("UnbalancedBrackets")));
|
||||
}
|
||||
Ok((remaining, output))
|
||||
}
|
||||
|
@ -40,7 +40,7 @@ pub(crate) fn clock<'b, 'g, 'r, 's>(
|
||||
let (remaining, (timestamp, duration)) = clock_timestamp(context, remaining)?;
|
||||
let (remaining, _) = tuple((space0, org_line_ending))(remaining)?;
|
||||
|
||||
let (remaining, _trailing_ws) =
|
||||
let (remaining, post_blank) =
|
||||
maybe_consume_trailing_whitespace_if_not_exiting(context, remaining)?;
|
||||
let source = get_consumed(input, remaining);
|
||||
Ok((
|
||||
@ -54,6 +54,7 @@ pub(crate) fn clock<'b, 'g, 'r, 's>(
|
||||
} else {
|
||||
ClockStatus::Running
|
||||
},
|
||||
post_blank: post_blank.map(Into::<&str>::into),
|
||||
},
|
||||
))
|
||||
}
|
||||
@ -81,7 +82,7 @@ fn clock_timestamp<'b, 'g, 'r, 's>(
|
||||
|(timestamp, duration)| (timestamp, duration.map(Into::<&str>::into)),
|
||||
),
|
||||
map(
|
||||
parser_with_context!(inactive_timestamp)(context),
|
||||
parser_with_context!(inactive_timestamp(true))(context),
|
||||
|timestamp| (timestamp, None),
|
||||
),
|
||||
))(input)
|
||||
|
@ -19,7 +19,6 @@ use crate::context::parser_with_context;
|
||||
use crate::context::ContextElement;
|
||||
use crate::context::RefContext;
|
||||
use crate::error::CustomError;
|
||||
use crate::error::MyError;
|
||||
use crate::error::Res;
|
||||
use crate::parser::util::exit_matcher_parser;
|
||||
use crate::parser::util::immediate_in_section;
|
||||
@ -35,9 +34,9 @@ pub(crate) fn comment<'b, 'g, 'r, 's>(
|
||||
input: OrgSource<'s>,
|
||||
) -> Res<OrgSource<'s>, Comment<'s>> {
|
||||
if immediate_in_section(context, "comment") {
|
||||
return Err(nom::Err::Error(CustomError::MyError(MyError(
|
||||
return Err(nom::Err::Error(CustomError::Static(
|
||||
"Cannot nest objects of the same element",
|
||||
))));
|
||||
)));
|
||||
}
|
||||
let parser_context = ContextElement::Context("comment");
|
||||
let parser_context = context.with_additional_node(&parser_context);
|
||||
@ -47,7 +46,7 @@ pub(crate) fn comment<'b, 'g, 'r, 's>(
|
||||
let (remaining, mut remaining_lines) =
|
||||
many0(preceded(not(exit_matcher), comment_line_matcher))(remaining)?;
|
||||
|
||||
let (remaining, _trailing_ws) =
|
||||
let (remaining, post_blank) =
|
||||
maybe_consume_trailing_whitespace_if_not_exiting(context, remaining)?;
|
||||
let source = get_consumed(input, remaining);
|
||||
let mut value = Vec::with_capacity(remaining_lines.len() + 1);
|
||||
@ -68,6 +67,7 @@ pub(crate) fn comment<'b, 'g, 'r, 's>(
|
||||
Comment {
|
||||
source: source.into(),
|
||||
value,
|
||||
post_blank: post_blank.map(Into::<&str>::into),
|
||||
},
|
||||
))
|
||||
}
|
||||
@ -106,7 +106,6 @@ mod tests {
|
||||
use super::*;
|
||||
use crate::context::bind_context;
|
||||
use crate::context::Context;
|
||||
use crate::context::ContextElement;
|
||||
use crate::context::GlobalSettings;
|
||||
use crate::context::List;
|
||||
|
||||
|
@ -31,7 +31,7 @@ where
|
||||
let (remaining, value) = recognize(tuple((tag("%%("), is_not("\r\n"))))(remaining)?;
|
||||
let (remaining, _eol) = org_line_ending(remaining)?;
|
||||
|
||||
let (remaining, _trailing_ws) =
|
||||
let (remaining, post_blank) =
|
||||
maybe_consume_trailing_whitespace_if_not_exiting(context, remaining)?;
|
||||
let source = get_consumed(input, remaining);
|
||||
Ok((
|
||||
@ -43,6 +43,7 @@ where
|
||||
affiliated_keywords,
|
||||
),
|
||||
value: Into::<&str>::into(value),
|
||||
post_blank: post_blank.map(Into::<&str>::into),
|
||||
},
|
||||
))
|
||||
}
|
||||
|
@ -3,6 +3,7 @@ use std::path::Path;
|
||||
use nom::combinator::all_consuming;
|
||||
use nom::combinator::opt;
|
||||
use nom::multi::many0;
|
||||
use nom::InputTake;
|
||||
|
||||
use super::headline::heading;
|
||||
use super::in_buffer_settings::apply_in_buffer_settings;
|
||||
@ -19,9 +20,7 @@ use crate::context::GlobalSettings;
|
||||
use crate::context::List;
|
||||
use crate::context::RefContext;
|
||||
use crate::error::CustomError;
|
||||
use crate::error::MyError;
|
||||
use crate::error::Res;
|
||||
use crate::parser::org_source::convert_error;
|
||||
use crate::parser::util::blank_line;
|
||||
use crate::types::AstNode;
|
||||
use crate::types::Document;
|
||||
@ -103,7 +102,7 @@ pub fn parse_file_with_settings<'g, 's, P: AsRef<Path>>(
|
||||
/// This will not prevent additional settings from being learned during parsing, for example when encountering a "#+TODO".
|
||||
#[allow(dead_code)]
|
||||
fn document<'s>(context: RefContext<'_, '_, '_, 's>, input: &'s str) -> Res<&'s str, Document<'s>> {
|
||||
let (remaining, doc) = document_org_source(context, input.into()).map_err(convert_error)?;
|
||||
let (remaining, doc) = document_org_source(context, input.into())?;
|
||||
Ok((Into::<&str>::into(remaining), doc))
|
||||
}
|
||||
|
||||
@ -127,27 +126,16 @@ fn document_org_source<'b, 'g, 'r, 's>(
|
||||
.get_global_settings()
|
||||
.file_access
|
||||
.read_file(setup_file)
|
||||
.map_err(|err| nom::Err::<CustomError<OrgSource<'_>>>::Failure(err.into()))
|
||||
.map_err(|err| nom::Err::<CustomError>::Failure(err.into()))
|
||||
})
|
||||
.collect::<Result<Vec<_>, _>>()?;
|
||||
for setup_file in setup_files.iter().map(String::as_str) {
|
||||
let (_, setup_file_settings) =
|
||||
scan_for_in_buffer_settings(setup_file.into()).map_err(|err| {
|
||||
eprintln!("{}", err);
|
||||
nom::Err::Error(CustomError::MyError(MyError(
|
||||
"TODO: make this take an owned string so I can dump err.to_string() into it.",
|
||||
)))
|
||||
})?;
|
||||
let (_, setup_file_settings) = scan_for_in_buffer_settings(setup_file.into())?;
|
||||
final_settings.extend(setup_file_settings);
|
||||
}
|
||||
final_settings.extend(document_settings);
|
||||
let new_settings = apply_in_buffer_settings(final_settings, context.get_global_settings())
|
||||
.map_err(|err| {
|
||||
eprintln!("{}", err);
|
||||
nom::Err::Error(CustomError::MyError(MyError(
|
||||
"TODO: make this take an owned string so I can dump err.to_string() into it.",
|
||||
)))
|
||||
})?;
|
||||
.map_err(nom::Err::Error)?;
|
||||
let new_context = context.with_global_settings(&new_settings);
|
||||
let context = &new_context;
|
||||
|
||||
@ -156,7 +144,7 @@ fn document_org_source<'b, 'g, 'r, 's>(
|
||||
{
|
||||
// If there are radio targets in this document then we need to parse the entire document again with the knowledge of the radio targets.
|
||||
let all_radio_targets: Vec<&Vec<Object<'_>>> = Into::<AstNode>::into(&document)
|
||||
.into_iter()
|
||||
.iter_all_ast_nodes()
|
||||
.filter_map(|ast_node| {
|
||||
if let AstNode::RadioTarget(ast_node) = ast_node {
|
||||
Some(ast_node)
|
||||
@ -172,15 +160,13 @@ fn document_org_source<'b, 'g, 'r, 's>(
|
||||
let parser_context = context.with_global_settings(&new_global_settings);
|
||||
let (remaining, mut document) = _document(&parser_context, input)
|
||||
.map(|(rem, out)| (Into::<&str>::into(rem), out))?;
|
||||
apply_post_parse_in_buffer_settings(&mut document)
|
||||
.map_err(|err| nom::Err::<CustomError<OrgSource<'_>>>::Failure(err.into()))?;
|
||||
apply_post_parse_in_buffer_settings(&mut document);
|
||||
return Ok((remaining.into(), document));
|
||||
}
|
||||
}
|
||||
|
||||
// Find final in-buffer settings that do not impact parsing
|
||||
apply_post_parse_in_buffer_settings(&mut document)
|
||||
.map_err(|err| nom::Err::<CustomError<OrgSource<'_>>>::Failure(err.into()))?;
|
||||
apply_post_parse_in_buffer_settings(&mut document);
|
||||
|
||||
Ok((remaining.into(), document))
|
||||
}
|
||||
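A concrete case for the re-parse above (illustrative document, not from the repository):

// First pass over "<<<radio>>> text mentioning radio" parses the trailing
// "radio" as plain text, because the radio target is only discovered along
// the way. The second pass, seeded with the collected targets, can match
// that later mention against the target instead.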
@ -196,8 +182,10 @@ fn _document<'b, 'g, 'r, 's>(
|
||||
let zeroth_section_matcher = parser_with_context!(zeroth_section)(context);
|
||||
let heading_matcher = parser_with_context!(heading(0))(context);
|
||||
let (remaining, _blank_lines) = many0(blank_line)(input)?;
|
||||
let contents_begin = remaining;
|
||||
let (remaining, zeroth_section) = opt(zeroth_section_matcher)(remaining)?;
|
||||
let (remaining, children) = many0(heading_matcher)(remaining)?;
|
||||
let contents = get_consumed(contents_begin, remaining);
|
||||
let source = get_consumed(input, remaining);
|
||||
Ok((
|
||||
remaining,
|
||||
@ -207,6 +195,25 @@ fn _document<'b, 'g, 'r, 's>(
|
||||
path: None,
|
||||
zeroth_section,
|
||||
children,
|
||||
contents: if contents.len() > 0 {
|
||||
Into::<&str>::into(contents)
|
||||
} else {
|
||||
Into::<&str>::into(remaining.take(0))
|
||||
},
|
||||
},
|
||||
))
|
||||
}
|
||||
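A sketch of the source/contents distinction computed above (made-up input):

// For input "\n\n* Heading\n":
//   source   == "\n\n* Heading\n"  (measured from the original start)
//   contents == "* Heading\n"      (leading blank lines excluded)
// When the document is only blank lines, contents falls back to an empty
// slice at the end of input via remaining.take(0).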
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use test::Bencher;
|
||||
|
||||
use super::*;
|
||||
|
||||
#[bench]
|
||||
fn bench_full_document(b: &mut Bencher) {
|
||||
let input = include_str!("../../org_mode_samples/element_container_priority/README.org");
|
||||
|
||||
b.iter(|| assert!(parse(input).is_ok()));
|
||||
}
|
||||
}
|
||||
|
@ -4,6 +4,7 @@ use nom::bytes::complete::tag_no_case;
|
||||
use nom::bytes::complete::take_while;
|
||||
use nom::character::complete::line_ending;
|
||||
use nom::character::complete::space0;
|
||||
use nom::combinator::consumed;
|
||||
use nom::combinator::eof;
|
||||
use nom::combinator::not;
|
||||
use nom::combinator::recognize;
|
||||
@ -12,17 +13,17 @@ use nom::sequence::tuple;
|
||||
|
||||
use super::affiliated_keyword::parse_affiliated_keywords;
|
||||
use super::org_source::OrgSource;
|
||||
use super::paragraph::empty_paragraph;
|
||||
use super::util::maybe_consume_trailing_whitespace_if_not_exiting;
|
||||
use crate::context::bind_context;
|
||||
use crate::context::parser_with_context;
|
||||
use crate::context::ContextElement;
|
||||
use crate::context::ExitClass;
|
||||
use crate::context::ExitMatcherNode;
|
||||
use crate::context::RefContext;
|
||||
use crate::error::CustomError;
|
||||
use crate::error::MyError;
|
||||
use crate::error::Res;
|
||||
use crate::parser::element_parser::element;
|
||||
use crate::parser::util::blank_line;
|
||||
use crate::parser::util::exit_matcher_parser;
|
||||
use crate::parser::util::get_consumed;
|
||||
use crate::parser::util::immediate_in_section;
|
||||
@ -31,8 +32,6 @@ use crate::parser::util::WORD_CONSTITUENT_CHARACTERS;
|
||||
use crate::types::Drawer;
|
||||
use crate::types::Element;
|
||||
use crate::types::Keyword;
|
||||
use crate::types::Paragraph;
|
||||
use crate::types::SetSource;
|
||||
|
||||
#[cfg_attr(
|
||||
feature = "tracing",
|
||||
@ -48,9 +47,9 @@ where
|
||||
AK: IntoIterator<Item = Keyword<'s>>,
|
||||
{
|
||||
if immediate_in_section(context, "drawer") {
|
||||
return Err(nom::Err::Error(CustomError::MyError(MyError(
|
||||
return Err(nom::Err::Error(CustomError::Static(
|
||||
"Cannot nest objects of the same element",
|
||||
))));
|
||||
)));
|
||||
}
|
||||
start_of_line(remaining)?;
|
||||
let (remaining, _leading_whitespace) = space0(remaining)?;
|
||||
@ -72,30 +71,12 @@ where
|
||||
let parser_context = context.with_additional_node(&contexts[0]);
|
||||
let parser_context = parser_context.with_additional_node(&contexts[1]);
|
||||
let parser_context = parser_context.with_additional_node(&contexts[2]);
|
||||
let (remaining, (contents, children)) =
|
||||
consumed(parser_with_context!(children)(&parser_context))(remaining)?;
|
||||
|
||||
let element_matcher = parser_with_context!(element(true))(&parser_context);
|
||||
let exit_matcher = parser_with_context!(exit_matcher_parser)(&parser_context);
|
||||
let (remaining, children) = match tuple((
|
||||
not(exit_matcher),
|
||||
blank_line,
|
||||
many_till(blank_line, exit_matcher),
|
||||
))(remaining)
|
||||
{
|
||||
Ok((remain, (_not_immediate_exit, first_line, (_trailing_whitespace, _exit_contents)))) => {
|
||||
let mut element = Element::Paragraph(Paragraph::of_text(first_line.into()));
|
||||
let source = get_consumed(remaining, remain);
|
||||
element.set_source(source.into());
|
||||
(remain, vec![element])
|
||||
}
|
||||
Err(_) => {
|
||||
let (remaining, (children, _exit_contents)) =
|
||||
many_till(element_matcher, exit_matcher)(remaining)?;
|
||||
(remaining, children)
|
||||
}
|
||||
};
|
||||
let (remaining, _end) = drawer_end(&parser_context, remaining)?;
|
||||
|
||||
let (remaining, _trailing_ws) =
|
||||
let (remaining, post_blank) =
|
||||
maybe_consume_trailing_whitespace_if_not_exiting(context, remaining)?;
|
||||
let source = get_consumed(input, remaining);
|
||||
|
||||
@ -109,10 +90,34 @@ where
|
||||
),
|
||||
drawer_name: drawer_name.into(),
|
||||
children,
|
||||
contents: Some(contents.into()),
|
||||
post_blank: post_blank.map(Into::<&str>::into),
|
||||
},
|
||||
))
|
||||
}
|
||||
|
||||
#[cfg_attr(
|
||||
feature = "tracing",
|
||||
tracing::instrument(ret, level = "debug", skip(context))
|
||||
)]
|
||||
fn children<'b, 'g, 'r, 's>(
|
||||
context: RefContext<'b, 'g, 'r, 's>,
|
||||
input: OrgSource<'s>,
|
||||
) -> Res<OrgSource<'s>, Vec<Element<'s>>> {
|
||||
let element_matcher = parser_with_context!(element(true))(context);
|
||||
let exit_matcher = parser_with_context!(exit_matcher_parser)(context);
|
||||
|
||||
if let Ok((remaining, (_not_exit, empty_para))) =
|
||||
tuple((not(exit_matcher), bind_context!(empty_paragraph, context)))(input)
|
||||
{
|
||||
return Ok((remaining, vec![Element::Paragraph(empty_para)]));
|
||||
}
|
||||
|
||||
let (remaining, (children, _exit_contents)) = many_till(element_matcher, exit_matcher)(input)?;
|
||||
|
||||
Ok((remaining, children))
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
|
||||
fn name<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, OrgSource<'s>> {
|
||||
take_while(|c| WORD_CONSTITUENT_CHARACTERS.contains(c) || "-_".contains(c))(input)
|
||||
|
@@ -6,30 +6,28 @@ use nom::character::complete::anychar;
 use nom::character::complete::line_ending;
 use nom::character::complete::space0;
 use nom::character::complete::space1;
-use nom::combinator::consumed;
 use nom::combinator::eof;
-use nom::combinator::not;
 use nom::combinator::opt;
 use nom::combinator::peek;
 use nom::combinator::recognize;
-use nom::multi::many0;
 use nom::multi::many_till;
-use nom::sequence::preceded;
 use nom::sequence::tuple;
 
 use super::affiliated_keyword::parse_affiliated_keywords;
+use super::greater_block::leading_blank_lines_end;
 use super::org_source::OrgSource;
+use super::paragraph::empty_paragraph;
 use super::util::maybe_consume_trailing_whitespace_if_not_exiting;
+use crate::context::bind_context;
 use crate::context::parser_with_context;
 use crate::context::ContextElement;
 use crate::context::ExitClass;
 use crate::context::ExitMatcherNode;
 use crate::context::RefContext;
 use crate::error::CustomError;
-use crate::error::MyError;
 use crate::error::Res;
 use crate::parser::element_parser::element;
 use crate::parser::util::blank_line;
 use crate::parser::util::exit_matcher_parser;
 use crate::parser::util::get_consumed;
 use crate::parser::util::immediate_in_section;
@@ -37,8 +35,6 @@ use crate::parser::util::start_of_line;
 use crate::types::DynamicBlock;
 use crate::types::Element;
 use crate::types::Keyword;
-use crate::types::Paragraph;
-use crate::types::SetSource;
 
 #[cfg_attr(
     feature = "tracing",
@@ -54,9 +50,9 @@ where
     AK: IntoIterator<Item = Keyword<'s>>,
 {
     if immediate_in_section(context, "dynamic block") {
-        return Err(nom::Err::Error(CustomError::MyError(MyError(
+        return Err(nom::Err::Error(CustomError::Static(
             "Cannot nest objects of the same element",
-        ))));
+        )));
     }
 
     start_of_line(remaining)?;
@@ -83,25 +79,25 @@ where
     let element_matcher = parser_with_context!(element(true))(&parser_context);
     let exit_matcher = parser_with_context!(exit_matcher_parser)(&parser_context);
-    not(exit_matcher)(remaining)?;
-    let (remaining, leading_blank_lines) = opt(consumed(tuple((
-        blank_line,
-        many0(preceded(not(exit_matcher), blank_line)),
-    ))))(remaining)?;
-    let leading_blank_lines =
-        leading_blank_lines.map(|(source, (first_line, _remaining_lines))| {
-            let mut element = Element::Paragraph(Paragraph::of_text(first_line.into()));
-            element.set_source(source.into());
-            element
-        });
+    let contents_begin = remaining;
+    let blank_line_context = ContextElement::ExitMatcherNode(ExitMatcherNode {
+        class: ExitClass::Alpha,
+        exit_matcher: &leading_blank_lines_end,
+    });
+    let blank_line_context = parser_context.with_additional_node(&blank_line_context);
+
+    let (remaining, leading_blank_lines) =
+        opt(bind_context!(empty_paragraph, &blank_line_context))(remaining)?;
     let (remaining, (mut children, _exit_contents)) =
         many_till(element_matcher, exit_matcher)(remaining)?;
     if let Some(lines) = leading_blank_lines {
-        children.insert(0, lines);
+        children.insert(0, Element::Paragraph(lines));
     }
+    let contents = get_consumed(contents_begin, remaining);
 
     let (remaining, _end) = dynamic_block_end(&parser_context, remaining)?;
 
-    let (remaining, _trailing_ws) =
+    let (remaining, post_blank) =
         maybe_consume_trailing_whitespace_if_not_exiting(context, remaining)?;
     let source = get_consumed(input, remaining);
     Ok((
@@ -115,6 +111,12 @@ where
             block_name: name.into(),
             parameters: parameters.map(|val| val.into()),
             children,
+            contents: if contents.len() > 0 {
+                Some(Into::<&str>::into(contents))
+            } else {
+                None
+            },
+            post_blank: post_blank.map(Into::<&str>::into),
         },
     ))
 }
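Note: several hunks in this range swap `parser_with_context!(parser)(&context)` for `bind_context!(parser, &context)`. Judging by the call sites, both adapt a two-argument parser `fn(context, input)` into the one-argument closure that nom combinators expect. A speculative stand-alone re-creation of such a macro; the real `crate::context::bind_context` may well differ:

    // Hypothetical reconstruction: bind the context argument now, take input later.
    macro_rules! bind_context {
        ($parser:expr, $context:expr) => {
            |input| $parser($context, input)
        };
    }

    type Res<'s, O> = Result<(&'s str, O), &'static str>;

    // A toy context-dependent parser in the same shape as the crate's parsers.
    fn starts_with<'s>(context: &str, input: &'s str) -> Res<'s, usize> {
        match input.strip_prefix(context) {
            Some(rest) => Ok((rest, context.len())),
            None => Err("no match"),
        }
    }

    fn main() {
        let matcher = bind_context!(starts_with, ":end:");
        assert_eq!(matcher(":end:\n"), Ok(("\n", 5)));
        assert!(matcher("body").is_err());
    }
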
@@ -1,5 +1,3 @@
-use nom::multi::many0;
-
 use super::babel_call::babel_call;
 use super::clock::clock;
 use super::comment::comment;
@@ -14,7 +12,6 @@ use super::footnote_definition::detect_footnote_definition;
 use super::footnote_definition::footnote_definition;
 use super::greater_block::greater_block;
 use super::horizontal_rule::horizontal_rule;
-use super::keyword::affiliated_keyword;
 use super::keyword::keyword;
 use super::latex_environment::latex_environment;
 use super::lesser_block::comment_block;
@@ -27,11 +24,16 @@ use super::paragraph::paragraph;
 use super::plain_list::detect_plain_list;
 use super::plain_list::plain_list;
 use super::table::detect_table;
-use crate::context::parser_with_context;
 use crate::context::RefContext;
 use crate::error::CustomError;
-use crate::error::MyError;
 use crate::error::Res;
+#[cfg(feature = "event_count")]
+use crate::event_count::record_event;
+#[cfg(feature = "event_count")]
+use crate::event_count::EventType;
+use crate::parser::affiliated_keyword::affiliated_keywords;
+use crate::parser::bullshitium::bullshitium;
+use crate::parser::bullshitium::detect_bullshitium;
 use crate::parser::macros::ak_element;
 use crate::parser::macros::element;
 use crate::parser::table::org_mode_table;
@@ -55,8 +57,9 @@ fn _element<'b, 'g, 'r, 's>(
     input: OrgSource<'s>,
     can_be_paragraph: bool,
 ) -> Res<OrgSource<'s>, Element<'s>> {
-    let (post_affiliated_keywords_input, affiliated_keywords) =
-        many0(parser_with_context!(affiliated_keyword)(context))(input)?;
+    #[cfg(feature = "event_count")]
+    record_event(EventType::ElementStart, input);
+    let (post_affiliated_keywords_input, affiliated_keywords) = affiliated_keywords(input)?;
 
     let mut affiliated_keywords = affiliated_keywords.into_iter();
 
@@ -240,6 +243,9 @@ fn _element<'b, 'g, 'r, 's>(
     );
 
+    if can_be_paragraph {
+        // Fake paragraphs
+        element!(bullshitium, context, input, Element::Paragraph);
 
     // Paragraph without affiliated keyword
     ak_element!(
         paragraph,
@@ -251,9 +257,7 @@ fn _element<'b, 'g, 'r, 's>(
     );
+    }
 
-    Err(nom::Err::Error(CustomError::MyError(MyError(
-        "No element.",
-    ))))
+    Err(nom::Err::Error(CustomError::Static("No element.")))
 }
 
 pub(crate) const fn detect_element(
@@ -272,8 +276,7 @@ fn _detect_element<'b, 'g, 'r, 's>(
     input: OrgSource<'s>,
     can_be_paragraph: bool,
 ) -> Res<OrgSource<'s>, ()> {
-    let (post_affiliated_keywords_input, affiliated_keywords) =
-        many0(parser_with_context!(affiliated_keyword)(context))(input)?;
+    let (post_affiliated_keywords_input, affiliated_keywords) = affiliated_keywords(input)?;
 
     let mut affiliated_keywords = affiliated_keywords.into_iter();
 
@@ -319,11 +322,14 @@ fn _detect_element<'b, 'g, 'r, 's>(
         input
     );
 
+    // Fake paragraphs
+    if !can_be_paragraph {
+        element!(detect_bullshitium, context, input);
+    }
+
     if _element(context, input, can_be_paragraph).is_ok() {
         return Ok((input, ()));
     }
 
-    Err(nom::Err::Error(CustomError::MyError(MyError(
-        "No element detected.",
-    ))))
+    Err(nom::Err::Error(CustomError::Static("No element detected.")))
 }
@@ -1,11 +1,11 @@
 use nom::branch::alt;
 use nom::bytes::complete::tag;
 use nom::character::complete::satisfy;
+use nom::combinator::cond;
 use nom::combinator::eof;
 use nom::combinator::map;
 use nom::combinator::peek;
 use nom::combinator::recognize;
-use nom::combinator::verify;
 use nom::sequence::tuple;
 
 use super::org_source::OrgSource;
@@ -13,7 +13,6 @@ use super::util::maybe_consume_object_trailing_whitespace_if_not_exiting;
 use crate::context::EntityDefinition;
 use crate::context::RefContext;
 use crate::error::CustomError;
-use crate::error::MyError;
 use crate::error::Res;
 use crate::parser::util::get_consumed;
 use crate::types::Entity;
@@ -29,7 +28,7 @@ pub(crate) fn entity<'b, 'g, 'r, 's>(
     let (remaining, _) = tag("\\")(input)?;
     let (remaining, (entity_definition, entity_name, use_brackets)) = name(context, remaining)?;
 
-    let (remaining, _trailing_whitespace) =
+    let (remaining, post_blank) =
         maybe_consume_object_trailing_whitespace_if_not_exiting(context, remaining)?;
 
     let source = get_consumed(input, remaining);
@@ -44,6 +43,7 @@ pub(crate) fn entity<'b, 'g, 'r, 's>(
             ascii: entity_definition.ascii,
             utf8: entity_definition.utf8,
             use_brackets,
+            post_blank: post_blank.map(Into::<&str>::into),
         },
     ))
 }
@@ -58,18 +58,21 @@ fn name<'b, 'g, 'r, 's>(
 ) -> Res<OrgSource<'s>, (&'g EntityDefinition<'s>, OrgSource<'s>, bool)> {
     for entity in context.get_global_settings().entities {
         let result = tuple((
-            tag::<_, _, CustomError<_>>(entity.name),
-            alt((
-                verify(map(tag("{}"), |_| true), |_| !entity.name.ends_with(' ')),
-                map(peek(recognize(entity_end)), |_| false),
-            )),
+            tag::<_, _, CustomError>(entity.name),
+            cond(
+                !entity.name.ends_with(' '),
+                alt((
+                    map(tag("{}"), |_| true),
+                    map(peek(recognize(entity_end)), |_| false),
+                )),
+            ),
         ))(input);
         if let Ok((remaining, (ent, use_brackets))) = result {
-            return Ok((remaining, (entity, ent, use_brackets)));
+            return Ok((remaining, (entity, ent, use_brackets.unwrap_or(false))));
         }
     }
 
-    Err(nom::Err::Error(CustomError::MyError(MyError("NoEntity"))))
+    Err(nom::Err::Error(CustomError::Static("NoEntity")))
 }
 
 #[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
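Note: the `name` rewrite above drops `verify` in favor of `nom::combinator::cond`, which runs its inner parser only when a boolean holds and yields an `Option` that the caller then defaults with `unwrap_or(false)`. A small self-contained example of that combinator, using plain `&str` input and an unconditional `success(false)` fallback in place of the crate's `entity_end` peek:

    use nom::branch::alt;
    use nom::bytes::complete::tag;
    use nom::combinator::{cond, map, success};
    use nom::IResult;

    // When `allowed` is false, cond skips the inner parser and yields None,
    // which unwrap_or(false) turns into "no brackets".
    fn use_brackets(allowed: bool, input: &str) -> IResult<&str, bool> {
        let (remaining, matched) = cond(
            allowed,
            alt((map(tag("{}"), |_| true), success(false))),
        )(input)?;
        Ok((remaining, matched.unwrap_or(false)))
    }

    fn main() {
        assert_eq!(use_brackets(true, "{}rest"), Ok(("rest", true)));
        assert_eq!(use_brackets(false, "{}rest"), Ok(("{}rest", false)));
    }
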
@@ -39,7 +39,7 @@ pub(crate) fn export_snippet<'b, 'g, 'r, 's>(
         parser_with_context!(contents)(&parser_context),
     )))(remaining)?;
     let (remaining, _) = tag("@@")(remaining)?;
-    let (remaining, _trailing_whitespace) =
+    let (remaining, post_blank) =
         maybe_consume_object_trailing_whitespace_if_not_exiting(context, remaining)?;
     let source = get_consumed(input, remaining);
     Ok((
@@ -48,6 +48,7 @@ pub(crate) fn export_snippet<'b, 'g, 'r, 's>(
             source: source.into(),
             backend: backend_name.into(),
             contents: backend_contents.map(|(_colon, backend_contents)| backend_contents.into()),
+            post_blank: post_blank.map(Into::<&str>::into),
         },
     ))
 }
@@ -2,12 +2,15 @@ use nom::branch::alt;
 use nom::bytes::complete::tag;
 use nom::character::complete::anychar;
 use nom::character::complete::space0;
+use nom::combinator::map;
 use nom::combinator::not;
+use nom::combinator::peek;
 use nom::combinator::recognize;
 use nom::multi::many0;
 use nom::multi::many_till;
 use nom::sequence::preceded;
 use nom::sequence::tuple;
+use nom::InputTake;
 
 use super::affiliated_keyword::parse_affiliated_keywords;
 use super::org_source::OrgSource;
@@ -35,28 +38,25 @@ pub(crate) fn fixed_width_area<'b, 'g, 'r, 's, AK>(
 where
     AK: IntoIterator<Item = Keyword<'s>>,
 {
-    let fixed_width_area_line_matcher = parser_with_context!(fixed_width_area_line)(context);
     let exit_matcher = parser_with_context!(exit_matcher_parser)(context);
-    let (remaining, first_line) = fixed_width_area_line_matcher(remaining)?;
-    let (remaining, mut remaining_lines) =
-        many0(preceded(not(exit_matcher), fixed_width_area_line_matcher))(remaining)?;
+    let (remaining, first_line) = fixed_width_area_line(remaining)?;
+    let (remaining, remaining_lines) = many0(preceded(
+        not(tuple((org_line_ending, exit_matcher))),
+        map(
+            tuple((org_line_ending, fixed_width_area_line)),
+            |(_line_ending, line_contents)| line_contents,
+        ),
+    ))(remaining)?;
 
-    let (remaining, _trailing_ws) =
+    let post_blank_begin = remaining;
+    let (remaining, _first_line_break) = org_line_ending(remaining)?;
+    let (remaining, _additional_post_blank) =
         maybe_consume_trailing_whitespace_if_not_exiting(context, remaining)?;
+    let post_blank = get_consumed(post_blank_begin, remaining);
     let source = get_consumed(input, remaining);
     let mut value = Vec::with_capacity(remaining_lines.len() + 1);
-    let last_line = remaining_lines.pop();
-    if let Some(last_line) = last_line {
-        value.push(Into::<&str>::into(first_line));
-        value.extend(remaining_lines.into_iter().map(Into::<&str>::into));
-        let last_line = Into::<&str>::into(last_line);
-        // Trim the line ending from the final line.
-        value.push(&last_line[..(last_line.len() - 1)])
-    } else {
-        // Trim the line ending from the only line.
-        let only_line = Into::<&str>::into(first_line);
-        value.push(&only_line[..(only_line.len() - 1)])
-    }
+    value.push(Into::<&str>::into(first_line));
+    value.extend(remaining_lines.into_iter().map(Into::<&str>::into));
     Ok((
         remaining,
         FixedWidthArea {
@@ -66,25 +66,24 @@ where
             affiliated_keywords,
         ),
         value,
+        post_blank: if post_blank.len() > 0 {
+            Some(Into::<&str>::into(post_blank))
+        } else {
+            None
+        },
     },
 ))
 }
 
-#[cfg_attr(
-    feature = "tracing",
-    tracing::instrument(ret, level = "debug", skip(_context))
-)]
-fn fixed_width_area_line<'b, 'g, 'r, 's>(
-    _context: RefContext<'b, 'g, 'r, 's>,
-    input: OrgSource<'s>,
-) -> Res<OrgSource<'s>, OrgSource<'s>> {
+#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
+fn fixed_width_area_line<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, OrgSource<'s>> {
     start_of_line(input)?;
     let (remaining, _) = tuple((space0, tag(":")))(input)?;
-    if let Ok((remaining, line_break)) = org_line_ending(remaining) {
-        return Ok((remaining, line_break));
+    if let Ok((_remain, _line_break)) = org_line_ending(remaining) {
+        return Ok((remaining, remaining.take(0)));
     }
     let (remaining, _) = tag(" ")(remaining)?;
-    let (remaining, value) = recognize(many_till(anychar, org_line_ending))(remaining)?;
+    let (remaining, value) = recognize(many_till(anychar, peek(org_line_ending)))(remaining)?;
     Ok((remaining, value))
 }
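Note: the fixed-width rewrite stops storing each line's trailing newline in `value` and instead records the blank run after the element as `post_blank`. A std-only sketch of that bookkeeping, under the assumed convention that an element's source ends just after the newline of its last content line:

    // Split a parsed region into its content and the run of trailing blank
    // lines, returning None when there is nothing to record as post_blank.
    fn split_post_blank(source: &str) -> (&str, Option<&str>) {
        let content_end = source
            .trim_end_matches(|c| c == ' ' || c == '\t' || c == '\n' || c == '\r')
            .len();
        // Extend back through the last content line's newline so that the
        // first line break stays with the content.
        let cut = source[content_end..]
            .find('\n')
            .map(|i| content_end + i + 1)
            .unwrap_or(source.len());
        let post_blank = &source[cut..];
        (
            &source[..cut],
            if post_blank.is_empty() { None } else { Some(post_blank) },
        )
    }

    fn main() {
        assert_eq!(split_post_blank(": line\n\n\n"), (": line\n", Some("\n\n")));
        assert_eq!(split_post_blank(": line\n"), (": line\n", None));
    }
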
@@ -22,7 +22,6 @@ use crate::context::ExitClass;
 use crate::context::ExitMatcherNode;
 use crate::context::RefContext;
 use crate::error::CustomError;
-use crate::error::MyError;
 use crate::error::Res;
 use crate::parser::element_parser::element;
 use crate::parser::util::blank_line;
@@ -48,9 +47,9 @@ where
     AK: IntoIterator<Item = Keyword<'s>>,
 {
     if immediate_in_section(context, "footnote definition") {
-        return Err(nom::Err::Error(CustomError::MyError(MyError(
+        return Err(nom::Err::Error(CustomError::Static(
             "Cannot nest objects of the same element",
-        ))));
+        )));
     }
     start_of_line(remaining)?;
     // Cannot be indented.
@@ -76,6 +75,7 @@ where
     let parser_context = parser_context.with_additional_node(&contexts[2]);
     let element_matcher = parser_with_context!(element(true))(&parser_context);
     let exit_matcher = parser_with_context!(exit_matcher_parser)(&parser_context);
+    let before_contents = remaining;
     let (mut remaining, (mut children, _exit_contents)) =
         many_till(include_input(element_matcher), exit_matcher)(remaining)?;
 
@@ -91,13 +91,16 @@ where
         }
     }
 
-    let (remaining, _trailing_ws) =
+    let contents = get_consumed(before_contents, remaining);
+    let (remaining, post_blank) =
         maybe_consume_trailing_whitespace_if_not_exiting(context, remaining)?;
     let source = get_consumed(input, remaining);
     Ok((
         remaining,
         FootnoteDefinition {
             source: source.into(),
+            contents: Some(contents.into()),
+            post_blank: post_blank.map(Into::<&str>::into),
             affiliated_keywords: parse_affiliated_keywords(
                 context.get_global_settings(),
                 affiliated_keywords,
@@ -161,7 +164,7 @@ mod tests {
     use crate::context::Context;
     use crate::context::GlobalSettings;
     use crate::context::List;
-    use crate::types::GetStandardProperties;
+    use crate::types::StandardProperties;
 
     #[test]
     fn two_paragraphs() {
@@ -182,17 +185,13 @@ line footnote.",
         footnote_definition_matcher(remaining).expect("Parse second footnote_definition.");
         assert_eq!(Into::<&str>::into(remaining), "");
         assert_eq!(
-            first_footnote_definition
-                .get_standard_properties()
-                .get_source(),
+            first_footnote_definition.get_source(),
             "[fn:1] A footnote.
 
 "
         );
         assert_eq!(
-            second_footnote_definition
-                .get_standard_properties()
-                .get_source(),
+            second_footnote_definition.get_source(),
             "[fn:2] A multi-
 
 line footnote."
@@ -217,9 +216,7 @@ not in the footnote.",
         footnote_definition_matcher(input).expect("Parse first footnote_definition");
         assert_eq!(Into::<&str>::into(remaining), "not in the footnote.");
         assert_eq!(
-            first_footnote_definition
-                .get_standard_properties()
-                .get_source(),
+            first_footnote_definition.get_source(),
             "[fn:2] A multi-
 
 line footnote.
@@ -2,6 +2,7 @@ use nom::branch::alt;
 use nom::bytes::complete::tag;
 use nom::bytes::complete::tag_no_case;
 use nom::combinator::all_consuming;
+use nom::combinator::consumed;
 use nom::combinator::map_parser;
 use nom::combinator::verify;
 use nom::multi::many1;
@@ -20,7 +21,6 @@ use crate::context::ExitMatcherNode;
 use crate::context::List;
 use crate::context::RefContext;
 use crate::error::CustomError;
-use crate::error::MyError;
 use crate::error::Res;
 use crate::parser::footnote_definition::label;
 use crate::parser::object_parser::standard_set_object;
@@ -60,7 +60,7 @@ fn anonymous_footnote<'b, 'g, 'r, 's>(
     let initial_context = ContextElement::document_context();
     let initial_context = Context::new(context.get_global_settings(), List::new(&initial_context));
 
-    let (remaining, children) = map_parser(
+    let (remaining, (contents, children)) = consumed(map_parser(
         verify(
             parser_with_context!(text_until_exit)(&parser_context),
             |text| text.len() > 0,
@@ -70,17 +70,19 @@ fn anonymous_footnote<'b, 'g, 'r, 's>(
                 &initial_context,
             )))(i)
         }),
-    )(remaining)?;
+    ))(remaining)?;
 
     let (remaining, _) = tag("]")(remaining)?;
 
-    let (remaining, _trailing_whitespace) =
+    let (remaining, post_blank) =
         maybe_consume_object_trailing_whitespace_if_not_exiting(context, remaining)?;
     let source = get_consumed(input, remaining);
     Ok((
         remaining,
         FootnoteReference {
             source: source.into(),
+            contents: Some(contents.into()),
+            post_blank: post_blank.map(Into::<&str>::into),
             label: None,
             definition: children,
         },
@@ -107,7 +109,7 @@ fn inline_footnote<'b, 'g, 'r, 's>(
     let initial_context = ContextElement::document_context();
     let initial_context = Context::new(context.get_global_settings(), List::new(&initial_context));
 
-    let (remaining, children) = map_parser(
+    let (remaining, (contents, children)) = consumed(map_parser(
         verify(
             parser_with_context!(text_until_exit)(&parser_context),
             |text| text.len() > 0,
@@ -117,17 +119,19 @@ fn inline_footnote<'b, 'g, 'r, 's>(
                 &initial_context,
             )))(i)
         }),
-    )(remaining)?;
+    ))(remaining)?;
 
     let (remaining, _) = tag("]")(remaining)?;
 
-    let (remaining, _trailing_whitespace) =
+    let (remaining, post_blank) =
         maybe_consume_object_trailing_whitespace_if_not_exiting(context, remaining)?;
     let source = get_consumed(input, remaining);
     Ok((
         remaining,
         FootnoteReference {
             source: source.into(),
+            contents: Some(contents.into()),
+            post_blank: post_blank.map(Into::<&str>::into),
             label: Some(label_contents.into()),
             definition: children,
         },
@@ -145,13 +149,15 @@ fn footnote_reference_only<'b, 'g, 'r, 's>(
     let (remaining, _) = tag_no_case("[fn:")(input)?;
     let (remaining, label_contents) = label(remaining)?;
     let (remaining, _) = tag("]")(remaining)?;
-    let (remaining, _trailing_whitespace) =
+    let (remaining, post_blank) =
         maybe_consume_object_trailing_whitespace_if_not_exiting(context, remaining)?;
     let source = get_consumed(input, remaining);
     Ok((
         remaining,
         FootnoteReference {
             source: source.into(),
+            contents: None,
+            post_blank: post_blank.map(Into::<&str>::into),
             label: Some(label_contents.into()),
             definition: Vec::with_capacity(0),
         },
@@ -176,9 +182,9 @@ fn _footnote_definition_end<'b, 'g, 'r, 's>(
     let current_depth = input.get_bracket_depth() - starting_bracket_depth;
     if current_depth > 0 {
         // Its impossible for the next character to end the footnote reference definition if we're any amount of brackets deep
-        return Err(nom::Err::Error(CustomError::MyError(MyError(
+        return Err(nom::Err::Error(CustomError::Static(
             "NoFootnoteReferenceDefinitionEnd",
-        ))));
+        )));
     }
     if current_depth < 0 {
         // This shouldn't be possible because if depth is 0 then a closing bracket should end the footnote definition.
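Note: both footnote-reference hunks wrap the existing children parser in `nom::combinator::consumed`, which returns the raw matched slice alongside the inner parser's output so it can be stored as `contents`. A minimal demonstration of that combinator on a plain `&str`:

    use nom::bytes::complete::tag;
    use nom::combinator::consumed;
    use nom::sequence::tuple;
    use nom::IResult;

    // consumed(parser) yields (matched_input, parser_output), letting the
    // caller keep the exact text the inner parser recognized.
    fn label(input: &str) -> IResult<&str, (&str, (&str, &str))> {
        consumed(tuple((tag("fn:"), tag("1"))))(input)
    }

    fn main() {
        let (rest, (contents, _parts)) = label("fn:1]").unwrap();
        assert_eq!(contents, "fn:1");
        assert_eq!(rest, "]");
    }
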
@@ -5,22 +5,21 @@ use nom::character::complete::anychar;
 use nom::character::complete::line_ending;
 use nom::character::complete::space0;
 use nom::character::complete::space1;
-use nom::combinator::consumed;
 use nom::combinator::eof;
 use nom::combinator::not;
 use nom::combinator::opt;
 use nom::combinator::peek;
 use nom::combinator::recognize;
 use nom::combinator::verify;
-use nom::multi::many0;
 use nom::multi::many_till;
-use nom::sequence::preceded;
 use nom::sequence::tuple;
 
 use super::affiliated_keyword::parse_affiliated_keywords;
 use super::org_source::OrgSource;
+use super::paragraph::empty_paragraph;
 use super::util::in_section;
 use super::util::maybe_consume_trailing_whitespace_if_not_exiting;
+use crate::context::bind_context;
 use crate::context::parser_with_context;
 use crate::context::ContextElement;
 use crate::context::ContextMatcher;
@@ -28,7 +27,6 @@ use crate::context::ExitClass;
 use crate::context::ExitMatcherNode;
 use crate::context::RefContext;
 use crate::error::CustomError;
-use crate::error::MyError;
 use crate::error::Res;
 use crate::parser::element_parser::element;
 use crate::parser::util::blank_line;
@@ -38,9 +36,7 @@ use crate::parser::util::start_of_line;
 use crate::types::CenterBlock;
 use crate::types::Element;
 use crate::types::Keyword;
-use crate::types::Paragraph;
 use crate::types::QuoteBlock;
-use crate::types::SetSource;
 use crate::types::SpecialBlock;
 
 #[cfg_attr(
@@ -104,7 +100,7 @@ fn center_block<'b, 'g, 'r, 's, AK>(
 where
     AK: IntoIterator<Item = Keyword<'s>>,
 {
-    let (remaining, (source, children)) = greater_block_body(
+    let (remaining, body) = greater_block_body(
         context,
         input,
         pre_affiliated_keywords_input,
@@ -114,12 +110,14 @@ where
     Ok((
         remaining,
         Element::CenterBlock(CenterBlock {
-            source,
+            source: body.source,
             affiliated_keywords: parse_affiliated_keywords(
                 context.get_global_settings(),
                 affiliated_keywords,
             ),
-            children,
+            children: body.children,
+            contents: body.contents,
+            post_blank: body.post_blank,
         }),
     ))
 }
@@ -137,7 +135,7 @@ fn quote_block<'b, 'g, 'r, 's, AK>(
 where
     AK: IntoIterator<Item = Keyword<'s>>,
 {
-    let (remaining, (source, children)) = greater_block_body(
+    let (remaining, body) = greater_block_body(
         context,
         input,
         pre_affiliated_keywords_input,
@@ -147,12 +145,14 @@ where
     Ok((
         remaining,
         Element::QuoteBlock(QuoteBlock {
-            source,
+            source: body.source,
             affiliated_keywords: parse_affiliated_keywords(
                 context.get_global_settings(),
                 affiliated_keywords,
             ),
-            children,
+            children: body.children,
+            contents: body.contents,
+            post_blank: body.post_blank,
        }),
    ))
 }
@@ -198,7 +198,7 @@ where
     AK: IntoIterator<Item = Keyword<'s>>,
 {
     let (remaining, parameters) = opt(tuple((space1, parameters)))(input)?;
-    let (remaining, (source, children)) = greater_block_body(
+    let (remaining, body) = greater_block_body(
         context,
         remaining,
         pre_affiliated_keywords_input,
@@ -208,18 +208,28 @@ where
     Ok((
         remaining,
         Element::SpecialBlock(SpecialBlock {
-            source,
+            source: body.source,
             affiliated_keywords: parse_affiliated_keywords(
                 context.get_global_settings(),
                 affiliated_keywords,
             ),
-            children,
+            children: body.children,
             block_type: name,
             parameters: parameters.map(|(_, parameters)| Into::<&str>::into(parameters)),
+            contents: body.contents,
+            post_blank: body.post_blank,
         }),
     ))
 }
 
+#[derive(Debug)]
+struct GreaterBlockBody<'s> {
+    source: &'s str,
+    children: Vec<Element<'s>>,
+    contents: Option<&'s str>,
+    post_blank: Option<&'s str>,
+}
+
 #[cfg_attr(
     feature = "tracing",
     tracing::instrument(ret, level = "debug", skip(context))
@@ -230,11 +240,11 @@ fn greater_block_body<'c, 'b, 'g, 'r, 's>(
     pre_affiliated_keywords_input: OrgSource<'s>,
     name: &'c str,
     context_name: &'c str,
-) -> Res<OrgSource<'s>, (&'s str, Vec<Element<'s>>)> {
+) -> Res<OrgSource<'s>, GreaterBlockBody<'s>> {
     if in_section(context, context_name) {
-        return Err(nom::Err::Error(CustomError::MyError(MyError(
+        return Err(nom::Err::Error(CustomError::Static(
             "Cannot nest objects of the same element",
-        ))));
+        )));
     }
     let exit_with_name = greater_block_end(name);
     let (remaining, _nl) = tuple((space0, line_ending))(input)?;
@@ -252,30 +262,43 @@ fn greater_block_body<'c, 'b, 'g, 'r, 's>(
     let element_matcher = parser_with_context!(element(true))(&parser_context);
     let exit_matcher = parser_with_context!(exit_matcher_parser)(&parser_context);
-    not(exit_matcher)(remaining)?;
-    let (remaining, leading_blank_lines) = opt(consumed(tuple((
-        blank_line,
-        many0(preceded(not(exit_matcher), blank_line)),
-    ))))(remaining)?;
-    let leading_blank_lines =
-        leading_blank_lines.map(|(source, (first_line, _remaining_lines))| {
-            let mut element = Element::Paragraph(Paragraph::of_text(first_line.into()));
-            element.set_source(source.into());
-            element
-        });
+    let contents_begin = remaining;
+
+    let blank_line_context = ContextElement::ExitMatcherNode(ExitMatcherNode {
+        class: ExitClass::Alpha,
+        exit_matcher: &leading_blank_lines_end,
+    });
+    let blank_line_context = parser_context.with_additional_node(&blank_line_context);
+
+    let (remaining, leading_blank_lines) =
+        opt(bind_context!(empty_paragraph, &blank_line_context))(remaining)?;
     let (remaining, (mut children, _exit_contents)) =
         many_till(element_matcher, exit_matcher)(remaining)?;
     if let Some(lines) = leading_blank_lines {
-        children.insert(0, lines);
+        children.insert(0, Element::Paragraph(lines));
     }
+    let contents = get_consumed(contents_begin, remaining);
 
     let (remaining, _end) = exit_with_name(&parser_context, remaining)?;
 
     // Not checking if parent exit matcher is causing exit because the greater_block_end matcher asserts we matched a full greater block
 
-    let (remaining, _trailing_ws) =
+    let (remaining, post_blank) =
         maybe_consume_trailing_whitespace_if_not_exiting(context, remaining)?;
     let source = get_consumed(pre_affiliated_keywords_input, remaining);
-    Ok((remaining, (Into::<&str>::into(source), children)))
+    Ok((
+        remaining,
+        GreaterBlockBody {
+            source: Into::<&str>::into(source),
+            children,
+            contents: if contents.len() > 0 {
+                Some(Into::<&str>::into(contents))
+            } else {
+                None
+            },
+            post_blank: post_blank.map(Into::<&str>::into),
        },
    ))
 }
@@ -311,3 +334,14 @@ fn _greater_block_end<'b, 'g, 'r, 's, 'c>(
     let source = get_consumed(input, remaining);
     Ok((remaining, source))
 }
+
+#[cfg_attr(
+    feature = "tracing",
+    tracing::instrument(ret, level = "debug", skip(_context))
+)]
+pub(crate) fn leading_blank_lines_end<'b, 'g, 'r, 's, 'c>(
+    _context: RefContext<'b, 'g, 'r, 's>,
+    input: OrgSource<'s>,
+) -> Res<OrgSource<'s>, OrgSource<'s>> {
+    recognize(not(blank_line))(input)
+}
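Note: `greater_block_body` now returns the named `GreaterBlockBody` struct instead of a `(source, children)` tuple, which is what lets `center_block`, `quote_block`, and `special_block` pick up the new `contents` and `post_blank` fields without positional destructuring. A toy version of the same refactor, with simplified field types:

    // Returning a named struct instead of a widening tuple: call sites select
    // fields by name, and new fields can be added without rewriting every
    // destructuring pattern.
    struct BlockBody<'s> {
        source: &'s str,
        children: Vec<&'s str>,
        contents: Option<&'s str>,
        post_blank: Option<&'s str>,
    }

    fn parse_body(source: &str) -> BlockBody<'_> {
        BlockBody {
            source,
            children: source.lines().collect(),
            contents: (!source.is_empty()).then_some(source),
            post_blank: None,
        }
    }

    fn main() {
        let body = parse_body("line one\nline two");
        assert_eq!(body.children.len(), 2);
        assert!(body.contents.is_some());
        assert!(body.post_blank.is_none());
        assert!(!body.source.is_empty());
    }
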
@@ -18,18 +18,18 @@ use nom::sequence::tuple;
 
 use super::org_source::OrgSource;
 use super::section::section;
+use super::util::exit_matcher_parser;
 use super::util::get_consumed;
 use super::util::org_line_ending;
 use super::util::org_space;
 use super::util::org_space_or_line_ending;
 use super::util::start_of_line;
-use crate::context::parser_with_context;
+use crate::context::bind_context;
 use crate::context::ContextElement;
 use crate::context::ExitClass;
 use crate::context::ExitMatcherNode;
 use crate::context::RefContext;
 use crate::error::CustomError;
-use crate::error::MyError;
 use crate::error::Res;
 use crate::parser::object_parser::standard_set_object;
 use crate::parser::util::blank_line;
@@ -62,10 +62,12 @@ fn _heading<'b, 'g, 'r, 's>(
     let mut scheduled = None;
     let mut deadline = None;
     let mut closed = None;
-    not(|i| context.check_exit_matcher(i))(input)?;
+    not(bind_context!(exit_matcher_parser, context))(input)?;
     let (remaining, pre_headline) = headline(context, input, parent_star_count)?;
-    let section_matcher = parser_with_context!(section)(context);
-    let heading_matcher = parser_with_context!(heading(pre_headline.star_count))(context);
+    let section_matcher = bind_context!(section, context);
+    let heading_matcher = bind_context!(heading(pre_headline.star_count), context);
+    let (contents_begin, _) = opt(many0(blank_line))(remaining)?;
+    let maybe_post_blank = get_consumed(remaining, contents_begin);
     let (remaining, maybe_section) =
         opt(map(section_matcher, DocumentElement::Section))(remaining)?;
     let (remaining, _ws) = opt(tuple((start_of_line, many0(blank_line))))(remaining)?;
@@ -75,14 +77,15 @@ fn _heading<'b, 'g, 'r, 's>(
         // If the section has a planning then the timestamp values are copied to the heading.
         if let DocumentElement::Section(inner_section) = &section {
             if let Some(Element::Planning(planning)) = inner_section.children.first() {
-                scheduled = planning.scheduled.clone();
-                deadline = planning.deadline.clone();
-                closed = planning.closed.clone();
+                scheduled.clone_from(&planning.scheduled);
+                deadline.clone_from(&planning.deadline);
+                closed.clone_from(&planning.closed);
             }
         }
         children.insert(0, section);
     }
-    let remaining = if children.is_empty() {
+    let has_children = !children.is_empty();
+    let remaining = if !has_children {
         // Support empty headings
         let (remain, _ws) = many0(blank_line)(remaining)?;
         remain
@@ -91,6 +94,7 @@ fn _heading<'b, 'g, 'r, 's>(
     };
     let is_archived = pre_headline.tags.contains(&"ARCHIVE");
 
+    let contents = get_consumed(contents_begin, remaining);
     let source = get_consumed(input, remaining);
     Ok((
         remaining,
@@ -112,6 +116,16 @@ fn _heading<'b, 'g, 'r, 's>(
             scheduled,
             deadline,
             closed,
+            contents: if contents.len() > 0 {
+                Some(Into::<&str>::into(contents))
+            } else {
+                None
+            },
+            post_blank: if has_children {
+                None
+            } else {
+                Some(Into::<&str>::into(maybe_post_blank))
+            },
         },
     ))
 }
@@ -155,7 +169,7 @@ fn headline<'b, 'g, 'r, 's>(
     let (remaining, (_, (headline_level, star_count, _), _)) = tuple((
         start_of_line,
         verify(
-            parser_with_context!(headline_level)(&parser_context),
+            bind_context!(headline_level, &parser_context),
             |(_, count, _)| *count > parent_star_count,
         ),
         peek(org_space),
@@ -163,7 +177,7 @@ fn headline<'b, 'g, 'r, 's>(
 
     let (remaining, maybe_todo_keyword) = opt(tuple((
         space1,
-        parser_with_context!(heading_keyword)(&parser_context),
+        bind_context!(heading_keyword, &parser_context),
         peek(org_space_or_line_ending),
     )))(remaining)?;
 
@@ -177,9 +191,7 @@ fn headline<'b, 'g, 'r, 's>(
 
     let (remaining, maybe_title) = opt(tuple((
         space1,
-        consumed(many1(parser_with_context!(standard_set_object)(
-            &parser_context,
-        ))),
+        consumed(many1(bind_context!(standard_set_object, &parser_context))),
     )))(remaining)?;
 
     let (remaining, maybe_tags) = opt(tuple((space0, tags)))(remaining)?;
@@ -260,7 +272,7 @@ fn heading_keyword<'b, 'g, 'r, 's>(
         .iter()
         .map(String::as_str)
     {
-        let result = tag::<_, _, CustomError<_>>(todo_keyword)(input);
+        let result = tag::<_, _, CustomError>(todo_keyword)(input);
         if let Ok((remaining, ent)) = result {
             return Ok((remaining, (TodoKeywordType::Todo, ent)));
         }
@@ -270,14 +282,12 @@ fn heading_keyword<'b, 'g, 'r, 's>(
         .iter()
         .map(String::as_str)
     {
-        let result = tag::<_, _, CustomError<_>>(todo_keyword)(input);
+        let result = tag::<_, _, CustomError>(todo_keyword)(input);
         if let Ok((remaining, ent)) = result {
             return Ok((remaining, (TodoKeywordType::Done, ent)));
         }
     }
-    Err(nom::Err::Error(CustomError::MyError(MyError(
-        "NoTodoKeyword",
-    ))))
+    Err(nom::Err::Error(CustomError::Static("NoTodoKeyword")))
 }
 }
@@ -288,9 +298,9 @@ fn priority_cookie(input: OrgSource<'_>) -> Res<OrgSource<'_>, PriorityCookie> {
         tag("]"),
     ))(input)?;
     let cookie = PriorityCookie::try_from(priority_character).map_err(|_| {
-        nom::Err::Error(CustomError::MyError(MyError(
+        nom::Err::Error(CustomError::Static(
             "Failed to cast priority cookie to number.",
-        )))
+        ))
    })?;
    Ok((remaining, cookie))
 }
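Note: one small change in `_heading` swaps `scheduled = planning.scheduled.clone()` for `scheduled.clone_from(&planning.scheduled)`. `Clone::clone_from` is the in-place variant of `clone`; for buffer-backed types it may reuse the destination's existing allocation instead of allocating a fresh value:

    fn main() {
        let source = String::from("SCHEDULED: <2023-10-01 Sun>");
        let mut destination = String::with_capacity(64);

        // clone_from overwrites `destination` in place; a plain
        // `destination = source.clone()` would build a brand-new String.
        destination.clone_from(&source);
        assert_eq!(destination, source);
    }
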
@@ -38,7 +38,7 @@ where
         space0,
         alt((line_ending, eof)),
     )))(remaining)?;
-    let (remaining, _trailing_ws) =
+    let (remaining, post_blank) =
         maybe_consume_trailing_whitespace_if_not_exiting(context, remaining)?;
     let source = get_consumed(input, remaining);
     Ok((
@@ -49,6 +49,7 @@ where
             context.get_global_settings(),
             affiliated_keywords,
         ),
+        post_blank: post_blank.map(Into::<&str>::into),
     },
 ))
 }
@@ -35,7 +35,7 @@ pub(crate) fn scan_for_in_buffer_settings<'s>(
     let mut remaining = input;
     loop {
         // Skip text until possible in_buffer_setting
-        let start_of_pound = take_until::<_, _, CustomError<_>>("#+")(remaining);
+        let start_of_pound = take_until::<_, _, CustomError>("#+")(remaining);
         let start_of_pound = if let Ok((start_of_pound, _)) = start_of_pound {
             start_of_pound
         } else {
@@ -47,7 +47,7 @@ pub(crate) fn scan_for_in_buffer_settings<'s>(
         let (remain, maybe_kw) = match filtered_keyword(in_buffer_settings_key)(start_of_line) {
             Ok((remain, kw)) => (remain, Some(kw)),
             Err(_) => {
-                let end_of_line = take_until::<_, _, CustomError<_>>("\n")(start_of_pound);
+                let end_of_line = take_until::<_, _, CustomError>("\n")(start_of_pound);
                 if let Ok((end_of_line, _)) = end_of_line {
                     (end_of_line, None)
                 } else {
@@ -84,11 +84,14 @@ fn in_buffer_settings_key<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, OrgSou
     ))(input)
 }
 
-#[cfg_attr(feature = "tracing", tracing::instrument(level = "debug"))]
+#[cfg_attr(
+    feature = "tracing",
+    tracing::instrument(level = "debug", skip(original_settings))
+)]
 pub(crate) fn apply_in_buffer_settings<'g, 's, 'sf>(
     keywords: Vec<Keyword<'sf>>,
     original_settings: &'g GlobalSettings<'g, 's>,
-) -> Result<GlobalSettings<'g, 's>, String> {
+) -> Result<GlobalSettings<'g, 's>, CustomError> {
     let mut new_settings = original_settings.clone();
 
     // Todo Keywords
@@ -98,7 +101,11 @@ pub(crate) fn apply_in_buffer_settings<'g, 's, 'sf>(
         || kw.key.eq_ignore_ascii_case("typ_todo")
     }) {
         let (_, (in_progress_words, complete_words)) =
-            todo_keywords(kw.value).map_err(|err| err.to_string())?;
+            todo_keywords(kw.value).map_err(|err| match err {
+                nom::Err::Incomplete(_) => panic!("This parser does not support streaming."),
+                nom::Err::Error(e) => e,
+                nom::Err::Failure(e) => e,
+            })?;
         new_settings
             .in_progress_todo_keywords
             .extend(in_progress_words.into_iter().map(str::to_string));
@@ -112,9 +119,14 @@ pub(crate) fn apply_in_buffer_settings<'g, 's, 'sf>(
         .iter()
         .filter(|kw| kw.key.eq_ignore_ascii_case("startup"))
     {
-        let (_remaining, settings) =
-            separated_list0(space1::<&str, nom::error::Error<_>>, is_not(" \t"))(kw.value)
-                .map_err(|err: nom::Err<_>| err.to_string())?;
+        let (_remaining, settings) = separated_list0(space1::<&str, CustomError>, is_not(" \t"))(
+            kw.value,
+        )
+        .map_err(|err: nom::Err<_>| match err {
+            nom::Err::Incomplete(_) => panic!("This parser does not support streaming."),
+            nom::Err::Error(e) => e,
+            nom::Err::Failure(e) => e,
+        })?;
         if settings.contains(&"odd") {
             new_settings.odd_levels_only = HeadlineLevelFilter::Odd;
         }
@@ -128,7 +140,11 @@ pub(crate) fn apply_in_buffer_settings<'g, 's, 'sf>(
         .iter()
         .filter(|kw| kw.key.eq_ignore_ascii_case("link"))
     {
-        let (_, (link_key, link_value)) = link_template(kw.value).map_err(|e| e.to_string())?;
+        let (_, (link_key, link_value)) = link_template(kw.value).map_err(|err| match err {
+            nom::Err::Incomplete(_) => panic!("This parser does not support streaming."),
+            nom::Err::Error(e) => e,
+            nom::Err::Failure(e) => e,
+        })?;
         new_settings
             .link_templates
             .insert(link_key.to_owned(), link_value.to_owned());
@@ -139,11 +155,9 @@ pub(crate) fn apply_in_buffer_settings<'g, 's, 'sf>(
 
 /// Apply in-buffer settings that do not impact parsing and therefore can be applied after parsing.
 #[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
-pub(crate) fn apply_post_parse_in_buffer_settings<'g, 's, 'sf>(
-    document: &mut Document<'s>,
-) -> Result<(), &'static str> {
+pub(crate) fn apply_post_parse_in_buffer_settings<'g, 's, 'sf>(document: &mut Document<'s>) {
     document.category = Into::<AstNode>::into(&*document)
         .into_iter()
         .iter_all_ast_nodes()
         .filter_map(|ast_node| {
             if let AstNode::Keyword(ast_node) = ast_node {
                 if ast_node.key.eq_ignore_ascii_case("category") {
@@ -154,7 +168,6 @@ pub(crate) fn apply_post_parse_in_buffer_settings<'g, 's, 'sf>(
         })
         .last()
         .map(|kw| kw.value.to_owned());
-    Ok(())
 }
 
 #[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
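Note: `apply_in_buffer_settings` now returns `CustomError` instead of `String`, and each call site unwraps `nom::Err` with the same three-arm match. That repeated pattern can be factored into a generic helper in this shape; the panic message mirrors the one in the hunks, since a complete (non-streaming) parse can never yield `Incomplete`:

    // nom parsers return nom::Err<E>; only the Error and Failure arms carry a
    // payload worth propagating when the input is fully in memory.
    fn unwrap_parse_error<E>(err: nom::Err<E>) -> E {
        match err {
            nom::Err::Incomplete(_) => panic!("This parser does not support streaming."),
            nom::Err::Error(e) => e,
            nom::Err::Failure(e) => e,
        }
    }

    fn main() {
        let err: nom::Err<&str> = nom::Err::Error("NoTodoKeyword");
        assert_eq!(unwrap_parse_error(err), "NoTodoKeyword");
    }
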
@@ -19,7 +19,6 @@ use crate::context::ExitClass;
 use crate::context::ExitMatcherNode;
 use crate::context::RefContext;
 use crate::error::CustomError;
-use crate::error::MyError;
 use crate::error::Res;
 use crate::parser::util::exit_matcher_parser;
 use crate::parser::util::get_consumed;
@@ -39,7 +38,7 @@ pub(crate) fn inline_babel_call<'b, 'g, 'r, 's>(
     let (remaining, arguments) = argument(context, remaining)?;
     let (remaining, end_header) = opt(parser_with_context!(header)(context))(remaining)?;
     let value = get_consumed(input, remaining);
-    let (remaining, _trailing_whitespace) =
+    let (remaining, post_blank) =
         maybe_consume_object_trailing_whitespace_if_not_exiting(context, remaining)?;
     let source = get_consumed(input, remaining);
     Ok((
@@ -55,6 +54,7 @@ pub(crate) fn inline_babel_call<'b, 'g, 'r, 's>(
             None
         },
         end_header: end_header.map(Into::<&str>::into),
+        post_blank: post_blank.map(Into::<&str>::into),
     },
 ))
 }
@@ -131,9 +131,7 @@ fn _header_end<'b, 'g, 'r, 's>(
     let current_depth = input.get_bracket_depth() - starting_bracket_depth;
     if current_depth > 0 {
         // Its impossible for the next character to end the header if we're any amount of bracket deep
-        return Err(nom::Err::Error(CustomError::MyError(MyError(
-            "NoHeaderEnd",
-        ))));
+        return Err(nom::Err::Error(CustomError::Static("NoHeaderEnd")));
     }
     if current_depth < 0 {
         // This shouldn't be possible because if depth is 0 then a closing bracket should end the header.
@@ -183,9 +181,7 @@ fn _argument_end<'b, 'g, 'r, 's>(
     let current_depth = input.get_parenthesis_depth() - starting_parenthesis_depth;
     if current_depth > 0 {
         // Its impossible for the next character to end the argument if we're any amount of parenthesis deep
-        return Err(nom::Err::Error(CustomError::MyError(MyError(
-            "NoArgumentEnd",
-        ))));
+        return Err(nom::Err::Error(CustomError::Static("NoArgumentEnd")));
     }
     if current_depth < 0 {
         // This shouldn't be possible because if depth is 0 then a closing parenthesis should end the argument.
@@ -21,7 +21,6 @@ use crate::context::ExitClass;
 use crate::context::ExitMatcherNode;
 use crate::context::RefContext;
 use crate::error::CustomError;
-use crate::error::MyError;
 use crate::error::Res;
 use crate::parser::util::exit_matcher_parser;
 use crate::parser::util::get_consumed;
@@ -39,7 +38,7 @@ pub(crate) fn inline_source_block<'b, 'g, 'r, 's>(
     let (remaining, language) = lang(context, remaining)?;
     let (remaining, parameters) = opt(parser_with_context!(header)(context))(remaining)?;
     let (remaining, value) = body(context, remaining)?;
-    let (remaining, _trailing_whitespace) =
+    let (remaining, post_blank) =
         maybe_consume_object_trailing_whitespace_if_not_exiting(context, remaining)?;
     let source = get_consumed(input, remaining);
     Ok((
@@ -49,6 +48,7 @@ pub(crate) fn inline_source_block<'b, 'g, 'r, 's>(
             language: language.into(),
             parameters: parameters.map(Into::<&str>::into),
             value: value.into(),
+            post_blank: post_blank.map(Into::<&str>::into),
         },
     ))
 }
@@ -125,9 +125,7 @@ fn _header_end<'b, 'g, 'r, 's>(
     let current_depth = input.get_bracket_depth() - starting_bracket_depth;
     if current_depth > 0 {
         // Its impossible for the next character to end the header if we're any amount of bracket deep
-        return Err(nom::Err::Error(CustomError::MyError(MyError(
-            "NoHeaderEnd",
-        ))));
+        return Err(nom::Err::Error(CustomError::Static("NoHeaderEnd")));
     }
     if current_depth < 0 {
         // This shouldn't be possible because if depth is 0 then a closing bracket should end the header.
@@ -187,7 +185,7 @@ fn _body_end<'b, 'g, 'r, 's>(
     let current_depth = input.get_brace_depth() - starting_brace_depth;
     if current_depth > 0 {
         // Its impossible for the next character to end the body if we're any amount of brace deep
-        return Err(nom::Err::Error(CustomError::MyError(MyError("NoBodyEnd"))));
+        return Err(nom::Err::Error(CustomError::Static("NoBodyEnd")));
     }
     if current_depth < 0 {
         // This shouldn't be possible because if depth is 0 then a closing brace should end the body.
@@ -4,11 +4,9 @@ use nom::bytes::complete::tag;
 use nom::bytes::complete::tag_no_case;
 use nom::bytes::complete::take_while1;
 use nom::character::complete::anychar;
-use nom::character::complete::line_ending;
 use nom::character::complete::one_of;
 use nom::character::complete::space0;
 use nom::combinator::consumed;
-use nom::combinator::eof;
 use nom::combinator::map;
 use nom::combinator::not;
 use nom::combinator::peek;
@@ -22,10 +20,11 @@ use super::org_source::BracketDepth;
 use super::org_source::OrgSource;
 use super::util::get_consumed;
 use super::util::maybe_consume_trailing_whitespace_if_not_exiting;
-use crate::context::parser_with_context;
+use super::util::org_line_ending;
+use crate::context::constants::ORG_ELEMENT_AFFILIATED_KEYWORDS;
+use crate::context::constants::ORG_ELEMENT_DUAL_KEYWORDS;
 use crate::context::RefContext;
 use crate::error::CustomError;
-use crate::error::MyError;
 use crate::error::Res;
 use crate::parser::macros::element;
 use crate::parser::util::start_of_line;
@@ -50,11 +49,8 @@ fn _filtered_keyword<'s, F: Fn(OrgSource<'s>) -> Res<OrgSource<'s>, OrgSource<'s
     // TODO: When key is a member of org-element-parsed-keywords, value can contain the standard set objects, excluding footnote references.
     let (remaining, (consumed_input, (_, _, parsed_key, _))) =
         consumed(tuple((space0, tag("#+"), key_parser, tag(":"))))(input)?;
-    if let Ok((remaining, _)) = tuple((
-        space0::<OrgSource<'_>, CustomError<OrgSource<'_>>>,
-        alt((line_ending, eof)),
-    ))(remaining)
-    {
+    let (remaining, _ws) = space0(remaining)?;
+    if let Ok((remaining, _)) = org_line_ending(remaining) {
         return Ok((
             remaining,
             Keyword {
@@ -62,15 +58,13 @@ fn _filtered_keyword<'s, F: Fn(OrgSource<'s>) -> Res<OrgSource<'s>, OrgSource<'s
                 affiliated_keywords: AffiliatedKeywords::default(), // To be populated by the caller if this keyword is in a context to support affiliated keywords.
                 key: parsed_key.into(),
                 value: "",
+                post_blank: None,
             },
         ));
     }
-    let (remaining, _ws) = space0(remaining)?;
-    let (remaining, parsed_value) = recognize(many_till(
-        anychar,
-        peek(tuple((space0, alt((line_ending, eof))))),
-    ))(remaining)?;
-    let (remaining, _ws) = tuple((space0, alt((line_ending, eof))))(remaining)?;
+    let (remaining, parsed_value) =
+        recognize(many_till(anychar, peek(tuple((space0, org_line_ending)))))(remaining)?;
+    let (remaining, _ws) = tuple((space0, org_line_ending))(remaining)?;
     Ok((
         remaining,
         Keyword {
@@ -78,6 +72,7 @@ fn _filtered_keyword<'s, F: Fn(OrgSource<'s>) -> Res<OrgSource<'s>, OrgSource<'s
             affiliated_keywords: AffiliatedKeywords::default(), // To be populated by the caller if this keyword is in a context to support affiliated keywords.
             key: parsed_key.into(),
             value: parsed_value.into(),
+            post_blank: None,
         },
     ))
 }
@@ -96,21 +91,19 @@ where
     AK: IntoIterator<Item = Keyword<'s>>,
 {
     let (remaining, mut kw) = filtered_keyword(regular_keyword_key)(remaining)?;
-    let (remaining, _trailing_ws) =
+    let (remaining, post_blank) =
         maybe_consume_trailing_whitespace_if_not_exiting(context, remaining)?;
     let source = get_consumed(input, remaining);
     kw.affiliated_keywords =
         parse_affiliated_keywords(context.get_global_settings(), affiliated_keywords);
     kw.source = Into::<&str>::into(source);
+    kw.post_blank = post_blank.map(Into::<&str>::into);
     Ok((remaining, kw))
 }
 
 #[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
-pub(crate) fn affiliated_keyword<'b, 'g, 'r, 's>(
-    context: RefContext<'b, 'g, 'r, 's>,
-    input: OrgSource<'s>,
-) -> Res<OrgSource<'s>, Keyword<'s>> {
-    filtered_keyword(parser_with_context!(affiliated_key)(context))(input)
+pub(crate) fn affiliated_keyword<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, Keyword<'s>> {
+    filtered_keyword(affiliated_key)(input)
 }
 
 #[cfg_attr(
@@ -145,29 +138,18 @@ fn regular_keyword_key<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, OrgSource
 }
 
 #[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
-fn affiliated_key<'b, 'g, 'r, 's>(
-    context: RefContext<'b, 'g, 'r, 's>,
-    input: OrgSource<'s>,
-) -> Res<OrgSource<'s>, OrgSource<'s>> {
-    element!(dual_affiliated_key, context, input);
-    element!(plain_affiliated_key, context, input);
+fn affiliated_key<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, OrgSource<'s>> {
+    element!(dual_affiliated_key, input);
+    element!(plain_affiliated_key, input);
     element!(export_keyword, input);
-    Err(nom::Err::Error(CustomError::MyError(MyError(
-        "No affiliated key.",
-    ))))
+    Err(nom::Err::Error(CustomError::Static("No affiliated key.")))
 }
 
 #[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
-fn plain_affiliated_key<'b, 'g, 'r, 's>(
-    context: RefContext<'b, 'g, 'r, 's>,
-    input: OrgSource<'s>,
-) -> Res<OrgSource<'s>, OrgSource<'s>> {
-    for keyword in context.get_global_settings().element_affiliated_keywords {
+fn plain_affiliated_key<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, OrgSource<'s>> {
+    for keyword in ORG_ELEMENT_AFFILIATED_KEYWORDS {
         let result = map(
-            tuple((
-                tag_no_case::<_, _, CustomError<_>>(*keyword),
-                peek(tag(":")),
-            )),
+            tuple((tag_no_case::<_, _, CustomError>(keyword), peek(tag(":")))),
             |(key, _)| key,
         )(input);
         if let Ok((remaining, ent)) = result {
@@ -175,19 +157,14 @@ fn plain_affiliated_key<'b, 'g, 'r, 's>(
         }
     }
 
-    Err(nom::Err::Error(CustomError::MyError(MyError(
-        "NoKeywordKey",
-    ))))
+    Err(nom::Err::Error(CustomError::Static("NoKeywordKey")))
 }
 
 #[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
-fn dual_affiliated_key<'b, 'g, 'r, 's>(
-    context: RefContext<'b, 'g, 'r, 's>,
-    input: OrgSource<'s>,
-) -> Res<OrgSource<'s>, OrgSource<'s>> {
-    for keyword in context.get_global_settings().element_dual_keywords {
+fn dual_affiliated_key<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, OrgSource<'s>> {
+    for keyword in ORG_ELEMENT_DUAL_KEYWORDS {
         let result = recognize(tuple((
-            tag_no_case::<_, _, CustomError<_>>(*keyword),
+            tag_no_case::<_, _, CustomError>(keyword),
             tag("["),
             optval,
             tag("]"),
@@ -198,9 +175,7 @@ fn dual_affiliated_key<'b, 'g, 'r, 's>(
         }
     }
 
-    Err(nom::Err::Error(CustomError::MyError(MyError(
-        "NoKeywordKey",
-    ))))
+    Err(nom::Err::Error(CustomError::Static("NoKeywordKey")))
 }
 
 #[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
@@ -228,7 +203,7 @@ fn _optval_end<'s>(
         unreachable!("Exceeded optval bracket depth.")
     }
     if current_depth == 0 {
-        let close_bracket = tag::<_, _, CustomError<_>>("]")(input);
+        let close_bracket = tag::<_, _, CustomError>("]")(input);
         if close_bracket.is_ok() {
             return close_bracket;
         }
@@ -249,19 +224,11 @@ mod tests {
     use test::Bencher;
 
     use super::*;
-    use crate::context::Context;
-    use crate::context::ContextElement;
-    use crate::context::GlobalSettings;
-    use crate::context::List;
     use crate::parser::OrgSource;
 
     #[bench]
     fn bench_affiliated_keyword(b: &mut Bencher) {
         let input = OrgSource::new("#+CAPTION[*foo*]: bar *baz*");
-        let global_settings = GlobalSettings::default();
-        let initial_context = ContextElement::document_context();
-        let initial_context = Context::new(&global_settings, List::new(&initial_context));
-
-        b.iter(|| assert!(affiliated_keyword(&initial_context, input).is_ok()));
+        b.iter(|| assert!(affiliated_keyword(input).is_ok()));
     }
 }
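Note: the keyword hunks drop the per-parse `context.get_global_settings()` lookups in favor of const tables (`ORG_ELEMENT_AFFILIATED_KEYWORDS`, `ORG_ELEMENT_DUAL_KEYWORDS`), which also removes the context parameter from `affiliated_key` and friends and simplifies the benchmark. A std-only sketch of the same case-insensitive table scan; the table contents here are illustrative, not the crate's full list:

    const ORG_ELEMENT_AFFILIATED_KEYWORDS: [&str; 3] = ["caption", "header", "name"];

    // Scan the const table for a key that matches the input case-insensitively
    // and is immediately followed by a colon, like "#+CAPTION: ...".
    fn affiliated_key(input: &str) -> Option<(&str, &str)> {
        for keyword in ORG_ELEMENT_AFFILIATED_KEYWORDS {
            if let Some(head) = input.get(..keyword.len()) {
                if head.eq_ignore_ascii_case(keyword) && input[keyword.len()..].starts_with(':') {
                    return Some((head, &input[keyword.len()..]));
                }
            }
        }
        None
    }

    fn main() {
        assert_eq!(affiliated_key("CAPTION: foo"), Some(("CAPTION", ": foo")));
        assert_eq!(affiliated_key("unknown: foo"), None);
    }
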
@@ -57,7 +57,7 @@ where
     let (remaining, _end) = latex_environment_end_specialized(&parser_context, remaining)?;
     let value_end = remaining;
 
-    let (remaining, _trailing_ws) =
+    let (remaining, post_blank) =
         maybe_consume_trailing_whitespace_if_not_exiting(context, remaining)?;
     let source = get_consumed(input, remaining);
     let value = get_consumed(value_start, value_end);
@@ -70,6 +70,7 @@ where
             affiliated_keywords,
         ),
         value: value.into(),
+        post_blank: post_blank.map(Into::<&str>::into),
     },
 ))
 }
@ -17,7 +17,6 @@ use super::util::maybe_consume_object_trailing_whitespace_if_not_exiting;
|
||||
use crate::context::parser_with_context;
|
||||
use crate::context::RefContext;
|
||||
use crate::error::CustomError;
|
||||
use crate::error::MyError;
|
||||
use crate::error::Res;
|
||||
use crate::parser::util::exit_matcher_parser;
|
||||
use crate::parser::util::get_consumed;
|
||||
@ -40,7 +39,7 @@ pub(crate) fn latex_fragment<'b, 'g, 'r, 's>(
|
||||
parser_with_context!(bordered_dollar_fragment)(context),
|
||||
))(input)?;
|
||||
let value = get_consumed(input, remaining);
|
||||
let (remaining, _trailing_whitespace) =
|
||||
let (remaining, post_blank) =
|
||||
maybe_consume_object_trailing_whitespace_if_not_exiting(context, remaining)?;
|
||||
let source = get_consumed(input, remaining);
|
||||
Ok((
|
||||
@ -48,6 +47,7 @@ pub(crate) fn latex_fragment<'b, 'g, 'r, 's>(
|
||||
LatexFragment {
|
||||
source: source.into(),
|
||||
value: value.into(),
|
||||
post_blank: post_blank.map(Into::<&str>::into),
|
||||
},
|
||||
))
|
||||
}
|
||||
@@ -209,9 +209,9 @@ fn pre<'b, 'g, 'r, 's>(
 ) -> Res<OrgSource<'s>, ()> {
     let preceding_character = input.get_preceding_character();
     if let Some('$') = preceding_character {
-        return Err(nom::Err::Error(CustomError::MyError(MyError(
+        return Err(nom::Err::Error(CustomError::Static(
             "Not a valid pre character for dollar char fragment.",
-        ))));
+        )));
     }
     Ok((input, ()))
 }
@@ -283,9 +283,9 @@ fn close_border<'b, 'g, 'r, 's>(
     match preceding_character {
         Some(c) if !c.is_whitespace() && !".,;$".contains(c) => Ok((input, ())),
         _ => {
-            return Err(nom::Err::Error(CustomError::MyError(MyError(
+            return Err(nom::Err::Error(CustomError::Static(
                 "Not a valid pre character for dollar char fragment.",
-            ))));
+            )));
         }
     }
 }
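These two guards encode the border rules for `$`-delimited LaTeX fragments: the opening `$` may not directly follow another `$`, and the character before the closing `$` must be non-whitespace and none of `.,;$`. The close-border check restated as a plain predicate; the function name and `Option<char>` shape are illustrative, with `None` (nothing precedes) rejected exactly as the `match` above rejects it:

```rust
fn valid_close_border(preceding: Option<char>) -> bool {
    // Mirrors: Some(c) if !c.is_whitespace() && !".,;$".contains(c)
    matches!(preceding, Some(c) if !c.is_whitespace() && !".,;$".contains(c))
}

fn main() {
    assert!(valid_close_border(Some('x'))); // ordinary character
    assert!(!valid_close_border(Some('.'))); // forbidden punctuation
    assert!(!valid_close_border(Some(' '))); // whitespace
    assert!(!valid_close_border(None)); // nothing precedes: rejected
}
```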
@@ -27,7 +27,6 @@ use crate::context::ExitClass;
 use crate::context::ExitMatcherNode;
 use crate::context::RefContext;
 use crate::error::CustomError;
-use crate::error::MyError;
 use crate::error::Res;
 use crate::parser::object_parser::standard_set_object;
 use crate::parser::util::blank_line;
@@ -81,22 +80,28 @@ where
     let object_matcher = parser_with_context!(standard_set_object)(&parser_context);
     let exit_matcher = parser_with_context!(exit_matcher_parser)(&parser_context);
     // Check for a completely empty block
-    let (remaining, children) = match consumed(many_till(blank_line, exit_matcher))(remaining) {
-        Ok((remaining, (whitespace, (_children, _exit_contents)))) => (
-            remaining,
-            vec![Object::PlainText(PlainText {
-                source: whitespace.into(),
-            })],
-        ),
-        Err(_) => {
-            let (remaining, (children, _exit_contents)) =
-                many_till(object_matcher, exit_matcher)(remaining)?;
-            (remaining, children)
-        }
-    };
+    let (remaining, contents, children) =
+        match consumed(many_till(blank_line, exit_matcher))(remaining) {
+            Ok((remaining, (whitespace, (_children, _exit_contents)))) => (
+                remaining,
+                whitespace,
+                if whitespace.len() > 0 {
+                    vec![Object::PlainText(PlainText {
+                        source: whitespace.into(),
+                    })]
+                } else {
+                    Vec::new()
+                },
+            ),
+            Err(_) => {
+                let (remaining, (contents, (children, _exit_contents))) =
+                    consumed(many_till(object_matcher, exit_matcher))(remaining)?;
+                (remaining, contents, children)
+            }
+        };
     let (remaining, _end) = lesser_block_end_specialized(&parser_context, remaining)?;
 
-    let (remaining, _trailing_ws) =
+    let (remaining, post_blank) =
         maybe_consume_trailing_whitespace_if_not_exiting(context, remaining)?;
     let source = get_consumed(input, remaining);
     Ok((
@@ -109,6 +114,8 @@ where
             ),
             data: parameters.map(Into::<&str>::into),
             children,
+            contents: Into::<&str>::into(contents),
+            post_blank: post_blank.map(Into::<&str>::into),
         },
     ))
 }
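The rewritten arm above captures the raw block text and the parsed children in a single pass by wrapping `many_till` in nom's `consumed` combinator, which returns the input slice a parser ate alongside its normal output; it also returns an empty `children` vector for a whitespace-free empty block instead of a zero-length `PlainText` node. A free-standing illustration of the `consumed(many_till(..))` idiom; the digit/semicolon grammar is invented for the demo:

```rust
use nom::bytes::complete::tag;
use nom::character::complete::digit1;
use nom::combinator::consumed;
use nom::multi::many_till;
use nom::IResult;

// consumed(many_till(item, end)) yields (raw_span, (items, end_match)),
// mirroring how the block parser keeps `contents` and `children` together.
fn items_then_semicolon(input: &str) -> IResult<&str, (&str, (Vec<&str>, &str))> {
    consumed(many_till(digit1, tag(";")))(input)
}

fn main() {
    let (rest, (raw, (items, _end))) = items_then_semicolon("1234;tail").unwrap();
    assert_eq!(raw, "1234;"); // everything the inner parser consumed
    assert_eq!(items, vec!["1234"]);
    assert_eq!(rest, "tail");
}
```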
@@ -145,7 +152,7 @@ where
     let (remaining, contents) = parser_with_context!(text_until_exit)(&parser_context)(remaining)?;
     let (remaining, _end) = lesser_block_end_specialized(&parser_context, remaining)?;
 
-    let (remaining, _trailing_ws) =
+    let (remaining, post_blank) =
         maybe_consume_trailing_whitespace_if_not_exiting(context, remaining)?;
     let source = get_consumed(input, remaining);
     Ok((
@@ -157,6 +164,7 @@ where
                 affiliated_keywords,
             ),
             contents: contents.into(),
+            post_blank: post_blank.map(Into::<&str>::into),
         },
     ))
 }
@@ -203,10 +211,10 @@ where
     let parser_context = parser_context.with_additional_node(&contexts[1]);
     let parser_context = parser_context.with_additional_node(&contexts[2]);
 
-    let (remaining, contents) = content(&parser_context, remaining)?;
+    let (remaining, contents) = text_until_exit(&parser_context, remaining)?;
     let (remaining, _end) = lesser_block_end_specialized(&parser_context, remaining)?;
 
-    let (remaining, _trailing_ws) =
+    let (remaining, post_blank) =
         maybe_consume_trailing_whitespace_if_not_exiting(context, remaining)?;
     let source = get_consumed(input, remaining);
     let (switches, number_lines, preserve_indent, retain_labels, use_labels, label_format) = {
@@ -237,7 +245,8 @@ where
             retain_labels,
             use_labels,
             label_format,
-            contents,
+            value: Into::<&str>::into(contents),
+            post_blank: post_blank.map(Into::<&str>::into),
         },
     ))
 }
@@ -277,10 +286,10 @@ where
     let parser_context = parser_context.with_additional_node(&contexts[1]);
     let parser_context = parser_context.with_additional_node(&contexts[2]);
 
-    let (remaining, contents) = content(&parser_context, remaining)?;
+    let (remaining, contents) = text_until_exit(&parser_context, remaining)?;
     let (remaining, _end) = lesser_block_end_specialized(&parser_context, remaining)?;
 
-    let (remaining, _trailing_ws) =
+    let (remaining, post_blank) =
         maybe_consume_trailing_whitespace_if_not_exiting(context, remaining)?;
     let source = get_consumed(input, remaining);
     Ok((
@@ -293,7 +302,8 @@ where
         ),
         export_type: export_type.map(Into::<&str>::into),
         data: parameters.map(Into::<&str>::into),
-        contents,
+        value: Into::<&str>::into(contents),
+        post_blank: post_blank.map(Into::<&str>::into),
     },
 ))
 }
@@ -332,10 +342,10 @@ where
     let parser_context = context.with_additional_node(&contexts[0]);
     let parser_context = parser_context.with_additional_node(&contexts[1]);
     let parser_context = parser_context.with_additional_node(&contexts[2]);
-    let (remaining, contents) = content(&parser_context, remaining)?;
+    let (remaining, contents) = text_until_exit(&parser_context, remaining)?;
    let (remaining, _end) = lesser_block_end_specialized(&parser_context, remaining)?;
 
-    let (remaining, _trailing_ws) =
+    let (remaining, post_blank) =
         maybe_consume_trailing_whitespace_if_not_exiting(context, remaining)?;
     let source = get_consumed(input, remaining);
     let (switches, number_lines, preserve_indent, retain_labels, use_labels, label_format) = {
@@ -372,7 +382,8 @@ where
             retain_labels,
             use_labels,
             label_format,
-            contents,
+            value: Into::<&str>::into(contents),
+            post_blank: post_blank.map(Into::<&str>::into),
         },
     ))
 }
@@ -604,7 +615,7 @@ fn _example_src_switches<'s>(
         }
     }
     if !matched_a_word {
-        return Err(nom::Err::Error(CustomError::MyError(MyError("No words."))));
+        return Err(nom::Err::Error(CustomError::Static("No words.")));
     }
     let remaining = last_match_remaining;
 
@@ -651,51 +662,3 @@ fn switch_word<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, OrgSource<'s>> {
         is_not(" \t\r\n"),
     ))(input)
 }
-
-#[cfg_attr(
-    feature = "tracing",
-    tracing::instrument(ret, level = "debug", skip(context))
-)]
-pub(crate) fn content<'b, 'g, 'r, 's>(
-    context: RefContext<'b, 'g, 'r, 's>,
-    input: OrgSource<'s>,
-) -> Res<OrgSource<'s>, String> {
-    let mut ret = String::new();
-    let mut remaining = input;
-    let exit_matcher_parser = parser_with_context!(exit_matcher_parser)(context);
-    loop {
-        if exit_matcher_parser(remaining).is_ok() {
-            break;
-        }
-
-        let (remain, (pre_escape_whitespace, line)) = content_line(remaining)?;
-        if let Some(val) = pre_escape_whitespace {
-            ret.push_str(Into::<&str>::into(val));
-        }
-        ret.push_str(line.into());
-        remaining = remain;
-    }
-
-    Ok((remaining, ret))
-}
-
-#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
-fn content_line<'s>(
-    input: OrgSource<'s>,
-) -> Res<OrgSource<'s>, (Option<OrgSource<'s>>, OrgSource<'s>)> {
-    let (remaining, pre_escape_whitespace) = opt(map(
-        tuple((
-            recognize(tuple((
-                space0,
-                many_till(
-                    tag(","),
-                    peek(tuple((tag(","), alt((tag("#+"), tag("*")))))),
-                ),
-            ))),
-            tag(","),
-        )),
-        |(pre_comma, _)| pre_comma,
-    ))(input)?;
-    let (remaining, line_post_escape) = recognize(many_till(anychar, line_ending))(remaining)?;
-    Ok((remaining, (pre_escape_whitespace, line_post_escape)))
-}
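The second hunk deletes the allocating `content`/`content_line` pair outright; as the earlier hunks in this file show, example and src blocks now borrow their text through `text_until_exit` and store it as a raw `value: &str` rather than building a `String`. What `content_line` implemented was org-mode's comma escaping, where a leading comma protects a line beginning with `#+` or `*` inside a block. A deliberately simplified single-comma sketch of that rule; the deleted parser also handled runs of commas and tracked pre-escape whitespace separately:

```rust
// Simplified sketch: strip one escaping comma before "#+" or "*".
// The removed nom parser expressed the same rule with combinators.
fn unescape_content_line(line: &str) -> &str {
    if let Some(stripped) = line.strip_prefix(',') {
        if stripped.starts_with("#+") || stripped.starts_with('*') {
            return stripped;
        }
    }
    line
}

fn main() {
    assert_eq!(unescape_content_line(",#+end_src"), "#+end_src");
    assert_eq!(unescape_content_line(",* not a headline"), "* not a headline");
    assert_eq!(unescape_content_line("plain line"), "plain line");
}
```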
@@ -7,7 +7,6 @@ use nom::multi::many0;
 use super::org_source::OrgSource;
 use crate::context::RefContext;
 use crate::error::CustomError;
-use crate::error::MyError;
 use crate::error::Res;
 use crate::parser::util::get_consumed;
 use crate::types::LineBreak;
@@ -45,9 +44,9 @@ fn pre<'b, 'g, 'r, 's>(
     match preceding_character {
         // If None, we are at the start of the file
         None | Some('\\') => {
-            return Err(nom::Err::Error(CustomError::MyError(MyError(
+            return Err(nom::Err::Error(CustomError::Static(
                 "Not a valid pre character for line break.",
-            ))));
+            )));
         }
         _ => {}
     };
@@ -55,9 +54,9 @@ fn pre<'b, 'g, 'r, 's>(
     let current_line = input.text_since_line_break();
     let is_non_empty_line = current_line.chars().any(|c| !c.is_whitespace());
     if !is_non_empty_line {
-        return Err(nom::Err::Error(CustomError::MyError(MyError(
+        return Err(nom::Err::Error(CustomError::Static(
             "Not a valid pre line for line break.",
-        ))));
+        )));
     }
 
     Ok((input, ()))
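Past the error-type change, these two hunks are the guards that gate a line-break object: it must not follow a backslash (or sit at the very start of the input), and the line it ends must contain at least one non-whitespace character. The same checks restated as one plain predicate; the function signature is illustrative only:

```rust
fn valid_line_break_pre(
    preceding: Option<char>,
    current_line: &str,
) -> Result<(), &'static str> {
    // None: start of file; Some('\\'): the token would follow another backslash.
    match preceding {
        None | Some('\\') => return Err("Not a valid pre character for line break."),
        _ => {}
    }
    // The line being broken must not be blank.
    if !current_line.chars().any(|c| !c.is_whitespace()) {
        return Err("Not a valid pre line for line break.");
    }
    Ok(())
}

fn main() {
    assert!(valid_line_break_pre(Some('a'), "some text").is_ok());
    assert!(valid_line_break_pre(Some('\\'), "some text").is_err());
    assert!(valid_line_break_pre(None, "some text").is_err());
    assert!(valid_line_break_pre(Some('a'), "   \t").is_err());
}
```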
@@ -1,6 +1,7 @@
 mod affiliated_keyword;
 mod angle_link;
 mod babel_call;
+mod bullshitium;
 mod citation;
 mod citation_reference;
 mod clock;
@@ -4,7 +4,6 @@ use super::regular_link::regular_link;
 use super::subscript_and_superscript::detect_subscript_or_superscript;
 use crate::context::RefContext;
 use crate::error::CustomError;
-use crate::error::MyError;
 use crate::error::Res;
 use crate::parser::angle_link::angle_link;
 use crate::parser::citation::citation;
@@ -43,7 +42,7 @@ pub(crate) fn standard_set_object<'b, 'g, 'r, 's>(
         input,
         Object::PlainText
     );
-    Err(nom::Err::Error(CustomError::MyError(MyError("No object."))))
+    Err(nom::Err::Error(CustomError::Static("No object.")))
 }
 
 #[cfg_attr(
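Every `*_set_object` function in this file ends the same way: a chain of `element!` invocations, each trying one object parser and returning early on success, falling through to the static "No object." error. A toy reconstruction of that first-match dispatch; the macro and the two parsers are invented stand-ins, and the crate's real `element!` also threads the parser context (and tracing) through each call:

```rust
#[derive(Debug, PartialEq)]
enum Object {
    Digits(String),
    Word(String),
}

// Invented stand-in: try a parser, return early with the wrapped Object.
macro_rules! element {
    ($parser:ident, $input:expr, $variant:path) => {
        if let Ok((remaining, inner)) = $parser($input) {
            return Ok((remaining, $variant(inner)));
        }
    };
}

fn digits(input: &str) -> Result<(&str, String), &'static str> {
    let end = input.chars().take_while(|c| c.is_ascii_digit()).count();
    if end == 0 {
        return Err("no digits");
    }
    Ok((&input[end..], input[..end].to_string()))
}

fn word(input: &str) -> Result<(&str, String), &'static str> {
    let end = input.chars().take_while(|c| c.is_ascii_alphabetic()).count();
    if end == 0 {
        return Err("no word");
    }
    Ok((&input[end..], input[..end].to_string()))
}

fn standard_set_object(input: &str) -> Result<(&str, Object), &'static str> {
    element!(digits, input, Object::Digits);
    element!(word, input, Object::Word);
    Err("No object.")
}

fn main() {
    assert_eq!(
        standard_set_object("abc1"),
        Ok(("1", Object::Word("abc".to_string())))
    );
    assert_eq!(standard_set_object("?!"), Err("No object."));
}
```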
@@ -61,7 +60,7 @@ pub(crate) fn minimal_set_object<'b, 'g, 'r, 's>(
         input,
         Object::PlainText
     );
-    Err(nom::Err::Error(CustomError::MyError(MyError("No object."))))
+    Err(nom::Err::Error(CustomError::Static("No object.")))
 }
 
 #[cfg_attr(
@@ -103,7 +102,7 @@ fn standard_set_object_sans_plain_text<'b, 'g, 'r, 's>(
     element!(angle_link, context, input, Object::AngleLink);
     element!(org_macro, context, input, Object::OrgMacro);
 
-    Err(nom::Err::Error(CustomError::MyError(MyError("No object."))))
+    Err(nom::Err::Error(CustomError::Static("No object.")))
 }
 
 #[cfg_attr(
@@ -119,7 +118,7 @@ fn minimal_set_object_sans_plain_text<'b, 'g, 'r, 's>(
     element!(entity, context, input, Object::Entity);
     element!(latex_fragment, context, input, Object::LatexFragment);
     element!(text_markup, context, input);
-    Err(nom::Err::Error(CustomError::MyError(MyError("No object."))))
+    Err(nom::Err::Error(CustomError::Static("No object.")))
 }
 
 #[cfg_attr(
@@ -137,9 +136,7 @@ pub(crate) fn detect_standard_set_object_sans_plain_text<'b, 'g, 'r, 's>(
         return Ok((input, ()));
     }
 
-    return Err(nom::Err::Error(CustomError::MyError(MyError(
-        "No object detected.",
-    ))));
+    return Err(nom::Err::Error(CustomError::Static("No object detected.")));
 }
 
 #[cfg_attr(
@@ -157,9 +154,7 @@ fn detect_minimal_set_object_sans_plain_text<'b, 'g, 'r, 's>(
         return Ok((input, ()));
     }
 
-    return Err(nom::Err::Error(CustomError::MyError(MyError(
-        "No object detected.",
-    ))));
+    return Err(nom::Err::Error(CustomError::Static("No object detected.")));
 }
 
 #[cfg_attr(
@@ -182,7 +177,7 @@ pub(crate) fn regular_link_description_set_object<'b, 'g, 'r, 's>(
         input,
         Object::PlainText
     );
-    Err(nom::Err::Error(CustomError::MyError(MyError("No object."))))
+    Err(nom::Err::Error(CustomError::Static("No object.")))
 }
 
 #[cfg_attr(
@@ -205,7 +200,7 @@ fn regular_link_description_set_object_sans_plain_text<'b, 'g, 'r, 's>(
     element!(inline_babel_call, context, input, Object::InlineBabelCall);
     element!(org_macro, context, input, Object::OrgMacro);
     element!(minimal_set_object_sans_plain_text, context, input);
-    Err(nom::Err::Error(CustomError::MyError(MyError("No object."))))
+    Err(nom::Err::Error(CustomError::Static("No object.")))
 }
 
 #[cfg_attr(
@@ -223,9 +218,7 @@ fn detect_regular_link_description_set_object_sans_plain_text<'b, 'g, 'r, 's>(
         return Ok((input, ()));
     }
 
-    Err(nom::Err::Error(CustomError::MyError(MyError(
-        "No object detected.",
-    ))))
+    Err(nom::Err::Error(CustomError::Static("No object detected.")))
 }
 
 #[cfg_attr(
@@ -243,7 +236,7 @@ pub(crate) fn table_cell_set_object<'b, 'g, 'r, 's>(
         input,
         Object::PlainText
     );
-    Err(nom::Err::Error(CustomError::MyError(MyError("No object."))))
+    Err(nom::Err::Error(CustomError::Static("No object.")))
 }
 
 #[cfg_attr(
@@ -271,7 +264,7 @@ fn table_cell_set_object_sans_plain_text<'b, 'g, 'r, 's>(
     element!(target, context, input, Object::Target);
     element!(timestamp, context, input, Object::Timestamp);
     element!(minimal_set_object_sans_plain_text, context, input);
-    Err(nom::Err::Error(CustomError::MyError(MyError("No object."))))
+    Err(nom::Err::Error(CustomError::Static("No object.")))
 }
 
 #[cfg_attr(
@@ -289,7 +282,5 @@ fn detect_table_cell_set_object_sans_plain_text<'b, 'g, 'r, 's>(
         return Ok((input, ()));
     }
 
-    return Err(nom::Err::Error(CustomError::MyError(MyError(
-        "No object detected.",
-    ))));
+    return Err(nom::Err::Error(CustomError::Static("No object detected.")));
 }
Some files were not shown because too many files have changed in this diff.