273 Commits

Author SHA1 Message Date
Tom Alexander
482d5ecfa3 Switch to local-path-provisioner.
Some checks failed
format Build format has succeeded
clippy Build clippy has failed
rust-test Build rust-test has succeeded
foreign-document-test Build foreign-document-test has succeeded
2025-08-31 19:51:03 -04:00
Tom Alexander
84b8ddb582 Merge branch 'add_jump_to_line_number' 2025-02-01 21:55:43 -05:00
Tom Alexander
113bb5888a Add a test with a tramp link.
Some checks failed
rust-test Build rust-test has started
format Build format has succeeded
clippy Build clippy has failed
2025-02-01 19:19:05 -05:00
Tom Alexander
bf5fe6920b Add a test for jump to line number.
Some checks failed
format Build format has succeeded
clippy Build clippy has failed
rust-test Build rust-test has failed
2025-02-01 18:42:10 -05:00
Tom Alexander
4b52ed0d2a Fix clippy lint.
Some checks failed
format Build format has succeeded
clippy Build clippy has failed
foreign-document-test Build foreign-document-test has succeeded
rust-test Build rust-test has succeeded
2024-10-21 00:25:45 -04:00
Tom Alexander
d2c558ccfa Merge branch 'buildkit'
Some checks failed
format Build format has succeeded
clippy Build clippy has failed
rust-test Build rust-test has succeeded
foreign-document-test Build foreign-document-test has succeeded
2024-10-21 00:13:02 -04:00
Tom Alexander
a01f78b510 Update dockerfiles to take advantage of BuildKit.
Some checks failed
format Build format has succeeded
clippy Build clippy has failed
rust-test Build rust-test has succeeded
2024-10-20 23:13:07 -04:00
Tom Alexander
d80b473fae Switch to using BuildKit instead of Kaniko to build docker images. 2024-10-20 22:55:22 -04:00
Tom Alexander
e6b4bc3d94 Merge branch 'webhook_bridge'
All checks were successful
format Build format has succeeded
clippy Build clippy has succeeded
foreign-document-test Build foreign-document-test has succeeded
rust-test Build rust-test has succeeded
2024-09-30 17:34:52 -04:00
Tom Alexander
c6cde8db74 Switch to using webhook_bridge instead of lighthouse for triggering the CI. 2024-09-30 17:33:54 -04:00
Tom Alexander
841a348dd0 Publish version 0.1.16.
All checks were successful
format Build format has succeeded
rust-test Build rust-test has succeeded
build-organic Build build-organic has succeeded
clippy Build clippy has succeeded
rust-foreign-document-test Build rust-foreign-document-test has succeeded
2024-04-11 21:34:22 -04:00
Tom Alexander
b46fae331b Fix clippy errors.
All checks were successful
format Build format has succeeded
clippy Build clippy has succeeded
build-organic Build build-organic has succeeded
rust-foreign-document-test Build rust-foreign-document-test has succeeded
rust-test Build rust-test has succeeded
2024-04-11 21:03:50 -04:00
Tom Alexander
7223e08df3 Merge branch 'fix_docker'
Some checks failed
format Build format has failed
clippy Build clippy has failed
rust-foreign-document-test Build rust-foreign-document-test has succeeded
rust-test Build rust-test has succeeded
build-organic Build build-organic has succeeded
2024-04-11 20:24:06 -04:00
Tom Alexander
8321f83dac Inline the foreign document test. 2024-04-11 20:21:50 -04:00
Tom Alexander
bd441a0293 Serialize the build job to try to reduce disruption on the cluster.
Some checks failed
rust-test Build rust-test has failed
format Build format has succeeded
clippy Build clippy has failed
build-organic Build build-organic has succeeded
2024-04-10 23:41:47 -04:00
Tom Alexander
f5a07e0d70 Reduce memory requests to make builds less disruptive.
Some checks failed
rust-test Build rust-test has failed
format Build format has succeeded
clippy Build clippy has failed
build-organic Build build-organic has succeeded
2024-04-10 23:25:06 -04:00
Tom Alexander
9d750ed5e1 Fix workflows for new targets.
Some checks failed
rust-test Build rust-test has failed
format Build format has succeeded
clippy Build clippy has failed
build-organic Build build-organic has succeeded
2024-04-10 22:48:04 -04:00
Tom Alexander
9f111fe445 Rework the makefiles.
Some checks failed
clippy Build clippy has failed
build-organic Build build-organic has failed
format Build format has failed
2024-04-10 20:48:11 -04:00
Tom Alexander
a4e433dab1 Inline build pipeline. 2024-04-06 21:44:54 -04:00
Tom Alexander
4e9f1e4fac Inline the format pipeline.
Some checks failed
rust-foreign-document-test Build rust-foreign-document-test has failed
rust-test Build rust-test has failed
rust-build Build rust-build has failed
format Build format has succeeded
clippy Build clippy has failed
2024-04-06 12:00:07 -04:00
Tom Alexander
4dee130873 Add cranelift.
Some checks failed
rust-test Build rust-test has failed
rust-foreign-document-test Build rust-foreign-document-test has failed
rust-build Build rust-build has failed
clippy Build clippy has failed
2024-04-06 11:45:28 -04:00
Tom Alexander
8e712532e1 Add an organic_development image.
Some checks failed
rust-foreign-document-test Build rust-foreign-document-test has failed
rust-test Build rust-test has failed
rust-build Build rust-build has failed
clippy Build clippy has failed
This image will be shared by CI jobs rather than having a separate image for each, mirroring the developments I've done in natter.
2024-04-06 11:39:47 -04:00
Tom Alexander
4b85236c5f Inline clippy pipeline spec.
Some checks failed
rust-foreign-document-test Build rust-foreign-document-test has failed
rust-test Build rust-test has failed
clippy Build clippy has failed
rust-build Build rust-build has failed
2024-04-06 11:17:54 -04:00
Tom Alexander
66f003e6fd Update docker images to latest alpine.
Some checks failed
rust-foreign-document-test Build rust-foreign-document-test has failed
rust-test Build rust-test has failed
clippy Build clippy has failed
rust-build Build rust-build has failed
2024-04-06 10:25:22 -04:00
Tom Alexander
b35a2d5f5a Fix debug assert.
Some checks failed
rustfmt Build rustfmt has failed
rust-foreign-document-test Build rust-foreign-document-test has failed
rust-test Build rust-test has failed
clippy Build clippy has failed
rust-build Build rust-build has failed
2024-04-06 10:12:06 -04:00
Tom Alexander
320b5f8568 Publish version 0.1.15.
All checks were successful
rust-build Build rust-build has succeeded
rust-test Build rust-test has succeeded
clippy Build clippy has succeeded
rustfmt Build rustfmt has succeeded
rust-foreign-document-test Build rust-foreign-document-test has succeeded
2024-01-28 17:12:45 -05:00
Tom Alexander
99b2af6c99 Fix clippy. 2024-01-28 17:11:18 -05:00
Tom Alexander
6e71acdb7d Update README.
Some checks failed
clippy Build clippy has failed
rustfmt Build rustfmt has succeeded
rust-foreign-document-test Build rust-foreign-document-test has succeeded
rust-build Build rust-build has succeeded
rust-test Build rust-test has succeeded
2024-01-28 14:25:57 -05:00
Tom Alexander
8406d37991 Switch to using JSON for wasm.
Some checks failed
rust-build Build rust-build has failed
clippy Build clippy has failed
rustfmt Build rustfmt has succeeded
rust-foreign-document-test Build rust-foreign-document-test has succeeded
rust-test Build rust-test has succeeded
serde_wasm_bindgen was silently dropping many attributes (I suspect it is triggered by serde flatten) so this switches to serializing to JSON for passing values from wasm to js.
2024-01-27 16:13:17 -05:00
Tom Alexander
64bb597908 Build bundler wasm target by default. 2024-01-26 21:17:40 -05:00
Tom Alexander
068864ea87 Publish version 0.1.14.
All checks were successful
clippy Build clippy has succeeded
rustfmt Build rustfmt has succeeded
rust-foreign-document-test Build rust-foreign-document-test has succeeded
rust-build Build rust-build has succeeded
rust-test Build rust-test has succeeded
2024-01-03 23:59:51 -05:00
Tom Alexander
03a3ddbd63 Merge branch 'wasm'
All checks were successful
clippy Build clippy has succeeded
rustfmt Build rustfmt has succeeded
rust-foreign-document-test Build rust-foreign-document-test has succeeded
rust-build Build rust-build has succeeded
rust-test Build rust-test has succeeded
2024-01-03 23:56:35 -05:00
Tom Alexander
122adee23b Hide the wasm module.
All checks were successful
clippy Build clippy has succeeded
rust-foreign-document-test Build rust-foreign-document-test has succeeded
rust-build Build rust-build has succeeded
rust-test Build rust-test has succeeded
2024-01-03 23:38:04 -05:00
Tom Alexander
556afecbb8 Hide the util module. 2024-01-03 23:04:47 -05:00
Tom Alexander
e4407cbdd1 Hide the event_count module.
All checks were successful
clippy Build clippy has succeeded
rust-foreign-document-test Build rust-foreign-document-test has succeeded
rust-build Build rust-build has succeeded
rust-test Build rust-test has succeeded
By placing the code for the parse executable inside a module inside the organic library, we only need to expose the entrypoint publicly rather than all functions it calls. This hides the event_count module, but I will be expanding the practice to the rest of the code base shortly. This is important for not inadvertently promising stability w.r.t. semver for essentially internal functions for development tools.

It was the parse binary, not compare.
2024-01-03 21:17:44 -05:00
Tom Alexander
f57d60dab0 Add a doc target to the Makefile. 2024-01-03 19:55:22 -05:00
Tom Alexander
0aa3939a75 Format. 2024-01-01 18:34:10 -05:00
Tom Alexander
52cb81e75e Cleanup.
All checks were successful
clippy Build clippy has succeeded
rust-foreign-document-test Build rust-foreign-document-test has succeeded
rust-build Build rust-build has succeeded
rust-test Build rust-test has succeeded
2023-12-31 12:02:02 -05:00
Tom Alexander
945121202d Remove wasm_test's dependency on compare module.
All checks were successful
clippy Build clippy has succeeded
rust-foreign-document-test Build rust-foreign-document-test has succeeded
rust-build Build rust-build has succeeded
rust-test Build rust-test has succeeded
2023-12-31 11:11:25 -05:00
Tom Alexander
f4e0dddd9d Fix clippy.
All checks were successful
clippy Build clippy has succeeded
rust-foreign-document-test Build rust-foreign-document-test has succeeded
rust-build Build rust-build has succeeded
rust-test Build rust-test has succeeded
2023-12-30 23:14:40 -05:00
Tom Alexander
6b62176fd0 Run cargo fix.
Some checks failed
clippy Build clippy has failed
rust-foreign-document-test Build rust-foreign-document-test has succeeded
rust-build Build rust-build has succeeded
rust-test Build rust-test has succeeded
2023-12-30 22:22:32 -05:00
Tom Alexander
44483b4d54 Break util up into modules. 2023-12-30 22:19:16 -05:00
Tom Alexander
48d3de77fe Move elisp fact to util. 2023-12-30 21:37:25 -05:00
Tom Alexander
680b176501 Fix src block value.
Some checks failed
clippy Build clippy has failed
rust-foreign-document-test Build rust-foreign-document-test has succeeded
rust-build Build rust-build has failed
rust-test Build rust-test has succeeded
2023-12-30 21:30:08 -05:00
Tom Alexander
dc0338e978 Handle nil in object tree. 2023-12-30 21:28:25 -05:00
Tom Alexander
ff3e0a50af Implement dynamic block.
Some checks failed
clippy Build clippy has failed
rust-foreign-document-test Build rust-foreign-document-test has succeeded
rust-build Build rust-build has failed
rust-test Build rust-test has failed
2023-12-30 20:56:35 -05:00
Tom Alexander
03c8c07fe0 Implement quote block. 2023-12-30 20:53:20 -05:00
Tom Alexander
3a6fc5b669 Support noop on all token types. 2023-12-30 20:50:28 -05:00
Tom Alexander
d258cdb839 Support null vs noop comparison. 2023-12-30 20:47:17 -05:00
Tom Alexander
aa5629354e Implement special block. 2023-12-30 20:43:01 -05:00
Tom Alexander
efc4a04829 Implement center block. 2023-12-30 20:38:08 -05:00
Tom Alexander
dd611ea64a Fix plain list item. 2023-12-30 20:35:27 -05:00
Tom Alexander
4bd5f3bec7 Implement node property. 2023-12-30 19:01:07 -05:00
Tom Alexander
c2b3509b6a Implement property drawer. 2023-12-30 18:59:52 -05:00
Tom Alexander
7f3f5fb889 Implement table cell.
Some checks failed
clippy Build clippy has failed
rust-foreign-document-test Build rust-foreign-document-test has succeeded
rust-build Build rust-build has failed
rust-test Build rust-test has failed
2023-12-30 18:52:52 -05:00
Tom Alexander
e0fbf17226 Implement table row. 2023-12-30 18:52:51 -05:00
Tom Alexander
4e18cbafba Implement table. 2023-12-30 18:52:51 -05:00
Tom Alexander
46c36d7f3e Implement babel call. 2023-12-30 18:15:58 -05:00
Tom Alexander
c46a935cfc Implement clock.
Some checks failed
clippy Build clippy has failed
rust-foreign-document-test Build rust-foreign-document-test has succeeded
rust-build Build rust-build has failed
rust-test Build rust-test has failed
2023-12-30 18:12:09 -05:00
Tom Alexander
f50415cb32 Implement drawer. 2023-12-30 18:05:46 -05:00
Tom Alexander
4f1a151e97 Implement diary sexp. 2023-12-30 18:03:57 -05:00
Tom Alexander
c8e3fdba51 Implement horizontal rule. 2023-12-30 18:02:38 -05:00
Tom Alexander
4b3fc20c62 Fix order of reading optional pair values from elisp. 2023-12-30 18:00:26 -05:00
Tom Alexander
3131f8ac64 Implement example block and export block. 2023-12-30 17:55:56 -05:00
Tom Alexander
60a4835590 Implement comment block. 2023-12-30 17:44:32 -05:00
Tom Alexander
172d72aa46 Implement src block.
Some checks failed
clippy Build clippy has failed
rust-foreign-document-test Build rust-foreign-document-test has succeeded
rust-build Build rust-build has failed
rust-test Build rust-test has failed
2023-12-30 17:40:15 -05:00
Tom Alexander
b4fcc6500b Implement verse block. 2023-12-30 16:47:24 -05:00
Tom Alexander
ddb6f31562 Implement angle link. 2023-12-30 16:41:55 -05:00
Tom Alexander
dc080b30fc Implement citation reference. 2023-12-30 16:35:01 -05:00
Tom Alexander
9901e17437 Implement citation. 2023-12-30 16:33:02 -05:00
Tom Alexander
ea000894f0 Implement entity. 2023-12-30 16:24:51 -05:00
Tom Alexander
e7742b529a Implement export snippet.
Some checks failed
clippy Build clippy has failed
rust-foreign-document-test Build rust-foreign-document-test has succeeded
rust-build Build rust-build has failed
rust-test Build rust-test has failed
2023-12-30 16:18:08 -05:00
Tom Alexander
8eba0c4923 Implement footnote definition. 2023-12-30 16:13:54 -05:00
Tom Alexander
e0c0070a13 Implement footnote reference. 2023-12-30 16:05:41 -05:00
Tom Alexander
65ce116998 Implement inline babel call. 2023-12-30 15:52:48 -05:00
Tom Alexander
e348e7d4e3 Implement inline source block. 2023-12-30 13:13:35 -05:00
Tom Alexander
492090470c Implement latex environment. 2023-12-30 13:07:16 -05:00
Tom Alexander
3ec900c8df Implement latex fragment.
Some checks failed
clippy Build clippy has failed
rust-build Build rust-build has failed
rust-foreign-document-test Build rust-foreign-document-test has succeeded
rust-test Build rust-test has failed
2023-12-30 13:00:07 -05:00
Tom Alexander
d0a008ed22 Implement org macro. 2023-12-30 13:00:07 -05:00
Tom Alexander
f2292f1c07 Implement target. 2023-12-30 12:22:23 -05:00
Tom Alexander
44392cfcca Implement radio target. 2023-12-30 12:17:04 -05:00
Tom Alexander
110630d230 Implement radio link and regular link. 2023-12-30 12:14:03 -05:00
Tom Alexander
ebe12d96c1 Implement subscript and superscript.
Some checks failed
clippy Build clippy has failed
rust-foreign-document-test Build rust-foreign-document-test has succeeded
rust-build Build rust-build has failed
rust-test Build rust-test has failed
2023-12-29 23:46:47 -05:00
Tom Alexander
24c8ac8e21 Implement all the text markup.
Some checks failed
clippy Build clippy has failed
rust-foreign-document-test Build rust-foreign-document-test has succeeded
rust-build Build rust-build has failed
rust-test Build rust-test has failed
2023-12-29 23:41:15 -05:00
Tom Alexander
259ad6e242 Implement line break. 2023-12-29 23:27:37 -05:00
Tom Alexander
dd1f7c7777 Support a no-op for headline pre-blank.
Some checks failed
clippy Build clippy has failed
rust-build Build rust-build has failed
rust-foreign-document-test Build rust-foreign-document-test has succeeded
rust-test Build rust-test has failed
2023-12-29 23:21:30 -05:00
Tom Alexander
c1b471208d Implement plain list item.
Some checks failed
clippy Build clippy has failed
rust-foreign-document-test Build rust-foreign-document-test has succeeded
rust-build Build rust-build has failed
rust-test Build rust-test has failed
2023-12-29 23:06:45 -05:00
Tom Alexander
606bab9e6d Fix handling of optval. 2023-12-29 22:58:32 -05:00
Tom Alexander
0edf5620a2 Implement plain list. 2023-12-29 22:04:34 -05:00
Tom Alexander
cdf87641c5 Implement comment. 2023-12-29 21:59:45 -05:00
Tom Alexander
eb2995dd3b Support list with empty string as only element for empty list. 2023-12-29 21:56:31 -05:00
Tom Alexander
cd6a64c015 Implement keyword. 2023-12-29 21:36:52 -05:00
Tom Alexander
a4a83d047d Fix node name getting chopped off.
Some checks failed
clippy Build clippy has failed
rust-foreign-document-test Build rust-foreign-document-test has succeeded
rust-build Build rust-build has failed
rust-test Build rust-test has failed
2023-12-29 21:33:17 -05:00
Tom Alexander
a4414369ce Remove unnecessary additional properties in the already-implemented types. 2023-12-29 21:04:31 -05:00
Tom Alexander
83e4b72307 Implement timestamp. 2023-12-29 20:55:01 -05:00
Tom Alexander
34b3e4fa7b Implement statistics cookie. 2023-12-29 20:26:12 -05:00
Tom Alexander
c0e879dc1e Implement headline. 2023-12-29 20:26:11 -05:00
Tom Alexander
fa31b001f4 Implement fixed width area. 2023-12-29 19:21:35 -05:00
Tom Alexander
0897061ff6 Add wasm tests to the CI. 2023-12-29 19:07:07 -05:00
Tom Alexander
28a3e1bc7b Implement bold. 2023-12-29 18:56:29 -05:00
Tom Alexander
3fd3d20722 Merge branch 'test_wasm_json' into wasm
Some checks failed
clippy Build clippy has failed
rust-build Build rust-build has failed
rust-foreign-document-test Build rust-foreign-document-test has succeeded
rust-test Build rust-test has succeeded
2023-12-29 18:54:58 -05:00
Tom Alexander
90735586b5 Add special case for object trees. 2023-12-29 18:54:41 -05:00
Tom Alexander
78befc7665 Remove old code. 2023-12-29 17:31:14 -05:00
Tom Alexander
ef549d3b19 Compare quoted strings. 2023-12-29 17:29:13 -05:00
Tom Alexander
777c756a7f Compare plain text AST nodes. 2023-12-29 17:24:38 -05:00
Tom Alexander
037caf369c Standardize parameter order. 2023-12-29 16:56:02 -05:00
Tom Alexander
54085b5833 Implement compare optional pair. 2023-12-29 16:51:52 -05:00
Tom Alexander
2bfa8e59e7 Add code to compare children. 2023-12-29 16:06:07 -05:00
Tom Alexander
5d31db39a4 Remove some underscores from wasm schema to match elisp. 2023-12-29 15:41:41 -05:00
Tom Alexander
adcd0de7e4 Compare standard properties. 2023-12-29 15:38:18 -05:00
Tom Alexander
c2f9789a64 Placeholder for comparing quoted strings. 2023-12-29 15:09:54 -05:00
Tom Alexander
579cbb5d11 Switch everything over to the new to_wasm macro. 2023-12-29 15:03:36 -05:00
Tom Alexander
cad2be43bf Implement a new to_wasm macro that uses the WasmAstNodeWrapper. 2023-12-29 14:06:10 -05:00
Tom Alexander
a0a4f0eb90 Remove lifetimes from wasm ast nodes. 2023-12-29 12:49:43 -05:00
Tom Alexander
9f4f8e79ce Implement a wrapper type for AST nodes.
This is to make it impossible to have a collision for attribute names that are real attributes vs attributes I've added for structure (like children and ast_node).
2023-12-29 11:58:46 -05:00
Tom Alexander
77e0dbb42e Start working on a version of compare based on json values.
This will be a better test because it will be testing that what we export to json is equivalent to the elisp AST generated from emacs. Because of these tests, we could also confidently convert the wasm structure to elisp.
2023-12-29 11:37:30 -05:00
Tom Alexander
eff5cdbf40 Flatten some structures.
Some checks failed
clippy Build clippy has failed
rust-foreign-document-test Build rust-foreign-document-test has succeeded
rust-build Build rust-build has succeeded
rust-test Build rust-test has succeeded
2023-12-29 10:04:59 -05:00
Tom Alexander
eef3571299 Add compare logic for optional pair.
Some checks failed
clippy Build clippy has failed
rust-foreign-document-test Build rust-foreign-document-test has succeeded
rust-build Build rust-build has succeeded
rust-test Build rust-test has succeeded
2023-12-27 21:23:06 -05:00
Tom Alexander
f227d8405e Implement compare for list of quoted strings.
Some checks failed
clippy Build clippy has failed
rust-build Build rust-build has succeeded
rust-foreign-document-test Build rust-foreign-document-test has succeeded
rust-test Build rust-test has succeeded
2023-12-27 21:00:12 -05:00
Tom Alexander
9520e5814b Add conversion for affiliated keywords to wasm additional properties. 2023-12-27 20:36:35 -05:00
Tom Alexander
28ad4fd046 Add conversion to WasmAstNode for wasm Objects. 2023-12-27 19:53:07 -05:00
Tom Alexander
7626a69fa1 Add default implementations for WasmElispCompare. 2023-12-27 19:42:45 -05:00
Tom Alexander
121c0ce516 Move the logic functions into their own module. 2023-12-27 19:22:43 -05:00
Tom Alexander
5a64db98fe Move wasm diff structs to their own module. 2023-12-27 19:15:39 -05:00
Tom Alexander
abfae9c6c0 Compare section.
Some checks failed
clippy Build clippy has failed
rust-foreign-document-test Build rust-foreign-document-test has succeeded
rust-test Build rust-test has succeeded
rust-build Build rust-build has succeeded
2023-12-27 19:10:43 -05:00
Tom Alexander
5272e2f1b4 Start adding paragraph. 2023-12-27 18:47:59 -05:00
Tom Alexander
90d4b11922 Switch to a formatted print of the wasm compare status.
Some checks failed
clippy Build clippy has failed
rust-build Build rust-build has succeeded
rust-foreign-document-test Build rust-foreign-document-test has succeeded
rust-test Build rust-test has succeeded
2023-12-27 18:39:10 -05:00
Tom Alexander
d552ef6569 Compare the additional properties. 2023-12-27 18:20:23 -05:00
Tom Alexander
f050e9b6a8 Taking into account additional property names but not comparing their values. 2023-12-27 18:01:56 -05:00
Tom Alexander
a5e108bc37 Compare the standard properties.
Some checks failed
clippy Build clippy has failed
rust-foreign-document-test Build rust-foreign-document-test has succeeded
rust-build Build rust-build has succeeded
rust-test Build rust-test has succeeded
2023-12-27 17:07:42 -05:00
Tom Alexander
58290515b5 Enable child checking. 2023-12-27 16:47:02 -05:00
Tom Alexander
423f65046e Record the property comparisons. 2023-12-27 16:40:55 -05:00
Tom Alexander
badeaf8246 Add compare for document category. 2023-12-27 16:34:04 -05:00
Tom Alexander
d38100581c Add a script to run the wasm test inside docker. 2023-12-27 16:32:06 -05:00
Tom Alexander
f4eff5ca56 Fix wasm build.
Some checks failed
clippy Build clippy has failed
rust-build Build rust-build has succeeded
rust-foreign-document-test Build rust-foreign-document-test has succeeded
rust-test Build rust-test has succeeded
2023-12-27 16:00:16 -05:00
Tom Alexander
5b02c21ebf Progress on comparing properties in the wasm_compare macro. 2023-12-27 15:58:31 -05:00
Tom Alexander
5f1668702a Starting the wasm_compare macro.
Some checks failed
clippy Build clippy has failed
rust-foreign-document-test Build rust-foreign-document-test has succeeded
rust-build Build rust-build has failed
rust-test Build rust-test has succeeded
2023-12-27 15:38:30 -05:00
Tom Alexander
1faaeeebf1 Simplify wasm diff result types.
Some checks failed
clippy Build clippy has failed
rust-foreign-document-test Build rust-foreign-document-test has succeeded
rust-build Build rust-build has succeeded
rust-test Build rust-test has succeeded
2023-12-27 14:19:25 -05:00
Tom Alexander
20a7c89084 Improving WasmElispCompare. 2023-12-27 13:21:20 -05:00
Tom Alexander
e83417b243 Introducing a trait for running compares.
This should enable us to invoke compares without needing a reference ast node type.
2023-12-27 12:38:21 -05:00
Tom Alexander
36b80dc093 Separate out rust parsing step to support references to values stored in the parsed state.
Some checks failed
clippy Build clippy has failed
rust-foreign-document-test Build rust-foreign-document-test has succeeded
rust-build Build rust-build has succeeded
rust-test Build rust-test has succeeded
2023-12-27 12:24:21 -05:00
Tom Alexander
1812b1a56e Remove phantom data. 2023-12-27 12:24:21 -05:00
Tom Alexander
1a70b3d2c0 Add a lifetime for data in the parsed result but not from the source. 2023-12-27 12:24:21 -05:00
Tom Alexander
abf066701e Add category and path to WasmDocument. 2023-12-27 11:31:35 -05:00
Tom Alexander
4984ea4179 More of the test structure. 2023-12-27 11:10:40 -05:00
Tom Alexander
3cb251ea6c Move terminal colors to the shared util module. 2023-12-27 10:57:40 -05:00
Tom Alexander
4bfea41291 Add more structure to the wasm compare. 2023-12-27 10:52:59 -05:00
Tom Alexander
99376515ef Invoking wasm_compare_document.
Some checks failed
clippy Build clippy has failed
rust-foreign-document-test Build rust-foreign-document-test has succeeded
rust-build Build rust-build has succeeded
rust-test Build rust-test has succeeded
2023-12-27 09:31:54 -05:00
Tom Alexander
23f4ba4205 Serialize to wasm during wasm compare.
Some checks failed
clippy Build clippy has failed
rust-foreign-document-test Build rust-foreign-document-test has succeeded
rust-build Build rust-build has failed
rust-test Build rust-test has succeeded
2023-12-27 08:57:56 -05:00
Tom Alexander
55ad136283 Fix imports for wasm. 2023-12-27 08:49:34 -05:00
Tom Alexander
c717541099 Move the parsing of the elisp to the util module. 2023-12-27 08:46:18 -05:00
Tom Alexander
c2e921c2dc Move wasm test to a top-level module.
For some unknown reason, this makes rust-analyzer not angry.
2023-12-27 08:42:13 -05:00
Tom Alexander
e499169f0e Fix imports for wasm_test.
Some checks failed
clippy Build clippy has failed
rust-build Build rust-build has failed
rust-foreign-document-test Build rust-foreign-document-test has succeeded
rust-test Build rust-test has succeeded
2023-12-27 08:37:34 -05:00
Tom Alexander
84c088df67 Add wasm targets to the build test in the CI.
Some checks failed
clippy Build clippy has failed
rust-foreign-document-test Build rust-foreign-document-test has succeeded
rust-build Build rust-build has succeeded
rust-test Build rust-test has succeeded
2023-12-27 08:04:03 -05:00
Tom Alexander
f210f95f99 Use a temporary folder for the builds. 2023-12-26 21:23:20 -05:00
Tom Alexander
17b81c7c72 Add a script to build every possible feature combination. 2023-12-26 21:05:40 -05:00
Tom Alexander
2911fce7cc Put util under the library. 2023-12-26 20:18:41 -05:00
Tom Alexander
e622d9fa6b Remove the old implementation of print_versions.
Some checks failed
clippy Build clippy has failed
rust-foreign-document-test Build rust-foreign-document-test has succeeded
rust-build Build rust-build has succeeded
rust-test Build rust-test has succeeded
2023-12-26 19:15:02 -05:00
Tom Alexander
8186fbb8b3 Move print_versions into a util crate. 2023-12-26 19:06:12 -05:00
Tom Alexander
68ccff74fa Outline for the wasm compare function. 2023-12-26 18:55:28 -05:00
Tom Alexander
9a13cb72c6 Make the wasm test binary async. 2023-12-25 14:32:01 -05:00
Tom Alexander
65abaa332f Separate out the wasm test into its own feature/binary. 2023-12-25 13:12:32 -05:00
Tom Alexander
67e5829fd9 Populating document's children. 2023-12-25 12:55:48 -05:00
Tom Alexander
995b41e697 Remove deserialize to support borrows. 2023-12-25 12:42:38 -05:00
Tom Alexander
eb51bdfe2f Add original field name to wasm macro. 2023-12-25 12:32:35 -05:00
Tom Alexander
bbb9ec637a Add code to test the wasm code path without actually dropping into wasm. 2023-12-25 12:14:50 -05:00
Tom Alexander
dc012b49f5 Add a generic WasmAstNode enum. 2023-12-25 11:51:39 -05:00
Tom Alexander
13863a68f7 Add placeholders for all the wasm ast nodes. 2023-12-25 11:33:43 -05:00
Tom Alexander
2962f76c81 Add lifetime to wasm objects. 2023-12-25 11:19:09 -05:00
Tom Alexander
b9b3ef6e74 Populate standard properties. 2023-12-25 10:47:10 -05:00
Tom Alexander
310ab2eab2 Add standard properties to wasm. 2023-12-24 15:26:45 -05:00
Tom Alexander
53320070da Define a wasm document. 2023-12-24 15:17:41 -05:00
Tom Alexander
2d5593681f Start defining the return type. 2023-12-24 13:02:34 -05:00
Tom Alexander
b3f97dbb40 Add wasm-bindgen. 2023-12-24 00:59:41 -05:00
Tom Alexander
a48d76321e Building basic wasm. 2023-12-24 00:47:32 -05:00
Tom Alexander
59222c58b1 Publish version 0.1.13.
All checks were successful
rustfmt Build rustfmt has succeeded
clippy Build clippy has succeeded
rust-build Build rust-build has succeeded
rust-foreign-document-test Build rust-foreign-document-test has succeeded
rust-test Build rust-test has succeeded
2023-12-15 21:10:21 -05:00
Tom Alexander
4d95a7f244 Merge branch 'post_blank'
All checks were successful
clippy Build clippy has succeeded
rustfmt Build rustfmt has succeeded
rust-foreign-document-test Build rust-foreign-document-test has succeeded
rust-build Build rust-build has succeeded
rust-test Build rust-test has succeeded
2023-12-15 20:11:20 -05:00
Tom Alexander
5a8159eed7 Fix clippy.
All checks were successful
clippy Build clippy has succeeded
rust-build Build rust-build has succeeded
rust-test Build rust-test has succeeded
rust-foreign-document-test Build rust-foreign-document-test has succeeded
2023-12-15 19:57:35 -05:00
Tom Alexander
e24fcb9ded Add dummy values for new fields for plaintext. 2023-12-15 19:54:03 -05:00
Tom Alexander
4b94dc60d2 Fix handling of documents containing only whitespace. 2023-12-15 19:49:12 -05:00
Tom Alexander
2046603d01 Fix handling post blank for org documents. 2023-12-15 19:42:43 -05:00
Tom Alexander
30412361e1 Fix handling fixed width area post-blank inside a list. 2023-12-15 19:37:33 -05:00
Tom Alexander
e846c85188 Fix handling fixed width areas with empty lines in the middle. 2023-12-15 19:17:16 -05:00
Tom Alexander
99b74095e6 Fix heading post-blank. 2023-12-15 19:10:14 -05:00
Tom Alexander
6b802d36bf Implement the new fields for target. 2023-12-15 18:57:19 -05:00
Tom Alexander
33ca43ca40 Remove the old Paragraph::of_text function.
Some checks failed
clippy Build clippy has failed
rust-foreign-document-test Build rust-foreign-document-test has failed
rust-build Build rust-build has succeeded
rust-test Build rust-test has succeeded
2023-12-15 18:06:48 -05:00
Tom Alexander
f5280a3090 Implement the new fields for bullshitium broken dynamic block. 2023-12-15 18:04:42 -05:00
Tom Alexander
c28d8ccea4 Fix post-blank for headlines containing only whitespace. 2023-12-15 17:59:47 -05:00
Tom Alexander
9690545901 Fix setting contents for broken end bullshitium when there is a paragraph present.
Some checks failed
clippy Build clippy has failed
rust-build Build rust-build has succeeded
rust-foreign-document-test Build rust-foreign-document-test has failed
rust-test Build rust-test has failed
2023-12-15 17:40:08 -05:00
Tom Alexander
eba4fb94cf Implement the new fields for dynamic block. 2023-12-15 17:26:01 -05:00
Tom Alexander
565978225a Implement the new fields for table.
Some checks failed
clippy Build clippy has failed
rust-foreign-document-test Build rust-foreign-document-test has failed
rust-build Build rust-build has succeeded
rust-test Build rust-test has failed
2023-12-15 17:16:49 -05:00
Tom Alexander
cce9ca87fa Fix handling of leading blank lines in greater blocks.
Some checks failed
clippy Build clippy has failed
rust-foreign-document-test Build rust-foreign-document-test has failed
rust-build Build rust-build has succeeded
rust-test Build rust-test has failed
2023-12-15 16:55:47 -05:00
Tom Alexander
683c523ece Implement the new fields for greater block. 2023-12-15 16:15:22 -05:00
Tom Alexander
7a4dc20dc9 Implement the new fields for plain list.
Some checks failed
clippy Build clippy has failed
rust-foreign-document-test Build rust-foreign-document-test has failed
rust-build Build rust-build has succeeded
rust-test Build rust-test has failed
2023-12-15 15:59:00 -05:00
Tom Alexander
022dda06eb Implement the new fields for plain list item. 2023-12-15 15:52:53 -05:00
Tom Alexander
7b88a2d248 Implement the new fields for broken end bullshitium. 2023-12-15 15:40:17 -05:00
Tom Alexander
fce5b92091 Remove leading blank lines from document contents. 2023-12-15 15:30:46 -05:00
Tom Alexander
45a506334c Remove leading blank lines from heading contents. 2023-12-15 15:20:31 -05:00
Tom Alexander
e47901a67f Implement the new fields for node property.
Some checks failed
clippy Build clippy has failed
rust-build Build rust-build has succeeded
rust-foreign-document-test Build rust-foreign-document-test has failed
rust-test Build rust-test has failed
2023-12-15 15:05:03 -05:00
Tom Alexander
7430daa768 Fix handling of property drawers containing only whitespace. 2023-12-15 15:05:03 -05:00
Tom Alexander
6ce25c8a3b Update property drawer empty test to include a variety of whitespace. 2023-12-15 14:25:01 -05:00
Tom Alexander
7b8fa1eb4a Fix get_contents for sections. 2023-12-15 13:21:58 -05:00
Tom Alexander
ffa5349f25 Fix get_contents for headlines. 2023-12-15 13:14:49 -05:00
Tom Alexander
bb472b63cc Implement the new fields for property drawer. 2023-12-15 13:03:42 -05:00
Tom Alexander
57f566a7a1 Implement the new fields for planning. 2023-12-15 12:55:05 -05:00
Tom Alexander
2181993246 Implement the new fields for horizontal rule. 2023-12-15 12:50:01 -05:00
Tom Alexander
60d1ecfa75 Fix fixed width area to not consume trailing line break so it can be part of the post-blank.
Some checks failed
clippy Build clippy has failed
rust-foreign-document-test Build rust-foreign-document-test has failed
rust-build Build rust-build has succeeded
rust-test Build rust-test has failed
2023-12-15 12:45:51 -05:00
Tom Alexander
3962db12a8 Implement the new fields for fixed width area. 2023-12-15 12:29:46 -05:00
Tom Alexander
f192507cd9 Implement the new fields for diary sexp.
Some checks failed
clippy Build clippy has failed
rust-foreign-document-test Build rust-foreign-document-test has failed
rust-build Build rust-build has succeeded
rust-test Build rust-test has failed
2023-12-15 12:10:34 -05:00
Tom Alexander
252be3e001 Fix post blank for timestamp date ranges. 2023-12-15 11:38:52 -05:00
Tom Alexander
28f12a04f7 Implement the new fields for drawer.
Some checks failed
clippy Build clippy has failed
rust-foreign-document-test Build rust-foreign-document-test has failed
rust-build Build rust-build has succeeded
rust-test Build rust-test has failed
2023-12-15 11:22:27 -05:00
Tom Alexander
d6232dc49c Implement the new fields for clock. 2023-12-15 10:49:04 -05:00
Tom Alexander
68a220aa1c Implement the new fields for babel call.
Some checks failed
clippy Build clippy has failed
rust-foreign-document-test Build rust-foreign-document-test has failed
rust-build Build rust-build has succeeded
rust-test Build rust-test has failed
2023-12-15 10:45:16 -05:00
Tom Alexander
2e7db0f8bd Implement the new fields for lesser block.
Some checks failed
clippy Build clippy has failed
rust-foreign-document-test Build rust-foreign-document-test has failed
rust-build Build rust-build has succeeded
rust-test Build rust-test has failed
2023-12-11 15:59:56 -05:00
Tom Alexander
175ff1e6c4 Implement the new fields for LaTeX environment.
Some checks failed
clippy Build clippy has failed
rust-foreign-document-test Build rust-foreign-document-test has failed
rust-build Build rust-build has succeeded
rust-test Build rust-test has failed
2023-12-11 14:49:18 -05:00
Tom Alexander
0b42139393 Implement the new fields for inline babel call and inline source block. 2023-12-11 14:47:22 -05:00
Tom Alexander
67a9103b07 Implement the new fields for export snippet. 2023-12-11 14:41:49 -05:00
Tom Alexander
f141a4e186 Implement the new fields for citation.
Some checks failed
clippy Build clippy has failed
rust-foreign-document-test Build rust-foreign-document-test has failed
rust-build Build rust-build has succeeded
rust-test Build rust-test has failed
2023-12-11 14:38:14 -05:00
Tom Alexander
aba29df34c Implement the new fields for org macro. 2023-12-11 14:22:56 -05:00
Tom Alexander
87ce7d7432 Implement the new fields for timestamp. 2023-12-11 14:18:04 -05:00
Tom Alexander
68dccd54b1 Implement the new fields for radio link. 2023-12-11 14:10:27 -05:00
Tom Alexander
4753f4c7c6 Implement the new fields for plain link.
Some checks failed
clippy Build clippy has failed
rust-foreign-document-test Build rust-foreign-document-test has failed
rust-build Build rust-build has succeeded
rust-test Build rust-test has failed
2023-12-11 13:08:53 -05:00
Tom Alexander
13c62bf29f Implement the new fields for angle link.
Some checks failed
clippy Build clippy has failed
rust-foreign-document-test Build rust-foreign-document-test has failed
rust-build Build rust-build has succeeded
rust-test Build rust-test has failed
2023-12-11 13:00:43 -05:00
Tom Alexander
670209e9fc Fix post blank for comment. 2023-12-11 12:58:05 -05:00
Tom Alexander
4af0d3141f Implement the new fields for statistics cookie. 2023-12-11 12:51:07 -05:00
Tom Alexander
ab281de3c6 Implement the new fields for latex fragment.
Some checks failed
clippy Build clippy has failed
rust-foreign-document-test Build rust-foreign-document-test has failed
rust-build Build rust-build has succeeded
rust-test Build rust-test has failed
2023-12-11 12:46:20 -05:00
Tom Alexander
d556d28f49 Implement the new fields for entity. 2023-12-11 12:44:42 -05:00
Tom Alexander
9cfb2fa052 Implement the new fields for keywords.
Some checks failed
clippy Build clippy has failed
rust-foreign-document-test Build rust-foreign-document-test has failed
rust-build Build rust-build has succeeded
rust-test Build rust-test has failed
2023-12-11 12:38:13 -05:00
Tom Alexander
30c03b5529 Implement the new fields for radio target. 2023-12-11 12:27:35 -05:00
Tom Alexander
b943f90766 Implement the new fields for regular link.
Some checks failed
clippy Build clippy has failed
rust-foreign-document-test Build rust-foreign-document-test has failed
rust-build Build rust-build has succeeded
rust-test Build rust-test has failed
2023-12-11 12:19:49 -05:00
Tom Alexander
0108f5b0b1 Implement the new fields for subscript and superscript.
Some checks failed
clippy Build clippy has failed
rust-foreign-document-test Build rust-foreign-document-test has failed
rust-build Build rust-build has succeeded
rust-test Build rust-test has failed
2023-12-11 12:04:59 -05:00
Tom Alexander
50145c6cf2 Implement the new fields for line break.
Some checks failed
clippy Build clippy has failed
rust-foreign-document-test Build rust-foreign-document-test has failed
rust-build Build rust-build has succeeded
rust-test Build rust-test has failed
2023-12-08 16:20:58 -05:00
Tom Alexander
4a8607726c Implement the new fields for comment. 2023-12-08 16:20:58 -05:00
Tom Alexander
9bcba4020d Implement the new fields for verbatim and code. 2023-12-08 16:20:58 -05:00
Tom Alexander
8fd9ff3848 Implement the new fields for bold, italic, underline, and strike-through. 2023-12-08 15:51:38 -05:00
Tom Alexander
3fb7cb82cd Implement get_contents for document.
Some checks failed
clippy Build clippy has failed
rust-foreign-document-test Build rust-foreign-document-test has failed
rust-build Build rust-build has succeeded
rust-test Build rust-test has failed
2023-11-01 01:22:43 -04:00
Tom Alexander
e0ec5c115f Need a constant value for generic numbers. 2023-11-01 00:49:22 -04:00
Tom Alexander
f0868ba3ed Add a post blank implementation to document. 2023-10-31 23:56:40 -04:00
Tom Alexander
425bc12353 Add implementations to calculate the new fields for heading. 2023-10-31 23:46:53 -04:00
Tom Alexander
03754be71e Implement the new fields for section. 2023-10-31 23:16:57 -04:00
Tom Alexander
70002800c2 Implement the new fields for footnote definitions. 2023-10-31 23:12:04 -04:00
Tom Alexander
281c35677b Implement the new fields for paragraph. 2023-10-31 23:06:43 -04:00
Tom Alexander
92d15c3d91 Fix clippy. 2023-10-31 22:58:17 -04:00
Tom Alexander
b1773ac90e Get post blank for footnote references. 2023-10-31 22:58:17 -04:00
Tom Alexander
645d9abf9c Support nil contents. 2023-10-31 22:58:17 -04:00
Tom Alexander
d2f2bdf88d Implement get_contents for footnote references. 2023-10-31 22:58:17 -04:00
Tom Alexander
90ba17b68c Switch to a numeric post-blank.
Turns out post-blank has different meanings for different object types, so we need to return a number to properly do the comparison.
2023-10-31 22:32:01 -04:00
Tom Alexander
31406fd520 Fix clippy.
Some checks failed
clippy Build clippy has succeeded
rust-foreign-document-test Build rust-foreign-document-test has failed
rust-build Build rust-build has succeeded
rust-test Build rust-test has failed
2023-10-31 22:19:39 -04:00
Tom Alexander
49bc51ba89 Compare post-blank. 2023-10-31 22:18:28 -04:00
Tom Alexander
92592104a4 Compare contents begin/end. 2023-10-31 22:11:38 -04:00
Tom Alexander
33f4614d28 Make get_rust_byte_offsets more generic so it can be used for contents.
Some checks failed
clippy Build clippy has failed
rust-foreign-document-test Build rust-foreign-document-test has succeeded
rust-build Build rust-build has succeeded
rust-test Build rust-test has succeeded
2023-10-31 21:59:58 -04:00
Tom Alexander
6c197c376a Add todo implementations of the new standard property functions. 2023-10-31 21:49:33 -04:00
Tom Alexander
bcf1b49db2 Remove the GetStandardProperties trait.
This was using dynamic dispatch to deal with enums in order to avoid repetitive typing.
2023-10-31 21:26:00 -04:00
Tom Alexander
49f6e70a19 Use RPIT to get static dispatch GetStandardProperties. 2023-10-31 21:20:46 -04:00
Tom Alexander
31fb815681 Add a function for getting the post blank. 2023-10-31 21:20:46 -04:00
Tom Alexander
7dfe24ff98 Merge branch 'lazy_parse_lesser_block_contents'
All checks were successful
clippy Build clippy has succeeded
rustfmt Build rustfmt has succeeded
rust-build Build rust-build has succeeded
rust-foreign-document-test Build rust-foreign-document-test has succeeded
rust-test Build rust-test has succeeded
2023-10-31 20:54:01 -04:00
Tom Alexander
a5627d0cee Do not parse the lesser block contents during parsing, but rather only if the contents are requested.
Some checks failed
rust-test Build rust-test has failed
clippy Build clippy has succeeded
rust-foreign-document-test Build rust-foreign-document-test has succeeded
rust-build Build rust-build has succeeded
This seemed like an unnecessary allocation during parsing, especially considering we throw away some parses based on whether or not we found radio targets in the source.
2023-10-31 20:43:08 -04:00
Tom Alexander
93cfa71df2 Merge branch 'foreign_document_literate_build_emacs'
All checks were successful
rustfmt Build rustfmt has succeeded
clippy Build clippy has succeeded
rust-build Build rust-build has succeeded
rust-foreign-document-test Build rust-foreign-document-test has succeeded
rust-test Build rust-test has succeeded
2023-10-31 19:49:15 -04:00
Tom Alexander
78320d3265 Fix clippy errors.
All checks were successful
clippy Build clippy has succeeded
rust-build Build rust-build has succeeded
rust-foreign-document-test Build rust-foreign-document-test has succeeded
rust-test Build rust-test has succeeded
2023-10-31 18:32:01 -04:00
Tom Alexander
9e908935f8 Add special case to delete invalid org-mode file.
Some checks failed
clippy Build clippy has failed
rust-foreign-document-test Build rust-foreign-document-test has succeeded
rust-build Build rust-build has succeeded
rust-test Build rust-test has succeeded
2023-10-31 17:26:13 -04:00
Tom Alexander
b18a703529 Handle nil values for compare_property_object_tree. 2023-10-31 17:20:35 -04:00
Tom Alexander
ea52dc60be Add a literate tutorial for building emacs to the foreign documents test. 2023-10-31 16:33:11 -04:00
Tom Alexander
f5699ce830 Remove PartialEq from Object.
Some checks failed
rustfmt Build rustfmt has succeeded
clippy Build clippy has failed
rust-foreign-document-test Build rust-foreign-document-test has succeeded
rust-test Build rust-test has succeeded
rust-build Build rust-build has succeeded
2023-10-31 16:33:10 -04:00
Tom Alexander
10aa0956ee Merge branch 'lesser_block_memory_optimization'
All checks were successful
clippy Build clippy has succeeded
rustfmt Build rustfmt has succeeded
rust-foreign-document-test Build rust-foreign-document-test has succeeded
rust-build Build rust-build has succeeded
rust-test Build rust-test has succeeded
2023-10-27 22:13:25 -04:00
Tom Alexander
816c164996 Only allocate memory if removing text for lesser blocks.
All checks were successful
clippy Build clippy has succeeded
rust-build Build rust-build has succeeded
rust-foreign-document-test Build rust-foreign-document-test has succeeded
rust-test Build rust-test has succeeded
2023-10-27 21:50:08 -04:00
Tom Alexander
ee201e1336 Merge branch 'explicit_all_node_iter'
All checks were successful
rustfmt Build rustfmt has succeeded
clippy Build clippy has succeeded
rust-foreign-document-test Build rust-foreign-document-test has succeeded
rust-build Build rust-build has succeeded
rust-test Build rust-test has succeeded
2023-10-23 18:39:16 -04:00
Tom Alexander
4897952330 Make creating AllAstNodeIter explicit.
All checks were successful
clippy Build clippy has succeeded
rust-foreign-document-test Build rust-foreign-document-test has succeeded
rust-build Build rust-build has succeeded
rust-test Build rust-test has succeeded
This is to remove the ambiguity between calling iter on the specific structs like Document and calling iter on an AstNode by having an explicitly-named function to create the iterator.
2023-10-23 18:25:59 -04:00
Tom Alexander
e1d85c6dc2 Merge branch 'remove_set_source'
All checks were successful
rustfmt Build rustfmt has succeeded
clippy Build clippy has succeeded
rust-build Build rust-build has succeeded
rust-foreign-document-test Build rust-foreign-document-test has succeeded
rust-test Build rust-test has succeeded
2023-10-23 18:06:56 -04:00
Tom Alexander
c420ccd029 Fix clippy errors.
All checks were successful
clippy Build clippy has succeeded
rust-foreign-document-test Build rust-foreign-document-test has succeeded
rust-build Build rust-build has succeeded
rust-test Build rust-test has succeeded
2023-10-23 17:43:43 -04:00
Tom Alexander
a880629831 Make clippy not write to the host git repo. 2023-10-23 17:43:32 -04:00
Tom Alexander
5e2dea1f28 Remove the SetSource trait.
It was only being used for creating paragraphs of specific text, so I just adjusted the of_text function to handle it.
2023-10-23 17:43:32 -04:00
Tom Alexander
f47d688be4 Remove owned String from CustomError.
Some checks failed
rustfmt Build rustfmt has failed
rust-test Build rust-test has failed
clippy Build clippy has failed
rust-build Build rust-build has failed
rust-foreign-document-test Build rust-foreign-document-test has succeeded
This is a 15% performance improvement.
2023-10-21 14:29:37 -04:00
Tom Alexander
acfc5e5e68 Only allocate memory when unquoting sexp string that contains escapes.
All checks were successful
rust-build Build rust-build has succeeded
rust-foreign-document-test Build rust-foreign-document-test has succeeded
rust-test Build rust-test has succeeded
clippy Build clippy has succeeded
rustfmt Build rustfmt has succeeded
If the quoted string contains no escape sequences, then unquoting it can be done by simply shaving off the leading and trailing quotation marks, which is a slice operation. By returning Cow, we can return either a borrowed slice or an owned String.
2023-10-20 12:53:27 -04:00
189 changed files with 9814 additions and 2460 deletions

View File

@@ -2,3 +2,4 @@
target
Cargo.lock
notes/
.lighthouse/

View File

@@ -1,191 +0,0 @@
apiVersion: tekton.dev/v1beta1
kind: PipelineRun
metadata:
name: clippy
spec:
pipelineSpec:
params:
- name: image-name
description: The name for the built image
type: string
- name: path-to-image-context
description: The path to the build context
type: string
- name: path-to-dockerfile
description: The path to the Dockerfile
type: string
- name: GIT_USER_NAME
description: The username for git
type: string
default: "fluxcdbot"
- name: GIT_USER_EMAIL
description: The email for git
type: string
default: "fluxcdbot@users.noreply.github.com"
tasks:
- name: do-stuff
taskSpec:
metadata: {}
stepTemplate:
image: alpine:3.18
name: ""
resources:
requests:
cpu: 10m
memory: 600Mi
workingDir: /workspace/source
steps:
- image: alpine:3.18
name: do-stuff-step
script: |
#!/usr/bin/env sh
echo "hello world"
- name: report-pending
taskRef:
name: gitea-set-status
runAfter:
- fetch-repository
params:
- name: CONTEXT
value: "$(params.JOB_NAME)"
- name: REPO_FULL_NAME
value: "$(params.REPO_OWNER)/$(params.REPO_NAME)"
- name: GITEA_HOST_URL
value: code.fizz.buzz
- name: SHA
value: "$(tasks.fetch-repository.results.commit)"
- name: DESCRIPTION
value: "Build $(params.JOB_NAME) has started"
- name: STATE
value: pending
- name: TARGET_URL
value: "https://tekton.fizz.buzz/#/namespaces/$(context.pipelineRun.namespace)/pipelineruns/$(context.pipelineRun.name)"
- name: fetch-repository
taskRef:
name: git-clone
workspaces:
- name: output
workspace: git-source
params:
- name: url
value: $(params.REPO_URL)
- name: revision
value: $(params.PULL_BASE_SHA)
- name: deleteExisting
value: "true"
- name: build-image
taskRef:
name: kaniko
params:
- name: IMAGE
value: "$(params.image-name):$(tasks.fetch-repository.results.commit)"
- name: CONTEXT
value: $(params.path-to-image-context)
- name: DOCKERFILE
value: $(params.path-to-dockerfile)
- name: BUILDER_IMAGE
value: "gcr.io/kaniko-project/executor:v1.12.1"
- name: EXTRA_ARGS
value:
- --cache=true
- --cache-copy-layers
- --cache-repo=harbor.fizz.buzz/kanikocache/cache
- --use-new-run # Should result in a speed-up
- --reproducible # To remove timestamps so layer caching works.
- --snapshot-mode=redo
- --skip-unused-stages=true
- --registry-mirror=dockerhub.dockerhub.svc.cluster.local
workspaces:
- name: source
workspace: git-source
- name: dockerconfig
workspace: docker-credentials
runAfter:
- fetch-repository
- name: clippy
taskRef:
name: run-docker-image
workspaces:
- name: source
workspace: git-source
- name: cargo-cache
workspace: cargo-cache
runAfter:
- build-image
params:
- name: docker-image
value: "$(params.image-name):$(tasks.fetch-repository.results.commit)"
finally:
- name: report-success
when:
- input: "$(tasks.status)"
operator: in
values: ["Succeeded", "Completed"]
taskRef:
name: gitea-set-status
params:
- name: CONTEXT
value: "$(params.JOB_NAME)"
- name: REPO_FULL_NAME
value: "$(params.REPO_OWNER)/$(params.REPO_NAME)"
- name: GITEA_HOST_URL
value: code.fizz.buzz
- name: SHA
value: "$(tasks.fetch-repository.results.commit)"
- name: DESCRIPTION
value: "Build $(params.JOB_NAME) has succeeded"
- name: STATE
value: success
- name: TARGET_URL
value: "https://tekton.fizz.buzz/#/namespaces/$(context.pipelineRun.namespace)/pipelineruns/$(context.pipelineRun.name)"
- name: report-failure
when:
- input: "$(tasks.status)"
operator: in
values: ["Failed"]
taskRef:
name: gitea-set-status
params:
- name: CONTEXT
value: "$(params.JOB_NAME)"
- name: REPO_FULL_NAME
value: "$(params.REPO_OWNER)/$(params.REPO_NAME)"
- name: GITEA_HOST_URL
value: code.fizz.buzz
- name: SHA
value: "$(tasks.fetch-repository.results.commit)"
- name: DESCRIPTION
value: "Build $(params.JOB_NAME) has failed"
- name: STATE
value: failure
- name: TARGET_URL
value: "https://tekton.fizz.buzz/#/namespaces/$(context.pipelineRun.namespace)/pipelineruns/$(context.pipelineRun.name)"
workspaces:
- name: git-source
- name: docker-credentials
workspaces:
- name: git-source
volumeClaimTemplate:
spec:
storageClassName: "nfs-client"
accessModes:
- ReadWriteOnce
resources:
requests:
storage: 10Gi
subPath: rust-source
- name: cargo-cache
persistentVolumeClaim:
claimName: organic-cargo-cache-clippy
- name: docker-credentials
secret:
secretName: harbor-plain
serviceAccountName: build-bot
timeout: 240h0m0s
params:
- name: image-name
value: "harbor.fizz.buzz/private/organic-clippy"
- name: path-to-image-context
value: docker/organic_clippy/
- name: path-to-dockerfile
value: docker/organic_clippy/Dockerfile

View File

@@ -1,203 +0,0 @@
apiVersion: tekton.dev/v1beta1
kind: PipelineRun
metadata:
name: rust-foreign-document-test
spec:
pipelineSpec:
timeouts:
pipeline: "2h0m0s"
tasks: "1h0m40s"
finally: "0h30m0s"
params:
- name: image-name
description: The name for the built image
type: string
- name: path-to-image-context
description: The path to the build context
type: string
- name: path-to-dockerfile
description: The path to the Dockerfile
type: string
tasks:
- name: do-stuff
taskSpec:
metadata: {}
stepTemplate:
image: alpine:3.18
name: ""
resources:
requests:
cpu: 10m
memory: 600Mi
workingDir: /workspace/source
steps:
- image: alpine:3.18
name: do-stuff-step
script: |
#!/usr/bin/env sh
echo "hello world"
- name: report-pending
taskRef:
name: gitea-set-status
runAfter:
- fetch-repository
params:
- name: CONTEXT
value: "$(params.JOB_NAME)"
- name: REPO_FULL_NAME
value: "$(params.REPO_OWNER)/$(params.REPO_NAME)"
- name: GITEA_HOST_URL
value: code.fizz.buzz
- name: SHA
value: "$(tasks.fetch-repository.results.commit)"
- name: DESCRIPTION
value: "Build $(params.JOB_NAME) has started"
- name: STATE
value: pending
- name: TARGET_URL
value: "https://tekton.fizz.buzz/#/namespaces/$(context.pipelineRun.namespace)/pipelineruns/$(context.pipelineRun.name)"
- name: fetch-repository
taskRef:
name: git-clone
workspaces:
- name: output
workspace: git-source
params:
- name: url
value: $(params.REPO_URL)
- name: revision
value: $(params.PULL_BASE_SHA)
- name: deleteExisting
value: "true"
- name: build-image
taskRef:
name: kaniko
params:
- name: IMAGE
value: "$(params.image-name):$(tasks.fetch-repository.results.commit)"
- name: CONTEXT
value: $(params.path-to-image-context)
- name: DOCKERFILE
value: $(params.path-to-dockerfile)
- name: BUILDER_IMAGE
value: "gcr.io/kaniko-project/executor:v1.12.1"
- name: EXTRA_ARGS
value:
- --target=foreign-document-test
- --cache=true
- --cache-copy-layers
- --cache-repo=harbor.fizz.buzz/kanikocache/cache
- --use-new-run # Should result in a speed-up
- --reproducible # To remove timestamps so layer caching works.
- --snapshot-mode=redo
- --skip-unused-stages=true
- --registry-mirror=dockerhub.dockerhub.svc.cluster.local
workspaces:
- name: source
workspace: git-source
- name: dockerconfig
workspace: docker-credentials
runAfter:
- fetch-repository
- name: run-image
taskRef:
name: run-docker-image
workspaces:
- name: source
workspace: git-source
- name: cargo-cache
workspace: cargo-cache
runAfter:
- build-image
params:
- name: docker-image
value: "$(params.image-name):$(tasks.fetch-repository.results.commit)"
finally:
- name: report-success
when:
- input: "$(tasks.status)"
operator: in
values: ["Succeeded", "Completed"]
taskRef:
name: gitea-set-status
params:
- name: CONTEXT
value: "$(params.JOB_NAME)"
- name: REPO_FULL_NAME
value: "$(params.REPO_OWNER)/$(params.REPO_NAME)"
- name: GITEA_HOST_URL
value: code.fizz.buzz
- name: SHA
value: "$(tasks.fetch-repository.results.commit)"
- name: DESCRIPTION
value: "Build $(params.JOB_NAME) has succeeded"
- name: STATE
value: success
- name: TARGET_URL
value: "https://tekton.fizz.buzz/#/namespaces/$(context.pipelineRun.namespace)/pipelineruns/$(context.pipelineRun.name)"
- name: report-failure
when:
- input: "$(tasks.status)"
operator: in
values: ["Failed"]
taskRef:
name: gitea-set-status
params:
- name: CONTEXT
value: "$(params.JOB_NAME)"
- name: REPO_FULL_NAME
value: "$(params.REPO_OWNER)/$(params.REPO_NAME)"
- name: GITEA_HOST_URL
value: code.fizz.buzz
- name: SHA
value: "$(tasks.fetch-repository.results.commit)"
- name: DESCRIPTION
value: "Build $(params.JOB_NAME) has failed"
- name: STATE
value: failure
- name: TARGET_URL
value: "https://tekton.fizz.buzz/#/namespaces/$(context.pipelineRun.namespace)/pipelineruns/$(context.pipelineRun.name)"
- name: cargo-cache-autoclean
taskRef:
name: run-docker-image
workspaces:
- name: source
workspace: git-source
- name: cargo-cache
workspace: cargo-cache
params:
- name: command
value: [cargo, cache, --autoclean]
- name: args
value: []
- name: docker-image
value: "$(params.image-name):$(tasks.fetch-repository.results.commit)"
workspaces:
- name: git-source
- name: docker-credentials
- name: cargo-cache
workspaces:
- name: git-source
volumeClaimTemplate:
spec:
storageClassName: "nfs-client"
accessModes:
- ReadWriteOnce
resources:
requests:
storage: 10Gi
subPath: rust-source
- name: cargo-cache
persistentVolumeClaim:
claimName: organic-cargo-cache-test-foreign-document
- name: docker-credentials
secret:
secretName: harbor-plain
serviceAccountName: build-bot
params:
- name: image-name
value: "harbor.fizz.buzz/private/organic-test-foreign-document"
- name: path-to-image-context
value: docker/organic_test/
- name: path-to-dockerfile
value: docker/organic_test/Dockerfile

View File

@@ -1,284 +0,0 @@
apiVersion: tekton.dev/v1beta1
kind: PipelineRun
metadata:
name: rust-build
spec:
pipelineSpec:
params:
- name: image-name
description: The name for the built image
type: string
- name: path-to-image-context
description: The path to the build context
type: string
- name: path-to-dockerfile
description: The path to the Dockerfile
type: string
tasks:
- name: report-pending
taskRef:
name: gitea-set-status
runAfter:
- fetch-repository
params:
- name: CONTEXT
value: "$(params.JOB_NAME)"
- name: REPO_FULL_NAME
value: "$(params.REPO_OWNER)/$(params.REPO_NAME)"
- name: GITEA_HOST_URL
value: code.fizz.buzz
- name: SHA
value: "$(tasks.fetch-repository.results.commit)"
- name: DESCRIPTION
value: "Build $(params.JOB_NAME) has started"
- name: STATE
value: pending
- name: TARGET_URL
value: "https://tekton.fizz.buzz/#/namespaces/$(context.pipelineRun.namespace)/pipelineruns/$(context.pipelineRun.name)"
- name: fetch-repository
taskRef:
name: git-clone
workspaces:
- name: output
workspace: git-source
params:
- name: url
value: $(params.REPO_URL)
- name: revision
value: $(params.PULL_BASE_SHA)
- name: deleteExisting
value: "true"
- name: build-image
taskRef:
name: kaniko
params:
- name: IMAGE
value: "$(params.image-name):$(tasks.fetch-repository.results.commit)"
- name: CONTEXT
value: $(params.path-to-image-context)
- name: DOCKERFILE
value: $(params.path-to-dockerfile)
- name: BUILDER_IMAGE
value: "gcr.io/kaniko-project/executor:v1.12.1"
- name: EXTRA_ARGS
value:
- --cache=true
- --cache-copy-layers
- --cache-repo=harbor.fizz.buzz/kanikocache/cache
- --use-new-run # Should result in a speed-up
- --reproducible # To remove timestamps so layer caching works.
- --snapshot-mode=redo
- --skip-unused-stages=true
- --registry-mirror=dockerhub.dockerhub.svc.cluster.local
workspaces:
- name: source
workspace: git-source
- name: dockerconfig
workspace: docker-credentials
runAfter:
- fetch-repository
- name: run-image-none
taskRef:
name: run-docker-image
workspaces:
- name: source
workspace: git-source
- name: cargo-cache
workspace: cargo-cache
runAfter:
- build-image
params:
- name: args
value: ["--no-default-features"]
- name: docker-image
value: "$(params.image-name):$(tasks.fetch-repository.results.commit)"
- name: run-image-tracing
taskRef:
name: run-docker-image
workspaces:
- name: source
workspace: git-source
- name: cargo-cache
workspace: cargo-cache
runAfter:
- run-image-none
params:
- name: args
value: ["--no-default-features", "--features", "tracing"]
- name: docker-image
value: "$(params.image-name):$(tasks.fetch-repository.results.commit)"
- name: run-image-compare
taskRef:
name: run-docker-image
workspaces:
- name: source
workspace: git-source
- name: cargo-cache
workspace: cargo-cache
runAfter:
- run-image-tracing
params:
- name: args
value: ["--no-default-features", "--features", "compare"]
- name: docker-image
value: "$(params.image-name):$(tasks.fetch-repository.results.commit)"
- name: run-image-default
taskRef:
name: run-docker-image
workspaces:
- name: source
workspace: git-source
- name: cargo-cache
workspace: cargo-cache
runAfter:
- run-image-compare
params:
- name: args
value: []
- name: docker-image
value: "$(params.image-name):$(tasks.fetch-repository.results.commit)"
- name: run-image-tracing-compare
taskRef:
name: run-docker-image
workspaces:
- name: source
workspace: git-source
- name: cargo-cache
workspace: cargo-cache
runAfter:
- run-image-default
params:
- name: args
value: ["--no-default-features", "--features", "tracing,compare"]
- name: docker-image
value: "$(params.image-name):$(tasks.fetch-repository.results.commit)"
- name: run-image-compare-foreign
taskRef:
name: run-docker-image
workspaces:
- name: source
workspace: git-source
- name: cargo-cache
workspace: cargo-cache
runAfter:
- run-image-tracing-compare
params:
- name: args
value:
[
"--no-default-features",
"--features",
"compare,foreign_document_test",
]
- name: docker-image
value: "$(params.image-name):$(tasks.fetch-repository.results.commit)"
- name: run-image-all
taskRef:
name: run-docker-image
workspaces:
- name: source
workspace: git-source
- name: cargo-cache
workspace: cargo-cache
runAfter:
- run-image-compare-foreign
params:
- name: args
value:
[
"--no-default-features",
"--features",
"tracing,compare,foreign_document_test",
]
- name: docker-image
value: "$(params.image-name):$(tasks.fetch-repository.results.commit)"
finally:
- name: report-success
when:
- input: "$(tasks.status)"
operator: in
values: ["Succeeded", "Completed"]
taskRef:
name: gitea-set-status
params:
- name: CONTEXT
value: "$(params.JOB_NAME)"
- name: REPO_FULL_NAME
value: "$(params.REPO_OWNER)/$(params.REPO_NAME)"
- name: GITEA_HOST_URL
value: code.fizz.buzz
- name: SHA
value: "$(tasks.fetch-repository.results.commit)"
- name: DESCRIPTION
value: "Build $(params.JOB_NAME) has succeeded"
- name: STATE
value: success
- name: TARGET_URL
value: "https://tekton.fizz.buzz/#/namespaces/$(context.pipelineRun.namespace)/pipelineruns/$(context.pipelineRun.name)"
- name: report-failure
when:
- input: "$(tasks.status)"
operator: in
values: ["Failed"]
taskRef:
name: gitea-set-status
params:
- name: CONTEXT
value: "$(params.JOB_NAME)"
- name: REPO_FULL_NAME
value: "$(params.REPO_OWNER)/$(params.REPO_NAME)"
- name: GITEA_HOST_URL
value: code.fizz.buzz
- name: SHA
value: "$(tasks.fetch-repository.results.commit)"
- name: DESCRIPTION
value: "Build $(params.JOB_NAME) has failed"
- name: STATE
value: failure
- name: TARGET_URL
value: "https://tekton.fizz.buzz/#/namespaces/$(context.pipelineRun.namespace)/pipelineruns/$(context.pipelineRun.name)"
- name: cargo-cache-autoclean
taskRef:
name: run-docker-image
workspaces:
- name: source
workspace: git-source
- name: cargo-cache
workspace: cargo-cache
params:
- name: command
value: [cargo, cache, --autoclean]
- name: args
value: []
- name: docker-image
value: "$(params.image-name):$(tasks.fetch-repository.results.commit)"
workspaces:
- name: git-source
- name: docker-credentials
- name: cargo-cache
workspaces:
- name: git-source
volumeClaimTemplate:
spec:
storageClassName: "nfs-client"
accessModes:
- ReadWriteOnce
resources:
requests:
storage: 10Gi
subPath: rust-source
- name: cargo-cache
persistentVolumeClaim:
claimName: organic-cargo-cache-build
- name: docker-credentials
secret:
secretName: harbor-plain
serviceAccountName: build-bot
timeout: 240h0m0s
params:
- name: image-name
value: "harbor.fizz.buzz/private/organic-build"
- name: path-to-image-context
value: docker/organic_build/
- name: path-to-dockerfile
value: docker/organic_build/Dockerfile

View File

@@ -1,214 +0,0 @@
apiVersion: tekton.dev/v1beta1
kind: PipelineRun
metadata:
name: rust-test
spec:
pipelineSpec:
timeouts:
pipeline: "2h0m0s"
tasks: "1h0m40s"
finally: "0h30m0s"
params:
- name: image-name
description: The name for the built image
type: string
- name: path-to-image-context
description: The path to the build context
type: string
- name: path-to-dockerfile
description: The path to the Dockerfile
type: string
tasks:
- name: do-stuff
taskSpec:
metadata: {}
stepTemplate:
image: alpine:3.18
name: ""
resources:
requests:
cpu: 10m
memory: 600Mi
workingDir: /workspace/source
steps:
- image: alpine:3.18
name: do-stuff-step
script: |
#!/usr/bin/env sh
echo "hello world"
- name: report-pending
taskRef:
name: gitea-set-status
runAfter:
- fetch-repository
params:
- name: CONTEXT
value: "$(params.JOB_NAME)"
- name: REPO_FULL_NAME
value: "$(params.REPO_OWNER)/$(params.REPO_NAME)"
- name: GITEA_HOST_URL
value: code.fizz.buzz
- name: SHA
value: "$(tasks.fetch-repository.results.commit)"
- name: DESCRIPTION
value: "Build $(params.JOB_NAME) has started"
- name: STATE
value: pending
- name: TARGET_URL
value: "https://tekton.fizz.buzz/#/namespaces/$(context.pipelineRun.namespace)/pipelineruns/$(context.pipelineRun.name)"
- name: fetch-repository
taskRef:
name: git-clone
workspaces:
- name: output
workspace: git-source
params:
- name: url
value: $(params.REPO_URL)
- name: revision
value: $(params.PULL_BASE_SHA)
- name: deleteExisting
value: "true"
- name: build-image
taskRef:
name: kaniko
params:
- name: IMAGE
value: "$(params.image-name):$(tasks.fetch-repository.results.commit)"
- name: CONTEXT
value: $(params.path-to-image-context)
- name: DOCKERFILE
value: $(params.path-to-dockerfile)
- name: BUILDER_IMAGE
value: "gcr.io/kaniko-project/executor:v1.12.1"
- name: EXTRA_ARGS
value:
- --target=tester
- --cache=true
- --cache-copy-layers
- --cache-repo=harbor.fizz.buzz/kanikocache/cache
- --use-new-run # Should result in a speed-up
- --reproducible # To remove timestamps so layer caching works.
- --snapshot-mode=redo
- --skip-unused-stages=true
- --registry-mirror=dockerhub.dockerhub.svc.cluster.local
workspaces:
- name: source
workspace: git-source
- name: dockerconfig
workspace: docker-credentials
runAfter:
- fetch-repository
- name: run-image
taskRef:
name: run-docker-image
workspaces:
- name: source
workspace: git-source
- name: cargo-cache
workspace: cargo-cache
runAfter:
- build-image
params:
- name: args
value:
[
--no-default-features,
--features,
compare,
--no-fail-fast,
--lib,
--test,
test_loader,
]
- name: docker-image
value: "$(params.image-name):$(tasks.fetch-repository.results.commit)"
finally:
- name: report-success
when:
- input: "$(tasks.status)"
operator: in
values: ["Succeeded", "Completed"]
taskRef:
name: gitea-set-status
params:
- name: CONTEXT
value: "$(params.JOB_NAME)"
- name: REPO_FULL_NAME
value: "$(params.REPO_OWNER)/$(params.REPO_NAME)"
- name: GITEA_HOST_URL
value: code.fizz.buzz
- name: SHA
value: "$(tasks.fetch-repository.results.commit)"
- name: DESCRIPTION
value: "Build $(params.JOB_NAME) has succeeded"
- name: STATE
value: success
- name: TARGET_URL
value: "https://tekton.fizz.buzz/#/namespaces/$(context.pipelineRun.namespace)/pipelineruns/$(context.pipelineRun.name)"
- name: report-failure
when:
- input: "$(tasks.status)"
operator: in
values: ["Failed"]
taskRef:
name: gitea-set-status
params:
- name: CONTEXT
value: "$(params.JOB_NAME)"
- name: REPO_FULL_NAME
value: "$(params.REPO_OWNER)/$(params.REPO_NAME)"
- name: GITEA_HOST_URL
value: code.fizz.buzz
- name: SHA
value: "$(tasks.fetch-repository.results.commit)"
- name: DESCRIPTION
value: "Build $(params.JOB_NAME) has failed"
- name: STATE
value: failure
- name: TARGET_URL
value: "https://tekton.fizz.buzz/#/namespaces/$(context.pipelineRun.namespace)/pipelineruns/$(context.pipelineRun.name)"
- name: cargo-cache-autoclean
taskRef:
name: run-docker-image
workspaces:
- name: source
workspace: git-source
- name: cargo-cache
workspace: cargo-cache
params:
- name: command
value: [cargo, cache, --autoclean]
- name: args
value: []
- name: docker-image
value: "$(params.image-name):$(tasks.fetch-repository.results.commit)"
workspaces:
- name: git-source
- name: docker-credentials
- name: cargo-cache
workspaces:
- name: git-source
volumeClaimTemplate:
spec:
storageClassName: "nfs-client"
accessModes:
- ReadWriteOnce
resources:
requests:
storage: 10Gi
subPath: rust-source
- name: cargo-cache
persistentVolumeClaim:
claimName: organic-cargo-cache-test
- name: docker-credentials
secret:
secretName: harbor-plain
serviceAccountName: build-bot
params:
- name: image-name
value: "harbor.fizz.buzz/private/organic-test"
- name: path-to-image-context
value: docker/organic_test/
- name: path-to-dockerfile
value: docker/organic_test/Dockerfile

View File

@@ -1,230 +0,0 @@
apiVersion: tekton.dev/v1beta1
kind: PipelineRun
metadata:
name: rustfmt
spec:
pipelineSpec:
params:
- name: image-name
description: The name for the built image
type: string
- name: path-to-image-context
description: The path to the build context
type: string
- name: path-to-dockerfile
description: The path to the Dockerfile
type: string
- name: GIT_USER_NAME
description: The username for git
type: string
default: "fluxcdbot"
- name: GIT_USER_EMAIL
description: The email for git
type: string
default: "fluxcdbot@users.noreply.github.com"
tasks:
- name: do-stuff
taskSpec:
metadata: {}
stepTemplate:
image: alpine:3.18
name: ""
resources:
requests:
cpu: 10m
memory: 600Mi
workingDir: /workspace/source
steps:
- image: alpine:3.18
name: do-stuff-step
script: |
#!/usr/bin/env sh
echo "hello world"
- name: report-pending
taskRef:
name: gitea-set-status
runAfter:
- fetch-repository
params:
- name: CONTEXT
value: "$(params.JOB_NAME)"
- name: REPO_FULL_NAME
value: "$(params.REPO_OWNER)/$(params.REPO_NAME)"
- name: GITEA_HOST_URL
value: code.fizz.buzz
- name: SHA
value: "$(tasks.fetch-repository.results.commit)"
- name: DESCRIPTION
value: "Build $(params.JOB_NAME) has started"
- name: STATE
value: pending
- name: TARGET_URL
value: "https://tekton.fizz.buzz/#/namespaces/$(context.pipelineRun.namespace)/pipelineruns/$(context.pipelineRun.name)"
- name: fetch-repository
taskRef:
name: git-clone
workspaces:
- name: output
workspace: git-source
params:
- name: url
value: $(params.REPO_URL)
- name: revision
value: $(params.PULL_BASE_SHA)
- name: deleteExisting
value: "true"
- name: build-image
taskRef:
name: kaniko
params:
- name: IMAGE
value: "$(params.image-name):$(tasks.fetch-repository.results.commit)"
- name: CONTEXT
value: $(params.path-to-image-context)
- name: DOCKERFILE
value: $(params.path-to-dockerfile)
- name: BUILDER_IMAGE
value: "gcr.io/kaniko-project/executor:v1.12.1"
- name: EXTRA_ARGS
value:
- --cache=true
- --cache-copy-layers
- --cache-repo=harbor.fizz.buzz/kanikocache/cache
- --use-new-run # Should result in a speed-up
- --reproducible # To remove timestamps so layer caching works.
- --snapshot-mode=redo
- --skip-unused-stages=true
- --registry-mirror=dockerhub.dockerhub.svc.cluster.local
workspaces:
- name: source
workspace: git-source
- name: dockerconfig
workspace: docker-credentials
runAfter:
- fetch-repository
- name: rustfmt
taskRef:
name: run-docker-image
workspaces:
- name: source
workspace: git-source
runAfter:
- build-image
params:
- name: docker-image
value: "$(params.image-name):$(tasks.fetch-repository.results.commit)"
- name: cargo-fix
taskRef:
name: run-docker-image
workspaces:
- name: source
workspace: git-source
- name: cargo-cache
workspace: cargo-cache
runAfter:
- rustfmt
params:
- name: command
value: ["cargo", "fix"]
- name: args
value: ["--allow-dirty"]
- name: docker-image
value: "$(params.image-name):$(tasks.fetch-repository.results.commit)"
- name: commit-changes
taskRef:
name: git-cli
params:
- name: GIT_USER_NAME
value: $(params.GIT_USER_NAME)
- name: GIT_USER_EMAIL
value: $(params.GIT_USER_EMAIL)
- name: GIT_SCRIPT
value: |
pwd
git config --global --add safe.directory /workspace/source
git_status=$(git status --porcelain)
if [ -n "$git_status" ]; then
git commit -a -m "CI: autofix rust code."
git push origin HEAD:$(params.PULL_BASE_REF)
else
echo "No changes to commit."
fi
workspaces:
- name: source
workspace: git-source
runAfter:
- cargo-fix
finally:
- name: report-success
when:
- input: "$(tasks.status)"
operator: in
values: ["Succeeded", "Completed"]
taskRef:
name: gitea-set-status
params:
- name: CONTEXT
value: "$(params.JOB_NAME)"
- name: REPO_FULL_NAME
value: "$(params.REPO_OWNER)/$(params.REPO_NAME)"
- name: GITEA_HOST_URL
value: code.fizz.buzz
- name: SHA
value: "$(tasks.fetch-repository.results.commit)"
- name: DESCRIPTION
value: "Build $(params.JOB_NAME) has succeeded"
- name: STATE
value: success
- name: TARGET_URL
value: "https://tekton.fizz.buzz/#/namespaces/$(context.pipelineRun.namespace)/pipelineruns/$(context.pipelineRun.name)"
- name: report-failure
when:
- input: "$(tasks.status)"
operator: in
values: ["Failed"]
taskRef:
name: gitea-set-status
params:
- name: CONTEXT
value: "$(params.JOB_NAME)"
- name: REPO_FULL_NAME
value: "$(params.REPO_OWNER)/$(params.REPO_NAME)"
- name: GITEA_HOST_URL
value: code.fizz.buzz
- name: SHA
value: "$(tasks.fetch-repository.results.commit)"
- name: DESCRIPTION
value: "Build $(params.JOB_NAME) has failed"
- name: STATE
value: failure
- name: TARGET_URL
value: "https://tekton.fizz.buzz/#/namespaces/$(context.pipelineRun.namespace)/pipelineruns/$(context.pipelineRun.name)"
workspaces:
- name: git-source
- name: docker-credentials
workspaces:
- name: git-source
volumeClaimTemplate:
spec:
storageClassName: "nfs-client"
accessModes:
- ReadWriteOnce
resources:
requests:
storage: 10Gi
subPath: rust-source
- name: cargo-cache
persistentVolumeClaim:
claimName: organic-cargo-cache-fmt
- name: docker-credentials
secret:
secretName: harbor-plain
serviceAccountName: build-bot
timeout: 240h0m0s
params:
- name: image-name
value: "harbor.fizz.buzz/private/organic-fmt"
- name: path-to-image-context
value: docker/cargo_fmt/
- name: path-to-dockerfile
value: docker/cargo_fmt/Dockerfile

View File

@@ -1,39 +0,0 @@
apiVersion: config.lighthouse.jenkins-x.io/v1alpha1
kind: TriggerConfig
spec:
postsubmits:
- name: rustfmt
source: "pipeline-rustfmt.yaml"
# Override https-based url from lighthouse events.
clone_uri: "git@code.fizz.buzz:talexander/organic.git"
branches:
- ^main$
- ^master$
- name: rust-test
source: "pipeline-rust-test.yaml"
# Override https-based url from lighthouse events.
clone_uri: "git@code.fizz.buzz:talexander/organic.git"
skip_branches:
# We already run on every commit, so running when the semver tags get pushed is causing needless double-processing.
- "^v[0-9]+\\.[0-9]+\\.[0-9]+$"
- name: rust-foreign-document-test
source: "pipeline-foreign-document-test.yaml"
# Override https-based url from lighthouse events.
clone_uri: "git@code.fizz.buzz:talexander/organic.git"
skip_branches:
# We already run on every commit, so running when the semver tags get pushed is causing needless double-processing.
- "^v[0-9]+\\.[0-9]+\\.[0-9]+$"
- name: rust-build
source: "pipeline-rust-build.yaml"
# Override https-based url from lighthouse events.
clone_uri: "git@code.fizz.buzz:talexander/organic.git"
skip_branches:
# We already run on every commit, so running when the semver tags get pushed is causing needless double-processing.
- "^v[0-9]+\\.[0-9]+\\.[0-9]+$"
- name: clippy
source: "pipeline-clippy.yaml"
# Override https-based url from lighthouse events.
clone_uri: "git@code.fizz.buzz:talexander/organic.git"
skip_branches:
# We already run on every commit, so running when the semver tags get pushed is causing needless double-processing.
- "^v[0-9]+\\.[0-9]+\\.[0-9]+$"

View File

@@ -0,0 +1,701 @@
apiVersion: tekton.dev/v1
kind: PipelineRun
metadata:
name: build
spec:
timeouts:
pipeline: "2h0m0s"
tasks: "1h0m0s"
finally: "0h30m0s"
taskRunTemplate:
serviceAccountName: build-bot
pipelineSpec:
params:
- name: image-name
description: The name for the built image
type: string
- name: target-name
description: The dockerfile target to build
type: string
- name: path-to-image-context
description: The path to the build context
type: string
- name: path-to-dockerfile
description: The path to the Dockerfile
type: string
tasks:
- name: report-pending
taskRef:
resolver: git
params:
- name: url
value: https://code.fizz.buzz/mirror/catalog.git # mirror of https://github.com/tektoncd/catalog.git
- name: revision
value: df36b3853a5657fd883015cdbf07ad6466918acf
- name: pathInRepo
value: task/gitea-set-status/0.1/gitea-set-status.yaml
params:
- name: CONTEXT
value: "$(params.JOB_NAME)"
- name: REPO_FULL_NAME
value: "$(params.REPO_OWNER)/$(params.REPO_NAME)"
- name: GITEA_HOST_URL
value: code.fizz.buzz
- name: SHA
value: "$(tasks.fetch-repository.results.commit)"
- name: DESCRIPTION
value: "Build $(params.JOB_NAME) has started"
- name: STATE
value: pending
- name: TARGET_URL
value: "https://tekton.fizz.buzz/#/namespaces/$(context.pipelineRun.namespace)/pipelineruns/$(context.pipelineRun.name)"
- name: fetch-repository
taskRef:
resolver: git
params:
- name: url
value: https://code.fizz.buzz/mirror/catalog.git # mirror of https://github.com/tektoncd/catalog.git
- name: revision
value: df36b3853a5657fd883015cdbf07ad6466918acf
- name: pathInRepo
value: task/git-clone/0.9/git-clone.yaml
workspaces:
- name: output
workspace: git-source
params:
- name: url
value: $(params.REPO_URL)
- name: revision
value: $(params.PULL_BASE_SHA)
- name: deleteExisting
value: "true"
- name: get-git-commit-time
taskSpec:
metadata: {}
stepTemplate:
image: alpine:3.20
computeResources:
requests:
cpu: 10m
memory: 600Mi
workingDir: "$(workspaces.repo.path)"
results:
- name: unix-time
description: The time of the git commit in unix timestamp format.
steps:
- image: alpine/git:v2.34.2
name: detect-tag-step
script: |
#!/usr/bin/env sh
set -euo pipefail
echo -n "$(git log -1 --pretty=%ct)" | tee $(results.unix-time.path)
workspaces:
- name: repo
workspace: git-source
runAfter:
- fetch-repository
- name: build-image
taskRef:
resolver: git
params:
- name: url
value: https://code.fizz.buzz/talexander/personal_tekton_catalog.git
- name: revision
value: 7ee31a185243ee6da13dcd26a592c585b64c80e5
- name: pathInRepo
value: task/buildkit-rootless-daemonless/0.1/buildkit-rootless-daemonless.yaml
params:
- name: OUTPUT
value: >-
type=image,"name=$(params.image-name):latest,$(params.image-name):$(tasks.fetch-repository.results.commit)",push=true,compression=zstd,compression-level=22,oci-mediatypes=true
- name: CONTEXT
value: $(params.path-to-image-context)
- name: DOCKERFILE
value: $(params.path-to-dockerfile)
- name: EXTRA_ARGS
value:
- "--opt"
- "target=$(params.target-name)"
- --import-cache
- "type=registry,ref=$(params.image-name):buildcache"
- --export-cache
- "type=registry,ref=$(params.image-name):buildcache,mode=max,compression=zstd,compression-level=22,rewrite-timestamp=true,image-manifest=true,oci-mediatypes=true"
- --opt
- build-arg:SOURCE_DATE_EPOCH=$(tasks.get-git-commit-time.results.unix-time)
- name: BUILDKITD_TOML
value: |
debug = true
[registry."docker.io"]
mirrors = ["dockerhub.dockerhub.svc.cluster.local"]
[registry."dockerhub.dockerhub.svc.cluster.local"]
http = true
insecure = true
workspaces:
- name: source
workspace: git-source
- name: dockerconfig
workspace: docker-credentials
runAfter:
- fetch-repository
#############
- name: run-image-none
taskSpec:
metadata: {}
params:
- name: docker-image
type: string
description: Docker image to run.
default: alpine:3.18
stepTemplate:
image: alpine:3.18
name: ""
            computeResources:
requests:
cpu: 10m
memory: 60Mi
workingDir: /workspace/source
workspaces:
- name: source
mountPath: /source
- name: cargo-cache
mountPath: /usr/local/cargo/registry
optional: true
steps:
- name: run
image: $(params.docker-image)
workingDir: "$(workspaces.source.path)"
command: ["cargo", "build"]
args: ["--no-default-features"]
env:
- name: CARGO_TARGET_DIR
value: /target
workspaces:
- name: source
workspace: git-source
- name: cargo-cache
workspace: cargo-cache
subPath: none
runAfter:
- build-image
params:
- name: docker-image
value: "$(tasks.build-image.results.IMAGE_URL[1])"
#############
- name: run-image-tracing
taskSpec:
metadata: {}
params:
- name: docker-image
type: string
description: Docker image to run.
default: alpine:3.18
stepTemplate:
image: alpine:3.18
name: ""
            computeResources:
requests:
cpu: 10m
memory: 60Mi
workingDir: /workspace/source
workspaces:
- name: source
mountPath: /source
- name: cargo-cache
mountPath: /usr/local/cargo/registry
optional: true
steps:
- name: run
image: $(params.docker-image)
workingDir: "$(workspaces.source.path)"
command: ["cargo", "build"]
args: ["--no-default-features", "--features", "tracing"]
env:
- name: CARGO_TARGET_DIR
value: /target
workspaces:
- name: source
workspace: git-source
- name: cargo-cache
workspace: cargo-cache
subPath: tracing
runAfter:
- run-image-none
params:
- name: docker-image
value: "$(tasks.build-image.results.IMAGE_URL[1])"
#############
- name: run-image-compare
taskSpec:
metadata: {}
params:
- name: docker-image
type: string
description: Docker image to run.
default: alpine:3.18
stepTemplate:
image: alpine:3.18
name: ""
            computeResources:
requests:
cpu: 10m
memory: 60Mi
workingDir: /workspace/source
workspaces:
- name: source
mountPath: /source
- name: cargo-cache
mountPath: /usr/local/cargo/registry
optional: true
steps:
- name: run
image: $(params.docker-image)
workingDir: "$(workspaces.source.path)"
command: ["cargo", "build"]
args: ["--no-default-features", "--features", "compare"]
env:
- name: CARGO_TARGET_DIR
value: /target
workspaces:
- name: source
workspace: git-source
- name: cargo-cache
workspace: cargo-cache
subPath: compare
runAfter:
- run-image-tracing
params:
- name: docker-image
value: "$(tasks.build-image.results.IMAGE_URL[1])"
#############
- name: run-image-default
taskSpec:
metadata: {}
params:
- name: docker-image
type: string
description: Docker image to run.
default: alpine:3.18
stepTemplate:
image: alpine:3.18
name: ""
            computeResources:
requests:
cpu: 10m
memory: 60Mi
workingDir: /workspace/source
workspaces:
- name: source
mountPath: /source
- name: cargo-cache
mountPath: /usr/local/cargo/registry
optional: true
steps:
- name: run
image: $(params.docker-image)
workingDir: "$(workspaces.source.path)"
command: ["cargo", "build"]
args: []
env:
- name: CARGO_TARGET_DIR
value: /target
workspaces:
- name: source
workspace: git-source
- name: cargo-cache
workspace: cargo-cache
subPath: default
runAfter:
- run-image-compare
params:
- name: docker-image
value: "$(tasks.build-image.results.IMAGE_URL[1])"
#############
- name: run-image-tracing-compare
taskSpec:
metadata: {}
params:
- name: docker-image
type: string
description: Docker image to run.
default: alpine:3.18
stepTemplate:
image: alpine:3.18
name: ""
            computeResources:
requests:
cpu: 10m
memory: 60Mi
workingDir: /workspace/source
workspaces:
- name: source
mountPath: /source
- name: cargo-cache
mountPath: /usr/local/cargo/registry
optional: true
steps:
- name: run
image: $(params.docker-image)
workingDir: "$(workspaces.source.path)"
command: ["cargo", "build"]
args: ["--no-default-features", "--features", "tracing,compare"]
env:
- name: CARGO_TARGET_DIR
value: /target
workspaces:
- name: source
workspace: git-source
- name: cargo-cache
workspace: cargo-cache
subPath: tracing-compare
runAfter:
- run-image-default
params:
- name: docker-image
value: "$(tasks.build-image.results.IMAGE_URL[1])"
#############
- name: run-image-compare-foreign
taskSpec:
metadata: {}
params:
- name: docker-image
type: string
description: Docker image to run.
default: alpine:3.18
stepTemplate:
image: alpine:3.18
name: ""
            computeResources:
requests:
cpu: 10m
memory: 60Mi
workingDir: /workspace/source
workspaces:
- name: source
mountPath: /source
- name: cargo-cache
mountPath: /usr/local/cargo/registry
optional: true
steps:
- name: run
image: $(params.docker-image)
workingDir: "$(workspaces.source.path)"
command: ["cargo", "build"]
args:
[
"--no-default-features",
"--features",
"compare,foreign_document_test",
]
env:
- name: CARGO_TARGET_DIR
value: /target
workspaces:
- name: source
workspace: git-source
- name: cargo-cache
workspace: cargo-cache
subPath: compare-foreign
runAfter:
- run-image-tracing-compare
params:
- name: docker-image
value: "$(tasks.build-image.results.IMAGE_URL[1])"
#############
- name: run-image-all
taskSpec:
metadata: {}
params:
- name: docker-image
type: string
description: Docker image to run.
default: alpine:3.18
stepTemplate:
image: alpine:3.18
name: ""
            computeResources:
requests:
cpu: 10m
memory: 60Mi
workingDir: /workspace/source
workspaces:
- name: source
mountPath: /source
- name: cargo-cache
mountPath: /usr/local/cargo/registry
optional: true
steps:
- name: run
image: $(params.docker-image)
workingDir: "$(workspaces.source.path)"
command: ["cargo", "build"]
args:
[
"--no-default-features",
"--features",
"tracing,compare,foreign_document_test",
]
env:
- name: CARGO_TARGET_DIR
value: /target
workspaces:
- name: source
workspace: git-source
- name: cargo-cache
workspace: cargo-cache
subPath: all
runAfter:
- run-image-compare-foreign
params:
- name: docker-image
value: "$(tasks.build-image.results.IMAGE_URL[1])"
#############
- name: run-image-wasm
taskSpec:
metadata: {}
params:
- name: docker-image
type: string
description: Docker image to run.
default: alpine:3.18
stepTemplate:
image: alpine:3.18
name: ""
            computeResources:
requests:
cpu: 10m
memory: 60Mi
workingDir: /workspace/source
workspaces:
- name: source
mountPath: /source
- name: cargo-cache
mountPath: /usr/local/cargo/registry
optional: true
steps:
- name: run
image: $(params.docker-image)
workingDir: "$(workspaces.source.path)"
command: ["cargo", "build"]
args:
[
"--target",
"wasm32-unknown-unknown",
"--profile",
"wasm",
"--bin",
"wasm",
"--no-default-features",
"--features",
"wasm",
]
env:
- name: CARGO_TARGET_DIR
value: /target
workspaces:
- name: source
workspace: git-source
- name: cargo-cache
workspace: cargo-cache
subPath: wasm
runAfter:
- run-image-all
params:
- name: docker-image
value: "$(tasks.build-image.results.IMAGE_URL[1])"
#############
- name: run-image-wasm-test
taskSpec:
metadata: {}
params:
- name: docker-image
type: string
description: Docker image to run.
default: alpine:3.18
stepTemplate:
image: alpine:3.18
name: ""
            computeResources:
requests:
cpu: 10m
memory: 60Mi
workingDir: /workspace/source
workspaces:
- name: source
mountPath: /source
- name: cargo-cache
mountPath: /usr/local/cargo/registry
optional: true
steps:
- name: run
image: $(params.docker-image)
workingDir: "$(workspaces.source.path)"
command: ["cargo", "build"]
args:
[
"--bin",
"wasm_test",
"--no-default-features",
"--features",
"wasm_test",
]
env:
- name: CARGO_TARGET_DIR
value: /target
workspaces:
- name: source
workspace: git-source
- name: cargo-cache
workspace: cargo-cache
subPath: wasm-test
runAfter:
- run-image-wasm
params:
- name: docker-image
value: "$(tasks.build-image.results.IMAGE_URL[1])"
#############
finally:
- name: report-success
when:
- input: "$(tasks.status)"
operator: in
values: ["Succeeded", "Completed"]
taskRef:
resolver: git
params:
- name: url
value: https://code.fizz.buzz/mirror/catalog.git # mirror of https://github.com/tektoncd/catalog.git
- name: revision
value: df36b3853a5657fd883015cdbf07ad6466918acf
- name: pathInRepo
value: task/gitea-set-status/0.1/gitea-set-status.yaml
params:
- name: CONTEXT
value: "$(params.JOB_NAME)"
- name: REPO_FULL_NAME
value: "$(params.REPO_OWNER)/$(params.REPO_NAME)"
- name: GITEA_HOST_URL
value: code.fizz.buzz
- name: SHA
value: "$(tasks.fetch-repository.results.commit)"
- name: DESCRIPTION
value: "Build $(params.JOB_NAME) has succeeded"
- name: STATE
value: success
- name: TARGET_URL
value: "https://tekton.fizz.buzz/#/namespaces/$(context.pipelineRun.namespace)/pipelineruns/$(context.pipelineRun.name)"
- name: report-failure
when:
- input: "$(tasks.status)"
operator: in
values: ["Failed"]
taskRef:
resolver: git
params:
- name: url
value: https://code.fizz.buzz/mirror/catalog.git # mirror of https://github.com/tektoncd/catalog.git
- name: revision
value: df36b3853a5657fd883015cdbf07ad6466918acf
- name: pathInRepo
value: task/gitea-set-status/0.1/gitea-set-status.yaml
params:
- name: CONTEXT
value: "$(params.JOB_NAME)"
- name: REPO_FULL_NAME
value: "$(params.REPO_OWNER)/$(params.REPO_NAME)"
- name: GITEA_HOST_URL
value: code.fizz.buzz
- name: SHA
value: "$(tasks.fetch-repository.results.commit)"
- name: DESCRIPTION
value: "Build $(params.JOB_NAME) has failed"
- name: STATE
value: failure
- name: TARGET_URL
value: "https://tekton.fizz.buzz/#/namespaces/$(context.pipelineRun.namespace)/pipelineruns/$(context.pipelineRun.name)"
- name: cargo-cache-autoclean
taskSpec:
metadata: {}
params:
- name: docker-image
type: string
description: Docker image to run.
default: alpine:3.18
- name: cache-subdir
type: string
description: subPath used in the persistent volume for the cargo cache.
          stepTemplate:
image: alpine:3.18
name: ""
              computeResources:
requests:
cpu: 10m
memory: 60Mi
workingDir: /workspace/source
workspaces:
- name: source
mountPath: /source
- name: cargo-cache
mountPath: /usr/local/cargo/registry
optional: true
steps:
- name: run
image: $(params.docker-image)
workingDir: "$(workspaces.source.path)"
command: ["ls", "$(workspaces.cargo-cache.path)"]
# command: [echo, $(params.cache-subdir)]
# command: [cargo, cache, --autoclean]
args: []
workspaces:
- name: source
workspace: git-source
- name: cargo-cache
workspace: cargo-cache
subPath: $(params.cache-subdir)
params:
- name: docker-image
value: "$(tasks.build-image.results.IMAGE_URL[1])"
- name: cache-subdir
value: none
# matrix:
# params:
# - name: cache-subdir
# value:
# - none
# - tracing
# - compare
# - default
# - tracing-compare
# - compare-foreign
# - all
# - wasm
# - wasm-test
workspaces:
- name: git-source
- name: docker-credentials
- name: cargo-cache
workspaces:
- name: git-source
volumeClaimTemplate:
spec:
storageClassName: "local-path"
accessModes:
- ReadWriteOnce
resources:
requests:
storage: 10Gi
subPath: rust-source
- name: cargo-cache
persistentVolumeClaim:
claimName: organic-cargo-cache-build
- name: docker-credentials
secret:
secretName: harbor-plain
params:
- name: image-name
value: "harbor.fizz.buzz/private/organic-development-wasm"
- name: target-name
value: "wasm"
- name: path-to-image-context
value: .
- name: path-to-dockerfile
value: docker/organic_development/

View File

@@ -0,0 +1,301 @@
apiVersion: tekton.dev/v1
kind: PipelineRun
metadata:
name: foreign-document-test
spec:
timeouts:
pipeline: "2h0m0s"
tasks: "1h0m40s"
finally: "0h30m0s"
taskRunTemplate:
serviceAccountName: build-bot
pipelineSpec:
params:
- name: image-name
description: The name for the built image
type: string
- name: target-name
description: The dockerfile target to build
type: string
- name: path-to-image-context
description: The path to the build context
type: string
- name: path-to-dockerfile
description: The path to the Dockerfile
type: string
tasks:
- name: report-pending
taskRef:
resolver: git
params:
- name: url
value: https://code.fizz.buzz/mirror/catalog.git # mirror of https://github.com/tektoncd/catalog.git
- name: revision
value: df36b3853a5657fd883015cdbf07ad6466918acf
- name: pathInRepo
value: task/gitea-set-status/0.1/gitea-set-status.yaml
params:
- name: CONTEXT
value: "$(params.JOB_NAME)"
- name: REPO_FULL_NAME
value: "$(params.REPO_OWNER)/$(params.REPO_NAME)"
- name: GITEA_HOST_URL
value: code.fizz.buzz
- name: SHA
value: "$(tasks.fetch-repository.results.commit)"
- name: DESCRIPTION
value: "Build $(params.JOB_NAME) has started"
- name: STATE
value: pending
- name: TARGET_URL
value: "https://tekton.fizz.buzz/#/namespaces/$(context.pipelineRun.namespace)/pipelineruns/$(context.pipelineRun.name)"
- name: fetch-repository
taskRef:
resolver: git
params:
- name: url
value: https://code.fizz.buzz/mirror/catalog.git # mirror of https://github.com/tektoncd/catalog.git
- name: revision
value: df36b3853a5657fd883015cdbf07ad6466918acf
- name: pathInRepo
value: task/git-clone/0.9/git-clone.yaml
workspaces:
- name: output
workspace: git-source
params:
- name: url
value: $(params.REPO_URL)
- name: revision
value: $(params.PULL_BASE_SHA)
- name: deleteExisting
value: "true"
- name: get-git-commit-time
taskSpec:
metadata: {}
stepTemplate:
image: alpine:3.20
computeResources:
requests:
cpu: 10m
memory: 600Mi
workingDir: "$(workspaces.repo.path)"
results:
- name: unix-time
description: The time of the git commit in unix timestamp format.
steps:
- image: alpine/git:v2.34.2
name: detect-tag-step
script: |
#!/usr/bin/env sh
set -euo pipefail
echo -n "$(git log -1 --pretty=%ct)" | tee $(results.unix-time.path)
workspaces:
- name: repo
workspace: git-source
runAfter:
- fetch-repository
- name: build-image
taskRef:
resolver: git
params:
- name: url
value: https://code.fizz.buzz/talexander/personal_tekton_catalog.git
- name: revision
value: 7ee31a185243ee6da13dcd26a592c585b64c80e5
- name: pathInRepo
value: task/buildkit-rootless-daemonless/0.1/buildkit-rootless-daemonless.yaml
params:
- name: OUTPUT
value: >-
type=image,"name=$(params.image-name):latest,$(params.image-name):$(tasks.fetch-repository.results.commit)",push=true,compression=zstd,compression-level=22,oci-mediatypes=true
- name: CONTEXT
value: $(params.path-to-image-context)
- name: DOCKERFILE
value: $(params.path-to-dockerfile)
- name: EXTRA_ARGS
value:
- "--opt"
- "target=$(params.target-name)"
- --import-cache
- "type=registry,ref=$(params.image-name):buildcache"
- --export-cache
- "type=registry,ref=$(params.image-name):buildcache,mode=max,compression=zstd,compression-level=22,rewrite-timestamp=true,image-manifest=true,oci-mediatypes=true"
- --opt
- build-arg:SOURCE_DATE_EPOCH=$(tasks.get-git-commit-time.results.unix-time)
- name: BUILDKITD_TOML
value: |
debug = true
[registry."docker.io"]
mirrors = ["dockerhub.dockerhub.svc.cluster.local"]
[registry."dockerhub.dockerhub.svc.cluster.local"]
http = true
insecure = true
workspaces:
- name: source
workspace: git-source
- name: dockerconfig
workspace: docker-credentials
runAfter:
- fetch-repository
- name: run-test
taskSpec:
metadata: {}
params:
- name: docker-image
type: string
description: Docker image to run.
default: alpine:3.20
stepTemplate:
image: alpine:3.20
computeResources:
requests:
cpu: 10m
memory: 600Mi
workingDir: /workspace/source
workspaces:
- name: source
mountPath: /source
- name: cargo-cache
mountPath: /usr/local/cargo/registry
optional: true
steps:
- name: run
image: $(params.docker-image)
workingDir: "$(workspaces.source.path)"
env:
- name: CARGO_TARGET_DIR
value: /target
workspaces:
- name: source
workspace: git-source
- name: cargo-cache
workspace: cargo-cache
runAfter:
- build-image
params:
- name: docker-image
value: "$(tasks.build-image.results.IMAGE_URL[1])"
finally:
- name: report-success
when:
- input: "$(tasks.status)"
operator: in
values: ["Succeeded", "Completed"]
taskRef:
resolver: git
params:
- name: url
value: https://code.fizz.buzz/mirror/catalog.git # mirror of https://github.com/tektoncd/catalog.git
- name: revision
value: df36b3853a5657fd883015cdbf07ad6466918acf
- name: pathInRepo
value: task/gitea-set-status/0.1/gitea-set-status.yaml
params:
- name: CONTEXT
value: "$(params.JOB_NAME)"
- name: REPO_FULL_NAME
value: "$(params.REPO_OWNER)/$(params.REPO_NAME)"
- name: GITEA_HOST_URL
value: code.fizz.buzz
- name: SHA
value: "$(tasks.fetch-repository.results.commit)"
- name: DESCRIPTION
value: "Build $(params.JOB_NAME) has succeeded"
- name: STATE
value: success
- name: TARGET_URL
value: "https://tekton.fizz.buzz/#/namespaces/$(context.pipelineRun.namespace)/pipelineruns/$(context.pipelineRun.name)"
- name: report-failure
when:
- input: "$(tasks.status)"
operator: in
values: ["Failed"]
taskRef:
resolver: git
params:
- name: url
value: https://code.fizz.buzz/mirror/catalog.git # mirror of https://github.com/tektoncd/catalog.git
- name: revision
value: df36b3853a5657fd883015cdbf07ad6466918acf
- name: pathInRepo
value: task/gitea-set-status/0.1/gitea-set-status.yaml
params:
- name: CONTEXT
value: "$(params.JOB_NAME)"
- name: REPO_FULL_NAME
value: "$(params.REPO_OWNER)/$(params.REPO_NAME)"
- name: GITEA_HOST_URL
value: code.fizz.buzz
- name: SHA
value: "$(tasks.fetch-repository.results.commit)"
- name: DESCRIPTION
value: "Build $(params.JOB_NAME) has failed"
- name: STATE
value: failure
- name: TARGET_URL
value: "https://tekton.fizz.buzz/#/namespaces/$(context.pipelineRun.namespace)/pipelineruns/$(context.pipelineRun.name)"
- name: cargo-cache-autoclean
taskSpec:
metadata: {}
params:
- name: docker-image
type: string
description: Docker image to run.
default: alpine:3.20
stepTemplate:
image: alpine:3.20
computeResources:
requests:
cpu: 10m
memory: 600Mi
workingDir: /workspace/source
workspaces:
- name: source
mountPath: /source
- name: cargo-cache
mountPath: /usr/local/cargo/registry
optional: true
steps:
- name: run
image: $(params.docker-image)
workingDir: "$(workspaces.source.path)"
command: [cargo, cache, --autoclean]
args: []
workspaces:
- name: source
workspace: git-source
- name: cargo-cache
workspace: cargo-cache
params:
- name: docker-image
value: "$(tasks.build-image.results.IMAGE_URL[1])"
workspaces:
- name: git-source
- name: docker-credentials
- name: cargo-cache
workspaces:
- name: git-source
volumeClaimTemplate:
spec:
storageClassName: "local-path"
accessModes:
- ReadWriteOnce
resources:
requests:
storage: 10Gi
subPath: rust-source
- name: cargo-cache
persistentVolumeClaim:
claimName: organic-cargo-cache-test-foreign-document
- name: docker-credentials
secret:
secretName: harbor-plain
params:
- name: image-name
value: "harbor.fizz.buzz/private/organic-test-foreign-document"
- name: target-name
value: "foreign-document"
- name: path-to-image-context
value: docker/organic_test/
- name: path-to-dockerfile
value: docker/organic_test/

View File

@@ -0,0 +1,334 @@
apiVersion: tekton.dev/v1
kind: PipelineRun
metadata:
name: rust-format
spec:
timeouts:
pipeline: "2h0m0s"
tasks: "1h0m0s"
finally: "0h30m0s"
taskRunTemplate:
serviceAccountName: build-bot
pipelineSpec:
params:
- name: image-name
description: The name for the built image
type: string
- name: target-name
description: The dockerfile target to build
type: string
- name: path-to-image-context
description: The path to the build context
type: string
- name: path-to-dockerfile
description: The path to the Dockerfile
type: string
tasks:
- name: report-pending
taskRef:
resolver: git
params:
- name: url
value: https://code.fizz.buzz/mirror/catalog.git # mirror of https://github.com/tektoncd/catalog.git
- name: revision
value: df36b3853a5657fd883015cdbf07ad6466918acf
- name: pathInRepo
value: task/gitea-set-status/0.1/gitea-set-status.yaml
params:
- name: CONTEXT
value: "$(params.JOB_NAME)"
- name: REPO_FULL_NAME
value: "$(params.REPO_OWNER)/$(params.REPO_NAME)"
- name: GITEA_HOST_URL
value: code.fizz.buzz
- name: SHA
value: "$(tasks.fetch-repository.results.commit)"
- name: DESCRIPTION
value: "Build $(params.JOB_NAME) has started"
- name: STATE
value: pending
- name: TARGET_URL
value: "https://tekton.fizz.buzz/#/namespaces/$(context.pipelineRun.namespace)/pipelineruns/$(context.pipelineRun.name)"
- name: fetch-repository
taskRef:
resolver: git
params:
- name: url
value: https://code.fizz.buzz/mirror/catalog.git # mirror of https://github.com/tektoncd/catalog.git
- name: revision
value: df36b3853a5657fd883015cdbf07ad6466918acf
- name: pathInRepo
value: task/git-clone/0.9/git-clone.yaml
workspaces:
- name: output
workspace: git-source
params:
- name: url
value: $(params.REPO_URL)
- name: revision
value: $(params.PULL_BASE_SHA)
- name: deleteExisting
value: "true"
- name: get-git-commit-time
taskSpec:
metadata: {}
stepTemplate:
image: alpine:3.20
computeResources:
requests:
cpu: 10m
memory: 600Mi
workingDir: "$(workspaces.repo.path)"
results:
- name: unix-time
description: The time of the git commit in unix timestamp format.
steps:
- image: alpine/git:v2.34.2
name: detect-tag-step
script: |
#!/usr/bin/env sh
set -euo pipefail
echo -n "$(git log -1 --pretty=%ct)" | tee $(results.unix-time.path)
workspaces:
- name: repo
workspace: git-source
runAfter:
- fetch-repository
- name: build-image
taskRef:
resolver: git
params:
- name: url
value: https://code.fizz.buzz/talexander/personal_tekton_catalog.git
- name: revision
value: 7ee31a185243ee6da13dcd26a592c585b64c80e5
- name: pathInRepo
value: task/buildkit-rootless-daemonless/0.1/buildkit-rootless-daemonless.yaml
params:
- name: OUTPUT
value: >-
type=image,"name=$(params.image-name):latest,$(params.image-name):$(tasks.fetch-repository.results.commit)",push=true,compression=zstd,compression-level=22,oci-mediatypes=true
- name: CONTEXT
value: $(params.path-to-image-context)
- name: DOCKERFILE
value: $(params.path-to-dockerfile)
- name: EXTRA_ARGS
value:
- "--opt"
- "target=$(params.target-name)"
- --import-cache
- "type=registry,ref=$(params.image-name):buildcache"
- --export-cache
- "type=registry,ref=$(params.image-name):buildcache,mode=max,compression=zstd,compression-level=22,rewrite-timestamp=true,image-manifest=true,oci-mediatypes=true"
- --opt
- build-arg:SOURCE_DATE_EPOCH=$(tasks.get-git-commit-time.results.unix-time)
- name: BUILDKITD_TOML
value: |
debug = true
[registry."docker.io"]
mirrors = ["dockerhub.dockerhub.svc.cluster.local"]
[registry."dockerhub.dockerhub.svc.cluster.local"]
http = true
insecure = true
workspaces:
- name: source
workspace: git-source
- name: dockerconfig
workspace: docker-credentials
runAfter:
- fetch-repository
- name: run-cargo-fmt
taskSpec:
metadata: {}
params:
- name: docker-image
type: string
description: Docker image to run.
default: alpine:3.20
stepTemplate:
image: alpine:3.20
computeResources:
requests:
cpu: 10m
memory: 600Mi
workingDir: /workspace/source
workspaces:
- name: source
mountPath: /source
- name: cargo-cache
mountPath: /usr/local/cargo/registry
optional: true
steps:
- name: run
image: $(params.docker-image)
workingDir: "$(workspaces.source.path)"
command: ["cargo", "fmt"]
args: []
env:
- name: CARGO_TARGET_DIR
value: /target
workspaces:
- name: source
workspace: git-source
- name: cargo-cache
workspace: cargo-cache
runAfter:
- build-image
params:
- name: docker-image
value: "$(tasks.build-image.results.IMAGE_URL[1])"
- name: commit-changes
taskRef:
resolver: git
params:
- name: url
value: https://code.fizz.buzz/mirror/catalog.git # mirror of https://github.com/tektoncd/catalog.git
- name: revision
value: df36b3853a5657fd883015cdbf07ad6466918acf
- name: pathInRepo
value: task/git-cli/0.4/git-cli.yaml
params:
- name: GIT_USER_NAME
value: fluxcdbot
- name: GIT_USER_EMAIL
value: "fluxcdbot@users.noreply.github.com"
- name: GIT_SCRIPT
value: |
pwd
git config --global --add safe.directory /workspace/source
git_status=$(git status --porcelain)
if [ -n "$git_status" ]; then
git commit -a -m "CI: autofix rust code."
git push origin HEAD:$(params.PULL_BASE_REF)
else
echo "No changes to commit."
fi
workspaces:
- name: source
workspace: git-source
runAfter:
- run-cargo-fmt
finally:
- name: report-success
when:
- input: "$(tasks.status)"
operator: in
values: ["Succeeded", "Completed"]
taskRef:
resolver: git
params:
- name: url
value: https://code.fizz.buzz/mirror/catalog.git # mirror of https://github.com/tektoncd/catalog.git
- name: revision
value: df36b3853a5657fd883015cdbf07ad6466918acf
- name: pathInRepo
value: task/gitea-set-status/0.1/gitea-set-status.yaml
params:
- name: CONTEXT
value: "$(params.JOB_NAME)"
- name: REPO_FULL_NAME
value: "$(params.REPO_OWNER)/$(params.REPO_NAME)"
- name: GITEA_HOST_URL
value: code.fizz.buzz
- name: SHA
value: "$(tasks.fetch-repository.results.commit)"
- name: DESCRIPTION
value: "Build $(params.JOB_NAME) has succeeded"
- name: STATE
value: success
- name: TARGET_URL
value: "https://tekton.fizz.buzz/#/namespaces/$(context.pipelineRun.namespace)/pipelineruns/$(context.pipelineRun.name)"
- name: report-failure
when:
- input: "$(tasks.status)"
operator: in
values: ["Failed"]
taskRef:
resolver: git
params:
- name: url
value: https://code.fizz.buzz/mirror/catalog.git # mirror of https://github.com/tektoncd/catalog.git
- name: revision
value: df36b3853a5657fd883015cdbf07ad6466918acf
- name: pathInRepo
value: task/gitea-set-status/0.1/gitea-set-status.yaml
params:
- name: CONTEXT
value: "$(params.JOB_NAME)"
- name: REPO_FULL_NAME
value: "$(params.REPO_OWNER)/$(params.REPO_NAME)"
- name: GITEA_HOST_URL
value: code.fizz.buzz
- name: SHA
value: "$(tasks.fetch-repository.results.commit)"
- name: DESCRIPTION
value: "Build $(params.JOB_NAME) has failed"
- name: STATE
value: failure
- name: TARGET_URL
value: "https://tekton.fizz.buzz/#/namespaces/$(context.pipelineRun.namespace)/pipelineruns/$(context.pipelineRun.name)"
- name: cargo-cache-autoclean
taskSpec:
metadata: {}
params:
- name: docker-image
type: string
description: Docker image to run.
default: alpine:3.20
stepTemplate:
image: alpine:3.20
computeResources:
requests:
cpu: 10m
memory: 600Mi
workingDir: /workspace/source
workspaces:
- name: source
mountPath: /source
- name: cargo-cache
mountPath: /usr/local/cargo/registry
optional: true
steps:
- name: run
image: $(params.docker-image)
workingDir: "$(workspaces.source.path)"
command: [cargo, cache, --autoclean]
args: []
workspaces:
- name: source
workspace: git-source
- name: cargo-cache
workspace: cargo-cache
params:
- name: docker-image
value: "$(tasks.build-image.results.IMAGE_URL[1])"
workspaces:
- name: git-source
- name: docker-credentials
- name: cargo-cache
workspaces:
- name: git-source
volumeClaimTemplate:
spec:
storageClassName: "local-path"
accessModes:
- ReadWriteOnce
resources:
requests:
storage: 10Gi
subPath: rust-source
- name: cargo-cache
persistentVolumeClaim:
claimName: organic-cargo-cache-fmt
- name: docker-credentials
secret:
secretName: harbor-plain
params:
- name: image-name
value: "harbor.fizz.buzz/private/organic-development-format"
- name: target-name
value: "format"
- name: path-to-image-context
value: docker/organic_development/
- name: path-to-dockerfile
value: docker/organic_development/

View File

@@ -0,0 +1,313 @@
apiVersion: tekton.dev/v1
kind: PipelineRun
metadata:
name: rust-clippy
spec:
taskRunTemplate:
serviceAccountName: build-bot
timeouts:
pipeline: "2h0m0s"
tasks: "1h0m40s"
finally: "0h30m0s"
pipelineSpec:
params:
- name: image-name
description: The name for the built image
type: string
- name: target-name
description: The dockerfile target to build
type: string
- name: path-to-image-context
description: The path to the build context
type: string
- name: path-to-dockerfile
description: The path to the Dockerfile
type: string
tasks:
- name: report-pending
taskRef:
resolver: git
params:
- name: url
value: https://code.fizz.buzz/mirror/catalog.git # mirror of https://github.com/tektoncd/catalog.git
- name: revision
value: df36b3853a5657fd883015cdbf07ad6466918acf
- name: pathInRepo
value: task/gitea-set-status/0.1/gitea-set-status.yaml
params:
- name: CONTEXT
value: "$(params.JOB_NAME)"
- name: REPO_FULL_NAME
value: "$(params.REPO_OWNER)/$(params.REPO_NAME)"
- name: GITEA_HOST_URL
value: code.fizz.buzz
- name: SHA
value: "$(tasks.fetch-repository.results.commit)"
- name: DESCRIPTION
value: "Build $(params.JOB_NAME) has started"
- name: STATE
value: pending
- name: TARGET_URL
value: "https://tekton.fizz.buzz/#/namespaces/$(context.pipelineRun.namespace)/pipelineruns/$(context.pipelineRun.name)"
- name: fetch-repository
taskRef:
resolver: git
params:
- name: url
value: https://code.fizz.buzz/mirror/catalog.git # mirror of https://github.com/tektoncd/catalog.git
- name: revision
value: df36b3853a5657fd883015cdbf07ad6466918acf
- name: pathInRepo
value: task/git-clone/0.9/git-clone.yaml
workspaces:
- name: output
workspace: git-source
params:
- name: url
value: $(params.REPO_URL)
- name: revision
value: $(params.PULL_BASE_SHA)
- name: deleteExisting
value: "true"
- name: get-git-commit-time
taskSpec:
metadata: {}
stepTemplate:
image: alpine:3.20
computeResources:
requests:
cpu: 10m
memory: 600Mi
workingDir: "$(workspaces.repo.path)"
results:
- name: unix-time
description: The time of the git commit in unix timestamp format.
steps:
- image: alpine/git:v2.34.2
name: detect-tag-step
script: |
#!/usr/bin/env sh
set -euo pipefail
echo -n "$(git log -1 --pretty=%ct)" | tee $(results.unix-time.path)
workspaces:
- name: repo
workspace: git-source
runAfter:
- fetch-repository
- name: build-image
taskRef:
resolver: git
params:
- name: url
value: https://code.fizz.buzz/talexander/personal_tekton_catalog.git
- name: revision
value: 7ee31a185243ee6da13dcd26a592c585b64c80e5
- name: pathInRepo
value: task/buildkit-rootless-daemonless/0.1/buildkit-rootless-daemonless.yaml
params:
- name: OUTPUT
value: >-
type=image,"name=$(params.image-name):latest,$(params.image-name):$(tasks.fetch-repository.results.commit)",push=true,compression=zstd,compression-level=22,oci-mediatypes=true
- name: CONTEXT
value: $(params.path-to-image-context)
- name: DOCKERFILE
value: $(params.path-to-dockerfile)
- name: EXTRA_ARGS
value:
- "--opt"
- "target=$(params.target-name)"
- --import-cache
- "type=registry,ref=$(params.image-name):buildcache"
- --export-cache
- "type=registry,ref=$(params.image-name):buildcache,mode=max,compression=zstd,compression-level=22,rewrite-timestamp=true,image-manifest=true,oci-mediatypes=true"
- --opt
- build-arg:SOURCE_DATE_EPOCH=$(tasks.get-git-commit-time.results.unix-time)
- name: BUILDKITD_TOML
value: |
debug = true
[registry."docker.io"]
mirrors = ["dockerhub.dockerhub.svc.cluster.local"]
[registry."dockerhub.dockerhub.svc.cluster.local"]
http = true
insecure = true
workspaces:
- name: source
workspace: git-source
- name: dockerconfig
workspace: docker-credentials
runAfter:
- fetch-repository
- name: run-cargo-clippy
taskSpec:
metadata: {}
params:
- name: docker-image
type: string
description: Docker image to run.
default: alpine:3.20
stepTemplate:
image: alpine:3.20
computeResources:
requests:
cpu: 10m
memory: 600Mi
workingDir: /workspace/source
workspaces:
- name: source
mountPath: /source
- name: cargo-cache
mountPath: /usr/local/cargo/registry
optional: true
steps:
- name: run
image: $(params.docker-image)
workingDir: "$(workspaces.source.path)"
command:
[
"cargo",
"clippy",
"--no-deps",
"--all-targets",
"--all-features",
"--",
"-D",
"warnings",
]
args: []
env:
- name: CARGO_TARGET_DIR
value: /target
workspaces:
- name: source
workspace: git-source
- name: cargo-cache
workspace: cargo-cache
runAfter:
- build-image
params:
- name: docker-image
value: "$(tasks.build-image.results.IMAGE_URL[1])"
finally:
- name: report-success
when:
- input: "$(tasks.status)"
operator: in
values: ["Succeeded", "Completed"]
taskRef:
resolver: git
params:
- name: url
value: https://code.fizz.buzz/mirror/catalog.git # mirror of https://github.com/tektoncd/catalog.git
- name: revision
value: df36b3853a5657fd883015cdbf07ad6466918acf
- name: pathInRepo
value: task/gitea-set-status/0.1/gitea-set-status.yaml
params:
- name: CONTEXT
value: "$(params.JOB_NAME)"
- name: REPO_FULL_NAME
value: "$(params.REPO_OWNER)/$(params.REPO_NAME)"
- name: GITEA_HOST_URL
value: code.fizz.buzz
- name: SHA
value: "$(tasks.fetch-repository.results.commit)"
- name: DESCRIPTION
value: "Build $(params.JOB_NAME) has succeeded"
- name: STATE
value: success
- name: TARGET_URL
value: "https://tekton.fizz.buzz/#/namespaces/$(context.pipelineRun.namespace)/pipelineruns/$(context.pipelineRun.name)"
- name: report-failure
when:
- input: "$(tasks.status)"
operator: in
values: ["Failed"]
taskRef:
resolver: git
params:
- name: url
value: https://code.fizz.buzz/mirror/catalog.git # mirror of https://github.com/tektoncd/catalog.git
- name: revision
value: df36b3853a5657fd883015cdbf07ad6466918acf
- name: pathInRepo
value: task/gitea-set-status/0.1/gitea-set-status.yaml
params:
- name: CONTEXT
value: "$(params.JOB_NAME)"
- name: REPO_FULL_NAME
value: "$(params.REPO_OWNER)/$(params.REPO_NAME)"
- name: GITEA_HOST_URL
value: code.fizz.buzz
- name: SHA
value: "$(tasks.fetch-repository.results.commit)"
- name: DESCRIPTION
value: "Build $(params.JOB_NAME) has failed"
- name: STATE
value: failure
- name: TARGET_URL
value: "https://tekton.fizz.buzz/#/namespaces/$(context.pipelineRun.namespace)/pipelineruns/$(context.pipelineRun.name)"
- name: cargo-cache-autoclean
taskSpec:
metadata: {}
params:
- name: docker-image
type: string
description: Docker image to run.
default: alpine:3.20
stepTemplate:
image: alpine:3.20
computeResources:
requests:
cpu: 10m
memory: 600Mi
workingDir: /workspace/source
workspaces:
- name: source
mountPath: /source
- name: cargo-cache
mountPath: /usr/local/cargo/registry
optional: true
steps:
- name: run
image: $(params.docker-image)
workingDir: "$(workspaces.source.path)"
command: [cargo, cache, --autoclean]
args: []
workspaces:
- name: source
workspace: git-source
- name: cargo-cache
workspace: cargo-cache
params:
- name: docker-image
value: "$(tasks.build-image.results.IMAGE_URL[1])"
workspaces:
- name: git-source
- name: docker-credentials
- name: cargo-cache
workspaces:
- name: git-source
volumeClaimTemplate:
spec:
storageClassName: "local-path"
accessModes:
- ReadWriteOnce
resources:
requests:
storage: 10Gi
subPath: rust-source
- name: cargo-cache
persistentVolumeClaim:
claimName: organic-cargo-cache-clippy
- name: docker-credentials
secret:
secretName: harbor-plain
params:
- name: image-name
value: "harbor.fizz.buzz/private/organic-development-clippy"
- name: target-name
value: "clippy"
- name: path-to-image-context
value: docker/organic_development/
- name: path-to-dockerfile
value: docker/organic_development/

View File

@@ -0,0 +1,312 @@
apiVersion: tekton.dev/v1
kind: PipelineRun
metadata:
name: rust-test
spec:
timeouts:
pipeline: "2h0m0s"
tasks: "1h0m40s"
finally: "0h30m0s"
taskRunTemplate:
serviceAccountName: build-bot
pipelineSpec:
params:
- name: image-name
description: The name for the built image
type: string
- name: target-name
description: The dockerfile target to build
type: string
- name: path-to-image-context
description: The path to the build context
type: string
- name: path-to-dockerfile
description: The path to the Dockerfile
type: string
tasks:
- name: report-pending
taskRef:
resolver: git
params:
- name: url
value: https://code.fizz.buzz/mirror/catalog.git # mirror of https://github.com/tektoncd/catalog.git
- name: revision
value: df36b3853a5657fd883015cdbf07ad6466918acf
- name: pathInRepo
value: task/gitea-set-status/0.1/gitea-set-status.yaml
params:
- name: CONTEXT
value: "$(params.JOB_NAME)"
- name: REPO_FULL_NAME
value: "$(params.REPO_OWNER)/$(params.REPO_NAME)"
- name: GITEA_HOST_URL
value: code.fizz.buzz
- name: SHA
value: "$(tasks.fetch-repository.results.commit)"
- name: DESCRIPTION
value: "Build $(params.JOB_NAME) has started"
- name: STATE
value: pending
- name: TARGET_URL
value: "https://tekton.fizz.buzz/#/namespaces/$(context.pipelineRun.namespace)/pipelineruns/$(context.pipelineRun.name)"
- name: fetch-repository
taskRef:
resolver: git
params:
- name: url
value: https://code.fizz.buzz/mirror/catalog.git # mirror of https://github.com/tektoncd/catalog.git
- name: revision
value: df36b3853a5657fd883015cdbf07ad6466918acf
- name: pathInRepo
value: task/git-clone/0.9/git-clone.yaml
workspaces:
- name: output
workspace: git-source
params:
- name: url
value: $(params.REPO_URL)
- name: revision
value: $(params.PULL_BASE_SHA)
- name: deleteExisting
value: "true"
- name: get-git-commit-time
taskSpec:
metadata: {}
stepTemplate:
image: alpine:3.20
computeResources:
requests:
cpu: 10m
memory: 600Mi
workingDir: "$(workspaces.repo.path)"
results:
- name: unix-time
description: The time of the git commit in unix timestamp format.
steps:
- image: alpine/git:v2.34.2
name: detect-tag-step
script: |
#!/usr/bin/env sh
set -euo pipefail
echo -n "$(git log -1 --pretty=%ct)" | tee $(results.unix-time.path)
workspaces:
- name: repo
workspace: git-source
runAfter:
- fetch-repository
- name: build-image
taskRef:
resolver: git
params:
- name: url
value: https://code.fizz.buzz/talexander/personal_tekton_catalog.git
- name: revision
value: 7ee31a185243ee6da13dcd26a592c585b64c80e5
- name: pathInRepo
value: task/buildkit-rootless-daemonless/0.1/buildkit-rootless-daemonless.yaml
params:
- name: OUTPUT
value: >-
type=image,"name=$(params.image-name):latest,$(params.image-name):$(tasks.fetch-repository.results.commit)",push=true,compression=zstd,compression-level=22,oci-mediatypes=true
- name: CONTEXT
value: $(params.path-to-image-context)
- name: DOCKERFILE
value: $(params.path-to-dockerfile)
- name: EXTRA_ARGS
value:
- "--opt"
- "target=$(params.target-name)"
- --import-cache
- "type=registry,ref=$(params.image-name):buildcache"
- --export-cache
- "type=registry,ref=$(params.image-name):buildcache,mode=max,compression=zstd,compression-level=22,rewrite-timestamp=true,image-manifest=true,oci-mediatypes=true"
- --opt
- build-arg:SOURCE_DATE_EPOCH=$(tasks.get-git-commit-time.results.unix-time)
- name: BUILDKITD_TOML
value: |
debug = true
[registry."docker.io"]
mirrors = ["dockerhub.dockerhub.svc.cluster.local"]
[registry."dockerhub.dockerhub.svc.cluster.local"]
http = true
insecure = true
workspaces:
- name: source
workspace: git-source
- name: dockerconfig
workspace: docker-credentials
runAfter:
- fetch-repository
- name: run-cargo-test
taskSpec:
metadata: {}
params:
- name: docker-image
type: string
description: Docker image to run.
default: alpine:3.20
stepTemplate:
image: alpine:3.20
computeResources:
requests:
cpu: 10m
memory: 600Mi
workingDir: /workspace/source
workspaces:
- name: source
mountPath: /source
- name: cargo-cache
mountPath: /usr/local/cargo/registry
optional: true
steps:
- name: run
image: $(params.docker-image)
workingDir: "$(workspaces.source.path)"
command: [cargo, test]
args:
[
--no-default-features,
--features,
"compare,wasm_test",
--no-fail-fast,
--lib,
--test,
test_loader,
]
env:
- name: CARGO_TARGET_DIR
value: /target
workspaces:
- name: source
workspace: git-source
- name: cargo-cache
workspace: cargo-cache
runAfter:
- build-image
params:
- name: docker-image
value: "$(tasks.build-image.results.IMAGE_URL[1])"
finally:
- name: report-success
when:
- input: "$(tasks.status)"
operator: in
values: ["Succeeded", "Completed"]
taskRef:
resolver: git
params:
- name: url
value: https://code.fizz.buzz/mirror/catalog.git # mirror of https://github.com/tektoncd/catalog.git
- name: revision
value: df36b3853a5657fd883015cdbf07ad6466918acf
- name: pathInRepo
value: task/gitea-set-status/0.1/gitea-set-status.yaml
params:
- name: CONTEXT
value: "$(params.JOB_NAME)"
- name: REPO_FULL_NAME
value: "$(params.REPO_OWNER)/$(params.REPO_NAME)"
- name: GITEA_HOST_URL
value: code.fizz.buzz
- name: SHA
value: "$(tasks.fetch-repository.results.commit)"
- name: DESCRIPTION
value: "Build $(params.JOB_NAME) has succeeded"
- name: STATE
value: success
- name: TARGET_URL
value: "https://tekton.fizz.buzz/#/namespaces/$(context.pipelineRun.namespace)/pipelineruns/$(context.pipelineRun.name)"
- name: report-failure
when:
- input: "$(tasks.status)"
operator: in
values: ["Failed"]
taskRef:
resolver: git
params:
- name: url
value: https://code.fizz.buzz/mirror/catalog.git # mirror of https://github.com/tektoncd/catalog.git
- name: revision
value: df36b3853a5657fd883015cdbf07ad6466918acf
- name: pathInRepo
value: task/gitea-set-status/0.1/gitea-set-status.yaml
params:
- name: CONTEXT
value: "$(params.JOB_NAME)"
- name: REPO_FULL_NAME
value: "$(params.REPO_OWNER)/$(params.REPO_NAME)"
- name: GITEA_HOST_URL
value: code.fizz.buzz
- name: SHA
value: "$(tasks.fetch-repository.results.commit)"
- name: DESCRIPTION
value: "Build $(params.JOB_NAME) has failed"
- name: STATE
value: failure
- name: TARGET_URL
value: "https://tekton.fizz.buzz/#/namespaces/$(context.pipelineRun.namespace)/pipelineruns/$(context.pipelineRun.name)"
- name: cargo-cache-autoclean
taskSpec:
metadata: {}
params:
- name: docker-image
type: string
description: Docker image to run.
default: alpine:3.20
stepTemplate:
image: alpine:3.20
computeResources:
requests:
cpu: 10m
memory: 600Mi
workingDir: /workspace/source
workspaces:
- name: source
mountPath: /source
- name: cargo-cache
mountPath: /usr/local/cargo/registry
optional: true
steps:
- name: run
image: $(params.docker-image)
workingDir: "$(workspaces.source.path)"
command: [cargo, cache, --autoclean]
args: []
workspaces:
- name: source
workspace: git-source
- name: cargo-cache
workspace: cargo-cache
params:
- name: docker-image
value: "$(tasks.build-image.results.IMAGE_URL[1])"
workspaces:
- name: git-source
- name: docker-credentials
- name: cargo-cache
workspaces:
- name: git-source
volumeClaimTemplate:
spec:
storageClassName: "local-path"
accessModes:
- ReadWriteOnce
resources:
requests:
storage: 10Gi
subPath: rust-source
- name: cargo-cache
persistentVolumeClaim:
claimName: organic-cargo-cache-test
- name: docker-credentials
secret:
secretName: harbor-plain
params:
- name: image-name
value: "harbor.fizz.buzz/private/organic-test"
- name: target-name
value: "tester"
- name: path-to-image-context
value: docker/organic_test/
- name: path-to-dockerfile
value: docker/organic_test/

View File

@@ -0,0 +1,31 @@
version = "0.0.1"
[[push]]
name = "rust-test"
source = "pipeline-rust-test.yaml"
clone_uri = "git@code.fizz.buzz:talexander/organic.git"
skip_branches = [ "^v[0-9]+\\.[0-9]+\\.[0-9]+$" ]
[[push]]
name = "foreign-document-test"
source = "pipeline-foreign-document-test.yaml"
clone_uri = "git@code.fizz.buzz:talexander/organic.git"
branches = [ "^main$", "^master$" ]
[[push]]
name = "clippy"
source = "pipeline-rust-clippy.yaml"
clone_uri = "git@code.fizz.buzz:talexander/organic.git"
skip_branches = [ "^v[0-9]+\\.[0-9]+\\.[0-9]+$" ]
[[push]]
name = "format"
source = "pipeline-format.yaml"
clone_uri = "git@code.fizz.buzz:talexander/organic.git"
skip_branches = [ "^v[0-9]+\\.[0-9]+\\.[0-9]+$" ]
[[push]]
name = "build"
source = "pipeline-build-hash.yaml"
clone_uri = "git@code.fizz.buzz:talexander/organic.git"
branches = [ "^main$", "^master$" ]

View File

@@ -1,8 +1,9 @@
# cargo-features = ["profile-rustflags"]
cargo-features = ["codegen-backend"]
[package]
name = "organic"
version = "0.1.12"
version = "0.1.16"
authors = ["Tom Alexander <tom@fizz.buzz>"]
description = "An org-mode parser."
edition = "2021"
@@ -39,17 +40,33 @@ path = "src/lib.rs"
path = "src/bin_foreign_document_test.rs"
required-features = ["foreign_document_test"]
[[bin]]
name = "wasm"
path = "src/bin_wasm.rs"
required-features = ["wasm"]
[[bin]]
# This bin exists for development purposes only. The real target of this crate is the library.
name = "wasm_test"
path = "src/bin_wasm_test.rs"
required-features = ["wasm_test"]
[dependencies]
futures = { version = "0.3.28", optional = true }
gloo-utils = "0.2.0"
nom = "7.1.1"
opentelemetry = { version = "0.20.0", optional = true, default-features = false, features = ["trace", "rt-tokio"] }
opentelemetry-otlp = { version = "0.13.0", optional = true }
opentelemetry-semantic-conventions = { version = "0.12.0", optional = true }
serde = { version = "1.0.193", optional = true, features = ["derive"] }
serde-wasm-bindgen = { version = "0.6.3", optional = true }
serde_json = { version = "1.0.108", optional = true }
tokio = { version = "1.30.0", optional = true, default-features = false, features = ["rt", "rt-multi-thread"] }
tracing = { version = "0.1.37", optional = true }
tracing-opentelemetry = { version = "0.20.0", optional = true }
tracing-subscriber = { version = "0.3.17", optional = true, features = ["env-filter"] }
walkdir = { version = "2.3.3", optional = true }
wasm-bindgen = { version = "0.2.89", optional = true }
[build-dependencies]
walkdir = "2.3.3"
@@ -60,6 +77,8 @@ compare = ["tokio/process", "tokio/macros"]
foreign_document_test = ["compare", "dep:futures", "tokio/sync", "dep:walkdir", "tokio/process"]
tracing = ["dep:opentelemetry", "dep:opentelemetry-otlp", "dep:opentelemetry-semantic-conventions", "dep:tokio", "dep:tracing", "dep:tracing-opentelemetry", "dep:tracing-subscriber"]
event_count = []
wasm = ["dep:serde", "dep:wasm-bindgen", "dep:serde-wasm-bindgen"]
wasm_test = ["wasm", "dep:serde_json", "tokio/process", "tokio/macros"]
# Optimized build for any sort of release.
[profile.release-lto]
@@ -79,3 +98,15 @@ strip = "symbols"
inherits = "release"
lto = true
debug = true
[profile.wasm]
inherits = "release"
lto = true
strip = true
[profile.dev]
codegen-backend = "cranelift"
[profile.dev.package."*"]
codegen-backend = "llvm"
opt-level = 3

View File

@@ -7,6 +7,7 @@ MAKEFLAGS += --no-builtin-rules
TESTJOBS := 4
OS:=$(shell uname -s)
RELEASEFLAGS :=
WASMTARGET := bundler # or web
ifeq ($(OS),Linux)
TESTJOBS:=$(shell nproc)
@@ -21,55 +22,83 @@ ifeq ($(origin .RECIPEPREFIX), undefined)
endif
.RECIPEPREFIX = >
.PHONY: help
help: ## List the available make targets.
> @grep -h "##" $(MAKEFILE_LIST) | grep -v grep | sed -E 's/^([^:]*): *## */\1: /'
.PHONY: build
build:
build: ## Make a debug build of the project.
> cargo build
.PHONY: release
release:
release: ## Make an optimized build of the project.
> cargo build --release $(RELEASEFLAGS)
.PHONY: wasm
wasm: ## Build the parser as wasm.
> cargo build --target=wasm32-unknown-unknown --profile wasm --bin wasm --features wasm
> wasm-bindgen --target $(WASMTARGET) --out-dir target/wasm32-unknown-unknown/js target/wasm32-unknown-unknown/wasm/wasm.wasm
.PHONY: clean
clean:
clean: ## Delete the built binaries.
> cargo clean
> $(MAKE) -C docker/organic_development TARGET=builder clean
> $(MAKE) -C docker/organic_development TARGET=format clean
> $(MAKE) -C docker/organic_development TARGET=clippy clean
> $(MAKE) -C docker/organic_development TARGET=wasm clean
> $(MAKE) -C docker/organic_test TARGET=tester clean
.PHONY: format
format:
> $(MAKE) -C docker/cargo_fmt run
format: ## Format the code.
> cargo fmt
.PHONY: dockerclippy
dockerclippy:
> $(MAKE) -C docker/organic_clippy run
.PHONY: docker_format
docker_format: ## Format the code using docker.
> $(MAKE) -C docker/organic_development TARGET=format build
> docker run --rm -i -t --mount type=tmpfs,destination=/tmp -v "$(shell readlink -f .):/source" --workdir=/source --env CARGO_TARGET_DIR=/target -v "organic-cargo-registry:/usr/local/cargo/registry" organic-development-format cargo fmt
.PHONY: docker_clippy
docker_clippy: ## Lint the code using docker.
> $(MAKE) -C docker/organic_development TARGET=clippy build
> docker run --rm -i -t --mount type=tmpfs,destination=/tmp -v "$(shell readlink -f .):/source" --workdir=/source --env CARGO_TARGET_DIR=/target -v "organic-cargo-registry:/usr/local/cargo/registry" organic-development-clippy cargo clippy --no-deps --all-targets --all-features -- -D warnings
.PHONY: clippy
clippy:
clippy: ## Lint the code.
> cargo clippy --no-deps --all-targets --all-features -- -D warnings
.PHONY: clippyfix
clippyfix:
> cargo clippy --fix --lib -p organic --all-features
.PHONY: test
test:
test: ## Run the test suite.
> cargo test --no-default-features --features compare --no-fail-fast --lib --test test_loader -- --test-threads $(TESTJOBS)
.PHONY: dockertest
dockertest:
> $(MAKE) -C docker/organic_test
.PHONY: doc
doc: ## Generate documentation.
> cargo doc --no-deps --open --lib --release --all-features
.PHONY: docker_test
docker_test: ## Run the test suite using docker.
> $(MAKE) -C docker/organic_test TARGET=tester build
> docker run --init --rm -i -t --read-only -v "$$(readlink -f ./):/source:ro" --mount type=tmpfs,destination=/tmp --mount source=cargo-cache,target=/usr/local/cargo/registry --mount source=rust-cache,target=/target --env CARGO_TARGET_DIR=/target -w /source organic-test --no-default-features --features compare --no-fail-fast --lib --test test_loader -- --test-threads $(TESTJOBS)
.PHONY: buildtest
buildtest:
.PHONY: docker_wasm_test
docker_wasm_test: ## Run the test suite with wasm tests.
> $(MAKE) -C docker/organic_test TARGET=tester build
> docker run --init --rm -i -t --read-only -v "$$(readlink -f ./):/source:ro" --mount type=tmpfs,destination=/tmp --mount source=cargo-cache,target=/usr/local/cargo/registry --mount source=rust-cache,target=/target --env CARGO_TARGET_DIR=/target -w /source organic-test --no-default-features --features compare,wasm_test --no-fail-fast --lib --test test_loader autogen_wasm_ -- --test-threads $(TESTJOBS)
.PHONY: build_test
build_test:
> cargo build --no-default-features
> cargo build --no-default-features --features compare
> cargo build --no-default-features --features tracing
> cargo build --no-default-features --features compare,tracing
> cargo build --no-default-features --features compare,foreign_document_test
> cargo build --no-default-features --features compare,tracing,foreign_document_test
> cargo build --target wasm32-unknown-unknown --profile wasm --bin wasm --no-default-features --features wasm
> cargo build --bin wasm_test --no-default-features --features wasm_test
.PHONY: foreign_document_test
foreign_document_test:
> $(MAKE) -C docker/organic_test run_foreign_document_test
> $(MAKE) -C docker/organic_test TARGET=foreign-document build
> docker run --init --rm -i -t --read-only -v "$$(readlink -f ./):/source:ro" --mount type=tmpfs,destination=/tmp --mount source=cargo-cache,target=/usr/local/cargo/registry --mount source=rust-cache,target=/target --env CARGO_TARGET_DIR=/target -w /source organic-test-foreign-document
.PHONY: dockerclean
dockerclean:

View File

@@ -10,17 +10,16 @@ Currently, Organic parses most documents the same as the official org-mode parse
### Project Goals
- We aim to provide perfect parity with the emacs org-mode parser. In that regard, any document that parses differently between Emacs and Organic is considered a bug.
- The parser should have minimal dependencies. This should reduce effort w.r.t.: security audits, legal compliance, portability.
- The parser should be usable everywhere. In the interest of getting org-mode used in as many places as possible, this parser should be usable by everyone everywhere. This means:
- The parser should have minimal dependencies.
- The parser should be usable everywhere. In the interest of getting org used in as many places as possible, this parser should be usable by everyone everywhere. This means:
- It must have a permissive license.
- We will investigate compiling to WASM. This is an important goal of the project and will definitely happen, but only after the parser has a more stable API.
- It compiles to both natively and to wasm.
- We will investigate compiling to a C library for native linking to other code. This is more of a maybe-goal for the project.
### Project Non-Goals
- This project will not include an elisp engine since that would drastically increase the complexity of the code. Any features requiring an elisp engine will not be implemented (for example, Emacs supports embedded eval expressions in documents but this parser will never support that).
- This project is exclusively an org-mode **parser**. This limits its scope to roughly the output of `(org-element-parse-buffer)`. It will not render org-mode documents in other formats like HTML or LaTeX.
### Project Maybe-Goals
- table.el support. Currently we support org-mode tables but org-mode also allows table.el tables. So far, their use in org-mode documents seems rather uncommon so this is a low-priority feature.
- Document editing support. I do not anticipate any advanced editing features to make editing ergonomic, but it should be relatively easy to be able to parse an org-mode document and serialize it back into org-mode. This would enable cool features to be built on top of the library like auto-formatters. To accomplish this feature, we'd have to capture all of the various separators and whitespace that we are currently simply throwing away. This would add many additional fields to the parsed structs and it would add more noise to the parsers themselves, so I do not want to approach this feature until the parser is more complete since it would make modifications and refactoring more difficult.
### Supported Versions
This project targets the version of Emacs and Org-mode that are built into the [organic-test docker image](docker/organic_test/Dockerfile). This is newer than the version of Org-mode that shipped with Emacs 29.1. The parser itself does not depend on Emacs or Org-mode though, so this only matters for development purposes when running the automated tests that compare against upstream Org-mode.

View File

@@ -26,7 +26,7 @@ fn main() {
dir_entry.file_type().is_file()
&& Path::new(dir_entry.file_name())
.extension()
.map(|ext| ext.to_ascii_lowercase() == "org")
.map(|ext| ext.eq_ignore_ascii_case("org"))
.unwrap_or(false)
}
Err(_) => true,

View File

@@ -1,6 +0,0 @@
FROM rustlang/rust:nightly-alpine3.17
RUN apk add --no-cache musl-dev
RUN rustup component add rustfmt
ENTRYPOINT ["cargo", "fmt"]

View File

@@ -1,36 +0,0 @@
IMAGE_NAME:=cargo-fmt
# REMOTE_REPO:=harbor.fizz.buzz/private
.PHONY: all
all: build push
.PHONY: build
build:
docker build -t $(IMAGE_NAME) -f Dockerfile .
.PHONY: push
push:
ifdef REMOTE_REPO
docker tag $(IMAGE_NAME) $(REMOTE_REPO)/$(IMAGE_NAME)
docker push $(REMOTE_REPO)/$(IMAGE_NAME)
else
@echo "REMOTE_REPO not defined, not pushing to a remote repo."
endif
.PHONY: clean
clean:
docker rmi $(IMAGE_NAME)
ifdef REMOTE_REPO
docker rmi $(REMOTE_REPO)/$(IMAGE_NAME)
else
@echo "REMOTE_REPO not defined, not removing from remote repo."
endif
# NOTE: This target will write to folders underneath the git-root
.PHONY: run
run: build
docker run --rm --init --read-only --mount type=tmpfs,destination=/tmp -v "$$(readlink -f ../../):/source" --workdir=/source $(IMAGE_NAME)
.PHONY: shell
shell: build
docker run --rm -i -t --entrypoint /bin/sh --mount type=tmpfs,destination=/tmp -v "$$(readlink -f ../../):/source" --workdir=/source $(IMAGE_NAME)

View File

@@ -1,6 +0,0 @@
FROM rustlang/rust:nightly-alpine3.17
RUN apk add --no-cache musl-dev
RUN cargo install --locked --no-default-features --features ci-autoclean cargo-cache
ENTRYPOINT ["cargo", "build"]

View File

@@ -1,37 +0,0 @@
IMAGE_NAME:=organic-build
# REMOTE_REPO:=harbor.fizz.buzz/private
.PHONY: all
all: build push
.PHONY: build
build:
docker build -t $(IMAGE_NAME) -f Dockerfile .
.PHONY: push
push:
ifdef REMOTE_REPO
docker tag $(IMAGE_NAME) $(REMOTE_REPO)/$(IMAGE_NAME)
docker push $(REMOTE_REPO)/$(IMAGE_NAME)
else
@echo "REMOTE_REPO not defined, not pushing to a remote repo."
endif
.PHONY: clean
clean:
docker rmi $(IMAGE_NAME)
ifdef REMOTE_REPO
docker rmi $(REMOTE_REPO)/$(IMAGE_NAME)
else
@echo "REMOTE_REPO not defined, not removing from remote repo."
endif
docker volume rm cargo-cache
# NOTE: This target will write to folders underneath the git-root
.PHONY: run
run: build
docker run --rm --init --read-only --mount type=tmpfs,destination=/tmp -v "$$(readlink -f ../../):/source" --workdir=/source --mount source=cargo-cache,target=/usr/local/cargo/registry $(IMAGE_NAME)
.PHONY: shell
shell: build
docker run --rm -i -t --entrypoint /bin/sh --mount type=tmpfs,destination=/tmp -v "$$(readlink -f ../../):/source" --workdir=/source --mount source=cargo-cache,target=/usr/local/cargo/registry $(IMAGE_NAME)

View File

@@ -1,5 +0,0 @@
FROM rustlang/rust:nightly-alpine3.17
RUN apk add --no-cache musl-dev
ENTRYPOINT ["cargo", "clippy", "--no-deps", "--all-targets", "--all-features", "--", "-D", "warnings"]

View File

@@ -1,37 +0,0 @@
IMAGE_NAME:=organic-clippy
# REMOTE_REPO:=harbor.fizz.buzz/private
.PHONY: all
all: build push
.PHONY: build
build:
docker build -t $(IMAGE_NAME) -f Dockerfile .
.PHONY: push
push:
ifdef REMOTE_REPO
docker tag $(IMAGE_NAME) $(REMOTE_REPO)/$(IMAGE_NAME)
docker push $(REMOTE_REPO)/$(IMAGE_NAME)
else
@echo "REMOTE_REPO not defined, not pushing to a remote repo."
endif
.PHONY: clean
clean:
docker rmi $(IMAGE_NAME)
ifdef REMOTE_REPO
docker rmi $(REMOTE_REPO)/$(IMAGE_NAME)
else
@echo "REMOTE_REPO not defined, not removing from remote repo."
endif
docker volume rm cargo-cache
# NOTE: This target will write to folders underneath the git-root
.PHONY: run
run: build
docker run --rm --init --read-only --mount type=tmpfs,destination=/tmp -v "$$(readlink -f ../../):/source" --workdir=/source --mount source=cargo-cache,target=/usr/local/cargo/registry $(IMAGE_NAME)
.PHONY: shell
shell: build
docker run --rm -i -t --entrypoint /bin/sh --mount type=tmpfs,destination=/tmp -v "$$(readlink -f ../../):/source" --workdir=/source --mount source=cargo-cache,target=/usr/local/cargo/registry $(IMAGE_NAME)

View File

@@ -0,0 +1,20 @@
# syntax=docker/dockerfile:1
ARG ALPINE_VERSION="3.20"
FROM rustlang/rust:nightly-alpine$ALPINE_VERSION AS builder
RUN apk add --no-cache musl-dev
RUN --mount=type=tmpfs,target=/tmp --mount=type=cache,target=/usr/local/cargo/registry,sharing=locked cargo install --locked --no-default-features --features ci-autoclean cargo-cache
RUN rustup component add rustc-codegen-cranelift
FROM builder AS format
RUN rustup component add rustfmt
FROM builder AS clippy
RUN rustup component add clippy
FROM builder AS wasm
RUN rustup target add wasm32-unknown-unknown

View File

@@ -0,0 +1,36 @@
SHELL := bash
.ONESHELL:
.SHELLFLAGS := -eu -o pipefail -c
.DELETE_ON_ERROR:
MAKEFLAGS += --warn-undefined-variables
MAKEFLAGS += --no-builtin-rules
ifeq ($(origin .RECIPEPREFIX), undefined)
$(error This Make does not support .RECIPEPREFIX. Please use GNU Make 4.0 or later)
endif
.RECIPEPREFIX = >
TARGET := builder
IMAGE_NAME := organic-development
ifneq ($(TARGET),builder)
IMAGE_NAME := $(IMAGE_NAME)-$(TARGET)
endif
.PHONY: help
help:
> @grep -h "##" $(MAKEFILE_LIST) | grep -v grep | sed -E 's/^([^:]*): *## */\1: /'
.PHONY: build
build: ## Build the docker image.
> docker build --tag $(IMAGE_NAME) --target=$(TARGET) --file Dockerfile .
> docker volume create organic-cargo-registry
.PHONY: shell
shell: ## Launch an interactive shell inside the docker image with the source repository mounted at /source.
shell: build
> docker run --rm -i -t --entrypoint /bin/sh --mount type=tmpfs,destination=/tmp -v "$$(readlink -f ../../):/source" --workdir=/source --env CARGO_TARGET_DIR=/target -v "organic-cargo-registry:/usr/local/cargo/registry" $(IMAGE_NAME)
.PHONY: clean
clean: ## Remove the docker image and volume.
> docker rmi $(IMAGE_NAME)
> docker volume rm organic-cargo-registry

View File

@@ -1,10 +1,25 @@
FROM alpine:3.17 AS build
# syntax=docker/dockerfile:1
ARG ALPINE_VERSION="3.20"
# ARG EMACS_REPO=https://git.savannah.gnu.org/git/emacs.git
ARG EMACS_REPO=https://code.fizz.buzz/mirror/emacs.git
ARG EMACS_VERSION=emacs-29.1
# ARG ORG_MODE_REPO=https://git.savannah.gnu.org/git/emacs/org-mode.git
ARG ORG_MODE_REPO=https://code.fizz.buzz/mirror/org-mode.git
ARG ORG_VERSION=abf5156096c06ee5aa05795c3dc5a065f76ada97
FROM alpine:$ALPINE_VERSION AS build
RUN apk add --no-cache build-base musl-dev git autoconf make texinfo gnutls-dev ncurses-dev gawk libgccjit-dev
FROM build AS build-emacs
ARG EMACS_VERSION=emacs-29.1
RUN git clone --depth 1 --branch $EMACS_VERSION https://git.savannah.gnu.org/git/emacs.git /root/emacs
ARG EMACS_VERSION
ARG EMACS_REPO
RUN git clone --depth 1 --branch $EMACS_VERSION $EMACS_REPO /root/emacs
WORKDIR /root/emacs
RUN mkdir /root/dist
RUN ./autogen.sh
@@ -14,23 +29,25 @@ RUN make DESTDIR="/root/dist" install
FROM build AS build-org-mode
ARG ORG_VERSION=abf5156096c06ee5aa05795c3dc5a065f76ada97
COPY --from=build-emacs /root/dist/ /
ARG ORG_VERSION
ARG ORG_MODE_REPO
COPY --link --from=build-emacs /root/dist/ /
RUN mkdir /root/dist
# Savannah does not allow fetching specific revisions, so we're going to have to put unnecessary load on their server by cloning main and then checking out the revision we want.
RUN git clone https://git.savannah.gnu.org/git/emacs/org-mode.git /root/org-mode && git -C /root/org-mode checkout $ORG_VERSION
# RUN mkdir /root/org-mode && git -C /root/org-mode init --initial-branch=main && git -C /root/org-mode remote add origin https://git.savannah.gnu.org/git/emacs/org-mode.git && git -C /root/org-mode fetch origin $ORG_VERSION && git -C /root/org-mode checkout FETCH_HEAD
RUN git clone $ORG_MODE_REPO /root/org-mode && git -C /root/org-mode checkout $ORG_VERSION
# RUN mkdir /root/org-mode && git -C /root/org-mode init --initial-branch=main && git -C /root/org-mode remote add origin $ORG_REPO && git -C /root/org-mode fetch origin $ORG_VERSION && git -C /root/org-mode checkout FETCH_HEAD
WORKDIR /root/org-mode
RUN make compile
RUN make DESTDIR="/root/dist" install
FROM rustlang/rust:nightly-alpine3.17 AS tester
FROM rustlang/rust:nightly-alpine$ALPINE_VERSION AS tester
ENV LANG=en_US.UTF-8
RUN apk add --no-cache musl-dev ncurses gnutls libgccjit
RUN cargo install --locked --no-default-features --features ci-autoclean cargo-cache
COPY --from=build-emacs /root/dist/ /
COPY --from=build-org-mode /root/dist/ /
RUN --mount=type=tmpfs,target=/tmp --mount=type=cache,target=/usr/local/cargo/registry,sharing=locked cargo install --locked --no-default-features --features ci-autoclean cargo-cache
RUN rustup component add rustc-codegen-cranelift
COPY --link --from=build-emacs /root/dist/ /
COPY --link --from=build-org-mode /root/dist/ /
ENTRYPOINT ["cargo", "test"]
@@ -93,13 +110,20 @@ ARG WORG_PATH=/foreign_documents/worg
ARG WORG_REPO=https://git.sr.ht/~bzg/worg
RUN mkdir -p $WORG_PATH && git -C $WORG_PATH init --initial-branch=main && git -C $WORG_PATH remote add origin $WORG_REPO && git -C $WORG_PATH fetch origin $WORG_VERSION && git -C $WORG_PATH checkout FETCH_HEAD
ARG LITERATE_BUILD_EMACS_VERSION=e3ac1afe1e40af601be7af12c1d13d96308ab209
ARG LITERATE_BUILD_EMACS_PATH=/foreign_documents/literate_build_emacs
ARG LITERATE_BUILD_EMACS_REPO=https://gitlab.com/spudlyo/orgdemo2.git
RUN mkdir -p $LITERATE_BUILD_EMACS_PATH && git -C $LITERATE_BUILD_EMACS_PATH init --initial-branch=main && git -C $LITERATE_BUILD_EMACS_PATH remote add origin $LITERATE_BUILD_EMACS_REPO && git -C $LITERATE_BUILD_EMACS_PATH fetch origin $LITERATE_BUILD_EMACS_VERSION && git -C $LITERATE_BUILD_EMACS_PATH checkout FETCH_HEAD
# unused/aws.org contains invalid paths for setupfile which causes both upstream org-mode and Organic to error out.
RUN rm $LITERATE_BUILD_EMACS_PATH/unused/aws.org
FROM tester as foreign-document-test
FROM tester as foreign-document
RUN apk add --no-cache bash coreutils
RUN mkdir /foreign_documents
COPY --from=foreign-document-gather /foreign_documents/howardabrams /foreign_documents/howardabrams
COPY --from=foreign-document-gather /foreign_documents/doomemacs /foreign_documents/doomemacs
COPY --from=foreign-document-gather /foreign_documents/worg /foreign_documents/worg
COPY --from=build-org-mode /root/org-mode /foreign_documents/org-mode
COPY --from=build-emacs /root/emacs /foreign_documents/emacs
COPY --link --from=foreign-document-gather /foreign_documents/howardabrams /foreign_documents/howardabrams
COPY --link --from=foreign-document-gather /foreign_documents/doomemacs /foreign_documents/doomemacs
COPY --link --from=foreign-document-gather /foreign_documents/worg /foreign_documents/worg
COPY --link --from=foreign-document-gather /foreign_documents/literate_build_emacs /foreign_documents/literate_build_emacs
COPY --link --from=build-org-mode /root/org-mode /foreign_documents/org-mode
COPY --link --from=build-emacs /root/emacs /foreign_documents/emacs
ENTRYPOINT ["cargo", "run", "--bin", "foreign_document_test", "--features", "compare,foreign_document_test", "--profile", "release-lto"]

View File

@@ -1,44 +1,36 @@
IMAGE_NAME:=organic-test
# REMOTE_REPO:=harbor.fizz.buzz/private
SHELL := bash
.ONESHELL:
.SHELLFLAGS := -eu -o pipefail -c
.DELETE_ON_ERROR:
MAKEFLAGS += --warn-undefined-variables
MAKEFLAGS += --no-builtin-rules
.PHONY: all
all: build push
ifeq ($(origin .RECIPEPREFIX), undefined)
$(error This Make does not support .RECIPEPREFIX. Please use GNU Make 4.0 or later)
endif
.RECIPEPREFIX = >
TARGET := tester
IMAGE_NAME := organic-test
ifneq ($(TARGET),tester)
IMAGE_NAME := $(IMAGE_NAME)-$(TARGET)
endif
.PHONY: help
help:
> @grep -h "##" $(MAKEFILE_LIST) | grep -v grep | sed -E 's/^([^:]*): *## */\1: /'
.PHONY: build
build:
docker build -t $(IMAGE_NAME) -f Dockerfile --target tester .
.PHONY: build_foreign_document_test
build_foreign_document_test:
docker build -t $(IMAGE_NAME)-foreign-document -f Dockerfile --target foreign-document-test .
.PHONY: push
push:
ifdef REMOTE_REPO
docker tag $(IMAGE_NAME) $(REMOTE_REPO)/$(IMAGE_NAME)
docker push $(REMOTE_REPO)/$(IMAGE_NAME)
else
@echo "REMOTE_REPO not defined, not pushing to a remote repo."
endif
.PHONY: clean
clean:
docker rmi $(IMAGE_NAME)
ifdef REMOTE_REPO
docker rmi $(REMOTE_REPO)/$(IMAGE_NAME)
else
@echo "REMOTE_REPO not defined, not removing from remote repo."
endif
docker volume rm rust-cache cargo-cache
.PHONY: run
run: build
docker run --rm --init --read-only --mount type=tmpfs,destination=/tmp -v "$$(readlink -f ../../):/source:ro" --workdir=/source --mount source=cargo-cache,target=/usr/local/cargo/registry --mount source=rust-cache,target=/target --env CARGO_TARGET_DIR=/target $(IMAGE_NAME) --no-default-features --features compare --no-fail-fast --lib --test test_loader
build: ## Build the docker image.
> docker build --tag $(IMAGE_NAME) --target=$(TARGET) --file Dockerfile .
> docker volume create organic-cargo-registry
.PHONY: shell
shell: ## Launch an interactive shell inside the docker image with the source repository mounted at /source.
shell: build
docker run --rm -i -t --entrypoint /bin/sh --mount type=tmpfs,destination=/tmp -v "$$(readlink -f ../../):/source:ro" --workdir=/source --mount source=cargo-cache,target=/usr/local/cargo/registry --mount source=rust-cache,target=/target --env CARGO_TARGET_DIR=/target $(IMAGE_NAME)
> docker run --rm -i -t --entrypoint /bin/sh --mount type=tmpfs,destination=/tmp -v "$$(readlink -f ../../):/source" --workdir=/source --env CARGO_TARGET_DIR=/target -v "organic-cargo-registry:/usr/local/cargo/registry" $(IMAGE_NAME)
.PHONY: run_foreign_document_test
run_foreign_document_test: build_foreign_document_test
docker run --rm --init --read-only --mount type=tmpfs,destination=/tmp -v "$$(readlink -f ../../):/source:ro" --workdir=/source --mount source=cargo-cache,target=/usr/local/cargo/registry --mount source=rust-cache,target=/target --env CARGO_TARGET_DIR=/target $(IMAGE_NAME)-foreign-document
.PHONY: clean
clean: ## Remove the docker image and volume.
> docker rmi $(IMAGE_NAME)
> docker volume rm organic-cargo-registry

View File

@@ -0,0 +1,5 @@
#+caption:
#+caption: *foo*
#+caption[bar]:
#+begin_src bash
#+end_src

View File

View File

@@ -0,0 +1,4 @@

View File

@@ -0,0 +1,5 @@
* foo

View File

@@ -1,3 +1,32 @@
* Empty
:PROPERTIES:
:END:
* Single new line
:PROPERTIES:
:END:
* Single line with spaces
:PROPERTIES:
:END:
* Many lines, first line without spaces
:PROPERTIES:
:END:
* Many lines, first line with spaces
:PROPERTIES:
:END:
* Many lines, first line with spaces, later line with spaces
:PROPERTIES:
:END:

View File

@@ -5,3 +5,5 @@
#+call: dolar cat(dog)
#+call: (bat)
#+call:

View File

@@ -0,0 +1,3 @@
: foo
:
: bar

View File

@@ -0,0 +1,6 @@
1. foo
#+begin_src text
#+end_src
2. baz

View File

@@ -0,0 +1 @@
[[file:simple.org::2]]

View File

@@ -0,0 +1 @@
[[/ssh:admin@test.example:important/file.pdf]]

View File

@@ -1,3 +1,3 @@
foo <<bar>> baz
<<FOO>> bar
lorem << ipsum >> dolar
[[FOO][baz]]

View File

@@ -0,0 +1,5 @@
* foo
** bar
* baz

View File

@@ -0,0 +1,76 @@
#!/usr/bin/env bash
#
# Build the crate once per combination of cargo features (including the empty
# combination) to catch feature-gated compile errors. An optional first
# argument is used as CARGO_TARGET_DIR; otherwise a temporary directory is
# created and deleted on exit. Set PROFILE to a cargo profile name
# ("dev"/"debug" selects the default profile).
set -euo pipefail
IFS=$'\n\t'
# Absolute path of the directory containing this script.
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
: ${PROFILE:="debug"}
############## Setup #########################
# Remove every temporary folder registered in the global `folders` array.
function cleanup {
    for f in "${folders[@]}"; do
        log "Deleting $f"
        rm -rf "$f"
    done
}
folders=()
# Run cleanup on normal exit and on the common termination signals.
for sig in EXIT INT QUIT HUP TERM; do
    trap "set +e; cleanup" "$sig"
done
# die STATUS MESSAGE... : write MESSAGE to stderr and exit with STATUS.
function die {
    local status_code="$1"
    shift
    (>&2 echo "${@}")
    exit "$status_code"
}
# log MESSAGE... : write MESSAGE to stderr.
function log {
    (>&2 echo "${@}")
}
############## Program #########################
function main {
    if [ "$#" -gt 0 ]; then
        # Caller supplied a target directory; build there and keep it.
        export CARGO_TARGET_DIR="$1"
    else
        # No target directory given: build into a throwaway temp directory.
        local work_directory=$(mktemp -d -t 'organic.XXXXXX')
        folders+=("$work_directory")
        export CARGO_TARGET_DIR="$work_directory"
    fi
    local features=(compare foreign_document_test tracing event_count wasm wasm_test)
    # The leading assignment seeds ENABLED_FEATURES as empty for the
    # recursion below.
    ENABLED_FEATURES= for_each_combination "${features[@]}"
}
# Recursively enumerate every subset of the remaining feature flags ($@) and
# run `cargo build` once per subset. ENABLED_FEATURES accumulates the chosen
# features as a comma-prefixed string; the ${ENABLED_FEATURES:1} slices drop
# that leading comma when passing the list to cargo.
function for_each_combination {
    local additional_flags=()
    if [ "$PROFILE" = "dev" ] || [ "$PROFILE" = "debug" ]; then
        PROFILE="debug"
    else
        additional_flags+=(--profile "$PROFILE")
    fi
    local flag=$1
    shift
    # First branch: all combinations that EXCLUDE $flag.
    if [ "$#" -gt 0 ]; then
        ENABLED_FEATURES="$ENABLED_FEATURES" for_each_combination "${@}"
    elif [ -z "$ENABLED_FEATURES" ]; then
        (cd "$DIR/../" && printf "\n\n\n========== no features ==========\n\n\n" && set -x && cargo build "${additional_flags[@]}" --no-default-features)
    else
        (cd "$DIR/../" && printf "\n\n\n========== %s ==========\n\n\n" "${ENABLED_FEATURES:1}" && set -x && cargo build "${additional_flags[@]}" --no-default-features --features "${ENABLED_FEATURES:1}")
    fi
    # Second branch: all combinations that INCLUDE $flag.
    ENABLED_FEATURES="$ENABLED_FEATURES,$flag"
    if [ "$#" -gt 0 ]; then
        ENABLED_FEATURES="$ENABLED_FEATURES" for_each_combination "${@}"
    else
        (cd "$DIR/../" && printf "\n\n\n========== %s ==========\n\n\n" "${ENABLED_FEATURES:1}" && set -x && cargo build "${additional_flags[@]}" --no-default-features --features "${ENABLED_FEATURES:1}")
    fi
}
main "${@}"

View File

@@ -0,0 +1,111 @@
#!/usr/bin/env bash
#
# Build the organic-test docker image and run the `wasm_test` binary inside
# it: against the files passed as arguments, or (with no arguments) reading
# from stdin in the current working directory.
set -euo pipefail
IFS=$'\n\t'
# Absolute path of the directory containing this script.
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
: ${SHELL:="NO"} # or YES to launch a shell instead of running the test
: ${TRACE:="NO"} # or YES to send traces to jaeger
: ${BACKTRACE:="NO"} # or YES to print a rust backtrace when panicking
: ${NO_COLOR:=""} # Set to anything to disable color output
: ${PROFILE:="debug"}
# Prefer the uutils/GNU implementations when available.
REALPATH=$(command -v uu-realpath || command -v realpath)
MAKE=$(command -v gmake || command -v make)
############## Setup #########################
# die STATUS MESSAGE... : write MESSAGE to stderr and exit with STATUS.
function die {
    local status_code="$1"
    shift
    (>&2 echo "${@}")
    exit "$status_code"
}
# log MESSAGE... : write MESSAGE to stderr.
function log {
    (>&2 echo "${@}")
}
############## Program #########################
function main {
    build_container
    launch_container "${@}"
}
# Build the organic-test docker image via its Makefile's default target.
function build_container {
    $MAKE -C "$DIR/../docker/organic_test"
}
# Translate the environment toggles above into docker flags, then either run
# the test binary or (SHELL=YES) replace this process with a container shell.
function launch_container {
    local additional_flags=()
    local features=(wasm_test)
    if [ "$NO_COLOR" != "" ]; then
        additional_flags+=(--env "NO_COLOR=$NO_COLOR")
    fi
    if [ "$TRACE" = "YES" ]; then
        # We use the host network so it can talk to jaeger hosted at 127.0.0.1
        additional_flags+=(--network=host --env RUST_LOG=debug)
        features+=(tracing)
    fi
    if [ "$SHELL" != "YES" ]; then
        additional_flags+=(--read-only)
    else
        additional_flags+=(-t)
    fi
    if [ "$BACKTRACE" = "YES" ]; then
        additional_flags+=(--env RUST_BACKTRACE=full)
    fi
    if [ "$SHELL" = "YES" ]; then
        # exec replaces this process, so nothing below runs in shell mode.
        exec docker run "${additional_flags[@]}" --init --rm -i --mount type=tmpfs,destination=/tmp -v "/:/input:ro" -v "$($REALPATH "$DIR/../"):/source:ro" --mount source=cargo-cache,target=/usr/local/cargo/registry --mount source=rust-cache,target=/target --env CARGO_TARGET_DIR=/target -w /source --entrypoint "" organic-test /bin/sh
    fi
    local features_joined
    features_joined=$(IFS=","; echo "${features[*]}")
    local build_flags=()
    if [ "$PROFILE" = "dev" ] || [ "$PROFILE" = "debug" ]; then
        PROFILE="debug"
    else
        build_flags+=(--profile "$PROFILE")
    fi
    if [ $# -gt 0 ]; then
        # If we passed in args, we need to forward them along
        # The host filesystem is mounted read-only at /input, so a host path
        # is reachable inside the container as /input<absolute path>.
        for path in "${@}"; do
            local full_path
            full_path=$($REALPATH "$path")
            init_script=$(cat <<EOF
set -euo pipefail
IFS=\$'\n\t'
cargo build --bin wasm_test --no-default-features --features "$features_joined" ${build_flags[@]}
exec /target/${PROFILE}/wasm_test "/input${full_path}"
EOF
)
            docker run "${additional_flags[@]}" --init --rm -i --mount type=tmpfs,destination=/tmp -v "/:/input:ro" -v "$($REALPATH "$DIR/../"):/source:ro" --mount source=cargo-cache,target=/usr/local/cargo/registry --mount source=rust-cache,target=/target --env CARGO_TARGET_DIR=/target -w /source --entrypoint "" organic-test sh -c "$init_script"
        done
    else
        # No files given: run wasm_test from the current directory (mapped
        # under /input) so it can read a document from stdin.
        local current_directory init_script
        current_directory=$(pwd)
        init_script=$(cat <<EOF
set -euo pipefail
IFS=\$'\n\t'
cargo build --bin wasm_test --no-default-features --features "$features_joined" ${build_flags[@]}
cd /input${current_directory}
exec /target/${PROFILE}/wasm_test
EOF
)
        docker run "${additional_flags[@]}" --init --rm -i --mount type=tmpfs,destination=/tmp -v "/:/input:ro" -v "$($REALPATH "$DIR/../"):/source:ro" --mount source=cargo-cache,target=/usr/local/cargo/registry --mount source=rust-cache,target=/target --env CARGO_TARGET_DIR=/target -w /source --entrypoint "" organic-test sh -c "$init_script"
    fi
}
main "${@}"

View File

@@ -1,3 +1,4 @@
#![feature(exit_status_error)]
#![feature(round_char_boundary)]
#![feature(exact_size_is_empty)]
use std::io::Read;

View File

@@ -53,6 +53,9 @@ async fn main_body() -> Result<ExitCode, Box<dyn std::error::Error>> {
let layer = layer.chain(compare_group("doomemacs", || {
compare_all_org_document("/foreign_documents/doomemacs")
}));
let layer = layer.chain(compare_group("literate_build_emacs", || {
compare_all_org_document("/foreign_documents/literate_build_emacs")
}));
let running_tests: Vec<_> = layer.map(|c| tokio::spawn(c.run_test())).collect();
let mut any_failed = false;

10
src/bin_wasm.rs Normal file
View File

@@ -0,0 +1,10 @@
//! Entry point for the wasm binary: exposes the Organic org-mode parser to
//! JavaScript through wasm-bindgen.

use wasm_bindgen::prelude::wasm_bindgen;

/// Parse the org-mode document in `org_contents` and return the result as a
/// JavaScript value. The shape of the returned value is determined by
/// `organic::wasm_cli::parse_org` — see that function for details.
#[wasm_bindgen]
pub fn parse_org(org_contents: &str) -> wasm_bindgen::JsValue {
    organic::wasm_cli::parse_org(org_contents)
}

/// No-op entry point so this file builds as a `bin` target; the real
/// functionality is in the `#[wasm_bindgen]`-exported functions above.
fn main() -> Result<(), Box<dyn std::error::Error>> {
    Ok(())
}

62
src/bin_wasm_test.rs Normal file
View File

@@ -0,0 +1,62 @@
#![feature(exact_size_is_empty)]
#![feature(exit_status_error)]
use std::io::Read;
use organic::wasm_test::wasm_run_anonymous_compare;
use organic::wasm_test::wasm_run_compare_on_file;
#[cfg(feature = "tracing")]
use crate::init_tracing::init_telemetry;
#[cfg(feature = "tracing")]
use crate::init_tracing::shutdown_telemetry;
#[cfg(feature = "tracing")]
mod init_tracing;
/// Entry point when built without the `tracing` feature: build a Tokio
/// runtime and drive the async body to completion.
#[cfg(not(feature = "tracing"))]
fn main() -> Result<(), Box<dyn std::error::Error>> {
    let rt = tokio::runtime::Runtime::new()?;
    // `block_on` can poll the future directly; the previous wrapper async
    // block that bound the result to a local and immediately returned it was
    // redundant (clippy's `let_and_return`).
    rt.block_on(main_body())
}
/// Entry point when built with the `tracing` feature: wraps the async body
/// with telemetry setup and teardown.
#[cfg(feature = "tracing")]
fn main() -> Result<(), Box<dyn std::error::Error>> {
    let rt = tokio::runtime::Runtime::new()?;
    rt.block_on(async {
        init_telemetry()?;
        // Capture the body's result (instead of using `?`) so telemetry is
        // flushed and shut down even when the body fails; a shutdown error
        // takes precedence via the `?` on the next line.
        let main_body_result = main_body().await;
        shutdown_telemetry()?;
        main_body_result
    })
}
/// Compare Organic's parse output against emacs for one or more documents.
///
/// With no command-line arguments, the document is read from stdin; otherwise
/// each argument is treated as a file path to compare. Returns an error as
/// soon as any comparison reports a mismatch.
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
async fn main_body() -> Result<(), Box<dyn std::error::Error>> {
    // Skip argv[0] (the program name); anything left is a list of files.
    let args = std::env::args().skip(1);
    if args.is_empty() {
        let org_contents = read_stdin_to_string()?;
        // The compare functions return Ok(false) on mismatch; surface that
        // as an error (previously expressed as an empty `if` with the error
        // in the `else` branch).
        if !wasm_run_anonymous_compare(org_contents).await? {
            return Err("Diff results do not match.".into());
        }
        Ok(())
    } else {
        // Compare each file in order, stopping at the first mismatch.
        for arg in args {
            if !wasm_run_compare_on_file(arg).await? {
                return Err("Diff results do not match.".into());
            }
        }
        Ok(())
    }
}
/// Read all of standard input into a UTF-8 `String`.
///
/// Returns an error if stdin cannot be read or its bytes are not valid UTF-8.
fn read_stdin_to_string() -> Result<String, Box<dyn std::error::Error>> {
    let mut buffer = String::new();
    let mut stdin = std::io::stdin().lock();
    stdin.read_to_string(&mut buffer)?;
    Ok(buffer)
}

View File

@@ -1,16 +1,16 @@
use std::path::Path;
use crate::compare::diff::compare_document;
use crate::compare::diff::DiffResult;
use crate::compare::parse::emacs_parse_anonymous_org_document;
use crate::compare::parse::emacs_parse_file_org_document;
use crate::compare::parse::get_emacs_version;
use crate::compare::parse::get_org_mode_version;
use crate::compare::sexp::sexp;
use crate::context::GlobalSettings;
use crate::context::LocalFileAccessInterface;
use crate::parser::parse_file_with_settings;
use crate::parser::parse_with_settings;
use crate::util::cli::emacs_parse_anonymous_org_document;
use crate::util::cli::emacs_parse_file_org_document;
use crate::util::cli::print_versions;
use crate::util::elisp::sexp;
use crate::util::terminal::foreground_color;
use crate::util::terminal::reset_color;
pub async fn run_anonymous_compare<P: AsRef<str>>(
org_contents: P,
@@ -68,8 +68,8 @@ pub async fn run_anonymous_compare_with_settings<'g, 's, P: AsRef<str>>(
} else if !silent {
println!(
"{color}Entire document passes.{reset}",
color = DiffResult::foreground_color(0, 255, 0),
reset = DiffResult::reset_color(),
color = foreground_color(0, 255, 0),
reset = reset_color(),
);
}
@@ -121,19 +121,10 @@ pub async fn run_compare_on_file_with_settings<'g, 's, P: AsRef<Path>>(
} else if !silent {
println!(
"{color}Entire document passes.{reset}",
color = DiffResult::foreground_color(0, 255, 0),
reset = DiffResult::reset_color(),
color = foreground_color(0, 255, 0),
reset = reset_color(),
);
}
Ok(true)
}
async fn print_versions() -> Result<(), Box<dyn std::error::Error>> {
eprintln!("Using emacs version: {}", get_emacs_version().await?.trim());
eprintln!(
"Using org-mode version: {}",
get_org_mode_version().await?.trim()
);
Ok(())
}

View File

@@ -1,3 +1,5 @@
use std::borrow::Borrow;
use std::borrow::Cow;
use std::collections::BTreeSet;
use std::fmt::Debug;
use std::str::FromStr;
@@ -7,8 +9,6 @@ use super::diff::artificial_owned_diff_scope;
use super::diff::compare_ast_node;
use super::diff::DiffEntry;
use super::diff::DiffStatus;
use super::sexp::unquote;
use super::sexp::Token;
use super::util::get_property;
use super::util::get_property_numeric;
use super::util::get_property_quoted_string;
@@ -18,6 +18,8 @@ use crate::types::CharOffsetInLine;
use crate::types::LineNumber;
use crate::types::RetainLabels;
use crate::types::SwitchNumberLines;
use crate::util::elisp::unquote;
use crate::util::elisp::Token;
#[derive(Debug)]
pub(crate) enum EmacsField<'s> {
@@ -262,11 +264,11 @@ pub(crate) fn compare_property_set_of_quoted_string<
.iter()
.map(|e| e.as_atom())
.collect::<Result<Vec<_>, _>>()?;
let value: Vec<String> = value
let value: Vec<Cow<'_, str>> = value
.into_iter()
.map(unquote)
.collect::<Result<Vec<_>, _>>()?;
let value: BTreeSet<&str> = value.iter().map(|e| e.as_str()).collect();
let value: BTreeSet<&str> = value.iter().map(|e| e.borrow()).collect();
let mismatched: Vec<_> = value.symmetric_difference(&rust_value).copied().collect();
if !mismatched.is_empty() {
let this_status = DiffStatus::Bad;
@@ -546,6 +548,21 @@ where
let mut full_status: Vec<DiffEntry<'b, 's>> = Vec::with_capacity(outer_rust_list.len());
for (kw_e, kw_r) in outer_emacs_list.iter().zip(outer_rust_list) {
match (kw_e.as_atom(), kw_r) {
(Ok("nil"), (None, mandatory_value)) if mandatory_value.is_empty() => {
// If its an empty keyword then it becomes nil in the elisp.
continue;
}
(Ok("nil"), _) => {
let this_status = DiffStatus::Bad;
let message = Some(format!(
"{} mismatch (emacs != rust) {:?} != {:?}",
emacs_field, kw_e, kw_r
));
return Ok(ComparePropertiesResult::SelfChange(this_status, message));
}
_ => {}
}
let kw_e = kw_e.as_list()?;
let child_status_length = kw_r.1.len() + kw_r.0.as_ref().map(|opt| opt.len()).unwrap_or(0);
let mut child_status: Vec<DiffEntry<'b, 's>> = Vec::with_capacity(child_status_length);
@@ -554,6 +571,17 @@ where
let mut kw_e = kw_e.iter();
// First element is a list representing the mandatory value.
if let Some(val_e) = kw_e.next() {
match (val_e.as_atom(), kw_r) {
(Ok("nil"), (_, mandatory_value)) if mandatory_value.is_empty() => {}
(Ok("nil"), _) => {
let this_status = DiffStatus::Bad;
let message = Some(format!(
"{} mismatch (emacs != rust) {:?} != {:?}",
emacs_field, kw_e, kw_r
));
return Ok(ComparePropertiesResult::SelfChange(this_status, message));
}
_ => {
let el = val_e.as_list()?;
if el.len() != kw_r.1.len() {
let this_status = DiffStatus::Bad;
@@ -566,6 +594,8 @@ where
for (e, r) in el.iter().zip(kw_r.1.iter()) {
child_status.push(compare_ast_node(source, e, r.into())?);
}
}
};
} else {
let this_status = DiffStatus::Bad;
let message = Some(format!(
@@ -653,7 +683,7 @@ pub(crate) fn compare_property_number_lines<
(Some(number_lines), Some(rust_number_lines)) => {
let token_list = number_lines.as_list()?;
let number_type = token_list
.get(0)
.first()
.map(Token::as_atom)
.map_or(Ok(None), |r| r.map(Some))?
.ok_or(":number-lines should have a type.")?;

View File

@@ -16,10 +16,6 @@ use super::compare_field::compare_property_retain_labels;
use super::compare_field::compare_property_set_of_quoted_string;
use super::compare_field::compare_property_single_ast_node;
use super::compare_field::compare_property_unquoted_atom;
use super::elisp_fact::ElispFact;
use super::elisp_fact::GetElispFact;
use super::sexp::unquote;
use super::sexp::Token;
use super::util::affiliated_keywords_names;
use super::util::assert_no_children;
use super::util::compare_additional_properties;
@@ -57,7 +53,6 @@ use crate::types::FixedWidthArea;
use crate::types::FootnoteDefinition;
use crate::types::FootnoteReference;
use crate::types::FootnoteReferenceType;
use crate::types::GetStandardProperties;
use crate::types::Heading;
use crate::types::HorizontalRule;
use crate::types::Hour;
@@ -110,6 +105,12 @@ use crate::types::Verbatim;
use crate::types::VerseBlock;
use crate::types::WarningDelayType;
use crate::types::Year;
use crate::util::elisp::unquote;
use crate::util::elisp::Token;
use crate::util::elisp_fact::ElispFact;
use crate::util::elisp_fact::GetElispFact;
use crate::util::terminal::foreground_color;
use crate::util::terminal::reset_color;
#[derive(Debug)]
pub enum DiffEntry<'b, 's> {
@@ -128,7 +129,7 @@ pub struct DiffResult<'b, 's> {
emacs_token: &'b Token<'s>,
}
#[derive(Debug, PartialEq)]
#[derive(Debug)]
pub(crate) enum DiffStatus {
Good,
Bad,
@@ -164,7 +165,7 @@ impl<'b, 's> DiffEntry<'b, 's> {
fn is_immediately_bad(&self) -> bool {
match self {
DiffEntry::DiffResult(diff) => diff.status == DiffStatus::Bad,
DiffEntry::DiffResult(diff) => matches!(diff.status, DiffStatus::Bad),
DiffEntry::DiffLayer(_) => false,
}
}
@@ -201,21 +202,21 @@ impl<'b, 's> DiffResult<'b, 's> {
if self.has_bad_children() {
format!(
"{color}BADCHILD{reset}",
color = DiffResult::foreground_color(255, 255, 0),
reset = DiffResult::reset_color(),
color = foreground_color(255, 255, 0),
reset = reset_color(),
)
} else {
format!(
"{color}GOOD{reset}",
color = DiffResult::foreground_color(0, 255, 0),
reset = DiffResult::reset_color(),
color = foreground_color(0, 255, 0),
reset = reset_color(),
)
}
}
DiffStatus::Bad => format!(
"{color}BAD{reset}",
color = DiffResult::foreground_color(255, 0, 0),
reset = DiffResult::reset_color(),
color = foreground_color(255, 0, 0),
reset = reset_color(),
),
}
};
@@ -240,45 +241,6 @@ impl<'b, 's> DiffResult<'b, 's> {
.iter()
.any(|child| child.is_immediately_bad() || child.has_bad_children())
}
pub(crate) fn foreground_color(red: u8, green: u8, blue: u8) -> String {
if DiffResult::should_use_color() {
format!(
"\x1b[38;2;{red};{green};{blue}m",
red = red,
green = green,
blue = blue
)
} else {
String::new()
}
}
#[allow(dead_code)]
pub(crate) fn background_color(red: u8, green: u8, blue: u8) -> String {
if DiffResult::should_use_color() {
format!(
"\x1b[48;2;{red};{green};{blue}m",
red = red,
green = green,
blue = blue
)
} else {
String::new()
}
}
pub(crate) fn reset_color() -> &'static str {
if DiffResult::should_use_color() {
"\x1b[0m"
} else {
""
}
}
fn should_use_color() -> bool {
!std::env::var("NO_COLOR").is_ok_and(|val| !val.is_empty())
}
}
impl<'b, 's> DiffLayer<'b, 's> {
@@ -296,14 +258,14 @@ impl<'b, 's> DiffLayer<'b, 's> {
let status_text = if self.has_bad_children() {
format!(
"{color}BADCHILD{reset}",
color = DiffResult::foreground_color(255, 255, 0),
reset = DiffResult::reset_color(),
color = foreground_color(255, 255, 0),
reset = reset_color(),
)
} else {
format!(
"{color}GOOD{reset}",
color = DiffResult::foreground_color(0, 255, 0),
reset = DiffResult::reset_color(),
color = foreground_color(0, 255, 0),
reset = reset_color(),
)
};
println!(
@@ -413,7 +375,7 @@ pub(crate) fn compare_ast_node<'b, 's>(
name: rust.get_elisp_fact().get_elisp_name(),
message: Some(e.to_string()),
children: Vec::new(),
rust_source: rust.get_standard_properties().get_source(),
rust_source: rust.get_source(),
emacs_token: emacs,
}
.into()
@@ -1576,7 +1538,7 @@ fn compare_example_block<'b, 's>(
[],
(
EmacsField::Required(":value"),
|r| Some(r.contents.as_str()),
|r| Some(r.get_value()),
compare_property_quoted_string
),
(
@@ -1654,7 +1616,7 @@ fn compare_export_block<'b, 's>(
),
(
EmacsField::Required(":value"),
|r| Some(r.contents.as_str()),
|r| Some(r.get_value()),
compare_property_quoted_string
)
) {
@@ -1702,7 +1664,7 @@ fn compare_src_block<'b, 's>(
),
(
EmacsField::Required(":value"),
|r| Some(r.contents.as_str()),
|r| Some(r.get_value()),
compare_property_quoted_string
),
(
@@ -2153,7 +2115,7 @@ fn compare_plain_text<'b, 's>(
let text = emacs.as_text()?;
let start_ind: usize = text
.properties
.get(0)
.first()
.expect("Should have start index.")
.as_atom()?
.parse()?;

View File

@@ -2,10 +2,7 @@
mod compare;
mod compare_field;
mod diff;
mod elisp_fact;
mod macros;
mod parse;
mod sexp;
mod util;
pub use compare::run_anonymous_compare;
pub use compare::run_anonymous_compare_with_settings;

View File

@@ -1,3 +1,4 @@
use std::borrow::Cow;
use std::str::FromStr;
use super::compare_field::compare_property_list_of_quoted_string;
@@ -7,15 +8,15 @@ use super::compare_field::compare_property_quoted_string;
use super::compare_field::ComparePropertiesResult;
use super::diff::DiffEntry;
use super::diff::DiffStatus;
use super::elisp_fact::GetElispFact;
use super::sexp::Token;
use crate::compare::diff::compare_ast_node;
use crate::compare::sexp::unquote;
use crate::types::AffiliatedKeywordValue;
use crate::types::AstNode;
use crate::types::GetAffiliatedKeywords;
use crate::types::GetStandardProperties;
use crate::types::StandardProperties;
use crate::util::elisp::get_emacs_standard_properties;
use crate::util::elisp::unquote;
use crate::util::elisp::Token;
use crate::util::elisp_fact::GetElispFact;
/// Check if the child string slice is a slice of the parent string slice.
fn is_slice_of(parent: &str, child: &str) -> bool {
@@ -29,32 +30,29 @@ fn is_slice_of(parent: &str, child: &str) -> bool {
/// Get the byte offset into source that the rust object exists at.
///
/// These offsets are zero-based unlike the elisp ones.
fn get_rust_byte_offsets<'b, 's, S: StandardProperties<'s> + ?Sized>(
original_document: &'s str,
rust_ast_node: &'b S,
) -> (usize, usize) {
let rust_object_source = rust_ast_node.get_source();
debug_assert!(is_slice_of(original_document, rust_object_source));
let offset = rust_object_source.as_ptr() as usize - original_document.as_ptr() as usize;
let end = offset + rust_object_source.len();
fn get_rust_byte_offsets(original_document: &str, subset: &str) -> (usize, usize) {
debug_assert!(is_slice_of(original_document, subset));
let offset = subset.as_ptr() as usize - original_document.as_ptr() as usize;
let end = offset + subset.len();
(offset, end)
}
pub(crate) fn compare_standard_properties<
'b,
's,
S: GetStandardProperties<'s> + GetElispFact<'s> + ?Sized,
S: StandardProperties<'s> + GetElispFact<'s> + ?Sized,
>(
original_document: &'s str,
emacs: &'b Token<'s>,
rust: &'b S,
) -> Result<(), Box<dyn std::error::Error>> {
assert_name(emacs, rust.get_elisp_fact().get_elisp_name())?;
assert_bounds(original_document, emacs, rust.get_standard_properties())?;
assert_bounds(original_document, emacs, rust)?;
assert_post_blank(emacs, rust)?;
Ok(())
}
pub(crate) fn assert_name<S: AsRef<str>>(
fn assert_name<S: AsRef<str>>(
emacs: &Token<'_>,
name: S,
) -> Result<(), Box<dyn std::error::Error>> {
@@ -77,101 +75,75 @@ pub(crate) fn assert_name<S: AsRef<str>>(
/// Assert that the character ranges defined by upstream org-mode's :standard-properties match the slices in Organic's StandardProperties.
///
/// This does **not** handle plain text because plain text is a special case.
pub(crate) fn assert_bounds<'b, 's, S: StandardProperties<'s> + ?Sized>(
fn assert_bounds<'b, 's, S: StandardProperties<'s> + ?Sized>(
original_document: &'s str,
emacs: &'b Token<'s>,
rust: &'b S,
) -> Result<(), Box<dyn std::error::Error>> {
let standard_properties = get_emacs_standard_properties(emacs)?; // 1-based
// Check begin/end
{
let (begin, end) = (
standard_properties
.begin
.ok_or("Token should have a begin.")?,
standard_properties.end.ok_or("Token should have an end.")?,
);
let (rust_begin, rust_end) = get_rust_byte_offsets(original_document, rust); // 0-based
let (rust_begin, rust_end) = get_rust_byte_offsets(original_document, rust.get_source()); // 0-based
let rust_begin_char_offset = original_document[..rust_begin].chars().count() + 1; // 1-based
let rust_end_char_offset =
rust_begin_char_offset + original_document[rust_begin..rust_end].chars().count(); // 1-based
if rust_begin_char_offset != begin || rust_end_char_offset != end {
Err(format!("Rust bounds (in chars) ({rust_begin}, {rust_end}) do not match emacs bounds ({emacs_begin}, {emacs_end})", rust_begin = rust_begin_char_offset, rust_end = rust_end_char_offset, emacs_begin=begin, emacs_end=end))?;
}
}
// Check contents-begin/contents-end
{
if let Some(rust_contents) = rust.get_contents() {
let (begin, end) = (
standard_properties
.contents_begin
.ok_or("Token should have a contents-begin.")?,
standard_properties
.contents_end
.ok_or("Token should have an contents-end.")?,
);
let (rust_begin, rust_end) = get_rust_byte_offsets(original_document, rust_contents); // 0-based
let rust_begin_char_offset = original_document[..rust_begin].chars().count() + 1; // 1-based
let rust_end_char_offset =
rust_begin_char_offset + original_document[rust_begin..rust_end].chars().count(); // 1-based
if rust_begin_char_offset != begin || rust_end_char_offset != end {
Err(format!("Rust contents bounds (in chars) ({rust_begin}, {rust_end}) do not match emacs contents bounds ({emacs_begin}, {emacs_end})", rust_begin = rust_begin_char_offset, rust_end = rust_end_char_offset, emacs_begin=begin, emacs_end=end))?;
}
} else if standard_properties.contents_begin.is_some()
|| standard_properties.contents_end.is_some()
{
Err(format!("Rust contents is None but emacs contents bounds are ({emacs_begin:?}, {emacs_end:?})", emacs_begin=standard_properties.contents_begin, emacs_end=standard_properties.contents_end))?;
}
}
Ok(())
}
struct EmacsStandardProperties {
begin: Option<usize>,
#[allow(dead_code)]
post_affiliated: Option<usize>,
#[allow(dead_code)]
contents_begin: Option<usize>,
#[allow(dead_code)]
contents_end: Option<usize>,
end: Option<usize>,
#[allow(dead_code)]
post_blank: Option<usize>,
/// Assert that the post blank matches between emacs and organic.
///
/// This does **not** handle plain text because plain text is a special case.
fn assert_post_blank<'b, 's, S: StandardProperties<'s> + ?Sized>(
emacs: &'b Token<'s>,
rust: &'b S,
) -> Result<(), Box<dyn std::error::Error>> {
let standard_properties = get_emacs_standard_properties(emacs)?; // 1-based
let rust_post_blank = rust.get_post_blank();
let emacs_post_blank = standard_properties
.post_blank
.ok_or("Token should have a post-blank.")?;
if rust_post_blank as usize != emacs_post_blank {
Err(format!("Rust post-blank {rust_post_blank} does not match emacs post-blank ({emacs_post_blank})", rust_post_blank = rust_post_blank, emacs_post_blank = emacs_post_blank))?;
}
fn get_emacs_standard_properties(
emacs: &Token<'_>,
) -> Result<EmacsStandardProperties, Box<dyn std::error::Error>> {
let children = emacs.as_list()?;
let attributes_child = children.get(1).ok_or("Should have an attributes child.")?;
let attributes_map = attributes_child.as_map()?;
let standard_properties = attributes_map.get(":standard-properties");
Ok(if standard_properties.is_some() {
let mut std_props = standard_properties
.expect("if statement proves its Some")
.as_vector()?
.iter();
let begin = maybe_token_to_usize(std_props.next())?;
let post_affiliated = maybe_token_to_usize(std_props.next())?;
let contents_begin = maybe_token_to_usize(std_props.next())?;
let contents_end = maybe_token_to_usize(std_props.next())?;
let end = maybe_token_to_usize(std_props.next())?;
let post_blank = maybe_token_to_usize(std_props.next())?;
EmacsStandardProperties {
begin,
post_affiliated,
contents_begin,
contents_end,
end,
post_blank,
}
} else {
let begin = maybe_token_to_usize(attributes_map.get(":begin").copied())?;
let end = maybe_token_to_usize(attributes_map.get(":end").copied())?;
let contents_begin = maybe_token_to_usize(attributes_map.get(":contents-begin").copied())?;
let contents_end = maybe_token_to_usize(attributes_map.get(":contents-end").copied())?;
let post_blank = maybe_token_to_usize(attributes_map.get(":post-blank").copied())?;
let post_affiliated =
maybe_token_to_usize(attributes_map.get(":post-affiliated").copied())?;
EmacsStandardProperties {
begin,
post_affiliated,
contents_begin,
contents_end,
end,
post_blank,
}
})
}
fn maybe_token_to_usize(
token: Option<&Token<'_>>,
) -> Result<Option<usize>, Box<dyn std::error::Error>> {
Ok(token
.map(|token| token.as_atom())
.map_or(Ok(None), |r| r.map(Some))?
.and_then(|val| {
if val == "nil" {
None
} else {
Some(val.parse::<usize>())
}
})
.map_or(Ok(None), |r| r.map(Some))?)
Ok(())
}
/// Get a named property from the emacs token.
@@ -206,10 +178,10 @@ pub(crate) fn get_property_unquoted_atom<'s>(
/// Get a named property containing an quoted string from the emacs token.
///
/// Returns None if key is not found.
pub(crate) fn get_property_quoted_string(
emacs: &Token<'_>,
pub(crate) fn get_property_quoted_string<'s>(
emacs: &Token<'s>,
key: &str,
) -> Result<Option<String>, Box<dyn std::error::Error>> {
) -> Result<Option<Cow<'s, str>>, Box<dyn std::error::Error>> {
get_property(emacs, key)?
.map(Token::as_atom)
.map_or(Ok(None), |r| r.map(Some))?
@@ -240,7 +212,7 @@ where
pub(crate) fn compare_children<'b, 's, 'x, RC>(
source: &'s str,
emacs: &'b Token<'s>,
rust_children: &'x Vec<RC>,
rust_children: &'x [RC],
child_status: &mut Vec<DiffEntry<'b, 's>>,
this_status: &mut DiffStatus,
message: &mut Option<String>,

View File

@@ -71,9 +71,7 @@ pub struct EntityDefinition<'a> {
impl<'g, 's> GlobalSettings<'g, 's> {
fn new() -> GlobalSettings<'g, 's> {
debug_assert!(
DEFAULT_ORG_ENTITIES.is_sorted_by(|a, b| b.name.len().partial_cmp(&a.name.len()))
);
debug_assert!(DEFAULT_ORG_ENTITIES.is_sorted_by(|a, b| a.name.len() >= b.name.len()));
GlobalSettings {
radio_targets: Vec::new(),
file_access: &LocalFileAccessInterface {

View File

@@ -6,11 +6,9 @@ pub(crate) type Res<T, U> = IResult<T, U, CustomError>;
#[derive(Debug)]
pub enum CustomError {
#[allow(dead_code)]
Text(String),
Static(&'static str),
IO(std::io::Error),
Parser(ErrorKind),
Static(#[allow(dead_code)] &'static str),
IO(#[allow(dead_code)] std::io::Error),
Parser(#[allow(dead_code)] ErrorKind),
}
impl<I: std::fmt::Debug> ParseError<I> for CustomError {
@@ -35,9 +33,3 @@ impl From<&'static str> for CustomError {
CustomError::Static(value)
}
}
impl From<String> for CustomError {
fn from(value: String) -> Self {
CustomError::Text(value)
}
}

View File

@@ -24,10 +24,10 @@ pub(crate) fn record_event(event_type: EventType, input: OrgSource<'_>) {
*db.entry(key).or_insert(0) += 1;
}
pub fn report(original_document: &str) {
pub(crate) fn report(original_document: &str) {
let mut db = GLOBAL_DATA.lock().unwrap();
let db = db.get_or_insert_with(HashMap::new);
let mut results: Vec<_> = db.iter().map(|(k, v)| (k, v)).collect();
let mut results: Vec<_> = db.iter().collect();
results.sort_by_key(|(_k, v)| *v);
// This would put the most common at the top, but that is a pain when there is already a lot of output from the parser.
// results.sort_by(|(_ak, av), (_bk, bv)| bv.cmp(av));

View File

@@ -2,5 +2,5 @@ mod database;
mod event_type;
pub(crate) use database::record_event;
pub use database::report;
pub(crate) use database::report;
pub(crate) use event_type::EventType;

View File

@@ -90,12 +90,11 @@ impl<'r, 's> Iterator for AllAstNodeIter<'r, 's> {
}
}
impl<'r, 's> IntoIterator for AstNode<'r, 's> {
type Item = AstNode<'r, 's>;
type IntoIter = AllAstNodeIter<'r, 's>;
fn into_iter(self) -> Self::IntoIter {
impl<'r, 's> AstNode<'r, 's> {
/// Iterate all AST nodes.
///
/// This is different from the iter/into_iter functions which iterate a single level of the children. This iterates the entire tree including returning the root node itself.
pub fn iter_all_ast_nodes(self) -> AllAstNodeIter<'r, 's> {
AllAstNodeIter {
root: Some(self),
queue: VecDeque::new(),

View File

@@ -1,8 +1,9 @@
#![feature(exit_status_error)]
#![feature(trait_alias)]
#![feature(path_file_prefix)]
#![feature(is_sorted)]
#![feature(test)]
#![feature(iter_intersperse)]
#![feature(exact_size_is_empty)]
// TODO: #![warn(missing_docs)]
#![allow(clippy::bool_assert_comparison)] // Sometimes you want the long form because its easier to see at a glance.
@@ -10,11 +11,20 @@ extern crate test;
#[cfg(feature = "compare")]
pub mod compare;
pub mod parse_cli;
#[cfg(any(feature = "compare", feature = "wasm", feature = "wasm_test"))]
mod util;
#[cfg(any(feature = "wasm", feature = "wasm_test"))]
mod wasm;
#[cfg(any(feature = "wasm", feature = "wasm_test"))]
pub mod wasm_cli;
#[cfg(feature = "wasm_test")]
pub mod wasm_test;
mod context;
mod error;
#[cfg(feature = "event_count")]
pub mod event_count;
mod event_count;
mod iter;
pub mod parser;
pub mod types;

View File

@@ -1,12 +1,4 @@
#![feature(round_char_boundary)]
#![feature(exact_size_is_empty)]
use std::io::Read;
use std::path::Path;
use ::organic::parser::parse;
use organic::parser::parse_with_settings;
use organic::settings::GlobalSettings;
use organic::settings::LocalFileAccessInterface;
use organic::parse_cli::main_body;
#[cfg(feature = "tracing")]
use crate::init_tracing::init_telemetry;
@@ -30,55 +22,3 @@ fn main() -> Result<(), Box<dyn std::error::Error>> {
main_body_result
})
}
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
fn main_body() -> Result<(), Box<dyn std::error::Error>> {
let args = std::env::args().skip(1);
if args.is_empty() {
let org_contents = read_stdin_to_string()?;
run_anonymous_parse(org_contents)
} else {
for arg in args {
run_parse_on_file(arg)?
}
Ok(())
}
}
fn read_stdin_to_string() -> Result<String, Box<dyn std::error::Error>> {
let mut stdin_contents = String::new();
std::io::stdin()
.lock()
.read_to_string(&mut stdin_contents)?;
Ok(stdin_contents)
}
fn run_anonymous_parse<P: AsRef<str>>(org_contents: P) -> Result<(), Box<dyn std::error::Error>> {
let org_contents = org_contents.as_ref();
let rust_parsed = parse(org_contents)?;
println!("{:#?}", rust_parsed);
#[cfg(feature = "event_count")]
organic::event_count::report(org_contents);
Ok(())
}
fn run_parse_on_file<P: AsRef<Path>>(org_path: P) -> Result<(), Box<dyn std::error::Error>> {
let org_path = org_path.as_ref();
let parent_directory = org_path
.parent()
.ok_or("Should be contained inside a directory.")?;
let org_contents = std::fs::read_to_string(org_path)?;
let org_contents = org_contents.as_str();
let file_access_interface = LocalFileAccessInterface {
working_directory: Some(parent_directory.to_path_buf()),
};
let global_settings = GlobalSettings {
file_access: &file_access_interface,
..Default::default()
};
let rust_parsed = parse_with_settings(org_contents, &global_settings)?;
println!("{:#?}", rust_parsed);
#[cfg(feature = "event_count")]
organic::event_count::report(org_contents);
Ok(())
}

59
src/parse_cli/mod.rs Normal file
View File

@@ -0,0 +1,59 @@
use std::io::Read;
use std::path::Path;
use crate::parser::parse;
use crate::parser::parse_with_settings;
use crate::settings::GlobalSettings;
use crate::settings::LocalFileAccessInterface;
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
pub fn main_body() -> Result<(), Box<dyn std::error::Error>> {
let args = std::env::args().skip(1);
if args.is_empty() {
let org_contents = read_stdin_to_string()?;
run_anonymous_parse(org_contents)
} else {
for arg in args {
run_parse_on_file(arg)?
}
Ok(())
}
}
fn read_stdin_to_string() -> Result<String, Box<dyn std::error::Error>> {
let mut stdin_contents = String::new();
std::io::stdin()
.lock()
.read_to_string(&mut stdin_contents)?;
Ok(stdin_contents)
}
fn run_anonymous_parse<P: AsRef<str>>(org_contents: P) -> Result<(), Box<dyn std::error::Error>> {
let org_contents = org_contents.as_ref();
let rust_parsed = parse(org_contents)?;
println!("{:#?}", rust_parsed);
#[cfg(feature = "event_count")]
crate::event_count::report(org_contents);
Ok(())
}
fn run_parse_on_file<P: AsRef<Path>>(org_path: P) -> Result<(), Box<dyn std::error::Error>> {
let org_path = org_path.as_ref();
let parent_directory = org_path
.parent()
.ok_or("Should be contained inside a directory.")?;
let org_contents = std::fs::read_to_string(org_path)?;
let org_contents = org_contents.as_str();
let file_access_interface = LocalFileAccessInterface {
working_directory: Some(parent_directory.to_path_buf()),
};
let global_settings = GlobalSettings {
file_access: &file_access_interface,
..Default::default()
};
let rust_parsed = parse_with_settings(org_contents, &global_settings)?;
println!("{:#?}", rust_parsed);
#[cfg(feature = "event_count")]
crate::event_count::report(org_contents);
Ok(())
}

View File

@@ -47,7 +47,7 @@ pub(crate) fn angle_link<'b, 'g, 'r, 's>(
parser_with_context!(parse_angle_link)(context),
))(remaining)?;
let (remaining, _) = tag(">")(remaining)?;
let (remaining, _trailing_whitespace) =
let (remaining, post_blank) =
maybe_consume_object_trailing_whitespace_if_not_exiting(context, remaining)?;
let source = get_consumed(input, remaining);
Ok((
@@ -59,6 +59,7 @@ pub(crate) fn angle_link<'b, 'g, 'r, 's>(
raw_link: raw_link.into(),
search_option: parsed_link.search_option,
application: parsed_link.application,
post_blank: post_blank.map(Into::<&str>::into),
},
))
}

View File

@@ -4,7 +4,6 @@ use nom::bytes::complete::tag_no_case;
use nom::character::complete::anychar;
use nom::character::complete::one_of;
use nom::character::complete::space0;
use nom::combinator::consumed;
use nom::combinator::opt;
use nom::combinator::peek;
use nom::combinator::recognize;
@@ -43,32 +42,10 @@ where
start_of_line(remaining)?;
let (remaining, _) = tuple((space0, tag("#+"), tag_no_case("call"), tag(":")))(remaining)?;
if let Ok((remaining, (_, line_break))) = tuple((space0, org_line_ending))(remaining) {
let (remaining, _trailing_ws) =
maybe_consume_trailing_whitespace_if_not_exiting(context, remaining)?;
let source = get_consumed(input, remaining);
return Ok((
remaining,
BabelCall {
source: Into::<&str>::into(source),
affiliated_keywords: parse_affiliated_keywords(
context.get_global_settings(),
affiliated_keywords,
),
value: Into::<&str>::into(line_break.take(0)),
call: None,
inside_header: None,
arguments: None,
end_header: None,
},
));
}
let (remaining, _ws) = space0(remaining)?;
let (remaining, (value, babel_call_value)) = consumed(babel_call_value)(remaining)?;
let (remaining, _ws) = tuple((space0, org_line_ending))(remaining)?;
let (remaining, babel_call_value) = babel_call_value(remaining)?;
let (remaining, _trailing_ws) =
let (remaining, post_blank) =
maybe_consume_trailing_whitespace_if_not_exiting(context, remaining)?;
let source = get_consumed(input, remaining);
@@ -80,17 +57,22 @@ where
context.get_global_settings(),
affiliated_keywords,
),
value: Into::<&str>::into(value).trim_end(),
value: Into::<&str>::into(babel_call_value.value),
call: babel_call_value.call.map(Into::<&str>::into),
inside_header: babel_call_value.inside_header.map(Into::<&str>::into),
arguments: babel_call_value.arguments.map(Into::<&str>::into),
end_header: babel_call_value.end_header.map(Into::<&str>::into),
post_blank: post_blank.map(Into::<&str>::into),
},
))
}
#[derive(Debug)]
struct BabelCallValue<'s> {
/// The entire string to the right of "#+call: " without the trailing line break.
value: OrgSource<'s>,
/// The function name which may contain a line break if there are no headers/arguments.
call: Option<OrgSource<'s>>,
inside_header: Option<OrgSource<'s>>,
arguments: Option<OrgSource<'s>>,
@@ -99,13 +81,45 @@ struct BabelCallValue<'s> {
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
fn babel_call_value<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, BabelCallValue<'s>> {
let (remaining, call) = opt(babel_call_call)(input)?;
let (remaining, inside_header) = opt(inside_header)(remaining)?;
let (remaining, arguments) = opt(arguments)(remaining)?;
let (remaining, end_header) = opt(end_header)(remaining)?;
alt((
babel_call_value_without_headers,
babel_call_value_with_headers,
))(input)
}
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
fn babel_call_value_without_headers<'s>(
input: OrgSource<'s>,
) -> Res<OrgSource<'s>, BabelCallValue<'s>> {
let (remaining, value) = babel_call_call_with_headers(input)?;
let (remaining, _ws) = tuple((space0, org_line_ending))(remaining)?;
let call = get_consumed(input, remaining);
Ok((
remaining,
BabelCallValue {
value,
call: Some(call),
inside_header: None,
arguments: None,
end_header: None,
},
))
}
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
fn babel_call_value_with_headers<'s>(
input: OrgSource<'s>,
) -> Res<OrgSource<'s>, BabelCallValue<'s>> {
let (remaining, call) = opt(babel_call_call_with_headers)(input)?;
let (remaining, inside_header) = opt(inside_header)(remaining)?;
let (remaining, arguments) = opt(arguments)(remaining)?;
let (remaining, end_header) = opt(end_header)(remaining)?;
let value = get_consumed(input, remaining);
let (remaining, _ws) = tuple((space0, org_line_ending))(remaining)?;
Ok((
remaining,
BabelCallValue {
value,
call,
inside_header,
arguments: arguments.flatten(),
@@ -115,14 +129,15 @@ fn babel_call_value<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, BabelCallVal
}
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
fn babel_call_call<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, OrgSource<'s>> {
fn babel_call_call_with_headers<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, OrgSource<'s>> {
// When babel call contains no arguments or headers (for example: "#+call: lorem ipsum\n") then the trailing line break is part of the call. Otherwise, it is not.
verify(
recognize(many_till(
anychar,
alt((
peek(recognize(one_of("[("))),
peek(alt((
recognize(one_of("[(")),
recognize(tuple((space0, org_line_ending))),
)),
))),
)),
|s| s.len() > 0,
)(input)
@@ -225,26 +240,10 @@ fn impl_balanced_bracket<
let contents_end = remaining;
let (remaining, _) = end_parser(remaining)?;
let contents = if contents_start != contents_end {
let contents = if Into::<&str>::into(contents_start) != Into::<&str>::into(contents_end) {
Some(contents_start.get_until(contents_end))
} else {
None
};
Ok((remaining, contents))
}
#[cfg(test)]
mod tests {
use nom::combinator::opt;
use super::*;
#[test]
fn simple_call() -> Result<(), Box<dyn std::error::Error>> {
let input = OrgSource::new("()");
let (remaining, call) = opt(babel_call_call)(input)?;
assert_eq!(Into::<&str>::into(remaining), "()");
assert_eq!(call, None);
Ok(())
}
}

View File

@@ -15,10 +15,8 @@ use crate::context::RefContext;
use crate::error::CustomError;
use crate::error::Res;
use crate::parser::macros::element;
use crate::types::AffiliatedKeywords;
use crate::types::Object;
use crate::types::Paragraph;
use crate::types::PlainText;
#[cfg_attr(
feature = "tracing",
@@ -65,6 +63,7 @@ pub(crate) fn broken_end<'b, 'g, 'r, 's>(
match paragraph.children.first_mut() {
Some(Object::PlainText(plain_text)) => {
plain_text.source = input.get_until_end_of_str(plain_text.source).into();
paragraph.contents = Some(input.get_until_end_of_str(plain_text.source).into());
}
Some(obj) => {
panic!("Unhandled first object type inside bullshitium {:?}", obj);
@@ -75,18 +74,19 @@ pub(crate) fn broken_end<'b, 'g, 'r, 's>(
};
Ok((remaining, paragraph))
} else {
let (remaining, _trailing_ws) =
let (remaining, post_blank) =
maybe_consume_trailing_whitespace_if_not_exiting(context, lead_in_remaining)?;
let body = Into::<&str>::into(input.get_until(lead_in_remaining));
Ok((
remaining,
Paragraph {
source: input.get_until(remaining).into(),
affiliated_keywords: AffiliatedKeywords::default(),
children: vec![Object::PlainText(PlainText {
source: input.get_until(lead_in_remaining).into(),
})],
},
Paragraph::of_text(
input.get_until(remaining).into(),
body,
if !body.is_empty() { Some(body) } else { None },
post_blank.map(Into::<&str>::into),
),
))
}
}
@@ -124,6 +124,7 @@ pub(crate) fn broken_dynamic_block<'b, 'g, 'r, 's>(
match paragraph.children.first_mut() {
Some(Object::PlainText(plain_text)) => {
plain_text.source = input.get_until_end_of_str(plain_text.source).into();
paragraph.contents = Some(input.get_until_end_of_str(plain_text.source).into());
}
Some(obj) => {
panic!("Unhandled first object type inside bullshitium {:?}", obj);
@@ -134,18 +135,19 @@ pub(crate) fn broken_dynamic_block<'b, 'g, 'r, 's>(
};
Ok((remaining, paragraph))
} else {
let (remaining, _trailing_ws) =
let (remaining, post_blank) =
maybe_consume_trailing_whitespace_if_not_exiting(context, lead_in_remaining)?;
let body = Into::<&str>::into(input.get_until(lead_in_remaining));
Ok((
remaining,
Paragraph {
source: input.get_until(remaining).into(),
affiliated_keywords: AffiliatedKeywords::default(),
children: vec![Object::PlainText(PlainText {
source: input.get_until(lead_in_remaining).into(),
})],
},
Paragraph::of_text(
input.get_until(remaining).into(),
body,
if !body.is_empty() { Some(body) } else { None },
post_blank.map(Into::<&str>::into),
),
))
}
}

View File

@@ -46,16 +46,22 @@ pub(crate) fn citation<'b, 'g, 'r, 's>(
let (remaining, prefix) =
must_balance_bracket(opt(parser_with_context!(global_prefix)(context)))(remaining)?;
let contents_begin = remaining;
let (remaining, references) =
separated_list1(tag(";"), parser_with_context!(citation_reference)(context))(remaining)?;
let contents_end = {
let (rem, _) = opt(tag(";"))(remaining)?;
rem
};
let (remaining, suffix) = must_balance_bracket(opt(map(
tuple((tag(";"), parser_with_context!(global_suffix)(context))),
|(_, suffix)| suffix,
)))(remaining)?;
let (remaining, _) = tag("]")(remaining)?;
let (remaining, _trailing_whitespace) =
let (remaining, post_blank) =
maybe_consume_object_trailing_whitespace_if_not_exiting(context, remaining)?;
let source = get_consumed(input, remaining);
let contents = contents_begin.get_until(contents_end);
Ok((
remaining,
Citation {
@@ -64,6 +70,8 @@ pub(crate) fn citation<'b, 'g, 'r, 's>(
prefix: prefix.unwrap_or(Vec::new()),
suffix: suffix.unwrap_or(Vec::new()),
children: references,
contents: Into::<&str>::into(contents),
post_blank: post_blank.map(Into::<&str>::into),
},
))
}
@@ -210,12 +218,11 @@ mod tests {
use crate::context::GlobalSettings;
use crate::context::List;
use crate::parser::element_parser::element;
use crate::types::CitationReference;
use crate::types::Element;
use crate::types::GetStandardProperties;
use crate::types::StandardProperties;
#[test]
fn citation_simple() {
fn citation_simple() -> Result<(), Box<dyn std::error::Error>> {
let input = OrgSource::new("[cite:@foo]");
let global_settings = GlobalSettings::default();
let initial_context = ContextElement::document_context();
@@ -227,28 +234,33 @@ mod tests {
_ => panic!("Should be a paragraph!"),
};
assert_eq!(Into::<&str>::into(remaining), "");
assert_eq!(
first_paragraph.get_standard_properties().get_source(),
"[cite:@foo]"
);
assert_eq!(first_paragraph.get_source(), "[cite:@foo]");
assert_eq!(first_paragraph.children.len(), 1);
assert_eq!(
first_paragraph
match first_paragraph
.children
.get(0)
.expect("Len already asserted to be 1"),
&Object::Citation(Citation {
source: "[cite:@foo]",
style: None,
prefix: vec![],
suffix: vec![],
children: vec![CitationReference {
source: "@foo",
key: "foo",
prefix: vec![],
suffix: vec![]
}]
})
);
.first()
.expect("Len already asserted to be 1.")
{
Object::Citation(inner) => {
assert_eq!(inner.get_source(), "[cite:@foo]");
assert_eq!(inner.children.len(), 1);
assert!(inner.prefix.is_empty());
assert!(inner.suffix.is_empty());
assert!(inner.style.is_none());
let citation_reference = inner
.children
.first()
.expect("Len already asserted to be 1.");
assert_eq!(citation_reference.get_source(), "@foo");
assert_eq!(citation_reference.key, "foo");
assert!(citation_reference.prefix.is_empty());
assert!(citation_reference.suffix.is_empty());
}
_ => {
return Err("Child should be a citation.".into());
}
};
Ok(())
}
}

View File

@@ -40,7 +40,7 @@ pub(crate) fn clock<'b, 'g, 'r, 's>(
let (remaining, (timestamp, duration)) = clock_timestamp(context, remaining)?;
let (remaining, _) = tuple((space0, org_line_ending))(remaining)?;
let (remaining, _trailing_ws) =
let (remaining, post_blank) =
maybe_consume_trailing_whitespace_if_not_exiting(context, remaining)?;
let source = get_consumed(input, remaining);
Ok((
@@ -54,6 +54,7 @@ pub(crate) fn clock<'b, 'g, 'r, 's>(
} else {
ClockStatus::Running
},
post_blank: post_blank.map(Into::<&str>::into),
},
))
}
@@ -81,7 +82,7 @@ fn clock_timestamp<'b, 'g, 'r, 's>(
|(timestamp, duration)| (timestamp, duration.map(Into::<&str>::into)),
),
map(
parser_with_context!(inactive_timestamp)(context),
parser_with_context!(inactive_timestamp(true))(context),
|timestamp| (timestamp, None),
),
))(input)

View File

@@ -46,7 +46,7 @@ pub(crate) fn comment<'b, 'g, 'r, 's>(
let (remaining, mut remaining_lines) =
many0(preceded(not(exit_matcher), comment_line_matcher))(remaining)?;
let (remaining, _trailing_ws) =
let (remaining, post_blank) =
maybe_consume_trailing_whitespace_if_not_exiting(context, remaining)?;
let source = get_consumed(input, remaining);
let mut value = Vec::with_capacity(remaining_lines.len() + 1);
@@ -67,6 +67,7 @@ pub(crate) fn comment<'b, 'g, 'r, 's>(
Comment {
source: source.into(),
value,
post_blank: post_blank.map(Into::<&str>::into),
},
))
}
@@ -105,7 +106,6 @@ mod tests {
use super::*;
use crate::context::bind_context;
use crate::context::Context;
use crate::context::ContextElement;
use crate::context::GlobalSettings;
use crate::context::List;

View File

@@ -31,7 +31,7 @@ where
let (remaining, value) = recognize(tuple((tag("%%("), is_not("\r\n"))))(remaining)?;
let (remaining, _eol) = org_line_ending(remaining)?;
let (remaining, _trailing_ws) =
let (remaining, post_blank) =
maybe_consume_trailing_whitespace_if_not_exiting(context, remaining)?;
let source = get_consumed(input, remaining);
Ok((
@@ -43,6 +43,7 @@ where
affiliated_keywords,
),
value: Into::<&str>::into(value),
post_blank: post_blank.map(Into::<&str>::into),
},
))
}

View File

@@ -3,6 +3,7 @@ use std::path::Path;
use nom::combinator::all_consuming;
use nom::combinator::opt;
use nom::multi::many0;
use nom::InputTake;
use super::headline::heading;
use super::in_buffer_settings::apply_in_buffer_settings;
@@ -143,7 +144,7 @@ fn document_org_source<'b, 'g, 'r, 's>(
{
// If there are radio targets in this document then we need to parse the entire document again with the knowledge of the radio targets.
let all_radio_targets: Vec<&Vec<Object<'_>>> = Into::<AstNode>::into(&document)
.into_iter()
.iter_all_ast_nodes()
.filter_map(|ast_node| {
if let AstNode::RadioTarget(ast_node) = ast_node {
Some(ast_node)
@@ -181,8 +182,10 @@ fn _document<'b, 'g, 'r, 's>(
let zeroth_section_matcher = parser_with_context!(zeroth_section)(context);
let heading_matcher = parser_with_context!(heading(0))(context);
let (remaining, _blank_lines) = many0(blank_line)(input)?;
let contents_begin = remaining;
let (remaining, zeroth_section) = opt(zeroth_section_matcher)(remaining)?;
let (remaining, children) = many0(heading_matcher)(remaining)?;
let contents = get_consumed(contents_begin, remaining);
let source = get_consumed(input, remaining);
Ok((
remaining,
@@ -192,6 +195,11 @@ fn _document<'b, 'g, 'r, 's>(
path: None,
zeroth_section,
children,
contents: if contents.len() > 0 {
Into::<&str>::into(contents)
} else {
Into::<&str>::into(remaining.take(0))
},
},
))
}

View File

@@ -4,6 +4,7 @@ use nom::bytes::complete::tag_no_case;
use nom::bytes::complete::take_while;
use nom::character::complete::line_ending;
use nom::character::complete::space0;
use nom::combinator::consumed;
use nom::combinator::eof;
use nom::combinator::not;
use nom::combinator::recognize;
@@ -12,7 +13,9 @@ use nom::sequence::tuple;
use super::affiliated_keyword::parse_affiliated_keywords;
use super::org_source::OrgSource;
use super::paragraph::empty_paragraph;
use super::util::maybe_consume_trailing_whitespace_if_not_exiting;
use crate::context::bind_context;
use crate::context::parser_with_context;
use crate::context::ContextElement;
use crate::context::ExitClass;
@@ -21,7 +24,6 @@ use crate::context::RefContext;
use crate::error::CustomError;
use crate::error::Res;
use crate::parser::element_parser::element;
use crate::parser::util::blank_line;
use crate::parser::util::exit_matcher_parser;
use crate::parser::util::get_consumed;
use crate::parser::util::immediate_in_section;
@@ -30,8 +32,6 @@ use crate::parser::util::WORD_CONSTITUENT_CHARACTERS;
use crate::types::Drawer;
use crate::types::Element;
use crate::types::Keyword;
use crate::types::Paragraph;
use crate::types::SetSource;
#[cfg_attr(
feature = "tracing",
@@ -71,30 +71,12 @@ where
let parser_context = context.with_additional_node(&contexts[0]);
let parser_context = parser_context.with_additional_node(&contexts[1]);
let parser_context = parser_context.with_additional_node(&contexts[2]);
let (remaining, (contents, children)) =
consumed(parser_with_context!(children)(&parser_context))(remaining)?;
let element_matcher = parser_with_context!(element(true))(&parser_context);
let exit_matcher = parser_with_context!(exit_matcher_parser)(&parser_context);
let (remaining, children) = match tuple((
not(exit_matcher),
blank_line,
many_till(blank_line, exit_matcher),
))(remaining)
{
Ok((remain, (_not_immediate_exit, first_line, (_trailing_whitespace, _exit_contents)))) => {
let mut element = Element::Paragraph(Paragraph::of_text(first_line.into()));
let source = get_consumed(remaining, remain);
element.set_source(source.into());
(remain, vec![element])
}
Err(_) => {
let (remaining, (children, _exit_contents)) =
many_till(element_matcher, exit_matcher)(remaining)?;
(remaining, children)
}
};
let (remaining, _end) = drawer_end(&parser_context, remaining)?;
let (remaining, _trailing_ws) =
let (remaining, post_blank) =
maybe_consume_trailing_whitespace_if_not_exiting(context, remaining)?;
let source = get_consumed(input, remaining);
@@ -108,10 +90,34 @@ where
),
drawer_name: drawer_name.into(),
children,
contents: Some(contents.into()),
post_blank: post_blank.map(Into::<&str>::into),
},
))
}
#[cfg_attr(
feature = "tracing",
tracing::instrument(ret, level = "debug", skip(context))
)]
fn children<'b, 'g, 'r, 's>(
context: RefContext<'b, 'g, 'r, 's>,
input: OrgSource<'s>,
) -> Res<OrgSource<'s>, Vec<Element<'s>>> {
let element_matcher = parser_with_context!(element(true))(context);
let exit_matcher = parser_with_context!(exit_matcher_parser)(context);
if let Ok((remaining, (_not_exit, empty_para))) =
tuple((not(exit_matcher), bind_context!(empty_paragraph, context)))(input)
{
return Ok((remaining, vec![Element::Paragraph(empty_para)]));
}
let (remaining, (children, _exit_contents)) = many_till(element_matcher, exit_matcher)(input)?;
Ok((remaining, children))
}
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
fn name<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, OrgSource<'s>> {
take_while(|c| WORD_CONSTITUENT_CHARACTERS.contains(c) || "-_".contains(c))(input)

View File

@@ -6,20 +6,20 @@ use nom::character::complete::anychar;
use nom::character::complete::line_ending;
use nom::character::complete::space0;
use nom::character::complete::space1;
use nom::combinator::consumed;
use nom::combinator::eof;
use nom::combinator::not;
use nom::combinator::opt;
use nom::combinator::peek;
use nom::combinator::recognize;
use nom::multi::many0;
use nom::multi::many_till;
use nom::sequence::preceded;
use nom::sequence::tuple;
use super::affiliated_keyword::parse_affiliated_keywords;
use super::greater_block::leading_blank_lines_end;
use super::org_source::OrgSource;
use super::paragraph::empty_paragraph;
use super::util::maybe_consume_trailing_whitespace_if_not_exiting;
use crate::context::bind_context;
use crate::context::parser_with_context;
use crate::context::ContextElement;
use crate::context::ExitClass;
@@ -28,7 +28,6 @@ use crate::context::RefContext;
use crate::error::CustomError;
use crate::error::Res;
use crate::parser::element_parser::element;
use crate::parser::util::blank_line;
use crate::parser::util::exit_matcher_parser;
use crate::parser::util::get_consumed;
use crate::parser::util::immediate_in_section;
@@ -36,8 +35,6 @@ use crate::parser::util::start_of_line;
use crate::types::DynamicBlock;
use crate::types::Element;
use crate::types::Keyword;
use crate::types::Paragraph;
use crate::types::SetSource;
#[cfg_attr(
feature = "tracing",
@@ -82,25 +79,25 @@ where
let element_matcher = parser_with_context!(element(true))(&parser_context);
let exit_matcher = parser_with_context!(exit_matcher_parser)(&parser_context);
not(exit_matcher)(remaining)?;
let (remaining, leading_blank_lines) = opt(consumed(tuple((
blank_line,
many0(preceded(not(exit_matcher), blank_line)),
))))(remaining)?;
let leading_blank_lines =
leading_blank_lines.map(|(source, (first_line, _remaining_lines))| {
let mut element = Element::Paragraph(Paragraph::of_text(first_line.into()));
element.set_source(source.into());
element
let contents_begin = remaining;
let blank_line_context = ContextElement::ExitMatcherNode(ExitMatcherNode {
class: ExitClass::Alpha,
exit_matcher: &leading_blank_lines_end,
});
let blank_line_context = parser_context.with_additional_node(&blank_line_context);
let (remaining, leading_blank_lines) =
opt(bind_context!(empty_paragraph, &blank_line_context))(remaining)?;
let (remaining, (mut children, _exit_contents)) =
many_till(element_matcher, exit_matcher)(remaining)?;
if let Some(lines) = leading_blank_lines {
children.insert(0, lines);
children.insert(0, Element::Paragraph(lines));
}
let contents = get_consumed(contents_begin, remaining);
let (remaining, _end) = dynamic_block_end(&parser_context, remaining)?;
let (remaining, _trailing_ws) =
let (remaining, post_blank) =
maybe_consume_trailing_whitespace_if_not_exiting(context, remaining)?;
let source = get_consumed(input, remaining);
Ok((
@@ -114,6 +111,12 @@ where
block_name: name.into(),
parameters: parameters.map(|val| val.into()),
children,
contents: if contents.len() > 0 {
Some(Into::<&str>::into(contents))
} else {
None
},
post_blank: post_blank.map(Into::<&str>::into),
},
))
}

View File

@@ -28,7 +28,7 @@ pub(crate) fn entity<'b, 'g, 'r, 's>(
let (remaining, _) = tag("\\")(input)?;
let (remaining, (entity_definition, entity_name, use_brackets)) = name(context, remaining)?;
let (remaining, _trailing_whitespace) =
let (remaining, post_blank) =
maybe_consume_object_trailing_whitespace_if_not_exiting(context, remaining)?;
let source = get_consumed(input, remaining);
@@ -43,6 +43,7 @@ pub(crate) fn entity<'b, 'g, 'r, 's>(
ascii: entity_definition.ascii,
utf8: entity_definition.utf8,
use_brackets,
post_blank: post_blank.map(Into::<&str>::into),
},
))
}

View File

@@ -39,7 +39,7 @@ pub(crate) fn export_snippet<'b, 'g, 'r, 's>(
parser_with_context!(contents)(&parser_context),
)))(remaining)?;
let (remaining, _) = tag("@@")(remaining)?;
let (remaining, _trailing_whitespace) =
let (remaining, post_blank) =
maybe_consume_object_trailing_whitespace_if_not_exiting(context, remaining)?;
let source = get_consumed(input, remaining);
Ok((
@@ -48,6 +48,7 @@ pub(crate) fn export_snippet<'b, 'g, 'r, 's>(
source: source.into(),
backend: backend_name.into(),
contents: backend_contents.map(|(_colon, backend_contents)| backend_contents.into()),
post_blank: post_blank.map(Into::<&str>::into),
},
))
}

View File

@@ -2,12 +2,15 @@ use nom::branch::alt;
use nom::bytes::complete::tag;
use nom::character::complete::anychar;
use nom::character::complete::space0;
use nom::combinator::map;
use nom::combinator::not;
use nom::combinator::peek;
use nom::combinator::recognize;
use nom::multi::many0;
use nom::multi::many_till;
use nom::sequence::preceded;
use nom::sequence::tuple;
use nom::InputTake;
use super::affiliated_keyword::parse_affiliated_keywords;
use super::org_source::OrgSource;
@@ -35,28 +38,25 @@ pub(crate) fn fixed_width_area<'b, 'g, 'r, 's, AK>(
where
AK: IntoIterator<Item = Keyword<'s>>,
{
let fixed_width_area_line_matcher = parser_with_context!(fixed_width_area_line)(context);
let exit_matcher = parser_with_context!(exit_matcher_parser)(context);
let (remaining, first_line) = fixed_width_area_line_matcher(remaining)?;
let (remaining, mut remaining_lines) =
many0(preceded(not(exit_matcher), fixed_width_area_line_matcher))(remaining)?;
let (remaining, first_line) = fixed_width_area_line(remaining)?;
let (remaining, remaining_lines) = many0(preceded(
not(tuple((org_line_ending, exit_matcher))),
map(
tuple((org_line_ending, fixed_width_area_line)),
|(_line_ending, line_contents)| line_contents,
),
))(remaining)?;
let (remaining, _trailing_ws) =
let post_blank_begin = remaining;
let (remaining, _first_line_break) = org_line_ending(remaining)?;
let (remaining, _additional_post_blank) =
maybe_consume_trailing_whitespace_if_not_exiting(context, remaining)?;
let post_blank = get_consumed(post_blank_begin, remaining);
let source = get_consumed(input, remaining);
let mut value = Vec::with_capacity(remaining_lines.len() + 1);
let last_line = remaining_lines.pop();
if let Some(last_line) = last_line {
value.push(Into::<&str>::into(first_line));
value.extend(remaining_lines.into_iter().map(Into::<&str>::into));
let last_line = Into::<&str>::into(last_line);
// Trim the line ending from the final line.
value.push(&last_line[..(last_line.len() - 1)])
} else {
// Trim the line ending from the only line.
let only_line = Into::<&str>::into(first_line);
value.push(&only_line[..(only_line.len() - 1)])
}
Ok((
remaining,
FixedWidthArea {
@@ -66,25 +66,24 @@ where
affiliated_keywords,
),
value,
post_blank: if post_blank.len() > 0 {
Some(Into::<&str>::into(post_blank))
} else {
None
},
},
))
}
#[cfg_attr(
feature = "tracing",
tracing::instrument(ret, level = "debug", skip(_context))
)]
fn fixed_width_area_line<'b, 'g, 'r, 's>(
_context: RefContext<'b, 'g, 'r, 's>,
input: OrgSource<'s>,
) -> Res<OrgSource<'s>, OrgSource<'s>> {
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
fn fixed_width_area_line<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, OrgSource<'s>> {
start_of_line(input)?;
let (remaining, _) = tuple((space0, tag(":")))(input)?;
if let Ok((remaining, line_break)) = org_line_ending(remaining) {
return Ok((remaining, line_break));
if let Ok((_remain, _line_break)) = org_line_ending(remaining) {
return Ok((remaining, remaining.take(0)));
}
let (remaining, _) = tag(" ")(remaining)?;
let (remaining, value) = recognize(many_till(anychar, org_line_ending))(remaining)?;
let (remaining, value) = recognize(many_till(anychar, peek(org_line_ending)))(remaining)?;
Ok((remaining, value))
}

View File

@@ -75,6 +75,7 @@ where
let parser_context = parser_context.with_additional_node(&contexts[2]);
let element_matcher = parser_with_context!(element(true))(&parser_context);
let exit_matcher = parser_with_context!(exit_matcher_parser)(&parser_context);
let before_contents = remaining;
let (mut remaining, (mut children, _exit_contents)) =
many_till(include_input(element_matcher), exit_matcher)(remaining)?;
@@ -90,13 +91,16 @@ where
}
}
let (remaining, _trailing_ws) =
let contents = get_consumed(before_contents, remaining);
let (remaining, post_blank) =
maybe_consume_trailing_whitespace_if_not_exiting(context, remaining)?;
let source = get_consumed(input, remaining);
Ok((
remaining,
FootnoteDefinition {
source: source.into(),
contents: Some(contents.into()),
post_blank: post_blank.map(Into::<&str>::into),
affiliated_keywords: parse_affiliated_keywords(
context.get_global_settings(),
affiliated_keywords,
@@ -160,7 +164,7 @@ mod tests {
use crate::context::Context;
use crate::context::GlobalSettings;
use crate::context::List;
use crate::types::GetStandardProperties;
use crate::types::StandardProperties;
#[test]
fn two_paragraphs() {
@@ -181,17 +185,13 @@ line footnote.",
footnote_definition_matcher(remaining).expect("Parse second footnote_definition.");
assert_eq!(Into::<&str>::into(remaining), "");
assert_eq!(
first_footnote_definition
.get_standard_properties()
.get_source(),
first_footnote_definition.get_source(),
"[fn:1] A footnote.
"
);
assert_eq!(
second_footnote_definition
.get_standard_properties()
.get_source(),
second_footnote_definition.get_source(),
"[fn:2] A multi-
line footnote."
@@ -216,9 +216,7 @@ not in the footnote.",
footnote_definition_matcher(input).expect("Parse first footnote_definition");
assert_eq!(Into::<&str>::into(remaining), "not in the footnote.");
assert_eq!(
first_footnote_definition
.get_standard_properties()
.get_source(),
first_footnote_definition.get_source(),
"[fn:2] A multi-
line footnote.

View File

@@ -2,6 +2,7 @@ use nom::branch::alt;
use nom::bytes::complete::tag;
use nom::bytes::complete::tag_no_case;
use nom::combinator::all_consuming;
use nom::combinator::consumed;
use nom::combinator::map_parser;
use nom::combinator::verify;
use nom::multi::many1;
@@ -59,7 +60,7 @@ fn anonymous_footnote<'b, 'g, 'r, 's>(
let initial_context = ContextElement::document_context();
let initial_context = Context::new(context.get_global_settings(), List::new(&initial_context));
let (remaining, children) = map_parser(
let (remaining, (contents, children)) = consumed(map_parser(
verify(
parser_with_context!(text_until_exit)(&parser_context),
|text| text.len() > 0,
@@ -69,17 +70,19 @@ fn anonymous_footnote<'b, 'g, 'r, 's>(
&initial_context,
)))(i)
}),
)(remaining)?;
))(remaining)?;
let (remaining, _) = tag("]")(remaining)?;
let (remaining, _trailing_whitespace) =
let (remaining, post_blank) =
maybe_consume_object_trailing_whitespace_if_not_exiting(context, remaining)?;
let source = get_consumed(input, remaining);
Ok((
remaining,
FootnoteReference {
source: source.into(),
contents: Some(contents.into()),
post_blank: post_blank.map(Into::<&str>::into),
label: None,
definition: children,
},
@@ -106,7 +109,7 @@ fn inline_footnote<'b, 'g, 'r, 's>(
let initial_context = ContextElement::document_context();
let initial_context = Context::new(context.get_global_settings(), List::new(&initial_context));
let (remaining, children) = map_parser(
let (remaining, (contents, children)) = consumed(map_parser(
verify(
parser_with_context!(text_until_exit)(&parser_context),
|text| text.len() > 0,
@@ -116,17 +119,19 @@ fn inline_footnote<'b, 'g, 'r, 's>(
&initial_context,
)))(i)
}),
)(remaining)?;
))(remaining)?;
let (remaining, _) = tag("]")(remaining)?;
let (remaining, _trailing_whitespace) =
let (remaining, post_blank) =
maybe_consume_object_trailing_whitespace_if_not_exiting(context, remaining)?;
let source = get_consumed(input, remaining);
Ok((
remaining,
FootnoteReference {
source: source.into(),
contents: Some(contents.into()),
post_blank: post_blank.map(Into::<&str>::into),
label: Some(label_contents.into()),
definition: children,
},
@@ -144,13 +149,15 @@ fn footnote_reference_only<'b, 'g, 'r, 's>(
let (remaining, _) = tag_no_case("[fn:")(input)?;
let (remaining, label_contents) = label(remaining)?;
let (remaining, _) = tag("]")(remaining)?;
let (remaining, _trailing_whitespace) =
let (remaining, post_blank) =
maybe_consume_object_trailing_whitespace_if_not_exiting(context, remaining)?;
let source = get_consumed(input, remaining);
Ok((
remaining,
FootnoteReference {
source: source.into(),
contents: None,
post_blank: post_blank.map(Into::<&str>::into),
label: Some(label_contents.into()),
definition: Vec::with_capacity(0),
},

View File

@@ -5,22 +5,21 @@ use nom::character::complete::anychar;
use nom::character::complete::line_ending;
use nom::character::complete::space0;
use nom::character::complete::space1;
use nom::combinator::consumed;
use nom::combinator::eof;
use nom::combinator::not;
use nom::combinator::opt;
use nom::combinator::peek;
use nom::combinator::recognize;
use nom::combinator::verify;
use nom::multi::many0;
use nom::multi::many_till;
use nom::sequence::preceded;
use nom::sequence::tuple;
use super::affiliated_keyword::parse_affiliated_keywords;
use super::org_source::OrgSource;
use super::paragraph::empty_paragraph;
use super::util::in_section;
use super::util::maybe_consume_trailing_whitespace_if_not_exiting;
use crate::context::bind_context;
use crate::context::parser_with_context;
use crate::context::ContextElement;
use crate::context::ContextMatcher;
@@ -37,9 +36,7 @@ use crate::parser::util::start_of_line;
use crate::types::CenterBlock;
use crate::types::Element;
use crate::types::Keyword;
use crate::types::Paragraph;
use crate::types::QuoteBlock;
use crate::types::SetSource;
use crate::types::SpecialBlock;
#[cfg_attr(
@@ -103,7 +100,7 @@ fn center_block<'b, 'g, 'r, 's, AK>(
where
AK: IntoIterator<Item = Keyword<'s>>,
{
let (remaining, (source, children)) = greater_block_body(
let (remaining, body) = greater_block_body(
context,
input,
pre_affiliated_keywords_input,
@@ -113,12 +110,14 @@ where
Ok((
remaining,
Element::CenterBlock(CenterBlock {
source,
source: body.source,
affiliated_keywords: parse_affiliated_keywords(
context.get_global_settings(),
affiliated_keywords,
),
children,
children: body.children,
contents: body.contents,
post_blank: body.post_blank,
}),
))
}
@@ -136,7 +135,7 @@ fn quote_block<'b, 'g, 'r, 's, AK>(
where
AK: IntoIterator<Item = Keyword<'s>>,
{
let (remaining, (source, children)) = greater_block_body(
let (remaining, body) = greater_block_body(
context,
input,
pre_affiliated_keywords_input,
@@ -146,12 +145,14 @@ where
Ok((
remaining,
Element::QuoteBlock(QuoteBlock {
source,
source: body.source,
affiliated_keywords: parse_affiliated_keywords(
context.get_global_settings(),
affiliated_keywords,
),
children,
children: body.children,
contents: body.contents,
post_blank: body.post_blank,
}),
))
}
@@ -197,7 +198,7 @@ where
AK: IntoIterator<Item = Keyword<'s>>,
{
let (remaining, parameters) = opt(tuple((space1, parameters)))(input)?;
let (remaining, (source, children)) = greater_block_body(
let (remaining, body) = greater_block_body(
context,
remaining,
pre_affiliated_keywords_input,
@@ -207,18 +208,28 @@ where
Ok((
remaining,
Element::SpecialBlock(SpecialBlock {
source,
source: body.source,
affiliated_keywords: parse_affiliated_keywords(
context.get_global_settings(),
affiliated_keywords,
),
children,
children: body.children,
block_type: name,
parameters: parameters.map(|(_, parameters)| Into::<&str>::into(parameters)),
contents: body.contents,
post_blank: body.post_blank,
}),
))
}
#[derive(Debug)]
struct GreaterBlockBody<'s> {
source: &'s str,
children: Vec<Element<'s>>,
contents: Option<&'s str>,
post_blank: Option<&'s str>,
}
#[cfg_attr(
feature = "tracing",
tracing::instrument(ret, level = "debug", skip(context))
@@ -229,7 +240,7 @@ fn greater_block_body<'c, 'b, 'g, 'r, 's>(
pre_affiliated_keywords_input: OrgSource<'s>,
name: &'c str,
context_name: &'c str,
) -> Res<OrgSource<'s>, (&'s str, Vec<Element<'s>>)> {
) -> Res<OrgSource<'s>, GreaterBlockBody<'s>> {
if in_section(context, context_name) {
return Err(nom::Err::Error(CustomError::Static(
"Cannot nest objects of the same element",
@@ -251,30 +262,43 @@ fn greater_block_body<'c, 'b, 'g, 'r, 's>(
let element_matcher = parser_with_context!(element(true))(&parser_context);
let exit_matcher = parser_with_context!(exit_matcher_parser)(&parser_context);
not(exit_matcher)(remaining)?;
let (remaining, leading_blank_lines) = opt(consumed(tuple((
blank_line,
many0(preceded(not(exit_matcher), blank_line)),
))))(remaining)?;
let leading_blank_lines =
leading_blank_lines.map(|(source, (first_line, _remaining_lines))| {
let mut element = Element::Paragraph(Paragraph::of_text(first_line.into()));
element.set_source(source.into());
element
let contents_begin = remaining;
let blank_line_context = ContextElement::ExitMatcherNode(ExitMatcherNode {
class: ExitClass::Alpha,
exit_matcher: &leading_blank_lines_end,
});
let blank_line_context = parser_context.with_additional_node(&blank_line_context);
let (remaining, leading_blank_lines) =
opt(bind_context!(empty_paragraph, &blank_line_context))(remaining)?;
let (remaining, (mut children, _exit_contents)) =
many_till(element_matcher, exit_matcher)(remaining)?;
if let Some(lines) = leading_blank_lines {
children.insert(0, lines);
children.insert(0, Element::Paragraph(lines));
}
let contents = get_consumed(contents_begin, remaining);
let (remaining, _end) = exit_with_name(&parser_context, remaining)?;
// Not checking if parent exit matcher is causing exit because the greater_block_end matcher asserts we matched a full greater block
let (remaining, _trailing_ws) =
let (remaining, post_blank) =
maybe_consume_trailing_whitespace_if_not_exiting(context, remaining)?;
let source = get_consumed(pre_affiliated_keywords_input, remaining);
Ok((remaining, (Into::<&str>::into(source), children)))
Ok((
remaining,
GreaterBlockBody {
source: Into::<&str>::into(source),
children,
contents: if contents.len() > 0 {
Some(Into::<&str>::into(contents))
} else {
None
},
post_blank: post_blank.map(Into::<&str>::into),
},
))
}
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
@@ -310,3 +334,14 @@ fn _greater_block_end<'b, 'g, 'r, 's, 'c>(
let source = get_consumed(input, remaining);
Ok((remaining, source))
}
#[cfg_attr(
feature = "tracing",
tracing::instrument(ret, level = "debug", skip(_context))
)]
pub(crate) fn leading_blank_lines_end<'b, 'g, 'r, 's, 'c>(
_context: RefContext<'b, 'g, 'r, 's>,
input: OrgSource<'s>,
) -> Res<OrgSource<'s>, OrgSource<'s>> {
recognize(not(blank_line))(input)
}

View File

@@ -66,6 +66,8 @@ fn _heading<'b, 'g, 'r, 's>(
let (remaining, pre_headline) = headline(context, input, parent_star_count)?;
let section_matcher = bind_context!(section, context);
let heading_matcher = bind_context!(heading(pre_headline.star_count), context);
let (contents_begin, _) = opt(many0(blank_line))(remaining)?;
let maybe_post_blank = get_consumed(remaining, contents_begin);
let (remaining, maybe_section) =
opt(map(section_matcher, DocumentElement::Section))(remaining)?;
let (remaining, _ws) = opt(tuple((start_of_line, many0(blank_line))))(remaining)?;
@@ -75,14 +77,15 @@ fn _heading<'b, 'g, 'r, 's>(
// If the section has a planning then the timestamp values are copied to the heading.
if let DocumentElement::Section(inner_section) = &section {
if let Some(Element::Planning(planning)) = inner_section.children.first() {
scheduled = planning.scheduled.clone();
deadline = planning.deadline.clone();
closed = planning.closed.clone();
scheduled.clone_from(&planning.scheduled);
deadline.clone_from(&planning.deadline);
closed.clone_from(&planning.closed);
}
}
children.insert(0, section);
}
let remaining = if children.is_empty() {
let has_children = !children.is_empty();
let remaining = if !has_children {
// Support empty headings
let (remain, _ws) = many0(blank_line)(remaining)?;
remain
@@ -91,6 +94,7 @@ fn _heading<'b, 'g, 'r, 's>(
};
let is_archived = pre_headline.tags.contains(&"ARCHIVE");
let contents = get_consumed(contents_begin, remaining);
let source = get_consumed(input, remaining);
Ok((
remaining,
@@ -112,6 +116,16 @@ fn _heading<'b, 'g, 'r, 's>(
scheduled,
deadline,
closed,
contents: if contents.len() > 0 {
Some(Into::<&str>::into(contents))
} else {
None
},
post_blank: if has_children {
None
} else {
Some(Into::<&str>::into(maybe_post_blank))
},
},
))
}

View File

@@ -38,7 +38,7 @@ where
space0,
alt((line_ending, eof)),
)))(remaining)?;
let (remaining, _trailing_ws) =
let (remaining, post_blank) =
maybe_consume_trailing_whitespace_if_not_exiting(context, remaining)?;
let source = get_consumed(input, remaining);
Ok((
@@ -49,6 +49,7 @@ where
context.get_global_settings(),
affiliated_keywords,
),
post_blank: post_blank.map(Into::<&str>::into),
},
))
}

View File

@@ -102,7 +102,7 @@ pub(crate) fn apply_in_buffer_settings<'g, 's, 'sf>(
}) {
let (_, (in_progress_words, complete_words)) =
todo_keywords(kw.value).map_err(|err| match err {
nom::Err::Incomplete(_) => CustomError::Text(err.to_string()),
nom::Err::Incomplete(_) => panic!("This parser does not support streaming."),
nom::Err::Error(e) => e,
nom::Err::Failure(e) => e,
})?;
@@ -123,7 +123,7 @@ pub(crate) fn apply_in_buffer_settings<'g, 's, 'sf>(
kw.value,
)
.map_err(|err: nom::Err<_>| match err {
nom::Err::Incomplete(_) => CustomError::Text(err.to_string()),
nom::Err::Incomplete(_) => panic!("This parser does not support streaming."),
nom::Err::Error(e) => e,
nom::Err::Failure(e) => e,
})?;
@@ -141,7 +141,7 @@ pub(crate) fn apply_in_buffer_settings<'g, 's, 'sf>(
.filter(|kw| kw.key.eq_ignore_ascii_case("link"))
{
let (_, (link_key, link_value)) = link_template(kw.value).map_err(|err| match err {
nom::Err::Incomplete(_) => CustomError::Text(err.to_string()),
nom::Err::Incomplete(_) => panic!("This parser does not support streaming."),
nom::Err::Error(e) => e,
nom::Err::Failure(e) => e,
})?;
@@ -157,7 +157,7 @@ pub(crate) fn apply_in_buffer_settings<'g, 's, 'sf>(
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
pub(crate) fn apply_post_parse_in_buffer_settings<'g, 's, 'sf>(document: &mut Document<'s>) {
document.category = Into::<AstNode>::into(&*document)
.into_iter()
.iter_all_ast_nodes()
.filter_map(|ast_node| {
if let AstNode::Keyword(ast_node) = ast_node {
if ast_node.key.eq_ignore_ascii_case("category") {

View File

@@ -38,7 +38,7 @@ pub(crate) fn inline_babel_call<'b, 'g, 'r, 's>(
let (remaining, arguments) = argument(context, remaining)?;
let (remaining, end_header) = opt(parser_with_context!(header)(context))(remaining)?;
let value = get_consumed(input, remaining);
let (remaining, _trailing_whitespace) =
let (remaining, post_blank) =
maybe_consume_object_trailing_whitespace_if_not_exiting(context, remaining)?;
let source = get_consumed(input, remaining);
Ok((
@@ -54,6 +54,7 @@ pub(crate) fn inline_babel_call<'b, 'g, 'r, 's>(
None
},
end_header: end_header.map(Into::<&str>::into),
post_blank: post_blank.map(Into::<&str>::into),
},
))
}

View File

@@ -38,7 +38,7 @@ pub(crate) fn inline_source_block<'b, 'g, 'r, 's>(
let (remaining, language) = lang(context, remaining)?;
let (remaining, parameters) = opt(parser_with_context!(header)(context))(remaining)?;
let (remaining, value) = body(context, remaining)?;
let (remaining, _trailing_whitespace) =
let (remaining, post_blank) =
maybe_consume_object_trailing_whitespace_if_not_exiting(context, remaining)?;
let source = get_consumed(input, remaining);
Ok((
@@ -48,6 +48,7 @@ pub(crate) fn inline_source_block<'b, 'g, 'r, 's>(
language: language.into(),
parameters: parameters.map(Into::<&str>::into),
value: value.into(),
post_blank: post_blank.map(Into::<&str>::into),
},
))
}

View File

@@ -58,6 +58,7 @@ fn _filtered_keyword<'s, F: Fn(OrgSource<'s>) -> Res<OrgSource<'s>, OrgSource<'s
affiliated_keywords: AffiliatedKeywords::default(), // To be populated by the caller if this keyword is in a context to support affiliated keywords.
key: parsed_key.into(),
value: "",
post_blank: None,
},
));
}
@@ -71,6 +72,7 @@ fn _filtered_keyword<'s, F: Fn(OrgSource<'s>) -> Res<OrgSource<'s>, OrgSource<'s
affiliated_keywords: AffiliatedKeywords::default(), // To be populated by the caller if this keyword is in a context to support affiliated keywords.
key: parsed_key.into(),
value: parsed_value.into(),
post_blank: None,
},
))
}
@@ -89,12 +91,13 @@ where
AK: IntoIterator<Item = Keyword<'s>>,
{
let (remaining, mut kw) = filtered_keyword(regular_keyword_key)(remaining)?;
let (remaining, _trailing_ws) =
let (remaining, post_blank) =
maybe_consume_trailing_whitespace_if_not_exiting(context, remaining)?;
let source = get_consumed(input, remaining);
kw.affiliated_keywords =
parse_affiliated_keywords(context.get_global_settings(), affiliated_keywords);
kw.source = Into::<&str>::into(source);
kw.post_blank = post_blank.map(Into::<&str>::into);
Ok((remaining, kw))
}
@@ -221,7 +224,6 @@ mod tests {
use test::Bencher;
use super::*;
use crate::parser::OrgSource;
#[bench]
fn bench_affiliated_keyword(b: &mut Bencher) {

View File

@@ -57,7 +57,7 @@ where
let (remaining, _end) = latex_environment_end_specialized(&parser_context, remaining)?;
let value_end = remaining;
let (remaining, _trailing_ws) =
let (remaining, post_blank) =
maybe_consume_trailing_whitespace_if_not_exiting(context, remaining)?;
let source = get_consumed(input, remaining);
let value = get_consumed(value_start, value_end);
@@ -70,6 +70,7 @@ where
affiliated_keywords,
),
value: value.into(),
post_blank: post_blank.map(Into::<&str>::into),
},
))
}

View File

@@ -39,7 +39,7 @@ pub(crate) fn latex_fragment<'b, 'g, 'r, 's>(
parser_with_context!(bordered_dollar_fragment)(context),
))(input)?;
let value = get_consumed(input, remaining);
let (remaining, _trailing_whitespace) =
let (remaining, post_blank) =
maybe_consume_object_trailing_whitespace_if_not_exiting(context, remaining)?;
let source = get_consumed(input, remaining);
Ok((
@@ -47,6 +47,7 @@ pub(crate) fn latex_fragment<'b, 'g, 'r, 's>(
LatexFragment {
source: source.into(),
value: value.into(),
post_blank: post_blank.map(Into::<&str>::into),
},
))
}

View File

@@ -80,22 +80,28 @@ where
let object_matcher = parser_with_context!(standard_set_object)(&parser_context);
let exit_matcher = parser_with_context!(exit_matcher_parser)(&parser_context);
// Check for a completely empty block
let (remaining, children) = match consumed(many_till(blank_line, exit_matcher))(remaining) {
let (remaining, contents, children) =
match consumed(many_till(blank_line, exit_matcher))(remaining) {
Ok((remaining, (whitespace, (_children, _exit_contents)))) => (
remaining,
whitespace,
if whitespace.len() > 0 {
vec![Object::PlainText(PlainText {
source: whitespace.into(),
})],
})]
} else {
Vec::new()
},
),
Err(_) => {
let (remaining, (children, _exit_contents)) =
many_till(object_matcher, exit_matcher)(remaining)?;
(remaining, children)
let (remaining, (contents, (children, _exit_contents))) =
consumed(many_till(object_matcher, exit_matcher))(remaining)?;
(remaining, contents, children)
}
};
let (remaining, _end) = lesser_block_end_specialized(&parser_context, remaining)?;
let (remaining, _trailing_ws) =
let (remaining, post_blank) =
maybe_consume_trailing_whitespace_if_not_exiting(context, remaining)?;
let source = get_consumed(input, remaining);
Ok((
@@ -108,6 +114,8 @@ where
),
data: parameters.map(Into::<&str>::into),
children,
contents: Into::<&str>::into(contents),
post_blank: post_blank.map(Into::<&str>::into),
},
))
}
@@ -144,7 +152,7 @@ where
let (remaining, contents) = parser_with_context!(text_until_exit)(&parser_context)(remaining)?;
let (remaining, _end) = lesser_block_end_specialized(&parser_context, remaining)?;
let (remaining, _trailing_ws) =
let (remaining, post_blank) =
maybe_consume_trailing_whitespace_if_not_exiting(context, remaining)?;
let source = get_consumed(input, remaining);
Ok((
@@ -156,6 +164,7 @@ where
affiliated_keywords,
),
contents: contents.into(),
post_blank: post_blank.map(Into::<&str>::into),
},
))
}
@@ -202,10 +211,10 @@ where
let parser_context = parser_context.with_additional_node(&contexts[1]);
let parser_context = parser_context.with_additional_node(&contexts[2]);
let (remaining, contents) = content(&parser_context, remaining)?;
let (remaining, contents) = text_until_exit(&parser_context, remaining)?;
let (remaining, _end) = lesser_block_end_specialized(&parser_context, remaining)?;
let (remaining, _trailing_ws) =
let (remaining, post_blank) =
maybe_consume_trailing_whitespace_if_not_exiting(context, remaining)?;
let source = get_consumed(input, remaining);
let (switches, number_lines, preserve_indent, retain_labels, use_labels, label_format) = {
@@ -236,7 +245,8 @@ where
retain_labels,
use_labels,
label_format,
contents,
value: Into::<&str>::into(contents),
post_blank: post_blank.map(Into::<&str>::into),
},
))
}
@@ -276,10 +286,10 @@ where
let parser_context = parser_context.with_additional_node(&contexts[1]);
let parser_context = parser_context.with_additional_node(&contexts[2]);
let (remaining, contents) = content(&parser_context, remaining)?;
let (remaining, contents) = text_until_exit(&parser_context, remaining)?;
let (remaining, _end) = lesser_block_end_specialized(&parser_context, remaining)?;
let (remaining, _trailing_ws) =
let (remaining, post_blank) =
maybe_consume_trailing_whitespace_if_not_exiting(context, remaining)?;
let source = get_consumed(input, remaining);
Ok((
@@ -292,7 +302,8 @@ where
),
export_type: export_type.map(Into::<&str>::into),
data: parameters.map(Into::<&str>::into),
contents,
value: Into::<&str>::into(contents),
post_blank: post_blank.map(Into::<&str>::into),
},
))
}
@@ -331,10 +342,10 @@ where
let parser_context = context.with_additional_node(&contexts[0]);
let parser_context = parser_context.with_additional_node(&contexts[1]);
let parser_context = parser_context.with_additional_node(&contexts[2]);
let (remaining, contents) = content(&parser_context, remaining)?;
let (remaining, contents) = text_until_exit(&parser_context, remaining)?;
let (remaining, _end) = lesser_block_end_specialized(&parser_context, remaining)?;
let (remaining, _trailing_ws) =
let (remaining, post_blank) =
maybe_consume_trailing_whitespace_if_not_exiting(context, remaining)?;
let source = get_consumed(input, remaining);
let (switches, number_lines, preserve_indent, retain_labels, use_labels, label_format) = {
@@ -371,7 +382,8 @@ where
retain_labels,
use_labels,
label_format,
contents,
value: Into::<&str>::into(contents),
post_blank: post_blank.map(Into::<&str>::into),
},
))
}
@@ -650,51 +662,3 @@ fn switch_word<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, OrgSource<'s>> {
is_not(" \t\r\n"),
))(input)
}
#[cfg_attr(
    feature = "tracing",
    tracing::instrument(ret, level = "debug", skip(context))
)]
/// Collect the un-escaped body of a block into an owned `String`.
///
/// Reads lines via `content_line` until the context's exit matcher
/// succeeds. For each line, any whitespace captured before an org
/// escape comma is re-attached, followed by the line text after the
/// escape, so the returned `String` is the body with escape commas
/// removed.
///
/// Returns the remaining input paired with the accumulated text.
pub(crate) fn content<'b, 'g, 'r, 's>(
    context: RefContext<'b, 'g, 'r, 's>,
    input: OrgSource<'s>,
) -> Res<OrgSource<'s>, String> {
    let mut ret = String::new();
    let mut remaining = input;
    let exit_matcher_parser = parser_with_context!(exit_matcher_parser)(context);
    loop {
        // Stop as soon as the surrounding context signals the end of the block.
        if exit_matcher_parser(remaining).is_ok() {
            break;
        }
        let (remain, (pre_escape_whitespace, line)) = content_line(remaining)?;
        // Splice back the text that preceded the consumed escape comma, if any.
        if let Some(val) = pre_escape_whitespace {
            ret.push_str(Into::<&str>::into(val));
        }
        ret.push_str(line.into());
        remaining = remain;
    }
    Ok((remaining, ret))
}
#[cfg_attr(feature = "tracing", tracing::instrument(ret, level = "debug"))]
/// Read a single (possibly comma-escaped) line of block content.
///
/// Returns `(pre_escape_whitespace, line)`:
/// - `pre_escape_whitespace` is `Some(..)` when the line uses the org
///   escape (a `,` immediately before `#+` or `*`); it holds the text
///   before the consumed escape comma (leading whitespace plus any
///   earlier commas) so the caller can splice it back in.
/// - `line` is everything after the escape (or the whole line when no
///   escape matched), up to and including the line ending.
fn content_line<'s>(
    input: OrgSource<'s>,
) -> Res<OrgSource<'s>, (Option<OrgSource<'s>>, OrgSource<'s>)> {
    // Optionally match `space0`, then commas up to (but not including) the
    // final comma that sits directly before `#+` or `*`; that final comma is
    // consumed and dropped, and the recognized prefix is kept.
    let (remaining, pre_escape_whitespace) = opt(map(
        tuple((
            recognize(tuple((
                space0,
                many_till(
                    tag(","),
                    peek(tuple((tag(","), alt((tag("#+"), tag("*")))))),
                ),
            ))),
            tag(","),
        )),
        |(pre_comma, _)| pre_comma,
    ))(input)?;
    // The rest of the line, terminated by (and including) the line ending.
    let (remaining, line_post_escape) = recognize(many_till(anychar, line_ending))(remaining)?;
    Ok((remaining, (pre_escape_whitespace, line_post_escape)))
}

View File

@@ -32,7 +32,7 @@ pub(crate) fn org_macro<'b, 'g, 'r, 's>(
let (remaining, macro_args) = opt(parser_with_context!(org_macro_args)(context))(remaining)?;
let (remaining, _) = tag("}}}")(remaining)?;
let macro_value = get_consumed(input, remaining);
let (remaining, _trailing_whitespace) =
let (remaining, post_blank) =
maybe_consume_object_trailing_whitespace_if_not_exiting(context, remaining)?;
let source = get_consumed(input, remaining);
@@ -47,6 +47,7 @@ pub(crate) fn org_macro<'b, 'g, 'r, 's>(
.map(|arg| arg.into())
.collect(),
value: Into::<&str>::into(macro_value),
post_blank: post_blank.map(Into::<&str>::into),
},
))
}

View File

@@ -12,7 +12,7 @@ use nom::Slice;
pub(crate) type BracketDepth = i16;
#[derive(Copy, Clone, PartialEq)]
#[derive(Copy, Clone)]
pub(crate) struct OrgSource<'s> {
full_source: &'s str,
start: usize,

View File

@@ -1,5 +1,8 @@
use nom::branch::alt;
use nom::character::complete::space1;
use nom::combinator::consumed;
use nom::combinator::eof;
use nom::combinator::opt;
use nom::combinator::recognize;
use nom::combinator::verify;
use nom::multi::many1;
@@ -12,6 +15,7 @@ use super::org_source::OrgSource;
use super::util::blank_line;
use super::util::get_consumed;
use super::util::maybe_consume_trailing_whitespace_if_not_exiting;
use super::util::org_line_ending;
use crate::context::parser_with_context;
use crate::context::ContextElement;
use crate::context::ExitClass;
@@ -45,14 +49,14 @@ where
let standard_set_object_matcher = parser_with_context!(standard_set_object)(&parser_context);
let exit_matcher = parser_with_context!(exit_matcher_parser)(&parser_context);
let (remaining, (children, _exit_contents)) = verify(
let (remaining, (contents, (children, _exit_contents))) = consumed(verify(
many_till(standard_set_object_matcher, exit_matcher),
|(children, _exit_contents)| !children.is_empty(),
)(remaining)?;
))(remaining)?;
// Not checking parent exit matcher because if there are any children matched then we have a valid paragraph.
let (remaining, _trailing_ws) =
let (remaining, post_blank) =
maybe_consume_trailing_whitespace_if_not_exiting(context, remaining)?;
let source = get_consumed(input, remaining);
@@ -60,6 +64,8 @@ where
remaining,
Paragraph {
source: source.into(),
contents: Some(contents.into()),
post_blank: post_blank.map(Into::<&str>::into),
affiliated_keywords: parse_affiliated_keywords(
context.get_global_settings(),
affiliated_keywords,
@@ -69,6 +75,57 @@ where
))
}
#[cfg_attr(
    feature = "tracing",
    tracing::instrument(ret, level = "debug", skip(context))
)]
/// Parse a paragraph consisting only of blank lines.
///
/// Builds the result via `Paragraph::of_text(source, text, contents,
/// post_blank)` where the first blank line becomes the paragraph text
/// and contents, and the trailing run of blank lines (up to the exit
/// matcher) becomes the post-blank.
pub(crate) fn empty_paragraph<'b, 'g, 'r, 's>(
    context: RefContext<'b, 'g, 'r, 's>,
    input: OrgSource<'s>,
) -> Res<OrgSource<'s>, Paragraph<'s>> {
    // If it is just a single newline then source, contents, and post-blank are "\n".
    // If it has multiple newlines then contents is the first "\n" and post-blank is all the new lines.
    // If there are any spaces on the first line then post-blank excludes the first line.
    let exit_matcher = parser_with_context!(exit_matcher_parser)(context);
    // A first line containing spaces (not a bare newline) is handled specially:
    // it is kept as the paragraph text/contents but excluded from post-blank.
    let (remaining, first_line_with_spaces) =
        opt(recognize(tuple((space1, org_line_ending))))(input)?;
    let post_blank_begin = remaining;
    if let Some(first_line_with_spaces) = first_line_with_spaces {
        // Consume the remaining blank lines; post-blank starts after the
        // space-bearing first line.
        let (remaining, _additional_lines) =
            recognize(many_till(blank_line, exit_matcher))(remaining)?;
        let post_blank = get_consumed(post_blank_begin, remaining);
        let source = get_consumed(input, remaining);
        Ok((
            remaining,
            Paragraph::of_text(
                Into::<&str>::into(source),
                Into::<&str>::into(first_line_with_spaces),
                Some(Into::<&str>::into(first_line_with_spaces)),
                Some(Into::<&str>::into(post_blank)),
            ),
        ))
    } else {
        // No spaces on the first line: it must be a bare blank line, and
        // post-blank covers it along with any following blank lines.
        let (remaining, first_line) = blank_line(remaining)?;
        let (remaining, _additional_lines) =
            recognize(many_till(blank_line, exit_matcher))(remaining)?;
        let post_blank = get_consumed(post_blank_begin, remaining);
        let source = get_consumed(input, remaining);
        Ok((
            remaining,
            Paragraph::of_text(
                Into::<&str>::into(source),
                Into::<&str>::into(first_line),
                Some(Into::<&str>::into(first_line)),
                Some(Into::<&str>::into(post_blank)),
            ),
        ))
    }
}
#[cfg_attr(
feature = "tracing",
tracing::instrument(ret, level = "debug", skip(context))
@@ -96,7 +153,8 @@ mod tests {
use crate::context::List;
use crate::parser::element_parser::element;
use crate::parser::org_source::OrgSource;
use crate::types::GetStandardProperties;
use crate::parser::paragraph::empty_paragraph;
use crate::types::StandardProperties;
#[test]
fn two_paragraphs() {
@@ -109,13 +167,20 @@ mod tests {
let (remaining, second_paragraph) =
paragraph_matcher(remaining).expect("Parse second paragraph.");
assert_eq!(Into::<&str>::into(remaining), "");
assert_eq!(
first_paragraph.get_standard_properties().get_source(),
"foo bar baz\n\n"
);
assert_eq!(
second_paragraph.get_standard_properties().get_source(),
"lorem ipsum"
);
assert_eq!(first_paragraph.get_source(), "foo bar baz\n\n");
assert_eq!(second_paragraph.get_source(), "lorem ipsum");
}
#[test]
fn paragraph_whitespace() {
    // A lone newline should parse as an empty paragraph whose source and
    // contents are both exactly "\n", with no input left over.
    let input = OrgSource::new("\n");
    let global_settings = GlobalSettings::default();
    let initial_context = ContextElement::document_context();
    let initial_context = Context::new(&global_settings, List::new(&initial_context));
    let paragraph_matcher = bind_context!(empty_paragraph, &initial_context);
    let (remaining, paragraph) = paragraph_matcher(input).expect("Parse paragraph");
    assert_eq!(Into::<&str>::into(remaining), "");
    assert_eq!(paragraph.get_source(), "\n");
    assert_eq!(paragraph.get_contents(), Some("\n"));
}
}

View File

@@ -54,7 +54,7 @@ pub(crate) fn plain_link<'b, 'g, 'r, 's>(
let (remaining, _) = pre(context, input)?;
let (remaining, path_plain) = parse_path_plain(context, remaining)?;
peek(parser_with_context!(post)(context))(remaining)?;
let (remaining, _trailing_whitespace) =
let (remaining, post_blank) =
maybe_consume_object_trailing_whitespace_if_not_exiting(context, remaining)?;
let source = get_consumed(input, remaining);
Ok((
@@ -66,6 +66,7 @@ pub(crate) fn plain_link<'b, 'g, 'r, 's>(
raw_link: path_plain.raw_link,
search_option: path_plain.search_option,
application: path_plain.application,
post_blank: post_blank.map(Into::<&str>::into),
},
))
}

View File

@@ -7,6 +7,7 @@ use nom::character::complete::multispace1;
use nom::character::complete::one_of;
use nom::character::complete::space0;
use nom::character::complete::space1;
use nom::combinator::consumed;
use nom::combinator::eof;
use nom::combinator::map;
use nom::combinator::not;
@@ -152,6 +153,7 @@ where
let mut children = Vec::new();
let mut first_item_indentation: Option<IndentationLevel> = None;
let mut first_item_list_type: Option<PlainListType> = None;
let contents_begin = remaining;
let mut remaining = remaining;
// The final list item does not consume trailing blank lines (which instead get consumed by the list). We have three options here:
@@ -195,7 +197,8 @@ where
)));
}
let (remaining, _trailing_ws) =
let contents = get_consumed(contents_begin, remaining);
let (remaining, post_blank) =
maybe_consume_trailing_whitespace_if_not_exiting(context, remaining)?;
let source = get_consumed(input, remaining);
Ok((
@@ -208,6 +211,8 @@ where
),
list_type: first_item_list_type.expect("Plain lists require at least one element."),
children: children.into_iter().map(|(_start, item)| item).collect(),
contents: Some(Into::<&str>::into(contents)),
post_blank: post_blank.map(Into::<&str>::into),
},
))
}
@@ -265,7 +270,7 @@ fn plain_list_item<'b, 'g, 'r, 's>(
let maybe_contentless_item: Res<OrgSource<'_>, ()> =
detect_contentless_item_contents(&parser_context, remaining);
if let Ok((_rem, _ws)) = maybe_contentless_item {
let (remaining, _trailing_ws) = if tuple((
let (remaining, post_blank) = if tuple((
blank_line,
bind_context!(final_item_whitespace_cutoff, context),
))(remaining)
@@ -291,6 +296,12 @@ fn plain_list_item<'b, 'g, 'r, 's>(
.unwrap_or(Vec::new()),
pre_blank: 0,
children: Vec::new(),
contents: None,
post_blank: if post_blank.len() > 0 {
Some(Into::<&str>::into(post_blank))
} else {
None
},
},
),
));
@@ -301,13 +312,13 @@ fn plain_list_item<'b, 'g, 'r, 's>(
.filter(|b| *b == b'\n')
.count();
let (remaining, (children, _exit_contents)) = many_till(
let (remaining, (contents, (children, _exit_contents))) = consumed(many_till(
include_input(bind_context!(element(true), &parser_context)),
bind_context!(exit_matcher_parser, &parser_context),
)(remaining)?;
))(remaining)?;
// We have to use the parser_context here to include the whitespace cut-off
let (remaining, _trailing_ws) =
let (remaining, post_blank) =
maybe_consume_trailing_whitespace_if_not_exiting(&final_whitespace_context, remaining)?;
let source = get_consumed(input, remaining);
@@ -329,6 +340,12 @@ fn plain_list_item<'b, 'g, 'r, 's>(
pre_blank: PlainListItemPreBlank::try_from(pre_blank)
.expect("pre-blank cannot be larger than 2."),
children: children.into_iter().map(|(_start, item)| item).collect(),
contents: if contents.len() > 0 {
Some(contents.into())
} else {
None
},
post_blank: post_blank.map(Into::<&str>::into),
},
),
));
@@ -629,7 +646,7 @@ mod tests {
use crate::context::Context;
use crate::context::GlobalSettings;
use crate::context::List;
use crate::types::GetStandardProperties;
use crate::types::StandardProperties;
#[test]
fn plain_list_item_empty() {
@@ -640,7 +657,7 @@ mod tests {
let plain_list_item_matcher = bind_context!(plain_list_item, &initial_context);
let (remaining, (_, result)) = plain_list_item_matcher(input).unwrap();
assert_eq!(Into::<&str>::into(remaining), "");
assert_eq!(result.get_standard_properties().get_source(), "1.");
assert_eq!(result.get_source(), "1.");
}
#[test]
@@ -652,7 +669,7 @@ mod tests {
let plain_list_item_matcher = bind_context!(plain_list_item, &initial_context);
let (remaining, (_, result)) = plain_list_item_matcher(input).unwrap();
assert_eq!(Into::<&str>::into(remaining), "");
assert_eq!(result.get_standard_properties().get_source(), "1. foo");
assert_eq!(result.get_source(), "1. foo");
}
#[test]
@@ -664,7 +681,7 @@ mod tests {
let (remaining, result) =
plain_list(std::iter::empty(), input, &initial_context, input).unwrap();
assert_eq!(Into::<&str>::into(remaining), "");
assert_eq!(result.get_standard_properties().get_source(), "1.");
assert_eq!(result.get_source(), "1.");
}
#[test]
@@ -676,7 +693,7 @@ mod tests {
let (remaining, result) =
plain_list(std::iter::empty(), input, &initial_context, input).unwrap();
assert_eq!(Into::<&str>::into(remaining), "");
assert_eq!(result.get_standard_properties().get_source(), "1. foo");
assert_eq!(result.get_source(), "1. foo");
}
#[test]
@@ -721,7 +738,7 @@ mod tests {
plain_list_matcher(input).expect("Should parse the plain list successfully.");
assert_eq!(Into::<&str>::into(remaining), " ipsum\n");
assert_eq!(
result.get_standard_properties().get_source(),
result.get_source(),
r#"1. foo
2. bar
baz
@@ -749,7 +766,7 @@ baz"#,
plain_list_matcher(input).expect("Should parse the plain list successfully.");
assert_eq!(Into::<&str>::into(remaining), "baz");
assert_eq!(
result.get_standard_properties().get_source(),
result.get_source(),
r#"1. foo
1. bar
@@ -782,7 +799,7 @@ dolar"#,
plain_list_matcher(input).expect("Should parse the plain list successfully.");
assert_eq!(Into::<&str>::into(remaining), "dolar");
assert_eq!(
result.get_standard_properties().get_source(),
result.get_source(),
r#"1. foo
bar

View File

@@ -143,7 +143,7 @@ mod tests {
use crate::context::GlobalSettings;
use crate::context::List;
use crate::parser::object_parser::detect_standard_set_object_sans_plain_text;
use crate::types::GetStandardProperties;
use crate::types::StandardProperties;
#[test]
fn plain_text_simple() {
@@ -160,9 +160,6 @@ mod tests {
)(input)
.unwrap();
assert_eq!(Into::<&str>::into(remaining), "");
assert_eq!(
result.get_standard_properties().get_source(),
Into::<&str>::into(input)
);
assert_eq!(result.get_source(), Into::<&str>::into(input));
}
}

View File

@@ -33,7 +33,7 @@ pub(crate) fn planning<'b, 'g, 'r, 's>(
many1(parser_with_context!(planning_parameter)(context))(remaining)?;
let (remaining, _trailing_ws) = tuple((space0, org_line_ending))(remaining)?;
let (remaining, _trailing_ws) =
let (remaining, post_blank) =
maybe_consume_trailing_whitespace_if_not_exiting(context, remaining)?;
let source = get_consumed(input, remaining);
@@ -62,6 +62,7 @@ pub(crate) fn planning<'b, 'g, 'r, 's>(
scheduled,
deadline,
closed,
post_blank: post_blank.map(Into::<&str>::into),
},
))
}

View File

@@ -6,6 +6,7 @@ use nom::character::complete::anychar;
use nom::character::complete::line_ending;
use nom::character::complete::space0;
use nom::character::complete::space1;
use nom::combinator::consumed;
use nom::combinator::eof;
use nom::combinator::opt;
use nom::combinator::recognize;
@@ -64,14 +65,11 @@ pub(crate) fn property_drawer<'b, 'g, 'r, 's>(
let parser_context = context.with_additional_node(&contexts[0]);
let parser_context = parser_context.with_additional_node(&contexts[1]);
let parser_context = parser_context.with_additional_node(&contexts[2]);
let node_property_matcher = parser_with_context!(node_property)(&parser_context);
let exit_matcher = parser_with_context!(exit_matcher_parser)(&parser_context);
let (remaining, (children, _exit_contents)) =
many_till(node_property_matcher, exit_matcher)(remaining)?;
let (remaining, (contents, children)) =
consumed(parser_with_context!(children)(&parser_context))(remaining)?;
let (remaining, _end) = property_drawer_end(&parser_context, remaining)?;
let (remaining, _trailing_ws) =
let (remaining, post_blank) =
maybe_consume_trailing_whitespace_if_not_exiting(context, remaining)?;
let source = get_consumed(input, remaining);
@@ -80,10 +78,31 @@ pub(crate) fn property_drawer<'b, 'g, 'r, 's>(
PropertyDrawer {
source: source.into(),
children,
contents: if contents.len() > 0 {
Some(contents.into())
} else {
None
},
post_blank: post_blank.map(Into::<&str>::into),
},
))
}
#[cfg_attr(
    feature = "tracing",
    tracing::instrument(ret, level = "debug", skip(context))
)]
/// Parse the node properties that make up a property drawer's body.
///
/// Repeatedly applies the `node_property` parser until the context's
/// exit matcher succeeds, returning the collected properties. Extracted
/// as a helper so the caller can wrap it in `consumed(..)` to also
/// capture the matched contents span.
fn children<'b, 'g, 'r, 's>(
    context: RefContext<'b, 'g, 'r, 's>,
    input: OrgSource<'s>,
) -> Res<OrgSource<'s>, Vec<NodeProperty<'s>>> {
    let node_property_matcher = parser_with_context!(node_property)(context);
    let exit_matcher = parser_with_context!(exit_matcher_parser)(context);
    let (remaining, (children, _exit_contents)) =
        many_till(node_property_matcher, exit_matcher)(input)?;
    Ok((remaining, children))
}
#[cfg_attr(
feature = "tracing",
tracing::instrument(ret, level = "debug", skip(_context))

View File

@@ -39,7 +39,7 @@ pub(crate) fn radio_link<'b, 'g, 'r, 's>(
let rematched_target = rematch_target(context, radio_target, input);
if let Ok((remaining, rematched_target)) = rematched_target {
let path = get_consumed(input, remaining);
let (remaining, _) = space0(remaining)?;
let (remaining, post_blank) = space0(remaining)?;
let source = get_consumed(input, remaining);
return Ok((
remaining,
@@ -47,6 +47,11 @@ pub(crate) fn radio_link<'b, 'g, 'r, 's>(
source: source.into(),
children: rematched_target,
path: path.into(),
post_blank: if post_blank.len() > 0 {
Some(Into::<&str>::into(post_blank))
} else {
None
},
},
));
}
@@ -134,7 +139,7 @@ pub(crate) fn radio_target<'b, 'g, 'r, 's>(
))(remaining)?;
let (remaining, _closing) = tag(">>>")(remaining)?;
let (remaining, _trailing_whitespace) =
let (remaining, post_blank) =
maybe_consume_object_trailing_whitespace_if_not_exiting(context, remaining)?;
let source = get_consumed(input, remaining);
Ok((
@@ -142,6 +147,7 @@ pub(crate) fn radio_target<'b, 'g, 'r, 's>(
RadioTarget {
source: source.into(),
value: raw_value.into(),
post_blank: post_blank.map(Into::<&str>::into),
children,
},
))
@@ -169,17 +175,15 @@ pub(crate) trait RematchObject<'x> {
#[cfg(test)]
mod tests {
use super::*;
use crate::context::Context;
use crate::context::GlobalSettings;
use crate::context::List;
use crate::parser::element_parser::element;
use crate::types::Bold;
use crate::types::Element;
use crate::types::GetStandardProperties;
use crate::types::PlainText;
use crate::types::StandardProperties;
#[test]
fn plain_text_radio_target() {
fn plain_text_radio_target() -> Result<(), Box<dyn std::error::Error>> {
let input = OrgSource::new("foo bar baz");
let radio_target_match = vec![Object::PlainText(PlainText { source: "bar" })];
let global_settings = GlobalSettings {
@@ -195,29 +199,38 @@ mod tests {
_ => panic!("Should be a paragraph!"),
};
assert_eq!(Into::<&str>::into(remaining), "");
assert_eq!(
first_paragraph.get_standard_properties().get_source(),
"foo bar baz"
);
assert_eq!(first_paragraph.get_source(), "foo bar baz");
assert_eq!(first_paragraph.children.len(), 3);
assert_eq!(
first_paragraph
match first_paragraph
.children
.get(1)
.expect("Len already asserted to be 3"),
&Object::RadioLink(RadioLink {
source: "bar ",
children: vec![Object::PlainText(PlainText { source: "bar" })],
path: "bar"
})
);
.expect("Len already asserted to be 3.")
{
Object::RadioLink(inner) => {
assert_eq!(inner.get_source(), "bar ");
assert_eq!(inner.path, "bar");
assert_eq!(inner.children.len(), 1);
let child = inner
.children
.first()
.expect("Length already asserted to be 1.");
assert!(matches!(child, Object::PlainText(_)));
assert_eq!(child.get_source(), "bar");
}
_ => {
return Err("Child should be a radio link.".into());
}
};
Ok(())
}
#[test]
fn bold_radio_target() {
fn bold_radio_target() -> Result<(), Box<dyn std::error::Error>> {
let input = OrgSource::new("foo *bar* baz");
let radio_target_match = vec![Object::Bold(Bold {
source: "*bar*",
contents: "bar",
post_blank: Some(" "),
children: vec![Object::PlainText(PlainText { source: "bar" })],
})];
let global_settings = GlobalSettings {
@@ -234,24 +247,28 @@ mod tests {
_ => panic!("Should be a paragraph!"),
};
assert_eq!(Into::<&str>::into(remaining), "");
assert_eq!(
first_paragraph.get_standard_properties().get_source(),
"foo *bar* baz"
);
assert_eq!(first_paragraph.get_source(), "foo *bar* baz");
assert_eq!(first_paragraph.children.len(), 3);
assert_eq!(
first_paragraph
match first_paragraph
.children
.get(1)
.expect("Len already asserted to be 3"),
&Object::RadioLink(RadioLink {
source: "*bar* ",
children: vec![Object::Bold(Bold {
source: "*bar* ",
children: vec![Object::PlainText(PlainText { source: "bar" })]
})],
path: "*bar* "
})
);
.expect("Len already asserted to be 3.")
{
Object::RadioLink(inner) => {
assert_eq!(inner.get_source(), "*bar* ");
assert_eq!(inner.path, "*bar* ");
assert_eq!(inner.children.len(), 1);
let child = inner
.children
.first()
.expect("Length already asserted to be 1.");
assert!(matches!(child, Object::Bold(_)));
assert_eq!(child.get_source(), "*bar* ");
}
_ => {
return Err("Child should be a radio link.".into());
}
};
Ok(())
}
}

View File

@@ -73,7 +73,7 @@ fn regular_link_without_description<'b, 'g, 'r, 's>(
let (remaining, _opening_bracket) = tag("[[")(input)?;
let (remaining, path) = pathreg(context, remaining)?;
let (remaining, _closing_bracket) = tag("]]")(remaining)?;
let (remaining, _trailing_whitespace) =
let (remaining, post_blank) =
maybe_consume_object_trailing_whitespace_if_not_exiting(context, remaining)?;
let source = get_consumed(input, remaining);
Ok((
@@ -84,6 +84,8 @@ fn regular_link_without_description<'b, 'g, 'r, 's>(
path: path.path,
raw_link: path.raw_link,
search_option: path.search_option,
contents: None,
post_blank: post_blank.map(Into::<&str>::into),
children: Vec::new(),
application: path.application,
},
@@ -101,9 +103,10 @@ fn regular_link_with_description<'b, 'g, 'r, 's>(
let (remaining, _opening_bracket) = tag("[[")(input)?;
let (remaining, path) = pathreg(context, remaining)?;
let (remaining, _closing_bracket) = tag("][")(remaining)?;
let (remaining, description) = description(context, remaining)?;
let (remaining, (contents, description)) =
consumed(parser_with_context!(description)(context))(remaining)?;
let (remaining, _closing_bracket) = tag("]]")(remaining)?;
let (remaining, _trailing_whitespace) =
let (remaining, post_blank) =
maybe_consume_object_trailing_whitespace_if_not_exiting(context, remaining)?;
let source = get_consumed(input, remaining);
Ok((
@@ -114,6 +117,8 @@ fn regular_link_with_description<'b, 'g, 'r, 's>(
path: path.path,
raw_link: path.raw_link,
search_option: path.search_option,
contents: Some(Into::<&str>::into(contents)),
post_blank: post_blank.map(Into::<&str>::into),
children: description,
application: path.application,
},

View File

@@ -72,7 +72,7 @@ pub(crate) fn zeroth_section<'b, 'g, 'r, 's>(
}
}
let (remaining, _trailing_ws) =
let (remaining, post_blank) =
maybe_consume_trailing_whitespace_if_not_exiting(context, remaining)?;
let source = get_consumed(input, remaining);
@@ -80,6 +80,7 @@ pub(crate) fn zeroth_section<'b, 'g, 'r, 's>(
remaining,
Section {
source: source.into(),
post_blank: post_blank.map(Into::<&str>::into),
children,
},
))
@@ -128,7 +129,7 @@ pub(crate) fn section<'b, 'g, 'r, 's>(
children.insert(0, ele)
}
let (remaining, _trailing_ws) =
let (remaining, post_blank) =
maybe_consume_trailing_whitespace_if_not_exiting(context, remaining)?;
let source = get_consumed(input, remaining);
@@ -136,6 +137,7 @@ pub(crate) fn section<'b, 'g, 'r, 's>(
remaining,
Section {
source: source.into(),
post_blank: post_blank.map(Into::<&str>::into),
children,
},
))

View File

@@ -40,7 +40,7 @@ fn percent_statistics_cookie<'b, 'g, 'r, 's>(
tag("%]"),
)))(input)?;
let value = get_consumed(input, remaining);
let (remaining, _trailing_whitespace) =
let (remaining, post_blank) =
maybe_consume_object_trailing_whitespace_if_not_exiting(context, remaining)?;
let source = get_consumed(input, remaining);
Ok((
@@ -48,6 +48,7 @@ fn percent_statistics_cookie<'b, 'g, 'r, 's>(
StatisticsCookie {
source: source.into(),
value: value.into(),
post_blank: post_blank.map(Into::<&str>::into),
},
))
}
@@ -68,7 +69,7 @@ fn fraction_statistics_cookie<'b, 'g, 'r, 's>(
tag("]"),
)))(input)?;
let value = get_consumed(input, remaining);
let (remaining, _trailing_whitespace) =
let (remaining, post_blank) =
maybe_consume_object_trailing_whitespace_if_not_exiting(context, remaining)?;
let source = get_consumed(input, remaining);
Ok((
@@ -76,6 +77,7 @@ fn fraction_statistics_cookie<'b, 'g, 'r, 's>(
StatisticsCookie {
source: source.into(),
value: value.into(),
post_blank: post_blank.map(Into::<&str>::into),
},
))
}

View File

@@ -3,6 +3,7 @@ use nom::bytes::complete::tag;
use nom::bytes::complete::take_while;
use nom::character::complete::anychar;
use nom::character::complete::one_of;
use nom::combinator::consumed;
use nom::combinator::map;
use nom::combinator::not;
use nom::combinator::opt;
@@ -54,17 +55,20 @@ pub(crate) fn subscript<'b, 'g, 'r, 's>(
context: RefContext<'b, 'g, 'r, 's>,
input: OrgSource<'s>,
) -> Res<OrgSource<'s>, Subscript<'s>> {
// We check for the underscore first before checking the pre-character as a minor optimization to avoid walking up the context tree to find the document root unnecessarily.
let (remaining, _) = tag("_")(input)?;
pre(input)?;
let (remaining, body) = script_body(context, remaining)?;
let (remaining, _trailing_whitespace) =
let (remaining, post_blank) =
maybe_consume_object_trailing_whitespace_if_not_exiting(context, remaining)?;
let source = get_consumed(input, remaining);
let (use_brackets, body) = match body {
ScriptBody::Braceless(text) => (false, vec![Object::PlainText(PlainText { source: text })]),
ScriptBody::WithBraces(body) => (true, body),
let (use_brackets, contents, body) = match body {
ScriptBody::Braceless(text) => (
false,
text,
vec![Object::PlainText(PlainText { source: text })],
),
ScriptBody::WithBraces(contents, body) => (true, contents, body),
};
Ok((
@@ -72,6 +76,8 @@ pub(crate) fn subscript<'b, 'g, 'r, 's>(
Subscript {
source: source.into(),
use_brackets,
contents,
post_blank: post_blank.map(Into::<&str>::into),
children: body,
},
))
@@ -89,13 +95,17 @@ pub(crate) fn superscript<'b, 'g, 'r, 's>(
let (remaining, _) = tag("^")(input)?;
pre(input)?;
let (remaining, body) = script_body(context, remaining)?;
let (remaining, _trailing_whitespace) =
let (remaining, post_blank) =
maybe_consume_object_trailing_whitespace_if_not_exiting(context, remaining)?;
let source = get_consumed(input, remaining);
let (use_brackets, body) = match body {
ScriptBody::Braceless(text) => (false, vec![Object::PlainText(PlainText { source: text })]),
ScriptBody::WithBraces(body) => (true, body),
let (use_brackets, contents, body) = match body {
ScriptBody::Braceless(text) => (
false,
text,
vec![Object::PlainText(PlainText { source: text })],
),
ScriptBody::WithBraces(contents, body) => (true, contents, body),
};
Ok((
@@ -103,6 +113,8 @@ pub(crate) fn superscript<'b, 'g, 'r, 's>(
Superscript {
source: source.into(),
use_brackets,
contents,
post_blank: post_blank.map(Into::<&str>::into),
children: body,
},
))
@@ -117,7 +129,7 @@ fn pre<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>, ()> {
#[derive(Debug)]
enum ScriptBody<'s> {
Braceless(&'s str),
WithBraces(Vec<Object<'s>>),
WithBraces(&'s str, Vec<Object<'s>>),
}
#[cfg_attr(
@@ -135,9 +147,10 @@ fn script_body<'b, 'g, 'r, 's>(
map(parser_with_context!(script_alphanum)(context), |body| {
ScriptBody::Braceless(body.into())
}),
map(parser_with_context!(script_with_braces)(context), |body| {
ScriptBody::WithBraces(body)
}),
map(
parser_with_context!(script_with_braces)(context),
|(contents, body)| ScriptBody::WithBraces(Into::<&str>::into(contents), body),
),
map(
parser_with_context!(script_with_parenthesis)(context),
|body| ScriptBody::Braceless(body.into()),
@@ -195,7 +208,7 @@ fn end_script_alphanum_character<'s>(input: OrgSource<'s>) -> Res<OrgSource<'s>,
fn script_with_braces<'b, 'g, 'r, 's>(
context: RefContext<'b, 'g, 'r, 's>,
input: OrgSource<'s>,
) -> Res<OrgSource<'s>, Vec<Object<'s>>> {
) -> Res<OrgSource<'s>, (OrgSource<'s>, Vec<Object<'s>>)> {
let (remaining, _) = tag("{")(input)?;
let exit_with_depth = script_with_braces_end(remaining.get_brace_depth());
let parser_context = ContextElement::ExitMatcherNode(ExitMatcherNode {
@@ -204,13 +217,13 @@ fn script_with_braces<'b, 'g, 'r, 's>(
});
let parser_context = context.with_additional_node(&parser_context);
let (remaining, (children, _exit_contents)) = many_till(
let (remaining, (contents, (children, _exit_contents))) = consumed(many_till(
parser_with_context!(standard_set_object)(&parser_context),
parser_with_context!(exit_matcher_parser)(&parser_context),
)(remaining)?;
))(remaining)?;
let (remaining, _) = tag("}")(remaining)?;
Ok((remaining, children))
Ok((remaining, (contents, children)))
}
fn script_with_braces_end(starting_brace_depth: BracketDepth) -> impl ContextMatcher {

View File

@@ -3,6 +3,7 @@ use nom::bytes::complete::is_not;
use nom::bytes::complete::tag;
use nom::character::complete::line_ending;
use nom::character::complete::space0;
use nom::combinator::consumed;
use nom::combinator::not;
use nom::combinator::opt;
use nom::combinator::peek;
@@ -67,13 +68,13 @@ where
let org_mode_table_row_matcher = parser_with_context!(org_mode_table_row)(&parser_context);
let exit_matcher = parser_with_context!(exit_matcher_parser)(&parser_context);
let (remaining, (children, _exit_contents)) =
many_till(org_mode_table_row_matcher, exit_matcher)(remaining)?;
let (remaining, (contents, (children, _exit_contents))) =
consumed(many_till(org_mode_table_row_matcher, exit_matcher))(remaining)?;
let (remaining, formulas) =
many0(parser_with_context!(table_formula_keyword)(context))(remaining)?;
let (remaining, _trailing_ws) =
let (remaining, post_blank) =
maybe_consume_trailing_whitespace_if_not_exiting(context, remaining)?;
let source = get_consumed(input, remaining);
@@ -87,6 +88,8 @@ where
),
formulas,
children,
contents: Into::<&str>::into(contents),
post_blank: post_blank.map(Into::<&str>::into),
},
))
}
@@ -150,6 +153,7 @@ fn org_mode_table_row_rule<'b, 'g, 'r, 's>(
TableRow {
source: source.into(),
children: Vec::new(),
contents: None,
},
))
}
@@ -164,8 +168,8 @@ fn org_mode_table_row_regular<'b, 'g, 'r, 's>(
) -> Res<OrgSource<'s>, TableRow<'s>> {
start_of_line(input)?;
let (remaining, _) = tuple((space0, tag("|")))(input)?;
let (remaining, children) =
many1(parser_with_context!(org_mode_table_cell)(context))(remaining)?;
let (remaining, (contents, children)) =
consumed(many1(parser_with_context!(org_mode_table_cell)(context)))(remaining)?;
let (remaining, _tail) = recognize(tuple((space0, org_line_ending)))(remaining)?;
let source = get_consumed(input, remaining);
Ok((
@@ -173,6 +177,11 @@ fn org_mode_table_row_regular<'b, 'g, 'r, 's>(
TableRow {
source: source.into(),
children,
contents: if contents.len() > 0 {
Some(Into::<&str>::into(contents))
} else {
None
},
},
))
}
@@ -194,12 +203,12 @@ fn org_mode_table_cell<'b, 'g, 'r, 's>(
parser_with_context!(table_cell_set_object)(&parser_context);
let exit_matcher = parser_with_context!(exit_matcher_parser)(&parser_context);
let (remaining, _) = space0(input)?;
let (remaining, (children, _exit_contents)) = verify(
let (remaining, (contents, (children, _exit_contents))) = consumed(verify(
many_till(table_cell_set_object_matcher, exit_matcher),
|(children, exit_contents)| {
!children.is_empty() || Into::<&str>::into(exit_contents).ends_with('|')
},
)(remaining)?;
))(remaining)?;
let (remaining, _tail) = org_mode_table_cell_end(&parser_context, remaining)?;
@@ -210,6 +219,7 @@ fn org_mode_table_cell<'b, 'g, 'r, 's>(
TableCell {
source: source.into(),
children,
contents: Into::<&str>::into(contents),
},
))
}

View File

@@ -46,7 +46,7 @@ pub(crate) fn target<'b, 'g, 'r, 's>(
)));
}
let (remaining, _) = tag(">>")(remaining)?;
let (remaining, _trailing_whitespace) =
let (remaining, post_blank) =
maybe_consume_object_trailing_whitespace_if_not_exiting(context, remaining)?;
let source = get_consumed(input, remaining);
@@ -55,6 +55,7 @@ pub(crate) fn target<'b, 'g, 'r, 's>(
Target {
source: source.into(),
value: body.into(),
post_blank: post_blank.map(Into::<&str>::into),
},
))
}

View File

@@ -3,11 +3,13 @@ use nom::bytes::complete::tag;
use nom::character::complete::anychar;
use nom::character::complete::multispace1;
use nom::character::complete::one_of;
use nom::character::complete::space0;
use nom::character::complete::space1;
use nom::combinator::all_consuming;
use nom::combinator::consumed;
use nom::combinator::map;
use nom::combinator::map_parser;
use nom::combinator::not;
use nom::combinator::opt;
use nom::combinator::peek;
use nom::combinator::recognize;
use nom::combinator::verify;
@@ -76,12 +78,14 @@ fn bold<'b, 'g, 'r, 's>(
context: RefContext<'b, 'g, 'r, 's>,
input: OrgSource<'s>,
) -> Res<OrgSource<'s>, Bold<'s>> {
let (remaining, children) = text_markup_object("*")(context, input)?;
let (remaining, (contents, children, post_blank)) = text_markup_object("*")(context, input)?;
let source = get_consumed(input, remaining);
Ok((
remaining,
Bold {
source: source.into(),
contents: contents.into(),
post_blank: post_blank.map(Into::<&str>::into),
children,
},
))
@@ -95,12 +99,14 @@ fn italic<'b, 'g, 'r, 's>(
context: RefContext<'b, 'g, 'r, 's>,
input: OrgSource<'s>,
) -> Res<OrgSource<'s>, Italic<'s>> {
let (remaining, children) = text_markup_object("/")(context, input)?;
let (remaining, (contents, children, post_blank)) = text_markup_object("/")(context, input)?;
let source = get_consumed(input, remaining);
Ok((
remaining,
Italic {
source: source.into(),
contents: contents.into(),
post_blank: post_blank.map(Into::<&str>::into),
children,
},
))
@@ -114,12 +120,14 @@ fn underline<'b, 'g, 'r, 's>(
context: RefContext<'b, 'g, 'r, 's>,
input: OrgSource<'s>,
) -> Res<OrgSource<'s>, Underline<'s>> {
let (remaining, children) = text_markup_object("_")(context, input)?;
let (remaining, (contents, children, post_blank)) = text_markup_object("_")(context, input)?;
let source = get_consumed(input, remaining);
Ok((
remaining,
Underline {
source: source.into(),
contents: contents.into(),
post_blank: post_blank.map(Into::<&str>::into),
children,
},
))
@@ -133,12 +141,14 @@ fn strike_through<'b, 'g, 'r, 's>(
context: RefContext<'b, 'g, 'r, 's>,
input: OrgSource<'s>,
) -> Res<OrgSource<'s>, StrikeThrough<'s>> {
let (remaining, children) = text_markup_object("+")(context, input)?;
let (remaining, (contents, children, post_blank)) = text_markup_object("+")(context, input)?;
let source = get_consumed(input, remaining);
Ok((
remaining,
StrikeThrough {
source: source.into(),
contents: contents.into(),
post_blank: post_blank.map(Into::<&str>::into),
children,
},
))
@@ -152,13 +162,14 @@ fn verbatim<'b, 'g, 'r, 's>(
context: RefContext<'b, 'g, 'r, 's>,
input: OrgSource<'s>,
) -> Res<OrgSource<'s>, Verbatim<'s>> {
let (remaining, contents) = text_markup_string("=")(context, input)?;
let (remaining, (contents, post_blank)) = text_markup_string("=")(context, input)?;
let source = get_consumed(input, remaining);
Ok((
remaining,
Verbatim {
source: source.into(),
contents: contents.into(),
post_blank: post_blank.map(Into::<&str>::into),
},
))
}
@@ -171,13 +182,14 @@ fn code<'b, 'g, 'r, 's>(
context: RefContext<'b, 'g, 'r, 's>,
input: OrgSource<'s>,
) -> Res<OrgSource<'s>, Code<'s>> {
let (remaining, contents) = text_markup_string("~")(context, input)?;
let (remaining, (contents, post_blank)) = text_markup_string("~")(context, input)?;
let source = get_consumed(input, remaining);
Ok((
remaining,
Code {
source: source.into(),
contents: contents.into(),
post_blank: post_blank.map(Into::<&str>::into),
},
))
}
@@ -187,8 +199,10 @@ fn text_markup_object(
) -> impl for<'b, 'g, 'r, 's> Fn(
RefContext<'b, 'g, 'r, 's>,
OrgSource<'s>,
) -> Res<OrgSource<'s>, Vec<Object<'s>>>
+ '_ {
) -> Res<
OrgSource<'s>,
(OrgSource<'s>, Vec<Object<'s>>, Option<OrgSource<'s>>),
> + '_ {
move |context, input: OrgSource<'_>| _text_markup_object(context, input, marker_symbol)
}
@@ -200,7 +214,7 @@ fn _text_markup_object<'b, 'g, 'r, 's, 'c>(
context: RefContext<'b, 'g, 'r, 's>,
input: OrgSource<'s>,
marker_symbol: &'c str,
) -> Res<OrgSource<'s>, Vec<Object<'s>>> {
) -> Res<OrgSource<'s>, (OrgSource<'s>, Vec<Object<'s>>, Option<OrgSource<'s>>)> {
let (remaining, _) = pre(context, input)?;
let (remaining, open) = tag(marker_symbol)(remaining)?;
let (remaining, _peek_not_whitespace) =
@@ -215,7 +229,7 @@ fn _text_markup_object<'b, 'g, 'r, 's, 'c>(
let initial_context = ContextElement::document_context();
let initial_context = Context::new(context.get_global_settings(), List::new(&initial_context));
let (remaining, children) = map_parser(
let (remaining, (contents, children)) = consumed(map_parser(
verify(
parser_with_context!(text_until_exit)(&parser_context),
|text| text.len() > 0,
@@ -225,7 +239,7 @@ fn _text_markup_object<'b, 'g, 'r, 's, 'c>(
&initial_context,
)))(i)
}),
)(remaining)?;
))(remaining)?;
{
#[cfg(feature = "tracing")]
@@ -240,9 +254,9 @@ fn _text_markup_object<'b, 'g, 'r, 's, 'c>(
}
let (remaining, _close) = text_markup_end_specialized(context, remaining)?;
let (remaining, _trailing_whitespace) =
let (remaining, post_blank) =
maybe_consume_object_trailing_whitespace_if_not_exiting(context, remaining)?;
Ok((remaining, children))
Ok((remaining, (contents, children, post_blank)))
}
fn text_markup_string(
@@ -250,7 +264,7 @@ fn text_markup_string(
) -> impl for<'b, 'g, 'r, 's> Fn(
RefContext<'b, 'g, 'r, 's>,
OrgSource<'s>,
) -> Res<OrgSource<'s>, OrgSource<'s>>
) -> Res<OrgSource<'s>, (OrgSource<'s>, Option<OrgSource<'s>>)>
+ '_ {
move |context, input: OrgSource<'_>| _text_markup_string(context, input, marker_symbol)
}
@@ -263,7 +277,7 @@ fn _text_markup_string<'b, 'g, 'r, 's, 'c>(
context: RefContext<'b, 'g, 'r, 's>,
input: OrgSource<'s>,
marker_symbol: &'c str,
) -> Res<OrgSource<'s>, OrgSource<'s>> {
) -> Res<OrgSource<'s>, (OrgSource<'s>, Option<OrgSource<'s>>)> {
let (remaining, _) = pre(context, input)?;
let (remaining, open) = tag(marker_symbol)(remaining)?;
let (remaining, _peek_not_whitespace) =
@@ -296,9 +310,9 @@ fn _text_markup_string<'b, 'g, 'r, 's, 'c>(
}
let (remaining, _close) = text_markup_end_specialized(context, remaining)?;
let (remaining, _trailing_whitespace) =
let (remaining, post_blank) =
maybe_consume_object_trailing_whitespace_if_not_exiting(context, remaining)?;
Ok((remaining, contents))
Ok((remaining, (contents, post_blank)))
}
#[cfg_attr(
@@ -382,13 +396,15 @@ impl<'x> RematchObject<'x> for Bold<'x> {
_context: RefContext<'b, 'g, 'r, 's>,
input: OrgSource<'s>,
) -> Res<OrgSource<'s>, Object<'s>> {
let (remaining, children) =
let (remaining, (contents, children, post_blank)) =
_rematch_text_markup_object(_context, input, "*", &self.children)?;
let source = get_consumed(input, remaining);
Ok((
remaining,
Object::Bold(Bold {
source: source.into(),
contents: contents.into(),
post_blank: post_blank.map(Into::<&str>::into),
children,
}),
))
@@ -405,13 +421,15 @@ impl<'x> RematchObject<'x> for Italic<'x> {
_context: RefContext<'b, 'g, 'r, 's>,
input: OrgSource<'s>,
) -> Res<OrgSource<'s>, Object<'s>> {
let (remaining, children) =
let (remaining, (contents, children, post_blank)) =
_rematch_text_markup_object(_context, input, "/", &self.children)?;
let source = get_consumed(input, remaining);
Ok((
remaining,
Object::Italic(Italic {
source: source.into(),
contents: contents.into(),
post_blank: post_blank.map(Into::<&str>::into),
children,
}),
))
@@ -428,13 +446,15 @@ impl<'x> RematchObject<'x> for Underline<'x> {
_context: RefContext<'b, 'g, 'r, 's>,
input: OrgSource<'s>,
) -> Res<OrgSource<'s>, Object<'s>> {
let (remaining, children) =
let (remaining, (contents, children, post_blank)) =
_rematch_text_markup_object(_context, input, "_", &self.children)?;
let source = get_consumed(input, remaining);
Ok((
remaining,
Object::Underline(Underline {
source: source.into(),
contents: contents.into(),
post_blank: post_blank.map(Into::<&str>::into),
children,
}),
))
@@ -451,13 +471,15 @@ impl<'x> RematchObject<'x> for StrikeThrough<'x> {
_context: RefContext<'b, 'g, 'r, 's>,
input: OrgSource<'s>,
) -> Res<OrgSource<'s>, Object<'s>> {
let (remaining, children) =
let (remaining, (contents, children, post_blank)) =
_rematch_text_markup_object(_context, input, "+", &self.children)?;
let source = get_consumed(input, remaining);
Ok((
remaining,
Object::StrikeThrough(StrikeThrough {
source: source.into(),
contents: contents.into(),
post_blank: post_blank.map(Into::<&str>::into),
children,
}),
))
@@ -473,7 +495,7 @@ fn _rematch_text_markup_object<'b, 'g, 'r, 's, 'x>(
input: OrgSource<'s>,
marker_symbol: &'static str,
original_match_children: &'x Vec<Object<'x>>,
) -> Res<OrgSource<'s>, Vec<Object<'s>>> {
) -> Res<OrgSource<'s>, (OrgSource<'s>, Vec<Object<'s>>, Option<OrgSource<'s>>)> {
let (remaining, _) = pre(context, input)?;
let (remaining, open) = tag(marker_symbol)(remaining)?;
let (remaining, _peek_not_whitespace) = peek(not(multispace1))(remaining)?;
@@ -484,6 +506,7 @@ fn _rematch_text_markup_object<'b, 'g, 'r, 's, 'x>(
});
let parser_context = context.with_additional_node(&parser_context);
let contents_begin = remaining;
let (remaining, children) =
// TODO: This doesn't really check the exit matcher between each object. I think it may be possible to construct an org document that parses incorrectly with the current code.
rematch_target(&parser_context, original_match_children, remaining)?;
@@ -499,8 +522,10 @@ fn _rematch_text_markup_object<'b, 'g, 'r, 's, 'x>(
)));
}
}
let contents_end = remaining;
let contents = contents_begin.get_until(contents_end);
let (remaining, _close) = text_markup_end_specialized(context, remaining)?;
let (remaining, _trailing_whitespace) = space0(remaining)?;
Ok((remaining, children))
let (remaining, post_blank) = opt(space1)(remaining)?;
Ok((remaining, (contents, children, post_blank)))
}

View File

@@ -53,8 +53,8 @@ pub(crate) fn timestamp<'b, 'g, 'r, 's>(
parser_with_context!(inactive_time_range_timestamp)(context),
parser_with_context!(active_date_range_timestamp)(context),
parser_with_context!(inactive_date_range_timestamp)(context),
parser_with_context!(active_timestamp)(context),
parser_with_context!(inactive_timestamp)(context),
parser_with_context!(active_timestamp(true))(context),
parser_with_context!(inactive_timestamp(true))(context),
))(input)
}
@@ -69,7 +69,7 @@ fn diary_timestamp<'b, 'g, 'r, 's>(
let (remaining, _) = tag("<%%(")(input)?;
let (remaining, _body) = sexp(context, remaining)?;
let (remaining, _) = tag(")>")(remaining)?;
let (remaining, _trailing_whitespace) =
let (remaining, post_blank) =
maybe_consume_object_trailing_whitespace_if_not_exiting(context, remaining)?;
let source = get_consumed(input, remaining);
@@ -85,6 +85,7 @@ fn diary_timestamp<'b, 'g, 'r, 's>(
end_time: None,
repeater: None,
warning_delay: None,
post_blank: post_blank.map(Into::<&str>::into),
},
))
}
@@ -125,13 +126,23 @@ fn sexp_end<'b, 'g, 'r, 's>(
alt((tag(")>"), recognize(one_of(">\n"))))(input)
}
const fn active_timestamp(
allow_post_blank: bool,
) -> impl for<'b, 'g, 'r, 's> Fn(
RefContext<'b, 'g, 'r, 's>,
OrgSource<'s>,
) -> Res<OrgSource<'s>, Timestamp<'s>> {
move |context, input| impl_active_timestamp(context, input, allow_post_blank)
}
#[cfg_attr(
feature = "tracing",
tracing::instrument(ret, level = "debug", skip(context))
)]
fn active_timestamp<'b, 'g, 'r, 's>(
fn impl_active_timestamp<'b, 'g, 'r, 's>(
context: RefContext<'b, 'g, 'r, 's>,
input: OrgSource<'s>,
allow_post_blank: bool,
) -> Res<OrgSource<'s>, Timestamp<'s>> {
let (remaining, _) = tag("<")(input)?;
let (remaining, start) = date(context, remaining)?;
@@ -159,8 +170,11 @@ fn active_timestamp<'b, 'g, 'r, 's>(
)))(remaining)?;
let (remaining, _) = tag(">")(remaining)?;
let (remaining, _trailing_whitespace) =
maybe_consume_object_trailing_whitespace_if_not_exiting(context, remaining)?;
let (remaining, post_blank) = if allow_post_blank {
maybe_consume_object_trailing_whitespace_if_not_exiting(context, remaining)?
} else {
(remaining, None)
};
let source = get_consumed(input, remaining);
Ok((
@@ -175,17 +189,28 @@ fn active_timestamp<'b, 'g, 'r, 's>(
end_time: time.map(|(_, time)| time),
repeater: repeater.map(|(_, repeater)| repeater),
warning_delay: warning_delay.map(|(_, warning_delay)| warning_delay),
post_blank: post_blank.map(Into::<&str>::into),
},
))
}
pub(crate) const fn inactive_timestamp(
allow_post_blank: bool,
) -> impl for<'b, 'g, 'r, 's> Fn(
RefContext<'b, 'g, 'r, 's>,
OrgSource<'s>,
) -> Res<OrgSource<'s>, Timestamp<'s>> {
move |context, input| impl_inactive_timestamp(context, input, allow_post_blank)
}
#[cfg_attr(
feature = "tracing",
tracing::instrument(ret, level = "debug", skip(context))
)]
pub(crate) fn inactive_timestamp<'b, 'g, 'r, 's>(
fn impl_inactive_timestamp<'b, 'g, 'r, 's>(
context: RefContext<'b, 'g, 'r, 's>,
input: OrgSource<'s>,
allow_post_blank: bool,
) -> Res<OrgSource<'s>, Timestamp<'s>> {
let (remaining, _) = tag("[")(input)?;
let (remaining, start) = date(context, remaining)?;
@@ -213,8 +238,11 @@ pub(crate) fn inactive_timestamp<'b, 'g, 'r, 's>(
)))(remaining)?;
let (remaining, _) = tag("]")(remaining)?;
let (remaining, _trailing_whitespace) =
maybe_consume_object_trailing_whitespace_if_not_exiting(context, remaining)?;
let (remaining, post_blank) = if allow_post_blank {
maybe_consume_object_trailing_whitespace_if_not_exiting(context, remaining)?
} else {
(remaining, None)
};
let source = get_consumed(input, remaining);
Ok((
@@ -229,6 +257,7 @@ pub(crate) fn inactive_timestamp<'b, 'g, 'r, 's>(
end_time: time.map(|(_, time)| time),
repeater: repeater.map(|(_, repeater)| repeater),
warning_delay: warning_delay.map(|(_, warning_delay)| warning_delay),
post_blank: post_blank.map(Into::<&str>::into),
},
))
}
@@ -241,12 +270,12 @@ fn active_date_range_timestamp<'b, 'g, 'r, 's>(
context: RefContext<'b, 'g, 'r, 's>,
input: OrgSource<'s>,
) -> Res<OrgSource<'s>, Timestamp<'s>> {
let (remaining, first_timestamp) = active_timestamp(context, input)?;
let (remaining, first_timestamp) = impl_active_timestamp(context, input, false)?;
// TODO: Does the space0 at the end of the active/inactive timestamp parsers cause this to be incorrect? I could use a look-behind to make sure the preceding character is not whitespace
let (remaining, _separator) = tag("--")(remaining)?;
let (remaining, second_timestamp) = active_timestamp(context, remaining)?;
let (remaining, second_timestamp) = impl_active_timestamp(context, remaining, false)?;
let (remaining, _trailing_whitespace) =
let (remaining, post_blank) =
maybe_consume_object_trailing_whitespace_if_not_exiting(context, remaining)?;
let source = get_consumed(input, remaining);
@@ -264,6 +293,7 @@ fn active_date_range_timestamp<'b, 'g, 'r, 's>(
warning_delay: first_timestamp
.warning_delay
.or(second_timestamp.warning_delay),
post_blank: post_blank.map(Into::<&str>::into),
},
))
}
@@ -302,7 +332,7 @@ fn active_time_range_timestamp<'b, 'g, 'r, 's>(
)))(remaining)?;
let (remaining, _) = tag(">")(remaining)?;
let (remaining, _trailing_whitespace) =
let (remaining, post_blank) =
maybe_consume_object_trailing_whitespace_if_not_exiting(context, remaining)?;
let source = get_consumed(input, remaining);
@@ -318,6 +348,7 @@ fn active_time_range_timestamp<'b, 'g, 'r, 's>(
end_time: Some(second_time),
repeater: repeater.map(|(_, repeater)| repeater),
warning_delay: warning_delay.map(|(_, warning_delay)| warning_delay),
post_blank: post_blank.map(Into::<&str>::into),
},
))
}
@@ -330,12 +361,12 @@ pub(crate) fn inactive_date_range_timestamp<'b, 'g, 'r, 's>(
context: RefContext<'b, 'g, 'r, 's>,
input: OrgSource<'s>,
) -> Res<OrgSource<'s>, Timestamp<'s>> {
let (remaining, first_timestamp) = inactive_timestamp(context, input)?;
let (remaining, first_timestamp) = impl_inactive_timestamp(context, input, false)?;
// TODO: Does the space0 at the end of the active/inactive timestamp parsers cause this to be incorrect? I could use a look-behind to make sure the preceding character is not whitespace
let (remaining, _separator) = tag("--")(remaining)?;
let (remaining, second_timestamp) = inactive_timestamp(context, remaining)?;
let (remaining, second_timestamp) = impl_inactive_timestamp(context, remaining, false)?;
let (remaining, _trailing_whitespace) =
let (remaining, post_blank) =
maybe_consume_object_trailing_whitespace_if_not_exiting(context, remaining)?;
let source = get_consumed(input, remaining);
@@ -354,6 +385,7 @@ pub(crate) fn inactive_date_range_timestamp<'b, 'g, 'r, 's>(
warning_delay: first_timestamp
.warning_delay
.or(second_timestamp.warning_delay),
post_blank: post_blank.map(Into::<&str>::into),
},
))
}
@@ -392,7 +424,7 @@ pub(crate) fn inactive_time_range_timestamp<'b, 'g, 'r, 's>(
)))(remaining)?;
let (remaining, _) = tag("]")(remaining)?;
let (remaining, _trailing_whitespace) =
let (remaining, post_blank) =
maybe_consume_object_trailing_whitespace_if_not_exiting(context, remaining)?;
let source = get_consumed(input, remaining);
@@ -408,6 +440,7 @@ pub(crate) fn inactive_time_range_timestamp<'b, 'g, 'r, 's>(
end_time: Some(second_time),
repeater: repeater.map(|(_, repeater)| repeater),
warning_delay: warning_delay.map(|(_, warning_delay)| warning_delay),
post_blank: post_blank.map(Into::<&str>::into),
},
))
}

Some files were not shown because too many files have changed in this diff Show More