Compare commits

..

No commits in common. "main" and "v0.0.1" have entirely different histories.
main ... v0.0.1

89 changed files with 678 additions and 5410 deletions

View File

@ -1,3 +0,0 @@
**/.git
target/
org_test_documents/

View File

@ -1,224 +0,0 @@
apiVersion: tekton.dev/v1
kind: PipelineRun
metadata:
name: build
spec:
timeouts:
pipeline: "2h0m0s"
tasks: "1h0m0s"
finally: "0h30m0s"
taskRunTemplate:
serviceAccountName: build-bot
pipelineSpec:
params:
- name: image-name
description: The name for the built image
type: string
- name: target-name
description: The dockerfile target to build
type: string
- name: path-to-image-context
description: The path to the build context
type: string
- name: path-to-dockerfile
description: The path to the Dockerfile
type: string
tasks:
- name: report-pending
taskRef:
resolver: git
params:
- name: url
value: https://code.fizz.buzz/mirror/catalog.git # mirror of https://github.com/tektoncd/catalog.git
- name: revision
value: df36b3853a5657fd883015cdbf07ad6466918acf
- name: pathInRepo
value: task/gitea-set-status/0.1/gitea-set-status.yaml
params:
- name: CONTEXT
value: "$(params.JOB_NAME)"
- name: REPO_FULL_NAME
value: "$(params.REPO_OWNER)/$(params.REPO_NAME)"
- name: GITEA_HOST_URL
value: code.fizz.buzz
- name: SHA
value: "$(tasks.fetch-repository.results.commit)"
- name: DESCRIPTION
value: "Build $(params.JOB_NAME) has started"
- name: STATE
value: pending
- name: TARGET_URL
value: "https://tekton.fizz.buzz/#/namespaces/$(context.pipelineRun.namespace)/pipelineruns/$(context.pipelineRun.name)"
- name: fetch-repository
taskRef:
resolver: git
params:
- name: url
value: https://code.fizz.buzz/mirror/catalog.git # mirror of https://github.com/tektoncd/catalog.git
- name: revision
value: df36b3853a5657fd883015cdbf07ad6466918acf
- name: pathInRepo
value: task/git-clone/0.9/git-clone.yaml
workspaces:
- name: output
workspace: git-source
params:
- name: url
value: $(params.REPO_URL)
- name: revision
value: $(params.PULL_BASE_SHA)
- name: deleteExisting
value: "true"
- name: get-git-commit-time
taskSpec:
metadata: {}
stepTemplate:
image: alpine:3.20
computeResources:
requests:
cpu: 10m
memory: 600Mi
workingDir: "$(workspaces.repo.path)"
results:
- name: unix-time
description: The time of the git commit in unix timestamp format.
steps:
- image: alpine/git:v2.34.2
name: detect-tag-step
script: |
#!/usr/bin/env sh
set -euo pipefail
echo -n "$(git log -1 --pretty=%ct)" | tee $(results.unix-time.path)
workspaces:
- name: repo
workspace: git-source
runAfter:
- fetch-repository
- name: build-image
taskRef:
resolver: git
params:
- name: url
value: https://code.fizz.buzz/talexander/personal_tekton_catalog.git
- name: revision
value: 7ee31a185243ee6da13dcd26a592c585b64c80e5
- name: pathInRepo
value: task/buildkit-rootless-daemonless/0.1/buildkit-rootless-daemonless.yaml
params:
- name: OUTPUT
value: >-
type=image,"name=$(params.image-name):latest,$(params.image-name):$(tasks.fetch-repository.results.commit)",push=true,compression=zstd,compression-level=22,oci-mediatypes=true
- name: CONTEXT
value: $(params.path-to-image-context)
- name: DOCKERFILE
value: $(params.path-to-dockerfile)
- name: EXTRA_ARGS
value:
- "--opt"
- "target=$(params.target-name)"
- --import-cache
- "type=registry,ref=$(params.image-name):buildcache"
- --export-cache
- "type=registry,ref=$(params.image-name):buildcache,mode=max,compression=zstd,compression-level=22,rewrite-timestamp=true,image-manifest=true,oci-mediatypes=true"
- --opt
- build-arg:SOURCE_DATE_EPOCH=$(tasks.get-git-commit-time.results.unix-time)
- name: BUILDKITD_TOML
value: |
debug = true
[registry."docker.io"]
mirrors = ["dockerhub.dockerhub.svc.cluster.local"]
[registry."dockerhub.dockerhub.svc.cluster.local"]
http = true
insecure = true
workspaces:
- name: source
workspace: git-source
- name: dockerconfig
workspace: docker-credentials
runAfter:
- fetch-repository
finally:
- name: report-success
when:
- input: "$(tasks.status)"
operator: in
values: ["Succeeded", "Completed"]
taskRef:
resolver: git
params:
- name: url
value: https://code.fizz.buzz/mirror/catalog.git # mirror of https://github.com/tektoncd/catalog.git
- name: revision
value: df36b3853a5657fd883015cdbf07ad6466918acf
- name: pathInRepo
value: task/gitea-set-status/0.1/gitea-set-status.yaml
params:
- name: CONTEXT
value: "$(params.JOB_NAME)"
- name: REPO_FULL_NAME
value: "$(params.REPO_OWNER)/$(params.REPO_NAME)"
- name: GITEA_HOST_URL
value: code.fizz.buzz
- name: SHA
value: "$(tasks.fetch-repository.results.commit)"
- name: DESCRIPTION
value: "Build $(params.JOB_NAME) has succeeded"
- name: STATE
value: success
- name: TARGET_URL
value: "https://tekton.fizz.buzz/#/namespaces/$(context.pipelineRun.namespace)/pipelineruns/$(context.pipelineRun.name)"
- name: report-failure
when:
- input: "$(tasks.status)"
operator: in
values: ["Failed"]
taskRef:
resolver: git
params:
- name: url
value: https://code.fizz.buzz/mirror/catalog.git # mirror of https://github.com/tektoncd/catalog.git
- name: revision
value: df36b3853a5657fd883015cdbf07ad6466918acf
- name: pathInRepo
value: task/gitea-set-status/0.1/gitea-set-status.yaml
params:
- name: CONTEXT
value: "$(params.JOB_NAME)"
- name: REPO_FULL_NAME
value: "$(params.REPO_OWNER)/$(params.REPO_NAME)"
- name: GITEA_HOST_URL
value: code.fizz.buzz
- name: SHA
value: "$(tasks.fetch-repository.results.commit)"
- name: DESCRIPTION
value: "Build $(params.JOB_NAME) has failed"
- name: STATE
value: failure
- name: TARGET_URL
value: "https://tekton.fizz.buzz/#/namespaces/$(context.pipelineRun.namespace)/pipelineruns/$(context.pipelineRun.name)"
workspaces:
- name: git-source
- name: docker-credentials
workspaces:
- name: git-source
volumeClaimTemplate:
spec:
storageClassName: "nfs-client"
accessModes:
- ReadWriteOnce
resources:
requests:
storage: 10Gi
subPath: rust-source
- name: docker-credentials
secret:
secretName: harbor-plain
params:
- name: image-name
value: "harbor.fizz.buzz/private/natter"
- name: target-name
value: ""
- name: path-to-image-context
value: .
- name: path-to-dockerfile
value: docker/natter/

View File

@ -1,369 +0,0 @@
apiVersion: tekton.dev/v1
kind: PipelineRun
metadata:
name: rust-format
spec:
timeouts:
pipeline: "2h0m0s"
tasks: "1h0m0s"
finally: "0h30m0s"
taskRunTemplate:
serviceAccountName: build-bot
pipelineSpec:
params:
- name: image-name
description: The name for the built image
type: string
- name: target-name
description: The dockerfile target to build
type: string
- name: path-to-image-context
description: The path to the build context
type: string
- name: path-to-dockerfile
description: The path to the Dockerfile
type: string
tasks:
- name: report-pending
taskRef:
resolver: git
params:
- name: url
value: https://code.fizz.buzz/mirror/catalog.git # mirror of https://github.com/tektoncd/catalog.git
- name: revision
value: df36b3853a5657fd883015cdbf07ad6466918acf
- name: pathInRepo
value: task/gitea-set-status/0.1/gitea-set-status.yaml
params:
- name: CONTEXT
value: "$(params.JOB_NAME)"
- name: REPO_FULL_NAME
value: "$(params.REPO_OWNER)/$(params.REPO_NAME)"
- name: GITEA_HOST_URL
value: code.fizz.buzz
- name: SHA
value: "$(tasks.fetch-repository.results.commit)"
- name: DESCRIPTION
value: "Build $(params.JOB_NAME) has started"
- name: STATE
value: pending
- name: TARGET_URL
value: "https://tekton.fizz.buzz/#/namespaces/$(context.pipelineRun.namespace)/pipelineruns/$(context.pipelineRun.name)"
- name: fetch-repository
taskRef:
resolver: git
params:
- name: url
value: https://code.fizz.buzz/mirror/catalog.git # mirror of https://github.com/tektoncd/catalog.git
- name: revision
value: df36b3853a5657fd883015cdbf07ad6466918acf
- name: pathInRepo
value: task/git-clone/0.9/git-clone.yaml
workspaces:
- name: output
workspace: git-source
params:
- name: url
value: $(params.REPO_URL)
- name: revision
value: $(params.PULL_BASE_SHA)
- name: deleteExisting
value: "true"
- name: get-git-commit-time
taskSpec:
metadata: {}
stepTemplate:
image: alpine:3.20
computeResources:
requests:
cpu: 10m
memory: 600Mi
workingDir: "$(workspaces.repo.path)"
results:
- name: unix-time
description: The time of the git commit in unix timestamp format.
steps:
- image: alpine/git:v2.34.2
name: detect-tag-step
script: |
#!/usr/bin/env sh
set -euo pipefail
echo -n "$(git log -1 --pretty=%ct)" | tee $(results.unix-time.path)
workspaces:
- name: repo
workspace: git-source
runAfter:
- fetch-repository
- name: build-image
taskRef:
resolver: git
params:
- name: url
value: https://code.fizz.buzz/talexander/personal_tekton_catalog.git
- name: revision
value: 7ee31a185243ee6da13dcd26a592c585b64c80e5
- name: pathInRepo
value: task/buildkit-rootless-daemonless/0.1/buildkit-rootless-daemonless.yaml
params:
- name: OUTPUT
value: >-
type=image,"name=$(params.image-name):latest,$(params.image-name):$(tasks.fetch-repository.results.commit)",push=true,compression=zstd,compression-level=22,oci-mediatypes=true
- name: CONTEXT
value: $(params.path-to-image-context)
- name: DOCKERFILE
value: $(params.path-to-dockerfile)
- name: EXTRA_ARGS
value:
- "--opt"
- "target=$(params.target-name)"
- --import-cache
- "type=registry,ref=$(params.image-name):buildcache"
- --export-cache
- "type=registry,ref=$(params.image-name):buildcache,mode=max,compression=zstd,compression-level=22,rewrite-timestamp=true,image-manifest=true,oci-mediatypes=true"
- --opt
- build-arg:SOURCE_DATE_EPOCH=$(tasks.get-git-commit-time.results.unix-time)
- name: BUILDKITD_TOML
value: |
debug = true
[registry."docker.io"]
mirrors = ["dockerhub.dockerhub.svc.cluster.local"]
[registry."dockerhub.dockerhub.svc.cluster.local"]
http = true
insecure = true
workspaces:
- name: source
workspace: git-source
- name: dockerconfig
workspace: docker-credentials
runAfter:
- fetch-repository
- name: run-cargo-fmt
taskSpec:
metadata: {}
params:
- name: docker-image
type: string
description: Docker image to run.
default: alpine:3.20
stepTemplate:
image: alpine:3.20
computeResources:
requests:
cpu: 10m
memory: 600Mi
workingDir: /workspace/source
workspaces:
- name: source
mountPath: /source
- name: cargo-cache
mountPath: /usr/local/cargo/registry
optional: true
steps:
- name: run
image: $(params.docker-image)
workingDir: "$(workspaces.source.path)"
command: ["cargo", "fmt"]
args: []
env:
- name: CARGO_TARGET_DIR
value: /target
workspaces:
- name: source
workspace: git-source
- name: cargo-cache
workspace: cargo-cache
runAfter:
- build-image
params:
- name: docker-image
value: "$(tasks.build-image.results.IMAGE_URL[1])"
- name: run-prettier
taskSpec:
metadata: {}
params:
- name: docker-image
type: string
description: Docker image to run.
default: alpine:3.20
stepTemplate:
image: alpine:3.20
computeResources:
requests:
cpu: 10m
memory: 600Mi
workingDir: /workspace/source
workspaces:
- name: source
mountPath: /source
steps:
- name: run
image: $(params.docker-image)
workingDir: "$(workspaces.source.path)"
command: ["sh", "-c"]
args:
- |
prettier --write --no-error-on-unmatched-pattern "default_environment/**/*.js" "default_environment/**/*.css"
workspaces:
- name: source
workspace: git-source
runAfter:
- build-image
params:
- name: docker-image
value: "$(tasks.build-image.results.IMAGE_URL[1])"
- name: commit-changes
taskRef:
resolver: git
params:
- name: url
value: https://code.fizz.buzz/mirror/catalog.git # mirror of https://github.com/tektoncd/catalog.git
- name: revision
value: df36b3853a5657fd883015cdbf07ad6466918acf
- name: pathInRepo
value: task/git-cli/0.4/git-cli.yaml
params:
- name: GIT_USER_NAME
value: fluxcdbot
- name: GIT_USER_EMAIL
value: "fluxcdbot@users.noreply.github.com"
- name: GIT_SCRIPT
value: |
pwd
git config --global --add safe.directory /workspace/source
git_status=$(git status --porcelain)
if [ -n "$git_status" ]; then
git commit -a -m "CI: autofix rust code."
git push origin HEAD:$(params.PULL_BASE_REF)
else
echo "No changes to commit."
fi
workspaces:
- name: source
workspace: git-source
runAfter:
- run-cargo-fmt
- run-prettier
finally:
- name: report-success
when:
- input: "$(tasks.status)"
operator: in
values: ["Succeeded", "Completed"]
taskRef:
resolver: git
params:
- name: url
value: https://code.fizz.buzz/mirror/catalog.git # mirror of https://github.com/tektoncd/catalog.git
- name: revision
value: df36b3853a5657fd883015cdbf07ad6466918acf
- name: pathInRepo
value: task/gitea-set-status/0.1/gitea-set-status.yaml
params:
- name: CONTEXT
value: "$(params.JOB_NAME)"
- name: REPO_FULL_NAME
value: "$(params.REPO_OWNER)/$(params.REPO_NAME)"
- name: GITEA_HOST_URL
value: code.fizz.buzz
- name: SHA
value: "$(tasks.fetch-repository.results.commit)"
- name: DESCRIPTION
value: "Build $(params.JOB_NAME) has succeeded"
- name: STATE
value: success
- name: TARGET_URL
value: "https://tekton.fizz.buzz/#/namespaces/$(context.pipelineRun.namespace)/pipelineruns/$(context.pipelineRun.name)"
- name: report-failure
when:
- input: "$(tasks.status)"
operator: in
values: ["Failed"]
taskRef:
resolver: git
params:
- name: url
value: https://code.fizz.buzz/mirror/catalog.git # mirror of https://github.com/tektoncd/catalog.git
- name: revision
value: df36b3853a5657fd883015cdbf07ad6466918acf
- name: pathInRepo
value: task/gitea-set-status/0.1/gitea-set-status.yaml
params:
- name: CONTEXT
value: "$(params.JOB_NAME)"
- name: REPO_FULL_NAME
value: "$(params.REPO_OWNER)/$(params.REPO_NAME)"
- name: GITEA_HOST_URL
value: code.fizz.buzz
- name: SHA
value: "$(tasks.fetch-repository.results.commit)"
- name: DESCRIPTION
value: "Build $(params.JOB_NAME) has failed"
- name: STATE
value: failure
- name: TARGET_URL
value: "https://tekton.fizz.buzz/#/namespaces/$(context.pipelineRun.namespace)/pipelineruns/$(context.pipelineRun.name)"
- name: cargo-cache-autoclean
taskSpec:
metadata: {}
params:
- name: docker-image
type: string
description: Docker image to run.
default: alpine:3.20
stepTemplate:
image: alpine:3.20
computeResources:
requests:
cpu: 10m
memory: 600Mi
workingDir: /workspace/source
workspaces:
- name: source
mountPath: /source
- name: cargo-cache
mountPath: /usr/local/cargo/registry
optional: true
steps:
- name: run
image: $(params.docker-image)
workingDir: "$(workspaces.source.path)"
command: [cargo, cache, --autoclean]
args: []
workspaces:
- name: source
workspace: git-source
- name: cargo-cache
workspace: cargo-cache
params:
- name: docker-image
value: "$(tasks.build-image.results.IMAGE_URL[1])"
workspaces:
- name: git-source
- name: docker-credentials
- name: cargo-cache
workspaces:
- name: git-source
volumeClaimTemplate:
spec:
storageClassName: "nfs-client"
accessModes:
- ReadWriteOnce
resources:
requests:
storage: 10Gi
subPath: rust-source
- name: cargo-cache
persistentVolumeClaim:
claimName: natter-cargo-cache-fmt
- name: docker-credentials
secret:
secretName: harbor-plain
params:
- name: image-name
value: "harbor.fizz.buzz/private/natter-development-format"
- name: target-name
value: ""
- name: path-to-image-context
value: docker/natter_development/
- name: path-to-dockerfile
value: docker/natter_development/

View File

@ -1,313 +0,0 @@
apiVersion: tekton.dev/v1
kind: PipelineRun
metadata:
name: rust-clippy
spec:
taskRunTemplate:
serviceAccountName: build-bot
timeouts:
pipeline: "2h0m0s"
tasks: "1h0m40s"
finally: "0h30m0s"
pipelineSpec:
params:
- name: image-name
description: The name for the built image
type: string
- name: target-name
description: The dockerfile target to build
type: string
- name: path-to-image-context
description: The path to the build context
type: string
- name: path-to-dockerfile
description: The path to the Dockerfile
type: string
tasks:
- name: report-pending
taskRef:
resolver: git
params:
- name: url
value: https://code.fizz.buzz/mirror/catalog.git # mirror of https://github.com/tektoncd/catalog.git
- name: revision
value: df36b3853a5657fd883015cdbf07ad6466918acf
- name: pathInRepo
value: task/gitea-set-status/0.1/gitea-set-status.yaml
params:
- name: CONTEXT
value: "$(params.JOB_NAME)"
- name: REPO_FULL_NAME
value: "$(params.REPO_OWNER)/$(params.REPO_NAME)"
- name: GITEA_HOST_URL
value: code.fizz.buzz
- name: SHA
value: "$(tasks.fetch-repository.results.commit)"
- name: DESCRIPTION
value: "Build $(params.JOB_NAME) has started"
- name: STATE
value: pending
- name: TARGET_URL
value: "https://tekton.fizz.buzz/#/namespaces/$(context.pipelineRun.namespace)/pipelineruns/$(context.pipelineRun.name)"
- name: fetch-repository
taskRef:
resolver: git
params:
- name: url
value: https://code.fizz.buzz/mirror/catalog.git # mirror of https://github.com/tektoncd/catalog.git
- name: revision
value: df36b3853a5657fd883015cdbf07ad6466918acf
- name: pathInRepo
value: task/git-clone/0.9/git-clone.yaml
workspaces:
- name: output
workspace: git-source
params:
- name: url
value: $(params.REPO_URL)
- name: revision
value: $(params.PULL_BASE_SHA)
- name: deleteExisting
value: "true"
- name: get-git-commit-time
taskSpec:
metadata: {}
stepTemplate:
image: alpine:3.20
computeResources:
requests:
cpu: 10m
memory: 600Mi
workingDir: "$(workspaces.repo.path)"
results:
- name: unix-time
description: The time of the git commit in unix timestamp format.
steps:
- image: alpine/git:v2.34.2
name: detect-tag-step
script: |
#!/usr/bin/env sh
set -euo pipefail
echo -n "$(git log -1 --pretty=%ct)" | tee $(results.unix-time.path)
workspaces:
- name: repo
workspace: git-source
runAfter:
- fetch-repository
- name: build-image
taskRef:
resolver: git
params:
- name: url
value: https://code.fizz.buzz/talexander/personal_tekton_catalog.git
- name: revision
value: 7ee31a185243ee6da13dcd26a592c585b64c80e5
- name: pathInRepo
value: task/buildkit-rootless-daemonless/0.1/buildkit-rootless-daemonless.yaml
params:
- name: OUTPUT
value: >-
type=image,"name=$(params.image-name):latest,$(params.image-name):$(tasks.fetch-repository.results.commit)",push=true,compression=zstd,compression-level=22,oci-mediatypes=true
- name: CONTEXT
value: $(params.path-to-image-context)
- name: DOCKERFILE
value: $(params.path-to-dockerfile)
- name: EXTRA_ARGS
value:
- "--opt"
- "target=$(params.target-name)"
- --import-cache
- "type=registry,ref=$(params.image-name):buildcache"
- --export-cache
- "type=registry,ref=$(params.image-name):buildcache,mode=max,compression=zstd,compression-level=22,rewrite-timestamp=true,image-manifest=true,oci-mediatypes=true"
- --opt
- build-arg:SOURCE_DATE_EPOCH=$(tasks.get-git-commit-time.results.unix-time)
- name: BUILDKITD_TOML
value: |
debug = true
[registry."docker.io"]
mirrors = ["dockerhub.dockerhub.svc.cluster.local"]
[registry."dockerhub.dockerhub.svc.cluster.local"]
http = true
insecure = true
workspaces:
- name: source
workspace: git-source
- name: dockerconfig
workspace: docker-credentials
runAfter:
- fetch-repository
- name: run-cargo-clippy
taskSpec:
metadata: {}
params:
- name: docker-image
type: string
description: Docker image to run.
default: alpine:3.20
stepTemplate:
image: alpine:3.20
computeResources:
requests:
cpu: 10m
memory: 600Mi
workingDir: /workspace/source
workspaces:
- name: source
mountPath: /source
- name: cargo-cache
mountPath: /usr/local/cargo/registry
optional: true
steps:
- name: run
image: $(params.docker-image)
workingDir: "$(workspaces.source.path)"
command:
[
"cargo",
"clippy",
"--no-deps",
"--all-targets",
"--all-features",
"--",
"-D",
"warnings",
]
args: []
env:
- name: CARGO_TARGET_DIR
value: /target
workspaces:
- name: source
workspace: git-source
- name: cargo-cache
workspace: cargo-cache
runAfter:
- build-image
params:
- name: docker-image
value: "$(tasks.build-image.results.IMAGE_URL[1])"
finally:
- name: report-success
when:
- input: "$(tasks.status)"
operator: in
values: ["Succeeded", "Completed"]
taskRef:
resolver: git
params:
- name: url
value: https://code.fizz.buzz/mirror/catalog.git # mirror of https://github.com/tektoncd/catalog.git
- name: revision
value: df36b3853a5657fd883015cdbf07ad6466918acf
- name: pathInRepo
value: task/gitea-set-status/0.1/gitea-set-status.yaml
params:
- name: CONTEXT
value: "$(params.JOB_NAME)"
- name: REPO_FULL_NAME
value: "$(params.REPO_OWNER)/$(params.REPO_NAME)"
- name: GITEA_HOST_URL
value: code.fizz.buzz
- name: SHA
value: "$(tasks.fetch-repository.results.commit)"
- name: DESCRIPTION
value: "Build $(params.JOB_NAME) has succeeded"
- name: STATE
value: success
- name: TARGET_URL
value: "https://tekton.fizz.buzz/#/namespaces/$(context.pipelineRun.namespace)/pipelineruns/$(context.pipelineRun.name)"
- name: report-failure
when:
- input: "$(tasks.status)"
operator: in
values: ["Failed"]
taskRef:
resolver: git
params:
- name: url
value: https://code.fizz.buzz/mirror/catalog.git # mirror of https://github.com/tektoncd/catalog.git
- name: revision
value: df36b3853a5657fd883015cdbf07ad6466918acf
- name: pathInRepo
value: task/gitea-set-status/0.1/gitea-set-status.yaml
params:
- name: CONTEXT
value: "$(params.JOB_NAME)"
- name: REPO_FULL_NAME
value: "$(params.REPO_OWNER)/$(params.REPO_NAME)"
- name: GITEA_HOST_URL
value: code.fizz.buzz
- name: SHA
value: "$(tasks.fetch-repository.results.commit)"
- name: DESCRIPTION
value: "Build $(params.JOB_NAME) has failed"
- name: STATE
value: failure
- name: TARGET_URL
value: "https://tekton.fizz.buzz/#/namespaces/$(context.pipelineRun.namespace)/pipelineruns/$(context.pipelineRun.name)"
- name: cargo-cache-autoclean
taskSpec:
metadata: {}
params:
- name: docker-image
type: string
description: Docker image to run.
default: alpine:3.20
stepTemplate:
image: alpine:3.20
computeResources:
requests:
cpu: 10m
memory: 600Mi
workingDir: /workspace/source
workspaces:
- name: source
mountPath: /source
- name: cargo-cache
mountPath: /usr/local/cargo/registry
optional: true
steps:
- name: run
image: $(params.docker-image)
workingDir: "$(workspaces.source.path)"
command: [cargo, cache, --autoclean]
args: []
workspaces:
- name: source
workspace: git-source
- name: cargo-cache
workspace: cargo-cache
params:
- name: docker-image
value: "$(tasks.build-image.results.IMAGE_URL[1])"
workspaces:
- name: git-source
- name: docker-credentials
- name: cargo-cache
workspaces:
- name: git-source
volumeClaimTemplate:
spec:
storageClassName: "nfs-client"
accessModes:
- ReadWriteOnce
resources:
requests:
storage: 10Gi
subPath: rust-source
- name: cargo-cache
persistentVolumeClaim:
claimName: natter-cargo-cache-clippy
- name: docker-credentials
secret:
secretName: harbor-plain
params:
- name: image-name
value: "harbor.fizz.buzz/private/natter-development-clippy"
- name: target-name
value: ""
- name: path-to-image-context
value: docker/natter_development/
- name: path-to-dockerfile
value: docker/natter_development/

View File

@ -1,303 +0,0 @@
apiVersion: tekton.dev/v1
kind: PipelineRun
metadata:
name: rust-test
spec:
timeouts:
pipeline: "2h0m0s"
tasks: "1h0m40s"
finally: "0h30m0s"
taskRunTemplate:
serviceAccountName: build-bot
pipelineSpec:
params:
- name: image-name
description: The name for the built image
type: string
- name: target-name
description: The dockerfile target to build
type: string
- name: path-to-image-context
description: The path to the build context
type: string
- name: path-to-dockerfile
description: The path to the Dockerfile
type: string
tasks:
- name: report-pending
taskRef:
resolver: git
params:
- name: url
value: https://code.fizz.buzz/mirror/catalog.git # mirror of https://github.com/tektoncd/catalog.git
- name: revision
value: df36b3853a5657fd883015cdbf07ad6466918acf
- name: pathInRepo
value: task/gitea-set-status/0.1/gitea-set-status.yaml
params:
- name: CONTEXT
value: "$(params.JOB_NAME)"
- name: REPO_FULL_NAME
value: "$(params.REPO_OWNER)/$(params.REPO_NAME)"
- name: GITEA_HOST_URL
value: code.fizz.buzz
- name: SHA
value: "$(tasks.fetch-repository.results.commit)"
- name: DESCRIPTION
value: "Build $(params.JOB_NAME) has started"
- name: STATE
value: pending
- name: TARGET_URL
value: "https://tekton.fizz.buzz/#/namespaces/$(context.pipelineRun.namespace)/pipelineruns/$(context.pipelineRun.name)"
- name: fetch-repository
taskRef:
resolver: git
params:
- name: url
value: https://code.fizz.buzz/mirror/catalog.git # mirror of https://github.com/tektoncd/catalog.git
- name: revision
value: df36b3853a5657fd883015cdbf07ad6466918acf
- name: pathInRepo
value: task/git-clone/0.9/git-clone.yaml
workspaces:
- name: output
workspace: git-source
params:
- name: url
value: $(params.REPO_URL)
- name: revision
value: $(params.PULL_BASE_SHA)
- name: deleteExisting
value: "true"
- name: get-git-commit-time
taskSpec:
metadata: {}
stepTemplate:
image: alpine:3.20
computeResources:
requests:
cpu: 10m
memory: 600Mi
workingDir: "$(workspaces.repo.path)"
results:
- name: unix-time
description: The time of the git commit in unix timestamp format.
steps:
- image: alpine/git:v2.34.2
name: detect-tag-step
script: |
#!/usr/bin/env sh
set -euo pipefail
echo -n "$(git log -1 --pretty=%ct)" | tee $(results.unix-time.path)
workspaces:
- name: repo
workspace: git-source
runAfter:
- fetch-repository
- name: build-image
taskRef:
resolver: git
params:
- name: url
value: https://code.fizz.buzz/talexander/personal_tekton_catalog.git
- name: revision
value: 7ee31a185243ee6da13dcd26a592c585b64c80e5
- name: pathInRepo
value: task/buildkit-rootless-daemonless/0.1/buildkit-rootless-daemonless.yaml
params:
- name: OUTPUT
value: >-
type=image,"name=$(params.image-name):latest,$(params.image-name):$(tasks.fetch-repository.results.commit)",push=true,compression=zstd,compression-level=22,oci-mediatypes=true
- name: CONTEXT
value: $(params.path-to-image-context)
- name: DOCKERFILE
value: $(params.path-to-dockerfile)
- name: EXTRA_ARGS
value:
- "--opt"
- "target=$(params.target-name)"
- --import-cache
- "type=registry,ref=$(params.image-name):buildcache"
- --export-cache
- "type=registry,ref=$(params.image-name):buildcache,mode=max,compression=zstd,compression-level=22,rewrite-timestamp=true,image-manifest=true,oci-mediatypes=true"
- --opt
- build-arg:SOURCE_DATE_EPOCH=$(tasks.get-git-commit-time.results.unix-time)
- name: BUILDKITD_TOML
value: |
debug = true
[registry."docker.io"]
mirrors = ["dockerhub.dockerhub.svc.cluster.local"]
[registry."dockerhub.dockerhub.svc.cluster.local"]
http = true
insecure = true
workspaces:
- name: source
workspace: git-source
- name: dockerconfig
workspace: docker-credentials
runAfter:
- fetch-repository
- name: run-cargo-test
taskSpec:
metadata: {}
params:
- name: docker-image
type: string
description: Docker image to run.
default: alpine:3.20
stepTemplate:
image: alpine:3.20
computeResources:
requests:
cpu: 10m
memory: 600Mi
workingDir: /workspace/source
workspaces:
- name: source
mountPath: /source
- name: cargo-cache
mountPath: /usr/local/cargo/registry
optional: true
steps:
- name: run
image: $(params.docker-image)
workingDir: "$(workspaces.source.path)"
command: [cargo, test, --no-fail-fast]
args: []
env:
- name: CARGO_TARGET_DIR
value: /target
workspaces:
- name: source
workspace: git-source
- name: cargo-cache
workspace: cargo-cache
runAfter:
- build-image
params:
- name: docker-image
value: "$(tasks.build-image.results.IMAGE_URL[1])"
finally:
- name: report-success
when:
- input: "$(tasks.status)"
operator: in
values: ["Succeeded", "Completed"]
taskRef:
resolver: git
params:
- name: url
value: https://code.fizz.buzz/mirror/catalog.git # mirror of https://github.com/tektoncd/catalog.git
- name: revision
value: df36b3853a5657fd883015cdbf07ad6466918acf
- name: pathInRepo
value: task/gitea-set-status/0.1/gitea-set-status.yaml
params:
- name: CONTEXT
value: "$(params.JOB_NAME)"
- name: REPO_FULL_NAME
value: "$(params.REPO_OWNER)/$(params.REPO_NAME)"
- name: GITEA_HOST_URL
value: code.fizz.buzz
- name: SHA
value: "$(tasks.fetch-repository.results.commit)"
- name: DESCRIPTION
value: "Build $(params.JOB_NAME) has succeeded"
- name: STATE
value: success
- name: TARGET_URL
value: "https://tekton.fizz.buzz/#/namespaces/$(context.pipelineRun.namespace)/pipelineruns/$(context.pipelineRun.name)"
- name: report-failure
when:
- input: "$(tasks.status)"
operator: in
values: ["Failed"]
taskRef:
resolver: git
params:
- name: url
value: https://code.fizz.buzz/mirror/catalog.git # mirror of https://github.com/tektoncd/catalog.git
- name: revision
value: df36b3853a5657fd883015cdbf07ad6466918acf
- name: pathInRepo
value: task/gitea-set-status/0.1/gitea-set-status.yaml
params:
- name: CONTEXT
value: "$(params.JOB_NAME)"
- name: REPO_FULL_NAME
value: "$(params.REPO_OWNER)/$(params.REPO_NAME)"
- name: GITEA_HOST_URL
value: code.fizz.buzz
- name: SHA
value: "$(tasks.fetch-repository.results.commit)"
- name: DESCRIPTION
value: "Build $(params.JOB_NAME) has failed"
- name: STATE
value: failure
- name: TARGET_URL
value: "https://tekton.fizz.buzz/#/namespaces/$(context.pipelineRun.namespace)/pipelineruns/$(context.pipelineRun.name)"
- name: cargo-cache-autoclean
taskSpec:
metadata: {}
params:
- name: docker-image
type: string
description: Docker image to run.
default: alpine:3.20
stepTemplate:
image: alpine:3.20
computeResources:
requests:
cpu: 10m
memory: 600Mi
workingDir: /workspace/source
workspaces:
- name: source
mountPath: /source
- name: cargo-cache
mountPath: /usr/local/cargo/registry
optional: true
steps:
- name: run
image: $(params.docker-image)
workingDir: "$(workspaces.source.path)"
command: [cargo, cache, --autoclean]
args: []
workspaces:
- name: source
workspace: git-source
- name: cargo-cache
workspace: cargo-cache
params:
- name: docker-image
value: "$(tasks.build-image.results.IMAGE_URL[1])"
workspaces:
- name: git-source
- name: docker-credentials
- name: cargo-cache
workspaces:
- name: git-source
volumeClaimTemplate:
spec:
storageClassName: "nfs-client"
accessModes:
- ReadWriteOnce
resources:
requests:
storage: 10Gi
subPath: rust-source
- name: cargo-cache
persistentVolumeClaim:
claimName: natter-cargo-cache-test
- name: docker-credentials
secret:
secretName: harbor-plain
params:
- name: image-name
value: "harbor.fizz.buzz/private/natter-development-test"
- name: target-name
value: ""
- name: path-to-image-context
value: docker/natter_development/
- name: path-to-dockerfile
value: docker/natter_development/

View File

@ -1,25 +0,0 @@
version = "0.0.1"
[[push]]
name = "rust-test"
source = "pipeline-rust-test.yaml"
clone_uri = "git@code.fizz.buzz:talexander/natter.git"
skip_branches = [ "^v[0-9]+\\.[0-9]+\\.[0-9]+$" ]
[[push]]
name = "clippy"
source = "pipeline-rust-clippy.yaml"
clone_uri = "git@code.fizz.buzz:talexander/natter.git"
skip_branches = [ "^v[0-9]+\\.[0-9]+\\.[0-9]+$" ]
[[push]]
name = "format"
source = "pipeline-format.yaml"
clone_uri = "git@code.fizz.buzz:talexander/natter.git"
skip_branches = [ "^v[0-9]+\\.[0-9]+\\.[0-9]+$" ]
[[push]]
name = "build"
source = "pipeline-build-hash.yaml"
clone_uri = "git@code.fizz.buzz:talexander/natter.git"
branches = [ "^main$", "^master$" ]

1442
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@ -26,29 +26,9 @@ futures = "0.3.29"
include_dir = "0.7.3"
# TODO: This is temporary to work on the latest organic code. Eventually switch back to using the published crate.
# organic = { path = "../organic" }
organic = "0.1.16"
organic = "0.1.13"
serde = { version = "1.0.189", default-features = false, features = ["std", "derive"] }
serde_json = "1.0.107"
tokio = { version = "1.30.0", default-features = false, features = ["rt", "rt-multi-thread", "fs", "io-util"] }
toml = "0.8.2"
tree-sitter-bash = "0.23.3"
tree-sitter-highlight = "0.25.2"
tree-sitter-nix = "0.0.2"
tree-sitter-python = "0.23.6"
url = "2.5.0"
tracing = { version = "0.1.37", optional = true }
tracing-opentelemetry = { version = "0.20.0", optional = true }
tracing-subscriber = { version = "0.3.17", optional = true, features = ["env-filter"] }
opentelemetry = { version = "0.20.0", optional = true, default-features = false, features = ["trace", "rt-tokio"] }
opentelemetry-otlp = { version = "0.13.0", optional = true }
opentelemetry-semantic-conventions = { version = "0.12.0", optional = true }
[features]
default = ["tracing"]
tracing = ["dep:opentelemetry", "dep:opentelemetry-otlp", "dep:opentelemetry-semantic-conventions", "dep:tracing", "dep:tracing-opentelemetry", "dep:tracing-subscriber"]
# Optimized build for any sort of release.
[profile.release-lto]
inherits = "release"
lto = true
strip = "symbols"
walkdir = "2.4.0"

View File

@ -1,39 +0,0 @@
SHELL := bash
.ONESHELL:
.SHELLFLAGS := -eu -o pipefail -c
.DELETE_ON_ERROR:
MAKEFLAGS += --warn-undefined-variables
MAKEFLAGS += --no-builtin-rules
OS:=$(shell uname -s)
ifeq ($(origin .RECIPEPREFIX), undefined)
$(error This Make does not support .RECIPEPREFIX. Please use GNU Make 4.0 or later)
endif
.RECIPEPREFIX = >
IMAGE_NAME:=natter
TARGET :=
.PHONY: help
help:
> @grep -h "##" $(MAKEFILE_LIST) | grep -v grep | sed -E 's/^([^:]*): *## */\1: /'
.PHONY: docker_test
docker_test: ## Run the rust tests
> $(MAKE) -C docker/natter_development build
> docker run --rm -i -t --mount type=tmpfs,destination=/tmp -v "$(shell readlink -f .):/source" --workdir=/source --env CARGO_TARGET_DIR=/target -v "natter-cargo-registry:/usr/local/cargo/registry" natter-development cargo test
.PHONY: docker_clippy
docker_clippy: ## Run static analysis of the code.
> $(MAKE) -C docker/natter_development build
> docker run --rm -i -t --mount type=tmpfs,destination=/tmp -v "$(shell readlink -f .):/source" --workdir=/source --env CARGO_TARGET_DIR=/target -v "natter-cargo-registry:/usr/local/cargo/registry" natter-development cargo clippy --no-deps --all-targets --all-features -- -D warnings
.PHONY: docker_format
docker_format: ## Auto-format source files.
> $(MAKE) -C docker/natter_development build
> docker run --rm -i -t --mount type=tmpfs,destination=/tmp -v "$(shell readlink -f .):/source" --workdir=/source --env CARGO_TARGET_DIR=/target -v "natter-cargo-registry:/usr/local/cargo/registry" natter-development cargo fmt
> docker run --rm -i -t --mount type=tmpfs,destination=/tmp -v "$(shell readlink -f .):/source" --workdir=/source --env CARGO_TARGET_DIR=/target -v "natter-cargo-registry:/usr/local/cargo/registry" natter-development prettier --write --no-error-on-unmatched-pattern "default_environment/**/*.js" "default_environment/**/*.css"
.PHONY: clean
clean:
> $(MAKE) -C docker/natter_development clean

View File

@ -1,21 +0,0 @@
* Things to do [6/17]
** DONE If the paragraph only contains an image, text-align center
** DONE Syntax highlighting for code blocks
** TODO Render gnuplot
** TODO Pretty-print the timestamps
** TODO Support Table of Contents
** TODO Support line numbers in code blocks
** TODO Support references to code block lines
** TODO Only include text up to first heading on homepage and include a "read more" link
** DONE Make loading language-specific CSS files conditional on the presence of src blocks using those languages
** DONE Set up tracing so I can use warning and such
** TODO Make copying of language-specific CSS files conditional on the presence of src blocks using those languages
** TODO Switch to an entirely lazily-evaluated output tree
** TODO Add highlighting for languages [1/2]
*** DONE bash
*** TODO gnuplot
https://github.com/dpezto/tree-sitter-gnuplot is not on crates.io so I'd have to add a git dependency to use it. This would prevent publishing this crate to crates.io.
** DONE Bug: carry over highlight starts when breaking lines
** TODO Add dates to posts
** DONE Add support for unlisted posts (posts that do not show up on the homepage).
** TODO Add support for showing file name where we currently show language

View File

@ -1,39 +0,0 @@
:root {
--srclg-bash-srchl-comment-color: #048a81;
--srclg-bash-srchl-function-color: #e95a62;
--srclg-bash-srchl-keyword-color: #1a936f;
--srclg-bash-srchl-property-color: inherit;
--srclg-bash-srchl-string-color: #ecc30b;
}
@media (prefers-color-scheme: light) {
:root {
--srclg-bash-srchl-comment-color: #fb757e;
--srclg-bash-srchl-function-color: #16a59d;
--srclg-bash-srchl-keyword-color: #e56c90;
--srclg-bash-srchl-property-color: inherit;
--srclg-bash-srchl-string-color: #133cf4;
}
}
.main_content {
.src_block {
&.srclg_bash {
.srchl_comment {
color: var(--srclg-bash-srchl-comment-color);
}
.srchl_function {
color: var(--srclg-bash-srchl-function-color);
}
.srchl_keyword {
color: var(--srclg-bash-srchl-keyword-color);
}
.srchl_property {
color: var(--srclg-bash-srchl-property-color);
}
.srchl_string {
color: var(--srclg-bash-srchl-string-color);
}
}
}
}

View File

@ -1,39 +0,0 @@
:root {
--srclg-nix-srchl-keyword-color: #1a936f;
--srclg-nix-srchl-comment-color: #048a81;
--srclg-nix-srchl-property-color: #bfbccb;
--srclg-nix-srchl-string-color: #ecc30b;
--srclg-nix-srchl-string-special-path-color: #067bc2;
}
@media (prefers-color-scheme: light) {
:root {
--srclg-nix-srchl-keyword-color: #e56c90;
--srclg-nix-srchl-comment-color: #fb757e;
--srclg-nix-srchl-property-color: #404334;
--srclg-nix-srchl-string-color: #133cf4;
--srclg-nix-srchl-string-special-path-color: #f9843d;
}
}
.main_content {
.src_block {
&.srclg_nix {
.srchl_keyword {
color: var(--srclg-nix-srchl-keyword-color);
}
.srchl_comment {
color: var(--srclg-nix-srchl-comment-color);
}
.srchl_property {
color: var(--srclg-nix-srchl-property-color);
}
.srchl_string {
color: var(--srclg-nix-srchl-string-color);
}
.srchl_string_special_path {
color: var(--srclg-nix-srchl-string-special-path-color);
}
}
}
}

View File

@ -1,51 +0,0 @@
/* ea912c */
/* e95a62 */
:root {
--srclg-python-srchl-comment-color: #048a81;
--srclg-python-srchl-function-builtin-color: #e95a62;
--srclg-python-srchl-keyword-color: #1a936f;
--srclg-python-srchl-property-color: inherit;
--srclg-python-srchl-string-color: #ecc30b;
--srclg-python-srchl-type-color: #067bc2;
--srclg-python-srchl-variable-color: #ea912c;
}
@media (prefers-color-scheme: light) {
:root {
--srclg-python-srchl-comment-color: #fb757e;
--srclg-python-srchl-function-builtin-color: #16a59d;
--srclg-python-srchl-keyword-color: #e56c90;
--srclg-python-srchl-property-color: inherit;
--srclg-python-srchl-string-color: #133cf4;
--srclg-python-srchl-type-color: #f9843d;
--srclg-python-srchl-variable-color: #156ed3;
}
}
.main_content {
.src_block {
&.srclg_python {
.srchl_comment {
color: var(--srclg-python-srchl-comment-color);
}
.srchl_function_builtin {
color: var(--srclg-python-srchl-function-builtin-color);
}
.srchl_keyword {
color: var(--srclg-python-srchl-keyword-color);
}
.srchl_property {
color: var(--srclg-python-srchl-property-color);
}
.srchl_string {
color: var(--srclg-python-srchl-string-color);
}
.srchl_type {
color: var(--srclg-python-srchl-type-color);
}
.srchl_variable {
color: var(--srclg-python-srchl-variable-color);
}
}
}
}

View File

@ -5,12 +5,8 @@
--header-divider-color: #6a687a;
--stream-divider-color: #6ccff6;
--stream-post-background-color: #0a0a0a;
--blog-post-background-color: #0a0a0a;
--src-font-family: ui-monospace, "Cascadia Code", "Source Code Pro", Menlo,
Consolas, "DejaVu Sans Mono", monospace;
--src-font-family: ui-monospace, 'Cascadia Code', 'Source Code Pro', Menlo, Consolas, 'DejaVu Sans Mono', monospace;
--src-block-background-color: #141414;
--src-block-border-color: #84828f;
@ -18,62 +14,14 @@
--src-block-language-background: #84828f;
--quote-block-border-color: #84828f;
--table-border-color: #6a687a;
--table-odd-background-color: #0a0a0a;
--table-even-background-color: #141414;
--header-nav-regular-font-color: var(--site-text-color);
--header-nav-regular-background-color: var(--site-background-color);
--header-nav-hover-font-color: var(--site-background-color);
--header-nav-hover-background-color: var(--site-text-color);
--header-home-regular-font-color: var(--site-text-color);
--header-home-hover-font-color: #6ccff6;
}
@media (prefers-color-scheme: light) {
:root {
--site-background-color: #f5f5f5;
--site-text-color: #000003;
--header-divider-color: #959785;
--stream-divider-color: #933009;
--stream-post-background-color: #f5f5f5;
--blog-post-background-color: #f5f5f5;
--src-block-background-color: #ebebeb;
--src-block-border-color: #7b7d70;
--src-block-language-color: #f5f5f5;
--src-block-language-background: #7b7d70;
--quote-block-border-color: #7b7d70;
--table-border-color: #959785;
--table-odd-background-color: #f5f5f5;
--table-even-background-color: #ebebeb;
--header-nav-regular-font-color: var(--site-text-color);
--header-nav-regular-background-color: var(--site-background-color);
--header-nav-hover-font-color: var(--site-background-color);
--header-nav-hover-background-color: var(--site-text-color);
--header-home-regular-font-color: var(--site-text-color);
--header-home-hover-font-color: #933009;
}
}
* {
box-sizing: border-box;
}
body {
color: var(--site-text-color);
background-color: var(--site-background-color);
font-family: source-sans-pro, Seravek, "Gill Sans Nova", Ubuntu, Calibri,
"DejaVu Sans", sans-serif;
font-family: source-sans-pro, Seravek, 'Gill Sans Nova', Ubuntu, Calibri, 'DejaVu Sans', sans-serif;
a:link,
a:visited {
a:link, a:visited {
/* TODO: Should I use a different color for links? */
color: var(--site-text-color);
}
@ -86,57 +34,17 @@ body {
}
.page_header {
display: flex;
flex-direction: row;
justify-content: space-between;
align-items: stretch;
width: 100%;
max-width: var(--main-max-width);
border-bottom: 0.1rem solid var(--header-divider-color);
.home_link {
display: block;
font-size: 2rem;
font-size: 1.2rem;
font-weight: 600;
text-decoration: none;
color: var(--header-home-regular-font-color);
transition-property: color;
transition-duration: 0.1s;
transition-timing-function: ease-out;
&:hover {
color: var(--header-home-hover-font-color) !important;
}
&:link,
&:visited {
color: inherit;
}
}
.header_nav_bar {
display: flex;
flex-direction: row;
flex-wrap: wrap;
justify-content: end;
align-items: stretch;
column-gap: 1rem;
.nav_link {
display: flex;
flex-direction: column;
justify-content: space-around;
color: var(--header-nav-regular-font-color);
background: var(--header-nav-regular-background-color);
padding: 0 0.5rem;
transition-property: background, color;
transition-duration: 0.1s;
transition-timing-function: ease-out;
&:hover {
color: var(--header-nav-hover-font-color);
background: var(--header-nav-hover-background-color);
}
&:link, &:visited {
color: var(--site-text-color);
}
}
}
@ -145,64 +53,34 @@ body {
width: 100%;
max-width: var(--main-max-width);
font-size: 1.2rem;
line-height: 1.4;
padding-bottom: 8rem;
line-height: 1.2;
/* A stand-alone blog post (not in a blog stream). */
.blog_post {
padding: 1rem 0.2rem 0 0.2rem;
background: var(--blog-post-background-color);
padding: 1rem 0 3rem 0;
}
.blog_stream {
.stream_divider {
color: var(--stream-divider-color);
margin: 40px 0;
}
.stream_nav {
display: flex;
flex-direction: row;
align-items: center;
> a {
display: inline-block;
padding: 0.2rem 0.5rem;
font-weight: 700;
font-size: 1.5rem;
}
> .spacer {
display: inline-block;
flex: 1 1;
}
}
}
/* A blog post in a blog stream (for example, the homepage). */
.blog_stream_post {
background: var(--stream-post-background-color);
padding: 0 0.2rem;
}
.blog_stream_post {
background: var(--stream-post-background-color);
}
.blog_stream_post:nth-child(1) {
padding-top: 1rem;
background: #1F1F1F;
padding: 1rem 0.2rem;
}
.blog_post_title {
font-size: 2.9rem;
font-size: 2.5rem;
font-weight: 700;
padding-bottom: 1rem;
}
p {
margin: 1rem 0;
&.image {
text-align: center;
}
}
.src_block {
@ -212,7 +90,6 @@ body {
font-size: 1rem;
font-family: var(--src-font-family);
margin: 1rem 0;
font-variant-ligatures: none;
.src_language {
display: inline-block;
@ -229,21 +106,18 @@ body {
.src_line {
white-space: pre-wrap;
overflow-wrap: anywhere;
}
}
}
.inline_source_block {
font-family: var(--src-font-family);
font-size: 1.2rem;
font-variant-ligatures: none;
font-size: 1rem;
}
.code,
.verbatim {
.code, .verbatim {
font-family: var(--src-font-family);
font-variant-ligatures: none;
font-size: 1rem;
}
.quote_block {
@ -252,8 +126,7 @@ body {
margin: 1rem 0 1rem 2rem;
}
h2,
h3 {
h2, h3 {
margin: 1rem 0;
padding-bottom: 0.5rem;
}
@ -311,48 +184,4 @@ body {
}
}
}
.org_table {
table-layout: fixed;
border-collapse: collapse;
border: 1px solid var(--table-border-color);
> tbody {
border-width: 1px 0;
border-style: solid;
border-color: var(--table-border-color);
> tr {
> td {
padding: 0.2rem;
}
}
> tr:nth-child(odd) {
background-color: var(--table-odd-background-color);
}
> tr:nth-child(even) {
background-color: var(--table-even-background-color);
}
}
> thead {
border-width: 1px 0;
border-style: solid;
border-color: var(--table-border-color);
> tr {
> th {
padding: 0.2rem;
font-weight: 600;
}
}
}
}
b {
font-weight: 700;
}
/* Never have media larger than its container */
img,
picture,
video {
max-width: 100%;
}
}

View File

@ -3,87 +3,19 @@
License: none (public domain)
*/
html,
body,
div,
span,
applet,
object,
iframe,
h1,
h2,
h3,
h4,
h5,
h6,
p,
blockquote,
pre,
a,
abbr,
acronym,
address,
big,
cite,
code,
del,
dfn,
em,
img,
ins,
kbd,
q,
s,
samp,
small,
strike,
strong,
sub,
sup,
tt,
var,
b,
u,
i,
center,
dl,
dt,
dd,
ol,
ul,
li,
fieldset,
form,
label,
legend,
table,
caption,
tbody,
tfoot,
thead,
tr,
th,
td,
article,
aside,
canvas,
details,
embed,
figure,
figcaption,
footer,
header,
hgroup,
menu,
nav,
output,
ruby,
section,
summary,
time,
mark,
audio,
video {
html, body, div, span, applet, object, iframe,
h1, h2, h3, h4, h5, h6, p, blockquote, pre,
a, abbr, acronym, address, big, cite, code,
del, dfn, em, img, ins, kbd, q, s, samp,
small, strike, strong, sub, sup, tt, var,
b, u, i, center,
dl, dt, dd, ol, ul, li,
fieldset, form, label, legend,
table, caption, tbody, tfoot, thead, tr, th, td,
article, aside, canvas, details, embed,
figure, figcaption, footer, header, hgroup,
menu, nav, output, ruby, section, summary,
time, mark, audio, video {
margin: 0;
padding: 0;
border: 0;
@ -92,35 +24,22 @@ video {
vertical-align: baseline;
}
/* HTML5 display-role reset for older browsers */
article,
aside,
details,
figcaption,
figure,
footer,
header,
hgroup,
menu,
nav,
section {
article, aside, details, figcaption, figure,
footer, header, hgroup, menu, nav, section {
display: block;
}
body {
line-height: 1;
}
ol,
ul {
ol, ul {
list-style: none;
}
blockquote,
q {
blockquote, q {
quotes: none;
}
blockquote:before,
blockquote:after,
q:before,
q:after {
content: "";
blockquote:before, blockquote:after,
q:before, q:after {
content: '';
content: none;
}
table {

View File

@ -32,8 +32,7 @@
{@eq value="code"}{>code/}{/eq}
{@eq value="verbatim"}{>verbatim/}{/eq}
{@eq value="plain_text"}{>plain_text/}{/eq}
{@eq value="regular_link_anchor"}{>regular_link_anchor/}{/eq}
{@eq value="regular_link_image"}{>regular_link_image/}{/eq}
{@eq value="regular_link"}{>regular_link/}{/eq}
{@eq value="radio_link"}{>radio_link/}{/eq}
{@eq value="radio_target"}{>radio_target/}{/eq}
{@eq value="plain_link"}{>plain_link/}{/eq}

View File

@ -24,10 +24,8 @@
</div>
{/.children}
{#.stream_pagination}
<hr class="stream_divider" />
<div class="stream_nav">
{?.older_link}<a href="{.older_link}">Older</a>{/.older_link}
<div class="spacer"></div>
{?.newer_link}<a href="{.newer_link}">Newer</a>{/.newer_link}
</div>
{/.stream_pagination}

View File

@ -1,8 +1,7 @@
<!DOCTYPE html>
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8" />
<meta name="viewport" content="width=device-width,initial-scale=1" />
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
{#global_settings.css_files}<link rel="stylesheet" href="{.}">{/global_settings.css_files}
{#global_settings.js_files}<script type="text/javascript" src="{.}"></script>{/global_settings.js_files}
{?global_settings.page_title}<title>{global_settings.page_title}</title>{/global_settings.page_title}
@ -11,7 +10,6 @@
{#.page_header}{>page_header/}{/.page_header}
<main class="main_content">
{@select key=.type}
{@eq value="page"}{>page/}{/eq}
{@eq value="blog_post_page"}{>blog_post_page/}{/eq}
{@eq value="blog_stream"}{>blog_stream/}{/eq}
{@none}{!TODO: make this panic!}ERROR: Unrecognized page content type{/none}

View File

@ -6,8 +6,7 @@
{@eq value="code"}{>code/}{/eq}
{@eq value="verbatim"}{>verbatim/}{/eq}
{@eq value="plain_text"}{>plain_text/}{/eq}
{@eq value="regular_link_anchor"}{>regular_link_anchor/}{/eq}
{@eq value="regular_link_image"}{>regular_link_image/}{/eq}
{@eq value="regular_link"}{>regular_link/}{/eq}
{@eq value="radio_link"}{>radio_link/}{/eq}
{@eq value="radio_target"}{>radio_target/}{/eq}
{@eq value="plain_link"}{>plain_link/}{/eq}

View File

@ -1,19 +0,0 @@
<article class="page">
{?.title}<h1 class="blog_post_title"><span>{.title}</span></h1>{/.title}
{! TODO: date? !}
{! TODO: Table of contents? !}
<div class="blog_post_body">
{#.children}
{>document_element/}
{/.children}
{?.footnotes}
<h2>Footnotes:</h2>
{#.footnotes}
{>real_footnote_definition/}
{/.footnotes}
{/.footnotes}
</div>
</article>

View File

@ -1,9 +1,4 @@
<header class="page_header">
<a class="home_link" href="{.home_link}">{.website_title}</a>
{! TODO: Additional links? Probably using the nav semantic element. !}
<nav class="header_nav_bar">
{#.nav_links}
<a class="nav_link" href="{.url}"><div>{.text}</div></a>
{/.nav_links}
</nav>
</header>

View File

@ -1,3 +1,3 @@
<p class="{?.is_single_image}image{/.is_single_image}">{#.children}
<p>{#.children}
{>object/}
{/.children}</p>

View File

@ -1 +1 @@
<a href="{.raw_link}">{.raw_link}</a>
!!!!!!!! plain_link

View File

@ -0,0 +1 @@
<a href="{.raw_link}">{#.children}{>object/}{/.children}</a>

View File

@ -1 +0,0 @@
<a href="{.target}">{#.children}{>object/}{/.children}</a>

View File

@ -1 +0,0 @@
<img src="{.src}" alt="{.alt}" />

View File

@ -1,15 +1,10 @@
<div class="src_block{?.language} srclg_{.language}{/.language}">
<div class="src_block">
{?.language}<div class="src_language">{.language}</div>{/.language}
<table class="src_body">
<tbody>
{#.lines}
<tr>
<td>{#.children}{@select key=.type}
{@eq value="raw_text"}<code class="src_line">{.content}</code>{/eq}
{@eq value="highlight_start"}<span class="srchl_{.name}">{/eq}
{@eq value="highlight_end"}</span>{/eq}
{@none}{!TODO: make this panic!}ERROR: Unrecognized type {.type}.{/none}
{/select}{/.children}</td>
<td><code class="src_line">{.}</code></td>
</tr>
{/.lines}
</tbody>

View File

@ -1,5 +1 @@
<table class="org_table">{#.children}{@select key=.type}
{@eq value="head"}{>table_head/}{/eq}
{@eq value="body"}{>table_body/}{/eq}
{@none}{!TODO: make this panic!}ERROR: Unrecognized type {.type}.{/none}
{/select}{/.children}</table>
<table>{#.children}{>table_row/}{/.children}</table>

View File

@ -1 +0,0 @@
<tbody>{#.children}{>table_row/}{/.children}</tbody>

View File

@ -1 +0,0 @@
<thead>{#.children}{>table_head_row/}{/.children}</thead>

View File

@ -1 +0,0 @@
<th scope="col">{#.children}{>object/}{/.children}</th>

View File

@ -1 +0,0 @@
<tr>{#.children}{>table_head_cell/}{/.children}</tr>

View File

@ -1 +1 @@
<span class="timestamp">{.source}</span>
!!!!!!!! timestamp

View File

@ -1,16 +0,0 @@
# syntax=docker/dockerfile:1
ARG ALPINE_VERSION="3.20"
FROM rustlang/rust:nightly-alpine$ALPINE_VERSION AS builder
RUN apk add --no-cache musl-dev
RUN mkdir /root/natter
WORKDIR /root/natter
COPY --link . .
# TODO: Add static build, which currently errors due to proc_macro. RUSTFLAGS="-C target-feature=+crt-static"
RUN --mount=type=tmpfs,target=/tmp --mount=type=cache,target=/usr/local/cargo/registry,sharing=locked CARGO_TARGET_DIR=/target cargo build --profile release-lto
FROM alpine:$ALPINE_VERSION AS runner
COPY --link --from=builder /target/release-lto/natter /usr/bin/

View File

@ -1,32 +0,0 @@
SHELL := bash
.ONESHELL:
.SHELLFLAGS := -eu -o pipefail -c
.DELETE_ON_ERROR:
MAKEFLAGS += --warn-undefined-variables
MAKEFLAGS += --no-builtin-rules
OS:=$(shell uname -s)
ifeq ($(origin .RECIPEPREFIX), undefined)
$(error This Make does not support .RECIPEPREFIX. Please use GNU Make 4.0 or later)
endif
.RECIPEPREFIX = >
IMAGE_NAME:=natter
TARGET :=
.PHONY: help
help:
> @grep -h "##" $(MAKEFILE_LIST) | grep -v grep | sed -E 's/^([^:]*): *## */\1: /'
.PHONY: build
build: ## Build the docker image.
> docker build --tag $(IMAGE_NAME) --target=$(TARGET) --file Dockerfile ../../
.PHONY: shell
shell: ## Launch an interactive shell inside the docker image.
shell: build
> docker run --rm -i -t --entrypoint /bin/sh --mount type=tmpfs,destination=/tmp $(IMAGE_NAME)
.PHONY: clean
clean:
> docker rmi $(IMAGE_NAME)

View File

@ -1,14 +0,0 @@
# syntax=docker/dockerfile:1
ARG ALPINE_VERSION="3.20"
FROM rustlang/rust:nightly-alpine$ALPINE_VERSION AS builder
RUN apk add --no-cache musl-dev
RUN --mount=type=tmpfs,target=/tmp --mount=type=cache,target=/usr/local/cargo/registry,sharing=locked cargo install --locked --no-default-features --features ci-autoclean cargo-cache
RUN rustup component add rustfmt
RUN rustup component add clippy
FROM builder AS javascript
RUN apk add --no-cache npm
RUN --mount=type=tmpfs,target=/tmp --mount=type=cache,target=/npmcache,sharing=locked npm set cache /npmcache && npm install --global prettier@3.1.0

View File

@ -1,33 +0,0 @@
SHELL := bash
.ONESHELL:
.SHELLFLAGS := -eu -o pipefail -c
.DELETE_ON_ERROR:
MAKEFLAGS += --warn-undefined-variables
MAKEFLAGS += --no-builtin-rules
ifeq ($(origin .RECIPEPREFIX), undefined)
$(error This Make does not support .RECIPEPREFIX. Please use GNU Make 4.0 or later)
endif
.RECIPEPREFIX = >
IMAGE_NAME:=natter-development
TARGET :=
.PHONY: help
help:
> @grep -h "##" $(MAKEFILE_LIST) | grep -v grep | sed -E 's/^([^:]*): *## */\1: /'
.PHONY: build
build: ## Build the docker image.
> docker build --tag $(IMAGE_NAME) --target=$(TARGET) --file Dockerfile .
> docker volume create natter-cargo-registry
.PHONY: shell
shell: ## Launch an interactive shell inside the docker image with the source repository mounted at /source.
shell: build
> docker run --rm -i -t --entrypoint /bin/sh --mount type=tmpfs,destination=/tmp -v "$$(readlink -f ../../):/source" --workdir=/source --env CARGO_TARGET_DIR=/target -v "natter-cargo-registry:/usr/local/cargo/registry" $(IMAGE_NAME)
.PHONY: clean
clean:
> docker rmi $(IMAGE_NAME)
> docker volume rm natter-cargo-registry

View File

@ -1,22 +0,0 @@
[[file:image.svg]]
[[file:/image.svg]]
[[file:./image.svg]]
[[/image.svg]]
[[./image.svg]]
# Check capitalization of extension
[[./image.SVG]]
# Check spaces in path
[[./image and stuff.SVG]]
[[/ssh:admin@test.example:important/file.svg]]
[[file:/ssh:admin@test.example:important/file.svg]]
# Check multiple parts in the path
[[file:/foo/bar/baz/image.svg]]

View File

@ -1,4 +0,0 @@
[toolchain]
channel = "nightly"
profile = "default"
components = ["clippy", "rustfmt"]

View File

@ -1 +0,0 @@

View File

@ -3,27 +3,18 @@ use std::path::PathBuf;
use include_dir::include_dir;
use include_dir::Dir;
use tokio::fs::DirEntry;
use tokio::task::JoinHandle;
use crate::config::Config;
use crate::context::DependencyManager;
use crate::context::RenderBlogPostPage;
use crate::context::RenderBlogPostPageInput;
use crate::context::RenderBlogStream;
use crate::context::RenderBlogStreamInput;
use crate::context::RenderContext;
use crate::context::RenderPage;
use crate::error::CustomError;
use crate::intermediate::get_web_path;
use crate::intermediate::BlogPost;
use crate::intermediate::IPage;
use crate::intermediate::PublishStatus;
use crate::render::DusterRenderer;
use crate::render::RendererIntegration;
use crate::walk_fs::walk_fs;
use crate::walk_fs::WalkAction;
use crate::walk_fs::WalkFsFilterResult;
use super::stylesheet::Stylesheet;
@ -33,7 +24,6 @@ pub(crate) struct SiteRenderer {
output_directory: PathBuf,
blog_posts: Vec<BlogPost>,
stylesheets: Vec<Stylesheet>,
pages: Vec<IPage>,
}
impl SiteRenderer {
@ -41,13 +31,11 @@ impl SiteRenderer {
output_directory: P,
blog_posts: Vec<BlogPost>,
stylesheets: Vec<Stylesheet>,
pages: Vec<IPage>,
) -> SiteRenderer {
SiteRenderer {
output_directory: output_directory.into(),
blog_posts,
stylesheets,
pages,
}
}
@ -82,83 +70,30 @@ impl SiteRenderer {
Ok(renderer_integration)
}
pub(crate) async fn render_pages(&self, config: &Config) -> Result<(), CustomError> {
let renderer_integration = self.init_renderer_integration()?;
for page in self.pages.iter().filter(|page| match page.natter_publish {
PublishStatus::Full => true,
PublishStatus::Unlisted => true,
PublishStatus::Unpublished => false,
}) {
let output_path = self.output_directory.join(page.get_output_path());
let dependency_manager =
std::sync::Arc::new(std::sync::Mutex::new(DependencyManager::new()));
let render_context = RenderContext::new(
config,
self.output_directory.as_path(),
output_path.as_path(),
None,
dependency_manager.clone(),
)?;
let dust_context = RenderPage::new(render_context.clone(), page)?;
let rendered_output = renderer_integration.render(dust_context)?;
let parent_directory = output_path
.parent()
.ok_or("Output file should have a containing directory.")?;
tokio::fs::create_dir_all(parent_directory).await?;
tokio::fs::write(&output_path, rendered_output).await?;
let dependencies = dependency_manager.lock().unwrap().take_dependencies();
for dependency in dependencies {
dependency.perform(render_context.clone()).await?;
}
}
Ok(())
}
pub(crate) async fn render_blog_posts(&self, config: &Config) -> Result<(), CustomError> {
let renderer_integration = self.init_renderer_integration()?;
for blog_post in self.blog_posts.iter().filter(|blog_post| {
match blog_post
.get_index_page()
.expect("Blog posts should have an index page.")
.natter_publish
{
PublishStatus::Full => true,
PublishStatus::Unlisted => true,
PublishStatus::Unpublished => false,
}
}) {
for blog_post in &self.blog_posts {
for blog_post_page in &blog_post.pages {
let output_path = self
.output_directory
.join(config.get_relative_path_to_post(&blog_post.id))
.join(blog_post_page.get_output_path());
let dependency_manager =
std::sync::Arc::new(std::sync::Mutex::new(DependencyManager::new()));
let convert_input = RenderBlogPostPageInput::new(blog_post, blog_post_page);
let render_context = RenderContext::new(
config,
self.output_directory.as_path(),
output_path.as_path(),
None,
dependency_manager.clone(),
)?;
let dust_context = RenderBlogPostPage::new(render_context.clone(), &convert_input)?;
let rendered_output = renderer_integration.render(dust_context)?;
let render_context = RenderBlogPostPage::new(render_context, &convert_input)?;
let rendered_output = renderer_integration.render(render_context)?;
let parent_directory = output_path
.parent()
.ok_or("Output file should have a containing directory.")?;
tokio::fs::create_dir_all(parent_directory).await?;
tokio::fs::write(&output_path, rendered_output).await?;
let dependencies = dependency_manager.lock().unwrap().take_dependencies();
for dependency in dependencies {
dependency.perform(render_context.clone()).await?;
}
tokio::fs::write(output_path, rendered_output).await?;
}
}
@ -170,21 +105,7 @@ impl SiteRenderer {
// Sort blog posts by date, newest first.
let sorted_blog_posts = {
let mut sorted_blog_posts: Vec<_> = self
.blog_posts
.iter()
.filter(|blog_post| {
match blog_post
.get_index_page()
.expect("Blog posts should have an index page.")
.natter_publish
{
PublishStatus::Full => true,
PublishStatus::Unlisted => false,
PublishStatus::Unpublished => false,
}
})
.collect();
let mut sorted_blog_posts: Vec<_> = self.blog_posts.iter().collect();
sorted_blog_posts
.sort_by_key(|blog_post| (blog_post.get_date(), blog_post.id.as_str()));
sorted_blog_posts.reverse();
@ -240,17 +161,14 @@ impl SiteRenderer {
)?)
};
let dependency_manager =
std::sync::Arc::new(std::sync::Mutex::new(DependencyManager::new()));
let convert_input = RenderBlogStreamInput::new(chunk, older_link, newer_link);
let render_context = RenderContext::new(
config,
self.output_directory.as_path(),
output_file.as_path(),
None,
dependency_manager.clone(),
)?;
let blog_stream = RenderBlogStream::new(render_context.clone(), &convert_input)?;
let blog_stream = RenderBlogStream::new(render_context, &convert_input)?;
// Pass each RenderBlogStream to dust as the context to render index.html and any additional stream pages.
let rendered_output = renderer_integration.render(blog_stream)?;
@ -258,12 +176,7 @@ impl SiteRenderer {
.parent()
.ok_or("Output file should have a containing directory.")?;
tokio::fs::create_dir_all(parent_directory).await?;
tokio::fs::write(&output_file, rendered_output).await?;
let dependencies = dependency_manager.lock().unwrap().take_dependencies();
for dependency in dependencies {
dependency.perform(render_context.clone()).await?;
}
tokio::fs::write(output_file, rendered_output).await?;
}
Ok(())
}
@ -283,27 +196,6 @@ impl SiteRenderer {
}
Ok(())
}
pub(crate) async fn copy_static_files(&self, config: &Config) -> Result<(), CustomError> {
let static_files_directory = config
.get_root_directory()
.join(config.get_relative_path_to_static_files());
if !static_files_directory.exists() {
return Ok(());
}
let static_files = get_all_files(&static_files_directory).await?;
for entry in static_files {
let (path, contents) = entry.await??;
let relative_path = path.strip_prefix(&static_files_directory)?;
let output_path = self.output_directory.join(relative_path);
let parent_directory = output_path
.parent()
.ok_or("Output file should have a containing directory.")?;
tokio::fs::create_dir_all(parent_directory).await?;
tokio::fs::write(output_path, contents).await?;
}
Ok(())
}
}
fn build_name_contents_pairs<'a>(
@ -318,32 +210,3 @@ fn build_name_contents_pairs<'a>(
let contents = std::str::from_utf8(entry.contents())?;
Ok((name, contents))
}
type ReadFileResult = std::io::Result<(PathBuf, Vec<u8>)>;
async fn filter_to_files(entry: &DirEntry) -> WalkFsFilterResult {
let file_type = entry.file_type().await?;
if file_type.is_dir() {
return Ok(WalkAction::Recurse);
}
if file_type.is_file() {
return Ok(WalkAction::HaltAndCapture);
}
unreachable!("Unhandled file type.");
}
async fn get_all_files<P: Into<PathBuf>>(
root_dir: P,
) -> Result<impl Iterator<Item = JoinHandle<ReadFileResult>>, CustomError> {
let files = walk_fs(root_dir, filter_to_files).await?;
let files_and_content = files
.into_iter()
.map(|entry| tokio::spawn(read_file(entry.path())));
Ok(files_and_content)
}
async fn read_file(path: PathBuf) -> ReadFileResult {
let contents = tokio::fs::read(&path).await?;
Ok((path, contents))
}

View File

@ -1,25 +1,14 @@
use std::ffi::OsStr;
use std::path::PathBuf;
use std::sync::Arc;
use std::sync::Mutex;
use super::stylesheet::Stylesheet;
use crate::cli::parameters::BuildArgs;
use crate::command::build::render::SiteRenderer;
use crate::config::Config;
use crate::error::CustomError;
use crate::intermediate::get_org_files;
use crate::intermediate::BlogPost;
use crate::intermediate::IPage;
use crate::intermediate::IntermediateContext;
use crate::intermediate::PageInput;
use crate::intermediate::Registry;
use crate::walk_fs::walk_fs;
use crate::walk_fs::WalkAction;
use crate::walk_fs::WalkFsFilterResult;
use include_dir::include_dir;
use include_dir::Dir;
use tokio::fs::DirEntry;
static DEFAULT_STYLESHEETS: Dir =
include_dir!("$CARGO_MANIFEST_DIR/default_environment/stylesheet");
@ -28,18 +17,14 @@ pub(crate) async fn build_site(args: BuildArgs) -> Result<(), CustomError> {
let config = Config::load_from_file(args.config).await?;
let blog_posts = load_blog_posts(&config).await?;
let stylesheets = load_stylesheets().await?;
let pages = load_pages(&config).await?;
let renderer = SiteRenderer::new(
get_output_directory(&config).await?,
blog_posts,
stylesheets,
pages,
);
renderer.render_blog_posts(&config).await?;
renderer.render_blog_stream(&config).await?;
renderer.render_pages(&config).await?;
renderer.render_stylesheets().await?;
renderer.copy_static_files(&config).await?;
Ok(())
}
@ -63,54 +48,24 @@ async fn get_output_directory(config: &Config) -> Result<PathBuf, CustomError> {
Ok(output_directory)
}
async fn filter_to_highest_folders_containing_org_files(entry: &DirEntry) -> WalkFsFilterResult {
let file_type = entry.file_type().await?;
if !file_type.is_dir() {
return Ok(WalkAction::Halt);
}
let mut entries = tokio::fs::read_dir(entry.path()).await?;
while let Some(entry) = entries.next_entry().await? {
let entry_type = entry.file_type().await?;
if !entry_type.is_file() {
continue;
}
match entry.path().extension().and_then(OsStr::to_str) {
Some(ext) if ext.eq_ignore_ascii_case("org") => {
return Ok(WalkAction::HaltAndCapture);
}
_ => {}
}
}
Ok(WalkAction::Recurse)
}
async fn get_post_directories(config: &Config) -> Result<Vec<PathBuf>, CustomError> {
if !config.get_posts_directory().exists() {
return Ok(Vec::new());
let mut ret = Vec::new();
let mut entries = tokio::fs::read_dir(config.get_posts_directory()).await?;
while let Some(entry) = entries.next_entry().await? {
let file_type = entry.file_type().await?;
if file_type.is_dir() {
ret.push(entry.path());
}
let top_level_org_folders = walk_fs(
config.get_posts_directory(),
filter_to_highest_folders_containing_org_files,
)
.await?;
Ok(top_level_org_folders
.into_iter()
.map(|entry| entry.path())
.collect())
}
Ok(ret)
}
async fn load_blog_posts(config: &Config) -> Result<Vec<BlogPost>, CustomError> {
let root_directory = config.get_root_directory().to_owned();
let posts_directory = config.get_posts_directory();
let post_directories = get_post_directories(config).await?;
let load_jobs = post_directories.into_iter().map(|path| {
tokio::spawn(BlogPost::load_blog_post(
root_directory.clone(),
posts_directory.clone(),
path,
))
});
let post_directories = get_post_directories(&config).await?;
let load_jobs = post_directories
.into_iter()
.map(|path| tokio::spawn(BlogPost::load_blog_post(root_directory.clone(), path)));
let mut blog_posts = Vec::new();
for job in load_jobs {
blog_posts.push(job.await??);
@ -118,62 +73,6 @@ async fn load_blog_posts(config: &Config) -> Result<Vec<BlogPost>, CustomError>
Ok(blog_posts)
}
async fn load_pages(config: &Config) -> Result<Vec<IPage>, CustomError> {
let pages_source = config
.get_root_directory()
.join(config.get_relative_path_to_pages());
if !pages_source.exists() {
return Ok(Vec::new());
}
let page_files = get_org_files(&pages_source).await?;
let org_files = {
let mut ret = Vec::new();
for page in page_files {
ret.push(page.await??);
}
ret
};
let parsed_org_files = {
let mut ret = Vec::new();
for (path, contents) in org_files.iter() {
let parsed = organic::parser::parse_file(contents.as_str(), Some(path))
.map_err(|_| CustomError::Static("Failed to parse org-mode document."))?;
ret.push((path, contents, parsed));
}
ret
};
let pages = {
let mut ret = Vec::new();
for (real_path, _contents, parsed_document) in parsed_org_files.iter() {
let mut registry = Registry::new();
// Assign IDs to the targets
organic::types::AstNode::from(parsed_document)
.iter_all_ast_nodes()
.for_each(|node| {
if let organic::types::AstNode::Target(target) = node {
registry.get_target(target.value);
}
});
let registry = Arc::new(Mutex::new(registry));
let intermediate_context = IntermediateContext::new(registry)?;
let relative_to_pages_dir_path = real_path.strip_prefix(&pages_source)?;
ret.push(
IPage::new(
intermediate_context,
PageInput::new(relative_to_pages_dir_path, real_path, parsed_document),
)
.await?,
);
}
ret
};
Ok(pages)
}
async fn load_stylesheets() -> Result<Vec<Stylesheet>, CustomError> {
let sources: Vec<_> = DEFAULT_STYLESHEETS
.files()

View File

@ -13,7 +13,7 @@ pub(crate) async fn init_natter_folder(args: InitArgs) -> Result<(), CustomError
let mut existing_entries = tokio::fs::read_dir(&args.path).await?;
let first_entry = existing_entries.next_entry().await?;
if first_entry.is_some() {
if let Some(_) = first_entry {
return Err("The directory is not empty. Aborting.".into());
}

View File

@ -47,7 +47,8 @@ impl Config {
}
pub(crate) fn get_root_directory(&self) -> &Path {
self.config_path
&self
.config_path
.parent()
.expect("Config file must exist inside a directory.")
}
@ -85,15 +86,8 @@ impl Config {
self.raw
.stream
.as_ref()
.and_then(|stream| stream.entries_per_page)
.map(|stream| stream.entries_per_page)
.flatten()
.unwrap_or(5)
}
pub(crate) fn get_relative_path_to_static_files(&self) -> PathBuf {
Path::new("static").into()
}
pub(crate) fn get_relative_path_to_pages(&self) -> PathBuf {
Path::new("pages").into()
}
}

View File

@ -2,7 +2,7 @@ use serde::Deserialize;
use serde::Serialize;
/// This is the struct for the natter.toml config file that ends up in each site's root directory.
#[derive(Debug, Deserialize, Serialize, Default)]
#[derive(Debug, Deserialize, Serialize)]
pub(crate) struct RawConfig {
pub(super) site_title: Option<String>,
author: Option<String>,
@ -12,7 +12,28 @@ pub(crate) struct RawConfig {
pub(super) stream: Option<RawConfigStream>,
}
#[derive(Debug, Deserialize, Serialize, Default)]
impl Default for RawConfig {
fn default() -> Self {
RawConfig {
site_title: None,
author: None,
email: None,
use_relative_paths: None,
web_root: None,
stream: None,
}
}
}
#[derive(Debug, Deserialize, Serialize)]
pub(crate) struct RawConfigStream {
pub(super) entries_per_page: Option<usize>,
}
impl Default for RawConfigStream {
fn default() -> Self {
RawConfigStream {
entries_per_page: None,
}
}
}

View File

@ -117,14 +117,14 @@ pub(crate) enum RenderAstNode {
}
pub(crate) trait IntoRenderAstNode {
fn as_render_ast_node(
fn into_render_ast_node(
&self,
render_context: RenderContext<'_>,
) -> Result<RenderAstNode, CustomError>;
}
impl IntoRenderAstNode for IAstNode {
fn as_render_ast_node(
fn into_render_ast_node(
&self,
render_context: RenderContext<'_>,
) -> Result<RenderAstNode, CustomError> {

View File

@ -1,9 +1,6 @@
use std::collections::HashSet;
use serde::Serialize;
use super::render_context::RenderContext;
use crate::context::macros::push_file;
use crate::error::CustomError;
use crate::intermediate::get_web_path;
use crate::intermediate::BlogPost;
@ -17,7 +14,6 @@ use super::RenderDocumentElement;
#[derive(Debug)]
pub(crate) struct RenderBlogPostPageInput<'a> {
#[allow(dead_code)]
post: &'a BlogPost,
page: &'a BlogPostPage,
}
@ -52,7 +48,27 @@ render!(
original,
render_context,
{
push_file!(render_context, &original.page.src, {
let css_files = vec![
get_web_path(
render_context.config,
render_context.output_root_directory,
render_context.output_file,
"stylesheet/reset.css",
)?,
get_web_path(
render_context.config,
render_context.output_root_directory,
render_context.output_file,
"stylesheet/main.css",
)?,
];
let js_files = vec![get_web_path(
render_context.config,
render_context.output_root_directory,
render_context.output_file,
"blog_post.js",
)?];
let global_settings = GlobalSettings::new(original.page.title.clone(), css_files, js_files);
let page_header = PageHeader::new(
render_context.config.get_site_title().map(str::to_string),
Some(get_web_path(
@ -61,12 +77,6 @@ render!(
render_context.output_file,
"",
)?),
Some(get_web_path(
render_context.config,
render_context.output_root_directory,
render_context.output_file,
"about_me",
)?),
);
let link_to_blog_post = get_web_path(
render_context.config,
@ -100,46 +110,6 @@ render!(
ret
};
let mut css_files = vec![
get_web_path(
render_context.config,
render_context.output_root_directory,
render_context.output_file,
"stylesheet/reset.css",
)?,
get_web_path(
render_context.config,
render_context.output_root_directory,
render_context.output_file,
"stylesheet/main.css",
)?,
];
let additional_css_files = render_context
.dependency_manager
.lock()
.unwrap()
.list_css()?
.map(|css_name| {
get_web_path(
render_context.config,
render_context.output_root_directory,
render_context.output_file,
format!("stylesheet/{}", css_name),
)
})
.collect::<Result<HashSet<_>, _>>()?;
css_files.extend(additional_css_files.into_iter());
let js_files = vec![get_web_path(
render_context.config,
render_context.output_root_directory,
render_context.output_file,
"blog_post.js",
)?];
let global_settings =
GlobalSettings::new(original.page.title.clone(), css_files, js_files);
let ret = RenderBlogPostPage {
global_settings,
page_header: Some(page_header),
@ -149,6 +119,5 @@ render!(
footnotes,
};
Ok(ret)
})
}
);

View File

@ -1,10 +1,7 @@
use std::collections::HashSet;
use serde::Serialize;
use super::macros::render;
use super::render_context::RenderContext;
use crate::context::macros::push_file;
use crate::context::RenderDocumentElement;
use crate::context::RenderRealFootnoteDefinition;
use crate::error::CustomError;
@ -51,6 +48,31 @@ render!(
original,
render_context,
{
let css_files = vec![
get_web_path(
render_context.config,
render_context.output_root_directory,
render_context.output_file,
"stylesheet/reset.css",
)?,
get_web_path(
render_context.config,
render_context.output_root_directory,
render_context.output_file,
"stylesheet/main.css",
)?,
];
let js_files = vec![get_web_path(
render_context.config,
render_context.output_root_directory,
render_context.output_file,
"blog_post.js",
)?];
let global_settings = GlobalSettings::new(
render_context.config.get_site_title().map(str::to_string),
css_files,
js_files,
);
let page_header = PageHeader::new(
render_context.config.get_site_title().map(str::to_string),
Some(get_web_path(
@ -59,17 +81,11 @@ render!(
render_context.output_file,
"",
)?),
Some(get_web_path(
render_context.config,
render_context.output_root_directory,
render_context.output_file,
"about_me",
)?),
);
let children = original
.original
.iter()
.into_iter()
.enumerate()
.map(|(i, blog_post)| {
RenderBlogStreamEntry::new(
@ -88,49 +104,6 @@ render!(
None
};
let mut css_files = vec![
get_web_path(
render_context.config,
render_context.output_root_directory,
render_context.output_file,
"stylesheet/reset.css",
)?,
get_web_path(
render_context.config,
render_context.output_root_directory,
render_context.output_file,
"stylesheet/main.css",
)?,
];
let additional_css_files = render_context
.dependency_manager
.lock()
.unwrap()
.list_css()?
.map(|css_name| {
get_web_path(
render_context.config,
render_context.output_root_directory,
render_context.output_file,
format!("stylesheet/{}", css_name),
)
})
.collect::<Result<HashSet<_>, _>>()?;
css_files.extend(additional_css_files.into_iter());
let js_files = vec![get_web_path(
render_context.config,
render_context.output_root_directory,
render_context.output_file,
"blog_post.js",
)?];
let global_settings = GlobalSettings::new(
render_context.config.get_site_title().map(str::to_string),
css_files,
js_files,
);
Ok(RenderBlogStream {
global_settings,
page_header: Some(page_header),
@ -191,7 +164,6 @@ render!(
.get_index_page()
.ok_or_else(|| format!("Blog post {} needs an index page.", original.original.id))?;
push_file!(render_context, &index_page.src, {
let title = index_page.title.clone();
let children = index_page
@ -219,7 +191,6 @@ render!(
children,
footnotes,
})
})
}
);

View File

@ -1,51 +0,0 @@
use std::path::PathBuf;
use crate::error::CustomError;
use super::RenderContext;
#[derive(Debug)]
pub(crate) enum Dependency {
StaticFile { absolute_path: PathBuf },
CssFile { name: String },
}
impl Dependency {
pub(crate) async fn perform(
&self,
render_context: RenderContext<'_>,
) -> Result<(), CustomError> {
match self {
Dependency::StaticFile { absolute_path } => {
let input_root_directory = render_context.config.get_root_directory();
let relative_path_to_file = absolute_path.strip_prefix(input_root_directory)?;
let path_to_output = render_context
.output_root_directory
.join(relative_path_to_file);
tokio::fs::create_dir_all(
path_to_output
.parent()
.ok_or("Output file should have a containing directory.")?,
)
.await?;
if tokio::fs::metadata(&path_to_output).await.is_ok() {
// TODO: compare hash and error out if they do not match.
println!(
"Not copying {} to {} because the output file already exists.",
absolute_path.display(),
path_to_output.display()
);
} else {
tokio::fs::copy(absolute_path, path_to_output).await?;
}
Ok(())
}
Dependency::CssFile { name: _ } => {
// We don't do anything because the CSS files are already copied into the output from natter's default environment.
// TODO: When we add support for CSS outside the default environment, we should add support for dynamically picking which ones to copy here.
Ok(())
}
}
}
}

View File

@ -1,84 +0,0 @@
use std::path::Path;
use std::path::PathBuf;
use crate::error::CustomError;
use super::dependency::Dependency;
pub(crate) type RefDependencyManager = std::sync::Arc<std::sync::Mutex<DependencyManager>>;
#[derive(Debug)]
pub(crate) struct DependencyManager {
/// A stack of paths for the files being visited.
///
/// The last entry is the current file being processed. This can be used for handling relative-path links.
file_stack: Vec<PathBuf>,
dependencies: Vec<Dependency>,
}
impl DependencyManager {
pub(crate) fn new() -> Self {
DependencyManager {
file_stack: Vec::new(),
dependencies: Vec::new(),
}
}
pub(crate) fn push_file<P>(&mut self, path: P) -> Result<(), CustomError>
where
P: Into<PathBuf>,
{
self.file_stack.push(path.into());
Ok(())
}
pub(crate) fn pop_file(&mut self) -> Result<(), CustomError> {
self.file_stack
.pop()
.expect("Popped more files off the dependency manager file stack than exist.");
Ok(())
}
pub(crate) fn get_current_folder(&self) -> Result<&Path, CustomError> {
Ok(self
.file_stack
.last()
.ok_or("No current file")?
.parent()
.ok_or("Current file was not in a directory")?)
}
pub(crate) fn mark_file_for_copying<P>(&mut self, path: P) -> Result<(), CustomError>
where
P: Into<PathBuf>,
{
self.dependencies.push(Dependency::StaticFile {
absolute_path: path.into(),
});
Ok(())
}
/// Return the dependencies and forget about them.
pub(crate) fn take_dependencies(&mut self) -> Vec<Dependency> {
let mut dependencies = Vec::new();
std::mem::swap(&mut self.dependencies, &mut dependencies);
dependencies
}
pub(crate) fn include_css<N>(&mut self, name: N) -> Result<(), CustomError>
where
std::string::String: From<N>,
{
self.dependencies
.push(Dependency::CssFile { name: name.into() });
Ok(())
}
pub(crate) fn list_css(&self) -> Result<impl Iterator<Item = &String>, CustomError> {
Ok(self.dependencies.iter().filter_map(|dep| match dep {
Dependency::CssFile { name } => Some(name),
_ => None,
}))
}
}

View File

@ -39,7 +39,7 @@ render!(
let contents = {
let mut ret = Vec::new();
for obj in original.contents.iter() {
ret.push(obj.as_render_ast_node(render_context.clone())?);
ret.push(obj.into_render_ast_node(render_context.clone())?);
}
ret
};

View File

@ -35,23 +35,3 @@ macro_rules! rnoop {
}
pub(crate) use rnoop;
/// Push a file onto the render DependencyManager's file stack while inside the code block.
macro_rules! push_file {
($render_context:ident, $path:expr, $body:tt) => {{
$render_context
.dependency_manager
.lock()
.unwrap()
.push_file($path)?;
let ret = (|| $body)();
$render_context
.dependency_manager
.lock()
.unwrap()
.pop_file()?;
ret
}};
}
pub(crate) use push_file;

View File

@ -11,8 +11,6 @@ mod clock;
mod code;
mod comment;
mod comment_block;
mod dependency;
mod dependency_manager;
mod diary_sexp;
mod document_element;
mod drawer;
@ -38,7 +36,6 @@ mod line_break;
mod macros;
mod object;
mod org_macro;
mod page;
mod page_header;
mod paragraph;
mod plain_link;
@ -62,7 +59,6 @@ mod subscript;
mod superscript;
mod table;
mod table_cell;
mod table_group;
mod table_row;
mod target;
mod timestamp;
@ -74,14 +70,12 @@ pub(crate) use blog_post_page::RenderBlogPostPage;
pub(crate) use blog_post_page::RenderBlogPostPageInput;
pub(crate) use blog_stream::RenderBlogStream;
pub(crate) use blog_stream::RenderBlogStreamInput;
pub(crate) use dependency_manager::DependencyManager;
pub(crate) use document_element::RenderDocumentElement;
pub(crate) use element::RenderElement;
pub(crate) use footnote_definition::RenderRealFootnoteDefinition;
pub(crate) use global_settings::GlobalSettings;
pub(crate) use heading::RenderHeading;
pub(crate) use object::RenderObject;
pub(crate) use page::RenderPage;
pub(crate) use page_header::PageHeader;
pub(crate) use render_context::RenderContext;
pub(crate) use section::RenderSection;

View File

@ -1,131 +0,0 @@
use std::collections::HashSet;
use super::footnote_definition::RenderRealFootnoteDefinition;
use super::macros::render;
use super::render_context::RenderContext;
use super::GlobalSettings;
use super::PageHeader;
use super::RenderDocumentElement;
use crate::context::macros::push_file;
use crate::error::CustomError;
use crate::intermediate::get_web_path;
use crate::intermediate::IPage;
use serde::Serialize;
#[derive(Debug, Serialize)]
#[serde(tag = "type")]
#[serde(rename = "page")]
pub(crate) struct RenderPage {
global_settings: GlobalSettings,
page_header: Option<PageHeader>,
/// The title that will be shown visibly on the page.
title: Option<String>,
self_link: Option<String>,
children: Vec<RenderDocumentElement>,
footnotes: Vec<RenderRealFootnoteDefinition>,
}
render!(RenderPage, IPage, original, render_context, {
push_file!(render_context, &original.src, {
let page_header = PageHeader::new(
render_context.config.get_site_title().map(str::to_string),
Some(get_web_path(
render_context.config,
render_context.output_root_directory,
render_context.output_file,
"",
)?),
Some(get_web_path(
render_context.config,
render_context.output_root_directory,
render_context.output_file,
"about_me",
)?),
);
let link_to_blog_post = get_web_path(
render_context.config,
render_context.output_root_directory,
render_context.output_file,
render_context
.output_file
.strip_prefix(render_context.output_root_directory)?,
)?;
let children = {
let mut children = Vec::new();
for child in original.children.iter() {
children.push(RenderDocumentElement::new(render_context.clone(), child)?);
}
children
};
let footnotes = {
let mut ret = Vec::new();
for footnote in original.footnotes.iter() {
ret.push(RenderRealFootnoteDefinition::new(
render_context.clone(),
footnote,
)?);
}
ret
};
let mut css_files = vec![
get_web_path(
render_context.config,
render_context.output_root_directory,
render_context.output_file,
"stylesheet/reset.css",
)?,
get_web_path(
render_context.config,
render_context.output_root_directory,
render_context.output_file,
"stylesheet/main.css",
)?,
];
let additional_css_files = render_context
.dependency_manager
.lock()
.unwrap()
.list_css()?
.map(|css_name| {
get_web_path(
render_context.config,
render_context.output_root_directory,
render_context.output_file,
format!("stylesheet/{}", css_name),
)
})
.collect::<Result<HashSet<_>, _>>()?;
css_files.extend(additional_css_files.into_iter());
let js_files = vec![get_web_path(
render_context.config,
render_context.output_root_directory,
render_context.output_file,
"blog_post.js",
)?];
let global_settings = GlobalSettings::new(original.title.clone(), css_files, js_files);
let ret = RenderPage {
global_settings,
page_header: Some(page_header),
title: original.title.clone(),
self_link: Some(link_to_blog_post),
children,
footnotes,
};
Ok(ret)
})
});

View File

@ -7,35 +7,13 @@ use serde::Serialize;
pub(crate) struct PageHeader {
website_title: Option<String>,
home_link: Option<String>,
nav_links: Vec<NavLink>,
}
/// A link in the top-right of the page.
#[derive(Debug, Serialize)]
#[serde(tag = "type")]
#[serde(rename = "nav_link")]
pub(crate) struct NavLink {
text: Option<String>,
url: Option<String>,
}
impl PageHeader {
pub(crate) fn new(
website_title: Option<String>,
home_link: Option<String>,
about_me_link: Option<String>,
) -> PageHeader {
pub(crate) fn new(website_title: Option<String>, home_link: Option<String>) -> PageHeader {
PageHeader {
website_title,
home_link,
nav_links: about_me_link
.map(|url| {
vec![NavLink {
text: Some("About Me".to_owned()),
url: Some(url),
}]
})
.unwrap_or_default(),
}
}
}

View File

@ -12,7 +12,6 @@ use super::RenderObject;
#[serde(rename = "paragraph")]
pub(crate) struct RenderParagraph {
children: Vec<RenderObject>,
is_single_image: bool,
post_blank: organic::types::PostBlank,
}
@ -27,7 +26,6 @@ render!(RenderParagraph, IParagraph, original, render_context, {
Ok(RenderParagraph {
children,
is_single_image: original.is_single_image(),
post_blank: original.post_blank,
})
});

View File

@ -1,21 +1,16 @@
use serde::Serialize;
use super::macros::render;
use super::render_context::RenderContext;
use crate::error::CustomError;
use crate::intermediate::IPlainLink;
use super::macros::rnoop;
#[derive(Debug, Serialize)]
#[serde(tag = "type")]
#[serde(rename = "plain_link")]
pub(crate) struct RenderPlainLink {
raw_link: String,
post_blank: organic::types::PostBlank,
}
render!(RenderPlainLink, IPlainLink, original, _render_context, {
Ok(RenderPlainLink {
raw_link: original.raw_link.clone(),
post_blank: original.post_blank,
})
});
rnoop!(RenderPlainLink, IPlainLink);

View File

@ -3,7 +3,6 @@ use serde::Serialize;
use super::render_context::RenderContext;
use crate::error::CustomError;
use crate::intermediate::IRegularLink;
use crate::intermediate::LinkTarget;
use super::macros::render;
use super::RenderObject;
@ -11,29 +10,12 @@ use super::RenderObject;
#[derive(Debug, Serialize)]
#[serde(tag = "type")]
#[serde(rename = "regular_link")]
pub(crate) enum RenderRegularLink {
#[serde(rename = "regular_link_anchor")]
Anchor(RenderRegularLinkAnchor),
#[serde(rename = "regular_link_image")]
Image(RenderRegularLinkImage),
}
#[derive(Debug, Serialize)]
pub(crate) struct RenderRegularLinkAnchor {
target: String,
pub(crate) struct RenderRegularLink {
raw_link: String,
children: Vec<RenderObject>,
post_blank: organic::types::PostBlank,
}
#[derive(Debug, Serialize)]
pub(crate) struct RenderRegularLinkImage {
src: String,
alt: String,
raw_link: String,
post_blank: organic::types::PostBlank,
}
render!(RenderRegularLink, IRegularLink, original, render_context, {
let children = {
let mut ret = Vec::new();
@ -43,27 +25,9 @@ render!(RenderRegularLink, IRegularLink, original, render_context, {
ret
};
let target = original
.target
.generate_final_target(render_context.clone())?
.unwrap_or_else(|| "".to_owned());
let render_link = match &original.target {
LinkTarget::Raw(_) | LinkTarget::Post { .. } | LinkTarget::Target { .. } => {
RenderRegularLink::Anchor(RenderRegularLinkAnchor {
target,
Ok(RenderRegularLink {
raw_link: original.raw_link.clone(),
children,
post_blank: original.post_blank,
})
}
LinkTarget::Image { alt, .. } => RenderRegularLink::Image(RenderRegularLinkImage {
src: target,
alt: alt.clone(),
raw_link: original.raw_link.clone(),
post_blank: original.post_blank,
}),
};
Ok(render_link)
});

View File

@ -3,12 +3,11 @@ use std::path::Path;
use crate::config::Config;
use crate::error::CustomError;
use super::dependency_manager::RefDependencyManager;
/// The supporting information used for converting the intermediate representation into the dust context for rendering.
#[derive(Debug, Clone)]
pub(crate) struct RenderContext<'intermediate> {
pub(crate) config: &'intermediate Config,
// TODO: Perhaps rename to output_root_directory.
pub(crate) output_root_directory: &'intermediate Path,
pub(crate) output_file: &'intermediate Path,
@ -18,13 +17,6 @@ pub(crate) struct RenderContext<'intermediate> {
/// IDs, for example, multiple blog posts with footnotes in a blog
/// stream.
pub(crate) id_addition: Option<&'intermediate str>,
/// Tracks dependencies from rendering Org document(s).
///
/// Examples of dependencies would be:
/// - Static files that need to be copied to the output folder
/// - Code blocks that need to be executed (for example, gnuplot graphs)
pub(crate) dependency_manager: RefDependencyManager,
}
impl<'intermediate> RenderContext<'intermediate> {
@ -33,14 +25,12 @@ impl<'intermediate> RenderContext<'intermediate> {
output_directory: &'intermediate Path,
output_file: &'intermediate Path,
id_addition: Option<&'intermediate str>,
dependency_manager: RefDependencyManager,
) -> Result<RenderContext<'intermediate>, CustomError> {
Ok(RenderContext {
config,
output_root_directory: output_directory,
output_file,
id_addition,
dependency_manager,
})
}
}

View File

@ -3,7 +3,6 @@ use serde::Serialize;
use super::render_context::RenderContext;
use crate::error::CustomError;
use crate::intermediate::ISrcBlock;
use crate::intermediate::ISrcSegment;
use super::macros::render;
@ -11,85 +10,15 @@ use super::macros::render;
#[serde(tag = "type")]
#[serde(rename = "src_block")]
pub(crate) struct RenderSrcBlock {
lines: Vec<RenderSrcLine>,
lines: Vec<String>,
language: Option<String>,
post_blank: organic::types::PostBlank,
}
#[derive(Debug, Serialize)]
pub(crate) struct RenderSrcLine {
children: Vec<RenderSrcSegment>,
}
#[derive(Debug, Serialize)]
#[serde(tag = "type")]
pub(crate) enum RenderSrcSegment {
#[serde(rename = "raw_text")]
RawText { content: String },
#[serde(rename = "highlight_start")]
HighlightStart { name: String },
#[serde(rename = "highlight_end")]
HighlightEnd,
}
render!(RenderSrcBlock, ISrcBlock, original, render_context, {
let lines = original
.lines
.iter()
.map(|original_line| {
let children = original_line
.children
.iter()
.map(|original_segment| match original_segment {
ISrcSegment::RawText(body) => RenderSrcSegment::RawText {
content: body.to_owned(),
},
ISrcSegment::HighlightStart { name } => RenderSrcSegment::HighlightStart {
name: css_safe_name(name),
},
ISrcSegment::HighlightEnd => RenderSrcSegment::HighlightEnd,
})
.collect();
RenderSrcLine { children }
})
.collect();
match original.language.as_deref() {
Some("bash") => {
render_context
.dependency_manager
.lock()
.unwrap()
.include_css("language_bash.css")?;
}
Some("nix") => {
render_context
.dependency_manager
.lock()
.unwrap()
.include_css("language_nix.css")?;
}
Some("python") => {
render_context
.dependency_manager
.lock()
.unwrap()
.include_css("language_python.css")?;
}
_ => {}
};
render!(RenderSrcBlock, ISrcBlock, original, _render_context, {
Ok(RenderSrcBlock {
lines,
lines: original.lines.clone(),
language: original.language.clone(),
post_blank: original.post_blank,
})
});
fn css_safe_name<S>(inp: S) -> String
where
std::string::String: From<S>,
{
let inp: String = inp.into();
inp.replace(".", "_")
}

View File

@ -1,10 +1,8 @@
use serde::Serialize;
use super::render_context::RenderContext;
use super::table_group::RenderTableGroup;
use crate::error::CustomError;
use crate::intermediate::ITable;
use crate::intermediate::ITableGroup;
use super::macros::render;
use super::table_row::RenderTableRow;
@ -13,29 +11,15 @@ use super::table_row::RenderTableRow;
#[serde(tag = "type")]
#[serde(rename = "table")]
pub(crate) struct RenderTable {
children: Vec<RenderTableGroup>,
children: Vec<RenderTableRow>,
post_blank: organic::types::PostBlank,
}
render!(RenderTable, ITable, original, render_context, {
let children = {
let mut ret = Vec::new();
for group in original.children.iter() {
let mut rows = Vec::new();
match group {
ITableGroup::Head(irows) => {
for obj in irows {
rows.push(RenderTableRow::new(render_context.clone(), obj)?);
}
ret.push(RenderTableGroup::Head { children: rows });
}
ITableGroup::Body(irows) => {
for obj in irows {
rows.push(RenderTableRow::new(render_context.clone(), obj)?);
}
ret.push(RenderTableGroup::Body { children: rows });
}
}
for obj in original.children.iter() {
ret.push(RenderTableRow::new(render_context.clone(), obj)?);
}
ret
};

View File

@ -1,12 +0,0 @@
use super::table_row::RenderTableRow;
use serde::Serialize;
#[derive(Debug, Serialize)]
#[serde(tag = "type")]
pub(crate) enum RenderTableGroup {
#[serde(rename = "head")]
Head { children: Vec<RenderTableRow> },
#[serde(rename = "body")]
Body { children: Vec<RenderTableRow> },
}

View File

@ -14,17 +14,9 @@ pub(crate) struct RenderTarget {
post_blank: organic::types::PostBlank,
}
render!(RenderTarget, ITarget, original, render_context, {
let id = format!(
"{}{}",
render_context
.id_addition
.map(|id_addition| format!("sec{}.", id_addition))
.unwrap_or_default(),
original.id
);
render!(RenderTarget, ITarget, original, _render_context, {
Ok(RenderTarget {
id,
id: original.id.clone(),
post_blank: original.post_blank,
})
});

View File

@ -1,21 +1,16 @@
use serde::Serialize;
use super::macros::render;
use super::render_context::RenderContext;
use crate::error::CustomError;
use crate::intermediate::ITimestamp;
use super::macros::rnoop;
#[derive(Debug, Serialize)]
#[serde(tag = "type")]
#[serde(rename = "timestamp")]
pub(crate) struct RenderTimestamp {
source: String,
post_blank: organic::types::PostBlank,
}
render!(RenderTimestamp, ITimestamp, original, _render_context, {
Ok(RenderTimestamp {
source: original.source.clone(),
post_blank: original.post_blank,
})
});
rnoop!(RenderTimestamp, ITimestamp);

View File

@ -3,19 +3,19 @@ use std::string::FromUtf8Error;
#[derive(Debug)]
pub(crate) enum CustomError {
Static(#[allow(dead_code)] &'static str),
String(#[allow(dead_code)] String),
IO(#[allow(dead_code)] std::io::Error),
TomlSerialize(#[allow(dead_code)] toml::ser::Error),
TomlDeserialize(#[allow(dead_code)] toml::de::Error),
Tokio(#[allow(dead_code)] tokio::task::JoinError),
Serde(#[allow(dead_code)] serde_json::Error),
Utf8(#[allow(dead_code)] Utf8Error),
FromUtf8(#[allow(dead_code)] FromUtf8Error),
DusterCompile(#[allow(dead_code)] duster::renderer::CompileError),
DusterRender(#[allow(dead_code)] duster::renderer::RenderError),
PathStripPrefix(#[allow(dead_code)] std::path::StripPrefixError),
UrlParseError(#[allow(dead_code)] url::ParseError),
Static(&'static str),
String(String),
IO(std::io::Error),
TomlSerialize(toml::ser::Error),
TomlDeserialize(toml::de::Error),
WalkDir(walkdir::Error),
Tokio(tokio::task::JoinError),
Serde(serde_json::Error),
Utf8(Utf8Error),
FromUtf8(FromUtf8Error),
DusterCompile(duster::renderer::CompileError),
DusterRender(duster::renderer::RenderError),
PathStripPrefix(std::path::StripPrefixError),
}
impl From<std::io::Error> for CustomError {
@ -48,6 +48,12 @@ impl From<toml::de::Error> for CustomError {
}
}
impl From<walkdir::Error> for CustomError {
fn from(value: walkdir::Error) -> Self {
CustomError::WalkDir(value)
}
}
impl From<tokio::task::JoinError> for CustomError {
fn from(value: tokio::task::JoinError) -> Self {
CustomError::Tokio(value)
@ -89,9 +95,3 @@ impl From<std::path::StripPrefixError> for CustomError {
CustomError::PathStripPrefix(value)
}
}
impl From<url::ParseError> for CustomError {
fn from(value: url::ParseError) -> Self {
CustomError::UrlParseError(value)
}
}

View File

@ -1,88 +0,0 @@
#[cfg(feature = "tracing")]
use opentelemetry_otlp::WithExportConfig;
#[cfg(feature = "tracing")]
use tracing::warn;
#[cfg(feature = "tracing")]
use tracing_subscriber::fmt;
#[cfg(feature = "tracing")]
use tracing_subscriber::prelude::__tracing_subscriber_SubscriberExt;
#[cfg(feature = "tracing")]
use tracing_subscriber::util::SubscriberInitExt;
const SERVICE_NAME: &str = "natter";
// Despite the obvious verbosity that fully-qualifying everything causes, in these functions I am fully-qualifying everything relating to tracing. This is because the tracing feature involves multiple libraries working together and so I think it is beneficial to see which libraries contribute which bits.
#[cfg(feature = "tracing")]
pub(crate) fn init_telemetry() -> Result<(), Box<dyn std::error::Error>> {
    // Layer that mirrors tracing events to the console; used whether or not
    // the OTLP exporter comes up successfully.
    let log_to_console = fmt::layer();
    let subscriber = tracing_subscriber::Registry::default();
    // Honor env-based filtering (RUST_LOG-style); default to WARN when unset
    // or unparsable.
    let level_filter_layer = tracing_subscriber::EnvFilter::try_from_default_env()
        .unwrap_or(tracing_subscriber::EnvFilter::new("WARN"));
    // by default it will hit http://localhost:4317 with a gRPC payload
    // TODO: I think the endpoint can be controlled by the OTEL_EXPORTER_OTLP_TRACES_ENDPOINT env variable instead of hard-coded into this code base. Regardless, I am the only developer right now so I am not too concerned.
    let exporter = opentelemetry_otlp::new_exporter()
        .tonic()
        // Using "localhost" is broken inside the docker container when tracing
        .with_endpoint("http://127.0.0.1:4317/v1/traces");
    let tracer = opentelemetry_otlp::new_pipeline()
        .tracing()
        .with_exporter(exporter)
        .with_trace_config(opentelemetry::sdk::trace::config().with_resource(
            opentelemetry::sdk::Resource::new(vec![opentelemetry::KeyValue::new(
                opentelemetry_semantic_conventions::resource::SERVICE_NAME,
                SERVICE_NAME.to_string(),
            )]),
        ))
        // If I do install_batch then 1K+ spans will get orphaned off into their own trace and I get the error message "OpenTelemetry trace error occurred. cannot send message to batch processor as the channel is closed"
        //
        // If I do install_simple then it only creates 1 trace (which is good!) but my console gets spammed with this concerning log message that makes me think it might be dropping the extra spans on the floor: "OpenTelemetry trace error occurred. Exporter otlp encountered the following error(s): the grpc server returns error (Unknown error): , detailed error message: Service was not ready: transport error"
        //
        // I suspect it is related to this bug: https://github.com/open-telemetry/opentelemetry-rust/issues/888
        //
        // .install_simple()
        .install_batch(opentelemetry::runtime::Tokio);
    let tracing_layer = tracer.map(|tracer| tracing_opentelemetry::layer().with_tracer(tracer));
    opentelemetry::global::set_text_map_propagator(
        opentelemetry::sdk::propagation::TraceContextPropagator::new(),
    );
    match tracing_layer {
        Ok(tracing_layer) => {
            subscriber
                .with(level_filter_layer)
                .with(tracing_layer)
                .with(log_to_console)
                .try_init()?;
        }
        Err(e) => {
            // OTLP pipeline failed to build: fall back to console-only logging.
            // Reuse the layer constructed above instead of building a second,
            // identical fmt::layer().
            subscriber
                .with(level_filter_layer)
                .with(log_to_console)
                .try_init()?;
            warn!("Failed to initialize OpenTelemetry tracing: {}", e);
        }
    };
    Ok(())
}
#[cfg(feature = "tracing")]
pub(crate) fn shutdown_telemetry() -> Result<(), Box<dyn std::error::Error>> {
    // Flush and tear down the global tracer provider so buffered spans are
    // exported before the process exits.
    opentelemetry::global::shutdown_tracer_provider();
    Ok(())
}
// No-op stand-in so callers need not gate their init call on the "tracing"
// feature themselves.
#[cfg(not(feature = "tracing"))]
pub(crate) fn init_telemetry() -> Result<(), Box<dyn std::error::Error>> {
    Ok(())
}
// No-op stand-in matching the "tracing"-enabled signature; nothing to flush
// when telemetry was never initialized.
#[cfg(not(feature = "tracing"))]
pub(crate) fn shutdown_telemetry() -> Result<(), Box<dyn std::error::Error>> {
    Ok(())
}

View File

@ -113,14 +113,14 @@ pub(crate) enum IAstNode {
}
pub(crate) trait IntoIAstNode<'parse> {
fn as_ast_node<'orig>(
fn into_ast_node<'orig>(
&'orig self,
intermediate_context: IntermediateContext<'orig, 'parse>,
) -> BoxFuture<'orig, Result<IAstNode, CustomError>>;
}
impl<'parse> IntoIAstNode<'parse> for organic::types::DocumentElement<'parse> {
fn as_ast_node<'orig>(
fn into_ast_node<'orig>(
&'orig self,
intermediate_context: IntermediateContext<'orig, 'parse>,
) -> BoxFuture<'orig, Result<IAstNode, CustomError>> {
@ -139,7 +139,7 @@ impl<'parse> IntoIAstNode<'parse> for organic::types::DocumentElement<'parse> {
}
impl<'parse> IntoIAstNode<'parse> for organic::types::Element<'parse> {
fn as_ast_node<'orig>(
fn into_ast_node<'orig>(
&'orig self,
intermediate_context: IntermediateContext<'orig, 'parse>,
) -> BoxFuture<'orig, Result<IAstNode, CustomError>> {
@ -226,7 +226,7 @@ impl<'parse> IntoIAstNode<'parse> for organic::types::Element<'parse> {
}
impl<'parse> IntoIAstNode<'parse> for organic::types::Object<'parse> {
fn as_ast_node<'orig>(
fn into_ast_node<'orig>(
&'orig self,
intermediate_context: IntermediateContext<'orig, 'parse>,
) -> BoxFuture<'orig, Result<IAstNode, CustomError>> {

View File

@ -3,16 +3,13 @@ use std::path::PathBuf;
use std::sync::Arc;
use std::sync::Mutex;
use tokio::fs::DirEntry;
use tokio::task::JoinHandle;
use walkdir::WalkDir;
use crate::error::CustomError;
use crate::intermediate::blog_post_page::BlogPostPageInput;
use crate::intermediate::page::BlogPostPageInput;
use crate::intermediate::registry::Registry;
use crate::intermediate::IntermediateContext;
use crate::walk_fs::walk_fs;
use crate::walk_fs::WalkAction;
use crate::walk_fs::WalkFsFilterResult;
use super::BlogPostPage;
@ -23,29 +20,23 @@ pub(crate) struct BlogPost {
}
impl BlogPost {
pub(crate) async fn load_blog_post<P: AsRef<Path>, R: AsRef<Path>, S: AsRef<Path>>(
pub(crate) async fn load_blog_post<P: AsRef<Path>, R: AsRef<Path>>(
root_dir: R,
posts_dir: S,
post_dir: P,
) -> Result<BlogPost, CustomError> {
async fn inner(
_root_dir: &Path,
posts_dir: &Path,
post_dir: &Path,
) -> Result<BlogPost, CustomError> {
let post_id = post_dir.strip_prefix(posts_dir)?.as_os_str();
async fn inner(_root_dir: &Path, post_dir: &Path) -> Result<BlogPost, CustomError> {
let post_id = post_dir
.file_name()
.expect("The post directory should have a name.");
// Load all the *.org files under the post directory from disk into memory
let org_files = {
let mut ret = Vec::new();
let org_files_iter = get_org_files(post_dir).await?;
let org_files_iter = get_org_files(post_dir)?;
for entry in org_files_iter {
ret.push(entry.await??);
}
ret
};
// Parse all the *.org files
let parsed_org_files = {
let mut ret = Vec::new();
for (path, contents) in org_files.iter() {
@ -64,10 +55,11 @@ impl BlogPost {
// Assign IDs to the targets
organic::types::AstNode::from(parsed_document)
.iter_all_ast_nodes()
.for_each(|node| {
if let organic::types::AstNode::Target(target) = node {
.for_each(|node| match node {
organic::types::AstNode::Target(target) => {
registry.get_target(target.value);
}
_ => {}
});
let registry = Arc::new(Mutex::new(registry));
@ -76,11 +68,7 @@ impl BlogPost {
ret.push(
BlogPostPage::new(
intermediate_context,
BlogPostPageInput::new(
relative_to_post_dir_path,
real_path,
parsed_document,
),
BlogPostPageInput::new(relative_to_post_dir_path, parsed_document),
)
.await?,
);
@ -93,7 +81,7 @@ impl BlogPost {
pages,
})
}
inner(root_dir.as_ref(), posts_dir.as_ref(), post_dir.as_ref()).await
inner(root_dir.as_ref(), post_dir.as_ref()).await
}
/// Get the date for a blog post.
@ -108,14 +96,15 @@ impl BlogPost {
pub(crate) fn get_date(&self) -> Option<&str> {
let index_page_date = self
.get_index_page()
.and_then(|index_page| index_page.date.as_deref());
.map(|index_page| index_page.date.as_ref().map(String::as_str))
.flatten();
if index_page_date.is_some() {
return index_page_date;
}
self.pages
.iter()
.filter_map(|page| page.date.as_deref())
.filter_map(|page| page.date.as_ref().map(String::as_str))
.next()
}
@ -132,33 +121,25 @@ async fn read_file(path: PathBuf) -> std::io::Result<(PathBuf, String)> {
Ok((path, contents))
}
pub(crate) async fn get_org_files<P: Into<PathBuf>>(
fn get_org_files<P: AsRef<Path>>(
root_dir: P,
) -> Result<impl Iterator<Item = JoinHandle<std::io::Result<(PathBuf, String)>>>, CustomError> {
let org_files = walk_fs(root_dir, filter_to_org_files).await?;
) -> Result<impl Iterator<Item = JoinHandle<std::io::Result<(PathBuf, String)>>>, walkdir::Error> {
let org_files = WalkDir::new(root_dir)
.into_iter()
.filter(|e| match e {
Ok(dir_entry) => {
dir_entry.file_type().is_file()
&& Path::new(dir_entry.file_name())
.extension()
.map(|ext| ext.to_ascii_lowercase() == "org")
.unwrap_or(false)
}
Err(_) => true,
})
.collect::<Result<Vec<_>, _>>()?;
let org_files = org_files
.into_iter()
.map(|entry| entry.path())
.map(walkdir::DirEntry::into_path)
.map(|path| tokio::spawn(read_file(path)));
Ok(org_files)
}
async fn filter_to_org_files(entry: &DirEntry) -> WalkFsFilterResult {
let file_type = entry.file_type().await?;
if file_type.is_dir() {
return Ok(WalkAction::Recurse);
}
if file_type.is_file() {
if entry
.path()
.extension()
.map(|ext| ext.eq_ignore_ascii_case("org"))
.unwrap_or(false)
{
return Ok(WalkAction::HaltAndCapture);
}
return Ok(WalkAction::Halt);
}
unreachable!("Unhandled file type.");
}

View File

@ -1,164 +0,0 @@
use std::path::PathBuf;
use crate::error::CustomError;
use super::footnote_definition::IRealFootnoteDefinition;
use super::macros::intermediate;
use super::IDocumentElement;
use super::IHeading;
use super::ISection;
/// Borrowed inputs needed to build a `BlogPostPage` from a parsed org document.
///
/// `'b` is the lifetime of the borrow of the parsed document; `'parse` is the
/// lifetime of the underlying parse buffer the document borrows from.
#[derive(Debug)]
pub(crate) struct BlogPostPageInput<'b, 'parse> {
    /// Relative path from the root of the blog post.
    path: PathBuf,
    /// The path to the .org source for the file.
    src: PathBuf,
    // Parsed org-mode document this page will be generated from.
    document: &'b organic::types::Document<'parse>,
}

impl<'b, 'parse> BlogPostPageInput<'b, 'parse> {
    /// Bundle the page's relative path, source path, and parsed document.
    /// Accepts anything convertible into `PathBuf` for both path arguments.
    pub(crate) fn new<P: Into<PathBuf>, S: Into<PathBuf>>(
        path: P,
        src: S,
        document: &'b organic::types::Document<'parse>,
    ) -> BlogPostPageInput<'b, 'parse> {
        BlogPostPageInput {
            path: path.into(),
            src: src.into(),
            document,
        }
    }
}
/// A single rendered page of a blog post, built from one .org file.
#[derive(Debug)]
pub(crate) struct BlogPostPage {
    /// Relative path from the root of the blog post.
    pub(crate) path: PathBuf,
    /// The path to the .org source for the file.
    pub(crate) src: PathBuf,
    // Title taken from the document's last `#+title` keyword, if any.
    pub(crate) title: Option<String>,
    // Date taken from the document's last `#+date` keyword, if any.
    pub(crate) date: Option<String>,
    // Top-level document elements (zeroth section followed by headings).
    pub(crate) children: Vec<IDocumentElement>,
    // Footnote definitions collected from the registry for this page.
    pub(crate) footnotes: Vec<IRealFootnoteDefinition>,
    // Publish visibility controlled by the `#+natter_publish` keyword.
    pub(crate) natter_publish: PublishStatus,
}
/// Visibility of a page, driven by the `#+natter_publish` keyword.
/// Defaults to `Full` when the keyword is absent.
#[derive(Debug, Default)]
pub(crate) enum PublishStatus {
    // Published and listed.
    #[default]
    Full,
    // Published but not listed in indexes.
    Unlisted,
    // Not published at all.
    Unpublished,
}
// Builds a `BlogPostPage` from a `BlogPostPageInput` via the project's
// `intermediate!` macro (which supplies the async constructor boilerplate).
intermediate!(
    BlogPostPage,
    BlogPostPageInput<'orig, 'parse>,
    original,
    intermediate_context,
    {
        // Convert the document body: the zeroth (pre-heading) section first,
        // then each top-level heading, preserving document order.
        let mut children = Vec::new();
        if let Some(section) = original.document.zeroth_section.as_ref() {
            children.push(IDocumentElement::Section(
                ISection::new(intermediate_context.clone(), section).await?,
            ));
        }
        for heading in original.document.children.iter() {
            children.push(IDocumentElement::Heading(
                IHeading::new(intermediate_context.clone(), heading).await?,
            ));
        }
        let footnotes = {
            // Clone the (id, definition) pairs out of the registry inside a
            // short-lived lock scope so the mutex is not held across `.await`.
            let footnote_definitions: Vec<_> = {
                let registry = intermediate_context.registry.lock().unwrap();
                let ret = registry
                    .get_footnote_ids()
                    .map(|(id, def)| (id, def.clone()))
                    .collect();
                ret
            };
            let mut ret = Vec::new();
            for (id, def) in footnote_definitions.into_iter() {
                ret.push(
                    IRealFootnoteDefinition::new(intermediate_context.clone(), id, def).await?,
                );
            }
            ret
        };
        Ok(BlogPostPage {
            path: original.path,
            src: original.src,
            title: get_title(original.document),
            date: get_date(original.document),
            children,
            footnotes,
            natter_publish: get_publish_status(original.document).unwrap_or_default(),
        })
    }
);
impl BlogPostPage {
    /// Get the output path relative to the post directory: the source path
    /// with its extension swapped to `html`.
    pub(crate) fn get_output_path(&self) -> PathBuf {
        self.path.with_extension("html")
    }
}
/// Extract the page title from the document's `#+title` keyword.
///
/// Walks every AST node and keeps the value of the *last* matching keyword
/// (case-insensitive key comparison), returning `None` when no title keyword
/// is present.
pub(crate) fn get_title(document: &organic::types::Document<'_>) -> Option<String> {
    let mut last_title = None;
    for node in organic::types::AstNode::from(document).iter_all_ast_nodes() {
        if let organic::types::AstNode::Keyword(kw) = node {
            if kw.key.eq_ignore_ascii_case("title") {
                last_title = Some(kw);
            }
        }
    }
    last_title.map(|kw| kw.value.to_owned())
}
/// Extract the page date from the document's `#+date` keyword.
///
/// Walks every AST node and keeps the value of the *last* matching keyword
/// (case-insensitive key comparison), returning `None` when no date keyword
/// is present.
pub(crate) fn get_date(document: &organic::types::Document<'_>) -> Option<String> {
    let mut last_date = None;
    for node in organic::types::AstNode::from(document).iter_all_ast_nodes() {
        if let organic::types::AstNode::Keyword(kw) = node {
            if kw.key.eq_ignore_ascii_case("date") {
                last_date = Some(kw);
            }
        }
    }
    last_date.map(|kw| kw.value.to_owned())
}
/// Read the `#+natter_publish` keyword and map it to a `PublishStatus`.
///
/// The *last* occurrence of the keyword wins (case-insensitive key match).
/// Returns `None` when the keyword is absent; panics on an unrecognized
/// value, which surfaces typos at build time.
pub(crate) fn get_publish_status(document: &organic::types::Document<'_>) -> Option<PublishStatus> {
    let mut last_value = None;
    for node in organic::types::AstNode::from(document).iter_all_ast_nodes() {
        if let organic::types::AstNode::Keyword(kw) = node {
            if kw.key.eq_ignore_ascii_case("natter_publish") {
                last_value = Some(kw.value);
            }
        }
    }
    last_value.map(|status| match status {
        "full" => PublishStatus::Full,
        "unlisted" => PublishStatus::Unlisted,
        "unpublished" => PublishStatus::Unpublished,
        status => panic!("Unrecognized publish status: {}", status),
    })
}

View File

@ -21,7 +21,7 @@ pub(crate) fn get_web_path<D: AsRef<Path>, F: AsRef<Path>, P: AsRef<Path>>(
containing_file_relative_to_output_directory
.parent()
.ok_or("File should exist in a folder.")?,
path_from_web_root.parent().unwrap_or(Path::new("")),
path_from_web_root.parent().unwrap_or(&Path::new("")),
)
.collect::<PathBuf>();
// Subtracting 1 from the depth to "remove" the file name.

View File

@ -32,7 +32,6 @@ pub(crate) struct IRealFootnoteDefinition {
}
impl IRealFootnoteDefinition {
#[allow(clippy::needless_lifetimes)]
pub(crate) async fn new<'orig, 'parse>(
_intermediate_context: IntermediateContext<'orig, 'parse>,
footnote_id: usize,
@ -54,7 +53,7 @@ impl IRealFootnoteDefinition {
pub(crate) fn get_reference_id(&self, id_addition: Option<&str>) -> String {
let id_addition = id_addition
.map(|id_addition| format!("sec{}.", id_addition))
.unwrap_or_default();
.unwrap_or(String::default());
format!("{}fnr.{}", id_addition, self.get_display_label())
}
@ -65,7 +64,7 @@ impl IRealFootnoteDefinition {
pub(crate) fn get_definition_id(&self, id_addition: Option<&str>) -> String {
let id_addition = id_addition
.map(|id_addition| format!("sec{}.", id_addition))
.unwrap_or_default();
.unwrap_or(String::default());
format!("{}fn.{}", id_addition, self.get_display_label())
}

View File

@ -38,7 +38,7 @@ impl IFootnoteReference {
pub(crate) fn get_reference_id(&self, id_addition: Option<&str>) -> String {
let id_addition = id_addition
.map(|id_addition| format!("sec{}.", id_addition))
.unwrap_or_default();
.unwrap_or(String::default());
if self.duplicate_offset == 0 {
format!("{}fnr.{}", id_addition, self.get_display_label())
@ -55,7 +55,7 @@ impl IFootnoteReference {
pub(crate) fn get_definition_id(&self, id_addition: Option<&str>) -> String {
let id_addition = id_addition
.map(|id_addition| format!("sec{}.", id_addition))
.unwrap_or_default();
.unwrap_or(String::default());
format!("{}fn.{}", id_addition, self.get_display_label())
}

View File

@ -16,7 +16,7 @@ intermediate!(
{
let value: String = if original.value.starts_with("$$") && original.value.ends_with("$$") {
format!("\\[{}\\]", &original.value[2..(original.value.len() - 2)])
} else if original.value.starts_with('$') && original.value.ends_with('$') {
} else if original.value.starts_with("$") && original.value.ends_with("$") {
format!("\\({}\\)", &original.value[1..(original.value.len() - 1)])
} else {
original.value.to_owned()

View File

@ -9,7 +9,6 @@ macro_rules! inoop {
}
impl $istruct {
#[allow(clippy::extra_unused_lifetimes)]
pub(crate) async fn new<'reg, 'orig, 'parse>(
_intermediate_context: crate::intermediate::IntermediateContext<'orig, 'parse>,
original: &'orig organic::types::$pstruct<'parse>,

View File

@ -2,7 +2,6 @@ mod angle_link;
mod ast_node;
mod babel_call;
mod blog_post;
mod blog_post_page;
mod bold;
mod center_block;
mod citation;
@ -60,7 +59,6 @@ mod subscript;
mod superscript;
mod table;
mod table_cell;
mod table_group;
mod table_row;
mod target;
mod timestamp;
@ -71,10 +69,7 @@ mod verse_block;
pub(crate) use angle_link::IAngleLink;
pub(crate) use ast_node::IAstNode;
pub(crate) use babel_call::IBabelCall;
pub(crate) use blog_post::get_org_files;
pub(crate) use blog_post::BlogPost;
pub(crate) use blog_post_page::BlogPostPage;
pub(crate) use blog_post_page::PublishStatus;
pub(crate) use bold::IBold;
pub(crate) use center_block::ICenterBlock;
pub(crate) use citation::ICitation;
@ -109,8 +104,7 @@ pub(crate) use latex_fragment::ILatexFragment;
pub(crate) use line_break::ILineBreak;
pub(crate) use object::IObject;
pub(crate) use org_macro::IOrgMacro;
pub(crate) use page::IPage;
pub(crate) use page::PageInput;
pub(crate) use page::BlogPostPage;
pub(crate) use paragraph::IParagraph;
pub(crate) use plain_link::IPlainLink;
pub(crate) use plain_list::IPlainList;
@ -122,20 +116,16 @@ pub(crate) use property_drawer::IPropertyDrawer;
pub(crate) use quote_block::IQuoteBlock;
pub(crate) use radio_link::IRadioLink;
pub(crate) use radio_target::IRadioTarget;
pub(crate) use registry::Registry;
pub(crate) use regular_link::IRegularLink;
pub(crate) use regular_link::LinkTarget;
pub(crate) use section::ISection;
pub(crate) use special_block::ISpecialBlock;
pub(crate) use src_block::ISrcBlock;
pub(crate) use src_block::ISrcSegment;
pub(crate) use statistics_cookie::IStatisticsCookie;
pub(crate) use strike_through::IStrikeThrough;
pub(crate) use subscript::ISubscript;
pub(crate) use superscript::ISuperscript;
pub(crate) use table::ITable;
pub(crate) use table_cell::ITableCell;
pub(crate) use table_group::ITableGroup;
pub(crate) use table_row::ITableRow;
pub(crate) use target::ITarget;
pub(crate) use timestamp::ITimestamp;

View File

@ -1,38 +1,49 @@
use super::blog_post_page::get_date;
use super::blog_post_page::get_publish_status;
use super::blog_post_page::get_title;
use std::path::PathBuf;
use crate::error::CustomError;
use super::footnote_definition::IRealFootnoteDefinition;
use super::macros::intermediate;
use super::IDocumentElement;
use super::IHeading;
use super::ISection;
use super::PublishStatus;
use crate::error::CustomError;
use std::path::PathBuf;
#[derive(Debug)]
pub(crate) struct IPage {
/// Relative path from the root of the pages directory.
pub(crate) path: PathBuf,
pub(crate) struct BlogPostPageInput<'b, 'parse> {
path: PathBuf,
document: &'b organic::types::Document<'parse>,
}
/// The path to the .org source for the file.
pub(crate) src: PathBuf,
impl<'b, 'parse> BlogPostPageInput<'b, 'parse> {
pub(crate) fn new<P: Into<PathBuf>>(
path: P,
document: &'b organic::types::Document<'parse>,
) -> BlogPostPageInput<'b, 'parse> {
BlogPostPageInput {
path: path.into(),
document,
}
}
}
#[derive(Debug)]
pub(crate) struct BlogPostPage {
/// Relative path from the root of the blog post.
pub(crate) path: PathBuf,
pub(crate) title: Option<String>,
#[allow(dead_code)]
pub(crate) date: Option<String>,
pub(crate) children: Vec<IDocumentElement>,
pub(crate) footnotes: Vec<IRealFootnoteDefinition>,
pub(crate) natter_publish: PublishStatus,
}
intermediate!(
IPage,
PageInput<'orig, 'parse>,
BlogPostPage,
BlogPostPageInput<'orig, 'parse>,
original,
intermediate_context,
{
@ -66,20 +77,18 @@ intermediate!(
ret
};
Ok(IPage {
Ok(BlogPostPage {
path: original.path,
src: original.src,
title: get_title(original.document),
date: get_date(original.document),
children,
footnotes,
natter_publish: get_publish_status(original.document).unwrap_or_default(),
})
}
);
impl IPage {
/// Get the output path relative to the pages directory.
impl BlogPostPage {
/// Get the output path relative to the post directory.
pub(crate) fn get_output_path(&self) -> PathBuf {
let mut ret = self.path.clone();
ret.set_extension("html");
@ -87,26 +96,26 @@ impl IPage {
}
}
#[derive(Debug)]
pub(crate) struct PageInput<'b, 'parse> {
/// Relative path from the root of the page.
path: PathBuf,
/// The path to the .org source for the file.
src: PathBuf,
document: &'b organic::types::Document<'parse>,
fn get_title(document: &organic::types::Document<'_>) -> Option<String> {
organic::types::AstNode::from(document)
.iter_all_ast_nodes()
.filter_map(|node| match node {
organic::types::AstNode::Keyword(kw) if kw.key.eq_ignore_ascii_case("title") => {
Some(kw)
}
_ => None,
})
.last()
.map(|kw| kw.value.to_owned())
}
impl<'b, 'parse> PageInput<'b, 'parse> {
pub(crate) fn new<P: Into<PathBuf>, S: Into<PathBuf>>(
path: P,
src: S,
document: &'b organic::types::Document<'parse>,
) -> PageInput<'b, 'parse> {
PageInput {
path: path.into(),
src: src.into(),
document,
}
}
fn get_date(document: &organic::types::Document<'_>) -> Option<String> {
organic::types::AstNode::from(document)
.iter_all_ast_nodes()
.filter_map(|node| match node {
organic::types::AstNode::Keyword(kw) if kw.key.eq_ignore_ascii_case("date") => Some(kw),
_ => None,
})
.last()
.map(|kw| kw.value.to_owned())
}

View File

@ -31,7 +31,6 @@ intermediate!(
);
impl IParagraph {
#[allow(clippy::needless_lifetimes)]
pub(crate) async fn artificial<'orig, 'parse>(
_intermediate_context: crate::intermediate::IntermediateContext<'orig, 'parse>,
children: Vec<IObject>,
@ -42,32 +41,4 @@ impl IParagraph {
post_blank,
})
}
/// Checks if the paragraph contains nothing but a single image.
///
/// When this happens, we want to center the image.
pub(crate) fn is_single_image(&self) -> bool {
let num_images = self
.children
.iter()
.filter(|c| match c {
IObject::RegularLink(iregular_link) => matches!(
&iregular_link.target,
super::LinkTarget::Image { src: _, alt: _ }
),
_ => false,
})
.count();
num_images == 1
&& self.children.iter().all(|c| match c {
IObject::RegularLink(iregular_link) => matches!(
&iregular_link.target,
super::LinkTarget::Image { src: _, alt: _ }
),
IObject::PlainText(iplain_text) => {
iplain_text.source.chars().all(|c| c.is_ascii_whitespace())
}
_ => false,
})
}
}

View File

@ -1,22 +1,5 @@
use super::macros::intermediate;
use super::macros::inoop;
use crate::error::CustomError;
use organic::types::StandardProperties;
#[derive(Debug, Clone)]
pub(crate) struct IPlainLink {
pub(crate) raw_link: String,
pub(crate) post_blank: organic::types::PostBlank,
}
intermediate!(
IPlainLink,
&'orig organic::types::PlainLink<'parse>,
original,
_intermediate_context,
{
Ok(IPlainLink {
raw_link: original.raw_link.to_owned(),
post_blank: original.get_post_blank(),
})
}
);
inoop!(IPlainLink, PlainLink);

View File

@ -29,11 +29,11 @@ intermediate!(
let mut ret = Vec::new();
// Special case for list items with only paragraphs and sublists as their children. In those cases, the paragraph tags are omitted.
let is_simple_list_item = original.children.iter().all(|child| {
matches!(
child,
organic::types::Element::Paragraph(_) | organic::types::Element::PlainList(_)
)
let is_simple_list_item = original.children.iter().all(|child| match child {
organic::types::Element::Paragraph(_) | organic::types::Element::PlainList(_) => {
true
}
_ => false,
});
if is_simple_list_item {
for elem in original.children.iter() {

View File

@ -14,7 +14,7 @@ type IdCounter = u16;
#[derive(Debug)]
pub(crate) struct Registry<'orig, 'parse> {
id_counter: IdCounter,
targets: HashMap<String, String>,
targets: HashMap<&'parse str, String>,
footnote_ids: Vec<(Option<&'parse str>, Vec<IAstNode>)>,
footnote_reference_counts: HashMap<&'parse str, usize>,
on_deck_footnote_ids: HashMap<&'parse str, &'orig Vec<Element<'parse>>>,
@ -31,8 +31,8 @@ impl<'orig, 'parse> Registry<'orig, 'parse> {
}
}
pub(crate) fn get_target<S: Into<String>>(&mut self, body: S) -> &String {
self.targets.entry(body.into()).or_insert_with(|| {
pub(crate) fn get_target<'reg>(&'reg mut self, body: &'parse str) -> &'reg String {
self.targets.entry(body).or_insert_with(|| {
self.id_counter += 1;
format!("target_{}", self.id_counter)
})
@ -52,9 +52,9 @@ impl<'orig, 'parse> Registry<'orig, 'parse> {
pub(crate) async fn get_footnote_reference_id<'orig, 'parse>(
intermediate_context: IntermediateContext<'orig, 'parse>,
label: Option<&'parse str>,
definition: &'orig [Object<'parse>],
definition: &'orig Vec<Object<'parse>>,
) -> Result<(usize, usize), CustomError> {
if label.is_none() {
if let None = label {
// If it has no label then it must always get a new ID.
let contents = convert_reference_contents(intermediate_context.clone(), definition).await?;
let pos = {
@ -148,7 +148,7 @@ pub(crate) async fn register_footnote_definition<'orig, 'parse>(
async fn convert_reference_contents<'orig, 'parse>(
intermediate_context: IntermediateContext<'orig, 'parse>,
contents: &'orig [Object<'parse>],
contents: &'orig Vec<Object<'parse>>,
) -> Result<Vec<IAstNode>, CustomError> {
let children = {
let mut ret = Vec::new();
@ -159,19 +159,23 @@ async fn convert_reference_contents<'orig, 'parse>(
};
let containing_paragraph =
IParagraph::artificial(intermediate_context.clone(), children, 0).await?;
let contents = vec![IAstNode::Paragraph(containing_paragraph)];
let contents = {
let mut ret = Vec::new();
ret.push(IAstNode::Paragraph(containing_paragraph));
ret
};
Ok(contents)
}
async fn convert_definition_contents<'orig, 'parse>(
intermediate_context: IntermediateContext<'orig, 'parse>,
contents: &'orig [Element<'parse>],
contents: &'orig Vec<Element<'parse>>,
) -> Result<Vec<IAstNode>, CustomError> {
let contents = {
let mut ret = Vec::new();
for obj in contents.iter() {
ret.push(obj.as_ast_node(intermediate_context.clone()).await?);
ret.push(obj.into_ast_node(intermediate_context.clone()).await?);
}
ret
};
@ -180,14 +184,14 @@ async fn convert_definition_contents<'orig, 'parse>(
}
/// Take a footnote definition that has not yet received a reference and move it into the active footnotes.
#[allow(clippy::needless_lifetimes)]
pub(crate) async fn promote_footnote_definition<'orig, 'parse>(
intermediate_context: IntermediateContext<'orig, 'parse>,
label: &'parse str,
) -> Result<(), CustomError> {
let definition = {
let mut registry = intermediate_context.registry.lock().unwrap();
registry.on_deck_footnote_ids.remove(label)
let definition = registry.on_deck_footnote_ids.remove(label);
definition
};
if let Some(elements) = definition {
let existing_id = {

View File

@ -1,23 +1,14 @@
use std::borrow::Cow;
use std::path::Path;
use organic::types::LinkType;
use organic::types::StandardProperties;
use url::Url;
use super::get_web_path;
use super::macros::intermediate;
use super::IntermediateContext;
use super::IObject;
use crate::context::RenderContext;
use crate::error::CustomError;
#[derive(Debug, Clone)]
pub(crate) struct IRegularLink {
pub(crate) raw_link: String,
pub(crate) children: Vec<IObject>,
pub(crate) target: LinkTarget,
pub(crate) post_blank: organic::types::PostBlank,
}
@ -34,279 +25,10 @@ intermediate!(
}
ret
};
let raw_link = original.get_raw_link();
let target = LinkTarget::from_string(
intermediate_context.clone(),
raw_link.clone().into_owned(),
&original.link_type,
)?;
Ok(IRegularLink {
raw_link: raw_link.into_owned(),
raw_link: original.get_raw_link().into_owned(),
children,
target,
post_blank: original.get_post_blank(),
})
}
);
#[derive(Debug, Clone, PartialEq)]
pub(crate) enum LinkTarget {
Raw(String),
Post {
post_id: Option<String>,
subpath: String,
},
Target {
target_id: String,
},
Image {
src: String,
alt: String,
},
}
impl LinkTarget {
pub(crate) fn from_string(
intermediate_context: IntermediateContext<'_, '_>,
input: String,
link_type: &LinkType<'_>,
) -> Result<LinkTarget, CustomError> {
// If link type is file and the path ends in .svg then make it an image target
if let LinkType::File = link_type
&& (input.to_ascii_lowercase().ends_with(".svg")
|| input.to_ascii_lowercase().ends_with(".png"))
{
let src = Self::get_image_src(&input)?;
let alt = Self::get_image_alt(&input)?;
return Ok(LinkTarget::Image { src, alt });
};
let parsed = Url::parse(&input);
if let Err(url::ParseError::RelativeUrlWithoutBase) = parsed {
let target_id = {
let mut registry = intermediate_context.registry.lock().unwrap();
let target_id = registry.get_target(input).to_owned();
target_id
};
return Ok(LinkTarget::Target { target_id });
}
let parsed = parsed?;
match parsed.scheme() {
"post" => {
let post_id = parsed.host_str().map(str::to_owned);
let subpath = {
let subpath = parsed.path();
if let Some(subpath) = subpath.strip_prefix('/') {
subpath
} else {
subpath
}
};
Ok(LinkTarget::Post {
post_id,
subpath: subpath.to_owned(),
})
}
_ => Ok(LinkTarget::Raw(input.to_owned())),
}
}
pub(crate) fn generate_final_target(
&self,
render_context: RenderContext<'_>,
) -> Result<Option<String>, CustomError> {
match self {
LinkTarget::Raw(raw_link) => Ok(Some(raw_link.clone())),
LinkTarget::Post { post_id, subpath } => {
let path = post_id
.as_ref()
.map(|post_id| {
let path_to_post = render_context
.config
.get_relative_path_to_post(post_id)
.join(subpath);
get_web_path(
render_context.config,
render_context.output_root_directory,
render_context.output_file,
path_to_post,
)
})
.map_or(Ok(None), |r| r.map(Some))?;
Ok(path)
}
LinkTarget::Target { target_id } => Ok(Some(format!(
"#{}{}",
render_context
.id_addition
.map(|id_addition| format!("sec{}.", id_addition))
.unwrap_or_default(),
target_id
))),
LinkTarget::Image { src, .. } => {
let path_to_file = render_context
.dependency_manager
.lock()
.unwrap()
.get_current_folder()?
.join(src)
.canonicalize()?;
let input_root_directory = render_context.config.get_root_directory();
let relative_path_to_file = path_to_file.strip_prefix(input_root_directory)?;
let web_path = get_web_path(
render_context.config,
render_context.output_root_directory,
render_context.output_file,
relative_path_to_file,
)?;
render_context
.dependency_manager
.lock()
.unwrap()
.mark_file_for_copying(path_to_file)?;
Ok(Some(web_path))
}
}
}
/// Get the value for the src attribute of the image.
fn get_image_src(input: &str) -> Result<String, CustomError> {
let input = if input.to_ascii_lowercase().starts_with("file:") {
Cow::Borrowed(&input[5..])
} else {
Cow::Borrowed(input)
};
let path = Path::new(input.as_ref());
if input.to_ascii_lowercase().starts_with("/ssh:") {
return Ok(format!("file:/{}", input));
}
if path.is_absolute() {
return Ok(format!("file://{}", input));
}
Ok(input.into_owned())
}
/// Get file name from the last segment of an image path.
fn get_image_alt(input: &str) -> Result<String, CustomError> {
let input = if input.to_ascii_lowercase().starts_with("file:") {
Cow::Borrowed(&input[5..])
} else {
Cow::Borrowed(input)
};
let path = Path::new(input.as_ref());
match path
.components()
.next_back()
.ok_or("Images should have at least one component in their path.")?
{
std::path::Component::Prefix(_) => {
// Prefix components only occur on windows
panic!("Prefix components are not supporterd.")
}
std::path::Component::RootDir
| std::path::Component::CurDir
| std::path::Component::ParentDir => {
Err("Final component of an image path should be a normal component.".into())
}
std::path::Component::Normal(file_name) => Ok(file_name
.to_str()
.ok_or("Image link was not valid utf-8.")?
.to_owned()),
}
}
}
#[cfg(test)]
mod tests {
use std::borrow::Cow;
use std::sync::Arc;
use std::sync::Mutex;
use crate::intermediate::Registry;
use super::*;
#[test]
fn link_target_raw() -> Result<(), CustomError> {
let registry = Registry::new();
let registry = Arc::new(Mutex::new(registry));
let intermediate_context = IntermediateContext::new(registry)?;
#[allow(clippy::single_element_loop)]
for (inp, typ) in [(
"https://test.example/foo",
LinkType::Protocol(Cow::from("https")),
)] {
assert_eq!(
LinkTarget::from_string(intermediate_context.clone(), inp.to_owned(), &typ)?,
LinkTarget::Raw(inp.to_owned())
);
}
Ok(())
}
#[test]
fn link_target_image() -> Result<(), CustomError> {
let registry = Registry::new();
let registry = Arc::new(Mutex::new(registry));
let intermediate_context = IntermediateContext::new(registry)?;
for (inp, typ, expected_src, expected_alt) in [
("file:image.svg", LinkType::File, "image.svg", "image.svg"),
(
"file:/image.svg",
LinkType::File,
"file:///image.svg",
"image.svg",
),
(
"file:./image.svg",
LinkType::File,
"./image.svg",
"image.svg",
),
(
"/image.svg",
LinkType::File,
"file:///image.svg",
"image.svg",
),
("./image.svg", LinkType::File, "./image.svg", "image.svg"),
("./image.SVG", LinkType::File, "./image.SVG", "image.SVG"),
(
"./image and stuff.SVG",
LinkType::File,
"./image and stuff.SVG",
"image and stuff.SVG",
),
(
"/ssh:admin@test.example:important/file.svg",
LinkType::File,
"file://ssh:admin@test.example:important/file.svg",
"file.svg",
),
(
"file:/ssh:admin@test.example:important/file.svg",
LinkType::File,
"file://ssh:admin@test.example:important/file.svg",
"file.svg",
),
(
"file:/foo/bar/baz/image.svg",
LinkType::File,
"file:///foo/bar/baz/image.svg",
"image.svg",
),
] {
assert_eq!(
LinkTarget::from_string(intermediate_context.clone(), inp.to_owned(), &typ)?,
LinkTarget::Image {
src: expected_src.to_owned(),
alt: expected_alt.to_owned()
}
);
}
Ok(())
}
}

View File

@ -1,31 +1,14 @@
use std::borrow::Borrow;
use super::macros::intermediate;
use crate::error::CustomError;
use organic::types::StandardProperties;
use tree_sitter_highlight::HighlightConfiguration;
use tree_sitter_highlight::HighlightEvent;
use tree_sitter_highlight::Highlighter;
#[derive(Debug, Clone)]
pub(crate) struct ISrcBlock {
pub(crate) lines: Vec<ISrcLine>,
pub(crate) lines: Vec<String>,
pub(crate) language: Option<String>,
pub(crate) post_blank: organic::types::PostBlank,
}
#[derive(Debug, Clone)]
pub(crate) struct ISrcLine {
pub(crate) children: Vec<ISrcSegment>,
}
#[derive(Debug, Clone)]
pub(crate) enum ISrcSegment {
RawText(String),
HighlightStart { name: String },
HighlightEnd,
}
intermediate!(
ISrcBlock,
&'orig organic::types::SrcBlock<'parse>,
@ -76,66 +59,14 @@ intermediate!(
})
.collect();
let language = original.language.map(str::to_owned);
match language.as_deref() {
Some(lang @ "bash") => {
let highlighted = highlight_bash(&lines);
if let Ok(highlighted) = highlighted {
return Ok(ISrcBlock {
lines: highlighted,
language,
post_blank: original.get_post_blank(),
});
} else {
println!("Warning: Failed to highlight {} source.", lang);
}
}
Some(lang @ "nix") => {
let highlighted = highlight_nix(&lines);
if let Ok(highlighted) = highlighted {
return Ok(ISrcBlock {
lines: highlighted,
language,
post_blank: original.get_post_blank(),
});
} else {
println!("Warning: Failed to highlight {} source.", lang);
}
}
Some(lang @ "python") => {
let highlighted = highlight_python(&lines);
if let Ok(highlighted) = highlighted {
return Ok(ISrcBlock {
lines: highlighted,
language,
post_blank: original.get_post_blank(),
});
} else {
println!("Warning: Failed to highlight {} source.", lang);
}
}
Some(lang) => {
println!("Warning: No highlighting for language: {}", lang);
}
_ => {}
};
let highlighted = highlight_plain(&lines)?;
Ok(ISrcBlock {
lines: highlighted,
lines,
language,
post_blank: original.get_post_blank(),
})
}
);
impl ISrcLine {
pub(crate) fn new() -> ISrcLine {
ISrcLine {
children: Vec::new(),
}
}
}
fn ascii_whitespace_value(c: char) -> usize {
match c {
' ' => 1,
@ -145,161 +76,3 @@ fn ascii_whitespace_value(c: char) -> usize {
_ => unreachable!("Only ascii whitespace can reach this code."),
}
}
/// Wrap each input line in a single RawText segment, applying no syntax
/// highlighting at all. Fallback for languages without a highlighter.
fn highlight_plain<L>(lines: &[L]) -> Result<Vec<ISrcLine>, CustomError>
where
    std::string::String: for<'a> From<&'a L>,
{
    let mut rendered = Vec::with_capacity(lines.len());
    for raw in lines.iter() {
        let mut line = ISrcLine::new();
        line.children.push(ISrcSegment::RawText(raw.into()));
        rendered.push(line);
    }
    Ok(rendered)
}
/// Run a tree-sitter highlighter over `lines` and rebuild the result as
/// per-line segment lists.
///
/// The lines are concatenated and highlighted as one document; highlight
/// spans that cross a line boundary are closed at the end of each line and
/// reopened at the start of the next, so every ISrcLine is self-contained.
///
/// NOTE(review): assumes each entry in `lines` still ends with its '\n' —
/// the per-line split below keys off newline characters. TODO confirm at
/// the call sites.
fn highlight_tree_sitter<L>(
    config: HighlightConfiguration,
    highlight_names: &[&str],
    lines: &[L],
) -> Result<Vec<ISrcLine>, CustomError>
where
    L: Borrow<str>,
{
    let combined_text = lines.join("");
    // Need 1 highlighter per thread
    let mut highlighter = Highlighter::new();
    let highlights = highlighter
        .highlight(&config, combined_text.as_bytes(), None, |_| None)
        .unwrap();
    let mut highlighted_text: Vec<ISrcLine> = Vec::with_capacity(lines.len());
    let mut current_line = ISrcLine::new();
    // Capture names of the spans currently open, innermost last; used to
    // close and reopen spans at line boundaries.
    let mut highlight_stack: Vec<&str> = Vec::new();
    for event in highlights {
        match event.unwrap() {
            HighlightEvent::Source { start, end } => {
                let mut span = &combined_text[start..end];
                // A source span can cover several lines: split at each
                // newline, closing the open spans before the break and
                // reopening them after it.
                while let Some(line_break_index) = span.find('\n') {
                    let first_line = &span[..(line_break_index + 1)];
                    current_line
                        .children
                        .push(ISrcSegment::RawText(first_line.to_owned()));
                    // Close every span still open at the end of this line…
                    current_line.children.extend(
                        highlight_stack
                            .iter()
                            .map(|_name| ISrcSegment::HighlightEnd),
                    );
                    highlighted_text.push(current_line);
                    current_line = ISrcLine::new();
                    // …and reopen them, in the same order, on the next line.
                    current_line
                        .children
                        .extend(
                            highlight_stack
                                .iter()
                                .map(|name| ISrcSegment::HighlightStart {
                                    name: (*name).into(),
                                }),
                        );
                    span = &span[(line_break_index + 1)..];
                }
                if !span.is_empty() {
                    current_line
                        .children
                        .push(ISrcSegment::RawText(span.to_owned()));
                }
            }
            HighlightEvent::HighlightStart(s) => {
                // `s.0` indexes into the names this config was configured with.
                highlight_stack.push(highlight_names[s.0]);
                current_line.children.push(ISrcSegment::HighlightStart {
                    name: highlight_names[s.0].into(),
                });
            }
            HighlightEvent::HighlightEnd => {
                highlight_stack.pop();
                current_line.children.push(ISrcSegment::HighlightEnd);
            }
        }
    }
    // Every HighlightStart should have been balanced by a HighlightEnd.
    debug_assert!(highlight_stack.is_empty());
    // NOTE(review): a trailing line without a final '\n' would leave
    // `current_line` unpushed here — presumably inputs always end in '\n';
    // confirm at call sites.
    Ok(highlighted_text)
}
/// Highlight bash source lines with the tree-sitter bash grammar.
fn highlight_bash<L>(lines: &[L]) -> Result<Vec<ISrcLine>, CustomError>
where
    L: Borrow<str>,
{
    let highlight_names = ["comment", "function", "keyword", "property", "string"];
    let mut configuration = HighlightConfiguration::new(
        tree_sitter_bash::LANGUAGE.into(),
        "bash",
        tree_sitter_bash::HIGHLIGHT_QUERY,
        "",
        "",
    )
    .unwrap();
    configuration.configure(&highlight_names);
    highlight_tree_sitter(configuration, &highlight_names, lines)
}
/// Highlight nix source lines with the tree-sitter nix grammar.
fn highlight_nix<L>(lines: &[L]) -> Result<Vec<ISrcLine>, CustomError>
where
    L: Borrow<str>,
{
    let highlight_names = [
        "comment",
        "keyword",
        "property",
        "string",
        "string.special.path",
        // "string.special.uri",
    ];
    let mut configuration = HighlightConfiguration::new(
        tree_sitter_nix::LANGUAGE.into(),
        "nix",
        tree_sitter_nix::HIGHLIGHTS_QUERY,
        "",
        "",
    )
    .unwrap();
    configuration.configure(&highlight_names);
    highlight_tree_sitter(configuration, &highlight_names, lines)
}
/// Highlight python source lines with the tree-sitter python grammar.
fn highlight_python<L>(lines: &[L]) -> Result<Vec<ISrcLine>, CustomError>
where
    L: Borrow<str>,
{
    let highlight_names = [
        "comment",
        "function.builtin",
        "keyword",
        "property",
        "string",
        "type",
        "variable",
    ];
    let mut configuration = HighlightConfiguration::new(
        tree_sitter_python::LANGUAGE.into(),
        "python",
        tree_sitter_python::HIGHLIGHTS_QUERY,
        "",
        "",
    )
    .unwrap();
    configuration.configure(&highlight_names);
    highlight_tree_sitter(configuration, &highlight_names, lines)
}
// use tree_sitter::Parser;
// fn dump_nix<B>(body: B) -> Result<(), CustomError>
// where
// B: AsRef<str>,
// {
// let mut parser = Parser::new();
// parser
// .set_language(&tree_sitter_nix::LANGUAGE.into())
// .expect("Error loading Nix grammar");
// let mut tree = parser.parse(body.as_ref(), None).unwrap();
// println!("{}", tree.root_node());
// Ok(())
// }

View File

@ -1,12 +1,11 @@
use super::macros::intermediate;
use super::table_row::ITableRow;
use crate::error::CustomError;
use crate::intermediate::table_group::ITableGroup;
use organic::types::StandardProperties;
#[derive(Debug, Clone)]
pub(crate) struct ITable {
pub(crate) children: Vec<ITableGroup>,
pub(crate) children: Vec<ITableRow>,
pub(crate) post_blank: organic::types::PostBlank,
}
@ -16,40 +15,10 @@ intermediate!(
original,
intermediate_context,
{
// Separate groups by lines, multiple contiguous lines are the same as one.
// If there is only one group, it is a tbody.
// If there are more than one group, the first is thead and the rest are tbody.
let sections = group_into_sections(&original.children);
let children = if sections.len() == 1 {
// If there is only one section, then it is a body.
let children = {
let mut ret = Vec::new();
for group in sections.into_iter() {
let mut rows = Vec::new();
for obj in group.into_iter() {
rows.push(ITableRow::new(intermediate_context.clone(), obj).await?)
}
ret.push(ITableGroup::Body(rows));
}
ret
} else {
// If there are more than one section, the first is a head and the rest are body.
let mut ret = Vec::new();
let mut sections = sections.into_iter();
if let Some(group) = sections.next() {
let mut rows = Vec::new();
for obj in group.into_iter() {
rows.push(ITableRow::new(intermediate_context.clone(), obj).await?)
}
ret.push(ITableGroup::Head(rows));
}
for group in sections {
let mut rows = Vec::new();
for obj in group.into_iter() {
rows.push(ITableRow::new(intermediate_context.clone(), obj).await?)
}
ret.push(ITableGroup::Body(rows));
for obj in original.children.iter() {
ret.push(ITableRow::new(intermediate_context.clone(), obj).await?);
}
ret
};
@ -60,41 +29,3 @@ intermediate!(
})
}
);
/// State for group_into_sections: either between sections or accumulating
/// rows into the current one.
enum GroupIntoSectionsState<'orig, 'parse> {
    /// Outside any section (no rows seen yet, or the last row was a separator).
    NonSection,
    /// Accumulating a contiguous run of non-separator rows.
    Section(Vec<&'orig organic::types::TableRow<'parse>>),
}
/// Split table rows into sections separated by divider rows.
///
/// A row with no children acts as a separator; consecutive separators are
/// collapsed, and empty sections are never produced.
fn group_into_sections<'orig, 'parse>(
    rows: &'orig [organic::types::TableRow<'parse>],
) -> Vec<Vec<&'orig organic::types::TableRow<'parse>>> {
    let mut sections = Vec::new();
    let mut state = GroupIntoSectionsState::NonSection;
    for row in rows.iter() {
        let is_separator = row.children.is_empty();
        state = match state {
            GroupIntoSectionsState::NonSection if is_separator => {
                GroupIntoSectionsState::NonSection
            }
            GroupIntoSectionsState::NonSection => GroupIntoSectionsState::Section(vec![row]),
            GroupIntoSectionsState::Section(section) if is_separator => {
                sections.push(section);
                GroupIntoSectionsState::NonSection
            }
            GroupIntoSectionsState::Section(mut section) => {
                section.push(row);
                GroupIntoSectionsState::Section(section)
            }
        };
    }
    // Flush a section still open when the rows run out.
    if let GroupIntoSectionsState::Section(section) = state {
        sections.push(section);
    }
    sections
}

View File

@ -1,7 +0,0 @@
use super::ITableRow;
/// A group of table rows. NOTE(review): presumably rendered as thead (Head)
/// and tbody (Body) — confirm against the render layer.
#[derive(Debug, Clone)]
pub(crate) enum ITableGroup {
    /// Header rows.
    Head(Vec<ITableRow>),
    /// Body rows.
    Body(Vec<ITableRow>),
}

View File

@ -5,7 +5,6 @@ use organic::types::StandardProperties;
/// Intermediate representation of an org-mode target element.
#[derive(Debug, Clone)]
pub(crate) struct ITarget {
    // NOTE(review): presumably the anchor id links point at — confirm in the
    // render layer.
    pub(crate) id: String,
    // Original target text; private and unused at present, hence the allow.
    #[allow(dead_code)]
    value: String,
    // Blank-line count following the element, carried from the parse.
    pub(crate) post_blank: organic::types::PostBlank,
}

View File

@ -1,23 +1,5 @@
use super::macros::intermediate;
use super::util::coalesce_whitespace;
use super::macros::inoop;
use crate::error::CustomError;
use organic::types::StandardProperties;
/// Intermediate representation of an org-mode timestamp.
#[derive(Debug, Clone)]
pub(crate) struct ITimestamp {
    /// The timestamp's source text as it appeared in the org document.
    pub(crate) source: String,
    /// Blank-line count following the element, carried from the parse.
    pub(crate) post_blank: organic::types::PostBlank,
}
intermediate!(
ITimestamp,
&'orig organic::types::Timestamp<'parse>,
original,
_intermediate_context,
{
Ok(ITimestamp {
source: coalesce_whitespace(original.source).into_owned(),
post_blank: original.get_post_blank(),
})
}
);
inoop!(ITimestamp, Timestamp);

View File

@ -8,25 +8,23 @@ use self::cli::parameters::Commands;
use self::command::build::build_site;
use self::command::init::init_natter_folder;
use self::error::CustomError;
use self::init_tracing::init_telemetry;
use self::init_tracing::shutdown_telemetry;
mod cli;
mod command;
mod config;
mod context;
mod error;
mod init_tracing;
mod intermediate;
mod render;
mod walk_fs;
fn main() -> Result<ExitCode, CustomError> {
let rt = tokio::runtime::Runtime::new()?;
rt.block_on(async { main_body().await })
rt.block_on(async {
let main_body_result = main_body().await;
main_body_result
})
}
async fn main_body() -> Result<ExitCode, CustomError> {
init_telemetry().expect("Telemetry should initialize successfully.");
let args = Cli::parse();
match args.command {
Commands::Init(args) => {
@ -36,6 +34,5 @@ async fn main_body() -> Result<ExitCode, CustomError> {
build_site(args).await?;
}
};
shutdown_telemetry().expect("Telemetry should shutdown successfully.");
Ok(ExitCode::SUCCESS)
}

View File

@ -19,7 +19,7 @@ impl<'a> DusterRenderer<'a> {
impl<'a> RendererIntegration<'a> for DusterRenderer<'a> {
fn load_template(&mut self, name: &'a str, contents: &'a str) -> Result<(), CustomError> {
let compiled_template = duster::renderer::compile_template(contents)?;
let compiled_template = duster::renderer::compile_template(contents.as_ref())?;
self.templates.insert(name, compiled_template);
Ok(())
}

View File

@ -1,53 +0,0 @@
use std::collections::VecDeque;
use std::ops::AsyncFn;
use std::path::PathBuf;
use tokio::fs::DirEntry;
use crate::error::CustomError;
pub(crate) type WalkFsFilterResult = Result<WalkAction, CustomError>;
/// Breadth-first walk of the directory tree rooted at `root`.
///
/// `filter` is asked about every entry and its WalkAction decides, per entry,
/// whether to recurse into it and whether to include it in the returned list.
/// Errors from reading directories or from the filter abort the walk.
pub(crate) async fn walk_fs<P: Into<PathBuf>, F: AsyncFn(&DirEntry) -> WalkFsFilterResult>(
    root: P,
    filter: F,
) -> Result<Vec<DirEntry>, CustomError> {
    let mut captured = Vec::new();
    let mut pending: VecDeque<PathBuf> = VecDeque::new();
    pending.push_back(root.into());
    while let Some(directory) = pending.pop_front() {
        let mut reader = tokio::fs::read_dir(directory).await?;
        while let Some(entry) = reader.next_entry().await? {
            match filter(&entry).await? {
                WalkAction::Recurse => pending.push_back(entry.path()),
                WalkAction::RecurseAndCapture => {
                    pending.push_back(entry.path());
                    captured.push(entry);
                }
                WalkAction::HaltAndCapture => captured.push(entry),
                WalkAction::Halt => {}
            };
        }
    }
    Ok(captured)
}
/// Decision returned by a walk_fs filter for each directory entry: whether to
/// recurse into the entry and whether to capture it in the result list.
pub(crate) enum WalkAction {
    /// Do not walk down this path but add it to the return list.
    HaltAndCapture,
    /// Do not walk down this path and do not add it to the return list.
    Halt,
    /// Walk down this path and add it to the return list.
    #[allow(dead_code)]
    RecurseAndCapture,
    /// Walk down this path but do not add it to the return list.
    Recurse,
}