Merge branch 'staging-next' into staging

commit 0dc59c1fbb

.github/workflows/labels.yml (vendored, 256 changed lines)
@ -6,16 +6,23 @@
|
||||
name: "Label PR"
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: '37 * * * *'
|
||||
workflow_call:
|
||||
workflow_run:
|
||||
workflows:
|
||||
- Review dismissed
|
||||
- Review submitted
|
||||
types: [completed]
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
updatedWithin:
|
||||
description: 'Updated within [hours]'
|
||||
type: number
|
||||
required: false
|
||||
default: 0 # everything since last run
|
||||
|
||||
concurrency:
|
||||
group: labels-${{ github.workflow }}-${{ github.event_name }}-${{ github.event.pull_request.number || github.run_id }}
|
||||
cancel-in-progress: true
|
||||
# This explicitly avoids using `run_id` for the concurrency key to make sure that only
|
||||
# *one* non-PR run can run at a time.
|
||||
group: labels-${{ github.workflow }}-${{ github.event_name }}-${{ github.event.pull_request.number }}
|
||||
# PR- and manually-triggered runs will be cancelled, but scheduled runs will be queued.
|
||||
cancel-in-progress: ${{ github.event_name != 'schedule' }}
|
||||
|
||||
permissions:
|
||||
issues: write # needed to create *new* labels
|
||||
@ -31,114 +38,169 @@ jobs:
|
||||
runs-on: ubuntu-24.04-arm
|
||||
if: "!contains(github.event.pull_request.title, '[skip treewide]')"
|
||||
steps:
|
||||
- uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
|
||||
id: eval
|
||||
with:
|
||||
script: |
|
||||
const run_id = (await github.rest.actions.listWorkflowRuns({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
workflow_id: 'eval.yml',
|
||||
event: 'pull_request_target',
|
||||
head_sha: context.payload.pull_request?.head.sha ?? context.payload.workflow_run.head_sha
|
||||
})).data.workflow_runs[0]?.id
|
||||
core.setOutput('run-id', run_id)
|
||||
- name: Install dependencies
|
||||
run: npm install @actions/artifact
|
||||
|
||||
- name: Download the comparison results
|
||||
if: steps.eval.outputs.run-id
|
||||
uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
|
||||
with:
|
||||
run-id: ${{ steps.eval.outputs.run-id }}
|
||||
github-token: ${{ github.token }}
|
||||
pattern: comparison
|
||||
path: comparison
|
||||
merge-multiple: true
|
||||
|
||||
- name: Labels from eval
|
||||
if: steps.eval.outputs.run-id && github.event_name != 'pull_request'
|
||||
- name: Labels from API data and Eval results
|
||||
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
|
||||
env:
|
||||
UPDATED_WITHIN: ${{ inputs.updatedWithin }}
|
||||
with:
|
||||
script: |
|
||||
const path = require('node:path')
|
||||
const { DefaultArtifactClient } = require('@actions/artifact')
|
||||
const { readFile } = require('node:fs/promises')
|
||||
|
||||
let pull_requests
|
||||
if (context.payload.workflow_run) {
|
||||
// PRs from forks don't have any PRs associated by default.
|
||||
// Thus, we request the PR number with an API call *to* the fork's repo.
|
||||
// Multiple pull requests can be open from the same head commit, either via
|
||||
// different base branches or head branches.
|
||||
const { head_repository, head_sha, repository } = context.payload.workflow_run
|
||||
pull_requests = (await github.paginate(github.rest.repos.listPullRequestsAssociatedWithCommit, {
|
||||
owner: head_repository.owner.login,
|
||||
repo: head_repository.name,
|
||||
commit_sha: head_sha
|
||||
})).filter(pull_request => pull_request.base.repo.id == repository.id)
|
||||
} else {
|
||||
pull_requests = [ context.payload.pull_request ]
|
||||
const artifactClient = new DefaultArtifactClient()
|
||||
|
||||
if (process.env.UPDATED_WITHIN && !/^\d+$/.test(process.env.UPDATED_WITHIN))
|
||||
throw new Error('Please enter "updated within" as integer in hours.')
|
||||
|
||||
const cutoff = new Date(await (async () => {
|
||||
// Always run for Pull Request triggers, no cutoff since there will be a single
|
||||
// response only anyway. 0 is the Unix epoch, so always smaller.
|
||||
if (context.payload.pull_request?.number) return 0
|
||||
|
||||
// Manually triggered via UI when updatedWithin is set. Will fallthrough to the last
|
||||
// option if the updatedWithin parameter is set to 0, which is the default.
|
||||
const updatedWithin = Number.parseInt(process.env.UPDATED_WITHIN, 10)
|
||||
if (updatedWithin) return new Date().getTime() - updatedWithin * 60 * 60 * 1000
|
||||
|
||||
// Normally a scheduled run, but could be workflow_dispatch, see above. Go back as far
|
||||
// as the last successful run of this workflow to make sure we are not leaving anyone
|
||||
// behind on GHA failures.
|
||||
// Defaults to go back 1 hour on the first run.
|
||||
return (await github.rest.actions.listWorkflowRuns({
|
||||
...context.repo,
|
||||
workflow_id: 'labels.yml',
|
||||
event: 'schedule',
|
||||
status: 'success',
|
||||
exclude_pull_requests: true
|
||||
})).data.workflow_runs[0]?.created_at ?? new Date().getTime() - 1 * 60 * 60 * 1000
|
||||
})())
|
||||
core.info('cutoff timestamp: ' + cutoff.toISOString())
|
||||
|
||||
// To simplify this action's logic we fetch the pull_request data again below, even if
|
||||
// we are already in a pull_request event's context and would have the data readily
|
||||
// available. We do this by filtering the list of pull requests with head and base
|
||||
// branch - there can only be a single open Pull Request for any such combination.
|
||||
const prEventCondition = !context.payload.pull_request ? undefined : {
|
||||
// "label" is in the format of `user:branch` or `org:branch`
|
||||
head: context.payload.pull_request.head.label,
|
||||
base: context.payload.pull_request.base.ref
|
||||
}
|
||||
|
||||
await Promise.all(
|
||||
pull_requests.map(async (pull_request) => {
|
||||
const pr = {
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
issue_number: pull_request.number
|
||||
}
|
||||
await github.paginate(
|
||||
github.rest.pulls.list,
|
||||
{
|
||||
...context.repo,
|
||||
state: 'open',
|
||||
sort: 'updated',
|
||||
direction: 'desc',
|
||||
...prEventCondition
|
||||
},
|
||||
async (response, done) => (await Promise.allSettled(response.data.map(async (pull_request) => {
|
||||
try {
|
||||
const log = (k,v) => core.info(`PR #${pull_request.number} - ${k}: ${v}`)
|
||||
|
||||
// Get all currently set labels that we manage
|
||||
const before =
|
||||
(await github.paginate(github.rest.issues.listLabelsOnIssue, pr))
|
||||
.map(({ name }) => name)
|
||||
.filter(name =>
|
||||
name.startsWith('10.rebuild') ||
|
||||
name == '11.by: package-maintainer' ||
|
||||
name.startsWith('12.approvals:') ||
|
||||
name == '12.approved-by: package-maintainer'
|
||||
log('Last updated at', pull_request.updated_at)
|
||||
if (new Date(pull_request.updated_at) < cutoff) return done()
|
||||
|
||||
const run_id = (await github.rest.actions.listWorkflowRuns({
|
||||
...context.repo,
|
||||
workflow_id: 'eval.yml',
|
||||
event: 'pull_request_target',
|
||||
// For PR events, the workflow run is still in progress with this job itself.
|
||||
status: prEventCondition ? 'in_progress' : 'success',
|
||||
exclude_pull_requests: true,
|
||||
head_sha: pull_request.head.sha
|
||||
})).data.workflow_runs[0]?.id
|
||||
|
||||
// Newer PRs might not have run Eval to completion, yet. We can skip them, because this
|
||||
// job will be run as part of that Eval run anyway.
|
||||
log('Last eval run', run_id)
|
||||
if (!run_id) return;
|
||||
|
||||
const artifact = (await github.rest.actions.listWorkflowRunArtifacts({
|
||||
...context.repo,
|
||||
run_id,
|
||||
name: 'comparison'
|
||||
})).data.artifacts[0]
|
||||
|
||||
// Instead of checking the boolean artifact.expired, we give ourselves a minute to
// actually download the artifact in the next step and avoid that race condition.
|
||||
log('Artifact expires at', artifact.expires_at)
|
||||
if (new Date(artifact.expires_at) < new Date(new Date().getTime() + 60 * 1000)) return;
|
||||
|
||||
await artifactClient.downloadArtifact(artifact.id, {
|
||||
findBy: {
|
||||
repositoryName: context.repo.repo,
|
||||
repositoryOwner: context.repo.owner,
|
||||
token: core.getInput('github-token')
|
||||
},
|
||||
path: path.resolve(pull_request.number.toString()),
|
||||
expectedHash: artifact.digest
|
||||
})
|
||||
|
||||
// Get all currently set labels that we manage
|
||||
const before =
|
||||
pull_request.labels.map(({ name }) => name)
|
||||
.filter(name =>
|
||||
name.startsWith('10.rebuild') ||
|
||||
name == '11.by: package-maintainer' ||
|
||||
name.startsWith('12.approvals:') ||
|
||||
name == '12.approved-by: package-maintainer'
|
||||
)
|
||||
|
||||
const approvals = new Set(
|
||||
(await github.paginate(github.rest.pulls.listReviews, {
|
||||
...context.repo,
|
||||
pull_number: pull_request.number
|
||||
}))
|
||||
.filter(review => review.state == 'APPROVED')
|
||||
.map(review => review.user.id)
|
||||
)
|
||||
|
||||
const approvals = new Set(
|
||||
(await github.paginate(github.rest.pulls.listReviews, {
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
pull_number: pull_request.number
|
||||
}))
|
||||
.filter(review => review.state == 'APPROVED')
|
||||
.map(review => review.user.id)
|
||||
)
|
||||
const maintainers = new Set(Object.keys(
|
||||
JSON.parse(await readFile(`${pull_request.number}/maintainers.json`, 'utf-8'))
|
||||
))
|
||||
|
||||
const maintainers = new Set(Object.keys(
|
||||
JSON.parse(await readFile('comparison/maintainers.json', 'utf-8'))
|
||||
))
|
||||
// And the labels that should be there
|
||||
const after = JSON.parse(await readFile(`${pull_request.number}/changed-paths.json`, 'utf-8')).labels
|
||||
if (approvals.size > 0) after.push(`12.approvals: ${approvals.size > 2 ? '3+' : approvals.size}`)
|
||||
if (Array.from(maintainers).some(m => approvals.has(m))) after.push('12.approved-by: package-maintainer')
|
||||
|
||||
// And the labels that should be there
|
||||
const after = JSON.parse(await readFile('comparison/changed-paths.json', 'utf-8')).labels
|
||||
if (approvals.size > 0) after.push(`12.approvals: ${approvals.size > 2 ? '3+' : approvals.size}`)
|
||||
if (Array.from(maintainers).some(m => approvals.has(m))) after.push('12.approved-by: package-maintainer')
|
||||
// Remove the ones not needed anymore
|
||||
await Promise.all(
|
||||
before.filter(name => !after.includes(name))
|
||||
.map(name => github.rest.issues.removeLabel({
|
||||
...context.repo,
|
||||
issue_number: pull_request.number,
|
||||
name
|
||||
}))
|
||||
)
|
||||
|
||||
// Remove the ones not needed anymore
|
||||
await Promise.all(
|
||||
before.filter(name => !after.includes(name))
|
||||
.map(name => github.rest.issues.removeLabel({
|
||||
...pr,
|
||||
name
|
||||
}))
|
||||
)
|
||||
|
||||
// And add the ones that aren't set already
|
||||
const added = after.filter(name => !before.includes(name))
|
||||
if (added.length > 0) {
|
||||
await github.rest.issues.addLabels({
|
||||
...pr,
|
||||
labels: added
|
||||
})
|
||||
// And add the ones that aren't set already
|
||||
const added = after.filter(name => !before.includes(name))
|
||||
if (added.length > 0) {
|
||||
await github.rest.issues.addLabels({
|
||||
...context.repo,
|
||||
issue_number: pull_request.number,
|
||||
labels: added
|
||||
})
|
||||
}
|
||||
} catch (cause) {
|
||||
throw new Error(`Labeling PR #${pull_request.number} failed.`, { cause })
|
||||
}
|
||||
})
|
||||
})))
|
||||
.filter(({ status }) => status == 'rejected')
|
||||
.map(({ reason }) => core.setFailed(`${reason.message}\n${reason.cause.stack}`))
|
||||
)
|
||||
|
||||
- uses: actions/labeler@8558fd74291d67161a8a78ce36a881fa63b766a9 # v5.0.0
|
||||
name: Labels from touched files
|
||||
if: |
|
||||
github.event_name != 'workflow_run' &&
|
||||
github.event_name == 'pull_request_target' &&
|
||||
github.event.pull_request.head.repo.owner.login != 'NixOS' || !(
|
||||
github.head_ref == 'haskell-updates' ||
|
||||
github.head_ref == 'python-updates' ||
|
||||
@ -153,7 +215,7 @@ jobs:
|
||||
- uses: actions/labeler@8558fd74291d67161a8a78ce36a881fa63b766a9 # v5.0.0
|
||||
name: Labels from touched files (no sync)
|
||||
if: |
|
||||
github.event_name != 'workflow_run' &&
|
||||
github.event_name == 'pull_request_target' &&
|
||||
github.event.pull_request.head.repo.owner.login != 'NixOS' || !(
|
||||
github.head_ref == 'haskell-updates' ||
|
||||
github.head_ref == 'python-updates' ||
|
||||
@ -171,7 +233,7 @@ jobs:
|
||||
# This is to avoid the mass of labels there, which is mostly useless - and really annoying for
|
||||
# the backport labels.
|
||||
if: |
|
||||
github.event_name != 'workflow_run' &&
|
||||
github.event_name == 'pull_request_target' &&
|
||||
github.event.pull_request.head.repo.owner.login == 'NixOS' && (
|
||||
github.head_ref == 'haskell-updates' ||
|
||||
github.head_ref == 'python-updates' ||
|
||||
|
.github/workflows/review-submitted.yml (vendored, 17 changed lines)
@ -1,17 +0,0 @@
|
||||
name: Review submitted
|
||||
|
||||
on:
|
||||
pull_request_review:
|
||||
types: [submitted]
|
||||
|
||||
permissions: {}
|
||||
|
||||
defaults:
|
||||
run:
|
||||
shell: bash
|
||||
|
||||
jobs:
|
||||
trigger:
|
||||
runs-on: ubuntu-24.04-arm
|
||||
steps:
|
||||
- run: echo This is a no-op only used as a trigger for workflow_run.
|
@ -879,6 +879,399 @@ stdenv.mkDerivation (finalAttrs: {
|
||||
})
|
||||
```
|
||||
|
||||
### buildDenoPackage {#javascript-buildDenoPackage}
|
||||
|
||||
`buildDenoPackage` allows you to package [Deno](https://deno.com/) projects in Nixpkgs without the use of an auto-generated dependencies file (as used in [node2nix](#javascript-node2nix)).
|
||||
It works by using Deno's cache functionality: it creates a reproducible cache that contains the dependencies of a project and points Deno at it.
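For orientation, here is a minimal sketch of such a derivation; it assumes a project with `deno.json` and `deno.lock` at its root, and all names and hashes are placeholders (the individual options are explained in the sections below):

```nix
{ buildDenoPackage }:

buildDenoPackage {
  # Placeholder metadata for an assumed project.
  pname = "myPackage";
  version = "0.1.0";
  # Use lib.fakeHash on the first build, then copy the reported hash here.
  denoDepsHash = "sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=";
  src = ./.;
  # Compile main.ts into a binary that ends up in $out/bin.
  binaryEntrypointPath = "main.ts";
}
```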
|
||||
|
||||
#### buildDenoDeps {#javascript-buildDenoPackage-buildDenoDeps}
|
||||
|
||||
For every `buildDenoPackage`, a [fixed-output derivation (FOD)](https://nix.dev/manual/nix/2.18/language/advanced-attributes.html#adv-attr-outputHash) is
first created with all the dependencies listed in the `deno.lock`.
|
||||
This works as follows:
|
||||
1. They are installed using `deno install`.
|
||||
1. All non-reproducible data is pruned.
|
||||
1. The directories `.deno`, `node_modules` and `vendor` are copied to `$out`.
|
||||
1. The output of the FOD is checked against the `denoDepsHash`.
|
||||
1. The output is copied into the build of `buildDenoPackage`, which is not an FOD.
|
||||
1. The dependencies are installed again using `deno install`, this time from the local cache only.
|
||||
|
||||
The `buildDenoDeps` derivation is exposed via `passthru`, so it can be accessed from a `buildDenoPackage` derivation as `.denoDeps`.
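For example, a sketch of reusing the dependency cache of one `buildDenoPackage` derivation (here called `myPackage`, a placeholder) in another one:

```nix
# Both derivations share the same deno.lock, so the dependency cache
# can be reused instead of being fetched and hash-checked again.
buildDenoPackage {
  inherit (myPackage) denoDeps src;
  pname = "otherPackage";
  version = "0.1.0";
  binaryEntrypointPath = "other.ts";
}
```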
|
||||
|
||||
Related options:
|
||||
|
||||
*`denoDepsHash`* (String)
|
||||
|
||||
: The output hash of the `buildDenoDeps` fixed output derivation.
|
||||
|
||||
*`denoInstallFlags`* (Array of strings; optional)
|
||||
|
||||
: The flags passed to `deno install`.
|
||||
|
||||
: _Default:_ `[ "--allow-scripts" "--frozen" "--cached-only" ]` for `buildDenoPackage`
|
||||
: _Default:_ `[ "--allow-scripts" "--frozen" ]` for `buildDenoDeps` (`"--cached-only"` is filtered out)
|
||||
|
||||
::: {.tip}
|
||||
If you receive errors like these:
|
||||
|
||||
```
|
||||
error: The lockfile is out of date. Run `deno install --frozen=false`, or rerun with `--frozen=false` to update it.
|
||||
```
|
||||
|
||||
or
|
||||
|
||||
```
|
||||
error: Import '<url>' failed.
|
||||
0: error sending request for url (<url>): client error (Connect): dns error: failed to lookup address information: Temporary failure in name resolution: failed to lookup address information:Temporary failure in name resolution
|
||||
1: client error (Connect)
|
||||
2: dns error: failed to lookup address information: Temporary failure in name resolution
|
||||
3: failed to lookup address information: Temporary failure in name resolution
|
||||
at file:///build/source/src/lib/helpers/verifyRequest.ts:2:21
|
||||
build for <your-package> failed in buildPhase with exit code 1
|
||||
```
|
||||
|
||||
or
|
||||
|
||||
```
|
||||
error: Specifier not found in cache: "<url>", --cached-only is specified.
|
||||
|
||||
ERROR: deno failed to install dependencies
|
||||
```
|
||||
|
||||
This can happen when the `deno install` command deduces a different set of packages than the ones the actual package needs.
|
||||
|
||||
To fix this, add the entrypoint to the install flags:
|
||||
|
||||
```nix
|
||||
{ buildDenoPackage, nix-gitignore }:
|
||||
buildDenoPackage {
|
||||
pname = "myPackage";
|
||||
version = "0.1.0";
|
||||
denoDepsHash = "sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA";
|
||||
src = nix-gitignore.gitignoreSource [ ] ./.;
|
||||
binaryEntrypointPath = "main.ts";
|
||||
denoInstallFlags = [
|
||||
"--allow-scripts"
|
||||
"--frozen"
|
||||
"--cached-only"
|
||||
"--entrypoint"
|
||||
"<path/to/entrypoint/script>"
|
||||
];
|
||||
}
|
||||
```
|
||||
|
||||
:::
|
||||
|
||||
#### Private registries {#javascript-buildDenoPackage-private-registries}
|
||||
There are currently two options that enable the use of private registries in a `buildDenoPackage` derivation.
|
||||
|
||||
*`denoDepsImpureEnvVars`* (Array of strings; optional)
|
||||
|
||||
: Names of impure environment variables passed to the `buildDenoDeps` derivation. They are forwarded to `deno install`.
|
||||
|
||||
: _Example:_ `[ "NPM_TOKEN" ]`
|
||||
|
||||
: It can be used to set tokens for private NPM registries (in a `.npmrc` file).
|
||||
|
||||
: In a single-user installation of Nix, you can set the variables in the environment when running the Nix build.
|
||||
|
||||
: In multi-user installations of Nix, the environment variables have to be set on the nix-daemon, typically via systemd.
|
||||
|
||||
:::{.example}
|
||||
|
||||
##### configure nix-daemon {#javascript-buildDenoPackage-private-registries-daemon-example}
|
||||
In NixOS:
|
||||
|
||||
```nix
|
||||
# configuration.nix
|
||||
{
|
||||
config,
|
||||
lib,
|
||||
pkgs,
|
||||
...
|
||||
}:
|
||||
{
|
||||
systemd.services.nix-daemon.environment.NPM_TOKEN = "<token>";
|
||||
}
|
||||
```
|
||||
|
||||
On other Linux distributions, use:
|
||||
|
||||
```
|
||||
$ sudo systemctl edit nix-daemon
|
||||
$ sudo systemctl cat nix-daemon
|
||||
$ sudo systemctl restart nix-daemon
|
||||
```
|
||||
|
||||
:::
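A sketch of a derivation using `denoDepsImpureEnvVars` (placeholder values; the token variable must be present in the builder's environment as configured above):

```nix
{ buildDenoPackage, nix-gitignore }:

buildDenoPackage {
  pname = "myPackage";
  version = "0.1.0";
  denoDepsHash = "sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=";
  src = nix-gitignore.gitignoreSource [ ] ./.;
  binaryEntrypointPath = "main.ts";
  # Forward NPM_TOKEN from the build environment to `deno install`,
  # so the .npmrc in the project can reference it.
  denoDepsImpureEnvVars = [ "NPM_TOKEN" ];
}
```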
|
||||
|
||||
*`denoDepsInjectedEnvVars`* (Attrset; optional)
|
||||
|
||||
: Environment variables as key-value pairs. They are forwarded to `deno install`.
|
||||
|
||||
: _Example:_ `{ "NPM_TOKEN" = "<token>"; }`
|
||||
|
||||
: It can be used to set tokens for private NPM registries (in a `.npmrc` file).
|
||||
You could pass these tokens from the Nix CLI with `--arg`;
however, this can hurt the reproducibility of your builds, and such an injected
token will also need to be injected into every build that depends on this one.
|
||||
|
||||
:::{.example}
|
||||
|
||||
##### example `.npmrc` {#javascript-buildDenoPackage-private-registries-npmrc-example}
|
||||
|
||||
```ini
|
||||
@<scope>:registry=https://<domain>/<path to private registry>
|
||||
//<domain>/<path to private registry>:_authToken=${NPM_TOKEN}
|
||||
```
|
||||
|
||||
:::
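And a sketch of the corresponding derivation using `denoDepsInjectedEnvVars` (placeholder values; note the caution below about tokens ending up in the store):

```nix
{ buildDenoPackage, nix-gitignore }:

buildDenoPackage {
  pname = "myPackage";
  version = "0.1.0";
  denoDepsHash = "sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=";
  src = nix-gitignore.gitignoreSource [ ] ./.;
  binaryEntrypointPath = "main.ts";
  # Injected into the dependency fetch and referenced from .npmrc
  # as ${NPM_TOKEN}.
  denoDepsInjectedEnvVars = {
    "NPM_TOKEN" = "<token>";
  };
}
```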
|
||||
|
||||
::: {.caution}
|
||||
|
||||
Hardcoding a token into your NixOS configuration or some other Nix build will, as a consequence, write that token into `/nix/store`, which is considered world-readable.
|
||||
|
||||
:::
|
||||
|
||||
::: {.note}
|
||||
Neither approach is ideal. For `buildNpmPackage`, there exists a third
option called `sourceOverrides`, which allows the user to inject Nix packages into
the resulting `node_modules` folder.
Since a Nix build implicitly uses the SSH keys of the machine,
this offers another way to access private packages.
It does, however, require that the injected package is packaged with Nix first,
and that its source code can be retrieved over SSH.
Something similar is possible for Deno, too, albeit not
completely analogous to `buildNpmPackage`'s solution.
However, it has not been implemented yet.
|
||||
:::
|
||||
|
||||
#### Compile to binary {#javascript-buildDenoPackage-compile-to-binary}
|
||||
|
||||
It's possible to compile a Deno project to a single binary using `deno compile`.
|
||||
The binary is named after the `.name` property in `deno.json`, if available,
or otherwise after the `name` attribute of the derivation.
|
||||
|
||||
:::{.caution}
|
||||
When using packages with an `npm:` specifier, the resulting binary will not be reproducible.
|
||||
See [this issue](https://github.com/denoland/deno/issues/29619) for more information.
|
||||
:::
|
||||
|
||||
Related options:
|
||||
|
||||
*`hostPlatform`* (String; optional)
|
||||
|
||||
: The [host platform](#ssec-cross-platform-parameters) the binary is built for.
|
||||
|
||||
: _Default:_ `builtins.currentSystem`.
|
||||
|
||||
: _Supported values:_
|
||||
- `"x86_64-darwin"`
|
||||
- `"aarch64-darwin"`
|
||||
- `"x86_64-linux"`
|
||||
- `"aarch64-linux"`
|
||||
|
||||
*`denoCompileFlags`* (Array of strings; optional)
|
||||
|
||||
: Flags passed to `deno compile [denoCompileFlags] ${binaryEntrypointPath} [extraCompileFlags]`.
|
||||
|
||||
*`extraCompileFlags`* (Array of strings; optional)
|
||||
|
||||
: Flags passed to `deno compile [denoCompileFlags] ${binaryEntrypointPath} [extraCompileFlags]`.
|
||||
|
||||
*`binaryEntrypointPath`* (String or null; optional)
|
||||
|
||||
: If not `null`, a binary is created using the specified path as the entry point.
|
||||
The binary is copied to `$out/bin` in the `installPhase`.
|
||||
|
||||
: _Default:_ `null`
|
||||
|
||||
: It's prefixed by `denoWorkspacePath`.
|
||||
|
||||
*`denortPackage`* (Derivation; optional)
|
||||
|
||||
: The package used as the Deno runtime, which is bundled with the JavaScript code to create the binary.
|
||||
|
||||
: _Default:_ `pkgs.denort`
|
||||
|
||||
: Don't use `pkgs.deno` for this, since that is the full Deno CLI, with all the development tooling.
|
||||
|
||||
: If you're cross-compiling, this needs to be the `denort` built for the `hostPlatform`.
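For example, a sketch of a cross build targeting `aarch64-linux`; `pkgsCross.aarch64-multiplatform` is assumed to provide a matching `denort`, so adjust this to your setup:

```nix
{ buildDenoPackage, pkgsCross, nix-gitignore }:

buildDenoPackage {
  pname = "myPackage";
  version = "0.1.0";
  denoDepsHash = "sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=";
  src = nix-gitignore.gitignoreSource [ ] ./.;
  binaryEntrypointPath = "main.ts";
  # The platform the resulting binary should run on.
  hostPlatform = "aarch64-linux";
  # The bundled runtime has to match that platform.
  denortPackage = pkgsCross.aarch64-multiplatform.denort;
}
```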
|
||||
|
||||
::: {.note}
|
||||
The binary will be dynamically linked and not executable on NixOS without [nix-ld](https://github.com/nix-community/nix-ld)
|
||||
or [other methods](https://unix.stackexchange.com/questions/522822/different-methods-to-run-a-non-nixos-executable-on-nixos).
|
||||
|
||||
```nix
|
||||
# configuration.nix
|
||||
{
|
||||
config,
|
||||
lib,
|
||||
pkgs,
|
||||
...
|
||||
}:
|
||||
{
|
||||
programs.nix-ld.enable = true;
|
||||
programs.nix-ld.libraries = with pkgs; [
|
||||
glibc
|
||||
gcc-unwrapped
|
||||
];
|
||||
}
|
||||
```
|
||||
|
||||
:::
|
||||
|
||||
:::{.example}
|
||||
|
||||
##### example binary build {#javascript-buildDenoPackage-compile-to-binary-example}
|
||||
|
||||
```nix
|
||||
{ buildDenoPackage, nix-gitignore }:
|
||||
buildDenoPackage {
|
||||
pname = "myPackage";
|
||||
version = "0.1.0";
|
||||
denoDepsHash = "sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA";
|
||||
src = nix-gitignore.gitignoreSource [ ] ./.;
|
||||
binaryEntrypointPath = "main.ts";
|
||||
}
|
||||
```
|
||||
|
||||
:::
|
||||
|
||||
#### Create artifacts in the build {#javascript-buildDenoPackage-artifacts-in-build}
|
||||
|
||||
Instead of compiling to a binary, `deno task` can be executed inside the build
|
||||
to produce some artifact, which can then be copied out in the `installPhase`.
|
||||
|
||||
Related options:
|
||||
|
||||
*`denoTaskScript`* (String; optional)
|
||||
|
||||
: The task in `deno.json` that's executed with `deno task`.
|
||||
|
||||
: _Default:_ `"build"`
|
||||
|
||||
*`denoTaskFlags`* (Array of strings; optional)
|
||||
|
||||
: The flags passed to `deno task [denoTaskFlags] ${denoTaskScript} [extraTaskFlags]`.
|
||||
|
||||
*`extraTaskFlags`* (Array of strings; optional)
|
||||
|
||||
: The flags passed to `deno task [denoTaskFlags] ${denoTaskScript} [extraTaskFlags]`.
|
||||
|
||||
*`denoTaskPrefix`* (String; optional)
|
||||
|
||||
: An unquoted string injected before `deno task`.
|
||||
|
||||
*`denoTaskSuffix`* (String; optional)
|
||||
|
||||
: An unquoted string injected after `deno task` and all its flags, for example to pipe stdout to a file.
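A short sketch of the relevant arguments combining both options (placeholder values; setting `HOME` this way is just one assumed use of the prefix), with a full example following below:

```nix
{
  # Roughly expands to: HOME=$TMPDIR deno task build >out.txt
  denoTaskPrefix = "HOME=$TMPDIR";
  denoTaskSuffix = ">out.txt";
}
```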
|
||||
|
||||
:::{.example}
|
||||
|
||||
##### example artifact build {#javascript-buildDenoPackage-artifacts-in-build-example}
|
||||
|
||||
`deno.json`
|
||||
|
||||
```json
|
||||
{
|
||||
"tasks": {
|
||||
"build": "deno run --allow-all main.ts"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
```nix
|
||||
{ buildDenoPackage, nix-gitignore }:
|
||||
buildDenoPackage {
|
||||
pname = "myPackage";
|
||||
version = "0.1.0";
|
||||
denoDepsHash = "sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA";
|
||||
src = nix-gitignore.gitignoreSource [ ] ./.;
|
||||
denoTaskSuffix = ">out.txt";
|
||||
installPhase = ''
|
||||
cp ./out.txt $out
|
||||
'';
|
||||
}
|
||||
```
|
||||
|
||||
:::
|
||||
|
||||
#### Workspaces {#javascript-buildDenoPackage-workspaces}
|
||||
|
||||
Deno's workspaces are supported.
|
||||
|
||||
To make them work, the whole project needs to be added as source, since the `deno.lock`
|
||||
is always in the root of the project and contains all dependencies.
|
||||
|
||||
This means a build with only the required dependencies of a workspace is not possible.
|
||||
Also, the `denoDepsHash` for all workspaces is the same, since they
|
||||
all share the same dependencies.
|
||||
|
||||
When [running a task inside the build](#javascript-buildDenoPackage-artifacts-in-build),
|
||||
`denoWorkspacePath` can be used to let the task run inside a workspace.
|
||||
|
||||
When [compiling to a binary](#javascript-buildDenoPackage-compile-to-binary),
|
||||
`binaryEntrypointPath` is prefixed by `denoWorkspacePath`.
|
||||
|
||||
Related options:
|
||||
|
||||
*`denoWorkspacePath`* (String; optional)
|
||||
|
||||
: The path to a workspace.
|
||||
|
||||
:::{.example}
|
||||
|
||||
##### example workspaces {#javascript-buildDenoPackage-workspaces-example}
|
||||
|
||||
```nix
|
||||
{ buildDenoPackage, nix-gitignore }:
|
||||
rec {
|
||||
sub1 = buildDenoPackage {
|
||||
pname = "sub1";
|
||||
version = "0.1.0";
|
||||
denoDepsHash = "sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=";
|
||||
src = nix-gitignore.gitignoreSource [ ] ./.;
|
||||
denoWorkspacePath = "./sub1";
|
||||
denoTaskFlags = [
|
||||
"--text"
|
||||
"sub1"
|
||||
];
|
||||
denoTaskSuffix = ">out.txt";
|
||||
installPhase = ''
|
||||
cp out.txt $out
|
||||
'';
|
||||
};
|
||||
sub2 = buildDenoPackage {
|
||||
# Note that we are reusing denoDeps and src,
|
||||
# since they must be the same for both workspaces.
|
||||
inherit (sub1) denoDeps src;
|
||||
pname = "sub2";
|
||||
version = "0.1.0";
|
||||
denoWorkspacePath = "./sub2";
|
||||
binaryEntrypointPath = "./main.ts";
|
||||
};
|
||||
}
|
||||
```
|
||||
|
||||
:::
|
||||
|
||||
#### Other Options {#javascript-buildDenoPackage-other-options}
|
||||
|
||||
*`denoDir`* (String; optional)
|
||||
|
||||
: `DENO_DIR` will be set to this value for all `deno` commands.
|
||||
|
||||
*`denoFlags`* (Array of strings; optional)
|
||||
|
||||
: The flags passed to all `deno` commands.
|
||||
|
||||
*`denoPackage`* (Derivation; optional)
|
||||
|
||||
: The Deno CLI used for all `deno` commands inside the build.
|
||||
|
||||
: _Default:_ `pkgs.deno`
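A sketch tying these options together (placeholder values; the pinned Deno CLI here is simply the default `deno`):

```nix
{ buildDenoPackage, deno, nix-gitignore }:

buildDenoPackage {
  pname = "myPackage";
  version = "0.1.0";
  denoDepsHash = "sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=";
  src = nix-gitignore.gitignoreSource [ ] ./.;
  binaryEntrypointPath = "main.ts";
  # Name of the local DENO_DIR used by all deno commands in the build.
  denoDir = "./.deno";
  # Flags appended to every deno command.
  denoFlags = [ "--quiet" ];
  # The Deno CLI used inside the build.
  denoPackage = deno;
}
```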
|
||||
|
||||
## Outside Nixpkgs {#javascript-outside-nixpkgs}
|
||||
|
||||
There are some other tools available that are written in the Nix language.
|
||||
|
@ -3390,6 +3390,42 @@
|
||||
"javascript-nix-npm-buildpackage-pitfalls": [
|
||||
"index.html#javascript-nix-npm-buildpackage-pitfalls"
|
||||
],
|
||||
"javascript-buildDenoPackage-workspaces-example": [
|
||||
"index.html#javascript-buildDenoPackage-workspaces-example"
|
||||
],
|
||||
"javascript-buildDenoPackage-private-registries": [
|
||||
"index.html#javascript-buildDenoPackage-private-registries"
|
||||
],
|
||||
"javascript-buildDenoPackage-buildDenoDeps": [
|
||||
"index.html#javascript-buildDenoPackage-buildDenoDeps"
|
||||
],
|
||||
"javascript-buildDenoPackage-artifacts-in-build": [
|
||||
"index.html#javascript-buildDenoPackage-artifacts-in-build"
|
||||
],
|
||||
"javascript-buildDenoPackage-artifacts-in-build-example": [
|
||||
"index.html#javascript-buildDenoPackage-artifacts-in-build-example"
|
||||
],
|
||||
"javascript-buildDenoPackage-private-registries-daemon-example": [
|
||||
"index.html#javascript-buildDenoPackage-private-registries-daemon-example"
|
||||
],
|
||||
"javascript-buildDenoPackage-private-registries-npmrc-example": [
|
||||
"index.html#javascript-buildDenoPackage-private-registries-npmrc-example"
|
||||
],
|
||||
"javascript-buildDenoPackage-compile-to-binary-example": [
|
||||
"index.html#javascript-buildDenoPackage-compile-to-binary-example"
|
||||
],
|
||||
"javascript-buildDenoPackage-workspaces": [
|
||||
"index.html#javascript-buildDenoPackage-workspaces"
|
||||
],
|
||||
"javascript-buildDenoPackage": [
|
||||
"index.html#javascript-buildDenoPackage"
|
||||
],
|
||||
"javascript-buildDenoPackage-other-options": [
|
||||
"index.html#javascript-buildDenoPackage-other-options"
|
||||
],
|
||||
"javascript-buildDenoPackage-compile-to-binary": [
|
||||
"index.html#javascript-buildDenoPackage-compile-to-binary"
|
||||
],
|
||||
"language-julia": [
|
||||
"index.html#language-julia"
|
||||
],
|
||||
|
@ -551,6 +551,8 @@
|
||||
|
||||
- `ddclient` was updated from 3.11.2 to 4.0.0 [Release notes](https://github.com/ddclient/ddclient/releases/tag/v4.0.0)
|
||||
|
||||
- `buildDenoPackage` was added; [see the docs](https://github.com/NixOS/nixpkgs/blob/master/doc/languages-frameworks/javascript.section.md#javascript-buildDenoPackage) for more details.
|
||||
|
||||
## Nixpkgs Library {#sec-nixpkgs-release-25.05-lib}
|
||||
|
||||
### Breaking changes {#sec-nixpkgs-release-25.05-lib-breaking}
|
||||
|
@ -1165,8 +1165,8 @@ let
|
||||
mktplcRef = {
|
||||
name = "dbclient-jdbc";
|
||||
publisher = "cweijan";
|
||||
version = "1.4.4";
|
||||
hash = "sha256-hrymsnprfrRQeS/WRGqdV3MNPw+C+iJCcXF1IfNjGWE=";
|
||||
version = "1.4.6";
|
||||
hash = "sha256-989egeJlpJ2AfZra9VSQDQ8e+nQCa2sfoUeti674ecA=";
|
||||
};
|
||||
meta = {
|
||||
description = "JDBC Adapter For Database Client";
|
||||
@ -4621,8 +4621,8 @@ let
|
||||
mktplcRef = {
|
||||
name = "vscode-stylelint";
|
||||
publisher = "stylelint";
|
||||
version = "1.5.1";
|
||||
hash = "sha256-Sbp2zy/6PcsMlUPe94spm3JrWxBYHfd7py3f4rb+0G4=";
|
||||
version = "1.5.3";
|
||||
hash = "sha256-fgMs9/gYhhHCkiKJX5rDRbiXy6gxvmLhU6blNxEoNc8=";
|
||||
};
|
||||
meta = {
|
||||
description = "Official Stylelint extension for Visual Studio Code";
|
||||
|
@ -8,8 +8,8 @@ vscode-utils.buildVscodeMarketplaceExtension {
|
||||
mktplcRef = {
|
||||
publisher = "RooVeterinaryInc";
|
||||
name = "roo-cline";
|
||||
version = "3.19.3";
|
||||
hash = "sha256-7GZD7oCrkGcG7B/pgXK92hL0QyyodmqyxOcRhTt5LMs=";
|
||||
version = "3.20.3";
|
||||
hash = "sha256-YCO8TjUZ2IpjTkDYf/4wQgsqGEvn2bt4+yVwWlb2eUQ=";
|
||||
};
|
||||
|
||||
passthru.updateScript = vscode-extension-update-script { };
|
||||
|
@ -16,13 +16,13 @@
|
||||
|
||||
stdenv.mkDerivation rec {
|
||||
pname = "tesseract";
|
||||
version = "5.5.0";
|
||||
version = "5.5.1";
|
||||
|
||||
src = fetchFromGitHub {
|
||||
owner = "tesseract-ocr";
|
||||
repo = "tesseract";
|
||||
rev = version;
|
||||
sha256 = "sha256-qyckAQZs3gR1NBqWgE+COSKXhv3kPF+iHVQrt6OPi8s=";
|
||||
sha256 = "sha256-bLTYdT9CNfgrmmjP6m0rRqJDHiSOkcuGVCFwPqT12jk=";
|
||||
};
|
||||
|
||||
enableParallelBuilding = true;
|
||||
|
@ -1,10 +1,11 @@
|
||||
{
|
||||
lib,
|
||||
mkDerivation,
|
||||
stdenv,
|
||||
fetchFromGitHub,
|
||||
pkg-config,
|
||||
qmake,
|
||||
qttools,
|
||||
wrapQtAppsHook,
|
||||
boost,
|
||||
kirigami2,
|
||||
kyotocabinet,
|
||||
@ -24,19 +25,19 @@ let
|
||||
date = fetchFromGitHub {
|
||||
owner = "HowardHinnant";
|
||||
repo = "date";
|
||||
rev = "a2fdba1adcb076bf9a8343c07524afdf09aa8dcc";
|
||||
sha256 = "00sf1pbaz0g0gsa0dlm23lxk4h46xm1jv1gzbjj5rr9sf1qccyr5";
|
||||
rev = "a45ea7c17b4a7f320e199b71436074bd624c9e15";
|
||||
hash = "sha256-Mq7Yd+y8M3JNG9BEScwVEmxGWYEy6gaNNSlTGgR9LB4=";
|
||||
};
|
||||
in
|
||||
mkDerivation rec {
|
||||
stdenv.mkDerivation (finalAttrs: {
|
||||
pname = "osmscout-server";
|
||||
version = "3.1.0";
|
||||
version = "3.1.5";
|
||||
|
||||
src = fetchFromGitHub {
|
||||
owner = "rinigus";
|
||||
repo = "osmscout-server";
|
||||
rev = version;
|
||||
hash = "sha256-GqUXHn3ZK8gdDlm3TitEp/jhBpQoVeQZUCfAyiyrDEg=";
|
||||
tag = finalAttrs.version;
|
||||
hash = "sha256-gmAHX7Gt2oAvTSTCypAjzI5a9TWOPDAYAMD1i1fJVUY=";
|
||||
fetchSubmodules = true;
|
||||
};
|
||||
|
||||
@ -44,7 +45,9 @@ mkDerivation rec {
|
||||
qmake
|
||||
pkg-config
|
||||
qttools
|
||||
wrapQtAppsHook
|
||||
];
|
||||
|
||||
buildInputs = [
|
||||
kirigami2
|
||||
qtquickcontrols2
|
||||
@ -67,11 +70,11 @@ mkDerivation rec {
|
||||
"CONFIG+=disable_mapnik" # Disable the optional mapnik backend
|
||||
];
|
||||
|
||||
meta = with lib; {
|
||||
meta = {
|
||||
description = "Maps server providing tiles, geocoder, and router";
|
||||
homepage = "https://github.com/rinigus/osmscout-server";
|
||||
license = licenses.gpl3Only;
|
||||
maintainers = [ maintainers.Thra11 ];
|
||||
platforms = platforms.linux;
|
||||
license = lib.licenses.gpl3Only;
|
||||
maintainers = [ lib.maintainers.Thra11 ];
|
||||
platforms = lib.platforms.linux;
|
||||
};
|
||||
}
|
||||
})
|
||||
|
@ -3,7 +3,6 @@
|
||||
lib,
|
||||
binutils,
|
||||
fetchFromGitHub,
|
||||
fetchpatch,
|
||||
cmake,
|
||||
pkg-config,
|
||||
wrapGAppsHook3,
|
||||
@ -33,12 +32,13 @@
|
||||
xorg,
|
||||
libbgcode,
|
||||
heatshrink,
|
||||
catch2,
|
||||
catch2_3,
|
||||
webkitgtk_4_1,
|
||||
ctestCheckHook,
|
||||
withSystemd ? lib.meta.availableOn stdenv.hostPlatform systemd,
|
||||
systemd,
|
||||
udevCheckHook,
|
||||
z3,
|
||||
wxGTK-override ? null,
|
||||
opencascade-override ? null,
|
||||
}:
|
||||
@ -61,36 +61,30 @@ let
|
||||
in
|
||||
stdenv.mkDerivation (finalAttrs: {
|
||||
pname = "prusa-slicer";
|
||||
version = "2.9.0";
|
||||
version = "2.9.2";
|
||||
|
||||
src = fetchFromGitHub {
|
||||
owner = "prusa3d";
|
||||
repo = "PrusaSlicer";
|
||||
hash = "sha256-6BrmTNIiu6oI/CbKPKoFQIh1aHEVfJPIkxomQou0xKk=";
|
||||
hash = "sha256-j/fdEgcFq0nWBLpyapwZIbBIXCnqEWV6Tk+6sTHk/Bc=";
|
||||
rev = "version_${finalAttrs.version}";
|
||||
};
|
||||
|
||||
# https://github.com/prusa3d/PrusaSlicer/pull/14010
|
||||
patches = [
|
||||
(fetchpatch {
|
||||
url = "https://github.com/prusa3d/PrusaSlicer/commit/cdc3db58f9002778a0ca74517865527f50ade4c3.patch";
|
||||
hash = "sha256-zgpGg1jtdnCBaWjR6oUcHo5sGuZx5oEzpux3dpRdMAM=";
|
||||
})
|
||||
# https://github.com/prusa3d/PrusaSlicer/pull/11769
|
||||
./fix-ambiguous-constructors.patch
|
||||
];
|
||||
|
||||
# Patch required for GCC 14.
|
||||
# (not applicable to super-slicer fork)
|
||||
# Make Gcode viewer open newer bgcode files.
|
||||
postPatch = lib.optionalString (finalAttrs.pname == "prusa-slicer") ''
|
||||
substituteInPlace src/slic3r-arrange/include/arrange/DataStoreTraits.hpp \
|
||||
--replace-fail \
|
||||
"WritableDataStoreTraits<ArrItem>::template set" \
|
||||
"WritableDataStoreTraits<ArrItem>::set"
|
||||
substituteInPlace src/platform/unix/PrusaGcodeviewer.desktop \
|
||||
--replace-fail 'MimeType=text/x.gcode;' 'MimeType=application/x-bgcode;text/x.gcode;'
|
||||
'';
|
||||
postPatch = lib.optionalString (finalAttrs.pname == "prusa-slicer") (
|
||||
# Patch required for GCC 14, but breaks on clang
|
||||
lib.optionalString stdenv.cc.isGNU ''
|
||||
substituteInPlace src/slic3r-arrange/include/arrange/DataStoreTraits.hpp \
|
||||
--replace-fail \
|
||||
"WritableDataStoreTraits<ArrItem>::template set" \
|
||||
"WritableDataStoreTraits<ArrItem>::set"
|
||||
''
|
||||
# Make Gcode viewer open newer bgcode files.
|
||||
+ ''
|
||||
substituteInPlace src/platform/unix/PrusaGcodeviewer.desktop \
|
||||
--replace-fail 'MimeType=text/x.gcode;' 'MimeType=application/x-bgcode;text/x.gcode;'
|
||||
''
|
||||
);
|
||||
|
||||
nativeBuildInputs = [
|
||||
cmake
|
||||
@ -129,8 +123,9 @@ stdenv.mkDerivation (finalAttrs: {
|
||||
xorg.libX11
|
||||
libbgcode
|
||||
heatshrink
|
||||
catch2
|
||||
catch2_3
|
||||
webkitgtk_4_1
|
||||
z3
|
||||
]
|
||||
++ lib.optionals withSystemd [
|
||||
systemd
|
||||
@ -165,7 +160,7 @@ stdenv.mkDerivation (finalAttrs: {
|
||||
# dlopen(3) for context.
|
||||
if [ -f "src/libslic3r/Format/STEP.cpp" ]; then
|
||||
substituteInPlace src/libslic3r/Format/STEP.cpp \
|
||||
--replace 'libpath /= "OCCTWrapper.so";' 'libpath = "OCCTWrapper.so";'
|
||||
--replace-fail 'libpath /= "OCCTWrapper.so";' 'libpath = "OCCTWrapper.so";'
|
||||
fi
|
||||
# https://github.com/prusa3d/PrusaSlicer/issues/9581
|
||||
if [ -f "cmake/modules/FindEXPAT.cmake" ]; then
|
||||
@ -173,8 +168,10 @@ stdenv.mkDerivation (finalAttrs: {
|
||||
fi
|
||||
|
||||
# Fix resources folder location on macOS
|
||||
substituteInPlace src/PrusaSlicer.cpp \
|
||||
--replace "#ifdef __APPLE__" "#if 0"
|
||||
substituteInPlace src/${
|
||||
if finalAttrs.pname == "prusa-slicer" then "CLI/Setup.cpp" else "PrusaSlicer.cpp"
|
||||
} \
|
||||
--replace-fail "#ifdef __APPLE__" "#if 0"
|
||||
'';
|
||||
|
||||
cmakeFlags = [
|
||||
|
@ -1,37 +0,0 @@
|
||||
From 910328f3131e24e330808f5d4cb814454dbe201d Mon Sep 17 00:00:00 2001
|
||||
From: Gregor Riepl <onitake@gmail.com>
|
||||
Date: Mon, 27 Nov 2023 13:01:55 +0100
|
||||
Subject: [PATCH] Make initializers explicit to avoid ambiguous wxArrayString
|
||||
overloads
|
||||
|
||||
---
|
||||
src/slic3r/GUI/PhysicalPrinterDialog.cpp | 2 +-
|
||||
src/slic3r/GUI/Plater.cpp | 2 +-
|
||||
2 files changed, 2 insertions(+), 2 deletions(-)
|
||||
|
||||
diff --git a/src/slic3r/GUI/PhysicalPrinterDialog.cpp b/src/slic3r/GUI/PhysicalPrinterDialog.cpp
|
||||
index 849e987c731..7d0c628c23f 100644
|
||||
--- a/src/slic3r/GUI/PhysicalPrinterDialog.cpp
|
||||
+++ b/src/slic3r/GUI/PhysicalPrinterDialog.cpp
|
||||
@@ -607,7 +607,7 @@ void PhysicalPrinterDialog::build_printhost_settings(ConfigOptionsGroup* m_optgr
|
||||
// Always fill in the "printhost_port" combo box from the config and select it.
|
||||
{
|
||||
Choice* choice = dynamic_cast<Choice*>(m_optgroup->get_field("printhost_port"));
|
||||
- choice->set_values({ m_config->opt_string("printhost_port") });
|
||||
+ choice->set_values(std::vector<std::string>({ m_config->opt_string("printhost_port") }));
|
||||
choice->set_selection();
|
||||
}
|
||||
|
||||
diff --git a/src/slic3r/GUI/Plater.cpp b/src/slic3r/GUI/Plater.cpp
|
||||
index debfe625fd4..4d61e29a2dc 100644
|
||||
--- a/src/slic3r/GUI/Plater.cpp
|
||||
+++ b/src/slic3r/GUI/Plater.cpp
|
||||
@@ -4420,7 +4420,7 @@ void Plater::load_project(const wxString& filename)
|
||||
s_multiple_beds.set_loading_project_flag(true);
|
||||
ScopeGuard guard([](){ s_multiple_beds.set_loading_project_flag(false);});
|
||||
|
||||
- if (! load_files({ into_path(filename) }).empty()) {
|
||||
+ if (! load_files(std::vector<boost::filesystem::path>({ into_path(filename) })).empty()) {
|
||||
// At least one file was loaded.
|
||||
p->set_project_filename(filename);
|
||||
// Save the names of active presets and project specific config into ProjectDirtyStateManager.
|
@ -13,7 +13,7 @@
|
||||
extra-cmake-modules,
|
||||
glog,
|
||||
libXdmcp,
|
||||
python3,
|
||||
python312,
|
||||
wayland,
|
||||
}:
|
||||
|
||||
@ -26,6 +26,7 @@ let
|
||||
rev = "Natron-v${minorVersion}";
|
||||
hash = "sha256-TD7Uge9kKbFxOmOCn+TSQovnKTmFS3uERTu5lmZFHbc=";
|
||||
};
|
||||
python3 = python312;
|
||||
in
|
||||
stdenv.mkDerivation {
|
||||
inherit version;
|
||||
@ -104,6 +105,7 @@ stdenv.mkDerivation {
|
||||
license = lib.licenses.gpl2;
|
||||
maintainers = [ maintainers.puffnfresh ];
|
||||
platforms = platforms.linux;
|
||||
broken = stdenv.hostPlatform.isLinux && stdenv.hostPlatform.isAarch64;
|
||||
# error: 'LogMessageVoidify' is not a member of 'google'
|
||||
broken = true;
|
||||
};
|
||||
}
|
||||
|
@ -315,6 +315,8 @@ rec {
|
||||
|
||||
installPhase =
|
||||
''
|
||||
runHook preInstall
|
||||
|
||||
install -Dm755 ./build/docker $out/libexec/docker/docker
|
||||
|
||||
makeWrapper $out/libexec/docker/docker $out/bin/docker \
|
||||
@ -330,11 +332,16 @@ rec {
|
||||
ln -s ${moby}/etc/systemd/system/docker.service $out/etc/systemd/system/docker.service
|
||||
ln -s ${moby}/etc/systemd/system/docker.socket $out/etc/systemd/system/docker.socket
|
||||
''
|
||||
+ ''
|
||||
# Required to avoid breaking cross builds
|
||||
+ lib.optionalString (stdenv.buildPlatform.canExecute stdenv.hostPlatform) ''
|
||||
# completion (cli)
|
||||
installShellCompletion --bash ./contrib/completion/bash/docker
|
||||
installShellCompletion --fish ./contrib/completion/fish/docker.fish
|
||||
installShellCompletion --zsh ./contrib/completion/zsh/_docker
|
||||
installShellCompletion --cmd docker \
|
||||
--bash <($out/bin/docker completion bash) \
|
||||
--fish <($out/bin/docker completion fish) \
|
||||
--zsh <($out/bin/docker completion zsh)
|
||||
''
|
||||
+ ''
|
||||
runHook postInstall
|
||||
'';
|
||||
|
||||
passthru = {
|
||||
|
pkgs/build-support/deno/build-deno-package/default.nix (new file, 172 lines)
@ -0,0 +1,172 @@
|
||||
# NOTE: much of this structure is inspired from https://github.com/NixOS/nixpkgs/tree/fff29a3e5f7991512e790617d1a693df5f3550f6/pkgs/build-support/node
|
||||
{
|
||||
stdenvNoCC,
|
||||
deno,
|
||||
denort,
|
||||
diffutils,
|
||||
zip,
|
||||
jq,
|
||||
fetchDenoDeps,
|
||||
buildPackages,
|
||||
lib,
|
||||
}:
|
||||
{
|
||||
name ? "${args.pname}-${args.version}",
|
||||
src ? null,
|
||||
# The output hash of the dependencies for this project.
|
||||
denoDepsHash ? lib.fakeHash,
|
||||
# The host platform, the output binary is compiled for.
|
||||
hostPlatform ? stdenvNoCC.hostPlatform.system,
|
||||
# A list of strings, which are names of impure env vars passed to the deps build.
|
||||
# Example:
|
||||
# `[ "NPM_TOKEN" ]`
|
||||
# They will be forwarded to `deno install`.
|
||||
# It can be used to set tokens for private NPM registries (in an `.npmrc` file).
|
||||
# In multi-user installations of Nix, you need to set the env vars on the daemon (typically with systemd).
|
||||
# In nixos: `systemd.services.nix-daemon.environment.NPM_TOKEN = "<token>";`
|
||||
denoDepsImpureEnvVars ? [ ],
|
||||
# An attr set with env vars as key value pairs.
|
||||
# Example:
|
||||
# `{ "NPM_TOKEN" = "<token>"; }`
|
||||
# They will be forwarded to `deno install`.
|
||||
# It can be used to set tokens for private NPM registries (in an `.npmrc` file).
|
||||
# You could pass these tokens from the CLI with `--arg` (this can hurt the reproducibility of your builds).
|
||||
denoDepsInjectedEnvVars ? { },
|
||||
# TODO: source overrides like in buildNpmPackage, i.e. injecting nix packages into the denoDeps
|
||||
# this is more involved, since they can't directly be injected into the fixed output derivation
|
||||
# of fetchDenoDeps. Instead we need to patch the lock file and remove the packages we intend to
|
||||
# inject, then we need to build the rest of the packages like before and in a
|
||||
# second step create a normal derivation with the injected packages.
|
||||
# Then the two need to be merged into a single denoDeps derivation, and finally the lock file needs
# to be reverted back to its original form.
|
||||
# It is possible to manipulate the registry.json files of the injected packages so that deno accepts them as is.
|
||||
denoDeps ? fetchDenoDeps {
|
||||
inherit
|
||||
src
|
||||
denoDepsInjectedEnvVars
|
||||
denoDepsImpureEnvVars
|
||||
denoFlags
|
||||
denoDir
|
||||
;
|
||||
denoInstallFlags = builtins.filter (e: e != "--cached-only") denoInstallFlags;
|
||||
name = "${name}-deno-deps";
|
||||
hash = denoDepsHash;
|
||||
},
|
||||
# The package used for every deno command in the build
|
||||
denoPackage ? deno,
|
||||
# The package used as the runtime that is bundled with the src to create the binary.
|
||||
denortPackage ? denort,
|
||||
# The script to run to build the project.
|
||||
# You still need to specify in the installPhase, what artifacts to copy to `$out`.
|
||||
denoTaskScript ? "build",
|
||||
# If not null, create a binary using the specified path as the entrypoint,
|
||||
# copy it to `$out/bin` in installPhase and fix it in fixupPhase.
|
||||
binaryEntrypointPath ? null,
|
||||
# Flags to pass to all deno commands.
|
||||
denoFlags ? [ ],
|
||||
# Flags to pass to `deno task [denoTaskFlags] ${denoTaskScript}`.
|
||||
denoTaskFlags ? [ ],
|
||||
# Flags to pass to `deno compile [denoCompileFlags] ${binaryEntrypointPath}`.
|
||||
denoCompileFlags ? [ ],
|
||||
# Flags to pass to `deno install [denoInstallFlags]`.
|
||||
denoInstallFlags ? [
|
||||
"--allow-scripts"
|
||||
"--frozen"
|
||||
"--cached-only"
|
||||
],
|
||||
# Flags to pass to `deno task [denoTaskFlags] ${denoTaskScript} [extraTaskFlags]`.
|
||||
extraTaskFlags ? [ ],
|
||||
# Flags to pass to `deno compile [denoCompileFlags] ${binaryEntrypointPath} [extraCompileFlags]`.
|
||||
extraCompileFlags ? [ ],
|
||||
nativeBuildInputs ? [ ],
|
||||
dontFixup ? true,
|
||||
# Custom denoConfigHook
|
||||
denoConfigHook ? null,
|
||||
# Custom denoBuildHook
|
||||
denoBuildHook ? null,
|
||||
# Custom denoInstallHook
|
||||
denoInstallHook ? null,
|
||||
# Path to deno workspace, where the denoTaskScript should be run
|
||||
denoWorkspacePath ? null,
|
||||
# Unquoted string injected before `deno task`
|
||||
denoTaskPrefix ? "",
|
||||
# Unquoted string injected after `deno task` and all its flags
|
||||
denoTaskSuffix ? "",
|
||||
# Used as the name of the local DENO_DIR
|
||||
denoDir ? "./.deno",
|
||||
...
|
||||
}@args:
|
||||
let
|
||||
denoFlags_ = builtins.concatStringsSep " " denoFlags;
|
||||
denoTaskFlags_ = builtins.concatStringsSep " " denoTaskFlags;
|
||||
denoCompileFlags_ = builtins.concatStringsSep " " denoCompileFlags;
|
||||
denoInstallFlags_ = builtins.concatStringsSep " " denoInstallFlags;
|
||||
extraTaskFlags_ = builtins.concatStringsSep " " extraTaskFlags;
|
||||
extraCompileFlags_ = builtins.concatStringsSep " " extraCompileFlags;
|
||||
|
||||
args' = builtins.removeAttrs args [ "denoDepsInjectedEnvVars" ];
|
||||
|
||||
denoHooks =
|
||||
(buildPackages.denoHooks.override {
|
||||
denort = denortPackage;
|
||||
})
|
||||
{
|
||||
inherit denoTaskSuffix denoTaskPrefix binaryEntrypointPath;
|
||||
};
|
||||
systemLookupTable = {
|
||||
"x86_64-darwin" = "x86_64-apple-darwin";
|
||||
"arm64-darwin" = "aarch64-apple-darwin";
|
||||
"aarch64-darwin" = "aarch64-apple-darwin";
|
||||
"x86_64-linux" = "x86_64-unknown-linux-gnu";
|
||||
"arm64-linux" = "aarch64-unknown-linux-gnu";
|
||||
"aarch64-linux" = "aarch64-unknown-linux-gnu";
|
||||
};
|
||||
hostPlatform_ =
|
||||
if builtins.hasAttr hostPlatform systemLookupTable then
|
||||
systemLookupTable."${hostPlatform}"
|
||||
else
|
||||
(lib.systems.elaborate hostPlatform).config;
|
||||
in
|
||||
stdenvNoCC.mkDerivation (
|
||||
args'
|
||||
// {
|
||||
inherit
|
||||
name
|
||||
denoDeps
|
||||
src
|
||||
denoFlags_
|
||||
denoTaskFlags_
|
||||
denoCompileFlags_
|
||||
denoInstallFlags_
|
||||
extraTaskFlags_
|
||||
extraCompileFlags_
|
||||
binaryEntrypointPath
|
||||
hostPlatform_
|
||||
denoWorkspacePath
|
||||
denoTaskScript
|
||||
;
|
||||
|
||||
nativeBuildInputs = nativeBuildInputs ++ [
|
||||
# Prefer passed hooks
|
||||
(if denoConfigHook != null then denoConfigHook else denoHooks.denoConfigHook)
|
||||
(if denoBuildHook != null then denoBuildHook else denoHooks.denoBuildHook)
|
||||
(if denoInstallHook != null then denoInstallHook else denoHooks.denoInstallHook)
|
||||
denoPackage
|
||||
diffutils
|
||||
zip
|
||||
jq
|
||||
];
|
||||
|
||||
DENO_DIR = denoDir;
|
||||
|
||||
dontFixup = if binaryEntrypointPath != null then false else dontFixup;
|
||||
|
||||
passthru = {
|
||||
inherit denoDeps;
|
||||
};
|
||||
|
||||
meta = (args.meta or { }) // {
|
||||
platforms = args.meta.platforms or denoPackage.meta.platforms;
|
||||
};
|
||||
}
|
||||
)
|
pkgs/build-support/deno/build-deno-package/hooks/default.nix (new file, 29 lines)
@ -0,0 +1,29 @@
|
||||
{
|
||||
makeSetupHook,
|
||||
denort,
|
||||
lib,
|
||||
}:
|
||||
{
|
||||
denoTaskSuffix,
|
||||
denoTaskPrefix,
|
||||
binaryEntrypointPath,
|
||||
}:
|
||||
{
|
||||
denoConfigHook = makeSetupHook {
|
||||
name = "deno-config-hook";
|
||||
substitutions = {
|
||||
denortBinary = lib.optionalString (binaryEntrypointPath != null) (lib.getExe denort);
|
||||
};
|
||||
} ./deno-config-hook.sh;
|
||||
|
||||
denoBuildHook = makeSetupHook {
|
||||
name = "deno-build-hook";
|
||||
substitutions = {
|
||||
inherit denoTaskSuffix denoTaskPrefix;
|
||||
};
|
||||
} ./deno-build-hook.sh;
|
||||
|
||||
denoInstallHook = makeSetupHook {
|
||||
name = "deno-install-hook";
|
||||
} ./deno-install-hook.sh;
|
||||
}
|
pkgs/build-support/deno/build-deno-package/hooks/deno-build-hook.sh (new file)
@@ -0,0 +1,58 @@
|
||||
# shellcheck shell=bash
|
||||
|
||||
denoBuildHook() {
|
||||
echo "Executing denoBuildHook"
|
||||
|
||||
runHook preBuild
|
||||
|
||||
if [ -n "${binaryEntrypointPath-}" ]; then
|
||||
echo "Creating binary"
|
||||
|
||||
package_name=$(jq -r '.name' deno.json)
|
||||
if [ "$package_name" == "null" ]; then
|
||||
package_name="$name"
|
||||
fi
|
||||
|
||||
deno compile \
|
||||
--output "$package_name" \
|
||||
--target "$hostPlatform_" \
|
||||
$denoCompileFlags \
|
||||
$denoFlags \
|
||||
"${denoWorkspacePath+$denoWorkspacePath/}$binaryEntrypointPath"
|
||||
$extraCompileFlags
|
||||
|
||||
elif [ -n "${denoTaskScript-}" ]; then
|
||||
if ! @denoTaskPrefix@ \
|
||||
deno task \
|
||||
${denoWorkspacePath+--cwd=$denoWorkspacePath} \
|
||||
$denoTaskFlags \
|
||||
$denoFlags \
|
||||
"$denoTaskScript" \
|
||||
$extraTaskFlags \
|
||||
@denoTaskSuffix@; then
|
||||
echo
|
||||
echo 'ERROR: `deno task` failed'
|
||||
echo
|
||||
echo "Here are a few things you can try, depending on the error:"
|
||||
echo "1. Make sure your task script ($denoTaskScript) exists"
|
||||
echo
|
||||
|
||||
exit 1
|
||||
fi
|
||||
else
|
||||
echo
|
||||
echo "ERROR: nothing to do in buildPhase"
|
||||
echo "Specify either 'binaryEntrypointPath' or 'denoTaskScript' or override 'buildPhase'"
|
||||
echo
|
||||
|
||||
exit 1
|
||||
fi
|
||||
|
||||
runHook postBuild
|
||||
|
||||
echo "Finished denoBuildHook"
|
||||
}
|
||||
|
||||
if [ -z "${buildPhase-}" ]; then
|
||||
buildPhase=denoBuildHook
|
||||
fi
|
pkgs/build-support/deno/build-deno-package/hooks/deno-config-hook.sh (new file)
@@ -0,0 +1,108 @@
|
||||
# shellcheck shell=bash
|
||||
|
||||
denoConfigHook() {
|
||||
echo "Executing denoConfigHook"
|
||||
|
||||
if [ -z "${denoDeps-}" ]; then
|
||||
echo
|
||||
echo "ERROR: no dependencies were specified"
|
||||
echo 'Hint: set `denoDeps` if using these hooks individually. If this is happening with `buildDenoPackage`, please open an issue.'
|
||||
echo
|
||||
|
||||
exit 1
|
||||
fi
|
||||
|
||||
local -r cacheLockfile="$denoDeps/deno.lock"
|
||||
local -r srcLockfile="$PWD/deno.lock"
|
||||
|
||||
echo "Validating consistency between $srcLockfile and $cacheLockfile"
|
||||
|
||||
if ! diff "$srcLockfile" "$cacheLockfile"; then
|
||||
# If the diff failed, first double-check that the file exists, so we can
|
||||
# give a friendlier error msg.
|
||||
if ! [ -e "$srcLockfile" ]; then
|
||||
echo
|
||||
echo "ERROR: Missing deno.lock from src. Expected to find it at: $srcLockfile"
|
||||
echo
|
||||
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if ! [ -e "$cacheLockfile" ]; then
|
||||
echo
|
||||
echo "ERROR: Missing lockfile from cache. Expected to find it at: $cacheLockfile"
|
||||
echo
|
||||
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo
|
||||
echo "ERROR: denoDepsHash is out of date"
|
||||
echo
|
||||
echo "The deno.lock in src is not the same as the in $denoDeps."
|
||||
echo
|
||||
echo "To fix the issue:"
|
||||
echo '1. Use `lib.fakeHash` as the denoDepsHash value'
|
||||
echo "2. Build the derivation and wait for it to fail with a hash mismatch"
|
||||
echo "3. Copy the 'got: sha256-' value back into the denoDepsHash field"
|
||||
echo
|
||||
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# NOTE: we need to use vendor in the build too, since we used it for the deps
|
||||
useVendor() {
|
||||
jq '.vendor = true' deno.json >temp.json &&
|
||||
rm -f deno.json &&
|
||||
mv temp.json deno.json
|
||||
}
|
||||
echo "Adding vendor to deno.json"
|
||||
useVendor
|
||||
|
||||
echo "Installing dependencies"
|
||||
|
||||
export DENO_DIR="$(pwd)"/"$DENO_DIR"
|
||||
|
||||
installDeps() {
|
||||
if [[ -d "$denoDeps/.deno" ]]; then
|
||||
cp -r --no-preserve=mode "$denoDeps/.deno" "$DENO_DIR"
|
||||
fi
|
||||
if [[ -d "$denoDeps/vendor" ]]; then
|
||||
cp -r --no-preserve=mode "$denoDeps/vendor" ./vendor
|
||||
fi
|
||||
if [[ -d "$denoDeps/node_modules" ]]; then
|
||||
cp -r --no-preserve=mode "$denoDeps/node_modules" ./node_modules
|
||||
fi
|
||||
}
|
||||
installDeps
|
||||
|
||||
if ! deno install $denoInstallFlags_ $denoFlags_; then
|
||||
echo
|
||||
echo "ERROR: deno failed to install dependencies"
|
||||
echo
|
||||
|
||||
exit 1
|
||||
fi
|
||||
|
||||
installDenort() {
|
||||
version="$(deno --version | head -1 | awk '{print $2}')"
|
||||
zipfile=denort-"$hostPlatform_".zip
|
||||
dir="$DENO_DIR"/dl/release/v"$version"
|
||||
mkdir -p "$dir"
|
||||
cp "@denortBinary@" ./denort
|
||||
zip "$dir"/"$zipfile" ./denort
|
||||
rm ./denort
|
||||
}
|
||||
if [ -n "${binaryEntrypointPath-}" ]; then
|
||||
echo "Installing denort for binary build"
|
||||
installDenort
|
||||
fi
|
||||
|
||||
patchShebangs .deno
|
||||
patchShebangs node_modules
|
||||
patchShebangs vendor
|
||||
|
||||
echo "Finished denoConfigHook"
|
||||
}
|
||||
|
||||
postPatchHooks+=(denoConfigHook)
|
pkgs/build-support/deno/build-deno-package/hooks/deno-install-hook.sh (new file)
@@ -0,0 +1,32 @@
|
||||
# shellcheck shell=bash
|
||||
|
||||
denoInstallHook() {
|
||||
echo "Executing denoInstallHook"
|
||||
|
||||
runHook preInstall
|
||||
|
||||
if [ -n "${binaryEntrypointPath-}" ]; then
|
||||
package_name=$(jq -r '.name' deno.json)
|
||||
if [ "$package_name" == "null" ]; then
|
||||
package_name="$name"
|
||||
fi
|
||||
|
||||
mkdir -p "$out/bin"
|
||||
cp "$package_name"* "$out/bin"
|
||||
else
|
||||
echo
|
||||
echo "ERROR: nothing to do in installPhase"
|
||||
echo "Specify either 'binaryEntrypointPath' or override 'installPhase'"
|
||||
echo
|
||||
|
||||
exit 1
|
||||
fi
|
||||
|
||||
runHook postInstall
|
||||
|
||||
echo "Finished denoInstallHook"
|
||||
}
|
||||
|
||||
if [ -z "${dontDenoInstall-}" ] && [ -z "${installPhase-}" ]; then
|
||||
installPhase=denoInstallHook
|
||||
fi
|
pkgs/build-support/deno/fetch-deno-deps/.gitignore (vendored, new file, 4 lines)
@ -0,0 +1,4 @@
|
||||
.deno/
|
||||
vendor/
|
||||
node_modules/
|
||||
.direnv
|
pkgs/build-support/deno/fetch-deno-deps/default.nix (new file, 185 lines)
@ -0,0 +1,185 @@
|
||||
# NOTE: much of this structure is inspired from https://github.com/NixOS/nixpkgs/tree/fff29a3e5f7991512e790617d1a693df5f3550f6/pkgs/build-support/node
|
||||
{
|
||||
lib,
|
||||
stdenvNoCC,
|
||||
deno,
|
||||
jq,
|
||||
cacert,
|
||||
}:
|
||||
{
|
||||
fetchDenoDeps =
|
||||
{
|
||||
name ? "deno-deps",
|
||||
src,
|
||||
hash ? lib.fakeHash,
|
||||
denoPackage ? deno,
|
||||
denoFlags ? [ ],
|
||||
denoInstallFlags ? [
|
||||
"--allow-scripts"
|
||||
"--frozen"
|
||||
],
|
||||
nativeBuildInputs ? [ ],
|
||||
denoDepsImpureEnvVars ? [ ],
|
||||
denoDepsInjectedEnvVars ? { },
|
||||
denoDir ? "./.deno",
|
||||
...
|
||||
}@args:
|
||||
let
|
||||
hash_ =
|
||||
if hash != "" then
|
||||
{ outputHash = hash; }
|
||||
else
|
||||
{
|
||||
outputHash = "";
|
||||
outputHashAlgo = "sha256";
|
||||
};
|
||||
denoInstallFlags_ = builtins.concatStringsSep " " denoInstallFlags;
|
||||
denoFlags_ = builtins.concatStringsSep " " denoFlags;
|
||||
denoDepsInjectedEnvVarsString =
|
||||
if denoDepsInjectedEnvVars != { } then
|
||||
lib.attrsets.foldlAttrs (
|
||||
acc: name: value:
|
||||
"${acc} ${name}=${value}"
|
||||
) "" denoDepsInjectedEnvVars
|
||||
else
|
||||
"";
|
||||
# need to remove denoDepsInjectedEnvVars, since it's an attrset and
|
||||
# stdenv.mkDerivation would try to convert it to string
|
||||
args' = builtins.removeAttrs args [ "denoDepsInjectedEnvVars" ];
|
||||
in
|
||||
stdenvNoCC.mkDerivation (
|
||||
args'
|
||||
// {
|
||||
inherit name src;
|
||||
|
||||
nativeBuildInputs = nativeBuildInputs ++ [
|
||||
denoPackage
|
||||
jq
|
||||
];
|
||||
|
||||
DENO_DIR = denoDir;
|
||||
|
||||
buildPhase = ''
|
||||
runHook preBuild
|
||||
|
||||
if [[ ! -e "deno.json" ]]; then
|
||||
echo ""
|
||||
echo "ERROR: deno.json required, but not found"
|
||||
echo ""
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [[ ! -e "deno.lock" ]]; then
|
||||
echo ""
|
||||
echo "ERROR: deno.lock required, but not found"
|
||||
echo ""
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# NOTE: using vendor reduces the pruning effort a little
|
||||
useVendor() {
|
||||
jq '.vendor = true' deno.json >temp.json && \
|
||||
rm -f deno.json && \
|
||||
mv temp.json deno.json
|
||||
}
|
||||
useVendor
|
||||
|
||||
# uses $DENO_DIR
|
||||
${denoDepsInjectedEnvVarsString} deno install ${denoInstallFlags_} ${denoFlags_}
|
||||
|
||||
echo "pruning non reproducible files"
|
||||
|
||||
# `node_modules` is used when a dependency's package.json declares install scripts.
|
||||
# These install scripts can also require network access, so they have to run inside this fetcher as well.
|
||||
pruneNonReproducibles() {
|
||||
export tempDenoDir="$DENO_DIR"
|
||||
|
||||
# `registry.json` files can't simply be deleted, or `deno install` stops working,
|
||||
# but they contain non-reproducible data,
|
||||
# which needs to be pruned, leaving only the necessary data behind.
|
||||
# This pruning is done with a helper script written in TypeScript and executed with deno.
|
||||
DENO_DIR=./extra_deno_cache deno run \
|
||||
--lock="${./deno.lock}" \
|
||||
--config="${./deno.json}" \
|
||||
--allow-all \
|
||||
"${./prune-registries.ts}" \
|
||||
--lock-json="./deno.lock" \
|
||||
--cache-path="$tempDenoDir" \
|
||||
--vendor-path="./vendor"
|
||||
|
||||
# Keys in `registry.json` files are not deterministically sorted,
|
||||
# so we sort them here.
|
||||
for file in $(find -L "$DENO_DIR" -name registry.json -type f); do
|
||||
jq --sort-keys '.' "$file" >temp.json && \
|
||||
rm -f "$file" && \
|
||||
mv temp.json "$file"
|
||||
done
|
||||
|
||||
# There are various small databases used by deno for caching that
|
||||
# we can simply delete.
|
||||
if [[ -d "./node_modules" ]]; then
|
||||
find -L ./node_modules -name '*cache_v2-shm' -type f | xargs rm -f
|
||||
find -L ./node_modules -name '*cache_v2-wal' -type f | xargs rm -f
|
||||
find -L ./node_modules -name 'dep_analysis_cache_v2' -type f | xargs rm -f
|
||||
find -L ./node_modules -name 'node_analysis_cache_v2' -type f | xargs rm -f
|
||||
find -L ./node_modules -name v8_code_cache_v2 -type f | xargs rm -f
|
||||
rm -f ./node_modules/.deno/.deno.lock.poll
|
||||
|
||||
# Sometimes a .deno dir slips into a node_modules package;
|
||||
# it's unclear why, but it can simply be deleted.
|
||||
find -L ./node_modules -name ".deno" -type d | sort -r | head -n-1 | xargs rm -rf
|
||||
fi
|
||||
|
||||
rm -f "$DENO_DIR"/dep_analysis_cache_v2-shm
|
||||
rm -f "$DENO_DIR"/dep_analysis_cache_v2-wal
|
||||
rm -f "$DENO_DIR"/dep_analysis_cache_v2
|
||||
}
|
||||
pruneNonReproducibles
|
||||
|
||||
runHook postBuild
|
||||
'';
|
||||
|
||||
installPhase = ''
|
||||
runHook preInstall
|
||||
|
||||
if [[ -d "$DENO_DIR" ]]; then
|
||||
mkdir -p $out/$DENO_DIR
|
||||
cp -r --no-preserve=mode $DENO_DIR $out
|
||||
fi
|
||||
if [[ -d "./vendor" ]]; then
|
||||
mkdir -p $out/vendor
|
||||
cp -r --no-preserve=mode ./vendor $out
|
||||
fi
|
||||
if [[ -d "./node_modules" ]]; then
|
||||
mkdir -p $out/node_modules
|
||||
cp -r --no-preserve=mode ./node_modules $out
|
||||
fi
|
||||
|
||||
cp ./deno.lock $out
|
||||
|
||||
runHook postInstall
|
||||
'';
|
||||
|
||||
dontFixup = true;
|
||||
|
||||
outputHashMode = "recursive";
|
||||
|
||||
impureEnvVars = lib.fetchers.proxyImpureEnvVars ++ denoDepsImpureEnvVars;
|
||||
|
||||
SSL_CERT_FILE =
|
||||
if
|
||||
(
|
||||
hash_.outputHash == ""
|
||||
|| hash_.outputHash == lib.fakeSha256
|
||||
|| hash_.outputHash == lib.fakeSha512
|
||||
|| hash_.outputHash == lib.fakeHash
|
||||
)
|
||||
then
|
||||
"${cacert}/etc/ssl/certs/ca-bundle.crt"
|
||||
else
|
||||
"/no-cert-file.crt";
|
||||
|
||||
}
|
||||
// hash_
|
||||
);
|
||||
}
|
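A minimal call-site sketch for the fetcher above, assuming it is reachable as `fetchDenoDeps` (the exact attribute path is not part of this diff). On the first build the default `lib.fakeHash` makes the fixed-output derivation fail and report the real hash, which is then pinned:

```nix
# Hypothetical call site; only arguments from the fetcher's signature are used.
{ fetchDenoDeps }:

fetchDenoDeps {
  name = "my-app-deno-deps";
  src = ./.;  # must contain both deno.json and deno.lock
  # Start with the default lib.fakeHash, then pin the hash reported by the
  # failing fixed-output build.
  hash = "sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=";
  denoInstallFlags = [
    "--allow-scripts"
    "--frozen"
  ];
}
```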
8
pkgs/build-support/deno/fetch-deno-deps/deno.json
Normal file
@ -0,0 +1,8 @@
|
||||
{
|
||||
"tasks": { "test": "deno test" },
|
||||
"imports": {
|
||||
"@std/assert": "jsr:@std/assert@1.0.13",
|
||||
"@std/cli": "jsr:@std/cli@1.0.16",
|
||||
"@std/fs": "jsr:@std/fs@1.0.16"
|
||||
}
|
||||
}
|
53
pkgs/build-support/deno/fetch-deno-deps/deno.lock
generated
Normal file
@ -0,0 +1,53 @@
|
||||
{
|
||||
"version": "5",
|
||||
"specifiers": {
|
||||
"jsr:@std/assert@1.0.13": "1.0.13",
|
||||
"jsr:@std/cli@1.0.16": "1.0.16",
|
||||
"jsr:@std/fs@1.0.16": "1.0.16",
|
||||
"jsr:@std/internal@^1.0.6": "1.0.7",
|
||||
"jsr:@std/path@1.0.9": "1.0.9",
|
||||
"jsr:@std/path@^1.0.8": "1.0.9",
|
||||
"npm:@types/node@*": "22.15.15"
|
||||
},
|
||||
"jsr": {
|
||||
"@std/assert@1.0.13": {
|
||||
"integrity": "ae0d31e41919b12c656c742b22522c32fb26ed0cba32975cb0de2a273cb68b29",
|
||||
"dependencies": [
|
||||
"jsr:@std/internal"
|
||||
]
|
||||
},
|
||||
"@std/cli@1.0.16": {
|
||||
"integrity": "02df293099c35b9e97d8ca05f57f54bd1ee08134f25d19a4756b3924695f4b00"
|
||||
},
|
||||
"@std/fs@1.0.16": {
|
||||
"integrity": "81878f62b6eeda0bf546197fc3daa5327c132fee1273f6113f940784a468b036",
|
||||
"dependencies": [
|
||||
"jsr:@std/path@^1.0.8"
|
||||
]
|
||||
},
|
||||
"@std/internal@1.0.7": {
|
||||
"integrity": "39eeb5265190a7bc5d5591c9ff019490bd1f2c3907c044a11b0d545796158a0f"
|
||||
},
|
||||
"@std/path@1.0.9": {
|
||||
"integrity": "260a49f11edd3db93dd38350bf9cd1b4d1366afa98e81b86167b4e3dd750129e"
|
||||
}
|
||||
},
|
||||
"npm": {
|
||||
"@types/node@22.15.15": {
|
||||
"integrity": "sha512-R5muMcZob3/Jjchn5LcO8jdKwSCbzqmPB6ruBxMcf9kbxtniZHP327s6C37iOfuw8mbKK3cAQa7sEl7afLrQ8A==",
|
||||
"dependencies": [
|
||||
"undici-types"
|
||||
]
|
||||
},
|
||||
"undici-types@6.21.0": {
|
||||
"integrity": "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ=="
|
||||
}
|
||||
},
|
||||
"workspace": {
|
||||
"dependencies": [
|
||||
"jsr:@std/assert@1.0.13",
|
||||
"jsr:@std/cli@1.0.16",
|
||||
"jsr:@std/fs@1.0.16"
|
||||
]
|
||||
}
|
||||
}
|
1054
pkgs/build-support/deno/fetch-deno-deps/prune-registries.test.ts
Normal file
File diff suppressed because it is too large
346
pkgs/build-support/deno/fetch-deno-deps/prune-registries.ts
Normal file
@ -0,0 +1,346 @@
|
||||
#!/usr/bin/env deno
|
||||
import { parseArgs } from "@std/cli/parse-args";
|
||||
import { walkSync } from "@std/fs/walk";
|
||||
|
||||
/**
|
||||
* NOTE: The problem this script solves is that every npm dependency in the deno cache
|
||||
* contains a registry.json file, which serves as a sort of local registry cache for the deno CLI.
|
||||
* Such a file looks like this (with deno v2.1.4):
|
||||
* ```json
|
||||
* {
|
||||
* "name": "@floating-ui/core",
|
||||
* "versions": {
|
||||
* "0.7.0": { ... },
|
||||
* "1.6.0": { ... },
|
||||
* "0.1.2": { ... },
|
||||
* ...
|
||||
* },
|
||||
* "dist-tags": { "latest": "1.7.0" }
|
||||
* }
|
||||
* ```
|
||||
* The deno CLI consults this file to check whether the required versions are available.
|
||||
* The problem is that the set of available versions for a package changes over time. The registry.json files
|
||||
* are part of the fixed-output derivation, so its hash would eventually change
|
||||
* if all those unwanted versions weren't pruned.
|
||||
*
|
||||
* On top of that, a similar thing happens for jsr packages in the vendor directory
|
||||
* with `meta.json` files. These also need to be pruned.
|
||||
* Such a file looks like this (with deno v2.1.4):
|
||||
* ```json
|
||||
* {
|
||||
* "scope": "std",
|
||||
* "name": "internal",
|
||||
* "latest": "1.0.6",
|
||||
* "versions": {
|
||||
* "0.202.0": {},
|
||||
* "1.0.1": {},
|
||||
* "0.225.0": {
|
||||
* "yanked": true
|
||||
* },
|
||||
* }
|
||||
* ```
|
||||
*/
|
||||
|
||||
export type PackageSpecifiers = {
|
||||
[packageIdent: string]: string;
|
||||
};
|
||||
|
||||
export type LockJson = {
|
||||
specifiers: PackageSpecifiers;
|
||||
version: string;
|
||||
workspace: any;
|
||||
[registry: string]: any;
|
||||
};
|
||||
|
||||
export type Config = {
|
||||
lockJson: LockJson;
|
||||
cachePath: string;
|
||||
vendorPath: string;
|
||||
};
|
||||
|
||||
export type PackageInfo = {
|
||||
full: string;
|
||||
registry: string | undefined;
|
||||
scope: string | undefined;
|
||||
name: string;
|
||||
version: string;
|
||||
suffix: string | undefined;
|
||||
};
|
||||
|
||||
export type PackagesByRegistry = {
|
||||
[registry: string]: {
|
||||
[packageName: string]: {
|
||||
[version: string]: PackageInfo;
|
||||
};
|
||||
};
|
||||
};
|
||||
|
||||
export type PathsByRegistry = {
|
||||
[packageRegistry: string]: string[];
|
||||
};
|
||||
|
||||
export type RegistryJson = {
|
||||
"dist-tags": any;
|
||||
"_deno.etag": string;
|
||||
versions: { [version: string]: any };
|
||||
name: string;
|
||||
};
|
||||
|
||||
export type MetaJson = {
|
||||
scope: string;
|
||||
name: string;
|
||||
latest: string;
|
||||
versions: {
|
||||
[version: string]: any;
|
||||
};
|
||||
};
|
||||
|
||||
export function getConfig(): Config {
|
||||
const flags = parseArgs(Deno.args, {
|
||||
string: ["lock-json", "cache-path", "vendor-path"],
|
||||
});
|
||||
|
||||
if (!flags["lock-json"]) {
|
||||
throw "--lock-json flag not set but required";
|
||||
}
|
||||
if (!flags["cache-path"]) {
|
||||
throw "--cache-path flag not set but required";
|
||||
}
|
||||
if (!flags["vendor-path"]) {
|
||||
throw "--vendor-path flag not set but required";
|
||||
}
|
||||
|
||||
const lockJson = JSON.parse(
|
||||
new TextDecoder("utf-8").decode(Deno.readFileSync(flags["lock-json"]))
|
||||
);
|
||||
if (!lockJson) {
|
||||
throw `could not parse lockJson at ${flags["lock-json"]}`;
|
||||
}
|
||||
|
||||
return {
|
||||
lockJson,
|
||||
cachePath: flags["cache-path"],
|
||||
vendorPath: flags["vendor-path"],
|
||||
};
|
||||
}
|
||||
|
||||
export function getAllPackageRegistries(
|
||||
specifiers: PackageSpecifiers
|
||||
): Set<string> {
|
||||
return Object.keys(specifiers).reduce((acc: Set<string>, v: string) => {
|
||||
const s = v.split(":");
|
||||
if (s.length !== 2) {
|
||||
throw "unexpected registry format";
|
||||
}
|
||||
const registry = s[0];
|
||||
acc.add(registry);
|
||||
return acc;
|
||||
}, new Set());
|
||||
}
|
||||
|
||||
export function parsePackageSpecifier(packageSpecifier: string): PackageInfo {
|
||||
const match =
|
||||
/^((?<registry>.*):)?((?<scope>@.*?)\/)?(?<name>.*?)@(?<version>.*?)(?<suffix>_.*)?$/.exec(
|
||||
packageSpecifier
|
||||
);
|
||||
if (
|
||||
match !== null &&
|
||||
match.groups?.name !== undefined &&
|
||||
match.groups?.version !== undefined
|
||||
) {
|
||||
return {
|
||||
// npm:@amazn/style-dictionary@4.2.4_prettier@3.5.3
|
||||
full: match[0],
|
||||
// npm
|
||||
registry: match.groups?.registry,
|
||||
// @amazn
|
||||
scope: match.groups?.scope,
|
||||
// style-dictionary
|
||||
name: match.groups?.name,
|
||||
// 4.2.4
|
||||
version: match.groups?.version,
|
||||
// _prettier@3.5.3
|
||||
suffix: match.groups?.suffix,
|
||||
};
|
||||
}
|
||||
|
||||
throw "unexpected package specifier format";
|
||||
}
|
||||
|
||||
export function getScopedName(name: string, scope?: string): string {
|
||||
if (scope !== undefined) {
|
||||
return `${scope[0] === "@" ? "" : "@"}${scope}/${name}`;
|
||||
}
|
||||
return name;
|
||||
}
|
||||
|
||||
export function getAllPackagesByPackageRegistry(
|
||||
lockJson: LockJson,
|
||||
registries: Set<string>
|
||||
): PackagesByRegistry {
|
||||
const result: PackagesByRegistry = {};
|
||||
for (const registry of Array.from(registries)) {
|
||||
const packageInfosOfRegistries = Object.keys(lockJson[registry]).map(
|
||||
parsePackageSpecifier
|
||||
);
|
||||
result[registry] = {};
|
||||
for (const packageInfo of packageInfosOfRegistries) {
|
||||
const scopedName = getScopedName(packageInfo.name, packageInfo.scope);
|
||||
if (result[registry][scopedName] === undefined) {
|
||||
result[registry][scopedName] = {};
|
||||
}
|
||||
result[registry][scopedName][packageInfo.version] = packageInfo;
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
export function findRegistryJsonPaths(
|
||||
cachePath: string,
|
||||
nonJsrPackages: PackagesByRegistry
|
||||
): PathsByRegistry {
|
||||
const result: PathsByRegistry = {};
|
||||
for (const registry of Object.keys(nonJsrPackages)) {
|
||||
const path = `${cachePath}/${registry}`;
|
||||
const registryJsonPaths = Array.from(walkSync(path))
|
||||
.filter((v) => v.name === "registry.json")
|
||||
.map((v) => v.path);
|
||||
result[registry] = registryJsonPaths;
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
export function pruneRegistryJson(
|
||||
registryJson: RegistryJson,
|
||||
nonJsrPackages: PackagesByRegistry,
|
||||
registry: string
|
||||
) {
|
||||
const scopedName = registryJson.name;
|
||||
const packageInfoByVersion = nonJsrPackages[registry][scopedName];
|
||||
if (!packageInfoByVersion) {
|
||||
throw `could not find key "${scopedName}" in\n${Object.keys(
|
||||
nonJsrPackages[registry]
|
||||
)}`;
|
||||
}
|
||||
|
||||
const newRegistryJson: RegistryJson = {
|
||||
...registryJson,
|
||||
"_deno.etag": "",
|
||||
"dist-tags": {},
|
||||
versions: {},
|
||||
};
|
||||
|
||||
for (const version of Object.keys(packageInfoByVersion)) {
|
||||
newRegistryJson.versions[version] = registryJson.versions[version];
|
||||
}
|
||||
|
||||
return newRegistryJson;
|
||||
}
|
||||
|
||||
export function pruneRegistryJsonFiles(
|
||||
nonJsrPackages: PackagesByRegistry,
|
||||
registryJsonPathsByRegistry: PathsByRegistry
|
||||
): void {
|
||||
for (const [registry, paths] of Object.entries(registryJsonPathsByRegistry)) {
|
||||
for (const path of paths) {
|
||||
const registryJson: RegistryJson = JSON.parse(
|
||||
new TextDecoder("utf-8").decode(Deno.readFileSync(path))
|
||||
);
|
||||
|
||||
const newRegistryJson = pruneRegistryJson(
|
||||
registryJson,
|
||||
nonJsrPackages,
|
||||
registry
|
||||
);
|
||||
|
||||
Deno.writeFileSync(
|
||||
path,
|
||||
new TextEncoder().encode(JSON.stringify(newRegistryJson))
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export function findMetaJsonPaths(
|
||||
vendorPath: string,
|
||||
jsrPackages: PackagesByRegistry
|
||||
): PathsByRegistry {
|
||||
const result: PathsByRegistry = {};
|
||||
for (const registry of Object.keys(jsrPackages)) {
|
||||
const path = `${vendorPath}`;
|
||||
const metaJsonPaths = Array.from(walkSync(path))
|
||||
.filter((v) => v.name === "meta.json")
|
||||
.map((v) => v.path);
|
||||
result[registry] = metaJsonPaths;
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
export function pruneMetaJson(
|
||||
metaJson: MetaJson,
|
||||
jsrPackages: PackagesByRegistry,
|
||||
registry: string
|
||||
): MetaJson {
|
||||
const scopedName = getScopedName(metaJson.name, metaJson.scope);
|
||||
const packageInfoByVersion = jsrPackages[registry][scopedName];
|
||||
if (!packageInfoByVersion) {
|
||||
throw `could not find key "${scopedName}" in\n${Object.keys(
|
||||
jsrPackages[registry]
|
||||
)}`;
|
||||
}
|
||||
const newMetaJson: MetaJson = {
|
||||
...metaJson,
|
||||
latest: "",
|
||||
versions: {},
|
||||
};
|
||||
|
||||
for (const version of Object.keys(packageInfoByVersion)) {
|
||||
newMetaJson.versions[version] = metaJson.versions[version];
|
||||
}
|
||||
return newMetaJson;
|
||||
}
|
||||
|
||||
export function pruneMetaJsonFiles(
|
||||
jsrPackages: PackagesByRegistry,
|
||||
metaJsonPathsByRegistry: PathsByRegistry
|
||||
): void {
|
||||
for (const [registry, paths] of Object.entries(metaJsonPathsByRegistry)) {
|
||||
for (const path of paths) {
|
||||
const metaJson: MetaJson = JSON.parse(
|
||||
new TextDecoder("utf-8").decode(Deno.readFileSync(path))
|
||||
);
|
||||
|
||||
const newMetaJson = pruneMetaJson(metaJson, jsrPackages, registry);
|
||||
|
||||
Deno.writeFileSync(
|
||||
path,
|
||||
new TextEncoder().encode(JSON.stringify(newMetaJson))
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function main() {
|
||||
const config = getConfig();
|
||||
const registries = getAllPackageRegistries(config.lockJson.specifiers);
|
||||
const packages = getAllPackagesByPackageRegistry(config.lockJson, registries);
|
||||
|
||||
const jsrPackages = {
|
||||
jsr: structuredClone(packages.jsr),
|
||||
} satisfies PackagesByRegistry;
|
||||
delete packages.jsr;
|
||||
const nonJsrPackages = packages;
|
||||
|
||||
const metaJsonpaths = findMetaJsonPaths(config.vendorPath, jsrPackages);
|
||||
pruneMetaJsonFiles(jsrPackages, metaJsonpaths);
|
||||
|
||||
const registryJsonPaths = findRegistryJsonPaths(
|
||||
config.cachePath,
|
||||
nonJsrPackages
|
||||
);
|
||||
pruneRegistryJsonFiles(nonJsrPackages, registryJsonPaths);
|
||||
}
|
||||
|
||||
if (import.meta.main) {
|
||||
main();
|
||||
}
|
7
pkgs/build-support/deno/fetch-deno-deps/shell.nix
Normal file
@ -0,0 +1,7 @@
|
||||
let
|
||||
pkgs = import ../../../../default.nix { };
|
||||
in
|
||||
pkgs.mkShell {
|
||||
buildInputs = [ pkgs.deno ];
|
||||
DENO_DIR = "./.deno";
|
||||
}
|
@ -30,4 +30,6 @@ stdenv.mkDerivation {
|
||||
|
||||
# 1Password is notarized.
|
||||
dontFixup = true;
|
||||
|
||||
passthru.updateScript = ./update.sh;
|
||||
}
|
||||
|
@ -153,4 +153,6 @@ stdenv.mkDerivation {
|
||||
# https://1password.community/discussion/comment/624011/#Comment_624011
|
||||
#--add-flags "\''${NIXOS_OZONE_WL:+\''${WAYLAND_DISPLAY:+--ozone-platform-hint=auto --enable-features=WaylandWindowDecorations --enable-wayland-ime=true}}"
|
||||
'';
|
||||
|
||||
passthru.updateScript = ./update.sh;
|
||||
}
|
||||
|
@ -11,18 +11,18 @@
|
||||
let
|
||||
pname = "1password";
|
||||
|
||||
versions = builtins.fromJSON (builtins.readFile ./versions.json);
|
||||
hostOs = if stdenv.hostPlatform.isLinux then "linux" else "darwin";
|
||||
version = versions."${channel}-${hostOs}" or (throw "unknown channel-os ${channel}-${hostOs}");
|
||||
|
||||
hostOs = stdenv.hostPlatform.parsed.kernel.name;
|
||||
hostArch = stdenv.hostPlatform.parsed.cpu.name;
|
||||
sources = builtins.fromJSON (builtins.readFile ./sources.json);
|
||||
|
||||
sourcesChan = sources.${channel} or (throw "unsupported channel ${channel}");
|
||||
sourcesChanOs = sourcesChan.${hostOs} or (throw "unsupported OS ${hostOs}");
|
||||
sourcesChanOsArch =
|
||||
sourcesChanOs.sources.${hostArch} or (throw "unsupported architecture ${hostArch}");
|
||||
|
||||
inherit (sourcesChanOs) version;
|
||||
src = fetchurl {
|
||||
inherit
|
||||
(sources.${channel}.${stdenv.system} or (throw "unsupported system ${stdenv.hostPlatform.system}"))
|
||||
url
|
||||
hash
|
||||
;
|
||||
inherit (sourcesChanOsArch) url hash;
|
||||
};
|
||||
|
||||
meta = {
|
||||
@ -37,7 +37,12 @@ let
|
||||
sebtm
|
||||
bdd
|
||||
];
|
||||
platforms = builtins.attrNames sources.${channel};
|
||||
platforms = [
|
||||
"x86_64-linux"
|
||||
"x86_64-darwin"
|
||||
"aarch64-linux"
|
||||
"aarch64-darwin"
|
||||
];
|
||||
mainProgram = "1password";
|
||||
};
|
||||
|
||||
|
@ -1,38 +1,58 @@
|
||||
{
|
||||
"stable": {
|
||||
"x86_64-linux": {
|
||||
"url": "https://downloads.1password.com/linux/tar/stable/x86_64/1password-8.10.78.x64.tar.gz",
|
||||
"hash": "sha256-COmXSjbCetPsbm40OrWGVtULPheEgnHEO0ZcIgWaG1w="
|
||||
"linux": {
|
||||
"version": "8.10.78",
|
||||
"sources": {
|
||||
"x86_64": {
|
||||
"url": "https://downloads.1password.com/linux/tar/stable/x86_64/1password-8.10.78.x64.tar.gz",
|
||||
"hash": "sha256-COmXSjbCetPsbm40OrWGVtULPheEgnHEO0ZcIgWaG1w="
|
||||
},
|
||||
"aarch64": {
|
||||
"url": "https://downloads.1password.com/linux/tar/stable/aarch64/1password-8.10.78.arm64.tar.gz",
|
||||
"hash": "sha256-diy7VhKRluSnVSR35Ogamf9RDHdqxSJifLOOYmMrJHE="
|
||||
}
|
||||
}
|
||||
},
|
||||
"aarch64-linux": {
|
||||
"url": "https://downloads.1password.com/linux/tar/stable/aarch64/1password-8.10.78.arm64.tar.gz",
|
||||
"hash": "sha256-diy7VhKRluSnVSR35Ogamf9RDHdqxSJifLOOYmMrJHE="
|
||||
},
|
||||
"x86_64-darwin": {
|
||||
"url": "https://downloads.1password.com/mac/1Password-8.10.78-x86_64.zip",
|
||||
"hash": "sha256-8fbjEc/Z0xCdXq/uHp4bQE5Js5hNLbVCRZxnepUdLUs="
|
||||
},
|
||||
"aarch64-darwin": {
|
||||
"url": "https://downloads.1password.com/mac/1Password-8.10.78-aarch64.zip",
|
||||
"hash": "sha256-x03dZ/eVrvFcbese1cBAvyJKwtWe6rOcgytn0OsEFDQ="
|
||||
"darwin": {
|
||||
"version": "8.10.78",
|
||||
"sources": {
|
||||
"x86_64": {
|
||||
"url": "https://downloads.1password.com/mac/1Password-8.10.78-x86_64.zip",
|
||||
"hash": "sha256-8fbjEc/Z0xCdXq/uHp4bQE5Js5hNLbVCRZxnepUdLUs="
|
||||
},
|
||||
"aarch64": {
|
||||
"url": "https://downloads.1password.com/mac/1Password-8.10.78-aarch64.zip",
|
||||
"hash": "sha256-x03dZ/eVrvFcbese1cBAvyJKwtWe6rOcgytn0OsEFDQ="
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"beta": {
|
||||
"x86_64-linux": {
|
||||
"url": "https://downloads.1password.com/linux/tar/beta/x86_64/1password-8.10.80-18.BETA.x64.tar.gz",
|
||||
"hash": "sha256-X2Wu/dQQ7fv+tTAU2/70S38wL6WdJuc/DXWoiHZvSP4="
|
||||
"linux": {
|
||||
"version": "8.10.80-18.BETA",
|
||||
"sources": {
|
||||
"x86_64": {
|
||||
"url": "https://downloads.1password.com/linux/tar/beta/x86_64/1password-8.10.80-18.BETA.x64.tar.gz",
|
||||
"hash": "sha256-X2Wu/dQQ7fv+tTAU2/70S38wL6WdJuc/DXWoiHZvSP4="
|
||||
},
|
||||
"aarch64": {
|
||||
"url": "https://downloads.1password.com/linux/tar/beta/aarch64/1password-8.10.80-18.BETA.arm64.tar.gz",
|
||||
"hash": "sha256-52aRg6QD/fKOzOHoG88q8VNJIizxnISFnpxek7bJ05w="
|
||||
}
|
||||
}
|
||||
},
|
||||
"aarch64-linux": {
|
||||
"url": "https://downloads.1password.com/linux/tar/beta/aarch64/1password-8.10.80-18.BETA.arm64.tar.gz",
|
||||
"hash": "sha256-52aRg6QD/fKOzOHoG88q8VNJIizxnISFnpxek7bJ05w="
|
||||
},
|
||||
"x86_64-darwin": {
|
||||
"url": "https://downloads.1password.com/mac/1Password-8.10.80-18.BETA-x86_64.zip",
|
||||
"hash": "sha256-kUU+nm19DmdY8ZG6d+EJFQXcCy/BOauXh83suQLSvz0="
|
||||
},
|
||||
"aarch64-darwin": {
|
||||
"url": "https://downloads.1password.com/mac/1Password-8.10.80-18.BETA-aarch64.zip",
|
||||
"hash": "sha256-eZG0QaB5NRwRCYcmlfZA/HTceLq7eUzR+AvzDeOrzAY="
|
||||
"darwin": {
|
||||
"version": "8.10.80-18.BETA",
|
||||
"sources": {
|
||||
"x86_64": {
|
||||
"url": "https://downloads.1password.com/mac/1Password-8.10.80-18.BETA-x86_64.zip",
|
||||
"hash": "sha256-kUU+nm19DmdY8ZG6d+EJFQXcCy/BOauXh83suQLSvz0="
|
||||
},
|
||||
"aarch64": {
|
||||
"url": "https://downloads.1password.com/mac/1Password-8.10.80-18.BETA-aarch64.zip",
|
||||
"hash": "sha256-eZG0QaB5NRwRCYcmlfZA/HTceLq7eUzR+AvzDeOrzAY="
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
122
pkgs/by-name/_1/_1password-gui/update-sources.py
Executable file
@ -0,0 +1,122 @@
|
||||
#!/usr/bin/env nix-shell
|
||||
#!nix-shell -i python3 -p python3 gnupg
|
||||
import json
|
||||
import os
|
||||
import shutil
|
||||
import subprocess
|
||||
import sys
|
||||
import tempfile
|
||||
from collections import OrderedDict
|
||||
|
||||
DOWNLOADS_BASE_URL = "https://downloads.1password.com"
|
||||
OP_PGP_KEYID = "3FEF9748469ADBE15DA7CA80AC2D62742012EA22"
|
||||
|
||||
|
||||
class Sources(OrderedDict):
|
||||
def __init__(self):
|
||||
self._jsonfp = open("sources.json", "r+")
|
||||
self.update(json.load(self._jsonfp))
|
||||
self._jsonfp.seek(0, os.SEEK_SET)
|
||||
|
||||
def persist(self):
|
||||
json.dump(self, self._jsonfp, indent=2)
|
||||
self._jsonfp.write("\n") # keep fmt.check happy
|
||||
|
||||
|
||||
class GPG:
|
||||
def __new__(cls):
|
||||
if not hasattr(cls, "_instance"):
|
||||
cls._instance = super().__new__(cls)
|
||||
return cls._instance
|
||||
|
||||
def __init__(self):
|
||||
if hasattr(self, "gnupghome"):
|
||||
return
|
||||
|
||||
self.gpg = shutil.which("gpg")
|
||||
self.gpgv = shutil.which("gpgv")
|
||||
self.gnupghome = tempfile.mkdtemp(prefix="1password-gui-gnupghome.")
|
||||
self.env = {"GNUPGHOME": self.gnupghome}
|
||||
self._run(
|
||||
self.gpg,
|
||||
"--no-default-keyring",
|
||||
"--keyring",
|
||||
"trustedkeys.kbx",
|
||||
"--keyserver",
|
||||
"keyserver.ubuntu.com",
|
||||
"--receive-keys",
|
||||
OP_PGP_KEYID,
|
||||
)
|
||||
|
||||
def __del__(self):
|
||||
shutil.rmtree(self.gnupghome)
|
||||
|
||||
def _run(self, *args):
|
||||
try:
|
||||
subprocess.run(args, env=self.env, check=True, capture_output=True)
|
||||
except subprocess.CalledProcessError as cpe:
|
||||
print(cpe.stderr, file=sys.stderr)
|
||||
raise SystemExit(f"gpg error: {cpe.cmd}")
|
||||
|
||||
def verify(self, sigfile, datafile):
|
||||
return self._run(self.gpgv, sigfile, datafile)
|
||||
|
||||
|
||||
def nix_store_prefetch(url):
|
||||
nix = shutil.which("nix")
|
||||
cp = subprocess.run(
|
||||
[nix, "store", "prefetch-file", "--json", url], check=True, capture_output=True
|
||||
)
|
||||
out = json.loads(cp.stdout)
|
||||
|
||||
return out["storePath"], out["hash"]
|
||||
|
||||
|
||||
def mk_url(channel, os, version, arch):
|
||||
if os == "linux":
|
||||
arch_alias = {"x86_64": "x64", "aarch64": "arm64"}[arch]
|
||||
path = f"linux/tar/{channel}/{arch}/1password-{version}.{arch_alias}.tar.gz"
|
||||
elif os == "darwin":
|
||||
path = f"mac/1Password-{version}-{arch}.zip"
|
||||
else:
|
||||
raise SystemExit(f"update-sources.py: unsupported OS {os}")
|
||||
|
||||
return f"{DOWNLOADS_BASE_URL}/{path}"
|
||||
|
||||
|
||||
def download(channel, os, version, arch):
|
||||
url = mk_url(channel, os, version, arch)
|
||||
store_path_tarball, hash = nix_store_prefetch(url)
|
||||
|
||||
# Linux release tarballs come with detached PGP signatures.
|
||||
if os == "linux":
|
||||
store_path_sig, _ = nix_store_prefetch(url + ".sig")
|
||||
GPG().verify(store_path_sig, store_path_tarball)
|
||||
|
||||
return url, hash
|
||||
|
||||
|
||||
def main(args):
|
||||
"""Gets called with args in `channel/os/version` format.
|
||||
|
||||
e.g.:
|
||||
update-sources.py stable/linux/8.10.80 beta/linux/8.10.82-12.BETA
|
||||
"""
|
||||
sources = Sources()
|
||||
|
||||
for triplet in args[1:]:
|
||||
channel, os, version = triplet.split("/")
|
||||
release = sources[channel][os]
|
||||
if release["version"] == version:
|
||||
continue
|
||||
|
||||
release["version"] = version
|
||||
for arch in release["sources"]:
|
||||
url, hash = download(channel, os, version, arch)
|
||||
release["sources"][arch].update({"url": url, "hash": hash})
|
||||
|
||||
sources.persist()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
sys.exit(main(sys.argv))
|
@ -1,84 +1,118 @@
|
||||
#!/usr/bin/env nix-shell
|
||||
#!nix-shell -i bash -p jq gnupg
|
||||
#!nix-shell -i bash -p jq curl
|
||||
#shellcheck shell=bash
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
# For Linux version checks we rely on the Repology API to check the 1Password-maintained Arch User Repository package.
|
||||
REPOLOGY_PROJECT_URI="https://repology.org/api/v1/project/1password"
|
||||
|
||||
# For Darwin version checks we query the same endpoint that 1Password 8 for Mac queries.
|
||||
# This is the base URI; the stable channel appends a path segment of "N", the beta channel "Y".
|
||||
APP_UPDATES_URI_BASE="https://app-updates.agilebits.com/check/2/99/aarch64/OPM8/en/0/A1"
|
||||
|
||||
CURL=(
|
||||
"curl" "--silent" "--show-error" "--fail"
|
||||
"--proto" "=https" # enforce https
|
||||
"--tlsv1.2" # do not accept anything below tls 1.2
|
||||
"-H" "user-agent: nixpkgs#_1password-gui update.sh" # repology requires a descriptive user-agent
|
||||
)
|
||||
|
||||
JQ=(
|
||||
"jq"
|
||||
"--raw-output"
|
||||
"--exit-status" # exit non-zero if no output is produced
|
||||
)
|
||||
|
||||
|
||||
read_local_versions() {
|
||||
local channel="$1"
|
||||
|
||||
while IFS='=' read -r key value; do
|
||||
local_versions["${key}"]="${value}"
|
||||
done < <(jq -r --arg channel "${channel}" '
|
||||
.[$channel] | to_entries[] | .key as $os | .value.version as $version |
|
||||
"\($channel)/\($os)=\($version)"
|
||||
' sources.json)
|
||||
}
|
||||
|
||||
read_remote_versions() {
|
||||
local channel="$1"
|
||||
|
||||
if [[ ${channel} == "stable" ]]; then
|
||||
remote_versions["stable/linux"]=$(
|
||||
"${CURL[@]}" "${REPOLOGY_PROJECT_URI}" \
|
||||
| "${JQ[@]}" '.[] | select(.repo == "aur" and .srcname == "1password" and .status == "newest") | .version'
|
||||
)
|
||||
|
||||
remote_versions["stable/darwin"]=$(
|
||||
"${CURL[@]}" "${APP_UPDATES_URI_BASE}/N" \
|
||||
| "${JQ[@]}" 'select(.available == "1") | .version'
|
||||
)
|
||||
else
|
||||
remote_versions["beta/linux"]=$(
|
||||
# The AUR version string uses underscores instead of dashes for betas.
|
||||
# We fix that with a `sub` in the jq query.
|
||||
"${CURL[@]}" "${REPOLOGY_PROJECT_URI}" \
|
||||
| "${JQ[@]}" '.[] | select(.repo == "aur" and .srcname == "1password-beta") | .version | sub("_"; "-")'
|
||||
)
|
||||
|
||||
remote_versions["beta/darwin"]=$(
|
||||
"${CURL[@]}" "${APP_UPDATES_URI_BASE}/Y" \
|
||||
| "${JQ[@]}" 'select(.available == "1") | .version'
|
||||
)
|
||||
fi
|
||||
}
|
||||
|
||||
render_versions_json() {
|
||||
local key value
|
||||
|
||||
for key in "${!local_versions[@]}"; do
|
||||
value="${local_versions[${key}]}"
|
||||
echo "${key}"
|
||||
echo "${value}"
|
||||
done \
|
||||
| jq -nR 'reduce inputs as $i ({}; . + { $i: input })'
|
||||
}
|
||||
|
||||
|
||||
cd -- "$(dirname "${BASH_SOURCE[0]}")"
|
||||
|
||||
mk_url() {
|
||||
local \
|
||||
base_url="https://downloads.1password.com" \
|
||||
os="$1" \
|
||||
channel="$2" \
|
||||
arch="$3" \
|
||||
version="$4"
|
||||
attr_path=${UPDATE_NIX_ATTR_PATH}
|
||||
case "${attr_path}" in
|
||||
_1password-gui) channel="stable" ;;
|
||||
_1password-gui-beta) channel="beta" ;;
|
||||
*)
|
||||
echo "Unknown attribute path ${attr_path}" >&2
|
||||
exit 1
|
||||
esac
|
||||
|
||||
if [[ ${os} == "linux" ]]; then
|
||||
if [[ ${arch} == "x86_64" ]]; then
|
||||
ext="x64.tar.gz"
|
||||
else
|
||||
ext="arm64.tar.gz"
|
||||
fi
|
||||
url="${base_url}/${os}/tar/${channel}/${arch}/1password-${version}.${ext}"
|
||||
else
|
||||
ext="${arch}.zip"
|
||||
url="${base_url}/mac/1Password-${version}-${ext}"
|
||||
declare -A local_versions remote_versions
|
||||
declare -a new_version_available=()
|
||||
read_local_versions "${channel}"
|
||||
read_remote_versions "${channel}"
|
||||
for i in "${!remote_versions[@]}"; do
|
||||
if [[ "${local_versions[$i]}" != "${remote_versions[$i]}" ]]; then
|
||||
old_version="${local_versions[$i]}"
|
||||
new_version="${remote_versions[$i]}"
|
||||
new_version_available+=("$i/$new_version")
|
||||
fi
|
||||
|
||||
echo "${url}"
|
||||
}
|
||||
|
||||
cleanup() {
|
||||
if [[ -d ${TMP_GNUPGHOME-} ]]; then
|
||||
rm -r "${TMP_GNUPGHOME}"
|
||||
fi
|
||||
|
||||
if [[ -f ${JSON_HEAP-} ]]; then
|
||||
rm "${JSON_HEAP}"
|
||||
fi
|
||||
}
|
||||
|
||||
trap cleanup EXIT
|
||||
|
||||
# Get channel versions from versions.json
|
||||
declare -A versions
|
||||
while IFS='=' read -r key value; do
|
||||
versions["${key}"]="${value}"
|
||||
done < <(jq -r 'to_entries[] | "\(.key)=\(.value)"' versions.json)
|
||||
|
||||
TMP_GNUPGHOME=$(mktemp -dt 1password-gui.gnupghome.XXXXXX)
|
||||
export GNUPGHOME="${TMP_GNUPGHOME}"
|
||||
gpg --no-default-keyring --keyring trustedkeys.kbx \
|
||||
--keyserver keyserver.ubuntu.com \
|
||||
--receive-keys 3FEF9748469ADBE15DA7CA80AC2D62742012EA22
|
||||
|
||||
JSON_HEAP=$(mktemp -t 1password-gui.jsonheap.XXXXXX)
|
||||
for channel in stable beta; do
|
||||
for os in linux darwin; do
|
||||
for arch in x86_64 aarch64; do
|
||||
version="${versions[${channel}-${os}]}"
|
||||
url=$(mk_url ${os} ${channel} ${arch} ${version})
|
||||
nix store prefetch-file --json "${url}" | jq "
|
||||
{
|
||||
\"${channel}\": {
|
||||
\"${arch}-${os}\": {
|
||||
\"url\": \"${url}\",
|
||||
\"hash\": .hash,
|
||||
\"storePath\": .storePath
|
||||
}
|
||||
}
|
||||
}" >> "${JSON_HEAP}"
|
||||
|
||||
# For some reason 1Password PGP signs only Linux binaries.
|
||||
if [[ ${os} == "linux" ]]; then
|
||||
gpgv \
|
||||
$(nix store prefetch-file --json "${url}.sig" | jq -r .storePath) \
|
||||
$(jq -r --slurp ".[-1].[].[].storePath" "${JSON_HEAP}")
|
||||
fi
|
||||
done
|
||||
done
|
||||
done
|
||||
|
||||
# Combine heap of hash+url objects into a single JSON object.
|
||||
jq --slurp 'reduce .[] as $x ({}; . * $x) | del (.[].[].storePath)' "${JSON_HEAP}" > sources.json
|
||||
if [[ ${#new_version_available[@]} -eq 0 ]]; then
|
||||
# up to date
|
||||
exit
|
||||
fi
|
||||
|
||||
./update-sources.py "${new_version_available[@]}"
|
||||
cat <<EOF
|
||||
[
|
||||
{
|
||||
"attrPath": "${attr_path}",
|
||||
"oldVersion": "${old_version}",
|
||||
"newVersion": "${new_version}",
|
||||
"files": [
|
||||
"$PWD/sources.json"
|
||||
]
|
||||
}
|
||||
]
|
||||
EOF
|
||||
|
@ -1,6 +0,0 @@
|
||||
{
|
||||
"stable-linux": "8.10.78",
|
||||
"stable-darwin": "8.10.78",
|
||||
"beta-linux":"8.10.80-18.BETA",
|
||||
"beta-darwin": "8.10.80-18.BETA"
|
||||
}
|
@ -10,17 +10,17 @@
|
||||
|
||||
rustPlatform.buildRustPackage (finalAttrs: {
|
||||
pname = "alistral";
|
||||
version = "0.5.10";
|
||||
version = "0.5.11";
|
||||
|
||||
src = fetchFromGitHub {
|
||||
owner = "RustyNova016";
|
||||
repo = "Alistral";
|
||||
tag = "v${finalAttrs.version}";
|
||||
hash = "sha256-O19Btz6jLJJnCbAusRHKfUa6II8mofzifW+cbPOaHVI=";
|
||||
hash = "sha256-wiNXwg6mC24nWwakA9cX8OYDOhghoEgm0yVR3Tmtod4=";
|
||||
};
|
||||
|
||||
useFetchCargoVendor = true;
|
||||
cargoHash = "sha256-zQvPgigUQW9dpyLe7fgW8i9I4nm38bQKDLwezeSYx9Q=";
|
||||
cargoHash = "sha256-M3nwa93vzVm+GtCdmBn/jqIvgJRcULw+8FFFLPmfbyg=";
|
||||
|
||||
nativeBuildInputs = [
|
||||
pkg-config
|
||||
|
@ -1,35 +0,0 @@
|
||||
{ buildNpmPackage, anubis }:
|
||||
|
||||
buildNpmPackage {
|
||||
pname = "${anubis.pname}-xess";
|
||||
inherit (anubis) version src;
|
||||
|
||||
npmDepsHash = "sha256-wI8XCUGq3aI20B++RAT3lc/nBrDMEmE9+810lewzXa0=";
|
||||
|
||||
buildPhase = ''
|
||||
runHook preBuild
|
||||
|
||||
npx postcss ./xess/xess.css -o xess.min.css
|
||||
|
||||
runHook postBuild
|
||||
'';
|
||||
|
||||
installPhase = ''
|
||||
runHook preInstall
|
||||
|
||||
install -Dm644 xess.min.css $out/xess.min.css
|
||||
|
||||
runHook postInstall
|
||||
'';
|
||||
|
||||
meta = anubis.meta // {
|
||||
description = "Xess files for Anubis";
|
||||
longDescription = ''
|
||||
This package is consumed by the main `anubis` package to render the final
|
||||
styling for the bot check page.
|
||||
|
||||
**It is not supposed to be used as a standalone package**, and it exists to
|
||||
ensure Anubis' styling is override-able by downstreams.
|
||||
'';
|
||||
};
|
||||
}
|
@ -4,8 +4,7 @@
|
||||
fetchFromGitHub,
|
||||
nixosTests,
|
||||
stdenv,
|
||||
|
||||
anubis-xess,
|
||||
buildNpmPackage,
|
||||
|
||||
esbuild,
|
||||
brotli,
|
||||
@ -31,26 +30,39 @@ buildGoModule (finalAttrs: {
|
||||
zstd
|
||||
];
|
||||
|
||||
subPackages = [
|
||||
"cmd/anubis"
|
||||
];
|
||||
xess = buildNpmPackage {
|
||||
pname = "anubis-xess";
|
||||
inherit (finalAttrs) version src;
|
||||
|
||||
ldflags =
|
||||
[
|
||||
"-s"
|
||||
"-w"
|
||||
"-X=github.com/TecharoHQ/anubis.Version=v${finalAttrs.version}"
|
||||
]
|
||||
++ lib.optionals stdenv.hostPlatform.isLinux [
|
||||
"-extldflags=-static"
|
||||
];
|
||||
npmDepsHash = "sha256-wI8XCUGq3aI20B++RAT3lc/nBrDMEmE9+810lewzXa0=";
|
||||
|
||||
buildPhase = ''
|
||||
runHook preBuild
|
||||
npx postcss ./xess/xess.css -o xess.min.css
|
||||
runHook postBuild
|
||||
'';
|
||||
|
||||
installPhase = ''
|
||||
runHook preInstall
|
||||
install -Dm644 xess.min.css $out/xess.min.css
|
||||
runHook postInstall
|
||||
'';
|
||||
};
|
||||
|
||||
subPackages = [ "cmd/anubis" ];
|
||||
|
||||
ldflags = [
|
||||
"-s"
|
||||
"-w"
|
||||
"-X=github.com/TecharoHQ/anubis.Version=v${finalAttrs.version}"
|
||||
] ++ lib.optionals stdenv.hostPlatform.isLinux [ "-extldflags=-static" ];
|
||||
|
||||
postPatch = ''
|
||||
patchShebangs ./web/build.sh
|
||||
'';
|
||||
|
||||
preBuild = ''
|
||||
go generate ./... && ./web/build.sh && cp -r ${anubis-xess}/xess.min.css ./xess
|
||||
go generate ./... && ./web/build.sh && cp -r ${finalAttrs.xess}/xess.min.css ./xess
|
||||
'';
|
||||
|
||||
preCheck = ''
|
||||
@ -58,10 +70,12 @@ buildGoModule (finalAttrs: {
|
||||
'';
|
||||
|
||||
passthru.tests = { inherit (nixosTests) anubis; };
|
||||
passthru.updateScript = ./update.sh;
|
||||
|
||||
meta = {
|
||||
description = "Weighs the soul of incoming HTTP requests using proof-of-work to stop AI crawlers";
|
||||
homepage = "https://anubis.techaro.lol/";
|
||||
downloadPage = "https://github.com/TecharoHQ/anubis";
|
||||
changelog = "https://github.com/TecharoHQ/anubis/releases/tag/v${finalAttrs.version}";
|
||||
license = lib.licenses.mit;
|
||||
maintainers = with lib.maintainers; [
|
||||
@ -69,6 +83,7 @@ buildGoModule (finalAttrs: {
|
||||
soopyc
|
||||
ryand56
|
||||
sigmasquadron
|
||||
defelo
|
||||
];
|
||||
mainProgram = "anubis";
|
||||
};
|
||||
|
8
pkgs/by-name/an/anubis/update.sh
Executable file
@ -0,0 +1,8 @@
|
||||
#!/usr/bin/env nix-shell
|
||||
#!nix-shell -i bash -p nix-update
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
nix-update anubis --src-only
|
||||
nix-update anubis.xess --version=skip
|
||||
nix-update anubis --version=skip
|
@ -30,13 +30,13 @@
|
||||
|
||||
stdenv.mkDerivation (finalAttrs: {
|
||||
pname = "ccextractor";
|
||||
version = "0.94-unstable-2024-08-12";
|
||||
version = "0.94-unstable-2025-05-20";
|
||||
|
||||
src = fetchFromGitHub {
|
||||
owner = "CCExtractor";
|
||||
repo = "ccextractor";
|
||||
rev = "92f2ce0fa026b01fb07db6751210e6bd8c8944d3";
|
||||
hash = "sha256-bp7T9uJK4bauR2Co4lKqqnM6oGa3WZ+1toEKmzOx4mI=";
|
||||
rev = "407d0f4e93611c5b0ceb14b7fc01d4a4c2e90433";
|
||||
hash = "sha256-BfsQmCNB4HRafqJ3pC2ECiwhOgwKuIqiLjr2/bvHr7Q=";
|
||||
};
|
||||
|
||||
patches = [
|
||||
@ -51,7 +51,7 @@ stdenv.mkDerivation (finalAttrs: {
|
||||
cargoDeps = rustPlatform.fetchCargoVendor {
|
||||
inherit (finalAttrs) src cargoRoot;
|
||||
patches = [ ./use-rsmpeg-0.15.patch ];
|
||||
hash = "sha256-7v3gQghByUDWZLJRRGa/7X2ivUumirq6BbexNQcCXCk=";
|
||||
hash = "sha256-68Y8nzPHxhVIRHoPXOy9tc71177lCBuOf//z3cqyDGQ=";
|
||||
};
|
||||
|
||||
nativeBuildInputs = [
|
||||
|
@ -34,10 +34,9 @@ index 4c1e73dcf0..68502915dc 100644
|
||||
+++ b/src/rust/Cargo.toml
|
||||
@@ -15,7 +15,7 @@
|
||||
env_logger = "0.8.4"
|
||||
iconv = "0.1.1"
|
||||
palette = "0.6.0"
|
||||
-rsmpeg = { version = "0.14.1", optional = true, features = [
|
||||
palette = "0.6.1"
|
||||
-rsmpeg = { version = "0.14.2", optional = true, features = [
|
||||
+rsmpeg = { version = "0.15.1", optional = true, features = [
|
||||
"link_system_ffmpeg",
|
||||
] }
|
||||
tesseract-sys = { version = "0.5.14", optional = true, default-features = false }
|
||||
tesseract-sys = { version = "0.5.15", optional = true, default-features = false }
|
||||
|
30
pkgs/by-name/de/denort/package.nix
Normal file
@ -0,0 +1,30 @@
|
||||
{
|
||||
deno,
|
||||
lib,
|
||||
}:
|
||||
deno.overrideAttrs (
|
||||
final: prev: {
|
||||
pname = "denort";
|
||||
buildAndTestSubdir = "cli/rt";
|
||||
postInstall = "";
|
||||
installCheckPhase = "";
|
||||
passthru = { };
|
||||
meta = with lib; {
|
||||
homepage = "https://deno.land/";
|
||||
changelog = "https://github.com/denoland/deno/releases/tag/v${final.version}";
|
||||
description = "Slim version of the deno runtime, usually bundled with deno projects into standalone binaries";
|
||||
license = licenses.mit;
|
||||
mainProgram = "denort";
|
||||
maintainers = with maintainers; [
|
||||
jk
|
||||
ofalvai
|
||||
];
|
||||
platforms = [
|
||||
"x86_64-linux"
|
||||
"aarch64-linux"
|
||||
"x86_64-darwin"
|
||||
"aarch64-darwin"
|
||||
];
|
||||
};
|
||||
}
|
||||
)
|
@ -8,17 +8,17 @@
|
||||
|
||||
rustPlatform.buildRustPackage (finalAttrs: {
|
||||
pname = "difftastic";
|
||||
version = "0.63.0";
|
||||
version = "0.64.0";
|
||||
|
||||
src = fetchFromGitHub {
|
||||
owner = "wilfred";
|
||||
repo = "difftastic";
|
||||
tag = finalAttrs.version;
|
||||
hash = "sha256-BxWCSkSeDyiiGBY2u0ahPrIhYq2lbujoPPtZGq/OkI0=";
|
||||
hash = "sha256-XMvysYO6Kji9cbfGayB6wPVuNp0j2uXLHfZ9H+dBLt0=";
|
||||
};
|
||||
|
||||
useFetchCargoVendor = true;
|
||||
cargoHash = "sha256-kIqaZ8truDivMV6uo1+j9bmXQReREZjHSr89ZvVDWCw=";
|
||||
cargoHash = "sha256-1u3oUbqhwHXD90ld70pjK2XPJe5hpUbJtU78QpIjAE8=";
|
||||
|
||||
# skip flaky tests
|
||||
checkFlags = [ "--skip=options::tests::test_detect_display_width" ];
|
||||
|
@ -2,9 +2,9 @@
|
||||
|
||||
buildDotnetGlobalTool {
|
||||
pname = "dotnet-ef";
|
||||
version = "9.0.5";
|
||||
version = "9.0.6";
|
||||
|
||||
nugetHash = "sha256-Mu+MlsjH/qa4kMb7z/TuG1lSVSKPX9j9S4mJLVRZ2+E=";
|
||||
nugetHash = "sha256-dHOGvqdIfYhuAz7JwQoG/4uJNE9wpfI/dnL4zj3lD6A=";
|
||||
|
||||
meta = {
|
||||
description = "Tools to help with design-time development tasks";
|
||||
|
@ -8,12 +8,12 @@
|
||||
}:
|
||||
|
||||
stdenv.mkDerivation rec {
|
||||
version = "1.2.8";
|
||||
version = "1.2.9";
|
||||
pname = "fllog";
|
||||
|
||||
src = fetchurl {
|
||||
url = "mirror://sourceforge/fldigi/${pname}-${version}.tar.gz";
|
||||
sha256 = "sha256-kJLb1ifd8sUOwGgNsIEmlhH29fQLdTfDMjKLrzK7r1I=";
|
||||
sha256 = "sha256-3eJvT9PjHTrMn0/pArUDIIE7T7y1YnayG5PuGokwtRk=";
|
||||
};
|
||||
|
||||
buildInputs = [
|
||||
|
@ -7,13 +7,13 @@
|
||||
|
||||
buildGoModule rec {
|
||||
pname = "gosmee";
|
||||
version = "0.26.0";
|
||||
version = "0.26.1";
|
||||
|
||||
src = fetchFromGitHub {
|
||||
owner = "chmouel";
|
||||
repo = "gosmee";
|
||||
rev = "v${version}";
|
||||
hash = "sha256-dmv2fxL6jV6bpWvtMiLEtb/yg5vuD+B52P1PWVap1NA=";
|
||||
hash = "sha256-qNO7mY03aWabTeUm8rXojy2Ek2IKNG6wimVhwZKxh9g=";
|
||||
};
|
||||
vendorHash = null;
|
||||
|
||||
|
@ -14,13 +14,13 @@
|
||||
|
||||
stdenv.mkDerivation (finalAttrs: {
|
||||
pname = "jailer";
|
||||
version = "16.6.2";
|
||||
version = "16.7";
|
||||
|
||||
src = fetchFromGitHub {
|
||||
owner = "Wisser";
|
||||
repo = "Jailer";
|
||||
tag = "v${finalAttrs.version}";
|
||||
hash = "sha256-CeehX+btGexbFFD3p+FVmzXpH0bVWMW9Qdu5q6MJ5lw=";
|
||||
hash = "sha256-lHBthOZu4utJd2X8cTJ7HCp8zLs0su78RIdf/QBbSJk=";
|
||||
};
|
||||
|
||||
nativeBuildInputs = [
|
||||
|
@ -2,14 +2,16 @@
|
||||
stdenv,
|
||||
lib,
|
||||
fetchFromGitHub,
|
||||
python3,
|
||||
python3Packages,
|
||||
python312Packages,
|
||||
qt5,
|
||||
makeWrapper,
|
||||
copyDesktopItems,
|
||||
makeDesktopItem,
|
||||
}:
|
||||
|
||||
let
|
||||
python3Packages = python312Packages;
|
||||
in
|
||||
python3Packages.buildPythonApplication rec {
|
||||
pname = "kohighlights";
|
||||
version = "2.3.1.0";
|
||||
@ -65,7 +67,7 @@ python3Packages.buildPythonApplication rec {
|
||||
mkdir -p $out/bin $out/share/KoHighlights
|
||||
rm -rf docs screens
|
||||
cp -r * $out/share/KoHighlights
|
||||
makeWrapper ${python3.interpreter} $out/bin/KoHighlights \
|
||||
makeWrapper ${python3Packages.python.interpreter} $out/bin/KoHighlights \
|
||||
--add-flags "$out/share/KoHighlights/main.py" \
|
||||
--set PYTHONPATH "${python3Packages.makePythonPath dependencies}" \
|
||||
''${qtWrapperArgs[@]}
|
||||
|
@ -7,17 +7,17 @@
|
||||
heatshrink,
|
||||
zlib,
|
||||
boost,
|
||||
catch2,
|
||||
catch2_3,
|
||||
}:
|
||||
stdenv.mkDerivation {
|
||||
pname = "libbgcode";
|
||||
version = "2023-11-16";
|
||||
version = "0-unstable-2025-02-19";
|
||||
|
||||
src = fetchFromGitHub {
|
||||
owner = "prusa3d";
|
||||
repo = "libbgcode";
|
||||
rev = "bc390aab4427589a6402b4c7f65cf4d0a8f987ec";
|
||||
hash = "sha256-TZShYeDAh+fNdmTr1Xqctji9f0vEGpNZv1ba/IY5EoY=";
|
||||
rev = "5041c093b33e2748e76d6b326f2251310823f3df";
|
||||
hash = "sha256-EaxVZerH2v8b1Yqk+RW/r3BvnJvrAelkKf8Bd+EHbEc=";
|
||||
};
|
||||
|
||||
nativeBuildInputs = [
|
||||
@ -29,7 +29,7 @@ stdenv.mkDerivation {
|
||||
heatshrink
|
||||
zlib
|
||||
boost
|
||||
catch2
|
||||
catch2_3
|
||||
];
|
||||
|
||||
meta = with lib; {
|
||||
|
38
pkgs/by-name/mc/mcp-grafana/package.nix
Normal file
@ -0,0 +1,38 @@
|
||||
{
|
||||
lib,
|
||||
buildGoModule,
|
||||
fetchFromGitHub,
|
||||
}:
|
||||
|
||||
buildGoModule (finalAttrs: {
|
||||
pname = "mcp-grafana";
|
||||
version = "0.4.2";
|
||||
|
||||
src = fetchFromGitHub {
|
||||
owner = "grafana";
|
||||
repo = "mcp-grafana";
|
||||
tag = "v${finalAttrs.version}";
|
||||
hash = "sha256-3w6xnDAcuDMZPr6lGGh0FpcyG2fRpkeVcJlZMdszu/g=";
|
||||
};
|
||||
|
||||
vendorHash = "sha256-61nn/p6Un+uHuPK4hipJ3A2DhAEqpWTGefM8ENAOP1E=";
|
||||
|
||||
ldflags = [
|
||||
"-s"
|
||||
"-w"
|
||||
];
|
||||
|
||||
postInstall = ''
|
||||
rm $out/bin/jsonschema
|
||||
'';
|
||||
|
||||
__darwinAllowLocalNetworking = true;
|
||||
|
||||
meta = {
|
||||
description = "MCP server for Grafana";
|
||||
homepage = "https://github.com/grafana/mcp-grafana";
|
||||
license = lib.licenses.asl20;
|
||||
maintainers = with lib.maintainers; [ pilz ];
|
||||
mainProgram = "mcp-grafana";
|
||||
};
|
||||
})
|
@ -27,13 +27,13 @@ let
|
||||
in
|
||||
stdenv.mkDerivation (finalAttrs: {
|
||||
pname = "melonDS";
|
||||
version = "1.0rc-unstable-2025-05-15";
|
||||
version = "1.0rc-unstable-2025-05-27";
|
||||
|
||||
src = fetchFromGitHub {
|
||||
owner = "melonDS-emu";
|
||||
repo = "melonDS";
|
||||
rev = "0e64a06c84f9b9428f8647c2aafde110c9d917f3";
|
||||
hash = "sha256-T+AcpAITNALtZbuwY+oh4RnMgjCAi7n2HPyDjFqpQPI=";
|
||||
rev = "7117178c2dd56df32b6534ba6a54ad1f8547e693";
|
||||
hash = "sha256-6bwagPFIv87WtmQ3cl8cDZ/1A8Ab6itLHAr33CJy/Eo=";
|
||||
};
|
||||
|
||||
nativeBuildInputs = [
|
||||
|
@ -14,9 +14,9 @@
|
||||
libGL,
|
||||
}:
|
||||
|
||||
stdenv.mkDerivation rec {
|
||||
stdenv.mkDerivation (finalAttrs: {
|
||||
pname = "mihomo-party";
|
||||
version = "1.7.4";
|
||||
version = "1.7.5";
|
||||
|
||||
src =
|
||||
let
|
||||
@ -29,10 +29,10 @@ stdenv.mkDerivation rec {
|
||||
};
|
||||
in
|
||||
fetchurl {
|
||||
url = "https://github.com/mihomo-party-org/mihomo-party/releases/download/v${version}/mihomo-party-linux-${version}-${arch}.deb";
|
||||
url = "https://github.com/mihomo-party-org/mihomo-party/releases/download/v${finalAttrs.version}/mihomo-party-linux-${finalAttrs.version}-${arch}.deb";
|
||||
hash = selectSystem {
|
||||
x86_64-linux = "sha256-pQcDW9ztCTIS5dbmPuvig32cXWfzYiHksa3Jv/O5J7E=";
|
||||
aarch64-linux = "sha256-YHLHJ05sdMj/Wz/WAEianbDIUz9X+AER2wm9T/QHRXI=";
|
||||
x86_64-linux = "sha256-Kw7VDyJ07DeinAzsilJU0vBhDLViB8zlpIA+mAPpp2M=";
|
||||
aarch64-linux = "sha256-OljIM8BI8umkRB1wUqcwQ/H1i1FhYtQ4d5cXMi/Lt9E=";
|
||||
};
|
||||
};
|
||||
|
||||
@ -88,4 +88,4 @@ stdenv.mkDerivation rec {
|
||||
sourceProvenance = with lib.sourceTypes; [ binaryNativeCode ];
|
||||
maintainers = with lib.maintainers; [ ];
|
||||
};
|
||||
}
|
||||
})
|
||||
|
@ -8,13 +8,13 @@
|
||||
|
||||
stdenv.mkDerivation rec {
|
||||
pname = "nghttp3";
|
||||
version = "1.9.0";
|
||||
version = "1.10.1";
|
||||
|
||||
src = fetchFromGitHub {
|
||||
owner = "ngtcp2";
|
||||
repo = "nghttp3";
|
||||
rev = "v${version}";
|
||||
hash = "sha256-CTra8vmpIig8LX7RWqRzhWhX9yn0RnFrnV/kYPgZgJk=";
|
||||
hash = "sha256-V4JFqi3VdblpBlZJ1uFX56AlJn894oiX86OfoxVjBbE=";
|
||||
fetchSubmodules = true;
|
||||
};
|
||||
|
||||
|
@ -13,18 +13,18 @@ let
|
||||
in
|
||||
rustPlatform.buildRustPackage (finalAttrs: {
|
||||
pname = "oxigraph";
|
||||
version = "0.4.9";
|
||||
version = "0.4.11";
|
||||
|
||||
src = fetchFromGitHub {
|
||||
owner = "oxigraph";
|
||||
repo = "oxigraph";
|
||||
tag = "v${finalAttrs.version}";
|
||||
hash = "sha256-sv9LpAoPQ4oFrGI6j6NgVZwEwpM1wt93lHkUwnvmhIY=";
|
||||
hash = "sha256-M5C+SNZYXKfcosnRe9a+Zicyjuo6wli2uWv/SJxufJc=";
|
||||
fetchSubmodules = true;
|
||||
};
|
||||
|
||||
useFetchCargoVendor = true;
|
||||
cargoHash = "sha256-nVlvmYOxZDMLvxP8JaKTyKMgW6+48B8B+UzlwgthJS0=";
|
||||
cargoHash = "sha256-TgeHmCMwXK+OlTGIyzus/N+MY29lgK+JuzUBwVFbpsI=";
|
||||
|
||||
nativeBuildInputs = [
|
||||
rustPlatform.bindgenHook
|
||||
|
@ -1,11 +1,11 @@
|
||||
{
|
||||
lib,
|
||||
python3,
|
||||
python312,
|
||||
fetchPypi,
|
||||
qt5,
|
||||
}:
|
||||
|
||||
python3.pkgs.buildPythonApplication rec {
|
||||
python312.pkgs.buildPythonApplication rec {
|
||||
pname = "patray";
|
||||
version = "0.1.2";
|
||||
|
||||
@ -20,7 +20,7 @@ python3.pkgs.buildPythonApplication rec {
|
||||
sed -i '/pyside2/d' requirements/production.in
|
||||
'';
|
||||
|
||||
propagatedBuildInputs = with python3.pkgs; [
|
||||
dependencies = with python312.pkgs; [
|
||||
pulsectl
|
||||
loguru
|
||||
cock
|
||||
|
@ -1,13 +1,14 @@
|
||||
{
|
||||
lib,
|
||||
flutter326,
|
||||
flutter329,
|
||||
plant-it,
|
||||
}:
|
||||
|
||||
flutter326.buildFlutterApplication {
|
||||
flutter329.buildFlutterApplication {
|
||||
pname = "plant-it-frontend";
|
||||
inherit (plant-it) version src;
|
||||
sourceRoot = "source/frontend";
|
||||
|
||||
sourceRoot = "${plant-it.src.name}/frontend";
|
||||
|
||||
targetFlutterPlatform = "web";
|
||||
|
||||
@ -15,5 +16,6 @@ flutter326.buildFlutterApplication {
|
||||
|
||||
meta = plant-it.meta // {
|
||||
description = "Frontend for Plant It";
|
||||
platforms = lib.platforms.linux;
|
||||
};
|
||||
}
|
||||
|
@ -7,16 +7,16 @@
|
||||
|
||||
buildGoModule rec {
|
||||
pname = "pocketbase";
|
||||
version = "0.28.2";
|
||||
version = "0.28.3";
|
||||
|
||||
src = fetchFromGitHub {
|
||||
owner = "pocketbase";
|
||||
repo = "pocketbase";
|
||||
rev = "v${version}";
|
||||
hash = "sha256-R8sXa3Cs7GFRAs8/+wAoWteFYBUVsgVA+eJDKACb0k8=";
|
||||
hash = "sha256-oFM2QcMxW+1iVUy7GzpNBtMQznq0vL7eeDiUhVPYCOM=";
|
||||
};
|
||||
|
||||
vendorHash = "sha256-bTXxhHibKiu+btHhnktTpNycMJSzGekRJ+w9b1IwAQs=";
|
||||
vendorHash = "sha256-hOB8MOfG+RHDJEP5DSDvSiphb+c86QySNEmRr8633cM=";
|
||||
|
||||
# This is the released subpackage from upstream repo
|
||||
subPackages = [ "examples/base" ];
|
||||
|
@ -13,11 +13,14 @@
|
||||
makeDesktopItem,
|
||||
nettools,
|
||||
protobuf,
|
||||
python3Packages,
|
||||
python312Packages,
|
||||
system-config-printer,
|
||||
wget,
|
||||
}:
|
||||
|
||||
let
|
||||
python3Packages = python312Packages;
|
||||
in
|
||||
python3Packages.buildPythonApplication rec {
|
||||
pname = "rcu";
|
||||
version = "4.0.24";
|
||||
|
@ -1,17 +1,12 @@
|
||||
{
|
||||
lib,
|
||||
buildPythonApplication,
|
||||
fetchFromGitHub,
|
||||
pillow,
|
||||
pyside2,
|
||||
numpy,
|
||||
pyphotonfile,
|
||||
shiboken2,
|
||||
python312Packages,
|
||||
}:
|
||||
let
|
||||
version = "0.1.3+";
|
||||
in
|
||||
buildPythonApplication rec {
|
||||
python312Packages.buildPythonApplication rec {
|
||||
pname = "sl1-to-photon";
|
||||
inherit version;
|
||||
|
||||
@ -22,7 +17,7 @@ buildPythonApplication rec {
|
||||
sha256 = "ssFfjlBMi3FHosDBUA2gs71VUIBkEdPVcV3STNxmOIM=";
|
||||
};
|
||||
|
||||
pythonPath = [
|
||||
pythonPath = with python312Packages; [
|
||||
pyphotonfile
|
||||
pillow
|
||||
numpy
|
@ -15,8 +15,8 @@ stdenv.mkDerivation (finalAttrs: {
|
||||
src = fetchFromGitHub {
|
||||
owner = "smplayer-dev";
|
||||
repo = "smplayer";
|
||||
rev = "v${finalAttrs.version}";
|
||||
hash = "sha256-dyUT8PdvsFZsEZQNSsC2TQd90KOrY9FIb9Do+JKdUHs=";
|
||||
tag = "v${finalAttrs.version}";
|
||||
hash = "sha256-txGz6v9hkvnrmVmBHsi1B2eC/iNT1tg4dU5AcMsSCic=";
|
||||
};
|
||||
|
||||
nativeBuildInputs = [
|
||||
@ -51,7 +51,7 @@ stdenv.mkDerivation (finalAttrs: {
|
||||
options of MPlayer, SMPlayer adds other interesting features like the
|
||||
possibility to play Youtube videos or download subtitles.
|
||||
'';
|
||||
changelog = "https://github.com/smplayer-dev/smplayer/releases/tag/${finalAttrs.src.rev}";
|
||||
changelog = "https://github.com/smplayer-dev/smplayer/releases/tag/${finalAttrs.src.tag}";
|
||||
license = lib.licenses.gpl3Plus;
|
||||
maintainers = with lib.maintainers; [ ];
|
||||
platforms = lib.platforms.linux;
|
||||
|
@ -11,26 +11,23 @@
|
||||
sqlite,
|
||||
zlib,
|
||||
|
||||
unstableGitUpdater,
|
||||
writeShellScript,
|
||||
yq,
|
||||
|
||||
nix-update-script,
|
||||
includeLSP ? true,
|
||||
includeForge ? true,
|
||||
}:
|
||||
rustPlatform.buildRustPackage {
|
||||
pname = "steel";
|
||||
version = "0.6.0-unstable-2025-04-17";
|
||||
version = "0-unstable-2025-06-15";
|
||||
|
||||
src = fetchFromGitHub {
|
||||
owner = "mattwparas";
|
||||
repo = "steel";
|
||||
rev = "2f28ab10523198726d343257d29d892864e897b0";
|
||||
hash = "sha256-GcbuuaevPK5EOh0/IVgoL2MPC9ukDc8VXkdgbPX4quE=";
|
||||
rev = "123adb314702d6520f8ab04115e79308d2400c38";
|
||||
hash = "sha256-o1RZBlAGUht0Q7UVF+yPlrWW7B016fpBBcoaxuzRQo4=";
|
||||
};
|
||||
|
||||
useFetchCargoVendor = true;
|
||||
cargoHash = "sha256-PWE64CwHCQWvOGeOqdsqX6rAruWlnCwsQpcxS221M3g=";
|
||||
cargoHash = "sha256-/vPDVVOhLO7mnULyU8QLW+YHh+kGd+BSiPi55jrOWps=";
|
||||
|
||||
nativeBuildInputs = [
|
||||
curl
|
||||
@ -95,20 +92,8 @@ rustPlatform.buildRustPackage {
|
||||
STEEL_HOME = "${placeholder "out"}/lib/steel";
|
||||
};
|
||||
|
||||
passthru.updateScript = unstableGitUpdater {
|
||||
tagConverter = writeShellScript "steel-tagConverter.sh" ''
|
||||
export PATH="${
|
||||
lib.makeBinPath [
|
||||
curl
|
||||
yq
|
||||
]
|
||||
}:$PATH"
|
||||
|
||||
version=$(curl -s https://raw.githubusercontent.com/mattwparas/steel/refs/heads/master/Cargo.toml | tomlq -r .workspace.package.version)
|
||||
|
||||
read -r tag
|
||||
test "$tag" = "0" && tag="$version"; echo "$tag"
|
||||
'';
|
||||
passthru.updateScript = nix-update-script {
|
||||
extraArgs = [ "--version=branch" ];
|
||||
};
|
||||
|
||||
meta = {
|
||||
|
@ -10,19 +10,19 @@
|
||||
|
||||
rustPlatform.buildRustPackage rec {
|
||||
pname = "svix-server";
|
||||
version = "1.66.0";
|
||||
version = "1.67.0";
|
||||
|
||||
src = fetchFromGitHub {
|
||||
owner = "svix";
|
||||
repo = "svix-webhooks";
|
||||
rev = "v${version}";
|
||||
hash = "sha256-Us/Bkp5ujC1rd/zpPzXL4kiFAiAygPWvRJF836ErK/0=";
|
||||
hash = "sha256-H9SrYWwSwW03LSKzCTVgtgZIM+o6nL3USBmJ61qxFos=";
|
||||
};
|
||||
|
||||
sourceRoot = "${src.name}/server";
|
||||
|
||||
useFetchCargoVendor = true;
|
||||
cargoHash = "sha256-fGXdWPJYauLQYC7o7I8q8okXn8JXzwnX6Pq71hj36Wo=";
|
||||
cargoHash = "sha256-xDSxevVnUPG95djjq//tjYI7WPb6qkXcvVKa6rBIwF0=";
|
||||
|
||||
nativeBuildInputs = [ pkg-config ];
|
||||
|
||||
|
File diff suppressed because it is too large
@ -1,19 +0,0 @@
|
||||
diff --git a/packages/flutter_tools/lib/src/flutter_cache.dart b/packages/flutter_tools/lib/src/flutter_cache.dart
|
||||
index 252021cf78..e50ef0885d 100644
|
||||
--- a/packages/flutter_tools/lib/src/flutter_cache.dart
|
||||
+++ b/packages/flutter_tools/lib/src/flutter_cache.dart
|
||||
@@ -51,14 +51,6 @@ class FlutterCache extends Cache {
|
||||
registerArtifact(IosUsbArtifacts(artifactName, this, platform: platform));
|
||||
}
|
||||
registerArtifact(FontSubsetArtifacts(this, platform: platform));
|
||||
- registerArtifact(PubDependencies(
|
||||
- logger: logger,
|
||||
- // flutter root and pub must be lazily initialized to avoid accessing
|
||||
- // before the version is determined.
|
||||
- flutterRoot: () => Cache.flutterRoot!,
|
||||
- pub: () => pub,
|
||||
- projectFactory: projectFactory,
|
||||
- ));
|
||||
}
|
||||
}
|
||||
|
@ -1,30 +0,0 @@
diff --git a/packages/flutter_tools/lib/src/runner/flutter_command.dart b/packages/flutter_tools/lib/src/runner/flutter_command.dart
index e4e474ab6e..5548599802 100644
--- a/packages/flutter_tools/lib/src/runner/flutter_command.dart
+++ b/packages/flutter_tools/lib/src/runner/flutter_command.dart
@@ -1693,7 +1693,7 @@ Run 'flutter -h' (or 'flutter <command> -h') for available flutter commands and

// Populate the cache. We call this before pub get below so that the
// sky_engine package is available in the flutter cache for pub to find.
- if (shouldUpdateCache) {
+ if (false) {
// First always update universal artifacts, as some of these (e.g.
// ios-deploy on macOS) are required to determine `requiredArtifacts`.
final bool offline;
diff --git a/packages/flutter_tools/lib/src/runner/flutter_command_runner.dart b/packages/flutter_tools/lib/src/runner/flutter_command_runner.dart
index 50783f8435..db94062840 100644
--- a/packages/flutter_tools/lib/src/runner/flutter_command_runner.dart
+++ b/packages/flutter_tools/lib/src/runner/flutter_command_runner.dart
@@ -377,11 +377,7 @@ class FlutterCommandRunner extends CommandRunner<void> {
globals.analytics.suppressTelemetry();
}

- globals.flutterVersion.ensureVersionFile();
final bool machineFlag = topLevelResults[FlutterGlobalOptions.kMachineFlag] as bool? ?? false;
- if (await _shouldCheckForUpdates(topLevelResults, topLevelMachineFlag: machineFlag)) {
- await globals.flutterVersion.checkFlutterVersionFreshness();
- }

// See if the user specified a specific device.
final String? specifiedDeviceId = topLevelResults[FlutterGlobalOptions.kDeviceIdOption] as String?;
@ -1,69 +0,0 @@
From 6df275df3b8694daf16302b407520e3b1dee6724 Mon Sep 17 00:00:00 2001
From: Philip Hayes <philiphayes9@gmail.com>
Date: Thu, 12 Sep 2024 13:23:00 -0700
Subject: [PATCH] fix: cleanup xcode_backend.sh to fix iOS build w/
`NixOS/nixpkgs` flutter

This patch cleans up `xcode_backend.sh`. It now effectively just runs
`exec $FLUTTER_ROOT/bin/dart ./xcode_backend.dart`.

The previous `xcode_backend.sh` tries to discover `$FLUTTER_ROOT` from
argv[0], even though its presence is already guaranteed (the wrapped
`xcode_backend.dart` also relies on this env).

When using nixpkgs flutter, the flutter SDK directory is composed of several
layers, joined together using symlinks (called a `symlinkJoin`). Without this
patch, the auto-discover traverses the symlinks into the wrong layer, and so it
uses an "unwrapped" `dart` command instead of a "wrapped" dart that sets some
important envs/flags (like `$FLUTTER_ROOT`).

Using the "unwrapped" dart then manifests in this error when compiling, since
it doesn't see the ios build-support artifacts:

```
$ flutter run -d iphone
Running Xcode build...
Xcode build done. 6.4s
Failed to build iOS app
Error (Xcode): Target debug_unpack_ios failed: Error: Flutter failed to create a directory at "/<nix-store>/XXXX-flutter-3.24.1-unwrapped/bin/cache/artifacts".
```
---
packages/flutter_tools/bin/xcode_backend.sh | 25 ++++-----------------
1 file changed, 4 insertions(+), 21 deletions(-)

diff --git a/packages/flutter_tools/bin/xcode_backend.sh b/packages/flutter_tools/bin/xcode_backend.sh
index 2889d7c8e4..48b9d06c6e 100755
--- a/packages/flutter_tools/bin/xcode_backend.sh
+++ b/packages/flutter_tools/bin/xcode_backend.sh
@@ -6,24 +6,7 @@
# exit on error, or usage of unset var
set -euo pipefail

-# Needed because if it is set, cd may print the path it changed to.
-unset CDPATH
-
-function follow_links() (
- cd -P "$(dirname -- "$1")"
- file="$PWD/$(basename -- "$1")"
- while [[ -h "$file" ]]; do
- cd -P "$(dirname -- "$file")"
- file="$(readlink -- "$file")"
- cd -P "$(dirname -- "$file")"
- file="$PWD/$(basename -- "$file")"
- done
- echo "$file"
-)
-
-PROG_NAME="$(follow_links "${BASH_SOURCE[0]}")"
-BIN_DIR="$(cd "${PROG_NAME%/*}" ; pwd -P)"
-FLUTTER_ROOT="$BIN_DIR/../../.."
-DART="$FLUTTER_ROOT/bin/dart"
-
-"$DART" "$BIN_DIR/xcode_backend.dart" "$@"
+# Run `dart ./xcode_backend.dart` with the dart from $FLUTTER_ROOT.
+dart="${FLUTTER_ROOT}/bin/dart"
+xcode_backend_dart="${BASH_SOURCE[0]%.sh}.dart"
+exec "${dart}" "${xcode_backend_dart}" "$@"
--
2.46.0
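For readers unfamiliar with the `symlinkJoin` layering this commit message refers to, the sketch below is an assumption-laden illustration of how a wrapped SDK can be assembled from an unwrapped package; `flutter-unwrapped` is a hypothetical attribute name and this is not the actual nixpkgs flutter wrapper. The point it illustrates: following symlinks from a file inside the join resolves into the unwrapped store path, which is why the patched script trusts `$FLUTTER_ROOT` instead of argv[0].

```nix
# Illustrative only: a hypothetical wrapped SDK built with symlinkJoin.
{ symlinkJoin, makeWrapper, flutter-unwrapped }:

symlinkJoin {
  name = "flutter-wrapped";
  paths = [ flutter-unwrapped ];
  nativeBuildInputs = [ makeWrapper ];
  postBuild = ''
    # $out/bin/flutter is a symlink into the unwrapped package; wrap it so
    # tools launched from the SDK see the wrapped root via FLUTTER_ROOT.
    wrapProgram $out/bin/flutter --set FLUTTER_ROOT $out
  '';
}
```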
@ -1,44 +0,0 @@
This patch introduces an intermediate Gradle build step to alter the behavior
of flutter_tools' Gradle project, specifically moving the creation of `build`
and `.gradle` directories from within the Nix Store to somewhere in `$HOME/.cache/flutter/nix-flutter-tools-gradle/$engineShortRev`.

Without this patch, flutter_tools' Gradle project tries to generate `build` and `.gradle`
directories within the Nix Store. Resulting in read-only errors when trying to build a
Flutter Android app at runtime.

This patch takes advantage of the fact settings.gradle takes priority over settings.gradle.kts to build the intermediate Gradle project
when a Flutter app runs `includeBuild("${settings.ext.flutterSdkPath}/packages/flutter_tools/gradle")`

`rootProject.buildFileName = "/dev/null"` so that the intermediate project doesn't use `build.gradle.kts` that's in the same directory.

The intermediate project makes a `settings.gradle` file in `$HOME/.cache/flutter/nix-flutter-tools-gradle/<short engine rev>/` and `includeBuild`s it.
This Gradle project will build the actual `packages/flutter_tools/gradle` project by setting
`rootProject.projectDir = new File("$settingsDir")` and `apply from: new File("$settingsDir/settings.gradle.kts")`.

Now the `.gradle` will be built in `$HOME/.cache/flutter/nix-flutter-tools-gradle/<short engine rev>/`, but `build` doesn't.
To move `build` to `$HOME/.cache/flutter/nix-flutter-tools-gradle/<short engine rev>/` as well, we need to set `buildDirectory`.
diff --git a/packages/flutter_tools/gradle/settings.gradle b/packages/flutter_tools/gradle/settings.gradle
new file mode 100644
index 0000000000..b2485c94b4
--- /dev/null
+++ b/packages/flutter_tools/gradle/settings.gradle
@@ -0,0 +1,19 @@
+rootProject.buildFileName = "/dev/null"
+
+def engineShortRev = (new File("$settingsDir/../../../bin/internal/engine.version")).text.take(10)
+def dir = new File("$System.env.HOME/.cache/flutter/nix-flutter-tools-gradle/$engineShortRev")
+dir.mkdirs()
+def file = new File(dir, "settings.gradle")
+
+file.text = """
+rootProject.projectDir = new File("$settingsDir")
+apply from: new File("$settingsDir/settings.gradle.kts")
+
+gradle.allprojects { project ->
+ project.beforeEvaluate {
+ project.layout.buildDirectory = new File("$dir/build")
+ }
+}
+"""
+
+includeBuild(dir)
@ -15,11 +15,9 @@ The intermediate project makes a `settings.gradle` file in `$HOME/.cache/flutter
This Gradle project will build the actual `packages/flutter_tools/gradle` project by setting
`rootProject.projectDir = new File("$settingsDir")` and `apply from: new File("$settingsDir/settings.gradle.kts")`.

Now the `.gradle` will be built in `$HOME/.cache/flutter/nix-flutter-tools-gradle/<short engine rev>/`, but `build` doesn't.
To move `build` to `$HOME/.cache/flutter/nix-flutter-tools-gradle/<short engine rev>/` as well, we need to set `buildDirectory`.
diff --git a/packages/flutter_tools/gradle/settings.gradle b/packages/flutter_tools/gradle/settings.gradle
new file mode 100644
index 0000000000..b2485c94b4
To move `build` to `$HOME/.cache/flutter/nix-flutter-tools-gradle/<short engine rev>/`, we need to set `buildDirectory`.
To move `.gradle` as well, the `--project-cache-dir` argument must be passed to the Gradle wrapper.
Changing the `GradleUtils.getExecutable` function signature is a deliberate choice, to ensure that no new unpatched usages slip in.
--- /dev/null
+++ b/packages/flutter_tools/gradle/settings.gradle
@@ -0,0 +1,19 @@
@ -44,12 +42,179 @@ index 0000000000..b2485c94b4
+includeBuild(dir)
--- a/packages/flutter_tools/gradle/build.gradle.kts
+++ b/packages/flutter_tools/gradle/build.gradle.kts
@@ -4,6 +4,8 @@
@@ -4,6 +4,11 @@

import org.jetbrains.kotlin.gradle.dsl.JvmTarget

+// While flutter_tools runs Gradle with a --project-cache-dir, this startParameter
+// is not passed correctly to the Kotlin Gradle plugin for some reason, and so
+// must be set here as well.
+gradle.startParameter.projectCacheDir = layout.buildDirectory.dir("cache").get().asFile
+
plugins {
`java-gradle-plugin`
groovy
--- a/packages/flutter_tools/lib/src/android/gradle.dart
|
||||
+++ b/packages/flutter_tools/lib/src/android/gradle.dart
|
||||
@@ -456,9 +456,9 @@ class AndroidGradleBuilder implements AndroidBuilder {
|
||||
// from the local.properties file.
|
||||
updateLocalProperties(project: project, buildInfo: androidBuildInfo.buildInfo);
|
||||
|
||||
- final List<String> options = <String>[];
|
||||
-
|
||||
- final String gradleExecutablePath = _gradleUtils.getExecutable(project);
|
||||
+ final [String gradleExecutablePath, ...List<String> options] = _gradleUtils.getExecutable(
|
||||
+ project,
|
||||
+ );
|
||||
|
||||
// All automatically created files should exist.
|
||||
if (configOnly) {
|
||||
@@ -781,7 +781,7 @@ class AndroidGradleBuilder implements AndroidBuilder {
|
||||
'aar_init_script.gradle',
|
||||
);
|
||||
final List<String> command = <String>[
|
||||
- _gradleUtils.getExecutable(project),
|
||||
+ ..._gradleUtils.getExecutable(project),
|
||||
'-I=$initScript',
|
||||
'-Pflutter-root=$flutterRoot',
|
||||
'-Poutput-dir=${outputDirectory.path}',
|
||||
@@ -896,6 +896,10 @@ class AndroidGradleBuilder implements AndroidBuilder {
|
||||
final List<String> results = <String>[];
|
||||
|
||||
try {
|
||||
+ final [String gradleExecutablePath, ...List<String> options] = _gradleUtils.getExecutable(
|
||||
+ project,
|
||||
+ );
|
||||
+
|
||||
exitCode = await _runGradleTask(
|
||||
_kBuildVariantTaskName,
|
||||
preRunTask: () {
|
||||
@@ -911,10 +915,10 @@ class AndroidGradleBuilder implements AndroidBuilder {
|
||||
),
|
||||
);
|
||||
},
|
||||
- options: const <String>['-q'],
|
||||
+ options: <String>[...options, '-q'],
|
||||
project: project,
|
||||
localGradleErrors: gradleErrors,
|
||||
- gradleExecutablePath: _gradleUtils.getExecutable(project),
|
||||
+ gradleExecutablePath: gradleExecutablePath,
|
||||
outputParser: (String line) {
|
||||
if (_kBuildVariantRegex.firstMatch(line) case final RegExpMatch match) {
|
||||
results.add(match.namedGroup(_kBuildVariantRegexGroupName)!);
|
||||
@@ -948,6 +952,10 @@ class AndroidGradleBuilder implements AndroidBuilder {
|
||||
late Stopwatch sw;
|
||||
int exitCode = 1;
|
||||
try {
|
||||
+ final [String gradleExecutablePath, ...List<String> options] = _gradleUtils.getExecutable(
|
||||
+ project,
|
||||
+ );
|
||||
+
|
||||
exitCode = await _runGradleTask(
|
||||
taskName,
|
||||
preRunTask: () {
|
||||
@@ -963,10 +971,10 @@ class AndroidGradleBuilder implements AndroidBuilder {
|
||||
),
|
||||
);
|
||||
},
|
||||
- options: <String>['-q', '-PoutputPath=$outputPath'],
|
||||
+ options: <String>[...options, '-q', '-PoutputPath=$outputPath'],
|
||||
project: project,
|
||||
localGradleErrors: gradleErrors,
|
||||
- gradleExecutablePath: _gradleUtils.getExecutable(project),
|
||||
+ gradleExecutablePath: gradleExecutablePath,
|
||||
);
|
||||
} on Error catch (error) {
|
||||
_logger.printError(error.toString());
|
||||
--- a/packages/flutter_tools/lib/src/android/gradle_errors.dart
|
||||
+++ b/packages/flutter_tools/lib/src/android/gradle_errors.dart
|
||||
@@ -240,7 +240,12 @@ final GradleHandledError flavorUndefinedHandler = GradleHandledError(
|
||||
required bool usesAndroidX,
|
||||
}) async {
|
||||
final RunResult tasksRunResult = await globals.processUtils.run(
|
||||
- <String>[globals.gradleUtils!.getExecutable(project), 'app:tasks', '--all', '--console=auto'],
|
||||
+ <String>[
|
||||
+ ...globals.gradleUtils!.getExecutable(project),
|
||||
+ 'app:tasks',
|
||||
+ '--all',
|
||||
+ '--console=auto',
|
||||
+ ],
|
||||
throwOnError: true,
|
||||
workingDirectory: project.android.hostAppGradleRoot.path,
|
||||
environment: globals.java?.environment,
|
||||
--- a/packages/flutter_tools/lib/src/android/gradle_utils.dart
|
||||
+++ b/packages/flutter_tools/lib/src/android/gradle_utils.dart
|
||||
@@ -3,6 +3,7 @@
|
||||
// found in the LICENSE file.
|
||||
|
||||
import 'package:meta/meta.dart';
|
||||
+import 'package:path/path.dart';
|
||||
import 'package:process/process.dart';
|
||||
import 'package:unified_analytics/unified_analytics.dart';
|
||||
|
||||
@@ -154,9 +155,29 @@ class GradleUtils {
|
||||
final Logger _logger;
|
||||
final OperatingSystemUtils _operatingSystemUtils;
|
||||
|
||||
+ List<String> get _requiredArguments {
|
||||
+ final String cacheDir = join(
|
||||
+ switch (globals.platform.environment['XDG_CACHE_HOME']) {
|
||||
+ final String cacheHome => cacheHome,
|
||||
+ _ => join(
|
||||
+ globals.fsUtils.homeDirPath ?? throwToolExit('No cache directory has been specified.'),
|
||||
+ '.cache',
|
||||
+ ),
|
||||
+ },
|
||||
+ 'flutter',
|
||||
+ 'nix-flutter-tools-gradle',
|
||||
+ globals.flutterVersion.engineRevision.substring(0, 10),
|
||||
+ );
|
||||
+
|
||||
+ return <String>[
|
||||
+ '--project-cache-dir=${join(cacheDir, 'cache')}',
|
||||
+ '-Pkotlin.project.persistent.dir=${join(cacheDir, 'kotlin')}',
|
||||
+ ];
|
||||
+ }
|
||||
+
|
||||
/// Gets the Gradle executable path and prepares the Gradle project.
|
||||
/// This is the `gradlew` or `gradlew.bat` script in the `android/` directory.
|
||||
- String getExecutable(FlutterProject project) {
|
||||
+ List<String> getExecutable(FlutterProject project) {
|
||||
final Directory androidDir = project.android.hostAppGradleRoot;
|
||||
injectGradleWrapperIfNeeded(androidDir);
|
||||
|
||||
@@ -167,7 +188,7 @@ class GradleUtils {
|
||||
// If the Gradle executable doesn't have execute permission,
|
||||
// then attempt to set it.
|
||||
_operatingSystemUtils.makeExecutable(gradle);
|
||||
- return gradle.absolute.path;
|
||||
+ return <String>[gradle.absolute.path, ..._requiredArguments];
|
||||
}
|
||||
throwToolExit(
|
||||
'Unable to locate gradlew script. Please check that ${gradle.path} '
|
||||
--- a/packages/flutter_tools/test/general.shard/android/android_gradle_builder_test.dart
|
||||
+++ b/packages/flutter_tools/test/general.shard/android/android_gradle_builder_test.dart
|
||||
@@ -2740,8 +2740,8 @@ Gradle Crashed
|
||||
|
||||
class FakeGradleUtils extends Fake implements GradleUtils {
|
||||
@override
|
||||
- String getExecutable(FlutterProject project) {
|
||||
- return 'gradlew';
|
||||
+ List<String> getExecutable(FlutterProject project) {
|
||||
+ return const <String>['gradlew'];
|
||||
}
|
||||
}
|
||||
|
||||
--- a/packages/flutter_tools/test/general.shard/android/gradle_errors_test.dart
|
||||
+++ b/packages/flutter_tools/test/general.shard/android/gradle_errors_test.dart
|
||||
@@ -1580,8 +1580,8 @@ Platform fakePlatform(String name) {
|
||||
|
||||
class FakeGradleUtils extends Fake implements GradleUtils {
|
||||
@override
|
||||
- String getExecutable(FlutterProject project) {
|
||||
- return 'gradlew';
|
||||
+ List<String> getExecutable(FlutterProject project) {
|
||||
+ return const <String>['gradlew'];
|
||||
}
|
||||
}
|
||||
|
||||
|
44
pkgs/development/ocaml-modules/patricia-tree/default.nix
Normal file
@ -0,0 +1,44 @@
{
lib,
buildDunePackage,
fetchFromGitHub,
findlib,
mdx,
qcheck-core,
ppx_inline_test,
}:

buildDunePackage rec {
pname = "patricia-tree";
version = "0.11.0";

minimalOCamlVersion = "4.14";

src = fetchFromGitHub {
owner = "codex-semantics-library";
repo = "patricia-tree";
tag = "v${version}";
hash = "sha256-lpmU0KhsyIHxPBiw38ssA7XFEMsRvOT03MByoJG88Xs=";
};

nativeCheckInputs = [
mdx.bin
];

checkInputs = [
mdx
ppx_inline_test
qcheck-core
];

doCheck = true;

meta = {
description = "Patricia Tree data structure in OCaml";
homepage = "https://codex.top/api/patricia-tree/";
downloadPage = "https://github.com/codex-semantics-library/patricia-tree";
changelog = "https://github.com/codex-semantics-library/patricia-tree/releases/tag/v${version}";
license = lib.licenses.lgpl21Only;
maintainers = [ lib.maintainers.ethancedwards8 ];
};
}
30
pkgs/development/ocaml-modules/processor/default.nix
Normal file
@ -0,0 +1,30 @@
{
lib,
buildDunePackage,
fetchFromGitHub,
}:

buildDunePackage rec {
pname = "processor";
version = "0.1";

minimalOCamlVersion = "4.08";

src = fetchFromGitHub {
owner = "haesbaert";
repo = "ocaml-processor";
tag = "v${version}";
hash = "sha256-eGSNYjVbUIUMelajqZYOd3gvmRKQ9UP3TfMflLR9i7k=";
};

doCheck = true;

meta = {
description = "CPU topology and affinity for ocaml-multicore";
homepage = "https://haesbaert.github.io/ocaml-processor/processor/index.html";
downloadPage = "https://github.com/haesbaert/ocaml-processor";
changelog = "https://github.com/haesbaert/ocaml-processor/releases/tag/v${version}";
license = lib.licenses.isc;
maintainers = [ lib.maintainers.ethancedwards8 ];
};
}
@ -34,6 +34,7 @@
blockbuster,
freezegun,
gunicorn,
isa-l,
isal,
proxy-py,
pytest-codspeed,
@ -59,6 +60,10 @@ buildPythonPackage rec {
hash = "sha256-/lzbGnF3+ufs+GPtm+avjQ+lGVCsiE2E64NkRHS3wCM=";
};

patches = lib.optionals (!lib.meta.availableOn stdenv.hostPlatform isa-l) [
./remove-isal.patch
];

postPatch = ''
rm -r vendor
patchShebangs tools
@ -105,7 +110,8 @@ buildPythonPackage rec {
blockbuster
freezegun
gunicorn
isal
# broken on aarch64-darwin
(if lib.meta.availableOn stdenv.hostPlatform isa-l then isal else null)
proxy-py
pytest-codspeed
pytest-cov-stub
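The aiohttp change above gates both the `remove-isal.patch` (shown next) and the `isal` test dependency on whether the native `isa-l` library is available for the host platform. A minimal, self-contained sketch of that gating pattern follows; the package name and hash are placeholders rather than the real aiohttp expression, and it uses `lib.optionals` for the test input where the diff above uses an inline `if`/`else` for the same effect:

```nix
# Hypothetical fragment illustrating platform-conditional patches and
# test dependencies; "example" and the hash are placeholders.
{ lib, stdenv, buildPythonPackage, fetchPypi, isa-l, isal }:

buildPythonPackage rec {
  pname = "example";
  version = "1.0";
  pyproject = true;

  src = fetchPypi {
    inherit pname version;
    hash = lib.fakeHash; # placeholder
  };

  # Only apply the compatibility patch where the native library is missing.
  patches = lib.optionals (!lib.meta.availableOn stdenv.hostPlatform isa-l) [
    ./remove-isal.patch
  ];

  # Only pull in the Python binding where the native library exists.
  nativeCheckInputs = lib.optionals (lib.meta.availableOn stdenv.hostPlatform isa-l) [
    isal
  ];
}
```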
21
pkgs/development/python-modules/aiohttp/remove-isal.patch
Normal file
@ -0,0 +1,21 @@
diff --git a/tests/conftest.py b/tests/conftest.py
index 62fb04f2e..bb5b279dd 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -12,7 +12,6 @@ from typing import Any, AsyncIterator, Callable, Generator, Iterator
from unittest import mock
from uuid import uuid4

-import isal.isal_zlib
import pytest
import zlib_ng.zlib_ng
from blockbuster import blockbuster_ctx
@@ -333,7 +332,7 @@ def unused_port_socket() -> Generator[socket.socket, None, None]:
s.close()


-@pytest.fixture(params=[zlib, zlib_ng.zlib_ng, isal.isal_zlib])
+@pytest.fixture(params=[zlib, zlib_ng.zlib_ng])
def parametrize_zlib_backend(
request: pytest.FixtureRequest,
) -> Generator[None, None, None]:
@ -12,12 +12,12 @@

buildPythonPackage rec {
pname = "backtesting";
version = "0.6.3";
version = "0.6.4";
pyproject = true;

src = fetchPypi {
inherit pname version;
hash = "sha256-xryzvjKT+FRbF4pnniHvkRA98jrZVoCyYOmjYU93Ta4=";
hash = "sha256-8Xasb7VG39XXQ/A47lgkYk5Vo4pJPE3Vghcxt0yGeq4=";
};

build-system = [
@ -11,7 +11,7 @@

buildPythonPackage rec {
pname = "bagit";
version = "1.9b2";
version = "1.9.0";
pyproject = true;
build-system = [
setuptools
@ -21,8 +21,8 @@ buildPythonPackage rec {
src = fetchFromGitHub {
owner = "LibraryOfCongress";
repo = "bagit-python";
rev = "v${version}";
hash = "sha256-IkRMsCrtX8nS0nrxs5B9csMq1YrI75QLDuT8eTPILkw=";
tag = "v${version}";
hash = "sha256-gHilCG07BXL28vBOaqvKhEQw+9l/AkzZRQxucBTEDos=";
};

nativeBuildInputs = [
@ -9,7 +9,7 @@

buildPythonPackage rec {
pname = "genie-partner-sdk";
version = "1.0.4";
version = "1.0.5";
pyproject = true;

disabled = pythonOlder "3.11";
@ -17,7 +17,7 @@ buildPythonPackage rec {
src = fetchPypi {
inherit version;
pname = "genie_partner_sdk";
hash = "sha256-DwbIe1pq1YKAA3hRlhYCVJ9xtvfxvoqLLjDSQicUKuw=";
hash = "sha256-JxsUaC7WgspUU9ngIc4GOjFr/lHjD2+5YlcLXtJH6LE=";
};

nativeBuildInputs = [ hatchling ];
66
pkgs/development/python-modules/pycrdt/Cargo.lock
generated
@ -44,15 +44,15 @@ checksum = "1b8e56985ec62d17e9c1001dc89c88ecd7dc08e47eba5ec7c29c7b5eeecde967"
|
||||
|
||||
[[package]]
|
||||
name = "bumpalo"
|
||||
version = "3.17.0"
|
||||
version = "3.18.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1628fb46dfa0b37568d12e5edd512553eccf6a22a78e8bde00bb4aed84d5bdbf"
|
||||
checksum = "793db76d6187cd04dff33004d8e6c9cc4e05cd330500379d2394209271b4aeee"
|
||||
|
||||
[[package]]
|
||||
name = "cfg-if"
|
||||
version = "1.0.0"
|
||||
version = "1.0.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
|
||||
checksum = "9555578bc9e57714c812a1f84e4fc5b4d21fcb063490c624de019f7464c91268"
|
||||
|
||||
[[package]]
|
||||
name = "concurrent-queue"
|
||||
@ -162,15 +162,15 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "libc"
|
||||
version = "0.2.172"
|
||||
version = "0.2.173"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d750af042f7ef4f724306de029d18836c26c1765a54a6a3f094cbd23a7267ffa"
|
||||
checksum = "d8cfeafaffdbc32176b64fb251369d52ea9f0a8fbc6f8759edffef7b525d64bb"
|
||||
|
||||
[[package]]
|
||||
name = "lock_api"
|
||||
version = "0.4.12"
|
||||
version = "0.4.13"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "07af8b9cdd281b7915f413fa73f29ebd5d55d0d3f0155584dade1ff18cea1b17"
|
||||
checksum = "96936507f153605bddfcda068dd804796c84324ed2510809e5b2a624c81da765"
|
||||
dependencies = [
|
||||
"autocfg",
|
||||
"scopeguard",
|
||||
@ -184,9 +184,9 @@ checksum = "13dc2df351e3202783a1fe0d44375f7295ffb4049267b0f3018346dc122a1d94"
|
||||
|
||||
[[package]]
|
||||
name = "memchr"
|
||||
version = "2.7.4"
|
||||
version = "2.7.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3"
|
||||
checksum = "32a282da65faaf38286cf3be983213fcf1d2e2a58700e808f83f4ea9a4804bc0"
|
||||
|
||||
[[package]]
|
||||
name = "memoffset"
|
||||
@ -211,9 +211,9 @@ checksum = "f38d5652c16fde515bb1ecef450ab0f6a219d619a7274976324d5e377f7dceba"
|
||||
|
||||
[[package]]
|
||||
name = "parking_lot_core"
|
||||
version = "0.9.10"
|
||||
version = "0.9.11"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8"
|
||||
checksum = "bc838d2a56b5b1a6c25f55575dfc605fabb63bb2365f6c2353ef9159aa69e4a5"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"libc",
|
||||
@ -230,9 +230,9 @@ checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b"
|
||||
|
||||
[[package]]
|
||||
name = "portable-atomic"
|
||||
version = "1.11.0"
|
||||
version = "1.11.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "350e9b48cbc6b0e028b0473b114454c6316e57336ee184ceab6e53f72c178b3e"
|
||||
checksum = "f84267b20a16ea918e43c6a88433c2d54fa145c92a811b5b047ccbe153674483"
|
||||
|
||||
[[package]]
|
||||
name = "proc-macro2"
|
||||
@ -245,7 +245,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "pycrdt"
|
||||
version = "0.12.20"
|
||||
version = "0.12.21"
|
||||
dependencies = [
|
||||
"pyo3",
|
||||
"yrs",
|
||||
@ -253,9 +253,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "pyo3"
|
||||
version = "0.25.0"
|
||||
version = "0.25.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f239d656363bcee73afef85277f1b281e8ac6212a1d42aa90e55b90ed43c47a4"
|
||||
checksum = "8970a78afe0628a3e3430376fc5fd76b6b45c4d43360ffd6cdd40bdde72b682a"
|
||||
dependencies = [
|
||||
"indoc",
|
||||
"libc",
|
||||
@ -270,9 +270,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "pyo3-build-config"
|
||||
version = "0.25.0"
|
||||
version = "0.25.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "755ea671a1c34044fa165247aaf6f419ca39caa6003aee791a0df2713d8f1b6d"
|
||||
checksum = "458eb0c55e7ece017adeba38f2248ff3ac615e53660d7c71a238d7d2a01c7598"
|
||||
dependencies = [
|
||||
"once_cell",
|
||||
"target-lexicon",
|
||||
@ -280,9 +280,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "pyo3-ffi"
|
||||
version = "0.25.0"
|
||||
version = "0.25.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "fc95a2e67091e44791d4ea300ff744be5293f394f1bafd9f78c080814d35956e"
|
||||
checksum = "7114fe5457c61b276ab77c5055f206295b812608083644a5c5b2640c3102565c"
|
||||
dependencies = [
|
||||
"libc",
|
||||
"pyo3-build-config",
|
||||
@ -290,9 +290,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "pyo3-macros"
|
||||
version = "0.25.0"
|
||||
version = "0.25.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a179641d1b93920829a62f15e87c0ed791b6c8db2271ba0fd7c2686090510214"
|
||||
checksum = "a8725c0a622b374d6cb051d11a0983786448f7785336139c3c94f5aa6bef7e50"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"pyo3-macros-backend",
|
||||
@ -302,9 +302,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "pyo3-macros-backend"
|
||||
version = "0.25.0"
|
||||
version = "0.25.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9dff85ebcaab8c441b0e3f7ae40a6963ecea8a9f5e74f647e33fcf5ec9a1e89e"
|
||||
checksum = "4109984c22491085343c05b0dbc54ddc405c3cf7b4374fc533f5c3313a572ccc"
|
||||
dependencies = [
|
||||
"heck",
|
||||
"proc-macro2",
|
||||
@ -324,9 +324,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "redox_syscall"
|
||||
version = "0.5.12"
|
||||
version = "0.5.13"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "928fca9cf2aa042393a8325b9ead81d2f0df4cb12e1e24cef072922ccd99c5af"
|
||||
checksum = "0d04b7d0ee6b4a0207a0a7adb104d23ecb0b47d6beae7152d0fa34b692b29fd6"
|
||||
dependencies = [
|
||||
"bitflags",
|
||||
]
|
||||
@ -386,15 +386,15 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "smallvec"
|
||||
version = "1.15.0"
|
||||
version = "1.15.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8917285742e9f3e1683f0a9c4e6b57960b7314d0b08d30d1ecd426713ee2eee9"
|
||||
checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03"
|
||||
|
||||
[[package]]
|
||||
name = "syn"
|
||||
version = "2.0.101"
|
||||
version = "2.0.103"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8ce2b7fc941b3a24138a0a7cf8e858bfc6a992e7978a068a5c760deb0ed43caf"
|
||||
checksum = "e4307e30089d6fd6aff212f2da3a1f9e32f3223b1f010fb09b7c95f90f3ca1e8"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
@ -441,9 +441,9 @@ checksum = "7264e107f553ccae879d21fbea1d6724ac785e8c3bfc762137959b5802826ef3"
|
||||
|
||||
[[package]]
|
||||
name = "wasi"
|
||||
version = "0.11.0+wasi-snapshot-preview1"
|
||||
version = "0.11.1+wasi-snapshot-preview1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
|
||||
checksum = "ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b"
|
||||
|
||||
[[package]]
|
||||
name = "wasm-bindgen"
|
||||
|
@ -19,14 +19,14 @@

buildPythonPackage rec {
pname = "pycrdt";
version = "0.12.20";
version = "0.12.21";
pyproject = true;

src = fetchFromGitHub {
owner = "y-crdt";
repo = "pycrdt";
tag = version;
hash = "sha256-kSwmQf46c5UJD75cfJxR3EfxFXExHhLXB+xdExr4lCk=";
hash = "sha256-QfgBq/jL/dFOr1YAC4y0s0tpdIC4bwxknPd1wJ/Z1lo=";
};

postPatch = ''
@ -12,7 +12,7 @@

buildPythonPackage rec {
pname = "pypck";
version = "0.8.6";
version = "0.8.8";
pyproject = true;

disabled = pythonOlder "3.11";
@ -21,7 +21,7 @@ buildPythonPackage rec {
owner = "alengwenus";
repo = "pypck";
tag = version;
hash = "sha256-Us6CkRt6s/Dfw2k1R4WLWfl9ekhSwTL54BJg2hToOYY=";
hash = "sha256-n7UFmWQyw60FTWNxs4mA9ZziZ2EwphkMI9pOLx7gFcA=";
};

postPatch = ''
@ -47,7 +47,7 @@ buildPythonPackage rec {
meta = with lib; {
description = "LCN-PCK library written in Python";
homepage = "https://github.com/alengwenus/pypck";
changelog = "https://github.com/alengwenus/pypck/releases/tag/${version}";
changelog = "https://github.com/alengwenus/pypck/releases/tag/${src.tag}";
license = licenses.epl20;
maintainers = with maintainers; [ fab ];
};
@ -1,14 +1,16 @@
|
||||
{
|
||||
lib,
|
||||
python3Packages,
|
||||
buildPythonPackage,
|
||||
fetchFromGitHub,
|
||||
cython,
|
||||
djvulibre,
|
||||
setuptools,
|
||||
ghostscript_headless,
|
||||
pkg-config,
|
||||
unittestCheckHook,
|
||||
}:
|
||||
|
||||
python3Packages.buildPythonPackage rec {
|
||||
buildPythonPackage rec {
|
||||
pname = "python-djvulibre";
|
||||
version = "0.9.3";
|
||||
pyproject = true;
|
||||
@ -21,14 +23,14 @@ python3Packages.buildPythonPackage rec {
|
||||
};
|
||||
|
||||
build-system = [
|
||||
python3Packages.cython
|
||||
cython
|
||||
djvulibre
|
||||
ghostscript_headless
|
||||
pkg-config
|
||||
python3Packages.setuptools
|
||||
setuptools
|
||||
];
|
||||
|
||||
dependencies = with python3Packages; [
|
||||
dependencies = [
|
||||
djvulibre
|
||||
ghostscript_headless
|
||||
];
|
||||
|
@ -0,0 +1,40 @@
|
||||
{
|
||||
lib,
|
||||
buildPythonPackage,
|
||||
fetchPypi,
|
||||
poetry-core,
|
||||
robotframework,
|
||||
robotframework-pythonlibcore,
|
||||
}:
|
||||
|
||||
buildPythonPackage rec {
|
||||
pname = "robotframework-assertion-engine";
|
||||
version = "3.0.3";
|
||||
pyproject = true;
|
||||
|
||||
src = fetchPypi {
|
||||
pname = "robotframework_assertion_engine";
|
||||
inherit version;
|
||||
hash = "sha256-HGCNTGnZZSCYah3cbe8Px/foSVIPHmiCpjO1HbuY/Yg=";
|
||||
};
|
||||
|
||||
build-system = [
|
||||
poetry-core
|
||||
];
|
||||
|
||||
dependencies = [
|
||||
robotframework
|
||||
robotframework-pythonlibcore
|
||||
];
|
||||
|
||||
pythonImportsCheck = [
|
||||
"assertionengine"
|
||||
];
|
||||
|
||||
meta = {
|
||||
description = "Generic way to create meaningful and easy to use assertions for the Robot Framework libraries";
|
||||
homepage = "https://pypi.org/project/robotframework-assertion-engine/";
|
||||
license = lib.licenses.asl20;
|
||||
maintainers = with lib.maintainers; [ bjornfor ];
|
||||
};
|
||||
}
|
@ -4,6 +4,7 @@
|
||||
fetchFromGitHub,
|
||||
setuptools,
|
||||
robotframework,
|
||||
robotframework-assertion-engine,
|
||||
robotframework-excellib,
|
||||
pytestCheckHook,
|
||||
}:
|
||||
@ -27,6 +28,7 @@ buildPythonPackage rec {
|
||||
|
||||
propagatedBuildInputs = [
|
||||
robotframework
|
||||
robotframework-assertion-engine
|
||||
robotframework-excellib
|
||||
];
|
||||
|
||||
|
@ -77,6 +77,12 @@ buildPythonPackage rec {
|
||||
disabledTests = [
|
||||
# Assertion issues
|
||||
"test_textual_env_var"
|
||||
|
||||
# Fail since tree-sitter-markdown was updated to 0.5.0
|
||||
# ValueError: Incompatible Language version 15. Must be between 13 and 14
|
||||
# https://github.com/Textualize/textual/issues/5868
|
||||
"test_setting_builtin_language_via_attribute"
|
||||
"test_setting_builtin_language_via_constructor"
|
||||
];
|
||||
|
||||
pytestFlags = [
|
||||
|
@ -46,6 +46,7 @@
|
||||
|
||||
# dependencies
|
||||
astunparse,
|
||||
binutils,
|
||||
expecttest,
|
||||
filelock,
|
||||
fsspec,
|
||||
@ -331,6 +332,10 @@ buildPythonPackage rec {
|
||||
# flag from cmakeFlags doesn't work, not clear why
|
||||
# setting it at the top of NNPACK's own CMakeLists does
|
||||
sed -i '2s;^;set(PYTHON_SIX_SOURCE_DIR ${six.src})\n;' third_party/NNPACK/CMakeLists.txt
|
||||
|
||||
# Ensure that torch profiler unwind uses addr2line from nix
|
||||
substituteInPlace torch/csrc/profiler/unwind/unwind.cpp \
|
||||
--replace-fail 'addr2line_binary_ = "addr2line"' 'addr2line_binary_ = "${lib.getExe' binutils "addr2line"}"'
|
||||
''
|
||||
+ lib.optionalString rocmSupport ''
|
||||
# https://github.com/facebookincubator/gloo/pull/297
|
||||
|
100
pkgs/development/python-modules/torchao/default.nix
Normal file
@ -0,0 +1,100 @@
|
||||
{
|
||||
lib,
|
||||
stdenv,
|
||||
buildPythonPackage,
|
||||
fetchFromGitHub,
|
||||
|
||||
# build-system
|
||||
setuptools,
|
||||
|
||||
# dependencies
|
||||
torch,
|
||||
|
||||
# tests
|
||||
bitsandbytes,
|
||||
expecttest,
|
||||
fire,
|
||||
pytest-xdist,
|
||||
pytestCheckHook,
|
||||
parameterized,
|
||||
tabulate,
|
||||
transformers,
|
||||
unittest-xml-reporting,
|
||||
}:
|
||||
|
||||
buildPythonPackage rec {
|
||||
pname = "ao";
|
||||
version = "0.11.0";
|
||||
pyproject = true;
|
||||
|
||||
src = fetchFromGitHub {
|
||||
owner = "pytorch";
|
||||
repo = "ao";
|
||||
tag = "v${version}";
|
||||
hash = "sha256-CNb9xaubOmIRanLq3TM4sBbszTcVK/WFpcq/sWpof44=";
|
||||
};
|
||||
|
||||
build-system = [
|
||||
setuptools
|
||||
];
|
||||
|
||||
dependencies = [
|
||||
torch
|
||||
];
|
||||
|
||||
env = {
|
||||
USE_SYSTEM_LIBS = true;
|
||||
};
|
||||
|
||||
# Otherwise, the tests are loading the python module from the source instead of the installed one
|
||||
preCheck = ''
|
||||
rm -rf torchao
|
||||
'';
|
||||
|
||||
pythonImportsCheck = [
|
||||
"torchao"
|
||||
];
|
||||
|
||||
nativeCheckInputs = [
|
||||
bitsandbytes
|
||||
expecttest
|
||||
fire
|
||||
parameterized
|
||||
pytest-xdist
|
||||
pytestCheckHook
|
||||
tabulate
|
||||
transformers
|
||||
unittest-xml-reporting
|
||||
];
|
||||
|
||||
disabledTests =
|
||||
[
|
||||
# Requires internet access
|
||||
"test_on_dummy_distilbert"
|
||||
|
||||
# FileNotFoundError: [Errno 2] No such file or directory: 'checkpoints/meta-llama/Llama-2-7b-chat-hf/model.pth'
|
||||
"test_gptq_mt"
|
||||
]
|
||||
++ lib.optionals (stdenv.hostPlatform.isLinux && stdenv.hostPlatform.isAarch64) [
|
||||
# RuntimeError: failed to initialize QNNPACK
|
||||
"test_smooth_linear_cpu"
|
||||
|
||||
# torch._inductor.exc.InductorError: LoweringException: AssertionError: Expect L1_cache_size > 0 but got 0
|
||||
"test_int8_weight_only_quant_with_freeze_0_cpu"
|
||||
"test_int8_weight_only_quant_with_freeze_1_cpu"
|
||||
"test_int8_weight_only_quant_with_freeze_2_cpu"
|
||||
|
||||
# FileNotFoundError: [Errno 2] No such file or directory: 'test.pth'
|
||||
"test_save_load_int4woqtensors_2_cpu"
|
||||
"test_save_load_int8woqtensors_0_cpu"
|
||||
"test_save_load_int8woqtensors_1_cpu"
|
||||
];
|
||||
|
||||
meta = {
|
||||
description = "PyTorch native quantization and sparsity for training and inference";
|
||||
homepage = "https://github.com/pytorch/ao";
|
||||
changelog = "https://github.com/pytorch/ao/releases/tag/v${version}";
|
||||
license = lib.licenses.bsd3;
|
||||
maintainers = with lib.maintainers; [ GaetanLepage ];
|
||||
};
|
||||
}
|
@ -9,14 +9,14 @@

buildPythonPackage rec {
pname = "tree-sitter-markdown";
version = "0.4.1";
version = "0.5.0";
pyproject = true;

src = fetchFromGitHub {
owner = "tree-sitter-grammars";
repo = "tree-sitter-markdown";
tag = "v${version}";
hash = "sha256-Oe2iL5b1Cyv+dK0nQYFNLCCOCe+93nojxt6ukH2lEmU=";
hash = "sha256-I9KDE1yZce8KIGPLG5tmv5r/NCWwN95R6fIyvGdx+So=";
};

build-system = [
@ -39,7 +39,7 @@ buildPythonPackage rec {
meta = {
description = "Markdown grammar for tree-sitter";
homepage = "https://github.com/tree-sitter-grammars/tree-sitter-markdown";
changelog = "https://github.com/tree-sitter-grammars/tree-sitter-markdown/releases/tag/v${version}";
changelog = "https://github.com/tree-sitter-grammars/tree-sitter-markdown/releases/tag/${src.tag}";
license = lib.licenses.mit;
maintainers = with lib.maintainers; [ GaetanLepage ];
};
@ -7,16 +7,16 @@

buildGoModule rec {
pname = "spicedb";
version = "1.44.0";
version = "1.44.3";

src = fetchFromGitHub {
owner = "authzed";
repo = "spicedb";
rev = "v${version}";
hash = "sha256-7QWYqMAX3K16ITkDaVlrEzTH7uIaKDtZom04mBhPZS8=";
hash = "sha256-V/uUfysSHDtkZjRoJMI4qbfl1PuCE6pbwRhk6D14s60=";
};

vendorHash = "sha256-X+AQgn5aVIFOV+F8H8Byf1tsu7CVb0PwjzS8x5xn3l0=";
vendorHash = "sha256-Tdg9HOzH7N465QX7m65S7+HfT+sdgVpdTnHjhart1ec=";

ldflags = [
"-X 'github.com/jzelinskie/cobrautil/v2.Version=${src.rev}'"
3
pkgs/test/build-deno-package/.gitignore
vendored
Normal file
@ -0,0 +1,3 @@
|
||||
.deno/
|
||||
node_modules/
|
||||
vendor/
|
33
pkgs/test/build-deno-package/binaries/default.nix
Normal file
@ -0,0 +1,33 @@
|
||||
{ nix-gitignore, buildDenoPackage }:
|
||||
{
|
||||
with-npm-linux = buildDenoPackage rec {
|
||||
pname = "test-deno-build-binaries-with-npm-${targetSystem}";
|
||||
version = "0.1.0";
|
||||
denoDepsHash = "sha256-k2js/8XsxGVu83rGMJed457orraue8WUZF+JUMMfhVQ=";
|
||||
src = nix-gitignore.gitignoreSource [ ] ./with-npm;
|
||||
binaryEntrypointPath = "./main.ts";
|
||||
targetSystem = "x86_64-linux";
|
||||
};
|
||||
without-npm-linux = buildDenoPackage rec {
|
||||
pname = "test-deno-build-binaries-without-npm-${targetSystem}";
|
||||
version = "0.1.0";
|
||||
denoDepsHash = "sha256-keshKcgawVcuSGNYAIepUrRl7iqpp0ExRJag4aiV18c=";
|
||||
src = nix-gitignore.gitignoreSource [ ] ./without-npm;
|
||||
binaryEntrypointPath = "./main.ts";
|
||||
targetSystem = "x86_64-linux";
|
||||
};
|
||||
# mac =
|
||||
# let
|
||||
# targetSystem = "aarch64-darwin";
|
||||
# macpkgs = import ../../../../default.nix { crossSystem = { config = "arm64-apple-darwin"; };};
|
||||
# in
|
||||
# buildDenoPackage {
|
||||
# pname = "test-deno-build-binaries-${targetSystem}";
|
||||
# version = "0.1.0";
|
||||
# denoDepsHash = "";
|
||||
# src = nix-gitignore.gitignoreSource [ ] ./.;
|
||||
# binaryEntrypointPath = "./main.ts";
|
||||
# denortPackage = macpkgs.denort;
|
||||
# inherit targetSystem;
|
||||
# };
|
||||
}
|
3
pkgs/test/build-deno-package/binaries/with-npm/.gitignore
vendored
Normal file
@ -0,0 +1,3 @@
|
||||
.deno/
|
||||
node_modules/
|
||||
vendor/
|
13
pkgs/test/build-deno-package/binaries/with-npm/deno.json
Normal file
@ -0,0 +1,13 @@
|
||||
{
|
||||
"name": "binary build",
|
||||
"tasks": {
|
||||
"build": "deno run --allow-all main.ts"
|
||||
},
|
||||
"imports": {
|
||||
"@luca/cases": "jsr:@luca/cases@1.0.0",
|
||||
"@std/cli": "jsr:@std/cli@1.0.17",
|
||||
"cowsay": "npm:cowsay@1.6.0",
|
||||
"cases": "https://deno.land/x/case@2.2.0/mod.ts"
|
||||
},
|
||||
"vendor": true
|
||||
}
|
215
pkgs/test/build-deno-package/binaries/with-npm/deno.lock
generated
Normal file
@ -0,0 +1,215 @@
|
||||
{
|
||||
"version": "5",
|
||||
"specifiers": {
|
||||
"jsr:@luca/cases@1.0.0": "1.0.0",
|
||||
"jsr:@std/cli@1.0.17": "1.0.17",
|
||||
"npm:cowsay@1.6.0": "1.6.0"
|
||||
},
|
||||
"jsr": {
|
||||
"@luca/cases@1.0.0": {
|
||||
"integrity": "b5f9471f1830595e63a2b7d62821ac822a19e16899e6584799be63f17a1fbc30"
|
||||
},
|
||||
"@std/cli@1.0.17": {
|
||||
"integrity": "e15b9abe629e17be90cc6216327f03a29eae613365f1353837fa749aad29ce7b"
|
||||
}
|
||||
},
|
||||
"npm": {
|
||||
"ansi-regex@3.0.1": {
|
||||
"integrity": "sha512-+O9Jct8wf++lXxxFc4hc8LsjaSq0HFzzL7cVsw8pRDIPdjKD2mT4ytDZlLuSBZ4cLKZFXIrMGO7DbQCtMJJMKw=="
|
||||
},
|
||||
"ansi-regex@5.0.1": {
|
||||
"integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ=="
|
||||
},
|
||||
"ansi-styles@4.3.0": {
|
||||
"integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==",
|
||||
"dependencies": [
|
||||
"color-convert"
|
||||
]
|
||||
},
|
||||
"camelcase@5.3.1": {
|
||||
"integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg=="
|
||||
},
|
||||
"cliui@6.0.0": {
|
||||
"integrity": "sha512-t6wbgtoCXvAzst7QgXxJYqPt0usEfbgQdftEPbLL/cvv6HPE5VgvqCuAIDR0NgU52ds6rFwqrgakNLrHEjCbrQ==",
|
||||
"dependencies": [
|
||||
"string-width@4.2.3",
|
||||
"strip-ansi@6.0.1",
|
||||
"wrap-ansi"
|
||||
]
|
||||
},
|
||||
"color-convert@2.0.1": {
|
||||
"integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==",
|
||||
"dependencies": [
|
||||
"color-name"
|
||||
]
|
||||
},
|
||||
"color-name@1.1.4": {
|
||||
"integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA=="
|
||||
},
|
||||
"cowsay@1.6.0": {
|
||||
"integrity": "sha512-8C4H1jdrgNusTQr3Yu4SCm+ZKsAlDFbpa0KS0Z3im8ueag+9pGOf3CrioruvmeaW/A5oqg9L0ar6qeftAh03jw==",
|
||||
"dependencies": [
|
||||
"get-stdin",
|
||||
"string-width@2.1.1",
|
||||
"strip-final-newline",
|
||||
"yargs"
|
||||
],
|
||||
"bin": true
|
||||
},
|
||||
"decamelize@1.2.0": {
|
||||
"integrity": "sha512-z2S+W9X73hAUUki+N+9Za2lBlun89zigOyGrsax+KUQ6wKW4ZoWpEYBkGhQjwAjjDCkWxhY0VKEhk8wzY7F5cA=="
|
||||
},
|
||||
"emoji-regex@8.0.0": {
|
||||
"integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A=="
|
||||
},
|
||||
"find-up@4.1.0": {
|
||||
"integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==",
|
||||
"dependencies": [
|
||||
"locate-path",
|
||||
"path-exists"
|
||||
]
|
||||
},
|
||||
"get-caller-file@2.0.5": {
|
||||
"integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg=="
|
||||
},
|
||||
"get-stdin@8.0.0": {
|
||||
"integrity": "sha512-sY22aA6xchAzprjyqmSEQv4UbAAzRN0L2dQB0NlN5acTTK9Don6nhoc3eAbUnpZiCANAMfd/+40kVdKfFygohg=="
|
||||
},
|
||||
"is-fullwidth-code-point@2.0.0": {
|
||||
"integrity": "sha512-VHskAKYM8RfSFXwee5t5cbN5PZeq1Wrh6qd5bkyiXIf6UQcN6w/A0eXM9r6t8d+GYOh+o6ZhiEnb88LN/Y8m2w=="
|
||||
},
|
||||
"is-fullwidth-code-point@3.0.0": {
|
||||
"integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg=="
|
||||
},
|
||||
"locate-path@5.0.0": {
|
||||
"integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==",
|
||||
"dependencies": [
|
||||
"p-locate"
|
||||
]
|
||||
},
|
||||
"p-limit@2.3.0": {
|
||||
"integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==",
|
||||
"dependencies": [
|
||||
"p-try"
|
||||
]
|
||||
},
|
||||
"p-locate@4.1.0": {
|
||||
"integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==",
|
||||
"dependencies": [
|
||||
"p-limit"
|
||||
]
|
||||
},
|
||||
"p-try@2.2.0": {
|
||||
"integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ=="
|
||||
},
|
||||
"path-exists@4.0.0": {
|
||||
"integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w=="
|
||||
},
|
||||
"require-directory@2.1.1": {
|
||||
"integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q=="
|
||||
},
|
||||
"require-main-filename@2.0.0": {
|
||||
"integrity": "sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg=="
|
||||
},
|
||||
"set-blocking@2.0.0": {
|
||||
"integrity": "sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw=="
|
||||
},
|
||||
"string-width@2.1.1": {
|
||||
"integrity": "sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw==",
|
||||
"dependencies": [
|
||||
"is-fullwidth-code-point@2.0.0",
|
||||
"strip-ansi@4.0.0"
|
||||
]
|
||||
},
|
||||
"string-width@4.2.3": {
|
||||
"integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==",
|
||||
"dependencies": [
|
||||
"emoji-regex",
|
||||
"is-fullwidth-code-point@3.0.0",
|
||||
"strip-ansi@6.0.1"
|
||||
]
|
||||
},
|
||||
"strip-ansi@4.0.0": {
|
||||
"integrity": "sha512-4XaJ2zQdCzROZDivEVIDPkcQn8LMFSa8kj8Gxb/Lnwzv9A8VctNZ+lfivC/sV3ivW8ElJTERXZoPBRrZKkNKow==",
|
||||
"dependencies": [
|
||||
"ansi-regex@3.0.1"
|
||||
]
|
||||
},
|
||||
"strip-ansi@6.0.1": {
|
||||
"integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==",
|
||||
"dependencies": [
|
||||
"ansi-regex@5.0.1"
|
||||
]
|
||||
},
|
||||
"strip-final-newline@2.0.0": {
|
||||
"integrity": "sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA=="
|
||||
},
|
||||
"which-module@2.0.1": {
|
||||
"integrity": "sha512-iBdZ57RDvnOR9AGBhML2vFZf7h8vmBjhoaZqODJBFWHVtKkDmKuHai3cx5PgVMrX5YDNp27AofYbAwctSS+vhQ=="
|
||||
},
|
||||
"wrap-ansi@6.2.0": {
|
||||
"integrity": "sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==",
|
||||
"dependencies": [
|
||||
"ansi-styles",
|
||||
"string-width@4.2.3",
|
||||
"strip-ansi@6.0.1"
|
||||
]
|
||||
},
|
||||
"y18n@4.0.3": {
|
||||
"integrity": "sha512-JKhqTOwSrqNA1NY5lSztJ1GrBiUodLMmIZuLiDaMRJ+itFd+ABVE8XBjOvIWL+rSqNDC74LCSFmlb/U4UZ4hJQ=="
|
||||
},
|
||||
"yargs-parser@18.1.3": {
|
||||
"integrity": "sha512-o50j0JeToy/4K6OZcaQmW6lyXXKhq7csREXcDwk2omFPJEwUNOVtJKvmDr9EI1fAJZUyZcRF7kxGBWmRXudrCQ==",
|
||||
"dependencies": [
|
||||
"camelcase",
|
||||
"decamelize"
|
||||
]
|
||||
},
|
||||
"yargs@15.4.1": {
|
||||
"integrity": "sha512-aePbxDmcYW++PaqBsJ+HYUFwCdv4LVvdnhBy78E57PIor8/OVvhMrADFFEDh8DHDFRv/O9i3lPhsENjO7QX0+A==",
|
||||
"dependencies": [
|
||||
"cliui",
|
||||
"decamelize",
|
||||
"find-up",
|
||||
"get-caller-file",
|
||||
"require-directory",
|
||||
"require-main-filename",
|
||||
"set-blocking",
|
||||
"string-width@4.2.3",
|
||||
"which-module",
|
||||
"y18n",
|
||||
"yargs-parser"
|
||||
]
|
||||
}
|
||||
},
|
||||
"remote": {
|
||||
"https://deno.land/x/case@2.2.0/camelCase.ts": "b9a4cf361a7c9740ecb75e00b5e2c006bd4e5d40e442d26c5f2760286fa66796",
|
||||
"https://deno.land/x/case@2.2.0/constantCase.ts": "c698fc32f00cd267c1684b1d413d784260d7e7798f2bf506803e418497d839b5",
|
||||
"https://deno.land/x/case@2.2.0/dotCase.ts": "03ae55d5635e6a4ca894a003d9297cd9cd283af2e7d761dd3de13663849a9423",
|
||||
"https://deno.land/x/case@2.2.0/headerCase.ts": "3f6c8ab2ab30a88147326bce28a00d1189ec98ab61c83ab72ce79e852afddc4a",
|
||||
"https://deno.land/x/case@2.2.0/lowerCase.ts": "d75eb55cadfa589f9f2a973924a8a209054477d9574da669410f4d817ab25b41",
|
||||
"https://deno.land/x/case@2.2.0/lowerFirstCase.ts": "b001efbf2d715b53d066b22cdbf8eda7f99aa7108e3d12fb02f80d499bae93d9",
|
||||
"https://deno.land/x/case@2.2.0/mod.ts": "28b0b1329c7b18730799ac05627a433d9547c04b9bfb429116247c60edecd97b",
|
||||
"https://deno.land/x/case@2.2.0/normalCase.ts": "085c8b6f9d69283c8b86f2e504d43278c2be8b7e56a3ed8d4a5f395e398bdc29",
|
||||
"https://deno.land/x/case@2.2.0/paramCase.ts": "a234c9c17dfbaddee647b6571c2c90e8f6530123fed26c4546f4063d67c1609f",
|
||||
"https://deno.land/x/case@2.2.0/pascalCase.ts": "4b3ef0a68173871a821d306d4067e8f72d42aeeef1eea6aeab30af6bfa3d7427",
|
||||
"https://deno.land/x/case@2.2.0/pathCase.ts": "330a34b4df365b0291d8e36158235340131730aae6f6add66962ed2d0fbead4a",
|
||||
"https://deno.land/x/case@2.2.0/sentenceCase.ts": "b312cef147a13b58ffdf3c36bf55b33aa8322c91f4aa9b32318f3911bb92327f",
|
||||
"https://deno.land/x/case@2.2.0/snakeCase.ts": "e5ac1e08532ca397aa3150a0a3255d59f63a186d934e5094a8ffd24cbca7f955",
|
||||
"https://deno.land/x/case@2.2.0/swapCase.ts": "bb03742fcf613f733890680ceca1b39b65ed290f36a317fcd47edd517c4e0e1e",
|
||||
"https://deno.land/x/case@2.2.0/titleCase.ts": "c287131ea2c955e67cdd5cf604de96d31a8e2813305759922b9ed27e3be354e7",
|
||||
"https://deno.land/x/case@2.2.0/types.ts": "8e2bd6edaa27c0d1972c0d5b76698564740f37b4d3787d58d1fb5f48de611e61",
|
||||
"https://deno.land/x/case@2.2.0/upperCase.ts": "6cca267bb04d098bf4abf21e42e60c3e68ede89b12e525643c6b6eff3e10de34",
|
||||
"https://deno.land/x/case@2.2.0/upperFirstCase.ts": "b964c2d8d3a85c78cd35f609135cbde99d84b9522a21470336b5af80a37facbd",
|
||||
"https://deno.land/x/case@2.2.0/vendor/camelCaseRegexp.ts": "7d9ff02aad4ab6429eeab7c7353f7bcdd6cc5909a8bd3dda97918c8bbb7621ae",
|
||||
"https://deno.land/x/case@2.2.0/vendor/camelCaseUpperRegexp.ts": "292de54a698370f90adcdf95727993d09888b7f33d17f72f8e54ba75f7791787",
|
||||
"https://deno.land/x/case@2.2.0/vendor/nonWordRegexp.ts": "c1a052629a694144b48c66b0175a22a83f4d61cb40f4e45293fc5d6b123f927e"
|
||||
},
|
||||
"workspace": {
|
||||
"dependencies": [
|
||||
"jsr:@luca/cases@1.0.0",
|
||||
"jsr:@std/cli@1.0.17",
|
||||
"npm:cowsay@1.6.0"
|
||||
]
|
||||
}
|
||||
}
|
15
pkgs/test/build-deno-package/binaries/with-npm/main.ts
Normal file
@ -0,0 +1,15 @@
import { camelCase } from "@luca/cases";
import { say } from "cowsay";
import { pascalCase } from "cases";
import { parseArgs } from "@std/cli";

const flags = parseArgs(Deno.args, {
  string: ["text"],
});

if (!flags.text) {
  throw "--text required but not specified";
}

console.log(camelCase(say({ text: flags.text })));
console.log(pascalCase(say({ text: flags.text })));
3
pkgs/test/build-deno-package/binaries/without-npm/.gitignore
vendored
Normal file
@ -0,0 +1,3 @@
.deno/
node_modules/
vendor/
12
pkgs/test/build-deno-package/binaries/without-npm/deno.json
Normal file
@ -0,0 +1,12 @@
{
  "name": "binary build",
  "tasks": {
    "build": "deno run --allow-all main.ts"
  },
  "imports": {
    "@luca/cases": "jsr:@luca/cases@1.0.0",
    "@std/cli": "jsr:@std/cli@1.0.17",
    "cases": "https://deno.land/x/case@2.2.0/mod.ts"
  },
  "vendor": true
}
44
pkgs/test/build-deno-package/binaries/without-npm/deno.lock
generated
Normal file
@ -0,0 +1,44 @@
{
  "version": "5",
  "specifiers": {
    "jsr:@luca/cases@1.0.0": "1.0.0",
    "jsr:@std/cli@1.0.17": "1.0.17"
  },
  "jsr": {
    "@luca/cases@1.0.0": {
      "integrity": "b5f9471f1830595e63a2b7d62821ac822a19e16899e6584799be63f17a1fbc30"
    },
    "@std/cli@1.0.17": {
      "integrity": "e15b9abe629e17be90cc6216327f03a29eae613365f1353837fa749aad29ce7b"
    }
  },
  "remote": {
    "https://deno.land/x/case@2.2.0/camelCase.ts": "b9a4cf361a7c9740ecb75e00b5e2c006bd4e5d40e442d26c5f2760286fa66796",
    "https://deno.land/x/case@2.2.0/constantCase.ts": "c698fc32f00cd267c1684b1d413d784260d7e7798f2bf506803e418497d839b5",
    "https://deno.land/x/case@2.2.0/dotCase.ts": "03ae55d5635e6a4ca894a003d9297cd9cd283af2e7d761dd3de13663849a9423",
    "https://deno.land/x/case@2.2.0/headerCase.ts": "3f6c8ab2ab30a88147326bce28a00d1189ec98ab61c83ab72ce79e852afddc4a",
    "https://deno.land/x/case@2.2.0/lowerCase.ts": "d75eb55cadfa589f9f2a973924a8a209054477d9574da669410f4d817ab25b41",
    "https://deno.land/x/case@2.2.0/lowerFirstCase.ts": "b001efbf2d715b53d066b22cdbf8eda7f99aa7108e3d12fb02f80d499bae93d9",
    "https://deno.land/x/case@2.2.0/mod.ts": "28b0b1329c7b18730799ac05627a433d9547c04b9bfb429116247c60edecd97b",
    "https://deno.land/x/case@2.2.0/normalCase.ts": "085c8b6f9d69283c8b86f2e504d43278c2be8b7e56a3ed8d4a5f395e398bdc29",
    "https://deno.land/x/case@2.2.0/paramCase.ts": "a234c9c17dfbaddee647b6571c2c90e8f6530123fed26c4546f4063d67c1609f",
    "https://deno.land/x/case@2.2.0/pascalCase.ts": "4b3ef0a68173871a821d306d4067e8f72d42aeeef1eea6aeab30af6bfa3d7427",
    "https://deno.land/x/case@2.2.0/pathCase.ts": "330a34b4df365b0291d8e36158235340131730aae6f6add66962ed2d0fbead4a",
    "https://deno.land/x/case@2.2.0/sentenceCase.ts": "b312cef147a13b58ffdf3c36bf55b33aa8322c91f4aa9b32318f3911bb92327f",
    "https://deno.land/x/case@2.2.0/snakeCase.ts": "e5ac1e08532ca397aa3150a0a3255d59f63a186d934e5094a8ffd24cbca7f955",
    "https://deno.land/x/case@2.2.0/swapCase.ts": "bb03742fcf613f733890680ceca1b39b65ed290f36a317fcd47edd517c4e0e1e",
    "https://deno.land/x/case@2.2.0/titleCase.ts": "c287131ea2c955e67cdd5cf604de96d31a8e2813305759922b9ed27e3be354e7",
    "https://deno.land/x/case@2.2.0/types.ts": "8e2bd6edaa27c0d1972c0d5b76698564740f37b4d3787d58d1fb5f48de611e61",
    "https://deno.land/x/case@2.2.0/upperCase.ts": "6cca267bb04d098bf4abf21e42e60c3e68ede89b12e525643c6b6eff3e10de34",
    "https://deno.land/x/case@2.2.0/upperFirstCase.ts": "b964c2d8d3a85c78cd35f609135cbde99d84b9522a21470336b5af80a37facbd",
    "https://deno.land/x/case@2.2.0/vendor/camelCaseRegexp.ts": "7d9ff02aad4ab6429eeab7c7353f7bcdd6cc5909a8bd3dda97918c8bbb7621ae",
    "https://deno.land/x/case@2.2.0/vendor/camelCaseUpperRegexp.ts": "292de54a698370f90adcdf95727993d09888b7f33d17f72f8e54ba75f7791787",
    "https://deno.land/x/case@2.2.0/vendor/nonWordRegexp.ts": "c1a052629a694144b48c66b0175a22a83f4d61cb40f4e45293fc5d6b123f927e"
  },
  "workspace": {
    "dependencies": [
      "jsr:@luca/cases@1.0.0",
      "jsr:@std/cli@1.0.17"
    ]
  }
}
14
pkgs/test/build-deno-package/binaries/without-npm/main.ts
Normal file
@ -0,0 +1,14 @@
import { camelCase } from "@luca/cases";
import { pascalCase } from "cases";
import { parseArgs } from "@std/cli";

const flags = parseArgs(Deno.args, {
  string: ["text"],
});

if (!flags.text) {
  throw "--text required but not specified";
}

console.log(camelCase(flags.text));
console.log(pascalCase(flags.text));
4
pkgs/test/build-deno-package/default.nix
Normal file
@ -0,0 +1,4 @@
{ pkgs }:
(pkgs.callPackage ./workspaces { })
// (pkgs.callPackage ./binaries { })
// (pkgs.callPackage ./external { })
67
pkgs/test/build-deno-package/external/default.nix
vendored
Normal file
@ -0,0 +1,67 @@
{ fetchFromGitHub, buildDenoPackage }:
{
  readma-cli-linux = buildDenoPackage rec {
    pname = "readma-cli";
    version = "2.11.0";
    denoDepsHash = "sha256-ixet3k6OEWfxVnN/V7vk4qDvoXjA+6bU/JjXk76aThE=";
    src = fetchFromGitHub {
      owner = "elcoosp";
      repo = "readma";
      rev = "${version}";
      hash = "sha256-FVQTn+r7Ztj02vNvqFZIRIsokWeo1tPfFYffK2tvxjA=";
    };
    denoInstallFlags = [
      "--allow-scripts"
      "--frozen"
      "--cached-only"
      "--entrypoint"
      "./cli/mod.ts"
    ];
    binaryEntrypointPath = "./cli/mod.ts";
    targetSystem = "x86_64-linux";
  };
  fresh-init-cli-linux = buildDenoPackage {
    pname = "fresh-init-cli";
    version = "";
    denoDepsHash = "sha256-WlMv431qTt3gw0w/V7lG8LnLkEt8VW1fNpyclzBwMcw=";
    src = fetchFromGitHub {
      owner = "denoland";
      repo = "fresh";
      rev = "c7c341b695bad8d0f3e3575e5fa9c82e0fa28bd4";
      hash = "sha256-bC4akr4Wt4sRqGkgjNuXztW8Q6YBLBsbuIOhsXH8NQU=";
    };
    denoWorkspacePath = "./init";
    binaryEntrypointPath = "./src/mod.ts";
    targetSystem = "x86_64-linux";
  };
  invidious-companion-cli-linux = buildDenoPackage {
    pname = "invidious-companion-cli";
    version = "";
    denoDepsHash = "sha256-sPcvVaVb4VsLI87kiYe3Z3eoXL1uDKwTQMck91cXVnM=";
    src = fetchFromGitHub {
      owner = "iv-org";
      repo = "invidious-companion";
      rev = "a34c27ff63e51f9e3adc0e8647cd12382f8f1ffe";
      hash = "sha256-/S8F7G8li12k0objsdFuh+mle6p2mk8zNUUCrG9hgns=";
    };
    binaryEntrypointPath = "src/main.ts";
    denoCompileFlags = [
      "--include=./src/lib/helpers/youtubePlayerReq.ts"
      "--include=./src/lib/helpers/getFetchClient.ts"
      "--allow-import=github.com:443,jsr.io:443,cdn.jsdelivr.net:443,esm.sh:443,deno.land:443"
      "--allow-net"
      "--allow-env"
      "--allow-read"
      "--allow-sys=hostname"
      "--allow-write=/var/tmp/youtubei.js"
    ];
    denoInstallFlags = [
      "--allow-scripts"
      "--frozen"
      "--cached-only"
      "--entrypoint"
      "src/main.ts"
    ];
    targetSystem = "x86_64-linux";
  };
}
9
pkgs/test/build-deno-package/shell.nix
Normal file
@ -0,0 +1,9 @@
let
  pkgs = import ../../../default.nix { };
in
pkgs.mkShell {
  buildInputs = [ pkgs.deno ];
  DENO_DIR = "./.deno";

  shellHook = '''';
}
3
pkgs/test/build-deno-package/workspaces/.gitignore
vendored
Normal file
@ -0,0 +1,3 @@
.deno/
node_modules/
vendor/
40
pkgs/test/build-deno-package/workspaces/default.nix
Normal file
@ -0,0 +1,40 @@
{ nix-gitignore, buildDenoPackage }:
rec {
  sub1 = buildDenoPackage {
    pname = "test-deno-build-workspaces-sub1";
    version = "0.1.0";
    denoDepsHash = "sha256-imraVvtIJqi31aaWv7U1ODVRmOuou1ZR++z7QqnTPr0=";
    src = nix-gitignore.gitignoreSource [ ] ./.;
    denoWorkspacePath = "./sub1";
    extraTaskFlags = [
      "--text"
      "sub1"
    ];
    denoTaskSuffix = ">out.txt";

    installPhase = ''
      cp out.txt $out
    '';
  };
  sub2 = buildDenoPackage {
    pname = "test-deno-build-workspaces-sub2";
    version = "0.1.0";
    inherit (sub1) denoDeps src;
    denoWorkspacePath = "./sub2";
    extraTaskFlags = [
      "--text"
      "sub2"
    ];
    denoTaskSuffix = ">out.txt";
    installPhase = ''
      cp out.txt $out
    '';
  };
  sub1Binary = buildDenoPackage {
    pname = "test-deno-build-workspaces-sub1-binary";
    version = "0.1.0";
    inherit (sub1) denoDeps src;
    denoWorkspacePath = "./sub1";
    binaryEntrypointPath = "./main.ts";
  };
}
7
pkgs/test/build-deno-package/workspaces/deno.json
Normal file
@ -0,0 +1,7 @@
{
  "workspace":[
    "./sub1",
    "./sub2"
  ],
  "vendor": true
}