Merge remote-tracking branch 'origin/master' into staging-next

commit 3c4bd15763
@@ -193,6 +193,9 @@ cffc27daf06c77c0d76bc35d24b929cb9d68c3c9
 # nixos/kanidm: inherit lib, nixfmt
 8f18393d380079904d072007fb19dc64baef0a3a
 
+# fetchhg: format after refactoring with lib.extendMkDerivation and make overridable (#423539)
+34a5b1eb23129f8fb62c677e3760903f6d43228f
+
 # fetchurl: nixfmt-rfc-style
 ce21e97a1f20dee15da85c084f9d1148d84f853b
.github/workflows/labels.yml (vendored, 409 changed lines)

@@ -40,6 +40,11 @@ jobs:
     runs-on: ubuntu-24.04-arm
     if: github.event_name != 'schedule' || github.repository_owner == 'NixOS'
     steps:
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        with:
+          sparse-checkout: |
+            ci/labels
+
       - name: Install dependencies
         run: npm install @actions/artifact bottleneck
@@ -64,406 +69,12 @@ jobs:
           github-token: ${{ steps.app-token.outputs.token || github.token }}
           retries: 3
           script: |
-            const Bottleneck = require('bottleneck')
-            const path = require('node:path')
-            const { DefaultArtifactClient } = require('@actions/artifact')
-            const { readFile, writeFile } = require('node:fs/promises')
[diff of the remaining ~400 removed lines of the old inline labeling script suppressed because it is too large; the same logic now lives in ci/labels/labels.cjs, added below]
+            require('./ci/labels/labels.cjs')({
+              github,
+              context,
+              core,
+              dry: context.eventName == 'pull_request'
+            })
       - name: Log current API rate limits
         env:
ci/labels/.editorconfig (new file, 4 lines)

@@ -0,0 +1,4 @@
# TODO: Move to <top-level>/.editorconfig, once ci/.editorconfig has made its way through staging.
[*.cjs]
indent_style = space
indent_size = 2
ci/labels/.gitignore (new file, vendored, 1 line)

@@ -0,0 +1 @@
node_modules
ci/labels/.npmrc (new file, 1 line)

@@ -0,0 +1 @@
package-lock-only = true
ci/labels/README.md (new file, 4 lines)

@@ -0,0 +1,4 @@
To test the labeler locally:
- Provide `gh` on `PATH` and make sure it's authenticated.
- Enter `nix-shell` in `./ci/labels`.
- Run `./run.js OWNER REPO`, where OWNER is your username or "NixOS" and REPO the name of your fork or "nixpkgs".
ci/labels/labels.cjs (new file, 463 lines)

@@ -0,0 +1,463 @@
module.exports = async function ({ github, context, core, dry }) {
  const Bottleneck = require('bottleneck')
  const path = require('node:path')
  const { DefaultArtifactClient } = require('@actions/artifact')
  const { readFile, writeFile } = require('node:fs/promises')

  const artifactClient = new DefaultArtifactClient()

  const stats = {
    issues: 0,
    prs: 0,
    requests: 0,
    artifacts: 0,
  }

  // Rate-Limiting and Throttling, see for details:
  // https://github.com/octokit/octokit.js/issues/1069#throttling
  // https://docs.github.com/en/rest/using-the-rest-api/best-practices-for-using-the-rest-api
  const allLimits = new Bottleneck({
    // Avoid concurrent requests
    maxConcurrent: 1,
    // Will be updated with first `updateReservoir()` call below.
    reservoir: 0,
  })
  // Pause between mutative requests
  const writeLimits = new Bottleneck({ minTime: 1000 }).chain(allLimits)
  github.hook.wrap('request', async (request, options) => {
    // Requests to the /rate_limit endpoint do not count against the rate limit.
    if (options.url == '/rate_limit') return request(options)
    // Search requests are in a different resource group, which allows 30 requests / minute.
    // We do less than a handful each run, so not implementing throttling for now.
    if (options.url.startsWith('/search/')) return request(options)
    stats.requests++
    if (['POST', 'PUT', 'PATCH', 'DELETE'].includes(options.method))
      return writeLimits.schedule(request.bind(null, options))
    else return allLimits.schedule(request.bind(null, options))
  })

  async function updateReservoir() {
    let response
    try {
      response = await github.rest.rateLimit.get()
    } catch (err) {
      core.error(`Failed updating reservoir:\n${err}`)
      // Keep retrying on failed rate limit requests instead of exiting the script early.
      return
    }
    // Always keep 1000 spare requests for other jobs to do their regular duty.
    // They normally use below 100, so 1000 is *plenty* of room to work with.
    const reservoir = Math.max(0, response.data.resources.core.remaining - 1000)
    core.info(`Updating reservoir to: ${reservoir}`)
    allLimits.updateSettings({ reservoir })
  }
  await updateReservoir()
  // Update remaining requests every minute to account for other jobs running in parallel.
  const reservoirUpdater = setInterval(updateReservoir, 60 * 1000)

  async function handlePullRequest(item) {
    const log = (k, v) => core.info(`PR #${item.number} - ${k}: ${v}`)

    const pull_number = item.number

    // This API request is important for the merge-conflict label, because it triggers the
    // creation of a new test merge commit. This is needed to actually determine the state of a PR.
    const pull_request = (
      await github.rest.pulls.get({
        ...context.repo,
        pull_number,
      })
    ).data

    const reviews = await github.paginate(github.rest.pulls.listReviews, {
      ...context.repo,
      pull_number,
    })

    const approvals = new Set(
      reviews
        .filter((review) => review.state == 'APPROVED')
        .map((review) => review.user?.id),
    )

    // After creation of a Pull Request, `merge_commit_sha` will be null initially:
    // The very first merge commit will only be calculated after a little while.
    // To avoid labeling the PR as conflicted before that, we wait a few minutes.
    // This is intentionally less than the time that Eval takes, so that the label job
    // running after Eval can indeed label the PR as conflicted if that is the case.
    const merge_commit_sha_valid =
      new Date() - new Date(pull_request.created_at) > 3 * 60 * 1000

    const prLabels = {
      // We intentionally don't use the mergeable or mergeable_state attributes.
      // Those have an intermediate state while the test merge commit is created.
      // This doesn't work well for us, because we might have just triggered another
      // test merge commit creation by request the pull request via API at the start
      // of this function.
      // The attribute merge_commit_sha keeps the old value of null or the hash *until*
      // the new test merge commit has either successfully been created or failed so.
      // This essentially means we are updating the merge conflict label in two steps:
      // On the first pass of the day, we just fetch the pull request, which triggers
      // the creation. At this stage, the label is likely not updated, yet.
      // The second pass will then read the result from the first pass and set the label.
      '2.status: merge conflict':
        merge_commit_sha_valid && !pull_request.merge_commit_sha,
      '12.approvals: 1': approvals.size == 1,
      '12.approvals: 2': approvals.size == 2,
      '12.approvals: 3+': approvals.size >= 3,
      '12.first-time contribution': [
        'NONE',
        'FIRST_TIMER',
        'FIRST_TIME_CONTRIBUTOR',
      ].includes(pull_request.author_association),
    }

    const { id: run_id, conclusion } =
      (
        await github.rest.actions.listWorkflowRuns({
          ...context.repo,
          workflow_id: 'pr.yml',
          event: 'pull_request_target',
          exclude_pull_requests: true,
          head_sha: pull_request.head.sha,
        })
      ).data.workflow_runs[0] ??
      // TODO: Remove this after 2025-09-17, at which point all eval.yml artifacts will have expired.
      (
        await github.rest.actions.listWorkflowRuns({
          ...context.repo,
          // In older PRs, we need eval.yml instead of pr.yml.
          workflow_id: 'eval.yml',
          event: 'pull_request_target',
          status: 'success',
          exclude_pull_requests: true,
          head_sha: pull_request.head.sha,
        })
      ).data.workflow_runs[0] ??
      {}

    // Newer PRs might not have run Eval to completion, yet.
    // Older PRs might not have an eval.yml workflow, yet.
    // In either case we continue without fetching an artifact on a best-effort basis.
    log('Last eval run', run_id ?? '<n/a>')

    if (conclusion === 'success') {
      Object.assign(prLabels, {
        // We only set this label if the latest eval run was successful, because if it was not, it
        // *could* have requested reviewers. We will let the PR author fix CI first, before "escalating"
        // this PR to "needs: reviewer".
        // Since the first Eval run on a PR always sets rebuild labels, the same PR will be "recently
        // updated" for the next scheduled run. Thus, this label will still be set within a few minutes
        // after a PR is created, if required.
        // Note that a "requested reviewer" disappears once they have given a review, so we check
        // existing reviews, too.
        '9.needs: reviewer':
          !pull_request.draft &&
          pull_request.requested_reviewers.length == 0 &&
          reviews.length == 0,
      })
    }

    const artifact =
      run_id &&
      (
        await github.rest.actions.listWorkflowRunArtifacts({
          ...context.repo,
          run_id,
          name: 'comparison',
        })
      ).data.artifacts[0]

    // Instead of checking the boolean artifact.expired, we will give us a minute to
    // actually download the artifact in the next step and avoid that race condition.
    // Older PRs, where the workflow run was already eval.yml, but the artifact was not
    // called "comparison", yet, will skip the download.
    const expired =
      !artifact ||
      new Date(artifact?.expires_at ?? 0) <
        new Date(new Date().getTime() + 60 * 1000)
    log('Artifact expires at', artifact?.expires_at ?? '<n/a>')
    if (!expired) {
      stats.artifacts++

      await artifactClient.downloadArtifact(artifact.id, {
        findBy: {
          repositoryName: context.repo.repo,
          repositoryOwner: context.repo.owner,
          token: core.getInput('github-token'),
        },
        path: path.resolve(pull_number.toString()),
        expectedHash: artifact.digest,
      })

      const maintainers = new Set(
        Object.keys(
          JSON.parse(
            await readFile(`${pull_number}/maintainers.json`, 'utf-8'),
          ),
        ).map((m) => Number.parseInt(m, 10)),
      )

      const evalLabels = JSON.parse(
        await readFile(`${pull_number}/changed-paths.json`, 'utf-8'),
      ).labels

      Object.assign(
        prLabels,
        // Ignore `evalLabels` if it's an array.
        // This can happen for older eval runs, before we switched to objects.
        // The old eval labels would have been set by the eval run,
        // so now they'll be present in `before`.
        // TODO: Simplify once old eval results have expired (~2025-10)
        Array.isArray(evalLabels) ? undefined : evalLabels,
        {
          '12.approved-by: package-maintainer': Array.from(maintainers).some(
            (m) => approvals.has(m),
          ),
        },
      )
    }

    return prLabels
  }

  async function handle(item) {
    try {
      const log = (k, v, skip) => {
        core.info(`#${item.number} - ${k}: ${v}` + (skip ? ' (skipped)' : ''))
        return skip
      }

      log('Last updated at', item.updated_at)
      log('URL', item.html_url)

      const issue_number = item.number

      const itemLabels = {}

      if (item.pull_request) {
        stats.prs++
        Object.assign(itemLabels, await handlePullRequest(item))
      } else {
        stats.issues++
      }

      const latest_event_at = new Date(
        (
          await github.paginate(github.rest.issues.listEventsForTimeline, {
            ...context.repo,
            issue_number,
            per_page: 100,
          })
        )
          .filter(({ event }) =>
            [
              // These events are hand-picked from:
              // https://docs.github.com/en/rest/using-the-rest-api/issue-event-types?apiVersion=2022-11-28
              // Each of those causes a PR/issue to *not* be considered as stale anymore.
              // Most of these use created_at.
              'assigned',
              'commented', // uses updated_at, because that could be > created_at
              'committed', // uses committer.date
              'head_ref_force_pushed',
              'milestoned',
              'pinned',
              'ready_for_review',
              'renamed',
              'reopened',
              'review_dismissed',
              'review_requested',
              'reviewed', // uses submitted_at
              'unlocked',
              'unmarked_as_duplicate',
            ].includes(event),
          )
          .map(
            ({ created_at, updated_at, committer, submitted_at }) =>
              new Date(
                updated_at ?? created_at ?? submitted_at ?? committer.date,
              ),
          )
          // Reverse sort by date value. The default sort() sorts by string representation, which is bad for dates.
          .sort((a, b) => b - a)
          .at(0) ?? item.created_at,
      )
      log('latest_event_at', latest_event_at.toISOString())

      const stale_at = new Date(new Date().setDate(new Date().getDate() - 180))

      // Create a map (Label -> Boolean) of all currently set labels.
      // Each label is set to True and can be disabled later.
      const before = Object.fromEntries(
        (
          await github.paginate(github.rest.issues.listLabelsOnIssue, {
            ...context.repo,
            issue_number,
          })
        ).map(({ name }) => [name, true]),
      )

      Object.assign(itemLabels, {
        '2.status: stale':
          !before['1.severity: security'] && latest_event_at < stale_at,
      })

      const after = Object.assign({}, before, itemLabels)

      // No need for an API request, if all labels are the same.
      const hasChanges = Object.keys(after).some(
        (name) => (before[name] ?? false) != after[name],
      )
      if (log('Has changes', hasChanges, !hasChanges)) return

      // Skipping labeling on a pull_request event, because we have no privileges.
      const labels = Object.entries(after)
        .filter(([, value]) => value)
        .map(([name]) => name)
      if (log('Set labels', labels, dry)) return

      await github.rest.issues.setLabels({
        ...context.repo,
        issue_number,
        labels,
      })
    } catch (cause) {
      throw new Error(`Labeling #${item.number} failed.`, { cause })
    }
  }

  try {
    if (context.payload.pull_request) {
      await handle(context.payload.pull_request)
    } else {
      const lastRun = (
        await github.rest.actions.listWorkflowRuns({
          ...context.repo,
          workflow_id: 'labels.yml',
          event: 'schedule',
          status: 'success',
          exclude_pull_requests: true,
          per_page: 1,
        })
      ).data.workflow_runs[0]

      // Go back as far as the last successful run of this workflow to make sure
      // we are not leaving anyone behind on GHA failures.
      // Defaults to go back 1 hour on the first run.
      const cutoff = new Date(
        lastRun?.created_at ?? new Date().getTime() - 1 * 60 * 60 * 1000,
      )
      core.info('cutoff timestamp: ' + cutoff.toISOString())

      const updatedItems = await github.paginate(
        github.rest.search.issuesAndPullRequests,
        {
          q: [
            `repo:"${context.repo.owner}/${context.repo.repo}"`,
            'is:open',
            `updated:>=${cutoff.toISOString()}`,
          ].join(' AND '),
          // TODO: Remove in 2025-10, when it becomes the default.
          advanced_search: true,
        },
      )

      let cursor

      // No workflow run available the first time.
      if (lastRun) {
        // The cursor to iterate through the full list of issues and pull requests
        // is passed between jobs as an artifact.
        const artifact = (
          await github.rest.actions.listWorkflowRunArtifacts({
            ...context.repo,
            run_id: lastRun.id,
            name: 'pagination-cursor',
          })
        ).data.artifacts[0]

        // If the artifact is not available, the next iteration starts at the beginning.
        if (artifact) {
          stats.artifacts++

          const { downloadPath } = await artifactClient.downloadArtifact(
            artifact.id,
            {
              findBy: {
                repositoryName: context.repo.repo,
                repositoryOwner: context.repo.owner,
                token: core.getInput('github-token'),
              },
              expectedHash: artifact.digest,
            },
          )

          cursor = await readFile(path.resolve(downloadPath, 'cursor'), 'utf-8')
        }
      }

      // From GitHub's API docs:
      // GitHub's REST API considers every pull request an issue, but not every issue is a pull request.
      // For this reason, "Issues" endpoints may return both issues and pull requests in the response.
      // You can identify pull requests by the pull_request key.
      const allItems = await github.rest.issues.listForRepo({
        ...context.repo,
        state: 'open',
        sort: 'created',
        direction: 'asc',
        per_page: 100,
        after: cursor,
      })

      // Regex taken and comment adjusted from:
      // https://github.com/octokit/plugin-paginate-rest.js/blob/8e5da25f975d2f31dda6b8b588d71f2c768a8df2/src/iterator.ts#L36-L41
      // `allItems.headers.link` format:
      // <https://api.github.com/repositories/4542716/issues?page=3&per_page=100&after=Y3Vyc29yOnYyOpLPAAABl8qNnYDOvnSJxA%3D%3D>; rel="next",
      // <https://api.github.com/repositories/4542716/issues?page=1&per_page=100&before=Y3Vyc29yOnYyOpLPAAABl8xFV9DOvoouJg%3D%3D>; rel="prev"
      // Sets `next` to undefined if "next" URL is not present or `link` header is not set.
      const next = ((allItems.headers.link ?? '').match(
        /<([^<>]+)>;\s*rel="next"/,
      ) ?? [])[1]
      if (next) {
        cursor = new URL(next).searchParams.get('after')
        const uploadPath = path.resolve('cursor')
        await writeFile(uploadPath, cursor, 'utf-8')
        if (dry) {
          core.info(`pagination-cursor: ${cursor} (upload skipped)`)
        } else {
          // No stats.artifacts++, because this does not allow passing a custom token.
          // Thus, the upload will not happen with the app token, but the default github.token.
          await artifactClient.uploadArtifact(
            'pagination-cursor',
            [uploadPath],
            path.resolve('.'),
            {
              retentionDays: 1,
            },
          )
        }
      }

      // Some items might be in both search results, so filtering out duplicates as well.
      const items = []
        .concat(updatedItems, allItems.data)
        .filter(
          (thisItem, idx, arr) =>
            idx ==
            arr.findIndex((firstItem) => firstItem.number == thisItem.number),
        )

      ;(await Promise.allSettled(items.map(handle)))
        .filter(({ status }) => status == 'rejected')
        .map(({ reason }) =>
          core.setFailed(`${reason.message}\n${reason.cause.stack}`),
        )

      core.notice(
        `Processed ${stats.prs} PRs, ${stats.issues} Issues, made ${stats.requests + stats.artifacts} API requests and downloaded ${stats.artifacts} artifacts.`,
      )
    }
  } finally {
    clearInterval(reservoirUpdater)
  }
}
ci/labels/package-lock.json (generated, new file, 1897 lines)

File diff suppressed because it is too large
ci/labels/package.json (new file, 9 lines)

@@ -0,0 +1,9 @@
{
  "private": true,
  "type": "module",
  "dependencies": {
    "@actions/artifact": "2.3.2",
    "@actions/github": "6.0.1",
    "bottleneck": "2.19.5"
  }
}
ci/labels/run.js (new executable file, 45 lines)

@@ -0,0 +1,45 @@
#!/usr/bin/env node
import { execSync } from 'node:child_process'
import { mkdtempSync, rmSync } from 'node:fs'
import { tmpdir } from 'node:os'
import { join } from 'node:path'
import { getOctokit } from '@actions/github'
import labels from './labels.cjs'

if (process.argv.length !== 4)
  throw new Error('Call this with exactly two arguments: ./run.js OWNER REPO')
const [, , owner, repo] = process.argv

const token = execSync('gh auth token', { encoding: 'utf-8' }).trim()

const tmp = mkdtempSync(join(tmpdir(), 'labels-'))
try {
  process.env.GITHUB_WORKSPACE = tmp
  process.chdir(tmp)

  await labels({
    github: getOctokit(token),
    context: {
      payload: {},
      repo: {
        owner,
        repo,
      },
    },
    core: {
      getInput() {
        return token
      },
      error: console.error,
      info: console.log,
      notice: console.log,
      setFailed(msg) {
        console.error(msg)
        process.exitCode = 1
      },
    },
    dry: true,
  })
} finally {
  rmSync(tmp, { recursive: true })
}
ci/labels/shell.nix (new file, 23 lines)

@@ -0,0 +1,23 @@
{
  system ? builtins.currentSystem,
  pkgs ? (import ../. { inherit system; }).pkgs,
}:

pkgs.callPackage (
  {
    mkShell,
    importNpmLock,
    nodejs,
  }:
  mkShell {
    packages = [
      importNpmLock.hooks.linkNodeModulesHook
      nodejs
    ];

    npmDeps = importNpmLock.buildNodeModules {
      npmRoot = ./.;
      inherit nodejs;
    };
  }
) { }
@@ -840,7 +840,7 @@ Used with CVS. Expects `cvsRoot`, `tag`, and `hash`.
 
 ## `fetchhg` {#fetchhg}
 
-Used with Mercurial. Expects `url`, `rev`, and `hash`.
+Used with Mercurial. Expects `url`, `rev`, `hash`, overridable with [`<pkg>.overrideAttrs`](#sec-pkg-overrideAttrs).
 
 A number of fetcher functions wrap part of `fetchurl` and `fetchzip`. They are mainly convenience functions intended for commonly used destinations of source code in Nixpkgs. These wrapper fetchers are listed below.
 
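The documentation change above is easiest to read next to a concrete call. The following is only a sketch of the newly documented overridability: the URL and revision are placeholders, lib.fakeHash stands in for a real output hash, and the attribute names (url, rev, hash) are the ones accepted by fetchhg in this diff.

    { lib, fetchhg }:

    let
      src = fetchhg {
        url = "https://example.org/hg/some-project"; # placeholder URL
        rev = "1.0"; # placeholder revision
        hash = lib.fakeHash; # replace with the real hash after the first build
      };
    in
    # Because fetchhg is now built with lib.extendMkDerivation (see the
    # fetcher rewrite further down in this diff), its result is an ordinary
    # derivation, so a later revision can be pinned without calling the
    # fetcher again:
    src.overrideAttrs (old: {
      rev = "1.1"; # placeholder revision
      hash = lib.fakeHash; # replace with the real hash
    })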
@@ -42,7 +42,9 @@ let
       # Filter out version control software files/directories
       (
         baseName == ".git"
-        || type == "directory" && (baseName == ".svn" || baseName == "CVS" || baseName == ".hg")
+        ||
+          type == "directory"
+          && (baseName == ".svn" || baseName == "CVS" || baseName == ".hg" || baseName == ".jj")
       )
       ||
       # Filter out editor backup / swap files.
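For context on how the filter above is usually consumed, here is a minimal sketch; ./. is just an example source path.

    { lib }:
    # lib.cleanSource applies cleanSourceFilter, so with the change above a
    # checkout managed by Jujutsu keeps its .jj directory out of the Nix
    # store, just like .git, .svn, CVS and .hg directories already were.
    lib.cleanSource ./.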
@@ -478,7 +478,7 @@ with lib.maintainers;
       willcohen
     ];
     githubTeams = [ "geospatial" ];
-    scope = "Maintain geospatial packages.";
+    scope = "Maintain geospatial, remote sensing and OpenStreetMap software.";
     shortName = "Geospatial";
     enableFeatureFreezePing = true;
   };
@@ -24,20 +24,23 @@
       serviceConfig.Type = "oneshot";
     };
 
-    networking.primaryIPAddress = "192.168.1.${toString config.virtualisation.test.nodeNumber}";
+    networking.primaryIPAddress = lib.mkForce "192.168.1.${toString config.virtualisation.test.nodeNumber}";
 
+    virtualisation.interfaces.eth1 = {
+      vlan = 1;
+      assignIP = false;
+    };
+    virtualisation.interfaces.eth2 = {
+      vlan = 2;
+      assignIP = false;
+    };
+
-    virtualisation.vlans = [
-      1
-      2
-    ];
     networking.bridges.br0.interfaces = [
       "eth1"
       "eth2"
     ];
 
     networking.interfaces = {
-      eth1.ipv4.addresses = lib.mkForce [ ];
-      eth2.ipv4.addresses = lib.mkForce [ ];
       br0.ipv4.addresses = [
         {
           address = config.networking.primaryIPAddress;
|
|||||||
qtbase,
|
qtbase,
|
||||||
qtsvg,
|
qtsvg,
|
||||||
qttools,
|
qttools,
|
||||||
|
qtwayland,
|
||||||
qwt,
|
qwt,
|
||||||
qscintilla,
|
qscintilla,
|
||||||
kissfftFloat,
|
kissfftFloat,
|
||||||
@ -82,6 +83,7 @@ stdenv.mkDerivation rec {
|
|||||||
qtbase
|
qtbase
|
||||||
qtsvg
|
qtsvg
|
||||||
qttools
|
qttools
|
||||||
|
qtwayland
|
||||||
qwt
|
qwt
|
||||||
qscintilla
|
qscintilla
|
||||||
kissfftFloat
|
kissfftFloat
|
||||||
|
@@ -5,13 +5,13 @@
 }:
 mkLibretroCore {
   core = "mame2003-plus";
-  version = "0-unstable-2025-05-16";
+  version = "0-unstable-2025-06-28";
 
   src = fetchFromGitHub {
     owner = "libretro";
     repo = "mame2003-plus-libretro";
-    rev = "c478eae7484b76aaacc76659dd4d7b8e1163bc87";
-    hash = "sha256-l7GwSj7/A/1ZAAqWz1GtMDCl6F45GJqucDBD89yqcsU=";
+    rev = "04fb75e4f1291a490574168f3a04f9455e4a008d";
+    hash = "sha256-dMfLK47DojJwSvd7KMW0D0azgQalRW8mBJqYJHTA6ew=";
   };
 
   makefile = "Makefile";
|
|||||||
|
|
||||||
buildGoModule (finalAttrs: {
|
buildGoModule (finalAttrs: {
|
||||||
pname = "kubernetes-helm";
|
pname = "kubernetes-helm";
|
||||||
version = "3.18.3";
|
version = "3.18.4";
|
||||||
|
|
||||||
src = fetchFromGitHub {
|
src = fetchFromGitHub {
|
||||||
owner = "helm";
|
owner = "helm";
|
||||||
repo = "helm";
|
repo = "helm";
|
||||||
rev = "v${finalAttrs.version}";
|
rev = "v${finalAttrs.version}";
|
||||||
sha256 = "sha256-V5gWzgsinT0hGFDocPlljH1ls8Z0j5cz37oPrB6LI9Y=";
|
sha256 = "sha256-2xOrTguenFzX7rvwm1ojSqV6ARCUSPUs07y3ut9Teec=";
|
||||||
};
|
};
|
||||||
vendorHash = "sha256-r9DLYgEjxapUOAz+FCgYXqdE6APhGKO/YnshbLRmdrU=";
|
vendorHash = "sha256-Z3OAbuoeAtChd9Sk4bbzgwIxmFrw+/1c4zyxpNP0xXg=";
|
||||||
|
|
||||||
subPackages = [ "cmd/helm" ];
|
subPackages = [ "cmd/helm" ];
|
||||||
ldflags = [
|
ldflags = [
|
||||||
|
@ -0,0 +1,12 @@
|
|||||||
|
Remove about:buildconfig. If used as-is, it would add unnecessary runtime dependencies.
|
||||||
|
--- a/comm/mail/base/jar.mn
|
||||||
|
+++ b/comm/mail/base/jar.mn
|
||||||
|
@@ -132,8 +132,6 @@
|
||||||
|
% override chrome://mozapps/content/profile/profileDowngrade.js chrome://messenger/content/profileDowngrade.js
|
||||||
|
% override chrome://mozapps/content/profile/profileDowngrade.xhtml chrome://messenger/content/profileDowngrade.xhtml
|
||||||
|
|
||||||
|
-* content/messenger/buildconfig.html (content/buildconfig.html)
|
||||||
|
-% override chrome://global/content/buildconfig.html chrome://messenger/content/buildconfig.html
|
||||||
|
% override chrome://global/locale/appstrings.properties chrome://messenger/locale/appstrings.properties
|
||||||
|
|
||||||
|
comm.jar:
|
@@ -47,7 +47,7 @@ let
   extraPatches =
     [
       # The file to be patched is different from firefox's `no-buildconfig-ffx90.patch`.
-      ./no-buildconfig.patch
+      (if lib.versionOlder version "140" then ./no-buildconfig.patch else ./no-buildconfig-tb140.patch)
     ]
     ++ lib.optionals (lib.versionOlder version "139") [
       # clang-19 fixes for char_traits build issue

@@ -95,8 +95,8 @@ rec {
   thunderbird = thunderbird-latest;
 
   thunderbird-latest = common {
-    version = "139.0.2";
-    sha512 = "edb20c692674dc5c3ba70673f7dd03710bf7ac0ce2be614a7a4b3d2b40b4974aab2a621dd5b43720c412a590c08f8b78abeb9b61f288f3217c6a04cc1e8ff";
+    version = "140.0";
+    sha512 = "2e9a5fb44b21eba3e3295205142bfad666a65f9eea43118388968320597a940cf3c5675fbcf458fbbaa9e1bb85fe8a663feda6461b7e23f7103c5bb7a1103bd4";
 
     updateScript = callPackage ./update.nix {
       attrPath = "thunderbirdPackages.thunderbird-latest";
@@ -39,11 +39,11 @@
 
 stdenv.mkDerivation (finalAttrs: {
   pname = "gnunet";
-  version = "0.24.2";
+  version = "0.24.3";
 
   src = fetchurl {
     url = "mirror://gnu/gnunet/gnunet-${finalAttrs.version}.tar.gz";
-    hash = "sha256-Lk5KkH2UJ/DD3U1nlczq9yzPOX6dyWH2DtvvMAb2r0c=";
+    hash = "sha256-WwaJew6ESJu7Q4J47HPkNiRCsuBaY+QAI+wdDMzGxXY=";
   };
 
   enableParallelBuilding = true;
@@ -15,13 +15,13 @@
 }:
 let
   pname = "fah-client";
-  version = "8.3.18";
+  version = "8.4.9";
 
   cbangSrc = fetchFromGitHub {
     owner = "cauldrondevelopmentllc";
     repo = "cbang";
     rev = "bastet-v${version}";
-    sha256 = "sha256-BQNomjz6Bhod3FOC5iICwt1rPrZgIxGQ08yspSvAnJc=";
+    sha256 = "sha256-xApE5m8YyIFRJLQYeboWelWukuuIjHNZxPDyq0RzSL4=";
   };
 
   fah-client = stdenv.mkDerivation {

@@ -31,7 +31,7 @@ let
     owner = "FoldingAtHome";
     repo = "fah-client-bastet";
     rev = "v${version}";
-    sha256 = "sha256-lqpC1fAMFb8iX02daVre/pE0c7DkwswlFigJS3ZGEjM=";
+    sha256 = "sha256-PewXhmkTru2yJhMkenbn7pcmVsa7eomjrMvs1PUGph8=";
   };
 
   nativeBuildInputs = [
@@ -3,39 +3,48 @@
   stdenvNoCC,
   mercurial,
 }:
-{
-  name ? null,
-  url,
-  rev ? null,
-  sha256 ? null,
-  hash ? null,
-  fetchSubrepos ? false,
-  preferLocalBuild ? true,
-}:
-
-if hash != null && sha256 != null then
-  throw "Only one of sha256 or hash can be set"
-else
-  # TODO: statically check if mercurial as the https support if the url starts with https.
-  stdenvNoCC.mkDerivation {
-    name = "hg-archive" + (lib.optionalString (name != null) "-${name}");
-    builder = ./builder.sh;
-    nativeBuildInputs = [ mercurial ];
-
-    impureEnvVars = lib.fetchers.proxyImpureEnvVars;
-
-    subrepoClause = lib.optionalString fetchSubrepos "S";
-
-    outputHashAlgo = if hash != null then null else "sha256";
-    outputHashMode = "recursive";
-    outputHash =
-      if hash != null then
-        hash
-      else if sha256 != null then
-        sha256
-      else
-        lib.fakeSha256;
-
-    inherit url rev;
-    inherit preferLocalBuild;
-  }
+
+lib.extendMkDerivation {
+  constructDrv = stdenvNoCC.mkDerivation;
+
+  extendDrvArgs =
+    finalAttrs:
+    {
+      name ? null,
+      url,
+      rev ? null,
+      sha256 ? null,
+      hash ? null,
+      fetchSubrepos ? false,
+      preferLocalBuild ? true,
+    }:
+    # TODO: statically check if mercurial as the https support if the url starts with https.
+    {
+      name = "hg-archive" + (lib.optionalString (name != null) "-${name}");
+      builder = ./builder.sh;
+      nativeBuildInputs = [ mercurial ];
+
+      impureEnvVars = lib.fetchers.proxyImpureEnvVars;
+
+      subrepoClause = lib.optionalString fetchSubrepos "S";
+
+      outputHashAlgo = if finalAttrs.hash != null && finalAttrs.hash != "" then null else "sha256";
+      outputHashMode = "recursive";
+      outputHash =
+        lib.throwIf (finalAttrs.hash != null && sha256 != null) "Only one of sha256 or hash can be set"
+          (
+            if finalAttrs.hash != null then
+              finalAttrs.hash
+            else if sha256 != null then
+              sha256
+            else
+              ""
+          );
+
+      inherit url rev hash;
+      inherit preferLocalBuild;
+    };
+
+  # No ellipsis
+  inheritFunctionArgs = false;
+}
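The rewrite above follows the general lib.extendMkDerivation shape. Below is a stripped-down sketch of that shape for a hypothetical helper; the names message and the buildCommand contents are illustrative only and are not part of the fetchhg change.

    { lib, stdenvNoCC }:

    lib.extendMkDerivation {
      constructDrv = stdenvNoCC.mkDerivation;

      extendDrvArgs =
        finalAttrs:
        {
          name ? "example",
          message ? "hello",
        }:
        {
          inherit name message;
          # Reading the value back through finalAttrs is what makes a later
          # <pkg>.overrideAttrs (_: { message = "bye"; }) take effect here,
          # mirroring how the new fetchhg reads finalAttrs.hash.
          buildCommand = ''
            echo "${finalAttrs.message}" > $out
          '';
        };

      # The inner argument set has no ellipsis, so function-argument
      # introspection is disabled, as in the fetchhg rewrite.
      inheritFunctionArgs = false;
    }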
pkgs/by-name/be/beeref/package.nix (new file, 58 lines)

@@ -0,0 +1,58 @@
{
  lib,
  python3Packages,
  fetchFromGitHub,
  versionCheckHook,
  nix-update-script,
}:
python3Packages.buildPythonApplication rec {
  pname = "beeref";
  version = "0.3.3";
  pyproject = true;

  src = fetchFromGitHub {
    owner = "rbreu";
    repo = "beeref";
    tag = "v${version}";
    hash = "sha256-GtxiJKj3tlzI1kVXzJg0LNAUcodXSna17ZvAtsAEH4M=";
  };

  build-system = [ python3Packages.setuptools ];

  dependencies = with python3Packages; [
    exif
    lxml
    pyqt6
    rectangle-packer
  ];

  pythonRelaxDeps = [
    "lxml"
    "pyqt6"
    "rectangle-packer"
  ];

  pythonRemoveDeps = [ "pyqt6-qt6" ];

  pythonImportsCheck = [ "beeref" ];

  # Tests fail with "Fatal Python error: Aborted" due to PyQt6 GUI initialization issues in sandbox
  # Only versionCheckHook and pythonImportsCheck are used for basic validation
  nativeCheckInputs = [ versionCheckHook ];

  passthru.updateScript = nix-update-script { };

  meta = {
    changelog = "https://github.com/rbreu/beeref/blob/v${version}/CHANGELOG.rst";
    description = "Reference image viewer";
    homepage = "https://beeref.org";
    license = with lib.licenses; [
      cc0
      gpl3Only
    ];
    mainProgram = "beeref";
    maintainers = with lib.maintainers; [ HeitorAugustoLN ];
    platforms = lib.platforms.all;
    sourceProvenance = [ lib.sourceTypes.fromSource ];
  };
}
@ -10,17 +10,17 @@
 }:
 rustPlatform.buildRustPackage (finalAttrs: {
   pname = "biome";
-  version = "2.0.0";
+  version = "2.1.1";

   src = fetchFromGitHub {
     owner = "biomejs";
     repo = "biome";
     rev = "@biomejs/biome@${finalAttrs.version}";
-    hash = "sha256-2oHEaHKTyD+j34Or/Obb0pPGpEXEgSq6wowyYVV6DqI=";
+    hash = "sha256-ZnmMo3zUk+///avGQ497YNj9gChds4efpD88cjTr2JA=";
   };

   useFetchCargoVendor = true;
-  cargoHash = "sha256-jh7LlX7Ip2oy5NcXHfFkGeyJVGeu4Y0HqN690bok+/E=";
+  cargoHash = "sha256-WIZrxQh83tebalDMa/2/d/+xFDe7uhpTf/Gmx1Kr55E=";

   nativeBuildInputs = [ pkg-config ];

@ -66,6 +66,7 @@ rustPlatform.buildRustPackage (finalAttrs: {
   maintainers = with lib.maintainers; [
     figsoda
     isabelroses
+    wrbbz
   ];
   mainProgram = "biome";
 };
@ -13,17 +13,17 @@

 rustPlatform.buildRustPackage rec {
   pname = "tauri";
-  version = "2.5.0";
+  version = "2.6.2";

   src = fetchFromGitHub {
     owner = "tauri-apps";
     repo = "tauri";
     tag = "tauri-cli-v${version}";
-    hash = "sha256-ut5Etn5yf4X3NvFa5JCRH2sQGnC/xzaRhALoyxdjy2k=";
+    hash = "sha256-QdboIHbRKC/0k6FGKDuCA7AR3eIa7KVij3fGekD9kNk=";
   };

   useFetchCargoVendor = true;
-  cargoHash = "sha256-1YLpK2frSmdCj5aksuZhnHkAZdwHX/ZuVKXyqVJel/s=";
+  cargoHash = "sha256-GFqUQLLURfm6sRpf4MwAp89aKpTwWIlxk3NNRf9QgC0=";

   nativeBuildInputs = [ pkg-config ];

@ -7,16 +7,17 @@
   libcap,
 }:

-stdenv.mkDerivation rec {
+stdenv.mkDerivation (finalAttrs: {
   pname = "cdrtools";
   version = "3.02a09";

   src = fetchurl {
-    url = "mirror://sourceforge/cdrtools/${pname}-${version}.tar.bz2";
-    sha256 = "10ayj48jax2pvsv6j5gybwfsx7b74zdjj84znwag7wwf8n7l6a5a";
+    url = "mirror://sourceforge/cdrtools/cdrtools-${finalAttrs.version}.tar.bz2";
+    hash = "sha256-qihDj0WO8/MUt58gKdsnZ52uHV/+FWm23ld0JRGRXoE=";
   };

   nativeBuildInputs = [ m4 ];

   buildInputs = lib.optionals (!stdenv.hostPlatform.isDarwin) [
     acl
     libcap
@ -57,19 +58,19 @@ stdenv.mkDerivation rec {

   hardeningDisable = lib.optional stdenv.hostPlatform.isMusl "fortify";

-  meta = with lib; {
+  meta = {
     homepage = "https://cdrtools.sourceforge.net/private/cdrecord.html";
     description = "Highly portable CD/DVD/BluRay command line recording software";
-    license = with licenses; [
+    license = with lib.licenses; [
       cddl
       gpl2Plus
-      lgpl21
+      lgpl21Plus
     ];
-    maintainers = with maintainers; [ wegank ];
-    platforms = with platforms; linux ++ darwin;
+    maintainers = with lib.maintainers; [ wegank ];
+    platforms = with lib.platforms; linux ++ darwin;
     # Licensing issues: This package contains code licensed under CDDL, GPL2
     # and LGPL2. There is a debate regarding the legality of distributing this
     # package in binary form.
     hydraPlatforms = [ ];
   };
-}
+})
@ -6,13 +6,13 @@

 stdenvNoCC.mkDerivation (finalAttrs: {
   pname = "coconutbattery";
-  version = "4.0.2,152";
+  version = "4.0.4,166";

   src = fetchzip {
     url = "https://coconut-flavour.com/downloads/coconutBattery_${
       lib.replaceStrings [ "." "," ] [ "" "_" ] finalAttrs.version
     }.zip";
-    hash = "sha256-PNSDUp07lUx5ebcfM3WSJAfRQjeuIIy7KfY0KJ0i1AE=";
+    hash = "sha256-ZbxO6pR752pjaBocA/wqyjPCZaUxV051MaHz1gqQjSg=";
   };

   installPhase = ''
@ -7,17 +7,17 @@

 rustPlatform.buildRustPackage (finalAttrs: {
   pname = "codebook";
-  version = "0.3.3";
+  version = "0.3.5";

   src = fetchFromGitHub {
     owner = "blopker";
     repo = "codebook";
     tag = "v${finalAttrs.version}";
-    hash = "sha256-9tXzruyISC+JdzV4aPBB31OCKgZVAO0eU3SsgAZy/+I=";
+    hash = "sha256-lQfk4dJ9WFraxMDWJVSBiTGumikfHYlMBe+0NHa/3nY=";
   };

   buildAndTestSubdir = "crates/codebook-lsp";
-  cargoHash = "sha256-Bba5v0J5HRaylQRHV41LQ2My0zYybme/AHZ+HDekoHc=";
+  cargoHash = "sha256-MLd7V5Pp8yx4pFAXSjZf4KUGp964ombrnGKbrtXhC0I=";

   # Integration tests require internet access for dictionaries
   doCheck = false;
pkgs/by-name/do/dokieli/missing-hashes.json (new file, 47 lines)
@ -0,0 +1,47 @@
{
"@esbuild/aix-ppc64@npm:0.25.4": "0d8f2e73ed75c2273cfe01452ad86585d5205a3f4abaddb5a040cfdb144b83a3e6192e34ab0ceaa77ce1bb08a5d8e5feb98cc30f1f1f6e4a2dccd177fbe6ccda",
"@esbuild/android-arm64@npm:0.25.4": "a6bc6be3a75c575e21a4f5135faed0575628ad41fdfa582e5a27e992c66fa0fc7e61fb5edce0eb9dcc28bd33656945bcb48d6e4eaa3f197a56c7399e04d3dd07",
"@esbuild/android-arm@npm:0.25.4": "2840ee03556a470145a14416348c7bc8e3e93c7f4c1ab1579f0b1e428f242c9349987c74501d3740bc2679cfd86e63e6288a8ce01e81459e00f95a812fb19e13",
"@esbuild/android-x64@npm:0.25.4": "4808d422695cf0f9674bbbd50bd7773d2594563375bbbb24b774cd2060373533bdf726653b16daa73a85cc222e16ccf33bfff1fa14e365aea28293e8df8b027d",
"@esbuild/darwin-arm64@npm:0.25.4": "73a6b24a30432074dffc4b725d291a9fb30d787f28b3a17b6c99072854e3317af7ef510577b8c2732636ac4aa51465be8dd71f6443e3113944e8f8bcd8315fe2",
"@esbuild/darwin-x64@npm:0.25.4": "cdbe0cdcf8a14402cb8180b1189dd08c457ffbfc23a49f83b6a47d56802bdca5d3701b97e5fea0f11639f455822d29d74aa9a0dceb4b36b02a2bc38d3570c7b9",
"@esbuild/freebsd-arm64@npm:0.25.4": "583d6a4649eaed0db27bc06e997ec786df2f854ff7f979b8b24d76750f90f50f8b356ee159466cf08f95c6fe86fad4650817f3812b7b78901974f47ccc5a0f29",
"@esbuild/freebsd-x64@npm:0.25.4": "e1a092fa2493d1c193abb9858aa01111287457318f43f167c351fbb5baf32848a78813f2e14bbbb05124bf34f0f2fe5d54bee2514b5aed5171616a9c8e130fe8",
"@esbuild/linux-arm64@npm:0.25.4": "6ab9f163a787e1209c87ce2029ff7223645646aa05d1ef7c8d0f33b3230ccdcd62b4cf3fc05636dc4c8209cf58ccb1bd9306ce5eac76add4448c8175fc13311e",
"@esbuild/linux-arm@npm:0.25.4": "ab5c452fd502b04d88729cf8fa2b39ae866f0bd7a61e3fd1704f0c4423e4c6a2bee8ae7d03dd5942edbd68d2008e8cfc3dd18e99b5298f366bef452a4e350eb3",
"@esbuild/linux-ia32@npm:0.25.4": "ca88f8357a8af3bb7baae8f8cef3c0b19c6fcb5737f75e9d53ec05ac171516abedbfd5072c0a3e0eea5faab4f38f876ea54fa996609803af0b4fd4c8fc8fa963",
"@esbuild/linux-loong64@npm:0.25.4": "cfbf0ab09f7ad3576fc85d8d7081e454f89f2b17b4be77aa40e1e6352b79b112adb6db536817954533e7da1a3cfe17cfcb66552f485a8eae1acc54764242c165",
"@esbuild/linux-mips64el@npm:0.25.4": "0d530ac0cbf26b5c6595f3bbce0828a36cb00b42e41759e9bd98344aaeafc20c9850f3adf86447f43258bca043236cca36f1617114c65db5155c3e5dd00d083b",
"@esbuild/linux-ppc64@npm:0.25.4": "0b9663f108192b8781b4993b406ec6a6beeffba81de689f9fc3d798a012a0e1b270858c8554a4a7bc5d11f43f619b6cef96bf3abdf7ea1b435c3dd3857ba3315",
"@esbuild/linux-riscv64@npm:0.25.4": "b24ce37a0b2b573782bf3cd530c5754baea8c1fa22320f6f2baa2bb3f530056a1ac15ea75e0bb2e744b90d197c9fc305e5372a6bfa50e1c71614e2c9dced8e97",
"@esbuild/linux-s390x@npm:0.25.4": "4207474583d46be6657390b9f440ddae0e9643cea20fb9d2f80df63ed780efb07acfb34b29ddd67ded8f6bf8a7e2ce2349db61772814d862294a2e081c61e643",
"@esbuild/linux-x64@npm:0.25.4": "bf208856a7b9d9b161f02717704fcf5473055994e25d6f7ddd323cf749dc4232193d15a8a69deadf5e803c33bd9cc99e8e2f452fe9ca006c9fe258245b35745c",
"@esbuild/netbsd-arm64@npm:0.25.4": "b0c09632d59399fec5f05aa772a649f1812a8b14ec38a6c688113c10be056a7202dc2383b5b9ac74f78ba9abd5a48485aa1ca0b70faf38cce95ab23682f9b7ed",
"@esbuild/netbsd-x64@npm:0.25.4": "4c7b46aa8855d1a6d3bae0bd552aa6e8c4662d8c0cff80c33a6d94169cdeb9c576a6d7a44f9b0eacc95435faa7db779c117ac2c7aabb4eddcbd0738423d30e5e",
"@esbuild/openbsd-arm64@npm:0.25.4": "55bb5cea852eef8de2e44e0486257a3841988f8114ad1dbaf1b47c869c6e449a69f12d0d2dc82a20830496d9f06a5b1ee109d856dd1285b3a48963eedb70c5e1",
"@esbuild/openbsd-x64@npm:0.25.4": "fdfd19bd660a545a648413ba255e3e07e3a1de7b81fde1fc25ede3ff5d8c5f1cf23355655709805d207781850f850e728c91f32e90431e2125a8ef87d66a98f3",
"@esbuild/sunos-x64@npm:0.25.4": "78934b4622c935f975e96819cfcbcb223bba65a272b06213ce16177eb3afb92f5e82ec6b9ca4eabefc90bf05a69e22f1788e6b269d4150c090a20446fdbf2e3c",
"@esbuild/win32-arm64@npm:0.25.4": "d7d751f6787f2aa56b1e7158e77c81f327502f42ebd2a8685811e19ea4b7536d73a0c4bc3850a7fea0e8ea1de5f89d2b8328d8a41d79b11c03341ba1facf9360",
"@esbuild/win32-ia32@npm:0.25.4": "72d90ea6406e58f3fa423e3736aab7da3475a2b8e98713106106218536186501367e19822463d78fd0ce25a1412ab52610313f4b878fed977e179d2a8794dd1a",
"@esbuild/win32-x64@npm:0.25.4": "b929850a2cd8cec7431867dc26bb8cb9f8654ddff5a8417be176eea51fa83f7925bdbc61933575d2db5e12bbc44042ed97e30507e5d7bcf5dc590f7808adaad6",
"@rollup/rollup-android-arm-eabi@npm:4.41.0": "4bd62501cc9b5571ec314bba4ff5db2ea5216e96eb06bd7e020c4dd569968e54b23ef3d67a13c9a33514ee032ca8bf375e6de5b2be56a7f0db5c57654c0c223a",
"@rollup/rollup-android-arm64@npm:4.41.0": "d1f27fe5e0616ceb88208267ccda7aa2cf6e6390af3b32c4f2826b5ec06d8815a7b44e0d29484b92afa92a38915066f0ee5dcbe507870be0b0849a2699962e82",
"@rollup/rollup-darwin-arm64@npm:4.41.0": "431ff15144dd58823b17754f16e484e05d16a49818efeb725220497cef265da2080611aebb19eb4283f48085dc7e3fd504f6c7493945f85c2d8887cabadcb1e2",
"@rollup/rollup-darwin-x64@npm:4.41.0": "715a2e161a0ea6d1d92ad39fd97cae3ec01c8ae9a0cc6ae0a533d451c187968533bafba0c4695947a4bcd1249dca9478fdb1ebac35f649f0995d34922d06a486",
"@rollup/rollup-freebsd-arm64@npm:4.41.0": "b12cf55a85baf59ddb9a03095b13255fea65a4875c38af292dc92c8ef7f1e811ab5e5e79b423ebdb1936bb446fec004a7a42bbf376f94fa94126a2a64901ea6d",
"@rollup/rollup-freebsd-x64@npm:4.41.0": "9bf78726c78eeb0c1212ec3e3605744a9b1087fd0e1469d0f54fdd08b4dc81302f1621fc31ea2249d8fb944407343ec7d2857ef57a06d56c6d287d2e3574d06d",
"@rollup/rollup-linux-arm-gnueabihf@npm:4.41.0": "b117d0f6f4aaee502b27930d3b3523171528bfbbd31a1791ac7ec36f7aebd8b6cf7f8bd40a3adba8d7048c0a11e9cc665ad1c785740a4422d9b3353dd3f67886",
"@rollup/rollup-linux-arm-musleabihf@npm:4.41.0": "7fafe40a34e0095d96f8b610e6d0052d105c734121d3f79c5d44eccee0006d8ccb69ae3bfb8c8cf6f29fc5460347601eaa6a118e51f783dc30031ee8b9998be0",
"@rollup/rollup-linux-arm64-gnu@npm:4.41.0": "9f0cd0828e56f7d15975767d7f0ac7163614df9838e37baa716b628ab780d879df13ed84f5ed8de2f324dffa35ee7952fdd4de73b8598de3ccba307c7b2a4c44",
"@rollup/rollup-linux-arm64-musl@npm:4.41.0": "7bbd1323a9acc4e7803561f5c19e5d38f3db8d58e0eafcfc8ea08c3a3fdadd4ebc85c3c535a1b9d94f6f3ff62e6c69160d7769887bbda8324f5030f364d88c1a",
"@rollup/rollup-linux-loongarch64-gnu@npm:4.41.0": "e3322d87f3ab12c5dbed3ffdcc6c4a341e4d8714454758707af5ec4460003b00d18d44b210313f4e6233f505a30da4af5f46480bd983454c5595f6970c752d0e",
"@rollup/rollup-linux-powerpc64le-gnu@npm:4.41.0": "3b7f57d9ce5ffa72f79075261c052017e80927f513926526902b362c1f328ba16b9dd62938e084e394f9dbb77deccaf32dc330434581384fb18bd1d2f2384a43",
"@rollup/rollup-linux-riscv64-gnu@npm:4.41.0": "1f64fa850a0952f62e77a5096c6cd81b32710d9d9f8a699fc4d4a39d086c6aff13675f67c359823eed016c1967b157861e96847eb5e8b0663f5db25d42ae0fc3",
"@rollup/rollup-linux-riscv64-musl@npm:4.41.0": "e6588e3a75b87a602ed1ece39680bc3b67792e53cc9835757b6b91ce9a728aaab64fe6b25b83aef5f05933addef49ae5f7e80ea206b0c9be488c8d896087f61c",
"@rollup/rollup-linux-s390x-gnu@npm:4.41.0": "c664fb7c55d800371fa7ab549242f5e46ab52ec9eadbf998beaf745712e6e00a27aa7074b32098fc0a571acb29be403c206e71c4ce7cfccf00783985eb0c6554",
"@rollup/rollup-linux-x64-gnu@npm:4.41.0": "cf9337ee96b3cd3d7165cab9d8514fa6022607e99bf95ba147512bb62ad6af5000971ddcac04c65e7329eb63f505328b5a4f33e78e46f4ebf132e333938678b4",
"@rollup/rollup-linux-x64-musl@npm:4.41.0": "357fc65d5db7e66d80a4ef9d4820430db8ae14e0cf7d3f9c78dc2e2c2d9902d680c763f6695ca32703669009dd26f6f969b1ccfd4318eafece4ea754fc014ee8",
"@rollup/rollup-win32-arm64-msvc@npm:4.41.0": "abb25ae4c9566d0cba63cfe30d59a141b3c2b84d6742f1ca80bdad62919d3cd0f9237e4ed76bdeac130de2071df1290a5985f6e48f02cba7713cedebfb5c22f8",
"@rollup/rollup-win32-ia32-msvc@npm:4.41.0": "914685bea8d942d350528ce7bd36db3b223ec0cb42cdbe2a7368359fab43a7b90bc37cedaa134c43be6a0c068187f130d301d7a0d6301758095b283c00205969",
"@rollup/rollup-win32-x64-msvc@npm:4.41.0": "60dd4af8bccaa9148ab8be40d650e3254fc584500927951468b5ec533a59578cdd2269c4c33139fc24e2c890c16f34b9df98dba70d0359e9a500264c620e3a41"
}
@ -1,69 +1,68 @@
 {
   lib,
-  stdenv,
   fetchFromGitHub,
-  fetchYarnDeps,
-  yarnConfigHook,
   makeWrapper,
-  yarnBuildHook,
-  yarnInstallHook,
-  nodejs,
+  nix-update-script,
+  nodePackages,
+  stdenv,
   xsel,
+  yarn-berry_4,
 }:
+let
+  yarn-berry = yarn-berry_4;
+in
 stdenv.mkDerivation (finalAttrs: {
   pname = "dokieli";
-  version = "0-unstable-2024-12-12";
+  version = "0-unstable-2025-07-06";

-  # Can't update newer versions currently because newer versions require yarn-berry, and it's not in nixpkgs, yet.
   src = fetchFromGitHub {
-    owner = "linkeddata";
+    owner = "dokieli";
     repo = "dokieli";
-    rev = "d8dc72c81b84ec12f791892a6377a7f6ec46ed3b";
-    hash = "sha256-CzSyQVyeJVOP8NCsa7ST3atG87V1KPSBzTRi0brMFYw=";
+    rev = "825d3aa5d754b512f8cbde2cc504b1d00788022c";
+    hash = "sha256-ZjsRG+qIzRb+8FM7a4qSkahxQeq1ayXSgD/oo7jdi6w=";
   };

-  offlineCache = fetchYarnDeps {
-    yarnLock = "${finalAttrs.src}/yarn.lock";
-    hash =
-      if stdenv.hostPlatform.isDarwin then
-        "sha256-bw5HszcHZ60qgYgm4qfhZEYXjJAQ2DXhWU0Reqb9VpQ="
-      else
-        "sha256-rwHBDBWZe4cdTyL7lNkB4nlpd5MWzbTU6kzdLBWcq0M=";
+  missingHashes = ./missing-hashes.json;
+  offlineCache = yarn-berry.fetchYarnBerryDeps {
+    inherit (finalAttrs) src missingHashes;
+    hash = "sha256-4SK1ecjEnnaow5Z2biCPaHirpX6J/5cytQWWicPgmB0=";
   };

   installPhase = ''
+    runHook preInstall
+
     mkdir -p $out/bin
     cp -r * $out
+
+    runHook postInstall
   '';

   nativeBuildInputs = [
     makeWrapper
-    yarnConfigHook
-    yarnBuildHook
-    yarnInstallHook
-    # Needed for executing package.json scripts
-    nodejs
+    yarn-berry.yarnBerryConfigHook
   ];

-  postFixup = ''
-    makeWrapper ${nodejs}/bin/npx $out/bin/dokieli \
-      --prefix PATH : ${
-        lib.makeBinPath ([
-          nodejs
-          xsel
-        ])
-      } \
-      --add-flags serve \
-      --chdir $out/deps/dokieli
-  '';
+  postFixup =
+    let
+      serve = lib.getExe' nodePackages.serve "serve";
+    in
+    ''
+      makeWrapper ${serve} $out/bin/dokieli \
+        --prefix PATH : ${lib.makeBinPath [ xsel ]} \
+        --chdir $out
+    '';

-  doDist = false;
+  passthru.updateScript = nix-update-script {
+    extraArgs = [ "--version=branch" ];
+  };

   meta = {
-    description = "dokieli is a clientside editor for decentralised article publishing, annotations and social interactions";
+    description = "Clientside editor for decentralised article publishing, annotations and social interactions";
     homepage = "https://github.com/linkeddata/dokieli";
-    license = lib.licenses.mit;
+    license = with lib.licenses; [
+      cc-by-40
+      mit
+    ];
     platforms = lib.platforms.all;
     maintainers = with lib.maintainers; [ shogo ];
     teams = [ lib.teams.ngi ];
@ -1,3 +1,3 @@
 {
-  "yarn_offline_cache_hash": "sha256-OFzs3151Y5GGMS9VfV/Y7yUSqTOCns4QE+S/Cn3TN90="
+  "yarn_offline_cache_hash": "sha256-M0K26W917xtv2KxyATI2pCsyDo8ybjiFqLHPlXY9c8g="
 }
@ -1,6 +1,6 @@
 {
   "name": "draupnir",
-  "version": "2.4.1",
+  "version": "2.5.0",
   "description": "A moderation tool for Matrix",
   "main": "lib/index.js",
   "repository": "https://github.com/the-draupnir-project/Draupnir.git",
@ -63,7 +63,7 @@
   "jsdom": "^24.0.0",
   "matrix-appservice-bridge": "^10.3.1",
   "matrix-bot-sdk": "npm:@vector-im/matrix-bot-sdk@^0.7.1-element.6",
-  "matrix-protection-suite": "npm:@gnuxie/matrix-protection-suite@3.6.2",
+  "matrix-protection-suite": "npm:@gnuxie/matrix-protection-suite@3.7.1",
   "matrix-protection-suite-for-matrix-bot-sdk": "npm:@gnuxie/matrix-protection-suite-for-matrix-bot-sdk@3.6.6",
   "pg": "^8.8.0",
   "yaml": "^2.3.2"
@ -22,13 +22,13 @@ let
 in
 mkYarnPackage rec {
   pname = "draupnir";
-  version = "2.4.1";
+  version = "2.5.0";

   src = fetchFromGitHub {
     owner = "the-draupnir-project";
     repo = "Draupnir";
     tag = "v${version}";
-    hash = "sha256-mH5SzguZq1H+njHxWLfjXF3y3/v34KUg45ELL+femNw=";
+    hash = "sha256-au0qYS646MAXyfQTk6gJem3geTecgDwzZl+87/6VE5A=";
   };

   nativeBuildInputs = [
@ -24,13 +24,13 @@ let
 in
 buildGoModule rec {
   pname = "faas-cli";
-  version = "0.17.4";
+  version = "0.17.5";

   src = fetchFromGitHub {
     owner = "openfaas";
     repo = "faas-cli";
     rev = version;
-    sha256 = "sha256-GM2gRfrdfUhfBn2atG21H7bNbW1HtgwQ7d7kMXvyMAs=";
+    sha256 = "sha256-/gDst9XDh63j2ZfLfioIKcIiXbOGyJWvYIbQwbV6xOc=";
   };

   vendorHash = null;
@ -11,14 +11,14 @@

 stdenv.mkDerivation (finalAttrs: {
   pname = "feedbackd-device-themes";
-  version = "0.8.3";
+  version = "0.8.4";

   src = fetchFromGitLab {
     domain = "gitlab.freedesktop.org";
     owner = "agx";
     repo = "feedbackd-device-themes";
     rev = "v${finalAttrs.version}";
-    hash = "sha256-z+A2G1g2gNfC0cVWUO/LT3QVvXeotcBd+5UEpEtcPfY=";
+    hash = "sha256-eLR1BnPpIdo6udQsLcLn4hK2TTRYUIh5vwAg+rdMLKU=";
   };

   nativeBuildInputs = [
@ -10,16 +10,16 @@

 buildNpmPackage rec {
   pname = "firebase-tools";
-  version = "14.7.0";
+  version = "14.9.0";

   src = fetchFromGitHub {
     owner = "firebase";
     repo = "firebase-tools";
     tag = "v${version}";
-    hash = "sha256-sZQoP6XsJkDI6I41eQv678aDtHhaRX6u03z/D6S7nBQ=";
+    hash = "sha256-LUPG0FiwOvC+4ZXkrGGHnayusg06QvIw96Jg0ug+UBQ=";
   };

-  npmDepsHash = "sha256-QrJgImV7YCzME/ZwzwJP3FFvonmvCSm0hd9fLc8gyyk=";
+  npmDepsHash = "sha256-g6tcBNzCr5lOR874qAGPAuG8WBManHYY40GKqsrBEJM=";

   postPatch = ''
     ln -s npm-shrinkwrap.json package-lock.json
@ -114,11 +114,15 @@ freecad-utils.makeCustomizable (
       url = "https://github.com/FreeCAD/FreeCAD/commit/8e04c0a3dd9435df0c2dec813b17d02f7b723b19.patch?full_index=1";
       hash = "sha256-H6WbJFTY5/IqEdoi5N+7D4A6pVAmZR4D+SqDglwS18c=";
     })
+    # https://github.com/FreeCAD/FreeCAD/pull/22221
+    (fetchpatch {
+      url = "https://github.com/FreeCAD/FreeCAD/commit/3d2b7dc9c7ac898b30fe469b7cbd424ed1bca0a2.patch?full_index=1";
+      hash = "sha256-XCQdv/+dYdJ/ptA2VKrD63qYILyaP276ISMkmWLtT30=";
+    })
   ];

   cmakeFlags = [
     "-Wno-dev" # turns off warnings which otherwise makes it hard to see what is going on
-    "-DBUILD_FLAT_MESH:BOOL=ON"
     "-DBUILD_DRAWING=ON"
     "-DBUILD_FLAT_MESH:BOOL=ON"
     "-DINSTALL_TO_SITEPACKAGES=OFF"
@ -3,16 +3,17 @@
   fetchFromGitHub,
   buildGoModule,
   nixosTests,
+  nix-update-script,
 }:

-buildGoModule rec {
+buildGoModule (finalAttrs: {
   pname = "galene";
   version = "0.96.3";

   src = fetchFromGitHub {
     owner = "jech";
     repo = "galene";
-    rev = "galene-${version}";
+    tag = "galene-${finalAttrs.version}";
     hash = "sha256-loAiPfwTyPi4BKn4TNgVVde2mO119h443A+HwlLvi4g=";
   };

@ -36,12 +37,15 @@ buildGoModule rec {

   passthru = {
     tests.vm = nixosTests.galene.basic;
+    updateScript = nix-update-script {
+      extraArgs = [ "--version-regex=galene-(.*)" ];
+    };
   };

   meta = {
     description = "Videoconferencing server that is easy to deploy, written in Go";
     homepage = "https://github.com/jech/galene";
-    changelog = "https://github.com/jech/galene/raw/galene-${version}/CHANGES";
+    changelog = "https://github.com/jech/galene/raw/${finalAttrs.src.tag}/CHANGES";
     license = lib.licenses.mit;
     platforms = lib.platforms.linux;
     teams = [ lib.teams.ngi ];
@ -50,4 +54,4 @@ buildGoModule rec {
     erdnaxe
   ];
 };
-}
+})
@ -15,13 +15,13 @@

 stdenv.mkDerivation rec {
   pname = "geonkick";
-  version = "3.5.2";
+  version = "3.6.0";

   src = fetchFromGitLab {
     owner = "Geonkick-Synthesizer";
     repo = "geonkick";
     rev = "v${version}";
-    hash = "sha256-QtBzrYW/yYnWSXyHV63gKzgdAR5kamij9Z0g+15CTPg=";
+    hash = "sha256-lTJ5O2UfXRUELErbMS9fCdCUgxhBxKRv/nIK8w5Fkiw=";
   };

   nativeBuildInputs = [
@ -10,16 +10,16 @@
 }:
 buildGoModule rec {
   pname = "goreleaser";
-  version = "2.10.2";
+  version = "2.11.0";

   src = fetchFromGitHub {
     owner = "goreleaser";
     repo = "goreleaser";
     rev = "v${version}";
-    hash = "sha256-MryN6Qm/cedL/aIpf8Pxb4nPdH4iw9aW0mhEv5jBvUo=";
+    hash = "sha256-YneVW3oyDGhyliQefpqEsS9VLDGm+/chSxCyDifNrtk=";
   };

-  vendorHash = "sha256-s23FmEbtITW/PD64f8qecClC7SDp08VhC4bRpbgVrb0=";
+  vendorHash = "sha256-+6Icafqyig0xm5qZARn1a7Yu7UGi6ejJmzyvOIHsfH0=";

   ldflags = [
     "-s"
@ -1,6 +1,6 @@
 {
   lib,
-  fetchFromSourcehut,
+  fetchFromGitLab,
   libjpeg,
   libpng,
   meson,
@ -16,15 +16,21 @@

 stdenv.mkDerivation (finalAttrs: {
   pname = "grim";
-  version = "1.4.1";
+  version = "1.5.0";

-  src = fetchFromSourcehut {
-    owner = "~emersion";
+  src = fetchFromGitLab {
+    domain = "gitlab.freedesktop.org";
+    owner = "emersion";
     repo = "grim";
     rev = "v${finalAttrs.version}";
-    hash = "sha256-5csJqRLNqhyeXR4dEQtnPUSwuZ8oY+BIt6AVICkm1+o=";
+    hash = "sha256-oPo6zrS3gCnviIK0+gPvtal+6c7fNFWtXnAA0YfaS+U=";
   };

+  depsBuildBuild = [
+    # To find wayland-scanner
+    pkg-config
+  ];
+
   nativeBuildInputs = [
     meson
     ninja
@ -46,7 +52,7 @@ stdenv.mkDerivation (finalAttrs: {
   strictDeps = true;

   meta = {
-    homepage = "https://sr.ht/~emersion/grim";
+    homepage = "https://gitlab.freedesktop.org/emersion/grim";
     description = "Grab images from a Wayland compositor";
     license = lib.licenses.mit;
     mainProgram = "grim";
@ -2,7 +2,6 @@
   stdenv,
   lib,
   fetchFromGitHub,
-  fetchpatch,
   cmake,
   hidapi,
   udevCheckHook,
@ -10,22 +9,15 @@

 stdenv.mkDerivation rec {
   pname = "headsetcontrol";
-  version = "3.0.0";
+  version = "3.1.0";

   src = fetchFromGitHub {
     owner = "Sapd";
     repo = "HeadsetControl";
     rev = version;
-    sha256 = "sha256-N1c94iAJgCPhGNDCGjMINg0AL2wPX5gVIsJ+pzn/l9Y=";
+    sha256 = "sha256-9LUqYV0MMTtlFYZCEn81kML5F46GDYWYwoKpO0UORcQ=";
   };

-  patches = [
-    (fetchpatch {
-      url = "https://patch-diff.githubusercontent.com/raw/Sapd/HeadsetControl/pull/337.patch";
-      hash = "sha256-18w9BQsMljEA/eY3rnosHvKwhiaF79TrWH/ayuyZMrM=";
-    })
-  ];
-
   nativeBuildInputs = [
     cmake
     udevCheckHook
@ -10,13 +10,13 @@

 stdenv.mkDerivation (finalAttrs: {
   pname = "hyprutils";
-  version = "0.7.1";
+  version = "0.8.1";

   src = fetchFromGitHub {
     owner = "hyprwm";
     repo = "hyprutils";
     tag = "v${finalAttrs.version}";
-    hash = "sha256-W9G9bb0zRYDBRseHbVez0J8qVpD5QbizX67H/vsudhM=";
+    hash = "sha256-F2SV9WGqgtRsXIdUrl3sRe0wXlQD+kRRZcSfbepjPJY=";
   };

   nativeBuildInputs = [
@ -5,6 +5,7 @@
   autoconf,
   cmake,
   hdf5,
+  versionCheckHook,
   zlib,
   nix-update-script,
 }:
@ -32,17 +33,28 @@ stdenv.mkDerivation rec {

   cmakeFlags = [ "-DUSE_HDF5=ON" ];

-  # Parallel build fails in some cases: https://github.com/pachterlab/kallisto/issues/160
   enableParallelBuilding = false;

+  nativeInstallCheckInputs = [ versionCheckHook ];
+  versionCheckProgramArg = "version";
+  doInstallCheck = true;
+
   passthru.updateScript = nix-update-script { };

   meta = with lib; {
-    description = "Program for quantifying abundances of transcripts from RNA-Seq data";
+    description = "Near-optimal quantification of transcripts from RNA-seq data";
+    longDescription = ''
+      kallisto is a program for quantifying abundances of transcripts
+      from RNA sequencing data, or more generally of target sequences
+      using high-throughput sequencing reads. It is based on the novel
+      idea of pseudoalignment for rapidly determining the
+      compatibility of reads with targets, without the need for
+      alignment.
+    '';
     mainProgram = "kallisto";
     homepage = "https://pachterlab.github.io/kallisto";
     license = licenses.bsd2;
     platforms = platforms.linux;
-    maintainers = with maintainers; [ arcadio ];
+    maintainers = [ maintainers.arcadio ];
   };
 }
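Editorial note on the versionCheckHook addition above: with versionCheckProgramArg = "version", the hook effectively runs the installed binary with that argument during the install check and fails the build if the package version does not appear in the output. Roughly, the effect is comparable to the explicit phase below; this is a simplified sketch of the behaviour, not the hook's actual implementation:

    doInstallCheck = true;
    installCheckPhase = ''
      runHook preInstallCheck
      # fail the build if "kallisto version" does not mention the packaged version
      "$out/bin/kallisto" version | grep -F "${version}"
      runHook postInstallCheck
    '';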
@ -9,16 +9,16 @@

 buildGoModule rec {
   pname = "karmor";
-  version = "1.4.3";
+  version = "1.4.4";

   src = fetchFromGitHub {
     owner = "kubearmor";
     repo = "kubearmor-client";
     rev = "v${version}";
-    hash = "sha256-g60T9npfU1uwQvs95ntRrwwfxXeC67C0iF867ki3WAI=";
+    hash = "sha256-BlMWbd+c/dW3nrG9mQn4lfyXvauJ4GCcJypp+SMfAuY=";
   };

-  vendorHash = "sha256-4F/q6vYOGtLef+rrJXKhLwjM71NMNI4es4dKe1pohZU=";
+  vendorHash = "sha256-SZAJsstFUtZi+/sSkgmvFSjd4115YKsPuPEksWxE9D0=";

   nativeBuildInputs = [ installShellFiles ];

@ -7,14 +7,10 @@
   valgrind,
   librandombytes,
   libcpucycles,
-  lib25519,
 }:
-let
-  version = "20241004";
-in
 stdenv.mkDerivation (finalAttrs: {
   pname = "lib25519";
-  inherit version;
+  version = "20241004";

   src = fetchzip {
     url = "https://lib25519.cr.yp.to/lib25519-${finalAttrs.version}.tar.gz";
@ -64,9 +60,9 @@ stdenv.mkDerivation (finalAttrs: {
   passthru = {
     updateScript = ./update.sh;
     tests.version = testers.testVersion {
-      package = lib25519;
+      package = finalAttrs.finalPackage;
       command = "lib25519-test | head -n 2 | grep version";
-      version = "lib25519 version ${version}";
+      version = "lib25519 version ${finalAttrs.version}";
     };
   };

@ -87,6 +83,7 @@ stdenv.mkDerivation (finalAttrs: {
       imadnyc
       jleightcap
     ];
+    teams = with lib.teams; [ ngi ];
     # This supports whatever platforms libcpucycles supports
     inherit (libcpucycles.meta) platforms;
   };
@ -9,14 +9,14 @@

 python3.pkgs.buildPythonApplication rec {
   pname = "marge-bot";
-  version = "0.16.0";
+  version = "0.16.1";
   pyproject = true;

   src = fetchFromGitLab {
     owner = "marge-org";
     repo = "marge-bot";
     rev = version;
-    hash = "sha256-UgdbeJegeTFP6YF6oMxAeQDI9AO2k6yk4WAFZ/Xspu8=";
+    hash = "sha256-I1yQwTqHFynEbjF0BpGBHek6qgtTzZztDAqdl+2j2U0=";
   };

   nativeBuildInputs = [
@ -6,17 +6,17 @@

 rustPlatform.buildRustPackage rec {
   pname = "mdbook-alerts";
-  version = "0.7.0";
+  version = "0.8.0";

   src = fetchFromGitHub {
     owner = "lambdalisue";
     repo = "rs-mdbook-alerts";
     rev = "v${version}";
-    hash = "sha256-MZS9TESITj3tzdaXYu5S2QUCW7cZuTpH1skFKeVi/sQ=";
+    hash = "sha256-82WbO/j9F0WKGkSkTf27dGdxdHF3OONFvn68ujWMwSM=";
   };

   useFetchCargoVendor = true;
-  cargoHash = "sha256-ZL8M9Ces8qs8ClayjJTt5FvlG+WcRpJLuZBNATEbLtQ=";
+  cargoHash = "sha256-A+jodjynhQ6WFp/Ci5Jk0+baDx6QzJ8u+UMmLugtJUc=";

   meta = {
     description = "Preprocessor for mdbook to support the inclusion of Markdown alerts";
@ -179,11 +179,11 @@ in

 stdenvNoCC.mkDerivation (finalAttrs: {
   pname = "microsoft-edge";
-  version = "138.0.3351.65";
+  version = "138.0.3351.77";

   src = fetchurl {
     url = "https://packages.microsoft.com/repos/edge/pool/main/m/microsoft-edge-stable/microsoft-edge-stable_${finalAttrs.version}-1_amd64.deb";
-    hash = "sha256-+8bV3pwoYvp4e0eJHj5/NSu15QiFwVJuGxFJkS76gwI=";
+    hash = "sha256-8D0aYlzkp5ol7s6m1342BJONiiQgyZeClREFw0mZqHY=";
   };

   # With strictDeps on, some shebangs were not being patched correctly
@ -2,16 +2,17 @@
   lib,
   buildGoModule,
   fetchFromGitHub,
+  nix-update-script,
 }:

-buildGoModule rec {
+buildGoModule (finalAttrs: {
   pname = "mox";
   version = "0.0.15";

   src = fetchFromGitHub {
     owner = "mjl-";
     repo = "mox";
-    tag = "v${version}";
+    tag = "v${finalAttrs.version}";
     hash = "sha256-apIV+nClXTUbmCssnvgG9UwpTNTHTe6FgLCxp14/s0A=";
   };

@ -23,10 +24,12 @@ buildGoModule rec {
   ldflags = [
     "-s"
     "-w"
-    "-X github.com/mjl-/mox/moxvar.Version=${version}"
-    "-X github.com/mjl-/mox/moxvar.VersionBare=${version}"
+    "-X github.com/mjl-/mox/moxvar.Version=${finalAttrs.version}"
+    "-X github.com/mjl-/mox/moxvar.VersionBare=${finalAttrs.version}"
   ];

+  passthru.updateScript = nix-update-script { };
+
   meta = {
     description = "Modern full-featured open source secure mail server for low-maintenance self-hosted email";
     mainProgram = "mox";
@ -36,5 +39,6 @@ buildGoModule rec {
     dit7ya
     kotatsuyaki
   ];
+  teams = with lib.teams; [ ngi ];
 };
-}
+})
@ -10,16 +10,16 @@

 buildGoModule rec {
   pname = "myks";
-  version = "4.8.4";
+  version = "4.11.0";

   src = fetchFromGitHub {
     owner = "mykso";
     repo = "myks";
     tag = "v${version}";
-    hash = "sha256-WMedmDw4AlM8XAwbnFBiNFHd9ocBJhXq8qVQTOm9aDI=";
+    hash = "sha256-NXb+7JEk5w1hq7ML6LQD1QXbnOqp+BJ9K+uqrwUsrDs=";
   };

-  vendorHash = "sha256-IZopDehj8y7I4EDkiWGod5bexj8vzIS7eLx22UscXOs=";
+  vendorHash = "sha256-iAihVBR63OcORNAW7szKPOMyc+X1X76u++j5HLHU35I=";

   subPackages = ".";

@ -16,6 +16,23 @@ let
   py = python3.override {
     self = py;
     packageOverrides = lib.foldr lib.composeExtensions (self: super: { }) ([
+      (
+
+        self: super: {
+          # fix tornado.httputil.HTTPInputError: Multiple host headers not allowed
+          tornado = super.tornado.overridePythonAttrs (oldAttrs: {
+            version = "6.4.2";
+            format = "setuptools";
+            pyproject = null;
+            src = fetchFromGitHub {
+              owner = "tornadoweb";
+              repo = "tornado";
+              tag = "v6.4.2";
+              hash = "sha256-qgJh8pnC1ALF8KxhAYkZFAc0DE6jHVB8R/ERJFL4OFc=";
+            };
+            doCheck = false;
+          });
+        })
       # Built-in dependency
       (self: super: {
         octoprint-filecheck = self.buildPythonPackage rec {
@ -1,12 +1,12 @@
 diff --git a/src/linker.cpp b/src/linker.cpp
-index ec165ef7d..91a28b8fc 100644
+index 2210c13..803b654 100644
 --- a/src/linker.cpp
 +++ b/src/linker.cpp
-@@ -769,18 +769,6 @@ try_cross_linking:;
-	gbString platform_lib_str = gb_string_make(heap_allocator(), "");
-	defer (gb_string_free(platform_lib_str));
-	if (build_context.metrics.os == TargetOs_darwin) {
--	platform_lib_str = gb_string_appendc(platform_lib_str, "-Wl,-syslibroot /Library/Developer/CommandLineTools/SDKs/MacOSX.sdk -L/usr/local/lib ");
+@@ -779,18 +779,6 @@ try_cross_linking:;
+	}
+	platform_lib_str = gb_string_append_fmt(platform_lib_str, "--sysroot %s ", darwin_sdk_path);
+-	platform_lib_str = gb_string_appendc(platform_lib_str, "-L/usr/local/lib ");
 -
 -	// Homebrew's default library path, checking if it exists to avoid linking warnings.
 -	if (gb_file_exists("/opt/homebrew/lib")) {
@ -12,13 +12,13 @@ let
 in
 stdenv.mkDerivation (finalAttrs: {
   pname = "odin";
-  version = "dev-2025-04";
+  version = "dev-2025-06";

   src = fetchFromGitHub {
     owner = "odin-lang";
     repo = "Odin";
     tag = finalAttrs.version;
-    hash = "sha256-dVC7MgaNdgKy3X9OE5ZcNCPnuDwqXszX9iAoUglfz2k=";
+    hash = "sha256-Dhy62+ccIjXUL/lK8IQ+vvGEsTrd153tPp4WIdl3rh4=";
   };

   patches = [
@ -25,6 +25,12 @@ rustPlatform.buildRustPackage (finalAttrs: {
     hash = "sha256-utbey8DFXUWU6u2H2unNjCHE3/bwhPdrxAOApC+unGA=";
   };

+  # Avoiding optimizations for reproducibility
+  prePatch = ''
+    substituteInPlace .cargo/config.toml \
+      --replace-fail '"-C", "target-cpu=native", ' ""
+  '';
+
   cargoHash = "sha256-rwf9jdr+RDpUcTEG7Xhpph0zuyz6tdFx6hWEZRuxkTY=";

   nativeBuildInputs = [
@ -9,14 +9,14 @@

 python3Packages.buildPythonApplication rec {
   pname = "oterm";
-  version = "0.12.1";
+  version = "0.14.1";
   pyproject = true;

   src = fetchFromGitHub {
     owner = "ggozad";
     repo = "oterm";
     tag = version;
-    hash = "sha256-6y73Lh3cV/fnvpZWzfyD4CletC4UV2zl+I7l88BYPIk=";
+    hash = "sha256-Fkr0oabFSkgUkqO3cqZANP3xUSD0uO2F8I/VEzkhvis=";
   };

   pythonRelaxDeps = [
@ -9,17 +9,17 @@

 rustPlatform.buildRustPackage rec {
   pname = "pueue";
-  version = "4.0.0";
+  version = "4.0.1";

   src = fetchFromGitHub {
     owner = "Nukesor";
     repo = "pueue";
     rev = "v${version}";
-    hash = "sha256-TDxTj7VGzJzd6RWyVbe2ubpVS57bqq7OVvi23ZHmYDM=";
+    hash = "sha256-m6mXq62imJ9yVpH6M8O3I7Z4FDdnEtp9ADfMjD4RDM4=";
   };

   useFetchCargoVendor = true;
-  cargoHash = "sha256-R94D9/J+Zl86Rb4+5O2Hp9GmcwnRt+0wJ56CHFoy/zg=";
+  cargoHash = "sha256-E2mLpRCffFySzBZVtxS4YZPuTRhjU4LrFEfC1dbF6ug=";

   nativeBuildInputs =
     [
@ -4,7 +4,7 @@
   buildNpmPackage,
   fetchFromGitHub,
   makeWrapper,
-  electron_36,
+  electron_37,
   vulkan-loader,
   makeDesktopItem,
   copyDesktopItems,
@ -18,20 +18,20 @@
 }:

 let
-  electron = electron_36;
+  electron = electron_37;
 in
 buildNpmPackage (finalAttrs: {
   pname = "shogihome";
-  version = "1.23.2";
+  version = "1.24.0";

   src = fetchFromGitHub {
     owner = "sunfish-shogi";
     repo = "shogihome";
     tag = "v${finalAttrs.version}";
-    hash = "sha256-tZw9iEhZ5ss+mv/WUFaj+xQ6GP4GAHq+PvBOv6F5tgM=";
+    hash = "sha256-8tHKU/6jEyt3o7kbYyhYfQY7luLbX01U7EtK4eEHh+M=";
   };

-  npmDepsHash = "sha256-dx66k82o+TWrrK9xBHPbnudDn0CG8mM7c1xeoSAM4Fs=";
+  npmDepsHash = "sha256-QARCfpeC19iCpnllHlzAS3Ey9Sj35QQCsPA+/JdzLjM=";

   postPatch = ''
     substituteInPlace package.json \
@ -17,13 +17,13 @@

 stdenv.mkDerivation (finalAttrs: {
   pname = "sidplayfp";
-  version = "2.14.1";
+  version = "2.15.0";

   src = fetchFromGitHub {
     owner = "libsidplayfp";
     repo = "sidplayfp";
     rev = "v${finalAttrs.version}";
-    hash = "sha256-t2bZcslHPRxrTt3cDW6mTXK35/MMw1u1CnVXTzN/UEs=";
+    hash = "sha256-piPvNXEmjqz7r0+Uft6cmSJsWJdxwMJFpluVV1hEO2U=";
   };

   strictDeps = true;
@ -8,16 +8,16 @@

 rustPlatform.buildRustPackage (finalAttrs: {
   pname = "similarity";
-  version = "0.2.4";
+  version = "0.3.1";

   src = fetchFromGitHub {
     owner = "mizchi";
     repo = "similarity";
     tag = "v${finalAttrs.version}";
-    hash = "sha256-Z2ZaKBpq7N8KIX8nOzPhm8evfoUxBzaAK0+4cU9qBDE=";
+    hash = "sha256-eZQ0FTmysFYwqg3sjasZW3S0lps2XbFWUbWuZzkFWkA=";
   };

-  cargoHash = "sha256-oYqdCHGY6OZSbYXhjIt20ZL2JkZP7UEOhn0fhuZQnZo=";
+  cargoHash = "sha256-7qLC1RvjBXd9JFrJdDTIngZhMvyQV1ko3MXRr/2y7hA=";

   nativeInstallCheckInputs = [ versionCheckHook ];
   versionCheckProgram = "${placeholder "out"}/bin/${finalAttrs.pname}-ts";
@ -21,13 +21,13 @@

 buildDotnetModule (finalAttrs: {
   pname = "sourcegit";
-  version = "2025.23";
+  version = "2025.25";

   src = fetchFromGitHub {
     owner = "sourcegit-scm";
     repo = "sourcegit";
     tag = "v${finalAttrs.version}";
-    hash = "sha256-q8w1AOVsj9zVGRQzn8paUnjH3VDf+2egVSjiH24q6zI=";
+    hash = "sha256-WPwvOfbCOCQBOvJU2HuGU9/Rh00MCmhZEaKn9Nr1Q0I=";
   };

   patches = [ ./fix-darwin-git-path.patch ];
@ -8,13 +8,13 @@

 stdenv.mkDerivation rec {
   pname = "spectra";
-  version = "1.1.0";
+  version = "1.2.0";

   src = fetchFromGitHub {
     owner = "yixuan";
     repo = "spectra";
     rev = "v${version}";
-    sha256 = "sha256-ut6nEOpzIoFy+IUWQy9x2pJ4+sA0d/Dt8WaNq5AFCFg=";
+    sha256 = "sha256-lfbOwnTP3GrN/1N/tyMXZrtEHIxAq3EjuHS8M+I87to=";
   };

   nativeBuildInputs = [ cmake ];
@ -7,13 +7,13 @@

 buildNpmPackage rec {
   pname = "speedscope";
-  version = "1.22.2";
+  version = "1.23.0";

   src = fetchFromGitHub {
     owner = "jlfwong";
     repo = "speedscope";
     tag = "v${version}";
-    hash = "sha256-JzlS5onVac1UKJUl1YYE7a3oWk2crMyuowea8a7UoOo=";
+    hash = "sha256-I7XulOJuMSxDXyGlXL6AeqP0ohjNhzGTEyWsq6MiTho=";

     # scripts/prepack.sh wants to extract the git commit from .git
     # We don't want to keep .git for reproducibility reasons, so save the commit
@ -25,7 +25,7 @@ buildNpmPackage rec {
     '';
   };

-  npmDepsHash = "sha256-3LCixJJyz3O6xQxP0A/WyQXsDvkXpdo7KYNDoufZVS4=";
+  npmDepsHash = "sha256-5gsWnk37F+fModNUWETBercXE1avEtbAAu8/qi76yDY=";

   patches = [
     ./fix-shebang.patch
@ -1,31 +0,0 @@
-{
-  lib,
-  buildNpmPackage,
-  fetchFromGitHub,
-}:
-
-buildNpmPackage rec {
-  pname = "syn2mas";
-  version = "0.15.0";
-
-  src = fetchFromGitHub {
-    owner = "element-hq";
-    repo = "matrix-authentication-service";
-    rev = "v${version}";
-    hash = "sha256-RK58yfsPJirOKHyBnth42sLTkWo+AkMylEigH0w/RWc=";
-  };
-
-  sourceRoot = "${src.name}/tools/syn2mas";
-
-  npmDepsHash = "sha256-RzZjTKS4d/n9mIJ+eUY7NlqWssCnJ2Yh2nAMwasY8Fk=";
-
-  dontBuild = true;
-
-  meta = {
-    description = "Tool to help with the migration of a Matrix Synapse installation to the Matrix Authentication Service";
-    homepage = "https://github.com/element-hq/matrix-authentication-service/tree/main/tools/syn2mas";
-    license = lib.licenses.agpl3Only;
-    maintainers = with lib.maintainers; [ teutat3s ];
-    mainProgram = "syn2mas";
-  };
-}
@ -5,12 +5,12 @@
 }:
 python3Packages.buildPythonApplication rec {
   pname = "terraform_local";
-  version = "0.23.0";
+  version = "0.24.0";
   pyproject = true;

   src = fetchPypi {
     inherit pname version;
-    hash = "sha256-3GlXR2F28jpeXhFsJAH7yrKp8vrVhCozS8Ew6oi39P4=";
+    hash = "sha256-w+Jd8OPA5XjLw5zp8AEwHlPT5Or4z+elaW9pFKMR9OA=";
   };

   build-system = with python3Packages; [ setuptools ];
@ -11,7 +11,7 @@ rustPlatform.buildRustPackage (finalAttrs: {
   version = "0.13.13";

   src = fetchFromGitHub {
-    owner = "Enter-tainer";
+    owner = "typstyle-rs";
     repo = "typstyle";
     tag = "v${finalAttrs.version}";
     hash = "sha256-IAKCwKekeFekHBjfdC4pi74SXJzCDFoby3n1Z0Pu5q4=";
@ -36,9 +36,9 @@ rustPlatform.buildRustPackage (finalAttrs: {
   };

   meta = {
-    changelog = "https://github.com/Enter-tainer/typstyle/blob/${finalAttrs.src.tag}/CHANGELOG.md";
+    changelog = "https://github.com/typstyle-rs/typstyle/blob/${finalAttrs.src.tag}/CHANGELOG.md";
     description = "Format your typst source code";
-    homepage = "https://github.com/Enter-tainer/typstyle";
+    homepage = "https://github.com/typstyle-rs/typstyle";
     license = lib.licenses.asl20;
     mainProgram = "typstyle";
     maintainers = with lib.maintainers; [
@ -92,20 +92,23 @@ let
 in
 python.pkgs.buildPythonApplication rec {
   pname = "vectorcode";
-  version = "0.7.4";
+  version = "0.7.7";
   pyproject = true;

   src = fetchFromGitHub {
     owner = "Davidyz";
     repo = "VectorCode";
     tag = version;
-    hash = "sha256-N74XBQahUIj0rKJI0emtNvGlG9uYkeHqweppp8fUSLU=";
+    hash = "sha256-c8Wp/bP5KHDN/i2bMyiOQgnHDw8tPbg4IZIQ5Ut4SIo=";
   };

   build-system = with python.pkgs; [
     pdm-backend
   ];

+  pythonRelaxDeps = [
+    "posthog"
+  ];
   dependencies =
     with python.pkgs;
     [
@ -12,16 +12,16 @@

 buildNpmPackage (finalAttrs: {
   pname = "vsce";
-  version = "3.5.0";
+  version = "3.6.0";

   src = fetchFromGitHub {
     owner = "microsoft";
     repo = "vscode-vsce";
     rev = "v${finalAttrs.version}";
-    hash = "sha256-55yiW2BJe0AlfHit8gpZJyZSHqVq6kywuEtZ5HGcfT0=";
+    hash = "sha256-6Tt7IewbCLHG8DVoD8PV6VmrNu3MCUHITgYFq9smvOo=";
   };

-  npmDepsHash = "sha256-tUboOVkZY4iag+36aJLbHGJqqlFE7fx8BXK50G7DJ64=";
+  npmDepsHash = "sha256-pZUDui2mhGe+My9QL+pqeBU16AyJ+/udULbo2EQjZd0=";

   postPatch = ''
     substituteInPlace package.json --replace-fail '"version": "0.0.0"' '"version": "${finalAttrs.version}"'
@@ -8,17 +8,17 @@

 rustPlatform.buildRustPackage (finalAttrs: {
 pname = "weaver";
-version = "0.15.2";
+version = "0.16.1";

 src = fetchFromGitHub {
 owner = "open-telemetry";
 repo = "weaver";
 tag = "v${finalAttrs.version}";
-hash = "sha256-F7FLQ0EAJFll8Twbg11MQ7fqzzlOntqwqVG9+PjRfQM=";
+hash = "sha256-fpr6GMUYRkHqy0e9M2qU4qKTiziSmfrHH+EBav7mgeA=";
 };

 useFetchCargoVendor = true;
-cargoHash = "sha256-alk9TIBN69JvrygcODkuDWQB8qvo7pF9HKoMJsNpaY4=";
+cargoHash = "sha256-8+gOEpeM2n3H1nboZgtaMnWXX90eRom1ERnfItPqmFA=";

 checkFlags = [
 # Skip tests requiring network
@@ -28,13 +28,18 @@ let
 sources =
 let
 # https://dldir1.qq.com/weixin/mac/mac-release.xml
-any-darwin = {
-version = "4.0.6.17-29381";
-src = fetchurl {
-url = "https://dldir1v6.qq.com/weixin/Universal/Mac/xWeChatMac_universal_4.0.6.17_29381.dmg";
-hash = "sha256-Fv5UYZ5+WfHvs+V3yiLSQitj++3WU+0GJsnIdlwB+TE=";
+any-darwin =
+let
+version = "4.0.6.19-29383";
+version' = lib.replaceString "-" "_" version;
+in
+{
+inherit version;
+src = fetchurl {
+url = "https://dldir1v6.qq.com/weixin/Universal/Mac/xWeChatMac_universal_${version'}.dmg";
+hash = "sha256-HloG++DGzsxM7C0AbN4PXkkzFdhUhBDcH5Vq+bTUuEE=";
+};
 };
-};
 in
 {
 aarch64-darwin = any-darwin;
@@ -21,7 +21,7 @@
 glib,
 gnugrep,
 gnused,
-hostname-debian,
+hostname,
 jq,
 procps,
 which,
@@ -166,7 +166,7 @@ let
 scripts = [ "bin/xdg-open" ];
 interpreter = "${bash}/bin/bash";
 inputs = commonDeps ++ [
-hostname-debian
+hostname
 glib.bin
 "${placeholder "out"}/bin"
 ];
@@ -206,7 +206,7 @@ let
 scripts = [ "bin/xdg-screensaver" ];
 interpreter = "${bash}/bin/bash";
 inputs = commonDeps ++ [
-hostname-debian
+hostname
 perl
 procps
 ];
@@ -2,16 +2,18 @@
 lib,
 buildGoModule,
 fetchFromGitHub,
+versionCheckHook,
+nix-update-script,
 }:

-buildGoModule rec {
+buildGoModule (finalAttrs: {
 pname = "yaml2json";
 version = "1.3.5";

 src = fetchFromGitHub {
 owner = "bronze1man";
 repo = "yaml2json";
-rev = "v${version}";
+tag = "v${finalAttrs.version}";
 hash = "sha256-mIjtR1VsSeUhEgeSKDG0qT0kj+NCqVwn31m300cMDeU=";
 };

@@ -24,11 +26,18 @@ buildGoModule rec {
 "-w"
 ];

-meta = with lib; {
+nativeCheckInputs = [ versionCheckHook ];
+versionCheckProgramArg = "--version";
+doInstallCheck = true;
+
+passthru.updateScript = nix-update-script { };
+
+meta = {
 homepage = "https://github.com/bronze1man/yaml2json";
+changelog = "https://github.com/bronze1man/yaml2json/releases/tag/v${finalAttrs.version}";
 description = "Convert yaml to json";
 mainProgram = "yaml2json";
-license = with licenses; [ mit ];
+license = with lib.licenses; [ mit ];
 maintainers = [ ];
 };
-}
+})
@@ -27,11 +27,11 @@ let
 in
 stdenv.mkDerivation (finalAttrs: {
 pname = "go";
-version = "1.23.10";
+version = "1.23.11";

 src = fetchurl {
 url = "https://go.dev/dl/go${finalAttrs.version}.src.tar.gz";
-hash = "sha256-gAp64b/xeaIntlOi9kRRfIAEQ7i0q/MnOvXhy3ET3lk=";
+hash = "sha256-KWOBYHpIOoqGZ9dpUzF1L5Sh8jHCBOJSfS8i4ePRJH0=";
 };

 strictDeps = true;
pkgs/development/compilers/go/1.25.nix (new file, 173 lines)
@@ -0,0 +1,173 @@
+{
+lib,
+stdenv,
+fetchurl,
+apple-sdk_12,
+tzdata,
+replaceVars,
+iana-etc,
+mailcap,
+buildPackages,
+pkgsBuildTarget,
+targetPackages,
+testers,
+skopeo,
+buildGo125Module,
+}:
+
+let
+goBootstrap = buildPackages.callPackage ./bootstrap122.nix { };
+
+skopeoTest = skopeo.override { buildGoModule = buildGo125Module; };
+
+# We need a target compiler which is still runnable at build time,
+# to handle the cross-building case where build != host == target
+targetCC = pkgsBuildTarget.targetPackages.stdenv.cc;
+
+isCross = stdenv.buildPlatform != stdenv.targetPlatform;
+in
+stdenv.mkDerivation (finalAttrs: {
+pname = "go";
+version = "1.25rc2";
+
+src = fetchurl {
+url = "https://go.dev/dl/go${finalAttrs.version}.src.tar.gz";
+hash = "sha256-5jFKMjTEwDuNAGvNHRRZTZKBcNGES23/3V+lojM0SeE=";
+};
+
+strictDeps = true;
+buildInputs =
+[ ]
+++ lib.optionals stdenv.hostPlatform.isLinux [ stdenv.cc.libc.out ]
+++ lib.optionals (stdenv.hostPlatform.libc == "glibc") [ stdenv.cc.libc.static ];
+
+depsTargetTargetPropagated = lib.optionals stdenv.targetPlatform.isDarwin [
+apple-sdk_12
+];
+
+depsBuildTarget = lib.optional isCross targetCC;
+
+depsTargetTarget = lib.optional stdenv.targetPlatform.isWindows targetPackages.threads.package;
+
+postPatch = ''
+patchShebangs .
+'';
+
+patches = [
+(replaceVars ./iana-etc-1.25.patch {
+iana = iana-etc;
+})
+# Patch the mimetype database location which is missing on NixOS.
+# but also allow static binaries built with NixOS to run outside nix
+(replaceVars ./mailcap-1.17.patch {
+inherit mailcap;
+})
+# prepend the nix path to the zoneinfo files but also leave the original value for static binaries
+# that run outside a nix server
+(replaceVars ./tzdata-1.19.patch {
+inherit tzdata;
+})
+./remove-tools-1.11.patch
+./go_no_vendor_checks-1.23.patch
+];
+
+inherit (stdenv.targetPlatform.go) GOOS GOARCH GOARM;
+# GOHOSTOS/GOHOSTARCH must match the building system, not the host system.
+# Go will nevertheless build a for host system that we will copy over in
+# the install phase.
+GOHOSTOS = stdenv.buildPlatform.go.GOOS;
+GOHOSTARCH = stdenv.buildPlatform.go.GOARCH;
+
+# {CC,CXX}_FOR_TARGET must be only set for cross compilation case as go expect those
+# to be different from CC/CXX
+CC_FOR_TARGET = if isCross then "${targetCC}/bin/${targetCC.targetPrefix}cc" else null;
+CXX_FOR_TARGET = if isCross then "${targetCC}/bin/${targetCC.targetPrefix}c++" else null;
+
+GO386 = "softfloat"; # from Arch: don't assume sse2 on i686
+# Wasi does not support CGO
+CGO_ENABLED = if stdenv.targetPlatform.isWasi then 0 else 1;
+
+GOROOT_BOOTSTRAP = "${goBootstrap}/share/go";
+
+buildPhase = ''
+runHook preBuild
+export GOCACHE=$TMPDIR/go-cache
+
+export PATH=$(pwd)/bin:$PATH
+
+${lib.optionalString isCross ''
+# Independent from host/target, CC should produce code for the building system.
+# We only set it when cross-compiling.
+export CC=${buildPackages.stdenv.cc}/bin/cc
+''}
+ulimit -a
+
+pushd src
+./make.bash
+popd
+runHook postBuild
+'';
+
+preInstall =
+''
+# Contains the wrong perl shebang when cross compiling,
+# since it is not used for anything we can deleted as well.
+rm src/regexp/syntax/make_perl_groups.pl
+''
++ (
+if (stdenv.buildPlatform.system != stdenv.hostPlatform.system) then
+''
+mv bin/*_*/* bin
+rmdir bin/*_*
+${lib.optionalString
+(!(finalAttrs.GOHOSTARCH == finalAttrs.GOARCH && finalAttrs.GOOS == finalAttrs.GOHOSTOS))
+''
+rm -rf pkg/${finalAttrs.GOHOSTOS}_${finalAttrs.GOHOSTARCH} pkg/tool/${finalAttrs.GOHOSTOS}_${finalAttrs.GOHOSTARCH}
+''
+}
+''
+else
+lib.optionalString (stdenv.hostPlatform.system != stdenv.targetPlatform.system) ''
+rm -rf bin/*_*
+${lib.optionalString
+(!(finalAttrs.GOHOSTARCH == finalAttrs.GOARCH && finalAttrs.GOOS == finalAttrs.GOHOSTOS))
+''
+rm -rf pkg/${finalAttrs.GOOS}_${finalAttrs.GOARCH} pkg/tool/${finalAttrs.GOOS}_${finalAttrs.GOARCH}
+''
+}
+''
+);
+
+installPhase = ''
+runHook preInstall
+mkdir -p $out/share/go
+cp -a bin pkg src lib misc api doc go.env VERSION $out/share/go
+mkdir -p $out/bin
+ln -s $out/share/go/bin/* $out/bin
+runHook postInstall
+'';
+
+disallowedReferences = [ goBootstrap ];
+
+passthru = {
+inherit goBootstrap skopeoTest;
+tests = {
+skopeo = testers.testVersion { package = skopeoTest; };
+version = testers.testVersion {
+package = finalAttrs.finalPackage;
+command = "go version";
+version = "go${finalAttrs.version}";
+};
+};
+};
+
+meta = with lib; {
+changelog = "https://go.dev/doc/devel/release#go${lib.versions.majorMinor finalAttrs.version}";
+description = "Go Programming language";
+homepage = "https://go.dev/";
+license = licenses.bsd3;
+teams = [ teams.golang ];
+platforms = platforms.darwin ++ platforms.linux ++ platforms.wasi ++ platforms.freebsd;
+mainProgram = "go";
+};
+})
pkgs/development/compilers/go/iana-etc-1.25.patch (new file, 26 lines)
@@ -0,0 +1,26 @@
+diff --git a/src/net/lookup_unix.go b/src/net/lookup_unix.go
+index 7416cb01f8..62722cab14 100644
+--- a/src/net/lookup_unix.go
++++ b/src/net/lookup_unix.go
+@@ -15,7 +15,7 @@ import (
+ // readProtocolsOnce loads contents of /etc/protocols into protocols map
+ // for quick access.
+ var readProtocolsOnce = sync.OnceFunc(func() {
+- file, err := open("/etc/protocols")
++ file, err := open("@iana@/etc/protocols")
+ if err != nil {
+ return
+ }
+diff --git a/src/net/port_unix.go b/src/net/port_unix.go
+index df73dbabb3..a5dcf2ca1c 100644
+--- a/src/net/port_unix.go
++++ b/src/net/port_unix.go
+@@ -16,7 +16,7 @@ import (
+ var onceReadServices sync.Once
+
+ func readServices() {
+- file, err := open("/etc/services")
++ file, err := open("@iana@/etc/services")
+ if err != nil {
+ return
+ }
@@ -32,7 +32,7 @@ mkCoqDerivation {
 metacoq.version
 ]
 [
-(case (range "8.17" "9.1") (range "1.3.1" "1.3.4") "0.1.1")
+(case (range "8.17" "9.0") (range "1.3.1" "1.3.4") "0.1.1")
 ]
 null;

@@ -16,6 +16,15 @@ mkCoqDerivation {

 inherit version;
 defaultVersion =
+let
+case = coq: mc: out: {
+cases = [
+coq
+mc
+];
+inherit out;
+};
+in
 with versions;
 switch
 [
@@ -23,20 +32,8 @@ mkCoqDerivation {
 metacoq.version
 ]
 [
-{
-cases = [
-(range "8.17" "8.19")
-(range "1.3.1" "1.3.3")
-];
-out = "0.1.0";
-}
-{
-cases = [
-(range "8.20" "9.0")
-(range "1.3.2" "1.3.4")
-];
-out = "0.1.1";
-}
+(case (range "8.20" "9.0") (range "1.3.2" "1.3.4") "0.1.1")
+(case (range "8.17" "8.19") (range "1.3.1" "1.3.3") "0.1.0")
 ]
 null;

@@ -15,7 +15,7 @@ mkCoqDerivation {
 in
 with lib.versions;
 lib.switch coq.coq-version [
-(case (range "8.10" "9.0") "0.3.4")
+(case (range "8.10" "9.1") "0.3.4")
 ] null;
 release."0.3.4".sha256 = "sha256-AhEcugUiVIsgbq884Lur/bQIuGw8prk+3AlNkP1omcw=";
 release."0.3.3".sha256 = "sha256-HDIPeFHiC9EwhiOH7yMGJ9d2zJMhboTpRGf9kWcB9Io=";
@@ -19,7 +19,7 @@ mkCoqDerivation {
 in
 with lib.versions;
 lib.switch coq.coq-version [
-(case (range "8.12" "9.0") "3.4.3")
+(case (range "8.12" "9.1") "3.4.3")
 (case (range "8.12" "8.20") "3.4.2")
 (case (range "8.12" "8.18") "3.4.0")
 (case (range "8.12" "8.17") "3.3.0")
@@ -26,7 +26,7 @@ mkCoqDerivation {
 lib.switch
 [ coq.coq-version mathcomp-algebra.version ]
 [
-(case (range "9.0" "9.0") (range "2.4.0" "2.4.0") "2.2.0")
+(case (range "9.0" "9.1") (range "2.4.0" "2.4.0") "2.2.0")
 ]
 null;
 release."2.2.0".sha256 = "sha256-VnfK+RHWiq27hxEJ9stpVp609/dMiPH6UHFhzaHdAnM=";
@@ -19,7 +19,7 @@ mkCoqDerivation {
 in
 with lib.versions;
 lib.switch coq.coq-version [
-(case (range "8.15" "9.0") "4.2.1")
+(case (range "8.15" "9.1") "4.2.1")
 (case (range "8.14" "8.20") "4.2.0")
 (case (range "8.14" "8.18") "4.1.3")
 (case (range "8.14" "8.17") "4.1.1")
@@ -27,7 +27,7 @@ mkCoqDerivation {
 lib.switch
 [ coq.coq-version mathcomp.version ]
 [
-(case (range "8.19" "9.0") (range "2.2" "2.4") "2025.02.0")
+(case (range "8.19" "9.1") (range "2.2" "2.4") "2025.02.0")
 (case (isEq "8.18") (isEq "2.2") "2024.07.2")
 ]
 null;
@@ -11,24 +11,15 @@
 let
 ocamlPackages = coq.ocamlPackages;
 defaultVersion =
+let
+case = case: out: { inherit case out; };
+in
 with lib.versions;
 lib.switch coq.coq-version [
-{
-case = range "8.18" "9.0";
-out = "2.2.6";
-}
-{
-case = range "8.18" "8.20";
-out = "2.2.1";
-}
-{
-case = range "8.18" "8.19";
-out = "2.1.2";
-}
-{
-case = isEq "8.18";
-out = "2.0.3+coq8.18";
-}
+(case (range "8.18" "9.1") "2.2.6")
+(case (range "8.18" "8.20") "2.2.1")
+(case (range "8.18" "8.19") "2.1.2")
+(case (isEq "8.18") "2.0.3+coq8.18")
 ] null;
 location = {
 domain = "github.com";
@@ -174,11 +174,11 @@ in

 saxon_11-he = common rec {
 pname = "saxon-he";
-version = "11.6";
+version = "11.7";
 jar = "saxon-he-${version}";
 src = fetchurl {
 url = github.downloadUrl version;
-sha256 = "/AVX5mtZSO6Is19t3+FlEvtIBsnwB3MIWAPCht8Aqnw=";
+sha256 = "MGzhUW9ZLVvTSqEdpAZWAiwTYxCZxbn26zESDmIe4Vo=";
 };
 updateScript = github.updateScript version;
 description = "Processor for XSLT 3.0, XPath 2.0 and 3.1, and XQuery 3.1";
@@ -186,11 +186,11 @@ in

 saxon_12-he = common rec {
 pname = "saxon-he";
-version = "12.7";
+version = "12.8";
 jar = "saxon-he-${version}";
 src = fetchurl {
 url = github.downloadUrl version;
-hash = "sha256-+J4ghaw1fZtsuKIxcHrrxff20LTsOmJhRLqWVvdZLN4=";
+hash = "sha256-K6hRrseSW4giCBgsSMk2IwIF1VjjNWNrvkZia9gANZg=";
 };
 updateScript = github.updateScript version;
 description = "Processor for XSLT 3.0, XPath 3.1, and XQuery 3.1";
@@ -48,13 +48,13 @@ let
 in
 stdenv.mkDerivation (finalAttrs: {
 pname = "libpulsar";
-version = "3.7.1";
+version = "3.7.2";

 src = fetchFromGitHub {
 owner = "apache";
 repo = "pulsar-client-cpp";
 rev = "v${finalAttrs.version}";
-hash = "sha256-RHWi0KCq7U7Dr3Ic7kduc8P64VpAThTQ3lDxLLEqzIU=";
+hash = "sha256-3kUyimyv0Si3zUFaIsIVdulzH8l2fxe6BO9a5L6n8I8=";
 };

 nativeBuildInputs =
@@ -5,28 +5,45 @@
 domain-local-await,
 domain-local-timeout,
 alcotest,
+multicore-magic,
+backoff,
+domain_shims,
+mdx,
 }:

 buildDunePackage rec {
 pname = "kcas";
-version = "0.6.1";
+version = "0.7.0";

+minimalOCamlVersion = "4.13.0";
+
 src = fetchurl {
 url = "https://github.com/ocaml-multicore/kcas/releases/download/${version}/kcas-${version}.tbz";
-hash = "sha256-u3Z8uAvITRUhOcB2EUYjWtpxIFJMvm2O/kyNr/AELWI=";
+hash = "sha256-mo/otnkB79QdyVgLw1sZFfkR/Z/l15cRVfEYPPd6H5E=";
 };

 propagatedBuildInputs = [
 domain-local-await
 domain-local-timeout
+multicore-magic
+backoff
 ];

 doCheck = true;
-checkInputs = [ alcotest ];
+nativeCheckInputs = [ mdx.bin ];
+checkInputs = [
+alcotest
+domain_shims
+mdx
+];

 meta = {
 homepage = "https://github.com/ocaml-multicore/kcas";
 description = "STM based on lock-free MCAS";
+longDescription = ''
+A software transactional memory (STM) implementation based on an atomic lock-free multi-word compare-and-set (MCAS) algorithm enhanced with read-only compare operations and ability to block awaiting for changes.
+'';
+changelog = "https://raw.githubusercontent.com/ocaml-multicore/kcas/refs/tags/${version}/CHANGES.md";
 license = lib.licenses.isc;
 maintainers = [ lib.maintainers.vbgl ];
 };
@@ -2,22 +2,27 @@
 lib,
 buildDunePackage,
 fetchurl,
+bigarray-compat,
 }:

 buildDunePackage rec {
 pname = "mmap";
-version = "1.1.0";
+version = "1.2.0";

-useDune2 = true;
-
 src = fetchurl {
-url = "https://github.com/mirage/mmap/releases/download/v${version}/mmap-v${version}.tbz";
-sha256 = "0l6waidal2n8mkdn74avbslvc10sf49f5d889n838z03pra5chsc";
+url = "https://github.com/mirage/mmap/releases/download/v${version}/mmap-${version}.tbz";
+hash = "sha256-FgKoq8jiMvqUdxpS5UDleAtAwvJ2Lu5q+9koZQIRbds=";
 };

+propagatedBuildInputs = [ bigarray-compat ];
+
 meta = {
 homepage = "https://github.com/mirage/mmap";
 description = "Function for mapping files in memory";
+longDescription = ''
+This project provides a Mmap.map_file functions for mapping files in memory.
+'';
+changelog = "https://raw.githubusercontent.com/mirage/mmap/refs/tags/v${version}/CHANGES.md";
 license = lib.licenses.lgpl21;
 maintainers = [ lib.maintainers.vbgl ];
 };
@@ -5,37 +5,40 @@
 pythonOlder,
 cryptography,
 jinja2,
+librouteros,
 mako,
+packaging,
 passlib,
 pyyaml,
 requests,
 rtoml,
 setuptools,
 tomlkit,
-librouteros,
 pytestCheckHook,
+versionCheckHook,
 }:

-buildPythonPackage rec {
+let
+version = "4.23.1";
+in
+buildPythonPackage {
 pname = "bundlewrap";
-version = "4.22.0";
+inherit version;
 pyproject = true;

-disabled = pythonOlder "3.8";
-
 src = fetchFromGitHub {
 owner = "bundlewrap";
 repo = "bundlewrap";
 tag = version;
-hash = "sha256-F3Ipoep9ZmAqkh8mFLXpaEcYb4dpV9Dt/VgMa9X24Hw=";
+hash = "sha256-Nzfx2L/FlYXQcbKq/cuRZ+PWnjv4HDld9q01nwQ1sA8=";
 };

 build-system = [ setuptools ];
 dependencies = [
-setuptools
 cryptography
 jinja2
 mako
+packaging
 passlib
 pyyaml
 requests
@@ -45,18 +48,24 @@ buildPythonPackage rec {

 pythonImportsCheck = [ "bundlewrap" ];

-nativeCheckInputs = [ pytestCheckHook ];
+nativeCheckInputs = [
+pytestCheckHook
+versionCheckHook
+];
+versionCheckProgram = "${placeholder "out"}/bin/bw";
+versionCheckProgramArg = "--version";

 enabledTestPaths = [
 # only unit tests as integration tests need a OpenSSH client/server setup
 "tests/unit"
 ];

-meta = with lib; {
+meta = {
 homepage = "https://bundlewrap.org/";
 description = "Easy, Concise and Decentralized Config management with Python";
+changelog = "https://github.com/bundlewrap/bundlewrap/blob/${version}/CHANGELOG.md";
 mainProgram = "bw";
-license = [ licenses.gpl3 ];
+license = [ lib.licenses.gpl3 ];
-maintainers = with maintainers; [ wamserma ];
+maintainers = with lib.maintainers; [ wamserma ];
 };
 }
@@ -30,7 +30,7 @@

 buildPythonPackage rec {
 pname = "craft-parts";
-version = "2.15.0";
+version = "2.16.0";

 pyproject = true;

@@ -38,7 +38,7 @@ buildPythonPackage rec {
 owner = "canonical";
 repo = "craft-parts";
 tag = version;
-hash = "sha256-UW4VdJvEG4w7CM5aj1OKK91nsywQgsguJ+tnhEbqwYA=";
+hash = "sha256-JuFx5Ap2ioYsc20E5Ho4z+/DFs7d+OBF3XLlOOZ10Zk=";
 };

 patches = [ ./bash-path.patch ];
@@ -26,14 +26,14 @@
 }:
 buildPythonPackage rec {
 pname = "cynthion";
-version = "0.2.0";
+version = "0.2.2";
 pyproject = true;

 src = fetchFromGitHub {
 owner = "greatscottgadgets";
 repo = "cynthion";
 tag = version;
-hash = "sha256-rbvw2eieZwTxStwCRuvIx/f4vdPsOFnV/U80Ga+fNPA=";
+hash = "sha256-xL1/ckX+xKUQpugQkLB3SlZeNcBEaTMascTgoQ4C+hA=";
 };

 sourceRoot = "${src.name}/cynthion/python";
@@ -15,14 +15,14 @@

 buildPythonPackage rec {
 pname = "mpltoolbox";
-version = "25.04.0";
+version = "25.05.0";
 pyproject = true;

 src = fetchFromGitHub {
 owner = "scipp";
 repo = "mpltoolbox";
 tag = version;
-hash = "sha256-+LqPTlVbSxuewWuPNUfGdgQjWFxo7s2i3e21WkNNK78=";
+hash = "sha256-XDfCzkn/Mr0Rca+Ku/mDSneqSfMHS9i/XUP8vVLSKMQ=";
 };

 build-system = [
@@ -27,7 +27,7 @@

 buildPythonPackage rec {
 pname = "pypdf";
-version = "5.6.1";
+version = "5.7.0";
 pyproject = true;

 disabled = pythonOlder "3.8";
@@ -38,7 +38,7 @@ buildPythonPackage rec {
 tag = version;
 # fetch sample files used in tests
 fetchSubmodules = true;
-hash = "sha256-+/tb370yraT78wGeLc0WVYQ5/SE9PBXMtuazGLiaAJc=";
+hash = "sha256-O1kV9VKbI3MFsWvr3jrDg9rJ0G+92ny0v7g4Xx/1ytM=";
 };

 outputs = [
@@ -125,6 +125,9 @@ buildPythonPackage rec {
 "test_state_with_invalid_yield"
 # tries to run bun or npm
 "test_output_system_info"
+# Comparison with magic string
+# TODO Recheck on next update as it appears to be fixed in 8.0.x
+"test_background_task_no_block"
 ];

 disabledTestPaths = [
@@ -12,7 +12,7 @@ nvidia_x11: sha256:
 let
 sys = lib.concatStringsSep "-" (lib.reverseList (lib.splitString "-" stdenv.system));
 bsys = builtins.replaceStrings [ "_" ] [ "-" ] sys;
-fmver = nvidia_x11.version;
+fmver = nvidia_x11.fabricmanagerVersion;
 ldd = (lib.getBin glibc) + "/bin/ldd";
 in

@@ -6,11 +6,11 @@
 sha256_aarch64 ? null,
 openSha256 ? null,
 settingsSha256 ? null,
-settingsVersion ? version,
+settingsVersion ? null,
 persistencedSha256 ? null,
-persistencedVersion ? version,
+persistencedVersion ? null,
 fabricmanagerSha256 ? null,
-fabricmanagerVersion ? version,
+fabricmanagerVersion ? null,
 useGLVND ? true,
 useProfiles ? true,
 preferGtk2 ? false,
@@ -140,199 +140,203 @@ let

 [1]: https://www.nvidia.com/content/DriverDownloads/licence.php?lang=us
 '';

-self = stdenv.mkDerivation {
+in
+stdenv.mkDerivation (finalAttrs: {
 name = "nvidia-${if useFabricmanager then "dc" else "x11"}-${version}${nameSuffix}";

 builder = ./builder.sh;

 src =
 if !acceptLicense && (openSha256 == null) then
 throwLicense
 else if stdenv.hostPlatform.system == "x86_64-linux" then
 fetchurl {
 urls =
 if args ? url then
 [ args.url ]
 else
 [
 "https://us.download.nvidia.com/XFree86/Linux-x86_64/${version}/NVIDIA-Linux-x86_64-${version}${pkgSuffix}.run"
 "https://download.nvidia.com/XFree86/Linux-x86_64/${version}/NVIDIA-Linux-x86_64-${version}${pkgSuffix}.run"
 ];
 sha256 = sha256_64bit;
 }
 else if stdenv.hostPlatform.system == "i686-linux" then
 fetchurl {
 urls =
 if args ? url then
 [ args.url ]
 else
 [
 "https://us.download.nvidia.com/XFree86/Linux-x86/${version}/NVIDIA-Linux-x86-${version}${pkgSuffix}.run"
 "https://download.nvidia.com/XFree86/Linux-x86/${version}/NVIDIA-Linux-x86-${version}${pkgSuffix}.run"
 ];
 sha256 = sha256_32bit;
 }
 else if stdenv.hostPlatform.system == "aarch64-linux" && sha256_aarch64 != null then
 fetchurl {
 urls =
 if args ? url then
 [ args.url ]
 else
 [
 "https://us.download.nvidia.com/XFree86/aarch64/${version}/NVIDIA-Linux-aarch64-${version}${pkgSuffix}.run"
 "https://download.nvidia.com/XFree86/Linux-aarch64/${version}/NVIDIA-Linux-aarch64-${version}${pkgSuffix}.run"
 ];
 sha256 = sha256_aarch64;
 }
 else
 throw "nvidia-x11 does not support platform ${stdenv.hostPlatform.system}";

 patches = if libsOnly then null else patches;
 inherit prePatch postPatch patchFlags;
 inherit preInstall postInstall;
 inherit version useGLVND useProfiles;
 inherit (stdenv.hostPlatform) system;
 inherit i686bundled;

 outputs =
 [ "out" ]
 ++ lib.optional i686bundled "lib32"
 ++ lib.optional (!libsOnly) "bin"
 ++ lib.optional (!libsOnly && firmware) "firmware";
 outputDev = if libsOnly then null else "bin";

 kernel = if libsOnly then null else kernel.dev;
 kernelVersion = if libsOnly then null else kernel.modDirVersion;

 makeFlags = lib.optionals (!libsOnly) (
 kernelModuleMakeFlags
 ++ [
 "IGNORE_PREEMPT_RT_PRESENCE=1"
 "NV_BUILD_SUPPORTS_HMM=1"
 "SYSSRC=${kernel.dev}/lib/modules/${kernel.modDirVersion}/source"
 "SYSOUT=${kernel.dev}/lib/modules/${kernel.modDirVersion}/build"
 ]
 );

 hardeningDisable = [
 "pic"
 "format"
 ];

 dontStrip = true;
 dontPatchELF = true;

 libPath = libPathFor pkgs;
 libPath32 = lib.optionalString i686bundled (libPathFor pkgsi686Linux);

 nativeBuildInputs = [
 perl
 nukeReferences
 which
 libarchive
 jq
 ] ++ lib.optionals (!libsOnly) kernel.moduleBuildDependencies;

 disallowedReferences = lib.optionals (!libsOnly) [ kernel.dev ];

 passthru =
 let
 fetchFromGithubOrNvidia =
 {
 owner,
 repo,
 rev,
 ...
 }@args:
 let
 args' = builtins.removeAttrs args [
 "owner"
 "repo"
 "rev"
 ];
 baseUrl = "https://github.com/${owner}/${repo}";
 in
 fetchzip (
 args'
 // {
 urls = [
 "${baseUrl}/archive/${rev}.tar.gz"
 "https://download.nvidia.com/XFree86/${repo}/${repo}-${rev}.tar.bz2"
 ];
 # github and nvidia use different compression algorithms,
 # use an invalid file extension to force detection.
 extension = "tar.??";
 }
 );
 in
 {
 open = lib.mapNullable (
 hash:
 callPackage ./open.nix {
 inherit hash;
-nvidia_x11 = self;
+nvidia_x11 = finalAttrs.finalPackage;
 patches =
 (builtins.map (rewritePatch {
 from = "kernel";
 to = "kernel-open";
 }) patches)
 ++ patchesOpen;
 broken = brokenOpen;
 }
 ) openSha256;
 settings =
 if useSettings then
 (if settings32Bit then pkgsi686Linux.callPackage else callPackage)
-(import ./settings.nix self settingsSha256)
+(import ./settings.nix finalAttrs.finalPackage settingsSha256)
 {
 withGtk2 = preferGtk2;
 withGtk3 = !preferGtk2;
 fetchFromGitHub = fetchFromGithubOrNvidia;
 }
 else
 { };
 persistenced =
 if usePersistenced then
 lib.mapNullable (
 hash:
-callPackage (import ./persistenced.nix self hash) {
+callPackage (import ./persistenced.nix finalAttrs.finalPackage hash) {
 fetchFromGitHub = fetchFromGithubOrNvidia;
 }
 ) persistencedSha256
 else
 { };
 fabricmanager =
 if useFabricmanager then
-lib.mapNullable (hash: callPackage (import ./fabricmanager.nix self hash) { }) fabricmanagerSha256
+lib.mapNullable (
+hash: callPackage (import ./fabricmanager.nix finalAttrs.finalPackage hash) { }
+) fabricmanagerSha256
 else
 { };
-inherit persistencedVersion settingsVersion;
+settingsVersion = if settingsVersion != null then settingsVersion else finalAttrs.version;
+persistencedVersion =
+if persistencedVersion != null then persistencedVersion else finalAttrs.version;
+fabricmanagerVersion =
+if fabricmanagerVersion != null then fabricmanagerVersion else finalAttrs.version;
 compressFirmware = false;
 ibtSupport = ibtSupport || (lib.versionAtLeast version "530");
 }
 // lib.optionalAttrs (!i686bundled) {
 inherit lib32;
 };

 meta = with lib; {
 homepage = "https://www.nvidia.com/object/unix.html";
 description = "${
 if useFabricmanager then "Data Center" else "X.org"
 } driver and kernel module for NVIDIA cards";
 license = licenses.unfreeRedistributable;
 platforms =
 [ "x86_64-linux" ]
 ++ lib.optionals (sha256_32bit != null) [ "i686-linux" ]
 ++ lib.optionals (sha256_aarch64 != null) [ "aarch64-linux" ];
 maintainers = with maintainers; [
 kiskae
 edwtjo
 ];
 priority = 4; # resolves collision with xorg-server's "lib/xorg/modules/extensions/libglx.so"
 inherit broken;
 };
-};
-
-in
-self
+})
@@ -13,13 +13,13 @@

 postgresqlBuildExtension (finalAttrs: {
 pname = "timescaledb${lib.optionalString (!enableUnfree) "-apache"}";
-version = "2.20.3";
+version = "2.21.0";

 src = fetchFromGitHub {
 owner = "timescale";
 repo = "timescaledb";
 tag = finalAttrs.version;
-hash = "sha256-Ma6h2ISMjBz14y5Pbx4T4QOMrrvUy5wkPyKawm9rpx0=";
+hash = "sha256-t3BPy1rmV3f/OFDHqiRh1E9tNH7dc1LCTktvkSSZLro=";
 };

 nativeBuildInputs = [ cmake ];
@@ -5,7 +5,7 @@
 }:

 let
-tests = tests-stdenv // test-extendMkDerivation // tests-go // tests-python;
+tests = tests-stdenv // test-extendMkDerivation // tests-fetchhg // tests-go // tests-python;

 tests-stdenv =
 let
@@ -131,6 +131,51 @@ let
 };
 };

+tests-fetchhg =
+let
+ruamel_0_18_14-hash = "sha256-HDkPPp1xI3uoGYlS9mwPp1ZjG2gKvx6vog0Blj6tBuI=";
+ruamel_0_18_14-src = pkgs.fetchhg {
+url = "http://hg.code.sf.net/p/ruamel-yaml/code";
+rev = "0.18.14";
+hash = ruamel_0_18_14-hash;
+};
+ruamel_0_17_21-hash = "sha256-6PV0NyPQfd+4RBqoj5vJaOHShx+TJVHD2IamRinU0VU=";
+ruamel_0_17_21-src = pkgs.fetchhg {
+url = "http://hg.code.sf.net/p/ruamel-yaml/code";
+rev = "0.17.21";
+hash = ruamel_0_17_21-hash;
+};
+ruamel_0_17_21-src-by-overriding = ruamel_0_18_14-src.overrideAttrs {
+rev = "0.17.21";
+hash = ruamel_0_17_21-hash;
+};
+in
+{
+hash-outputHash-equivalence = {
+expr = ruamel_0_17_21-src.outputHash == ruamel_0_17_21-hash;
+expected = true;
+};
+
+hash-overridability-outputHash = {
+expr = ruamel_0_17_21-src-by-overriding.outputHash == ruamel_0_17_21-hash;
+expected = true;
+};
+
+hash-overridability-drvPath = {
+expr =
+lib.isString ruamel_0_17_21-src-by-overriding.drvPath
+&& ruamel_0_17_21-src-by-overriding.drvPath == ruamel_0_17_21-src.drvPath;
+expected = true;
+};
+
+hash-overridability-outPath = {
+expr =
+lib.isString ruamel_0_17_21-src-by-overriding.outPath
+&& ruamel_0_17_21-src-by-overriding.outPath == ruamel_0_17_21-src.outPath;
+expected = true;
+};
+};
+
 tests-go =
 let
 pet_0_3_4 = pkgs.buildGoModule rec {
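The regression tests above also double as a usage reference. A minimal sketch, assuming only the refactored, overridable fetchhg exercised by this commit; the URL, revisions, and hashes are copied verbatim from the test fixtures above, and nothing beyond them is implied:

# Pin one revision of the ruamel-yaml Mercurial repository, then retarget it
# by overriding rev and hash, exactly as the hash-overridability-* tests do.
let
  pkgs = import <nixpkgs> { };
  base = pkgs.fetchhg {
    url = "http://hg.code.sf.net/p/ruamel-yaml/code";
    rev = "0.18.14";
    hash = "sha256-HDkPPp1xI3uoGYlS9mwPp1ZjG2gKvx6vog0Blj6tBuI=";
  };
in
base.overrideAttrs {
  rev = "0.17.21";
  hash = "sha256-6PV0NyPQfd+4RBqoj5vJaOHShx+TJVHD2IamRinU0VU=";
}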
@@ -1911,6 +1911,7 @@ mapAliases {
 swt_jdk8 = throw "'swt_jdk8' has been removed due to being unused and broken for a long time"; # Added 2025-01-07
 Sylk = sylk; # Added 2024-06-12
 symbiyosys = sby; # Added 2024-08-18
+syn2mas = throw "'syn2mas' has been removed. It has been integrated into the main matrix-authentication-service CLI as a subcommand: 'mas-cli syn2mas'."; # Added 2025-07-07
 sync = taler-sync; # Added 2024-09-04
 syncthing-cli = throw "'syncthing-cli' has been renamed to/replaced by 'syncthing'"; # Converted to throw 2024-10-17
 syncthingtray-qt6 = syncthingtray; # Added 2024-03-06
@@ -2485,8 +2485,6 @@ with pkgs;

 cdist = python3Packages.callPackage ../tools/admin/cdist { };

-cdrtools = callPackage ../tools/cd-dvd/cdrtools { };
-
 cemu-ti = qt5.callPackage ../applications/science/math/cemu-ti { };

 libceph = ceph.lib;
@@ -9822,6 +9820,11 @@ with pkgs;
 go = buildPackages.go_1_24;
 };

+go_1_25 = callPackage ../development/compilers/go/1.25.nix { };
+buildGo125Module = callPackage ../build-support/go/module.nix {
+go = buildPackages.go_1_25;
+};
+
 ### DEVELOPMENT / HARE

 hareHook = callPackage ../by-name/ha/hare/hook.nix { };
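A minimal sketch of how the new bindings might be consumed, assuming only the go_1_25 and buildGo125Module attributes added in the hunk above; it mirrors the skopeo override that go/1.25.nix itself uses for its test:

# Hypothetical overlay: rebuild an existing Go package with the Go 1.25 toolchain.
final: prev: {
  skopeo-go125 = prev.skopeo.override { buildGoModule = final.buildGo125Module; };
}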
@@ -15293,10 +15296,6 @@ with pkgs;
 inherit (llvmPackages) openmp;
 };

-kallisto = callPackage ../applications/science/biology/kallisto {
-autoconf = buildPackages.autoconf269;
-};
-
 mirtk = callPackage ../development/libraries/science/biology/mirtk { itk = itk_5_2; };

 nest = callPackage ../applications/science/biology/nest { };
Some files were not shown because too many files have changed in this diff.