staging-next 2025-07-04 (#422427)

Vladimír Čunát 2025-07-13 19:33:14 +02:00
commit b47d4f01d4
No known key found for this signature in database
GPG Key ID: E747DF1F9575A3AA
1126 changed files with 15077 additions and 10357 deletions

View File

@@ -24,7 +24,7 @@ insert_final_newline = false
 # see https://nixos.org/nixpkgs/manual/#chap-conventions
 # Match json/lockfiles/markdown/nix/perl/python/ruby/shell/docbook files, set indent to spaces
-[*.{bash,json,lock,md,nix,pl,pm,py,rb,sh,xml}]
+[*.{bash,js,json,lock,md,nix,pl,pm,py,rb,sh,xml}]
 indent_style = space

 # Match docbook files, set indent width of one
@@ -32,7 +32,7 @@ indent_style = space
 indent_size = 1

 # Match json/lockfiles/markdown/nix/ruby files, set indent width of two
-[*.{json,lock,md,nix,rb}]
+[*.{js,json,lock,md,nix,rb}]
 indent_size = 2

 # Match all the Bash code in Nix files, set indent width of two

View File

@@ -1,4 +0,0 @@
-# TODO: Move to top-level via staging PR
-[*.js]
-indent_style = space
-indent_size = 2

View File

@@ -91,3 +91,4 @@ Meson setup hook.
 - `prefixKey`
 - `enableParallelBuilding`
+- `enableParallelChecking`
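
A hedged sketch of where the newly documented variable is set in practice (the package and flag value are illustrative, not part of this commit):

```nix
# Illustrative only: a package built with the Meson setup hook, opting out
# of parallel checking when its test suite is racy.
stdenv.mkDerivation {
  pname = "example"; # hypothetical package
  version = "1.0";
  src = ./.;
  nativeBuildInputs = [ meson ninja ];
  doCheck = true;
  enableParallelChecking = false; # honored by the Meson setup hook
}
```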

View File

@@ -31,7 +31,7 @@ It does so in a clean environment (using `env --ignore-environment`), and it che
 The variables that this phase control are:
 - `dontVersionCheck`: Disable adding this hook to the [`preInstallCheckHooks`](#ssec-installCheck-phase). Useful if you do want to load the bash functions of the hook, but run them differently.
-- `versionCheckProgram`: The full path to the program that should print the `${version}` string. Defaults roughly to `${placeholder "out"}/bin/${pname}`. Using `$out` in the value of this variable won't work, as environment variables from this variable are not expanded by the hook. Hence using `placeholder` is unavoidable.
+- `versionCheckProgram`: The full path to the program that should print the `${version}` string. Defaults to using the first non-empty value `$binary` out of `${NIX_MAIN_PROGRAM}` and `${pname}`, in that order, to build roughly `${placeholder "out"}/bin/$binary`. `${NIX_MAIN_PROGRAM}`'s value comes from `meta.mainProgram`, and does not normally need to be set explicitly. When setting `versionCheckProgram`, using `$out` directly won't work, as environment variables from this variable are not expanded by the hook. Hence using `placeholder "out"` is unavoidable.
 - `versionCheckProgramArg`: The argument that needs to be passed to `versionCheckProgram`. If undefined the hook tries first `--help` and then `--version`. Examples: `version`, `-V`, `-v`.
 - `versionCheckKeepEnvironment`: A list of environment variables to keep and pass to the command. Only those variables should be added to this list that are actually required for the version command to work. If it is not feasible to explicitly list all these environment variables you can set this parameter to the special value `"*"` to disable the `--ignore-environment` flag and thus keep all environment variables.
 - `preVersionCheck`: A hook to run before the check is done.
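
For orientation, a minimal sketch of wiring the hook into a package (the package, program name, and argument are hypothetical; the attribute names are those documented above):

```nix
{ stdenv, versionCheckHook }:

stdenv.mkDerivation (finalAttrs: {
  pname = "example";
  version = "1.2.3";
  src = ./.;

  nativeInstallCheckInputs = [ versionCheckHook ];
  doInstallCheck = true;

  # Only needed when the default guess (meta.mainProgram, then pname)
  # does not match the installed binary; note placeholder, not $out:
  versionCheckProgram = "${placeholder "out"}/bin/example-cli";
  versionCheckProgramArg = "--version";

  meta.mainProgram = "example-cli";
})
```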

View File

@@ -20,6 +20,8 @@
 - `space-orbit` package has been removed due to lack of upstream maintenance. Debian upstream stopped tracking it in 2011.
 - `command-not-found` package is now disabled by default; it works only for nix-channels based systems, and requires setup for it to work.
+- Derivations setting both `separateDebugInfo` and one of `allowedReferences`, `allowedRequisites`, `disallowedReferences` or `disallowedRequisites` must now set `__structuredAttrs` to `true`. The effect of reference whitelisting or blacklisting will be disabled on the `debug` output created by `separateDebugInfo`.
 - `victoriametrics` no longer contains VictoriaLogs components. These have been separated into the new package `victorialogs`.
 - `gnome-keyring` no longer ships with an SSH agent because it has been deprecated upstream. You should use `gcr_4` instead, which provides the same features. More information on why this was done can be found on [the relevant GCR upstream PR](https://gitlab.gnome.org/GNOME/gcr/-/merge_requests/67).
@@ -54,6 +56,19 @@
 - `gramps` has been updated to 6.0.0
   Upstream recommends [backing up your Family Trees](https://gramps-project.org/wiki/index.php/Gramps_6.0_Wiki_Manual_-_Manage_Family_Trees#Backing_up_a_Family_Tree) before upgrading.
+- `meta.mainProgram`: Changing this `meta` entry can now trigger a package rebuild, because it is used to determine the `NIX_MAIN_PROGRAM` environment variable.
+- `versionCheckHook`: Packages that previously relied solely on `pname` to locate the program used for the version check, but have a differing `meta.mainProgram` entry, might now fail.
+- The debug outputs produced by `separateDebugInfo = true;` now contain symlinks mapping build-ids to the original source and ELF file.
+  Specifically, if `$out/bin/ninja` has build-id `483bd7f7229bdb06462222e1e353e4f37e15c293`, then
+  * `$debug/lib/debug/.build-id/48/3bd7f7229bdb06462222e1e353e4f37e15c293.executable` is a symlink to `$out/bin/ninja`
+  * `$debug/lib/debug/.build-id/48/3bd7f7229bdb06462222e1e353e4f37e15c293.source` is a symlink to the value of `$src` during the build
+  * `$debug/lib/debug/.build-id/48/3bd7f7229bdb06462222e1e353e4f37e15c293.sourceoverlay` is a symlink to a directory with the same structure as the expanded `$sourceRoot`, containing only copies of the files that were patched during the build
+  * `$debug/lib/debug/.build-id/48/3bd7f7229bdb06462222e1e353e4f37e15c293.debug` is the file containing the debug symbols (as before).
 ## Nixpkgs Library {#sec-nixpkgs-release-25.11-lib}
 <!-- To avoid merge conflicts, consider adding your item at an arbitrary place in the list instead. -->
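
The `separateDebugInfo` item above implies a package shape like the following minimal sketch (the package and the disallowed requisite are hypothetical):

```nix
stdenv.mkDerivation {
  pname = "example";
  version = "1.0";
  src = ./.;

  separateDebugInfo = true; # produces the $debug output described above

  # Combining separateDebugInfo with reference allow/deny lists now
  # requires structured attrs; the lists are not enforced on $debug.
  __structuredAttrs = true;
  disallowedRequisites = [ stdenv.cc.cc ]; # illustrative deny-list entry
}
```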

View File

@@ -1415,7 +1415,7 @@ This setup hook checks for, reports, and (by default) fails builds when "broken"
 This hook can be disabled by setting `dontCheckForBrokenSymlinks`.
 ::: {.note}
-The hook only considers symlinks with targets inside the Nix store.
+The hook only considers symlinks with targets inside the Nix store or the $TMPDIR directory (typically /nix/store and /build in the builder environment, the latter being where the build is executed).
 :::
 ::: {.note}

View File

@@ -8868,6 +8868,12 @@
     github = "fzakaria";
     githubId = 605070;
   };
+  fzdslr = {
+    name = "FZDSLR";
+    email = "fzdslr@outlook.com";
+    github = "fzdslr";
+    githubId = 62922415;
+  };
   gabesoft = {
     email = "gabesoft@gmail.com";
     github = "gabesoft";

View File

@ -1,5 +1,5 @@
#! /usr/bin/env nix-shell #! /usr/bin/env nix-shell
#! nix-shell -i bash -p coreutils haskellPackages.cabal2nix-unstable git nix nixfmt-rfc-style -I nixpkgs=. #! nix-shell -i bash -p coreutils haskellPackages.cabal2nix-unstable git nixfmt-rfc-style -I nixpkgs=.
set -euo pipefail set -euo pipefail

View File

@ -1,5 +1,5 @@
#! /usr/bin/env nix-shell #! /usr/bin/env nix-shell
#! nix-shell -i bash -p coreutils jq nix -I nixpkgs=. #! nix-shell -i bash -p coreutils jq -I nixpkgs=.
set -euo pipefail set -euo pipefail

View File

@ -1,5 +1,5 @@
#! /usr/bin/env nix-shell #! /usr/bin/env nix-shell
#! nix-shell -i bash -p nix curl jq git gnused -I nixpkgs=. #! nix-shell -i bash -p curl jq git gnused -I nixpkgs=.
# See regenerate-hackage-packages.sh for details on the purpose of this script. # See regenerate-hackage-packages.sh for details on the purpose of this script.

View File

@ -1,5 +1,5 @@
#! /usr/bin/env nix-shell #! /usr/bin/env nix-shell
#! nix-shell -i bash -p nix curl jq git gnused gnugrep -I nixpkgs=. #! nix-shell -i bash -p curl jq git gnused gnugrep -I nixpkgs=.
# shellcheck shell=bash # shellcheck shell=bash
set -eu -o pipefail set -eu -o pipefail

View File

@ -1,5 +1,5 @@
#! /usr/bin/env nix-shell #! /usr/bin/env nix-shell
#! nix-shell -i bash -p nix curl gnused -I nixpkgs=. #! nix-shell -i bash -p curl gnused -I nixpkgs=.
# On Hackage every package description shows a category "Distributions" which # On Hackage every package description shows a category "Distributions" which
# lists a "NixOS" version. # lists a "NixOS" version.

View File

@@ -68,6 +68,8 @@
 - The Perl implementation of the `switch-to-configuration` program is removed. All switchable systems now use the Rust rewrite. Any prior usage of `system.switch.enableNg` must now be removed. If you have any outstanding issues with the new implementation, please open an issue on GitHub.
+- The `no-broken-symlink` build hook now also fails builds whose output derivation contains links to $TMPDIR (typically /build, which contains the build directory).
 - The `services.polipo` module has been removed as `polipo` is unmaintained and archived upstream.
 - The Pocket ID module ([`services.pocket-id`][#opt-services.pocket-id.enable]) and package (`pocket-id`) have been updated to 1.0.0. Some environment variables have been changed or removed, see the [migration guide](https://pocket-id.org/docs/setup/migrate-to-v1/).
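
A hedged sketch of the opt-out for packages that intentionally keep links into the build directory (the attribute is the hook's documented disable flag; the package is hypothetical):

```nix
stdenv.mkDerivation {
  pname = "example";
  version = "1.0";
  src = ./.;

  # Skip the broken-symlink check, which would otherwise fail the build
  # for symlinks pointing into $TMPDIR (typically /build).
  dontCheckForBrokenSymlinks = true;
}
```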

View File

@@ -104,7 +104,7 @@ in
       path = with pkgs; [
         procps
         openssh
-        nettools
+        net-tools
       ];
       description = "spark master service.";
       after = [ "network.target" ];
@@ -133,7 +133,7 @@ in
       path = with pkgs; [
         procps
         openssh
-        nettools
+        net-tools
         rsync
       ];
       description = "spark master service.";

View File

@@ -424,7 +424,7 @@ in
       ];
       path = [
         hydra-package
-        pkgs.nettools
+        pkgs.net-tools
         pkgs.openssh
         pkgs.bzip2
         config.nix.package
@@ -459,7 +459,7 @@ in
       ];
       path = with pkgs; [
         hydra-package
-        nettools
+        net-tools
         jq
       ];
       restartTriggers = [ hydraConf ];

View File

@@ -43,7 +43,7 @@ in
         rsync
         kmod
         gawk
-        nettools
+        net-tools
         util-linux
         profile-sync-daemon
       ];
@@ -69,7 +69,7 @@ in
         rsync
         kmod
         gawk
-        nettools
+        net-tools
         util-linux
         profile-sync-daemon
       ];

View File

@@ -45,7 +45,7 @@ let
   defaultsFile = pkgs.writeText "60-defaults.conf" ''
     # 01-system-paths.conf
     ${dirStanzas home}
-    ZM_PATH_ARP=${lib.getBin pkgs.nettools}/bin/arp
+    ZM_PATH_ARP=${lib.getBin pkgs.net-tools}/bin/arp
     ZM_PATH_LOGS=/var/log/${dirName}
     ZM_PATH_MAP=/dev/shm
     ZM_PATH_SOCKS=/run/${dirName}

View File

@@ -59,9 +59,9 @@ in
     extraPackages = mkOption {
       type = types.listOf types.package;
-      default = with pkgs; [ nettools ];
-      defaultText = literalExpression "with pkgs; [ nettools ]";
-      example = literalExpression "with pkgs; [ nettools mysql ]";
+      default = with pkgs; [ net-tools ];
+      defaultText = literalExpression "with pkgs; [ net-tools ]";
+      example = literalExpression "with pkgs; [ net-tools mysql ]";
       description = ''
         Packages to be added to the Zabbix {env}`PATH`.
         Typically used to add executables for scripts, but can be anything.

View File

@@ -91,11 +91,11 @@ in
     extraPackages = mkOption {
       type = types.listOf types.package;
       default = with pkgs; [
-        nettools
+        net-tools
         nmap
         traceroute
       ];
-      defaultText = literalExpression "[ nettools nmap traceroute ]";
+      defaultText = literalExpression "[ net-tools nmap traceroute ]";
       description = ''
         Packages to be added to the Zabbix {env}`PATH`.
         Typically used to add executables for scripts, but can be anything.

View File

@@ -88,11 +88,11 @@ in
     extraPackages = mkOption {
       type = types.listOf types.package;
       default = with pkgs; [
-        nettools
+        net-tools
         nmap
         traceroute
       ];
-      defaultText = literalExpression "[ nettools nmap traceroute ]";
+      defaultText = literalExpression "[ net-tools nmap traceroute ]";
       description = ''
         Packages to be added to the Zabbix {env}`PATH`.
         Typically used to add executables for scripts, but can be anything.

View File

@@ -165,7 +165,7 @@ in
         procps
         nssTools
         iptables
-        nettools
+        net-tools
       ];
       preStart = lib.optionalString cfg.disableRedirects ''
         # Disable send/receive redirects

View File

@@ -74,7 +74,7 @@ let
       path = [
         pkgs.iptables
         pkgs.iproute2
-        pkgs.nettools
+        pkgs.net-tools
       ];
       serviceConfig.ExecStart = "@${openvpn}/sbin/openvpn openvpn --suppress-timestamps --config ${configFile}";

View File

@@ -80,8 +80,8 @@
         builtins.storeDir
         "/etc/ssl"
         "/etc/static/ssl"
-        "${pkgs.nettools}/bin/route:/usr/bin/route"
-        "${pkgs.nettools}/bin/ifconfig:/usr/bin/ifconfig"
+        "${pkgs.net-tools}/bin/route:/usr/bin/route"
+        "${pkgs.net-tools}/bin/ifconfig:/usr/bin/ifconfig"
       ];
       BindPaths = [

View File

@@ -11,7 +11,7 @@ let
   [
     cloud-init
     iproute2
-    nettools
+    net-tools
     openssh
     shadow
     util-linux

View File

@@ -36,7 +36,7 @@ with lib;
     pkgs.which
     pkgs.openssl
     pkgs.xorg.xauth
-    pkgs.nettools
+    pkgs.net-tools
    pkgs.shadow
    pkgs.procps
    pkgs.util-linux

View File

@@ -144,7 +144,7 @@ with lib;
   systemd.services.digitalocean-set-hostname = mkIf (hostName == "") {
     path = [
       pkgs.curl
-      pkgs.nettools
+      pkgs.net-tools
     ];
     description = "Set hostname provided by Digitalocean";
     wantedBy = [ "network.target" ];

View File

@@ -23,7 +23,7 @@ in
   environment.systemPackages = with pkgs; [
     findutils
     iputils
-    nettools
+    net-tools
     netcat
     nfs-utils
     rsync

View File

@@ -351,7 +351,7 @@ in
     parted
     # for hostname
-    nettools
+    net-tools
     # for pidof
     procps
     # for useradd, usermod

View File

@@ -29,7 +29,7 @@ python3Packages.buildPythonApplication rec {
     pytest-mock
   ];

-  pytestFlagsArray = [ "tests/" ];
+  enabledTestPaths = [ "tests/" ];

   meta = {
     description = "Mopidy extension for playing music from Tidal";
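
Since this rename recurs across the Python packages below, a hedged migration sketch (the package is hypothetical; `enabledTestPaths` is the attribute the hunks introduce):

```nix
python3Packages.buildPythonApplication {
  pname = "example";
  version = "1.0";
  src = ./.;

  nativeCheckInputs = [ python3Packages.pytestCheckHook ];

  # Previously: pytestFlagsArray = [ "tests/" ];
  enabledTestPaths = [ "tests/" ]; # consumed by pytestCheckHook
}
```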

View File

@@ -21,11 +21,11 @@ assert withConplay -> !libOnly;
 stdenv.mkDerivation rec {
   pname = "${lib.optionalString libOnly "lib"}mpg123";
-  version = "1.32.10";
+  version = "1.33.0";

   src = fetchurl {
     url = "mirror://sourceforge/mpg123/mpg123-${version}.tar.bz2";
-    hash = "sha256-h7LBf+DJedPvOO7O/2Nis1sorIWJ+/GFS1vnXJq2VXw=";
+    hash = "sha256-IpDjrt5vTRY+GhdFIWWvM8qtS18JSPmUKc+i2Dhfqp0=";
   };

   outputs = [

View File

@ -1,6 +1,6 @@
{ lib, fetchFromGitHub }: { lib, fetchFromGitHub }:
rec { rec {
version = "9.1.1401"; version = "9.1.1475";
outputs = [ outputs = [
"out" "out"
@ -11,7 +11,7 @@ rec {
owner = "vim"; owner = "vim";
repo = "vim"; repo = "vim";
rev = "v${version}"; rev = "v${version}";
hash = "sha256-oYde6i5coECUzQQEMo0dvkOXFimKe4y2aGoV2nVLx58="; hash = "sha256-KKUzS0dS9K/jlfP+igyLX1Fwjb7Y5ZAzGLjqHvkA3bs=";
}; };
enableParallelBuilding = true; enableParallelBuilding = true;

View File

@@ -58,7 +58,7 @@ python3.pkgs.buildPythonApplication rec {
     umockdev
   ];

-  pytestFlagsArray = [
+  enabledTestPaths = [
     "test"
   ];

View File

@@ -134,7 +134,7 @@ python3.pkgs.buildPythonApplication rec {
     pycryptodomex
   ];

-  pytestFlagsArray = [ "tests" ];
+  enabledTestPaths = [ "tests" ];

   postCheck = ''
     $out/bin/electrum help >/dev/null

View File

@@ -159,7 +159,7 @@ python3.pkgs.buildPythonApplication {
   ];
   buildInputs = lib.optional stdenv.hostPlatform.isLinux qtwayland;

-  pytestFlagsArray = [ "electrum_ltc/tests" ];
+  enabledTestPaths = [ "electrum_ltc/tests" ];

   disabledTests = [
     "test_loop" # test tries to bind 127.0.0.1 causing permission error

View File

@@ -23,7 +23,7 @@ buildPythonApplication rec {
   build-system = [ setuptools ];
   nativeCheckInputs = [ pytestCheckHook ];

-  pytestFlagsArray = [ "test/test.py" ];
+  enabledTestPaths = [ "test/test.py" ];

   meta = {
     description = "Importer and exporter for MBTiles";

View File

@@ -10,7 +10,7 @@
   iptables,
   iputils,
   kmod,
-  nettools,
+  net-tools,
   procps,
   tcpdump,
   traceroute,
@@ -59,7 +59,7 @@ stdenv.mkDerivation rec {
     iptables
     iputils
     kmod
-    nettools
+    net-tools
     procps
     tcpdump
     traceroute

View File

@@ -32,7 +32,7 @@
   gst-plugins-good,
   gst-plugins-bad,
   gst-vaapi,
-  webrtc-audio-processing_1,
+  webrtc-audio-processing,
 }:

 stdenv.mkDerivation (finalAttrs: {
@@ -84,7 +84,7 @@ stdenv.mkDerivation (finalAttrs: {
     gst-plugins-good # contains rtpbin, required for VP9
     gst-plugins-bad # required for H264, MSDK
     gst-vaapi # required for VAAPI
-    webrtc-audio-processing_1
+    webrtc-audio-processing
   ];

   doCheck = true;

View File

@@ -30,7 +30,7 @@
   secp256k1,
   speex,
   udev,
-  webrtc-audio-processing,
+  webrtc-audio-processing_0_3,
   yaml-cpp,
   zlib,
@@ -220,7 +220,7 @@ stdenv.mkDerivation rec {
     secp256k1
     speex
     udev
-    webrtc-audio-processing
+    webrtc-audio-processing_0_3
     yaml-cpp
     zlib
   ];

View File

@@ -1,6 +1,8 @@
+diff --git a/desktop/qa/desktop_lib/test_desktop_lib.cxx b/desktop/qa/desktop_lib/test_desktop_lib.cxx
+index 231aea8d0f3c..214a23c82562 100644
 --- a/desktop/qa/desktop_lib/test_desktop_lib.cxx
 +++ b/desktop/qa/desktop_lib/test_desktop_lib.cxx
-@@ -595,6 +595,8 @@ void DesktopLOKTest::testGetFilterTypes()
+@@ -603,6 +603,8 @@ void DesktopLOKTest::testGetFilterTypes()
  void DesktopLOKTest::testSearchCalc()
  {
@@ -9,7 +11,7 @@
  LibLibreOffice_Impl aOffice;
  LibLODocument_Impl* pDocument = loadDoc("search.ods");
  pDocument->pClass->initializeForRendering(pDocument, nullptr);
-@@ -625,6 +627,8 @@ void DesktopLOKTest::testSearchCalc()
+@@ -633,6 +635,8 @@ void DesktopLOKTest::testSearchCalc()
  void DesktopLOKTest::testSearchAllNotificationsCalc()
  {
@@ -18,6 +20,8 @@
  LibLibreOffice_Impl aOffice;
  LibLODocument_Impl* pDocument = loadDoc("search.ods");
  pDocument->pClass->initializeForRendering(pDocument, nullptr);
+diff --git a/svgio/qa/cppunit/data/tdf160386.svg b/svgio/qa/cppunit/data/tdf160386.svg
+index 1644b0d15514..cf429508bcd4 100644
 --- a/svgio/qa/cppunit/data/tdf160386.svg
 +++ b/svgio/qa/cppunit/data/tdf160386.svg
 @@ -8,7 +8,6 @@
@@ -28,9 +32,11 @@
  <text systemLanguage="ru">Привет!</text>
  <text>☺</text>
  </switch>
+diff --git a/sw/qa/core/accessibilitycheck/AccessibilityCheckTest.cxx b/sw/qa/core/accessibilitycheck/AccessibilityCheckTest.cxx
+index d8093e57a4e8..c95a742a68d3 100644
 --- a/sw/qa/core/accessibilitycheck/AccessibilityCheckTest.cxx
 +++ b/sw/qa/core/accessibilitycheck/AccessibilityCheckTest.cxx
-@@ -361,6 +361,8 @@ void checkIssuePosition(std::shared_ptr<sfx::AccessibilityIssue> const& pIssue,
+@@ -422,6 +422,8 @@ void checkIssuePosition(std::shared_ptr<sfx::AccessibilityIssue> const& pIssue,
  CPPUNIT_TEST_FIXTURE(AccessibilityCheckTest, testOnlineNodeSplitAppend)
  {
@@ -39,9 +45,11 @@
  // Checks the a11y checker is setting the a11y issues to the nodes
  // correctly when splitting and appending nodes (through undo), which
  // happen on editing all the time.
+diff --git a/sw/qa/core/text/text.cxx b/sw/qa/core/text/text.cxx
+index b81146642bd4..2094b7ea9477 100644
 --- a/sw/qa/core/text/text.cxx
 +++ b/sw/qa/core/text/text.cxx
-@@ -1630,6 +1630,8 @@ CPPUNIT_TEST_FIXTURE(SwCoreTextTest, testParaUpperMarginFlyIntersect)
+@@ -1596,6 +1596,8 @@ CPPUNIT_TEST_FIXTURE(SwCoreTextTest, testParaUpperMarginFlyIntersect)
  CPPUNIT_TEST_FIXTURE(SwCoreTextTest, testTdf129810)
  {
@@ -50,9 +58,23 @@
  // Load the document.
  // The document embeds a subset of "Source Han Serif SC" so that it works
  // even when the font is not installed.
+diff --git a/sw/qa/extras/docbookexport/docbookexport.cxx b/sw/qa/extras/docbookexport/docbookexport.cxx
+index e7543d99577e..f44f92a47c7e 100644
+--- a/sw/qa/extras/docbookexport/docbookexport.cxx
++++ b/sw/qa/extras/docbookexport/docbookexport.cxx
+@@ -21,6 +21,7 @@ public:
+ CPPUNIT_TEST_FIXTURE(DocbookExportTest, testsimple)
+ {
++ return; // fails on latest libxml
+ createSwDoc("simple.docx");
+ save(mpFilter);
+ xmlDocUniquePtr pDoc = parseXml(maTempFile);
+diff --git a/sw/qa/extras/htmlimport/htmlimport.cxx b/sw/qa/extras/htmlimport/htmlimport.cxx
+index 6cf8f22647b9..12848713771b 100644
 --- a/sw/qa/extras/htmlimport/htmlimport.cxx
 +++ b/sw/qa/extras/htmlimport/htmlimport.cxx
-@@ -308,6 +308,8 @@ CPPUNIT_TEST_FIXTURE(HtmlImportTest, testTableBorder1px)
+@@ -297,6 +297,8 @@ CPPUNIT_TEST_FIXTURE(HtmlImportTest, testTableBorder1px)
  CPPUNIT_TEST_FIXTURE(HtmlImportTest, testOutlineLevel)
  {
@@ -61,9 +83,11 @@
  createSwWebDoc("outline-level.html");
  // This was 0, HTML imported into Writer lost the outline numbering for
  // Heading 1 styles.
+diff --git a/sw/qa/extras/layout/layout3.cxx b/sw/qa/extras/layout/layout3.cxx
+index e53d22c8fd1a..2f9decb0d931 100644
 --- a/sw/qa/extras/layout/layout3.cxx
 +++ b/sw/qa/extras/layout/layout3.cxx
-@@ -1038,6 +1038,8 @@ CPPUNIT_TEST_FIXTURE(SwLayoutWriter3, testTdf158658c)
+@@ -1388,6 +1388,8 @@ CPPUNIT_TEST_FIXTURE(SwLayoutWriter3, testTdf158658c)
  CPPUNIT_TEST_FIXTURE(SwLayoutWriter3, testTdf155177)
  {
@@ -72,6 +96,8 @@
  createSwDoc("tdf155177-1-min.odt");
  uno::Reference<beans::XPropertySet> xStyle(
+diff --git a/sw/qa/extras/odfimport/odfimport.cxx b/sw/qa/extras/odfimport/odfimport.cxx
+index 794b3bd16ed4..3feaadd7a20f 100644
 --- a/sw/qa/extras/odfimport/odfimport.cxx
 +++ b/sw/qa/extras/odfimport/odfimport.cxx
 @@ -602,6 +602,8 @@ CPPUNIT_TEST_FIXTURE(Test, testFdo56272)
@@ -83,18 +109,20 @@
  createSwDoc("incorrectsum.odt");
  Scheduler::ProcessEventsToIdle();
  uno::Reference<text::XTextTablesSupplier> xTablesSupplier(mxComponent, uno::UNO_QUERY);
+diff --git a/sw/qa/extras/tiledrendering/tiledrendering.cxx b/sw/qa/extras/tiledrendering/tiledrendering.cxx
+index 4ebc4be96149..85b8908e16b9 100644
 --- a/sw/qa/extras/tiledrendering/tiledrendering.cxx
 +++ b/sw/qa/extras/tiledrendering/tiledrendering.cxx
-@@ -2762,6 +2762,8 @@ CPPUNIT_TEST_FIXTURE(SwTiledRenderingTest, testDeleteNodeRedlineCallback)
+@@ -2538,6 +2538,8 @@ CPPUNIT_TEST_FIXTURE(SwTiledRenderingTest, testVisCursorInvalidation)
- CPPUNIT_TEST_FIXTURE(SwTiledRenderingTest, testVisCursorInvalidation)
+ CPPUNIT_TEST_FIXTURE(SwTiledRenderingTest, testDeselectCustomShape)
  {
 + return; // flaky on some backends?
 +
  SwXTextDocument* pXTextDocument = createDoc("dummy.fodt");
- ViewCallback aView1;
- int nView1 = SfxLokHelper::getView();
+ SwWrtShell* pWrtShell = getSwDocShell()->GetWrtShell();
+ SwShellCursor* pShellCursor = pWrtShell->getShellCursor(false);
-@@ -3048,6 +3050,8 @@ CPPUNIT_TEST_FIXTURE(SwTiledRenderingTest, testDoubleUnderlineAndStrikeOut)
+@@ -2745,6 +2747,8 @@ CPPUNIT_TEST_FIXTURE(SwTiledRenderingTest, testDoubleUnderlineAndStrikeOut)
  CPPUNIT_TEST_FIXTURE(SwTiledRenderingTest, testTdf43244_SpacesOnMargin)
  {
@@ -103,7 +131,7 @@
  // Load a document where the top left tile contains
  // paragraph and line break symbols with redlining.
  SwXTextDocument* pXTextDocument = createDoc("tdf43244_SpacesOnMargin.odt");
-@@ -4091,6 +4095,7 @@ CPPUNIT_TEST_FIXTURE(SwTiledRenderingTest, testRedlineTooltip)
+@@ -3786,6 +3790,7 @@ CPPUNIT_TEST_FIXTURE(SwTiledRenderingTest, testRedlineTooltip)
  // toggling Formatting Marks on/off for one view should have no effect on other views
  CPPUNIT_TEST_FIXTURE(SwTiledRenderingTest, testToggleFormattingMarks)
  {
@@ -111,13 +139,15 @@
  SwXTextDocument* pXTextDocument = createDoc();
  int nView1 = SfxLokHelper::getView();
+diff --git a/sw/qa/extras/uiwriter/uiwriter5.cxx b/sw/qa/extras/uiwriter/uiwriter5.cxx
+index e37df27fd817..937c12e8c4c5 100644
 --- a/sw/qa/extras/uiwriter/uiwriter5.cxx
 +++ b/sw/qa/extras/uiwriter/uiwriter5.cxx
-@@ -1571,6 +1571,7 @@ CPPUNIT_TEST_FIXTURE(SwUiWriterTest5, testDateFormFieldContentOperations)
+@@ -1549,6 +1549,7 @@ CPPUNIT_TEST_FIXTURE(SwUiWriterTest5, testDateFormFieldContentOperations)
  CPPUNIT_TEST_FIXTURE(SwUiWriterTest5, testDateFormFieldCurrentDateHandling)
  {
 + return; // flaky on KF6
  createSwDoc();
  SwDoc* pDoc = getSwDoc();
- CPPUNIT_ASSERT(pDoc);
+ IDocumentMarkAccess* pMarkAccess = pDoc->getIDocumentMarkAccess();

View File

@@ -59,7 +59,7 @@ python3.pkgs.buildPythonApplication rec {
     unittest-xml-reporting
   ];

-  pytestFlagsArray = [
+  enabledTestPaths = [
     "test"
   ];

View File

@@ -60,7 +60,7 @@ assert sendEmailSupport -> perlSupport;
 assert svnSupport -> perlSupport;

 let
-  version = "2.49.0";
+  version = "2.50.0";
   svn = subversionClient.override { perlBindings = perlSupport; };
   gitwebPerlLibs = with perlPackages; [
     CGI
@@ -89,11 +89,12 @@ stdenv.mkDerivation (finalAttrs: {
       }.tar.xz"
     else
       "https://www.kernel.org/pub/software/scm/git/git-${version}.tar.xz";
-    hash = "sha256-YYGQz1kLfp9sEfkfI7HSZ82Yw6szuFBBbYdY+LWoVig=";
+    hash = "sha256-3/PAAOQArOOmO4pvizt2uI7P3/1FBKBKukJINyzewEU=";
   };

   outputs = [ "out" ] ++ lib.optional withManual "doc";

   separateDebugInfo = true;
+  __structuredAttrs = true;

   hardeningDisable = [ "format" ];
@@ -116,7 +117,8 @@ stdenv.mkDerivation (finalAttrs: {
       # Fix references to gettext introduced by ./git-sh-i18n.patch
       substituteInPlace git-sh-i18n.sh \
         --subst-var-by gettext ${gettext}
+    ''
+    + lib.optionalString doInstallCheck ''
       # ensure we are using the correct shell when executing the test scripts
       patchShebangs t/*.sh
     ''
@@ -165,7 +167,7 @@ stdenv.mkDerivation (finalAttrs: {
   ];

   # required to support pthread_cancel()
-  NIX_LDFLAGS =
+  env.NIX_LDFLAGS =
     lib.optionalString (stdenv.cc.isGNU && stdenv.hostPlatform.libc == "glibc") "-lgcc_s"
     + lib.optionalString (stdenv.hostPlatform.isFreeBSD) "-lthr";
@@ -282,10 +284,6 @@ stdenv.mkDerivation (finalAttrs: {
   postInstall =
     ''
-      notSupported() {
-        unlink $1 || true
-      }
-
       # Set up the flags array for make in the same way as for the main install
       # phase from stdenv.
       local flagsArray=(
@@ -303,7 +301,6 @@ stdenv.mkDerivation (finalAttrs: {
       mkdir -p $out/share/git
       cp -a contrib $out/share/git/
       mkdir -p $out/share/bash-completion/completions
-      ln -s $out/share/git/contrib/completion/git-completion.bash $out/share/bash-completion/completions/git
       ln -s $out/share/git/contrib/completion/git-prompt.sh $out/share/bash-completion/completions/
       # only readme, developed in another repo
       rm -r contrib/hooks/multimail
@@ -382,8 +379,7 @@ stdenv.mkDerivation (finalAttrs: {
       ''
     else
       ''
-        # replace git-svn by notification script
-        notSupported $out/libexec/git-core/git-svn
+        rm $out/libexec/git-core/git-svn
       ''
   )
@@ -396,14 +392,13 @@ stdenv.mkDerivation (finalAttrs: {
       ''
     else
       ''
-        # replace git-send-email by notification script
-        notSupported $out/libexec/git-core/git-send-email
+        rm $out/libexec/git-core/git-send-email
       ''
   )
   + lib.optionalString withManual ''
     # Install man pages
-    make "''${flagsArray[@]}" cmd-list.made install install-html \
+    make "''${flagsArray[@]}" install install-html \
       -C Documentation
   ''
@@ -420,9 +415,8 @@ stdenv.mkDerivation (finalAttrs: {
       ''
     else
       ''
-        # Don't wrap Tcl/Tk, replace them by notification scripts
         for prog in bin/gitk libexec/git-core/git-gui; do
-          notSupported "$out/$prog"
+          rm "$out/$prog"
         done
       ''
   )
@@ -573,6 +567,8 @@ stdenv.mkDerivation (finalAttrs: {
       wmertens
       globin
       kashw2
+      me-and
+      philiptaron
     ];
     mainProgram = "git";
   };

View File

@@ -144,11 +144,11 @@
     + lib.optionalString nixosTestRunner "-for-vm-tests"
     + lib.optionalString toolsOnly "-utils"
     + lib.optionalString userOnly "-user";
-  version = "10.0.0";
+  version = "10.0.2";

   src = fetchurl {
     url = "https://download.qemu.org/qemu-${finalAttrs.version}.tar.xz";
-    hash = "sha256-IsB1YB/c+MeyZxqDnr3O8dTylz62c1JU/S4b0PMLOJY=";
+    hash = "sha256-73hvI5jLUYRgD2mu9NXWke/URXajz/QSbTjUxv7Id1k=";
   };

   depsBuildBuild =

View File

@@ -35,7 +35,7 @@
   alsa-lib,
   curl,
   libvpx,
-  nettools,
+  net-tools,
   dbus,
   replaceVars,
   gsoap,
@@ -263,7 +263,7 @@ stdenv.mkDerivation (finalAttrs: {
   ];

   postPatch = ''
-    sed -i -e 's|/sbin/ifconfig|${nettools}/bin/ifconfig|' \
+    sed -i -e 's|/sbin/ifconfig|${net-tools}/bin/ifconfig|' \
       src/VBox/HostDrivers/adpctl/VBoxNetAdpCtl.cpp
   '';

View File

@@ -20,7 +20,7 @@
   gobject-introspection,
   which,
   dbus,
-  nettools,
+  net-tools,
   git,
   doxygen,
   xmlto,
@@ -105,7 +105,7 @@ stdenv.mkDerivation rec {
     libstartup_notification
     libxdg_basedir
     lua
-    nettools
+    net-tools
     pango
     xcb-util-cursor
     xorg.libXau

View File

@@ -9,8 +9,8 @@ use JSON::PP;
 STDOUT->autoflush(1);

-$SIG{__WARN__} = sub { warn "warning: ", @_ };
-$SIG{__DIE__} = sub { die "error: ", @_ };
+$SIG{__WARN__} = sub { warn "pkgs.buildEnv warning: ", @_ };
+$SIG{__DIE__} = sub { die "pkgs.buildEnv error: ", @_ };

 my $out = $ENV{"out"};
 my $extraPrefix = $ENV{"extraPrefix"};
@@ -109,7 +109,7 @@ sub findFiles($relName, $target, $baseName, $ignoreCollisions, $checkCollisionCo
     # The store path must not be a file when not ignoreSingleFileOutputs
     if (-f $target && isStorePath $target) {
         if ($ignoreSingleFileOutputs) {
-            warn "The store path $target is a file and can't be merged into an environment using pkgs.buildEnv";
+            warn "The store path $target is a file and can't be merged into an environment using pkgs.buildEnv, ignoring it";
             return;
         } else {
             die "The store path $target is a file and can't be merged into an environment using pkgs.buildEnv!";
@@ -173,12 +173,12 @@ sub findFiles($relName, $target, $baseName, $ignoreCollisions, $checkCollisionCo
     my $oldTargetRef = prependDangling($oldTarget);

     if ($ignoreCollisions) {
-        warn "collision between $targetRef and $oldTargetRef\n" if $ignoreCollisions == 1;
+        warn "colliding subpath (ignored): $targetRef and $oldTargetRef\n" if $ignoreCollisions == 1;
         return;
     } elsif ($checkCollisionContents && checkCollision($oldTarget, $target)) {
         return;
     } else {
-        die "collision between $targetRef and $oldTargetRef\n";
+        die "two given paths contain a conflicting subpath:\n  $targetRef and\n  $oldTargetRef\nhint: this may be caused by two different versions of the same package in buildEnv's `paths` parameter\nhint: `pkgs.nix-diff` can be used to compare derivations\n";
     }
 }
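
For context, a minimal sketch of a `buildEnv` call that would hit the reworded collision error (the colliding pair is illustrative):

```nix
pkgs.buildEnv {
  name = "collision-demo";
  # Both packages install bin/python3, so building this environment
  # dies with the "conflicting subpath" error above...
  paths = [ pkgs.python311 pkgs.python312 ];
  # ...unless collisions are downgraded to the new warning:
  # ignoreCollisions = true;
}
```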

View File

@@ -18,21 +18,6 @@ if [[ -n "${hardeningEnableMap[fortify3]-}" ]]; then
   hardeningEnableMap["fortify"]=1
 fi

-# Remove unsupported flags.
-for flag in @hardening_unsupported_flags@; do
-  unset -v "hardeningEnableMap[$flag]"
-  # fortify being unsupported implies fortify3 is unsupported
-  if [[ "$flag" = 'fortify' ]] ; then
-    unset -v "hardeningEnableMap['fortify3']"
-  fi
-done
-
-# now make fortify and fortify3 mutually exclusive
-if [[ -n "${hardeningEnableMap[fortify3]-}" ]]; then
-  unset -v "hardeningEnableMap['fortify']"
-fi
-
 # strictflexarrays3 implies strictflexarrays1 enablement - make explicit before
 # we filter unsupported flags because unsupporting strictflexarrays3
 # doesn't mean we should unsupport strictflexarrays1 too
@@ -40,15 +25,26 @@ if [[ -n "${hardeningEnableMap[strictflexarrays3]-}" ]]; then
   hardeningEnableMap["strictflexarrays1"]=1
 fi

 # Remove unsupported flags.
 for flag in @hardening_unsupported_flags@; do
   unset -v "hardeningEnableMap[$flag]"
+  # fortify being unsupported implies fortify3 is unsupported
+  if [[ "$flag" = 'fortify' ]] ; then
+    unset -v "hardeningEnableMap['fortify3']"
+  fi
   # strictflexarrays1 being unsupported implies strictflexarrays3 is unsupported
   if [[ "$flag" = 'strictflexarrays1' ]] ; then
     unset -v "hardeningEnableMap['strictflexarrays3']"
   fi
 done

+# now make fortify and fortify3 mutually exclusive
+if [[ -n "${hardeningEnableMap[fortify3]-}" ]]; then
+  unset -v "hardeningEnableMap['fortify']"
+fi
+
 # now make strictflexarrays1 and strictflexarrays3 mutually exclusive
 if [[ -n "${hardeningEnableMap[strictflexarrays3]-}" ]]; then
   unset -v "hardeningEnableMap['strictflexarrays1']"
View File

@@ -3,17 +3,17 @@ const path = require('path')
 // This has to match the logic in pkgs/development/tools/yarn2nix-moretea/yarn2nix/lib/urlToName.js
 // so that fixup_yarn_lock produces the same paths
 const urlToName = url => {
   const isCodeloadGitTarballUrl = url.startsWith('https://codeload.github.com/') && url.includes('/tar.gz/')

   if (url.startsWith('file:')) {
     return url
   } else if (url.startsWith('git+') || isCodeloadGitTarballUrl) {
     return path.basename(url)
   } else {
     return url
       .replace(/https:\/\/(.)*(.com)\//g, '') // prevents having long directory names
       .replace(/[@/%:-]/g, '_') // replace @ and : and - and % characters with underscore
   }
 }

 module.exports = { urlToName };

View File

@@ -7,79 +7,79 @@ const lockfile = require('./yarnpkg-lockfile.js')
 const { urlToName } = require('./common.js')

 const fixupYarnLock = async (lockContents, verbose) => {
   const lockData = lockfile.parse(lockContents)

   const fixedData = Object.fromEntries(
     Object.entries(lockData.object)
       .map(([dep, pkg]) => {
         if (pkg.resolved === undefined) {
           console.warn(`no resolved URL for package ${dep}`)
           var maybeFile = dep.split("@", 2)[1]
           if (maybeFile.startsWith("file:")) {
             console.log(`Rewriting URL for local file dependency ${dep}`)
             pkg.resolved = maybeFile
           }
           return [dep, pkg]
         }
         const [ url, hash ] = pkg.resolved.split("#", 2)

         if (hash || url.startsWith("https://codeload.github.com/")) {
           if (verbose) console.log(`Removing integrity for git dependency ${dep}`)
           delete pkg.integrity
         }

         if (verbose) console.log(`Rewriting URL ${url} for dependency ${dep}`)
         pkg.resolved = urlToName(url)
         if (hash)
           pkg.resolved += `#${hash}`

         return [dep, pkg]
       })
   )

   if (verbose) console.log('Done')
   return fixedData
 }

 const showUsage = async () => {
   process.stderr.write(`
 syntax: fixup-yarn-lock [path to yarn.lock] [options]

 Options:
   -h --help         Show this help
   -v --verbose      Verbose output
 `)
   process.exit(1)
 }

 const main = async () => {
   const args = process.argv.slice(2)

   let next, lockFile, verbose
   while (next = args.shift()) {
     if (next == '--verbose' || next == '-v') {
       verbose = true
     } else if (next == '--help' || next == '-h') {
       showUsage()
     } else if (!lockFile) {
       lockFile = next
     } else {
       showUsage()
     }
   }

   let lockContents
   try {
     lockContents = await fs.promises.readFile(lockFile || 'yarn.lock', 'utf-8')
   } catch {
     showUsage()
   }

   const fixedData = await fixupYarnLock(lockContents, verbose)
   await fs.promises.writeFile(lockFile || 'yarn.lock', lockfile.stringify(fixedData))
 }

 main()
   .catch(e => {
     console.error(e)
     process.exit(1)
   })

View File

@ -15,155 +15,155 @@ const { urlToName } = require('./common.js')
const execFile = promisify(child_process.execFile) const execFile = promisify(child_process.execFile)
const exec = async (...args) => { const exec = async (...args) => {
const res = await execFile(...args) const res = await execFile(...args)
if (res.error) throw new Error(res.stderr) if (res.error) throw new Error(res.stderr)
return res return res
} }
const downloadFileHttps = (fileName, url, expectedHash, hashType = 'sha1') => { const downloadFileHttps = (fileName, url, expectedHash, hashType = 'sha1') => {
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
const get = (url, redirects = 0) => https.get(url, (res) => { const get = (url, redirects = 0) => https.get(url, (res) => {
if(redirects > 10) { if(redirects > 10) {
reject('Too many redirects!'); reject('Too many redirects!');
return; return;
} }
if(res.statusCode === 301 || res.statusCode === 302) { if(res.statusCode === 301 || res.statusCode === 302) {
return get(res.headers.location, redirects + 1) return get(res.headers.location, redirects + 1)
} }
const file = fs.createWriteStream(fileName) const file = fs.createWriteStream(fileName)
const hash = crypto.createHash(hashType) const hash = crypto.createHash(hashType)
res.pipe(file) res.pipe(file)
res.pipe(hash).setEncoding('hex') res.pipe(hash).setEncoding('hex')
res.on('end', () => { res.on('end', () => {
file.close() file.close()
const h = hash.read() const h = hash.read()
if (expectedHash === undefined){ if (expectedHash === undefined){
console.log(`Warning: lockfile url ${url} doesn't end in "#<hash>" to validate against. Downloaded file had hash ${h}.`); console.log(`Warning: lockfile url ${url} doesn't end in "#<hash>" to validate against. Downloaded file had hash ${h}.`);
} else if (h != expectedHash) return reject(new Error(`hash mismatch, expected ${expectedHash}, got ${h} for ${url}`)) } else if (h != expectedHash) return reject(new Error(`hash mismatch, expected ${expectedHash}, got ${h} for ${url}`))
resolve() resolve()
}) })
res.on('error', e => reject(e)) res.on('error', e => reject(e))
}) })
get(url) get(url)
}) })
} }
const downloadGit = async (fileName, url, rev) => { const downloadGit = async (fileName, url, rev) => {
await exec('nix-prefetch-git', [ await exec('nix-prefetch-git', [
'--out', fileName + '.tmp', '--out', fileName + '.tmp',
'--url', url, '--url', url,
'--rev', rev, '--rev', rev,
'--builder' '--builder'
]) ])
await exec('tar', [ await exec('tar', [
// hopefully make it reproducible across runs and systems // hopefully make it reproducible across runs and systems
'--owner=0', '--group=0', '--numeric-owner', '--format=gnu', '--sort=name', '--mtime=@1', '--owner=0', '--group=0', '--numeric-owner', '--format=gnu', '--sort=name', '--mtime=@1',
// Set u+w because tar-fs can't unpack archives with read-only dirs: https://github.com/mafintosh/tar-fs/issues/79 // Set u+w because tar-fs can't unpack archives with read-only dirs: https://github.com/mafintosh/tar-fs/issues/79
'--mode', 'u+w', '--mode', 'u+w',
'-C', fileName + '.tmp', '-C', fileName + '.tmp',
'-cf', fileName, '.' '-cf', fileName, '.'
]) ])
await exec('rm', [ '-rf', fileName + '.tmp', ]) await exec('rm', [ '-rf', fileName + '.tmp', ])
} }
const isGitUrl = pattern => { const isGitUrl = pattern => {
// https://github.com/yarnpkg/yarn/blob/3119382885ea373d3c13d6a846de743eca8c914b/src/resolvers/exotics/git-resolver.js#L15-L47 // https://github.com/yarnpkg/yarn/blob/3119382885ea373d3c13d6a846de743eca8c914b/src/resolvers/exotics/git-resolver.js#L15-L47
const GIT_HOSTS = ['github.com', 'gitlab.com', 'bitbucket.com', 'bitbucket.org'] const GIT_HOSTS = ['github.com', 'gitlab.com', 'bitbucket.com', 'bitbucket.org']
const GIT_PATTERN_MATCHERS = [/^git:/, /^git\+.+:/, /^ssh:/, /^https?:.+\.git$/, /^https?:.+\.git#.+/] const GIT_PATTERN_MATCHERS = [/^git:/, /^git\+.+:/, /^ssh:/, /^https?:.+\.git$/, /^https?:.+\.git#.+/]
for (const matcher of GIT_PATTERN_MATCHERS) if (matcher.test(pattern)) return true for (const matcher of GIT_PATTERN_MATCHERS) if (matcher.test(pattern)) return true
const {hostname, path} = url.parse(pattern) const {hostname, path} = url.parse(pattern)
if (hostname && path && GIT_HOSTS.indexOf(hostname) >= 0 if (hostname && path && GIT_HOSTS.indexOf(hostname) >= 0
// only if dependency is pointing to a git repo, // only if dependency is pointing to a git repo,
// e.g. facebook/flow and not file in a git repo facebook/flow/archive/v1.0.0.tar.gz // e.g. facebook/flow and not file in a git repo facebook/flow/archive/v1.0.0.tar.gz
&& path.split('/').filter(p => !!p).length === 2 && path.split('/').filter(p => !!p).length === 2
) return true ) return true
return false return false
} }
const downloadPkg = (pkg, verbose) => { const downloadPkg = (pkg, verbose) => {
for (let marker of ['@file:', '@link:']) { for (let marker of ['@file:', '@link:']) {
const split = pkg.key.split(marker) const split = pkg.key.split(marker)
if (split.length == 2) { if (split.length == 2) {
console.info(`ignoring lockfile entry "${split[0]}" which points at path "${split[1]}"`) console.info(`ignoring lockfile entry "${split[0]}" which points at path "${split[1]}"`)
return return
} else if (split.length > 2) { } else if (split.length > 2) {
throw new Error(`The lockfile entry key "${pkg.key}" contains "${marker}" more than once. Processing is not implemented.`) throw new Error(`The lockfile entry key "${pkg.key}" contains "${marker}" more than once. Processing is not implemented.`)
} }
} }
if (pkg.resolved === undefined) { if (pkg.resolved === undefined) {
throw new Error(`The lockfile entry with key "${pkg.key}" cannot be downloaded because it is missing the "resolved" attribute, which should contain the URL to download from. The lockfile might be invalid.`) throw new Error(`The lockfile entry with key "${pkg.key}" cannot be downloaded because it is missing the "resolved" attribute, which should contain the URL to download from. The lockfile might be invalid.`)
} }
const [ url, hash ] = pkg.resolved.split('#') const [ url, hash ] = pkg.resolved.split('#')
if (verbose) console.log('downloading ' + url) if (verbose) console.log('downloading ' + url)
const fileName = urlToName(url) const fileName = urlToName(url)
const s = url.split('/') const s = url.split('/')
if (url.startsWith('https://codeload.github.com/') && url.includes('/tar.gz/')) { if (url.startsWith('https://codeload.github.com/') && url.includes('/tar.gz/')) {
return downloadGit(fileName, `https://github.com/${s[3]}/${s[4]}.git`, s[s.length-1]) return downloadGit(fileName, `https://github.com/${s[3]}/${s[4]}.git`, s[s.length-1])
} else if (url.startsWith('https://github.com/') && url.endsWith('.tar.gz') && } else if (url.startsWith('https://github.com/') && url.endsWith('.tar.gz') &&
( (
s.length <= 5 || // https://github.com/owner/repo.tgz#feedface... s.length <= 5 || // https://github.com/owner/repo.tgz#feedface...
s[5] == "archive" // https://github.com/owner/repo/archive/refs/tags/v0.220.1.tar.gz s[5] == "archive" // https://github.com/owner/repo/archive/refs/tags/v0.220.1.tar.gz
)) { )) {
return downloadGit(fileName, `https://github.com/${s[3]}/${s[4]}.git`, s[s.length-1].replace(/.tar.gz$/, '')) return downloadGit(fileName, `https://github.com/${s[3]}/${s[4]}.git`, s[s.length-1].replace(/.tar.gz$/, ''))
} else if (isGitUrl(url)) { } else if (isGitUrl(url)) {
return downloadGit(fileName, url.replace(/^git\+/, ''), hash) return downloadGit(fileName, url.replace(/^git\+/, ''), hash)
} else if (url.startsWith('https://')) { } else if (url.startsWith('https://')) {
if (typeof pkg.integrity === 'string' || pkg.integrity instanceof String) { if (typeof pkg.integrity === 'string' || pkg.integrity instanceof String) {
const [ type, checksum ] = pkg.integrity.split('-') const [ type, checksum ] = pkg.integrity.split('-')
return downloadFileHttps(fileName, url, Buffer.from(checksum, 'base64').toString('hex'), type) return downloadFileHttps(fileName, url, Buffer.from(checksum, 'base64').toString('hex'), type)
} }
return downloadFileHttps(fileName, url, hash) return downloadFileHttps(fileName, url, hash)
} else if (url.startsWith('file:')) { } else if (url.startsWith('file:')) {
console.warn(`ignoring unsupported file:path url "${url}"`) console.warn(`ignoring unsupported file:path url "${url}"`)
} else { } else {
throw new Error('don\'t know how to download "' + url + '"') throw new Error('don\'t know how to download "' + url + '"')
} }
} }
const performParallel = tasks => { const performParallel = tasks => {
const worker = async () => { const worker = async () => {
while (tasks.length > 0) await tasks.shift()() while (tasks.length > 0) await tasks.shift()()
} }
const workers = [] const workers = []
for (let i = 0; i < 4; i++) { for (let i = 0; i < 4; i++) {
workers.push(worker()) workers.push(worker())
} }
return Promise.all(workers) return Promise.all(workers)
} }
// This could be implemented using [`Map.groupBy`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Map/groupBy), // This could be implemented using [`Map.groupBy`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Map/groupBy),
// but that method is only supported starting with Node 21 // but that method is only supported starting with Node 21
const uniqueBy = (arr, callback) => { const uniqueBy = (arr, callback) => {
const map = new Map() const map = new Map()
for (const elem of arr) { for (const elem of arr) {
map.set(callback(elem), elem) map.set(callback(elem), elem)
} }
return [...map.values()] return [...map.values()]
} }
const prefetchYarnDeps = async (lockContents, verbose) => { const prefetchYarnDeps = async (lockContents, verbose) => {
const lockData = lockfile.parse(lockContents) const lockData = lockfile.parse(lockContents)
await performParallel( await performParallel(
uniqueBy(Object.entries(lockData.object), ([_, value]) => value.resolved) uniqueBy(Object.entries(lockData.object), ([_, value]) => value.resolved)
.map(([key, value]) => () => downloadPkg({ key, ...value }, verbose)) .map(([key, value]) => () => downloadPkg({ key, ...value }, verbose))
) )
await fs.promises.writeFile('yarn.lock', lockContents) await fs.promises.writeFile('yarn.lock', lockContents)
if (verbose) console.log('Done') if (verbose) console.log('Done')
} }
const showUsage = async () => { const showUsage = async () => {
process.stderr.write(` process.stderr.write(`
syntax: prefetch-yarn-deps [path to yarn.lock] [options] syntax: prefetch-yarn-deps [path to yarn.lock] [options]
Options: Options:
@ -171,50 +171,50 @@ Options:
-v --verbose Verbose output -v --verbose Verbose output
--builder Only perform the download to current directory, then exit --builder Only perform the download to current directory, then exit
`) `)
process.exit(1) process.exit(1)
} }
const main = async () => { const main = async () => {
const args = process.argv.slice(2) const args = process.argv.slice(2)
let next, lockFile, verbose, isBuilder let next, lockFile, verbose, isBuilder
while (next = args.shift()) { while (next = args.shift()) {
if (next == '--builder') { if (next == '--builder') {
isBuilder = true isBuilder = true
} else if (next == '--verbose' || next == '-v') { } else if (next == '--verbose' || next == '-v') {
verbose = true verbose = true
} else if (next == '--help' || next == '-h') { } else if (next == '--help' || next == '-h') {
showUsage() showUsage()
} else if (!lockFile) { } else if (!lockFile) {
lockFile = next lockFile = next
} else { } else {
showUsage() showUsage()
} }
} }
let lockContents let lockContents
try { try {
lockContents = await fs.promises.readFile(lockFile || 'yarn.lock', 'utf-8') lockContents = await fs.promises.readFile(lockFile || 'yarn.lock', 'utf-8')
} catch { } catch {
showUsage() showUsage()
} }
if (isBuilder) { if (isBuilder) {
await prefetchYarnDeps(lockContents, verbose) await prefetchYarnDeps(lockContents, verbose)
} else { } else {
const { stdout: tmpDir } = await exec('mktemp', [ '-d' ]) const { stdout: tmpDir } = await exec('mktemp', [ '-d' ])
try { try {
process.chdir(tmpDir.trim()) process.chdir(tmpDir.trim())
await prefetchYarnDeps(lockContents, verbose) await prefetchYarnDeps(lockContents, verbose)
const { stdout: hash } = await exec('nix-hash', [ '--type', 'sha256', '--base32', tmpDir.trim() ]) const { stdout: hash } = await exec('nix-hash', [ '--type', 'sha256', '--base32', tmpDir.trim() ])
console.log(hash) console.log(hash)
} finally { } finally {
await exec('rm', [ '-rf', tmpDir.trim() ]) await exec('rm', [ '-rf', tmpDir.trim() ])
} }
} }
} }
main() main()
.catch(e => { .catch(e => {
console.error(e) console.error(e)
process.exit(1) process.exit(1)
}) })
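Taken together, the non---builder branch of main() above performs the equivalent of the following shell steps; a minimal sketch of the same flow, assuming prefetch-yarn-deps and nix-hash are on PATH and a yarn.lock exists in the current directory:

    tmp=$(mktemp -d)                              # scratch directory for the downloads
    ( cd "$tmp" && prefetch-yarn-deps "$OLDPWD/yarn.lock" --builder )
    nix-hash --type sha256 --base32 "$tmp"        # the fixed-output hash main() prints
    rm -rf "$tmp"                                 # mirrors the finally-block cleanup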
@ -22,6 +22,7 @@ lib.extendMkDerivation {
"depsExtraArgs" "depsExtraArgs"
"cargoUpdateHook" "cargoUpdateHook"
"cargoLock" "cargoLock"
"useFetchCargoVendor"
]; ];
extendDrvArgs = extendDrvArgs =
@ -91,13 +91,21 @@
lib.optionalString (stdenv.hostPlatform.config != stdenv.targetPlatform.config) '' lib.optionalString (stdenv.hostPlatform.config != stdenv.targetPlatform.config) ''
[target."${stdenv.targetPlatform.rust.rustcTarget}"] [target."${stdenv.targetPlatform.rust.rustcTarget}"]
"linker" = "${pkgsTargetTarget.stdenv.cc}/bin/${pkgsTargetTarget.stdenv.cc.targetPrefix}cc" "linker" = "${pkgsTargetTarget.stdenv.cc}/bin/${pkgsTargetTarget.stdenv.cc.targetPrefix}cc"
"rustflags" = [ "-C", "target-feature=${ "rustflags" = [ ${
if pkgsTargetTarget.stdenv.targetPlatform.isStatic then "+" else "-" lib.concatStringsSep ", " (
}crt-static" ] [
''"-Ctarget-feature=${if stdenv.targetPlatform.isStatic then "+" else "-"}crt-static"''
]
++ lib.optional (!stdenv.targetPlatform.isx86_32) ''"-Cforce-frame-pointers=yes"''
)
} ]
'' ''
+ '' + ''
[target."${stdenv.hostPlatform.rust.rustcTarget}"] [target."${stdenv.hostPlatform.rust.rustcTarget}"]
"linker" = "${stdenv.cc}/bin/${stdenv.cc.targetPrefix}cc" "linker" = "${stdenv.cc}/bin/${stdenv.cc.targetPrefix}cc"
"rustflags" = [ ${
lib.optionalString (!stdenv.hostPlatform.isx86_32) ''"-Cforce-frame-pointers=yes"''
} ]
''; '';
}; };
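For a non-static host that is not x86_32, the host half of that Nix fragment renders to roughly the TOML below. This is a sketch only: the target triple and store path are illustrative, the destination file is hypothetical (where the hook ultimately writes the config is outside this hunk), and only the cross-compilation half additionally emits the crt-static target-feature flag:

    cat >> "$CARGO_HOME/config.toml" <<'EOF'
    [target."x86_64-unknown-linux-gnu"]
    "linker" = "/nix/store/xxxxxxxx-gcc-wrapper/bin/cc"
    "rustflags" = [ "-Cforce-frame-pointers=yes" ]
    EOF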
@ -15,27 +15,54 @@ auditTmpdir() {
echo "checking for references to $TMPDIR/ in $dir..." echo "checking for references to $TMPDIR/ in $dir..."
_processFile() { local tmpdir elf_fifo script_fifo
local file="$1" tmpdir="$(mktemp -d)"
if isELF "$file"; then elf_fifo="$tmpdir/elf"
if { printf :; patchelf --print-rpath "$file"; } | grep -q -F ":$TMPDIR/"; then script_fifo="$tmpdir/script"
echo "RPATH of binary $file contains a forbidden reference to $TMPDIR/" mkfifo "$elf_fifo" "$script_fifo"
exit 1
fi # Classifier: identify ELF and script files
elif isScript "$file"; then (
filename=${i##*/} find "$dir" -type f -not -path '*/.build-id/*' -print0 \
dir=${i%/*} | while IFS= read -r -d $'\0' file; do
if [ -e "$dir/.$filename-wrapped" ]; then if isELF "$file"; then
if grep -q -F "$TMPDIR/" "$file"; then printf '%s\0' "$file" >&3
echo "wrapper script $file contains a forbidden reference to $TMPDIR/" elif isScript "$file"; then
exit 1 filename=${file##*/}
dir=${file%/*}
if [ -e "$dir/.$filename-wrapped" ]; then
printf '%s\0' "$file" >&4
fi fi
fi fi
fi done
} exec 3>&- 4>&-
) 3> "$elf_fifo" 4> "$script_fifo" &
find "$dir" -type f -not -path '*/.build-id/*' -print0 \ # Handler: check RPATHs concurrently
| parallelMap _processFile (
xargs -0 -r -P "$NIX_BUILD_CORES" -n 1 sh -c '
if { printf :; patchelf --print-rpath "$1"; } | grep -q -F ":$TMPDIR/"; then
echo "RPATH of binary $1 contains a forbidden reference to $TMPDIR/"
exit 1
fi
' _ < "$elf_fifo"
) &
local pid_elf=$!
unset -f _processFile # Handler: check wrapper scripts concurrently
local pid_script
(
xargs -0 -r -P "$NIX_BUILD_CORES" -n 1 sh -c '
if grep -q -F "$TMPDIR/" "$1"; then
echo "wrapper script $1 contains a forbidden reference to $TMPDIR/"
exit 1
fi
' _ < "$script_fifo"
) &
local pid_script=$!
wait "$pid_elf" || { echo "Some binaries contain forbidden references to $TMPDIR/. Check the error above!"; exit 1; }
wait "$pid_script" || { echo "Some scripts contain forbidden references to $TMPDIR/. Check the error above!"; exit 1; }
rm -r "$tmpdir"
} }
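The rewrite above replaces the parallelMap helper with a classifier/handler fan-out over named pipes. Stripped of the audit-specific logic, the pattern reduces to the sketch below (the grep check stands in for an arbitrary per-file handler; $dir is assumed to be set):

    tmpdir=$(mktemp -d)
    mkfifo "$tmpdir/q"
    find "$dir" -type f -print0 > "$tmpdir/q" &   # classifier: one NUL-delimited stream
    xargs -0 -r -P "${NIX_BUILD_CORES:-1}" -n 1 sh -c '
      if grep -q -F "$TMPDIR/" "$1"; then
        echo "forbidden reference in $1"
        exit 1
      fi
    ' _ < "$tmpdir/q"                             # handlers: N files checked in parallel
    wait                                          # reap the background classifier
    rm -r "$tmpdir"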
@ -95,6 +95,7 @@ autoPatchelfPostFixup() {
if [[ -z "${dontAutoPatchelf-}" ]]; then if [[ -z "${dontAutoPatchelf-}" ]]; then
autoPatchelf -- $(for output in $(getAllOutputNames); do autoPatchelf -- $(for output in $(getAllOutputNames); do
[ -e "${!output}" ] || continue [ -e "${!output}" ] || continue
[ "${output}" = debug ] && continue
echo "${!output}" echo "${!output}"
done) done)
fi fi
@ -51,6 +51,12 @@ noBrokenSymlinks() {
symlinkTarget="$(realpath --no-symlinks --canonicalize-missing "$pathParent/$symlinkTarget")" symlinkTarget="$(realpath --no-symlinks --canonicalize-missing "$pathParent/$symlinkTarget")"
fi fi
# use $TMPDIR like audit-tmpdir.sh
if [[ $symlinkTarget = "$TMPDIR"/* ]]; then
nixErrorLog "the symlink $path points to $TMPDIR directory: $symlinkTarget"
numDanglingSymlinks+=1
continue
fi
if [[ $symlinkTarget != "$NIX_STORE"/* ]]; then if [[ $symlinkTarget != "$NIX_STORE"/* ]]; then
nixInfoLog "symlink $path points outside the Nix store; ignoring" nixInfoLog "symlink $path points outside the Nix store; ignoring"
continue continue
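In isolation, the normalization that the new $TMPDIR check relies on looks like this (a sketch mirroring the pathParent logic above; $path is one symlink under inspection):

    symlinkTarget=$(readlink "$path")
    if [[ $symlinkTarget != /* ]]; then
      # relative target: anchor it at the symlink's own directory, without
      # following intermediate links and without requiring the target to exist
      symlinkTarget=$(realpath --no-symlinks --canonicalize-missing "$(dirname "$path")/$symlinkTarget")
    fi
    if [[ $symlinkTarget == "$TMPDIR"/* ]]; then
      echo "symlink $path points into \$TMPDIR: $symlinkTarget"
    fi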
@ -1,89 +0,0 @@
# Parallel execution utilities
# These functions provide a framework for parallel processing of jobs from stdin
# parallelRun - Execute a command in parallel across multiple cores
#
# Reads null-delimited jobs from stdin and distributes them across NIX_BUILD_CORES
# worker processes. Each worker executes the provided command, receiving jobs
# via stdin in null-delimited format.
#
# Usage: some_producer | parallelRun command [args...]
#
# The command receives jobs one at a time via stdin (null-delimited).
#
# Example:
# find . -name '*.log' -print0 | parallelRun sh -c '
# while read -r -d "" file; do gzip "$file"; done
# '
parallelRun() {
local pids
local lock
pids=()
lock=$(mktemp -u)
mkfifo "$lock"
for ((i=0; i<NIX_BUILD_CORES; i++)); do
{
exec 3<"$lock" # fd-3 = read side of lock
exec 4>"$lock" # fd-4 = write side of lock (push token back)
local job
while :; do
# Acquire the lock: blocks until a token can be read
read -r -n1 >/dev/null <&3
# read one job from stdin
# This is guarded by the lock above in order to prevent
# multiple workers from reading from stdin simultaneously.
if ! IFS= read -r -d '' job; then
# If stdin is closed, release lock and exit
printf 'x' >&4
break
fi
# Release the lock: write a token back to the lock FIFO
printf 'y' >&4
# Forward job to the worker process' stdin
printf '%s\0' "$job"
done \
| "$@" # launch the worker process
} &
pids[$i]=$!
done
# launch the workers by writing a token to the lock FIFO
printf 'a' >"$lock" &
# Wait for all workers to finish
for pid in "${pids[@]}"; do
if ! wait "$pid"; then
echo "A parallel job failed with exit code $? (check for errors above)" >&2
echo -e "Failing Command:\n $@" >&2
exit 1
fi
done
rm "$lock"
}
# parallelMap - Apply a shell function to each job in parallel
#
# A higher-level wrapper around parallelRun that applies a shell function to each
# null-delimited job from stdin. The shell function receives each job as its first
# argument.
#
# Usage: some_producer | parallelMap shell_function [additional_args...]
#
# The shell function is called as: shell_function job [additional_args...]
# for each job read from stdin.
#
# Example:
# compress() { gzip "$1" }
# find . -name '*.log' -print0 | parallelMap compress
parallelMap() {
_wrapper() {
while IFS= read -r -d '' job; do
"$@" "$job"
done
}
parallelRun _wrapper "$@"
unset -f _wrapper
}
@ -68,6 +68,31 @@ patchShebangs() {
return 0 return 0
fi fi
# like sponge from moreutils but in pure bash
_sponge() {
local content
local target
local restoreReadOnly
content=""
target="$1"
# Make file writable if it is read-only
if [[ ! -w "$target" ]]; then
chmod +w "$target"
restoreReadOnly=true
fi
while IFS= read -r line || [[ -n "$line" ]]; do
content+="$line"$'\n'
done
printf '%s' "$content" > "$target"
# Restore read-only if it was read-only before
if [[ -n "${restoreReadOnly:-}" ]]; then
chmod -w "$target"
fi
}
local f local f
while IFS= read -r -d $'\0' f; do while IFS= read -r -d $'\0' f; do
isScript "$f" || continue isScript "$f" || continue
@ -126,11 +151,14 @@ patchShebangs() {
# Preserve times, see: https://github.com/NixOS/nixpkgs/pull/33281 # Preserve times, see: https://github.com/NixOS/nixpkgs/pull/33281
timestamp=$(stat --printf "%y" "$f") timestamp=$(stat --printf "%y" "$f")
sed -i -e "1 s|.*|#\!$escapedInterpreterLine|" "$f" sed -e "1 s|.*|#\!$escapedInterpreterLine|" "$f" | _sponge "$f"
touch --date "$timestamp" "$f" touch --date "$timestamp" "$f"
fi fi
fi fi
done < <(find "$@" -type f -perm -0100 -print0) done < <(find "$@" -type f -perm -0100 -print0)
unset -f _sponge
} }
patchShebangsAuto () { patchShebangsAuto () {
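The reason for piping through _sponge rather than keeping sed -i: sed -i swaps in a freshly created file, whereas _sponge buffers the entire stream first and only then writes back through the existing inode, so hard links survive and read-only files are handled by the chmod dance above; because the write starts only after sed has finished reading, there is also no read/write race. The resulting idiom, sketched with an illustrative interpreter line:

    timestamp=$(stat --printf "%y" "$f")          # preserve mtime across the edit
    sed -e "1 s|.*|#!/usr/bin/env bash|" "$f" | _sponge "$f"
    touch --date "$timestamp" "$f"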
@ -3,18 +3,47 @@ export NIX_LDFLAGS+=" --compress-debug-sections=zlib"
export NIX_CFLAGS_COMPILE+=" -ggdb -Wa,--compress-debug-sections" export NIX_CFLAGS_COMPILE+=" -ggdb -Wa,--compress-debug-sections"
export NIX_RUSTFLAGS+=" -g -C strip=none" export NIX_RUSTFLAGS+=" -g -C strip=none"
cksumAlgo=sha256
fixupOutputHooks+=(_separateDebugInfo) fixupOutputHooks+=(_separateDebugInfo)
postUnpackHooks+=(_recordPristineSourceHashes)
_recordPristineSourceHashes() {
# shellcheck disable=2154
[ -e "$sourceRoot" ] || return 0
local checksumFileName=__nix_source_checksums
echo "separate-debug-info: recording checksum of source files for debug support..."
find "$sourceRoot" -type f -exec cksum -a "$cksumAlgo" '{}' \+ > "$checksumFileName"
recordedSourceChecksumsFileName="$(readlink -f "$checksumFileName")"
}
_separateDebugInfo() { _separateDebugInfo() {
# shellcheck disable=2154
[ -e "$prefix" ] || return 0 [ -e "$prefix" ] || return 0
local dst="${debug:-$out}" local debugOutput="${debug:-$out}"
if [ "$prefix" = "$dst" ]; then return 0; fi if [ "$prefix" = "$debugOutput" ]; then return 0; fi
# in case there is nothing to strip, don't fail the build # in case there is nothing to strip, don't fail the build
mkdir -p "$dst" mkdir -p "$debugOutput"
dst="$dst/lib/debug/.build-id" local dst="$debugOutput/lib/debug/.build-id"
local source
local sourceOverlay
# shellcheck disable=2154
if [ -e "$src" ]; then
source="$src"
if [ -n "${recordedSourceChecksumsFileName:-}" ]; then
sourceOverlay="$debugOutput/src/overlay"
else
sourceOverlay=""
fi
else
source=""
sourceOverlay=""
fi
# Find executables and dynamic libraries. # Find executables and dynamic libraries.
local i local i
@ -25,30 +54,64 @@ _separateDebugInfo() {
[ -z "${OBJCOPY:-}" ] && echo "_separateDebugInfo: '\$OBJCOPY' variable is empty, skipping." 1>&2 && break [ -z "${OBJCOPY:-}" ] && echo "_separateDebugInfo: '\$OBJCOPY' variable is empty, skipping." 1>&2 && break
# Extract the Build ID. FIXME: there's probably a cleaner way. # Extract the Build ID. FIXME: there's probably a cleaner way.
local id="$($READELF -n "$i" | sed 's/.*Build ID: \([0-9a-f]*\).*/\1/; t; d')" local id
id="$($READELF -n "$i" | sed 's/.*Build ID: \([0-9a-f]*\).*/\1/; t; d')"
if [ "${#id}" != 40 ]; then if [ "${#id}" != 40 ]; then
echo "could not find build ID of $i, skipping" >&2 echo "could not find build ID of $i, skipping" >&2
continue continue
fi fi
# Extract the debug info. # Extract the debug info.
echo "separating debug info from $i (build ID $id)" echo "separating debug info from $i (build ID $id)"
destDir=$dst/${id:0:2} local debuginfoDir="$dst/${id:0:2}"
destFile=$dst/${id:0:2}/${id:2}.debug local buildIdPrefix="$debuginfoDir/${id:2}"
local debuginfoFile="$buildIdPrefix.debug"
local executableSymlink="$buildIdPrefix.executable"
local sourceSymlink="$buildIdPrefix.source"
local sourceOverlaySymlink="$buildIdPrefix.sourceoverlay"
mkdir -p "$destDir" mkdir -p "$debuginfoDir"
if [ -f "$destFile" ]; then if [ -f "$debuginfoFile" ]; then
echo "separate-debug-info: warning: multiple files with build id $id found, overwriting" echo "separate-debug-info: warning: multiple files with build id $id found, overwriting"
fi fi
# This may fail, e.g. if the binary is for a different # This may fail, e.g. if the binary is for a different
# architecture than we're building for. (This happens with # architecture than we're building for. (This happens with
# firmware blobs in QEMU.) # firmware blobs in QEMU.)
if $OBJCOPY --only-keep-debug "$i" "$destFile"; then if $OBJCOPY --only-keep-debug "$i" "$debuginfoFile"; then
# If we succeeded, also create a symlink <original-name>.debug. # If we succeeded, also create a symlink <original-name>.debug.
ln -sfn ".build-id/${id:0:2}/${id:2}.debug" "$dst/../$(basename "$i")" ln -sfn "$debuginfoFile" "$dst/../$(basename "$i")"
# also create a symlink mapping the build-id to the original elf file and the source
# debuginfod protocol relies on it
ln -sfn "$i" "$executableSymlink"
if [ -n "$source" ]; then
ln -sfn "$source" "$sourceSymlink"
fi
if [ -n "$sourceOverlay" ]; then
# create it lazily
if [ ! -d "$sourceOverlay" ]; then
echo "separate-debug-info: copying patched source files to $sourceOverlay..."
mkdir -p "$sourceOverlay"
pushd "$(dirname "$recordedSourceChecksumsFileName")" || { echo "separate-debug-info: failed to cd parent directory of $recordedSourceChecksumsFileName"; return 1; }
while IFS= read -r -d $'\0' modifiedSourceFile; do
if [ -z "$modifiedSourceFile" ]; then
continue
fi
# this can happen with files with '\n' in their name
if [ ! -f "$modifiedSourceFile" ]; then
echo "separate-debug-info: cannot save modified source file $modifiedSourceFile: does not exist. ignoring"
continue
fi
mkdir -p "$sourceOverlay/$(dirname "$modifiedSourceFile")"
cp -v "$modifiedSourceFile" "$sourceOverlay/$modifiedSourceFile"
done < <(LANG=C cksum -a "$cksumAlgo" --check --ignore-missing --quiet "$recordedSourceChecksumsFileName" 2>&1 | sed -n -e 's/: FAILED$/\x00/p' | sed -z -e 's/^\n//')
popd || { echo "separate-debug-info: failed to popd" ; return 1; }
fi
ln -sfn "$sourceOverlay" "$sourceOverlaySymlink"
fi
else else
# If we failed, try to clean up unnecessary directories # If we failed, try to clean up unnecessary directories
rmdir -p "$dst/${id:0:2}" --ignore-fail-on-non-empty rmdir -p "$dst/${id:0:2}" --ignore-fail-on-non-empty
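For a binary whose build ID starts with ab, the hook now leaves the layout sketched below in the debug output; the per-build-id symlinks are what debuginfod-style consumers look up (names abbreviated, store paths omitted):

    $debug/lib/debug/.build-id/ab/cdef....debug            # extracted DWARF sections
    $debug/lib/debug/.build-id/ab/cdef....executable       # -> original ELF in $out
    $debug/lib/debug/.build-id/ab/cdef....source           # -> unpacked $src
    $debug/lib/debug/.build-id/ab/cdef....sourceoverlay    # -> $debug/src/overlay (patched files only)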
@ -1,23 +0,0 @@
{
stdenv,
}:
{
# test based on bootstrap tools to prevent rebuilding stdenv on each change
parallel =
(derivation {
name = "test-parallel-hook";
system = stdenv.system;
builder = "${stdenv.bootstrapTools}/bin/bash";
PATH = "${stdenv.bootstrapTools}/bin";
args = [
"-c"
''
. ${../parallel.sh}
. ${./test-parallel.sh}
''
];
})
// {
meta = { };
};
}
@ -1,146 +0,0 @@
export NIX_BUILD_CORES=4
echo "Testing worker distribution..."
# Generate 100 jobs to ensure all workers get some
for i in {1..100}; do
printf "job%d\0" $i
done | parallelRun sh -c '
while IFS= read -r -d "" job; do
sleep 0.05 # Simulate some work
echo "Worker $$ processed $job" >> /tmp/worker-output
done
'
# Check that all 4 workers were actually utilized
worker_count=$(sort /tmp/worker-output | cut -d" " -f2 | sort -u | wc -l)
if [ "$worker_count" -ne 4 ]; then
echo "ERROR: Expected exactly 4 workers, got $worker_count"
cat /tmp/worker-output
exit 1
fi
echo "SUCCESS: All 4 workers participated"
rm -f /tmp/worker-output
echo "Testing error propagation..."
# Test that errors from workers are propagated
if printf "job1\0job2\0job3\0" | parallelRun sh -c '
while IFS= read -r -d "" job; do
if [ "$job" = "job2" ]; then
echo "Worker failing on $job" >&2
exit 1
fi
echo "Worker processed $job"
done
' 2>/dev/null; then
echo "ERROR: Expected command to fail but it succeeded"
exit 1
else
echo "SUCCESS: Error was properly propagated"
fi
echo "Testing error message..."
error_output=$(printf "job1\0job2\0job3\0" | parallelRun sh -c '
while IFS= read -r -d "" job; do
if [ "$job" = "job2" ]; then
echo "Worker failing on $job" >&2
exit 1
fi
echo "Worker processed $job"
done
' 2>&1 || true)
if [[ "$error_output" != *"job failed"* ]]; then
echo "ERROR: Expected 'job failed' in error message, got: $error_output"
exit 1
fi
echo "SUCCESS: Error message was displayed"
echo "Testing Verify all jobs are processed when no errors occur..."
# Generate jobs and count processed ones
for i in {1..10}; do
printf "job%d\0" $i
done | parallelRun sh -c '
while IFS= read -r -d "" job; do
echo "$job" >> /tmp/processed-jobs
done
'
processed_count=$(wc -l < /tmp/processed-jobs)
if [ "$processed_count" -ne 10 ]; then
echo "ERROR: Expected 10 jobs processed, got $processed_count"
exit 1
fi
echo "SUCCESS: All 10 jobs were processed"
rm -f /tmp/processed-jobs
echo "All parallelRun tests passed!"
# ---------------------------------------------------------------------
echo "Testing parallelMap basic functionality..."
# Define a test function
testFunc() {
echo "Processing: $1" >> /tmp/map-output
}
# Test that parallelMap calls the function with each job
for i in {1..5}; do
printf "item%d\0" $i
done | parallelMap testFunc
# Check all jobs were processed
processed_map_count=$(wc -l < /tmp/map-output)
if [ "$processed_map_count" -ne 5 ]; then
echo "ERROR: Expected 5 items processed by parallelMap, got $processed_map_count"
exit 1
fi
echo "SUCCESS: parallelMap processed all 5 items"
rm -f /tmp/map-output
echo "Testing parallelMap error propagation..."
# Define a function that fails on specific input
failFunc() {
if [ "$1" = "item2" ]; then
echo "Function failing on $1" >&2
exit 1
fi
echo "Function processed $1"
}
# Test that errors are propagated
if printf "item1\0item2\0item3\0" | parallelMap failFunc 2>/dev/null; then
echo "ERROR: Expected parallelMap to fail but it succeeded"
exit 1
else
echo "SUCCESS: parallelMap error was properly propagated"
fi
echo "Testing parallelMap with additional arguments..."
# Define a function that uses additional arguments
argFunc() {
echo "$1: $2" >> /tmp/map-args-output
}
# Test with additional arguments
for i in {1..3}; do
printf "value%d\0" $i
done | parallelMap argFunc "PREFIX"
# Check output contains the prefix
if ! grep -q "PREFIX: value1" /tmp/map-args-output; then
echo "ERROR: parallelMap did not pass additional arguments correctly"
cat /tmp/map-args-output
exit 1
fi
echo "SUCCESS: parallelMap passed additional arguments correctly"
rm -f /tmp/map-args-output
echo "All parallelRun and parallelMap tests passed!"
touch $out
@ -671,8 +671,8 @@ rec {
throw "linkFarm entries must be either attrs or a list!"; throw "linkFarm entries must be either attrs or a list!";
linkCommands = lib.mapAttrsToList (name: path: '' linkCommands = lib.mapAttrsToList (name: path: ''
mkdir -p "$(dirname ${lib.escapeShellArg "${name}"})" mkdir -p -- "$(dirname -- ${lib.escapeShellArg "${name}"})"
ln -s ${lib.escapeShellArg "${path}"} ${lib.escapeShellArg "${name}"} ln -s -- ${lib.escapeShellArg "${path}"} ${lib.escapeShellArg "${name}"}
'') entries'; '') entries';
in in
runCommand name runCommand name
@ -39,6 +39,11 @@ let
linkFarmFromAttrs = linkFarm "linkFarmFromAttrs" { linkFarmFromAttrs = linkFarm "linkFarmFromAttrs" {
inherit foo hello; inherit foo hello;
}; };
linkFarmDelimitOptionList = linkFarm "linkFarmDelimitOptionList" {
"-foo" = foo;
"-hello" = hello;
};
in in
runCommand "test-linkFarm" { } '' runCommand "test-linkFarm" { } ''
function assertPathEquals() { function assertPathEquals() {
@ -61,5 +66,9 @@ runCommand "test-linkFarm" { } ''
assertPathEquals "${linkFarmFromAttrs}/foo" "${foo}" assertPathEquals "${linkFarmFromAttrs}/foo" "${foo}"
assertPathEquals "${linkFarmFromAttrs}/hello" "${hello}" assertPathEquals "${linkFarmFromAttrs}/hello" "${hello}"
assertPathEquals "${linkFarmDelimitOptionList}/-foo" "${foo}"
assertPathEquals "${linkFarmDelimitOptionList}/-hello" "${hello}"
touch $out touch $out
'' ''
@ -39,13 +39,10 @@ python3Packages.buildPythonApplication rec {
defusedxml defusedxml
]; ];
pytestFlagsArray = [ pytestFlags = [
"-W" "-Wignore::sphinx.deprecation.RemovedInSphinx90Warning"
"ignore::sphinx.deprecation.RemovedInSphinx90Warning" "--rootdir=src/ablog"
"--rootdir" "-Wignore::sphinx.deprecation.RemovedInSphinx90Warning" # Ignore ImportError
"src/ablog"
"-W"
"ignore::sphinx.deprecation.RemovedInSphinx90Warning" # Ignore ImportError
]; ];
# assert "post 1" not in html # assert "post 1" not in html
@ -10,13 +10,13 @@
stdenv.mkDerivation (finalAttrs: { stdenv.mkDerivation (finalAttrs: {
pname = "abseil-cpp"; pname = "abseil-cpp";
version = "20250127.1"; version = "20250512.1";
src = fetchFromGitHub { src = fetchFromGitHub {
owner = "abseil"; owner = "abseil";
repo = "abseil-cpp"; repo = "abseil-cpp";
tag = finalAttrs.version; tag = finalAttrs.version;
hash = "sha256-QTywqQCkyGFpdbtDBvUwz9bGXxbJs/qoFKF6zYAZUmQ="; hash = "sha256-eB7OqTO9Vwts9nYQ/Mdq0Ds4T1KgmmpYdzU09VPWOhk=";
}; };
cmakeFlags = cmakeFlags =
@ -177,6 +177,10 @@ stdenv.mkDerivation (finalAttrs: {
enableParallelChecking = false; enableParallelChecking = false;
enabledTestPaths = [
"../testing/adios2/python/Test*.py"
];
__darwinAllowLocalNetworking = finalAttrs.finalPackage.doCheck && mpiSupport; __darwinAllowLocalNetworking = finalAttrs.finalPackage.doCheck && mpiSupport;
nativeCheckInputs = [ nativeCheckInputs = [
@ -5,14 +5,14 @@
cmake, cmake,
}: }:
stdenv.mkDerivation { stdenv.mkDerivation (finalAttrs: {
pname = "aemu"; pname = "aemu";
version = "0.1.2"; version = "0.1.2";
src = fetchFromGitiles { src = fetchFromGitiles {
url = "https://android.googlesource.com/platform/hardware/google/aemu"; url = "https://android.googlesource.com/platform/hardware/google/aemu";
rev = "07ccc3ded3357e67e39104f18f35feaf8b3b6a0e"; rev = "v${finalAttrs.version}-aemu-release";
hash = "sha256-H3IU9aTFSzUAqYgrtHd4F18hbhZsbOJGC4K5JwMQOOw="; hash = "sha256-8UMm2dXdvmX6rUn4wQWuqI8bamwgf0x/5BQT+7atzjY=";
}; };
patches = [ patches = [
@ -48,4 +48,4 @@ stdenv.mkDerivation {
"aarch64-darwin" "aarch64-darwin"
]; ];
}; };
} })
@ -9,31 +9,20 @@
stdenv.mkDerivation (finalAttrs: { stdenv.mkDerivation (finalAttrs: {
pname = "alsa-lib"; pname = "alsa-lib";
version = "1.2.13"; version = "1.2.14";
src = fetchurl { src = fetchurl {
url = "mirror://alsa/lib/alsa-lib-${finalAttrs.version}.tar.bz2"; url = "mirror://alsa/lib/alsa-lib-${finalAttrs.version}.tar.bz2";
hash = "sha256-jE/zdVPL6JYY4Yfkx3n3GpuyqLJ7kfh+1AmHzJIz2PY="; hash = "sha256-vpyIoLNgQ2fddBZ6K3VKNeFC9nApKuR6L97yei7pejI=";
}; };
patches = patches = [
[ # Add a "libs" field to the syntax recognized in the /etc/asound.conf file.
# Add a "libs" field to the syntax recognized in the /etc/asound.conf file. # The nixos modules for pulseaudio, jack, and pipewire are leveraging this
# The nixos modules for pulseaudio, jack, and pipewire are leveraging this # "libs" field to declare locations for both native and 32bit plugins, in
# "libs" field to declare locations for both native and 32bit plugins, in # order to support apps with 32bit sound running on x86_64 architecture.
# order to support apps with 32bit sound running on x86_64 architecture. ./alsa-plugin-conf-multilib.patch
./alsa-plugin-conf-multilib.patch ];
]
++ lib.optional (stdenv.hostPlatform.useLLVM or false)
# Fixes version script under LLVM, should be fixed in the next update.
# Check if "pkgsLLVM.alsa-lib" builds on next version bump and remove this
# if it succeeds.
(
fetchurl {
url = "https://github.com/alsa-project/alsa-lib/commit/76edab4e595bd5f3f4c636cccc8d7976d3c519d6.patch";
hash = "sha256-WCOXfe0/PPZRMXdNa29Jn28S2r0PQ7iTsabsxZVSwnk=";
}
);
enableParallelBuilding = true; enableParallelBuilding = true;
@ -6,7 +6,7 @@
darwin, darwin,
fetchFromGitHub, fetchFromGitHub,
coreutils, coreutils,
nettools, net-tools,
util-linux, util-linux,
stdenv, stdenv,
dmidecode, dmidecode,
@ -92,7 +92,7 @@ buildGoModule rec {
substituteInPlace agent/platform/platform_unix.go \ substituteInPlace agent/platform/platform_unix.go \
--replace-fail "/usr/bin/uname" "${coreutils}/bin/uname" \ --replace-fail "/usr/bin/uname" "${coreutils}/bin/uname" \
--replace-fail '"/bin", "hostname"' '"${nettools}/bin/hostname"' \ --replace-fail '"/bin", "hostname"' '"${net-tools}/bin/hostname"' \
--replace-fail '"lsb_release"' '"${fake-lsb-release}/bin/lsb_release"' --replace-fail '"lsb_release"' '"${fake-lsb-release}/bin/lsb_release"'
substituteInPlace agent/session/shell/shell_unix.go \ substituteInPlace agent/session/shell/shell_unix.go \
@ -12,7 +12,7 @@ stdenv.mkDerivation rec {
owner = "GPUOpen-LibrariesAndSDKs"; owner = "GPUOpen-LibrariesAndSDKs";
repo = "AMF"; repo = "AMF";
tag = "v${version}"; tag = "v${version}";
sha256 = "sha256-u6gvdc1acemd01TO5EbuF3H7HkEJX4GUx73xCo71yPY="; sha256 = "sha256-0PgWEq+329/EhI0/CgPsCkJ4CiTsFe56w2O+AcjVUdc=";
}; };
installPhase = '' installPhase = ''
@ -290,7 +290,7 @@ buildPythonPackage rec {
airflow db reset -y airflow db reset -y
''; '';
pytestFlagsArray = [ enabledTestPaths = [
"tests/core/test_core.py" "tests/core/test_core.py"
]; ];
@ -59,9 +59,15 @@ stdenv.mkDerivation (finalAttrs: {
checkTarget = "tests"; checkTarget = "tests";
checkFlags = lib.optionals stdenv.hostPlatform.isMusl [
# equality tests are broken on musl due to different priority values
# https://gitlab.com/apparmor/apparmor/-/issues/513
"-o equality"
];
postCheck = "popd"; postCheck = "popd";
doCheck = stdenv.hostPlatform == stdenv.buildPlatform && !stdenv.hostPlatform.isMusl; doCheck = stdenv.hostPlatform == stdenv.buildPlatform;
checkInputs = [ checkInputs = [
bashInteractive bashInteractive
perl perl
@ -48,7 +48,7 @@ python3Packages.buildPythonPackage rec {
"pierky.arouteserver" "pierky.arouteserver"
]; ];
pytestFlagsArray = [ "tests/static" ]; enabledTestPaths = [ "tests/static" ];
disabledTests = [ disabledTests = [
# disable copyright year check of files # disable copyright year check of files
@ -2,6 +2,7 @@
lib, lib,
stdenv, stdenv,
fetchFromGitHub, fetchFromGitHub,
fetchpatch,
autoreconfHook, autoreconfHook,
bash, bash,
buildPackages, buildPackages,
@ -9,23 +10,44 @@
python3, python3,
swig, swig,
pkgsCross, pkgsCross,
libcap_ng,
# Enabling python support while cross compiling would be possible, but the # Enabling python support while cross compiling would be possible, but the
# configure script tries executing python to gather info instead of relying on # configure script tries executing python to gather info instead of relying on
# python3-config exclusively # python3-config exclusively
enablePython ? stdenv.hostPlatform == stdenv.buildPlatform, enablePython ? stdenv.hostPlatform == stdenv.buildPlatform,
nix-update-script,
testers,
}: }:
stdenv.mkDerivation (finalAttrs: { stdenv.mkDerivation (finalAttrs: {
pname = "audit"; pname = "audit";
version = "4.0.3"; version = "4.0.5";
src = fetchFromGitHub { src = fetchFromGitHub {
owner = "linux-audit"; owner = "linux-audit";
repo = "audit-userspace"; repo = "audit-userspace";
tag = "v${finalAttrs.version}"; tag = "v${finalAttrs.version}";
hash = "sha256-+M5Nai/ruK16udsHcMwv1YoVQbCLKNuz/4FCXaLbiCw="; hash = "sha256-SgMt1MmcH7r7O6bmJCetRg3IdoZXAXjVJyeu0HRfyf8=";
}; };
patches = [
# nix configures most stuff by symlinks, e.g. in /etc
# thus, for plugins to be picked up, symlinks must be allowed
# https://github.com/linux-audit/audit-userspace/pull/467
(fetchpatch {
url = "https://github.com/linux-audit/audit-userspace/pull/467/commits/dbefc642b3bd0cafe599fcd18c6c88cb672397ee.patch?full_index=1";
hash = "sha256-Ksn/qKBQYFAjvs1OVuWhgWCdf4Bdp9/a+MrhyJAT+Bw=";
})
(fetchpatch {
url = "https://github.com/linux-audit/audit-userspace/pull/467/commits/50094f56fefc0b9033ef65e8c4f108ed52ef5de5.patch?full_index=1";
hash = "sha256-CJKDLdlpsCd+bG6j5agcnxY1+vMCImHwHGN6BXURa4c=";
})
(fetchpatch {
url = "https://github.com/linux-audit/audit-userspace/pull/467/commits/5e75091abd297807b71b3cfe54345c2ef223939a.patch?full_index=1";
hash = "sha256-LPpO4PH/3MyCJq2xhmhhcnFeK3yh7LK6Mjypuvhacu4=";
})
];
postPatch = '' postPatch = ''
substituteInPlace bindings/swig/src/auditswig.i \ substituteInPlace bindings/swig/src/auditswig.i \
--replace-fail "/usr/include/linux/audit.h" \ --replace-fail "/usr/include/linux/audit.h" \
@ -57,6 +79,7 @@ stdenv.mkDerivation (finalAttrs: {
buildInputs = [ buildInputs = [
bash bash
libcap_ng
]; ];
configureFlags = [ configureFlags = [
@ -65,13 +88,20 @@ stdenv.mkDerivation (finalAttrs: {
"--disable-zos-remote" "--disable-zos-remote"
"--with-arm" "--with-arm"
"--with-aarch64" "--with-aarch64"
# capability dropping, currently mostly for plugins as those get spawned as root
# see auditd-plugins(5)
"--with-libcap-ng=yes"
(if enablePython then "--with-python" else "--without-python") (if enablePython then "--with-python" else "--without-python")
]; ];
enableParallelBuilding = true; enableParallelBuilding = true;
passthru.tests = { passthru = {
musl = pkgsCross.musl64.audit; updateScript = nix-update-script { };
tests = {
musl = pkgsCross.musl64.audit;
pkg-config = testers.testMetaPkgConfig finalAttrs.finalPackage;
};
}; };
meta = { meta = {
@ -79,7 +109,11 @@ stdenv.mkDerivation (finalAttrs: {
description = "Audit Library"; description = "Audit Library";
changelog = "https://github.com/linux-audit/audit-userspace/releases/tag/v${finalAttrs.version}"; changelog = "https://github.com/linux-audit/audit-userspace/releases/tag/v${finalAttrs.version}";
license = lib.licenses.gpl2Plus; license = lib.licenses.gpl2Plus;
maintainers = with lib.maintainers; [ ]; maintainers = with lib.maintainers; [ grimmauld ];
pkgConfigModules = [
"audit"
"auparse"
];
platforms = lib.platforms.linux; platforms = lib.platforms.linux;
}; };
}) })
@ -42,6 +42,13 @@ def is_dynamic_executable(elf: ELFFile) -> bool:
# section but their ELF type is DYN. # section but their ELF type is DYN.
return bool(elf.get_section_by_name(".interp")) return bool(elf.get_section_by_name(".interp"))
def is_separate_debug_object(elf: ELFFile) -> bool:
# objects created by separateDebugInfo = true have all the section headers
# of the unstripped objects but those that normal `strip` would have kept
# are NOBITS
text_section = elf.get_section_by_name(".text")
return elf.has_dwarf_info() and bool(text_section) and text_section.header['sh_type'] == "SHT_NOBITS"
def get_dependencies(elf: ELFFile) -> list[list[Path]]: def get_dependencies(elf: ELFFile) -> list[list[Path]]:
dependencies = [] dependencies = []
@ -174,6 +181,10 @@ def populate_cache(initial: list[Path], recursive: bool =False) -> None:
try: try:
with open_elf(path) as elf: with open_elf(path) as elf:
if is_separate_debug_object(elf):
print(f"skipping {path} because it looks like a separate debug object")
continue
osabi = get_osabi(elf) osabi = get_osabi(elf)
arch = get_arch(elf) arch = get_arch(elf)
rpath = [Path(p) for p in get_rpath(elf) rpath = [Path(p) for p in get_rpath(elf)
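The same classification can be reproduced from the shell when checking what the new skip matches (a sketch using binutils readelf; the path is illustrative, and the hook's Python check additionally requires DWARF info to be present):

    f=$debug/lib/debug/.build-id/ab/cdef0123.debug
    if readelf -S "$f" | grep -q '\.text *NOBITS'; then
      echo "$f: separated debug object (section headers kept, contents stripped)"
    fi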
@ -60,9 +60,8 @@ localPython.pkgs.buildPythonApplication rec {
]; ];
# Upstream did not adapt to pytest 8 yet. # Upstream did not adapt to pytest 8 yet.
pytestFlagsArray = [ pytestFlags = [
"-W" "-Wignore::pytest.PytestRemovedIn8Warning"
"ignore::pytest.PytestRemovedIn8Warning"
]; ];
passthru = { passthru = {
@ -145,7 +145,7 @@ py.pkgs.buildPythonApplication rec {
# tests/unit/customizations/sso/test_utils.py uses sockets # tests/unit/customizations/sso/test_utils.py uses sockets
__darwinAllowLocalNetworking = true; __darwinAllowLocalNetworking = true;
pytestFlagsArray = [ pytestFlags = [
"-Wignore::DeprecationWarning" "-Wignore::DeprecationWarning"
]; ];
@ -70,7 +70,7 @@ python.pkgs.buildPythonApplication rec {
pytestCheckHook pytestCheckHook
]; ];
pytestFlagsArray = [ enabledTestPaths = [
"tests/unit" "tests/unit"
]; ];
@ -29,7 +29,7 @@ stdenv.mkDerivation rec {
python3Packages.pytestCheckHook python3Packages.pytestCheckHook
]; ];
pytestFlagsArray = [ "test.py" ]; enabledTestPaths = [ "test.py" ];
meta = with lib; { meta = with lib; {
description = "Storage conversion and expression calculator"; description = "Storage conversion and expression calculator";
@ -13,7 +13,6 @@
pkg-config, pkg-config,
python3Packages, python3Packages,
readline, readline,
systemdMinimal,
udev, udev,
# Test gobject-introspection instead of pygobject because the latter # Test gobject-introspection instead of pygobject because the latter
# causes an infinite recursion. # causes an infinite recursion.
@ -28,11 +27,11 @@
stdenv.mkDerivation (finalAttrs: { stdenv.mkDerivation (finalAttrs: {
pname = "bluez"; pname = "bluez";
version = "5.80"; version = "5.83";
src = fetchurl { src = fetchurl {
url = "mirror://kernel/linux/bluetooth/bluez-${finalAttrs.version}.tar.xz"; url = "mirror://kernel/linux/bluetooth/bluez-${finalAttrs.version}.tar.xz";
hash = "sha256-pNC8oymWkfBtW9l3O4VGOCBKUaUCbEKwrX8cbPFrRZo="; hash = "sha256-EIUi2QnSIFgTmb/sk9qrYgNVOc7vPdo+eZcHhcY70kw=";
}; };
buildInputs = [ buildInputs = [
@ -63,7 +62,7 @@ stdenv.mkDerivation (finalAttrs: {
postPatch = postPatch =
'' ''
substituteInPlace tools/hid2hci.rules \ substituteInPlace tools/hid2hci.rules \
--replace-fail /sbin/udevadm ${systemdMinimal}/bin/udevadm \ --replace-fail /sbin/udevadm ${udev}/bin/udevadm \
--replace-fail "hid2hci " "$out/lib/udev/hid2hci " --replace-fail "hid2hci " "$out/lib/udev/hid2hci "
'' ''
+ +
@ -11,11 +11,11 @@
stdenv.mkDerivation (finalAttrs: { stdenv.mkDerivation (finalAttrs: {
pname = "bmake"; pname = "bmake";
version = "20250308"; version = "20250528";
src = fetchurl { src = fetchurl {
url = "https://www.crufty.net/ftp/pub/sjg/bmake-${finalAttrs.version}.tar.gz"; url = "https://www.crufty.net/ftp/pub/sjg/bmake-${finalAttrs.version}.tar.gz";
hash = "sha256-I4jZ+xhldmM6pyX/FjVSpdunpqN1qMuakBSrV+59maI="; hash = "sha256-DcOJpeApiqWFNTtgeW1dYy3mYNreWNAKzWCtcihGyaM=";
}; };
patches = [ patches = [
@ -101,7 +101,7 @@ python.pkgs.buildPythonApplication rec {
pytestCheckHook pytestCheckHook
]; ];
pytestFlagsArray = [ pytestFlags = [
"--benchmark-skip" "--benchmark-skip"
"--pyargs" "--pyargs"
"borg.testsuite" "borg.testsuite"
@ -71,7 +71,7 @@ python3.pkgs.buildPythonApplication rec {
"browsr" "browsr"
]; ];
pytestFlagsArray = [ pytestFlags = [
"--snapshot-update" "--snapshot-update"
]; ];
@ -21,11 +21,11 @@
stdenv.mkDerivation rec { stdenv.mkDerivation rec {
pname = "btrfs-progs"; pname = "btrfs-progs";
version = "6.14"; version = "6.15";
src = fetchurl { src = fetchurl {
url = "mirror://kernel/linux/kernel/people/kdave/btrfs-progs/btrfs-progs-v${version}.tar.xz"; url = "mirror://kernel/linux/kernel/people/kdave/btrfs-progs/btrfs-progs-v${version}.tar.xz";
hash = "sha256-31q4BPyzbikcQq2DYfgBrR4QJBtDvTBP5Qzj355+PaE="; hash = "sha256-V9pCjdIZn9iNg+zxytBWeM54ZA735S12M76Yh872dLs=";
}; };
nativeBuildInputs = nativeBuildInputs =
@ -23,7 +23,7 @@ let
lib.concatStringsSep "\n\n" extraCertificateStrings lib.concatStringsSep "\n\n" extraCertificateStrings
); );
srcVersion = "3.111"; srcVersion = "3.113.1";
version = if nssOverride != null then nssOverride.version else srcVersion; version = if nssOverride != null then nssOverride.version else srcVersion;
meta = with lib; { meta = with lib; {
homepage = "https://curl.haxx.se/docs/caextract.html"; homepage = "https://curl.haxx.se/docs/caextract.html";
@ -47,7 +47,7 @@ let
owner = "nss-dev"; owner = "nss-dev";
repo = "nss"; repo = "nss";
rev = "NSS_${lib.replaceStrings [ "." ] [ "_" ] version}_RTM"; rev = "NSS_${lib.replaceStrings [ "." ] [ "_" ] version}_RTM";
hash = "sha256-GFtoSvLF5nAwBIiMa9CeEl5geAOK60gG2tjuQFubgYs="; hash = "sha256-Yfs9Hh98ASJe1D4qyQEXaTC2xjeDI2Cdxp5Xgy0rYdQ=";
}; };
dontBuild = true; dontBuild = true;
@ -2,7 +2,7 @@
lib, lib,
stdenv, stdenv,
fetchurl, fetchurl,
fetchpatch, lzo,
gtk-doc, gtk-doc,
meson, meson,
ninja, ninja,
@ -34,13 +34,13 @@ stdenv.mkDerivation (
in in
{ {
pname = "cairo"; pname = "cairo";
version = "1.18.2"; version = "1.18.4";
src = fetchurl { src = fetchurl {
url = "https://cairographics.org/${ url = "https://cairographics.org/${
if lib.mod (builtins.fromJSON (lib.versions.minor version)) 2 == 0 then "releases" else "snapshots" if lib.mod (builtins.fromJSON (lib.versions.minor version)) 2 == 0 then "releases" else "snapshots"
}/${pname}-${version}.tar.xz"; }/${pname}-${version}.tar.xz";
hash = "sha256-piubtCQl6ETMPW3d4EP/Odur7dFULrpXout5+FiJ1Fo="; hash = "sha256-RF7YIIpuSCPeEianTKMZ02AOg/Y2n5mxQmUAZZnDLMs=";
}; };
outputs = [ outputs = [
@ -61,16 +61,7 @@ stdenv.mkDerivation (
buildInputs = [ buildInputs = [
docbook_xsl docbook_xsl
]; lzo
patches = [
# Pull upstream fix to fix "out of memory" errors:
# https://gitlab.freedesktop.org/cairo/cairo/-/merge_requests/595
(fetchpatch {
name = "fix-oom.patch";
url = "https://gitlab.freedesktop.org/cairo/cairo/-/commit/b9eed915f9a67380e7ef9d8746656455c43f67e2.patch";
hash = "sha256-iWYxMVeNpseClSTf7BfU9GBe+tJWc+DUJWTWE5MnGh4=";
})
]; ];
propagatedBuildInputs = propagatedBuildInputs =
@ -39,7 +39,7 @@ python3.pkgs.buildPythonApplication rec {
pytestCheckHook pytestCheckHook
]; ];
pytestFlagsArray = [ enabledTestPaths = [
"tests" "tests"
]; ];
@ -112,7 +112,7 @@ python.pkgs.buildPythonApplication rec {
writableTmpDirAsHomeHook writableTmpDirAsHomeHook
]; ];
pytestFlagsArray = [ "tests/unit" ]; enabledTestPaths = [ "tests/unit" ];
disabledTests = [ disabledTests = [
# Relies upon the `charm` tool being installed # Relies upon the `charm` tool being installed
@ -60,6 +60,9 @@ stdenv.mkDerivation (finalAttrs: {
zlib zlib
]; ];
# with trivialautovarinit enabled, the build can produce an empty .pc file
hardeningDisable = [ "trivialautovarinit" ];
cmakeFlags = [ cmakeFlags = [
(lib.cmakeBool "BUILD_EXAMPLES" withExamples) (lib.cmakeBool "BUILD_EXAMPLES" withExamples)
(lib.cmakeBool "BUILD_TOOLS" withTools) (lib.cmakeBool "BUILD_TOOLS" withTools)
@ -10,13 +10,13 @@
}: }:
stdenv.mkDerivation (finalAttrs: { stdenv.mkDerivation (finalAttrs: {
pname = "cpuinfo"; pname = "cpuinfo";
version = "0-unstable-2025-03-27"; version = "0-unstable-2025-06-10";
src = fetchFromGitHub { src = fetchFromGitHub {
owner = "pytorch"; owner = "pytorch";
repo = "cpuinfo"; repo = "cpuinfo";
rev = "39ea79a3c132f4e678695c579ea9353d2bd29968"; rev = "d7427551d6531037da216d20cd36feb19ed4905f";
hash = "sha256-uochXC0AtOw8N/ycyVJdiRw4pibCW2ENrFMT3jtxDSg="; hash = "sha256-gJgvE3823NyVOIL0Grkldde3U/N9NNqlLAA0btj3TSg=";
}; };
passthru.updateScript = nix-update-script { extraArgs = [ "--version=branch" ]; }; passthru.updateScript = nix-update-script { extraArgs = [ "--version=branch" ]; };
@ -27,7 +27,7 @@ python3.pkgs.buildPythonApplication rec {
wireshark-cli wireshark-cli
]; ];
pytestFlagsArray = [ enabledTestPaths = [
"tests/tests.py" "tests/tests.py"
]; ];
@ -2,7 +2,6 @@
lib, lib,
stdenv, stdenv,
fetchurl, fetchurl,
fetchpatch,
lvm2, lvm2,
json_c, json_c,
asciidoctor, asciidoctor,
@ -26,7 +25,7 @@
stdenv.mkDerivation rec { stdenv.mkDerivation rec {
pname = "cryptsetup"; pname = "cryptsetup";
version = "2.7.5"; version = "2.8.0";
outputs = [ outputs = [
"bin" "bin"
@ -38,20 +37,12 @@ stdenv.mkDerivation rec {
src = fetchurl { src = fetchurl {
url = "mirror://kernel/linux/utils/cryptsetup/v${lib.versions.majorMinor version}/${pname}-${version}.tar.xz"; url = "mirror://kernel/linux/utils/cryptsetup/v${lib.versions.majorMinor version}/${pname}-${version}.tar.xz";
hash = "sha256-0r5Dlbj1A7Dr9LLYHbkMNalwUKNY7iH+YqDftm5dVSI="; hash = "sha256-zJ4tN8JahxzqN1ILKNUyIHsMFnD7EPxU1oBx9j9SQ6I=";
}; };
patches = [ patches = [
# Allow reading tokens from a relative path, see #167994 # Allow reading tokens from a relative path, see #167994
./relative-token-path.patch ./relative-token-path.patch
# Do not use pagesize as fallback for block size.
# Remove when https://gitlab.com/cryptsetup/cryptsetup/-/merge_requests/782 is in the latest stable release
# Fixes https://gitlab.com/cryptsetup/cryptsetup/-/issues/943
(fetchpatch {
url = "https://gitlab.com/cryptsetup/cryptsetup/-/commit/a39a0d00e504ad7a89442874f72cf0561d6089c4.diff";
hash = "sha256-teQ/uFYrKuS0ksMEv7rP+d9EUuOl3sINsNhDC88P0xw=";
})
]; ];
postPatch = '' postPatch = ''
@ -95,6 +86,8 @@ stdenv.mkDerivation rec {
popt popt
] ++ lib.optional (!withInternalArgon2) libargon2; ] ++ lib.optional (!withInternalArgon2) libargon2;
enableParallelBuilding = true;
# The test [7] header backup in compat-test fails with a mysterious # The test [7] header backup in compat-test fails with a mysterious
# "out of memory" error, even though tons of memory is available. # "out of memory" error, even though tons of memory is available.
# Issue filed upstream: https://gitlab.com/cryptsetup/cryptsetup/-/issues/763 # Issue filed upstream: https://gitlab.com/cryptsetup/cryptsetup/-/issues/763
@ -5,7 +5,7 @@
makeWrapper, makeWrapper,
cvs, cvs,
perl, perl,
nettools, net-tools,
findutils, findutils,
rsync, rsync,
coreutils, coreutils,
@ -25,7 +25,7 @@ stdenv.mkDerivation rec {
buildInputs = [ buildInputs = [
cvs cvs
perl perl
nettools net-tools
findutils findutils
rsync rsync
coreutils coreutils
@ -40,7 +40,7 @@ stdenv.mkDerivation rec {
wrapProgram $out/bin/cvsq --prefix PATH : ${ wrapProgram $out/bin/cvsq --prefix PATH : ${
lib.makeBinPath [ lib.makeBinPath [
cvs cvs
nettools net-tools
findutils findutils
rsync rsync
coreutils coreutils
@ -50,7 +50,7 @@ stdenv.mkDerivation rec {
wrapProgram $out/bin/cvsq-branch --prefix PATH : ${ wrapProgram $out/bin/cvsq-branch --prefix PATH : ${
lib.makeBinPath [ lib.makeBinPath [
cvs cvs
nettools net-tools
findutils findutils
rsync rsync
coreutils coreutils
@ -60,7 +60,7 @@ stdenv.mkDerivation rec {
wrapProgram $out/bin/cvsq-merge --prefix PATH : ${ wrapProgram $out/bin/cvsq-merge --prefix PATH : ${
lib.makeBinPath [ lib.makeBinPath [
cvs cvs
nettools net-tools
findutils findutils
rsync rsync
coreutils coreutils
@ -70,7 +70,7 @@ stdenv.mkDerivation rec {
wrapProgram $out/bin/cvsq-switch --prefix PATH : ${ wrapProgram $out/bin/cvsq-switch --prefix PATH : ${
lib.makeBinPath [ lib.makeBinPath [
cvs cvs
nettools net-tools
findutils findutils
rsync rsync
coreutils coreutils
@ -80,7 +80,7 @@ stdenv.mkDerivation rec {
wrapProgram $out/bin/lcvs --prefix PATH : ${ wrapProgram $out/bin/lcvs --prefix PATH : ${
lib.makeBinPath [ lib.makeBinPath [
cvs cvs
nettools net-tools
findutils findutils
rsync rsync
coreutils coreutils
@ -12,6 +12,7 @@
pkg-config, pkg-config,
python3, python3,
xvfb-run, xvfb-run,
gettext,
}: }:
stdenv.mkDerivation (finalAttrs: { stdenv.mkDerivation (finalAttrs: {
@ -24,6 +25,11 @@ stdenv.mkDerivation (finalAttrs: {
sha256 = "sha256-4yH19X98SVqpviCBIWzIX6FYHWxCbREpuKCNjQuTFDk="; sha256 = "sha256-4yH19X98SVqpviCBIWzIX6FYHWxCbREpuKCNjQuTFDk=";
}; };
patches = [
# glib gettext is deprecated and broken, so use regular gettext instead
./use-regular-gettext.patch
];
postPatch = '' postPatch = ''
patchShebangs tests/test-wait-outputer patchShebangs tests/test-wait-outputer
@ -39,6 +45,7 @@ stdenv.mkDerivation (finalAttrs: {
nativeBuildInputs = [ nativeBuildInputs = [
autoreconfHook autoreconfHook
glib # for autoconf macro, gtester, gdbus glib # for autoconf macro, gtester, gdbus
gettext
intltool intltool
pkg-config pkg-config
]; ];
@ -0,0 +1,12 @@
--- a/configure.ac
+++ b/configure.ac
@@ -46,7 +46,8 @@ GETTEXT_PACKAGE=dbus-test-runner
AC_SUBST(GETTEXT_PACKAGE)
AC_DEFINE_UNQUOTED(GETTEXT_PACKAGE, "$GETTEXT_PACKAGE", [Name of the default gettext domain])
-AM_GLIB_GNU_GETTEXT
+AM_GNU_GETTEXT([external])
+AM_GNU_GETTEXT_VERSION([0.21])
###########################
# gcov coverage reporting
@ -56,7 +56,7 @@ python3.pkgs.buildPythonApplication rec {
export HOME=$(mktemp -d); export HOME=$(mktemp -d);
''; '';
pytestFlagsArray = [ pytestFlags = [
# --fast skips tests which try to start a devpi-server improperly # --fast skips tests which try to start a devpi-server improperly
"--fast" "--fast"
]; ];
@ -260,7 +260,7 @@ python.pkgs.buildPythonApplication rec {
nativeCheckInputs = with python.pkgs; [ pytestCheckHook ] ++ pythonPath; nativeCheckInputs = with python.pkgs; [ pytestCheckHook ] ++ pythonPath;
pytestFlagsArray = [ pytestFlags = [
# Always show more information when tests fail # Always show more information when tests fail
"-vv" "-vv"
]; ];
@ -52,6 +52,11 @@ stdenv.mkDerivation rec {
XMLNamespaceSupport XMLNamespaceSupport
]); ]);
# configure tries to find osx in PATH and hardcodes the resulting path
# (if any) in the Perl code. This fails under strictDeps, so override
# the autoconf test:
OSX = "${opensp}/bin/osx";
postConfigure = '' postConfigure = ''
# Broken substitution is used for `perl/config.pl', which leaves literal # Broken substitution is used for `perl/config.pl', which leaves literal
# `$prefix' in it. # `$prefix' in it.
@ -0,0 +1,42 @@
From 893a84738606a8ac588ba1e9d4145cbbcbfff811 Mon Sep 17 00:00:00 2001
From: Alyssa Ross <hi@alyssa.is>
Date: Wed, 2 Jul 2025 12:03:53 +0200
Subject: [PATCH] Fix autoreconf with gettext 0.25
This fixes the following error that appeared when running autoreconf
after updating to gettext 0.25:
configure.ac:76: error: possibly undefined macro: AM_ICONV
If this token and others are legitimate, please use m4_pattern_allow.
See the Autoconf documentation.
autoreconf: error: /nix/store/dvpiwvz7an7icljfscdi76h11c03cma4-autoconf-2.72/bin/autoconf failed with exit status: 1
The version of gettext given in AM_GNU_GETTEXT_VERSION() is picked
quite arbitrarily based on what's likely to be available in distros,
since gettext itself is not actually used here, just some supporting
stuff from it.
Link: https://github.com/dosfstools/dosfstools/pull/218
---
configure.ac | 3 +++
1 file changed, 3 insertions(+)
diff --git a/configure.ac b/configure.ac
index efb8fb5..2cecab1 100644
--- a/configure.ac
+++ b/configure.ac
@@ -15,8 +15,11 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>.
AC_INIT([dosfstools], [4.2])
+AC_CONFIG_MACRO_DIRS([m4])
AC_SUBST([RELEASE_DATE], [2021-01-31])
AM_INIT_AUTOMAKE([1.11 foreign subdir-objects parallel-tests])
+AM_GNU_GETTEXT_VERSION([0.20])
+AM_GNU_GETTEXT([external])
AC_ARG_ENABLE([compat-symlinks],
[AS_HELP_STRING([--enable-compat-symlinks],
--
2.49.0
@ -39,19 +39,15 @@ stdenv.mkDerivation rec {
url = "https://github.com/dosfstools/dosfstools/commit/8da7bc93315cb0c32ad868f17808468b81fa76ec.patch"; url = "https://github.com/dosfstools/dosfstools/commit/8da7bc93315cb0c32ad868f17808468b81fa76ec.patch";
sha256 = "sha256-Quegj5uYZgACgjSZef6cjrWQ64SToGQxbxyqCdl8C7o="; sha256 = "sha256-Quegj5uYZgACgjSZef6cjrWQ64SToGQxbxyqCdl8C7o=";
}) })
./gettext-0.25.patch
]; ];
nativeBuildInputs = [ nativeBuildInputs = [
autoreconfHook autoreconfHook
gettext
pkg-config pkg-config
] ++ lib.optional stdenv.hostPlatform.isDarwin libiconv; ] ++ lib.optional stdenv.hostPlatform.isDarwin libiconv;
# configure.ac:75: error: required file './config.rpath' not found
# https://github.com/dosfstools/dosfstools/blob/master/autogen.sh
postPatch = ''
cp ${gettext}/share/gettext/config.rpath config.rpath
'';
configureFlags = [ "--enable-compat-symlinks" ]; configureFlags = [ "--enable-compat-symlinks" ];
nativeCheckInputs = [ xxd ]; nativeCheckInputs = [ xxd ];
@ -3,6 +3,7 @@
stdenv, stdenv,
buildPackages, buildPackages,
fetchurl, fetchurl,
fetchpatch,
pkg-config, pkg-config,
libuuid, libuuid,
gettext, gettext,
@ -12,6 +13,7 @@
shared ? !stdenv.hostPlatform.isStatic, shared ? !stdenv.hostPlatform.isStatic,
e2fsprogs, e2fsprogs,
runCommand, runCommand,
libarchive,
}: }:
stdenv.mkDerivation rec { stdenv.mkDerivation rec {
@ -23,6 +25,20 @@ stdenv.mkDerivation rec {
hash = "sha256-CCQuZMoOgZTZwcqtSXYrGSCaBjGBmbY850rk7y105jw="; hash = "sha256-CCQuZMoOgZTZwcqtSXYrGSCaBjGBmbY850rk7y105jw=";
}; };
# 2025-05-31: Fix libarchive, from https://github.com/tytso/e2fsprogs/pull/230
patches = [
(fetchpatch {
name = "0001-create_inode_libarchive.c-define-libarchive-dylib-for-darwin.patch";
url = "https://github.com/tytso/e2fsprogs/commit/e86c65bc7ee276cd9ca920d96e18ed0cddab3412.patch";
hash = "sha256-HFZAznaNl5rzgVEvYx1LDKh2jd/VEXD/o0wypIh4TR8=";
})
(fetchpatch {
name = "0002-mkgnutar.pl-avoid-uninitialized-username-variable.patch";
url = "https://github.com/tytso/e2fsprogs/commit/9217c359db1d1b6d031a0e2ca9a885634fed00da.patch";
hash = "sha256-iDXmLq77eJolH1mkXSbvZ9tRVtGQt2F45CdkVphUZSs=";
})
];
# fuse2fs adds 14mb of dependencies # fuse2fs adds 14mb of dependencies
outputs = [ outputs = [
"bin" "bin"
@ -40,27 +56,30 @@ stdenv.mkDerivation rec {
buildInputs = [ buildInputs = [
libuuid libuuid
gettext gettext
libarchive
] ++ lib.optionals withFuse [ fuse3 ]; ] ++ lib.optionals withFuse [ fuse3 ];
configureFlags = configureFlags =
if stdenv.hostPlatform.isLinux then [
[ "--with-libarchive=direct"
# It seems that the e2fsprogs is one of the few packages that cannot be ]
# built with shared and static libs. ++ lib.optionals stdenv.hostPlatform.isLinux [
(if shared then "--enable-elf-shlibs" else "--disable-elf-shlibs") # It seems that the e2fsprogs is one of the few packages that cannot be
"--enable-symlink-install" # build with shared and static libs.
"--enable-relative-symlinks" (if shared then "--enable-elf-shlibs" else "--disable-elf-shlibs")
"--with-crond-dir=no" "--enable-symlink-install"
# fsck, libblkid, libuuid and uuidd are in util-linux-ng (the "libuuid" dependency) "--enable-relative-symlinks"
"--disable-fsck" "--with-crond-dir=no"
"--disable-libblkid" # fsck, libblkid, libuuid and uuidd are in util-linux-ng (the "libuuid" dependency)
"--disable-libuuid" "--disable-fsck"
"--disable-uuidd" "--disable-libblkid"
] "--disable-libuuid"
else "--disable-uuidd"
[ ]
"--enable-libuuid --disable-e2initrd-helper" ++ lib.optionals (!stdenv.hostPlatform.isLinux) [
]; "--enable-libuuid"
"--disable-e2initrd-helper"
];
nativeCheckInputs = [ buildPackages.perl ]; nativeCheckInputs = [ buildPackages.perl ];
doCheck = true; doCheck = true;
@ -88,6 +107,7 @@ stdenv.mkDerivation rec {
[ -e $out/success ] [ -e $out/success ]
''; '';
}; };
meta = { meta = {
homepage = "https://e2fsprogs.sourceforge.net/"; homepage = "https://e2fsprogs.sourceforge.net/";
changelog = "https://e2fsprogs.sourceforge.net/e2fsprogs-release.html#${version}"; changelog = "https://e2fsprogs.sourceforge.net/e2fsprogs-release.html#${version}";
@ -99,6 +119,6 @@ stdenv.mkDerivation rec {
mit # lib/et, lib/ss mit # lib/et, lib/ss
]; ];
platforms = lib.platforms.unix; platforms = lib.platforms.unix;
maintainers = with lib.maintainers; [ ]; maintainers = with lib.maintainers; [ usertam ];
}; };
} }
@ -6,7 +6,7 @@
cmake, cmake,
ninja, ninja,
removeReferencesTo, sanitiseHeaderPathsHook,
glog, glog,
gflags, gflags,
@ -47,7 +47,7 @@ stdenv.mkDerivation (finalAttrs: {
nativeBuildInputs = [ nativeBuildInputs = [
cmake cmake
ninja ninja
removeReferencesTo sanitiseHeaderPathsHook
]; ];
buildInputs = [ buildInputs = [
@ -98,18 +98,6 @@ stdenv.mkDerivation (finalAttrs: {
'find_package(FBThrift CONFIG REQUIRED COMPONENTS cpp2)' 'find_package(FBThrift CONFIG REQUIRED COMPONENTS cpp2)'
''; '';
postFixup = ''
# Sanitize header paths to avoid runtime dependencies leaking in
# through `__FILE__`.
(
shopt -s globstar
for header in "$dev/include"/**/*.h; do
sed -i "1i#line 1 \"$header\"" "$header"
remove-references-to -t "$dev" "$header"
done
)
'';
passthru.updateScript = nix-update-script { }; passthru.updateScript = nix-update-script { };
meta = { meta = {