staging-next 2025-07-04 (#422427)
Commit: b47d4f01d4
@@ -24,7 +24,7 @@ insert_final_newline = false
 # see https://nixos.org/nixpkgs/manual/#chap-conventions

 # Match json/lockfiles/markdown/nix/perl/python/ruby/shell/docbook files, set indent to spaces
-[*.{bash,json,lock,md,nix,pl,pm,py,rb,sh,xml}]
+[*.{bash,js,json,lock,md,nix,pl,pm,py,rb,sh,xml}]
 indent_style = space

 # Match docbook files, set indent width of one
@@ -32,7 +32,7 @@ indent_style = space
 indent_size = 1

 # Match json/lockfiles/markdown/nix/ruby files, set indent width of two
-[*.{json,lock,md,nix,rb}]
+[*.{js,json,lock,md,nix,rb}]
 indent_size = 2

 # Match all the Bash code in Nix files, set indent width of two
@@ -1,4 +0,0 @@
-# TODO: Move to top-level via staging PR
-[*.js]
-indent_style = space
-indent_size = 2
@@ -91,3 +91,4 @@ Meson setup hook.

+- `prefixKey`
 - `enableParallelBuilding`
 - `enableParallelChecking`
@@ -31,7 +31,7 @@ It does so in a clean environment (using `env --ignore-environment`), and it che
 The variables that control this phase are:

 - `dontVersionCheck`: Disable adding this hook to the [`preInstallCheckHooks`](#ssec-installCheck-phase). Useful if you do want to load the bash functions of the hook, but run them differently.
-- `versionCheckProgram`: The full path to the program that should print the `${version}` string. Defaults roughly to `${placeholder "out"}/bin/${pname}`. Using `$out` in the value of this variable won't work, as environment variables from this variable are not expanded by the hook. Hence using `placeholder` is unavoidable.
+- `versionCheckProgram`: The full path to the program that should print the `${version}` string. Defaults to using the first non-empty value `$binary` out of `${NIX_MAIN_PROGRAM}` and `${pname}`, in that order, to build roughly `${placeholder "out"}/bin/$binary`. `${NIX_MAIN_PROGRAM}`'s value comes from `meta.mainProgram`, and does not normally need to be set explicitly. When setting `versionCheckProgram`, using `$out` directly won't work, as environment variables from this variable are not expanded by the hook. Hence using `placeholder "out"` is unavoidable.
 - `versionCheckProgramArg`: The argument that needs to be passed to `versionCheckProgram`. If undefined, the hook tries first `--help` and then `--version`. Examples: `version`, `-V`, `-v`.
 - `versionCheckKeepEnvironment`: A list of environment variables to keep and pass to the command. Only variables that are actually required for the version command to work should be added to this list. If it is not feasible to explicitly list all these environment variables, you can set this parameter to the special value `"*"` to disable the `--ignore-environment` flag and thus keep all environment variables.
 - `preVersionCheck`: A hook to run before the check is done.
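For orientation, a minimal sketch of wiring this hook into a package; the package name, binary name, and version are hypothetical:

```nix
{ stdenv, versionCheckHook }:

stdenv.mkDerivation (finalAttrs: {
  pname = "mytool"; # hypothetical package
  version = "1.2.3";
  src = null; # source elided

  nativeInstallCheckInputs = [ versionCheckHook ];
  doInstallCheck = true;

  # Only needed when the installed binary matches neither
  # meta.mainProgram nor pname; note that `$out` must be spelled
  # via placeholder, since the hook does not expand variables here.
  versionCheckProgram = "${placeholder "out"}/bin/mytool-cli";
  versionCheckProgramArg = "--version";
})
```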
@@ -20,6 +20,8 @@
 - `space-orbit` package has been removed due to lack of upstream maintenance. Debian upstream stopped tracking it in 2011.
 - `command-not-found` package is now disabled by default; it works only on nix-channel-based systems and requires additional setup.

 - Derivations setting both `separateDebugInfo` and one of `allowedReferences`, `allowedRequisites`, `disallowedReferences` or `disallowedRequisites` must now set `__structuredAttrs` to `true` (see the sketch below). The effect of reference whitelisting or blacklisting will be disabled on the `debug` output created by `separateDebugInfo`.

 - `victoriametrics` no longer contains VictoriaLogs components. These have been separated into the new package `victorialogs`.

 - `gnome-keyring` no longer ships with an SSH agent, because it has been deprecated upstream. You should use `gcr_4` instead, which provides the same features. More information on why this was done can be found on [the relevant GCR upstream PR](https://gitlab.gnome.org/GNOME/gcr/-/merge_requests/67).
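A minimal sketch of the newly required combination; the package and dependency names are hypothetical:

```nix
{ stdenv, someDep }:

stdenv.mkDerivation {
  pname = "example"; # hypothetical
  version = "0.1";
  src = null; # source elided

  separateDebugInfo = true;
  disallowedReferences = [ someDep ];
  # Now mandatory when combining separateDebugInfo with reference
  # allow/deny lists; the lists are not enforced on the debug output.
  __structuredAttrs = true;
}
```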
@@ -54,6 +56,19 @@
 - `gramps` has been updated to 6.0.0
   Upstream recommends [backing up your Family Trees](https://gramps-project.org/wiki/index.php/Gramps_6.0_Wiki_Manual_-_Manage_Family_Trees#Backing_up_a_Family_Tree) before upgrading.

 - `meta.mainProgram`: Changing this `meta` entry can trigger a package rebuild, since it is used to determine the `NIX_MAIN_PROGRAM` environment variable.

 - `versionCheckHook`: Packages that previously relied solely on `pname` to locate the program used for the version check, but have a differing `meta.mainProgram` entry, might now fail.

 - The debug outputs produced by `separateDebugInfo = true;` now contain symlinks mapping build-ids to the original source and ELF file.
   Specifically, if `$out/bin/ninja` has build-id `483bd7f7229bdb06462222e1e353e4f37e15c293`, then
   * `$debug/lib/debug/.build-id/48/3bd7f7229bdb06462222e1e353e4f37e15c293.executable` is a symlink to `$out/bin/ninja`
   * `$debug/lib/debug/.build-id/48/3bd7f7229bdb06462222e1e353e4f37e15c293.source` is a symlink to the value of `$src` during the build
   * `$debug/lib/debug/.build-id/48/3bd7f7229bdb06462222e1e353e4f37e15c293.sourceoverlay` is a symlink to a directory with the same structure as the expanded `$sourceRoot`, but containing only a copy of the files which were patched during the build
   * `$debug/lib/debug/.build-id/48/3bd7f7229bdb06462222e1e353e4f37e15c293.debug` is the file containing debug symbols (as before).

 ## Nixpkgs Library {#sec-nixpkgs-release-25.11-lib}

 <!-- To avoid merge conflicts, consider adding your item at an arbitrary place in the list instead. -->
@@ -1415,7 +1415,7 @@ This setup hook checks for, reports, and (by default) fails builds when "broken"
 This hook can be disabled by setting `dontCheckForBrokenSymlinks`.

 ::: {.note}
-The hook only considers symlinks with targets inside the Nix store.
+The hook only considers symlinks with targets inside the Nix store or the $TMPDIR directory (typically /nix/store and /build in the builder environment, the latter being where the build is executed).
 :::
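As a quick sketch, opting a single (hypothetical) package out of the check is one attribute:

```nix
{
  # Skip the broken-symlink check, e.g. for a package that
  # intentionally ships symlinks resolved only at runtime.
  dontCheckForBrokenSymlinks = true;
}
```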

 ::: {.note}
@@ -8868,6 +8868,12 @@
     github = "fzakaria";
     githubId = 605070;
   };
+  fzdslr = {
+    name = "FZDSLR";
+    email = "fzdslr@outlook.com";
+    github = "fzdslr";
+    githubId = 62922415;
+  };
   gabesoft = {
     email = "gabesoft@gmail.com";
     github = "gabesoft";
@@ -1,5 +1,5 @@
 #! /usr/bin/env nix-shell
-#! nix-shell -i bash -p coreutils haskellPackages.cabal2nix-unstable git nix nixfmt-rfc-style -I nixpkgs=.
+#! nix-shell -i bash -p coreutils haskellPackages.cabal2nix-unstable git nixfmt-rfc-style -I nixpkgs=.

 set -euo pipefail

@@ -1,5 +1,5 @@
 #! /usr/bin/env nix-shell
-#! nix-shell -i bash -p coreutils jq nix -I nixpkgs=.
+#! nix-shell -i bash -p coreutils jq -I nixpkgs=.

 set -euo pipefail

@@ -1,5 +1,5 @@
 #! /usr/bin/env nix-shell
-#! nix-shell -i bash -p nix curl jq git gnused -I nixpkgs=.
+#! nix-shell -i bash -p curl jq git gnused -I nixpkgs=.

 # See regenerate-hackage-packages.sh for details on the purpose of this script.

@@ -1,5 +1,5 @@
 #! /usr/bin/env nix-shell
-#! nix-shell -i bash -p nix curl jq git gnused gnugrep -I nixpkgs=.
+#! nix-shell -i bash -p curl jq git gnused gnugrep -I nixpkgs=.
 # shellcheck shell=bash

 set -eu -o pipefail
@@ -1,5 +1,5 @@
 #! /usr/bin/env nix-shell
-#! nix-shell -i bash -p nix curl gnused -I nixpkgs=.
+#! nix-shell -i bash -p curl gnused -I nixpkgs=.

 # On Hackage every package description shows a category "Distributions" which
 # lists a "NixOS" version.
@@ -68,6 +68,8 @@

 - The Perl implementation of the `switch-to-configuration` program is removed. All switchable systems now use the Rust rewrite. Any prior usage of `system.switch.enableNg` must now be removed. If you have any outstanding issues with the new implementation, please open an issue on GitHub.

 - The `no-broken-symlink` build hook now also fails builds whose output derivation contains links to $TMPDIR (typically /build, which contains the build directory).

 - The `services.polipo` module has been removed, as `polipo` is unmaintained and archived upstream.

 - The Pocket ID module ([`services.pocket-id`](#opt-services.pocket-id.enable)) and package (`pocket-id`) have been updated to 1.0.0. Some environment variables have been changed or removed; see the [migration guide](https://pocket-id.org/docs/setup/migrate-to-v1/).
@@ -104,7 +104,7 @@ in
       path = with pkgs; [
         procps
         openssh
-        nettools
+        net-tools
       ];
       description = "spark master service.";
       after = [ "network.target" ];
@@ -133,7 +133,7 @@ in
       path = with pkgs; [
         procps
         openssh
-        nettools
+        net-tools
         rsync
       ];
       description = "spark master service.";
@@ -424,7 +424,7 @@ in
       ];
       path = [
         hydra-package
-        pkgs.nettools
+        pkgs.net-tools
         pkgs.openssh
         pkgs.bzip2
         config.nix.package
@@ -459,7 +459,7 @@ in
       ];
       path = with pkgs; [
         hydra-package
-        nettools
+        net-tools
         jq
       ];
       restartTriggers = [ hydraConf ];
@@ -43,7 +43,7 @@ in
         rsync
         kmod
         gawk
-        nettools
+        net-tools
         util-linux
         profile-sync-daemon
       ];
@@ -69,7 +69,7 @@ in
         rsync
         kmod
         gawk
-        nettools
+        net-tools
         util-linux
         profile-sync-daemon
       ];
@@ -45,7 +45,7 @@ let
   defaultsFile = pkgs.writeText "60-defaults.conf" ''
     # 01-system-paths.conf
     ${dirStanzas home}
-    ZM_PATH_ARP=${lib.getBin pkgs.nettools}/bin/arp
+    ZM_PATH_ARP=${lib.getBin pkgs.net-tools}/bin/arp
     ZM_PATH_LOGS=/var/log/${dirName}
     ZM_PATH_MAP=/dev/shm
     ZM_PATH_SOCKS=/run/${dirName}
@@ -59,9 +59,9 @@ in

     extraPackages = mkOption {
       type = types.listOf types.package;
-      default = with pkgs; [ nettools ];
-      defaultText = literalExpression "with pkgs; [ nettools ]";
-      example = literalExpression "with pkgs; [ nettools mysql ]";
+      default = with pkgs; [ net-tools ];
+      defaultText = literalExpression "with pkgs; [ net-tools ]";
+      example = literalExpression "with pkgs; [ net-tools mysql ]";
       description = ''
         Packages to be added to the Zabbix {env}`PATH`.
         Typically used to add executables for scripts, but can be anything.
@@ -91,11 +91,11 @@ in
     extraPackages = mkOption {
       type = types.listOf types.package;
       default = with pkgs; [
-        nettools
+        net-tools
         nmap
         traceroute
       ];
-      defaultText = literalExpression "[ nettools nmap traceroute ]";
+      defaultText = literalExpression "[ net-tools nmap traceroute ]";
       description = ''
         Packages to be added to the Zabbix {env}`PATH`.
         Typically used to add executables for scripts, but can be anything.
@@ -88,11 +88,11 @@ in
     extraPackages = mkOption {
       type = types.listOf types.package;
       default = with pkgs; [
-        nettools
+        net-tools
         nmap
         traceroute
       ];
-      defaultText = literalExpression "[ nettools nmap traceroute ]";
+      defaultText = literalExpression "[ net-tools nmap traceroute ]";
       description = ''
         Packages to be added to the Zabbix {env}`PATH`.
         Typically used to add executables for scripts, but can be anything.
@@ -165,7 +165,7 @@ in
       procps
       nssTools
       iptables
-      nettools
+      net-tools
     ];
     preStart = lib.optionalString cfg.disableRedirects ''
       # Disable send/receive redirects
@@ -74,7 +74,7 @@ let
       path = [
         pkgs.iptables
         pkgs.iproute2
-        pkgs.nettools
+        pkgs.net-tools
       ];

       serviceConfig.ExecStart = "@${openvpn}/sbin/openvpn openvpn --suppress-timestamps --config ${configFile}";
@@ -80,8 +80,8 @@
         builtins.storeDir
         "/etc/ssl"
         "/etc/static/ssl"
-        "${pkgs.nettools}/bin/route:/usr/bin/route"
-        "${pkgs.nettools}/bin/ifconfig:/usr/bin/ifconfig"
+        "${pkgs.net-tools}/bin/route:/usr/bin/route"
+        "${pkgs.net-tools}/bin/ifconfig:/usr/bin/ifconfig"
       ];

       BindPaths = [
@@ -11,7 +11,7 @@ let
   [
     cloud-init
     iproute2
-    nettools
+    net-tools
     openssh
     shadow
     util-linux
@@ -36,7 +36,7 @@ with lib;
     pkgs.which
     pkgs.openssl
    pkgs.xorg.xauth
-    pkgs.nettools
+    pkgs.net-tools
     pkgs.shadow
     pkgs.procps
     pkgs.util-linux
@@ -144,7 +144,7 @@ with lib;
   systemd.services.digitalocean-set-hostname = mkIf (hostName == "") {
     path = [
       pkgs.curl
-      pkgs.nettools
+      pkgs.net-tools
     ];
     description = "Set hostname provided by Digitalocean";
     wantedBy = [ "network.target" ];
@@ -23,7 +23,7 @@ in
   environment.systemPackages = with pkgs; [
     findutils
     iputils
-    nettools
+    net-tools
     netcat
     nfs-utils
     rsync
@@ -351,7 +351,7 @@ in
     parted

     # for hostname
-    nettools
+    net-tools
     # for pidof
     procps
     # for useradd, usermod
@@ -29,7 +29,7 @@ python3Packages.buildPythonApplication rec {
     pytest-mock
   ];

-  pytestFlagsArray = [ "tests/" ];
+  enabledTestPaths = [ "tests/" ];

   meta = {
     description = "Mopidy extension for playing music from Tidal";
@@ -21,11 +21,11 @@ assert withConplay -> !libOnly;

 stdenv.mkDerivation rec {
   pname = "${lib.optionalString libOnly "lib"}mpg123";
-  version = "1.32.10";
+  version = "1.33.0";

   src = fetchurl {
     url = "mirror://sourceforge/mpg123/mpg123-${version}.tar.bz2";
-    hash = "sha256-h7LBf+DJedPvOO7O/2Nis1sorIWJ+/GFS1vnXJq2VXw=";
+    hash = "sha256-IpDjrt5vTRY+GhdFIWWvM8qtS18JSPmUKc+i2Dhfqp0=";
   };

   outputs = [
@@ -1,6 +1,6 @@
 { lib, fetchFromGitHub }:
 rec {
-  version = "9.1.1401";
+  version = "9.1.1475";

   outputs = [
     "out"
@@ -11,7 +11,7 @@ rec {
     owner = "vim";
     repo = "vim";
     rev = "v${version}";
-    hash = "sha256-oYde6i5coECUzQQEMo0dvkOXFimKe4y2aGoV2nVLx58=";
+    hash = "sha256-KKUzS0dS9K/jlfP+igyLX1Fwjb7Y5ZAzGLjqHvkA3bs=";
   };

   enableParallelBuilding = true;
@@ -58,7 +58,7 @@ python3.pkgs.buildPythonApplication rec {
     umockdev
   ];

-  pytestFlagsArray = [
+  enabledTestPaths = [
     "test"
   ];

@@ -134,7 +134,7 @@ python3.pkgs.buildPythonApplication rec {
     pycryptodomex
   ];

-  pytestFlagsArray = [ "tests" ];
+  enabledTestPaths = [ "tests" ];

   postCheck = ''
     $out/bin/electrum help >/dev/null
@@ -159,7 +159,7 @@ python3.pkgs.buildPythonApplication {
   ];
   buildInputs = lib.optional stdenv.hostPlatform.isLinux qtwayland;

-  pytestFlagsArray = [ "electrum_ltc/tests" ];
+  enabledTestPaths = [ "electrum_ltc/tests" ];

   disabledTests = [
     "test_loop" # test tries to bind 127.0.0.1 causing permission error
@@ -23,7 +23,7 @@ buildPythonApplication rec {
   build-system = [ setuptools ];

   nativeCheckInputs = [ pytestCheckHook ];
-  pytestFlagsArray = [ "test/test.py" ];
+  enabledTestPaths = [ "test/test.py" ];

   meta = {
     description = "Importer and exporter for MBTiles";
@@ -10,7 +10,7 @@
   iptables,
   iputils,
   kmod,
-  nettools,
+  net-tools,
   procps,
   tcpdump,
   traceroute,
@@ -59,7 +59,7 @@ stdenv.mkDerivation rec {
     iptables
     iputils
     kmod
-    nettools
+    net-tools
     procps
     tcpdump
     traceroute
@@ -32,7 +32,7 @@
   gst-plugins-good,
   gst-plugins-bad,
   gst-vaapi,
-  webrtc-audio-processing_1,
+  webrtc-audio-processing,
 }:

 stdenv.mkDerivation (finalAttrs: {
@@ -84,7 +84,7 @@ stdenv.mkDerivation (finalAttrs: {
     gst-plugins-good # contains rtpbin, required for VP9
     gst-plugins-bad # required for H264, MSDK
     gst-vaapi # required for VAAPI
-    webrtc-audio-processing_1
+    webrtc-audio-processing
   ];

   doCheck = true;
@@ -30,7 +30,7 @@
   secp256k1,
   speex,
   udev,
-  webrtc-audio-processing,
+  webrtc-audio-processing_0_3,
   yaml-cpp,
   zlib,

@@ -220,7 +220,7 @@ stdenv.mkDerivation rec {
     secp256k1
     speex
     udev
-    webrtc-audio-processing
+    webrtc-audio-processing_0_3
     yaml-cpp
     zlib
   ];
@@ -1,6 +1,8 @@
 diff --git a/desktop/qa/desktop_lib/test_desktop_lib.cxx b/desktop/qa/desktop_lib/test_desktop_lib.cxx
 index 231aea8d0f3c..214a23c82562 100644
 --- a/desktop/qa/desktop_lib/test_desktop_lib.cxx
 +++ b/desktop/qa/desktop_lib/test_desktop_lib.cxx
-@@ -595,6 +595,8 @@ void DesktopLOKTest::testGetFilterTypes()
+@@ -603,6 +603,8 @@ void DesktopLOKTest::testGetFilterTypes()
 
  void DesktopLOKTest::testSearchCalc()
  {
@@ -9,7 +11,7 @@
      LibLibreOffice_Impl aOffice;
      LibLODocument_Impl* pDocument = loadDoc("search.ods");
      pDocument->pClass->initializeForRendering(pDocument, nullptr);
-@@ -625,6 +627,8 @@ void DesktopLOKTest::testSearchCalc()
+@@ -633,6 +635,8 @@ void DesktopLOKTest::testSearchCalc()
 
  void DesktopLOKTest::testSearchAllNotificationsCalc()
  {
@@ -18,6 +20,8 @@
      LibLibreOffice_Impl aOffice;
      LibLODocument_Impl* pDocument = loadDoc("search.ods");
      pDocument->pClass->initializeForRendering(pDocument, nullptr);
 diff --git a/svgio/qa/cppunit/data/tdf160386.svg b/svgio/qa/cppunit/data/tdf160386.svg
 index 1644b0d15514..cf429508bcd4 100644
 --- a/svgio/qa/cppunit/data/tdf160386.svg
 +++ b/svgio/qa/cppunit/data/tdf160386.svg
 @@ -8,7 +8,6 @@
@@ -28,9 +32,11 @@
      <text systemLanguage="ru">Привет!</text>
      <text>☺</text>
    </switch>
 diff --git a/sw/qa/core/accessibilitycheck/AccessibilityCheckTest.cxx b/sw/qa/core/accessibilitycheck/AccessibilityCheckTest.cxx
 index d8093e57a4e8..c95a742a68d3 100644
 --- a/sw/qa/core/accessibilitycheck/AccessibilityCheckTest.cxx
 +++ b/sw/qa/core/accessibilitycheck/AccessibilityCheckTest.cxx
-@@ -361,6 +361,8 @@ void checkIssuePosition(std::shared_ptr<sfx::AccessibilityIssue> const& pIssue,
+@@ -422,6 +422,8 @@ void checkIssuePosition(std::shared_ptr<sfx::AccessibilityIssue> const& pIssue,
 
  CPPUNIT_TEST_FIXTURE(AccessibilityCheckTest, testOnlineNodeSplitAppend)
  {
@@ -39,9 +45,11 @@
      // Checks the a11y checker is setting the a11y issues to the nodes
      // correctly when splitting and appending nodes (through undo), which
      // happen on editing all the time.
 diff --git a/sw/qa/core/text/text.cxx b/sw/qa/core/text/text.cxx
 index b81146642bd4..2094b7ea9477 100644
 --- a/sw/qa/core/text/text.cxx
 +++ b/sw/qa/core/text/text.cxx
-@@ -1630,6 +1630,8 @@ CPPUNIT_TEST_FIXTURE(SwCoreTextTest, testParaUpperMarginFlyIntersect)
+@@ -1596,6 +1596,8 @@ CPPUNIT_TEST_FIXTURE(SwCoreTextTest, testParaUpperMarginFlyIntersect)
 
  CPPUNIT_TEST_FIXTURE(SwCoreTextTest, testTdf129810)
  {
@@ -50,9 +58,23 @@
      // Load the document.
      // The document embeds a subset of "Source Han Serif SC" so that it works
      // even when the font is not installed.
 diff --git a/sw/qa/extras/docbookexport/docbookexport.cxx b/sw/qa/extras/docbookexport/docbookexport.cxx
 index e7543d99577e..f44f92a47c7e 100644
 --- a/sw/qa/extras/docbookexport/docbookexport.cxx
 +++ b/sw/qa/extras/docbookexport/docbookexport.cxx
 @@ -21,6 +21,7 @@ public:
 
  CPPUNIT_TEST_FIXTURE(DocbookExportTest, testsimple)
  {
 +    return; // fails on latest libxml
      createSwDoc("simple.docx");
      save(mpFilter);
      xmlDocUniquePtr pDoc = parseXml(maTempFile);
 diff --git a/sw/qa/extras/htmlimport/htmlimport.cxx b/sw/qa/extras/htmlimport/htmlimport.cxx
 index 6cf8f22647b9..12848713771b 100644
 --- a/sw/qa/extras/htmlimport/htmlimport.cxx
 +++ b/sw/qa/extras/htmlimport/htmlimport.cxx
-@@ -308,6 +308,8 @@ CPPUNIT_TEST_FIXTURE(HtmlImportTest, testTableBorder1px)
+@@ -297,6 +297,8 @@ CPPUNIT_TEST_FIXTURE(HtmlImportTest, testTableBorder1px)
 
  CPPUNIT_TEST_FIXTURE(HtmlImportTest, testOutlineLevel)
  {
@@ -61,9 +83,11 @@
      createSwWebDoc("outline-level.html");
      // This was 0, HTML imported into Writer lost the outline numbering for
      // Heading 1 styles.
 diff --git a/sw/qa/extras/layout/layout3.cxx b/sw/qa/extras/layout/layout3.cxx
 index e53d22c8fd1a..2f9decb0d931 100644
 --- a/sw/qa/extras/layout/layout3.cxx
 +++ b/sw/qa/extras/layout/layout3.cxx
-@@ -1038,6 +1038,8 @@ CPPUNIT_TEST_FIXTURE(SwLayoutWriter3, testTdf158658c)
+@@ -1388,6 +1388,8 @@ CPPUNIT_TEST_FIXTURE(SwLayoutWriter3, testTdf158658c)
 
  CPPUNIT_TEST_FIXTURE(SwLayoutWriter3, testTdf155177)
  {
@@ -72,6 +96,8 @@
      createSwDoc("tdf155177-1-min.odt");
 
      uno::Reference<beans::XPropertySet> xStyle(
 diff --git a/sw/qa/extras/odfimport/odfimport.cxx b/sw/qa/extras/odfimport/odfimport.cxx
 index 794b3bd16ed4..3feaadd7a20f 100644
 --- a/sw/qa/extras/odfimport/odfimport.cxx
 +++ b/sw/qa/extras/odfimport/odfimport.cxx
 @@ -602,6 +602,8 @@ CPPUNIT_TEST_FIXTURE(Test, testFdo56272)
@@ -83,18 +109,20 @@
      createSwDoc("incorrectsum.odt");
      Scheduler::ProcessEventsToIdle();
      uno::Reference<text::XTextTablesSupplier> xTablesSupplier(mxComponent, uno::UNO_QUERY);
 diff --git a/sw/qa/extras/tiledrendering/tiledrendering.cxx b/sw/qa/extras/tiledrendering/tiledrendering.cxx
 index 4ebc4be96149..85b8908e16b9 100644
 --- a/sw/qa/extras/tiledrendering/tiledrendering.cxx
 +++ b/sw/qa/extras/tiledrendering/tiledrendering.cxx
-@@ -2762,6 +2762,8 @@ CPPUNIT_TEST_FIXTURE(SwTiledRenderingTest, testDeleteNodeRedlineCallback)
+@@ -2538,6 +2538,8 @@ CPPUNIT_TEST_FIXTURE(SwTiledRenderingTest, testVisCursorInvalidation)
 
-CPPUNIT_TEST_FIXTURE(SwTiledRenderingTest, testVisCursorInvalidation)
+CPPUNIT_TEST_FIXTURE(SwTiledRenderingTest, testDeselectCustomShape)
  {
 +    return; // flaky on some backends?
 +
      SwXTextDocument* pXTextDocument = createDoc("dummy.fodt");
      ViewCallback aView1;
      int nView1 = SfxLokHelper::getView();
 @@ -3048,6 +3050,8 @@ CPPUNIT_TEST_FIXTURE(SwTiledRenderingTest, testDoubleUnderlineAndStrikeOut)
      SwWrtShell* pWrtShell = getSwDocShell()->GetWrtShell();
      SwShellCursor* pShellCursor = pWrtShell->getShellCursor(false);
 @@ -2745,6 +2747,8 @@ CPPUNIT_TEST_FIXTURE(SwTiledRenderingTest, testDoubleUnderlineAndStrikeOut)
 
  CPPUNIT_TEST_FIXTURE(SwTiledRenderingTest, testTdf43244_SpacesOnMargin)
  {
@@ -103,7 +131,7 @@
      // Load a document where the top left tile contains
      // paragraph and line break symbols with redlining.
      SwXTextDocument* pXTextDocument = createDoc("tdf43244_SpacesOnMargin.odt");
-@@ -4091,6 +4095,7 @@ CPPUNIT_TEST_FIXTURE(SwTiledRenderingTest, testRedlineTooltip)
+@@ -3786,6 +3790,7 @@ CPPUNIT_TEST_FIXTURE(SwTiledRenderingTest, testRedlineTooltip)
  // toggling Formatting Marks on/off for one view should have no effect on other views
  CPPUNIT_TEST_FIXTURE(SwTiledRenderingTest, testToggleFormattingMarks)
  {
@@ -111,13 +139,15 @@
      SwXTextDocument* pXTextDocument = createDoc();
      int nView1 = SfxLokHelper::getView();
 
 diff --git a/sw/qa/extras/uiwriter/uiwriter5.cxx b/sw/qa/extras/uiwriter/uiwriter5.cxx
 index e37df27fd817..937c12e8c4c5 100644
 --- a/sw/qa/extras/uiwriter/uiwriter5.cxx
 +++ b/sw/qa/extras/uiwriter/uiwriter5.cxx
-@@ -1571,6 +1571,7 @@ CPPUNIT_TEST_FIXTURE(SwUiWriterTest5, testDateFormFieldContentOperations)
+@@ -1549,6 +1549,7 @@ CPPUNIT_TEST_FIXTURE(SwUiWriterTest5, testDateFormFieldContentOperations)
 
  CPPUNIT_TEST_FIXTURE(SwUiWriterTest5, testDateFormFieldCurrentDateHandling)
  {
 +    return; // flaky on KF6
      createSwDoc();
      SwDoc* pDoc = getSwDoc();
      CPPUNIT_ASSERT(pDoc);
      IDocumentMarkAccess* pMarkAccess = pDoc->getIDocumentMarkAccess();
@@ -59,7 +59,7 @@ python3.pkgs.buildPythonApplication rec {
     unittest-xml-reporting
   ];

-  pytestFlagsArray = [
+  enabledTestPaths = [
     "test"
   ];

@@ -60,7 +60,7 @@ assert sendEmailSupport -> perlSupport;
 assert svnSupport -> perlSupport;

 let
-  version = "2.49.0";
+  version = "2.50.0";
   svn = subversionClient.override { perlBindings = perlSupport; };
   gitwebPerlLibs = with perlPackages; [
     CGI
@@ -89,11 +89,12 @@ stdenv.mkDerivation (finalAttrs: {
       }.tar.xz"
     else
       "https://www.kernel.org/pub/software/scm/git/git-${version}.tar.xz";
-    hash = "sha256-YYGQz1kLfp9sEfkfI7HSZ82Yw6szuFBBbYdY+LWoVig=";
+    hash = "sha256-3/PAAOQArOOmO4pvizt2uI7P3/1FBKBKukJINyzewEU=";
   };

   outputs = [ "out" ] ++ lib.optional withManual "doc";
   separateDebugInfo = true;
+  __structuredAttrs = true;

   hardeningDisable = [ "format" ];

@@ -116,7 +117,8 @@ stdenv.mkDerivation (finalAttrs: {
       # Fix references to gettext introduced by ./git-sh-i18n.patch
       substituteInPlace git-sh-i18n.sh \
         --subst-var-by gettext ${gettext}
     ''
     + lib.optionalString doInstallCheck ''
       # ensure we are using the correct shell when executing the test scripts
       patchShebangs t/*.sh
     ''
@@ -165,7 +167,7 @@ stdenv.mkDerivation (finalAttrs: {
   ];

   # required to support pthread_cancel()
-  NIX_LDFLAGS =
+  env.NIX_LDFLAGS =
     lib.optionalString (stdenv.cc.isGNU && stdenv.hostPlatform.libc == "glibc") "-lgcc_s"
     + lib.optionalString (stdenv.hostPlatform.isFreeBSD) "-lthr";

@@ -282,10 +284,6 @@ stdenv.mkDerivation (finalAttrs: {

   postInstall =
     ''
-      notSupported() {
-        unlink $1 || true
-      }
-
       # Set up the flags array for make in the same way as for the main install
       # phase from stdenv.
       local flagsArray=(
@@ -303,7 +301,6 @@ stdenv.mkDerivation (finalAttrs: {
       mkdir -p $out/share/git
       cp -a contrib $out/share/git/
       mkdir -p $out/share/bash-completion/completions
       ln -s $out/share/git/contrib/completion/git-completion.bash $out/share/bash-completion/completions/git
       ln -s $out/share/git/contrib/completion/git-prompt.sh $out/share/bash-completion/completions/
       # only readme, developed in another repo
       rm -r contrib/hooks/multimail
@@ -382,8 +379,7 @@ stdenv.mkDerivation (finalAttrs: {
       ''
     else
       ''
-        # replace git-svn by notification script
-        notSupported $out/libexec/git-core/git-svn
+        rm $out/libexec/git-core/git-svn
       ''
   )
@@ -396,14 +392,13 @@ stdenv.mkDerivation (finalAttrs: {
       ''
     else
       ''
-        # replace git-send-email by notification script
-        notSupported $out/libexec/git-core/git-send-email
+        rm $out/libexec/git-core/git-send-email
       ''
   )
   + lib.optionalString withManual ''
     # Install man pages
-    make "''${flagsArray[@]}" cmd-list.made install install-html \
+    make "''${flagsArray[@]}" install install-html \
       -C Documentation
   ''
@@ -420,9 +415,8 @@ stdenv.mkDerivation (finalAttrs: {
       ''
     else
       ''
-        # Don't wrap Tcl/Tk, replace them by notification scripts
         for prog in bin/gitk libexec/git-core/git-gui; do
-          notSupported "$out/$prog"
+          rm "$out/$prog"
         done
       ''
   )
@@ -573,6 +567,8 @@ stdenv.mkDerivation (finalAttrs: {
     wmertens
     globin
     kashw2
     me-and
     philiptaron
   ];
   mainProgram = "git";
 };
@@ -144,11 +144,11 @@ stdenv.mkDerivation (finalAttrs: {
     + lib.optionalString nixosTestRunner "-for-vm-tests"
     + lib.optionalString toolsOnly "-utils"
     + lib.optionalString userOnly "-user";
-  version = "10.0.0";
+  version = "10.0.2";

   src = fetchurl {
     url = "https://download.qemu.org/qemu-${finalAttrs.version}.tar.xz";
-    hash = "sha256-IsB1YB/c+MeyZxqDnr3O8dTylz62c1JU/S4b0PMLOJY=";
+    hash = "sha256-73hvI5jLUYRgD2mu9NXWke/URXajz/QSbTjUxv7Id1k=";
   };

   depsBuildBuild =
@@ -35,7 +35,7 @@
   alsa-lib,
   curl,
   libvpx,
-  nettools,
+  net-tools,
   dbus,
   replaceVars,
   gsoap,
@@ -263,7 +263,7 @@ stdenv.mkDerivation (finalAttrs: {
   ];

   postPatch = ''
-    sed -i -e 's|/sbin/ifconfig|${nettools}/bin/ifconfig|' \
+    sed -i -e 's|/sbin/ifconfig|${net-tools}/bin/ifconfig|' \
       src/VBox/HostDrivers/adpctl/VBoxNetAdpCtl.cpp
   '';

@@ -20,7 +20,7 @@
   gobject-introspection,
   which,
   dbus,
-  nettools,
+  net-tools,
   git,
   doxygen,
   xmlto,
@@ -105,7 +105,7 @@ stdenv.mkDerivation rec {
     libstartup_notification
     libxdg_basedir
     lua
-    nettools
+    net-tools
     pango
     xcb-util-cursor
     xorg.libXau
@@ -9,8 +9,8 @@ use JSON::PP;

 STDOUT->autoflush(1);

-$SIG{__WARN__} = sub { warn "warning: ", @_ };
-$SIG{__DIE__} = sub { die "error: ", @_ };
+$SIG{__WARN__} = sub { warn "pkgs.buildEnv warning: ", @_ };
+$SIG{__DIE__} = sub { die "pkgs.buildEnv error: ", @_ };

 my $out = $ENV{"out"};
 my $extraPrefix = $ENV{"extraPrefix"};
@@ -109,7 +109,7 @@ sub findFiles($relName, $target, $baseName, $ignoreCollisions, $checkCollisionCo
     # The store path must not be a file when not ignoreSingleFileOutputs
     if (-f $target && isStorePath $target) {
         if ($ignoreSingleFileOutputs) {
-            warn "The store path $target is a file and can't be merged into an environment using pkgs.buildEnv";
+            warn "The store path $target is a file and can't be merged into an environment using pkgs.buildEnv, ignoring it";
             return;
         } else {
             die "The store path $target is a file and can't be merged into an environment using pkgs.buildEnv!";
@@ -173,12 +173,12 @@ sub findFiles($relName, $target, $baseName, $ignoreCollisions, $checkCollisionCo
     my $oldTargetRef = prependDangling($oldTarget);

     if ($ignoreCollisions) {
-        warn "collision between $targetRef and $oldTargetRef\n" if $ignoreCollisions == 1;
+        warn "colliding subpath (ignored): $targetRef and $oldTargetRef\n" if $ignoreCollisions == 1;
         return;
     } elsif ($checkCollisionContents && checkCollision($oldTarget, $target)) {
         return;
     } else {
-        die "collision between $targetRef and $oldTargetRef\n";
+        die "two given paths contain a conflicting subpath:\n  $targetRef and\n  $oldTargetRef\nhint: this may be caused by two different versions of the same package in buildEnv's `paths` parameter\nhint: `pkgs.nix-diff` can be used to compare derivations\n";
     }
 }
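For illustration, a hedged sketch of a `buildEnv` call that would hit the new collision error; the package pair is hypothetical, chosen because both interpreters install a `bin/python3` link:

```nix
# Two versions of the same package in `paths` share subpaths, so
# buildEnv now dies with the more descriptive message above.
pkgs.buildEnv {
  name = "conflicting-env";
  paths = [ pkgs.python311 pkgs.python312 ];
  # ignoreCollisions = true; # would downgrade the error to a warning
}
```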
@@ -18,21 +18,6 @@ if [[ -n "${hardeningEnableMap[fortify3]-}" ]]; then
   hardeningEnableMap["fortify"]=1
 fi

-# Remove unsupported flags.
-for flag in @hardening_unsupported_flags@; do
-  unset -v "hardeningEnableMap[$flag]"
-  # fortify being unsupported implies fortify3 is unsupported
-  if [[ "$flag" = 'fortify' ]] ; then
-    unset -v "hardeningEnableMap['fortify3']"
-  fi
-done
-
-# now make fortify and fortify3 mutually exclusive
-if [[ -n "${hardeningEnableMap[fortify3]-}" ]]; then
-  unset -v "hardeningEnableMap['fortify']"
-fi
-
 # strictflexarrays3 implies strictflexarrays1 enablement - make explicit before
 # we filter unsupported flags because unsupporting strictflexarrays3
 # doesn't mean we should unsupport strictflexarrays1 too
@@ -40,15 +25,26 @@ if [[ -n "${hardeningEnableMap[strictflexarrays3]-}" ]]; then
   hardeningEnableMap["strictflexarrays1"]=1
 fi

 # Remove unsupported flags.
 for flag in @hardening_unsupported_flags@; do
   unset -v "hardeningEnableMap[$flag]"
   # fortify being unsupported implies fortify3 is unsupported
   if [[ "$flag" = 'fortify' ]] ; then
     unset -v "hardeningEnableMap['fortify3']"
   fi
   # strictflexarrays1 being unsupported implies strictflexarrays3 is unsupported
   if [[ "$flag" = 'strictflexarrays1' ]] ; then
     unset -v "hardeningEnableMap['strictflexarrays3']"
   fi
 done

 # now make fortify and fortify3 mutually exclusive
 if [[ -n "${hardeningEnableMap[fortify3]-}" ]]; then
   unset -v "hardeningEnableMap['fortify']"
 fi

 # now make strictflexarrays1 and strictflexarrays3 mutually exclusive
 if [[ -n "${hardeningEnableMap[strictflexarrays3]-}" ]]; then
   unset -v "hardeningEnableMap['strictflexarrays1']"
@@ -3,17 +3,17 @@ const path = require('path')
 // This has to match the logic in pkgs/development/tools/yarn2nix-moretea/yarn2nix/lib/urlToName.js
 // so that fixup_yarn_lock produces the same paths
 const urlToName = url => {
   const isCodeloadGitTarballUrl = url.startsWith('https://codeload.github.com/') && url.includes('/tar.gz/')

   if (url.startsWith('file:')) {
     return url
   } else if (url.startsWith('git+') || isCodeloadGitTarballUrl) {
     return path.basename(url)
   } else {
     return url
       .replace(/https:\/\/(.)*(.com)\//g, '') // prevents having long directory names
       .replace(/[@/%:-]/g, '_') // replace @ and : and - and % characters with underscore
   }
 }

 module.exports = { urlToName };
@@ -7,79 +7,79 @@ const lockfile = require('./yarnpkg-lockfile.js')
 const { urlToName } = require('./common.js')

 const fixupYarnLock = async (lockContents, verbose) => {
   const lockData = lockfile.parse(lockContents)

   const fixedData = Object.fromEntries(
     Object.entries(lockData.object)
       .map(([dep, pkg]) => {
         if (pkg.resolved === undefined) {
           console.warn(`no resolved URL for package ${dep}`)
           var maybeFile = dep.split("@", 2)[1]
           if (maybeFile.startsWith("file:")) {
             console.log(`Rewriting URL for local file dependency ${dep}`)
             pkg.resolved = maybeFile
           }
           return [dep, pkg]
         }
         const [ url, hash ] = pkg.resolved.split("#", 2)

         if (hash || url.startsWith("https://codeload.github.com/")) {
           if (verbose) console.log(`Removing integrity for git dependency ${dep}`)
           delete pkg.integrity
         }

         if (verbose) console.log(`Rewriting URL ${url} for dependency ${dep}`)
         pkg.resolved = urlToName(url)
         if (hash)
           pkg.resolved += `#${hash}`

         return [dep, pkg]
       })
   )

   if (verbose) console.log('Done')

   return fixedData
 }

 const showUsage = async () => {
   process.stderr.write(`
syntax: fixup-yarn-lock [path to yarn.lock] [options]

Options:
  -h --help         Show this help
  -v --verbose      Verbose output
`)
   process.exit(1)
 }

 const main = async () => {
   const args = process.argv.slice(2)
   let next, lockFile, verbose
   while (next = args.shift()) {
     if (next == '--verbose' || next == '-v') {
       verbose = true
     } else if (next == '--help' || next == '-h') {
       showUsage()
     } else if (!lockFile) {
       lockFile = next
     } else {
       showUsage()
     }
   }
   let lockContents
   try {
     lockContents = await fs.promises.readFile(lockFile || 'yarn.lock', 'utf-8')
   } catch {
     showUsage()
   }

   const fixedData = await fixupYarnLock(lockContents, verbose)
   await fs.promises.writeFile(lockFile || 'yarn.lock', lockfile.stringify(fixedData))
 }

 main()
   .catch(e => {
     console.error(e)
     process.exit(1)
   })
@@ -15,155 +15,155 @@ const { urlToName } = require('./common.js')
 const execFile = promisify(child_process.execFile)

 const exec = async (...args) => {
   const res = await execFile(...args)
   if (res.error) throw new Error(res.stderr)
   return res
 }

 const downloadFileHttps = (fileName, url, expectedHash, hashType = 'sha1') => {
   return new Promise((resolve, reject) => {
     const get = (url, redirects = 0) => https.get(url, (res) => {
       if(redirects > 10) {
         reject('Too many redirects!');
         return;
       }
       if(res.statusCode === 301 || res.statusCode === 302) {
         return get(res.headers.location, redirects + 1)
       }
       const file = fs.createWriteStream(fileName)
       const hash = crypto.createHash(hashType)
       res.pipe(file)
       res.pipe(hash).setEncoding('hex')
       res.on('end', () => {
         file.close()
         const h = hash.read()
         if (expectedHash === undefined){
           console.log(`Warning: lockfile url ${url} doesn't end in "#<hash>" to validate against. Downloaded file had hash ${h}.`);
         } else if (h != expectedHash) return reject(new Error(`hash mismatch, expected ${expectedHash}, got ${h} for ${url}`))
         resolve()
       })
       res.on('error', e => reject(e))
     })
     get(url)
   })
 }

 const downloadGit = async (fileName, url, rev) => {
   await exec('nix-prefetch-git', [
     '--out', fileName + '.tmp',
     '--url', url,
     '--rev', rev,
     '--builder'
   ])

   await exec('tar', [
     // hopefully make it reproducible across runs and systems
     '--owner=0', '--group=0', '--numeric-owner', '--format=gnu', '--sort=name', '--mtime=@1',

     // Set u+w because tar-fs can't unpack archives with read-only dirs: https://github.com/mafintosh/tar-fs/issues/79
     '--mode', 'u+w',

     '-C', fileName + '.tmp',
     '-cf', fileName, '.'
   ])

   await exec('rm', [ '-rf', fileName + '.tmp', ])
 }

 const isGitUrl = pattern => {
   // https://github.com/yarnpkg/yarn/blob/3119382885ea373d3c13d6a846de743eca8c914b/src/resolvers/exotics/git-resolver.js#L15-L47
   const GIT_HOSTS = ['github.com', 'gitlab.com', 'bitbucket.com', 'bitbucket.org']
   const GIT_PATTERN_MATCHERS = [/^git:/, /^git\+.+:/, /^ssh:/, /^https?:.+\.git$/, /^https?:.+\.git#.+/]

   for (const matcher of GIT_PATTERN_MATCHERS) if (matcher.test(pattern)) return true

   const {hostname, path} = url.parse(pattern)
   if (hostname && path && GIT_HOSTS.indexOf(hostname) >= 0
     // only if dependency is pointing to a git repo,
     // e.g. facebook/flow and not file in a git repo facebook/flow/archive/v1.0.0.tar.gz
     && path.split('/').filter(p => !!p).length === 2
   ) return true

   return false
 }

 const downloadPkg = (pkg, verbose) => {
   for (let marker of ['@file:', '@link:']) {
     const split = pkg.key.split(marker)
     if (split.length == 2) {
       console.info(`ignoring lockfile entry "${split[0]}" which points at path "${split[1]}"`)
       return
     } else if (split.length > 2) {
       throw new Error(`The lockfile entry key "${pkg.key}" contains "${marker}" more than once. Processing is not implemented.`)
     }
   }

   if (pkg.resolved === undefined) {
     throw new Error(`The lockfile entry with key "${pkg.key}" cannot be downloaded because it is missing the "resolved" attribute, which should contain the URL to download from. The lockfile might be invalid.`)
   }

   const [ url, hash ] = pkg.resolved.split('#')
   if (verbose) console.log('downloading ' + url)
   const fileName = urlToName(url)
   const s = url.split('/')
   if (url.startsWith('https://codeload.github.com/') && url.includes('/tar.gz/')) {
     return downloadGit(fileName, `https://github.com/${s[3]}/${s[4]}.git`, s[s.length-1])
   } else if (url.startsWith('https://github.com/') && url.endsWith('.tar.gz') &&
     (
       s.length <= 5 || // https://github.com/owner/repo.tgz#feedface...
       s[5] == "archive" // https://github.com/owner/repo/archive/refs/tags/v0.220.1.tar.gz
     )) {
     return downloadGit(fileName, `https://github.com/${s[3]}/${s[4]}.git`, s[s.length-1].replace(/.tar.gz$/, ''))
   } else if (isGitUrl(url)) {
     return downloadGit(fileName, url.replace(/^git\+/, ''), hash)
   } else if (url.startsWith('https://')) {
     if (typeof pkg.integrity === 'string' || pkg.integrity instanceof String) {
       const [ type, checksum ] = pkg.integrity.split('-')
       return downloadFileHttps(fileName, url, Buffer.from(checksum, 'base64').toString('hex'), type)
     }
     return downloadFileHttps(fileName, url, hash)
   } else if (url.startsWith('file:')) {
     console.warn(`ignoring unsupported file:path url "${url}"`)
   } else {
     throw new Error('don\'t know how to download "' + url + '"')
   }
 }

 const performParallel = tasks => {
   const worker = async () => {
     while (tasks.length > 0) await tasks.shift()()
   }

   const workers = []
   for (let i = 0; i < 4; i++) {
     workers.push(worker())
   }

   return Promise.all(workers)
 }

 // This could be implemented using [`Map.groupBy`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Map/groupBy),
 // but that method is only supported starting with Node 21
 const uniqueBy = (arr, callback) => {
   const map = new Map()
   for (const elem of arr) {
     map.set(callback(elem), elem)
   }
   return [...map.values()]
 }

 const prefetchYarnDeps = async (lockContents, verbose) => {
   const lockData = lockfile.parse(lockContents)
   await performParallel(
     uniqueBy(Object.entries(lockData.object), ([_, value]) => value.resolved)
       .map(([key, value]) => () => downloadPkg({ key, ...value }, verbose))
   )
   await fs.promises.writeFile('yarn.lock', lockContents)
   if (verbose) console.log('Done')
 }

 const showUsage = async () => {
   process.stderr.write(`
syntax: prefetch-yarn-deps [path to yarn.lock] [options]

Options:
@@ -171,50 +171,50 @@ Options:
  -v --verbose      Verbose output
  --builder         Only perform the download to current directory, then exit
`)
   process.exit(1)
 }

 const main = async () => {
   const args = process.argv.slice(2)
   let next, lockFile, verbose, isBuilder
   while (next = args.shift()) {
     if (next == '--builder') {
       isBuilder = true
     } else if (next == '--verbose' || next == '-v') {
       verbose = true
     } else if (next == '--help' || next == '-h') {
       showUsage()
     } else if (!lockFile) {
       lockFile = next
     } else {
       showUsage()
     }
   }
   let lockContents
   try {
     lockContents = await fs.promises.readFile(lockFile || 'yarn.lock', 'utf-8')
   } catch {
     showUsage()
   }

   if (isBuilder) {
     await prefetchYarnDeps(lockContents, verbose)
   } else {
     const { stdout: tmpDir } = await exec('mktemp', [ '-d' ])

     try {
       process.chdir(tmpDir.trim())
       await prefetchYarnDeps(lockContents, verbose)
       const { stdout: hash } = await exec('nix-hash', [ '--type', 'sha256', '--base32', tmpDir.trim() ])
       console.log(hash)
     } finally {
       await exec('rm', [ '-rf', tmpDir.trim() ])
     }
   }
 }

 main()
   .catch(e => {
     console.error(e)
     process.exit(1)
   })
@@ -22,6 +22,7 @@ lib.extendMkDerivation {
     "depsExtraArgs"
     "cargoUpdateHook"
     "cargoLock"
+    "useFetchCargoVendor"
   ];

   extendDrvArgs =
@@ -91,13 +91,21 @@
     lib.optionalString (stdenv.hostPlatform.config != stdenv.targetPlatform.config) ''
       [target."${stdenv.targetPlatform.rust.rustcTarget}"]
       "linker" = "${pkgsTargetTarget.stdenv.cc}/bin/${pkgsTargetTarget.stdenv.cc.targetPrefix}cc"
-      "rustflags" = [ "-C", "target-feature=${
-        if pkgsTargetTarget.stdenv.targetPlatform.isStatic then "+" else "-"
-      }crt-static" ]
+      "rustflags" = [ ${
+        lib.concatStringsSep ", " (
+          [
+            ''"-Ctarget-feature=${if stdenv.targetPlatform.isStatic then "+" else "-"}crt-static"''
+          ]
+          ++ lib.optional (!stdenv.targetPlatform.isx86_32) ''"-Cforce-frame-pointers=yes"''
+        )
+      } ]
     ''
     + ''
       [target."${stdenv.hostPlatform.rust.rustcTarget}"]
       "linker" = "${stdenv.cc}/bin/${stdenv.cc.targetPrefix}cc"
+      "rustflags" = [ ${
+        lib.optionalString (!stdenv.hostPlatform.isx86_32) ''"-Cforce-frame-pointers=yes"''
+      } ]
     '';
 };
@@ -15,27 +15,54 @@ auditTmpdir() {

   echo "checking for references to $TMPDIR/ in $dir..."

-  _processFile() {
-    local file="$1"
-    if isELF "$file"; then
-      if { printf :; patchelf --print-rpath "$file"; } | grep -q -F ":$TMPDIR/"; then
-        echo "RPATH of binary $file contains a forbidden reference to $TMPDIR/"
-        exit 1
-      fi
-    elif isScript "$file"; then
-      filename=${i##*/}
-      dir=${i%/*}
-      if [ -e "$dir/.$filename-wrapped" ]; then
-        if grep -q -F "$TMPDIR/" "$file"; then
-          echo "wrapper script $file contains a forbidden reference to $TMPDIR/"
-          exit 1
-        fi
-      fi
-    fi
-  }
+  local tmpdir elf_fifo script_fifo
+  tmpdir="$(mktemp -d)"
+  elf_fifo="$tmpdir/elf"
+  script_fifo="$tmpdir/script"
+  mkfifo "$elf_fifo" "$script_fifo"

-  find "$dir" -type f -not -path '*/.build-id/*' -print0 \
-    | parallelMap _processFile
+  # Classifier: identify ELF and script files
+  (
+    find "$dir" -type f -not -path '*/.build-id/*' -print0 \
+      | while IFS= read -r -d $'\0' file; do
+        if isELF "$file"; then
+          printf '%s\0' "$file" >&3
+        elif isScript "$file"; then
+          filename=${file##*/}
+          dir=${file%/*}
+          if [ -e "$dir/.$filename-wrapped" ]; then
+            printf '%s\0' "$file" >&4
+          fi
+        fi
+      done
+    exec 3>&- 4>&-
+  ) 3> "$elf_fifo" 4> "$script_fifo" &
+
+  # Handler: check RPATHs concurrently
+  (
+    xargs -0 -r -P "$NIX_BUILD_CORES" -n 1 sh -c '
+      if { printf :; patchelf --print-rpath "$1"; } | grep -q -F ":$TMPDIR/"; then
+        echo "RPATH of binary $1 contains a forbidden reference to $TMPDIR/"
+        exit 1
+      fi
+    ' _ < "$elf_fifo"
+  ) &
+  local pid_elf=$!

-  unset -f _processFile
+  # Handler: check wrapper scripts concurrently
+  (
+    xargs -0 -r -P "$NIX_BUILD_CORES" -n 1 sh -c '
+      if grep -q -F "$TMPDIR/" "$1"; then
+        echo "wrapper script $1 contains a forbidden reference to $TMPDIR/"
+        exit 1
+      fi
+    ' _ < "$script_fifo"
+  ) &
+  local pid_script=$!
+
+  wait "$pid_elf" || { echo "Some binaries contain forbidden references to $TMPDIR/. Check the error above!"; exit 1; }
+  wait "$pid_script" || { echo "Some scripts contain forbidden references to $TMPDIR/. Check the error above!"; exit 1; }
+
+  rm -r "$tmpdir"
 }
@ -95,6 +95,7 @@ autoPatchelfPostFixup() {
    if [[ -z "${dontAutoPatchelf-}" ]]; then
        autoPatchelf -- $(for output in $(getAllOutputNames); do
            [ -e "${!output}" ] || continue
            [ "${output}" = debug ] && continue
            echo "${!output}"
        done)
    fi
@ -51,6 +51,12 @@ noBrokenSymlinks() {
            symlinkTarget="$(realpath --no-symlinks --canonicalize-missing "$pathParent/$symlinkTarget")"
        fi

        # use $TMPDIR like audit-tmpdir.sh
        if [[ $symlinkTarget = "$TMPDIR"/* ]]; then
            nixErrorLog "the symlink $path points to $TMPDIR directory: $symlinkTarget"
            numDanglingSymlinks+=1
            continue
        fi
        if [[ $symlinkTarget != "$NIX_STORE"/* ]]; then
            nixInfoLog "symlink $path points outside the Nix store; ignoring"
            continue
@ -1,89 +0,0 @@
# Parallel execution utilities
# These functions provide a framework for parallel processing of jobs from stdin

# parallelRun - Execute a command in parallel across multiple cores
#
# Reads null-delimited jobs from stdin and distributes them across NIX_BUILD_CORES
# worker processes. Each worker executes the provided command, receiving jobs
# via stdin in null-delimited format.
#
# Usage: some_producer | parallelRun command [args...]
#
# The command receives jobs one at a time via stdin (null-delimited).
#
# Example:
#   find . -name '*.log' -print0 | parallelRun sh -c '
#     while read -r -d "" file; do gzip "$file"; done
#   '
parallelRun() {
    local pids
    local lock
    pids=()
    lock=$(mktemp -u)
    mkfifo "$lock"
    for ((i=0; i<NIX_BUILD_CORES; i++)); do
        {
            exec 3<"$lock" # fd-3 = read side of lock
            exec 4>"$lock" # fd-4 = write side of lock (push token back)
            local job

            while :; do
                # Acquire the lock: blocks until a token can be read
                read -r -n1 >/dev/null <&3

                # Read one job from stdin.
                # This is guarded by the lock above in order to prevent
                # multiple workers from reading from stdin simultaneously.
                if ! IFS= read -r -d '' job; then
                    # If stdin is closed, release lock and exit
                    printf 'x' >&4
                    break
                fi

                # Release the lock: write a token back to the lock FIFO
                printf 'y' >&4

                # Forward job to the worker process' stdin
                printf '%s\0' "$job"

            done \
                | "$@" # launch the worker process
        } &
        pids[$i]=$!
    done
    # launch the workers by writing a token to the lock FIFO
    printf 'a' >"$lock" &
    # Wait for all workers to finish
    for pid in "${pids[@]}"; do
        if ! wait "$pid"; then
            echo "A parallel job failed with exit code $? (check for errors above)" >&2
            echo -e "Failing Command:\n  $@" >&2
            exit 1
        fi
    done
    rm "$lock"
}

# parallelMap - Apply a shell function to each job in parallel
#
# A higher-level wrapper around parallelRun that applies a shell function to each
# null-delimited job from stdin. The shell function receives each job as its last
# argument.
#
# Usage: some_producer | parallelMap shell_function [additional_args...]
#
# The shell function is called as: shell_function [additional_args...] job
# for each job read from stdin.
#
# Example:
#   compress() { gzip "$1"; }
#   find . -name '*.log' -print0 | parallelMap compress
parallelMap() {
    _wrapper() {
        while IFS= read -r -d '' job; do
            "$@" "$job"
        done
    }
    parallelRun _wrapper "$@"
    unset -f _wrapper
}
@ -68,6 +68,31 @@ patchShebangs() {
        return 0
    fi

    # like sponge from moreutils but in pure bash
    _sponge() {
        local content
        local target
        local restoreReadOnly
        content=""
        target="$1"

        # Make file writable if it is read-only
        if [[ ! -w "$target" ]]; then
            chmod +w "$target"
            restoreReadOnly=true
        fi

        while IFS= read -r line || [[ -n "$line" ]]; do
            content+="$line"$'\n'
        done
        printf '%s' "$content" > "$target"

        # Restore read-only if it was read-only before
        if [[ -n "${restoreReadOnly:-}" ]]; then
            chmod -w "$target"
        fi
    }

    local f
    while IFS= read -r -d $'\0' f; do
        isScript "$f" || continue
@ -126,11 +151,14 @@ patchShebangs() {

            # Preserve times, see: https://github.com/NixOS/nixpkgs/pull/33281
            timestamp=$(stat --printf "%y" "$f")
            sed -i -e "1 s|.*|#\!$escapedInterpreterLine|" "$f"
            sed -e "1 s|.*|#\!$escapedInterpreterLine|" "$f" | _sponge "$f"

            touch --date "$timestamp" "$f"
        fi
    fi
    done < <(find "$@" -type f -perm -0100 -print0)

    unset -f _sponge
}

patchShebangsAuto () {
@ -3,18 +3,47 @@ export NIX_LDFLAGS+=" --compress-debug-sections=zlib"
export NIX_CFLAGS_COMPILE+=" -ggdb -Wa,--compress-debug-sections"
export NIX_RUSTFLAGS+=" -g -C strip=none"

cksumAlgo=sha256

fixupOutputHooks+=(_separateDebugInfo)
postUnpackHooks+=(_recordPristineSourceHashes)

_recordPristineSourceHashes() {
    # shellcheck disable=2154
    [ -e "$sourceRoot" ] || return 0

    local checksumFileName=__nix_source_checksums
    echo "separate-debug-info: recording checksum of source files for debug support..."
    find "$sourceRoot" -type f -exec cksum -a "$cksumAlgo" '{}' \+ > "$checksumFileName"
    recordedSourceChecksumsFileName="$(readlink -f "$checksumFileName")"
}

_separateDebugInfo() {
    # shellcheck disable=2154
    [ -e "$prefix" ] || return 0

    local dst="${debug:-$out}"
    if [ "$prefix" = "$dst" ]; then return 0; fi
    local debugOutput="${debug:-$out}"
    if [ "$prefix" = "$debugOutput" ]; then return 0; fi

    # in case there is nothing to strip, don't fail the build
    mkdir -p "$dst"
    mkdir -p "$debugOutput"

    dst="$dst/lib/debug/.build-id"
    local dst="$debugOutput/lib/debug/.build-id"

    local source
    local sourceOverlay
    # shellcheck disable=2154
    if [ -e "$src" ]; then
        source="$src"
        if [ -n "${recordedSourceChecksumsFileName:-}" ]; then
            sourceOverlay="$debugOutput/src/overlay"
        else
            sourceOverlay=""
        fi
    else
        source=""
        sourceOverlay=""
    fi

    # Find executables and dynamic libraries.
    local i
@ -25,30 +54,64 @@ _separateDebugInfo() {
        [ -z "${OBJCOPY:-}" ] && echo "_separateDebugInfo: '\$OBJCOPY' variable is empty, skipping." 1>&2 && break

        # Extract the Build ID. FIXME: there's probably a cleaner way.
        local id="$($READELF -n "$i" | sed 's/.*Build ID: \([0-9a-f]*\).*/\1/; t; d')"
        local id
        id="$($READELF -n "$i" | sed 's/.*Build ID: \([0-9a-f]*\).*/\1/; t; d')"
        if [ "${#id}" != 40 ]; then
            echo "could not find build ID of $i, skipping" >&2
            continue
        fi

        # Extract the debug info.
        echo "separating debug info from $i (build ID $id)"

        destDir=$dst/${id:0:2}
        destFile=$dst/${id:0:2}/${id:2}.debug
        local debuginfoDir="$dst/${id:0:2}"
        local buildIdPrefix="$debuginfoDir/${id:2}"
        local debuginfoFile="$buildIdPrefix.debug"
        local executableSymlink="$buildIdPrefix.executable"
        local sourceSymlink="$buildIdPrefix.source"
        local sourceOverlaySymlink="$buildIdPrefix.sourceoverlay"

        mkdir -p "$destDir"
        mkdir -p "$debuginfoDir"

        if [ -f "$destFile" ]; then
        if [ -f "$debuginfoFile" ]; then
            echo "separate-debug-info: warning: multiple files with build id $id found, overwriting"
        fi

        # This may fail, e.g. if the binary is for a different
        # architecture than we're building for. (This happens with
        # firmware blobs in QEMU.)
        if $OBJCOPY --only-keep-debug "$i" "$destFile"; then
        if $OBJCOPY --only-keep-debug "$i" "$debuginfoFile"; then
            # If we succeeded, also create a symlink <original-name>.debug.
            ln -sfn ".build-id/${id:0:2}/${id:2}.debug" "$dst/../$(basename "$i")"
            ln -sfn "$debuginfoFile" "$dst/../$(basename "$i")"
            # also create a symlink mapping the build-id to the original elf file and the source
            # debuginfod protocol relies on it
            ln -sfn "$i" "$executableSymlink"
            if [ -n "$source" ]; then
                ln -sfn "$source" "$sourceSymlink"
            fi
            if [ -n "$sourceOverlay" ]; then
                # create it lazily
                if [ ! -d "$sourceOverlay" ]; then
                    echo "separate-debug-info: copying patched source files to $sourceOverlay..."
                    mkdir -p "$sourceOverlay"
                    pushd "$(dirname "$recordedSourceChecksumsFileName")" || { echo "separate-debug-info: failed to cd parent directory of $recordedSourceChecksumsFileName"; return 1; }
                    while IFS= read -r -d $'\0' modifiedSourceFile; do
                        if [ -z "$modifiedSourceFile" ]; then
                            continue
                        fi
                        # this can happen with files with '\n' in their name
                        if [ ! -f "$modifiedSourceFile" ]; then
                            echo "separate-debug-info: cannot save modified source file $modifiedSourceFile: does not exist. ignoring"
                            continue
                        fi
                        mkdir -p "$sourceOverlay/$(dirname "$modifiedSourceFile")"
                        cp -v "$modifiedSourceFile" "$sourceOverlay/$modifiedSourceFile"
                    done < <(LANG=C cksum -a "$cksumAlgo" --check --ignore-missing --quiet "$recordedSourceChecksumsFileName" 2>&1 | sed -n -e 's/: FAILED$/\x00/p' | sed -z -e 's/^\n//')
                    popd || { echo "separate-debug-info: failed to popd" ; return 1; }
                fi
                ln -sfn "$sourceOverlay" "$sourceOverlaySymlink"
            fi
        else
            # If we failed, try to clean up unnecessary directories
            rmdir -p "$dst/${id:0:2}" --ignore-fail-on-non-empty
@ -1,23 +0,0 @@
{
  stdenv,
}:
{
  # test based on bootstrap tools to prevent rebuilding stdenv on each change
  parallel =
    (derivation {
      name = "test-parallel-hook";
      system = stdenv.system;
      builder = "${stdenv.bootstrapTools}/bin/bash";
      PATH = "${stdenv.bootstrapTools}/bin";
      args = [
        "-c"
        ''
          . ${../parallel.sh}
          . ${./test-parallel.sh}
        ''
      ];
    })
    // {
      meta = { };
    };
}
@ -1,146 +0,0 @@
export NIX_BUILD_CORES=4

echo "Testing worker distribution..."

# Generate 100 jobs to ensure all workers get some
for i in {1..100}; do
    printf "job%d\0" $i
done | parallelRun sh -c '
    while IFS= read -r -d "" job; do
        sleep 0.05 # Simulate some work
        echo "Worker $$ processed $job" >> /tmp/worker-output
    done
'

# Check that all 4 workers were actually utilized
worker_count=$(sort /tmp/worker-output | cut -d" " -f2 | sort -u | wc -l)
if [ "$worker_count" -ne 4 ]; then
    echo "ERROR: Expected exactly 4 workers, got $worker_count"
    cat /tmp/worker-output
    exit 1
fi
echo "SUCCESS: All 4 workers participated"
rm -f /tmp/worker-output

echo "Testing error propagation..."

# Test that errors from workers are propagated
if printf "job1\0job2\0job3\0" | parallelRun sh -c '
    while IFS= read -r -d "" job; do
        if [ "$job" = "job2" ]; then
            echo "Worker failing on $job" >&2
            exit 1
        fi
        echo "Worker processed $job"
    done
' 2>/dev/null; then
    echo "ERROR: Expected command to fail but it succeeded"
    exit 1
else
    echo "SUCCESS: Error was properly propagated"
fi

echo "Testing error message..."

error_output=$(printf "job1\0job2\0job3\0" | parallelRun sh -c '
    while IFS= read -r -d "" job; do
        if [ "$job" = "job2" ]; then
            echo "Worker failing on $job" >&2
            exit 1
        fi
        echo "Worker processed $job"
    done
' 2>&1 || true)

if [[ "$error_output" != *"job failed"* ]]; then
    echo "ERROR: Expected 'job failed' in error message, got: $error_output"
    exit 1
fi
echo "SUCCESS: Error message was displayed"

echo "Testing that all jobs are processed when no errors occur..."

# Generate jobs and count processed ones
for i in {1..10}; do
    printf "job%d\0" $i
done | parallelRun sh -c '
    while IFS= read -r -d "" job; do
        echo "$job" >> /tmp/processed-jobs
    done
'

processed_count=$(wc -l < /tmp/processed-jobs)
if [ "$processed_count" -ne 10 ]; then
    echo "ERROR: Expected 10 jobs processed, got $processed_count"
    exit 1
fi
echo "SUCCESS: All 10 jobs were processed"
rm -f /tmp/processed-jobs

echo "All parallelRun tests passed!"

# ---------------------------------------------------------------------

echo "Testing parallelMap basic functionality..."

# Define a test function
testFunc() {
    echo "Processing: $1" >> /tmp/map-output
}

# Test that parallelMap calls the function with each job
for i in {1..5}; do
    printf "item%d\0" $i
done | parallelMap testFunc

# Check all jobs were processed
processed_map_count=$(wc -l < /tmp/map-output)
if [ "$processed_map_count" -ne 5 ]; then
    echo "ERROR: Expected 5 items processed by parallelMap, got $processed_map_count"
    exit 1
fi
echo "SUCCESS: parallelMap processed all 5 items"
rm -f /tmp/map-output

echo "Testing parallelMap error propagation..."

# Define a function that fails on specific input
failFunc() {
    if [ "$1" = "item2" ]; then
        echo "Function failing on $1" >&2
        exit 1
    fi
    echo "Function processed $1"
}

# Test that errors are propagated
if printf "item1\0item2\0item3\0" | parallelMap failFunc 2>/dev/null; then
    echo "ERROR: Expected parallelMap to fail but it succeeded"
    exit 1
else
    echo "SUCCESS: parallelMap error was properly propagated"
fi

echo "Testing parallelMap with additional arguments..."

# Define a function that uses additional arguments
argFunc() {
    echo "$1: $2" >> /tmp/map-args-output
}

# Test with additional arguments
for i in {1..3}; do
    printf "value%d\0" $i
done | parallelMap argFunc "PREFIX"

# Check output contains the prefix
if ! grep -q "PREFIX: value1" /tmp/map-args-output; then
    echo "ERROR: parallelMap did not pass additional arguments correctly"
    cat /tmp/map-args-output
    exit 1
fi
echo "SUCCESS: parallelMap passed additional arguments correctly"
rm -f /tmp/map-args-output

echo "All parallelRun and parallelMap tests passed!"
touch $out
@ -671,8 +671,8 @@ rec {
      throw "linkFarm entries must be either attrs or a list!";

  linkCommands = lib.mapAttrsToList (name: path: ''
    mkdir -p "$(dirname ${lib.escapeShellArg "${name}"})"
    ln -s ${lib.escapeShellArg "${path}"} ${lib.escapeShellArg "${name}"}
    mkdir -p -- "$(dirname -- ${lib.escapeShellArg "${name}"})"
    ln -s -- ${lib.escapeShellArg "${path}"} ${lib.escapeShellArg "${name}"}
  '') entries';
in
runCommand name
@ -39,6 +39,11 @@ let
  linkFarmFromAttrs = linkFarm "linkFarmFromAttrs" {
    inherit foo hello;
  };

  linkFarmDelimitOptionList = linkFarm "linkFarmDelimitOptionList" {
    "-foo" = foo;
    "-hello" = hello;
  };
in
runCommand "test-linkFarm" { } ''
  function assertPathEquals() {
@ -61,5 +66,9 @@ runCommand "test-linkFarm" { } ''

  assertPathEquals "${linkFarmFromAttrs}/foo" "${foo}"
  assertPathEquals "${linkFarmFromAttrs}/hello" "${hello}"

  assertPathEquals "${linkFarmDelimitOptionList}/-foo" "${foo}"
  assertPathEquals "${linkFarmDelimitOptionList}/-hello" "${hello}"

  touch $out
''
@ -39,13 +39,10 @@ python3Packages.buildPythonApplication rec {
    defusedxml
  ];

  pytestFlagsArray = [
    "-W"
    "ignore::sphinx.deprecation.RemovedInSphinx90Warning"
    "--rootdir"
    "src/ablog"
    "-W"
    "ignore::sphinx.deprecation.RemovedInSphinx90Warning" # Ignore ImportError
  pytestFlags = [
    "-Wignore::sphinx.deprecation.RemovedInSphinx90Warning"
    "--rootdir=src/ablog"
    "-Wignore::sphinx.deprecation.RemovedInSphinx90Warning" # Ignore ImportError
  ];

  # assert "post 1" not in html
@ -10,13 +10,13 @@

stdenv.mkDerivation (finalAttrs: {
  pname = "abseil-cpp";
  version = "20250127.1";
  version = "20250512.1";

  src = fetchFromGitHub {
    owner = "abseil";
    repo = "abseil-cpp";
    tag = finalAttrs.version;
    hash = "sha256-QTywqQCkyGFpdbtDBvUwz9bGXxbJs/qoFKF6zYAZUmQ=";
    hash = "sha256-eB7OqTO9Vwts9nYQ/Mdq0Ds4T1KgmmpYdzU09VPWOhk=";
  };

  cmakeFlags =
@ -177,6 +177,10 @@ stdenv.mkDerivation (finalAttrs: {

  enableParallelChecking = false;

  enabledTestPaths = [
    "../testing/adios2/python/Test*.py"
  ];

  __darwinAllowLocalNetworking = finalAttrs.finalPackage.doCheck && mpiSupport;

  nativeCheckInputs = [
@ -5,14 +5,14 @@
  cmake,
}:

stdenv.mkDerivation {
stdenv.mkDerivation (finalAttrs: {
  pname = "aemu";
  version = "0.1.2";

  src = fetchFromGitiles {
    url = "https://android.googlesource.com/platform/hardware/google/aemu";
    rev = "07ccc3ded3357e67e39104f18f35feaf8b3b6a0e";
    hash = "sha256-H3IU9aTFSzUAqYgrtHd4F18hbhZsbOJGC4K5JwMQOOw=";
    rev = "v${finalAttrs.version}-aemu-release";
    hash = "sha256-8UMm2dXdvmX6rUn4wQWuqI8bamwgf0x/5BQT+7atzjY=";
  };

  patches = [
@ -48,4 +48,4 @@ stdenv.mkDerivation {
      "aarch64-darwin"
    ];
  };
}
})
@ -9,31 +9,20 @@

stdenv.mkDerivation (finalAttrs: {
  pname = "alsa-lib";
  version = "1.2.13";
  version = "1.2.14";

  src = fetchurl {
    url = "mirror://alsa/lib/alsa-lib-${finalAttrs.version}.tar.bz2";
    hash = "sha256-jE/zdVPL6JYY4Yfkx3n3GpuyqLJ7kfh+1AmHzJIz2PY=";
    hash = "sha256-vpyIoLNgQ2fddBZ6K3VKNeFC9nApKuR6L97yei7pejI=";
  };

  patches =
    [
      # Add a "libs" field to the syntax recognized in the /etc/asound.conf file.
      # The nixos modules for pulseaudio, jack, and pipewire are leveraging this
      # "libs" field to declare locations for both native and 32bit plugins, in
      # order to support apps with 32bit sound running on x86_64 architecture.
      ./alsa-plugin-conf-multilib.patch
    ]
    ++ lib.optional (stdenv.hostPlatform.useLLVM or false)
      # Fixes version script under LLVM, should be fixed in the next update.
      # Check if "pkgsLLVM.alsa-lib" builds on next version bump and remove this
      # if it succeeds.
      (
        fetchurl {
          url = "https://github.com/alsa-project/alsa-lib/commit/76edab4e595bd5f3f4c636cccc8d7976d3c519d6.patch";
          hash = "sha256-WCOXfe0/PPZRMXdNa29Jn28S2r0PQ7iTsabsxZVSwnk=";
        }
      );
  patches = [
    # Add a "libs" field to the syntax recognized in the /etc/asound.conf file.
    # The nixos modules for pulseaudio, jack, and pipewire are leveraging this
    # "libs" field to declare locations for both native and 32bit plugins, in
    # order to support apps with 32bit sound running on x86_64 architecture.
    ./alsa-plugin-conf-multilib.patch
  ];

  enableParallelBuilding = true;
@ -6,7 +6,7 @@
  darwin,
  fetchFromGitHub,
  coreutils,
  nettools,
  net-tools,
  util-linux,
  stdenv,
  dmidecode,
@ -92,7 +92,7 @@ buildGoModule rec {

    substituteInPlace agent/platform/platform_unix.go \
      --replace-fail "/usr/bin/uname" "${coreutils}/bin/uname" \
      --replace-fail '"/bin", "hostname"' '"${nettools}/bin/hostname"' \
      --replace-fail '"/bin", "hostname"' '"${net-tools}/bin/hostname"' \
      --replace-fail '"lsb_release"' '"${fake-lsb-release}/bin/lsb_release"'

    substituteInPlace agent/session/shell/shell_unix.go \
@ -12,7 +12,7 @@ stdenv.mkDerivation rec {
    owner = "GPUOpen-LibrariesAndSDKs";
    repo = "AMF";
    tag = "v${version}";
    sha256 = "sha256-u6gvdc1acemd01TO5EbuF3H7HkEJX4GUx73xCo71yPY=";
    sha256 = "sha256-0PgWEq+329/EhI0/CgPsCkJ4CiTsFe56w2O+AcjVUdc=";
  };

  installPhase = ''
@ -290,7 +290,7 @@ buildPythonPackage rec {
    airflow db reset -y
  '';

  pytestFlagsArray = [
  enabledTestPaths = [
    "tests/core/test_core.py"
  ];
@ -59,9 +59,15 @@ stdenv.mkDerivation (finalAttrs: {

  checkTarget = "tests";

  checkFlags = lib.optionals stdenv.hostPlatform.isMusl [
    # equality tests are broken on musl due to different priority values
    # https://gitlab.com/apparmor/apparmor/-/issues/513
    "-o equality"
  ];

  postCheck = "popd";

  doCheck = stdenv.hostPlatform == stdenv.buildPlatform && !stdenv.hostPlatform.isMusl;
  doCheck = stdenv.hostPlatform == stdenv.buildPlatform;
  checkInputs = [
    bashInteractive
    perl
@ -48,7 +48,7 @@ python3Packages.buildPythonPackage rec {
    "pierky.arouteserver"
  ];

  pytestFlagsArray = [ "tests/static" ];
  enabledTestPaths = [ "tests/static" ];

  disabledTests = [
    # disable copyright year check of files
@ -2,6 +2,7 @@
  lib,
  stdenv,
  fetchFromGitHub,
  fetchpatch,
  autoreconfHook,
  bash,
  buildPackages,
@ -9,23 +10,44 @@
  python3,
  swig,
  pkgsCross,
  libcap_ng,

  # Enabling python support while cross compiling would be possible, but the
  # configure script tries executing python to gather info instead of relying on
  # python3-config exclusively
  enablePython ? stdenv.hostPlatform == stdenv.buildPlatform,
  nix-update-script,
  testers,
}:
stdenv.mkDerivation (finalAttrs: {
  pname = "audit";
  version = "4.0.3";
  version = "4.0.5";

  src = fetchFromGitHub {
    owner = "linux-audit";
    repo = "audit-userspace";
    tag = "v${finalAttrs.version}";
    hash = "sha256-+M5Nai/ruK16udsHcMwv1YoVQbCLKNuz/4FCXaLbiCw=";
    hash = "sha256-SgMt1MmcH7r7O6bmJCetRg3IdoZXAXjVJyeu0HRfyf8=";
  };

  patches = [
    # nix configures most stuff by symlinks, e.g. in /etc
    # thus, for plugins to be picked up, symlinks must be allowed
    # https://github.com/linux-audit/audit-userspace/pull/467
    (fetchpatch {
      url = "https://github.com/linux-audit/audit-userspace/pull/467/commits/dbefc642b3bd0cafe599fcd18c6c88cb672397ee.patch?full_index=1";
      hash = "sha256-Ksn/qKBQYFAjvs1OVuWhgWCdf4Bdp9/a+MrhyJAT+Bw=";
    })
    (fetchpatch {
      url = "https://github.com/linux-audit/audit-userspace/pull/467/commits/50094f56fefc0b9033ef65e8c4f108ed52ef5de5.patch?full_index=1";
      hash = "sha256-CJKDLdlpsCd+bG6j5agcnxY1+vMCImHwHGN6BXURa4c=";
    })
    (fetchpatch {
      url = "https://github.com/linux-audit/audit-userspace/pull/467/commits/5e75091abd297807b71b3cfe54345c2ef223939a.patch?full_index=1";
      hash = "sha256-LPpO4PH/3MyCJq2xhmhhcnFeK3yh7LK6Mjypuvhacu4=";
    })
  ];

  postPatch = ''
    substituteInPlace bindings/swig/src/auditswig.i \
      --replace-fail "/usr/include/linux/audit.h" \
@ -57,6 +79,7 @@ stdenv.mkDerivation (finalAttrs: {

  buildInputs = [
    bash
    libcap_ng
  ];

  configureFlags = [
@ -65,13 +88,20 @@ stdenv.mkDerivation (finalAttrs: {
    "--disable-zos-remote"
    "--with-arm"
    "--with-aarch64"
    # capability dropping, currently mostly for plugins as those get spawned as root
    # see auditd-plugins(5)
    "--with-libcap-ng=yes"
    (if enablePython then "--with-python" else "--without-python")
  ];

  enableParallelBuilding = true;

  passthru.tests = {
    musl = pkgsCross.musl64.audit;
  passthru = {
    updateScript = nix-update-script { };
    tests = {
      musl = pkgsCross.musl64.audit;
      pkg-config = testers.testMetaPkgConfig finalAttrs.finalPackage;
    };
  };

  meta = {
@ -79,7 +109,11 @@ stdenv.mkDerivation (finalAttrs: {
    description = "Audit Library";
    changelog = "https://github.com/linux-audit/audit-userspace/releases/tag/v${finalAttrs.version}";
    license = lib.licenses.gpl2Plus;
    maintainers = with lib.maintainers; [ ];
    maintainers = with lib.maintainers; [ grimmauld ];
    pkgConfigModules = [
      "audit"
      "auparse"
    ];
    platforms = lib.platforms.linux;
  };
})
@ -42,6 +42,13 @@ def is_dynamic_executable(elf: ELFFile) -> bool:
    # section but their ELF type is DYN.
    return bool(elf.get_section_by_name(".interp"))

def is_separate_debug_object(elf: ELFFile) -> bool:
    # objects created by separateDebugInfo = true have all the section headers
    # of the unstripped objects but those that normal `strip` would have kept
    # are NOBITS
    text_section = elf.get_section_by_name(".text")
    return elf.has_dwarf_info() and bool(text_section) and text_section.header['sh_type'] == "SHT_NOBITS"


def get_dependencies(elf: ELFFile) -> list[list[Path]]:
    dependencies = []
@ -174,6 +181,10 @@ def populate_cache(initial: list[Path], recursive: bool =False) -> None:

        try:
            with open_elf(path) as elf:
                if is_separate_debug_object(elf):
                    print(f"skipping {path} because it looks like a separate debug object")
                    continue

                osabi = get_osabi(elf)
                arch = get_arch(elf)
                rpath = [Path(p) for p in get_rpath(elf)
@ -60,9 +60,8 @@ localPython.pkgs.buildPythonApplication rec {
  ];

  # Upstream did not adapt to pytest 8 yet.
  pytestFlagsArray = [
    "-W"
    "ignore::pytest.PytestRemovedIn8Warning"
  pytestFlags = [
    "-Wignore::pytest.PytestRemovedIn8Warning"
  ];

  passthru = {
@ -145,7 +145,7 @@ py.pkgs.buildPythonApplication rec {
  # tests/unit/customizations/sso/test_utils.py uses sockets
  __darwinAllowLocalNetworking = true;

  pytestFlagsArray = [
  pytestFlags = [
    "-Wignore::DeprecationWarning"
  ];
@ -70,7 +70,7 @@ python.pkgs.buildPythonApplication rec {
    pytestCheckHook
  ];

  pytestFlagsArray = [
  enabledTestPaths = [
    "tests/unit"
  ];
@ -29,7 +29,7 @@ stdenv.mkDerivation rec {
    python3Packages.pytestCheckHook
  ];

  pytestFlagsArray = [ "test.py" ];
  enabledTestPaths = [ "test.py" ];

  meta = with lib; {
    description = "Storage conversion and expression calculator";
@ -13,7 +13,6 @@
  pkg-config,
  python3Packages,
  readline,
  systemdMinimal,
  udev,
  # Test gobject-introspection instead of pygobject because the latter
  # causes an infinite recursion.
@ -28,11 +27,11 @@

stdenv.mkDerivation (finalAttrs: {
  pname = "bluez";
  version = "5.80";
  version = "5.83";

  src = fetchurl {
    url = "mirror://kernel/linux/bluetooth/bluez-${finalAttrs.version}.tar.xz";
    hash = "sha256-pNC8oymWkfBtW9l3O4VGOCBKUaUCbEKwrX8cbPFrRZo=";
    hash = "sha256-EIUi2QnSIFgTmb/sk9qrYgNVOc7vPdo+eZcHhcY70kw=";
  };

  buildInputs = [
@ -63,7 +62,7 @@ stdenv.mkDerivation (finalAttrs: {
  postPatch =
    ''
      substituteInPlace tools/hid2hci.rules \
        --replace-fail /sbin/udevadm ${systemdMinimal}/bin/udevadm \
        --replace-fail /sbin/udevadm ${udev}/bin/udevadm \
        --replace-fail "hid2hci " "$out/lib/udev/hid2hci "
    ''
    +
@ -11,11 +11,11 @@

stdenv.mkDerivation (finalAttrs: {
  pname = "bmake";
  version = "20250308";
  version = "20250528";

  src = fetchurl {
    url = "https://www.crufty.net/ftp/pub/sjg/bmake-${finalAttrs.version}.tar.gz";
    hash = "sha256-I4jZ+xhldmM6pyX/FjVSpdunpqN1qMuakBSrV+59maI=";
    hash = "sha256-DcOJpeApiqWFNTtgeW1dYy3mYNreWNAKzWCtcihGyaM=";
  };

  patches = [
@ -101,7 +101,7 @@ python.pkgs.buildPythonApplication rec {
    pytestCheckHook
  ];

  pytestFlagsArray = [
  pytestFlags = [
    "--benchmark-skip"
    "--pyargs"
    "borg.testsuite"
@ -71,7 +71,7 @@ python3.pkgs.buildPythonApplication rec {
    "browsr"
  ];

  pytestFlagsArray = [
  pytestFlags = [
    "--snapshot-update"
  ];
@ -21,11 +21,11 @@

stdenv.mkDerivation rec {
  pname = "btrfs-progs";
  version = "6.14";
  version = "6.15";

  src = fetchurl {
    url = "mirror://kernel/linux/kernel/people/kdave/btrfs-progs/btrfs-progs-v${version}.tar.xz";
    hash = "sha256-31q4BPyzbikcQq2DYfgBrR4QJBtDvTBP5Qzj355+PaE=";
    hash = "sha256-V9pCjdIZn9iNg+zxytBWeM54ZA735S12M76Yh872dLs=";
  };

  nativeBuildInputs =
@ -23,7 +23,7 @@ let
    lib.concatStringsSep "\n\n" extraCertificateStrings
  );

  srcVersion = "3.111";
  srcVersion = "3.113.1";
  version = if nssOverride != null then nssOverride.version else srcVersion;
  meta = with lib; {
    homepage = "https://curl.haxx.se/docs/caextract.html";
@ -47,7 +47,7 @@ let
    owner = "nss-dev";
    repo = "nss";
    rev = "NSS_${lib.replaceStrings [ "." ] [ "_" ] version}_RTM";
    hash = "sha256-GFtoSvLF5nAwBIiMa9CeEl5geAOK60gG2tjuQFubgYs=";
    hash = "sha256-Yfs9Hh98ASJe1D4qyQEXaTC2xjeDI2Cdxp5Xgy0rYdQ=";
  };

  dontBuild = true;
@ -2,7 +2,7 @@
  lib,
  stdenv,
  fetchurl,
  fetchpatch,
  lzo,
  gtk-doc,
  meson,
  ninja,
@ -34,13 +34,13 @@ stdenv.mkDerivation (
  in
  {
    pname = "cairo";
    version = "1.18.2";
    version = "1.18.4";

    src = fetchurl {
      url = "https://cairographics.org/${
        if lib.mod (builtins.fromJSON (lib.versions.minor version)) 2 == 0 then "releases" else "snapshots"
      }/${pname}-${version}.tar.xz";
      hash = "sha256-piubtCQl6ETMPW3d4EP/Odur7dFULrpXout5+FiJ1Fo=";
      hash = "sha256-RF7YIIpuSCPeEianTKMZ02AOg/Y2n5mxQmUAZZnDLMs=";
    };

    outputs = [
@ -61,16 +61,7 @@ stdenv.mkDerivation (

    buildInputs = [
      docbook_xsl
    ];

    patches = [
      # Pull upstream fix to fix "out of memory" errors:
      # https://gitlab.freedesktop.org/cairo/cairo/-/merge_requests/595
      (fetchpatch {
        name = "fix-oom.patch";
        url = "https://gitlab.freedesktop.org/cairo/cairo/-/commit/b9eed915f9a67380e7ef9d8746656455c43f67e2.patch";
        hash = "sha256-iWYxMVeNpseClSTf7BfU9GBe+tJWc+DUJWTWE5MnGh4=";
      })
      lzo
    ];

    propagatedBuildInputs =
@ -39,7 +39,7 @@ python3.pkgs.buildPythonApplication rec {
    pytestCheckHook
  ];

  pytestFlagsArray = [
  enabledTestPaths = [
    "tests"
  ];
@ -112,7 +112,7 @@ python.pkgs.buildPythonApplication rec {
    writableTmpDirAsHomeHook
  ];

  pytestFlagsArray = [ "tests/unit" ];
  enabledTestPaths = [ "tests/unit" ];

  disabledTests = [
    # Relies upon the `charm` tool being installed
@ -60,6 +60,9 @@ stdenv.mkDerivation (finalAttrs: {
    zlib
  ];

  # with trivialautovarinit enabled can produce an empty .pc file
  hardeningDisable = [ "trivialautovarinit" ];

  cmakeFlags = [
    (lib.cmakeBool "BUILD_EXAMPLES" withExamples)
    (lib.cmakeBool "BUILD_TOOLS" withTools)
@ -10,13 +10,13 @@
}:
stdenv.mkDerivation (finalAttrs: {
  pname = "cpuinfo";
  version = "0-unstable-2025-03-27";
  version = "0-unstable-2025-06-10";

  src = fetchFromGitHub {
    owner = "pytorch";
    repo = "cpuinfo";
    rev = "39ea79a3c132f4e678695c579ea9353d2bd29968";
    hash = "sha256-uochXC0AtOw8N/ycyVJdiRw4pibCW2ENrFMT3jtxDSg=";
    rev = "d7427551d6531037da216d20cd36feb19ed4905f";
    hash = "sha256-gJgvE3823NyVOIL0Grkldde3U/N9NNqlLAA0btj3TSg=";
  };

  passthru.updateScript = nix-update-script { extraArgs = [ "--version=branch" ]; };
@ -27,7 +27,7 @@ python3.pkgs.buildPythonApplication rec {
    wireshark-cli
  ];

  pytestFlagsArray = [
  enabledTestPaths = [
    "tests/tests.py"
  ];
@ -2,7 +2,6 @@
  lib,
  stdenv,
  fetchurl,
  fetchpatch,
  lvm2,
  json_c,
  asciidoctor,
@ -26,7 +25,7 @@

stdenv.mkDerivation rec {
  pname = "cryptsetup";
  version = "2.7.5";
  version = "2.8.0";

  outputs = [
    "bin"
@ -38,20 +37,12 @@ stdenv.mkDerivation rec {

  src = fetchurl {
    url = "mirror://kernel/linux/utils/cryptsetup/v${lib.versions.majorMinor version}/${pname}-${version}.tar.xz";
    hash = "sha256-0r5Dlbj1A7Dr9LLYHbkMNalwUKNY7iH+YqDftm5dVSI=";
    hash = "sha256-zJ4tN8JahxzqN1ILKNUyIHsMFnD7EPxU1oBx9j9SQ6I=";
  };

  patches = [
    # Allow reading tokens from a relative path, see #167994
    ./relative-token-path.patch

    # Do not use pagesize as fallback for block size.
    # Remove when https://gitlab.com/cryptsetup/cryptsetup/-/merge_requests/782 is in the latest stable release
    # Fixes https://gitlab.com/cryptsetup/cryptsetup/-/issues/943
    (fetchpatch {
      url = "https://gitlab.com/cryptsetup/cryptsetup/-/commit/a39a0d00e504ad7a89442874f72cf0561d6089c4.diff";
      hash = "sha256-teQ/uFYrKuS0ksMEv7rP+d9EUuOl3sINsNhDC88P0xw=";
    })
  ];

  postPatch = ''
@ -95,6 +86,8 @@ stdenv.mkDerivation rec {
    popt
  ] ++ lib.optional (!withInternalArgon2) libargon2;

  enableParallelBuilding = true;

  # The test [7] header backup in compat-test fails with a mysterious
  # "out of memory" error, even though tons of memory is available.
  # Issue filed upstream: https://gitlab.com/cryptsetup/cryptsetup/-/issues/763
@ -5,7 +5,7 @@
  makeWrapper,
  cvs,
  perl,
  nettools,
  net-tools,
  findutils,
  rsync,
  coreutils,
@ -25,7 +25,7 @@ stdenv.mkDerivation rec {
  buildInputs = [
    cvs
    perl
    nettools
    net-tools
    findutils
    rsync
    coreutils
@ -40,7 +40,7 @@ stdenv.mkDerivation rec {
    wrapProgram $out/bin/cvsq --prefix PATH : ${
      lib.makeBinPath [
        cvs
        nettools
        net-tools
        findutils
        rsync
        coreutils
@ -50,7 +50,7 @@ stdenv.mkDerivation rec {
    wrapProgram $out/bin/cvsq-branch --prefix PATH : ${
      lib.makeBinPath [
        cvs
        nettools
        net-tools
        findutils
        rsync
        coreutils
@ -60,7 +60,7 @@ stdenv.mkDerivation rec {
    wrapProgram $out/bin/cvsq-merge --prefix PATH : ${
      lib.makeBinPath [
        cvs
        nettools
        net-tools
        findutils
        rsync
        coreutils
@ -70,7 +70,7 @@ stdenv.mkDerivation rec {
    wrapProgram $out/bin/cvsq-switch --prefix PATH : ${
      lib.makeBinPath [
        cvs
        nettools
        net-tools
        findutils
        rsync
        coreutils
@ -80,7 +80,7 @@ stdenv.mkDerivation rec {
    wrapProgram $out/bin/lcvs --prefix PATH : ${
      lib.makeBinPath [
        cvs
        nettools
        net-tools
        findutils
        rsync
        coreutils
@ -12,6 +12,7 @@
  pkg-config,
  python3,
  xvfb-run,
  gettext,
}:

stdenv.mkDerivation (finalAttrs: {
@ -24,6 +25,11 @@ stdenv.mkDerivation (finalAttrs: {
    sha256 = "sha256-4yH19X98SVqpviCBIWzIX6FYHWxCbREpuKCNjQuTFDk=";
  };

  patches = [
    # glib gettext is deprecated and broken, so use regular gettext instead
    ./use-regular-gettext.patch
  ];

  postPatch = ''
    patchShebangs tests/test-wait-outputer

@ -39,6 +45,7 @@ stdenv.mkDerivation (finalAttrs: {
  nativeBuildInputs = [
    autoreconfHook
    glib # for autoconf macro, gtester, gdbus
    gettext
    intltool
    pkg-config
  ];
pkgs/by-name/db/dbus-test-runner/use-regular-gettext.patch (new file)
@ -0,0 +1,12 @@
--- a/configure.ac
+++ b/configure.ac
@@ -46,7 +46,8 @@ GETTEXT_PACKAGE=dbus-test-runner
 AC_SUBST(GETTEXT_PACKAGE)
 AC_DEFINE_UNQUOTED(GETTEXT_PACKAGE, "$GETTEXT_PACKAGE", [Name of the default gettext domain])

-AM_GLIB_GNU_GETTEXT
+AM_GNU_GETTEXT([external])
+AM_GNU_GETTEXT_VERSION([0.21])

 ###########################
 # gcov coverage reporting
@ -56,7 +56,7 @@ python3.pkgs.buildPythonApplication rec {
    export HOME=$(mktemp -d);
  '';

  pytestFlagsArray = [
  pytestFlags = [
    # --fast skips tests which try to start a devpi-server improperly
    "--fast"
  ];
@ -260,7 +260,7 @@ python.pkgs.buildPythonApplication rec {

  nativeCheckInputs = with python.pkgs; [ pytestCheckHook ] ++ pythonPath;

  pytestFlagsArray = [
  pytestFlags = [
    # Always show more information when tests fail
    "-vv"
  ];
@ -52,6 +52,11 @@ stdenv.mkDerivation rec {
    XMLNamespaceSupport
  ]);

  # configure tries to find osx in PATH and hardcodes the resulting path
  # (if any) on the Perl code. this fails under strictDeps, so override
  # the autoconf test:
  OSX = "${opensp}/bin/osx";

  postConfigure = ''
    # Broken substitution is used for `perl/config.pl', which leaves literal
    # `$prefix' in it.
pkgs/by-name/do/dosfstools/gettext-0.25.patch (new file)
@ -0,0 +1,42 @@
From 893a84738606a8ac588ba1e9d4145cbbcbfff811 Mon Sep 17 00:00:00 2001
From: Alyssa Ross <hi@alyssa.is>
Date: Wed, 2 Jul 2025 12:03:53 +0200
Subject: [PATCH] Fix autoreconf with gettext 0.25

This fixes the following error that appeared when running autoreconf
after updating to gettext 0.25:

    configure.ac:76: error: possibly undefined macro: AM_ICONV
    If this token and others are legitimate, please use m4_pattern_allow.
    See the Autoconf documentation.
    autoreconf: error: /nix/store/dvpiwvz7an7icljfscdi76h11c03cma4-autoconf-2.72/bin/autoconf failed with exit status: 1

The version of gettext given in AM_GNU_GETTEXT_VERSION() is picked
quite arbitrarily based on what's likely to be available in distros,
since gettext itself is not actually used here, just some supporting
stuff from it.

Link: https://github.com/dosfstools/dosfstools/pull/218
---
 configure.ac | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/configure.ac b/configure.ac
index efb8fb5..2cecab1 100644
--- a/configure.ac
+++ b/configure.ac
@@ -15,8 +15,11 @@
 # along with this program. If not, see <http://www.gnu.org/licenses/>.

 AC_INIT([dosfstools], [4.2])
+AC_CONFIG_MACRO_DIRS([m4])
 AC_SUBST([RELEASE_DATE], [2021-01-31])
 AM_INIT_AUTOMAKE([1.11 foreign subdir-objects parallel-tests])
+AM_GNU_GETTEXT_VERSION([0.20])
+AM_GNU_GETTEXT([external])

 AC_ARG_ENABLE([compat-symlinks],
 [AS_HELP_STRING([--enable-compat-symlinks],
--
2.49.0
@ -39,19 +39,15 @@ stdenv.mkDerivation rec {
      url = "https://github.com/dosfstools/dosfstools/commit/8da7bc93315cb0c32ad868f17808468b81fa76ec.patch";
      sha256 = "sha256-Quegj5uYZgACgjSZef6cjrWQ64SToGQxbxyqCdl8C7o=";
    })
    ./gettext-0.25.patch
  ];

  nativeBuildInputs = [
    autoreconfHook
    gettext
    pkg-config
  ] ++ lib.optional stdenv.hostPlatform.isDarwin libiconv;

  # configure.ac:75: error: required file './config.rpath' not found
  # https://github.com/dosfstools/dosfstools/blob/master/autogen.sh
  postPatch = ''
    cp ${gettext}/share/gettext/config.rpath config.rpath
  '';

  configureFlags = [ "--enable-compat-symlinks" ];

  nativeCheckInputs = [ xxd ];
@ -3,6 +3,7 @@
  stdenv,
  buildPackages,
  fetchurl,
  fetchpatch,
  pkg-config,
  libuuid,
  gettext,
@ -12,6 +13,7 @@
  shared ? !stdenv.hostPlatform.isStatic,
  e2fsprogs,
  runCommand,
  libarchive,
}:

stdenv.mkDerivation rec {
@ -23,6 +25,20 @@ stdenv.mkDerivation rec {
    hash = "sha256-CCQuZMoOgZTZwcqtSXYrGSCaBjGBmbY850rk7y105jw=";
  };

  # 2025-05-31: Fix libarchive, from https://github.com/tytso/e2fsprogs/pull/230
  patches = [
    (fetchpatch {
      name = "0001-create_inode_libarchive.c-define-libarchive-dylib-for-darwin.patch";
      url = "https://github.com/tytso/e2fsprogs/commit/e86c65bc7ee276cd9ca920d96e18ed0cddab3412.patch";
      hash = "sha256-HFZAznaNl5rzgVEvYx1LDKh2jd/VEXD/o0wypIh4TR8=";
    })
    (fetchpatch {
      name = "0002-mkgnutar.pl-avoid-uninitialized-username-variable.patch";
      url = "https://github.com/tytso/e2fsprogs/commit/9217c359db1d1b6d031a0e2ca9a885634fed00da.patch";
      hash = "sha256-iDXmLq77eJolH1mkXSbvZ9tRVtGQt2F45CdkVphUZSs=";
    })
  ];

  # fuse2fs adds 14mb of dependencies
  outputs = [
    "bin"
@ -40,27 +56,30 @@ stdenv.mkDerivation rec {
  buildInputs = [
    libuuid
    gettext
    libarchive
  ] ++ lib.optionals withFuse [ fuse3 ];

  configureFlags =
    if stdenv.hostPlatform.isLinux then
      [
        # It seems that the e2fsprogs is one of the few packages that cannot be
        # built with shared and static libs.
        (if shared then "--enable-elf-shlibs" else "--disable-elf-shlibs")
        "--enable-symlink-install"
        "--enable-relative-symlinks"
        "--with-crond-dir=no"
        # fsck, libblkid, libuuid and uuidd are in util-linux-ng (the "libuuid" dependency)
        "--disable-fsck"
        "--disable-libblkid"
        "--disable-libuuid"
        "--disable-uuidd"
      ]
    else
      [
        "--enable-libuuid --disable-e2initrd-helper"
      ];
    [
      "--with-libarchive=direct"
    ]
    ++ lib.optionals stdenv.hostPlatform.isLinux [
      # It seems that the e2fsprogs is one of the few packages that cannot be
      # built with shared and static libs.
      (if shared then "--enable-elf-shlibs" else "--disable-elf-shlibs")
      "--enable-symlink-install"
      "--enable-relative-symlinks"
      "--with-crond-dir=no"
      # fsck, libblkid, libuuid and uuidd are in util-linux-ng (the "libuuid" dependency)
      "--disable-fsck"
      "--disable-libblkid"
      "--disable-libuuid"
      "--disable-uuidd"
    ]
    ++ lib.optionals (!stdenv.hostPlatform.isLinux) [
      "--enable-libuuid"
      "--disable-e2initrd-helper"
    ];

  nativeCheckInputs = [ buildPackages.perl ];
  doCheck = true;
@ -88,6 +107,7 @@ stdenv.mkDerivation rec {
      [ -e $out/success ]
    '';
  };

  meta = {
    homepage = "https://e2fsprogs.sourceforge.net/";
    changelog = "https://e2fsprogs.sourceforge.net/e2fsprogs-release.html#${version}";
@ -99,6 +119,6 @@ stdenv.mkDerivation rec {
      mit # lib/et, lib/ss
    ];
    platforms = lib.platforms.unix;
    maintainers = with lib.maintainers; [ ];
    maintainers = with lib.maintainers; [ usertam ];
  };
}
@ -6,7 +6,7 @@

  cmake,
  ninja,
  removeReferencesTo,
  sanitiseHeaderPathsHook,

  glog,
  gflags,
@ -47,7 +47,7 @@ stdenv.mkDerivation (finalAttrs: {
  nativeBuildInputs = [
    cmake
    ninja
    removeReferencesTo
    sanitiseHeaderPathsHook
  ];

  buildInputs = [
@ -98,18 +98,6 @@ stdenv.mkDerivation (finalAttrs: {
      'find_package(FBThrift CONFIG REQUIRED COMPONENTS cpp2)'
  '';

  postFixup = ''
    # Sanitize header paths to avoid runtime dependencies leaking in
    # through `__FILE__`.
    (
      shopt -s globstar
      for header in "$dev/include"/**/*.h; do
        sed -i "1i#line 1 \"$header\"" "$header"
        remove-references-to -t "$dev" "$header"
      done
    )
  '';

  passthru.updateScript = nix-update-script { };

  meta = {