author     Vladimír Čunát <v@cunat.cz>  2023-12-31 11:35:18 +0100
committer  Vladimír Čunát <v@cunat.cz>  2023-12-31 11:35:18 +0100
commit     2c9c58e98243930f8cb70387934daa4bc8b00373 (patch)
tree       d4204811cdf8c1e21d4da54ba8d34eda2f64dff4
parent     Merge pull request #263840 from NixOS/backport-263739-to-release-23.05 (diff)
parent     Merge release-23.05 into staging-next-23.05 (diff)
download   nixpkgs-2c9c58e98243930f8cb70387934daa4bc8b00373.tar.gz
Merge #273936: staging-next-23.05 iteration 11
...into release-23.05
-rwxr-xr-x  lib/tests/modules.sh  28
-rw-r--r--  lib/tests/modules/raw.nix  5
-rw-r--r--  lib/tests/modules/types-anything/equal-atoms.nix  4
-rw-r--r--  lib/tests/modules/types-anything/functions.nix  4
-rw-r--r--  pkgs/applications/networking/browsers/firefox/common.nix  5
-rw-r--r--  pkgs/applications/office/libreoffice/default.nix  8
-rw-r--r--  pkgs/build-support/cc-wrapper/default.nix  4
-rw-r--r--  pkgs/build-support/node/build-npm-package/hooks/npm-install-hook.sh  6
-rw-r--r--  pkgs/build-support/node/fetch-npm-deps/Cargo.lock  120
-rw-r--r--  pkgs/build-support/node/fetch-npm-deps/Cargo.toml  11
-rw-r--r--  pkgs/build-support/node/fetch-npm-deps/default.nix  12
-rw-r--r--  pkgs/build-support/node/fetch-npm-deps/src/cacache.rs  9
-rw-r--r--  pkgs/build-support/node/fetch-npm-deps/src/main.rs  4
-rw-r--r--  pkgs/build-support/node/fetch-npm-deps/src/parse/lock.rs  99
-rw-r--r--  pkgs/build-support/node/fetch-npm-deps/src/parse/mod.rs  5
-rw-r--r--  pkgs/data/misc/cacert/default.nix  26
-rw-r--r--  pkgs/development/libraries/gnutls/default.nix  11
-rw-r--r--  pkgs/development/libraries/gstreamer/bad/default.nix  4
-rw-r--r--  pkgs/development/libraries/gstreamer/base/default.nix  4
-rw-r--r--  pkgs/development/libraries/gstreamer/core/default.nix  4
-rw-r--r--  pkgs/development/libraries/gstreamer/devtools/default.nix  4
-rw-r--r--  pkgs/development/libraries/gstreamer/ges/default.nix  4
-rw-r--r--  pkgs/development/libraries/gstreamer/good/default.nix  4
-rw-r--r--  pkgs/development/libraries/gstreamer/libav/default.nix  4
-rw-r--r--  pkgs/development/libraries/gstreamer/rtsp-server/default.nix  4
-rw-r--r--  pkgs/development/libraries/gstreamer/ugly/default.nix  4
-rw-r--r--  pkgs/development/libraries/gstreamer/vaapi/default.nix  4
-rw-r--r--  pkgs/development/libraries/http-parser/build-shared.patch  30
-rw-r--r--  pkgs/development/libraries/http-parser/default.nix  39
-rw-r--r--  pkgs/development/libraries/http-parser/enable-static-shared.patch  93
-rw-r--r--  pkgs/development/libraries/libde265/default.nix  4
-rw-r--r--  pkgs/development/libraries/libgit2/default.nix  4
-rw-r--r--  pkgs/development/libraries/libsass/default.nix  24
-rw-r--r--  pkgs/development/libraries/libssh/default.nix  6
-rw-r--r--  pkgs/development/libraries/libssh2/CVE-2023-48795.patch  459
-rw-r--r--  pkgs/development/libraries/libssh2/default.nix  7
-rw-r--r--  pkgs/development/libraries/tracker/default.nix  11
-rw-r--r--  pkgs/development/libraries/wolfssl/default.nix  2
-rw-r--r--  pkgs/development/python-modules/gevent/22.10.2-CVE-2023-41419.patch  648
-rw-r--r--  pkgs/development/python-modules/gevent/default.nix  18
-rw-r--r--  pkgs/development/python-modules/gst-python/default.nix  4
-rw-r--r--  pkgs/development/python-modules/jupyter-server/default.nix  14
-rw-r--r--  pkgs/development/python-modules/urllib3/default.nix  10
-rw-r--r--  pkgs/misc/ghostscript/default.nix  4
-rw-r--r--  pkgs/os-specific/linux/bluez/default.nix  6
-rw-r--r--  pkgs/servers/dns/knot-dns/default.nix  4
46 files changed, 1571 insertions, 217 deletions
diff --git a/lib/tests/modules.sh b/lib/tests/modules.sh
index c81febb4156f..6ec59066e908 100755
--- a/lib/tests/modules.sh
+++ b/lib/tests/modules.sh
@@ -18,14 +18,14 @@ evalConfig() {
local attr=$1
shift
local script="import ./default.nix { modules = [ $* ];}"
- nix-instantiate --timeout 1 -E "$script" -A "$attr" --eval-only --show-trace --read-write-mode
+ nix-instantiate --timeout 1 -E "$script" -A "$attr" --eval-only --show-trace --read-write-mode --json
}
reportFailure() {
local attr=$1
shift
local script="import ./default.nix { modules = [ $* ];}"
- echo 2>&1 "$ nix-instantiate -E '$script' -A '$attr' --eval-only"
+ echo 2>&1 "$ nix-instantiate -E '$script' -A '$attr' --eval-only --json"
evalConfig "$attr" "$@" || true
((++fail))
}
@@ -103,7 +103,7 @@ checkConfigOutput '^42$' config.value ./declare-either.nix ./define-value-int-po
checkConfigOutput '^"24"$' config.value ./declare-either.nix ./define-value-string.nix
# types.oneOf
checkConfigOutput '^42$' config.value ./declare-oneOf.nix ./define-value-int-positive.nix
-checkConfigOutput '^\[ \]$' config.value ./declare-oneOf.nix ./define-value-list.nix
+checkConfigOutput '^\[\]$' config.value ./declare-oneOf.nix ./define-value-list.nix
checkConfigOutput '^"24"$' config.value ./declare-oneOf.nix ./define-value-string.nix
# Check mkForce without submodules.
@@ -275,7 +275,7 @@ checkConfigOutput '^"24"$' config.value ./freeform-attrsOf.nix ./define-value-st
# Shorthand modules interpret `meta` and `class` as config items
checkConfigOutput '^true$' options._module.args.value.result ./freeform-attrsOf.nix ./define-freeform-keywords-shorthand.nix
# No freeform assignments shouldn't make it error
-checkConfigOutput '^{ }$' config ./freeform-attrsOf.nix
+checkConfigOutput '^{}$' config ./freeform-attrsOf.nix
# but only if the type matches
checkConfigError 'A definition for option .* is not of type .*' config.value ./freeform-attrsOf.nix ./define-value-list.nix
# and properties should be applied
@@ -313,19 +313,19 @@ checkConfigError 'The option .* has conflicting definitions' config.value ./type
checkConfigOutput '^0$' config.value.int ./types-anything/equal-atoms.nix
checkConfigOutput '^false$' config.value.bool ./types-anything/equal-atoms.nix
checkConfigOutput '^""$' config.value.string ./types-anything/equal-atoms.nix
-checkConfigOutput '^/$' config.value.path ./types-anything/equal-atoms.nix
+checkConfigOutput '^"/[^"]\+"$' config.value.path ./types-anything/equal-atoms.nix
checkConfigOutput '^null$' config.value.null ./types-anything/equal-atoms.nix
checkConfigOutput '^0.1$' config.value.float ./types-anything/equal-atoms.nix
# Functions can't be merged together
checkConfigError "The option .value.multiple-lambdas.<function body>. has conflicting option types" config.applied.multiple-lambdas ./types-anything/functions.nix
-checkConfigOutput '^<LAMBDA>$' config.value.single-lambda ./types-anything/functions.nix
+checkConfigOutput '^true$' config.valueIsFunction.single-lambda ./types-anything/functions.nix
checkConfigOutput '^null$' config.applied.merging-lambdas.x ./types-anything/functions.nix
checkConfigOutput '^null$' config.applied.merging-lambdas.y ./types-anything/functions.nix
# Check that all mk* modifiers are applied
checkConfigError 'attribute .* not found' config.value.mkiffalse ./types-anything/mk-mods.nix
-checkConfigOutput '^{ }$' config.value.mkiftrue ./types-anything/mk-mods.nix
+checkConfigOutput '^{}$' config.value.mkiftrue ./types-anything/mk-mods.nix
checkConfigOutput '^1$' config.value.mkdefault ./types-anything/mk-mods.nix
-checkConfigOutput '^{ }$' config.value.mkmerge ./types-anything/mk-mods.nix
+checkConfigOutput '^{}$' config.value.mkmerge ./types-anything/mk-mods.nix
checkConfigOutput '^true$' config.value.mkbefore ./types-anything/mk-mods.nix
checkConfigOutput '^1$' config.value.nested.foo ./types-anything/mk-mods.nix
checkConfigOutput '^"baz"$' config.value.nested.bar.baz ./types-anything/mk-mods.nix
@@ -345,16 +345,16 @@ checkConfigOutput '^"a b y z"$' config.resultFooBar ./declare-variants.nix ./def
checkConfigOutput '^"a b c"$' config.resultFooFoo ./declare-variants.nix ./define-variant.nix
## emptyValue's
-checkConfigOutput "[ ]" config.list.a ./emptyValues.nix
-checkConfigOutput "{ }" config.attrs.a ./emptyValues.nix
+checkConfigOutput "\[\]" config.list.a ./emptyValues.nix
+checkConfigOutput "{}" config.attrs.a ./emptyValues.nix
checkConfigOutput "null" config.null.a ./emptyValues.nix
-checkConfigOutput "{ }" config.submodule.a ./emptyValues.nix
+checkConfigOutput "{}" config.submodule.a ./emptyValues.nix
# These types don't have empty values
checkConfigError 'The option .int.a. is used but not defined' config.int.a ./emptyValues.nix
checkConfigError 'The option .nonEmptyList.a. is used but not defined' config.nonEmptyList.a ./emptyValues.nix
## types.raw
-checkConfigOutput "{ foo = <CODE>; }" config.unprocessedNesting ./raw.nix
+checkConfigOutput '^true$' config.unprocessedNestingEvaluates.success ./raw.nix
checkConfigOutput "10" config.processedToplevel ./raw.nix
checkConfigError "The option .multiple. is defined multiple times" config.multiple ./raw.nix
checkConfigOutput "bar" config.priorities ./raw.nix
@@ -380,13 +380,13 @@ checkConfigOutput 'ok' config.freeformItems.foo.bar ./adhoc-freeformType-survive
checkConfigOutput '^1$' config.sub.specialisation.value ./extendModules-168767-imports.nix
# Class checks, evalModules
-checkConfigOutput '^{ }$' config.ok.config ./class-check.nix
+checkConfigOutput '^{}$' config.ok.config ./class-check.nix
checkConfigOutput '"nixos"' config.ok.class ./class-check.nix
checkConfigError 'The module .*/module-class-is-darwin.nix was imported into nixos instead of darwin.' config.fail.config ./class-check.nix
checkConfigError 'The module foo.nix#darwinModules.default was imported into nixos instead of darwin.' config.fail-anon.config ./class-check.nix
# Class checks, submoduleWith
-checkConfigOutput '^{ }$' config.sub.nixosOk ./class-check.nix
+checkConfigOutput '^{}$' config.sub.nixosOk ./class-check.nix
checkConfigError 'The module .*/module-class-is-darwin.nix was imported into nixos instead of darwin.' config.sub.nixosFail.config ./class-check.nix
# submoduleWith type merge with different class
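
The expectation changes above all stem from the `--json` flag added to `evalConfig`: Nix's JSON printer writes empty attribute sets and lists without the inner space, renders paths as quoted strings, and cannot serialize functions at all, which is why the lambda checks now go through `lib.isFunction` and `builtins.tryEval` instead of matching `<LAMBDA>`/`<CODE>`. A minimal sketch of the output difference, run outside the test harness (`--strict` just forces deep evaluation so both values print):

    $ nix-instantiate --eval-only --strict -E '{ empty = { }; list = [ ]; }'
    { empty = { }; list = [ ]; }
    $ nix-instantiate --eval-only --strict --json -E '{ empty = { }; list = [ ]; }'
    {"empty":{},"list":[]}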
diff --git a/lib/tests/modules/raw.nix b/lib/tests/modules/raw.nix
index 418e671ed076..9eb7c5ce8f21 100644
--- a/lib/tests/modules/raw.nix
+++ b/lib/tests/modules/raw.nix
@@ -1,4 +1,4 @@
-{ lib, ... }: {
+{ lib, config, ... }: {
options = {
processedToplevel = lib.mkOption {
@@ -13,6 +13,9 @@
priorities = lib.mkOption {
type = lib.types.raw;
};
+ unprocessedNestingEvaluates = lib.mkOption {
+ default = builtins.tryEval config.unprocessedNesting;
+ };
};
config = {
diff --git a/lib/tests/modules/types-anything/equal-atoms.nix b/lib/tests/modules/types-anything/equal-atoms.nix
index 972711201a09..9925cfd60892 100644
--- a/lib/tests/modules/types-anything/equal-atoms.nix
+++ b/lib/tests/modules/types-anything/equal-atoms.nix
@@ -9,7 +9,7 @@
value.int = 0;
value.bool = false;
value.string = "";
- value.path = /.;
+ value.path = ./.;
value.null = null;
value.float = 0.1;
}
@@ -17,7 +17,7 @@
value.int = 0;
value.bool = false;
value.string = "";
- value.path = /.;
+ value.path = ./.;
value.null = null;
value.float = 0.1;
}
diff --git a/lib/tests/modules/types-anything/functions.nix b/lib/tests/modules/types-anything/functions.nix
index 21edd4aff9c4..3288b64f9b7e 100644
--- a/lib/tests/modules/types-anything/functions.nix
+++ b/lib/tests/modules/types-anything/functions.nix
@@ -1,5 +1,9 @@
{ lib, config, ... }: {
+ options.valueIsFunction = lib.mkOption {
+ default = lib.mapAttrs (name: lib.isFunction) config.value;
+ };
+
options.value = lib.mkOption {
type = lib.types.anything;
};
diff --git a/pkgs/applications/networking/browsers/firefox/common.nix b/pkgs/applications/networking/browsers/firefox/common.nix
index 70efa3a2d9da..73ad38468d6b 100644
--- a/pkgs/applications/networking/browsers/firefox/common.nix
+++ b/pkgs/applications/networking/browsers/firefox/common.nix
@@ -521,11 +521,6 @@ buildStdenv.mkDerivation ({
LDFLAGS = "-Wl,-rpath,${placeholder "out"}/lib/${binaryName}";
};
- # workaround for clang statically linking against libstdc++
- NIX_LDFLAGS = lib.optionals stdenv.cc.isGNU [
- "-L${stdenv.cc.cc.lib}/lib"
- ];
-
# tests were disabled in configureFlags
doCheck = false;
diff --git a/pkgs/applications/office/libreoffice/default.nix b/pkgs/applications/office/libreoffice/default.nix
index 25e8f18998e2..3d7770652513 100644
--- a/pkgs/applications/office/libreoffice/default.nix
+++ b/pkgs/applications/office/libreoffice/default.nix
@@ -1,5 +1,6 @@
{ stdenv
, fetchurl
+, fetchpatch
, lib
, substituteAll
, pam
@@ -214,6 +215,13 @@ in
tar -xf ${srcs.translations}
'';
+ patches = [
+ # Backport fix for tests broken by expired test certificates.
+ (fetchpatch {
+ url = "https://cgit.freedesktop.org/libreoffice/core/patch/?id=ececb678b8362e3be8e02768ddd5e4197d87dc2a";
+ hash = "sha256-TUfKlwNxUTOJ95VLqwVD+ez1xhu7bW6xZlgIaCyIiNg=";
+ })
+ ];
### QT/KDE
#
# configure.ac assumes that the first directory that contains headers and
diff --git a/pkgs/build-support/cc-wrapper/default.nix b/pkgs/build-support/cc-wrapper/default.nix
index f922c86d1143..a2f710a6c825 100644
--- a/pkgs/build-support/cc-wrapper/default.nix
+++ b/pkgs/build-support/cc-wrapper/default.nix
@@ -101,6 +101,8 @@ let
&& !(stdenv.targetPlatform.useAndroidPrebuilt or false)
&& !(stdenv.targetPlatform.isiOS or false)
&& gccForLibs != null;
+ gccForLibs_solib = getLib gccForLibs
+ + optionalString (targetPlatform != hostPlatform) "/${targetPlatform.config}";
# older compilers (for example bootstrap's GCC 5) fail with -march=too-modern-cpu
isGccArchSupported = arch:
@@ -349,7 +351,7 @@ stdenv.mkDerivation {
''
+ optionalString useGccForLibs ''
echo "-L${gccForLibs}/lib/gcc/${targetPlatform.config}/${gccForLibs.version}" >> $out/nix-support/cc-ldflags
- echo "-L${gccForLibs.lib}/${targetPlatform.config}/lib" >> $out/nix-support/cc-ldflags
+ echo "-L${gccForLibs_solib}/lib" >> $out/nix-support/cc-ldflags
''
# TODO We would like to connect this to `useGccForLibs`, but we cannot yet
diff --git a/pkgs/build-support/node/build-npm-package/hooks/npm-install-hook.sh b/pkgs/build-support/node/build-npm-package/hooks/npm-install-hook.sh
index dfc560dad790..4482c13097c6 100644
--- a/pkgs/build-support/node/build-npm-package/hooks/npm-install-hook.sh
+++ b/pkgs/build-support/node/build-npm-package/hooks/npm-install-hook.sh
@@ -5,16 +5,14 @@ npmInstallHook() {
runHook preInstall
- # `npm pack` writes to cache
- npm config delete cache
-
local -r packageOut="$out/lib/node_modules/$(@jq@ --raw-output '.name' package.json)"
+ # `npm pack` writes to cache so temporarily override it
while IFS= read -r file; do
local dest="$packageOut/$(dirname "$file")"
mkdir -p "$dest"
cp "${npmWorkspace-.}/$file" "$dest"
- done < <(@jq@ --raw-output '.[0].files | map(.path) | join("\n")' <<< "$(npm pack --json --dry-run --loglevel=warn --no-foreground-scripts ${npmWorkspace+--workspace=$npmWorkspace} $npmPackFlags "${npmPackFlagsArray[@]}" $npmFlags "${npmFlagsArray[@]}")")
+ done < <(@jq@ --raw-output '.[0].files | map(.path) | join("\n")' <<< "$(npm_config_cache="$HOME/.npm" npm pack --json --dry-run --loglevel=warn --no-foreground-scripts ${npmWorkspace+--workspace=$npmWorkspace} $npmPackFlags "${npmPackFlagsArray[@]}" $npmFlags "${npmFlagsArray[@]}")")
while IFS=" " read -ra bin; do
mkdir -p "$out/bin"
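
npm treats any `npm_config_*` environment variable as configuration, so prefixing the single `npm pack` invocation with `npm_config_cache=` points its cache writes at a writable location for just that command, replacing the earlier global `npm config delete cache`. A rough standalone sketch of the same idea (the temporary directory is only illustrative):

    # route npm's cache to a throwaway directory for this one command
    $ npm_config_cache="$(mktemp -d)" npm pack --dry-run --json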
diff --git a/pkgs/build-support/node/fetch-npm-deps/Cargo.lock b/pkgs/build-support/node/fetch-npm-deps/Cargo.lock
index 4f6e177fd9ae..8ba72a7b76c4 100644
--- a/pkgs/build-support/node/fetch-npm-deps/Cargo.lock
+++ b/pkgs/build-support/node/fetch-npm-deps/Cargo.lock
@@ -47,9 +47,9 @@ dependencies = [
[[package]]
name = "base64"
-version = "0.21.4"
+version = "0.21.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9ba43ea6f343b788c8764558649e08df62f86c6ef251fdaeb1ffd010a9ae50a2"
+checksum = "35636a1494ede3b646cc98f74f8e62c773a38a659ebc777a2cf26b9b74171df9"
[[package]]
name = "bitflags"
@@ -59,9 +59,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
[[package]]
name = "bitflags"
-version = "2.4.0"
+version = "2.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b4682ae6287fcf752ecaabbfcc7b6f9b72aa33933dc23a554d853aea8eea8635"
+checksum = "327762f6e5a765692301e5bb513e0d9fef63be86bbc14528052b1cd3e6f03e07"
[[package]]
name = "block-buffer"
@@ -110,9 +110,9 @@ dependencies = [
[[package]]
name = "cpufeatures"
-version = "0.2.9"
+version = "0.2.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a17b76ff3a4162b0b27f354a0c87015ddad39d35f9c0c36607a3bdd175dde1f1"
+checksum = "ce420fe07aecd3e67c5f910618fe65e94158f6dcc0adf44e00d69ce2bdfe0fd0"
dependencies = [
"libc",
]
@@ -177,9 +177,9 @@ dependencies = [
[[package]]
name = "curl-sys"
-version = "0.4.67+curl-8.3.0"
+version = "0.4.68+curl-8.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3cc35d066510b197a0f72de863736641539957628c8a42e70e27c66849e77c34"
+checksum = "b4a0d18d88360e374b16b2273c832b5e57258ffc1d4aa4f96b108e0738d5752f"
dependencies = [
"cc",
"libc",
@@ -208,9 +208,9 @@ checksum = "a26ae43d7bcc3b814de94796a5e736d4029efb0ee900c12e2d54c993ad1a1e07"
[[package]]
name = "env_logger"
-version = "0.10.0"
+version = "0.10.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "85cdab6a89accf66733ad5a1693a4dcced6aeff64602b634530dd73c1f3ee9f0"
+checksum = "95b3f3e67048839cb0d0781f445682a35113da7121f7c949db0e2be96a4fbece"
dependencies = [
"humantime",
"is-terminal",
@@ -221,9 +221,9 @@ dependencies = [
[[package]]
name = "errno"
-version = "0.3.5"
+version = "0.3.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ac3e13f66a2f95e32a39eaa81f6b95d42878ca0e1db0c7543723dfe12557e860"
+checksum = "f258a7194e7f7c2a7837a8913aeab7fd8c383457034fa20ce4dd3dcb813e8eb8"
dependencies = [
"libc",
"windows-sys",
@@ -267,15 +267,15 @@ dependencies = [
[[package]]
name = "futures-core"
-version = "0.3.28"
+version = "0.3.29"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4bca583b7e26f571124fe5b7561d49cb2868d79116cfa0eefce955557c6fee8c"
+checksum = "eb1d22c66e66d9d72e1758f0bd7d4fd0bee04cad842ee34587d68c07e45d088c"
[[package]]
name = "futures-io"
-version = "0.3.28"
+version = "0.3.29"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4fff74096e71ed47f8e023204cfd0aa1289cd54ae5430a9523be060cdb849964"
+checksum = "8bf34a163b5c4c52d0478a4d757da8fb65cabef42ba90515efee0f6f9fa45aaa"
[[package]]
name = "futures-lite"
@@ -304,9 +304,9 @@ dependencies = [
[[package]]
name = "getrandom"
-version = "0.2.10"
+version = "0.2.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "be4136b2a15dd319360be1c07d9933517ccf0be8f16bf62a3bee4f0d618df427"
+checksum = "fe9006bed769170c11f845cf00c7c1e9092aeb3f268e007c3e760ac68008070f"
dependencies = [
"cfg-if",
"libc",
@@ -321,9 +321,9 @@ checksum = "d77f7ec81a6d05a3abb01ab6eb7590f6083d08449fe5a1c8b1e620283546ccb7"
[[package]]
name = "http"
-version = "0.2.9"
+version = "0.2.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bd6effc99afb63425aff9b05836f029929e345a6148a14b7ecd5ab67af944482"
+checksum = "8947b1a6fad4393052c7ba1f4cd97bed3e953a95c79c92ad9b051a04611d9fbb"
dependencies = [
"bytes",
"fnv",
@@ -399,9 +399,9 @@ checksum = "af150ab688ff2122fcef229be89cb50dd66af9e01a4ff320cc137eecc9bacc38"
[[package]]
name = "libc"
-version = "0.2.149"
+version = "0.2.150"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a08173bc88b7955d1b3145aa561539096c421ac8debde8cbc3612ec635fee29b"
+checksum = "89d92a4743f9a61002fae18374ed11e7973f530cb3a3255fb354818118b2203c"
[[package]]
name = "libz-sys"
@@ -417,9 +417,9 @@ dependencies = [
[[package]]
name = "linux-raw-sys"
-version = "0.4.10"
+version = "0.4.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "da2479e8c062e40bf0066ffa0bc823de0a9368974af99c9f6df941d2c231e03f"
+checksum = "969488b55f8ac402214f3f5fd243ebb7206cf82de60d3172994707a4bcc2b829"
[[package]]
name = "log"
@@ -456,9 +456,9 @@ checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf"
[[package]]
name = "openssl-sys"
-version = "0.9.93"
+version = "0.9.95"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "db4d56a4c0478783083cfafcc42493dd4a981d41669da64b4572a2a089b51b1d"
+checksum = "40a4130519a360279579c2053038317e40eff64d13fd3f004f9e1b72b8a6aaf9"
dependencies = [
"cc",
"libc",
@@ -468,9 +468,9 @@ dependencies = [
[[package]]
name = "parking"
-version = "2.1.1"
+version = "2.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e52c774a4c39359c1d1c52e43f73dd91a75a614652c825408eec30c95a9b2067"
+checksum = "bb813b8af86854136c6922af0598d719255ecb2179515e6e7730d468f05c9cae"
[[package]]
name = "percent-encoding"
@@ -542,6 +542,7 @@ dependencies = [
"digest",
"env_logger",
"isahc",
+ "log",
"rayon",
"serde",
"serde_json",
@@ -622,18 +623,18 @@ dependencies = [
[[package]]
name = "redox_syscall"
-version = "0.3.5"
+version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "567664f262709473930a4bf9e51bf2ebf3348f2e748ccc50dea20646858f8f29"
+checksum = "4722d768eff46b75989dd134e5c353f0d6296e5aaa3132e776cbdb56be7731aa"
dependencies = [
"bitflags 1.3.2",
]
[[package]]
name = "regex"
-version = "1.9.6"
+version = "1.10.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ebee201405406dbf528b8b672104ae6d6d63e6d118cb10e4d51abbc7b58044ff"
+checksum = "380b951a9c5e80ddfd6136919eef32310721aa4aacd4889a8d39124b026ab343"
dependencies = [
"aho-corasick",
"memchr",
@@ -643,9 +644,9 @@ dependencies = [
[[package]]
name = "regex-automata"
-version = "0.3.9"
+version = "0.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "59b23e92ee4318893fa3fe3e6fb365258efbfe6ac6ab30f090cdcbb7aa37efa9"
+checksum = "5f804c7828047e88b2d32e2d7fe5a105da8ee3264f01902f796c8e067dc2483f"
dependencies = [
"aho-corasick",
"memchr",
@@ -654,17 +655,17 @@ dependencies = [
[[package]]
name = "regex-syntax"
-version = "0.7.5"
+version = "0.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dbb5fb1acd8a1a18b3dd5be62d25485eb770e05afb408a9627d14d451bae12da"
+checksum = "c08c74e62047bb2de4ff487b251e4a92e24f48745648451635cec7d591162d9f"
[[package]]
name = "rustix"
-version = "0.38.18"
+version = "0.38.25"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5a74ee2d7c2581cd139b42447d7d9389b889bdaad3a73f1ebb16f2a3237bb19c"
+checksum = "dc99bc2d4f1fed22595588a013687477aedf3cdcfb26558c559edb67b4d9b22e"
dependencies = [
- "bitflags 2.4.0",
+ "bitflags 2.4.1",
"errno",
"libc",
"linux-raw-sys",
@@ -703,18 +704,18 @@ checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49"
[[package]]
name = "serde"
-version = "1.0.188"
+version = "1.0.193"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cf9e0fcba69a370eed61bcf2b728575f726b50b55cba78064753d708ddc7549e"
+checksum = "25dd9975e68d0cb5aa1120c288333fc98731bd1dd12f561e468ea4728c042b89"
dependencies = [
"serde_derive",
]
[[package]]
name = "serde_derive"
-version = "1.0.188"
+version = "1.0.193"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4eca7ac642d82aa35b60049a6eccb4be6be75e599bd2e9adb5f875a737654af2"
+checksum = "43576ca501357b9b071ac53cdc7da8ef0cbd9493d8df094cd821777ea6e894d3"
dependencies = [
"proc-macro2",
"quote",
@@ -723,9 +724,9 @@ dependencies = [
[[package]]
name = "serde_json"
-version = "1.0.107"
+version = "1.0.108"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6b420ce6e3d8bd882e9b243c6eed35dbc9a6110c9769e74b584e0d68d1f20c65"
+checksum = "3d1c7e3eac408d115102c4c24ad393e0821bb3a5df4d506a80f85f7a742a526b"
dependencies = [
"itoa",
"ryu",
@@ -776,9 +777,9 @@ dependencies = [
[[package]]
name = "socket2"
-version = "0.4.9"
+version = "0.4.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "64a4a911eed85daf18834cfaa86a79b7d266ff93ff5ba14005426219480ed662"
+checksum = "9f7916fc008ca5542385b89a3d3ce689953c143e9304a9bf8beec1de48994c0d"
dependencies = [
"libc",
"winapi",
@@ -786,9 +787,9 @@ dependencies = [
[[package]]
name = "syn"
-version = "2.0.38"
+version = "2.0.39"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e96b79aaa137db8f61e26363a0c9b47d8b4ec75da28b7d1d614c2303e232408b"
+checksum = "23e78b90f2fcf45d3e842032ce32e3f2d1545ba6636271dcbf24fa306d87be7a"
dependencies = [
"proc-macro2",
"quote",
@@ -797,9 +798,9 @@ dependencies = [
[[package]]
name = "tempfile"
-version = "3.8.0"
+version = "3.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cb94d2f3cc536af71caac6b6fcebf65860b347e7ce0cc9ebe8f70d3e521054ef"
+checksum = "7ef1adac450ad7f4b3c28589471ade84f25f731a7a0fe30d71dfa9f60fd808e5"
dependencies = [
"cfg-if",
"fastrand 2.0.1",
@@ -810,9 +811,9 @@ dependencies = [
[[package]]
name = "termcolor"
-version = "1.3.0"
+version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6093bad37da69aab9d123a8091e4be0aa4a03e4d601ec641c327398315f62b64"
+checksum = "ff1bc3d3f05aff0403e8ac0d92ced918ec05b666a43f83297ccef5bea8a3d449"
dependencies = [
"winapi-util",
]
@@ -834,11 +835,10 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"
[[package]]
name = "tracing"
-version = "0.1.37"
+version = "0.1.40"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8ce8c33a8d48bd45d624a6e523445fd21ec13d3653cd51f681abf67418f54eb8"
+checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef"
dependencies = [
- "cfg-if",
"log",
"pin-project-lite",
"tracing-attributes",
@@ -847,9 +847,9 @@ dependencies = [
[[package]]
name = "tracing-attributes"
-version = "0.1.26"
+version = "0.1.27"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5f4f31f56159e98206da9efd823404b79b6ef3143b4a7ab76e67b1751b25a4ab"
+checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7"
dependencies = [
"proc-macro2",
"quote",
@@ -858,9 +858,9 @@ dependencies = [
[[package]]
name = "tracing-core"
-version = "0.1.31"
+version = "0.1.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0955b8137a1df6f1a2e9a37d8a6656291ff0297c1a97c24e0d8425fe2312f79a"
+checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54"
dependencies = [
"once_cell",
]
diff --git a/pkgs/build-support/node/fetch-npm-deps/Cargo.toml b/pkgs/build-support/node/fetch-npm-deps/Cargo.toml
index 0f7735a6e827..ea121c510c95 100644
--- a/pkgs/build-support/node/fetch-npm-deps/Cargo.toml
+++ b/pkgs/build-support/node/fetch-npm-deps/Cargo.toml
@@ -8,15 +8,16 @@ edition = "2021"
[dependencies]
anyhow = "1.0.75"
backoff = "0.4.0"
-base64 = "0.21.4"
+base64 = "0.21.5"
digest = "0.10.7"
-env_logger = "0.10.0"
+env_logger = "0.10.1"
isahc = { version = "1.7.2", default_features = false }
+log = "0.4.20"
rayon = "1.8.0"
-serde = { version = "1.0.188", features = ["derive"] }
-serde_json = "1.0.107"
+serde = { version = "1.0.193", features = ["derive"] }
+serde_json = "1.0.108"
sha1 = "0.10.6"
sha2 = "0.10.8"
-tempfile = "3.8.0"
+tempfile = "3.8.1"
url = { version = "2.4.1", features = ["serde"] }
walkdir = "2.4.0"
diff --git a/pkgs/build-support/node/fetch-npm-deps/default.nix b/pkgs/build-support/node/fetch-npm-deps/default.nix
index c31c5154dfc4..8ca59e2a9f2b 100644
--- a/pkgs/build-support/node/fetch-npm-deps/default.nix
+++ b/pkgs/build-support/node/fetch-npm-deps/default.nix
@@ -125,6 +125,18 @@
forceGitDeps = true;
};
+
+  # This package has a lockfile v1 git dependency with no `dependencies` attribute, since it semantically has no dependencies.
+ jitsiMeet9111 = makeTest {
+ name = "jitsi-meet-9111";
+
+ src = fetchurl {
+ url = "https://raw.githubusercontent.com/jitsi/jitsi-meet/stable/jitsi-meet_9111/package-lock.json";
+ hash = "sha256-NU+eQD4WZ4BMur8uX79uk8wUPsZvIT02KhPWHTmaihk=";
+ };
+
+ hash = "sha256-FhxlJ0HdJMPiWe7+n1HaGLWOr/2HJEPwiS65uqXZM8Y=";
+ };
};
meta = with lib; {
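
The new fixture covers a v1 lockfile whose git dependency legitimately has no `dependencies` attribute, exercising the `unwrap_or_default()` fallback added in src/parse/lock.rs below, where such a lockfile previously hit the `.expect("lockfile should have packages")` panic. To compute a fixed-output hash like the one pinned here, the tool built from this package can be pointed at a lockfile; a sketch, assuming `prefetch-npm-deps` is on PATH:

    # prints the SRI hash of the fetched npm dependency cache
    $ prefetch-npm-deps ./package-lock.json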
diff --git a/pkgs/build-support/node/fetch-npm-deps/src/cacache.rs b/pkgs/build-support/node/fetch-npm-deps/src/cacache.rs
index 75133b1b03ea..c49c094b85c6 100644
--- a/pkgs/build-support/node/fetch-npm-deps/src/cacache.rs
+++ b/pkgs/build-support/node/fetch-npm-deps/src/cacache.rs
@@ -4,6 +4,7 @@ use serde::{Deserialize, Serialize};
use sha1::Sha1;
use sha2::{Sha256, Sha512};
use std::{
+ fmt::Write as FmtWrite,
fs::{self, File},
io::Write,
path::PathBuf,
@@ -78,10 +79,10 @@ impl Cache {
push_hash_segments(
&mut p,
- &hash
- .into_iter()
- .map(|n| format!("{n:02x}"))
- .collect::<String>(),
+ &hash.into_iter().fold(String::new(), |mut out, n| {
+ let _ = write!(out, "{n:02x}");
+ out
+ }),
);
p
diff --git a/pkgs/build-support/node/fetch-npm-deps/src/main.rs b/pkgs/build-support/node/fetch-npm-deps/src/main.rs
index 2b28e290ad51..dc20c7297049 100644
--- a/pkgs/build-support/node/fetch-npm-deps/src/main.rs
+++ b/pkgs/build-support/node/fetch-npm-deps/src/main.rs
@@ -246,7 +246,9 @@ fn main() -> anyhow::Result<()> {
packages.into_par_iter().try_for_each(|package| {
eprintln!("{}", package.name);
- let tarball = package.tarball()?;
+ let tarball = package
+ .tarball()
+ .map_err(|e| anyhow!("couldn't fetch {} at {}: {e:?}", package.name, package.url))?;
let integrity = package.integrity().map(ToString::to_string);
cache
diff --git a/pkgs/build-support/node/fetch-npm-deps/src/parse/lock.rs b/pkgs/build-support/node/fetch-npm-deps/src/parse/lock.rs
index f50a31651d0e..c6e77153a0b8 100644
--- a/pkgs/build-support/node/fetch-npm-deps/src/parse/lock.rs
+++ b/pkgs/build-support/node/fetch-npm-deps/src/parse/lock.rs
@@ -18,23 +18,20 @@ pub(super) fn packages(content: &str) -> anyhow::Result<Vec<Package>> {
1 => {
let initial_url = get_initial_url()?;
- lockfile
- .dependencies
- .map(|p| to_new_packages(p, &initial_url))
- .transpose()?
+ to_new_packages(lockfile.dependencies.unwrap_or_default(), &initial_url)?
}
- 2 | 3 => lockfile.packages.map(|pkgs| {
- pkgs.into_iter()
- .filter(|(n, p)| !n.is_empty() && matches!(p.resolved, Some(UrlOrString::Url(_))))
- .map(|(n, p)| Package { name: Some(n), ..p })
- .collect()
- }),
+ 2 | 3 => lockfile
+ .packages
+ .unwrap_or_default()
+ .into_iter()
+ .filter(|(n, p)| !n.is_empty() && matches!(p.resolved, Some(UrlOrString::Url(_))))
+ .map(|(n, p)| Package { name: Some(n), ..p })
+ .collect(),
_ => bail!(
"We don't support lockfile version {}, please file an issue.",
lockfile.version
),
- }
- .expect("lockfile should have packages");
+ };
packages.par_sort_by(|x, y| {
x.resolved
@@ -182,6 +179,7 @@ impl fmt::Display for Hash {
}
}
+#[allow(clippy::incorrect_partial_ord_impl_on_ord_type)]
impl PartialOrd for Hash {
fn partial_cmp(&self, other: &Hash) -> Option<Ordering> {
let lhs = self.0.split_once('-')?.0;
@@ -216,29 +214,35 @@ fn to_new_packages(
}
if let UrlOrString::Url(v) = &package.version {
- for (scheme, host) in [
- ("github", "github.com"),
- ("bitbucket", "bitbucket.org"),
- ("gitlab", "gitlab.com"),
- ] {
- if v.scheme() == scheme {
- package.version = {
- let mut new_url = initial_url.clone();
+ if v.scheme() == "npm" {
+ if let Some(UrlOrString::Url(ref url)) = &package.resolved {
+ package.version = UrlOrString::Url(url.clone());
+ }
+ } else {
+ for (scheme, host) in [
+ ("github", "github.com"),
+ ("bitbucket", "bitbucket.org"),
+ ("gitlab", "gitlab.com"),
+ ] {
+ if v.scheme() == scheme {
+ package.version = {
+ let mut new_url = initial_url.clone();
- new_url.set_host(Some(host))?;
+ new_url.set_host(Some(host))?;
- if v.path().ends_with(".git") {
- new_url.set_path(v.path());
- } else {
- new_url.set_path(&format!("{}.git", v.path()));
- }
+ if v.path().ends_with(".git") {
+ new_url.set_path(v.path());
+ } else {
+ new_url.set_path(&format!("{}.git", v.path()));
+ }
- new_url.set_fragment(v.fragment());
+ new_url.set_fragment(v.fragment());
- UrlOrString::Url(new_url)
- };
+ UrlOrString::Url(new_url)
+ };
- break;
+ break;
+ }
}
}
}
@@ -268,7 +272,8 @@ fn get_initial_url() -> anyhow::Result<Url> {
#[cfg(test)]
mod tests {
use super::{
- get_initial_url, to_new_packages, Hash, HashCollection, OldPackage, Package, UrlOrString,
+ get_initial_url, packages, to_new_packages, Hash, HashCollection, OldPackage, Package,
+ UrlOrString,
};
use std::{
cmp::Ordering,
@@ -330,4 +335,36 @@ mod tests {
Some(Hash(String::from("sha512-foo")))
);
}
+
+ #[test]
+ fn parse_lockfile_correctly() {
+ let packages = packages(
+ r#"{
+ "name": "node-ddr",
+ "version": "1.0.0",
+ "lockfileVersion": 1,
+ "requires": true,
+ "dependencies": {
+ "string-width-cjs": {
+ "version": "npm:string-width@4.2.3",
+ "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz",
+ "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==",
+ "requires": {
+ "emoji-regex": "^8.0.0",
+ "is-fullwidth-code-point": "^3.0.0",
+ "strip-ansi": "^6.0.1"
+ }
+ }
+ }
+ }"#).unwrap();
+
+ assert_eq!(packages.len(), 1);
+ assert_eq!(
+ packages[0].resolved,
+ Some(UrlOrString::Url(
+ Url::parse("https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz")
+ .unwrap()
+ ))
+ );
+ }
}
diff --git a/pkgs/build-support/node/fetch-npm-deps/src/parse/mod.rs b/pkgs/build-support/node/fetch-npm-deps/src/parse/mod.rs
index 880a972e80e9..0bca33f03915 100644
--- a/pkgs/build-support/node/fetch-npm-deps/src/parse/mod.rs
+++ b/pkgs/build-support/node/fetch-npm-deps/src/parse/mod.rs
@@ -1,5 +1,6 @@
use anyhow::{anyhow, bail, Context};
use lock::UrlOrString;
+use log::{debug, info};
use rayon::prelude::*;
use serde_json::{Map, Value};
use std::{
@@ -19,6 +20,8 @@ pub fn lockfile(
force_git_deps: bool,
force_empty_cache: bool,
) -> anyhow::Result<Vec<Package>> {
+ debug!("parsing lockfile with contents:\n{content}");
+
let mut packages = lock::packages(content)
.context("failed to extract packages from lockfile")?
.into_par_iter()
@@ -46,6 +49,8 @@ pub fn lockfile(
let path = dir.path().join("package");
+ info!("recursively parsing lockfile for {} at {path:?}", pkg.name);
+
let lockfile_contents = fs::read_to_string(path.join("package-lock.json"));
let package_json_path = path.join("package.json");
diff --git a/pkgs/data/misc/cacert/default.nix b/pkgs/data/misc/cacert/default.nix
index d489c23c6b8d..30f2ee38c72f 100644
--- a/pkgs/data/misc/cacert/default.nix
+++ b/pkgs/data/misc/cacert/default.nix
@@ -1,7 +1,7 @@
{ lib
, stdenv
, writeText
-, fetchurl
+, fetchFromGitHub
, buildcatrust
, blacklist ? []
, extraCertificateFiles ? []
@@ -17,20 +17,10 @@
}:
let
- blocklist = writeText "cacert-blocklist.txt" (lib.concatStringsSep "\n" (blacklist ++ [
- # Mozilla does not trust new certificates issued by these CAs after 2022/11/30¹
- # in their products, but unfortunately we don't have such a fine-grained
- # solution for most system packages², so we decided to eject these.
- #
- # [1] https://groups.google.com/a/mozilla.org/g/dev-security-policy/c/oxX69KFvsm4/m/yLohoVqtCgAJ
- # [2] https://utcc.utoronto.ca/~cks/space/blog/linux/CARootStoreTrustProblem
- "TrustCor ECA-1"
- "TrustCor RootCert CA-1"
- "TrustCor RootCert CA-2"
- ]));
+ blocklist = writeText "cacert-blocklist.txt" (lib.concatStringsSep "\n" blacklist);
extraCertificatesBundle = writeText "cacert-extra-certificates-bundle.crt" (lib.concatStringsSep "\n\n" extraCertificateStrings);
- srcVersion = "3.92";
+ srcVersion = "3.95";
version = if nssOverride != null then nssOverride.version else srcVersion;
meta = with lib; {
homepage = "https://curl.haxx.se/docs/caextract.html";
@@ -43,9 +33,11 @@ let
pname = "nss-cacert-certdata";
inherit version;
- src = if nssOverride != null then nssOverride.src else fetchurl {
- url = "mirror://mozilla/security/nss/releases/NSS_${lib.replaceStrings ["."] ["_"] version}_RTM/src/nss-${version}.tar.gz";
- hash = "sha256-PbGS1uiCA5rwKufq8yF+0RS7etg0FMZGdyq4Ah4kolQ=";
+ src = if nssOverride != null then nssOverride.src else fetchFromGitHub {
+ owner = "nss-dev";
+ repo = "nss";
+ rev = "NSS_${lib.replaceStrings ["."] ["_"] version}_RTM";
+ hash = "sha256-qgSbzlRbU+gElC2ae3FEGRUFSM1JHd/lNGNXC0x4xt4=";
};
dontBuild = true;
@@ -54,7 +46,7 @@ let
runHook preInstall
mkdir $out
- cp nss/lib/ckfw/builtins/certdata.txt $out
+ cp lib/ckfw/builtins/certdata.txt $out
runHook postInstall
'';
diff --git a/pkgs/development/libraries/gnutls/default.nix b/pkgs/development/libraries/gnutls/default.nix
index 2a6d0d1088ba..5863d5b59f2e 100644
--- a/pkgs/development/libraries/gnutls/default.nix
+++ b/pkgs/development/libraries/gnutls/default.nix
@@ -1,5 +1,6 @@
{ config, lib, stdenv, fetchurl, zlib, lzo, libtasn1, nettle, pkg-config, lzip
, perl, gmp, autoconf, automake, libidn2, libiconv
+, texinfo
, unbound, dns-root-data, gettext, util-linux
, cxxBindings ? !stdenv.hostPlatform.isStatic # tries to link libstdc++.so
, tpmSupport ? false, trousers, which, nettools, libunistring
@@ -33,11 +34,11 @@ in
stdenv.mkDerivation rec {
pname = "gnutls";
- version = "3.8.0";
+ version = "3.8.2";
src = fetchurl {
url = "mirror://gnupg/gnutls/v${lib.versions.majorMinor version}/gnutls-${version}.tar.xz";
- sha256 = "sha256-DqDRGhZgoeY/lg8Vexl6vm0MjLMlW+JOH7OBWTC5vcU=";
+ hash = "sha256-52XlAW/6m53SQ+NjoEYNV3B0RE7iSRJn2y6WycKt73c=";
};
outputs = [ "bin" "dev" "out" "man" "devdoc" ];
@@ -45,7 +46,9 @@ stdenv.mkDerivation rec {
outputInfo = "devdoc";
outputDoc = "devdoc";
- patches = [ ./nix-ssl-cert-file.patch ];
+ patches = [
+ ./nix-ssl-cert-file.patch
+ ];
# Skip some tests:
# - pkg-config: building against the result won't work before installing (3.5.11)
@@ -80,7 +83,7 @@ stdenv.mkDerivation rec {
++ lib.optional (withP11-kit) p11-kit
++ lib.optional (tpmSupport && stdenv.isLinux) trousers;
- nativeBuildInputs = [ perl pkg-config ]
+ nativeBuildInputs = [ perl pkg-config texinfo ]
++ lib.optionals doCheck [ which nettools util-linux ];
propagatedBuildInputs = [ nettle ]
diff --git a/pkgs/development/libraries/gstreamer/bad/default.nix b/pkgs/development/libraries/gstreamer/bad/default.nix
index 466cc162fb3d..73a4572107e4 100644
--- a/pkgs/development/libraries/gstreamer/bad/default.nix
+++ b/pkgs/development/libraries/gstreamer/bad/default.nix
@@ -107,13 +107,13 @@
stdenv.mkDerivation rec {
pname = "gst-plugins-bad";
- version = "1.22.6";
+ version = "1.22.8";
outputs = [ "out" "dev" ];
src = fetchurl {
url = "https://gstreamer.freedesktop.org/src/${pname}/${pname}-${version}.tar.xz";
- hash = "sha256-tAKc0pCKCJxV8dkCpWXQB0lclbFELYOEhdxH+xLfcTc=";
+ hash = "sha256-RYeD+CNgaJkePilu3Wccjt24vm+skzwcLhUDRihk6g8=";
};
patches = [
diff --git a/pkgs/development/libraries/gstreamer/base/default.nix b/pkgs/development/libraries/gstreamer/base/default.nix
index 782c6997cd91..b7f231164651 100644
--- a/pkgs/development/libraries/gstreamer/base/default.nix
+++ b/pkgs/development/libraries/gstreamer/base/default.nix
@@ -45,7 +45,7 @@
stdenv.mkDerivation (finalAttrs: {
pname = "gst-plugins-base";
- version = "1.22.6";
+ version = "1.22.8";
outputs = [ "out" "dev" ];
@@ -53,7 +53,7 @@ stdenv.mkDerivation (finalAttrs: {
inherit (finalAttrs) pname version;
in fetchurl {
url = "https://gstreamer.freedesktop.org/src/${pname}/${pname}-${version}.tar.xz";
- hash = "sha256-UPK00XwC7v5DC776jFzRNLG+eKU8D2DpURNtls9J/Us=";
+ hash = "sha256-62eS5cc8be+5FZw26m5LeKL4r2USZ4tL07AsjS1JKs8=";
};
strictDeps = true;
diff --git a/pkgs/development/libraries/gstreamer/core/default.nix b/pkgs/development/libraries/gstreamer/core/default.nix
index ecf36c5d9189..7e876093f54b 100644
--- a/pkgs/development/libraries/gstreamer/core/default.nix
+++ b/pkgs/development/libraries/gstreamer/core/default.nix
@@ -24,7 +24,7 @@
stdenv.mkDerivation (finalAttrs: {
pname = "gstreamer";
- version = "1.22.6";
+ version = "1.22.8";
outputs = [
"bin"
@@ -36,7 +36,7 @@ stdenv.mkDerivation (finalAttrs: {
inherit (finalAttrs) pname version;
in fetchurl {
url = "https://gstreamer.freedesktop.org/src/${pname}/${pname}-${version}.tar.xz";
- hash = "sha256-9QDmz93/VZCPk3cR/CaghA3iih6exJYhwLbxrb2PgY4=";
+ hash = "sha256-rU49sXcRObHbF7Gvp8BdsIOuAQC9TaJEtx8WLczkG/w=";
};
depsBuildBuild = [
diff --git a/pkgs/development/libraries/gstreamer/devtools/default.nix b/pkgs/development/libraries/gstreamer/devtools/default.nix
index f0c4a30ba0aa..dda4663e01cf 100644
--- a/pkgs/development/libraries/gstreamer/devtools/default.nix
+++ b/pkgs/development/libraries/gstreamer/devtools/default.nix
@@ -17,11 +17,11 @@
stdenv.mkDerivation rec {
pname = "gst-devtools";
- version = "1.22.6";
+ version = "1.22.8";
src = fetchurl {
url = "https://gstreamer.freedesktop.org/src/${pname}/${pname}-${version}.tar.xz";
- hash = "sha256-iShWDvrxYTfDAoXnGHCOXQurB3frTvgSfgJ04SDT2Gs=";
+ hash = "sha256-zWNAVvyxbQNbPfWVPsha6L1Wxo8pkgtyDvkgynHqdqc=";
};
outputs = [
diff --git a/pkgs/development/libraries/gstreamer/ges/default.nix b/pkgs/development/libraries/gstreamer/ges/default.nix
index 790ca93b5276..eec724a5bb88 100644
--- a/pkgs/development/libraries/gstreamer/ges/default.nix
+++ b/pkgs/development/libraries/gstreamer/ges/default.nix
@@ -18,7 +18,7 @@
stdenv.mkDerivation rec {
pname = "gst-editing-services";
- version = "1.22.6";
+ version = "1.22.8";
outputs = [
"out"
@@ -27,7 +27,7 @@ stdenv.mkDerivation rec {
src = fetchurl {
url = "https://gstreamer.freedesktop.org/src/${pname}/${pname}-${version}.tar.xz";
- hash = "sha256-dI1CNnLFl/h24TCAT7mEhI9bS4nv14pQbLF/dkZ5UwE=";
+ hash = "sha256-0dXnXhkOsL4/1JQJ5Bo6qOaM+03qpzU0iFVGdJVs0d8=";
};
nativeBuildInputs = [
diff --git a/pkgs/development/libraries/gstreamer/good/default.nix b/pkgs/development/libraries/gstreamer/good/default.nix
index 317707c45c0c..4308c08d9b81 100644
--- a/pkgs/development/libraries/gstreamer/good/default.nix
+++ b/pkgs/development/libraries/gstreamer/good/default.nix
@@ -53,13 +53,13 @@ assert raspiCameraSupport -> (stdenv.isLinux && stdenv.isAarch64);
stdenv.mkDerivation rec {
pname = "gst-plugins-good";
- version = "1.22.6";
+ version = "1.22.8";
outputs = [ "out" "dev" ];
src = fetchurl {
url = "https://gstreamer.freedesktop.org/src/${pname}/${pname}-${version}.tar.xz";
- hash = "sha256-s7B/4/HOf+k6qb5yF4ZgRFSPNcSneSKA7sfhCKMvmBc=";
+ hash = "sha256-4wW58H9SdDykgdoKTgx2w179YK2vGwaU6zuwIeITfjk=";
};
strictDeps = true;
diff --git a/pkgs/development/libraries/gstreamer/libav/default.nix b/pkgs/development/libraries/gstreamer/libav/default.nix
index 7dbd9b61cbc3..f3dfac82c799 100644
--- a/pkgs/development/libraries/gstreamer/libav/default.nix
+++ b/pkgs/development/libraries/gstreamer/libav/default.nix
@@ -18,11 +18,11 @@
stdenv.mkDerivation rec {
pname = "gst-libav";
- version = "1.22.6";
+ version = "1.22.8";
src = fetchurl {
url = "https://gstreamer.freedesktop.org/src/${pname}/${pname}-${version}.tar.xz";
- hash = "sha256-d4nmQIOIol8jy/lIz8XGIw1zW7zYt/N/SgHJ40ih46c=";
+ hash = "sha256-vjk0m8B6tM29ml/W6phIxgHHVguloFd61SALg71CSYE=";
};
outputs = [ "out" "dev" ];
diff --git a/pkgs/development/libraries/gstreamer/rtsp-server/default.nix b/pkgs/development/libraries/gstreamer/rtsp-server/default.nix
index a90480d84157..8d744193db08 100644
--- a/pkgs/development/libraries/gstreamer/rtsp-server/default.nix
+++ b/pkgs/development/libraries/gstreamer/rtsp-server/default.nix
@@ -15,11 +15,11 @@
stdenv.mkDerivation rec {
pname = "gst-rtsp-server";
- version = "1.22.6";
+ version = "1.22.8";
src = fetchurl {
url = "https://gstreamer.freedesktop.org/src/${pname}/${pname}-${version}.tar.xz";
- hash = "sha256-CuM6i1BEO2LxFYGpGB6Qa0HNOHey15nb6nKRLD7aS7M=";
+ hash = "sha256-cFF3BRwimXbxca3Nerl2Kua8xLt33DCKC9gKY9psM38=";
};
outputs = [
diff --git a/pkgs/development/libraries/gstreamer/ugly/default.nix b/pkgs/development/libraries/gstreamer/ugly/default.nix
index cf5f93707cca..cc78a6ec0f80 100644
--- a/pkgs/development/libraries/gstreamer/ugly/default.nix
+++ b/pkgs/development/libraries/gstreamer/ugly/default.nix
@@ -26,13 +26,13 @@
stdenv.mkDerivation rec {
pname = "gst-plugins-ugly";
- version = "1.22.6";
+ version = "1.22.8";
outputs = [ "out" "dev" ];
src = fetchurl {
url = "https://gstreamer.freedesktop.org/src/${pname}/${pname}-${version}.tar.xz";
- hash = "sha256-PjFFTJjLL39tLTVezrkzqJL6Dx3Am8NsmryTDY4pykg=";
+ hash = "sha256-B2HZa6UI4BwCcYgbJoKMK//X2K/VCHIhnwiPdVslLKc=";
};
nativeBuildInputs = [
diff --git a/pkgs/development/libraries/gstreamer/vaapi/default.nix b/pkgs/development/libraries/gstreamer/vaapi/default.nix
index 7b7da009de7f..1e44d41e5045 100644
--- a/pkgs/development/libraries/gstreamer/vaapi/default.nix
+++ b/pkgs/development/libraries/gstreamer/vaapi/default.nix
@@ -24,11 +24,11 @@
stdenv.mkDerivation rec {
pname = "gstreamer-vaapi";
- version = "1.22.6";
+ version = "1.22.8";
src = fetchurl {
url = "https://gstreamer.freedesktop.org/src/${pname}/${pname}-${version}.tar.xz";
- hash = "sha256-2bovwmvvmMeOmCxZn1hdRru2X+Ei2onC16tB9GilLHs=";
+ hash = "sha256-Epi6NHpwxCuIzev5G2Wf6gKxu3Jp6r+OKePAvVgniSg=";
};
outputs = [
diff --git a/pkgs/development/libraries/http-parser/build-shared.patch b/pkgs/development/libraries/http-parser/build-shared.patch
deleted file mode 100644
index 5922cdfb5848..000000000000
--- a/pkgs/development/libraries/http-parser/build-shared.patch
+++ /dev/null
@@ -1,30 +0,0 @@
-diff -Naur http-parser-2.1-orig/http_parser.gyp http-parser-2.1/http_parser.gyp
---- http-parser-2.1-orig/http_parser.gyp 2013-03-26 18:35:20.000000000 -0400
-+++ http-parser-2.1/http_parser.gyp 2013-05-23 16:47:49.280488341 -0400
-@@ -21,7 +21,7 @@
- },
- 'Release': {
- 'defines': [ 'NDEBUG' ],
-- 'cflags': [ '-Wall', '-Wextra', '-O3' ],
-+ 'cflags': [ '-Wall', '-Wextra', '-O3', '-fPIC' ],
- 'msvs_settings': {
- 'VCCLCompilerTool': {
- 'RuntimeLibrary': 0, # static release
-@@ -50,7 +50,7 @@
- 'targets': [
- {
- 'target_name': 'http_parser',
-- 'type': 'static_library',
-+ 'type': 'shared_library',
- 'include_dirs': [ '.' ],
- 'direct_dependent_settings': {
- 'defines': [ 'HTTP_PARSER_STRICT=0' ],
-@@ -73,7 +73,7 @@
-
- {
- 'target_name': 'http_parser_strict',
-- 'type': 'static_library',
-+ 'type': 'shared_library',
- 'include_dirs': [ '.' ],
- 'direct_dependent_settings': {
- 'defines': [ 'HTTP_PARSER_STRICT=1' ],
diff --git a/pkgs/development/libraries/http-parser/default.nix b/pkgs/development/libraries/http-parser/default.nix
index dd1ea3559d7a..8963e5c05592 100644
--- a/pkgs/development/libraries/http-parser/default.nix
+++ b/pkgs/development/libraries/http-parser/default.nix
@@ -1,4 +1,7 @@
-{ lib, stdenv, fetchFromGitHub, fetchpatch }:
+{ lib, stdenv, fetchFromGitHub, fetchpatch
+, enableShared ? !stdenv.hostPlatform.isStatic
+, enableStatic ? stdenv.hostPlatform.isStatic
+}:
stdenv.mkDerivation rec {
pname = "http-parser";
@@ -12,8 +15,9 @@ stdenv.mkDerivation rec {
};
env.NIX_CFLAGS_COMPILE = "-Wno-error";
+
patches = [
- ./build-shared.patch
+ ./enable-static-shared.patch
] ++ lib.optionals stdenv.isAarch32 [
# https://github.com/nodejs/http-parser/pull/510
(fetchpatch {
@@ -21,16 +25,41 @@ stdenv.mkDerivation rec {
sha256 = "sha256-rZZMJeow3V1fTnjadRaRa+xTq3pdhZn/eJ4xjxEDoU4=";
})
];
- makeFlags = [ "DESTDIR=" "PREFIX=$(out)" ];
- buildFlags = [ "library" ];
+
+ makeFlags = [
+ "DESTDIR="
+ "PREFIX=$(out)"
+ "BINEXT=${stdenv.hostPlatform.extensions.executable}"
+ "Platform=${lib.toLower stdenv.hostPlatform.uname.system}"
+ "AEXT=${lib.strings.removePrefix "." stdenv.hostPlatform.extensions.staticLibrary}"
+ "ENABLE_SHARED=${if enableShared then "1" else "0"}"
+ "ENABLE_STATIC=${if enableStatic then "1" else "0"}"
+ ] ++ lib.optionals enableShared [
+ "SOEXT=${lib.strings.removePrefix "." stdenv.hostPlatform.extensions.sharedLibrary}"
+ ] ++ lib.optionals enableStatic [
+ "AEXT=${lib.strings.removePrefix "." stdenv.hostPlatform.extensions.staticLibrary}"
+ ] ++ lib.optionals (enableShared && stdenv.hostPlatform.isWindows) [
+ "SONAME=$(SOLIBNAME).$(SOMAJOR).$(SOMINOR).$(SOEXT)"
+ "LIBNAME=$(SOLIBNAME).$(SOMAJOR).$(SOMINOR).$(SOREV).$(SOEXT)"
+ "LDFLAGS=-Wl,--out-implib=$(LIBNAME).a"
+ ];
+
+ buildFlags = lib.optional enableShared "library"
+ ++ lib.optional enableStatic "package";
+
doCheck = true;
checkTarget = "test";
+ postInstall = lib.optionalString stdenv.hostPlatform.isWindows ''
+ install -D *.dll.a $out/lib
+ ln -sf libhttp_parser.${version}.dll.a $out/lib/libhttp_parser.dll.a
+ '';
+
meta = with lib; {
description = "An HTTP message parser written in C";
homepage = "https://github.com/nodejs/http-parser";
maintainers = with maintainers; [ matthewbauer ];
license = licenses.mit;
- platforms = platforms.unix;
+ platforms = platforms.all;
};
}
diff --git a/pkgs/development/libraries/http-parser/enable-static-shared.patch b/pkgs/development/libraries/http-parser/enable-static-shared.patch
new file mode 100644
index 000000000000..42f11d4c817f
--- /dev/null
+++ b/pkgs/development/libraries/http-parser/enable-static-shared.patch
@@ -0,0 +1,93 @@
+commit abcb3cca9452779e91380b7636f32745166af3de
+Author: John Ericson <John.Ericson@Obsidian.Systems>
+Date: Wed Nov 29 23:55:38 2023 -0500
+
+ Make build system: enable/disable shared/static support
+
+ This allows building this package in static-lib-only distros.
+
+diff --git a/Makefile b/Makefile
+index 5d21221..cbc7914 100644
+--- a/Makefile
++++ b/Makefile
+@@ -18,6 +18,9 @@
+ # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+ # IN THE SOFTWARE.
+
++ENABLE_SHARED ?= 1
++ENABLE_STATIC ?=
++
+ PLATFORM ?= $(shell sh -c 'uname -s | tr "[A-Z]" "[a-z]"')
+ HELPER ?=
+ BINEXT ?=
+@@ -25,6 +28,8 @@ SOLIBNAME = libhttp_parser
+ SOMAJOR = 2
+ SOMINOR = 9
+ SOREV = 4
++AEXT = a
++STATICLIBNAME = $(SOLIBNAME).$(AEXT)
+ ifeq (darwin,$(PLATFORM))
+ SOEXT ?= dylib
+ SONAME ?= $(SOLIBNAME).$(SOMAJOR).$(SOMINOR).$(SOEXT)
+@@ -109,11 +114,17 @@ test-valgrind: test_g
+ libhttp_parser.o: http_parser.c http_parser.h Makefile
+ $(CC) $(CPPFLAGS_FAST) $(CFLAGS_LIB) -c http_parser.c -o libhttp_parser.o
+
+-library: libhttp_parser.o
+- $(CC) $(LDFLAGS_LIB) -o $(LIBNAME) $<
++.PHONY: library
++library: $(LIBNAME)
++
++$(LIBNAME): libhttp_parser.o
++ $(CC) $(LDFLAGS_LIB) -o $@ $<
+
+-package: http_parser.o
+- $(AR) rcs libhttp_parser.a http_parser.o
++.PHONY: package
++package: $(STATICLIBNAME)
++
++$(STATICLIBNAME): http_parser.o
++ $(AR) rcs $@ $<
+
+ url_parser: http_parser.o contrib/url_parser.c
+ $(CC) $(CPPFLAGS_FAST) $(CFLAGS_FAST) $^ -o $@
+@@ -130,12 +141,30 @@ parsertrace_g: http_parser_g.o contrib/parsertrace.c
+ tags: http_parser.c http_parser.h test.c
+ ctags $^
+
+-install: library
++.PHONY: install-headers
++install-headers:
+ $(INSTALL) -D http_parser.h $(DESTDIR)$(INCLUDEDIR)/http_parser.h
++
++.PHONY: install-library
++install-library: library
+ $(INSTALL) -D $(LIBNAME) $(DESTDIR)$(LIBDIR)/$(LIBNAME)
+ ln -sf $(LIBNAME) $(DESTDIR)$(LIBDIR)/$(SONAME)
+ ln -sf $(LIBNAME) $(DESTDIR)$(LIBDIR)/$(SOLIBNAME).$(SOEXT)
+
++.PHONY: install-package
++install-package: package
++ $(INSTALL) -D $(STATICLIBNAME) $(DESTDIR)$(LIBDIR)/$(STATICLIBNAME)
++
++.PHONY: install
++install: install-headers
++ifeq ($(ENABLE_SHARED),1)
++install: install-library
++endif
++ifeq ($(ENABLE_STATIC),1)
++install: install-package
++endif
++
++.PHONY: install-strip
+ install-strip: library
+ $(INSTALL) -D http_parser.h $(DESTDIR)$(INCLUDEDIR)/http_parser.h
+ $(INSTALL) -D -s $(LIBNAME) $(DESTDIR)$(LIBDIR)/$(LIBNAME)
+@@ -147,6 +176,7 @@ uninstall:
+ rm $(DESTDIR)$(LIBDIR)/$(SOLIBNAME).$(SOEXT)
+ rm $(DESTDIR)$(LIBDIR)/$(SONAME)
+ rm $(DESTDIR)$(LIBDIR)/$(LIBNAME)
++ rm $(DESTDIR)$(LIBDIR)/$(STATICLIBNAME)
+
+ clean:
+ rm -f *.o *.a tags test test_fast test_g \
diff --git a/pkgs/development/libraries/libde265/default.nix b/pkgs/development/libraries/libde265/default.nix
index 9c1a10f9f5b1..de366da98b96 100644
--- a/pkgs/development/libraries/libde265/default.nix
+++ b/pkgs/development/libraries/libde265/default.nix
@@ -14,14 +14,14 @@
}:
stdenv.mkDerivation (finalAttrs: rec {
- version = "1.0.12";
+ version = "1.0.14";
pname = "libde265";
src = fetchFromGitHub {
owner = "strukturag";
repo = "libde265";
rev = "refs/tags/v${version}";
- hash = "sha256-pl1r3n4T4FcJ4My/wCE54R2fmTdrlJOvgb2U0MZf1BI=";
+ hash = "sha256-aZRtF4wYWxi/6ORNu7yVxFFdkvJTvBwPinL5lC0Mlqg=";
};
nativeBuildInputs = [ autoreconfHook pkg-config ];
diff --git a/pkgs/development/libraries/libgit2/default.nix b/pkgs/development/libraries/libgit2/default.nix
index 3d7b6ff9e012..50832a96b627 100644
--- a/pkgs/development/libraries/libgit2/default.nix
+++ b/pkgs/development/libraries/libgit2/default.nix
@@ -33,6 +33,10 @@ stdenv.mkDerivation rec {
"-DUSE_HTTP_PARSER=system"
"-DUSE_SSH=ON"
"-DBUILD_SHARED_LIBS=${if staticBuild then "OFF" else "ON"}"
+ ] ++ lib.optionals stdenv.hostPlatform.isWindows [
+ "-DDLLTOOL=${stdenv.cc.bintools.targetPrefix}dlltool"
+    # For ws2_32, referred to by a `*.pc` file
+ "-DCMAKE_LIBRARY_PATH=${stdenv.cc.libc}/lib"
];
nativeBuildInputs = [ cmake python3 pkg-config ];
diff --git a/pkgs/development/libraries/libsass/default.nix b/pkgs/development/libraries/libsass/default.nix
index f4293952b9f4..e8dc8d2f86c6 100644
--- a/pkgs/development/libraries/libsass/default.nix
+++ b/pkgs/development/libraries/libsass/default.nix
@@ -1,4 +1,14 @@
-{ lib, stdenv, fetchFromGitHub, autoreconfHook }:
+{ lib
+, stdenv
+, fetchFromGitHub
+, fetchpatch
+, autoreconfHook
+
+# for passthru.tests
+, gtk3
+, gtk4
+, sassc
+}:
stdenv.mkDerivation rec {
pname = "libsass";
@@ -16,12 +26,24 @@ stdenv.mkDerivation rec {
'';
};
+ patches = [
+ (fetchpatch {
+ name = "CVE-2022-26592.CVE-2022-43357.CVE-2022-43358.patch";
+ url = "https://github.com/sass/libsass/pull/3184/commits/5bb0ea0c4b2ebebe542933f788ffacba459a717a.patch";
+ hash = "sha256-DR6pKFWL70uJt//drzq34LeTzT8rUqgUTpgfUHpD2s4=";
+ })
+ ];
+
preConfigure = ''
export LIBSASS_VERSION=${version}
'';
nativeBuildInputs = [ autoreconfHook ];
+ passthru.tests = {
+ inherit gtk3 gtk4 sassc;
+ };
+
meta = with lib; {
description = "A C/C++ implementation of a Sass compiler";
homepage = "https://github.com/sass/libsass";
diff --git a/pkgs/development/libraries/libssh/default.nix b/pkgs/development/libraries/libssh/default.nix
index 417405f42652..3d770402f32d 100644
--- a/pkgs/development/libraries/libssh/default.nix
+++ b/pkgs/development/libraries/libssh/default.nix
@@ -15,11 +15,11 @@
stdenv.mkDerivation rec {
pname = "libssh";
- version = "0.10.5";
+ version = "0.10.6";
src = fetchurl {
- url = "https://www.libssh.org/files/${lib.versions.majorMinor version}/${pname}-${version}.tar.xz";
- sha256 = "sha256-tg4v9/Nnue7itWNNOmMwPd/t4OahjfyojESodw5+QjQ=";
+ url = "https://www.libssh.org/files/${lib.versions.majorMinor version}/libssh-${version}.tar.xz";
+ hash = "sha256-GGHUmPW28XQbarxz5ghHhJHtz5ydS2Yw7vbnRZbencE=";
};
postPatch = ''
diff --git a/pkgs/development/libraries/libssh2/CVE-2023-48795.patch b/pkgs/development/libraries/libssh2/CVE-2023-48795.patch
new file mode 100644
index 000000000000..c89e4a137b72
--- /dev/null
+++ b/pkgs/development/libraries/libssh2/CVE-2023-48795.patch
@@ -0,0 +1,459 @@
+From d34d9258b8420b19ec3f97b4cc5bf7aa7d98e35a Mon Sep 17 00:00:00 2001
+From: Michael Buckley <michael@buckleyisms.com>
+Date: Thu, 30 Nov 2023 15:08:02 -0800
+Subject: [PATCH] src: add 'strict KEX' to fix CVE-2023-48795 "Terrapin Attack"
+
+Refs:
+https://terrapin-attack.com/
+https://seclists.org/oss-sec/2023/q4/292
+https://osv.dev/list?ecosystem=&q=CVE-2023-48795
+https://github.com/advisories/GHSA-45x7-px36-x8w8
+https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2023-48795
+
+Fixes #1290
+Closes #1291
+---
+ src/kex.c | 63 +++++++++++++++++++++++------------
+ src/libssh2_priv.h | 18 +++++++---
+ src/packet.c | 83 +++++++++++++++++++++++++++++++++++++++++++---
+ src/packet.h | 2 +-
+ src/session.c | 3 ++
+ src/transport.c | 12 ++++++-
+ 6 files changed, 149 insertions(+), 32 deletions(-)
+
+diff --git a/src/kex.c b/src/kex.c
+index 8e7b7f0af3..a7b301e157 100644
+--- a/src/kex.c
++++ b/src/kex.c
+@@ -3032,6 +3032,13 @@ kex_method_extension_negotiation = {
+ 0,
+ };
+
++static const LIBSSH2_KEX_METHOD
++kex_method_strict_client_extension = {
++ "kex-strict-c-v00@openssh.com",
++ NULL,
++ 0,
++};
++
+ static const LIBSSH2_KEX_METHOD *libssh2_kex_methods[] = {
+ #if LIBSSH2_ED25519
+ &kex_method_ssh_curve25519_sha256,
+@@ -3050,6 +3057,7 @@ static const LIBSSH2_KEX_METHOD *libssh2_kex_methods[] = {
+ &kex_method_diffie_helman_group1_sha1,
+ &kex_method_diffie_helman_group_exchange_sha1,
+ &kex_method_extension_negotiation,
++ &kex_method_strict_client_extension,
+ NULL
+ };
+
+@@ -3302,13 +3310,13 @@ static int kexinit(LIBSSH2_SESSION * session)
+ return 0;
+ }
+
+-/* kex_agree_instr
++/* _libssh2_kex_agree_instr
+ * Kex specific variant of strstr()
+ * Needle must be preceded by BOL or ',', and followed by ',' or EOL
+ */
+-static unsigned char *
+-kex_agree_instr(unsigned char *haystack, size_t haystack_len,
+- const unsigned char *needle, size_t needle_len)
++unsigned char *
++_libssh2_kex_agree_instr(unsigned char *haystack, size_t haystack_len,
++ const unsigned char *needle, size_t needle_len)
+ {
+ unsigned char *s;
+ unsigned char *end_haystack;
+@@ -3393,7 +3401,7 @@ static int kex_agree_hostkey(LIBSSH2_SESSION * session,
+ while(s && *s) {
+ unsigned char *p = (unsigned char *) strchr((char *) s, ',');
+ size_t method_len = (p ? (size_t)(p - s) : strlen((char *) s));
+- if(kex_agree_instr(hostkey, hostkey_len, s, method_len)) {
++ if(_libssh2_kex_agree_instr(hostkey, hostkey_len, s, method_len)) {
+ const LIBSSH2_HOSTKEY_METHOD *method =
+ (const LIBSSH2_HOSTKEY_METHOD *)
+ kex_get_method_by_name((char *) s, method_len,
+@@ -3427,9 +3435,9 @@ static int kex_agree_hostkey(LIBSSH2_SESSION * session,
+ }
+
+ while(hostkeyp && (*hostkeyp) && (*hostkeyp)->name) {
+- s = kex_agree_instr(hostkey, hostkey_len,
+- (unsigned char *) (*hostkeyp)->name,
+- strlen((*hostkeyp)->name));
++ s = _libssh2_kex_agree_instr(hostkey, hostkey_len,
++ (unsigned char *) (*hostkeyp)->name,
++ strlen((*hostkeyp)->name));
+ if(s) {
+ /* So far so good, but does it suit our purposes? (Encrypting vs
+ Signing) */
+@@ -3463,6 +3471,12 @@ static int kex_agree_kex_hostkey(LIBSSH2_SESSION * session, unsigned char *kex,
+ {
+ const LIBSSH2_KEX_METHOD **kexp = libssh2_kex_methods;
+ unsigned char *s;
++ const unsigned char *strict =
++ (unsigned char *)"kex-strict-s-v00@openssh.com";
++
++ if(_libssh2_kex_agree_instr(kex, kex_len, strict, 28)) {
++ session->kex_strict = 1;
++ }
+
+ if(session->kex_prefs) {
+ s = (unsigned char *) session->kex_prefs;
+@@ -3470,7 +3484,7 @@ static int kex_agree_kex_hostkey(LIBSSH2_SESSION * session, unsigned char *kex,
+ while(s && *s) {
+ unsigned char *q, *p = (unsigned char *) strchr((char *) s, ',');
+ size_t method_len = (p ? (size_t)(p - s) : strlen((char *) s));
+- q = kex_agree_instr(kex, kex_len, s, method_len);
++ q = _libssh2_kex_agree_instr(kex, kex_len, s, method_len);
+ if(q) {
+ const LIBSSH2_KEX_METHOD *method = (const LIBSSH2_KEX_METHOD *)
+ kex_get_method_by_name((char *) s, method_len,
+@@ -3504,9 +3518,9 @@ static int kex_agree_kex_hostkey(LIBSSH2_SESSION * session, unsigned char *kex,
+ }
+
+ while(*kexp && (*kexp)->name) {
+- s = kex_agree_instr(kex, kex_len,
+- (unsigned char *) (*kexp)->name,
+- strlen((*kexp)->name));
++ s = _libssh2_kex_agree_instr(kex, kex_len,
++ (unsigned char *) (*kexp)->name,
++ strlen((*kexp)->name));
+ if(s) {
+ /* We've agreed on a key exchange method,
+ * Can we agree on a hostkey that works with this kex?
+@@ -3550,7 +3564,7 @@ static int kex_agree_crypt(LIBSSH2_SESSION * session,
+ unsigned char *p = (unsigned char *) strchr((char *) s, ',');
+ size_t method_len = (p ? (size_t)(p - s) : strlen((char *) s));
+
+- if(kex_agree_instr(crypt, crypt_len, s, method_len)) {
++ if(_libssh2_kex_agree_instr(crypt, crypt_len, s, method_len)) {
+ const LIBSSH2_CRYPT_METHOD *method =
+ (const LIBSSH2_CRYPT_METHOD *)
+ kex_get_method_by_name((char *) s, method_len,
+@@ -3572,9 +3586,9 @@ static int kex_agree_crypt(LIBSSH2_SESSION * session,
+ }
+
+ while(*cryptp && (*cryptp)->name) {
+- s = kex_agree_instr(crypt, crypt_len,
+- (unsigned char *) (*cryptp)->name,
+- strlen((*cryptp)->name));
++ s = _libssh2_kex_agree_instr(crypt, crypt_len,
++ (unsigned char *) (*cryptp)->name,
++ strlen((*cryptp)->name));
+ if(s) {
+ endpoint->crypt = *cryptp;
+ return 0;
+@@ -3614,7 +3628,7 @@ static int kex_agree_mac(LIBSSH2_SESSION * session,
+ unsigned char *p = (unsigned char *) strchr((char *) s, ',');
+ size_t method_len = (p ? (size_t)(p - s) : strlen((char *) s));
+
+- if(kex_agree_instr(mac, mac_len, s, method_len)) {
++ if(_libssh2_kex_agree_instr(mac, mac_len, s, method_len)) {
+ const LIBSSH2_MAC_METHOD *method = (const LIBSSH2_MAC_METHOD *)
+ kex_get_method_by_name((char *) s, method_len,
+ (const LIBSSH2_COMMON_METHOD **)
+@@ -3635,8 +3649,9 @@ static int kex_agree_mac(LIBSSH2_SESSION * session,
+ }
+
+ while(*macp && (*macp)->name) {
+- s = kex_agree_instr(mac, mac_len, (unsigned char *) (*macp)->name,
+- strlen((*macp)->name));
++ s = _libssh2_kex_agree_instr(mac, mac_len,
++ (unsigned char *) (*macp)->name,
++ strlen((*macp)->name));
+ if(s) {
+ endpoint->mac = *macp;
+ return 0;
+@@ -3667,7 +3682,7 @@ static int kex_agree_comp(LIBSSH2_SESSION *session,
+ unsigned char *p = (unsigned char *) strchr((char *) s, ',');
+ size_t method_len = (p ? (size_t)(p - s) : strlen((char *) s));
+
+- if(kex_agree_instr(comp, comp_len, s, method_len)) {
++ if(_libssh2_kex_agree_instr(comp, comp_len, s, method_len)) {
+ const LIBSSH2_COMP_METHOD *method =
+ (const LIBSSH2_COMP_METHOD *)
+ kex_get_method_by_name((char *) s, method_len,
+@@ -3689,8 +3704,9 @@ static int kex_agree_comp(LIBSSH2_SESSION *session,
+ }
+
+ while(*compp && (*compp)->name) {
+- s = kex_agree_instr(comp, comp_len, (unsigned char *) (*compp)->name,
+- strlen((*compp)->name));
++ s = _libssh2_kex_agree_instr(comp, comp_len,
++ (unsigned char *) (*compp)->name,
++ strlen((*compp)->name));
+ if(s) {
+ endpoint->comp = *compp;
+ return 0;
+@@ -3871,6 +3887,7 @@ _libssh2_kex_exchange(LIBSSH2_SESSION * session, int reexchange,
+ session->local.kexinit = key_state->oldlocal;
+ session->local.kexinit_len = key_state->oldlocal_len;
+ key_state->state = libssh2_NB_state_idle;
++ session->state &= ~LIBSSH2_STATE_INITIAL_KEX;
+ session->state &= ~LIBSSH2_STATE_KEX_ACTIVE;
+ session->state &= ~LIBSSH2_STATE_EXCHANGING_KEYS;
+ return -1;
+@@ -3896,6 +3913,7 @@ _libssh2_kex_exchange(LIBSSH2_SESSION * session, int reexchange,
+ session->local.kexinit = key_state->oldlocal;
+ session->local.kexinit_len = key_state->oldlocal_len;
+ key_state->state = libssh2_NB_state_idle;
++ session->state &= ~LIBSSH2_STATE_INITIAL_KEX;
+ session->state &= ~LIBSSH2_STATE_KEX_ACTIVE;
+ session->state &= ~LIBSSH2_STATE_EXCHANGING_KEYS;
+ return -1;
+@@ -3944,6 +3962,7 @@ _libssh2_kex_exchange(LIBSSH2_SESSION * session, int reexchange,
+ session->remote.kexinit = NULL;
+ }
+
++ session->state &= ~LIBSSH2_STATE_INITIAL_KEX;
+ session->state &= ~LIBSSH2_STATE_KEX_ACTIVE;
+ session->state &= ~LIBSSH2_STATE_EXCHANGING_KEYS;
+
+diff --git a/src/libssh2_priv.h b/src/libssh2_priv.h
+index 7660366954..18d9ab2130 100644
+--- a/src/libssh2_priv.h
++++ b/src/libssh2_priv.h
+@@ -736,6 +736,9 @@ struct _LIBSSH2_SESSION
+ /* key signing algorithm preferences -- NULL yields server order */
+ char *sign_algo_prefs;
+
++ /* Whether to use the OpenSSH Strict KEX extension */
++ int kex_strict;
++
+ /* (remote as source of data -- packet_read ) */
+ libssh2_endpoint_data remote;
+
+@@ -908,6 +911,7 @@ struct _LIBSSH2_SESSION
+ int fullpacket_macstate;
+ size_t fullpacket_payload_len;
+ int fullpacket_packet_type;
++ uint32_t fullpacket_required_type;
+
+ /* State variables used in libssh2_sftp_init() */
+ libssh2_nonblocking_states sftpInit_state;
+@@ -948,10 +952,11 @@ struct _LIBSSH2_SESSION
+ };
+
+ /* session.state bits */
+-#define LIBSSH2_STATE_EXCHANGING_KEYS 0x00000001
+-#define LIBSSH2_STATE_NEWKEYS 0x00000002
+-#define LIBSSH2_STATE_AUTHENTICATED 0x00000004
+-#define LIBSSH2_STATE_KEX_ACTIVE 0x00000008
++#define LIBSSH2_STATE_INITIAL_KEX 0x00000001
++#define LIBSSH2_STATE_EXCHANGING_KEYS 0x00000002
++#define LIBSSH2_STATE_NEWKEYS 0x00000004
++#define LIBSSH2_STATE_AUTHENTICATED 0x00000008
++#define LIBSSH2_STATE_KEX_ACTIVE 0x00000010
+
+ /* session.flag helpers */
+ #ifdef MSG_NOSIGNAL
+@@ -1182,6 +1187,11 @@ ssize_t _libssh2_send(libssh2_socket_t socket, const void *buffer,
+ int _libssh2_kex_exchange(LIBSSH2_SESSION * session, int reexchange,
+ key_exchange_state_t * state);
+
++unsigned char *_libssh2_kex_agree_instr(unsigned char *haystack,
++ size_t haystack_len,
++ const unsigned char *needle,
++ size_t needle_len);
++
+ /* Let crypt.c/hostkey.c expose their method structs */
+ const LIBSSH2_CRYPT_METHOD **libssh2_crypt_methods(void);
+ const LIBSSH2_HOSTKEY_METHOD **libssh2_hostkey_methods(void);
+diff --git a/src/packet.c b/src/packet.c
+index eccb8c56a8..6da14e9fa1 100644
+--- a/src/packet.c
++++ b/src/packet.c
+@@ -624,14 +624,13 @@ packet_authagent_open(LIBSSH2_SESSION * session,
+ * layer when it has received a packet.
+ *
+ * The input pointer 'data' is pointing to allocated data that this function
+- * is asked to deal with so on failure OR success, it must be freed fine.
+- * The only exception is when the return code is LIBSSH2_ERROR_EAGAIN.
+ * will be freed unless the return code is LIBSSH2_ERROR_EAGAIN.
+ *
+ * This function will always be called with 'datalen' greater than zero.
+ */
+ int
+ _libssh2_packet_add(LIBSSH2_SESSION * session, unsigned char *data,
+- size_t datalen, int macstate)
++ size_t datalen, int macstate, uint32_t seq)
+ {
+ int rc = 0;
+ unsigned char *message = NULL;
+@@ -676,6 +675,70 @@ _libssh2_packet_add(LIBSSH2_SESSION * session, unsigned char *data,
+ break;
+ }
+
++ if(session->state & LIBSSH2_STATE_INITIAL_KEX) {
++ if(msg == SSH_MSG_KEXINIT) {
++ if(!session->kex_strict) {
++ if(datalen < 17) {
++ LIBSSH2_FREE(session, data);
++ session->packAdd_state = libssh2_NB_state_idle;
++ return _libssh2_error(session,
++ LIBSSH2_ERROR_BUFFER_TOO_SMALL,
++ "Data too short extracting kex");
++ }
++ else {
++ const unsigned char *strict =
++ (unsigned char *)"kex-strict-s-v00@openssh.com";
++ struct string_buf buf;
++ unsigned char *algs = NULL;
++ size_t algs_len = 0;
++
++ buf.data = (unsigned char *)data;
++ buf.dataptr = buf.data;
++ buf.len = datalen;
++ buf.dataptr += 17; /* advance past type and cookie */
++
++ if(_libssh2_get_string(&buf, &algs, &algs_len)) {
++ LIBSSH2_FREE(session, data);
++ session->packAdd_state = libssh2_NB_state_idle;
++ return _libssh2_error(session,
++ LIBSSH2_ERROR_BUFFER_TOO_SMALL,
++ "Algs too short");
++ }
++
++ if(algs_len == 0 ||
++ _libssh2_kex_agree_instr(algs, algs_len, strict, 28)) {
++ session->kex_strict = 1;
++ }
++ }
++ }
++
++ if(session->kex_strict && seq) {
++ LIBSSH2_FREE(session, data);
++ session->socket_state = LIBSSH2_SOCKET_DISCONNECTED;
++ session->packAdd_state = libssh2_NB_state_idle;
++ libssh2_session_disconnect(session, "strict KEX violation: "
++ "KEXINIT was not the first packet");
++
++ return _libssh2_error(session, LIBSSH2_ERROR_SOCKET_DISCONNECT,
++ "strict KEX violation: "
++ "KEXINIT was not the first packet");
++ }
++ }
++
++ if(session->kex_strict && session->fullpacket_required_type &&
++ session->fullpacket_required_type != msg) {
++ LIBSSH2_FREE(session, data);
++ session->socket_state = LIBSSH2_SOCKET_DISCONNECTED;
++ session->packAdd_state = libssh2_NB_state_idle;
++ libssh2_session_disconnect(session, "strict KEX violation: "
++ "unexpected packet type");
++
++ return _libssh2_error(session, LIBSSH2_ERROR_SOCKET_DISCONNECT,
++ "strict KEX violation: "
++ "unexpected packet type");
++ }
++ }
++
+ if(session->packAdd_state == libssh2_NB_state_allocated) {
+ /* A couple exceptions to the packet adding rule: */
+ switch(msg) {
+@@ -1364,6 +1427,15 @@ _libssh2_packet_ask(LIBSSH2_SESSION * session, unsigned char packet_type,
+
+ return 0;
+ }
++ else if(session->kex_strict &&
++ (session->state & LIBSSH2_STATE_INITIAL_KEX)) {
++ libssh2_session_disconnect(session, "strict KEX violation: "
++ "unexpected packet type");
++
++ return _libssh2_error(session, LIBSSH2_ERROR_SOCKET_DISCONNECT,
++ "strict KEX violation: "
++ "unexpected packet type");
++ }
+ packet = _libssh2_list_next(&packet->node);
+ }
+ return -1;
+@@ -1425,7 +1497,10 @@ _libssh2_packet_require(LIBSSH2_SESSION * session, unsigned char packet_type,
+ }
+
+ while(session->socket_state == LIBSSH2_SOCKET_CONNECTED) {
+- int ret = _libssh2_transport_read(session);
++ int ret;
++ session->fullpacket_required_type = packet_type;
++ ret = _libssh2_transport_read(session);
++ session->fullpacket_required_type = 0;
+ if(ret == LIBSSH2_ERROR_EAGAIN)
+ return ret;
+ else if(ret < 0) {
+diff --git a/src/packet.h b/src/packet.h
+index 1d90b8af12..955351e5f6 100644
+--- a/src/packet.h
++++ b/src/packet.h
+@@ -72,6 +72,6 @@ int _libssh2_packet_burn(LIBSSH2_SESSION * session,
+ int _libssh2_packet_write(LIBSSH2_SESSION * session, unsigned char *data,
+ unsigned long data_len);
+ int _libssh2_packet_add(LIBSSH2_SESSION * session, unsigned char *data,
+- size_t datalen, int macstate);
++ size_t datalen, int macstate, uint32_t seq);
+
+ #endif /* LIBSSH2_PACKET_H */
+diff --git a/src/session.c b/src/session.c
+index 35e7929fe7..9d89ade8ec 100644
+--- a/src/session.c
++++ b/src/session.c
+@@ -469,6 +469,8 @@ libssh2_session_init_ex(LIBSSH2_ALLOC_FUNC((*my_alloc)),
+ session->abstract = abstract;
+ session->api_timeout = 0; /* timeout-free API by default */
+ session->api_block_mode = 1; /* blocking API by default */
++ session->state = LIBSSH2_STATE_INITIAL_KEX;
++ session->fullpacket_required_type = 0;
+ session->packet_read_timeout = LIBSSH2_DEFAULT_READ_TIMEOUT;
+ session->flag.quote_paths = 1; /* default behavior is to quote paths
+ for the scp subsystem */
+@@ -1223,6 +1225,7 @@ libssh2_session_disconnect_ex(LIBSSH2_SESSION *session, int reason,
+ const char *desc, const char *lang)
+ {
+ int rc;
++ session->state &= ~LIBSSH2_STATE_INITIAL_KEX;
+ session->state &= ~LIBSSH2_STATE_EXCHANGING_KEYS;
+ BLOCK_ADJUST(rc, session,
+ session_disconnect(session, reason, desc, lang));
+diff --git a/src/transport.c b/src/transport.c
+index 21be9d2b80..a8bb588a4b 100644
+--- a/src/transport.c
++++ b/src/transport.c
+@@ -186,6 +186,7 @@ fullpacket(LIBSSH2_SESSION * session, int encrypted /* 1 or 0 */ )
+ struct transportpacket *p = &session->packet;
+ int rc;
+ int compressed;
++ uint32_t seq = session->remote.seqno;
+
+ if(session->fullpacket_state == libssh2_NB_state_idle) {
+ session->fullpacket_macstate = LIBSSH2_MAC_CONFIRMED;
+@@ -317,7 +318,7 @@ fullpacket(LIBSSH2_SESSION * session, int encrypted /* 1 or 0 */ )
+ if(session->fullpacket_state == libssh2_NB_state_created) {
+ rc = _libssh2_packet_add(session, p->payload,
+ session->fullpacket_payload_len,
+- session->fullpacket_macstate);
++ session->fullpacket_macstate, seq);
+ if(rc == LIBSSH2_ERROR_EAGAIN)
+ return rc;
+ if(rc) {
+@@ -328,6 +329,11 @@ fullpacket(LIBSSH2_SESSION * session, int encrypted /* 1 or 0 */ )
+
+ session->fullpacket_state = libssh2_NB_state_idle;
+
++ if(session->kex_strict &&
++ session->fullpacket_packet_type == SSH_MSG_NEWKEYS) {
++ session->remote.seqno = 0;
++ }
++
+ return session->fullpacket_packet_type;
+ }
+
+@@ -1093,6 +1099,10 @@ int _libssh2_transport_send(LIBSSH2_SESSION *session,
+
+ session->local.seqno++;
+
++ if(session->kex_strict && data[0] == SSH_MSG_NEWKEYS) {
++ session->local.seqno = 0;
++ }
++
+ ret = LIBSSH2_SEND(session, p->outbuf, total_length,
+ LIBSSH2_SOCKET_SEND_FLAGS(session));
+ if(ret < 0)
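The patch above implements OpenSSH's "strict KEX" countermeasure: each side advertises a pseudo-algorithm (kex-strict-c-v00@openssh.com / kex-strict-s-v00@openssh.com) in its KEXINIT name-list, any packet other than KEXINIT before the initial key exchange becomes a fatal protocol error, and the packet sequence numbers are reset at NEWKEYS so nothing injected during the handshake can shift them. A minimal Python sketch of those rules, using invented names (Endpoint, agree_instr) rather than the libssh2 API:

# Illustrative sketch of the strict-KEX rules (invented names, not the libssh2 API).

STRICT_SERVER = "kex-strict-s-v00@openssh.com"

def agree_instr(haystack: str, needle: str) -> bool:
    # Comma-bounded match, in the spirit of _libssh2_kex_agree_instr():
    # the needle must be a whole element of the comma-separated name-list.
    return needle in haystack.split(",")

class Endpoint:
    def __init__(self):
        self.seqno = 0          # packet sequence number for this direction
        self.kex_strict = False

    def on_kexinit(self, kex_algorithms, is_first_packet):
        # The pseudo-algorithm in the peer's KEXINIT enables strict KEX.
        if agree_instr(kex_algorithms, STRICT_SERVER):
            self.kex_strict = True
        # Under strict KEX, KEXINIT must be the very first packet received.
        if self.kex_strict and not is_first_packet:
            raise ConnectionError("strict KEX violation: "
                                  "KEXINIT was not the first packet")

    def on_newkeys(self):
        # Strict KEX resets the sequence number at NEWKEYS, so packets
        # injected during the handshake cannot shift it (the Terrapin trick).
        if self.kex_strict:
            self.seqno = 0

ep = Endpoint()
ep.on_kexinit("curve25519-sha256,ext-info-s," + STRICT_SERVER, is_first_packet=True)
ep.seqno = 7   # pretend some handshake packets were exchanged
ep.on_newkeys()
assert ep.kex_strict and ep.seqno == 0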
diff --git a/pkgs/development/libraries/libssh2/default.nix b/pkgs/development/libraries/libssh2/default.nix
index 091885a1f084..3afef15d2109 100644
--- a/pkgs/development/libraries/libssh2/default.nix
+++ b/pkgs/development/libraries/libssh2/default.nix
@@ -9,6 +9,13 @@ stdenv.mkDerivation rec {
sha256 = "sha256-NzYWHkHiaTMk3rOMJs/cPv5iCdY0ukJY2xzs/2pa1GE=";
};
+ # FIXME: temporarily not applied on *-darwin to avoid rebuild of stdenv
+ patches = lib.optionals (!stdenv.isDarwin) [
+ # fetchpatch cannot be used due to infinite recursion
+ # https://github.com/libssh2/libssh2/commit/d34d9258b8420b19ec3f97b4cc5bf7aa7d98e35a
+ ./CVE-2023-48795.patch
+ ];
+
outputs = [ "out" "dev" "devdoc" ];
propagatedBuildInputs = [ openssl ]; # see Libs: in libssh2.pc
diff --git a/pkgs/development/libraries/tracker/default.nix b/pkgs/development/libraries/tracker/default.nix
index 4ba70506b718..1df7553e6c69 100644
--- a/pkgs/development/libraries/tracker/default.nix
+++ b/pkgs/development/libraries/tracker/default.nix
@@ -1,6 +1,7 @@
{ stdenv
, lib
, fetchurl
+, fetchpatch
, gettext
, meson
, ninja
@@ -40,6 +41,16 @@ stdenv.mkDerivation rec {
sha256 = "+XLVCse6/czxE7HrmdyuNUBGhameVb/vFvOsg7Tel00=";
};
+ patches = [
+ # Backport sqlite-3.42.0 compatibility:
+ # https://gitlab.gnome.org/GNOME/tracker/-/merge_requests/600
+ (fetchpatch {
+ name = "sqlite-3.42.0.patch";
+ url = "https://gitlab.gnome.org/GNOME/tracker/-/commit/4cbbd1773a7367492fa3b3e3804839654e18a12a.patch";
+ hash = "sha256-w5D9I0P1DdyILhpjslh6ifojmlUiBoeFnxHPIr0rO3s=";
+ })
+ ];
+
strictDeps = true;
depsBuildBuild = [
diff --git a/pkgs/development/libraries/wolfssl/default.nix b/pkgs/development/libraries/wolfssl/default.nix
index 314a5a01a50b..41999210161a 100644
--- a/pkgs/development/libraries/wolfssl/default.nix
+++ b/pkgs/development/libraries/wolfssl/default.nix
@@ -52,7 +52,7 @@ stdenv.mkDerivation rec {
autoreconfHook
];
- doCheck = true;
+ doCheck = false; # fix in PR #266483 won't apply, and nixpkgs 23.05 is dying anyway
nativeCheckInputs = [ openssl ];
postInstall = ''
diff --git a/pkgs/development/python-modules/gevent/22.10.2-CVE-2023-41419.patch b/pkgs/development/python-modules/gevent/22.10.2-CVE-2023-41419.patch
new file mode 100644
index 000000000000..cc773acb4ccd
--- /dev/null
+++ b/pkgs/development/python-modules/gevent/22.10.2-CVE-2023-41419.patch
@@ -0,0 +1,648 @@
+Based on upstream 2f53c851eaf926767fbac62385615efd4886221c with minor
+adjustments to apply to 22.10.2
+
+diff --git a/docs/changes/1989.bugfix b/docs/changes/1989.bugfix
+new file mode 100644
+index 00000000..7ce4a93a
+--- /dev/null
++++ b/docs/changes/1989.bugfix
+@@ -0,0 +1,26 @@
++Make ``gevent.pywsgi`` comply more closely with the HTTP specification
++for chunked transfer encoding. In particular, we are much stricter
++about trailers, and trailers that are invalid (too long or featuring
++disallowed characters) forcibly close the connection to the client
++*after* the results have been sent.
++
++Trailers otherwise continue to be ignored and are not available to the
++WSGI application.
++
++Previously, carefully crafted invalid trailers in chunked requests on
++keep-alive connections might appear as two requests to
++``gevent.pywsgi``. Because this was handled exactly as a normal
++keep-alive connection with two requests, the WSGI application should
++handle it normally. However, if you were counting on some upstream
++server to filter incoming requests based on paths or header fields,
++and the upstream server simply passed trailers through without
++validating them, then this embedded second request would bypass those
++checks. (If the upstream server validated that the trailers meet the
++HTTP specification, this could not occur, because characters that are
++required in an HTTP request, like a space, are not allowed in
++trailers.) CVE-2023-41419 was reserved for this.
++
++Our thanks to the original reporters, Keran Mu
++(mkr22@mails.tsinghua.edu.cn) and Jianjun Chen
++(jianjun@tsinghua.edu.cn), from Tsinghua University and Zhongguancun
++Laboratory.
+diff --git a/src/gevent/pywsgi.py b/src/gevent/pywsgi.py
+index 0ebe0954..837903f5 100644
+--- a/src/gevent/pywsgi.py
++++ b/src/gevent/pywsgi.py
+@@ -8,6 +8,25 @@ WSGI work is handled by :class:`WSGIHandler` --- a new instance is
+ created for each request. The server can be customized to use
+ different subclasses of :class:`WSGIHandler`.
+
++.. important::
++
++ This server is intended primarily for development and testing, and
++ secondarily for other "safe" scenarios where it will not be exposed to
++ potentially malicious input. The code has not been security audited,
++ and is not intended for direct exposure to the public Internet. For production
++ usage on the Internet, either choose a production-strength server such as
++ gunicorn, or put a reverse proxy between gevent and the Internet.
++
++.. versionchanged:: NEXT
++
++ Complies more closely with the HTTP specification for chunked transfer encoding.
++ In particular, we are much stricter about trailers, and trailers that
++ are invalid (too long or featuring disallowed characters) forcibly close
++ the connection to the client *after* the results have been sent.
++
++ Trailers otherwise continue to be ignored and are not available to the
++ WSGI application.
++
+ """
+ from __future__ import absolute_import
+
+@@ -22,10 +41,7 @@ import time
+ import traceback
+ from datetime import datetime
+
+-try:
+- from urllib import unquote
+-except ImportError:
+- from urllib.parse import unquote # python 2 pylint:disable=import-error,no-name-in-module
++from urllib.parse import unquote
+
+ from gevent import socket
+ import gevent
+@@ -53,29 +69,52 @@ __all__ = [
+
+ MAX_REQUEST_LINE = 8192
+ # Weekday and month names for HTTP date/time formatting; always English!
+-_WEEKDAYNAME = ["Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"]
+-_MONTHNAME = [None, # Dummy so we can use 1-based month numbers
++_WEEKDAYNAME = ("Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun")
++_MONTHNAME = (None, # Dummy so we can use 1-based month numbers
+ "Jan", "Feb", "Mar", "Apr", "May", "Jun",
+- "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"]
++ "Jul", "Aug", "Sep", "Oct", "Nov", "Dec")
+
+ # The contents of the "HEX" grammar rule for HTTP, upper and lowercase A-F plus digits,
+ # in byte form for comparing to the network.
+ _HEX = string.hexdigits.encode('ascii')
+
++# The characters allowed in "token" rules.
++
++# token = 1*tchar
++# tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*"
++# / "+" / "-" / "." / "^" / "_" / "`" / "|" / "~"
++# / DIGIT / ALPHA
++# ; any VCHAR, except delimiters
++# ALPHA = %x41-5A / %x61-7A ; A-Z / a-z
++_ALLOWED_TOKEN_CHARS = frozenset(
++ # Remember we have to be careful because bytestrings
++ # inexplicably iterate as integers, which are not equal to bytes.
++
++ # explicit chars then DIGIT
++ (c.encode('ascii') for c in "!#$%&'*+-.^_`|~0123456789")
++ # Then we add ALPHA
++) | {c.encode('ascii') for c in string.ascii_letters}
++assert b'A' in _ALLOWED_TOKEN_CHARS
++
++
+ # Errors
+ _ERRORS = {}
+ _INTERNAL_ERROR_STATUS = '500 Internal Server Error'
+ _INTERNAL_ERROR_BODY = b'Internal Server Error'
+-_INTERNAL_ERROR_HEADERS = [('Content-Type', 'text/plain'),
+- ('Connection', 'close'),
+- ('Content-Length', str(len(_INTERNAL_ERROR_BODY)))]
++_INTERNAL_ERROR_HEADERS = (
++ ('Content-Type', 'text/plain'),
++ ('Connection', 'close'),
++ ('Content-Length', str(len(_INTERNAL_ERROR_BODY)))
++)
+ _ERRORS[500] = (_INTERNAL_ERROR_STATUS, _INTERNAL_ERROR_HEADERS, _INTERNAL_ERROR_BODY)
+
+ _BAD_REQUEST_STATUS = '400 Bad Request'
+ _BAD_REQUEST_BODY = ''
+-_BAD_REQUEST_HEADERS = [('Content-Type', 'text/plain'),
+- ('Connection', 'close'),
+- ('Content-Length', str(len(_BAD_REQUEST_BODY)))]
++_BAD_REQUEST_HEADERS = (
++ ('Content-Type', 'text/plain'),
++ ('Connection', 'close'),
++ ('Content-Length', str(len(_BAD_REQUEST_BODY)))
++)
+ _ERRORS[400] = (_BAD_REQUEST_STATUS, _BAD_REQUEST_HEADERS, _BAD_REQUEST_BODY)
+
+ _REQUEST_TOO_LONG_RESPONSE = b"HTTP/1.1 414 Request URI Too Long\r\nConnection: close\r\nContent-length: 0\r\n\r\n"
+@@ -204,23 +243,32 @@ class Input(object):
+ # Read and return the next integer chunk length. If no
+ # chunk length can be read, raises _InvalidClientInput.
+
+- # Here's the production for a chunk:
+- # (http://www.w3.org/Protocols/rfc2616/rfc2616-sec3.html)
+- # chunk = chunk-size [ chunk-extension ] CRLF
+- # chunk-data CRLF
+- # chunk-size = 1*HEX
+- # chunk-extension= *( ";" chunk-ext-name [ "=" chunk-ext-val ] )
+- # chunk-ext-name = token
+- # chunk-ext-val = token | quoted-string
+-
+- # To cope with malicious or broken clients that fail to send valid
+- # chunk lines, the strategy is to read character by character until we either reach
+- # a ; or newline. If at any time we read a non-HEX digit, we bail. If we hit a
+- # ;, indicating an chunk-extension, we'll read up to the next
+- # MAX_REQUEST_LINE characters
+- # looking for the CRLF, and if we don't find it, we bail. If we read more than 16 hex characters,
+- # (the number needed to represent a 64-bit chunk size), we bail (this protects us from
+- # a client that sends an infinite stream of `F`, for example).
++ # Here's the production for a chunk (actually the whole body):
++ # (https://www.rfc-editor.org/rfc/rfc7230#section-4.1)
++
++ # chunked-body = *chunk
++ # last-chunk
++ # trailer-part
++ # CRLF
++ #
++ # chunk = chunk-size [ chunk-ext ] CRLF
++ # chunk-data CRLF
++ # chunk-size = 1*HEXDIG
++ # last-chunk = 1*("0") [ chunk-ext ] CRLF
++ # trailer-part = *( header-field CRLF )
++ # chunk-data = 1*OCTET ; a sequence of chunk-size octets
++
++ # To cope with malicious or broken clients that fail to send
++ # valid chunk lines, the strategy is to read character by
++ # character until we either reach a ; or newline. If at any
++ # time we read a non-HEX digit, we bail. If we hit a ;,
++ # indicating a chunk-extension, we'll read up to the next
++ # MAX_REQUEST_LINE characters ("A server ought to limit the
++ # total length of chunk extensions received") looking for the
++ # CRLF, and if we don't find it, we bail. If we read more than
++ # 16 hex characters, (the number needed to represent a 64-bit
++ # chunk size), we bail (this protects us from a client that
++ # sends an infinite stream of `F`, for example).
+
+ buf = BytesIO()
+ while 1:
+@@ -228,16 +276,20 @@ class Input(object):
+ if not char:
+ self._chunked_input_error = True
+ raise _InvalidClientInput("EOF before chunk end reached")
+- if char == b'\r':
+- break
+- if char == b';':
++
++ if char in (
++ b'\r', # Beginning EOL
++ b';', # Beginning extension
++ ):
+ break
+
+- if char not in _HEX:
++ if char not in _HEX: # Invalid data.
+ self._chunked_input_error = True
+ raise _InvalidClientInput("Non-hex data", char)
++
+ buf.write(char)
+- if buf.tell() > 16:
++
++ if buf.tell() > 16: # Too many hex bytes
+ self._chunked_input_error = True
+ raise _InvalidClientInput("Chunk-size too large.")
+
+@@ -257,11 +309,72 @@ class Input(object):
+ if char == b'\r':
+ # We either got here from the main loop or from the
+ # end of an extension
++ self.__read_chunk_size_crlf(rfile, newline_only=True)
++ result = int(buf.getvalue(), 16)
++ if result == 0:
++ # The only time a chunk size of zero is allowed is the final
++ # chunk. It is either followed by another \r\n, or some trailers
++ # which are then followed by \r\n.
++ while self.__read_chunk_trailer(rfile):
++ pass
++ return result
++
++ # Trailers have the following production (they are a header-field followed by CRLF)
++ # See above for the definition of "token".
++ #
++ # header-field = field-name ":" OWS field-value OWS
++ # field-name = token
++ # field-value = *( field-content / obs-fold )
++ # field-content = field-vchar [ 1*( SP / HTAB ) field-vchar ]
++ # field-vchar = VCHAR / obs-text
++ # obs-fold = CRLF 1*( SP / HTAB )
++ # ; obsolete line folding
++ # ; see Section 3.2.4
++
++
++ def __read_chunk_trailer(self, rfile, ):
++ # With rfile positioned just after a \r\n, read a trailer line.
++ # Return a true value if a non-empty trailer was read, and
++ # return false if an empty trailer was read (meaning the trailers are
++ # done).
++ # If a single line exceeds the MAX_REQUEST_LINE, raise an exception.
++ # If the field-name portion contains invalid characters, raise an exception.
++
++ i = 0
++ empty = True
++ seen_field_name = False
++ while i < MAX_REQUEST_LINE:
+ char = rfile.read(1)
+- if char != b'\n':
++ if char == b'\r':
++ # Either read the next \n or raise an error.
++ self.__read_chunk_size_crlf(rfile, newline_only=True)
++ break
++ # Not a \r, so we are NOT an empty chunk.
++ empty = False
++ if char == b':' and i > 0:
++ # We're ending the field-name part; stop validating characters.
++ # Unless : was the first character...
++ seen_field_name = True
++ if not seen_field_name and char not in _ALLOWED_TOKEN_CHARS:
++ raise _InvalidClientInput('Invalid token character: %r' % (char,))
++ i += 1
++ else:
++ # We read too much
++ self._chunked_input_error = True
++ raise _InvalidClientInput("Too large chunk trailer")
++ return not empty
++
++ def __read_chunk_size_crlf(self, rfile, newline_only=False):
++ # Also for safety, correctly verify that we get \r\n when expected.
++ if not newline_only:
++ char = rfile.read(1)
++ if char != b'\r':
+ self._chunked_input_error = True
+- raise _InvalidClientInput("Line didn't end in CRLF")
+- return int(buf.getvalue(), 16)
++ raise _InvalidClientInput("Line didn't end in CRLF: %r" % (char,))
++ char = rfile.read(1)
++ if char != b'\n':
++ self._chunked_input_error = True
++ raise _InvalidClientInput("Line didn't end in LF: %r" % (char,))
+
+ def _chunked_read(self, length=None, use_readline=False):
+ # pylint:disable=too-many-branches
+@@ -294,7 +407,7 @@ class Input(object):
+
+ self.position += datalen
+ if self.chunk_length == self.position:
+- rfile.readline()
++ self.__read_chunk_size_crlf(rfile)
+
+ if length is not None:
+ length -= datalen
+@@ -307,9 +420,9 @@ class Input(object):
+ # determine the next size to read
+ self.chunk_length = self.__read_chunk_length(rfile)
+ self.position = 0
+- if self.chunk_length == 0:
+- # Last chunk. Terminates with a CRLF.
+- rfile.readline()
++ # If chunk_length was 0, we already read any trailers and
++ # validated that we have ended with \r\n\r\n.
++
+ return b''.join(response)
+
+ def read(self, length=None):
+@@ -532,7 +645,8 @@ class WSGIHandler(object):
+ elif len(words) == 2:
+ self.command, self.path = words
+ if self.command != "GET":
+- raise _InvalidClientRequest('Expected GET method: %r' % (raw_requestline,))
++ raise _InvalidClientRequest('Expected GET method; Got command=%r; path=%r; raw=%r' % (
++ self.command, self.path, raw_requestline,))
+ self.request_version = "HTTP/0.9"
+ # QQQ I'm pretty sure we can drop support for HTTP/0.9
+ else:
+@@ -1000,14 +1114,28 @@ class WSGIHandler(object):
+ finally:
+ try:
+ self.wsgi_input._discard()
+- except (socket.error, IOError):
+- # Don't let exceptions during discarding
++ except _InvalidClientInput:
++ # This one is deliberately raised to the outer
++ # scope, because, with the incoming stream in some bad state,
++ # we can't be sure we can synchronize and properly parse the next
++ # request.
++ raise
++ except socket.error:
++ # Don't let socket exceptions during discarding
+ # input override any exception that may have been
+ # raised by the application, such as our own _InvalidClientInput.
+ # In the general case, these aren't even worth logging (see the comment
+ # just below)
+ pass
+- except _InvalidClientInput:
++ except _InvalidClientInput as ex:
++ # DO log this one because:
++ # - Some of the data may have been read and acted on by the
++ # application;
++ # - The response may or may not have been sent;
++ # - It's likely that the client is bad, or malicious, and
++ # users might wish to take steps to block the client.
++ self._handle_client_error(ex)
++ self.close_connection = True
+ self._send_error_response_if_possible(400)
+ except socket.error as ex:
+ if ex.args[0] in self.ignored_socket_errors:
+@@ -1054,17 +1182,22 @@ class WSGIHandler(object):
+ def _handle_client_error(self, ex):
+ # Called for invalid client input
+ # Returns the appropriate error response.
+- if not isinstance(ex, ValueError):
++ if not isinstance(ex, (ValueError, _InvalidClientInput)):
+ # XXX: Why not self._log_error to send it through the loop's
+ # handle_error method?
++ # _InvalidClientRequest is a ValueError; _InvalidClientInput is an IOError.
+ traceback.print_exc()
+ if isinstance(ex, _InvalidClientRequest):
+ # No formatting needed, that's already been handled. In fact, because the
+ # formatted message contains user input, it might have a % in it, and attempting
+ # to format that with no arguments would be an error.
+- self.log_error(ex.formatted_message)
++ # However, the error messages do not include the requesting IP
++ # necessarily, so we do add that.
++ self.log_error('(from %s) %s', self.client_address, ex.formatted_message)
+ else:
+- self.log_error('Invalid request: %s', str(ex) or ex.__class__.__name__)
++ self.log_error('Invalid request (from %s): %s',
++ self.client_address,
++ str(ex) or ex.__class__.__name__)
+ return ('400', _BAD_REQUEST_RESPONSE)
+
+ def _headers(self):
+diff --git a/src/gevent/subprocess.py b/src/gevent/subprocess.py
+index 46a82f60..a135d8aa 100644
+--- a/src/gevent/subprocess.py
++++ b/src/gevent/subprocess.py
+@@ -370,10 +370,11 @@ def check_output(*popenargs, **kwargs):
+
+ To capture standard error in the result, use ``stderr=STDOUT``::
+
+- >>> print(check_output(["/bin/sh", "-c",
++ >>> output = check_output(["/bin/sh", "-c",
+ ... "ls -l non_existent_file ; exit 0"],
+- ... stderr=STDOUT).decode('ascii').strip())
+- ls: non_existent_file: No such file or directory
++ ... stderr=STDOUT).decode('ascii').strip()
++ >>> print(output.rsplit(':', 1)[1].strip())
++ No such file or directory
+
+ There is an additional optional argument, "input", allowing you to
+ pass a string to the subprocess's stdin. If you use this argument
+diff --git a/src/gevent/testing/testcase.py b/src/gevent/testing/testcase.py
+index 47484094..862e46ee 100644
+--- a/src/gevent/testing/testcase.py
++++ b/src/gevent/testing/testcase.py
+@@ -225,7 +225,7 @@ class TestCaseMetaClass(type):
+ classDict.pop(key)
+ # XXX: When did we stop doing this?
+ #value = wrap_switch_count_check(value)
+- value = _wrap_timeout(timeout, value)
++ #value = _wrap_timeout(timeout, value)
+ error_fatal = getattr(value, 'error_fatal', error_fatal)
+ if error_fatal:
+ value = errorhandler.wrap_error_fatal(value)
+diff --git a/src/gevent/tests/test__pywsgi.py b/src/gevent/tests/test__pywsgi.py
+index d2125a86..d46030bf 100644
+--- a/src/gevent/tests/test__pywsgi.py
++++ b/src/gevent/tests/test__pywsgi.py
+@@ -25,21 +25,11 @@ from gevent import monkey
+ monkey.patch_all()
+
+ from contextlib import contextmanager
+-try:
+- from urllib.parse import parse_qs
+-except ImportError:
+- # Python 2
+- from urlparse import parse_qs
++from urllib.parse import parse_qs
+ import os
+ import sys
+-try:
+- # On Python 2, we want the C-optimized version if
+- # available; it has different corner-case behaviour than
+- # the Python implementation, and it used by socket.makefile
+- # by default.
+- from cStringIO import StringIO
+-except ImportError:
+- from io import BytesIO as StringIO
++from io import BytesIO as StringIO
++
+ import weakref
+ import unittest
+ from wsgiref.validate import validator
+@@ -156,6 +146,10 @@ class Response(object):
+ @classmethod
+ def read(cls, fd, code=200, reason='default', version='1.1',
+ body=None, chunks=None, content_length=None):
++ """
++ Read an HTTP response, optionally perform assertions,
++ and return the Response object.
++ """
+ # pylint:disable=too-many-branches
+ _status_line, headers = read_headers(fd)
+ self = cls(_status_line, headers)
+@@ -716,7 +710,14 @@ class TestNegativeReadline(TestCase):
+
+ class TestChunkedPost(TestCase):
+
++ calls = 0
++
++ def setUp(self):
++ super().setUp()
++ self.calls = 0
++
+ def application(self, env, start_response):
++ self.calls += 1
+ self.assertTrue(env.get('wsgi.input_terminated'))
+ start_response('200 OK', [('Content-Type', 'text/plain')])
+ if env['PATH_INFO'] == '/a':
+@@ -730,6 +731,8 @@ class TestChunkedPost(TestCase):
+ if env['PATH_INFO'] == '/c':
+ return list(iter(lambda: env['wsgi.input'].read(1), b''))
+
++ return [b'We should not get here', env['PATH_INFO'].encode('ascii')]
++
+ def test_014_chunked_post(self):
+ data = (b'POST /a HTTP/1.1\r\nHost: localhost\r\nConnection: close\r\n'
+ b'Transfer-Encoding: chunked\r\n\r\n'
+@@ -797,6 +800,170 @@ class TestChunkedPost(TestCase):
+ fd.write(data)
+ read_http(fd, code=400)
+
++ def test_trailers_keepalive_ignored(self):
++ # Trailers after a chunk are ignored.
++ data = (
++ b'POST /a HTTP/1.1\r\n'
++ b'Host: localhost\r\n'
++ b'Connection: keep-alive\r\n'
++ b'Transfer-Encoding: chunked\r\n'
++ b'\r\n'
++ b'2\r\noh\r\n'
++ b'4\r\n hai\r\n'
++ b'0\r\n' # last-chunk
++ # Normally the final CRLF would go here, but if you put in a
++ # trailer, it doesn't.
++ b'trailer1: value1\r\n'
++ b'trailer2: value2\r\n'
++ b'\r\n' # Really terminate the chunk.
++ b'POST /a HTTP/1.1\r\n'
++ b'Host: localhost\r\n'
++ b'Connection: close\r\n'
++ b'Transfer-Encoding: chunked\r\n'
++ b'\r\n'
++ b'2\r\noh\r\n'
++ b'4\r\n bye\r\n'
++ b'0\r\n' # last-chunk
++ )
++ with self.makefile() as fd:
++ fd.write(data)
++ read_http(fd, body='oh hai')
++ read_http(fd, body='oh bye')
++
++ self.assertEqual(self.calls, 2)
++
++ def test_trailers_too_long(self):
++ # A trailer line longer than MAX_REQUEST_LINE aborts the connection.
++ data = (
++ b'POST /a HTTP/1.1\r\n'
++ b'Host: localhost\r\n'
++ b'Connection: keep-alive\r\n'
++ b'Transfer-Encoding: chunked\r\n'
++ b'\r\n'
++ b'2\r\noh\r\n'
++ b'4\r\n hai\r\n'
++ b'0\r\n' # last-chunk
++ # Normally the final CRLF would go here, but if you put in a
++ # trailer, it doesn't.
++ b'trailer2: value2' # note lack of \r\n
++ )
++ data += b't' * pywsgi.MAX_REQUEST_LINE
++ # No termination, because we detect the trailer as being too
++ # long and abort the connection.
++ with self.makefile() as fd:
++ fd.write(data)
++ read_http(fd, body='oh hai')
++ with self.assertRaises(ConnectionClosed):
++ read_http(fd, body='oh bye')
++
++ def test_trailers_request_smuggling_missing_last_chunk_keep_alive(self):
++ # When something that looks like a request line comes in the trailer
++ # as the first line, immediately after an invalid last chunk.
++ # We detect this and abort the connection, because the
++ # whitespace in the GET line isn't a legal part of a trailer.
++ # If we didn't abort the connection, then, because we specified
++ # keep-alive, the server would be hanging around waiting for more input.
++ data = (
++ b'POST /a HTTP/1.1\r\n'
++ b'Host: localhost\r\n'
++ b'Connection: keep-alive\r\n'
++ b'Transfer-Encoding: chunked\r\n'
++ b'\r\n'
++ b'2\r\noh\r\n'
++ b'4\r\n hai\r\n'
++ b'0' # last-chunk, but missing the \r\n
++ # Normally the final CRLF would go here, but if you put in a
++ # trailer, it doesn't.
++ # b'\r\n'
++ b'GET /path2?a=:123 HTTP/1.1\r\n'
++ b'Host: a.com\r\n'
++ b'Connection: close\r\n'
++ b'\r\n'
++ )
++ with self.makefile() as fd:
++ fd.write(data)
++ read_http(fd, body='oh hai')
++ with self.assertRaises(ConnectionClosed):
++ read_http(fd)
++
++ self.assertEqual(self.calls, 1)
++
++ def test_trailers_request_smuggling_missing_last_chunk_close(self):
++ # Same as the above, except the trailers are actually valid
++ # and since we ask to close the connection we don't get stuck
++ # waiting for more input.
++ data = (
++ b'POST /a HTTP/1.1\r\n'
++ b'Host: localhost\r\n'
++ b'Connection: close\r\n'
++ b'Transfer-Encoding: chunked\r\n'
++ b'\r\n'
++ b'2\r\noh\r\n'
++ b'4\r\n hai\r\n'
++ b'0\r\n' # last-chunk
++ # Normally the final CRLF would go here, but if you put in a
++ # trailer, it doesn't.
++ # b'\r\n'
++ b'GETpath2a:123 HTTP/1.1\r\n'
++ b'Host: a.com\r\n'
++ b'Connection: close\r\n'
++ b'\r\n'
++ )
++ with self.makefile() as fd:
++ fd.write(data)
++ read_http(fd, body='oh hai')
++ with self.assertRaises(ConnectionClosed):
++ read_http(fd)
++
++ def test_trailers_request_smuggling_header_first(self):
++ # When something that looks like a header comes in the first line.
++ data = (
++ b'POST /a HTTP/1.1\r\n'
++ b'Host: localhost\r\n'
++ b'Connection: keep-alive\r\n'
++ b'Transfer-Encoding: chunked\r\n'
++ b'\r\n'
++ b'2\r\noh\r\n'
++ b'4\r\n hai\r\n'
++ b'0\r\n' # last-chunk, but only one CRLF
++ b'Header: value\r\n'
++ b'GET /path2?a=:123 HTTP/1.1\r\n'
++ b'Host: a.com\r\n'
++ b'Connection: close\r\n'
++ b'\r\n'
++ )
++ with self.makefile() as fd:
++ fd.write(data)
++ read_http(fd, body='oh hai')
++ with self.assertRaises(ConnectionClosed):
++ read_http(fd, code=400)
++
++ self.assertEqual(self.calls, 1)
++
++ def test_trailers_request_smuggling_request_terminates_then_header(self):
++ data = (
++ b'POST /a HTTP/1.1\r\n'
++ b'Host: localhost\r\n'
++ b'Connection: keep-alive\r\n'
++ b'Transfer-Encoding: chunked\r\n'
++ b'\r\n'
++ b'2\r\noh\r\n'
++ b'4\r\n hai\r\n'
++ b'0\r\n' # last-chunk
++ b'\r\n'
++ b'Header: value'
++ b'GET /path2?a=:123 HTTP/1.1\r\n'
++ b'Host: a.com\r\n'
++ b'Connection: close\r\n'
++ b'\r\n'
++ )
++ with self.makefile() as fd:
++ fd.write(data)
++ read_http(fd, body='oh hai')
++ read_http(fd, code=400)
++
++ self.assertEqual(self.calls, 1)
++
+
+ class TestUseWrite(TestCase):
+
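The changelog entry at the top of this patch explains the smuggling vector in prose: on a keep-alive connection, trailer bytes that look like a request line could previously be replayed as a second request, bypassing any upstream filtering. Restricting trailer field-names to RFC 7230 token characters is what catches that, because a request line necessarily contains spaces and slashes, which are not tchars. A minimal sketch of that check, assuming an invented helper name (trailer_is_valid) rather than gevent's internals:

# Illustrative sketch of strict trailer field-name validation (invented helper,
# not gevent's internals).
import string

# RFC 7230: tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*" / "+" / "-" / "." /
#                   "^" / "_" / "`" / "|" / "~" / DIGIT / ALPHA
ALLOWED_TOKEN_CHARS = frozenset("!#$%&'*+-.^_`|~" + string.digits + string.ascii_letters)

def trailer_is_valid(line: bytes) -> bool:
    # Accept "field-name: value" only when every byte before the first ':'
    # is a legal token character.
    name, sep, _value = line.partition(b":")
    if not sep or not name:
        return False
    return all(chr(c) in ALLOWED_TOKEN_CHARS for c in name)

assert trailer_is_valid(b"trailer1: value1")
# A smuggled request line has spaces and '/' in its "field-name" part, so a
# strict parser tears the connection down instead of replaying it as a request.
assert not trailer_is_valid(b"GET /path2?a=:123 HTTP/1.1")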
diff --git a/pkgs/development/python-modules/gevent/default.nix b/pkgs/development/python-modules/gevent/default.nix
index 690c0b73e6ea..50f637033ef5 100644
--- a/pkgs/development/python-modules/gevent/default.nix
+++ b/pkgs/development/python-modules/gevent/default.nix
@@ -9,6 +9,12 @@
, zope_event
, zope_interface
, pythonOlder
+
+# for passthru.tests
+, dulwich
+, gunicorn
+, opentracing
+, pika
}:
buildPythonPackage rec {
@@ -23,6 +29,10 @@ buildPythonPackage rec {
hash = "sha256-HKAdoXbuN7NSeicC99QNvJ/7jPx75aA7+k+e7EXlXEY=";
};
+ patches = [
+ ./22.10.2-CVE-2023-41419.patch
+ ];
+
nativeBuildInputs = [
setuptools
];
@@ -45,6 +55,14 @@ buildPythonPackage rec {
"gevent"
];
+ passthru.tests = {
+ inherit
+ dulwich
+ gunicorn
+ opentracing
+ pika;
+ } // lib.filterAttrs (k: v: lib.hasInfix "gevent" k) python.pkgs;
+
meta = with lib; {
description = "Coroutine-based networking library";
homepage = "http://www.gevent.org/";
diff --git a/pkgs/development/python-modules/gst-python/default.nix b/pkgs/development/python-modules/gst-python/default.nix
index 2f9ced5aec75..a5d0a3e90498 100644
--- a/pkgs/development/python-modules/gst-python/default.nix
+++ b/pkgs/development/python-modules/gst-python/default.nix
@@ -14,7 +14,7 @@
buildPythonPackage rec {
pname = "gst-python";
- version = "1.22.6";
+ version = "1.22.8";
format = "other";
@@ -22,7 +22,7 @@ buildPythonPackage rec {
src = fetchurl {
url = "${meta.homepage}/src/gst-python/${pname}-${version}.tar.xz";
- hash = "sha256-Ud4tbROxLOCV6sl8C5TuWcKuujcSu3Rit4xNV93hdsU=";
+ hash = "sha256-1cuPFEBUoqEQ5mcr1RLksV1bG42YecGSuXI1Ne+3C48=";
};
# Python 2.x is not supported.
diff --git a/pkgs/development/python-modules/jupyter-server/default.nix b/pkgs/development/python-modules/jupyter-server/default.nix
index b8ecde78eda7..4f342553d3b8 100644
--- a/pkgs/development/python-modules/jupyter-server/default.nix
+++ b/pkgs/development/python-modules/jupyter-server/default.nix
@@ -1,6 +1,7 @@
{ lib
, stdenv
, buildPythonPackage
+, fetchpatch
, fetchPypi
, pythonOlder
, hatch-jupyter-builder
@@ -45,6 +46,19 @@ buildPythonPackage rec {
hash= "sha256-jddZkukLfKVWeUoe1cylEmPGl6vG0N9WGvV0qhwKAz8=";
};
+ patches = [
+ (fetchpatch {
+ name = "CVE-2023-39968.patch";
+ url = "https://github.com/jupyter-server/jupyter_server/commit/290362593b2ffb23c59f8114d76f77875de4b925.patch";
+ hash = "sha256-EhWKTpjPp2iwLWpR4O6oZpf3yJmwe25SEG288wAiOJE=";
+ })
+ (fetchpatch {
+ name = "CVE-2023-40170.patch";
+ url = "https://github.com/jupyter-server/jupyter_server/commit/87a4927272819f0b1cae1afa4c8c86ee2da002fd.patch";
+ hash = "sha256-D+Dk2dQKNrpXMer0Ezo7PlBwRzHmEi7bGZ45+uNChF8=";
+ })
+ ];
+
nativeBuildInputs = [
hatch-jupyter-builder
hatchling
diff --git a/pkgs/development/python-modules/urllib3/default.nix b/pkgs/development/python-modules/urllib3/default.nix
index 286880bbd71a..1b1ab426b46b 100644
--- a/pkgs/development/python-modules/urllib3/default.nix
+++ b/pkgs/development/python-modules/urllib3/default.nix
@@ -4,6 +4,7 @@
, buildPythonPackage
, certifi
, cryptography
+, fetchpatch
, fetchPypi
, idna
, isPyPy
@@ -28,6 +29,15 @@ buildPythonPackage rec {
hash = "sha256-+OzBu6VmdBNFfFKauVW/jGe0XbeZ0VkGYmFxnjKFgKA=";
};
+ patches = [
+ (fetchpatch {
+ name = "revert-threadsafe-poolmanager.patch";
+ url = "https://github.com/urllib3/urllib3/commit/710114d7810558fd7e224054a566b53bb8601494.patch";
+ revert = true;
+ hash = "sha256-2O0y0Tij1QF4Hx5r+WMxIHDpXTBHign61AXLzsScrGo=";
+ })
+ ];
+
# FIXME: remove backwards compatibility hack
propagatedBuildInputs = passthru.optional-dependencies.brotli
++ passthru.optional-dependencies.socks;
diff --git a/pkgs/misc/ghostscript/default.nix b/pkgs/misc/ghostscript/default.nix
index 09a640b8bdbb..0fd517a2db10 100644
--- a/pkgs/misc/ghostscript/default.nix
+++ b/pkgs/misc/ghostscript/default.nix
@@ -61,11 +61,11 @@ let
in
stdenv.mkDerivation rec {
pname = "ghostscript${lib.optionalString x11Support "-with-X"}";
- version = "10.02.0";
+ version = "10.02.1";
src = fetchurl {
url = "https://github.com/ArtifexSoftware/ghostpdl-downloads/releases/download/gs${lib.replaceStrings ["."] [""] version}/ghostscript-${version}.tar.xz";
- hash = "sha512-xJNEFRBj6RWt1VoKhCwqZF2DYqXLymY70HY49L02maCMreN6nv6QWtWkHgFDU+XhsSaLeSXkMSitMNWwMTlrcQ==";
+ hash = "sha512-7g91TBvYoYQorRTqo+rYD/i5YnWvUBLnqDhPHxBJDaBW7smuPMeRp6E6JOFuVN9bzN0QnH1ToUU0u9c2CjALEQ=";
};
patches = [
diff --git a/pkgs/os-specific/linux/bluez/default.nix b/pkgs/os-specific/linux/bluez/default.nix
index 303eb801478c..a19e82f67e89 100644
--- a/pkgs/os-specific/linux/bluez/default.nix
+++ b/pkgs/os-specific/linux/bluez/default.nix
@@ -36,6 +36,12 @@ in stdenv.mkDerivation rec {
url = "https://git.alpinelinux.org/aports/plain/main/bluez/max-input.patch?id=32b31b484cb13009bd8081c4106e4cf064ec2f1f";
sha256 = "sha256-SczbXtsxBkCO+izH8XOBcrJEO2f7MdtYVT3+2fCV8wU=";
})
+ # CVE-2023-45866 / https://github.com/skysafe/reblog/tree/main/cve-2023-45866
+ (fetchpatch {
+ name = "CVE-2023-45866.patch";
+ url = "https://git.kernel.org/pub/scm/bluetooth/bluez.git/patch/profiles/input?id=25a471a83e02e1effb15d5a488b3f0085eaeb675";
+ sha256 = "sha256-IuPQ18yN0EO/PkqdT/JETyOxdZCKewBiDjGN4CG2GLo=";
+ })
];
buildInputs = [
diff --git a/pkgs/servers/dns/knot-dns/default.nix b/pkgs/servers/dns/knot-dns/default.nix
index 3e8b159f37e6..b43b6e7dff46 100644
--- a/pkgs/servers/dns/knot-dns/default.nix
+++ b/pkgs/servers/dns/knot-dns/default.nix
@@ -7,11 +7,11 @@
stdenv.mkDerivation rec {
pname = "knot-dns";
- version = "3.2.11";
+ version = "3.2.12";
src = fetchurl {
url = "https://secure.nic.cz/files/knot-dns/knot-${version}.tar.xz";
- sha256 = "98656fab2ee0386692511081e66bb541bb6e80c511ff530d76e0cf059dbd70ec";
+ sha256 = "a7b69f0c41748021cfb819ce42f39da076790653fce3bb7c0a39c2e1b0ca0158";
};
outputs = [ "bin" "out" "dev" ];