Mirror of https://github.com/NixOS/nixpkgs.git

Merge master into staging-next

nixpkgs-ci[bot] 2025-02-14 12:05:56 +00:00 committed by GitHub
commit 43a5de6cbb
67 changed files with 1554 additions and 5755 deletions


@@ -253,6 +253,7 @@ pkgs/development/python-modules/buildcatrust/ @ajs124 @lukegb @mweinelt
 /nixos/tests/postgresql @NixOS/postgres
 
 # MySQL/MariaDB and related stuff
+/nixos/modules/services/databases/mysql.nix @6543
 /nixos/modules/services/backup/mysql-backup.nix @6543
 
 # Hardened profile & related modules


@@ -770,7 +770,6 @@ with lib.maintainers;
     mguentner
     ralith
     dandellion
-    sumnerevans
     nickcao
     teutat3s
   ];


@@ -1,4 +1,9 @@
-{ config, lib, pkgs, ... }:
+{
+  config,
+  lib,
+  pkgs,
+  ...
+}:
 
 let
   cfg = config.services.mysql;
@@ -8,8 +13,7 @@ let
   # Oracle MySQL has supported "notify" service type since 8.0
   hasNotify = isMariaDB || (isOracle && lib.versionAtLeast cfg.package.version "8.0");
 
-  mysqldOptions =
-    "--user=${cfg.user} --datadir=${cfg.dataDir} --basedir=${cfg.package}";
+  mysqldOptions = "--user=${cfg.user} --datadir=${cfg.dataDir} --basedir=${cfg.package}";
 
   format = pkgs.formats.ini { listsAsDuplicateKeys = true; };
   configFile = format.generate "my.cnf" cfg.settings;
@@ -18,11 +22,31 @@ in
 {
   imports = [
-    (lib.mkRemovedOptionModule [ "services" "mysql" "pidDir" ] "Don't wait for pidfiles, describe dependencies through systemd.")
-    (lib.mkRemovedOptionModule [ "services" "mysql" "rootPassword" ] "Use socket authentication or set the password outside of the nix store.")
-    (lib.mkRemovedOptionModule [ "services" "mysql" "extraOptions" ] "Use services.mysql.settings.mysqld instead.")
-    (lib.mkRemovedOptionModule [ "services" "mysql" "bind" ] "Use services.mysql.settings.mysqld.bind-address instead.")
-    (lib.mkRemovedOptionModule [ "services" "mysql" "port" ] "Use services.mysql.settings.mysqld.port instead.")
+    (lib.mkRemovedOptionModule [
+      "services"
+      "mysql"
+      "pidDir"
+    ] "Don't wait for pidfiles, describe dependencies through systemd.")
+    (lib.mkRemovedOptionModule [
+      "services"
+      "mysql"
+      "rootPassword"
+    ] "Use socket authentication or set the password outside of the nix store.")
+    (lib.mkRemovedOptionModule [
+      "services"
+      "mysql"
+      "extraOptions"
+    ] "Use services.mysql.settings.mysqld instead.")
+    (lib.mkRemovedOptionModule [
+      "services"
+      "mysql"
+      "bind"
+    ] "Use services.mysql.settings.mysqld.bind-address instead.")
+    (lib.mkRemovedOptionModule [
+      "services"
+      "mysql"
+      "port"
+    ] "Use services.mysql.settings.mysqld.port instead.")
   ];
 
   ###### interface
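For readers migrating configurations, the removal messages above all point at the freeform `services.mysql.settings` tree. A minimal sketch of the replacement shape (only `bind-address` and `port` are named in the messages; the surrounding attributes are illustrative assumptions):

```nix
{
  services.mysql = {
    enable = true;
    package = pkgs.mariadb;
    settings.mysqld = {
      # formerly services.mysql.bind and services.mysql.port
      bind-address = "127.0.0.1";
      port = 3306;
    };
  };
}
```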
@@ -137,7 +161,8 @@ in
     };
 
     initialDatabases = lib.mkOption {
-      type = lib.types.listOf (lib.types.submodule {
+      type = lib.types.listOf (
+        lib.types.submodule {
           options = {
             name = lib.mkOption {
               type = lib.types.str;
@@ -154,7 +179,8 @@ in
               '';
             };
           };
-      });
+        }
+      );
       default = [ ];
       description = ''
         List of database names and their initial schemas that should be used to create databases on the first startup
@@ -190,7 +216,8 @@ in
     };
 
     ensureUsers = lib.mkOption {
-      type = lib.types.listOf (lib.types.submodule {
+      type = lib.types.listOf (
+        lib.types.submodule {
           options = {
             name = lib.mkOption {
               type = lib.types.str;
@@ -221,7 +248,8 @@ in
               '';
             };
           };
-      });
+        }
+      );
       default = [ ];
       description = ''
         Ensures that the specified users exist and have at least the ensured permissions.
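The two option descriptions above are easier to follow next to a concrete value. A hedged sketch (the `schema` attribute and the exact permission string are assumptions about the usual shape of these options; only `name` and `ensurePermissions` appear elsewhere in this diff):

```nix
{
  services.mysql.initialDatabases = [
    # Create the database "wiki" on first startup, optionally from an initial schema.
    { name = "wiki"; schema = ./wiki.sql; }
  ];
  services.mysql.ensureUsers = [
    {
      # Local user "wiki" connects via socket authentication and gets
      # at least these grants ensured on every startup.
      name = "wiki";
      ensurePermissions = {
        "wiki.*" = "ALL PRIVILEGES";
      };
    }
  ];
}
```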
@@ -251,7 +279,11 @@ in
     replication = {
       role = lib.mkOption {
-        type = lib.types.enum [ "master" "slave" "none" ];
+        type = lib.types.enum [
+          "master"
+          "slave"
+          "none"
+        ];
         default = "none";
         description = "Role of the MySQL server instance.";
       };
@@ -292,14 +324,13 @@ in
   };
 
   ###### implementation
 
   config = lib.mkIf cfg.enable {
 
-    services.mysql.dataDir =
-      lib.mkDefault (if lib.versionAtLeast config.system.stateVersion "17.09" then "/var/lib/mysql"
-      else "/var/mysql");
+    services.mysql.dataDir = lib.mkDefault (
+      if lib.versionAtLeast config.system.stateVersion "17.09" then "/var/lib/mysql" else "/var/mysql"
+    );
 
     services.mysql.settings.mysqld = lib.mkMerge [
       {
@@ -311,7 +342,11 @@ in
         log-bin-index = "mysql-bin-${toString cfg.replication.serverId}.index";
         relay-log = "mysql-relay-bin";
         server-id = cfg.replication.serverId;
-        binlog-ignore-db = [ "information_schema" "performance_schema" "mysql" ];
+        binlog-ignore-db = [
+          "information_schema"
+          "performance_schema"
+          "mysql"
+        ];
       })
       (lib.mkIf (!isMariaDB) {
         plugin-load-add = [ "auth_socket.so" ];
@@ -355,12 +390,16 @@ in
         pkgs.nettools
       ];
 
-      preStart = if isMariaDB then ''
+      preStart =
+        if isMariaDB then
+          ''
         if ! test -e ${cfg.dataDir}/mysql; then
           ${cfg.package}/bin/mysql_install_db --defaults-file=/etc/my.cnf ${mysqldOptions}
           touch ${cfg.dataDir}/mysql_init
         fi
-      '' else ''
+          ''
+        else
+          ''
         if ! test -e ${cfg.dataDir}/mysql; then
           ${cfg.package}/bin/mysqld --defaults-file=/etc/my.cnf ${mysqldOptions} --initialize-insecure
           touch ${cfg.dataDir}/mysql_init
@@ -379,10 +418,12 @@ in
         exec ${cfg.package}/bin/mysqld --defaults-file=/etc/my.cnf ${mysqldOptions} $_WSREP_NEW_CLUSTER $_WSREP_START_POSITION
       '';
 
-      postStart = let
+      postStart =
+        let
         # The super user account to use on *first* run of MySQL server
         superUser = if isMariaDB then cfg.user else "root";
-      in ''
+        in
+        ''
         ${lib.optionalString (!hasNotify) ''
           # Wait until the MySQL server is available for use
           while [ ! -e /run/mysqld/mysqld.sock ]
@@ -396,7 +437,9 @@ in
           then
             # While MariaDB comes with a 'mysql' super user account since 10.4.x, MySQL does not
             # Since we don't want to run this service as 'root' we need to ensure the account exists on first run
-            ( echo "CREATE USER IF NOT EXISTS '${cfg.user}'@'localhost' IDENTIFIED WITH ${if isMariaDB then "unix_socket" else "auth_socket"};"
+            ( echo "CREATE USER IF NOT EXISTS '${cfg.user}'@'localhost' IDENTIFIED WITH ${
+              if isMariaDB then "unix_socket" else "auth_socket"
+            };"
               echo "GRANT ALL PRIVILEGES ON *.* TO '${cfg.user}'@'localhost' WITH GRANT OPTION;"
             ) | ${cfg.package}/bin/mysql -u ${superUser} -N
@@ -423,8 +466,7 @@ in
             fi
           '') cfg.initialDatabases}
 
-          ${lib.optionalString (cfg.replication.role == "master")
-            ''
+          ${lib.optionalString (cfg.replication.role == "master") ''
             # Set up the replication master
 
             ( echo "use mysql;"
@@ -434,8 +476,7 @@ in
             ) | ${cfg.package}/bin/mysql -u ${superUser} -N
           ''}
 
-          ${lib.optionalString (cfg.replication.role == "slave")
-            ''
+          ${lib.optionalString (cfg.replication.role == "slave") ''
             # Set up the replication slave
 
             ( echo "stop slave;"
@@ -444,8 +485,7 @@ in
            ) | ${cfg.package}/bin/mysql -u ${superUser} -N
          ''}
 
-          ${lib.optionalString (cfg.initialScript != null)
-            ''
+          ${lib.optionalString (cfg.initialScript != null) ''
            # Execute initial script
            # using toString to avoid copying the file to nix store if given as path instead of string,
            # as it might contain credentials
@@ -463,12 +503,15 @@ in
            ) | ${cfg.package}/bin/mysql -N
          ''}
 
-          ${lib.concatMapStrings (user:
-            ''
-              ( echo "CREATE USER IF NOT EXISTS '${user.name}'@'localhost' IDENTIFIED WITH ${if isMariaDB then "unix_socket" else "auth_socket"};"
-                ${lib.concatStringsSep "\n" (lib.mapAttrsToList (database: permission: ''
+          ${lib.concatMapStrings (user: ''
+            ( echo "CREATE USER IF NOT EXISTS '${user.name}'@'localhost' IDENTIFIED WITH ${
+              if isMariaDB then "unix_socket" else "auth_socket"
+            };"
+              ${lib.concatStringsSep "\n" (
+                lib.mapAttrsToList (database: permission: ''
                   echo "GRANT ${permission} ON ${database} TO '${user.name}'@'localhost';"
-                '') user.ensurePermissions)}
+                '') user.ensurePermissions
+              )}
             ) | ${cfg.package}/bin/mysql -N
           '') cfg.ensureUsers}
         '';
@@ -500,7 +543,11 @@ in
         ProtectKernelTunables = true;
         ProtectKernelModules = true;
         ProtectControlGroups = true;
-        RestrictAddressFamilies = [ "AF_UNIX" "AF_INET" "AF_INET6" ];
+        RestrictAddressFamilies = [
+          "AF_UNIX"
+          "AF_INET"
+          "AF_INET6"
+        ];
         LockPersonality = true;
         MemoryDenyWriteExecute = true;
         RestrictRealtime = true;
@@ -516,4 +563,6 @@ in
       ];
     };
   };
+
+  meta.maintainers = [ lib.maintainers._6543 ];
 }


@@ -1597,9 +1597,9 @@ in
   };
 
   meta = {
+    inherit (pkgs.matrix-synapse.meta) maintainers;
     buildDocsInSandbox = false;
     doc = ./synapse.md;
-    maintainers = teams.matrix.members;
   };
 }


@@ -16,7 +16,8 @@ in
 {
   imports = [
     (mkRemovedOptionModule [
-      "settings"
+      "services"
+      "tabby"
       "indexInterval"
     ] "These options are now managed within the tabby WebGUI")
   ];


@@ -193,7 +193,7 @@ in
           "${binYggdrasil} -genconf") + " > /run/yggdrasil/yggdrasil.conf"}
 
         # start yggdrasil
-        ${binYggdrasil} -useconffile /run/yggdrasil/yggdrasil.conf ${lib.strings.escapeShellArgs cfg.extraArgs}
+        exec ${binYggdrasil} -useconffile /run/yggdrasil/yggdrasil.conf ${lib.strings.escapeShellArgs cfg.extraArgs}
       '';
 
       serviceConfig = {


@@ -245,7 +245,7 @@ that are managed by Nix. If you want automatic updates it is recommended that yo
 
 ## Known warnings {#module-services-nextcloud-known-warnings}
 
-### Failed to get an iterator for log entries: Logreader application only supports "file" log_type {#module-services-nextcloud-warning-logreader}
+### Logreader application only supports "file" log_type {#module-services-nextcloud-warning-logreader}
 
 This is because
 
@@ -253,16 +253,12 @@ This is because
 * the Logreader application that allows reading logs in the admin panel is enabled
   by default and requires logs written to a file.
 
-The logreader application doesn't work, as it was the case before. The only change is that
-it complains loudly now. So nothing actionable here by default. Alternatively
-
-* disable the logreader application to shut up the "error".
-  We can't really do that by default since whether apps are enabled/disabled is part
-  of the application's state and tracked inside the database.
-* set [](#opt-services.nextcloud.settings.log_type) to "file" to be able to view logs
-  from the admin panel.
+If you want to view logs in the admin panel,
+you can set [](#opt-services.nextcloud.settings.log_type) to "file".
+
+If you prefer logs in the journal, disable the logreader application to shut up the
+"info". We can't really do that by default since whether apps are enabled/disabled
+is part of the application's state and tracked inside the database.
 
 ## Maintainer information {#module-services-nextcloud-maintainer-info}
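To make the rewritten guidance above concrete, here is a minimal sketch of the file-based logging setup it describes (everything except `settings.log_type` is an assumed placeholder):

```nix
{
  services.nextcloud = {
    enable = true;
    hostName = "cloud.example.org"; # assumed placeholder
    # Write logs to a file so the Logreader app in the admin panel can show them;
    # leave this unset if you prefer reading logs from the journal instead.
    settings.log_type = "file";
  };
}
```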


@@ -29,10 +29,11 @@ let
         };
       };
       default = {};
-      type = attrsWith' "config-name" (attrsWith' "tmpfiles-type" (attrsWith' "path" (types.submodule ({ name, config, ... }: {
+      type = attrsWith' "config-name" (attrsWith' "path" (attrsWith' "tmpfiles-type" (types.submodule ({ name, config, ... }: {
         options.type = mkOption {
           type = types.str;
           default = name;
+          defaultText = "tmpfiles-type";
           example = "d";
           description = ''
             The type of operation to perform on the file.
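The reordered `attrsWith'` nesting above corresponds to configurations keyed as config-name, then path, then tmpfiles-type. A hedged sketch of that shape (all concrete names and values are illustrative assumptions):

```nix
{
  systemd.tmpfiles.settings = {
    # config-name
    "10-example" = {
      # path
      "/var/lib/example" = {
        # tmpfiles-type: "d" creates a directory with the given ownership and mode
        d = {
          user = "root";
          group = "root";
          mode = "0755";
        };
      };
    };
  };
}
```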


@@ -30,8 +30,8 @@ import ../make-test-python.nix (
   in
   {
     name = "mjolnir";
-    meta = with pkgs.lib; {
-      maintainers = teams.matrix.members;
+    meta = {
+      inherit (pkgs.mjolnir.meta) maintainers;
     };
 
     nodes = {


@@ -2,8 +2,8 @@ import ../make-test-python.nix (
   { pkgs, ... }:
   {
     name = "matrix-synapse-workers";
-    meta = with pkgs.lib; {
-      maintainers = teams.matrix.members;
+    meta = {
+      inherit (pkgs.matrix-synapse.meta) maintainers;
     };
 
     nodes = {


@@ -54,8 +54,8 @@ import ../make-test-python.nix (
   {
     name = "matrix-synapse";
-    meta = with pkgs.lib; {
-      maintainers = teams.matrix.members;
+    meta = {
+      inherit (pkgs.matrix-synapse.meta) maintainers;
     };
 
     nodes = {


@@ -100,7 +100,7 @@ let
     installPhase = ''
       cp -r . $out
-      wrapProgram $out/bin/studio.sh \
+      wrapProgram $out/bin/studio \
         --set-default JAVA_HOME "$out/jbr" \
         --set ANDROID_EMULATOR_USE_SYSTEM_LIBS 1 \
         --set QT_XKB_CONFIG_ROOT "${xkeyboard_config}/share/X11/xkb" \
@@ -204,6 +204,7 @@ let
         ]
       }"
     '';
+    meta.mainProgram = "studio";
   };
 
   desktopItem = makeDesktopItem {
@@ -279,7 +280,7 @@ let
         unset ANDROID_HOME
       fi
     ''}
-    exec ${fhsEnv}/bin/${drvName}-fhs-env ${androidStudio}/bin/studio.sh "$@"
+    exec ${fhsEnv}/bin/${drvName}-fhs-env ${lib.getExe androidStudio} "$@"
   '';
   preferLocalBuild = true;
   allowSubstitutes = false;


@@ -36,22 +36,22 @@ let
   sha256 =
     {
-      x86_64-linux = "0gr2z4vzms6fv4kcc8dzc7l3inpb5hasnzdfr1zc2n4i3nl8z8vw";
-      x86_64-darwin = "1qplpjazjds5kns0kmp5qa6zfix30cqa93bl4bcpvblb2x9fh1v8";
-      aarch64-linux = "1jhrmwrnxzwvhqgfrs35kyd5hhg2b7dyq3p5k88jhm8607nkds79";
-      aarch64-darwin = "072lg4nvq3cdjzrwngaxnz9p952zkxsknsb39zjh55vzrij55g9x";
-      armv7l-linux = "06bvh72bq4ippr2k8ifcfqhkhhh6na4vxsz1k50swr1k2kzwwr5d";
+      x86_64-linux = "11a0y0zdz3mmc2xvpnlq06a7q06y6529xpp4hlhpjylj0bk06xn1";
+      x86_64-darwin = "12fxhwqcz36f5pv4kvs7bblmymxyixg7pvi0gb5k0j73pkvqrr6g";
+      aarch64-linux = "0g5qz7gq7k65p2f8iwz1jiy03nwsmy3v3gb18qwg9mbhm0dk59la";
+      aarch64-darwin = "1g4fz8nw5m7krjlsjs43937kz1sr7lkflbphpyh8cmalwpxa8ysn";
+      armv7l-linux = "09r12y9xbpqnnw9mab3k4kx0ngpfng1l6rk09n9l2q36ji20ijmy";
     }
     .${system} or throwSystem;
 in
 callPackage ./generic.nix rec {
   # Please backport all compatible updates to the stable release.
   # This is important for the extension ecosystem.
-  version = "1.97.1";
+  version = "1.97.2";
   pname = "vscode" + lib.optionalString isInsiders "-insiders";
 
   # This is used for VS Code - Remote SSH test
-  rev = "e249dada235c2083c83813bd65b7f4707fb97b76";
+  rev = "e54c774e0add60467559eb0d1e229c6452cf8447";
 
   executableName = "code" + lib.optionalString isInsiders "-insiders";
   longName = "Visual Studio Code" + lib.optionalString isInsiders " - Insiders";
@@ -75,7 +75,7 @@ callPackage ./generic.nix rec {
   src = fetchurl {
     name = "vscode-server-${rev}.tar.gz";
     url = "https://update.code.visualstudio.com/commit:${rev}/server-linux-x64/stable";
-    sha256 = "01snzahh794ygpgwh4r57c8mnisp6a4fc3v5x76cdhxw2hd9s26n";
+    sha256 = "15fd401sqmlkpw48pysqpyi5rlsqx4cm55bbwakhkal4qa1qnq4m";
   };
   stdenv = stdenvNoCC;
 };


@@ -1092,6 +1092,15 @@
     "spdx": "MPL-2.0",
     "vendorHash": null
   },
+  "sakuracloud": {
+    "hash": "sha256-KrzqIAK6ImUW22Iik97R4HARoXN4lG6AquitLjCqS/A=",
+    "homepage": "https://registry.terraform.io/providers/sacloud/sakuracloud",
+    "owner": "sacloud",
+    "repo": "terraform-provider-sakuracloud",
+    "rev": "v2.26.1",
+    "spdx": "Apache-2.0",
+    "vendorHash": "sha256-Ry791h5AuYP03nex9nM8X5Mk6PeL7hNDbFyVRvVPJNE="
+  },
   "scaleway": {
     "hash": "sha256-8aESalFQaA6Qwod4rDeUzrKe80rbHfVJZIKtLliKUME=",
     "homepage": "https://registry.terraform.io/providers/scaleway/scaleway",


@@ -1,23 +1,24 @@
 {
-  cacert,
-  cargo,
-  copyDesktopItems,
-  fetchFromGitHub,
-  fetchurl,
-  findutils,
-  jq,
   lib,
-  makeDesktopItem,
-  makeWrapper,
-  rsync,
-  rustPlatform,
-  rustc,
   stdenv,
   stdenvNoCC,
-  yarn-berry,
-  zip,
+  fetchFromGitHub,
+  rustPlatform,
   electron_33,
   nodejs_20,
+  yarn-berry,
+  cacert,
+  writableTmpDirAsHomeHook,
+  cargo,
+  rustc,
+  findutils,
+  zip,
+  rsync,
+  jq,
+  copyDesktopItems,
+  makeWrapper,
+  makeDesktopItem,
+  nix-update-script,
 
   buildType ? "stable",
   commandLineArgs ? "",
 }:
@@ -34,92 +35,68 @@ let
   electron = electron_33;
   nodejs = nodejs_20;
   yarn = yarn-berry.override { inherit nodejs; };
+
+  productName = if buildType != "stable" then "AFFiNE-${buildType}" else "AFFiNE";
+  binName = lib.toLower productName;
 in
-stdenv.mkDerivation (
-  finalAttrs:
-  (
-    {
-      productName = if buildType == "stable" then "AFFiNE" else "AFFiNE-" + buildType;
-      binName = lib.toLower finalAttrs.productName;
-      pname = finalAttrs.binName;
+stdenv.mkDerivation (finalAttrs: {
+  pname = binName;
 
-      # https://github.com/toeverything/AFFiNE/releases/tag/v0.18.1
-      version = "0.18.1";
-      GITHUB_SHA = "8b066a4b398aace25a20508a8e3c1a381721971f";
+  version = "0.19.6";
 
   src = fetchFromGitHub {
     owner = "toeverything";
     repo = "AFFiNE";
-    rev = finalAttrs.GITHUB_SHA;
-    hash = "sha256-TWwojG3lqQlQFX3BKoFjJ27a3T/SawXgNDO6fP6gW4k=";
+    tag = "v${finalAttrs.version}";
+    hash = "sha256-BydTNE36oRIxr2lTnc2+EY0lvMXn4NTLB4EjqzhdjGk=";
   };
 
-  meta =
-    {
-      description = "Workspace with fully merged docs, whiteboards and databases";
-      longDescription = ''
-        AFFiNE is an open-source, all-in-one workspace and an operating
-        system for all the building blocks that assemble your knowledge
-        base and much more -- wiki, knowledge management, presentation
-        and digital assets
-      '';
-      homepage = "https://affine.pro/";
-      license = lib.licenses.mit;
-      maintainers = with lib.maintainers; [ xiaoxiangmoe ];
-      platforms = [
-        "aarch64-darwin"
-        "aarch64-linux"
-        "x86_64-darwin"
-        "x86_64-linux"
-      ];
-      sourceProvenance = [ lib.sourceTypes.fromSource ];
-    }
-    // lib.optionalAttrs hostPlatform.isLinux {
-      mainProgram = finalAttrs.binName;
-    };
-
-  env = {
-    BUILD_TYPE = buildType;
-  };
-
   cargoDeps = rustPlatform.fetchCargoVendor {
-    src = finalAttrs.src;
-    hash = "sha256-5s/X9CD/H9rSn7SqMHioLg1KRP7y9fsozdFRY3hNiP8=";
+    inherit (finalAttrs) pname version src;
+    hash = "sha256-racjpf0VgNod6OxWKSaCbKS9fEkInpDyhVbAHfYWIDo=";
   };
 
   yarnOfflineCache = stdenvNoCC.mkDerivation {
     name = "yarn-offline-cache";
-    src = finalAttrs.src;
+    inherit (finalAttrs) src;
     nativeBuildInputs = [
       yarn
       cacert
+      writableTmpDirAsHomeHook
     ];
 
+    # force yarn install run in CI mode
+    env.CI = "1";
+
+    buildPhase =
+      let
         supportedArchitectures = builtins.toJSON {
           os = [
             "darwin"
            "linux"
          ];
          cpu = [
-           "arm64"
            "x64"
-           "ia32"
+           "arm64"
          ];
          libc = [
            "glibc"
            "musl"
          ];
        };
-    buildPhase = ''
-      export HOME="$NIX_BUILD_TOP"
-      export CI=1
+      in
+      ''
+        runHook preBuild
+
       mkdir -p $out
       yarn config set enableTelemetry false
       yarn config set cacheFolder $out
      yarn config set enableGlobalCache false
-      yarn config set supportedArchitectures --json "$supportedArchitectures"
+      yarn config set supportedArchitectures --json '${supportedArchitectures}'
      yarn install --immutable --mode=skip-build
+
+        runHook postBuild
    '';
    dontInstall = true;
    outputHashMode = "recursive";
-    outputHash = "sha256-HueTia+1ApfvbBK/b+iE84TB1DCWIDLoQ9XhjYlGCUs=";
+    outputHash = "sha256-E9l5zjOOfyDBzYJOU94VrRvt7Hi4XkRTDav9bVlXvlQ=";
  };

  nativeBuildInputs =
    [
@@ -131,31 +108,27 @@ stdenv.mkDerivation (
       zip
       jq
       rsync
+      writableTmpDirAsHomeHook
     ]
     ++ lib.optionals hostPlatform.isLinux [
       copyDesktopItems
       makeWrapper
     ];
 
-  patchPhase = ''
-    runHook prePatchPhase
-
-    sed -i '/packagerConfig/a \ electronZipDir: process.env.ELECTRON_FORGE_ELECTRON_ZIP_DIR,' packages/frontend/apps/electron/forge.config.mjs
-
-    runHook postPatchPhase
+  # force yarn install run in CI mode
+  env.CI = "1";
+
+  # Remove code under The AFFiNE Enterprise Edition (EE) license.
+  # Keep file package.json for `yarn install --immutable` lockfile check.
+  postPatch = ''
+    BACKEND_SERVER_PACKAGE_JSON="$(jq 'del(.scripts.postinstall)' packages/backend/server/package.json)"
+    rm -rf packages/backend/server/{.*,*}
+    echo "$BACKEND_SERVER_PACKAGE_JSON" > packages/backend/server/package.json
   '';
 
-  configurePhase =
-    let
-      electronContentPath =
-        electron + (if hostPlatform.isLinux then "/libexec/electron/" else "/Applications/");
-    in
-    ''
+  configurePhase = ''
     runHook preConfigurePhase
 
-      export HOME="$NIX_BUILD_TOP"
-      export CI=1
-
     # cargo config
     mkdir -p .cargo
     cat $cargoDeps/.cargo/config.toml >> .cargo/config.toml
@@ -168,7 +141,7 @@ stdenv.mkDerivation (
     # electron config
     ELECTRON_VERSION_IN_LOCKFILE=$(yarn why electron --json | tail --lines 1 | jq --raw-output '.children | to_entries | first | .key ' | cut -d : -f 2)
-    rsync --archive --chmod=u+w ${electronContentPath} $HOME/.electron-prebuilt-zip-tmp
+    rsync --archive --chmod=u+w "${electron.dist}/" $HOME/.electron-prebuilt-zip-tmp
     export ELECTRON_FORGE_ELECTRON_ZIP_DIR=$PWD/.electron_zip_dir
     mkdir -p $ELECTRON_FORGE_ELECTRON_ZIP_DIR
     (cd $HOME/.electron-prebuilt-zip-tmp && zip --recurse-paths - .) > $ELECTRON_FORGE_ELECTRON_ZIP_DIR/electron-v$ELECTRON_VERSION_IN_LOCKFILE-${nodePlatform}-${nodeArch}.zip
@@ -176,27 +149,26 @@ stdenv.mkDerivation (
     runHook postConfigurePhase
   '';
 
   buildPhase = ''
     runHook preBuild
 
     # first build
-    yarn workspaces focus @affine/electron @affine/monorepo
-    CARGO_NET_OFFLINE=true yarn workspace @affine/native build
-    BUILD_TYPE=${buildType} SKIP_NX_CACHE=1 yarn workspace @affine/electron generate-assets
+    yarn install
+    CARGO_NET_OFFLINE=true yarn affine @affine/native build
+    GITHUB_SHA=ffffffffffffffffffffffffffffffffffffffff BUILD_TYPE=${buildType} SKIP_NX_CACHE=1 yarn affine @affine/electron generate-assets
 
     # second build
     yarn config set nmMode classic
     yarn config set nmHoistingLimits workspaces
     find . -name 'node_modules' -type d -prune -exec rm -rf '{}' +
-    yarn workspaces focus @affine/electron @affine/monorepo
-    BUILD_TYPE=${buildType} SKIP_WEB_BUILD=1 SKIP_BUNDLE=1 HOIST_NODE_MODULES=1 yarn workspace @affine/electron make
+    yarn install
+    BUILD_TYPE=${buildType} SKIP_WEB_BUILD=1 SKIP_BUNDLE=1 HOIST_NODE_MODULES=1 yarn affine @affine/electron make
 
     runHook postBuild
   '';
 
   installPhase =
-    let
-      inherit (finalAttrs) binName productName;
-    in
     if hostPlatform.isDarwin then
       ''
         runHook preInstall
@@ -214,7 +186,7 @@ stdenv.mkDerivation (
         mv packages/frontend/apps/electron/out/${buildType}/${productName}-linux-${nodeArch}/{resources,LICENSE*} $out/lib/${binName}/
         install -Dm644 packages/frontend/apps/electron/resources/icons/icon_${buildType}_64x64.png $out/share/icons/hicolor/64x64/apps/${binName}.png
-        makeWrapper "${electron}/bin/electron" $out/bin/${binName} \
+        makeWrapper "${lib.getExe electron}" $out/bin/${binName} \
           --inherit-argv0 \
           --add-flags $out/lib/${binName}/resources/app.asar \
           --add-flags "\''${NIXOS_OZONE_WL:+\''${WAYLAND_DISPLAY:+--ozone-platform-hint=auto --enable-features=WaylandWindowDecorations --enable-wayland-ime=true}}" \
@@ -222,13 +194,8 @@ stdenv.mkDerivation (
         runHook postInstall
       '';
-    }
-    // (lib.optionalAttrs hostPlatform.isLinux {
-      desktopItems =
-        let
-          inherit (finalAttrs) binName productName;
-        in
-        [
+  desktopItems = [
     (makeDesktopItem {
       name = binName;
       desktopName = productName;
@@ -241,6 +208,30 @@ stdenv.mkDerivation (
       mimeTypes = [ "x-scheme-handler/${binName}" ];
     })
   ];
+
+  passthru.updateScript = nix-update-script {
+    extraArgs = [
+      "--version-regex=^v(\\d+\\.\\d+\\.\\d+)$"
+    ];
+  };
+
+  meta = {
+    description = "Workspace with fully merged docs, whiteboards and databases";
+    longDescription = ''
+      AFFiNE is an open-source, all-in-one workspace and an operating
+      system for all the building blocks that assemble your knowledge
+      base and much more -- wiki, knowledge management, presentation
+      and digital assets
+    '';
+    homepage = "https://affine.pro/";
+    license = lib.licenses.mit;
+    maintainers = with lib.maintainers; [ xiaoxiangmoe ];
+    platforms = [
+      "aarch64-darwin"
+      "aarch64-linux"
+      "x86_64-darwin"
+      "x86_64-linux"
+    ];
+    sourceProvenance = [ lib.sourceTypes.fromSource ];
+  };
 })
-  )
-)


@@ -176,15 +176,17 @@ stdenv.mkDerivation (finalAttrs: {
   dontWrapGApps = true;
 
   # Replace audacity's wrapper, to:
-  # - put it in the right place, it shouldn't be in "$out/audacity"
+  # - Put it in the right place; it shouldn't be in "$out/audacity"
   # - Add the ffmpeg dynamic dependency
+  # - Use Xwayland by default on Wayland. See https://github.com/audacity/audacity/pull/5977
   postFixup =
     lib.optionalString stdenv.hostPlatform.isLinux ''
       wrapProgram "$out/bin/audacity" \
         "''${gappsWrapperArgs[@]}" \
         --prefix LD_LIBRARY_PATH : "$out/lib/audacity":${lib.makeLibraryPath [ ffmpeg ]} \
         --suffix AUDACITY_MODULES_PATH : "$out/lib/audacity/modules" \
-        --suffix AUDACITY_PATH : "$out/share/audacity"
+        --suffix AUDACITY_PATH : "$out/share/audacity" \
+        --set-default GDK_BACKEND x11
     ''
     + lib.optionalString stdenv.hostPlatform.isDarwin ''
       mkdir -p $out/{Applications,bin}


@@ -74,7 +74,6 @@ rustPlatform.buildRustPackage {
     wrapProgram $out/bin/devenv \
       --prefix PATH ":" "$out/bin:${cachix}/bin" \
       --set DEVENV_NIX ${devenv_nix} \
-      --set-default DO_NOT_TRACK 1 \
       ${setDefaultLocaleArchive}
 
     # Generate manpages


@@ -2,22 +2,25 @@
   lib,
   rustPlatform,
   fetchFromGitHub,
+  nix-update-script,
 }:
 
 rustPlatform.buildRustPackage rec {
   pname = "harper";
-  version = "0.20.0";
+  version = "0.21.1";
 
   src = fetchFromGitHub {
     owner = "Automattic";
     repo = "harper";
     rev = "v${version}";
-    hash = "sha256-8JeF1HxsP+Y+C1g3YJ0B0+JHoRFkBjz4/T8rVr2KgGw=";
+    hash = "sha256-UTohTnIUMpyQGvkuOD2L7bViF3b5QnbDjRD4VSmf4lE=";
   };
 
   buildAndTestSubdir = "harper-ls";
 
   useFetchCargoVendor = true;
-  cargoHash = "sha256-uVjDFo5mJi4Xbq0Z+XOjy5VqXqkm0a+4xu+dVnjWXCU=";
+  cargoHash = "sha256-wHXo4yfFc77osCamK0NidbrIYyIFMEpfBr0B6aniBmQ=";
+
+  passthru.updateScript = nix-update-script { };
 
   meta = {
     description = "Grammar Checker for Developers";


@ -0,0 +1,33 @@
{
lib,
rustPackages,
fetchFromGitHub,
pkg-config,
}:
rustPackages.rustPlatform.buildRustPackage rec {
pname = "hawkeye";
version = "6.0.0";
src = fetchFromGitHub {
owner = "korandoru";
repo = "hawkeye";
tag = "v${version}";
hash = "sha256-VfJWj9BwNVR7RVUW+CjFuaniyiEath1U0F/7QJcA3r4=";
};
useFetchCargoVendor = true;
cargoHash = "sha256-SJEl5QsO4KYRv+5xDPHy1Q53qcL89IJ9JTXtzubO5fk=";
nativeBuildInputs = [
pkg-config
];
meta = {
homepage = "https://github.com/korandoru/hawkeye";
description = "Simple license header checker and formatter, in multiple distribution forms";
license = lib.licenses.asl20;
mainProgram = "hawkeye";
maintainers = with lib.maintainers; [ matthiasbeyer ];
};
}


@@ -21,6 +21,8 @@ rustPlatform.buildRustPackage rec {
   useFetchCargoVendor = true;
   cargoHash = "sha256-J9sGXJbGbO9UgZfgqxqzbiJz9j6WMpq3qC2ys7OJnII=";
 
+  buildFeatures = [ "cli" ];
+
   checkFlags = [
     # remote access
     "--skip=generation::tests::can_generate_from_remote_repo_with_subdir"


@@ -6,13 +6,13 @@
 stdenv.mkDerivation rec {
   pname = "lzbench";
-  version = "1.8.1";
+  version = "2.0.1";
 
   src = fetchFromGitHub {
     owner = "inikep";
     repo = pname;
     rev = "v${version}";
-    sha256 = "19zlvcjb1qg4fx30rrp6m650660y35736j8szvdxmqh9ipkisyia";
+    sha256 = "sha256-946AcnD9z60Oihm2pseS8D5j6pGdYeCxmhTLNcW9Mmc=";
   };
 
   enableParallelBuilding = true;


@@ -10,17 +10,17 @@
 rustPlatform.buildRustPackage rec {
   pname = "mpd-discord-rpc";
-  version = "1.7.3";
+  version = "1.8.0";
 
   src = fetchFromGitHub {
     owner = "JakeStanger";
     repo = "mpd-discord-rpc";
     rev = "v${version}";
-    hash = "sha256-WiHMXazNKyt5N7WmkftZYEHeQi+l9qoU2yr6jRHfjdE=";
+    hash = "sha256-RuXH0RaR0VVN7tja0pcc8QH826/JzH4tyVVCbrK7ldI=";
   };
 
   useFetchCargoVendor = true;
-  cargoHash = "sha256-v6YQS+Te0bIzSr3q4QaEcXbUjiTCKELxCdqBlbjLI3E=";
+  cargoHash = "sha256-ewmg5t0JljnvxjrGDJzokRwndv7UNw9NMQ7Cx6oDWjg=";
 
   nativeBuildInputs = [
     pkg-config


@@ -89,6 +89,9 @@ python3Packages.buildPythonApplication rec {
       ps: with ps; [
         mypy
         pytest
+        # this is to help development (e.g.: better diffs) inside devShell
+        # only, do not use its helpers like `mocker`
+        pytest-mock
         ruff
       ]
     );


@@ -7,6 +7,7 @@ from typing import Any
 from unittest.mock import ANY, Mock, call, patch
 
 import pytest
+from pytest import MonkeyPatch
 
 import nixos_rebuild as nr
@ -125,6 +126,92 @@ def test_parse_args() -> None:
] ]
@patch.dict(nr.os.environ, {}, clear=True)
@patch(get_qualified_name(nr.os.execve, nr.os), autospec=True)
@patch(get_qualified_name(nr.nix.build), autospec=True)
def test_reexec(mock_build: Mock, mock_execve: Mock, monkeypatch: MonkeyPatch) -> None:
monkeypatch.setattr(nr, "EXECUTABLE", "nixos-rebuild-ng")
argv = ["/path/bin/nixos-rebuild-ng", "switch", "--no-flake"]
args, _ = nr.parse_args(argv)
mock_build.return_value = Path("/path")
nr.reexec(argv, args, {"build": True}, {"flake": True})
mock_build.assert_has_calls(
[
call(
"config.system.build.nixos-rebuild",
nr.models.BuildAttr(ANY, ANY),
{"build": True, "no_out_link": True},
)
]
)
# do not exec if there is no new version
mock_execve.assert_not_called()
mock_build.return_value = Path("/path/new")
nr.reexec(argv, args, {}, {})
# exec in the new version successfully
mock_execve.assert_called_once_with(
Path("/path/new/bin/nixos-rebuild-ng"),
["/path/bin/nixos-rebuild-ng", "switch", "--no-flake"],
{"_NIXOS_REBUILD_REEXEC": "1"},
)
mock_execve.reset_mock()
mock_execve.side_effect = [OSError("BOOM"), None]
nr.reexec(argv, args, {}, {})
# exec in the previous version if the new version fails
mock_execve.assert_any_call(
Path("/path/bin/nixos-rebuild-ng"),
["/path/bin/nixos-rebuild-ng", "switch", "--no-flake"],
{"_NIXOS_REBUILD_REEXEC": "1"},
)
@patch.dict(nr.os.environ, {}, clear=True)
@patch(get_qualified_name(nr.os.execve, nr.os), autospec=True)
@patch(get_qualified_name(nr.nix.build_flake), autospec=True)
def test_reexec_flake(
mock_build: Mock, mock_execve: Mock, monkeypatch: MonkeyPatch
) -> None:
monkeypatch.setattr(nr, "EXECUTABLE", "nixos-rebuild-ng")
argv = ["/path/bin/nixos-rebuild-ng", "switch", "--flake"]
args, _ = nr.parse_args(argv)
mock_build.return_value = Path("/path")
nr.reexec(argv, args, {"build": True}, {"flake": True})
mock_build.assert_called_once_with(
"config.system.build.nixos-rebuild",
nr.models.Flake(ANY, ANY),
{"flake": True, "no_link": True},
)
# do not exec if there is no new version
mock_execve.assert_not_called()
mock_build.return_value = Path("/path/new")
nr.reexec(argv, args, {}, {})
# exec in the new version successfully
mock_execve.assert_called_once_with(
Path("/path/new/bin/nixos-rebuild-ng"),
["/path/bin/nixos-rebuild-ng", "switch", "--flake"],
{"_NIXOS_REBUILD_REEXEC": "1"},
)
mock_execve.reset_mock()
mock_execve.side_effect = [OSError("BOOM"), None]
nr.reexec(argv, args, {}, {})
# exec in the previous version if the new version fails
mock_execve.assert_any_call(
Path("/path/bin/nixos-rebuild-ng"),
["/path/bin/nixos-rebuild-ng", "switch", "--flake"],
{"_NIXOS_REBUILD_REEXEC": "1"},
)
@patch.dict(nr.process.os.environ, {}, clear=True) @patch.dict(nr.process.os.environ, {}, clear=True)
@patch(get_qualified_name(nr.process.subprocess.run), autospec=True) @patch(get_qualified_name(nr.process.subprocess.run), autospec=True)
def test_execute_nix_boot(mock_run: Mock, tmp_path: Path) -> None: def test_execute_nix_boot(mock_run: Mock, tmp_path: Path) -> None:
@@ -147,7 +234,9 @@ def test_execute_nix_boot(mock_run: Mock, tmp_path: Path) -> None:
 
     nr.execute(["nixos-rebuild", "boot", "--no-flake", "-vvv", "--no-reexec"])
 
-    assert mock_run.call_args_list == [
+    assert mock_run.call_count == 6
+    mock_run.assert_has_calls(
+        [
         call(
             ["nix-instantiate", "--find-file", "nixpkgs", "-vvv"],
             stdout=PIPE,
@@ -195,6 +284,7 @@ def test_execute_nix_boot(mock_run: Mock, tmp_path: Path) -> None:
             **(DEFAULT_RUN_KWARGS | {"env": {"NIXOS_INSTALL_BOOTLOADER": "0"}}),
         ),
     ]
+    )
@patch.dict(nr.process.os.environ, {}, clear=True) @patch.dict(nr.process.os.environ, {}, clear=True)
@@ -224,7 +314,9 @@ def test_execute_nix_build_vm(mock_run: Mock, tmp_path: Path) -> None:
         ]
     )
 
-    assert mock_run.call_args_list == [
+    assert mock_run.call_count == 1
+    mock_run.assert_has_calls(
+        [
         call(
             [
                 "nix-build",
@@ -241,6 +333,7 @@ def test_execute_nix_build_vm(mock_run: Mock, tmp_path: Path) -> None:
             **DEFAULT_RUN_KWARGS,
         )
     ]
+    )
@patch.dict(nr.process.os.environ, {}, clear=True) @patch.dict(nr.process.os.environ, {}, clear=True)
@@ -279,7 +372,9 @@ def test_execute_nix_build_image_flake(mock_run: Mock, tmp_path: Path) -> None:
         ]
     )
 
-    assert mock_run.call_args_list == [
+    assert mock_run.call_count == 2
+    mock_run.assert_has_calls(
+        [
         call(
             [
                 "nix",
@@ -307,6 +402,7 @@ def test_execute_nix_build_image_flake(mock_run: Mock, tmp_path: Path) -> None:
             **DEFAULT_RUN_KWARGS,
         ),
     ]
+    )
@patch.dict(nr.process.os.environ, {}, clear=True) @patch.dict(nr.process.os.environ, {}, clear=True)
@@ -340,7 +436,9 @@ def test_execute_nix_switch_flake(mock_run: Mock, tmp_path: Path) -> None:
         ]
     )
 
-    assert mock_run.call_args_list == [
+    assert mock_run.call_count == 3
+    mock_run.assert_has_calls(
+        [
         call(
             [
                 "nix",
@@ -377,6 +475,7 @@ def test_execute_nix_switch_flake(mock_run: Mock, tmp_path: Path) -> None:
             **(DEFAULT_RUN_KWARGS | {"env": {"NIXOS_INSTALL_BOOTLOADER": "1"}}),
         ),
     ]
+    )
@patch.dict(nr.process.os.environ, {}, clear=True) @patch.dict(nr.process.os.environ, {}, clear=True)
@@ -430,7 +529,9 @@ def test_execute_nix_switch_build_target_host(
         ]
     )
 
-    assert mock_run.call_args_list == [
+    assert mock_run.call_count == 10
+    mock_run.assert_has_calls(
+        [
         call(
             [
                 "nix-instantiate",
@@ -570,6 +671,7 @@ def test_execute_nix_switch_build_target_host(
             **DEFAULT_RUN_KWARGS,
         ),
     ]
+    )
@patch.dict(nr.process.os.environ, {}, clear=True) @patch.dict(nr.process.os.environ, {}, clear=True)
@@ -604,7 +706,9 @@ def test_execute_nix_switch_flake_target_host(
         ]
     )
 
-    assert mock_run.call_args_list == [
+    assert mock_run.call_count == 4
+    mock_run.assert_has_calls(
+        [
         call(
             [
                 "nix",
@@ -656,6 +760,7 @@ def test_execute_nix_switch_flake_target_host(
             **DEFAULT_RUN_KWARGS,
         ),
     ]
+    )
@patch.dict(nr.process.os.environ, {}, clear=True) @patch.dict(nr.process.os.environ, {}, clear=True)
@@ -691,7 +796,9 @@ def test_execute_nix_switch_flake_build_host(
         ]
     )
 
-    assert mock_run.call_args_list == [
+    assert mock_run.call_count == 6
+    mock_run.assert_has_calls(
+        [
         call(
             [
                 "nix",
@@ -755,6 +862,7 @@ def test_execute_nix_switch_flake_build_host(
             **DEFAULT_RUN_KWARGS,
         ),
     ]
+    )
@patch(get_qualified_name(nr.process.subprocess.run), autospec=True) @patch(get_qualified_name(nr.process.subprocess.run), autospec=True)
@@ -783,7 +891,9 @@ def test_execute_switch_rollback(mock_run: Mock, tmp_path: Path) -> None:
         ]
     )
 
-    assert mock_run.call_args_list == [
+    assert mock_run.call_count == 4
+    mock_run.assert_has_calls(
+        [
         call(
             ["nix-instantiate", "--find-file", "nixpkgs"],
             check=False,
@@ -822,6 +932,7 @@ def test_execute_switch_rollback(mock_run: Mock, tmp_path: Path) -> None:
             **DEFAULT_RUN_KWARGS,
         ),
     ]
+    )
@patch(get_qualified_name(nr.process.subprocess.run), autospec=True) @patch(get_qualified_name(nr.process.subprocess.run), autospec=True)
@@ -835,7 +946,9 @@ def test_execute_build(mock_run: Mock, tmp_path: Path) -> None:
 
     nr.execute(["nixos-rebuild", "build", "--no-flake", "--no-reexec"])
 
-    assert mock_run.call_args_list == [
+    assert mock_run.call_count == 1
+    mock_run.assert_has_calls(
+        [
         call(
             [
                 "nix-build",
@@ -848,6 +961,7 @@ def test_execute_build(mock_run: Mock, tmp_path: Path) -> None:
             **DEFAULT_RUN_KWARGS,
         )
     ]
+    )
@patch(get_qualified_name(nr.process.subprocess.run), autospec=True) @patch(get_qualified_name(nr.process.subprocess.run), autospec=True)
@@ -867,7 +981,9 @@ def test_execute_test_flake(mock_run: Mock, tmp_path: Path) -> None:
         ["nixos-rebuild", "test", "--flake", "github:user/repo#hostname", "--no-reexec"]
     )
 
-    assert mock_run.call_args_list == [
+    assert mock_run.call_count == 2
+    mock_run.assert_has_calls(
+        [
         call(
             [
                 "nix",
@@ -887,6 +1003,7 @@ def test_execute_test_flake(mock_run: Mock, tmp_path: Path) -> None:
             **DEFAULT_RUN_KWARGS,
         ),
     ]
+    )
@patch(get_qualified_name(nr.process.subprocess.run), autospec=True) @patch(get_qualified_name(nr.process.subprocess.run), autospec=True)
@@ -917,7 +1034,9 @@ def test_execute_test_rollback(
         ["nixos-rebuild", "test", "--rollback", "--profile-name", "foo", "--no-reexec"]
     )
 
-    assert mock_run.call_args_list == [
+    assert mock_run.call_count == 2
+    mock_run.assert_has_calls(
+        [
         call(
             [
                 "nix-env",
@@ -940,3 +1059,4 @@ def test_execute_test_rollback(
             **DEFAULT_RUN_KWARGS,
         ),
     ]
+    )


@@ -26,7 +26,7 @@ def test_build(mock_run: Mock) -> None:
         m.BuildAttr("<nixpkgs/nixos>", None),
         {"nix_flag": "foo"},
     ) == Path("/path/to/file")
-    assert mock_run.call_args == call(
+    mock_run.assert_called_with(
         [
             "nix-build",
             "<nixpkgs/nixos>",
@@ -38,17 +38,13 @@ def test_build(mock_run: Mock) -> None:
         stdout=PIPE,
     )
 
-    mock_run.reset_mock()
-
     assert n.build(
         "config.system.build.attr", m.BuildAttr(Path("file"), "preAttr")
     ) == Path("/path/to/file")
-    assert mock_run.call_args_list == [
-        call(
+    mock_run.assert_called_with(
             ["nix-build", Path("file"), "--attr", "preAttr.config.system.build.attr"],
             stdout=PIPE,
         )
-    ]
@patch( @patch(
@@ -65,7 +61,7 @@ def test_build_flake(mock_run: Mock, monkeypatch: MonkeyPatch, tmpdir: Path) ->
         flake,
         {"no_link": True, "nix_flag": "foo"},
     ) == Path("/path/to/file")
-    assert mock_run.call_args == call(
+    mock_run.assert_called_with(
         [
             "nix",
             "--extra-experimental-features",
@@ -114,7 +110,9 @@ def test_build_remote(
         instantiate_flags={"inst": True},
         copy_flags={"copy": True},
     ) == Path("/path/to/config")
-    assert mock_run.call_args_list == [
+
+    mock_run.assert_has_calls(
+        [
         call(
             [
                 "nix-instantiate",
@@ -135,7 +133,9 @@ def test_build_remote(
                 "user@host",
                 Path("/path/to/file"),
             ],
-            extra_env={"NIX_SSHOPTS": " ".join([*p.SSH_DEFAULT_OPTS, "--ssh opts"])},
+            extra_env={
+                "NIX_SSHOPTS": " ".join([*p.SSH_DEFAULT_OPTS, "--ssh opts"])
+            },
         ),
         call(
             ["mktemp", "-d", "-t", "nixos-rebuild.XXXXX"],
@@ -161,6 +161,7 @@ def test_build_remote(
         ),
         call(["rm", "-rf", Path("/tmp/tmpdir")], remote=build_host, check=False),
     ]
+    )
@patch( @patch(
@@ -184,7 +185,8 @@ def test_build_remote_flake(
         copy_flags={"copy": True},
         flake_build_flags={"build": True},
     ) == Path("/path/to/file")
-    assert mock_run.call_args_list == [
+    mock_run.assert_has_calls(
+        [
         call(
             [
                 "nix",
@@ -205,7 +207,9 @@ def test_build_remote_flake(
                 "user@host",
                 Path("/path/to/file"),
             ],
-            extra_env={"NIX_SSHOPTS": " ".join([*p.SSH_DEFAULT_OPTS, "--ssh opts"])},
+            extra_env={
+                "NIX_SSHOPTS": " ".join([*p.SSH_DEFAULT_OPTS, "--ssh opts"])
+            },
         ),
         call(
             [
@@ -221,6 +225,7 @@ def test_build_remote_flake(
             stdout=PIPE,
         ),
     ]
+    )
def test_copy_closure(monkeypatch: MonkeyPatch) -> None: def test_copy_closure(monkeypatch: MonkeyPatch) -> None:
@@ -233,7 +238,7 @@ def test_copy_closure(monkeypatch: MonkeyPatch) -> None:
     build_host = m.Remote("user@build.host", [], None)
     with patch(get_qualified_name(n.run_wrapper, n), autospec=True) as mock_run:
         n.copy_closure(closure, target_host)
-        assert mock_run.call_args == call(
+        mock_run.assert_called_with(
             ["nix-copy-closure", "--to", "user@target.host", closure],
             extra_env={"NIX_SSHOPTS": " ".join(p.SSH_DEFAULT_OPTS)},
         )
@@ -241,7 +246,7 @@ def test_copy_closure(monkeypatch: MonkeyPatch) -> None:
     monkeypatch.setenv("NIX_SSHOPTS", "--ssh build-opt")
     with patch(get_qualified_name(n.run_wrapper, n), autospec=True) as mock_run:
         n.copy_closure(closure, None, build_host, {"copy_flag": True})
-        assert mock_run.call_args == call(
+        mock_run.assert_called_with(
             ["nix-copy-closure", "--copy-flag", "--from", "user@build.host", closure],
             extra_env={
                 "NIX_SSHOPTS": " ".join([*p.SSH_DEFAULT_OPTS, "--ssh build-opt"])
@@ -255,7 +260,7 @@ def test_copy_closure(monkeypatch: MonkeyPatch) -> None:
         }
     with patch(get_qualified_name(n.run_wrapper, n), autospec=True) as mock_run:
         n.copy_closure(closure, target_host, build_host, {"copy_flag": True})
-        assert mock_run.call_args == call(
+        mock_run.assert_called_with(
             [
                 "nix",
                 "copy",
@@ -272,7 +277,8 @@ def test_copy_closure(monkeypatch: MonkeyPatch) -> None:
     monkeypatch.setattr(n, "WITH_NIX_2_18", False)
     with patch(get_qualified_name(n.run_wrapper, n), autospec=True) as mock_run:
         n.copy_closure(closure, target_host, build_host)
-        assert mock_run.call_args_list == [
+        mock_run.assert_has_calls(
+            [
             call(
                 ["nix-copy-closure", "--from", "user@build.host", closure],
                 extra_env=extra_env,
@@ -282,6 +288,7 @@ def test_copy_closure(monkeypatch: MonkeyPatch) -> None:
                 extra_env=extra_env,
             ),
         ]
+        )
@patch(get_qualified_name(n.run_wrapper, n), autospec=True) @patch(get_qualified_name(n.run_wrapper, n), autospec=True)
@@ -289,7 +296,7 @@ def test_edit(mock_run: Mock, monkeypatch: MonkeyPatch, tmpdir: Path) -> None:
     # Flake
     flake = m.Flake.parse(f"{tmpdir}#attr")
     n.edit(flake, {"commit_lock_file": True})
-    assert mock_run.call_args == call(
+    mock_run.assert_called_with(
         [
             "nix",
             "--extra-experimental-features",
@@ -311,7 +318,7 @@ def test_edit(mock_run: Mock, monkeypatch: MonkeyPatch, tmpdir: Path) -> None:
         mp.setenv("EDITOR", "editor")
         n.edit(None)
-        assert mock_run.call_args == call(["editor", default_nix], check=False)
+        mock_run.assert_called_with(["editor", default_nix], check=False)
@patch( @patch(
@@ -334,7 +341,7 @@ def test_get_build_image_variants(mock_run: Mock, tmp_path: Path) -> None:
         "azure": "nixos-image-azure-25.05.20250102.6df2492-x86_64-linux.vhd",
         "vmware": "nixos-image-vmware-25.05.20250102.6df2492-x86_64-linux.vmdk",
     }
-    assert mock_run.call_args == call(
+    mock_run.assert_called_with(
         [
             "nix-instantiate",
             "--eval",
@@ -352,14 +359,12 @@ def test_get_build_image_variants(mock_run: Mock, tmp_path: Path) -> None:
         stdout=PIPE,
     )
 
-    mock_run.reset_mock()
-
     build_attr = m.BuildAttr(Path(tmp_path), "preAttr")
     assert n.get_build_image_variants(build_attr, {"inst_flag": True}) == {
         "azure": "nixos-image-azure-25.05.20250102.6df2492-x86_64-linux.vhd",
         "vmware": "nixos-image-vmware-25.05.20250102.6df2492-x86_64-linux.vmdk",
     }
-    assert mock_run.call_args == call(
+    mock_run.assert_called_with(
         [
             "nix-instantiate",
             "--eval",
@@ -399,7 +404,7 @@ def test_get_build_image_variants_flake(mock_run: Mock) -> None:
         "azure": "nixos-image-azure-25.05.20250102.6df2492-x86_64-linux.vhd",
         "vmware": "nixos-image-vmware-25.05.20250102.6df2492-x86_64-linux.vmdk",
     }
-    assert mock_run.call_args == call(
+    mock_run.assert_called_with(
         [
             "nix",
             "eval",
@@ -424,7 +429,7 @@ def test_get_nixpkgs_rev() -> None:
         side_effect=[CompletedProcess([], 0, "")],
     ) as mock_run:
         assert n.get_nixpkgs_rev(path) is None
-        assert mock_run.call_args == call(
+        mock_run.assert_called_with(
             ["git", "-C", path, "rev-parse", "--short", "HEAD"],
             check=False,
             capture_output=True,
@@ -451,7 +456,7 @@ def test_get_nixpkgs_rev() -> None:
         ],
     ) as mock_run:
         assert n.get_nixpkgs_rev(path) == ".git.0f7c82403fd6"
-        assert mock_run.call_args_list == expected_calls
+        mock_run.assert_has_calls(expected_calls)
 
     with patch(
         get_qualified_name(n.run_wrapper, n),
@@ -462,7 +467,7 @@ def test_get_nixpkgs_rev() -> None:
         ],
     ) as mock_run:
         assert n.get_nixpkgs_rev(path) == ".git.0f7c82403fd6M"
-        assert mock_run.call_args_list == expected_calls
+        mock_run.assert_has_calls(expected_calls)
def test_get_generations(tmp_path: Path) -> None: def test_get_generations(tmp_path: Path) -> None:
@@ -503,7 +508,7 @@ def test_get_generations_from_nix_env(tmp_path: Path) -> None:
         m.Generation(id=2083, current=False, timestamp="2024-11-07 22:59:41"),
         m.Generation(id=2084, current=True, timestamp="2024-11-07 23:54:17"),
     ]
-    assert mock_run.call_args == call(
+    mock_run.assert_called_with(
         ["nix-env", "-p", path, "--list-generations"],
         stdout=PIPE,
         remote=None,
@@ -521,7 +526,7 @@ def test_get_generations_from_nix_env(tmp_path: Path) -> None:
         m.Generation(id=2083, current=False, timestamp="2024-11-07 22:59:41"),
         m.Generation(id=2084, current=True, timestamp="2024-11-07 23:54:17"),
     ]
-    assert mock_run.call_args == call(
+    mock_run.assert_called_with(
         ["nix-env", "-p", path, "--list-generations"],
         stdout=PIPE,
         remote=remote,
@ -573,14 +578,12 @@ def test_list_generations(mock_get_generations: Mock, tmp_path: Path) -> None:
@patch(get_qualified_name(n.run_wrapper, n), autospec=True) @patch(get_qualified_name(n.run_wrapper, n), autospec=True)
def test_repl(mock_run: Mock) -> None: def test_repl(mock_run: Mock) -> None:
n.repl("attr", m.BuildAttr("<nixpkgs/nixos>", None), {"nix_flag": True}) n.repl("attr", m.BuildAttr("<nixpkgs/nixos>", None), {"nix_flag": True})
assert mock_run.call_args == call( mock_run.assert_called_with(
["nix", "repl", "--file", "<nixpkgs/nixos>", "--nix-flag"] ["nix", "repl", "--file", "<nixpkgs/nixos>", "--nix-flag"]
) )
n.repl("attr", m.BuildAttr(Path("file.nix"), "myAttr")) n.repl("attr", m.BuildAttr(Path("file.nix"), "myAttr"))
assert mock_run.call_args == call( mock_run.assert_called_with(["nix", "repl", "--file", Path("file.nix"), "myAttr"])
["nix", "repl", "--file", Path("file.nix"), "myAttr"]
)
@patch(get_qualified_name(n.run_wrapper, n), autospec=True) @patch(get_qualified_name(n.run_wrapper, n), autospec=True)
@@ -599,7 +602,7 @@ def test_rollback(mock_run: Mock, tmp_path: Path) -> None:
     profile = m.Profile("system", path)
 
     assert n.rollback(profile, None, False) == profile.path
-    assert mock_run.call_args == call(
+    mock_run.assert_called_with(
         ["nix-env", "--rollback", "-p", path],
         remote=None,
         sudo=False,
@@ -607,7 +610,7 @@ def test_rollback(mock_run: Mock, tmp_path: Path) -> None:
     target_host = m.Remote("user@localhost", [], None)
     assert n.rollback(profile, target_host, True) == profile.path
-    assert mock_run.call_args == call(
+    mock_run.assert_called_with(
         ["nix-env", "--rollback", "-p", path],
         remote=target_host,
         sudo=True,
@ -619,10 +622,8 @@ def test_rollback_temporary_profile(tmp_path: Path) -> None:
path.touch() path.touch()
profile = m.Profile("system", path) profile = m.Profile("system", path)
with patch( with patch(get_qualified_name(n.run_wrapper, n), autospec=True) as mock_run:
get_qualified_name(n.run_wrapper, n), mock_run.return_value = CompletedProcess(
autospec=True,
return_value=CompletedProcess(
[], [],
0, 0,
stdout=textwrap.dedent("""\ stdout=textwrap.dedent("""\
@ -630,13 +631,12 @@ def test_rollback_temporary_profile(tmp_path: Path) -> None:
2083 2024-11-07 22:59:41 2083 2024-11-07 22:59:41
2084 2024-11-07 23:54:17 (current) 2084 2024-11-07 23:54:17 (current)
"""), """),
), )
) as mock_run:
assert ( assert (
n.rollback_temporary_profile(m.Profile("system", path), None, False) n.rollback_temporary_profile(m.Profile("system", path), None, False)
== path.parent / "system-2083-link" == path.parent / "system-2083-link"
) )
assert mock_run.call_args == call( mock_run.assert_called_with(
[ [
"nix-env", "nix-env",
"-p", "-p",
@ -653,7 +653,7 @@ def test_rollback_temporary_profile(tmp_path: Path) -> None:
n.rollback_temporary_profile(m.Profile("foo", path), target_host, True) n.rollback_temporary_profile(m.Profile("foo", path), target_host, True)
== path.parent / "foo-2083-link" == path.parent / "foo-2083-link"
) )
assert mock_run.call_args == call( mock_run.assert_called_with(
[ [
"nix-env", "nix-env",
"-p", "-p",
@ -665,11 +665,8 @@ def test_rollback_temporary_profile(tmp_path: Path) -> None:
sudo=True, sudo=True,
) )
with patch( with patch(get_qualified_name(n.run_wrapper, n), autospec=True) as mock_run:
get_qualified_name(n.run_wrapper, n), mock_run.return_value = CompletedProcess([], 0, stdout="")
autospec=True,
return_value=CompletedProcess([], 0, stdout=""),
) as mock_run:
assert n.rollback_temporary_profile(profile, None, False) is None assert n.rollback_temporary_profile(profile, None, False) is None
@ -684,7 +681,7 @@ def test_set_profile(mock_run: Mock) -> None:
sudo=False, sudo=False,
) )
assert mock_run.call_args == call( mock_run.assert_called_with(
["nix-env", "-p", profile_path, "--set", config_path], ["nix-env", "-p", profile_path, "--set", config_path],
remote=None, remote=None,
sudo=False, sudo=False,
@ -707,7 +704,7 @@ def test_switch_to_configuration(mock_run: Mock, monkeypatch: MonkeyPatch) -> No
specialisation=None, specialisation=None,
install_bootloader=False, install_bootloader=False,
) )
assert mock_run.call_args == call( mock_run.assert_called_with(
[profile_path / "bin/switch-to-configuration", "switch"], [profile_path / "bin/switch-to-configuration", "switch"],
extra_env={"NIXOS_INSTALL_BOOTLOADER": "0"}, extra_env={"NIXOS_INSTALL_BOOTLOADER": "0"},
sudo=False, sudo=False,
@ -741,7 +738,7 @@ def test_switch_to_configuration(mock_run: Mock, monkeypatch: MonkeyPatch) -> No
install_bootloader=True, install_bootloader=True,
specialisation="special", specialisation="special",
) )
assert mock_run.call_args == call( mock_run.assert_called_with(
[ [
config_path / "specialisation/special/bin/switch-to-configuration", config_path / "specialisation/special/bin/switch-to-configuration",
"test", "test",
@ -765,14 +762,14 @@ def test_switch_to_configuration(mock_run: Mock, monkeypatch: MonkeyPatch) -> No
def test_upgrade_channels(mock_is_dir: Mock, mock_glob: Mock) -> None: def test_upgrade_channels(mock_is_dir: Mock, mock_glob: Mock) -> None:
with patch(get_qualified_name(n.run_wrapper, n), autospec=True) as mock_run: with patch(get_qualified_name(n.run_wrapper, n), autospec=True) as mock_run:
n.upgrade_channels(False) n.upgrade_channels(False)
assert mock_run.call_args == call(["nix-channel", "--update", "nixos"], check=False) mock_run.assert_called_once_with(["nix-channel", "--update", "nixos"], check=False)
mock_run.reset_mock()
with patch(get_qualified_name(n.run_wrapper, n), autospec=True) as mock_run: with patch(get_qualified_name(n.run_wrapper, n), autospec=True) as mock_run:
n.upgrade_channels(True) n.upgrade_channels(True)
assert mock_run.call_args_list == [ mock_run.assert_has_calls(
[
call(["nix-channel", "--update", "nixos"], check=False), call(["nix-channel", "--update", "nixos"], check=False),
call(["nix-channel", "--update", "nixos-hardware"], check=False), call(["nix-channel", "--update", "nixos-hardware"], check=False),
call(["nix-channel", "--update", "home-manager"], check=False), call(["nix-channel", "--update", "home-manager"], check=False),
] ]
)

View file

@ -1,4 +1,5 @@
from unittest.mock import Mock, call, patch from typing import Any
from unittest.mock import patch
from pytest import MonkeyPatch from pytest import MonkeyPatch
@ -9,9 +10,9 @@ from .helpers import get_qualified_name
@patch(get_qualified_name(p.subprocess.run), autospec=True) @patch(get_qualified_name(p.subprocess.run), autospec=True)
def test_run(mock_run: Mock) -> None: def test_run(mock_run: Any) -> None:
p.run_wrapper(["test", "--with", "flags"], check=True) p.run_wrapper(["test", "--with", "flags"], check=True)
assert mock_run.call_args == call( mock_run.assert_called_with(
["test", "--with", "flags"], ["test", "--with", "flags"],
check=True, check=True,
text=True, text=True,
@ -27,7 +28,7 @@ def test_run(mock_run: Mock) -> None:
sudo=True, sudo=True,
extra_env={"FOO": "bar"}, extra_env={"FOO": "bar"},
) )
assert mock_run.call_args == call( mock_run.assert_called_with(
["sudo", "test", "--with", "flags"], ["sudo", "test", "--with", "flags"],
check=False, check=False,
text=True, text=True,
@ -44,7 +45,7 @@ def test_run(mock_run: Mock) -> None:
check=True, check=True,
remote=m.Remote("user@localhost", ["--ssh", "opt"], "password"), remote=m.Remote("user@localhost", ["--ssh", "opt"], "password"),
) )
assert mock_run.call_args == call( mock_run.assert_called_with(
[ [
"ssh", "ssh",
"--ssh", "--ssh",
@ -70,7 +71,7 @@ def test_run(mock_run: Mock) -> None:
extra_env={"FOO": "bar"}, extra_env={"FOO": "bar"},
remote=m.Remote("user@localhost", ["--ssh", "opt"], "password"), remote=m.Remote("user@localhost", ["--ssh", "opt"], "password"),
) )
assert mock_run.call_args == call( mock_run.assert_called_with(
[ [
"ssh", "ssh",
"--ssh", "--ssh",

View file

@ -13,7 +13,7 @@ let
escapeExpect = lib.strings.escapeNixString; escapeExpect = lib.strings.escapeNixString;
expectSetup = '' expectSetup = ''
set timeout 180 set timeout 300
proc expect_simple { pattern } { proc expect_simple { pattern } {
puts "Expecting: $pattern" puts "Expecting: $pattern"
expect { expect {
@ -76,7 +76,7 @@ runCommand "test-nixos-rebuild-repl"
expect ${writeText "test-nixos-rebuild-repl-expect" '' expect ${writeText "test-nixos-rebuild-repl-expect" ''
${expectSetup} ${expectSetup}
spawn nixos-rebuild repl --fast spawn nixos-rebuild repl --no-reexec
expect "nix-repl> " expect "nix-repl> "
@ -116,7 +116,7 @@ runCommand "test-nixos-rebuild-repl"
expect ${writeText "test-nixos-rebuild-repl-absolute-path-expect" '' expect ${writeText "test-nixos-rebuild-repl-absolute-path-expect" ''
${expectSetup} ${expectSetup}
spawn sh -c "nixos-rebuild repl --fast --flake path:\$HOME#testconf" spawn sh -c "nixos-rebuild repl --no-reexec --flake path:\$HOME#testconf"
expect_simple "nix-repl>" expect_simple "nix-repl>"
@ -146,7 +146,7 @@ runCommand "test-nixos-rebuild-repl"
pushd "$HOME" pushd "$HOME"
expect ${writeText "test-nixos-rebuild-repl-relative-path-expect" '' expect ${writeText "test-nixos-rebuild-repl-relative-path-expect" ''
${expectSetup} ${expectSetup}
spawn sh -c "nixos-rebuild repl --fast --flake .#testconf" spawn sh -c "nixos-rebuild repl --no-reexec --flake .#testconf"
expect_simple "nix-repl>" expect_simple "nix-repl>"

View file

@ -25,7 +25,6 @@ stdenv.mkDerivation rec {
meta = with lib; { meta = with lib; {
description = "New GNU Portable Threads Library"; description = "New GNU Portable Threads Library";
mainProgram = "npth-config";
longDescription = '' longDescription = ''
This is a library to provide the GNU Pth API and thus a non-preemptive This is a library to provide the GNU Pth API and thus a non-preemptive
threads implementation. threads implementation.

View file

@ -6,11 +6,11 @@
let let
pname = "nrfconnect"; pname = "nrfconnect";
version = "4.4.1"; version = "5.1.0";
src = fetchurl { src = fetchurl {
url = "https://nsscprodmedia.blob.core.windows.net/prod/software-and-other-downloads/desktop-software/nrf-connect-for-desktop/${lib.versions.major version}-${lib.versions.minor version}-${lib.versions.patch version}/nrfconnect-${version}-x86_64.appimage"; url = "https://nsscprodmedia.blob.core.windows.net/prod/software-and-other-downloads/desktop-software/nrf-connect-for-desktop/${lib.versions.major version}-${lib.versions.minor version}-${lib.versions.patch version}/nrfconnect-${version}-x86_64.appimage";
hash = "sha256-x/vVSOEajuQtLATRXk8DVLlXHegCqP+acecaOFNeBb8="; hash = "sha256-QEoKIdi8tlZ86langbCYJXSO+dGONBEQPdwmREIhZBA=";
name = "${pname}-${version}.AppImage"; name = "${pname}-${version}.AppImage";
}; };
@ -22,7 +22,9 @@ in
appimageTools.wrapType2 { appimageTools.wrapType2 {
inherit pname version src; inherit pname version src;
extraPkgs = pkgs: [ pkgs.segger-jlink ]; extraPkgs = pkgs: [
pkgs.segger-jlink-headless
];
extraInstallCommands = '' extraInstallCommands = ''
install -Dm444 ${appimageContents}/nrfconnect.desktop -t $out/share/applications install -Dm444 ${appimageContents}/nrfconnect.desktop -t $out/share/applications
@ -32,12 +34,12 @@ appimageTools.wrapType2 {
--replace 'Exec=AppRun' 'Exec=nrfconnect' --replace 'Exec=AppRun' 'Exec=nrfconnect'
''; '';
meta = with lib; { meta = {
description = "Nordic Semiconductor nRF Connect for Desktop"; description = "Nordic Semiconductor nRF Connect for Desktop";
homepage = "https://www.nordicsemi.com/Products/Development-tools/nRF-Connect-for-desktop"; homepage = "https://www.nordicsemi.com/Products/Development-tools/nRF-Connect-for-desktop";
license = licenses.unfree; license = lib.licenses.unfree;
platforms = platforms.linux; platforms = lib.platforms.linux;
maintainers = with maintainers; [ stargate01 ]; maintainers = with lib.maintainers; [ stargate01 ];
mainProgram = "nrfconnect"; mainProgram = "nrfconnect";
}; };
} }
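The nrfconnect change above follows a cleanup pattern that recurs throughout this merge: meta blocks drop the "with lib;" prefix and qualify each attribute explicitly. A minimal sketch of the target style for a hypothetical package (all names, URLs and hashes below are placeholders, not taken from this merge):

  { lib, stdenv, fetchurl }:

  stdenv.mkDerivation rec {
    pname = "example";
    version = "1.0";

    src = fetchurl {
      url = "https://example.org/${pname}-${version}.tar.gz";
      hash = lib.fakeHash; # placeholder hash
    };

    meta = {
      description = "Hypothetical package illustrating the fully qualified meta style";
      homepage = "https://example.org";
      license = lib.licenses.mit;      # instead of licenses.mit under "with lib;"
      platforms = lib.platforms.linux; # instead of platforms.linux
      maintainers = [ ];               # fill in real maintainers
      mainProgram = "example";
    };
  }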

View file

@ -7,16 +7,16 @@
buildGoModule rec { buildGoModule rec {
pname = "nuclei"; pname = "nuclei";
version = "3.3.8"; version = "3.3.9";
src = fetchFromGitHub { src = fetchFromGitHub {
owner = "projectdiscovery"; owner = "projectdiscovery";
repo = "nuclei"; repo = "nuclei";
tag = "v${version}"; tag = "v${version}";
hash = "sha256-RL6/H1X6+rt9n1rpeRpKv+u3SloOnRX6YzMKDDQw+78="; hash = "sha256-9P8KSuhTI/m0m51PUTZGU+qRbnT3izPbHTzsqZNbMJE=";
}; };
vendorHash = "sha256-k4seYTUO7BmU2HhTWweDRfNnXp+HshWM1riSc9BbYYg="; vendorHash = "sha256-CTdB/+aVaXKqtiwHn8pgmhXjZ0mIDrmLvnKmisExi74=";
proxyVendor = true; # hash mismatch between Linux and Darwin proxyVendor = true; # hash mismatch between Linux and Darwin

View file

@ -7,19 +7,19 @@
}: }:
let let
pname = "open-webui"; pname = "open-webui";
version = "0.5.11"; version = "0.5.12";
src = fetchFromGitHub { src = fetchFromGitHub {
owner = "open-webui"; owner = "open-webui";
repo = "open-webui"; repo = "open-webui";
tag = "v${version}"; tag = "v${version}";
hash = "sha256-U+zY/Jgzo52x/H4xcW2/LjM52r+hdJvZ/xsIeAeJniE="; hash = "sha256-+Hg4tyfmgfh3k/pUKMjs7IRahPV2/LRUDj1kt2g45Dw=";
}; };
frontend = buildNpmPackage { frontend = buildNpmPackage {
inherit pname version src; inherit pname version src;
npmDepsHash = "sha256-bAzcNLMB8OqzYRfw9Cr0xuFFl4FIKvBQT/4M2nZP0C8="; npmDepsHash = "sha256-pM8Ie3kkjVq9OJHKpGLQ1E/omd84B0N8lXAHKxUa8/4=";
# Disabling `pyodide:fetch` as it downloads packages during `buildPhase` # Disabling `pyodide:fetch` as it downloads packages during `buildPhase`
# Until this is solved, running python packages from the browser will not work. # Until this is solved, running python packages from the browser will not work.
@ -89,8 +89,6 @@ python312.pkgs.buildPythonApplication rec {
fake-useragent fake-useragent
fastapi fastapi
faster-whisper faster-whisper
flask
flask-cors
fpdf2 fpdf2
ftfy ftfy
gcp-storage-emulator gcp-storage-emulator

View file

@ -17,11 +17,11 @@
stdenv.mkDerivation rec { stdenv.mkDerivation rec {
pname = "photoqt"; pname = "photoqt";
version = "4.8"; version = "4.8.1";
src = fetchurl { src = fetchurl {
url = "https://photoqt.org/pkgs/photoqt-${version}.tar.gz"; url = "https://photoqt.org/pkgs/photoqt-${version}.tar.gz";
hash = "sha256-ccSbG5MTIyVJFqNHstaW53BfsGmN/I4ObCZfY0h22QE="; hash = "sha256-Iq5Fc0v+EYFe1YG3ZhZKl8leXD+TpGGhaQjr800vz7Y=";
}; };
nativeBuildInputs = [ nativeBuildInputs = [

View file

@ -75,13 +75,13 @@ let
in in
buildGoModule rec { buildGoModule rec {
pname = "podman"; pname = "podman";
version = "5.3.1"; version = "5.4.0";
src = fetchFromGitHub { src = fetchFromGitHub {
owner = "containers"; owner = "containers";
repo = "podman"; repo = "podman";
rev = "v${version}"; rev = "v${version}";
hash = "sha256-kABP10QX4r11UDUcd6Sukb+9+LRm/ba3iATz6DTOJYw="; hash = "sha256-iEO4njjNByLkhXFLgZ8tO8M8RkwT+Lb0zyfedQDHcNc=";
}; };
patches = [ patches = [
@ -91,15 +91,6 @@ buildGoModule rec {
# we intentionally don't build and install the helper so we shouldn't display messages to users about it # we intentionally don't build and install the helper so we shouldn't display messages to users about it
./rm-podman-mac-helper-msg.patch ./rm-podman-mac-helper-msg.patch
# backport of fix for https://github.com/containers/storage/issues/2184
# https://github.com/containers/storage/pull/2185
(fetchpatch2 {
url = "https://github.com/containers/storage/commit/99b0d2d423c8093807d8a1464437152cd04d7d95.diff?full_index=1";
hash = "sha256-aahYXnDf3qCOlb6MfVDqFKCcQG257r5sbh5qnL0T40I=";
stripLen = 1;
extraPrefix = "vendor/github.com/containers/storage/";
})
]; ];
vendorHash = null; vendorHash = null;

File diff suppressed because it is too large

View file

@ -2,61 +2,43 @@
lib, lib,
rustPlatform, rustPlatform,
fetchCrate, fetchCrate,
jq,
moreutils,
stdenv, stdenv,
darwin, darwin,
versionCheckHook,
nix-update-script,
}: }:
rustPlatform.buildRustPackage rec { rustPlatform.buildRustPackage rec {
pname = "samply"; pname = "samply";
version = "0.12.0"; version = "0.13.1";
src = fetchCrate { src = fetchCrate {
inherit pname version; inherit pname version;
hash = "sha256-7bf1lDIZGhRpvnn8rHNwzH2GBY8CwtYCjuRAUTQgbsA="; hash = "sha256-zTwAsE6zXY3esO7x6UTCO2DbzdUSKZ6qc5Rr9qcI+Z8=";
}; };
# Can't use fetchCargoVendor: useFetchCargoVendor = true;
# https://github.com/NixOS/nixpkgs/issues/377986 cargoHash = "sha256-mQykzO9Ldokd3PZ1fY4pK/GtLmYMVas2iHj1Pqi9WqQ=";
cargoLock.lockFile = ./Cargo.lock;
# the dependencies linux-perf-data and linux-perf-event-reader contains both README.md and Readme.md,
# which causes a hash mismatch on systems with a case-insensitive filesystem
# this removes the readme files and updates cargo's checksum file accordingly
depsExtraArgs = {
nativeBuildInputs = [
jq
moreutils
];
postBuild = ''
for crate in linux-perf-data linux-perf-event-reader; do
pushd $name/$crate
rm -f README.md Readme.md
jq 'del(.files."README.md") | del(.files."Readme.md")' \
.cargo-checksum.json -c \
| sponge .cargo-checksum.json
popd
done
'';
};
buildInputs = lib.optionals stdenv.hostPlatform.isDarwin [ buildInputs = lib.optionals stdenv.hostPlatform.isDarwin [
darwin.apple_sdk.frameworks.CoreServices darwin.apple_sdk.frameworks.CoreServices
]; ];
meta = with lib; { nativeInstallCheckInputs = [ versionCheckHook ];
versionCheckProgramArg = "--version";
doInstallCheck = true;
passthru.updateScript = nix-update-script { };
meta = {
description = "Command line profiler for macOS and Linux"; description = "Command line profiler for macOS and Linux";
mainProgram = "samply";
homepage = "https://github.com/mstange/samply"; homepage = "https://github.com/mstange/samply";
changelog = "https://github.com/mstange/samply/releases/tag/samply-v${version}"; changelog = "https://github.com/mstange/samply/releases/tag/samply-v${version}";
license = with licenses; [ license = with lib.licenses; [
asl20 asl20
mit mit
]; ];
maintainers = with maintainers; [ figsoda ]; maintainers = with lib.maintainers; [ figsoda ];
mainProgram = "samply";
}; };
} }
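The samply bump above also illustrates the packaging idioms this merge moves towards: vendoring driven by useFetchCargoVendor plus a cargoHash instead of a checked-in Cargo.lock workaround, an installed-binary smoke test via versionCheckHook, and an automatic update script. A rough sketch of that combination for a hypothetical crate (pname, hashes and description are placeholders):

  { lib, rustPlatform, fetchCrate, versionCheckHook, nix-update-script }:

  rustPlatform.buildRustPackage rec {
    pname = "example-tool";
    version = "0.1.0";

    src = fetchCrate {
      inherit pname version;
      hash = lib.fakeHash; # placeholder
    };

    # vendor crate dependencies via fetchCargoVendor rather than a vendored lockfile
    useFetchCargoVendor = true;
    cargoHash = lib.fakeHash; # placeholder

    # run "example-tool --version" against the installed binary as a smoke test
    nativeInstallCheckInputs = [ versionCheckHook ];
    versionCheckProgramArg = "--version";
    doInstallCheck = true;

    passthru.updateScript = nix-update-script { };

    meta = {
      description = "Hypothetical CLI used to illustrate the pattern";
      license = lib.licenses.mit;
      mainProgram = "example-tool";
    };
  }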

View file

@ -16,7 +16,7 @@
}: }:
let let
version = "1.80.0"; version = "1.80.2";
in in
buildGo123Module { buildGo123Module {
pname = "tailscale"; pname = "tailscale";
@ -31,7 +31,7 @@ buildGo123Module {
owner = "tailscale"; owner = "tailscale";
repo = "tailscale"; repo = "tailscale";
rev = "v${version}"; rev = "v${version}";
hash = "sha256-wb52Ffoh56EEVToGGK1Rzfb5DHiR2dLxDJRLcUgYhFg="; hash = "sha256-5HGY9hVSnzqmAdXNJdQ+ZvsK/PmyZ94201UHlHclQE8=";
}; };
patches = [ patches = [
@ -43,7 +43,7 @@ buildGo123Module {
}) })
]; ];
vendorHash = "sha256-a+d02h0AXqr2FuWRAOUACiYVSpm276onkwKxGSJTL5s="; vendorHash = "sha256-81UOjoC5GJqhNs4vWcQ2/B9FMaDWtl0rbuFXmxbu5dI=";
nativeBuildInputs = lib.optionals stdenv.hostPlatform.isLinux [ makeWrapper ] ++ [ nativeBuildInputs = lib.optionals stdenv.hostPlatform.isLinux [ makeWrapper ] ++ [
installShellFiles installShellFiles

View file

@ -0,0 +1,11 @@
--- a/src/Common/CpuCore.c
+++ b/src/Common/CpuCore.c
@@ -96,7 +96,7 @@
derive_key_whirlpool ( word, wordlength+1, salt, PKCS5_SALT_SIZE, 1000, headerKey, cpu_GetMaxPkcs5OutSize ());
else{
perror("Key derivation function not supported");
- return;
+ return 0;
}
value=cpu_Xts(encryptionAlgorithm,encryptedHeader,headerKey,cpu_GetMaxPkcs5OutSize(), masterKey, &length);

View file

@ -6,6 +6,7 @@
config, config,
cudaSupport ? config.cudaSupport, cudaSupport ? config.cudaSupport,
pkg-config, pkg-config,
versionCheckHook,
}: }:
gccStdenv.mkDerivation rec { gccStdenv.mkDerivation rec {
@ -15,10 +16,14 @@ gccStdenv.mkDerivation rec {
src = fetchFromGitLab { src = fetchFromGitLab {
owner = "kalilinux"; owner = "kalilinux";
repo = "packages/truecrack"; repo = "packages/truecrack";
rev = "debian/${version}+git20150326-0kali1"; tag = "kali/${version}+git20150326-0kali4";
sha256 = "+Rw9SfaQtO1AJO6UVVDMCo8DT0dYEbv7zX8SI+pHCRQ="; hash = "sha256-d6ld6KHSqYM4RymHf5qcm2AWK6FHWC0rFaLRfIQ2m5Q=";
}; };
patches = [
./fix-empty-return.patch
];
configureFlags = ( configureFlags = (
if cudaSupport then if cudaSupport then
[ [
@ -38,24 +43,54 @@ gccStdenv.mkDerivation rec {
cudatoolkit cudatoolkit
]; ];
env.NIX_CFLAGS_COMPILE = toString ([
# Workaround build failure on -fno-common toolchains like upstream # Workaround build failure on -fno-common toolchains like upstream
# gcc-10. Otherwise build fails as: # gcc-10. Otherwise build fails as:
# ld: CpuAes.o:/build/source/src/Crypto/CpuAes.h:1233: multiple definition of # ld: CpuAes.o:/build/source/src/Crypto/CpuAes.h:1233: multiple definition of
# `t_rc'; CpuCore.o:/build/source/src/Crypto/CpuAes.h:1237: first defined here # `t_rc'; CpuCore.o:/build/source/src/Crypto/CpuAes.h:1237: first defined here
# TODO: remove on upstream fixes it: # TODO: remove on upstream fixes it:
# https://gitlab.com/kalilinux/packages/truecrack/-/issues/1 # https://gitlab.com/kalilinux/packages/truecrack/-/issues/1
env.NIX_CFLAGS_COMPILE = "-fcommon"; "-fcommon"
# Functions are declared after they are used in the file; this is an error since gcc-14.
# Common/Crypto.c:42:13: error: implicit declaration of function 'cpu_CipherInit'; did you mean 'CipherInit'? []
# https://gitlab.com/kalilinux/packages/truecrack/-/commit/5b0e3a96b747013bded7b33f65bb42be2dbafc86
"-Wno-error=implicit-function-declaration"
]);
installFlags = [ "prefix=$(out)" ];
enableParallelBuilding = true; enableParallelBuilding = true;
meta = with lib; { installFlags = [ "prefix=$(out)" ];
doInstallCheck = true;
installCheckPhase = ''
runHook preInstallCheck
echo "Cracking test volumes"
$out/bin/${meta.mainProgram} -t test/ripemd160_aes.test.tc -w test/passwords.txt | grep -aF "Found password"
$out/bin/${meta.mainProgram} -t test/ripemd160_aes.test.tc -c test/tes -m 4 | grep -aF "Found password"
$out/bin/${meta.mainProgram} -t test/ripemd160_aes.test.tc -w test/passwords.txt | grep -aF "Found password"
$out/bin/${meta.mainProgram} -t test/whirlpool_aes.test.tc -w test/passwords.txt -k whirlpool | grep -aF "Found password"
$out/bin/${meta.mainProgram} -t test/sha512_aes.test.tc -w test/passwords.txt -k sha512 | grep -aF "Found password"
$out/bin/${meta.mainProgram} -t test/ripemd160_aes.test.tc -w test/passwords.txt | grep -aF "Found password"
$out/bin/${meta.mainProgram} -t test/ripemd160_serpent.test.tc -w test/passwords.txt -e serpent | grep -aF "Found password"
$out/bin/${meta.mainProgram} -t test/ripemd160_twofish.test.tc -w test/passwords.txt -e twofish | grep -aF "Found password"
echo "Finished cracking test volumes"
runHook postInstallCheck
'';
nativeInstallCheckInputs = [
versionCheckHook
];
meta = {
description = "Brute-force password cracker for TrueCrypt volumes, optimized for Nvidia Cuda technology"; description = "Brute-force password cracker for TrueCrypt volumes, optimized for Nvidia Cuda technology";
mainProgram = "truecrack"; mainProgram = "truecrack";
homepage = "https://gitlab.com/kalilinux/packages/truecrack"; homepage = "https://gitlab.com/kalilinux/packages/truecrack";
broken = cudaSupport; broken = cudaSupport;
license = licenses.gpl3Plus; license = lib.licenses.gpl3Plus;
platforms = platforms.unix; platforms = lib.platforms.unix;
maintainers = with maintainers; [ ethancedwards8 ]; maintainers = with lib.maintainers; [ ethancedwards8 ];
}; };
} }
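The truecrack update above collects its compiler workarounds into a single env.NIX_CFLAGS_COMPILE list. A small sketch of that pattern as it would sit inside a stdenv.mkDerivation call (the flags are placeholders standing in for whatever workarounds a package needs; lib and stdenv are assumed to be in scope):

  env.NIX_CFLAGS_COMPILE = toString (
    [
      # unconditional workaround flag (placeholder)
      "-fcommon"
    ]
    ++ lib.optionals stdenv.cc.isGNU [
      # extra flags only needed when building with GCC (placeholder)
      "-Wno-error=implicit-function-declaration"
    ]
  );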

File diff suppressed because it is too large

View file

@ -1,48 +1,57 @@
{ {
lib, lib,
rustPlatform,
libiconv,
stdenv, stdenv,
installShellFiles, rustPlatform,
darwin,
fetchFromGitHub, fetchFromGitHub,
installShellFiles,
buildPackages,
writableTmpDirAsHomeHook,
versionCheckHook,
nix-update-script,
}: }:
rustPlatform.buildRustPackage rec { rustPlatform.buildRustPackage rec {
pname = "volta"; pname = "volta";
version = "1.1.1"; version = "2.0.2";
src = fetchFromGitHub { src = fetchFromGitHub {
owner = "volta-cli"; owner = "volta-cli";
repo = "volta"; repo = "volta";
rev = "v${version}"; tag = "v${version}";
hash = "sha256-+j3WRpunV+3YfZnyuKA/CsiKr+gOaP2NbmnyoGMN+Mg="; hash = "sha256-ZI+3/Xbkg/JaZMLhrJEjaSwjs44fOaiRReM2DUTnkkc=";
}; };
cargoLock = { useFetchCargoVendor = true;
lockFile = ./Cargo.lock; cargoHash = "sha256-xlqsubkaX2A6d5MIcGf9E0b11Gzneksgku0jvW+UdbE=";
outputHashes = {
"detect-indent-0.1.0" = "sha256-qtPkPaBiyuT8GhpEFdU7IkAgKnCbTES0FB2CvNKWqic=";
"semver-0.9.0" = "sha256-nw1somkZe9Qi36vjfWlTcDqHAIbaJj72KBTfmucVxXs=";
"semver-parser-0.10.0" = "sha256-iTGnKSddsriF6JS6lvJNjp9aDzGtfjrHEiCijeie3uE=";
};
};
buildInputs = buildInputs = [ installShellFiles ];
[ installShellFiles ]
++ lib.optionals stdenv.hostPlatform.isDarwin [ postInstall =
darwin.apple_sdk.frameworks.Security let
libiconv emulator = stdenv.hostPlatform.emulator buildPackages;
in
''
installShellCompletion --cmd volta \
--bash <(${emulator} $out/bin/volta completions bash) \
--fish <(${emulator} $out/bin/volta completions fish) \
--zsh <(${emulator} $out/bin/volta completions zsh)
'';
nativeCheckInputs = [
writableTmpDirAsHomeHook
]; ];
HOME = "$TMPDIR"; nativeInstallCheckInputs = [
versionCheckHook
];
versionCheckProgramArg = [ "--version" ];
# Tries to create /var/empty/.volta as $HOME is not writable
doInstallCheck = !stdenv.hostPlatform.isDarwin;
postInstall = lib.optionalString (stdenv.buildPlatform.canExecute stdenv.hostPlatform) '' passthru = {
installShellCompletion --cmd volta \ updateScript = nix-update-script { };
--bash <($out/bin/volta completions bash) \ };
--fish <($out/bin/volta completions fish) \
--zsh <($out/bin/volta completions zsh) meta = {
'';
meta = with lib; {
description = "Hassle-Free JavaScript Tool Manager"; description = "Hassle-Free JavaScript Tool Manager";
longDescription = '' longDescription = ''
With Volta, you can select a Node engine once and then stop worrying With Volta, you can select a Node engine once and then stop worrying
@ -56,7 +65,8 @@ rustPlatform.buildRustPackage rec {
''; '';
homepage = "https://volta.sh/"; homepage = "https://volta.sh/";
changelog = "https://github.com/volta-cli/volta/blob/main/RELEASES.md"; changelog = "https://github.com/volta-cli/volta/blob/main/RELEASES.md";
license = with licenses; [ bsd2 ]; license = with lib.licenses; [ bsd2 ];
maintainers = with maintainers; [ fbrs ]; maintainers = with lib.maintainers; [ fbrs ];
mainProgram = "volta";
}; };
} }
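The volta rewrite above regenerates shell completions by running the just-built binary through the host platform's emulator, so the step keeps working when cross compiling. A minimal sketch of that postInstall step for a hypothetical program called example, assuming installShellFiles provides installShellCompletion and that the tool has a completions subcommand:

  postInstall =
    let
      # resolves to an emulator such as qemu-user when cross compiling,
      # and to a plain native runner otherwise
      emulator = stdenv.hostPlatform.emulator buildPackages;
    in
    ''
      installShellCompletion --cmd example \
        --bash <(${emulator} $out/bin/example completions bash) \
        --fish <(${emulator} $out/bin/example completions fish) \
        --zsh <(${emulator} $out/bin/example completions zsh)
    '';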

View file

@ -1,7 +1,7 @@
{ {
lib, lib,
stdenv, stdenv,
fetchFromSourcehut, fetchFromGitLab,
meson, meson,
ninja, ninja,
pkg-config, pkg-config,
@ -11,13 +11,14 @@
stdenv.mkDerivation rec { stdenv.mkDerivation rec {
pname = "wlr-randr"; pname = "wlr-randr";
version = "0.4.1"; version = "0.5.0";
src = fetchFromSourcehut { src = fetchFromGitLab {
owner = "~emersion"; domain = "gitlab.freedesktop.org";
repo = pname; owner = "emersion";
repo = "wlr-randr";
rev = "v${version}"; rev = "v${version}";
hash = "sha256-2kWTVAi4hq2d9jQ6yBLVzm3x7n/oSvBdZ45WyjhXhc4="; hash = "sha256-lHOGpY0IVnR8QdSqJbtIA4FkhmQ/zDiFNqqXyj8iw/s=";
}; };
strictDeps = true; strictDeps = true;
@ -34,7 +35,7 @@ stdenv.mkDerivation rec {
meta = with lib; { meta = with lib; {
description = "Xrandr clone for wlroots compositors"; description = "Xrandr clone for wlroots compositors";
homepage = "https://git.sr.ht/~emersion/wlr-randr"; homepage = "https://gitlab.freedesktop.org/emersion/wlr-randr";
license = licenses.mit; license = licenses.mit;
maintainers = with maintainers; [ ma27 ]; maintainers = with maintainers; [ ma27 ];
platforms = platforms.linux; platforms = platforms.linux;

View file

@ -68,11 +68,20 @@ stdenv.mkDerivation (finalAttrs: {
nativeBuildInputs = [ autoreconfHook ]; nativeBuildInputs = [ autoreconfHook ];
env.NIX_CFLAGS_COMPILE = toString (
[
# For some reason libxml2 package headers are in subdirectory and thus aren't # picked up by stdenv's C compiler wrapper (see ccWrapper_addCVars). This
# picked up by stdenv's C compiler wrapper (see ccWrapper_addCVars). This # doesn't really belong here and either should be part of libxml2 package or
# doesn't really belong here and either should be part of libxml2 package or # libxml2 in Nixpkgs can just fix their header paths.
# libxml2 in Nixpkgs can just fix their header paths. # For some reason libxml2 package headers are in subdirectory and thus aren't
env.NIX_CFLAGS_COMPILE = "-isystem ${libxml2.dev}/include/libxml2"; "-isystem ${libxml2.dev}/include/libxml2"
]
++ lib.optionals stdenv.cc.isGNU [
# fix build on GCC 14
"-Wno-error=implicit-function-declaration"
"-Wno-error=incompatible-pointer-types"
]
);
buildInputs = buildInputs =
[ [

View file

@ -1,53 +0,0 @@
From 03d6f704d07aa3650a2f59be6f7802a8735460c3 Mon Sep 17 00:00:00 2001
From: Lang Hames <lhames@gmail.com>
Date: Wed, 29 Jan 2025 03:58:29 +0000
Subject: [PATCH] [ORC][LLI] Remove redundant eh-frame registration plugin
construction from lli.
As of d0052ebbe2e the setUpGenericLLVMIRPlatform function will automatically
add an instance of the EHFrameRegistrationPlugin (for LLJIT instances whose
object linking layers are ObjectLinkingLayers, not RTDyldObjectLinkingLayers).
This commit removes the redundant plugin creation in the object linking
layer constructor function in lli.cpp to prevent duplicate registration of
eh-frames, which is likely the cause of recent bot failures, e.g.
https://lab.llvm.org/buildbot/#/builders/108/builds/8685.
(cherry picked from commit 9052b37ab1aa67a039b34356f37236fecc42bac2)
---
llvm/tools/lli/lli.cpp | 14 ++++----------
1 file changed, 4 insertions(+), 10 deletions(-)
diff --git a/llvm/tools/lli/lli.cpp b/tools/lli/lli.cpp
index 448660a539a0b0..19246f03941673 100644
--- a/llvm/tools/lli/lli.cpp
+++ b/tools/lli/lli.cpp
@@ -27,9 +27,7 @@
#include "llvm/ExecutionEngine/Orc/AbsoluteSymbols.h"
#include "llvm/ExecutionEngine/Orc/DebugUtils.h"
#include "llvm/ExecutionEngine/Orc/Debugging/DebuggerSupport.h"
-#include "llvm/ExecutionEngine/Orc/EHFrameRegistrationPlugin.h"
#include "llvm/ExecutionEngine/Orc/EPCDynamicLibrarySearchGenerator.h"
-#include "llvm/ExecutionEngine/Orc/EPCEHFrameRegistrar.h"
#include "llvm/ExecutionEngine/Orc/EPCGenericRTDyldMemoryManager.h"
#include "llvm/ExecutionEngine/Orc/ExecutionUtils.h"
#include "llvm/ExecutionEngine/Orc/IRPartitionLayer.h"
@@ -1033,14 +1031,10 @@ int runOrcJIT(const char *ProgName) {
Builder.getJITTargetMachineBuilder()
->setRelocationModel(Reloc::PIC_)
.setCodeModel(CodeModel::Small);
- Builder.setObjectLinkingLayerCreator([&P](orc::ExecutionSession &ES,
- const Triple &TT) {
- auto L = std::make_unique<orc::ObjectLinkingLayer>(ES);
- if (P != LLJITPlatform::ExecutorNative)
- L->addPlugin(std::make_unique<orc::EHFrameRegistrationPlugin>(
- ES, ExitOnErr(orc::EPCEHFrameRegistrar::Create(ES))));
- return L;
- });
+ Builder.setObjectLinkingLayerCreator(
+ [&](orc::ExecutionSession &ES, const Triple &TT) {
+ return std::make_unique<orc::ObjectLinkingLayer>(ES);
+ });
}
auto J = ExitOnErr(Builder.create());

View file

@ -500,10 +500,16 @@ let
}) })
] ]
++ ++
lib.optional (lib.versions.major metadata.release_version == "20") lib.optional (lib.versionAtLeast metadata.release_version "20")
# Fix OrcJIT # Fix OrcJIT tests with page sizes > 16k
# PR: https://github.com/llvm/llvm-project/pull/125431 # PR: https://github.com/llvm/llvm-project/pull/127115
(metadata.getVersionFile "llvm/orcjit.patch"); (
fetchpatch {
url = "https://github.com/llvm/llvm-project/commit/415607e10b56d0e6c4661ff1ec5b9b46bf433cba.patch";
stripLen = 1;
hash = "sha256-vBbuduJB+NnNE9qtR93k64XKrwvc7w3vowjL/aT+iEA=";
}
);
pollyPatches = pollyPatches =
[ (metadata.getVersionFile "llvm/gnu-install-dirs-polly.patch") ] [ (metadata.getVersionFile "llvm/gnu-install-dirs-polly.patch") ]
++ lib.optional (lib.versionAtLeast metadata.release_version "15") ++ lib.optional (lib.versionAtLeast metadata.release_version "15")
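The LLVM change above replaces a vendored patch file with a fetchpatch of the upstream commit. A generic sketch of that idiom with placeholder URL and hash; stripLen and extraPrefix rewrite the paths when the patch was produced in a subdirectory of a larger repository:

  patches = [
    (fetchpatch {
      # placeholder upstream commit; append ?full_index=1 on GitHub for a stable diff
      url = "https://github.com/example/project/commit/<commit-sha>.patch";
      # drop one leading path component; add extraPrefix = "some/dir/" to re-root the paths
      stripLen = 1;
      hash = lib.fakeHash; # placeholder, replace with the real hash reported by the build
    })
  ];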

View file

@ -14,7 +14,6 @@
ninja, ninja,
isFullBuild ? true, isFullBuild ? true,
linuxHeaders, linuxHeaders,
fetchpatch,
}: }:
let let
pname = "libc"; pname = "libc";
@ -28,26 +27,12 @@ let
''); '');
in in
stdenv.mkDerivation (finalAttrs: { stdenv.mkDerivation (finalAttrs: {
inherit pname version; inherit pname version patches;
src = src'; src = src';
sourceRoot = "${finalAttrs.src.name}/runtimes"; sourceRoot = "${finalAttrs.src.name}/runtimes";
patches =
lib.optional (lib.versions.major version == "20")
# Removes invalid token from the LLVM version being placed in the namespace.
# Can be removed when LLVM 20 bumps to rc2.
# PR: https://github.com/llvm/llvm-project/pull/126284
(
fetchpatch {
url = "https://github.com/llvm/llvm-project/commit/3a3a3230d171e11842a9940b6da0f72022b1c5b3.patch";
stripLen = 1;
hash = "sha256-QiU1cWp+027ZZNVdvfGVwbIoRd9jqtSbftGsmaW1gig=";
}
)
++ patches;
nativeBuildInputs = nativeBuildInputs =
[ [
cmake cmake
@ -75,7 +60,7 @@ stdenv.mkDerivation (finalAttrs: {
''; '';
postInstall = lib.optionalString (!isFullBuild) '' postInstall = lib.optionalString (!isFullBuild) ''
substituteAll ${./libc-shim.so} $out/lib/libc.so substituteAll ${./libc-shim.tpl} $out/lib/libc.so
''; '';
libc = if (!isFullBuild) then stdenv.cc.libc else null; libc = if (!isFullBuild) then stdenv.cc.libc else null;

View file

@ -30,7 +30,7 @@ let
"17.0.6".officialRelease.sha256 = "sha256-8MEDLLhocshmxoEBRSKlJ/GzJ8nfuzQ8qn0X/vLA+ag="; "17.0.6".officialRelease.sha256 = "sha256-8MEDLLhocshmxoEBRSKlJ/GzJ8nfuzQ8qn0X/vLA+ag=";
"18.1.8".officialRelease.sha256 = "sha256-iiZKMRo/WxJaBXct9GdAcAT3cz9d9pnAcO1mmR6oPNE="; "18.1.8".officialRelease.sha256 = "sha256-iiZKMRo/WxJaBXct9GdAcAT3cz9d9pnAcO1mmR6oPNE=";
"19.1.7".officialRelease.sha256 = "sha256-cZAB5vZjeTsXt9QHbP5xluWNQnAHByHtHnAhVDV0E6I="; "19.1.7".officialRelease.sha256 = "sha256-cZAB5vZjeTsXt9QHbP5xluWNQnAHByHtHnAhVDV0E6I=";
"20.1.0-rc1".officialRelease.sha256 = "sha256-yOczbperlR20+iLoao9g0CR+Ml2mjTCx1cqP/9WOhME="; "20.1.0-rc2".officialRelease.sha256 = "sha256-lBx+MWfYBM6XSJozacALMGlo0DUUWqnsBQyO8lDljSo=";
"21.0.0-git".gitRelease = { "21.0.0-git".gitRelease = {
rev = "c9f1d2cbf18990311ea1287cc154e3784a10a3b0"; rev = "c9f1d2cbf18990311ea1287cc154e3784a10a3b0";
rev-version = "21.0.0-unstable-2025-02-10"; rev-version = "21.0.0-unstable-2025-02-10";

View file

@ -0,0 +1,31 @@
{
lib,
fetchurl,
ocaml,
buildDunePackage,
cppo,
ounit2,
}:
buildDunePackage rec {
pname = "arg-complete";
version = "0.2.1";
src = fetchurl {
url = "https://github.com/sim642/ocaml-arg-complete/releases/download/${version}/arg-complete-${version}.tbz";
hash = "sha256-SZvLaeeqY3j2LUvqxGs0Vw57JnnpdvAk1jnE3pk27QU=";
};
nativeBuildInputs = [ cppo ];
doCheck = lib.versionAtLeast ocaml.version "4.08";
checkInputs = [ ounit2 ];
meta = {
description = "Bash completion support for OCaml Stdlib.Arg";
homepage = "https://sim642.github.io/ocaml-arg-complete/";
changelog = "https://raw.githubusercontent.com/sim642/ocaml-arg-complete/refs/tags/${version}/CHANGELOG.md";
license = lib.licenses.mit;
maintainers = [ lib.maintainers.vbgl ];
};
}

View file

@ -11,6 +11,7 @@
ocaml, ocaml,
menhir, menhir,
apron, apron,
arg-complete,
camlidl, camlidl,
yojson, yojson,
zarith, zarith,
@ -18,15 +19,15 @@
buildDunePackage rec { buildDunePackage rec {
pname = "mopsa"; pname = "mopsa";
version = "1.0"; version = "1.1";
minimalOCamlVersion = "4.12"; minimalOCamlVersion = "4.13";
src = fetchFromGitLab { src = fetchFromGitLab {
owner = "mopsa"; owner = "mopsa";
repo = "mopsa-analyzer"; repo = "mopsa-analyzer";
rev = "v${version}"; tag = "v${version}";
hash = "sha256-nGnWwV7g3SYgShbXGUMooyOdFwXFrQHnQvlc8x9TAS4="; hash = "sha256-lO5dtGAl1dq8oJco/hPXrAbN05rKc62Zrci/8CLrQ0c=";
}; };
nativeBuildInputs = [ nativeBuildInputs = [
@ -36,6 +37,7 @@ buildDunePackage rec {
]; ];
buildInputs = [ buildInputs = [
arg-complete
camlidl camlidl
flint flint
libclang libclang

View file

@ -6,7 +6,7 @@
buildDunePackage rec { buildDunePackage rec {
pname = "qcheck-core"; pname = "qcheck-core";
version = "0.22"; version = "0.23";
minimalOCamlVersion = "4.08"; minimalOCamlVersion = "4.08";
@ -14,7 +14,7 @@ buildDunePackage rec {
owner = "c-cube"; owner = "c-cube";
repo = "qcheck"; repo = "qcheck";
rev = "v${version}"; rev = "v${version}";
hash = "sha256-JXnrfce/V7Bdu8uH98ZJCLjIHZoONiQ02ltFx6Fbvhg="; hash = "sha256-tH7NFpAFKOb0jXxLK+zNOIZS9TSORKXe8FuwY13iEUY=";
}; };
meta = { meta = {

View file

@ -9,10 +9,10 @@
buildDunePackage rec { buildDunePackage rec {
pname = "seqes"; pname = "seqes";
version = "0.2"; version = "0.4";
src = fetchurl { src = fetchurl {
url = "https://gitlab.com/nomadic-labs/seqes/-/archive/${version}/seqes-${version}.tar.gz"; url = "https://gitlab.com/raphael-proust/seqes/-/archive/${version}/seqes-${version}.tar.gz";
sha256 = "sha256-IxLA0jaIPdX9Zn/GL8UHDJYjA1UBW6leGbZmp64YMjI="; hash = "sha256-E4BalN68CJP7u6NSC0XBooWvUeSNqV+3KEOtoJ4g/dM=";
}; };
minimalOCamlVersion = "4.14"; minimalOCamlVersion = "4.14";

View file

@ -13,9 +13,6 @@
setuptools-scm, setuptools-scm,
playwright-driver, playwright-driver,
nixosTests, nixosTests,
writeText,
runCommand,
pythonPackages,
nodejs, nodejs,
}: }:
@ -84,16 +81,6 @@ buildPythonPackage rec {
pyee pyee
]; ];
setupHook = writeText "setupHook.sh" ''
addBrowsersPath () {
if [[ ! -v PLAYWRIGHT_BROWSERS_PATH ]] ; then
export PLAYWRIGHT_BROWSERS_PATH="${playwright-driver.browsers}"
fi
}
addEnvHooks "$targetOffset" addBrowsersPath
'';
postInstall = '' postInstall = ''
ln -s ${driver} $out/${python.sitePackages}/playwright/driver ln -s ${driver} $out/${python.sitePackages}/playwright/driver
''; '';
@ -109,9 +96,6 @@ buildPythonPackage rec {
{ {
driver = playwright-driver; driver = playwright-driver;
browsers = playwright-driver.browsers; browsers = playwright-driver.browsers;
env = runCommand "playwright-env-test" {
buildInputs = [ pythonPackages.playwright ];
} "python ${./test.py}";
} }
// lib.optionalAttrs stdenv.hostPlatform.isLinux { // lib.optionalAttrs stdenv.hostPlatform.isLinux {
inherit (nixosTests) playwright-python; inherit (nixosTests) playwright-python;

View file

@ -1,10 +0,0 @@
import os
import sys
from playwright.sync_api import sync_playwright
with sync_playwright() as p:
browser = p.chromium.launch()
context = browser.new_context()
with open(os.environ["out"], "w") as f:
f.write("OK")

View file

@ -10,14 +10,14 @@
buildPythonPackage rec { buildPythonPackage rec {
pname = "pywikibot"; pname = "pywikibot";
version = "9.6.1"; version = "9.6.2";
format = "setuptools"; format = "setuptools";
disabled = pythonOlder "3.7"; disabled = pythonOlder "3.7";
src = fetchPypi { src = fetchPypi {
inherit pname version; inherit pname version;
hash = "sha256-736LEUwW1LofS1105TxVWHMGFaEpQGwa+WGIk2OQxmA="; hash = "sha256-iPmQxOJmc9Ms8UhK43HrYgyyvu0g4/hO8bmO39AXOTo=";
}; };
propagatedBuildInputs = [ propagatedBuildInputs = [

View file

@ -16,8 +16,8 @@ let
hash = "sha256-hHIWjD4f0L/yh+aUsFP8y78gV5o/+VJrYzO+q432Wo0="; hash = "sha256-hHIWjD4f0L/yh+aUsFP8y78gV5o/+VJrYzO+q432Wo0=";
}; };
"10" = { "10" = {
version = "10.2.1"; version = "10.4.0";
hash = "sha256-+Yjw2TuH4dotjN9qx/RaAcb4Q642BrTKDy/9cTuF+XU="; hash = "sha256-5X6KVE96hCR8+nfdbZI+rlGZo3NHTlPqsfVAx5Yok4Y=";
}; };
}; };

View file

@ -12,7 +12,6 @@
makeFontsConf, makeFontsConf,
makeWrapper, makeWrapper,
runCommand, runCommand,
writeText,
cacert, cacert,
}: }:
let let
@ -189,27 +188,9 @@ let
runHook postInstall runHook postInstall
''; '';
setupHook = writeText "setupHook.sh" ''
addBrowsersPath () {
if [[ ! -v PLAYWRIGHT_BROWSERS_PATH ]] ; then
export PLAYWRIGHT_BROWSERS_PATH="${playwright-core.passthru.browsers}"
fi
}
addEnvHooks "$targetOffset" addBrowsersPath
'';
meta = playwright.meta // { meta = playwright.meta // {
mainProgram = "playwright"; mainProgram = "playwright";
}; };
passthru.tests.env = runCommand "playwright-core-env-test" {
buildInputs = [
nodejs
playwright-core
playwright-test
];
} "node ${./test.js}";
}); });
browsers = lib.makeOverridable ( browsers = lib.makeOverridable (

View file

@ -1,8 +0,0 @@
const playwright = require('playwright');
const fs = require('fs');
playwright.chromium.launch()
.then((browser) => {
console.log('OK');
fs.writeFileSync(process.env.out, '');
process.exit(0);
});

View file

@ -17,13 +17,13 @@
let let
data = stdenv.mkDerivation (finalAttrs: { data = stdenv.mkDerivation (finalAttrs: {
pname = "path-of-building-data"; pname = "path-of-building-data";
version = "2.50.0"; version = "2.51.0";
src = fetchFromGitHub { src = fetchFromGitHub {
owner = "PathOfBuildingCommunity"; owner = "PathOfBuildingCommunity";
repo = "PathOfBuilding"; repo = "PathOfBuilding";
rev = "v${finalAttrs.version}"; rev = "v${finalAttrs.version}";
hash = "sha256-mclbLRYFNWgn/f4CyaINJlLq06uWh0+ks82Lger4w9w="; hash = "sha256-Rau3UaWPyaI7QBXCNVtIQSenyNsx5hh2dsd3q8jFjc4=";
}; };
nativeBuildInputs = [ unzip ]; nativeBuildInputs = [ unzip ];

View file

@ -29,10 +29,10 @@
}: }:
let let
defaultVersion = "2024.10"; defaultVersion = "2025.01";
defaultSrc = fetchurl { defaultSrc = fetchurl {
url = "https://ftp.denx.de/pub/u-boot/u-boot-${defaultVersion}.tar.bz2"; url = "https://ftp.denx.de/pub/u-boot/u-boot-${defaultVersion}.tar.bz2";
hash = "sha256-so2vSsF+QxVjYweL9RApdYQTf231D87ZsS3zT2GpL7A="; hash = "sha256-ze99UHyT8bvZ8BXqm8IfoHQmhIFAVQGUWrxvhU1baG8=";
}; };
# Dependencies for the tools need to be included as either native or cross, # Dependencies for the tools need to be included as either native or cross,

View file

@ -186,6 +186,6 @@ python3.pkgs.buildPythonApplication rec {
changelog = "https://github.com/element-hq/synapse/releases/tag/v${version}"; changelog = "https://github.com/element-hq/synapse/releases/tag/v${version}";
description = "Matrix reference homeserver"; description = "Matrix reference homeserver";
license = licenses.agpl3Plus; license = licenses.agpl3Plus;
maintainers = teams.matrix.members; maintainers = with lib.maintainers; teams.matrix.members ++ [ sumnerevans ];
}; };
} }

View file

@ -59,14 +59,14 @@ let
in in
{ {
nextcloud29 = generic { nextcloud29 = generic {
version = "29.0.11"; version = "29.0.12";
hash = "sha256-UGf8F91zICzC39m5ccp7uUy5UEghRgJ9rGILEjweztE="; hash = "sha256-wCA1T/Ph0ghzcPcOBY/hcXE2NroPBzpRlK29/zwcr8Y=";
packages = nextcloud29Packages; packages = nextcloud29Packages;
}; };
nextcloud30 = generic { nextcloud30 = generic {
version = "30.0.5"; version = "30.0.6";
hash = "sha256-JIxubmEs7usXDE0luFebCvDmYTq9+gfy/mmTQmt4G+o="; hash = "sha256-rA4JG+aSCWXcDILxSbYy1rWt563uhKezyM/YR0UKjdw=";
packages = nextcloud30Packages; packages = nextcloud30Packages;
}; };

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View file

@ -128,6 +128,11 @@ stdenv.mkDerivation (finalAttrs: {
libdbus = dbus.lib; libdbus = dbus.lib;
inherit hwdata; inherit hwdata;
}) })
# Fix crash when starting hidden
# Upstream PR: https://github.com/flightlessmango/MangoHud/pull/1570
# FIXME: remove when merged
./fix-crash.patch
]; ];
postPatch = '' postPatch = ''

View file

@ -0,0 +1,40 @@
From f0d7e4f4b2d362d90bb81d0b10ef5c505b9661ea Mon Sep 17 00:00:00 2001
From: K900 <me@0upti.me>
Date: Fri, 14 Feb 2025 11:41:09 +0300
Subject: [PATCH] mangoapp: don't crash if gpus is not initialized yet
This seems to happen on startup on Steam Deck style gamescope-session setups.
Just check for gpus = null before trying to access it.
---
src/app/main.cpp | 10 ++++++----
1 file changed, 6 insertions(+), 4 deletions(-)
diff --git a/src/app/main.cpp b/src/app/main.cpp
index 0c7c13e07e..4d1d3b1277 100644
--- a/src/app/main.cpp
+++ b/src/app/main.cpp
@@ -369,8 +369,9 @@ int main(int, char**)
XSync(x11_display, 0);
mangoapp_paused = false;
// resume all GPU threads
- for (auto gpu : gpus->available_gpus)
- gpu->resume();
+ if (gpus)
+ for (auto gpu : gpus->available_gpus)
+ gpu->resume();
}
{
std::unique_lock<std::mutex> lk(mangoapp_m);
@@ -409,8 +410,9 @@ int main(int, char**)
XSync(x11_display, 0);
mangoapp_paused = true;
// pause all GPUs threads
- for (auto gpu : gpus->available_gpus)
- gpu->pause();
+ if (gpus)
+ for (auto gpu : gpus->available_gpus)
+ gpu->pause();
// If mangoapp is hidden, using mangoapp_cv.wait() causes a hang.
// Because of this hang, we can't detect if the user presses R_SHIFT + F12,

View file

@ -25,6 +25,7 @@ in
tables-test.ikarus.sps tables-test.ikarus.sps
lazy.sps lazy.sps
pipeline-operators.sps pipeline-operators.sps
os-environment-variables.sps
' '
''; '';
}) })
@ -45,7 +46,9 @@ in
src = akku.src; src = akku.src;
}) })
# not a tar archive # not a tar archive
(pkg: old: removeAttrs old [ "unpackPhase" ]) (pkg: old: {
unpackPhase = null;
})
]; ];
machine-code = pkg: old: { machine-code = pkg: old: {

View file

@ -38,6 +38,8 @@ let
apron = callPackage ../development/ocaml-modules/apron { }; apron = callPackage ../development/ocaml-modules/apron { };
arg-complete = callPackage ../development/ocaml-modules/arg-complete { };
arp = callPackage ../development/ocaml-modules/arp { }; arp = callPackage ../development/ocaml-modules/arp { };
asai = callPackage ../development/ocaml-modules/asai { }; asai = callPackage ../development/ocaml-modules/asai { };