0
0
Fork 0
mirror of https://github.com/NixOS/nixpkgs.git synced 2025-07-13 21:50:33 +03:00

Merge master into staging-next

This commit is contained in:
nixpkgs-ci[bot] 2025-02-14 12:05:56 +00:00 committed by GitHub
commit 43a5de6cbb
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
67 changed files with 1554 additions and 5755 deletions

View file

@ -253,6 +253,7 @@ pkgs/development/python-modules/buildcatrust/ @ajs124 @lukegb @mweinelt
/nixos/tests/postgresql @NixOS/postgres
# MySQL/MariaDB and related stuff
/nixos/modules/services/databases/mysql.nix @6543
/nixos/modules/services/backup/mysql-backup.nix @6543
# Hardened profile & related modules

View file

@ -770,7 +770,6 @@ with lib.maintainers;
mguentner
ralith
dandellion
sumnerevans
nickcao
teutat3s
];

View file

@ -1,4 +1,9 @@
{ config, lib, pkgs, ... }:
{
config,
lib,
pkgs,
...
}:
let
cfg = config.services.mysql;
@ -8,8 +13,7 @@ let
# Oracle MySQL has supported "notify" service type since 8.0
hasNotify = isMariaDB || (isOracle && lib.versionAtLeast cfg.package.version "8.0");
mysqldOptions =
"--user=${cfg.user} --datadir=${cfg.dataDir} --basedir=${cfg.package}";
mysqldOptions = "--user=${cfg.user} --datadir=${cfg.dataDir} --basedir=${cfg.package}";
format = pkgs.formats.ini { listsAsDuplicateKeys = true; };
configFile = format.generate "my.cnf" cfg.settings;
@ -18,11 +22,31 @@ in
{
imports = [
(lib.mkRemovedOptionModule [ "services" "mysql" "pidDir" ] "Don't wait for pidfiles, describe dependencies through systemd.")
(lib.mkRemovedOptionModule [ "services" "mysql" "rootPassword" ] "Use socket authentication or set the password outside of the nix store.")
(lib.mkRemovedOptionModule [ "services" "mysql" "extraOptions" ] "Use services.mysql.settings.mysqld instead.")
(lib.mkRemovedOptionModule [ "services" "mysql" "bind" ] "Use services.mysql.settings.mysqld.bind-address instead.")
(lib.mkRemovedOptionModule [ "services" "mysql" "port" ] "Use services.mysql.settings.mysqld.port instead.")
(lib.mkRemovedOptionModule [
"services"
"mysql"
"pidDir"
] "Don't wait for pidfiles, describe dependencies through systemd.")
(lib.mkRemovedOptionModule [
"services"
"mysql"
"rootPassword"
] "Use socket authentication or set the password outside of the nix store.")
(lib.mkRemovedOptionModule [
"services"
"mysql"
"extraOptions"
] "Use services.mysql.settings.mysqld instead.")
(lib.mkRemovedOptionModule [
"services"
"mysql"
"bind"
] "Use services.mysql.settings.mysqld.bind-address instead.")
(lib.mkRemovedOptionModule [
"services"
"mysql"
"port"
] "Use services.mysql.settings.mysqld.port instead.")
];
###### interface
@ -106,7 +130,7 @@ in
settings = lib.mkOption {
type = format.type;
default = {};
default = { };
description = ''
MySQL configuration. Refer to
<https://dev.mysql.com/doc/refman/5.7/en/server-system-variables.html>,
@ -137,25 +161,27 @@ in
};
initialDatabases = lib.mkOption {
type = lib.types.listOf (lib.types.submodule {
options = {
name = lib.mkOption {
type = lib.types.str;
description = ''
The name of the database to create.
'';
type = lib.types.listOf (
lib.types.submodule {
options = {
name = lib.mkOption {
type = lib.types.str;
description = ''
The name of the database to create.
'';
};
schema = lib.mkOption {
type = lib.types.nullOr lib.types.path;
default = null;
description = ''
The initial schema of the database; if null (the default),
an empty database is created.
'';
};
};
schema = lib.mkOption {
type = lib.types.nullOr lib.types.path;
default = null;
description = ''
The initial schema of the database; if null (the default),
an empty database is created.
'';
};
};
});
default = [];
}
);
default = [ ];
description = ''
List of database names and their initial schemas that should be used to create databases on the first startup
of MySQL. The schema attribute is optional: If not specified, an empty database is created.
@ -176,7 +202,7 @@ in
ensureDatabases = lib.mkOption {
type = lib.types.listOf lib.types.str;
default = [];
default = [ ];
description = ''
Ensures that the specified databases exist.
This option will never delete existing databases, especially not when the value of this
@ -190,39 +216,41 @@ in
};
ensureUsers = lib.mkOption {
type = lib.types.listOf (lib.types.submodule {
options = {
name = lib.mkOption {
type = lib.types.str;
description = ''
Name of the user to ensure.
'';
};
ensurePermissions = lib.mkOption {
type = lib.types.attrsOf lib.types.str;
default = {};
description = ''
Permissions to ensure for the user, specified as attribute set.
The attribute names specify the database and tables to grant the permissions for,
separated by a dot. You may use wildcards here.
The attribute values specify the permissions to grant.
You may specify one or multiple comma-separated SQL privileges here.
type = lib.types.listOf (
lib.types.submodule {
options = {
name = lib.mkOption {
type = lib.types.str;
description = ''
Name of the user to ensure.
'';
};
ensurePermissions = lib.mkOption {
type = lib.types.attrsOf lib.types.str;
default = { };
description = ''
Permissions to ensure for the user, specified as attribute set.
The attribute names specify the database and tables to grant the permissions for,
separated by a dot. You may use wildcards here.
The attribute values specify the permissions to grant.
You may specify one or multiple comma-separated SQL privileges here.
For more information on how to specify the target
and on which privileges exist, see the
[GRANT syntax](https://mariadb.com/kb/en/library/grant/).
The attributes are used as `GRANT ''${attrName} ON ''${attrValue}`.
'';
example = lib.literalExpression ''
{
"database.*" = "ALL PRIVILEGES";
"*.*" = "SELECT, LOCK TABLES";
}
'';
For more information on how to specify the target
and on which privileges exist, see the
[GRANT syntax](https://mariadb.com/kb/en/library/grant/).
The attributes are used as `GRANT ''${attrName} ON ''${attrValue}`.
'';
example = lib.literalExpression ''
{
"database.*" = "ALL PRIVILEGES";
"*.*" = "SELECT, LOCK TABLES";
}
'';
};
};
};
});
default = [];
}
);
default = [ ];
description = ''
Ensures that the specified users exist and have at least the ensured permissions.
The MySQL users will be identified using Unix socket authentication. This authenticates the Unix user with the
@ -251,7 +279,11 @@ in
replication = {
role = lib.mkOption {
type = lib.types.enum [ "master" "slave" "none" ];
type = lib.types.enum [
"master"
"slave"
"none"
];
default = "none";
description = "Role of the MySQL server instance.";
};
@ -292,14 +324,13 @@ in
};
###### implementation
config = lib.mkIf cfg.enable {
services.mysql.dataDir =
lib.mkDefault (if lib.versionAtLeast config.system.stateVersion "17.09" then "/var/lib/mysql"
else "/var/mysql");
services.mysql.dataDir = lib.mkDefault (
if lib.versionAtLeast config.system.stateVersion "17.09" then "/var/lib/mysql" else "/var/mysql"
);
services.mysql.settings.mysqld = lib.mkMerge [
{
@ -311,7 +342,11 @@ in
log-bin-index = "mysql-bin-${toString cfg.replication.serverId}.index";
relay-log = "mysql-relay-bin";
server-id = cfg.replication.serverId;
binlog-ignore-db = [ "information_schema" "performance_schema" "mysql" ];
binlog-ignore-db = [
"information_schema"
"performance_schema"
"mysql"
];
})
(lib.mkIf (!isMariaDB) {
plugin-load-add = [ "auth_socket.so" ];
@ -355,17 +390,21 @@ in
pkgs.nettools
];
preStart = if isMariaDB then ''
if ! test -e ${cfg.dataDir}/mysql; then
${cfg.package}/bin/mysql_install_db --defaults-file=/etc/my.cnf ${mysqldOptions}
touch ${cfg.dataDir}/mysql_init
fi
'' else ''
if ! test -e ${cfg.dataDir}/mysql; then
${cfg.package}/bin/mysqld --defaults-file=/etc/my.cnf ${mysqldOptions} --initialize-insecure
touch ${cfg.dataDir}/mysql_init
fi
'';
preStart =
if isMariaDB then
''
if ! test -e ${cfg.dataDir}/mysql; then
${cfg.package}/bin/mysql_install_db --defaults-file=/etc/my.cnf ${mysqldOptions}
touch ${cfg.dataDir}/mysql_init
fi
''
else
''
if ! test -e ${cfg.dataDir}/mysql; then
${cfg.package}/bin/mysqld --defaults-file=/etc/my.cnf ${mysqldOptions} --initialize-insecure
touch ${cfg.dataDir}/mysql_init
fi
'';
script = ''
# https://mariadb.com/kb/en/getting-started-with-mariadb-galera-cluster/#systemd-and-galera-recovery
@ -379,52 +418,55 @@ in
exec ${cfg.package}/bin/mysqld --defaults-file=/etc/my.cnf ${mysqldOptions} $_WSREP_NEW_CLUSTER $_WSREP_START_POSITION
'';
postStart = let
# The super user account to use on *first* run of MySQL server
superUser = if isMariaDB then cfg.user else "root";
in ''
${lib.optionalString (!hasNotify) ''
# Wait until the MySQL server is available for use
while [ ! -e /run/mysqld/mysqld.sock ]
do
echo "MySQL daemon not yet started. Waiting for 1 second..."
sleep 1
done
''}
postStart =
let
# The super user account to use on *first* run of MySQL server
superUser = if isMariaDB then cfg.user else "root";
in
''
${lib.optionalString (!hasNotify) ''
# Wait until the MySQL server is available for use
while [ ! -e /run/mysqld/mysqld.sock ]
do
echo "MySQL daemon not yet started. Waiting for 1 second..."
sleep 1
done
''}
if [ -f ${cfg.dataDir}/mysql_init ]
then
# While MariaDB comes with a 'mysql' super user account since 10.4.x, MySQL does not
# Since we don't want to run this service as 'root' we need to ensure the account exists on first run
( echo "CREATE USER IF NOT EXISTS '${cfg.user}'@'localhost' IDENTIFIED WITH ${if isMariaDB then "unix_socket" else "auth_socket"};"
echo "GRANT ALL PRIVILEGES ON *.* TO '${cfg.user}'@'localhost' WITH GRANT OPTION;"
) | ${cfg.package}/bin/mysql -u ${superUser} -N
if [ -f ${cfg.dataDir}/mysql_init ]
then
# While MariaDB comes with a 'mysql' super user account since 10.4.x, MySQL does not
# Since we don't want to run this service as 'root' we need to ensure the account exists on first run
( echo "CREATE USER IF NOT EXISTS '${cfg.user}'@'localhost' IDENTIFIED WITH ${
if isMariaDB then "unix_socket" else "auth_socket"
};"
echo "GRANT ALL PRIVILEGES ON *.* TO '${cfg.user}'@'localhost' WITH GRANT OPTION;"
) | ${cfg.package}/bin/mysql -u ${superUser} -N
${lib.concatMapStrings (database: ''
# Create initial databases
if ! test -e "${cfg.dataDir}/${database.name}"; then
echo "Creating initial database: ${database.name}"
( echo 'create database `${database.name}`;'
${lib.concatMapStrings (database: ''
# Create initial databases
if ! test -e "${cfg.dataDir}/${database.name}"; then
echo "Creating initial database: ${database.name}"
( echo 'create database `${database.name}`;'
${lib.optionalString (database.schema != null) ''
echo 'use `${database.name}`;'
${lib.optionalString (database.schema != null) ''
echo 'use `${database.name}`;'
# TODO: this silently falls through if database.schema does not exist,
# we should catch this somehow and exit, but can't do it here because we're in a subshell.
if [ -f "${database.schema}" ]
then
cat ${database.schema}
elif [ -d "${database.schema}" ]
then
cat ${database.schema}/mysql-databases/*.sql
fi
''}
) | ${cfg.package}/bin/mysql -u ${superUser} -N
fi
'') cfg.initialDatabases}
# TODO: this silently falls through if database.schema does not exist,
# we should catch this somehow and exit, but can't do it here because we're in a subshell.
if [ -f "${database.schema}" ]
then
cat ${database.schema}
elif [ -d "${database.schema}" ]
then
cat ${database.schema}/mysql-databases/*.sql
fi
''}
) | ${cfg.package}/bin/mysql -u ${superUser} -N
fi
'') cfg.initialDatabases}
${lib.optionalString (cfg.replication.role == "master")
''
${lib.optionalString (cfg.replication.role == "master") ''
# Set up the replication master
( echo "use mysql;"
@ -434,8 +476,7 @@ in
) | ${cfg.package}/bin/mysql -u ${superUser} -N
''}
${lib.optionalString (cfg.replication.role == "slave")
''
${lib.optionalString (cfg.replication.role == "slave") ''
# Set up the replication slave
( echo "stop slave;"
@ -444,34 +485,36 @@ in
) | ${cfg.package}/bin/mysql -u ${superUser} -N
''}
${lib.optionalString (cfg.initialScript != null)
''
${lib.optionalString (cfg.initialScript != null) ''
# Execute initial script
# using toString to avoid copying the file to nix store if given as path instead of string,
# as it might contain credentials
cat ${toString cfg.initialScript} | ${cfg.package}/bin/mysql -u ${superUser} -N
''}
rm ${cfg.dataDir}/mysql_init
fi
rm ${cfg.dataDir}/mysql_init
fi
${lib.optionalString (cfg.ensureDatabases != []) ''
(
${lib.concatMapStrings (database: ''
echo "CREATE DATABASE IF NOT EXISTS \`${database}\`;"
'') cfg.ensureDatabases}
) | ${cfg.package}/bin/mysql -N
''}
${lib.optionalString (cfg.ensureDatabases != [ ]) ''
(
${lib.concatMapStrings (database: ''
echo "CREATE DATABASE IF NOT EXISTS \`${database}\`;"
'') cfg.ensureDatabases}
) | ${cfg.package}/bin/mysql -N
''}
${lib.concatMapStrings (user:
''
( echo "CREATE USER IF NOT EXISTS '${user.name}'@'localhost' IDENTIFIED WITH ${if isMariaDB then "unix_socket" else "auth_socket"};"
${lib.concatStringsSep "\n" (lib.mapAttrsToList (database: permission: ''
echo "GRANT ${permission} ON ${database} TO '${user.name}'@'localhost';"
'') user.ensurePermissions)}
${lib.concatMapStrings (user: ''
( echo "CREATE USER IF NOT EXISTS '${user.name}'@'localhost' IDENTIFIED WITH ${
if isMariaDB then "unix_socket" else "auth_socket"
};"
${lib.concatStringsSep "\n" (
lib.mapAttrsToList (database: permission: ''
echo "GRANT ${permission} ON ${database} TO '${user.name}'@'localhost';"
'') user.ensurePermissions
)}
) | ${cfg.package}/bin/mysql -N
'') cfg.ensureUsers}
'';
'';
serviceConfig = lib.mkMerge [
{
@ -500,7 +543,11 @@ in
ProtectKernelTunables = true;
ProtectKernelModules = true;
ProtectControlGroups = true;
RestrictAddressFamilies = [ "AF_UNIX" "AF_INET" "AF_INET6" ];
RestrictAddressFamilies = [
"AF_UNIX"
"AF_INET"
"AF_INET6"
];
LockPersonality = true;
MemoryDenyWriteExecute = true;
RestrictRealtime = true;
@ -516,4 +563,6 @@ in
];
};
};
meta.maintainers = [ lib.maintainers._6543 ];
}

View file

@ -1597,9 +1597,9 @@ in
};
meta = {
inherit (pkgs.matrix-synapse.meta) maintainers;
buildDocsInSandbox = false;
doc = ./synapse.md;
maintainers = teams.matrix.members;
};
}

View file

@ -16,7 +16,8 @@ in
{
imports = [
(mkRemovedOptionModule [
"settings"
"services"
"tabby"
"indexInterval"
] "These options are now managed within the tabby WebGUI")
];

View file

@ -193,7 +193,7 @@ in
"${binYggdrasil} -genconf") + " > /run/yggdrasil/yggdrasil.conf"}
# start yggdrasil
${binYggdrasil} -useconffile /run/yggdrasil/yggdrasil.conf ${lib.strings.escapeShellArgs cfg.extraArgs}
exec ${binYggdrasil} -useconffile /run/yggdrasil/yggdrasil.conf ${lib.strings.escapeShellArgs cfg.extraArgs}
'';
serviceConfig = {

View file

@ -245,7 +245,7 @@ that are managed by Nix. If you want automatic updates it is recommended that yo
## Known warnings {#module-services-nextcloud-known-warnings}
### Failed to get an iterator for log entries: Logreader application only supports "file" log_type {#module-services-nextcloud-warning-logreader}
### Logreader application only supports "file" log_type {#module-services-nextcloud-warning-logreader}
This is because
@ -253,16 +253,12 @@ This is because
* the Logreader application that allows reading logs in the admin panel is enabled
by default and requires logs written to a file.
The logreader application doesn't work, as it was the case before. The only change is that
it complains loudly now. So nothing actionable here by default. Alternatively you can
If you want to view logs in the admin panel,
set [](#opt-services.nextcloud.settings.log_type) to "file".
* disable the logreader application to shut up the "error".
We can't really do that by default since whether apps are enabled/disabled is part
of the application's state and tracked inside the database.
* set [](#opt-services.nextcloud.settings.log_type) to "file" to be able to view logs
from the admin panel.
If you prefer logs in the journal, disable the logreader application to shut up the
"info". We can't really do that by default since whether apps are enabled/disabled
is part of the application's state and tracked inside the database.
## Maintainer information {#module-services-nextcloud-maintainer-info}

View file

@ -29,10 +29,11 @@ let
};
};
default = {};
type = attrsWith' "config-name" (attrsWith' "tmpfiles-type" (attrsWith' "path" (types.submodule ({ name, config, ... }: {
type = attrsWith' "config-name" (attrsWith' "path" (attrsWith' "tmpfiles-type" (types.submodule ({ name, config, ... }: {
options.type = mkOption {
type = types.str;
default = name;
defaultText = "tmpfiles-type";
example = "d";
description = ''
The type of operation to perform on the file.

View file

@ -30,8 +30,8 @@ import ../make-test-python.nix (
in
{
name = "mjolnir";
meta = with pkgs.lib; {
maintainers = teams.matrix.members;
meta = {
inherit (pkgs.mjolnir.meta) maintainers;
};
nodes = {

View file

@ -2,8 +2,8 @@ import ../make-test-python.nix (
{ pkgs, ... }:
{
name = "matrix-synapse-workers";
meta = with pkgs.lib; {
maintainers = teams.matrix.members;
meta = {
inherit (pkgs.matrix-synapse.meta) maintainers;
};
nodes = {

View file

@ -54,8 +54,8 @@ import ../make-test-python.nix (
{
name = "matrix-synapse";
meta = with pkgs.lib; {
maintainers = teams.matrix.members;
meta = {
inherit (pkgs.matrix-synapse.meta) maintainers;
};
nodes = {

View file

@ -100,7 +100,7 @@ let
installPhase = ''
cp -r . $out
wrapProgram $out/bin/studio.sh \
wrapProgram $out/bin/studio \
--set-default JAVA_HOME "$out/jbr" \
--set ANDROID_EMULATOR_USE_SYSTEM_LIBS 1 \
--set QT_XKB_CONFIG_ROOT "${xkeyboard_config}/share/X11/xkb" \
@ -204,6 +204,7 @@ let
]
}"
'';
meta.mainProgram = "studio";
};
desktopItem = makeDesktopItem {
@ -279,7 +280,7 @@ let
unset ANDROID_HOME
fi
''}
exec ${fhsEnv}/bin/${drvName}-fhs-env ${androidStudio}/bin/studio.sh "$@"
exec ${fhsEnv}/bin/${drvName}-fhs-env ${lib.getExe androidStudio} "$@"
'';
preferLocalBuild = true;
allowSubstitutes = false;

View file

@ -36,22 +36,22 @@ let
sha256 =
{
x86_64-linux = "0gr2z4vzms6fv4kcc8dzc7l3inpb5hasnzdfr1zc2n4i3nl8z8vw";
x86_64-darwin = "1qplpjazjds5kns0kmp5qa6zfix30cqa93bl4bcpvblb2x9fh1v8";
aarch64-linux = "1jhrmwrnxzwvhqgfrs35kyd5hhg2b7dyq3p5k88jhm8607nkds79";
aarch64-darwin = "072lg4nvq3cdjzrwngaxnz9p952zkxsknsb39zjh55vzrij55g9x";
armv7l-linux = "06bvh72bq4ippr2k8ifcfqhkhhh6na4vxsz1k50swr1k2kzwwr5d";
x86_64-linux = "11a0y0zdz3mmc2xvpnlq06a7q06y6529xpp4hlhpjylj0bk06xn1";
x86_64-darwin = "12fxhwqcz36f5pv4kvs7bblmymxyixg7pvi0gb5k0j73pkvqrr6g";
aarch64-linux = "0g5qz7gq7k65p2f8iwz1jiy03nwsmy3v3gb18qwg9mbhm0dk59la";
aarch64-darwin = "1g4fz8nw5m7krjlsjs43937kz1sr7lkflbphpyh8cmalwpxa8ysn";
armv7l-linux = "09r12y9xbpqnnw9mab3k4kx0ngpfng1l6rk09n9l2q36ji20ijmy";
}
.${system} or throwSystem;
in
callPackage ./generic.nix rec {
# Please backport all compatible updates to the stable release.
# This is important for the extension ecosystem.
version = "1.97.1";
version = "1.97.2";
pname = "vscode" + lib.optionalString isInsiders "-insiders";
# This is used for VS Code - Remote SSH test
rev = "e249dada235c2083c83813bd65b7f4707fb97b76";
rev = "e54c774e0add60467559eb0d1e229c6452cf8447";
executableName = "code" + lib.optionalString isInsiders "-insiders";
longName = "Visual Studio Code" + lib.optionalString isInsiders " - Insiders";
@ -75,7 +75,7 @@ callPackage ./generic.nix rec {
src = fetchurl {
name = "vscode-server-${rev}.tar.gz";
url = "https://update.code.visualstudio.com/commit:${rev}/server-linux-x64/stable";
sha256 = "01snzahh794ygpgwh4r57c8mnisp6a4fc3v5x76cdhxw2hd9s26n";
sha256 = "15fd401sqmlkpw48pysqpyi5rlsqx4cm55bbwakhkal4qa1qnq4m";
};
stdenv = stdenvNoCC;
};

View file

@ -1092,6 +1092,15 @@
"spdx": "MPL-2.0",
"vendorHash": null
},
"sakuracloud": {
"hash": "sha256-KrzqIAK6ImUW22Iik97R4HARoXN4lG6AquitLjCqS/A=",
"homepage": "https://registry.terraform.io/providers/sacloud/sakuracloud",
"owner": "sacloud",
"repo": "terraform-provider-sakuracloud",
"rev": "v2.26.1",
"spdx": "Apache-2.0",
"vendorHash": "sha256-Ry791h5AuYP03nex9nM8X5Mk6PeL7hNDbFyVRvVPJNE="
},
"scaleway": {
"hash": "sha256-8aESalFQaA6Qwod4rDeUzrKe80rbHfVJZIKtLliKUME=",
"homepage": "https://registry.terraform.io/providers/scaleway/scaleway",

View file

@ -1,23 +1,24 @@
{
cacert,
cargo,
copyDesktopItems,
fetchFromGitHub,
fetchurl,
findutils,
jq,
lib,
makeDesktopItem,
makeWrapper,
rsync,
rustPlatform,
rustc,
stdenv,
stdenvNoCC,
yarn-berry,
zip,
fetchFromGitHub,
rustPlatform,
electron_33,
nodejs_20,
yarn-berry,
cacert,
writableTmpDirAsHomeHook,
cargo,
rustc,
findutils,
zip,
rsync,
jq,
copyDesktopItems,
makeWrapper,
makeDesktopItem,
nix-update-script,
buildType ? "stable",
commandLineArgs ? "",
}:
@ -34,213 +35,203 @@ let
electron = electron_33;
nodejs = nodejs_20;
yarn = yarn-berry.override { inherit nodejs; };
productName = if buildType != "stable" then "AFFiNE-${buildType}" else "AFFiNE";
binName = lib.toLower productName;
in
stdenv.mkDerivation (
finalAttrs:
(
{
productName = if buildType == "stable" then "AFFiNE" else "AFFiNE-" + buildType;
binName = lib.toLower finalAttrs.productName;
pname = finalAttrs.binName;
stdenv.mkDerivation (finalAttrs: {
pname = binName;
# https://github.com/toeverything/AFFiNE/releases/tag/v0.18.1
version = "0.18.1";
GITHUB_SHA = "8b066a4b398aace25a20508a8e3c1a381721971f";
src = fetchFromGitHub {
owner = "toeverything";
repo = "AFFiNE";
rev = finalAttrs.GITHUB_SHA;
hash = "sha256-TWwojG3lqQlQFX3BKoFjJ27a3T/SawXgNDO6fP6gW4k=";
};
version = "0.19.6";
src = fetchFromGitHub {
owner = "toeverything";
repo = "AFFiNE";
tag = "v${finalAttrs.version}";
hash = "sha256-BydTNE36oRIxr2lTnc2+EY0lvMXn4NTLB4EjqzhdjGk=";
};
meta =
{
description = "Workspace with fully merged docs, whiteboards and databases";
longDescription = ''
AFFiNE is an open-source, all-in-one workspace and an operating
system for all the building blocks that assemble your knowledge
base and much more -- wiki, knowledge management, presentation
and digital assets
'';
homepage = "https://affine.pro/";
license = lib.licenses.mit;
maintainers = with lib.maintainers; [ xiaoxiangmoe ];
platforms = [
"aarch64-darwin"
"aarch64-linux"
"x86_64-darwin"
"x86_64-linux"
];
sourceProvenance = [ lib.sourceTypes.fromSource ];
}
// lib.optionalAttrs hostPlatform.isLinux {
mainProgram = finalAttrs.binName;
};
env = {
BUILD_TYPE = buildType;
};
cargoDeps = rustPlatform.fetchCargoVendor {
src = finalAttrs.src;
hash = "sha256-5s/X9CD/H9rSn7SqMHioLg1KRP7y9fsozdFRY3hNiP8=";
};
yarnOfflineCache = stdenvNoCC.mkDerivation {
name = "yarn-offline-cache";
src = finalAttrs.src;
nativeBuildInputs = [
yarn
cacert
];
cargoDeps = rustPlatform.fetchCargoVendor {
inherit (finalAttrs) pname version src;
hash = "sha256-racjpf0VgNod6OxWKSaCbKS9fEkInpDyhVbAHfYWIDo=";
};
yarnOfflineCache = stdenvNoCC.mkDerivation {
name = "yarn-offline-cache";
inherit (finalAttrs) src;
nativeBuildInputs = [
yarn
cacert
writableTmpDirAsHomeHook
];
# force yarn install run in CI mode
env.CI = "1";
buildPhase =
let
supportedArchitectures = builtins.toJSON {
os = [
"darwin"
"linux"
];
cpu = [
"arm64"
"x64"
"ia32"
"arm64"
];
libc = [
"glibc"
"musl"
];
};
buildPhase = ''
export HOME="$NIX_BUILD_TOP"
export CI=1
mkdir -p $out
yarn config set enableTelemetry false
yarn config set cacheFolder $out
yarn config set enableGlobalCache false
yarn config set supportedArchitectures --json "$supportedArchitectures"
yarn install --immutable --mode=skip-build
'';
dontInstall = true;
outputHashMode = "recursive";
outputHash = "sha256-HueTia+1ApfvbBK/b+iE84TB1DCWIDLoQ9XhjYlGCUs=";
};
nativeBuildInputs =
[
nodejs
yarn
cargo
rustc
findutils
zip
jq
rsync
]
++ lib.optionals hostPlatform.isLinux [
copyDesktopItems
makeWrapper
];
patchPhase = ''
runHook prePatchPhase
sed -i '/packagerConfig/a \ electronZipDir: process.env.ELECTRON_FORGE_ELECTRON_ZIP_DIR,' packages/frontend/apps/electron/forge.config.mjs
runHook postPatchPhase
'';
configurePhase =
let
electronContentPath =
electron + (if hostPlatform.isLinux then "/libexec/electron/" else "/Applications/");
in
''
runHook preConfigurePhase
export HOME="$NIX_BUILD_TOP"
export CI=1
# cargo config
mkdir -p .cargo
cat $cargoDeps/.cargo/config.toml >> .cargo/config.toml
ln -s $cargoDeps @vendor@
# yarn config
yarn config set enableTelemetry false
yarn config set enableGlobalCache false
yarn config set cacheFolder $yarnOfflineCache
# electron config
ELECTRON_VERSION_IN_LOCKFILE=$(yarn why electron --json | tail --lines 1 | jq --raw-output '.children | to_entries | first | .key ' | cut -d : -f 2)
rsync --archive --chmod=u+w ${electronContentPath} $HOME/.electron-prebuilt-zip-tmp
export ELECTRON_FORGE_ELECTRON_ZIP_DIR=$PWD/.electron_zip_dir
mkdir -p $ELECTRON_FORGE_ELECTRON_ZIP_DIR
(cd $HOME/.electron-prebuilt-zip-tmp && zip --recurse-paths - .) > $ELECTRON_FORGE_ELECTRON_ZIP_DIR/electron-v$ELECTRON_VERSION_IN_LOCKFILE-${nodePlatform}-${nodeArch}.zip
export ELECTRON_SKIP_BINARY_DOWNLOAD=1
runHook postConfigurePhase
'';
buildPhase = ''
in
''
runHook preBuild
# first build
yarn workspaces focus @affine/electron @affine/monorepo
CARGO_NET_OFFLINE=true yarn workspace @affine/native build
BUILD_TYPE=${buildType} SKIP_NX_CACHE=1 yarn workspace @affine/electron generate-assets
mkdir -p $out
yarn config set enableTelemetry false
yarn config set cacheFolder $out
yarn config set enableGlobalCache false
yarn config set supportedArchitectures --json '${supportedArchitectures}'
# second build
yarn config set nmMode classic
yarn config set nmHoistingLimits workspaces
find . -name 'node_modules' -type d -prune -exec rm -rf '{}' +
yarn workspaces focus @affine/electron @affine/monorepo
BUILD_TYPE=${buildType} SKIP_WEB_BUILD=1 SKIP_BUNDLE=1 HOIST_NODE_MODULES=1 yarn workspace @affine/electron make
yarn install --immutable --mode=skip-build
runHook postBuild
'';
installPhase =
let
inherit (finalAttrs) binName productName;
in
if hostPlatform.isDarwin then
''
runHook preInstall
dontInstall = true;
outputHashMode = "recursive";
outputHash = "sha256-E9l5zjOOfyDBzYJOU94VrRvt7Hi4XkRTDav9bVlXvlQ=";
};
nativeBuildInputs =
[
nodejs
yarn
cargo
rustc
findutils
zip
jq
rsync
writableTmpDirAsHomeHook
]
++ lib.optionals hostPlatform.isLinux [
copyDesktopItems
makeWrapper
];
mkdir -p $out/Applications
mv packages/frontend/apps/electron/out/${buildType}/${productName}-darwin-${nodeArch}/${productName}.app $out/Applications
# force yarn install run in CI mode
env.CI = "1";
runHook postInstall
''
else
''
runHook preInstall
# Remove code under The AFFiNE Enterprise Edition (EE) license.
# Keep file package.json for `yarn install --immutable` lockfile check.
postPatch = ''
BACKEND_SERVER_PACKAGE_JSON="$(jq 'del(.scripts.postinstall)' packages/backend/server/package.json)"
rm -rf packages/backend/server/{.*,*}
echo "$BACKEND_SERVER_PACKAGE_JSON" > packages/backend/server/package.json
'';
mkdir --parents $out/lib/${binName}/
mv packages/frontend/apps/electron/out/${buildType}/${productName}-linux-${nodeArch}/{resources,LICENSE*} $out/lib/${binName}/
install -Dm644 packages/frontend/apps/electron/resources/icons/icon_${buildType}_64x64.png $out/share/icons/hicolor/64x64/apps/${binName}.png
configurePhase = ''
runHook preConfigurePhase
makeWrapper "${electron}/bin/electron" $out/bin/${binName} \
--inherit-argv0 \
--add-flags $out/lib/${binName}/resources/app.asar \
--add-flags "\''${NIXOS_OZONE_WL:+\''${WAYLAND_DISPLAY:+--ozone-platform-hint=auto --enable-features=WaylandWindowDecorations --enable-wayland-ime=true}}" \
--add-flags ${lib.escapeShellArg commandLineArgs}
# cargo config
mkdir -p .cargo
cat $cargoDeps/.cargo/config.toml >> .cargo/config.toml
ln -s $cargoDeps @vendor@
runHook postInstall
'';
}
// (lib.optionalAttrs hostPlatform.isLinux {
desktopItems =
let
inherit (finalAttrs) binName productName;
in
[
(makeDesktopItem {
name = binName;
desktopName = productName;
comment = "AFFiNE Desktop App";
exec = "${binName} %U";
terminal = false;
icon = binName;
startupWMClass = binName;
categories = [ "Utility" ];
mimeTypes = [ "x-scheme-handler/${binName}" ];
})
];
# yarn config
yarn config set enableTelemetry false
yarn config set enableGlobalCache false
yarn config set cacheFolder $yarnOfflineCache
# electron config
ELECTRON_VERSION_IN_LOCKFILE=$(yarn why electron --json | tail --lines 1 | jq --raw-output '.children | to_entries | first | .key ' | cut -d : -f 2)
rsync --archive --chmod=u+w "${electron.dist}/" $HOME/.electron-prebuilt-zip-tmp
export ELECTRON_FORGE_ELECTRON_ZIP_DIR=$PWD/.electron_zip_dir
mkdir -p $ELECTRON_FORGE_ELECTRON_ZIP_DIR
(cd $HOME/.electron-prebuilt-zip-tmp && zip --recurse-paths - .) > $ELECTRON_FORGE_ELECTRON_ZIP_DIR/electron-v$ELECTRON_VERSION_IN_LOCKFILE-${nodePlatform}-${nodeArch}.zip
export ELECTRON_SKIP_BINARY_DOWNLOAD=1
runHook postConfigurePhase
'';
buildPhase = ''
runHook preBuild
# first build
yarn install
CARGO_NET_OFFLINE=true yarn affine @affine/native build
GITHUB_SHA=ffffffffffffffffffffffffffffffffffffffff BUILD_TYPE=${buildType} SKIP_NX_CACHE=1 yarn affine @affine/electron generate-assets
# second build
yarn config set nmMode classic
yarn config set nmHoistingLimits workspaces
find . -name 'node_modules' -type d -prune -exec rm -rf '{}' +
yarn install
BUILD_TYPE=${buildType} SKIP_WEB_BUILD=1 SKIP_BUNDLE=1 HOIST_NODE_MODULES=1 yarn affine @affine/electron make
runHook postBuild
'';
installPhase =
if hostPlatform.isDarwin then
''
runHook preInstall
mkdir -p $out/Applications
mv packages/frontend/apps/electron/out/${buildType}/${productName}-darwin-${nodeArch}/${productName}.app $out/Applications
runHook postInstall
''
else
''
runHook preInstall
mkdir --parents $out/lib/${binName}/
mv packages/frontend/apps/electron/out/${buildType}/${productName}-linux-${nodeArch}/{resources,LICENSE*} $out/lib/${binName}/
install -Dm644 packages/frontend/apps/electron/resources/icons/icon_${buildType}_64x64.png $out/share/icons/hicolor/64x64/apps/${binName}.png
makeWrapper "${lib.getExe electron}" $out/bin/${binName} \
--inherit-argv0 \
--add-flags $out/lib/${binName}/resources/app.asar \
--add-flags "\''${NIXOS_OZONE_WL:+\''${WAYLAND_DISPLAY:+--ozone-platform-hint=auto --enable-features=WaylandWindowDecorations --enable-wayland-ime=true}}" \
--add-flags ${lib.escapeShellArg commandLineArgs}
runHook postInstall
'';
desktopItems = [
(makeDesktopItem {
name = binName;
desktopName = productName;
comment = "AFFiNE Desktop App";
exec = "${binName} %U";
terminal = false;
icon = binName;
startupWMClass = binName;
categories = [ "Utility" ];
mimeTypes = [ "x-scheme-handler/${binName}" ];
})
)
)
];
passthru.updateScript = nix-update-script {
extraArgs = [
"--version-regex=^v(\\d+\\.\\d+\\.\\d+)$"
];
};
meta = {
description = "Workspace with fully merged docs, whiteboards and databases";
longDescription = ''
AFFiNE is an open-source, all-in-one workspace and an operating
system for all the building blocks that assemble your knowledge
base and much more -- wiki, knowledge management, presentation
and digital assets
'';
homepage = "https://affine.pro/";
license = lib.licenses.mit;
maintainers = with lib.maintainers; [ xiaoxiangmoe ];
platforms = [
"aarch64-darwin"
"aarch64-linux"
"x86_64-darwin"
"x86_64-linux"
];
sourceProvenance = [ lib.sourceTypes.fromSource ];
};
})

View file

@ -176,15 +176,17 @@ stdenv.mkDerivation (finalAttrs: {
dontWrapGApps = true;
# Replace audacity's wrapper, to:
# - put it in the right place, it shouldn't be in "$out/audacity"
# - Put it in the right place; it shouldn't be in "$out/audacity"
# - Add the ffmpeg dynamic dependency
# - Use Xwayland by default on Wayland. See https://github.com/audacity/audacity/pull/5977
postFixup =
lib.optionalString stdenv.hostPlatform.isLinux ''
wrapProgram "$out/bin/audacity" \
"''${gappsWrapperArgs[@]}" \
--prefix LD_LIBRARY_PATH : "$out/lib/audacity":${lib.makeLibraryPath [ ffmpeg ]} \
--suffix AUDACITY_MODULES_PATH : "$out/lib/audacity/modules" \
--suffix AUDACITY_PATH : "$out/share/audacity"
--suffix AUDACITY_PATH : "$out/share/audacity" \
--set-default GDK_BACKEND x11
''
+ lib.optionalString stdenv.hostPlatform.isDarwin ''
mkdir -p $out/{Applications,bin}

View file

@ -74,7 +74,6 @@ rustPlatform.buildRustPackage {
wrapProgram $out/bin/devenv \
--prefix PATH ":" "$out/bin:${cachix}/bin" \
--set DEVENV_NIX ${devenv_nix} \
--set-default DO_NOT_TRACK 1 \
${setDefaultLocaleArchive}
# Generate manpages

View file

@ -2,22 +2,25 @@
lib,
rustPlatform,
fetchFromGitHub,
nix-update-script,
}:
rustPlatform.buildRustPackage rec {
pname = "harper";
version = "0.20.0";
version = "0.21.1";
src = fetchFromGitHub {
owner = "Automattic";
repo = "harper";
rev = "v${version}";
hash = "sha256-8JeF1HxsP+Y+C1g3YJ0B0+JHoRFkBjz4/T8rVr2KgGw=";
hash = "sha256-UTohTnIUMpyQGvkuOD2L7bViF3b5QnbDjRD4VSmf4lE=";
};
buildAndTestSubdir = "harper-ls";
useFetchCargoVendor = true;
cargoHash = "sha256-uVjDFo5mJi4Xbq0Z+XOjy5VqXqkm0a+4xu+dVnjWXCU=";
cargoHash = "sha256-wHXo4yfFc77osCamK0NidbrIYyIFMEpfBr0B6aniBmQ=";
passthru.updateScript = nix-update-script { };
meta = {
description = "Grammar Checker for Developers";

View file

@ -0,0 +1,33 @@
{
lib,
rustPackages,
fetchFromGitHub,
pkg-config,
}:
rustPackages.rustPlatform.buildRustPackage rec {
pname = "hawkeye";
version = "6.0.0";
src = fetchFromGitHub {
owner = "korandoru";
repo = "hawkeye";
tag = "v${version}";
hash = "sha256-VfJWj9BwNVR7RVUW+CjFuaniyiEath1U0F/7QJcA3r4=";
};
useFetchCargoVendor = true;
cargoHash = "sha256-SJEl5QsO4KYRv+5xDPHy1Q53qcL89IJ9JTXtzubO5fk=";
nativeBuildInputs = [
pkg-config
];
meta = {
homepage = "https://github.com/korandoro/hawkeye";
description = "Simple license header checker and formatter, in multiple distribution forms";
license = lib.licenses.asl20;
mainProgram = "hawkeye";
maintainers = with lib.maintainers; [ matthiasbeyer ];
};
}

View file

@ -21,6 +21,8 @@ rustPlatform.buildRustPackage rec {
useFetchCargoVendor = true;
cargoHash = "sha256-J9sGXJbGbO9UgZfgqxqzbiJz9j6WMpq3qC2ys7OJnII=";
buildFeatures = [ "cli" ];
checkFlags = [
# remote access
"--skip=generation::tests::can_generate_from_remote_repo_with_subdir"

View file

@ -6,13 +6,13 @@
stdenv.mkDerivation rec {
pname = "lzbench";
version = "1.8.1";
version = "2.0.1";
src = fetchFromGitHub {
owner = "inikep";
repo = pname;
rev = "v${version}";
sha256 = "19zlvcjb1qg4fx30rrp6m650660y35736j8szvdxmqh9ipkisyia";
sha256 = "sha256-946AcnD9z60Oihm2pseS8D5j6pGdYeCxmhTLNcW9Mmc=";
};
enableParallelBuilding = true;

View file

@ -10,17 +10,17 @@
rustPlatform.buildRustPackage rec {
pname = "mpd-discord-rpc";
version = "1.7.3";
version = "1.8.0";
src = fetchFromGitHub {
owner = "JakeStanger";
repo = "mpd-discord-rpc";
rev = "v${version}";
hash = "sha256-WiHMXazNKyt5N7WmkftZYEHeQi+l9qoU2yr6jRHfjdE=";
hash = "sha256-RuXH0RaR0VVN7tja0pcc8QH826/JzH4tyVVCbrK7ldI=";
};
useFetchCargoVendor = true;
cargoHash = "sha256-v6YQS+Te0bIzSr3q4QaEcXbUjiTCKELxCdqBlbjLI3E=";
cargoHash = "sha256-ewmg5t0JljnvxjrGDJzokRwndv7UNw9NMQ7Cx6oDWjg=";
nativeBuildInputs = [
pkg-config

View file

@ -89,6 +89,9 @@ python3Packages.buildPythonApplication rec {
ps: with ps; [
mypy
pytest
# this is to help development (e.g.: better diffs) inside devShell
# only, do not use its helpers like `mocker`
pytest-mock
ruff
]
);

File diff suppressed because it is too large Load diff

View file

@ -26,7 +26,7 @@ def test_build(mock_run: Mock) -> None:
m.BuildAttr("<nixpkgs/nixos>", None),
{"nix_flag": "foo"},
) == Path("/path/to/file")
assert mock_run.call_args == call(
mock_run.assert_called_with(
[
"nix-build",
"<nixpkgs/nixos>",
@ -38,17 +38,13 @@ def test_build(mock_run: Mock) -> None:
stdout=PIPE,
)
mock_run.reset_mock()
assert n.build(
"config.system.build.attr", m.BuildAttr(Path("file"), "preAttr")
) == Path("/path/to/file")
assert mock_run.call_args_list == [
call(
["nix-build", Path("file"), "--attr", "preAttr.config.system.build.attr"],
stdout=PIPE,
)
]
mock_run.assert_called_with(
["nix-build", Path("file"), "--attr", "preAttr.config.system.build.attr"],
stdout=PIPE,
)
@patch(
@ -65,7 +61,7 @@ def test_build_flake(mock_run: Mock, monkeypatch: MonkeyPatch, tmpdir: Path) ->
flake,
{"no_link": True, "nix_flag": "foo"},
) == Path("/path/to/file")
assert mock_run.call_args == call(
mock_run.assert_called_with(
[
"nix",
"--extra-experimental-features",
@ -114,53 +110,58 @@ def test_build_remote(
instantiate_flags={"inst": True},
copy_flags={"copy": True},
) == Path("/path/to/config")
assert mock_run.call_args_list == [
call(
[
"nix-instantiate",
"<nixpkgs/nixos>",
"--attr",
"preAttr.config.system.build.toplevel",
"--add-root",
n.tmpdir.TMPDIR_PATH / "00000000000000000000000000000001",
"--inst",
],
stdout=PIPE,
),
call(
[
"nix-copy-closure",
"--copy",
"--to",
"user@host",
Path("/path/to/file"),
],
extra_env={"NIX_SSHOPTS": " ".join([*p.SSH_DEFAULT_OPTS, "--ssh opts"])},
),
call(
["mktemp", "-d", "-t", "nixos-rebuild.XXXXX"],
remote=build_host,
stdout=PIPE,
),
call(
[
"nix-store",
"--realise",
Path("/path/to/file"),
"--add-root",
Path("/tmp/tmpdir/00000000000000000000000000000002"),
"--realise",
],
remote=build_host,
stdout=PIPE,
),
call(
["readlink", "-f", "/tmp/tmpdir/config"],
remote=build_host,
stdout=PIPE,
),
call(["rm", "-rf", Path("/tmp/tmpdir")], remote=build_host, check=False),
]
mock_run.assert_has_calls(
[
call(
[
"nix-instantiate",
"<nixpkgs/nixos>",
"--attr",
"preAttr.config.system.build.toplevel",
"--add-root",
n.tmpdir.TMPDIR_PATH / "00000000000000000000000000000001",
"--inst",
],
stdout=PIPE,
),
call(
[
"nix-copy-closure",
"--copy",
"--to",
"user@host",
Path("/path/to/file"),
],
extra_env={
"NIX_SSHOPTS": " ".join([*p.SSH_DEFAULT_OPTS, "--ssh opts"])
},
),
call(
["mktemp", "-d", "-t", "nixos-rebuild.XXXXX"],
remote=build_host,
stdout=PIPE,
),
call(
[
"nix-store",
"--realise",
Path("/path/to/file"),
"--add-root",
Path("/tmp/tmpdir/00000000000000000000000000000002"),
"--realise",
],
remote=build_host,
stdout=PIPE,
),
call(
["readlink", "-f", "/tmp/tmpdir/config"],
remote=build_host,
stdout=PIPE,
),
call(["rm", "-rf", Path("/tmp/tmpdir")], remote=build_host, check=False),
]
)
@patch(
@ -184,43 +185,47 @@ def test_build_remote_flake(
copy_flags={"copy": True},
flake_build_flags={"build": True},
) == Path("/path/to/file")
assert mock_run.call_args_list == [
call(
[
"nix",
"--extra-experimental-features",
"nix-command flakes",
"eval",
"--raw",
".#nixosConfigurations.hostname.config.system.build.toplevel.drvPath",
"--flake",
],
stdout=PIPE,
),
call(
[
"nix-copy-closure",
"--copy",
"--to",
"user@host",
Path("/path/to/file"),
],
extra_env={"NIX_SSHOPTS": " ".join([*p.SSH_DEFAULT_OPTS, "--ssh opts"])},
),
call(
[
"nix",
"--extra-experimental-features",
"nix-command flakes",
"build",
"/path/to/file^*",
"--print-out-paths",
"--build",
],
remote=build_host,
stdout=PIPE,
),
]
mock_run.assert_has_calls(
[
call(
[
"nix",
"--extra-experimental-features",
"nix-command flakes",
"eval",
"--raw",
".#nixosConfigurations.hostname.config.system.build.toplevel.drvPath",
"--flake",
],
stdout=PIPE,
),
call(
[
"nix-copy-closure",
"--copy",
"--to",
"user@host",
Path("/path/to/file"),
],
extra_env={
"NIX_SSHOPTS": " ".join([*p.SSH_DEFAULT_OPTS, "--ssh opts"])
},
),
call(
[
"nix",
"--extra-experimental-features",
"nix-command flakes",
"build",
"/path/to/file^*",
"--print-out-paths",
"--build",
],
remote=build_host,
stdout=PIPE,
),
]
)
def test_copy_closure(monkeypatch: MonkeyPatch) -> None:
@ -233,7 +238,7 @@ def test_copy_closure(monkeypatch: MonkeyPatch) -> None:
build_host = m.Remote("user@build.host", [], None)
with patch(get_qualified_name(n.run_wrapper, n), autospec=True) as mock_run:
n.copy_closure(closure, target_host)
assert mock_run.call_args == call(
mock_run.assert_called_with(
["nix-copy-closure", "--to", "user@target.host", closure],
extra_env={"NIX_SSHOPTS": " ".join(p.SSH_DEFAULT_OPTS)},
)
@ -241,7 +246,7 @@ def test_copy_closure(monkeypatch: MonkeyPatch) -> None:
monkeypatch.setenv("NIX_SSHOPTS", "--ssh build-opt")
with patch(get_qualified_name(n.run_wrapper, n), autospec=True) as mock_run:
n.copy_closure(closure, None, build_host, {"copy_flag": True})
assert mock_run.call_args == call(
mock_run.assert_called_with(
["nix-copy-closure", "--copy-flag", "--from", "user@build.host", closure],
extra_env={
"NIX_SSHOPTS": " ".join([*p.SSH_DEFAULT_OPTS, "--ssh build-opt"])
@ -255,7 +260,7 @@ def test_copy_closure(monkeypatch: MonkeyPatch) -> None:
}
with patch(get_qualified_name(n.run_wrapper, n), autospec=True) as mock_run:
n.copy_closure(closure, target_host, build_host, {"copy_flag": True})
assert mock_run.call_args == call(
mock_run.assert_called_with(
[
"nix",
"copy",
@ -272,16 +277,18 @@ def test_copy_closure(monkeypatch: MonkeyPatch) -> None:
monkeypatch.setattr(n, "WITH_NIX_2_18", False)
with patch(get_qualified_name(n.run_wrapper, n), autospec=True) as mock_run:
n.copy_closure(closure, target_host, build_host)
assert mock_run.call_args_list == [
call(
["nix-copy-closure", "--from", "user@build.host", closure],
extra_env=extra_env,
),
call(
["nix-copy-closure", "--to", "user@target.host", closure],
extra_env=extra_env,
),
]
mock_run.assert_has_calls(
[
call(
["nix-copy-closure", "--from", "user@build.host", closure],
extra_env=extra_env,
),
call(
["nix-copy-closure", "--to", "user@target.host", closure],
extra_env=extra_env,
),
]
)
@patch(get_qualified_name(n.run_wrapper, n), autospec=True)
@ -289,7 +296,7 @@ def test_edit(mock_run: Mock, monkeypatch: MonkeyPatch, tmpdir: Path) -> None:
# Flake
flake = m.Flake.parse(f"{tmpdir}#attr")
n.edit(flake, {"commit_lock_file": True})
assert mock_run.call_args == call(
mock_run.assert_called_with(
[
"nix",
"--extra-experimental-features",
@ -311,7 +318,7 @@ def test_edit(mock_run: Mock, monkeypatch: MonkeyPatch, tmpdir: Path) -> None:
mp.setenv("EDITOR", "editor")
n.edit(None)
assert mock_run.call_args == call(["editor", default_nix], check=False)
mock_run.assert_called_with(["editor", default_nix], check=False)
@patch(
@ -334,7 +341,7 @@ def test_get_build_image_variants(mock_run: Mock, tmp_path: Path) -> None:
"azure": "nixos-image-azure-25.05.20250102.6df2492-x86_64-linux.vhd",
"vmware": "nixos-image-vmware-25.05.20250102.6df2492-x86_64-linux.vmdk",
}
assert mock_run.call_args == call(
mock_run.assert_called_with(
[
"nix-instantiate",
"--eval",
@ -352,14 +359,12 @@ def test_get_build_image_variants(mock_run: Mock, tmp_path: Path) -> None:
stdout=PIPE,
)
mock_run.reset_mock()
build_attr = m.BuildAttr(Path(tmp_path), "preAttr")
assert n.get_build_image_variants(build_attr, {"inst_flag": True}) == {
"azure": "nixos-image-azure-25.05.20250102.6df2492-x86_64-linux.vhd",
"vmware": "nixos-image-vmware-25.05.20250102.6df2492-x86_64-linux.vmdk",
}
assert mock_run.call_args == call(
mock_run.assert_called_with(
[
"nix-instantiate",
"--eval",
@ -399,7 +404,7 @@ def test_get_build_image_variants_flake(mock_run: Mock) -> None:
"azure": "nixos-image-azure-25.05.20250102.6df2492-x86_64-linux.vhd",
"vmware": "nixos-image-vmware-25.05.20250102.6df2492-x86_64-linux.vmdk",
}
assert mock_run.call_args == call(
mock_run.assert_called_with(
[
"nix",
"eval",
@ -424,7 +429,7 @@ def test_get_nixpkgs_rev() -> None:
side_effect=[CompletedProcess([], 0, "")],
) as mock_run:
assert n.get_nixpkgs_rev(path) is None
assert mock_run.call_args == call(
mock_run.assert_called_with(
["git", "-C", path, "rev-parse", "--short", "HEAD"],
check=False,
capture_output=True,
@ -451,7 +456,7 @@ def test_get_nixpkgs_rev() -> None:
],
) as mock_run:
assert n.get_nixpkgs_rev(path) == ".git.0f7c82403fd6"
assert mock_run.call_args_list == expected_calls
mock_run.assert_has_calls(expected_calls)
with patch(
get_qualified_name(n.run_wrapper, n),
@ -462,7 +467,7 @@ def test_get_nixpkgs_rev() -> None:
],
) as mock_run:
assert n.get_nixpkgs_rev(path) == ".git.0f7c82403fd6M"
assert mock_run.call_args_list == expected_calls
mock_run.assert_has_calls(expected_calls)
def test_get_generations(tmp_path: Path) -> None:
@ -503,7 +508,7 @@ def test_get_generations_from_nix_env(tmp_path: Path) -> None:
m.Generation(id=2083, current=False, timestamp="2024-11-07 22:59:41"),
m.Generation(id=2084, current=True, timestamp="2024-11-07 23:54:17"),
]
assert mock_run.call_args == call(
mock_run.assert_called_with(
["nix-env", "-p", path, "--list-generations"],
stdout=PIPE,
remote=None,
@ -521,7 +526,7 @@ def test_get_generations_from_nix_env(tmp_path: Path) -> None:
m.Generation(id=2083, current=False, timestamp="2024-11-07 22:59:41"),
m.Generation(id=2084, current=True, timestamp="2024-11-07 23:54:17"),
]
assert mock_run.call_args == call(
mock_run.assert_called_with(
["nix-env", "-p", path, "--list-generations"],
stdout=PIPE,
remote=remote,
@ -573,14 +578,12 @@ def test_list_generations(mock_get_generations: Mock, tmp_path: Path) -> None:
@patch(get_qualified_name(n.run_wrapper, n), autospec=True)
def test_repl(mock_run: Mock) -> None:
n.repl("attr", m.BuildAttr("<nixpkgs/nixos>", None), {"nix_flag": True})
assert mock_run.call_args == call(
mock_run.assert_called_with(
["nix", "repl", "--file", "<nixpkgs/nixos>", "--nix-flag"]
)
n.repl("attr", m.BuildAttr(Path("file.nix"), "myAttr"))
assert mock_run.call_args == call(
["nix", "repl", "--file", Path("file.nix"), "myAttr"]
)
mock_run.assert_called_with(["nix", "repl", "--file", Path("file.nix"), "myAttr"])
@patch(get_qualified_name(n.run_wrapper, n), autospec=True)
@ -599,7 +602,7 @@ def test_rollback(mock_run: Mock, tmp_path: Path) -> None:
profile = m.Profile("system", path)
assert n.rollback(profile, None, False) == profile.path
assert mock_run.call_args == call(
mock_run.assert_called_with(
["nix-env", "--rollback", "-p", path],
remote=None,
sudo=False,
@ -607,7 +610,7 @@ def test_rollback(mock_run: Mock, tmp_path: Path) -> None:
target_host = m.Remote("user@localhost", [], None)
assert n.rollback(profile, target_host, True) == profile.path
assert mock_run.call_args == call(
mock_run.assert_called_with(
["nix-env", "--rollback", "-p", path],
remote=target_host,
sudo=True,
@ -619,10 +622,8 @@ def test_rollback_temporary_profile(tmp_path: Path) -> None:
path.touch()
profile = m.Profile("system", path)
with patch(
get_qualified_name(n.run_wrapper, n),
autospec=True,
return_value=CompletedProcess(
with patch(get_qualified_name(n.run_wrapper, n), autospec=True) as mock_run:
mock_run.return_value = CompletedProcess(
[],
0,
stdout=textwrap.dedent("""\
@ -630,13 +631,12 @@ def test_rollback_temporary_profile(tmp_path: Path) -> None:
2083 2024-11-07 22:59:41
2084 2024-11-07 23:54:17 (current)
"""),
),
) as mock_run:
)
assert (
n.rollback_temporary_profile(m.Profile("system", path), None, False)
== path.parent / "system-2083-link"
)
assert mock_run.call_args == call(
mock_run.assert_called_with(
[
"nix-env",
"-p",
@ -653,7 +653,7 @@ def test_rollback_temporary_profile(tmp_path: Path) -> None:
n.rollback_temporary_profile(m.Profile("foo", path), target_host, True)
== path.parent / "foo-2083-link"
)
assert mock_run.call_args == call(
mock_run.assert_called_with(
[
"nix-env",
"-p",
@ -665,11 +665,8 @@ def test_rollback_temporary_profile(tmp_path: Path) -> None:
sudo=True,
)
with patch(
get_qualified_name(n.run_wrapper, n),
autospec=True,
return_value=CompletedProcess([], 0, stdout=""),
) as mock_run:
with patch(get_qualified_name(n.run_wrapper, n), autospec=True) as mock_run:
mock_run.return_value = CompletedProcess([], 0, stdout="")
assert n.rollback_temporary_profile(profile, None, False) is None
@ -684,7 +681,7 @@ def test_set_profile(mock_run: Mock) -> None:
sudo=False,
)
assert mock_run.call_args == call(
mock_run.assert_called_with(
["nix-env", "-p", profile_path, "--set", config_path],
remote=None,
sudo=False,
@ -707,7 +704,7 @@ def test_switch_to_configuration(mock_run: Mock, monkeypatch: MonkeyPatch) -> No
specialisation=None,
install_bootloader=False,
)
assert mock_run.call_args == call(
mock_run.assert_called_with(
[profile_path / "bin/switch-to-configuration", "switch"],
extra_env={"NIXOS_INSTALL_BOOTLOADER": "0"},
sudo=False,
@ -741,7 +738,7 @@ def test_switch_to_configuration(mock_run: Mock, monkeypatch: MonkeyPatch) -> No
install_bootloader=True,
specialisation="special",
)
assert mock_run.call_args == call(
mock_run.assert_called_with(
[
config_path / "specialisation/special/bin/switch-to-configuration",
"test",
@ -765,14 +762,14 @@ def test_switch_to_configuration(mock_run: Mock, monkeypatch: MonkeyPatch) -> No
def test_upgrade_channels(mock_is_dir: Mock, mock_glob: Mock) -> None:
with patch(get_qualified_name(n.run_wrapper, n), autospec=True) as mock_run:
n.upgrade_channels(False)
assert mock_run.call_args == call(["nix-channel", "--update", "nixos"], check=False)
mock_run.reset_mock()
mock_run.assert_called_once_with(["nix-channel", "--update", "nixos"], check=False)
with patch(get_qualified_name(n.run_wrapper, n), autospec=True) as mock_run:
n.upgrade_channels(True)
assert mock_run.call_args_list == [
call(["nix-channel", "--update", "nixos"], check=False),
call(["nix-channel", "--update", "nixos-hardware"], check=False),
call(["nix-channel", "--update", "home-manager"], check=False),
]
mock_run.assert_has_calls(
[
call(["nix-channel", "--update", "nixos"], check=False),
call(["nix-channel", "--update", "nixos-hardware"], check=False),
call(["nix-channel", "--update", "home-manager"], check=False),
]
)

View file

@ -1,4 +1,5 @@
from unittest.mock import Mock, call, patch
from typing import Any
from unittest.mock import patch
from pytest import MonkeyPatch
@ -9,9 +10,9 @@ from .helpers import get_qualified_name
@patch(get_qualified_name(p.subprocess.run), autospec=True)
def test_run(mock_run: Mock) -> None:
def test_run(mock_run: Any) -> None:
p.run_wrapper(["test", "--with", "flags"], check=True)
assert mock_run.call_args == call(
mock_run.assert_called_with(
["test", "--with", "flags"],
check=True,
text=True,
@ -27,7 +28,7 @@ def test_run(mock_run: Mock) -> None:
sudo=True,
extra_env={"FOO": "bar"},
)
assert mock_run.call_args == call(
mock_run.assert_called_with(
["sudo", "test", "--with", "flags"],
check=False,
text=True,
@ -44,7 +45,7 @@ def test_run(mock_run: Mock) -> None:
check=True,
remote=m.Remote("user@localhost", ["--ssh", "opt"], "password"),
)
assert mock_run.call_args == call(
mock_run.assert_called_with(
[
"ssh",
"--ssh",
@ -70,7 +71,7 @@ def test_run(mock_run: Mock) -> None:
extra_env={"FOO": "bar"},
remote=m.Remote("user@localhost", ["--ssh", "opt"], "password"),
)
assert mock_run.call_args == call(
mock_run.assert_called_with(
[
"ssh",
"--ssh",

View file

@ -13,7 +13,7 @@ let
escapeExpect = lib.strings.escapeNixString;
expectSetup = ''
set timeout 180
set timeout 300
proc expect_simple { pattern } {
puts "Expecting: $pattern"
expect {
@ -76,7 +76,7 @@ runCommand "test-nixos-rebuild-repl"
expect ${writeText "test-nixos-rebuild-repl-expect" ''
${expectSetup}
spawn nixos-rebuild repl --fast
spawn nixos-rebuild repl --no-reexec
expect "nix-repl> "
@ -116,7 +116,7 @@ runCommand "test-nixos-rebuild-repl"
expect ${writeText "test-nixos-rebuild-repl-absolute-path-expect" ''
${expectSetup}
spawn sh -c "nixos-rebuild repl --fast --flake path:\$HOME#testconf"
spawn sh -c "nixos-rebuild repl --no-reexec --flake path:\$HOME#testconf"
expect_simple "nix-repl>"
@ -146,7 +146,7 @@ runCommand "test-nixos-rebuild-repl"
pushd "$HOME"
expect ${writeText "test-nixos-rebuild-repl-relative-path-expect" ''
${expectSetup}
spawn sh -c "nixos-rebuild repl --fast --flake .#testconf"
spawn sh -c "nixos-rebuild repl --no-reexec --flake .#testconf"
expect_simple "nix-repl>"

View file

@ -25,7 +25,6 @@ stdenv.mkDerivation rec {
meta = with lib; {
description = "New GNU Portable Threads Library";
mainProgram = "npth-config";
longDescription = ''
This is a library to provide the GNU Pth API and thus a non-preemptive
threads implementation.

View file

@ -6,11 +6,11 @@
let
pname = "nrfconnect";
version = "4.4.1";
version = "5.1.0";
src = fetchurl {
url = "https://nsscprodmedia.blob.core.windows.net/prod/software-and-other-downloads/desktop-software/nrf-connect-for-desktop/${lib.versions.major version}-${lib.versions.minor version}-${lib.versions.patch version}/nrfconnect-${version}-x86_64.appimage";
hash = "sha256-x/vVSOEajuQtLATRXk8DVLlXHegCqP+acecaOFNeBb8=";
hash = "sha256-QEoKIdi8tlZ86langbCYJXSO+dGONBEQPdwmREIhZBA=";
name = "${pname}-${version}.AppImage";
};
@ -22,7 +22,9 @@ in
appimageTools.wrapType2 {
inherit pname version src;
extraPkgs = pkgs: [ pkgs.segger-jlink ];
extraPkgs = pkgs: [
pkgs.segger-jlink-headless
];
extraInstallCommands = ''
install -Dm444 ${appimageContents}/nrfconnect.desktop -t $out/share/applications
@ -32,12 +34,12 @@ appimageTools.wrapType2 {
--replace 'Exec=AppRun' 'Exec=nrfconnect'
'';
meta = with lib; {
meta = {
description = "Nordic Semiconductor nRF Connect for Desktop";
homepage = "https://www.nordicsemi.com/Products/Development-tools/nRF-Connect-for-desktop";
license = licenses.unfree;
platforms = platforms.linux;
maintainers = with maintainers; [ stargate01 ];
license = lib.licenses.unfree;
platforms = lib.platforms.linux;
maintainers = with lib.maintainers; [ stargate01 ];
mainProgram = "nrfconnect";
};
}

View file

@ -7,16 +7,16 @@
buildGoModule rec {
pname = "nuclei";
version = "3.3.8";
version = "3.3.9";
src = fetchFromGitHub {
owner = "projectdiscovery";
repo = "nuclei";
tag = "v${version}";
hash = "sha256-RL6/H1X6+rt9n1rpeRpKv+u3SloOnRX6YzMKDDQw+78=";
hash = "sha256-9P8KSuhTI/m0m51PUTZGU+qRbnT3izPbHTzsqZNbMJE=";
};
vendorHash = "sha256-k4seYTUO7BmU2HhTWweDRfNnXp+HshWM1riSc9BbYYg=";
vendorHash = "sha256-CTdB/+aVaXKqtiwHn8pgmhXjZ0mIDrmLvnKmisExi74=";
proxyVendor = true; # hash mismatch between Linux and Darwin

View file

@ -7,19 +7,19 @@
}:
let
pname = "open-webui";
version = "0.5.11";
version = "0.5.12";
src = fetchFromGitHub {
owner = "open-webui";
repo = "open-webui";
tag = "v${version}";
hash = "sha256-U+zY/Jgzo52x/H4xcW2/LjM52r+hdJvZ/xsIeAeJniE=";
hash = "sha256-+Hg4tyfmgfh3k/pUKMjs7IRahPV2/LRUDj1kt2g45Dw=";
};
frontend = buildNpmPackage {
inherit pname version src;
npmDepsHash = "sha256-bAzcNLMB8OqzYRfw9Cr0xuFFl4FIKvBQT/4M2nZP0C8=";
npmDepsHash = "sha256-pM8Ie3kkjVq9OJHKpGLQ1E/omd84B0N8lXAHKxUa8/4=";
# Disabling `pyodide:fetch` as it downloads packages during `buildPhase`
# Until this is solved, running python packages from the browser will not work.
@ -89,8 +89,6 @@ python312.pkgs.buildPythonApplication rec {
fake-useragent
fastapi
faster-whisper
flask
flask-cors
fpdf2
ftfy
gcp-storage-emulator

View file

@ -17,11 +17,11 @@
stdenv.mkDerivation rec {
pname = "photoqt";
version = "4.8";
version = "4.8.1";
src = fetchurl {
url = "https://photoqt.org/pkgs/photoqt-${version}.tar.gz";
hash = "sha256-ccSbG5MTIyVJFqNHstaW53BfsGmN/I4ObCZfY0h22QE=";
hash = "sha256-Iq5Fc0v+EYFe1YG3ZhZKl8leXD+TpGGhaQjr800vz7Y=";
};
nativeBuildInputs = [

View file

@ -75,13 +75,13 @@ let
in
buildGoModule rec {
pname = "podman";
version = "5.3.1";
version = "5.4.0";
src = fetchFromGitHub {
owner = "containers";
repo = "podman";
rev = "v${version}";
hash = "sha256-kABP10QX4r11UDUcd6Sukb+9+LRm/ba3iATz6DTOJYw=";
hash = "sha256-iEO4njjNByLkhXFLgZ8tO8M8RkwT+Lb0zyfedQDHcNc=";
};
patches = [
@ -91,15 +91,6 @@ buildGoModule rec {
# we intentionally don't build and install the helper so we shouldn't display messages to users about it
./rm-podman-mac-helper-msg.patch
# backport of fix for https://github.com/containers/storage/issues/2184
# https://github.com/containers/storage/pull/2185
(fetchpatch2 {
url = "https://github.com/containers/storage/commit/99b0d2d423c8093807d8a1464437152cd04d7d95.diff?full_index=1";
hash = "sha256-aahYXnDf3qCOlb6MfVDqFKCcQG257r5sbh5qnL0T40I=";
stripLen = 1;
extraPrefix = "vendor/github.com/containers/storage/";
})
];
vendorHash = null;

File diff suppressed because it is too large Load diff

View file

@ -2,61 +2,43 @@
lib,
rustPlatform,
fetchCrate,
jq,
moreutils,
stdenv,
darwin,
versionCheckHook,
nix-update-script,
}:
rustPlatform.buildRustPackage rec {
pname = "samply";
version = "0.12.0";
version = "0.13.1";
src = fetchCrate {
inherit pname version;
hash = "sha256-7bf1lDIZGhRpvnn8rHNwzH2GBY8CwtYCjuRAUTQgbsA=";
hash = "sha256-zTwAsE6zXY3esO7x6UTCO2DbzdUSKZ6qc5Rr9qcI+Z8=";
};
# Can't use fetchCargoVendor:
# https://github.com/NixOS/nixpkgs/issues/377986
cargoLock.lockFile = ./Cargo.lock;
# the dependencies linux-perf-data and linux-perf-event-reader contains both README.md and Readme.md,
# which causes a hash mismatch on systems with a case-insensitive filesystem
# this removes the readme files and updates cargo's checksum file accordingly
depsExtraArgs = {
nativeBuildInputs = [
jq
moreutils
];
postBuild = ''
for crate in linux-perf-data linux-perf-event-reader; do
pushd $name/$crate
rm -f README.md Readme.md
jq 'del(.files."README.md") | del(.files."Readme.md")' \
.cargo-checksum.json -c \
| sponge .cargo-checksum.json
popd
done
'';
};
useFetchCargoVendor = true;
cargoHash = "sha256-mQykzO9Ldokd3PZ1fY4pK/GtLmYMVas2iHj1Pqi9WqQ=";
buildInputs = lib.optionals stdenv.hostPlatform.isDarwin [
darwin.apple_sdk.frameworks.CoreServices
];
meta = with lib; {
nativeInstallCheckInputs = [ versionCheckHook ];
versionCheckProgramArg = "--version";
doInstallCheck = true;
passthru.updateScript = nix-update-script { };
meta = {
description = "Command line profiler for macOS and Linux";
mainProgram = "samply";
homepage = "https://github.com/mstange/samply";
changelog = "https://github.com/mstange/samply/releases/tag/samply-v${version}";
license = with licenses; [
license = with lib.licenses; [
asl20
mit
];
maintainers = with maintainers; [ figsoda ];
maintainers = with lib.maintainers; [ figsoda ];
mainProgram = "samply";
};
}

View file

@ -16,7 +16,7 @@
}:
let
version = "1.80.0";
version = "1.80.2";
in
buildGo123Module {
pname = "tailscale";
@ -31,7 +31,7 @@ buildGo123Module {
owner = "tailscale";
repo = "tailscale";
rev = "v${version}";
hash = "sha256-wb52Ffoh56EEVToGGK1Rzfb5DHiR2dLxDJRLcUgYhFg=";
hash = "sha256-5HGY9hVSnzqmAdXNJdQ+ZvsK/PmyZ94201UHlHclQE8=";
};
patches = [
@ -43,7 +43,7 @@ buildGo123Module {
})
];
vendorHash = "sha256-a+d02h0AXqr2FuWRAOUACiYVSpm276onkwKxGSJTL5s=";
vendorHash = "sha256-81UOjoC5GJqhNs4vWcQ2/B9FMaDWtl0rbuFXmxbu5dI=";
nativeBuildInputs = lib.optionals stdenv.hostPlatform.isLinux [ makeWrapper ] ++ [
installShellFiles

View file

@ -0,0 +1,11 @@
--- a/src/Common/CpuCore.c
+++ b/src/Common/CpuCore.c
@@ -96,7 +96,7 @@
derive_key_whirlpool ( word, wordlength+1, salt, PKCS5_SALT_SIZE, 1000, headerKey, cpu_GetMaxPkcs5OutSize ());
else{
perror("Key derivation function not supported");
- return;
+ return 0;
}
value=cpu_Xts(encryptionAlgorithm,encryptedHeader,headerKey,cpu_GetMaxPkcs5OutSize(), masterKey, &length);

View file

@ -6,6 +6,7 @@
config,
cudaSupport ? config.cudaSupport,
pkg-config,
versionCheckHook,
}:
gccStdenv.mkDerivation rec {
@ -15,10 +16,14 @@ gccStdenv.mkDerivation rec {
src = fetchFromGitLab {
owner = "kalilinux";
repo = "packages/truecrack";
rev = "debian/${version}+git20150326-0kali1";
sha256 = "+Rw9SfaQtO1AJO6UVVDMCo8DT0dYEbv7zX8SI+pHCRQ=";
tag = "kali/${version}+git20150326-0kali4";
hash = "sha256-d6ld6KHSqYM4RymHf5qcm2AWK6FHWC0rFaLRfIQ2m5Q=";
};
patches = [
./fix-empty-return.patch
];
configureFlags = (
if cudaSupport then
[
@ -38,24 +43,54 @@ gccStdenv.mkDerivation rec {
cudatoolkit
];
# Workaround build failure on -fno-common toolchains like upstream
# gcc-10. Otherwise build fails as:
# ld: CpuAes.o:/build/source/src/Crypto/CpuAes.h:1233: multiple definition of
# `t_rc'; CpuCore.o:/build/source/src/Crypto/CpuAes.h:1237: first defined here
# TODO: remove on upstream fixes it:
# https://gitlab.com/kalilinux/packages/truecrack/-/issues/1
env.NIX_CFLAGS_COMPILE = "-fcommon";
env.NIX_CFLAGS_COMPILE = toString ([
# Workaround build failure on -fno-common toolchains like upstream
# gcc-10. Otherwise build fails as:
# ld: CpuAes.o:/build/source/src/Crypto/CpuAes.h:1233: multiple definition of
# `t_rc'; CpuCore.o:/build/source/src/Crypto/CpuAes.h:1237: first defined here
# TODO: remove on upstream fixes it:
# https://gitlab.com/kalilinux/packages/truecrack/-/issues/1
"-fcommon"
# Function are declared after they are used in the file, this is error since gcc-14.
# Common/Crypto.c:42:13: error: implicit declaration of function 'cpu_CipherInit'; did you mean 'CipherInit'? []
# https://gitlab.com/kalilinux/packages/truecrack/-/commit/5b0e3a96b747013bded7b33f65bb42be2dbafc86
"-Wno-error=implicit-function-declaration"
]);
installFlags = [ "prefix=$(out)" ];
enableParallelBuilding = true;
meta = with lib; {
installFlags = [ "prefix=$(out)" ];
doInstallCheck = true;
installCheckPhase = ''
runHook preInstallCheck
echo "Cracking test volumes"
$out/bin/${meta.mainProgram} -t test/ripemd160_aes.test.tc -w test/passwords.txt | grep -aF "Found password"
$out/bin/${meta.mainProgram} -t test/ripemd160_aes.test.tc -c test/tes -m 4 | grep -aF "Found password"
$out/bin/${meta.mainProgram} -t test/ripemd160_aes.test.tc -w test/passwords.txt | grep -aF "Found password"
$out/bin/${meta.mainProgram} -t test/whirlpool_aes.test.tc -w test/passwords.txt -k whirlpool | grep -aF "Found password"
$out/bin/${meta.mainProgram} -t test/sha512_aes.test.tc -w test/passwords.txt -k sha512 | grep -aF "Found password"
$out/bin/${meta.mainProgram} -t test/ripemd160_aes.test.tc -w test/passwords.txt | grep -aF "Found password"
$out/bin/${meta.mainProgram} -t test/ripemd160_serpent.test.tc -w test/passwords.txt -e serpent | grep -aF "Found password"
$out/bin/${meta.mainProgram} -t test/ripemd160_twofish.test.tc -w test/passwords.txt -e twofish | grep -aF "Found password"
echo "Finished cracking test volumes"
runHook postInstallCheck
'';
nativeInstallCheckInputs = [
versionCheckHook
];
meta = {
description = "Brute-force password cracker for TrueCrypt volumes, optimized for Nvidia Cuda technology";
mainProgram = "truecrack";
homepage = "https://gitlab.com/kalilinux/packages/truecrack";
broken = cudaSupport;
license = licenses.gpl3Plus;
platforms = platforms.unix;
maintainers = with maintainers; [ ethancedwards8 ];
license = lib.licenses.gpl3Plus;
platforms = lib.platforms.unix;
maintainers = with lib.maintainers; [ ethancedwards8 ];
};
}

File diff suppressed because it is too large Load diff

View file

@ -1,48 +1,57 @@
{
lib,
rustPlatform,
libiconv,
stdenv,
installShellFiles,
darwin,
rustPlatform,
fetchFromGitHub,
installShellFiles,
buildPackages,
writableTmpDirAsHomeHook,
versionCheckHook,
nix-update-script,
}:
rustPlatform.buildRustPackage rec {
pname = "volta";
version = "1.1.1";
version = "2.0.2";
src = fetchFromGitHub {
owner = "volta-cli";
repo = "volta";
rev = "v${version}";
hash = "sha256-+j3WRpunV+3YfZnyuKA/CsiKr+gOaP2NbmnyoGMN+Mg=";
tag = "v${version}";
hash = "sha256-ZI+3/Xbkg/JaZMLhrJEjaSwjs44fOaiRReM2DUTnkkc=";
};
cargoLock = {
lockFile = ./Cargo.lock;
outputHashes = {
"detect-indent-0.1.0" = "sha256-qtPkPaBiyuT8GhpEFdU7IkAgKnCbTES0FB2CvNKWqic=";
"semver-0.9.0" = "sha256-nw1somkZe9Qi36vjfWlTcDqHAIbaJj72KBTfmucVxXs=";
"semver-parser-0.10.0" = "sha256-iTGnKSddsriF6JS6lvJNjp9aDzGtfjrHEiCijeie3uE=";
};
useFetchCargoVendor = true;
cargoHash = "sha256-xlqsubkaX2A6d5MIcGf9E0b11Gzneksgku0jvW+UdbE=";
buildInputs = [ installShellFiles ];
postInstall =
let
emulator = stdenv.hostPlatform.emulator buildPackages;
in
''
installShellCompletion --cmd volta \
--bash <(${emulator} $out/bin/volta completions bash) \
--fish <(${emulator} $out/bin/volta completions fish) \
--zsh <(${emulator} $out/bin/volta completions zsh)
'';
nativeCheckInputs = [
writableTmpDirAsHomeHook
];
nativeInstallCheckInputs = [
versionCheckHook
];
versionCheckProgramArg = [ "--version" ];
# Tries to create /var/empty/.volta as $HOME is not writable
doInstallCheck = !stdenv.hostPlatform.isDarwin;
passthru = {
updateScript = nix-update-script { };
};
buildInputs =
[ installShellFiles ]
++ lib.optionals stdenv.hostPlatform.isDarwin [
darwin.apple_sdk.frameworks.Security
libiconv
];
HOME = "$TMPDIR";
postInstall = lib.optionalString (stdenv.buildPlatform.canExecute stdenv.hostPlatform) ''
installShellCompletion --cmd volta \
--bash <($out/bin/volta completions bash) \
--fish <($out/bin/volta completions fish) \
--zsh <($out/bin/volta completions zsh)
'';
meta = with lib; {
meta = {
description = "Hassle-Free JavaScript Tool Manager";
longDescription = ''
With Volta, you can select a Node engine once and then stop worrying
@ -56,7 +65,8 @@ rustPlatform.buildRustPackage rec {
'';
homepage = "https://volta.sh/";
changelog = "https://github.com/volta-cli/volta/blob/main/RELEASES.md";
license = with licenses; [ bsd2 ];
maintainers = with maintainers; [ fbrs ];
license = with lib.licenses; [ bsd2 ];
maintainers = with lib.maintainers; [ fbrs ];
mainProgram = "volta";
};
}

View file

@ -1,7 +1,7 @@
{
lib,
stdenv,
fetchFromSourcehut,
fetchFromGitLab,
meson,
ninja,
pkg-config,
@ -11,13 +11,14 @@
stdenv.mkDerivation rec {
pname = "wlr-randr";
version = "0.4.1";
version = "0.5.0";
src = fetchFromSourcehut {
owner = "~emersion";
repo = pname;
src = fetchFromGitLab {
domain = "gitlab.freedesktop.org";
owner = "emersion";
repo = "wlr-randr";
rev = "v${version}";
hash = "sha256-2kWTVAi4hq2d9jQ6yBLVzm3x7n/oSvBdZ45WyjhXhc4=";
hash = "sha256-lHOGpY0IVnR8QdSqJbtIA4FkhmQ/zDiFNqqXyj8iw/s=";
};
strictDeps = true;
@ -34,7 +35,7 @@ stdenv.mkDerivation rec {
meta = with lib; {
description = "Xrandr clone for wlroots compositors";
homepage = "https://git.sr.ht/~emersion/wlr-randr";
homepage = "https://gitlab.freedesktop.org/emersion/wlr-randr";
license = licenses.mit;
maintainers = with maintainers; [ ma27 ];
platforms = platforms.linux;

View file

@ -68,11 +68,20 @@ stdenv.mkDerivation (finalAttrs: {
nativeBuildInputs = [ autoreconfHook ];
# For some reason libxml2 package headers are in subdirectory and thus arent
# picked up by stdenvs C compiler wrapper (see ccWrapper_addCVars). This
# doesnt really belong here and either should be part of libxml2 package or
# libxml2 in Nixpkgs can just fix their header paths.
env.NIX_CFLAGS_COMPILE = "-isystem ${libxml2.dev}/include/libxml2";
env.NIX_CFLAGS_COMPILE = toString (
[
# For some reason libxml2 package headers are in subdirectory and thus arent
# picked up by stdenvs C compiler wrapper (see ccWrapper_addCVars). This
# doesnt really belong here and either should be part of libxml2 package or
# libxml2 in Nixpkgs can just fix their header paths.
"-isystem ${libxml2.dev}/include/libxml2"
]
++ lib.optionals stdenv.cc.isGNU [
# fix build on GCC 14
"-Wno-error=implicit-function-declaration"
"-Wno-error=incompatible-pointer-types"
]
);
buildInputs =
[

View file

@ -1,53 +0,0 @@
From 03d6f704d07aa3650a2f59be6f7802a8735460c3 Mon Sep 17 00:00:00 2001
From: Lang Hames <lhames@gmail.com>
Date: Wed, 29 Jan 2025 03:58:29 +0000
Subject: [PATCH] [ORC][LLI] Remove redundant eh-frame registration plugin
construction from lli.
As of d0052ebbe2e the setUpGenericLLVMIRPlatform function will automatically
add an instance of the EHFrameRegistrationPlugin (for LLJIT instances whose
object linking layers are ObjectLinkingLayers, not RTDyldObjectLinkingLayers).
This commit removes the redundant plugin creation in the object linking
layer constructor function in lli.cpp to prevent duplicate registration of
eh-frames, which is likely the cause of recent bot failures, e.g.
https://lab.llvm.org/buildbot/#/builders/108/builds/8685.
(cherry picked from commit 9052b37ab1aa67a039b34356f37236fecc42bac2)
---
llvm/tools/lli/lli.cpp | 14 ++++----------
1 file changed, 4 insertions(+), 10 deletions(-)
diff --git a/llvm/tools/lli/lli.cpp b/tools/lli/lli.cpp
index 448660a539a0b0..19246f03941673 100644
--- a/llvm/tools/lli/lli.cpp
+++ b/tools/lli/lli.cpp
@@ -27,9 +27,7 @@
#include "llvm/ExecutionEngine/Orc/AbsoluteSymbols.h"
#include "llvm/ExecutionEngine/Orc/DebugUtils.h"
#include "llvm/ExecutionEngine/Orc/Debugging/DebuggerSupport.h"
-#include "llvm/ExecutionEngine/Orc/EHFrameRegistrationPlugin.h"
#include "llvm/ExecutionEngine/Orc/EPCDynamicLibrarySearchGenerator.h"
-#include "llvm/ExecutionEngine/Orc/EPCEHFrameRegistrar.h"
#include "llvm/ExecutionEngine/Orc/EPCGenericRTDyldMemoryManager.h"
#include "llvm/ExecutionEngine/Orc/ExecutionUtils.h"
#include "llvm/ExecutionEngine/Orc/IRPartitionLayer.h"
@@ -1033,14 +1031,10 @@ int runOrcJIT(const char *ProgName) {
Builder.getJITTargetMachineBuilder()
->setRelocationModel(Reloc::PIC_)
.setCodeModel(CodeModel::Small);
- Builder.setObjectLinkingLayerCreator([&P](orc::ExecutionSession &ES,
- const Triple &TT) {
- auto L = std::make_unique<orc::ObjectLinkingLayer>(ES);
- if (P != LLJITPlatform::ExecutorNative)
- L->addPlugin(std::make_unique<orc::EHFrameRegistrationPlugin>(
- ES, ExitOnErr(orc::EPCEHFrameRegistrar::Create(ES))));
- return L;
- });
+ Builder.setObjectLinkingLayerCreator(
+ [&](orc::ExecutionSession &ES, const Triple &TT) {
+ return std::make_unique<orc::ObjectLinkingLayer>(ES);
+ });
}
auto J = ExitOnErr(Builder.create());

View file

@ -500,10 +500,16 @@ let
})
]
++
lib.optional (lib.versions.major metadata.release_version == "20")
# Fix OrcJIT
# PR: https://github.com/llvm/llvm-project/pull/125431
(metadata.getVersionFile "llvm/orcjit.patch");
lib.optional (lib.versionAtLeast metadata.release_version "20")
# Fix OrcJIT tests with page sizes > 16k
# PR: https://github.com/llvm/llvm-project/pull/127115
(
fetchpatch {
url = "https://github.com/llvm/llvm-project/commit/415607e10b56d0e6c4661ff1ec5b9b46bf433cba.patch";
stripLen = 1;
hash = "sha256-vBbuduJB+NnNE9qtR93k64XKrwvc7w3vowjL/aT+iEA=";
}
);
pollyPatches =
[ (metadata.getVersionFile "llvm/gnu-install-dirs-polly.patch") ]
++ lib.optional (lib.versionAtLeast metadata.release_version "15")

View file

@ -14,7 +14,6 @@
ninja,
isFullBuild ? true,
linuxHeaders,
fetchpatch,
}:
let
pname = "libc";
@ -28,26 +27,12 @@ let
'');
in
stdenv.mkDerivation (finalAttrs: {
inherit pname version;
inherit pname version patches;
src = src';
sourceRoot = "${finalAttrs.src.name}/runtimes";
patches =
lib.optional (lib.versions.major version == "20")
# Removes invalid token from the LLVM version being placed in the namespace.
# Can be removed when LLVM 20 bumps to rc2.
# PR: https://github.com/llvm/llvm-project/pull/126284
(
fetchpatch {
url = "https://github.com/llvm/llvm-project/commit/3a3a3230d171e11842a9940b6da0f72022b1c5b3.patch";
stripLen = 1;
hash = "sha256-QiU1cWp+027ZZNVdvfGVwbIoRd9jqtSbftGsmaW1gig=";
}
)
++ patches;
nativeBuildInputs =
[
cmake
@ -75,7 +60,7 @@ stdenv.mkDerivation (finalAttrs: {
'';
postInstall = lib.optionalString (!isFullBuild) ''
substituteAll ${./libc-shim.so} $out/lib/libc.so
substituteAll ${./libc-shim.tpl} $out/lib/libc.so
'';
libc = if (!isFullBuild) then stdenv.cc.libc else null;

View file

@ -30,7 +30,7 @@ let
"17.0.6".officialRelease.sha256 = "sha256-8MEDLLhocshmxoEBRSKlJ/GzJ8nfuzQ8qn0X/vLA+ag=";
"18.1.8".officialRelease.sha256 = "sha256-iiZKMRo/WxJaBXct9GdAcAT3cz9d9pnAcO1mmR6oPNE=";
"19.1.7".officialRelease.sha256 = "sha256-cZAB5vZjeTsXt9QHbP5xluWNQnAHByHtHnAhVDV0E6I=";
"20.1.0-rc1".officialRelease.sha256 = "sha256-yOczbperlR20+iLoao9g0CR+Ml2mjTCx1cqP/9WOhME=";
"20.1.0-rc2".officialRelease.sha256 = "sha256-lBx+MWfYBM6XSJozacALMGlo0DUUWqnsBQyO8lDljSo=";
"21.0.0-git".gitRelease = {
rev = "c9f1d2cbf18990311ea1287cc154e3784a10a3b0";
rev-version = "21.0.0-unstable-2025-02-10";

View file

@ -0,0 +1,31 @@
{
lib,
fetchurl,
ocaml,
buildDunePackage,
cppo,
ounit2,
}:
buildDunePackage rec {
pname = "arg-complete";
version = "0.2.1";
src = fetchurl {
url = "https://github.com/sim642/ocaml-arg-complete/releases/download/${version}/arg-complete-${version}.tbz";
hash = "sha256-SZvLaeeqY3j2LUvqxGs0Vw57JnnpdvAk1jnE3pk27QU=";
};
nativeBuildInputs = [ cppo ];
doCheck = lib.versionAtLeast ocaml.version "4.08";
checkInputs = [ ounit2 ];
meta = {
description = "Bash completion support for OCaml Stdlib.Arg";
homepage = "https://sim642.github.io/ocaml-arg-complete/";
changelog = "https://raw.githubusercontent.com/sim642/ocaml-arg-complete/refs/tags/${version}/CHANGELOG.md";
license = lib.licenses.mit;
maintainers = [ lib.maintainers.vbgl ];
};
}

View file

@ -11,6 +11,7 @@
ocaml,
menhir,
apron,
arg-complete,
camlidl,
yojson,
zarith,
@ -18,15 +19,15 @@
buildDunePackage rec {
pname = "mopsa";
version = "1.0";
version = "1.1";
minimalOCamlVersion = "4.12";
minimalOCamlVersion = "4.13";
src = fetchFromGitLab {
owner = "mopsa";
repo = "mopsa-analyzer";
rev = "v${version}";
hash = "sha256-nGnWwV7g3SYgShbXGUMooyOdFwXFrQHnQvlc8x9TAS4=";
tag = "v${version}";
hash = "sha256-lO5dtGAl1dq8oJco/hPXrAbN05rKc62Zrci/8CLrQ0c=";
};
nativeBuildInputs = [
@ -36,6 +37,7 @@ buildDunePackage rec {
];
buildInputs = [
arg-complete
camlidl
flint
libclang

View file

@ -6,7 +6,7 @@
buildDunePackage rec {
pname = "qcheck-core";
version = "0.22";
version = "0.23";
minimalOCamlVersion = "4.08";
@ -14,7 +14,7 @@ buildDunePackage rec {
owner = "c-cube";
repo = "qcheck";
rev = "v${version}";
hash = "sha256-JXnrfce/V7Bdu8uH98ZJCLjIHZoONiQ02ltFx6Fbvhg=";
hash = "sha256-tH7NFpAFKOb0jXxLK+zNOIZS9TSORKXe8FuwY13iEUY=";
};
meta = {

View file

@ -9,10 +9,10 @@
buildDunePackage rec {
pname = "seqes";
version = "0.2";
version = "0.4";
src = fetchurl {
url = "https://gitlab.com/nomadic-labs/seqes/-/archive/${version}/seqes-${version}.tar.gz";
sha256 = "sha256-IxLA0jaIPdX9Zn/GL8UHDJYjA1UBW6leGbZmp64YMjI=";
url = "https://gitlab.com/raphael-proust/seqes/-/archive/${version}/seqes-${version}.tar.gz";
hash = "sha256-E4BalN68CJP7u6NSC0XBooWvUeSNqV+3KEOtoJ4g/dM=";
};
minimalOCamlVersion = "4.14";

View file

@ -13,9 +13,6 @@
setuptools-scm,
playwright-driver,
nixosTests,
writeText,
runCommand,
pythonPackages,
nodejs,
}:
@ -84,16 +81,6 @@ buildPythonPackage rec {
pyee
];
setupHook = writeText "setupHook.sh" ''
addBrowsersPath () {
if [[ ! -v PLAYWRIGHT_BROWSERS_PATH ]] ; then
export PLAYWRIGHT_BROWSERS_PATH="${playwright-driver.browsers}"
fi
}
addEnvHooks "$targetOffset" addBrowsersPath
'';
postInstall = ''
ln -s ${driver} $out/${python.sitePackages}/playwright/driver
'';
@ -109,9 +96,6 @@ buildPythonPackage rec {
{
driver = playwright-driver;
browsers = playwright-driver.browsers;
env = runCommand "playwright-env-test" {
buildInputs = [ pythonPackages.playwright ];
} "python ${./test.py}";
}
// lib.optionalAttrs stdenv.hostPlatform.isLinux {
inherit (nixosTests) playwright-python;

View file

@ -1,10 +0,0 @@
import os
import sys
from playwright.sync_api import sync_playwright
with sync_playwright() as p:
browser = p.chromium.launch()
context = browser.new_context()
with open(os.environ["out"], "w") as f:
f.write("OK")

View file

@ -10,14 +10,14 @@
buildPythonPackage rec {
pname = "pywikibot";
version = "9.6.1";
version = "9.6.2";
format = "setuptools";
disabled = pythonOlder "3.7";
src = fetchPypi {
inherit pname version;
hash = "sha256-736LEUwW1LofS1105TxVWHMGFaEpQGwa+WGIk2OQxmA=";
hash = "sha256-iPmQxOJmc9Ms8UhK43HrYgyyvu0g4/hO8bmO39AXOTo=";
};
propagatedBuildInputs = [

View file

@ -16,8 +16,8 @@ let
hash = "sha256-hHIWjD4f0L/yh+aUsFP8y78gV5o/+VJrYzO+q432Wo0=";
};
"10" = {
version = "10.2.1";
hash = "sha256-+Yjw2TuH4dotjN9qx/RaAcb4Q642BrTKDy/9cTuF+XU=";
version = "10.4.0";
hash = "sha256-5X6KVE96hCR8+nfdbZI+rlGZo3NHTlPqsfVAx5Yok4Y=";
};
};

View file

@ -12,7 +12,6 @@
makeFontsConf,
makeWrapper,
runCommand,
writeText,
cacert,
}:
let
@ -189,27 +188,9 @@ let
runHook postInstall
'';
setupHook = writeText "setupHook.sh" ''
addBrowsersPath () {
if [[ ! -v PLAYWRIGHT_BROWSERS_PATH ]] ; then
export PLAYWRIGHT_BROWSERS_PATH="${playwright-core.passthru.browsers}"
fi
}
addEnvHooks "$targetOffset" addBrowsersPath
'';
meta = playwright.meta // {
mainProgram = "playwright";
};
passthru.tests.env = runCommand "playwright-core-env-test" {
buildInputs = [
nodejs
playwright-core
playwright-test
];
} "node ${./test.js}";
});
browsers = lib.makeOverridable (

View file

@ -1,8 +0,0 @@
const playwright = require('playwright');
const fs = require('fs');
playwright.chromium.launch()
.then((browser) => {
console.log('OK');
fs.writeFileSync(process.env.out, '');
process.exit(0);
});

View file

@ -17,13 +17,13 @@
let
data = stdenv.mkDerivation (finalAttrs: {
pname = "path-of-building-data";
version = "2.50.0";
version = "2.51.0";
src = fetchFromGitHub {
owner = "PathOfBuildingCommunity";
repo = "PathOfBuilding";
rev = "v${finalAttrs.version}";
hash = "sha256-mclbLRYFNWgn/f4CyaINJlLq06uWh0+ks82Lger4w9w=";
hash = "sha256-Rau3UaWPyaI7QBXCNVtIQSenyNsx5hh2dsd3q8jFjc4=";
};
nativeBuildInputs = [ unzip ];

View file

@ -29,10 +29,10 @@
}:
let
defaultVersion = "2024.10";
defaultVersion = "2025.01";
defaultSrc = fetchurl {
url = "https://ftp.denx.de/pub/u-boot/u-boot-${defaultVersion}.tar.bz2";
hash = "sha256-so2vSsF+QxVjYweL9RApdYQTf231D87ZsS3zT2GpL7A=";
hash = "sha256-ze99UHyT8bvZ8BXqm8IfoHQmhIFAVQGUWrxvhU1baG8=";
};
# Dependencies for the tools need to be included as either native or cross,

View file

@ -186,6 +186,6 @@ python3.pkgs.buildPythonApplication rec {
changelog = "https://github.com/element-hq/synapse/releases/tag/v${version}";
description = "Matrix reference homeserver";
license = licenses.agpl3Plus;
maintainers = teams.matrix.members;
maintainers = with lib.maintainers; teams.matrix.members ++ [ sumnerevans ];
};
}

View file

@ -59,14 +59,14 @@ let
in
{
nextcloud29 = generic {
version = "29.0.11";
hash = "sha256-UGf8F91zICzC39m5ccp7uUy5UEghRgJ9rGILEjweztE=";
version = "29.0.12";
hash = "sha256-wCA1T/Ph0ghzcPcOBY/hcXE2NroPBzpRlK29/zwcr8Y=";
packages = nextcloud29Packages;
};
nextcloud30 = generic {
version = "30.0.5";
hash = "sha256-JIxubmEs7usXDE0luFebCvDmYTq9+gfy/mmTQmt4G+o=";
version = "30.0.6";
hash = "sha256-rA4JG+aSCWXcDILxSbYy1rWt563uhKezyM/YR0UKjdw=";
packages = nextcloud30Packages;
};

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View file

@ -128,6 +128,11 @@ stdenv.mkDerivation (finalAttrs: {
libdbus = dbus.lib;
inherit hwdata;
})
# Fix crash when starting hidden
# Upstream PR: https://github.com/flightlessmango/MangoHud/pull/1570
# FIXME: remove when merged
./fix-crash.patch
];
postPatch = ''

View file

@ -0,0 +1,40 @@
From f0d7e4f4b2d362d90bb81d0b10ef5c505b9661ea Mon Sep 17 00:00:00 2001
From: K900 <me@0upti.me>
Date: Fri, 14 Feb 2025 11:41:09 +0300
Subject: [PATCH] mangoapp: don't crash if gpus is not initialized yet
This seems to happen on startup on Steam Deck style gamescope-session setups.
Just check for gpus = null before trying to access it.
---
src/app/main.cpp | 10 ++++++----
1 file changed, 6 insertions(+), 4 deletions(-)
diff --git a/src/app/main.cpp b/src/app/main.cpp
index 0c7c13e07e..4d1d3b1277 100644
--- a/src/app/main.cpp
+++ b/src/app/main.cpp
@@ -369,8 +369,9 @@ int main(int, char**)
XSync(x11_display, 0);
mangoapp_paused = false;
// resume all GPU threads
- for (auto gpu : gpus->available_gpus)
- gpu->resume();
+ if (gpus)
+ for (auto gpu : gpus->available_gpus)
+ gpu->resume();
}
{
std::unique_lock<std::mutex> lk(mangoapp_m);
@@ -409,8 +410,9 @@ int main(int, char**)
XSync(x11_display, 0);
mangoapp_paused = true;
// pause all GPUs threads
- for (auto gpu : gpus->available_gpus)
- gpu->pause();
+ if (gpus)
+ for (auto gpu : gpus->available_gpus)
+ gpu->pause();
// If mangoapp is hidden, using mangoapp_cv.wait() causes a hang.
// Because of this hang, we can't detect if the user presses R_SHIFT + F12,

View file

@ -25,6 +25,7 @@ in
tables-test.ikarus.sps
lazy.sps
pipeline-operators.sps
os-environment-variables.sps
'
'';
})
@ -45,7 +46,9 @@ in
src = akku.src;
})
# not a tar archive
(pkg: old: removeAttrs old [ "unpackPhase" ])
(pkg: old: {
unpackPhase = null;
})
];
machine-code = pkg: old: {

View file

@ -38,6 +38,8 @@ let
apron = callPackage ../development/ocaml-modules/apron { };
arg-complete = callPackage ../development/ocaml-modules/arg-complete { };
arp = callPackage ../development/ocaml-modules/arp { };
asai = callPackage ../development/ocaml-modules/asai { };