nixos/litellm: init

Author: Pol Dellaiera, 2025-03-16 10:37:16 +01:00
parent 40ca6d3c04
commit c8a823d40b
6 changed files with 221 additions and 3 deletions

@@ -178,6 +178,8 @@
 - [echoip](https://github.com/mpolden/echoip), a simple service for looking up your IP address. Available as [services.echoip](#opt-services.echoip.enable).
+- [LiteLLM](https://github.com/BerriAI/litellm), an LLM gateway that provides model access, fallbacks and spend tracking across 100+ LLMs, all in the OpenAI format. Available as [services.litellm](#opt-services.litellm.enable).
 - [Buffyboard](https://gitlab.postmarketos.org/postmarketOS/buffybox/-/tree/master/buffyboard), a framebuffer on-screen keyboard. Available as [services.buffyboard](option.html#opt-services.buffyboard).
 - [KanBoard](https://github.com/kanboard/kanboard), a project management tool that focuses on the Kanban methodology. Available as [services.kanboard](#opt-services.kanboard.enable).

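As a usage sketch for the new option (not part of this commit; the `model_list` entry follows the upstream schema from <https://docs.litellm.ai/docs/proxy/configs>, and `ollama/llama3` is only an illustrative model name):

{
  services.litellm = {
    enable = true;
    # Module defaults are host = "127.0.0.1" and port = 8080.
    host = "0.0.0.0";
    openFirewall = true;
    settings.model_list = [
      {
        # Hypothetical entry using LiteLLM's documented provider/model syntax.
        model_name = "llama3";
        litellm_params.model = "ollama/llama3";
      }
    ];
  };
}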
@@ -825,6 +825,7 @@
   ./services/misc/languagetool.nix
   ./services/misc/leaps.nix
   ./services/misc/lifecycled.nix
+  ./services/misc/litellm.nix
   ./services/misc/llama-cpp.nix
   ./services/misc/logkeys.nix
   ./services/misc/mame.nix

@@ -0,0 +1,182 @@
{
  config,
  lib,
  pkgs,
  ...
}:
let
  inherit (lib) types;
  cfg = config.services.litellm;
  settingsFormat = pkgs.formats.yaml { };
in
{
  options = {
    services.litellm = {
      enable = lib.mkEnableOption "LiteLLM server";
      package = lib.mkPackageOption pkgs "litellm" { };
      stateDir = lib.mkOption {
        type = types.path;
        default = "/var/lib/litellm";
        example = "/home/foo";
        description = "State directory of LiteLLM.";
      };
      host = lib.mkOption {
        type = types.str;
        default = "127.0.0.1";
        example = "0.0.0.0";
        description = ''
          The host address which the LiteLLM server HTTP interface listens to.
        '';
      };
      port = lib.mkOption {
        type = types.port;
        default = 8080;
        example = 11111;
        description = ''
          Which port the LiteLLM server listens to.
        '';
      };
      settings = lib.mkOption {
        type = types.submodule {
          freeformType = settingsFormat.type;
          options = {
            model_list = lib.mkOption {
              type = settingsFormat.type;
              description = ''
                List of supported models on the server, with model-specific configs.
              '';
              default = [ ];
            };
            router_settings = lib.mkOption {
              type = settingsFormat.type;
              description = ''
                LiteLLM Router settings
              '';
              default = { };
            };
            litellm_settings = lib.mkOption {
              type = settingsFormat.type;
              description = ''
                LiteLLM Module settings
              '';
              default = { };
            };
            general_settings = lib.mkOption {
              type = settingsFormat.type;
              description = ''
                LiteLLM Server settings
              '';
              default = { };
            };
            environment_variables = lib.mkOption {
              type = settingsFormat.type;
              description = ''
                Environment variables to pass to LiteLLM.
              '';
              default = { };
            };
          };
        };
        default = { };
        description = ''
          Configuration for LiteLLM.
          See <https://docs.litellm.ai/docs/proxy/configs> for more.
        '';
      };
      environment = lib.mkOption {
        type = types.attrsOf types.str;
        default = {
          SCARF_NO_ANALYTICS = "True";
          DO_NOT_TRACK = "True";
          ANONYMIZED_TELEMETRY = "False";
        };
        example = ''
          {
            NO_DOCS="True";
          }
        '';
        description = ''
          Extra environment variables for LiteLLM.
        '';
      };
      environmentFile = lib.mkOption {
        description = ''
          Environment file to be passed to the systemd service.
          Useful for passing secrets to the service to prevent them from being
          world-readable in the Nix store.
        '';
        type = lib.types.nullOr lib.types.path;
        default = null;
        example = "/var/lib/secrets/liteLLMSecrets";
      };
      openFirewall = lib.mkOption {
        type = types.bool;
        default = false;
        description = ''
          Whether to open the firewall for LiteLLM.
          This adds `services.litellm.port` to `networking.firewall.allowedTCPPorts`.
        '';
      };
    };
  };
  config = lib.mkIf cfg.enable {
    systemd.services.litellm = {
      description = "LLM Gateway to provide model access, fallbacks and spend tracking across 100+ LLMs.";
      wantedBy = [ "multi-user.target" ];
      after = [ "network.target" ];
      environment = cfg.environment;
      serviceConfig =
        let
          configFile = settingsFormat.generate "config.yaml" cfg.settings;
        in
        {
          ExecStart = "${lib.getExe cfg.package} --host \"${cfg.host}\" --port ${toString cfg.port} --config ${configFile}";
          EnvironmentFile = lib.optional (cfg.environmentFile != null) cfg.environmentFile;
          WorkingDirectory = cfg.stateDir;
          StateDirectory = "litellm";
          RuntimeDirectory = "litellm";
          RuntimeDirectoryMode = "0755";
          PrivateTmp = true;
          DynamicUser = true;
          DevicePolicy = "closed";
          LockPersonality = true;
          PrivateUsers = true;
          ProtectHome = true;
          ProtectHostname = true;
          ProtectKernelLogs = true;
          ProtectKernelModules = true;
          ProtectKernelTunables = true;
          ProtectControlGroups = true;
          RestrictNamespaces = true;
          RestrictRealtime = true;
          SystemCallArchitectures = "native";
          UMask = "0077";
          RestrictAddressFamilies = [
            "AF_INET"
            "AF_INET6"
            "AF_UNIX"
          ];
          ProtectClock = true;
          ProtectProc = "invisible";
        };
    };
    networking.firewall = lib.mkIf cfg.openFirewall { allowedTCPPorts = [ cfg.port ]; };
  };
  meta.maintainers = with lib.maintainers; [ drupol ];
}

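Since `settings` is rendered with `pkgs.formats.yaml` and handed to `litellm --config` in `ExecStart`, the Nix attribute set maps directly onto the upstream YAML configuration. A small sketch of that mapping (the `drop_params` and `master_key` names are assumed from the LiteLLM proxy documentation, not from this commit):

{
  services.litellm.settings = {
    # Assumed upstream setting names, shown only to illustrate the mapping.
    litellm_settings.drop_params = true;
    general_settings.master_key = "os.environ/LITELLM_MASTER_KEY";
  };
}

# The generated config.yaml would then contain, alongside the empty defaults
# for the other sections, roughly:
#
#   litellm_settings:
#     drop_params: true
#   general_settings:
#     master_key: os.environ/LITELLM_MASTER_KEY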
@@ -634,6 +634,7 @@ in {
   limesurvey = handleTest ./limesurvey.nix {};
   limine = import ./limine { inherit runTest; };
   listmonk = handleTestOn [ "x86_64-linux" "aarch64-linux" ] ./listmonk.nix {};
+  litellm = runTest ./litellm.nix;
   litestream = handleTest ./litestream.nix {};
   lldap = handleTest ./lldap.nix {};
   localsend = handleTest ./localsend.nix {};

nixos/tests/litellm.nix (new file, 27 lines)

@@ -0,0 +1,27 @@
{ lib, ... }:
let
  mainPort = "8080";
in
{
  name = "litellm";
  nodes = {
    machine =
      { ... }:
      {
        services.litellm = {
          enable = true;
        };
      };
  };
  testScript = ''
    machine.start()
    machine.wait_for_unit("litellm.service")
    machine.wait_for_open_port(${mainPort})
  '';
  meta = with lib.maintainers; {
    maintainers = [ drupol ];
  };
}

@@ -35,6 +35,7 @@
   tokenizers,
   uvloop,
   uvicorn,
+  nixosTests,
 }:
 
 buildPythonPackage rec {

@@ -99,12 +100,16 @@ buildPythonPackage rec {
   # access network
   doCheck = false;
 
-  meta = with lib; {
+  passthru.tests = {
+    inherit (nixosTests) litellm;
+  };
+
+  meta = {
     description = "Use any LLM as a drop in replacement for gpt-3.5-turbo. Use Azure, OpenAI, Cohere, Anthropic, Ollama, VLLM, Sagemaker, HuggingFace, Replicate (100+ LLMs)";
     mainProgram = "litellm";
     homepage = "https://github.com/BerriAI/litellm";
     changelog = "https://github.com/BerriAI/litellm/releases/tag/${src.tag}";
-    license = licenses.mit;
-    maintainers = with maintainers; [ happysalada ];
+    license = lib.licenses.mit;
+    maintainers = with lib.maintainers; [ happysalada ];
   };
 }
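
A note on the `passthru.tests` hookup: it makes the VM test reachable from the package attribute as well as from `nixosTests`, so either path can be built. A sketch (attribute paths assumed from the usual nixpkgs layout):

# Evaluating/building this derivation runs the linked NixOS VM test;
# it is equivalent to building nixosTests.litellm directly.
let
  pkgs = import <nixpkgs> { };
in
pkgs.python3Packages.litellm.tests.litellm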