diff --git a/pkgs/development/python-modules/llama-index-embeddings-openai/default.nix b/pkgs/development/python-modules/llama-index-embeddings-openai/default.nix
new file mode 100644
index 000000000000..41e3843220f5
--- /dev/null
+++ b/pkgs/development/python-modules/llama-index-embeddings-openai/default.nix
@@ -0,0 +1,28 @@
+{ lib
+, buildPythonPackage
+, fetchFromGitHub
+, llama-index-core
+, poetry-core
+}:
+
+buildPythonPackage rec {
+  pname = "llama-index-embeddings-openai";
+
+  inherit (llama-index-core) version src meta;
+
+  pyproject = true;
+
+  sourceRoot = "${src.name}/llama-index-integrations/embeddings/${pname}";
+
+  nativeBuildInputs = [
+    poetry-core
+  ];
+
+  propagatedBuildInputs = [
+    llama-index-core
+  ];
+
+  pythonImportsCheck = [
+    "llama_index.embeddings.openai"
+  ];
+}
diff --git a/pkgs/top-level/python-packages.nix b/pkgs/top-level/python-packages.nix
index 5ef90459aaeb..5b57b18d8850 100644
--- a/pkgs/top-level/python-packages.nix
+++ b/pkgs/top-level/python-packages.nix
@@ -6674,6 +6674,8 @@ self: super: with self; {
 
   llama-index-core = callPackage ../development/python-modules/llama-index-core { };
 
+  llama-index-embeddings-openai = callPackage ../development/python-modules/llama-index-embeddings-openai { };
+
   llama-index-llms-openai = callPackage ../development/python-modules/llama-index-llms-openai { };
 
   llama-index-multi-modal-llms-openai = callPackage ../development/python-modules/llama-index-multi-modal-llms-openai { };
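
The new expression reuses version, src, and meta from llama-index-core and builds only the llama-index-integrations/embeddings subdirectory of the upstream monorepo via sourceRoot. As a minimal sketch (not part of this diff; how a consumer composes the environment is an assumption), the new python-packages.nix attribute can be pulled into a Python environment like any other once this change is merged:

  # usage-sketch.nix -- hypothetical consumer expression, not part of this change
  { pkgs ? import <nixpkgs> { } }:

  pkgs.python3.withPackages (ps: [
    ps.llama-index-core
    ps.llama-index-embeddings-openai  # attribute added by this diff
  ])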