From 1e3900ebacb3a0b385271389686403c97ad76d88 Mon Sep 17 00:00:00 2001
From: Someone Serge
Date: Fri, 29 Dec 2023 16:15:37 +0000
Subject: [PATCH] flake.nix: expose full scope in legacyPackages

---
 .devops/nix/jetson-support.nix | 19 +++++++++++++------
 flake.nix                      | 20 +++++++++++++++++---
 2 files changed, 30 insertions(+), 9 deletions(-)

diff --git a/.devops/nix/jetson-support.nix b/.devops/nix/jetson-support.nix
index 08426d2ab..78e2e40e0 100644
--- a/.devops/nix/jetson-support.nix
+++ b/.devops/nix/jetson-support.nix
@@ -8,12 +8,13 @@
       pkgsCuda,
       ...
     }:
-    lib.optionalAttrs (system == "aarch64-linux") {
-      packages =
+    {
+      legacyPackages =
         let
-          caps.jetson-xavier = "7.2";
-          caps.jetson-orin = "8.7";
-          caps.jetson-nano = "5.3";
+          caps.llamaPackagesXavier = "7.2";
+          caps.llamaPackagesOrin = "8.7";
+          caps.llamaPackagesTX2 = "6.2";
+          caps.llamaPackagesNano = "5.3";
 
           pkgsFor =
             cap:
@@ -27,6 +28,12 @@
               };
             };
         in
-        builtins.mapAttrs (name: cap: ((pkgsFor cap).callPackage ./scope.nix { }).llama-cpp) caps;
+        builtins.mapAttrs (name: cap: (pkgsFor cap).callPackage ./scope.nix { }) caps;
+
+      packages = lib.optionalAttrs (system == "aarch64-linux") {
+        jetson-xavier = config.legacyPackages.llamaPackagesXavier.llama-cpp;
+        jetson-orin = config.legacyPackages.llamaPackagesOrin.llama-cpp;
+        jetson-nano = config.legacyPackages.llamaPackagesNano.llama-cpp;
+      };
     };
 }
diff --git a/flake.nix b/flake.nix
index 2209070aa..6785b52f4 100644
--- a/flake.nix
+++ b/flake.nix
@@ -80,16 +80,30 @@
           ...
         }:
         {
+          # Unlike `.#packages`, legacyPackages may contain values of
+          # arbitrary types (including nested attrsets) and may even throw
+          # exceptions. This attribute isn't recursed into by `nix flake
+          # show` either.
+          #
+          # You can add arbitrary scripts to `.devops/nix/scope.nix` and
+          # access them as `nix build .#llamaPackages.${scriptName}` using
+          # the same path you would with an overlay.
+          legacyPackages = {
+            llamaPackages = pkgs.callPackage .devops/nix/scope.nix { inherit llamaVersion; };
+            llamaPackagesCuda = pkgsCuda.callPackage .devops/nix/scope.nix { inherit llamaVersion; };
+            llamaPackagesRocm = pkgsRocm.callPackage .devops/nix/scope.nix { inherit llamaVersion; };
+          };
+
           # We don't use the overlay here so as to avoid making too many instances of nixpkgs,
           # cf. https://zimbatm.com/notes/1000-instances-of-nixpkgs
           packages =
             {
-              default = (pkgs.callPackage .devops/nix/scope.nix { inherit llamaVersion; }).llama-cpp;
+              default = config.legacyPackages.llamaPackages.llama-cpp;
             }
             // lib.optionalAttrs pkgs.stdenv.isLinux {
               opencl = config.packages.default.override { useOpenCL = true; };
-              cuda = (pkgsCuda.callPackage .devops/nix/scope.nix { inherit llamaVersion; }).llama-cpp;
-              rocm = (pkgsRocm.callPackage .devops/nix/scope.nix { inherit llamaVersion; }).llama-cpp;
+              cuda = config.legacyPackages.llamaPackagesCuda.llama-cpp;
+              rocm = config.legacyPackages.llamaPackagesRocm.llama-cpp;
 
               mpi-cpu = config.packages.default.override { useMpi = true; };
               mpi-cuda = config.packages.default.override { useMpi = true; };
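
Usage note (not part of the patch): a minimal sketch of how a downstream flake might consume the scope exposed here. The flake URL (github:ggerganov/llama.cpp) and the host system (x86_64-linux) are assumptions, as is the usual flake-parts behaviour of surfacing the per-system legacyPackages defined above as `legacyPackages.<system>`; the attribute names themselves come from the diff.

# consumer flake.nix -- illustration only, not included in this patch
{
  # Assumption: the repository this patch targets is reachable at this URL.
  inputs.llama-cpp.url = "github:ggerganov/llama.cpp";

  outputs =
    { self, llama-cpp, ... }:
    {
      # Reuse the llama-cpp derivation from the CUDA-enabled scope
      # exposed via legacyPackages (system assumed to be x86_64-linux).
      packages.x86_64-linux.default =
        llama-cpp.legacyPackages.x86_64-linux.llamaPackagesCuda.llama-cpp;
    };
}

Inside the repository itself the same outputs resolve through the flake CLI, e.g. `nix build .#llamaPackages.llama-cpp`, or `nix build .#jetson-xavier` on aarch64-linux.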