flake : fix ggml-metal.metal path and run nixfmt (#1974)

This commit is contained in:
Rowan Hart 2023-06-24 04:07:08 -07:00 committed by GitHub
parent c943d823c1
commit fdd1860911
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23

View file

@@ -9,27 +9,33 @@
         inherit (pkgs.stdenv) isAarch64 isDarwin;
         inherit (pkgs.lib) optionals;
         isM1 = isAarch64 && isDarwin;
-        osSpecific =
-          if isM1 then with pkgs.darwin.apple_sdk_11_0.frameworks; [ Accelerate MetalKit MetalPerformanceShaders MetalPerformanceShadersGraph ]
-          else if isDarwin then with pkgs.darwin.apple_sdk.frameworks; [ Accelerate CoreGraphics CoreVideo ]
-          else [ ];
-        pkgs = import nixpkgs {
-          inherit system;
-        };
-        llama-python = pkgs.python310.withPackages (ps: with ps; [
-          numpy
-          sentencepiece
-        ]);
-      in
-      {
+        osSpecific = if isM1 then
+          with pkgs.darwin.apple_sdk_11_0.frameworks; [
+            Accelerate
+            MetalKit
+            MetalPerformanceShaders
+            MetalPerformanceShadersGraph
+          ]
+        else if isDarwin then
+          with pkgs.darwin.apple_sdk.frameworks; [
+            Accelerate
+            CoreGraphics
+            CoreVideo
+          ]
+        else
+          [ ];
+        pkgs = import nixpkgs { inherit system; };
+        llama-python =
+          pkgs.python310.withPackages (ps: with ps; [ numpy sentencepiece ]);
+      in {
         packages.default = pkgs.stdenv.mkDerivation {
           name = "llama.cpp";
           src = ./.;
-          postPatch =
-            if isM1 then ''
-              substituteInPlace ./ggml-metal.m \
-                --replace '[bundle pathForResource:@"ggml-metal" ofType:@"metal"];' "@\"$out/ggml-metal.metal\";"
-            '' else "";
+          postPatch = if isM1 then ''
+            substituteInPlace ./ggml-metal.m \
+              --replace '[bundle pathForResource:@"ggml-metal" ofType:@"metal"];' "@\"$out/bin/ggml-metal.metal\";"
+          '' else
+            "";
           nativeBuildInputs = with pkgs; [ cmake ];
           buildInputs = osSpecific;
           cmakeFlags = [ "-DLLAMA_BUILD_SERVER=ON" ] ++ (optionals isM1 [
@@ -62,11 +68,7 @@
         };
         apps.default = self.apps.${system}.llama;
         devShells.default = pkgs.mkShell {
-          packages = with pkgs; [
-            cmake
-            llama-python
-          ] ++ osSpecific;
+          packages = with pkgs; [ cmake llama-python ] ++ osSpecific;
         };
-      }
-    );
+      });
 }