Mirror of https://github.com/NixOS/nixpkgs.git
(last synced 2025-06-12 20:55:31 +03:00)
mkBinaryCache: process items in parallel
This commit is contained in:
parent
38331789a0
commit
f702ebf594
1 changed files with 56 additions and 30 deletions
|
@ -1,43 +1,69 @@
|
|||
|
||||
from functools import partial
|
||||
import json
|
||||
from multiprocessing import Pool
|
||||
import os
|
||||
from pathlib import Path
|
||||
import subprocess
|
||||
|
||||
with open(os.environ["NIX_ATTRS_JSON_FILE"], "r") as f:
|
||||
closures = json.load(f)["closure"]
|
||||
|
||||
os.chdir(os.environ["out"])
|
||||
def dropPrefix(path, nixPrefix):
    """Return *path* with the leading store prefix (and its slash) removed.

    E.g. dropPrefix("/nix/store/abc-foo", "/nix/store") -> "abc-foo".
    """
    prefix = nixPrefix + "/"
    return path[len(prefix):]
|
||||
|
||||
nixPrefix = os.environ["NIX_STORE"] # Usually /nix/store
|
||||
|
||||
with open("nix-cache-info", "w") as f:
|
||||
f.write("StoreDir: " + nixPrefix + "\n")
|
||||
def processItem(item, nixPrefix, outDir):
    """Pack one closure item into the binary-cache layout under *outDir*.

    Dumps the store path to an xz-compressed NAR in outDir/nar/, renames
    the archive to its own sha256 (matching "nix copy" naming), and writes
    the accompanying <hash>.narinfo file next to nar/.

    item      -- dict with keys "path", "narHash", "narSize", "references"
                 (one entry of the derivation's exportReferencesGraph closure)
    nixPrefix -- the store directory, usually /nix/store
    outDir    -- pathlib.Path of the cache root ($out of the derivation)

    Raises subprocess.CalledProcessError if nix-store, xz or nix-hash fail.
    """
    # The hash part of the store path names the .narinfo file.
    narInfoHash = dropPrefix(item["path"], nixPrefix).split("-")[0]

    # Dump and compress the store path. Run the two stages as an explicit
    # pipe instead of `sh -c "nix-store --dump ... | xz -c"`: with a shell
    # pipeline, check=True only sees xz's exit status, so a failing
    # nix-store --dump would silently produce a truncated NAR. This way
    # both exit codes are checked and no shell quoting is involved.
    xzFile = outDir / "nar" / f"{narInfoHash}.nar.xz"
    with open(xzFile, "wb") as f:
        dump = subprocess.Popen(
            ["nix-store", "--dump", item["path"]],
            stdout=subprocess.PIPE,
        )
        subprocess.run(["xz", "-c"], stdin=dump.stdout, stdout=f, check=True)
        dump.stdout.close()  # let nix-store see EPIPE if xz already exited
        if dump.wait() != 0:
            raise subprocess.CalledProcessError(dump.returncode, dump.args)

    fileHash = (
        subprocess.run(
            ["nix-hash", "--base32", "--type", "sha256", "--flat", xzFile],
            capture_output=True,
            check=True,
        )
        .stdout.decode()
        .strip()
    )
    fileSize = os.path.getsize(xzFile)

    # Rename the .nar.xz file to its own hash to match "nix copy" behavior.
    finalXzFileName = Path("nar") / f"{fileHash}.nar.xz"
    os.rename(xzFile, outDir / finalXzFileName)

    with open(outDir / f"{narInfoHash}.narinfo", "wt") as f:
        f.write(f"StorePath: {item['path']}\n")
        f.write(f"URL: {finalXzFileName}\n")
        f.write("Compression: xz\n")
        f.write(f"FileHash: sha256:{fileHash}\n")
        f.write(f"FileSize: {fileSize}\n")
        f.write(f"NarHash: {item['narHash']}\n")
        f.write(f"NarSize: {item['narSize']}\n")
        f.write(f"References: {' '.join(dropPrefix(ref, nixPrefix) for ref in item['references'])}\n")
|
||||
def main():
    """Build a Nix binary-cache directory layout in $out.

    Reads the closure list from the structured-attrs JSON file
    ($NIX_ATTRS_JSON_FILE), writes nix-cache-info, and processes every
    closure item in parallel with a worker pool sized by NIX_BUILD_CORES.
    """
    outDir = Path(os.environ["out"])
    nixPrefix = os.environ["NIX_STORE"]  # usually /nix/store
    # Nix uses NIX_BUILD_CORES=0 to mean "use all available cores", but
    # Pool(processes=0) raises ValueError — map 0 to None so the pool
    # falls back to os.cpu_count().
    numWorkers = int(os.environ.get("NIX_BUILD_CORES", "4")) or None

    with open(os.environ["NIX_ATTRS_JSON_FILE"], "r") as f:
        closures = json.load(f)["closure"]

    os.makedirs(outDir / "nar", exist_ok=True)

    with open(outDir / "nix-cache-info", "w") as f:
        f.write(f"StoreDir: {nixPrefix}\n")

    # Each item is independent, so fan out across worker processes.
    with Pool(processes=numWorkers) as pool:
        worker = partial(processItem, nixPrefix=nixPrefix, outDir=outDir)
        pool.map(worker, closures)


if __name__ == "__main__":
    main()
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue