2025-03-02 17:33:18 +01:00
|
|
|
|
from graphlib import TopologicalSorter
|
|
|
|
|
from pathlib import Path
|
|
|
|
|
from typing import Any, Generator, Literal
|
2018-11-23 18:03:19 +01:00
|
|
|
|
import argparse
|
2020-09-18 22:22:42 +02:00
|
|
|
|
import asyncio
|
2019-04-12 19:32:44 +02:00
|
|
|
|
import contextlib
|
2018-11-23 18:03:19 +01:00
|
|
|
|
import json
|
|
|
|
|
import os
|
2020-09-20 10:29:34 +02:00
|
|
|
|
import re
|
2018-11-23 18:03:19 +01:00
|
|
|
|
import subprocess
|
|
|
|
|
import sys
|
2019-04-12 19:32:44 +02:00
|
|
|
|
import tempfile
|
2018-11-23 18:03:19 +01:00
|
|
|
|
|
2025-03-02 13:49:45 +01:00
|
|
|
|
|
2025-03-02 17:33:18 +01:00
|
|
|
|
# Strategy for ordering package updates relative to the dependency graph:
# "arbitrary" treats packages as independent; the other two honour dependencies.
Order = Literal["arbitrary", "reverse-topological", "topological"]
|
|
|
|
|
|
|
|
|
|
|
2020-09-18 22:22:42 +02:00
|
|
|
|
class CalledProcessError(Exception):
    """Raised by check_subprocess_output when a subprocess exits non-zero."""

    # The failed process; its returncode is reported to the user.
    process: asyncio.subprocess.Process
    # Captured standard error of the process, if stderr was piped, else None.
    stderr: bytes | None
|
2019-04-12 19:32:44 +02:00
|
|
|
|
|
2025-03-02 13:49:45 +01:00
|
|
|
|
|
2022-09-10 21:55:25 -04:00
|
|
|
|
class UpdateFailedException(Exception):
    """Raised when a package's update script fails and --keep-going is not set."""

    pass
|
|
|
|
|
|
2025-03-02 13:49:45 +01:00
|
|
|
|
|
2025-03-02 14:15:42 +01:00
|
|
|
|
def eprint(*args: Any, **kwargs: Any) -> None:
    """Print to standard error instead of standard output."""
    print(*args, **kwargs, file=sys.stderr)
|
|
|
|
|
|
2025-03-02 13:49:45 +01:00
|
|
|
|
|
2025-03-02 14:15:42 +01:00
|
|
|
|
async def check_subprocess_output(*args: str, **kwargs: Any) -> bytes:
    """
    Run a command and return its standard output.

    Emulates the `check` and `capture_output` behaviour of `subprocess.run`:
    raises CalledProcessError (with the process and captured stderr attached)
    when the command exits with a non-zero status.
    """
    proc = await asyncio.create_subprocess_exec(*args, **kwargs)
    # communicate() drains the pipes while waiting; a bare wait() could
    # deadlock once the OS pipe buffers fill up.
    stdout_data, stderr_data = await proc.communicate()

    if proc.returncode == 0:
        return stdout_data

    failure = CalledProcessError()
    failure.process = proc
    failure.stderr = stderr_data

    raise failure
|
2018-11-23 18:03:19 +01:00
|
|
|
|
|
2025-03-02 13:49:45 +01:00
|
|
|
|
|
2025-03-02 17:33:18 +01:00
|
|
|
|
async def nix_instantiate(attr_path: str) -> Path:
    """Instantiate the given attribute path and return its `.drv` store path."""
    out = await check_subprocess_output(
        "nix-instantiate",
        "-A",
        attr_path,
        stdout=asyncio.subprocess.PIPE,
        stderr=asyncio.subprocess.PIPE,
    )
    # Keep only the part before "!" — presumably an output name suffix
    # (e.g. "/nix/store/….drv!out"); verify against nix-instantiate output.
    drv = out.decode("utf-8").strip().split("!", 1)[0]

    return Path(drv)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
async def nix_query_requisites(drv: Path) -> list[Path]:
    """
    Return the closure of store paths the given derivation depends on,
    as reported by `nix-store --query --requisites`, excluding the
    derivation itself.
    """
    requisites = await check_subprocess_output(
        "nix-store",
        "--query",
        "--requisites",
        str(drv),
        stdout=asyncio.subprocess.PIPE,
        stderr=asyncio.subprocess.PIPE,
    )

    drv_str = str(drv)

    return [
        Path(requisite)
        for requisite in requisites.decode("utf-8").splitlines()
        # Avoid self-loops.
        if requisite != drv_str
    ]
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
async def attr_instantiation_worker(
    semaphore: asyncio.Semaphore,
    attr_path: str,
) -> tuple[Path, str]:
    """
    Instantiate a single attribute path, returning `(drv_path, attr_path)`.

    The semaphore bounds how many nix-instantiate processes run concurrently.
    """
    async with semaphore:
        eprint(f"Instantiating {attr_path}…")
        return (await nix_instantiate(attr_path), attr_path)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
async def requisites_worker(
    semaphore: asyncio.Semaphore,
    drv: Path,
) -> tuple[Path, list[Path]]:
    """
    Query the requisites of a single derivation, returning `(drv, requisites)`.

    The semaphore bounds how many nix-store processes run concurrently.
    """
    async with semaphore:
        eprint(f"Obtaining requisites for {drv}…")
        return (drv, await nix_query_requisites(drv))
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def requisites_to_attrs(
    drv_attr_paths: dict[Path, str],
    requisites: list[Path],
) -> set[str]:
    """
    Converts a set of requisite `.drv`s to a set of attribute paths.

    Derivations that do not correspond to any of the packages we want to update will be discarded.
    """
    known_attrs: set[str] = set()
    for requisite in requisites:
        attr_path = drv_attr_paths.get(requisite)
        if attr_path is not None:
            known_attrs.add(attr_path)
    return known_attrs
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def reverse_edges(graph: dict[str, set[str]]) -> dict[str, set[str]]:
    """
    Flips the edges of a directed graph.

    Every node of the input graph is preserved as a key of the result, even
    when it has no incoming edges after flipping. Previously, nodes that were
    not a dependency of anything silently disappeared from the reversed graph,
    so an isolated package would be dropped from the reverse-topological
    sorter (and a later lookup of its position would fail).
    """
    # Seed with empty edge sets so isolated nodes are kept.
    reversed_graph: dict[str, set[str]] = {node: set() for node in graph}

    for dependent, dependencies in graph.items():
        for dependency in dependencies:
            # setdefault still covers dependencies that were not keys of `graph`.
            reversed_graph.setdefault(dependency, set()).add(dependent)

    return reversed_graph
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def get_independent_sorter(
|
|
|
|
|
packages: list[dict],
|
|
|
|
|
) -> TopologicalSorter[str]:
|
|
|
|
|
"""
|
|
|
|
|
Returns a sorter which treats all packages as independent,
|
|
|
|
|
which will allow them to be updated in parallel.
|
|
|
|
|
"""
|
|
|
|
|
|
|
|
|
|
attr_deps: dict[str, set[str]] = {
|
|
|
|
|
package["attrPath"]: set() for package in packages
|
|
|
|
|
}
|
|
|
|
|
sorter = TopologicalSorter(attr_deps)
|
|
|
|
|
sorter.prepare()
|
|
|
|
|
|
|
|
|
|
return sorter
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
async def get_topological_sorter(
    max_workers: int,
    packages: list[dict],
    reverse_order: bool,
) -> tuple[TopologicalSorter[str], list[dict]]:
    """
    Returns a sorter which returns packages in topological or reverse topological order,
    which will ensure a package is updated before or after its dependencies, respectively.

    Also returns `packages` reordered to match the sorter's static order.
    The dependency graph is computed by instantiating every attribute path
    and querying the requisites of each resulting derivation.
    """
    # Bound the number of concurrent nix subprocesses.
    semaphore = asyncio.Semaphore(max_workers)

    # Map .drv store path → attribute path for every package to be updated.
    drv_attr_paths = dict(
        await asyncio.gather(
            *(
                attr_instantiation_worker(semaphore, package["attrPath"])
                for package in packages
            )
        )
    )

    drv_requisites = await asyncio.gather(
        *(requisites_worker(semaphore, drv) for drv in drv_attr_paths.keys())
    )

    # Dependency graph restricted to the packages being updated;
    # requisites outside the update set are discarded.
    attr_deps = {
        drv_attr_paths[drv]: requisites_to_attrs(drv_attr_paths, requisites)
        for drv, requisites in drv_requisites
    }

    if reverse_order:
        attr_deps = reverse_edges(attr_deps)

    # Adjust packages order based on the topological one.
    # A precomputed position map replaces repeated list.index() calls in the
    # sort key, which would otherwise make this sort quadratic.
    ordered = list(TopologicalSorter(attr_deps).static_order())
    position = {attr_path: index for index, attr_path in enumerate(ordered)}
    packages = sorted(packages, key=lambda package: position[package["attrPath"]])

    sorter = TopologicalSorter(attr_deps)
    sorter.prepare()

    return sorter, packages
|
|
|
|
|
|
|
|
|
|
|
2025-03-02 13:49:45 +01:00
|
|
|
|
async def run_update_script(
    nixpkgs_root: str,
    merge_lock: asyncio.Lock,
    temp_dir: tuple[str, str] | None,
    package: dict,
    keep_going: bool,
) -> None:
    """
    Run a single package's update script and merge the resulting changes.

    When `temp_dir` is a `(worktree, branch)` pair, the script runs inside
    that git worktree (reset to HEAD first) so its changes can be committed
    and cherry-picked; when it is None, the script runs in place.
    On failure, the captured stderr is shown and written to a log file;
    unless `keep_going` is set, an UpdateFailedException is raised.
    """
    worktree: str | None = None

    update_script_command = package["updateScript"]

    if temp_dir is not None:
        worktree, _branch = temp_dir

        # Ensure the worktree is clean before update.
        await check_subprocess_output(
            "git",
            "reset",
            "--hard",
            "--quiet",
            "HEAD",
            cwd=worktree,
        )

        # Update scripts can use $(dirname $0) to get their location but we want to run
        # their clones in the git worktree, not in the main nixpkgs repo.
        update_script_command = map(
            lambda arg: re.sub(r"^{0}".format(re.escape(nixpkgs_root)), worktree, arg),
            update_script_command,
        )

    eprint(f" - {package['name']}: UPDATING ...")

    try:
        # `env` injects package metadata for the update script to consume.
        update_info = await check_subprocess_output(
            "env",
            f"UPDATE_NIX_NAME={package['name']}",
            f"UPDATE_NIX_PNAME={package['pname']}",
            f"UPDATE_NIX_OLD_VERSION={package['oldVersion']}",
            f"UPDATE_NIX_ATTR_PATH={package['attrPath']}",
            *update_script_command,
            stdout=asyncio.subprocess.PIPE,
            stderr=asyncio.subprocess.PIPE,
            cwd=worktree,
        )
        await merge_changes(merge_lock, package, update_info, temp_dir)
    except KeyboardInterrupt as e:
        eprint("Cancelling…")
        # Translate the interrupt into task cancellation for the event loop.
        raise asyncio.exceptions.CancelledError()
    except CalledProcessError as e:
        eprint(f" - {package['name']}: ERROR")
        if e.stderr is not None:
            eprint()
            eprint(
                f"--- SHOWING ERROR LOG FOR {package['name']} ----------------------"
            )
            eprint()
            eprint(e.stderr.decode("utf-8"))
            # Persist the failure log next to the current working directory.
            with open(f"{package['pname']}.log", "wb") as logfile:
                logfile.write(e.stderr)
            eprint()
            eprint(
                f"--- SHOWING ERROR LOG FOR {package['name']} ----------------------"
            )

        if not keep_going:
            raise UpdateFailedException(
                f"The update script for {package['name']} failed with exit code {e.process.returncode}"
            )
|
|
|
|
|
|
2019-04-17 04:04:32 +02:00
|
|
|
|
|
2020-09-18 22:22:42 +02:00
|
|
|
|
@contextlib.contextmanager
def make_worktree() -> Generator[tuple[str, str], None, None]:
    """
    Create a temporary git worktree on a fresh branch and yield
    `(worktree_path, branch_name)`; both are removed on exit.
    """
    with tempfile.TemporaryDirectory() as wt:
        # Branch name is derived from the temp dir so concurrent worktrees don't clash.
        branch_name = f"update-{os.path.basename(wt)}"
        target_directory = f"{wt}/nixpkgs"

        subprocess.run(["git", "worktree", "add", "-b", branch_name, target_directory])
        try:
            yield (target_directory, branch_name)
        finally:
            # Best-effort cleanup; return codes are deliberately not checked.
            subprocess.run(["git", "worktree", "remove", "--force", target_directory])
            subprocess.run(["git", "branch", "-D", branch_name])
|
|
|
|
|
|
2020-09-18 22:22:42 +02:00
|
|
|
|
|
2025-03-02 13:49:45 +01:00
|
|
|
|
async def commit_changes(
    name: str,
    merge_lock: asyncio.Lock,
    worktree: str,
    branch: str,
    changes: list[dict],
) -> None:
    """
    Commit each change in the worktree's branch, then cherry-pick the branch
    into the current repository.

    Each change dict must provide "files", "attrPath", "oldVersion" and
    "newVersion"; "commitMessage" or "commitBody" override/extend the
    default message.
    """
    for change in changes:
        # Git can only handle a single index operation at a time
        async with merge_lock:
            await check_subprocess_output("git", "add", *change["files"], cwd=worktree)
            commit_message = "{attrPath}: {oldVersion} -> {newVersion}".format(**change)
            if "commitMessage" in change:
                commit_message = change["commitMessage"]
            elif "commitBody" in change:
                commit_message = commit_message + "\n\n" + change["commitBody"]
            await check_subprocess_output(
                "git",
                "commit",
                "--quiet",
                "-m",
                commit_message,
                cwd=worktree,
            )
            # No cwd here: the cherry-pick presumably runs in the main
            # repository checkout, not the worktree — verify against callers.
            await check_subprocess_output("git", "cherry-pick", branch)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
async def check_changes(
    package: dict,
    worktree: str,
    update_info: bytes,
) -> list[dict]:
    """
    Build the list of change dicts describing what the update script did.

    Scripts with the "commit" feature emit JSON on stdout (`update_info`);
    otherwise a single empty change is assumed. Missing fields of a single
    change are filled in from package metadata, `nix-instantiate` (new
    version) and `git diff` (changed files). Returns an empty list when
    nothing actually changed.
    """
    if "commit" in package["supportedFeatures"]:
        changes = json.loads(update_info)
    else:
        changes = [{}]

    # Try to fill in missing attributes when there is just a single change.
    if len(changes) == 1:
        # Dynamic data from updater take precedence over static data from passthru.updateScript.
        if "attrPath" not in changes[0]:
            # update.nix is always passing attrPath
            changes[0]["attrPath"] = package["attrPath"]

        if "oldVersion" not in changes[0]:
            # update.nix is always passing oldVersion
            changes[0]["oldVersion"] = package["oldVersion"]

        if "newVersion" not in changes[0]:
            attr_path = changes[0]["attrPath"]
            # Evaluate lib.getVersion on the updated worktree to read the new version.
            obtain_new_version_output = await check_subprocess_output(
                "nix-instantiate",
                "--expr",
                f"with import ./. {{}}; lib.getVersion {attr_path}",
                "--eval",
                "--strict",
                "--json",
                stdout=asyncio.subprocess.PIPE,
                stderr=asyncio.subprocess.PIPE,
                cwd=worktree,
            )
            changes[0]["newVersion"] = json.loads(
                obtain_new_version_output.decode("utf-8")
            )

        if "files" not in changes[0]:
            changed_files_output = await check_subprocess_output(
                "git",
                "diff",
                "--name-only",
                "HEAD",
                stdout=asyncio.subprocess.PIPE,
                cwd=worktree,
            )
            # NOTE(review): splitlines() on bytes yields bytes file names;
            # downstream `git add` appears to accept them — confirm.
            changed_files = changed_files_output.splitlines()
            changes[0]["files"] = changed_files

            if len(changed_files) == 0:
                return []

    return changes
|
|
|
|
|
|
2025-03-02 13:49:45 +01:00
|
|
|
|
|
|
|
|
|
async def merge_changes(
    merge_lock: asyncio.Lock,
    package: dict,
    update_info: bytes,
    temp_dir: tuple[str, str] | None,
) -> None:
    """
    Inspect an update script's output and, when running with a worktree,
    commit any resulting changes; otherwise just report completion.
    """
    if temp_dir is not None:
        worktree, branch = temp_dir
        changes = await check_changes(package, worktree, update_info)

        if len(changes) > 0:
            await commit_changes(package["name"], merge_lock, worktree, branch, changes)
        else:
            eprint(f" - {package['name']}: DONE, no changes.")
    else:
        eprint(f" - {package['name']}: DONE.")
|
2020-09-18 22:22:42 +02:00
|
|
|
|
|
2025-03-02 13:49:45 +01:00
|
|
|
|
|
|
|
|
|
async def updater(
    nixpkgs_root: str,
    temp_dir: tuple[str, str] | None,
    merge_lock: asyncio.Lock,
    packages_to_update: asyncio.Queue[dict | None],
    keep_going: bool,
    commit: bool,
) -> None:
    """
    Worker loop: pull packages from the queue and run their update scripts
    until a `None` sentinel is received.

    NOTE(review): the `commit` parameter is not used in this body — whether
    committing happens is driven entirely by `temp_dir`; confirm it is kept
    only for signature symmetry with callers.
    """
    while True:
        package = await packages_to_update.get()
        if package is None:
            # A sentinel received, we are done.
            return

        # Packages that can neither emit commits nor be located by attrPath
        # cannot use the worktree flow; fall back to updating in place.
        if not ("commit" in package["supportedFeatures"] or "attrPath" in package):
            temp_dir = None

        await run_update_script(nixpkgs_root, merge_lock, temp_dir, package, keep_going)

        # Lets populate_queue's join() know this wave item is finished.
        packages_to_update.task_done()
|
|
|
|
|
|
2025-03-02 13:49:45 +01:00
|
|
|
|
|
2025-03-02 17:33:18 +01:00
|
|
|
|
async def populate_queue(
    attr_packages: dict[str, dict],
    sorter: TopologicalSorter[str],
    packages_to_update: asyncio.Queue[dict | None],
    num_workers: int,
) -> None:
    """
    Keeps populating the queue with packages that can be updated
    according to ordering requirements. If topological order
    is used, the packages will appear in waves, as packages with
    no dependencies are processed and removed from the sorter.
    With `order="arbitrary"`, all packages will be enqueued simultaneously.
    """
    # Fill up an update queue,
    while sorter.is_active():
        ready_packages = list(sorter.get_ready())
        eprint(f"Enqueuing group of {len(ready_packages)} packages")
        for package in ready_packages:
            await packages_to_update.put(attr_packages[package])
        # Wait until the workers have drained the whole wave before
        # releasing the next batch from the sorter.
        await packages_to_update.join()
        sorter.done(*ready_packages)

    # Add sentinels, one for each worker.
    # A worker will terminate when it gets a sentinel from the queue.
    for i in range(num_workers):
        await packages_to_update.put(None)
|
|
|
|
|
|
|
|
|
|
|
2025-03-02 13:49:45 +01:00
|
|
|
|
async def start_updates(
    max_workers: int,
    keep_going: bool,
    commit: bool,
    attr_packages: dict[str, dict],
    sorter: TopologicalSorter[str],
) -> None:
    """
    Spawn updater workers plus the queue-feeding task and run them to
    completion, cancelling the whole group if any of them is cancelled
    or fails (when keep_going is off).
    """
    merge_lock = asyncio.Lock()
    packages_to_update: asyncio.Queue[dict | None] = asyncio.Queue()

    with contextlib.ExitStack() as stack:
        temp_dirs: list[tuple[str, str] | None] = []

        # Do not create more workers than there are packages.
        num_workers = min(max_workers, len(attr_packages))

        nixpkgs_root_output = await check_subprocess_output(
            "git",
            "rev-parse",
            "--show-toplevel",
            stdout=asyncio.subprocess.PIPE,
        )
        nixpkgs_root = nixpkgs_root_output.decode("utf-8").strip()

        # Set up temporary directories when using auto-commit.
        for i in range(num_workers):
            temp_dir = stack.enter_context(make_worktree()) if commit else None
            temp_dirs.append(temp_dir)

        queue_task = populate_queue(
            attr_packages,
            sorter,
            packages_to_update,
            num_workers,
        )

        # Prepare updater workers for each temp_dir directory.
        # At most `num_workers` instances of `run_update_script` will be running at one time.
        updater_tasks = [
            updater(
                nixpkgs_root,
                temp_dir,
                merge_lock,
                packages_to_update,
                keep_going,
                commit,
            )
            for temp_dir in temp_dirs
        ]

        tasks = asyncio.gather(
            *updater_tasks,
            queue_task,
        )

        try:
            # Start updater workers.
            await tasks
        except asyncio.exceptions.CancelledError:
            # When one worker is cancelled, cancel the others too.
            tasks.cancel()
        except UpdateFailedException as e:
            # When one worker fails, cancel the others, as this exception is only thrown when keep_going is false.
            tasks.cancel()
            eprint(e)
            sys.exit(1)
|
2020-09-18 22:22:42 +02:00
|
|
|
|
|
2025-03-02 13:49:45 +01:00
|
|
|
|
|
2025-03-02 23:12:05 +01:00
|
|
|
|
async def main(
    max_workers: int,
    keep_going: bool,
    commit: bool,
    packages_path: str,
    skip_prompt: bool,
    order: Order,
) -> None:
    """
    Entry point: load the package list from the JSON file, build the
    requested sorter, confirm with the user (unless skipped), then run
    the updates. Exits the process with 0 on success or 130 on abort.
    """
    with open(packages_path) as f:
        packages = json.load(f)

    if order != "arbitrary":
        eprint("Sorting packages…")
        reverse_order = order == "reverse-topological"
        sorter, packages = await get_topological_sorter(
            max_workers,
            packages,
            reverse_order,
        )
    else:
        sorter = get_independent_sorter(packages)

    # Index packages by attribute path for quick lookup by the queue feeder.
    attr_packages = {package["attrPath"]: package for package in packages}

    eprint()
    eprint("Going to be running update for following packages:")
    for package in packages:
        eprint(f" - {package['name']}")
    eprint()

    confirm = "" if skip_prompt else input("Press Enter key to continue...")

    if confirm == "":
        eprint()
        eprint("Running update for:")

        await start_updates(max_workers, keep_going, commit, attr_packages, sorter)

        eprint()
        eprint("Packages updated!")
        sys.exit()
    else:
        eprint("Aborting!")
        # 130 conventionally signals termination by the user.
        sys.exit(130)
|
|
|
|
|
|
|
|
|
|
|
2025-03-02 13:49:45 +01:00
|
|
|
|
# Command-line interface. Defined at module level so `__main__` can parse below.
parser = argparse.ArgumentParser(description="Update packages")
parser.add_argument(
    "--max-workers",
    "-j",
    dest="max_workers",
    type=int,
    help="Number of updates to run concurrently",
    nargs="?",
    default=4,
)
parser.add_argument(
    "--keep-going",
    "-k",
    dest="keep_going",
    action="store_true",
    help="Do not stop after first failure",
)
parser.add_argument(
    "--commit",
    "-c",
    dest="commit",
    action="store_true",
    help="Commit the changes",
)
parser.add_argument(
    "--order",
    dest="order",
    default="arbitrary",
    choices=["arbitrary", "reverse-topological", "topological"],
    help="Sort the packages based on dependency relation",
)
parser.add_argument(
    "packages",
    help="JSON file containing the list of package names and their update scripts",
)
parser.add_argument(
    "--skip-prompt",
    "-s",
    dest="skip_prompt",
    action="store_true",
    help="Do not stop for prompts",
)
|
|
|
|
|
|
|
|
|
|
if __name__ == "__main__":
    args = parser.parse_args()

    try:
        asyncio.run(
            main(
                args.max_workers,
                args.keep_going,
                args.commit,
                args.packages,
                args.skip_prompt,
                args.order,
            )
        )
    except KeyboardInterrupt as e:
        # Let’s cancel outside of the main loop too.
        sys.exit(130)
|