diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index cfe97fc84a..64ea82a337 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -23,7 +23,7 @@ jobs:
           python3 -c "import toml; toml.load(open('apps.toml'))"
       - name: Check all working apps have consistent app id / app url and categories
         run: |
-          ./tools/catalog_linter.py
+          ./tools/catalog_linter.py --apps-dir .
      - name: Check the generation of the app catalog
        run: |
          ./tools/list_builder.py
diff --git a/maintenance.sh b/maintenance.sh
index 8e199c1908..8d459d5a1d 100644
--- a/maintenance.sh
+++ b/maintenance.sh
@@ -84,11 +84,17 @@ function git_pull_and_update_cron_and_restart_services_if_needed()
     systemctl --quiet is-active webhooks || sendxmpppy "[autoreadme] Uhoh, failed to (re)start the autoreadme service?"
 }

+function update_app_cache()
+{
+    ./tools/app_caches.py -d -l . -c .apps_caches -j20
+}
+
 function rebuild_catalog()
 {
     log=$workdir/app_list_auto_update.log
     date >> $log
     git_pull_and_update_cron_and_restart_services_if_needed
+    update_app_cache
     ./tools/list_builder.py &>> $log || sendxmpppy "[listbuilder] Rebuilding the application list failed miserably"
 }

@@ -97,6 +103,7 @@ function autoupdate_app_sources()
     log=$workdir/app_sources_auto_update.log
     date >> $log
     git_pull_and_update_cron_and_restart_services_if_needed
+    update_app_cache
     tools/autoupdate_app_sources/venv/bin/python3 tools/autoupdate_app_sources/autoupdate_app_sources.py \
         --latest-commit-weekly --edit --commit --pr --paste -j1 \
         &> $log || sendxmpppy "[appsourcesautoupdate] App sources auto-update failed miserably"
@@ -104,6 +111,7 @@ function autoupdate_app_sources()

 function update_app_levels()
 {
+    update_app_cache
     pushd tools/update_app_levels >/dev/null
     python3 update_app_levels.py
     popd >/dev/null
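For readability: the short options in `update_app_cache` correspond to the long options introduced later in this diff. `-d` is `--delete-missing` from `tools/app_caches.py`; `-l` is `--apps-dir` and `-c` is `--apps-cache` from the new `tools/appslib/get_apps_repo.py`; `-j` is `--processes`.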
"git@github.com:") - - # Don't refresh if already refreshed during last hour - age = git_repo_age(app_path) - if age is False: - app_cache_clone(app, infos, fetch_all_branches) - return - - # if age < 3600: - # logging.info(f"Skipping {app}, it's been updated recently.") - # return - - logging.info("Updating %s...", app) - repo = Repo(app_path) - repo.remote("origin").set_url(infos["url"]) - - branch = infos.get("branch", "master") - if fetch_all_branches: - repo.git.remote("set-branches", "origin", "*") - repo.remote("origin").fetch() - repo.remote("origin").pull() - else: - if repo.active_branch != branch: - all_branches = [str(b) for b in repo.branches] - if branch in all_branches: - repo.git.checkout(branch, "--force") + def cleanup(self) -> None: + logging.warning(f"Cleaning up {self.path}...") + if self.path.exists(): + if self.path.is_dir(): + shutil.rmtree(self.path) else: - repo.git.remote("set-branches", "--add", "origin", branch) - repo.remote("origin").fetch(f"{branch}:{branch}") - - repo.remote("origin").fetch(refspec=branch, force=True) - repo.git.reset("--hard", f"origin/{branch}") - + self.path.unlink() + + def _clone(self, remote: str, all_branches: bool, branch: str) -> None: + logging.info("Cloning %s...", self.name) + + if self.path.exists(): + self.cleanup() + Repo.clone_from( + remote, + to_path=self.path, + depth=40, + single_branch=not all_branches, + branch=branch, + ) -def __app_cache_clone_or_update_mapped(data): - name, info, ssh_clone, all_branches = data + def _update(self, remote: str, all_branches: bool, branch: str) -> None: + logging.info("Updating %s...", self.name) + repo = Repo(self.path) + repo.remote("origin").set_url(remote) + + if all_branches: + repo.git.remote("set-branches", "origin", "*") + repo.remote("origin").fetch() + repo.remote("origin").pull() + else: + if repo.active_branch != branch: + repo_branches = [str(b) for b in repo.heads] + if branch in repo_branches: + repo.git.checkout(branch, "--force") + else: + repo.git.remote("set-branches", "--add", "origin", branch) + repo.remote("origin").fetch(f"{branch}:{branch}") + + repo.remote("origin").fetch(refspec=branch, force=True) + repo.git.reset("--hard", f"origin/{branch}") + + +def __appdir_ensure_mapped(data): + name, path, url, branch, url_ssh, all_branches = data try: - app_cache_clone_or_update(name, info, ssh_clone, all_branches) + AppDir(name, path).ensure(url, branch, url_ssh, all_branches) except Exception as err: logging.error("[App caches] Error while updating %s: %s", name, err) def apps_cache_update_all( + cache_path: Path, apps: dict[str, dict[str, Any]], parallel: int = 8, - ssh_clone: bool = False, + url_ssh: bool = False, all_branches: bool = False, ) -> None: - with Pool(processes=parallel) as pool: - tasks = pool.imap_unordered( - __app_cache_clone_or_update_mapped, - zip(apps.keys(), apps.values(), repeat(ssh_clone), repeat(all_branches)), + args = ( + ( + app, + cache_path / app, + info["url"], + info.get("branch", "master"), + url_ssh, + all_branches, ) + for app, info in apps.items() + ) + with Pool(processes=parallel) as pool: + tasks = pool.imap_unordered(__appdir_ensure_mapped, args) for _ in tqdm.tqdm(tasks, total=len(apps.keys()), ascii=" ·#"): pass -def apps_cache_cleanup(apps: dict[str, dict[str, Any]]) -> None: - for element in APPS_CACHE_DIR.iterdir(): +def apps_cache_cleanup(cache_path: Path, apps: dict[str, dict[str, Any]]) -> None: + for element in cache_path.iterdir(): if element.name not in apps.keys(): - logging.warning(f"Removing {element}...") - if 
diff --git a/tools/appslib/get_apps_repo.py b/tools/appslib/get_apps_repo.py
new file mode 100644
index 0000000000..7579c0a60b
--- /dev/null
+++ b/tools/appslib/get_apps_repo.py
@@ -0,0 +1,92 @@
+#!/usr/bin/env python3
+
+import os
+import argparse
+import tempfile
+import logging
+from pathlib import Path
+from typing import Optional
+from git import Repo
+from .utils import set_apps_path
+
+
+DEFAULT_GIT_REPO = "https://github.com/YunoHost/apps"
+
+# This provides a reference to the tempfile, thus keeping it alive until sys.exit
+APPS_REPO_TMPDIR: Optional[tempfile.TemporaryDirectory] = None
+
+# This is the actual value returned by from_args()
+APPS_REPO_PATH: Optional[Path] = None
+
+APPS_CACHE_PATH: Optional[Path] = None
+
+
+def add_args(parser: argparse.ArgumentParser, allow_temp: bool = True) -> None:
+    env_apps_dir_str = os.environ.get("YNH_APPS_DIR")
+    env_apps_dir = Path(env_apps_dir_str) if env_apps_dir_str is not None else None
+
+    repo_group = parser.add_mutually_exclusive_group(required=False)
+    repo_group.add_argument(
+        "-l",
+        "--apps-dir",
+        type=Path,
+        default=env_apps_dir,
+        help="Path to a local 'apps' repository",
+    )
+    if allow_temp:
+        repo_group.add_argument(
+            "-r",
+            "--apps-repo",
+            type=str,
+            default=DEFAULT_GIT_REPO,
+            help="Git url to clone the remote 'apps' repository",
+        )
+    parser.add_argument(
+        "-c",
+        "--apps-cache",
+        type=Path,
+        help="Path to the apps cache directory (default=<apps_dir>/.apps_cache)",
+    )
+
+
+def from_args(args: Optional[argparse.Namespace]) -> Path:
+    global APPS_REPO_PATH
+    global APPS_REPO_TMPDIR
+
+    if APPS_REPO_PATH is not None:
+        return APPS_REPO_PATH
+
+    assert args is not None
+    if args.apps_dir is not None:
+        APPS_REPO_PATH = args.apps_dir
+    elif args.apps_repo is not None:
+        APPS_REPO_TMPDIR = tempfile.TemporaryDirectory(prefix="yunohost_apps_")
+        APPS_REPO_PATH = Path(APPS_REPO_TMPDIR.name)
+        logging.info("Cloning the 'apps' repository...")
+        repo = Repo.clone_from(args.apps_repo, to_path=APPS_REPO_PATH)
+        assert repo.working_tree_dir is not None
+    else:
+        raise RuntimeError("You need to pass either --apps-repo or --apps-dir!")
+
+    assert APPS_REPO_PATH is not None
+    set_apps_path(APPS_REPO_PATH)
+    return APPS_REPO_PATH
+
+
+def cache_path(args: Optional[argparse.Namespace]) -> Path:
+    global APPS_CACHE_PATH
+
+    if APPS_CACHE_PATH is not None:
+        return APPS_CACHE_PATH
+
+    assert args is not None
+    if args.apps_cache is not None:
+        APPS_CACHE_PATH = args.apps_cache
+    else:
+        if APPS_REPO_PATH is None:
+            from_args(args)
+        assert APPS_REPO_PATH is not None
+        APPS_CACHE_PATH = APPS_REPO_PATH / ".apps_cache"
+
+    assert APPS_CACHE_PATH is not None
+    return APPS_CACHE_PATH
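Every CLI touched below wires this module the same way; the pattern in miniature (a sketch, not a file from this patch):

```python
# Sketch of the opt-in pattern used by the tools in this patch.
import argparse

import appslib.get_apps_repo as get_apps_repo

parser = argparse.ArgumentParser()
get_apps_repo.add_args(parser)  # adds -l/--apps-dir, -r/--apps-repo and -c/--apps-cache

args = parser.parse_args()
apps_path = get_apps_repo.from_args(args)    # a local checkout, or a temporary clone
cache_path = get_apps_repo.cache_path(args)  # defaults to apps_path / ".apps_cache"
```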
diff --git a/tools/appslib/utils.py b/tools/appslib/utils.py
index 7353ce60b4..5f677d8e8f 100644
--- a/tools/appslib/utils.py
+++ b/tools/appslib/utils.py
@@ -1,9 +1,7 @@
 #!/usr/bin/env python3

-import sys
 import subprocess
-from typing import Any, TextIO, Generator, Optional, Union
-import time
+from typing import Any, Optional
 from functools import cache
 from pathlib import Path
 from git import Repo
@@ -13,9 +11,9 @@
 REPO_APPS_ROOT = Path(Repo(__file__, search_parent_directories=True).working_dir)


-@cache
-def apps_repo_root() -> Path:
-    return Path(__file__).parent.parent.parent
+def set_apps_path(apps_path: Path) -> None:
+    global REPO_APPS_ROOT
+    REPO_APPS_ROOT = apps_path


 def git(cmd: list[str], cwd: Optional[Path] = None) -> str:
@@ -33,13 +31,6 @@ def git(cmd: list[str], cwd: Optional[Path] = None) -> str:
     )


-def git_repo_age(path: Path) -> Union[bool, int]:
-    for file in [path / ".git" / "FETCH_HEAD", path / ".git" / "HEAD"]:
-        if file.exists():
-            return int(time.time() - file.stat().st_mtime)
-    return False
-
-
 @cache
 def get_catalog(working_only: bool = False) -> dict[str, dict[str, Any]]:
     """Load the app catalog and filter out the non-working ones"""
diff --git a/tools/autopatches/autopatch.py b/tools/autopatches/autopatch.py
index 20190e38c2..c46273856d 100755
--- a/tools/autopatches/autopatch.py
+++ b/tools/autopatches/autopatch.py
@@ -1,5 +1,6 @@
 #!/usr/bin/env python3

+import argparse
 import json
 import os
 import subprocess
@@ -14,19 +15,20 @@
 sys.path.insert(0, str(Path(__file__).parent.parent))

 from appslib.utils import (  # noqa: E402 pylint: disable=import-error,wrong-import-position
-    REPO_APPS_ROOT,
     get_catalog,
 )

+TOOLS_DIR = Path(__file__).resolve().parent.parent
+
 my_env = os.environ.copy()
 my_env["GIT_TERMINAL_PROMPT"] = "0"

 os.makedirs(".apps_cache", exist_ok=True)

 login = (
-    (REPO_APPS_ROOT / "tools/.github_login").open("r", encoding="utf-8").read().strip()
+    (TOOLS_DIR / ".github_login").open("r", encoding="utf-8").read().strip()
 )
 token = (
-    (REPO_APPS_ROOT / "tools/.github_token").open("r", encoding="utf-8").read().strip()
+    (TOOLS_DIR / ".github_token").open("r", encoding="utf-8").read().strip()
 )

 github_api = "https://api.github.com"
@@ -193,36 +195,31 @@ def create_pull_request(repo, patch, base_branch, s):


 def main():
-    action = sys.argv[1]
-    if action == "--help":
-        print(
-            """
-    Example usage:
-
-# Init local git clone for all apps
-./autopatch.py --build-cache
-
-# Apply patch in all local clones
-./autopatch.py --apply explicit-php-version-in-deps
-
-# Inspect diff for all apps
-./autopatch.py --diff
+    parser = argparse.ArgumentParser()
+    parser.add_argument("the_patch", type=str, nargs="?", help="The name of the patch to apply")
+    parser.add_argument("--cache", "-b", action="store_true", help="Init local git clone for all apps")
+    parser.add_argument("--apply", "-a", action="store_true", help="Apply patch on all local clones")
+    parser.add_argument("--diff", "-d", action="store_true", help="Inspect diff for all apps")
+    parser.add_argument("--push", "-p", action="store_true", help="Push and create pull requests on all apps with non-empty diff")
+    args = parser.parse_args()
+
+    if not (args.cache or args.apply or args.diff or args.push):
+        parser.error("One of --cache, --apply, --diff or --push is required.")
+
+    if args.cache:
+        build_cache()

-# Push and create pull requests on all apps with non-empty diff
-./autopatch.py --push explicit-php-version-in-deps
-"""
-        )
+    if args.apply:
+        if not args.the_patch:
+            parser.error("--apply requires the patch name to be passed")
+        apply(args.the_patch)

-    elif action == "--build-cache":
-        build_cache()
-    elif action == "--apply":
-        apply(sys.argv[2])
-    elif action == "--diff":
+    if args.diff:
         diff()
-    elif action == "--push":
-        push(sys.argv[2])
-    else:
-        print("Unknown action %s" % action)
+    if args.push:
+        if not args.the_patch:
+            parser.error("--push requires the patch name to be passed")
+        push(args.the_patch)


 main()
diff --git a/tools/autoupdate_app_sources/autoupdate_app_sources.py b/tools/autoupdate_app_sources/autoupdate_app_sources.py
index 1ce08cc4c3..40618860ac 100755
--- a/tools/autoupdate_app_sources/autoupdate_app_sources.py
+++ b/tools/autoupdate_app_sources/autoupdate_app_sources.py
@@ -27,15 +27,13 @@
     DownloadPageAPI,
     RefType,
 )  # noqa: E402,E501 pylint: disable=import-error,wrong-import-position
+import appslib.get_apps_repo as get_apps_repo
 import appslib.logging_sender  # noqa: E402 pylint: disable=import-error,wrong-import-position
 from appslib.utils import (
-    REPO_APPS_ROOT,
     get_catalog,
 )  # noqa: E402 pylint: disable=import-error,wrong-import-position
-from app_caches import (
-    app_cache_folder,
-)  # noqa: E402 pylint: disable=import-error,wrong-import-position

+TOOLS_DIR = Path(__file__).resolve().parent.parent

 STRATEGIES = [
     "latest_github_release",
@@ -62,22 +60,13 @@ def get_github() -> tuple[
 ]:
     try:
         github_login = (
-            (REPO_APPS_ROOT / "tools" / ".github_login")
-            .open("r", encoding="utf-8")
-            .read()
-            .strip()
+            (TOOLS_DIR / ".github_login").open("r", encoding="utf-8").read().strip()
         )
         github_token = (
-            (REPO_APPS_ROOT / "tools" / ".github_token")
-            .open("r", encoding="utf-8")
-            .read()
-            .strip()
+            (TOOLS_DIR / ".github_token").open("r", encoding="utf-8").read().strip()
         )
         github_email = (
-            (REPO_APPS_ROOT / "tools" / ".github_email")
-            .open("r", encoding="utf-8")
-            .read()
-            .strip()
+            (TOOLS_DIR / ".github_email").open("r", encoding="utf-8").read().strip()
         )

         auth = (github_login, github_token)
@@ -89,7 +78,7 @@
     return None, None, None


-def apps_to_run_auto_update_for() -> list[str]:
+def apps_to_run_auto_update_for(cache_path: Path) -> list[str]:
     apps_flagged_as_working_and_on_yunohost_apps_org = [
         app
         for app, infos in get_catalog().items()
@@ -100,7 +89,7 @@
     relevant_apps = []
     for app in apps_flagged_as_working_and_on_yunohost_apps_org:
         try:
-            manifest_toml = app_cache_folder(app) / "manifest.toml"
+            manifest_toml = cache_path / app / "manifest.toml"
             if manifest_toml.exists():
                 manifest = toml.load(manifest_toml.open("r", encoding="utf-8"))
                 sources = manifest.get("resources", {}).get("sources", {})
@@ -746,6 +735,7 @@ def main() -> None:
     parser.add_argument(
         "-j", "--processes", type=int, default=multiprocessing.cpu_count()
     )
+    get_apps_repo.add_args(parser)
     args = parser.parse_args()

     appslib.logging_sender.enable()
@@ -757,11 +747,14 @@
         logging.error("--pr requires --commit")
         sys.exit(1)

+    get_apps_repo.from_args(args)
+    cache_path = get_apps_repo.cache_path(args)
+
     # Handle apps or no apps
-    apps = list(args.apps) if args.apps else apps_to_run_auto_update_for()
+    apps = list(args.apps) if args.apps else apps_to_run_auto_update_for(cache_path)

     apps_already = {}  # for which a PR already exists
     apps_updated = {}
     apps_failed = {}

     with multiprocessing.Pool(processes=args.processes) as pool:
         tasks = pool.imap(
diff --git a/tools/catalog_linter.py b/tools/catalog_linter.py
index b4438f9d8e..022fa231f4 100755
--- a/tools/catalog_linter.py
+++ b/tools/catalog_linter.py
@@ -1,5 +1,6 @@
 #!/usr/bin/env python3

+import argparse
 import json
 import sys
 from pathlib import Path
@@ -7,9 +8,9 @@
 from typing import Any, Dict, Generator, List, Tuple

 import jsonschema
+import appslib.get_apps_repo as get_apps_repo
 from appslib.utils import (
-    REPO_APPS_ROOT,  # pylint: disable=import-error
-    get_antifeatures,
+    get_antifeatures,  # pylint: disable=import-error
     get_catalog,
     get_categories,
     get_graveyard,
@@ -17,25 +18,24 @@
 )


-def validate_schema(data: dict, schema_path: Path) -> Generator[str, None, None]:
+def validate_schema(data: dict, schema_path: Path) -> List[str]:
     schema = json.load(schema_path.open("r", encoding="utf-8"))
     validator = jsonschema.Draft202012Validator(schema)
-    for error in validator.iter_errors(data):
-        yield f"at .{'.'.join(error.path)}: {error.message}"
+    return [
+        f"at .{'.'.join(error.path)}: {error.message}"
+        for error in validator.iter_errors(data)
+    ]


-def validate_schema_pretty(data: dict, name: str) -> bool:
-    schema_path = REPO_APPS_ROOT / "schemas" / f"{name}.schema.json"
-    has_errors = False
+def validate_schema_pretty(apps_path: Path, data: dict, name: str) -> bool:
+    schema_path = apps_path / "schemas" / f"{name}.toml.schema.json"
     schema_errors = list(validate_schema(data, schema_path))
     if schema_errors:
-        has_errors = True
         print(f"Error while validating {name} against schema:")
-    for error in schema_errors:
-        print(f"  - {error}")
-    if schema_errors:
+        for error in schema_errors:
+            print(f"  - {error}")
         print()
-    return has_errors
+    return bool(schema_errors)


 def check_app(
@@ -90,29 +90,35 @@ def check_app(
             yield f"unknown subtag {category} / {subtag}", False


-def check_all_apps() -> Generator[Tuple[str, List[Tuple[str, bool]]], None, None]:
+def check_all_apps() -> bool:
+    has_errors = False
     for app, info in get_catalog().items():
         errors = list(check_app(app, info))
         if errors:
-            yield app, errors
+            print(f"{app}:")
+            for error, is_fatal in errors:
+                if is_fatal:
+                    has_errors = True
+                level = "error" if is_fatal else "warning"
+                print(f"  - {level}: {error}")
+    return has_errors


 def main() -> None:
+    parser = argparse.ArgumentParser()
+    get_apps_repo.add_args(parser)
+    args = parser.parse_args()
+    apps_path = get_apps_repo.from_args(args)
+
     has_errors = False

-    has_errors |= validate_schema_pretty(get_antifeatures(), "antifeatures.toml")
-    has_errors |= validate_schema_pretty(get_catalog(), "apps.toml")
-    has_errors |= validate_schema_pretty(get_categories(), "categories.toml")
-    has_errors |= validate_schema_pretty(get_graveyard(), "graveyard.toml")
-    has_errors |= validate_schema_pretty(get_wishlist(), "wishlist.toml")
+    has_errors |= validate_schema_pretty(apps_path, get_antifeatures(), "antifeatures")
+    has_errors |= validate_schema_pretty(apps_path, get_catalog(), "apps")
+    has_errors |= validate_schema_pretty(apps_path, get_categories(), "categories")
+    has_errors |= validate_schema_pretty(apps_path, get_graveyard(), "graveyard")
+    has_errors |= validate_schema_pretty(apps_path, get_wishlist(), "wishlist")

-    for app, errors in check_all_apps():
-        print(f"{app}:")
-        for error, is_fatal in errors:
-            if is_fatal:
-                has_errors = True
-            level = "error" if is_fatal else "warning"
-            print(f"  - {level}: {error}")
+    has_errors |= check_all_apps()

     sys.exit(has_errors)
diff --git a/tools/list_builder.py b/tools/list_builder.py
index 536bf8883d..1c2692df7b 100755
--- a/tools/list_builder.py
+++ b/tools/list_builder.py
@@ -10,6 +10,7 @@
 import time
 from collections import OrderedDict
 from functools import cache
+from itertools import repeat
 from pathlib import Path
 from typing import Any, Optional

@@ -19,14 +20,12 @@
 from git import Repo

 import appslib.logging_sender  # pylint: disable=import-error
-from app_caches import app_cache_folder  # pylint: disable=import-error
-from app_caches import apps_cache_update_all  # pylint: disable=import-error
 from appslib.utils import (
-    REPO_APPS_ROOT,  # pylint: disable=import-error
-    get_antifeatures,
+    get_antifeatures,  # pylint: disable=import-error
     get_catalog,
     get_categories,
 )
+import appslib.get_apps_repo as get_apps_repo

 now = time.time()

@@ -58,21 +57,24 @@ def antifeatures_list():


 def __build_app_dict(data) -> Optional[tuple[str, dict[str, Any]]]:
-    name, info = data
+    (name, info), cache_path = data
     try:
-        return name, build_app_dict(name, info)
+        return name, build_app_dict(name, info, cache_path)
     except Exception as err:
         logging.error("[List builder] Error while updating %s: %s", name, err)
         return None


-def build_base_catalog(nproc: int):
+def build_base_catalog(
+    catalog: dict[str, dict[str, Any]], cache_path: Path, nproc: int
+):
     result_dict = {}
-    catalog = get_catalog(working_only=True)

     with multiprocessing.Pool(processes=nproc) as pool:
         with logging_redirect_tqdm():
-            tasks = pool.imap(__build_app_dict, catalog.items())
+            tasks = pool.imap(
+                __build_app_dict, zip(catalog.items(), repeat(cache_path))
+            )

             for result in tqdm.tqdm(tasks, total=len(catalog.keys()), ascii=" ·#"):
                 if result is not None:
@@ -82,7 +84,7 @@
     return result_dict


-def write_catalog_v3(base_catalog, target_dir: Path) -> None:
+def write_catalog_v3(base_catalog, apps_path: Path, target_dir: Path) -> None:
     logos_dir = target_dir / "logos"
     logos_dir.mkdir(parents=True, exist_ok=True)

@@ -95,7 +97,7 @@ def infos_for_v3(app_id: str, infos: Any) -> Any:
         del infos["manifest"]["resources"]

         app_id = app_id.lower()
-        logo_source = REPO_APPS_ROOT / "logos" / f"{app_id}.png"
+        logo_source = apps_path / "logos" / f"{app_id}.png"
         if logo_source.exists():
             logo_hash = (
                 subprocess.check_output(["sha256sum", logo_source])
@@ -158,9 +160,9 @@ def infos_for_doc_catalog(infos):
     )


-def build_app_dict(app, infos):
+def build_app_dict(app, infos, cache_path: Path):
     # Make sure we have some cache
-    this_app_cache = app_cache_folder(app)
+    this_app_cache = cache_path / app
     assert this_app_cache.exists(), f"No cache yet for {app}"

     repo = Repo(this_app_cache)
@@ -225,12 +227,12 @@ def build_app_dict(app, infos):

 def main() -> None:
     parser = argparse.ArgumentParser()
+    get_apps_repo.add_args(parser)
     parser.add_argument(
         "target_dir",
         type=Path,
         nargs="?",
-        default=REPO_APPS_ROOT / "builds" / "default",
-        help="The directory to write the catalogs to",
+        help="The directory to write the catalogs to. Defaults to apps/builds/default",
     )
     parser.add_argument(
         "-j",
@@ -240,27 +242,23 @@ def main() -> None:
         metavar="N",
         help="Allow N threads to run in parallel",
     )
-    parser.add_argument(
-        "-c",
-        "--update-cache",
-        action=argparse.BooleanOptionalAction,
-        default=True,
-        help="Update the apps cache",
-    )
     args = parser.parse_args()

     appslib.logging_sender.enable()

-    if args.update_cache:
-        print("Updating the cache of all the apps directories...")
-        apps_cache_update_all(get_catalog(), parallel=args.jobs)
+    apps_dir = get_apps_repo.from_args(args)
+    cache_path = get_apps_repo.cache_path(args)
+    cache_path.mkdir(exist_ok=True, parents=True)
+
+    target_dir = args.target_dir or apps_dir / "builds" / "default"
+
+    catalog = get_catalog(working_only=True)

     print("Retrieving all apps' information to build the catalog...")
-    catalog = build_base_catalog(args.jobs)
+    base_catalog = build_base_catalog(catalog, cache_path, args.jobs)

-    print(f"Writing the catalogs to {args.target_dir}...")
-    write_catalog_v3(catalog, args.target_dir / "v3")
-    write_catalog_doc(catalog, args.target_dir / "doc_catalog")
+    print(f"Writing the catalogs to {target_dir}...")
+    write_catalog_v3(base_catalog, apps_dir, target_dir / "v3")
+    write_catalog_doc(base_catalog, target_dir / "doc_catalog")

     print("Done!")
diff --git a/tools/readme_generator/make_readme.py b/tools/readme_generator/make_readme.py
index f608e02b91..764d7442c2 100755
--- a/tools/readme_generator/make_readme.py
+++ b/tools/readme_generator/make_readme.py
@@ -1,5 +1,6 @@
 #! /usr/bin/env python3

+import sys
 import os
 import argparse
 import json
@@ -14,8 +15,12 @@
 from babel.messages.pofile import PoFileParser
 from langcodes import Language

+# add apps/tools to sys.path
+sys.path.insert(0, str(Path(__file__).parent.parent))
+
+from appslib import get_apps_repo
+
 README_GEN_DIR = Path(__file__).resolve().parent
-APPS_REPO_ROOT = README_GEN_DIR.parent.parent

 TRANSLATIONS_DIR = README_GEN_DIR / "translations"

@@ -31,7 +36,7 @@ def value_for_lang(values: Dict, lang: str):
     return list(values.values())[0]


-def generate_READMEs(app_path: Path):
+def generate_READMEs(app_path: Path, apps_repo_path: Path):
     if not app_path.exists():
         raise Exception("App path provided doesn't exists ?!")

@@ -42,11 +47,11 @@ def generate_READMEs(app_path: Path):

     upstream = manifest.get("upstream", {})

-    catalog = toml.load((APPS_REPO_ROOT / "apps.toml").open(encoding="utf-8"))
+    catalog = toml.load((apps_repo_path / "apps.toml").open(encoding="utf-8"))
     from_catalog = catalog.get(manifest["id"], {})

     antifeatures_list = toml.load(
-        (APPS_REPO_ROOT / "antifeatures.toml").open(encoding="utf-8")
+        (apps_repo_path / "antifeatures.toml").open(encoding="utf-8")
     )

     if not upstream and not (app_path / "doc" / "DISCLAIMER.md").exists():
@@ -188,13 +193,16 @@ def generate_single_README(lang_suffix: str, lang: str):
     (app_path / "ALL_README.md").write_text(out)


-if __name__ == "__main__":
-    parser = argparse.ArgumentParser(
-        description="Automatically (re)generate README for apps"
-    )
-    parser.add_argument(
-        "app_path", type=Path, help="Path to the app to generate/update READMEs for"
-    )
-
+def main():
+    parser = argparse.ArgumentParser(description="Automatically (re)generate README for apps")
+    parser.add_argument("app_path", type=Path, help="Path to the app to generate/update READMEs for")
+    get_apps_repo.add_args(parser)
     args = parser.parse_args()
-    generate_READMEs(Path(args.app_path))
+
+    apps_path = get_apps_repo.from_args(args)
+
+    generate_READMEs(args.app_path, apps_path)
+
+
+if __name__ == "__main__":
+    main()
"__main__": + main() diff --git a/tools/readme_generator/tests/test_make_readme.py b/tools/readme_generator/tests/test_make_readme.py index 893204b560..3c4d65ad2e 100755 --- a/tools/readme_generator/tests/test_make_readme.py +++ b/tools/readme_generator/tests/test_make_readme.py @@ -27,7 +27,12 @@ def test_running_make_readme(): # Now run test... subprocess.check_call( - [TEST_DIRECTORY.parent / "make_readme.py", temporary_tested_app_directory] + [ + TEST_DIRECTORY.parent / "make_readme.py", + "-l", + TEST_DIRECTORY.parent.parent.parent, + temporary_tested_app_directory, + ] ) assert ( diff --git a/tools/save_added_date.py b/tools/save_added_date.py index f31c8331e7..e3d48253a3 100755 --- a/tools/save_added_date.py +++ b/tools/save_added_date.py @@ -1,22 +1,17 @@ #!/usr/bin/env python3 +import argparse import tomlkit import json from datetime import datetime from git import Repo, Commit from pathlib import Path import logging -from typing import TYPE_CHECKING, Callable +from typing import Callable +import appslib.get_apps_repo as get_apps_repo -if TYPE_CHECKING: - REPO_APPS_ROOT = Path() -else: - from appslib.utils import REPO_APPS_ROOT - - -def git_bisect(repo_path: Path, is_newer: Callable[[Commit], bool]) -> Commit | None: - repo = Repo(repo_path) +def git_bisect(repo: Repo, is_newer: Callable[[Commit], bool]) -> Commit | None: # Start with whole repo first_commit = repo.git.rev_list("HEAD", reverse=True, max_parents=0) repo.git.bisect("reset") @@ -69,19 +64,19 @@ def app_is_deprecated(commit: Commit, name: str) -> bool: return "deprecated-software" in antifeatures -def date_added(name: str) -> int | None: - result = git_bisect(REPO_APPS_ROOT, lambda x: app_is_present(x, name)) +def date_added(repo: Repo, name: str) -> int | None: + result = git_bisect(repo, lambda x: app_is_present(x, name)) print(result) return None if result is None else result.committed_date -def date_deprecated(name: str) -> int | None: - result = git_bisect(REPO_APPS_ROOT, lambda x: app_is_deprecated(x, name)) +def date_deprecated(repo: Repo, name: str) -> int | None: + result = git_bisect(repo, lambda x: app_is_deprecated(x, name)) print(result) return None if result is None else result.committed_date -def add_deprecation_dates(file: Path) -> None: +def add_deprecation_dates(repo: Repo, file: Path) -> None: key = "deprecated_date" document = tomlkit.load(file.open("r", encoding="utf-8")) for app, info in document.items(): @@ -89,7 +84,7 @@ def add_deprecation_dates(file: Path) -> None: continue if "deprecated-software" not in info.get("antifeatures", []): continue - date = date_deprecated(app) + date = date_deprecated(repo, app) if date is None: continue info[key] = date @@ -98,21 +93,17 @@ def add_deprecation_dates(file: Path) -> None: tomlkit.dump(document, file.open("w")) -def date_added_to(match: str, file: Path) -> int | None: - commits = ( - Repo(REPO_APPS_ROOT) - .git.log( - "-S", - match, - "--first-parent", - "--reverse", - "--date=unix", - "--format=%cd", - "--", - str(file), - ) - .splitlines() - ) +def date_added_to(repo: Repo, match: str, file: Path) -> int | None: + commits = repo.git.log( + "-S", + match, + "--first-parent", + "--reverse", + "--date=unix", + "--format=%cd", + "--", + str(file), + ).splitlines() if not commits: return None @@ -120,12 +111,12 @@ def date_added_to(match: str, file: Path) -> int | None: return int(first_commit) -def add_apparition_dates(file: Path, key: str) -> None: +def add_apparition_dates(repo: Repo, file: Path, key: str) -> None: document = 
@@ -98,21 +93,17 @@
     tomlkit.dump(document, file.open("w"))


-def date_added_to(match: str, file: Path) -> int | None:
-    commits = (
-        Repo(REPO_APPS_ROOT)
-        .git.log(
-            "-S",
-            match,
-            "--first-parent",
-            "--reverse",
-            "--date=unix",
-            "--format=%cd",
-            "--",
-            str(file),
-        )
-        .splitlines()
-    )
+def date_added_to(repo: Repo, match: str, file: Path) -> int | None:
+    commits = repo.git.log(
+        "-S",
+        match,
+        "--first-parent",
+        "--reverse",
+        "--date=unix",
+        "--format=%cd",
+        "--",
+        str(file),
+    ).splitlines()

     if not commits:
         return None
@@ -120,12 +111,12 @@
     return int(first_commit)


-def add_apparition_dates(file: Path, key: str) -> None:
+def add_apparition_dates(repo: Repo, file: Path, key: str) -> None:
     document = tomlkit.load(file.open("r", encoding="utf-8"))
     for app, info in document.items():
         if key in info.keys():
             continue
-        date = date_added_to(f"[{app}]", file)
+        date = date_added_to(repo, f"[{app}]", file)
         assert date is not None
         info[key] = date
         info[key].comment(datetime.fromtimestamp(info[key]).strftime("%Y/%m/%d"))
@@ -134,14 +125,21 @@


 def main() -> None:
+    parser = argparse.ArgumentParser()
+    get_apps_repo.add_args(parser, allow_temp=False)
+    args = parser.parse_args()
+
     logging.basicConfig(level=logging.DEBUG)

-    add_apparition_dates(REPO_APPS_ROOT / "apps.toml", key="added_date")
-    add_apparition_dates(REPO_APPS_ROOT / "wishlist.toml", key="added_date")
-    add_apparition_dates(REPO_APPS_ROOT / "graveyard.toml", key="killed_date")
+    apps_repo_dir = get_apps_repo.from_args(args)
+    apps_repo = Repo(apps_repo_dir)
+
+    add_apparition_dates(apps_repo, apps_repo_dir / "apps.toml", key="added_date")
+    add_apparition_dates(apps_repo, apps_repo_dir / "wishlist.toml", key="added_date")
+    add_apparition_dates(apps_repo, apps_repo_dir / "graveyard.toml", key="killed_date")

-    add_deprecation_dates(REPO_APPS_ROOT / "apps.toml")
-    add_deprecation_dates(REPO_APPS_ROOT / "graveyard.toml")
+    add_deprecation_dates(apps_repo, apps_repo_dir / "apps.toml")
+    add_deprecation_dates(apps_repo, apps_repo_dir / "graveyard.toml")


 if __name__ == "__main__":
diff --git a/tools/update_app_levels/update_app_levels.py b/tools/update_app_levels/update_app_levels.py
index ef192f9712..f1a72c9751 100755
--- a/tools/update_app_levels/update_app_levels.py
+++ b/tools/update_app_levels/update_app_levels.py
@@ -3,6 +3,7 @@
 Update app catalog: commit, and create a merge request
 """

+import sys
 import argparse
 import logging
 import tempfile
@@ -17,12 +18,16 @@
 import tomlkit.items
 from git import Repo

+# add apps/tools to sys.path
+sys.path.insert(0, str(Path(__file__).parent.parent))
+
+from appslib import get_apps_repo
+
 APPS_REPO = "YunoHost/apps"

 CI_RESULTS_URL = "https://ci-apps.yunohost.org/ci/api/results"

-REPO_APPS_ROOT = Path(Repo(__file__, search_parent_directories=True).working_dir)
-TOOLS_DIR = REPO_APPS_ROOT / "tools"
+TOOLS_DIR = Path(__file__).resolve().parent.parent


 def github_token() -> Optional[str]:
@@ -206,49 +211,49 @@ def main():
     parser.add_argument("--commit", action=argparse.BooleanOptionalAction, default=True)
     parser.add_argument("--pr", action=argparse.BooleanOptionalAction, default=True)
     parser.add_argument("-v", "--verbose", action=argparse.BooleanOptionalAction)
+    get_apps_repo.add_args(parser)
     args = parser.parse_args()

     logging.getLogger().setLevel(logging.INFO)
     if args.verbose:
         logging.getLogger().setLevel(logging.DEBUG)

-    with tempfile.TemporaryDirectory(prefix="update_app_levels_") as tmpdir:
-        logging.info("Cloning the repository...")
-        apps_repo = Repo.clone_from(f"git@github.com:{APPS_REPO}", to_path=tmpdir)
-        assert apps_repo.working_tree_dir is not None
-        apps_toml_path = Path(apps_repo.working_tree_dir) / "apps.toml"
+    repo_path = get_apps_repo.from_args(args)

-        # Load the app catalog and filter out the non-working ones
-        catalog = tomlkit.load(apps_toml_path.open("r", encoding="utf-8"))
+    apps_repo = Repo(repo_path)
+    apps_toml_path = repo_path / "apps.toml"

-        new_branch = apps_repo.create_head("update_app_levels", apps_repo.refs.master)
-        apps_repo.head.reference = new_branch
+    # Load the app catalog and filter out the non-working ones
+    catalog = tomlkit.load(apps_toml_path.open("r", encoding="utf-8"))

-        logging.info("Retrieving the CI results...")
-        ci_results = get_ci_results()
+    new_branch = apps_repo.create_head("update_app_levels", apps_repo.refs.master)
+    apps_repo.head.reference = new_branch
CI results...") - ci_results = get_ci_results() + new_branch = apps_repo.create_head("update_app_levels", apps_repo.refs.master) + apps_repo.head.reference = new_branch - # Now compute changes, then update the catalog - changes = list_changes(catalog, ci_results) - pr_body = pretty_changes(changes) - catalog = update_catalog(catalog, ci_results) + logging.info("Retrieving the CI results...") + ci_results = get_ci_results() - # Save the new catalog - updated_catalog = tomlkit.dumps(catalog) - updated_catalog = updated_catalog.replace(",]", " ]") - apps_toml_path.open("w", encoding="utf-8").write(updated_catalog) + # Now compute changes, then update the catalog + changes = list_changes(catalog, ci_results) + pr_body = pretty_changes(changes) + catalog = update_catalog(catalog, ci_results) - if args.commit: - logging.info("Committing and pushing the new catalog...") - apps_repo.index.add("apps.toml") - apps_repo.index.commit("Update app levels according to CI results") - apps_repo.git.push("--set-upstream", "origin", new_branch) + # Save the new catalog + updated_catalog = tomlkit.dumps(catalog) + updated_catalog = updated_catalog.replace(",]", " ]") + apps_toml_path.open("w", encoding="utf-8").write(updated_catalog) - if args.verbose: - print(pr_body) + if args.commit: + logging.info("Committing and pushing the new catalog...") + apps_repo.index.add("apps.toml") + apps_repo.index.commit("Update app levels according to CI results") + apps_repo.git.push("--set-upstream", "origin", new_branch) + + if args.verbose: + print(pr_body) - if args.pr: - logging.info("Opening a pull request...") - make_pull_request(pr_body) + if args.pr: + logging.info("Opening a pull request...") + make_pull_request(pr_body) if __name__ == "__main__":