diff --git a/conda_forge_tick/auto_tick.py b/conda_forge_tick/auto_tick.py index 79222dc97c..1b97af2d21 100644 --- a/conda_forge_tick/auto_tick.py +++ b/conda_forge_tick/auto_tick.py @@ -893,7 +893,7 @@ def _run_migrator( temp: list[AnyStr], time_per: float, git_backend: GitPlatformBackend, - package: str | None = None, + feedstock: str | None = None, ) -> int: """ Run a migrator. @@ -903,7 +903,8 @@ def _run_migrator( :param temp: The list of temporary files. :param time_per: The time limit of this migrator. :param git_backend: The GitPlatformBackend instance to use. - :param package: The package to update, if None, all packages are updated. + :param feedstock: The feedstock to update, if None, all feedstocks are updated. Does not contain the `-feedstock` + suffix. :return: The number of "good" PRs created by the migrator. """ @@ -928,14 +929,14 @@ def _run_migrator( possible_nodes = list(migrator.order(effective_graph, mctx.graph)) - if package: - if package not in possible_nodes: + if feedstock: + if feedstock not in possible_nodes: logger.info( - f"Package {package} is not a candidate for migration of {migrator_name}. " + f"Feedstock {feedstock} is not a candidate for migration of {migrator_name}. " f"If you want to investigate this, run the make-migrators command." ) return 0 - possible_nodes = [package] + possible_nodes = [feedstock] # version debugging info if isinstance(migrator, Version): @@ -1085,17 +1086,18 @@ def _setup_limits(): resource.setrlimit(resource.RLIMIT_AS, (limit_int, limit_int)) -def _update_nodes_with_bot_rerun(gx: nx.DiGraph, package: str | None = None): +def _update_nodes_with_bot_rerun(gx: nx.DiGraph, feedstock: str | None = None): """ Go through all the open PRs and check if they are rerun :param gx: the dependency graph - :param package: the package to update, if None, all packages are updated + :param feedstock: The feedstock to update. If None, all feedstocks are updated. Does not contain the `-feedstock` + suffix. 
""" print("processing bot-rerun labels", flush=True) - nodes = gx.nodes.items() if not package else [(package, gx.nodes[package])] + nodes = gx.nodes.items() if not feedstock else [(feedstock, gx.nodes[feedstock])] for i, (name, node) in enumerate(nodes): # logger.info( @@ -1154,21 +1156,24 @@ def _filter_ignored_versions(attrs, version): return version -def _update_nodes_with_new_versions(gx: nx.DiGraph, package: str | None = None): +def _update_nodes_with_new_versions(gx: nx.DiGraph, feedstock: str | None = None): """ Updates every node with its new version (when available) :param gx: the dependency graph - :param package: the package to update, if None, all packages are updated + :param feedstock: the feedstock to update, if None, all feedstocks are updated. Does not contain the `-feedstock` + suffix. """ print("updating nodes with new versions", flush=True) - if package and not does_key_exist_in_hashmap("versions", package): - logger.warning(f"Package {package} not found in versions hashmap") + if feedstock and not does_key_exist_in_hashmap("versions", feedstock): + logger.warning(f"Feedstock {feedstock} not found in versions hashmap") return - version_nodes = get_all_keys_for_hashmap("versions") if not package else [package] + version_nodes = ( + get_all_keys_for_hashmap("versions") if not feedstock else [feedstock] + ) for node in version_nodes: version_data = LazyJson(f"versions/{node}.json").data @@ -1194,27 +1199,32 @@ def _update_nodes_with_new_versions(gx: nx.DiGraph, package: str | None = None): vpri["new_version"] = version_from_data -def _remove_closed_pr_json(package: str | None = None): +def _remove_closed_pr_json(feedstock: str | None = None): """ Remove the pull request information for closed PRs. - :param package: The package to remove the PR information for. If None, all PR information is removed. If you pass - a package, closed pr_json files are not removed because this would require iterating all pr_json files. 
+    :param feedstock: The feedstock to remove the PR information for. If None, all PR information is removed. If you pass
+        a feedstock, closed pr_json files are not removed because this would require iterating all pr_json files. Does not
+        contain the `-feedstock` suffix.
     """
     print("collapsing closed PR json", flush=True)
 
-    if package:
+    if feedstock:
         pr_info_nodes = (
-            [package] if does_key_exist_in_hashmap("pr_info", package) else []
+            [feedstock] if does_key_exist_in_hashmap("pr_info", feedstock) else []
         )
         version_pr_info_nodes = (
-            [package] if does_key_exist_in_hashmap("version_pr_info", package) else []
+            [feedstock]
+            if does_key_exist_in_hashmap("version_pr_info", feedstock)
+            else []
         )
 
         if not pr_info_nodes:
-            logger.warning(f"Package {package} not found in pr_info hashmap")
+            logger.warning(f"Feedstock {feedstock} not found in pr_info hashmap")
         if not version_pr_info_nodes:
-            logger.warning(f"Package {package} not found in version_pr_info hashmap")
+            logger.warning(
+                f"Feedstock {feedstock} not found in version_pr_info hashmap"
+            )
     else:
         pr_info_nodes = get_all_keys_for_hashmap("pr_info")
         version_pr_info_nodes = get_all_keys_for_hashmap("version_pr_info")
@@ -1255,7 +1265,7 @@ def _remove_closed_pr_json(package: str | None = None):
 
     # at this point, any json blob referenced in the pr info is state != closed
     # so we can remove anything that is empty or closed
-    if package:
+    if feedstock:
         logger.info(
-            "Since you requested a run for a specific package, we are not removing closed pr_json files."
+            "Since you requested a run for a specific feedstock, we are not removing closed pr_json files."
         )
@@ -1270,22 +1280,32 @@
-def _update_graph_with_pr_info(package: str | None = None):
-    _remove_closed_pr_json(package)
+def _update_graph_with_pr_info(feedstock: str | None = None):
+    """
+    :param feedstock: The feedstock to update the graph for. If None, all feedstocks are updated. Does not contain the
+        `-feedstock` suffix.
+ """ + _remove_closed_pr_json(feedstock) gx = load_existing_graph() - _update_nodes_with_bot_rerun(gx, package) - _update_nodes_with_new_versions(gx, package) + _update_nodes_with_bot_rerun(gx, feedstock) + _update_nodes_with_new_versions(gx, feedstock) dump_graph(gx) -def main(ctx: CliContext, package: str | None = None) -> None: +def main(ctx: CliContext, feedstock: str | None = None) -> None: + """ + Run the main bot logic. + + :param ctx: The CLI context. + :param feedstock: If not None, only the given feedstock is updated. Does not contain the `-feedstock` suffix. + """ global START_TIME START_TIME = time.time() _setup_limits() with fold_log_lines("updating graph with PR info"): - _update_graph_with_pr_info(package) + _update_graph_with_pr_info(feedstock) deploy(ctx, dirs_to_deploy=["version_pr_info", "pr_json", "pr_info"]) # record tmp dir so we can be sure to clean it later @@ -1339,7 +1359,7 @@ def main(ctx: CliContext, package: str | None = None) -> None: for mg_ind, migrator in enumerate(migrators): good_prs = _run_migrator( - migrator, mctx, temp, time_per_migrator[mg_ind], git_backend, package + migrator, mctx, temp, time_per_migrator[mg_ind], git_backend, feedstock ) if good_prs > 0: pass diff --git a/conda_forge_tick/cli.py b/conda_forge_tick/cli.py index 26273b945c..c7e3926f4b 100644 --- a/conda_forge_tick/cli.py +++ b/conda_forge_tick/cli.py @@ -131,44 +131,46 @@ def make_graph( @job_option @n_jobs_option @click.argument( - "package", + "feedstock", required=False, default=None, type=str, ) @pass_context def update_upstream_versions( - ctx: CliContext, job: int, n_jobs: int, package: Optional[str] + ctx: CliContext, job: int, n_jobs: int, feedstock: Optional[str] ) -> None: """ Update the upstream versions of feedstocks in the graph. - If PACKAGE is given, only update that package, otherwise update all packages. + If FEEDSTOCK is given, only update that feedstock, otherwise update all feedstocks. 
+ The FEEDSTOCK argument should omit the `-feedstock` suffix. """ from . import update_upstream_versions check_job_param_relative(job, n_jobs) - update_upstream_versions.main(ctx, job=job, n_jobs=n_jobs, package=package) + update_upstream_versions.main(ctx, job=job, n_jobs=n_jobs, feedstock=feedstock) @main.command(name="auto-tick") @click.argument( - "package", + "feedstock", required=False, default=None, type=str, ) @pass_context -def auto_tick(ctx: CliContext, package: str | None) -> None: +def auto_tick(ctx: CliContext, feedstock: str | None) -> None: """ Run the main bot logic that runs all migrations, updates the graph accordingly, and opens the corresponding PRs. - If PACKAGE is given, only run the bot for that package, otherwise run the bot for all packages. + If FEEDSTOCK is given, only run the bot for that feedstock, otherwise run the bot for all feedstocks. + The FEEDSTOCK argument should omit the `-feedstock` suffix. """ from . import auto_tick - auto_tick.main(ctx, package=package) + auto_tick.main(ctx, feedstock=feedstock) @main.command(name="make-status-report") diff --git a/conda_forge_tick/update_upstream_versions.py b/conda_forge_tick/update_upstream_versions.py index 0e2052622b..e6c388cdee 100644 --- a/conda_forge_tick/update_upstream_versions.py +++ b/conda_forge_tick/update_upstream_versions.py @@ -436,7 +436,7 @@ def update_upstream_versions( debug: bool = False, job=1, n_jobs=1, - package: Optional[str] = None, + feedstock: Optional[str] = None, ) -> None: """ Update the upstream versions of packages. @@ -445,15 +445,15 @@ def update_upstream_versions( :param debug: Whether to run in debug mode :param job: The job number :param n_jobs: The total number of jobs - :param package: The package to update. If None, update all packages. + :param feedstock: The feedstock to update. If None, update all feedstocks. Does not contain the `-feedstock` suffix. 
""" - if package and package not in gx.nodes: - logger.error(f"Package {package} not found in graph. Exiting.") + if feedstock and feedstock not in gx.nodes: + logger.error(f"Feedstock {feedstock} not found in graph. Exiting.") return # In the future, we should have some sort of typed graph structure all_nodes: Iterable[Tuple[str, Mapping[str, Mapping]]] = ( - [(package, gx.nodes.get(package))] if package else gx.nodes.items() + [(feedstock, gx.nodes.get(feedstock))] if feedstock else gx.nodes.items() ) job_nodes = filter_nodes_for_job(all_nodes, job, n_jobs) @@ -481,7 +481,7 @@ def extract_payload(node: Tuple[str, Mapping[str, Mapping]]) -> Tuple[str, Mappi updater = ( _update_upstream_versions_sequential - if debug or package + if debug or feedstock else _update_upstream_versions_process_pool ) @@ -493,14 +493,14 @@ def main( ctx: CliContext, job: int = 1, n_jobs: int = 1, - package: Optional[str] = None, + feedstock: Optional[str] = None, ) -> None: """ Main function for updating the upstream versions of packages. :param ctx: The CLI context. :param job: The job number. :param n_jobs: The total number of jobs. - :param package: The package to update. If None, update all packages. + :param feedstock: The feedstock to update. If None, update all feedstocks. Does not contain the `-feedstock` suffix. 
""" logger.info("Reading graph") # Graph enabled for inspection @@ -514,5 +514,5 @@ def main( debug=ctx.debug, job=job, n_jobs=n_jobs, - package=package, + feedstock=feedstock, ) diff --git a/tests/test_upstream_versions.py b/tests/test_upstream_versions.py index 1bc587db69..a4002fd79f 100644 --- a/tests/test_upstream_versions.py +++ b/tests/test_upstream_versions.py @@ -1223,12 +1223,12 @@ def test_include_node_bad_pull_request_upstream(caplog): def test_update_upstream_versions_nonexistent_package(caplog): - package_name = "nonexistent-package" + feedstock_name = "nonexistent-package" caplog.set_level(logging.DEBUG) update_upstream_versions( nx.DiGraph(), - package=package_name, + feedstock=feedstock_name, ) assert "Package nonexistent-package not found in graph" in caplog.text @@ -1640,7 +1640,7 @@ def test_main( ctx = CliContext() ctx.debug = debug - main(ctx, job=3, n_jobs=10, package="testpackage") + main(ctx, job=3, n_jobs=10, feedstock="testpackage") makedirs_mock.assert_called_once_with("versions", exist_ok=True) load_graph_mock.assert_called_once()