Skip to content

Commit 1fc04dc

Browse files
committed
add single package support for auto_tick, clean up dry run option
1 parent d0a4214 commit 1fc04dc

File tree

3 files changed

+111
-19
lines changed

3 files changed

+111
-19
lines changed

conda_forge_tick/auto_tick.py

Lines changed: 89 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,7 @@
99
import traceback
1010
import typing
1111
from dataclasses import dataclass
12-
from typing import Literal, cast
12+
from typing import AnyStr, Literal, cast
1313
from urllib.error import URLError
1414
from uuid import uuid4
1515

@@ -41,6 +41,7 @@
4141
)
4242
from conda_forge_tick.lazy_json_backends import (
4343
LazyJson,
44+
does_key_exist_in_hashmap,
4445
get_all_keys_for_hashmap,
4546
lazy_json_transaction,
4647
remove_key_for_hashmap,
@@ -885,7 +886,26 @@ def _is_migrator_done(_mg_start, good_prs, time_per, pr_limit):
885886
return False
886887

887888

888-
def _run_migrator(migrator, mctx, temp, time_per, git_backend: GitPlatformBackend):
889+
def _run_migrator(
890+
migrator: Migrator,
891+
mctx: MigratorSessionContext,
892+
temp: list[AnyStr],
893+
time_per: float,
894+
git_backend: GitPlatformBackend,
895+
package: str | None = None,
896+
) -> int:
897+
"""
898+
Run a migrator.
899+
900+
:param migrator: The migrator to run.
901+
:param mctx: The migrator session context.
902+
:param temp: The list of temporary files.
903+
:param time_per: The time limit of this migrator.
904+
:param git_backend: The GitPlatformBackend instance to use.
905+
:param package: The package to update, if None, all packages are updated.
906+
907+
:return: The number of "good" PRs created by the migrator.
908+
"""
889909
_mg_start = time.time()
890910

891911
migrator_name = get_migrator_name(migrator)
@@ -907,6 +927,14 @@ def _run_migrator(migrator, mctx, temp, time_per, git_backend: GitPlatformBacken
907927

908928
possible_nodes = list(migrator.order(effective_graph, mctx.graph))
909929

930+
if package:
931+
if package not in possible_nodes:
932+
logger.warning(
933+
f"Package {package} is not a candidate for migration of {migrator_name}"
934+
)
935+
return 0
936+
possible_nodes = [package]
937+
910938
# version debugging info
911939
if isinstance(migrator, Version):
912940
print("possible version migrations:", flush=True)
@@ -1051,18 +1079,26 @@ def _setup_limits():
10511079
resource.setrlimit(resource.RLIMIT_AS, (limit_int, limit_int))
10521080

10531081

1054-
def _update_nodes_with_bot_rerun(gx: nx.DiGraph):
1055-
"""Go through all the open PRs and check if they are rerun"""
1082+
def _update_nodes_with_bot_rerun(gx: nx.DiGraph, package: str | None = None):
1083+
"""
1084+
Go through all the open PRs and check if they are rerun
1085+
1086+
:param gx: the dependency graph
1087+
:param package: the package to update, if None, all packages are updated
1088+
"""
10561089

10571090
print("processing bot-rerun labels", flush=True)
10581091

1059-
for i, (name, node) in enumerate(gx.nodes.items()):
1092+
nodes = gx.nodes.items() if not package else [(package, gx.nodes[package])]
1093+
1094+
for i, (name, node) in nodes:
10601095
# logger.info(
10611096
# f"node: {i} memory usage: "
10621097
# f"{psutil.Process().memory_info().rss // 1024 ** 2}MB",
10631098
# )
10641099
with node["payload"] as payload:
10651100
if payload.get("archived", False):
1101+
logger.debug(f"skipping archived package {name}")
10661102
continue
10671103
with payload["pr_info"] as pri, payload["version_pr_info"] as vpri:
10681104
# reset bad
@@ -1112,12 +1148,21 @@ def _filter_ignored_versions(attrs, version):
11121148
return version
11131149

11141150

1115-
def _update_nodes_with_new_versions(gx):
1116-
"""Updates every node with it's new version (when available)"""
1151+
def _update_nodes_with_new_versions(gx: nx.DiGraph, package: str | None = None):
1152+
"""
1153+
Updates every node with its new version (when available)
1154+
1155+
:param gx: the dependency graph
1156+
:param package: the package to update, if None, all packages are updated
1157+
"""
11171158

11181159
print("updating nodes with new versions", flush=True)
11191160

1120-
version_nodes = get_all_keys_for_hashmap("versions")
1161+
if package and not does_key_exist_in_hashmap("versions", package):
1162+
logger.warning(f"Package {package} not found in versions hashmap")
1163+
return
1164+
1165+
version_nodes = get_all_keys_for_hashmap("versions") if not package else [package]
11211166

11221167
for node in version_nodes:
11231168
version_data = LazyJson(f"versions/{node}.json").data
@@ -1143,13 +1188,35 @@ def _update_nodes_with_new_versions(gx):
11431188
vpri["new_version"] = version_from_data
11441189

11451190

1146-
def _remove_closed_pr_json():
1191+
def _remove_closed_pr_json(package: str | None = None):
1192+
"""
1193+
Remove the pull request information for closed PRs.
1194+
1195+
:param package: The package to remove the PR information for. If None, all PR information is removed. If you pass
1196+
a package, closed pr_json files are not removed because this would require iterating all pr_json files.
1197+
"""
11471198
print("collapsing closed PR json", flush=True)
11481199

1200+
if package:
1201+
pr_info_nodes = (
1202+
[package] if does_key_exist_in_hashmap("pr_info", package) else []
1203+
)
1204+
version_pr_info_nodes = (
1205+
[package] if does_key_exist_in_hashmap("version_pr_info", package) else []
1206+
)
1207+
1208+
if not pr_info_nodes:
1209+
logger.warning(f"Package {package} not found in pr_info hashmap")
1210+
if not version_pr_info_nodes:
1211+
logger.warning(f"Package {package} not found in version_pr_info hashmap")
1212+
else:
1213+
pr_info_nodes = get_all_keys_for_hashmap("pr_info")
1214+
version_pr_info_nodes = get_all_keys_for_hashmap("version_pr_info")
1215+
11491216
# first we go from nodes to pr json and update the pr info and remove the data
11501217
name_nodes = [
1151-
("pr_info", get_all_keys_for_hashmap("pr_info")),
1152-
("version_pr_info", get_all_keys_for_hashmap("version_pr_info")),
1218+
("pr_info", pr_info_nodes),
1219+
("version_pr_info", version_pr_info_nodes),
11531220
]
11541221
for name, nodes in name_nodes:
11551222
for node in nodes:
@@ -1182,6 +1249,11 @@ def _remove_closed_pr_json():
11821249

11831250
# at this point, any json blob referenced in the pr info is state != closed
11841251
# so we can remove anything that is empty or closed
1252+
if package:
1253+
logger.info(
1254+
"Since you requested a run for a specific package, we are not removing closed pr_json files."
1255+
)
1256+
return
11851257
nodes = get_all_keys_for_hashmap("pr_json")
11861258
for node in nodes:
11871259
pr = LazyJson(f"pr_json/{node}.json")
@@ -1192,22 +1264,22 @@ def _remove_closed_pr_json():
11921264
)
11931265

11941266

def _update_graph_with_pr_info(package: str | None = None):
    """
    Refresh the dependency graph with the latest pull-request state.

    Collapses closed PR data, reloads the graph from disk, applies bot-rerun
    labels and newly discovered versions, then dumps the graph back out.

    :param package: restrict the refresh to this single package; when None,
        every package is processed.
    """
    _remove_closed_pr_json(package)
    graph = load_existing_graph()
    _update_nodes_with_bot_rerun(graph, package)
    _update_nodes_with_new_versions(graph, package)
    dump_graph(graph)
12011273

12021274

1203-
def main(ctx: CliContext) -> None:
1275+
def main(ctx: CliContext, package: str | None = None) -> None:
12041276
global START_TIME
12051277
START_TIME = time.time()
12061278

12071279
_setup_limits()
12081280

12091281
with fold_log_lines("updating graph with PR info"):
1210-
_update_graph_with_pr_info()
1282+
_update_graph_with_pr_info(package)
12111283
deploy(ctx, dirs_to_deploy=["version_pr_info", "pr_json", "pr_info"])
12121284

12131285
# record tmp dir so we can be sure to clean it later

conda_forge_tick/cli.py

Lines changed: 11 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -151,11 +151,20 @@ def update_upstream_versions(
151151

152152

153153
@main.command(name="auto-tick")
@click.argument(
    "package",
    # Optional positional argument: omitting it runs the bot for all packages.
    required=False,
)
@pass_context
def auto_tick(ctx: CliContext, package: str | None) -> None:
    """
    Run the main bot logic that runs all migrations, updates the graph accordingly, and opens the corresponding PRs.

    If PACKAGE is given, only run the bot for that package, otherwise run the bot for all packages.
    """
    # NOTE(review): import deferred to function scope — presumably to keep CLI
    # startup cheap and avoid importing the heavy auto_tick module for other
    # subcommands; confirm before hoisting to module level.
    from . import auto_tick

    auto_tick.main(ctx, package=package)
159168

160169

161170
@main.command(name="make-status-report")

conda_forge_tick/lazy_json_backends.py

Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -630,6 +630,17 @@ def get_all_keys_for_hashmap(name):
630630
return backend.hkeys(name)
631631

632632

633+
def does_key_exist_in_hashmap(name: str, key: str) -> bool:
    """
    Check if a key exists in a hashmap, using the primary backend.

    :param name: The hashmap name.
    :param key: The key to check.

    :return: True if the key exists, False otherwise.
    """
    backend = LAZY_JSON_BACKENDS[CF_TICK_GRAPH_DATA_PRIMARY_BACKEND]()
    # BUG FIX: the original called `backend.hexists(name, name)`, passing the
    # hashmap name twice and never consulting `key` — it reported whether the
    # hashmap contains an entry named after itself. Query the requested key.
    return backend.hexists(name, key)
642+
643+
633644
@contextlib.contextmanager
634645
def lazy_json_transaction():
635646
try:

0 commit comments

Comments
 (0)