@@ -9,7 +9,7 @@
import traceback
import typing
from dataclasses import dataclass
- from typing import Literal, cast
+ from typing import AnyStr, Literal, cast
from urllib.error import URLError
from uuid import uuid4

@@ -41,6 +41,7 @@
)
from conda_forge_tick.lazy_json_backends import (
    LazyJson,
+     does_key_exist_in_hashmap,
    get_all_keys_for_hashmap,
    lazy_json_transaction,
    remove_key_for_hashmap,
@@ -885,7 +886,26 @@ def _is_migrator_done(_mg_start, good_prs, time_per, pr_limit):
    return False


- def _run_migrator(migrator, mctx, temp, time_per, git_backend: GitPlatformBackend):
+ def _run_migrator(
+     migrator: Migrator,
+     mctx: MigratorSessionContext,
+     temp: list[AnyStr],
+     time_per: float,
+     git_backend: GitPlatformBackend,
+     package: str | None = None,
+ ) -> int:
+     """
+     Run a migrator.
+
+     :param migrator: The migrator to run.
+     :param mctx: The migrator session context.
+     :param temp: The list of temporary files.
+     :param time_per: The time limit of this migrator.
+     :param git_backend: The GitPlatformBackend instance to use.
+     :param package: The package to update; if None, all packages are updated.
+
+     :return: The number of "good" PRs created by the migrator.
+     """
    _mg_start = time.time()

    migrator_name = get_migrator_name(migrator)
@@ -907,6 +927,14 @@ def _run_migrator(migrator, mctx, temp, time_per, git_backend: GitPlatformBacken

    possible_nodes = list(migrator.order(effective_graph, mctx.graph))

+     if package:
+         if package not in possible_nodes:
+             logger.warning(
+                 f"Package {package} is not a candidate for migration of {migrator_name}"
+             )
+             return 0
+         possible_nodes = [package]
+
    # version debugging info
    if isinstance(migrator, Version):
        print("possible version migrations:", flush=True)
@@ -1051,18 +1079,26 @@ def _setup_limits():
    resource.setrlimit(resource.RLIMIT_AS, (limit_int, limit_int))


- def _update_nodes_with_bot_rerun(gx: nx.DiGraph):
-     """Go through all the open PRs and check if they are rerun"""
+ def _update_nodes_with_bot_rerun(gx: nx.DiGraph, package: str | None = None):
+     """
+     Go through all the open PRs and check whether they should be rerun.
+
+     :param gx: the dependency graph
+     :param package: the package to update; if None, all packages are updated
+     """

    print("processing bot-rerun labels", flush=True)

-     for i, (name, node) in enumerate(gx.nodes.items()):
+     nodes = gx.nodes.items() if not package else [(package, gx.nodes[package])]
+
+     for i, (name, node) in enumerate(nodes):
        # logger.info(
        #     f"node: {i} memory usage: "
        #     f"{psutil.Process().memory_info().rss // 1024 ** 2}MB",
        # )
        with node["payload"] as payload:
            if payload.get("archived", False):
+                 logger.debug(f"skipping archived package {name}")
                continue
            with payload["pr_info"] as pri, payload["version_pr_info"] as vpri:
                # reset bad
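The node-selection expression above drives the whole loop; the same pattern as a runnable sketch, with a plain dict standing in for gx.nodes (note that gx.nodes[package] raises KeyError if the package is missing from the graph):

nodes_by_name = {"numpy": {"payload": {}}, "scipy": {"payload": {}}}

def selected_nodes(package: str | None = None):
    # All (name, node) pairs, or just the requested one.
    return nodes_by_name.items() if not package else [(package, nodes_by_name[package])]

for i, (name, node) in enumerate(selected_nodes("scipy")):
    print(i, name)  # 0 scipy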
@@ -1112,12 +1148,21 @@ def _filter_ignored_versions(attrs, version):
    return version


- def _update_nodes_with_new_versions(gx):
-     """Updates every node with it's new version (when available)"""
+ def _update_nodes_with_new_versions(gx: nx.DiGraph, package: str | None = None):
+     """
+     Updates every node with its new version (when available).
+
+     :param gx: the dependency graph
+     :param package: the package to update; if None, all packages are updated
+     """

    print("updating nodes with new versions", flush=True)

-     version_nodes = get_all_keys_for_hashmap("versions")
+     if package and not does_key_exist_in_hashmap("versions", package):
+         logger.warning(f"Package {package} not found in versions hashmap")
+         return
+
+     version_nodes = get_all_keys_for_hashmap("versions") if not package else [package]

    for node in version_nodes:
        version_data = LazyJson(f"versions/{node}.json").data
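The existence check keeps a scoped run from crashing on unknown packages. Here is the same guard as a standalone sketch, with a dict standing in for the versions hashmap (does_key_exist_in_hashmap is the real helper imported at the top of this diff; everything else is illustrative):

versions = {"numpy": {"new_version": "2.1.0"}}

def version_nodes_for(package: str | None = None) -> list[str]:
    # Same shape as the hunk above: bail out early for an unknown package.
    if package and package not in versions:
        print(f"Package {package} not found in versions hashmap")
        return []
    return list(versions) if not package else [package]

print(version_nodes_for())         # ['numpy']
print(version_nodes_for("numpy"))  # ['numpy']
print(version_nodes_for("boost"))  # [] after the warning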
@@ -1143,13 +1188,35 @@ def _update_nodes_with_new_versions(gx):
            vpri["new_version"] = version_from_data


- def _remove_closed_pr_json():
+ def _remove_closed_pr_json(package: str | None = None):
+     """
+     Remove the pull request information for closed PRs.
+
+     :param package: The package to remove the PR information for. If None, all PR information is removed. If you pass
+         a package, closed pr_json files are not removed because this would require iterating over all pr_json files.
+     """
    print("collapsing closed PR json", flush=True)

+     if package:
+         pr_info_nodes = (
+             [package] if does_key_exist_in_hashmap("pr_info", package) else []
+         )
+         version_pr_info_nodes = (
+             [package] if does_key_exist_in_hashmap("version_pr_info", package) else []
+         )
+
+         if not pr_info_nodes:
+             logger.warning(f"Package {package} not found in pr_info hashmap")
+         if not version_pr_info_nodes:
+             logger.warning(f"Package {package} not found in version_pr_info hashmap")
+     else:
+         pr_info_nodes = get_all_keys_for_hashmap("pr_info")
+         version_pr_info_nodes = get_all_keys_for_hashmap("version_pr_info")
+
    # first we go from nodes to pr json and update the pr info and remove the data
    name_nodes = [
-         ("pr_info", get_all_keys_for_hashmap("pr_info")),
-         ("version_pr_info", get_all_keys_for_hashmap("version_pr_info")),
+         ("pr_info", pr_info_nodes),
+         ("version_pr_info", version_pr_info_nodes),
    ]
    for name, nodes in name_nodes:
        for node in nodes:
@@ -1182,6 +1249,11 @@ def _remove_closed_pr_json():

    # at this point, any json blob referenced in the pr info is state != closed
    # so we can remove anything that is empty or closed
+     if package:
+         logger.info(
+             "Since you requested a run for a specific package, we are not removing closed pr_json files."
+         )
+         return
    nodes = get_all_keys_for_hashmap("pr_json")
    for node in nodes:
        pr = LazyJson(f"pr_json/{node}.json")
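The early return above is a deliberate trade-off: a single-package run skips the global sweep over pr_json rather than iterating every file. A condensed sketch of that control flow, using toy data and hypothetical names:

pr_json = {"123": {"state": "closed"}, "456": {"state": "open"}}

def collapse_closed(package: str | None = None) -> None:
    # ... per-node pr_info / version_pr_info collapse would happen here ...
    if package:
        print("single-package run: leaving closed pr_json files in place")
        return
    for key in [k for k, v in pr_json.items() if v.get("state") == "closed"]:
        del pr_json[key]

collapse_closed("numpy")
print(pr_json)  # both entries kept
collapse_closed()
print(pr_json)  # only the open PR remains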
@@ -1192,22 +1264,22 @@ def _remove_closed_pr_json():
            )


- def _update_graph_with_pr_info():
-     _remove_closed_pr_json()
+ def _update_graph_with_pr_info(package: str | None = None):
+     _remove_closed_pr_json(package)
    gx = load_existing_graph()
-     _update_nodes_with_bot_rerun(gx)
-     _update_nodes_with_new_versions(gx)
+     _update_nodes_with_bot_rerun(gx, package)
+     _update_nodes_with_new_versions(gx, package)
    dump_graph(gx)


- def main(ctx: CliContext) -> None:
+ def main(ctx: CliContext, package: str | None = None) -> None:
    global START_TIME
    START_TIME = time.time()

    _setup_limits()

    with fold_log_lines("updating graph with PR info"):
-         _update_graph_with_pr_info()
+         _update_graph_with_pr_info(package)
    deploy(ctx, dirs_to_deploy=["version_pr_info", "pr_json", "pr_info"])

    # record tmp dir so we can be sure to clean it later
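The final hunks simply thread package through the pipeline unchanged. Reduced to a runnable toy (function names mirror the diff, but the bodies are stubs; the real CLI wiring that supplies package lives outside this diff):

def _remove_closed_pr_json(package=None):
    print("collapse closed PR json:", package or "all packages")

def _update_nodes_with_bot_rerun(gx, package=None):
    print("bot-rerun labels:", package or "all packages")

def _update_nodes_with_new_versions(gx, package=None):
    print("new versions:", package or "all packages")

def _update_graph_with_pr_info(package=None):
    _remove_closed_pr_json(package)
    gx = {}  # stand-in for load_existing_graph()
    _update_nodes_with_bot_rerun(gx, package)
    _update_nodes_with_new_versions(gx, package)

_update_graph_with_pr_info("numpy")  # every stage sees the same scope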