Skip to content

Commit 644c7c7

Browse files
committed
Support local running of "conda-forge-tick --dry-run auto-tick"
1 parent 96385b6 commit 644c7c7

File tree

4 files changed

+70
-24
lines changed

4 files changed

+70
-24
lines changed

README.md

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -281,9 +281,10 @@ pip install -e .
281281
Then you can use the CLI like this:
282282

283283
```bash
284-
conda-forge-tick --help
284+
conda-forge-tick --dry-run ...
285285
```
286286

287+
See `--help` for a list of the commands available.
287288
For debugging, use the `--debug` flag. This enables debug logging and disables multiprocessing.
288289

289290
Note that the bot expects the [conda-forge dependency graph](https://github.com/regro/cf-graph-countyfair) to be
@@ -298,6 +299,7 @@ The local debugging functionality is still work in progress and might not work f
298299
Currently, the following commands are supported and tested:
299300

300301
- `update-upstream-versions`
302+
- `auto-tick`: To ease debugging, pass `--filter-pattern=` to give a regex of which migrators to enable.
301303

302304
### Structure of the Bot's Jobs
303305

conda_forge_tick/auto_tick.py

Lines changed: 38 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -436,6 +436,7 @@ def run_with_tmpdir(
436436
git_backend: GitPlatformBackend,
437437
rerender: bool = True,
438438
base_branch: str = "main",
439+
dry_run: bool = False,
439440
**kwargs: typing.Any,
440441
) -> tuple[MigrationUidTypedDict, dict] | tuple[Literal[False], Literal[False]]:
441442
"""
@@ -454,19 +455,20 @@ def run_with_tmpdir(
454455
git_backend=git_backend,
455456
rerender=rerender,
456457
base_branch=base_branch,
458+
dry_run=dry_run,
457459
**kwargs,
458460
)
459461

460462

461-
def _make_and_sync_pr_lazy_json(pr_data) -> LazyJson:
463+
def _make_and_sync_pr_lazy_json(pr_data, dry_run) -> LazyJson:
462464
if pr_data:
463465
pr_lazy_json = LazyJson(
464466
os.path.join("pr_json", f"{pr_data.id}.json"),
465467
)
466468
with pr_lazy_json as __edit_pr_lazy_json:
467469
__edit_pr_lazy_json.update(**pr_data.model_dump(mode="json"))
468470

469-
if "id" in pr_lazy_json:
471+
if "id" in pr_lazy_json and not dry_run:
470472
sync_lazy_json_object(pr_lazy_json, "file", ["github_api"])
471473

472474
else:
@@ -481,6 +483,7 @@ def run(
481483
git_backend: GitPlatformBackend,
482484
rerender: bool = True,
483485
base_branch: str = "main",
486+
dry_run: bool = False,
484487
**kwargs: typing.Any,
485488
) -> tuple[MigrationUidTypedDict, dict] | tuple[Literal[False], Literal[False]]:
486489
"""For a given feedstock and migration run the migration
@@ -557,7 +560,7 @@ def run(
557560

558561
# spoof this so it looks like the package is done
559562
pr_data = get_spoofed_closed_pr_info()
560-
pr_lazy_json = _make_and_sync_pr_lazy_json(pr_data)
563+
pr_lazy_json = _make_and_sync_pr_lazy_json(pr_data, dry_run)
561564
_reset_pre_pr_migrator_fields(
562565
context.attrs, migrator_name, is_version=is_version_migration
563566
)
@@ -652,7 +655,7 @@ def run(
652655
comment=rerender_info.rerender_comment,
653656
)
654657

655-
pr_lazy_json = _make_and_sync_pr_lazy_json(pr_data)
658+
pr_lazy_json = _make_and_sync_pr_lazy_json(pr_data, dry_run)
656659

657660
# If we've gotten this far then the node is good
658661
with context.attrs["pr_info"] as pri:
@@ -731,6 +734,7 @@ def _run_migrator_on_feedstock_branch(
731734
mctx,
732735
migrator_name,
733736
good_prs,
737+
dry_run,
734738
):
735739
break_loop = False
736740
sync_pr_info = False
@@ -748,6 +752,7 @@ def _run_migrator_on_feedstock_branch(
748752
rerender=migrator.rerender,
749753
base_branch=base_branch,
750754
hash_type=attrs.get("hash_type", "sha256"),
755+
dry_run=dry_run,
751756
)
752757
finally:
753758
fctx.attrs.pop("new_version", None)
@@ -901,19 +906,22 @@ def _run_migrator_on_feedstock_branch(
901906
if sync_pr_info:
902907
with attrs["pr_info"] as pri:
903908
pass
904-
sync_lazy_json_object(pri, "file", ["github_api"])
909+
if not dry_run:
910+
sync_lazy_json_object(pri, "file", ["github_api"])
905911

906912
if sync_version_pr_info:
907913
with attrs["version_pr_info"] as vpri:
908914
pass
909-
sync_lazy_json_object(vpri, "file", ["github_api"])
915+
if not dry_run:
916+
sync_lazy_json_object(vpri, "file", ["github_api"])
910917

911918
return good_prs, break_loop
912919

913920

914-
def _is_migrator_done(_mg_start, good_prs, time_per, pr_limit, tried_prs):
921+
def _is_migrator_done(
922+
_mg_start, good_prs, time_per, pr_limit, tried_prs, backend: GitPlatformBackend
923+
):
915924
curr_time = time.time()
916-
backend = github_backend()
917925
api_req = backend.get_api_requests_left()
918926

919927
if curr_time - START_TIME > TIMEOUT:
@@ -957,7 +965,9 @@ def _is_migrator_done(_mg_start, good_prs, time_per, pr_limit, tried_prs):
957965
return False
958966

959967

960-
def _run_migrator(migrator, mctx, temp, time_per, git_backend: GitPlatformBackend):
968+
def _run_migrator(
969+
migrator, mctx, temp, time_per, git_backend: GitPlatformBackend, dry_run
970+
):
961971
_mg_start = time.time()
962972

963973
migrator_name = get_migrator_name(migrator)
@@ -1013,7 +1023,7 @@ def _run_migrator(migrator, mctx, temp, time_per, git_backend: GitPlatformBacken
10131023
)
10141024

10151025
if _is_migrator_done(
1016-
_mg_start, good_prs, time_per, migrator.pr_limit, tried_prs
1026+
_mg_start, good_prs, time_per, migrator.pr_limit, tried_prs, git_backend
10171027
):
10181028
return 0
10191029

@@ -1032,7 +1042,7 @@ def _run_migrator(migrator, mctx, temp, time_per, git_backend: GitPlatformBacken
10321042
# Don't let CI timeout, break ahead of the timeout so we make certain
10331043
# to write to the repo
10341044
if _is_migrator_done(
1035-
_mg_start, good_prs, time_per, migrator.pr_limit, tried_prs
1045+
_mg_start, good_prs, time_per, migrator.pr_limit, tried_prs, git_backend
10361046
):
10371047
break
10381048

@@ -1089,6 +1099,7 @@ def _run_migrator(migrator, mctx, temp, time_per, git_backend: GitPlatformBacken
10891099
mctx=mctx,
10901100
migrator_name=migrator_name,
10911101
good_prs=good_prs,
1102+
dry_run=dry_run,
10921103
)
10931104
if break_loop:
10941105
break
@@ -1278,15 +1289,16 @@ def _update_graph_with_pr_info():
12781289
dump_graph(gx)
12791290

12801291

1281-
def main(ctx: CliContext) -> None:
1292+
def main(ctx: CliContext, no_update_graph: bool, filter_pattern: str | None) -> None:
12821293
global START_TIME
12831294
START_TIME = time.time()
12841295

12851296
_setup_limits()
12861297

1287-
with fold_log_lines("updating graph with PR info"):
1288-
_update_graph_with_pr_info()
1289-
deploy(ctx, dirs_to_deploy=["version_pr_info", "pr_json", "pr_info"])
1298+
if not no_update_graph:
1299+
with fold_log_lines("updating graph with PR info"):
1300+
_update_graph_with_pr_info()
1301+
deploy(ctx, dirs_to_deploy=["version_pr_info", "pr_json", "pr_info"])
12901302

12911303
# record tmp dir so we can be sure to clean it later
12921304
temp = glob.glob("/tmp/*")
@@ -1305,7 +1317,7 @@ def main(ctx: CliContext) -> None:
13051317
smithy_version=smithy_version,
13061318
pinning_version=pinning_version,
13071319
)
1308-
migrators = load_migrators()
1320+
migrators = load_migrators(pattern=filter_pattern)
13091321

13101322
# compute the time per migrator
13111323
with fold_log_lines("computing migrator run times"):
@@ -1339,7 +1351,15 @@ def main(ctx: CliContext) -> None:
13391351
git_backend = github_backend() if not ctx.dry_run else DryRunBackend()
13401352

13411353
for mg_ind, migrator in enumerate(migrators):
1342-
_run_migrator(migrator, mctx, temp, time_per_migrator[mg_ind], git_backend)
1354+
_run_migrator(
1355+
migrator,
1356+
mctx,
1357+
temp,
1358+
time_per_migrator[mg_ind],
1359+
git_backend,
1360+
dry_run=ctx.dry_run,
1361+
)
13431362

1344-
logger.info("API Calls Remaining: %d", github_backend().get_api_requests_left())
1363+
if not ctx.dry_run:
1364+
logger.info("API Calls Remaining: %d", git_backend.get_api_requests_left())
13451365
logger.info("Done")

conda_forge_tick/cli.py

Lines changed: 10 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -160,11 +160,19 @@ def update_upstream_versions(
160160

161161

162162
@main.command(name="auto-tick")
163+
@click.option(
164+
"--no-update-graph", is_flag=True, help="Don't update the graph with PR info"
165+
)
166+
@click.option(
167+
"--filter-pattern", default=None, help="Only run migrators matching this pattern"
168+
)
163169
@pass_context
164-
def auto_tick(ctx: CliContext) -> None:
170+
def auto_tick(
171+
ctx: CliContext, no_update_graph: bool, filter_pattern=Optional[str]
172+
) -> None:
165173
from . import auto_tick
166174

167-
auto_tick.main(ctx)
175+
auto_tick.main(ctx, no_update_graph, filter_pattern)
168176

169177

170178
@main.command(name="make-status-report")

conda_forge_tick/make_migrators.py

Lines changed: 19 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -872,13 +872,17 @@ def _load(name):
872872
return make_from_lazy_json_data(lzj.data)
873873

874874

875-
def load_migrators(skip_paused: bool = True) -> MutableSequence[Migrator]:
875+
def load_migrators(
876+
skip_paused: bool = True, pattern: str | None = None
877+
) -> MutableSequence[Migrator]:
876878
"""Loads all current migrators.
877879
878880
Parameters
879881
----------
880882
skip_paused : bool, optional
881883
Whether to skip paused migrators, defaults to True.
884+
pattern : str, optional
885+
A regular expression pattern to filter migrators, defaults to None.
882886
883887
Returns
884888
-------
@@ -890,6 +894,15 @@ def load_migrators(skip_paused: bool = True) -> MutableSequence[Migrator]:
890894
pinning_migrators = []
891895
longterm_migrators = []
892896
all_names = get_all_keys_for_hashmap("migrators")
897+
if pattern is not None:
898+
original_all_names = all_names
899+
all_names = [n for n in all_names if re.fullmatch(pattern, n)]
900+
if not all_names:
901+
raise ValueError(
902+
f"No migrators found matching pattern {pattern}. "
903+
f"Available migrators: {original_all_names}"
904+
)
905+
print(f"Reduced migrators from {len(original_all_names)} to {len(all_names)}")
893906
with executor("process", 4) as pool:
894907
futs = [pool.submit(_load, name) for name in all_names]
895908

@@ -914,11 +927,14 @@ def load_migrators(skip_paused: bool = True) -> MutableSequence[Migrator]:
914927
migrators.append(migrator)
915928

916929
if version_migrator is None:
917-
raise RuntimeError("No version migrator found in the migrators directory!")
930+
if pattern is None:
931+
raise RuntimeError("No version migrator found in the migrators directory!")
932+
else:
933+
migrators.insert(0, version_migrator)
918934

919935
RNG.shuffle(pinning_migrators)
920936
RNG.shuffle(longterm_migrators)
921-
migrators = [version_migrator] + migrators + pinning_migrators + longterm_migrators
937+
migrators += pinning_migrators + longterm_migrators
922938

923939
return migrators
924940

0 commit comments

Comments
 (0)