Commit 47cbbbf

Merge pull request regro#3923 from ytausch/remove-side-effects
fix: remove side effects from feedstock loading
2 parents b47ef2d + 76e7aec commit 47cbbbf

7 files changed: +129 -57 lines changed

conda_forge_tick/container_cli.py

Lines changed: 2 additions & 2 deletions
@@ -320,7 +320,7 @@ def _parse_feedstock(

     name = attrs["feedstock_name"]

-    load_feedstock_local(
+    node_attrs = load_feedstock_local(
         name,
         attrs,
         meta_yaml=meta_yaml,
@@ -329,7 +329,7 @@ def _parse_feedstock(
         mark_not_archived=mark_not_archived,
     )

-    return attrs
+    return node_attrs


 def _parse_meta_yaml(
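
The point of this hunk is that _parse_feedstock now consumes the loader's return value instead of the attrs mapping it passed in. A minimal stand-in sketch of that contract (stub_load_feedstock_local is hypothetical, not the repo's function, and needs no network):

def stub_load_feedstock_local(name: str, attrs: dict) -> dict:
    node_attrs = dict(attrs)              # copy, as the real loader now does
    node_attrs["feedstock_name"] = name   # populate the copy only
    return node_attrs

attrs = {"archived": True}
node_attrs = stub_load_feedstock_local("some-feedstock", attrs)
assert attrs == {"archived": True}        # caller's dict is left untouched
assert node_attrs["feedstock_name"] == "some-feedstock"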

conda_forge_tick/feedstock_parser.py

Lines changed: 61 additions & 47 deletions
@@ -234,17 +234,30 @@ def _clean_req_nones(reqs):

 def populate_feedstock_attributes(
     name: str,
-    sub_graph: typing.MutableMapping,
+    existing_node_attrs: typing.MutableMapping[str, typing.Any],
     meta_yaml: str | None = None,
     recipe_yaml: str | None = None,
     conda_forge_yaml: str | None = None,
     mark_not_archived: bool = False,
     feedstock_dir: str | Path | None = None,
-) -> typing.MutableMapping:
-    """Parse the various configuration information into something usable"""
+) -> dict[str, typing.Any]:
+    """
+    Parse the various configuration information into the node_attrs of a feedstock.
+
+    :param name: The name of the feedstock
+    :param existing_node_attrs: The existing node_attrs of the feedstock. Pass an empty dict if none.
+    :param meta_yaml: The meta.yaml file as a string
+    :param recipe_yaml: The recipe.yaml file as a string
+    :param conda_forge_yaml: The conda-forge.yaml file as a string
+    :param mark_not_archived: If True, forcibly mark the feedstock as not archived in the node attrs, even if it is archived.
+    :param feedstock_dir: The directory where the feedstock is located. If None, some information will not be available.

+    :return: A dictionary with the new node_attrs of the feedstock, with only some fields populated.
+    """
     from conda_forge_tick.chaindb import ChainDB, _convert_to_dict

+    node_attrs = {key: value for key, value in existing_node_attrs.items()}
+
     if isinstance(feedstock_dir, str):
         feedstock_dir = Path(feedstock_dir)

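The added node_attrs = {key: value for key, value in existing_node_attrs.items()} line is the heart of the side-effect removal: all updates land on a copy, not on the caller's mapping. Note it is a shallow copy, so nested values are still shared; a small sketch of what that implies (the dict contents here are made up):

existing = {"conda-forge.yml": {"provider": {}}, "archived": True}
node_attrs = {key: value for key, value in existing.items()}  # same copy as in the diff
node_attrs["archived"] = False
assert existing["archived"] is True                                  # top-level keys are isolated
assert node_attrs["conda-forge.yml"] is existing["conda-forge.yml"]  # nested objects stay shared
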
@@ -253,35 +266,37 @@ def populate_feedstock_attributes(
     ):
         raise ValueError("Either `meta_yaml` or `recipe_yaml` needs to be given.")

-    sub_graph.update({"feedstock_name": name, "parsing_error": False, "branch": "main"})
+    node_attrs.update(
+        {"feedstock_name": name, "parsing_error": False, "branch": "main"}
+    )

     if mark_not_archived:
-        sub_graph.update({"archived": False})
+        node_attrs.update({"archived": False})

     # strip out old keys - this removes old platforms when one gets disabled
-    for key in list(sub_graph.keys()):
+    for key in list(node_attrs.keys()):
         if key.endswith("meta_yaml") or key.endswith("requirements") or key == "req":
-            del sub_graph[key]
+            del node_attrs[key]

     if isinstance(meta_yaml, str):
-        sub_graph["raw_meta_yaml"] = meta_yaml
+        node_attrs["raw_meta_yaml"] = meta_yaml
     elif isinstance(recipe_yaml, str):
-        sub_graph["raw_meta_yaml"] = recipe_yaml
+        node_attrs["raw_meta_yaml"] = recipe_yaml

     # Get the conda-forge.yml
     if isinstance(conda_forge_yaml, str):
         try:
-            sub_graph["conda-forge.yml"] = {
+            node_attrs["conda-forge.yml"] = {
                 k: v for k, v in yaml.safe_load(conda_forge_yaml).items()
             }
         except Exception as e:
             import traceback

             trb = traceback.format_exc()
-            sub_graph["parsing_error"] = sanitize_string(
+            node_attrs["parsing_error"] = sanitize_string(
                 f"feedstock parsing error: cannot load conda-forge.yml: {e}\n{trb}"
             )
-            return sub_graph
+            return node_attrs

     if feedstock_dir is not None:
         logger.debug(
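
As the hunk above shows, a conda-forge.yml that fails to parse no longer leaves a half-mutated sub_graph behind: the error is recorded on the returned copy and the function returns early. A self-contained sketch of that branch (the broken YAML string is made up; only the shape of the except path mirrors the diff):

import yaml

node_attrs = {}
broken_conda_forge_yaml = "provider: [unclosed"   # hypothetical, invalid YAML
try:
    node_attrs["conda-forge.yml"] = {k: v for k, v in yaml.safe_load(broken_conda_forge_yaml).items()}
except Exception as e:
    node_attrs["parsing_error"] = f"feedstock parsing error: cannot load conda-forge.yml: {e}"
print(node_attrs["parsing_error"])
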
@@ -385,7 +400,7 @@ def populate_feedstock_attributes(
     else:
         logger.debug("doing generic parsing")
         plat_archs = [("win", "64"), ("osx", "64"), ("linux", "64")]
-        for k in set(sub_graph["conda-forge.yml"].get("provider", {})):
+        for k in set(node_attrs["conda-forge.yml"].get("provider", {})):
             if "_" in k:
                 plat_archs.append(tuple(k.split("_")))
     if isinstance(meta_yaml, str):
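
In the generic-parsing branch above, provider keys from conda-forge.yml that contain an underscore are split into (platform, arch) tuples. A runnable sketch with a hypothetical provider mapping:

provider = {"linux_aarch64": "default", "win": "azure"}   # made-up conda-forge.yml snippet
plat_archs = [("win", "64"), ("osx", "64"), ("linux", "64")]
for k in set(provider):
    if "_" in k:
        plat_archs.append(tuple(k.split("_")))
print(plat_archs)  # ('linux', 'aarch64') is appended; 'win' has no underscore and is skipped
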
@@ -401,46 +416,46 @@ def populate_feedstock_attributes(
             import traceback

             trb = traceback.format_exc()
-            sub_graph["parsing_error"] = sanitize_string(
+            node_attrs["parsing_error"] = sanitize_string(
                 f"feedstock parsing error: cannot rendering recipe: {e}\n{trb}"
             )
             raise

     logger.debug("platforms: %s", plat_archs)
-    sub_graph["platforms"] = ["_".join(k) for k in plat_archs]
+    node_attrs["platforms"] = ["_".join(k) for k in plat_archs]

     # this makes certain that we have consistent ordering
     sorted_variant_yamls = [x for _, x in sorted(zip(plat_archs, variant_yamls))]
     yaml_dict = ChainDB(*sorted_variant_yamls)
     if not yaml_dict:
         logger.error(f"Something odd happened when parsing recipe {name}")
-        sub_graph["parsing_error"] = (
+        node_attrs["parsing_error"] = (
             "feedstock parsing error: could not combine metadata dicts across platforms"
         )
-        return sub_graph
+        return node_attrs

-    sub_graph["meta_yaml"] = _dedupe_meta_yaml(_convert_to_dict(yaml_dict))
-    meta_yaml = sub_graph["meta_yaml"]
+    node_attrs["meta_yaml"] = _dedupe_meta_yaml(_convert_to_dict(yaml_dict))
+    meta_yaml = node_attrs["meta_yaml"]

     # remove all plat-arch specific keys to remove old ones if a combination is disabled
-    for k in list(sub_graph.keys()):
+    for k in list(node_attrs.keys()):
         if k in ["raw_meta_yaml", "total_requirements"]:
             continue
         if k.endswith("_meta_yaml") or k.endswith("_requirements"):
-            sub_graph.pop(k)
+            node_attrs.pop(k)

     for k, v in zip(plat_archs, variant_yamls):
         plat_arch_name = "_".join(k)
-        sub_graph[f"{plat_arch_name}_meta_yaml"] = v
-        _, sub_graph[f"{plat_arch_name}_requirements"], _ = _extract_requirements(
+        node_attrs[f"{plat_arch_name}_meta_yaml"] = v
+        _, node_attrs[f"{plat_arch_name}_requirements"], _ = _extract_requirements(
             v,
             outputs_to_keep=BOOTSTRAP_MAPPINGS.get(name, None),
         )

     (
-        sub_graph["total_requirements"],
-        sub_graph["requirements"],
-        sub_graph["strong_exports"],
+        node_attrs["total_requirements"],
+        node_attrs["requirements"],
+        node_attrs["strong_exports"],
     ) = _extract_requirements(
         meta_yaml,
         outputs_to_keep=BOOTSTRAP_MAPPINGS.get(name, None),
@@ -455,56 +470,56 @@ def populate_feedstock_attributes(
         ),
     )
     # handle implicit meta packages
-    if "run" in sub_graph.get("meta_yaml", {}).get("requirements", {}):
+    if "run" in node_attrs.get("meta_yaml", {}).get("requirements", {}):
         outputs_names.add(meta_yaml["package"]["name"])
     # add in single package name
     else:
         outputs_names = {meta_yaml["package"]["name"]}
-    sub_graph["outputs_names"] = outputs_names
+    node_attrs["outputs_names"] = outputs_names

     # TODO: Write schema for dict
     # TODO: remove this
     req = _get_requirements(
         yaml_dict,
         outputs_to_keep=BOOTSTRAP_MAPPINGS.get(name, []),
     )
-    sub_graph["req"] = req
+    node_attrs["req"] = req

     # set name and version
     keys = [("package", "name"), ("package", "version")]
     missing_keys = [k[1] for k in keys if k[1] not in yaml_dict.get(k[0], {})]
     for k in keys:
         if k[1] not in missing_keys:
-            sub_graph[k[1]] = yaml_dict[k[0]][k[1]]
+            node_attrs[k[1]] = yaml_dict[k[0]][k[1]]

     # sometimes a version is not given at the top level, so we check outputs
     # we do not know which version to take, but hopefully they are all the same
     if (
-        "version" not in sub_graph
+        "version" not in node_attrs
         and "outputs" in yaml_dict
         and len(yaml_dict["outputs"]) > 0
        and "version" in yaml_dict["outputs"][0]
     ):
-        sub_graph["version"] = yaml_dict["outputs"][0]["version"]
+        node_attrs["version"] = yaml_dict["outputs"][0]["version"]

     # set the url and hash
-    sub_graph.pop("url", None)
-    sub_graph.pop("hash_type", None)
+    node_attrs.pop("url", None)
+    node_attrs.pop("hash_type", None)

     source = yaml_dict.get("source", [])
     if isinstance(source, collections.abc.Mapping):
         source = [source]
     source_keys: Set[str] = set()
     for s in source:
-        if not sub_graph.get("url"):
-            sub_graph["url"] = s.get("url")
+        if not node_attrs.get("url"):
+            node_attrs["url"] = s.get("url")
         source_keys |= s.keys()

     kl = list(sorted(source_keys & hashlib.algorithms_available, reverse=True))
     if kl:
-        sub_graph["hash_type"] = kl[0]
+        node_attrs["hash_type"] = kl[0]

-    return sub_graph
+    return node_attrs


 def load_feedstock_local(
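
The hash_type logic above is what makes the new test expect data["hash_type"] == "sha256": hash-like source keys are intersected with hashlib's known algorithms and the reverse-sorted first entry wins. A quick check (the source keys here are hypothetical):

import hashlib

source_keys = {"url", "fn", "sha256"}   # made-up keys from a recipe source section
kl = sorted(source_keys & hashlib.algorithms_available, reverse=True)
print(kl[0])  # "sha256" - the only key that is also a hashlib algorithm
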
@@ -514,7 +529,7 @@ def load_feedstock_local(
     recipe_yaml: str | None = None,
     conda_forge_yaml: str | None = None,
     mark_not_archived: bool = False,
-):
+) -> dict[str, typing.Any]:
     """Load a feedstock into subgraph based on its name. If meta_yaml and/or
     conda_forge_yaml are not provided, they will be fetched from the feedstock.

@@ -538,6 +553,7 @@ def load_feedstock_local(
     sub_graph : MutableMapping
         The sub_graph, now updated with the feedstock metadata
     """
+    new_sub_graph = {key: value for key, value in sub_graph.items()}

     if meta_yaml is not None and recipe_yaml is not None:
         raise ValueError("Only either `meta_yaml` or `recipe_yaml` can be overridden.")
@@ -552,17 +568,17 @@ def load_feedstock_local(
     # if nothing is overridden and no file is present, error out
     if meta_yaml is None and recipe_yaml is None:
         if isinstance(feedstock_dir, Response):
-            sub_graph.update(
+            new_sub_graph.update(
                 {"feedstock_name": name, "parsing_error": False, "branch": "main"}
             )

             if mark_not_archived:
-                sub_graph.update({"archived": False})
+                new_sub_graph.update({"archived": False})

-            sub_graph["parsing_error"] = sanitize_string(
+            new_sub_graph["parsing_error"] = sanitize_string(
                 f"make_graph: {feedstock_dir.status_code}"
             )
-            return sub_graph
+            return new_sub_graph

         meta_yaml_path = Path(feedstock_dir).joinpath("recipe", "meta.yaml")
         recipe_yaml_path = Path(feedstock_dir).joinpath("recipe", "recipe.yaml")
@@ -580,18 +596,16 @@ def load_feedstock_local(
         if conda_forge_yaml_path.exists():
             conda_forge_yaml = conda_forge_yaml_path.read_text()

-    populate_feedstock_attributes(
+    return populate_feedstock_attributes(
         name,
-        sub_graph,
+        new_sub_graph,
         meta_yaml=meta_yaml,
         recipe_yaml=recipe_yaml,
         conda_forge_yaml=conda_forge_yaml,
         mark_not_archived=mark_not_archived,
         feedstock_dir=feedstock_dir,
     )

-    return sub_graph
-

 def load_feedstock_containerized(
     name: str,

tests/conftest.py

Lines changed: 4 additions & 0 deletions
@@ -98,6 +98,10 @@ def __exit__(
     ) -> None:
         pass

+    @property
+    def data(self):
+        return self
+

 def pytest_configure(config):
     config.addinivalue_line(
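
The new data property lets the fake stand in wherever production code only reads .data as a mapping, which is how the new test consumes try_load_feedstock's result. A self-contained sketch of that duck-typing (TinyFakeLazyJson is illustrative, not the fixture from conftest.py):

class TinyFakeLazyJson(dict):
    def __enter__(self):
        return self

    def __exit__(self, *exc):
        pass

    @property
    def data(self):
        return self   # reads via .data see the same mapping

fake = TinyFakeLazyJson()
with fake as node:
    node["archived"] = False
assert fake.data["archived"] is False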

tests/test_make_graph.py

Lines changed: 54 additions & 0 deletions
@@ -0,0 +1,54 @@
+import pytest
+from conftest import FakeLazyJson
+
+from conda_forge_tick.lazy_json_backends import LazyJson
+from conda_forge_tick.make_graph import try_load_feedstock
+
+
+@pytest.mark.parametrize("container_enabled", [True, False])
+@pytest.mark.parametrize("existing_archived", [True, False, None])
+@pytest.mark.parametrize("mark_not_archived", [True, False])
+def test_try_load_feedstock(
+    request: pytest.FixtureRequest,
+    mark_not_archived: bool,
+    existing_archived: bool | None,
+    container_enabled: bool,
+):
+    if container_enabled:
+        request.getfixturevalue("use_containers")
+
+    feedstock = "typst-test"  # archived
+
+    fake_lazy_json = FakeLazyJson()  # empty dict
+
+    with fake_lazy_json as loaded_lazy_json:
+        if existing_archived is not None:
+            loaded_lazy_json["archived"] = existing_archived
+        # FakeLazyJson is not an instance of LazyJson
+        # noinspection PyTypeChecker
+        data = try_load_feedstock(feedstock, loaded_lazy_json, mark_not_archived).data  # type: ignore
+
+    if mark_not_archived:
+        assert data["archived"] is False
+    elif existing_archived is None:
+        assert "archived" not in data
+    else:
+        assert data["archived"] is existing_archived
+
+    assert data["feedstock_name"] == feedstock
+    assert data["parsing_error"] is False
+    assert data["raw_meta_yaml"].startswith("{% set name")
+    assert isinstance(data["conda-forge.yml"], dict)
+    assert "linux_64" in data["platforms"]
+    assert data["meta_yaml"]["about"]["license"] == "MIT"
+    assert isinstance(data["linux_64_meta_yaml"], dict)
+    assert isinstance(data["linux_64_requirements"], dict)
+    assert isinstance(data["total_requirements"], dict)
+    assert data["strong_exports"] is False
+    assert data["outputs_names"] == {feedstock}
+    assert isinstance(data["req"], set)
+    assert data["name"] == feedstock
+    assert data["version"].startswith("0.")
+    assert data["url"].startswith("https://github.com/tingerrr/typst-test")
+    assert data["hash_type"] == "sha256"
+    assert isinstance(data["version_pr_info"], LazyJson)
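
The first three asserts encode a simple precedence for the archived flag, written out here as a tiny helper (illustrative only, not part of the PR); the test itself can presumably be run on its own with something like pytest tests/test_make_graph.py -k test_try_load_feedstock:

def expected_archived(mark_not_archived: bool, existing_archived: bool | None) -> bool | None:
    if mark_not_archived:
        return False          # forcibly cleared in the returned node attrs
    return existing_archived  # otherwise keep what was there (None means the key is absent)

assert expected_archived(True, True) is False
assert expected_archived(False, None) is None
assert expected_archived(False, True) is True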

tests/test_migrators.py

Lines changed: 2 additions & 2 deletions
@@ -549,7 +549,7 @@ def run_test_migration(
     name = "blah"

     pmy = populate_feedstock_attributes(
-        name, sub_graph={}, meta_yaml=inp, conda_forge_yaml=cf_yml
+        name, existing_node_attrs={}, meta_yaml=inp, conda_forge_yaml=cf_yml
     )

     # these are here for legacy migrators
@@ -571,7 +571,7 @@ def run_test_migration(

     pmy = populate_feedstock_attributes(
         name,
-        sub_graph={},
+        existing_node_attrs={},
         recipe_yaml=inp,
         conda_forge_yaml=cf_yml,
         feedstock_dir=tmp_path,
