Commit a7db50c

Merge pull request #3155 from regro/more-version-up
fix: munge weird jinja2 quoting
2 parents dc47f61 + 71fa72d commit a7db50c

15 files changed: +861, -9 lines changed

conda_forge_tick/recipe_parser/_parser.py

Lines changed: 43 additions & 1 deletion
@@ -447,6 +447,42 @@ def _build_jinja2_expr_tmp(jinja2_exprs):
     return "\n".join(exprs + tmpls)


+def _remove_quoted_jinja2_vars(lines):
+    """Remove any quoted jinja2 vars from the lines.
+
+    Sometimes people write
+
+        '{{ pin_compatible('x') }}'
+
+    which causes the parser to fail.
+
+    We remove all instances of "['\"]{{" and "}}['\"]" to be safe.
+    """
+    new_lines = []
+    for line in lines:
+        if "'{{" in line and "}}'" in line:
+            start_jinja = line.find("'{{")
+            end_jinja = line.find("}}'")
+        elif '"{{' in line and '}}"' in line:
+            start_jinja = line.find('"{{')
+            end_jinja = line.find('}}"')
+        else:
+            start_jinja = None
+            end_jinja = None
+
+        if (
+            start_jinja is not None
+            and end_jinja is not None
+            and "(" in line[start_jinja:end_jinja]
+            and ")" in line[start_jinja:end_jinja]
+        ):
+            new_lines.append(re.sub(r"['\"]{{", "{{", line))
+            new_lines[-1] = re.sub(r"}}['\"]", "}}", new_lines[-1])
+        else:
+            new_lines.append(line)
+    return new_lines
+
+
 class CondaMetaYAML:
     """Crude parsing of conda recipes.

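
The new helper is just two regular-expression substitutions, applied only when the quoted span looks like a jinja2 function call (it must contain both "(" and ")"). Below is a minimal standalone sketch of the same transformation, run on the quoted line from the libhdbpp-timescale fixture updated further down; this is illustrative code only, not an import of the helper, and it skips the parenthesis check.

import re

# Illustrative input: the quoted-jinja2 style the docstring warns about.
line = '''    - "{{ pin_subpackage('libhdbpp-timescale', exact=True) }}" # [linux]\n'''

# Drop the quote that opens "{{ and the one that closes }}", as the helper does.
munged = re.sub(r"['\"]{{", "{{", line)
munged = re.sub(r"}}['\"]", "}}", munged)

print(munged)
# roughly: - {{ pin_subpackage('libhdbpp-timescale', exact=True) }} # [linux]
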
@@ -498,9 +534,15 @@ def __init__(self, meta_yaml: str):
                    "with a conda build selector! (offending line: '%s')" % line,
                )

-        # remove multiline jinja2 statements
+        # pre-munge odd syntax that we do not want
         lines = list(io.StringIO(meta_yaml).readlines())
+
+        # remove multiline jinja2 statements
         lines = _munge_multiline_jinja2(lines)
+
+        # get rid of quoted jinja2 vars
+        lines = _remove_quoted_jinja2_vars(lines)
+
         meta_yaml = "".join(lines)

         # get any variables set in the file by jinja2
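
For callers nothing changes: CondaMetaYAML still takes the raw meta.yaml text and dump() writes the recipe back out, now with the odd quoting already stripped, which is what the updated *_after_meta.yaml fixtures below reflect. A rough usage sketch, assuming CondaMetaYAML is importable from conda_forge_tick.recipe_parser the same way the test module uses it:

import io

# Assumed import path; the class itself lives in
# conda_forge_tick/recipe_parser/_parser.py.
from conda_forge_tick.recipe_parser import CondaMetaYAML

meta_yaml = """\
requirements:
  build:
    - "{{ compiler('c') }}"
    - {{ stdlib("c") }}
"""

cm = CondaMetaYAML(meta_yaml)

s = io.StringIO()
cm.dump(s)
s.seek(0)
# The quotes around the jinja2 call should be gone in the dumped text,
# matching the fixture updates in this commit.
print(s.read())
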

tests/test_recipe_parser.py

Lines changed: 18 additions & 0 deletions
@@ -1,4 +1,5 @@
 import io
+import os

 import pytest

@@ -1603,3 +1604,20 @@ def test_recipe_parses_strings_colons_quotes():
     s.seek(0)
     new_recipe = s.read()
     assert new_recipe == recipe_correct
+
+
+YAML_PATH = os.path.join(os.path.dirname(__file__), "test_yaml")
+
+
+@pytest.mark.parametrize("recipe", os.listdir(YAML_PATH))
+def test_recipe_parser_yaml_suite(recipe):
+    if (
+        recipe.endswith("_correct.yaml") and "duplicate_lines_cleanup" not in recipe
+    ) or recipe.endswith("_after_meta.yaml"):
+        with open(os.path.join(YAML_PATH, recipe)) as f:
+            recipe = f.read()
+        cm = CondaMetaYAML(recipe)
+        s = io.StringIO()
+        cm.dump(s)
+        s.seek(0)
+        assert s.read() == recipe

tests/test_version_migrator.py

Lines changed: 4 additions & 0 deletions
@@ -57,6 +57,10 @@
         ("boostcpp", "1.74.0"),
         ("event_stream", "1.6.3"),
         ("21cmfast", "3.4.0"),
+        ("pyrsmq", "0.6.0"),
+        ("quart_trio", "0.11.1"),
+        ("reproc", "14.2.5"),
+        ("riskfolio_lib", "6.3.1"),
         # these contain sources that depend on conda build config variants
         pytest.param(
             "polars_mixed_selectors",

tests/test_yaml/stdlib_libhdbpp-timescale_after_meta.yaml

Lines changed: 1 addition & 1 deletion
@@ -58,7 +58,7 @@ outputs: # [linux]
         # different versions per cpptango
         - cpptango # [linux]
       run: # [linux]
-        - "{{ pin_subpackage('libhdbpp-timescale', exact=True) }}" # [linux]
+        - {{ pin_subpackage('libhdbpp-timescale', exact=True) }} # [linux]
     files: # [linux]
       - "lib/libhdb++timescale.so.{{ version }}.dbg" # [linux]
     test: # [linux]

tests/test_yaml/stdlib_mgis_after_meta.yaml

Lines changed: 3 additions & 3 deletions
@@ -23,10 +23,10 @@ build:

 requirements:
   build:
-    - "{{ compiler('c') }}"
+    - {{ compiler('c') }}
     - {{ stdlib("c") }}
-    - "{{ compiler('cxx') }}"
-    - "{{ compiler('fortran') }}" # [not win]
+    - {{ compiler('cxx') }}
+    - {{ compiler('fortran') }} # [not win]
     - cmake >=3.26.4
     - ninja # [win]
     - autoconf # [not win]

tests/test_yaml/version_giturl.yaml

Lines changed: 2 additions & 2 deletions
@@ -10,7 +10,7 @@ source:
   git_tag: '{{version}}'

 build:
-  skip: True # [py27 or py33 or win or win_64]
+  skip: true # [py27 or py33 or win or win_64]
   number: 1
   script:
     - '{{PYTHON}} -m pip install --ignore-installed -vv .'
@@ -46,7 +46,7 @@ about:
   license_file: License.txt
   summary: 'NetworKit is a growing open-source toolkit for large-scale network analysis.'
   description: |
-    NetworKit is a growing open-source toolkit for large-scale network analysis. Its aim is to provide tools for the analysis of large networks in the size range from thousands to billions of edges. For this purpose, it implements efficient graph algorithms, many of them parallel to utilize multicore architectures. These are meant to compute standard measures of network analysis, such as degree sequences, clustering coefficients, and centrality measures. In this respect, NetworKit is comparable to packages such as NetworkX, albeit with a focus on parallelism and scalability. NetworKit is also a testbed for algorithm engineering and contains novel algorithms from recently published research (see list of Publications).
+    NetworKit is a growing open-source toolkit for large-scale network analysis. Its aim is to provide tools for the analysis of large networks in the size range from thousands to billions of edges. For this purpose, it implements efficient graph algorithms, many of them parallel to utilize multicore architectures. These are meant to compute standard measures of network analysis, such as degree sequences, clustering coefficients, and centrality measures. In this respect, NetworKit is comparable to packages such as NetworkX, albeit with a focus on parallelism and scalability. NetworKit is also a testbed for algorithm engineering and contains novel algorithms from recently published research (see list of Publications).
   doc_url: https://networkit.github.io/
   dev_url: https://github.com/networkit/networkit


tests/test_yaml/version_giturl_correct.yaml

Lines changed: 2 additions & 2 deletions
@@ -10,7 +10,7 @@ source:
   git_tag: '{{version}}'

 build:
-  skip: True # [py27 or py33 or win or win_64]
+  skip: true # [py27 or py33 or win or win_64]
   number: 1
   script:
     - '{{PYTHON}} -m pip install --ignore-installed -vv .'
@@ -46,7 +46,7 @@ about:
   license_file: License.txt
   summary: 'NetworKit is a growing open-source toolkit for large-scale network analysis.'
   description: |
-    NetworKit is a growing open-source toolkit for large-scale network analysis. Its aim is to provide tools for the analysis of large networks in the size range from thousands to billions of edges. For this purpose, it implements efficient graph algorithms, many of them parallel to utilize multicore architectures. These are meant to compute standard measures of network analysis, such as degree sequences, clustering coefficients, and centrality measures. In this respect, NetworKit is comparable to packages such as NetworkX, albeit with a focus on parallelism and scalability. NetworKit is also a testbed for algorithm engineering and contains novel algorithms from recently published research (see list of Publications).
+    NetworKit is a growing open-source toolkit for large-scale network analysis. Its aim is to provide tools for the analysis of large networks in the size range from thousands to billions of edges. For this purpose, it implements efficient graph algorithms, many of them parallel to utilize multicore architectures. These are meant to compute standard measures of network analysis, such as degree sequences, clustering coefficients, and centrality measures. In this respect, NetworKit is comparable to packages such as NetworkX, albeit with a focus on parallelism and scalability. NetworKit is also a testbed for algorithm engineering and contains novel algorithms from recently published research (see list of Publications).
   doc_url: https://networkit.github.io/
   dev_url: https://github.com/networkit/networkit


Lines changed: 43 additions & 0 deletions
@@ -0,0 +1,43 @@
+{% set name = "PyRSMQ" %}
+{% set version = "0.5.0" %}
+
+package:
+  name: "{{ name|lower }}"
+  version: "{{ version }}"
+
+source:
+  url: https://pypi.io/packages/source/{{ name[0] }}/{{ name }}/{{ name }}-{{ version }}.tar.gz
+  sha256: 68ba774ba544cd976622c9375c69e21b89a6fbb41221dbebb706733846d557a6
+
+build:
+  noarch: python
+  number: 0
+  script: "{{ PYTHON }} -m pip install . --no-deps --ignore-installed -vv "
+
+requirements:
+  host:
+    - pip
+    - python >=3.6
+    - setuptools
+  run:
+    - python >=3.6
+    - redis-py
+
+test:
+  imports:
+    - rsmq
+    - rsmq.cmd
+    - tests
+
+about:
+  home: https://mlasevich.github.io/PyRSMQ/
+  license: Apache-2.0
+  license_family: APACHE
+  license_file: LICENSE
+  summary: Python Implementation of Redis SMQ
+  dev_url: https://github.com/mlasevich/PyRSMQ
+
+extra:
+  recipe-maintainers:
+    - lincrosenbach
+    - kfasolin

Lines changed: 43 additions & 0 deletions
@@ -0,0 +1,43 @@
+{% set name = "PyRSMQ" %}
+{% set version = "0.6.0" %}
+
+package:
+  name: "{{ name|lower }}"
+  version: "{{ version }}"
+
+source:
+  url: https://pypi.io/packages/source/{{ name[0] }}/{{ name }}/{{ name|lower }}-{{ version }}.tar.gz
+  sha256: dd1f8467e541935489be018dbb0ba1df8b903eb855bf1725947ceee41df92fa4
+
+build:
+  noarch: python
+  number: 0
+  script: "{{ PYTHON }} -m pip install . --no-deps --ignore-installed -vv "
+
+requirements:
+  host:
+    - pip
+    - python >=3.6
+    - setuptools
+  run:
+    - python >=3.6
+    - redis-py
+
+test:
+  imports:
+    - rsmq
+    - rsmq.cmd
+    - tests
+
+about:
+  home: https://mlasevich.github.io/PyRSMQ/
+  license: Apache-2.0
+  license_family: APACHE
+  license_file: LICENSE
+  summary: Python Implementation of Redis SMQ
+  dev_url: https://github.com/mlasevich/PyRSMQ
+
+extra:
+  recipe-maintainers:
+    - lincrosenbach
+    - kfasolin

Lines changed: 45 additions & 0 deletions
@@ -0,0 +1,45 @@
+{% set name = "Quart-Trio" %}
+{% set version = "0.7.0" %}
+
+package:
+  name: {{ name|lower }}
+  version: {{ version }}
+
+source:
+  url: https://pypi.io/packages/source/{{ name[0] }}/{{ name }}/{{ name }}-{{ version }}.tar.gz
+  sha256: 933e3c18e232ece30ccbac7579fdc5f62f2f9c79c3273d6c341f5a1686791eb1
+
+build:
+  noarch: python
+  number: 0
+  script: {{ PYTHON }} -m pip install . -vv
+
+requirements:
+  host:
+    - python >=3.7.0
+    - pip
+  run:
+    - python >=3.7.0
+    - quart >=0.11.1
+    - trio >=0.10.0
+    - hypercorn >=0.6.0
+
+test:
+  imports:
+    - quart_trio
+
+about:
+  home: https://gitlab.com/pgjones/quart-trio
+  license: MIT
+  license_family: MIT
+  license_file: LICENSE
+  summary: Quart-Trio is an extension for Quart to support the Trio event loop
+  description: |
+    Quart-Trio is an extension for Quart to support the Trio event loop.
+    This is an alternative to using the asyncio event loop present in the Python
+    standard library and supported by default in Quart.
+
+extra:
+  recipe-maintainers:
+    - dhirschfeld
+    - scopatz

0 commit comments

Comments
 (0)