Skip to content

Commit 8fd7bc1

Browse files
Merge pull request #197 from pydata/add_support_for_3.12
Add support for Python 3.12 (and adjust code for tokenizer changes).
2 parents: 6bd31ca + 8853aee — commit 8fd7bc1

File tree

5 files changed

+27
-14
lines changed

5 files changed

+27
-14
lines changed

.github/workflows/tox.yml

Lines changed: 9 additions & 5 deletions
Original file line number | Diff line number | Diff line change
@@ -11,20 +11,24 @@ jobs:
1111
strategy:
1212
max-parallel: 4
1313
matrix:
14-
python-version: ['2.7', '3.6', '3.7', '3.8', '3.9', '3.10', '3.11-dev']
14+
python-version: ['2.7', '3.6', '3.7', '3.8', '3.9', '3.10', '3.11', '3.12']
1515
pandas-presence: ['with_pandas', 'without_pandas']
1616
env:
1717
PYTHON_VERSION: ${{ matrix.python-version }}
1818
PANDAS_PRESENCE: ${{ matrix.pandas-presence }}
1919
steps:
2020
- uses: actions/checkout@v2
21-
- name: Set up Python ${{ matrix.python-version }}
22-
uses: actions/setup-python@v2
21+
- uses: gabrielfalcao/pyenv-action@v17
2322
with:
24-
python-version: ${{ matrix.python-version }}
23+
default: "${{ matrix.python-version }}"
24+
- name: Install Python ${{ matrix.python-version }}
25+
run: |
26+
pyenv install "${{ matrix.python-version }}"
27+
pyenv local "${{ matrix.python-version }}"
28+
pyenv versions
2529
- name: Install dependencies
2630
run: |
27-
python -m pip install --upgrade pip
31+
pip install -U pip
2832
pip install tox tox-gh-actions
2933
- name: Test with tox
3034
run: |

patsy/parse_formula.py

Lines changed: 4 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -84,15 +84,15 @@ def _tokenize_formula(code, operator_strings):
8484
# "magic" token does:
8585
end_tokens = set(magic_token_types)
8686
end_tokens.remove("(")
87-
87+
8888
it = PushbackAdapter(python_tokenize(code))
8989
for pytype, token_string, origin in it:
9090
if token_string in magic_token_types:
9191
yield Token(magic_token_types[token_string], origin)
9292
else:
9393
it.push_back((pytype, token_string, origin))
9494
yield _read_python_expr(it, end_tokens)
95-
95+
9696
def test__tokenize_formula():
9797
code = "y ~ a + (foo(b,c + 2)) + -1 + 0 + 10"
9898
tokens = list(_tokenize_formula(code, ["+", "-", "~"]))
@@ -274,8 +274,8 @@ def _parsing_error_test(parse_fn, error_descs): # pragma: no cover
274274
except PatsyError as e:
275275
print(e)
276276
assert e.origin.code == bad_code
277-
assert e.origin.start == start
278-
assert e.origin.end == end
277+
assert e.origin.start in (0, start)
278+
assert e.origin.end in (end, len(bad_code))
279279
else:
280280
assert False, "parser failed to report an error!"
281281

patsy/tokens.py

Lines changed: 9 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -31,11 +31,10 @@ def python_tokenize(code):
3131
for (pytype, string, (_, start), (_, end), code) in it:
3232
if pytype == tokenize.ENDMARKER:
3333
break
34-
origin = Origin(code, start, end)
35-
assert pytype != tokenize.NL
36-
if pytype == tokenize.NEWLINE:
34+
if pytype in (tokenize.NL, tokenize.NEWLINE):
3735
assert string == ""
3836
continue
37+
origin = Origin(code, start, end)
3938
if pytype == tokenize.ERRORTOKEN:
4039
raise PatsyError("error tokenizing input "
4140
"(maybe an unclosed string?)",
@@ -53,8 +52,14 @@ def python_tokenize(code):
5352
# end of the source text. We have our own error handling for
5453
# such cases, so just treat this as an end-of-stream.
5554
#
55+
if "unterminated string literal" in e.args[0]:
56+
raise PatsyError(
57+
"error tokenizing input ({})".format(e.args[0]),
58+
Origin(code, 0, len(code)),
59+
)
60+
5661
# Just in case someone adds some other error case:
57-
assert e.args[0].startswith("EOF in multi-line")
62+
assert "EOF in multi-line" in e.args[0]
5863
return
5964

6065
def test_python_tokenize():

setup.py

Lines changed: 1 addition & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -46,6 +46,7 @@
4646
"Programming Language :: Python :: 3.9",
4747
"Programming Language :: Python :: 3.10",
4848
"Programming Language :: Python :: 3.11",
49+
"Programming Language :: Python :: 3.12",
4950
"Topic :: Scientific/Engineering",
5051
],
5152
)

tox.ini

Lines changed: 4 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -1,5 +1,5 @@
11
[tox]
2-
envlist = {py27,py36,py37,py38,py39,py310,py311}-{with_pandas,without_pandas}
2+
envlist = {py27,py36,py37,py38,py39,py310,py311,py312}-{with_pandas,without_pandas}
33

44
[gh-actions]
55
python =
@@ -10,6 +10,7 @@ python =
1010
3.9: py39
1111
3.10: py310
1212
3.11: py311
13+
3.12: py312
1314

1415
[testenv]
1516
deps=
@@ -25,6 +26,8 @@ setenv=
2526
OMP_NUM_THREADS=1
2627
MKL_NUM_THREADS=1
2728
VML_NUM_THREADS=1
29+
allowlist_externals=
30+
env
2831
commands=
2932
pytest -vv --cov=patsy --cov-config={toxinidir}/.coveragerc --cov-report=term-missing --cov-report=xml --cov-report=html:{toxworkdir}/coverage/{envname} {posargs:}
3033
env PATSY_AVOID_OPTIONAL_DEPENDENCIES=1 pytest -vv --cov=patsy --cov-config={toxinidir}/.coveragerc --cov-report=term-missing --cov-report=xml --cov-report=html:{toxworkdir}/coverage/{envname} {posargs:}

0 commit comments

Comments (0)