2 changes: 1 addition & 1 deletion .github/workflows/code-checks.yml
@@ -177,7 +177,7 @@ jobs:
id: setup_python
uses: actions/setup-python@v5
with:
- python-version: '3.10'
+ python-version: '3.11'
cache: 'pip'
cache-dependency-path: 'requirements-dev.txt'

4 changes: 2 additions & 2 deletions .github/workflows/package-checks.yml
@@ -42,7 +42,7 @@ jobs:
id: setup_python
uses: actions/setup-python@v5
with:
- python-version: '3.10'
+ python-version: '3.11'

- name: Pip install with extra
run: |
@@ -53,7 +53,7 @@
runs-on: ubuntu-24.04
strategy:
matrix:
- python-version: ['3.10', '3.11']
+ python-version: ['3.11', '3.12']
fail-fast: false
name: Test Conda Forge Recipe - Python ${{ matrix.python-version }}
concurrency:
18 changes: 9 additions & 9 deletions .github/workflows/unit-tests.yml
@@ -27,18 +27,18 @@ jobs:
strategy:
matrix:
platform: [ubuntu-24.04, ubuntu-24.04-arm]
- env_file: [actions-310.yaml, actions-311.yaml, actions-312.yaml, actions-313.yaml]
+ env_file: [actions-311.yaml, actions-312.yaml, actions-313.yaml]
# Prevent the include jobs from overriding other jobs
pattern: [""]
pandas_future_infer_string: ["1"]
include:
- name: "Downstream Compat"
- env_file: actions-311-downstream_compat.yaml
+ env_file: actions-313-downstream_compat.yaml
pattern: "not slow and not network and not single_cpu"
pytest_target: "pandas/tests/test_downstream.py"
platform: ubuntu-24.04
- name: "Minimum Versions"
- env_file: actions-310-minimum_versions.yaml
+ env_file: actions-311-minimum_versions.yaml
pattern: "not slow and not network and not single_cpu"
platform: ubuntu-24.04
- name: "Freethreading"
@@ -50,7 +50,7 @@ jobs:
pattern: "not slow and not network and not single_cpu"
platform: ubuntu-24.04
- name: "Locale: it_IT"
- env_file: actions-311.yaml
+ env_file: actions-313.yaml
pattern: "not slow and not network and not single_cpu"
extra_apt: "language-pack-it"
# Use the utf8 version as the default, it has no bad side-effect.
@@ -61,7 +61,7 @@ jobs:
extra_loc: "it_IT"
platform: ubuntu-24.04
- name: "Locale: zh_CN"
- env_file: actions-311.yaml
+ env_file: actions-313.yaml
pattern: "not slow and not network and not single_cpu"
extra_apt: "language-pack-zh-hans"
# Use the utf8 version as the default, it has no bad side-effect.
@@ -72,16 +72,16 @@ jobs:
extra_loc: "zh_CN"
platform: ubuntu-24.04
- name: "PANDAS_FUTURE_INFER_STRING=0"
- env_file: actions-312.yaml
+ env_file: actions-313.yaml
pandas_future_infer_string: "0"
platform: ubuntu-24.04
- name: "Numpy Dev"
- env_file: actions-311-numpydev.yaml
+ env_file: actions-313-numpydev.yaml
pattern: "not slow and not network and not single_cpu"
test_args: "-W error::DeprecationWarning -W error::FutureWarning"
platform: ubuntu-24.04
- name: "Pyarrow Nightly"
- env_file: actions-311-pyarrownightly.yaml
+ env_file: actions-313-pyarrownightly.yaml
pattern: "not slow and not network and not single_cpu"
platform: ubuntu-24.04
fail-fast: false
@@ -183,7 +183,7 @@ jobs:
matrix:
# Note: Don't use macOS latest since macos 14 appears to be arm64 only
os: [macos-13, macos-14, windows-latest]
- env_file: [actions-310.yaml, actions-311.yaml, actions-312.yaml, actions-313.yaml]
+ env_file: [actions-311.yaml, actions-312.yaml, actions-313.yaml]
fail-fast: false
runs-on: ${{ matrix.os }}
name: ${{ format('{0} {1}', matrix.os, matrix.env_file) }}
4 changes: 1 addition & 3 deletions .github/workflows/wheels.yml
@@ -101,7 +101,7 @@ jobs:
- [macos-14, macosx_arm64]
- [windows-2022, win_amd64]
- [windows-11-arm, win_arm64]
python: [["cp310", "3.10"], ["cp311", "3.11"], ["cp312", "3.12"], ["cp313", "3.13"], ["cp313t", "3.13"]]
python: [["cp311", "3.11"], ["cp312", "3.12"], ["cp313", "3.13"], ["cp313t", "3.13"]]
include:
# Build Pyodide wheels and upload them to Anaconda.org
# NOTE: this job is similar to the one in unit-tests.yml except for the fact
@@ -110,8 +110,6 @@ jobs:
python: ["cp312", "3.12"]
cibw_build_frontend: 'build'
exclude:
- - buildplat: [windows-11-arm, win_arm64]
- python: ["cp310", "3.10"]
# BackendUnavailable: Cannot import 'mesonpy'
- buildplat: [windows-11-arm, win_arm64]
python: ["cp313t", "3.13"]
7 changes: 3 additions & 4 deletions .pre-commit-config.yaml
@@ -45,7 +45,6 @@ repos:
hooks:
- id: codespell
types_or: [python, rst, markdown, cython, c]
- additional_dependencies: [tomli]
- repo: https://github.com/MarcoGorelli/cython-lint
rev: v0.16.7
hooks:
@@ -77,7 +76,7 @@ repos:
rev: v3.20.0
hooks:
- id: pyupgrade
- args: [--py310-plus]
+ args: [--py311-plus]
- repo: https://github.com/pre-commit/pygrep-hooks
rev: v1.10.0
hooks:
@@ -235,7 +234,7 @@ repos:
entry: python scripts/generate_pip_deps_from_conda.py
files: ^(environment.yml|requirements-dev.txt)$
pass_filenames: false
- additional_dependencies: [tomli, pyyaml]
+ additional_dependencies: [pyyaml]
- id: title-capitalization
name: Validate correct capitalization among titles in documentation
entry: python scripts/validate_rst_title_capitalization.py
@@ -290,7 +289,7 @@ repos:
entry: python -m scripts.validate_min_versions_in_sync
language: python
files: ^(ci/deps/actions-.*-minimum_versions\.yaml|pandas/compat/_optional\.py)$
- additional_dependencies: [tomli, pyyaml]
+ additional_dependencies: [pyyaml]
pass_filenames: false
- id: validate-errors-locations
name: Validate errors locations
2 changes: 1 addition & 1 deletion asv_bench/asv.conf.json
@@ -29,7 +29,7 @@

// The Pythons you'd like to test against. If not provided, defaults
// to the current version of Python used to run `asv`.
"pythons": ["3.10"],
"pythons": ["3.11"],

// The matrix of dependencies to test. Each key is the name of a
// package (in PyPI) and the values are version numbers. An empty
63 changes: 0 additions & 63 deletions ci/deps/actions-310.yaml

This file was deleted.

@@ -4,7 +4,7 @@ name: pandas-dev
channels:
- conda-forge
dependencies:
- - python=3.10
+ - python=3.11

# build dependencies
- versioneer
@@ -3,7 +3,7 @@ name: pandas-dev
channels:
- conda-forge
dependencies:
- - python=3.11
+ - python=3.13

# build dependencies
- versioneer
@@ -2,7 +2,7 @@ name: pandas-dev
channels:
- conda-forge
dependencies:
- - python=3.11
+ - python=3.13

# build dependencies
- versioneer
@@ -2,7 +2,7 @@ name: pandas-dev
channels:
- conda-forge
dependencies:
- - python=3.11
+ - python=3.13

# build dependencies
- versioneer
2 changes: 0 additions & 2 deletions ci/meta.yaml
@@ -31,7 +31,6 @@ requirements:
- numpy >=1.21.6 # [py<311]
- numpy >=1.23.2 # [py>=311]
- versioneer
- - tomli # [py<311]
run:
- python
- numpy >=1.21.6 # [py<311]
@@ -66,7 +65,6 @@ test:
- pytest-xdist >=3.4.0
- pytest-cov
- hypothesis >=6.84.0
- - tomli # [py<311]

about:
home: http://pandas.pydata.org
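The tomli entries dropped from the pre-commit hooks and from ci/meta.yaml above become redundant once the minimum Python is 3.11, because the standard library then ships tomllib with the same API. A minimal sketch of the conditional-import idiom those pins existed to support (illustrative, not pandas' exact code):

    import sys

    if sys.version_info >= (3, 11):
        import tomllib  # standard library since Python 3.11
    else:
        import tomli as tomllib  # third-party backport with the same API

    # tomllib/tomli require the file to be opened in binary mode
    with open("pyproject.toml", "rb") as f:
        config = tomllib.load(f)
    print(config.get("project", {}).get("name"))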
2 changes: 1 addition & 1 deletion doc/source/development/contributing_environment.rst
@@ -133,7 +133,7 @@ Consult the docs for setting up pyenv `here <https://github.com/pyenv/pyenv>`__.
pyenv virtualenv <version> <name-to-give-it>

# For instance:
- pyenv virtualenv 3.10 pandas-dev
+ pyenv virtualenv 3.11 pandas-dev

# Activate the virtualenv
pyenv activate pandas-dev
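After activating the environment, a quick check (a suggested snippet, not part of the documented steps) confirms the interpreter meets the new floor:

    import sys

    # pandas 3.0 development now requires Python 3.11 or newer
    assert sys.version_info >= (3, 11), sys.version
    print(sys.version)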
2 changes: 1 addition & 1 deletion doc/source/whatsnew/v3.0.0.rst
@@ -305,7 +305,7 @@ This change also applies to :meth:`.DataFrameGroupBy.value_counts`. Here, there
Increased minimum version for Python
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

- pandas 3.0.0 supports Python 3.10 and higher.
+ pandas 3.0.0 supports Python 3.11 and higher.

Increased minimum versions for dependencies
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
1 change: 0 additions & 1 deletion environment.yml
@@ -122,5 +122,4 @@ dependencies:
- pip:
- adbc-driver-postgresql>=0.10.0
- adbc-driver-sqlite>=0.8.0
- - typing_extensions; python_version<"3.11"
- tzdata>=2023.3
2 changes: 1 addition & 1 deletion pandas/_libs/arrays.pyi
@@ -1,11 +1,11 @@
from collections.abc import Sequence
+ from typing import Self

import numpy as np

from pandas._typing import (
AxisInt,
DtypeObj,
- Self,
Shape,
)

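This and the later .pyi updates follow one pattern: Self is imported from the standard-library typing module, which provides it natively on Python 3.11+, instead of being re-exported through pandas._typing (the typing_extensions pin dropped from environment.yml above exists for the same reason). A small illustration of the annotation style these stubs rely on (the class is hypothetical, not from pandas):

    from typing import Self  # stdlib as of Python 3.11


    class Builder:
        def __init__(self) -> None:
            self.parts: list[str] = []

        def add(self, part: str) -> Self:
            # Self preserves the subclass type, so chained calls on a
            # subclass keep returning that subclass.
            self.parts.append(part)
            return self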
6 changes: 1 addition & 5 deletions pandas/_libs/include/pandas/vendored/klib/khash_python.h
@@ -222,15 +222,11 @@ static inline int pyobject_cmp(PyObject *a, PyObject *b) {
}

static inline Py_hash_t _Pandas_HashDouble(double val) {
- // Since Python3.10, nan is no longer has hash 0
+ // nan no longer has hash 0
if (isnan(val)) {
return 0;
}
- #if PY_VERSION_HEX < 0x030A0000
- return _Py_HashDouble(val);
- #else
return _Py_HashDouble(NULL, val);
- #endif
}

static inline Py_hash_t floatobject_hash(PyFloatObject *key) {
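The preprocessor branch removed above guarded the pre-3.10 single-argument signature of _Py_HashDouble; with Python 3.11 as the floor, only the two-argument form is needed. The behaviour the updated comment refers to can be seen from Python (a small illustration, not pandas code):

    import math

    # Since Python 3.10, hash() of a NaN float is derived from the object's
    # identity instead of always being 0, so distinct NaN objects usually
    # hash differently.
    a, b = float("nan"), float("nan")
    print(hash(a) == hash(b))  # typically False
    print(hash(math.nan))      # an identity-based value, not 0

    # pandas' khash helper still maps every NaN to 0 so all NaNs share a bucket.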
2 changes: 1 addition & 1 deletion pandas/_libs/internals.pyi
@@ -3,6 +3,7 @@ from collections.abc import (
Sequence,
)
from typing import (
+ Self,
final,
overload,
)
@@ -12,7 +13,6 @@ import numpy as np

from pandas._typing import (
ArrayLike,
- Self,
npt,
)

6 changes: 2 additions & 4 deletions pandas/_libs/sparse.pyi
@@ -1,11 +1,9 @@
from collections.abc import Sequence
+ from typing import Self

import numpy as np

- from pandas._typing import (
- Self,
- npt,
- )
+ from pandas._typing import npt

class SparseIndex:
length: int
3 changes: 1 addition & 2 deletions pandas/_libs/tslibs/dtypes.pyi
@@ -1,6 +1,5 @@
from enum import Enum

- from pandas._typing import Self
+ from typing import Self

OFFSET_TO_PERIOD_FREQSTR: dict[str, str]

2 changes: 1 addition & 1 deletion pandas/_libs/tslibs/nattype.pyi
@@ -6,6 +6,7 @@ from datetime import (
from typing import (
Literal,
NoReturn,
+ Self,
TypeAlias,
overload,
)
@@ -15,7 +16,6 @@ import numpy as np
from pandas._libs.tslibs.period import Period
from pandas._typing import (
Frequency,
- Self,
TimestampNonexistent,
)

2 changes: 1 addition & 1 deletion pandas/_libs/tslibs/offsets.pyi
@@ -7,6 +7,7 @@ from datetime import (
from typing import (
Any,
Literal,
+ Self,
TypeVar,
overload,
)
@@ -16,7 +17,6 @@ import numpy as np
from pandas._libs.tslibs.nattype import NaTType
from pandas._typing import (
OffsetCalendar,
- Self,
npt,
)
