Description
Pandas version checks
- I have checked that this issue has not already been reported.
- I have confirmed this bug exists on the latest version of pandas.
- I have confirmed this bug exists on the main branch of pandas.
Reproducible Example
>>> import pandas as pd
>>> import pyarrow as pa
>>> arr = ['{"bar": True, "foo": 10}']
>>> pd.DataFrame({'json_col': arr}, pd.ArrowDtype(pa.json_(pa.string())))
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
Cell In[7], line 5
2 import pyarrow as pa
4 arr = ['{"bar": True, "foo": 10}']
----> 5 pd.DataFrame({'json_col': arr}, pd.ArrowDtype(pa.json_(pa.string())))
File ~/src/bigframes/venv/lib/python3.12/site-packages/pandas/core/frame.py:778, in DataFrame.__init__(self, data, index, columns, dtype, copy)
772 mgr = self._init_mgr(
773 data, axes={"index": index, "columns": columns}, dtype=dtype, copy=copy
774 )
776 elif isinstance(data, dict):
777 # GH#38939 de facto copy defaults to False only in non-dict cases
--> 778 mgr = dict_to_mgr(data, index, columns, dtype=dtype, copy=copy, typ=manager)
779 elif isinstance(data, ma.MaskedArray):
780 from numpy.ma import mrecords
File ~/src/bigframes/venv/lib/python3.12/site-packages/pandas/core/internals/construction.py:503, in dict_to_mgr(data, index, columns, dtype, typ, copy)
499 else:
500 # dtype check to exclude e.g. range objects, scalars
501 arrays = [x.copy() if hasattr(x, "dtype") else x for x in arrays]
--> 503 return arrays_to_mgr(arrays, columns, index, dtype=dtype, typ=typ, consolidate=copy)
File ~/src/bigframes/venv/lib/python3.12/site-packages/pandas/core/internals/construction.py:116, in arrays_to_mgr(arrays, columns, index, dtype, verify_integrity, typ, consolidate)
114 index = _extract_index(arrays)
115 else:
--> 116 index = ensure_index(index)
118 # don't force copy because getting jammed in an ndarray anyway
119 arrays, refs = _homogenize(arrays, index, dtype)
File ~/src/bigframes/venv/lib/python3.12/site-packages/pandas/core/indexes/base.py:7649, in ensure_index(index_like, copy)
7647 return Index(index_like, copy=copy, tupleize_cols=False)
7648 else:
-> 7649 return Index(index_like, copy=copy)
File ~/src/bigframes/venv/lib/python3.12/site-packages/pandas/core/indexes/base.py:532, in Index.__new__(cls, data, dtype, copy, name, tupleize_cols)
528 return cls(np.asarray(data), dtype=dtype, copy=copy, name=name)
529 elif not is_list_like(data) and not isinstance(data, memoryview):
530 # 2022-11-16 the memoryview check is only necessary on some CI
531 # builds, not clear why
--> 532 raise cls._raise_scalar_data_error(data)
534 else:
535 if tupleize_cols:
536 # GH21470: convert iterable to list before determining if empty
File ~/src/bigframes/venv/lib/python3.12/site-packages/pandas/core/indexes/base.py:5289, in Index._raise_scalar_data_error(cls, data)
5284 @final
5285 @classmethod
5286 def _raise_scalar_data_error(cls, data):
5287 # We return the TypeError so that we can raise it from the constructor
5288 # in order to keep mypy happy
-> 5289 raise TypeError(
5290 f"{cls.__name__}(...) must be called with a collection of some "
5291 f"kind, {repr(data) if not isinstance(data, np.generic) else str(data)} "
5292 "was passed"
5293 )
TypeError: Index(...) must be called with a collection of some kind, extension<arrow.json>[pyarrow] was passed
Issue Description
Apache Arrow v19.0 introduced the pa.json_ extension type (doc). Currently, attempting to create a pandas DataFrame with this data type raises a TypeError; the full call stack is shown in the reproducible example above.
However, creating a pandas Series with pa.json_ works correctly:
>>> import pandas as pd
>>> import pyarrow as pa
>>> arr = ['{"bar": True, "foo": 10}']
>>> pd.Series(arr, dtype=pd.ArrowDtype(pa.json_(pa.string())))
0    {"bar": True, "foo": 10}
dtype: extension<arrow.json>[pyarrow]
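As a possible interim workaround (a sketch only, based on the working Series path above and not verified here), the column can be built as a Series first and then wrapped in a DataFrame; the name json_series is introduced purely for illustration:
>>> json_series = pd.Series(arr, dtype=pd.ArrowDtype(pa.json_(pa.string())))
>>> pd.DataFrame({'json_col': json_series})  # reuse the already-typed Series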
Expected Behavior
Pandas should support DataFrame construction with the pa.json_ Arrow extension type, consistent with its existing support for Series and Index objects.
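For reference, a minimal sketch of the analogous Index construction mentioned above, using the same arr as in the examples (not re-verified on the versions listed below):
>>> pd.Index(arr, dtype=pd.ArrowDtype(pa.json_(pa.string())))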
Installed Versions
INSTALLED VERSIONS
commit : 0691c5c
python : 3.12.1
python-bits : 64
OS : Linux
OS-release : 6.10.11-1rodete2-amd64
Version : #1 SMP PREEMPT_DYNAMIC Debian 6.10.11-1rodete2 (2024-10-16)
machine : x86_64
processor :
byteorder : little
LC_ALL : None
LANG : en_US.UTF-8
LOCALE : en_US.UTF-8
pandas : 2.2.3
numpy : 2.2.2
pytz : 2025.1
dateutil : 2.9.0.post0
pip : 23.2.1
Cython : None
sphinx : None
IPython : 8.32.0
adbc-driver-postgresql: None
adbc-driver-sqlite : None
bs4 : None
blosc : None
bottleneck : None
dataframe-api-compat : None
fastparquet : None
fsspec : 2025.2.0
html5lib : None
hypothesis : None
gcsfs : 2025.2.0
jinja2 : None
lxml.etree : None
matplotlib : 3.10.0
numba : None
numexpr : None
odfpy : None
openpyxl : None
pandas_gbq : 0.27.0
psycopg2 : None
pymysql : None
pyarrow : 19.0.0
pyreadstat : None
pytest : 8.3.4
python-calamine : None
pyxlsb : None
s3fs : None
scipy : 1.15.1
sqlalchemy : 2.0.38
tables : None
tabulate : 0.9.0
xarray : None
xlrd : None
xlsxwriter : None
zstandard : None
tzdata : 2025.1
qtpy : None
pyqt5 : None