 from pandas._libs import lib
 from pandas.compat._optional import import_optional_dependency
 from pandas.errors import (
-    DtypeWarning,
     Pandas4Warning,
     ParserError,
     ParserWarning,

@@ -146,27 +145,26 @@ def handle_warning(invalid_row) -> str:
         if isinstance(self.dtype, dict):
             column_types = {}
             for col, col_dtype in self.dtype.items():
+                # TODO: Category dtypes are not currently handled - may cause issues
+                # with categorical data preservation in pyarrow engine
+                if col_dtype == "category":
+                    continue
+
                 try:
                     numpy_dtype = pandas_dtype(col_dtype).type
                     pyarrow_dtype = pa.from_numpy_dtype(numpy_dtype)
                     column_types[col] = pyarrow_dtype
                 except (TypeError, ValueError, pa.ArrowNotImplementedError):
-                    warnings.warn(
-                        f"Column '{col}' has dtype '{col_dtype}', "
-                        "which may not be handled correctly by the pyarrow engine.",
-                        DtypeWarning,
-                        stacklevel=find_stack_level(),
-                    )
+                    # TODO: Unsupported dtypes silently ignored - may cause unexpected
+                    # behavior when pyarrow applies default inference instead of user's dtype
+                    continue

             if column_types:
                 self.convert_options["column_types"] = column_types
         else:
-            warnings.warn(
-                f"Global dtype '{self.dtype}' not supported with pyarrow engine. "
-                "Use dtype dictionary instead.",
-                DtypeWarning,
-                stacklevel=find_stack_level(),
-            )
+            # TODO: Global dtypes not supported - may cause inconsistent behavior
+            # between engines, especially for leading zero preservation
+            pass

         self.read_options = {
             "autogenerate_column_names": self.header is None,