@@ -17,10 +17,19 @@
 import pymongo.errors
 from bson import encode
 from bson.raw_bson import RawBSONDocument
-from numpy import ndarray
-from pandas import DataFrame
 from pyarrow import Schema as ArrowSchema
 from pyarrow import Table
+
+try:
+    from numpy import ndarray
+except ImportError:
+    ndarray = None
+
+try:
+    from pandas import DataFrame
+except ImportError:
+    DataFrame = None
+
 from pymongo.bulk import BulkWriteError
 from pymongo.common import MAX_WRITE_BATCH_SIZE
 from pymongoarrow.context import PyMongoArrowContext
@@ -295,7 +304,7 @@ def _tabular_generator(tabular):
         for i in tabular.to_batches():
             for row in i.to_pylist():
                 yield row
-    elif isinstance(tabular, DataFrame):
+    elif DataFrame is not None and isinstance(tabular, DataFrame):
         for row in tabular.to_dict("records"):
             yield row
     elif isinstance(tabular, dict):
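Note on the new guards (a reviewer sketch, not part of the patch): isinstance() raises TypeError when its second argument is None, so the "DataFrame is not None" check has to come first and short-circuit whenever pandas is absent. The same reasoning applies to the "and ndarray is not None" term added in write() below. A minimal, self-contained illustration of the pattern, with illustrative names:

    # Optional-dependency fallback, as in the patched imports above.
    try:
        from pandas import DataFrame
    except ImportError:
        DataFrame = None  # pandas not installed

    def looks_like_dataframe(obj):
        # The "is not None" short-circuit prevents isinstance(obj, None),
        # which would raise TypeError when pandas is missing.
        return DataFrame is not None and isinstance(obj, DataFrame)

    print(looks_like_dataframe({"a": [1, 2]}))  # False with or without pandas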
@@ -325,11 +334,12 @@ def write(collection, tabular):
     tab_size = len(tabular)
     if isinstance(tabular, Table):
         _validate_schema(tabular.schema.types)
-    elif isinstance(tabular, DataFrame):
+    elif DataFrame is not None and isinstance(tabular, DataFrame):
         _validate_schema(ArrowSchema.from_pandas(tabular).types)
     elif (
         isinstance(tabular, dict)
         and len(tabular.values()) >= 1
+        and ndarray is not None
         and all([isinstance(i, ndarray) for i in tabular.values()])
     ):
         _validate_schema([i.dtype for i in tabular.values()])
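Usage sketch (assumes a locally running MongoDB and pymongoarrow installed; the database and collection names are illustrative): after this change, the pyarrow Table branch of write() can be used without pandas or numpy installed, since pyarrow is still imported unconditionally at the top of the module.

    from pyarrow import table
    from pymongo import MongoClient
    from pymongoarrow.api import write

    client = MongoClient()                    # assumes mongodb://localhost:27017
    coll = client.test_db.test_coll           # illustrative names
    data = table({"_id": [1, 2, 3], "qty": [10, 20, 30]})
    write(coll, data)                         # Table branch; pandas/numpy not required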