diff --git a/.github/workflows/run-tests.yml b/.github/workflows/run-tests.yml index cc4fbe8..43dd4d3 100644 --- a/.github/workflows/run-tests.yml +++ b/.github/workflows/run-tests.yml @@ -42,5 +42,7 @@ jobs: python-version: ${{ matrix.python_version }} - name: Install the latest version of uv uses: astral-sh/setup-uv@v7 + - name: Sync dependencies (with viz extra) + run: uv sync --frozen --extra viz - name: Run pytest run: uv run --frozen pytest diff --git a/README.md b/README.md index 135a4c3..55e8848 100644 --- a/README.md +++ b/README.md @@ -72,6 +72,9 @@ oa_image.view(how="pyvista") # Export to OME-Parquet. # We can also export OME-TIFF, OME-Zarr or NumPy arrays. oa_image.export(how="ome-parquet", out="your_image.ome.parquet") + +# Export to Vortex (install extras: `pip install 'ome-arrow[vortex]'`). +oa_image.export(how="vortex", out="your_image.vortex") ``` ## Contributing, Development, and Testing diff --git a/pyproject.toml b/pyproject.toml index 9dc4d9e..cc053ac 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -39,11 +39,15 @@ optional-dependencies.viz = [ "trame-vtk>=2.10", "trame-vuetify>=3.1", ] +optional-dependencies.vortex = [ + "vortex-data>=0.56", +] [dependency-groups] dev = [ "poethepoet>=0.34", "pytest>=8.3.5", + "vortex-data>=0.56", ] docs = [ "myst-nb>=1.2", diff --git a/src/ome_arrow/__init__.py b/src/ome_arrow/__init__.py index ee04133..16d5039 100644 --- a/src/ome_arrow/__init__.py +++ b/src/ome_arrow/__init__.py @@ -4,10 +4,17 @@ from ome_arrow._version import version as ome_arrow_version from ome_arrow.core import OMEArrow -from ome_arrow.export import to_numpy, to_ome_parquet, to_ome_tiff, to_ome_zarr +from ome_arrow.export import ( + to_numpy, + to_ome_parquet, + to_ome_tiff, + to_ome_vortex, + to_ome_zarr, +) from ome_arrow.ingest import ( from_numpy, from_ome_parquet, + from_ome_vortex, from_ome_zarr, from_tiff, to_ome_arrow, diff --git a/src/ome_arrow/core.py b/src/ome_arrow/core.py index 7f7d810..d2b1b7f 100644 --- 
a/src/ome_arrow/core.py +++ b/src/ome_arrow/core.py @@ -11,10 +11,17 @@ import numpy as np import pyarrow as pa -from ome_arrow.export import to_numpy, to_ome_parquet, to_ome_tiff, to_ome_zarr +from ome_arrow.export import ( + to_numpy, + to_ome_parquet, + to_ome_tiff, + to_ome_vortex, + to_ome_zarr, +) from ome_arrow.ingest import ( from_numpy, from_ome_parquet, + from_ome_vortex, from_ome_zarr, from_stack_pattern_path, from_tiff, @@ -59,6 +66,7 @@ def __init__( - a path/URL to an OME-TIFF (.tif/.tiff) - a path/URL to an OME-Zarr store (.zarr / .ome.zarr) - a path/URL to an OME-Parquet file (.parquet / .pq) + - a path/URL to a Vortex file (.vortex) - a NumPy ndarray (2D-5D; interpreted with from_numpy defaults) - a dict already matching the OME-Arrow schema @@ -100,6 +108,12 @@ def __init__( s, column_name=column_name, row_index=row_index ) + # Vortex + elif s.lower().endswith(".vortex") or path.suffix.lower() == ".vortex": + self.data = from_ome_vortex( + s, column_name=column_name, row_index=row_index + ) + # TIFF elif path.suffix.lower() in {".tif", ".tiff"} or s.lower().endswith( (".tif", ".tiff") @@ -117,6 +131,7 @@ def __init__( " • Bio-Formats pattern string (contains '<', '>' or '*')\n" " • OME-Zarr path/URL ending with '.zarr' or '.ome.zarr'\n" " • OME-Parquet file ending with '.parquet' or '.pq'\n" + " • Vortex file ending with '.vortex'\n" " • OME-TIFF path/URL ending with '.tif' or '.tiff'" ) @@ -141,7 +156,7 @@ def __init__( "input data must be str, dict, pa.StructScalar, or numpy.ndarray" ) - def export( + def export( # noqa: PLR0911 self, how: str = "numpy", dtype: np.dtype = np.uint16, @@ -165,6 +180,8 @@ def export( parquet_column_name: str = "ome_arrow", parquet_compression: str | None = "zstd", parquet_metadata: dict[str, str] | None = None, + vortex_column_name: str = "ome_arrow", + vortex_metadata: dict[str, str] | None = None, ) -> np.array | dict | pa.StructScalar | str: """ Export the OME-Arrow content in a chosen representation. 
@@ -178,6 +195,7 @@ def export( "ome-tiff" → write OME-TIFF via BioIO "ome-zarr" → write OME-Zarr (OME-NGFF) via BioIO "parquet" → write a single-row Parquet with one struct column + "vortex" → write a single-row Vortex file with one struct column dtype: Target dtype for "numpy"/writers (default: np.uint16). strict: @@ -199,6 +217,8 @@ def export( Try to embed per-channel display colors when safe; otherwise omitted. parquet_*: Options for Parquet export (column name, compression, file metadata). + vortex_*: + Options for Vortex export (column name, file metadata). Returns ------- @@ -209,6 +229,7 @@ def export( - "ome-tiff": output path (str) - "ome-zarr": output path (str) - "parquet": output path (str) + - "vortex": output path (str) Raises ------ @@ -271,6 +292,18 @@ def export( ) return out + # Vortex (single row, single struct column) + if mode in {"ome-vortex", "omevortex", "vortex"}: + if not out: + raise ValueError("export(how='vortex') requires 'out' path.") + to_ome_vortex( + data=self.data, + out_path=out, + column_name=vortex_column_name, + file_metadata=vortex_metadata, + ) + return out + raise ValueError(f"Unknown export method: {how}") def info(self) -> Dict[str, Any]: diff --git a/src/ome_arrow/export.py b/src/ome_arrow/export.py index 23eed12..a3fd96e 100644 --- a/src/ome_arrow/export.py +++ b/src/ome_arrow/export.py @@ -420,3 +420,61 @@ def to_ome_parquet( compression=compression, row_group_size=row_group_size, ) + + +def to_ome_vortex( + data: Dict[str, Any] | pa.StructScalar, + out_path: str, + column_name: str = "ome_arrow", + file_metadata: Optional[Dict[str, str]] = None, +) -> None: + """Export an OME-Arrow record to a Vortex file. + + The file is written as a single-row, single-column Arrow table where the + column holds a struct with the OME-Arrow schema. + + Args: + data: OME-Arrow dict or StructScalar. + out_path: Output path for the Vortex file. + column_name: Column name to store the struct. 
+ file_metadata: Optional file-level metadata to attach. + + Raises: + ImportError: If the optional `vortex-data` dependency is missing. + """ + + try: + import vortex.io as vxio + except ImportError as exc: + raise ImportError( + "Vortex export requires the optional 'vortex-data' dependency." + ) from exc + + # 1) Normalize to a plain Python dict (works better with pyarrow builders, + # especially when the struct has a `null`-typed field like "masks"). + if isinstance(data, pa.StructScalar): + record_dict = data.as_py() + else: + # Validate by round-tripping through a typed scalar, then back to dict. + record_dict = pa.scalar(data, type=OME_ARROW_STRUCT).as_py() + + # 2) Build a single-row struct array from the dict, explicitly passing the schema + struct_array = pa.array([record_dict], type=OME_ARROW_STRUCT) # len=1 + + # 3) Wrap into a one-column table + table = pa.table({column_name: struct_array}) + + # 4) Attach optional file-level metadata + meta: Dict[bytes, bytes] = dict(table.schema.metadata or {}) + try: + meta[b"ome.arrow.type"] = str(OME_ARROW_TAG_TYPE).encode("utf-8") + meta[b"ome.arrow.version"] = str(OME_ARROW_TAG_VERSION).encode("utf-8") + except Exception: + pass + if file_metadata: + for k, v in file_metadata.items(): + meta[str(k).encode("utf-8")] = str(v).encode("utf-8") + table = table.replace_schema_metadata(meta) + + # 5) Write Vortex (single row, single column) + vxio.write(table, str(out_path)) diff --git a/src/ome_arrow/ingest.py b/src/ome_arrow/ingest.py index 2f9e40e..f925582 100644 --- a/src/ome_arrow/ingest.py +++ b/src/ome_arrow/ingest.py @@ -21,6 +21,105 @@ from ome_arrow.meta import OME_ARROW_STRUCT, OME_ARROW_TAG_TYPE, OME_ARROW_TAG_VERSION +def _ome_arrow_from_table( + table: pa.Table, + *, + column_name: Optional[str], + row_index: int, + strict_schema: bool, +) -> pa.StructScalar: + """Extract a single OME-Arrow record from an Arrow table. + + Args: + table: Source Arrow table. 
+ column_name: Column to read; auto-detected when None or invalid. + row_index: Row index to extract. + strict_schema: Require the exact OME-Arrow schema if True. + + Returns: + A typed OME-Arrow StructScalar. + + Raises: + ValueError: If the row index is out of range or no suitable column exists. + """ + if table.num_rows == 0: + raise ValueError("Table contains 0 rows; expected at least 1.") + if not (0 <= row_index < table.num_rows): + raise ValueError(f"row_index {row_index} out of range [0, {table.num_rows}).") + + # 1) Locate the OME-Arrow column + def _struct_matches_ome_fields(t: pa.StructType) -> bool: + ome_fields = {f.name for f in OME_ARROW_STRUCT} + col_fields = {f.name for f in t} + return ome_fields == col_fields + + requested_name = column_name + candidate_col = None + autodetected_name = None + + if column_name is not None and column_name in table.column_names: + arr = table[column_name] + if not pa.types.is_struct(arr.type): + raise ValueError(f"Column '{column_name}' is not a Struct; got {arr.type}.") + if strict_schema and arr.type != OME_ARROW_STRUCT: + raise ValueError( + f"Column '{column_name}' schema != OME_ARROW_STRUCT.\n" + f"Got: {arr.type}\n" + f"Expect:{OME_ARROW_STRUCT}" + ) + if not strict_schema and not _struct_matches_ome_fields(arr.type): + raise ValueError( + f"Column '{column_name}' does not have the expected OME-Arrow fields." + ) + candidate_col = arr + else: + # Auto-detect a struct column that matches OME-Arrow fields + for name in table.column_names: + arr = table[name] + if pa.types.is_struct(arr.type): + if strict_schema and arr.type == OME_ARROW_STRUCT: + candidate_col = arr + autodetected_name = name + column_name = name + break + if not strict_schema and _struct_matches_ome_fields(arr.type): + candidate_col = arr + autodetected_name = name + column_name = name + break + if candidate_col is None: + if column_name is None: + hint = "no struct column with OME-Arrow fields was found." 
+ else: + hint = f"column '{column_name}' not found and auto-detection failed." + raise ValueError(f"Could not locate an OME-Arrow struct column: {hint}") + + # Emit warning if auto-detection was used + if autodetected_name is not None and autodetected_name != requested_name: + warnings.warn( + f"Requested column '{requested_name}' was not usable or not found. " + f"Auto-detected OME-Arrow column '{autodetected_name}'.", + UserWarning, + stacklevel=2, + ) + + # 2) Extract the row as a Python dict + record_dict: Dict[str, Any] = candidate_col.slice(row_index, 1).to_pylist()[0] + + # 3) Reconstruct a typed StructScalar using the canonical schema + scalar = pa.scalar(record_dict, type=OME_ARROW_STRUCT) + + # Optional: soft validation via file-level metadata (if present) + try: + meta = table.schema.metadata or {} + meta.get(b"ome.arrow.type", b"").decode() == str(OME_ARROW_TAG_TYPE) + meta.get(b"ome.arrow.version", b"").decode() == str(OME_ARROW_TAG_VERSION) + except Exception: + pass + + return scalar + + def _normalize_unit(unit: str | None) -> str | None: if not unit: return None @@ -954,88 +1053,72 @@ def from_ome_parquet( row_index: int = 0, strict_schema: bool = False, ) -> pa.StructScalar: - """ - Read an OME-Arrow record from a Parquet file and return a typed StructScalar. + """Read an OME-Arrow record from a Parquet file. + + Args: + parquet_path: Path to the Parquet file. + column_name: Column to read; auto-detected when None or invalid. + row_index: Row index to extract. + strict_schema: Require the exact OME-Arrow schema if True. + + Returns: + A typed OME-Arrow StructScalar. + + Raises: + FileNotFoundError: If the Parquet path does not exist. + ValueError: If the row index is out of range or no suitable column exists. 
""" p = Path(parquet_path) if not p.exists(): raise FileNotFoundError(f"No such file: {p}") table = pq.read_table(p) + return _ome_arrow_from_table( + table, + column_name=column_name, + row_index=row_index, + strict_schema=strict_schema, + ) - if table.num_rows == 0: - raise ValueError("Parquet file contains 0 rows; expected at least 1.") - if not (0 <= row_index < table.num_rows): - raise ValueError(f"row_index {row_index} out of range [0, {table.num_rows}).") - - # 1) Locate the OME-Arrow column - def _struct_matches_ome_fields(t: pa.StructType) -> bool: - ome_fields = {f.name for f in OME_ARROW_STRUCT} - col_fields = {f.name for f in t} - return ome_fields == col_fields - - requested_name = column_name - candidate_col = None - autodetected_name = None - if column_name is not None and column_name in table.column_names: - arr = table[column_name] - if not pa.types.is_struct(arr.type): - raise ValueError(f"Column '{column_name}' is not a Struct; got {arr.type}.") - if strict_schema and arr.type != OME_ARROW_STRUCT: - raise ValueError( - f"Column '{column_name}' schema != OME_ARROW_STRUCT.\n" - f"Got: {arr.type}\n" - f"Expect:{OME_ARROW_STRUCT}" - ) - if not strict_schema and not _struct_matches_ome_fields(arr.type): - raise ValueError( - f"Column '{column_name}' does not have the expected OME-Arrow fields." - ) - candidate_col = arr - else: - # Auto-detect a struct column that matches OME-Arrow fields - for name in table.column_names: - arr = table[name] - if pa.types.is_struct(arr.type): - if strict_schema and arr.type == OME_ARROW_STRUCT: - candidate_col = arr - autodetected_name = name - column_name = name - break - if not strict_schema and _struct_matches_ome_fields(arr.type): - candidate_col = arr - autodetected_name = name - column_name = name - break - if candidate_col is None: - if column_name is None: - hint = "no struct column with OME-Arrow fields was found." - else: - hint = f"column '{column_name}' not found and auto-detection failed." 
- raise ValueError(f"Could not locate an OME-Arrow struct column: {hint}") +def from_ome_vortex( + vortex_path: str | Path, + *, + column_name: Optional[str] = "ome_arrow", + row_index: int = 0, + strict_schema: bool = False, +) -> pa.StructScalar: + """Read an OME-Arrow record from a Vortex file. - # Emit warning if auto-detection was used - if autodetected_name is not None and autodetected_name != requested_name: - warnings.warn( - f"Requested column '{requested_name}' was not usable or not found. " - f"Auto-detected OME-Arrow column '{autodetected_name}'.", - UserWarning, - stacklevel=2, - ) + Args: + vortex_path: Path to the Vortex file. + column_name: Column to read; auto-detected when None or invalid. + row_index: Row index to extract. + strict_schema: Require the exact OME-Arrow schema if True. - # 2) Extract the row as a Python dict - record_dict: Dict[str, Any] = candidate_col.slice(row_index, 1).to_pylist()[0] + Returns: + A typed OME-Arrow StructScalar. - # 3) Reconstruct a typed StructScalar using the canonical schema - scalar = pa.scalar(record_dict, type=OME_ARROW_STRUCT) + Raises: + FileNotFoundError: If the Vortex path does not exist. + ImportError: If the optional `vortex-data` dependency is missing. + ValueError: If the row index is out of range or no suitable column exists. + """ + p = Path(vortex_path) + if not p.exists(): + raise FileNotFoundError(f"No such file: {p}") - # Optional: soft validation via file-level metadata (if present) try: - meta = table.schema.metadata or {} - meta.get(b"ome.arrow.type", b"").decode() == str(OME_ARROW_TAG_TYPE) - meta.get(b"ome.arrow.version", b"").decode() == str(OME_ARROW_TAG_VERSION) - except Exception: - pass - - return scalar + import vortex + except ImportError as exc: + raise ImportError( + "Vortex support requires the optional 'vortex-data' dependency." 
+ ) from exc + + table = vortex.open(str(p)).to_arrow().read_all() + return _ome_arrow_from_table( + table, + column_name=column_name, + row_index=row_index, + strict_schema=strict_schema, + ) diff --git a/tests/test_core.py b/tests/test_core.py index a0910c9..33484cc 100644 --- a/tests/test_core.py +++ b/tests/test_core.py @@ -5,6 +5,7 @@ import pathlib import matplotlib +import numpy as np import pytest from ome_arrow.core import OMEArrow @@ -350,3 +351,35 @@ def test_ome_parquet_specific_col_and_row( oa_image = OMEArrow(data=input_data, column_name=column_name, row_index=row_index) assert oa_image.info() == expected_info + + +def test_vortex_roundtrip(tmp_path: pathlib.Path) -> None: + """Smoke-test the Vortex round-trip export/import path.""" + pytest.importorskip( + "vortex", reason="Vortex support is optional (install extras: vortex)." + ) + + arr = np.arange(16, dtype=np.uint16).reshape(1, 1, 1, 4, 4) + oa = OMEArrow(arr) + out = tmp_path / "example.vortex" + + oa.export(how="vortex", out=str(out)) + reloaded = OMEArrow(str(out)) + + assert reloaded.info() == oa.info() + + +def test_vortex_custom_column_name(tmp_path: pathlib.Path) -> None: + """Ensure custom Vortex column names are preserved on round-trip.""" + pytest.importorskip( + "vortex", reason="Vortex support is optional (install extras: vortex)." 
+ ) + + arr = np.arange(12, dtype=np.uint16).reshape(1, 1, 1, 3, 4) + oa = OMEArrow(arr) + out = tmp_path / "custom_column.vortex" + + oa.export(how="vortex", out=str(out), vortex_column_name="custom_ome_arrow") + reloaded = OMEArrow(str(out), column_name="custom_ome_arrow") + + assert reloaded.info() == oa.info() diff --git a/uv.lock b/uv.lock index c008aef..4015ea3 100644 --- a/uv.lock +++ b/uv.lock @@ -1,5 +1,5 @@ version = 1 -revision = 2 +revision = 3 requires-python = ">=3.11" resolution-markers = [ "python_full_version >= '3.13'", @@ -2468,11 +2468,15 @@ viz = [ { name = "trame-vtk" }, { name = "trame-vuetify" }, ] +vortex = [ + { name = "vortex-data" }, +] [package.dev-dependencies] dev = [ { name = "poethepoet" }, { name = "pytest" }, + { name = "vortex-data" }, ] docs = [ { name = "myst-nb" }, @@ -2506,13 +2510,15 @@ requires-dist = [ { name = "trame", marker = "extra == 'viz'", specifier = ">=3.12" }, { name = "trame-vtk", marker = "extra == 'viz'", specifier = ">=2.10" }, { name = "trame-vuetify", marker = "extra == 'viz'", specifier = ">=3.1" }, + { name = "vortex-data", marker = "extra == 'vortex'", specifier = ">=0.56.0" }, ] -provides-extras = ["viz"] +provides-extras = ["viz", "vortex"] [package.metadata.requires-dev] dev = [ { name = "poethepoet", specifier = ">=0.34" }, { name = "pytest", specifier = ">=8.3.5" }, + { name = "vortex-data", specifier = ">=0.56.0" }, ] docs = [ { name = "myst-nb", specifier = ">=1.2" }, @@ -2920,6 +2926,20 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/5b/5a/bc7b4a4ef808fa59a816c17b20c4bef6884daebbdf627ff2a161da67da19/propcache-0.4.1-py3-none-any.whl", hash = "sha256:af2a6052aeb6cf17d3e46ee169099044fd8224cbaf75c76a2ef596e8163e2237", size = 13305, upload-time = "2025-10-08T19:49:00.792Z" }, ] +[[package]] +name = "protobuf" +version = "5.29.5" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/43/29/d09e70352e4e88c9c7a198d5645d7277811448d76c23b00345670f7c8a38/protobuf-5.29.5.tar.gz", hash = "sha256:bc1463bafd4b0929216c35f437a8e28731a2b7fe3d98bb77a600efced5a15c84", size = 425226, upload-time = "2025-05-28T23:51:59.82Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5f/11/6e40e9fc5bba02988a214c07cf324595789ca7820160bfd1f8be96e48539/protobuf-5.29.5-cp310-abi3-win32.whl", hash = "sha256:3f1c6468a2cfd102ff4703976138844f78ebd1fb45f49011afc5139e9e283079", size = 422963, upload-time = "2025-05-28T23:51:41.204Z" }, + { url = "https://files.pythonhosted.org/packages/81/7f/73cefb093e1a2a7c3ffd839e6f9fcafb7a427d300c7f8aef9c64405d8ac6/protobuf-5.29.5-cp310-abi3-win_amd64.whl", hash = "sha256:3f76e3a3675b4a4d867b52e4a5f5b78a2ef9565549d4037e06cf7b0942b1d3fc", size = 434818, upload-time = "2025-05-28T23:51:44.297Z" }, + { url = "https://files.pythonhosted.org/packages/dd/73/10e1661c21f139f2c6ad9b23040ff36fee624310dc28fba20d33fdae124c/protobuf-5.29.5-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e38c5add5a311f2a6eb0340716ef9b039c1dfa428b28f25a7838ac329204a671", size = 418091, upload-time = "2025-05-28T23:51:45.907Z" }, + { url = "https://files.pythonhosted.org/packages/6c/04/98f6f8cf5b07ab1294c13f34b4e69b3722bb609c5b701d6c169828f9f8aa/protobuf-5.29.5-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:fa18533a299d7ab6c55a238bf8629311439995f2e7eca5caaff08663606e9015", size = 319824, upload-time = "2025-05-28T23:51:47.545Z" }, + { url = "https://files.pythonhosted.org/packages/85/e4/07c80521879c2d15f321465ac24c70efe2381378c00bf5e56a0f4fbac8cd/protobuf-5.29.5-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:63848923da3325e1bf7e9003d680ce6e14b07e55d0473253a690c3a8b8fd6e61", size = 319942, upload-time = "2025-05-28T23:51:49.11Z" }, + { url = "https://files.pythonhosted.org/packages/7e/cc/7e77861000a0691aeea8f4566e5d3aa716f2b1dece4a24439437e41d3d25/protobuf-5.29.5-py3-none-any.whl", hash = 
"sha256:6cf42630262c59b2d8de33954443d94b746c952b01434fc58a417fdbd2e84bd5", size = 172823, upload-time = "2025-05-28T23:51:58.157Z" }, +] + [[package]] name = "psutil" version = "7.0.0" @@ -3790,6 +3810,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/f1/7b/ce1eafaf1a76852e2ec9b22edecf1daa58175c090266e9f6c64afcd81d91/stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695", size = 24521, upload-time = "2023-09-30T13:58:03.53Z" }, ] +[[package]] +name = "substrait" +version = "0.25.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "protobuf" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/89/ec/a9909bf93df6718945889b379eb7f5da1ff3a4dc3bb73862dff03e6e56d6/substrait-0.25.0.tar.gz", hash = "sha256:090feda0f1a6e996ce0baf2d64635ff4b9b2cae58542c7cf3f818ff5e88a2924", size = 193979, upload-time = "2025-11-05T12:25:56.878Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b3/82/d20662ede4aae31c05ff263e6a4351c018fa908bc0bbaf0c6e77545e3265/substrait-0.25.0-py3-none-any.whl", hash = "sha256:29ed969f8685109c06abab45dc557c661871c16d289d3fea146e30c36e80be78", size = 160453, upload-time = "2025-11-05T12:25:55.468Z" }, +] + [[package]] name = "tabulate" version = "0.9.0" @@ -4099,6 +4131,22 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/6b/11/cc635220681e93a0183390e26485430ca2c7b5f9d33b15c74c2861cb8091/urllib3-2.4.0-py3-none-any.whl", hash = "sha256:4e16665048960a0900c702d4a66415956a584919c03361cac9f1df5c5dd7e813", size = 128680, upload-time = "2025-04-10T15:23:37.377Z" }, ] +[[package]] +name = "vortex-data" +version = "0.57.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyarrow" }, + { name = "substrait" }, + { name = "typing-extensions" }, +] +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/05/d7/22cb4668d356da3a1369c577300e6e14f0d0d4d2ddf7a01b5272c4e276ba/vortex_data-0.57.2-cp311-abi3-macosx_10_12_x86_64.whl", hash = "sha256:8ec8502d66ef76607b4aeefa4640d8a56bc8b18d30e7dc14ac67927786a451f3", size = 13934049, upload-time = "2026-01-06T19:01:34.541Z" }, + { url = "https://files.pythonhosted.org/packages/21/8f/5f14f10bcabc1ec28d34ac2b0f872ae3253c0c87d863a444eb6a14b146d3/vortex_data-0.57.2-cp311-abi3-macosx_11_0_arm64.whl", hash = "sha256:5e0765755b94779607900aae3f06a5fd8ccdf416cd9d417fe7cb316c68ba418a", size = 13056232, upload-time = "2026-01-06T19:01:36.851Z" }, + { url = "https://files.pythonhosted.org/packages/85/07/273304fb75b0e8464a53623a0417444ac53b4c20d3e37b3f4316710d4e35/vortex_data-0.57.2-cp311-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5502ce43c5912f1d53d94090c66d272b9ada3d323acd43911628dde25794982", size = 12066507, upload-time = "2026-01-06T19:01:39.126Z" }, + { url = "https://files.pythonhosted.org/packages/12/2b/a55f675b8ff0faf49b296e5147c78ac15a3b3f837f803b83d470f32c12cb/vortex_data-0.57.2-cp311-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20d304a2273a118a07d8f926e2a0784dbf39975d883852d85503f8e5699b9e0f", size = 12976989, upload-time = "2026-01-06T19:01:41.505Z" }, +] + [[package]] name = "vtk" version = "9.5.2"