Skip to content

Commit e91b9a3

Browse files
authored
Unpin universal-pathlib (#595)
* Unpin universal-pathlib
* Set minimum version
1 parent d5fdcaa commit e91b9a3

File tree

4 files changed

+4
-4
lines changed

4 files changed

+4
-4
lines changed

pyproject.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -36,7 +36,7 @@ dependencies = [
3636
"pydantic>=2.0",
3737
"scipy>=1.7.2",
3838
"typing-extensions>=4.3.0",
39-
"universal-pathlib>=0.2.2,<0.3.0",
39+
"universal-pathlib>=0.3.1",
4040
]
4141

4242
[project.urls]

src/hats/catalog/index/index_catalog.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -29,7 +29,7 @@ def loc_partitions(self, ids) -> list[HealpixPixel]:
2929
that may contain rows for the id values
3030
"""
3131
metadata_file = paths.get_parquet_metadata_pointer(self.catalog_base_dir)
32-
dataset = pds.parquet_dataset(metadata_file, filesystem=metadata_file.fs)
32+
dataset = pds.parquet_dataset(metadata_file.path, filesystem=metadata_file.fs)
3333

3434
# There's a lot happening in a few pyarrow dataset methods:
3535
# We create a simple pyarrow expression that roughly corresponds to a SQL statement like

src/hats/io/file_io/file_io.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -384,7 +384,7 @@ def read_parquet_file_to_pandas(file_pointer: str | Path | UPath, **kwargs) -> n
384384
# If we are trying to read a remote directory, we need to send the explicit list of files instead.
385385
# We don't want to get the list unnecessarily because it can be expensive.
386386
if file_pointer.protocol not in ("", "file") and file_pointer.is_dir(): # pragma: no cover
387-
file_pointers = [f for f in file_pointer.iterdir() if f.is_file()]
387+
file_pointers = [f.path for f in file_pointer.iterdir() if f.is_file()]
388388
return npd.read_parquet(
389389
file_pointers,
390390
filesystem=file_pointer.fs,

src/hats/io/parquet_metadata.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -147,7 +147,7 @@ def read_row_group_fragments(metadata_file: str):
147147
if not file_io.is_regular_file(metadata_file):
148148
metadata_file = paths.get_parquet_metadata_pointer(metadata_file)
149149

150-
dataset = pds.parquet_dataset(metadata_file, filesystem=metadata_file.fs)
150+
dataset = pds.parquet_dataset(metadata_file.path, filesystem=metadata_file.fs)
151151

152152
for frag in dataset.get_fragments():
153153
yield from frag.row_groups

0 commit comments

Comments (0)