Commit fb2e1f3

Unpin universal-pathlib (#620)
* Unpin universal-pathlib
* Set minimum version
* Allow hats to set upath version
* Install nested-pandas from the requirements
* Fix pre-commit
* Fix mypy
1 parent b96c5b2 commit fb2e1f3

6 files changed, +12 -12 lines changed

pyproject.toml

Lines changed: 1 addition & 2 deletions
@@ -25,8 +25,7 @@ dependencies = [
     "numpy>=2.2.0,<3",
     "pandas>=2.0",
     "tqdm>=4.59.0",
-    # NOTE: package PINNED at <0.3.0, see https://github.com/astronomy-commons/lsdb/issues/1047
-    "universal-pathlib>=0.2.2,<0.3.0",
+    "universal-pathlib",
 ]

 # On a mac, install optional dependencies with `pip install '.[dev]'` (include the single quotes)

requirements.txt

Lines changed: 2 additions & 1 deletion
@@ -1 +1,2 @@
-git+https://github.com/astronomy-commons/hats.git@main
+hats @ git+https://github.com/astronomy-commons/hats.git@main
+nested-pandas @ git+https://github.com/lincc-frameworks/nested-pandas.git@main

src/hats_import/catalog/map_reduce.py

Lines changed: 1 addition & 1 deletion
@@ -297,7 +297,7 @@ def reduce_pixel_shards(
     healpix_pixel = HealpixPixel(destination_pixel_order, destination_pixel_number)
     pixel_dir = get_pixel_cache_directory(cache_shard_path, healpix_pixel)

-    merged_table = pq.read_table(pixel_dir, schema=schema)
+    merged_table = pq.read_table(pixel_dir.path, filesystem=pixel_dir.fs, schema=schema)

     rows_written = len(merged_table)
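
The new call passes the UPath's plain path string and its underlying fsspec filesystem to pyarrow separately, rather than the UPath object itself. A minimal sketch of the same pattern, assuming a hypothetical fsspec-backed location instead of this repository's pixel-cache helpers:

import pyarrow.parquet as pq
from upath import UPath

# Hypothetical remote directory of parquet shards (any fsspec-backed URL works).
pixel_dir = UPath("s3://example-bucket/catalog/Norder=5/Npix=1234")

# Hand pyarrow the bare path plus the fsspec filesystem explicitly,
# instead of the UPath object, so pyarrow never has to interpret UPath itself.
merged_table = pq.read_table(pixel_dir.path, filesystem=pixel_dir.fs)
print(merged_table.num_rows)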

src/hats_import/catalog/resume_plan.py

Lines changed: 2 additions & 2 deletions
@@ -295,12 +295,12 @@ def get_alignment_file(
             threshold=pixel_threshold,
             drop_empty_siblings=drop_empty_siblings,
         )
-        with open(file_name, "wb") as pickle_file:
+        with file_name.open("wb") as pickle_file:
             alignment = np.array([x if x is not None else [-1, -1, 0] for x in alignment], dtype=np.int64)
             pickle.dump(alignment, pickle_file)

         if self.destination_pixel_map is None:
-            with open(file_name, "rb") as pickle_file:
+            with file_name.open("rb") as pickle_file:
                 alignment = pickle.load(pickle_file)
             pixel_list = np.unique(alignment, axis=0)
             self.destination_pixel_map = {
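
Swapping the builtin open(file_name, ...) for file_name.open(...) routes the pickle reads and writes through the UPath's own filesystem, so they also work when the resume directory is not on local disk. A small illustrative sketch of that round trip, assuming a hypothetical local file path:

import pickle
import numpy as np
from upath import UPath

file_name = UPath("/tmp/alignment.pickle")  # could equally be an s3:// or gs:// URL
alignment = np.array([[0, 0, 10], [-1, -1, 0]], dtype=np.int64)

# UPath.open() dispatches through fsspec, unlike the builtin open(),
# which only understands local paths.
with file_name.open("wb") as pickle_file:
    pickle.dump(alignment, pickle_file)

with file_name.open("rb") as pickle_file:
    restored = pickle.load(pickle_file)

print(np.array_equal(alignment, restored))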

src/hats_import/hipscat_conversion/run_conversion.py

Lines changed: 1 addition & 1 deletion
@@ -125,7 +125,7 @@ def _convert_partition_file(pixel, args, schema, ra_column, dec_column):
         / f"Npix={pixel.pixel}.parquet"
     )

-    table = pq.read_table(input_file, schema=schema)
+    table = pq.read_table(input_file.path, filesystem=input_file.fs, schema=schema)

     table = table.drop_columns(["_hipscat_index", "Norder", "Dir", "Npix"]).add_column(
         0,

src/hats_import/verification/run_verification.py

Lines changed: 5 additions & 5 deletions
@@ -125,15 +125,15 @@ def from_args(cls, args: VerificationArguments) -> "Verifier":
             all_files.append(unquote(child.path))

         files_ds = pds.dataset(all_files, filesystem=parquet_fs)
-        metadata_ds = pds.parquet_dataset(
-            hats.io.paths.get_parquet_metadata_pointer(args.input_catalog_path), filesystem=parquet_fs
-        )
+        metadata_pointer = hats.io.paths.get_parquet_metadata_pointer(args.input_catalog_path)
+        metadata_ds = pds.parquet_dataset(metadata_pointer.path, filesystem=parquet_fs)

         input_truth_schema = None
         if args.truth_schema is not None:
             input_truth_schema = pds.parquet_dataset(args.truth_schema, filesystem=parquet_fs).schema
+        common_metadata_pointer = hats.io.paths.get_common_metadata_pointer(args.input_catalog_path)
         common_metadata_schema = pds.parquet_dataset(
-            hats.io.paths.get_common_metadata_pointer(args.input_catalog_path), filesystem=parquet_fs
+            common_metadata_pointer.path, filesystem=parquet_fs
         ).schema
         constructed_truth_schema = cls._construct_truth_schema(
             input_truth_schema=input_truth_schema, common_metadata_schema=common_metadata_schema
@@ -458,7 +458,7 @@ def write_results(self) -> None:
         """Write the verification results to file at `args.output_path` / `args.output_filename`."""
         self.args.output_file_path.parent.mkdir(exist_ok=True, parents=True)
         # Write provenance info
-        with open(self.args.output_file_path, self.args.write_mode, encoding="utf8") as fout:
+        with self.args.output_file_path.open(self.args.write_mode, encoding="utf8") as fout:
             fout.writelines(
                 [
                     "# HATS verification results for\n",
