
Commit 1e83839

rename env to settings
1 parent f9c0476 commit 1e83839

File tree (5 files changed: +17 -17 lines changed)

  src/mdio/converters/mdio.py
  src/mdio/converters/segy.py
  src/mdio/segy/_workers.py
  src/mdio/segy/blocked_io.py
  src/mdio/segy/parsers.py

src/mdio/converters/mdio.py

Lines changed: 2 additions & 2 deletions
@@ -68,7 +68,7 @@ def mdio_to_segy( # noqa: PLR0912, PLR0913, PLR0915
     >>> output_path = UPath("prefix/file.segy")
     >>> mdio_to_segy(input_path, output_path)
     """
-    env = MDIOSettings()
+    settings = MDIOSettings()

     input_path = _normalize_path(input_path)
     output_path = _normalize_path(output_path)
@@ -145,7 +145,7 @@ def mdio_to_segy( # noqa: PLR0912, PLR0913, PLR0915
     if client is not None:
         block_records = block_records.compute()
     else:
-        block_records = block_records.compute(num_workers=env.export_cpus)
+        block_records = block_records.compute(num_workers=settings.export_cpus)

     ordered_files = [rec.path for rec in block_records.ravel() if rec != 0]
     ordered_files = [output_path] + ordered_files

src/mdio/converters/segy.py

Lines changed: 9 additions & 9 deletions
@@ -92,17 +92,17 @@ def grid_density_qc(grid: Grid, num_traces: int) -> None:
         GridTraceSparsityError: If the sparsity ratio exceeds `MDIO__GRID__SPARSITY_RATIO_LIMIT`
             and `MDIO_IGNORE_CHECKS` is not set to a truthy value (e.g., "1", "true").
     """
-    env = MDIOSettings()
+    settings = MDIOSettings()
     # Calculate total possible traces in the grid (excluding sample dimension)
     grid_traces = np.prod(grid.shape[:-1], dtype=np.uint64)

     # Handle division by zero if num_traces is 0
     sparsity_ratio = float("inf") if num_traces == 0 else grid_traces / num_traces

     # Fetch and validate environment variables
-    warning_ratio = env.grid_sparsity_ratio_warn
-    error_ratio = env.grid_sparsity_ratio_limit
-    ignore_checks = env.ignore_checks
+    warning_ratio = settings.grid_sparsity_ratio_warn
+    error_ratio = settings.grid_sparsity_ratio_limit
+    ignore_checks = settings.ignore_checks

     # Check sparsity
     should_warn = sparsity_ratio > warning_ratio
@@ -360,9 +360,9 @@ def _populate_coordinates(


 def _add_segy_file_headers(xr_dataset: xr_Dataset, segy_file_info: SegyFileInfo) -> xr_Dataset:
-    env = MDIOSettings()
+    settings = MDIOSettings()

-    if not env.save_segy_file_header:
+    if not settings.save_segy_file_header:
         return xr_dataset

     expected_rows = 40
@@ -386,7 +386,7 @@ def _add_segy_file_headers(xr_dataset: xr_Dataset, segy_file_info: SegyFileInfo)
             "binaryHeader": segy_file_info.binary_header_dict,
         }
     )
-    if env.raw_headers:
+    if settings.raw_headers:
         raw_binary_base64 = base64.b64encode(segy_file_info.raw_binary_headers).decode("ascii")
         xr_dataset["segy_file_header"].attrs.update({"rawBinaryHeader": raw_binary_base64})

@@ -524,7 +524,7 @@ def segy_to_mdio( # noqa PLR0913
     Raises:
         FileExistsError: If the output location already exists and overwrite is False.
     """
-    env = MDIOSettings()
+    settings = MDIOSettings()

     _validate_spec_in_template(segy_spec, mdio_template)

@@ -555,7 +555,7 @@ def segy_to_mdio( # noqa PLR0913
     _, non_dim_coords = _get_coordinates(grid, segy_headers, mdio_template)
     header_dtype = to_structured_type(segy_spec.trace.header.dtype)

-    if env.raw_headers:
+    if settings.raw_headers:
         if zarr.config.get("default_zarr_format") == ZarrFormat.V2:
             logger.warning("Raw headers are only supported for Zarr v3. Skipping raw headers.")
         else:
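
For readers new to the grid_density_qc hunk in src/mdio/converters/segy.py above: the function compares how many positions exist in the grid against how many traces actually populate them, warning at one threshold and erroring at a higher one unless checks are suppressed. Below is a minimal standalone sketch of that decision logic; the shapes, trace count, and threshold values are made up, and the real code raises GridTraceSparsityError with a more detailed message.

# Standalone sketch of the sparsity decision shown in the hunk above.
# All numbers and the exception type are illustrative; the real thresholds
# come from MDIOSettings, and the real error is GridTraceSparsityError.
import numpy as np

grid_shape = (400, 500, 1500)  # e.g. inline, crossline, sample (example only)
num_traces = 40_000            # traces actually present in the SEG-Y (example only)

grid_traces = np.prod(grid_shape[:-1], dtype=np.uint64)  # exclude sample dimension
sparsity_ratio = float("inf") if num_traces == 0 else grid_traces / num_traces

warning_ratio, error_ratio, ignore_checks = 2.0, 10.0, False  # assumed defaults

if sparsity_ratio > error_ratio and not ignore_checks:
    raise RuntimeError(f"Grid too sparse: ratio {sparsity_ratio:.1f} exceeds {error_ratio}")
if sparsity_ratio > warning_ratio:
    print(f"Warning: sparse grid, ratio {sparsity_ratio:.1f} exceeds {warning_ratio}")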

src/mdio/segy/_workers.py

Lines changed: 2 additions & 2 deletions
@@ -46,13 +46,13 @@ def header_scan_worker(
     Returns:
         HeaderArray parsed from SEG-Y library.
     """
-    env = MDIOSettings()
+    settings = MDIOSettings()

     segy_file = SegyFileWrapper(**segy_file_kwargs)

     slice_ = slice(*trace_range)

-    trace_header = segy_file.trace[slice_].header if env.cloud_native else segy_file.header[slice_]
+    trace_header = segy_file.trace[slice_].header if settings.cloud_native else segy_file.header[slice_]

     if subset is not None:
         # struct field selection needs a list, not a tuple; a subset is a tuple from the template.

src/mdio/segy/blocked_io.py

Lines changed: 2 additions & 2 deletions
@@ -69,7 +69,7 @@ def to_zarr( # noqa: PLR0913, PLR0915
     Returns:
         None
     """
-    env = MDIOSettings()
+    settings = MDIOSettings()

     data = dataset[data_variable_name]

@@ -82,7 +82,7 @@ def to_zarr( # noqa: PLR0913, PLR0915

     # For Unix async writes with s3fs/fsspec & multiprocessing, use 'spawn' instead of default
     # 'fork' to avoid deadlocks on cloud stores. Slower but necessary. Default on Windows.
-    num_workers = min(num_chunks, env.import_cpus)
+    num_workers = min(num_chunks, settings.import_cpus)
     context = mp.get_context("spawn")
     executor = ProcessPoolExecutor(max_workers=num_workers, mp_context=context)

src/mdio/segy/parsers.py

Lines changed: 2 additions & 2 deletions
@@ -40,7 +40,7 @@ def parse_headers(
         HeaderArray. Keys are the index names, values are numpy arrays of parsed headers for the
         current block. Array is of type byte_type except IBM32 which is mapped to FLOAT32.
     """
-    env = MDIOSettings()
+    settings = MDIOSettings()

     trace_count = num_traces
     n_blocks = int(ceil(trace_count / block_size))
@@ -52,7 +52,7 @@ def parse_headers(

         trace_ranges.append((start, stop))

-    num_workers = min(n_blocks, env.import_cpus)
+    num_workers = min(n_blocks, settings.import_cpus)

     tqdm_kw = {"unit": "block", "dynamic_ncols": True}
     # For Unix async writes with s3fs/fsspec & multiprocessing, use 'spawn' instead of default
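
All five files construct the same MDIOSettings object, and the attributes read from it across this commit are import_cpus, export_cpus, grid_sparsity_ratio_warn, grid_sparsity_ratio_limit, ignore_checks, cloud_native, save_segy_file_header, and raw_headers. The sketch below is a hypothetical reconstruction for orientation only: it assumes a pydantic-settings BaseSettings class, and the defaults, types, and environment-variable prefix are guesses rather than the actual MDIO source.

# Hypothetical sketch: field names are taken from the attributes used in this commit,
# but the base class, defaults, types, and env prefix are assumptions, not MDIO source.
from pydantic_settings import BaseSettings, SettingsConfigDict


class MDIOSettings(BaseSettings):
    """Environment-driven configuration read by the converters and SEG-Y workers."""

    model_config = SettingsConfigDict(env_prefix="MDIO_")  # assumed prefix

    import_cpus: int = 4                     # worker count for SEG-Y -> MDIO import
    export_cpus: int = 4                     # worker count for MDIO -> SEG-Y export
    grid_sparsity_ratio_warn: float = 2.0    # warn when grid/trace ratio exceeds this
    grid_sparsity_ratio_limit: float = 10.0  # error above this unless checks are ignored
    ignore_checks: bool = False              # suppress the sparsity error when truthy
    cloud_native: bool = False               # trace-wise header reads suited to object stores
    save_segy_file_header: bool = True       # attach text/binary SEG-Y headers to the dataset
    raw_headers: bool = False                # also keep the base64-encoded raw binary header


# Usage mirrors the renamed call sites in this commit:
settings = MDIOSettings()
num_workers = min(8, settings.import_cpus)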

0 commit comments