Commit 3f1eca1

Merge pull request #27 from Oct-HI/astronomics_test
Small fix to scalar column
2 parents 1e7eb37 + a785111

File tree

1 file changed: +6 −9 lines changed

  • dataplug/formats/astronomics


dataplug/formats/astronomics/ms.py

Lines changed: 6 additions & 9 deletions
@@ -308,18 +308,15 @@ def _cleanup_ms(input_ms_path, output_ms_path, num_rows, starting_range, static_
         if colname not in ['FLAG_ROW', 'TIME', 'TIME_CENTROID']:
             continue
 
-        print(colname)
         try:
-            print("I'm working here")
 
             desc = ms.getcoldesc(colname)
             ndim = desc.get('ndim', 0)
 
-            # Case 1: scalar column (optimized with partial access)
             if ndim == 0:
-                print("scalar (optimized)")
                 sliced_data = ms.getcol(colname, startrow=starting_range, nrow=fixed_rows)
-                ms.putcol(colname, sliced_data, startrow=0)
+                ms.putcol(colname, value=sliced_data, startrow=0, nrow=fixed_rows)
+                print("[DATAPLUG] Scalar column processed successfully.")
                 continue
 
 
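For context, the fix bounds the write to exactly fixed_rows rows instead of letting putcol infer the extent from the value. A minimal standalone sketch of the same pattern with python-casacore's table API (the MS path, column name, and row numbers below are illustrative, not taken from this repository):

    from casacore.tables import table

    ms = table("demo.ms", readonly=False)  # hypothetical MeasurementSet path

    colname = "TIME"        # a scalar column, like FLAG_ROW/TIME/TIME_CENTROID above
    starting_range = 100    # illustrative slice start
    fixed_rows = 50         # illustrative slice length

    desc = ms.getcoldesc(colname)
    if desc.get('ndim', 0) == 0:
        # Read only the rows of interest, then write them back at row 0,
        # bounding the write with an explicit nrow.
        sliced_data = ms.getcol(colname, startrow=starting_range, nrow=fixed_rows)
        ms.putcol(colname, value=sliced_data, startrow=0, nrow=fixed_rows)

    ms.close()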
@@ -341,12 +338,11 @@ def _cleanup_ms(input_ms_path, output_ms_path, num_rows, starting_range, static_
                 ms.putcolslice(colname, buf, blc, trc,
                                startrow=0,
                                nrow=fixed_rows)
+            print("[DATAPLUG] Experimental: Array column processed successfully.")
 
         except Exception as e:
-            # print(f"Error processing column '{colname}': {e}")
             continue
 
-    # Copy the first rows into the new MS
     selection = ms.selectrows(list(range(0, fixed_rows)))
     selection.copy(output_ms_path, deep=True)
 
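The unchanged tail of this hunk is what materializes the trimmed MS: selectrows builds a reference table over the first fixed_rows rows, and a deep copy writes it out as an independent table. A hedged sketch of that step on its own (paths and row count are illustrative):

    from casacore.tables import table

    ms = table("input.ms")   # illustrative source MS
    fixed_rows = 50          # illustrative number of rows to keep

    # selectrows returns a reference table restricted to the given row
    # numbers; deep=True copies the data so output.ms stands alone.
    selection = ms.selectrows(list(range(0, fixed_rows)))
    selection.copy("output.ms", deep=True)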
@@ -413,9 +409,10 @@ def get(self):
 def ms_partitioning_strategy(cloud_object: CloudObject, num_chunks: int):
 
     total_rows = cloud_object.get_attribute("total_rows")
-    rows_per_timestamp = cloud_object.get_attribute("rows_per_time")
-
+    rows_per_timestamp = int(cloud_object.get_attribute("rows_per_time"))
+    print(f"[DATAPLUG] Total rows: {total_rows}, Rows per timestamp: {rows_per_timestamp}")
     max_chunks = total_rows // rows_per_timestamp
+    print(f"[DATAPLUG] Partition cap: {max_chunks}") #DEBUG
     num_chunks = min(num_chunks, max_chunks)
 
     slices = []
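The int() cast guards against get_attribute returning the stored value as a string, and the floor division then caps the requested chunk count at one chunk per timestamp. A standalone sketch of that capping logic (the helper name is hypothetical):

    def cap_num_chunks(total_rows: int, rows_per_timestamp: int, num_chunks: int) -> int:
        # Each chunk must cover at least one full timestamp, so the number
        # of usable partitions is bounded by the timestamp count.
        max_chunks = total_rows // rows_per_timestamp
        return min(num_chunks, max_chunks)

    # Example: 1000 rows at 40 rows per timestamp allow at most 25 chunks.
    assert cap_num_chunks(1000, 40, 32) == 25
    assert cap_num_chunks(1000, 40, 10) == 10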
