Skip to content

Commit 1476941

Browse files
Rolling aggregation of ZTF data (#164)
* Move test file * Make period test more frugal * Numpy 2.0 warning * Update test wrappers * Add functionality to aggregate ZTF data * PEP8 * Make name more generic * Add time management * PEP8 * hdfs utilities
1 parent 5f01e15 commit 1476941

21 files changed

+286
-99
lines changed

fink_utils/broker/avroUtils.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,7 @@
1717
import io
1818
import fastavro
1919

20-
from fink_utils.test.tester import regular_unit_tests
20+
from fink_utils.tester import regular_unit_tests
2121

2222
__all__ = ["writeavrodata", "readschemadata", "readschemafromavrofile"]
2323

fink_utils/broker/distributionUtils.py

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -25,8 +25,6 @@
2525
from pyspark.sql.functions import struct, lit
2626
from pyspark.sql.avro.functions import to_avro as to_avro_native
2727

28-
# from fink_utils.test.tester import spark_unit_tests
29-
3028

3129
def get_kafka_df(
3230
df: DataFrame,

fink_utils/broker/sparkUtils.py

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -24,8 +24,6 @@
2424

2525
from fink_utils.broker.avroUtils import readschemafromavrofile
2626

27-
# from fink_utils.test.tester import spark_unit_tests
28-
2927

3028
def from_avro(dfcol: Column, jsonformatschema: str) -> Column:
3129
"""Decode the Avro data contained in a DataFrame column into a struct.

fink_utils/cutouts/utils.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,7 @@
1717
from astropy.io import fits
1818
import numpy as np
1919

20-
from fink_utils.test.tester import regular_unit_tests
20+
from fink_utils.tester import regular_unit_tests
2121

2222

2323
def unzip_cutout(stamp):

fink_utils/hdfs/utils.py

Lines changed: 48 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,48 @@
1+
# Copyright 2025 AstroLab Software
2+
# Author: Julien Peloton
3+
#
4+
# Licensed under the Apache License, Version 2.0 (the "License");
5+
# you may not use this file except in compliance with the License.
6+
# You may obtain a copy of the License at
7+
#
8+
# http://www.apache.org/licenses/LICENSE-2.0
9+
#
10+
# Unless required by applicable law or agreed to in writing, software
11+
# distributed under the License is distributed on an "AS IS" BASIS,
12+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13+
# See the License for the specific language governing permissions and
14+
# limitations under the License.
15+
"""Contains functionalities to work with HDFS"""
16+
17+
from pyspark.sql import SparkSession
18+
19+
20+
def path_exist(path: str) -> bool:
    """Check if a path exists on Spark shared filesystem (HDFS or S3)

    The check goes through the JVM Hadoop ``FileSystem`` API obtained from
    the active ``SparkSession``, so it works for any filesystem scheme the
    cluster is configured for (``hdfs://``, ``s3a://``, ...). Glob patterns
    in ``path`` are supported.

    Parameters
    ----------
    path : str
        Path (or glob pattern) to check

    Returns
    -------
    bool
        True if the path exists (i.e. matches at least one entry),
        False otherwise
    """
    spark = SparkSession.builder.getOrCreate()

    jvm = spark._jvm
    jsc = spark._jsc

    conf = jsc.hadoopConfiguration()
    uri = jvm.java.net.URI(path)

    fs = jvm.org.apache.hadoop.fs.FileSystem.get(uri, conf)

    path_glob = jvm.org.apache.hadoop.fs.Path(path)
    status_list = fs.globStatus(path_glob)

    # Hadoop's globStatus returns Java null (None through py4j) when the
    # pattern contains no glob characters and the path does not exist --
    # guard against it, otherwise list(None) raises TypeError.
    return status_list is not None and len(list(status_list)) > 0

fink_utils/photometry/utils.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,7 @@
1414
# limitations under the License.
1515
import numpy as np
1616

17-
from fink_utils.test.tester import regular_unit_tests
17+
from fink_utils.tester import regular_unit_tests
1818

1919

2020
def is_source_behind(distnr: float, chinr: float = None, sharpnr: float = None) -> bool:

fink_utils/spark/partitioning.py

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -20,8 +20,6 @@
2020
import pandas as pd
2121
from astropy.time import Time
2222

23-
# from fink_utils.test.tester import spark_unit_tests_broker
24-
2523

2624
@pandas_udf(TimestampType(), PandasUDFType.SCALAR)
2725
def convert_to_millitime(jd: pd.Series, format=None, now=None):

fink_utils/sso/periods.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -34,7 +34,7 @@
3434

3535
import logging
3636

37-
from fink_utils.test.tester import regular_unit_tests
37+
from fink_utils.tester import regular_unit_tests
3838

3939
_LOG = logging.getLogger(__name__)
4040

@@ -258,10 +258,10 @@ def estimate_synodic_period(
258258
Examples
259259
--------
260260
>>> ssnamenr = 2363
261-
>>> P, chi2 = estimate_synodic_period(ssnamenr, flavor="SHG1G2", Nterms_base=2)
262-
>>> assert int(P) == 20, P
261+
>>> P, chi2 = estimate_synodic_period(ssnamenr, flavor="SHG1G2", Nterms_base=1)
262+
>>> assert int(P) < 48, P
263263
264-
>>> P_HG, chi2_HG = estimate_synodic_period(ssnamenr, flavor="HG", Nterms_base=2)
264+
>>> P_HG, chi2_HG = estimate_synodic_period(ssnamenr, flavor="HG", Nterms_base=1)
265265
>>> assert chi2 < chi2_HG, (chi2, chi2_HG)
266266
267267
# by default we apply the light travel correction. Disable it.

fink_utils/sso/spins.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,7 @@
1717
from scipy import linalg
1818

1919
from fink_utils.sso.utils import estimate_axes_ratio
20-
from fink_utils.test.tester import regular_unit_tests
20+
from fink_utils.tester import regular_unit_tests
2121

2222

2323
def sort_quantity_by_filter(filter, quantity):
@@ -910,7 +910,7 @@ def fit_legacy_models(
910910
lower_bounds = np.concatenate((lower_bounds, bounds[0]))
911911
upper_bounds = np.concatenate((upper_bounds, bounds[1]))
912912

913-
if not np.alltrue([i == i for i in magpsf_red]):
913+
if not np.all([i == i for i in magpsf_red]):
914914
outdic = {"fit": 1, "status": -2}
915915
return outdic
916916

0 commit comments

Comments
 (0)