Skip to content

Commit b5f6355

Browse files
authored
Merge pull request #18 from erykoff/consdb
Add support for LSST consdb database input.
2 parents 4836b1a + 1d27a6e commit b5f6355

21 files changed

+397
-94
lines changed

.github/workflows/python-package.yml

Lines changed: 29 additions & 30 deletions
Original file line numberDiff line numberDiff line change
@@ -15,35 +15,34 @@ jobs:
1515
runs-on: ubuntu-latest
1616
strategy:
1717
matrix:
18-
python-version: ["3.8", "3.9", "3.10"]
18+
python-version: ["3.10", "3.11", "3.12"]
1919

2020
steps:
21-
- uses: actions/checkout@v2
22-
- name: Set up Python ${{ matrix.python-version }}
23-
uses: actions/setup-python@v2
24-
with:
25-
python-version: ${{ matrix.python-version }}
26-
- name: Prepare conda
27-
run: |
28-
export PATH="$CONDA/bin:$PATH"
29-
conda config --set always_yes yes --set changeps1 no
30-
conda update -q conda
31-
conda info -a
32-
conda init bash
33-
- name: Install dependencies
34-
run: |
35-
export PATH="$CONDA/bin:$PATH"
36-
conda create -q -n testenv python=${{ matrix.python-version }} numpy hpgeom astropy healsparse fitsio esutil LSSTDESC.Coord pyyaml setuptools_scm setuptools_scm_git_archive flake8 pytest pytest-flake8 -c conda-forge
37-
source activate testenv
38-
pip install --no-deps .
39-
- name: Lint with flake8
40-
run: |
41-
export PATH="$CONDA/bin:$PATH"
42-
source activate testenv
43-
# stop the build if it fails flake8 with default setup.cfg
44-
flake8 . --count --show-source --statistics
45-
- name: Test with pytest
46-
run: |
47-
export PATH="$CONDA/bin:$PATH"
48-
source activate testenv
49-
pytest
21+
- uses: actions/checkout@v4
22+
with:
23+
# Need to clone everything to determine version from git.
24+
fetch-depth: 0
25+
26+
- name: Set up Python
27+
uses: actions/setup-python@v4
28+
with:
29+
python-version: "3.12"
30+
cache: "pip"
31+
cache-dependency-path: "requirements.txt"
32+
33+
- name: Build and install
34+
run: |
35+
python -m pip install --upgrade pip setuptools
36+
python -m pip install pytest flake8
37+
if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
38+
python -m pip install .
39+
40+
- name: Lint with flake8
41+
run: |
42+
# stop the build if it fails flake8 with default pyproject.toml
43+
flake8 . --count --show-source --statistics
44+
45+
- name: Run tests
46+
run: |
47+
cd tests
48+
pytest

LICENSE

Lines changed: 4 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,4 @@
1-
Copyright (c) 2020, Eli Rykoff (LSST Dark Energy Science
2-
Collaboration)
1+
Copyright (c) 2020, Eli Rykoff (LSST Dark Energy Science Collaboration)
32
All rights reserved.
43

54
Redistribution and use in source and binary forms, with or without
@@ -12,9 +11,9 @@ modification, are permitted provided that the following conditions are met:
1211
this list of conditions and the following disclaimer in the documentation
1312
and/or other materials provided with the distribution.
1413

15-
* Neither the name of supreme nor the names of its
16-
contributors may be used to endorse or promote products derived from
17-
this software without specific prior written permission.
14+
* Neither the name of the copyright holder nor the names of its contributors
15+
may be used to endorse or promote products derived from this software without
16+
specific prior written permission.
1817

1918
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
2019
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE

decasu/configuration.py

Lines changed: 7 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -35,9 +35,9 @@ def _default_dec_corner_fields():
3535

3636

3737
@dataclass
38-
class Configuration(object):
38+
class Configuration:
3939
"""
40-
Decasu configuration object.
40+
Decasu configuration class.
4141
"""
4242
# Mandatory fields
4343
outbase: str
@@ -51,6 +51,7 @@ class Configuration(object):
5151
extra_fields: Dict[str, str] = field(default_factory=_default_extra_fields)
5252
band_replacement: Dict[str, str] = field(default_factory=_default_band_replacement)
5353
use_lsst_db: bool = False
54+
use_lsst_consdb: bool = False
5455
lsst_db_additional_selection: str = ""
5556
time_bin: int = -1
5657
border: int = 15
@@ -71,6 +72,7 @@ class Configuration(object):
7172
band_field: str = 'band'
7273
mjd_field: str = 'mjd_obs'
7374
skyvar_field: str = 'skyvar'
75+
fwhm_field: str = 'fwhm'
7476
bad_amps: Dict[int, list] = field(default_factory=_default_bad_amps)
7577
bad_ccds: List[int] = field(default_factory=_default_bad_ccds)
7678
latitude: float = -30.1690
@@ -85,11 +87,13 @@ def __post_init__(self):
8587
self._validate()
8688

8789
def _validate(self):
88-
if self.use_lsst_db:
90+
if self.use_lsst_db or self.use_lsst_consdb:
8991
try:
9092
import lsst.obs.lsst # noqa: F401
9193
except ImportError:
9294
raise RuntimeError("Cannot use lsst db without Rubin Science Pipelines setup.")
95+
if self.use_lsst_db and self.use_lsst_consdb:
96+
raise RuntimeError("Cannot set both use_lsst_db and use_lsst_consdb.")
9397

9498
if self.use_two_amps and self.mask_lsstcam_bad_amps:
9599
raise RuntimeError("Cannot set both use_two_amps and mask_lsstcam_bad_amps.")

decasu/decasu_hpix_mapper.py

Lines changed: 15 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -11,24 +11,36 @@ def main():
1111
parser.add_argument('-c', '--configfile', action='store', type=str, required=True,
1212
help='YAML config file')
1313
parser.add_argument('-i', '--infile', action='store', type=str, required=True,
14-
help='Input fits or database file')
14+
help='Input fits or database file or database connection string')
1515
parser.add_argument('-b', '--bands', action='store', type=str, required=False,
1616
help='Bands to generate map for, comma delimited')
1717
parser.add_argument('-n', '--ncores', action='store', type=int, required=False,
1818
default=1, help='Number of cores to run on.')
1919
parser.add_argument('-o', '--outputpath', action='store', type=str, required=True,
2020
help='Output path')
21+
parser.add_argument('-B', '--outputbase', action='store', type=str, required=False,
22+
help='Output filename base; will replace outbase in config.')
2123
parser.add_argument('-p', '--pixels', action='store', type=str, required=False,
2224
help='Pixels to run on, comma delimited')
2325
parser.add_argument('-s', '--simple', action='store_true', required=False,
2426
help='Run in simple mode (nexp only)')
2527
parser.add_argument('-k', '--keep_intermediate_files', action='store_true',
2628
required=False, help='Keep intermediate files')
29+
parser.add_argument('-q', '--query', required=False,
30+
help='Additional query string; will replace lsst_db_additional_selection config.')
31+
parser.add_argument('-m', '--make_map_images', action='store_true', required=False,
32+
help='Automatically make skyproj map images?')
2733

2834
args = parser.parse_args()
2935

3036
config = Configuration.load_yaml(args.configfile)
3137

38+
if args.outputbase is not None:
39+
config.outbase = args.outputbase
40+
41+
if args.query is not None:
42+
config.lsst_db_additional_selection = args.query
43+
3244
if args.bands is None:
3345
bands = []
3446
else:
@@ -45,4 +57,5 @@ def main():
4557
else:
4658
mapper = MultiHealpixMapper(config, args.outputpath, ncores=args.ncores)
4759
mapper(args.infile, bands=bands, pixels=pixels,
48-
clear_intermediate_files=not args.keep_intermediate_files)
60+
clear_intermediate_files=not args.keep_intermediate_files,
61+
make_map_images=args.make_map_images)

decasu/healpix_consolidator.py

Lines changed: 25 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@
22
import healsparse
33

44

5-
class HealpixConsolidator(object):
5+
class HealpixConsolidator:
66
"""
77
Consolidate several maps into one.
88
@@ -13,25 +13,45 @@ class HealpixConsolidator(object):
1313
clear_intermediate_files : `bool`
1414
Clear input files when done?
1515
"""
16-
def __init__(self, config, clear_intermediate_files):
16+
def __init__(self, config, clear_intermediate_files, make_map_images=False):
1717
self.config = config
1818
self.clear_intermediate_files = clear_intermediate_files
19+
self.make_map_images = make_map_images
1920

20-
def __call__(self, fname, mapfiles):
21+
def __call__(self, fname, mapfiles, descr):
2122
"""
2223
Consolidate a list of mapfiles, and delete input mapfiles
2324
if clear_intermediate_files is True.
2425
2526
Parameters
2627
----------
2728
fname : `str`
28-
Output filename
29+
Output filename
2930
mapfiles : `list`
30-
Input list of files
31+
Input list of files
32+
descr : `str`
33+
Description string.
3134
"""
3235
print("Consolidating %d maps into %s" % (len(mapfiles), fname))
3336
healsparse.cat_healsparse_files(mapfiles, fname)
3437

38+
if self.make_map_images:
39+
from matplotlib.backends.backend_agg import FigureCanvasAgg
40+
from matplotlib.figure import Figure
41+
import skyproj
42+
43+
m = healsparse.HealSparseMap.read(fname)
44+
45+
fig = Figure(figsize=(10, 6))
46+
FigureCanvasAgg(fig)
47+
ax = fig.add_subplot(111)
48+
49+
sp = skyproj.McBrydeSkyproj(ax=ax)
50+
sp.draw_hspmap(m, zoom=True)
51+
sp.draw_colorbar(label=descr)
52+
skyprojfile = fname.replace(".hsp", "_skyproj.png")
53+
fig.savefig(skyprojfile)
54+
3555
if self.clear_intermediate_files:
3656
for f in mapfiles:
3757
os.unlink(f)

decasu/lsst_wcs_consdb.py

Lines changed: 165 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,165 @@
1+
import numpy as np
2+
import hpgeom as hpg
3+
4+
from astropy.table import Table
5+
import astropy.units as units
6+
from astropy.time import Time
7+
from astropy.coordinates import EarthLocation
8+
9+
from . import decasu_globals
10+
from .utils import compute_visit_iqr_and_optics_scale
11+
12+
try:
13+
import lsst.obs.lsst
14+
import lsst.sphgeom
15+
import psycopg
16+
lsst_imported = True
17+
except ImportError:
18+
lsst_imported = False
19+
20+
21+
class LsstWcsConsDbBuilder:
22+
"""
23+
Build a WCS table from the LSST Consolidated Database and get intersecting
24+
pixels.
25+
26+
Parameters
27+
----------
28+
config : `Configuration`
29+
decasu configuration object.
30+
dbfile : `str`
31+
Input database file.
32+
bands : `list`
33+
Bands to run. Empty list means use all.
34+
compute_pixels : `bool`, optional
35+
Compute pixels when rendering WCS?
36+
"""
37+
def __init__(self, config, dbstring, bands, compute_pixels=True):
38+
if not lsst_imported:
39+
raise RuntimeError("Cannot use LsstWcsConsDbBuilder without Rubin Science Pipelines.")
40+
41+
self.config = config
42+
self.compute_pixels = compute_pixels
43+
44+
query_string = (
45+
"SELECT cvq.eff_time, cvq.psf_sigma, "
46+
"cvq.sky_bg, cvq.sky_noise, cvq.zero_point, "
47+
"cv.detector, cv.visit_id, cv.s_region, "
48+
"v.band, v.exp_time, v.exp_midpt_mjd, v.sky_rotation "
49+
"FROM cdb_LSSTCam.ccdvisit1_quicklook as cvq, cdb_LSSTCam.ccdvisit1 as cv, "
50+
"cdb_LSSTCam.visit1 as v "
51+
)
52+
where_string = (
53+
"WHERE cvq.ccdvisit_id=cv.ccdvisit_id and "
54+
"cv.visit_id=v.visit_id and "
55+
"detector<189 and cvq.zero_point is not null "
56+
)
57+
58+
if len(self.config.lsst_db_additional_selection) > 0:
59+
where_string = where_string + " and " + self.config.lsst_db_additional_selection
60+
61+
if len(bands) > 0:
62+
where_string = where_string + " and v.band in (" + ",".join([f"'{band}'" for band in bands]) + ")"
63+
64+
where_string = where_string + f" and v.exp_midpt_mjd >= {self.config.mjd_min}"
65+
where_string = where_string + f" and v.exp_midpt_mjd <= {self.config.mjd_max}"
66+
67+
query_string = query_string + where_string + ";"
68+
69+
with psycopg.Connection.connect(dbstring) as conn:
70+
cur = conn.execute(query_string)
71+
rows = cur.fetchall()
72+
73+
db_table = Table(
74+
np.asarray(
75+
rows,
76+
dtype=[
77+
("eff_time", "f4"),
78+
("psf_sigma", "f4"),
79+
("sky_bg", "f4"),
80+
("sky_noise", "f4"),
81+
("zero_point", "f4"),
82+
("detector", "i4"),
83+
("visit_id", "i8"),
84+
("s_region", "U200"),
85+
("band", "U2"),
86+
("exptime", "f4"),
87+
("mjd", "f8"),
88+
("sky_rotation", "f4"),
89+
],
90+
),
91+
)
92+
93+
if len(bands) == 0:
94+
self.bands = np.unique(db_table["band"])
95+
else:
96+
self.bands = bands
97+
98+
print(f"Found {len(db_table)} detector visits for {len(self.bands)} bands.")
99+
100+
# Add extra columns.
101+
# Units of degrees.
102+
db_table["decasu_lst"] = np.zeros(len(db_table))
103+
# Units of electrons.
104+
db_table["skyvar"] = db_table["sky_noise"]**2.
105+
# Units of arcsec.
106+
db_table[config.fwhm_field] = 2.355*config.arcsec_per_pix*db_table["psf_sigma"]
107+
108+
print("Computing local sidereal time...")
109+
loc = EarthLocation(lat=config.latitude*units.degree,
110+
lon=config.longitude*units.degree,
111+
height=config.elevation*units.m)
112+
113+
t = Time(db_table[config.mjd_field], format="mjd", location=loc)
114+
lst = t.sidereal_time("apparent")
115+
db_table["decasu_lst"][:] = lst.to_value(units.degree)
116+
117+
# Compute a couple of additional psf quantities.
118+
db_table[f"{config.fwhm_field}_iqr"] = np.zeros(len(db_table))
119+
db_table[f"{config.fwhm_field}_optics_scale"] = np.zeros(len(db_table))
120+
121+
print('Computing fwhm scaled properties...')
122+
compute_visit_iqr_and_optics_scale(self.config, db_table)
123+
124+
instrument = lsst.obs.lsst.LsstCam()
125+
camera = instrument.getCamera()
126+
127+
decasu_globals.table = db_table
128+
decasu_globals.lsst_camera = camera
129+
130+
def __call__(self, row):
131+
"""
132+
Compute intersecting pixels for onw row.
133+
134+
Parameters
135+
----------
136+
row : `int`
137+
Row to compute intersecting pixels.
138+
139+
Returns
140+
-------
141+
wcs : `int`
142+
Placeholder.
143+
pixels : `list`
144+
List of nside = `config.nside_run` intersecting pixels.
145+
Returned if compute_pixels is True in initialization.
146+
centers : `tuple` [`float`]]
147+
"""
148+
if (row % 10000) == 0:
149+
print("Working on WCS index %d" % (row))
150+
151+
# Link to global table.
152+
self.table = decasu_globals.table
153+
154+
region_str = self.table["s_region"][row]
155+
156+
region = lsst.sphgeom.Region.from_ivoa_pos("".join(region_str.split("ICRS")).upper())
157+
centroid = lsst.sphgeom.LonLat(region.getCentroid())
158+
center = [centroid.getLon().asDegrees(), centroid.getLat().asDegrees()]
159+
160+
if self.compute_pixels:
161+
vertices = np.asarray([[v.x(), v.y(), v.z()] for v in region.getVertices()])
162+
pixels = hpg.query_polygon_vec(self.config.nside_run, vertices, inclusive=True, fact=16)
163+
return 0, pixels, center
164+
else:
165+
return 0, center

0 commit comments

Comments
 (0)