Skip to content

Commit 97aaeb0

Browse files
committed
Merge branch 'main' of github.com:Small-Bodies-Node/catch
2 parents 66d1d40 + 763c8c4 commit 97aaeb0

File tree

15 files changed

+509
-372
lines changed

15 files changed

+509
-372
lines changed

catch/__init__.py

Lines changed: 1 addition & 355 deletions
Original file line numberDiff line numberDiff line change
@@ -3,360 +3,6 @@
33
except ImportError:
44
__version__ = ""
55

6-
from sbsearch.target import FixedTarget
76
from .catch import Catch, IntersectionType # noqa: F401
8-
from . import stats
7+
from . import stats # noqa: F401
98
from .config import Config # noqa: F401
10-
11-
12-
def catch_cli(*args):
    """CATCH command-line script.

    Sub-commands:
        verify          connect to the database, verifying/creating tables
        sources         list available observation sources
        status/sources  show or update per-source summaries
        status/updates  summarize recent database updates
        status/queries  summarize recent queries
        moving          search for a moving target by designation
        fixed           search for a fixed RA/Dec target

    Results of ``moving``/``fixed`` searches are printed as an astropy
    table, or written to a file with ``-o``.
    """
    import sys
    import argparse
    import uuid

    from astropy.time import Time
    from astropy.table import Table

    from catch.config import _config_example

    parser = argparse.ArgumentParser(
        "catch", epilog=f"Configuration files are JSON-formatted:\n{_config_example}"
    )
    parser.add_argument("--config", help="CATCH configuration file")
    parser.add_argument("--database", help="use this database URI")
    parser.add_argument("--log", help="save log messages to this file")
    parser.add_argument(
        "--arc-limit", type=float, help="maximal arc length to search, radians"
    )
    parser.add_argument(
        "--time-limit", type=float, help="maximal time length to search, days"
    )
    parser.add_argument("--debug", action="store_true", help="debug mode")

    # define subparsers
    subparsers = parser.add_subparsers(help="sub-command help")

    verify = subparsers.add_parser(
        "verify", help="connect to database and verify and create tables"
    )
    verify.set_defaults(command="verify")

    status_sources = subparsers.add_parser(
        "status/sources", help="show or update source summaries"
    )
    status_sources.set_defaults(command="status/sources")

    status_updates = subparsers.add_parser(
        "status/updates", help="summarize recent updates to the database"
    )
    status_updates.set_defaults(command="status/updates")

    status_queries = subparsers.add_parser(
        "status/queries", help="summarize recent queries"
    )
    status_queries.set_defaults(command="status/queries")

    list_sources = subparsers.add_parser("sources", help="show available data sources")
    list_sources.set_defaults(command="sources")

    moving = subparsers.add_parser("moving", help="search for a moving object")
    moving.set_defaults(command="moving")

    fixed = subparsers.add_parser("fixed", help="search for a fixed object")
    fixed.set_defaults(command="fixed")

    # subparser arguments
    # moving and fixed have an overlap in parameters
    moving.add_argument("desg", help="object designation")
    moving.add_argument(
        "--padding",
        type=float,
        default=0,
        help="additional padding around the ephemeris to search, arcmin",
    )

    fixed.add_argument("ra", help="Right ascension")
    fixed.add_argument("dec", help="Declination")
    fixed.add_argument(
        "--unit",
        default="hourangle,deg",
        help="RA, Dec unit, may be a single string, or two separated"
        " by a comma (default: hourangle,deg)",
    )
    fixed.add_argument(
        "--radius",
        dest="padding",
        type=float,
        default=0,
        help="search a circle around the point with this radius, arcmin",
    )
    fixed.add_argument(
        "--intersection_type",
        choices=list(IntersectionType.__members__.keys()),
        default="ImageIntersectsArea",
        # NOTE: help text previously claimed the default was
        # "AreaIntersectsImage", contradicting the actual default above.
        help="areal intersection requirement (default: ImageIntersectsArea)",
    )

    for subparser in (moving, fixed):
        subparser.add_argument(
            "--source",
            dest="sources",
            action="append",
            help="search this observation source (may be used multiple times)",
        )
        subparser.add_argument(
            "--start-date",
            dest="start_date",
            type=Time,
            help="search after this date/time",
        )
        subparser.add_argument(
            "--stop-date",
            dest="stop_date",
            type=Time,
            help="search before this date/time",
        )

    moving.add_argument(
        "--force", dest="cached", action="store_false", help="do not use cached results"
    )

    for subparser in (moving, fixed):
        subparser.add_argument("-o", help="write table to this file")

    status_sources.add_argument(
        "--update", action="store_true", help="update source status tables"
    )

    args = parser.parse_args()

    # no sub-command given?  print usage and quit
    try:
        getattr(args, "command")
    except AttributeError:
        parser.print_help()
        sys.exit()

    if args.command == "verify":
        print("Verify databases and create as needed.\n")

    rows = []
    config = Config.from_args(args)
    catch: Catch
    with Catch.with_config(config) as catch:
        if args.command == "verify":
            # table verification/creation happens when the connection opens
            pass
        elif args.command == "sources":
            print("Available sources:\n *", "\n * ".join(catch.sources.keys()))
        elif args.command == "status/sources":
            if args.update:
                print("Updating survey statistics.")
                stats.update_statistics(catch)
            tab = Table(stats.source_statistics(catch))
            tab.pprint_all()
        elif args.command == "status/updates":
            tab = Table(stats.recently_added_observations(catch))
            if len(tab) == 0:
                print("# No data")
            else:
                tab.pprint_all()
        elif args.command == "status/queries":
            tab = Table(stats.recent_queries(catch))
            if len(tab) == 0:
                print("# No data")
            else:
                tab.pprint_all()
        elif args.command == "moving":
            catch.start_date = args.start_date
            catch.stop_date = args.stop_date
            catch.padding = args.padding
            job_id = uuid.uuid4()
            catch.query(
                args.desg,
                job_id,
                sources=args.sources,
                cached=args.cached,
            )
            columns = set()
            # catch.caught returns a list of rows.
            for row in catch.caught(job_id):
                r = {}
                # Each row consists of a Found and an Observation object. The
                # Observation object will be a subclass, e.g.,
                # NeatPalomarTricam, or SkyMapperDR4.
                for data_object in row:
                    # Aggregate fields and values from each data object
                    for k, v in _serialize_object(data_object):
                        r[k] = v

                columns = columns.union(set(r.keys()))

                r["cutout_url"] = row.Observation.cutout_url(
                    row.Found.ra, row.Found.dec
                )

                r["date"] = Time(row.Found.mjd, format="mjd").iso

                rows.append(r)
        elif args.command == "fixed":
            catch.start_date = args.start_date
            catch.stop_date = args.stop_date
            catch.padding = args.padding
            catch.intersection_type = IntersectionType[args.intersection_type]
            job_id = uuid.uuid4()
            target = FixedTarget.from_radec(args.ra, args.dec, unit=args.unit)
            observations = catch.query(
                target,
                job_id,
                sources=args.sources,
            )
            columns = set()
            for obs in observations:
                r = {}
                # Aggregate fields and values from each data object, which may
                # be from different surveys
                for k, v in _serialize_object(obs):
                    r[k] = v

                columns = columns.union(set(r.keys()))

                r["cutout_url"] = obs.cutout_url(target.ra.deg, target.dec.deg)
                r["date"] = Time((obs.mjd_start + obs.mjd_stop) / 2, format="mjd").iso

                rows.append(r)

    if args.command == "moving":
        if rows == []:
            print("# none found")
        else:
            # make sure all rows have all columns (fill missing with None)
            for r in rows:
                for col in columns:
                    r.setdefault(col, None)
            tab = Table(rows=rows)

            # add a column for the target
            tab["designation"] = args.desg

            # re-order columns: well-known names first, survey-specific after
            all_colnames = tab.colnames
            base_colnames = [
                "designation",
                "source",
                "date",
                "mjd",
                "ra",
                "dec",
                "dra",
                "ddec",
                "vmag",
                "rh",
                "drh",
                "delta",
                "phase",
                "elong",
                "sangle",
                "vangle",
                "true_anomaly",
                "unc_a",
                "unc_b",
                "unc_theta",
                "retrieved",
                "filter",
                "exposure",
                "mjd_start",
                "mjd_stop",
                "fov",
                "airmass",
                "seeing",
                "maglimit",
                "found_id",
                "object_id",
                "observation_id",
                "orbit_id",
                "query_id",
                "archive_url",
                "cutout_url",
            ]
            colnames = base_colnames + list(set(all_colnames) - set(base_colnames))
            tab = tab[colnames]

            if args.o:
                tab.write(args.o, format="ascii.fixed_width_two_line", overwrite=True)
            else:
                tab.pprint_all()

    if args.command == "fixed":
        if rows == []:
            print("# none found")
        else:
            # make sure all rows have all columns (fill missing with None)
            for r in rows:
                for col in columns:
                    r.setdefault(col, None)
            tab = Table(rows=rows)

            # add columns for the target in the user's format
            tab["ra"] = args.ra
            tab["dec"] = args.dec

            # re-order columns: well-known names first, survey-specific after
            all_colnames = tab.colnames
            base_colnames = [
                "source",
                "date",
                "ra",
                "dec",
                "filter",
                "exposure",
                "mjd_start",
                "mjd_stop",
                "fov",
                "airmass",
                "seeing",
                "maglimit",
                "observation_id",
                "archive_url",
                "cutout_url",
            ]
            colnames = base_colnames + list(set(all_colnames) - set(base_colnames))
            tab = tab[colnames]

            if args.o:
                tab.write(args.o, format="ascii.fixed_width_two_line", overwrite=True)
            else:
                tab.pprint_all()
def _serialize_object(data_object):
    """Iterator over field names and values to be serialized.

    Scans ``dir(data_object)`` for public attributes, skipping private names
    and a fixed list of non-data columns.  When the object declares a
    ``__field_prefix__``, attributes not shared with the base ``Observation``
    model are treated as survey-specific and yielded with a
    ``prefix:name`` field name.

    Yields
    ------
    (field_name, field_value) : tuple of (str, object)
    """
    from .model import Observation

    # attributes common to all observations; "archive_url" is treated as
    # common even though it is not a column on the base model
    common_fields = dir(Observation) + ["archive_url"]

    # non-data attributes and methods that must not be serialized
    SKIP_COLUMNS = [
        "spatial_terms",
        "metadata",
        "cutout_url",
        "preview_url",
        "diff_cutout_url",
        "diff_preview_url",
        "set_fov",
        "registry",
        "test_edges",
    ]

    # Scan each data object for field names and their values
    for k in dir(data_object):
        # Skip unwanted field names.
        if k.startswith("_") or k in SKIP_COLUMNS:
            continue

        field_name = k  # default is to use the attribute name
        field_value = getattr(data_object, k)

        # Are there any survey-specific fields to add?
        if hasattr(data_object, "__field_prefix__"):
            if k not in common_fields:
                # This field_name is not in Observation, so must be
                # survey-specific.
                field_name = f"{data_object.__field_prefix__}:{k}"

        yield field_name, field_value

0 commit comments

Comments
 (0)