diff --git a/myst.yml b/myst.yml
index d0d10aac..e6ec5572 100644
--- a/myst.yml
+++ b/myst.yml
@@ -13,6 +13,12 @@ project:
       url: https://mybinder.org
   settings:
     output_matplotlib_strings: remove
+  error_rules:
+    - rule: link-resolves
+      severity: ignore
+      keys:
+        # This link resolves in the browser, but the build could not resolve it and got a 403
+        - https://irsa.ipac.caltech.edu/cgi-bin/Gator/nph-scan?projshort=SPITZER
   extends:
     - toc.yml
 site:
diff --git a/tox.ini b/tox.ini
index 64730066..cb2f96b3 100644
--- a/tox.ini
+++ b/tox.ini
@@ -38,7 +38,7 @@ deps =
     devdeps: git+https://github.com/astropy/pyvo.git#egg=pyvo
     devdeps: git+https://github.com/astropy/astroquery.git#egg=astroquery

-allowlist_externals = bash, jupyter-book
+allowlist_externals = bash

 install_command =
@@ -65,16 +65,17 @@ commands =
     buildhtml: bash -c 'cat ignore_tutorials/ignore_rendering_execution >> ignore_testing'

     # We only want to run CI in PRs for the notebooks we touched
-    !buildhtml: bash -c 'if [[ $GITHUB_EVENT_NAME == pull_request && -z "$(grep force_run:all_tests ${GITHUB_EVENT_PATH})" ]]; then git fetch origin main --depth=1; git diff origin/main --name-only tutorials | grep .md; else find tutorials -name "*.md"; fi | grep -vf ignore_testing/ignore_testing | xargs jupytext --to notebook '
+    !buildhtml: bash -c 'if [[ $GITHUB_EVENT_NAME == pull_request && -z "$(grep force_run:all_tests ${GITHUB_EVENT_PATH})" ]]; then git fetch origin main --depth=1; git diff origin/main --name-only tutorials | grep .md; else find tutorials -name "*.md"; fi | grep -vf ignore_testing | xargs jupytext --to notebook '
+    !buildhtml: bash -c "echo 'Notebooks ignored (not tested/executed) in this job:\n'; cat ignore_testing"

     !buildhtml: pytest --nbval-lax -vv --suppress-no-test-exit-code --durations=10 tutorials

     # sed -i needs a bit of hacky conditional on ubuntu to cover the case of an empty ignore
     buildhtml: bash -c "find tutorials -name '*md' | grep -f ignore_testing | sort | uniq > ignore_execute; if [ -s ignore_execute ]; then cat ignore_execute | xargs -n 1 sed -i -e 's|name: python3|name: python3\nskip_execution: true|g';fi"
-    # Status check is a workaround until https://github.com/jupyter-book/mystmd/issues/2113 is fixed
-    buildhtml: bash -c "jupyter-book build --execute --html 2>&1 | tee /tmp/mystbuild.log"
-    buildhtml: bash -c "if grep -q 'Traceback .most recent call last.' /tmp/mystbuild.log; then exit 1; fi"
+    buildhtml: bash -c "echo 'Notebooks ignored (not tested/executed) in this job:\n'; cat ignore_execute"
+    # Using strict mode so we fail on tracebacks, and debug mode to get a richer log
+    buildhtml: bash -c "npx myst build --execute --html --strict -d"

 pip_pre =
     predeps: true
diff --git a/tutorials/cloud_access/cloud-access-intro.md b/tutorials/cloud_access/cloud-access-intro.md
index c1e4dd37..2db3169e 100644
--- a/tutorials/cloud_access/cloud-access-intro.md
+++ b/tutorials/cloud_access/cloud-access-intro.md
@@ -11,6 +11,7 @@ kernelspec:
     name: python3
 ---

+(cloud-access-intro)=
 # IRSA cloud access introduction

 This is the introductory tutorial demonstrating basic python access to the IRSA-curated images and catalogs available in AWS S3 cloud storage buckets.
diff --git a/tutorials/firefly/NEOWISE_light_curve_demo.md b/tutorials/firefly/NEOWISE_light_curve_demo.md
index 1e8f3b0d..b60a84ff 100644
--- a/tutorials/firefly/NEOWISE_light_curve_demo.md
+++ b/tutorials/firefly/NEOWISE_light_curve_demo.md
@@ -37,9 +37,9 @@ For documentation on the firefly client visit https://caltech-ipac.github.io/fir

 ## Imports

-- *firefly_client FireflyClient* - Python API to Firefly for displaying tables, images and charts
-- *astropy.utils.data* for downloading the catalog data via TAP query
-- *urllib.parse* for converting regular query string to url-safe string
+- `firefly_client FireflyClient` - Python API to Firefly for displaying tables, images and charts
+- `astropy.utils.data` for downloading the catalog data via TAP query
+- `urllib.parse` for converting regular query string to url-safe string

 ```{code-cell} ipython3
 # Uncomment the next line to install dependencies if needed.
diff --git a/tutorials/firefly/SEDs_in_Firefly.md b/tutorials/firefly/SEDs_in_Firefly.md
index 1d02cab6..104aeb93 100644
--- a/tutorials/firefly/SEDs_in_Firefly.md
+++ b/tutorials/firefly/SEDs_in_Firefly.md
@@ -84,7 +84,7 @@ import pyvo

 +++

-From [Figure 10](https://iopscience.iop.org/article/10.3847/1538-3881/ace32f#ajace32ff10) of the referenced paper, pick the source in the upper right corner (J052736.37+344940.6):
+From [Figure 10](https://doi.org/10.3847/1538-3881/ace32f#ajace32ff10) of the referenced paper, pick the source in the upper right corner (J052736.37+344940.6):

 ```{code-cell} ipython3
 target = SkyCoord(ra="05h27m36.37s", dec="+34d49m40.6s")
diff --git a/tutorials/irsa-sia-examples/sia_2mass_allsky.md b/tutorials/irsa-sia-examples/sia_2mass_allsky.md
index 90142065..43ded873 100644
--- a/tutorials/irsa-sia-examples/sia_2mass_allsky.md
+++ b/tutorials/irsa-sia-examples/sia_2mass_allsky.md
@@ -47,14 +47,14 @@ https://irsa.ipac.caltech.edu/docs/program_interface/api_images.html

 ## Imports

-- *pyvo* for querying IRSA's 2MASS SIA service
-- *astropy.coordinates* for defining coordinates
-- *astropy.nddata* for creating an image cutout
-- *astropy.wcs* for interpreting the World Coordinate System header keywords of a fits file
-- *astropy.units* for attaching units to numbers passed to the SIA service
-- *matplotlib.pyplot* for plotting
-- *astropy.utils.data* for downloading files
-- *astropy.io* to manipulate FITS files
+- `pyvo` for querying IRSA's 2MASS SIA service
+- `astropy.coordinates` for defining coordinates
+- `astropy.nddata` for creating an image cutout
+- `astropy.wcs` for interpreting the World Coordinate System header keywords of a fits file
+- `astropy.units` for attaching units to numbers passed to the SIA service
+- `matplotlib.pyplot` for plotting
+- `astropy.utils.data` for downloading files
+- `astropy.io` to manipulate FITS files

 ```{code-cell} ipython3
 # Uncomment the next line to install dependencies if needed.
diff --git a/tutorials/irsa-sia-examples/sia_allwise_atlas.md b/tutorials/irsa-sia-examples/sia_allwise_atlas.md
index 2e3b6f07..c2106ad7 100644
--- a/tutorials/irsa-sia-examples/sia_allwise_atlas.md
+++ b/tutorials/irsa-sia-examples/sia_allwise_atlas.md
@@ -48,14 +48,14 @@ https://irsa.ipac.caltech.edu/docs/program_interface/api_images.html

 ## Imports

-- *pyvo* for querying IRSA's AllWISE Atlas SIA service
-- *astropy.coordinates* for defining coordinates
-- *astropy.nddata* for creating an image cutout
-- *astropy.wcs* for interpreting the World Coordinate System header keywords of a fits file
-- *astropy.units* for attaching units to numbers passed to the SIA service
-- *matplotlib.pyplot* for plotting
-- *astropy.utils.data* for downloading files
-- *astropy.io* to manipulate FITS files
+- `pyvo` for querying IRSA's AllWISE Atlas SIA service
+- `astropy.coordinates` for defining coordinates
+- `astropy.nddata` for creating an image cutout
+- `astropy.wcs` for interpreting the World Coordinate System header keywords of a fits file
+- `astropy.units` for attaching units to numbers passed to the SIA service
+- `matplotlib.pyplot` for plotting
+- `astropy.utils.data` for downloading files
+- `astropy.io` to manipulate FITS files

 ```{code-cell} ipython3
 # Uncomment the next line to install dependencies if needed.
diff --git a/tutorials/irsa-sia-examples/sia_cosmos.md b/tutorials/irsa-sia-examples/sia_cosmos.md
index bb4a762e..6b8a5eb8 100644
--- a/tutorials/irsa-sia-examples/sia_cosmos.md
+++ b/tutorials/irsa-sia-examples/sia_cosmos.md
@@ -47,14 +47,14 @@ https://irsa.ipac.caltech.edu/docs/program_interface/api_images.html

 ## Imports

-- *pyvo* for querying IRSA's COSMOS SIA service
-- *astropy.coordinates* for defining coordinates
-- *astropy.nddata* for creating an image cutout
-- *astropy.wcs* for interpreting the World Coordinate System header keywords of a fits file
-- *astropy.units* for attaching units to numbers passed to the SIA service
-- *matplotlib.pyplot* for plotting
-- *astropy.utils.data* for downloading files
-- *astropy.io* to manipulate FITS files
+- `pyvo` for querying IRSA's COSMOS SIA service
+- `astropy.coordinates` for defining coordinates
+- `astropy.nddata` for creating an image cutout
+- `astropy.wcs` for interpreting the World Coordinate System header keywords of a fits file
+- `astropy.units` for attaching units to numbers passed to the SIA service
+- `matplotlib.pyplot` for plotting
+- `astropy.utils.data` for downloading files
+- `astropy.io` to manipulate FITS files

 ```{code-cell} ipython3
 # Uncomment the next line to install dependencies if needed.
diff --git a/tutorials/irsa-sia-examples/siav2_seip.md b/tutorials/irsa-sia-examples/siav2_seip.md
index fbe09bbe..2e9df628 100644
--- a/tutorials/irsa-sia-examples/siav2_seip.md
+++ b/tutorials/irsa-sia-examples/siav2_seip.md
@@ -50,14 +50,14 @@ https://irsa.ipac.caltech.edu/docs/program_interface/api_images.html

 ## Imports

-- *pyvo* for querying IRSA's SEIP SIA service
-- *astropy.coordinates* for defining coordinates
-- *astropy.nddata* for creating an image cutout
-- *astropy.wcs* for interpreting the World Coordinate System header keywords of a fits file
-- *astropy.units* for attaching units to numbers passed to the SIA service
-- *matplotlib.pyplot* for plotting
-- *astropy.utils.data* for downloading files
-- *astropy.io* to manipulate FITS files
+- `pyvo` for querying IRSA's SEIP SIA service
+- `astropy.coordinates` for defining coordinates
+- `astropy.nddata` for creating an image cutout
+- `astropy.wcs` for interpreting the World Coordinate System header keywords of a fits file
+- `astropy.units` for attaching units to numbers passed to the SIA service
+- `matplotlib.pyplot` for plotting
+- `astropy.utils.data` for downloading files
+- `astropy.io` to manipulate FITS files

 ```{code-cell} ipython3
 # Uncomment the next line to install dependencies if needed.
diff --git a/tutorials/parquet-catalog-demos/irsa-hats-with-lsdb.md b/tutorials/parquet-catalog-demos/irsa-hats-with-lsdb.md
index 7bf5a7f9..b8018d05 100644
--- a/tutorials/parquet-catalog-demos/irsa-hats-with-lsdb.md
+++ b/tutorials/parquet-catalog-demos/irsa-hats-with-lsdb.md
@@ -326,7 +326,7 @@ The rationale for selecting these columns is as follows:
 ztf_schema_df[ztf_schema_df.name.isin(ztf_columns)]
 ```

-For a quality cut, we apply the following filter on the number of good epochs (from [Coughlin et al. 2021](https://academic.oup.com/mnras/article/505/2/2954/6284767) section 2):
+For a quality cut, we apply the following filter on the number of good epochs (from {cite}`doi.org/10.1093/mnras/stab1502`, section 2):

 ```{code-cell} ipython3
 quality_filters = [
diff --git a/tutorials/parquet-catalog-demos/neowise-source-table-lightcurves.md b/tutorials/parquet-catalog-demos/neowise-source-table-lightcurves.md
index d6dc85fd..0206778c 100644
--- a/tutorials/parquet-catalog-demos/neowise-source-table-lightcurves.md
+++ b/tutorials/parquet-catalog-demos/neowise-source-table-lightcurves.md
@@ -11,6 +11,7 @@ kernelspec:
     name: python3
 ---

+(neowise-lightcurves-parquet)=
 # Make Light Curves from NEOWISE Single-exposure Source Table

 +++
diff --git a/tutorials/parquet-catalog-demos/neowise-source-table-strategies.md b/tutorials/parquet-catalog-demos/neowise-source-table-strategies.md
index c658c29d..c6da7964 100644
--- a/tutorials/parquet-catalog-demos/neowise-source-table-strategies.md
+++ b/tutorials/parquet-catalog-demos/neowise-source-table-strategies.md
@@ -110,9 +110,9 @@ A fully-worked example is shown in the light curve notebook linked below.

 ### 1.3 See also

-- [IRSA Cloud Access Intro](https://irsa.ipac.caltech.edu/docs/notebooks/cloud-access-intro.html)
-- [AllWISE Source Catalog Demo](https://irsa.ipac.caltech.edu/docs/notebooks/wise-allwise-catalog-demo.html)
-- [Make Light Curves from NEOWISE Single-exposure Source Table](https://irsa.ipac.caltech.edu/docs/notebooks/neowise-source-table-lightcurves.html)
+- [](#cloud-access-intro)
+- [](#allwise-source-catalog-parquet)
+- [](#neowise-lightcurves-parquet)

 +++
diff --git a/tutorials/parquet-catalog-demos/wise-allwise-catalog-demo.md b/tutorials/parquet-catalog-demos/wise-allwise-catalog-demo.md
index c5e25998..b1f19d00 100644
--- a/tutorials/parquet-catalog-demos/wise-allwise-catalog-demo.md
+++ b/tutorials/parquet-catalog-demos/wise-allwise-catalog-demo.md
@@ -11,6 +11,7 @@ kernelspec:
     name: python3
 ---

+(allwise-source-catalog-parquet)=
 # Analyzing cloud-hosted AllWISE Source Catalog in Parquet format

 +++

@@ -29,7 +30,7 @@ kernelspec:

 ## Introduction

-This notebook demonstrates access to the [HEALPix](https://ui.adsabs.harvard.edu/abs/2005ApJ...622..759G/abstract)-partitioned (order 5), [Apache Parquet](https://parquet.apache.org/) version of the [AllWISE Source Catalog](https://wise2.ipc.caltech.edu/docs/release/allwise/expsup/sec1_3.html#src_cat).
+This notebook demonstrates access to the [HEALPix](https://ui.adsabs.harvard.edu/abs/2005ApJ...622..759G/abstract)-partitioned (order 5), [Apache Parquet](https://parquet.apache.org/) version of the [AllWISE Source Catalog](https://wise2.ipac.caltech.edu/docs/release/allwise/expsup/sec1_3.html#src_cat).
 The catalog is available through the [AWS Open Data](https://aws.amazon.com/opendata) program, as part of the [NASA Open-Source Science Initiative](https://science.nasa.gov/open-science-overview).

 Parquet is convenient for large astronomical catalogs in part because the storage format supports efficient database-style queries on the files themselves, without having to load the catalog into a database (or into memory) first.
diff --git a/tutorials/spherex/spherex_intro.md b/tutorials/spherex/spherex_intro.md
index 857e8dd1..a76d5760 100644
--- a/tutorials/spherex/spherex_intro.md
+++ b/tutorials/spherex/spherex_intro.md
@@ -36,7 +36,7 @@ SPHEREx is a NASA Astrophysics Medium Explorer mission that launched in March 20
 The community will also mine SPHEREx data and combine it with synergistic data sets to address a variety of additional topics in astrophysics.

-More information is available in the [SPHEREx Explanatory Supplement](https://irsa.ipac.caltech.edu/data/SPHEREx/docs/SPHEREx_Expsupp_QR_v1.0.pdf).
+More information is available in the [SPHEREx Explanatory Supplement](https://irsa.ipac.caltech.edu/data/SPHEREx/docs/SPHEREx_Expsupp_QR.pdf).

 +++

@@ -330,7 +330,7 @@ Now let's take a look at some header keywords that provide information about how
 spectral_image_header['L2 N_*']
 ```

-There are 14 flags in total. Typically, most of the pixels are identified as SOURCE pixels, which are pixels mapped to a known source. The remaining flags are described in Table 8 of the [SPHEREx Explanatory Supplement](https://irsa.ipac.caltech.edu/data/SPHEREx/docs/spherex_explanatory_supplement.pdf).
+There are 14 flags in total. Typically, most of the pixels are identified as SOURCE pixels, which are pixels mapped to a known source. The remaining flags are described in Table 8 of the [SPHEREx Explanatory Supplement](https://irsa.ipac.caltech.edu/data/SPHEREx/docs/SPHEREx_Expsupp_QR.pdf).

 +++
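
The AllWISE hunk above keeps the claim that Parquet supports efficient database-style queries directly on the catalog files. As an illustration of that access pattern only (not part of the patch), here is a minimal pyarrow sketch; the bucket path, partition column `healpix_k5`, and column names follow the AllWISE tutorial's conventions and should be treated as assumptions rather than authoritative values.

```python
# A minimal sketch: push a filter and column projection down to cloud-hosted
# Parquet files, without loading the catalog into a database or into memory.
# Paths and column names below are assumptions taken from the AllWISE tutorial.
import pyarrow.compute as pc
import pyarrow.dataset as ds
import pyarrow.fs

# Anonymous (unsigned) access to the public AWS Open Data bucket.
s3 = pyarrow.fs.S3FileSystem(region="us-west-2", anonymous=True)
catalog_root = "nasa-irsa-wise/wise/allwise/catalogs/p3as_psd/healpix_k5/wise-allwise.parquet"

# Building the dataset from the _metadata sidecar reads only the row-group footers up front.
dataset = ds.parquet_dataset(f"{catalog_root}/_metadata", filesystem=s3, partitioning="hive")

# Read just two columns for bright W1 sources in a single order-5 HEALPix partition;
# the filter is evaluated against the files themselves, so only matching data is downloaded.
table = dataset.to_table(
    columns=["designation", "w1mpro"],
    filter=(pc.field("healpix_k5") == 1014) & (pc.field("w1mpro") < 8.0),
)
print(table.num_rows)
```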