5 changes: 3 additions & 2 deletions .github/workflows/docs.yml
@@ -43,15 +43,16 @@ jobs:
           pip install wheel
           sudo apt install pandoc
           pip install .[test,docs]
-          pip install "dense_basis@git+https://github.com/kartheikiyer/dense_basis"
+          pip install "dense_basis@git+https://github.com/kartheikiyer/dense_basis.git"
           pip install "ltu_ili@git+https://github.com/maho3/ltu-ili.git"
+          pip install "synthesizer@git+https://github.com/synthesizer-project/synthesizer.git"
           pip install pytest-xdist # enable parallel pytest execution
       - name: Download test data
         run: |
           # Download test grid data
           mkdir -p data/libraries/
           synthesizer-download --test-grids --dust-grid
-          synference-download --test
+          python -c "from synference.utils import download_test_data; download_test_data()"
       - name: Sphinx Build
         run: |
           # Test sphinx build (runs all notebooks)
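Note: both workflows now bootstrap test data through synference itself instead of a dedicated `synference-download` CLI. A minimal sketch of the same bootstrap as a standalone script, assuming `download_test_data()` fetches the example files into the directory exposed as `synference.test_data_dir` — the notebooks below rely on that pairing, but the exact download location is our reading of the diff, not stated in it:

```python
# Sketch: fetch the example libraries/models the docs notebooks expect.
# Assumption: download_test_data() populates synference.test_data_dir.
from synference import test_data_dir
from synference.utils import download_test_data

download_test_data()
print(f"Test data directory: {test_data_dir}")
```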
4 changes: 3 additions & 1 deletion .github/workflows/python-app.yml
@@ -31,9 +31,11 @@ jobs:
     - name: Install dependencies
       run: |
         python -m pip install --upgrade pip
-        WITH_OPENMP=1 pip install .
+        pip install .
         pip install ruff pytest
         pip install "ltu_ili@git+https://github.com/maho3/ltu-ili.git"
+        pip install "synthesizer@git+https://github.com/synthesizer-project/synthesizer.git"
+        python -c "from synference.utils import download_test_data; download_test_data()"

     - name: Test import
       run: |
4 changes: 2 additions & 2 deletions docs/source/advanced_topics/simformer.ipynb
@@ -31,10 +31,10 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "from synference import SBI_Fitter\n",
+    "from synference import SBI_Fitter, test_data_dir\n",
     "\n",
     "fitter = SBI_Fitter.init_from_hdf5(\n",
-    "    model_name=\"test\", hdf5_path=\"../example_libraries/example_model_library.hdf5\"\n",
+    "    model_name=\"test\", hdf5_path=f\"{test_data_dir}/example_model_library.hdf5\"\n",
     ")"
    ]
   },
Binary file removed docs/source/gfx/SBI_SED.png
190 changes: 23 additions & 167 deletions docs/source/library_gen/complex_library_generation.ipynb

Large diffs are not rendered by default.

50 changes: 5 additions & 45 deletions docs/source/library_gen/synthesizer_crash_course.ipynb
@@ -28,7 +28,7 @@
     "\n",
     "Synthesizer relies on pre-computed SPS (Stellar Population Synthesis) model grids to generate synthetic observables. The main documentation for these grids can be found [here](https://synthesizer-project.github.io/synthesizer/emission_grids/grids.html). Pre-computed grids are available for download for several popular SPS models, including: BPASS, FSPS, BC03, and Maraston, and are stored in HDF5 format.\n",
     "\n",
-    "Additionally, pre-computed grids have been generated for a variety of IMFs, and have been post-processed to include nebular emission using Cloudy. You should have a `SYNTHESIZER_GRID_DIR` environment variable pointing to the directory where you have stored these grids. \n",
+    "Additionally, pre-computed grids have been generated for a variety of IMFs, and variants which have been post-processed to include nebular emission using Cloudy are also available. \n",
     "\n",
     "For the purposes of this crash course, we will use a test grid from BPASS v2.2.1, but the following will work with any of the available grids."
    ]
@@ -45,32 +45,10 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 4,
+   "execution_count": null,
    "id": "3bd60ad0",
    "metadata": {},
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "<Popen: returncode: None args: ['synthesizer-download', '--test-grids', '--d...>"
-      ]
-     },
-     "execution_count": 4,
-     "metadata": {},
-     "output_type": "execute_result"
-    },
-    {
-     "name": "stderr",
-     "output_type": "stream",
-     "text": [
-      "100%|██████████| 201M/201M [00:07<00:00, 26.5MB/s] \n",
-      "100%|██████████| 19.6M/19.6M [00:00<00:00, 26.1MB/s]\n",
-      "100%|██████████| 19.6M/19.6M [00:00<00:00, 27.1MB/s]\n",
-      "100%|██████████| 140M/140M [00:04<00:00, 28.4MB/s] \n",
-      "100%|██████████| 57.7M/57.7M [00:02<00:00, 24.4MB/s]\n"
-     ]
-    }
-   ],
+   "outputs": [],
    "source": [
     "import subprocess\n",
     "\n",
@@ -79,7 +57,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 5,
+   "execution_count": null,
    "id": "95725972",
    "metadata": {},
    "outputs": [],
@@ -532,25 +510,7 @@
     ]
    }
   ],
-  "metadata": {
-   "kernelspec": {
-    "display_name": "astro",
-    "language": "python",
-    "name": "python3"
-   },
-   "language_info": {
-    "codemirror_mode": {
-     "name": "ipython",
-     "version": 3
-    },
-    "file_extension": ".py",
-    "mimetype": "text/x-python",
-    "name": "python",
-    "nbconvert_exporter": "python",
-    "pygments_lexer": "ipython3",
-    "version": "3.10.9"
-   }
-  },
+  "metadata": {},
  "nbformat": 4,
  "nbformat_minor": 5
 }
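The download cell in this notebook is elided in this view, but the removed output shows a `Popen` over `['synthesizer-download', '--test-grids', '--d...']`, and the docs workflow above runs `synthesizer-download --test-grids --dust-grid`. A hedged reconstruction under that assumption, using `subprocess.run` so the cell blocks until the grids have finished downloading (the original output suggests a non-blocking `Popen`):

```python
# Sketch: download the Synthesizer test grids. Assumption: the truncated
# flag in the removed output is --dust-grid, as in the docs workflow above.
import subprocess

subprocess.run(
    ["synthesizer-download", "--test-grids", "--dust-grid"],
    check=True,  # fail loudly if the download does not complete
)
```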
14 changes: 7 additions & 7 deletions docs/source/posterior_inference/catalogue_fitting.ipynb
@@ -32,7 +32,7 @@
     "from synthesizer import get_grids_dir\n",
     "from unyt import Jy\n",
     "\n",
-    "from synference import SBI_Fitter, load_unc_model_from_hdf5\n",
+    "from synference import SBI_Fitter, load_unc_model_from_hdf5, test_data_dir\n",
     "\n",
     "print(get_grids_dir())\n",
     "\n",
@@ -68,7 +68,7 @@
    "source": [
     "from astropy.table import Table\n",
     "\n",
-    "cat = Table.read(\"../example_models/cats/jades_spec_catalogue_subset.fits\")"
+    "cat = Table.read(f\"{test_data_dir}/jades_spec_catalogue_subset.fits\")"
    ]
   },
   {
@@ -84,15 +84,15 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "library_path = \"../example_models/BPASS_DB_v4/grid_BPASS_Chab_DenseBasis_SFH_0.01_z_14_logN_2.7_Calzetti_v3_multinode.hdf5\" # noqa: E501\n",
+    "library_path = (\n",
+    "    f\"{test_data_dir}/grid_BPASS_Chab_DenseBasis_SFH_0.01_z_14_logN_2.7_Calzetti_v3_multinode.hdf5\" # noqa: E501\n",
+    ")\n",
     "\n",
     "fitter = SBI_Fitter.load_saved_model(\n",
-    "    model_file=\"../example_models/BPASS_DB_v4\", library_path=library_path, device=\"cpu\"\n",
+    "    model_file=f\"{test_data_dir}\", library_path=library_path, device=\"cpu\"\n",
     ")\n",
     "\n",
-    "nm_path = (\n",
-    "    \"../example_models/BPASS_DB_v4/BPASS_DenseBasis_v4_final_nsf_0_params_empirical_noise_models.h5\"\n",
-    ")\n",
+    "nm_path = f\"{test_data_dir}/BPASS_DenseBasis_v4_final_nsf_0_params_empirical_noise_models.h5\"\n",
     "noise_models = load_unc_model_from_hdf5(nm_path)\n",
     "\n",
     "fitter.feature_array_flags[\"empirical_noise_models\"] = noise_models"
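After this change, catalogue_fitting.ipynb and sed_recovery.ipynb share the same setup. Consolidated here for reference — every call below appears verbatim in the diff; the only assumption is that `test_data_dir` already holds the downloaded example files:

```python
# Shared setup for the two inference notebooks (names taken from the diff).
from synference import SBI_Fitter, load_unc_model_from_hdf5, test_data_dir

library_path = (
    f"{test_data_dir}/grid_BPASS_Chab_DenseBasis_SFH_0.01_z_14_logN_2.7_Calzetti_v3_multinode.hdf5"
)

# Load the trained model on CPU so the docs build needs no GPU.
fitter = SBI_Fitter.load_saved_model(
    model_file=f"{test_data_dir}", library_path=library_path, device="cpu"
)

# Attach the empirical noise models used when fitting real photometry.
nm_path = f"{test_data_dir}/BPASS_DenseBasis_v4_final_nsf_0_params_empirical_noise_models.h5"
fitter.feature_array_flags["empirical_noise_models"] = load_unc_model_from_hdf5(nm_path)
```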
3 changes: 1 addition & 2 deletions docs/source/posterior_inference/intro.rst
@@ -4,6 +4,5 @@ Posterior Inference
 .. toctree::
    :maxdepth: 2

-   using_your_model
-   catalogue_fitting
    sed_recovery
+   catalogue_fitting
12 changes: 6 additions & 6 deletions docs/source/posterior_inference/sed_recovery.ipynb
@@ -61,17 +61,17 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "from synference import SBI_Fitter, load_unc_model_from_hdf5\n",
+    "from synference import SBI_Fitter, load_unc_model_from_hdf5, test_data_dir\n",
     "\n",
-    "library_path = \"../example_models/BPASS_DB_v4/grid_BPASS_Chab_DenseBasis_SFH_0.01_z_14_logN_2.7_Calzetti_v3_multinode.hdf5\" # noqa: E501\n",
+    "library_path = (\n",
+    "    f\"{test_data_dir}/grid_BPASS_Chab_DenseBasis_SFH_0.01_z_14_logN_2.7_Calzetti_v3_multinode.hdf5\" # noqa: E501\n",
+    ")\n",
     "\n",
     "fitter = SBI_Fitter.load_saved_model(\n",
-    "    model_file=\"../example_models/BPASS_DB_v4\", library_path=library_path, device=\"cpu\"\n",
+    "    model_file=f\"{test_data_dir}\", library_path=library_path, device=\"cpu\"\n",
     ")\n",
     "\n",
-    "nm_path = (\n",
-    "    \"../example_models/BPASS_DB_v4/BPASS_DenseBasis_v4_final_nsf_0_params_empirical_noise_models.h5\"\n",
-    ")\n",
+    "nm_path = f\"{test_data_dir}/BPASS_DenseBasis_v4_final_nsf_0_params_empirical_noise_models.h5\"\n",
     "noise_models = load_unc_model_from_hdf5(nm_path)\n",
     "\n",
     "fitter.feature_array_flags[\"empirical_noise_models\"] = noise_models"
136 changes: 0 additions & 136 deletions docs/source/posterior_inference/using_your_model.ipynb

This file was deleted.

4 changes: 2 additions & 2 deletions docs/source/sbi_train/basic_sbi_model.ipynb
@@ -27,7 +27,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "from synference import SBI_Fitter"
+    "from synference import SBI_Fitter, test_data_dir"
    ]
   },
   {
@@ -48,7 +48,7 @@
    "outputs": [],
    "source": [
     "fitter = SBI_Fitter.init_from_hdf5(\n",
-    "    model_name=\"test\", hdf5_path=\"../example_libraries/example_model_library.hdf5\"\n",
+    "    model_name=\"test\", hdf5_path=f\"{test_data_dir}/example_model_library.hdf5\"\n",
     ")"
    ]
   },
4 changes: 2 additions & 2 deletions docs/source/sbi_train/complex_sbi_model.ipynb
@@ -15,10 +15,10 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "from synference import SBI_Fitter\n",
+    "from synference import SBI_Fitter, test_data_dir\n",
     "\n",
     "fitter = SBI_Fitter.init_from_hdf5(\n",
-    "    model_name=\"test\", hdf5_path=\"../example_libraries/example_model_library.hdf5\"\n",
+    "    model_name=\"test\", hdf5_path=f\"{test_data_dir}/example_model_library.hdf5\"\n",
     ")"
    ]
   },
4 changes: 2 additions & 2 deletions docs/source/sbi_train/feature_array.ipynb
@@ -27,10 +27,10 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "from synference import SBI_Fitter\n",
+    "from synference import SBI_Fitter, test_data_dir\n",
     "\n",
     "fitter = SBI_Fitter.init_from_hdf5(\n",
-    "    model_name=\"test\", hdf5_path=\"../example_libraries/example_model_library.hdf5\"\n",
+    "    model_name=\"test\", hdf5_path=f\"{test_data_dir}/example_model_library.hdf5\"\n",
     ")"
    ]
   },
4 changes: 2 additions & 2 deletions docs/source/sbi_train/model_optimization.ipynb
@@ -27,10 +27,10 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "from synference import SBI_Fitter\n",
+    "from synference import SBI_Fitter, test_data_dir\n",
     "\n",
     "fitter = SBI_Fitter.init_from_hdf5(\n",
-    "    model_name=\"test\", hdf5_path=\"../example_libraries/example_model_library.hdf5\"\n",
+    "    model_name=\"test\", hdf5_path=f\"{test_data_dir}/example_model_library.hdf5\"\n",
     ")\n",
     "fitter.create_feature_array(verbose=False);"
    ]