Skip to content

Commit 799d6d4

Browse files
authored
Merge pull request #25 from synthesizer-project/qdev
switching over to synference-download, cleaning up files
2 parents 35b61bd + 20e6549 commit 799d6d4

31 files changed

+370
-4911
lines changed

.github/workflows/docs.yml

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -44,15 +44,16 @@ jobs:
4444
sudo apt install pandoc
4545
pip install "cosmos-synthesizer@git+https://github.com/synthesizer-project/synthesizer.git"
4646
pip install .[test,docs]
47-
pip install "dense_basis@git+https://github.com/kartheikiyer/dense_basis"
47+
pip install "dense_basis@git+https://github.com/kartheikiyer/dense_basis.git"
4848
pip install "ltu_ili@git+https://github.com/maho3/ltu-ili.git"
49+
pip install "cosmos-synthesizer@git+https://github.com/synthesizer-project/synthesizer.git#egg=synthesizer"
4950
pip install pytest-xdist # enable parallel pytest execution
5051
- name: Download test data
5152
run: |
5253
# Download test grid data
5354
mkdir -p data/libraries/
5455
synthesizer-download --test-grids --dust-grid
55-
synference-download --test
56+
python -c "from synference.utils import download_test_data; download_test_data()"
5657
- name: Sphinx Build
5758
run: |
5859
# Test sphinx build (runs all notebooks)

.github/workflows/python-app.yml

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -31,9 +31,11 @@ jobs:
3131
- name: Install dependencies
3232
run: |
3333
python -m pip install --upgrade pip
34-
WITH_OPENMP=1 pip install .
34+
pip install .
3535
pip install ruff pytest
3636
pip install "ltu_ili@git+https://github.com/maho3/ltu-ili.git"
37+
pip install "cosmos-synthesizer@git+https://github.com/synthesizer-project/synthesizer.git#egg=synthesizer"
38+
python -c "from synference.utils import download_test_data; download_test_data()"
3739
3840
- name: Test import
3941
run: |

docs/source/advanced_topics/simformer.ipynb

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -31,10 +31,10 @@
3131
"metadata": {},
3232
"outputs": [],
3333
"source": [
34-
"from synference import SBI_Fitter\n",
34+
"from synference import SBI_Fitter, test_data_dir\n",
3535
"\n",
3636
"fitter = SBI_Fitter.init_from_hdf5(\n",
37-
" model_name=\"test\", hdf5_path=\"../example_libraries/example_model_library.hdf5\"\n",
37+
" model_name=\"test\", hdf5_path=f\"{test_data_dir}/example_model_library.hdf5\"\n",
3838
")"
3939
]
4040
},

docs/source/gfx/SBI_SED.png

-1 MB
Binary file not shown.

docs/source/library_gen/complex_library_generation.ipynb

Lines changed: 23 additions & 167 deletions
Large diffs are not rendered by default.

docs/source/library_gen/synthesizer_crash_course.ipynb

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -28,7 +28,7 @@
2828
"\n",
2929
"Synthesizer relies on pre-computed SPS (Stellar Population Synthesis) model grids to generate synthetic observables. The main documentation for these grids can be found [here](https://synthesizer-project.github.io/synthesizer/emission_grids/grids.html). Pre-computed grids are available for download for several popular SPS models, including: BPASS, FSPS, BC03, and Maraston, and are stored in HDF5 format.\n",
3030
"\n",
31-
"Additionally, pre-computed grids have been generated for a variety of IMFs, and have been post-processed to include nebular emission using Cloudy. You should have a `SYNTHESIZER_GRID_DIR` environment variable pointing to the directory where you have stored these grids. \n",
31+
"Additionally, pre-computed grids have been generated for a variety of IMFs, and variants which have been post-processed to include nebular emission using Cloudy are also available. \n",
3232
"\n",
3333
"For the purposes of this crash course, we will use a test grid from BPASS v2.2.1, but the following will work with any of the available grids."
3434
]

docs/source/posterior_inference/catalogue_fitting.ipynb

Lines changed: 7 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -32,7 +32,7 @@
3232
"from synthesizer import get_grids_dir\n",
3333
"from unyt import Jy\n",
3434
"\n",
35-
"from synference import SBI_Fitter, load_unc_model_from_hdf5\n",
35+
"from synference import SBI_Fitter, load_unc_model_from_hdf5, test_data_dir\n",
3636
"\n",
3737
"print(get_grids_dir())\n",
3838
"\n",
@@ -68,7 +68,7 @@
6868
"source": [
6969
"from astropy.table import Table\n",
7070
"\n",
71-
"cat = Table.read(\"../example_models/cats/jades_spec_catalogue_subset.fits\")"
71+
"cat = Table.read(f\"{test_data_dir}/jades_spec_catalogue_subset.fits\")"
7272
]
7373
},
7474
{
@@ -84,15 +84,15 @@
8484
"metadata": {},
8585
"outputs": [],
8686
"source": [
87-
"library_path = \"../example_models/BPASS_DB_v4/grid_BPASS_Chab_DenseBasis_SFH_0.01_z_14_logN_2.7_Calzetti_v3_multinode.hdf5\" # noqa: E501\n",
87+
"library_path = (\n",
88+
" f\"{test_data_dir}/grid_BPASS_Chab_DenseBasis_SFH_0.01_z_14_logN_2.7_Calzetti_v3_multinode.hdf5\" # noqa: E501\n",
89+
")\n",
8890
"\n",
8991
"fitter = SBI_Fitter.load_saved_model(\n",
90-
" model_file=\"../example_models/BPASS_DB_v4\", library_path=library_path, device=\"cpu\"\n",
92+
" model_file=f\"{test_data_dir}\", library_path=library_path, device=\"cpu\"\n",
9193
")\n",
9294
"\n",
93-
"nm_path = (\n",
94-
" \"../example_models/BPASS_DB_v4/BPASS_DenseBasis_v4_final_nsf_0_params_empirical_noise_models.h5\"\n",
95-
")\n",
95+
"nm_path = f\"{test_data_dir}/BPASS_DenseBasis_v4_final_nsf_0_params_empirical_noise_models.h5\"\n",
9696
"noise_models = load_unc_model_from_hdf5(nm_path)\n",
9797
"\n",
9898
"fitter.feature_array_flags[\"empirical_noise_models\"] = noise_models"

docs/source/posterior_inference/intro.rst

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -4,6 +4,5 @@ Posterior Inference
44
.. toctree::
55
:maxdepth: 2
66

7-
using_your_model
7+
catalogue_fitting
88
sed_recovery
9-
catalogue_fitting

docs/source/posterior_inference/sed_recovery.ipynb

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -61,17 +61,17 @@
6161
"metadata": {},
6262
"outputs": [],
6363
"source": [
64-
"from synference import SBI_Fitter, load_unc_model_from_hdf5\n",
64+
"from synference import SBI_Fitter, load_unc_model_from_hdf5, test_data_dir\n",
6565
"\n",
66-
"library_path = \"../example_models/BPASS_DB_v4/grid_BPASS_Chab_DenseBasis_SFH_0.01_z_14_logN_2.7_Calzetti_v3_multinode.hdf5\" # noqa: E501\n",
66+
"library_path = (\n",
67+
" f\"{test_data_dir}/grid_BPASS_Chab_DenseBasis_SFH_0.01_z_14_logN_2.7_Calzetti_v3_multinode.hdf5\" # noqa: E501\n",
68+
")\n",
6769
"\n",
6870
"fitter = SBI_Fitter.load_saved_model(\n",
69-
" model_file=\"../example_models/BPASS_DB_v4\", library_path=library_path, device=\"cpu\"\n",
71+
" model_file=f\"{test_data_dir}\", library_path=library_path, device=\"cpu\"\n",
7072
")\n",
7173
"\n",
72-
"nm_path = (\n",
73-
" \"../example_models/BPASS_DB_v4/BPASS_DenseBasis_v4_final_nsf_0_params_empirical_noise_models.h5\"\n",
74-
")\n",
74+
"nm_path = f\"{test_data_dir}/BPASS_DenseBasis_v4_final_nsf_0_params_empirical_noise_models.h5\"\n",
7575
"noise_models = load_unc_model_from_hdf5(nm_path)\n",
7676
"\n",
7777
"fitter.feature_array_flags[\"empirical_noise_models\"] = noise_models"

docs/source/posterior_inference/using_your_model.ipynb

Lines changed: 0 additions & 136 deletions
This file was deleted.

0 commit comments

Comments (0)