diff --git a/docs/user-guide/estia/index.md b/docs/user-guide/estia/index.md
index 8e102096..cbc2acac 100644
--- a/docs/user-guide/estia/index.md
+++ b/docs/user-guide/estia/index.md
@@ -41,4 +41,5 @@ hidden:
 estia-mcstas-reduction
 estia-advanced-mcstas-reduction
+simulated-spin-flip-sample
 ```
diff --git a/docs/user-guide/estia/simulated-spin-flip-sample.ipynb b/docs/user-guide/estia/simulated-spin-flip-sample.ipynb
new file mode 100644
index 00000000..cc6ecf95
--- /dev/null
+++ b/docs/user-guide/estia/simulated-spin-flip-sample.ipynb
@@ -0,0 +1,419 @@
+{
+ "cells": [
+  {
+   "cell_type": "markdown",
+   "id": "0",
+   "metadata": {},
+   "source": [
+    "# Reduction of spin flip data from McStas simulation\n",
+    "\n",
+    "This notebook contains an example of polarized reflectometry data reduction.\n",
+    "The dataset comes from a McStas simulation of the ESTIA instrument.\n",
+    "\n",
+    "\n",
+    "Four samples were simulated:\n",
+    "\n",
+    "* `supermirror` - a perfect nonmagnetic supermirror\n",
+    "* `magnetic_supermirror` - a magnetic supermirror with high reflectivity for one spin state and low reflectivity for the other\n",
+    "* `magnetic_supermirror_2` - a magnetic supermirror with a slightly different reflectivity curve from `magnetic_supermirror`\n",
+    "* `spin_flip_sample` - a magnetic supermirror that also flips the spin of incident neutrons with 10% probability\n",
+    "\n",
+    "Each sample was measured with the four flipper settings `offoff`, `offon`, `onoff`, and `onon`, where the first part of the name refers to the polarizer flipper setting and the second to the analyzer flipper setting.\n",
+    "\n",
+    "In the data reduction procedure, `supermirror` and `magnetic_supermirror_2` will be used to calibrate the parameters of the instrument."
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "1",
+   "metadata": {},
+   "source": [
+    "We start by importing packages and creating the workflow:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "2",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "#%matplotlib ipympl\n",
+    "import scipp as sc\n",
+    "\n",
+    "from ess.reflectometry.types import *\n",
+    "from ess.reflectometry.supermirror import CriticalEdge\n",
+    "\n",
+    "from ess.estia.types import *\n",
+    "from ess.estia import EstiaMcStasWorkflow\n",
+    "from ess.estia.data import (\n",
+    "    estia_mcstas_spin_flip_example,\n",
+    "    estia_mcstas_spin_flip_example_groundtruth,\n",
+    "    estia_mcstas_spin_flip_example_download_all_to_cache\n",
+    ")\n",
+    "from ess.reflectometry.figures import wavelength_z_figure\n",
+    "\n",
+    "from ess.polarization import (  # noqa: F401\n",
+    "    Up, Down, ReducedSampleDataBySpinChannel, SupermirrorEfficiencyFunction, Polarizer, Analyzer, PolarizationAnalysisWorkflow,\n",
+    "    SupermirrorWorkflow, SecondDegreePolynomialEfficiency, EfficiencyLookupTable\n",
+    ")\n",
+    "from ess.polarization.types import TotalPolarizationCorrectedData\n",
+    "\n",
+    "wf = EstiaMcStasWorkflow()\n",
+    "wf[YIndexLimits] = sc.scalar(35), sc.scalar(64)\n",
+    "wf[ZIndexLimits] = sc.scalar(600), sc.scalar(930)\n",
+    "wf[BeamDivergenceLimits] = sc.scalar(-0.75, unit='deg'), sc.scalar(0.75, unit='deg')\n",
+    "wf[WavelengthBins] = sc.geomspace('wavelength', 4., 12, 700, unit='angstrom')\n",
+    "wf[QBins] = sc.linspace('Q', 0.001, 0.1, 200, unit='1/angstrom')\n",
+    "\n",
+    "# Reference sample is perfect supermirror with reflectivity = 1 everywhere\n",
+    "wf[CriticalEdge] = sc.scalar(float('inf'), unit='1/angstrom')\n",
+    "\n",
+    "# There is no proton current data in the McStas files, so here we just add some fake proton current\n",
+    "# data to make the workflow run.\n",
+    "wf[ProtonCurrent[SampleRun]] = sc.DataArray(\n",
+    "    sc.array(dims=('time',), values=[]),\n",
+    "    coords={'time': sc.array(dims=('time',), values=[], unit='s')})\n",
+    "wf[ProtonCurrent[ReferenceRun]] = sc.DataArray(\n",
+    "    sc.array(dims=('time',), values=[]),\n",
+    "    coords={'time': sc.array(dims=('time',), values=[], unit='s')})"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "3",
+   "metadata": {},
+   "source": [
+    "Download the data (this might take ~2 minutes depending on your internet connection):"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "4",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "%%time\n",
+    "estia_mcstas_spin_flip_example_download_all_to_cache()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "5",
+   "metadata": {},
+   "source": [
+    "## Reducing the data\n",
+    "\n",
+    "First, each dataset is loaded and reduced separately.\n",
+    "The datasets are reduced \"as references\" or \"as samples\" depending on how they will be used.\n",
+    "`supermirror` and `magnetic_supermirror_2` are used as references to calibrate the instrument, and `spin_flip_sample` and `magnetic_supermirror` are reduced as samples."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "6",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "references = {}\n",
+    "for sample in (\n",
+    "    'supermirror',\n",
+    "    'magnetic_supermirror_2',\n",
+    "):\n",
+    "    references[sample] = []\n",
+    "    for flipper_setting in ('offoff', 'offon', 'onoff', 'onon'):\n",
+    "        w = wf.copy()\n",
+    "        w[RawDetector[ReferenceRun]] = sc.io.load_hdf5(estia_mcstas_spin_flip_example(sample, flipper_setting))\n",
+    "        references[sample].append(w.compute(ReducedReference))\n",
+    "\n",
+    "        # We need to unalign all coords of the references to use\n",
+    "        # them in the calibration procedure\n",
+    "        for c in references[sample][-1].coords:\n",
+    "            references[sample][-1].coords.set_aligned(c, False)\n",
+    "\n",
+    "\n",
+    "samples = {}\n",
+    "for sample in (\n",
+    "    'spin_flip_sample',\n",
+    "    'magnetic_supermirror'\n",
+    "):\n",
+    "    samples[sample] = []\n",
+    "    for flipper_setting in ('offoff', 'offon', 'onoff', 'onon'):\n",
+    "        w = wf.copy()\n",
+    "        w[RawDetector[SampleRun]] = sc.io.load_hdf5(estia_mcstas_spin_flip_example(sample, flipper_setting))\n",
+    "        samples[sample].append(w.compute(ReducibleData[SampleRun]))\n",
+    "\n",
+    "        # We need to unalign all coords\n",
+    "        for c in samples[sample][-1].coords:\n",
+    "            samples[sample][-1].coords.set_aligned(c, False)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "7",
+   "metadata": {},
+   "source": [
+    "Here we load the ground truth reflectivity curves for the `up` and `down` spin states.\n",
+    "These will be compared to the computed reflectivity curves."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "8",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "Rdown = sc.io.load_hdf5(estia_mcstas_spin_flip_example_groundtruth('down'))\n",
+    "Rup = sc.io.load_hdf5(estia_mcstas_spin_flip_example_groundtruth('up'))"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "9",
+   "metadata": {},
+   "source": [
+    "To calibrate the workflow we estimate the polarization efficiencies of the polarizer and the analyzer.\n",
+    "\n",
+    "The calibration is performed on the wavelength-dependent intensity obtained by summing over a slice of the detector.\n",
+    "We don't sum over the entire detector because the signal is best in the center region.\n",
+    "In the lower part of the detector the reflectivity of the magnetic reference quickly increases to match the reflectivity of the non-magnetic supermirror reference.\n",
+    "In that region we cannot distinguish between the spin states, so doing the calibration there is useless.\n",
+    "In the top part of the detector the signal is lower and there is more noise.\n",
+    "We also don't want to make the region of the detector we consider too small, because then we will have too little signal.\n",
+    "\n",
+    "The trade-off is that we only consider the region of the detector highlighted in the figure below:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "10",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "wavelength_z_figure(references['magnetic_supermirror_2'][0].assign_masks(\n",
+    "    bad_region=sc.where(\n",
+    "        (references['magnetic_supermirror_2'][0].coords['z_index'] < sc.scalar(21 * 32)) |\n",
+    "        (references['magnetic_supermirror_2'][0].coords['z_index'] >= sc.scalar(25 * 32)),\n",
+    "        sc.scalar(True),\n",
+    "        sc.scalar(False)\n",
+    "    )\n",
+    "))"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "11",
+   "metadata": {},
+   "source": [
+    "The intensity is accumulated over each `blade` of the detector, corresponding to a relatively thin interval of scattering angle.\n",
+    "In such a thin interval of scattering angle the reflectivity of the magnetic supermirror is roughly a function of wavelength alone, and this is what we need for the calibration to work well.\n",
+    "\n",
+    "From the procedure described above we obtain separate calibration results for each `blade` of the detector.\n",
+    "The separate calibration values are combined using a weighted average where each calibration value is weighted by the inverse of its variance.\n",
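+    "\n",
+    "Concretely, blade-wise estimates $p_b$ with variances $\\sigma_b^2$ are combined as $\\bar{p} = \\left(\\sum_b p_b / \\sigma_b^2\\right) / \\left(\\sum_b 1 / \\sigma_b^2\\right)$\n",
+    "(cf. the `weighted_mean` helper defined in the next cell)."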
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "12", + "metadata": {}, + "outputs": [], + "source": [ + "from ess.estia.calibration import PolarizationCalibrationParameters\n", + "\n", + "\n", + "calibration = PolarizationCalibrationParameters.from_reference_measurements(\n", + " [r['blade', 21:25].sum('wire').rebin(wavelength=sc.linspace('wavelength', 4, 10.7, 50, unit='angstrom')) for r in references['supermirror']],\n", + " [r['blade', 21:25].sum('wire').rebin(wavelength=sc.linspace('wavelength', 4, 10.7, 50, unit='angstrom')) for r in references['magnetic_supermirror_2']],\n", + ")\n", + "\n", + "\n", + "def weighted_mean(p, dim):\n", + " return ((p / sc.variances(p)).nanmean(dim) / ((1 / sc.variances(p)).nanmean(dim)))\n", + "\n", + "\n", + "# Assuming the flipper efficiency is wavelength independent (as is the case in this simulation) we can ignore the other parameters\n", + "polarizer_efficiency = weighted_mean(calibration.Pp, 'blade')\n", + "analyzer_efficiency = weighted_mean(calibration.Ap, 'blade')" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "13", + "metadata": {}, + "outputs": [], + "source": [ + "polarizer_efficiency.plot(title='Polarizer efficiency') + analyzer_efficiency.plot(title='Analyzer efficiency')" + ] + }, + { + "cell_type": "markdown", + "id": "14", + "metadata": {}, + "source": [ + "Finally the calibration curves are used to fit quadratic polynomials that approximate the polarization efficiency as functions of wavelength." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "15", + "metadata": {}, + "outputs": [], + "source": [ + "def efficiency_model(wavelength, a, b, c):\n", + " '''The polarization efficiency of the analyzer and the polarizer\n", + " is modeled as a quadratic polynomial'''\n", + " return a * wavelength**2 + b * wavelength + c\n", + "\n", + "\n", + "def fit_model_to_efficiency(efficiency):\n", + " da = efficiency.copy()\n", + " da.coords['wavelength'] = sc.midpoints(da.coords['wavelength'])\n", + " par, _ = sc.curve_fit(\n", + " ['wavelength'],\n", + " efficiency_model,\n", + " da,\n", + " p0={\n", + " 'a': sc.scalar(1., unit='1/angstrom**2'),\n", + " 'b': sc.scalar(1., unit='1/angstrom'),\n", + " 'c': sc.scalar(1., unit='dimensionless')\n", + " }\n", + " )\n", + " return {k: sc.values(p.data) for k, p in par.items()}\n", + "\n", + "\n", + "sc.plot({\n", + " 'fit': sc.DataArray(\n", + " efficiency_model(\n", + " polarizer_efficiency.coords['wavelength'],\n", + " **fit_model_to_efficiency(polarizer_efficiency)\n", + " ),\n", + " coords=polarizer_efficiency.coords\n", + " ),\n", + " 'efficiency': polarizer_efficiency\n", + "}, title='polarizer') + sc.plot({\n", + " 'fit': sc.DataArray(\n", + " efficiency_model(\n", + " analyzer_efficiency.coords['wavelength'],\n", + " **fit_model_to_efficiency(analyzer_efficiency)\n", + " ),\n", + " coords=analyzer_efficiency.coords\n", + " ),\n", + " 'efficiency': analyzer_efficiency\n", + "}, title='analyzer')" + ] + }, + { + "cell_type": "markdown", + "id": "16", + "metadata": {}, + "source": [ + "In the polarization correction workflow we can use either the obtained calibration values directly (as a lookup table in wavelength) or we can use the polynomial curves to compute polarization efficiencies as functions of wavelength." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "17", + "metadata": {}, + "outputs": [], + "source": [ + "\n", + "pwf = PolarizationAnalysisWorkflow(polarizer_workflow=SupermirrorWorkflow(), analyzer_workflow=SupermirrorWorkflow())\n", + "\n", + "# To do the calibration using a lookup table - uncomment these lines\n", + "#pwf[SupermirrorEfficiencyFunction[Polarizer]] = EfficiencyLookupTable(polarizer_efficiency)\n", + "#pwf[SupermirrorEfficiencyFunction[Analyzer]] = EfficiencyLookupTable(analyzer_efficiency)\n", + "\n", + "# To do the calibration using the polynomial approximation - uncomment these lines\n", + "pwf[SupermirrorEfficiencyFunction[Polarizer]] = SecondDegreePolynomialEfficiency(**fit_model_to_efficiency(polarizer_efficiency))\n", + "pwf[SupermirrorEfficiencyFunction[Analyzer]] = SecondDegreePolynomialEfficiency(**fit_model_to_efficiency(analyzer_efficiency))\n", + "\n", + "\n", + "for i, pols in enumerate((Down, Up)):\n", + " for j, anas in enumerate((Down, Up)):\n", + " sam = references['supermirror'][2*i + j]\n", + " pwf[ReducedSampleDataBySpinChannel[pols, anas]] = sam.assign_coords(wavelength=sc.midpoints(sam.coords['wavelength']))\n", + "\n", + "\n", + "res = pwf.compute(TotalPolarizationCorrectedData)\n", + "I0 = (res.upup + res.downdown) / 2\n", + "mask = sc.isnan(I0).sum(('blade', 'wire')) > sc.scalar(0, unit=None)\n", + "wf[ReducedReference] = I0.assign_masks(nopolcal=mask.data).assign_coords(wavelength=wf.compute(WavelengthBins))\n", + "\n", + "\n", + "sample_name = 'spin_flip_sample'\n", + "\n", + "for i, pols in enumerate((Down, Up)):\n", + " for j, anas in enumerate((Down, Up)):\n", + " w = wf.copy()\n", + " w[ReducibleData[SampleRun]] = samples[sample_name][2*i + j]\n", + " sam = w.compute(ReflectivityOverQ).bin(wavelength=wf.compute(WavelengthBins)).assign_masks(nopolcal=mask.data)\n", + " pwf[ReducedSampleDataBySpinChannel[pols, anas]] = sam.assign_coords(wavelength=sc.midpoints(sam.coords['wavelength']))\n", + "\n", + "\n", + "spin_flip_sample_reflectivity = pwf.compute(TotalPolarizationCorrectedData)\n", + "\n", + "\n", + "sample_name = 'magnetic_supermirror'\n", + "\n", + "for i, pols in enumerate((Down, Up)):\n", + " for j, anas in enumerate((Down, Up)):\n", + " w = wf.copy()\n", + " w[ReducibleData[SampleRun]] = samples[sample_name][2*i + j]\n", + " sam = w.compute(ReflectivityOverQ).bin(wavelength=wf.compute(WavelengthBins)).assign_masks(nopolcal=mask.data)\n", + " pwf[ReducedSampleDataBySpinChannel[pols, anas]] = sam.assign_coords(wavelength=sc.midpoints(sam.coords['wavelength']))\n", + "\n", + "\n", + "magnetic_supermirror_reflectivity = pwf.compute(TotalPolarizationCorrectedData)\n", + "\n", + "\n", + "(sc.plot(\n", + " {'Rdd': spin_flip_sample_reflectivity.downdown.sum(('wavelength',)),\n", + " 'Ruu': spin_flip_sample_reflectivity.upup.sum(('wavelength',)),\n", + " 'Rdu': spin_flip_sample_reflectivity.downup.sum(('wavelength',)),\n", + " 'Rud': spin_flip_sample_reflectivity.updown.sum(('wavelength',)),\n", + " 'True Rdd': Rdown * 0.9,\n", + " 'True Ruu': Rup * 0.9,\n", + " 'True Rdu': Rdown * 0.1,\n", + " }, norm='log', title='Reflectivity of spin flip sample', vmin=1e-4) +\n", + "sc.plot(\n", + " {'Rdd': magnetic_supermirror_reflectivity.downdown.sum(('wavelength',)),\n", + " 'Ruu': magnetic_supermirror_reflectivity.upup.sum(('wavelength',)),\n", + " 'Rdu': magnetic_supermirror_reflectivity.downup.sum(('wavelength',)),\n", + " 'Rud': magnetic_supermirror_reflectivity.updown.sum(('wavelength',)),\n", + " 'True Rdd': Rdown,\n", + 
" 'True Ru': Rup,\n", + " 'True Rdu': Rdown * 0.0,\n", + " }, norm='log', title='Reflectivity of magnetic supermirror sample', vmin=1e-4))" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.13" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/pyproject.toml b/pyproject.toml index 3bc429a2..2ad4887b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -40,6 +40,8 @@ dependencies = [ "scippneutron>=24.10.0", "scippnexus>=24.9.1", "essreduce>=25.11.0", + "esspolarization>=25.7.0", + "essreduce>=25.4.0", "pandas>=2.1.2", ] diff --git a/requirements/base.in b/requirements/base.in index d83b9ef1..335a962f 100644 --- a/requirements/base.in +++ b/requirements/base.in @@ -12,4 +12,6 @@ scipp>=24.09.1 scippneutron>=24.10.0 scippnexus>=24.9.1 essreduce>=25.11.0 +esspolarization>=25.7.0 +essreduce>=25.4.0 pandas>=2.1.2 diff --git a/requirements/base.txt b/requirements/base.txt index 6c2822f5..04b28415 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -1,4 +1,4 @@ -# SHA1:ac5d5ed5d7ed7425eb3b92f0cfa128fe6db146b4 +# SHA1:7d24e61b002964358637133b46eac8bab3600eae # # This file was generated by pip-compile-multi. # To update, run: @@ -17,20 +17,28 @@ cyclebane==24.10.0 # via sciline cycler==0.12.1 # via matplotlib -dask==2025.10.0 - # via -r base.in +dask==2025.11.0 + # via + # -r base.in + # esspolarization dnspython==2.8.0 # via email-validator email-validator==2.3.0 # via scippneutron -essreduce==25.11.0 +esspolarization==25.10.0 # via -r base.in +essreduce==25.11.1 + # via + # -r base.in + # esspolarization fonttools==4.60.1 # via matplotlib fsspec==2025.10.0 # via dask graphviz==0.21 - # via -r base.in + # via + # -r base.in + # esspolarization h5py==3.15.1 # via # scippneutron @@ -81,9 +89,9 @@ plopp==25.10.0 # via # -r base.in # scippneutron -pydantic==2.12.3 +pydantic==2.12.4 # via scippneutron -pydantic-core==2.41.4 +pydantic-core==2.41.5 # via pydantic pyparsing==3.2.5 # via matplotlib @@ -93,7 +101,6 @@ python-dateutil==2.9.0.post0 # matplotlib # pandas # scippneutron - # scippnexus pytz==2025.2 # via pandas pyyaml==6.0.3 @@ -103,10 +110,12 @@ pyyaml==6.0.3 sciline==25.11.1 # via # -r base.in + # esspolarization # essreduce -scipp==25.8.0 +scipp==25.11.0 # via # -r base.in + # esspolarization # essreduce # scippneutron # scippnexus @@ -114,13 +123,14 @@ scippneutron==25.7.0 # via # -r base.in # essreduce -scippnexus==25.6.0 +scippnexus==25.11.0 # via # -r base.in # essreduce # scippneutron scipy==1.16.3 # via + # esspolarization # scippneutron # scippnexus six==1.17.0 diff --git a/requirements/basetest.txt b/requirements/basetest.txt index 375775b1..30154e09 100644 --- a/requirements/basetest.txt +++ b/requirements/basetest.txt @@ -5,7 +5,7 @@ # # requirements upgrade # -certifi==2025.10.5 +certifi==2025.11.12 # via requests charset-normalizer==3.4.4 # via requests @@ -25,7 +25,7 @@ pooch==1.8.2 # via -r basetest.in pygments==2.19.2 # via pytest -pytest==8.4.2 +pytest==9.0.0 # via -r basetest.in requests==2.32.5 # via pooch diff --git a/requirements/ci.txt b/requirements/ci.txt index 5bc02f40..116c2bf8 100644 --- a/requirements/ci.txt +++ b/requirements/ci.txt @@ -7,7 +7,7 @@ # cachetools==6.2.1 # via tox -certifi==2025.10.5 
+certifi==2025.11.12 # via requests chardet==5.2.0 # via tox diff --git a/requirements/dev.in b/requirements/dev.in index 53ddf47e..0478405a 100644 --- a/requirements/dev.in +++ b/requirements/dev.in @@ -6,6 +6,6 @@ -r test.in -r wheels.in copier -jupyterlab +jupyterlab>=4.4.3 pip-compile-multi pre-commit diff --git a/requirements/dev.txt b/requirements/dev.txt index 4afeb88a..ac827eb6 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -1,4 +1,4 @@ -# SHA1:efd19a3a98c69fc3d6d6233ed855de7e4a208f74 +# SHA1:d4ffe53db30990b73c1af821d46399aafea47d77 # # This file was generated by pip-compile-multi. # To update, run: diff --git a/requirements/docs.txt b/requirements/docs.txt index 7880482d..ed083eff 100644 --- a/requirements/docs.txt +++ b/requirements/docs.txt @@ -30,7 +30,7 @@ bleach[css]==6.3.0 # via nbconvert bqplot==0.12.45 # via ipydatagrid -certifi==2025.10.5 +certifi==2025.11.12 # via requests charset-normalizer==3.4.4 # via requests @@ -66,7 +66,7 @@ ipykernel==7.1.0 # via -r docs.in ipympl==0.9.8 # via -r docs.in -ipython==9.6.0 +ipython==9.7.0 # via # -r docs.in # ipykernel @@ -173,7 +173,7 @@ pybtex==0.25.1 # sphinxcontrib-bibtex pybtex-docutils==1.0.3 # via sphinxcontrib-bibtex -pydantic-settings==2.11.0 +pydantic-settings==2.12.0 # via autodoc-pydantic pydata-sphinx-theme==0.16.1 # via -r docs.in diff --git a/requirements/nightly.in b/requirements/nightly.in index a598c800..5527f553 100644 --- a/requirements/nightly.in +++ b/requirements/nightly.in @@ -6,6 +6,8 @@ python-dateutil graphviz orsopy>=1.2 essreduce>=25.11.0 +esspolarization>=25.7.0 +essreduce>=25.4.0 pandas>=2.1.2 pytest>=7.0 pooch>=1.5 diff --git a/requirements/nightly.txt b/requirements/nightly.txt index b1a1dc31..6f62e2ea 100644 --- a/requirements/nightly.txt +++ b/requirements/nightly.txt @@ -1,4 +1,4 @@ -# SHA1:1e183c3e9d303722f2579385b8d7861f9200b810 +# SHA1:ccf027ae6c53853baac5291a4a5faf53045d0292 # # This file was generated by pip-compile-multi. 
# To update, run: @@ -10,7 +10,7 @@ annotated-types==0.7.0 # via pydantic -certifi==2025.10.5 +certifi==2025.11.12 # via requests charset-normalizer==3.4.4 # via requests @@ -24,20 +24,28 @@ cyclebane==24.10.0 # via sciline cycler==0.12.1 # via matplotlib -dask==2025.10.0 - # via -r nightly.in +dask==2025.11.0 + # via + # -r nightly.in + # esspolarization dnspython==2.8.0 # via email-validator email-validator==2.3.0 # via scippneutron -essreduce==25.11.0 +esspolarization==25.10.0 # via -r nightly.in +essreduce==25.11.1 + # via + # -r nightly.in + # esspolarization fonttools==4.60.1 # via matplotlib fsspec==2025.10.0 # via dask graphviz==0.21 - # via -r nightly.in + # via + # -r nightly.in + # esspolarization h5py==3.15.1 # via # scippneutron @@ -64,7 +72,7 @@ matplotlib==3.10.7 # plopp mpltoolbox==25.10.0 # via scippneutron -networkx==3.5 +networkx==3.6rc0 # via cyclebane numpy==2.3.4 # via @@ -100,15 +108,15 @@ pluggy==1.6.0 # via pytest pooch==1.8.2 # via -r nightly.in -pydantic==2.12.3 +pydantic==2.12.4 # via scippneutron -pydantic-core==2.41.4 +pydantic-core==2.41.5 # via pydantic pygments==2.19.2 # via pytest pyparsing==3.3.0a1 # via matplotlib -pytest==8.4.2 +pytest==9.0.0 # via -r nightly.in python-dateutil==2.9.0.post0 # via @@ -127,10 +135,12 @@ requests==2.32.5 sciline @ git+https://github.com/scipp/sciline@main # via # -r nightly.in + # esspolarization # essreduce scipp==100.0.0.dev0 # via # -r nightly.in + # esspolarization # essreduce # scippneutron # scippnexus @@ -145,6 +155,7 @@ scippnexus @ git+https://github.com/scipp/scippnexus@main # scippneutron scipy==1.16.3 # via + # esspolarization # scippneutron # scippnexus six==1.17.0 diff --git a/requirements/static.txt b/requirements/static.txt index 991a8921..6e237b24 100644 --- a/requirements/static.txt +++ b/requirements/static.txt @@ -17,7 +17,7 @@ nodeenv==1.9.1 # via pre-commit platformdirs==4.5.0 # via virtualenv -pre-commit==4.3.0 +pre-commit==4.4.0 # via -r static.in pyyaml==6.0.3 # via pre-commit diff --git a/src/ess/amor/normalization.py b/src/ess/amor/normalization.py index d4c450b0..9c0b1d8a 100644 --- a/src/ess/amor/normalization.py +++ b/src/ess/amor/normalization.py @@ -85,6 +85,10 @@ def evaluate_reference_at_sample_coords( ref.coords["sample_size"] = sample.coords["sample_size"] ref.coords["detector_spatial_resolution"] = detector_spatial_resolution ref.coords["wavelength"] = sc.midpoints(ref.coords["wavelength"]) + + if "theta" in ref.coords: + ref.coords.pop("theta") + ref = ref.transform_coords( ( "Q", diff --git a/src/ess/estia/calibration.py b/src/ess/estia/calibration.py new file mode 100644 index 00000000..24b3a686 --- /dev/null +++ b/src/ess/estia/calibration.py @@ -0,0 +1,80 @@ +from dataclasses import dataclass +from typing import Self + +import scipp as sc + + +@dataclass +class PolarizationCalibrationParameters: + I0: sc.DataArray + '''Reference intensity.''' + Pp: sc.DataArray + '''Effective polarization of polarizer when polarizer flipper is off.''' + Pa: sc.DataArray + '''Effective polarization of polarizer when polarizer flipper is on.''' + Ap: sc.DataArray + '''Effective polarization of analyzer when analyzer flipper if on.''' + Aa: sc.DataArray + '''Effective polarization of analyzer when analyzer flipper is off.''' + Rspp: sc.DataArray + '''Magnetic supermirror reflectivity for neutrons with + spin parallel to instrument polarization.''' + Rsaa: sc.DataArray + '''Magnetic supermirror reflectivity for neutrons with + spin anti-parallel to instrument polarization.''' + + @classmethod + def 
from_reference_measurements( + cls: type[Self], + Io: tuple[sc.DataArray, sc.DataArray, sc.DataArray, sc.DataArray], + Is: tuple[sc.DataArray, sc.DataArray, sc.DataArray, sc.DataArray], + ) -> Self: + """ + Solves for the calibration parameters given the reference + measurements. + + See https://doi.org/10.1016/S0921-4526(00)00823-1. + + Parameters + ------------ + Io: + Intensity from measurements with perfect + non-magnetic supermirror. + Is: + Intensity from measurements with + magnetic supermirror. + + Returns + ---------- + : + Polarization calibration parameters + """ + Iopp, Iopa, Ioap, Ioaa = Io + Ipp, Ipa, Iap, Iaa = Is + + I0 = 2 * (Iopp * Ioaa - Iopa * Ioap) / (Iopp + Ioaa - Iopa - Ioap) + rho = (Ioaa - Ioap) / (Iopp - Iopa) + alp = (Ioaa - Iopa) / (Iopp - Ioap) + + num_base = rho * alp * Ipp + Iaa + rho * Ipa + alp * Iap + den_base = Ipp + Iaa - Ipa - Iap + term_rho = rho * (Ipp - Ipa) - Iaa + Iap + term_alp = alp * (Ipp - Iap) - Iaa + Ipa + + Rspp_plus_Rsaa = 4 * num_base / ((1 + rho) * (1 + alp) * I0) + + Pp = sc.sqrt(den_base * term_alp / (num_base * term_rho)) + Ap = sc.sqrt(den_base * term_rho / (num_base * term_alp)) + Rs = sc.sqrt(term_alp * term_rho / (num_base * den_base)) + + Pa = -rho * Pp + Aa = -alp * Ap + + Rspp_minus_Rsaa = Rs * Rspp_plus_Rsaa + Rspp = (Rspp_plus_Rsaa + Rspp_minus_Rsaa) / 2 + Rsaa = Rspp_plus_Rsaa - Rspp + + return cls(I0, Pp, Pa, Ap, Aa, Rspp, Rsaa) + + +providers = () diff --git a/src/ess/estia/data.py b/src/ess/estia/data.py index 16ae9846..c1fb9f98 100644 --- a/src/ess/estia/data.py +++ b/src/ess/estia/data.py @@ -1,5 +1,6 @@ # SPDX-License-Identifier: BSD-3-Clause # Copyright (c) 2025 Scipp contributors (https://github.com/scipp) +from multiprocessing.pool import ThreadPool import scipp as sc @@ -34,6 +35,27 @@ "examples/NiTiML.ref": "md5:9769b884dfa09d34b6ae449b640463a1", "examples/Si-SiO2.ref": "md5:436e2312e137b63bf31cc39064d28864", "examples/Si-Ni.ref": "md5:76fcfc655635086060387163c9175ab4", + # Spin flip example from McStas simulation. + # All runs have the same sample rotation angle but different samples. + # Each sample is measured using all four flipper setting. 
+ "spin_flip_example/ground_truth_spin_down_reflectivity.h5": "md5:1b3f4c70be6e2d5bae35836c378a0762", # noqa: E501 + "spin_flip_example/ground_truth_spin_up_reflectivity.h5": "md5:77ded33407a5004587e475ede0312424", # noqa: E501 + "spin_flip_example/spin_flip_sample_onon.h5": "md5:da3a075869d4b5525f58c317c914059d", # noqa: E501 + "spin_flip_example/spin_flip_sample_offon.h5": "md5:7382118990012204c6f1d0419b23c68b", # noqa: E501 + "spin_flip_example/magnetic_supermirror_2_offoff.h5": "md5:c2176171d227e58dc0fbf2e81385b1cc", # noqa: E501 + "spin_flip_example/magnetic_supermirror_onon.h5": "md5:bdc4beaca386acda53afdae361a815d9", # noqa: E501 + "spin_flip_example/supermirror_offon.h5": "md5:88caf8839cc3ea06589a41a63847dc1e", # noqa: E501 + "spin_flip_example/magnetic_supermirror_onoff.h5": "md5:3e2dda52e536f1a00c7a26f30d0ed63f", # noqa: E501 + "spin_flip_example/magnetic_supermirror_2_onon.h5": "md5:670df3ed849239208f2b47512c9f6fa1", # noqa: E501 + "spin_flip_example/magnetic_supermirror_2_offon.h5": "md5:6017dc15ed7ab37265d1376aec4fa76e", # noqa: E501 + "spin_flip_example/magnetic_supermirror_offoff.h5": "md5:1a992b7e74d5e5be3267674ea22f5b1c", # noqa: E501 + "spin_flip_example/supermirror_onon.h5": "md5:05929e088691601210a5fdc068375b59", + "spin_flip_example/spin_flip_sample_offoff.h5": "md5:249024737f59c83e28efe9633a3b6b73", # noqa: E501 + "spin_flip_example/spin_flip_sample_onoff.h5": "md5:897ce7e94c748a2bb9eb44b4dc9f023a", # noqa: E501 + "spin_flip_example/magnetic_supermirror_2_onoff.h5": "md5:11dcd560b1d90b0dde699a55b2998d15", # noqa: E501 + "spin_flip_example/magnetic_supermirror_offon.h5": "md5:f2e06c989c347e8e1f32e3f2a48580ce", # noqa: E501 + "spin_flip_example/supermirror_onoff.h5": "md5:3ae9863d13d79c24e29017948bd383ab", # noqa: E501 + "spin_flip_example/supermirror_offoff.h5": "md5:22bc20099b2f456e459391189ee60977", # noqa: E501 }, ) @@ -103,8 +125,51 @@ def parse(fname): raise ValueError(f'"{name}" is not a valid sample name') +def estia_mcstas_spin_flip_example(sample, flipper_setting): + return _registry.get_path(f'spin_flip_example/{sample}_{flipper_setting}.h5') + + +def estia_mcstas_spin_flip_example_groundtruth(up_or_down): + if up_or_down == 'down': + return _registry.get_path( + 'spin_flip_example/ground_truth_spin_down_reflectivity.h5' + ) + if up_or_down == 'up': + return _registry.get_path( + 'spin_flip_example/ground_truth_spin_up_reflectivity.h5' + ) + raise ValueError(f'Ground truth curve for spin state "{up_or_down}" does not exist') + + +def _refresh_cache(args): + estia_mcstas_spin_flip_example(*args) + + +def estia_mcstas_spin_flip_example_download_all_to_cache(): + # Run once to create the folder structure without conflicts + _refresh_cache(('supermirror', 'offoff')) + with ThreadPool(20) as pool: + for _ in pool.map( + _refresh_cache, + [ + (sample, setting) + for sample in ( + 'supermirror', + 'magnetic_supermirror', + 'magnetic_supermirror_2', + 'spin_flip_sample', + ) + for setting in ('offoff', 'offon', 'onoff', 'onon') + ], + ): + pass + + __all__ = [ "estia_mcstas_example", "estia_mcstas_reference_run", "estia_mcstas_sample_run", + "estia_mcstas_spin_flip_example", + "estia_mcstas_spin_flip_example_download_all_to_cache", + "estia_mcstas_spin_flip_example_groundtruth", ] diff --git a/src/ess/estia/mcstas.py b/src/ess/estia/mcstas.py index e1856196..f1a07f99 100644 --- a/src/ess/estia/mcstas.py +++ b/src/ess/estia/mcstas.py @@ -1,6 +1,6 @@ # Copyright (c) 2025 Scipp contributors (https://github.com/scipp) import h5py -import numpy as np +import pandas as pd 
import scipp as sc from ess.reflectometry.load import load_h5 @@ -33,9 +33,9 @@ def parse_events_ascii(lines): key, _, value = value.partition('=') meta[key] = value else: - data.append(list(map(float, line.strip().split(' ')))) + break - data = np.array(data) + data = pd.read_csv(lines, comment='#', header=None, delimiter=' ') if 'ylabel' in meta: labels = meta['ylabel'].strip().split(' ') @@ -46,10 +46,15 @@ def parse_events_ascii(lines): # Consult the McStas documentation # (section 2.2.1) https://www.mcstas.org/documentation/manual/ # for more information. - sc.array(dims=['events'], values=data[:, 0], variances=data[:, 0] ** 2), + sc.array( + dims=['events'], + values=data[0].to_numpy(), + variances=data[0].to_numpy() ** 2, + ), coords={ - label: sc.array(dims=['events'], values=values) - for values, label in zip(data[:, 1:].T, labels[1:], strict=False) + label: sc.array(dims=['events'], values=data[i].to_numpy()) + for i, label in enumerate(labels) + if i != 0 }, ) for k, v in meta.items(): diff --git a/src/ess/estia/types.py b/src/ess/estia/types.py index 8ec386fd..8acdf9fc 100644 --- a/src/ess/estia/types.py +++ b/src/ess/estia/types.py @@ -6,5 +6,3 @@ WavelengthResolution = NewType("WavelengthResolution", sc.Variable) AngularResolution = NewType("AngularResolution", sc.Variable) SampleSizeResolution = NewType("SampleSizeResolution", sc.Variable) - -CoordTransformationGraph = NewType("CoordTransformationGraph", dict) diff --git a/src/ess/reflectometry/normalization.py b/src/ess/reflectometry/normalization.py index 312df934..1e864c18 100644 --- a/src/ess/reflectometry/normalization.py +++ b/src/ess/reflectometry/normalization.py @@ -20,6 +20,19 @@ ) +def reduce_to_q(da: sc.DataArray, qbins: int | sc.Variable): + if da.bins: + return da.bin(Q=qbins, dim=da.dims) + return da.hist(Q=qbins) + + +def reduce_from_events_to_lz(da: sc.DataArray, wbins: int | sc.Variable): + out = da.bin(wavelength=wbins, dim=('strip',)) + if 'position' in da.coords: + out.coords['position'] = da.coords['position'].mean('strip') + return out + + def reduce_reference( reference: ReducibleData[ReferenceRun], wavelength_bins: WavelengthBins, @@ -40,11 +53,9 @@ def reduce_reference( ) reference = reference.bins.assign_masks(invalid=sc.isnan(R)) reference = reference / R - out = reference.bins.concat(('strip',)).hist(wavelength=wavelength_bins) - - if 'position' in reference.coords: - out.coords['position'] = reference.coords['position'].mean('strip') - return out + binned = reduce_from_events_to_lz(reference, wavelength_bins) + h = binned.hist() + return h def reduce_sample_over_q( @@ -59,12 +70,10 @@ def reduce_sample_over_q( Returns reflectivity as a function of :math:`Q`. """ - s = sample.bins.concat().bin(Q=qbins) - h = sc.values( - (reference if reference.bins is None else reference.bins.concat()).hist( - Q=s.coords['Q'] - ) - ) + s = reduce_to_q(sample, qbins) + h = sc.values(reduce_to_q(reference, s.coords['Q'])) + if h.bins: + h = h.hist() R = s / h.data if 'Q_resolution' in reference.coords or 'Q_resolution' in reference.bins.coords: resolution = ( @@ -96,9 +105,7 @@ def reduce_sample_over_zw( Returns reflectivity as a function of ``blade``, ``wire`` and :math:`\\wavelength`. 
""" - return sample.bins.concat(('strip',)).bin(wavelength=wbins) / sc.values( - reference.data - ) + return reduce_from_events_to_lz(sample, wbins) / sc.values(reference.data) providers = ( diff --git a/tests/estia/calibration_test.py b/tests/estia/calibration_test.py new file mode 100644 index 00000000..381f1630 --- /dev/null +++ b/tests/estia/calibration_test.py @@ -0,0 +1,69 @@ +import numpy as np +import pytest +import scipp as sc +from scipp.testing import assert_allclose + +from ess.estia.calibration import PolarizationCalibrationParameters + + +def _kronecker_product(A, B): + return [ + [A[ia][ja] * B[ib][jb] for ja in range(2) for jb in range(2)] + for ia in range(2) + for ib in range(2) + ] + + +def _matvec(A, b): + return [sum(A[i][j] * b[j] for j in range(len(b))) for i in range(len(A))] + + +def _polarization_matrix(Pp, Pa, Ap, Aa): + return _kronecker_product( + [ + [(1 + Pp) / 2, (1 - Pp) / 2], + [(1 + Pa) / 2, (1 - Pa) / 2], + ], + [ + [(1 + Ap) / 2, (1 - Ap) / 2], + [(1 + Aa) / 2, (1 - Aa) / 2], + ], + ) + + +def generate_valid_calibration_parameters(): + I0 = np.random.random() + Pp = np.random.random() + Pa = -np.random.random() + Ap = np.random.random() + Aa = -np.random.random() + Rspp = np.random.random() + Rsaa = Rspp * np.random.random() + return tuple(map(sc.scalar, (I0, Pp, Pa, Ap, Aa, Rspp, Rsaa))) + + +def intensity_from_parameters(I0, Pp, Pa, Ap, Aa, Rspp, Rspa, Rsap, Rsaa): + return [ + I0 * v + for v in _matvec(_polarization_matrix(Pp, Pa, Ap, Aa), [Rspp, Rspa, Rsap, Rsaa]) + ] + + +@pytest.mark.parametrize("seed", range(10)) +def test_calibration_solve_recovers_input(seed): + np.random.seed(seed) + I0, Pp, Pa, Ap, Aa, Rspp, Rsaa = generate_valid_calibration_parameters() + Io = intensity_from_parameters( + I0, Pp, Pa, Ap, Aa, sc.scalar(1), sc.scalar(0), sc.scalar(0), sc.scalar(1) + ) + Is = intensity_from_parameters( + I0, Pp, Pa, Ap, Aa, Rspp, sc.scalar(0), sc.scalar(0), Rsaa + ) + cal = PolarizationCalibrationParameters.from_reference_measurements(Io, Is) + tuple( + map( + assert_allclose, + (cal.I0, cal.Pp, cal.Pa, cal.Ap, cal.Aa, cal.Rspp, cal.Rsaa), + (I0, Pp, Pa, Ap, Aa, Rspp, Rsaa), + ) + )