Skip to content

Commit 80edce8

Browse files
Merge branch 'master' of github.com:btschwertfeger/python-cmethods
2 parents ad9da08 + b609c6e commit 80edce8

File tree

9 files changed

+322
-32
lines changed

9 files changed

+322
-32
lines changed
Lines changed: 39 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,39 @@
1+
# -*- coding: utf-8 -*-
2+
# Copyright (C) 2024 Benjamin Thomas Schwertfeger
3+
# GitHub: https://github.com/btschwertfeger
4+
#
5+
# Workflow that approves and merges all pull requests from the dependabot[bot]
6+
# author.
7+
#
8+
# Source (May, 2024):
9+
# - https://blog.somewhatabstract.com/2021/10/11/setting-up-dependabot-with-github-actions-to-approve-and-merge/
10+
11+
name: Dependabot auto-merge
12+
on: pull_request_target
13+
14+
permissions:
15+
pull-requests: write
16+
contents: write
17+
18+
jobs:
19+
dependabot:
20+
runs-on: ubuntu-latest
21+
if: ${{ github.actor == 'dependabot[bot]' }}
22+
steps:
23+
- name: Dependabot metadata
24+
id: dependabot-metadata
25+
uses: dependabot/fetch-metadata@v2
26+
with:
27+
github-token: "${{ secrets.GITHUB_TOKEN }}"
28+
- name: Approve a PR
29+
if: ${{ steps.dependabot-metadata.outputs.update-type != 'version-update:semver-major' }}
30+
run: gh pr review --approve "$PR_URL"
31+
env:
32+
PR_URL: ${{ github.event.pull_request.html_url }}
33+
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
34+
- name: Enable auto-merge for Dependabot PRs
35+
if: ${{ steps.dependabot-metadata.outputs.update-type != 'version-update:semver-major' }}
36+
run: gh pr merge --auto --squash "$PR_URL"
37+
env:
38+
PR_URL: ${{ github.event.pull_request.html_url }}
39+
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

cmethods/distribution.py

Lines changed: 0 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -62,11 +62,6 @@ def quantile_mapping(
6262

6363
cdf_obs = get_cdf(obs, xbins)
6464
cdf_simh = get_cdf(simh, xbins)
65-
cdf_simh = np.interp(
66-
cdf_simh,
67-
(cdf_simh.min(), cdf_simh.max()),
68-
(cdf_obs.min(), cdf_obs.max()),
69-
)
7065

7166
if kind in ADDITIVE:
7267
epsilon = np.interp(simp, xbins, cdf_simh) # Eq. 1
@@ -129,11 +124,6 @@ def detrended_quantile_mapping(
129124

130125
cdf_obs = get_cdf(obs, xbins)
131126
cdf_simh = get_cdf(simh, xbins)
132-
cdf_simh = np.interp(
133-
cdf_simh,
134-
(cdf_simh.min(), cdf_simh.max()),
135-
(cdf_obs.min(), cdf_obs.max()),
136-
)
137127

138128
# detrended => shift mean of $X_{sim,p}$ to range of $X_{sim,h}$ to adjust extremes
139129
res = np.zeros(len(simp.values))

cmethods/utils.py

Lines changed: 8 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -149,11 +149,11 @@ def get_pdf(
149149
:linenos:
150150
:caption: Compute the probability density function :math:`P(x)`
151151
152-
>>> from cmethods import CMethods as cm
152+
>>> from cmethods.utils import get_pdf
153153
154154
>>> x = [1, 2, 3, 4, 5, 5, 5, 6, 7, 8, 9, 10]
155155
>>> xbins = [0, 3, 6, 10]
156-
>>> print(cm.get_pdf(x=x, xbins=xbins))
156+
>>> print(get_pdf(x=x, xbins=xbins))
157157
[2, 5, 5]
158158
"""
159159
pdf, _ = np.histogram(x, xbins)
@@ -178,17 +178,18 @@ def get_cdf(
178178
179179
.. code-block:: python
180180
:linenos:
181-
:caption: Compute the cmmulative distribution function :math:`F(x)`
181+
:caption: Compute the cumulative distribution function :math:`F(x)`
182182
183-
>>> from cmethods import CMethods as cm
183+
>>> from cmethods.utils import get_cdf
184184
185185
>>> x = [1, 2, 3, 4, 5, 5, 5, 6, 7, 8, 9, 10]
186186
>>> xbins = [0, 3, 6, 10]
187-
>>> print(cm.get_cdf(x=x, xbins=xbins))
188-
[0, 2, 7, 12]
187+
>>> print(get_cdf(x=x, xbins=xbins))
188+
[0.0, 0.16666667, 0.58333333, 1.]
189189
"""
190190
pdf, _ = np.histogram(x, xbins)
191-
return np.insert(np.cumsum(pdf), 0, 0.0)
191+
cdf = np.insert(np.cumsum(pdf), 0, 0.0)
192+
return cdf / cdf[-1]
192193

193194

194195
def get_inverse_of_cdf(

doc/methods.rst

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -171,7 +171,9 @@ The Delta Method bias correction technique can be applied on stochastic and
171171
non-stochastic climate variables to minimize deviations in the mean values
172172
between predicted and observed time-series of past and future time periods.
173173

174-
This method requires that the time series can be grouped by ``time.month``.
174+
This method requires that the time series can be grouped by ``time.month`` while
175+
the reference data of the control period must have the same temporal resolution
176+
as the data that is going to be adjusted.
175177

176178
Since the multiplicative scaling can result in very high scaling factors, a
177179
maximum scaling factor of 10 is set. This can be changed by passing the desired

tests/helper.py

Lines changed: 4 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,6 @@
99
from __future__ import annotations
1010

1111
from functools import cache
12-
from typing import List
1312

1413
import numpy as np
1514
import xarray as xr
@@ -64,7 +63,7 @@ def get_datasets(kind: str) -> tuple[xr.Dataset, xr.Dataset, xr.Dataset, xr.Data
6463
)
6564
latitudes = np.arange(23, 27, 1)
6665

67-
def get_hist_temp_for_lat(lat: int) -> List[float]:
66+
def get_hist_temp_for_lat(lat: int) -> list[float]:
6867
"""Returns a fake interval time series by latitude value"""
6968
return 273.15 - (
7069
lat * np.cos(2 * np.pi * historical_time.dayofyear / 365)
@@ -73,7 +72,7 @@ def get_hist_temp_for_lat(lat: int) -> List[float]:
7372
+ 0.1 * (historical_time - historical_time[0]).days / 365
7473
)
7574

76-
def get_fake_hist_precipitation_data() -> List[float]:
75+
def get_fake_hist_precipitation_data() -> list[float]:
7776
"""Returns ratio based fake time series"""
7877
pr = (
7978
np.cos(2 * np.pi * historical_time.dayofyear / 365)
@@ -122,7 +121,7 @@ def get_dataset(data, time, kind: str) -> xr.Dataset:
122121
)
123122
obsh = get_dataset(data, historical_time, kind=kind)
124123
obsp = get_dataset(data * 1.02, historical_time, kind=kind)
125-
simh = get_dataset(data * 0.98, historical_time, kind=kind)
126-
simp = get_dataset(data * 0.09, future_time, kind=kind)
124+
simh = get_dataset(data * 0.95, historical_time, kind=kind)
125+
simp = get_dataset(data * 0.965, future_time, kind=kind)
127126

128127
return obsh, obsp, simh, simp

tests/test_methods.py

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -27,9 +27,9 @@
2727
("method", "kind"),
2828
[
2929
("linear_scaling", "+"),
30+
("linear_scaling", "*"),
3031
("variance_scaling", "+"),
3132
("delta_method", "+"),
32-
("linear_scaling", "*"),
3333
("delta_method", "*"),
3434
],
3535
)
@@ -65,9 +65,9 @@ def test_1d_scaling(
6565
("method", "kind"),
6666
[
6767
("linear_scaling", "+"),
68+
("linear_scaling", "*"),
6869
("variance_scaling", "+"),
6970
("delta_method", "+"),
70-
("linear_scaling", "*"),
7171
("delta_method", "*"),
7272
],
7373
)
@@ -111,8 +111,8 @@ def test_3d_scaling(
111111
("method", "kind"),
112112
[
113113
("linear_scaling", "+"),
114-
("variance_scaling", "+"),
115114
("linear_scaling", "*"),
115+
("variance_scaling", "+"),
116116
],
117117
)
118118
def test_3d_scaling_different_time_span(
@@ -160,8 +160,8 @@ def test_3d_scaling_different_time_span(
160160
("method", "kind"),
161161
[
162162
("quantile_mapping", "+"),
163-
("quantile_delta_mapping", "+"),
164163
("quantile_mapping", "*"),
164+
("quantile_delta_mapping", "+"),
165165
("quantile_delta_mapping", "*"),
166166
],
167167
)
@@ -192,8 +192,8 @@ def test_1d_distribution(
192192
("method", "kind"),
193193
[
194194
("quantile_mapping", "+"),
195-
("quantile_delta_mapping", "+"),
196195
("quantile_mapping", "*"),
196+
("quantile_delta_mapping", "+"),
197197
("quantile_delta_mapping", "*"),
198198
],
199199
)
@@ -224,8 +224,8 @@ def test_3d_distribution(
224224
("method", "kind"),
225225
[
226226
("quantile_mapping", "+"),
227-
("quantile_delta_mapping", "+"),
228227
("quantile_mapping", "*"),
228+
("quantile_delta_mapping", "+"),
229229
("quantile_delta_mapping", "*"),
230230
],
231231
)

0 commit comments

Comments
 (0)