
Commit 34af0a4

Merge pull request #244 from JuliaClimate/hurst

new hurst code

2 parents 8b437c6 + fd7044e

22 files changed: +1255 -1989 lines

Project.toml

Lines changed: 19 additions & 0 deletions
```diff
@@ -6,21 +6,40 @@ version = "0.24.2"
 
 [deps]
 CFTime = "179af706-886a-5703-950a-314cd64e0468"
+CircularArrays = "7a955b69-7140-5f4e-a0ed-f168c5e2e749"
 DataFrames = "a93c6f00-e57d-5684-b7b6-d8193f3e46c0"
 Dates = "ade2ca70-3891-5945-98fb-dc099432e06a"
 DimensionalData = "0703355e-b756-11e9-17c0-8b28908087d0"
+Extremes = "fe3fe864-1b39-11e9-20b8-1f96fa57382d"
 Interpolations = "a98d9a8b-a2ab-59e6-89dd-64a1c18fca59"
 LombScargle = "fc60dff9-86e7-5f2f-a8a0-edeadbb75bd9"
 LongMemory = "f5f8e4a8-cb56-40ea-a13e-090cc212d358"
 MarSwitching = "b2b8a7f1-da70-4e5f-beaa-e2774ec39c2f"
 NetCDF = "30363a11-5582-574a-97bb-aa9a979735b9"
 Polynomials = "f27b6e38-b328-58d1-80ce-0feddd5e7a45"
+Shapefile = "8e980c4a-a4fe-5da2-b3a7-4b4b0353a2f4"
 StableRNGs = "860ef19b-820b-49d6-a774-d7a799459cd3"
 Statistics = "10745b16-79ce-11e8-11f9-7d13ad32a3b2"
 YAXArrays = "c21b50f5-aa40-41ea-b809-c0f5e47bfa5c"
 Zarr = "0a941bbe-ad1d-11e8-39d9-ab76183a1d99"
 
 [compat]
+CFTime = "0.1"
+CircularArrays = "1.3"
+DataFrames = "1.1"
+DimensionalData = "0.27, 0.28, 0.29"
+Extremes = "1"
+Interpolations = "0.13, 0.14, 0.15"
+LombScargle = "1"
+LongMemory = "0.1.2"
+MarSwitching = "0.2"
+NetCDF = "0.10, 0.11, 0.12"
+Polynomials = "4"
+Shapefile = "0.10, 0.11, 0.12, 0.13"
+StableRNGs = "1"
+Statistics = "1.11"
+YAXArrays = "0.6"
+Zarr = "0.9"
 julia = "1.10"
 
 [extras]
```
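
The `[compat]` section now bounds every direct dependency rather than leaving versions free. A minimal sketch of checking that these bounds actually resolve, assuming a local clone of the package (the path is hypothetical):

```julia
using Pkg

# Activate the cloned package environment (hypothetical path), then let the
# resolver pick versions inside the new [compat] bounds; Pkg.resolve() throws
# if any bound is unsatisfiable.
Pkg.activate(expanduser("~/dev/ClimateTools.jl"))
Pkg.resolve()
Pkg.status()   # show the versions the resolver selected
```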

src/ClimateTools.jl

Lines changed: 14 additions & 1 deletion
```diff
@@ -1,8 +1,10 @@
 module ClimateTools
 
+using CircularArrays
 using CFTime
 using Dates
 using DimensionalData
+using Extremes
 using LombScargle
 using LongMemory
 using Statistics
@@ -13,26 +15,37 @@ using Polynomials
 using Interpolations
 using DataFrames
 using MarSwitching
+using Shapefile
 
 include("aggregate.jl")
 include("autocorrelation.jl")
 include("biascorrect.jl")
 include("climatology.jl")
 include("ensembles.jl")
-#include("functions.jl")
+include("functions.jl")
+include("gev.jl")
 include("markov.jl")
 include("power.jl")
 include("plotting.jl")
 include("processERA5.jl")
+include("regrid.jl")
+include("spatial.jl")
+include("statistics.jl")
 include("utils.jl")
 
 
 export daily_fct, climato_tp, subsample, dates_builder_yearmonth, dates_builder_yearmonth_hardcode, dates_builder_yearmonthday, dates_builder_yearmonthday_hardcode, diff, cumsum, yearly_clim
+export ERA5Land_dailysum
+export m2mm
 export yearly_clim
+export quantiles
+export regrid_cube
 export qqmap, qqmap_bulk
 export ensemble_fct
 export autocorrelation
+export hurst
 export MSModel
+export rlevels_cube
 
 
 end
```
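
With `gev.jl`, `regrid.jl`, `spatial.jl`, and `statistics.jl` now included, the module exports the corresponding entry points. A hedged sketch of the new surface from user code; the Zarr store path is illustrative, and only the `hurst` and `autocorrelation` signatures are confirmed by this PR:

```julia
using ClimateTools, YAXArrays

# Open a gridded dataset with a "time" axis (store path is illustrative).
ds = Cube("era5_t2m.zarr")

H  = hurst(ds; k=20)              # new export: Hurst exponent per grid cell
ac = autocorrelation(ds; lags=30) # autocorrelation along the time axis
```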

src/EFI.jl

Lines changed: 25 additions & 0 deletions
@@ -0,0 +1,25 @@
1+
function estimate_prob(xout, xin1, xin2; da_cqt_quantile)
2+
3+
# v_cqt = convert.(Float64,collect(da_cqt[31,31,:].data)) # the grid where we want to evaluate the interpolated value (ex. coords variable below)
4+
# v_eqt = collect(da_eqt[:,31,31].data) # the x-coordinate of the initial data
5+
6+
# v_cqt_quantile = collect(da_cqt.quantile); # The y-coordinates of the data points, same length as `xp`.
7+
8+
itp = linear_interpolation(Interpolations.deduplicate_knots!(xin2), da_cqt_quantile, extrapolation_bc=Flat());
9+
xout .= @. itp(xin1)
10+
11+
end
12+
13+
14+
function estimate_prob(da_cqt::YAXArray, da_eq::YAXArray)
15+
16+
Quantiles = collect(da_cqt.quantile)
17+
18+
# Dimensions
19+
indims_da_cqt = InDims("quantile")
20+
indims_da_eqt = InDims("Quantiles")
21+
outdims = OutDims(Dim{:Probability}(Quantiles))
22+
23+
return mapCube(estimate_prob, (da_cqt, da_eq), indims=(indims_da_cqt, indims_da_eqt), outdims=outdims, da_cqt_quantile=Quantiles)
24+
25+
end
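
`estimate_prob` converts values into probabilities by linear interpolation along a climatological quantile curve, clamping anything outside the curve with flat extrapolation. A standalone sketch of that step on plain vectors, assuming only Interpolations.jl (the synthetic numbers are illustrative):

```julia
using Interpolations

# A climatological quantile curve: data values (knots) and the quantile
# levels they correspond to.
quantile_values = [2.0, 3.5, 5.0, 6.5, 8.0]    # x-coordinates, sorted
quantile_levels = [0.1, 0.25, 0.5, 0.75, 0.9]  # y-coordinates (probabilities)

# Same construction as the kernel above: deduplicate repeated knots, then
# clamp out-of-range queries with Flat() extrapolation.
itp = linear_interpolation(Interpolations.deduplicate_knots!(copy(quantile_values)),
                           quantile_levels, extrapolation_bc=Flat())

itp(5.0)   # 0.5 -> the median value maps to probability 0.5
itp(9.9)   # 0.9 -> beyond the largest knot, clamped by Flat()
```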

src/aggregate.jl

Lines changed: 15 additions & 15 deletions
```diff
@@ -63,9 +63,9 @@ function daily_max_mean_min(cube::YAXArray, kwargs...)
     index_in_cube = [findall(==(i), time_index) for i in unique(time_index)]
 
     # Dimensions
-    indims = InDims("Ti")
+    indims = InDims("time")
     # outdims = OutDims(RangeAxis("time", dates_builder_yearmonthday(new_dates)), CategoricalAxis("stats",["max","mean","min"]))
-    outdims = OutDims(Dim{:Ti}(dates_builder_yearmonthday(new_dates)), CategoricalAxis("stats",["max","mean","min"]))
+    outdims = OutDims(Dim{:time}(dates_builder_yearmonthday(new_dates)), CategoricalAxis("stats",["max","mean","min"]))
 
     mapCube(daily_max_mean_min, cube, indims=indims, outdims=outdims, index_list=index_in_cube)
 end
@@ -87,7 +87,7 @@ A new YAXArray cube with the computed percentiles along the "time", "longitude",
 function percentiles(cube::YAXArray, quantiles=[0.1, 0.5, 0.9];lonname="longitude", latname="latitude")
 
     indims = InDims("number")
-    outdims = OutDims("Ti", lonname, latname)
+    outdims = OutDims("time", lonname, latname)
 
     mapCube(percentiles, cube, indims=indims, outdims=outdims, quantiles=quantiles)
 end
@@ -124,8 +124,8 @@ Compute the difference between consecutive time steps in the given `cube`.
 function diff(cube::YAXArray)
 
 
-    indims = InDims("Ti")
-    outdims = OutDims("Ti")
+    indims = InDims("time")
+    outdims = OutDims("time")
 
     mapCube(diff, cube, indims=indims, outdims=outdims)
 end
@@ -161,8 +161,8 @@ A new YAXArray cube with the cumulative sum computed along the "time" dimension.
 """
 function cumsum(cube::YAXArray, kwargs...)
 
-    indims = InDims("Ti")
-    outdims = OutDims("Ti")
+    indims = InDims("time")
+    outdims = OutDims("time")
 
     mapCube(cumsum, cube, indims=indims, outdims=outdims)
 end
@@ -212,14 +212,14 @@ A new `YAXArray` with aggregated data on a daily basis.
 
 """
 function daily_fct(cube::YAXArray; fct::Function=mean, shifthour=0, kwargs...)
-    time_to_index = cube.Ti .+ Dates.Hour(shifthour)
+    time_to_index = cube.time .+ Dates.Hour(shifthour)
     time_index = yearmonthday.(time_to_index)
     new_dates = unique(time_index)
    index_in_cube = [findall(==(i), time_index) for i in unique(time_index)]
 
     # Dimensions
-    indims = InDims("Ti")
-    outdims = OutDims(Dim{:Ti}(dates_builder_yearmonthday(new_dates)))
+    indims = InDims("time")
+    outdims = OutDims(Dim{:time}(dates_builder_yearmonthday(new_dates)))
 
     mapCube(daily_fct, cube, indims=indims, outdims=outdims, fct=fct, index_list=index_in_cube)
 end
@@ -254,14 +254,14 @@ A new YAXArray containing the yearly climatology.
 """
 
 function yearly_clim(cube::YAXArray; fct::Function=mean, kwargs...)
-    time_to_index = cube.Ti;
+    time_to_index = cube.time;
     time_index = year.(time_to_index);
     new_dates = unique(time_index);
     index_in_cube = [findall(==(i), time_index) for i in unique(time_index)]
 
     # Dimensions
-    indims = InDims("Ti")
-    outdims = OutDims(Dim{:Ti}(dates_builder_yearmonthday_hardcode(new_dates, imois=7, iday=1)))
+    indims = InDims("time")
+    outdims = OutDims(Dim{:time}(dates_builder_yearmonthday_hardcode(new_dates, imois=7, iday=1)))
 
     mapCube(yearly_clim, cube, fct=fct, indims=indims, outdims=outdims, index_list=index_in_cube)
 end
@@ -295,8 +295,8 @@ function daily_max(cube::YAXArray, kwargs...)
     index_in_cube = [findall(==(i), time_index) for i in unique(time_index)]
 
     # Dimensions
-    indims = InDims("Ti")
-    outdims = OutDims(Dim{:Ti}(dates_builder_yearmonthday(new_dates)))
+    indims = InDims("time")
+    outdims = OutDims(Dim{:time}(dates_builder_yearmonthday(new_dates)))
 
     mapCube(daily_max, cube, indims=indims, outdims=outdims, index_list=index_in_cube)
 end
```
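
Every hunk in this file is the same mechanical rename: the time axis is addressed as `"time"` instead of the `Ti` shorthand, and `cube.Ti` becomes `cube.time`. A hedged sketch of calling code after the rename, on a small synthetic cube (axis names and data are illustrative):

```julia
using YAXArrays, DimensionalData, Dates, Statistics

# An hourly synthetic cube with a "time" axis and one spatial axis.
t    = Dim{:time}(DateTime(2020, 1, 1):Hour(1):DateTime(2020, 1, 10))
lon  = Dim{:longitude}(0.0:1.0:4.0)
cube = YAXArray((t, lon), rand(length(t), length(lon)))

cube.time                          # property access after the rename (was cube.Ti)
daily = daily_fct(cube; fct=mean)  # daily aggregation along the "time" axis
```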

src/analysis.jl

Lines changed: 0 additions & 103 deletions
This file was deleted.

src/autocorrelation.jl

Lines changed: 40 additions & 1 deletion
```diff
@@ -19,9 +19,48 @@ The autocorrelation of the input dataset `ds` along the time dimension.
 function autocorrelation(ds; lags=30)
 
     # Dimensions
-    indims = InDims("Ti")
+    indims = InDims("time")
     outdims = OutDims(Dim{:lags}(collect(1:lags)))
 
     return mapCube(ClimateTools.autocorrelation, ds, indims=indims, outdims=outdims, lags=lags, nthreads=Threads.nthreads())
 
+
+end
+
+function hurst(xout, xin; k::Int=20)
+    d = LongMemory.ClassicEstimators.rescaled_range_est(Array(xin), k=k)
+    xout .= d + 0.5
+    # xout .= LongMemory.autocorrelation(Array(xin), lags)
+end
+
+
+
+"""
+    hurst(ds; k)
+
+Calculate the Hurst exponent for a time series of dataset `ds`. The Hurst exponent is a measure of the long-term memory of time series data. It can be used to classify time series as:
+
+- **0.5 < H < 1**: The time series is persistent, meaning that high values are likely to be followed by high values, and low values by low values.
+- **H = 0.5**: The time series is a random walk, indicating no correlation between values.
+- **0 < H < 0.5**: The time series is anti-persistent, meaning that high values are likely to be followed by low values, and vice versa.
+
+# Arguments
+- `ds`: YAXArray variable.
+
+# Returns
+- `H`: The Hurst exponent of the time series.
+
+
+"""
+function hurst(ds; k::Int=20)
+
+    # Dimensions
+    indims = InDims("time")
+    # outdims = OutDims(Dim{:hurst}(1))
+    outdims = OutDims()
+
+    return mapCube(ClimateTools.hurst, ds, indims=indims, outdims=outdims, k=k, nthreads=Threads.nthreads())
+
+
+
 end
```
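
The per-pixel kernel maps LongMemory.jl's rescaled-range estimate of the fractional-differencing parameter `d` to a Hurst exponent via `H = d + 0.5`. A quick sanity check on a synthetic series; only the `rescaled_range_est` call is taken from the code above, and white noise should land near the `H = 0.5` boundary:

```julia
using LongMemory, StableRNGs

rng = StableRNG(1234)
x   = randn(rng, 5_000)   # white noise: no long-range memory

# Same estimator and conversion as the hurst kernel above.
d = LongMemory.ClassicEstimators.rescaled_range_est(x, k=20)
H = d + 0.5               # expect H ≈ 0.5 for an uncorrelated series
```

On a gridded `YAXArray` with a `time` axis, `hurst(ds; k=20)` applies this kernel cell by cell through `mapCube`, dropping the time dimension in the output.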
