Skip to content

Commit df24cf0

Browse files
committed
add wavelet validated features
1 parent 04a5e91 commit df24cf0

4 files changed

Lines changed: 124 additions & 3 deletions

File tree

README.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,7 @@ __09/07:__ 585 partialed functions, 2300 validated features
1010

1111
__11/07:__ 587 partialed functions, 2488 validated features
1212

13-
__13/07:__ 610 partialed functions, 2511 validated features
13+
__13/07:__ 611 partialed functions, 2527 validated features
1414

1515
# Feature Conversion Progress...
1616
![](https://geps.dev/progress/36)

pyhctsa/Configurations/basic.yaml

Lines changed: 15 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1330,7 +1330,21 @@ Wavelet:
13301330
zscore: True
13311331
hctsa_name: WL_coeffs
13321332
ordered_args: ["wname", "level"]
1333-
1333+
1334+
DetailCoeffs:
1335+
base_name: DetailCoeffs
1336+
labels:
1337+
- wavelet
1338+
dependencies:
1339+
- pywt
1340+
- PyWavelets
1341+
configs:
1342+
- wname: 'db3'
1343+
maxlevel: 'max'
1344+
zscore: True
1345+
hctsa_name: WL_DetailCoeffs
1346+
ordered_args: ["wname", "maxlevel"]
1347+
13341348
HypothesisTests:
13351349
HypothesisTest:
13361350
base_name: HypothesisTest

pyhctsa/Configurations/wavelet.yaml

Lines changed: 15 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -12,4 +12,18 @@ Wavelet:
1212
zscore: True
1313
hctsa_name: WL_coeffs
1414
ordered_args: ["wname", "level"]
15-
15+
16+
DetailCoeffs:
17+
base_name: DetailCoeffs
18+
labels:
19+
- wavelet
20+
dependencies:
21+
- pywt
22+
- PyWavelets
23+
configs:
24+
- wname: 'db3'
25+
maxlevel: 'max'
26+
zscore: True
27+
hctsa_name: WL_DetailCoeffs
28+
ordered_args: ["wname", "maxlevel"]
29+

pyhctsa/Operations/Wavelet.py

Lines changed: 93 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,99 @@
33
from numpy.typing import ArrayLike
44
from typing import Union
55

6+
def _slosr(xx) -> int:
7+
# helper function for DetailCoeffs
8+
theMaxLevel = len(xx)
9+
slosr = np.zeros(theMaxLevel-2)
10+
for i in range(2, theMaxLevel):
11+
slosr[i-2] = np.sum(xx[:i-1])/np.sum(xx[i:])
12+
absm1 = np.abs(slosr - 1)
13+
idx = np.argwhere(absm1 == np.min(absm1).flatten())[0][0] + 1
14+
return idx
15+
16+
def DetailCoeffs(y: ArrayLike, wname: str = 'db3', maxlevel: Union[int, str] = 20) -> dict:
    """
    Detail coefficients of a wavelet decomposition.

    Compares the magnitudes of the detail coefficients obtained at each level
    of the wavelet decomposition, from level 1 up to ``maxlevel`` (capped at
    the maximum possible level for the wavelet given the length of the input
    time series).

    Parameters
    ----------
    y : array-like
        The input time series.
    wname : str, optional
        The name of the mother wavelet to analyze the data with (e.g., 'db3', 'sym2').
        See the Wavelet Toolbox or PyWavelets documentation for details. Default is 'db3'.
    maxlevel : int or 'max', optional
        The maximum wavelet decomposition level. If 'max', uses the maximum allowed
        level for the data length and wavelet. Default is 20.

    Returns
    -------
    dict
        Statistics on the detail coefficients at each level: the largest
        mean/median/max magnitude, their spread across levels, the level at
        which each maximum occurs, the ratio of the largest to second-largest
        value, a centrality measure (via ``_slosr``), and the correlation
        between the per-level max and median magnitudes.

    Notes
    -----
    NOTE(review): assumes at least two decomposition levels are computed —
    the ``max1on2_*`` ratios and ``_slosr`` index into the per-level arrays
    and will raise for very short signals; confirm callers guard this.
    """
    y = np.asarray(y)
    N = len(y)
    if maxlevel == 'max':
        maxlevel = pywt.dwt_max_level(N, wname)
    # Cap the requested level at what the data length and wavelet allow
    if pywt.dwt_max_level(N, wname) < maxlevel:
        print(f"Chosen wavelet level is too large for the {wname} wavelet for this signal of length N = {N}")
        maxlevel = pywt.dwt_max_level(N, wname)
        print(f"Using a wavelet level of {maxlevel} instead.")

    means = np.zeros(maxlevel)    # mean detail coefficient magnitude at each level
    medians = np.zeros(maxlevel)  # median detail coefficient magnitude at each level
    maxs = np.zeros(maxlevel)     # max detail coefficient magnitude at each level

    for k in range(1, maxlevel + 1):
        # Decompose down to level k, then reconstruct the level-k detail signal
        coefs, lengths = wavedec(data=y, wavelet=wname, level=k)
        det = wrcoef(coefs=coefs, lengths=lengths, wavelet=wname, level=k)
        absdet = np.abs(det)
        means[k - 1] = np.mean(absdet)
        medians[k - 1] = np.median(absdet)
        maxs[k - 1] = np.max(absdet)

    # Return statistics on detail coefficients.
    # Sorted descending, so index 0 holds the largest value across levels.
    means_s = np.sort(means)[::-1]
    medians_s = np.sort(medians)[::-1]
    maxs_s = np.sort(maxs)[::-1]

    out = {}
    # What is the maximum across these levels
    out['max_mean'] = means_s[0]
    out['max_median'] = medians_s[0]
    out['max_max'] = maxs_s[0]

    # Spread across levels (sample standard deviation)
    out['std_mean'] = np.std(means, ddof=1)
    out['std_median'] = np.std(medians, ddof=1)
    out['std_max'] = np.std(maxs, ddof=1)

    # At what level is the maximum (0-based; argmax returns the first
    # occurrence, matching the original argwhere(...)[0] lookup)
    out['wheremax_mean'] = int(np.argmax(means))
    out['wheremax_median'] = int(np.argmax(medians))
    out['wheremax_max'] = int(np.argmax(maxs))

    # Size of maximum relative to the next-largest value
    out['max1on2_mean'] = means_s[0] / means_s[1]
    out['max1on2_median'] = medians_s[0] / medians_s[1]
    out['max1on2_max'] = maxs_s[0] / maxs_s[1]

    # Where the sum of values to the left equals the sum of values to the
    # right — a measure of centrality across levels
    out['wslesr_mean'] = _slosr(means)
    out['wslesr_median'] = _slosr(medians)
    out['wslesr_max'] = _slosr(maxs)

    # Correlation between the per-level max and median magnitudes
    r = np.corrcoef(maxs, medians)
    out['corrcoef_max_medians'] = r[0, 1]

    return out
699
def WLCoeffs(y : ArrayLike, wname : str = 'db3', level : Union[int, str] = 3) -> dict:
7100
"""
8101
Wavelet decomposition of the time series.

0 commit comments

Comments
 (0)