Closed
Commits (56)
22d903e
example
JaimeRZP Apr 8, 2025
e16d6c3
forwards adapted
JaimeRZP Apr 9, 2025
52ef3a0
nearly there
JaimeRZP Apr 9, 2025
ef90a88
missing file
JaimeRZP Apr 10, 2025
56bb08d
working
JaimeRZP Apr 10, 2025
e86d8a8
testing
JaimeRZP Apr 10, 2025
8f61344
unit testing
JaimeRZP Apr 10, 2025
2d30fd2
unit testing
JaimeRZP Apr 10, 2025
d5b8cf5
pre-commit.ci: style fixes
pre-commit-ci[bot] Apr 10, 2025
99461e9
do not flatten spectra
ntessore Apr 24, 2025
0445e4b
alm2cl() already does everything
ntessore Apr 24, 2025
fcfb090
Merge branch 'nt/261' of https://github.com/heracles-ec/heracles into…
JaimeRZP Apr 25, 2025
0dedeb2
dices working
JaimeRZP Apr 25, 2025
ab710c6
dices working
JaimeRZP Apr 25, 2025
e295aa3
Merge branch 'nt/261' into 256-unmixing
JaimeRZP Apr 28, 2025
33265fb
Merge branch '256-unmixing' of https://github.com/heracles-ec/heracle…
JaimeRZP Apr 28, 2025
9178f9b
binned() for multiple ell axes
ntessore Apr 30, 2025
bdee6cc
remove self-import
ntessore Apr 30, 2025
f6ac653
use different variable name as type might change
ntessore Apr 30, 2025
2f82d8e
fix tests on numpy<2/py39
ntessore Apr 30, 2025
d129ff4
wip new cls
JaimeRZP Apr 30, 2025
589cb57
fixed forward test
JaimeRZP May 2, 2025
3dcbcad
stream lining things in the transdform
JaimeRZP May 2, 2025
129fe2d
ruff
JaimeRZP May 2, 2025
4889fd0
Merge branch 'main' into 256-unmixing
JaimeRZP May 2, 2025
1e31404
ruff
JaimeRZP May 2, 2025
7f83621
Merge branch '256-unmixing' of https://github.com/heracles-ec/heracle…
JaimeRZP May 2, 2025
3986917
Merge branch 'nt/227' into 256-unmixing
JaimeRZP May 2, 2025
954a8ca
transforms
JaimeRZP May 6, 2025
0cecd6b
tests for transforms
JaimeRZP May 6, 2025
4b52f11
format
JaimeRZP May 6, 2025
afbf403
missing files
JaimeRZP May 6, 2025
4e127ed
trying binning
JaimeRZP May 6, 2025
329ac7e
tests for binned
JaimeRZP May 6, 2025
84c3abe
Revert "trying binning"
JaimeRZP May 6, 2025
d3f052d
Merge branch 'main' into 256-unmixing
JaimeRZP May 8, 2025
afebdfb
Merge branch 'main' into 256-unmixing
JaimeRZP May 12, 2025
303d7b0
upd after merging shkrinkage
JaimeRZP May 12, 2025
7953291
Merge branch '256-unmixing' of https://github.com/heracles-ec/heracle…
JaimeRZP May 12, 2025
36ce004
master coded
JaimeRZP May 12, 2025
db58b01
bug
JaimeRZP May 12, 2025
32e8644
natural_unmixing 0,1 --> 1,2
JaimeRZP May 16, 2025
73c8cac
no ell s in jck
JaimeRZP May 19, 2025
3435942
got rid of POS case
JaimeRZP May 19, 2025
5758a5c
more streamlined
JaimeRZP May 19, 2025
c12b4dc
no mask correction
JaimeRZP May 19, 2025
7309f47
ruff
JaimeRZP May 19, 2025
9e9e45f
bug
JaimeRZP May 19, 2025
6f91c7a
working on unmixing cov
JaimeRZP May 19, 2025
204944c
api for unmixed cov
JaimeRZP May 20, 2025
62841d6
ruff
JaimeRZP May 20, 2025
ba93483
potential Polspice implementatio
JaimeRZP Jun 3, 2025
a0abc5a
impose correlation bug
JaimeRZP Jun 7, 2025
46bbc48
bug
JaimeRZP Jun 9, 2025
fc78c71
bugs
JaimeRZP Jun 10, 2025
4f98699
no numpy integrals
JaimeRZP Jun 17, 2025
299 changes: 124 additions & 175 deletions examples/example.ipynb

Large diffs are not rendered by default.

872 changes: 778 additions & 94 deletions examples/jackknife-covariance.ipynb

Large diffs are not rendered by default.

1,952 changes: 1,952 additions & 0 deletions examples/unmixing.ipynb

Large diffs are not rendered by default.

20 changes: 20 additions & 0 deletions heracles/__init__.py
@@ -69,6 +69,14 @@
"angular_power_spectra",
"debias_cls",
"mixing_matrices",
# transforms
"cl2corr",
"corr2cl",
# unmixing
"forwards",
"inversion",
"master",
"PolSpice",
]

try:
@@ -139,3 +147,15 @@
debias_cls,
mixing_matrices,
)

from .transforms import (
cl2corr,
corr2cl,
)

from .unmixing import (
forwards,
inversion,
master,
PolSpice,
)
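For orientation, a minimal import sketch of the names newly exported at the package root, which now come from the heracles.transforms and heracles.unmixing modules. Only the import surface is taken from this diff; nothing is assumed here about the call signatures of the transform or unmixing routines.

# Sketch only: names taken from the updated __all__ in heracles/__init__.py.
from heracles import cl2corr, corr2cl                       # Cl <-> correlation-function transforms
from heracles import forwards, inversion, master, PolSpice  # unmixing strategies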
14 changes: 5 additions & 9 deletions heracles/dices/__init__.py
@@ -26,13 +26,11 @@
"jackknife_fsky",
"jackknife_bias",
"correct_bias",
"jackknife_maps",
"mask_correction",
"jackknife_covariance",
"debias_covariance",
"delete2_correction",
# mask_correction
"correct_mask",
"cl2corr",
"corr2cl",
# shrinkage
"shrink",
"shrinkage_factor",
@@ -45,18 +43,16 @@

from .jackknife import (
jackknife_cls,
jackknife_maps,
jackknife_fsky,
jackknife_bias,
correct_bias,
mask_correction,
jackknife_covariance,
debias_covariance,
delete2_correction,
)
from .mask_correction import (
correct_mask,
cl2corr,
corr2cl,
)

from .shrinkage import (
shrink,
shrinkage_factor,
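The dices namespace is reshuffled rather than extended: correct_mask, cl2corr and corr2cl leave the subpackage's public API, while jackknife_maps and mask_correction are now exported from heracles.dices.jackknife. A minimal sketch of the resulting import surface, listing only names visible in these hunks:

# Sketch only: reorganized heracles.dices API per the new __all__.
from heracles.dices import (
    jackknife_cls,
    jackknife_maps,        # new export: region removal split out of get_cls
    mask_correction,       # new export: replaces the removed correct_mask helper
    jackknife_covariance,
    debias_covariance,
    delete2_correction,
    shrink,
    shrinkage_factor,
)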
94 changes: 70 additions & 24 deletions heracles/dices/jackknife.py
@@ -20,12 +20,13 @@
import itertools
from copy import deepcopy
from itertools import combinations
from .mask_correction import correct_mask
from .utils import add_to_Cls, sub_to_Cls
from ..core import update_metadata
from ..result import Result, get_result_array
from ..mapping import transform
from ..twopoint import angular_power_spectra
from ..unmixing import _PolSpice
from ..transforms import cl2corr, logistic


def jackknife_cls(data_maps, vis_maps, jk_maps, fields, nd=1):
@@ -50,7 +51,8 @@ def jackknife_cls(data_maps, vis_maps, jk_maps, fields, nd=1):
_cls = get_cls(data_maps, jk_maps, fields, *regions)
_cls_mm = get_cls(vis_maps, jk_maps, fields, *regions)
# Mask correction
_cls = correct_mask(_cls, _cls_mm, mls0)
alphas = mask_correction(_cls_mm, mls0)
_cls = _PolSpice(_cls, alphas, mode="natural")
# Bias correction
_cls = correct_bias(_cls, jk_maps, fields, *regions)
cls[regions] = _cls
@@ -69,13 +71,30 @@ def get_cls(maps, jkmaps, fields, jk=0, jk2=0):
returns:
cls (dict): Dictionary of data Cls
"""
# grab metadata
print(f" - Computing Cls for regions ({jk},{jk2})", end="\r", flush=True)
_m = maps[list(maps.keys())[0]]
meta = _m.dtype.metadata
lmax = meta["lmax"]
ell = np.arange(lmax + 1)
# deep copy to avoid modifying the original maps
# remove the region from the maps
_maps = jackknife_maps(maps, jkmaps, jk=jk, jk2=jk2)
# compute alms
alms = transform(fields, _maps)
# compute cls
cls = angular_power_spectra(alms)
# Result
for key in cls.keys():
cls[key] = Result(cls[key])
return cls


def jackknife_maps(maps, jkmaps, jk=0, jk2=0):
"""
Internal method to remove a region from the maps.
inputs:
maps (dict): Dictionary of data maps
jkmaps (dict): Dictionary of jackknife region maps
jk (int): Jackknife region to remove
jk2 (int): Second jackknife region to remove (delete-2 case)
returns:
maps (dict): Dictionary of data maps with the selected region(s) masked out
"""
_maps = deepcopy(maps)
for key_data, key_mask in zip(maps.keys(), jkmaps.keys()):
_map = _maps[key_data]
@@ -87,14 +106,7 @@ def get_cls(maps, jkmaps, fields, jk=0, jk2=0):
_mask[cond] = 0.0
# Apply mask
_map *= _mask
# compute alms
alms = transform(fields, _maps)
# compute cls
cls = angular_power_spectra(alms)
# Result
for key in cls.keys():
cls[key] = Result(cls[key], ell=ell)
return cls
return _maps


def bias(cls):
@@ -180,13 +192,37 @@ def correct_bias(cls, jkmaps, fields, jk=0, jk2=0):
cls = sub_to_Cls(cls, b_jk)
# Update metadata
for key in cls.keys():
cl = cls[key].__array__()
ell = cls[key].ell
cl = cls[key].array
update_metadata(cl, bias=b_jk[key])
cls[key] = Result(cl, ell=ell)
cls[key] = Result(cl)
return cls


def mask_correction(Mljk, Mls0):
"""
Internal method to compute the mask correction.
inputs:
Mljk (dict): mask Cls of the delete-1 regions
Mls0 (dict): mask Cls of the full footprint
returns:
alphas (dict): Mask correction factors, one per key
"""
alphas = {}
for key in list(Mljk.keys()):
mljk = Mljk[key]
mls0 = Mls0[key]
# Transform to real space
wmls0 = cl2corr(mls0)
wmls0 = wmls0.T[0]
wmljk = cl2corr(mljk)
wmljk = wmljk.T[0]
# Compute alpha
alpha = wmljk / wmls0
alpha *= logistic(np.log10(abs(wmljk)))
alphas[key] = alpha
return alphas


def jackknife_covariance(dict, nd=1):
"""
Compute the jackknife covariance matrix from a sequence
@@ -212,12 +248,14 @@ def _jackknife_covariance(samples, nd=1):
samples1 = np.stack([result1] + [spectra[key1] for spectra in rest])
samples2 = np.stack([result2] + [spectra[key2] for spectra in rest])
# if there are multiple samples, compute covariance
if (njk := len(samples1)) > 1:
if (m := len(samples1)) > 1:
# compute jackknife covariance matrix
a = sample_covariance(samples1, samples2)
if nd == 1:
a *= njk - 1
njk = m
a *= (njk - 1)**2 / njk
elif nd == 2:
njk = (1+np.sqrt(1+8*m))/2
a *= (njk * (njk - 1) - 2) / (2 * njk * (njk + 1))
elif nd > 2:
raise ValueError("number of deletions must be 0, 1, or 2")
@@ -284,7 +322,7 @@ def delete2_correction(cls0, cls1, cls2):
_qii -= (Njk - 1) * cls1[(k1,)][key].array
_qii -= (Njk - 1) * cls1[(k2,)][key].array
_qii += (Njk - 2) * cls2[kk][key].array
_qii = Result(_qii, ell=cls0[key].ell)
_qii = Result(_qii)
qii[key] = _qii
Q_ii.append(qii)
# Compute the correction from the ensemble
@@ -297,7 +335,7 @@ def delete2_correction(cls0, cls1, cls2):
q_diag_exp = np.zeros_like(q)
diag_indices = np.arange(length) # Indices for the diagonal
q_diag_exp[..., diag_indices, diag_indices] = q_diag
Q[key] = q_diag_exp
Q[key] = Result(q_diag_exp, axis=q.axis, ell=q.ell)
return Q


@@ -313,9 +351,17 @@
debiased_cov (dict): Dictionary of debiased Jackknife covariance
"""
Q = delete2_correction(cls0, cls1, cls2)
return _debias_covariance(cov_jk, Q)


def _debias_covariance(cov_jk, Q):
"""
Internal method to debias the Jackknife covariance.
Useful when the delete2 correction is already computed.
"""
debiased_cov = {}
for key in list(cov_jk.keys()):
c = cov_jk[key].array - Q[key]
c = cov_jk[key].array - Q[key].array
debiased_cov[key] = Result(
c,
ell=cov_jk[key].ell,
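The visible change in _jackknife_covariance is the reweighting of the sample covariance: delete-1 now applies (njk - 1)**2 / njk, and delete-2 first recovers njk from the number of region pairs before applying its own factor. A standalone sketch of just those factors, assuming sample_covariance is the usual unbiased estimator (divides by m - 1); jackknife_scaling below is an illustrative stand-in, not part of heracles:

import numpy as np

def jackknife_scaling(m, nd=1):
    """Factor applied to the sample covariance of m jackknife samples.

    Mirrors the branches visible in _jackknife_covariance:
      nd=1: njk = m delete-1 regions, factor (njk - 1)**2 / njk
      nd=2: m = njk*(njk - 1)/2 delete-2 pairs, factor (njk*(njk - 1) - 2) / (2*njk*(njk + 1))
    """
    if nd == 1:
        njk = m
        return (njk - 1) ** 2 / njk
    if nd == 2:
        njk = (1 + np.sqrt(1 + 8 * m)) / 2  # invert m = njk*(njk - 1)/2
        return (njk * (njk - 1) - 2) / (2 * njk * (njk + 1))
    raise ValueError("nd must be 1 or 2 in this sketch")

# 20 delete-1 regions give a factor of 18.05; the 190 delete-2 pairs of the same
# regions recover njk = 20 and a factor of 378/840 = 0.45.
print(jackknife_scaling(20, nd=1), jackknife_scaling(190, nd=2))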
8 changes: 4 additions & 4 deletions heracles/dices/shrinkage.py
@@ -50,10 +50,10 @@ def shrink(cov, target, shrinkage_factor):
shrunk_cov = {}
correlated_target = impose_correlation(target, cov)
for key in cov:
c = cov[key]
tc = correlated_target[key]
c = cov[key].array
tc = correlated_target[key].array
sc = shrinkage_factor * tc + (1 - shrinkage_factor) * c
shrunk_cov[key] = Result(sc, axis=(0, 1), ell=c.ell)
shrunk_cov[key] = Result(sc, axis=cov[key].axis, ell=cov[key].ell)
return shrunk_cov


@@ -145,7 +145,7 @@ def gaussian_covariance(Cls):
# Remove the extra dimensions
r = np.squeeze(r)
# Make Result
result = Result(r, axis=(0, 1), ell=ell)
result = Result(r, ell=ell)
cov[covkey] = result
return cov

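shrink is a plain linear shrinkage: the jackknife covariance C is blended with a target that keeps its own correlation structure but takes C's variances (via impose_correlation). A minimal numpy sketch of that blend with a hand-picked factor; shrink_matrix and lam are illustrative stand-ins, whereas heracles computes the factor with shrinkage_factor and wraps results in Result objects:

import numpy as np

def shrink_matrix(cov, target, lam):
    """Linear shrinkage lam * T' + (1 - lam) * C, where T' keeps the target's
    correlation structure but takes the data covariance's variances."""
    cov_sd = np.sqrt(np.diag(cov))
    tgt_sd = np.sqrt(np.diag(target))
    correlated_target = target / np.outer(tgt_sd, tgt_sd) * np.outer(cov_sd, cov_sd)
    return lam * correlated_target + (1 - lam) * cov

rng = np.random.default_rng(0)
samples = rng.standard_normal((15, 4))     # 15 noisy samples of a 4-bin data vector
noisy_cov = np.cov(samples, rowvar=False)  # stands in for the jackknife estimate
target = np.diag(np.diag(noisy_cov))       # e.g. a diagonal (Gaussian-like) target
print(shrink_matrix(noisy_cov, target, lam=0.3))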
2 changes: 1 addition & 1 deletion heracles/dices/utils.py
@@ -71,5 +71,5 @@ def impose_correlation(cov_a, cov_b):
b_std = np.sqrt(b_v[..., None, :])
c = a * (b_std * np.swapaxes(b_std, -1, -2))
c /= a_std * np.swapaxes(a_std, -1, -2)
cov_c[key] = Result(c, axis=(0, 1), ell=a.ell)
cov_c[key] = Result(c, axis=a.axis, ell=a.ell)
return cov_c