
Commit 04d735c

Merge pull request #433 from jakobrunge/developer
fixed issue with get_measure for variable data type
2 parents 1bbe2f9 + 9fbeecb · commit 04d735c

File tree

5 files changed: +70 additions, -59 deletions

setup.py

Lines changed: 1 addition & 1 deletion

@@ -61,7 +61,7 @@ def run(self):
 # Run the setup
 setup(
     name="tigramite",
-    version="5.2.6.5",
+    version="5.2.6.6",
     packages=["tigramite", "tigramite.independence_tests", "tigramite.toymodels"],
     license="GNU General Public License v3.0",
     description="Tigramite causal inference for time series",

tigramite/data_processing.py

Lines changed: 15 additions & 15 deletions

@@ -1574,18 +1574,18 @@ def nonlin_f(x): return (x + 5. * x**2 * np.exp(-x**2 / 20.))

     print(frame.T)

-    X=[(0, 0)]
-    Y=[(0, 0)]
-    Z=[(0, -3)]
-    tau_max=5
-    frame.construct_array(X, Y, Z, tau_max,
-                          extraZ=None,
-                          mask=None,
-                          mask_type=None,
-                          return_cleaned_xyz=False,
-                          do_checks=True,
-                          cut_off='2xtau_max',
-                          verbosity=4)
-
-    print(ordinal_patt_array(data, array_mask=None, dim=2, step=1,
-                             weights=False, verbosity=0)[0])
+    # X=[(0, 0)]
+    # Y=[(0, 0)]
+    # Z=[(0, -3)]
+    # tau_max=5
+    # frame.construct_array(X, Y, Z, tau_max,
+    #                       extraZ=None,
+    #                       mask=None,
+    #                       mask_type=None,
+    #                       return_cleaned_xyz=False,
+    #                       do_checks=True,
+    #                       cut_off='2xtau_max',
+    #                       verbosity=4)
+
+    # print(ordinal_patt_array(data, array_mask=None, dim=2, step=1,
+    #                          weights=False, verbosity=0)[0])

tigramite/independence_tests/independence_tests_base.py

Lines changed: 38 additions & 8 deletions

@@ -837,15 +837,45 @@ def get_measure(self, X, Y, Z=None, tau_max=0,
             The test statistic value.

         """
-        # Make the array
-        array, xyz, (X, Y, Z), _ = self._get_array(X=X, Y=Y, Z=Z, tau_max=tau_max,
-                                                   remove_constant_data=False)
-        D, T = array.shape
-        # Check it is valid
-        if np.isnan(array).sum() != 0:
+
+        if self.significance == 'fixed_thres' and alpha_or_thres is None:
+            raise ValueError("significance == 'fixed_thres' requires setting alpha_or_thres")
+
+        # Get the array to test on
+        (array, xyz, XYZ, data_type,
+         nonzero_array, nonzero_xyz, nonzero_XYZ, nonzero_data_type) = self._get_array(
+            X=X, Y=Y, Z=Z, tau_max=tau_max,
+            remove_constant_data=True,
+            verbosity=self.verbosity)
+        X, Y, Z = XYZ
+        nonzero_X, nonzero_Y, nonzero_Z = nonzero_XYZ
+
+        # Record the dimensions
+        # dim, T = array.shape
+
+        # Ensure it is a valid array
+        if np.any(np.isnan(array)):
             raise ValueError("nans in the array!")
-        # Return the dependence measure
-        return self._get_dependence_measure_recycle(X, Y, Z, xyz, array)
+
+        # If all X or all Y are zero, then return pval=1, val=0, dependent=False
+        if len(nonzero_X) == 0 or len(nonzero_Y) == 0:
+            val = 0.
+        else:
+            # Get the dependence measure, reycling residuals if need be
+            val = self._get_dependence_measure_recycle(nonzero_X, nonzero_Y, nonzero_Z,
+                                                       nonzero_xyz, nonzero_array, nonzero_data_type)
+
+        return val
+
+        # # Make the array
+        # array, xyz, (X, Y, Z), _ = self._get_array(X=X, Y=Y, Z=Z, tau_max=tau_max,
+        #                                            remove_constant_data=False)
+        # D, T = array.shape
+        # # Check it is valid
+        # if np.isnan(array).sum() != 0:
+        #     raise ValueError("nans in the array!")
+        # # Return the dependence measure
+        # return self._get_dependence_measure_recycle(X, Y, Z, xyz, array)

     def get_confidence(self, X, Y, Z=None, tau_max=0,
                        data_type=None):
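
For context, a minimal usage sketch of the code path fixed above (not part of the commit; the toy data, seed, and variable indices are illustrative assumptions, with ParCorr and DataFrame used at default settings): with remove_constant_data=True, constant variables are dropped before testing, and get_measure returns val = 0. when all of X or all of Y is removed, instead of propagating NaNs.

    # Hypothetical sketch, not from the diff: exercise get_measure with a
    # constant variable in the data.
    import numpy as np
    import tigramite.data_processing as pp
    from tigramite.independence_tests.parcorr import ParCorr

    rng = np.random.default_rng(0)
    data = rng.standard_normal((500, 3))
    data[:, 2] = 1.0                      # variable 2 is constant over the sample

    parcorr = ParCorr()
    parcorr.set_dataframe(pp.DataFrame(data))

    # X contains only the constant variable -> expected val = 0. after the fix
    print(parcorr.get_measure(X=[(2, -1)], Y=[(0, 0)], tau_max=1))
    # A regular pair still returns the partial-correlation test statistic
    print(parcorr.get_measure(X=[(1, -1)], Y=[(0, 0)], tau_max=1))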

tigramite/pcmci.py

Lines changed: 15 additions & 34 deletions

@@ -2855,7 +2855,6 @@ def _run_pcalg_test(self, graph, i, abstau, j, S, lagged_parents, max_conds_py,
         val, pval, dependent = self.cond_ind_test.run_test(X=[(i, -abstau)], Y=[(j, 0)],
                                             Z=Z, tau_max=tau_max,
                                             alpha_or_thres=alpha_or_thres,
-                                            # verbosity=self.verbosity
                                             )

         return val, pval, Z, dependent
@@ -3925,7 +3924,8 @@ def _optimize_pcmciplus_alpha(self,

 if __name__ == '__main__':
     from tigramite.independence_tests.parcorr import ParCorr
-    from tigramite.independence_tests.cmiknn import CMIknn
+    from tigramite.independence_tests.regression_ci import RegressionCI
+    # from tigramite.independence_tests.cmiknn import CMIknn

     import tigramite.data_processing as pp
     from tigramite.toymodels import structural_causal_processes as toys
@@ -4009,39 +4009,20 @@ def _optimize_pcmciplus_alpha(self,
     # data, _ = toys.structural_causal_process(links_coeffs, T=T, seed=3)
     # T, N = data.shape

-    # # Initialize dataframe object
-    # dataframe = pp.DataFrame(data)
-    # pcmci = PCMCI(
-    #     dataframe=dataframe,
-    #     cond_ind_test=ParCorr(),
-    #     verbosity=0)
-
-    # multidata[0][40:100, :] = 999.
-
-    # dataframe = pp.DataFrame(multidata, analysis_mode='multiple',
-    #                          missing_flag = 999.,
-    #                          time_offsets = {0:50, 1:0}
-    #                          # reference_points=list(range(500, 1000))
-    #                          )
-
-    # pcmci = PCMCI(dataframe=dataframe,
-    #               cond_ind_test=ParCorr(verbosity=0), verbosity=0)

-    # # results = pcmci.run_pcmciplus(tau_max=1)
+    multidata = np.random.randn(10, 100, 5)
+    data_type = np.zeros((10, 100, 5), dtype='bool')
+    data_type[:,:,:3] = True

-    # results = pcmci.run_sliding_window_of(
-    #     window_step=499, window_length=500,
-    #     method='run_pcmciplus', method_args={'tau_max':1,
-    #     'link_assumptions':{
-    #         0: {(0, -1): '-->'},
-    #         1: {(1, -1): '-->', (0, -1): '-!>'},
-    #     }
-    #     })
+    dataframe = pp.DataFrame(multidata,
+                             data_type=data_type,
+                             analysis_mode='multiple',
+                             missing_flag = 999.,
+                             time_offsets = {0:50, 1:0}
+                             # reference_points=list(range(500, 1000))
+                             )

-    # # tp.plot_graph(results['graph'])
-    # print(multidata[0].shape, multidata[1].shape)
-    # print(results['window_results']['val_matrix'])
-    # print(results['window_results']['val_matrix'][0][0,1])
-    # print(results['window_results']['val_matrix'][1][0,1])
+    pcmci = PCMCI(dataframe=dataframe,
+                  cond_ind_test=RegressionCI(verbosity=0), verbosity=0)

-    # plt.show()
+    # results = pcmci.run_pcmciplus(tau_max=1)
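
The updated __main__ demo pairs RegressionCI with a boolean data_type mask of the same shape as the data, where True/1 flags discrete entries and False/0 flags continuous ones. A self-contained sketch of the same pattern in single-dataset mode (the toy data, seed, and variable choice are hypothetical, not from the commit):

    # Hypothetical sketch, not from the diff: mixed-type PCMCI+ with RegressionCI.
    import numpy as np
    import tigramite.data_processing as pp
    from tigramite.pcmci import PCMCI
    from tigramite.independence_tests.regression_ci import RegressionCI

    rng = np.random.default_rng(42)
    T, N = 200, 3
    data = rng.standard_normal((T, N))
    data[:, 0] = (data[:, 0] > 0).astype(float)   # make variable 0 binary/discrete

    # data_type has the same shape as data: True marks discrete samples,
    # False marks continuous ones (here, variable 0 is discrete).
    data_type = np.zeros((T, N), dtype='bool')
    data_type[:, 0] = True

    dataframe = pp.DataFrame(data, data_type=data_type)
    pcmci = PCMCI(dataframe=dataframe, cond_ind_test=RegressionCI(), verbosity=0)
    results = pcmci.run_pcmciplus(tau_max=1)
    print(results['graph'])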

tigramite/toymodels/structural_causal_processes.py

Lines changed: 1 addition & 1 deletion

@@ -738,7 +738,7 @@ def structural_causal_process(links, T, noises=None,
         elif isinstance(intervention_type, str):
             intervention_type = {j:intervention_type for j in intervention}
         for j in intervention.keys():
-            if len(intervention[j]) != T:
+            if len(np.atleast_1d(intervention[j])) != T:
                 raise ValueError("intervention array for j=%s must be of length T = %d" %(j, T))
             if j not in intervention_type.keys():
                 raise ValueError("intervention_type dictionary must contain entry for %s" %(j))
