diff --git a/1-covariance-likelihood/Planck2018contour_modifications.py b/1-covariance-likelihood/Planck2018contour_modifications.py
new file mode 100644
index 00000000..d013c94c
--- /dev/null
+++ b/1-covariance-likelihood/Planck2018contour_modifications.py
@@ -0,0 +1,129 @@
+#!/usr/bin/env python
+# coding: utf-8
+
+import numpy as np
+import matplotlib.pyplot as plt
+import pandas as pd
+from matplotlib.patches import Ellipse
+import re
+import ast
+
+## Input parameters + path to directory and files ##
+directory_name = 'Planck2018thetad' # Name of your directory
+path_name = '/Users/Kim/Desktop/MontePythonCluster/chains/' # Path to your MontePython directory
+data_name = 'Planck2018thetad_2d_100*theta_d-100*theta_s.dat' # Name of your .dat file
+param_list = ['100*theta_s','100*theta_d'] # Names of the variables of interest. See names in 'cov_list' below.
+# The first parameter will be the x-variable in the contour plot.
+# Now you are ready to run the code. The following two parameters can be adjusted if one wishes to multiply
+# the entries of the covariance matrix by a scalar or generate more/fewer random sample points from a 2D Gaussian.
+factor = 1 # Used to multiply the entries of the covariance matrix to better match contours.
+size = 100000 # no. of generated points from 2D Gaussian with covariance matrix from .covmat and mu's from .h_info
+
+# Path to files
+cov_file = path_name + directory_name + '/' + directory_name + '.covmat' # path to .covmat file
+h_file = path_name + directory_name + '/' + directory_name + '.h_info'
+dat_file = path_name + directory_name + '/plots/' + data_name
+
+## Creating covariance matrix ##
+
+# Loading cov_file
+with open(cov_file, 'r') as index_cov:
+    index_line = index_cov.readline() # first line gives the parameter names
+# Removing symbols/text from cells
+cov_list = [elem.strip() for elem in index_line[1:].split(',')]
+
+# Creating covariance matrix
+covFull = np.loadtxt(cov_file)
+variable_indices = [cov_list.index(p) for p in param_list]
+cov = factor * covFull[np.ix_(variable_indices, variable_indices)] # optionally rescaled by 'factor'
+
+## Selecting mu's from .h_info file ##
+with open(h_file, 'r') as index_h:
+    index_h = index_h.readlines(1) # read only the first line (parameter names)
+
+# Removing unwanted symbols/text from parameter names
+index_h = [i.replace(' param names\t:', '') for i in index_h]
+index_h = [i.replace('\n', '') for i in index_h]
+index_h = [i.replace('\\', '') for i in index_h]
+index_h = [i.replace('{', '') for i in index_h]
+index_h = [i.replace('}', '') for i in index_h]
+string_h = " ".join(str(x) for x in index_h) # converting to string to separate spacing with commas
+string_h = re.sub(r"\s+", ",", string_h.strip())
+index_h = string_h.split(",")
+
+with open(h_file, 'r') as h_list:
+    lines = h_list.readlines()
+mu_indices = [index_h.index(p) for p in param_list]
+mean_row = lines[3]
+# The mean row is located at row 3 in the h_info file.
+# If one wishes to use best-fit values instead, change 3 to 2
+mean_row = mean_row.split()
+mean_row = [m.replace('mean', '') for m in mean_row]
+mean_row = [m.replace('\t', '') for m in mean_row]
+mean_row = [m.replace('\n', '') for m in mean_row]
+mean_row = [m.replace(':', '') for m in mean_row]
+mean_row = [m.replace('', '') for m in mean_row]
+string_mean = " ".join(str(x) for x in mean_row) # converting to string to separate spacing with commas
+string_mean = re.sub(r"\s+", ",", string_mean.strip())
+mean_row = string_mean.split(",")
+mu_list = [mean_row[x] for x in mu_indices]
+mu_list = (('[%s]' % ', '.join(map(str, mu_list))))
+mu_list = ast.literal_eval(mu_list)
+mu = tuple(mu_list)
+
+## 2D Gaussian plot ##
+contour = pd.read_csv(dat_file, sep=' ') # reads the .dat contour file into a DataFrame
+contour.rename(columns = {'#':'para1_col','contour':'para2_col'},inplace = True)
+# The first row in the file has entries '#' and 'contour'
+i = contour[(contour.para1_col == '#')].index
+# locates the line in the file which says "# contour for confidence level 0.6826000000000001"
+contour = contour.drop(i) # removes that line
+contour['para1_col'] = contour['para1_col'].map(lambda x: str(x)[:-1]) # removes \t from first column
+
+para1_col = contour.para1_col
+para2_col = contour.para2_col
+para1 = pd.to_numeric(para1_col, errors='coerce') # convert to float
+para2 = pd.to_numeric(para2_col, errors='coerce')
+
+def error_ellipse(ax, xc, yc, cov, sigma=1, facecolor='none', **kwargs):
+    '''
+    Plot an error ellipse contour over your data.
+    Inputs:
+    ax : matplotlib Axes() object
+    xc : x-coordinate of ellipse center
+    yc : y-coordinate of ellipse center
+    cov : covariance matrix
+    sigma : # sigma to plot (default 1)
+    additional kwargs passed to matplotlib.patches.Ellipse()
+    '''
+    w, v = np.linalg.eigh(cov) # assumes symmetric matrix
+    order = w.argsort()[::-1]
+    w, v = w[order], v[:,order]
+    theta = np.degrees(np.arctan2(*v[:,0][::-1]))
+    ellipse = Ellipse(xy=(xc,yc),
+                      width=2.*sigma*np.sqrt(w[0]),
+                      height=2.*sigma*np.sqrt(w[1]),
+                      angle=theta, **kwargs)
+    ellipse.set_facecolor('none')
+    ax.add_artist(ellipse)
+
+if __name__ == '__main__':
+    # Generate random points
+    points = np.random.multivariate_normal(
+        mean=mu, cov=cov, size=size)
+    x, y = points.T
+    cov = np.cov(x,y, rowvar=False)
+    # Plot the raw points
+    fig, ax = plt.subplots(1,1)
+    ax.scatter(x, y, s=1, color='b')
+    # Plot one and two sigma error ellipses
+    error_ellipse(ax, np.mean(x), np.mean(y), cov, sigma=1, ec='orange')
+    error_ellipse(ax, np.mean(x), np.mean(y), cov, sigma=2, ec='orange')
+    ax.scatter(mu[0], mu[1], c='orange', s=1, label='Generated Contour') # center of ellipse
+    if variable_indices[0] :
[diff truncated here: the remainder of Planck2018contour_modifications.py and the header of an accompanying .h_info excerpt are missing; only the excerpt's sigma-bound rows survive]
1.041736e+00 2.634413e+00 1.415058e-05 6.418600e+01 3.214342e-01
+ 1-sigma < : 1.042785e+00 3.010907e+00 3.745585e-02 6.766080e+01 3.232256e-01
+ 2-sigma > : 1.041226e+00 2.455330e+00 1.415058e-05 6.205549e+01 3.205237e-01
+ 2-sigma < : 1.043337e+00 3.203826e+00 1.017514e-01 6.930495e+01 3.240784e-01
+ 3-sigma > : 1.040742e+00 2.270155e+00 1.415058e-05 5.953773e+01 3.195933e-01
+ 3-sigma < : 1.043904e+00 3.425934e+00 1.976298e-01 7.093975e+01 3.249727e-01
\ No newline at end of file
diff --git a/1-covariance-likelihood/Planck2018thetad_2d_100*theta_d-100*theta_s.dat b/1-covariance-likelihood/Planck2018thetad_2d_100*theta_d-100*theta_s.dat
new file mode 100644
index 00000000..828efc8f
--- /dev/null
+++ b/1-covariance-likelihood/Planck2018thetad_2d_100*theta_d-100*theta_s.dat
@@ -0,0 +1,340 @@
+# contour for confidence level
0.9540000000000001 +1.0430383 0.32010856 +1.0430904 0.32009013 +1.0431424 0.32007878 +1.0431945 0.32007238 +1.0432466 0.32006889 +1.0432987 0.32006683 +1.0433509 0.32006651 +1.0434031 0.32007136 +1.0434552 0.32008795 +1.0435072 0.32012485 +1.0435157 0.32013498 +1.043559 0.32019825 +1.0435705 0.32022267 +1.043598 0.32031129 +1.0436107 0.32039933 +1.043614 0.32048623 +1.0436142 0.3205724 +1.0436164 0.32065825 +1.0436225 0.3207442 +1.0436304 0.32083045 +1.0436364 0.32091692 +1.0436372 0.3210035 +1.0436303 0.32109006 +1.0436131 0.32117655 +1.0436107 0.32118422 +1.0435882 0.32126297 +1.043559 0.32134023 +1.0435561 0.32134935 +1.0435274 0.32143574 +1.0435072 0.32149809 +1.0435008 0.32152215 +1.043479 0.32160857 +1.0434552 0.32169243 +1.0434545 0.32169501 +1.0434299 0.32178144 +1.0434031 0.32185362 +1.0433981 0.32186788 +1.0433652 0.3219543 +1.0433509 0.32198951 +1.0433328 0.32204073 +1.0433014 0.32212715 +1.0432987 0.32213492 +1.043274 0.32221357 +1.0432466 0.3222878 +1.0432423 0.3223 +1.0432064 0.32238642 +1.0431945 0.32241017 +1.0431627 0.32247285 +1.0431424 0.32250767 +1.0431132 0.32255928 +1.0430904 0.32259689 +1.0430623 0.3226457 +1.0430383 0.32268567 +1.0430116 0.32273213 +1.0429862 0.32277408 +1.0429603 0.32281855 +1.0429341 0.32286216 +1.0429103 0.32290498 +1.042882 0.32295612 +1.0428649 0.32299141 +1.0428299 0.32306529 +1.0428247 0.32307783 +1.0427902 0.32316426 +1.0427778 0.32319397 +1.042755 0.32325068 +1.0427257 0.32331358 +1.042714 0.32333711 +1.0426736 0.32340988 +1.0426651 0.32342353 +1.0426215 0.32349093 +1.0426078 0.32350996 +1.0425694 0.32356447 +1.0425448 0.32359639 +1.0425173 0.32363315 +1.0424752 0.32368282 +1.0424652 0.32369509 +1.0424131 0.32375266 +1.0423956 0.32376926 +1.042361 0.3238057 +1.042309 0.32385315 +1.0423062 0.32385568 +1.0422569 0.32391034 +1.0422272 0.32394209 +1.0422048 0.32397055 +1.042157 0.32402848 +1.0421527 0.32403427 +1.0421006 0.32409941 +1.0420859 0.32411487 +1.0420485 0.32415675 +1.0419972 0.32420129 +1.0419964 0.32420204 +1.0419443 0.32424859 +1.0418931 0.32428777 +1.0418922 0.32428864 +1.0418401 0.32433911 +1.0418008 0.32437433 +1.041788 0.32438836 +1.0417359 0.3244391 +1.041707 0.32446091 +1.0416838 0.32448071 +1.0416317 0.32451322 +1.0415796 0.32453544 +1.0415408 0.32454738 +1.0415276 0.32455254 +1.0414755 0.32456792 +1.0414234 0.32457975 +1.0413713 0.32458637 +1.0413191 0.32458257 +1.0412669 0.32455971 +1.041253 0.32454738 +1.0412148 0.32450776 +1.0411878 0.32446091 +1.0411628 0.32441493 +1.0411468 0.32437433 +1.0411132 0.32428777 +1.041111 0.3242822 +1.0410833 0.32420129 +1.0410593 0.32411916 +1.041058 0.32411487 +1.0410352 0.32402848 +1.0410185 0.32394209 +1.0410081 0.32385568 +1.0410073 0.32384165 +1.0410023 0.32376926 +1.0410027 0.32368282 +1.0410073 0.32362565 +1.0410092 0.32359639 +1.0410199 0.32350996 +1.0410363 0.32342353 +1.0410585 0.32333711 +1.0410593 0.32333439 +1.0410762 0.32325068 +1.0410928 0.32316426 +1.0411072 0.32307783 +1.041111 0.32305026 +1.0411169 0.32299141 +1.0411253 0.32290498 +1.0411364 0.32281855 +1.0411541 0.32273213 +1.0411628 0.32270347 +1.0411812 0.3226457 +1.0412148 0.32257876 +1.0412245 0.32255928 +1.0412669 0.32248765 +1.0412742 0.32247285 +1.0413166 0.32238642 +1.0413191 0.32238079 +1.0413476 0.3223 +1.0413713 0.32223882 +1.0413798 0.32221357 +1.041414 0.32212715 +1.0414234 0.32210703 +1.041452 0.32204073 +1.0414755 0.32199294 +1.0414929 0.3219543 +1.0415276 0.32188203 +1.0415339 0.32186788 +1.0415753 0.32178144 +1.0415796 0.32177293 +1.0416208 0.32169501 +1.0416317 0.32167684 +1.0416732 0.32160857 +1.0416838 
0.32159234 +1.041725 0.32152215 +1.0417359 0.32150372 +1.04177 0.32143574 +1.041788 0.32140009 +1.041811 0.32134935 +1.0418401 0.32128929 +1.0418529 0.32126297 +1.0418922 0.32119004 +1.0419002 0.32117655 +1.0419443 0.32110721 +1.0419566 0.32109006 +1.0419964 0.32103436 +1.0420208 0.3210035 +1.0420485 0.32096584 +1.0420888 0.32091692 +1.0421006 0.32090106 +1.0421527 0.32084097 +1.042164 0.32083045 +1.0422048 0.32078767 +1.0422569 0.32074523 +1.0422581 0.3207442 +1.042309 0.32069645 +1.0423466 0.32065825 +1.042361 0.32064221 +1.0424131 0.32057706 +1.0424166 0.3205724 +1.0424652 0.32050862 +1.0424846 0.32048623 +1.0425173 0.32044928 +1.0425694 0.32040409 +1.0425763 0.32039933 +1.0426215 0.32036401 +1.0426736 0.32033289 +1.0427166 0.32031129 +1.0427257 0.32030525 +1.0427778 0.32027206 +1.0428299 0.3202396 +1.0428576 0.32022267 +1.042882 0.3202031 +1.0429341 0.32016587 +1.0429862 0.32013588 +1.0429883 0.32013498 +1.0430383 0.32010856 + + + +# contour for confidence level 0.6826000000000001 +1.0427257 0.32097279 +1.0427778 0.32095754 +1.0428299 0.32095635 +1.042882 0.32096842 +1.0429341 0.32099571 +1.0429435 0.3210035 +1.0429862 0.32104929 +1.043011 0.32109006 +1.0430383 0.32115791 +1.0430442 0.32117655 +1.0430612 0.32126297 +1.0430675 0.32134935 +1.0430647 0.32143574 +1.0430543 0.32152215 +1.0430383 0.32160581 +1.0430378 0.32160857 +1.0430191 0.32169501 +1.042998 0.32178144 +1.0429862 0.3218248 +1.0429758 0.32186788 +1.0429525 0.3219543 +1.0429341 0.3220134 +1.0429263 0.32204073 +1.0428985 0.32212715 +1.042882 0.32217207 +1.0428679 0.32221357 +1.0428351 0.3223 +1.0428299 0.32231281 +1.0428019 0.32238642 +1.0427778 0.32244378 +1.0427664 0.32247285 +1.0427299 0.32255928 +1.0427257 0.32256874 +1.042694 0.3226457 +1.0426736 0.32269223 +1.042657 0.32273213 +1.0426215 0.32281178 +1.0426184 0.32281855 +1.0425766 0.32290498 +1.0425694 0.32291871 +1.0425264 0.32299141 +1.0425173 0.32300576 +1.0424652 0.32307643 +1.042464 0.32307783 +1.0424131 0.32314059 +1.0423926 0.32316426 +1.042361 0.32320304 +1.0423213 0.32325068 +1.042309 0.32326584 +1.0422569 0.323326 +1.0422464 0.32333711 +1.0422048 0.32337942 +1.0421544 0.32342353 +1.0421527 0.32342503 +1.0421006 0.32346744 +1.0420485 0.32350742 +1.0420452 0.32350996 +1.0419964 0.32355062 +1.0419443 0.32359366 +1.0419408 0.32359639 +1.0418922 0.32363726 +1.0418401 0.32367518 +1.0418268 0.32368282 +1.041788 0.3237059 +1.0417359 0.32372675 +1.0416838 0.32373643 +1.0416317 0.32373209 +1.0415796 0.32370805 +1.0415529 0.32368282 +1.0415276 0.32364935 +1.0415021 0.32359639 +1.041478 0.32350996 +1.0414755 0.32349276 +1.0414669 0.32342353 +1.0414633 0.32333711 +1.0414653 0.32325068 +1.0414722 0.32316426 +1.0414755 0.32313972 +1.0414834 0.32307783 +1.0414987 0.32299141 +1.0415179 0.32290498 +1.0415276 0.32286818 +1.0415395 0.32281855 +1.041563 0.32273213 +1.0415796 0.32267744 +1.0415887 0.3226457 +1.0416162 0.32255928 +1.0416317 0.32251587 +1.0416465 0.32247285 +1.0416797 0.32238642 +1.0416838 0.32237632 +1.0417138 0.3223 +1.0417359 0.32224791 +1.0417498 0.32221357 +1.0417877 0.32212715 +1.041788 0.32212638 +1.0418264 0.32204073 +1.0418401 0.32201271 +1.0418682 0.3219543 +1.0418922 0.32190788 +1.0419132 0.32186788 +1.0419443 0.32181149 +1.0419619 0.32178144 +1.0419964 0.32172414 +1.0420159 0.32169501 +1.0420485 0.32164613 +1.0420768 0.32160857 +1.0421006 0.321576 +1.0421438 0.32152215 +1.0421527 0.32151066 +1.0422048 0.32144627 +1.0422136 0.32143574 +1.0422569 0.32138382 +1.0422883 0.32134935 +1.042309 0.32132709 +1.042361 0.32127676 +1.0423769 0.32126297 +1.0424131 
0.32123051
+1.0424652 0.32118739
+1.0424789 0.32117655
+1.0425173 0.32114158
+1.0425694 0.32109491
+1.0425753 0.32109006
+1.0426215 0.32104545
+1.0426736 0.3210035
+1.0427257 0.32097279
+
+
+
diff --git a/input/theta_s_YHe.param b/input/theta_s_YHe.param
new file mode 100644
index 00000000..77072e29
--- /dev/null
+++ b/input/theta_s_YHe.param
@@ -0,0 +1,44 @@
+#------Experiments to test (separated with commas)-----
+
+data.experiments=['eff_like_from_covmat']
+
+#------ Parameter list -------
+
+# Compute theta_d
+data.cosmo_arguments['compute damping scale'] = 'yes'
+
+# Parameters to vary
+
+data.parameters['100*theta_s'] = [ 1.04110, None, None, 0.00030, 1, 'cosmo']
+data.parameters['YHe'] = [0.25, 0.1, 0.5, 0.05, 1, 'cosmo']
+data.parameters['100*theta_d'] = [0, None, None, 0, 1, 'derived']
+
+
+## Constant parameters:
+data.cosmo_arguments['omega_b'] = 2.257581e-02
+data.cosmo_arguments['omega_cdm'] = 1.191596e-01
+data.cosmo_arguments['ln10^{10}A_s'] = 3.057198e+00
+data.cosmo_arguments['n_s'] = 9.670203e-01
+data.cosmo_arguments['tau_reio'] = 6.039532e-02
+
+data.cosmo_arguments['Omega_Lambda'] = 6.861559e-01
+
+
+# Other cosmo parameters (fixed parameters, precision parameters, etc.)
+
+data.cosmo_arguments['sBBN file'] = data.path['cosmo']+'/bbn/sBBN.dat'
+data.cosmo_arguments['k_pivot'] = 0.05
+
+# The base model features two massless
+# and one massive neutrino with m=0.06eV.
+# The settings below ensure that Neff=3.046
+# and m/omega = 93.14 eV
+data.cosmo_arguments['N_ur'] = 2.0328
+data.cosmo_arguments['N_ncdm'] = 1
+data.cosmo_arguments['m_ncdm'] = 0.06
+data.cosmo_arguments['T_ncdm'] = 0.71611
+
+#------ Mcmc parameters ----
+
+data.N=10
+data.write_step=5
diff --git a/montepython/likelihoods/eff_like_from_covmat/__init__.py b/montepython/likelihoods/eff_like_from_covmat/__init__.py
new file mode 100644
index 00000000..c189386a
--- /dev/null
+++ b/montepython/likelihoods/eff_like_from_covmat/__init__.py
@@ -0,0 +1,33 @@
+import numpy as np
+import math
+import scipy.linalg as la
+from montepython.likelihood_class import Likelihood_prior
+from numpy.linalg import multi_dot
+
+class eff_like_from_covmat(Likelihood_prior):
+    def __init__(self, path, data, command_line):
+        # Call __init__ method of super class:
+        super(eff_like_from_covmat, self).__init__(path, data, command_line)
+        self.covmat_inverse = la.inv(self.covmat)
+        self.need_cosmo_arguments(data,{'compute damping scale':'yes'})
+
+    # Compute likelihood
+    def loglkl(self, cosmo, data):
+        covmat = self.covmat
+        mu = self.mu
+        mean_vec = np.array(mu)
+        mean_vec_class = np.array([cosmo.theta_s_100(),cosmo.get_current_derived_parameters(['100*theta_d'])['100*theta_d']])
+        #mean_vec_class = np.array([eval(s) for s in self.get_var_strings]) list comprehension
+        #mean_vec_class = np.array([1.04219541,0.32418698]) gives constant likelihood
+        dif_vec = mean_vec - mean_vec_class
+        dif_vec_T = dif_vec.T
+        exponent = dif_vec_T.dot(self.covmat_inverse).dot(dif_vec)
+        loglikelihood = -0.5*exponent # log of the multivariate Gaussian, up to a constant
+        return loglikelihood
+
+
+
+
+
+
+
diff --git a/montepython/likelihoods/eff_like_from_covmat/eff_like_from_covmat.data b/montepython/likelihoods/eff_like_from_covmat/eff_like_from_covmat.data
new file mode 100644
index 00000000..08abdaed
--- /dev/null
+++ b/montepython/likelihoods/eff_like_from_covmat/eff_like_from_covmat.data
@@ -0,0 +1,5 @@
+# Parameter list
+
+eff_like_from_covmat.covmat = [[ 2.680703e-07,-3.350036e-07],[-3.350036e-07,7.713178e-07]]
+eff_like_from_covmat.mu = [1.042278e+00, 3.223170e-01]
+eff_like_from_covmat.get_var_strings = ['cosmo.theta_s_100()','cosmo.theta_d_100()']
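
For reference, the loglkl() method of eff_like_from_covmat returns the Gaussian exponent -1/2 (x - mu)^T C^-1 (x - mu), built from the covmat and mu entries of eff_like_from_covmat.data above. A minimal standalone sketch of that arithmetic (plain numpy/scipy, outside MontePython; the test points below are arbitrary illustrative values, not fitted results):

import numpy as np
import scipy.linalg as la

# covmat and mu copied from eff_like_from_covmat.data
covmat = np.array([[ 2.680703e-07, -3.350036e-07],
                   [-3.350036e-07,  7.713178e-07]])
mu = np.array([1.042278e+00, 3.223170e-01])
covmat_inverse = la.inv(covmat)

def loglkl(theta_vec):
    # -1/2 (x - mu)^T C^-1 (x - mu), as in eff_like_from_covmat.loglkl()
    dif_vec = np.asarray(theta_vec) - mu
    return -0.5 * dif_vec.dot(covmat_inverse).dot(dif_vec)

print(loglkl(mu))                             # 0.0 at the mean
print(loglkl(mu + np.sqrt(np.diag(covmat))))  # a point offset by one marginal sigma in each parameter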
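
A note on comparing the drawn ellipses with the .dat contours: the contour file stores 2D credible regions containing 68.26% and 95.4% of the probability mass, while error_ellipse draws ellipses at a fixed Mahalanobis radius (sigma = 1 or 2). For a 2D Gaussian these are not the same: the radius enclosing probability mass p is sqrt(-2 ln(1 - p)), roughly 1.52 and 2.49 for the two levels above, which may be one reason the script exposes the 'factor' rescaling. A quick check, assuming only numpy:

import numpy as np

for p in (0.6826, 0.954):
    # Mahalanobis radius of the 2D Gaussian region containing probability mass p
    print(p, np.sqrt(-2.0 * np.log(1.0 - p)))  # ~1.515 and ~2.486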