Same Update to master #190

Merged: 4 commits, Aug 9, 2024
11 changes: 4 additions & 7 deletions soliket/bias/bias.py
@@ -26,8 +26,6 @@
function (have a look at the linear bias model for ideas).
"""

from typing import Optional

import numpy as np
from cobaya.theory import Theory

@@ -72,10 +70,9 @@ def must_provide(self, **requirements):
return needs

def _get_Pk_mm(self):
for pair in self._var_pairs:
self.k, self.z, Pk_mm = \
self.provider.get_Pk_grid(var_pair=pair, nonlinear=self.nonlinear)

self.k, self.z, Pk_mm = \
self.provider.get_Pk_grid(var_pair=list(self._var_pairs)[0],
nonlinear=self.nonlinear)
return Pk_mm

def get_Pk_gg_grid(self) -> dict:
@@ -93,7 +90,7 @@ class Linear_bias(Bias):
"""

def calculate(self, state: dict, want_derived: bool = True,
**params_values_dict) -> Optional[bool]:
**params_values_dict):
Pk_mm = self._get_Pk_mm()

state["Pk_gg_grid"] = params_values_dict["b_lin"] ** 2. * Pk_mm
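Note: the `_get_Pk_mm` change above drops the loop over variable pairs and reads the grid for the first (and only expected) pair directly. As a point of reference, here is a minimal sketch of how a Cobaya theory component typically requests and consumes `Pk_grid`; the grid settings (`z`, `k_max`) are illustrative and not the values SOLikeT uses:

```python
import numpy as np
from cobaya.theory import Theory


class ToyBias(Theory):
    """Illustrative only: fetch the matter power spectrum grid from the provider."""

    nonlinear: bool = False

    def must_provide(self, **requirements):
        if "Pk_gg_grid" in requirements:
            return {"Pk_grid": {"vars_pairs": [("delta_tot", "delta_tot")],
                                "nonlinear": self.nonlinear,
                                "z": np.linspace(0, 2, 21),
                                "k_max": 10.0}}
        return {}

    def _get_Pk_mm(self):
        # get_Pk_grid returns (k, z, Pk) with Pk shaped (len(z), len(k))
        self.k, self.z, Pk_mm = self.provider.get_Pk_grid(
            var_pair=("delta_tot", "delta_tot"), nonlinear=self.nonlinear)
        return Pk_mm
```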
7 changes: 4 additions & 3 deletions soliket/cash/cash.py
@@ -1,11 +1,12 @@
from typing import Optional

import numpy as np
from cobaya.likelihood import Likelihood

from .cash_data import CashCData


# Likelihood for independent Poisson-distributed data
# (here called Cash-C, see https://arxiv.org/abs/1912.05444)

class CashCLikelihood(Likelihood):
name: str = "Cash-C"
datapath = Optional[str]
@@ -17,7 +18,7 @@ def initialize(self):

def _get_data(self):
data = np.loadtxt(self.datapath, unpack=False)
N = data[:, -1] # assume data stored like column_stack([z, q, N])
N = data[:, -1] # assume data stored like column_stack([z, q, N])
x = data[:, :-1]
return x, N

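Note: `CashCLikelihood` models each (z, q) bin as an independent Poisson count (the Cash-C statistic referenced above). A minimal sketch of an independent-Poisson log-likelihood, not the exact SOLikeT implementation, with made-up counts:

```python
import numpy as np
from scipy.special import gammaln


def poisson_loglike(N, mu):
    """ln L = sum_i [ N_i ln(mu_i) - mu_i - ln(N_i!) ] for independent Poisson bins."""
    N = np.asarray(N, dtype=float)
    mu = np.asarray(mu, dtype=float)
    return np.sum(N * np.log(mu) - mu - gammaln(N + 1))


N_obs = np.array([3.0, 5.0, 1.0])     # observed counts per bin
mu_model = np.array([2.7, 4.9, 1.4])  # model expectation per bin
print(poisson_loglike(N_obs, mu_model))
```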
2 changes: 1 addition & 1 deletion soliket/ccl/ccl.py
@@ -156,7 +156,7 @@ def get_can_support_params(self) -> Sequence[str]:
return []

def calculate(self, state: dict, want_derived: bool = True,
**params_values_dict) -> bool:
**params_values_dict):
# calculate the general CCL cosmo object which likelihoods can then use to get
# what they need (likelihoods should cache results appropriately)
# get our requirements from self.provider
31 changes: 15 additions & 16 deletions soliket/clusters/clusters.py
@@ -18,7 +18,6 @@
p
"""
import os

import numpy as np
import pandas as pd
from scipy.interpolate import interp1d
@@ -27,7 +26,7 @@
from soliket.poisson import PoissonLikelihood

from .survey import SurveyData
from .sz_utils import szutils
from .sz_utils import szutils, trapezoid
from cobaya import LoggedError

C_KM_S = 2.99792e5
@@ -87,15 +86,15 @@ def get_requirements(self):
# "CCL": {"methods": {"sz_model": self._get_sz_model}, "kmax": 10},
}

def _get_sz_model(self, cosmo):
model = SZModel()
model.hmf = self.ccl.halos.MassFuncTinker08(cosmo, mass_def=self.mdef)
model.hmb = self.ccl.halos.HaloBiasTinker10(
cosmo, mass_def=self.mdef, mass_def_strict=False
)
model.hmc = self.ccl.halos.HMCalculator(cosmo, model.hmf, model.hmb, self.mdef)
# model.szk = SZTracer(cosmo)
return model
# def _get_sz_model(self, cosmo):
# model = SZModel()
# model.hmf = self.ccl.halos.MassFuncTinker08(cosmo, mass_def=self.mdef)
# model.hmb = self.ccl.halos.HaloBiasTinker10(
# cosmo, mass_def=self.mdef, mass_def_strict=False
# )
# model.hmc = self.ccl.halos.HMCalculator(cosmo, model.hmf, model.hmb, self.mdef)
# # model.szk = SZTracer(cosmo)
# return model

def _get_catalog(self):
self.survey = SurveyData(
@@ -202,7 +201,7 @@ def Prob_per_cluster(z, tsz_signal, tsz_signal_err):

dn_dzdm = 10 ** np.squeeze(dn_dzdm_interp((np.log10(HMF.M), c_z))) * h ** 4.0

ans = np.trapz(dn_dzdm * Pfunc_ind, dx=np.diff(HMF.M, axis=0), axis=0)
ans = trapezoid(dn_dzdm * Pfunc_ind, dx=np.diff(HMF.M, axis=0), axis=0)
return ans

return Prob_per_cluster
@@ -241,11 +240,11 @@ def _get_n_expected(self, **kwargs):

for Yt, frac in zip(self.survey.Ythresh, self.survey.frac_of_survey):
Pfunc = self.szutils.PfuncY(Yt, HMF.M, z_arr, param_vals, Ez_fn, DA_fn)
N_z = np.trapz(
N_z = trapezoid(
dn_dzdm * Pfunc, dx=np.diff(HMF.M[:, None] / h, axis=0), axis=0
)
Ntot += (
np.trapz(N_z * dVdz, x=z_arr)
trapezoid(N_z * dVdz, x=z_arr)
* 4.0
* np.pi
* self.survey.fskytotal
@@ -269,9 +268,9 @@ def _test_n_tot(self, **kwargs):
dn_dzdm = HMF.dn_dM(HMF.M, 500.0) * h ** 4.0 # getting rid of hs
# Test Mass function against Nemo.
Pfunc = 1.0
N_z = np.trapz(dn_dzdm * Pfunc, dx=np.diff(HMF.M[:, None] / h, axis=0), axis=0)
N_z = trapezoid(dn_dzdm * Pfunc, dx=np.diff(HMF.M[:, None] / h, axis=0), axis=0)
Ntot = (
np.trapz(N_z * dVdz, x=z_arr)
trapezoid(N_z * dVdz, x=z_arr)
* 4.0
* np.pi
* (600.0 / (4 * np.pi * (180 / np.pi) ** 2))
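Note: the hunks above only swap `np.trapz` for the `trapezoid` helper imported from `sz_utils`; the integration pattern is unchanged: integrate the mass function times the selection function over mass at each redshift, then integrate the resulting dN/dz against the volume element over redshift. A schematic numpy-only version with toy grids (all values below are made up):

```python
import numpy as np

try:
    from numpy import trapezoid            # NumPy >= 2.0
except ImportError:
    from numpy import trapz as trapezoid   # NumPy < 2.0

z_arr = np.linspace(0.1, 1.5, 50)            # redshift grid
M = np.logspace(13.5, 15.5, 100)[:, None]    # mass grid, shape (100, 1)
dn_dzdm = 1e-20 * (M / 1e14) ** -2.0 * np.ones_like(z_arr)  # toy mass function, (100, 50)
Pfunc = np.ones_like(dn_dzdm)                # toy selection function
dVdz = 1e9 * np.ones_like(z_arr)             # toy comoving volume element

# integrate over mass at each z (dx = mass-bin widths), then over redshift
N_z = trapezoid(dn_dzdm * Pfunc, dx=np.diff(M, axis=0), axis=0)
Ntot = trapezoid(N_z * dVdz, x=z_arr)
```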
15 changes: 8 additions & 7 deletions soliket/clusters/sz_utils.py
@@ -8,6 +8,10 @@
"""

import numpy as np
try:
from numpy import trapezoid
except ImportError:
from numpy import trapz as trapezoid
from scipy import interpolate

# from nemo import signals
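Note: the try/except at the top of this hunk is a compatibility shim: NumPy 2.0 renamed `np.trapz` to `np.trapezoid`, so importing `trapezoid` with a fallback keeps the module working on both old and new NumPy versions. The pattern in isolation:

```python
import numpy as np

try:
    from numpy import trapezoid            # NumPy >= 2.0
except ImportError:
    from numpy import trapz as trapezoid   # NumPy < 2.0

x = np.linspace(0.0, np.pi, 101)
print(trapezoid(np.sin(x), x=x))  # ~2.0 on either NumPy version
```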
@@ -16,8 +20,6 @@
k_Boltzmann)

# from astropy.cosmology import FlatLambdaCDM


# from .clusters import C_KM_S as C_in_kms

rho_crit0H100 = (3. / (8. * np.pi) * (100. * 1.e5) ** 2.) \
@@ -112,7 +114,7 @@ def P_of_gt_SN(self, LgY, MM, zz, Ynoise, param_vals, Ez_fn, Da_fn):

P_Y = np.nan_to_num(self.P_Yo_vec(LgYa2, MM, zz, param_vals, Ez_fn, Da_fn))

ans = np.trapz(P_Y * sig_thresh, x=LgY, axis=2) * np.log(10)
ans = trapezoid(P_Y * sig_thresh, x=LgY, axis=2) * np.log(10)
return ans

def PfuncY(self, YNoise, M, z_arr, param_vals, Ez_fn, Da_fn):
@@ -126,13 +128,12 @@ def PfuncY(self, YNoise, M, z_arr, param_vals, Ez_fn, Da_fn):

def P_of_Y_per(self, LgY, MM, zz, Y_c, Y_err, param_vals):
P_Y_sig = np.outer(np.ones(len(MM)), self.Y_prob(Y_c, LgY, Y_err))
LgYa = np.outer(np.ones(len(MM)), LgY)

LgYa = np.outer(np.ones([MM.shape[0], MM.shape[1]]), LgY)
LgYa2 = np.reshape(LgYa, (MM.shape[0], MM.shape[1], len(LgY)))

P_Y = np.nan_to_num(self.P_Yo(LgYa2, MM, zz, param_vals))
ans = np.trapz(P_Y * P_Y_sig, LgY, np.diff(LgY), axis=1) * np.log(10)
ans = trapezoid(P_Y * P_Y_sig, LgY, np.diff(LgY), axis=1) * np.log(10)

return ans
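
Note: the updated `LgYa` line tiles the log-Y grid across both axes of the 2D `MM` array before the trapezoidal integral over Y. A quick shape check of the same trick with toy dimensions (sizes are arbitrary):

```python
import numpy as np

n_m, n_z, n_y = 4, 3, 5                   # toy sizes: mass, redshift, log-Y axes
MM = np.ones((n_m, n_z))
LgY = np.linspace(-6.0, -3.0, n_y)

# np.outer flattens its first argument, giving shape (n_m * n_z, n_y) ...
LgYa = np.outer(np.ones([MM.shape[0], MM.shape[1]]), LgY)
# ... which is reshaped to (n_m, n_z, n_y) so it broadcasts against MM
LgYa2 = np.reshape(LgYa, (MM.shape[0], MM.shape[1], len(LgY)))
assert LgYa2.shape == (n_m, n_z, n_y)
```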

@@ -148,7 +149,7 @@ def Pfunc_per(self, MM, zz, Y_c, Y_err, param_vals, Ez_fn, Da_fn):

P_Y_sig = self.Y_prob(Y_c, LgY, Y_err)
P_Y = np.nan_to_num(self.P_Yo(LgYa, MM, zz, param_vals, Ez_fn, Da_fn))
ans = np.trapz(P_Y * P_Y_sig, LgY, np.diff(LgY), axis=1)
ans = trapezoid(P_Y * P_Y_sig, LgY, np.diff(LgY), axis=1)

return ans

@@ -171,7 +172,7 @@ def Pfunc_per_parallel(self, Marr, zarr, Y_c, Y_err, param_vals, Ez_fn, Da_fn):
P_Y_sig = self.Y_prob(Y_c, self.LgY, Y_err)
P_Y = np.nan_to_num(self.P_Yo(self.LgY, Marr, zarr, param_vals, Ez_fn, Da_fn))

ans = np.trapz(P_Y * P_Y_sig, x=self.LgY, axis=2)
ans = trapezoid(P_Y * P_Y_sig, x=self.LgY, axis=2)

return ans

10 changes: 7 additions & 3 deletions soliket/cross_correlation/cross_correlation.py
@@ -6,6 +6,10 @@
"""

import numpy as np
try:
from numpy import trapezoid
except ImportError:
from numpy import trapz as trapezoid
import sacc
from cobaya.log import LoggedError

@@ -58,7 +62,7 @@ def _get_nz(self, z, tracer, tracer_name, **params_values):
bias = params_values['{}_deltaz'.format(tracer_name)]
nz_biased = tracer.get_dndz(z - bias)

# nz_biased /= np.trapz(nz_biased, z)
# nz_biased /= np.trapezoid(nz_biased, z)

return nz_biased

@@ -198,7 +202,7 @@ def _get_theory(self, **params_values):
elif self.ia_mode == 'nla':
A_IA = params_values['A_IA']
eta_IA = params_values['eta_IA']
z0_IA = np.trapz(z_tracer1 * nz_tracer1)
z0_IA = trapezoid(z_tracer1 * nz_tracer1)

ia_z = (z_tracer1, A_IA * ((1 + z_tracer1) / (1 + z0_IA)) ** eta_IA)
elif self.ia_mode == 'nla-perbin':
@@ -238,7 +242,7 @@ def _get_theory(self, **params_values):
elif self.ia_mode == 'nla':
A_IA = params_values['A_IA']
eta_IA = params_values['eta_IA']
z0_IA = np.trapz(z_tracer2 * nz_tracer2)
z0_IA = trapezoid(z_tracer2 * nz_tracer2)

ia_z = (z_tracer2, A_IA * ((1 + z_tracer2) / (1 + z0_IA)) ** eta_IA)
elif self.ia_mode == 'nla-perbin':
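Note: in the 'nla' branches above, the intrinsic-alignment amplitude is scaled with redshift as A_IA * ((1 + z) / (1 + z0_IA)) ** eta_IA, and `trapezoid` now replaces `np.trapz` in the pivot-redshift estimate. A standalone sketch of that scaling with made-up parameter values and a normalised toy n(z) (the code itself computes `z0_IA` directly from the tracer's dndz):

```python
import numpy as np

try:
    from numpy import trapezoid
except ImportError:
    from numpy import trapz as trapezoid

z = np.linspace(0.0, 2.0, 200)
nz = np.exp(-0.5 * ((z - 0.8) / 0.2) ** 2)   # toy redshift distribution
nz /= trapezoid(nz, x=z)                     # normalise n(z)

A_IA, eta_IA = 0.5, 1.0                      # illustrative IA parameters
z0_IA = trapezoid(z * nz, x=z)               # mean redshift used as pivot
ia_z = (z, A_IA * ((1 + z) / (1 + z0_IA)) ** eta_IA)
```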
4 changes: 2 additions & 2 deletions soliket/gaussian/gaussian.py
@@ -73,12 +73,12 @@ def initialize(self):

self.log.info('Initialized.')

def initialize_with_provider(self, provider): # pragma: no cover
def initialize_with_provider(self, provider): # pragma: no cover
for like in self.likelihoods:
like.initialize_with_provider(provider)
# super().initialize_with_provider(provider)

def get_helper_theories(self): # pragma: no cover
def get_helper_theories(self): # pragma: no cover
helpers = {}
for like in self.likelihoods:
helpers.update(like.get_helper_theories())
72 changes: 36 additions & 36 deletions soliket/gaussian/gaussian_data.py
@@ -1,53 +1,51 @@
from typing import Optional, Sequence
import numpy as np

from scipy.linalg import LinAlgError, cholesky

try:
import holoviews as hv
except ImportError:
pass


def multivariate_normal_logpdf(theory, data, cov, inv_cov, log_det):
const = np.log(2 * np.pi) * (-len(data) / 2) + log_det * (-1 / 2)
delta = data - theory
#print(const,delta,np.dot(delta, inv_cov.dot(delta)))
return -0.5 * np.dot(delta, inv_cov.dot(delta)) + const
from cobaya.likelihoods.base_classes import _fast_chi_square


class GaussianData:
"""Named multivariate gaussian data
"""
Named multivariate gaussian data
"""
name: str # name identifier for the data
x: Sequence # labels for each data point
y: np.ndarray # data point values
cov: np.ndarray # covariance matrix
inv_cov: np.ndarray # inverse covariance matrix
ncovsims: Optional[int] # number of simulations used to estimate covariance

_fast_chi_squared = _fast_chi_square()

def __init__(self, name, x, y, cov, ncovsims=None):
def __init__(self, name, x: Sequence, y: Sequence[float], cov: np.ndarray,
ncovsims: Optional[int] = None):

self.name = str(name)
self.ncovsims = ncovsims

if not (len(x) == len(y) and cov.shape == (len(x), len(x))):
raise ValueError(f"Incompatible shapes! x={x.shape}, y={y.shape}, \
raise ValueError(f"Incompatible shapes! x={len(x)}, y={len(y)}, \
cov={cov.shape}")

self.x = x
self.y = y
self.y = np.ascontiguousarray(y)
self.cov = cov
try:
self.cholesky = cholesky(cov)
except LinAlgError:
raise ValueError("Covariance is not SPD!")
if ncovsims is None:
self.inv_cov = np.linalg.inv(self.cov)
else:
self.eigenevalues = np.linalg.eigvalsh(cov)
if self.eigenevalues.min() <= 0:
raise ValueError("Covariance is not positive definite!")

self.inv_cov = np.linalg.inv(self.cov)
if ncovsims is not None:
hartlap_factor = (self.ncovsims - len(x) - 2) / (self.ncovsims - 1)
self.inv_cov = hartlap_factor * np.linalg.inv(self.cov)
self.log_det = np.linalg.slogdet(self.cov)[1]
self.inv_cov *= hartlap_factor
log_det = np.log(self.eigenevalues).sum()
self.norm_const = -(np.log(2 * np.pi) * len(x) + log_det) / 2

def __len__(self):
return len(self.x)

def loglike(self, theory):
return multivariate_normal_logpdf(theory, self.y, self.cov, self.inv_cov,
self.log_det)
delta = self.y - theory
return -0.5 * self._fast_chi_squared(self.inv_cov, delta) + self.norm_const


class MultiGaussianData(GaussianData):
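
Note: the rewritten `GaussianData` above checks that the covariance is positive definite via its eigenvalues, precomputes the Gaussian normalisation constant from the log-eigenvalue sum, optionally applies the Hartlap debiasing factor (ncovsims - ndata - 2) / (ncovsims - 1) to the inverse covariance, and evaluates the chi-square through cobaya's `_fast_chi_square` helper. A self-contained numpy sketch of the same log-likelihood, checked against scipy's reference multivariate normal (Hartlap correction switched off so the two agree; data and covariance below are random):

```python
import numpy as np
from scipy.stats import multivariate_normal

rng = np.random.default_rng(0)
n = 5
A = rng.normal(size=(n, n))
cov = A @ A.T + n * np.eye(n)       # a well-conditioned SPD covariance
y = rng.normal(size=n)              # "data" vector
theory = np.zeros(n)                # "theory" vector

eigenvalues = np.linalg.eigvalsh(cov)
if eigenvalues.min() <= 0:
    raise ValueError("Covariance is not positive definite!")

inv_cov = np.linalg.inv(cov)
log_det = np.log(eigenvalues).sum()
norm_const = -(np.log(2 * np.pi) * n + log_det) / 2

delta = y - theory
loglike = -0.5 * delta @ inv_cov @ delta + norm_const

# agrees with the reference multivariate-normal log-pdf
assert np.isclose(loglike, multivariate_normal(mean=theory, cov=cov).logpdf(y))
```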
@@ -108,22 +106,22 @@ def loglike(self, theory):
def name(self):
return self.data.name

@property
def cov(self):
return self.data.cov

@property
def inv_cov(self):
return self.data.inv_cov

@property
def log_det(self):
return self.data.log_det
def cov(self):
return self.data.cov

@property
def norm_const(self):
return self.data.norm_const

@property
def labels(self):
return [x for y in [[name] * len(d) for
name, d in zip(self.names, self.data_list)] for x in y]
name, d in zip(self.names, self.data_list)] for x in y]

def _index_range(self, name):
if name not in self.names:
@@ -157,6 +155,8 @@ def _assemble_data(self):
self._data = GaussianData(" + ".join(self.names), x, y, cov)

def plot_cov(self, **kwargs):
import holoviews as hv

data = [
(f"{li}: {self.data.x[i]}", f"{lj}: {self.data.x[j]}", self.cov[i, j])
for i, li in zip(range(len(self.data)), self.labels)