np.testing.assert_allclose(kk2.meanlogr, kk.meanlogr)
np.testing.assert_allclose(kk2.xi, kk.xi)
ascii_name = 'output/kk_ascii.txt'
kk.write(ascii_name, precision=16)
kk3 = treecorr.KKCorrelation(min_sep=min_sep, max_sep=max_sep, nbins=nbins)
kk3.read(ascii_name)
np.testing.assert_allclose(kk3.npairs, kk.npairs)
np.testing.assert_allclose(kk3.weight, kk.weight)
np.testing.assert_allclose(kk3.meanr, kk.meanr)
np.testing.assert_allclose(kk3.meanlogr, kk.meanlogr)
np.testing.assert_allclose(kk3.xi, kk.xi)
fits_name = 'output/kk_fits.fits'
kk.write(fits_name)
kk4 = treecorr.KKCorrelation(min_sep=min_sep, max_sep=max_sep, nbins=nbins)
kk4.read(fits_name)
np.testing.assert_allclose(kk4.npairs, kk.npairs)
np.testing.assert_allclose(kk4.weight, kk.weight)
np.testing.assert_allclose(kk4.meanr, kk.meanr)
np.testing.assert_allclose(kk4.meanlogr, kk.meanlogr)
np.testing.assert_allclose(kk4.xi, kk.xi)
with assert_raises(TypeError):
    kk2 += config
kk4 = treecorr.KKCorrelation(min_sep=min_sep/2, max_sep=max_sep, nbins=nbins)
with assert_raises(ValueError):
    kk2 += kk4
kk5 = treecorr.KKCorrelation(min_sep=min_sep, max_sep=max_sep*2, nbins=nbins)
with assert_raises(ValueError):
    kk2 += kk5
kk6 = treecorr.KKCorrelation(min_sep=min_sep, max_sep=max_sep, nbins=nbins*2)
with assert_raises(ValueError):
    kk2 += kk6
return
# Check that running via the corr2 script works correctly.
config = treecorr.config.read_config('configs/kk_direct_spherical.yaml')
cat1.write(config['file_name'])
cat2.write(config['file_name2'])
treecorr.corr2(config)
data = fitsio.read(config['kk_file_name'])
np.testing.assert_allclose(data['r_nom'], kk.rnom)
np.testing.assert_allclose(data['npairs'], kk.npairs)
np.testing.assert_allclose(data['weight'], kk.weight)
np.testing.assert_allclose(data['xi'], kk.xi, rtol=1.e-3)
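# For reference, an equivalent configuration can also be given as a dict and passed
# directly to treecorr.corr2.  This is only an illustrative sketch; the actual settings
# live in configs/kk_direct_spherical.yaml, which is not shown here, and the file names
# below are placeholders.
sketch_config = {
    'file_name': 'data/kk_cat1.fits',
    'file_name2': 'data/kk_cat2.fits',
    'ra_col': 'ra', 'dec_col': 'dec', 'ra_units': 'rad', 'dec_units': 'rad',
    'k_col': 'k',
    'min_sep': min_sep, 'max_sep': max_sep, 'nbins': nbins, 'sep_units': 'deg',
    'bin_slop': 0,
    'kk_file_name': 'output/kk_sketch.fits',
}
# treecorr.corr2(sketch_config)  # would run the same kind of KK calculation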
# Repeat with bin_slop = 0.
# And don't do any top-level recursion so we actually test not going to the leaves.
kk = treecorr.KKCorrelation(min_sep=min_sep, max_sep=max_sep, nbins=nbins,
                            sep_units='deg', bin_slop=0, max_top=0)
kk.process(cat1, cat2)
np.testing.assert_array_equal(kk.npairs, true_npairs)
np.testing.assert_allclose(kk.weight, true_weight, rtol=1.e-5, atol=1.e-8)
np.testing.assert_allclose(kk.xi, true_xi, rtol=1.e-3, atol=1.e-6)
print('max abs diff = ',np.max(np.abs(kk.xi - xi_brut)))
print('max rel diff = ',np.max(np.abs(kk.xi - xi_brut)/np.abs(kk.xi)))
np.testing.assert_allclose(kk.xi, xi_brut, atol=1.e-7)
# Auto-correlation should do the same thing.
kk.process(cat1)
print('max abs diff = ',np.max(np.abs(kk.xi - xi_brut)))
print('max rel diff = ',np.max(np.abs(kk.xi - xi_brut)/np.abs(kk.xi)))
np.testing.assert_allclose(kk.xi, xi_brut, atol=1.e-7)
# Repeat with weights.
xi_brut = corr2d(x, y, kappa, kappa, w=1./kappa_err**2, rmax=max_sep, bins=nbins)
cat2 = treecorr.Catalog(x=x, y=y, k=kappa, g1=gamma1, g2=gamma2, w=1./kappa_err**2)
# NB. Testing that min_sep = 0 is default
kk = treecorr.KKCorrelation(max_sep=max_sep, nbins=nbins, bin_type='TwoD', brute=True)
kk.process(cat2, cat2)
print('max abs diff = ',np.max(np.abs(kk.xi - xi_brut)))
print('max rel diff = ',np.max(np.abs(kk.xi - xi_brut)/np.abs(kk.xi)))
np.testing.assert_allclose(kk.xi, xi_brut, atol=1.e-7)
kk.process(cat2)
np.testing.assert_allclose(kk.xi, xi_brut, atol=1.e-7)
# Check GG
xi_brut = corr2d(x, y, gamma, np.conj(gamma), rmax=max_sep, bins=nbins)
# Equivalent bin_size = 2. Check omitting nbins
gg = treecorr.GGCorrelation(max_sep=max_sep, bin_size=2., bin_type='TwoD', brute=True)
gg.process(cat1)
print('max abs diff = ',np.max(np.abs(gg.xip - xi_brut)))
print('max rel diff = ',np.max(np.abs(gg.xip - xi_brut)/np.abs(gg.xip)))
np.testing.assert_allclose(gg.xip, xi_brut, atol=2.e-7)
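# The corr2d helper used above is not shown in this excerpt.  The function below is an
# illustrative sketch (an assumption, not the actual helper) of the kind of brute-force
# 2D pair binning these checks compare against: every pair (i, j) is placed in a pixel by
# its (dx, dy) offset, and each pixel stores the w_i w_j-weighted mean of k_i k_j.
# For the shear case, callers would pass np.conj(gamma) as k2, as above.
import numpy as np

def brute_corr2d_sketch(x, y, k1, k2, w=None, rmax=1., bins=20):
    if w is None:
        w = np.ones_like(x)
    idx = np.arange(len(x))
    i1, i2 = [a.ravel() for a in np.meshgrid(idx, idx, indexing='ij')]
    dx = x[i2] - x[i1]                      # pair offsets (self-pairs land in the center)
    dy = y[i2] - y[i1]
    ww = w[i1] * w[i2]
    rng = [[-rmax, rmax], [-rmax, rmax]]
    wsum = np.histogram2d(dx, dy, bins=bins, range=rng, weights=ww)[0]
    wkk = np.histogram2d(dx, dy, bins=bins, range=rng,
                         weights=ww * np.real(k1[i1] * k2[i2]))[0]
    with np.errstate(invalid='ignore', divide='ignore'):
        return np.where(wsum > 0, wkk / wsum, 0.)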
print('NG2: ',s1, t1-t0, s1-s0)
gk_cat2.unload()
np.testing.assert_allclose(ng1.xi, ng2.xi)
np.testing.assert_allclose(ng1.weight, ng2.weight)
# KK with r_col now to test that that works properly.
gk_cat1 = treecorr.Catalog(file_name,
                           ra_col='ra', dec_col='dec', ra_units='rad', dec_units='rad',
                           r_col='r', g1_col='g1', g2_col='g2', k_col='k',
                           patch_centers=patch_centers)
gk_cat2 = treecorr.Catalog(file_name,
                           ra_col='ra', dec_col='dec', ra_units='rad', dec_units='rad',
                           r_col='r', g1_col='g1', g2_col='g2', k_col='k',
                           patch_centers=patch_centers, save_patch_dir='output')
kk1 = treecorr.KKCorrelation(bin_size=0.5, min_sep=1., max_sep=20.)
t0 = time.time()
s0 = hp.heap().size if hp else 0
kk1.process(gk_cat1)
t1 = time.time()
s1 = hp.heap().size if hp else 0
print('KK1: ',s1, t1-t0, s1-s0)
gk_cat1.unload()
kk2 = treecorr.KKCorrelation(bin_size=0.5, min_sep=1., max_sep=20.)
t0 = time.time()
s0 = hp.heap().size if hp else 0
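# low_mem=True trades a little speed for lower memory use: patch data are loaded and
# released as needed rather than held for the whole calculation.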
kk2.process(gk_cat2, low_mem=True)
t1 = time.time()
s1 = hp.heap().size if hp else 0
print('KK2: ',s1, t1-t0, s1-s0)
gk_cat2.unload()
np.testing.assert_allclose(kk1.xi, kk2.xi)
r = np.ones_like(x)
# Use x for "kappa" so there's a strong real correlation function
cat1 = treecorr.Catalog(ra=ra, dec=dec, k=x, ra_units='rad', dec_units='rad')
cat2 = treecorr.Catalog(ra=ra, dec=dec, k=x, r=r, ra_units='rad', dec_units='rad')
config = {
    'min_sep' : 0.01,
    'max_sep' : 1.57,
    'nbins' : nbins,
    'bin_slop' : 0.2,
    'verbose' : 1
}
kk_sphere = treecorr.KKCorrelation(config)
kk_chord = treecorr.KKCorrelation(config)
kk_euclid = treecorr.KKCorrelation(config)
kk_euclid.process(cat1, metric='Euclidean')
kk_sphere.process(cat1, metric='Arc')
kk_chord.process(cat2, metric='Euclidean')
for tag in [ 'rnom', 'logr', 'meanr', 'meanlogr', 'npairs', 'xi' ]:
    for name, dd in [ ('Euclid', kk_euclid), ('Sphere', kk_sphere), ('Chord', kk_chord) ]:
        print(name, tag, '=', getattr(dd,tag))
# rnom and logr should be identical
np.testing.assert_array_equal(kk_sphere.rnom, kk_euclid.rnom)
np.testing.assert_array_equal(kk_chord.rnom, kk_euclid.rnom)
np.testing.assert_array_equal(kk_sphere.logr, kk_euclid.logr)
np.testing.assert_array_equal(kk_chord.logr, kk_euclid.logr)
# meanr should be similar for sphere and chord, but euclid is larger, since the chord
# distances have been scaled up to the real great circle distances
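# Sketch of the checks implied by the comment above; the 10% tolerance is an assumption,
# not a value from the original test.
np.testing.assert_allclose(kk_sphere.meanr, kk_chord.meanr, rtol=0.1)
assert np.all(kk_euclid.meanr >= kk_chord.meanr)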
def comp_2pcf(self, X, y, y_err):
    """
    Estimate 2-point correlation function using TreeCorr.

    :param X: The independent covariates. (n_samples, 2)
    :param y: The dependent responses. (n_samples, n_targets)
    :param y_err: Error of y. (n_samples, n_targets)
    """
    if np.sum(y_err) == 0:
        w = None
    else:
        w = 1./y_err**2

    if self.anisotropic:
        cat = treecorr.Catalog(x=X[:,0], y=X[:,1], k=(y-np.mean(y)), w=w)
        kk = treecorr.KKCorrelation(min_sep=self.min_sep, max_sep=self.max_sep,
                                    nbins=self.nbins, bin_type='TwoD', bin_slop=0)
        kk.process(cat)
        # Need a mask in the case of the 2D correlation function, to compute
        # the covariance matrix using the bootstrap. The 2D correlation
        # function is symmetric, so just half of the correlation function
        # is useful to compute the covariance matrix. If it is not done,
        # the covariance matrix is consequently not positive definite.
        npixels = len(kk.xi)**2
        mask = np.ones_like(kk.xi, dtype=bool)
        mask = mask.reshape((int(np.sqrt(npixels)), int(np.sqrt(npixels))))
        boolean_mask_odd = (len(mask)%2 == 0)
        even_or_odd = len(mask)%2
        nmask = int((len(mask)/2) + even_or_odd)
        mask[nmask:,:] = False
        mask[nmask-1][nmask:] = boolean_mask_odd
        mask = mask.reshape(npixels)

        distance = np.array([kk.dx.reshape(npixels), kk.dy.reshape(npixels)]).T
        Coord = distance
        xi = kk.xi.reshape(npixels)
    else:
        cat = treecorr.Catalog(x=X[:,0], y=X[:,1], k=(y-np.mean(y)), w=w)
        kk = treecorr.KKCorrelation(min_sep=self.min_sep, max_sep=self.max_sep,
                                    nbins=self.nbins)
        kk.process(cat)
        distance = kk.meanr
        mask = np.ones_like(kk.xi, dtype=bool)
        Coord = np.array([distance, np.zeros_like(distance)]).T
        xi = kk.xi
    return xi, distance, Coord, mask
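# Hypothetical usage sketch: the _Params holder and all numbers below are illustrative
# assumptions, not part of the original class.  comp_2pcf only relies on these four
# attributes of self plus the X, y, y_err arrays.
import numpy as np

class _Params:
    anisotropic = False
    min_sep = 0.5
    max_sep = 20.
    nbins = 25

_Params.comp_2pcf = comp_2pcf  # reuse the function above as a method of the stand-in

rng = np.random.RandomState(42)
X = rng.uniform(0., 100., size=(500, 2))   # (n_samples, 2) positions
y = rng.normal(0., 0.02, size=500)         # scalar field values (e.g. size residuals)
y_err = np.full(500, 0.01)                 # per-point errors -> weights 1/y_err**2
xi, distance, coord, mask = _Params().comp_2pcf(X, y, y_err)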
if config['nn_statistic'] == 'compensated':
    logger.warning("Performing DR calculations...")
    dr = treecorr.NNCorrelation(config,logger)
    dr.process(cat1,rand2)
    logger.info("Done DR calculations.")
    rd = treecorr.NNCorrelation(config,logger)
    rd.process(rand1,cat2)
    logger.info("Done RD calculations.")
dd.write(config['nn_file_name'],rr,dr,rd)
logger.warning("Wrote NN correlation to %s",config['nn_file_name'])
# Do KK correlation function if necessary
if 'kk_file_name' in config:
logger.warning("Performing KK calculations...")
kk = treecorr.KKCorrelation(config,logger)
kk.process(cat1,cat2)
logger.info("Done KK calculations.")
kk.write(config['kk_file_name'])
logger.warning("Wrote KK correlation to %s",config['kk_file_name'])
# Do NK correlation function if necessary
if 'nk_file_name' in config:
    if cat2 is None:
        raise TypeError("file_name2 is required for nk correlation")
    logger.warning("Performing NK calculations...")
    nk = treecorr.NKCorrelation(config,logger)
    nk.process(cat1,cat2)
    logger.info("Done NK calculation.")
    rk = None
    if rand1 is None: