Skip to content

Commit 629133c

Browse files
ConstantKernel bug + minor typos
Multiplying by a constant normally adds an extra parameter from the ConstantKernel, which breaks the code; we need to call ConstantKernel directly and specify that this should be an immutable parameter. Also removed the frivolous differences between rmjarvis/treegp (which this was originally cloned from) and pfleget/treegp (which this is being pushed to).
1 parent d80d7e6 commit 629133c

File tree

2 files changed

+7
-53
lines changed

2 files changed

+7
-53
lines changed

treegp/gp_interp.py

-12
Original file line numberDiff line numberDiff line change
@@ -99,30 +99,22 @@ def _fit(self, kernel, X, y, y_err):
9999
:param y: Values of the field. (n_samples)
100100
:param y_err: Error of y. (n_samples)
101101
"""
102-
print('start _fit')
103-
print('optimizer = ',self.optimizer)
104102
if self.optimizer != "none":
105103
# Hyperparameters estimation using 2-point correlation
106104
# function information.
107105
if self.optimizer in ['two-pcf', 'anisotropic']:
108106
anisotropic = self.optimizer == 'anisotropic'
109-
print('before two_pcf')
110107
self._optimizer = treegp.two_pcf(X, y, y_err,
111108
self.min_sep, self.max_sep,
112109
nbins=self.nbins,
113110
anisotropic=anisotropic,
114111
robust_fit=self.robust_fit,
115112
p0=self.p0_robust_fit)
116-
print('after two_pcf')
117113
kernel = self._optimizer.optimizer(kernel)
118-
print('made kernel')
119114
# Hyperparameters estimation using maximum likelihood fit.
120115
if self.optimizer == 'log-likelihood':
121-
print('before log_like')
122116
self._optimizer = treegp.log_likelihood(X, y, y_err)
123-
print('after log_like')
124117
kernel = self._optimizer.optimizer(kernel)
125-
print('made kernel')
126118
return kernel
127119

128120
def predict(self, X, return_cov=False):
@@ -218,15 +210,11 @@ def solve(self):
218210
Solve for hyperparameters if requested using 2-point correlation
219211
function method or maximum likelihood.
220212
"""
221-
print('start solve')
222213
self._init_theta = []
223214
kernel = copy.deepcopy(self.kernel)
224-
print('copied kernel')
225215
self._init_theta.append(kernel.theta)
226-
print('added to init_theta')
227216
self.kernel = self._fit(self.kernel, self._X,
228217
self._y-self._mean-self._spatial_average, self._y_err)
229-
print('done solve')
230218

231219
def return_2pcf(self):
232220
"""

treegp/two_pcf.py

+7-41
Original file line numberDiff line numberDiff line change
@@ -20,9 +20,9 @@ def get_correlation_length_matrix(size, e1, e2):
2020
:param size: Correlation lenght of the kernel.
2121
:param e1, e2: Shear applied to isotropic kernel.
2222
"""
23-
if abs(e1)>1 or abs(e2)>1:
24-
raise ValueError('abs value of e1 and e2 must be lower than one')
2523
e = np.sqrt(e1**2 + e2**2)
24+
if e>1:
25+
raise ValueError('magnitude of e must be lower than one')
2626
q = (1-e) / (1+e)
2727
phi = 0.5 * np.arctan2(e2,e1)
2828
rot = np.array([[np.cos(phi), np.sin(phi)],
@@ -92,15 +92,15 @@ def _model_skl(self, sigma, corr_length, g1, g2):
9292
from sklearn kernel.
9393
9494
:param sigma: Standard deviation of the gaussian random field.
95-
:param corr_length: Correlation lenght of the kernel.
95+
:param corr_length: Correlation length of the kernel.
9696
:param g1, g2: Shear applied to isotropic kernel.
9797
"""
98-
if abs(g1)>1 or abs(g2)>1:
98+
if (g1**2 + g2**2)>1:
9999
return None
100100
else:
101101
L = get_correlation_length_matrix(corr_length, g1, g2)
102102
invLam = np.linalg.inv(L)
103-
kernel_used = sigma**2 * self.kernel_class(invLam=invLam)
103+
kernel_used = sklearn.gaussian_process.kernels.ConstantKernel(sigma**2,constant_value_bounds = "fixed") * self.kernel_class(invLam=invLam)
104104
pcf = kernel_used.__call__(self.coord,Y=np.zeros_like(self.coord))[:,0]
105105
self.kernel_fit = kernel_used
106106
return pcf
@@ -140,7 +140,6 @@ def _minimize_minuit(self, p0 = [3000., 0.2, 0.2]):
140140
141141
:param p0: List of starting points.
142142
"""
143-
print('start minimize_minuit')
144143
with warnings.catch_warnings():
145144
warnings.simplefilter("ignore")
146145
if int(iminuit.__version__[0])>=2:
@@ -153,13 +152,11 @@ def _minimize_minuit(self, p0 = [3000., 0.2, 0.2]):
153152
self.m.migrad()
154153
results = [self.m.values[key] for key in self.m.values.keys()]
155154
self._fit_ok = self.m.migrad_ok()
156-
print('results = ',results)
157155

158156
self._minuit_result = results
159157
self.result = [np.sqrt(self.alpha[0][0]), results[0],
160158
results[1], results[2],
161159
self.alpha[1][0]]
162-
print('done minimize_minuit')
163160

164161

165162
def minimize_minuit(self, p0 = [3000., 0.2, 0.2]):
@@ -190,11 +187,10 @@ def minimize_minuit(self, p0 = [3000., 0.2, 0.2]):
190187
break
191188
pcf = self._model_skl(self.result[0], self.result[1],
192189
self.result[2], self.result[3])
193-
print('pcf = ',pcf)
194190

195191
class two_pcf(object):
196192
"""
197-
Fit statistical uncertaintie on two-point correlation function using bootstraping.
193+
Fit statistical uncertainty on two-point correlation function using bootstraping.
198194
199195
:param X: Coordinates of the field. (n_samples, 1 or 2)
200196
:param y: Values of the field. (n_samples)
@@ -252,29 +248,22 @@ def comp_2pcf(self, X, y, y_err):
252248
:param y: Values of the field. (n_samples)
253249
:param y_err: Error of y. (n_samples)
254250
"""
255-
print("start comp_2pcf")
256251
if np.sum(y_err) == 0:
257252
w = None
258253
else:
259254
w = 1./y_err**2
260255

261256
if self.anisotropic:
262-
print("anisotropic")
263257
cat = treecorr.Catalog(x=X[:,0], y=X[:,1], k=(y-np.mean(y)), w=w)
264-
print("KK: ",self.min_sep,self.max_sep,self.nbins)
265258
kk = treecorr.KKCorrelation(min_sep=self.min_sep, max_sep=self.max_sep, nbins=self.nbins,
266-
bin_type='TwoD', bin_slop=0, verbose=3)
267-
print("made kk")
259+
bin_type='TwoD', bin_slop=0)
268260
kk.process(cat)
269-
print("done process")
270-
#print("kk.xi = ",kk.xi)
271261
# Need a mask in the case of the 2D correlation function, to compute
272262
# the covariance matrix using the bootstrap. The 2D correlation
273263
# function is symmetric, so just half of the correlation function
274264
# is useful to compute the covariance matrix. If it is not done,
275265
# the covariance matrix is consequently not positive definite.
276266
npixels = len(kk.xi)**2
277-
#print("npixels = ",npixels)
278267
mask = np.ones_like(kk.xi, dtype=bool)
279268
mask = mask.reshape((int(np.sqrt(npixels)), int(np.sqrt(npixels))))
280269

@@ -290,20 +279,16 @@ def comp_2pcf(self, X, y, y_err):
290279
dx = dy.T
291280

292281
distance = np.array([dx.reshape(npixels), dy.reshape(npixels)]).T
293-
#print("distance = ",distance)
294282
Coord = distance
295283
xi = kk.xi.reshape(npixels)
296-
print("xi = ",xi)
297284
else:
298285
cat = treecorr.Catalog(x=X[:,0], y=X[:,1], k=(y-np.mean(y)), w=w)
299-
print("KK: ",self.min_sep,self.max_sep,self.nbins)
300286
kk = treecorr.KKCorrelation(min_sep=self.min_sep, max_sep=self.max_sep, nbins=self.nbins)
301287
kk.process(cat)
302288
distance = kk.meanr
303289
mask = np.ones_like(kk.xi, dtype=bool)
304290
Coord = np.array([distance,np.zeros_like(distance)]).T
305291
xi = kk.xi
306-
print("xi = ",xi)
307292

308293
return xi, distance, Coord, mask
309294

@@ -334,9 +319,7 @@ def return_2pcf(self, seed=610639139):
334319
335320
:param seed: seed of the random generator.
336321
"""
337-
print('start return_2pcf')
338322
xi, distance, coord, mask = self.comp_2pcf(self.X, self.y, self.y_err)
339-
#print('xi = ',xi)
340323
if self.anisotropic:
341324
# Choice done from Andy Taylor et al. 2012
342325
# see https://doi.org/10.1093/mnras/stt270
@@ -346,16 +329,12 @@ def f_bias(x, npixel=len(xi[mask])):
346329
bottom = x - npixel - 2.
347330
return (top/bottom) - 2.
348331
results = optimize.fsolve(f_bias, len(xi[mask]) + 10)
349-
#print('results = ',results)
350332
xi_cov = self.comp_xi_covariance(n_bootstrap=int(results[0]), mask=mask, seed=seed)
351-
#print('xi_cov = ',xi_cov)
352333
bias_factor = (int(results[0]) - 1.) / (int(results[0]) - len(xi[mask]) - 2.)
353334
xi_weight = np.linalg.inv(xi_cov) * bias_factor
354-
#print('xi_wt = ',xi_weight)
355335
else:
356336
# let like developed initialy for the moment
357337
xi_weight = np.eye(len(xi)) * 1./np.var(self.y)
358-
print('done return_2pcf')
359338
return xi, xi_weight, distance, coord, mask
360339

361340
def optimizer(self, kernel):
@@ -365,15 +344,13 @@ def optimizer(self, kernel):
365344
366345
:param kernel: sklearn.gaussian_process kernel.
367346
"""
368-
print('start optimizer')
369347
size_x = np.max(self.X[:,0]) - np.min(self.X[:,0])
370348
if self.ndim == 2:
371349
size_y = np.max(self.X[:,1]) - np.min(self.X[:,1])
372350
rho = float(len(self.X[:,0])) / (size_x * size_y)
373351
if self.ndim == 1:
374352
size_y = 0.
375353
rho = float(len(self.X[:,0])) / size_x
376-
print('rho = ',rho)
377354
# if min_sep is None and isotropic GP, set min_sep to the average separation
378355
# between data.
379356
if self.min_sep is not None:
@@ -383,45 +360,36 @@ def optimizer(self, kernel):
383360
min_sep = 0.
384361
else:
385362
min_sep = np.sqrt(1./rho)
386-
print('min_sep = ',min_sep)
387363
# if max_sep is None, set max_sep to half of the size of the
388364
# given field.
389365
if self.max_sep is not None:
390366
max_sep = self.max_sep
391367
else:
392368
max_sep = np.sqrt(size_x**2 + size_y**2)/2.
393-
print('max_sep = ',min_sep)
394369

395370
self.min_sep = min_sep
396371
self.max_sep = max_sep
397372

398373
xi, xi_weight, distance, coord, mask = self.return_2pcf()
399-
print('xi = ',xi)
400374

401375
def PCF(param, k=kernel):
402376
kernel = k.clone_with_theta(param)
403377
pcf = kernel.__call__(coord,Y=np.zeros_like(coord))[:,0]
404378
return pcf
405379

406380
xi_mask = xi[mask]
407-
print('xi_mask = ',xi_mask)
408381
def chi2(param):
409382
residual = xi_mask - PCF(param)[mask]
410383
return residual.dot(xi_weight.dot(residual))
411384

412-
print('robust_fit? ',self.robust_fit)
413385
if self.robust_fit:
414386
robust = robust_2dfit(kernel, xi,
415387
coord[:,0], coord[:,1],
416388
xi_weight, mask=mask)
417-
print('made robust')
418389
robust.minimize_minuit(p0=self.p0_robust_fit)
419-
print('after minimize_minuit')
420390
kernel = copy.deepcopy(robust.kernel_fit)
421-
print('after copy kernel')
422391
cst = robust.result[-1]
423392
self._results_robust = robust.result
424-
print('results_robust ',robust.result)
425393
else:
426394
p0 = kernel.theta
427395
results_fmin = optimize.fmin(chi2,p0,disp=False)
@@ -432,13 +400,11 @@ def chi2(param):
432400
results = results[ind_min]
433401
kernel = kernel.clone_with_theta(results)
434402
cst = 0
435-
print('results ',results)
436403

437404
self._2pcf = xi
438405
self._2pcf_weight = xi_weight
439406
self._2pcf_dist = distance
440407
self._2pcf_fit = PCF(kernel.theta) + cst
441408
self._2pcf_mask = mask
442409
self._kernel = copy.deepcopy(kernel)
443-
print('done making kernel')
444410
return kernel

0 commit comments

Comments
 (0)