getEllipsoidSamples.py (forked from shaonghosh/EM_Bright)
import os
import select
import sys
import stat
from functools import partial
from optparse import OptionParser, OptionGroup
import numpy as np
import time
import datetime
import lal
import lalsimulation as lalsim
import glue.lal
from glue.ligolw import utils, ligolw, lsctables, table, ilwd
lsctables.use_in(ligolw.LIGOLWContentHandler)
from glue.ligolw.utils import process
from glue import pipeline
#from pylal import series
from lal import series
from lalinference.rapid_pe import lalsimutils as lsu
import effectiveFisher as eff
from lalinference.rapid_pe import common_cl
'''
This function returns samples from the 3D ambiguity ellipsoid around the triggered mass1,
mass2 and chi1 values.
Example of the function call (PSD is a collection of 'IFO=path_to_psd_xml' strings):
samples = getSamples('TEST', 10.0, 1.4, -0.5, 12.0, 1000, ['H1=../psds_2016.xml.gz', 'L1=../psds_2016.xml.gz'], saveData=True, plot=True, path='/path/where/you/want/the/output/to/go')
'''
def getSamples(graceid, mass1, mass2, chi1, network_snr, samples, PSD,
               fmin=30, NMcs=5, NEtas=5, NChis=5,
               mass1_cut=5.0, chi1_cut=0.05,
               lowMass_approx='lalsim.SpinTaylorT4', highMass_approx='lalsim.IMRPhenomPv2',
               Forced=False, logFile=False, saveData=False, plot=False, path=False, show=False):
m1_SI = mass1 * lal.MSUN_SI
m2_SI = mass2 * lal.MSUN_SI
# min_mc_factor, max_mc_factor = 0.9, 1.1
min_mc_factor, max_mc_factor = 0.98, 1.02
min_eta, max_eta = 0.05, 0.25
min_chi1, max_chi1 = -0.99, 0.99
# Control evaluation of the effective Fisher grid
# NMcs = 10
# NEtas = 10
# NChis = 10
# match_cntr = 0.9 # Fill an ellipsoid of match = 0.9
bank_min_match = 0.97
match_cntr = np.min([np.max([0.9, 1 - 9.2/2/network_snr**2]), bank_min_match]) ## Richard's suggestion
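    # Note (added): the SNR-dependent term follows the usual relation
    # 1 - match ~ Delta(chi^2) / (2 * rho^2); Delta(chi^2) = 9.2 is roughly the
    # 99% level for 2 degrees of freedom. The 0.9 floor and the bank_min_match
    # cap keep the contour within a sensible range.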
# wide_match = 1 - (1 - match_cntr)**(2/3.0)
wide_match = match_cntr * 0.8 ### Richard's suggestion
fit_cntr = match_cntr # Do the effective Fisher fit with pts above this match
Nrandpts = samples # Requested number of pts to put inside the ellipsoid
template_min_freq = fmin ### This needs to be discussed
ip_min_freq = fmin ### This needs to be discussed
lambda1, lambda2 = 0, 0
#
# Setup signal and IP class
#
param_names = ['Mc', 'eta', 'spin1z']
McSIG = lsu.mchirp(m1_SI, m2_SI)
etaSIG = lsu.symRatio(m1_SI, m2_SI)
chiSIG = chi1
if logFile:
log = open(logFile, 'a') ### Generate log file
    if Forced or (mass1 > mass1_cut and chi1 > chi1_cut): ## If forced to use the IMR waveform, or if the mass1 and spin1z values are above the cuts.
if logFile:
log.writelines( str(datetime.datetime.today()) + '\t' + 'Using IMRPhenomPv2 approximation.' + '\n')
else:
print 'Using IMRPhenomPv2 approximation.'
PSIG = lsu.ChooseWaveformParams(
m1=m1_SI, m2=m2_SI, spin1z=chi1,
lambda1=lambda1, lambda2=lambda2,
fmin=template_min_freq,
approx=eval(highMass_approx)
)
else:
if logFile:
log.writelines( str(datetime.datetime.today()) + '\t' + 'Using SpinTaylorT4 approximation.' + '\n')
else:
print 'Using SpinTaylorT4 approximation.'
PSIG = lsu.ChooseWaveformParams(
m1=m1_SI, m2=m2_SI, spin1z=chi1,
lambda1=lambda1, lambda2=lambda2,
fmin=template_min_freq,
approx=eval(lowMass_approx)
)
# Find a deltaF sufficient for entire range to be explored
PTEST = PSIG.copy()
# Check the waveform generated in the corners for the
# longest possible waveform
PTEST.m1, PTEST.m2 = lsu.m1m2(McSIG*min_mc_factor, min_eta)
deltaF_1 = lsu.findDeltaF(PTEST)
PTEST.m1, PTEST.m2 = lsu.m1m2(McSIG*min_mc_factor, max_eta)
deltaF_2 = lsu.findDeltaF(PTEST)
# set deltaF accordingly
PSIG.deltaF = min(deltaF_1, deltaF_2)
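    # Note (added): the two corners checked above (smallest chirp mass at the two
    # eta extremes) bound the longest possible waveform, so the smaller candidate
    # deltaF (i.e. the finest frequency resolution) is adopted for the whole grid.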
PTMPLT = PSIG.copy()
psd_map = common_cl.parse_cl_key_value(PSD)
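    # Note (added): the loop below inverts the parsed instrument -> PSD-file
    # mapping into {psd_file: set(instruments)}, so that each PSD file is loaded
    # only once even when several detectors share it (items() returns a copy in
    # Python 2, so deleting keys inside the loop is safe).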
for inst, psdfile in psd_map.items():
        if psdfile in psd_map:
psd_map[psdfile].add(inst)
else:
psd_map[psdfile] = set([inst])
del psd_map[inst]
for psdf, insts in psd_map.iteritems():
xmldoc = utils.load_filename(psdf, contenthandler=series.PSDContentHandler)
# FIXME: How to handle multiple PSDs
for inst in insts:
psd = series.read_psd_xmldoc(xmldoc, root_name=None)[inst]
psd_f_high = len(psd.data.data)*psd.deltaF
f = np.arange(0, psd_f_high, psd.deltaF)
fvals = np.arange(0, psd_f_high, PSIG.deltaF)
eff_fisher_psd = np.interp(fvals, f, psd.data.data)
analyticPSD_Q = False
freq_upper_bound = np.min( [2000.0, (psd.data.length) * (psd.deltaF) * 0.98])
print 'freq_upper_bound = ' + str(freq_upper_bound)
IP = lsu.Overlap(fLow = ip_min_freq, fMax=freq_upper_bound,
deltaF = PSIG.deltaF,
psd = eff_fisher_psd,
analyticPSD_Q = analyticPSD_Q
)
hfSIG = lsu.norm_hoff(PSIG, IP)
# Find appropriate parameter ranges
min_mc = McSIG * min_mc_factor
max_mc = McSIG * max_mc_factor
param_ranges = eff.find_effective_Fisher_region(PSIG, IP, wide_match,
param_names, [[min_mc, max_mc],[min_eta, max_eta], [min_chi1, max_chi1]])
# setup uniform parameter grid for effective Fisher
pts_per_dim = [NMcs, NEtas, NChis]
Mcpts, etapts, chipts = eff.make_regular_1d_grids(param_ranges, pts_per_dim)
etapts = map(lsu.sanitize_eta, etapts)
McMESH, etaMESH, chiMESH = eff.multi_dim_meshgrid(Mcpts, etapts, chipts)
McFLAT, etaFLAT, chiFLAT = eff.multi_dim_flatgrid(Mcpts, etapts, chipts)
dMcMESH = McMESH - McSIG
detaMESH = etaMESH - etaSIG
dchiMESH = chiMESH - chiSIG
dMcFLAT = McFLAT - McSIG
detaFLAT = etaFLAT - etaSIG
dchiFLAT = chiFLAT - chiSIG
grid = eff.multi_dim_grid(Mcpts, etapts, chipts)
# Change units on Mc
dMcFLAT_MSUN = dMcFLAT / lal.MSUN_SI
dMcMESH_MSUN = dMcMESH / lal.MSUN_SI
McMESH_MSUN = McMESH / lal.MSUN_SI
McSIG_MSUN = McSIG / lal.MSUN_SI
# Evaluate ambiguity function on the grid
rhos = np.array(eff.evaluate_ip_on_grid(hfSIG, PTMPLT, IP, param_names, grid))
rhogrid = rhos.reshape(NMcs, NEtas, NChis)
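    # Note (added): rhogrid holds the overlap (ambiguity function) between the
    # signal and templates on the regular (Mc, eta, chi) grid; only points with
    # overlap above fit_cntr are used for the quadratic effective-Fisher fit below.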
# Fit to determine effective Fisher matrix
# Adapt the match value to make sure all the Evals are positive
gam_prior = np.diag([10.*10.,4*4.,1.]) # prior on mass, eta, chi. (the 'prior' on mass is mainly used to regularize, and is too narrow)
evals = np.array([-1, -1, -1])
count = 0
start = time.time()
match_cntrs = np.array([0.97, 0.98, 0.99])
while np.any( np.array( [np.real(evals[0]), np.real(evals[1]), np.real(evals[2])] ) < 0 ):
if count>0:
if logFile:
                log.writelines( str(datetime.datetime.today()) + '\t' + 'At least one of the eigenvalues is negative: switching to match of ' + str(match_cntrs[count]) + '\n' )
            else:
                print 'At least one of the eigenvalues is negative: switching to match of ' + str(match_cntrs[count])
wide_match = 1 - (1 - match_cntrs[count])**(2/3.0)
fit_cntr = match_cntrs[count] # Do the effective Fisher fit with pts above this match
cut = rhos > fit_cntr
if np.sum(cut) >= 6:
fitgamma = eff.effectiveFisher(eff.residuals3d, rhos[cut], dMcFLAT_MSUN[cut], detaFLAT[cut], dchiFLAT[cut])
# Find the eigenvalues/vectors of the effective Fisher matrix
gam = eff.array_to_symmetric_matrix(fitgamma)
gam = gam + gam_prior
evals, evecs, rot = eff.eigensystem(gam)
count += 1
if (count >= 3) and np.any( np.array( [np.real(evals[0]), np.real(evals[1]), np.real(evals[2])] ) < 0 ):
return adapt_failure(logFile)
else:
return adapt_failure(logFile)
#
# Distribute points inside predicted ellipsoid of certain level of overlap
#
r1 = np.sqrt(2.*(1.-match_cntr)/np.real(evals[0])) # ellipse radii ...
r2 = np.sqrt(2.*(1.-match_cntr)/np.real(evals[1])) # ... along eigen-directions
r3 = np.sqrt(2.*(1.-match_cntr)/np.real(evals[2])) # ... along eigen-directions
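    # Note (added): with the quadratic ambiguity model
    #   match(dtheta) ~ 1 - (1/2) * dtheta^T . Gamma . dtheta,
    # the surface match = match_cntr is an ellipsoid whose semi-axis along the
    # i-th eigenvector of Gamma has length sqrt(2*(1 - match_cntr)/eval_i).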
### CHECK ### Should we not use the updated match_cntr value? This seems to be a bug ###
NN = 0
NN_total = 0
cart_grid = [[0., 0., 0.]]
sph_grid = [[0., 0., 0.]]
likelihood_grid = [1.] ## Koh Ueno added
while NN < Nrandpts:
NN_total += 1
r = np.random.rand()
ph = np.random.rand() * 2.*np.pi
costh = np.random.rand()*2. - 1.
sinth = np.sqrt(1. - costh * costh)
th = np.arccos(costh)
rrt = r**(1./3.)
x1 = r1 * rrt * sinth * np.cos(ph)
x2 = r2 * rrt * sinth * np.sin(ph)
x3 = r3 * rrt * costh
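        # Note (added): drawing r uniform in [0, 1) and using r**(1/3), together
        # with a uniform cos(theta) and uniform phi, gives points distributed
        # uniformly in volume over the unit ball; scaling by (r1, r2, r3) maps
        # them into the match ellipsoid in the eigenbasis, before the rotation
        # back to (dMc, deta, dchi) offsets just below.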
### CHECK ####
cart_grid_point = [x1, x2, x3]
cart_grid_point = np.array(np.real( np.dot(rot, cart_grid_point)) )
rand_Mc = cart_grid_point[0] * lal.MSUN_SI + McSIG # Mc (kg)
rand_eta = cart_grid_point[1] + etaSIG # eta
rand_chi = cart_grid_point[2] + chiSIG
### CHECK ####
        ### Koh Ueno: I added the following to introduce the correct weight for each sample.
        ### This computation could be done only after "if joint_condition"
        ### if you want to keep the computational cost as small as possible.
        ### I am wondering if we can use the modified "gam"
        ### (i.e., the gam to which gam_prior has been added)
        ### for the likelihood computation.
diff_rand_sample = [(rand_Mc - McSIG) / lal.MSUN_SI, rand_eta - etaSIG, rand_chi - chiSIG]
likelihood_tmp = np.dot( np.dot(gam, diff_rand_sample), diff_rand_sample)
likelihood = np.exp( -0.5 * network_snr**2 * likelihood_tmp.item(0) )
### Koh Ueno: ends
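        # Note (added): the weight above is the Gaussian (Fisher-matrix)
        # likelihood approximation L ~ exp(-(rho^2/2) * dtheta^T . Gamma . dtheta),
        # evaluated with the regularized gam (gam + gam_prior).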
condition1 = rand_eta > 0
condition2 = rand_eta <= 0.25
condition3 = np.abs(rand_chi) < 1.0
joint_condition = condition1 * condition2 * condition3
if joint_condition:
cart_grid.append( [cart_grid_point[0], cart_grid_point[1], cart_grid_point[2]] ) ## CHECK
likelihood_grid.append(likelihood) ### Koh Ueno added
NN += 1
cart_grid = np.array(cart_grid)
sph_grid = np.array(sph_grid)
likelihood_grid = np.array(likelihood_grid)
if logFile:
log.writelines(str(datetime.datetime.today()) + '\t' + 'Selected ' + str(NN) + ' points from ' + str(NN_total) + ' random samples within the ellipsoid \n')
else:
print 'Selected ' + str(NN) + ' points from ' + str(NN_total) + ' random samples within the ellipsoid'
# Rotate to get coordinates in parameter basis
### CHECK! No need to rotate again ###
# cart_grid = np.array([ np.real( np.dot(rot, cart_grid[i]))
# for i in xrange(len(cart_grid)) ])
# Put in convenient units,
# change from parameter differential (i.e. dtheta)
# to absolute parameter value (i.e. theta = theta_true + dtheta)
rand_dMcs_MSUN, rand_detas, rand_dChis = tuple(np.transpose(cart_grid)) # dMc, deta, dchi
rand_Mcs = rand_dMcs_MSUN * lal.MSUN_SI + McSIG # Mc (kg)
rand_etas = rand_detas + etaSIG # eta
rand_chis = rand_dChis + chiSIG
# Prune points with unphysical values of eta from cart_grid
rand_etas = np.array(map(partial(lsu.sanitize_eta, exception=np.NAN), rand_etas))
cart_grid = np.transpose((rand_Mcs,rand_etas,rand_chis)) #### CHECK ####
phys_cut = ~np.isnan(cart_grid).any(1) # cut to remove unphysical pts
cart_grid = cart_grid[phys_cut]
keep_phys_spins = np.abs(cart_grid[:,2]) < 1.0 #### CHECK ####
cart_grid = cart_grid[keep_phys_spins] #### CHECK ####
# Output Cartesian and spherical coordinates of intrinsic grid
indices = np.arange(len(cart_grid))
Mcs_MSUN, etas, chis = np.transpose(cart_grid) #### CHECK ####
Mcs_MSUN = Mcs_MSUN / lal.MSUN_SI
outgrid = np.transpose((Mcs_MSUN,etas,chis)) #### CHECK ####
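    # Note (added): each row of outgrid is (Mc [M_sun], eta, chi1), with the
    # first row being the trigger point (ellipsoid center). likelihood_grid is
    # not pruned by phys_cut/keep_phys_spins; the two stay aligned only because
    # the sampling loop above already enforces the same physical cuts.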
if saveData:
if path: data_dir = path + '/intrinsic_grids'
else: data_dir = 'intrinsic_grids'
os.system('mkdir -p ' + data_dir)
np.savetxt(data_dir + '/intrinsic_grid_' + str(graceid) + '_samples.dat', outgrid, fmt='%f\t%f\t%f')
if plot:
import pylab as pl
from mpl_toolkits.mplot3d import Axes3D
mc = outgrid[:,0]
eta = outgrid[:,1]
sz1 = outgrid[:,2]
fig = pl.figure(figsize=(12, 10))
ax = fig.add_subplot(111, projection='3d')
ax.scatter(mc[1:], eta[1:], sz1[1:], zdir='z', s=3, c='b', alpha=0.2, depthshade=True)
xlim = ax.get_xlim3d()
ylim = ax.get_ylim3d()
zlim = ax.get_zlim3d()
print xlim
print ylim
print zlim
ax.scatter(mc, eta, zlim[0]*np.ones(len(outgrid)), zdir='z', s=3, c='k', alpha=0.04, depthshade=True)
ax.scatter(mc, ylim[1]*np.ones(len(outgrid)), sz1, zdir='z', s=3, c='k', alpha=0.04, depthshade=True)
ax.scatter(xlim[0]*np.ones(len(outgrid)), eta, sz1, zdir='z', s=3, c='k', alpha=0.04, depthshade=True)
ax.scatter(mc[0], eta[0], sz1[0], s=100, c='r', depthshade=True)
ax.scatter(mc[0], eta[0], zlim[0], s=100, c='r', alpha=0.1, depthshade=True)
ax.scatter(mc[0], ylim[1], sz1[0], s=100, c='r', alpha=0.1, depthshade=True)
ax.scatter(xlim[0], eta[0], sz1[0], s=100, c='r', alpha=0.1, depthshade=True)
ax.set_xlim3d(xlim)
ax.set_ylim3d(ylim)
ax.set_zlim3d(zlim)
ax.set_xlabel('$\\mathcal{M}$')
ax.set_ylabel('$\\eta$')
ax.set_zlabel('$\\chi_1$')
pl.title('$m_1$ = ' + str(mass1) + '$M_{\\odot}$: $m_2$ = ' + str(mass2) + '$M_{\\odot}$: $\\chi_1$ = ' + str(chi1), y=1.03)
pl.grid()
pl.rcParams.update({'font.size': 13})
if path: plot_dir = path + '/ellipsoid_sample_plots'
else: plot_dir = 'ellipsoid_sample_plots'
os.system('mkdir -p ' + plot_dir)
pl.savefig(plot_dir + '/ellipsoid_sample_' + str(graceid) + '_plot.png')
if show:
pl.show()
return outgrid, likelihood_grid # KU
def adapt_failure(logFile):
    if logFile:
        log = open(logFile, 'a') ### The log handle from getSamples is not in scope here, so reopen the file in append mode
        log.writelines( str(datetime.datetime.today()) + '\t' + 'Could not find an all-positive set of eigenvalues in three attempts... Quitting program' + '\n')
    else:
        print 'Could not find an all-positive set of eigenvalues in three attempts... Quitting program'
outgrid = np.array([np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan])
np.savetxt('intrinsic_grid_Failed.dat', outgrid, newline="\t")
return np.array([np.nan, np.nan, np.nan])
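
# ----------------------------------------------------------------------------
# Minimal usage sketch (added; not part of the original module). The PSD file
# names, trigger parameters and output path below are placeholders, and the
# 'IFO=path' PSD format is assumed from the common_cl.parse_cl_key_value call
# in getSamples above.
# ----------------------------------------------------------------------------
if __name__ == '__main__':
    psd_args = ['H1=psds_2016.xml.gz', 'L1=psds_2016.xml.gz']  # assumed PSD files
    result = getSamples('TEST', 10.0, 1.4, -0.5, 12.0, 1000, psd_args,
                        saveData=True, plot=False, path='.')
    # On success this is (outgrid, likelihood_grid); on fit failure a single
    # array of NaNs is returned by adapt_failure instead.
    print result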