-
Notifications
You must be signed in to change notification settings - Fork 3
/
Copy pathtpipe.py
2493 lines (2091 loc) · 125 KB
/
tpipe.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
#! /usr/bin/env python
"""
tpipe.py --- read and visualize visibility data to search for transients
Generalization of evlavis, etc.
Can read MS or Miriad formatted data. Will try to import the following:
- CASA and pyrap for MS reading
- miriad-python for Miriad reading
- aipy for imaging numpy array data
"""
import sys, string, os, shutil, types
from os.path import join
import pickle
import numpy as n
import pylab as p
# set up libraries for reading and imaging visibility data
try:
# for simplicity, we should use pyrap for reading MS
import pyrap
print 'Imported pyrap'
except ImportError:
try:
# as a backup, casa stuff can be imported if running casapy
# from casac import casac; # Recommended by Sanjay B. (Don't ask me why this is so! :)
# ms = casac.ms();
from casa import ms
from casa import quanta as qa
print 'Imported CASA'
except ImportError:
print 'No CASA or pyrap available. Can\'t read MS data.'
try:
# miriad-python can be used to read miriad format data
import mirtask
from mirexec import TaskInvert, TaskClean, TaskRestore, TaskImFit, TaskCgDisp, TaskImStat, TaskUVFit
import miriad
print 'Imported miriad-python'
except ImportError:
print 'No miriad-python available. Can\'t read miriad data.'
try:
# try aipy for imaging numpy array data
import aipy
print 'Imported aipy...'
except ImportError:
print 'No aipy available. Can\'t image in Python.'
try:
import ephem,pywcs
print 'Imported ephem and pywcs...'
except ImportError:
print 'Either ephem or pywcs not available. Can only do flat-sky coord transforms (no ra/dec info).'
class Reader:
    """ Abstract base class holding functionality shared by all pipelines.

    Tunable read/pipeline parameters live in self.params, grouped into named
    "profiles" that can be hardwired for easy access. Individual values may be
    overridden via keyword arguments (e.g., "chans=n.array(range(100,110))").
    """

    def __init__(self):
        # Reader is abstract: construction happens in the 'pipe' subclasses,
        # which read data and then call set_profile/set_params.
        message = 'Cannot instantiate class directly. Use \'pipe\' subclasses.'
        raise NotImplementedError(message)
def set_profile(self, profile='default'):
""" Method called by __init__ in subclasses. This sets all parameters needed elsewhere.
Can optionally use a profile which is a set of params.
Changing parameters done with set_params
"""
# parameters used by various subclasses
# each set is indexed by a name, called a profile
# Note that each parameter must also be listed in set_params method in order to get set
self.profile = profile
self.params = {
'default' : {
'chans': n.array(range(5,59)), # channels to read
'dmarr' : [44.,88.], # dm values to use for dedispersion (only for some subclasses)
'pulsewidth' : 0.0, # width of pulse in time (seconds)
'approxuvw' : True, # flag to make template visibility file to speed up writing of dm track data
'pathout': './', # place to put output files
'beam_params': [0], # flag=0 or list of parameters for twodgaussian parameter definition
'long': -107.6177, # longitude of the array center (vla)
'lat': 34.07875 # latitude of the array center (vla)
},
'vlacrab' : {
'chans': n.array(range(5,59)), # channels to read
'dmarr' : [29.,58.], # dm values to use for dedispersion (only for some subclasses)
'pulsewidth' : 0.0, # width of pulse in time (seconds)
'approxuvw' : True, # flag to make template visibility file to speed up writing of dm track data
'pathout': './', # place to put output files
'beam_params': [0], # flag=0 or list of parameters for twodgaussian parameter definition
'long': -107.6177, # longitude of the array center
'lat': 34.07875 # latitude of the array center
},
'psa' : {
'chans': n.array(range(140,150)), # channels to read
'dmarr' : [0.], # dm values to use for dedispersion (only for some subclasses)
'pulsewidth' : 0.0, # width of pulse in time (seconds)
'approxuvw' : True, # flag to make template visibility file to speed up writing of dm track data
'pathout': './', # place to put output files
'beam_params': [0], # flag=0 or list of parameters for twodgaussian parameter definition
'long': 21.411, # longitude of the array center
'lat': -30.721 # latitude of the array center
},
'pocob0329' : {
'chans': n.array(range(5,59)), # channels to read
'dmarr' : [0, 13.4, 26.8, 40.2, 53.5], # dm values to use for dedispersion (only for some subclasses)
'pulsewidth' : 0.005, # width of pulse in time (seconds)
'approxuvw' : True, # flag to make template visibility file to speed up writing of dm track data
'pathout': './', # place to put output files
'beam_params': [0], # flag=0 or list of parameters for twodgaussian parameter definition
'long': -121.470, # longitude of the array center
'lat': 40.817 # latitude of the array center
},
'mwa' : {
'chans': n.array(n.arange(128)), # channels to read
'dmarr' : [0, 50.], # dm values to use for dedispersion (only for some subclasses)
'pulsewidth' : 0.0, # width of pulse in time (seconds)
'approxuvw' : True, # flag to make template visibility file to speed up writing of dm track data
'pathout': './', # place to put output files
'beam_params': [0], # flag=0 or list of parameters for twodgaussian parameter definition
'long': 116.671, # longitude of the array center
'lat': -26.703 # latitude of the array center
}
}
self.pathout = self.params[self.profile]['pathout']
self.chans = self.params[self.profile]['chans']
self.dmarr = self.params[self.profile]['dmarr']
self.pulsewidth = self.params[self.profile]['pulsewidth'] * n.ones(len(self.chans))
self.approxuvw = self.params[self.profile]['approxuvw']
self.beam_params = self.params[self.profile]['beam_params']
self.long = self.params[self.profile]['long']
self.lat = self.params[self.profile]['lat']
def set_params(self, **kargs):
""" Method called by __init__ in subclasses. This allows one to change parameters.
Assumes set_profile already run on pipe.
"""
# may further modify parameters manually
if len(kargs) > 0:
for key in kargs:
if key in self.params[self.profile].keys():
self.params[self.profile][key] = kargs[key]
else:
print '%s not a standard key. Will not be used.' % (key)
self.pathout = self.params[self.profile]['pathout']
self.chans = self.params[self.profile]['chans']
self.dmarr = self.params[self.profile]['dmarr']
self.pulsewidth = self.params[self.profile]['pulsewidth'] * n.ones(len(self.chans))
self.approxuvw = self.params[self.profile]['approxuvw']
self.beam_params = self.params[self.profile]['beam_params']
self.long = self.params[self.profile]['long']
self.lat = self.params[self.profile]['lat']
def show_params(self):
""" Print parameters of pipeline that can be modified upon creation.
"""
return self.params[self.profile]
def spec(self, ind=[], save=0):
    """ Plot spectrogram for phase center by taking mean over baselines and polarizations.

    Optionally can zoom in on small range in time with ind parameter (a list of
    candidate integration indices; one subplot per candidate).
    save=0 is no saving, save=1 is save with default name, save=<string>.png uses custom name (must include .png).
    """
    # NOTE(review): 'ind' is a mutable default argument; it is only read here, so this is benign.
    reltime = self.reltime
    bf = self.dataph    # (time, channel) spectrogram built by the reader
    print 'Data mean, std: %f, %f' % (self.dataph.mean(), self.dataph.std())
    # clip the color scale to reject outliers
    # NOTE(review): bf.data.ravel() implies dataph is a masked array here
    # (a plain ndarray's .data buffer has no ravel) -- confirm in the readers.
    (vmin, vmax) = sigma_clip(bf.data.ravel())
    # NaN guard: a NaN is neither >= 0 nor <= 0
    if ( (not(vmin >= 0)) & (not(vmin <= 0)) ):
        print 'sigma_clip returning NaNs. Using data (min,max).'
        vmin = bf.ravel().min()
        vmax = bf.ravel().max()
    p.figure()
    p.clf()
    ax = p.axes()
    ax.set_position([0.2,0.2,0.7,0.7])
    if len(ind) > 0:
        # one subplot per candidate, zoomed to +-50 integrations around it
        for i in ind:
            p.subplot(len(ind),1,list(ind).index(i)+1)
            intmin = n.max([0,i-50])
            intmax = n.min([len(self.reltime),i+50])
            im = p.imshow(n.rot90(bf[intmin:intmax]), aspect='auto', origin='upper', interpolation='nearest', extent=(intmin,intmax,0,len(self.chans)), vmin=vmin, vmax=vmax)
        p.subplot(len(ind),1,1)
    else:
        # full spectrogram in a single panel
        im = p.imshow(n.rot90(bf), aspect='auto', origin='upper', interpolation='nearest', extent=(0,len(self.reltime),0,len(self.chans)), vmin=vmin, vmax=vmax)
        p.title(str(self.nskip/self.nbl) + ' nskip, candidates ' + str(ind))
    cb = p.colorbar(im)
    cb.set_label('Flux Density (Jy)',fontsize=12,fontweight="bold")
    ax.spines['top'].set_visible(False)
    ax.spines['right'].set_visible(False)
    ax.spines['bottom'].set_position(('outward', 20))
    ax.spines['left'].set_position(('outward', 30))
    ax.yaxis.set_ticks_position('left')
    ax.xaxis.set_ticks_position('bottom')
    # label y axis with the actual channel numbers (every 4th)
    p.yticks(n.arange(0,len(self.chans),4), (self.chans[(n.arange(0,len(self.chans), 4))]))
    p.xlabel('Time (integration)',fontsize=12,fontweight="bold")
    p.ylabel('Frequency (channel)',fontsize=12,fontweight="bold")
    if save:
        if save == 1:
            # default name: <file root>.<scan>_<nskip>_spec.png
            savename = self.file.split('.')[:-1]
            savename.append(str(self.scan) + '_' + str(self.nskip/self.nbl) + '_spec.png')
            savename = string.join(savename,'.')
        elif isinstance(save, types.StringType):
            savename = save
        print 'Saving file as ', savename
        p.savefig(self.pathout+savename)
def drops(self, data_type='ms', chan=0, pol=0, show=1):
""" Displays info on missing baselines by looking for zeros in data array.
data_type is needed to understand how to grab baseline info. options are 'ms' and 'mir'.
"""
nints = self.nints
bllen = []
if data_type == 'mir':
bls = self.preamble[:,4]
for bl in n.unique(bls):
bllen.append(n.shape(n.where(bls == bl))[1])
elif data_type == 'ms':
for i in xrange(len(self.blarr)):
bllen.append(len(n.where(self.data[:,i,chan,pol] != 0.00)[0]))
bllen = n.array(bllen)
if show:
p.clf()
for i in xrange(self.nbl):
p.text(self.blarr[i,0], self.blarr[i,1], s=str(100*(bllen[i]/nints - 1)), horizontalalignment='center', verticalalignment='center', fontsize=9)
p.axis((0,self.nants+1,0,self.nants+1))
p.plot([0,self.nants+1],[0,self.nants+1],'b--')
# p.xticks(int(self.blarr[:,0]), self.blarr[:,0])
# p.yticks(int(self.blarr[:,1]), self.blarr[:,1])
p.xlabel('Ant 1')
p.ylabel('Ant 2')
p.title('Drop fraction for chan %d, pol %d' % (chan, pol))
# p.show()
return self.blarr,bllen
def simple_image(self, i=0, c=0, cell=1.0, imagesize=4096, pol='i'):
    """
    Image a single integration and channel.
    cell size is in arcmin; imagesize is the image dimension in pixels.
    pol can be 'i' for a Stokes I image (mean over pol dimension) or a pol index.
    (pol is a new, backward-compatible parameter: the original body read an
    undefined global name 'pol' and an undefined 'trackdata', so every call
    raised NameError.)
    Returns image as an array.
    added by DLK 2013-04-05
    """
    # select integration and channel, then move the pol axis first -> (npol, nbl).
    # bugfix: the original reshape((self.nbl,1,1)) of this (nbl, npol) slice is
    # shape-invalid whenever npol != 1, and the subsequent weighted "mean over
    # frequency" operated on a single channel; that dead code is removed.
    track = n.rollaxis(self.data[i,:,c,:], 1)
    # reduce pol axis
    if ((pol == 'i') | (pol == 'I')):
        if len(track) == 2:
            print('Making Stokes I image as mean of two pols...')
        else:
            print('Making Stokes I image as mean over all pols. Hope that\'s ok...')
        tr = track.mean(axis=0)
    elif isinstance(pol, int):
        print('Making image of pol %d' % (pol))
        tr = track[pol]
    # res and size in aipy units (lambda); size sets the pixel scale (cell size)
    size = 1/n.radians(cell/60.0)
    # full field
    res = size/imagesize
    fov = n.degrees(1./res)*3600. # field of view in arcseconds
    # form channel dependent uvw
    u_ch = n.outer(self.u[i], self.freq/self.freq_orig[0])
    v_ch = n.outer(self.v[i], self.freq/self.freq_orig[0])
    w_ch = n.outer(self.w[i], self.freq/self.freq_orig[0])
    # make image
    ai = aipy.img.Img(size=size, res=res)
    uvw_new, tr_new = ai.append_hermitian( (u_ch[:,c], v_ch[:,c], w_ch[:,c]), tr)
    ai.put(uvw_new, tr_new)
    image = ai.image(center = (size/res/2, size/res/2))
    self.ai = ai
    self.image_center = (size/res/2, size/res/2)
    return image
def image_cube(self, i=0, channels=None, cell=1.0, imagesize=4096, pol='i'):
    """
    Image a single integration across all channels.
    Cell size is in arcmin. Channels can be an iterable object or None (which assumes all channels).
    pol can be 'i' for a Stokes I image (mean over pol dimension) or a pol index.
    Returns images as a 3d array indexed (chan, y, x).
    Added by DLK 2013-04-05
    """
    # select integration and reduce pol axis
    if ((pol == 'i') | (pol == 'I')):
        # bugfix: this branch tested len(trackdata), an undefined name copied
        # from imagetrack, which raised NameError; use the data's pol axis.
        if self.data.shape[3] == 2:
            print('Making Stokes I image as mean of two pols...')
        else:
            print('Making Stokes I image as mean over all pols. Hope that\'s ok...')
        # bugfix: integration 0 was hardwired here, ignoring the 'i' argument
        # that the uvw selection below already honors.
        tr = self.data.mean(axis=3)[i]
    elif isinstance(pol, int):
        print('Making image of pol %d' % (pol))
        tr = self.data[i,:,:,pol]
    # res and size in aipy units (lambda); size sets the pixel scale (cell size)
    size = 1/n.radians(cell/60.0)
    # full field
    res = size/imagesize
    fov = n.degrees(1./res)*3600. # field of view in arcseconds
    if channels is None:
        channels = range(self.nchan)
    # form channel dependent uvw
    u_ch = n.outer(self.u[i], self.freq/self.freq_orig[0])
    v_ch = n.outer(self.v[i], self.freq/self.freq_orig[0])
    w_ch = n.outer(self.w[i], self.freq/self.freq_orig[0])
    # make one image per channel. size/res == imagesize exactly, but as a
    # float; use the integer imagesize so the array shape is valid.
    image = n.zeros((len(channels), imagesize, imagesize))
    for ic, c in enumerate(channels):
        print('Creating image for channel %d...' % c)
        ai = aipy.img.Img(size=size, res=res)
        uvw_new, tr_new = ai.append_hermitian( (u_ch[:,c], v_ch[:,c], w_ch[:,c]), tr[:,c])
        ai.put(uvw_new, tr_new)
        image[ic] = ai.image(center = (size/res/2, size/res/2))
    self.ai = ai
    self.image_center = (size/res/2, size/res/2)
    return image
def imagetrack(self, trackdata, mode='split', i=0, pol='i', size=48000, res=500, clean=True, gain=0.01, tol=1e-4, newbeam=0, save=0, show=0):
    """ Use aipy to image trackdata returned by tracksub of dimensions (npol, nbl, nchan).
    mode defines how frequency dependence is handled. 'split' means separate uv and data points in frequency (but not mfs). 'mean' means mean vis across frequency.
    i is used to select uvw coordinates for the track. default is first int.
    pol can be 'i' for a Stokes I image (mean over pol dimension) or a pol index.
    default params size and res are good for 1.4 GHz VLA, C-config image.
    clean determines if image is cleaned and beam corrected. gain/tol are cleaning params.
    newbeam forces the calculation of a new beam for restoring the cleaned image.
    save=0 is no saving, save=1 is save with default name, save=<string>.png uses custom name (must include .png).
    Returns the final (optionally cleaned and restored) image array; returns 0 for an unknown mode.
    """
    # reduce pol axis
    if ((pol == 'i') | (pol == 'I')):
        if len(trackdata) == 2:
            print('Making Stokes I image as mean of two pols...')
        else:
            print('Making Stokes I image as mean over all pols. Hope that\'s ok...')
        td = trackdata.mean(axis=0)
    elif isinstance(pol, int):
        print('Making image of pol %d' % (pol))
        td = trackdata[pol]
    # apply w phase rotation. generally this is done externally (e.g., by data writing software) and is not needed here.
    # wrot = lambda w: n.exp(-2j*n.pi*n.outer(w, self.freq/self.freq_orig[0]))
    # td = td*wrot(self.w[i])
    # define handling of freq axis
    if mode == 'split':
        # keep every channel as its own uv sample (channel-scaled uvw)
        td = td.flatten()
        uu = n.outer(self.u[i], self.freq/self.freq_orig[0]).flatten()
        vv = n.outer(self.v[i], self.freq/self.freq_orig[0]).flatten()
        ww = n.outer(self.w[i], self.freq/self.freq_orig[0]).flatten()
    elif mode == 'mean':
        # collapse the frequency axis to a single mean visibility per baseline
        td = td.mean(axis=1)
        uu = self.u[i]
        vv = self.v[i]
        ww = self.w[i]
    else:
        print('Mode must be \'mean\' or \'split\'.')
        return 0
    fov = n.degrees(1./res)*3600. # field of view in arcseconds
    p.clf()
    # make image
    ai = aipy.img.Img(size=size, res=res)
    uvw_new, td_new = ai.append_hermitian( (uu, vv, ww), td)
    ai.put(uvw_new, td_new)
    image = ai.image(center = (size/res/2, size/res/2))
    image_final = image
    # optionally clean image
    if clean:
        print('Cleaning image...')
        beam = ai.bm_image()
        beamgain = aipy.img.beam_gain(beam[0])
        # renamed from 'clean' to avoid shadowing the clean flag argument
        (cleanimage, dd) = aipy.deconv.clean(image, beam[0], verbose=True, gain=gain, tol=tol)
        try:
            import gaussfitter
            if (len(self.beam_params) == 1) | (newbeam == 1) :
                print('Restoring image with new fit to beam shape...')
                beam_centered = ai.bm_image(center=(size/res/2, size/res/2))
                peak = n.where(beam_centered[0] >= 0.1*beam_centered[0].max(), beam_centered[0], 0.)
                self.beam_params = gaussfitter.gaussfit(peak)
            # fit to beam at center, then roll to corners for later convolution step
            kernel = n.roll(n.roll(gaussfitter.twodgaussian(self.beam_params, shape=n.shape(beam[0])), size/res/2, axis=0), size/res/2, axis=1)
        except ImportError:
            print('Restoring image with peak of beam...')
            # take only peak (gaussian part) pixels of beam image
            kernel = n.where(beam[0] >= 0.4*beam[0].max(), beam[0], 0.)
        restored = aipy.img.convolve2d(cleanimage, kernel)
        image_restored = (restored + dd['res']).real/beamgain
        image_final = image_restored
    if show or save:
        ax = p.axes()
        ax.set_position([0.2,0.2,0.7,0.7])
        # im = p.imshow(image_final, aspect='auto', origin='upper', interpolation='nearest', extent=[-fov/2, fov/2, -fov/2, fov/2])
        im = p.imshow(image_final, aspect='auto', origin='lower', interpolation='nearest', extent=[fov/2, -fov/2, -fov/2, fov/2])
        cb = p.colorbar(im)
        cb.set_label('Flux Density (Jy)',fontsize=12,fontweight="bold")
        ax.spines['top'].set_visible(False)
        ax.spines['right'].set_visible(False)
        ax.spines['bottom'].set_position(('outward', 20))
        ax.spines['left'].set_position(('outward', 30))
        ax.yaxis.set_ticks_position('left')
        ax.xaxis.set_ticks_position('bottom')
        p.xlabel('RA/l Offset (arcsec)',fontsize=12,fontweight="bold")
        p.ylabel('Dec/m Offset (arcsec)',fontsize=12,fontweight="bold")
    peak = n.where(n.max(image_final) == image_final)
    print('Image peak of %e at (%d,%d)' % (n.max(image_final), peak[0][0], peak[1][0]))
    # estimate std without image peak
    print('Peak/RMS = %e' % (image_final.max()/image_final[n.where(image_final <= 0.9*image_final.max())].std()))
    if save:
        if save == 1:
            savename = self.file.split('.')[:-1]
            savename.append(str(self.nskip/self.nbl) + '_im.png')
            savename = string.join(savename,'.')
        elif isinstance(save, str):
            # bugfix: the original tested isinstance(save, string), but
            # 'string' is the module imported at the top of the file, not a
            # type, so a custom name always raised TypeError. str matches the
            # types.StringType check used elsewhere in this class.
            savename = save
        print('Saving file as  %s' % savename)
        p.savefig(self.pathout+savename)
    return image_final
def get_shift(self, ra, dec):
    """
    Returns the shift (dl,dm) in radians required to move the phase center to the new ra,dec (in degrees).
    Needs to know ra0, dec0 of phase center and lat, long of array.
    Added by DLK 2013-04-04

    First, get (ra0,dec0) and (ra,dec)
    use these to compute (l,m) via:
    l=cos(dec)*sin(ra-ra0)
    m=sin(dec)*cos(dec0)-cos(dec)*sin(dec0)*cos(ra-ra0)
    (Synthesis Imaging II, Eqn. 19-8 and 19-9)
    Then need to warp the snapshot
    l'=l + tan(Z)*sin(chi)*(sqrt(1-l**2-m**2)-1)
    m'=m - tan(Z)*cos(chi)*(sqrt(1-l**2-m**2)-1)
    (Cornwell et al. 2008, http://adsabs.harvard.edu/abs/2008ISTSP...2..647C, arXiv:0807.4161v1
    Eqn. 6, 7)
    although the sign in Eqn. 6 might be wrong
    Z=zenith angle and chi=parallactic angle, and so these terms are in fact the PV2_1 and PV2_2
    that I calculate below. However, this is also given in Synthesis Imaging II, Eqn. 19-21,19-22
    in the same form, so maybe I have a sign wrong
    This is also all discussed in Ord et al. (2010)

    ra, dec may be scalars or ndarrays; the return matches that shape.
    The constructed WCS is cached on self.wcs after the first call.
    """
    # build the fake WCS once and cache it; later calls reuse self.wcs
    if not self.__dict__.has_key('wcs'):
        # set up a fake WCS to do the transformation
        # likely the details do not matter much
        wcs=pywcs.WCS(naxis=2)
        wcs.wcs.ctype=['RA---SIN','DEC--SIN']
        wcs.wcs.crval=[n.degrees(self.ra0),n.degrees(self.dec0)]
        wcs.wcs.crpix=[2049,2049]
        wcs.wcs.cdelt=[-1.0/60,1.0/60]
        # observer at the array location, at the time of the first integration
        observer=ephem.Observer()
        observer.long=n.radians(self.long)
        observer.lat=n.radians(self.lat)
        observer.epoch=ephem.J2000
        J0 = ephem.julian_date(0)
        # self.time is in Julian days; ephem dates count from its own epoch
        observer.date=self.time[0]-J0
        body=ephem.FixedBody()
        body._ra=self.ra0
        body._dec=self.dec0
        body._epoch=ephem.J2000
        body.compute(observer)
        # hour angle of the phase center from the local sidereal time
        LST=observer.sidereal_time()
        HA=LST-self.ra0
        _dec=self.dec0
        _lat=n.radians(self.lat)
        # this calculation comes from Steve Ord's fixhdr.c
        parallactic_angle=n.arctan2(n.sin(HA)*n.cos(_lat),
                                    n.sin(_lat)*n.cos(_dec)-n.sin(_dec)*n.cos(_lat)*n.cos(HA))
        cosz=n.sin(_lat)*n.sin(_dec)+n.cos(_lat)*n.cos(_dec)*n.cos(HA)
        z=n.arccos(cosz)
        sinz=n.sin(z)
        tanz=sinz/cosz
        # PV2_1/PV2_2 encode the snapshot warp (see docstring)
        PV2_1=tanz*n.sin(parallactic_angle)
        PV2_2=tanz*n.cos(parallactic_angle)
        wcs.wcs.set_pv([(2,1,PV2_1),(2,2,PV2_2)])
        self.wcs=wcs
    # accept scalar or array ra/dec
    if isinstance(ra,n.ndarray):
        sky=n.vstack((ra,dec)).T
    else:
        sky=n.array([[ra,dec]])
    pix=self.wcs.wcs_sky2pix(sky,0)
    if isinstance(ra,n.ndarray):
        x=pix[:,0]
        y=pix[:,1]
    else:
        x=pix[0,0]
        y=pix[0,1]
    # pixel offset from the reference pixel, converted to (dl,dm) in radians
    dx=x-(self.wcs.wcs.crpix[0]-1)
    dy=y-(self.wcs.wcs.crpix[1]-1)
    dl=n.radians(dx*self.wcs.wcs.cdelt[0])
    dm=n.radians(dy*self.wcs.wcs.cdelt[1])
    return dl,dm
def phaseshift(self, dl=0, dm=0, im=[[0]], size=0):
""" Function to apply phase shift (l,m) coordinates of data array, by (dl, dm).
If dl,dm are arrays, will try to apply the given shift for each integration separately (courtesy DLK)
If instead a 2d-array image, im, is given, phase center is shifted to image peak. Needs size to know image scale.
Sets data and dataph arrays to new values.
Sum of all phase shifts for each integration is tracked in self.l0, self.m0.
"""
ang = lambda dl,dm,u,v,freq: (dl*n.outer(u,freq/self.freq_orig[0]) + dm*n.outer(v,freq/self.freq_orig[0])) # operates on single time of u,v
if ((len(im) != 1) & (size != 0)):
y,x = n.where(im == im.max())
length = len(im)
dl = (length/2 - x[0]) * 1./size
dm = (y[0] - length/2) * 1./size
print 'Shifting phase center to image peak: (dl,dm) = (%e,%e) = (%e,%e) arcsec' % (dl, dm, n.degrees(dl)*3600, n.degrees(dm)*3600)
elif isinstance(dl,n.ndarray) and isinstance(dm,n.ndarray):
if not len(dl) == self.nints:
raise ValueError('dl is an array but its length (%d) does not match the number of integrations (%d)' % (len(dl),self.nints))
elif ((dl != 0) | (dm != 0)):
print 'Shifting phase center by given (dl,dm) = (%e,%e) = (%e,%e) arcsec' % (dl, dm, n.degrees(dl)*3600, n.degrees(dm)*3600)
dl = dl * n.ones(self.nints)
dm = dm * n.ones(self.nints)
else:
raise ValueError('Need to give either dl or dm, or im and size.')
for i in xrange(self.nints):
for pol in xrange(self.npol):
self.data[i,:,:,pol] = self.data[i,:,:,pol] * n.exp(-2j*n.pi*ang(dl[i], dm[i], self.u[i], self.v[i], self.freq))
self.l0 = self.l0 + dl
self.m0 = self.m0 + dm
self.dataph = (self.data.mean(axis=3).mean(axis=1)).real # multi-pol
self.min = self.dataph.min()
self.max = self.dataph.max()
print 'New dataph min, max:'
print self.min, self.max
def make_triples(self, amin=0, amax=0):
""" Calculates and returns data indexes (i,j,k) for all closed triples.
amin and amax define range of antennas (with index, in order). only used if nonzero.
"""
if amax == 0:
amax = self.nants
blarr = self.blarr
# first make triples indexes in antenna numbering
anttrips = []
for i in self.ants[amin:amax+1]:
for j in self.ants[list(self.ants).index(i)+1:amax+1]:
for k in self.ants[list(self.ants).index(j)+1:amax+1]:
anttrips.append([i,j,k])
# next return data indexes for triples
bltrips = []
for (ant1, ant2, ant3) in anttrips:
try:
bl1 = n.where( (blarr[:,0] == ant1) & (blarr[:,1] == ant2) )[0][0]
bl2 = n.where( (blarr[:,0] == ant2) & (blarr[:,1] == ant3) )[0][0]
bl3 = n.where( (blarr[:,0] == ant1) & (blarr[:,1] == ant3) )[0][0]
bltrips.append([bl1, bl2, bl3])
except IndexError:
continue
return n.array(bltrips)
class MiriadReader(Reader):
    """ Class for reading Miriad format data with miriad-python.
    """

    def __init__(self):
        # Abstract like Reader: concrete 'pipe' subclasses drive the reading.
        message = 'Cannot instantiate class directly. Use \'pipe\' subclasses.'
        raise NotImplementedError(message)
def read(self, file, nints, nskip, nocal, nopass, selectpol):
    """ Reads in Miriad data using miriad-python.

    file: Miriad visibility data set name. nints: number of integrations to
    read. nskip: integrations to skip before reading. nocal/nopass: passed to
    the reader to control gain/bandpass application. selectpol: list of
    polarization names to read.
    Fills self.rawdata, self.flags, self.u/v/w, self.preamble, self.time,
    self.reltime and assorted metadata attributes; trims arrays if fewer good
    integrations are found than requested.
    Raises ValueError on impossible pol selections or if no data was read.
    """
    self.file = file
    self.nints = nints
    vis = miriad.VisData(self.file,)
    # first pass: read headers and discover the set of baselines
    i = 0
    for inp, preamble, data, flags in vis.readLowlevel ('dsl3', False, nocal=True, nopass=True):
        # Loop to skip some data and read shifted data into original data arrays
        if i == 0:
            # get few general variables from the first record
            self.nants0 = inp.getScalar ('nants', 0)
            self.inttime0 = inp.getScalar ('inttime', 10.0)
            self.nspect0 = inp.getScalar ('nspect', 0)
            self.nwide0 = inp.getScalar ('nwide', 0)
            self.sdf0 = inp.getScalar ('sdf', self.nspect0)
            self.nschan0 = inp.getScalar ('nschan', self.nspect0)
            self.ischan0 = inp.getScalar ('ischan', self.nspect0)
            self.sfreq0 = inp.getScalar ('sfreq', self.nspect0)
            self.restfreq0 = inp.getScalar ('restfreq', self.nspect0)
            self.pol0 = inp.getScalar ('pol')
            # DLK 2013-04-04
            # get the initial phase center
            self.ra0=inp.getScalar('ra')
            self.dec0=inp.getScalar('dec')
            self.sfreq = self.sfreq0
            self.sdf = self.sdf0
            self.nchan = len(data)
            print 'Initializing nchan:', self.nchan
            bls = []
        # build complete list of baselines
        bls.append(preamble[4])
        # end here. assume at least one instance of each bl occurs before ~six integrations (accommodates MWA)
        if len(bls) == 6*len(n.unique(bls)):
            blarr = []
            for bl in n.unique(bls):
                blarr.append(mirtask.util.decodeBaseline (bl))
            self.blarr = n.array(blarr)
            # map miriad baseline code -> row index in the data arrays
            bldict = dict( zip(n.unique(bls), n.arange(len(blarr))) )
            break
        i = i+1
    # find number of pols in data
    uvd = mirtask.UVDataSet(self.file, 'rw')
    self.npol_orig = uvd.getNPol()
    pols = []
    for i in xrange(20): # loop over the first few spectra to find all polarizations in the data
        pols.append(uvd.getPol())
        uvd.next()
    uvd.close()
    upols = n.unique(pols) # get unique pols in first few spectra
    polstr = mirtask.util.polarizationName(upols[0])
    if len(upols) > 1:
        for pol in upols[1:]:
            polstr = polstr + ', ' + mirtask.util.polarizationName(pol)
    self.npol = len(selectpol)
    if self.npol > self.npol_orig:
        raise ValueError('Trying to select %d pols from %d available.' % (self.npol, self.npol_orig))
    for pol in selectpol:
        if not pol in polstr:
            raise ValueError('Trying to select %s, but %s available.' % (pol, polstr))
    print 'Initializing npol: %d (of %d, %s)' % (self.npol, self.npol_orig, polstr)
    # Initialize more stuff...
    self.freq_orig = self.sfreq + self.sdf * n.arange(self.nchan)
    self.freq = self.freq_orig[self.chans]
    # good baselines
    self.nbl = len(self.blarr)
    print 'Initializing nbl:', self.nbl
    self.ants = n.unique(self.blarr)
    self.nants = len(self.ants)
    print 'Initializing nants:', self.nants
    self.nskip = int(nskip*self.nbl) # number of iterations to skip (for reading in different parts of buffer)
    nskip = int(self.nskip)
    # define data arrays
    self.rawdata = n.zeros((nints, self.nbl, self.nchan, self.npol),dtype='complex64')
    self.flags = n.zeros((nints, self.nbl, self.nchan, self.npol),dtype='bool')
    self.u = n.zeros((nints,self.nbl),dtype='float64')
    self.v = n.zeros((nints,self.nbl),dtype='float64')
    self.w = n.zeros((nints,self.nbl),dtype='float64')
    self.preamble = n.zeros((nints*self.nbl,5),dtype='float64')
    # second pass: go back and read data into arrays, one selected pol at a time
    for polnum in range(self.npol):
        stokes = selectpol[polnum]
        i = 0
        for inp, preamble, data, flags in vis.readLowlevel ('dsl3', False, nocal=nocal, nopass=nopass, stokes=stokes):
            # Loop to skip some data and read shifted data into original data arrays
            if i < nskip:
                i = i+1
                continue
            # assumes ints in order, but may skip. after nbl iterations, it fills next row, regardless of number filled.
            if (i-nskip) < nints*self.nbl:
                self.preamble[i-nskip] = preamble
                self.rawdata[(i-nskip)//self.nbl, bldict[preamble[4]], :, polnum] = data
                self.flags[(i-nskip)//self.nbl, bldict[preamble[4]], :, polnum] = flags
                # uvw stored in preamble index 0,1,2 in units of ns
                # Assumes miriad files store uvw in ns. Set to lambda by multiplying by freq of first channel.
                self.u[(i-nskip)//self.nbl, bldict[preamble[4]]] = preamble[0] * self.freq_orig[0]
                self.v[(i-nskip)//self.nbl, bldict[preamble[4]]] = preamble[1] * self.freq_orig[0]
                self.w[(i-nskip)//self.nbl, bldict[preamble[4]]] = preamble[2] * self.freq_orig[0]
            else:
                break # stop at nints
            if not (i % (self.nbl*100)):
                print 'Read spectrum ', str(i)
            i = i+1
    # one timestamp per integration (preamble column 3, Julian date)
    time = self.preamble[::self.nbl,3]
    if ((not n.any(self.rawdata)) & (not n.any(time))):
        raise ValueError('rawdata and time arrays at default values. No data read?')
    # limit the data to actually real data (DLK)
    maxgoodtime=max(n.where(time>0)[0])
    if maxgoodtime+1 < nints:
        print 'Requested to read %d integrations, but only found %d good integrations' % (nints,
                                                                                          maxgoodtime)
        # need to trim off some of the data
        time=time[:maxgoodtime]
        self.nints=len(time)
        self.u=self.u[:maxgoodtime]
        self.v=self.v[:maxgoodtime]
        self.w=self.w[:maxgoodtime]
        self.rawdata=self.rawdata[:maxgoodtime]
        self.flags=self.flags[:maxgoodtime]
    self.reltime = 24*3600*(time - time[0]) # relative time array in seconds. evla times change...?
    # preserve absolute time (DLK)
    self.time=time
    # mean integration time, estimated from later samples to skip startup jitter
    self.inttime = n.array([self.reltime[i+1] - self.reltime[i] for i in xrange(len(self.reltime)/5,len(self.reltime)-1)]).mean()
    # define relative phase center for each integration
    self.l0 = n.zeros(self.nints)
    self.m0 = n.zeros(self.nints)
    # print summary info
    print
    print 'Shape of raw data, time:'
    print self.rawdata.shape, self.reltime.shape
def writetrack(self, dmbin, tbin, tshift=0, bgwindow=0, show=0, pol=0):
    """ **Not tested recently** Writes data from track out as miriad visibility file.
    Alternative to writetrack that uses stored, approximate preamble used from start of pulse, not middle.
    Optional background subtraction bl-by-bl over bgwindow integrations. Note that this is bgwindow *dmtracks* so width is bgwindow+track width

    Parameters:
    dmbin, tbin -- indices selecting the DM trial and time bin of the track.
    tshift -- integration offset added to tbin when indexing stored preambles.
    bgwindow -- background-subtraction window, passed through to tracksub.
    show -- unused in this method (kept for signature consistency with siblings).
    pol -- polarization index into the background-subtracted track array.

    Returns 1 on success, 0 if the track does not cross the band.
    """
    # create bgsub data
    datadiffarr = self.tracksub(dmbin, tbin, bgwindow=bgwindow)
    if n.shape(datadiffarr) == n.shape([0]): # if track doesn't cross band, ignore this iteration
        return 0
    # NOTE(review): 'data' and 'flags' below are immediately shadowed by the
    # read loop's tuple unpacking; only 'data0' survives as the zero-fill
    # spectrum for flagged baselines.
    data = n.zeros(self.nchan, dtype='complex64') # default data array. gets overwritten.
    data0 = n.zeros(self.nchan, dtype='complex64') # zero data array for flagged bls
    flags = n.zeros(self.nchan, dtype='bool')
    # define output visibility file names
    outname = string.join(self.file.split('.')[:-1], '.') + '.' + str(self.nskip/self.nbl) + '-' + 'dm' + str(dmbin) + 't' + str(tbin) + '.mir'
    print outname
    vis = miriad.VisData(self.file,)
    # first stored-preamble index for this (shifted) time bin: one row per baseline
    int0 = int((tbin + tshift) * self.nbl)
    flags0 = []
    i = 0
    # Read the first nbl spectra (one integration) from the input file:
    # on the very first record, create the output file and copy its header
    # variables; also capture each baseline's channel flags for the write-out pass.
    for inp, preamble, data, flags in vis.readLowlevel ('dsl3', False, nocal=True, nopass=True):
        if i == 0:
            # prep for temp output vis file
            shutil.rmtree(outname, ignore_errors=True)
            out = miriad.VisData(outname)
            dOut = out.open ('c')
            # set variables
            dOut.setPreambleType ('uvw', 'time', 'baseline')
            dOut.writeVarInt ('nants', self.nants0)
            dOut.writeVarFloat ('inttime', self.inttime0)
            dOut.writeVarInt ('nspect', self.nspect0)
            dOut.writeVarDouble ('sdf', self.sdf0)
            dOut.writeVarInt ('nwide', self.nwide0)
            dOut.writeVarInt ('nschan', self.nschan0)
            dOut.writeVarInt ('ischan', self.ischan0)
            dOut.writeVarDouble ('sfreq', self.sfreq0)
            dOut.writeVarDouble ('restfreq', self.restfreq0)
            dOut.writeVarInt ('pol', self.pol0)
            # inp.copyHeader (dOut, 'history')
            inp.initVarsAsInput (' ') # ???
            inp.copyLineVars (dOut)
        if i < self.nbl:
            flags0.append(flags.copy())
            i = i+1
        else:
            break
    l = 0  # counts unflagged baselines; row index into datadiffarr
    for i in xrange(len(flags0)): # iterate over baselines
        # write out track, if not flagged
        if n.any(flags0[i]):
            k = 0  # counts selected channels within self.chans
            for j in xrange(self.nchan):
                if j in self.chans:
                    data[j] = datadiffarr[pol, l, k]
                    # flags[j] = flags0[i][j]
                    k = k+1
                else:
                    data[j] = 0 + 0j
                    # flags[j] = False
            l = l+1
        else:
            # fully flagged baseline: write a zero spectrum
            data = data0
            # flags = n.zeros(self.nchan, dtype='bool')
        # stored preamble (captured at read time) for this baseline's row
        dOut.write (self.preamble[int0 + i], data, flags0[i])
    dOut.close ()
    return 1
def writetrack2(self, dmbin, tbin, tshift=0, bgwindow=0, show=0, pol=0):
    """ **Not tested recently** Writes data from track out as miriad visibility file.
    Alternative to writetrack that uses stored, approximate preamble used from start of pulse, not middle.
    Optional background subtraction bl-by-bl over bgwindow integrations. Note that this is bgwindow *dmtracks* so width is bgwindow+track width

    NOTE(review): this method is a byte-identical duplicate of writetrack;
    consider removing one of the two or having one delegate to the other.

    Parameters:
    dmbin, tbin -- indices selecting the DM trial and time bin of the track.
    tshift -- integration offset added to tbin when indexing stored preambles.
    bgwindow -- background-subtraction window, passed through to tracksub.
    show -- unused in this method (kept for signature consistency with siblings).
    pol -- polarization index into the background-subtracted track array.

    Returns 1 on success, 0 if the track does not cross the band.
    """
    # create bgsub data
    datadiffarr = self.tracksub(dmbin, tbin, bgwindow=bgwindow)
    if n.shape(datadiffarr) == n.shape([0]): # if track doesn't cross band, ignore this iteration
        return 0
    # NOTE(review): 'data' and 'flags' below are immediately shadowed by the
    # read loop's tuple unpacking; only 'data0' survives as the zero-fill
    # spectrum for flagged baselines.
    data = n.zeros(self.nchan, dtype='complex64') # default data array. gets overwritten.
    data0 = n.zeros(self.nchan, dtype='complex64') # zero data array for flagged bls
    flags = n.zeros(self.nchan, dtype='bool')
    # define output visibility file names
    outname = string.join(self.file.split('.')[:-1], '.') + '.' + str(self.nskip/self.nbl) + '-' + 'dm' + str(dmbin) + 't' + str(tbin) + '.mir'
    print outname
    vis = miriad.VisData(self.file,)
    # first stored-preamble index for this (shifted) time bin: one row per baseline
    int0 = int((tbin + tshift) * self.nbl)
    flags0 = []
    i = 0
    # Read the first nbl spectra (one integration) from the input file:
    # on the very first record, create the output file and copy its header
    # variables; also capture each baseline's channel flags for the write-out pass.
    for inp, preamble, data, flags in vis.readLowlevel ('dsl3', False, nocal=True, nopass=True):
        if i == 0:
            # prep for temp output vis file
            shutil.rmtree(outname, ignore_errors=True)
            out = miriad.VisData(outname)
            dOut = out.open ('c')
            # set variables
            dOut.setPreambleType ('uvw', 'time', 'baseline')
            dOut.writeVarInt ('nants', self.nants0)
            dOut.writeVarFloat ('inttime', self.inttime0)
            dOut.writeVarInt ('nspect', self.nspect0)
            dOut.writeVarDouble ('sdf', self.sdf0)
            dOut.writeVarInt ('nwide', self.nwide0)
            dOut.writeVarInt ('nschan', self.nschan0)
            dOut.writeVarInt ('ischan', self.ischan0)
            dOut.writeVarDouble ('sfreq', self.sfreq0)
            dOut.writeVarDouble ('restfreq', self.restfreq0)
            dOut.writeVarInt ('pol', self.pol0)
            # inp.copyHeader (dOut, 'history')
            inp.initVarsAsInput (' ') # ???
            inp.copyLineVars (dOut)
        if i < self.nbl:
            flags0.append(flags.copy())
            i = i+1
        else:
            break
    l = 0  # counts unflagged baselines; row index into datadiffarr
    for i in xrange(len(flags0)): # iterate over baselines
        # write out track, if not flagged
        if n.any(flags0[i]):
            k = 0  # counts selected channels within self.chans
            for j in xrange(self.nchan):
                if j in self.chans:
                    data[j] = datadiffarr[pol, l, k]
                    # flags[j] = flags0[i][j]
                    k = k+1
                else:
                    data[j] = 0 + 0j
                    # flags[j] = False
            l = l+1
        else:
            # fully flagged baseline: write a zero spectrum
            data = data0
            # flags = n.zeros(self.nchan, dtype='bool')
        # stored preamble (captured at read time) for this baseline's row
        dOut.write (self.preamble[int0 + i], data, flags0[i])
    dOut.close ()
    return 1
class MSReader(Reader):
""" Class for reading MS data with either CASA. (Will eventually use pyrap.)
"""
def __init__(self):
    # MSReader acts as an abstract base: direct instantiation is blocked so
    # that only the concrete 'pipe' subclasses are constructed.
    raise NotImplementedError('Cannot instantiate class directly. Use \'pipe\' subclasses.')
def read(self, file, nints, nskip, spw, selectpol, scan, datacol):
""" Reads in Measurement Set data using CASA.
spw is list of subbands. zero-based.
Scan is zero-based selection based on scan order, not actual scan number.
selectpol is list of polarization strings (e.g., ['RR','LL'])
"""
self.file = file
self.scan = scan
self.nints = nints
# get spw info. either load pickled version (if found) or make new one
pklname = string.join(file.split('.')[:-1], '.') + '_init.pkl'
# pklname = pklname.split('/')[-1] # hack to remove path and write locally
if os.path.exists(pklname):
print 'Pickle of initializing info found. Loading...'
pkl = open(pklname, 'r')
try:
(self.npol_orig, self.nbl, self.blarr, self.inttime, self.inttime0, spwinfo, scansummary) = pickle.load(pkl)
except EOFError:
print 'Bad pickle file. Exiting...'
return 1
# old way, casa 3.3?
# scanlist = scansummary['summary'].keys()
# starttime_mjd = scansummary['summary'][scanlist[scan]]['0']['BeginTime']
# new way, casa 4.0?
scanlist = scansummary.keys()
starttime_mjd = scansummary[scanlist[scan]]['0']['BeginTime']
self.nskip = int(nskip*self.nbl) # number of iterations to skip (for reading in different parts of buffer)
self.npol = len(selectpol)
else:
print 'No pickle of initializing info found. Making anew...'
pkl = open(pklname, 'wb')
ms.open(self.file)
spwinfo = ms.getspectralwindowinfo()
scansummary = ms.getscansummary()
# original (general version)
# scanlist = scansummary['summary'].keys()
# starttime_mjd = scansummary['summary'][scanlist[scan]]['0']['BeginTime']
# starttime0 = qa.getvalue(qa.convert(qa.time(qa.quantity(starttime_mjd+0/(24.*60*60),'d'),form=['ymd'], prec=9), 's'))
# stoptime0 = qa.getvalue(qa.convert(qa.time(qa.quantity(starttime_mjd+0.5/(24.*60*60), 'd'), form=['ymd'], prec=9), 's'))
# for casa 4.0 (?) and later
scanlist = scansummary.keys()
starttime_mjd = scansummary[scanlist[scan]]['0']['BeginTime']
starttime0 = qa.getvalue(qa.convert(qa.time(qa.quantity(starttime_mjd+0/(24.*60*60),'d'),form=['ymd'], prec=9)[0], 's'))[0]
stoptime0 = qa.getvalue(qa.convert(qa.time(qa.quantity(starttime_mjd+0.5/(24.*60*60), 'd'), form=['ymd'], prec=9)[0], 's'))[0]
ms.selectinit(datadescid=0) # initialize to initialize params
selection = {'time': [starttime0, stoptime0]}
ms.select(items = selection)
da = ms.getdata([datacol, 'axis_info'], ifraxis=True)
ms.close()
self.npol_orig = da[datacol].shape[0]
self.nbl = da[datacol].shape[2]
print 'Initializing nbl:', self.nbl
# good baselines
bls = da['axis_info']['ifr_axis']['ifr_shortname']
self.blarr = n.array([[int(bls[i].split('-')[0]),int(bls[i].split('-')[1])] for i in xrange(len(bls))])
self.nskip = int(nskip*self.nbl) # number of iterations to skip (for reading in different parts of buffer)
# set integration time
ti0 = da['axis_info']['time_axis']['MJDseconds']
# self.inttime = scansummary['summary'][scanlist[scan]]['0']['IntegrationTime'] # general way
self.inttime = scansummary[scanlist[scan]]['0']['IntegrationTime'] # subset way, or casa 4.0 way?
self.inttime0 = self.inttime
print 'Initializing integration time (s):', self.inttime
pickle.dump((self.npol_orig, self.nbl, self.blarr, self.inttime, self.inttime0, spwinfo, scansummary), pkl)
pkl.close()