This page collects typical usage examples of the Python method admit.util.AdmitLogging.AdmitLogging.regression. If you are wondering what AdmitLogging.regression does and how to use it, the hand-picked code examples below may help. You can also look further into the class the method belongs to, admit.util.AdmitLogging.AdmitLogging, for more usage examples.
The following shows 11 code examples of the AdmitLogging.regression method, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better Python code examples.
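Before the examples, here is a minimal, hypothetical sketch of the call pattern the examples below share: the class is imported under an alias (usually logging in ADMIT tasks) and regression() is given a single, grep-able string. The "DEMO:" tag and the logger initialization are illustrative only and not part of the examples.

# Minimal sketch (not taken from the examples below); logger setup is omitted
# and may differ in your ADMIT installation.
from admit.util.AdmitLogging import AdmitLogging as logging

rdata = [1.0, 2.0]                              # values a task wants to track
logging.regression("DEMO: %s" % str(rdata))     # one-line regression record in the log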
Example 1: test_regression
# Required import: from admit.util.AdmitLogging import AdmitLogging [as alias]
# Or: from admit.util.AdmitLogging.AdmitLogging import regression [as alias]
def test_regression(self):
    msg = "unit_test_regression_message"
    Alogging.regression(msg)
    found = False
    r = open(self.logfile, 'r')
    for line in r.readlines():
        if msg in line:
            if(self.verbose):
                print "\nFound message >", line
            found = True
            r.close()
            break
    self.assertTrue(found)
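The unit test above simply scans the log file for the message it just emitted. The same pattern can be used to harvest the regression records the tasks below write (strings such as "LC: ...", "CST: ...", "CSUB: ..."). A small, generic helper sketch; the file name and tag below are placeholders, not part of ADMIT:

# Hypothetical helper: collect regression lines carrying a given tag from a log file.
def grep_regression(logfile, tag):
    """Return all lines in logfile that contain tag (e.g. "CST:")."""
    hits = []
    with open(logfile, 'r') as f:
        for line in f:
            if tag in line:
                hits.append(line.rstrip())
    return hits

# e.g. grep_regression("admit.log", "CST:")  ->  ['... CST: 0.012 1.10', ...]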
Example 2: run
# Required import: from admit.util.AdmitLogging import AdmitLogging [as alias]
# Or: from admit.util.AdmitLogging.AdmitLogging import regression [as alias]
    # ......... part of the code omitted here .........
    # Create Summary table
    lc_description = admit.util.Table()
    lc_description.columns = ["Line Name","Start Channel","End Channel","Output Cube"]
    lc_description.units = ["","int","int",""]
    lc_description.description = "Parameters of Line Cubes"
    # loop over all entries in the line list
    rdata = []
    for row in rows:
        uid = row.getkey("uid")
        cdir = self.mkext(imagename,uid)
        self.mkdir(cdir)
        basefl = uid
        lcd = [basefl]
        outfl = cdir + os.sep + "lc.im"
        args["outfile"] = self.dir(outfl)
        start = row.getkey("startchan")
        end = row.getkey("endchan")
        diff = end - start + 1
        startch = 0
        if diff < minchan:
            add = int(math.ceil(float(minchan - diff) / 2.0))
            start -= add
            end += add
            startch += add
            if start < 0:
                logging.info("%s is too close to the edge to encompass with the "
                             "requested channels, start=%d resetting to 0" %
                             (uid, start))
                startch += abs(start)
                start = 0
            if end >= nchan:
                logging.info("%s is too close to the edge to encompass with the "
                             "requested channels, end=%d resetting to %d" %
                             (uid, end, nchan - 1))
                end = nchan - 1
        #print "\n\nDIFF ",startch,"\n\n"
        if pad > 0 and not equalize:
            start -= pad
            end += pad
            if start < 0:
                logging.warning("pad=%d too large, start=%d resetting to 0"
                                % (pad, start))
                startch += abs(start)
                start = 0
            else:
                startch += pad
            if end >= nchan:
                logging.warning("pad=%d too large, end=%d resetting to %d"
                                % (pad, end, nchan - 1))
                end = nchan - 1
        elif pad < 0 and not equalize:
            mid = (start + end) / 2
            start = mid + pad / 2
            end = mid - pad / 2 - 1
            if start < 0:
                logging.warning("pad=%d too large, start=%d resetting to 0"
                                % (pad, start))
                startch += abs(start)
                start = 0
            else:
                startch += abs(start)
            if end >= nchan:
                logging.warning("pad=%d too large, end=%d resetting to %d"
                                % (pad, end, nchan - 1))
                end = nchan - 1
        endch = startch + diff
        args["chans"] = "%i~%i" % (start, end)
        rdata.append(start)
        rdata.append(end)
        # for the summary, which will be a table of
        # Line name, start channel, end channel, output image
        lc_description.addRow([basefl, start, end, outfl])
        # create the slices
        imsubimage(**args)
        line = row.converttoline()
        # set the restfrequency of the output cube
        imhead(imagename=args["outfile"], mode="put", hdkey="restfreq",
               hdvalue="%fGHz" % (row.getkey("frequency")))
        # set up the output BDP
        images = {bt.CASA : outfl}
        casaimage = Image(images=images)
        # note that Summary.getLineFluxes() implicitly relies on the BDP out order
        # being the same order as in the line list table. If this is ever not
        # true, then Summary.getLineFluxes mismatch BDPs and flux values.
        #self.addoutput(LineCube_BDP(xmlFile=cdir + os.sep + basefl + ".lc",
        self.addoutput(LineCube_BDP(xmlFile=outfl,
                       image=casaimage, line=line, linechans="%i~%i" % (startch, endch)))
        dt.tag("trans-%s" % cdir)
    logging.regression("LC: %s" % str(rdata))
    taskargs = "pad=%s equalize=%s" % (pad, equalize)
    self._summary["linecube"] = SummaryEntry(lc_description.serialize(), "LineCube_AT",
                                             self.id(True), taskargs)
    dt.tag("done")
    dt.end()
Example 3: run
# Required import: from admit.util.AdmitLogging import AdmitLogging [as alias]
# Or: from admit.util.AdmitLogging.AdmitLogging import regression [as alias]
    # ......... part of the code omitted here .........
    # get the full cube statistics, it depends if robust was pre-selected
    if nrargs == 0:
        mean0 = imstat0["mean"][0]
        sigma0 = imstat0["medabsdevmed"][0]*1.4826
        peak0 = imstat0["max"][0]
        b2.setkey("mean" , float(mean0))
        b2.setkey("sigma", float(sigma0))
        b2.setkey("minval",float(imstat0["min"][0]))
        b2.setkey("maxval",float(imstat0["max"][0]))
        b2.setkey("minpos",imstat0["minpos"][:3].tolist())    #? [] or array(..dtype=int32) ??
        b2.setkey("maxpos",imstat0["maxpos"][:3].tolist())    #? [] or array(..dtype=int32) ??
        logging.info("CubeMax: %f @ %s" % (imstat0["max"][0],str(imstat0["maxpos"])))
        logging.info("CubeMin: %f @ %s" % (imstat0["min"][0],str(imstat0["minpos"])))
        logging.info("CubeRMS: %f" % sigma0)
    else:
        mean0 = imstat0["mean"][0]
        sigma0 = imstat0["rms"][0]
        peak0 = imstat10["max"][0]
        b2.setkey("mean" , float(mean0))
        b2.setkey("sigma", float(sigma0))
        b2.setkey("minval",float(imstat10["min"][0]))
        b2.setkey("maxval",float(imstat10["max"][0]))
        b2.setkey("minpos",imstat10["minpos"][:3].tolist())   #? [] or array(..dtype=int32) ??
        b2.setkey("maxpos",imstat10["maxpos"][:3].tolist())   #? [] or array(..dtype=int32) ??
        logging.info("CubeMax: %f @ %s" % (imstat10["max"][0],str(imstat10["maxpos"])))
        logging.info("CubeMin: %f @ %s" % (imstat10["min"][0],str(imstat10["minpos"])))
        logging.info("CubeRMS: %f" % sigma0)
    b2.setkey("robust",robust)
    rms_ratio = imstat0["rms"][0]/sigma0
    logging.info("RMS Sanity check %f" % rms_ratio)
    if rms_ratio > 1.5:
        logging.warning("RMS sanity check = %f. Either bad sidelobes, lotsa signal, or both" % rms_ratio)
    logging.regression("CST: %f %f" % (sigma0, rms_ratio))
    # plots: no plots need to be made when nchan=1 for continuum
    # however we could make a histogram, overlaying the "best" gauss so
    # signal deviations are clear?
    logging.info('mean,rms,S/N=%f %f %f' % (mean0,sigma0,peak0/sigma0))
    if nchan == 1:
        # for a continuum/1-channel we only need to stuff some numbers into the _summary
        self._summary["chanrms"] = SummaryEntry([float(sigma0), fin], "CubeStats_AT", self.id(True))
        self._summary["dynrange"] = SummaryEntry([float(peak0)/float(sigma0), fin], "CubeStats_AT", self.id(True))
        self._summary["datamean"] = SummaryEntry([float(mean0), fin], "CubeStats_AT", self.id(True))
    else:
        y1 = np.log10(ma.masked_invalid(peakval))
        y2 = np.log10(ma.masked_invalid(sigma))
        y3 = y1-y2
        y4 = np.log10(ma.masked_invalid(-minval))
        y5 = y1-y4
        y = [y1,y2,y3,y4]
        title = 'CubeStats: ' + bdp_name+'_0'
        xlab = 'Channel'
        ylab = 'log(Peak,Noise,Peak/Noise)'
        labels = ['log(peak)','log(rms noise)','log(peak/noise)','log(|minval|)']
        myplot = APlot(ptype=self._plot_type,pmode=self._plot_mode,abspath=self.dir())
        segp = [[chans[0],chans[nchan-1],math.log10(sigma0),math.log10(sigma0)]]
        myplot.plotter(chans,y,title,bdp_name+"_0",xlab=xlab,ylab=ylab,segments=segp,labels=labels,thumbnail=True)
        imfile = myplot.getFigure(figno=myplot.figno,relative=True)
        thumbfile = myplot.getThumbnail(figno=myplot.figno,relative=True)
        image0 = Image(images={bt.PNG:imfile},thumbnail=thumbfile,thumbnailtype=bt.PNG,description="CubeStats_0")
        b2.addimage(image0,"im0")
Example 4: run
# Required import: from admit.util.AdmitLogging import AdmitLogging [as alias]
# Or: from admit.util.AdmitLogging.AdmitLogging import regression [as alias]
def run(self):
    """ The run method creates the BDP.

    Parameters
    ----------
    None

    Returns
    -------
    None
    """
    dt = utils.Dtime("ContinuumSub")    # tagging time
    self._summary = {}                  # an ADMIT summary will be created here
    contsub = self.getkey("contsub")
    pad = self.getkey("pad")
    fitorder = self.getkey("fitorder")
    # x.im -> x.cim + x.lim
    # b1  = input spw BDP
    # b1a = optional input {Segment,Line}List
    # b1b = optional input Cont Map (now deprecated)
    # b2  = output line cube
    # b3  = output cont map
    b1 = self._bdp_in[0]
    f1 = b1.getimagefile(bt.CASA)
    b1a = self._bdp_in[1]
    # b1b = self._bdp_in[2]
    b1b = None          # do not allow continuum maps to be input
    f2 = self.mkext(f1,'lim')
    f3 = self.mkext(f1,'cim')
    f3a = self.mkext(f1,'cim3d')        # temporary cube name, map is needed
    b2 = SpwCube_BDP(f2)
    b3 = Image_BDP(f3)
    self.addoutput(b2)
    self.addoutput(b3)
    taskinit.ia.open(self.dir(f1))
    s = taskinit.ia.summary()
    nchan = s['shape'][2]               # ingest has guaranteed this to the spectral axis
    if b1a != None:                     # if a LineList was given, use that
        if len(b1a.table) > 0:
            # this section of code actually works for len(ch0)==0 as well
            #
            ch0 = b1a.table.getFullColumnByName("startchan")
            ch1 = b1a.table.getFullColumnByName("endchan")
            if pad != 0:                # can widen or narrow the segments
                if pad > 0:
                    logging.info("pad=%d to widen the segments" % pad)
                else:
                    logging.info("pad=%d to narrow the segments" % pad)
                ch0 = np.where(ch0-pad < 0, 0, ch0-pad)
                ch1 = np.where(ch1+pad >= nchan, nchan-1, ch1+pad)
            s = Segments(ch0,ch1,nchan=nchan)
            ch = s.getchannels(True)    # take the complement of lines as the continuum
        else:
            ch = range(nchan)           # no lines? take everything as continuum (probably bad)
            logging.warning("All channels taken as continuum. Are you sure?")
    elif len(contsub) > 0:              # else if contsub[] was supplied manually
        s = Segments(contsub,nchan=nchan)
        ch = s.getchannels()
    else:
        raise Exception,"No contsub= or input LineList given"
    if len(ch) > 0:
        taskinit.ia.open(self.dir(f1))
        taskinit.ia.continuumsub(outline=self.dir(f2),outcont=self.dir(f3a),channels=ch,fitorder=fitorder)
        taskinit.ia.close()
        dt.tag("continuumsub")
        casa.immoments(self.dir(f3a),-1,outfile=self.dir(f3))   # mean of the continuum cube (f3a)
        utils.remove(self.dir(f3a))                             # is the continuum map (f3)
        dt.tag("immoments")
        if b1b != None:
            # this option is now deprecated (see above, by setting b1b = None), no user option allowed
            # there is likely a mis-match in the beam, given how they are produced. So it's safer to
            # remove this here, and force the flow to smooth manually
            print "Adding back in a continuum map"
            f1b = b1b.getimagefile(bt.CASA)
            f1c = self.mkext(f1,'sum')
            # @todo notice we are not checking for conforming mapsize and WCS
            #       and let CASA fail out if we've been bad.
            casa.immath([self.dir(f3),self.dir(f1b)],'evalexpr',self.dir(f1c),'IM0+IM1')
            utils.rename(self.dir(f1c),self.dir(f3))
            dt.tag("immath")
    else:
        raise Exception,"No channels left to determine continuum. pad=%d too large?" % pad
    # regression
    rdata = casautil.getdata(self.dir(f3)).data
    logging.regression("CSUB: %f %f" % (rdata.min(),rdata.max()))
    # Create two output images for html and their thumbnails, too
    implot = ImPlot(ptype=self._plot_type,pmode=self._plot_mode,abspath=self.dir())
    implot.plotter(rasterfile=f3,figname=f3,colorwedge=True)
    figname = implot.getFigure(figno=implot.figno,relative=True)
    # ......... part of the code omitted here .........
Example 5: run
# Required import: from admit.util.AdmitLogging import AdmitLogging [as alias]
# Or: from admit.util.AdmitLogging.AdmitLogging import regression [as alias]
    # ......... part of the code omitted here .........
                taskinit.ia.putchunk(plane*0+maxval,blc=[0,0,i,-1])
            else:
                taskinit.ia.putchunk(plane*0+sigma_array[i],blc=[0,0,i,-1])
                count = count + 1
        else:
            taskinit.ia.putchunk(plane*0+maxval,blc=[0,0,i,-1])
    taskinit.ia.close()
    logging.info("%d/%d channels used for CubeSum" % (count,nchan))
    dt.tag("close_mask")
    names = [file, mask]
    tmp = file + '.tmp'
    if numsigma == 0.0:
        # hopefully this will also make use of the mask
        exp = "IM0[IM1<%f]" % (0.99*maxval)
    else:
        exp = "IM0[abs(IM0/IM1)>%f]" % (numsigma)
    # print "PJT: exp",exp
    casa.immath(mode='evalexpr', imagename=names, expr=exp, outfile=tmp)
    args["imagename"] = tmp
    dt.tag("immath")
    casa.immoments(**args)
    dt.tag("immoments")
    if sig_const is False:
        # get rid of temporary files
        utils.remove(tmp)
        utils.remove(mask)
    # get the flux
    taskinit.ia.open(image_out)
    st = taskinit.ia.statistics()
    taskinit.ia.close()
    dt.tag("statistics")
    # report that flux, but there's no way to get the units from casa it seems
    # ia.summary()['unit'] is usually 'Jy/beam.km/s' for ALMA
    # imstat() does seem to know it.
    if st.has_key('flux'):
        rdata = [st['flux'][0],st['sum'][0]]
        logging.info("Total flux: %f (sum=%f)" % (st['flux'],st['sum']))
    else:
        rdata = [st['sum'][0]]
        logging.info("Sum: %f (beam parameters missing)" % (st['sum']))
    logging.regression("CSM: %s" % str(rdata))
    # Create two output images for html and their thumbnails, too
    implot = ImPlot(ptype=self._plot_type,pmode=self._plot_mode,abspath=self.dir())
    implot.plotter(rasterfile=bdp_name,figname=bdp_name,colorwedge=True)
    figname = implot.getFigure(figno=implot.figno,relative=True)
    thumbname = implot.getThumbnail(figno=implot.figno,relative=True)
    dt.tag("implot")
    thumbtype = bt.PNG          # really should be correlated with self._plot_type!!
    # 2. Create a histogram of the map data
    # get the data for a histogram
    data = casautil.getdata(image_out,zeromask=True).compressed()
    dt.tag("getdata")
    # get the label for the x axis
    bunit = casa.imhead(imagename=image_out, mode="get", hdkey="bunit")
    # Make the histogram plot
    # Since we give abspath in the constructor, figname should be relative
    myplot = APlot(ptype=self._plot_type,pmode=self._plot_mode,abspath=self.dir())
    auxname = bdp_name + "_histo"
    auxtype = bt.PNG            # really should be correlated with self._plot_type!!
    myplot.histogram(columns = data,
                     figname = auxname,
                     xlab = bunit,
                     ylab = "Count",
                     title = "Histogram of CubeSum: %s" % (bdp_name),
                     thumbnail=True)
    auxname = myplot.getFigure(figno=myplot.figno,relative=True)
    auxthumb = myplot.getThumbnail(figno=myplot.figno,relative=True)
    images = {bt.CASA : bdp_name, bt.PNG : figname}
    casaimage = Image(images = images,
                      auxiliary = auxname,
                      auxtype = auxtype,
                      thumbnail = thumbname,
                      thumbnailtype = thumbtype)
    if hasattr(b1,"line"):              # SpwCube doesn't have Line
        line = deepcopy(getattr(b1,"line"))
        if type(line) != type(Line):
            line = Line(name="Undetermined")
    else:
        line = Line(name="Undetermined")    # fake a Line if there wasn't one
    self.addoutput(Moment_BDP(xmlFile=bdp_name,moment=0,image=deepcopy(casaimage),line=line))
    imcaption = "Integral (moment 0) of all emission in image cube"
    auxcaption = "Histogram of cube sum for image cube"
    taskargs = "numsigma=%.1f sigma=%g smooth=%s" % (numsigma, sigma, str(smooth))
    self._summary["cubesum"] = SummaryEntry([figname,thumbname,imcaption,auxname,auxthumb,auxcaption,bdp_name,infile],"CubeSum_AT",self.id(True),taskargs)
    dt.tag("done")
    dt.end()
Example 6: run
# Required import: from admit.util.AdmitLogging import AdmitLogging [as alias]
# Or: from admit.util.AdmitLogging.AdmitLogging import regression [as alias]
def run(self):
    """Runs the task.

    Parameters
    ----------
    None

    Returns
    -------
    None
    """
    self._summary = {}
    dt = utils.Dtime("CubeSpectrum")
    # our BDP's
    # b1  = input BDP
    # b1s = optional input CubeSpectrum
    # b1m = optional input Moment
    # b1p = optional input SourceList for positions
    # b2  = output BDP
    b1 = self._bdp_in[0]                # check input SpwCube (or LineCube)
    fin = b1.getimagefile(bt.CASA)
    if self._bdp_in[0]._type == bt.LINECUBE_BDP:
        use_vel = True
    else:
        use_vel = False
    sources = self.getkey("sources")
    pos = []                            # blank it first, then try and grab it from the optional bdp_in's
    cmean = 0.0
    csigma = 0.0
    smax = []                           # accumulate max in each spectrum for regression
    self.spec_description = []          # for summary()
    if self._bdp_in[1] != None:         # check if CubeStats_BDP
        #print "BDP[1] type: ",self._bdp_in[1]._type
        if self._bdp_in[1]._type != bt.CUBESTATS_BDP:
            raise Exception,"bdp_in[1] not a CubeStats_BDP, should never happen"
        # a table (cubestats)
        b1s = self._bdp_in[1]
        pos.append(b1s.maxpos[0])
        pos.append(b1s.maxpos[1])
        logging.info('CubeStats::maxpos,val=%s,%f' % (str(b1s.maxpos),b1s.maxval))
        cmean = b1s.mean
        csigma = b1s.sigma
        dt.tag("CubeStats-pos")
    if self._bdp_in[2] != None:         # check if Moment_BDP (probably from CubeSum)
        #print "BDP[2] type: ",self._bdp_in[2]._type
        if self._bdp_in[2]._type != bt.MOMENT_BDP:
            raise Exception,"bdp_in[2] not a Moment_BDP, should never happen"
        b1m = self._bdp_in[2]
        fim = b1m.getimagefile(bt.CASA)
        pos1,maxval = self.maxpos_im(self.dir(fim))     # compute maxpos, since it is not in bdp (yet)
        logging.info('CubeSum::maxpos,val=%s,%f' % (str(pos1),maxval))
        pos.append(pos1[0])
        pos.append(pos1[1])
        dt.tag("Moment-pos")
    if self._bdp_in[3] != None:         # check if SourceList
        #print "BDP[3] type: ",self._bdp_in[3]._type
        # a table (SourceList)
        b1p = self._bdp_in[3]
        ra = b1p.table.getFullColumnByName("RA")
        dec = b1p.table.getFullColumnByName("DEC")
        peak = b1p.table.getFullColumnByName("Peak")
        if sources == []:
            # use the whole SourceList
            for (r,d,p) in zip(ra,dec,peak):
                rdc = convert_sexa(r,d)
                pos.append(rdc[0])
                pos.append(rdc[1])
                logging.info('SourceList::maxpos,val=%s,%f' % (str(rdc),p))
        else:
            # select specific ones from the source list
            for ipos in sources:
                if ipos < len(ra):
                    radec = convert_sexa(ra[ipos],dec[ipos])
                    pos.append(radec[0])
                    pos.append(radec[1])
                    logging.info('SourceList::maxpos,val=%s,%f' % (str(radec),peak[ipos]))
                else:
                    logging.warning('Skipping illegal source number %d' % ipos)
        dt.tag("SourceList-pos")
    # if pos[] still blank, use the AT keyword.
    if len(pos) == 0:
        pos = self.getkey("pos")
    # if still none, try the map center
    if len(pos) == 0:
        # @todo this could result in a masked pixel and cause further havoc
        # @todo could also take the reference pixel, but that could be outside image
        taskinit.ia.open(self.dir(fin))
        s = taskinit.ia.summary()
        pos = [int(s['shape'][0])/2, int(s['shape'][1])/2]
        logging.warning("No input positions supplied, map center chosen: %s" % str(pos))
    # ......... part of the code omitted here .........
Example 7: run
# Required import: from admit.util.AdmitLogging import AdmitLogging [as alias]
# Or: from admit.util.AdmitLogging.AdmitLogging import regression [as alias]
def run(self):
    """ Main program for OverlapIntegral
    """
    dt = utils.Dtime("OverlapIntegral")
    self._summary = {}
    chans = self.getkey("chans")
    cmap = self.getkey("cmap")
    normalize = self.getkey("normalize")
    doCross = True
    doCross = False
    myplot = APlot(pmode=self._plot_mode,ptype=self._plot_type,abspath=self.dir())
    dt.tag("start")
    n = len(self._bdp_in)
    if n==0:
        raise Exception,"Need at least 1 input Image_BDP "
    logging.debug("Processing %d input maps" % n)
    data = range(n)     # array in which each element is placeholder for the data
    mdata = range(n)    # array to hold the max in each array
    summarytable = admit.util.Table()
    summarytable.columns = ["File name","Spectral Line ID"]
    summarytable.description = "Images used in Overlap Integral"
    for i in range(n):
        bdpfile = self._bdp_in[i].getimagefile(bt.CASA)
        if hasattr(self._bdp_in[i],"line"):
            line = getattr(self._bdp_in[i],"line")
            logging.info("Map %d: %s" % (i,line.uid))
            lineid = line.uid
        else:
            lineid="no line"
        data[i] = casautil.getdata(self.dir(bdpfile),chans)
        mdata[i] = data[i].max()
        logging.info("shape[%d] = %s with %d good data" % (i,data[i].shape,data[i].count()))
        if i==0:
            shape = data[i].shape
            outfile = self.mkext("testOI","oi")
        else:
            if shape != data[i].shape:
                raise Exception,"Shapes not the same, cannot overlap them"
        # collect the file names and line identifications for the summary
        summarytable.addRow([bdpfile,lineid])
    logging.regression("OI: %s" % str(mdata))
    if len(shape)>2 and shape[2] > 1:
        raise Exception,"Cannot handle 3D cubes yet"
    if doCross:
        # debug: produce all cross-corr's of the N input maps (expensive!)
        crossn(data, myplot)
        dt.tag("crossn")
    b1 = Image_BDP(outfile)
    self.addoutput(b1)
    b1.setkey("image", Image(images={bt.CASA:outfile}))
    dt.tag("open")
    useClone = True
    # to create an output dataset, clone the first input, but using the chans=ch0~ch1
    # e.g. using imsubimage(infile,outfile=,chans=
    if len(chans) > 0:
        # ia.regrid() doesn't have the chans=
        taskinit.ia.open(self.dir(self._bdp_in[0].getimagefile(bt.CASA)))
        taskinit.ia.regrid(outfile=self.dir(outfile))
        taskinit.ia.close()
    else:
        # 2D for now
        if not useClone:
            logging.info("OVERLAP out=%s" % outfile)
            taskinit.ia.fromimage(infile=self.dir(self._bdp_in[0].getimagefile(bt.CASA)),
                                  outfile=self.dir(outfile), overwrite=True)
            taskinit.ia.close()
    dt.tag("fromimage")
    if n==3:
        # RGB
        logging.info("RGB mode")
        out = rgb1(data[0],data[1],data[2], normalize)
    else:
        # simple sum
        out = data[0]
        for i in range(1,n):
            out = out + data[i]
    if useClone:
        casautil.putdata_raw(self.dir(outfile),out,clone=self.dir(self._bdp_in[0].getimagefile(bt.CASA)))
    else:
        taskinit.ia.open(self.dir(outfile))
        s1 = taskinit.ia.shape()
        s0 = [0,0,0,0]
        r1 = taskinit.rg.box(blc=s0,trc=s1)
        pixeldata = out.data
        pixelmask = ~out.mask
        taskinit.ia.putregion(pixels=pixeldata, pixelmask=pixelmask, region=r1)
        taskinit.ia.close()
    title = "OverlapIntegral"
    # ......... part of the code omitted here .........
Example 8: run
# Required import: from admit.util.AdmitLogging import AdmitLogging [as alias]
# Or: from admit.util.AdmitLogging.AdmitLogging import regression [as alias]
    # ......... part of the code omitted here .........
            utils.rename(fno,fnot)
            imtrans(fnot,fno,"0132")
            utils.remove(fnot)
        else:
            # this does not work, what the heck
            imtrans(fno,fnot,"0132")
            #@todo use safer ia.rename() here.
            # https://casa.nrao.edu/docs/CasaRef/image.rename.html
            utils.rename(fnot,fno)
        nz = s['shape'][3]
        # get a new summary 's'
        taskinit.ia.open(fno)
        s = taskinit.ia.summary()
        taskinit.ia.close()
        logging.warning("Using imtrans, with nz=%d, to fix axis ordering" % nz)
        dt.tag("imtrans4")
        # @todo ensure first two axes are position, followed by frequency
    elif len(shape)==3:
        # the current importfits() can do defaultaxes=True,defaultaxesvalues=['', '', '', 'I']
        # but then appears to return a ra-dec-pol-freq cube
        # this branch probably never happens, since ia.fromfits() will
        # properly convert a 3D cube to 4D now !!
        # NO: when NAXIS=3 but various AXIS4's are present, that works. But not if it's pure 3D
        # @todo box=
        logging.warning("patching up a 3D to 4D cube")
        raise Exception,"SHOULD NEVER GET HERE"
        fnot = fno + ".trans"
        casa.importfits(fni,fnot,defaultaxes=True,defaultaxesvalues=['', '', '', 'I'])
        utils.remove(fno)   # ieck
        imtrans(fnot,fno,"0132")
        utils.remove(fnot)
        dt.tag("imtrans3")
    logging.regression('CUBE: %g %g %g %d %d %d %f' % (s0['min'],s0['max'],s0['rms'],nx,ny,nz,100.0*(1 - fgood)))
    # if the cube has only 1 plane (e.g. continuum) , create a visual (png or so)
    # for 3D cubes, rely on something like CubeSum
    if nz == 1:
        implot = ImPlot(pmode=self._plot_mode,ptype=self._plot_type,abspath=self.dir())
        implot.plotter(rasterfile=bdpfile,figname=bdpfile)
        # @todo needs to be registered for the BDP, right now we only have the plot
    # ia.summary() doesn't have this easily available, so run the more expensive imhead()
    h = casa.imhead(fno,mode='list')
    telescope = h['telescope']
    # work around CASA's PIPELINE bug/feature? if 'OBJECT' is blank, try 'FIELD'
    srcname = h['object']
    if srcname == ' ':
        logging.warning('FIELD used for OBJECT')
        srcname = casa.imhead(fno,mode='get',hdkey='field')
        if srcname == False:
            # if no FIELD either, we're doomed. yes, this did happen.
            srcname = 'Unknown'
        casa.imhead(fno,mode="put",hdkey="object",hdvalue=srcname)
        h['object'] = srcname
    logging.info('TELESCOPE: %s' % telescope)
    logging.info('OBJECT: %s' % srcname)
    logging.info('REFFREQTYPE: %s' % h['reffreqtype'])
    if h['reffreqtype'].find('TOPO')>=0:
        msg = 'Ingest_AT: cannot deal with cubes with TOPOCENTRIC frequencies yet - winging it'
        logging.warning(msg)
        #raise Exception,msg
    # Ensure beam parameters are available if there are multiple beams
    # If there is just one beam, then we are just overwriting the header
    # variables with their identical values.
    if len(commonbeam) != 0:
Example 9: run
# Required import: from admit.util.AdmitLogging import AdmitLogging [as alias]
# Or: from admit.util.AdmitLogging.AdmitLogging import regression [as alias]
    # ......... part of the code omitted here .........
dt.tag("sepconvolve")
else:
image_tmp = image_out
# now do the spatial smoothing
convolve_to_min_beam = True # default is to convolve to a min enclosing beam
if bmaj > 0 and bmin > 0:
# form qa objects out of these so that casa can understand
bmaj = taskinit.qa.quantity(bmaj,'arcsec')
bmin = taskinit.qa.quantity(bmin,'arcsec')
bpa = taskinit.qa.quantity(bpa,'deg')
target_res={}
target_res['major'] = bmaj
target_res['minor'] = bmin
target_res['positionangle'] = bpa
# throw an exception if cannot be convolved
try:
# for whatever reason, if you give convolve2d a beam parameter,
# it complains ...
im2=taskinit.ia.convolve2d(outfile=image_out,major = bmaj,\
minor = bmin, pa = bpa,\
targetres=True,overwrite=True)
im2.done()
logging.info("Smoothing cube to a resolution of %s by %s at a PA of %s" %
(str(bmaj['value']), str(bmin['value']), str(bpa['value'])))
convolve_to_min_beam = False
achieved_res = target_res
except:
# @todo remind what you need ?
logging.error("Warning: Could not convolve to requested resolution of "\
+str(bmaj['value']) + " by " + str(bmin['value']) + \
" at a PA of "+ str(bpa['value']))
raise Exception,"Could not convolve to beam given!"
dt.tag("convolve2d-1")
if convolve_to_min_beam:
restoring_beams = taskinit.ia.restoringbeam()
commonbeam = taskinit.ia.commonbeam()
# for whatever reason, setrestoringbeam does not use the same set of hashes...
commonbeam['positionangle']=commonbeam['pa']
del commonbeam['pa']
# if there's one beam, apparently the beams keyword does not exist
if 'beams' in restoring_beams:
print "Smoothing cube to a resolution of "+ \
str(commonbeam['major']['value']) +" by "+ \
str(commonbeam['minor']['value'])+" at a PA of "\
+str(commonbeam['pa']['value'])
target_res = commonbeam
im2=taskinit.ia.convolve2d(outfile=image_out,major=commonbeam['major'],\
minor=commonbeam['minor'],\
pa=commonbeam['positionangle'],\
targetres=True,overwrite=True)
im2.done()
achieved_res = commonbeam
dt.tag("convolve2d-2")
else:
print "One beam for all planes. Smoothing to common beam redundant."
achieved_res = commonbeam
if velres['value'] < 0:
taskinit.ia.fromimage(outfile=image_out, infile=image_in)
# not really doing anything
# else, we've already done what we needed to
taskinit.ia.setrestoringbeam(beam = achieved_res)
rdata = achieved_res['major']['value']
# else do no smoothing and just close the image
taskinit.ia.close()
dt.tag("close")
b1 = SpwCube_BDP(bdp_name)
self.addoutput(b1)
# need to update for multiple images.
b1.setkey("image", Image(images={bt.CASA:bdp_name}))
bdpnames = bdpnames.append(bdp_name)
# and clean up the temp image before the next image
if velres['value'] > 0:
utils.remove(image_tmp)
# thes are task arguments not summary entries.
_bmaj = taskinit.qa.convert(achieved_res['major'],'rad')['value']
_bmin = taskinit.qa.convert(achieved_res['minor'],'rad')['value']
_bpa = taskinit.qa.convert(achieved_res['positionangle'],'deg')['value']
vres = "%.2f %s" % (velres['value'],velres['unit'])
logging.regression("SMOOTH: %f %f" % (rdata,velres['value']))
self._summary["smooth"] = SummaryEntry([bdp_name,convolve_to_min_beam,_bmaj,_bmin,_bpa,vres],"Smooth_AT",self.id(True),taskargs)
dt.tag("done")
dt.end()
Example 10: run
# Required import: from admit.util.AdmitLogging import AdmitLogging [as alias]
# Or: from admit.util.AdmitLogging.AdmitLogging import regression [as alias]
    # ......... part of the code omitted here .........
        d = atab[c]['shape']['direction']['m1']['value']
        pixel = taskinit.ia.topixel([r,d])
        xpos = pixel['numeric'][0]
        ypos = pixel['numeric'][1]
        rd = taskinit.ia.toworld([xpos,ypos],'s')
        ra = rd['string'][0][:12]
        dec = rd['string'][1][:12]
        flux = atab[c]['flux']['value'][0]
        sumflux = sumflux + flux
        if atab[c]['shape'].has_key('majoraxis'):
            smajor = atab[c]['shape']['majoraxis']['value']
            sminor = atab[c]['shape']['minoraxis']['value']
            sangle = atab[c]['shape']['positionangle']['value']
        else:
            smajor = 0.0
            sminor = 0.0
            sangle = 0.0
        peakstr = taskinit.ia.pixelvalue([xpos,ypos,0,0])
        if len(peakstr) == 0:
            logging.warning("Problem with source %d @ %d,%d" % (i,xpos,ypos))
            continue
        peakf = peakstr['value']['value']
        snr = peakf/sigma
        if snr > dynlog:
            logscale = True
        logging.info("%s %s %8.2f %8.2f %10.3g %10.3g %7.3f %7.3f %6.1f %6.1f" % (ra,dec,xpos,ypos,peakf,flux,smajor,sminor,sangle,snr))
        xtab.append(xpos)
        ytab.append(ypos)
        slbdp.addRow([name,ra,dec,flux,peakf,smajor,sminor,sangle])
        if ds9:
            ras = ra
            des = dec.replace('.',':',2)
            msg = 'ellipse(%s,%s,%g",%g",%g) # text={%s}' % (ras,des,smajor,sminor,sangle+90.0,i+1)
            fp9.write("%s\n" % msg)
    if ds9:
        fp9.close()
        logging.info("Wrote ds9.reg")
    dt.tag("table")
    logging.regression("CONTFLUX: %d %g" % (nsources,sumflux))
    summary = taskinit.ia.summary()
    beammaj = summary['restoringbeam']['major']['value']
    beammin = summary['restoringbeam']['minor']['value']
    beamunit = summary['restoringbeam']['minor']['unit']
    beamang = summary['restoringbeam']['positionangle']['value']
    angunit = summary['restoringbeam']['positionangle']['unit']
    # @todo add to table comments?
    logging.info(" Fitted Gaussian size; NOT deconvolved source size.")
    logging.info(" Restoring Beam: Major axis: %10.3g %s , Minor axis: %10.3g %s , PA: %5.1f %s" % (beammaj, beamunit, beammin, beamunit, beamang, angunit))
    # form into a xml table
    # output is a table_bdp
    self.addoutput(slbdp)
    # instantiate a plotter for all plots made herein
    myplot = APlot(ptype=self._plot_type,pmode=self._plot_mode,abspath=self.dir())
    # make output png with circles marking sources found
    if mpl:
        circles=[]
        nx = data.shape[1]      # data[] array was already flipud(rot90)'d
        ny = data.shape[0]      #
        for (x,y) in zip(xtab,ytab):
            circles.append([x,y,1])
        # @todo variable name
        if logscale:
            logging.warning("LogScaling applied")
            data = data/sigma
            data = np.where(data<0,-np.log10(1-data),+np.log10(1+data))
        title = "SFind2D: %d sources" % nsources
        myplot.map1(data,title,slbase,thumbnail=True,circles=circles)
    #---------------------------------------------------------
    # Get the figure and thumbmail names and create a caption
    #---------------------------------------------------------
    imname = myplot.getFigure(figno=myplot.figno,relative=True)
    thumbnailname = myplot.getThumbnail(figno=myplot.figno,relative=True)
    caption = "Image of input map with sources found by SFind2D overlayed in green."
    slbdp.table.description="Table of source locations and sizes (not deconvolved)"
    #---------------------------------------------------------
    # Add finder image to the BDP
    #---------------------------------------------------------
    image = Image(images={bt.PNG: imname},
                  thumbnail=thumbnailname,
                  thumbnailtype=bt.PNG, description=caption)
    slbdp.image.addimage(image, "finderimage")
    #-------------------------------------------------------------
    # Create the summary entry for the table and image
    #-------------------------------------------------------------
    self._summary["sources"] = SummaryEntry([slbdp.table.serialize(),
                                             slbdp.image.serialize()],
                                            "SFind2D_AT",
                                            self.id(True), taskargs)
    dt.tag("done")
    dt.end()
Example 11: run
# Required import: from admit.util.AdmitLogging import AdmitLogging [as alias]
# Or: from admit.util.AdmitLogging.AdmitLogging import regression [as alias]
    # ......... part of the code omitted here .........
                          thumbnailtype = thumbtype)
        auxname = myplot.getFigure(figno=myplot.figno,relative=True)
        auxthumb = myplot.getThumbnail(figno=myplot.figno,relative=True)
        if hasattr(self._bdp_in[0], "line"):    # SpwCube doesn't have Line
            line = deepcopy(getattr(self._bdp_in[0], "line"))
            if not isinstance(line, Line):
                line = Line(name="Unidentified")
        else:
            # fake a Line if there wasn't one
            line = Line(name="Unidentified")
        # add the BDP to the output array
        self.addoutput(Moment_BDP(xmlFile=imagename, moment=mom,
                                  image=deepcopy(casaimage), line=line))
        dt.tag("ren+mask_%d" % mom)
        imcaption = "%s Moment %d map of Source %s" % (line.name, mom, objectname)
        auxcaption = "Histogram of %s Moment %d of Source %s" % (line.name, mom, objectname)
        thismomentsummary = [line.name, mom, imagepng, thumbname, imcaption,
                             auxname, auxthumb, auxcaption, infile]
        momentsummary.append(thismomentsummary)
    if map.has_key(0) and map.has_key(1) and map.has_key(2):
        logging.debug("MAPs present: %s" % (map.keys()))
        # m0 needs a new mask, inherited from the more restricted m1 (and m2)
        m0 = ma.masked_where(map[1].mask,map[0])
        m1 = map[1]
        m2 = map[2]
        m01 = m0*m1
        m02 = m0*m1*m1
        m22 = m0*m2*m2
        sum0 = m0.sum()
        vmean = m01.sum()/sum0
        # lacking the full 3D cube, get two estimates and take the max
        sig1 = math.sqrt(m02.sum()/sum0 - vmean*vmean)
        sig2 = m2.max()
        #vsig = max(sig1,sig2)
        vsig = sig1
        # consider clipping in the masked array (mom0clip)
        # @todo i can't use info from line, so just borrow basename for now for grepping
        #       this also isn't really the flux, the points per beam is still in there
        loc = basename.rfind('/')
        sum1 = ma.masked_less(map[0],0.0).sum()     # mom0clip
        # print out: LINE,FLUX1,FLUX0,BEAMAREA,VMEAN,VSIGMA for regression
        # the linechans parameter in bdpin is not useful to print out here, it's local to the LineCube
        s_vlsr = admit.Project.summaryData.get('vlsr')[0].getValue()[0]
        s_rest = admit.Project.summaryData.get('restfreq')[0].getValue()[0]/1e9
        s_line = line.frequency
        if loc>0:
            if basename[:loc][0:2] == 'U_':
                # for U_ lines we'll reference the VLSR w.r.t. RESTFREQ in that band
                if abs(vmean) > vsig:
                    vwarn = '*'
                else:
                    vwarn = ''
                vlsr = vmean + (1.0-s_line/s_rest)*utils.c
                msg = "MOM0FLUX: %s %g %g %g %g %g %g" % (basename[:loc],map[0].sum(),sum0,beamarea,vmean,vlsr,vsig)
            else:
                # for identified lines we'll assume the ID was correct and not bother with RESTFREQ
                msg = "MOM0FLUX: %s %g %g %g %g %g %g" % (basename[:loc],map[0].sum(),sum0,beamarea,vmean,vmean,vsig)
        else:
            msg = "MOM0FLUX: %s %g %g %g %g %g %g" % ("SPW_FULL" ,map[0].sum(),sum0,beamarea,vmean,vmean,vsig)
        logging.regression(msg)
        dt.tag("mom0flux")
        # create a histogram of flux per channel
        # grab the X coordinates for the histogram, we want them in km/s
        # restfreq should also be in summary
        restfreq = casa.imhead(self.dir(infile),mode="get",hdkey="restfreq")['value']/1e9   # in GHz
        # print "PJT %.10f %.10f" % (restfreq,s_rest)
        imval0 = casa.imval(self.dir(infile))
        freqs = imval0['coords'].transpose()[2]/1e9
        x = (1-freqs/restfreq)*utils.c
        #
        h = casa.imstat(self.dir(infile), axes=[0,1])
        if h.has_key('flux'):
            flux0 = h['flux']
        else:
            flux0 = h['sum']/beamarea
        flux0sum = flux0.sum() * abs(x[1]-x[0])
        # @todo make a flux1 with fluxes derived from a good mask
        flux1 = flux0
        # construct histogram
        title = 'Flux Spectrum (%g)' % flux0sum
        xlab = 'VLSR (km/s)'
        ylab = 'Flux (Jy)'
        myplot.plotter(x,[flux0,flux1],title=title,figname=fluxname,xlab=xlab,ylab=ylab,histo=True)
        dt.tag("flux-spectrum")
    self._summary["moments"] = SummaryEntry(momentsummary, "Moment_AT",
                                            self.id(True), taskargs)
    # get rid of the temporary mask
    if mom0clip > 0.0:
        utils.rmdir(self.dir("mom0.masked"))
    dt.tag("done")
    dt.end()