This article collects and summarizes typical usage examples of the starutil.NDG class in Python. If you are unsure what the NDG class does, or how and where to use it, the curated class code examples here should help.
The following presents 13 code examples of the NDG class, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better Python code examples.
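Before diving into the examples, here is a minimal sketch of the NDG patterns that recur below: grouping existing NDFs, creating groups of temporary output NDFs, allocating temporary file names, and cleaning up. It is not taken from any single example; it assumes a configured Starlink environment with starutil on the Python path, and the wildcard and clip values are illustrative assumptions.

from starutil import NDG, invoke, msg_out

# Group some existing input NDFs (the wildcard pattern is hypothetical).
indata = NDG( "rawdata*.sdf" )

# Create a group of one new, temporary NDF to receive output.
result = NDG( 1 )
result.comment = "result"

# Create a group of temporary output NDFs, one for each NDF in "indata".
cleaned = NDG( indata )

# Get the name of a new file in the NDG temporary directory.
conf = NDG.tempfile()

# Groups expand to file lists when formatted into a command line
# (the clip values below are illustrative, copied from the pattern in Example 5).
msg_out( "Cleaning input data..." )
invoke( "$KAPPA_DIR/ffclean in={0} out={1} genvar=yes box=3 clip=\[3,3,3\]".format(indata,cleaned) )

# Remove all temporary files created via NDG.
NDG.cleanup()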
Example 1: cleanup
def cleanup():
    global retain
    ParSys.cleanup()
    if retain:
        msg_out( "Retaining temporary files in {0}".format(NDG.tempdir))
    else:
        NDG.cleanup()
Example 2: cleanup
def cleanup():
    global retain
    try:
        starutil.ParSys.cleanup()
        if retain:
            msg_out( "Retaining temporary files in {0}".format(NDG.tempdir))
        else:
            NDG.cleanup()
    except:
        pass
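A brief, hedged usage note: how cleanup() gets called is not shown in these snippets. One generic way to make sure it always runs at script exit (an illustrative choice, not necessarily what the original scripts do) is to register it with atexit:

import atexit

# Run cleanup() automatically when the interpreter exits.
atexit.register( cleanup )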
Example 3: run_calcqu
def run_calcqu(input_data,config,harmonic):
    # The following call to SMURF:CALCQU creates two HDS container files -
    # one holding a set of Q NDFs and the other holding a set of U NDFs. Create
    # these container files in the NDG temporary directory.
    qcont = NDG(1)
    qcont.comment = "qcont"
    ucont = NDG(1)
    ucont.comment = "ucont"
    msg_out( "Calculating Q and U values for each bolometer...")
    invoke("$SMURF_DIR/calcqu in={0} config=\"{1}\" lsqfit=no outq={2} outu={3} "
           "harmonic={4} fix".format(input_data,starutil.shell_quote(config),
                                     qcont,ucont,harmonic) )
    return (qcont,ucont)
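A hedged usage sketch for run_calcqu. The input file pattern, configuration string and harmonic value below are illustrative assumptions, not taken from the original script; only the call signature matches the function above.

from starutil import NDG, msg_out

# Hypothetical raw POL-2 time-series files and a default CALCQU configuration.
input_data = NDG( "rawdata*.sdf" )
config = "def"

# harmonic=4 is an assumed, illustrative value.
(qcont, ucont) = run_calcqu( input_data, config, 4 )
msg_out( "Q container: {0}  U container: {1}".format(qcont, ucont) )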
Example 4: cleanup
def cleanup():
    global retain, new_ext_ndfs, new_lut_ndfs, new_noi_ndfs
    try:
        starutil.ParSys.cleanup()
        if retain:
            msg_out( "Retaining EXT, LUT and NOI models in {0} and temporary files in {1}".format(os.getcwd(),NDG.tempdir))
        else:
            NDG.cleanup()
            for ext in new_ext_ndfs:
                os.remove( ext )
            for lut in new_lut_ndfs:
                os.remove( lut )
            for noi in new_noi_ndfs:
                os.remove( noi )
            for res in qua:
                os.remove( res )
    except:
        pass
Example 5: get_filtered_skydip_data
def get_filtered_skydip_data(qarray,uarray,clip,a):
    """
    This function takes Q and U array data (output from calcqu), applies
    ffclean to remove spikes, and puts the results into numpy arrays.
    It borrows (copies) heavily from pol2cat.py (2015A).

    Invocation:
        ( qdata_total,qvar_total,udata_total,uvar_total,elevation,opacity_term,bad_pixel_ref ) =
            get_filtered_skydip_data(qarray,uarray,clip,a)

    Arguments:
        qarray = An NDF of Q array data (output from calcqu).
        uarray = An NDF of U array data (output from calcqu).
        clip = The sigma cut for ffclean.
        a = A string indicating the array (e.g. 'S8A').

    Returned Value:
        qdata_total = A numpy array with the cleaned qarray data.
        qvar_total = A numpy array with the qarray variance data.
        udata_total = A numpy array with the cleaned uarray data.
        uvar_total = A numpy array with the uarray variance data.
        elevation = A numpy array with the elevation data.
        opacity_term = A numpy array with the opacity brightness term
            (1-exp(-tau*air_mass)), where tau is calculated using the WVM data as input.
        bad_pixel_ref = An NDG holding the bad-pixel reference NDF.
    """
    # Remove spikes from the Q images for the current subarray. The cleaned NDFs
    # are written to temporary NDFs specified by the new NDG object "qff", which
    # inherits its size from the existing group "qarray".
    msg_out( "Removing spikes from {0} bolometer Q values...".format(a))
    qff = NDG(qarray)
    qff.comment = "qff"
    invoke( "$KAPPA_DIR/ffclean in={0} out={1} genvar=yes box=3 clip=\[{2}\]".format(qarray,qff,clip) )

    # Remove spikes from the U images for the current subarray. The cleaned NDFs
    # are written to temporary NDFs specified by the new NDG object "uff", which
    # inherits its size from the existing group "uarray".
    msg_out( "Removing spikes from {0} bolometer U values...".format(a))
    uff = NDG(uarray)
    uff.comment = "uff"
    invoke( "$KAPPA_DIR/ffclean in={0} out={1} genvar=yes box=3 clip=\[{2}\]"
            .format(uarray,uff,clip) )
    elevation = []
    opacity_term = []
    for stare in range(len(qff[:])):
        # Stack Q data in numpy array.
        # Get elevation information.
        elevation.append(numpy.array( float( invoke( "$KAPPA_DIR/fitsmod ndf={0} edit=print keyword=ELSTART".format( qff[ stare ] ) ) ) ) )
        # Get Tau (Opacity) information.
        tau_temp = numpy.array( float( invoke( "$KAPPA_DIR/fitsmod ndf={0} edit=print keyword=WVMTAUST".format( qff[ stare ] ) ) ) )
        # Convert to obs band.
        if '4' in a:
            tau_temp = 19.04*(tau_temp-0.018)  # Eq from Dempsey et al.
        elif '8' in a:
            tau_temp = 5.36*(tau_temp-0.006)   # Eq from Dempsey et al.
        opacity_term.append(1-numpy.exp(-1*tau_temp/numpy.sin(numpy.radians(elevation[-1]))))
        invoke( "$KAPPA_DIR/ndftrace {0} quiet".format(qff[ stare ]))
        nx = get_task_par( "dims(1)", "ndftrace" )
        ny = get_task_par( "dims(2)", "ndftrace" )
        qdata_temp = numpy.reshape( Ndf( qff[ stare ] ).data, (ny,nx))
        qdata_temp[numpy.abs(qdata_temp)>1e300] = numpy.nan
        if stare == 0:
            qdata_total = qdata_temp
        else:
            qdata_total = numpy.dstack((qdata_total,qdata_temp))
        qvar_temp = numpy.reshape( Ndf( qff[ stare ] ).var, (ny,nx))
        qdata_temp[numpy.abs(qvar_temp)>1e300] = numpy.nan
        if stare == 0:
            qvar_total = qvar_temp
        else:
            qvar_total = numpy.dstack((qvar_total,qvar_temp))
        # Stack U data in numpy array.
        invoke( "$KAPPA_DIR/ndftrace {0} quiet".format(uff[ stare ]))
        nx = get_task_par( "dims(1)", "ndftrace" )
        ny = get_task_par( "dims(2)", "ndftrace" )
        udata_temp = numpy.reshape( Ndf( uff[ stare ] ).data, (ny,nx))
        udata_temp[numpy.abs(udata_temp)>1e300] = numpy.nan
        if stare == 0:
            udata_total = udata_temp
        else:
            udata_total = numpy.dstack((udata_total,udata_temp))
        uvar_temp = numpy.reshape( Ndf( uff[ stare ] ).var, (ny,nx))
        udata_temp[numpy.abs(uvar_temp)>1e300] = numpy.nan
        if stare == 0:
            uvar_total = uvar_temp
        else:
            uvar_total = numpy.dstack((uvar_total,uvar_temp))

    # Create bad pixel reference.
    bad_pixel_ref = NDG(1)
    invoke( "$KAPPA_DIR/copybad in={0} ref={1} out={2}".format(qff,uff,bad_pixel_ref))
    return( qdata_total,qvar_total,udata_total,uvar_total,elevation,opacity_term,bad_pixel_ref )
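A hedged usage sketch combining this function with the run_calcqu output from Example 3. The clip string and subarray name are illustrative assumptions only.

# qcont and ucont as returned by run_calcqu (Example 3); "3,3,3" and "S8A"
# are assumed, illustrative values.
(qdata, qvar, udata, uvar, el, opacity, badref) = \
    get_filtered_skydip_data( qcont, ucont, "3,3,3", "S8A" )
msg_out( "Stacked {0} Q planes".format( len(el) ) )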
Example 6: lengths
# Get the quantity to use as the vector lengths (could be "None")
plot = parsys["PLOT"].value
# If any vectors are to be plotted, get the SNR limit for the plotted
# vectors.
if plot != None:
    snr = parsys["SNR"].value
    maxlen = parsys["MAXLEN"].value
# See if temp files are to be retained.
retain = parsys["RETAIN"].value
# The following call to SMURF:CALCQU creates two HDS container files -
# one holding a set of Q NDFs and the other holding a set of U NDFs. Create
# these container files in the NDG temporary directory.
qcont = NDG(1)
qcont.comment = "qcont"
ucont = NDG(1)
ucont.comment = "ucont"
# Create a set of Q images and a set of U images. These are put into the HDS
# container files "q_TMP.sdf" and "u_TMP.sdf". Each image contains Q or U
# values derived from a short section of raw data during which each bolometer
# moves less than half a pixel.
msg_out( "Calculating Q and U values for each bolometer...")
invoke("$SMURF_DIR/calcqu in={0} config={1} outq={2} outu={3} fix".
       format(indata,config,qcont,ucont) )
# Remove spikes from the Q and U images. The cleaned NDFs are written to
# temporary NDFs specified by two new NDG objects "qff" and "uff", which
# inherit their size from the existing groups "qcont" and "ucont".
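The snippet stops at the comment above; a hedged sketch of the despiking step it describes, following the same ffclean pattern as Example 5 (the clip values here are assumptions, not taken from this script):

# Clean the Q and U images; the clip limits are illustrative assumptions.
qff = NDG( qcont )
qff.comment = "qff"
invoke( "$KAPPA_DIR/ffclean in={0} out={1} genvar=yes box=3 clip=\[3,3,3\]".format(qcont,qff) )

uff = NDG( ucont )
uff.comment = "uff"
invoke( "$KAPPA_DIR/ffclean in={0} out={1} genvar=yes box=3 clip=\[3,3,3\]".format(ucont,uff) )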
Example 7: UsageError
raise UsageError("\n\nThe directory specified by parameter RESTART ({0}) "
                 "does not exist".format(restart) )

fred = loadndg( "IN", True )
if indata != fred:
    raise UsageError("\n\nThe directory specified by parameter RESTART ({0}) "
                     "refers to different time-series data".format(restart) )

msg_out( "Re-using data in {0}".format(restart) )

# Initialise the starlink random number seed to a known value so that
# results are repeatable.
os.environ["STAR_SEED"] = "65"

# Flat field the supplied template data
ff = loadndg( "FF" )
if not ff:
    ff = NDG(indata)
    msg_out( "Flatfielding template data...")
    invoke("$SMURF_DIR/flatfield in={0} out={1}".format(indata,ff) )
    ff = ff.filter()
    savendg( "FF", ff )
else:
    msg_out( "Re-using old flatfielded template data...")

# If required, create new artificial I, Q and U maps.
if newart:
    msg_out( "Creating new artificial I, Q and U maps...")

    # Get the parameters defining the artificial data
    ipeak = parsys["IPEAK"].value
    ifwhm = parsys["IFWHM"].value
    pol = parsys["POL"].value
Example 8: invoke
basec1 = math.radians( basec1 )
basec2 = math.radians( basec2 )
# Get the radius of the map.
radius = 0.5*math.sqrt( map_hght*map_hght + map_wdth*map_wdth )
# Create a Frame describing the coordinate system.
if tracksys == "GAL":
    sys = "galactic"
elif tracksys == "J2000":
    sys = "fk5"
else:
    raise starutil.InvalidParameterError("The TRACKSYS header in {0} is {1} "
                                         "- should be GAL or J2000".format(indata,tracksys) )

frame = NDG.tempfile()
invoke( "$ATOOLS_DIR/astskyframe \"'system={0}'\" {1}".format(sys,frame) )

# Create a Circle describing the map.
if region == None:
    region = NDG.tempfile()
    display = True
else:
    display = False

invoke( "$ATOOLS_DIR/astcircle frame={0} form=1 centre=\[{1},{2}\] point={3} "
        "unc=! options=! result={4}".format(frame,basec1,basec2,radius,region) )

if display:
    f = open( region, "r" )
    print( f.read() )
Example 9: myremove
retain = parsys["RETAIN"].value
outbase = parsys["OUT"].value
fakemap = parsys["FAKEMAP"].value
# Erase any NDFs holding cleaned data, extinction or pointing data from
# previous runs.
for path in glob.glob("*_con_res_cln.sdf"):
    myremove(path)
    base = path[:-16]
    myremove("{0}_lat.sdf".format(base))
    myremove("{0}_lon.sdf".format(base))
    myremove("{0}_con_ext.sdf".format(base))
# Use sc2concat to concatenate and flatfield the data.
msg_out( "Concatenating and flatfielding..." )
concbase = NDG.tempfile("")
invoke("$SMURF_DIR/sc2concat in={0} outbase={1} maxlen=360".format(indata,concbase))
concdata = NDG( "{0}_*".format(concbase) )
# Use makemap to generate quality, extinction and pointing info.
confname = NDG.tempfile()
fd = open(confname,"w")
fd.write("^$STARLINK_DIR/share/smurf/dimmconfig.lis\n")
fd.write("numiter=1\n")
fd.write("exportclean=1\n")
fd.write("exportndf=ext\n")
fd.write("exportlonlat=1\n")
fd.write("dcfitbox=0\n")
fd.write("noisecliphigh=0\n")
fd.write("order=0\n")
fd.write("downsampscale=0\n")
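The snippet ends while the configuration file is still being written; a hedged sketch of how it would plausibly be finished and used, following the pattern shown in Example 13 below (any remaining config lines are omitted here):

# Close the config file and use it to make a map from the concatenated data
# (this mirrors Example 13; it is not part of the original snippet).
fd.close()
map = NDG(1)
invoke("$SMURF_DIR/makemap in={0} out={1} config='^{2}'".format(concdata,map,confname))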
Example 10: UsageError
phase16 = 0.0
sigma = 0.0
# See if old temp files are to be re-used.
restart = parsys["RESTART"].value
if restart == None:
    retain = parsys["RETAIN"].value
    indata.save( "IN" )
else:
    retain = True
    NDG.tempdir = restart
    if not os.path.isdir(restart):
        raise UsageError("\n\nThe directory specified by parameter RESTART ({0}) "
                         "does not exist".format(restart) )

    fred = NDG.load( "IN", True )
    if indata != fred:
        raise UsageError("\n\nThe directory specified by parameter RESTART ({0}) "
                         "refers to different time-series data".format(restart) )

    msg_out( "Re-using data in {0}".format(restart) )
# Initialise the starlink random number seed to a known value so that
# results are repeatable.
os.environ["STAR_SEED"] = "65"
# Flat field the supplied template data
ff = NDG.load( "FF" )
if not ff:
    ffdir = NDG.subdir()
    msg_out( "Flatfielding template data...")
    invoke("$SMURF_DIR/flatfield in={0} out=\"{1}/*\"".format(indata,ffdir) )
Example 11: range
# Initial peak value
peak_value = noise*0.5
# Do tests for 5 different peak values
for ipeak in range(0, 1):
    starutil.msg_out( ">>> Doing sep={0} and peak={1}....".format(clump_separation,peak_value))

    # Get the dimensions of a square image that would be expected to
    # contain the target number of clumps at the current separation.
    npix = int( clump_separation*math.sqrt( nclump_target ) )

    # Create a temporary file containing circular clumps of constant size
    # and shape (except for the effects of noise).
    model = NDG(1)
    out = NDG(1)
    outcat = NDG.tempfile(".fit")
    invoke( "$CUPID_DIR/makeclumps angle=\[0,0\] beamfwhm=0 deconv=no "
            "fwhm1=\[{0},0\] fwhm2=\[{0},0\] lbnd=\[1,1\] ubnd=\[{1},{1}\] "
            "model={2} nclump={3} out={4} outcat={5} pardist=normal "
            "peak = \[{6},0\] rms={7} trunc=0.1".
            format(clump_fwhm,npix,model,nclump_target,out,outcat,
                   peak_value,noise) )

    # Run fellwalker on the data.
    mask = NDG(1)
    outcat_fw = NDG.tempfile(".fit")
    invoke( "$CUPID_DIR/findclumps config=def deconv=no in={0} "
            "method=fellwalker out={1} outcat={2} rms={3}".
            format(out,mask,outcat_fw,noise) )

    # Get the number of clumps found by FellWalker.
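The loop body is cut off at the comment above; a hedged sketch of how the count might be read back, continuing inside the loop and assuming FINDCLUMPS exposes the count through an NCLUMPS output parameter (verify against the CUPID documentation before relying on this):

    # Assumption: FINDCLUMPS writes the clump count to its NCLUMPS output parameter.
    nfound = starutil.get_task_par( "nclumps", "findclumps" )
    starutil.msg_out( "FellWalker found {0} clumps".format( nfound ) )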
Example 12: invoke
cen2 = None
if region == None:
    system = parsys["SYSTEM"].value
    if system == "ICRS":
        parsys["CENTRE1"].prompt = "RA at centre of required circle"
        parsys["CENTRE2"].prompt = "Dec at centre of required circle"
    else:
        parsys["CENTRE1"].prompt = "Galactic longitude at centre of required circle"
        parsys["CENTRE2"].prompt = "Galactic latitude at centre of required circle"

    centre1 = parsys["CENTRE1"].value
    if centre1 != None:
        centre2 = parsys["CENTRE2"].value
        radius = parsys["RADIUS"].value

        frame = NDG.tempfile()
        invoke( "$ATOOLS_DIR/astskyframe \"'system={0}'\" {1}".format(system,frame) )

        invoke( "$ATOOLS_DIR/astunformat {0} 1 {1}".format(frame,centre1) )
        cen1 = starutil.get_task_par( "DVAL", "astunformat" )
        invoke( "$ATOOLS_DIR/astunformat {0} 2 {1}".format(frame,centre2) )
        cen2 = starutil.get_task_par( "DVAL", "astunformat" )

        region = NDG.tempfile()
        invoke( "$ATOOLS_DIR/astcircle {0} 1 \[{1},{2}\] {3} ! ! {4}".
                format(frame,cen1,cen2,math.radians(radius/60.0),region) )

# If a Region was supplied, note that we do not yet have the coordinates of
# the centre of the required region, and note if the Region is defined by
# an NDF.
else:
Example 13: myremove
indata = parsys["IN"].value
retain = parsys["RETAIN"].value
# Erase any NDFs holding cleaned data or pointing data from previous runs.
for path in glob.glob("s*_con_res_cln.sdf"):
    myremove(path)
    base = path[:-16]
    myremove("{0}_lat.sdf".format(base))
    myremove("{0}_lon.sdf".format(base))

# Use sc2concat to concatenate and flatfield the data.
invoke("$SMURF_DIR/sc2concat in={0} out='./*_umap'".format(indata))

# Use makemap to generate quality and pointing info.
concdata = NDG("*_umap")
confname = NDG.tempfile()
fd = open(confname,"w")
fd.write("^$STARLINK_DIR/share/smurf/dimmconfig.lis\n")
fd.write("numiter=1\n")
fd.write("exportclean=1\n")
fd.write("exportlonlat=1\n")
fd.write("dcfitbox=0\n")
fd.write("noisecliphigh=0\n")
fd.write("order=0\n")
fd.write("downsampscale=0\n")
fd.close()
map = NDG(1)
invoke("$SMURF_DIR/makemap in={0} out={1} config='^{2}'".format(concdata,map,confname))
# We do not need the concatenated data any more (we use the cleaned data