This article collects typical usage examples of the TimeSeries.fetch method from the Python module gwpy.timeseries. If you have been wondering what TimeSeries.fetch does, how to use it, or what it looks like in practice, the selected code examples below may help. You can also explore other usages of its parent class, gwpy.timeseries.TimeSeries.
Below are 15 code examples of TimeSeries.fetch, sorted by popularity by default.
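Before walking through the examples, here is a minimal, self-contained sketch of the basic call pattern: fetch one channel over NDS2 between two GPS times. The channel name, GPS times, and the implied NDS2 server access are illustrative assumptions, not taken from any example below.
from gwpy.timeseries import TimeSeries
# fetch ~32 seconds of calibrated strain over NDS2 (requires network access
# and NDS2 credentials; channel and times are placeholders)
data = TimeSeries.fetch('L1:GDS-CALIB_STRAIN', 1126259446, 1126259478, verbose=True)
print(data.sample_rate, data.span, data.unit)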
Example 1: generate_fast_vco
# Required import: from gwpy.timeseries import TimeSeries [as alias]
# Or: from gwpy.timeseries.TimeSeries import fetch [as alias]
def generate_fast_vco(ifo, segment, frames=False, fit=True):
"""
Parameters:
-----------
ifo : start
interferometer, e.g. 'L1'
segment : array like
time segment. first entry start second entry end
frames : bool
read from frames or nds2
fit : bool
fit from imc-f (default)
or spline interpolation
Returns:
--------
vco_data : saves file 'L1:IMC-VCO_PREDICTION-st-dur.hdf'
"""
st = segment[0]
et = segment[1]
chan1_pat = '%s:SYS-TIMING_C_FO_A_PORT_11_SLAVE_CFC_FREQUENCY_5'
chan2_pat = '%s:IMC-F_OUT_DQ'
if frames:
connection = datafind.GWDataFindHTTPConnection()
cache = connection.find_frame_urls(
ifo[0], '%s_R' % ifo, st, et + 1, urltype='file')
if fit:
imc = TimeSeries.read(cache, chan2_pat % ifo, st, et)
else:
imc = TimeSeries.read(cache, chan2_pat % ifo, st, st + 1)
pslvco = TimeSeries.read(cache, chan1_pat % ifo, st, et + 1)
else:
if fit:
imc = TimeSeries.fetch(chan2_pat % ifo, st, et)
else:
print('HI BEFORE LOADING IMC')
imc = TimeSeries.fetch(chan2_pat % ifo, st, st + 1)
print('HI BEFORE LOADING PSL')
pslvco = TimeSeries.fetch(chan1_pat % ifo, st, et + 1)
print('HI AFTER LOADING')
pslvco = pslvco[16 + 8::16]
if fit:
imc_srate = int(imc.sample_rate.value)
imc2 = imc[imc_srate / 2::imc_srate]
data = np.array((imc2.value, pslvco.value)).T
vco_interp = fit_with_imc(data, imc)
else:
vco_interp = interp_spline(pslvco)
chan = "%s:IMC-VCO_PREDICTION" % (ifo,)
vco_data = TimeSeries(vco_interp, epoch=st,
sample_rate=256,
name=chan, channel=chan)
return vco_data
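A hypothetical driver for generate_fast_vco might look like the following; the GPS segment is a placeholder, and the output filename simply follows the pattern mentioned in the docstring (writing HDF5 via TimeSeries.write assumes h5py is installed).
segment = (1135641617, 1135641917)  # placeholder GPS start/end
vco = generate_fast_vco('L1', segment, frames=False, fit=True)
vco.write('L1:IMC-VCO_PREDICTION-%d-%d.hdf' % (segment[0], segment[1] - segment[0]), format='hdf5')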
Example 2: calibrate_imc_pslvco
# Required import: from gwpy.timeseries import TimeSeries [as alias]
# Or: from gwpy.timeseries.TimeSeries import fetch [as alias]
def calibrate_imc_pslvco(ifo, start_time, dur, cache=None):
st, et = start_time, start_time + dur
if cache:
pslvco = TimeSeries.read(cache, chan1_pat % ifo, start=st, end=et)
imc = TimeSeries.read(cache, chan2_pat % ifo, start=st, end=et)
else:
imc = TimeSeries.fetch(chan2_pat % ifo, st, et)
pslvco = TimeSeries.fetch(chan1_pat % ifo, st, et)
arr_psl = pslvco[8::16]
arr_imc = imc
tmp1 = (arr_imc[8192::16384])[:-1]
tmp2 = arr_psl[1:]
a, b = numpy.polyfit(tmp1, tmp2, 1)
return a, b
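Here chan1_pat and chan2_pat are assumed to be the same module-level channel patterns used in Example 1. A hedged sketch of how the fitted coefficients might be applied (an assumption, not part of the original snippet): the fit maps IMC-F counts to the decimated PSL VCO readback, so a*imc + b gives a VCO prediction.
a, b = calibrate_imc_pslvco('L1', 1135641617, 600)  # placeholder start time and duration
imc = TimeSeries.fetch('L1:IMC-F_OUT_DQ', 1135641617, 1135641617 + 600)
vco_estimate = a * imc.value + b  # predicted VCO frequency from IMC-F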
Example 3: test_fetch
# Required import: from gwpy.timeseries import TimeSeries [as alias]
# Or: from gwpy.timeseries.TimeSeries import fetch [as alias]
def test_fetch(self):
try:
nds_buffer = mockutils.mock_nds2_buffer(
'X1:TEST', self.data, 1000000000, self.data.shape[0], 'm')
except ImportError as e:
self.skipTest(str(e))
nds_connection = mockutils.mock_nds2_connection([nds_buffer])
with mock.patch('nds2.connection') as mock_connection, \
mock.patch('nds2.buffer', nds_buffer):
mock_connection.return_value = nds_connection
# use verbose=True to hit more lines
ts = TimeSeries.fetch('X1:TEST', 1000000000, 1000000001,
verbose=True)
nptest.assert_array_equal(ts.value, self.data)
self.assertEqual(ts.sample_rate, self.data.shape[0] * units.Hz)
self.assertTupleEqual(ts.span, (1000000000, 1000000001))
self.assertEqual(ts.unit, units.meter)
Example 4: _read_data
# Required import: from gwpy.timeseries import TimeSeries [as alias]
# Or: from gwpy.timeseries.TimeSeries import fetch [as alias]
def _read_data(channel, st, et, frames=False):
"""
get data, either from frames or from nds2
"""
ifo = channel.split(':')[0]
if frames:
# read from frames
connection = datafind.GWDataFindHTTPConnection()
print(ifo[0])
if channel.split(':')[1] == 'GDS-CALIB_STRAIN':
cache = connection.find_frame_urls(ifo[0], ifo + '_HOFT_C00', st, et, urltype='file')
else:
cache = connection.find_frame_urls(ifo[0], ifo + '_C', st, et, urltype='file')
try:
data = TimeSeries.read(cache, channel, st, et)
except IndexError:
cache = connection.find_frame_urls(ifo[0], ifo+'_R', st, et, urltype='file')
data = TimeSeries.read(cache, channel, st, et)
else:
data = TimeSeries.fetch(channel, st, et)
return data
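For reference, a possible call of the helper above (channel and GPS times are placeholders); with frames=False the data is fetched over NDS2 via TimeSeries.fetch:
strain = _read_data('H1:GDS-CALIB_STRAIN', 1126259446, 1126259478, frames=False)
print(strain.sample_rate, strain.span)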
Example 5: Time
# Required import: from gwpy.timeseries import TimeSeries [as alias]
# Or: from gwpy.timeseries.TimeSeries import fetch [as alias]
from gwpy.time import Time
from gwpy.timeseries import TimeSeries
from gwpy.plotter import TimeSeriesPlot
from matplotlib.ticker import MultipleLocator, FormatStrFormatter
start = Time('2014-11-29 00:00:00', format='iso', scale='utc')
end = Time('2014-11-30 00:00:00', format='iso', scale='utc')
print(start.iso, start.gps)
print(end.iso, end.gps)
#TCS = TimeSeries.fetch('L1:TCS-ITMY_HWS_CHAMBERTEMPERATURESENSORB.mean,m-trend', start, end, verbose=True)
RH = TimeSeries.fetch('L1:TCS-ITMY_RH_LOWERRTD.mean,m-trend', start, end, verbose=True)
ITMY = TimeSeries.fetch('L1:SUS-ITMY_M0_DAMP_V_IN1_DQ.mean,m-trend', start, end, verbose=True)
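A natural continuation, added here only as a sketch and not part of the original snippet, is to overlay the two minute-trend series with the TimeSeriesPlot class the example already imports:
plot = TimeSeriesPlot(RH, ITMY, sep=True)  # one panel per series
plot.show()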
Example 6: run
# Required import: from gwpy.timeseries import TimeSeries [as alias]
# Or: from gwpy.timeseries.TimeSeries import fetch [as alias]
I would like to study the gravitational wave strain time-series around the time of an interesting simulated signal during the last science run (S6). I have access to the frame files on the LIGO Data Grid machine `ldas-pcdev2.ligo-wa.caltech.edu` and so can read them directly.
"""
from gwpy.time import Time
from gwpy.timeseries import TimeSeries
from gwpy import version
__author__ = "Duncan Macleod <[email protected]>"
__version__ = version.version
# set the times
start = Time('2010-09-16 06:42:00', format='iso', scale='utc')
end = Time('2010-09-16 06:43:00', format='iso', scale='utc')
# make timeseries
data = TimeSeries.fetch('H1:LDAS-STRAIN', start, end)
data.unit = 'strain'
# plot
plot = data.plot()
if __name__ == '__main__':
try:
outfile = __file__.replace('.py', '.png')
except NameError:
pass
else:
plot.save(outfile)
print("Example output saved as\n%s" % outfile)
Example 7:
# Required import: from gwpy.timeseries import TimeSeries [as alias]
# Or: from gwpy.timeseries.TimeSeries import fetch [as alias]
"""
from gwpy.time import Time, TimeDelta
from gwpy.timeseries import TimeSeries
from gwpy import version
__author__ = "Duncan Macleod <[email protected]>"
__version__ = version.version
# set the times
goodtime = 1061800700
badtime = 1061524816
duration = 120
# read the data over the network
gooddata = TimeSeries.fetch('L1:PSL-ISS_PDB_OUT_DQ', goodtime,
goodtime+duration, verbose=True)
baddata = TimeSeries.fetch('L1:PSL-ISS_PDB_OUT_DQ', badtime,
badtime+duration, verbose=True)
# calculate spectrum with 1.8 Hz resolution
goodasd = gooddata.asd(8, 4, 'welch')
badasd = baddata.asd(8, 4, 'welch')
# plot
plot = badasd.plot()
plot.add_spectrum(goodasd)
plot.xlim = [10, 8000]
plot.ylim = [1e-6, 5e-4]
if __name__ == '__main__':
try:
Example 8: Time
# Required import: from gwpy.timeseries import TimeSeries [as alias]
# Or: from gwpy.timeseries.TimeSeries import fetch [as alias]
"""
from gwpy.time import Time
from gwpy.timeseries import TimeSeries
from gwpy import version
__author__ = "Duncan Macleod <[email protected]>"
__version__ = version.version
# set the times
start = Time('2010-09-16 06:42:00', format='iso', scale='utc')
end = Time('2010-09-16 06:43:00', format='iso', scale='utc')
# find the data using NDS
data = TimeSeries.fetch('H1:LDAS-STRAIN', start.gps, end.gps, verbose=True)
data.unit = 'strain'
# calculate spectrogram
specgram = data.spectrogram(1)
asdspecgram = specgram ** (1/2.)
medratio = asdspecgram.ratio('median')
# plot
plot = medratio.plot()
plot.logy = True
plot.ylim = [40, 4096]
plot.add_colorbar(log=True, clim=[0.1, 10], label='ASD ratio to median average')
plot.ylim = [40, 4000]
if __name__ == '__main__':
Example 9: loadtxt
# Required import: from gwpy.timeseries import TimeSeries [as alias]
# Or: from gwpy.timeseries.TimeSeries import fetch [as alias]
#darmbase = ':OMC-DCPD_SUM_OUT_DQ'
darmbase = ':GDS-CALIB_STRAIN'
darmchan = ifo + darmbase
rangechan = ifo + ':DMT-SNSH_EFFECTIVE_RANGE_MPC.mean'
fnm = 'FMCLVE_channels.txt'
# load channel names from text file
# Note: this file was generated by running: nds_query -l -n nds.ligo-la.caltech.edu -t raw LVE-* > LVE_channels.txt
lines = loadtxt(fnm,
dtype='str',
usecols=[0],skiprows=2)
# get BNS range data
#if dur>=60*60:
#else:
range = TimeSeries.fetch(rangechan, start_time, start_time + dur, verbose=True, host='nds.ligo.caltech.edu')
trange = arange(0.0, len(range.value)) / range.sample_rate.value
# Get DARM data
darm = TimeSeries.fetch(darmchan, start_time - filter_pad, start_time + dur, verbose=True, host='nds.ligo.caltech.edu')
darm = darm.detrend()
darm = darm.highpass(20)
# build notch filter for 60Hz line
Norder=2
zpk = iirfilter(Norder, [59.8/(darm.sample_rate.value/2.0), 60.2/(darm.sample_rate.value/2.0)], btype='bandstop', ftype='butter', output='zpk')
darm = darm.filter(*zpk)
# Calculate DARM BLRMS
stride=30 # seconds
flower=75 # Hz
fupper=95 # Hz
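The snippet is cut off at this point, but the stride/flower/fupper parameters suggest a band-limited RMS of DARM; a hedged sketch of that step (an assumption, not taken from the original source):
# bandpass DARM to the 75-95 Hz band, then take a 30-second RMS
darm_blrms = darm.bandpass(flower, fupper).rms(stride)
plot = darm_blrms.plot()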
Example 10:
# Required import: from gwpy.timeseries import TimeSeries [as alias]
# Or: from gwpy.timeseries.TimeSeries import fetch [as alias]
from gwpy.timeseries import TimeSeries
data = TimeSeries.fetch('H1:LDAS-STRAIN', 968654552, 968654562)
plot = data.plot()
plot.show()
Example 11: getTimeSeries
# Required import: from gwpy.timeseries import TimeSeries [as alias]
# Or: from gwpy.timeseries.TimeSeries import fetch [as alias]
def getTimeSeries(self, arg_list):
"""Verify and interpret arguments to get all
TimeSeries objects defined"""
# retrieve channel data from NDS as a TimeSeries
for chans in arg_list.chan:
for chan in chans:
if chan not in self.chan_list:
self.chan_list.append(chan)
if len(self.chan_list) < self.min_timeseries:
raise ArgumentError('A minimum of %d channels must be '
'specified for this product' %
self.min_timeseries)
if len(arg_list.start) > 0:
for start_arg in arg_list.start:
if type(start_arg) is list:
for starts in start_arg:
if isinstance(starts, basestring):
starti = int(starts)
elif isinstance(starts, list):
for start_str in starts:
starti = int(start_str)
# ignore duplicates (to make it easy for ldvw)
if starti not in self.start_list:
self.start_list.append(starti)
else:
self.start_list.append(int(start_arg))
else:
raise ArgumentError('No start times specified')
# Verify the number of datasets specified is valid for this plot
self.n_datasets = len(self.chan_list) * len(self.start_list)
if self.n_datasets < self.get_min_datasets():
raise ArgumentError('%d datasets are required for this '
'plot but only %d are supplied' %
(self.get_min_datasets(), self.n_datasets))
if self.n_datasets > self.get_max_datasets():
raise ArgumentError('A maximum of %d datasets is allowed for '
'this plot but %d were specified' %
(self.get_max_datasets(), self.n_datasets))
if arg_list.duration:
self.dur = int(arg_list.duration)
else:
self.dur = 10
verb = self.verbose > 1
# determine how we're supposed get our data
source = 'NDS2'
frame_cache = False
if arg_list.framecache:
source = 'frames'
frame_cache = arg_list.framecache
# set up filter parameters for all channels
highpass = 0
if arg_list.highpass:
highpass = float(arg_list.highpass)
self.filter += "highpass(%.1f) " % highpass
# Get the data from NDS or Frames
# time_groups is a list of timeseries index grouped by
# start time for coherence like plots
self.time_groups = []
for start in self.start_list:
time_group = []
for chan in self.chan_list:
if verb:
print('Fetching %s %d, %d using %s' %
(chan, start, self.dur, source))
if frame_cache:
data = TimeSeries.read(frame_cache, chan, start=start,
end=start+self.dur)
else:
data = TimeSeries.fetch(chan, start, start+self.dur,
verbose=verb)
if highpass > 0:
data = data.highpass(highpass)
self.timeseries.append(data)
time_group.append(len(self.timeseries)-1)
self.time_groups.append(time_group)
# report what we have if they asked for it
self.log(3, ('Channels: %s' % self.chan_list))
self.log(3, ('Start times: %s, duration: %d' % (self.start_list, self.dur)))
self.log(3, ('Number of time series: %d' % len(self.timeseries)))
if len(self.timeseries) != self.n_datasets:
self.log(0, ('%d datasets requested but only %d transferred' %
(self.n_datasets, len(self.timeseries))))
if len(self.timeseries) > self.get_min_datasets():
self.log(0, 'Proceeding with the data that was transferred.')
#......... rest of this example omitted .........
Example 12: Time
# Required import: from gwpy.timeseries import TimeSeries [as alias]
# Or: from gwpy.timeseries.TimeSeries import fetch [as alias]
from gwpy.time import Time
from gwpy.timeseries import TimeSeries
from gwpy.plotter import TimeSeriesPlot
from matplotlib.ticker import MultipleLocator, FormatStrFormatter
start = Time('2014-11-27 21:00:00', format='iso', scale='utc')
end = Time('2014-12-01 21:00:00', format='iso', scale='utc')
print(start.iso, start.gps)
print(end.iso, end.gps)
FMC = TimeSeries.fetch('L0:FMC-CS_LVEA_AVTEMP.mean,m-trend', start, end, verbose=True)
TCS_HAM4 = TimeSeries.fetch('L1:TCS-ITMY_HWS_CHAMBERTEMPERATURESENSORA.mean,m-trend', start, end, verbose=True)
TCS_HAM5 = TimeSeries.fetch('L1:TCS-ITMX_HWS_CHAMBERTEMPERATURESENSORA.mean,m-trend', start, end, verbose=True)
RH_ITMY = TimeSeries.fetch('L1:TCS-ITMY_RH_LOWERRTD.mean,m-trend', start, end, verbose=True)
#ITMY = TimeSeries.fetch('L1:SUS-ITMY_M0_DAMP_V_IN1_DQ.mean,m-trend', start, end, verbose=True)
print "data fetched"
FMC_C = (FMC-32)*(5.0/9)
plot = TimeSeriesPlot(FMC_C, TCS_HAM4, sep=True)
ax1 = plot.axes[0]
ax1.lines[0].set_color('red')
ax1.set_ylim(17.5,19.1)
ax1.set_ylabel('C')
ax1.set_title('Temperature (4 days)')
ax1.legend(loc='upper right', ncol=1, fancybox=True, shadow=True)
ax1.grid(True, which='both', axis='both') # add more grid
#mj = MultipleLocator(1)
ml1 = MultipleLocator(0.1)
#ax1.yaxis.set_major_locator(mj)
Example 13: int
# Required import: from gwpy.timeseries import TimeSeries [as alias]
# Or: from gwpy.timeseries.TimeSeries import fetch [as alias]
ifo = "L1"
f_cal = {"L1": 33.7, "H1": 37.3}[ifo]
pad = 8
st = int(sys.argv[1]) - pad
dur = 2 * pad
darm_chan = ifo + ":LSC-DARM_OUT_DQ"
sus_chan = ifo + ":SUS-ETMY_L3_ISCINF_L_IN1_DQ"
inj_chan = ifo + ":CAL-INJ_HARDWARE_OUT_DQ"
# Modify these to diagnose NDS2 problems
nds_kwargs = {}  # e.g. {'verbose': True, 'host': 'nds.ligo.caltech.edu'}
print("Fetching DARM")
darm = TimeSeries.fetch(darm_chan, st, st + dur + 1, **nds_kwargs)
print("Fetching ISCINF")
sus = TimeSeries.fetch(sus_chan, st, st + dur + 1, **nds_kwargs)
print("Fetching INJ")
inj = TimeSeries.fetch(inj_chan, st, st + dur + 1, **nds_kwargs)
# Sample rates all the same
assert sus.sample_rate.value == darm.sample_rate.value
assert inj.sample_rate.value == darm.sample_rate.value
srate = int(darm.sample_rate.value)
# Hardware inj channel delayed two samples getting to EY SUS
# DARM delayed one sample
# Probably because inj goes from CAL->LSC->SUSEY and DARM just LSC->SUSEY
inj = inj[:-srate]
darm = darm[1 : -srate + 1]
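What follows the sample alignment is not shown; purely as an assumption, one way to compare the aligned series is to band-limit both around the calibration line f_cal and compare their amplitudes:
import numpy as np
inj_line = inj.bandpass(f_cal - 1, f_cal + 1)
darm_line = darm.bandpass(f_cal - 1, f_cal + 1)
print("DARM/INJ amplitude ratio near %.1f Hz: %.3g" % (f_cal, np.std(darm_line.value) / np.std(inj_line.value)))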
Example 14:
# Required import: from gwpy.timeseries import TimeSeries [as alias]
# Or: from gwpy.timeseries.TimeSeries import fetch [as alias]
to calculate discrete PSDs for each stride. This is fine for long-duration
data, but gives poor resolution when studying short-duration phenomena.
The `~TimeSeries.spectrogram2` method allows for highly-overlapping FFT
calculations to over-sample the frequency content of the input `TimeSeries`
to produce a much more feature-rich output.
"""
__author__ = "Duncan Macleod <[email protected]>"
__currentmodule__ = 'gwpy.timeseries'
# As with the other `~gwpy.spectrogram.Spectrogram` examples, we import the
# `TimeSeries` class, and :meth:`~TimeSeries.fetch` the data, but in this
# example we only need 5 seconds of data.
from gwpy.timeseries import TimeSeries
gwdata = TimeSeries.fetch(
'L1:OAF-CAL_DARM_DQ', 'Feb 28 2015 06:02:05', 'Feb 28 2015 06:02:10')
# Now we can call the `~TimeSeries.spectrogram2` method of `gwdata` to
# calculate our over-dense `~gwpy.spectrogram.Spectrogram`
specgram = gwdata.spectrogram2(fftlength=0.15, overlap=0.14) ** (1/2.)
# To whiten the `specgram` we can use the :meth:`~Spectrogram.ratio` method
# to divide by the overall median:
medratio = specgram.ratio('median')
# Finally, we make a plot:
plot = medratio.plot(norm='log', vmin=0.5, vmax=10)
plot.set_yscale('log')
plot.set_ylim(40, 8192)
plot.add_colorbar(label='Amplitude relative to median')
plot.set_title('L1 $h(t)$ with noise interference')
Example 15: Time
# Required import: from gwpy.timeseries import TimeSeries [as alias]
# Or: from gwpy.timeseries.TimeSeries import fetch [as alias]
from gwpy.time import Time
from gwpy.timeseries import TimeSeries
from matplotlib.ticker import MultipleLocator, FormatStrFormatter
start = Time('2014-11-27 21:00:00', format='iso', scale='utc')
end = Time('2014-12-01 21:00:00', format='iso', scale='utc')
print(start.iso, start.gps)
print(end.iso, end.gps)
FMC = TimeSeries.fetch('L0:FMC-CS_LVEA_ZONE1_TP2.mean,m-trend', start, end, verbose=True) #closest to ITMX and HAM4
HWS = TimeSeries.fetch('L1:TCS-ITMY_HWS_CHAMBERTEMPERATURESENSORB.mean,m-trend', start, end, verbose=True) #HAM4
RH = TimeSeries.fetch('L1:TCS-ITMX_RH_LOWERRTD.mean,m-trend', start, end, verbose=True) #assume that ITMX means ITMX
print "data-fetched"
FMC_C = (FMC-32)*(5.0/9)
plot = FMC_C.plot()
ax = plot.gca()
ax.plot(HWS, label='L1:TCS-ITMY\_HWS\_CHAMBERTEMPERATURESENSORB.mean')
ax.plot(RH, label = 'L1:TCS-ITMX\_RH\_LOWERRTD.mean')
#ax.set_ylim()
ax.legend(loc='upper right', ncol=1, fancybox=True, shadow=True)
print(' plotting')
plot.show()