本文整理汇总了Python中obspy.core.Stream.copy方法的典型用法代码示例。如果您正苦于以下问题:Python Stream.copy方法的具体用法?Python Stream.copy怎么用?Python Stream.copy使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类obspy.core.Stream
的用法示例。
在下文中一共展示了Stream.copy方法的5个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: setUp
# 需要导入模块: from obspy.core import Stream [as 别名]
# 或者: from obspy.core.Stream import copy [as 别名]
def setUp(self):
    """Prepare fixture paths and the expected CSS/NNSA result streams."""
    # Directory where the test files are located.
    self.path = os.path.join(os.path.dirname(__file__), 'data')
    self.filename_css = os.path.join(self.path, 'test_css.wfdisc')
    self.filename_nnsa = os.path.join(self.path, 'test_nnsa.wfdisc')
    # Common header shared by every expected trace; each Trace gets
    # its own copy so per-trace edits below do not leak.
    header = {
        'station': 'TEST',
        'starttime': UTCDateTime(1296474900.0),
        'sampling_rate': 80.0,
        'calib': 1.0,
        'calper': 1.0,
        '_format': 'CSS',
    }
    filename = os.path.join(self.path, '201101311155.10.ascii.gz')
    with gzip.open(filename, 'rb') as fp:
        data = np.loadtxt(fp, dtype=np.int_)
    # Traces in the test files are sorted ZEN; build one big-endian
    # ('be') and one little-endian ('le') copy per channel, in that order.
    st = Stream()
    for samples, channel in zip(data.reshape((3, 4800)),
                                ('HHZ', 'HHE', 'HHN')):
        for endian_suffix in ('be', 'le'):
            trace = Trace(samples, header.copy())
            trace.stats.station += endian_suffix
            trace.stats.channel = channel
            st += trace
    self.st_result_css = st.copy()
    # The NNSA expectation differs from CSS only in the declared format.
    for trace in st:
        trace.stats['_format'] = "NNSA_KB_CORE"
    self.st_result_nnsa = st
示例2: cosTaper
# 需要导入模块: from obspy.core import Stream [as 别名]
# 或者: from obspy.core.Stream import copy [as 别名]
# NOTE(review): fragment of a larger trigger script — the enclosing
# function/scope (defining `summary`, `exceptions`, `st`, `PAR`, `T1`,
# `T2`, `PLOTDIR`, `num_stations`) is not shown here.
summary += exceptions
summary.append("#" * 79)
trig = []
mutt = []
if st:
# preprocessing, backup original data for plotting at end
st.merge(0)
st.detrend("linear")
# Taper 1% at each end of every trace to suppress filter edge effects.
for tr in st:
tr.data = tr.data * cosTaper(len(tr), 0.01)
#st.simulate(paz_remove="self", paz_simulate=cornFreq2Paz(1.0), remove_sensitivity=False)
st.sort()
st.filter("bandpass", freqmin=PAR.LOW, freqmax=PAR.HIGH, corners=1, zerophase=True)
st.trim(T1, T2)
# Keep an un-normalized copy for triggering; `st` itself gets
# normalized for plotting only.
st_trigger = st.copy()
st.normalize(global_max=False)
# do the triggering
trig = coincidenceTrigger("recstalta", PAR.ON, PAR.OFF, st_trigger,
thr_coincidence_sum=PAR.MIN_STATIONS,
max_trigger_length=PAR.MAXLEN, trigger_off_extension=PAR.ALLOWANCE,
details=True, sta=PAR.STA, lta=PAR.LTA)
# One summary line + one plot per coincidence trigger; `mutt` collects
# mail-attachment arguments for the plots.
for t in trig:
info = "%s %ss %s %s" % (t['time'].strftime("%Y-%m-%dT%H:%M:%S"), ("%.1f" % t['duration']).rjust(4), ("%i" % t['cft_peak_wmean']).rjust(3), "-".join(t['stations']))
summary.append(info)
tmp = st.slice(t['time'] - 1, t['time'] + t['duration'])
outfilename = "%s/%s_%.1f_%i_%s-%s_%s.png" % (PLOTDIR, t['time'].strftime("%Y-%m-%dT%H:%M:%S"), t['duration'], t['cft_peak_wmean'], len(t['stations']), num_stations, "-".join(t['stations']))
tmp.plot(outfile=outfilename)
mutt += ("-a", outfilename)
示例3: Seedlink_plotter
# 需要导入模块: from obspy.core import Stream [as 别名]
# 或者: from obspy.core.Stream import copy [as 别名]
class Seedlink_plotter(SLClient):
"""
This module plots realtime seismic data from a Seedlink server
"""
def __init__(self, figure, canvas, interval, backtrace, args):
    """Initialize plotter state, store CLI options and start the progress bar."""
    # Keep SeedLink logging quiet; only critical messages get through.
    super(Seedlink_plotter, self).__init__(loglevel='CRITICAL')
    self.figure = figure
    self.canvas = canvas
    self.stream = Stream()
    self.interval = interval
    self.backtrace = backtrace
    self.args = args
    self.scale = args.scale
    self.flip = 0
    self.initial_update_rate = 800
    self.update_rate = 2
    # Plot once the penultimate line of data has been received.
    lookahead = self.backtrace - 60.0 * self.interval
    self.print_percentage = lookahead / self.backtrace
    self.print_max = lookahead
    bar_widgets = [FormatLabel('Receiving Data: - '),
                   BouncingBar(marker=RotatingMarker())]
    self.pbar = ProgressBar(maxval=self.print_max,
                            widgets=bar_widgets).start()
# converter for the colors gradient
def rgb_to_hex(self, r, g, b):
    """Convert an RGB triplet into a ``#RRGGBB`` hex color string."""
    return '#' + '%02X%02X%02X' % (r, g, b)
# Rainbow color generator
def rainbow_color_generator(self, max_color):
    """Return a tuple of ``max_color`` hex colors sweeping through a rainbow."""
    frequency = 0.3
    colors = []
    for i in xrange(max_color):
        phase = frequency * i * 2
        # Three sine waves offset by ~2 radians each, scaled into
        # 1..255, so the channels peak at different times.
        red = sin(phase + 0) * 127 + 128
        green = sin(phase + 2) * 127 + 128
        blue = sin(phase + 4) * 127 + 128
        colors.append(self.rgb_to_hex(red, green, blue))
    return tuple(colors)
def plot_graph(self):
    """Redraw the dayplot of the buffered stream on the attached figure.

    Works on a copy of ``self.stream`` so that an optional filter never
    mutates the live buffer that packets are still being appended to.
    """
    ###########################################################################
    # filter section
    ###########################################################################
    self.local_stream = self.stream.copy()
    # Filter example
    # self.local_stream.filter('bandpass', freqmin=0.001, freqmax=0.5,
    #                          corners=2, zerophase=True)
    ###########################################################################
    # With this upscale factor the graph looks nice!
    upscale_factor = 30
    # BUG FIX: use the options stored on the instance in __init__ instead
    # of relying on a module-level ``args`` global, which only exists when
    # this class happens to be used from the original script.
    args = self.args
    if args.rainbow:
        # Rainbow colors!
        self.color = self.rainbow_color_generator(
            int(args.nb_rainbow_colors))
    else:
        # Regular colors: black, red, blue, green
        self.color = ('#000000', '#ff0000', '#0000ff', '#56a83c')
    self.local_stream.plot(
        fig=self.figure, type='dayplot', interval=self.interval,
        number_of_ticks=13, tick_format='%d/%m %Hh',
        size=(args.x_size * upscale_factor, args.y_size * upscale_factor),
        x_labels_size=8,
        y_labels_size=8, title=self.title, title_size=14, linewidth=0.5,
        right_vertical_labels=False,
        vertical_scaling_range=self.scale,
        subplots_adjust_left=0.03, subplots_adjust_right=0.99,
        subplots_adjust_top=0.95, subplots_adjust_bottom=0.1,
        one_tick_per_line=True,
        color=self.color,
        show_y_UTC_label=False)
def packetHandler(self, count, slpack):
"""
Processes each packet received from the SeedLinkConnection.
:type count: int
:param count: Packet counter.
:type slpack: :class:`~obspy.seedlink.SLPacket`
:param slpack: packet to process.
:return: Boolean true if connection to SeedLink server should be
closed and session terminated, false otherwise.
"""
# check if not a complete packet; returning False keeps the
# SeedLink session alive.
if slpack is None or (slpack == SLPacket.SLNOPACKET) or \
(slpack == SLPacket.SLERROR):
return False
# get basic packet info
# NOTE(review): `type` shadows the builtin of the same name; the rest
# of this method is omitted from this excerpt.
type = slpack.getType()
#.........这里部分代码省略.........
示例4: test_coincidenceTrigger
# 需要导入模块: from obspy.core import Stream [as 别名]
# 或者: from obspy.core.Stream import copy [as 别名]
def test_coincidenceTrigger(self):
"""
Test network coincidence trigger.

Runs the same four-station UH-network dataset through four trigger
configurations (plain, station-selected, weighted, weighted with
``max_trigger_length``) and checks the detected events.
"""
st = Stream()
files = ["BW.UH1._.SHZ.D.2010.147.cut.slist.gz",
"BW.UH2._.SHZ.D.2010.147.cut.slist.gz",
"BW.UH3._.SHZ.D.2010.147.cut.slist.gz",
"BW.UH4._.EHZ.D.2010.147.cut.slist.gz"]
# NOTE(review): the loop variable is deliberately rebound to the full
# path before reading.
for filename in files:
filename = os.path.join(self.path, filename)
st += read(filename)
# some prefiltering used for UH network
st.filter('bandpass', freqmin=10, freqmax=20)
# 1. no weighting, no stations specified, good settings
# => 3 events, no false triggers
# for the first test we make some additional tests regarding types
# (each call uses st.copy() so the stream is reusable between cases)
res = coincidenceTrigger("recstalta", 3.5, 1, st.copy(), 3, sta=0.5,
lta=10)
self.assertTrue(isinstance(res, list))
self.assertTrue(len(res) == 3)
expected_keys = ['time', 'coincidence_sum', 'duration', 'stations',
'trace_ids']
expected_types = [UTCDateTime, float, float, list, list]
for item in res:
self.assertTrue(isinstance(item, dict))
for key, _type in zip(expected_keys, expected_types):
self.assertTrue(key in item)
self.assertTrue(isinstance(item[key], _type))
self.assertTrue(res[0]['time'] > UTCDateTime("2010-05-27T16:24:31"))
self.assertTrue(res[0]['time'] < UTCDateTime("2010-05-27T16:24:35"))
self.assertTrue(4.2 < res[0]['duration'] < 4.8)
self.assertTrue(res[0]['stations'] == ['UH3', 'UH2', 'UH1', 'UH4'])
self.assertTrue(res[0]['coincidence_sum'] == 4)
self.assertTrue(res[1]['time'] > UTCDateTime("2010-05-27T16:26:59"))
self.assertTrue(res[1]['time'] < UTCDateTime("2010-05-27T16:27:03"))
self.assertTrue(3.2 < res[1]['duration'] < 3.7)
self.assertTrue(res[1]['stations'] == ['UH2', 'UH3', 'UH1'])
self.assertTrue(res[1]['coincidence_sum'] == 3)
self.assertTrue(res[2]['time'] > UTCDateTime("2010-05-27T16:27:27"))
self.assertTrue(res[2]['time'] < UTCDateTime("2010-05-27T16:27:33"))
self.assertTrue(4.2 < res[2]['duration'] < 4.4)
self.assertTrue(res[2]['stations'] == ['UH3', 'UH2', 'UH1', 'UH4'])
self.assertTrue(res[2]['coincidence_sum'] == 4)
# 2. no weighting, station selection
# => 2 events, no false triggers
trace_ids = ['BW.UH1..SHZ', 'BW.UH3..SHZ', 'BW.UH4..EHZ']
# ignore UserWarnings
with warnings.catch_warnings(record=True):
warnings.simplefilter('ignore', UserWarning)
re = coincidenceTrigger("recstalta", 3.5, 1, st.copy(), 3,
trace_ids=trace_ids, sta=0.5, lta=10)
self.assertTrue(len(re) == 2)
self.assertTrue(re[0]['time'] > UTCDateTime("2010-05-27T16:24:31"))
self.assertTrue(re[0]['time'] < UTCDateTime("2010-05-27T16:24:35"))
self.assertTrue(4.2 < re[0]['duration'] < 4.8)
self.assertTrue(re[0]['stations'] == ['UH3', 'UH1', 'UH4'])
self.assertTrue(re[0]['coincidence_sum'] == 3)
self.assertTrue(re[1]['time'] > UTCDateTime("2010-05-27T16:27:27"))
self.assertTrue(re[1]['time'] < UTCDateTime("2010-05-27T16:27:33"))
self.assertTrue(4.2 < re[1]['duration'] < 4.4)
self.assertTrue(re[1]['stations'] == ['UH3', 'UH1', 'UH4'])
self.assertTrue(re[1]['coincidence_sum'] == 3)
# 3. weighting, station selection
# => 3 events, no false triggers
# (dict maps trace id -> weight; coincidence threshold becomes 1.0)
trace_ids = {'BW.UH1..SHZ': 0.4, 'BW.UH2..SHZ': 0.35,
'BW.UH3..SHZ': 0.4, 'BW.UH4..EHZ': 0.25}
res = coincidenceTrigger("recstalta", 3.5, 1, st.copy(), 1.0,
trace_ids=trace_ids, sta=0.5, lta=10)
self.assertTrue(len(res) == 3)
self.assertTrue(res[0]['time'] > UTCDateTime("2010-05-27T16:24:31"))
self.assertTrue(res[0]['time'] < UTCDateTime("2010-05-27T16:24:35"))
self.assertTrue(4.2 < res[0]['duration'] < 4.8)
self.assertTrue(res[0]['stations'] == ['UH3', 'UH2', 'UH1', 'UH4'])
self.assertTrue(res[0]['coincidence_sum'] == 1.4)
self.assertTrue(res[1]['time'] > UTCDateTime("2010-05-27T16:26:59"))
self.assertTrue(res[1]['time'] < UTCDateTime("2010-05-27T16:27:03"))
self.assertTrue(3.2 < res[1]['duration'] < 3.7)
self.assertTrue(res[1]['stations'] == ['UH2', 'UH3', 'UH1'])
self.assertTrue(res[1]['coincidence_sum'] == 1.15)
self.assertTrue(res[2]['time'] > UTCDateTime("2010-05-27T16:27:27"))
self.assertTrue(res[2]['time'] < UTCDateTime("2010-05-27T16:27:33"))
self.assertTrue(4.2 < res[2]['duration'] < 4.4)
self.assertTrue(res[2]['stations'] == ['UH3', 'UH2', 'UH1', 'UH4'])
self.assertTrue(res[2]['coincidence_sum'] == 1.4)
# 4. weighting, station selection, max_len
# => 2 events, no false triggers, small event does not overlap anymore
trace_ids = {'BW.UH1..SHZ': 0.6, 'BW.UH2..SHZ': 0.6}
# ignore UserWarnings
with warnings.catch_warnings(record=True):
warnings.simplefilter('ignore', UserWarning)
re = coincidenceTrigger("recstalta", 3.5, 1, st.copy(), 1.2,
trace_ids=trace_ids,
max_trigger_length=0.13, sta=0.5, lta=10)
self.assertTrue(len(re) == 2)
self.assertTrue(re[0]['time'] > UTCDateTime("2010-05-27T16:24:31"))
self.assertTrue(re[0]['time'] < UTCDateTime("2010-05-27T16:24:35"))
self.assertTrue(0.2 < re[0]['duration'] < 0.3)
self.assertTrue(re[0]['stations'] == ['UH2', 'UH1'])
# NOTE(review): the method continues beyond this excerpt.
self.assertTrue(re[0]['coincidence_sum'] == 1.2)
#.........这里部分代码省略.........
示例5: WaveformPlotting
# 需要导入模块: from obspy.core import Stream [as 别名]
# 或者: from obspy.core.Stream import copy [as 别名]
#.........这里部分代码省略.........
"""
Plot the Traces showing one graph per Trace.
Plots the whole time series for self.max_npts points and less. For more
points it plots minmax values.
"""
stream_new = []
# Just remove empty traces.
if not self.automerge:
for tr in self.stream:
stream_new.append([])
if len(tr.data):
stream_new[-1].append(tr)
else:
# Generate sorted list of traces (no copy)
# Sort order, id, starttime, endtime
ids = []
for tr in self.stream:
if hasattr(tr.stats, 'preview') and tr.stats.preview:
id = tr.id + 'preview'
else:
id = tr.id
if not id in ids:
ids.append(id)
for id in ids:
stream_new.append([])
for tr in self.stream:
if hasattr(tr.stats, 'preview') and tr.stats.preview:
tr_id = tr.id + 'preview'
else:
tr_id = tr.id
if tr_id == id:
# does not copy the elements of the data array
tr_ref = copy(tr)
# Trim does nothing if times are outside
if self.starttime >= tr_ref.stats.endtime or \
self.endtime <= tr_ref.stats.starttime:
continue
if tr_ref.data.size:
stream_new[-1].append(tr_ref)
# delete if empty list
if not len(stream_new[-1]):
stream_new.pop()
continue
stream_new[-1].sort(key=lambda x: x.stats.endtime)
stream_new[-1].sort(key=lambda x: x.stats.starttime)
# If everything is lost in the process raise an Exception.
if not len(stream_new):
raise Exception("Nothing to plot")
# Create helper variable to track ids and min/max/mean values.
self.stats = []
# Loop over each Trace and call the appropriate plotting method.
self.axis = []
for _i, tr in enumerate(stream_new):
# Each trace needs to have the same sampling rate.
sampling_rates = set([_tr.stats.sampling_rate for _tr in tr])
if len(sampling_rates) > 1:
msg = "All traces with the same id need to have the same " + \
"sampling rate."
raise Exception(msg)
sampling_rate = sampling_rates.pop()
if self.background_color:
ax = self.fig.add_subplot(len(stream_new), 1, _i + 1,
axisbg=self.background_color)
else:
ax = self.fig.add_subplot(len(stream_new), 1, _i + 1)