本文整理汇总了Python中obspy.core.event.Event.creation_info方法的典型用法代码示例。如果您正苦于以下问题:Python Event.creation_info方法的具体用法?Python Event.creation_info怎么用?Python Event.creation_info使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类obspy.core.event.Event
的用法示例。
在下文中一共展示了Event.creation_info方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: _parse_event
# 需要导入模块: from obspy.core.event import Event [as 别名]
# 或者: from obspy.core.event.Event import creation_info [as 别名]
def _parse_event(self, first_line):
    """
    Parse an event.

    :type first_line: str
    :param first_line: First line of an event block, which contains
        the event id.
    :rtype: :class:`~obspy.core.event.event.Event`
    :return: The parsed event or None.
    """
    event_id = first_line[5:].strip()
    if not event_id:
        # An event without an id cannot be referenced: warn and skip it.
        self._warn('Missing event id')
        return None
    event = Event()
    origin, origin_res_id = self._parse_origin(event)
    if not origin:
        # No usable origin block: nothing meaningful to return.
        return None
    region_line = self._skip_empty_lines()
    self._parse_region_name(region_line, event)
    self._parse_arrivals(event, origin, origin_res_id)
    # The origin ResourceIdentifier must only be assigned once the
    # arrivals are in place, so they end up correctly scoped.
    origin.resource_id = origin_res_id
    event.origins.append(origin)
    event.preferred_origin_id = origin.resource_id.id
    # Creation info has to be read after the origin has been parsed.
    event.creation_info = self._get_creation_info()
    event.resource_id = self._get_res_id("event/%s" % event_id)
    event.scope_resource_ids()
    return event
示例2: brightness
# 需要导入模块: from obspy.core.event import Event [as 别名]
# 或者: from obspy.core.event.Event import creation_info [as 别名]
#.........这里部分代码省略.........
cum_net_trace.stats.channel = ''
cum_net_trace.stats.network = 'Z'
cum_net_trace.stats.location = ''
cum_net_trace.stats.starttime = stream[0].stats.starttime
cum_net_trace = obspy.Stream(cum_net_trace)
cum_net_trace += stream.select(channel='*N')
cum_net_trace += stream.select(channel='*1')
cum_net_trace.sort(['network', 'station', 'channel'])
# np.save('cum_net_resp.npy',cum_net_resp)
# cum_net_trace.plot(size=(800,600), equal_scale=False,\
# outfile='NR_timeseries.eps')
# Find detection within this network response
print('Finding detections in the cumulatve network response')
detections = _find_detections(cum_net_resp, peak_nodes, threshold,
thresh_type, stream[0].stats.sampling_rate,
realstations, gap)
del cum_net_resp
templates = []
nodesout = []
good_detections = []
if detections:
print('Converting detections in to templates')
# Generate a catalog of detections
detections_cat = Catalog()
for j, detection in enumerate(detections):
print('Converting for detection ' + str(j) + ' of ' +
str(len(detections)))
# Create an event for each detection
event = Event()
# Set up some header info for the event
event.event_descriptions.append(EventDescription())
event.event_descriptions[0].text = 'Brightness detection'
event.creation_info = CreationInfo(agency_id='EQcorrscan')
copy_of_stream = deepcopy(stream_copy)
# Convert detections to obspy.core.event type -
# name of detection template is the node.
node = (detection.template_name.split('_')[0],
detection.template_name.split('_')[1],
detection.template_name.split('_')[2])
print(node)
# Look up node in nodes and find the associated lags
index = nodes.index(node)
detect_lags = lags[:, index]
ksta = Comment(text='Number of stations=' + len(detect_lags))
event.origins.append(Origin())
event.origins[0].comments.append(ksta)
event.origins[0].time = copy_of_stream[0].stats.starttime +\
detect_lags[0] + detection.detect_time
event.origins[0].latitude = node[0]
event.origins[0].longitude = node[1]
event.origins[0].depth = node[2]
for i, detect_lag in enumerate(detect_lags):
station = stations[i]
st = copy_of_stream.select(station=station)
if len(st) != 0:
for tr in st:
_waveform_id = WaveformStreamID(station_code=tr.stats.
station,
channel_code=tr.stats.
channel,
network_code='NA')
event.picks.append(Pick(waveform_id=_waveform_id,
time=tr.stats.starttime +
detect_lag +
detection.detect_time +
示例3: _read_single_hypocenter
# 需要导入模块: from obspy.core.event import Event [as 别名]
# 或者: from obspy.core.event.Event import creation_info [as 别名]
#.........这里部分代码省略.........
used_station_count, depth_phase_count) = map(int, line[1:11:2])
stderr, az_gap, sec_az_gap = map(float, line[11:17:2])
gt_level = line[17]
min_dist, max_dist, med_dist = map(float, line[19:25:2])
# goto location quality info line
line = lines["QML_OriginUncertainty"]
if "COMMENT" in lines:
comment = lines["COMMENT"].strip()
comment = comment.strip('\'"')
comment = comment.strip()
hor_unc, min_hor_unc, max_hor_unc, hor_unc_azim = \
map(float, line.split()[1:9:2])
# assign origin info
event = Event()
o = Origin()
event.origins = [o]
event.preferred_origin_id = o.resource_id
o.origin_uncertainty = OriginUncertainty()
o.quality = OriginQuality()
ou = o.origin_uncertainty
oq = o.quality
o.comments.append(Comment(text=stats_info_string, force_resource_id=False))
event.comments.append(Comment(text=comment, force_resource_id=False))
# SIGNATURE field's first item is LOCSIG, which is supposed to be
# 'Identification of an individual, institiution or other entity'
# according to
# http://alomax.free.fr/nlloc/soft6.00/control.html#_NLLoc_locsig_
# so use it as author in creation info
event.creation_info = CreationInfo(creation_time=creation_time,
version=version,
author=signature)
o.creation_info = CreationInfo(creation_time=creation_time,
version=version,
author=signature)
# negative values can appear on diagonal of covariance matrix due to a
# precision problem in NLLoc implementation when location coordinates are
# large compared to the covariances.
o.longitude = x
try:
o.longitude_errors.uncertainty = kilometer2degrees(sqrt(covariance_xx))
except ValueError:
if covariance_xx < 0:
msg = ("Negative value in XX value of covariance matrix, not "
"setting longitude error (epicentral uncertainties will "
"still be set in origin uncertainty).")
warnings.warn(msg)
else:
raise
o.latitude = y
try:
o.latitude_errors.uncertainty = kilometer2degrees(sqrt(covariance_yy))
except ValueError:
if covariance_yy < 0:
msg = ("Negative value in YY value of covariance matrix, not "
"setting longitude error (epicentral uncertainties will "
"still be set in origin uncertainty).")
warnings.warn(msg)
else:
raise
o.depth = z * 1e3 # meters!
示例4: _readheader
# 需要导入模块: from obspy.core.event import Event [as 别名]
# 或者: from obspy.core.event.Event import creation_info [as 别名]
def _readheader(f):
    """
    Internal header reader.

    :type f: file
    :param f: File open in read-mode.
    :returns: :class:`~obspy.core.event.event.Event`
    :raises NordicParsingError: If no header line can be found, or if the
        origin time cannot be parsed from it.
    """
    f.seek(0)
    # Base populate to allow for empty parts of file
    new_event = Event()
    topline = _get_headline(f=f)
    if not topline:
        raise NordicParsingError('No header found, or incorrect '
                                 'formatting: corrupt s-file')
    try:
        # Nordic files may report 60 s at a minute boundary: normalise
        # to 0 s and carry one minute over into the origin time.
        sfile_seconds = int(topline[16:18])
        if sfile_seconds == 60:
            sfile_seconds = 0
            add_seconds = 60
        else:
            add_seconds = 0
        new_event.origins.append(Origin())
        new_event.origins[0].time = UTCDateTime(
            int(topline[1:5]), int(topline[6:8]), int(topline[8:10]),
            int(topline[11:13]), int(topline[13:15]), sfile_seconds,
            int(topline[19:20]) * 100000) + add_seconds
    except Exception:
        # BUG FIX: the exception was previously only instantiated, never
        # raised, so a corrupt date slipped through and later failed with
        # an unrelated IndexError.  Also narrowed the bare ``except:``.
        raise NordicParsingError("Couldn't read a date from sfile")
    # new_event.loc_mod_ind=topline[20]
    new_event.event_descriptions.append(EventDescription())
    new_event.event_descriptions[0].text = topline[21:23]
    # new_event.ev_id=topline[22]
    try:
        new_event.origins[0].latitude = float(topline[23:30])
        new_event.origins[0].longitude = float(topline[31:38])
        new_event.origins[0].depth = float(topline[39:43]) * 1000
    except ValueError:
        # The origin 'requires' a lat & long
        new_event.origins[0].latitude = None
        new_event.origins[0].longitude = None
        new_event.origins[0].depth = None
    # new_event.depth_ind = topline[44]
    # new_event.loc_ind = topline[45]
    new_event.creation_info = CreationInfo(agency_id=topline[45:48].strip())
    ksta = Comment(text='Number of stations=' + topline[49:51].strip())
    new_event.origins[0].comments.append(ksta)
    if _float_conv(topline[51:55]) is not None:
        new_event.origins[0].time_errors['Time_Residual_RMS'] = \
            _float_conv(topline[51:55])
    # Read in magnitudes if they are there.  Up to three
    # (value, type, agency) triplets end at columns 59, 67 and 75.
    for index in [59, 67, 75]:
        if not topline[index].isspace():
            new_event.magnitudes.append(Magnitude())
            new_event.magnitudes[-1].mag = _float_conv(
                topline[index - 3:index])
            new_event.magnitudes[-1].magnitude_type = \
                _nortoevmag(topline[index])
            new_event.magnitudes[-1].creation_info = \
                CreationInfo(agency_id=topline[index + 1:index + 4].strip())
            new_event.magnitudes[-1].origin_id = new_event.origins[0].\
                resource_id
    # Set the useful things like preferred magnitude and preferred origin
    new_event.preferred_origin_id = new_event.origins[0].resource_id
    try:
        # Select moment magnitude first, then local, then the rest.
        mag_filter = ['MW', 'Mw', 'ML', 'Ml', 'MB', 'Mb',
                      'MS', 'Ms', 'MC', 'Mc']
        _magnitudes = [(m.magnitude_type, m.resource_id)
                       for m in new_event.magnitudes]
        preferred_magnitude = sorted(_magnitudes,
                                     key=lambda x: mag_filter.index(x[0]))[0]
        new_event.preferred_magnitude_id = preferred_magnitude[1]
    except (ValueError, IndexError):
        # A magnitude type outside the filter list: fall back to the
        # first magnitude, if any exists at all.
        try:
            new_event.preferred_magnitude_id = new_event.magnitudes[0].\
                resource_id
        except IndexError:
            pass
    return new_event
示例5: read_nlloc_hyp
# 需要导入模块: from obspy.core.event import Event [as 别名]
# 或者: from obspy.core.event.Event import creation_info [as 别名]
#.........这里部分代码省略.........
"matrix as 1D marginal (Lon/Lat errors as great circle degrees) "
"while OriginUncertainty min/max horizontal errors are calculated "
"from 2D error ellipsoid and are therefore seemingly higher compared "
"to 1D errors. Error estimates can be reconstructed from the "
"following original NonLinLoc error statistics line:\nSTATISTICS " +
lines["STATISTICS"])
# goto location quality info line
line = lines["QML_OriginQuality"].split()
(assoc_phase_count, used_phase_count, assoc_station_count,
used_station_count, depth_phase_count) = map(int, line[1:11:2])
stderr, az_gap, sec_az_gap = map(float, line[11:17:2])
gt_level = line[17]
min_dist, max_dist, med_dist = map(float, line[19:25:2])
# goto location quality info line
line = lines["QML_OriginUncertainty"]
hor_unc, min_hor_unc, max_hor_unc, hor_unc_azim = \
map(float, line.split()[1:9:2])
# assign origin info
event = Event()
cat = Catalog(events=[event])
o = Origin()
event.origins = [o]
o.origin_uncertainty = OriginUncertainty()
o.quality = OriginQuality()
ou = o.origin_uncertainty
oq = o.quality
o.comments.append(Comment(text=stats_info_string))
cat.creation_info.creation_time = UTCDateTime()
cat.creation_info.version = "ObsPy %s" % __version__
event.creation_info = CreationInfo(creation_time=creation_time,
version=version)
event.creation_info.version = version
o.creation_info = CreationInfo(creation_time=creation_time,
version=version)
# negative values can appear on diagonal of covariance matrix due to a
# precision problem in NLLoc implementation when location coordinates are
# large compared to the covariances.
o.longitude = x
try:
o.longitude_errors.uncertainty = kilometer2degrees(sqrt(covariance_XX))
except ValueError:
if covariance_XX < 0:
msg = ("Negative value in XX value of covariance matrix, not "
"setting longitude error (epicentral uncertainties will "
"still be set in origin uncertainty).")
warnings.warn(msg)
else:
raise
o.latitude = y
try:
o.latitude_errors.uncertainty = kilometer2degrees(sqrt(covariance_YY))
except ValueError:
if covariance_YY < 0:
msg = ("Negative value in YY value of covariance matrix, not "
"setting longitude error (epicentral uncertainties will "
"still be set in origin uncertainty).")
warnings.warn(msg)
else:
raise
示例6: full_test_event
# 需要导入模块: from obspy.core.event import Event [as 别名]
# 或者: from obspy.core.event.Event import creation_info [as 别名]
def full_test_event():
"""
Function to generate a basic, full test event
"""
test_event = Event()
test_event.origins.append(Origin(
time=UTCDateTime("2012-03-26") + 1.2, latitude=45.0, longitude=25.0,
depth=15000))
test_event.event_descriptions.append(EventDescription())
test_event.event_descriptions[0].text = 'LE'
test_event.creation_info = CreationInfo(agency_id='TES')
test_event.magnitudes.append(Magnitude(
mag=0.1, magnitude_type='ML', creation_info=CreationInfo('TES'),
origin_id=test_event.origins[0].resource_id))
test_event.magnitudes.append(Magnitude(
mag=0.5, magnitude_type='Mc', creation_info=CreationInfo('TES'),
origin_id=test_event.origins[0].resource_id))
test_event.magnitudes.append(Magnitude(
mag=1.3, magnitude_type='Ms', creation_info=CreationInfo('TES'),
origin_id=test_event.origins[0].resource_id))
# Define the test pick
_waveform_id_1 = WaveformStreamID(station_code='FOZ', channel_code='SHZ',
network_code='NZ')
_waveform_id_2 = WaveformStreamID(station_code='WTSZ', channel_code='BH1',
network_code=' ')
# Pick to associate with amplitude - 0
test_event.picks = [
Pick(waveform_id=_waveform_id_1, phase_hint='IAML',
polarity='undecidable', time=UTCDateTime("2012-03-26") + 1.68,
evaluation_mode="manual"),
Pick(waveform_id=_waveform_id_1, onset='impulsive', phase_hint='PN',
polarity='positive', time=UTCDateTime("2012-03-26") + 1.68,
evaluation_mode="manual"),
Pick(waveform_id=_waveform_id_1, phase_hint='IAML',
polarity='undecidable', time=UTCDateTime("2012-03-26") + 1.68,
evaluation_mode="manual"),
Pick(waveform_id=_waveform_id_2, onset='impulsive', phase_hint='SG',
polarity='undecidable', time=UTCDateTime("2012-03-26") + 1.72,
evaluation_mode="manual"),
Pick(waveform_id=_waveform_id_2, onset='impulsive', phase_hint='PN',
polarity='undecidable', time=UTCDateTime("2012-03-26") + 1.62,
evaluation_mode="automatic")]
# Test a generic local magnitude amplitude pick
test_event.amplitudes = [
Amplitude(generic_amplitude=2.0, period=0.4,
pick_id=test_event.picks[0].resource_id,
waveform_id=test_event.picks[0].waveform_id, unit='m',
magnitude_hint='ML', category='point', type='AML'),
Amplitude(generic_amplitude=10,
pick_id=test_event.picks[1].resource_id,
waveform_id=test_event.picks[1].waveform_id, type='END',
category='duration', unit='s', magnitude_hint='Mc',
snr=2.3),
Amplitude(generic_amplitude=5.0, period=0.6,
pick_id=test_event.picks[2].resource_id,
waveform_id=test_event.picks[0].waveform_id, unit='m',
category='point', type='AML')]
test_event.origins[0].arrivals = [
Arrival(time_weight=0, phase=test_event.picks[1].phase_hint,
pick_id=test_event.picks[1].resource_id),
Arrival(time_weight=2, phase=test_event.picks[3].phase_hint,
pick_id=test_event.picks[3].resource_id,
backazimuth_residual=5, time_residual=0.2, distance=15,
azimuth=25),
Arrival(time_weight=2, phase=test_event.picks[4].phase_hint,
pick_id=test_event.picks[4].resource_id,
backazimuth_residual=5, time_residual=0.2, distance=15,
azimuth=25)]
# Add in error info (line E)
test_event.origins[0].quality = OriginQuality(
standard_error=0.01, azimuthal_gap=36)
# Origin uncertainty in Seisan is output as long-lat-depth, quakeML has
# semi-major and semi-minor
test_event.origins[0].origin_uncertainty = OriginUncertainty(
confidence_ellipsoid=ConfidenceEllipsoid(
semi_major_axis_length=3000, semi_minor_axis_length=1000,
semi_intermediate_axis_length=2000, major_axis_plunge=20,
major_axis_azimuth=100, major_axis_rotation=4))
test_event.origins[0].time_errors = QuantityError(uncertainty=0.5)
# Add in fault-plane solution info (line F) - Note have to check program
# used to determine which fields are filled....
test_event.focal_mechanisms.append(FocalMechanism(
nodal_planes=NodalPlanes(nodal_plane_1=NodalPlane(
strike=180, dip=20, rake=30, strike_errors=QuantityError(10),
dip_errors=QuantityError(10), rake_errors=QuantityError(20))),
method_id=ResourceIdentifier("smi:nc.anss.org/focalMechanism/FPFIT"),
creation_info=CreationInfo(agency_id="NC"), misfit=0.5,
station_distribution_ratio=0.8))
# Need to test high-precision origin and that it is preferred origin.
# Moment tensor includes another origin
test_event.origins.append(Origin(
time=UTCDateTime("2012-03-26") + 1.2, latitude=45.1, longitude=25.2,
depth=14500))
test_event.magnitudes.append(Magnitude(
mag=0.1, magnitude_type='MW', creation_info=CreationInfo('TES'),
origin_id=test_event.origins[-1].resource_id))
# Moment tensors go with focal-mechanisms
test_event.focal_mechanisms.append(FocalMechanism(
moment_tensor=MomentTensor(
#.........这里部分代码省略.........
示例7: full_test_event
# 需要导入模块: from obspy.core.event import Event [as 别名]
# 或者: from obspy.core.event.Event import creation_info [as 别名]
def full_test_event():
    """
    Function to generate a basic, full test event
    """
    test_event = Event()
    base_time = UTCDateTime("2012-03-26")
    test_event.origins.append(Origin(
        time=base_time + 1.2, latitude=45.0, longitude=25.0, depth=15000))
    test_event.event_descriptions.append(EventDescription(text='LE'))
    test_event.creation_info = CreationInfo(agency_id='TES')
    # Non-standard key read back by the Nordic writer for the RMS residual.
    test_event.origins[0].time_errors['Time_Residual_RMS'] = 0.01
    origin_id = test_event.origins[0].resource_id
    for _mag, _mag_type in ((0.1, 'ML'), (0.5, 'Mc'), (1.3, 'Ms')):
        test_event.magnitudes.append(Magnitude(
            mag=_mag, magnitude_type=_mag_type,
            creation_info=CreationInfo('TES'), origin_id=origin_id))
    # Define the test picks
    _waveform_id_1 = WaveformStreamID(station_code='FOZ', channel_code='SHZ',
                                      network_code='NZ')
    _waveform_id_2 = WaveformStreamID(station_code='WTSZ', channel_code='BH1',
                                      network_code=' ')
    test_event.picks = [
        # Pick to associate with the point amplitude
        Pick(waveform_id=_waveform_id_1, phase_hint='IAML',
             polarity='undecidable', time=base_time + 1.68,
             evaluation_mode="manual"),
        # Second pick, needed for the coda amplitude
        Pick(waveform_id=_waveform_id_1, onset='impulsive', phase_hint='PN',
             polarity='positive', time=base_time + 1.68,
             evaluation_mode="manual"),
        # Unassociated picks
        Pick(waveform_id=_waveform_id_2, onset='impulsive', phase_hint='SG',
             polarity='undecidable', time=base_time + 1.72,
             evaluation_mode="manual"),
        Pick(waveform_id=_waveform_id_2, onset='impulsive', phase_hint='PN',
             polarity='undecidable', time=base_time + 1.62,
             evaluation_mode="automatic")]
    test_event.amplitudes = [
        # Generic local magnitude amplitude pick
        Amplitude(generic_amplitude=2.0, period=0.4,
                  pick_id=test_event.picks[0].resource_id,
                  waveform_id=test_event.picks[0].waveform_id, unit='m',
                  magnitude_hint='ML', category='point', type='AML'),
        # Coda magnitude pick
        Amplitude(generic_amplitude=10,
                  pick_id=test_event.picks[1].resource_id,
                  waveform_id=test_event.picks[1].waveform_id, type='END',
                  category='duration', unit='s', magnitude_hint='Mc',
                  snr=2.3)]
    test_event.origins[0].arrivals = [
        Arrival(time_weight=0, phase=test_event.picks[1].phase_hint,
                pick_id=test_event.picks[1].resource_id),
        Arrival(time_weight=2, phase=test_event.picks[2].phase_hint,
                pick_id=test_event.picks[2].resource_id,
                backazimuth_residual=5, time_residual=0.2, distance=15,
                azimuth=25),
        Arrival(time_weight=2, phase=test_event.picks[3].phase_hint,
                pick_id=test_event.picks[3].resource_id,
                backazimuth_residual=5, time_residual=0.2, distance=15,
                azimuth=25)]
    return test_event
示例8: readheader
# 需要导入模块: from obspy.core.event import Event [as 别名]
# 或者: from obspy.core.event.Event import creation_info [as 别名]
def readheader(sfile):
"""
Read header information from a seisan nordic format S-file.
Returns an obspy.core.event.Catalog type: note this changed for version \
0.1.0 from the inbuilt class types.
:type sfile: str
:param sfile: Path to the s-file
:returns: :class: obspy.core.event.Event
>>> event = readheader('eqcorrscan/tests/test_data/REA/TEST_/' +
... '01-0411-15L.S201309')
>>> print(event.origins[0].time)
2013-09-01T04:11:15.700000Z
"""
import warnings
from obspy.core.event import Event, Origin, Magnitude, Comment
from obspy.core.event import EventDescription, CreationInfo
f = open(sfile, 'r')
# Base populate to allow for empty parts of file
new_event = Event()
topline = f.readline()
if not len(topline.rstrip()) == 80:
raise IOError('s-file has a corrupt header, not 80 char long')
f.seek(0)
for line in f:
if line[79] in [' ', '1']:
topline = line
break
if line[79] == '7':
raise IOError('No header found, corrupt s-file?')
try:
sfile_seconds = int(topline[16:18])
if sfile_seconds == 60:
sfile_seconds = 0
add_seconds = 60
else:
add_seconds = 0
new_event.origins.append(Origin())
new_event.origins[0].time = UTCDateTime(int(topline[1:5]),
int(topline[6:8]),
int(topline[8:10]),
int(topline[11:13]),
int(topline[13:15]),
sfile_seconds,
int(topline[19:20]) *
100000)\
+ add_seconds
except:
warnings.warn("Couldn't read a date from sfile: " + sfile)
new_event.origins.append(Origin(time=UTCDateTime(0)))
# new_event.loc_mod_ind=topline[20]
new_event.event_descriptions.append(EventDescription())
new_event.event_descriptions[0].text = topline[21:23]
# new_event.ev_id=topline[22]
if not _float_conv(topline[23:30]) == 999:
new_event.origins[0].latitude = _float_conv(topline[23:30])
new_event.origins[0].longitude = _float_conv(topline[31:38])
new_event.origins[0].depth = _float_conv(topline[39:43]) * 1000
else:
# The origin 'requires' a lat & long
new_event.origins[0].latitude = float('NaN')
new_event.origins[0].longitude = float('NaN')
new_event.origins[0].depth = float('NaN')
# new_event.depth_ind = topline[44]
# new_event.loc_ind = topline[45]
new_event.creation_info = CreationInfo(agency_id=topline[45:48].
strip())
ksta = Comment(text='Number of stations=' +
topline[49:51].strip())
new_event.origins[0].comments.append(ksta)
# new_event.origins[0].nsta??? = _int_conv(topline[49:51])
if not _float_conv(topline[51:55]) == 999:
new_event.origins[0].time_errors['Time_Residual_RMS'] = \
_float_conv(topline[51:55])
# Read in magnitudes if they are there.
if len(topline[59].strip()) > 0:
new_event.magnitudes.append(Magnitude())
new_event.magnitudes[0].mag = _float_conv(topline[56:59])
new_event.magnitudes[0].magnitude_type = topline[59]
new_event.magnitudes[0].creation_info = \
CreationInfo(agency_id=topline[60:63].strip())
new_event.magnitudes[0].origin_id = new_event.origins[0].\
resource_id
if len(topline[67].strip()) > 0:
new_event.magnitudes.append(Magnitude())
new_event.magnitudes[1].mag = _float_conv(topline[64:67])
new_event.magnitudes[1].magnitude_type = topline[67]
new_event.magnitudes[1].creation_info = \
CreationInfo(agency_id=topline[68:71].strip())
new_event.magnitudes[1].origin_id = new_event.origins[0].\
resource_id
if len(topline[75].strip()) > 0:
new_event.magnitudes.append(Magnitude())
new_event.magnitudes[2].mag = _float_conv(topline[72:75])
new_event.magnitudes[2].magnitude_type = topline[75]
new_event.magnitudes[2].creation_info = \
CreationInfo(agency_id=topline[76:79].strip())
new_event.magnitudes[2].origin_id = new_event.origins[0].\
#.........这里部分代码省略.........
示例9: full_test_event
# 需要导入模块: from obspy.core.event import Event [as 别名]
# 或者: from obspy.core.event.Event import creation_info [as 别名]
def full_test_event():
    """
    Function to generate a basic, full test event
    """
    test_event = Event()
    base_time = UTCDateTime("2012-03-26")
    test_event.origins.append(Origin(
        time=base_time + 1.2, latitude=45.0, longitude=25.0, depth=15000))
    test_event.event_descriptions.append(EventDescription(text='LE'))
    test_event.creation_info = CreationInfo(agency_id='TES')
    test_event.origins[0].quality = OriginQuality(standard_error=0.01)
    origin_id = test_event.origins[0].resource_id
    for _mag, _mag_type in ((0.1, 'ML'), (0.5, 'Mc'), (1.3, 'Ms')):
        test_event.magnitudes.append(Magnitude(
            mag=_mag, magnitude_type=_mag_type,
            creation_info=CreationInfo('TES'), origin_id=origin_id))
    # Define the test picks
    _waveform_id_1 = WaveformStreamID(station_code='FOZ', channel_code='SHZ',
                                      network_code='NZ')
    _waveform_id_2 = WaveformStreamID(station_code='WTSZ', channel_code='BH1',
                                      network_code=' ')
    test_event.picks = [
        # Pick to associate with the point amplitude
        Pick(waveform_id=_waveform_id_1, phase_hint='IAML',
             polarity='undecidable', time=base_time + 1.68,
             evaluation_mode="manual"),
        # Second pick, needed for the coda amplitude
        Pick(waveform_id=_waveform_id_1, onset='impulsive', phase_hint='PN',
             polarity='positive', time=base_time + 1.68,
             evaluation_mode="manual"),
        # Unassociated picks
        Pick(waveform_id=_waveform_id_2, onset='impulsive', phase_hint='SG',
             polarity='undecidable', time=base_time + 1.72,
             evaluation_mode="manual"),
        Pick(waveform_id=_waveform_id_2, onset='impulsive', phase_hint='PN',
             polarity='undecidable', time=base_time + 1.62,
             evaluation_mode="automatic")]
    test_event.amplitudes = [
        # Generic local magnitude amplitude pick
        Amplitude(generic_amplitude=2.0, period=0.4,
                  pick_id=test_event.picks[0].resource_id,
                  waveform_id=test_event.picks[0].waveform_id, unit='m',
                  magnitude_hint='ML', category='point', type='AML'),
        # Coda magnitude pick
        Amplitude(generic_amplitude=10,
                  pick_id=test_event.picks[1].resource_id,
                  waveform_id=test_event.picks[1].waveform_id, type='END',
                  category='duration', unit='s', magnitude_hint='Mc',
                  snr=2.3)]
    test_event.origins[0].arrivals = [
        Arrival(time_weight=0, phase=test_event.picks[1].phase_hint,
                pick_id=test_event.picks[1].resource_id),
        Arrival(time_weight=2, phase=test_event.picks[2].phase_hint,
                pick_id=test_event.picks[2].resource_id,
                backazimuth_residual=5, time_residual=0.2, distance=15,
                azimuth=25),
        Arrival(time_weight=2, phase=test_event.picks[3].phase_hint,
                pick_id=test_event.picks[3].resource_id,
                backazimuth_residual=5, time_residual=0.2, distance=15,
                azimuth=25)]
    return test_event
def test_nortoevmag(self):
    """Check the Nordic -> obspy magnitude-type mapping and its warning."""
    self.assertEqual(_nortoevmag('b'), 'mB')
    # An unknown code maps to '' and raises
    # "UserWarning: bob is not convertible"
    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter('always', UserWarning)
        self.assertEqual(_nortoevmag('bob'), '')
    self.assertEqual(len(w), 1)
    # BUG FIX: the recorded WarningMessage holds the warning under
    # ``.message`` (not ``.messages``), and it must be stringified for
    # comparison.  Also replaced the deprecated ``assertEquals``.
    self.assertEqual('bob is not convertible', str(w[0].message))
def test_evmagtonor(self):
self.assertEqual(_evmagtonor('mB'), 'B')
self.assertEqual(_evmagtonor('M'), 'W')
# raises "UserWarning: bob is not convertible"
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter('always', UserWarning)
self.assertEqual(_evmagtonor('bob'), '')
self.assertEquals(len(w), 1)
#.........这里部分代码省略.........
示例10: build
# 需要导入模块: from obspy.core.event import Event [as 别名]
# 或者: from obspy.core.event.Event import creation_info [as 别名]
def build(self):
"""
Build an obspy moment tensor focal mech event
This makes the tensor output into an Event containing:
1) a FocalMechanism with a MomentTensor, NodalPlanes, and PrincipalAxes
2) a Magnitude of the Mw from the Tensor
Which is what we want for outputting QuakeML using
the (slightly modified) obspy code.
Input
-----
filehandle => open file OR str from filehandle.read()
Output
------
event => instance of Event() class as described above
"""
p = self.parser
event = Event(event_type='earthquake')
origin = Origin()
focal_mech = FocalMechanism()
nodal_planes = NodalPlanes()
moment_tensor = MomentTensor()
principal_ax = PrincipalAxes()
magnitude = Magnitude()
data_used = DataUsed()
creation_info = CreationInfo(agency_id='NN')
ev_mode = 'automatic'
ev_stat = 'preliminary'
evid = None
orid = None
# Parse the entire file line by line.
for n,l in enumerate(p.line):
if 'REVIEWED BY NSL STAFF' in l:
ev_mode = 'manual'
ev_stat = 'reviewed'
if 'Event ID' in l:
evid = p._id(n)
if 'Origin ID' in l:
orid = p._id(n)
if 'Ichinose' in l:
moment_tensor.category = 'regional'
if re.match(r'^\d{4}\/\d{2}\/\d{2}', l):
ev = p._event_info(n)
if 'Depth' in l:
derived_depth = p._depth(n)
if 'Mw' in l:
magnitude.mag = p._mw(n)
magnitude.magnitude_type = 'Mw'
if 'Mo' in l and 'dyne' in l:
moment_tensor.scalar_moment = p._mo(n)
if 'Percent Double Couple' in l:
moment_tensor.double_couple = p._percent(n)
if 'Percent CLVD' in l:
moment_tensor.clvd = p._percent(n)
if 'Epsilon' in l:
moment_tensor.variance = p._epsilon(n)
if 'Percent Variance Reduction' in l:
moment_tensor.variance_reduction = p._percent(n)
if 'Major Double Couple' in l and 'strike' in p.line[n+1]:
np = p._double_couple(n)
nodal_planes.nodal_plane_1 = NodalPlane(*np[0])
nodal_planes.nodal_plane_2 = NodalPlane(*np[1])
nodal_planes.preferred_plane = 1
if 'Spherical Coordinates' in l:
mt = p._mt_sphere(n)
moment_tensor.tensor = Tensor(
m_rr = mt['Mrr'],
m_tt = mt['Mtt'],
m_pp = mt['Mff'],
m_rt = mt['Mrt'],
m_rp = mt['Mrf'],
m_tp = mt['Mtf'],
)
if 'Eigenvalues and eigenvectors of the Major Double Couple' in l:
ax = p._vectors(n)
principal_ax.t_axis = Axis(ax['T']['trend'], ax['T']['plunge'], ax['T']['ev'])
principal_ax.p_axis = Axis(ax['P']['trend'], ax['P']['plunge'], ax['P']['ev'])
principal_ax.n_axis = Axis(ax['N']['trend'], ax['N']['plunge'], ax['N']['ev'])
if 'Number of Stations' in l:
data_used.station_count = p._number_of_stations(n)
if 'Maximum' in l and 'Gap' in l:
focal_mech.azimuthal_gap = p._gap(n)
if re.match(r'^Date', l):
creation_info.creation_time = p._creation_time(n)
# Creation Time
creation_info.version = orid
# Fill in magnitude values
magnitude.evaluation_mode = ev_mode
magnitude.evaluation_status = ev_stat
magnitude.creation_info = creation_info.copy()
magnitude.resource_id = self._rid(magnitude)
# Stub origin
origin.time = ev.get('time')
origin.latitude = ev.get('lat')
origin.longitude = ev.get('lon')
origin.depth = derived_depth * 1000.
origin.depth_type = "from moment tensor inversion"
#.........这里部分代码省略.........
示例11: basic_test_event
# 需要导入模块: from obspy.core.event import Event [as 别名]
# 或者: from obspy.core.event.Event import creation_info [as 别名]
def basic_test_event():
    """
    Function to generate a basic, full test event
    """
    from obspy.core.event import Pick, WaveformStreamID, Arrival, Amplitude
    from obspy.core.event import Event, Origin, Magnitude
    from obspy.core.event import EventDescription, CreationInfo
    from obspy import UTCDateTime
    test_event = Event()
    base_time = UTCDateTime("2012-03-26")
    test_event.origins.append(Origin(
        time=base_time + 1, latitude=45.0, longitude=25.0, depth=15000))
    test_event.event_descriptions.append(EventDescription(text='LE'))
    test_event.creation_info = CreationInfo(agency_id='TES')
    # Non-standard key read back downstream as the RMS of time residuals.
    test_event.origins[0].time_errors['Time_Residual_RMS'] = 0.01
    origin_id = test_event.origins[0].resource_id
    for _mag, _mag_type in ((0.1, 'ML'), (0.5, 'Mc'), (1.3, 'Ms')):
        test_event.magnitudes.append(Magnitude(
            mag=_mag, magnitude_type=_mag_type,
            creation_info=CreationInfo('TES'), origin_id=origin_id))
    # Single pick, with an associated amplitude and arrival
    _waveform_id = WaveformStreamID(station_code='FOZ', channel_code='SHZ',
                                    network_code='NZ')
    test_event.picks.append(Pick(
        waveform_id=_waveform_id, onset='impulsive', phase_hint='PN',
        polarity='positive', time=base_time + 1.68,
        horizontal_slowness=12, backazimuth=20))
    test_event.amplitudes.append(Amplitude(
        generic_amplitude=2.0, period=0.4,
        pick_id=test_event.picks[0].resource_id,
        waveform_id=test_event.picks[0].waveform_id, unit='m'))
    test_event.origins[0].arrivals.append(Arrival(
        time_weight=2, phase=test_event.picks[0].phase_hint,
        pick_id=test_event.picks[0].resource_id,
        backazimuth_residual=5, time_residual=0.2, distance=15,
        azimuth=25))
    return test_event
示例12: get_results
# 需要导入模块: from obspy.core.event import Event [as 别名]
# 或者: from obspy.core.event.Event import creation_info [as 别名]
def get_results(self):
    """
    Parse hypoDD relocation output files into per-cluster results.

    Reads ``self.hypoDD_control.relocated_hypocenters_output`` and builds
    one ``Cluster`` per distinct cluster id found, each holding an obspy
    ``Catalog`` of relocated events plus the matching hypoDD event ids.
    If a single cluster was requested (``self.hypoDD_control.cid != 0``),
    the data-residual output file is also parsed to fill in the cluster's
    ``Connectedness`` (counts of shared observations per event pair).

    :rtype: list
    :return: List of :class:`Cluster` objects, one per cluster id.
    """
    cids = []
    clusters = []
    results_file = "{}/{}".format(self.hypoDD_control.control_directory,
                                  self.hypoDD_control.relocated_hypocenters_output
                                  )
    residuals_file = "{}/{}".format(self.hypoDD_control.control_directory,
                                    self.hypoDD_control.data_residual_output
                                    )
    with open(results_file, "r") as f:
        for line in f:
            # Whitespace-separated columns of the hypoDD.reloc format.
            num = line.split()
            evid = num[0]
            lat = float(num[1])
            lon = float(num[2])
            dep = 1000 * float(num[3]) # km to m
            # NOTE(review): errx/erry/errz, ncc*/nct* and rcc/rct are read
            # but never used below — confirm whether they should populate
            # origin uncertainties (see the TODO further down).
            errx = num[7]
            erry = num[8]
            errz = num[9]
            yr = int(num[10])
            mo = int(num[11])
            dy = int(num[12])
            hr = int(num[13])
            mi = int(num[14])
            sc = float(num[15])
            mag = num[16]
            nccp = num[17]
            nccs = num[18]
            nctp = num[19]
            ncts = num[20]
            rcc = num[21]
            rct = num[22]
            cid = num[23]
            # First time this cluster id is seen: start a new Cluster.
            if cid not in cids:
                cids.append(cid)
                clusters.append(Cluster())
                clusters[-1].hypoDD_id=cid
                clusters[-1].successful_relocation=True
                clusters[-1].catalog=Catalog()
                clusters[-1].event_ids=[]
            origin=Origin()
            # Split fractional seconds into integer seconds + microseconds
            # for the UTCDateTime constructor.
            isec = int ( math.floor( sc ))
            micsec = int ( ( sc - isec) * 1000000 )
            origin.time = UTCDateTime(yr, mo, dy, hr, mi, isec, micsec)
            origin.longitude = lon
            origin.latitude = lat
            origin.depth = dep
            origin.method_id = "hypoDD"
            # TODO (@ogalanis): Add time/location errors (when
            # appropriate. Add quality and origin_uncertainty. Add arrivals.
            event=Event()
            event.creation_info=CreationInfo()
            event.creation_info.author = __package__
            event.creation_info.version = info.__version__
            event.origins=[origin]
            # NOTE(review): obspy Event carries a ``magnitudes`` list, not a
            # ``magnitude`` attribute — confirm this is intentional; ``mag``
            # is also still a string here, not a float.
            event.magnitude=Magnitude()
            event.magnitude.mag=mag
            # Append the event to the Cluster matching its cluster id.
            idx=cids.index(cid)
            clusters[idx].catalog.events.append(event)
            clusters[idx].event_ids.append(evid)
    # Connectedness is only computed when a single cluster was relocated,
    # in which case clusters[0] is that cluster.
    if self.hypoDD_control.cid != 0 :
        my_list = []
        clusters[0].connectedness = Connectedness()
        with open(residuals_file, "r") as f:
            for line in f:
                num = line.split()
                evid_1 = num[2]
                evid_2 = num[3]
                obs_type = num[4]
                # Observation type selects which connectedness list the
                # event pair is counted in.
                if obs_type == "1":
                    my_list = clusters[0].connectedness.cross_corr_P
                elif obs_type == "2":
                    my_list = clusters[0].connectedness.cross_corr_S
                elif obs_type == "3":
                    my_list = clusters[0].connectedness.catalog_P
                elif obs_type == "4":
                    my_list = clusters[0].connectedness.catalog_S
                else:
                    continue
                # Look for an existing [evid_a, evid_b, count] entry for
                # this (unordered) event pair.
                in_list = [x for x in my_list if (( x[0] == evid_1 and
                                                    x[1] == evid_2
                                                  ) or
                                                  ( x[0] == evid_2 and
                                                    x[1] == evid_1
                                                  ))]
                if in_list:
                    # Increment the shared-observation count in place.
                    for x in my_list:
                        if (( x[0] == evid_1 and
                              x[1] == evid_2
                            ) or
                            ( x[0] == evid_2 and
                              x[1] == evid_1
                            )):
                            x[2] += 1
                else:
                    my_list.append([evid_1,evid_2,1])
    return clusters
示例13: cross_net
# 需要导入模块: from obspy.core.event import Event [as 别名]
# 或者: from obspy.core.event.Event import creation_info [as 别名]
def cross_net(stream, env=False, debug=0, master=False):
    """
    Generate picks using a simple envelope cross-correlation.
    Picks are made for each channel based on optimal moveout \
    defined by maximum cross-correlation with master trace. Master trace \
    will be the first trace in the stream.
    :type stream: :class: obspy.Stream
    :param stream: Stream to pick
    :type env: bool
    :param env: To compute cross-correlations on the envelope or not.
    :type debug: int
    :param debug: Debug level from 0-5
    :type master: obspy.Trace
    :param master: Trace to use as master, if False, will use the first trace \
            in stream.
    :returns: obspy.core.event.Event
    .. rubric:: Example
    >>> from obspy import read
    >>> from eqcorrscan.utils.picker import cross_net
    >>> st = read()
    >>> event = cross_net(st, env=True)
    >>> event.creation_info.author
    'EQcorrscan'
    """
    # NOTE(review): obspy.signal.cross_correlation.xcorr is deprecated in
    # recent obspy releases — confirm the pinned obspy version supports it.
    from obspy.signal.cross_correlation import xcorr
    from obspy.signal.filter import envelope
    from obspy import UTCDateTime
    from obspy.core.event import Event, Pick, WaveformStreamID
    from obspy.core.event import CreationInfo, Comment, Origin
    import matplotlib.pyplot as plt
    import numpy as np
    # Skeleton event carrying provenance; one pick is appended per trace.
    event = Event()
    event.origins.append(Origin())
    event.creation_info = CreationInfo(author='EQcorrscan',
                                       creation_time=UTCDateTime())
    event.comments.append(Comment(text='cross_net'))
    samp_rate = stream[0].stats.sampling_rate
    # Work on a copy resampled to the first trace's rate; optionally take
    # the envelope of each trace before correlating.
    if not env:
        if debug > 2:
            print('Using the raw data')
        st = stream.copy()
        st.resample(samp_rate)
    else:
        st = stream.copy()
        if debug > 2:
            print('Computing envelope')
        for tr in st:
            tr.resample(samp_rate)
            tr.data = envelope(tr.data)
    if debug > 2:
        st.plot(equal_scale=False, size=(800, 600))
    # Default master is the first trace of the (copied) stream.
    if not master:
        master = st[0]
    else:
        master = master
    # Replace NaNs so the cross-correlation is well defined.
    master.data = np.nan_to_num(master.data)
    for i, tr in enumerate(st):
        tr.data = np.nan_to_num(tr.data)
        if debug > 2:
            msg = ' '.join(['Comparing', tr.stats.station, tr.stats.channel,
                            'with the master'])
            print(msg)
        # Allow shifts of up to 30% of the trace length.
        shift_len = int(0.3 * len(tr))
        if debug > 2:
            print('Shift length is set to ' + str(shift_len) + ' samples')
        if debug > 3:
            # Diagnostic plot of master vs. shifted trace and the full
            # cross-correlation vector.
            index, cc, cc_vec = xcorr(master, tr, shift_len, full_xcorr=True)
            cc_vec = np.nan_to_num(cc_vec)
            if debug > 4:
                print(cc_vec)
            fig = plt.figure()
            ax1 = fig.add_subplot(211)
            x = np.linspace(0, len(master) / samp_rate,
                            len(master))
            ax1.plot(x, master.data / float(master.data.max()), 'k',
                     label='Master')
            ax1.plot(x + (index / samp_rate), tr.data / float(tr.data.max()),
                     'r', label='Slave shifted')
            ax1.legend(loc="lower right", prop={'size': "small"})
            ax1.set_xlabel("time [s]")
            ax1.set_ylabel("norm. amplitude")
            ax2 = fig.add_subplot(212)
            print(len(cc_vec))
            x = np.linspace(0, len(cc_vec) / samp_rate, len(cc_vec))
            ax2.plot(x, cc_vec, label='xcorr')
            # ax2.set_ylim(-1, 1)
            # ax2.set_xlim(0, len(master))
            plt.show()
        # Lag of maximum cross-correlation defines the pick moveout.
        index, cc = xcorr(master, tr, shift_len)
        wav_id = WaveformStreamID(station_code=tr.stats.station,
                                  channel_code=tr.stats.channel,
                                  network_code=tr.stats.network)
        event.picks.append(Pick(time=tr.stats.starttime + (index / tr.stats.sampling_rate),
                                waveform_id=wav_id,
                                phase_hint='S',
#.........这里部分代码省略.........
示例14: stalta_pick
# 需要导入模块: from obspy.core.event import Event [as 别名]
# 或者: from obspy.core.event.Event import creation_info [as 别名]
def stalta_pick(stream, stalen, ltalen, trig_on, trig_off, freqmin=False,
                freqmax=False, debug=0, show=False):
    """
    Basic sta/lta picker, suggest using alternative in obspy.
    Simple sta-lta (short-term average/long-term average) picker, using \
    obspy's stalta routine to generate the characteristic function.
    Currently very basic quick wrapper, there are many other (better) options \
    in obspy, found \
    `here <http://docs.obspy.org/packages/autogen/obspy.signal.trigger.html>`_.
    :type stream: obspy.Stream
    :param stream: The stream to pick on, can be any number of channels.
    :type stalen: float
    :param stalen: Length of the short-term average window in seconds.
    :type ltalen: float
    :param ltalen: Length of the long-term average window in seconds.
    :type trig_on: float
    :param trig_on: sta/lta ratio to trigger a detection/pick
    :type trig_off: float
    :param trig_off: sta/lta ratio to turn the trigger off - no further picks\
        will be made between exceeding trig_on until trig_off is reached.
    :type freqmin: float
    :param freqmin: Low-cut frequency in Hz for bandpass filter
    :type freqmax: float
    :param freqmax: High-cut frequency in Hz for bandpass filter
    :type debug: int
    :param debug: Debug output level from 0-5.
    :type show: bool
    :param show: Show picks on waveform.
    :returns: obspy.core.event.Event
    .. rubric:: Example
    >>> from obspy import read
    >>> from eqcorrscan.utils.picker import stalta_pick
    >>> st = read()
    >>> event = stalta_pick(st, stalen=0.2, ltalen=4, trig_on=10,
    ...                     trig_off=1, freqmin=3.0, freqmax=20.0)
    >>> event.creation_info.author
    'EQcorrscan'
    """
    from obspy.signal.trigger import classic_sta_lta, trigger_onset
    from obspy.signal.trigger import plot_trigger
    from obspy import UTCDateTime
    from obspy.core.event import Event, Pick, WaveformStreamID
    from obspy.core.event import CreationInfo, Comment, Origin
    import eqcorrscan.utils.plotting as plotting
    # Skeleton event carrying provenance; picks are appended per trigger.
    event = Event()
    event.origins.append(Origin())
    event.creation_info = CreationInfo(author='EQcorrscan',
                                       creation_time=UTCDateTime())
    event.comments.append(Comment(text='stalta'))
    picks = []
    for tr in stream:
        # We are going to assume, for now, that if the pick is made on the
        # horizontal channel then it is an S, otherwise we will assume it is
        # a P-phase: obviously a bad assumption...
        if tr.stats.channel[-1] == 'Z':
            phase = 'P'
        else:
            phase = 'S'
        if freqmin and freqmax:
            # NOTE(review): detrend/filter run in place on the caller's
            # stream (no copy is taken) — confirm this mutation is intended.
            tr.detrend('simple')
            tr.filter('bandpass', freqmin=freqmin, freqmax=freqmax,
                      corners=3, zerophase=True)
        df = tr.stats.sampling_rate
        # Characteristic function from window lengths converted to samples.
        cft = classic_sta_lta(tr.data, int(stalen * df), int(ltalen * df))
        if debug > 3:
            plot_trigger(tr, cft, trig_on, trig_off)
        triggers = trigger_onset(cft, trig_on, trig_off)
        for trigger in triggers:
            # Trigger-on sample converted to an absolute time.
            on = tr.stats.starttime + (trigger[0] / df)
            # off = tr.stats.starttime + (trigger[1] / df)
            wav_id = WaveformStreamID(station_code=tr.stats.station,
                                      channel_code=tr.stats.channel,
                                      network_code=tr.stats.network)
            pick = Pick(waveform_id=wav_id, phase_hint=phase, time=on)
            if debug > 2:
                print('Pick made:')
                print(pick)
            picks.append(pick)
    # QC picks
    # NOTE(review): ``del pick`` raises NameError if no trigger ever fired
    # (``pick`` is then unbound) — confirm streams without triggers cannot
    # reach this point.
    del pick
    pick_stations = list(set([pick.waveform_id.station_code for pick in picks]))
    for pick_station in pick_stations:
        station_picks = [pick for pick in picks if
                         pick.waveform_id.station_code == pick_station]
        # If P-pick is after S-picks, remove it.
        p_time = [pick.time for pick in station_picks if pick.phase_hint == 'P']
        s_time = [pick.time for pick in station_picks if pick.phase_hint == 'S']
        # NOTE(review): this compares two *lists* lexicographically, which
        # only matches the "P after S" intent for single-pick stations —
        # verify behaviour when a station has multiple picks or no S pick.
        if p_time > s_time:
            p_pick = [pick for pick in station_picks if pick.phase_hint == 'P']
            for pick in p_pick:
                print('P pick after S pick, removing P pick')
                picks.remove(pick)
    if show:
        plotting.pretty_template_plot(stream, picks=picks, title='Autopicks',
#.........这里部分代码省略.........
示例15: match_filter
# 需要导入模块: from obspy.core.event import Event [as 别名]
# 或者: from obspy.core.event.Event import creation_info [as 别名]
#.........这里部分代码省略.........
warnings.warn('Mean is not zero! Check this!')
# Set up a trace object for the cccsum as this is easier to plot and
# maintains timing
if plotvar:
stream_plot = copy.deepcopy(stream[0])
# Downsample for plotting
stream_plot.decimate(int(stream[0].stats.sampling_rate / 10))
cccsum_plot = Trace(cccsum)
cccsum_plot.stats.sampling_rate = stream[0].stats.sampling_rate
# Resample here to maintain shape better
cccsum_hist = cccsum_plot.copy()
cccsum_hist = cccsum_hist.decimate(int(stream[0].stats.
sampling_rate / 10)).data
cccsum_plot = plotting.chunk_data(cccsum_plot, 10,
'Maxabs').data
# Enforce same length
stream_plot.data = stream_plot.data[0:len(cccsum_plot)]
cccsum_plot = cccsum_plot[0:len(stream_plot.data)]
cccsum_hist = cccsum_hist[0:len(stream_plot.data)]
plotting.triple_plot(cccsum_plot, cccsum_hist,
stream_plot, rawthresh, True,
plotdir + '/cccsum_plot_' +
template_names[i] + '_' +
stream[0].stats.starttime.
datetime.strftime('%Y-%m-%d') +
'.' + plot_format)
if debug >= 4:
print(' '.join(['Saved the cccsum to:', template_names[i],
stream[0].stats.starttime.datetime.
strftime('%Y%j')]))
np.save(template_names[i] +
stream[0].stats.starttime.datetime.strftime('%Y%j'),
cccsum)
tic = time.clock()
if debug >= 4:
np.save('cccsum_' + str(i) + '.npy', cccsum)
if debug >= 3 and max(cccsum) > rawthresh:
peaks = findpeaks.find_peaks2_short(cccsum, rawthresh,
trig_int * stream[0].stats.
sampling_rate, debug,
stream[0].stats.starttime,
stream[0].stats.sampling_rate)
elif max(cccsum) > rawthresh:
peaks = findpeaks.find_peaks2_short(cccsum, rawthresh,
trig_int * stream[0].stats.
sampling_rate, debug)
else:
print('No peaks found above threshold')
peaks = False
toc = time.clock()
if debug >= 1:
print(' '.join(['Finding peaks took:', str(toc - tic), 's']))
if peaks:
for peak in peaks:
detecttime = stream[0].stats.starttime +\
peak[1] / stream[0].stats.sampling_rate
# Detect time must be valid QuakeML uri within resource_id.
# This will write a formatted string which is still readable by UTCDateTime
rid = ResourceIdentifier(id=template_names[i] + '_' +
str(detecttime.strftime('%Y%m%dT%H%M%S.%f')),
prefix='smi:local')
ev = Event(resource_id=rid)
cr_i = CreationInfo(author='EQcorrscan',
creation_time=UTCDateTime())
ev.creation_info = cr_i
# All detection info in Comments for lack of a better idea
thresh_str = 'threshold=' + str(rawthresh)
ccc_str = 'detect_val=' + str(peak[0])
used_chans = 'channels used: ' +\
' '.join([str(pair) for pair in chans[i]])
ev.comments.append(Comment(text=thresh_str))
ev.comments.append(Comment(text=ccc_str))
ev.comments.append(Comment(text=used_chans))
min_template_tm = min([tr.stats.starttime for tr in template])
for tr in template:
if (tr.stats.station, tr.stats.channel) not in chans[i]:
continue
else:
pick_tm = detecttime + (tr.stats.starttime - min_template_tm)
wv_id = WaveformStreamID(network_code=tr.stats.network,
station_code=tr.stats.station,
channel_code=tr.stats.channel)
ev.picks.append(Pick(time=pick_tm, waveform_id=wv_id))
detections.append(DETECTION(template_names[i],
detecttime,
no_chans[i], peak[0], rawthresh,
'corr', chans[i], event=ev))
if output_cat:
det_cat.append(ev)
if extract_detections:
detection_streams = extract_from_stream(stream, detections)
del stream, templates
if output_cat and not extract_detections:
return detections, det_cat
elif not extract_detections:
return detections
elif extract_detections and not output_cat:
return detections, detection_streams
else:
return detections, det_cat, detection_streams