This page collects typical usage examples of the obspy.core.event.MomentTensor.data_used attribute in Python. If you have been wondering exactly what MomentTensor.data_used does, or how to use it in your own code, the curated examples below should help. You can also read further about the containing class, obspy.core.event.MomentTensor.
Three code examples of MomentTensor.data_used are shown below, sorted by popularity by default.
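Before diving into the full examples, here is a minimal, self-contained sketch of what setting data_used typically looks like. The values are placeholders; note that in current obspy data_used is a list of DataUsed objects, while Example 3 below reflects an older API where a single DataUsed object was assigned directly.

from obspy.core.event import MomentTensor, DataUsed

# data_used describes the waveform data that went into the inversion.
mt = MomentTensor(scalar_moment=1.5e17)  # scalar moment in N*m (placeholder value)
mt.data_used = [DataUsed(wave_type='combined',
                         station_count=30,      # placeholder counts
                         component_count=60)]
print(mt.data_used[0].station_count)  # -> 30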
Example 1: build
# Required import: from obspy.core.event import MomentTensor [as alias]
# Or: from obspy.core.event.MomentTensor import data_used [as alias]
def build(self):
"""
Build an obspy moment tensor focal mech event
This makes the tensor output into an Event containing:
1) a FocalMechanism with a MomentTensor, NodalPlanes, and PrincipalAxes
2) a Magnitude of the Mw from the Tensor
Which is what we want for outputting QuakeML using
the (slightly modified) obspy code.
Input
-----
filehandle => open file OR str from filehandle.read()
Output
------
event => instance of Event() class as described above
"""
p = self.parser
event = Event(event_type='earthquake')
origin = Origin()
focal_mech = FocalMechanism()
nodal_planes = NodalPlanes()
moment_tensor = MomentTensor()
principal_ax = PrincipalAxes()
magnitude = Magnitude()
data_used = DataUsed()
creation_info = CreationInfo(agency_id='NN')
ev_mode = 'automatic'
ev_stat = 'preliminary'
evid = None
orid = None
# Parse the entire file line by line.
for n,l in enumerate(p.line):
if 'REVIEWED BY NSL STAFF' in l:
ev_mode = 'manual'
ev_stat = 'reviewed'
if 'Event ID' in l:
evid = p._id(n)
if 'Origin ID' in l:
orid = p._id(n)
if 'Ichinose' in l:
moment_tensor.category = 'regional'
if re.match(r'^\d{4}\/\d{2}\/\d{2}', l):
ev = p._event_info(n)
if 'Depth' in l:
derived_depth = p._depth(n)
if 'Mw' in l:
magnitude.mag = p._mw(n)
magnitude.magnitude_type = 'Mw'
if 'Mo' in l and 'dyne' in l:
moment_tensor.scalar_moment = p._mo(n)
if 'Percent Double Couple' in l:
moment_tensor.double_couple = p._percent(n)
if 'Percent CLVD' in l:
moment_tensor.clvd = p._percent(n)
if 'Epsilon' in l:
moment_tensor.variance = p._epsilon(n)
if 'Percent Variance Reduction' in l:
moment_tensor.variance_reduction = p._percent(n)
if 'Major Double Couple' in l and 'strike' in p.line[n+1]:
np = p._double_couple(n)
nodal_planes.nodal_plane_1 = NodalPlane(*np[0])
nodal_planes.nodal_plane_2 = NodalPlane(*np[1])
nodal_planes.preferred_plane = 1
if 'Spherical Coordinates' in l:
mt = p._mt_sphere(n)
moment_tensor.tensor = Tensor(
m_rr = mt['Mrr'],
m_tt = mt['Mtt'],
m_pp = mt['Mff'],
m_rt = mt['Mrt'],
m_rp = mt['Mrf'],
m_tp = mt['Mtf'],
)
if 'Eigenvalues and eigenvectors of the Major Double Couple' in l:
ax = p._vectors(n)
principal_ax.t_axis = Axis(ax['T']['trend'], ax['T']['plunge'], ax['T']['ev'])
principal_ax.p_axis = Axis(ax['P']['trend'], ax['P']['plunge'], ax['P']['ev'])
principal_ax.n_axis = Axis(ax['N']['trend'], ax['N']['plunge'], ax['N']['ev'])
if 'Number of Stations' in l:
data_used.station_count = p._number_of_stations(n)
if 'Maximum' in l and 'Gap' in l:
focal_mech.azimuthal_gap = p._gap(n)
if re.match(r'^Date', l):
creation_info.creation_time = p._creation_time(n)
# Creation Time
creation_info.version = orid
# Fill in magnitude values
magnitude.evaluation_mode = ev_mode
magnitude.evaluation_status = ev_stat
magnitude.creation_info = creation_info.copy()
magnitude.resource_id = self._rid(magnitude)
# Stub origin
origin.time = ev.get('time')
origin.latitude = ev.get('lat')
origin.longitude = ev.get('lon')
origin.depth = derived_depth * 1000.
origin.depth_type = "from moment tensor inversion"
#......... the rest of this code is omitted .........
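The listing above is truncated before the parsed objects are tied together. As a rough illustration only (this is not the omitted source), the pieces built in this method would typically be wired up along these lines, with data_used attached to the MomentTensor:

# Illustrative sketch, not the omitted code: attach the parsed pieces to each other.
moment_tensor.data_used = [data_used]
focal_mech.moment_tensor = moment_tensor
focal_mech.nodal_planes = nodal_planes
focal_mech.principal_axes = principal_ax
event.origins.append(origin)
event.magnitudes.append(magnitude)
event.focal_mechanisms.append(focal_mech)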
Example 2: _parse_record_dp
# Required import: from obspy.core.event import MomentTensor [as alias]
# Or: from obspy.core.event.MomentTensor import data_used [as alias]
#......... part of this code is omitted .........
origin = Origin()
res_id = '/'.join((res_id_prefix, 'origin',
evid, source_contributor.lower(),
'mw' + computation_type.lower()))
origin.resource_id = ResourceIdentifier(id=res_id)
origin.creation_info = \
CreationInfo(agency_id=source_contributor)
date = event.origins[0].time.strftime('%Y%m%d')
origin.time = UTCDateTime(date + centroid_origin_time)
# Check if centroid time is on the next day:
if origin.time < event.origins[0].time:
origin.time += timedelta(days=1)
self._store_uncertainty(origin.time_errors, orig_time_stderr)
origin.latitude = centroid_latitude
origin.longitude = centroid_longitude
origin.depth = centroid_depth * 1000
if lat_stderr == 'Fixed' and lon_stderr == 'Fixed':
origin.epicenter_fixed = True
else:
self._store_uncertainty(origin.latitude_errors,
self._lat_err_to_deg(lat_stderr))
self._store_uncertainty(origin.longitude_errors,
self._lon_err_to_deg(lon_stderr,
origin.latitude))
if depth_stderr == 'Fixed':
origin.depth_type = 'operator assigned'
else:
origin.depth_type = 'from location'
self._store_uncertainty(origin.depth_errors,
depth_stderr, scale=1000)
quality = OriginQuality()
quality.used_station_count = \
station_number + station_number2
quality.used_phase_count = \
component_number + component_number2
origin.quality = quality
origin.origin_type = 'centroid'
event.origins.append(origin)
focal_mechanism = FocalMechanism()
res_id = '/'.join((res_id_prefix, 'focalmechanism',
evid, source_contributor.lower(),
'mw' + computation_type.lower()))
focal_mechanism.resource_id = ResourceIdentifier(id=res_id)
focal_mechanism.creation_info = \
CreationInfo(agency_id=source_contributor)
moment_tensor = MomentTensor()
if origin is not None:
moment_tensor.derived_origin_id = origin.resource_id
else:
# this is required for QuakeML validation:
res_id = '/'.join((res_id_prefix, 'no-origin'))
moment_tensor.derived_origin_id = \
ResourceIdentifier(id=res_id)
for mag in event.magnitudes:
if mag.creation_info.agency_id == source_contributor:
moment_tensor.moment_magnitude_id = mag.resource_id
res_id = '/'.join((res_id_prefix, 'momenttensor',
evid, source_contributor.lower(),
'mw' + computation_type.lower()))
moment_tensor.resource_id = ResourceIdentifier(id=res_id)
moment_tensor.scalar_moment = moment
self._store_uncertainty(moment_tensor.scalar_moment_errors,
moment_stderr)
data_used = DataUsed()
data_used.station_count = station_number + station_number2
data_used.component_count = component_number + component_number2
if computation_type == 'C':
res_id = '/'.join((res_id_prefix, 'methodID=CMT'))
focal_mechanism.method_id = ResourceIdentifier(id=res_id)
# CMT algorithm uses long-period body waves,
# very-long-period surface waves and
# intermediate period surface waves (since 2004
# for shallow and intermediate-depth earthquakes
# --Ekstrom et al., 2012)
data_used.wave_type = 'combined'
if computation_type == 'M':
res_id = '/'.join((res_id_prefix, 'methodID=moment_tensor'))
focal_mechanism.method_id = ResourceIdentifier(id=res_id)
# FIXME: not sure which kind of data is used by
# "moment tensor" algorithm.
data_used.wave_type = 'unknown'
elif computation_type == 'B':
res_id = '/'.join((res_id_prefix, 'methodID=broadband_data'))
focal_mechanism.method_id = ResourceIdentifier(id=res_id)
# FIXME: is 'combined' correct here?
data_used.wave_type = 'combined'
elif computation_type == 'F':
res_id = '/'.join((res_id_prefix, 'methodID=P-wave_first_motion'))
focal_mechanism.method_id = ResourceIdentifier(id=res_id)
data_used.wave_type = 'P waves'
elif computation_type == 'S':
res_id = '/'.join((res_id_prefix, 'methodID=scalar_moment'))
focal_mechanism.method_id = ResourceIdentifier(id=res_id)
# FIXME: not sure which kind of data is used
# for scalar moment determination.
data_used.wave_type = 'unknown'
moment_tensor.data_used = [data_used]
focal_mechanism.moment_tensor = moment_tensor
event.focal_mechanisms.append(focal_mechanism)
return focal_mechanism
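Example 2 returns the FocalMechanism and appends it to the Event passed in. To produce QuakeML from the populated Event (the end goal mentioned in Example 1's docstring), the event can be wrapped in a Catalog and written out. A minimal sketch, assuming `event` is the Event instance the parser filled in; the output filename is arbitrary:

from obspy.core.event import Catalog

# Wrap the populated Event in a Catalog and serialize it as QuakeML.
catalog = Catalog(events=[event])
catalog.write('moment_tensor_solution.xml', format='QUAKEML')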
Example 3: _parseRecordDp
# Required import: from obspy.core.event import MomentTensor [as alias]
# Or: from obspy.core.event.MomentTensor import data_used [as alias]
#......... part of this code is omitted .........
moment_exponent = self._int(line[58:60])
if (moment is not None) and (moment_exponent is not None):
moment *= math.pow(10, moment_exponent)
if (moment_stderr is not None) and (moment_exponent is not None):
moment_stderr *= math.pow(10, moment_exponent)
evid = event.resource_id.id.split("/")[-1]
# Create a new origin only if centroid time is defined:
origin = None
if centroid_origin_time.strip() != ".":
origin = Origin()
res_id = "/".join(
(res_id_prefix, "origin", evid, source_contributor.lower(), "mw" + computation_type.lower())
)
origin.resource_id = ResourceIdentifier(id=res_id)
origin.creation_info = CreationInfo(agency_id=source_contributor)
date = event.origins[0].time.strftime("%Y%m%d")
origin.time = UTCDateTime(date + centroid_origin_time)
# Check if centroid time is on the next day:
if origin.time < event.origins[0].time:
origin.time += timedelta(days=1)
self._storeUncertainty(origin.time_errors, orig_time_stderr)
origin.latitude = centroid_latitude
origin.longitude = centroid_longitude
origin.depth = centroid_depth * 1000
if lat_stderr == "Fixed" and lon_stderr == "Fixed":
origin.epicenter_fixed = True
else:
self._storeUncertainty(origin.latitude_errors, self._latErrToDeg(lat_stderr))
self._storeUncertainty(origin.longitude_errors, self._lonErrToDeg(lon_stderr, origin.latitude))
if depth_stderr == "Fixed":
origin.depth_type = "operator assigned"
else:
origin.depth_type = "from location"
self._storeUncertainty(origin.depth_errors, depth_stderr, scale=1000)
quality = OriginQuality()
quality.used_station_count = station_number + station_number2
quality.used_phase_count = component_number + component_number2
origin.quality = quality
origin.type = "centroid"
event.origins.append(origin)
focal_mechanism = FocalMechanism()
res_id = "/".join(
(res_id_prefix, "focalmechanism", evid, source_contributor.lower(), "mw" + computation_type.lower())
)
focal_mechanism.resource_id = ResourceIdentifier(id=res_id)
focal_mechanism.creation_info = CreationInfo(agency_id=source_contributor)
moment_tensor = MomentTensor()
if origin is not None:
moment_tensor.derived_origin_id = origin.resource_id
else:
# this is required for QuakeML validation:
res_id = "/".join((res_id_prefix, "no-origin"))
moment_tensor.derived_origin_id = ResourceIdentifier(id=res_id)
for mag in event.magnitudes:
if mag.creation_info.agency_id == source_contributor:
moment_tensor.moment_magnitude_id = mag.resource_id
res_id = "/".join(
(res_id_prefix, "momenttensor", evid, source_contributor.lower(), "mw" + computation_type.lower())
)
moment_tensor.resource_id = ResourceIdentifier(id=res_id)
moment_tensor.scalar_moment = moment
self._storeUncertainty(moment_tensor.scalar_moment_errors, moment_stderr)
data_used = DataUsed()
data_used.station_count = station_number + station_number2
data_used.component_count = component_number + component_number2
if computation_type == "C":
res_id = "/".join((res_id_prefix, "methodID=CMT"))
focal_mechanism.method_id = ResourceIdentifier(id=res_id)
# CMT algorithm uses long-period body waves,
# very-long-period surface waves and
# intermediate period surface waves (since 2004
# for shallow and intermediate-depth earthquakes
# --Ekstrom et al., 2012)
data_used.wave_type = "combined"
if computation_type == "M":
res_id = "/".join((res_id_prefix, "methodID=moment_tensor"))
focal_mechanism.method_id = ResourceIdentifier(id=res_id)
# FIXME: not sure which kind of data is used by
# "moment tensor" algorithm.
data_used.wave_type = "unknown"
elif computation_type == "B":
res_id = "/".join((res_id_prefix, "methodID=broadband_data"))
focal_mechanism.method_id = ResourceIdentifier(id=res_id)
# FIXME: is 'combined' correct here?
data_used.wave_type = "combined"
elif computation_type == "F":
res_id = "/".join((res_id_prefix, "methodID=P-wave_first_motion"))
focal_mechanism.method_id = ResourceIdentifier(id=res_id)
data_used.wave_type = "P waves"
elif computation_type == "S":
res_id = "/".join((res_id_prefix, "methodID=scalar_moment"))
focal_mechanism.method_id = ResourceIdentifier(id=res_id)
# FIXME: not sure which kind of data is used
# for scalar moment determination.
data_used.wave_type = "unknown"
moment_tensor.data_used = data_used
focal_mechanism.moment_tensor = moment_tensor
event.focal_mechanisms.append(focal_mechanism)
return focal_mechanism