This article collects typical usage examples of the Python attribute obspy.core.event.Origin.creation_info. If you have been wondering what Origin.creation_info is for or how to use it, the curated examples below should help. You can also explore the wider usage of the class it belongs to, obspy.core.event.Origin.
The following presents 10 code examples of Origin.creation_info, sorted by popularity by default.
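Before the individual examples, here is a minimal sketch of the pattern they all share: create an Origin, fill in its basic hypocenter fields, and attach a CreationInfo object via the creation_info attribute. The agency and author strings below are placeholder values, not taken from any of the examples.
from obspy import UTCDateTime
from obspy.core.event import Origin, CreationInfo

origin = Origin()
origin.time = UTCDateTime("2020-01-01T00:00:00")
origin.latitude = 47.5
origin.longitude = 12.5
origin.depth = 10000.0  # ObsPy stores Origin.depth in meters
# Record provenance: who produced this origin and when
origin.creation_info = CreationInfo(agency_id="XX", author="example-author",
                                    creation_time=UTCDateTime())
print(origin.creation_info.agency_id)  # -> 'XX'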
Example 1: _parse_record_ah
# Required module import: from obspy.core.event import Origin [as alias]
# Or: from obspy.core.event.Origin import creation_info [as alias]
def _parse_record_ah(self, line, event):
"""
Parses the 'additional hypocenter' record AH
"""
date = line[2:10]
time = line[11:20]
# unused: hypocenter_quality = line[20]
latitude = self._float(line[21:27])
lat_type = line[27]
longitude = self._float(line[29:36])
lon_type = line[36]
# unused: preliminary_flag = line[37]
depth = self._float(line[38:43])
# unused: depth_quality = line[43]
standard_dev = self._float_unused(line[44:48])
station_number = self._int_unused(line[48:51])
phase_number = self._int_unused(line[51:55])
source_code = line[56:60].strip()
evid = event.resource_id.id.split('/')[-1]
origin = Origin()
res_id = '/'.join((res_id_prefix, 'origin', evid, source_code.lower()))
origin.resource_id = ResourceIdentifier(id=res_id)
origin.creation_info = CreationInfo(agency_id=source_code)
origin.time = UTCDateTime(date + time)
origin.latitude = latitude * self._coordinate_sign(lat_type)
origin.longitude = longitude * self._coordinate_sign(lon_type)
origin.depth = depth * 1000
origin.depth_type = 'from location'
origin.quality = OriginQuality()
origin.quality.standard_error = standard_dev
origin.quality.used_station_count = station_number
origin.quality.used_phase_count = phase_number
origin.origin_type = 'hypocenter'
event.origins.append(origin)
Example 2: _parse_record_hy
# Required module import: from obspy.core.event import Origin [as alias]
# Or: from obspy.core.event.Origin import creation_info [as alias]
def _parse_record_hy(self, line):
"""
Parses the 'hypocenter' record HY
"""
date = line[2:10]
time = line[11:20]
# unused: location_quality = line[20]
latitude = self._float(line[21:27])
lat_type = line[27]
longitude = self._float(line[29:36])
lon_type = line[36]
depth = self._float(line[38:43])
# unused: depth_quality = line[43]
standard_dev = self._float(line[44:48])
station_number = self._int(line[48:51])
# unused: version_flag = line[51]
fe_region_number = line[52:55]
fe_region_name = self._decode_fe_region_number(fe_region_number)
source_code = line[55:60].strip()
event = Event()
# FIXME: a smarter way to define evid?
evid = date + time
res_id = '/'.join((res_id_prefix, 'event', evid))
event.resource_id = ResourceIdentifier(id=res_id)
description = EventDescription(
type='region name',
text=fe_region_name)
event.event_descriptions.append(description)
description = EventDescription(
type='Flinn-Engdahl region',
text=fe_region_number)
event.event_descriptions.append(description)
origin = Origin()
res_id = '/'.join((res_id_prefix, 'origin', evid))
origin.resource_id = ResourceIdentifier(id=res_id)
origin.creation_info = CreationInfo()
if source_code:
origin.creation_info.agency_id = source_code
else:
origin.creation_info.agency_id = 'USGS-NEIC'
res_id = '/'.join((res_id_prefix, 'earthmodel/ak135'))
origin.earth_model_id = ResourceIdentifier(id=res_id)
origin.time = UTCDateTime(date + time)
origin.latitude = latitude * self._coordinate_sign(lat_type)
origin.longitude = longitude * self._coordinate_sign(lon_type)
origin.depth = depth * 1000
origin.depth_type = 'from location'
origin.quality = OriginQuality()
origin.quality.associated_station_count = station_number
origin.quality.standard_error = standard_dev
# associated_phase_count can be incremented in records 'P ' and 'S '
origin.quality.associated_phase_count = 0
# depth_phase_count can be incremented in record 'S '
origin.quality.depth_phase_count = 0
origin.origin_type = 'hypocenter'
origin.region = fe_region_name
event.origins.append(origin)
return event
Example 3: _read_single_hypocenter
# Required module import: from obspy.core.event import Origin [as alias]
# Or: from obspy.core.event.Origin import creation_info [as alias]
#......... (part of the code is omitted here) .........
used_station_count, depth_phase_count) = map(int, line[1:11:2])
stderr, az_gap, sec_az_gap = map(float, line[11:17:2])
gt_level = line[17]
min_dist, max_dist, med_dist = map(float, line[19:25:2])
# goto location quality info line
line = lines["QML_OriginUncertainty"]
if "COMMENT" in lines:
comment = lines["COMMENT"].strip()
comment = comment.strip('\'"')
comment = comment.strip()
hor_unc, min_hor_unc, max_hor_unc, hor_unc_azim = \
map(float, line.split()[1:9:2])
# assign origin info
event = Event()
o = Origin()
event.origins = [o]
event.preferred_origin_id = o.resource_id
o.origin_uncertainty = OriginUncertainty()
o.quality = OriginQuality()
ou = o.origin_uncertainty
oq = o.quality
o.comments.append(Comment(text=stats_info_string, force_resource_id=False))
event.comments.append(Comment(text=comment, force_resource_id=False))
# SIGNATURE field's first item is LOCSIG, which is supposed to be
# 'Identification of an individual, institution or other entity'
# according to
# http://alomax.free.fr/nlloc/soft6.00/control.html#_NLLoc_locsig_
# so use it as author in creation info
event.creation_info = CreationInfo(creation_time=creation_time,
version=version,
author=signature)
o.creation_info = CreationInfo(creation_time=creation_time,
version=version,
author=signature)
# negative values can appear on diagonal of covariance matrix due to a
# precision problem in NLLoc implementation when location coordinates are
# large compared to the covariances.
o.longitude = x
try:
o.longitude_errors.uncertainty = kilometer2degrees(sqrt(covariance_xx))
except ValueError:
if covariance_xx < 0:
msg = ("Negative value in XX value of covariance matrix, not "
"setting longitude error (epicentral uncertainties will "
"still be set in origin uncertainty).")
warnings.warn(msg)
else:
raise
o.latitude = y
try:
o.latitude_errors.uncertainty = kilometer2degrees(sqrt(covariance_yy))
except ValueError:
if covariance_yy < 0:
msg = ("Negative value in YY value of covariance matrix, not "
"setting longitude error (epicentral uncertainties will "
"still be set in origin uncertainty).")
warnings.warn(msg)
else:
raise
o.depth = z * 1e3 # meters!
Example 4: read_nlloc_hyp
# Required module import: from obspy.core.event import Origin [as alias]
# Or: from obspy.core.event.Origin import creation_info [as alias]
#......... (part of the code is omitted here) .........
"matrix as 1D marginal (Lon/Lat errors as great circle degrees) "
"while OriginUncertainty min/max horizontal errors are calculated "
"from 2D error ellipsoid and are therefore seemingly higher compared "
"to 1D errors. Error estimates can be reconstructed from the "
"following original NonLinLoc error statistics line:\nSTATISTICS " +
lines["STATISTICS"])
# goto location quality info line
line = lines["QML_OriginQuality"].split()
(assoc_phase_count, used_phase_count, assoc_station_count,
used_station_count, depth_phase_count) = map(int, line[1:11:2])
stderr, az_gap, sec_az_gap = map(float, line[11:17:2])
gt_level = line[17]
min_dist, max_dist, med_dist = map(float, line[19:25:2])
# goto location quality info line
line = lines["QML_OriginUncertainty"]
hor_unc, min_hor_unc, max_hor_unc, hor_unc_azim = \
map(float, line.split()[1:9:2])
# assign origin info
event = Event()
cat = Catalog(events=[event])
o = Origin()
event.origins = [o]
o.origin_uncertainty = OriginUncertainty()
o.quality = OriginQuality()
ou = o.origin_uncertainty
oq = o.quality
o.comments.append(Comment(text=stats_info_string))
cat.creation_info.creation_time = UTCDateTime()
cat.creation_info.version = "ObsPy %s" % __version__
event.creation_info = CreationInfo(creation_time=creation_time,
version=version)
event.creation_info.version = version
o.creation_info = CreationInfo(creation_time=creation_time,
version=version)
# negative values can appear on diagonal of covariance matrix due to a
# precision problem in NLLoc implementation when location coordinates are
# large compared to the covariances.
o.longitude = x
try:
o.longitude_errors.uncertainty = kilometer2degrees(sqrt(covariance_XX))
except ValueError:
if covariance_XX < 0:
msg = ("Negative value in XX value of covariance matrix, not "
"setting longitude error (epicentral uncertainties will "
"still be set in origin uncertainty).")
warnings.warn(msg)
else:
raise
o.latitude = y
try:
o.latitude_errors.uncertainty = kilometer2degrees(sqrt(covariance_YY))
except ValueError:
if covariance_YY < 0:
msg = ("Negative value in YY value of covariance matrix, not "
"setting longitude error (epicentral uncertainties will "
"still be set in origin uncertainty).")
warnings.warn(msg)
else:
raise
Example 5: _parse_first_line_origin
# Required module import: from obspy.core.event import Origin [as alias]
# Or: from obspy.core.event.Origin import creation_info [as alias]
def _parse_first_line_origin(self, line, event, magnitudes):
"""
Parse the first line of origin data.
:type line: str
:param line: Line to parse.
:type event: :class:`~obspy.core.event.event.Event`
:param event: Event of the origin.
:type magnitudes: list of
:class:`~obspy.core.event.magnitude.Magnitude`
:param magnitudes: Store magnitudes in a list to keep
their positions.
:rtype: :class:`~obspy.core.event.origin.Origin`,
:class:`~obspy.core.event.resourceid.ResourceIdentifier`
:returns: Parsed origin or None, resource identifier of the
origin.
"""
magnitude_types = []
magnitude_values = []
magnitude_station_counts = []
fields = self.fields['line_1']
time_origin = line[fields['time']].strip()
time_fixed_flag = line[fields['time_fixf']].strip()
latitude = line[fields['lat']].strip()
longitude = line[fields['lon']].strip()
epicenter_fixed_flag = line[fields['epicenter_fixf']].strip()
depth = line[fields['depth']].strip()
depth_fixed_flag = line[fields['depth_fixf']].strip()
phase_count = line[fields['n_def']].strip()
station_count = line[fields['n_sta']].strip()
azimuthal_gap = line[fields['gap']].strip()
magnitude_types.append(line[fields['mag_type_1']].strip())
magnitude_values.append(line[fields['mag_1']].strip())
magnitude_station_counts.append(line[fields['mag_n_sta_1']].strip())
magnitude_types.append(line[fields['mag_type_2']].strip())
magnitude_values.append(line[fields['mag_2']].strip())
magnitude_station_counts.append(line[fields['mag_n_sta_2']].strip())
magnitude_types.append(line[fields['mag_type_3']].strip())
magnitude_values.append(line[fields['mag_3']].strip())
magnitude_station_counts.append(line[fields['mag_n_sta_3']].strip())
author = line[fields['author']].strip()
origin_id = line[fields['id']].strip()
origin = Origin()
origin.quality = OriginQuality()
try:
origin.time = UTCDateTime(time_origin.replace('/', '-'))
origin.latitude = float(latitude)
origin.longitude = float(longitude)
except (TypeError, ValueError):
self._warn('Missing origin data, skipping event')
return None, None
origin.time_fixed = time_fixed_flag.lower() == 'f'
origin.epicenter_fixed = epicenter_fixed_flag.lower() == 'f'
try:
# Convert value from km to m
origin.depth = float(depth) * 1000
except ValueError:
pass
try:
origin.depth_type = DEPTH_TYPES[depth_fixed_flag]
except KeyError:
origin.depth_type = OriginDepthType('from location')
try:
origin.quality.used_phase_count = int(phase_count)
origin.quality.associated_phase_count = int(phase_count)
except ValueError:
pass
try:
origin.quality.used_station_count = int(station_count)
origin.quality.associated_station_count = int(station_count)
except ValueError:
pass
try:
origin.quality.azimuthal_gap = float(azimuthal_gap)
except ValueError:
pass
self.author = author
origin.creation_info = self._get_creation_info()
public_id = "origin/%s" % origin_id
origin_res_id = self._get_res_id(public_id)
for i in range(3):
try:
magnitude = Magnitude()
magnitude.creation_info = self._get_creation_info()
magnitude.magnitude_type = magnitude_types[i]
magnitude.mag = float(magnitude_values[i])
magnitude.station_count = int(magnitude_station_counts[i])
magnitude.origin_id = origin_res_id
magnitudes.append(magnitude)
event.magnitudes.append(magnitude)
except ValueError:
#......... (part of the code is omitted here) .........
Example 6: _parse_record_dp
# Required module import: from obspy.core.event import Origin [as alias]
# Or: from obspy.core.event.Origin import creation_info [as alias]
def _parse_record_dp(self, line, event):
"""
Parses the 'source parameter data - primary' record Dp
"""
source_contributor = line[2:6].strip()
computation_type = line[6]
exponent = self._int_zero(line[7])
scale = math.pow(10, exponent)
centroid_origin_time = line[8:14] + '.' + line[14]
orig_time_stderr = line[15:17]
if orig_time_stderr == 'FX':
orig_time_stderr = 'Fixed'
else:
orig_time_stderr = \
self._float_with_format(orig_time_stderr, '2.1', scale)
centroid_latitude = self._float_with_format(line[17:21], '4.2')
lat_type = line[21]
if centroid_latitude is not None:
centroid_latitude *= self._coordinate_sign(lat_type)
lat_stderr = line[22:25]
if lat_stderr == 'FX':
lat_stderr = 'Fixed'
else:
lat_stderr = self._float_with_format(lat_stderr, '3.2', scale)
centroid_longitude = self._float_with_format(line[25:30], '5.2')
lon_type = line[30]
if centroid_longitude is not None:
centroid_longitude *= self._coordinate_sign(lon_type)
lon_stderr = line[31:34]
if lon_stderr == 'FX':
lon_stderr = 'Fixed'
else:
lon_stderr = self._float_with_format(lon_stderr, '3.2', scale)
centroid_depth = self._float_with_format(line[34:38], '4.1')
depth_stderr = line[38:40]
if depth_stderr == 'FX' or depth_stderr == 'BD':
depth_stderr = 'Fixed'
else:
depth_stderr = self._float_with_format(depth_stderr, '2.1', scale)
station_number = self._int_zero(line[40:43])
component_number = self._int_zero(line[43:46])
station_number2 = self._int_zero(line[46:48])
component_number2 = self._int_zero(line[48:51])
# unused: half_duration = self._float_with_format(line[51:54], '3.1')
moment = self._float_with_format(line[54:56], '2.1')
moment_stderr = self._float_with_format(line[56:58], '2.1')
moment_exponent = self._int(line[58:60])
if (moment is not None) and (moment_exponent is not None):
moment *= math.pow(10, moment_exponent)
if (moment_stderr is not None) and (moment_exponent is not None):
moment_stderr *= math.pow(10, moment_exponent)
evid = event.resource_id.id.split('/')[-1]
# Create a new origin only if centroid time is defined:
origin = None
if centroid_origin_time.strip() != '.':
origin = Origin()
res_id = '/'.join((res_id_prefix, 'origin',
evid, source_contributor.lower(),
'mw' + computation_type.lower()))
origin.resource_id = ResourceIdentifier(id=res_id)
origin.creation_info = \
CreationInfo(agency_id=source_contributor)
date = event.origins[0].time.strftime('%Y%m%d')
origin.time = UTCDateTime(date + centroid_origin_time)
# Check if centroid time is on the next day:
if origin.time < event.origins[0].time:
origin.time += timedelta(days=1)
self._store_uncertainty(origin.time_errors, orig_time_stderr)
origin.latitude = centroid_latitude
origin.longitude = centroid_longitude
origin.depth = centroid_depth * 1000
if lat_stderr == 'Fixed' and lon_stderr == 'Fixed':
origin.epicenter_fixed = True
else:
self._store_uncertainty(origin.latitude_errors,
self._lat_err_to_deg(lat_stderr))
self._store_uncertainty(origin.longitude_errors,
self._lon_err_to_deg(lon_stderr,
origin.latitude))
if depth_stderr == 'Fixed':
origin.depth_type = 'operator assigned'
else:
origin.depth_type = 'from location'
self._store_uncertainty(origin.depth_errors,
depth_stderr, scale=1000)
quality = OriginQuality()
quality.used_station_count = \
station_number + station_number2
quality.used_phase_count = \
component_number + component_number2
origin.quality = quality
origin.origin_type = 'centroid'
event.origins.append(origin)
focal_mechanism = FocalMechanism()
res_id = '/'.join((res_id_prefix, 'focalmechanism',
evid, source_contributor.lower(),
'mw' + computation_type.lower()))
focal_mechanism.resource_id = ResourceIdentifier(id=res_id)
focal_mechanism.creation_info = \
#......... (part of the code is omitted here) .........
Example 7: outputOBSPY
# Required module import: from obspy.core.event import Origin [as alias]
# Or: from obspy.core.event.Origin import creation_info [as alias]
def outputOBSPY(hp, event=None, only_fm_picks=False):
"""
Make an Event which includes the current focal mechanism information from HASH
Use the 'only_fm_picks' flag to only include the picks HASH used for the FocalMechanism.
This flag will replace the 'picks' and 'arrivals' lists of existing events with new ones.
Inputs
-------
hp : hashpy.HashPype instance
event : obspy.core.event.Event
only_fm_picks : bool of whether to overwrite the picks/arrivals lists
Returns
-------
obspy.core.event.Event
Event will be new if no event was input, FocalMech added to existing event
"""
# Returns new (or updates existing) Event with HASH solution
n = hp.npol
if event is None:
event = Event(focal_mechanisms=[], picks=[], origins=[])
origin = Origin(arrivals=[])
origin.time = UTCDateTime(hp.tstamp)
origin.latitude = hp.qlat
origin.longitude = hp.qlon
origin.depth = hp.qdep
origin.creation_info = CreationInfo(version=hp.icusp)
origin.resource_id = ResourceIdentifier('smi:hash/Origin/{0}'.format(hp.icusp))
for _i in range(n):
p = Pick()
p.creation_info = CreationInfo(version=hp.arid[_i])
p.resource_id = ResourceIdentifier('smi:hash/Pick/{0}'.format(p.creation_info.version))
p.waveform_id = WaveformStreamID(network_code=hp.snet[_i], station_code=hp.sname[_i], channel_code=hp.scomp[_i])
if hp.p_pol[_i] > 0:
p.polarity = 'positive'
else:
p.polarity = 'negative'
a = Arrival()
a.creation_info = CreationInfo(version=hp.arid[_i])
a.resource_id = ResourceIdentifier('smi:hash/Arrival/{0}'.format(p.creation_info.version))
a.azimuth = hp.p_azi_mc[_i,0]
a.takeoff_angle = 180. - hp.p_the_mc[_i,0]
a.pick_id = p.resource_id
origin.arrivals.append(a)
event.picks.append(p)
event.origins.append(origin)
event.preferred_origin_id = str(origin.resource_id)
else: # just update the changes
origin = event.preferred_origin()
picks = []
arrivals = []
for _i in range(n):
ind = hp.p_index[_i]
a = origin.arrivals[ind]
p = a.pick_id.getReferredObject()
a.takeoff_angle = hp.p_the_mc[_i,0]
picks.append(p)
arrivals.append(a)
if only_fm_picks:
origin.arrivals = arrivals
event.picks = picks
# Use the DoubleCouple calculator and populate planes/axes etc
x = hp._best_quality_index
# Put all the mechanisms into the 'focal_mechanisms' list, mark "best" as preferred
for s in range(hp.nmult):
dc = DoubleCouple([hp.str_avg[s], hp.dip_avg[s], hp.rak_avg[s]])
ax = dc.axis
focal_mech = FocalMechanism()
focal_mech.creation_info = CreationInfo(creation_time=UTCDateTime(), author=hp.author)
focal_mech.triggering_origin_id = origin.resource_id
focal_mech.resource_id = ResourceIdentifier('smi:hash/FocalMechanism/{0}/{1}'.format(hp.icusp, s+1))
focal_mech.method_id = ResourceIdentifier('HASH')
focal_mech.nodal_planes = NodalPlanes()
focal_mech.nodal_planes.nodal_plane_1 = NodalPlane(*dc.plane1)
focal_mech.nodal_planes.nodal_plane_2 = NodalPlane(*dc.plane2)
focal_mech.principal_axes = PrincipalAxes()
focal_mech.principal_axes.t_axis = Axis(azimuth=ax['T']['azimuth'], plunge=ax['T']['dip'])
focal_mech.principal_axes.p_axis = Axis(azimuth=ax['P']['azimuth'], plunge=ax['P']['dip'])
focal_mech.station_polarity_count = n
focal_mech.azimuthal_gap = hp.magap
focal_mech.misfit = hp.mfrac[s]
focal_mech.station_distribution_ratio = hp.stdr[s]
focal_mech.comments.append(
Comment(hp.qual[s], resource_id=ResourceIdentifier(str(focal_mech.resource_id) + '/comment/quality'))
)
#----------------------------------------
event.focal_mechanisms.append(focal_mech)
if s == x:
event.preferred_focal_mechanism_id = str(focal_mech.resource_id)
return event
Example 8: build
# Required module import: from obspy.core.event import Origin [as alias]
# Or: from obspy.core.event.Origin import creation_info [as alias]
def build(self):
"""
Build an obspy moment tensor focal mech event
This makes the tensor output into an Event containing:
1) a FocalMechanism with a MomentTensor, NodalPlanes, and PrincipalAxes
2) a Magnitude of the Mw from the Tensor
Which is what we want for outputting QuakeML using
the (slightly modified) obspy code.
Input
-----
filehandle => open file OR str from filehandle.read()
Output
------
event => instance of Event() class as described above
"""
p = self.parser
event = Event(event_type='earthquake')
origin = Origin()
focal_mech = FocalMechanism()
nodal_planes = NodalPlanes()
moment_tensor = MomentTensor()
principal_ax = PrincipalAxes()
magnitude = Magnitude()
data_used = DataUsed()
creation_info = CreationInfo(agency_id='NN')
ev_mode = 'automatic'
ev_stat = 'preliminary'
evid = None
orid = None
# Parse the entire file line by line.
for n,l in enumerate(p.line):
if 'REVIEWED BY NSL STAFF' in l:
ev_mode = 'manual'
ev_stat = 'reviewed'
if 'Event ID' in l:
evid = p._id(n)
if 'Origin ID' in l:
orid = p._id(n)
if 'Ichinose' in l:
moment_tensor.category = 'regional'
if re.match(r'^\d{4}\/\d{2}\/\d{2}', l):
ev = p._event_info(n)
if 'Depth' in l:
derived_depth = p._depth(n)
if 'Mw' in l:
magnitude.mag = p._mw(n)
magnitude.magnitude_type = 'Mw'
if 'Mo' in l and 'dyne' in l:
moment_tensor.scalar_moment = p._mo(n)
if 'Percent Double Couple' in l:
moment_tensor.double_couple = p._percent(n)
if 'Percent CLVD' in l:
moment_tensor.clvd = p._percent(n)
if 'Epsilon' in l:
moment_tensor.variance = p._epsilon(n)
if 'Percent Variance Reduction' in l:
moment_tensor.variance_reduction = p._percent(n)
if 'Major Double Couple' in l and 'strike' in p.line[n+1]:
np = p._double_couple(n)
nodal_planes.nodal_plane_1 = NodalPlane(*np[0])
nodal_planes.nodal_plane_2 = NodalPlane(*np[1])
nodal_planes.preferred_plane = 1
if 'Spherical Coordinates' in l:
mt = p._mt_sphere(n)
moment_tensor.tensor = Tensor(
m_rr = mt['Mrr'],
m_tt = mt['Mtt'],
m_pp = mt['Mff'],
m_rt = mt['Mrt'],
m_rp = mt['Mrf'],
m_tp = mt['Mtf'],
)
if 'Eigenvalues and eigenvectors of the Major Double Couple' in l:
ax = p._vectors(n)
principal_ax.t_axis = Axis(ax['T']['trend'], ax['T']['plunge'], ax['T']['ev'])
principal_ax.p_axis = Axis(ax['P']['trend'], ax['P']['plunge'], ax['P']['ev'])
principal_ax.n_axis = Axis(ax['N']['trend'], ax['N']['plunge'], ax['N']['ev'])
if 'Number of Stations' in l:
data_used.station_count = p._number_of_stations(n)
if 'Maximum' in l and 'Gap' in l:
focal_mech.azimuthal_gap = p._gap(n)
if re.match(r'^Date', l):
creation_info.creation_time = p._creation_time(n)
# Creation Time
creation_info.version = orid
# Fill in magnitude values
magnitude.evaluation_mode = ev_mode
magnitude.evaluation_status = ev_stat
magnitude.creation_info = creation_info.copy()
magnitude.resource_id = self._rid(magnitude)
# Stub origin
origin.time = ev.get('time')
origin.latitude = ev.get('lat')
origin.longitude = ev.get('lon')
origin.depth = derived_depth * 1000.
origin.depth_type = "from moment tensor inversion"
#......... (part of the code is omitted here) .........
Example 9: _map_join2origin
# Required module import: from obspy.core.event import Origin [as alias]
# Or: from obspy.core.event.Origin import creation_info [as alias]
def _map_join2origin(self, db):
"""
Return an Origin instance from an dict of CSS key/values
Inputs
======
db : dict of key/values of CSS fields related to the origin (see Join)
Returns
=======
obspy.core.event.Origin
Notes
=====
Any object that supports the dict 'get' method can be passed as
input, e.g. OrderedDict, custom classes, etc.
Join
----
origin <- origerr (outer)
"""
#-- Basic location ------------------------------------------
origin = Origin()
origin.latitude = db.get('lat')
origin.longitude = db.get('lon')
origin.depth = _km2m(db.get('depth'))
origin.time = _utc(db.get('time'))
origin.extra = {}
#-- Quality -------------------------------------------------
quality = OriginQuality(
associated_phase_count = db.get('nass'),
used_phase_count = db.get('ndef'),
standard_error = db.get('sdobs'),
)
origin.quality = quality
#-- Solution Uncertainties ----------------------------------
# in CSS the ellipse is projected onto the horizontal plane
# using the covariance matrix
uncertainty = OriginUncertainty()
a = _km2m(db.get('smajax'))
b = _km2m(db.get('sminax'))
s = db.get('strike')
dep_u = _km2m(db.get('sdepth'))
time_u = db.get('stime')
uncertainty.max_horizontal_uncertainty = a
uncertainty.min_horizontal_uncertainty = b
uncertainty.azimuth_max_horizontal_uncertainty = s
uncertainty.horizontal_uncertainty = a
uncertainty.preferred_description = "horizontal uncertainty"
if db.get('conf') is not None:
uncertainty.confidence_level = db.get('conf') * 100.
if uncertainty.horizontal_uncertainty is not None:
origin.origin_uncertainty = uncertainty
#-- Parameter Uncertainties ---------------------------------
if all([a, b, s]):
n, e = _get_NE_on_ellipse(a, b, s)
lat_u = _m2deg_lat(n)
lon_u = _m2deg_lon(e, lat=origin.latitude)
origin.latitude_errors = {'uncertainty': lat_u}
origin.longitude_errors = {'uncertainty': lon_u}
if dep_u:
origin.depth_errors = {'uncertainty': dep_u}
if time_u:
origin.time_errors = {'uncertainty': time_u}
#-- Analyst-determined Status -------------------------------
posted_author = _str(db.get('auth'))
mode, status = self.get_event_status(posted_author)
origin.evaluation_mode = mode
origin.evaluation_status = status
# Save etype per origin due to schema differences...
css_etype = _str(db.get('etype'))
# Compatible with future patch rename "_namespace" -> "namespace"
origin.extra['etype'] = {
'value': css_etype,
'namespace': CSS_NAMESPACE
}
origin.creation_info = CreationInfo(
creation_time = _utc(db.get('lddate')),
agency_id = self.agency,
version = db.get('orid'),
author = posted_author,
)
origin.resource_id = self._rid(origin)
return origin
Example 10: _parseRecordDp
# Required module import: from obspy.core.event import Origin [as alias]
# Or: from obspy.core.event.Origin import creation_info [as alias]
def _parseRecordDp(self, line, event):
"""
Parses the 'source parameter data - primary' record Dp
"""
source_contributor = line[2:6].strip()
computation_type = line[6]
exponent = self._intZero(line[7])
scale = math.pow(10, exponent)
centroid_origin_time = line[8:14] + "." + line[14]
orig_time_stderr = line[15:17]
if orig_time_stderr == "FX":
orig_time_stderr = "Fixed"
else:
orig_time_stderr = self._floatWithFormat(orig_time_stderr, "2.1", scale)
centroid_latitude = self._floatWithFormat(line[17:21], "4.2")
lat_type = line[21]
if centroid_latitude is not None:
centroid_latitude *= self._coordinateSign(lat_type)
lat_stderr = line[22:25]
if lat_stderr == "FX":
lat_stderr = "Fixed"
else:
lat_stderr = self._floatWithFormat(lat_stderr, "3.2", scale)
centroid_longitude = self._floatWithFormat(line[25:30], "5.2")
lon_type = line[30]
if centroid_longitude is not None:
centroid_longitude *= self._coordinateSign(lon_type)
lon_stderr = line[31:34]
if lon_stderr == "FX":
lon_stderr = "Fixed"
else:
lon_stderr = self._floatWithFormat(lon_stderr, "3.2", scale)
centroid_depth = self._floatWithFormat(line[34:38], "4.1")
depth_stderr = line[38:40]
if depth_stderr == "FX" or depth_stderr == "BD":
depth_stderr = "Fixed"
else:
depth_stderr = self._floatWithFormat(depth_stderr, "2.1", scale)
station_number = self._intZero(line[40:43])
component_number = self._intZero(line[43:46])
station_number2 = self._intZero(line[46:48])
component_number2 = self._intZero(line[48:51])
# unused: half_duration = self._floatWithFormat(line[51:54], '3.1')
moment = self._floatWithFormat(line[54:56], "2.1")
moment_stderr = self._floatWithFormat(line[56:58], "2.1")
moment_exponent = self._int(line[58:60])
if (moment is not None) and (moment_exponent is not None):
moment *= math.pow(10, moment_exponent)
if (moment_stderr is not None) and (moment_exponent is not None):
moment_stderr *= math.pow(10, moment_exponent)
evid = event.resource_id.id.split("/")[-1]
# Create a new origin only if centroid time is defined:
origin = None
if centroid_origin_time.strip() != ".":
origin = Origin()
res_id = "/".join(
(res_id_prefix, "origin", evid, source_contributor.lower(), "mw" + computation_type.lower())
)
origin.resource_id = ResourceIdentifier(id=res_id)
origin.creation_info = CreationInfo(agency_id=source_contributor)
date = event.origins[0].time.strftime("%Y%m%d")
origin.time = UTCDateTime(date + centroid_origin_time)
# Check if centroid time is on the next day:
if origin.time < event.origins[0].time:
origin.time += timedelta(days=1)
self._storeUncertainty(origin.time_errors, orig_time_stderr)
origin.latitude = centroid_latitude
origin.longitude = centroid_longitude
origin.depth = centroid_depth * 1000
if lat_stderr == "Fixed" and lon_stderr == "Fixed":
origin.epicenter_fixed = True
else:
self._storeUncertainty(origin.latitude_errors, self._latErrToDeg(lat_stderr))
self._storeUncertainty(origin.longitude_errors, self._lonErrToDeg(lon_stderr, origin.latitude))
if depth_stderr == "Fixed":
origin.depth_type = "operator assigned"
else:
origin.depth_type = "from location"
self._storeUncertainty(origin.depth_errors, depth_stderr, scale=1000)
quality = OriginQuality()
quality.used_station_count = station_number + station_number2
quality.used_phase_count = component_number + component_number2
origin.quality = quality
origin.type = "centroid"
event.origins.append(origin)
focal_mechanism = FocalMechanism()
res_id = "/".join(
(res_id_prefix, "focalmechanism", evid, source_contributor.lower(), "mw" + computation_type.lower())
)
focal_mechanism.resource_id = ResourceIdentifier(id=res_id)
focal_mechanism.creation_info = CreationInfo(agency_id=source_contributor)
moment_tensor = MomentTensor()
if origin is not None:
moment_tensor.derived_origin_id = origin.resource_id
else:
# this is required for QuakeML validation:
res_id = "/".join((res_id_prefix, "no-origin"))
moment_tensor.derived_origin_id = ResourceIdentifier(id=res_id)
for mag in event.magnitudes:
#......... (part of the code is omitted here) .........