This article collects and summarizes typical usage examples of the Python obspy.core.event.Origin class. If you have been wondering what exactly the Origin class does and how to use it, the curated examples below may help.
The following presents 15 code examples of the Origin class, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code examples.
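Before the examples, here is a minimal, self-contained sketch of the class in isolation to orient readers; all values below are invented for illustration.

from obspy import UTCDateTime
from obspy.core.event import Catalog, Event, Origin

# Build a bare origin; ObsPy validates values on assignment.
origin = Origin()
origin.time = UTCDateTime("2020-01-01T00:00:00")
origin.latitude = 47.37    # degrees north
origin.longitude = 8.55    # degrees east
origin.depth = 10000.0     # meters below sea level, positive down
origin.depth_type = 'from location'

# Attach the origin to an event inside a catalog.
event = Event(origins=[origin])
catalog = Catalog(events=[event])
print(catalog)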
Example 1: _parse_record_ah
def _parse_record_ah(self, line, event):
"""
Parses the 'additional hypocenter' record AH
"""
date = line[2:10]
time = line[11:20]
# unused: hypocenter_quality = line[20]
latitude = self._float(line[21:27])
lat_type = line[27]
longitude = self._float(line[29:36])
lon_type = line[36]
# unused: preliminary_flag = line[37]
depth = self._float(line[38:43])
# unused: depth_quality = line[43]
standard_dev = self._float_unused(line[44:48])
station_number = self._int_unused(line[48:51])
phase_number = self._int_unused(line[51:55])
source_code = line[56:60].strip()
evid = event.resource_id.id.split('/')[-1]
origin = Origin()
res_id = '/'.join((res_id_prefix, 'origin', evid, source_code.lower()))
origin.resource_id = ResourceIdentifier(id=res_id)
origin.creation_info = CreationInfo(agency_id=source_code)
origin.time = UTCDateTime(date + time)
origin.latitude = latitude * self._coordinate_sign(lat_type)
origin.longitude = longitude * self._coordinate_sign(lon_type)
origin.depth = depth * 1000
origin.depth_type = 'from location'
origin.quality = OriginQuality()
origin.quality.standard_error = standard_dev
origin.quality.used_station_count = station_number
origin.quality.used_phase_count = phase_number
origin.origin_type = 'hypocenter'
event.origins.append(origin)
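Example 1 leans on private parser helpers (`_float`, `_coordinate_sign`, etc.) that are not reproduced here. As a hedged reconstruction of what `_coordinate_sign` presumably does, flipping the sign for southern/western hemisphere letters, consider:

def _coordinate_sign(self, hemisphere_letter):
    # Hypothetical sketch: 'S' (south) and 'W' (west) negate the value.
    if hemisphere_letter in ('S', 'W'):
        return -1
    return 1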
Example 2: test_setting_nans_or_inf_fails
def test_setting_nans_or_inf_fails(self):
"""
        Tests that setting NaNs or infs as floating point values fails.
"""
o = Origin()
with self.assertRaises(ValueError) as e:
o.latitude = float('nan')
self.assertEqual(
e.exception.args[0],
"On Origin object: Value 'nan' for 'latitude' is not a finite "
"floating point value.")
with self.assertRaises(ValueError) as e:
o.latitude = float('inf')
self.assertEqual(
e.exception.args[0],
"On Origin object: Value 'inf' for 'latitude' is not a finite "
"floating point value.")
with self.assertRaises(ValueError) as e:
o.latitude = float('-inf')
self.assertEqual(
e.exception.args[0],
"On Origin object: Value '-inf' for 'latitude' is "
"not a finite floating point value.")
Example 3: test_multipleOrigins
def test_multipleOrigins(self):
"""
Parameters of multiple origins should not interfere with each other.
"""
origin = Origin()
origin.resource_id = 'smi:ch.ethz.sed/origin/37465'
origin.time = UTCDateTime(0)
origin.latitude = 12
origin.latitude_errors.confidence_level = 95
origin.longitude = 42
origin.depth_type = 'from location'
self.assertEqual(
origin.resource_id,
ResourceIdentifier(id='smi:ch.ethz.sed/origin/37465'))
self.assertEqual(origin.latitude, 12)
self.assertEqual(origin.latitude_errors.confidence_level, 95)
self.assertEqual(origin.latitude_errors.uncertainty, None)
self.assertEqual(origin.longitude, 42)
origin2 = Origin(force_resource_id=False)
origin2.latitude = 13.4
self.assertEqual(origin2.depth_type, None)
self.assertEqual(origin2.resource_id, None)
self.assertEqual(origin2.latitude, 13.4)
self.assertEqual(origin2.latitude_errors.confidence_level, None)
self.assertEqual(origin2.longitude, None)
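The second half of this test hinges on resource-identifier handling: a plain Origin() auto-generates a resource_id, while force_resource_id=False leaves it unset. A two-line sketch:

from obspy.core.event import Origin

print(Origin().resource_id)                         # auto-generated smi:... id
print(Origin(force_resource_id=False).resource_id)  # None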
Example 4: _deserialize
def _deserialize(self, zmap_str):
catalog = Catalog()
for row in zmap_str.split('\n'):
if len(row) == 0:
continue
origin = Origin()
event = Event(origins=[origin])
event.preferred_origin_id = origin.resource_id.id
# Begin value extraction
columns = row.split('\t', 13)[:13] # ignore extra columns
values = dict(zip(_STD_ZMAP_COLUMNS + _EXT_ZMAP_COLUMNS, columns))
# Extract origin
origin.longitude = self._str2num(values.get('lon'))
origin.latitude = self._str2num(values.get('lat'))
depth = self._str2num(values.get('depth'))
if depth is not None:
origin.depth = depth * 1000.0
z_err = self._str2num(values.get('z_err'))
if z_err is not None:
origin.depth_errors.uncertainty = z_err * 1000.0
h_err = self._str2num(values.get('h_err'))
if h_err is not None:
ou = OriginUncertainty()
ou.horizontal_uncertainty = h_err
ou.preferred_description = 'horizontal uncertainty'
origin.origin_uncertainty = ou
year = self._str2num(values.get('year'))
if year is not None:
t_fields = ['year', 'month', 'day', 'hour', 'minute', 'second']
comps = [self._str2num(values.get(f)) for f in t_fields]
if year % 1 != 0:
origin.time = self._decyear2utc(year)
elif any(v > 0 for v in comps[1:]):
# no seconds involved
if len(comps) < 6:
utc_args = [int(v) for v in comps if v is not None]
# we also have to handle seconds
else:
utc_args = [int(v) if v is not None else 0
for v in comps[:-1]]
# just leave float seconds as is
utc_args.append(comps[-1])
origin.time = UTCDateTime(*utc_args)
mag = self._str2num(values.get('mag'))
# Extract magnitude
if mag is not None:
magnitude = Magnitude(mag=mag)
m_err = self._str2num(values.get('m_err'))
magnitude.mag_errors.uncertainty = m_err
event.magnitudes.append(magnitude)
event.preferred_magnitude_id = magnitude.resource_id.id
event.scope_resource_ids()
catalog.append(event)
return catalog
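For context, a hedged usage sketch of the ZMAP reader through the public API, assuming the standard column order lon, lat, year, month, day, mag, depth, hour, minute, second (the row below is invented):

import io
from obspy import read_events

zmap_row = "8.55\t47.37\t2020\t1\t1\t3.4\t10.0\t12\t30\t45.1\n"
catalog = read_events(io.BytesIO(zmap_row.encode()), format="ZMAP")
origin = catalog[0].origins[0]
print(origin.time, origin.depth)  # depth converted from km to m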
Example 5: event_to_quakeml
def event_to_quakeml(event, filename):
"""
Write one of those events to QuakeML.
"""
# Create all objects.
cat = Catalog()
ev = Event()
org = Origin()
mag = Magnitude()
fm = FocalMechanism()
mt = MomentTensor()
t = Tensor()
# Link them together.
cat.append(ev)
ev.origins.append(org)
ev.magnitudes.append(mag)
ev.focal_mechanisms.append(fm)
fm.moment_tensor = mt
mt.tensor = t
# Fill values
ev.resource_id = "smi:inversion/%s" % str(event["identifier"])
org.time = event["time"]
org.longitude = event["longitude"]
org.latitude = event["latitude"]
org.depth = event["depth_in_km"] * 1000
mag.mag = event["Mw"]
mag.magnitude_type = "Mw"
t.m_rr = event["Mrr"]
t.m_tt = event["Mpp"]
t.m_pp = event["Mtt"]
t.m_rt = event["Mrt"]
t.m_rp = event["Mrp"]
t.m_tp = event["Mtp"]
cat.write(filename, format="quakeml")
Example 6: _deserialize
def _deserialize(self, zmap_str):
catalog = Catalog()
for row in zmap_str.split("\n"):
if len(row) == 0:
continue
origin = Origin()
event = Event(origins=[origin])
event.preferred_origin_id = origin.resource_id.id
# Begin value extraction
columns = row.split("\t", 13)[:13] # ignore extra columns
values = dict(zip(_STD_ZMAP_COLUMNS + _EXT_ZMAP_COLUMNS, columns))
# Extract origin
origin.longitude = self._str2num(values.get("lon"))
origin.latitude = self._str2num(values.get("lat"))
depth = self._str2num(values.get("depth"))
if depth is not None:
origin.depth = depth * 1000.0
z_err = self._str2num(values.get("z_err"))
if z_err is not None:
origin.depth_errors.uncertainty = z_err * 1000.0
h_err = self._str2num(values.get("h_err"))
if h_err is not None:
ou = OriginUncertainty()
ou.horizontal_uncertainty = h_err
ou.preferred_description = "horizontal uncertainty"
origin.origin_uncertainty = ou
year = self._str2num(values.get("year"))
if year is not None:
t_fields = ["year", "month", "day", "hour", "minute", "second"]
comps = [self._str2num(values.get(f)) for f in t_fields]
if year % 1 != 0:
origin.time = self._decyear2utc(year)
elif any(v > 0 for v in comps[1:]):
utc_args = [int(v) for v in comps if v is not None]
origin.time = UTCDateTime(*utc_args)
mag = self._str2num(values.get("mag"))
# Extract magnitude
if mag is not None:
magnitude = Magnitude(mag=mag)
m_err = self._str2num(values.get("m_err"))
magnitude.mag_errors.uncertainty = m_err
event.magnitudes.append(magnitude)
event.preferred_magnitude_id = magnitude.resource_id.id
catalog.append(event)
return catalog
Example 7: read_nlloc_hyp
def read_nlloc_hyp(filename, coordinate_converter=None, picks=None, **kwargs):
"""
Reads a NonLinLoc Hypocenter-Phase file to a
:class:`~obspy.core.event.Catalog` object.
.. note::
Coordinate conversion from coordinate frame of NonLinLoc model files /
location run to WGS84 has to be specified explicitly by the user if
necessary.
.. note::
An example can be found on the :mod:`~obspy.nlloc` submodule front
page in the documentation pages.
:param filename: File or file-like object in text mode.
:type coordinate_converter: func
:param coordinate_converter: Function to convert (x, y, z)
coordinates of NonLinLoc output to geographical coordinates and depth
in meters (longitude, latitude, depth in kilometers).
If left `None` NonLinLoc (x, y, z) output is left unchanged (e.g. if
it is in geographical coordinates already like for NonLinLoc in
global mode).
The function should accept three arguments x, y, z and return a
tuple of three values (lon, lat, depth in kilometers).
:type picks: list of :class:`~obspy.core.event.Pick`
:param picks: Original picks used to generate the NonLinLoc location.
If provided, the output event will include the original picks and the
arrivals in the output origin will link to them correctly (with their
`pick_id` attribute). If not provided, the output event will include
(the rather basic) pick information that can be reconstructed from the
NonLinLoc hypocenter-phase file.
:rtype: :class:`~obspy.core.event.Catalog`
"""
if not hasattr(filename, "read"):
        # Check if it exists, otherwise assume it's a string.
try:
with open(filename, "rt") as fh:
data = fh.read()
except:
try:
data = filename.decode()
except:
data = str(filename)
data = data.strip()
else:
data = filename.read()
if hasattr(data, "decode"):
data = data.decode()
lines = data.splitlines()
# remember picks originally used in location, if provided
original_picks = picks
if original_picks is None:
original_picks = []
# determine indices of block start/end of the NLLOC output file
indices_hyp = [None, None]
indices_phases = [None, None]
for i, line in enumerate(lines):
if line.startswith("NLLOC "):
indices_hyp[0] = i
elif line.startswith("END_NLLOC"):
indices_hyp[1] = i
elif line.startswith("PHASE "):
indices_phases[0] = i
elif line.startswith("END_PHASE"):
indices_phases[1] = i
if any([i is None for i in indices_hyp]):
msg = ("NLLOC HYP file seems corrupt,"
" could not detect 'NLLOC' and 'END_NLLOC' lines.")
raise RuntimeError(msg)
# strip any other lines around NLLOC block
lines = lines[indices_hyp[0]:indices_hyp[1]]
# extract PHASES lines (if any)
if any(indices_phases):
if not all(indices_phases):
msg = ("NLLOC HYP file seems corrupt, 'PHASE' block is corrupt.")
raise RuntimeError(msg)
i1, i2 = indices_phases
lines, phases_lines = lines[:i1] + lines[i2 + 1:], lines[i1 + 1:i2]
else:
phases_lines = []
lines = dict([line.split(None, 1) for line in lines])
line = lines["SIGNATURE"]
line = line.rstrip().split('"')[1]
signature, version, date, time = line.rsplit(" ", 3)
creation_time = UTCDateTime().strptime(date + time, str("%d%b%Y%Hh%Mm%S"))
# maximum likelihood origin location info line
line = lines["HYPOCENTER"]
x, y, z = map(float, line.split()[1:7:2])
if coordinate_converter:
#......... some code omitted here .........
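To illustrate the coordinate_converter parameter, here is a hypothetical converter for a NonLinLoc run on a local Cartesian grid (grid origin and scale factors invented); per the docstring it must accept x, y, z and return (lon, lat, depth in km):

from obspy import read_events

def my_converter(x, y, z):
    # Hypothetical local grid anchored at 8.0 E / 46.0 N, x/y in km.
    lon = 8.0 + x / 78.0    # approx. km per degree of longitude at 46 N
    lat = 46.0 + y / 111.0  # approx. km per degree of latitude
    return lon, lat, z      # z is already depth in km

cat = read_events("nlloc_run.loc.hyp", format="NLLOC_HYP",
                  coordinate_converter=my_converter)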
Example 8: __toOrigin
def __toOrigin(parser, origin_el):
"""
Parses a given origin etree element.
:type parser: :class:`~obspy.core.util.xmlwrapper.XMLParser`
:param parser: Open XMLParser object.
:type origin_el: etree.element
:param origin_el: origin element to be parsed.
:return: A ObsPy :class:`~obspy.core.event.Origin` object.
"""
global CURRENT_TYPE
origin = Origin()
origin.resource_id = ResourceIdentifier(prefix="/".join([RESOURCE_ROOT, "origin"]))
# I guess setting the program used as the method id is fine.
origin.method_id = "%s/location_method/%s/1" % (RESOURCE_ROOT,
parser.xpath2obj('program', origin_el))
if str(origin.method_id).lower().endswith("none"):
origin.method_id = None
# Standard parameters.
origin.time, origin.time_errors = \
__toTimeQuantity(parser, origin_el, "time")
origin.latitude, origin_latitude_error = \
__toFloatQuantity(parser, origin_el, "latitude")
origin.longitude, origin_longitude_error = \
__toFloatQuantity(parser, origin_el, "longitude")
origin.depth, origin.depth_errors = \
__toFloatQuantity(parser, origin_el, "depth")
if origin_longitude_error:
origin_longitude_error = origin_longitude_error["uncertainty"]
if origin_latitude_error:
origin_latitude_error = origin_latitude_error["uncertainty"]
# Figure out the depth type.
depth_type = parser.xpath2obj("depth_type", origin_el)
# Map Seishub specific depth type to the QuakeML depth type.
if depth_type == "from location program":
depth_type = "from location"
if depth_type is not None:
origin.depth_type = depth_type
# XXX: CHECK DEPTH ORIENTATION!!
if CURRENT_TYPE == "seiscomp3":
origin.depth *= 1000
if origin.depth_errors.uncertainty:
origin.depth_errors.uncertainty *= 1000
else:
# Convert to m.
origin.depth *= -1000
if origin.depth_errors.uncertainty:
origin.depth_errors.uncertainty *= 1000
# Earth model.
earth_mod = parser.xpath2obj('earth_mod', origin_el, str)
if earth_mod:
earth_mod = earth_mod.split()
earth_mod = ",".join(earth_mod)
origin.earth_model_id = "%s/earth_model/%s/1" % (RESOURCE_ROOT,
earth_mod)
if (origin_latitude_error is None or origin_longitude_error is None) and \
CURRENT_TYPE not in ["seiscomp3", "toni"]:
print "AAAAAAAAAAAAA"
raise Exception
if origin_latitude_error and origin_latitude_error:
if CURRENT_TYPE in ["baynet", "obspyck"]:
uncert = OriginUncertainty()
if origin_latitude_error > origin_longitude_error:
uncert.azimuth_max_horizontal_uncertainty = 0
else:
uncert.azimuth_max_horizontal_uncertainty = 90
uncert.min_horizontal_uncertainty, \
uncert.max_horizontal_uncertainty = \
sorted([origin_longitude_error, origin_latitude_error])
uncert.min_horizontal_uncertainty *= 1000.0
uncert.max_horizontal_uncertainty *= 1000.0
uncert.preferred_description = "uncertainty ellipse"
origin.origin_uncertainty = uncert
elif CURRENT_TYPE == "earthworm":
uncert = OriginUncertainty()
uncert.horizontal_uncertainty = origin_latitude_error
uncert.horizontal_uncertainty *= 1000.0
uncert.preferred_description = "horizontal uncertainty"
origin.origin_uncertainty = uncert
elif CURRENT_TYPE in ["seiscomp3", "toni"]:
pass
else:
raise Exception
# Parse the OriginQuality if applicable.
if not origin_el.xpath("originQuality"):
return origin
origin_quality_el = origin_el.xpath("originQuality")[0]
#......... some code omitted here .........
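Stripped of the SeisHub specifics, the uncertainty-ellipse branch above reduces to a small, reusable recipe; a sketch with invented kilometre errors:

from obspy.core.event import Origin, OriginUncertainty

lat_err_km, lon_err_km = 1.2, 0.8  # invented values
uncert = OriginUncertainty()
uncert.min_horizontal_uncertainty, uncert.max_horizontal_uncertainty = \
    sorted(v * 1000.0 for v in (lat_err_km, lon_err_km))  # km -> m
# Ellipse elongated N-S when the latitude error dominates, E-W otherwise.
uncert.azimuth_max_horizontal_uncertainty = 0 if lat_err_km > lon_err_km else 90
uncert.preferred_description = "uncertainty ellipse"

origin = Origin()
origin.origin_uncertainty = uncert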
Example 9: _parse_first_line_origin
def _parse_first_line_origin(self, line, event, magnitudes):
"""
Parse the first line of origin data.
:type line: str
:param line: Line to parse.
:type event: :class:`~obspy.core.event.event.Event`
:param event: Event of the origin.
:type magnitudes: list of
:class:`~obspy.core.event.magnitude.Magnitude`
:param magnitudes: Store magnitudes in a list to keep
their positions.
:rtype: :class:`~obspy.core.event.origin.Origin`,
:class:`~obspy.core.event.resourceid.ResourceIdentifier`
:returns: Parsed origin or None, resource identifier of the
origin.
"""
magnitude_types = []
magnitude_values = []
magnitude_station_counts = []
fields = self.fields['line_1']
time_origin = line[fields['time']].strip()
time_fixed_flag = line[fields['time_fixf']].strip()
latitude = line[fields['lat']].strip()
longitude = line[fields['lon']].strip()
epicenter_fixed_flag = line[fields['epicenter_fixf']].strip()
depth = line[fields['depth']].strip()
depth_fixed_flag = line[fields['depth_fixf']].strip()
phase_count = line[fields['n_def']].strip()
station_count = line[fields['n_sta']].strip()
azimuthal_gap = line[fields['gap']].strip()
magnitude_types.append(line[fields['mag_type_1']].strip())
magnitude_values.append(line[fields['mag_1']].strip())
magnitude_station_counts.append(line[fields['mag_n_sta_1']].strip())
magnitude_types.append(line[fields['mag_type_2']].strip())
magnitude_values.append(line[fields['mag_2']].strip())
magnitude_station_counts.append(line[fields['mag_n_sta_2']].strip())
magnitude_types.append(line[fields['mag_type_3']].strip())
magnitude_values.append(line[fields['mag_3']].strip())
magnitude_station_counts.append(line[fields['mag_n_sta_3']].strip())
author = line[fields['author']].strip()
origin_id = line[fields['id']].strip()
origin = Origin()
origin.quality = OriginQuality()
try:
origin.time = UTCDateTime(time_origin.replace('/', '-'))
origin.latitude = float(latitude)
origin.longitude = float(longitude)
except (TypeError, ValueError):
self._warn('Missing origin data, skipping event')
return None, None
origin.time_fixed = time_fixed_flag.lower() == 'f'
origin.epicenter_fixed = epicenter_fixed_flag.lower() == 'f'
try:
# Convert value from km to m
origin.depth = float(depth) * 1000
except ValueError:
pass
try:
origin.depth_type = DEPTH_TYPES[depth_fixed_flag]
except KeyError:
origin.depth_type = OriginDepthType('from location')
try:
origin.quality.used_phase_count = int(phase_count)
origin.quality.associated_phase_count = int(phase_count)
except ValueError:
pass
try:
origin.quality.used_station_count = int(station_count)
origin.quality.associated_station_count = int(station_count)
except ValueError:
pass
try:
origin.quality.azimuthal_gap = float(azimuthal_gap)
except ValueError:
pass
self.author = author
origin.creation_info = self._get_creation_info()
public_id = "origin/%s" % origin_id
origin_res_id = self._get_res_id(public_id)
for i in range(3):
try:
magnitude = Magnitude()
magnitude.creation_info = self._get_creation_info()
magnitude.magnitude_type = magnitude_types[i]
magnitude.mag = float(magnitude_values[i])
magnitude.station_count = int(magnitude_station_counts[i])
magnitude.origin_id = origin_res_id
magnitudes.append(magnitude)
event.magnitudes.append(magnitude)
except ValueError:
#......... some code omitted here .........
Example 10: _parse_record_dp
def _parse_record_dp(self, line, event):
"""
Parses the 'source parameter data - primary' record Dp
"""
source_contributor = line[2:6].strip()
computation_type = line[6]
exponent = self._int_zero(line[7])
scale = math.pow(10, exponent)
centroid_origin_time = line[8:14] + '.' + line[14]
orig_time_stderr = line[15:17]
if orig_time_stderr == 'FX':
orig_time_stderr = 'Fixed'
else:
orig_time_stderr = \
self._float_with_format(orig_time_stderr, '2.1', scale)
centroid_latitude = self._float_with_format(line[17:21], '4.2')
lat_type = line[21]
if centroid_latitude is not None:
centroid_latitude *= self._coordinate_sign(lat_type)
lat_stderr = line[22:25]
if lat_stderr == 'FX':
lat_stderr = 'Fixed'
else:
lat_stderr = self._float_with_format(lat_stderr, '3.2', scale)
centroid_longitude = self._float_with_format(line[25:30], '5.2')
lon_type = line[30]
if centroid_longitude is not None:
centroid_longitude *= self._coordinate_sign(lon_type)
lon_stderr = line[31:34]
if lon_stderr == 'FX':
lon_stderr = 'Fixed'
else:
lon_stderr = self._float_with_format(lon_stderr, '3.2', scale)
centroid_depth = self._float_with_format(line[34:38], '4.1')
depth_stderr = line[38:40]
if depth_stderr == 'FX' or depth_stderr == 'BD':
depth_stderr = 'Fixed'
else:
depth_stderr = self._float_with_format(depth_stderr, '2.1', scale)
station_number = self._int_zero(line[40:43])
component_number = self._int_zero(line[43:46])
station_number2 = self._int_zero(line[46:48])
component_number2 = self._int_zero(line[48:51])
# unused: half_duration = self._float_with_format(line[51:54], '3.1')
moment = self._float_with_format(line[54:56], '2.1')
moment_stderr = self._float_with_format(line[56:58], '2.1')
moment_exponent = self._int(line[58:60])
if (moment is not None) and (moment_exponent is not None):
moment *= math.pow(10, moment_exponent)
if (moment_stderr is not None) and (moment_exponent is not None):
moment_stderr *= math.pow(10, moment_exponent)
evid = event.resource_id.id.split('/')[-1]
# Create a new origin only if centroid time is defined:
origin = None
if centroid_origin_time.strip() != '.':
origin = Origin()
res_id = '/'.join((res_id_prefix, 'origin',
evid, source_contributor.lower(),
'mw' + computation_type.lower()))
origin.resource_id = ResourceIdentifier(id=res_id)
origin.creation_info = \
CreationInfo(agency_id=source_contributor)
date = event.origins[0].time.strftime('%Y%m%d')
origin.time = UTCDateTime(date + centroid_origin_time)
# Check if centroid time is on the next day:
if origin.time < event.origins[0].time:
origin.time += timedelta(days=1)
self._store_uncertainty(origin.time_errors, orig_time_stderr)
origin.latitude = centroid_latitude
origin.longitude = centroid_longitude
origin.depth = centroid_depth * 1000
if lat_stderr == 'Fixed' and lon_stderr == 'Fixed':
origin.epicenter_fixed = True
else:
self._store_uncertainty(origin.latitude_errors,
self._lat_err_to_deg(lat_stderr))
self._store_uncertainty(origin.longitude_errors,
self._lon_err_to_deg(lon_stderr,
origin.latitude))
if depth_stderr == 'Fixed':
origin.depth_type = 'operator assigned'
else:
origin.depth_type = 'from location'
self._store_uncertainty(origin.depth_errors,
depth_stderr, scale=1000)
quality = OriginQuality()
quality.used_station_count = \
station_number + station_number2
quality.used_phase_count = \
component_number + component_number2
origin.quality = quality
origin.origin_type = 'centroid'
event.origins.append(origin)
focal_mechanism = FocalMechanism()
res_id = '/'.join((res_id_prefix, 'focalmechanism',
evid, source_contributor.lower(),
'mw' + computation_type.lower()))
focal_mechanism.resource_id = ResourceIdentifier(id=res_id)
focal_mechanism.creation_info = \
#......... some code omitted here .........
Example 11: _read_ndk
def _read_ndk(filename, *args, **kwargs): # @UnusedVariable
"""
Reads an NDK file to a :class:`~obspy.core.event.Catalog` object.
:param filename: File or file-like object in text mode.
"""
# Read the whole file at once. While an iterator would be more efficient
# the largest NDK file out in the wild is 13.7 MB so it does not matter
# much.
if not hasattr(filename, "read"):
        # Check if it exists, otherwise assume it's a string.
try:
with open(filename, "rt") as fh:
data = fh.read()
except:
try:
data = filename.decode()
except:
data = str(filename)
data = data.strip()
else:
data = filename.read()
if hasattr(data, "decode"):
data = data.decode()
# Create iterator that yields lines.
def lines_iter():
prev_line = -1
while True:
next_line = data.find("\n", prev_line + 1)
if next_line < 0:
break
yield data[prev_line + 1: next_line]
prev_line = next_line
if len(data) > prev_line + 1:
yield data[prev_line + 1:]
# Use one Flinn Engdahl object for all region determinations.
fe = FlinnEngdahl()
cat = Catalog(resource_id=_get_resource_id("catalog", str(uuid.uuid4())))
# Loop over 5 lines at once.
for _i, lines in enumerate(itertools.zip_longest(*[lines_iter()] * 5)):
if None in lines:
msg = "Skipped last %i lines. Not a multiple of 5 lines." % (
lines.count(None))
warnings.warn(msg, ObsPyNDKWarning)
continue
# Parse the lines to a human readable dictionary.
try:
record = _read_lines(*lines)
except (ValueError, ObsPyNDKException):
exc = traceback.format_exc()
msg = (
"Could not parse event %i (faulty file?). Will be "
"skipped. Lines of the event:\n"
"\t%s\n"
"%s") % (_i + 1, "\n\t".join(lines), exc)
warnings.warn(msg, ObsPyNDKWarning)
continue
# Use one creation info for essentially every item.
creation_info = CreationInfo(
agency_id="GCMT",
version=record["version_code"]
)
# Use the ObsPy Flinn Engdahl region determiner as the region in the
# NDK files is oftentimes trimmed.
region = fe.get_region(record["centroid_longitude"],
record["centroid_latitude"])
# Create an event object.
event = Event(
force_resource_id=False,
event_type="earthquake",
event_type_certainty="known",
event_descriptions=[
EventDescription(text=region, type="Flinn-Engdahl region"),
EventDescription(text=record["cmt_event_name"],
type="earthquake name")
]
)
# Assemble the time for the reference origin.
try:
time = _parse_date_time(record["date"], record["time"])
except ObsPyNDKException:
msg = ("Invalid time in event %i. '%s' and '%s' cannot be "
"assembled to a valid time. Event will be skipped.") % \
(_i + 1, record["date"], record["time"])
warnings.warn(msg, ObsPyNDKWarning)
continue
# Create two origins, one with the reference latitude/longitude and
# one with the centroidal values.
ref_origin = Origin(
force_resource_id=False,
time=time,
#......... some code omitted here .........
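The loop header uses a classic chunking idiom: zip_longest(*[iterator] * 5) passes the same iterator object to zip five times, so each step consumes five consecutive lines, padding the final short group with None (which the code then detects and warns about). In isolation:

import itertools

lines = iter(["a", "b", "c", "d", "e", "f", "g"])
for chunk in itertools.zip_longest(*[lines] * 5):
    print(chunk)
# ('a', 'b', 'c', 'd', 'e')
# ('f', 'g', None, None, None)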
Example 12: par2quakeml
def par2quakeml(Par_filename, QuakeML_filename, rotation_axis=[0.0, 1.0, 0.0],
rotation_angle=-57.5, origin_time="2000-01-01 00:00:00.0",
event_type="other event"):
# initialise event
ev = Event()
# open and read Par file
fid = open(Par_filename, 'r')
fid.readline()
fid.readline()
fid.readline()
fid.readline()
lat_old = 90.0 - float(fid.readline().strip().split()[0])
lon_old = float(fid.readline().strip().split()[0])
depth = float(fid.readline().strip().split()[0])
fid.readline()
Mtt_old = float(fid.readline().strip().split()[0])
Mpp_old = float(fid.readline().strip().split()[0])
Mrr_old = float(fid.readline().strip().split()[0])
Mtp_old = float(fid.readline().strip().split()[0])
Mtr_old = float(fid.readline().strip().split()[0])
Mpr_old = float(fid.readline().strip().split()[0])
# rotate event into physical domain
lat, lon = rot.rotate_lat_lon(lat_old, lon_old, rotation_axis,
rotation_angle)
Mrr, Mtt, Mpp, Mtr, Mpr, Mtp = rot.rotate_moment_tensor(
Mrr_old, Mtt_old, Mpp_old, Mtr_old, Mpr_old, Mtp_old, lat_old, lon_old,
rotation_axis, rotation_angle)
# populate event origin data
ev.event_type = event_type
ev_origin = Origin()
ev_origin.time = UTCDateTime(origin_time)
ev_origin.latitude = lat
ev_origin.longitude = lon
ev_origin.depth = depth
ev.origins.append(ev_origin)
    # populate event moment tensor
ev_tensor = Tensor()
ev_tensor.m_rr = Mrr
ev_tensor.m_tt = Mtt
ev_tensor.m_pp = Mpp
ev_tensor.m_rt = Mtr
ev_tensor.m_rp = Mpr
ev_tensor.m_tp = Mtp
ev_momenttensor = MomentTensor()
ev_momenttensor.tensor = ev_tensor
ev_momenttensor.scalar_moment = np.sqrt(Mrr ** 2 + Mtt ** 2 + Mpp ** 2 +
Mtr ** 2 + Mpr ** 2 + Mtp ** 2)
ev_focalmechanism = FocalMechanism()
ev_focalmechanism.moment_tensor = ev_momenttensor
ev_focalmechanism.nodal_planes = NodalPlanes().setdefault(0, 0)
ev.focal_mechanisms.append(ev_focalmechanism)
# populate event magnitude
ev_magnitude = Magnitude()
ev_magnitude.mag = 0.667 * (np.log10(ev_momenttensor.scalar_moment) - 9.1)
ev_magnitude.magnitude_type = 'Mw'
ev.magnitudes.append(ev_magnitude)
# write QuakeML file
cat = Catalog()
cat.append(ev)
cat.write(QuakeML_filename, format="quakeml")
# clean up
fid.close()
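A hypothetical invocation, assuming a ses3d-style Par file from a rotated model domain (file names and times invented):

par2quakeml("event_1.par", "event_1.xml",
            rotation_axis=[0.0, 1.0, 0.0], rotation_angle=-57.5,
            origin_time="2005-03-17 06:30:00.0",
            event_type="earthquake")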
Example 13: build
def build(self):
"""
Build an obspy moment tensor focal mech event
This makes the tensor output into an Event containing:
1) a FocalMechanism with a MomentTensor, NodalPlanes, and PrincipalAxes
2) a Magnitude of the Mw from the Tensor
Which is what we want for outputting QuakeML using
the (slightly modified) obspy code.
Input
-----
filehandle => open file OR str from filehandle.read()
Output
------
event => instance of Event() class as described above
"""
p = self.parser
event = Event(event_type='earthquake')
origin = Origin()
focal_mech = FocalMechanism()
nodal_planes = NodalPlanes()
moment_tensor = MomentTensor()
principal_ax = PrincipalAxes()
magnitude = Magnitude()
data_used = DataUsed()
creation_info = CreationInfo(agency_id='NN')
ev_mode = 'automatic'
ev_stat = 'preliminary'
evid = None
orid = None
# Parse the entire file line by line.
for n,l in enumerate(p.line):
if 'REVIEWED BY NSL STAFF' in l:
ev_mode = 'manual'
ev_stat = 'reviewed'
if 'Event ID' in l:
evid = p._id(n)
if 'Origin ID' in l:
orid = p._id(n)
if 'Ichinose' in l:
moment_tensor.category = 'regional'
if re.match(r'^\d{4}\/\d{2}\/\d{2}', l):
ev = p._event_info(n)
if 'Depth' in l:
derived_depth = p._depth(n)
if 'Mw' in l:
magnitude.mag = p._mw(n)
magnitude.magnitude_type = 'Mw'
if 'Mo' in l and 'dyne' in l:
moment_tensor.scalar_moment = p._mo(n)
if 'Percent Double Couple' in l:
moment_tensor.double_couple = p._percent(n)
if 'Percent CLVD' in l:
moment_tensor.clvd = p._percent(n)
if 'Epsilon' in l:
moment_tensor.variance = p._epsilon(n)
if 'Percent Variance Reduction' in l:
moment_tensor.variance_reduction = p._percent(n)
if 'Major Double Couple' in l and 'strike' in p.line[n+1]:
np = p._double_couple(n)
nodal_planes.nodal_plane_1 = NodalPlane(*np[0])
nodal_planes.nodal_plane_2 = NodalPlane(*np[1])
nodal_planes.preferred_plane = 1
if 'Spherical Coordinates' in l:
mt = p._mt_sphere(n)
moment_tensor.tensor = Tensor(
m_rr = mt['Mrr'],
m_tt = mt['Mtt'],
m_pp = mt['Mff'],
m_rt = mt['Mrt'],
m_rp = mt['Mrf'],
m_tp = mt['Mtf'],
)
if 'Eigenvalues and eigenvectors of the Major Double Couple' in l:
ax = p._vectors(n)
principal_ax.t_axis = Axis(ax['T']['trend'], ax['T']['plunge'], ax['T']['ev'])
principal_ax.p_axis = Axis(ax['P']['trend'], ax['P']['plunge'], ax['P']['ev'])
principal_ax.n_axis = Axis(ax['N']['trend'], ax['N']['plunge'], ax['N']['ev'])
if 'Number of Stations' in l:
data_used.station_count = p._number_of_stations(n)
if 'Maximum' in l and 'Gap' in l:
focal_mech.azimuthal_gap = p._gap(n)
if re.match(r'^Date', l):
creation_info.creation_time = p._creation_time(n)
# Creation Time
creation_info.version = orid
# Fill in magnitude values
magnitude.evaluation_mode = ev_mode
magnitude.evaluation_status = ev_stat
magnitude.creation_info = creation_info.copy()
magnitude.resource_id = self._rid(magnitude)
# Stub origin
origin.time = ev.get('time')
origin.latitude = ev.get('lat')
origin.longitude = ev.get('lon')
origin.depth = derived_depth * 1000.
origin.depth_type = "from moment tensor inversion"
#......... some code omitted here .........
Example 14: get_results
def get_results(self):
cids = []
clusters = []
results_file = "{}/{}".format(self.hypoDD_control.control_directory,
self.hypoDD_control.relocated_hypocenters_output
)
residuals_file = "{}/{}".format(self.hypoDD_control.control_directory,
self.hypoDD_control.data_residual_output
)
with open(results_file, "r") as f:
for line in f:
num = line.split()
evid = num[0]
lat = float(num[1])
lon = float(num[2])
dep = 1000 * float(num[3]) # km to m
errx = num[7]
erry = num[8]
errz = num[9]
yr = int(num[10])
mo = int(num[11])
dy = int(num[12])
hr = int(num[13])
mi = int(num[14])
sc = float(num[15])
mag = num[16]
nccp = num[17]
nccs = num[18]
nctp = num[19]
ncts = num[20]
rcc = num[21]
rct = num[22]
cid = num[23]
if cid not in cids:
cids.append(cid)
clusters.append(Cluster())
                    clusters[-1].hypoDD_id = cid
                    clusters[-1].successful_relocation = True
                    clusters[-1].catalog = Catalog()
                    clusters[-1].event_ids = []
                origin = Origin()
                isec = int(math.floor(sc))
                micsec = int((sc - isec) * 1000000)
                origin.time = UTCDateTime(yr, mo, dy, hr, mi, isec, micsec)
                origin.longitude = lon
                origin.latitude = lat
                origin.depth = dep
                origin.method_id = "hypoDD"
                # TODO (@ogalanis): Add time/location errors (when
                # appropriate). Add quality and origin_uncertainty.
                # Add arrivals.
                event = Event()
                event.creation_info = CreationInfo()
                event.creation_info.author = __package__
                event.creation_info.version = info.__version__
                event.origins = [origin]
                magnitude = Magnitude()
                magnitude.mag = float(mag)  # parsed fields are strings
                event.magnitudes = [magnitude]
                idx = cids.index(cid)
                clusters[idx].catalog.events.append(event)
                clusters[idx].event_ids.append(evid)
if self.hypoDD_control.cid != 0 :
my_list = []
clusters[0].connectedness = Connectedness()
with open(residuals_file, "r") as f:
for line in f:
num = line.split()
evid_1 = num[2]
evid_2 = num[3]
obs_type = num[4]
if obs_type == "1":
my_list = clusters[0].connectedness.cross_corr_P
elif obs_type == "2":
my_list = clusters[0].connectedness.cross_corr_S
elif obs_type == "3":
my_list = clusters[0].connectedness.catalog_P
elif obs_type == "4":
my_list = clusters[0].connectedness.catalog_S
else:
continue
in_list = [x for x in my_list if (( x[0] == evid_1 and
x[1] == evid_2
) or
( x[0] == evid_2 and
x[1] == evid_1
))]
if in_list:
for x in my_list:
if (( x[0] == evid_1 and
x[1] == evid_2
) or
( x[0] == evid_2 and
x[1] == evid_1
)):
x[2] += 1
else:
my_list.append([evid_1,evid_2,1])
return clusters
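The isec/micsec arithmetic converts float seconds into the integer seconds-plus-microseconds form of UTCDateTime's calendar constructor; recent ObsPy versions should also accept float seconds directly, so the two spellings below are presumably equivalent:

import math
from obspy import UTCDateTime

sc = 45.25
isec = int(math.floor(sc))
micsec = int((sc - isec) * 1000000)
t1 = UTCDateTime(2020, 1, 1, 12, 30, isec, micsec)
t2 = UTCDateTime(2020, 1, 1, 12, 30, sc)  # float seconds
assert t1 == t2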
Example 15: _map_join2origin
def _map_join2origin(self, db):
"""
Return an Origin instance from an dict of CSS key/values
Inputs
======
db : dict of key/values of CSS fields related to the origin (see Join)
Returns
=======
obspy.core.event.Origin
Notes
=====
Any object that supports the dict 'get' method can be passed as
input, e.g. OrderedDict, custom classes, etc.
Join
----
origin <- origerr (outer)
"""
#-- Basic location ------------------------------------------
origin = Origin()
origin.latitude = db.get('lat')
origin.longitude = db.get('lon')
origin.depth = _km2m(db.get('depth'))
origin.time = _utc(db.get('time'))
origin.extra = {}
#-- Quality -------------------------------------------------
quality = OriginQuality(
associated_phase_count = db.get('nass'),
used_phase_count = db.get('ndef'),
standard_error = db.get('sdobs'),
)
origin.quality = quality
#-- Solution Uncertainties ----------------------------------
# in CSS the ellipse is projected onto the horizontal plane
# using the covariance matrix
uncertainty = OriginUncertainty()
a = _km2m(db.get('smajax'))
b = _km2m(db.get('sminax'))
s = db.get('strike')
dep_u = _km2m(db.get('sdepth'))
time_u = db.get('stime')
uncertainty.max_horizontal_uncertainty = a
uncertainty.min_horizontal_uncertainty = b
uncertainty.azimuth_max_horizontal_uncertainty = s
uncertainty.horizontal_uncertainty = a
uncertainty.preferred_description = "horizontal uncertainty"
if db.get('conf') is not None:
uncertainty.confidence_level = db.get('conf') * 100.
if uncertainty.horizontal_uncertainty is not None:
origin.origin_uncertainty = uncertainty
#-- Parameter Uncertainties ---------------------------------
if all([a, b, s]):
n, e = _get_NE_on_ellipse(a, b, s)
lat_u = _m2deg_lat(n)
lon_u = _m2deg_lon(e, lat=origin.latitude)
origin.latitude_errors = {'uncertainty': lat_u}
origin.longitude_errors = {'uncertainty': lon_u}
if dep_u:
origin.depth_errors = {'uncertainty': dep_u}
if time_u:
origin.time_errors = {'uncertainty': time_u}
#-- Analyst-determined Status -------------------------------
posted_author = _str(db.get('auth'))
mode, status = self.get_event_status(posted_author)
origin.evaluation_mode = mode
origin.evaluation_status = status
# Save etype per origin due to schema differences...
css_etype = _str(db.get('etype'))
# Compatible with future patch rename "_namespace" -> "namespace"
origin.extra['etype'] = {
'value': css_etype,
'namespace': CSS_NAMESPACE
}
origin.creation_info = CreationInfo(
creation_time = _utc(db.get('lddate')),
agency_id = self.agency,
version = db.get('orid'),
author = posted_author,
)
origin.resource_id = self._rid(origin)
return origin
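The helper _get_NE_on_ellipse is not shown; geometrically it must project the error ellipse (semi-major a, semi-minor b, strike s clockwise from north) onto the north and east axes before converting to degrees. A plausible reconstruction as the ellipse's bounding-box half-widths:

import math

def _get_NE_on_ellipse(a, b, strike_deg):
    # Hypothetical sketch: maximum north (n) and east (e) extent of an
    # ellipse whose semi-major axis a points strike_deg from north.
    s = math.radians(strike_deg)
    n = math.hypot(a * math.cos(s), b * math.sin(s))
    e = math.hypot(a * math.sin(s), b * math.cos(s))
    return n, e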