This article collects typical usage examples of the Python method pyasdf.ASDFDataSet.add_quakeml. If you are unsure what ASDFDataSet.add_quakeml does or how to use it, the curated code examples below may help. You can also read up on the containing class, pyasdf.ASDFDataSet, for more background.
The following shows 9 code examples of the ASDFDataSet.add_quakeml method, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code examples.
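Before the collected examples, here is a minimal usage sketch of the typical call pattern. The file names "example.h5" and "quake.xml" are placeholders chosen for illustration, not files that ship with pyasdf.

from pyasdf import ASDFDataSet

ds = ASDFDataSet("example.h5")   # create (or open) an ASDF file
ds.add_quakeml("quake.xml")      # accepts a filename, open file, BytesIO, or ObsPy Catalog/Event
print(ds.events)                 # events are exposed as an obspy Catalog
del ds                           # flush and close the file

The examples below show the same call in real test and conversion code.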
Example 1: example_data_set
# Required import: from pyasdf import ASDFDataSet [as alias]
# Or: from pyasdf.ASDFDataSet import add_quakeml [as alias]
def example_data_set(tmpdir):
    """
    Fixture creating a small example file.
    """
    asdf_filename = os.path.join(tmpdir.strpath, "test.h5")
    data_path = os.path.join(data_dir, "small_sample_data_set")

    data_set = ASDFDataSet(asdf_filename)

    for filename in glob.glob(os.path.join(data_path, "*.xml")):
        if "quake.xml" in filename:
            data_set.add_quakeml(filename)
        else:
            data_set.add_stationxml(filename)

    for filename in glob.glob(os.path.join(data_path, "*.mseed")):
        data_set.add_waveforms(filename, tag="raw_recording",
                               event_id=data_set.events[0])

    # Flush and finish writing.
    del data_set

    # Return filename and path to tempdir, no need to always create a
    # new one.
    return Namespace(filename=asdf_filename, tmpdir=tmpdir.strpath)
Example 2: test_adding_event_in_various_manners
# Required import: from pyasdf import ASDFDataSet [as alias]
# Or: from pyasdf.ASDFDataSet import add_quakeml [as alias]
def test_adding_event_in_various_manners(tmpdir):
    """
    Events can be added either as filenames, open files, BytesIOs, or ObsPy
    objects. In any case, the result should be the same.
    """
    asdf_filename = os.path.join(tmpdir.strpath, "test.h5")
    data_path = os.path.join(data_dir, "small_sample_data_set")
    event_filename = os.path.join(data_path, "quake.xml")

    ref_cat = obspy.readEvents(event_filename)

    # Add as filename.
    data_set = ASDFDataSet(asdf_filename)
    assert len(data_set.events) == 0
    data_set.add_quakeml(event_filename)
    assert len(data_set.events) == 1
    assert data_set.events == ref_cat
    del data_set
    os.remove(asdf_filename)

    # Add as open file.
    data_set = ASDFDataSet(asdf_filename)
    assert len(data_set.events) == 0
    with open(event_filename, "rb") as fh:
        data_set.add_quakeml(fh)
    assert len(data_set.events) == 1
    assert data_set.events == ref_cat
    del data_set
    os.remove(asdf_filename)

    # Add as BytesIO.
    data_set = ASDFDataSet(asdf_filename)
    assert len(data_set.events) == 0
    with open(event_filename, "rb") as fh:
        temp = io.BytesIO(fh.read())
    temp.seek(0, 0)
    data_set.add_quakeml(temp)
    assert len(data_set.events) == 1
    assert data_set.events == ref_cat
    del data_set
    os.remove(asdf_filename)

    # Add as ObsPy Catalog.
    data_set = ASDFDataSet(asdf_filename)
    assert len(data_set.events) == 0
    data_set.add_quakeml(ref_cat.copy())
    assert len(data_set.events) == 1
    assert data_set.events == ref_cat
    del data_set
    os.remove(asdf_filename)

    # Add as an ObsPy event.
    data_set = ASDFDataSet(asdf_filename)
    assert len(data_set.events) == 0
    data_set.add_quakeml(ref_cat.copy()[0])
    assert len(data_set.events) == 1
    assert data_set.events == ref_cat
    del data_set
    os.remove(asdf_filename)
Example 3: test_data_set_creation
# Required import: from pyasdf import ASDFDataSet [as alias]
# Or: from pyasdf.ASDFDataSet import add_quakeml [as alias]
def test_data_set_creation(tmpdir):
    """
    Test data set creation with a small test dataset.

    It tests that the stuff that goes in is correctly saved and
    can be retrieved again.
    """
    asdf_filename = os.path.join(tmpdir.strpath, "test.h5")
    data_path = os.path.join(data_dir, "small_sample_data_set")

    data_set = ASDFDataSet(asdf_filename)

    for filename in glob.glob(os.path.join(data_path, "*.mseed")):
        data_set.add_waveforms(filename, tag="raw_recording")

    for filename in glob.glob(os.path.join(data_path, "*.xml")):
        if "quake.xml" in filename:
            data_set.add_quakeml(filename)
        else:
            data_set.add_stationxml(filename)

    # Flush and finish writing.
    del data_set

    # Open once again.
    data_set = ASDFDataSet(asdf_filename)

    # ObsPy is tested enough to make this comparison meaningful.
    for station in (("AE", "113A"), ("TA", "POKR")):
        # Test the waveforms.
        stream_asdf = \
            getattr(data_set.waveforms, "%s_%s" % station).raw_recording
        stream_file = obspy.read(os.path.join(
            data_path, "%s.%s.*.mseed" % station))
        # Delete the file format specific stats attributes. These are
        # meaningless inside ASDF data sets.
        for trace in stream_file:
            del trace.stats.mseed
            del trace.stats._format
        for trace in stream_asdf:
            del trace.stats.asdf
            del trace.stats._format
        assert stream_asdf == stream_file

        # Test the inventory data.
        inv_asdf = \
            getattr(data_set.waveforms, "%s_%s" % station).StationXML
        inv_file = obspy.read_inventory(
            os.path.join(data_path, "%s.%s..BH*.xml" % station))
        assert inv_file == inv_asdf

    # Test the event.
    cat_file = obspy.readEvents(os.path.join(data_path, "quake.xml"))
    cat_asdf = data_set.events
    assert cat_file == cat_asdf
Example 4: test_adding_same_event_twice_raises
# Required import: from pyasdf import ASDFDataSet [as alias]
# Or: from pyasdf.ASDFDataSet import add_quakeml [as alias]
def test_adding_same_event_twice_raises(tmpdir):
    """
    Adding the same event twice raises.
    """
    asdf_filename = os.path.join(tmpdir.strpath, "test.h5")
    data_path = os.path.join(data_dir, "small_sample_data_set")

    data_set = ASDFDataSet(asdf_filename)

    # Add once, all good.
    data_set.add_quakeml(os.path.join(data_path, "quake.xml"))
    assert len(data_set.events) == 1

    # Adding again should raise an error.
    with pytest.raises(ValueError):
        data_set.add_quakeml(os.path.join(data_path, "quake.xml"))
Example 5: convert_to_asdf
# Required import: from pyasdf import ASDFDataSet [as alias]
# Or: from pyasdf.ASDFDataSet import add_quakeml [as alias]
def convert_to_asdf(asdf_fn, waveform_filelist, tag, quakemlfile=None,
                    staxml_filelist=None, verbose=False, status_bar=False,
                    create_simple_inv=False):
    """
    Convert files (sac or mseed) to asdf.
    """
    if verbose:
        print("*" * 10 + " ASDF Converter " + "*" * 10)

    nwaveform = len(waveform_filelist)
    if nwaveform == 0:
        print("No file specified. Return...")
        return
    if os.path.exists(asdf_fn):
        raise Exception("File '%s' exists." % asdf_fn)

    ds = ASDFDataSet(asdf_fn, mode='a')

    # Add event
    if quakemlfile:
        if not os.path.exists(quakemlfile):
            raise ValueError("Quakeml file does not exist: %s" % quakemlfile)
        ds.add_quakeml(quakemlfile)
        event = ds.events[0]
        if status_bar:
            drawProgressBar(1.0, "Adding Quakeml data")
    else:
        raise ValueError("No Event file")

    sta_dict = add_waveform_to_asdf(ds, waveform_filelist, tag, event=event,
                                    create_simple_inv=create_simple_inv,
                                    status_bar=status_bar)

    add_stationxml_to_asdf(ds, staxml_filelist, event=event,
                           create_simple_inv=create_simple_inv,
                           sta_dict=sta_dict,
                           status_bar=status_bar)

    if verbose:
        print("ASDF filesize: %s" % ds.pretty_filesize)

    del ds
Example 6: convert_to_asdf
# Required import: from pyasdf import ASDFDataSet [as alias]
# Or: from pyasdf.ASDFDataSet import add_quakeml [as alias]
def convert_to_asdf(filelist, asdf_fn, quakemlfile, staxml_filelist=None,
                    tag=None):
    """
    Convert files (sac or mseed) to asdf.
    """
    nfiles = len(filelist)
    if nfiles == 0:
        print("No file specified. Return...")
        return
    if os.path.exists(asdf_fn):
        raise Exception("File '%s' exists." % asdf_fn)

    ds = ASDFDataSet(asdf_fn)

    # Add event
    if quakemlfile is not None and os.path.exists(quakemlfile):
        print("Event info added")
        ds.add_quakeml(quakemlfile)
        event = ds.events[0]
    else:
        raise ValueError("No Event file")

    # Add waveforms.
    print("Adding Waveform data")
    for _i, filename in enumerate(filelist):
        if os.path.exists(filename):
            ds.add_waveforms(filename, tag=tag, event_id=event)
        else:
            print("File does not exist %i of %i" % (_i + 1, nfiles))

    # Add StationXML files.
    if staxml_filelist is not None and len(staxml_filelist) > 0:
        for _i, filename in enumerate(staxml_filelist):
            if os.path.exists(filename):
                ds.add_stationxml(filename)
    else:
        print("No stationxml added")
Example 7: save_adjoint_to_asdf
# Required import: from pyasdf import ASDFDataSet [as alias]
# Or: from pyasdf.ASDFDataSet import add_quakeml [as alias]
def save_adjoint_to_asdf(outputfile, events, adjoint_sources, stations):
    """
    Save events (obspy.Catalog) and adjoint sources, together with
    station information, to an asdf file on disk.
    """
    print("=" * 15 + "\nWrite to file: %s" % outputfile)
    outputdir = os.path.dirname(outputfile)
    if not os.path.exists(outputdir):
        os.makedirs(outputdir)
    if os.path.exists(outputfile):
        print("Output file exists and removed: %s" % outputfile)
        os.remove(outputfile)

    ds = ASDFDataSet(outputfile, mode='a', compression=None)
    ds.add_quakeml(events)

    for adj_id in sorted(adjoint_sources):
        adj = adjoint_sources[adj_id]
        sta_tag = "%s_%s" % (adj.network, adj.station)
        sta_info = stations[sta_tag]
        adj_array, adj_path, parameters = \
            dump_adjsrc(adj, sta_info)
        ds.add_auxiliary_data(adj_array, data_type="AdjointSources",
                              path=adj_path, parameters=parameters)
Example 8: test_saving_event_id
# Required import: from pyasdf import ASDFDataSet [as alias]
# Or: from pyasdf.ASDFDataSet import add_quakeml [as alias]
def test_saving_event_id(tmpdir):
    """
    Tests that the event_id can be saved and retrieved automatically.
    """
    data_path = os.path.join(data_dir, "small_sample_data_set")
    filename = os.path.join(tmpdir.strpath, "example.h5")
    event = obspy.readEvents(os.path.join(data_path, "quake.xml"))[0]

    # Add the event object, and associate the waveform with it.
    data_set = ASDFDataSet(filename)
    data_set.add_quakeml(event)
    waveform = obspy.read(os.path.join(data_path, "TA.*.mseed")).sort()
    data_set.add_waveforms(waveform, "raw_recording", event_id=event)
    st = data_set.waveforms.TA_POKR.raw_recording
    for tr in st:
        assert tr.stats.asdf.event_id.getReferredObject() == event
    del data_set
    os.remove(filename)

    # Add as a string.
    data_set = ASDFDataSet(filename)
    data_set.add_quakeml(event)
    waveform = obspy.read(os.path.join(data_path, "TA.*.mseed")).sort()
    data_set.add_waveforms(waveform, "raw_recording",
                           event_id=str(event.resource_id.id))
    st = data_set.waveforms.TA_POKR.raw_recording
    for tr in st:
        assert tr.stats.asdf.event_id.getReferredObject() == event
    del data_set
    os.remove(filename)

    # Add as a resource identifier object.
    data_set = ASDFDataSet(filename)
    data_set.add_quakeml(event)
    waveform = obspy.read(os.path.join(data_path, "TA.*.mseed")).sort()
    data_set.add_waveforms(waveform, "raw_recording",
                           event_id=event.resource_id)
    st = data_set.waveforms.TA_POKR.raw_recording
    for tr in st:
        assert tr.stats.asdf.event_id.getReferredObject() == event
    del data_set
    os.remove(filename)
Example 9: Exception
# Required import: from pyasdf import ASDFDataSet [as alias]
# Or: from pyasdf.ASDFDataSet import add_quakeml [as alias]
import glob
import os

from pyasdf import ASDFDataSet

filename = "synthetic.h5"
if os.path.exists(filename):
    raise Exception("File '%s' exists." % filename)

ds = ASDFDataSet(filename)

# Add event
ds.add_quakeml("./GCMT_event_SOUTH_SANDWICH_ISLANDS_REGION_Mag_5.6_2010-3-11-6.xml")
event = ds.events[0]

# Add waveforms.
filenames = glob.glob("./SYNTHETIC_SAC/*.sem")
for _i, filename in enumerate(filenames):
    print("Adding SAC file %i of %i..." % (_i + 1, len(filenames)))
    ds.add_waveforms(filename, tag="synthetic", event_id=event)

# Add StationXML files.
filenames = glob.glob("./StationXML/*.xml")
for _i, filename in enumerate(filenames):
    print("Adding StationXML file %i of %i..." % (_i + 1, len(filenames)))
    ds.add_stationxml(filename)