This article collects typical usage examples of the removeFile function from the Python storm_analysis package: what removeFile is for and how it is called in practice.
The 15 code examples below are test functions from the storm-analysis code base. Each one uses removeFile() to clear any output file left over from a previous run before new data is written.
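All of the examples are self-contained test functions, but they rely on a common set of imports that this page does not show. The sketch below lists the imports the examples appear to assume; the exact module paths (and the FakeReader helper used in two of the examples) live in the storm-analysis test code, so treat the paths here as assumptions rather than lines copied from the original files.

    import numpy

    import storm_analysis

    # Module aliases used in the examples below. The paths are assumptions
    # based on the storm-analysis package layout, not copied from the tests.
    import storm_analysis.sa_library.sa_h5py as saH5Py
    import storm_analysis.dbscan.clusters_sa_h5py as clSAH5Py
    import storm_analysis.sa_utilities.fiducials as fiducials
    import storm_analysis.sa_utilities.merge_hdf5 as mergeHDF5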
Example 1: test_sa_h5py_2
def test_sa_h5py_2():
    """
    Test data round trip.
    """
    peaks = {"x" : numpy.zeros(10),
             "y" : numpy.ones(10)}

    filename = "test_sa_hdf5.hdf5"
    h5_name = storm_analysis.getPathOutputTest(filename)
    storm_analysis.removeFile(h5_name)

    # Write data.
    with saH5Py.SAH5Py(h5_name, is_existing = False) as h5:
        h5.setMovieInformation(1,1,2,"")
        h5.addLocalizations(peaks, 1)
        h5.addLocalizations(peaks, 1, channel = 1)

    # Read data.
    with saH5Py.SAH5Py(h5_name) as h5:

        # Check that frame 0 is empty.
        locs = h5.getLocalizationsInFrame(0)
        assert(not bool(locs))

        # Check frame 1.
        locs = h5.getLocalizationsInFrame(1)
        assert(numpy.allclose(peaks["x"], locs["x"]))
        assert(numpy.allclose(peaks["y"], locs["y"]))
        assert(numpy.allclose(peaks["x"], locs["c1_x"]))
        assert(numpy.allclose(peaks["y"], locs["c1_y"]))

        # Check getting a specific field.
        locs = h5.getLocalizationsInFrame(1, fields = ["x"])
        assert("x" in locs)
        assert(not "y" in locs)
Example 2: test_sa_h5py_19
def test_sa_h5py_19():
    """
    Test getting specific fields.
    """
    peaks = {"bar" : numpy.zeros(10),
             "x" : numpy.zeros(10),
             "y" : numpy.zeros(10)}

    filename = "test_sa_hdf5.hdf5"
    h5_name = storm_analysis.getPathOutputTest(filename)
    storm_analysis.removeFile(h5_name)

    # Write data.
    with saH5Py.SAH5Py(h5_name, is_existing = False, overwrite = True) as h5:
        h5.setMovieInformation(100, 100, 1, "")
        h5.addLocalizations(peaks, 0)

    # Get data.
    with saH5Py.SAH5Py(h5_name) as h5:
        locs = h5.getLocalizationsInFrame(0)
        for elt in ["bar", "x", "y"]:
            assert elt in locs

        locs = h5.getLocalizationsInFrame(0, fields = ["x"])
        assert "x" in locs
        for elt in ["bar", "y"]:
            assert not elt in locs
Example 3: test_sa_h5py_16
def test_sa_h5py_16():
    """
    Test that localizations iterator skips empty frames.
    """
    peaks = {"x" : numpy.zeros(10),
             "y" : numpy.ones(10)}
    empty = {"x" : numpy.array([]),
             "y" : numpy.array([])}

    filename = "test_sa_hdf5.hdf5"
    h5_name = storm_analysis.getPathOutputTest(filename)
    storm_analysis.removeFile(h5_name)

    # Write data.
    with saH5Py.SAH5Py(h5_name, is_existing = False, overwrite = True) as h5:
        h5.setMovieInformation(100, 100, 5, "")
        h5.addLocalizations(peaks, 0)
        h5.addLocalizations(empty, 1)
        h5.addLocalizations(peaks, 2)

    # Read data.
    with saH5Py.SAH5Py(h5_name) as h5:
        for fnum, locs in h5.localizationsIterator():
            assert(fnum != 1)
Example 4: test_sa_h5py_17
def test_sa_h5py_17():
    """
    Test that localizations iterator does not skip empty frames (when requested not to).
    """
    peaks = {"x" : numpy.zeros(10),
             "y" : numpy.ones(10)}
    empty = {"x" : numpy.array([]),
             "y" : numpy.array([])}

    filename = "test_sa_hdf5.hdf5"
    h5_name = storm_analysis.getPathOutputTest(filename)
    storm_analysis.removeFile(h5_name)

    # Write data.
    with saH5Py.SAH5Py(h5_name, is_existing = False, overwrite = True) as h5:
        h5.setMovieInformation(100, 100, 5, "")
        h5.addLocalizations(peaks, 0)
        h5.addLocalizations(empty, 1)
        h5.addLocalizations(peaks, 2)

    # Read data.
    with saH5Py.SAH5Py(h5_name) as h5:
        for i, [fnum, locs] in enumerate(h5.localizationsIterator(skip_empty = False)):
            assert(i == fnum)
Example 5: test_sa_h5py_11
def test_sa_h5py_11():
    """
    Test hasLocalizationsField() and hasTracksField().
    """
    peaks = {"x" : numpy.zeros(10),
             "y" : numpy.ones(10)}

    filename = "test_sa_hdf5.hdf5"
    h5_name = storm_analysis.getPathOutputTest(filename)
    storm_analysis.removeFile(h5_name)

    # Write data.
    with saH5Py.SAH5Py(h5_name, is_existing = False) as h5:
        h5.setMovieInformation(256, 256, 10, "XYZZY")
        h5.addLocalizations(peaks, 1)
        h5.addTracks(peaks)

    # Check.
    with saH5Py.SAH5Py(h5_name) as h5:
        assert(h5.hasLocalizationsField("x"))
        assert(not h5.hasLocalizationsField("x1"))
        assert(h5.hasTracksField("x"))
        assert(not h5.hasTracksField("x1"))
Example 6: test_sa_h5py_12
def test_sa_h5py_12():
    """
    Test handling of multiple channels.
    """
    peaks = {"x" : numpy.zeros(3),
             "y" : numpy.ones(3)}

    filename = "test_sa_hdf5.hdf5"
    h5_name = storm_analysis.getPathOutputTest(filename)
    storm_analysis.removeFile(h5_name)

    # Write data.
    with saH5Py.SAH5Py(h5_name, is_existing = False) as h5:
        h5.setMovieInformation(1,1,2,"")
        h5.addLocalizations(peaks, 1)

        peaks["x"] += 1
        peaks["y"] += 1
        h5.addLocalizations(peaks, 1, channel = 1)

        peaks["x"] += 1
        peaks["y"] += 1
        h5.addLocalizations(peaks, 1, channel = 2)

    # Read data.
    with saH5Py.SAH5Py(h5_name) as h5:

        # Check getting number of channels.
        assert(h5.getNChannels() == 3)

        for [fnum, locs] in h5.localizationsIterator():
            for i, elt in enumerate(h5.splitByChannel(locs)):
                assert(numpy.allclose(elt["x"], i * numpy.ones(3)))
                assert(numpy.allclose(elt["y"], i * numpy.ones(3) + 1.0))
Example 7: test_sa_h5py_10
def test_sa_h5py_10():
    """
    Test 'is_existing' and 'overwrite' parameters.
    """
    filename = "test_sa_hdf5.hdf5"
    h5_name = storm_analysis.getPathOutputTest(filename)
    storm_analysis.removeFile(h5_name)

    # Test failure on trying to open a file that does not exist.
    try:
        with saH5Py.SAH5Py(h5_name) as h5:
            pass
    except saH5Py.SAH5PyException:
        pass
    else:
        assert(False)

    # Test failure on trying to overwrite a file that does exist.

    # Create the file.
    with saH5Py.SAH5Py(h5_name, is_existing = False) as h5:
        pass

    # Test that we cannot overwrite it.
    try:
        with saH5Py.SAH5Py(h5_name, is_existing = False) as h5:
            pass
    except saH5Py.SAH5PyException:
        pass
    else:
        assert(False)

    # Test that we can overwrite it.
    with saH5Py.SAH5Py(h5_name, is_existing = False, overwrite = True) as h5:
        pass
Example 8: test_sa_h5py_9
def test_sa_h5py_9():
    """
    Test setting the track id field.
    """
    peaks = {"x" : numpy.zeros(10),
             "y" : numpy.ones(10)}

    filename = "test_sa_hdf5.hdf5"
    h5_name = storm_analysis.getPathOutputTest(filename)
    storm_analysis.removeFile(h5_name)

    # Add localizations and track id.
    with saH5Py.SAH5Py(h5_name, is_existing = False) as h5:
        h5.addLocalizations(peaks, 1)
        h5.addTrackID(numpy.ones(10), 1)

    # Check track id.
    with saH5Py.SAH5Py(h5_name) as h5:
        locs = h5.getLocalizationsInFrame(1)
        assert(numpy.allclose(locs["track_id"], numpy.ones(10)))

    # Change track id.
    with saH5Py.SAH5Py(h5_name) as h5:
        h5.addTrackID(numpy.zeros(10), 1)

    # Check track id.
    with saH5Py.SAH5Py(h5_name) as h5:
        locs = h5.getLocalizationsInFrame(1)
        assert(numpy.allclose(locs["track_id"], numpy.zeros(10)))
Example 9: test_sa_h5py_7
def test_sa_h5py_7():
    """
    Test tracks iterator.
    """
    tracks = {"x" : numpy.zeros(10),
              "y" : numpy.ones(10)}

    filename = "test_sa_hdf5.hdf5"
    h5_name = storm_analysis.getPathOutputTest(filename)
    storm_analysis.removeFile(h5_name)

    # No tracks.
    with saH5Py.SAH5Py(h5_name, is_existing = False) as h5:
        pass

    with saH5Py.SAH5Py(h5_name) as h5:
        for t in h5.tracksIterator():
            assert(False)  # We should not get here.

    # Tracks.
    storm_analysis.removeFile(h5_name)
    with saH5Py.SAH5Py(h5_name, is_existing = False) as h5:
        h5.addTracks(tracks)

    with saH5Py.SAH5Py(h5_name) as h5:
        for t in h5.tracksIterator():
            assert(numpy.allclose(t["x"], tracks["x"]))

        # Only get one field.
        for t in h5.tracksIterator(["x"]):
            assert(not "y" in t)
Example 10: test_sa_h5py_3
def test_sa_h5py_3():
    """
    Test getting data from multiple frames.
    """
    peaks = {"x" : numpy.zeros(10),
             "y" : numpy.ones(10)}

    filename = "test_sa_hdf5.hdf5"
    h5_name = storm_analysis.getPathOutputTest(filename)
    storm_analysis.removeFile(h5_name)

    # Write data.
    fr = FakeReader()
    with saH5Py.SAH5Py(h5_name, is_existing = False) as h5:
        h5.addMovieInformation(fr)
        for i in range(fr.getMovieL()):
            h5.addLocalizations(peaks, i)

    # Read data.
    with saH5Py.SAH5Py(h5_name) as h5:

        # Check localizations in first 5 frames.
        locs = h5.getLocalizationsInFrameRange(0,5)
        assert(locs["x"].size == 50)

        # Get all the localizations.
        locs = h5.getLocalizations()
        assert(locs["x"].size == (10.0 * fr.getMovieL()))
Example 11: test_fiducials_7
def test_fiducials_7():
    """
    Iterator test.
    """
    peaks = {"x" : numpy.array([1.0, 2.0, 3.0]),
             "y" : numpy.array([1.0, 1.0, 1.0])}

    filename = "test_fiducials.hdf5"
    h5_name = storm_analysis.getPathOutputTest(filename)
    storm_analysis.removeFile(h5_name)

    # Write data.
    with saH5Py.SAH5Py(h5_name, is_existing = False) as h5:
        for i in range(3):
            temp = {}
            for elt in peaks:
                temp[elt] = peaks[elt][i:]
            h5.addLocalizations(temp, i)
        h5.addMovieInformation(FakeReader(n_frames = 4))

    # Track fiducials.
    fiducials.trackFiducials(h5_name, radius = 0.1, reference_frame = 2)

    # Check.
    with fiducials.SAH5Fiducials(h5_name) as h5:
        for fdcl in h5.fiducialsIterator():
            assert(numpy.allclose(fdcl["frame"], numpy.arange(3)))
Example 12: test_merge_2
def test_merge_2():
    """
    Test file merging, skipping files with no tracks.
    """
    metadata = "<xml><field1><data1>data</data1></field></xml>"
    ref_tracks = {"x" : numpy.random.randint(0,10,10),
                  "y" : numpy.random.randint(0,10,10)}

    # Create HDF5 files to merge.
    h5_names = []
    for i in range(3):
        h5_name = storm_analysis.getPathOutputTest("test_merge_f" + str(i) + ".hdf5")
        h5_names.append(h5_name)

        with saH5Py.SAH5Py(h5_name, is_existing = False, overwrite = True) as h5:
            h5.addMetadata(metadata)
            h5.setMovieInformation(20,20,1,"")
            h5.setPixelSize(100.0)
            if(i != 1):
                h5.addTracks(ref_tracks)

    # Merge.
    merge_name = storm_analysis.getPathOutputTest("test_merge.hdf5")
    storm_analysis.removeFile(merge_name)
    mergeHDF5.mergeHDF5(h5_names, merge_name)

    # Check merge.
    with saH5Py.SAH5Py(merge_name) as h5:
        assert(metadata == h5.getMetadata())
        for tracks in h5.tracksIterator():
            assert(numpy.allclose(ref_tracks["x"], tracks["x"]))
Example 13: test_cl_sa_h5py_2
def test_cl_sa_h5py_2():
    """
    Test basic cluster file mechanics (using tracks).
    """
    tracks = {"x" : numpy.arange(11, dtype = numpy.float64),
              "y" : numpy.arange(11, dtype = numpy.float64)}

    filename = "test_clusters_sa_h5py.hdf5"
    h5_name = storm_analysis.getPathOutputTest(filename)
    storm_analysis.removeFile(h5_name)

    # Write track data.
    with saH5Py.SAH5Py(h5_name, is_existing = False) as h5:
        h5.setMovieInformation(1,1,2,"")
        h5.addTracks(tracks)

    # Write clustering data for tracks.
    cluster_id = numpy.remainder(numpy.arange(11), 3)
    cluster_data = {"track_id" : numpy.zeros(11, dtype = numpy.int64),
                    "loc_id" : numpy.arange(11)}

    cl_size = [0, 4, 4, 3]
    with clSAH5Py.SAH5Clusters(h5_name) as cl_h5:
        cl_h5.addClusters(cluster_id, cluster_data)

        assert(cl_h5.getNClusters() == (len(cl_size) - 1))
        for index, cluster in cl_h5.clustersIterator(skip_unclustered = False):
            for field in cluster:
                assert(cluster[field].size == cl_size[index])
Example 14: test_cl_sa_h5py_5
def test_cl_sa_h5py_5():
    """
    Test getting all of the localizations for clustering.
    """
    locs = {"category" : numpy.arange(4, dtype = numpy.int32),
            "x" : numpy.arange(4, dtype = numpy.float64),
            "y" : numpy.arange(4, dtype = numpy.float64)}

    filename = "test_clusters_sa_h5py.hdf5"
    h5_name = storm_analysis.getPathOutputTest(filename)
    storm_analysis.removeFile(h5_name)

    # Write localization data.
    with saH5Py.SAH5Py(h5_name, is_existing = False) as h5:
        h5.setMovieInformation(1,1,5,"")
        h5.setPixelSize(100.0)
        h5.addLocalizations(locs, 1)
        h5.addLocalizations(locs, 3)

    # Test getting all the localization data.
    with clSAH5Py.SAH5Clusters(h5_name) as cl_h5:
        [x, y, z, c, cl_dict] = cl_h5.getDataForClustering()
        assert(numpy.allclose(x, cl_dict['loc_id']))
        assert(numpy.allclose(y, cl_dict['loc_id']))
        assert(numpy.allclose(z, numpy.zeros(x.size)))
        assert(numpy.allclose(c, cl_dict['loc_id']))
        assert(numpy.allclose(cl_dict['frame'], numpy.array([1,1,1,1,3,3,3,3])))
Example 15: test_cl_sa_h5py_6
def test_cl_sa_h5py_6():
    """
    Test getting all of the tracks for clustering.
    """
    tracks = {"category" : numpy.arange(4, dtype = numpy.int32),
              "x" : numpy.arange(4, dtype = numpy.float64),
              "y" : numpy.arange(4, dtype = numpy.float64),
              "z" : numpy.arange(4, dtype = numpy.float64)}

    filename = "test_clusters_sa_h5py.hdf5"
    h5_name = storm_analysis.getPathOutputTest(filename)
    storm_analysis.removeFile(h5_name)

    # Write tracks data.
    with saH5Py.SAH5Py(h5_name, is_existing = False) as h5:
        h5.setMovieInformation(1,1,2,"")
        h5.setPixelSize(100.0)
        h5.addTracks(tracks)
        h5.addTracks(tracks)

    # Test getting all the tracking data.
    with clSAH5Py.SAH5Clusters(h5_name) as cl_h5:
        [x, y, z, c, cl_dict] = cl_h5.getDataForClustering()
        assert(numpy.allclose(x, cl_dict['loc_id']))
        assert(numpy.allclose(y, cl_dict['loc_id']))
        assert(numpy.allclose(z, cl_dict['loc_id']))
        assert(numpy.allclose(c, cl_dict['loc_id']))
        assert(numpy.allclose(cl_dict['track_id'], numpy.array([0,0,0,0,1,1,1,1])))