This article collects typical usage examples of Python's future_builtins.zip method. If you are wondering what future_builtins.zip does, how to call it, or what it looks like in real code, the curated examples below may help. You can also explore further usage examples from future_builtins, the module this method lives in.
The following presents 12 code examples of the future_builtins.zip method, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code examples.
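Before the examples, here is a minimal orientation sketch (my own, not taken from the examples below; Python 2 assumed): future_builtins.zip is the iterator-returning zip that became the built-in in Python 3, so it yields pairs lazily instead of building a list up front.

# Minimal sketch (Python 2): future_builtins.zip behaves like Python 3's built-in zip
from future_builtins import zip

pairs = zip([1, 2, 3], ['a', 'b', 'c'])
print(pairs)        # a lazy iterator (itertools.izip under the hood), not a list
print(list(pairs))  # [(1, 'a'), (2, 'b'), (3, 'c')]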
Example 1: test_traces_raw
# Required module: import future_builtins [as alias]
# Or: from future_builtins import zip [as alias]
def test_traces_raw(openfn, kwargs):
    with openfn(**kwargs) as f:
        gen_traces = np.array(list(map(np.copy, f.trace)), dtype=np.single)
        raw_traces = f.trace.raw[:]
        assert np.array_equal(gen_traces, raw_traces)
        assert len(gen_traces) == f.tracecount
        assert len(raw_traces) == f.tracecount
        assert gen_traces[0][49] == raw_traces[0][49]
        assert gen_traces[1][49] == f.trace.raw[1][49]
        assert gen_traces[2][49] == raw_traces[2][49]
        assert np.array_equal(f.trace[10], f.trace.raw[10])
        for raw, gen in zip(f.trace.raw[::2], f.trace[::2]):
            assert np.array_equal(raw, gen)
        for raw, gen in zip(f.trace.raw[::-1], f.trace[::-1]):
            assert np.array_equal(raw, gen)
Example 2: test_group_single_key
# Required module: import future_builtins [as alias]
# Or: from future_builtins import zip [as alias]
def test_group_single_key():
    with segyio.open(testdata / 'shot-gather.sgy', ignore_geometry = True) as f:
        group = f.group(segyio.su.fldr)
        assert len(group) == 4
        assert 2 in group
        assert 4 not in group

        expected_keys = [2, 3, 5, 8]
        for key, shot in zip(expected_keys, group.values()):
            assert key == shot.key
            for header in shot.header:
                assert key == header[segyio.su.fldr]
            for trace in shot.trace:
                assert key == trace[0]
Example 3: izip_records
# Required module: import future_builtins [as alias]
# Or: from future_builtins import zip [as alias]
def izip_records(seqarrays, fill_value=None, flatten=True):
    """
    Returns an iterator of concatenated items from a sequence of arrays.

    Parameters
    ----------
    seqarrays : sequence of arrays
        Sequence of arrays.
    fill_value : {None, integer}
        Value used to pad shorter iterables.
    flatten : {True, False}
        Whether to flatten the items (True) or keep them nested (False).
    """
    # OK, that's a complete ripoff from Python2.6 itertools.izip_longest
    def sentinel(counter=([fill_value] * (len(seqarrays) - 1)).pop):
        "Yields the fill_value or raises IndexError"
        yield counter()
    #
    fillers = itertools.repeat(fill_value)
    iters = [itertools.chain(it, sentinel(), fillers) for it in seqarrays]
    # Should we flatten the items, or just use a nested approach?
    if flatten:
        zipfunc = _izip_fields_flat
    else:
        zipfunc = _izip_fields
    #
    try:
        for tup in zip(*iters):
            yield tuple(zipfunc(tup))
    except IndexError:
        pass
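The heart of izip_records is the sentinel trick borrowed, as the comment says, from Python 2.6's itertools.izip_longest, and it only works with a lazy, iterator-returning zip such as future_builtins.zip (or the Python 3 built-in). The following self-contained sketch, with made-up names and stripped of the record-specific flattening, shows the mechanism: each iterable is chained with a one-shot sentinel and an endless run of fill values, so once every input is exhausted the shared counter is empty, the next pop raises IndexError, and the loop stops.

import itertools

try:
    from future_builtins import zip   # Python 2: iterator-returning zip
except ImportError:
    pass                              # Python 3: the built-in zip is already lazy


def zip_longest_sketch(*iterables, **kwargs):
    # shared counter: one fill value per iterable except the last one to finish;
    # when it is empty, the next pop raises IndexError and ends the whole zip
    fill_value = kwargs.get('fill_value', None)
    counter = ([fill_value] * (len(iterables) - 1)).pop

    def sentinel():
        yield counter()

    fillers = itertools.repeat(fill_value)
    iters = [itertools.chain(it, sentinel(), fillers) for it in iterables]
    try:
        for tup in zip(*iters):
            yield tup
    except IndexError:
        pass


print(list(zip_longest_sketch('ab', 'xyz')))   # [('a', 'x'), ('b', 'y'), (None, 'z')]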
Example 4: test_itertools
# Required module: import future_builtins [as alias]
# Or: from future_builtins import zip [as alias]
def test_itertools(self):
    from itertools import imap, izip, ifilter
    # We will assume that the itertools functions work, so provided
    # that we've got identical copies, we will work!
    self.assertEqual(map, imap)
    self.assertEqual(zip, izip)
    self.assertEqual(filter, ifilter)
    # Testing that filter(None, stuff) raises a warning lives in
    # test_py3kwarn.py
Example 5: izip_records
# Required module: import future_builtins [as alias]
# Or: from future_builtins import zip [as alias]
def izip_records(seqarrays, fill_value=None, flatten=True):
    """
    Returns an iterator of concatenated items from a sequence of arrays.

    Parameters
    ----------
    seqarrays : sequence of arrays
        Sequence of arrays.
    fill_value : {None, integer}
        Value used to pad shorter iterables.
    flatten : {True, False}
        Whether to flatten the items (True) or keep them nested (False).
    """
    # OK, that's a complete ripoff from Python2.6 itertools.izip_longest
    def sentinel(counter=([fill_value] * (len(seqarrays) - 1)).pop):
        "Yields the fill_value or raises IndexError"
        yield counter()
    #
    fillers = itertools.repeat(fill_value)
    iters = [itertools.chain(it, sentinel(), fillers) for it in seqarrays]
    # Should we flatten the items, or just use a nested approach?
    if flatten:
        zipfunc = _izip_fields_flat
    else:
        zipfunc = _izip_fields
    #
    try:
        for tup in zip(*iters):
            yield tuple(zipfunc(tup))
    except IndexError:
        pass
Example 6: apply_all_cells
# Required module: import future_builtins [as alias]
# Or: from future_builtins import zip [as alias]
def apply_all_cells(F, n_jobs=-1, disp=1, *fnn_args, **fnn_kwargs):
    """
    Run FNN deconvolution on multiple cells in parallel

    Arguments:
    -----------------------------------------------------------------------
    F: ndarray, [nc, nt] or [nc, npix, nt]
        measured fluorescence values
    n_jobs: int scalar
        number of jobs to process in parallel. if n_jobs == -1, all cores
        are used.
    *fnn_args, **fnn_kwargs
        additional arguments to pass to deconvolve()

    Returns:
    -----------------------------------------------------------------------
    n_hat_best: ndarray, [nc, nt]
        MAP estimate of the most likely spike train
    c_hat_best: ndarray, [nc, nt]
        estimated intracellular calcium concentration (A.U.)
    LL: ndarray, [nc,]
        posterior log-likelihood of F given n_hat_best and theta_best
    theta_best: tuple, [nc,]
        model parameters, updated according to learn_theta
    """
    pool = Parallel(n_jobs=n_jobs, verbose=disp, pre_dispatch='n_jobs * 2')
    results = pool(delayed(deconvolve)
                   (rr, *fnn_args, **fnn_kwargs) for rr in F)
    n_hat, c_hat, LL, theta = zip(*results)
    n_hat, c_hat, LL = (np.vstack(a) for a in (n_hat, c_hat, LL))
    return n_hat, c_hat, LL, theta
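The closing lines of apply_all_cells rely on zip(*results) to turn a list of per-cell (n_hat, c_hat, LL, theta) tuples into one tuple per quantity before stacking. A small self-contained illustration of that unpacking pattern, using made-up arrays in place of real deconvolve() output:

import numpy as np

# made-up stand-ins for deconvolve() results: (n_hat, c_hat, LL, theta) per cell
nc, nt = 3, 5
results = [(np.full(nt, i, dtype=float),     # n_hat for cell i
            np.zeros(nt),                    # c_hat for cell i
            np.array([-float(i)]),           # LL for cell i
            {'cell': i})                     # theta for cell i
           for i in range(nc)]

# zip(*results) groups the tuples by position: all n_hat together, all c_hat, ...
n_hat, c_hat, LL, theta = zip(*results)
n_hat, c_hat, LL = (np.vstack(a) for a in (n_hat, c_hat, LL))

print((n_hat.shape, c_hat.shape, LL.shape, len(theta)))   # ((3, 5), (3, 5), (3, 1), 3)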
Example 7: test_fast_slow_dimensions
# Required module: import future_builtins [as alias]
# Or: from future_builtins import zip [as alias]
def test_fast_slow_dimensions(openfn, kwargs):
    with openfn(**kwargs) as f:
        for iline, fline in zip(f.iline, f.fast):
            assert np.array_equal(iline, fline)
        for xline, sline in zip(f.xline, f.slow):
            assert np.array_equal(xline, sline)
Example 8: test_create_sgy
# Required module: import future_builtins [as alias]
# Or: from future_builtins import zip [as alias]
def test_create_sgy(small):
    orig = str(small.dirname + '/small.sgy')
    fresh = str(small.dirname + '/fresh.sgy')
    with segyio.open(orig) as src:
        spec = segyio.spec()
        spec.format = int(src.format)
        spec.sorting = int(src.sorting)
        spec.samples = src.samples
        spec.ilines = src.ilines
        spec.xlines = src.xlines

        with segyio.create(fresh, spec) as dst:
            dst.text[0] = src.text[0]
            dst.bin = src.bin
            # copy all headers
            dst.header = src.header
            for i, srctr in enumerate(src.trace):
                dst.trace[i] = srctr
            dst.trace = src.trace
            # this doesn't work yet, some restructuring is necessary
            # if it turns out to be a desired feature it's rather easy to do
            # for dsth, srch in zip(dst.header, src.header):
            #     dsth = srch
            # for dsttr, srctr in zip(dst.trace, src.trace):
            #     dsttr = srctr

    assert filecmp.cmp(orig, fresh)
Example 9: test_ref_sliced
# Required module: import future_builtins [as alias]
# Or: from future_builtins import zip [as alias]
def test_ref_sliced(small):
    with segyio.open(small, mode = 'r+') as f:
        expected = f.trace.raw[:]
        expected[10:15] += expected[:5]

        with f.trace.ref as ref:
            for x, y in zip(ref[10:15], f.trace[:]):
                np.copyto(x, x + y)

    with segyio.open(small) as f:
        npt.assert_array_almost_equal(expected, f.trace.raw[:])
Example 10: test_gather_mode
# Required module: import future_builtins [as alias]
# Or: from future_builtins import zip [as alias]
def test_gather_mode():
    with segyio.open(testdata / 'small-ps.sgy') as f:
        empty = np.empty(0, dtype=np.single)
        # should raise
        with pytest.raises(KeyError):
            assert np.array_equal(empty, f.gather[2, 3, 3])
        with pytest.raises(KeyError):
            assert np.array_equal(empty, f.gather[2, 5, 1])
        with pytest.raises(KeyError):
            assert np.array_equal(empty, f.gather[5, 2, 1])
        assert np.array_equal(f.trace[10], f.gather[2, 3, 1])
        assert np.array_equal(f.trace[11], f.gather[2, 3, 2])
        traces = segyio.tools.collect(f.trace[10:12])
        gather = f.gather[2, 3, :]
        assert np.array_equal(traces, gather)
        assert np.array_equal(traces, f.gather[2, 3])
        assert np.array_equal(empty, f.gather[2, 3, 1:0])
        assert np.array_equal(empty, f.gather[2, 3, 3:4])
        for g, line in zip(f.gather[1:3, 3, 1], f.iline[1:3]):
            assert 10 == len(g)
            assert (10,) == g.shape
            assert np.array_equal(line[2], g)
        for g, line in zip(f.gather[1:3, 3, :], f.iline[1:3]):
            assert 2 == len(g)
            assert (2, 10) == g.shape
            assert np.array_equal(line[2], g[0])
        for g, line in zip(f.gather[:, 2, 1], f.iline[:]):
            assert 10 == len(g)
            assert (10,) == g.shape
            assert np.array_equal(line[1], g)
        for g, line in zip(f.gather[1, :, 1], f.xline[:]):
            assert 10 == len(g)
            assert (10,) == g.shape
            assert np.array_equal(line[0], g)
        # overshoot the inlines, should still work
        for g, line in zip(f.gather[3:10, 3, 1], f.iline[3:]):
            assert 10 == len(g)
            assert (10,) == g.shape
            print(g)
            print(line[0])
            assert np.array_equal(line[2], g)
        for g, line in zip(f.gather[1, 1:3, :], f.xline[1:3]):
            assert 2 == len(g)
            assert (2, 10) == g.shape
            assert np.array_equal(line[0], g[0])
        for g, line in zip(f.gather[1, 1:3, 3:4], f.xline[1:3]):
            assert np.array_equal(empty, g)
Example 11: test_create_from_naught_prestack
# Required module: import future_builtins [as alias]
# Or: from future_builtins import zip [as alias]
def test_create_from_naught_prestack(endian, tmpdir):
    spec = segyio.spec()
    spec.format = 5
    # spec.sorting not set by test design
    spec.samples = range(7)
    spec.ilines = range(1, 4)
    spec.xlines = range(1, 3)
    spec.offsets = range(1, 6)
    spec.endian = endian

    with segyio.create(tmpdir / "mk-ps.sgy", spec) as dst:
        arr = np.arange(start=0.000,
                        stop=0.007,
                        step=0.001,
                        dtype=np.single)

        arr = np.concatenate([[arr + 0.01], [arr + 0.02]], axis=0)
        lines = [arr + i for i in spec.ilines]
        cube = [(off * 100) + line for line in lines for off in spec.offsets]

        dst.iline[:, :] = cube

        for of in spec.offsets:
            for il in spec.ilines:
                dst.header.iline[il, of] = {TraceField.INLINE_3D: il,
                                            TraceField.offset: of
                                            }
            for xl in spec.xlines:
                dst.header.xline[xl, of] = {TraceField.CROSSLINE_3D: xl}

    with segyio.open(tmpdir / "mk-ps.sgy", endian = endian) as f:
        assert 101.010 == approx(f.trace[0][0], abs=1e-4)
        assert 101.011 == approx(f.trace[0][1], abs=1e-4)
        assert 101.016 == approx(f.trace[0][-1], abs=1e-4)
        assert 503.025 == approx(f.trace[-1][5], abs=1e-4)

        assert f.header[0][TraceField.offset] != f.header[1][TraceField.offset]
        assert 1 == f.header[0][TraceField.offset]
        assert 2 == f.header[1][TraceField.offset]

        for x, y in zip(f.iline[:, :], cube):
            assert list(x.flatten()) == list(y.flatten())

        assert f.sorting == TraceSortingFormat.INLINE_SORTING
Example 12: test_group_multi_key_corret_index
# Required module: import future_builtins [as alias]
# Or: from future_builtins import zip [as alias]
def test_group_multi_key_corret_index():
    with segyio.open(testdata / 'shot-gather.sgy', ignore_geometry = True) as f:
        group = f.group((segyio.su.fldr, segyio.su.grnofr))
        assert len(group) == 8

        expected_keys = [
            ((segyio.su.fldr, 2), (segyio.su.grnofr, 1)),
            ((segyio.su.fldr, 2), (segyio.su.grnofr, 2)),
            ((segyio.su.fldr, 3), (segyio.su.grnofr, 1)),
            ((segyio.su.fldr, 3), (segyio.su.grnofr, 2)),
            ((segyio.su.fldr, 5), (segyio.su.grnofr, 1)),
            ((segyio.su.fldr, 5), (segyio.su.grnofr, 2)),
            ((segyio.su.fldr, 8), (segyio.su.grnofr, 1)),
            ((segyio.su.fldr, 8), (segyio.su.grnofr, 2)),
        ]
        expected_keys = [frozenset(k) for k in expected_keys]

        indices = [
            # fldr = 2, grnofr = 1
            [0, 2, 4, 6, 8],
            # fldr = 2, grnofr = 2
            [1, 3, 5, 7, 9],
            # fldr = 3, grnofr = 1
            [10, 12, 14, 16, 18, 20],
            # fldr = 3, grnofr = 2
            [11, 13, 15, 17, 19, 21],
            # fldr = 5, grnofr = 1
            [22, 24, 26, 28, 30, 32, 34],
            # fldr = 5, grnofr = 2
            [23, 25, 27, 29, 31, 33],
            # fldr = 8, grnofr = 1
            [35, 37, 39, 41, 43, 45, 47, 49, 51, 53, 55, 57, 59],
            # fldr = 8, grnofr = 2
            [36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60],
        ]

        # this test checks that every individual group maps to the known index
        # it's supposed to model. it would be more elegant to represent the
        # indices as a key -> index dict, but that makes verifying that every
        # key/index pair is covered uglier
        assert len(expected_keys) == len(indices)
        for key, index, shot in zip(expected_keys, indices, group.values()):
            assert index == shot.index
            assert key == shot.key