本文整理汇总了Python中numpy.ptp方法的典型用法代码示例。如果您正苦于以下问题:Python numpy.ptp方法的具体用法?Python numpy.ptp怎么用?Python numpy.ptp使用的例子?那么,这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类numpy的用法示例。
在下文中一共展示了numpy.ptp方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: spline_transform_multi
# 需要导入模块: import numpy [as 别名]
# 或者: from numpy import ptp [as 别名]
def spline_transform_multi(img, mask):
    """Apply a random thin-plate-spline warp to an image and its mask.

    The warp anchor points are sampled on the dilated boundary of the
    mask's foreground region.  When too few boundary points exist to fit
    a spline, the inputs are returned unchanged.

    Parameters
    ----------
    img : ndarray
        Image to warp.
    mask : ndarray
        Label mask; pixels > 0 are foreground.

    Returns
    -------
    (new_img, new_msk) : tuple of ndarray
        Warped image and mask, or the originals when no warp was applied.
    """
    bimask = mask > 0
    # BUG FIX: an all-background mask made np.where return empty arrays and
    # np.ptp raise; additionally the original `elif x.size > 0` left
    # new_img/new_msk unbound when x.size == 0 (UnboundLocalError).
    if not bimask.any():
        return img, mask
    M, N = np.where(bimask)
    w = np.ptp(N) + 1  # width of the foreground bounding box
    h = np.ptp(M) + 1  # height of the foreground bounding box
    kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (3, 3))
    # One-pixel outer boundary ring of the foreground region.
    bound = cv2.dilate(bimask.astype('uint8'), kernel) - bimask
    y, x = np.where(bound > 0)
    if x.size > 4:
        # Enough anchor points to fit a thin-plate spline.
        newxy = thin_plate_transform(x, y, w, h, mask.shape[:2], num_points=5)
        new_img = cv2.remap(img, newxy, None, cv2.INTER_LINEAR)
        new_msk = cv2.remap(mask, newxy, None, cv2.INTER_NEAREST)
    else:
        # Too few boundary points -- return inputs untouched.
        new_img = img
        new_msk = mask
    return new_img, new_msk
示例2: __init__
# 需要导入模块: import numpy [as 别名]
# 或者: from numpy import ptp [as 别名]
def __init__(self, parent=None, visible=True, drawmode=gl.GL_LINES, position=(0, 0, 0), **kwargs):
    """Collision detector: checks whether a point lies inside a sphere
    built around the mesh vertices of ``parent``."""
    from .wavefront import WavefrontReader
    from .resources import obj_primitives

    primitive_reader = WavefrontReader(obj_primitives)
    primitive = primitive_reader.bodies[self.primitive_shape]
    super(ColliderBase, self).__init__(
        arrays=[primitive['v'], primitive['vn'], primitive['vt']],
        drawmode=drawmode, visible=visible, position=position,
        **kwargs)

    # Render the collider in red so it is easy to spot when visible.
    self.uniforms['diffuse'] = 1., 0, 0

    # Assign parent last: Scenegraph.parent execution order lets self.scale
    # happen inside the CollisionChecker parent property.
    if parent:
        self.parent = parent
示例3: _hist_bin_sqrt
# 需要导入模块: import numpy [as 别名]
# 或者: from numpy import ptp [as 别名]
def _hist_bin_sqrt(x, range):
"""
Square root histogram bin estimator.
Bin width is inversely proportional to the data size. Used by many
programs for its simplicity.
Parameters
----------
x : array_like
Input data that is to be histogrammed, trimmed to range. May not
be empty.
Returns
-------
h : An estimate of the optimal bin width for the given data.
"""
del range # unused
return x.ptp() / np.sqrt(x.size)
示例4: _hist_bin_sturges
# 需要导入模块: import numpy [as 别名]
# 或者: from numpy import ptp [as 别名]
def _hist_bin_sturges(x, range):
"""
Sturges histogram bin estimator.
A very simplistic estimator based on the assumption of normality of
the data. This estimator has poor performance for non-normal data,
which becomes especially obvious for large data sets. The estimate
depends only on size of the data.
Parameters
----------
x : array_like
Input data that is to be histogrammed, trimmed to range. May not
be empty.
Returns
-------
h : An estimate of the optimal bin width for the given data.
"""
del range # unused
return x.ptp() / (np.log2(x.size) + 1.0)
示例5: _hist_bin_rice
# 需要导入模块: import numpy [as 别名]
# 或者: from numpy import ptp [as 别名]
def _hist_bin_rice(x, range):
"""
Rice histogram bin estimator.
Another simple estimator with no normality assumption. It has better
performance for large data than Sturges, but tends to overestimate
the number of bins. The number of bins is proportional to the cube
root of data size (asymptotically optimal). The estimate depends
only on size of the data.
Parameters
----------
x : array_like
Input data that is to be histogrammed, trimmed to range. May not
be empty.
Returns
-------
h : An estimate of the optimal bin width for the given data.
"""
del range # unused
return x.ptp() / (2.0 * x.size ** (1.0 / 3))
示例6: testPtpExecution
# 需要导入模块: import numpy [as 别名]
# 或者: from numpy import ptp [as 别名]
def testPtpExecution(self):
    """Check tensor ptp against numpy for both axes and the global case."""
    raw = np.arange(4).reshape(2, 2)
    x = arange(4, chunk_size=1).reshape(2, 2)

    # Per-axis reductions must match numpy exactly.
    for axis in (0, 1):
        t = ptp(x, axis=axis)
        res = self.executor.execute_tensor(t, concat=True)[0]
        np.testing.assert_equal(res, np.ptp(raw, axis=axis))

    # Full reduction to a scalar.
    t = ptp(x)
    res = self.executor.execute_tensor(t)[0]
    np.testing.assert_equal(res, np.ptp(raw))
示例7: overlay_segmentation
# 需要导入模块: import numpy [as 别名]
# 或者: from numpy import ptp [as 别名]
def overlay_segmentation(vol, seg):
    """Blend a color-coded segmentation over a greyscale volume.

    Parameters
    ----------
    vol : ndarray, shape (X, Y, Z)
        Intensity volume. Must contain at least two distinct values,
        otherwise ``np.ptp(vol)`` is 0 and the normalization divides by
        zero.
    seg : ndarray, shape (X, Y, Z)
        Label volume; class 1 is drawn red, class 2 blue, other labels
        are left untinted.

    Returns
    -------
    ndarray, shape (X, Y, Z, 3), dtype uint8
        RGB volume with the segmentation alpha-blended on top.
    """
    # Scale volume to greyscale range 0-255.
    vol_greyscale = (255 * (vol - np.min(vol)) / np.ptp(vol)).astype(int)
    # Convert volume to RGB by repeating the grey channel.
    vol_rgb = np.stack([vol_greyscale, vol_greyscale, vol_greyscale], axis=-1)
    # Initialize segmentation in RGB.
    # FIX: np.int was removed in NumPy 1.24 -- use the builtin int dtype.
    shp = seg.shape
    seg_rgb = np.zeros((shp[0], shp[1], shp[2], 3), dtype=int)
    # Set each class to its display color.
    seg_rgb[np.equal(seg, 1)] = [255, 0, 0]
    seg_rgb[np.equal(seg, 2)] = [0, 0, 255]
    # Binary array marking voxels covered by any ROI.
    segbin = np.greater(seg, 0)
    repeated_segbin = np.stack((segbin, segbin, segbin), axis=-1)
    # Weighted sum where there's a value to overlay.
    alpha = 0.3
    vol_overlayed = np.where(
        repeated_segbin,
        np.round(alpha * seg_rgb + (1 - alpha) * vol_rgb).astype(np.uint8),
        np.round(vol_rgb).astype(np.uint8)
    )
    # Return final volume with segmentation overlay.
    return vol_overlayed
示例8: plot_checkpoint
# 需要导入模块: import numpy [as 别名]
# 或者: from numpy import ptp [as 别名]
def plot_checkpoint(self, e):
    """Render a 4x4 grid of generator samples and save it for epoch ``e``."""
    filename = "/data/sample_"+str(e)+".png"
    noise = self.sample_latent_space(16)
    images = self.generator.Generator.predict(noise)

    plt.figure(figsize=(10, 10))
    for idx in range(images.shape[0]):
        plt.subplot(4, 4, idx + 1)
        if self.C == 1:
            # Greyscale sample: rescale into 0-255 before display.
            sample = np.reshape(images[idx, :, :], [self.H, self.W])
            sample = (255 * (sample - np.min(sample)) / np.ptp(sample)).astype(int)
            plt.imshow(sample, cmap='gray')
        elif self.C == 3:
            # RGB sample: same rescaling, keep the channel axis.
            sample = np.reshape(images[idx, :, :, :], [self.H, self.W, self.C])
            sample = (255 * (sample - np.min(sample)) / np.ptp(sample)).astype(int)
            plt.imshow(sample)
        plt.axis('off')
    plt.tight_layout()
    plt.savefig(filename)
    plt.close('all')
    return
示例9: scale_factors
# 需要导入模块: import numpy [as 别名]
# 或者: from numpy import ptp [as 别名]
def scale_factors(self):
    """
    Factors to rescale the triangulation into a unit square.

    Returns
    -------
    k : tuple of 2 floats (kx, ky)
        Scale factors such that
        ``[triangulation.x * kx, triangulation.y * ky]``
        fits exactly inside a unit square.
    """
    triangles = self._triangulation.get_masked_triangles()
    # A node counts as used when it appears in at least one unmasked triangle.
    counts = np.bincount(np.ravel(triangles),
                         minlength=self._triangulation.x.size)
    used = counts != 0
    kx = 1 / np.ptp(self._triangulation.x[used])
    ky = 1 / np.ptp(self._triangulation.y[used])
    return (kx, ky)
示例10: compute_ptp_amp
# 需要导入模块: import numpy [as 别名]
# 或者: from numpy import ptp [as 别名]
def compute_ptp_amp(data):
    """Peak-to-peak (PTP) amplitude of the data (per channel).

    Parameters
    ----------
    data : ndarray, shape (n_channels, n_times)

    Returns
    -------
    output : ndarray, shape (n_channels,)

    Notes
    -----
    Alias of the feature function: **ptp_amp**
    """
    # max - min along the time axis is exactly np.ptp(data, axis=-1).
    return data.max(axis=-1) - data.min(axis=-1)
示例11: fit
# 需要导入模块: import numpy [as 别名]
# 或者: from numpy import ptp [as 别名]
def fit(self, X, y=None):
    """Fit it.

    Stores per-epoch peak-to-peak amplitudes, marks epochs at or below
    ``self.thresh`` as kept, and records the mean of the kept epochs.

    Parameters
    ----------
    X : array, shape (n_epochs, n_times)
        The data for one channel.
    y : None
        Redundant. Necessary to be compatible with sklearn
        API.

    Returns
    -------
    self : instance
        The fitted instance (sklearn convention).
    """
    deltas = np.ptp(X, axis=1)
    self.deltas_ = deltas
    keep = deltas <= self.thresh
    # XXX: actually go over all the folds before setting the min
    # in skopt. Otherwise, may confuse skopt.
    # Hoisted: the original recomputed np.ptp(X, axis=1) twice here even
    # though `deltas` already holds it.
    min_delta = np.min(deltas)
    if self.thresh < min_delta:
        # Threshold below every delta: nothing kept yet, so fall back to
        # keeping the flattest epoch(s).
        assert np.sum(keep) == 0
        keep = deltas <= min_delta
    self.mean_ = _slicemean(X, keep, axis=0)
    return self
示例12: _vote_bad_epochs
# 需要导入模块: import numpy [as 别名]
# 或者: from numpy import ptp [as 别名]
def _vote_bad_epochs(self, epochs, picks):
    """Each channel votes for an epoch as good or bad.

    Parameters
    ----------
    epochs : instance of mne.Epochs
        The epochs object for which bad epochs must be found.
    picks : array-like
        The indices of the channels to consider.

    Returns
    -------
    labels : ndarray, shape (n_epochs, n_channels)
        Per channel/epoch vote: 1 = bad, 0 = good, NaN for channels
        not in ``picks``.
    bad_sensor_counts : ndarray, shape (n_epochs,)
        Number of channels voting each epoch as bad.
    """
    # NaN marks channels that were never considered (not in picks).
    labels = np.zeros((len(epochs), len(epochs.ch_names)))
    labels.fill(np.nan)
    bad_sensor_counts = np.zeros((len(epochs),))
    this_ch_names = [epochs.ch_names[p] for p in picks]
    # Peak-to-peak amplitude over time for each picked channel, transposed
    # to (n_picked_channels, n_epochs).
    deltas = np.ptp(epochs.get_data()[:, picks], axis=-1).T
    # Per-channel rejection thresholds learned earlier (self.threshes_).
    threshes = [self.threshes_[ch_name] for ch_name in this_ch_names]
    for ch_idx, (delta, thresh) in enumerate(zip(deltas, threshes)):
        # Epochs whose amplitude exceeds this channel's threshold are bad.
        bad_epochs_idx = np.where(delta > thresh)[0]
        labels[:, picks[ch_idx]] = 0
        labels[bad_epochs_idx, picks[ch_idx]] = 1
    # Tally the bad votes per epoch across all channels.
    bad_sensor_counts = np.sum(labels == 1, axis=1)
    return labels, bad_sensor_counts
示例13: load_image
# 需要导入模块: import numpy [as 别名]
# 或者: from numpy import ptp [as 别名]
def load_image(filename):
    """Load an image as normalized greyscale, with memoization.

    The image is min-max normalized to [0, 1], then passed through an
    inverted sigmoid to sharpen the separation between ink and paper.
    Results are cached in the module-level ``loaded_files`` dict.

    Parameters
    ----------
    filename : str
        Path to the image file.

    Returns
    -------
    ndarray
        Normalized greyscale image in [0, 1].
    """
    if filename in loaded_files:
        return loaded_files[filename]
    # FIX: np.float was removed in NumPy 1.24; the builtin float is the
    # documented replacement (same float64 dtype).
    img = imageio.imread(filename, as_gray=True).astype(float)
    # Normalize the whole image
    img = (img - np.min(img))/np.ptp(img)
    # Normalize on a sigmoid curve to better separate ink from paper
    k = 10
    img = np.sqrt(1 / (1 + np.exp(k * (img - 0.5))))
    loaded_files[filename] = img
    return img
# Pull out the image of a single character.
# Each character has multiple images, specify index (0-5) to choose one
示例14: compute_group
# 需要导入模块: import numpy [as 别名]
# 或者: from numpy import ptp [as 别名]
def compute_group(cls, data, scales, **params):
    """Compute the density of y values for one group (violin statistic)."""
    if len(data) == 0:
        return pd.DataFrame()

    weight = data.get('weight')
    # Trim limits the density support to the observed y range; otherwise
    # use the full dimension of the y scale.
    if params['trim']:
        range_y = data['y'].min(), data['y'].max()
    else:
        range_y = scales.y.dimension()

    dens = compute_density(data['y'], weight, range_y, **params)
    # The density is computed along y: swap axes so x carries the group
    # center and y carries the support.
    dens['y'] = dens['x']
    dens['x'] = np.mean([data['x'].min(), data['x'].max()])

    # Compute width if x has multiple values
    if len(np.unique(data['x'])) > 1:
        dens['width'] = np.ptp(data['x']) * 0.9
    return dens
示例15: test_swarmspan
# 需要导入模块: import numpy [as 别名]
# 或者: from numpy import ptp [as 别名]
def test_swarmspan():
    """Compare swarm y-spans between seaborn and the dabest plot."""
    print('Testing swarmspans')

    base_mean = np.random.randint(10, 101)
    seed, ptp, df = create_dummy_dataset(base_mean=base_mean)
    print('\nSeed = {}; base mean = {}'.format(seed, base_mean))

    for c in df.columns[1:-1]:
        print('{}...'.format(c))

        # Reference swarmplot drawn with seaborn.
        f1, swarmplt = plt.subplots(1)
        sns.swarmplot(data=df[[df.columns[0], c]], ax=swarmplt)
        sns_yspans = [get_swarm_yspans(coll) for coll in swarmplt.collections]

        # Plot under test.
        f2, b = _api.plot(data=df, idx=(df.columns[0], c))
        dabest_yspans = [get_swarm_yspans(coll)
                         for coll in f2.axes[0].collections]

        for j, span in enumerate(sns_yspans):
            assert span == pytest.approx(dabest_yspans[j])