This article collects typical usage examples of the Python function skimage.transform.estimate_transform. If you are wondering what estimate_transform does, how to call it, or what it looks like in real code, the hand-picked examples below should help.
Fifteen estimate_transform code examples are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python examples.
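Before the collected examples, here is a minimal, self-contained sketch of the call itself; the point sets are made up purely for illustration:

import numpy as np
from skimage import transform

# four corresponding point pairs (made-up coordinates)
src = np.array([[0, 0], [0, 10], [10, 10], [10, 0]], dtype=float)
dst = np.array([[1, 1], [1, 12], [12, 12], [12, 1]], dtype=float)

# estimate a similarity transform that maps src onto dst
tform = transform.estimate_transform('similarity', src, dst)
print(tform(src))          # src points mapped into the dst frame
print(tform.inverse(dst))  # and mapped back again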
Example 1: test_estimate_transform
def test_estimate_transform():
    for tform in ('euclidean', 'similarity', 'affine', 'projective',
                  'polynomial'):
        estimate_transform(tform, SRC[:2, :], DST[:2, :])

    with pytest.raises(ValueError):
        estimate_transform('foobar', SRC[:2, :], DST[:2, :])
Example 2: test_deprecated_params_attributes
def test_deprecated_params_attributes():
    for t in ('projective', 'affine', 'similarity'):
        tform = estimate_transform(t, SRC, DST)
        assert_equal(tform._matrix, tform.params)

    tform = estimate_transform('polynomial', SRC, DST, order=3)
    assert_equal(tform._params, tform.params)
Example 3: test_deprecated_params_attributes
def test_deprecated_params_attributes():
    for t in ('projective', 'affine', 'similarity'):
        tform = estimate_transform(t, SRC, DST)
        with expected_warnings(['`_matrix`.*deprecated']):
            assert_equal(tform._matrix, tform.params)

    tform = estimate_transform('polynomial', SRC, DST, order=3)
    with expected_warnings(['`_params`.*deprecated']):
        assert_equal(tform._params, tform.params)
Example 4: test_projective_estimation
def test_projective_estimation():
    # exact solution
    tform = estimate_transform('projective', SRC[:4, :], DST[:4, :])
    assert_almost_equal(tform(SRC[:4, :]), DST[:4, :])

    # over-determined
    tform2 = estimate_transform('projective', SRC, DST)
    assert_almost_equal(tform2.inverse(tform2(SRC)), SRC)

    # via estimate method
    tform3 = ProjectiveTransform()
    tform3.estimate(SRC, DST)
    assert_almost_equal(tform3.params, tform2.params)
Example 5: test_affine_estimation
def test_affine_estimation():
    # exact solution
    tform = estimate_transform('affine', SRC[:3, :], DST[:3, :])
    assert_array_almost_equal(tform(SRC[:3, :]), DST[:3, :])

    # over-determined
    tform2 = estimate_transform('affine', SRC, DST)
    assert_array_almost_equal(tform2.inverse(tform2(SRC)), SRC)

    # via estimate method
    tform3 = AffineTransform()
    tform3.estimate(SRC, DST)
    assert_array_almost_equal(tform3._matrix, tform2._matrix)
Example 6: main
# assumed imports for this snippet (not shown in the original)
import os
import numpy as np
import matplotlib.pyplot as plt
from skimage import io, transform


def main(base_dir):
    BASE_DIR = base_dir
    # Load the set of pictures
    ic = io.ImageCollection(BASE_DIR + '*.JPG')

    # Select points on the first picture
    f, ax = plt.subplots(1, 1)
    ax.get_xaxis().set_visible(False)
    ax.get_yaxis().set_visible(False)
    ax.autoscale(enable=True, axis='both', tight=True)
    plt.tight_layout(pad=0.4, w_pad=0.0, h_pad=0.0)
    ax.imshow(ic[0])
    coords = [plt.ginput(8, timeout=0)]
    plt.close()

    # Load the first picture side-by-side with the next one, select points,
    # then scroll through the images one by one.
    for i, img in enumerate(ic[1:]):
        ax1 = plt.subplot2grid((6, 10), (0, 1), rowspan=6, colspan=9)
        ax0 = plt.subplot2grid((6, 10), (0, 0))
        for ax in [ax0, ax1]:
            ax.get_xaxis().set_visible(False)
            ax.get_yaxis().set_visible(False)
        plt.tight_layout(pad=0.4, w_pad=0.0, h_pad=0.0)
        #f, (ax0, ax1) = plt.subplots(1, 2)
        ax0.imshow(ic[i])
        for coord in coords[i]:
            ax0.scatter(coord[0], coord[1])
        ax1.imshow(img)
        coords.append(plt.ginput(8, timeout=0))
        plt.close()

    # Use a similarity transformation to transform each one.
    if not os.path.exists(BASE_DIR + 'corrected'):
        os.mkdir(BASE_DIR + 'corrected')
    np.save(BASE_DIR + 'corrected/coords.npy', coords)
    io.imsave(BASE_DIR + 'corrected/0.jpg', ic[0])
    for i, img in enumerate(ic[1:]):
        tf = transform.estimate_transform('similarity',
                                          np.array(coords[0]),
                                          np.array(coords[i + 1]))
        # Use a translation transformation to center both images for display purposes
        img_warped = transform.warp(img, inverse_map=tf,
                                    output_shape=(1728, 3072))
        print(BASE_DIR + 'corrected/%d.jpg' % (i + 1))
        print(img_warped)
        io.imsave(BASE_DIR + 'corrected/%d.jpg' % (i + 1), img_warped)
Example 7: estimate_coordinate_transform
def estimate_coordinate_transform(source, target, method, **method_kwargs):
    """Calculate a transformation from a source list of coordinates to a
    target list of coordinates.

    Parameters
    ----------
    source : Nx2 array
        (x, y) coordinate pairs from the source image.
    target : Nx2 array
        (x, y) coordinate pairs from the target image. Must have the same
        shape as `source`.
    method : string
        Method to use for transform estimation.
    **method_kwargs : optional
        Additional keyword arguments specific to the chosen method, e.g.
        `order` for a polynomial transform estimation.

    Returns
    -------
    transform : skimage.transform._geometric.GeometricTransform
        An skimage transform object.

    See Also
    --------
    skimage.transform.estimate_transform
    """
    return tf.estimate_transform(method, source, target, **method_kwargs)
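A brief usage sketch of this wrapper; the point arrays and the `tf` alias for skimage.transform are assumptions for illustration:

import numpy as np
import skimage.transform as tf

src = np.array([[0, 0], [0, 5], [5, 5], [5, 0]], dtype=float)
dst = src + [2, 3]  # the same points, shifted

tform = estimate_coordinate_transform(src, dst, 'similarity')
print(tform(src))  # approximately equal to dst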
Example 8: gen_data
def gen_data(name):
    reftracker = scio.loadmat('data/images_tracker.00047.mat')['tracker']
    desttracker = scio.loadmat('data/images_tracker/' + name + '.mat')['tracker']
    refpos = np.floor(np.mean(reftracker, 0))
    xxc, yyc = np.meshgrid(np.arange(1, 1801, dtype=int), np.arange(1, 2001, dtype=int))
    # normalize x and y channels
    xxc = (xxc - 600 - refpos[0]) * 1.0 / 600
    yyc = (yyc - 600 - refpos[1]) * 1.0 / 600
    maskimg = Image.open('data/meanmask.png')
    maskc = np.array(maskimg, dtype=float)
    maskc = np.pad(maskc, (600, 600), 'minimum')
    # warp is an inverse transform, and so src and dst must be reversed here
    tform = transform.estimate_transform('affine', desttracker + 600, reftracker + 600)
    img_data = skio.imread('data/images_data/' + name + '.jpg')
    # save org mat
    warpedxx = transform.warp(xxc, tform, output_shape=xxc.shape)
    warpedyy = transform.warp(yyc, tform, output_shape=xxc.shape)
    warpedmask = transform.warp(maskc, tform, output_shape=xxc.shape)
    warpedxx = warpedxx[600:1400, 600:1200]
    warpedyy = warpedyy[600:1400, 600:1200]
    warpedmask = warpedmask[600:1400, 600:1200]
    img_h, img_w, _ = img_data.shape
    # mean-subtracted BGR channels, scaled to roughly [-0.5, 0.5]
    mat = np.zeros((img_h, img_w, 3), dtype=float)
    mat[:, :, 0] = (img_data[:, :, 2] * 1.0 - 104.008) / 255
    mat[:, :, 1] = (img_data[:, :, 1] * 1.0 - 116.669) / 255
    mat[:, :, 2] = (img_data[:, :, 0] * 1.0 - 122.675) / 255
    scio.savemat('portraitFCN_data/' + name + '.mat', {'img': mat})
    # the "+" variant additionally stacks the warped position and mask channels
    mat_plus = np.zeros((img_h, img_w, 6), dtype=float)
    mat_plus[:, :, 0:3] = mat
    mat_plus[:, :, 3] = warpedxx
    mat_plus[:, :, 4] = warpedyy
    mat_plus[:, :, 5] = warpedmask
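The comment in this example about reversing src and dst reflects how skimage.transform.warp works: the transform passed as inverse_map must map output (destination) coordinates back to input (source) coordinates. A minimal sketch, with made-up corner points, of the two equivalent ways to express that:

import numpy as np
from skimage import data, transform

image = data.camera()
src = np.array([[0, 0], [0, 511], [511, 511], [511, 0]], dtype=float)
dst = src * 0.5 + 100  # made-up target positions

# Option 1: estimate src -> dst and pass its inverse to warp.
tform = transform.estimate_transform('similarity', src, dst)
warped_a = transform.warp(image, inverse_map=tform.inverse)

# Option 2: estimate dst -> src directly, as the example above does.
tform_rev = transform.estimate_transform('similarity', dst, src)
warped_b = transform.warp(image, inverse_map=tform_rev)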
Example 9: extract_sift
def extract_sift(image, lm=None, shape=[200, 300], fix_points='outer', ttype='affine'):
    # out_shape, mean_face, p and landmarks are assumed to be defined at module level
    if lm is None:
        lm = landmarks(image)
    if np.any(np.isnan(lm)):
        return (np.nan * np.ones([out_shape, out_shape, image.shape[2]]).astype(np.float16),
                np.nan * np.zeros_like(lm))

    # build destination landmarks from the mean face, scaled to the target shape
    dst = mean_face[:, p[fix_points]]
    dst = dst - dst.mean(1)[:, None]
    dst = dst / np.abs(dst).max()
    dst *= shape[0] / 2
    dst += shape[1] / 2
    print(dst.min())
    print(dst.max())

    src = lm[:, p[fix_points]]
    tform = transform.estimate_transform(ttype, src.T, dst.T)
    lm_reg = tform(lm.T).T
    image = transform.warp(image, inverse_map=tform.inverse, output_shape=[shape[1], shape[1]])
    image = exposure.equalize_hist(image, mask=image != 0)

    # mark a small square around each registered landmark
    S = 12
    for l1, l2 in lm_reg.T:
        x = np.arange(l2 - S, l2 + S).astype(int)
        y = np.arange(l1 - S, l1 + S).astype(int)
        for x_ in x:
            for y_ in y:
                image[x_, y_, 0] = 255
    return image, lm_reg
Example 10: infer
def infer(edge_image, edge_lengths, mu, phi, sigma2,
          update_slice=slice(None),
          scale_estimate=None,
          rotation=0,
          translation=(0, 0)):
    # edge_points = np.array(np.where(edge_image)).T
    # edge_points[:, [0, 1]] = edge_points[:, [1, 0]]
    # edge_score = edge_image.shape[0] * np.exp(-edge_lengths[edge_image] / (0.25 * edge_image.shape[0])).reshape(-1, 1)
    # edge_points = np.concatenate((edge_points, edge_score), axis=1)
    #
    # edge_nn = NearestNeighbors(n_neighbors=1).fit(edge_points)
    edge_near = scipy.ndimage.distance_transform_edt(~edge_image)
    edge_near_blur = gaussian(edge_near, 2)
    Gy, Gx = np.gradient(edge_near_blur)
    mag = np.sqrt(np.power(Gy, 2) + np.power(Gx, 2))

    if scale_estimate is None:
        scale_estimate = min(edge_image.shape) * 4

    mu = (mu.reshape(-1, 2) - mu.reshape(-1, 2).mean(axis=0)).reshape(-1, 1)
    average_distance = np.sqrt(np.power(mu.reshape(-1, 2), 2).sum(axis=1)).mean()
    scale_estimate /= average_distance * np.sqrt(2)

    h = np.zeros((phi.shape[1], 1))
    psi = SimilarityTransform(scale=scale_estimate, rotation=rotation, translation=translation)

    while True:
        w = (mu + phi @ h).reshape(-1, 2)
        image_points = matrix_transform(w, psi.params)[update_slice, :]
        image_points = np.concatenate((image_points, np.zeros((image_points.shape[0], 1))), axis=1)
        # closest_edge_point_indices = edge_nn.kneighbors(image_points)[1].flatten()
        # closest_edge_points = edge_points[closest_edge_point_indices, :2]
        closest_edge_points = gradient_step(Gy, Gx, mag, image_points)

        w = mu.reshape(-1, 2)
        psi = estimate_transform('similarity', w[update_slice, :], closest_edge_points)

        image_points = matrix_transform(w, psi.params)[update_slice, :]
        image_points = np.concatenate((image_points, np.zeros((image_points.shape[0], 1))), axis=1)
        # closest_edge_point_indices = edge_nn.kneighbors(image_points)[1].flatten()
        # closest_edge_points = edge_points[closest_edge_point_indices, :2]
        closest_edge_points = gradient_step(Gy, Gx, mag, image_points)

        mu_slice = mu.reshape(-1, 2)[update_slice, :].reshape(-1, 1)
        K = phi.shape[-1]
        phi_full = phi.reshape(-1, 2, K)
        phi_slice = phi_full[update_slice, :].reshape(-1, K)
        h = update_h(sigma2, phi_slice, closest_edge_points, mu_slice, psi)

        w = (mu + phi @ h).reshape(-1, 2)
        image_points = matrix_transform(w, psi.params)
        update_slice = yield image_points, closest_edge_points
Example 11: projective
def projective(reference, points, bone, properties_to_transform):
    """
    Estimates a projective transform.
    """
    tform = tf.estimate_transform('projective', points, reference)
    transformed = list(map(tform, [bone[p] for p in properties_to_transform]))
    error = get_error(points, reference, tform)
    return transformed, error
Example 12: test_similarity_estimation
def test_similarity_estimation():
    # exact solution
    tform = estimate_transform('similarity', SRC[:2, :], DST[:2, :])
    assert_array_almost_equal(tform(SRC[:2, :]), DST[:2, :])
    assert_equal(tform._matrix[0, 0], tform._matrix[1, 1])
    assert_equal(tform._matrix[0, 1], - tform._matrix[1, 0])

    # over-determined
    tform2 = estimate_transform('similarity', SRC, DST)
    assert_array_almost_equal(tform2.inverse(tform2(SRC)), SRC)
    assert_equal(tform2._matrix[0, 0], tform2._matrix[1, 1])
    assert_equal(tform2._matrix[0, 1], - tform2._matrix[1, 0])

    # via estimate method
    tform3 = SimilarityTransform()
    tform3.estimate(SRC, DST)
    assert_array_almost_equal(tform3._matrix, tform2._matrix)
Example 13: test_euclidean_estimation
def test_euclidean_estimation():
    # exact solution
    tform = estimate_transform('euclidean', SRC[:2, :], SRC[:2, :] + 10)
    assert_almost_equal(tform(SRC[:2, :]), SRC[:2, :] + 10)
    assert_almost_equal(tform.params[0, 0], tform.params[1, 1])
    assert_almost_equal(tform.params[0, 1], - tform.params[1, 0])

    # over-determined
    tform2 = estimate_transform('euclidean', SRC, DST)
    assert_almost_equal(tform2.inverse(tform2(SRC)), SRC)
    assert_almost_equal(tform2.params[0, 0], tform2.params[1, 1])
    assert_almost_equal(tform2.params[0, 1], - tform2.params[1, 0])

    # via estimate method
    tform3 = EuclideanTransform()
    tform3.estimate(SRC, DST)
    assert_almost_equal(tform3.params, tform2.params)
Example 14: similarity
def similarity(reference, points, bone, properties_to_transform):
    """
    Estimates a similarity transform.
    """
    tform = tf.estimate_transform('similarity', points, reference)
    transformed = list(map(tform, [bone[p] for p in properties_to_transform]))
    error = get_error(points, reference, tform)
    return transformed, error
Example 15: test_polynomial_estimation
def test_polynomial_estimation():
    # over-determined
    tform = estimate_transform('polynomial', SRC, DST, order=10)
    assert_array_almost_equal(tform(SRC), DST, 6)

    # via estimate method
    tform2 = PolynomialTransform()
    tform2.estimate(SRC, DST, order=10)
    assert_array_almost_equal(tform2._params, tform._params)