This article collects typical usage examples of the numpy.transpose method in Python. If you have been wondering what exactly numpy.transpose does, how to use it, or where to find real examples of it in use, the curated method examples below may help. You can also explore further usage examples from the numpy module that this method belongs to.
The following shows 15 code examples of the numpy.transpose method, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better Python code examples.
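Before the examples, here is a minimal, self-contained sketch (toy arrays, illustrative shapes only) of what numpy.transpose does: with no axes argument it reverses the order of an array's axes, which for a 2-D array is the familiar matrix transpose (the same as the .T attribute); with an explicit axes tuple it permutes the axes in the order given.
import numpy as np

a = np.arange(6).reshape(2, 3)               # shape (2, 3)
print(np.transpose(a).shape)                 # (3, 2), same result as a.T

b = np.zeros((4, 3, 2))                      # shape (4, 3, 2)
print(np.transpose(b).shape)                 # (2, 3, 4): axes reversed by default
print(np.transpose(b, (2, 0, 1)).shape)      # (2, 4, 3): explicit axes permutation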
Example 1: similarity_label
# Required import: import numpy [as alias]
# or: from numpy import transpose [as alias]
def similarity_label(self, words, normalization=True):
"""
you can calculate more than one word at the same time.
"""
if self.model is None:
raise Exception('no model.')
if isinstance(words, string_types):
words=[words]
vectors=np.transpose(self.model.wv.__getitem__(words))
if normalization:
unit_vector=unitvec(vectors,ax=0) # roughly twice as fast as the original column-by-column loop below
#unit_vector=np.zeros((len(vectors),len(words)))
#for i in range(len(words)):
# unit_vector[:,i]=matutils.unitvec(vectors[:,i])
dists=np.dot(self.Label_vec_u, unit_vector)
else:
dists=np.dot(self.Label_vec, vectors)
return dists
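unitvec(vectors, ax=0) is a helper defined elsewhere in this project rather than a numpy or gensim function; assuming it simply rescales each column of the 2-D vectors array (one column per word) to unit L2 norm, a minimal stand-in could look like the sketch below (the function name and eps parameter are illustrative).
import numpy as np

def unitvec_columns(vectors, eps=1e-12):
    # hypothetical stand-in: normalize every column to unit L2 norm
    norms = np.linalg.norm(vectors, axis=0, keepdims=True)
    return vectors / np.maximum(norms, eps)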
Example 2: train_lr_rfeinman
# Required import: import numpy [as alias]
# or: from numpy import transpose [as alias]
def train_lr_rfeinman(densities_pos, densities_neg, uncerts_pos, uncerts_neg):
"""
Train a logistic regression classifier on two features per sample (a density
score and an uncertainty score), given one set of positive and one set of
negative samples.
:param densities_pos: density scores of the positive samples
:param densities_neg: density scores of the negative samples
:param uncerts_pos: uncertainty scores of the positive samples
:param uncerts_neg: uncertainty scores of the negative samples
:return: the stacked feature matrix, the label vector (0 = negative, 1 = positive), and the fitted LogisticRegressionCV model
"""
values_neg = np.concatenate(
(densities_neg.reshape((1, -1)),
uncerts_neg.reshape((1, -1))),
axis=0).transpose([1, 0])
values_pos = np.concatenate(
(densities_pos.reshape((1, -1)),
uncerts_pos.reshape((1, -1))),
axis=0).transpose([1, 0])
values = np.concatenate((values_neg, values_pos))
labels = np.concatenate(
(np.zeros_like(densities_neg), np.ones_like(densities_pos)))
lr = LogisticRegressionCV(n_jobs=-1).fit(values, labels)
return values, labels, lr
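The reshape((1, -1)) followed by transpose([1, 0]) is simply a way of stacking two 1-D score vectors into an (n_samples, 2) feature matrix, one row per sample and one column per feature. A quick shape check with made-up numbers:
import numpy as np

densities = np.array([0.1, 0.2, 0.3])
uncerts = np.array([1.0, 2.0, 3.0])
feats = np.concatenate((densities.reshape((1, -1)),
                        uncerts.reshape((1, -1))), axis=0).transpose([1, 0])
print(feats.shape)    # (3, 2)
print(feats[0])       # [0.1 1. ]: density and uncertainty of the first sample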
Example 3: train
# Required import: import numpy [as alias]
# or: from numpy import transpose [as alias]
def train(self, inputs_list, targets_list):
inputs = np.array(inputs_list, ndmin=2).T
targets = np.array(targets_list, ndmin=2).T
hidden_inputs = np.dot(self.wih, inputs)
hidden_outputs = self.activation_function(hidden_inputs)
final_inputs = np.dot(self.who, hidden_outputs)
final_outputs = self.activation_function(final_inputs)
output_errors = targets - final_outputs
hidden_errors = np.dot(self.who.T, output_errors)
self.who += self.lr * np.dot((output_errors *
final_outputs *
(1.0 - final_outputs)), np.transpose(hidden_outputs))
self.wih += self.lr * np.dot((hidden_errors *
hidden_outputs *
(1.0 - hidden_outputs)), np.transpose(inputs))
pass
# query
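The two np.transpose calls in train implement the outer products of the backpropagation weight updates: a column vector of per-neuron error terms multiplied by the transposed (row) vector of the previous layer's outputs yields a matrix with the same shape as the weight matrix being updated. A minimal shape sketch with made-up layer sizes:
import numpy as np

error_term = np.random.rand(4, 1)       # 4 output neurons, single training sample
hidden_out = np.random.rand(3, 1)       # 3 hidden neurons
delta_w = np.dot(error_term, np.transpose(hidden_out))
print(delta_w.shape)                    # (4, 3), matching a 4x3 output weight matrix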
Example 4: jacobian
# Required import: import numpy [as alias]
# or: from numpy import transpose [as alias]
def jacobian(self, p, into=None):
# transpose to be 3 x 2 x n
p = np.transpose(np.reshape(p, (-1, 3, 2)), (1,2,0))
# First, get the two legs...
(dx_ab, dy_ab) = p[1] - p[0]
(dx_ac, dy_ac) = p[2] - p[0]
(dx_bc, dy_bc) = p[2] - p[1]
# now, the area is half the z-value of the cross-product...
sarea0 = 0.5 * (dx_ab*dy_ac - dx_ac*dy_ab)
# but we want to abs it
dsarea0 = np.sign(sarea0)
z = np.transpose([[-dy_bc,dx_bc], [dy_ac,-dx_ac], [-dy_ab,dx_ab]], (2,0,1))
z = times(0.5*dsarea0, z)
m = numel(p)
n = p.shape[2]
ii = (np.arange(n) * np.ones([6, n])).T.flatten()
z = sps.csr_matrix((z.flatten(), (ii, np.arange(len(ii)))), shape=(n, m))
return safe_into(into, z)
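The reshape and transpose at the top of jacobian turn a flat parameter vector holding n triangles (three 2-D vertices each) into a 3 x 2 x n stack, so that p[0], p[1] and p[2] address vertices a, b and c of every triangle at once. A quick shape check with a random parameter vector:
import numpy as np

n = 5                                       # five triangles
p_flat = np.random.rand(n * 3 * 2)          # flattened (vertex, x/y) parameters
p = np.transpose(np.reshape(p_flat, (-1, 3, 2)), (1, 2, 0))
print(p.shape)                              # (3, 2, 5): vertex, coordinate, triangle
dx_ab, dy_ab = p[1] - p[0]                  # leg a->b, per triangle
print(dx_ab.shape, dy_ab.shape)             # (5,) (5,)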
Example 5: from_logeccen
# Required import: import numpy [as alias]
# or: from numpy import transpose [as alias]
def from_logeccen(logecc, vmin=0, vmax=90, offset=0.75):
'''
from_logeccen(logecc) yields a rescaled linear-space version of the log-eccentricity value (or
values) logecc.
from_logeccen(logxy_matrix) rescales all the (x,y) points in the given matrix to have
linearly-spaced eccentricity values.
from_logeccen is the inverse of to_logeccen.
'''
if pimms.is_matrix(logecc):
xy = np.asarray(logecc)
trq = xy.shape[0] != 2
xy = np.transpose(xy) if trq else np.asarray(xy)
r = np.sqrt(np.sum(xy**2, axis=0))
esc = from_logeccen(r, vmin=vmin, vmax=vmax, offset=offset)
ecc = zinv(r)
xy = xy * [ecc,ecc] * [esc,esc]
return xy.T if trq else xy
else:
logecc = np.asarray(logecc)
(vmin,vmax,offset) = [np.asarray(u) for u in (vmin,vmax,offset)]
(vmin, vmax) = [np.log(u + offset) for u in (vmin, vmax)]
logecc = logecc*(vmax - vmin) + vmin
return np.exp(logecc) - offset
Example 6: angle_to_cortex
# Required import: import numpy [as alias]
# or: from numpy import transpose [as alias]
def angle_to_cortex(self, theta, rho):
'See help(neuropythy.registration.RetinotopyModel.angle_to_cortex).'
#TODO: This should be made to work correctly with visual area boundaries: this could be done
# by, for each area (e.g., V2) looking at its boundaries (with V1 and V3) and flipping the
# adjacent triangles so that there is complete coverage of each hemifield, guaranteed.
if not pimms.is_vector(theta): return self.angle_to_cortex([theta], [rho])[0]
theta = np.asarray(theta)
rho = np.asarray(rho)
zs = np.asarray(
rho * np.exp([complex(z) for z in 1j * ((90.0 - theta)/180.0*np.pi)]),
dtype=complex)  # the builtin complex replaces the np.complex alias removed in recent numpy
coords = np.asarray([zs.real, zs.imag]).T
if coords.shape[0] == 0: return np.zeros((0, len(self.visual_meshes), 2))
# we step through each area in the forward model and return the appropriate values
tx = self.transform
res = np.transpose(
[self.visual_meshes[area].interpolate(coords, 'cortical_coordinates', method='linear')
for area in sorted(self.visual_meshes.keys())],
(1,0,2))
if tx is not None:
res = np.asarray(
[np.dot(tx, np.vstack((area_xy.T, np.ones(len(area_xy)))))[0:2].T
for area_xy in res])
return res
Example 7: __call__
# Required import: import numpy [as alias]
# or: from numpy import transpose [as alias]
def __call__(self, x, y=None):
if y is not None: x = (x,y)
x = np.asarray(x)
if len(x.shape) == 1: return self([x])[0]
x = np.transpose(x) if x.shape[0] == 2 else x
if not x.flags['WRITEABLE']: x = np.array(x)
crd = self.coordinates
sig = self.sigma
wts = self._weight
res = np.zeros(x.shape[0])
for (sh, qd, bi) in zip(self.spatial_hashes, self.bin_query_distances, self.sigma_bins):
neis = sh.query_ball_point(x, qd)
res += [
np.sum(w * np.exp(-0.5 * d2/s**2))
for (ni,pt) in zip(neis,x)
for ii in [bi[ni]]
for (w,s,d2) in [(wts[ii], sig[ii], np.sum((crd[ii] - pt)**2, axis=1))]]
return res
Example 8: visualize_sampling
# Required import: import numpy [as alias]
# or: from numpy import transpose [as alias]
def visualize_sampling(self,permutations):
max_length = len(permutations[0])
grid = np.zeros([max_length,max_length]) # initialize heatmap grid to 0
transposed_permutations = np.transpose(permutations)
for t, cities_t in enumerate(transposed_permutations): # step t, cities chosen at step t
city_indices, counts = np.unique(cities_t,return_counts=True,axis=0)
for u,v in zip(city_indices, counts):
grid[t][u]+=v # update grid with counts from the batch of permutations
# plot heatmap
fig = plt.figure()
rcParams.update({'font.size': 22})
ax = fig.add_subplot(1,1,1)
ax.set_aspect('equal')
plt.imshow(grid, interpolation='nearest', cmap='gray')
plt.colorbar()
plt.title('Sampled permutations')
plt.ylabel('Time t')
plt.xlabel('City i')
plt.show()
# Heatmap of attention (x=cities; y=steps)
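Because permutations has shape (batch_size, max_length), np.transpose flips it to (max_length, batch_size), so iterating over the result yields, for each time step t, the cities chosen at step t across the whole batch, which is exactly what the heatmap rows count. A tiny batch to illustrate:
import numpy as np

permutations = np.array([[0, 2, 1],          # tour from batch item 0
                         [0, 1, 2]])         # tour from batch item 1
by_step = np.transpose(permutations)         # shape (3, 2): one row per time step
cities, counts = np.unique(by_step[1], return_counts=True)
print(cities, counts)                        # [1 2] [1 1]: at step 1, cities 1 and 2 chosen once each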
Example 9: visualize_sampling
# Required import: import numpy [as alias]
# or: from numpy import transpose [as alias]
def visualize_sampling(self, permutations):
max_length = len(permutations[0])
grid = np.zeros([max_length,max_length]) # initialize heatmap grid to 0
transposed_permutations = np.transpose(permutations)
for t, cities_t in enumerate(transposed_permutations): # step t, cities chosen at step t
city_indices, counts = np.unique(cities_t,return_counts=True,axis=0)
for u,v in zip(city_indices, counts):
grid[t][u]+=v # update grid with counts from the batch of permutations
# plot heatmap
fig = plt.figure()
rcParams.update({'font.size': 22})
ax = fig.add_subplot(1,1,1)
ax.set_aspect('equal')
plt.imshow(grid, interpolation='nearest', cmap='gray')
plt.colorbar()
plt.title('Sampled permutations')
plt.ylabel('Time t')
plt.xlabel('City i')
plt.show()
Example 10: superpose_array
# Required import: import numpy [as alias]
# or: from numpy import transpose [as alias]
def superpose_array(refArray, array, check=False):
"""
Superpose arrays by calculating the rotation matrix and the
translations that minimize the root mean square deviation between and
array of vectors and a reference array.
:Parameters:
#. refArray (numpy.ndarray): the NX3 reference array to superpose to.
#. array (numpy.ndarray): the NX3 array to calculate the
transformation of.
#. check (boolean): whether to check arguments before generating
points.
:Returns:
#. superposedArray (numpy.ndarray): the superposed NX3 array.
"""
rotationMatrix, _,_,_ = get_superposition_transformation(refArray=refArray, array=array, check=check)
return np.dot( rotationMatrix, np.transpose(array).\
reshape(1,3,-1)).transpose().reshape(-1,3)
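The reshape(1,3,-1) / transpose / reshape chain applies the 3x3 rotation to every row of the Nx3 array; assuming rotationMatrix is an ordinary 3x3 matrix, this is equivalent to the more direct np.dot(array, rotationMatrix.T). A quick equivalence check with a toy rotation about the z axis:
import numpy as np

theta = np.pi / 4
R = np.array([[np.cos(theta), -np.sin(theta), 0.0],
              [np.sin(theta),  np.cos(theta), 0.0],
              [0.0,            0.0,           1.0]])
pts = np.random.rand(7, 3)                              # Nx3 points
chained = np.dot(R, np.transpose(pts).reshape(1, 3, -1)).transpose().reshape(-1, 3)
direct = np.dot(pts, R.T)
print(np.allclose(chained, direct))                     # True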
Example 11: measure_cost
# Required import: import numpy [as alias]
# or: from numpy import transpose [as alias]
def measure_cost(repeat, scipy_trans_lhs, scipy_dns_lhs, func_name, *args, **kwargs):
"""Measure time cost of running a function
"""
mx.nd.waitall()
args_list = []
for arg in args:
args_list.append(arg)
start = time.time()
if scipy_trans_lhs:
args_list[0] = np.transpose(args_list[0]) if scipy_dns_lhs else sp.spmatrix.transpose(args_list[0])
for _ in range(repeat):
func_name(*args_list, **kwargs)
mx.nd.waitall()
end = time.time()
diff = end - start
return diff / repeat
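Depending on scipy_dns_lhs, the left-hand operand is transposed either as a dense ndarray with np.transpose or as a scipy sparse matrix through the spmatrix base class, which for the usual sparse formats yields the same transposed matrix as calling the matrix's own transpose() method. A small illustration of the two paths with toy data:
import numpy as np
import scipy.sparse as sp

dense = np.arange(6).reshape(2, 3)
sparse = sp.csr_matrix(dense)
print(np.transpose(dense).shape)     # (3, 2): dense path
print(sparse.transpose().shape)      # (3, 2): sparse path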
Example 12: decode_topk
# Required import: import numpy [as alias]
# or: from numpy import transpose [as alias]
def decode_topk(self, sess, latest_tokens, enc_top_states, dec_init_states):
"""Return the topK results and new decoder states."""
feed = {
self._enc_top_states: enc_top_states,
self._dec_in_state:
np.squeeze(np.array(dec_init_states)),
self._abstracts:
np.transpose(np.array([latest_tokens])),
self._abstract_lens: np.ones([len(dec_init_states)], np.int32)}
results = sess.run(
[self._topk_ids, self._topk_log_probs, self._dec_out_state],
feed_dict=feed)
ids, probs, states = results[0], results[1], results[2]
new_states = [s for s in states]
return ids, probs, new_states
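np.transpose(np.array([latest_tokens])) turns the per-beam list of most recent token ids (length batch_size) into a (batch_size, 1) column, i.e. one decoder input token per hypothesis. A quick shape check with dummy ids:
import numpy as np

latest_tokens = [7, 3, 9, 1]                      # one token id per beam
col = np.transpose(np.array([latest_tokens]))     # (1, 4) -> (4, 1)
print(col.shape)                                  # (4, 1)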
Example 13: _write_map_files
# Required import: import numpy [as alias]
# or: from numpy import transpose [as alias]
def _write_map_files(b_in, b_out, transform):
cats = get_categories()
env = utils.Foo(padding=10, resolution=5, num_point_threshold=2,
valid_min=-10, valid_max=200, n_samples_per_face=200)
robot = utils.Foo(radius=15, base=10, height=140, sensor_height=120,
camera_elevation_degree=-15)
building_loader = factory.get_dataset('sbpd')
for flip in [False, True]:
b = nav_env.Building(b_out, robot, env, flip=flip,
building_loader=building_loader)
logging.info("building_in: %s, building_out: %s, transform: %d", b_in,
b_out, transform)
maps = _get_semantic_maps(b_in, transform, b.map, flip, cats)
maps = np.transpose(np.array(maps), axes=[1,2,0])
# Write the maps to the cache file.
file_name = '{:s}_{:d}_{:d}_{:d}_{:d}_{:d}_{:d}.pkl'
file_name = file_name.format(b.building_name, b.map.size[0], b.map.size[1],
b.map.origin[0], b.map.origin[1],
b.map.resolution, flip)
out_file = os.path.join(DATA_DIR, 'processing', 'class-maps', file_name)
logging.info('Writing semantic maps to %s.', out_file)
save_variables(out_file, [maps, cats], ['maps', 'cats'], overwrite=True)
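np.transpose(np.array(maps), axes=[1, 2, 0]) moves the leading per-category axis to the end, turning a list of (height, width) semantic maps into a single channels-last (height, width, num_categories) array. A quick shape check:
import numpy as np

maps = [np.zeros((4, 6)) for _ in range(3)]             # 3 category maps, each (4, 6)
stacked = np.transpose(np.array(maps), axes=[1, 2, 0])
print(stacked.shape)                                    # (4, 6, 3)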
Example 14: intersection
# Required import: import numpy [as alias]
# or: from numpy import transpose [as alias]
def intersection(boxes1, boxes2):
"""Compute pairwise intersection areas between boxes.
Args:
boxes1: a numpy array with shape [N, 4] holding N boxes
boxes2: a numpy array with shape [M, 4] holding M boxes
Returns:
a numpy array with shape [N, M] holding the N*M pairwise intersection areas
"""
[y_min1, x_min1, y_max1, x_max1] = np.split(boxes1, 4, axis=1)
[y_min2, x_min2, y_max2, x_max2] = np.split(boxes2, 4, axis=1)
all_pairs_min_ymax = np.minimum(y_max1, np.transpose(y_max2))
all_pairs_max_ymin = np.maximum(y_min1, np.transpose(y_min2))
intersect_heights = np.maximum(
np.zeros(all_pairs_max_ymin.shape),
all_pairs_min_ymax - all_pairs_max_ymin)
all_pairs_min_xmax = np.minimum(x_max1, np.transpose(x_max2))
all_pairs_max_xmin = np.maximum(x_min1, np.transpose(x_min2))
intersect_widths = np.maximum(
np.zeros(all_pairs_max_xmin.shape),
all_pairs_min_xmax - all_pairs_max_xmin)
return intersect_heights * intersect_widths
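np.split leaves each coordinate as an (N, 1) or (M, 1) column, and np.transpose turns the second set into (1, M) rows, so the element-wise minimum and maximum broadcast into full (N, M) grids of pairwise comparisons. A tiny worked example using the intersection function above (coordinates in [y_min, x_min, y_max, x_max] order):
import numpy as np

boxes1 = np.array([[0.0, 0.0, 2.0, 2.0]])               # one 2x2 box
boxes2 = np.array([[1.0, 1.0, 3.0, 3.0],                # overlaps boxes1 in a 1x1 square
                   [5.0, 5.0, 6.0, 6.0]])               # no overlap
print(intersection(boxes1, boxes2))                     # [[1. 0.]]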
Example 15: collectdata
# Required import: import numpy [as alias]
# or: from numpy import transpose [as alias]
def collectdata(self,):
print('Start Collect Data...')
train_x_path = os.path.join(self.input_dir, 'unlabeled_X.bin')
train_xf = open(train_x_path, 'rb')
train_x = np.fromfile(train_xf, dtype=np.uint8)
train_x = np.reshape(train_x, (-1, 3, 96, 96))
train_x = np.transpose(train_x, (0, 3, 2, 1))
idx = 0
for i in range(train_x.shape[0]):
if not self.skipimg:
transform_and_save(img_arr=train_x[i], output_filename=os.path.join(self.unlabeldir, str(idx) + '.jpg'))
self.trainpairlist[os.path.join('images', 'unlabeled', str(idx) + '.jpg')] = 'labels/11.txt'
idx += 1
print('Finished Collect Data...')
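The transpose with axes (0, 3, 2, 1) keeps the batch axis in place, moves the channel axis to the end, and swaps the two 96-pixel spatial axes, turning the (N, 3, 96, 96) array read from the STL-10-style binary file into an (N, 96, 96, 3) stack of images that can be saved as JPEGs. A quick shape check:
import numpy as np

batch = np.zeros((2, 3, 96, 96), dtype=np.uint8)   # as reshaped from the .bin file
images = np.transpose(batch, (0, 3, 2, 1))
print(images.shape)                                # (2, 96, 96, 3)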