

Python numpy.fliplr Method Code Examples

This article collects typical usage examples of the numpy.fliplr method in Python. If you have been wondering how exactly numpy.fliplr is used, how to call it, or what real-world examples look like, the curated code examples below may help. You can also explore further usage examples from the numpy package that this method belongs to.


Fifteen code examples of the numpy.fliplr method are presented below, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better Python code examples.
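
Before diving into the project examples, here is a minimal standalone sketch of what numpy.fliplr itself does: it reverses the order of the columns (the second axis) of an array with at least two dimensions, which for an image of shape (height, width, channels) amounts to a horizontal mirror.

import numpy as np

a = np.arange(6).reshape(2, 3)
# a:        [[0 1 2]
#            [3 4 5]]
flipped = np.fliplr(a)
# columns reversed, rows unchanged:
#           [[2 1 0]
#            [5 4 3]]

# For an (H, W, C) image, fliplr flips along the width axis.
img = np.random.rand(4, 5, 3)
mirrored = np.fliplr(img)
assert mirrored.shape == img.shape
assert np.array_equal(mirrored[:, ::-1, :], img)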

Example 1: load_data

# Required module: import numpy [as alias]
# Or: from numpy import fliplr [as alias]
def load_data(self, domain, batch_size=1, is_testing=False):
        data_type = "train%s" % domain if not is_testing else "test%s" % domain
        path = glob('./datasets/%s/%s/*' % (self.dataset_name, data_type))

        batch_images = np.random.choice(path, size=batch_size)

        imgs = []
        for img_path in batch_images:
            img = self.imread(img_path)
            if not is_testing:
                img = scipy.misc.imresize(img, self.img_res)

                if np.random.random() > 0.5:
                    img = np.fliplr(img)
            else:
                img = scipy.misc.imresize(img, self.img_res)
            imgs.append(img)

        imgs = np.array(imgs)/127.5 - 1.

        return imgs 
Developer: eriklindernoren, Project: Keras-GAN, Lines: 23, Source: data_loader.py

Example 2: prox_soft_symmetry

# Required module: import numpy [as alias]
# Or: from numpy import fliplr [as alias]
def prox_soft_symmetry(X, step, strength=1):
    """Soft version of symmetry
    Using a `strength` that varies from 0 to 1,
    with 0 meaning no symmetry enforced at all and
    1  being completely symmetric, the user can customize
    the level of symmetry required for a component
    """
    pads = [[0, 0], [0, 0]]
    slices = [slice(None), slice(None)]
    if X.shape[0] % 2 == 0:
        pads[0][1] = 1
        slices[0] = slice(0, X.shape[0])
    if X.shape[1] % 2 == 0:
        pads[1][1] = 1
        slices[1] = slice(0, X.shape[1])

    X = fft.fast_zero_pad(X, pads)
    Xs = np.fliplr(np.flipud(X))
    X = 0.5 * strength * (X + Xs) + (1 - strength) * X
    return X[tuple(slices)] 
Developer: pmelchior, Project: scarlet, Lines: 22, Source: operator.py
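
The heart of Example 2 is blending X with its 180-degree rotation, built from np.fliplr followed by np.flipud. The following is a minimal numpy-only sketch of that blend; it deliberately omits scarlet's fft.fast_zero_pad step, so it assumes both dimensions of X are odd (no padding needed) and is not a drop-in replacement for the operator above.

import numpy as np

def soft_symmetry_sketch(X, strength=1.0):
    # Blend X with its 180-degree rotation. strength=0 returns X unchanged;
    # strength=1 makes the result fully symmetric about the central pixel.
    Xs = np.fliplr(np.flipud(X))
    return 0.5 * strength * (X + Xs) + (1 - strength) * X

X = np.arange(9, dtype=float).reshape(3, 3)
sym = soft_symmetry_sketch(X, strength=1.0)
# A fully symmetric result equals its own 180-degree rotation.
assert np.allclose(sym, np.fliplr(np.flipud(sym)))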

Example 3: save_movie_to_frame

# Required module: import numpy [as alias]
# Or: from numpy import fliplr [as alias]
def save_movie_to_frame(images, filename, idx=0, cmap='Blues'):
    # Collect to single image
    image = movie_to_frame(images[idx])

    # Flip it
    # image = np.fliplr(image)
    # image = np.flipud(image)

    f = plt.figure(figsize=[12, 12])
    plt.imshow(image, cmap=plt.cm.get_cmap(cmap), interpolation='none', vmin=0, vmax=1)

    plt.axis('image')
    plt.xticks([])
    plt.yticks([])
    plt.savefig(filename, format='png', bbox_inches='tight', dpi=80)
    plt.close(f) 
Developer: simonkamronn, Project: kvae, Lines: 18, Source: movie.py

Example 4: rotation_matrix

# Required module: import numpy [as alias]
# Or: from numpy import fliplr [as alias]
def rotation_matrix(self):
        ''' Return rotation matrix between array indices and mm

        Note that we swap the two columns of the 'ImageOrientPatient'
        when we create the rotation matrix.  This takes into account
        the slightly odd ij transpose construction of the DICOM
        orientation fields - see doc/theory/dicom_orientation.rst.
        '''
        iop = self.image_orient_patient
        s_norm = self.slice_normal
        if None in (iop, s_norm):
            return None
        R = np.eye(3)
        # np.fliplr(iop) gives matrix F in
        # doc/theory/dicom_orientation.rst. The fliplr accounts for the
        # fact that the first column in ``iop`` refers to changes in
        # column index, and the second to changes in row index.
        R[:,:2] = np.fliplr(iop)
        R[:,2] = s_norm
        # check this is in fact a rotation matrix
        assert np.allclose(np.eye(3),
                           np.dot(R, R.T),
                           atol=1e-6)
        return R 
Developer: ME-ICA, Project: me-ica, Lines: 26, Source: dicomwrappers.py
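
To see what the fliplr call in Example 4 is doing, here is a small sketch with made-up direction cosines (not taken from a real DICOM file): the first column of iop is the direction of increasing column index, the second is the direction of increasing row index, and fliplr swaps them so the rotation matrix columns follow array index order (row, column, slice).

import numpy as np

# Hypothetical ImageOrientationPatient direction cosines and slice normal.
iop = np.array([[0., 1.],
                [1., 0.],
                [0., 0.]])
s_norm = np.array([0., 0., 1.])

R = np.eye(3)
R[:, :2] = np.fliplr(iop)   # swap the column/row direction columns
R[:, 2] = s_norm

# Same orthogonality check as in the method above.
assert np.allclose(np.eye(3), R @ R.T, atol=1e-6)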

Example 5: test_flip_axis

# Required module: import numpy [as alias]
# Or: from numpy import fliplr [as alias]
def test_flip_axis():
    a = np.arange(24).reshape((2,3,4))
    assert_array_equal(
        flip_axis(a),
        np.flipud(a))
    assert_array_equal(
        flip_axis(a, axis=0),
        np.flipud(a))
    assert_array_equal(
        flip_axis(a, axis=1),
        np.fliplr(a))
    # check accepts array-like
    assert_array_equal(
        flip_axis(a.tolist(), axis=0),
        np.flipud(a))
    # third dimension
    b = a.transpose()
    b = np.flipud(b)
    b = b.transpose()
    assert_array_equal(flip_axis(a, axis=2), b) 
Developer: ME-ICA, Project: me-ica, Lines: 22, Source: test_orientations.py
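
As a side note, recent NumPy versions also ship np.flip(a, axis=...), which generalizes flipud (axis 0) and fliplr (axis 1) to an arbitrary axis; a quick sanity check mirroring the test above:

import numpy as np

a = np.arange(24).reshape((2, 3, 4))
assert np.array_equal(np.flip(a, axis=0), np.flipud(a))
assert np.array_equal(np.flip(a, axis=1), np.fliplr(a))
# Flipping the third axis, equivalent to the transpose trick in the test above.
b = np.flipud(a.transpose()).transpose()
assert np.array_equal(np.flip(a, axis=2), b)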

Example 6: decompose_projection_matrix

# Required module: import numpy [as alias]
# Or: from numpy import fliplr [as alias]
def decompose_projection_matrix(P, return_t=True):
  if P.shape[0] != 3 or P.shape[1] != 4:
    raise Exception('P has to be 3x4')
  M = P[:, :3]
  C = -np.linalg.inv(M) @ P[:, 3:]

  R,K = np.linalg.qr(np.flipud(M).T)
  K = np.flipud(K.T)
  K = np.fliplr(K)
  R = np.flipud(R.T)

  T = np.diag(np.sign(np.diag(K)))
  K = K @ T
  R = T @ R

  if np.linalg.det(R) < 0:
    R *= -1

  K /= K[2,2]
  if return_t:
    return K, R, cameracenter_to_translation(R, C)
  else:
    return K, R, C 
Developer: autonomousvision, Project: connecting_the_dots, Lines: 25, Source: geometry.py
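
Under the hood, the flipud/fliplr calls in Example 6 turn numpy's QR routine into an RQ decomposition of the camera matrix. The sketch below is a round-trip check that builds a projection matrix from made-up intrinsics, rotation, and camera centre and recovers them with return_t=False, so the project-specific cameracenter_to_translation helper is not needed; it assumes decompose_projection_matrix from the snippet above is in scope.

import numpy as np

# Hypothetical camera parameters for illustration only.
K0 = np.array([[500., 0., 320.],
               [0., 500., 240.],
               [0., 0., 1.]])
R0 = np.array([[0., -1., 0.],      # 90-degree rotation about z
               [1., 0., 0.],
               [0., 0., 1.]])
C0 = np.array([[1.], [2.], [3.]])  # camera centre as a column vector

# P = K0 [R0 | -R0 C0]
P = K0 @ np.hstack([R0, -R0 @ C0])

K, R, C = decompose_projection_matrix(P, return_t=False)
assert np.allclose(K, K0)
assert np.allclose(R, R0)
assert np.allclose(C, C0)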

Example 7: get_symmetric_densepose

# Required module: import numpy [as alias]
# Or: from numpy import fliplr [as alias]
def get_symmetric_densepose(self, I, U, V, x, y, Mask):
        # This is a function to get the mirror symmetric UV labels.
        Labels_sym = np.zeros(I.shape)
        U_sym = np.zeros(U.shape)
        V_sym = np.zeros(V.shape)
        for i in (range(24)):
            if i + 1 in I:
                Labels_sym[I == (i + 1)] = self.Index_Symmetry_List[i]
                jj = np.where(I == (i + 1))
                U_loc = (U[jj] * 255).astype(np.int64)
                V_loc = (V[jj] * 255).astype(np.int64)
                V_sym[jj] = self.UV_symmetry_transformations['V_transforms'][0, i][V_loc, U_loc]
                U_sym[jj] = self.UV_symmetry_transformations['U_transforms'][0, i][V_loc, U_loc]
        Mask_flip = np.fliplr(Mask)
        Mask_flipped = np.zeros(Mask.shape)

        for i in (range(14)):
            Mask_flipped[Mask_flip == (i + 1)] = self.SemanticMaskSymmetries[i + 1]
        [y_max, x_max] = Mask_flip.shape
        y_sym = y
        x_sym = x_max - x

        return Labels_sym, U_sym, V_sym, x_sym, y_sym, Mask_flipped 
Developer: soeaver, Project: Parsing-R-CNN, Lines: 25, Source: densepose_methods.py

Example 8: transform

# Required module: import numpy [as alias]
# Or: from numpy import fliplr [as alias]
def transform(self, images):
        if self._aug_flag:
            transformed_images =\
                np.zeros([images.shape[0], self._imsize, self._imsize, 3])
            ori_size = images.shape[1]
            for i in range(images.shape[0]):
                h1 = int(np.floor((ori_size - self._imsize) * np.random.random()))
                w1 = int(np.floor((ori_size - self._imsize) * np.random.random()))
                cropped_image =\
                    images[i][w1: w1 + self._imsize, h1: h1 + self._imsize, :]
                if random.random() > 0.5:
                    transformed_images[i] = np.fliplr(cropped_image)
                else:
                    transformed_images[i] = cropped_image
            return transformed_images
        else:
            return images 
Developer: hanzhanggit, Project: StackGAN, Lines: 19, Source: datasets.py

Example 9: __init__

# Required module: import numpy [as alias]
# Or: from numpy import fliplr [as alias]
def __init__(self):
		nest.ResetKernel()
		nest.SetKernelStatus({"local_num_threads" : 1, "resolution" : p.time_resolution})
		self.spike_generators_l = nest.Create("poisson_generator", p.resolution[0]//2*p.resolution[1], params=p.poisson_params)
		self.spike_generators_r = nest.Create("poisson_generator", p.resolution[0]//2*p.resolution[1], params=p.poisson_params)
		self.neuron_l = nest.Create("iaf_psc_alpha", params=p.iaf_params)
		self.neuron_r = nest.Create("iaf_psc_alpha", params=p.iaf_params)
		self.spike_detector_l = nest.Create("spike_detector", params={"withtime": True})
		self.spike_detector_r = nest.Create("spike_detector", params={"withtime": True})
		self.multimeter_l = nest.Create("multimeter", params={"withtime":True, "record_from":["V_m"]})
		self.multimeter_r = nest.Create("multimeter", params={"withtime":True, "record_from":["V_m"]})
		weights_l = np.fliplr(p.weights_l.T).reshape(p.weights_l.size)
		weights_r = np.fliplr(p.weights_r.T).reshape(p.weights_r.size)
		for i in range(weights_l.size):
			syn_dict = {"model": "static_synapse", 
						"weight": weights_l[i]}
			nest.Connect([self.spike_generators_l[i]], self.neuron_l, syn_spec=syn_dict)
		for i in range(weights_r.size):
			syn_dict = {"model": "static_synapse", 
						"weight": weights_r[i]}
			nest.Connect([self.spike_generators_r[i]], self.neuron_r, syn_spec=syn_dict)
		nest.Connect(self.neuron_l, self.spike_detector_l)
		nest.Connect(self.neuron_r, self.spike_detector_r)
		nest.Connect(self.multimeter_l, self.neuron_l)
		nest.Connect(self.multimeter_r, self.neuron_r) 
Developer: clamesc, Project: Training-Neural-Networks-for-Event-Based-End-to-End-Robot-Control, Lines: 27, Source: network.py

Example 10: center_crop

# Required module: import numpy [as alias]
# Or: from numpy import fliplr [as alias]
def center_crop(x, crop_h, crop_w=None, resize_w=64):

    if crop_w is None:
        crop_w = crop_h
    h, w = x.shape[:2]
    j = int(round((h - crop_h)/2.))
    i = int(round((w - crop_w)/2.))

    rate = np.random.uniform(0, 1, size=1)

    if rate < 0.5:
        x = np.fliplr(x)

    # first crop to 178x178 and resize to 128x128
    return scipy.misc.imresize(x[20:218-20, 0: 178], [resize_w, resize_w])

    #Another cropped method

    # return scipy.misc.imresize(x[j:j+crop_h, i:i+crop_w],
    #                            [resize_w, resize_w]) 
Developer: zhangqianhui, Project: Residual_Image_Learning_GAN, Lines: 22, Source: utils.py

Example 11: load_image_array

# Required module: import numpy [as alias]
# Or: from numpy import fliplr [as alias]
def load_image_array(image_file, image_size):
	img = skimage.io.imread(image_file)
	# GRAYSCALE
	if len(img.shape) == 2:
		img_new = np.ndarray( (img.shape[0], img.shape[1], 3), dtype = 'uint8')
		img_new[:,:,0] = img
		img_new[:,:,1] = img
		img_new[:,:,2] = img
		img = img_new

	img_resized = skimage.transform.resize(img, (image_size, image_size))

	# FLIP HORIZONTALLY WITH PROBABILITY 0.5
	if random.random() > 0.5:
		img_resized = np.fliplr(img_resized)
	
	
	return img_resized.astype('float32') 
Developer: paarthneekhara, Project: text-to-image, Lines: 20, Source: image_processing.py

Example 12: update

# Required module: import numpy [as alias]
# Or: from numpy import fliplr [as alias]
def update(self):
        if self.inky_colour is None:
            raise RuntimeError("You must specify which colour of Inky pHAT you're using: inkyphat.set_colour('red', 'black' or 'yellow')")

        self._display_init()

        x1, x2 = self.update_x1, self.update_x2
        y1, y2 = self.update_y1, self.update_y2

        region = self.buffer[y1:y2, x1:x2]

        if self.v_flip:
            region = numpy.fliplr(region)

        if self.h_flip:
            region = numpy.flipud(region)

        buf_red = numpy.packbits(numpy.where(region == RED, 1, 0)).tolist()
        if self.inky_version == 1:
            buf_black = numpy.packbits(numpy.where(region == 0, 0, 1)).tolist()
        else:
            buf_black = numpy.packbits(numpy.where(region == BLACK, 0, 1)).tolist()

        self._display_update(buf_black, buf_red)
        self._display_fini() 
Developer: pimoroni, Project: inky-phat, Lines: 27, Source: inky212x104.py

Example 13: __flip

# Required module: import numpy [as alias]
# Or: from numpy import fliplr [as alias]
def __flip(self, image, landmarks, run_prob=0.5):
        """
        Flip the image horizontally (left-right only).
        Args:
            image: a numpy array
            landmarks: face landmarks with format [(x1, y1), (x2, y2), ...]
            run_prob: probability of returning the input unchanged, 0.0-1.0
                (the flip is applied with probability 1 - run_prob)
        Return:
            an image and landmarks will be returned
        Raises:
            Unsupported count of landmarks
        """
        if np.random.rand() < run_prob:
            return image, landmarks
        image = np.fliplr(image)
        landmarks[:, 0] = image.shape[1] - landmarks[:, 0]
        landmarks = LandmarkHelper.flip(landmarks, landmarks.shape[0])
        return image, landmarks 
Developer: junhwanjang, Project: face_landmark_dnn, Lines: 20, Source: landmark_augment.py
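
The subtle part of Example 13 is keeping the landmark x-coordinates consistent with the mirrored image. Below is a minimal numpy-only illustration of that bookkeeping, using the pixel-index convention x -> width - 1 - x and ignoring the project-specific LandmarkHelper reordering of left/right points (the original code's exact coordinate convention may differ).

import numpy as np

h, w = 4, 6
image = np.zeros((h, w))
landmarks = np.array([[1.0, 2.0]])   # one landmark at column x=1, row y=2
image[2, 1] = 1.0                    # mark that pixel

flipped = np.fliplr(image)
mirrored = landmarks.copy()
mirrored[:, 0] = (w - 1) - mirrored[:, 0]   # column x maps to w - 1 - x

# The marked pixel is found at the mirrored landmark position.
assert flipped[int(mirrored[0, 1]), int(mirrored[0, 0])] == 1.0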

Example 14: step

# Required module: import numpy [as alias]
# Or: from numpy import fliplr [as alias]
def step(self, amt=1):
        d = self._ws_thread.get_frame()
        d = d.reshape(WS_FRAME_HEIGHT, WS_FRAME_WIDTH)
        if self.mirror:
            d = np.fliplr(d)

        d = rebin(d, (self.height, self.width)).astype(np.uint16)

        self.shader.render(d) 
Developer: ManiacalLabs, Project: BiblioPixelAnimations, Lines: 11, Source: kimotion.py

Example 15: _recalls

# Required module: import numpy [as alias]
# Or: from numpy import fliplr [as alias]
def _recalls(all_ious, proposal_nums, thrs):

    img_num = all_ious.shape[0]
    total_gt_num = sum([ious.shape[0] for ious in all_ious])

    _ious = np.zeros((proposal_nums.size, total_gt_num), dtype=np.float32)
    for k, proposal_num in enumerate(proposal_nums):
        tmp_ious = np.zeros(0)
        for i in range(img_num):
            ious = all_ious[i][:, :proposal_num].copy()
            gt_ious = np.zeros((ious.shape[0]))
            if ious.size == 0:
                tmp_ious = np.hstack((tmp_ious, gt_ious))
                continue
            for j in range(ious.shape[0]):
                gt_max_overlaps = ious.argmax(axis=1)
                max_ious = ious[np.arange(0, ious.shape[0]), gt_max_overlaps]
                gt_idx = max_ious.argmax()
                gt_ious[j] = max_ious[gt_idx]
                box_idx = gt_max_overlaps[gt_idx]
                ious[gt_idx, :] = -1
                ious[:, box_idx] = -1
            tmp_ious = np.hstack((tmp_ious, gt_ious))
        _ious[k, :] = tmp_ious

    _ious = np.fliplr(np.sort(_ious, axis=1))
    recalls = np.zeros((proposal_nums.size, thrs.size))
    for i, thr in enumerate(thrs):
        recalls[:, i] = (_ious >= thr).sum(axis=1) / float(total_gt_num)

    return recalls 
Developer: open-mmlab, Project: mmdetection, Lines: 33, Source: recall.py


Note: The numpy.fliplr examples in this article were compiled by 純淨天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets are selected from open-source projects contributed by their developers; copyright of the source code belongs to the original authors. Please refer to the corresponding project's License for distribution and use; do not reproduce without permission.