當前位置: 首頁>>代碼示例>>Python>>正文


Python bbox_transform.bbox_transform方法代碼示例

本文整理匯總了Python中fast_rcnn.bbox_transform.bbox_transform方法的典型用法代碼示例。如果您正苦於以下問題:Python bbox_transform.bbox_transform方法的具體用法?Python bbox_transform.bbox_transform怎麼用?Python bbox_transform.bbox_transform使用的例子?那麼,這裏精選的方法代碼示例或許可以為您提供幫助。您也可以進一步了解該方法所在fast_rcnn.bbox_transform的用法示例。


在下文中一共展示了bbox_transform.bbox_transform方法的7個代碼示例,這些例子默認根據受歡迎程度排序。您可以為喜歡或者感覺有用的代碼點讚,您的評價將有助於系統推薦出更棒的Python代碼示例。

示例1: _compute_targets

# 需要導入模塊: from fast_rcnn import bbox_transform [as 別名]
# 或者: from fast_rcnn.bbox_transform import bbox_transform [as 別名]
def _compute_targets(rois, overlaps, labels):
    """Compute bounding-box regression targets for an image.

    Args:
        rois: (N, 4) array of ROI box coordinates.
        overlaps: (N,) array of each ROI's max overlap with ground truth;
            a value of exactly 1 marks a ground-truth ROI.
        labels: (N,) array of class labels, indexed in parallel with rois.

    Returns:
        (N, 5) float32 array: column 0 is the class label, columns 1:5 are
        the bbox regression deltas. Rows for ROIs whose overlap is below
        cfg.TRAIN.BBOX_THRESH are left as all zeros.
    """
    # Indices of ground-truth ROIs (flagged by an overlap of exactly 1)
    gt_inds = np.where(overlaps == 1)[0]
    if len(gt_inds) == 0:
        # Bail if the image has no ground-truth ROIs
        return np.zeros((rois.shape[0], 5), dtype=np.float32)
    # Indices of examples for which we try to make predictions
    ex_inds = np.where(overlaps >= cfg.TRAIN.BBOX_THRESH)[0]

    # Get IoU overlap between each ex ROI and gt ROI.
    # Fix: np.float was a deprecated alias of the builtin float and was
    # removed in NumPy 1.24; np.float64 is the equivalent explicit dtype.
    ex_gt_overlaps = bbox_overlaps(
        np.ascontiguousarray(rois[ex_inds, :], dtype=np.float64),
        np.ascontiguousarray(rois[gt_inds, :], dtype=np.float64))

    # Find which gt ROI each ex ROI has max overlap with:
    # this will be the ex ROI's gt target
    gt_assignment = ex_gt_overlaps.argmax(axis=1)
    gt_rois = rois[gt_inds[gt_assignment], :]
    ex_rois = rois[ex_inds, :]

    targets = np.zeros((rois.shape[0], 5), dtype=np.float32)
    targets[ex_inds, 0] = labels[ex_inds]
    targets[ex_inds, 1:] = bbox_transform(ex_rois, gt_rois)
    return targets
開發者ID:playerkk,項目名稱:face-py-faster-rcnn,代碼行數:27,代碼來源:roidb.py

示例2: _propagate_boxes

# 需要導入模塊: from fast_rcnn import bbox_transform [as 別名]
# 或者: from fast_rcnn.bbox_transform import bbox_transform [as 別名]
def _propagate_boxes(boxes, annot_proto, frame_id):
    """Propagate boxes one frame forward using ground-truth motion.

    For every annotated track that has a box on ``frame_id`` and at least
    one later box, record the bbox delta to the next frame. Each input box
    is then matched to its best-overlapping ground-truth box; if the max
    overlap is >= 0.5 the gt motion delta is applied, otherwise the box is
    kept as-is.

    Args:
        boxes: iterable of [x1, y1, x2, y2] boxes to propagate.
        annot_proto: dict with an 'annotations' list; each annotation has a
            'track' list of {'frame': int, 'bbox': [x1, y1, x2, y2]} entries.
        frame_id: frame whose boxes are being propagated.

    Returns:
        List of propagated [x1, y1, x2, y2] boxes, same length as ``boxes``.
    """
    pred_boxes = []
    annots = []
    for annot in annot_proto['annotations']:
        for idx, box in enumerate(annot['track']):
            if box['frame'] == frame_id and len(annot['track']) > idx + 1:
                gt1 = box['bbox']
                gt2 = annot['track'][idx+1]['bbox']
                # Motion of this gt box from frame_id to the next frame
                delta = bbox_transform(np.asarray([gt1]), np.asarray([gt2]))
                annots.append((gt1, delta))
    gt1 = [annot[0] for annot in annots]
    # Fix: np.float was a deprecated alias removed in NumPy 1.24; use the
    # equivalent explicit np.float64 dtype.
    overlaps = bbox_overlaps(np.require(boxes, dtype=np.float64),
                             np.require(gt1, dtype=np.float64))
    assert len(overlaps) == len(boxes)
    for gt_overlaps, box in zip(overlaps, boxes):
        max_overlap = np.max(gt_overlaps)
        max_gt = np.argmax(gt_overlaps)
        if max_overlap < 0.5:
            # No confident gt match: box stays where it is
            pred_boxes.append(box)
        else:
            # Apply the matched gt's motion delta to this box
            delta = annots[max_gt][1]
            pred_boxes.append(bbox_transform_inv(np.asarray([box]), delta)[0].tolist())
    return pred_boxes
開發者ID:myfavouritekk,項目名稱:TPN,代碼行數:25,代碼來源:gt_guided_tracking.py

示例3: _compute_targets

# 需要導入模塊: from fast_rcnn import bbox_transform [as 別名]
# 或者: from fast_rcnn.bbox_transform import bbox_transform [as 別名]
def _compute_targets(ex_rois, gt_rois, labels):
    """Compute bounding-box regression targets for an image.

    Returns an (N, 5) float32 array whose first column is the class label
    and whose remaining columns are the (optionally normalized) deltas.
    """
    assert ex_rois.shape[0] == gt_rois.shape[0]
    assert ex_rois.shape[1] == 4
    assert gt_rois.shape[1] == 4

    deltas = bbox_transform(ex_rois, gt_rois)
    if cfg.TRAIN.BBOX_NORMALIZE_TARGETS_PRECOMPUTED:
        # Optionally normalize targets by a precomputed mean and stdev
        means = np.array(cfg.TRAIN.BBOX_NORMALIZE_MEANS)
        stds = np.array(cfg.TRAIN.BBOX_NORMALIZE_STDS)
        deltas = (deltas - means) / stds
    stacked = np.hstack((labels[:, np.newaxis], deltas))
    return stacked.astype(np.float32, copy=False)
開發者ID:playerkk,項目名稱:face-py-faster-rcnn,代碼行數:16,代碼來源:proposal_target_layer.py

示例4: _compute_targets

# 需要導入模塊: from fast_rcnn import bbox_transform [as 別名]
# 或者: from fast_rcnn.bbox_transform import bbox_transform [as 別名]
def _compute_targets(ex_rois, gt_rois):
    """Compute bounding-box regression targets for an image.

    gt_rois carries 5 columns; only the first 4 (coordinates) are used.
    """
    assert ex_rois.shape[0] == gt_rois.shape[0]
    assert ex_rois.shape[1] == 4
    assert gt_rois.shape[1] == 5

    deltas = bbox_transform(ex_rois, gt_rois[:, :4])
    return deltas.astype(np.float32, copy=False)
開發者ID:playerkk,項目名稱:face-py-faster-rcnn,代碼行數:10,代碼來源:anchor_target_layer.py

示例5: _gt_propagate_boxes

# 需要導入模塊: from fast_rcnn import bbox_transform [as 別名]
# 或者: from fast_rcnn.bbox_transform import bbox_transform [as 別名]
def _gt_propagate_boxes(boxes, annot_proto, frame_id, window, overlap_thres):
    """Propagate boxes over a temporal window using ground-truth motion.

    For each annotated track starting at ``frame_id``, precompute the bbox
    deltas from the starting gt box to each of the next ``window - 1``
    frames (repeating the last available box past the track end). Each
    input box is matched to its best-overlapping gt; if the overlap reaches
    ``overlap_thres`` the gt deltas are applied, otherwise the box is kept
    still for the whole window.

    Returns:
        (num_boxes, window - 1, 4) array of propagated boxes.
    """
    pred_boxes = []
    annots = []
    for annot in annot_proto['annotations']:
        for idx, box in enumerate(annot['track']):
            if box['frame'] == frame_id:
                gt1 = box['bbox']
                # deltas[0] is the anchor gt box itself; deltas[k] (k>=1)
                # is the motion from gt1 to the box k frames ahead.
                deltas = []
                deltas.append(gt1)
                # Fix: xrange is Python-2-only; range iterates identically.
                for offset in range(1, window):
                    try:
                        gt2 = annot['track'][idx+offset]['bbox']
                    except IndexError:
                        # Track ended: freeze motion at the last known box
                        gt2 = gt1
                    delta = bbox_transform(np.asarray([gt1]), np.asarray([gt2]))
                    deltas.append(delta)
                annots.append(deltas)
    gt1s = [annot[0] for annot in annots]
    if not gt1s:
        # no ground-truth, boxes remain still
        return np.tile(np.asarray(boxes)[:, np.newaxis, :], [1, window-1, 1])
    # Fix: np.float was a deprecated alias removed in NumPy 1.24.
    overlaps = bbox_overlaps(np.require(boxes, dtype=np.float64),
                             np.require(gt1s, dtype=np.float64))
    assert len(overlaps) == len(boxes)
    for gt_overlaps, box in zip(overlaps, boxes):
        max_overlap = np.max(gt_overlaps)
        max_gt = np.argmax(gt_overlaps)
        sequence_box = []
        if max_overlap < overlap_thres:
            # No confident gt match: repeat the box across the window
            for offset in range(1, window):
                sequence_box.append(box)
        else:
            # Apply the matched gt's per-offset motion deltas
            for offset in range(1, window):
                delta = annots[max_gt][offset]
                sequence_box.append(
                    bbox_transform_inv(np.asarray([box]), delta)[0].tolist())
        pred_boxes.append((sequence_box))
    return np.asarray(pred_boxes)
開發者ID:myfavouritekk,項目名稱:TPN,代碼行數:40,代碼來源:propagate.py

示例6: _accuracy

# 需要導入模塊: from fast_rcnn import bbox_transform [as 別名]
# 或者: from fast_rcnn.bbox_transform import bbox_transform [as 別名]
def _accuracy(track, gt):
    """Score a track against a ground-truth track.

    For each frame after the first, the target box is obtained by applying
    the ground-truth motion (from the first gt box to the current gt box)
    to the first track box; the track box is then compared against it.

    Returns:
        Three lists: absolute coordinate errors, relative errors
        (normalized by target width/height), and IoUs with the target.
    """
    if len(track) < 2:
        return [], [], []
    abs_acc, rel_acc, ious = [], [], []
    st_frame = track[0]['frame']
    end_frame = track[-1]['frame']
    assert end_frame - st_frame + 1 == len(track)
    gt_seg = select_gt_segment(gt['track'], st_frame, end_frame)
    assert len(gt_seg) <= len(track)
    track_bbox1 = np.asarray([track[0]['bbox']])
    gt_bbox1 = np.asarray([gt_seg[0]])
    for track_box, gt_box in zip(track[1:len(gt_seg)], gt_seg[1:]):
        # current track box
        cur_bbox = np.asarray([track_box['bbox']])
        # ground-truth motion from the first gt box to the current one
        gt_delta = bbox_transform(gt_bbox1, np.asarray([gt_box]))
        # target is the first track box moved by the gt motion
        target = bbox_transform_inv(track_bbox1, gt_delta)
        diff = np.abs(cur_bbox - target)
        overlap = iou(cur_bbox, target)
        w = target[0, 2] - target[0, 0]
        h = target[0, 3] - target[0, 1]
        scale = np.asarray([w, h, w, h]) + np.finfo(float).eps
        abs_acc.extend(diff.flatten().tolist())
        rel_acc.extend((diff / scale).flatten().tolist())
        ious.extend(overlap.flatten().tolist())
    return abs_acc, rel_acc, ious
開發者ID:myfavouritekk,項目名稱:TPN,代碼行數:31,代碼來源:sequence_tracking_evaluation.py

示例7: _compute_targets

# 需要導入模塊: from fast_rcnn import bbox_transform [as 別名]
# 或者: from fast_rcnn.bbox_transform import bbox_transform [as 別名]
def _compute_targets(ex_rois, gt_rois, labels):
    """Compute bounding-box regression targets for an image.

    Stacks the class labels as the first column in front of the
    (optionally normalized) regression deltas.
    """
    assert ex_rois.shape[0] == gt_rois.shape[0]
    assert ex_rois.shape[1] == 4
    assert gt_rois.shape[1] == 4

    targets = bbox_transform(ex_rois, gt_rois)
    if cfg.TRAIN.BBOX_NORMALIZE_TARGETS_PRECOMPUTED:
        # Optionally normalize targets by a precomputed mean and stdev
        targets -= np.array(cfg.TRAIN.BBOX_NORMALIZE_MEANS)
        targets /= np.array(cfg.TRAIN.BBOX_NORMALIZE_STDS)
    labeled = np.hstack((labels[:, np.newaxis], targets))
    return labeled.astype(np.float32, copy=False)
開發者ID:po0ya,項目名稱:face-magnet,代碼行數:16,代碼來源:proposal_target_layer.py


注:本文中的fast_rcnn.bbox_transform.bbox_transform方法示例由純淨天空整理自Github/MSDocs等開源代碼及文檔管理平台,相關代碼片段篩選自各路編程大神貢獻的開源項目,源碼版權歸原作者所有,傳播和使用請參考對應項目的License;未經允許,請勿轉載。