This article collects typical usage examples of the Python method datasets.json_dataset.add_proposals. If you are unsure what json_dataset.add_proposals does, how it is used, or what calling it looks like in practice, the curated examples below may help. You can also explore the enclosing module, datasets.json_dataset, for further usage details.
The following 8 code examples of json_dataset.add_proposals are shown below, sorted by popularity by default.
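Before walking through the examples, here is a minimal call-pattern sketch. It assumes the Caffe2 Detectron layout of the json_dataset module; the dataset name, the use of JsonDataset.get_roidb, and the concrete box values are illustrative assumptions rather than something taken from the examples. Note also that the exact signature varies slightly between forks: some examples below pass crowd_thresh explicitly and others do not.

import numpy as np
from datasets import json_dataset
from datasets.json_dataset import JsonDataset

# Assumed setup: add_proposals merges boxes into existing roidb entries, so
# the entries must come from a real JsonDataset (dataset name is illustrative).
dataset = JsonDataset('coco_2014_minival')
roidb = dataset.get_roidb(gt=True)[:1]

# rois are (batch_idx, x1, y1, x2, y2) rows in the *scaled* network input
# space; im_scales holds one scale factor per image in the minibatch.
rois = np.array([[0, 10., 10., 50., 60.],
                 [0, 20., 30., 80., 90.]], dtype=np.float32)
im_scales = np.array([1.0], dtype=np.float32)

# Rescale the rois back to original image coordinates and append them to the
# matching roidb entries; crowd_thresh=0 disables crowd filtering, as in most
# of the examples on this page.
json_dataset.add_proposals(roidb, rois, im_scales, crowd_thresh=0)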
Example 1: forward
# Required import: from datasets import json_dataset [as alias]
# Or: from datasets.json_dataset import add_proposals [as alias]
def forward(self, inputs, outputs):
    """See modeling.detector.GenerateProposalLabels for inputs/outputs
    documentation.
    """
    # During training we reuse the data loader code. We populate roidb
    # entries on the fly using the rois generated by RPN.
    # im_info: [[im_height, im_width, im_scale], ...]
    rois = inputs[0].data
    roidb = blob_utils.deserialize(inputs[1].data)
    im_info = inputs[2].data
    im_scales = im_info[:, 2]
    output_blob_names = roi_data.fast_rcnn.get_fast_rcnn_blob_names()
    # For historical consistency with the original Faster R-CNN
    # implementation we are *not* filtering crowd proposals.
    # This choice should be investigated in the future (it likely does
    # not matter).
    json_dataset.add_proposals(roidb, rois, im_scales, crowd_thresh=0)
    blobs = {k: [] for k in output_blob_names}
    roi_data.fast_rcnn.add_fast_rcnn_blobs(blobs, im_scales, roidb)
    for i, k in enumerate(output_blob_names):
        blob_utils.py_op_copy_blob(blobs[k], outputs[i])
Example 2: forward
# Required import: from datasets import json_dataset [as alias]
# Or: from datasets.json_dataset import add_proposals [as alias]
def forward(self, inputs, outputs):
    # inputs is
    # [rpn_rois_fpn2, ..., rpn_rois_fpn6,
    #  rpn_roi_probs_fpn2, ..., rpn_roi_probs_fpn6]
    # If training with Faster R-CNN, then inputs will additionally include
    #  + [roidb, im_info]
    rois = collect(inputs, self._train)
    if self._train:
        # During training we reuse the data loader code. We populate roidb
        # entries on the fly using the rois generated by RPN.
        # im_info: [[im_height, im_width, im_scale], ...]
        im_info = inputs[-1].data
        im_scales = im_info[:, 2]
        roidb = blob_utils.deserialize(inputs[-2].data)
        output_blob_names = roi_data.fast_rcnn.get_fast_rcnn_blob_names()
        json_dataset.add_proposals(roidb, rois, im_scales)
        blobs = {k: [] for k in output_blob_names}
        roi_data.fast_rcnn.add_fast_rcnn_blobs(blobs, im_scales, roidb)
        for i, k in enumerate(output_blob_names):
            blob_utils.py_op_copy_blob(blobs[k], outputs[i])
    else:
        # For inference we have a special code path that avoids some data
        # loader overhead
        distribute(rois, None, outputs, self._train)
Example 3: forward
# Required import: from datasets import json_dataset [as alias]
# Or: from datasets.json_dataset import add_proposals [as alias]
def forward(self, inputs, roidb, im_info):
    """
    Args:
        inputs: a list of [rpn_rois_fpn2, ..., rpn_rois_fpn6,
                           rpn_roi_probs_fpn2, ..., rpn_roi_probs_fpn6]
        im_info: [[im_height, im_width, im_scale], ...]
    """
    rois = collect(inputs, self.training)
    if self.training:
        # During training we reuse the data loader code. We populate roidb
        # entries on the fly using the rois generated by RPN.
        im_scales = im_info.data.numpy()[:, 2]
        # For historical consistency with the original Faster R-CNN
        # implementation we are *not* filtering crowd proposals.
        # This choice should be investigated in the future (it likely does
        # not matter).
        json_dataset.add_proposals(roidb, rois, im_scales, crowd_thresh=0)
        # Compute training labels for the RPN proposals; also handles
        # distributing the proposals over FPN levels
        output_blob_names = roi_data.fast_rcnn.get_fast_rcnn_blob_names()
        blobs = {k: [] for k in output_blob_names}
        roi_data.fast_rcnn.add_fast_rcnn_blobs(blobs, im_scales, roidb)
    else:
        # For inference we have a special code path that avoids some data
        # loader overhead
        blobs = distribute(rois, None)
    return blobs
Example 4: forward
# Required import: from datasets import json_dataset [as alias]
# Or: from datasets.json_dataset import add_proposals [as alias]
def forward(self, rpn_rois, roidb, im_info):
    """Op for generating training labels for RPN proposals. This is used
    when training RPN jointly with Fast/Mask R-CNN (as in end-to-end
    Faster R-CNN training).

    blobs_in:
      - 'rpn_rois': 2D tensor of RPN proposals output by GenerateProposals
      - 'roidb': roidb entries that will be labeled
      - 'im_info': See GenerateProposals doc.

    blobs_out:
      - (variable set of blobs): returns whatever blobs are required for
        training the model. It does this by querying the data loader for
        the list of blobs that are needed.
    """
    im_scales = im_info.data.numpy()[:, 2]
    output_blob_names = roi_data.fast_rcnn.get_fast_rcnn_blob_names()
    # For historical consistency with the original Faster R-CNN
    # implementation we are *not* filtering crowd proposals.
    # This choice should be investigated in the future (it likely does
    # not matter).
    # Note: crowd_thresh=0 will ignore _filter_crowd_proposals
    json_dataset.add_proposals(roidb, rpn_rois, im_scales, crowd_thresh=0)
    blobs = {k: [] for k in output_blob_names}
    roi_data.fast_rcnn.add_fast_rcnn_blobs(blobs, im_scales, roidb)
    return blobs
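As a follow-up to Example 4, the snippet below sketches how such an op might be wired into a training step. The class name GenerateProposalLabelsOp and the variables rpn_rois, roidb, and im_info are assumptions following the docstring above (Detectron.pytorch-style code), and the exact blob keys depend on the config.

# Hypothetical wiring, assuming the forward above belongs to a module named
# GenerateProposalLabelsOp and that rpn_rois, roidb, and im_info come from the
# RPN head and the data loader, respectively.
proposal_labels_op = GenerateProposalLabelsOp()
blobs = proposal_labels_op(rpn_rois, roidb, im_info)

# blobs is keyed by roi_data.fast_rcnn.get_fast_rcnn_blob_names(); with a
# default config this typically includes the sampled proposals and their
# class labels (e.g. blobs['rois'] and blobs['labels_int32']), which feed
# the Fast R-CNN box head.
sampled_rois = blobs['rois']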
Example 5: forward
# Required import: from datasets import json_dataset [as alias]
# Or: from datasets.json_dataset import add_proposals [as alias]
def forward(self, inputs, roidb, im_info, stage=0):
    """
    Args:
        inputs: a list of [rpn_rois_fpn2, ..., rpn_rois_fpn6,
                           rpn_roi_probs_fpn2, ..., rpn_roi_probs_fpn6]
        im_info: [[im_height, im_width, im_scale], ...]
    """
    if stage == 0:
        rois = collect(inputs, self.training)
    else:
        rois = inputs
    if self.training:
        # During training we reuse the data loader code. We populate roidb
        # entries on the fly using the rois generated by RPN.
        im_scales = im_info.data.numpy()[:, 2]
        # For historical consistency with the original Faster R-CNN
        # implementation we are *not* filtering crowd proposals.
        # This choice should be investigated in the future (it likely does
        # not matter).
        json_dataset.add_proposals(roidb, rois, im_scales, crowd_thresh=0)
        # Compute training labels for the RPN proposals; also handles
        # distributing the proposals over FPN levels
        output_blob_names = roi_data.fast_rcnn.get_fast_rcnn_blob_names()
        blobs = {k: [] for k in output_blob_names}
        roi_data.fast_rcnn.add_fast_rcnn_blobs(blobs, im_scales, roidb, stage)
    else:
        # For inference we have a special code path that avoids some data
        # loader overhead
        blobs = distribute(rois, None)
    return blobs
Example 6: forward
# Required import: from datasets import json_dataset [as alias]
# Or: from datasets.json_dataset import add_proposals [as alias]
def forward(self, inputs, outputs):
    """See modeling.detector.CollectAndDistributeFpnRpnProposals for
    inputs/outputs documentation.
    """
    # inputs is
    # [rpn_rois_fpn2, ..., rpn_rois_fpn6,
    #  rpn_roi_probs_fpn2, ..., rpn_roi_probs_fpn6]
    # If training with Faster R-CNN, then inputs will additionally include
    #  + [roidb, im_info]
    rois = collect(inputs, self._train)
    if self._train:
        # During training we reuse the data loader code. We populate roidb
        # entries on the fly using the rois generated by RPN.
        # im_info: [[im_height, im_width, im_scale], ...]
        im_info = inputs[-1].data
        im_scales = im_info[:, 2]
        roidb = blob_utils.deserialize(inputs[-2].data)
        # For historical consistency with the original Faster R-CNN
        # implementation we are *not* filtering crowd proposals.
        # This choice should be investigated in the future (it likely does
        # not matter).
        json_dataset.add_proposals(roidb, rois, im_scales, crowd_thresh=0)
        # Compute training labels for the RPN proposals; also handles
        # distributing the proposals over FPN levels
        output_blob_names = roi_data.fast_rcnn.get_fast_rcnn_blob_names()
        blobs = {k: [] for k in output_blob_names}
        roi_data.fast_rcnn.add_fast_rcnn_blobs(blobs, im_scales, roidb)
        for i, k in enumerate(output_blob_names):
            blob_utils.py_op_copy_blob(blobs[k], outputs[i])
    else:
        # For inference we have a special code path that avoids some data
        # loader overhead
        distribute(rois, None, outputs, self._train)
Example 7: forward
# Required import: from datasets import json_dataset [as alias]
# Or: from datasets.json_dataset import add_proposals [as alias]
def forward(self, inputs, outputs):
    """See modeling.detector.CollectAndDistributeFpnRpnProposalsRec for
    inputs/outputs documentation.
    """
    # inputs is
    # [rpn_rois_fpn2, ..., rpn_rois_fpn6,
    #  rpn_roi_probs_fpn2, ..., rpn_roi_probs_fpn6]
    # If training with Faster R-CNN, then inputs will additionally include
    #  + [roidb, im_info]
    rois = collect(inputs, self._train)
    if self._train:
        # During training we reuse the data loader code. We populate roidb
        # entries on the fly using the rois generated by RPN.
        # im_info: [[im_height, im_width, im_scale], ...]
        im_info = inputs[-1].data
        im_scales = im_info[:, 2]
        roidb = blob_utils.deserialize(inputs[-2].data)
        # For historical consistency with the original Faster R-CNN
        # implementation we are *not* filtering crowd proposals.
        # This choice should be investigated in the future (it likely does
        # not matter).
        json_dataset.add_proposals(roidb, rois, im_scales, crowd_thresh=0)
        # Compute training labels for the RPN proposals; also handles
        # distributing the proposals over FPN levels
        output_blob_names = roi_data.fast_rcnn.get_fast_rcnn_blob_names()
        blobs = {k: [] for k in output_blob_names}
        roi_data.fast_rcnn.add_fast_rcnn_blobs_rec(blobs, im_scales, roidb)
        for i, k in enumerate(output_blob_names):
            blob_utils.py_op_copy_blob(blobs[k], outputs[i])
    else:
        # For inference we have a special code path that avoids some data
        # loader overhead
        distribute(rois, None, outputs, self._train)
Developer: lvpengyuan | Project: masktextspotter.caffe2 | Lines: 35 | Source: collect_and_distribute_fpn_rpn_proposals_rec.py
Example 8: forward
# Required import: from datasets import json_dataset [as alias]
# Or: from datasets.json_dataset import add_proposals [as alias]
def forward(self, inputs, outputs):
    # During training we reuse the data loader code. We populate roidb
    # entries on the fly using the rois generated by RPN.
    # im_info: [[im_height, im_width, im_scale], ...]
    rois = inputs[0].data
    roidb = blob_utils.deserialize(inputs[1].data)
    im_info = inputs[2].data
    im_scales = im_info[:, 2]
    output_blob_names = roi_data.fast_rcnn.get_fast_rcnn_blob_names()
    json_dataset.add_proposals(roidb, rois, im_scales)
    blobs = {k: [] for k in output_blob_names}
    roi_data.fast_rcnn.add_fast_rcnn_blobs(blobs, im_scales, roidb)
    for i, k in enumerate(output_blob_names):
        blob_utils.py_op_copy_blob(blobs[k], outputs[i])