This article collects typical usage examples of the Python method datasets.json_dataset.add_proposals. If you are wondering what datasets.json_dataset.add_proposals does, how to call it, or what it looks like in real code, the hand-picked examples below should help. You can also explore other usage examples from the module datasets.json_dataset in which the method is defined.
The following shows 8 code examples of the json_dataset.add_proposals method, sorted by popularity by default.
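All of the examples share the same call pattern: RPN proposals (rois) generated at the network's input scale are attached to the roidb entries via json_dataset.add_proposals, together with the per-image scale factors, so that the Fast R-CNN data loader can sample training labels from them on the fly. Below is a minimal sketch of that pattern; the JsonDataset setup, the dataset name, and the [batch_idx, x1, y1, x2, y2] rois layout are assumptions based on the Detectron convention, not code taken from the examples on this page.

import numpy as np
from datasets import json_dataset
from datasets.json_dataset import JsonDataset

# Hypothetical setup (Detectron-style): roidb entries with ground-truth
# annotations for a single image.
dataset = JsonDataset('coco_2014_minival')  # dataset name is illustrative
roidb = dataset.get_roidb(gt=True)[:1]

# rois: one row per proposal, [batch_idx, x1, y1, x2, y2], in the coordinates
# of the *scaled* training image; im_scales holds one scale factor per image.
rois = np.array([[0, 10.0, 20.0, 120.0, 200.0],
                 [0, 30.0, 40.0, 90.0, 150.0]], dtype=np.float32)
im_scales = np.array([1.0], dtype=np.float32)

# Attach the proposals to the roidb. crowd_thresh=0 disables crowd filtering,
# matching most examples below (some Detectron versions omit the argument).
json_dataset.add_proposals(roidb, rois, im_scales, crowd_thresh=0)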
Example 1: forward
# Required import: from datasets import json_dataset [as alias]
# Or: from datasets.json_dataset import add_proposals [as alias]
def forward(self, inputs, outputs):
    """See modeling.detector.GenerateProposalLabels for inputs/outputs
    documentation.
    """
    # During training we reuse the data loader code. We populate roidb
    # entries on the fly using the rois generated by RPN.
    # im_info: [[im_height, im_width, im_scale], ...]
    rois = inputs[0].data
    roidb = blob_utils.deserialize(inputs[1].data)
    im_info = inputs[2].data
    im_scales = im_info[:, 2]
    output_blob_names = roi_data.fast_rcnn.get_fast_rcnn_blob_names()
    # For historical consistency with the original Faster R-CNN
    # implementation we are *not* filtering crowd proposals.
    # This choice should be investigated in the future (it likely does
    # not matter).
    json_dataset.add_proposals(roidb, rois, im_scales, crowd_thresh=0)
    blobs = {k: [] for k in output_blob_names}
    roi_data.fast_rcnn.add_fast_rcnn_blobs(blobs, im_scales, roidb)
    for i, k in enumerate(output_blob_names):
        blob_utils.py_op_copy_blob(blobs[k], outputs[i])
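In this op the roidb travels through a Caffe2 blob, which is why it has to be deserialized before add_proposals can modify it. A plausible sketch of the serialize/deserialize pair is shown below, assuming a pickle-plus-numeric-array encoding; it illustrates the idea only and is not necessarily the exact blob_utils implementation.

import pickle
import numpy as np

def serialize(obj):
    # Pickle the Python object and view the bytes as a numeric array so it
    # can be fed through a Caffe2 blob (hypothetical helper).
    return np.frombuffer(pickle.dumps(obj), dtype=np.uint8).astype(np.float32)

def deserialize(arr):
    # Reverse of serialize(): recover the byte string and unpickle the object.
    return pickle.loads(arr.astype(np.uint8).tobytes())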
Example 2: forward
# Required import: from datasets import json_dataset [as alias]
# Or: from datasets.json_dataset import add_proposals [as alias]
def forward(self, inputs, outputs):
    # inputs is
    # [rpn_rois_fpn2, ..., rpn_rois_fpn6,
    #  rpn_roi_probs_fpn2, ..., rpn_roi_probs_fpn6]
    # If training with Faster R-CNN, then inputs will additionally include
    #  + [roidb, im_info]
    rois = collect(inputs, self._train)
    if self._train:
        # During training we reuse the data loader code. We populate roidb
        # entries on the fly using the rois generated by RPN.
        # im_info: [[im_height, im_width, im_scale], ...]
        im_info = inputs[-1].data
        im_scales = im_info[:, 2]
        roidb = blob_utils.deserialize(inputs[-2].data)
        output_blob_names = roi_data.fast_rcnn.get_fast_rcnn_blob_names()
        json_dataset.add_proposals(roidb, rois, im_scales)
        blobs = {k: [] for k in output_blob_names}
        roi_data.fast_rcnn.add_fast_rcnn_blobs(blobs, im_scales, roidb)
        for i, k in enumerate(output_blob_names):
            blob_utils.py_op_copy_blob(blobs[k], outputs[i])
    else:
        # For inference we have a special code path that avoids some data
        # loader overhead
        distribute(rois, None, outputs, self._train)
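In the inference branch, distribute() has to decide which FPN level each kept proposal is pooled from. The sketch below shows the level-assignment heuristic from the FPN paper that Detectron-style code typically uses for this; the function name, default levels, and the +1 box-size convention are assumptions for illustration rather than the exact helper called above.

import numpy as np

def map_rois_to_fpn_levels(rois, k_min=2, k_max=5, k0=4, canonical_scale=224):
    # FPN heuristic: assign each RoI [x1, y1, x2, y2] to a pyramid level
    # based on its area, clamped to the range of available RoI levels.
    widths = rois[:, 2] - rois[:, 0] + 1
    heights = rois[:, 3] - rois[:, 1] + 1
    target_lvls = np.floor(
        k0 + np.log2(np.sqrt(widths * heights) / canonical_scale + 1e-6))
    return np.clip(target_lvls, k_min, k_max).astype(np.int32)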
Example 3: forward
# Required import: from datasets import json_dataset [as alias]
# Or: from datasets.json_dataset import add_proposals [as alias]
def forward(self, inputs, roidb, im_info):
    """
    Args:
        inputs: a list of [rpn_rois_fpn2, ..., rpn_rois_fpn6,
                           rpn_roi_probs_fpn2, ..., rpn_roi_probs_fpn6]
        im_info: [[im_height, im_width, im_scale], ...]
    """
    rois = collect(inputs, self.training)
    if self.training:
        # During training we reuse the data loader code. We populate roidb
        # entries on the fly using the rois generated by RPN.
        im_scales = im_info.data.numpy()[:, 2]
        # For historical consistency with the original Faster R-CNN
        # implementation we are *not* filtering crowd proposals.
        # This choice should be investigated in the future (it likely does
        # not matter).
        json_dataset.add_proposals(roidb, rois, im_scales, crowd_thresh=0)
        # Compute training labels for the RPN proposals; also handles
        # distributing the proposals over FPN levels
        output_blob_names = roi_data.fast_rcnn.get_fast_rcnn_blob_names()
        blobs = {k: [] for k in output_blob_names}
        roi_data.fast_rcnn.add_fast_rcnn_blobs(blobs, im_scales, roidb)
    else:
        # For inference we have a special code path that avoids some data
        # loader overhead
        blobs = distribute(rois, None)
    return blobs
Example 4: forward
# Required import: from datasets import json_dataset [as alias]
# Or: from datasets.json_dataset import add_proposals [as alias]
def forward(self, rpn_rois, roidb, im_info):
    """Op for generating training labels for RPN proposals. This is used
    when training RPN jointly with Fast/Mask R-CNN (as in end-to-end
    Faster R-CNN training).

    blobs_in:
      - 'rpn_rois': 2D tensor of RPN proposals output by GenerateProposals
      - 'roidb': roidb entries that will be labeled
      - 'im_info': See GenerateProposals doc.

    blobs_out:
      - (variable set of blobs): returns whatever blobs are required for
        training the model. It does this by querying the data loader for
        the list of blobs that are needed.
    """
    im_scales = im_info.data.numpy()[:, 2]
    output_blob_names = roi_data.fast_rcnn.get_fast_rcnn_blob_names()
    # For historical consistency with the original Faster R-CNN
    # implementation we are *not* filtering crowd proposals.
    # This choice should be investigated in the future (it likely does
    # not matter).
    # Note: crowd_thresh=0 will ignore _filter_crowd_proposals
    json_dataset.add_proposals(roidb, rpn_rois, im_scales, crowd_thresh=0)
    blobs = {k: [] for k in output_blob_names}
    roi_data.fast_rcnn.add_fast_rcnn_blobs(blobs, im_scales, roidb)
    return blobs
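The crowd_thresh note above refers to the optional crowd-filtering step inside add_proposals: with a positive threshold, proposals that lie mostly inside COCO "crowd" regions have their ground-truth overlaps set to -1 so the label sampler ignores them. The sketch below illustrates the idea with a simplified intersection-over-proposal-area test; it is an approximation for illustration, not the library's _filter_crowd_proposals.

import numpy as np

def mark_crowd_proposals(proposals, crowd_boxes, overlaps, crowd_thresh=0.7):
    # proposals: (N, 4) and crowd_boxes: (M, 4) arrays of [x1, y1, x2, y2].
    # overlaps: (N, K) proposal-vs-gt overlap matrix, edited in place.
    # A proposal whose area mostly falls inside any crowd region is marked
    # with overlap -1 so it is never sampled as a training example.
    if len(crowd_boxes) == 0 or crowd_thresh <= 0:
        return overlaps
    px1, py1, px2, py2 = np.split(proposals, 4, axis=1)
    areas = (px2 - px1 + 1) * (py2 - py1 + 1)
    for cx1, cy1, cx2, cy2 in crowd_boxes:
        iw = np.clip(np.minimum(px2, cx2) - np.maximum(px1, cx1) + 1, 0, None)
        ih = np.clip(np.minimum(py2, cy2) - np.maximum(py1, cy1) + 1, 0, None)
        iof = (iw * ih) / areas  # intersection over the proposal's own area
        overlaps[(iof > crowd_thresh).ravel(), :] = -1
    return overlaps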
Example 5: forward
# Required import: from datasets import json_dataset [as alias]
# Or: from datasets.json_dataset import add_proposals [as alias]
def forward(self, inputs, roidb, im_info, stage=0):
    """
    Args:
        inputs: a list of [rpn_rois_fpn2, ..., rpn_rois_fpn6,
                           rpn_roi_probs_fpn2, ..., rpn_roi_probs_fpn6]
        im_info: [[im_height, im_width, im_scale], ...]
    """
    if stage == 0:
        rois = collect(inputs, self.training)
    else:
        rois = inputs
    if self.training:
        # During training we reuse the data loader code. We populate roidb
        # entries on the fly using the rois generated by RPN.
        im_scales = im_info.data.numpy()[:, 2]
        # For historical consistency with the original Faster R-CNN
        # implementation we are *not* filtering crowd proposals.
        # This choice should be investigated in the future (it likely does
        # not matter).
        json_dataset.add_proposals(roidb, rois, im_scales, crowd_thresh=0)
        # Compute training labels for the RPN proposals; also handles
        # distributing the proposals over FPN levels
        output_blob_names = roi_data.fast_rcnn.get_fast_rcnn_blob_names()
        blobs = {k: [] for k in output_blob_names}
        roi_data.fast_rcnn.add_fast_rcnn_blobs(blobs, im_scales, roidb, stage)
    else:
        # For inference we have a special code path that avoids some data
        # loader overhead
        blobs = distribute(rois, None)
    return blobs
Example 6: forward
# Required import: from datasets import json_dataset [as alias]
# Or: from datasets.json_dataset import add_proposals [as alias]
def forward(self, inputs, outputs):
    """See modeling.detector.CollectAndDistributeFpnRpnProposals for
    inputs/outputs documentation.
    """
    # inputs is
    # [rpn_rois_fpn2, ..., rpn_rois_fpn6,
    #  rpn_roi_probs_fpn2, ..., rpn_roi_probs_fpn6]
    # If training with Faster R-CNN, then inputs will additionally include
    #  + [roidb, im_info]
    rois = collect(inputs, self._train)
    if self._train:
        # During training we reuse the data loader code. We populate roidb
        # entries on the fly using the rois generated by RPN.
        # im_info: [[im_height, im_width, im_scale], ...]
        im_info = inputs[-1].data
        im_scales = im_info[:, 2]
        roidb = blob_utils.deserialize(inputs[-2].data)
        # For historical consistency with the original Faster R-CNN
        # implementation we are *not* filtering crowd proposals.
        # This choice should be investigated in the future (it likely does
        # not matter).
        json_dataset.add_proposals(roidb, rois, im_scales, crowd_thresh=0)
        # Compute training labels for the RPN proposals; also handles
        # distributing the proposals over FPN levels
        output_blob_names = roi_data.fast_rcnn.get_fast_rcnn_blob_names()
        blobs = {k: [] for k in output_blob_names}
        roi_data.fast_rcnn.add_fast_rcnn_blobs(blobs, im_scales, roidb)
        for i, k in enumerate(output_blob_names):
            blob_utils.py_op_copy_blob(blobs[k], outputs[i])
    else:
        # For inference we have a special code path that avoids some data
        # loader overhead
        distribute(rois, None, outputs, self._train)
Example 7: forward
# Required import: from datasets import json_dataset [as alias]
# Or: from datasets.json_dataset import add_proposals [as alias]
def forward(self, inputs, outputs):
    """See modeling.detector.CollectAndDistributeFpnRpnProposalsRec for
    inputs/outputs documentation.
    """
    # inputs is
    # [rpn_rois_fpn2, ..., rpn_rois_fpn6,
    #  rpn_roi_probs_fpn2, ..., rpn_roi_probs_fpn6]
    # If training with Faster R-CNN, then inputs will additionally include
    #  + [roidb, im_info]
    rois = collect(inputs, self._train)
    if self._train:
        # During training we reuse the data loader code. We populate roidb
        # entries on the fly using the rois generated by RPN.
        # im_info: [[im_height, im_width, im_scale], ...]
        im_info = inputs[-1].data
        im_scales = im_info[:, 2]
        roidb = blob_utils.deserialize(inputs[-2].data)
        # For historical consistency with the original Faster R-CNN
        # implementation we are *not* filtering crowd proposals.
        # This choice should be investigated in the future (it likely does
        # not matter).
        json_dataset.add_proposals(roidb, rois, im_scales, crowd_thresh=0)
        # Compute training labels for the RPN proposals; also handles
        # distributing the proposals over FPN levels
        output_blob_names = roi_data.fast_rcnn.get_fast_rcnn_blob_names()
        blobs = {k: [] for k in output_blob_names}
        roi_data.fast_rcnn.add_fast_rcnn_blobs_rec(blobs, im_scales, roidb)
        for i, k in enumerate(output_blob_names):
            blob_utils.py_op_copy_blob(blobs[k], outputs[i])
    else:
        # For inference we have a special code path that avoids some data
        # loader overhead
        distribute(rois, None, outputs, self._train)
Developer: lvpengyuan, Project: masktextspotter.caffe2, Lines of code: 35, Source file: collect_and_distribute_fpn_rpn_proposals_rec.py
Example 8: forward
# Required import: from datasets import json_dataset [as alias]
# Or: from datasets.json_dataset import add_proposals [as alias]
def forward(self, inputs, outputs):
    # During training we reuse the data loader code. We populate roidb
    # entries on the fly using the rois generated by RPN.
    # im_info: [[im_height, im_width, im_scale], ...]
    rois = inputs[0].data
    roidb = blob_utils.deserialize(inputs[1].data)
    im_info = inputs[2].data
    im_scales = im_info[:, 2]
    output_blob_names = roi_data.fast_rcnn.get_fast_rcnn_blob_names()
    json_dataset.add_proposals(roidb, rois, im_scales)
    blobs = {k: [] for k in output_blob_names}
    roi_data.fast_rcnn.add_fast_rcnn_blobs(blobs, im_scales, roidb)
    for i, k in enumerate(output_blob_names):
        blob_utils.py_op_copy_blob(blobs[k], outputs[i])