本文整理汇总了Python中onnx.backend.base.Backend方法的典型用法代码示例。如果您正苦于以下问题:Python base.Backend方法的具体用法?Python base.Backend怎么用?Python base.Backend使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类onnx.backend.base
的用法示例。
在下文中一共展示了base.Backend方法的3个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: supports_ngraph_device
# 需要导入模块: from onnx.backend import base [as 别名]
# 或者: from onnx.backend.base import Backend [as 别名]
def supports_ngraph_device(cls, ngraph_device_name):  # type: (str) -> bool
    """Check whether a particular nGraph device is supported by the current nGraph library.

    :param ngraph_device_name: Name of the nGraph device (backend) to probe.
    :return: True if the current nGraph library supports ngraph_device_name.
    """
    try:
        # Creating a runtime is the probe: it fails when the backend is absent.
        ng.runtime(backend_name=ngraph_device_name)
    except RuntimeError as e:
        # Catch the error raised when the backend isn't available:
        # 'Backend {ngraph_device_name} not found in registered backends'
        if str(ngraph_device_name) in str(e) and 'not found' in str(e):
            return False
        # Any other RuntimeError is unexpected; re-raise with the original
        # traceback intact (bare `raise` instead of `raise e`).
        raise
    return True
示例2: __init__
# 需要导入模块: from onnx.backend import base [as 别名]
# 或者: from onnx.backend.base import Backend [as 别名]
def __init__(self, backend, zoo_models, parent_module=None):
    # type: (Type[Backend], List[Dict[str,str]], Optional[str]) -> None
    """Set up a model-zoo test runner for *backend*.

    :param backend: The ONNX backend class to run the models on.
    :param zoo_models: Model-zoo entries; each dict provides 'model_name',
        'url', and optionally 'rtol'/'atol' tolerance overrides.
    :param parent_module: Optional module name the tests are attached to.
    """
    self.backend = backend
    self._parent_module = parent_module
    self._include_patterns = set()  # type: Set[Pattern[Text]]
    self._exclude_patterns = set()  # type: Set[Pattern[Text]]
    self._test_items = defaultdict(dict)  # type: Dict[Text, Dict[Text, TestItem]]
    # Register one real-model test case per zoo entry.
    for entry in zoo_models:
        model_name = entry['model_name']
        case = OnnxTestCase(
            name='test_{}'.format(model_name),
            url=entry['url'],
            model_name=model_name,
            model_dir=None,
            model=None,
            data_sets=None,
            kind='OnnxBackendRealModelTest',
            rtol=entry.get('rtol', 0.001),
            atol=entry.get('atol', 1e-07),
        )
        self._add_model_test(case, 'Zoo')
示例3: prepare
# 需要导入模块: from onnx.backend import base [as 别名]
# 或者: from onnx.backend.base import Backend [as 别名]
def prepare(cls,
            model,
            device='CPU',
            strict=True,
            logging_level='INFO',
            **kwargs):
    """Prepare an ONNX model for the Tensorflow backend.

    This function converts an ONNX model to an internal representation
    of the computational graph called TensorflowRep and returns
    the converted representation.

    :param model: The ONNX model to be converted.
    :param device: The device to execute this model on.
    :param strict: Whether to enforce semantic equivalence between the original model
        and the converted tensorflow model, defaults to True (yes, enforce semantic equivalence).
        Changing to False is strongly discouraged.
        Currently, the strict flag only affects the behavior of MaxPool and AveragePool ops.
    :param logging_level: The logging level, default is INFO. Change it to DEBUG
        to see more conversion details or to WARNING to see less
    :returns: A TensorflowRep class object representing the ONNX model
    """
    super(TensorflowBackend, cls).prepare(model, device, **kwargs)
    common.logger.setLevel(logging_level)
    # Apply the level to every attached handler. The original indexed
    # handlers[0], which raises IndexError when the logger has no handler
    # configured; iterating is safe and covers multiple handlers.
    for handler in common.logger.handlers:
        handler.setLevel(logging_level)
    return cls.onnx_model_to_tensorflow_rep(model, strict)