This article collects typical usage examples of the Python method onnx.backend.base.Backend. If you are wondering what base.Backend does and how to use it, the curated code examples below may help. You can also explore other uses of the containing module, onnx.backend.base.
Three code examples of base.Backend are shown below, sorted by popularity by default. You can upvote examples you like or find useful; your feedback helps the system recommend better Python code examples.
Example 1: supports_ngraph_device
# Required import: from onnx.backend import base [as alias]
# Or: from onnx.backend.base import Backend [as alias]
def supports_ngraph_device(cls, ngraph_device_name):  # type: (str) -> bool
    """Check whether particular nGraph device is supported by current nGraph library.

    :param ngraph_device_name: Name of nGraph device.
    :return: True if current nGraph library supports ngraph_device_name.
    """
    try:
        ng.runtime(backend_name=ngraph_device_name)
    except RuntimeError as e:
        # Catch error raised when backend isn't available:
        # 'Backend {ngraph_device_name} not found in registered backends'
        if str(ngraph_device_name) in str(e) and 'not found' in str(e):
            return False
        else:
            raise e
    return True
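A minimal usage sketch follows. It assumes the method above is defined as a classmethod on a Backend subclass named NgraphBackend (the class name is an assumption, not shown above) and that the ngraph package is importable as ng:

import ngraph as ng  # provides the ng.runtime call used by the method above

# NgraphBackend is a hypothetical Backend subclass exposing the classmethod above.
if NgraphBackend.supports_ngraph_device('CPU'):
    runtime = ng.runtime(backend_name='CPU')  # safe: the backend is registered
else:
    print("nGraph 'CPU' backend not found; choose another device.")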
Example 2: __init__
# Required import: from onnx.backend import base [as alias]
# Or: from onnx.backend.base import Backend [as alias]
def __init__(self, backend, zoo_models, parent_module=None):
    # type: (Type[Backend], List[Dict[str,str]], Optional[str]) -> None
    self.backend = backend
    self._parent_module = parent_module
    self._include_patterns = set()  # type: Set[Pattern[Text]]
    self._exclude_patterns = set()  # type: Set[Pattern[Text]]
    self._test_items = defaultdict(dict)  # type: Dict[Text, Dict[Text, TestItem]]
    for zoo_model in zoo_models:
        test_name = 'test_{}'.format(zoo_model['model_name'])
        test_case = OnnxTestCase(
            name=test_name,
            url=zoo_model['url'],
            model_name=zoo_model['model_name'],
            model_dir=None,
            model=None,
            data_sets=None,
            kind='OnnxBackendRealModelTest',
            rtol=zoo_model.get('rtol', 0.001),
            atol=zoo_model.get('atol', 1e-07),
        )
        self._add_model_test(test_case, 'Zoo')
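A sketch of how such a test runner might be instantiated. It assumes the __init__ above belongs to a class named ModelZooTestRunner and that SomeBackend is an onnx.backend.base.Backend subclass; both names, and the URL, are placeholders for illustration:

zoo_models = [
    # Each entry describes one model-zoo model; rtol/atol are optional and
    # fall back to the defaults shown in __init__ above.
    {
        'model_name': 'resnet50',
        'url': 'https://example.com/models/resnet50.tar.gz',  # placeholder URL
        'rtol': 0.001,
        'atol': 1e-07,
    },
]
runner = ModelZooTestRunner(SomeBackend, zoo_models, parent_module=__name__)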
Example 3: prepare
# Required import: from onnx.backend import base [as alias]
# Or: from onnx.backend.base import Backend [as alias]
def prepare(cls,
            model,
            device='CPU',
            strict=True,
            logging_level='INFO',
            **kwargs):
    """Prepare an ONNX model for the Tensorflow backend.

    This function converts an ONNX model to an internal representation
    of the computational graph called TensorflowRep and returns
    the converted representation.

    :param model: The ONNX model to be converted.
    :param device: The device to execute this model on.
    :param strict: Whether to enforce semantic equivalence between the original model
        and the converted tensorflow model, defaults to True (yes, enforce semantic equivalence).
        Changing to False is strongly discouraged.
        Currently, the strict flag only affects the behavior of MaxPool and AveragePool ops.
    :param logging_level: The logging level, default is INFO. Change it to DEBUG
        to see more conversion details or to WARNING to see less.
    :returns: A TensorflowRep class object representing the ONNX model.
    """
    super(TensorflowBackend, cls).prepare(model, device, **kwargs)
    common.logger.setLevel(logging_level)
    common.logger.handlers[0].setLevel(logging_level)
    return cls.onnx_model_to_tensorflow_rep(model, strict)
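A typical call sequence, shown as a sketch: it assumes the onnx-tf package layout (onnx_tf.backend.prepare, which wraps the classmethod above), a local model.onnx file, and an input shape of (1, 3, 224, 224), all of which are assumptions for illustration:

import numpy as np
import onnx
from onnx_tf.backend import prepare  # onnx-tf module layout assumed

model = onnx.load('model.onnx')  # hypothetical model file
tf_rep = prepare(model, device='CPU', strict=True, logging_level='WARNING')
dummy_input = np.random.randn(1, 3, 224, 224).astype(np.float32)  # shape is an assumption
outputs = tf_rep.run(dummy_input)  # execute the converted TensorflowRep
print(outputs)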