本文整理匯總了Python中onnx.TensorProto方法的典型用法代碼示例。如果您正苦於以下問題:Python onnx.TensorProto方法的具體用法?Python onnx.TensorProto怎麽用?Python onnx.TensorProto使用的例子?那麽, 這裏精選的方法代碼示例或許可以為您提供幫助。您也可以進一步了解該方法所在類onnx
的用法示例。
在下文中一共展示了onnx.TensorProto方法的9個代碼示例,這些例子默認根據受歡迎程度排序。您可以為喜歡或者感覺有用的代碼點讚,您的評價將有助於系統推薦出更棒的Python代碼示例。
示例1: load_test_data
# 需要導入模塊: import onnx [as 別名]
# 或者: from onnx import TensorProto [as 別名]
def load_test_data(data_dir, input_names, output_names):
    """Load input/output test tensors from ``*.pb`` files under *data_dir*.

    Files named ``input_*.pb`` / ``output_*.pb`` are read in sorted order.
    Each tensor is matched to a name from the corresponding name list: by
    its embedded ``tensor.name`` when that name is expected, otherwise by
    position (first remaining expected name).

    Returns a 2-tuple ``(inputs, outputs)``; each element is a list of
    ``(name, ndarray)`` pairs.
    """
    collected = []
    for kind, names in (('input', input_names), ('output', output_names)):
        pending = list(names)
        pairs = []
        pattern = os.path.join(data_dir, '%s_*.pb' % kind)
        for pb_path in sorted(glob.glob(pattern)):
            tensor = onnx.TensorProto()
            with open(pb_path, 'rb') as f:
                tensor.ParseFromString(f.read())
            if tensor.name in pending:
                name = tensor.name
                pending.remove(name)
            else:
                # Fall back to positional matching for unnamed tensors.
                name = pending.pop(0)
            pairs.append((name, onnx.numpy_helper.to_array(tensor)))
        collected.append(pairs)
    return tuple(collected)
示例2: _load_protobuf_data
# 需要導入模塊: import onnx [as 別名]
# 或者: from onnx import TensorProto [as 別名]
def _load_protobuf_data(self, model_dir,
                        data_sets: List[OnnxTestData]):
    """Collect protobuf test data under *model_dir*.

    For every ``test_data_set*`` directory, reads all ``input_*.pb`` and
    ``output_*.pb`` tensors (keyed by their embedded tensor names) and
    appends one ``OnnxTestData(inputs, outputs)`` to *data_sets*.
    """
    for test_data_dir in glob.glob(os.path.join(model_dir, "test_data_set*")):

        def _read_tensors(prefix):
            # Map tensor.name -> ndarray for prefix_0.pb .. prefix_{n-1}.pb.
            tensors = {}
            count = len(glob.glob(os.path.join(test_data_dir, prefix + '_*.pb')))
            for idx in range(count):
                path = os.path.join(test_data_dir, '{}_{}.pb'.format(prefix, idx))
                proto = onnx.TensorProto()
                with open(path, 'rb') as fh:
                    proto.ParseFromString(fh.read())
                tensors[proto.name] = numpy_helper.to_array(proto)
            return tensors

        # Arguments evaluate left-to-right: inputs are read before outputs,
        # matching the original statement order.
        data_sets.append(OnnxTestData(_read_tensors('input'),
                                      _read_tensors('output')))
示例3: convert_cast
# 需要導入模塊: import onnx [as 別名]
# 或者: from onnx import TensorProto [as 別名]
def convert_cast(node, **kwargs):
    """Map MXNet's Cast operator attributes to onnx's Cast operator
    and return the created node.
    """
    onnx = import_onnx_modules()
    name = node["name"]
    proc_nodes = kwargs["proc_nodes"]
    inputs = node["inputs"]
    dtype = node["attrs"]["dtype"]

    # dtype can be mapped only with types from TensorProto:
    # float32 is mapped to float and float64 to double in onnx, following
    # https://github.com/onnx/onnx/blob/master/onnx/mapping.py
    # Other dtype strings pass through unchanged.
    dtype = {'float32': 'float', 'float64': 'double'}.get(dtype, dtype)

    input_node_id = kwargs["index_lookup"][inputs[0][0]]
    input_node = proc_nodes[input_node_id].name

    cast_node = onnx.helper.make_node(
        "Cast",
        [input_node],
        [name],
        to=getattr(onnx.TensorProto, dtype.upper()),
        name=name,
    )
    return [cast_node]
示例4: get_model_input
# 需要導入模塊: import onnx [as 別名]
# 或者: from onnx import TensorProto [as 別名]
def get_model_input(model_dir):
    """Collect model input arrays for an inference-time test.

    Inputs are gathered from two sources under *model_dir*:
    ``test_data_*.npz`` archives (their ``inputs`` entry) and
    ``test_data_set*/input_*.pb`` serialized TensorProtos.  The list is
    then padded with random arrays of the same shape as the last real
    input until it holds 1000 data points.

    Raises:
        ValueError: if *model_dir* contains no test data at all (the
            original code crashed with an opaque IndexError here).
    """
    model_inputs = []
    for test_data_npz in glob.glob(
            os.path.join(model_dir, 'test_data_*.npz')):
        test_data = np.load(test_data_npz, encoding='bytes')
        model_inputs = list(test_data['inputs'])
    for test_data_dir in glob.glob(
            os.path.join(model_dir, "test_data_set*")):
        # Imported lazily so the npz-only path works without onnx installed.
        import onnx
        from onnx import numpy_helper
        inputs_num = len(glob.glob(os.path.join(test_data_dir, 'input_*.pb')))
        for i in range(inputs_num):
            input_file = os.path.join(test_data_dir, 'input_{}.pb'.format(i))
            tensor = onnx.TensorProto()
            with open(input_file, 'rb') as f:
                tensor.ParseFromString(f.read())
            model_inputs.append(numpy_helper.to_array(tensor))
    if not model_inputs:
        raise ValueError('no test data found under {}'.format(model_dir))
    input_shape = model_inputs[-1].shape
    # generating 1000 data points for inference time test
    for _ in range(1000 - len(model_inputs)):
        model_inputs.append(np.random.randn(*input_shape))
    return model_inputs
示例5: read_pb_file
# 需要導入模塊: import onnx [as 別名]
# 或者: from onnx import TensorProto [as 別名]
def read_pb_file(data_file):
    """Deserialize the TensorProto stored in *data_file* and return it
    as a numpy array.
    """
    with open(data_file, 'rb') as pb_file:
        raw = pb_file.read()
    proto = onnx.TensorProto()
    proto.ParseFromString(raw)
    return numpy_helper.to_array(proto)
示例6: readInputFromFile
# 需要導入模塊: import onnx [as 別名]
# 或者: from onnx import TensorProto [as 別名]
def readInputFromFile(full_path):
    """Parse the serialized TensorProto at *full_path* and return the
    raw proto object (not converted to an ndarray).
    """
    proto = onnx.TensorProto()
    with open(full_path, 'rb') as fh:
        serialized = fh.read()
    proto.ParseFromString(serialized)
    return proto
# Generate a {input/output_name: input/output_arr} dictionary
示例7: equalAndThen
# 需要導入模塊: import onnx [as 別名]
# 或者: from onnx import TensorProto [as 別名]
def equalAndThen(self, x, y, msg, k):
    """
    Helper for implementing 'requireEqual' and 'checkEqual'. Upon failure,
    invokes continuation 'k' with the error message.

    TensorProtos compare by name, then by their numpy contents; ndarrays
    compare with np.testing.assert_equal; everything else with `!=`.
    """
    if isinstance(x, onnx.TensorProto) and isinstance(y, onnx.TensorProto):
        self.equalAndThen(x.name, y.name, msg, k)
        # Use numpy for the comparison
        t1 = onnx.numpy_helper.to_array(x)
        t2 = onnx.numpy_helper.to_array(y)
        new_msg = "{}In embedded parameter '{}'".format(colonize(msg), x.name)
        self.equalAndThen(t1, t2, new_msg, k)
    elif isinstance(x, np.ndarray) and isinstance(y, np.ndarray):
        try:
            np.testing.assert_equal(x, y)
        except AssertionError as e:
            # BUG FIX: a bare `raise` here made the following `k(...)`
            # call unreachable, breaking the continuation contract that
            # every other branch honors (report via `k`, don't raise).
            k("{}{}".format(colonize(msg, ": "), str(e).lstrip()))
    else:
        if x != y:
            # TODO: Better algorithm for lists
            sx = str(x)
            sy = str(y)
            if len(sx) > 40 or len(sy) > 40 or '\n' in sx or '\n' in sy:
                # long form
                l = "=" * 50
                k("\n{}The value\n{}\n{}\n{}\n\ndoes not equal\n\n{}\n{}\n{}"
                  .format(colonize(msg, ":\n"), l, sx, l, l, sy, l))
            else:
                k("{}{} != {}".format(colonize(msg), sx, sy))
示例8: tensortype_to_ndarray
# 需要導入模塊: import onnx [as 別名]
# 或者: from onnx import TensorProto [as 別名]
def tensortype_to_ndarray(tensor_type):
    """Build a random ndarray whose shape and dtype match an onnx
    TypeProto.Tensor.

    Supports FLOAT, DOUBLE, INT32 and INT64 element types; anything else
    raises ValueError.
    """
    shape = [dim.dim_value for dim in tensor_type.shape.dim]
    # BUG FIX: the original compared against onnx.TensorProto.INT, which
    # does not exist (TensorProto defines INT32/INT64, not INT), so any
    # non-float input crashed with AttributeError.
    dtype_map = {
        onnx.TensorProto.FLOAT: np.float32,
        onnx.TensorProto.DOUBLE: np.float64,
        onnx.TensorProto.INT32: np.int32,
        onnx.TensorProto.INT64: np.int64,
    }
    dtype = dtype_map.get(tensor_type.elem_type)
    if dtype is None:
        # The original used a bare `raise` with no active exception,
        # which itself raises RuntimeError; be explicit instead.
        raise ValueError(
            'unsupported elem_type: {}'.format(tensor_type.elem_type))
    return np.random.rand(*shape).astype(dtype)
示例9: get_test_files
# 需要導入模塊: import onnx [as 別名]
# 或者: from onnx import TensorProto [as 別名]
def get_test_files(name):
    """Extract tar file and returns model path and input, output data"""
    tar_name = download(URLS.get(name), dirname=CURR_PATH.__str__())
    # extract tar file
    tar_path = os.path.join(CURR_PATH, tar_name)
    # NOTE(review): extractall on an untrusted archive permits path
    # traversal — confirm the download source is trusted.
    with tarfile.open(tar_path.__str__(), "r:*") as tar:
        tar.extractall(path=CURR_PATH.__str__())

    data_dir = os.path.join(CURR_PATH, name)
    model_path = os.path.join(data_dir, 'model.onnx')

    def _load_tensor(path):
        # Deserialize one TensorProto file and convert it to an ndarray.
        proto = TensorProto()
        with open(path, 'rb') as fh:
            proto.ParseFromString(fh.read())
        return numpy_helper.to_array(proto)

    inputs = []
    outputs = []
    # get test files: each sub-directory of data_dir is one test case
    for entry in os.listdir(data_dir):
        case_dir = os.path.join(data_dir, entry)
        # skip the non-dir files
        if not os.path.isdir(case_dir):
            continue
        inputs.append(_load_tensor(os.path.join(case_dir, 'input_0.pb')))
        outputs.append(_load_tensor(os.path.join(case_dir, 'output_0.pb')))
    return model_path, inputs, outputs