当前位置: 首页>>代码示例>>Python>>正文


Python onnx.TensorProto类代码示例

本文整理汇总了Python中onnx.TensorProto方法的典型用法代码示例。如果您正苦于以下问题:Python onnx.TensorProto方法的具体用法?Python onnx.TensorProto怎么用?Python onnx.TensorProto使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在onnx的用法示例。


在下文中一共展示了onnx.TensorProto方法的9个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。

示例1: load_test_data

# 需要导入模块: import onnx [as 别名]
# 或者: from onnx import TensorProto [as 别名]
def load_test_data(data_dir, input_names, output_names):
    """Load test tensors stored as protobuf files under *data_dir*.

    Reads ``input_*.pb`` and ``output_*.pb`` files, pairing each tensor
    with a name: the tensor's own embedded name when it matches one of
    the expected names, otherwise the next unclaimed expected name.
    Returns a 2-tuple ``(inputs, outputs)`` of lists of
    ``(name, ndarray)`` pairs.
    """
    def read_group(kind, expected_names):
        remaining = list(expected_names)
        pairs = []
        pattern = os.path.join(data_dir, '%s_*.pb' % kind)
        for path in sorted(glob.glob(pattern)):
            tensor = onnx.TensorProto()
            with open(path, 'rb') as f:
                tensor.ParseFromString(f.read())
            # Prefer the name embedded in the tensor; otherwise consume
            # the expected names in positional order.
            if tensor.name in remaining:
                name = tensor.name
                remaining.remove(name)
            else:
                name = remaining.pop(0)
            pairs.append((name, onnx.numpy_helper.to_array(tensor)))
        return pairs

    return (read_group('input', input_names),
            read_group('output', output_names))
开发者ID:pfnet-research,项目名称:chainer-compiler,代码行数:19,代码来源:run_onnx_util.py

示例2: _load_protobuf_data

# 需要导入模块: import onnx [as 别名]
# 或者: from onnx import TensorProto [as 别名]
def _load_protobuf_data(self, model_dir,
                        data_sets: List[OnnxTestData]):
    """Collect protobuf test cases found under *model_dir*.

    For every ``test_data_set*`` directory, parses the ``input_*.pb``
    and ``output_*.pb`` tensors into ``{tensor.name: ndarray}`` dicts
    and appends one OnnxTestData per directory to *data_sets* (mutated
    in place; nothing is returned).
    """
    def read_tensors(directory, prefix):
        tensors = {}
        count = len(glob.glob(os.path.join(directory, prefix + '_*.pb')))
        for idx in range(count):
            path = os.path.join(directory, '{}_{}.pb'.format(prefix, idx))
            proto = onnx.TensorProto()
            with open(path, 'rb') as fh:
                proto.ParseFromString(fh.read())
            tensors[proto.name] = numpy_helper.to_array(proto)
        return tensors

    for test_data_dir in glob.glob(os.path.join(model_dir, "test_data_set*")):
        data_sets.append(OnnxTestData(
            read_tensors(test_data_dir, 'input'),
            read_tensors(test_data_dir, 'output')))
开发者ID:deep500,项目名称:deep500,代码行数:24,代码来源:onnx_test_parser.py

示例3: convert_cast

# 需要导入模块: import onnx [as 别名]
# 或者: from onnx import TensorProto [as 别名]
def convert_cast(node, **kwargs):
    """Map MXNet's Cast operator attributes to onnx's Cast operator
    and return the created node.
    """
    onnx = import_onnx_modules()
    mx_dtype = node["attrs"]["dtype"]

    # dtype can be mapped only with types from TensorProto: onnx names
    # 32/64-bit floats 'float'/'double', so translate MXNet's spellings
    # before the getattr lookup below.
    # (see https://github.com/onnx/onnx/blob/master/onnx/mapping.py)
    dtype_aliases = {'float32': 'float', 'float64': 'double'}
    onnx_dtype = dtype_aliases.get(mx_dtype, mx_dtype)

    name = node["name"]
    input_idx = kwargs["index_lookup"][node["inputs"][0][0]]
    source_node = kwargs["proc_nodes"][input_idx].name

    cast_node = onnx.helper.make_node(
        "Cast",
        [source_node],
        [name],
        to=getattr(onnx.TensorProto, onnx_dtype.upper()),
        name=name,
    )
    return [cast_node]
开发者ID:awslabs,项目名称:dynamic-training-with-apache-mxnet-on-aws,代码行数:31,代码来源:_op_translations.py

示例4: get_model_input

# 需要导入模块: import onnx [as 别名]
# 或者: from onnx import TensorProto [as 别名]
def get_model_input(model_dir):
    """Gather test inputs for the model in *model_dir*, padded to 1000.

    Inputs come from ``test_data_*.npz`` archives and from protobuf
    tensors in ``test_data_set*`` directories; the list is then padded
    with random arrays shaped like the last real input so inference
    timing runs over 1000 data points.
    """
    import onnx
    from onnx import numpy_helper

    model_inputs = []
    for npz_path in glob.glob(
            os.path.join(model_dir, 'test_data_*.npz')):
        archive = np.load(npz_path, encoding='bytes')
        model_inputs = list(archive['inputs'])

    for data_dir in glob.glob(
            os.path.join(model_dir, "test_data_set*")):
        n_inputs = len(glob.glob(os.path.join(data_dir, 'input_*.pb')))
        for idx in range(n_inputs):
            proto = onnx.TensorProto()
            pb_path = os.path.join(data_dir, 'input_{}.pb'.format(idx))
            with open(pb_path, 'rb') as f:
                proto.ParseFromString(f.read())
            model_inputs.append(numpy_helper.to_array(proto))

    # NOTE(review): raises IndexError when no test data was found —
    # presumably callers always pass a model dir with test data; verify.
    input_shape = model_inputs[-1].shape
    # generating 1000 data points for inference time test
    while len(model_inputs) < 1000:
        model_inputs.append(np.random.randn(*input_shape))

    return model_inputs
开发者ID:awslabs,项目名称:deeplearning-benchmark,代码行数:28,代码来源:import_benchmarkscript.py

示例5: read_pb_file

# 需要导入模块: import onnx [as 别名]
# 或者: from onnx import TensorProto [as 别名]
def read_pb_file(data_file):
    """ Helper function to get data from pb files"""
    with open(data_file, 'rb') as pb_file:
        raw = pb_file.read()
    proto = onnx.TensorProto()
    proto.ParseFromString(raw)
    return numpy_helper.to_array(proto)
开发者ID:onnx,项目名称:onnx-mxnet,代码行数:8,代码来源:test_models.py

示例6: readInputFromFile

# 需要导入模块: import onnx [as 别名]
# 或者: from onnx import TensorProto [as 别名]
def readInputFromFile(full_path):
    """Parse the protobuf file at *full_path* into an onnx.TensorProto."""
    proto = onnx.TensorProto()
    with open(full_path, 'rb') as stream:
        proto.ParseFromString(stream.read())
    return proto


# Generate a {input/output_name: input/output_arr} dictionary 
开发者ID:microsoft,项目名称:OLive,代码行数:10,代码来源:check_model.py

示例7: equalAndThen

# 需要导入模块: import onnx [as 别名]
# 或者: from onnx import TensorProto [as 别名]
def equalAndThen(self, x, y, msg, k):
    """
    Helper for implementing 'requireEqual' and 'checkEqual'.  Upon failure,
    invokes continuation 'k' with the error message.
    """
    if isinstance(x, onnx.TensorProto) and isinstance(y, onnx.TensorProto):
        # Compare names first, then the tensor payloads as numpy arrays.
        self.equalAndThen(x.name, y.name, msg, k)
        # Use numpy for the comparison
        t1 = onnx.numpy_helper.to_array(x)
        t2 = onnx.numpy_helper.to_array(y)
        new_msg = "{}In embedded parameter '{}'".format(colonize(msg), x.name)
        self.equalAndThen(t1, t2, new_msg, k)
    elif isinstance(x, np.ndarray) and isinstance(y, np.ndarray):
        try:
            np.testing.assert_equal(x, y)
        except AssertionError as e:
            # BUG FIX: a stray bare `raise` here re-raised the
            # AssertionError, making the continuation call below
            # unreachable and breaking the contract that failures are
            # reported through `k`.
            k("{}{}".format(colonize(msg, ": "), str(e).lstrip()))
    else:
        if x != y:
            # TODO: Better algorithm for lists
            sx = str(x)
            sy = str(y)
            if len(sx) > 40 or len(sy) > 40 or '\n' in sx or '\n' in sy:
                # long form: print both values between ruler lines
                ruler = "=" * 50
                k("\n{}The value\n{}\n{}\n{}\n\ndoes not equal\n\n{}\n{}\n{}"
                    .format(colonize(msg, ":\n"), ruler, sx, ruler,
                            ruler, sy, ruler))
            else:
                k("{}{} != {}".format(colonize(msg), sx, sy))
开发者ID:onnxbot,项目名称:onnx-fb-universe,代码行数:32,代码来源:verify.py

示例8: tensortype_to_ndarray

# 需要导入模块: import onnx [as 别名]
# 或者: from onnx import TensorProto [as 别名]
def tensortype_to_ndarray(tensor_type):
    """Build a random ndarray matching *tensor_type*'s shape and dtype.

    Only FLOAT and INT32 element types are supported; any other
    element type raises TypeError.
    """
    shape = [dim.dim_value for dim in tensor_type.shape.dim]
    if tensor_type.elem_type == onnx.TensorProto.FLOAT:
        np_dtype = np.float32
    elif tensor_type.elem_type == onnx.TensorProto.INT32:
        # BUG FIX: the original compared against onnx.TensorProto.INT,
        # which does not exist (the DataType enum member is INT32), so
        # this branch raised AttributeError before ever matching.
        np_dtype = np.int32
    else:
        # BUG FIX: the original used a bare `raise` with no active
        # exception, which raises an uninformative RuntimeError.
        raise TypeError(
            'unsupported tensor element type: {}'.format(
                tensor_type.elem_type))
    return np.random.rand(*shape).astype(np_dtype)
开发者ID:onnxbot,项目名称:onnx-fb-universe,代码行数:14,代码来源:update-models-from-caffe2.py

示例9: get_test_files

# 需要导入模块: import onnx [as 别名]
# 或者: from onnx import TensorProto [as 别名]
def get_test_files(name):
    """Extract tar file and returns model path and input, output data"""
    archive_name = download(URLS.get(name), dirname=CURR_PATH.__str__())
    # extract tar file
    archive_path = os.path.join(CURR_PATH, archive_name)
    # NOTE(review): extractall on a downloaded archive trusts its member
    # paths; confirm the URL source is trusted.
    archive = tarfile.open(archive_path.__str__(), "r:*")
    archive.extractall(path=CURR_PATH.__str__())
    archive.close()

    data_dir = os.path.join(CURR_PATH, name)
    model_path = os.path.join(data_dir, 'model.onnx')

    def _read_tensor(pb_path):
        # Parse one serialized TensorProto and convert it to a ndarray.
        proto = TensorProto()
        with open(pb_path, 'rb') as stream:
            proto.ParseFromString(stream.read())
        return numpy_helper.to_array(proto)

    inputs = []
    outputs = []
    # get test files
    for entry in os.listdir(data_dir):
        case_dir = os.path.join(data_dir, entry)
        # skip the non-dir files
        if not os.path.isdir(case_dir):
            continue
        inputs.append(_read_tensor(os.path.join(case_dir, 'input_0.pb')))
        outputs.append(_read_tensor(os.path.join(case_dir, 'output_0.pb')))

    return model_path, inputs, outputs
开发者ID:mahyarnajibi,项目名称:SNIPER-mxnet,代码行数:34,代码来源:onnx_test.py


注:本文中的onnx.TensorProto方法示例由纯净天空整理自Github/MSDocs等开源代码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。