This article collects typical usage examples of the Python module coremltools.models. If you are wondering what coremltools.models is for, how to use it, or what real code that uses it looks like, the curated examples below may help. You can also explore further usage examples of the enclosing coremltools package.
A total of 15 code examples of coremltools.models are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code examples.
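The test helpers _get_visible_items and _check_visible_modules used throughout the examples are not reproduced on this page. Below is a minimal sketch of what they presumably look like, assuming they simply compare a module's public attributes against an expected list; this is an illustrative reconstruction, not the original test code.

import coremltools as ct

def _get_visible_items(d):
    # Public surface only: drop names that start with an underscore.
    return [x for x in dir(d) if not x.startswith("_")]

def _check_visible_modules(actual, expected):
    # Fail loudly if the visible API differs from the expected list.
    assert set(actual) == set(expected), "API mismatch: got {}, expected {}".format(
        actual, expected
    )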
Example 1: test_top_level
# Required module: import coremltools [as alias]
# Or: from coremltools import models [as alias]
def test_top_level(self):
    expected = [
        "ClassifierConfig",
        "EnumeratedShapes",
        "ImageType",
        "RangeDim",
        "SPECIFICATION_VERSION",
        "Shape",
        "TensorType",
        "convert",
        "converters",
        "models",
        "proto",
        "target",
        "utils",
        "version",
    ]
    _check_visible_modules(_get_visible_items(ct), expected)
Example 2: test_models_neural_network
# Required module: import coremltools [as alias]
# Or: from coremltools import models [as alias]
def test_models_neural_network(self):
    expected = [
        "AdamParams",
        "NeuralNetworkBuilder",
        "SgdParams",
        "builder",
        "datatypes",
        "flexible_shape_utils",
        "optimization_utils",
        "printer",
        "quantization_utils",
        "set_training_features",
        "set_transform_interface_params",
        "spec_inspection_utils",
        "update_optimizer_utils",
        "utils",
    ]
    _check_visible_modules(_get_visible_items(ct.models.neural_network), expected)
Example 3: test_models_neural_network_quantization_utils
# Required module: import coremltools [as alias]
# Or: from coremltools import models [as alias]
def test_models_neural_network_quantization_utils(self):
    expected = [
        "AdvancedQuantizedLayerSelector",
        "MatrixMultiplyLayerSelector",
        "ModelMetrics",
        "NoiseMetrics",
        "OutputMetric",
        "QuantizedLayerSelector",
        "TopKMetrics",
        "activate_int8_int8_matrix_multiplications",
        "compare_models",
        "quantize_weights",
    ]
    _check_visible_modules(
        _get_visible_items(ct.models.neural_network.quantization_utils), expected
    )
Example 4: test_models_neural_network_flexible_shape_utils
# Required module: import coremltools [as alias]
# Or: from coremltools import models [as alias]
def test_models_neural_network_flexible_shape_utils(self):
    expected = [
        "NeuralNetworkImageSize",
        "NeuralNetworkImageSizeRange",
        "NeuralNetworkMultiArrayShape",
        "NeuralNetworkMultiArrayShapeRange",
        "Shape",
        "ShapeRange",
        "Size",
        "add_enumerated_image_sizes",
        "add_enumerated_multiarray_shapes",
        "add_multiarray_ndshape_enumeration",
        "set_multiarray_ndshape_range",
        "update_image_size_range",
        "update_multiarray_shape_range",
    ]
    _check_visible_modules(
        _get_visible_items(ct.models.neural_network.flexible_shape_utils), expected
    )
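Example 4 only checks the visible API of flexible_shape_utils. As a quick orientation, here is a hedged sketch of how a couple of those helpers are typically used to make an image input size-flexible; the model path, feature name, and size bounds are placeholders.

import coremltools as ct
from coremltools.models.neural_network import flexible_shape_utils

spec = ct.models.utils.load_spec("my_model.mlmodel")   # hypothetical path
size_range = flexible_shape_utils.NeuralNetworkImageSizeRange()
size_range.add_height_range((64, 512))   # min/max height in pixels (illustrative)
size_range.add_width_range((64, 512))    # min/max width in pixels (illustrative)
flexible_shape_utils.update_image_size_range(
    spec, feature_name="image", size_range=size_range
)
ct.models.utils.save_spec(spec, "my_model_flexible.mlmodel")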
Example 5: _run_quantized_test
# Required module: import coremltools [as alias]
# Or: from coremltools import models [as alias]
def _run_quantized_test(self, input_, full_precision_model, quantized_model, delta):
    # Output from both models should be the same
    full_output = full_precision_model.predict(input_)
    quantized_output = quantized_model.predict(input_)
    self.assertEqual(full_output.keys(), quantized_output.keys())

    for key in full_output.keys():
        full_output_flatten = full_output[key].flatten()
        quantized_output_flatten = quantized_output[key].flatten()
        self.assertTrue(len(full_output_flatten) == len(quantized_output_flatten))

        norm_factor = np.maximum(full_output_flatten, quantized_output_flatten)
        norm_factor = np.maximum(norm_factor, 1.0)
        f_out = full_output_flatten / norm_factor
        q_out = quantized_output_flatten / norm_factor

        for idx, full_value in enumerate(f_out):
            quantized_value = q_out[idx]
            self.assertAlmostEqual(full_value, quantized_value, delta=delta)
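For context, a hedged sketch of how the full-precision/quantized model pair fed into _run_quantized_test could be produced with quantize_weights from coremltools.models.neural_network.quantization_utils; the model path and the nbits/mode values are illustrative only.

import coremltools as ct
from coremltools.models.neural_network.quantization_utils import quantize_weights

full_precision_model = ct.models.MLModel("my_network.mlmodel")   # hypothetical path
# 8-bit linear weight quantization; on platforms without the Core ML runtime,
# older coremltools versions may return a spec rather than an MLModel.
quantized_model = quantize_weights(
    full_precision_model, nbits=8, quantization_mode="linear"
)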
Example 6: test_models
# Required module: import coremltools [as alias]
# Or: from coremltools import models [as alias]
def test_models(self):
    expected = [
        "MLModel",
        "datatypes",
        "model",
        "neural_network",
        "pipeline",
        "tree_ensemble",
        "utils",
    ]
    _check_visible_modules(_get_visible_items(ct.models), expected)
Example 7: test_models_mlmodel
# Required module: import coremltools [as alias]
# Or: from coremltools import models [as alias]
def test_models_mlmodel(self):
    expected = [
        "author",
        "get_spec",
        "input_description",
        "license",
        "output_description",
        "predict",
        "save",
        "short_description",
        "user_defined_metadata",
        "version",
    ]
    _check_visible_modules(_get_visible_items(ct.models.MLModel), expected)
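The names above are the public surface of ct.models.MLModel. A brief illustrative sketch of that surface in use; the file names, metadata strings, and input dictionary are placeholders.

import numpy as np
import coremltools as ct

mlmodel = ct.models.MLModel("my_model.mlmodel")        # hypothetical path
mlmodel.author = "Jane Doe"
mlmodel.short_description = "Example description"
spec = mlmodel.get_spec()                              # underlying protobuf spec
# Prediction requires a platform with the Core ML runtime (macOS); the input
# name and shape below are assumptions about the model:
# out = mlmodel.predict({"data": np.zeros((1, 3, 224, 224))})
mlmodel.save("my_model_with_metadata.mlmodel")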
Example 8: test_models_neural_network_utils
# Required module: import coremltools [as alias]
# Or: from coremltools import models [as alias]
def test_models_neural_network_utils(self):
    expected = ["NeuralNetworkBuilder", "make_image_input", "make_nn_classifier"]
    _check_visible_modules(
        _get_visible_items(ct.models.neural_network.utils), expected
    )
Example 9: test_models_pipeline
# Required module: import coremltools [as alias]
# Or: from coremltools import models [as alias]
def test_models_pipeline(self):
    expected = [
        "Pipeline",
        "PipelineClassifier",
        "PipelineRegressor",
        "set_classifier_interface_params",
        "set_regressor_interface_params",
        "set_training_features",
        "set_transform_interface_params",
    ]
    _check_visible_modules(_get_visible_items(ct.models.pipeline), expected)
Example 10: test_models_neural_network_update_optimizer_utils
# Required module: import coremltools [as alias]
# Or: from coremltools import models [as alias]
def test_models_neural_network_update_optimizer_utils(self):
    expected = ["AdamParams", "Batch", "RangeParam", "SgdParams"]
    _check_visible_modules(
        _get_visible_items(ct.models.neural_network.update_optimizer_utils),
        expected,
    )
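Beyond the API check above, here is a hedged sketch of how AdamParams and SgdParams are typically constructed for an updatable model; the hyperparameter values are illustrative, and the builder calls (commented out) assume an existing NeuralNetworkBuilder that has already been made updatable.

from coremltools.models.neural_network.update_optimizer_utils import AdamParams, SgdParams

adam = AdamParams(lr=0.01, batch=8, beta1=0.9, beta2=0.999, eps=1e-8)  # illustrative values
sgd = SgdParams(lr=0.01, batch=8, momentum=0.0)                        # illustrative values
# With an updatable NeuralNetworkBuilder named `builder` (hypothetical):
# builder.set_adam_optimizer(adam)
# builder.set_sgd_optimizer(sgd)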
Example 11: test_models_neural_network_optimization_utils
# Required module: import coremltools [as alias]
# Or: from coremltools import models [as alias]
def test_models_neural_network_optimization_utils(self):
    _check_visible_modules(
        _get_visible_items(ct.models.neural_network.optimization_utils), [],
    )
Example 12: test_simple_loop_fixed_iterations
# Required module: import coremltools [as alias]
# Or: from coremltools import models [as alias]
def test_simple_loop_fixed_iterations(self):
    input_features = [("data", datatypes.Array(1))]
    output_features = [("output", None)]

    builder_top = NeuralNetworkBuilder(
        input_features, output_features, disable_rank5_shape_mapping=True
    )
    builder_top.add_copy("copy_1", input_name="data", output_name="output")

    loop_layer = builder_top.add_loop("loop_layer")
    loop_layer.loop.maxLoopIterations = 5
    builder_body = NeuralNetworkBuilder(
        input_features=None,
        output_features=None,
        spec=None,
        nn_spec=loop_layer.loop.bodyNetwork,
    )
    builder_body.add_elementwise(
        "add", input_names=["output"], output_name="x", mode="ADD", alpha=2
    )
    builder_body.add_copy("copy_2", input_name="x", output_name="output")

    coremltools.models.utils.save_spec(
        builder_top.spec, "/tmp/simple_loop_fixed_iterations.mlmodel"
    )
    mlmodel = MLModel(builder_top.spec)

    # Five loop iterations, each adding 2 to the initial value 0, yield 10.
    input_dict = {"data": np.array([0], dtype="float")}
    output_ref = {"output": np.array([10], dtype="float")}
    self._test_model(mlmodel, input_dict, output_ref)
Example 13: compare
# Required module: import coremltools [as alias]
# Or: from coremltools import models [as alias]
def compare(self, specification_modified=True):
    x = np.random.rand(*self.input_shape)

    def _get_preds(spec):
        mlmodel = coremltools.models.MLModel(spec)
        return mlmodel.predict({"data": x}, useCPUOnly=True)["output"]

    preds = _get_preds(self.builder.spec)
    self.assertEqual(self.builder.spec.specificationVersion, 4)

    quantized_spec = activate_int8_int8_matrix_multiplications(
        self.builder.spec, self.selector
    )

    layer = self.builder.spec.neuralNetwork.layers[0]
    layer_type = layer.WhichOneof("layer")
    if layer_type == "innerProduct":
        matmul_layer = layer.innerProduct
    elif layer_type == "batchedMatmul":
        matmul_layer = layer.batchedMatmul
    wp = matmul_layer.weights

    if specification_modified:
        self.assertEqual(self.builder.spec.specificationVersion, 5)
        quant_preds = _get_preds(quantized_spec)
        self._test_predictions(preds, quant_preds, SNR=40)
        self.assertEqual(len(wp.floatValue), 0)
    else:
        self.assertEqual(self.builder.spec.specificationVersion, 4)
        quant_preds = _get_preds(quantized_spec)
        np.testing.assert_array_almost_equal(preds, quant_preds)
        self.assertGreater(len(wp.floatValue), 0)
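A minimal sketch of the int8 dynamic-quantization call exercised by compare(), using a default-constructed selector instead of the test's self.selector; `builder` stands for a NeuralNetworkBuilder like the one the test class sets up.

from coremltools.models.neural_network.quantization_utils import (
    MatrixMultiplyLayerSelector,
    activate_int8_int8_matrix_multiplications,
)

# Default selector; constraints on which matmul layers to quantize can be
# configured through its attributes.
selector = MatrixMultiplyLayerSelector()
quantized_spec = activate_int8_int8_matrix_multiplications(builder.spec, selector)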
Example 14: _check_unsupported_layers
# Required module: import coremltools [as alias]
# Or: from coremltools import models [as alias]
def _check_unsupported_layers(cls, model, supported_layers):
    """Check for any unsupported layers in the keras model.

    Args:
        model - a keras model
        supported_layers - a dictionary of supported layers. Keys are keras
            layer classes and values are the corresponding coreml layer
            classes.
    """
    for i, layer in enumerate(model.layers):
        if isinstance(layer, (_keras.models.Sequential, _keras.models.Model)):
            # Recurse into nested models with the same supported_layers mapping.
            cls._check_unsupported_layers(layer, supported_layers)
        else:
            if type(layer) not in supported_layers:
                print(supported_layers)
                raise ValueError(
                    "Keras layer '%s' not supported. " % str(type(layer))
                )
            if isinstance(layer, _keras.layers.wrappers.TimeDistributed):
                if type(layer.layer) not in supported_layers:
                    raise ValueError(
                        "Keras layer '%s' not supported. " % str(type(layer.layer))
                    )
            if isinstance(layer, _keras.layers.wrappers.Bidirectional):
                if not isinstance(layer.layer, _keras.layers.recurrent.LSTM):
                    raise ValueError(
                        "Keras bi-directional wrapper conversion supports "
                        "only LSTM layer at this time. "
                    )
Example 15: _test_function
# Required module: import coremltools [as alias]
# Or: from coremltools import models [as alias]
def _test_function(self, original_framework, parser):
    print("[{}] Testing {} models starts.".format(datetime.now(), original_framework), file=sys.stderr)

    ensure_dir(self.cachedir)
    ensure_dir(self.tmpdir)

    for network_name in self.test_table[original_framework].keys():
        print("[{}] Testing {} {} starts.".format(datetime.now(), original_framework, network_name), file=sys.stderr)

        # get test input path
        test_input = self._get_test_input(network_name)

        # get original model prediction result
        original_predict = parser(network_name, test_input)

        IR_file = TestModels.tmpdir + original_framework + '_' + network_name + "_converted"

        for emit in self.test_table[original_framework][network_name]:
            if isinstance(emit, staticmethod):
                emit = emit.__func__
            target_framework = emit.__name__[:-5]

            if target_framework == 'coreml':
                if not is_coreml_supported():
                    continue

            print('[{}] Converting {} from {} to {} starts.'.format(datetime.now(), network_name, original_framework, target_framework), file=sys.stderr)
            converted_predict = emit(
                original_framework,
                network_name,
                IR_file + ".pb",
                IR_file + ".npy",
                test_input)

            self._compare_outputs(
                original_framework,
                target_framework,
                network_name,
                original_predict,
                converted_predict,
                self._need_assert(original_framework, target_framework, network_name, original_predict, converted_predict)
            )
            print('[{}] Converting {} from {} to {} passed.'.format(datetime.now(), network_name, original_framework, target_framework), file=sys.stderr)

        try:
            os.remove(IR_file + ".json")
        except OSError:
            pass

        os.remove(IR_file + ".pb")
        os.remove(IR_file + ".npy")
        print("[{}] Testing {} {} passed.".format(datetime.now(), original_framework, network_name), file=sys.stderr)

    print("[{}] Testing {} models passed.".format(datetime.now(), original_framework), file=sys.stderr)