本文整理汇总了Python中mlflow.pyfunc方法的典型用法代码示例。如果您正苦于以下问题:Python mlflow.pyfunc方法的具体用法?Python mlflow.pyfunc怎么用?Python mlflow.pyfunc使用的例子?那么,这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类mlflow的用法示例。
在下文中一共展示了mlflow.pyfunc方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: _save_custom_objects
# 需要导入模块: import mlflow [as 别名]
# 或者: from mlflow import pyfunc [as 别名]
def _save_custom_objects(path, custom_objects):
    """
    Serialize a Keras ``custom_objects`` dictionary with CloudPickle.

    The resulting file lets the model be restored later: the custom layers
    and functions are loaded back automatically by
    :py:func:`mlflow.keras.load_model` and :py:func:`mlflow.pyfunc.load_model`.

    :param path: An absolute path that points to the data directory within
        /path/to/model.
    :param custom_objects: Keras ``custom_objects`` dictionary mapping names
        (strings) to custom classes or functions to be considered during
        deserialization.
    """
    import cloudpickle

    target_file = os.path.join(path, _CUSTOM_OBJECTS_SAVE_PATH)
    with open(target_file, "wb") as handle:
        cloudpickle.dump(custom_objects, handle)
示例2: _load_pyfunc_conf_with_model
# 需要导入模块: import mlflow [as 别名]
# 或者: from mlflow import pyfunc [as 别名]
def _load_pyfunc_conf_with_model(model_path):
    """
    Load the `python_function` flavor configuration for the specified model.

    :param model_path: The absolute path to the model.
    :return: Tuple of (the model's `python_function` flavor configuration,
        the loaded :py:class:`Model`).
    :raises MlflowException: If the model does not contain the
        `python_function` flavor (error code ``INVALID_PARAMETER_VALUE``).
    """
    model_path = os.path.abspath(model_path)
    model = Model.load(os.path.join(model_path, "MLmodel"))
    if pyfunc.FLAVOR_NAME not in model.flavors:
        # BUG FIX: the original message duplicated the clause
        # "required for model deployment" and contained a double space.
        raise MlflowException(
            message=("The specified model does not contain the `python_function` "
                     "flavor. This flavor is required for model deployment."),
            error_code=INVALID_PARAMETER_VALUE)
    return model.flavors[pyfunc.FLAVOR_NAME], model
示例3: _load_pyfunc
# 需要导入模块: import mlflow [as 别名]
# 或者: from mlflow import pyfunc [as 别名]
def _load_pyfunc(path):
    """
    Load a PyFunc implementation; called by ``pyfunc.load_pyfunc``.

    Loads an MLflow model carrying the ``tensorflow`` flavor and exposes it
    behind the ``pyfunc.predict`` interface. Under TensorFlow 1.x the
    SavedModel is loaded into a fresh graph/session; under 2.x the native
    SavedModel loader is used.

    :param path: Local filesystem path to the MLflow Model with the
        ``tensorflow`` flavor.
    """
    saved_model_dir, meta_graph_tags, signature_def_key = \
        _get_and_parse_flavor_configuration(model_path=path)

    if LooseVersion(tensorflow.__version__) >= LooseVersion('2.0.0'):
        # TF 2.x: the SavedModel API exposes callable signatures directly.
        loaded = tensorflow.saved_model.load(  # pylint: disable=no-value-for-parameter
            export_dir=saved_model_dir,
            tags=meta_graph_tags)
        return _TF2Wrapper(infer=loaded.signatures[signature_def_key])

    # TF 1.x: build an isolated graph and session so we do not pollute any
    # graph the caller may already have active.
    graph = tensorflow.Graph()
    session = tensorflow.Session(graph=graph)
    with graph.as_default():
        signature_def = _load_tensorflow_saved_model(
            tf_saved_model_dir=saved_model_dir, tf_sess=session,
            tf_meta_graph_tags=meta_graph_tags,
            tf_signature_def_key=signature_def_key)
    return _TFWrapper(tf_sess=session, tf_graph=graph, signature_def=signature_def)
示例4: _load_pyfunc
# 需要导入模块: import mlflow [as 别名]
# 或者: from mlflow import pyfunc [as 别名]
def _load_pyfunc(path):
    """
    Load a PyFunc implementation; called by ``pyfunc.load_pyfunc``.

    :param path: Local filesystem path to the MLflow Model with the
        ``spark`` flavor.
    """
    import pyspark

    # NOTE: getOrCreate() may change settings of an already-active session,
    # which we do not intend here — in particular, forcing master to
    # local[1] can break distributed clusters. So reuse any existing session
    # and only build a new one when none exists. Not ideal, but there is no
    # good workaround at the moment.
    active_session = pyspark.sql.SparkSession._instantiatedSession
    if active_session is None:
        active_session = (pyspark.sql.SparkSession.builder
                          .config("spark.python.worker.reuse", True)
                          .master("local[1]")
                          .getOrCreate())
    return _PyFuncModelWrapper(active_session, _load_model(model_uri=path))
示例5: score_model_in_sagemaker_docker_container
# 需要导入模块: import mlflow [as 别名]
# 或者: from mlflow import pyfunc [as 别名]
def score_model_in_sagemaker_docker_container(
        model_uri, data, content_type, flavor=mlflow.pyfunc.FLAVOR_NAME,
        activity_polling_timeout_seconds=500):
    """
    Score a model by serving it in a local SageMaker docker container.

    :param model_uri: URI to the model to be served.
    :param data: The data to send to the docker container for testing. This
        is either a Pandas dataframe or string of the format specified by
        `content_type`.
    :param content_type: The type of the data to send to the docker container
        for testing. This is one of `mlflow.pyfunc.scoring_server.CONTENT_TYPES`.
    :param flavor: Model flavor to be deployed.
    :param activity_polling_timeout_seconds: The amount of time, in seconds,
        to wait before declaring the scoring process to have failed.
    """
    # Force a UTF-8 locale so the container tooling handles text consistently.
    scoring_env = dict(os.environ, LC_ALL="en_US.UTF-8", LANG="en_US.UTF-8")
    command = ['mlflow', 'sagemaker', 'run-local',
               '-m', model_uri, '-p', "5000", "-f", flavor]
    proc = _start_scoring_proc(cmd=command, env=scoring_env)
    return _evaluate_scoring_proc(
        proc, 5000, data, content_type, activity_polling_timeout_seconds)
示例6: test_model_save_load
# 需要导入模块: import mlflow [as 别名]
# 或者: from mlflow import pyfunc [as 别名]
def test_model_save_load(sklearn_knn_model, iris_data, tmpdir, model_path):
    """Round-trip a data_path/loader_module pyfunc model through
    save_model/load_pyfunc; verify config and predictions survive."""
    pickled_model = os.path.join(str(tmpdir), "knn.pkl")
    with open(pickled_model, "wb") as f:
        pickle.dump(sklearn_knn_model, f)

    model_config = Model(run_id="test", artifact_path="testtest")
    mlflow.pyfunc.save_model(path=model_path,
                             data_path=pickled_model,
                             loader_module=os.path.basename(__file__)[:-3],
                             code_path=[__file__],
                             mlflow_model=model_config)

    reloaded_config = Model.load(os.path.join(model_path, "MLmodel"))
    assert model_config.__dict__ == reloaded_config.__dict__
    flavors = reloaded_config.flavors
    assert mlflow.pyfunc.FLAVOR_NAME in flavors
    assert mlflow.pyfunc.PY_VERSION in flavors[mlflow.pyfunc.FLAVOR_NAME]

    reloaded_model = mlflow.pyfunc.load_pyfunc(model_path)
    expected = sklearn_knn_model.predict(iris_data[0])
    np.testing.assert_array_equal(expected, reloaded_model.predict(iris_data[0]))
示例7: test_signature_and_examples_are_saved_correctly
# 需要导入模块: import mlflow [as 别名]
# 或者: from mlflow import pyfunc [as 别名]
def test_signature_and_examples_are_saved_correctly(sklearn_knn_model, iris_data):
    """Every present/absent combination of (signature, input_example) must be
    persisted faithfully by ``mlflow.pyfunc.save_model``."""
    data = iris_data
    signature_ = infer_signature(*data)
    example_ = data[0][:3, ]
    for signature in (None, signature_):
        for example in (None, example_):
            with TempDir() as tmp:
                with open(tmp.path("skmodel"), "wb") as f:
                    pickle.dump(sklearn_knn_model, f)
                path = tmp.path("model")
                mlflow.pyfunc.save_model(
                    path=path,
                    data_path=tmp.path("skmodel"),
                    loader_module=os.path.basename(__file__)[:-3],
                    code_path=[__file__],
                    signature=signature,
                    input_example=example)
                mlflow_model = Model.load(path)
                assert signature == mlflow_model.signature
                if example is None:
                    # No example supplied -> none recorded.
                    assert mlflow_model.saved_input_example_info is None
                else:
                    assert all((_read_example(mlflow_model, path) == example).all())
示例8: test_model_log_load
# 需要导入模块: import mlflow [as 别名]
# 或者: from mlflow import pyfunc [as 别名]
def test_model_log_load(sklearn_knn_model, iris_data, tmpdir):
    """Log a pyfunc model to a run, download it back, and verify the stored
    config and predictions match the original scikit-learn model."""
    pickled_model = os.path.join(str(tmpdir), "knn.pkl")
    with open(pickled_model, "wb") as f:
        pickle.dump(sklearn_knn_model, f)

    pyfunc_artifact_path = "pyfunc_model"
    with mlflow.start_run():
        mlflow.pyfunc.log_model(artifact_path=pyfunc_artifact_path,
                                data_path=pickled_model,
                                loader_module=os.path.basename(__file__)[:-3],
                                code_path=[__file__])
        artifact_uri = "runs:/{run_id}/{artifact_path}".format(
            run_id=mlflow.active_run().info.run_id,
            artifact_path=pyfunc_artifact_path)
        pyfunc_model_path = _download_artifact_from_uri(artifact_uri)

    model_config = Model.load(os.path.join(pyfunc_model_path, "MLmodel"))
    assert mlflow.pyfunc.FLAVOR_NAME in model_config.flavors
    assert mlflow.pyfunc.PY_VERSION in model_config.flavors[mlflow.pyfunc.FLAVOR_NAME]

    reloaded_model = mlflow.pyfunc.load_pyfunc(pyfunc_model_path)
    # The metadata carried by the loaded model must equal the stored MLmodel.
    assert model_config.to_yaml() == reloaded_model.metadata.to_yaml()
    np.testing.assert_array_equal(
        sklearn_knn_model.predict(iris_data[0]), reloaded_model.predict(iris_data[0]))
示例9: test_model_log_load_no_active_run
# 需要导入模块: import mlflow [as 别名]
# 或者: from mlflow import pyfunc [as 别名]
def test_model_log_load_no_active_run(sklearn_knn_model, iris_data, tmpdir):
    """``log_model`` must implicitly start a run when none is active; the
    logged model must still load and predict correctly."""
    pickled_model = os.path.join(str(tmpdir), "knn.pkl")
    with open(pickled_model, "wb") as f:
        pickle.dump(sklearn_knn_model, f)

    pyfunc_artifact_path = "pyfunc_model"
    assert mlflow.active_run() is None
    mlflow.pyfunc.log_model(artifact_path=pyfunc_artifact_path,
                            data_path=pickled_model,
                            loader_module=os.path.basename(__file__)[:-3],
                            code_path=[__file__])
    # log_model started a run implicitly; its id locates the artifact.
    artifact_uri = "runs:/{run_id}/{artifact_path}".format(
        run_id=mlflow.active_run().info.run_id,
        artifact_path=pyfunc_artifact_path)
    pyfunc_model_path = _download_artifact_from_uri(artifact_uri)

    model_config = Model.load(os.path.join(pyfunc_model_path, "MLmodel"))
    assert mlflow.pyfunc.FLAVOR_NAME in model_config.flavors
    assert mlflow.pyfunc.PY_VERSION in model_config.flavors[mlflow.pyfunc.FLAVOR_NAME]

    reloaded_model = mlflow.pyfunc.load_pyfunc(pyfunc_model_path)
    np.testing.assert_array_equal(
        sklearn_knn_model.predict(iris_data[0]), reloaded_model.predict(iris_data[0]))
    mlflow.end_run()
示例10: test_log_model_without_specified_conda_env_uses_default_env_with_expected_dependencies
# 需要导入模块: import mlflow [as 别名]
# 或者: from mlflow import pyfunc [as 别名]
def test_log_model_without_specified_conda_env_uses_default_env_with_expected_dependencies(
        sklearn_knn_model, tmpdir):
    """When no conda_env is supplied, the logged model must carry pyfunc's
    default conda environment."""
    pickled_model = os.path.join(str(tmpdir), "knn.pkl")
    with open(pickled_model, "wb") as f:
        pickle.dump(sklearn_knn_model, f)

    pyfunc_artifact_path = "pyfunc_model"
    with mlflow.start_run():
        mlflow.pyfunc.log_model(artifact_path=pyfunc_artifact_path,
                                data_path=pickled_model,
                                loader_module=os.path.basename(__file__)[:-3],
                                code_path=[__file__])
        run_id = mlflow.active_run().info.run_id

    pyfunc_model_path = _download_artifact_from_uri(
        "runs:/{run_id}/{artifact_path}".format(
            run_id=run_id, artifact_path=pyfunc_artifact_path))
    pyfunc_conf = _get_flavor_configuration(
        model_path=pyfunc_model_path, flavor_name=mlflow.pyfunc.FLAVOR_NAME)
    conda_env_path = os.path.join(pyfunc_model_path, pyfunc_conf[mlflow.pyfunc.ENV])
    with open(conda_env_path, "r") as f:
        logged_env = yaml.safe_load(f)
    assert logged_env == mlflow.pyfunc.model.get_default_conda_env()
示例11: get_model_class
# 需要导入模块: import mlflow [as 别名]
# 或者: from mlflow import pyfunc [as 别名]
def get_model_class():
    """
    Define and return a custom Python model class that wraps a scikit-learn
    estimator.

    Invoke from a pytest fixture to define the class in the ``__main__``
    scope, or from within a module to define it in that module's scope.
    """
    class CustomSklearnModel(mlflow.pyfunc.PythonModel):
        def __init__(self, predict_fn):
            self.predict_fn = predict_fn

        def load_context(self, context):
            super(CustomSklearnModel, self).load_context(context)
            # pylint: disable=attribute-defined-outside-init
            sk_model_uri = context.artifacts["sk_model"]
            self.model = mlflow.sklearn.load_model(model_uri=sk_model_uri)

        def predict(self, context, model_input):
            # Delegate to the user-supplied function over the wrapped model.
            return self.predict_fn(self.model, model_input)

    return CustomSklearnModel
示例12: test_model_save_load
# 需要导入模块: import mlflow [as 别名]
# 或者: from mlflow import pyfunc [as 别名]
def test_model_save_load(sklearn_knn_model, main_scoped_model_class, iris_data, tmpdir):
    """Save a python_model-based pyfunc and verify the reloaded model applies
    the custom predict function on top of the wrapped sklearn model."""
    sklearn_model_path = os.path.join(str(tmpdir), "sklearn_model")
    mlflow.sklearn.save_model(sk_model=sklearn_knn_model, path=sklearn_model_path)

    def test_predict(sk_model, model_input):
        # Doubling makes it easy to tell the custom wrapper actually ran.
        return sk_model.predict(model_input) * 2

    pyfunc_model_path = os.path.join(str(tmpdir), "pyfunc_model")
    mlflow.pyfunc.save_model(
        path=pyfunc_model_path,
        artifacts={"sk_model": sklearn_model_path},
        conda_env=_conda_env(),
        python_model=main_scoped_model_class(test_predict))

    loaded_pyfunc_model = mlflow.pyfunc.load_pyfunc(model_uri=pyfunc_model_path)
    np.testing.assert_array_equal(
        loaded_pyfunc_model.predict(iris_data[0]),
        test_predict(sk_model=sklearn_knn_model, model_input=iris_data[0]))
示例13: test_signature_and_examples_are_saved_correctly
# 需要导入模块: import mlflow [as 别名]
# 或者: from mlflow import pyfunc [as 别名]
def test_signature_and_examples_are_saved_correctly(iris_data, main_scoped_model_class):
    """Signature and input example must round-trip through save_model for
    python_model-based pyfunc models, for every present/absent combination."""
    def test_predict(sk_model, model_input):
        return sk_model.predict(model_input) * 2

    data = iris_data
    signature_ = infer_signature(*data)
    example_ = data[0][:3, ]
    for signature in (None, signature_):
        for example in (None, example_):
            with TempDir() as tmp:
                path = tmp.path("model")
                mlflow.pyfunc.save_model(
                    path=path,
                    artifacts={},
                    python_model=main_scoped_model_class(test_predict),
                    signature=signature,
                    input_example=example)
                mlflow_model = Model.load(path)
                assert signature == mlflow_model.signature
                if example is None:
                    # No example supplied -> none recorded.
                    assert mlflow_model.saved_input_example_info is None
                else:
                    assert all((_read_example(mlflow_model, path) == example).all())
示例14: test_log_model_calls_register_model
# 需要导入模块: import mlflow [as 别名]
# 或者: from mlflow import pyfunc [as 别名]
def test_log_model_calls_register_model(sklearn_knn_model, main_scoped_model_class):
    """Passing registered_model_name to log_model must trigger exactly one
    mlflow.register_model call with the logged model's URI."""
    with mock.patch("mlflow.register_model"):
        sklearn_artifact_path = "sk_model_no_run"
        with mlflow.start_run():
            mlflow.sklearn.log_model(sk_model=sklearn_knn_model,
                                     artifact_path=sklearn_artifact_path)
            sklearn_model_uri = "runs:/{run_id}/{artifact_path}".format(
                run_id=mlflow.active_run().info.run_id,
                artifact_path=sklearn_artifact_path)

        def test_predict(sk_model, model_input):
            return sk_model.predict(model_input) * 2

        pyfunc_artifact_path = "pyfunc_model"
        assert mlflow.active_run() is None
        mlflow.pyfunc.log_model(artifact_path=pyfunc_artifact_path,
                                artifacts={"sk_model": sklearn_model_uri},
                                python_model=main_scoped_model_class(test_predict),
                                registered_model_name="AdsModel1")
        model_uri = "runs:/{run_id}/{artifact_path}".format(
            run_id=mlflow.active_run().info.run_id,
            artifact_path=pyfunc_artifact_path)
        mlflow.register_model.assert_called_once_with(model_uri, "AdsModel1")
        mlflow.end_run()
示例15: test_log_model_no_registered_model_name
# 需要导入模块: import mlflow [as 别名]
# 或者: from mlflow import pyfunc [as 别名]
def test_log_model_no_registered_model_name(sklearn_knn_model, main_scoped_model_class):
    """Without registered_model_name, log_model must never invoke
    mlflow.register_model."""
    with mock.patch("mlflow.register_model"):
        sklearn_artifact_path = "sk_model_no_run"
        with mlflow.start_run():
            mlflow.sklearn.log_model(sk_model=sklearn_knn_model,
                                     artifact_path=sklearn_artifact_path)
            sklearn_model_uri = "runs:/{run_id}/{artifact_path}".format(
                run_id=mlflow.active_run().info.run_id,
                artifact_path=sklearn_artifact_path)

        def test_predict(sk_model, model_input):
            return sk_model.predict(model_input) * 2

        pyfunc_artifact_path = "pyfunc_model"
        assert mlflow.active_run() is None
        mlflow.pyfunc.log_model(artifact_path=pyfunc_artifact_path,
                                artifacts={"sk_model": sklearn_model_uri},
                                python_model=main_scoped_model_class(test_predict))
        mlflow.register_model.assert_not_called()
        mlflow.end_run()