This article collects typical usage examples of the Python method azureml.core.model.Model.get_model_path. If you are unsure what Model.get_model_path does or how to call it, the curated code examples below should help. You can also read further about the class the method belongs to, azureml.core.model.Model.
The following presents 9 code examples of Model.get_model_path.
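Before the examples, a minimal sketch of the typical workflow may be useful: a model is first registered in the workspace, and the scoring script later resolves its on-disk location with Model.get_model_path. The workspace configuration, the model name 'my-model' and the file path below are placeholders, not taken from the examples that follow.
from azureml.core import Workspace
from azureml.core.model import Model

# Register a local file (or folder) under a name; get_model_path later looks the model up by this name.
ws = Workspace.from_config()                      # assumes a config.json for the workspace is present
Model.register(workspace=ws,
               model_path='outputs/model.pkl',    # placeholder local path
               model_name='my-model')             # placeholder model name

# Later, inside the deployed scoring script's init():
# resolves to ./azureml-models/my-model/<version>/model.pkl in the service container
model_path = Model.get_model_path('my-model')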
Example 1: init
# Required import: from azureml.core.model import Model [as alias]
# Or: from azureml.core.model.Model import get_model_path [as alias]
# This snippet also uses: import datetime; import tensorflow as tf
def init():
    global model
    try:
        # Resolve the registered model's local path by name
        model_path = Model.get_model_path('tacosandburritos')
    except Exception:
        # Fall back to a local copy when not running inside the AML service
        model_path = '../../data/model/latest.h5'
    print('Attempting to load model')
    model = tf.keras.models.load_model(model_path)
    model.summary()
    print('Done!')
    print('Initialized model "{}" at {}'.format(model_path, datetime.datetime.now()))
Example 2: init
# Required import: from azureml.core.model import Model [as alias]
# Or: from azureml.core.model.Model import get_model_path [as alias]
# This snippet also uses: import os; import joblib
def init():
    # load the model from file into a global object
    global model
    # we assume that we have just one model
    # AZUREML_MODEL_DIR is an environment variable created during deployment.
    # It is the path to the model folder
    # (./azureml-models/$MODEL_NAME/$VERSION)
    model_path = Model.get_model_path(
        os.getenv("AZUREML_MODEL_DIR").split('/')[-2])
    model = joblib.load(model_path)
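Deriving the model name from AZUREML_MODEL_DIR works, but get_model_path can also be called with the name (and optionally the version) directly. A short sketch, with 'my-model' as a placeholder name:
model_path = Model.get_model_path('my-model')                  # latest registered version
model_path_v3 = Model.get_model_path('my-model', version=3)    # or pin a specific version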
Example 3: init
# Required import: from azureml.core.model import Model [as alias]
# Or: from azureml.core.model.Model import get_model_path [as alias]
# This snippet also uses: import torch; ModelDataCollector (azureml.monitoring); CNN is defined elsewhere in the project
def init():
    global model
    global inputs_dc, prediction_dc
    inputs_dc = ModelDataCollector("torchcnn", identifier="inputs")
    prediction_dc = ModelDataCollector("torchcnn", identifier="predictions")
    model = CNN()
    # The line below loads the model from the AML service
    model_path = Model.get_model_path(model_name="torchcnn")
    # It is also possible to load a local model file
    # model_path = '/temp/torchcnn.pth'
    model.load_state_dict(torch.load(model_path))
    model.eval()
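A run() is not included in this excerpt; the hypothetical sketch below shows how the two data collectors would typically be used at scoring time (the JSON request format and the numpy/json imports are assumptions, not part of the original source):
def run(raw_data):
    data = np.array(json.loads(raw_data)['data'], dtype=np.float32)
    inputs_dc.collect(data)                     # log the raw inputs
    with torch.no_grad():
        output = model(torch.from_numpy(data))
        pred = output.argmax(dim=1).tolist()
    prediction_dc.collect(pred)                 # log the predictions
    return pred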
Example 4: init
# Required import: from azureml.core.model import Model [as alias]
# Or: from azureml.core.model.Model import get_model_path [as alias]
# This snippet also uses: import dotenv; import tensorflow as tf; import keras; MODEL_FILE_NAME is a constant defined elsewhere
def init():
    global model
    print("GPU USAGE: ", tf.test.is_gpu_available())
    model_path = Model.get_model_path(MODEL_FILE_NAME)
    dotenv.load_dotenv()
    print("model_path: ", model_path)
    # deserialize the model file back into a Keras model
    model = keras.models.load_model(model_path)
    print("Model Loaded")
Example 5: init
# Required import: from azureml.core.model import Model [as alias]
# Or: from azureml.core.model.Model import get_model_path [as alias]
# This snippet also uses: import keras; MODEL_FILE_NAME is a constant defined elsewhere
def init():
    global model
    model_path = Model.get_model_path(MODEL_FILE_NAME)
    # dotenv.load_dotenv()
    # model_path = os.getenv('AZUREML_MODEL_DIR')
    print("model_path: ", model_path)
    # deserialize the model file back into a Keras model
    model = keras.models.load_model(model_path)
    print("Model Loaded")
Example 6: init
# Required import: from azureml.core.model import Model [as alias]
# Or: from azureml.core.model.Model import get_model_path [as alias]
# This snippet also uses: import joblib; MODEL_FILE_NAME is a constant defined elsewhere
def init():
    global model
    model_path = Model.get_model_path(MODEL_FILE_NAME)
    # deserialize the model file back into a sklearn model
    model = joblib.load(model_path)
Example 7: init
# Required import: from azureml.core.model import Model [as alias]
# Or: from azureml.core.model.Model import get_model_path [as alias]
# This snippet also uses: import joblib
def init():
    global model
    # retrieve the path to the model file using the model name
    model_path = Model.get_model_path(model_name='movielens_sar_model')
    model = joblib.load(model_path)

# Passes data to the model and returns the prediction
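The excerpt ends at that comment, so the run() it describes is not shown. A hypothetical completion for a SAR recommendation model might look like the following (the recommend_k_items call, the request format, and the pandas/json imports are assumptions, not part of the original source):
def run(raw_data):
    # expects rows of user/item interactions in the request body
    df = pd.DataFrame(json.loads(raw_data)['data'])
    top_k = model.recommend_k_items(df, top_k=10, remove_seen=True)
    return top_k.to_json()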
Example 8: load_prednet_model
# Required import: from azureml.core.model import Model [as alias]
# Or: from azureml.core.model.Model import get_model_path [as alias]
# This snippet also uses: import os; Keras's model_from_json, Input and Model (aliased here as Model_keras), plus the custom PredNet layer, are imported elsewhere in the script
def load_prednet_model(name):
    nt = 10
    prednet_path = Model.get_model_path(name)
    print(prednet_path)
    # load json and create model
    with open(os.path.join(prednet_path, 'model.json'), 'r') as json_file:
        model_json = json_file.read()
    trained_model = model_from_json(
        model_json,
        custom_objects={"PredNet": PredNet})
    # load weights into new model
    trained_model.load_weights(os.path.join(prednet_path, "weights.hdf5"))
    # Create testing model (to output predictions)
    layer_config = trained_model.layers[1].get_config()
    layer_config['output_mode'] = 'prediction'
    test_prednet = PredNet(
        weights=trained_model.layers[1].get_weights(),
        **layer_config)
    input_shape = list(trained_model.layers[0].batch_input_shape[1:])
    input_shape[0] = nt
    inputs = Input(shape=tuple(input_shape))
    predictions = test_prednet(inputs)
    prednet_model = Model_keras(inputs=inputs, outputs=predictions)
    return prednet_model
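For context, a hypothetical call to the helper above; the registered model name, the frame resolution, and the numpy import are placeholders and assumptions:
prednet_model = load_prednet_model('prednet_video_model')       # placeholder model name
clip = np.zeros((1, 10, 128, 160, 3), dtype=np.float32)         # one clip of nt=10 frames, assumed resolution
predicted_frames = prednet_model.predict(clip)                  # predicted frames, same shape as the input clip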
Example 9: init
# Required import: from azureml.core.model import Model [as alias]
# Or: from azureml.core.model.Model import get_model_path [as alias]
# This snippet also uses: import json; import joblib; load_prednet_model from Example 8
def init():
    global prednet_models, clf_models
    with open('deployment_assets/models.json', 'r') as f:
        models = json.load(f)
    prednet_models = {}
    for name in models['prednet_model_names']:
        prednet_models[name] = load_prednet_model(name)
    clf_models = {}
    for name in models['clf_model_names']:
        model_path = Model.get_model_path(name)
        clf_models[name] = joblib.load(model_path)
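A matching run() is not part of this excerpt; the sketch below is one hypothetical way the two model dictionaries could be combined at scoring time (the request format, the reconstruction-error heuristic, and the numpy import are all assumptions):
def run(raw_data):
    request = json.loads(raw_data)
    name = request['model_name']                                # selects which PredNet/classifier pair to use
    clip = np.array(request['data'], dtype=np.float32)          # assumed shape: (batch, nt, height, width, channels)
    predicted = prednet_models[name].predict(clip)
    errors = np.mean((predicted - clip) ** 2, axis=(2, 3, 4))   # per-frame reconstruction error
    return clf_models[name].predict(errors).tolist()            # classifier flags anomalous clips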