本文整理汇总了Python中bigml.api.BigML.create_batch_prediction方法的典型用法代码示例。如果您正苦于以下问题:Python BigML.create_batch_prediction方法的具体用法?Python BigML.create_batch_prediction怎么用?Python BigML.create_batch_prediction使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类bigml.api.BigML
的用法示例。
在下文中一共展示了BigML.create_batch_prediction方法的6个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: bigml
# 需要导入模块: from bigml.api import BigML [as 别名]
# 或者: from bigml.api.BigML import create_batch_prediction [as 别名]
def bigml( train_csv, test_csv, result_csv ):
    """Train a BigML decision-tree model on ``train_csv``, run a batch
    prediction over ``test_csv``, download the predictions to
    ``result_csv``, and delete every remote resource created.

    Parameters
    ----------
    train_csv : str  -- path to the training CSV uploaded as a source
    test_csv : str   -- path to the test CSV uploaded as a source
    result_csv : str -- local path the prediction CSV is written to
    """
    # Local imports keep this example self-contained; `timer` is a
    # monotonic wall-clock suitable for coarse duration measurement.
    import time
    from timeit import default_timer as timer

    api = BigML(dev_mode=True)

    # --- train model ---
    start_training = timer()
    source_train = api.create_source(train_csv)
    dataset_train = api.create_dataset(source_train)
    model = api.create_model(dataset_train)
    end_training = timer()
    print('Training model.')
    print('Training took %i Seconds.' % (end_training - start_training))

    # --- batch-predict on the test set ---
    start_test = timer()
    source_test = api.create_source(test_csv)
    dataset_test = api.create_dataset(source_test)
    batch_prediction = api.create_batch_prediction(
        model,
        dataset_test,
        {
            "name": "census prediction",
            "all_fields": True,
            "header": False,
            "confidence": False
        }
    )
    # Poll until batch processing is finished.  Fetch the status once per
    # iteration (the original hit the API twice: once for the check, once
    # for the print) and use print() — the original used a Python-2 print
    # statement, a syntax error under Python 3.
    progress = api.get_batch_prediction(batch_prediction)['object']['status']['progress']
    while progress != 1:
        print(progress)
        time.sleep(1)
        progress = api.get_batch_prediction(batch_prediction)['object']['status']['progress']
    end_test = timer()
    print('Testing took %i Seconds' % (end_test - start_test))

    api.download_batch_prediction(batch_prediction['resource'], filename=result_csv)

    # --- cleanup: remove all remote resources created above ---
    api.delete_source(source_train)
    api.delete_source(source_test)
    api.delete_dataset(dataset_train)
    api.delete_dataset(dataset_test)
    api.delete_model(model)
示例2: BigML
# 需要导入模块: from bigml.api import BigML [as 别名]
# 或者: from bigml.api.BigML import create_batch_prediction [as 别名]
from bigml.api import BigML

# Standard source -> dataset -> model pipeline, then score the training
# dataset itself with a named batch prediction.  api.ok() blocks until
# the referenced resource reaches a finished state.
api = BigML()

src = api.create_source("iris.csv")
api.ok(src)

ds = api.create_dataset(src)
api.ok(ds)

mdl = api.create_model(ds)
api.ok(mdl)

prediction = api.create_batch_prediction(mdl, ds, {"name": u"my_batch_prediction_name"})
api.ok(prediction)
示例3: BigML
# 需要导入模块: from bigml.api import BigML [as 别名]
# 或者: from bigml.api.BigML import create_batch_prediction [as 别名]
from bigml.api import BigML

# Same pipeline as the minimal example, but the dataset and model are
# given explicit names via the creation-args dict.
api = BigML()

src = api.create_source("iris.csv")
api.ok(src)

ds = api.create_dataset(src, {'name': u'iris'})
api.ok(ds)

mdl = api.create_model(ds, {'name': u'iris'})
api.ok(mdl)

prediction = api.create_batch_prediction(
    mdl, ds, {'name': u'my_batch_prediction_name'})
api.ok(prediction)
示例4:
# 需要导入模块: from bigml.api import BigML [as 别名]
# 或者: from bigml.api.BigML import create_batch_prediction [as 别名]
source1_file = "iris.csv"

# NOTE(review): the original snippet used `api` without ever defining it;
# construct the client here the same way the sibling examples do.
from bigml.api import BigML
api = BigML()

# Source: declare every field's name and optype explicitly instead of
# relying on autodetection, and enable term analysis on `species`.
args = \
    {u'fields': {u'000000': {u'name': u'sepal length', u'optype': u'numeric'},
                 u'000001': {u'name': u'sepal width', u'optype': u'numeric'},
                 u'000002': {u'name': u'petal length', u'optype': u'numeric'},
                 u'000003': {u'name': u'petal width', u'optype': u'numeric'},
                 u'000004': {u'name': u'species',
                             u'optype': u'categorical',
                             u'term_analysis': {u'enabled': True}}}}
source2 = api.create_source(source1_file, args)
api.ok(source2)

# Dataset: make `species` (field 000004) the objective/target field.
args = \
    {u'objective_field': {u'id': u'000004'}}
dataset1 = api.create_dataset(source2, args)
api.ok(dataset1)

# Model: presumably limits the candidate splits evaluated per node —
# confirm against the BigML model-creation API docs.
args = \
    {u'split_candidates': 32}
model1 = api.create_model(dataset1, args)
api.ok(model1)

# Batch prediction: map input-dataset fields onto model fields explicitly
# and select predictions by probability.
args = \
    {u'fields_map': {u'000001': u'000001',
                     u'000002': u'000002',
                     u'000003': u'000003',
                     u'000004': u'000004'},
     u'operating_kind': u'probability'}
batchprediction1 = api.create_batch_prediction(model1, dataset1, args)
api.ok(batchprediction1)
示例5: BigML
# 需要导入模块: from bigml.api import BigML [as 别名]
# 或者: from bigml.api.BigML import create_batch_prediction [as 别名]
from bigml.api import BigML

# Build source -> dataset -> model, then request that the batch
# prediction materialize its output as a new remote dataset
# ('output_dataset': True), fetch that dataset, and rename it.
api = BigML()

src = api.create_source("iris.csv")
api.ok(src)

ds = api.create_dataset(src, {'name': u'iris'})
api.ok(ds)

mdl = api.create_model(ds, {'name': u'iris'})
api.ok(mdl)

prediction = api.create_batch_prediction(
    mdl, ds, {'name': u'iris dataset with iris', 'output_dataset': True})
api.ok(prediction)

# The finished batch prediction exposes the generated dataset's id.
output_ds = api.get_dataset(prediction['object']['output_dataset_resource'])
api.ok(output_ds)

output_ds = api.update_dataset(
    output_ds, {'name': u'my_dataset_from_batch_prediction_name'})
api.ok(output_ds)
示例6: BigML
# 需要导入模块: from bigml.api import BigML [as 别名]
# 或者: from bigml.api.BigML import create_batch_prediction [as 别名]
from bigml.api import BigML

# Like the previous output-dataset example, but additionally renames a
# field of the generated dataset in the final update call.
api = BigML()

src = api.create_source("iris.csv")
api.ok(src)

ds = api.create_dataset(src)
api.ok(ds)

mdl = api.create_model(ds)
api.ok(mdl)

prediction = api.create_batch_prediction(mdl, ds, {'output_dataset': True})
api.ok(prediction)

# Fetch the dataset the batch prediction produced, then rename both the
# dataset itself and its first field.
output_ds = api.get_dataset(prediction['object']['output_dataset_resource'])
api.ok(output_ds)

output_ds = api.update_dataset(
    output_ds,
    {'fields': {u'000000': {'name': u'species'}},
     'name': u'my_dataset_from_batch_prediction_name'})
api.ok(output_ds)