This article collects typical usage examples of the Python method Classifier.Classifier.predict_predict_proba. If you have been wondering how Classifier.predict_predict_proba is used in practice, or what it looks like in real code, the curated examples below may help. You can also explore further usage examples of the containing class, Classifier.Classifier.
The following presents 2 code examples of the Classifier.predict_predict_proba method, sorted by popularity by default.
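Before the full examples, here is a minimal sketch of the call pattern both examples share. It is an illustration only: randConvCoord, learningSet and testingSet stand in for the coordinator and image buffers built in the examples below (their construction is omitted here), and ExtraTreesClassifier plays the role of the base estimator, as it does in the examples.

from sklearn.ensemble import ExtraTreesClassifier
from Classifier import Classifier

# randConvCoord, learningSet and testingSet are placeholders here; see the
# full examples below for how they are actually constructed.
baseClassif = ExtraTreesClassifier(n_estimators=100, n_jobs=-1)
classifier = Classifier(randConvCoord, baseClassif)

classifier.fit(learningSet)

# predict_predict_proba returns the class probabilities and the hard
# predictions for the test set in a single call
y_prob, y_pred = classifier.predict_predict_proba(testingSet)

accuracy = classifier.accuracy(y_pred, testingSet.getLabels())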
Example 1: run
# Required import: from Classifier import Classifier [as alias]
# or: from Classifier.Classifier import predict_predict_proba [as alias]
#......... part of the code is omitted here .........
    subwindowTargetWidth=subwindowTargetWidth,
    subwindowTargetHeight=subwindowTargetHeight,
    subwindowInterpolation=subwindowInterpolation,
    includeOriginalImage=includeOriginalImage,
    nbJobs=nbJobs,
    verbosity=verbosity,
    tempFolder=tempFolder,
    random=random,
)
randConvCoord = LoadCoordinator(randConvCoord, learnFile, testFile)
# --Extra-tree--
baseClassif = ExtraTreesClassifier(
    nbTrees,
    max_features=maxFeatures,
    max_depth=maxDepth,
    min_samples_split=minSamplesSplit,
    min_samples_leaf=minSamplesLeaf,
    bootstrap=bootstrap,
    n_jobs=nbJobsEstimator,
    random_state=randomState,
    verbose=verbose,
)
# --Classifier
classifier = Classifier(randConvCoord, baseClassif)
# --Data--
loader = CifarFromNumpies(learningSetDir, learningIndexFile)
learningSet = FileImageBuffer(loader.getFiles(), NumpyImageLoader())
learningSet = learningSet[0:lsSize]
loader = CifarFromNumpies(testingSetDir, testingIndexFile)
testingSet = FileImageBuffer(loader.getFiles(), NumpyImageLoader())
testingSet = testingSet[0:tsSize]
# =====COMPUTATION=====#
# --Learning--#
print "Starting learning"
fitStart = time()
classifier.fit(learningSet)
fitEnd = time()
print "Learning done", formatDuration(fitEnd - fitStart)
sys.stdout.flush()
# --Testing--#
y_truth = testingSet.getLabels()
predStart = time()
y_prob, y_pred = classifier.predict_predict_proba(testingSet)
predEnd = time()
accuracy = classifier.accuracy(y_pred, y_truth)
confMat = classifier.confusionMatrix(y_pred, y_truth)
# ====ANALYSIS=====#
importance, order = randConvCoord.importancePerFeatureGrp(baseClassif)
print "==================RandConv================"
print "-----------Filtering--------------"
print "nb_filters", nb_filters
print "filterPolicy", filterPolicy
print "----------Pooling--------------"
print "poolings", poolings
print "--------SW extractor----------"
print "#Subwindows", nbSubwindows
print "subwindowMinSizeRatio", subwindowMinSizeRatio
print "subwindowMaxSizeRatio", subwindowMaxSizeRatio
print "subwindowTargetWidth", subwindowTargetWidth
print "subwindowTargetHeight", subwindowTargetHeight
print "fixedSize", fixedSize
print "------------Misc-----------------"
print "includeOriginalImage", includeOriginalImage
print "random", random
print "tempFolder", tempFolder
print "verbosity", verbosity
print "nbJobs", nbJobs
print "--------ExtraTrees----------"
print "nbTrees", nbTrees
print "maxFeatures", maxFeatures
print "maxDepth", maxDepth
print "minSamplesSplit", minSamplesSplit
print "minSamplesLeaf", minSamplesLeaf
print "bootstrap", bootstrap
print "nbJobsEstimator", nbJobsEstimator
print "verbose", verbose
print "randomState", randomState
print "------------Data---------------"
print "LearningSet size", len(learningSet)
print "TestingSet size", len(testingSet)
print "-------------------------------"
if shouldSave:
    print "saveFile", saveFile
print "Fit time", formatDuration(fitEnd - fitStart)
print "Classification time", formatDuration(predEnd - predStart)
print "Accuracy", accuracy
if shouldSave:
    np.save(saveFile, y_prob)
return accuracy, confMat, importance, order
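The Classifier source itself is not shown on this page, but the return convention used above (probabilities first, hard predictions second) can be illustrated with a short, hypothetical sketch built on a scikit-learn style estimator. This is not the actual implementation of predict_predict_proba, only an illustration of the behaviour the examples rely on.

import numpy as np

def predict_predict_proba_sketch(estimator, X):
    # Hypothetical illustration only: compute the class probabilities once
    # and derive the hard predictions from them, mirroring the
    # (y_prob, y_pred) ordering used in the examples on this page.
    y_prob = estimator.predict_proba(X)                      # shape (n_samples, n_classes)
    y_pred = estimator.classes_[np.argmax(y_prob, axis=1)]   # map argmax back to class labels
    return y_prob, y_pred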
Example 2: run
# Required import: from Classifier import Classifier [as alias]
# or: from Classifier.Classifier import predict_predict_proba [as alias]
#......... part of the code is omitted here .........
    min_samples_leaf=minSamplesLeaf,
    bootstrap=bootstrap,
    n_jobs=nbJobsEstimator,
    random_state=randomState,
    verbose=verbose)
optiClassif = Classifier(randConvCoord, baseClassif)
print "Starting optimization"
optiStart = time()
optiClassif.fit(learningSet)
optiEnd = time()
print "optimization done", formatDuration(optiEnd-optiStart)
_, order = randConvOptimizer.importancePerFeatureGrp(totallyTrees)
filtersTmp = randConvCoord._convolExtractor._finiteFilter._filters
filters = [x for x, _, _ in filtersTmp]
if not includeOriginalImage:
    bestIndices = order[:nb_filters]
else:
    count = 0
    bestIndices = []
    for index in order:
        if count == nb_filters - 1:
            break
        if index != 0:
            bestIndices.append(index - 1)
            count += 1
bestFilters = []
for i in bestIndices:
    bestFilters.append(filters[i])
best3Filters = Finite3SameFilter(bestFilters)
randConvCoord._convolExtractor._finiteFilter = best3Filters
#--Learning--#
print "Starting learning"
fitStart = time()
classifier.fit(learningSet)
fitEnd = time()
print "Learning done", formatDuration(fitEnd-fitStart)
sys.stdout.flush()
#--Testing--#
y_truth = testingSet.getLabels()
predStart = time()
y_prob, y_pred = classifier.predict_predict_proba(testingSet)
predEnd = time()
accuracy = classifier.accuracy(y_pred, y_truth)
confMat = classifier.confusionMatrix(y_pred, y_truth)
#====ANALYSIS=====#
importance, order = randConvCoord.importancePerFeatureGrp(baseClassif)
print "==================RandConv================"
print "-----------Filtering--------------"
print "nb_filters", nb_filters
print "filterPolicy", filterPolicy
print "----------Pooling--------------"
print "poolings", poolings
print "--------SW extractor----------"
print "#Subwindows", nbSubwindows
print "subwindowMinSizeRatio", subwindowMinSizeRatio
print "subwindowMaxSizeRatio", subwindowMaxSizeRatio
print "subwindowTargetWidth", subwindowTargetWidth
print "subwindowTargetHeight", subwindowTargetHeight
print "fixedSize", fixedSize
print "------------Misc-----------------"
print "includeOriginalImage", includeOriginalImage
print "random", random
print "tempFolder", tempFolder
print "verbosity", verbosity
print "nbJobs", nbJobs
print "--------ExtraTrees----------"
print "nbTrees", nbTrees
print "maxFeatures", maxFeatures
print "maxDepth", maxDepth
print "minSamplesSplit", minSamplesSplit
print "minSamplesLeaf", minSamplesLeaf
print "bootstrap", bootstrap
print "nbJobsEstimator", nbJobsEstimator
print "verbose", verbose
print "randomState", randomState
print "------------Data---------------"
print "LearningSet size", len(learningSet)
print "TestingSet size", len(testingSet)
print "-------------------------------"
if shouldSave:
    print "saveFile", saveFile
print "Fit time", formatDuration(fitEnd - fitStart)
print "Classification time", formatDuration(predEnd - predStart)
print "Accuracy", accuracy
if shouldSave:
    np.save(saveFile, y_prob)
filtersTmp = randConvCoord._convolExtractor._finiteFilter._filters
filters = [x for x, _, _ in filtersTmp]
return accuracy, confMat, importance, order, filters