本文整理汇总了Python中Classifier.Classifier.confusionMatrix方法的典型用法代码示例。如果您正苦于以下问题:Python Classifier.confusionMatrix方法的具体用法?Python Classifier.confusionMatrix怎么用?Python Classifier.confusionMatrix使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类Classifier.Classifier
的用法示例。
在下文中一共展示了Classifier.confusionMatrix方法的6个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: run
# 需要导入模块: from Classifier import Classifier [as 别名]
# 或者: from Classifier.Classifier import confusionMatrix [as 别名]
def run(lsFile, tsFile, **kwargs):
randomState = None
if random:
randomState = 100
#======INSTANTIATING========#
os.environ["JOBLIB_TEMP_FOLDER"] = "/home/jmbegon/jmbegon/code/work/tmp/"
#--Pixit--
randConvCoord = coordinatorRandConvFactory(
nbFilters=nb_filters,
filterMinVal=filter_min_val,
filterMaxVal=filter_max_val,
filterMinSize=filterMinSize,
filterMaxSize=filterMaxSize,
nbSubwindows=nbSubwindows,
subwindowMinSizeRatio=subwindowMinSizeRatio,
subwindowMaxSizeRatio=subwindowMaxSizeRatio,
subwindowTargetWidth=subwindowTargetWidth,
subwindowTargetHeight=subwindowTargetHeight,
poolings=poolings,
filterNormalisation=filterNormalisation,
subwindowInterpolation=subwindowInterpolation,
includeOriginalImage=includeOriginalImage,
nbJobs=nbJobs,
verbosity=verbosity,
tempFolder=tempFolder,
random=random)
#--Extra-tree--
baseClassif = ExtraTreesClassifier(nbTrees,
max_features=maxFeatures,
max_depth=maxDepth,
min_samples_split=minSamplesSplit,
min_samples_leaf=minSamplesLeaf,
bootstrap=bootstrap,
n_jobs=nbJobsEstimator,
random_state=randomState,
verbose=verbose)
#--Classifier
classifier = Classifier(randConvCoord, baseClassif)
#--Data--
with open(lsFile, "wb") as f:
lsSize, Xls, yls = pickle.load(f, protocol=2)
loader = CifarFromNumpies(learningSetDir, learningIndexFile)
learningSet = FileImageBuffer(loader.getFiles(), NumpyImageLoader())
learningSet = learningSet[0:lsSize]
with open(tsFile, "wb") as f:
tsSize, Xts, yts = pickle.load(f, protocol=2)
loader = CifarFromNumpies(testingSetDir, testingIndexFile)
testingSet = FileImageBuffer(loader.getFiles(), NumpyImageLoader())
testingSet = testingSet[0:tsSize]
#=====COMPUTATION=====#
#--Learning--#
print "Starting learning"
fitStart = time()
baseClassif.fit(Xls, yls)
fitEnd = time()
print "Learning done", (fitEnd-fitStart), "seconds (no extraction)"
sys.stdout.flush()
#--Testing--#
y_truth = testingSet.getLabels()
predStart = time()
y_pred = classifier._predict(Xts, lsSize)
predEnd = time()
#====ANALYSIS=====#
accuracy = classifier.accuracy(y_pred, y_truth)
confMat = classifier.confusionMatrix(y_pred, y_truth)
importance, order = randConvCoord.importancePerFeatureGrp(baseClassif)
print "========================================="
print "--------ExtraTrees----------"
print "nbTrees", nbTrees
print "maxFeatures", maxFeatures
print "maxDepth", maxDepth
print "minSamplesSplit", minSamplesSplit
print "minSamplesLeaf", minSamplesLeaf
print "bootstrap", bootstrap
print "nbJobsEstimator", nbJobsEstimator
print "verbose", verbose
print "randomState", randomState
print "------------Data---------------"
print "LearningSet size", len(learningSet)
print "TestingSet size", len(testingSet)
print "-------------------------------"
print "Fit time (no extraction)", (fitEnd-fitStart), "seconds"
print "Classifcation time (no extraction)", (predEnd-predStart), "seconds"
print "Accuracy", accuracy
return accuracy, confMat, importance, order
示例2: run
# 需要导入模块: from Classifier import Classifier [as 别名]
# 或者: from Classifier.Classifier import confusionMatrix [as 别名]
#.........这里部分代码省略.........
# NOTE(review): truncated excerpt — the enclosing `def` header and the
# opening of the coordinator-factory call are elided above; the lines
# below are the trailing keyword arguments of that call.
subwindowTargetWidth=subwindowTargetWidth,
subwindowTargetHeight=subwindowTargetHeight,
subwindowInterpolation=subwindowInterpolation,
includeOriginalImage=includeOriginalImage,
nbJobs=nbJobs,
verbosity=verbosity,
tempFolder=tempFolder,
random=random,
)
# Wrap the coordinator — presumably to load precomputed features from
# learnFile/testFile instead of re-extracting them; TODO confirm.
randConvCoord = LoadCoordinator(randConvCoord, learnFile, testFile)
# --Extra-tree--
baseClassif = ExtraTreesClassifier(
nbTrees,
max_features=maxFeatures,
max_depth=maxDepth,
min_samples_split=minSamplesSplit,
min_samples_leaf=minSamplesLeaf,
bootstrap=bootstrap,
n_jobs=nbJobsEstimator,
random_state=randomState,
verbose=verbose,
)
# --Classifier
classifier = Classifier(randConvCoord, baseClassif)
# --Data-- (CIFAR images read back from per-image numpy files, capped to
# the first lsSize / tsSize entries)
loader = CifarFromNumpies(learningSetDir, learningIndexFile)
learningSet = FileImageBuffer(loader.getFiles(), NumpyImageLoader())
learningSet = learningSet[0:lsSize]
loader = CifarFromNumpies(testingSetDir, testingIndexFile)
testingSet = FileImageBuffer(loader.getFiles(), NumpyImageLoader())
testingSet = testingSet[0:tsSize]
# =====COMPUTATION=====#
# --Learning--#
print "Starting learning"
fitStart = time()
classifier.fit(learningSet)
fitEnd = time()
print "Learning done", formatDuration(fitEnd - fitStart)
sys.stdout.flush()
# --Testing--#
y_truth = testingSet.getLabels()
predStart = time()
# Returns both class probabilities and hard predictions in one pass.
y_prob, y_pred = classifier.predict_predict_proba(testingSet)
predEnd = time()
accuracy = classifier.accuracy(y_pred, y_truth)
confMat = classifier.confusionMatrix(y_pred, y_truth)
# ====ANALYSIS=====#
importance, order = randConvCoord.importancePerFeatureGrp(baseClassif)
print "==================RandConv================"
print "-----------Filtering--------------"
print "nb_filters", nb_filters
print "filterPolicy", filterPolicy
print "----------Pooling--------------"
print "poolings", poolings
print "--------SW extractor----------"
print "#Subwindows", nbSubwindows
print "subwindowMinSizeRatio", subwindowMinSizeRatio
print "subwindowMaxSizeRatio", subwindowMaxSizeRatio
print "subwindowTargetWidth", subwindowTargetWidth
print "subwindowTargetHeight", subwindowTargetHeight
print "fixedSize", fixedSize
print "------------Misc-----------------"
print "includeOriginalImage", includeOriginalImage
print "random", random
print "tempFolder", tempFolder
print "verbosity", verbosity
print "nbJobs", nbJobs
print "--------ExtraTrees----------"
print "nbTrees", nbTrees
print "maxFeatures", maxFeatures
print "maxDepth", maxDepth
print "minSamplesSplit", minSamplesSplit
print "minSamplesLeaf", minSamplesLeaf
print "bootstrap", bootstrap
print "nbJobsEstimator", nbJobsEstimator
print "verbose", verbose
print "randomState", randomState
print "------------Data---------------"
print "LearningSet size", len(learningSet)
print "TestingSet size", len(testingSet)
print "-------------------------------"
if shouldSave:
print "saveFile", saveFile
print "Fit time", formatDuration(fitEnd - fitStart)
print "Classifcation time", formatDuration(predEnd - predStart)
print "Accuracy", accuracy
if shouldSave:
# Persist the predicted class probabilities for later analysis.
np.save(saveFile, y_prob)
return accuracy, confMat, importance, order
示例3: FileImageBuffer
# 需要导入模块: from Classifier import Classifier [as 别名]
# 或者: from Classifier.Classifier import confusionMatrix [as 别名]
# NOTE(review): truncated excerpt — `loader`, `classifier` and the
# settings printed below are defined in code elided above this chunk.
testingSet = FileImageBuffer(loader.getFiles(), NumpyImageLoader())
# Keep only the first `testingUse` images of the buffer.
testingSet = testingSet[0:testingUse]
# =====COMPUTATION=====#
# --Learning--#
fitStart = time()
classifier.fit(learningSet)
fitEnd = time()
# --Testing--#
y_truth = testingSet.getLabels()
predStart = time()
y_pred = classifier.predict(testingSet)
predEnd = time()
# Score the hard predictions against the ground-truth labels.
accuracy = classifier.accuracy(y_pred, y_truth)
confMat = classifier.confusionMatrix(y_pred, y_truth)
print "========================================="
print "--------SW extractor----------"
print "#Subwindows", nbSubwindows
print "subwindowMinSizeRatio", subwindowMinSizeRatio
print "subwindowMaxSizeRatio", subwindowMaxSizeRatio
print "subwindowTargetWidth", subwindowTargetWidth
print "subwindowTargetHeight", subwindowTargetHeight
print "fixedSize", fixedSize
print "nbJobs", nbJobs
print "--------ExtraTrees----------"
print "nbTrees", nbTrees
print "maxFeatures", maxFeatures
print "maxDepth", maxDepth
print "minSamplesSplit", minSamplesSplit
示例4: run
# 需要导入模块: from Classifier import Classifier [as 别名]
# 或者: from Classifier.Classifier import confusionMatrix [as 别名]
def run(poolings=poolings,
        nbSubwindows=nbSubwindows,
        subwindowMinSizeRatio=subwindowMinSizeRatio,
        subwindowMaxSizeRatio=subwindowMaxSizeRatio,
        subwindowTargetWidth=subwindowTargetWidth,
        subwindowTargetHeight=subwindowTargetHeight,
        fixedSize=fixedSize,
        subwindowInterpolation=subwindowInterpolation,
        includeOriginalImage=includeOriginalImage,
        random=random,
        nbJobs=nbJobs,
        verbosity=verbosity,
        tempFolder=tempFolder,
        nbTrees=nbTrees,
        maxFeatures=maxFeatures,
        maxDepth=maxDepth,
        minSamplesSplit=minSamplesSplit,
        minSamplesLeaf=minSamplesLeaf,
        bootstrap=bootstrap,
        nbJobsEstimator=nbJobsEstimator,
        verbose=verbose,
        learningUse=learningUse,
        testingUse=testingUse):
    """Train and evaluate the custom RandConv pipeline on CIFAR data.

    Every parameter defaults to the module-level setting of the same
    name (captured at definition time).

    NOTE(review): this excerpt is truncated — the remainder of the
    result reporting (and presumably a return statement) is elided
    below this chunk.
    """
    randomState = None
    if random:  # `random` is a boolean flag, not the stdlib module
        randomState = 100
    # Cap the requested set sizes to the available data.
    lsSize = learningUse
    if learningUse > maxLearningSize:
        lsSize = maxLearningSize
    tsSize = testingUse
    if testingUse > maxTestingSize:
        tsSize = maxTestingSize
    #======INSTANTIATING========#
    os.environ["JOBLIB_TEMP_FOLDER"] = "/home/jmbegon/jmbegon/code/work/tmp/"
    #--customRandConv--
    randConvCoord = customRandConvFactory(
        nbSubwindows=nbSubwindows,
        subwindowMinSizeRatio=subwindowMinSizeRatio,
        subwindowMaxSizeRatio=subwindowMaxSizeRatio,
        subwindowTargetWidth=subwindowTargetWidth,
        subwindowTargetHeight=subwindowTargetHeight,
        poolings=poolings,
        subwindowInterpolation=subwindowInterpolation,
        includeOriginalImage=includeOriginalImage,
        nbJobs=nbJobs,
        verbosity=verbosity,
        tempFolder=tempFolder,
        random=random)
    #--Extra-tree--
    baseClassif = ExtraTreesClassifier(nbTrees,
                                       max_features=maxFeatures,
                                       max_depth=maxDepth,
                                       min_samples_split=minSamplesSplit,
                                       min_samples_leaf=minSamplesLeaf,
                                       bootstrap=bootstrap,
                                       n_jobs=nbJobsEstimator,
                                       random_state=randomState,
                                       verbose=verbose)
    #--Classifier
    classifier = Classifier(randConvCoord, baseClassif)
    #--Data-- (CIFAR images read back from per-image numpy files)
    loader = CifarFromNumpies(learningSetDir, learningIndexFile)
    learningSet = FileImageBuffer(loader.getFiles(), NumpyImageLoader())
    learningSet = learningSet[0:lsSize]
    loader = CifarFromNumpies(testingSetDir, testingIndexFile)
    testingSet = FileImageBuffer(loader.getFiles(), NumpyImageLoader())
    testingSet = testingSet[0:tsSize]
    #=====COMPUTATION=====#
    #--Learning--#
    print "Starting learning"
    fitStart = time()
    classifier.fit(learningSet)
    fitEnd = time()
    print "Learning done", (fitEnd-fitStart), "seconds"
    sys.stdout.flush()
    #--Testing--#
    y_truth = testingSet.getLabels()
    predStart = time()
    y_pred = classifier.predict(testingSet)
    predEnd = time()
    accuracy = classifier.accuracy(y_pred, y_truth)
    confMat = classifier.confusionMatrix(y_pred, y_truth)
    #====ANALYSIS=====#
    importance, order = randConvCoord.importancePerFeatureGrp(baseClassif)
    print "==================CUSTOM======================="
    print "----------Pooling--------------"
    print "poolings", poolings
    print "--------SW extractor----------"
#.........这里部分代码省略.........
示例5: run
# 需要导入模块: from Classifier import Classifier [as 别名]
# 或者: from Classifier.Classifier import confusionMatrix [as 别名]
#.........这里部分代码省略.........
# NOTE(review): truncated excerpt — the enclosing `def` header and the
# opening of the coordinator-factory call are elided above; the lines
# below are the trailing keyword arguments of that call (this variant
# adds feature-compression options: compressorType etc.).
subwindowMinSizeRatio=subwindowMinSizeRatio,
subwindowMaxSizeRatio=subwindowMaxSizeRatio,
subwindowTargetWidth=subwindowTargetWidth,
subwindowTargetHeight=subwindowTargetHeight,
poolings=poolings,
filterNormalisation=filterNormalisation,
subwindowInterpolation=subwindowInterpolation,
includeOriginalImage=includeOriginalImage,
compressorType=compressorType,
nbCompressedFeatures=nbCompressedFeatures,
compressOriginalImage=compressOriginalImage,
nbJobs=nbJobs,
verbosity=verbosity,
tempFolder=tempFolder)
#--Extra-tree--
baseClassif = ExtraTreesClassifier(nbTrees,
max_features=maxFeatures,
max_depth=maxDepth,
min_samples_split=minSamplesSplit,
min_samples_leaf=minSamplesLeaf,
bootstrap=bootstrap,
n_jobs=nbJobsEstimator,
random_state=randomState,
verbose=verbose)
#--Classifier
classifier = Classifier(randConvCoord, baseClassif)
#--Data-- (CIFAR images read back from per-image numpy files, capped to
# the first learningUse / testingUse entries)
loader = CifarFromNumpies(learningSetDir, learningIndexFile)
learningSet = FileImageBuffer(loader.getFiles(), NumpyImageLoader())
learningSet = learningSet[0:learningUse]
loader = CifarFromNumpies(testingSetDir, testingIndexFile)
testingSet = FileImageBuffer(loader.getFiles(), NumpyImageLoader())
testingSet = testingSet[0:testingUse]
#=====COMPUTATION=====#
#--Learning--#
print "Starting learning"
fitStart = time()
classifier.fit(learningSet)
fitEnd = time()
print "Learning done", (fitEnd-fitStart), "seconds"
sys.stdout.flush()
#--Testing--#
y_truth = testingSet.getLabels()
predStart = time()
y_pred = classifier.predict(testingSet)
predEnd = time()
# Score the hard predictions against the ground-truth labels.
accuracy = classifier.accuracy(y_pred, y_truth)
confMat = classifier.confusionMatrix(y_pred, y_truth)
#====ANALYSIS=====#
importance, order = randConvCoord.importancePerFeatureGrp(baseClassif)
print "========================================="
print "-----------Filtering--------------"
print "nb_filters", nb_filters
print "filter_min_val", filter_min_val
print "filter_max_val", filter_max_val
print "filterMinSize", filterMinSize
print "filterMaxSize", filterMaxSize
print "filterNormalisation", filterNormalisation
print "----------Pooling--------------"
print "poolings", poolings
print "--------SW extractor----------"
print "#Subwindows", nbSubwindows
print "subwindowMinSizeRatio", subwindowMinSizeRatio
print "subwindowMaxSizeRatio", subwindowMaxSizeRatio
print "subwindowTargetWidth", subwindowTargetWidth
print "subwindowTargetHeight", subwindowTargetHeight
print "fixedSize", fixedSize
print "------------Misc-----------------"
print "includeOriginalImage", includeOriginalImage
print "compressorType", compressorType
print "nbCompressedFeatures", nbCompressedFeatures
print "compressOriginalImage", compressOriginalImage
print "nbJobs", nbJobs
print "--------ExtraTrees----------"
print "nbTrees", nbTrees
print "maxFeatures", maxFeatures
print "maxDepth", maxDepth
print "minSamplesSplit", minSamplesSplit
print "minSamplesLeaf", minSamplesLeaf
print "bootstrap", bootstrap
print "nbJobsEstimator", nbJobsEstimator
print "randomState", randomState
print "------------Data---------------"
print "LearningSet size", len(learningSet)
print "TestingSet size", len(testingSet)
print "-------------------------------"
print "Fit time", (fitEnd-fitStart), "seconds"
print "Classifcation time", (predEnd-predStart), "seconds"
print "Accuracy", accuracy
print "Confusion matrix :\n", confMat
return accuracy, confMat, importance, order
示例6: run
# 需要导入模块: from Classifier import Classifier [as 别名]
# 或者: from Classifier.Classifier import confusionMatrix [as 别名]
#.........这里部分代码省略.........
# NOTE(review): truncated excerpt — the enclosing `def` header and the
# opening of the ExtraTreesClassifier call are elided above; the lines
# below are its trailing keyword arguments.
min_samples_leaf=minSamplesLeaf,
bootstrap=bootstrap,
n_jobs=nbJobsEstimator,
random_state=randomState,
verbose=verbose)
optiClassif = Classifier(randConvCoord, baseClassif)
print "Starting optimization"
optiStart = time()
optiClassif.fit(learningSet)
optiEnd = time()
print "optimization done", formatDuration(optiEnd-optiStart)
# Rank feature groups by importance; only the ordering is kept here.
_, order = randConvOptimizer.importancePerFeatureGrp(totallyTrees)
# Reach into the coordinator's private filter bank; each entry is a
# tuple whose first element is the filter itself.
filtersTmp = randConvCoord._convolExtractor._finiteFilter._filters
filters = [x for x, _, _ in filtersTmp]
# Select the nb_filters most important filters according to `order`.
# When the original image is included, group 0 appears to correspond to
# the original image (TODO confirm): it is skipped and the remaining
# group indices are shifted down by one to index into `filters`.
if not includeOriginalImage:
bestIndices = order[:nb_filters]
else:
count = 0
bestIndices = []
for index in order:
if count == nb_filters-1:
break
if index != 0:
bestIndices.append(index-1)
count += 1
bestFlters = []
for i in bestIndices:
bestFlters.append(filters[i])
# Replace the coordinator's filter bank with the selected subset.
best3Filters = Finite3SameFilter(bestFlters)
randConvCoord._convolExtractor._finiteFilter = best3Filters
#--Learning--#
print "Starting learning"
fitStart = time()
classifier.fit(learningSet)
fitEnd = time()
print "Learning done", formatDuration(fitEnd-fitStart)
sys.stdout.flush()
#--Testing--#
y_truth = testingSet.getLabels()
predStart = time()
# Returns both class probabilities and hard predictions in one pass.
y_prob, y_pred = classifier.predict_predict_proba(testingSet)
predEnd = time()
accuracy = classifier.accuracy(y_pred, y_truth)
confMat = classifier.confusionMatrix(y_pred, y_truth)
#====ANALYSIS=====#
importance, order = randConvCoord.importancePerFeatureGrp(baseClassif)
print "==================RandConv================"
print "-----------Filtering--------------"
print "nb_filters", nb_filters
print "filterPolicy", filterPolicy
print "----------Pooling--------------"
print "poolings", poolings
print "--------SW extractor----------"
print "#Subwindows", nbSubwindows
print "subwindowMinSizeRatio", subwindowMinSizeRatio
print "subwindowMaxSizeRatio", subwindowMaxSizeRatio
print "subwindowTargetWidth", subwindowTargetWidth
print "subwindowTargetHeight", subwindowTargetHeight
print "fixedSize", fixedSize
print "------------Misc-----------------"
print "includeOriginalImage", includeOriginalImage
print "random", random
print "tempFolder", tempFolder
print "verbosity", verbosity
print "nbJobs", nbJobs
print "--------ExtraTrees----------"
print "nbTrees", nbTrees
print "maxFeatures", maxFeatures
print "maxDepth", maxDepth
print "minSamplesSplit", minSamplesSplit
print "minSamplesLeaf", minSamplesLeaf
print "bootstrap", bootstrap
print "nbJobsEstimator", nbJobsEstimator
print "verbose", verbose
print "randomState", randomState
print "------------Data---------------"
print "LearningSet size", len(learningSet)
print "TestingSet size", len(testingSet)
print "-------------------------------"
if shouldSave:
print "saveFile", saveFile
print "Fit time", formatDuration(fitEnd-fitStart)
print "Classifcation time", formatDuration(predEnd-predStart)
print "Accuracy", accuracy
if shouldSave:
# Persist the predicted class probabilities for later analysis.
np.save(saveFile, y_prob)
# Re-read the (now pruned) filter bank so it can be returned as well.
filtersTmp = randConvCoord._convolExtractor._finiteFilter._filters
filters = [x for x, _, _ in filtersTmp]
return accuracy, confMat, importance, order, filters