This article collects typical usage examples of the Python method sklearn.naive_bayes.GaussianNB.name. If you are wondering what GaussianNB.name does, how to use it, or where to find examples of it, the curated code sample below may help. You can also read more about the class it belongs to, sklearn.naive_bayes.GaussianNB.
The listing below shows 1 code example of GaussianNB.name, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code examples.
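Note that name is not a built-in attribute of GaussianNB; in the example below it is a custom attribute attached to the estimator instance so the classifier can be labeled in result files and plots. Here is a minimal, self-contained sketch of that pattern (the classifier labels and the synthetic dataset are illustrative, not taken from the original project):

from sklearn.naive_bayes import GaussianNB
from sklearn.linear_model import LogisticRegression
from sklearn.datasets import make_classification

# Attach a custom ``name`` attribute to each estimator so results can be labeled later.
classifiers = [GaussianNB(), LogisticRegression()]
classifiers[0].name = "Bayes"
classifiers[1].name = "lr"

# Illustrative synthetic data, only for demonstration purposes.
X, y = make_classification(n_samples=200, n_features=10, random_state=0)
for clf in classifiers:
    clf.fit(X, y)
    # The custom attribute is used purely for reporting / file naming.
    print("%s accuracy: %.3f" % (clf.name, clf.score(X, y)))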
Example 1: findParameters
# Required import: from sklearn.naive_bayes import GaussianNB [as alias]
from sklearn.naive_bayes import GaussianNB
from sklearn.ensemble import RandomForestClassifier
from sklearn.linear_model import LogisticRegression
import traceback
# GenELMClassifier and RandomLayer come from an extreme-learning-machine implementation
# (e.g. the Python-ELM package); MetaDES, readForMeta2, readClsResponse,
# plotClassifiersAndSaveResult and Helpers are project-local modules.
def findParameters(folder="data/dataForMeta/ostanek/"):
    """
    Runs the MetaDES algorithm over a discrete grid of parameters. For every
    combination we save the fitted model, save the results by deciles, and save
    a plot that, next to the MetaDES result, also shows the result of every
    algorithm used in the MetaDES ensemble.
    :return:
    """
    XMeta, YMeta, XSel, YSel, XTest, YTest = readForMeta2(folder=folder)
    nb = GaussianNB()  # meta classifier for MetaDES
    nb.name = "Bayes"
    rf = RandomForestClassifier(n_estimators=1000, n_jobs=2)
    rf.name = "rf"
    elm = GenELMClassifier(hidden_layer=RandomLayer(n_hidden=400, activation_func='multiquadric', alpha=1))
    elm.name = "elm"
    lr = LogisticRegression()
    lr.name = "lr"
    metaClassifiers = [lr, elm]
    hCs = [1.0, 0.5]
    nrNeigh = [305]  # , 1000, 3000]
    modes = ["weighted"]
    metrics = ["l2", "chebyshev"]  # BallTree.valid_metrics
    metaClsModes = ["combined"]
    normalizeMetaFeatures = [True, False]
    competenceTressholds = [0.4, 0.5, 0.6]
    # metaDes = MetaDES(0.8, 1000, 50, lr, competenceTresshold=0.5, mode="weightedAll")
    YCaMeta = readClsResponse("Meta", folder=folder)  # we read all classifications for the meta dataset
    YCaSel = readClsResponse("Sel", folder=folder)
    YCaTest = readClsResponse("Test", folder=folder)
    nrOfTrials = 0
    print("we are testing for %d neighbours" % nrNeigh[0])
    allTrials = len(nrNeigh) * len(hCs) * len(modes) * len(metrics) * len(metaClassifiers) * len(metaClsModes) * len(normalizeMetaFeatures)
    print("We will have %d trials" % allTrials)
    for nrN in nrNeigh:
        for hC in hCs:
            for mode in modes:
                for metric in metrics:
                    try:
                        metaDes = MetaDES(hC, nrN, nrN, lr, competenceTresshold=0.5, mode=mode,
                                          metric=metric)
                        print("calculating meta features...")
                        metaDes.fit(XMeta, YMeta, YCaMeta, folder=folder)
                        for cls in metaClassifiers:
                            for metaClsMode in metaClsModes:
                                for normalizeMetaFeat in normalizeMetaFeatures:
                                    metaDes.metaClsMode = metaClsMode
                                    metaDes.metaCls = cls
                                    metaDes.normalizeMetaFeat = normalizeMetaFeat
                                    name = "metaDes_hC" + str(metaDes.hC) + \
                                           "_K" + str(metaDes.K) + \
                                           "_Kp" + str(metaDes.Kp) + \
                                           "_mode" + metaDes.mode + \
                                           "_competence" + str(metaDes.competenceTresshold) + \
                                           "_cls" + metaDes.metaCls.name + \
                                           "_metric" + metaDes.metric + \
                                           "_metaClsMode" + metaDes.metaClsMode + \
                                           "_normMetaFeat" + str(metaDes.normalizeMetaFeat)
                                    nrOfTrials += 1
                                    print("Fitting %d/%d trial" % (nrOfTrials, allTrials))
                                    metaDes.fitWithAlreadySaved(saveModel=False, folder=folder)  # if we have already computed the meta features
                                    responseTest = metaDes.predict_proba(XTest, YCaTest, XSel, YSel, YCaSel)[:, 1]
                                    plotClassifiersAndSaveResult(YTest, YCaTest, responseTest, name, folder=folder)  # we save the figure and the results
                                    Helpers.shraniModel(metaDes, folder + "models/" + name + "/")  # we save the fitted model
                    except Exception as e:
                        allTrials -= 1
                        with open(folder + "error.log", "a") as fw:
                            fw.write("We were executing " + name + "\n")
                            fw.write(str(traceback.format_exc()) + "\n\n\n***************************************")
                        print(str(e))
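The nested loops in the example simply enumerate the Cartesian product of the parameter lists. As a design note, the same trial schedule could be written more compactly with itertools.product; the sketch below mirrors the parameter names from the example and elides the MetaDES fitting body, so it is only an illustration of the grid enumeration, not the project's actual code:

import itertools

# Assumed parameter grids, mirroring the lists defined in findParameters above.
nrNeigh = [305]
hCs = [1.0, 0.5]
modes = ["weighted"]
metrics = ["l2", "chebyshev"]

for nrN, hC, mode, metric in itertools.product(nrNeigh, hCs, modes, metrics):
    # Build and fit one MetaDES configuration per combination here,
    # exactly as the nested loops in Example 1 do.
    print("trial:", nrN, hC, mode, metric)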