This page collects typical usage examples of the Python method sklearn.linear_model.PassiveAggressiveClassifier.partial_fit. If you are wondering what PassiveAggressiveClassifier.partial_fit does, how to call it, or what it looks like in real code, the curated examples below should help. You can also read more about its containing class, sklearn.linear_model.PassiveAggressiveClassifier.
Four code examples of PassiveAggressiveClassifier.partial_fit are shown below, sorted by popularity by default.
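Before the collected examples, here is a minimal, self-contained sketch of the method on synthetic data (the data and batch size are illustrative, not taken from the examples below). Note that the full set of class labels must be supplied on the first partial_fit call:

import numpy as np
from sklearn.linear_model import PassiveAggressiveClassifier

rng = np.random.RandomState(0)
X = rng.randn(200, 5)                            # synthetic features
y = (X[:, 0] + X[:, 1] > 0).astype(int)          # synthetic binary labels

clf = PassiveAggressiveClassifier(random_state=0)
classes = np.unique(y)                           # all labels, known up front
for start in range(0, len(X), 50):               # feed the data in mini-batches
    stop = start + 50
    clf.partial_fit(X[start:stop], y[start:stop], classes=classes)
print(clf.score(X, y))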
Example 1: test_classifier_partial_fit
# Required import: from sklearn.linear_model import PassiveAggressiveClassifier [as alias]
# Or: from sklearn.linear_model.PassiveAggressiveClassifier import partial_fit [as alias]
def test_classifier_partial_fit():
    classes = np.unique(y)
    for data in (X, X_csr):
        clf = PassiveAggressiveClassifier(C=1.0,
                                          fit_intercept=True,
                                          random_state=0)
        for t in range(30):
            clf.partial_fit(data, y, classes)
        score = clf.score(data, y)
        assert_greater(score, 0.79)
Example 2: train_online_model
# Required import: from sklearn.linear_model import PassiveAggressiveClassifier [as alias]
# Or: from sklearn.linear_model.PassiveAggressiveClassifier import partial_fit [as alias]
def train_online_model(xtr, ytr, model=None):
    # Train classifier
    t0 = time.time()
    if model is None:
        model = PassiveAggressiveClassifier()
        model.fit(xtr, ytr)
    else:
        model.partial_fit(xtr, ytr)
    print "Training took %.2f seconds" % (time.time()-t0)
    return model
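A hedged sketch of how a helper like this might be driven over a stream of mini-batches (batch_iterator is a placeholder for whatever produces the batches). Because the first batch is handled with fit, later batches may only contain labels already seen in that first batch:

model = None
for xtr, ytr in batch_iterator():    # placeholder batch source
    model = train_online_model(xtr, ytr, model)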
Example 3: test_classifier_partial_fit
# Required import: from sklearn.linear_model import PassiveAggressiveClassifier [as alias]
# Or: from sklearn.linear_model.PassiveAggressiveClassifier import partial_fit [as alias]
def test_classifier_partial_fit():
    classes = np.unique(y)
    for data in (X, X_csr):
        for average in (False, True):
            clf = PassiveAggressiveClassifier(
                C=1.0, fit_intercept=True, random_state=0,
                average=average, max_iter=5)
            for t in range(30):
                clf.partial_fit(data, y, classes)
            score = clf.score(data, y)
            assert_greater(score, 0.79)
            if average:
                assert hasattr(clf, 'average_coef_')
                assert hasattr(clf, 'average_intercept_')
                assert hasattr(clf, 'standard_intercept_')
                assert hasattr(clf, 'standard_coef_')
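With average=True the classifier keeps averaged weights alongside the per-step ones, which is what the attribute checks above assert. A small sketch of inspecting them, reusing the synthetic X and y from the first sketch on this page (these attribute names vary across scikit-learn versions, hence the hasattr guard):

clf = PassiveAggressiveClassifier(C=1.0, average=True, max_iter=5,
                                  random_state=0)
clf.partial_fit(X, y, classes=np.unique(y))
if hasattr(clf, 'average_coef_'):                # present in older scikit-learn versions
    print(clf.average_coef_.shape, clf.standard_coef_.shape)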
Example 4: runLearner
# Required import: from sklearn.linear_model import PassiveAggressiveClassifier [as alias]
# Or: from sklearn.linear_model.PassiveAggressiveClassifier import partial_fit [as alias]
#......... part of the code omitted here .........
        random = RandomFeatureExtractor()
        lengthBaseline = LenFeatureExtractor()
        fullRST = FullPickledRSTFeatureExtractor(nums) if newData else FullTextRSTFeatureExtractor(nums)
        limitedRST = LimitedPickledRSTFeatureExtractor(nums) if newData else LimitedTextRSTFeatureExtractor(nums)
        vectorizer = FeatureUnion([('extra',limitedRST),('tfid',tfidvec)])
        print 'Fitting random features baseline'
        random.fit(texts)
        print 'Fitting text length baseline'
        lengthBaseline.fit(texts)
        print 'Fitting full RST features'
        fullRST.fit(texts)
        print 'Fitting limited RST features'
        limitedRST.fit(texts)
        print 'Fitting limited RST with tfidvec features'
        vectorizer.fit(texts)
        print 'Fitting tfidvec features'
        tfidvec.fit(texts)
        split = int(0.8*len(ilabels))
        trainData = (texts[:split],ilabels[:split])
        testData = (texts[split:],ilabels[split:])
        X,y = getAsSciKit(trainData[0],trainData[1],random,encoder,selector)
        learner.fit(X,y)
        X,y = getAsSciKit(trainData[0],trainData[1],random,encoder,selector)
        print 'random features baseline trained on %d instances has accuracy %f'%(len(trainData[0]),learner.score(X,y))
        dummy = DummyClassifier()
        X,y = getAsSciKit(trainData[0],trainData[1],random,encoder,selector)
        dummy.fit(X,y)
        X,y = getAsSciKit(testData[0],testData[1],random,encoder,selector)
        print 'Dummy label distribution baseline trained on %d instances has accuracy %f'%(len(trainData[0]),dummy.score(X,y))
        X,y = getAsSciKit(trainData[0],trainData[1],lengthBaseline,encoder,selector)
        learner.fit(X,y)
        X,y = getAsSciKit(testData[0],testData[1],lengthBaseline,encoder,selector)
        print 'text length baseline trained on %d instances has accuracy %f'%(len(trainData[0]),learner.score(X,y))
        X,y = getAsSciKit(trainData[0],trainData[1],fullRST,encoder,selector)
        learner.fit(X,y)
        X,y = getAsSciKit(testData[0],testData[1],fullRST,encoder,selector)
        print 'Full RST learner trained on %d instances has accuracy %f'%(len(trainData[0]),learner.score(X,y))
        X,y = getAsSciKit(trainData[0],trainData[1],limitedRST,encoder,selector)
        learner.fit(X,y)
        X,y = getAsSciKit(testData[0],testData[1],limitedRST,encoder,selector)
        print 'Limited RST learner trained on %d instances has accuracy %f'%(len(trainData[0]),learner.score(X,y))
        X,y = getAsSciKit(trainData[0],trainData[1],vectorizer,encoder,selector)
        learner.fit(X,y)
        X,y = getAsSciKit(testData[0],testData[1],vectorizer,encoder,selector)
        print 'Limited RST with ngram learner trained on %d instances has accuracy %f'%(len(trainData[0]),learner.score(X,y))
        X,y = getAsSciKit(trainData[0],trainData[1],tfidvec,encoder,selector)
        learner = learner.fit(X,y)
        X,y = getAsSciKit(testData[0],testData[1],tfidvec,encoder,selector)
        print 'ngram learner trained on %d instances has accuracy %f'%(len(trainData[0]),learner.score(X,y))
    else:
        vectorizer = tfidvec
        testData = None
        vocabGotten = False
        instances = ([],[])
        numVocab = 50000
        numTest = 50000
        numTrain = 100000
        maxTrainStages = 20
        for text,label in getSciKitData(stateProgress = False, discreteLabels=discreteHelpfulness):
            if label!='few' or useFew:
                instances[0].append(text)
                instances[1].append(label)
                if not vocabGotten and len(instances[0]) == numVocab:
                    if printStages:
                        print 'Fitting vocabulary with %d instances'%numVocab
                    vectorizer.fit(instances[0],None)
                    if selector is not None:
                        X,y = getSciKitInstance(instances[0],instances[1],vectorizer,encoder,None)
                        selector.fit(X,y)
                    vocabGotten = True
                    instances = ([],[])
                elif vocabGotten and testData is None and len(instances[0]) == numTest:
                    if printStages:
                        print 'Getting test data with %d instances'%numTest
                    testData = getSciKitInstance(instances[0],instances[1],vectorizer,encoder,selector)
                    instances = ([],[])
                elif vocabGotten and testData is not None and len(instances[0]) == numTrain:
                    X,y = getSciKitInstance(instances[0],instances[1],vectorizer,encoder,selector)
                    if discreteHelpfulness:
                        learner = learner.partial_fit(X,y, classes = classlabels)
                    else:
                        learner = learner.partial_fit(X,y)
                    instances = ([],[])
                    count = count + 1
                    if printStages:
                        print 'Baseline trained on %d instances has accuracy %f'%(count*numTrain,learner.score(testData[0],testData[1]))
                elif count == maxTrainStages:
                    break
        print 'Final learner trained on %d instances has accuracy %f'%(maxTrainStages*numTrain,learner.score(testData[0],testData[1]))
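Reduced to its core, the incremental-training pattern in this example looks like the hedged sketch below (the batch generator and label set are placeholders): the full label set is passed via classes so that early batches which happen to miss a class still update the same model.

learner = PassiveAggressiveClassifier()
classlabels = ['few', 'some', 'many']            # placeholder label set
for X_batch, y_batch in stream_of_batches():     # placeholder batch source
    learner = learner.partial_fit(X_batch, y_batch, classes=classlabels)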