本文整理汇总了C++中TFltVV::GetCols方法的典型用法代码示例。如果您正苦于以下问题:C++ TFltVV::GetCols方法的具体用法?C++ TFltVV::GetCols怎么用?C++ TFltVV::GetCols使用的例子?那么恭喜您,这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类 TFltVV 的用法示例。
在下文中一共展示了TFltVV::GetCols方法的7个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C++代码示例。
示例1: LeftInstNV
void TDecisionTree::TNode::Split(const TFltVV& FtrVV, const TFltV& ClassV, const TIntV& InstNV) {
    // Partition this node's instances into left/right children using the
    // previously selected cut feature (CutFtrN) and threshold (CutFtrVal).
    // First pass: count how many instances fall on the left side, so both
    // index vectors can be pre-allocated to their exact final size.
    int LeftCount = 0;
    for (int ExN = 0; ExN < NExamples; ExN++) {
        const int InstN = InstNV[ExN];
        AssertR(0 <= InstN && InstN < FtrVV.GetCols(), "Invalid instance index: " + TInt::GetStr(InstN) + "!");
        if (FtrVV(CutFtrN, InstN) <= CutFtrVal) { LeftCount++; }
    }
    // (capacity, length=0) — Add() below fills them without reallocation
    TIntV LeftInstNV(LeftCount, 0);
    TIntV RightInstNV(NExamples - LeftCount, 0);
    // Second pass: route each instance index into its partition.
    for (int ExN = 0; ExN < NExamples; ExN++) {
        const int InstN = InstNV[ExN];
        AssertR(0 <= InstN && InstN < FtrVV.GetCols(), "Invalid instance index: " + TInt::GetStr(InstN) + "!");
        if (FtrVV(CutFtrN, InstN) <= CutFtrVal) {
            LeftInstNV.Add(InstN);
        } else {
            RightInstNV.Add(InstN);
        }
    }
    // Recursively fit the two subtrees on their partitions.
    Left = new TNode(Tree);
    Right = new TNode(Tree);
    Left->Fit(FtrVV, ClassV, LeftInstNV);
    Right->Fit(FtrVV, ClassV, RightInstNV);
}
示例2: ValClassPrV
// Fits this tree node on the instances listed in InstNV.
// FtrVV  : feature matrix — features in rows, instances in columns
// ClassV : per-instance class labels stored as doubles; cast to int below,
//          so they are assumed to be 0/1 — TODO confirm against callers
// InstNV : column indexes into FtrVV of the instances that reached this node
// Computes the node's class histogram and per-feature means, scans every
// feature for the best binary split, and recurses via Split() if allowed.
void TDecisionTree::TNode::Fit(const TFltVV& FtrVV, const TFltV& ClassV, const TIntV& InstNV) {
EAssert(!InstNV.Empty());
const int Dim = FtrVV.GetRows();
NExamples = InstNV.Len();
// class distribution (2 bins) and mean of each feature over this node
ClassHist.Gen(2);
FtrHist.Gen(Dim);
{
int TotalPos = 0;
double BestScore = TFlt::NInf, CutVal = TFlt::NInf, Score = TFlt::NInf;
// count the positive examples in this node
for (int i = 0; i < NExamples; i++) {
AssertR(0 <= InstNV[i] && InstNV[i] < FtrVV.GetCols(), "Invalid instance index: " + TInt::GetStr(InstNV[i]) + "!");
TotalPos += (int) ClassV[InstNV[i]];
}
// ClassHist[0] = fraction of negatives, ClassHist[1] = fraction of positives
ClassHist[0] = 1 - double(TotalPos) / NExamples;
ClassHist[1] = 1 - ClassHist[0];
// reusable buffer of (feature value, class) pairs, one per instance
TFltIntPrV ValClassPrV(NExamples);
// get the best score and cut value
int InstN;
for (int FtrN = 0; FtrN < Dim; FtrN++) {
double FtrSum = 0;
// gather this feature's value and the class for every instance of the node
for (int i = 0; i < NExamples; i++) {
InstN = InstNV[i];
AssertR(0 <= InstN && InstN < FtrVV.GetCols(), "Invalid instance index: " + TInt::GetStr(InstN) + "!");
ValClassPrV[i].Val1 = FtrVV(FtrN, InstN);
ValClassPrV[i].Val2 = (int) ClassV[InstN];
FtrSum += FtrVV(FtrN, InstN);
}
ValClassPrV.Sort(true); // have to sort to speed up the calculation
// remember the best-scoring (feature, threshold) pair found so far;
// CutVal and Score are output parameters of CanSplitNumFtr
if (CanSplitNumFtr(ValClassPrV, TotalPos, CutVal, Score) && Score > BestScore) {
BestScore = Score;
CutFtrN = FtrN;
CutFtrVal = CutVal;
}
// mean of this feature over the node's instances
FtrHist[FtrN] = FtrSum / NExamples;
}
}
// cut the dataset into left and right and build the tree recursively;
// CutFtrN < 0 means no valid split was found, so the node stays a leaf
if (ShouldGrow() && CutFtrN >= 0) {
EAssert(CutFtrN < Dim);
// the best attribute is now selected, calculate the correlation between the
// selected attribute and other attributes, then split the node
CalcCorrFtrV(FtrVV, InstNV);
Split(FtrVV, ClassV, InstNV);
}
}
示例3: PredictInternal
// Computes the hazard intensity for every instance (column) of X:
// the linear predictor X' * WgtV, exponentiated element-wise so the
// intensities are always positive.
void TPropHazards::PredictInternal(const TFltVV& X, TFltV& IntensV) const {
    TLinAlg::MultiplyT(X, WgtV, IntensV); // IntensV = X' * WgtV
    const int NInst = X.GetCols();
    for (int ColN = 0; ColN < NInst; ColN++) {
        IntensV[ColN] = exp(IntensV[ColN]);
    }
}
示例4: Grow
void TDecisionTree::Grow(const TFltVV& FtrVV, const TFltV& ClassV, const PNotify& Notify) {
CleanUp();
const int NInst = FtrVV.GetCols();
TIntV RangeV(NInst); TLinAlgTransform::RangeV(NInst, RangeV);
Root = new TNode(this);
Root->Fit(FtrVV, ClassV, RangeV);
}
示例5: GetInvParticipRat
// Computes the inverse participation ratio (IPR) of the leading eigenvectors
// of the graph's adjacency matrix and returns (eigenvalue, IPR) pairs in
// EigValIprV, sorted by eigenvalue.
// MaxEigVecs : upper bound on the number of eigenvectors (values <= 1 mean 1000)
// TimeLimit  : time limit handed to the Lanczos solver
void GetInvParticipRat(const PUNGraph& Graph, int MaxEigVecs, int TimeLimit, TFltPrV& EigValIprV) {
    TUNGraphMtx GraphMtx(Graph);
    TFltVV EigVecVV;
    TFltV EigValV;
    TExeTm ExeTm;
    if (MaxEigVecs <= 1) { MaxEigVecs = 1000; }
    const int EigVecs = TMath::Mn(Graph->GetNodes(), MaxEigVecs);
    printf("start %d vecs...", EigVecs);
    // Lanczos may throw after producing only part of the requested eigenpairs;
    // keep whatever it managed to compute (deliberate best-effort behavior).
    try {
        TSparseSVD::Lanczos2(GraphMtx, EigVecs, TimeLimit, ssotFull, EigValV, EigVecVV, false);
    } catch(...) {
        printf("\n ***EXCEPTION: TRIED %d GOT %d values** \n", EigVecs, EigValV.Len()); }
    printf(" ***TRIED %d GOT %d values in %s\n", EigVecs, EigValV.Len(), ExeTm.GetStr());
    EigValIprV.Clr();
    if (EigValV.Empty()) { return; }
    // BUG FIX: after a partial Lanczos run EigValV can hold fewer entries than
    // EigVecVV has columns (the catch branch above anticipates exactly that),
    // so iterating to GetCols() could read EigValV out of bounds. Bound the
    // loop by both lengths.
    const int NPairs = TMath::Mn(EigValV.Len(), EigVecVV.GetCols());
    TFltV EigVec;
    for (int v = 0; v < NPairs; v++) {
        EigVecVV.GetCol(v, EigVec);
        EigValIprV.Add(TFltPr(EigValV[v], GetInvParticipRat(EigVec)));
    }
    EigValIprV.Sort();
}
示例6: Fit
// Fits the proportional hazards model with Newton-Raphson iterations.
// _X  : feature matrix — features in rows, instances in columns
// t   : one observation per instance (column of _X)
// Eps : convergence threshold on the norm of the weight update
// NOTE(review): the while-loop below has no iteration cap; if the iteration
// fails to converge this function will not terminate — consider adding one.
void TPropHazards::Fit(const TFltVV& _X, const TFltV& t, const double& Eps) {
const int NInst = _X.GetCols();
const int Dim = _X.GetRows() + 1; // +1 for the intercept (base hazard) row
EAssertR(NInst == t.Len(), "TPropHazards::Fit the number of instances in X.GetCols() and t.Len() do not match");
Notify->OnNotifyFmt(TNotifyType::ntInfo, "Fitting proportional hazards model on %d instances ...", NInst);
// prepend a row of ones to _X so the first weight acts as the intercept
TFltVV X(_X.GetRows()+1, NInst);
for (int ColN = 0; ColN < NInst; ColN++) {
X(0, ColN) = 1;
for (int RowN = 0; RowN < _X.GetRows(); RowN++) {
X(RowN+1, ColN) = _X(RowN, ColN);
}
}
WgtV.Gen(Dim);
TFltVV X_t(X); X_t.Transpose(); // TODO slow
// pre-allocated work buffers, reused across all iterations
TFltVV XTimesW(X.GetRows(), X.GetCols());
TFltVV H(Dim, Dim); // Hessian
TFltV TempNInstV(NInst, NInst); // holds intensities, then residuals (t .* intens - 1)
TFltV GradV(Dim, Dim);
TFltV DeltaWgtV(Dim, Dim);
TSpVV WgtColSpVV(NInst, NInst); // sparse diagonal weight matrix W, one entry per column
double IntensTimesT;
// generate weight matrix with only ones on the diagonal
// so you don't recreate all the object every iteration
for (int i = 0; i < NInst; i++) {
WgtColSpVV[i].Add(TIntFltKd(i, 1));
}
int k = 0;
double Diff = TFlt::PInf;
while (Diff > Eps) {
// construct the intensity vector
PredictInternal(X, TempNInstV);
// I) construct the Hessian: X*W*X' + lambda*I
// prepare W and t .* intens - 1
// (TempNInstV is overwritten in place: intensity -> residual)
for (int i = 0; i < NInst; i++) {
IntensTimesT = t[i] * TempNInstV[i];
TempNInstV[i] = IntensTimesT - 1;
WgtColSpVV[i][0].Dat = IntensTimesT;
}
// 1) compute X*W
TLinAlg::Multiply(X, WgtColSpVV, XTimesW);
// 2) compute (X*W)*X'
TLinAlg::Multiply(XTimesW, X_t, H);
// 3) (X*W*X') + lambda*I, exclude the base hazard
// (regularization starts at i = 1 so the intercept is not penalized)
if (Lambda > 0) {
for (int i = 1; i < Dim; i++) {
H(i,i) += Lambda;
}
}
// II) construct the gradient: (t .* intens - 1) * X' + lambda*[0, w(2:end)]
// 1) (t .* intens - 1) * X'
TLinAlg::Multiply(X, TempNInstV, GradV);
// 2) ((t .* intens - 1) * X') + lambda*[0, w(2:end)]
if (Lambda > 0) {
for (int i = 1; i < Dim; i++) {
GradV[i] += Lambda * WgtV[i];
}
}
// III) compute: delta_w = H \ grad
#ifdef LAPACKE
const TFlt SingEps = 1e-10;
if (H.GetRows() == 1) { // fix for a bug in SVD factorization
DeltaWgtV[0] = GradV[0] / H(0,0);
} else {
TLinAlg::SVDSolve(H, DeltaWgtV, GradV, SingEps);
}
#else
throw TExcept::New("Should include LAPACKE!!");
#endif
// IV) w <= w - delta_w
for (int i = 0; i < Dim; i++) {
WgtV[i] -= DeltaWgtV[i];
}
// convergence is measured by the size of the Newton step
Diff = TLinAlg::Norm(DeltaWgtV);
EAssertR(!TFlt::IsNan(Diff), "nans in delta wgt vector!");
if (++k % 10 == 0) {
Notify->OnNotifyFmt(TNotifyType::ntInfo, "Step: %d, diff: %.3f", k, Diff);
}
}
Notify->OnNotifyFmt(TNotifyType::ntInfo, "Converged. Diff: %.5f", Diff);
}
示例7: Fit
// Trains the decision tree: grows it on the full dataset, then prunes it.
// FtrVV  : feature matrix — features in rows, instances in columns
// ClassV : per-instance class labels
// Notify : progress/log sink
void TDecisionTree::Fit(const TFltVV& FtrVV, const TFltV& ClassV, const PNotify& Notify) {
    const int NInst = FtrVV.GetCols();
    Notify->OnNotifyFmt(TNotifyType::ntInfo, "Building a decision tree on %d instances ...", NInst);
    Grow(FtrVV, ClassV, Notify);
    Prune(Notify);
    Notify->OnNotifyFmt(TNotifyType::ntInfo, "Done!");
}