This article collects typical usage examples of the C++ method TFltVV::GetRows. If you are unsure what TFltVV::GetRows does, how to use it, or what real calling code looks like, the curated examples below should help. You can also explore further usage examples of the containing class, TFltVV.
Five code examples of TFltVV::GetRows are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better C++ examples.
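Before the examples, a minimal sketch of the class itself may help. The snippet below is not taken from any of the examples; it assumes the usual GLib/QMiner semantics, where TFltVV is a dense matrix of TFlt values, GetRows() and GetCols() return its dimensions, and operator()(RowN, ColN) accesses one element.

// Minimal sketch (assumed semantics, see above): fill a 3 x 4 matrix
// by iterating over its dimensions.
TFltVV Mat(3, 4);                               // 3 rows, 4 columns
for (int RowN = 0; RowN < Mat.GetRows(); RowN++) {
    for (int ColN = 0; ColN < Mat.GetCols(); ColN++) {
        Mat(RowN, ColN) = RowN * Mat.GetCols() + ColN;
    }
}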
Example 1: CutFtrV
void TDecisionTree::TNode::CalcCorrFtrV(const TFltVV& FtrVV, const TIntV& InstNV) {
    if (Tree->IsCalcCorr()) {
        const int Dim = FtrVV.GetRows();
        CutFtrCorrFtrNPValTrV.Gen(Dim-1, 0);

        // extract the column of the cut feature for the instances in this node
        TFltV CutFtrV(NExamples), OthrFtrV(NExamples);
        for (int i = 0; i < NExamples; i++) {
            CutFtrV[i] = FtrVV(CutFtrN, InstNV[i]);
        }

        // correlate the cut feature against every other feature
        for (int FtrN = 0; FtrN < Dim; FtrN++) {
            if (FtrN != CutFtrN) {
                for (int i = 0; i < NExamples; i++) {
                    OthrFtrV[i] = FtrVV(FtrN, InstNV[i]);
                }

                TCorr Corr(CutFtrV, OthrFtrV);
                CutFtrCorrFtrNPValTrV.Add(TFltIntFltTr(Corr.GetCorrCf(), FtrN, Corr.GetCorrCfPrb()));
            }
        }

        // sort by correlation coefficient, descending
        CutFtrCorrFtrNPValTrV.Sort(false);
    }
}
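TCorr here appears to compute the Pearson correlation coefficient (GetCorrCf) together with its significance (GetCorrCfPrb). For reference, a self-contained sketch of the coefficient itself; the helper name PearsonCorr is ours, not part of the library:

#include <cmath>
#include <vector>

// Pearson correlation of two equal-length samples; an illustrative
// stand-in for what TCorr(CutFtrV, OthrFtrV).GetCorrCf() returns.
double PearsonCorr(const std::vector<double>& x, const std::vector<double>& y) {
    const int n = (int) x.size();
    double MeanX = 0, MeanY = 0;
    for (int i = 0; i < n; i++) { MeanX += x[i]; MeanY += y[i]; }
    MeanX /= n; MeanY /= n;

    double Cov = 0, VarX = 0, VarY = 0;
    for (int i = 0; i < n; i++) {
        Cov  += (x[i] - MeanX) * (y[i] - MeanY);
        VarX += (x[i] - MeanX) * (x[i] - MeanX);
        VarY += (y[i] - MeanY) * (y[i] - MeanY);
    }
    return Cov / std::sqrt(VarX * VarY);
}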
Example 2: ValClassPrV
void TDecisionTree::TNode::Fit(const TFltVV& FtrVV, const TFltV& ClassV, const TIntV& InstNV) {
    EAssert(!InstNV.Empty());

    const int Dim = FtrVV.GetRows();
    NExamples = InstNV.Len();

    ClassHist.Gen(2);
    FtrHist.Gen(Dim);

    {
        int TotalPos = 0;
        double BestScore = TFlt::NInf, CutVal = TFlt::NInf, Score = TFlt::NInf;

        for (int i = 0; i < NExamples; i++) {
            AssertR(0 <= InstNV[i] && InstNV[i] < FtrVV.GetCols(), "Invalid instance index: " + TInt::GetStr(InstNV[i]) + "!");
            TotalPos += (int) ClassV[InstNV[i]];
        }

        // class histogram: fraction of negative (index 0) and positive (index 1) examples
        ClassHist[0] = 1 - double(TotalPos) / NExamples;
        ClassHist[1] = 1 - ClassHist[0];

        TFltIntPrV ValClassPrV(NExamples);

        // get the best score and cut value
        int InstN;
        for (int FtrN = 0; FtrN < Dim; FtrN++) {
            double FtrSum = 0;
            for (int i = 0; i < NExamples; i++) {
                InstN = InstNV[i];
                AssertR(0 <= InstN && InstN < FtrVV.GetCols(), "Invalid instance index: " + TInt::GetStr(InstN) + "!");

                ValClassPrV[i].Val1 = FtrVV(FtrN, InstN);
                ValClassPrV[i].Val2 = (int) ClassV[InstN];
                FtrSum += FtrVV(FtrN, InstN);
            }

            ValClassPrV.Sort(true);    // have to sort to speed up the calculation

            if (CanSplitNumFtr(ValClassPrV, TotalPos, CutVal, Score) && Score > BestScore) {
                BestScore = Score;
                CutFtrN = FtrN;
                CutFtrVal = CutVal;
            }

            FtrHist[FtrN] = FtrSum / NExamples;
        }
    }

    // cut the dataset into left and right and build the tree recursively
    if (ShouldGrow() && CutFtrN >= 0) {
        EAssert(CutFtrN < Dim);
        // the best attribute is now selected, calculate the correlation between the
        // selected attribute and other attributes, then split the node
        CalcCorrFtrV(FtrVV, InstNV);
        Split(FtrVV, ClassV, InstNV);
    }
}
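CanSplitNumFtr is not shown on this page; the sort on ValClassPrV suggests the usual single-pass threshold search over sorted feature values. Below is a hedged sketch of that pattern using Gini impurity; the criterion and the helper name BestNumSplit are our assumptions, not necessarily what the library uses.

#include <utility>
#include <vector>

// Single pass over (value, class) pairs sorted by value: try every midpoint
// between distinct consecutive values as a cut and keep the best Gini gain.
// Illustrative only; the real CanSplitNumFtr may use a different score.
bool BestNumSplit(const std::vector<std::pair<double,int>>& ValClassPrV,
                  const int& TotalPos, double& CutVal, double& Score) {
    const int n = (int) ValClassPrV.size();
    auto Gini = [](const double& Pos, const double& All) {
        const double p = Pos / All;
        return 1.0 - p*p - (1.0-p)*(1.0-p);
    };

    const double RootGini = Gini(TotalPos, n);
    Score = -1;       // best impurity decrease found so far
    int PosLeft = 0;  // positives among the first i+1 (left-side) examples

    for (int i = 0; i < n-1; i++) {
        PosLeft += ValClassPrV[i].second;
        // only cut between distinct feature values
        if (ValClassPrV[i].first == ValClassPrV[i+1].first) { continue; }

        const int nLeft = i + 1, nRight = n - nLeft;
        const double Gain = RootGini
            - (double(nLeft) / n) * Gini(PosLeft, nLeft)
            - (double(nRight) / n) * Gini(TotalPos - PosLeft, nRight);

        if (Gain > Score) {
            Score = Gain;
            CutVal = 0.5 * (ValClassPrV[i].first + ValClassPrV[i+1].first);
        }
    }
    return Score > 0;
}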
Example 3: PMultiply
// Result = A * B(:,ColId)
void TUNGraphMtx::PMultiply(const TFltVV& B, int ColId, TFltV& Result) const {
    const int RowN = GetRows();
    Assert(B.GetRows() >= RowN && Result.Len() >= RowN);
    const THash<TInt, TUNGraph::TNode>& NodeH = Graph->NodeH;
    for (int j = 0; j < RowN; j++) {
        const TIntV& RowV = NodeH[j].NIdV;
        Result[j] = 0.0;
        for (int i = 0; i < RowV.Len(); i++) {
            Result[j] += B(RowV[i], ColId);
        }
    }
}
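The loop works because row j of a 0/1 adjacency matrix has ones exactly in the columns of j's neighbors, so the dense dot product collapses to a sum over the neighbor list. The same idea in plain C++, with the graph stored as adjacency lists (the types and function name are ours, for illustration):

#include <vector>

// (A * b)[j] = sum of b[i] over neighbors i of node j, when A is the 0/1
// adjacency matrix of an undirected graph stored as adjacency lists.
std::vector<double> AdjTimesVec(const std::vector<std::vector<int>>& Adj,
                                const std::vector<double>& b) {
    std::vector<double> Result(Adj.size(), 0.0);
    for (size_t j = 0; j < Adj.size(); j++) {
        for (int i : Adj[j]) { Result[j] += b[i]; }
    }
    return Result;
}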
Example 4: PMultiplyT
// Result = A' * B(:,ColId)
void TNGraphMtx::PMultiplyT(const TFltVV& B, int ColId, TFltV& Result) const {
    const int ColN = GetCols();
    Assert(B.GetRows() >= ColN && Result.Len() >= ColN);
    const THash<TInt, TNGraph::TNode>& NodeH = Graph->NodeH;
    for (int i = 0; i < ColN; i++) Result[i] = 0.0;
    for (int j = 0; j < ColN; j++) {
        const TIntV& RowV = NodeH[j].OutNIdV;
        for (int i = 0; i < RowV.Len(); i++) {
            Result[RowV[i]] += B(j, ColId);
        }
    }
}
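The transposed product is organized as a scatter instead of a gather: entry i of A'*b sums b[j] over all edges j -> i, so each node j adds its value into all of its out-neighbors. The same pattern in plain C++ (illustrative types again):

#include <vector>

// (A' * b)[i] = sum of b[j] over nodes j with an out-edge j -> i;
// computed by scattering each b[j] into j's out-neighbors.
std::vector<double> AdjTransposeTimesVec(const std::vector<std::vector<int>>& OutAdj,
                                         const std::vector<double>& b) {
    std::vector<double> Result(OutAdj.size(), 0.0);
    for (size_t j = 0; j < OutAdj.size(); j++) {
        for (int i : OutAdj[j]) { Result[i] += b[j]; }
    }
    return Result;
}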
Example 5: Fit
void TPropHazards::Fit(const TFltVV& _X, const TFltV& t, const double& Eps) {
    const int NInst = _X.GetCols();
    const int Dim = _X.GetRows() + 1;

    EAssertR(NInst == t.Len(), "TPropHazards::Fit the number of instances in X.GetCols() and t.Len() do not match");
    Notify->OnNotifyFmt(TNotifyType::ntInfo, "Fitting proportional hazards model on %d instances ...", NInst);

    // prepend a constant row of ones (the intercept / base hazard)
    TFltVV X(_X.GetRows()+1, NInst);
    for (int ColN = 0; ColN < NInst; ColN++) {
        X(0, ColN) = 1;
        for (int RowN = 0; RowN < _X.GetRows(); RowN++) {
            X(RowN+1, ColN) = _X(RowN, ColN);
        }
    }

    WgtV.Gen(Dim);

    TFltVV X_t(X);  X_t.Transpose();    // TODO slow
    TFltVV XTimesW(X.GetRows(), X.GetCols());
    TFltVV H(Dim, Dim);
    TFltV TempNInstV(NInst, NInst);
    TFltV GradV(Dim, Dim);
    TFltV DeltaWgtV(Dim, Dim);
    TSpVV WgtColSpVV(NInst, NInst);

    double IntensTimesT;

    // generate the weight matrix with only ones on the diagonal,
    // so we don't recreate all the objects on every iteration
    for (int i = 0; i < NInst; i++) {
        WgtColSpVV[i].Add(TIntFltKd(i, 1));
    }

    int k = 0;
    double Diff = TFlt::PInf;
    while (Diff > Eps) {
        // construct the intensity vector
        PredictInternal(X, TempNInstV);

        // I) construct the Hessian: X*W*X' + lambda*I
        // prepare W and t .* intens - 1
        for (int i = 0; i < NInst; i++) {
            IntensTimesT = t[i] * TempNInstV[i];
            TempNInstV[i] = IntensTimesT - 1;
            WgtColSpVV[i][0].Dat = IntensTimesT;
        }
        // 1) compute X*W
        TLinAlg::Multiply(X, WgtColSpVV, XTimesW);
        // 2) compute (X*W)*X'
        TLinAlg::Multiply(XTimesW, X_t, H);
        // 3) (X*W*X') + lambda*I, exclude the base hazard
        if (Lambda > 0) {
            for (int i = 1; i < Dim; i++) {
                H(i,i) += Lambda;
            }
        }

        // II) construct the gradient: (t .* intens - 1) * X' + lambda*[0, w(2:end)]
        // 1) (t .* intens - 1) * X'
        TLinAlg::Multiply(X, TempNInstV, GradV);
        // 2) ((t .* intens - 1) * X') + lambda*[0, w(2:end)]
        if (Lambda > 0) {
            for (int i = 1; i < Dim; i++) {
                GradV[i] += Lambda * WgtV[i];
            }
        }

        // III) compute: delta_w = H \ grad
#ifdef LAPACKE
        const TFlt SingEps = 1e-10;
        if (H.GetRows() == 1) {    // fix for a bug in SVD factorization
            DeltaWgtV[0] = GradV[0] / H(0,0);
        } else {
            TLinAlg::SVDSolve(H, DeltaWgtV, GradV, SingEps);
        }
#else
        throw TExcept::New("Should include LAPACKE!!");
#endif

        // IV) w <- w - delta_w
        for (int i = 0; i < Dim; i++) {
            WgtV[i] -= DeltaWgtV[i];
        }

        Diff = TLinAlg::Norm(DeltaWgtV);
        EAssertR(!TFlt::IsNan(Diff), "nans in delta wgt vector!");

        if (++k % 10 == 0) {
            Notify->OnNotifyFmt(TNotifyType::ntInfo, "Step: %d, diff: %.3f", k, Diff);
        }
    }

    Notify->OnNotifyFmt(TNotifyType::ntInfo, "Converged. Diff: %.5f", Diff);
}
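The while loop is a plain Newton iteration: build the Hessian H = X*W*X' + lambda*I and the gradient g, solve H * delta_w = g, and update w <- w - delta_w until the step norm falls below Eps. A stripped-down one-dimensional illustration of the same loop structure; the objective here is arbitrary, chosen only to make the sketch self-contained:

#include <cmath>
#include <cstdio>

// Newton's method on f(w) = exp(w) - 2w: gradient f'(w) = exp(w) - 2,
// Hessian f''(w) = exp(w); update w <- w - f'(w)/f''(w) until the step
// is small, mirroring the structure of TPropHazards::Fit above.
int main() {
    double w = 0.0, Diff = 1e100;
    const double Eps = 1e-8;
    while (Diff > Eps) {
        const double Grad = std::exp(w) - 2.0;   // gradient
        const double H = std::exp(w);            // Hessian (1x1)
        const double DeltaW = Grad / H;          // solve H * dw = grad
        w -= DeltaW;                             // w <- w - dw
        Diff = std::fabs(DeltaW);
    }
    std::printf("minimum at w = %.6f (expected ln 2 = %.6f)\n", w, std::log(2.0));
    return 0;
}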