This article collects typical usage examples of the C++ TFltVV class. If you are wondering what TFltVV is for, how to use it, or want to see it in real code, the curated class examples below may help.
The following shows 15 code examples that use the TFltVV class, sorted by popularity.
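Before the examples, here is a minimal sketch of creating and accessing a TFltVV (a dense matrix of TFlt values from the SNAP/QMiner GLib). It is an illustration rather than a complete reference: it only relies on calls that also appear in the examples below (Gen, At, operator(), GetRows/GetCols, GetXDim/GetYDim), and the include path is an assumption that may differ between projects.
// Minimal TFltVV sketch; assumes the GLib TVVec<TFlt> API used in the examples below
#include "base.h" // GLib core header; the exact include depends on the project setup

void TFltVVBasics() {
  TFltVV M;
  M.Gen(2, 3); // allocate a 2 x 3 matrix; TFlt elements start at 0.0
  for (int i = 0; i < M.GetRows(); i++) {
    for (int j = 0; j < M.GetCols(); j++) {
      M.At(i, j) = i * 10.0 + j; // At(i,j) and operator()(i,j) address the same element
    }
  }
  printf("dims: %d x %d, M(1,2) = %g\n", M.GetXDim(), M.GetYDim(), M(1, 2).Val);
}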
Example 1: GetEigVec
// to get first few eigenvectors
void GetEigVec(const PUNGraph& Graph, const int& EigVecs, TFltV& EigValV, TVec<TFltV>& EigVecV) {
  const int Nodes = Graph->GetNodes();
  // Lanczos
  TUNGraphMtx GraphMtx(Graph);
  int CalcVals = int(2*EigVecs);
  if (CalcVals > Nodes) { CalcVals = Nodes; }
  TFltVV EigVecVV;
  //while (EigValV.Len() < EigVecs && CalcVals < 10*EigVecs) {
  try {
    TSparseSVD::Lanczos(GraphMtx, EigVecs, 2*EigVecs, ssotFull, EigValV, EigVecVV, false);
  }
  catch (...) {
    printf("\n ***EXCEPTION: TRIED %d GOT %d values** \n", CalcVals, EigValV.Len());
  }
  if (EigValV.Len() < EigVecs) {
    printf(" ***TRIED %d GOT %d values** \n", CalcVals, EigValV.Len());
  }
  // CalcVals += EigVecs;
  //}
  TFltIntPrV EigValIdV;
  for (int i = 0; i < EigValV.Len(); i++) {
    EigValIdV.Add(TFltIntPr(EigValV[i], i));
  }
  EigValIdV.Sort(false);
  EigValV.Sort(false);
  for (int v = 0; v < EigValIdV.Len(); v++) { // vector components are not sorted!!!
    EigVecV.Add();
    EigVecVV.GetCol(EigValIdV[v].Val2, EigVecV.Last());
  }
  IsAllValVNeg(EigVecV[0], true);
}
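A hypothetical call site for GetEigVec is sketched below. It assumes the standard SNAP graph-construction calls (TUNGraph::New, AddNode, AddEdge) and a graph whose node ids are 0..N-1; it only shows how the returned eigenvalues and eigenvectors line up.
// Hypothetical usage sketch for GetEigVec; assumes SNAP's TUNGraph API and node ids 0..N-1
PUNGraph Graph = TUNGraph::New();
for (int i = 0; i < 5; i++) { Graph->AddNode(i); }
for (int i = 0; i < 5; i++) { Graph->AddEdge(i, (i + 1) % 5); } // 5-node cycle

TFltV EigValV;
TVec<TFltV> EigVecV;
GetEigVec(Graph, 2, EigValV, EigVecV); // ask for the 2 leading eigenpairs

for (int v = 0; v < EigValV.Len(); v++) {
  printf("lambda_%d = %f (eigenvector of length %d)\n", v, EigValV[v].Val, EigVecV[v].Len());
}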
Example 2: LeftInstNV
void TDecisionTree::TNode::Split(const TFltVV& FtrVV, const TFltV& ClassV, const TIntV& InstNV) {
  // construct the children
  int NInstLeft = 0;
  for (int i = 0; i < NExamples; i++) {
    AssertR(0 <= InstNV[i] && InstNV[i] < FtrVV.GetCols(), "Invalid instance index: " + TInt::GetStr(InstNV[i]) + "!");
    if (FtrVV(CutFtrN, InstNV[i]) <= CutFtrVal) {
      NInstLeft++;
    }
  }
  TIntV LeftInstNV(NInstLeft, 0);
  TIntV RightInstNV(NExamples - NInstLeft, 0);
  int InstN;
  for (int i = 0; i < NExamples; i++) {
    InstN = InstNV[i];
    AssertR(0 <= InstN && InstN < FtrVV.GetCols(), "Invalid instance index: " + TInt::GetStr(InstN) + "!");
    if (FtrVV(CutFtrN, InstN) <= CutFtrVal) {
      LeftInstNV.Add(InstN);
    } else {
      RightInstNV.Add(InstN);
    }
  }
  Left = new TNode(Tree);
  Right = new TNode(Tree);
  Left->Fit(FtrVV, ClassV, LeftInstNV);
  Right->Fit(FtrVV, ClassV, RightInstNV);
}
Example 3: ValClassPrV
void TDecisionTree::TNode::Fit(const TFltVV& FtrVV, const TFltV& ClassV, const TIntV& InstNV) {
  EAssert(!InstNV.Empty());
  const int Dim = FtrVV.GetRows();
  NExamples = InstNV.Len();
  ClassHist.Gen(2);
  FtrHist.Gen(Dim);
  {
    int TotalPos = 0;
    double BestScore = TFlt::NInf, CutVal = TFlt::NInf, Score = TFlt::NInf;
    for (int i = 0; i < NExamples; i++) {
      AssertR(0 <= InstNV[i] && InstNV[i] < FtrVV.GetCols(), "Invalid instance index: " + TInt::GetStr(InstNV[i]) + "!");
      TotalPos += (int) ClassV[InstNV[i]];
    }
    ClassHist[0] = 1 - double(TotalPos) / NExamples;
    ClassHist[1] = 1 - ClassHist[0];
    TFltIntPrV ValClassPrV(NExamples);
    // get the best score and cut value
    int InstN;
    for (int FtrN = 0; FtrN < Dim; FtrN++) {
      double FtrSum = 0;
      for (int i = 0; i < NExamples; i++) {
        InstN = InstNV[i];
        AssertR(0 <= InstN && InstN < FtrVV.GetCols(), "Invalid instance index: " + TInt::GetStr(InstN) + "!");
        ValClassPrV[i].Val1 = FtrVV(FtrN, InstN);
        ValClassPrV[i].Val2 = (int) ClassV[InstN];
        FtrSum += FtrVV(FtrN, InstN);
      }
      ValClassPrV.Sort(true); // have to sort to speed up the calculation
      if (CanSplitNumFtr(ValClassPrV, TotalPos, CutVal, Score) && Score > BestScore) {
        BestScore = Score;
        CutFtrN = FtrN;
        CutFtrVal = CutVal;
      }
      FtrHist[FtrN] = FtrSum / NExamples;
    }
  }
  // cut the dataset into left and right and build the tree recursively
  if (ShouldGrow() && CutFtrN >= 0) {
    EAssert(CutFtrN < Dim);
    // the best attribute is now selected, calculate the correlation between the
    // selected attribute and other attributes, then split the node
    CalcCorrFtrV(FtrVV, InstNV);
    Split(FtrVV, ClassV, InstNV);
  }
}
Example 4: GetStr
TStr TStrUtil::GetStr(const TFltVV& FltVV, const TStr& DelimiterStr, const TStr& FmtStr) {
  TChA ResChA;
  for (int i = 0; i < FltVV.GetXDim(); i++) {
    for (int j = 0; j < FltVV.GetYDim(); j++) {
      ResChA += TFlt::GetStr(FltVV(i,j), FmtStr);
      if (j < FltVV.GetYDim() - 1) { ResChA += DelimiterStr; }
    }
    if (i < FltVV.GetXDim() - 1) { ResChA += '\n'; }
  }
  return ResChA;
}
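As an illustration, formatting a small matrix with this helper could look like the sketch below; the ", " delimiter and the "%.2f" format string are arbitrary choices for the example, not requirements of the API.
// Hypothetical usage sketch for TStrUtil::GetStr
TFltVV M;
M.Gen(2, 2);
M(0,0) = 1.0;  M(0,1) = 2.5;
M(1,0) = -3.0; M(1,1) = 4.75;

TStr MatStr = TStrUtil::GetStr(M, ", ", "%.2f");
printf("%s\n", MatStr.CStr());
// should print one matrix row per line, e.g.:
// 1.00, 2.50
// -3.00, 4.75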
Example 5: CalcKernelMatrix
//////////////////////////////////////////////////////////////////////////
// Kernel utilities
void TKernelUtil::CalcKernelMatrix(PSVMTrainSet Set, TFltVV& K) {
  const int Size = Set->Len(); K.Gen(Size, Size);
  for (int i = 0; i < Size; i++) {
    // the kernel matrix is symmetric, so only the upper triangle is computed
    for (int j = i; j < Size; j++)
      K(i,j) = K(j,i) = Set->DotProduct(i,j);
  }
}
Example 6: CutFtrV
void TDecisionTree::TNode::CalcCorrFtrV(const TFltVV& FtrVV, const TIntV& InstNV) {
  if (Tree->IsCalcCorr()) {
    const int Dim = FtrVV.GetRows();
    CutFtrCorrFtrNPValTrV.Gen(Dim-1, 0);
    TFltV CutFtrV(NExamples), OthrFtrV(NExamples);
    for (int i = 0; i < NExamples; i++) {
      CutFtrV[i] = FtrVV(CutFtrN, InstNV[i]);
    }
    for (int FtrN = 0; FtrN < Dim; FtrN++) {
      if (FtrN != CutFtrN) {
        for (int i = 0; i < NExamples; i++) {
          OthrFtrV[i] = FtrVV(FtrN, InstNV[i]);
        }
        TCorr Corr(CutFtrV, OthrFtrV);
        CutFtrCorrFtrNPValTrV.Add(TFltIntFltTr(Corr.GetCorrCf(), FtrN, Corr.GetCorrCfPrb()));
      }
    }
    CutFtrCorrFtrNPValTrV.Sort(false);
  }
}
Example 7: GetNewtonStep
void TLogRegFit::GetNewtonStep(TFltVV& HVV, const TFltV& GradV, TFltV& DeltaLV) {
  bool HSingular = false;
  // start from a diagonal (Jacobi) step; patch zero diagonal entries so the division is defined
  for (int i = 0; i < HVV.GetXDim(); i++) {
    if (HVV(i,i) == 0.0) {
      HVV(i,i) = 0.001;
      HSingular = true;
    }
    DeltaLV[i] = GradV[i] / HVV(i, i);
  }
  // solve the full symmetric system only when the diagonal did not have to be patched
  if (! HSingular) {
    if (HVV(0, 0) < 0) { // if Hessian is negative definite, convert it to positive definite
      for (int r = 0; r < Theta.Len(); r++) {
        for (int c = 0; c < Theta.Len(); c++) {
          HVV(r, c) = - HVV(r, c);
        }
      }
      TNumericalStuff::SolveSymetricSystem(HVV, GradV, DeltaLV);
    }
    else {
      TNumericalStuff::SolveSymetricSystem(HVV, GradV, DeltaLV);
      for (int i = 0; i < DeltaLV.Len(); i++) {
        DeltaLV[i] = - DeltaLV[i];
      }
    }
  }
}
Example 8: PredictInternal
void TPropHazards::PredictInternal(const TFltVV& X, TFltV& IntensV) const {
  const int NInst = X.GetCols();
  // intensity for each instance (column of X) is exp(w'x)
  TLinAlg::MultiplyT(X, WgtV, IntensV);
  for (int i = 0; i < NInst; i++) {
    IntensV[i] = exp(IntensV[i]);
  }
}
Example 9: Grow
void TDecisionTree::Grow(const TFltVV& FtrVV, const TFltV& ClassV, const PNotify& Notify) {
  CleanUp();
  const int NInst = FtrVV.GetCols();
  TIntV RangeV(NInst); TLinAlgTransform::RangeV(NInst, RangeV);
  Root = new TNode(this);
  Root->Fit(FtrVV, ClassV, RangeV);
}
Example 10: CenterKernelMatrix
void TKernelUtil::CenterKernelMatrix(TFltVV& K) {
  IAssert(K.GetXDim() == K.GetYDim());
  const int l = K.GetYDim();
  TFltV jK(l); // j'K -- column sums (j is the all-ones vector)
  double jKj = 0.0; // j'Kj -- sum of all entries
  for (int j = 0; j < l; j++) {
    jK[j] = 0.0;
    for (int i = 0; i < l; i++)
      jK[j] += K(i,j);
    jKj += jK[j];
  }
  // K(i,j) <- K(i,j) - colMean_j - colMean_i + grandMean, i.e. centering the data in feature space
  double invl = 1.0/l;
  for (int i = 0; i < l; i++) {
    for (int j = 0; j < l; j++)
      K(i,j) = K(i,j) - invl*jK[j] - invl*jK[i] + invl*invl*jKj;
  }
}
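A quick way to sanity-check the centering is to run it on a small hand-built symmetric matrix and verify that every row (and, by symmetry, every column) of the centered matrix sums to roughly zero; the values below are arbitrary.
// Sanity-check sketch for TKernelUtil::CenterKernelMatrix (arbitrary symmetric test matrix)
TFltVV K;
K.Gen(3, 3);
K(0,0) = 2.0;  K(0,1) = 1.0;  K(0,2) = 0.5;
K(1,0) = 1.0;  K(1,1) = 3.0;  K(1,2) = 0.25;
K(2,0) = 0.5;  K(2,1) = 0.25; K(2,2) = 1.0;

TKernelUtil::CenterKernelMatrix(K);

for (int i = 0; i < K.GetXDim(); i++) {
  double RowSum = 0.0;
  for (int j = 0; j < K.GetYDim(); j++) { RowSum += K(i,j); }
  printf("row %d sum = %g\n", i, RowSum); // each row sum should be ~0 up to rounding error
}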
Example 11: PMultiply
// Result = A * B(:,ColId)
void TUNGraphMtx::PMultiply(const TFltVV& B, int ColId, TFltV& Result) const {
  const int RowN = GetRows();
  Assert(B.GetRows() >= RowN && Result.Len() >= RowN);
  const THash<TInt, TUNGraph::TNode>& NodeH = Graph->NodeH;
  for (int j = 0; j < RowN; j++) {
    const TIntV& RowV = NodeH[j].NIdV;
    Result[j] = 0.0;
    for (int i = 0; i < RowV.Len(); i++) {
      Result[j] += B(RowV[i], ColId);
    }
  }
}
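The sketch below shows one hypothetical use: a single step of power iteration, multiplying the adjacency matrix by a column vector stored as the only column of a TFltVV. It assumes the SNAP TUNGraph construction calls and node ids 0..N-1, and reuses the TUNGraphMtx wrapper already seen in Examples 1 and 14.
// Hypothetical sketch: one power-iteration step via TUNGraphMtx::PMultiply
PUNGraph Graph = TUNGraph::New();
for (int i = 0; i < 4; i++) { Graph->AddNode(i); }
Graph->AddEdge(0,1); Graph->AddEdge(1,2); Graph->AddEdge(2,3); Graph->AddEdge(3,0);

const int Nodes = Graph->GetNodes();
TUNGraphMtx GraphMtx(Graph);

TFltVV B(Nodes, 1); // single-column matrix holding the current iterate
for (int i = 0; i < Nodes; i++) { B(i, 0) = 1.0 / Nodes; }

TFltV Result(Nodes);
GraphMtx.PMultiply(B, 0, Result); // Result = A * B(:,0)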
Example 12: PMultiplyT
// Result = A' * B(:,ColId)
void TNGraphMtx::PMultiplyT(const TFltVV& B, int ColId, TFltV& Result) const {
  const int ColN = GetCols();
  Assert(B.GetRows() >= ColN && Result.Len() >= ColN);
  const THash<TInt, TNGraph::TNode>& NodeH = Graph->NodeH;
  for (int i = 0; i < ColN; i++) Result[i] = 0.0;
  for (int j = 0; j < ColN; j++) {
    const TIntV& RowV = NodeH[j].OutNIdV;
    for (int i = 0; i < RowV.Len(); i++) {
      Result[RowV[i]] += B(j, ColId);
    }
  }
}
Example 13: PMultiplyT
void TBowMatrix::PMultiplyT(const TFltVV& B, int ColId, TFltV& Result) const {
  IAssert(B.GetXDim() >= PGetRows() && Result.Len() >= PGetCols());
  int ColN = PGetCols();
  int i, j, len; //TFlt *ResV = Result.BegI();
  for (j = 0; j < ColN; j++) {
    PBowSpV ColV = ColSpVV[j];
    len = ColV->Len(); Result[j] = 0.0;
    for (i = 0; i < len; i++) {
      Result[j] += ColV->GetWgt(i) * B(ColV->GetWId(i), ColId);
    }
  }
}
Example 14: GetInvParticipRat
void GetInvParticipRat(const PUNGraph& Graph, int MaxEigVecs, int TimeLimit, TFltPrV& EigValIprV) {
  TUNGraphMtx GraphMtx(Graph);
  TFltVV EigVecVV;
  TFltV EigValV;
  TExeTm ExeTm;
  if (MaxEigVecs <= 1) { MaxEigVecs = 1000; }
  int EigVecs = TMath::Mn(Graph->GetNodes(), MaxEigVecs);
  printf("start %d vecs...", EigVecs);
  try {
    TSparseSVD::Lanczos2(GraphMtx, EigVecs, TimeLimit, ssotFull, EigValV, EigVecVV, false);
  } catch (...) {
    printf("\n ***EXCEPTION: TRIED %d GOT %d values** \n", EigVecs, EigValV.Len());
  }
  printf(" ***TRIED %d GOT %d values in %s\n", EigVecs, EigValV.Len(), ExeTm.GetStr());
  TFltV EigVec;
  EigValIprV.Clr();
  if (EigValV.Empty()) { return; }
  for (int v = 0; v < EigVecVV.GetCols(); v++) {
    EigVecVV.GetCol(v, EigVec);
    EigValIprV.Add(TFltPr(EigValV[v], GetInvParticipRat(EigVec)));
  }
  EigValIprV.Sort();
}
Example 15: Hessian
void TLogRegFit::Hessian(TFltVV& HVV) {
  HVV.Gen(Theta.Len(), Theta.Len());
  TFltV OutV;
  TLogRegPredict::GetCfy(X, OutV, Theta);
  // Hessian of the logistic log-likelihood: H(r,c) = -sum_i X[i][r] * p_i * (1 - p_i) * X[i][c]
  for (int i = 0; i < X.Len(); i++) {
    for (int r = 0; r < Theta.Len(); r++) {
      HVV.At(r, r) += - (X[i][r] * OutV[i] * (1 - OutV[i]) * X[i][r]);
      for (int c = r + 1; c < Theta.Len(); c++) {
        HVV.At(r, c) += - (X[i][r] * OutV[i] * (1 - OutV[i]) * X[i][c]);
        HVV.At(c, r) += - (X[i][r] * OutV[i] * (1 - OutV[i]) * X[i][c]);
      }
    }
  }
  /*
  printf("\n");
  for (int r = 0; r < Theta.Len(); r++) {
    for (int c = 0; c < Theta.Len(); c++) {
      printf("%f\t", HVV.At(r, c).Val);
    }
    printf("\n");
  }
  */
}