This article collects typical usage examples of the C++ method FVector::cbegin. If you have been asking yourself what FVector::cbegin does, how to call it, or where to find working examples, the hand-picked snippets below should help; they are also a good starting point for learning more about the enclosing class, FVector.
Eight code examples of FVector::cbegin are shown below, ordered by popularity. They appear to be drawn from the Moses statistical machine translation toolkit, where FVector stores dense ("core") features alongside a sparse map of named feature values.
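Before the individual examples, here is a minimal sketch of the iteration pattern they all share: cbegin()/cend() walk the sparse (feature name, value) entries of an FVector. The header path and the Moses namespace used below are assumptions, so adjust them to your checkout.

// Minimal sketch, assuming Moses' FVector API; header path and namespace
// are assumptions. cbegin()/cend() iterate over the sparse (name, value)
// entries of the vector.
#include <ostream>
#include "moses/FeatureVector.h"   // assumed location of the FVector class

void PrintSparseFeatures(std::ostream& out, const Moses::FVector& fv)
{
  for (Moses::FVector::FNVmap::const_iterator i = fv.cbegin(); i != fv.cend(); ++i) {
    if (i->second != 0)                       // skip zero-valued entries
      out << " " << i->first << "=" << i->second;
  }
}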
Example 1: OutputFeatureScores
void OutputFeatureScores( std::ostream& out
                          , const ScoreComponentCollection &features
                          , const FeatureFunction *ff
                          , std::string &lastName )
{
  const StaticData &staticData = StaticData::Instance();
  bool labeledOutput = staticData.IsLabeledNBestList();

  // regular features (not sparse)
  if (ff->GetNumScoreComponents() != 0) {
    if( labeledOutput && lastName != ff->GetScoreProducerDescription() ) {
      lastName = ff->GetScoreProducerDescription();
      out << " " << lastName << "=";
    }
    vector<float> scores = features.GetScoresForProducer( ff );
    for (size_t j = 0; j < scores.size(); ++j) {
      out << " " << scores[j];
    }
  }

  // sparse features
  const FVector scores = features.GetVectorForProducer( ff );
  for (FVector::FNVmap::const_iterator i = scores.cbegin(); i != scores.cend(); i++) {
    out << " " << i->first << "= " << i->second;
  }
}
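To make the resulting output format concrete, the following self-contained illustration reproduces the same logic with standard containers only; the feature names and values are hypothetical, not Moses output. Dense scores are printed as a labeled block of numbers, sparse scores as individual name/value pairs.

// Illustration only: mimics the formatting above with standard containers.
// Names and values are hypothetical.
#include <iostream>
#include <map>
#include <string>
#include <vector>

int main()
{
  std::vector<float> dense = {-34.2f, -1.7f};                 // dense scores of one producer
  std::map<std::string, float> sparse = {{"tm_pp_0", 0.5f}};  // sparse feature values

  std::cout << " LM=";                                        // labeled dense block
  for (size_t j = 0; j < dense.size(); ++j)
    std::cout << " " << dense[j];
  for (std::map<std::string, float>::const_iterator i = sparse.begin(); i != sparse.end(); ++i)
    std::cout << " " << i->first << "= " << i->second;        // sparse "name= value" pairs
  std::cout << std::endl;                                     // prints " LM= -34.2 -1.7 tm_pp_0= 0.5"
  return 0;
}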
Example 2: updateConfidenceCounts
void FVector::updateConfidenceCounts(const FVector& weightUpdate, bool signedCounts)
{
  // dense (core) features
  for (size_t i = 0; i < weightUpdate.m_coreFeatures.size(); ++i) {
    if (signedCounts) {
      //int sign = weightUpdate.m_coreFeatures[i] >= 0 ? 1 : -1;
      //m_coreFeatures[i] += (weightUpdate.m_coreFeatures[i] * weightUpdate.m_coreFeatures[i]) * sign;
      m_coreFeatures[i] += weightUpdate.m_coreFeatures[i];
    } else
      //m_coreFeatures[i] += (weightUpdate.m_coreFeatures[i] * weightUpdate.m_coreFeatures[i]);
      m_coreFeatures[i] += abs(weightUpdate.m_coreFeatures[i]);
  }

  // sparse features
  for (const_iterator i = weightUpdate.cbegin(); i != weightUpdate.cend(); ++i) {
    if (weightUpdate[i->first] == 0)
      continue;
    float value = get(i->first);
    if (signedCounts) {
      //int sign = weightUpdate[i->first] >= 0 ? 1 : -1;
      //value += (weightUpdate[i->first] * weightUpdate[i->first]) * sign;
      value += weightUpdate[i->first];
    } else
      //value += (weightUpdate[i->first] * weightUpdate[i->first]);
      value += abs(weightUpdate[i->first]);
    set(i->first, value);
  }
}
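The counts accumulated here feed the learning-rate update shown in Example 5 below. The sparse half of the logic can be sketched with standard containers (an illustration, not the Moses API): with signedCounts off, each feature accumulates the absolute size of its updates; with it on, updates of opposite sign can cancel.

// Sketch with standard containers (not the Moses API): accumulate per-feature
// confidence counts the same way the sparse loop above does.
#include <cmath>
#include <map>
#include <string>

void UpdateConfidenceCounts(std::map<std::string, float>& counts,
                            const std::map<std::string, float>& weightUpdate,
                            bool signedCounts)
{
  for (std::map<std::string, float>::const_iterator i = weightUpdate.begin();
       i != weightUpdate.end(); ++i) {
    if (i->second == 0) continue;  // features that did not move are skipped
    counts[i->first] += signedCounts ? i->second : std::fabs(i->second);
  }
}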
Example 3: OutputSparseFeatureScores
void IOWrapper::OutputSparseFeatureScores( std::ostream& out, const ChartTrellisPath &path, const FeatureFunction *ff, std::string &lastName )
{
  const StaticData &staticData = StaticData::Instance();
  bool labeledOutput = staticData.IsLabeledNBestList();
  const FVector scores = path.GetScoreBreakdown().GetVectorForProducer( ff );

  // report weighted aggregate
  if (! ff->GetSparseFeatureReporting()) {
    const FVector &weights = staticData.GetAllWeights().GetScoresVector();
    if (labeledOutput && !boost::contains(ff->GetScoreProducerDescription(), ":"))
      out << " " << ff->GetScoreProducerWeightShortName() << ":";
    out << " " << scores.inner_product(weights);
  }
  // report each feature
  else {
    for (FVector::FNVmap::const_iterator i = scores.cbegin(); i != scores.cend(); i++) {
      if (i->second != 0) { // do not report zero-valued features
        if (labeledOutput)
          out << " " << i->first << ":";
        out << " " << i->second;
      }
    }
  }
}
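The weighted aggregate reported by the first branch is a sparse dot product between the feature values and the global weight vector. A minimal sketch of that computation with standard containers (not the Moses inner_product implementation, which is assumed to compute the same quantity):

// Sketch of a sparse dot product with standard containers.
#include <map>
#include <string>

float SparseInnerProduct(const std::map<std::string, float>& scores,
                         const std::map<std::string, float>& weights)
{
  float sum = 0.0f;
  for (std::map<std::string, float>::const_iterator i = scores.begin();
       i != scores.end(); ++i) {
    std::map<std::string, float>::const_iterator w = weights.find(i->first);
    if (w != weights.end())
      sum += i->second * w->second;  // features without a weight contribute nothing
  }
  return sum;
}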
Example 4: setToBinaryOf
// count non-zero occurrences for all sparse features
void FVector::setToBinaryOf(const FVector& rhs)
{
  // sparse features: flag only those that are non-zero in rhs
  for (const_iterator i = rhs.cbegin(); i != rhs.cend(); ++i)
    if (rhs.get(i->first) != 0)
      set(i->first, 1);

  // core (dense) features are flagged unconditionally
  for (size_t i = 0; i < rhs.m_coreFeatures.size(); ++i)
    m_coreFeatures[i] = 1;
}
Example 5: updateLearningRates
void FVector::updateLearningRates(float decay_core, float decay_sparse, const FVector &confidenceCounts, float core_r0, float sparse_r0)
{
  // per-feature rate: 1 / (1/r0 + decay * |confidence count|)
  for (size_t i = 0; i < confidenceCounts.m_coreFeatures.size(); ++i) {
    m_coreFeatures[i] = 1.0/(1.0/core_r0 + decay_core * abs(confidenceCounts.m_coreFeatures[i]));
  }
  for (const_iterator i = confidenceCounts.cbegin(); i != confidenceCounts.cend(); ++i) {
    float value = 1.0/(1.0/sparse_r0 + decay_sparse * abs(i->second));
    set(i->first, value);
  }
}
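The schedule is rate = 1 / (1/r0 + decay * |confidence count|): a feature starts at r0 and its rate shrinks as it accumulates evidence. A tiny self-contained check of that arithmetic with hypothetical numbers:

// Worked example of the rate formula above (hypothetical values).
#include <cmath>
#include <cstdio>

int main()
{
  const float r0 = 1.0f, decay = 0.1f;
  const float counts[] = {0.0f, 5.0f, 50.0f};
  for (int k = 0; k < 3; ++k) {
    float rate = 1.0f / (1.0f / r0 + decay * std::fabs(counts[k]));
    std::printf("count=%4.1f  rate=%.4f\n", counts[k], rate);  // 1.0000, 0.6667, 0.1667
  }
  return 0;
}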
Example 6: operator==
bool FVector::operator== (const FVector& rhs) const
{
  if (this == &rhs) {
    return true;
  }
  if (m_coreFeatures.size() != rhs.m_coreFeatures.size()) {
    return false;
  }
  // compare dense (core) features
  for (size_t i = 0; i < m_coreFeatures.size(); ++i) {
    if (!equalsTolerance(m_coreFeatures[i], rhs.m_coreFeatures[i])) return false;
  }
  // compare sparse features, walking both vectors so that a key present
  // in only one of them is still checked
  for (const_iterator i = cbegin(); i != cend(); ++i) {
    if (!equalsTolerance(i->second, rhs.get(i->first))) return false;
  }
  for (const_iterator i = rhs.cbegin(); i != rhs.cend(); ++i) {
    if (!equalsTolerance(i->second, get(i->first))) return false;
  }
  return true;
}
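Both sparse maps are walked so that a feature present in only one vector is still compared, against whatever default get() returns for an unset feature. equalsTolerance is a Moses helper; below is a minimal sketch of a comparison in that spirit, not the real implementation, whose logic and threshold may differ.

// Minimal sketch of a tolerance-based float comparison; the actual Moses
// equalsTolerance helper and its threshold may differ.
#include <cmath>

inline bool EqualsTolerance(float lhs, float rhs, float eps = 1e-4f)
{
  return std::fabs(lhs - rhs) <= eps;  // equal when the difference is negligible
}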
Example 7: OutputFeatureScores
void OutputFeatureScores( std::ostream& out, const TrellisPath &path, const FeatureFunction *ff, std::string &lastName )
{
  const StaticData &staticData = StaticData::Instance();
  bool labeledOutput = staticData.IsLabeledNBestList();

  // regular features (not sparse)
  if (ff->GetNumScoreComponents() != ScoreProducer::unlimited) {
    if( labeledOutput && lastName != ff->GetScoreProducerWeightShortName() ) {
      lastName = ff->GetScoreProducerWeightShortName();
      out << " " << lastName << ":";
    }
    vector<float> scores = path.GetScoreBreakdown().GetScoresForProducer( ff );
    for (size_t j = 0; j < scores.size(); ++j) {
      out << " " << scores[j];
    }
  }
  // sparse features
  else {
    const FVector scores = path.GetScoreBreakdown().GetVectorForProducer( ff );
    // report weighted aggregate
    if (! ff->GetSparseFeatureReporting()) {
      const FVector &weights = staticData.GetAllWeights().GetScoresVector();
      if (labeledOutput && !boost::contains(ff->GetScoreProducerDescription(), ":"))
        out << " " << ff->GetScoreProducerWeightShortName() << ":";
      out << " " << scores.inner_product(weights);
    }
    // report each feature
    else {
      for (FVector::FNVmap::const_iterator i = scores.cbegin(); i != scores.cend(); i++)
        out << " " << i->first << ": " << i->second;
      /* if (i->second != 0) { // do not report zero-valued features
           float weight = staticData.GetSparseWeight(i->first);
           if (weight != 0)
             out << " " << i->first << "=" << weight;
         }*/
    }
  }
}
Example 8: sparsePlusEquals
// add only sparse features
void FVector::sparsePlusEquals(const FVector& rhs)
{
  for (const_iterator i = rhs.cbegin(); i != rhs.cend(); ++i)
    set(i->first, get(i->first) + i->second);
}
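The same "add only the sparse part" semantics can be sketched with a std::map accumulator (illustration only, not the Moses API); dense core features are deliberately left untouched, just as in FVector::sparsePlusEquals.

// Sketch with standard containers: accumulate only sparse entries.
#include <map>
#include <string>

void SparsePlusEquals(std::map<std::string, float>& lhs,
                      const std::map<std::string, float>& rhs)
{
  for (std::map<std::string, float>::const_iterator i = rhs.begin();
       i != rhs.end(); ++i)
    lhs[i->first] += i->second;  // operator[] creates missing entries with value 0
}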