This page collects typical usage examples of the C++ method TreeType::ParentDistance. If you have been wondering exactly what TreeType::ParentDistance does, how to call it, or what real code that uses it looks like, the hand-picked examples below should help; they also show the TreeType class that the method belongs to in context.
Four code examples of TreeType::ParentDistance are shown below, sorted by popularity by default.
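Before the examples, here is a minimal sketch (not taken from any of the snippets below) of what ParentDistance() reports: the distance between a node's center and its parent's center, which is conventionally 0 at the root. It assumes only the mlpack-style tree interface the examples rely on (ParentDistance(), NumChildren(), Child()); the function name and the printing are illustrative.

#include <cstddef>
#include <iostream>
#include <string>

// Recursively print ParentDistance() for every node, indented by depth.
template<typename TreeType>
void PrintParentDistances(const TreeType& node, const size_t depth = 0)
{
  // ParentDistance() is the distance from this node's center to its parent's
  // center; the root has no parent, so it reports 0 there.
  std::cout << std::string(2 * depth, ' ') << node.ParentDistance() << std::endl;

  for (size_t i = 0; i < node.NumChildren(); ++i)
    PrintParentDistances(node.Child(i), depth + 1);
}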
Example 1: CheckTrees
template<typename TreeType>
void CheckTrees(TreeType& tree,
                TreeType& xmlTree,
                TreeType& textTree,
                TreeType& binaryTree)
{
  const typename TreeType::Mat* dataset = &tree.Dataset();

  // Make sure that the data matrices are the same.
  if (tree.Parent() == NULL)
  {
    CheckMatrices(*dataset,
                  xmlTree.Dataset(),
                  textTree.Dataset(),
                  binaryTree.Dataset());

    // Also ensure that the other parents are null too.
    BOOST_REQUIRE_EQUAL(xmlTree.Parent(), (TreeType*) NULL);
    BOOST_REQUIRE_EQUAL(textTree.Parent(), (TreeType*) NULL);
    BOOST_REQUIRE_EQUAL(binaryTree.Parent(), (TreeType*) NULL);
  }

  // Make sure the number of children is the same.
  BOOST_REQUIRE_EQUAL(tree.NumChildren(), xmlTree.NumChildren());
  BOOST_REQUIRE_EQUAL(tree.NumChildren(), textTree.NumChildren());
  BOOST_REQUIRE_EQUAL(tree.NumChildren(), binaryTree.NumChildren());

  // Make sure the number of descendants is the same.
  BOOST_REQUIRE_EQUAL(tree.NumDescendants(), xmlTree.NumDescendants());
  BOOST_REQUIRE_EQUAL(tree.NumDescendants(), textTree.NumDescendants());
  BOOST_REQUIRE_EQUAL(tree.NumDescendants(), binaryTree.NumDescendants());

  // Make sure the number of points is the same.
  BOOST_REQUIRE_EQUAL(tree.NumPoints(), xmlTree.NumPoints());
  BOOST_REQUIRE_EQUAL(tree.NumPoints(), textTree.NumPoints());
  BOOST_REQUIRE_EQUAL(tree.NumPoints(), binaryTree.NumPoints());

  // Check that each point is the same.
  for (size_t i = 0; i < tree.NumPoints(); ++i)
  {
    BOOST_REQUIRE_EQUAL(tree.Point(i), xmlTree.Point(i));
    BOOST_REQUIRE_EQUAL(tree.Point(i), textTree.Point(i));
    BOOST_REQUIRE_EQUAL(tree.Point(i), binaryTree.Point(i));
  }

  // Check that the parent distance is the same.
  BOOST_REQUIRE_CLOSE(tree.ParentDistance(), xmlTree.ParentDistance(), 1e-8);
  BOOST_REQUIRE_CLOSE(tree.ParentDistance(), textTree.ParentDistance(), 1e-8);
  BOOST_REQUIRE_CLOSE(tree.ParentDistance(), binaryTree.ParentDistance(), 1e-8);

  // Check that the furthest descendant distance is the same.
  BOOST_REQUIRE_CLOSE(tree.FurthestDescendantDistance(),
      xmlTree.FurthestDescendantDistance(), 1e-8);
  BOOST_REQUIRE_CLOSE(tree.FurthestDescendantDistance(),
      textTree.FurthestDescendantDistance(), 1e-8);
  BOOST_REQUIRE_CLOSE(tree.FurthestDescendantDistance(),
      binaryTree.FurthestDescendantDistance(), 1e-8);

  // Check that the minimum bound distance is the same.
  BOOST_REQUIRE_CLOSE(tree.MinimumBoundDistance(),
      xmlTree.MinimumBoundDistance(), 1e-8);
  BOOST_REQUIRE_CLOSE(tree.MinimumBoundDistance(),
      textTree.MinimumBoundDistance(), 1e-8);
  BOOST_REQUIRE_CLOSE(tree.MinimumBoundDistance(),
      binaryTree.MinimumBoundDistance(), 1e-8);

  // Recurse into the children.
  for (size_t i = 0; i < tree.NumChildren(); ++i)
  {
    // Check that the child dataset is the same.
    BOOST_REQUIRE_EQUAL(&xmlTree.Dataset(), &xmlTree.Child(i).Dataset());
    BOOST_REQUIRE_EQUAL(&textTree.Dataset(), &textTree.Child(i).Dataset());
    BOOST_REQUIRE_EQUAL(&binaryTree.Dataset(), &binaryTree.Child(i).Dataset());

    // Make sure the parent link is right.
    BOOST_REQUIRE_EQUAL(xmlTree.Child(i).Parent(), &xmlTree);
    BOOST_REQUIRE_EQUAL(textTree.Child(i).Parent(), &textTree);
    BOOST_REQUIRE_EQUAL(binaryTree.Child(i).Parent(), &binaryTree);

    CheckTrees(tree.Child(i), xmlTree.Child(i), textTree.Child(i),
        binaryTree.Child(i));
  }
}
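CheckTrees() above verifies, among other things, that ParentDistance() survives a serialization round trip. As a complementary illustration (a sketch, not part of the original test), the helper below checks the property that gives ParentDistance() its meaning for Euclidean-metric trees: a child's ParentDistance() should match the metric distance between the child's centroid and its parent's centroid. It assumes the tree exposes Centroid(), as Example 4 below does, and the name CheckParentDistanceInvariant is made up here.

#include <cassert>
#include <cmath>
#include <mlpack/core.hpp>

template<typename TreeType>
void CheckParentDistanceInvariant(const TreeType& node)
{
  arma::vec centroid;
  node.Centroid(centroid);

  for (size_t i = 0; i < node.NumChildren(); ++i)
  {
    arma::vec childCentroid;
    node.Child(i).Centroid(childCentroid);

    // The child's ParentDistance() should agree with the distance between the
    // two centroids under the tree's (assumed Euclidean) metric.
    const double dist = mlpack::metric::EuclideanDistance::Evaluate(centroid,
        childCentroid);
    assert(std::fabs(node.Child(i).ParentDistance() - dist) < 1e-8);

    CheckParentDistanceInvariant(node.Child(i));
  }
}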
Example 2: CalculateBound
template<typename KernelType, typename TreeType>
double FastMKSRules<KernelType, TreeType>::Score(TreeType& queryNode,
                                                 TreeType& referenceNode)
{
  // Update and get the query node's bound.
  queryNode.Stat().Bound() = CalculateBound(queryNode);
  const double bestKernel = queryNode.Stat().Bound();

  // First, see if we can make a parent-child or parent-parent prune.  These
  // four bounds on the maximum kernel value are looser than the bound normally
  // used, but they can prevent a base case from needing to be calculated.

  // Convenience caching so lines are shorter.
  const double queryParentDist = queryNode.ParentDistance();
  const double queryDescDist = queryNode.FurthestDescendantDistance();
  const double refParentDist = referenceNode.ParentDistance();
  const double refDescDist = referenceNode.FurthestDescendantDistance();
  double adjustedScore = traversalInfo.LastBaseCase();

  const double queryDistBound = (queryParentDist + queryDescDist);
  const double refDistBound = (refParentDist + refDescDist);
  double dualQueryTerm;
  double dualRefTerm;

  // The parent-child and parent-parent prunes work by applying the same
  // pruning condition as when the parent node was used, except they are
  // tighter because
  //    queryDistBound < queryNode.Parent()->FurthestDescendantDistance()
  // and
  //    refDistBound < referenceNode.Parent()->FurthestDescendantDistance()
  // so we construct the same bounds that were used when Score() was called
  // with the parents, except with the tighter distance bounds.  Sometimes this
  // allows us to prune nodes without evaluating the base cases between them.
  if (traversalInfo.LastQueryNode() == queryNode.Parent())
  {
    // We can assume that queryNode.Parent() != NULL, because at the root node
    // combination, the traversalInfo.LastQueryNode() pointer will _not_ be
    // NULL.  We also should be guaranteed that
    // traversalInfo.LastReferenceNode() is either the reference node or the
    // parent of the reference node.
    adjustedScore += queryDistBound *
        traversalInfo.LastReferenceNode()->Stat().SelfKernel();
    dualQueryTerm = queryDistBound;
  }
  else
  {
    // The query parent could be NULL, which does weird things and we have to
    // consider.
    if (traversalInfo.LastReferenceNode() != NULL)
    {
      adjustedScore += queryDescDist *
          traversalInfo.LastReferenceNode()->Stat().SelfKernel();
      dualQueryTerm = queryDescDist;
    }
    else
    {
      // This makes it so a child-parent (or parent-parent) prune is not
      // possible.
      dualQueryTerm = 0.0;
      adjustedScore = bestKernel;
    }
  }

  if (traversalInfo.LastReferenceNode() == referenceNode.Parent())
  {
    // We can assume that referenceNode.Parent() != NULL, because at the root
    // node combination, the traversalInfo.LastReferenceNode() pointer will
    // _not_ be NULL.
    adjustedScore += refDistBound *
        traversalInfo.LastQueryNode()->Stat().SelfKernel();
    dualRefTerm = refDistBound;
  }
  else
  {
    // The reference parent could be NULL, which does weird things and we have
    // to consider.
    if (traversalInfo.LastQueryNode() != NULL)
    {
      adjustedScore += refDescDist *
          traversalInfo.LastQueryNode()->Stat().SelfKernel();
      dualRefTerm = refDescDist;
    }
    else
    {
      // This makes it so a child-parent (or parent-parent) prune is not
      // possible.
      dualRefTerm = 0.0;
      adjustedScore = bestKernel;
    }
  }

  // Now add the dual term.
  adjustedScore += (dualQueryTerm * dualRefTerm);

  if (adjustedScore < bestKernel)
  {
    // It is not possible that this node combination can contain a point
    // combination with kernel value better than the minimum kernel value to
    // improve any of the results, so we can prune it.
    return DBL_MAX;
  }
  //......... (the rest of this code has been omitted) .........
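The prune in Example 2 can be read as a single inequality. My reading of the code (an interpretation, not a quoted mlpack API): traversalInfo.LastBaseCase() is K(q_p, r_p) for the previously scored node pair, Stat().SelfKernel() caches sqrt(K(p, p)), the RKHS norm of a node's representative point, and the distance sums bound how far any descendant can drift from those representatives in the kernel metric. Cauchy-Schwarz then gives the upper bound that the snippet accumulates into adjustedScore; the hypothetical helper below just writes it out in one place.

// K(q, r) <= K(q_p, r_p) + dq * ||phi(r_p)|| + dr * ||phi(q_p)|| + dq * dr,
// where dq and dr bound the kernel-metric distance from any descendant of the
// query / reference node to q_p / r_p respectively.
inline double MaxKernelUpperBound(const double lastBaseCase,    // K(q_p, r_p)
                                  const double dq,              // query-side distance bound
                                  const double dr,              // reference-side distance bound
                                  const double refSelfKernel,   // sqrt(K(r_p, r_p))
                                  const double querySelfKernel) // sqrt(K(q_p, q_p))
{
  return lastBaseCase + dq * refSelfKernel + dr * querySelfKernel + dq * dr;
}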
Example 3: if
template<typename SortPolicy, typename MetricType, typename TreeType>
inline double NeighborSearchRules<SortPolicy, MetricType, TreeType>::Score(
    TreeType& queryNode,
    TreeType& referenceNode)
{
  ++scores; // Count number of Score() calls.

  // Update our bound.
  const double bestDistance = CalculateBound(queryNode);

  // Use the traversal info to see if a parent-child or parent-parent prune is
  // possible.  This is a looser bound than we could make, but it might be
  // sufficient.
  const double queryParentDist = queryNode.ParentDistance();
  const double queryDescDist = queryNode.FurthestDescendantDistance();
  const double refParentDist = referenceNode.ParentDistance();
  const double refDescDist = referenceNode.FurthestDescendantDistance();
  const double score = traversalInfo.LastScore();
  double adjustedScore;

  // We want to set adjustedScore to be the distance between the centroid of
  // the last query node and last reference node.  We will do this by adjusting
  // the last score.  In some cases, we can just use the last base case.
  if (tree::TreeTraits<TreeType>::FirstPointIsCentroid)
  {
    adjustedScore = traversalInfo.LastBaseCase();
  }
  else if (score == 0.0) // Nothing we can do here.
  {
    adjustedScore = 0.0;
  }
  else
  {
    // The last score is equal to the distance between the centroids minus the
    // radii of the query and reference bounds along the axis of the line
    // between the two centroids.  In the best case, these radii are the
    // furthest descendant distances, but that is not always true.  It would
    // take too long to calculate the exact radii, so we are forced to use
    // MinimumBoundDistance() as a lower-bound approximation.
    const double lastQueryDescDist =
        traversalInfo.LastQueryNode()->MinimumBoundDistance();
    const double lastRefDescDist =
        traversalInfo.LastReferenceNode()->MinimumBoundDistance();
    adjustedScore = SortPolicy::CombineWorst(score, lastQueryDescDist);
    adjustedScore = SortPolicy::CombineWorst(adjustedScore, lastRefDescDist);
  }

  // Assemble an adjusted score.  For nearest neighbor search, this adjusted
  // score is a lower bound on MinDistance(queryNode, referenceNode) that is
  // assembled without actually calculating MinDistance().  For furthest
  // neighbor search, it is an upper bound on
  // MaxDistance(queryNode, referenceNode).  If the traversalInfo isn't usable
  // then the node should not be pruned by this.
  if (traversalInfo.LastQueryNode() == queryNode.Parent())
  {
    const double queryAdjust = queryParentDist + queryDescDist;
    adjustedScore = SortPolicy::CombineBest(adjustedScore, queryAdjust);
  }
  else if (traversalInfo.LastQueryNode() == &queryNode)
  {
    adjustedScore = SortPolicy::CombineBest(adjustedScore, queryDescDist);
  }
  else
  {
    // The last query node wasn't this query node or its parent.  So we force
    // the adjustedScore to be such that this combination can't be pruned here,
    // because we don't really know anything about it.
    //
    // It would be possible to modify this section to try and make a prune
    // based on the query descendant distance and the distance between the
    // query node and last traversal query node, but this case doesn't actually
    // happen for kd-trees or cover trees.
    adjustedScore = SortPolicy::BestDistance();
  }

  if (traversalInfo.LastReferenceNode() == referenceNode.Parent())
  {
    const double refAdjust = refParentDist + refDescDist;
    adjustedScore = SortPolicy::CombineBest(adjustedScore, refAdjust);
  }
  else if (traversalInfo.LastReferenceNode() == &referenceNode)
  {
    adjustedScore = SortPolicy::CombineBest(adjustedScore, refDescDist);
  }
  else
  {
    // The last reference node wasn't this reference node or its parent.  So we
    // force the adjustedScore to be such that this combination can't be pruned
    // here, because we don't really know anything about it.
    //
    // It would be possible to modify this section to try and make a prune
    // based on the reference descendant distance and the distance between the
    // reference node and last traversal reference node, but this case doesn't
    // actually happen for kd-trees or cover trees.
    adjustedScore = SortPolicy::BestDistance();
  }

  // Can we prune?
  if (SortPolicy::IsBetter(bestDistance, adjustedScore))
  {
    if (!(tree::TreeTraits<TreeType>::FirstPointIsCentroid && score == 0.0))
    //......... (the rest of this code has been omitted) .........
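For the nearest-neighbor specialization of SortPolicy, the adjusted score assembled above has a simple closed form. Assuming CombineBest(a, b) subtracts b from a and BestDistance() is 0 for that policy (my reading of the code, not a quoted API), the parent-parent case reduces to the triangle-inequality lower bound sketched below; the helper name is made up for illustration.

#include <algorithm>

// Lower bound on the minimum distance between any descendant of the query node
// and any descendant of the reference node, given the distance between the
// centers of their parents (the previously scored pair).
inline double AdjustedLowerBound(const double lastCenterDistance,
                                 const double queryParentDist,
                                 const double queryDescDist,
                                 const double refParentDist,
                                 const double refDescDist)
{
  const double bound = lastCenterDistance -
      (queryParentDist + queryDescDist) -
      (refParentDist + refDescDist);
  return std::max(bound, 0.0);  // A distance can never be negative.
}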
Example 4: products
template<typename KernelType, typename TreeType>
double FastMKSRules<KernelType, TreeType>::Score(const size_t queryIndex,
                                                 TreeType& referenceNode)
{
  // Compare with the current best.
  const double bestKernel = products(products.n_rows - 1, queryIndex);

  // See if we can perform a parent-child prune.
  const double furthestDist = referenceNode.FurthestDescendantDistance();
  if (referenceNode.Parent() != NULL)
  {
    double maxKernelBound;
    const double parentDist = referenceNode.ParentDistance();
    const double combinedDistBound = parentDist + furthestDist;
    const double lastKernel = referenceNode.Parent()->Stat().LastKernel();
    if (kernel::KernelTraits<KernelType>::IsNormalized)
    {
      const double squaredDist = std::pow(combinedDistBound, 2.0);
      const double delta = (1 - 0.5 * squaredDist);
      if (lastKernel <= delta)
      {
        const double gamma = combinedDistBound * sqrt(1 - 0.25 * squaredDist);
        maxKernelBound = lastKernel * delta +
            gamma * sqrt(1 - std::pow(lastKernel, 2.0));
      }
      else
      {
        maxKernelBound = 1.0;
      }
    }
    else
    {
      maxKernelBound = lastKernel +
          combinedDistBound * queryKernels[queryIndex];
    }

    if (maxKernelBound < bestKernel)
      return DBL_MAX;
  }

  // Calculate the maximum possible kernel value, either by calculating the
  // centroid or, if the centroid is a point, use that.
  ++scores;
  double kernelEval;
  if (tree::TreeTraits<TreeType>::FirstPointIsCentroid)
  {
    // Could it be that this kernel evaluation has already been calculated?
    if (tree::TreeTraits<TreeType>::HasSelfChildren &&
        referenceNode.Parent() != NULL &&
        referenceNode.Point(0) == referenceNode.Parent()->Point(0))
    {
      kernelEval = referenceNode.Parent()->Stat().LastKernel();
    }
    else
    {
      kernelEval = BaseCase(queryIndex, referenceNode.Point(0));
    }
  }
  else
  {
    const arma::vec queryPoint = querySet.unsafe_col(queryIndex);
    arma::vec refCentroid;
    referenceNode.Centroid(refCentroid);
    kernelEval = kernel.Evaluate(queryPoint, refCentroid);
  }
  referenceNode.Stat().LastKernel() = kernelEval;

  double maxKernel;
  if (kernel::KernelTraits<KernelType>::IsNormalized)
  {
    const double squaredDist = std::pow(furthestDist, 2.0);
    const double delta = (1 - 0.5 * squaredDist);
    if (kernelEval <= delta)
    {
      const double gamma = furthestDist * sqrt(1 - 0.25 * squaredDist);
      maxKernel = kernelEval * delta +
          gamma * sqrt(1 - std::pow(kernelEval, 2.0));
    }
    else
    {
      maxKernel = 1.0;
    }
  }
  else
  {
    maxKernel = kernelEval + furthestDist * queryKernels[queryIndex];
  }

  // We return the inverse of the maximum kernel so that larger kernels are
  // recursed into first.
  return (maxKernel > bestKernel) ? (1.0 / maxKernel) : DBL_MAX;
}
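The normalized-kernel branch that appears twice in Example 4 (once with combinedDistBound, once with furthestDist) is a spherical bound: for a normalized kernel every point maps to the unit sphere in the RKHS, so K(q, p) = cos(theta), and a chord of length d corresponds to an angle phi with cos(phi) = 1 - d^2/2 and sin(phi) = d * sqrt(1 - d^2/4). The best any descendant within distance d of p can do is cos(theta - phi), unless q already lies inside that angular cap, in which case the kernel can reach 1. The standalone sketch below restates that computation; the function name is illustrative, not mlpack's.

#include <cmath>

inline double NormalizedMaxKernel(const double kernelEval,  // K(q, p) = cos(theta), in [-1, 1]
                                  const double dist)        // kernel-metric distance bound d
{
  const double squaredDist = dist * dist;
  const double delta = 1.0 - 0.5 * squaredDist;  // cos(phi)
  if (kernelEval > delta)
    return 1.0;  // q can coincide with some descendant direction.

  const double gamma = dist * std::sqrt(1.0 - 0.25 * squaredDist);  // sin(phi)
  // cos(theta - phi) = cos(theta) * cos(phi) + sin(theta) * sin(phi).
  return kernelEval * delta + gamma * std::sqrt(1.0 - kernelEval * kernelEval);
}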