本文整理汇总了C++中BSONElement::isNull方法的典型用法代码示例。如果您正苦于以下问题:C++ BSONElement::isNull方法的具体用法?C++ BSONElement::isNull怎么用?C++ BSONElement::isNull使用的例子?那么, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类BSONElement
的用法示例。
在下文中一共展示了BSONElement::isNull方法的6个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C++代码示例。
示例1: ns
StatusWith<UpdateZoneKeyRangeRequest> UpdateZoneKeyRangeRequest::_parseFromCommand(
    const BSONObj& cmdObj, bool forMongos) {
    // Pull the namespace string out of the command field whose name depends on
    // whether this command arrived at mongos or at the config server.
    string nsStr;
    auto extractNSResult = bsonExtractStringField(
        cmdObj, (forMongos ? kMongosUpdateZoneKeyRange : kConfigsvrUpdateZoneKeyRange), &nsStr);
    if (!extractNSResult.isOK()) {
        return extractNSResult;
    }

    NamespaceString parsedNS(nsStr);
    if (!parsedNS.isValid()) {
        return {ErrorCodes::InvalidNamespace,
                str::stream() << nsStr << " is not a valid namespace"};
    }

    // The chunk range (min/max) is embedded directly in the command object.
    auto rangeResult = ChunkRange::fromBSON(cmdObj);
    if (!rangeResult.isOK()) {
        return rangeResult.getStatus();
    }

    // The zone field must be present; it is either a string (assign the range to
    // that zone) or null (remove the range from its zone).
    BSONElement zoneElem;
    auto extractZoneResult = bsonExtractField(cmdObj, kZoneName, &zoneElem);
    if (!extractZoneResult.isOK()) {
        return extractZoneResult;
    }

    if (zoneElem.isNull()) {
        // Null zone name: build the "remove" form of the request.
        return UpdateZoneKeyRangeRequest(std::move(parsedNS), std::move(rangeResult.getValue()));
    }

    if (zoneElem.type() != String) {
        return {ErrorCodes::TypeMismatch,
                str::stream() << "\"" << kZoneName << "\" had the wrong type. Expected "
                              << typeName(String)
                              << " or "
                              << typeName(jstNULL)
                              << ", found "
                              << typeName(zoneElem.type())};
    }

    return UpdateZoneKeyRangeRequest(
        std::move(parsedNS), std::move(rangeResult.getValue()), zoneElem.str());
}
示例2: i
// Computes the full set of index keys that 'obj' contributes to a 2dsphere index
// described by 'keyPattern' and 'params', storing the result in '*keys'.
// Fields whose pattern value is "2dsphere" are expanded into S2 cell keys; all
// other fields are indexed literally.  The overall key set is the Cartesian
// product of the per-field key sets.  For index version >= 2, a document with no
// usable geo data contributes no keys at all ('*keys' is left untouched).
void ExpressionKeysPrivate::getS2Keys(const BSONObj& obj,
                                      const BSONObj& keyPattern,
                                      const S2IndexingParams& params,
                                      BSONObjSet* keys) {
    BSONObjSet keysToAdd;

    // Does one of our documents have a geo field?
    bool haveGeoField = false;

    // We output keys in the same order as the fields we index.
    BSONObjIterator i(keyPattern);
    while (i.more()) {
        BSONElement e = i.next();

        // First, we get the keys that this field adds.  Either they're added literally from
        // the value of the field, or they're transformed if the field is geo.
        BSONElementSet fieldElements;
        // false means Don't expand the last array, duh.
        obj.getFieldsDotted(e.fieldName(), fieldElements, false);

        BSONObjSet keysForThisField;
        if (IndexNames::GEO_2DSPHERE == e.valuestr()) {
            if (params.indexVersion >= S2_INDEX_VERSION_2) {
                // For >= V2,
                // geo: null,
                // geo: undefined
                // geo: []
                // should all behave like there is no geo field.  So we look for these cases and
                // throw out the field elements if we find them.
                if (1 == fieldElements.size()) {
                    BSONElement elt = *fieldElements.begin();
                    // Get the :null and :undefined cases.
                    if (elt.isNull() || Undefined == elt.type()) {
                        fieldElements.clear();
                    } else if (elt.isABSONObj()) {
                        // And this is the :[] case.
                        // NOTE: this local 'obj' shadows the function parameter 'obj'.
                        BSONObj obj = elt.Obj();
                        if (0 == obj.nFields()) {
                            fieldElements.clear();
                        }
                    }
                }

                // >= V2 2dsphere indices require that at least one geo field to be present in a
                // document in order to index it.
                if (fieldElements.size() > 0) {
                    haveGeoField = true;
                }
            }
            getS2GeoKeys(obj, fieldElements, params, &keysForThisField);
        } else {
            getS2LiteralKeys(fieldElements, params.collator, &keysForThisField);
        }

        // We expect there to be the missing field element present in the keys if data is
        // missing.  So, this should be non-empty.
        verify(!keysForThisField.empty());

        // We take the Cartesian product of all of the keys.  This requires that we have
        // some keys to take the Cartesian product with.  If keysToAdd.empty(), we
        // initialize it.
        if (keysToAdd.empty()) {
            keysToAdd = keysForThisField;
            continue;
        }

        // Cross every accumulated key with every key from this field: each new key is
        // an existing key with this field's (single-element) key appended.
        BSONObjSet updatedKeysToAdd;
        for (BSONObjSet::const_iterator it = keysToAdd.begin(); it != keysToAdd.end(); ++it) {
            for (BSONObjSet::const_iterator newIt = keysForThisField.begin();
                 newIt != keysForThisField.end();
                 ++newIt) {
                BSONObjBuilder b;
                b.appendElements(*it);
                b.append(newIt->firstElement());
                updatedKeysToAdd.insert(b.obj());
            }
        }
        keysToAdd = updatedKeysToAdd;
    }

    // Make sure that if we're >= V2 there's at least one geo field present in the doc.
    if (params.indexVersion >= S2_INDEX_VERSION_2) {
        if (!haveGeoField) {
            return;
        }
    }

    // Warn (but still index) when one document explodes into an unusually large
    // number of keys, since that can hurt insert performance.
    if (keysToAdd.size() > params.maxKeysPerInsert) {
        warning() << "Insert of geo object generated a high number of keys."
                  << " num keys: " << keysToAdd.size() << " obj inserted: " << obj;
    }

    *keys = keysToAdd;
}
示例3: solutionMatches
// static
bool QueryPlannerTestLib::solutionMatches(const BSONObj& testSoln,
const QuerySolutionNode* trueSoln) {
//
// leaf nodes
//
if (STAGE_COLLSCAN == trueSoln->getType()) {
const CollectionScanNode* csn = static_cast<const CollectionScanNode*>(trueSoln);
BSONElement el = testSoln["cscan"];
if (el.eoo() || !el.isABSONObj()) {
return false;
}
BSONObj csObj = el.Obj();
BSONElement dir = csObj["dir"];
if (dir.eoo() || !dir.isNumber()) {
return false;
}
if (dir.numberInt() != csn->direction) {
return false;
}
BSONElement filter = csObj["filter"];
if (filter.eoo()) {
return true;
} else if (filter.isNull()) {
return NULL == csn->filter;
} else if (!filter.isABSONObj()) {
return false;
}
BSONObj collation;
if (BSONElement collationElt = csObj["collation"]) {
if (!collationElt.isABSONObj()) {
return false;
}
collation = collationElt.Obj();
}
return filterMatches(filter.Obj(), collation, trueSoln);
} else if (STAGE_IXSCAN == trueSoln->getType()) {
const IndexScanNode* ixn = static_cast<const IndexScanNode*>(trueSoln);
BSONElement el = testSoln["ixscan"];
if (el.eoo() || !el.isABSONObj()) {
return false;
}
BSONObj ixscanObj = el.Obj();
BSONElement pattern = ixscanObj["pattern"];
if (pattern.eoo() || !pattern.isABSONObj()) {
return false;
}
if (pattern.Obj() != ixn->indexKeyPattern) {
return false;
}
BSONElement bounds = ixscanObj["bounds"];
if (!bounds.eoo()) {
if (!bounds.isABSONObj()) {
return false;
} else if (!boundsMatch(bounds.Obj(), ixn->bounds)) {
return false;
}
}
BSONElement dir = ixscanObj["dir"];
if (!dir.eoo() && NumberInt == dir.type()) {
if (dir.numberInt() != ixn->direction) {
return false;
}
}
BSONElement filter = ixscanObj["filter"];
if (filter.eoo()) {
return true;
} else if (filter.isNull()) {
return NULL == ixn->filter;
} else if (!filter.isABSONObj()) {
return false;
}
BSONObj collation;
if (BSONElement collationElt = ixscanObj["collation"]) {
if (!collationElt.isABSONObj()) {
return false;
}
collation = collationElt.Obj();
}
return filterMatches(filter.Obj(), collation, trueSoln);
} else if (STAGE_GEO_NEAR_2D == trueSoln->getType()) {
const GeoNear2DNode* node = static_cast<const GeoNear2DNode*>(trueSoln);
BSONElement el = testSoln["geoNear2d"];
if (el.eoo() || !el.isABSONObj()) {
return false;
}
BSONObj geoObj = el.Obj();
return geoObj == node->indexKeyPattern;
} else if (STAGE_GEO_NEAR_2DSPHERE == trueSoln->getType()) {
const GeoNear2DSphereNode* node = static_cast<const GeoNear2DSphereNode*>(trueSoln);
//.........这里部分代码省略.........
示例4: invariant
void ExpressionKeysPrivate::getS2Keys(const BSONObj& obj,
const BSONObj& keyPattern,
const S2IndexingParams& params,
BSONObjSet* keys,
MultikeyPaths* multikeyPaths) {
BSONObjSet keysToAdd = SimpleBSONObjComparator::kInstance.makeBSONObjSet();
// Does one of our documents have a geo field?
bool haveGeoField = false;
if (multikeyPaths) {
invariant(multikeyPaths->empty());
multikeyPaths->resize(keyPattern.nFields());
}
size_t posInIdx = 0;
// We output keys in the same order as the fields we index.
for (const auto keyElem : keyPattern) {
// First, we get the keys that this field adds. Either they're added literally from
// the value of the field, or they're transformed if the field is geo.
BSONElementSet fieldElements;
const bool expandArrayOnTrailingField = false;
std::set<size_t>* arrayComponents = multikeyPaths ? &(*multikeyPaths)[posInIdx] : nullptr;
dps::extractAllElementsAlongPath(
obj, keyElem.fieldName(), fieldElements, expandArrayOnTrailingField, arrayComponents);
// Trailing array values aren't being expanded, so we still need to determine whether the
// last component of the indexed path 'keyElem.fieldName()' causes the index to be multikey.
// We say that it does if
// (a) the last component of the indexed path ever refers to an array value (regardless of
// the number of array elements)
// (b) the last component of the indexed path ever refers to GeoJSON data that requires
// multiple cells for its covering.
bool lastPathComponentCausesIndexToBeMultikey;
BSONObjSet keysForThisField = SimpleBSONObjComparator::kInstance.makeBSONObjSet();
if (IndexNames::GEO_2DSPHERE == keyElem.valuestr()) {
if (params.indexVersion >= S2_INDEX_VERSION_2) {
// For >= V2,
// geo: null,
// geo: undefined
// geo: []
// should all behave like there is no geo field. So we look for these cases and
// throw out the field elements if we find them.
if (1 == fieldElements.size()) {
BSONElement elt = *fieldElements.begin();
// Get the :null and :undefined cases.
if (elt.isNull() || Undefined == elt.type()) {
fieldElements.clear();
} else if (elt.isABSONObj()) {
// And this is the :[] case.
BSONObj obj = elt.Obj();
if (0 == obj.nFields()) {
fieldElements.clear();
}
}
}
// >= V2 2dsphere indices require that at least one geo field to be present in a
// document in order to index it.
if (fieldElements.size() > 0) {
haveGeoField = true;
}
}
lastPathComponentCausesIndexToBeMultikey =
getS2GeoKeys(obj, fieldElements, params, &keysForThisField);
} else {
lastPathComponentCausesIndexToBeMultikey =
getS2LiteralKeys(fieldElements, params.collator, &keysForThisField);
}
// We expect there to be the missing field element present in the keys if data is
// missing. So, this should be non-empty.
verify(!keysForThisField.empty());
if (multikeyPaths && lastPathComponentCausesIndexToBeMultikey) {
const size_t pathLengthOfThisField = FieldRef{keyElem.fieldNameStringData()}.numParts();
invariant(pathLengthOfThisField > 0);
(*multikeyPaths)[posInIdx].insert(pathLengthOfThisField - 1);
}
// We take the Cartesian product of all of the keys. This requires that we have
// some keys to take the Cartesian product with. If keysToAdd.empty(), we
// initialize it.
if (keysToAdd.empty()) {
keysToAdd = keysForThisField;
++posInIdx;
continue;
}
BSONObjSet updatedKeysToAdd = SimpleBSONObjComparator::kInstance.makeBSONObjSet();
for (BSONObjSet::const_iterator it = keysToAdd.begin(); it != keysToAdd.end(); ++it) {
for (BSONObjSet::const_iterator newIt = keysForThisField.begin();
newIt != keysForThisField.end();
++newIt) {
BSONObjBuilder b;
b.appendElements(*it);
b.append(newIt->firstElement());
updatedKeysToAdd.insert(b.obj());
//.........这里部分代码省略.........
示例5: solutionMatches
// static
bool QueryPlannerTestLib::solutionMatches(const BSONObj& testSoln,
const QuerySolutionNode* trueSoln) {
//
// leaf nodes
//
if (STAGE_COLLSCAN == trueSoln->getType()) {
const CollectionScanNode* csn = static_cast<const CollectionScanNode*>(trueSoln);
BSONElement el = testSoln["cscan"];
if (el.eoo() || !el.isABSONObj()) { return false; }
BSONObj csObj = el.Obj();
BSONElement dir = csObj["dir"];
if (dir.eoo() || !dir.isNumber()) { return false; }
if (dir.numberInt() != csn->direction) { return false; }
BSONElement filter = csObj["filter"];
if (filter.eoo()) {
return true;
}
else if (filter.isNull()) {
return NULL == csn->filter;
}
else if (!filter.isABSONObj()) {
return false;
}
return filterMatches(filter.Obj(), trueSoln);
}
else if (STAGE_IXSCAN == trueSoln->getType()) {
const IndexScanNode* ixn = static_cast<const IndexScanNode*>(trueSoln);
BSONElement el = testSoln["ixscan"];
if (el.eoo() || !el.isABSONObj()) { return false; }
BSONObj ixscanObj = el.Obj();
BSONElement pattern = ixscanObj["pattern"];
if (pattern.eoo() || !pattern.isABSONObj()) { return false; }
if (pattern.Obj() != ixn->indexKeyPattern) { return false; }
BSONElement bounds = ixscanObj["bounds"];
if (!bounds.eoo()) {
if (!bounds.isABSONObj()) {
return false;
}
else if (!boundsMatch(bounds.Obj(), ixn->bounds)) {
return false;
}
}
BSONElement dir = ixscanObj["dir"];
if (!dir.eoo() && NumberInt == dir.type()) {
if (dir.numberInt() != ixn->direction) {
return false;
}
}
BSONElement filter = ixscanObj["filter"];
if (filter.eoo()) {
return true;
}
else if (filter.isNull()) {
return NULL == ixn->filter;
}
else if (!filter.isABSONObj()) {
return false;
}
return filterMatches(filter.Obj(), trueSoln);
}
else if (STAGE_GEO_2D == trueSoln->getType()) {
const Geo2DNode* node = static_cast<const Geo2DNode*>(trueSoln);
BSONElement el = testSoln["geo2d"];
if (el.eoo() || !el.isABSONObj()) { return false; }
BSONObj geoObj = el.Obj();
return geoObj == node->indexKeyPattern;
}
else if (STAGE_GEO_NEAR_2D == trueSoln->getType()) {
const GeoNear2DNode* node = static_cast<const GeoNear2DNode*>(trueSoln);
BSONElement el = testSoln["geoNear2d"];
if (el.eoo() || !el.isABSONObj()) { return false; }
BSONObj geoObj = el.Obj();
return geoObj == node->indexKeyPattern;
}
else if (STAGE_GEO_NEAR_2DSPHERE == trueSoln->getType()) {
const GeoNear2DSphereNode* node = static_cast<const GeoNear2DSphereNode*>(trueSoln);
BSONElement el = testSoln["geoNear2dsphere"];
if (el.eoo() || !el.isABSONObj()) { return false; }
BSONObj geoObj = el.Obj();
return geoObj == node->indexKeyPattern;
}
else if (STAGE_TEXT == trueSoln->getType()) {
// {text: {search: "somestr", language: "something", filter: {blah: 1}}}
const TextNode* node = static_cast<const TextNode*>(trueSoln);
BSONElement el = testSoln["text"];
if (el.eoo() || !el.isABSONObj()) { return false; }
BSONObj textObj = el.Obj();
BSONElement searchElt = textObj["search"];
if (!searchElt.eoo()) {
if (searchElt.String() != node->query) {
return false;
}
//.........这里部分代码省略.........
示例6: open
// Opens (or creates) the underlying ydb dictionary, applying the page-size and
// compression options found in 'info' before the open.  Throws NeedsCreate when
// the dictionary does not exist and 'may_create' is false.
void Dictionary::open(const BSONObj &info,
                      const mongo::Descriptor &descriptor, const bool may_create,
                      const bool hot_index) {
    // Storage tunables and their defaults; each may be overridden by a field in 'info'.
    int readPageSize = 65536;
    int pageSize = 4 * 1024 * 1024;
    TOKU_COMPRESSION_METHOD compression = TOKU_ZLIB_WITHOUT_CHECKSUM_METHOD;

    // NOTE(review): fetched but never read below — presumably kept for the side
    // effect of requiring an object-valued "key" field in 'info'; confirm before removing.
    BSONObj key_pattern = info["key"].Obj();

    const BSONElement readPageSizeElt = info["readPageSize"];
    if (readPageSizeElt.ok() && !readPageSizeElt.isNull()) {
        readPageSize = BytesQuantity<int>(readPageSizeElt);
        uassert(16743, "readPageSize must be a number > 0.", readPageSize > 0);
        TOKULOG(1) << "db " << _dname << ", using read page size " << readPageSize << endl;
    }

    const BSONElement pageSizeElt = info["pageSize"];
    if (pageSizeElt.ok() && !pageSizeElt.isNull()) {
        pageSize = BytesQuantity<int>(pageSizeElt);
        uassert(16445, "pageSize must be a number > 0.", pageSize > 0);
        TOKULOG(1) << "db " << _dname << ", using page size " << pageSize << endl;
    }

    const BSONElement compressionElt = info["compression"];
    if (compressionElt.ok() && !compressionElt.isNull()) {
        std::string str = compressionElt.String();
        if (str == "lzma") {
            compression = TOKU_LZMA_METHOD;
        } else if (str == "quicklz") {
            compression = TOKU_QUICKLZ_METHOD;
        } else if (str == "zlib") {
            compression = TOKU_ZLIB_WITHOUT_CHECKSUM_METHOD;
        } else if (str == "none") {
            compression = TOKU_NO_COMPRESSION;
        } else {
            uassert(16442, "compression must be one of: lzma, quicklz, zlib, none.", false);
        }
        TOKULOG(1) << "db " << _dname << ", using compression method \"" << str << "\"" << endl;
    }

    // Apply the tunables to the handle before opening it.
    int r;
    if ((r = _db->set_readpagesize(_db, readPageSize)) != 0) {
        handle_ydb_error(r);
    }
    if ((r = _db->set_pagesize(_db, pageSize)) != 0) {
        handle_ydb_error(r);
    }
    if ((r = _db->set_compression_method(_db, compression)) != 0) {
        handle_ydb_error(r);
    }

    // If this is a non-creating open for a read-only (or non-existent)
    // transaction, we can use an alternate stack since there's nothing
    // to roll back and no locktree locks to hold.
    const bool needAltTxn = !may_create && (!cc().hasTxn() || cc().txn().readOnly());
    scoped_ptr<Client::AlternateTransactionStack> altStack(!needAltTxn ? NULL :
                                                           new Client::AlternateTransactionStack());
    scoped_ptr<Client::Transaction> altTxn(!needAltTxn ? NULL :
                                           new Client::Transaction(0));

    const int db_flags = may_create ? DB_CREATE : 0;
    r = _db->open(_db, cc().txn().db_txn(), _dname.c_str(), NULL,
                  DB_BTREE, db_flags, S_IRUSR|S_IWUSR|S_IRGRP|S_IROTH);
    if (r == ENOENT && !may_create) {
        // Caller asked for an existing dictionary that isn't there.
        throw NeedsCreate();
    }
    if (r != 0) {
        handle_ydb_error(r);
    }

    if (may_create) {
        set_db_descriptor(_db, descriptor, hot_index);
    }
    verify_or_upgrade_db_descriptor(_db, descriptor, hot_index);

    if (altTxn.get() != NULL) {
        altTxn->commit();
    }
}