本文整理汇总了Python中pymongo.helpers._index_document函数的典型用法代码示例。如果您正苦于以下问题:Python _index_document函数的具体用法?Python _index_document怎么用?Python _index_document使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。
在下文中一共展示了_index_document函数的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: hint
def hint(self, index):
    """Tell Mongo which index to use for this query.

    A well-chosen hint can greatly improve query performance when the
    query touches several fields and at least one of them is indexed.
    Hinting a nonexistent index has no effect.

    Raises :class:`~pymongo.errors.InvalidOperation` if this cursor
    has already been used.

    :Parameters:
      - `index`: index specifier as accepted by
        :meth:`~pymongo.collection.Collection.create_index`
        (e.g. ``[('field', ASCENDING)]``), or ``None`` to clear any
        previously applied hint. The last hint applied wins.

    Returns this cursor, so calls can be chained.
    """
    self.__check_okay_to_chain()
    # None clears the hint; anything else is normalized into an
    # index document. Either way we return self for chaining.
    if index is not None:
        self.__hint = helpers._index_document(index)
    else:
        self.__hint = None
    return self
示例2: __init__
def __init__(self, collection, spec=None, fields=None, skip=0, limit=0,
             timeout=True, snapshot=False, tailable=False, sort=None,
             max_scan=None, as_class=None,
             _sock=None, _must_use_master=False, _is_command=False):
    """Create a new cursor.

    Application code should obtain cursors via
    :meth:`~pymongo.collection.Collection.find` instead of calling
    this constructor directly.

    .. mongodoc:: cursors
    """
    self.__id = None

    spec = {} if spec is None else spec

    # Validate argument types up front so misuse fails fast. The
    # error-message wording is kept identical to the historical text.
    for value, expected, label in ((spec, dict, "spec"),
                                   (skip, int, "skip"),
                                   (limit, int, "limit"),
                                   (timeout, bool, "timeout"),
                                   (snapshot, bool, "snapshot"),
                                   (tailable, bool, "tailable")):
        if not isinstance(value, expected):
            raise TypeError("%s must be an instance of %s"
                            % (label, expected.__name__))

    if fields is not None:
        # An empty projection is coerced to {"_id": 1}; list-style
        # projections are converted into dict form.
        fields = fields or {"_id": 1}
        if not isinstance(fields, dict):
            fields = helpers._fields_list_to_dict(fields)

    if as_class is None:
        as_class = collection.database.connection.document_class

    self.__collection = collection
    self.__spec = spec
    self.__fields = fields
    self.__skip = skip
    self.__limit = limit
    self.__timeout = timeout
    self.__tailable = tailable
    self.__snapshot = snapshot
    if sort:
        self.__ordering = helpers._index_document(sort) or None
    else:
        self.__ordering = None
    self.__max_scan = max_scan
    self.__explain = False
    self.__hint = None
    self.__as_class = as_class
    self.__socket = _sock
    self.__must_use_master = _must_use_master
    self.__is_command = _is_command
    self.__data = []
    self.__connection_id = None
    self.__retrieved = 0
    self.__killed = False
示例3: __set_hint
def __set_hint(self, index):
    """Store *index* as this cursor's hint.

    ``None`` clears the hint, a string is stored as an index name,
    and anything else is normalized into an index document.
    """
    if index is None:
        self.__hint = None
    elif isinstance(index, string_type):
        self.__hint = index
    else:
        self.__hint = helpers._index_document(index)
示例4: __init__
def __init__(self, keys, **kwargs):
    """Create an Index instance.

    For use with :meth:`~pymongo.collection.Collection.create_indexes`.

    Accepts a single key or a list of (key, direction) pairs. Keys
    must be strings; directions are one of
    (:data:`~pymongo.ASCENDING`, :data:`~pymongo.DESCENDING`,
    :data:`~pymongo.GEO2D`, :data:`~pymongo.GEOHAYSTACK`,
    :data:`~pymongo.GEOSPHERE`, :data:`~pymongo.HASHED`,
    :data:`~pymongo.TEXT`).

    Common keyword options (see the MongoDB documentation for the
    full, server-version-dependent list):

      - `name`: custom index name; generated automatically if absent.
      - `unique`: if ``True``, enforce a uniqueness constraint.
      - `background`: if ``True``, build the index in the background.
      - `sparse`: if ``True``, skip documents lacking the indexed field.
      - `bucketSize`: grouping proximity for geoHaystack indexes.
      - `min` / `max`: key bounds for a :data:`~pymongo.GEO2D` index.
      - `expireAfterSeconds`: TTL in seconds; the indexed field must
        be a UTC datetime for documents to expire.
      - `partialFilterExpression`: filter document for a partial
        index (requires server version **>= 3.2**).

    :Parameters:
      - `keys`: a single key or a list of (key, direction) pairs
      - `**kwargs` (optional): additional index creation options

    .. versionchanged:: 3.2
       Added partialFilterExpression to support partial indexes.
    """
    index_keys = _index_list(keys)
    options = kwargs
    # Only generate a name when the caller did not choose one.
    if "name" not in options:
        options["name"] = _gen_index_name(index_keys)
    options["key"] = _index_document(index_keys)
    self.__document = options
示例5: create_index
def create_index(self, key_or_list, unique=False, ttl=300):
    """Create an index on this collection and return its name.

    Accepts either a single key or a list of (key, direction) pairs.
    Keys must be instances of :class:`basestring`; directions are one
    of (:data:`~pymongo.ASCENDING`, :data:`~pymongo.DESCENDING`).

    Single-key index on ``'mike'``:

    >>> my_collection.create_index("mike")

    `Compound index`_ on ``'mike'`` descending, ``'eliot'`` ascending:

    >>> my_collection.create_index([("mike", pymongo.DESCENDING),
    ...                             ("eliot", pymongo.ASCENDING)])

    :Parameters:
      - `key_or_list`: a single key or a list of (key, direction)
        pairs specifying the index to create
      - `unique` (optional): should this index guarantee uniqueness?
      - `ttl` (optional): time window (in seconds) during which this
        index will be recognized by subsequent calls to
        :meth:`ensure_index` - see :meth:`ensure_index` for details

    .. seealso:: :meth:`ensure_index`
    .. _compound index: http://www.mongodb.org/display/DOCS/Indexes#Indexes-CompoundKeysIndexes
    .. mongodoc:: indexes
    """
    if not isinstance(key_or_list, (str, unicode, list)):
        raise TypeError("key_or_list must either be a single key "
                        "or a list of (key, direction) pairs")

    index_keys = helpers._index_list(key_or_list)
    index_name = self._gen_index_name(index_keys)

    # SON keeps insertion order for the serialized index document.
    spec = SON()
    spec["name"] = index_name
    spec["ns"] = self.__full_name
    spec["key"] = helpers._index_document(index_keys)
    spec["unique"] = unique

    self.__database.connection._cache_index(self.__database.name,
                                            self.__name, index_name, ttl)
    self.__database.system.indexes.insert(spec, manipulate=False,
                                          check_keys=False)
    return index_name
示例6: __init__
def __init__(self, collection, spec, fields, skip, limit, slave_okay,
             timeout, tailable, snapshot=False, sort=None, max_scan=None,
             _sock=None, _must_use_master=False, _is_command=False):
    """Create a new cursor.

    Not part of the public API; use
    :meth:`~pymongo.collection.Collection.find` to obtain cursors.

    .. mongodoc:: cursors
    """
    # Query definition.
    self.__collection = collection
    self.__spec = spec
    self.__fields = fields
    self.__skip = skip
    self.__limit = limit
    if sort:
        self.__ordering = helpers._index_document(sort) or None
    else:
        self.__ordering = None
    self.__max_scan = max_scan
    self.__explain = False
    self.__hint = None

    # Behavior flags.
    self.__slave_okay = slave_okay
    self.__timeout = timeout
    self.__tailable = tailable
    self.__snapshot = snapshot

    # Connection / routing details.
    self.__socket = _sock
    self.__must_use_master = _must_use_master
    self.__is_command = _is_command

    # Iteration state.
    self.__id = None
    self.__data = []
    self.__connection_id = None
    self.__retrieved = 0
    self.__killed = False
示例7: sort
def sort(self, key_or_list, direction=None):
    """Sort this cursor's results.

    Pass a field name and a direction, either
    :data:`~pymongo.ASCENDING` or :data:`~pymongo.DESCENDING`::

        for doc in collection.find().sort('field', pymongo.ASCENDING):
            print(doc)

    To sort by multiple fields, pass a list of (key, direction) pairs::

        for doc in collection.find().sort([
                ('field1', pymongo.ASCENDING),
                ('field2', pymongo.DESCENDING)]):
            print(doc)

    Beginning with MongoDB version 2.6, text search results can be
    sorted by relevance::

        cursor = db.test.find(
            {'$text': {'$search': 'some words'}},
            {'score': {'$meta': 'textScore'}})
        cursor.sort([('score', {'$meta': 'textScore'})])
        for doc in cursor:
            print(doc)

    Raises :class:`~pymongo.errors.InvalidOperation` if this cursor
    has already been used. Only the last :meth:`sort` applied to this
    cursor has any effect.

    :Parameters:
      - `key_or_list`: a single key or a list of (key, direction)
        pairs specifying the keys to sort on
      - `direction` (optional): only used if `key_or_list` is a single
        key; defaults to :data:`~pymongo.ASCENDING`

    Returns this cursor, so calls can be chained.
    """
    self.__check_okay_to_chain()
    # Normalize the arguments into a key list, then into the ordering
    # document sent with the query.
    self.__ordering = helpers._index_document(
        helpers._index_list(key_or_list, direction))
    return self
示例8: __init__
def __init__(self, collection, spec, fields, skip, limit, slave_okay,
             timeout, tailable, snapshot=False, _IOStream=None,
             _must_use_master=False, _is_command=False, as_class=None,
             sort=None):
    """Create a new cursor over *collection*.

    Intended to be constructed by
    :meth:`~pymongo.collection.Collection.find`, not by application
    code directly.
    """
    # Default the result document class to the connection's setting.
    if as_class is None:
        as_class = collection.database.connection.document_class

    # Query definition.
    self.__collection = collection
    self.__spec = spec
    self.__fields = fields
    self.__skip = skip
    self.__limit = limit
    if sort:
        self.__ordering = helpers._index_document(sort) or None
    else:
        self.__ordering = None
    self.__explain = False
    self.__hint = None

    # Behavior flags.
    self.__slave_okay = slave_okay
    self.__timeout = timeout
    self.__tailable = tailable
    self.__snapshot = snapshot

    # Transport / routing details.
    self.__IOStream = _IOStream
    self.__must_use_master = _must_use_master
    self.__is_command = _is_command
    self.__as_class = as_class

    # Iteration state.
    self.__data = []
    self.__id = None
    self.__connection_id = None
    self.__retrieved = 0
    self.__killed = False
示例9: sort
def sort(self, key_or_list, direction=None):
    """Sort this cursor's results.

    Takes either a single key and a direction, or a list of
    (key, direction) pairs. Keys must be instances of ``(str,
    unicode)``; directions are one of (:data:`~pymongo.ASCENDING`,
    :data:`~pymongo.DESCENDING`).

    Raises :class:`~pymongo.errors.InvalidOperation` if this cursor
    has already been used. Only the last :meth:`sort` applied to this
    cursor has any effect.

    :Parameters:
      - `key_or_list`: a single key or a list of (key, direction)
        pairs specifying the keys to sort on
      - `direction` (optional): only used if `key_or_list` is a single
        key; defaults to :data:`~pymongo.ASCENDING`

    Returns this cursor, so calls can be chained.
    """
    self.__check_okay_to_chain()
    # Normalize to a list of (key, direction) pairs first, then build
    # the ordering document used by the query.
    sort_spec = helpers._index_list(key_or_list, direction)
    self.__ordering = helpers._index_document(sort_spec)
    return self
示例10: create_index
def create_index(self, key_or_list, cache_for=300, **kwargs):
    """Creates an index on this collection and returns its name.

    Takes either a single key or a list of (key, direction) pairs.
    The key(s) must be an instance of :class:`basestring`
    (:class:`str` in python 3), and the directions must be one of
    (:data:`~pymongo.ASCENDING`, :data:`~pymongo.DESCENDING`,
    :data:`~pymongo.GEO2D`).

    All optional index creation parameters should be passed as
    keyword arguments to this method. Valid options include:
    check http://docs.mongodb.org/manual/reference/method/db.collection.ensureIndex/#db.collection.ensureIndex

    :Parameters:
      - `key_or_list`: a single key or a list of (key, direction)
        pairs specifying the index to create
      - `cache_for` (optional): seconds for which this index is
        remembered in the index cache
      - `**kwargs` (optional): additional index creation options;
        `name` overrides the generated index name
    """
    keys = _index_list(key_or_list)
    index = {"key": _index_document(keys), "ns": self._collection_name}
    # BUG FIX: the old ``"name" in kwargs and kwargs["name"] or ...``
    # idiom regenerated the name whenever the supplied value was falsy
    # (e.g. ""), silently discarding the caller's choice. Test for
    # presence explicitly instead.
    if "name" in kwargs:
        name = kwargs["name"]
    else:
        name = helpers._gen_index_name(keys)
    index["name"] = name
    index.update(kwargs)
    Client(self._database, 'system.indexes').insert(index, check_keys=False)
    self._database._cache_index(self._collection, name, cache_for)
    return name
示例11: create_index
def create_index(self, key_or_list, unique=False, ttl=300):
    """Create an index on this collection and return its name.

    Accepts either a single key or a list of (key, direction) pairs.
    Keys must be instances of :class:`basestring`; directions are one
    of (:data:`~pymongo.ASCENDING`, :data:`~pymongo.DESCENDING`).

    :Parameters:
      - `key_or_list`: a single key or a list of (key, direction)
        pairs specifying the index to create
      - `unique` (optional): should this index guarantee uniqueness?
      - `ttl` (optional): time window (in seconds) during which this
        index will be recognized by subsequent calls to
        :meth:`ensure_index` - see :meth:`ensure_index` for details
    """
    if not isinstance(key_or_list, (str, unicode, list)):
        raise TypeError("key_or_list must either be a single key "
                        "or a list of (key, direction) pairs")

    index_keys = helpers._index_list(key_or_list)
    index_name = self._gen_index_name(index_keys)

    # Build the index document in a fixed field order via SON.
    document = SON()
    document["name"] = index_name
    document["ns"] = self.__full_name
    document["key"] = helpers._index_document(index_keys)
    document["unique"] = unique

    self.__database.connection._cache_index(self.__database.name,
                                            self.__name, index_name, ttl)
    self.__database.system.indexes.insert(document, manipulate=False,
                                          check_keys=False)
    return document["name"]
示例12: __init__
def __init__(self, collection, spec=None, fields=None, skip=0, limit=0,
             timeout=True, snapshot=False, tailable=False, sort=None,
             max_scan=None, as_class=None, slave_okay=False,
             await_data=False, partial=False, manipulate=True,
             read_preference=ReadPreference.PRIMARY, tag_sets=None,
             secondary_acceptable_latency_ms=None,
             _must_use_master=False, _uuid_subtype=None, **kwargs):
    """Create a new cursor.

    Should not be called directly by application developers - see
    :meth:`~pymongo.collection.Collection.find` instead.

    .. mongodoc:: cursors
    """
    self.__id = None
    # BUG FIX: ``tag_sets`` previously defaulted to the mutable list
    # ``[{}]``. Default arguments are evaluated once, so every cursor
    # created with the default shared (and stored) the *same* list;
    # mutating one cursor's tag sets would leak into all of them.
    # Default to None and build a fresh list per call instead.
    # (Callers that explicitly passed ``None`` previously stored None;
    # they now get the documented default ``[{}]``.)
    if tag_sets is None:
        tag_sets = [{}]
    if spec is None:
        spec = {}
    if not isinstance(spec, dict):
        raise TypeError("spec must be an instance of dict")
    if not isinstance(skip, int):
        raise TypeError("skip must be an instance of int")
    if not isinstance(limit, int):
        raise TypeError("limit must be an instance of int")
    if not isinstance(timeout, bool):
        raise TypeError("timeout must be an instance of bool")
    if not isinstance(snapshot, bool):
        raise TypeError("snapshot must be an instance of bool")
    if not isinstance(tailable, bool):
        raise TypeError("tailable must be an instance of bool")
    if not isinstance(slave_okay, bool):
        raise TypeError("slave_okay must be an instance of bool")
    if not isinstance(await_data, bool):
        raise TypeError("await_data must be an instance of bool")
    if not isinstance(partial, bool):
        raise TypeError("partial must be an instance of bool")
    if fields is not None:
        # Empty projections still return _id; list projections are
        # converted to dict form.
        if not fields:
            fields = {"_id": 1}
        if not isinstance(fields, dict):
            fields = helpers._fields_list_to_dict(fields)
    if as_class is None:
        as_class = collection.database.connection.document_class
    self.__collection = collection
    self.__spec = spec
    self.__fields = fields
    self.__skip = skip
    self.__limit = limit
    self.__batch_size = 0
    # This is ugly. People want to be able to do cursor[5:5] and
    # get an empty result set (old behavior was an
    # exception). It's hard to do that right, though, because the
    # server uses limit(0) to mean 'no limit'. So we set __empty
    # in that case and check for it when iterating. We also unset
    # it anytime we change __limit.
    self.__empty = False
    self.__timeout = timeout
    self.__tailable = tailable
    # await_data is only meaningful on a tailable cursor.
    self.__await_data = tailable and await_data
    self.__partial = partial
    self.__snapshot = snapshot
    self.__ordering = sort and helpers._index_document(sort) or None
    self.__max_scan = max_scan
    self.__explain = False
    self.__hint = None
    self.__as_class = as_class
    self.__slave_okay = slave_okay
    self.__manipulate = manipulate
    self.__read_preference = read_preference
    self.__tag_sets = tag_sets
    self.__secondary_acceptable_latency_ms = secondary_acceptable_latency_ms
    self.__tz_aware = collection.database.connection.tz_aware
    self.__must_use_master = _must_use_master
    self.__uuid_subtype = _uuid_subtype or collection.uuid_subtype
    self.__query_flags = 0
    self.__data = deque()
    self.__connection_id = None
    self.__retrieved = 0
    self.__killed = False
    # this is for passing network_timeout through if it's specified
    # need to use kwargs as None is a legit value for network_timeout
    self.__kwargs = kwargs
示例13: create_index
def create_index(self, key_or_list, deprecated_unique=None,
                 ttl=300, **kwargs):
    """Creates an index on this collection and returns its name.

    Takes either a single key or a list of (key, direction) pairs.
    The key(s) must be an instance of :class:`basestring`, and the
    directions must be one of (:data:`~pymongo.ASCENDING`,
    :data:`~pymongo.DESCENDING`, :data:`~pymongo.GEO2D`).

    Single-key index on ``'mike'``:

    >>> my_collection.create_index("mike")

    Compound index on ``'mike'`` descending, ``'eliot'`` ascending:

    >>> my_collection.create_index([("mike", pymongo.DESCENDING),
    ...                             ("eliot", pymongo.ASCENDING)])

    All optional index creation parameters should be passed as
    keyword arguments to this method. Valid options include:

      - `name`: custom name to use for this index - if none is
        given, a name will be generated
      - `unique`: should this index guarantee uniqueness?
      - `dropDups` or `drop_dups`: should we drop duplicates
        during index creation when creating a unique index?
      - `min` / `max`: key bounds for a :data:`~pymongo.GEO2D` index

    :Parameters:
      - `key_or_list`: a single key or a list of (key, direction)
        pairs specifying the index to create
      - `deprecated_unique`: DEPRECATED - use `unique` as a kwarg
      - `ttl` (optional): time window (in seconds) during which this
        index will be recognized by subsequent calls to
        :meth:`ensure_index` - see :meth:`ensure_index` for details
      - `**kwargs` (optional): any additional index creation options

    .. versionchanged:: 1.5.1
       Accept kwargs to support all index creation options.
    .. versionadded:: 1.5
       The `name` parameter.
    .. seealso:: :meth:`ensure_index`
    .. mongodoc:: indexes
    """
    keys = helpers._index_list(key_or_list)
    index_doc = helpers._index_document(keys)
    index = {"key": index_doc, "ns": self.__full_name}
    if deprecated_unique is not None:
        warnings.warn("using a positional arg to specify unique is "
                      "deprecated, please use kwargs",
                      DeprecationWarning)
        index["unique"] = deprecated_unique
    # BUG FIX: the old ``"name" in kwargs and kwargs["name"] or ...``
    # idiom regenerated the name whenever the supplied value was falsy
    # (e.g. ""), silently discarding the caller's choice. Test for
    # presence explicitly instead.
    if "name" in kwargs:
        name = kwargs["name"]
    else:
        name = _gen_index_name(keys)
    index["name"] = name
    # Accept the snake_case alias for dropDups.
    if "drop_dups" in kwargs:
        kwargs["dropDups"] = kwargs.pop("drop_dups")
    index.update(kwargs)
    self.__database.connection._cache_index(self.__database.name,
                                            self.__name, name, ttl)
    self.__database.system.indexes.insert(index, manipulate=False,
                                          check_keys=False)
    return name
示例14: __init__
def __init__(self, collection, filter=None, projection=None, skip=0,
limit=0, no_cursor_timeout=False,
cursor_type=CursorType.NON_TAILABLE,
sort=None, allow_partial_results=False, oplog_replay=False,
modifiers=None, batch_size=0, manipulate=True,
collation=None, hint=None, max_scan=None, max_time_ms=None,
max=None, min=None, return_key=False, show_record_id=False,
snapshot=False, comment=None, session=None):
"""Create a new cursor.
Should not be called directly by application developers - see
:meth:`~pymongo.collection.Collection.find` instead.
.. mongodoc:: cursors
"""
# Initialize all attributes used in __del__ before possibly raising
# an error to avoid attribute errors during garbage collection.
self.__id = None
self.__exhaust = False
self.__exhaust_mgr = None
self.__killed = False
if session:
self.__session = session
self.__explicit_session = True
else:
self.__session = None
self.__explicit_session = False
spec = filter
if spec is None:
spec = {}
validate_is_mapping("filter", spec)
if not isinstance(skip, int):
raise TypeError("skip must be an instance of int")
if not isinstance(limit, int):
raise TypeError("limit must be an instance of int")
validate_boolean("no_cursor_timeout", no_cursor_timeout)
if cursor_type not in (CursorType.NON_TAILABLE, CursorType.TAILABLE,
CursorType.TAILABLE_AWAIT, CursorType.EXHAUST):
raise ValueError("not a valid value for cursor_type")
validate_boolean("allow_partial_results", allow_partial_results)
validate_boolean("oplog_replay", oplog_replay)
if modifiers is not None:
warnings.warn("the 'modifiers' parameter is deprecated",
DeprecationWarning, stacklevel=2)
validate_is_mapping("modifiers", modifiers)
if not isinstance(batch_size, integer_types):
raise TypeError("batch_size must be an integer")
if batch_size < 0:
raise ValueError("batch_size must be >= 0")
if projection is not None:
if not projection:
projection = {"_id": 1}
projection = helpers._fields_list_to_dict(projection, "projection")
self.__collection = collection
self.__spec = spec
self.__projection = projection
self.__skip = skip
self.__limit = limit
self.__batch_size = batch_size
self.__modifiers = modifiers and modifiers.copy() or {}
self.__ordering = sort and helpers._index_document(sort) or None
self.__max_scan = max_scan
self.__explain = False
self.__comment = comment
self.__max_time_ms = max_time_ms
self.__max_await_time_ms = None
self.__max = max
self.__min = min
self.__manipulate = manipulate
self.__collation = validate_collation_or_none(collation)
self.__return_key = return_key
self.__show_record_id = show_record_id
self.__snapshot = snapshot
self.__set_hint(hint)
# Exhaust cursor support
if cursor_type == CursorType.EXHAUST:
if self.__collection.database.client.is_mongos:
raise InvalidOperation('Exhaust cursors are '
'not supported by mongos')
if limit:
raise InvalidOperation("Can't use limit and exhaust together.")
self.__exhaust = True
# This is ugly. People want to be able to do cursor[5:5] and
# get an empty result set (old behavior was an
# exception). It's hard to do that right, though, because the
# server uses limit(0) to mean 'no limit'. So we set __empty
# in that case and check for it when iterating. We also unset
# it anytime we change __limit.
self.__empty = False
self.__data = deque()
self.__address = None
self.__retrieved = 0
#.........这里部分代码省略.........
示例15: __init__
#.........这里部分代码省略.........
if not isinstance(spec, dict):
raise TypeError("spec must be an instance of dict")
if not isinstance(skip, int):
raise TypeError("skip must be an instance of int")
if not isinstance(limit, int):
raise TypeError("limit must be an instance of int")
if not isinstance(timeout, bool):
raise TypeError("timeout must be an instance of bool")
if not isinstance(snapshot, bool):
raise TypeError("snapshot must be an instance of bool")
if not isinstance(tailable, bool):
raise TypeError("tailable must be an instance of bool")
if not isinstance(slave_okay, bool):
raise TypeError("slave_okay must be an instance of bool")
if not isinstance(await_data, bool):
raise TypeError("await_data must be an instance of bool")
if not isinstance(partial, bool):
raise TypeError("partial must be an instance of bool")
if not isinstance(exhaust, bool):
raise TypeError("exhaust must be an instance of bool")
if not isinstance(oplog_replay, bool):
raise TypeError("oplog_replay must be an instance of bool")
if fields is not None:
if not fields:
fields = {"_id": 1}
if not isinstance(fields, dict):
fields = helpers._fields_list_to_dict(fields)
self.__collection = collection
self.__spec = spec
self.__fields = fields
self.__skip = skip
self.__limit = limit
self.__max_time_ms = None
self.__batch_size = 0
self.__max = None
self.__min = None
self.__modifiers = modifiers and modifiers.copy() or {}
# Exhaust cursor support
if self.__collection.database.connection.is_mongos and exhaust:
raise InvalidOperation('Exhaust cursors are '
'not supported by mongos')
if limit and exhaust:
raise InvalidOperation("Can't use limit and exhaust together.")
self.__exhaust = exhaust
self.__exhaust_mgr = None
# This is ugly. People want to be able to do cursor[5:5] and
# get an empty result set (old behavior was an
# exception). It's hard to do that right, though, because the
# server uses limit(0) to mean 'no limit'. So we set __empty
# in that case and check for it when iterating. We also unset
# it anytime we change __limit.
self.__empty = False
self.__snapshot = snapshot
self.__ordering = sort and helpers._index_document(sort) or None
self.__max_scan = max_scan
self.__explain = False
self.__hint = None
self.__comment = None
self.__slave_okay = slave_okay
self.__manipulate = manipulate
self.__read_preference = read_preference
self.__tag_sets = tag_sets
self.__secondary_acceptable_latency_ms = secondary_acceptable_latency_ms
self.__compile_re = compile_re
self.__must_use_master = _must_use_master
copts = _codec_options or collection.codec_options
if as_class is not None:
copts = _CodecOptions(
as_class, copts.tz_aware, copts.uuid_representation)
self.__codec_options = copts
self.__data = deque()
self.__connection_id = None
self.__retrieved = 0
self.__killed = False
self.__query_flags = 0
if tailable:
self.__query_flags |= _QUERY_OPTIONS["tailable_cursor"]
if not timeout:
self.__query_flags |= _QUERY_OPTIONS["no_timeout"]
if tailable and await_data:
self.__query_flags |= _QUERY_OPTIONS["await_data"]
if exhaust:
self.__query_flags |= _QUERY_OPTIONS["exhaust"]
if partial:
self.__query_flags |= _QUERY_OPTIONS["partial"]
if oplog_replay:
self.__query_flags |= _QUERY_OPTIONS["oplog_replay"]
# this is for passing network_timeout through if it's specified
# need to use kwargs as None is a legit value for network_timeout
self.__kwargs = kwargs