本文整理汇总了Python中pymongo.UpdateOne方法的典型用法代码示例。如果您正苦于以下问题:Python pymongo.UpdateOne方法的具体用法?Python pymongo.UpdateOne怎么用?Python pymongo.UpdateOne使用的例子?那么, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类pymongo
的用法示例。
在下文中一共展示了pymongo.UpdateOne方法的14个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: migrate_tasks
# 需要导入模块: import pymongo [as 别名]
# 或者: from pymongo import UpdateOne [as 别名]
def migrate_tasks(collection, requests):
    """Queue UpdateOne requests that stamp a TTL field on every finished task.

    A task counts as finished when any of its completed/cancelled/failed
    timestamps is non-zero.  The TTL deadline is the newest of those
    timestamps plus the configured retention period.

    Args:
        collection: pymongo collection holding the task documents.
        requests: list collecting the pymongo.UpdateOne operations
            (mutated in place; the caller performs the bulk write).
    """
    finished_query = {
        "$or": [
            {"time.completed": {"$ne": 0}},
            {"time.cancelled": {"$ne": 0}},
            {"time.failed": {"$ne": 0}},
        ]
    }
    retention = datetime.timedelta(
        seconds=CONF["cron"]["clean_finished_tasks_after_seconds"])
    for doc in collection.find(finished_query, projection=["_id", "time"]):
        latest_ts = max(doc["time"].values())
        deadline = datetime.datetime.utcfromtimestamp(latest_ts) + retention
        requests.append(pymongo.UpdateOne(
            {"_id": doc["_id"]},
            {"$set": {task.TTL_FIELDNAME: deadline}},
        ))
示例2: dump_strings
# 需要导入模块: import pymongo [as 别名]
# 或者: from pymongo import UpdateOne [as 别名]
def dump_strings(self, entries):
    """Upsert a count increment for each string entry via one unordered bulk write.

    Args:
        entries: iterable of objects whose ``value`` attribute becomes the
            document ``_id``.

    Raises:
        pymongo.errors.BulkWriteError: re-raised unless every write error is
            "key too large to index" (code 17280), which is ignored.
    """
    ops = [
        pymongo.UpdateOne({'_id': entry.value}, {'$inc': {'count': 1}}, upsert=True)
        for entry in entries
    ]
    if not ops:
        return
    try:
        self.strings_collection.bulk_write(ops, ordered=False)
    except pymongo.errors.BulkWriteError as bwe:
        print(bwe.details)
        # "key too large to index" (error code 17280) is expected here and
        # deliberately ignored; anything else is a real failure.
        real_errors = [err for err in bwe.details['writeErrors']
                       if err['code'] != 17280]
        if real_errors:
            raise
示例3: test_successful_mutiple_queries
# 需要导入模块: import pymongo [as 别名]
# 或者: from pymongo import UpdateOne [as 别名]
def test_successful_mutiple_queries(self):
    """Bulk-write insert/update/delete and verify one child span per command."""
    with tracer.start_active_span("test"):
        self.conn.test.records.bulk_write([
            pymongo.InsertOne({"type": "string"}),
            pymongo.UpdateOne({"type": "string"}, {"$set": {"type": "int"}}),
            pymongo.DeleteOne({"type": "string"}),
        ])

    assert_is_none(tracer.active_span)

    spans = self.recorder.queued_spans()
    self.assertEqual(len(spans), 4)

    test_span = spans.pop()
    seen_ids = set()
    commands = []
    for child in spans:
        # every child belongs to the same trace and is parented on test_span
        self.assertEqual(test_span.t, child.t)
        self.assertEqual(child.p, test_span.s)
        # each span must carry a unique id
        assert_false(child.s in seen_ids)
        seen_ids.add(child.s)
        commands.append(child.data["mongo"]["command"])

    # spans must appear in the same order the commands were issued
    assert_list_equal(commands, ["insert", "update", "delete"])
示例4: add_task
# 需要导入模块: import pymongo [as 别名]
# 或者: from pymongo import UpdateOne [as 别名]
def add_task(self, base_operation: Union[UpdateOne, InsertOne, RedisOperation, tuple, dict]):
    """Enqueue a single pending operation; the worker batches queued items automatically."""
    self._to_be_request_queue.put(base_operation)
    # self.logger.debug(base_operation)
# @decorators.tomorrow_threads(100)
示例5: test_mongo_bulk_write
# 需要导入模块: import pymongo [as 别名]
# 或者: from pymongo import UpdateOne [as 别名]
def test_mongo_bulk_write(self):
    """Feed a few thousand InsertOne tasks through MongoBulkWriteHelper, timing the run."""
    # col = MongoMixin().mongo_16_client.get_database('test').get_collection('ydf_test2')
    client = MongoClient('mongodb://myUserAdmin:8mwTdy1klnSYepNo@192.168.199.202:27016/admin')
    col = client.get_database('test').get_collection('ydf_test3')
    with decorators.TimerContextManager():
        for doc_id in range(5000 + 13):
            # time.sleep(0.01)
            item = {'_id': doc_id, 'field1': doc_id * 2}
            # NOTE(review): the helper is re-created on every iteration --
            # presumably intentional in the original benchmark; confirm.
            mongo_helper = MongoBulkWriteHelper(col, 100, is_print_log=True)
            # mongo_helper.add_task(UpdateOne({'_id': item['_id']}, {'$set': item}, upsert=True))
            mongo_helper.add_task(InsertOne({'_id': item['_id']}))
示例6: update_mongo_compound_variants
# 需要导入模块: import pymongo [as 别名]
# 或者: from pymongo import UpdateOne [as 别名]
def update_mongo_compound_variants(self, bulk):
    """Update the compound information for a bulk of variants in the database.

    Args:
        bulk(dict): {'_id': scout.models.Variant}

    Raises:
        BulkWriteError: if the unordered bulk write fails.
    """
    # One UpdateOne per variant that actually carries compound data.
    requests = [
        pymongo.UpdateOne(
            {"_id": var_obj["_id"]},
            {"$set": {"compounds": var_obj["compounds"]}},
        )
        for var_obj in bulk.values()
        if var_obj.get("compounds")
    ]
    if not requests:
        return
    try:
        self.variant_collection.bulk_write(requests, ordered=False)
    except BulkWriteError as err:
        LOG.warning("Updating compounds failed")
        raise err
示例7: add
# 需要导入模块: import pymongo [as 别名]
# 或者: from pymongo import UpdateOne [as 别名]
def add(self, updatedict: Dict[Hashable, Any], setdict: Dict[Hashable, Any]) -> None:
    """
    Buffer one update operation, flushing automatically once more than
    1000 are pending.

    Args:
        updatedict: the criteria for the update query
        setdict: the dictionary describing the new record - OR use {$set: {}} to update a
            particular key without replacing the existing record.
    """
    operation = UpdateOne(updatedict, setdict)
    self.to_update.append(operation)
    if len(self.to_update) > 1000:
        self.flush()
示例8: update
# 需要导入模块: import pymongo [as 别名]
# 或者: from pymongo import UpdateOne [as 别名]
def update(self, statement):
    """Upsert *statement* and one entry per response in a single bulk write.

    Returns:
        The statement that was passed in, unchanged.
    """
    from pymongo import UpdateOne
    from pymongo.errors import BulkWriteError

    data = statement.serialize()

    operations = [
        UpdateOne({'text': statement.text}, {'$set': data}, upsert=True)
    ]

    # Make sure that an entry for each response is saved as well.
    # ($setOnInsert would do nothing when the document is not created.)
    for response_dict in data.get('in_response_to', []):
        operations.append(
            UpdateOne(
                {'text': response_dict.get('text')},
                {'$set': response_dict},
                upsert=True,
            )
        )

    try:
        self.statements.bulk_write(operations, ordered=False)
    except BulkWriteError as bwe:
        # Log the details of a bulk write error but keep going.
        self.logger.error(str(bwe.details))

    return statement
示例9: create_datamodel_fields
# 需要导入模块: import pymongo [as 别名]
# 或者: from pymongo import UpdateOne [as 别名]
def create_datamodel_fields(logger, coll_model, fields_list, field_class):
    """
    Create datamodel fields from a list of basic field dicts (like
    DATAMODEL_CORE_FIELDS) and upsert them into the datamodel collection
    as one bulk write.

    Args:
        logger: logger used for progress output.
        coll_model: pymongo collection backing db.datamodel.
        fields_list: list of dicts, each providing "field_name",
            "field_type" and "field_open".
        field_class: "core" for internal (hidden) fields, "custom" for
            user-visible ones.
    """
    timestamp = time.time()

    # Core fields are for internal use and stay hidden; any other class
    # (e.g. "custom") is visible.
    # BUG FIX: the original left is_visible unbound (NameError) when
    # field_class was neither "core" nor "custom".
    is_visible = field_class != "core"

    ### instantiate db.datamodel fields
    fields_ = [
        {
            "field_name": field["field_name"],
            "field_type": field["field_type"],
            "field_open": field["field_open"],
            "field_class": field_class,
            "added_by": "admin",
            "added_at": timestamp,
            "is_visible": is_visible,
        }
        for field in fields_list
    ]
    logger.warning("... create_datamodel_fields / datamodel - fields_ : ")

    # Upsert fields as one bulk job in mongoDB
    # cf : https://stackoverflow.com/questions/5292370/fast-or-bulk-upsert-in-pymongo
    operations = [
        UpdateOne(
            {"field_name": field["field_name"]},
            # BUG FIX: dict.iteritems() is Python 2 only and raises
            # AttributeError on Python 3; use items().
            {"$set": {k: v for k, v in field.items() if k != "field_name"}},
            upsert=True,  # create the field document if it does not exist yet
        )
        for field in fields_
    ]
    coll_model.bulk_write(operations)
示例10: perform_write
# 需要导入模块: import pymongo [as 别名]
# 或者: from pymongo import UpdateOne [as 别名]
async def perform_write(self, responses):
    """Upsert *responses* into MongoDB as one bulk write, retrying on failure.

    Each document first gets an optional createDate and, if missing, a
    hash-derived _id.  Returns True on success, False once max_retry
    attempts have failed.

    NOTE(review): the scraped listing showed a plain ``def`` while the body
    uses ``await`` -- that is a SyntaxError, so ``async def`` is restored
    here; confirm against the upstream project.
    """
    # Pre-process every document once, before any write attempt.
    for each in responses:
        if self.config.auto_insert_createDate and self.config.createDate is not None:
            each["createDate"] = self.config.createDate
        if "_id" not in each:
            each["_id"] = self.config.id_hash_func(each)

    try_time = 0
    while try_time < self.config.max_retry:
        try:
            if UpdateOne is not None:
                # Modern driver: a single bulk of per-document upserts.
                await self.config.collection_cli.bulk_write(
                    [UpdateOne({'_id': each["_id"]}, {"$set": each}, upsert=True)
                     for each in responses])
            else:
                # Legacy driver fallback: ordered bulk-op API.
                bulk = self.config.collection_cli.initialize_ordered_bulk_op()
                for each in responses:
                    bulk.find({"_id": each["_id"]}).upsert().replace_one(each)
                await bulk.execute()
            return True
        except Exception as e:
            try_time += 1
            if try_time < self.config.max_retry:
                logging.error("retry: %d, %s" % (try_time, str(e)))
                await asyncio.sleep(random.uniform(self.config.random_min_sleep, self.config.random_max_sleep))
            else:
                logging.error("Give up MongoWriter writer: %s, After retry: %d times, still fail to write, "
                              "total write %d items, total filtered: %d items, reason: %s" %
                              (self.config.name, self.config.max_retry, self.success_count, self.total_miss_count,
                               str(traceback.format_exc())))
    return False
示例11: migrate_tokens
# 需要导入模块: import pymongo [as 别名]
# 或者: from pymongo import UpdateOne [as 别名]
def migrate_tokens(collection, requests):
    """Queue UpdateOne requests converting each token's epoch expires_at to a datetime.

    Args:
        collection: pymongo collection holding the token documents.
        requests: list collecting the pymongo.UpdateOne operations
            (mutated in place; the caller performs the bulk write).
    """
    for doc in collection.find({}, projection=["_id", "expires_at"]):
        expires_at = datetime.datetime.utcfromtimestamp(doc["expires_at"])
        requests.append(pymongo.UpdateOne(
            {"_id": doc["_id"]},
            {"$set": {"expires_at": expires_at}},
        ))
示例12: migrate_pwresets
# 需要导入模块: import pymongo [as 别名]
# 或者: from pymongo import UpdateOne [as 别名]
def migrate_pwresets(collection, requests):
    """Queue UpdateOne requests converting each password-reset's epoch expires_at to a datetime.

    Args:
        collection: pymongo collection holding the password-reset documents.
        requests: list collecting the pymongo.UpdateOne operations
            (mutated in place; the caller performs the bulk write).
    """
    for doc in collection.find({}, projection=["_id", "expires_at"]):
        expires_at = datetime.datetime.utcfromtimestamp(doc["expires_at"])
        requests.append(pymongo.UpdateOne(
            {"_id": doc["_id"]},
            {"$set": {"expires_at": expires_at}},
        ))
示例13: update_variant_rank
# 需要导入模块: import pymongo [as 别名]
# 或者: from pymongo import UpdateOne [as 别名]
def update_variant_rank(self, case_obj, variant_type="clinical", category="snv"):
    """Recompute variant_rank for all variants in a case from their rank score.

    Whenever variants are added to or removed from a case the rank must be
    refreshed: the highest-scoring variant gets rank 1, and so on.

    Args:
        case_obj(Case)
        variant_type(str)
        category(str)

    Raises:
        BulkWriteError: if any of the bulk writes fails.
    """
    # All the case's variants, best rank score first.
    variants = self.variant_collection.find(
        {"case_id": case_obj["_id"], "category": category, "variant_type": variant_type,}
    ).sort("rank_score", pymongo.DESCENDING)

    LOG.info("Updating variant_rank for all variants")

    requests = []
    for rank, var_obj in enumerate(variants, start=1):
        requests.append(
            pymongo.UpdateOne({"_id": var_obj["_id"]}, {"$set": {"variant_rank": rank}})
        )
        # Flush in chunks of 5000 to keep each bulk write bounded.
        if len(requests) >= 5000:
            try:
                self.variant_collection.bulk_write(requests, ordered=False)
            except BulkWriteError as err:
                LOG.warning("Updating variant rank failed")
                raise err
            requests = []

    # Write whatever is left over.
    if requests:
        try:
            self.variant_collection.bulk_write(requests, ordered=False)
        except BulkWriteError as err:
            LOG.warning("Updating variant rank failed")
            raise err

    LOG.info("Updating variant_rank done")
示例14: update_many
# 需要导入模块: import pymongo [as 别名]
# 或者: from pymongo import UpdateOne [as 别名]
def update_many(cls, documents, *fields):
    """
    Update multiple documents. Optionally a specific list of fields to
    update can be specified.
    """
    from mongoframes.queries import to_refs

    # Ensure all documents have been converted to frames
    frames = cls._ensure_frames(documents)

    total = len(documents)
    with_id = sum(1 for frame in frames if '_id' in frame._document)
    assert with_id == total, "Can't update documents without `_id`s"

    # Send update signal
    signal('update').send(cls, frames=frames)

    # Prepare the payloads to be written
    if fields:
        # Selective update: only the named (possibly dotted) fields
        documents = []
        for frame in frames:
            payload = {'_id': frame._id}
            for field in fields:
                payload[field] = cls._path_to_value(field, frame._document)
            documents.append(to_refs(payload))
    else:
        documents = [to_refs(frame._document) for frame in frames]

    # One UpdateOne per document, all in a single bulk write
    requests = []
    for document in documents:
        _id = document.pop('_id')
        requests.append(UpdateOne({'_id': _id}, {'$set': document}))
    cls.get_collection().bulk_write(requests)

    # Send updated signal
    signal('updated').send(cls, frames=frames)