This article compiles typical usage examples of the Python class inbox.mailsync.gc.DeleteHandler. If you are wondering what DeleteHandler is for and how to use it, the curated examples below should help.
15 code examples of the DeleteHandler class are shown, sorted by popularity by default.
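Before the examples, here is a minimal quick-start sketch of how DeleteHandler is typically constructed and driven, distilled from the examples below. The literal IDs are placeholders, and the behavioural comments summarize what Examples 1-7 exercise rather than guarantees about the API:

from datetime import datetime
from inbox.mailsync.gc import DeleteHandler

# Placeholder IDs -- in practice these come from your Account and Namespace rows.
handler = DeleteHandler(account_id=1,
                        namespace_id=1,
                        uid_accessor=lambda m: m.imapuids,  # how to reach a message's IMAP UIDs
                        message_ttl=0)                      # purge as soon as a message is marked deleted

# One garbage-collection pass "as of" the given time: messages whose deleted_at
# is older than the TTL are purged, while messages that still have imapuids
# attached are unmarked instead (see Examples 2 and 5).
handler.check(datetime.utcnow())

In the sync monitors (Examples 8 and 13-15) the handler is not driven manually; it is constructed once and started as a background worker via handler.start().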
Example 1: test_thread_deletion_with_short_ttl
def test_thread_deletion_with_short_ttl(db, default_account, default_namespace,
                                        marked_deleted_message, thread, folder):
    handler = DeleteHandler(account_id=default_account.id,
                            namespace_id=default_namespace.id,
                            provider_name=default_account.provider,
                            uid_accessor=lambda m: m.imapuids,
                            message_ttl=0, thread_ttl=120)
    delete_time = marked_deleted_message.deleted_at
    handler.check(delete_time + timedelta(seconds=1))
    handler.gc_deleted_threads(delete_time + timedelta(seconds=1))
    db.session.expire_all()

    # The message TTL (0 seconds) has expired, so the message is gone, but the
    # thread TTL (120 seconds) has not: the thread is only marked for deletion.
    with pytest.raises(ObjectDeletedError):
        marked_deleted_message.id
    thread.id
    assert thread.deleted_at is not None

    handler.check(thread.deleted_at + timedelta(seconds=121))
    handler.gc_deleted_threads(thread.deleted_at + timedelta(seconds=121))
    db.session.expire_all()

    # Once the thread TTL has also expired, both objects are deleted.
    with pytest.raises(ObjectDeletedError):
        marked_deleted_message.id
    with pytest.raises(ObjectDeletedError):
        thread.id
Example 2: test_non_orphaned_messages_get_unmarked
def test_non_orphaned_messages_get_unmarked(db, default_account,
                                            default_namespace,
                                            marked_deleted_message, thread,
                                            folder, imapuid):
    handler = DeleteHandler(account_id=default_account.id,
                            namespace_id=default_namespace.id,
                            uid_accessor=lambda m: m.imapuids,
                            message_ttl=0)
    handler.check(marked_deleted_message.deleted_at + timedelta(seconds=1))
    db.session.expire_all()
    # message actually has an imapuid associated, so check that the
    # DeleteHandler unmarked it.
    assert marked_deleted_message.deleted_at is None
Example 3: test_deletion_with_short_ttl
def test_deletion_with_short_ttl(db, default_account, default_namespace,
                                 marked_deleted_message, thread, folder):
    handler = DeleteHandler(account_id=default_account.id,
                            namespace_id=default_namespace.id,
                            uid_accessor=lambda m: m.imapuids,
                            message_ttl=0)
    handler.check(marked_deleted_message.deleted_at + timedelta(seconds=1))
    db.session.expire_all()
    # Check that objects were actually deleted
    with pytest.raises(ObjectDeletedError):
        marked_deleted_message.id
    with pytest.raises(ObjectDeletedError):
        thread.id
Example 4: test_deletion_deferred_with_longer_ttl
def test_deletion_deferred_with_longer_ttl(db, default_account,
                                           default_namespace, message, thread,
                                           folder, imapuid):
    msg_uid = imapuid.msg_uid
    handler = DeleteHandler(account_id=default_account.id,
                            namespace_id=default_namespace.id,
                            uid_accessor=lambda m: m.imapuids,
                            message_ttl=1)
    remove_deleted_uids(default_account.id, db.session, [msg_uid], folder.id)
    handler.check()
    # Would raise ObjectDeletedError if objects were deleted
    message.id
    thread.id
Example 5: test_non_orphaned_messages_get_unmarked
def test_non_orphaned_messages_get_unmarked(db, default_account,
                                            default_namespace, message, thread,
                                            folder, imapuid):
    message.deleted_at = datetime.utcnow()
    db.session.commit()
    handler = DeleteHandler(account_id=default_account.id,
                            namespace_id=default_namespace.id,
                            uid_accessor=lambda m: m.imapuids,
                            message_ttl=0)
    handler.check()
    # message actually has an imapuid associated, so check that the
    # DeleteHandler unmarked it.
    assert message.deleted_at is None
Example 6: test_deletion_deferred_with_longer_ttl
def test_deletion_deferred_with_longer_ttl(db, default_account,
                                           default_namespace,
                                           marked_deleted_message, thread,
                                           folder):
    handler = DeleteHandler(account_id=default_account.id,
                            namespace_id=default_namespace.id,
                            uid_accessor=lambda m: m.imapuids,
                            message_ttl=5)
    db.session.commit()
    handler.check(marked_deleted_message.deleted_at + timedelta(seconds=1))
    # Would raise ObjectDeletedError if objects were deleted
    marked_deleted_message.id
    thread.id
Example 7: test_deletion_with_short_ttl
def test_deletion_with_short_ttl(db, default_account, default_namespace,
                                 message, thread, folder, imapuid):
    msg_uid = imapuid.msg_uid
    handler = DeleteHandler(account_id=default_account.id,
                            namespace_id=default_namespace.id,
                            uid_accessor=lambda m: m.imapuids,
                            message_ttl=0)
    remove_deleted_uids(default_account.id, db.session, [msg_uid], folder.id)
    handler.check()
    # Check that objects were actually deleted
    with pytest.raises(ObjectDeletedError):
        message.id
    with pytest.raises(ObjectDeletedError):
        thread.id
Example 8: start_delete_handler
def start_delete_handler(self):
    if self.delete_handler is None:
        self.delete_handler = DeleteHandler(
            account_id=self.account_id,
            namespace_id=self.namespace_id,
            uid_accessor=lambda m: m.imapuids)
        self.delete_handler.start()
Example 9: test_threads_only_deleted_when_no_messages_left
def test_threads_only_deleted_when_no_messages_left(db, default_account,
                                                    default_namespace, message,
                                                    thread, folder, imapuid):
    msg_uid = imapuid.msg_uid
    handler = DeleteHandler(account_id=default_account.id,
                            namespace_id=default_namespace.id,
                            uid_accessor=lambda m: m.imapuids,
                            message_ttl=0)
    # Add another message onto the thread
    add_fake_message(db.session, default_namespace.id, thread)
    remove_deleted_uids(default_account.id, db.session, [msg_uid], folder.id)
    handler.check()
    # Check that the orphaned message was deleted.
    with pytest.raises(ObjectDeletedError):
        message.id
    # Would raise ObjectDeletedError if thread was deleted.
    thread.id
Example 10: test_threads_only_deleted_when_no_messages_left
def test_threads_only_deleted_when_no_messages_left(db, default_account,
                                                    default_namespace,
                                                    marked_deleted_message,
                                                    thread, folder):
    handler = DeleteHandler(account_id=default_account.id,
                            namespace_id=default_namespace.id,
                            uid_accessor=lambda m: m.imapuids,
                            message_ttl=0)
    # Add another message onto the thread
    add_fake_message(db.session, default_namespace.id, thread)
    handler.check(marked_deleted_message.deleted_at + timedelta(seconds=1))
    db.session.expire_all()
    # Check that the orphaned message was deleted.
    with pytest.raises(ObjectDeletedError):
        marked_deleted_message.id
    # Would raise ObjectDeletedError if thread was deleted.
    thread.id
Example 11: test_deleted_labels_get_gced
def test_deleted_labels_get_gced(db, default_account, thread, message,
                                 imapuid, folder):
    # Check that only the labels without messages attached to them
    # get deleted.
    default_namespace = default_account.namespace

    # Create a label w/ no messages attached.
    label = Label.find_or_create(db.session, default_account, 'dangling label')
    label.deleted_at = datetime.utcnow()
    label.category.deleted_at = datetime.utcnow()
    label_id = label.id
    db.session.commit()

    # Create a label with attached messages.
    msg_uid = imapuid.msg_uid
    update_metadata(default_account.id, folder.id, folder.canonical_name,
                    {msg_uid: GmailFlags((), ('label',), None)}, db.session)

    label_ids = []
    for cat in message.categories:
        for l in cat.labels:
            label_ids.append(l.id)

    handler = DeleteHandler(account_id=default_account.id,
                            namespace_id=default_namespace.id,
                            provider_name=default_account.provider,
                            uid_accessor=lambda m: m.imapuids,
                            message_ttl=0)
    handler.gc_deleted_categories()
    db.session.commit()

    # Check that the first label got gc'ed
    marked_deleted = db.session.query(Label).get(label_id)
    assert marked_deleted is None

    # Check that the other labels didn't.
    for label_id in label_ids:
        assert db.session.query(Label).get(label_id) is not None
Example 12: test_deletion_creates_revision
def test_deletion_creates_revision(db, default_account, default_namespace,
                                   marked_deleted_message, thread, folder):
    message_id = marked_deleted_message.id
    thread_id = thread.id
    handler = DeleteHandler(account_id=default_account.id,
                            namespace_id=default_namespace.id,
                            uid_accessor=lambda m: m.imapuids,
                            message_ttl=0)
    handler.check(marked_deleted_message.deleted_at + timedelta(seconds=1))
    db.session.commit()

    latest_message_transaction = db.session.query(Transaction). \
        filter(Transaction.record_id == message_id,
               Transaction.object_type == 'message',
               Transaction.namespace_id == default_namespace.id). \
        order_by(desc(Transaction.id)).first()
    assert latest_message_transaction.command == 'delete'

    latest_thread_transaction = db.session.query(Transaction). \
        filter(Transaction.record_id == thread_id,
               Transaction.object_type == 'thread',
               Transaction.namespace_id == default_namespace.id). \
        order_by(desc(Transaction.id)).first()
    assert latest_thread_transaction.command == 'delete'
Example 13: ImapSyncMonitor
#.........(part of the code is omitted here).........
    refresh_flags_max: Integer
        the maximum number of UIDs for which we'll check flags
        periodically.
    """
    def __init__(self, account,
                 heartbeat=1, refresh_frequency=30, poll_frequency=30,
                 retry_fail_classes=[], refresh_flags_max=2000):
        self.refresh_frequency = refresh_frequency
        self.poll_frequency = poll_frequency
        self.syncmanager_lock = BoundedSemaphore(1)
        self.refresh_flags_max = refresh_flags_max

        provider_supports_condstore = account.provider_info.get('condstore',
                                                                False)
        account_supports_condstore = getattr(account, 'supports_condstore',
                                             False)
        if provider_supports_condstore or account_supports_condstore:
            self.sync_engine_class = CondstoreFolderSyncEngine
        else:
            self.sync_engine_class = FolderSyncEngine

        self.folder_monitors = Group()

        BaseMailSyncMonitor.__init__(self, account, heartbeat,
                                     retry_fail_classes)
    @retry_crispin
    def prepare_sync(self):
        """Ensures that canonical tags are created for the account, and gets
        and saves Folder objects for folders on the IMAP backend. Returns a
        list of tuples (folder_name, folder_id) for each folder we want to
        sync (in order)."""
        with mailsync_session_scope() as db_session:
            with _pool(self.account_id).get() as crispin_client:
                sync_folders = crispin_client.sync_folders()
                save_folder_names(log, self.account_id,
                                  crispin_client.folder_names(), db_session)

            sync_folder_names_ids = []
            for folder_name in sync_folders:
                try:
                    id_, = db_session.query(Folder.id). \
                        filter(Folder.name == folder_name,
                               Folder.account_id == self.account_id).one()
                    sync_folder_names_ids.append((folder_name, id_))
                except NoResultFound:
                    log.error("Missing Folder object when starting sync",
                              folder_name=folder_name)
                    raise MailsyncError("Missing Folder '{}' on account {}"
                                        .format(folder_name, self.account_id))
            return sync_folder_names_ids
    def start_new_folder_sync_engines(self, folders=set()):
        new_folders = [f for f in self.prepare_sync() if f not in folders]
        for folder_name, folder_id in new_folders:
            log.info('Folder sync engine started',
                     account_id=self.account_id,
                     folder_id=folder_id,
                     folder_name=folder_name)
            thread = self.sync_engine_class(self.account_id,
                                            folder_name,
                                            folder_id,
                                            self.email_address,
                                            self.provider_name,
                                            self.poll_frequency,
                                            self.syncmanager_lock,
                                            self.refresh_flags_max,
                                            self.retry_fail_classes)
            self.folder_monitors.start(thread)
            while not thread_polling(thread) and \
                    not thread_finished(thread) and \
                    not thread.ready():
                sleep(self.heartbeat)

            # allow individual folder sync monitors to shut themselves down
            # after completing the initial sync
            if thread_finished(thread) or thread.ready():
                log.info('Folder sync engine finished/killed',
                         account_id=self.account_id,
                         folder_id=folder_id,
                         folder_name=folder_name)
                # note: thread is automatically removed from
                # self.folder_monitors
            else:
                folders.add((folder_name, folder_id))
    def start_delete_handler(self):
        self.delete_handler = DeleteHandler(account_id=self.account_id,
                                            namespace_id=self.namespace_id,
                                            uid_accessor=lambda m: m.imapuids)
        self.delete_handler.start()
    def sync(self):
        self.start_delete_handler()
        folders = set()
        self.start_new_folder_sync_engines(folders)
        while True:
            sleep(self.refresh_frequency)
            self.start_new_folder_sync_engines(folders)
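The docstring fragment at the top of Example 13 describes refresh_flags_max as a cap on how many UIDs get their flags re-checked periodically. As a rough construction sketch using the parameters shown above (the values are hypothetical, and it assumes the monitor inherits a greenlet-style start() from BaseMailSyncMonitor, which the excerpt does not show):

# `account` is assumed to be a loaded Account ORM object.
monitor = ImapSyncMonitor(account,
                          heartbeat=1,            # seconds between liveness checks on folder threads
                          refresh_frequency=30,   # seconds between scans for new folders to sync
                          poll_frequency=30,      # seconds between folder polls
                          refresh_flags_max=500)  # re-check flags for at most 500 UIDs per pass
monitor.start()  # assumption: BaseMailSyncMonitor behaves like a gevent Greenlet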
Example 14: ImapSyncMonitor
#.........(part of the code is omitted here).........
        * Delete Folders that no longer exist on the remote.

        Notes
        -----
        Generic IMAP uses folders (not labels).
        Canonical folders ('inbox') and other folders are created as Folder
        objects only accordingly.

        We don't canonicalize folder names to lowercase when saving because
        different backends may be case-sensitive or otherwise - code that
        references saved folder names should canonicalize if needed when doing
        comparisons.
        """
        account = db_session.query(Account).get(self.account_id)
        remote_folder_names = {f.display_name.rstrip()[:MAX_FOLDER_NAME_LENGTH]
                               for f in raw_folders}

        assert 'inbox' in {f.role for f in raw_folders},\
            'Account {} has no detected inbox folder'.\
            format(account.email_address)

        local_folders = {f.name: f for f in db_session.query(Folder).filter(
            Folder.account_id == self.account_id)}

        # Delete folders no longer present on the remote.
        # Note that the folder with canonical_name='inbox' cannot be deleted;
        # remote_folder_names will always contain an entry corresponding to it.
        discard = set(local_folders) - remote_folder_names
        for name in discard:
            log.info('Folder deleted from remote', account_id=self.account_id,
                     name=name)
            cat = db_session.query(Category).get(
                local_folders[name].category_id)
            if cat is not None:
                db_session.delete(cat)
            del local_folders[name]

        # Create new folders
        for raw_folder in raw_folders:
            Folder.find_or_create(db_session, account, raw_folder.display_name,
                                  raw_folder.role)

        # Set the should_run bit for existing folders to True (it's True by
        # default for new ones.)
        for f in local_folders.values():
            if f.imapsyncstatus:
                f.imapsyncstatus.sync_should_run = True

        db_session.commit()
    def start_new_folder_sync_engines(self):
        running_monitors = {monitor.folder_name: monitor for monitor in
                            self.folder_monitors}
        for folder_name in self.prepare_sync():
            if folder_name in running_monitors:
                thread = running_monitors[folder_name]
            else:
                log.info('Folder sync engine started',
                         account_id=self.account_id,
                         folder_name=folder_name)
                thread = self.sync_engine_class(self.account_id,
                                                self.namespace_id,
                                                folder_name,
                                                self.email_address,
                                                self.provider_name,
                                                self.syncmanager_lock)
                self.folder_monitors.start(thread)
            while not thread_polling(thread) and not thread.ready():
                sleep(self.heartbeat)
            if thread.ready():
                log.info('Folder sync engine exited',
                         account_id=self.account_id,
                         folder_name=folder_name,
                         error=thread.exception)
    def start_delete_handler(self):
        if self.delete_handler is None:
            self.delete_handler = DeleteHandler(
                account_id=self.account_id,
                namespace_id=self.namespace_id,
                provider_name=self.provider_name,
                uid_accessor=lambda m: m.imapuids)
            self.delete_handler.start()
    def sync(self):
        try:
            self.start_delete_handler()
            self.start_new_folder_sync_engines()
            while True:
                sleep(self.refresh_frequency)
                self.start_new_folder_sync_engines()
        except ValidationError as exc:
            log.error(
                'Error authenticating; stopping sync', exc_info=True,
                account_id=self.account_id, logstash_tag='mark_invalid')
            with session_scope(self.namespace_id) as db_session:
                account = db_session.query(Account).get(self.account_id)
                account.mark_invalid()
                account.update_sync_error(str(exc))
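Example 14's Notes point out that folder names are saved without lowercasing, so code that compares against saved names should canonicalize at comparison time instead. A tiny illustrative helper (hypothetical, not part of the inbox codebase), mirroring the rstrip() applied to remote folder names above:

def folder_names_match(saved_name, candidate):
    # Compare case-insensitively and ignore trailing whitespace.
    return saved_name.rstrip().lower() == candidate.rstrip().lower()

assert folder_names_match('INBOX ', 'Inbox')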
Example 15: ImapSyncMonitor
#.........(part of the code is omitted here).........
                               for f in raw_folders}
        assert 'inbox' in {f.role for f in raw_folders},\
            'Account {} has no detected inbox folder'.\
            format(account.email_address)

        local_folders = {f.name: f for f in db_session.query(Folder).filter(
            Folder.account_id == self.account_id)}

        # Delete folders no longer present on the remote.
        # Note that the folder with canonical_name='inbox' cannot be deleted;
        # remote_folder_names will always contain an entry corresponding to it.
        discard = set(local_folders) - remote_folder_names
        for name in discard:
            log.info('Folder deleted from remote', account_id=self.account_id,
                     name=name)
            db_session.delete(local_folders[name])
            del local_folders[name]

        # Create new folders
        for raw_folder in raw_folders:
            Folder.find_or_create(db_session, account, raw_folder.display_name,
                                  raw_folder.role)

        # Set the should_run bit for existing folders to True (it's True by
        # default for new ones.)
        for f in local_folders.values():
            if f.imapsyncstatus:
                f.imapsyncstatus.sync_should_run = True

        db_session.commit()
    def start_new_folder_sync_engines(self, folders=set()):
        new_folders = [f for f in self.prepare_sync() if f not in folders]
        for folder_name, folder_id in new_folders:
            log.info('Folder sync engine started',
                     account_id=self.account_id,
                     folder_id=folder_id,
                     folder_name=folder_name)
            thread = self.sync_engine_class(self.account_id,
                                            folder_name,
                                            folder_id,
                                            self.email_address,
                                            self.provider_name,
                                            self.poll_frequency,
                                            self.syncmanager_lock,
                                            self.refresh_flags_max,
                                            self.retry_fail_classes)
            self.folder_monitors.start(thread)
            while not thread_polling(thread) and \
                    not thread_finished(thread) and \
                    not thread.ready():
                sleep(self.heartbeat)

            # allow individual folder sync monitors to shut themselves down
            # after completing the initial sync
            if thread_finished(thread) or thread.ready():
                if thread.exception:
                    # Exceptions causing the folder sync to exit should not
                    # clear the heartbeat.
                    log.info('Folder sync engine exited with error',
                             account_id=self.account_id,
                             folder_id=folder_id,
                             folder_name=folder_name,
                             error=thread.exception)
                else:
                    log.info('Folder sync engine finished',
                             account_id=self.account_id,
                             folder_id=folder_id,
                             folder_name=folder_name)
                    # clear the heartbeat for this folder-thread since it
                    # exited cleanly.
                    clear_heartbeat_status(self.account_id, folder_id)
                # note: thread is automatically removed from
                # self.folder_monitors
            else:
                folders.add((folder_name, folder_id))
    def start_delete_handler(self):
        self.delete_handler = DeleteHandler(account_id=self.account_id,
                                            namespace_id=self.namespace_id,
                                            uid_accessor=lambda m: m.imapuids)
        self.delete_handler.start()
    def sync(self):
        try:
            self.start_delete_handler()
            folders = set()
            self.start_new_folder_sync_engines(folders)
            while True:
                sleep(self.refresh_frequency)
                self.start_new_folder_sync_engines(folders)
        except ValidationError as exc:
            log.error(
                'Error authenticating; stopping sync', exc_info=True,
                account_id=self.account_id, logstash_tag='mark_invalid')
            with mailsync_session_scope() as db_session:
                account = db_session.query(Account).get(self.account_id)
                account.mark_invalid()
                account.update_sync_error(str(exc))