This article collects typical usage examples of the Python method lib.db.DB.update_run_stats. If you are unsure what DB.update_run_stats does or how to call it, the curated examples below should help; you can also look further into the containing class, lib.db.DB.
Two code examples of DB.update_run_stats are shown below, ordered by popularity by default.
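Before the examples, a minimal sketch of the call pattern they share may be useful. The parameter names and placeholder values below are inferred from the two call sites on this page, not taken from the lib.db documentation, and the const import path is assumed.

# A minimal sketch of the shared call pattern (names and values are illustrative).
from datetime import datetime

from lib import const          # import path assumed from the examples' use of const.*
from lib.db import DB

db = DB()
db.start_run("Documents", "USB Drive", "Full", datetime.now())   # open a run record
db.save_message("Backup started")

store_size = 123456789    # total bytes written to the store
nfiles = 1500             # files processed in this run
nfolders = 120            # folders processed in this run
include_packages = False  # the backup's "include packages" option
run_hash = ""             # store hash; Example 1 passes "" when rebuilding from the store

db.update_run_stats(store_size, nfiles, nfolders, include_packages, run_hash)
db.update_run_status(const.StatusSuccess)                         # close the run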
Example 1: wiz_execute
# Required import: from lib.db import DB [as alias]
# Or: from lib.db.DB import update_run_stats [as alias]
def wiz_execute(wiz):
    db = DB()
    config = Config.get_config()
    backups = config.backups.keys()
    backups.sort()
    for name in backups:
        if wiz.fields[name].value:
            # Get the backup object and store
            backup = config.backups[name]
            store = config.storage[backup.store].copy()
            # For each run on the store
            with ProgressDialog(wiz, _("Connecting"), _("Connecting to the store.\nPlease wait...")):
                store.connect()
            prog_dlg = ProgressDialog(wiz, _("Loading"), _("Retrieving data from store.\nPlease wait..."))
            prog_dlg.Show()
            try:
                try:
                    runs = store.list(backup.name)
                except:
                    # If it fails, there were no backup runs
                    runs = []
                runs.sort()
                for run in runs:
                    (date, type) = run.split(" ")
                    date = datetime.strptime(date, const.DateTimeFormat)
                    db.start_run(backup.name, store.name, type, date)
                    db.save_message(_("Database rebuild started"))
                    try:
                        store_size, _file_sizes, nfiles, nfolders = recover_run(config, db, backup, store, run)
                        db.save_message(_("Database rebuild complete"))
                        db.update_run_stats(store_size, nfiles, nfolders, backup.include_packages, "")
                        db.update_run_status(const.StatusSuccess)
                    except Exception as e:
                        msg = _("Database rebuild failed. {error}").format(error=str(e))
                        db.save_message(msg)
                        db.update_run_status(const.StatusFailed)
                        dlg.Warn(wiz, msg, _("Error"))
                        return
            finally:
                prog_dlg.Destroy()
                store.disconnect()
    wiz.parent.force_rebuild()
    # Now tell app about change.
    app.broadcast_update()
    dlg.Info(wiz, _("Your backup files database has been rebuilt.\nYou can now view your file and backup history."), _("Rebuild"))
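A note on this example: each run recovered from the store is bracketed by db.start_run() and db.update_run_status(), with db.update_run_stats() recording the recovered store size and the file and folder counts in between. The empty string passed as the last argument appears to be the hash field, left blank here because the data is rebuilt from the store rather than freshly written; that reading is an inference from the code, and Example 2 shows the live-backup case where the hash comes from the store thread.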
Example 2: Run
# Required import: from lib.db import DB [as alias]
# Or: from lib.db.DB import update_run_stats [as alias]
#......... part of the code is omitted here .........
                if self.check_backup(local_path, local_file, db_files):
                    self.do_backup_file(folder, local_file)
            except StoreFullException as e:
                log.error(str(e))
                raise e
            except Exception as e:
                log.warn("Skipping file %s: %s" % (local_file, str(e)))
        # Convert to unicode for checks below...
        local_folders = self.list_to_unicode(local_folders)
        local_files = self.list_to_unicode(local_files)
        # Have backed up all the local files. Now look for DB files
        # that exist, but are not local (i.e. they have been deleted)
        # Make sure we are only looking for 'F' and 'D' (ignore 'X')
        for db_file in db_files.itervalues():
            try:
                uname = utils.path_to_unicode(db_file.name)
                if db_file.type in ['D', 'F'] and not uname in local_files and not uname in local_folders:
                    self.do_backup_deleted(folder, db_file.name)
            except Exception as e:
                log.warn("Ignoring exception logging deleted file %s: %s" % (db_file.name, e))
        for local_folder in local_folders:
            try:
                local_path = os.path.join(folder, local_folder)
                if self.check_backup(local_path, local_folder, db_files):
                    self.do_backup_folder(folder, local_folder)
            except Exception as e:
                log.warn("Ignoring exception backing up folder %s: %s" % (local_path, e))
        # At the completion of a folder - we update the DB storage usage
        if not self.dry_run:
            self.bytes, self.hash = self.store_thread.get_hash()
            self.db.update_run_stats(self.bytes, self.nfiles, self.nfolders, self.backup.include_packages, self.hash)

    def lof_record(self, folder, name, type, mod_time=None, size=None):
        # Save the entry in the LOF
        log.trace("lof_record", folder, name)
        if folder != self.lof_folder:
            self.lof.write("\n%s\n" % utils.escape(folder))
            self.lof_folder = folder
        self.lof.write("%s,%s" % (type, utils.escape(name)))
        if mod_time:
            self.lof.write(',%s,%d' % (mod_time, size))
        self.lof.write("\n")

    def check_exclusion(self, path):
        _, ext = os.path.splitext(path)
        ext = ext[1:].lower()  # Remove the '.'
        # Is this file excluded by type
        if ext in self.excl_ext:
            return True
        # Is this file excluded by filename/folder/glob
        ancestors = utils.ancestor_paths(path)
        #log.debug("Ancestor Pathlist:", ",".join(ancestors))
        for patt in self.backup.exclude_patterns:
            for path in ancestors:
                if fnmatch.fnmatch(path, patt):
                    return True
        return False
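As a side note, check_exclusion() above tests every exclusion glob against the file's full path and each of its parent paths, so a single pattern can exclude an entire subtree. The project's utils.ancestor_paths is not shown on this page; the snippet below is a self-contained sketch of that idea using a hypothetical ancestor_paths() helper.

import fnmatch
import os

def ancestor_paths(path):
    # Hypothetical stand-in for utils.ancestor_paths: the path itself
    # plus each parent folder, so a glob can match at any level.
    paths = [path]
    while True:
        parent = os.path.dirname(paths[-1])
        if not parent or parent == paths[-1]:
            break
        paths.append(parent)
    return paths

patterns = ["*.tmp", "*/Cache/*"]
path = "/home/user/.mozilla/Cache/entry01"
excluded = any(fnmatch.fnmatch(p, patt)
               for patt in patterns
               for p in ancestor_paths(path))
print(excluded)   # True: the file sits under a Cache folder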