本文整理汇总了Python中SiteStorage.SiteStorage.checkFiles方法的典型用法代码示例。如果您正苦于以下问题:Python SiteStorage.checkFiles方法的具体用法?Python SiteStorage.checkFiles怎么用?Python SiteStorage.checkFiles使用的例子?那么恭喜您,这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类SiteStorage.SiteStorage的用法示例。
在下文中一共展示了SiteStorage.checkFiles方法的2个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: __init__
# 需要导入模块: from SiteStorage import SiteStorage [as 别名]
# 或者: from SiteStorage.SiteStorage import checkFiles [as 别名]
#.........这里部分代码省略.........
peers_try = [] # Try these peers
queried = [] # Successfully queried from these peers
peers = self.peers.values()
random.shuffle(peers)
for peer in peers: # Try to find connected good peers, but we must have at least 5 peers
if peer.findConnection() and peer.connection.handshake.get("rev", 0) > 125: # Add to the beginning if rev125
peers_try.insert(0, peer)
elif len(peers_try) < 5: # Backup peers, add to end of the try list
peers_try.append(peer)
if since is None: # No since definied, download from last modification time-1day
since = self.settings.get("modified", 60 * 60 * 24) - 60 * 60 * 24
self.log.debug("Try to get listModifications from peers: %s since: %s" % (peers_try, since))
updaters = []
for i in range(3):
updaters.append(gevent.spawn(self.updater, peers_try, queried, since))
gevent.joinall(updaters, timeout=10) # Wait 10 sec to workers done query modifications
time.sleep(0.1)
self.log.debug("Queried listModifications from: %s" % queried)
return queried
# Update content.json from peers and download changed files
# Return: None
@util.Noparallel()
def update(self, announce=False):
self.content_manager.loadContent("content.json") # Reload content.json
self.content_updated = None # Reset content updated time
self.updateWebsocket(updating=True)
if announce:
self.announce()
queried = self.checkModifications()
if not queried: # Not found any client that supports listModifications
self.log.debug("Fallback to old-style update")
self.redownloadContents()
if not self.settings["own"]:
self.storage.checkFiles(quick_check=True) # Quick check files based on file size
changed, deleted = self.content_manager.loadContent("content.json")
if self.bad_files:
self.download()
self.settings["size"] = self.content_manager.getTotalSize() # Update site size
self.updateWebsocket(updated=True)
# Update site by redownload all content.json
def redownloadContents(self):
    """Force a fresh, parallel re-download of every known content.json."""
    # Start a non-blocking fetch for each content file we track.
    threads = [
        self.needFile(path, update=True, blocking=False)
        for path in self.content_manager.contents.keys()
    ]
    self.log.debug("Waiting %s content.json to finish..." % len(threads))
    gevent.joinall(threads)  # Block until every fetch has completed
# Publish worker
def publisher(self, inner_path, peers, published, limit, event_done=None):
    """Worker greenlet: push one updated file to peers until `limit` succeed.

    inner_path -- site-relative path of the file to publish
    peers -- shared work queue of Peer objects, consumed from the front
    published -- shared list collecting peers that acknowledged the update
    limit -- stop once this many peers have been published to
    event_done -- optional gevent event, set to True when this worker stops
    """
    file_size = self.storage.getSize(inner_path)
    body = self.storage.read(inner_path)  # Read the payload once for all peers
    while 1:
        if not peers or len(published) >= limit:
            if event_done:
                event_done.set(True)
            break  # All peers done, or published enough
        peer = peers.pop(0)
        # Timeout: 5sec + size in kb (+ last ping delay when already connected).
        # Fix: the original had a doubled "timeout = timeout =" assignment.
        timeout = 5 + int(file_size / 1024)
        if peer.connection and peer.connection.last_ping_delay:  # Peer connected
            timeout += peer.connection.last_ping_delay
        result = {"exception": "Timeout"}
        for retry in range(2):
            try:
                with gevent.Timeout(timeout, False):
                    result = peer.request("update", {
                        "site": self.address,
                        "inner_path": inner_path,
                        "body": body,
                        "peer": (config.ip_external, config.fileserver_port)
                    })
                if result:
                    break
            except Exception as err:  # "as" syntax: valid on Python 2.6+ and 3
                result = {"exception": Debug.formatException(err)}
        if result and "ok" in result:
            published.append(peer)
            self.log.info("[OK] %s: %s" % (peer.key, result["ok"]))
        else:
            if result == {"exception": "Timeout"}:
                peer.onConnectionError()  # Mark the peer as flaky on timeout
            self.log.info("[FAILED] %s: %s" % (peer.key, result))
示例2: __init__
# 需要导入模块: from SiteStorage import SiteStorage [as 别名]
# 或者: from SiteStorage.SiteStorage import checkFiles [as 别名]
#.........这里部分代码省略.........
if res != True: # Need downloading
file_threads.append(res) # Append evt
# Wait for includes download
include_threads = []
for file_relative_path in self.content_manager.contents[inner_path].get("includes", {}).keys():
file_inner_path = content_inner_dir+file_relative_path
include_thread = gevent.spawn(self.downloadContent, file_inner_path, download_files=download_files, peer=peer)
include_threads.append(include_thread)
self.log.debug("%s: Downloading %s includes..." % (inner_path, len(include_threads)))
gevent.joinall(include_threads)
self.log.debug("%s: Includes downloaded" % inner_path)
self.log.debug("%s: Downloading %s files, changed: %s..." % (inner_path, len(file_threads), len(changed)))
gevent.joinall(file_threads)
self.log.debug("%s: All file downloaded in %.2fs" % (inner_path, time.time()-s))
return True
# Return bad files with less than 3 retry
def getReachableBadFiles(self):
    """Return the bad files that still have fewer than 3 download retries.

    Returns False (not an empty list) when there are no bad files at all,
    kept for backward compatibility with existing callers.
    """
    if not self.bad_files:
        return False
    # .items() instead of Py2-only .iteritems(): identical iteration result
    # on Python 2, and also works on Python 3.
    return [bad_file for bad_file, retry in self.bad_files.items() if retry < 3]
# Retry download bad files
def retryBadFiles(self):
    """Queue a non-blocking re-download for every known bad file.

    Iterates over a list() snapshot of the keys: needFile() may modify
    self.bad_files while we loop, and on Python 3 .keys() is a live view
    (on Python 2 it is already a copy, so this is behavior-neutral there).
    """
    for bad_file in list(self.bad_files.keys()):
        self.needFile(bad_file, update=True, blocking=False)
# Download all files of the site
@util.Noparallel(blocking=False)
def download(self, check_size=False):
    """Download all files of the site.

    check_size -- when True, first fetch only the content.json files
    (download_files=False) to verify the site's size before pulling
    every file.
    Returns the result of downloading content.json, or False when the
    size check fails.
    """
    self.log.debug("Start downloading...%s" % self.bad_files)
    self.announce()
    if check_size:  # Check the size first
        # Fix: the original called a bare downloadContent(...) — a NameError —
        # it must be invoked on self and needs the content.json inner path.
        valid = self.downloadContent("content.json", download_files=False)
        if not valid:
            return False  # Can't download content.jsons or size does not fit
    found = self.downloadContent("content.json")
    return found
# Update content.json from peers and download changed files
@util.Noparallel()
def update(self):
self.content_manager.loadContent("content.json") # Reload content.json
self.content_updated = None
# Download all content.json again
content_threads = []
for inner_path in self.content_manager.contents.keys():
content_threads.append(self.needFile(inner_path, update=True, blocking=False))
self.log.debug("Waiting %s content.json to finish..." % len(content_threads))
gevent.joinall(content_threads)
changed = self.content_manager.loadContent("content.json")
if changed:
for changed_file in changed:
self.bad_files[changed_file] = self.bad_files.get(changed_file, 0)+1
if not self.settings["own"]: self.storage.checkFiles(quick_check=True) # Quick check files based on file size
if self.bad_files:
self.download()
self.settings["size"] = self.content_manager.getTotalSize() # Update site size
return changed
# Publish worker
def publisher(self, inner_path, peers, published, limit, event_done=None):
timeout = 5+int(self.storage.getSize(inner_path)/1024) # Timeout: 5sec + size in kb
while 1:
if not peers or len(published) >= limit:
if event_done: event_done.set(True)
break # All peers done, or published engouht
peer = peers.pop(0)
result = {"exception": "Timeout"}
for retry in range(2):
try:
with gevent.Timeout(timeout, False):
result = peer.request("update", {
"site": self.address,
"inner_path": inner_path,
"body": self.storage.open(inner_path).read(),
"peer": (config.ip_external, config.fileserver_port)
})
if result: break
except Exception, err:
result = {"exception": Debug.formatException(err)}
if result and "ok" in result:
published.append(peer)
self.log.info("[OK] %s: %s" % (peer.key, result["ok"]))
else:
self.log.info("[FAILED] %s: %s" % (peer.key, result))