本文整理汇总了Python中module.utils.save_join函数的典型用法代码示例。如果您正苦于以下问题:Python save_join函数的具体用法?Python save_join怎么用?Python save_join使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。
在下文中一共展示了save_join函数的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: packageFinished
def packageFinished(self, pypack):
    """Verify the checksum of every file referenced by the package's hash files.

    Scans the package's children for supported hash-list files (extensions in
    ``self.formats``), parses each one with the file-type specific regexp and
    compares the recorded digest against the locally computed one.

    :param pypack: finished package whose download folder is inspected
    """
    # Trailing "" forces a trailing separator on the joined path.
    download_folder = save_join(self.config['general']['download_folder'], pypack.folder, "")
    for link in pypack.getChildren().itervalues():
        file_type = splitext(link["name"])[1][1:].lower()
        if file_type not in self.formats:
            continue  # not a supported hash-list file (e.g. sfv/md5)
        hash_file = fs_encode(save_join(download_folder, link["name"]))
        if not isfile(hash_file):
            self.logWarning("File not found: %s" % link["name"])
            continue
        with open(hash_file) as f:
            text = f.read()
        for m in re.finditer(self.regexps.get(file_type, self.regexps['default']), text):
            data = m.groupdict()
            self.logDebug(link["name"], data)
            local_file = fs_encode(save_join(download_folder, data["name"]))
            algorithm = self.methods.get(file_type, file_type)
            checksum = computeChecksum(local_file, algorithm)
            # Hash files frequently list digests in upper-case hex while the
            # computed digest is lower-case -- compare case-insensitively so
            # matching files are not reported as corrupt.
            if checksum and checksum.lower() == data["hash"].lower():
                self.logInfo('File integrity of "%s" verified by %s checksum (%s).' % (data["name"],
                                                                                       algorithm,
                                                                                       checksum))
            else:
                self.logWarning("%s checksum for file %s does not match (%s != %s)" % (algorithm,
                                                                                       data["name"],
                                                                                       checksum,
                                                                                       data["hash"]))
示例2: downloads
def downloads():
    """Render the downloads page: one level of folders plus top-level files.

    :return: rendered ``downloads.html`` response, or the base page with an
             error message if the download directory does not exist
    """
    root = PYLOAD.getConfigValue("general", "download_folder")
    if not isdir(root):
        return base([_('Download directory not found.')])
    data = {
        'folder': [],
        'files': []
    }
    items = listdir(fs_encode(root))
    for item in sorted(fs_decode(x) for x in items):
        if isdir(save_join(root, item)):
            folder = {
                'name': item,
                'path': item,
                'files': []
            }
            files = listdir(save_join(root, item))
            for file in sorted(fs_decode(x) for x in files):
                try:
                    if isfile(save_join(root, item, file)):
                        folder['files'].append(file)
                except Exception:
                    # Best effort: skip entries that cannot be stat'ed or
                    # decoded (broken symlinks, odd filenames).
                    pass
            data['folder'].append(folder)
        # Use save_join here as well -- the original mixed in a plain join().
        elif isfile(save_join(root, item)):
            data['files'].append(item)
    return render_to_response('downloads.html', {'files': data}, [pre_processor])
示例3: package_extracted
def package_extracted(self, pypack):
    """After extraction, check whether archives are left and hand the folder on.

    :param pypack: the extracted package
    """
    download_folder = self.pyload.config['general']['download_folder']
    extract_destination = self.pyload.config.getPlugin("ExtractArchive", "destination")
    extract_subfolder = self.pyload.config.getPlugin("ExtractArchive", "subfolder")
    # Determine output folder; trailing "" forces a trailing slash.
    folder = save_join(download_folder, pypack.folder, extract_destination, "")
    if extract_subfolder is True:
        folder = save_join(folder, pypack.folder)
    archives_left = False
    if self.get_config('delete_extracted') is True:
        self.log_debug("MKV-Checkup (package_extracted)")
        # Both breaks limit the scan to the top level of `folder`.
        for root, dirs, files in os.walk(folder):
            for name in files:
                if name.endswith((".rar", ".r0", ".r12")):
                    self.log_debug("Hier sind noch Archive")
                    archives_left = True
                    break
            break
    if not archives_left:
        self.log_debug("Hier sind keine Archive")
    # The original called Finished() in BOTH branches of the conditional --
    # the check only controls the extra debug message, so call it once.
    self.Finished(folder)
示例4: list
def list(self, password=None):
    """List the archive contents as unique output paths.

    :param password: optional archive password
    :return: list of file paths inside the extraction folder
    :raises ArchiveError: if the archive cannot be opened
    """
    command = "vb" if self.fullpath else "lb"
    p = self.call_cmd(command, "-v", fs_encode(self.filename), password=password)
    out, err = p.communicate()
    if "Cannot open" in err:
        raise ArchiveError(_("Cannot open file"))
    if err.strip():  #: only log error at this point
        self.manager.logError(err.strip())
    result = set()
    if not self.fullpath and self.VERSION.startswith('5'):
        # NOTE: Unrar 5 always lists the full path, so reduce every entry
        # to its basename relative to the extraction folder.
        for f in fs_decode(out).splitlines():
            f = save_join(self.out, os.path.basename(f.strip()))
            if os.path.isfile(f):
                # `f` already is the joined path -- the original re-joined
                # save_join(self.out, basename(f)), which is the same value.
                result.add(f)
    else:
        for f in fs_decode(out).splitlines():
            result.add(save_join(self.out, f.strip()))
    return list(result)
示例5: process
def process(self, pyfile):
    """Pick the best-quality stream advertised on the page and download it.

    Parses ``mediaCollection.addMediaStream(...)`` calls out of the page,
    sorts them by their numeric quality id (descending) and downloads the
    best one -- over HTTP when possible, otherwise via RTMP.

    :param pyfile: the file object being processed (name/status are updated)
    """
    site = self.load(pyfile.url)
    # Each addMediaStream(0, quality, url, playpath, ...) call on the page
    # describes one available stream variant.
    avail_videos = re.findall(r"""mediaCollection.addMediaStream\(0, ([0-9]*), "([^\"]*)", "([^\"]*)", "[^\"]*"\);""", site)
    avail_videos.sort(key=lambda videodesc: int(videodesc[0]), reverse=True)  # The higher the number, the better the quality
    quality, url, playpath = avail_videos[0]
    # Page <h1> is used as the file name.
    pyfile.name = re.search(r"<h1>([^<]*)</h1>", site).group(1)
    if url.startswith("http"):
        # Best quality is available over HTTP. Very rare.
        self.download(url)
    else:
        # RTMP stream: create the target folder ourselves, then hand off to
        # the RTMP downloader.
        pyfile.setStatus("downloading")
        download_folder = self.config['general']['download_folder']
        location = save_join(download_folder, pyfile.package().folder)
        if not os.path.exists(location):
            # Folder permission value is configured as an octal string.
            os.makedirs(location, int(self.core.config["permission"]["folder"], 8))
            if self.core.config["permission"]["change_dl"] and os.name != "nt":
                try:
                    uid = getpwnam(self.config["permission"]["user"])[2]
                    gid = getgrnam(self.config["permission"]["group"])[2]
                    chown(location, uid, gid)
                except Exception, e:
                    # Best effort -- ownership change failure is only logged.
                    self.log.warning(_("Setting User and Group failed: %s") % str(e))
        # Keep the extension of the RTMP playpath for the output file.
        output_file = save_join(location, save_path(pyfile.name)) + os.path.splitext(playpath)[1]
        RTMP.download_rtmp_stream(url, playpath=playpath, output_file=output_file)
示例6: downloadFinished
def downloadFinished(self, pyfile):
    """Invoke every configured 'download_finished' script for *pyfile*."""
    base_folder = self.config['general']['download_folder']
    if self.config['general']['folder_per_package']:
        download_folder = save_join(base_folder, pyfile.package().folder)
    else:
        download_folder = base_folder
    for script in self.scripts['download_finished']:
        # Full path of the finished download, recomputed per script call.
        file = save_join(download_folder, pyfile.name)
        self.callScript(script, pyfile.id, pyfile.name, file, pyfile.pluginname, pyfile.url)
示例7: packageFinished
def packageFinished(self, pack):
files = {}
fid_dict = {}
for fid, data in pack.getChildren().iteritems():
if re.search("\.\d{3}$", data['name']):
if data['name'][:-4] not in files:
files[data['name'][:-4]] = []
files[data['name'][:-4]].append(data['name'])
files[data['name'][:-4]].sort()
fid_dict[data['name']] = fid
download_folder = self.config['general']['download_folder']
if self.config['general']['folder_per_package']:
download_folder = save_join(download_folder, pack.folder)
for name, file_list in files.iteritems():
self.logInfo(_("Starting merging of"), name)
with open(save_join(download_folder, name), "wb") as final_file:
for splitted_file in file_list:
self.logDebug("Merging part", splitted_file)
pyfile = self.core.files.getFile(fid_dict[splitted_file])
pyfile.setStatus("processing")
try:
with open(save_join(download_folder, splitted_file), "rb") as s_file:
size_written = 0
s_file_size = int(os.path.getsize(os.path.join(download_folder, splitted_file)))
while True:
f_buffer = s_file.read(self.BUFFER_SIZE)
if f_buffer:
final_file.write(f_buffer)
size_written += self.BUFFER_SIZE
pyfile.setProgress((size_written * 100) / s_file_size)
else:
break
self.logDebug("Finished merging part", splitted_file)
except Exception, e:
print_exc()
finally:
pyfile.setProgress(100)
pyfile.setStatus("finished")
pyfile.release()
示例8: checkForSameFiles
def checkForSameFiles(self, starting=False):
    """ checks if same file was/is downloaded within same package

    :param starting: indicates that the current download is going to start
    :raises SkipDownload:
    """
    pack = self.pyfile.package()
    # First pass: another cached pyfile with the same name in the same
    # package folder means this one is a duplicate.
    for pyfile in self.core.files.cache.values():
        if pyfile != self.pyfile and pyfile.name == self.pyfile.name and pyfile.package().folder == pack.folder:
            if pyfile.status in (0, 12):  # finished or downloading
                raise SkipDownload(pyfile.pluginname)
            elif pyfile.status in (5, 7) and starting:  # a download is waiting/starting and was apparently started before
                raise SkipDownload(pyfile.pluginname)
    download_folder = self.config['general']['download_folder']
    location = save_join(download_folder, pack.folder, self.pyfile.name)
    # Skip when an at-least-as-large file already exists on disk.
    if starting and self.core.config['download']['skip_existing'] and exists(location):
        size = os.stat(location).st_size
        if size >= self.pyfile.size:
            raise SkipDownload("File exists.")
    # Second pass: duplicates recorded in the database -- only skipped when
    # the file is also present on disk.
    pyfile = self.core.db.findDuplicates(self.pyfile.id, self.pyfile.package().folder, self.pyfile.name)
    if pyfile:
        if exists(location):
            raise SkipDownload(pyfile[0])
        self.log.debug("File %s not skipped, because it does not exists." % self.pyfile.name)
示例9: _copyChunks
def _copyChunks(self):
    """Concatenate all downloaded chunk files into the final file.

    Appends chunk 1..n onto chunk 0 in place, validates each chunk's size
    against the expected range, then moves the combined file to its final
    name (honouring a Content-Disposition name if present).

    :raises Exception: if a chunk is shorter than expected
    """
    init = fs_encode(self.info.getChunkName(0))  # initial chunk name
    if self.info.getCount() > 1:
        fo = open(init, "rb+")  # first chunkfile
        for i in range(1, self.info.getCount()):
            # input file
            # seek to beginning of chunk, to get rid of overlapping chunks
            fo.seek(self.info.getChunkRange(i - 1)[1] + 1)
            fname = fs_encode("%s.chunk%d" % (self.filename, i))
            fi = open(fname, "rb")
            buf = 32 * 1024
            while True:  # copy in chunks, consumes less memory
                data = fi.read(buf)
                if not data:
                    break
                fo.write(data)
            fi.close()
            # After appending, the write position must have reached at least
            # the end of this chunk's range; otherwise the download is short.
            if fo.tell() < self.info.getChunkRange(i)[1]:
                fo.close()
                remove(init)
                self.info.remove()  # there are probably invalid chunks
                raise Exception("Downloaded content was smaller than expected. Try to reduce download connections.")
            remove(fname)  # remove chunk
        fo.close()
    # Server-provided filename (Content-Disposition) wins if enabled.
    if self.nameDisposition and self.disposition:
        self.filename = save_join(dirname(self.filename), self.nameDisposition)
    move(init, fs_encode(self.filename))
    self.info.remove()  # remove info file
示例10: download
def download(self, url, get={}, post={}, ref=True, cookies=True, disposition=False):
    """Downloads the content at url to download folder

    :param url:
    :param get:
    :param post:
    :param ref:
    :param cookies:
    :param disposition: if True and server provides content-disposition header\
    the filename will be changed if needed
    :return: The location where the file was saved
    """
    # NOTE(review): `get={}`/`post={}` are mutable defaults -- safe only if
    # callees never mutate them; verify before changing.
    self.checkForSameFiles()
    self.pyfile.setStatus("downloading")
    download_folder = self.config['general']['download_folder']
    location = save_join(download_folder, self.pyfile.package().folder)
    if not exists(location):
        # Folder permission value is configured as an octal string.
        makedirs(location, int(self.core.config["permission"]["folder"], 8))
        if self.core.config["permission"]["change_dl"] and os.name != "nt":
            try:
                uid = getpwnam(self.config["permission"]["user"])[2]
                gid = getgrnam(self.config["permission"]["group"])[2]
                chown(location, uid, gid)
            except Exception, e:
                # Best effort -- ownership change failure is only logged.
                self.log.warning(_("Setting User and Group failed: %s") % str(e))
    # NOTE(review): this excerpt appears truncated -- the docstring promises
    # a return value, but no download/return code is visible here.
示例11: downloadFinished
def downloadFinished(self, pyfile):
    """Call Finished() with the package folder when *pyfile* has a configured extension.

    :param pyfile: the finished download
    """
    ext = os.path.splitext(pyfile.name)[1].replace('.', '')  # extension without the dot
    # str.split replaces the deprecated string.split() module function.
    # NOTE(review): entries are matched verbatim -- whitespace or case
    # differences in the "extensions" config value will not match.
    extensions = self.getConf("extensions").split(',')
    if ext in extensions:
        package = pyfile.package()
        folder = save_join(self.config['general']['download_folder'], package.folder)
        self.Finished(folder)
示例12: package_extracted
def package_extracted(self, pypack):
    """Invoke every configured 'package_extracted' script for *pypack*."""
    general = self.config['general']
    if general['folder_per_package']:
        download_folder = save_join(general['download_folder'], pypack.folder)
    else:
        download_folder = general['download_folder']
    for script in self.scripts['package_extracted']:
        self.callScript(script, pypack.id, pypack.name, download_folder)
示例13: packageDeleted
def packageDeleted(self, pid):
    """Invoke every configured 'package_deleted' script for the deleted package."""
    pack = self.core.api.getPackageInfo(pid)
    download_folder = self.config['general']['download_folder']
    if self.config['general']['folder_per_package']:
        download_folder = save_join(download_folder, pack.folder)
    for script in self.scripts['package_deleted']:
        self.callScript(script, pack.id, pack.name, download_folder, pack.password)
示例14: decrypt
def decrypt(self, pyfile):
    """Collect the video urls of a YouTube channel or playlist and queue them.

    :param pyfile: file whose url matches ``self.__pattern__`` (groups ID, TYPE)
    """
    match = re.match(self.__pattern__, pyfile.url)
    m_id = match.group("ID")
    m_type = match.group("TYPE")
    if m_type == "user":
        self.logDebug("Url recognized as Channel")
        user = m_id
        channel = self.getChannel(user)
        if channel:
            playlists = self.getPlaylists(channel["id"])
            # Message fix: '\s' in a non-raw string is a literal backslash-s;
            # the intended text is "playlist(s)".
            self.logDebug('%s playlist(s) found on channel "%s"' % (len(playlists), channel["title"]))
            relatedplaylist = {
                p_name: self.getPlaylist(p_id) for p_name, p_id in channel["relatedPlaylists"].iteritems()
            }
            self.logDebug("Channel's related playlists found = %s" % relatedplaylist.keys())
            relatedplaylist["uploads"]["title"] = "Unplaylisted videos"
            relatedplaylist["uploads"]["checkDups"] = True  #: checkDups flag
            for p_name, p_data in relatedplaylist.iteritems():
                if self.getConfig(p_name):
                    p_data["title"] += " of " + user
                    playlists.append(p_data)
        else:
            playlists = []
    else:
        self.logDebug("Url recognized as Playlist")
        playlists = [self.getPlaylist(m_id)]
    if not playlists:
        self.fail("No playlist available")

    def urlize(video_id):
        # Canonical watch url for a video id (def instead of named lambda).
        return "https://www.youtube.com/watch?v=" + video_id

    addedvideos = []
    for p in playlists:
        p_name = p["title"]
        p_videos = self.getVideosId(p["id"])
        p_folder = save_join(self.config["general"]["download_folder"], p["channelTitle"], p_name)
        self.logDebug('%s video(s) found on playlist "%s"' % (len(p_videos), p_name))
        if not p_videos:
            continue
        elif "checkDups" in p:
            # Skip videos already queued by a previously handled playlist.
            p_urls = [urlize(v_id) for v_id in p_videos if v_id not in addedvideos]
            self.logDebug('%s video(s) available on playlist "%s" after duplicates cleanup' % (len(p_urls), p_name))
        else:
            p_urls = map(urlize, p_videos)
        self.packages.append((p_name, p_urls, p_folder))  #: folder is NOT recognized by pyload 0.4.9!
        addedvideos.extend(p_videos)
示例15: getDeleteFiles
def getDeleteFiles(self):
    """Return the extracted archive file plus all of its multipart volumes.

    :return: list of paths to delete
    """
    folder, name = os.path.split(self.filename)  # 'dir' shadowed the builtin
    # actually extracted file
    files = [self.filename]
    # Hoisted loop invariant: the archive name with its multipart marker
    # normalized to ".rar", used to match sibling volumes.
    normalized = re.sub(self.re_multipart, ".rar", name)
    # eventually Multipart Files
    files.extend(
        save_join(folder, os.path.basename(entry))
        for entry in filter(self.isMultipart, os.listdir(folder))
        if re.sub(self.re_multipart, ".rar", entry) == normalized
    )
    return files