本文整理汇总了Python中sickbeard.helpers.real_path函数的典型用法代码示例。如果您正苦于以下问题:Python real_path函数的具体用法?Python real_path怎么用?Python real_path使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。
在下文中一共展示了real_path函数的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: delete_folder
def delete_folder(folder, check_empty=True):
    """
    Removes a folder from the filesystem

    :param folder: Path to folder to remove
    :param check_empty: Boolean, check if the folder is empty before removing it, defaults to True
    :return: True on success, False on failure
    """

    # check if it's a folder
    if not ek(os.path.isdir, folder):
        return False

    # check if it isn't TV_DOWNLOAD_DIR -- that directory must never be removed
    if sickbeard.TV_DOWNLOAD_DIR:
        if helpers.real_path(folder) == helpers.real_path(sickbeard.TV_DOWNLOAD_DIR):
            return False

    # check if it's empty folder when wanted checked
    if check_empty:
        check_files = ek(os.listdir, folder)
        if check_files:
            logger.log(u"Not deleting folder " + folder + " found the following files: " + str(check_files), logger.INFO)
            return False

    try:
        logger.log(u"Deleting folder (if it's empty): " + folder)
        os.rmdir(folder)
    except (OSError, IOError) as e:
        logger.log(u"Warning: unable to delete folder: " + folder + ": " + ex(e), logger.WARNING)
        return False

    # BUG FIX: the original fell off the end after a successful rmdir and
    # returned None, contradicting the documented ":return: True on success".
    return True
示例2: _delete_folder
def _delete_folder(self, folder, check_empty=True):
    """Remove *folder* from disk; return True when it is gone, False otherwise.

    :param folder: path of the directory to delete
    :param check_empty: when True, refuse to delete a non-empty directory
    """
    # only operate on things that actually are directories
    if not ek.ek(os.path.isdir, folder):
        return False

    # the configured TV download directory itself must never be removed
    if sickbeard.TV_DOWNLOAD_DIR:
        if helpers.real_path(sickbeard.TV_DOWNLOAD_DIR) == helpers.real_path(folder):
            return False

    # honour the emptiness check when requested
    if check_empty and ek.ek(os.listdir, folder):
        return False

    # attempt the actual removal
    try:
        shutil.rmtree(folder)
    except (OSError, IOError) as err:
        logger.log(u'Warning: unable to delete folder: %s: %s' % (folder, ex(err)), logger.WARNING)
        return False

    # rmtree can fail without raising on some platforms; verify it is gone
    if ek.ek(os.path.isdir, folder):
        logger.log(u'Warning: unable to delete folder: %s' % folder, logger.WARNING)
        return False

    self._log_helper(u'Deleted folder ' + folder, logger.MESSAGE)
    return True
示例3: _remove_old_zoneinfo
def _remove_old_zoneinfo():
    """Delete stale zoneinfo tarballs next to dateutil's zoneinfo module.

    Keeps only the tarball currently referenced by ``zoneinfo.ZONEINFOFILE``;
    every other ``*.tar.gz`` under that directory is removed.
    """
    if zoneinfo.ZONEINFOFILE is not None:
        cur_zoneinfo = ek.ek(basename, zoneinfo.ZONEINFOFILE)
    else:
        # no active zoneinfo file registered -- nothing to clean up
        return
    cur_file = helpers.real_path(ek.ek(join, ek.ek(os.path.dirname, zoneinfo.__file__), cur_zoneinfo))
    for (path, dirs, files) in ek.ek(os.walk, helpers.real_path(ek.ek(os.path.dirname, zoneinfo.__file__))):
        for filename in files:
            if filename.endswith(".tar.gz"):
                file_w_path = ek.ek(join, path, filename)
                if file_w_path != cur_file and ek.ek(isfile, file_w_path):
                    try:
                        ek.ek(os.remove, file_w_path)
                        logger.log(u"Delete unneeded old zoneinfo File: %s" % file_w_path)
                    except OSError:
                        # BUG FIX: was a bare ``except:`` which also swallowed
                        # SystemExit/KeyboardInterrupt; os.remove raises OSError.
                        logger.log(u"Unable to delete: %s" % file_w_path, logger.WARNING)
示例4: remove_zoneinfo
def remove_zoneinfo(cls):
    """Best-effort removal of every zoneinfo tarball under ZONEINFO_DIR."""
    zone_root = helpers.real_path(sickbeard.ZONEINFO_DIR)
    for walked_path, _walked_dirs, walked_files in ek.ek(os.walk, zone_root):
        for name in walked_files:
            if not name.endswith('.tar.gz'):
                continue
            target = ek.ek(os.path.join, walked_path, name)
            try:
                ek.ek(os.remove, target)
            except (StandardError, Exception):
                # deliberate best-effort: silently skip files we cannot remove
                pass
示例5: _remove_old_zoneinfo
def _remove_old_zoneinfo():
    """Remove outdated zoneinfo tarballs, keeping the currently active one.

    The active tarball name comes from ``zoneinfo._ZONEFILENAME``; every
    other ``*.tar.gz`` in dateutil's zoneinfo directory is deleted.
    """
    zonefilename = zoneinfo._ZONEFILENAME
    if None is zonefilename:
        # no zoneinfo file registered -- nothing to do
        return
    cur_zoneinfo = ek.ek(basename, zonefilename)
    cur_file = helpers.real_path(ek.ek(join, ek.ek(os.path.dirname, zoneinfo.__file__), cur_zoneinfo))
    for (path, dirs, files) in ek.ek(os.walk,
                                     helpers.real_path(ek.ek(os.path.dirname, zoneinfo.__file__))):
        for filename in files:
            if filename.endswith('.tar.gz'):
                file_w_path = ek.ek(join, path, filename)
                if file_w_path != cur_file and ek.ek(isfile, file_w_path):
                    try:
                        ek.ek(os.remove, file_w_path)
                        logger.log(u'Delete unneeded old zoneinfo File: %s' % file_w_path)
                    except OSError:
                        # BUG FIX: narrowed from a bare ``except:`` that also
                        # trapped SystemExit/KeyboardInterrupt.
                        logger.log(u'Unable to delete: %s' % file_w_path, logger.ERROR)
示例6: _remove_old_zoneinfo
def _remove_old_zoneinfo():
    """
    Removes zoneinfo tar.gz file from repository, as we do not need it

    Keeps only the tarball named by ``zoneinfo.ZONEINFOFILE``; all other
    ``*.tar.gz`` files in dateutil's zoneinfo directory are deleted.
    """
    if zoneinfo.ZONEINFOFILE is not None:
        cur_zoneinfo = ek(basename, zoneinfo.ZONEINFOFILE)
    else:
        # no active zoneinfo file -- nothing to clean up
        return
    cur_file = helpers.real_path(ek(join, ek(os.path.dirname, zoneinfo.__file__), cur_zoneinfo))
    for (path, dirs, files) in ek(os.walk, helpers.real_path(ek(os.path.dirname, zoneinfo.__file__))):
        for filename in files:
            if filename.endswith('.tar.gz'):
                file_w_path = ek(join, path, filename)
                if file_w_path != cur_file and ek(isfile, file_w_path):
                    try:
                        ek(os.remove, file_w_path)
                        logger.log(u'Delete unneeded old zoneinfo File: %s' % file_w_path)
                    except OSError:
                        # BUG FIX: narrowed from a bare ``except:`` that also
                        # trapped SystemExit/KeyboardInterrupt.
                        logger.log(u'Unable to delete: %s' % file_w_path, logger.WARNING)
示例7: update_network_dict
def update_network_dict():
    """Refresh the cached network -> timezone mapping from the bundled file.

    Reads network_timezones.txt (a git submodule), diffs it against the
    ``network_timezones`` table in cache.db, and applies all inserts,
    updates and deletes in a single batch before reloading the in-memory
    dictionary via ``load_network_dict()``.
    """
    _remove_old_zoneinfo()
    _update_zoneinfo()

    d = {}
    # network timezones are stored in a git submodule
    loc = helpers.real_path(ek.ek(join, ek.ek(os.path.dirname, __file__), u'../lib/network_timezones/network_timezones.txt'))
    with open(loc, 'r') as tz_file:  # renamed: ``file`` shadowed the builtin
        data = tz_file.read()
    if data is None:
        logger.log(u'Updating network timezones failed', logger.ERROR)
        load_network_dict()
        return
    try:
        # each line is "Network Name:Timezone"; rsplit keeps colons in names
        for line in data.splitlines():
            (key, val) = line.decode('utf-8').strip().rsplit(u':', 1)
            if key is None or val is None:
                continue
            d[key] = val
    except (IOError, OSError):
        pass

    my_db = db.DBConnection('cache.db')
    # load current network timezones
    old_d = dict(my_db.select('SELECT * FROM network_timezones'))

    # list of sql commands to update the network_timezones table
    cl = []
    for cur_d, cur_t in d.iteritems():
        # BUG FIX: dict.has_key() is deprecated; use the ``in`` operator
        h_k = cur_d in old_d
        if h_k and cur_t != old_d[cur_d]:
            # update old record
            cl.append(
                ['UPDATE network_timezones SET network_name=?, timezone=? WHERE network_name=?', [cur_d, cur_t, cur_d]])
        elif not h_k:
            # add new record
            cl.append(['INSERT INTO network_timezones (network_name, timezone) VALUES (?,?)', [cur_d, cur_t]])
        if h_k:
            del old_d[cur_d]

    # remove deleted records: whatever is left in old_d no longer exists in the file
    if old_d:
        old_items = list(old_d)
        cl.append(['DELETE FROM network_timezones WHERE network_name IN (%s)' % ','.join(['?'] * len(old_items)), old_items])

    # change all network timezone infos at once (much faster)
    if cl:
        my_db.mass_action(cl)
    load_network_dict()
示例8: delete_folder
def delete_folder(folder, check_empty=True):
    """Remove *folder* from disk.

    :param folder: path of the directory to delete
    :param check_empty: when True, refuse to delete a non-empty directory
    :return: True on success, False on failure
    """
    # check if it's a folder
    if not ek.ek(os.path.isdir, folder):
        return False

    # check if it isn't TV_DOWNLOAD_DIR -- never delete the download root
    if sickbeard.TV_DOWNLOAD_DIR:
        if helpers.real_path(folder) == helpers.real_path(sickbeard.TV_DOWNLOAD_DIR):
            return False

    # check if it's empty folder when wanted checked
    if check_empty:
        check_files = ek.ek(os.listdir, folder)
        if check_files:
            logger.log(u"Not deleting folder " + folder + " found the following files: " + str(check_files), logger.INFO)
            return False

    try:
        logger.log(u"Deleting folder (if it's empty): " + folder)
        os.rmdir(folder)
    except (OSError, IOError) as e:
        logger.log(u"Warning: unable to delete folder: " + folder + ": " + ex(e), logger.WARNING)
        return False

    # BUG FIX: the original fell off the end after a successful rmdir and
    # returned None; callers expect an explicit True on success.
    return True
示例9: delete_folder
def delete_folder(folder, check_empty=True):
    """
    Removes a folder from the filesystem

    :param folder: Path to folder to remove
    :param check_empty: Boolean, check if the folder is empty before removing it, defaults to True
    :return: True on success, False on failure
    """
    # nothing to do unless we were handed an actual directory
    if not ek(os.path.isdir, folder):
        return False

    # the TV download directory itself must never be removed
    if sickbeard.TV_DOWNLOAD_DIR and helpers.real_path(folder) == helpers.real_path(sickbeard.TV_DOWNLOAD_DIR):
        return False

    try:
        if check_empty:
            # only delete when the folder is actually empty
            contents = ek(os.listdir, folder)
            if contents:
                logging.info("Not deleting folder {} found the following files: {}".format(folder, contents))
                return False
            logging.info("Deleting folder (if it's empty): " + folder)
            ek(os.rmdir, folder)
        else:
            # unconditional recursive delete
            logging.info("Deleting folder: " + folder)
            ek(removetree, folder)
    except (OSError, IOError) as error:
        logging.warning("Warning: unable to delete folder: {}: {}".format(folder, ex(error)))
        return False

    return True
示例10: update_network_dict
def update_network_dict():
    """Synchronise the ``network_timezones`` table in cache.db with the
    bundled network_timezones.txt file.

    New networks are inserted, changed timezones updated, and networks no
    longer present in the file are purged; the in-memory dictionary is then
    reloaded via ``load_network_dict()``.
    """
    _remove_old_zoneinfo()
    _update_zoneinfo()

    loc = helpers.real_path(ek.ek(join, ek.ek(os.path.dirname, __file__), u'../lib/network_timezones/network_timezones.txt'))
    with open(loc, 'r') as tz_file:  # renamed: ``file`` shadowed the builtin
        data = tz_file.read()
    if data is None:
        logger.log(u'Updating network timezones failed', logger.ERROR)
        load_network_dict()
        return

    d = {}
    try:
        # each line is "Network Name:Timezone"; rsplit keeps colons in names
        for line in data.splitlines():
            (key, val) = line.decode('utf-8').strip().rsplit(u':', 1)
            if key is None or val is None:
                continue
            d[key] = val
    except (IOError, OSError):
        pass

    my_db = db.DBConnection('cache.db')
    network_list = dict(my_db.select('SELECT * FROM network_timezones;'))
    queries = []
    for network, timezone in d.iteritems():
        # BUG FIX: dict.has_key() is deprecated; use the ``in`` operator
        existing = network in network_list
        if not existing:
            queries.append(['INSERT OR IGNORE INTO network_timezones VALUES (?,?);', [network, timezone]])
        elif network_list[network] != timezone:
            # BUG FIX: the original used ``is not``, which compares object
            # identity rather than string equality, so unchanged timezones
            # read back from the DB were still queued for update.
            queries.append(['UPDATE OR IGNORE network_timezones SET timezone = ? WHERE network_name = ?;', [timezone, network]])
        if existing:
            del network_list[network]

    # whatever is left in network_list no longer exists in the file -> purge
    if network_list:
        purged = list(network_list)
        queries.append(['DELETE FROM network_timezones WHERE network_name IN (%s);' % ','.join(['?'] * len(purged)), purged])

    if queries:
        my_db.mass_action(queries)
    load_network_dict()
示例11: _update_zoneinfo
def _update_zoneinfo():
    """Download and install a newer dateutil zoneinfo tarball when available.

    Checks zoneinfo.txt on GitHub for the latest tz database version; when it
    differs from the installed one, downloads the tarball, verifies its MD5,
    swaps it into place, resets dateutil's cached zone instances and
    refreshes the module-level ``sb_timezone``.
    """
    global sb_timezone
    sb_timezone = tz.tzlocal()

    # now check if the zoneinfo needs update
    url_zv = 'https://raw.githubusercontent.com/Prinz23/sb_network_timezones/master/zoneinfo.txt'
    url_data = helpers.getURL(url_zv)
    if url_data is None:
        # When urlData is None, trouble connecting to github
        logger.log(u'Loading zoneinfo.txt failed, this can happen from time to time. Unable to get URL: %s' % url_zv,
                   logger.WARNING)
        return

    zonefilename = zoneinfo._ZONEFILENAME
    cur_zoneinfo = zonefilename
    if None is not cur_zoneinfo:
        cur_zoneinfo = ek.ek(basename, zonefilename)
    zonefile = helpers.real_path(ek.ek(join, ek.ek(os.path.dirname, zoneinfo.__file__), cur_zoneinfo))
    zonemetadata = zoneinfo.gettz_db_metadata() if ek.ek(os.path.isfile, zonefile) else None
    # manifest line is "<tarball-name> <md5>"
    (new_zoneinfo, zoneinfo_md5) = url_data.decode('utf-8').strip().rsplit(u' ')

    # extract the tz database version (e.g. "2016a") from the tarball name
    newtz_regex = re.search(r'(\d{4}[^.]+)', new_zoneinfo)
    if not newtz_regex or len(newtz_regex.groups()) != 1:
        return
    newtzversion = newtz_regex.group(1)

    # already on the advertised tz database version -- nothing to do
    if cur_zoneinfo is not None and zonemetadata is not None and 'tzversion' in zonemetadata and zonemetadata['tzversion'] == newtzversion:
        return

    # now load the new zoneinfo
    url_tar = u'https://raw.githubusercontent.com/Prinz23/sb_network_timezones/master/%s' % new_zoneinfo

    # download to a .tmp sibling first so a failed transfer never clobbers the live file
    zonefile_tmp = re.sub(r'\.tar\.gz$', '.tmp', zonefile)

    if ek.ek(os.path.exists, zonefile_tmp):
        try:
            ek.ek(os.remove, zonefile_tmp)
        except OSError:
            # BUG FIX: narrowed from a bare ``except:`` that also trapped
            # SystemExit/KeyboardInterrupt; os.remove raises OSError.
            logger.log(u'Unable to delete: %s' % zonefile_tmp, logger.ERROR)
            return

    if not helpers.download_file(url_tar, zonefile_tmp):
        return

    if not ek.ek(os.path.exists, zonefile_tmp):
        logger.log(u'Download of %s failed.' % zonefile_tmp, logger.ERROR)
        return

    new_hash = str(helpers.md5_for_file(zonefile_tmp))

    if zoneinfo_md5.upper() == new_hash.upper():
        logger.log(u'Updating timezone info with new one: %s' % new_zoneinfo, logger.MESSAGE)
        try:
            # remove the old zoneinfo file
            if cur_zoneinfo is not None:
                old_file = helpers.real_path(
                    ek.ek(join, ek.ek(os.path.dirname, zoneinfo.__file__), cur_zoneinfo))
                if ek.ek(os.path.exists, old_file):
                    ek.ek(os.remove, old_file)
            # rename downloaded file
            ek.ek(os.rename, zonefile_tmp, zonefile)
            # drop dateutil's cached zone instances so the new db takes effect
            from dateutil.zoneinfo import gettz
            if '_CLASS_ZONE_INSTANCE' in gettz.func_globals:
                gettz.func_globals.__setitem__('_CLASS_ZONE_INSTANCE', list())
            sb_timezone = tz.tzlocal()
        except Exception:
            # BUG FIX: narrowed from a bare ``except:``; clean up the temp
            # file if anything during the swap failed.
            _remove_zoneinfo_failed(zonefile_tmp)
            return
    else:
        _remove_zoneinfo_failed(zonefile_tmp)
        logger.log(u'MD5 hash does not match: %s File: %s' % (zoneinfo_md5.upper(), new_hash.upper()), logger.ERROR)
        return
示例12: _update_zoneinfo
def _update_zoneinfo():
    """Fetch the zoneinfo manifest from GitHub and install a newer tarball.

    Compares the advertised zoneinfo file name against the one dateutil is
    currently using; on mismatch downloads the new tarball, verifies its
    MD5, swaps it in, reloads dateutil's zoneinfo module and refreshes the
    module-level ``sb_timezone``.
    """
    global sb_timezone
    sb_timezone = tz.tzlocal()

    # now check if the zoneinfo needs update
    url_zv = 'https://github.com/Prinz23/sb_network_timezones/raw/master/zoneinfo.txt'
    url_data = helpers.getURL(url_zv)
    if url_data is None:
        # When urlData is None, trouble connecting to github
        logger.log(u"Loading zoneinfo.txt failed. Unable to get URL: " + url_zv, logger.ERROR)
        return

    if lib.dateutil.zoneinfo.ZONEINFOFILE is not None:
        cur_zoneinfo = ek.ek(basename, lib.dateutil.zoneinfo.ZONEINFOFILE)
    else:
        cur_zoneinfo = None
    # manifest line is "<tarball-name> <md5>"
    (new_zoneinfo, zoneinfo_md5) = url_data.decode('utf-8').strip().rsplit(u' ')

    # already on the advertised version -- nothing to do
    if (cur_zoneinfo is not None) and (new_zoneinfo == cur_zoneinfo):
        return

    # now load the new zoneinfo
    url_tar = u'https://github.com/Prinz23/sb_network_timezones/raw/master/' + new_zoneinfo
    zonefile = helpers.real_path(ek.ek(join, ek.ek(os.path.dirname, lib.dateutil.zoneinfo.__file__), new_zoneinfo))
    # download to a .tmp sibling first so a failed transfer never clobbers the live file
    zonefile_tmp = re.sub(r"\.tar\.gz$", '.tmp', zonefile)

    if ek.ek(os.path.exists, zonefile_tmp):
        try:
            ek.ek(os.remove, zonefile_tmp)
        except OSError:
            # BUG FIX: narrowed from a bare ``except:`` that also trapped
            # SystemExit/KeyboardInterrupt; os.remove raises OSError.
            logger.log(u"Unable to delete: " + zonefile_tmp, logger.ERROR)
            return

    if not helpers.download_file(url_tar, zonefile_tmp):
        return

    if not ek.ek(os.path.exists, zonefile_tmp):
        logger.log(u"Download of " + zonefile_tmp + " failed.", logger.ERROR)
        return

    new_hash = str(helpers.md5_for_file(zonefile_tmp))

    if zoneinfo_md5.upper() == new_hash.upper():
        logger.log(u"Updating timezone info with new one: " + new_zoneinfo, logger.MESSAGE)
        try:
            # remove the old zoneinfo file
            if cur_zoneinfo is not None:
                old_file = helpers.real_path(ek.ek(join, ek.ek(os.path.dirname, lib.dateutil.zoneinfo.__file__), cur_zoneinfo))
                if ek.ek(os.path.exists, old_file):
                    ek.ek(os.remove, old_file)
            # rename downloaded file
            ek.ek(os.rename, zonefile_tmp, zonefile)
            # load the new zoneinfo
            reload(lib.dateutil.zoneinfo)
            sb_timezone = tz.tzlocal()
        except Exception:
            # BUG FIX: narrowed from a bare ``except:``; clean up the temp
            # file if anything during the swap failed.
            _remove_zoneinfo_failed(zonefile_tmp)
            return
    else:
        _remove_zoneinfo_failed(zonefile_tmp)
        logger.log(u"MD5 HASH doesn't match: " + zoneinfo_md5.upper() + ' File: ' + new_hash.upper(), logger.ERROR)
        return
示例13: processDir
def processDir(dirName, nzbName=None, process_method=None, force=False, is_priority=None, failed=False, type="automatic"):
"""
Scans through the files in dirName and processes whatever media files it finds
dirName: The folder name to look in
nzbName: The NZB name which resulted in this folder being downloaded
force: True to postprocess already postprocessed files
failed: Boolean for whether or not the download failed
type: Type of postprocessing automatic or manual
"""
global process_result, returnStr
returnStr = u""
returnStr += logHelper(u"Processing folder " + dirName, logger.DEBUG)
returnStr += logHelper(u"TV_DOWNLOAD_DIR: " + sickbeard.TV_DOWNLOAD_DIR, logger.DEBUG)
# if they passed us a real dir then assume it's the one we want
if ek.ek(os.path.isdir, dirName):
dirName = ek.ek(os.path.realpath, dirName)
# if the client and Sickbeard are not on the same machine translate the Dir in a network dir
elif sickbeard.TV_DOWNLOAD_DIR and ek.ek(os.path.isdir, sickbeard.TV_DOWNLOAD_DIR) \
and helpers.real_path(dirName) != helpers.real_path(sickbeard.TV_DOWNLOAD_DIR):
dirName = ek.ek(os.path.join, sickbeard.TV_DOWNLOAD_DIR, ek.ek(os.path.abspath, dirName).split(os.path.sep)[-1])
returnStr += logHelper(u"Trying to use folder " + dirName, logger.DEBUG)
# if we didn't find a real dir then quit
if not ek.ek(os.path.isdir, dirName):
returnStr += logHelper(u"Unable to figure out what folder to process. If your downloader and Sick Beard aren't on the same PC make sure you fill out your TV download dir in the config.", logger.DEBUG)
return returnStr
path, dirs, files = get_path_dir_files(dirName, nzbName, type)
returnStr += logHelper(u"PostProcessing Path: " + path, logger.DEBUG)
returnStr += logHelper(u"PostProcessing Dirs: " + str(dirs), logger.DEBUG)
rarFiles = filter(helpers.isRarFile, files)
rarContent = unRAR(path, rarFiles, force)
files += rarContent
videoFiles = filter(helpers.isMediaFile, files)
videoInRar = filter(helpers.isMediaFile, rarContent)
returnStr += logHelper(u"PostProcessing Files: " + str(files), logger.DEBUG)
returnStr += logHelper(u"PostProcessing VideoFiles: " + str(videoFiles), logger.DEBUG)
returnStr += logHelper(u"PostProcessing RarContent: " + str(rarContent), logger.DEBUG)
returnStr += logHelper(u"PostProcessing VideoInRar: " + str(videoInRar), logger.DEBUG)
# If nzbName is set and there's more than one videofile in the folder, files will be lost (overwritten).
nzbNameOriginal = nzbName
if len(videoFiles) >= 2:
nzbName = None
if not process_method:
process_method = sickbeard.PROCESS_METHOD
process_result = True
#Don't Link media when the media is extracted from a rar in the same path
if process_method in ('hardlink', 'symlink') and videoInRar:
process_media(path, videoInRar, nzbName, 'move', force, is_priority)
delete_files(path, rarContent)
for video in set(videoFiles) - set(videoInRar):
process_media(path, [video], nzbName, process_method, force, is_priority)
else:
for video in videoFiles:
process_media(path, [video], nzbName, process_method, force, is_priority)
#Process Video File in all TV Subdir
for dir in [x for x in dirs if validateDir(path, x, nzbNameOriginal, failed)]:
process_result = True
for processPath, processDir, fileList in ek.ek(os.walk, ek.ek(os.path.join, path, dir), topdown=False):
rarFiles = filter(helpers.isRarFile, fileList)
rarContent = unRAR(processPath, rarFiles, force)
fileList = set(fileList + rarContent)
videoFiles = filter(helpers.isMediaFile, fileList)
videoInRar = filter(helpers.isMediaFile, rarContent)
notwantedFiles = [x for x in fileList if x not in videoFiles]
#Don't Link media when the media is extracted from a rar in the same path
if process_method in ('hardlink', 'symlink') and videoInRar:
process_media(processPath, videoInRar, nzbName, 'move', force, is_priority)
process_media(processPath, set(videoFiles) - set(videoInRar), nzbName, process_method, force, is_priority)
delete_files(processPath, rarContent)
else:
process_media(processPath, videoFiles, nzbName, process_method, force, is_priority)
#Avoid to delete files
if process_method != "move" or not process_result \
or type == "manual":
continue
delete_files(processPath, notwantedFiles)
if process_method == "move" \
#.........这里部分代码省略.........
示例14: validate_dir
def validate_dir(process_path, release_name, failed, result):  # pylint: disable=too-many-locals,too-many-branches,too-many-return-statements
    """
    Check if directory is valid for processing

    :param process_path: Directory to check
    :param release_name: Original NZB/Torrent name
    :param failed: Previously failed objects
    :param result: Previous results
    :return: True if dir is valid for processing, False if not
    """
    result.output += log_helper("Processing folder " + process_path, logger.DEBUG)

    # classify the folder by downloader-applied name markers
    upper_name = ek(os.path.basename, process_path).upper()
    if upper_name.startswith('_FAILED_') or upper_name.endswith('_FAILED_'):
        result.output += log_helper("The directory name indicates it failed to extract.", logger.DEBUG)
        failed = True
    elif upper_name.startswith('_UNDERSIZED_') or upper_name.endswith('_UNDERSIZED_'):
        result.output += log_helper("The directory name indicates that it was previously rejected for being undersized.", logger.DEBUG)
        failed = True
    elif upper_name.startswith('_UNPACK') or upper_name.endswith('_UNPACK'):
        # unpack still in progress: skip now, do not mark as failed
        result.output += log_helper("The directory name indicates that this release is in the process of being unpacked.", logger.DEBUG)
        result.missed_files.append("{0} : Being unpacked".format(process_path))
        return False

    if failed:
        # hand off to failed-download handling and reject the folder
        process_failed(process_path, release_name, result)
        result.missed_files.append("{0} : Failed download".format(process_path))
        return False

    # hidden folders are ignored unless the folder IS the download root itself
    if sickbeard.TV_DOWNLOAD_DIR and helpers.real_path(process_path) != helpers.real_path(sickbeard.TV_DOWNLOAD_DIR) and helpers.is_hidden_folder(process_path):
        result.output += log_helper("Ignoring hidden folder: {0}".format(process_path), logger.DEBUG)
        result.missed_files.append("{0} : Hidden folder".format(process_path))
        return False

    # make sure the dir isn't inside a show dir
    main_db_con = db.DBConnection()
    sql_results = main_db_con.select("SELECT location FROM tv_shows")

    for sqlShow in sql_results:
        # reject paths equal to, or nested under, any show's location
        if process_path.lower().startswith(ek(os.path.realpath, sqlShow[b"location"]).lower() + os.sep) or \
                process_path.lower() == ek(os.path.realpath, sqlShow[b"location"]).lower():
            result.output += log_helper(
                "Cannot process an episode that's already been moved to its show dir, skipping " + process_path,
                logger.WARNING)
            return False

    # walk bottom-up looking for at least one parseable media (or rar) item
    for current_directory, directory_names, file_names in ek(os.walk, process_path, topdown=False, followlinks=sickbeard.PROCESSOR_FOLLOW_SYMLINKS):
        sync_files = filter(is_sync_file, file_names)
        if sync_files and sickbeard.POSTPONE_IF_SYNC_FILES:
            # a sync client (e.g. BTSync/Resilio) is still writing here -- postpone
            result.output += log_helper("Found temporary sync files: {0} in path: {1}".format(sync_files, ek(os.path.join, process_path, sync_files[0])))
            result.output += log_helper("Skipping post processing for folder: {0}".format(process_path))
            result.missed_files.append("{0} : Sync files found".format(ek(os.path.join, process_path, sync_files[0])))
            continue

        found_files = filter(helpers.is_media_file, file_names)
        if sickbeard.UNPACK == 1:
            # when unpacking is enabled, rar archives count as processable too
            found_files += filter(helpers.is_rar_file, file_names)

        if current_directory != sickbeard.TV_DOWNLOAD_DIR and found_files:
            # also try the directory name itself as a parse candidate
            found_files.append(ek(os.path.basename, current_directory))

        for found_file in found_files:
            try:
                NameParser().parse(found_file, cache_result=False)
            except (InvalidNameException, InvalidShowException) as e:
                # unparseable name: try the next candidate
                pass
            else:
                # one parseable item is enough to accept the folder
                return True

    result.output += log_helper("{0} : No processable items found in folder".format(process_path), logger.DEBUG)
    return False
示例15: test_real_path
def test_real_path(self):
    """
    Test real_path

    A path containing a ``..`` traversal segment must resolve to the same
    value as the already-normalised path.
    """
    traversal_form = helpers.real_path('/usr/SickRage/../root/real/path/')
    resolved_form = helpers.real_path('/usr/root/real/path/')
    self.assertEqual(traversal_form, resolved_form)