本文整理汇总了Python中sickbeard.logger.log函数的典型用法代码示例。如果您正苦于以下问题:Python log函数的具体用法?Python log怎么用?Python log使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。
在下文中一共展示了log函数的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: _check_github_for_update
def _check_github_for_update(self):
    """
    Ask github (via pygithub) whether there is a commit newer than the
    one we are currently running.

    Walks the branch history newest-first, remembering the newest commit
    hash and counting how many commits sit between it and our current
    one; the count ends up in self._num_commits_behind.
    """

    self._num_commits_behind = 0
    self._newest_commit_hash = None

    gh = github.GitHub()

    # find newest commit
    for remote_commit in gh.commits.forBranch('midgetspy', 'Sick-Beard', version.SICKBEARD_VERSION):
        # the first commit returned is the branch tip
        if self._newest_commit_hash is None:
            self._newest_commit_hash = remote_commit.id

        # with no local hash there is nothing to compare against
        if not self._cur_commit_hash:
            break

        if remote_commit.id == self._cur_commit_hash:
            break

        self._num_commits_behind += 1

    logger.log(u"newest: "+str(self._newest_commit_hash)+" and current: "+str(self._cur_commit_hash)+" and num_commits: "+str(self._num_commits_behind), logger.DEBUG)
示例2: save_thumbnail
def save_thumbnail(self, ep_obj):
    """
    Fetch a thumbnail for ep_obj and write it to the correct location.

    Implementing classes normally customise get_episode_thumb_path and
    _get_episode_thumb_url rather than overriding this method.

    ep_obj: a TVEpisode object for which to generate a thumbnail

    Returns True on success, False otherwise.
    """

    thumb_path = self.get_episode_thumb_path(ep_obj)
    if not thumb_path:
        logger.log(u"Unable to find a file path to use for this thumbnail, not generating it", logger.DEBUG)
        return False

    # if we can't find a thumbnail URL then give up
    url = self._get_episode_thumb_url(ep_obj)
    if not url:
        logger.log("No thumb is available for this episode, not creating a thumb", logger.DEBUG)
        return False

    image_data = metadata_helpers.getShowImage(url)

    if not self._write_image(image_data, thumb_path):
        return False

    # flag the episode (and any episodes sharing the file) as having a thumb
    for related_ep in [ep_obj] + ep_obj.relatedEps:
        related_ep.hastbn = True

    return True
示例3: _season_thumb_dict
def _season_thumb_dict(self, show_obj):
    """
    Should return a dict like:

    result = {<season number>:
                {1: '<url 1>', 2: <url 2>, ...},}
    """

    # This holds our resulting dictionary of season art
    result = {}

    # NOTE(review): this example appears truncated -- only the TVDB lookup and
    # its error path are visible; presumably the season art is collected into
    # `result` below this excerpt. Confirm against the full source.
    tvdb_lang = show_obj.lang

    try:
        # There's gotta be a better way of doing this but we don't wanna
        # change the language value elsewhere
        ltvdb_api_parms = sickbeard.TVDB_API_PARMS.copy()

        # only override the API language for non-English shows
        if tvdb_lang and not tvdb_lang == 'en':
            ltvdb_api_parms['language'] = tvdb_lang

        t = tvdb_api.Tvdb(banners=True, **ltvdb_api_parms)
        tvdb_show_obj = t[show_obj.tvdbid]
    except (tvdb_exceptions.tvdb_error, IOError), e:
        # lookup failed: log and return the (empty) result dict
        logger.log(u"Unable to look up show on TVDB, not downloading images: "+ex(e), logger.ERROR)
        return result
示例4: getURL
def getURL(url, headers=None):
    """
    Return the body of `url` as a byte-string, or None on HTTP error.

    url:     the URL to retrieve
    headers: optional list of extra (name, value) header tuples to send

    Transparently decompresses gzip/deflate-encoded responses.
    """
    # a mutable default argument ([]) is shared between calls; use None instead
    if headers is None:
        headers = []

    opener = urllib2.build_opener()
    opener.addheaders = [('User-Agent', USER_AGENT), ('Accept-Encoding', 'gzip,deflate')]
    for cur_header in headers:
        opener.addheaders.append(cur_header)

    try:
        usock = opener.open(url)
        url = usock.geturl()
        encoding = usock.info().get("Content-Encoding")

        if encoding in ('gzip', 'x-gzip', 'deflate'):
            content = usock.read()
            if encoding == 'deflate':
                data = StringIO.StringIO(zlib.decompress(content))
            else:
                data = gzip.GzipFile('', 'rb', 9, StringIO.StringIO(content))
            result = data.read()
        else:
            result = usock.read()

        usock.close()
    except urllib2.HTTPError as e:
        logger.log(u"HTTP error " + str(e.code) + " while loading URL " + url, logger.WARNING)
        return None

    # fixed: the original fell off the end and implicitly returned None even
    # when the download succeeded
    return result
示例5: create_https_certificates
def create_https_certificates(ssl_cert, ssl_key):
    """
    Create self-signed HTTPS certificates and store them at the paths
    'ssl_cert' and 'ssl_key'.

    Returns True on success, False if pyopenssl is missing or the files
    could not be written.
    """
    try:
        from OpenSSL import crypto  # @UnresolvedImport
        from lib.certgen import createKeyPair, createCertRequest, createCertificate, TYPE_RSA, serial  # @UnresolvedImport
    except ImportError:
        # narrowed from a bare except: only a missing module means
        # "please install pyopenssl"
        logger.log(u"pyopenssl module missing, please install for https access", logger.WARNING)
        return False

    # Create the CA Certificate
    # NOTE: 1024-bit RSA is weak by modern standards; kept for compatibility
    cakey = createKeyPair(TYPE_RSA, 1024)
    careq = createCertRequest(cakey, CN='Certificate Authority')
    cacert = createCertificate(careq, (careq, cakey), serial, (0, 60*60*24*365*10))  # ten years

    # Create the server certificate, signed by the CA above
    cname = 'SickBeard'
    pkey = createKeyPair(TYPE_RSA, 1024)
    req = createCertRequest(pkey, CN=cname)
    cert = createCertificate(req, (cacert, cakey), serial, (0, 60*60*24*365*10))  # ten years

    # Save the key and certificate to disk, closing the files deterministically
    try:
        with open(ssl_key, 'w') as key_file:
            key_file.write(crypto.dump_privatekey(crypto.FILETYPE_PEM, pkey))
        with open(ssl_cert, 'w') as cert_file:
            cert_file.write(crypto.dump_certificate(crypto.FILETYPE_PEM, cert))
    except Exception:
        # narrowed from a bare except so KeyboardInterrupt/SystemExit escape
        logger.log(u"Error creating SSL key and certificate", logger.ERROR)
        return False

    return True
示例6: _retrieve_show_images_from_fanart
def _retrieve_show_images_from_fanart(self, show, img_type, thumb=False):
    """
    Look up an image URL of the requested type for `show` on Fanart.tv.

    img_type: one of 'poster', 'banner', 'poster_thumb', 'banner_thumb',
              'fanart'
    thumb:    if True, rewrite the URL to point at the preview-sized image

    Returns the URL string, or None when nothing could be found.
    """
    fanart_types = {
        'poster': fanart.TYPE.TV.POSTER,
        'banner': fanart.TYPE.TV.BANNER,
        'poster_thumb': fanart.TYPE.TV.POSTER,
        'banner_thumb': fanart.TYPE.TV.BANNER,
        'fanart': fanart.TYPE.TV.BACKGROUND,
    }

    try:
        mapped_id = helpers.mapIndexersToShow(show)[1]
        if mapped_id:
            req = fanartRequest(
                apikey=sickbeard.FANART_API_KEY,
                id=mapped_id,
                ws=fanart.WS.TV,
                type=fanart_types[img_type],
                sort=fanart.SORT.POPULAR,
                limit=fanart.LIMIT.ONE,
            )
            art_url = req.response()[fanart_types[img_type]][0]['url']
            if thumb:
                art_url = re.sub('/fanart/', '/preview/', art_url)
            return art_url
    except Exception:
        # best-effort lookup: any failure falls through to the log below
        pass

    logger.log(u"Could not find any " + img_type + " images on Fanart.tv for " + show.name, logger.INFO)
示例7: _getRSSData
def _getRSSData(self):
    """
    Build the newznab cache-update URL for this provider and fetch its
    RSS data, patching in the XML declaration when the server omits it.
    """
    non_english = [lang for lang in helpers.getAllLanguages() if not lang == u"en"]

    # default SD+HD TV categories; foreign category when any non-English
    # language is configured
    cat = '5020' if len(non_english) > 0 else '5030,5040'

    params = {"t": "tvsearch",
              "cat": cat}

    # hack this in for now
    if self.provider.getID() == 'nzbs_org':
        params['cat'] += ',5070,5090'

    if self.provider.key:
        params['apikey'] = self.provider.key

    url = self.provider.url + 'api?' + urllib.urlencode(params)
    logger.log(self.provider.name + " cache update URL: " + url, logger.DEBUG)

    data = self.provider.getURL(url)

    # hack this in until it's fixed server side
    if data and not data.startswith('<?xml'):
        data = '<?xml version="1.0" encoding="ISO-8859-1" ?>' + data

    return data
示例8: filterBadReleases
def filterBadReleases(name):
    """
    Filters out non-english and just all-around stupid releases by comparing them
    to the resultFilters contents.

    name: the release name to check

    Returns: True if the release name is OK, False if it's bad.
    """
    cp = CompleteParser()
    cpr = cp.parse(name)
    parse_result = cpr.parse_result

    # use the extra info and the scene group to filter against
    check_string = ''
    if parse_result.extra_info:
        check_string = parse_result.extra_info
    if parse_result.release_group:
        if check_string:
            check_string = check_string + '-' + parse_result.release_group
        else:
            check_string = parse_result.release_group

    # if there's no info after the season info then assume it's fine
    if not check_string:
        return True

    # if any of the bad strings are in the name then say no
    for bad_word in resultFilters + sickbeard.IGNORE_WORDS.split(','):
        # an empty IGNORE_WORDS setting splits to [''], whose empty word would
        # build the pattern (^|[\W_])($|[\W_]) and falsely match -- skip blanks
        bad_word = bad_word.strip()
        if not bad_word:
            continue
        if re.search('(^|[\W_])' + bad_word + '($|[\W_])', check_string, re.I):
            logger.log(u"Invalid scene release: " + name + " contains " + bad_word + ", ignoring it", logger.DEBUG)
            return False

    return True
示例9: _find_installed_version
def _find_installed_version(self):
    """
    Parse the running build number out of SICKBEARD_VERSION.

    Returns the build number as an int, or None when the version string
    is not a recognised Windows-binary format.
    """
    version_string = sickbeard.version.SICKBEARD_VERSION
    try:
        # the numeric build id starts at index 6 of the version string
        return int(version_string[6:])
    except ValueError:
        logger.log(u"Unknown SickBeard Windows binary release: " + version_string, logger.ERROR)
        return None
示例10: getQuality
def getQuality(self, item):
    """
    Determine the Quality constant for a report item from its attribute
    elements.
    """
    attributes = item.find(self._report('attributes'))

    # group every attribute's text by its 'type' field
    attr_dict = {}
    for attribute in attributes.getiterator(self._report('attribute')):
        attr_dict.setdefault(attribute.attrib['type'], []).append(attribute.text)

    logger.log("Finding quality of item based on attributes "+str(attr_dict), logger.DEBUG)

    # checks run in the original elif order; first match wins
    ordered_checks = [
        (self._is_SDTV, Quality.SDTV),
        (self._is_SDDVD, Quality.SDDVD),
        (self._is_HDTV, Quality.HDTV),
        (self._is_WEBDL, Quality.HDWEBDL),
        (self._is_720pBluRay, Quality.HDBLURAY),
        (self._is_1080pBluRay, Quality.FULLHDBLURAY),
    ]

    quality = Quality.UNKNOWN
    for check, matched_quality in ordered_checks:
        if check(attr_dict):
            quality = matched_quality
            break

    logger.log("Resulting quality: "+str(quality), logger.DEBUG)

    return quality
示例11: updateCache
def updateCache(self):
    """
    Refresh the provider cache from the binaries RSS groups, if an update
    is due.
    """
    # check if we should update
    if not self.shouldUpdate():
        return

    # clear cache and record the refresh time
    self._clearCache()
    self.setLastUpdate()

    pending_actions = []
    groups = ['alt.binaries.hdtv', 'alt.binaries.hdtv.x264', 'alt.binaries.tv', 'alt.binaries.tvseries']
    for group in groups:
        feed_entries = self.getRSSFeed(self.provider.urls['rss'], {'max': 50, 'g': group})['entries']
        if not feed_entries:
            logger.log('No data returned from provider', logger.DEBUG)
            continue
        for entry in feed_entries:
            action = self._parseItem(entry)
            if action:
                pending_actions.append(action)

    if pending_actions:
        cache_db_con = self._getDB()
        cache_db_con.mass_action(pending_actions)
示例12: _getRSSData
def _getRSSData(self, search=None):
    """
    Fetch RSS search results from Newzbin.

    search: optional query string; it is ANDed with the fixed
            language/format restrictions.
    """
    params = {
        'searchaction': 'Search',
        'fpn': 'p',
        'category': 8,
        'u_nfo_posts_only': 0,
        'u_url_posts_only': 0,
        'u_comment_posts_only': 0,
        'u_show_passworded': 0,
        'u_v3_retention': 0,
        'ps_rb_source': 3008,
        'ps_rb_video_format': 3082257,
        'ps_rb_language': 4096,
        'sort': 'date',
        'order': 'desc',
        'u_post_results_amt': 50,
        'feed': 'rss',
        'hauth': 1,
    }

    # always restrict to English-language, non-DVD posts
    query = 'Attr:Lang~Eng AND NOT Attr:VideoF=DVD'
    if search:
        query = search + " AND " + query
    params['q'] = query

    url = self.url + "search/?%s" % urllib.urlencode(params)
    logger.log("Newzbin search URL: " + url, logger.DEBUG)

    return self.getURL(url)
示例13: filterBadReleases
def filterBadReleases(name):
    """
    Filters out non-english and just all-around stupid releases by comparing them
    to the resultFilters contents.

    name: the release name to check

    Returns: True if the release name is OK, False if it's bad.
    """
    try:
        fp = NameParser()
        fp.parse(name)
    except InvalidNameException:
        logger.log(u"Unable to parse the filename " + name + " into a valid episode", logger.WARNING)
        return False

    # Build the word list locally: the original extended the module-level
    # resultFilters list on every call, so ignore words accumulated and
    # duplicated across calls.
    word_filters = list(resultFilters)
    if sickbeard.IGNORE_WORDS:
        word_filters.extend(sickbeard.IGNORE_WORDS.split(','))

    # if any of the bad strings are in the name then say no
    # (also renamed the loop variables: the original shadowed builtin `filter`)
    patterns = [re.compile('(^|[\W_])%s($|[\W_])' % word.strip(), re.I) for word in word_filters]
    for pattern in patterns:
        if pattern.search(name):
            logger.log(u"Invalid scene release: " + name + " contains pattern: " + pattern.pattern + ", ignoring it", logger.DEBUG)
            return False
    return True
示例14: updateCache
def updateCache(self):
    """
    Refresh this provider's cache from an RSS search, if an update is due.

    Returns [] when the provider produced no results; otherwise parses the
    results and stores them in the cache database.
    """
    if not self.shouldUpdate():
        return

    rss_results = self.provider._doSearch({'RSS': ['']})

    if not rss_results:
        return []
    self.setLastUpdate()

    logger.log(u"Clearing " + self.provider.name + " cache and updating with new information")
    self._clearCache()

    queued = []
    for result in rss_results:
        # only the first two fields (title, url) of each result are used
        parsed = self._parseItem((result[0], result[1]))
        if parsed is not None:
            queued.append(parsed)

    if queued:
        myDB = self._getDB()
        myDB.mass_action(queued)
示例15: _season_banners_dict
def _season_banners_dict(self, show_obj, season):
    """
    Should return a dict like:

    result = {<season number>:
                {1: '<url 1>', 2: <url 2>, ...},}
    """

    # This holds our resulting dictionary of season art
    result = {}

    # NOTE(review): this example appears truncated -- only the indexer lookup
    # and its error path are visible; `season` is unused in this excerpt.
    indexer_lang = show_obj.lang

    try:
        # There's gotta be a better way of doing this but we don't wanna
        # change the language value elsewhere
        lINDEXER_API_PARMS = sickbeard.indexerApi(show_obj.indexer).api_params.copy()
        lINDEXER_API_PARMS['banners'] = True

        # only override the API language for non-default-language shows
        if indexer_lang and not indexer_lang == sickbeard.INDEXER_DEFAULT_LANGUAGE:
            lINDEXER_API_PARMS['language'] = indexer_lang

        t = sickbeard.indexerApi(show_obj.indexer).indexer(**lINDEXER_API_PARMS)
        indexer_show_obj = t[show_obj.indexerid]
    except (sickbeard.indexer_error, IOError) as e:
        logger.log(u"Unable to look up show on " + sickbeard.indexerApi(
            show_obj.indexer).name + ", not downloading images: " + ex(e), logger.WARNING)
        # fixed: the original string was missing the space before "maybe",
        # logging "...<name>maybe experiencing..."
        logger.log(u"Indexer " + sickbeard.indexerApi(show_obj.indexer).name + " maybe experiencing some problems. Try again later", logger.DEBUG)
        return result