本文整理汇总了Python中sickrage.helper.common.convert_size函数的典型用法代码示例。如果您正苦于以下问题:Python convert_size函数的具体用法?Python convert_size怎么用?Python convert_size使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。
在下文中一共展示了convert_size函数的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: search
def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals
results = []
if self.show and not self.show.is_anime:
return results
for mode in search_strings:
items = []
logger.log(u"Search Mode: {}".format(mode), logger.DEBUG)
for search_string in search_strings[mode]:
if mode != 'RSS':
logger.log(u"Search string: {}".format(search_string.decode("utf-8")),
logger.DEBUG)
params = {
"page": 'rss',
"cats": '1_0', # All anime
"sort": 2, # Sort Descending By Seeders
"order": 1
}
if mode != 'RSS':
params["term"] = search_string
search_url = self.url + '?' + urlencode(params)
logger.log(u"Search URL: %s" % search_url, logger.DEBUG)
summary_regex = ur"(\d+) seeder\(s\), (\d+) leecher\(s\), \d+ download\(s\) - (\d+.?\d* [KMGT]iB)(.*)"
s = re.compile(summary_regex, re.DOTALL)
results = []
for curItem in self.cache.getRSSFeed(search_url)['entries'] or []:
title = curItem['title']
download_url = curItem['link']
if not all([title, download_url]):
continue
seeders, leechers, torrent_size, verified = s.findall(curItem['summary'])[0]
size = convert_size(torrent_size) or -1
# Filter unseeded torrent
if seeders < self.minseed or leechers < self.minleech:
if mode != 'RSS':
logger.log(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {} (S:{} L:{})".format
(title, seeders, leechers), logger.DEBUG)
continue
if self.confirmed and not verified and mode != 'RSS':
logger.log(u"Found result " + title + " but that doesn't seem like a verified result so I'm ignoring it", logger.DEBUG)
continue
item = title, download_url, size, seeders, leechers
if mode != 'RSS':
logger.log(u"Found result: %s with %s seeders and %s leechers" % (title, seeders, leechers), logger.DEBUG)
items.append(item)
# For each search mode sort all the items by seeders if available
items.sort(key=lambda tup: tup[3], reverse=True)
results += items
return results
示例2: search
def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-many-locals, too-many-branches
    """Query the provider's XML feed and collect torrent results.

    :param search_strings: dict mapping a mode ('RSS', 'Season', 'Episode') to a
        list of query strings for that mode.
    :param age: unused, kept for interface compatibility.
    :param ep_obj: unused, kept for interface compatibility.
    :return: list of (title, download_url, size, seeders, leechers) tuples,
        sorted by seeders within each mode.
    """
    results = []
    for mode in search_strings:
        items = []
        logger.log(u"Search Mode: %s" % mode, logger.DEBUG)
        for search_string in search_strings[mode]:
            if mode != 'RSS':
                logger.log(u"Search string: %s " % search_string, logger.DEBUG)

            # 'rss' type for the backlog feed, 'search' for explicit queries.
            self.search_params.update({'type': ('search', 'rss')[mode == 'RSS'], 'search': search_string})

            # Swap in the user's mirror URL if one is configured.
            url = self.urls['rss'] if not self.custom_url else self.urls['rss'].replace(self.urls['index'], self.custom_url)
            data = self.get_url(url, params=self.search_params)
            if not data:
                logger.log(u"No data returned from provider", logger.DEBUG)
                continue

            if not data.startswith('<?xml'):
                logger.log(u'Expected xml but got something else, is your mirror failing?', logger.INFO)
                continue

            with BS4Parser(data, 'html5lib') as parser:
                for item in parser.findAll('item'):
                    try:
                        # Title may be wrapped in a CDATA section; strip the markers.
                        title = re.sub(r'^<!\[CDATA\[|\]\]>$', '', item.find('title').get_text(strip=True))
                        seeders = try_int(item.find('seeders').get_text(strip=True))
                        leechers = try_int(item.find('leechers').get_text(strip=True))
                        torrent_size = item.find('size').get_text()
                        size = convert_size(torrent_size) or -1
                        if sickbeard.TORRENT_METHOD == 'blackhole':
                            # Blackhole needs a .torrent file URL rather than a magnet.
                            enclosure = item.find('enclosure')  # Backlog doesnt have enclosure
                            download_url = enclosure['url'] if enclosure else item.find('link').next.strip()
                            download_url = re.sub(r'(.*)/torrent/(.*).html', r'\1/download/\2.torrent', download_url)
                        else:
                            # Other torrent methods can use a magnet built from the info hash.
                            info_hash = item.find('info_hash').get_text(strip=True)
                            download_url = "magnet:?xt=urn:btih:" + info_hash + "&dn=" + title + self._custom_trackers
                    except (AttributeError, TypeError, KeyError, ValueError):
                        # Malformed item; skip it.
                        continue

                    if not all([title, download_url]):
                        continue

                    # Filter unseeded torrent
                    if seeders < self.minseed or leechers < self.minleech:
                        if mode != 'RSS':
                            logger.log(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(title, seeders, leechers), logger.DEBUG)
                        continue

                    item = title, download_url, size, seeders, leechers
                    if mode != 'RSS':
                        logger.log(u"Found result: %s " % title, logger.DEBUG)

                    items.append(item)

        # For each search mode sort all the items by seeders if available
        items.sort(key=lambda tup: tup[3], reverse=True)
        results += items

    return results
示例3: search
def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-many-locals
    """Scrape the provider's HTML listing pages for torrent results.

    :param search_strings: dict mapping a mode ('RSS', 'Season', 'Episode') to a
        list of query strings for that mode.
    :param age: unused, kept for interface compatibility.
    :param ep_obj: unused, kept for interface compatibility.
    :return: list of result dicts (title/link/size/seeders/leechers/hash),
        sorted by seeders within each mode.
    """
    results = []
    for mode in search_strings:
        items = []
        logger.log("Search Mode: {0}".format(mode), logger.DEBUG)
        for search_string in search_strings[mode]:
            if mode == 'Season':
                # This French site labels seasons "Saison"; rewrite S01-style tokens.
                search_string = re.sub(r'(.*)S0?', r'\1Saison ', search_string)

            if mode != 'RSS':
                logger.log("Search string: {0}".format
                           (search_string.decode("utf-8")), logger.DEBUG)

                # Explicit search, sorted by seeds descending.
                search_url = self.url + '/recherche/' + search_string.replace('.', '-').replace(' ', '-') + '.html,trie-seeds-d'
            else:
                # RSS mode: browse the series category, newest first.
                search_url = self.url + '/view_cat.php?categorie=series&trie=date-d'

            data = self.get_url(search_url, returns='text')
            if not data:
                continue

            with BS4Parser(data, 'html5lib') as html:
                # Result rows alternate between classes 'ligne0' and 'ligne1'.
                torrent_rows = html(class_=re.compile('ligne[01]'))
                for result in torrent_rows:
                    try:
                        title = result.find(class_="titre").get_text(strip=True).replace("HDTV", "HDTV x264-CPasBien")
                        # Translate back so SickRage's parser recognizes the season token.
                        title = re.sub(r' Saison', ' Season', title, flags=re.I)
                        tmp = result.find("a")['href'].split('/')[-1].replace('.html', '.torrent').strip()
                        download_url = (self.url + '/telechargement/{0}'.format(tmp))
                        if not all([title, download_url]):
                            continue

                        seeders = try_int(result.find(class_="up").get_text(strip=True))
                        leechers = try_int(result.find(class_="down").get_text(strip=True))
                        if seeders < self.minseed or leechers < self.minleech:
                            if mode != 'RSS':
                                logger.log("Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format
                                           (title, seeders, leechers), logger.DEBUG)
                            continue

                        torrent_size = result.find(class_="poid").get_text(strip=True)
                        # French size units (octets): o/Ko/Mo/Go/To/Po.
                        units = ['o', 'Ko', 'Mo', 'Go', 'To', 'Po']
                        size = convert_size(torrent_size, units=units) or -1

                        item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'hash': ''}
                        if mode != 'RSS':
                            logger.log("Found result: {0} with {1} seeders and {2} leechers".format(title, seeders, leechers), logger.DEBUG)

                        items.append(item)
                    except StandardError:
                        # Malformed row; skip it.
                        continue

        # For each search mode sort all the items by seeders if available
        items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True)
        results += items

    return results
示例4: search
def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-many-locals
    """Query the provider's XML feed and build magnet-link results.

    :param search_strings: dict mapping a mode ('RSS', 'Season', 'Episode') to a
        list of query strings for that mode.
    :param age: unused, kept for interface compatibility.
    :param ep_obj: unused, kept for interface compatibility.
    :return: list of result dicts (title/link/size/seeders/leechers/hash),
        sorted by seeders within each mode.
    """
    results = []
    for mode in search_strings:
        items = []
        logger.log(u"Search Mode: {0}".format(mode), logger.DEBUG)
        for search_string in search_strings[mode]:
            # Feed verified does not exist on this clone
            # search_url = self.urls['verified'] if self.confirmed else self.urls['feed']
            search_url = self.urls['feed']
            if mode != 'RSS':
                logger.log(u"Search string: {0}".format
                           (search_string.decode("utf-8")), logger.DEBUG)

            data = self.get_url(search_url, params={'f': search_string}, returns='text')
            if not data:
                logger.log(u"No data returned from provider", logger.DEBUG)
                continue

            if not data.startswith("<?xml"):
                logger.log(u"Expected xml but got something else, is your mirror failing?", logger.INFO)
                continue

            try:
                with BS4Parser(data, 'html5lib') as parser:
                    for item in parser('item'):
                        # Only keep TV-category entries when a category is present.
                        if item.category and 'tv' not in item.category.get_text(strip=True).lower():
                            continue

                        title = item.title.get_text(strip=True)
                        # The info hash is the last path segment of the guid URL.
                        t_hash = item.guid.get_text(strip=True).rsplit('/', 1)[-1]

                        if not all([title, t_hash]):
                            continue

                        download_url = "magnet:?xt=urn:btih:" + t_hash + "&dn=" + title + self._custom_trackers
                        # Size/seeders/leechers are packed into the description text.
                        torrent_size, seeders, leechers = self._split_description(item.find('description').text)
                        size = convert_size(torrent_size) or -1

                        # Filter unseeded torrent
                        if seeders < self.minseed or leechers < self.minleech:
                            if mode != 'RSS':
                                logger.log(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format
                                           (title, seeders, leechers), logger.DEBUG)
                            continue

                        result = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'hash': t_hash}
                        items.append(result)
            except StandardError:
                logger.log(u"Failed parsing provider. Traceback: {0!r}".format(traceback.format_exc()), logger.ERROR)

        # For each search mode sort all the items by seeders if available
        items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True)
        results += items

    return results
示例5: search
def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-many-locals
    """Query the provider's XML feed (verified feed if confirmed-only is set).

    :param search_strings: dict mapping a mode ('RSS', 'Season', 'Episode') to a
        list of query strings for that mode.
    :param age: unused, kept for interface compatibility.
    :param ep_obj: unused, kept for interface compatibility.
    :return: list of (title, download_url, size, seeders, leechers) tuples,
        sorted by seeders within each mode.
    """
    results = []
    for mode in search_strings:
        items = []
        logger.log(u"Search Mode: {}".format(mode), logger.DEBUG)
        for search_string in search_strings[mode]:
            # Use the verified-only feed when the user wants confirmed results.
            search_url = self.urls['verified'] if self.confirmed else self.urls['feed']
            if mode != 'RSS':
                logger.log(u"Search string: {}".format(search_string.decode("utf-8")),
                           logger.DEBUG)
                search_url += '?q=' + urllib.parse.quote_plus(search_string)

            logger.log(u"Search URL: %s" % search_url, logger.DEBUG)
            data = self.get_url(search_url)
            if not data:
                logger.log(u"No data returned from provider", logger.DEBUG)
                continue

            if not data.startswith("<?xml"):
                logger.log(u"Expected xml but got something else, is your mirror failing?", logger.INFO)
                continue

            try:
                with BS4Parser(data, 'html5lib') as parser:
                    for item in parser.findAll('item'):
                        # Only keep TV-category entries when a category is present.
                        if item.category and 'tv' not in item.category.text:
                            continue

                        # Drop the trailing token and normalize spaces to dots.
                        title = item.title.text.rsplit(' ', 1)[0].replace(' ', '.')
                        # The info hash is the last path segment of the guid URL.
                        t_hash = item.guid.text.rsplit('/', 1)[-1]

                        if not all([title, t_hash]):
                            continue

                        download_url = "magnet:?xt=urn:btih:" + t_hash + "&dn=" + title + self._custom_trackers
                        # Size/seeders/leechers are packed into the description text.
                        torrent_size, seeders, leechers = self._split_description(item.find('description').text)
                        size = convert_size(torrent_size) or -1

                        # Filter unseeded torrent
                        if seeders < self.minseed or leechers < self.minleech:
                            if mode != 'RSS':
                                logger.log(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {} (S:{} L:{})".format
                                           (title, seeders, leechers), logger.DEBUG)
                            continue

                        items.append((title, download_url, size, seeders, leechers))
            except StandardError:
                logger.log(u"Failed parsing provider. Traceback: %r" % traceback.format_exc(), logger.ERROR)

        # For each search mode sort all the items by seeders if available
        items.sort(key=lambda tup: tup[3], reverse=True)
        results += items

    return results
示例6: search
def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-many-locals
    """Query the provider's JSON API for torrent results.

    The API does not report seeders/leechers, so results are given a nominal
    1 seeder / 0 leechers.

    :param search_strings: dict mapping a mode ('RSS', 'Season', 'Episode') to a
        list of query strings for that mode.
    :param age: unused, kept for interface compatibility.
    :param ep_obj: unused, kept for interface compatibility.
    :return: list of result dicts (title/link/size/seeders/leechers/hash).
    """
    results = []
    search_params = {"p": 0}
    for mode in search_strings:
        items = []
        logger.log("Search Mode: {0}".format(mode), logger.DEBUG)
        for search_string in search_strings[mode]:
            search_params["q"] = search_string
            if mode != "RSS":
                search_params["order"] = 0  # order by relevance for explicit searches
                logger.log("Search string: {0}".format
                           (search_string.decode("utf-8")), logger.DEBUG)
            else:
                search_params["order"] = 2  # order by date for the RSS backlog

            jdata = self.get_url(self.urls["api"], params=search_params, returns="json")
            if not jdata:
                logger.log("Provider did not return data", logger.DEBUG)
                continue

            for torrent in jdata:
                try:
                    title = torrent.pop("name", "")
                    download_url = torrent.pop("magnet") + self._custom_trackers if torrent["magnet"] else None
                    if not all([title, download_url]):
                        continue

                    # Capture the fake-level BEFORE popping it: the original code
                    # popped "ff" and then read torrent["ff"] in the log call,
                    # raising KeyError so the "reported as fake" message was
                    # never actually logged.
                    fake_level = torrent.pop("ff")
                    if float(fake_level):
                        logger.log("Ignoring result for {0} since it's been reported as fake (level = {1})".format
                                   (title, fake_level), logger.DEBUG)
                        continue

                    if not int(torrent.pop("files")):
                        logger.log("Ignoring result for {0} because it has no files".format
                                   (title), logger.DEBUG)
                        continue

                    # Provider doesn't provide seeders/leechers
                    seeders = 1
                    leechers = 0
                    torrent_size = torrent.pop("size")
                    size = convert_size(torrent_size) or -1

                    item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'hash': ''}
                    if mode != "RSS":
                        logger.log("Found result: {0} ".format(title), logger.DEBUG)

                    items.append(item)
                except StandardError:
                    continue

        results += items

    return results
示例7: search
def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-many-locals
    """Scrape the provider's HTML listing pages for torrent results.

    :param search_strings: dict mapping a mode ('RSS', 'Season', 'Episode') to a
        list of query strings for that mode.
    :param age: unused, kept for interface compatibility.
    :param ep_obj: unused, kept for interface compatibility.
    :return: list of (title, download_url, size, seeders, leechers) tuples,
        sorted by seeders within each mode.
    """
    results = []
    for mode in search_strings:
        items = []
        logger.log(u"Search Mode: %s" % mode, logger.DEBUG)
        for search_string in search_strings[mode]:
            if mode != 'RSS':
                logger.log(u"Search string: %s " % search_string, logger.DEBUG)

                # Explicit search, sorted by seeds descending.
                search_url = self.url + '/recherche/' + search_string.replace('.', '-').replace(' ', '-') + '.html,trie-seeds-d'
            else:
                # RSS mode: browse the series category, newest first.
                search_url = self.url + '/view_cat.php?categorie=series&trie=date-d'
            logger.log(u"Search URL: %s" % search_url, logger.DEBUG)

            data = self.get_url(search_url)
            if not data:
                continue

            with BS4Parser(data, 'html5lib') as html:
                # Result rows alternate between classes 'ligne0' and 'ligne1'.
                torrent_rows = html.find_all(class_=re.compile('ligne[01]'))
                for result in torrent_rows:
                    try:
                        title = result.find(class_="titre").get_text(strip=True).replace("HDTV", "HDTV x264-CPasBien")
                        # Translate the French season token so SickRage can parse it.
                        title = re.sub(r' Saison', ' Season', title, flags=re.IGNORECASE)
                        tmp = result.find("a")['href'].split('/')[-1].replace('.html', '.torrent').strip()
                        download_url = (self.url + '/telechargement/%s' % tmp)
                        if not all([title, download_url]):
                            continue

                        seeders = try_int(result.find(class_="up").get_text(strip=True))
                        leechers = try_int(result.find(class_="down").get_text(strip=True))
                        if seeders < self.minseed or leechers < self.minleech:
                            if mode != 'RSS':
                                logger.log(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(title, seeders, leechers), logger.DEBUG)
                            continue

                        torrent_size = result.find(class_="poid").get_text(strip=True)
                        # French size units (octets): o/Ko/Mo/Go/To/Po.
                        units = ['o', 'Ko', 'Mo', 'Go', 'To', 'Po']
                        size = convert_size(torrent_size, units=units) or -1

                        item = title, download_url, size, seeders, leechers
                        if mode != 'RSS':
                            logger.log(u"Found result: %s with %s seeders and %s leechers" % (title, seeders, leechers), logger.DEBUG)

                        items.append(item)
                    except StandardError:
                        # Malformed row; skip it.
                        continue

        # For each search mode sort all the items by seeders if available
        items.sort(key=lambda tup: tup[3], reverse=True)
        results += items

    return results
示例8: _get_size
def _get_size(self, item):
size = item.get('sizebytes', -1)
# Try to get the size from the summary tag
if size == -1:
# Units
units = ['B', 'KB', 'MB', 'GB', 'TB', 'PB']
summary = item.get('summary')
if summary:
size_match = re.search(ur'Size[^\d]*([0-9.]*.[A-Z]*)', summary)
size = convert_size(size_match.group(1), units=units) or -1 if size_match else -1
return try_int(size)
示例9: search
def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-many-locals
    """Query the provider's JSON API for TV torrents.

    :param search_strings: dict mapping a mode ('RSS', 'Season', 'Episode') to a
        list of query strings for that mode.
    :param age: unused, kept for interface compatibility.
    :param ep_obj: unused, kept for interface compatibility.
    :return: list of result dicts (title/link/size/seeders/leechers/hash),
        sorted by seeders within each mode.
    """
    results = []
    for mode in search_strings:  # Mode = RSS, Season, Episode
        items = []
        logger.log(u"Search Mode: {}".format(mode), logger.DEBUG)
        for search_string in search_strings[mode]:
            if mode != 'RSS':
                logger.log(u"Search string: " + search_string.strip(), logger.DEBUG)

            search_url = self.url + "api/v2/torrents/search/?category=TV&phrase=" + search_string
            jdata = self.get_url(search_url, returns='json')
            if not jdata:
                logger.log(u"No data returned from provider", logger.DEBUG)
                # Skip this query instead of the original `return []`, which
                # threw away every result already gathered.
                continue

            # NOTE: the original reset `results = []` here, wiping results
            # accumulated from earlier search strings/modes.
            for item in jdata['torrents']:
                seeders = ('seeds' in item and item['seeds']) or 0
                leechers = ('leeches' in item and item['leeches']) or 0
                title = ('torrent_title' in item and item['torrent_title']) or ''
                torrent_size = ('size' in item and item['size'])
                size = convert_size(torrent_size) or -1
                download_url = ('magnet_uri' in item and item['magnet_uri']) or ''
                if not all([title, download_url]):
                    continue

                # Filter unseeded torrent
                if seeders < self.minseed or leechers < self.minleech:
                    if mode != 'RSS':
                        logger.log(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {} (S:{} L:{})".format
                                   (title, seeders, leechers), logger.DEBUG)
                    continue

                if mode != 'RSS':
                    logger.log(u"Found result: %s with %s seeders and %s leechers" % (title, seeders, leechers), logger.DEBUG)

                item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'hash': None}
                items.append(item)

        # For each search mode sort all the items by seeders if available
        items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True)
        results += items

    return results
示例10: search
def search(self, search_strings, age=0, ep_obj=None):
    """Query the provider's JSON API for TV torrents.

    :param search_strings: dict mapping a mode ('RSS', 'Season', 'Episode') to a
        list of query strings for that mode.
    :param age: unused, kept for interface compatibility.
    :param ep_obj: unused, kept for interface compatibility.
    :return: list of (title, download_url, size, seeders, leechers) tuples,
        sorted by seeders within each mode.
    """
    results = []
    for mode in search_strings:  # Mode = RSS, Season, Episode
        items = []
        logger.log(u"Search Mode: %s" % mode, logger.DEBUG)
        for search_string in search_strings[mode]:
            if mode != 'RSS':
                logger.log(u"Search string: " + search_string.strip(), logger.DEBUG)

            search_url = self.url + "api/v2/torrents/search/?category=TV&phrase=" + search_string
            logger.log(u"Search URL: %s" % search_url, logger.DEBUG)
            jdata = self.get_url(search_url, json=True)
            if not jdata:
                logger.log(u"No data returned from provider", logger.DEBUG)
                # Skip this query instead of the original `return []`, which
                # threw away every result already gathered.
                continue

            # NOTE: the original reset `results = []` here, wiping results
            # accumulated from earlier search strings/modes.
            for item in jdata['torrents']:
                seeders = ('seeds' in item and item['seeds']) or 0
                leechers = ('leeches' in item and item['leeches']) or 0
                title = ('torrent_title' in item and item['torrent_title']) or ''
                torrent_size = ('size' in item and item['size'])
                size = convert_size(torrent_size) or -1
                download_url = ('magnet_uri' in item and item['magnet_uri']) or ''
                if not all([title, download_url]):
                    continue

                # Filter unseeded torrent
                if seeders < self.minseed or leechers < self.minleech:
                    if mode != 'RSS':
                        logger.log(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(title, seeders, leechers), logger.DEBUG)
                    continue

                if mode != 'RSS':
                    logger.log(u"Found result: %s " % title, logger.DEBUG)

                item = title, download_url, size, seeders, leechers
                items.append(item)

        # For each search mode sort all the items by seeders if available
        items.sort(key=lambda tup: tup[3], reverse=True)
        results += items

    return results
示例11: get_filtered_items_from_torrents
def get_filtered_items_from_torrents(self, mode, torrents):
    """Convert raw torrent dicts into result dicts, applying the provider's filters.

    :param mode: current search mode ('RSS', 'Season', 'Episode'); controls
        category filtering and debug logging.
    :param torrents: iterable of raw torrent dicts from the provider API.
    :return: list of result dicts (title/link/size/seeders/leechers/hash).
    """
    items = []

    for torrent in torrents:
        # In RSS mode, only keep torrents from the configured subcategories.
        if mode == 'RSS' and 'category' in torrent and try_int(torrent['category'], 0) not in self.subcategories:
            continue

        try:
            title = torrent['name']
            torrent_id = torrent['id']
            download_url = (self.urls['download'] % torrent_id).encode('utf8')
            if not all([title, download_url]):
                continue

            seeders = try_int(torrent['seeders'])
            leechers = try_int(torrent['leechers'])
            verified = bool(torrent['isVerified'])
            torrent_size = torrent['size']

            # Filter unseeded torrent
            if seeders < self.minseed or leechers < self.minleech:
                if mode != 'RSS':
                    logger.log(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(title, seeders, leechers), logger.DEBUG)
                continue

            # Optionally drop results the site has not marked as verified.
            if self.confirmed and not verified and mode != 'RSS':
                logger.log(u"Found result " + title + " but that doesn't seem like a verified result so I'm ignoring it", logger.DEBUG)
                continue

            size = convert_size(torrent_size) or -1
            item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'hash': ''}
            if mode != 'RSS':
                logger.log(u"Found result: {0} with {1} seeders and {2} leechers".format(title, seeders, leechers), logger.DEBUG)

            items.append(item)
        except Exception:
            # Skip the broken entry but log it for diagnosis.
            logger.log(u"Invalid torrent data, skipping result: {0}".format(torrent), logger.DEBUG)
            logger.log(u"Failed parsing provider. Traceback: {0}".format(traceback.format_exc()), logger.DEBUG)

    return items
示例12: search
def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-many-locals, too-many-branches, too-many-statements
    """Log in and scrape the provider's search results table.

    :param search_strings: dict mapping a mode ('RSS', 'Season', 'Episode') to a
        list of query strings for that mode.
    :param age: unused, kept for interface compatibility.
    :param ep_obj: unused, kept for interface compatibility.
    :return: list of result dicts (title/link/size/seeders/leechers),
        sorted by seeders within each mode.
    """
    results = []
    if not self.login():
        return results

    # Search Params
    search_params = {
        'do': 'search',
        'search_type': 't_name',
        'category': 0,
        'include_dead_torrents': 'no',
        'submit': 'search'
    }

    # Units
    units = ['B', 'KB', 'MB', 'GB', 'TB', 'PB']

    for mode in search_strings:
        items = []
        logger.log('Search Mode: {0}'.format(mode), logger.DEBUG)
        for search_string in search_strings[mode]:
            if mode == 'Season':
                # This site labels seasons "Series"; rewrite S01-style tokens.
                search_string = re.sub(ur'(.*)S0?', ur'\1Series ', search_string)

            if mode != 'RSS':
                logger.log('Search string: {0}'.format
                           (search_string.decode('utf-8')), logger.DEBUG)

            search_params['keywords'] = search_string
            data = self.get_url(self.urls['search'], post_data=search_params, returns='text')
            if not data:
                logger.log('No data returned from provider', logger.DEBUG)
                continue

            with BS4Parser(data, 'html5lib') as html:
                torrent_table = html.find(id='sortabletable')
                torrent_rows = torrent_table("tr") if torrent_table else []

                # Continue only if at least one Release is found
                if len(torrent_rows) < 2:
                    logger.log("Data returned from provider does not contain any torrents", logger.DEBUG)
                    continue

                # Header row: prefer the image tooltip, otherwise the cell text.
                labels = [label.img['title'] if label.img else label.get_text(strip=True) for label in torrent_rows[0]('td')]

                for torrent in torrent_rows[1:]:
                    try:
                        if self.freeleech and not torrent.find('img', alt=re.compile('Free Torrent')):
                            continue

                        title = torrent.find(class_='tooltip-content').div.get_text(strip=True)
                        download_url = torrent.find(title='Click to Download this Torrent!').parent['href']
                        if not all([title, download_url]):
                            continue

                        seeders = try_int(torrent.find(title='Seeders').get_text(strip=True))
                        leechers = try_int(torrent.find(title='Leechers').get_text(strip=True))

                        # Filter unseeded torrent
                        if seeders < self.minseed or leechers < self.minleech:
                            if mode != 'RSS':
                                logger.log('Discarding torrent because it doesn\'t meet the'
                                           ' minimum seeders or leechers: {0} (S:{1} L:{2})'.format
                                           (title, seeders, leechers), logger.DEBUG)
                            continue

                        # Chop off tracker/channel prefix or we cant parse the result!
                        if mode != 'RSS' and search_params['keywords']:
                            show_name_first_word = re.search(ur'^[^ .]+', search_params['keywords']).group()
                            if not title.startswith(show_name_first_word):
                                title = re.sub(ur'.*(' + show_name_first_word + '.*)', ur'\1', title)

                        # Change title from Series to Season, or we can't parse
                        if mode == 'Season':
                            title = re.sub(ur'(.*)(?i)Series', ur'\1Season', title)

                        # Strip year from the end or we can't parse it!
                        title = re.sub(ur'(.*)[\. ]?\(\d{4}\)', ur'\1', title)
                        title = re.sub(ur'\s+', ur' ', title)

                        torrent_size = torrent('td')[labels.index('Size')].get_text(strip=True)
                        size = convert_size(torrent_size, units=units) or -1

                        if mode != 'RSS':
                            logger.log('Found result: {0} with {1} seeders and {2} leechers'.format
                                       (title, seeders, leechers), logger.DEBUG)

                        item = {'title': title + '.hdtv.x264', 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers}
                        items.append(item)
                    except StandardError:
                        # Malformed row; skip it.
                        continue

        # For each search mode sort all the items by seeders if available
        items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True)
        results += items

    return results
示例13: search
def search(
    self, search_params, age=0, ep_obj=None
):  # pylint: disable=too-many-locals, too-many-branches, too-many-statements
    """Log in and scrape the provider's torrents table (IPTorrents-style site).

    :param search_params: dict mapping a mode ('RSS', 'Season', 'Episode') to a
        list of query strings for that mode.
    :param age: unused, kept for interface compatibility.
    :param ep_obj: unused, kept for interface compatibility.
    :return: list of (title, download_url, size, seeders, leechers) tuples,
        sorted by seeders within each mode.
    """
    results = []
    if not self.login():
        return results

    # Optional freeleech-only query fragment.
    freeleech = "&free=on" if self.freeleech else ""

    for mode in search_params:
        items = []
        logger.log(u"Search Mode: %s" % mode, logger.DEBUG)
        for search_string in search_params[mode]:
            if mode != "RSS":
                logger.log(u"Search string: %s " % search_string, logger.DEBUG)

            # URL with 50 tv-show results, or max 150 if adjusted in IPTorrents profile
            search_url = self.urls["search"] % (self.categories, freeleech, search_string)
            search_url += ";o=seeders" if mode != "RSS" else ""
            logger.log(u"Search URL: %s" % search_url, logger.DEBUG)

            data = self.get_url(search_url)
            if not data:
                continue

            try:
                # Strip <button> tags that confuse the parser.
                data = re.sub(r"(?im)<button.+?<[\/]button>", "", data, 0)
                with BS4Parser(data, "html5lib") as html:
                    if not html:
                        logger.log(u"No data returned from provider", logger.DEBUG)
                        continue

                    if html.find(text="No Torrents Found!"):
                        logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG)
                        continue

                    torrent_table = html.find("table", attrs={"class": "torrents"})
                    torrents = torrent_table.find_all("tr") if torrent_table else []

                    # Continue only if one Release is found
                    if len(torrents) < 2:
                        logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG)
                        continue

                    for result in torrents[1:]:
                        try:
                            title = result.find_all("td")[1].find("a").text
                            download_url = self.urls["base_url"] + result.find_all("td")[3].find("a")["href"]
                            seeders = int(result.find("td", attrs={"class": "ac t_seeders"}).text)
                            leechers = int(result.find("td", attrs={"class": "ac t_leechers"}).text)
                            torrent_size = result.find_all("td")[5].text
                            size = convert_size(torrent_size) or -1
                        except (AttributeError, TypeError, KeyError):
                            # Malformed row; skip it.
                            continue

                        if not all([title, download_url]):
                            continue

                        # Filter unseeded torrent
                        if seeders < self.minseed or leechers < self.minleech:
                            if mode != "RSS":
                                logger.log(
                                    u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(
                                        title, seeders, leechers
                                    ),
                                    logger.DEBUG,
                                )
                            continue

                        item = title, download_url, size, seeders, leechers
                        if mode != "RSS":
                            logger.log(
                                u"Found result: %s with %s seeders and %s leechers" % (title, seeders, leechers),
                                logger.DEBUG,
                            )

                        items.append(item)
            except Exception as e:
                logger.log(u"Failed parsing provider. Error: %r" % ex(e), logger.ERROR)

        # For each search mode sort all the items by seeders if available
        items.sort(key=lambda tup: tup[3], reverse=True)
        results += items

    return results
示例14: search
def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-many-branches,too-many-locals
    """Query the provider's XML feed (Bitsnoop-style) for TV/Anime torrents.

    :param search_strings: dict mapping a mode ('RSS', 'Season', 'Episode') to a
        list of query strings for that mode.
    :param age: unused, kept for interface compatibility.
    :param ep_obj: unused, kept for interface compatibility.
    :return: list of result dicts (title/link/size/seeders/leechers/hash),
        sorted by seeders within each mode.
    """
    results = []
    for mode in search_strings:
        items = []
        logger.log(u"Search Mode: {0}".format(mode), logger.DEBUG)
        for search_string in search_strings[mode]:
            if mode != 'RSS':
                logger.log(u"Search string: {0}".format(search_string.decode("utf-8")),
                           logger.DEBUG)

            try:
                # RSS mode uses the plain feed; explicit searches build a query URL.
                search_url = (self.urls['rss'], self.urls['search'] + search_string + '/s/d/1/?fmt=rss')[mode != 'RSS']

                data = self.get_url(search_url, returns='text')
                if not data:
                    logger.log(u"No data returned from provider", logger.DEBUG)
                    continue

                if not data.startswith('<?xml'):
                    logger.log(u'Expected xml but got something else, is your mirror failing?', logger.INFO)
                    continue

                data = BeautifulSoup(data, 'html5lib')
                for item in data.findAll('item'):
                    try:
                        if not item.category.text.endswith(('TV', 'Anime')):
                            continue

                        title = item.title.text
                        assert isinstance(title, unicode)

                        # Use the torcache link bitsnoop provides,
                        # unless it is not torcache or we are not using blackhole
                        # because we want to use magnets if connecting direct to client
                        # so that proxies work.
                        download_url = item.enclosure['url']
                        if sickbeard.TORRENT_METHOD != "blackhole" or 'torcache' not in download_url:
                            download_url = item.find('magneturi').next.replace('CDATA', '').strip('[]') + self._custom_trackers

                        if not (title and download_url):
                            continue

                        seeders = try_int(item.find('numseeders').text)
                        leechers = try_int(item.find('numleechers').text)
                        torrent_size = item.find('size').text
                        size = convert_size(torrent_size) or -1
                        info_hash = item.find('infohash').text
                    except (AttributeError, TypeError, KeyError, ValueError):
                        # Malformed item; skip it.
                        continue

                    # Filter unseeded torrent
                    if seeders < self.minseed or leechers < self.minleech:
                        if mode != 'RSS':
                            logger.log(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format
                                       (title, seeders, leechers), logger.DEBUG)
                        continue

                    item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'hash': info_hash}
                    if mode != 'RSS':
                        logger.log(u"Found result: {0!s} with {1!s} seeders and {2!s} leechers".format(title, seeders, leechers), logger.DEBUG)

                    items.append(item)
            except (AttributeError, TypeError, KeyError, ValueError):
                logger.log(u"Failed parsing provider. Traceback: {0!r}".format(traceback.format_exc()), logger.ERROR)

        # For each search mode sort all the items by seeders if available
        items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True)
        results += items

    return results
示例15: search
def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-many-branches, too-many-locals
    """Query a self-hosted torrent-API instance (default http://localhost:3000/).

    :param search_strings: dict mapping a mode ('RSS', 'Season', 'Episode') to a
        list of query strings for that mode.
    :param age: unused, kept for interface compatibility.
    :param ep_obj: episode object; used only to pick the 'anime' vs 'tv' category.
    :return: list of result dicts (title/link/size/seeders/leechers/pubdate/hash),
        sorted by seeders within each mode.
    """
    results = []
    url = "http://localhost:3000/"
    if self.custom_url:
        if not validators.url(self.custom_url, require_tld=False):
            logger.log("Invalid custom url set, please check your settings", logger.WARNING)
            return results
        url = self.custom_url

    search_params = {}

    # Pick the anime category when the episode's show is flagged as anime.
    anime = ep_obj and ep_obj.show and ep_obj.show.anime
    search_params["category"] = ("tv", "anime")[bool(anime)]

    if self.api_key:
        search_params["apiKey"] = self.api_key

    for mode in search_strings:
        items = []
        logger.log("Search Mode: {}".format(mode), logger.DEBUG)
        for search_string in search_strings[mode]:
            search_params["q"] = search_string
            if mode != "RSS":
                logger.log("Search string: {}".format(search_string), logger.DEBUG)

            search_url = urljoin(url, "api/search")
            parsed_json = self.get_url(search_url, params=search_params, returns="json")
            if not parsed_json:
                logger.log("No data returned from provider", logger.DEBUG)
                continue

            if not self._check_auth_from_data(parsed_json):
                return results

            for result in parsed_json.pop("torrents", {}):
                try:
                    title = result.pop("title", "")

                    info_hash = result.pop("infoHash", "")
                    download_url = "magnet:?xt=urn:btih:" + info_hash
                    if not all([title, download_url, info_hash]):
                        continue

                    # Seeders/leechers live in a nested 'swarm' object when present.
                    swarm = result.pop("swarm", None)
                    if swarm:
                        seeders = try_int(swarm.pop("seeders", 0))
                        leechers = try_int(swarm.pop("leechers", 0))
                    else:
                        seeders = leechers = 0

                    if seeders < min(self.minseed, 1):
                        if mode != "RSS":
                            logger.log("Discarding torrent because it doesn't meet the "
                                       "minimum seeders: {0}. Seeders: {1})".format
                                       (title, seeders), logger.DEBUG)
                        continue

                    size = convert_size(result.pop("size", -1)) or -1
                    item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None}
                    if mode != "RSS":
                        logger.log("Found result: {0} with {1} seeders and {2} leechers".format
                                   (title, seeders, leechers), logger.DEBUG)

                    items.append(item)
                except (AttributeError, TypeError, KeyError, ValueError):
                    # Malformed entry; skip it.
                    continue

        # For each search mode sort all the items by seeders if available
        items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True)
        results += items

    return results