

Python TVCache.searchCache Method Code Examples

This article collects typical usage examples of the Python method sickbeard.tvcache.TVCache.searchCache. If you are wondering how TVCache.searchCache is used in practice, or what real calls to it look like, the selected examples below should help. You can also explore further usage examples of the containing class, sickbeard.tvcache.TVCache.


Four code examples of the TVCache.searchCache method are shown below, sorted by popularity by default.
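All four examples follow the same pattern: the provider creates a cache in its __init__ (self.cache = TVCache(self)) and, inside find_search_results, asks that cache for each episode before doing a live search. The sketch below distills that pattern from the examples; the helper function name and its standalone form are purely illustrative, and the keyword arguments are those used in the examples (manualSearch/downCurQuality in examples 1, 3 and 4, forced_search in example 2).

from sickbeard.tvcache import TVCache


def collect_cached_results(provider, episodes, manual_search=False,
                           download_current_quality=False):
    """Hypothetical helper mirroring the cache lookup in find_search_results."""
    # Every example below wires up the cache in __init__ as: self.cache = TVCache(self)
    cache = getattr(provider, 'cache', None) or TVCache(provider)

    results = {}
    for episode in episodes:
        # Ask the provider's cache before searching the provider itself; the
        # keyword names match examples 1, 3 and 4 (example 2 uses forced_search=).
        cache_result = cache.searchCache(episode,
                                         manualSearch=manual_search,
                                         downCurQuality=download_current_quality)
        if cache_result:
            # Group cached results by episode number, as find_search_results does.
            results.setdefault(episode.episode, []).extend(cache_result)
    return results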

Example 1: GenericProvider

# Required import: from sickbeard.tvcache import TVCache [as alias]
# Or: from sickbeard.tvcache.TVCache import searchCache [as alias]
class GenericProvider(object):  # pylint: disable=too-many-instance-attributes
    NZB = 'nzb'
    TORRENT = 'torrent'

    def __init__(self, name):
        self.name = name

        self.anime_only = False
        self.bt_cache_urls = [
            #'http://torcache.net/torrent/{torrent_hash}.torrent',
            'http://torrentproject.se/torrent/{torrent_hash}.torrent',
            'http://thetorrent.org/torrent/{torrent_hash}.torrent',
            'http://btdig.com/torrent/{torrent_hash}.torrent',
            # 'http://torrage.com/torrent/{torrent_hash}.torrent',
            'http://itorrents.org/torrent/{torrent_hash}.torrent',
        ]
        self.cache = TVCache(self)
        self.enable_backlog = False
        self.enable_daily = False
        self.enabled = False
        self.headers = {'User-Agent': UA_POOL.random}
        self.proper_strings = ['PROPER|REPACK|REAL']
        self.provider_type = None
        self.public = False
        self.search_fallback = False
        self.search_mode = None
        self.session = make_session()
        self.show = None
        self.supports_absolute_numbering = False
        self.supports_backlog = True
        self.url = ''
        self.urls = {}

        # Use and configure the attribute enable_cookies to show or hide the cookies input field per provider
        self.enable_cookies = False
        self.cookies = ''
        self.rss_cookies = ''

        shuffle(self.bt_cache_urls)

    def download_result(self, result):
        if not self.login():
            return False

        urls, filename = self._make_url(result)

        for url in urls:
            if 'NO_DOWNLOAD_NAME' in url:
                continue

            if url.startswith('http'):
                self.headers.update({
                    'Referer': '/'.join(url.split('/')[:3]) + '/'
                })

            logger.log(u'Downloading a result from {0} at {1}'.format(self.name, url))

            if url.endswith(GenericProvider.TORRENT) and filename.endswith(GenericProvider.NZB):
                filename = replace_extension(filename, GenericProvider.TORRENT)

            if download_file(url, filename, session=self.session, headers=self.headers, hooks={'response': self.get_url_hook}):
                if self._verify_download(filename):
                    logger.log(u'Saved result to {0}'.format(filename), logger.INFO)
                    return True

                logger.log(u'Could not download {0}'.format(url), logger.WARNING)
                remove_file_failed(filename)

        if urls:
            logger.log(u'Failed to download any results', logger.WARNING)

        return False

    def find_propers(self, search_date=None):
        results = self.cache.listPropers(search_date)

        return [Proper(x['name'], x['url'], datetime.fromtimestamp(x['time']), self.show) for x in results]

    def find_search_results(self, show, episodes, search_mode,  # pylint: disable=too-many-branches,too-many-arguments,too-many-locals,too-many-statements
                            manual_search=False, download_current_quality=False):
        self._check_auth()
        self.show = show

        results = {}
        items_list = []
        searched_scene_season = None

        for episode in episodes:
            cache_result = self.cache.searchCache(episode, manualSearch=manual_search,
                                                  downCurQuality=download_current_quality)
            if cache_result:
                if episode.episode not in results:
                    results[episode.episode] = cache_result
                else:
                    results[episode.episode].extend(cache_result)

                continue

            if len(episodes) > 1 and search_mode == 'sponly' and searched_scene_season == episode.scene_season:
                continue
#......... part of the code omitted here .........
Author: NickMolloy, Project: SickRage, Lines of code: 103, Source file: GenericProvider.py

Example 2: GenericProvider

# Required import: from sickbeard.tvcache import TVCache [as alias]
# Or: from sickbeard.tvcache.TVCache import searchCache [as alias]
class GenericProvider(object):  # pylint: disable=too-many-instance-attributes
    NZB = 'nzb'
    TORRENT = 'torrent'

    def __init__(self, name):
        self.name = name

        self.anime_only = False
        self.bt_cache_urls = [
            'http://torcache.net/torrent/{torrent_hash}.torrent',
            'http://thetorrent.org/torrent/{torrent_hash}.torrent',
            'http://itorrents.org/torrent/{torrent_hash}.torrent',
            # 'http://btdig.com/torrent/{torrent_hash}.torrent',
            # 'http://torrage.com/torrent/{torrent_hash}.torrent',
        ]
        self.cache = TVCache(self)
        self.enable_backlog = False
        self.enable_manualsearch = False
        self.enable_daily = False
        self.enabled = False
        self.headers = {'User-Agent': UA_POOL.random}
        self.proper_strings = ['PROPER|REPACK|REAL']
        self.provider_type = None
        self.public = False
        self.search_fallback = False
        self.search_mode = None
        self.session = make_session()
        self.show = None
        self.supports_absolute_numbering = False
        self.supports_backlog = True
        self.url = ''
        self.urls = {}

        # Use and configure the attribute enable_cookies to show or hide the cookies input field per provider
        self.enable_cookies = False
        self.cookies = ''

        # Parameters for reducing the daily search results parsing
        self.max_recent_items = 5
        self.stop_at = 3

        shuffle(self.bt_cache_urls)

    def download_result(self, result):
        if not self.login():
            return False

        urls, filename = self._make_url(result)

        for url in urls:
            if 'NO_DOWNLOAD_NAME' in url:
                continue

            if url.startswith('http'):
                self.headers.update({
                    'Referer': '/'.join(url.split('/')[:3]) + '/'
                })

            logger.log('Downloading a result from %s at %s' % (self.name, url))

            if url.endswith(GenericProvider.TORRENT) and filename.endswith(GenericProvider.NZB):
                filename = replace_extension(filename, GenericProvider.TORRENT)

            if download_file(url, filename, session=self.session, headers=self.headers, hooks={'response': self.get_url_hook}):
                if self._verify_download(filename):
                    logger.log('Saved result to %s' % filename, logger.INFO)
                    return True

                logger.log('Could not download %s' % url, logger.WARNING)
                remove_file_failed(filename)

        if urls:
            logger.log('Failed to download any results', logger.WARNING)

        return False

    def find_propers(self, search_date=None):
        results = self.cache.listPropers(search_date)

        return [Proper(x[b'name'], x[b'url'], datetime.fromtimestamp(x[b'time']), self.show, x[b'seeders'],
                       x[b'leechers'], x[b'size'], x[b'pubdate'], x[b'hash']) for x in results]

    def find_search_results(self, show, episodes, search_mode, forced_search=False,
                            download_current_quality=False, manual_search=False,
                            manual_search_type='episode'):  # pylint: disable=too-many-branches,too-many-arguments,too-many-locals,too-many-statements
        self._check_auth()
        self.show = show

        results = {}
        items_list = []
        searched_scene_season = None

        for episode in episodes:
            if not manual_search:
                cache_result = self.cache.searchCache(episode, forced_search=forced_search,
                                                      downCurQuality=download_current_quality)
                if cache_result:
                    if episode.episode not in results:
                        results[episode.episode] = cache_result
                    else:
#......... part of the code omitted here .........
Author: Eiber, Project: SickRage-Medusa, Lines of code: 103, Source file: GenericProvider.py

Example 3: GenericProvider

# Required import: from sickbeard.tvcache import TVCache [as alias]
# Or: from sickbeard.tvcache.TVCache import searchCache [as alias]
class GenericProvider(object):  # pylint: disable=too-many-instance-attributes
    NZB = 'nzb'
    TORRENT = 'torrent'

    def __init__(self, name):
        self.name = name

        self.anime_only = False
        self.bt_cache_urls = [
            'http://torcache.net/torrent/{torrent_hash}.torrent',
            'http://thetorrent.org/torrent/{torrent_hash}.torrent',
            'http://btdig.com/torrent/{torrent_hash}.torrent',
            # 'http://torrage.com/torrent/{torrent_hash}.torrent',
            # 'http://itorrents.org/torrent/{torrent_hash}.torrent',
        ]
        self.cache = TVCache(self)
        self.enable_backlog = False
        self.enable_daily = False
        self.enabled = False
        self.headers = {'User-Agent': UA_POOL.random}
        self.proper_strings = ['PROPER|REPACK|REAL']
        self.provider_type = None
        self.public = False
        self.search_fallback = False
        self.search_mode = None
        self.session = Session()
        self.show = None
        self.supports_absolute_numbering = False
        self.supports_backlog = True
        self.url = ''
        self.urls = {}

        shuffle(self.bt_cache_urls)

    def download_result(self, result):
        if not self.login():
            return False

        urls, filename = self._make_url(result)

        for url in urls:
            if 'NO_DOWNLOAD_NAME' in url:
                continue

            if url.startswith('http'):
                self.headers.update({
                    'Referer': '/'.join(url.split('/')[:3]) + '/'
                })

            logger.log(u'Downloading a result from %s at %s' % (self.name, url))

            if url.endswith(GenericProvider.TORRENT) and filename.endswith(GenericProvider.NZB):
                filename = replace_extension(filename, GenericProvider.TORRENT)

            if download_file(url, filename, session=self.session, headers=self.headers):
                if self._verify_download(filename):
                    logger.log(u'Saved result to %s' % filename, logger.INFO)
                    return True

                logger.log(u'Could not download %s' % url, logger.WARNING)
                remove_file_failed(filename)

        if len(urls):
            logger.log(u'Failed to download any results', logger.WARNING)

        return False

    def find_propers(self, search_date=None):
        results = self.cache.listPropers(search_date)

        return [Proper(x['name'], x['url'], datetime.fromtimestamp(x['time']), self.show) for x in results]

    def find_search_results(self, show, episodes, search_mode, manual_search=False, download_current_quality=False):  # pylint: disable=too-many-branches,too-many-arguments,too-many-locals,too-many-statements
        self._check_auth()
        self.show = show

        results = {}
        items_list = []
        searched_scene_season = None

        for episode in episodes:
            cache_result = self.cache.searchCache(episode, manualSearch=manual_search,
                                                  downCurQuality=download_current_quality)

            if cache_result:
                if episode.episode not in results:
                    results[episode.episode] = cache_result
                else:
                    results[episode.episode].extend(cache_result)

                continue

            if len(episodes) > 1 and search_mode == 'sponly' and searched_scene_season == episode.scene_season:
                continue

            search_strings = []
            searched_scene_season = episode.scene_season

            if len(episodes) > 1 and search_mode == 'sponly':
                search_strings = self._get_season_search_strings(episode)
#......... part of the code omitted here .........
Author: madtrix74, Project: SickRage, Lines of code: 103, Source file: GenericProvider.py

Example 4: GenericProvider

# Required import: from sickbeard.tvcache import TVCache [as alias]
# Or: from sickbeard.tvcache.TVCache import searchCache [as alias]
class GenericProvider(object):  # pylint: disable=too-many-instance-attributes
    NZB = "nzb"
    TORRENT = "torrent"

    def __init__(self, name):
        self.name = name

        self.anime_only = False
        self.bt_cache_urls = [
            #'http://torcache.net/torrent/{torrent_hash}.torrent',
            "http://torrentproject.se/torrent/{torrent_hash}.torrent",
            "http://thetorrent.org/torrent/{torrent_hash}.torrent",
            "http://btdig.com/torrent/{torrent_hash}.torrent",
            # 'http://torrage.com/torrent/{torrent_hash}.torrent',
            # 'http://itorrents.org/torrent/{torrent_hash}.torrent',
        ]
        self.cache = TVCache(self)
        self.enable_backlog = False
        self.enable_daily = False
        self.enabled = False
        self.headers = {"User-Agent": UA_POOL.random}
        self.proper_strings = ["PROPER|REPACK|REAL"]
        self.provider_type = None
        self.public = False
        self.search_fallback = False
        self.search_mode = None
        self.session = make_session()
        self.show = None
        self.supports_absolute_numbering = False
        self.supports_backlog = True
        self.url = ""
        self.urls = {}

        # Use and configure the attribute enable_cookies to show or hide the cookies input field per provider
        self.enable_cookies = False
        self.cookies = ""
        self.rss_cookies = ""

        shuffle(self.bt_cache_urls)

    def download_result(self, result):
        if not self.login():
            return False

        urls, filename = self._make_url(result)

        for url in urls:
            if "NO_DOWNLOAD_NAME" in url:
                continue

            if url.startswith("http"):
                self.headers.update({"Referer": "/".join(url.split("/")[:3]) + "/"})

            logger.log(u"Downloading a result from {0} at {1}".format(self.name, url))

            if url.endswith(GenericProvider.TORRENT) and filename.endswith(GenericProvider.NZB):
                filename = replace_extension(filename, GenericProvider.TORRENT)

            if download_file(
                url, filename, session=self.session, headers=self.headers, hooks={"response": self.get_url_hook}
            ):
                if self._verify_download(filename):
                    logger.log(u"Saved result to {0}".format(filename), logger.INFO)
                    return True

                logger.log(u"Could not download {0}".format(url), logger.WARNING)
                remove_file_failed(filename)

        if urls:
            logger.log(u"Failed to download any results", logger.WARNING)

        return False

    def find_propers(self, search_date=None):
        results = self.cache.listPropers(search_date)

        return [Proper(x["name"], x["url"], datetime.fromtimestamp(x["time"]), self.show) for x in results]

    def find_search_results(
        self,
        show,
        episodes,
        search_mode,  # pylint: disable=too-many-branches,too-many-arguments,too-many-locals,too-many-statements
        manual_search=False,
        download_current_quality=False,
    ):
        self._check_auth()
        self.show = show

        results = {}
        items_list = []
        searched_scene_season = None

        for episode in episodes:
            cache_result = self.cache.searchCache(
                episode, manualSearch=manual_search, downCurQuality=download_current_quality
            )
            if cache_result:
                if episode.episode not in results:
                    results[episode.episode] = cache_result
#......... part of the code omitted here .........
Author: Rickol91, Project: SickRage, Lines of code: 103, Source file: GenericProvider.py


Note: The sickbeard.tvcache.TVCache.searchCache examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets are excerpted from open-source projects contributed by their respective authors; copyright remains with the original authors, and any redistribution or use should follow each project's license. Do not reproduce without permission.