

Python Settings.get Method Code Examples

This article collects typical code examples of the scrapy.settings.Settings.get method in Python. If you are wondering what exactly Settings.get does, how to call it, or what it looks like in real projects, the hand-picked examples below should help. You can also explore further usage examples of the scrapy.settings.Settings class that provides this method.


The following presents 15 code examples of the Settings.get method, sorted by popularity by default.
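Before the project-specific examples, here is a minimal standalone sketch (not taken from any of the projects below) of the pattern they all build on: Settings.get returns the stored value or a supplied default, while typed helpers such as getint coerce string values.

from scrapy.settings import Settings

# Build a Settings object from a plain dict; these values override Scrapy's defaults.
settings = Settings({'LOG_LEVEL': 'INFO', 'RETRY_TIMES': '3'})

# get() returns the stored value, or the supplied default when the key is not set.
assert settings.get('LOG_LEVEL') == 'INFO'
assert settings.get('SOME_MISSING_KEY', 'fallback') == 'fallback'

# Typed helpers coerce values, so the string '3' comes back as an int here.
assert settings.getint('RETRY_TIMES') == 3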

Example 1: __init__

# Required import: from scrapy.settings import Settings [as alias]
# Or: from scrapy.settings.Settings import get [as alias]
    def __init__(self, store_uri, download_func=None, settings=None):
        super(ImagesPipeline, self).__init__(store_uri, settings=settings,
                                             download_func=download_func)

        if isinstance(settings, dict) or settings is None:
            settings = Settings(settings)

        resolve = functools.partial(self._key_for_pipe,
                                    base_class_name="ImagesPipeline")
        self.expires = settings.getint(
            resolve("IMAGES_EXPIRES"), self.EXPIRES
        )

        if not hasattr(self, "IMAGES_RESULT_FIELD"):
            self.IMAGES_RESULT_FIELD = self.DEFAULT_IMAGES_RESULT_FIELD
        if not hasattr(self, "IMAGES_URLS_FIELD"):
            self.IMAGES_URLS_FIELD = self.DEFAULT_IMAGES_URLS_FIELD

        self.images_urls_field = settings.get(
            resolve('IMAGES_URLS_FIELD'),
            self.IMAGES_URLS_FIELD
        )
        self.images_result_field = settings.get(
            resolve('IMAGES_RESULT_FIELD'),
            self.IMAGES_RESULT_FIELD
        )
        self.min_width = settings.getint(
            resolve('IMAGES_MIN_WIDTH'), self.MIN_WIDTH
        )
        self.min_height = settings.getint(
            resolve('IMAGES_MIN_HEIGHT'), self.MIN_HEIGHT
        )
        self.thumbs = settings.get(
            resolve('IMAGES_THUMBS'), self.THUMBS
        )
Developer ID: 18115359037, Project: scrapy, Lines of code: 37, Source file: images.py

Example 2: FilesPipelineTestCaseCustomSettings

# Required import: from scrapy.settings import Settings [as alias]
# Or: from scrapy.settings.Settings import get [as alias]
class FilesPipelineTestCaseCustomSettings(unittest.TestCase):

    def setUp(self):
        self.tempdir = mkdtemp()
        self.pipeline = FilesPipeline(self.tempdir)
        self.default_settings = Settings()

    def tearDown(self):
        rmtree(self.tempdir)

    def test_expires(self):
        another_pipeline = FilesPipeline.from_settings(Settings({'FILES_STORE': self.tempdir,
                                                                'FILES_EXPIRES': 42}))
        self.assertEqual(self.pipeline.expires, self.default_settings.getint('FILES_EXPIRES'))
        self.assertEqual(another_pipeline.expires, 42)

    def test_files_urls_field(self):
        another_pipeline = FilesPipeline.from_settings(Settings({'FILES_STORE': self.tempdir,
                                                                'FILES_URLS_FIELD': 'funny_field'}))
        self.assertEqual(self.pipeline.files_urls_field, self.default_settings.get('FILES_URLS_FIELD'))
        self.assertEqual(another_pipeline.files_urls_field, 'funny_field')

    def test_files_result_field(self):
        another_pipeline = FilesPipeline.from_settings(Settings({'FILES_STORE': self.tempdir,
                                                                'FILES_RESULT_FIELD': 'funny_field'}))
        self.assertEqual(self.pipeline.files_result_field, self.default_settings.get('FILES_RESULT_FIELD'))
        self.assertEqual(another_pipeline.files_result_field, 'funny_field')
Developer ID: 01-, Project: scrapy, Lines of code: 29, Source file: test_pipeline_files.py

Example 3: __init__

# Required import: from scrapy.settings import Settings [as alias]
# Or: from scrapy.settings.Settings import get [as alias]
    def __init__(self, store_uri, download_func=None, settings=None):
        if not store_uri:
            raise NotConfigured
        
        if isinstance(settings, dict) or settings is None:
            settings = Settings(settings)

        cls_name = "FilesPipeline"
        self.store = self._get_store(store_uri)
        resolve = functools.partial(self._key_for_pipe,
                                    base_class_name=cls_name,
                                    settings=settings)
        self.expires = settings.getint(
            resolve('FILES_EXPIRES'), self.EXPIRES
        )
        if not hasattr(self, "FILES_URLS_FIELD"):
            self.FILES_URLS_FIELD = self.DEFAULT_FILES_URLS_FIELD
        if not hasattr(self, "FILES_RESULT_FIELD"):
            self.FILES_RESULT_FIELD = self.DEFAULT_FILES_RESULT_FIELD
        self.files_urls_field = settings.get(
            resolve('FILES_URLS_FIELD'), self.FILES_URLS_FIELD
        )
        self.files_result_field = settings.get(
            resolve('FILES_RESULT_FIELD'), self.FILES_RESULT_FIELD
        )

        super(FilesPipeline, self).__init__(download_func=download_func)
Developer ID: CPoirot3, Project: scrapy, Lines of code: 29, Source file: files.py

Example 4: setup_spider_logging

# Required import: from scrapy.settings import Settings [as alias]
# Or: from scrapy.settings.Settings import get [as alias]
def setup_spider_logging(spider, settings):
    """Initialize and configure default loggers

    Copied from Scrapy and updated, because the version from Scrapy:

     1) doesn't close handlers and observers
     2) opens a log observer for twisted logging each time it's called -
        you can end up with N log observers logging the same message N
        times after N crawls.

    so there's no way to reuse it.

    :return: method that should be called to clean up the handler.

    """
    if isinstance(settings, dict):
        settings = Settings(settings)

    # Logging to stdout is a bad idea when multiple crawls are running
    # if settings.getbool('LOG_STDOUT'):
    #     sys.stdout = StreamLogger(logging.getLogger('stdout'))
    filename = settings.get('LOG_FILE')
    if filename:
        encoding = settings.get('LOG_ENCODING')
        handler = logging.FileHandler(filename, encoding=encoding)
    elif settings.getbool('LOG_ENABLED'):
        handler = logging.StreamHandler()
    else:
        handler = logging.NullHandler()
    formatter = logging.Formatter(
        fmt=settings.get('LOG_FORMAT'),
        datefmt=settings.get('LOG_DATEFORMAT')
    )
    handler.setFormatter(formatter)
    handler.setLevel(settings.get('LOG_LEVEL'))
    filters = [
        TopLevelFormatter(['scrapy']),
        SpiderFilter(spider),
    ]
    for _filter in filters:
        handler.addFilter(_filter)
    logging.root.addHandler(handler)

    _cleanup_functions = [
        lambda: [handler.removeFilter(f) for f in filters],
        lambda: logging.root.removeHandler(handler),
        handler.close,
    ]

    def cleanup():
        for func in _cleanup_functions:
            try:
                func()
            except Exception as e:
                err(e)

    return cleanup
Developer ID: SmileyJames, Project: scrapyrt, Lines of code: 59, Source file: log.py

Example 5: test_update_settings

# Required import: from scrapy.settings import Settings [as alias]
# Or: from scrapy.settings.Settings import get [as alias]
    def test_update_settings(self):
        spider_settings = {'TEST1': 'spider', 'TEST2': 'spider'}
        project_settings = {'TEST1': 'project', 'TEST3': 'project'}
        self.spider_class.custom_settings = spider_settings
        settings = Settings(project_settings, priority='project')

        self.spider_class.update_settings(settings)
        self.assertEqual(settings.get('TEST1'), 'spider')
        self.assertEqual(settings.get('TEST2'), 'spider')
        self.assertEqual(settings.get('TEST3'), 'project')
Developer ID: nyov, Project: scrapy, Lines of code: 12, Source file: test_spider.py

Example 6: __init__

# Required import: from scrapy.settings import Settings [as alias]
# Or: from scrapy.settings.Settings import get [as alias]
    def __init__(self, store_uri, download_func=None, settings=None):
        super(ImagesPipeline, self).__init__(store_uri, settings=settings, download_func=download_func)
        
        if isinstance(settings, dict) or settings is None:
            settings = Settings(settings)

        self.expires = settings.getint('IMAGES_EXPIRES')
        self.images_urls_field = settings.get('IMAGES_URLS_FIELD')
        self.images_result_field = settings.get('IMAGES_RESULT_FIELD')
        self.min_width = settings.getint('IMAGES_MIN_WIDTH')
        self.min_height = settings.getint('IMAGES_MIN_HEIGHT')
        self.thumbs = settings.get('IMAGES_THUMBS')
Developer ID: 01-, Project: scrapy, Lines of code: 14, Source file: images.py

Example 7: test_get

# Required import: from scrapy.settings import Settings [as alias]
# Or: from scrapy.settings.Settings import get [as alias]
 def test_get(self):
     settings = Settings({
         'TEST_ENABLED1': '1',
         'TEST_ENABLED2': True,
         'TEST_ENABLED3': 1,
         'TEST_DISABLED1': '0',
         'TEST_DISABLED2': False,
         'TEST_DISABLED3': 0,
         'TEST_INT1': 123,
         'TEST_INT2': '123',
         'TEST_FLOAT1': 123.45,
         'TEST_FLOAT2': '123.45',
         'TEST_LIST1': ['one', 'two'],
         'TEST_LIST2': 'one,two',
         'TEST_STR': 'value',
         'TEST_DICT1': {'key1': 'val1', 'ke2': 3},
         'TEST_DICT2': '{"key1": "val1", "ke2": 3}',
     })
     assert settings.getbool('TEST_ENABLED1') is True
     assert settings.getbool('TEST_ENABLED2') is True
     assert settings.getbool('TEST_ENABLED3') is True
     assert settings.getbool('TEST_ENABLEDx') is False
     assert settings.getbool('TEST_ENABLEDx', True) is True
     assert settings.getbool('TEST_DISABLED1') is False
     assert settings.getbool('TEST_DISABLED2') is False
     assert settings.getbool('TEST_DISABLED3') is False
     self.assertEqual(settings.getint('TEST_INT1'), 123)
     self.assertEqual(settings.getint('TEST_INT2'), 123)
     self.assertEqual(settings.getint('TEST_INTx'), 0)
     self.assertEqual(settings.getint('TEST_INTx', 45), 45)
     self.assertEqual(settings.getfloat('TEST_FLOAT1'), 123.45)
     self.assertEqual(settings.getfloat('TEST_FLOAT2'), 123.45)
     self.assertEqual(settings.getfloat('TEST_FLOATx'), 0.0)
     self.assertEqual(settings.getfloat('TEST_FLOATx', 55.0), 55.0)
     self.assertEqual(settings.getlist('TEST_LIST1'), ['one', 'two'])
     self.assertEqual(settings.getlist('TEST_LIST2'), ['one', 'two'])
     self.assertEqual(settings.getlist('TEST_LISTx'), [])
     self.assertEqual(
         settings.getlist('TEST_LISTx', ['default']), ['default'])
     self.assertEqual(settings['TEST_STR'], 'value')
     self.assertEqual(settings.get('TEST_STR'), 'value')
     self.assertEqual(settings['TEST_STRx'], None)
     self.assertEqual(settings.get('TEST_STRx'), None)
     self.assertEqual(settings.get('TEST_STRx', 'default'), 'default')
     self.assertEqual(
         settings.getdict('TEST_DICT1'), {'key1': 'val1', 'ke2': 3})
     self.assertEqual(
         settings.getdict('TEST_DICT2'), {'key1': 'val1', 'ke2': 3})
     self.assertEqual(settings.getdict('TEST_DICT3'), {})
     self.assertEqual(
         settings.getdict('TEST_DICT3', {'key1': 5}), {'key1': 5})
     self.assertRaises(ValueError, settings.getdict, 'TEST_LIST1')
Developer ID: pyarnold, Project: scrapy, Lines of code: 54, Source file: test_settings.py

Example 8: __init__

# Required import: from scrapy.settings import Settings [as alias]
# Or: from scrapy.settings.Settings import get [as alias]
    def __init__(self, store_uri, download_func=None, settings=None):
        if not store_uri:
            raise NotConfigured
        
        if isinstance(settings, dict) or settings is None:
            settings = Settings(settings)
        
        self.store = self._get_store(store_uri)
        self.expires = settings.getint('FILES_EXPIRES')
        self.files_urls_field = settings.get('FILES_URLS_FIELD')
        self.files_result_field = settings.get('FILES_RESULT_FIELD')

        super(FilesPipeline, self).__init__(download_func=download_func)
Developer ID: maksimbormot, Project: scrapy, Lines of code: 15, Source file: files.py

Example 9: test_autopromote_dicts

# Required import: from scrapy.settings import Settings [as alias]
# Or: from scrapy.settings.Settings import get [as alias]
 def test_autopromote_dicts(self):
     settings = Settings()
     mydict = settings.get('TEST_DICT')
     self.assertIsInstance(mydict, BaseSettings)
     self.assertIn('key', mydict)
     self.assertEqual(mydict['key'], 'val')
     self.assertEqual(mydict.getpriority('key'), 0)
Developer ID: q1ang, Project: scrapy, Lines of code: 9, Source file: __init__.py

Example 10: __init__

# Required import: from scrapy.settings import Settings [as alias]
# Or: from scrapy.settings.Settings import get [as alias]
    def __init__(self, spidercls, settings=None):
        if isinstance(settings, dict) or settings is None:
            settings = Settings(settings)

        self.spidercls = spidercls
        self.settings = settings.copy()
        self.spidercls.update_settings(self.settings)

        self.signals = SignalManager(self)
        self.stats = load_object(self.settings['STATS_CLASS'])(self)

        handler = LogCounterHandler(self, level=settings.get('LOG_LEVEL'))
        logging.root.addHandler(handler)
        # lambda is assigned to Crawler attribute because this way it is not
        # garbage collected after leaving __init__ scope
        self.__remove_handler = lambda: logging.root.removeHandler(handler)
        self.signals.connect(self.__remove_handler, signals.engine_stopped)

        lf_cls = load_object(self.settings['LOG_FORMATTER'])
        self.logformatter = lf_cls.from_crawler(self)
        self.extensions = ExtensionManager.from_crawler(self)

        self.settings.freeze()
        self.crawling = False
        self.spider = None
        self.engine = None
Developer ID: cdingding, Project: scrapy, Lines of code: 28, Source file: crawler.py

Example 11: __init__

# Required import: from scrapy.settings import Settings [as alias]
# Or: from scrapy.settings.Settings import get [as alias]
    def __init__(self, spidercls, settings):
        if isinstance(settings, dict):
            settings = Settings(settings)

        self.spidercls = spidercls
        self.settings = settings.copy()

        self.signals = SignalManager(self)
        self.stats = load_object(self.settings['STATS_CLASS'])(self)

        handler = LogCounterHandler(self, level=settings.get('LOG_LEVEL'))
        logging.root.addHandler(handler)
        self.signals.connect(lambda: logging.root.removeHandler(handler),
                             signals.engine_stopped)

        lf_cls = load_object(self.settings['LOG_FORMATTER'])
        self.logformatter = lf_cls.from_crawler(self)
        self.extensions = ExtensionManager.from_crawler(self)

        self.spidercls.update_settings(self.settings)
        self.settings.freeze()

        self.crawling = False
        self.spider = None
        self.engine = None
Developer ID: arden, Project: scrapy, Lines of code: 27, Source file: crawler.py

Example 12: ImagesPipelineTestCaseCustomSettings

# Required import: from scrapy.settings import Settings [as alias]
# Or: from scrapy.settings.Settings import get [as alias]
class ImagesPipelineTestCaseCustomSettings(unittest.TestCase):

    def setUp(self):
        self.tempdir = mkdtemp()
        self.pipeline = ImagesPipeline(self.tempdir)
        self.default_settings = Settings()

    def tearDown(self):
        rmtree(self.tempdir)

    def test_expires(self):
        another_pipeline = ImagesPipeline.from_settings(Settings({'IMAGES_STORE': self.tempdir,
                                                                'IMAGES_EXPIRES': 42}))
        self.assertEqual(self.pipeline.expires, self.default_settings.getint('IMAGES_EXPIRES'))
        self.assertEqual(another_pipeline.expires, 42)

    def test_images_urls_field(self):
        another_pipeline = ImagesPipeline.from_settings(Settings({'IMAGES_STORE': self.tempdir,
                                                                'IMAGES_URLS_FIELD': 'funny_field'}))
        self.assertEqual(self.pipeline.images_urls_field, self.default_settings.get('IMAGES_URLS_FIELD'))
        self.assertEqual(another_pipeline.images_urls_field, 'funny_field')

    def test_images_result_field(self):
        another_pipeline = ImagesPipeline.from_settings(Settings({'IMAGES_STORE': self.tempdir,
                                                                'IMAGES_RESULT_FIELD': 'funny_field'}))
        self.assertEqual(self.pipeline.images_result_field, self.default_settings.get('IMAGES_RESULT_FIELD'))
        self.assertEqual(another_pipeline.images_result_field, 'funny_field')

    def test_min_width(self):
        another_pipeline = ImagesPipeline.from_settings(Settings({'IMAGES_STORE': self.tempdir,
                                                                'IMAGES_MIN_WIDTH': 42}))
        self.assertEqual(self.pipeline.min_width, self.default_settings.getint('IMAGES_MIN_WIDTH'))
        self.assertEqual(another_pipeline.min_width, 42)

    def test_min_height(self):
        another_pipeline = ImagesPipeline.from_settings(Settings({'IMAGES_STORE': self.tempdir,
                                                                'IMAGES_MIN_HEIGHT': 42}))
        self.assertEqual(self.pipeline.min_height, self.default_settings.getint('IMAGES_MIN_HEIGHT'))
        self.assertEqual(another_pipeline.min_height, 42)

    def test_thumbs(self):
        custom_thumbs = {'small': (50, 50), 'big': (270, 270)}
        another_pipeline = ImagesPipeline.from_settings(Settings({'IMAGES_STORE': self.tempdir,
                                                                'IMAGES_THUMBS': custom_thumbs}))
        self.assertEqual(self.pipeline.thumbs, self.default_settings.get('IMAGES_THUMBS'))
        self.assertEqual(another_pipeline.thumbs, custom_thumbs)
Developer ID: 01-, Project: scrapy, Lines of code: 48, Source file: test_pipeline_images.py

Example 13: configure_logging

# Required import: from scrapy.settings import Settings [as alias]
# Or: from scrapy.settings.Settings import get [as alias]
def configure_logging(settings=None):
    """Initialize and configure default loggers

    This function does:
      - Route warnings and twisted logging through Python standard logging
      - Set FailureFormatter filter on Scrapy logger
      - Assign DEBUG and ERROR level to Scrapy and Twisted loggers respectively
      - Create a handler for the root logger according to given settings
    """
    if not sys.warnoptions:
        # Route warnings through python logging
        logging.captureWarnings(True)

    observer = twisted_log.PythonLoggingObserver('twisted')
    observer.start()

    dictConfig(DEFAULT_LOGGING)

    if isinstance(settings, dict):
        settings = Settings(settings)

    if settings:
        logging.root.setLevel(logging.NOTSET)

        if settings.getbool('LOG_STDOUT'):
            sys.stdout = StreamLogger(logging.getLogger('stdout'))

        # Set up the default log handler
        filename = settings.get('LOG_FILE')
        if filename:
            encoding = settings.get('LOG_ENCODING')
            handler = logging.FileHandler(filename, encoding=encoding)
        elif settings.getbool('LOG_ENABLED'):
            handler = logging.StreamHandler()
        else:
            handler = logging.NullHandler()

        formatter = logging.Formatter(
            fmt=settings.get('LOG_FORMAT'),
            datefmt=settings.get('LOG_DATEFORMAT')
        )
        handler.setFormatter(formatter)
        handler.setLevel(settings.get('LOG_LEVEL'))
        handler.addFilter(TopLevelFormatter(['scrapy']))
        logging.root.addHandler(handler)
Developer ID: 247DigitalGroup, Project: scrapy, Lines of code: 47, Source file: log.py

Example 14: test_get

# Required import: from scrapy.settings import Settings [as alias]
# Or: from scrapy.settings.Settings import get [as alias]
 def test_get(self):
     settings = Settings({
         'TEST_ENABLED1': '1',
         'TEST_ENABLED2': True,
         'TEST_ENABLED3': 1,
         'TEST_DISABLED1': '0',
         'TEST_DISABLED2': False,
         'TEST_DISABLED3': 0,
         'TEST_INT1': 123,
         'TEST_INT2': '123',
         'TEST_FLOAT1': 123.45,
         'TEST_FLOAT2': '123.45',
         'TEST_LIST1': ['one', 'two'],
         'TEST_LIST2': 'one,two',
         'TEST_STR': 'value',
     })
     assert settings.getbool('TEST_ENABLED1') is True
     assert settings.getbool('TEST_ENABLED2') is True
     assert settings.getbool('TEST_ENABLED3') is True
     assert settings.getbool('TEST_ENABLEDx') is False
     assert settings.getbool('TEST_ENABLEDx', True) is True
     assert settings.getbool('TEST_DISABLED1') is False
     assert settings.getbool('TEST_DISABLED2') is False
     assert settings.getbool('TEST_DISABLED3') is False
     self.assertEqual(settings.getint('TEST_INT1'), 123)
     self.assertEqual(settings.getint('TEST_INT2'), 123)
     self.assertEqual(settings.getint('TEST_INTx'), 0)
     self.assertEqual(settings.getint('TEST_INTx', 45), 45)
     self.assertEqual(settings.getfloat('TEST_FLOAT1'), 123.45)
     self.assertEqual(settings.getfloat('TEST_FLOAT2'), 123.45)
     self.assertEqual(settings.getfloat('TEST_FLOATx'), 0.0)
     self.assertEqual(settings.getfloat('TEST_FLOATx', 55.0), 55.0)
     self.assertEqual(settings.getlist('TEST_LIST1'), ['one', 'two'])
     self.assertEqual(settings.getlist('TEST_LIST2'), ['one', 'two'])
     self.assertEqual(settings.getlist('TEST_LISTx'), [])
     self.assertEqual(settings.getlist('TEST_LISTx', ['default']), ['default'])
     self.assertEqual(settings['TEST_STR'], 'value')
     self.assertEqual(settings.get('TEST_STR'), 'value')
     self.assertEqual(settings['TEST_STRx'], None)
     self.assertEqual(settings.get('TEST_STRx'), None)
     self.assertEqual(settings.get('TEST_STRx', 'default'), 'default')
Developer ID: 073palmer, Project: scrapy, Lines of code: 43, Source file: test_settings.py

Example 15: qwebkit_settings

# Required import: from scrapy.settings import Settings [as alias]
# Or: from scrapy.settings.Settings import get [as alias]
def qwebkit_settings(settings=None):
    if settings is None:
        settings = Settings()
    elif settings.getbool("__WT__"):
        return settings
    else:
        settings = settings.copy()
        settings.frozen = False
    for name in dir(defs):
        if name.startswith("WT_") and settings.get(name) is None:
            settings.set(name, getattr(defs, name))
    settings.set("__WT__", True)
    return settings
Developer ID: StrongZhu, Project: scrapy-webtools, Lines of code: 15, Source file: utils.py


Note: The scrapy.settings.Settings.get examples in this article were compiled by 纯净天空 from GitHub, MSDocs, and other open-source code and documentation platforms. The code snippets are selected from open-source projects contributed by various developers, and copyright remains with the original authors; for distribution and use, please refer to the corresponding project's license. Do not reproduce without permission.