

Python ScrapyCommand.add_options Method Code Examples

This article collects typical usage examples of the Python method scrapy.command.ScrapyCommand.add_options. If you are unsure what ScrapyCommand.add_options does or how to call it, the curated examples below should help. You can also explore further usage of the containing class, scrapy.command.ScrapyCommand.


Fifteen code examples of ScrapyCommand.add_options are shown below, sorted by popularity by default. You can upvote the examples you find useful; your votes help the system recommend better Python code examples.
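
Before diving into the individual examples, here is a minimal sketch of how add_options fits into a complete custom command under the legacy scrapy.command API. The class name GreetCommand, its run body, and the --name option are hypothetical illustrations, not code from any of the projects below:

from scrapy.command import ScrapyCommand

class GreetCommand(ScrapyCommand):
    """Hypothetical minimal command (sketch, legacy scrapy.command API)."""

    def syntax(self):
        return "[options]"

    def short_desc(self):
        return "Print a greeting (demo command)"

    def add_options(self, parser):
        # Chain to the base class first so Scrapy's global options
        # remain available alongside the command-specific ones.
        ScrapyCommand.add_options(self, parser)
        parser.add_option("--name", dest="name", default="world",
            help="name to greet [default: %default]")

    def run(self, args, opts):
        print("Hello, %s!" % opts.name)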

Example 1: add_options

# Required module: from scrapy.command import ScrapyCommand [as alias]
# Or: from scrapy.command.ScrapyCommand import add_options [as alias]
 def add_options(self, parser):
     ScrapyCommand.add_options(self, parser)
     parser.add_option("-p", "--project", help="the project name in the target")
     parser.add_option("-v", "--version", help="the version to deploy. Defaults to current timestamp")
     parser.add_option("-l", "--list-targets", action="store_true", help="list available targets")
     parser.add_option("-L", "--list-projects", metavar="TARGET", help="list available projects on TARGET")
     parser.add_option("--egg", metavar="FILE", help="use the given egg, instead of building it")
Author: robyoung, Project: scrapy, Lines: 9, Source: deploy.py

Example 2: add_options

# Required module: from scrapy.command import ScrapyCommand [as alias]
# Or: from scrapy.command.ScrapyCommand import add_options [as alias]
 def add_options(self, parser):
     ScrapyCommand.add_options(self, parser)
     parser.add_option("--spider", dest="spider", default=None, help="use this spider without looking for one")
     parser.add_option(
         "--nolinks", dest="nolinks", action="store_true", help="don't show links to follow (extracted requests)"
     )
     parser.add_option("--noitems", dest="noitems", action="store_true", help="don't show scraped items")
     parser.add_option(
         "--nocolour", dest="nocolour", action="store_true", help="avoid using pygments to colorize the output"
     )
     parser.add_option(
         "-r", "--rules", dest="rules", action="store_true", help="use CrawlSpider rules to discover the callback"
     )
     parser.add_option(
         "-c", "--callback", dest="callback", help="use this callback for parsing, instead of looking for one"
     )
     parser.add_option(
         "-d",
         "--depth",
         dest="depth",
         type="int",
         default=1,
         help="maximum depth for parsing requests [default: %default]",
     )
     parser.add_option(
         "-v", "--verbose", dest="verbose", action="store_true", help="print each depth level one by one"
     )
Author: philippwinkler, Project: scrapy, Lines: 29, Source: parse.py
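
The -c/--callback value is typically resolved to a spider method before parsing begins. A rough sketch of that lookup (the helper and the fallback to "parse" are assumptions, not the parse command's actual code):

def get_callback(spider, opts):
    # Resolve a --callback option value against a spider instance.
    if opts.callback:
        return getattr(spider, opts.callback)   # e.g. -c parse_item
    return getattr(spider, "parse", None)       # conventional default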

Example 3: add_options

# Required module: from scrapy.command import ScrapyCommand [as alias]
# Or: from scrapy.command.ScrapyCommand import add_options [as alias]
 def add_options(self, parser):
     ScrapyCommand.add_options(self, parser)
     parser.add_option("-a", dest="spargs", action="append", default=[], metavar="NAME=VALUE",
                       help="set spider argument (may be repeated)")
     parser.add_option("-o", "--output", metavar="FILE",
                       help="dump scraped items into FILE (use - for stdout)")
     parser.add_option("-t", "--output-format", metavar="FORMAT", default="jsonlines",
                       help="format to use for dumping items with -o (default: %default)")
Author: pyarnold, Project: scrapy, Lines: 10, Source: runspider.py
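
Options collected with action="append", such as -a NAME=VALUE, are usually folded into a dict during process_options. A minimal sketch (old Scrapy ships a similar helper, arglist_to_dict, in scrapy.utils.conf; this standalone version is ours):

def arglist_to_dict(arglist):
    # ["name=value", ...] -> {"name": "value", ...}
    return dict(x.split("=", 1) for x in arglist)

# e.g. inside process_options:
#     opts.spargs = arglist_to_dict(opts.spargs)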

Example 4: add_options

# Required module: from scrapy.command import ScrapyCommand [as alias]
# Or: from scrapy.command.ScrapyCommand import add_options [as alias]
 def add_options(self, parser):
     ScrapyCommand.add_options(self, parser)
     parser.add_option("--list", dest="list", action="store_true")
     parser.add_option("--dump", dest="dump", action="store_true")
     parser.add_option("-t", "--template", dest="template", default="crawl",
         help="Uses a custom template.")
     parser.add_option("--force", dest="force", action="store_true",
         help="If the spider already exists, overwrite it with the template")
Author: serkanh, Project: scrapy, Lines: 10, Source: genspider.py

Example 5: add_options

# Required module: from scrapy.command import ScrapyCommand [as alias]
# Or: from scrapy.command.ScrapyCommand import add_options [as alias]
 def add_options(self, parser):
     ScrapyCommand.add_options(self, parser)
     parser.add_option("--spider", dest="spider", default=None, \
         help="always use this spider when arguments are urls")
     parser.add_option("-a", dest="spargs", action="append", default=[], metavar="NAME=VALUE", \
         help="set spider argument (may be repeated)")
     parser.add_option("-n", "--nofollow", dest="nofollow", action="store_true", \
         help="don't follow links (for use with URLs only)")
Author: bihicheng, Project: scrapy, Lines: 10, Source: crawl.py

Example 6: add_options

# Required module: from scrapy.command import ScrapyCommand [as alias]
# Or: from scrapy.command.ScrapyCommand import add_options [as alias]
 def add_options(self, parser):
     ScrapyCommand.add_options(self, parser)
     parser.add_option(
         "--verbose",
         "-v",
         dest="verbose",
         action="store_true",
         help="also display twisted/python/platform info (useful for bug reports)",
     )
Author: ethanfine, Project: oh-mainline, Lines: 11, Source: version.py

Example 7: add_options

# Required module: from scrapy.command import ScrapyCommand [as alias]
# Or: from scrapy.command.ScrapyCommand import add_options [as alias]
 def add_options(self, parser):
     ScrapyCommand.add_options(self, parser)
     parser.add_option("-d", "--date", help="Print data for given date (in format yyyy-mm-dd, by default is last stored data)")
     parser.add_option("-c", "--compare", metavar="DATE", help="Compare with data on given date (in format yyyy-mm-dd)")
     parser.add_option("-D", "--day", action="store_true", help="Compare with previous day")
     parser.add_option("-W", "--week", action="store_true", help="Compare with previous week")
     parser.add_option("-M", "--month", action="store_true", help="Compare with previous month")
     parser.add_option("-4", "--four_month", action="store_true", help="Compare with previous 4 month")
     parser.add_option("-Y", "--year", action="store_true", help="Compare with previous year")
     parser.add_option("--show_year", action="store_true", help="Show year of comparing date in report")
Author: TotallyBullshit, Project: finance-2, Lines: 12, Source: report.py
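
The yyyy-mm-dd strings taken by -d/--date and -c/--compare need validating somewhere downstream; a plausible helper for that (ours, not from the finance-2 project):

from datetime import datetime

def parse_report_date(value):
    # Validate and convert the yyyy-mm-dd format used by -d and -c.
    try:
        return datetime.strptime(value, "%Y-%m-%d").date()
    except ValueError:
        raise SystemExit("invalid date %r, expected yyyy-mm-dd" % (value,))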

Example 8: add_options

# Required module: from scrapy.command import ScrapyCommand [as alias]
# Or: from scrapy.command.ScrapyCommand import add_options [as alias]
 def add_options(self, parser):
     ScrapyCommand.add_options(self, parser)
     parser.add_option("-l", "--list", dest="list", action="store_true",
         help="List available templates")
     parser.add_option("-d", "--dump", dest="dump", metavar="TEMPLATE",
         help="Dump template to standard output")
     parser.add_option("-t", "--template", dest="template", default="crawl",
         help="Uses a custom template.")
     parser.add_option("--force", dest="force", action="store_true",
         help="If the spider already exists, overwrite it with the template")
Author: bihicheng, Project: scrapy, Lines: 12, Source: genspider.py

Example 9: add_options

# Required module: from scrapy.command import ScrapyCommand [as alias]
# Or: from scrapy.command.ScrapyCommand import add_options [as alias]
 def add_options(self, parser):
     ScrapyCommand.add_options(self, parser)
     parser.add_option("--server", dest="server", action="store_true", help="Run scraper server that polls for tasks and runs them")
     parser.add_option("--demonize", dest="demonize", action="store_true", help="Run scrapy as a demon process continiously processing the spider's tasks.")
     parser.add_option("--all", dest="all", action="store_true", help="Run all available tasks for the spider.")
     parser.add_option('--interval', dest='interval', help="Polling interval for executing the spider in demon mode.")
     parser.add_option('--priority', dest='priority', help="Priority of task that you want to execute." )
     parser.add_option('--task-name', dest='task_name', help="Name of task that you would like to execute.", default=None )
     parser.add_option('--task-id', dest='task_id', help="Id of task that you would like to execute.", default=None )
     parser.add_option('--child-logfile', dest='child_logfile', help="Pass this parameter if you want to log output of child processes.", default=None)
     parser.add_option("--child", dest="child", action="store_true", help="Make this process a child.")
Author: 1060460048, Project: djangoscraper, Lines: 13, Source: run.py

Example 10: add_options

# Required module: from scrapy.command import ScrapyCommand [as alias]
# Or: from scrapy.command.ScrapyCommand import add_options [as alias]
 def add_options(self, parser):
     ScrapyCommand.add_options(self, parser)
     parser.add_option("--get", dest="get", metavar="SETTING", \
         help="print raw setting value")
     parser.add_option("--getbool", dest="getbool", metavar="SETTING", \
         help="print setting value, intepreted as a boolean")
     parser.add_option("--getint", dest="getint", metavar="SETTING", \
         help="print setting value, intepreted as an integer")
     parser.add_option("--getfloat", dest="getfloat", metavar="SETTING", \
         help="print setting value, intepreted as an float")
     parser.add_option("--getlist", dest="getlist", metavar="SETTING", \
         help="print setting value, intepreted as an float")
Author: 1012, Project: scrapy, Lines: 14, Source: settings.py
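
These options map one-to-one onto the typed getters of Scrapy's settings object (get, getbool, getint, getfloat, getlist). A simplified run() sketch, assuming the command can reach its settings via self.crawler.settings as in old Scrapy (an assumption, simplified relative to the real settings command):

def run(self, args, opts):
    settings = self.crawler.settings  # assumption: legacy crawler attribute
    if opts.get:
        print(settings.get(opts.get))
    elif opts.getbool:
        print(settings.getbool(opts.getbool))
    elif opts.getint:
        print(settings.getint(opts.getint))
    elif opts.getfloat:
        print(settings.getfloat(opts.getfloat))
    elif opts.getlist:
        print(settings.getlist(opts.getlist))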

Example 11: add_options

# Required module: from scrapy.command import ScrapyCommand [as alias]
# Or: from scrapy.command.ScrapyCommand import add_options [as alias]
 def add_options(self, parser):
     ScrapyCommand.add_options(self, parser)
     parser.add_option("-s", action='store_true', dest='skip_start_urls', default=False,
                       help="Skip start URLs and work on queue")
     parser.add_option("-w", action='store_true', dest='dont_exit', default=False,
                       help="Don't exit when idle, wait around for more work")
     parser.add_option("-a", dest="spargs", action="append", default=[], metavar="NAME=VALUE", \
         help="set spider argument (may be repeated)")
     parser.add_option("-o", "--output", metavar="FILE", \
         help="dump scraped items into FILE (use - for stdout)")
     parser.add_option("-t", "--output-format", metavar="FORMAT", default="jsonlines", \
         help="format to use for dumping items with -o (default: %default)")
Author: chaimsolomon, Project: scrapy, Lines: 14, Source: crawl.py

Example 12: add_options

# Required module: from scrapy.command import ScrapyCommand [as alias]
# Or: from scrapy.command.ScrapyCommand import add_options [as alias]
 def add_options(self, parser):
     ScrapyCommand.add_options(self, parser)
     parser.add_option("--get", dest="get", metavar="SETTING", \
         help="print raw setting value")
     parser.add_option("--getbool", dest="getbool", metavar="SETTING", \
         help="print setting value, intepreted as a boolean")
     parser.add_option("--getint", dest="getint", metavar="SETTING", \
         help="print setting value, intepreted as an integer")
     parser.add_option("--getfloat", dest="getfloat", metavar="SETTING", \
         help="print setting value, intepreted as an float")
     parser.add_option("--getlist", dest="getlist", metavar="SETTING", \
         help="print setting value, intepreted as an float")
     parser.add_option("--init", dest="init", action="store_true", \
         help="print initial setting value (before loading extensions and spiders)")
Author: kenzouyeh, Project: scrapy, Lines: 16, Source: settings.py

Example 13: add_options

# Required module: from scrapy.command import ScrapyCommand [as alias]
# Or: from scrapy.command.ScrapyCommand import add_options [as alias]
 def add_options(self, parser):
     ScrapyCommand.add_options(self, parser)
     parser.add_option("-u", "--unreviewed", dest="unreviewed", action="store_true", default=False,
                       help="train and test with unreviewed files")
     parser.add_option("-r", "--reviewed", dest="reviewed", action="store_true", default=True,
                       help="train and test with reviewed files")
                     
     parser.add_option("--report", action="store_true", dest="print_report", default=False,
                       help="Print a detailed classification report.")
     parser.add_option("--confusion_matrix", action="store_true", dest="print_cm", default=False,
                   help="Print the confusion matrix.")
     #Not supported:
     parser.add_option("-t", "--top-n-features", dest="topn", type="int", default=0,
                       help="number of top features to reveal")
Author: dangra, Project: scrapy-sci, Lines: 16, Source: accuracy.py

Example 14: add_options

# Required module: from scrapy.command import ScrapyCommand [as alias]
# Or: from scrapy.command.ScrapyCommand import add_options [as alias]
 def add_options(self, parser):
     ScrapyCommand.add_options(self, parser)
     parser.add_option("--spider", dest="spider", default=None, \
         help="use this spider without looking for one")
     parser.add_option("--nolinks", dest="nolinks", action="store_true", \
         help="don't show links to follow (extracted requests)")
     parser.add_option("--noitems", dest="noitems", action="store_true", \
         help="don't show scraped items")
     parser.add_option("--nocolour", dest="nocolour", action="store_true", \
         help="avoid using pygments to colorize the output")
     parser.add_option("-r", "--rules", dest="rules", action="store_true", \
         help="use CrawlSpider rules to discover the callback")
     parser.add_option("-c", "--callback", dest="callback", \
         help="use this callback for parsing, instead looking for a callback")
Author: Aaron1011, Project: oh-mainline, Lines: 16, Source: parse.py

Example 15: execute

# Required module: from scrapy.command import ScrapyCommand [as alias]
# Or: from scrapy.command.ScrapyCommand import add_options [as alias]
def execute(argv=None):
    if argv is None:
        argv = sys.argv

    cmds = _get_commands_dict()

    cmdname = _get_command_name(argv)
    _update_default_settings("scrapy.conf.commands", cmdname)
    _update_default_settings(settings["COMMANDS_SETTINGS_MODULE"], cmdname)

    parser = optparse.OptionParser(
        formatter=optparse.TitledHelpFormatter(), conflict_handler="resolve", add_help_option=False
    )

    if cmdname in cmds:
        cmd = cmds[cmdname]
        cmd.add_options(parser)
        opts, args = parser.parse_args(args=argv[1:])
        cmd.process_options(args, opts)
        parser.usage = "%%prog %s %s" % (cmdname, cmd.syntax())
        parser.description = cmd.long_desc()
        if cmd.requires_project and not settings.settings_module:
            print "Error running: scrapy-ctl.py %s\n" % cmdname
            print "Cannot find project settings module in python path: %s" % settings.settings_module_path
            sys.exit(1)
        if opts.help:
            parser.print_help()
            sys.exit()
    elif not cmdname:
        cmd = ScrapyCommand()
        cmd.add_options(parser)
        opts, args = parser.parse_args(args=argv)
        cmd.process_options(args, opts)
        _print_usage(settings.settings_module)
        sys.exit(2)
    else:
        print "Unknown command: %s\n" % cmdname
        print 'Use "scrapy-ctl.py -h" for help'
        sys.exit(2)

    del args[0]  # remove command name from args
    send_catch_log(signal=command_executed, cmdname=cmdname, cmdobj=cmd, args=args, opts=opts)
    from scrapy.core.manager import scrapymanager

    scrapymanager.configure(control_reactor=True)
    ret = _run_command(cmd, args, opts)
    if ret is False:
        parser.print_help()
Author: kenzouyeh, Project: scrapy, Lines: 50, Source: cmdline.py
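
execute() is what a scrapy-ctl.py entry script would ultimately call; a minimal wrapper consistent with the example above (the __main__ guard and script framing are ours):

#!/usr/bin/env python
# Minimal entry script delegating to the execute() shown above (sketch).
import sys
from scrapy.cmdline import execute

if __name__ == "__main__":
    execute(sys.argv)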


Note: The scrapy.command.ScrapyCommand.add_options examples in this article were compiled by 纯净天空 from GitHub, MSDocs, and other open-source code and documentation platforms. The snippets are drawn from open-source projects contributed by their developers; copyright remains with the original authors, and distribution and use should follow each project's license. Please do not reproduce without permission.