This page collects typical usage examples of the LocalMRJobRunner._hadoop_conf_args method from Python's mrjob.local module. If you are unsure what _hadoop_conf_args does or how to call it, the examples below may help; for more context, see the enclosing class, mrjob.local.LocalMRJobRunner.
Fifteen code examples of LocalMRJobRunner._hadoop_conf_args follow, drawn from real test suites and ordered roughly by popularity.
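Before diving into the examples, here is a minimal sketch (written for this page, not taken from any of the test suites below) of how the method is typically exercised. Note that _hadoop_conf_args is a private helper whose signature varies across the mrjob versions sampled here: the newer snippets pass a step dict plus the step number and total number of steps, while the older ones pass only the step number and step count. The call below assumes the newer three-argument form.
# Minimal sketch, assuming a newer mrjob release where
# _hadoop_conf_args(step, step_num, num_steps) is the signature in use.
from mrjob.local import LocalMRJobRunner

runner = LocalMRJobRunner(
    conf_paths=[],   # ignore any mrjob.conf files on this machine
    jobconf={'FOO': 'bar'},
    hadoop_output_format='org.apache.hadoop.mapred.SequenceFileOutputFormat',
)

# Build the Hadoop streaming switches for step 0 of a single-step job;
# expected (per the tests below): '-D' jobconf pairs followed by '-outputformat <class>'.
print(runner._hadoop_conf_args({}, 0, 1))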
Example 1: test_hadoop_output_format
# Required import: from mrjob.local import LocalMRJobRunner
# Method under test: LocalMRJobRunner._hadoop_conf_args
def test_hadoop_output_format(self):
    format = "org.apache.hadoop.mapred.SequenceFileOutputFormat"
    runner = LocalMRJobRunner(conf_paths=[], hadoop_output_format=format)
    self.assertEqual(runner._hadoop_conf_args({}, 0, 1), ["-outputformat", format])
    # test multi-step job
    self.assertEqual(runner._hadoop_conf_args({}, 0, 2), [])
    self.assertEqual(runner._hadoop_conf_args({}, 1, 2), ["-outputformat", format])
Example 2: test_jobconf
# Required import: from mrjob.local import LocalMRJobRunner
# Method under test: LocalMRJobRunner._hadoop_conf_args
def test_jobconf(self):
    jobconf = {"FOO": "bar", "BAZ": "qux", "BAX": "Arnold"}
    runner = LocalMRJobRunner(conf_paths=[], jobconf=jobconf)
    self.assertEqual(runner._hadoop_conf_args({}, 0, 1), ["-D", "BAX=Arnold", "-D", "BAZ=qux", "-D", "FOO=bar"])
    runner = LocalMRJobRunner(conf_paths=[], jobconf=jobconf, hadoop_version="0.18")
    self.assertEqual(
        runner._hadoop_conf_args({}, 0, 1), ["-jobconf", "BAX=Arnold", "-jobconf", "BAZ=qux", "-jobconf", "FOO=bar"]
    )
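Examples 2 and 6 both pin down the same version-dependent behaviour: jobconf values are emitted with the generic -D switch on Hadoop 0.20 and later, but with the legacy -jobconf switch on 0.18, and in sorted key order either way. The helper below is an illustrative sketch of that selection logic only; jobconf_switch and jobconf_args are hypothetical names invented for this page, not mrjob internals.
# Illustrative sketch (hypothetical helpers, not mrjob's implementation):
# choose between the legacy '-jobconf KEY=VALUE' form (Hadoop < 0.20)
# and the generic '-D KEY=VALUE' form, emitting keys in sorted order.
def jobconf_switch(hadoop_version):
    major, minor = (int(x) for x in hadoop_version.split('.')[:2])
    return '-jobconf' if (major, minor) < (0, 20) else '-D'

def jobconf_args(jobconf, hadoop_version='0.20'):
    switch = jobconf_switch(hadoop_version)
    args = []
    for key in sorted(jobconf):
        args.extend([switch, '%s=%s' % (key, jobconf[key])])
    return args

# jobconf_args({'FOO': 'bar', 'BAX': 'Arnold'}, '0.18')
#   -> ['-jobconf', 'BAX=Arnold', '-jobconf', 'FOO=bar']
# jobconf_args({'FOO': 'bar', 'BAX': 'Arnold'}, '0.20')
#   -> ['-D', 'BAX=Arnold', '-D', 'FOO=bar']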
Example 3: test_hadoop_input_format
# Required import: from mrjob.local import LocalMRJobRunner
# Method under test: LocalMRJobRunner._hadoop_conf_args
def test_hadoop_input_format(self):
    format = 'org.apache.hadoop.mapred.SequenceFileInputFormat'
    runner = LocalMRJobRunner(conf_paths=[], hadoop_input_format=format)
    self.assertEqual(runner._hadoop_conf_args(0, 1),
                     ['-inputformat', format])
    # test multi-step job
    self.assertEqual(runner._hadoop_conf_args(0, 2),
                     ['-inputformat', format])
    self.assertEqual(runner._hadoop_conf_args(1, 2), [])
Example 4: test_hadoop_output_format
# Required import: from mrjob.local import LocalMRJobRunner
# Method under test: LocalMRJobRunner._hadoop_conf_args
def test_hadoop_output_format(self):
    format = 'org.apache.hadoop.mapred.SequenceFileOutputFormat'
    runner = LocalMRJobRunner(conf_path=False, hadoop_output_format=format)
    assert_equal(runner._hadoop_conf_args(0, 1),
                 ['-outputformat', format])
    # test multi-step job
    assert_equal(runner._hadoop_conf_args(0, 2), [])
    assert_equal(runner._hadoop_conf_args(1, 2),
                 ['-outputformat', format])
Example 5: test_hadoop_output_format
# Required import: from mrjob.local import LocalMRJobRunner
# Method under test: LocalMRJobRunner._hadoop_conf_args
def test_hadoop_output_format(self):
    format = 'org.apache.hadoop.mapred.SequenceFileOutputFormat'
    runner = LocalMRJobRunner(conf_paths=[], hadoop_output_format=format)
    self.assertEqual(runner._hadoop_conf_args({}, 0, 1),
                     ['-D', 'mapred.job.name=None > None',
                      '-outputformat', format])
    # test multi-step job
    self.assertEqual(runner._hadoop_conf_args({}, 0, 2),
                     ['-D', 'mapred.job.name=None > None (step 1 of 2)'])
    self.assertEqual(runner._hadoop_conf_args({}, 1, 2),
                     ['-D', 'mapred.job.name=None > None (step 2 of 2)',
                      '-outputformat', format
                      ])
Example 6: test_jobconf
# Required import: from mrjob.local import LocalMRJobRunner
# Method under test: LocalMRJobRunner._hadoop_conf_args
def test_jobconf(self):
    jobconf = {'FOO': 'bar', 'BAZ': 'qux', 'BAX': 'Arnold'}
    runner = LocalMRJobRunner(conf_paths=[], jobconf=jobconf)
    self.assertEqual(runner._hadoop_conf_args(0, 1),
                     ['-D', 'BAX=Arnold',
                      '-D', 'BAZ=qux',
                      '-D', 'FOO=bar',
                      ])
    runner = LocalMRJobRunner(conf_paths=[], jobconf=jobconf,
                              hadoop_version='0.18')
    self.assertEqual(runner._hadoop_conf_args(0, 1),
                     ['-jobconf', 'BAX=Arnold',
                      '-jobconf', 'BAZ=qux',
                      '-jobconf', 'FOO=bar',
                      ])
Example 7: test_empty_jobconf_values
# Required import: from mrjob.local import LocalMRJobRunner
# Method under test: LocalMRJobRunner._hadoop_conf_args
def test_empty_jobconf_values(self):
    # value of None means to omit that jobconf
    jobconf = {'foo': '', 'bar': None}
    runner = LocalMRJobRunner(conf_paths=[], jobconf=jobconf)
    self.assertEqual(runner._hadoop_conf_args({}, 0, 1),
                     ['-D', 'foo='])
Example 8: test_jobconf_job_name_custom
# Required import: from mrjob.local import LocalMRJobRunner
# Method under test: LocalMRJobRunner._hadoop_conf_args
def test_jobconf_job_name_custom(self):
    jobconf = {'BAX': 'Arnold', 'mapred.job.name': 'Foo'}
    runner = LocalMRJobRunner(conf_paths=[], jobconf=jobconf,
                              hadoop_version='0.18')
    self.assertEqual(runner._hadoop_conf_args({}, 0, 1),
                     ['-jobconf', 'BAX=Arnold',
                      '-jobconf', 'mapred.job.name=Foo'
                      ])
Example 9: test_partitioner
# Required import: from mrjob.local import LocalMRJobRunner
# Method under test: LocalMRJobRunner._hadoop_conf_args
def test_partitioner(self):
    partitioner = 'org.apache.hadoop.mapreduce.Partitioner'
    runner = LocalMRJobRunner(conf_paths=[], partitioner=partitioner)
    self.assertEqual(runner._hadoop_conf_args({}, 0, 1),
                     ['-D', 'mapred.job.name=None > None',
                      '-partitioner', partitioner,
                      ])
Example 10: test_cmdenv
# Required import: from mrjob.local import LocalMRJobRunner
# Method under test: LocalMRJobRunner._hadoop_conf_args
def test_cmdenv(self):
    cmdenv = {'FOO': 'bar', 'BAZ': 'qux', 'BAX': 'Arnold'}
    runner = LocalMRJobRunner(conf_paths=[], cmdenv=cmdenv)
    self.assertEqual(runner._hadoop_conf_args(0, 1),
                     ['-cmdenv', 'BAX=Arnold',
                      '-cmdenv', 'BAZ=qux',
                      '-cmdenv', 'FOO=bar',
                      ])
Example 11: test_jobconf_from_step
# Required import: from mrjob.local import LocalMRJobRunner
# Method under test: LocalMRJobRunner._hadoop_conf_args
def test_jobconf_from_step(self):
    jobconf = {'FOO': 'bar', 'BAZ': 'qux'}
    runner = LocalMRJobRunner(conf_paths=[], jobconf=jobconf)
    step = {'jobconf': {'BAZ': 'quux', 'BAX': 'Arnold'}}
    self.assertEqual(runner._hadoop_conf_args(step, 0, 1),
                     ['-D', 'BAX=Arnold',
                      '-D', 'BAZ=quux',
                      '-D', 'FOO=bar',
                      ])
Example 12: test_configuration_translation
# Required import: from mrjob.local import LocalMRJobRunner
# Method under test: LocalMRJobRunner._hadoop_conf_args
def test_configuration_translation(self):
    jobconf = {'mapred.jobtracker.maxtasks.per.job': 1}
    with no_handlers_for_logger('mrjob.compat'):
        runner = LocalMRJobRunner(conf_paths=[], jobconf=jobconf,
                                  hadoop_version='0.21')
        self.assertEqual(runner._hadoop_conf_args({}, 0, 1),
                         ['-D', 'mapred.jobtracker.maxtasks.per.job=1',
                          '-D', 'mapreduce.jobtracker.maxtasks.perjob=1'
                          ])
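Example 12 relies on mrjob's compat layer: when the requested Hadoop version is 0.21, the deprecated 0.20-era property name is translated to its 0.21 spelling and both forms are emitted, so either Hadoop version will pick the value up. The snippet below is a hypothetical sketch of that idea; JOBCONF_TRANSLATIONS and with_translations are invented for illustration and are not mrjob's actual internals.
# Hypothetical sketch of deprecated-name translation (not mrjob code).
# The one mapping shown is the pair asserted in the test above.
JOBCONF_TRANSLATIONS = {
    'mapred.jobtracker.maxtasks.per.job': 'mapreduce.jobtracker.maxtasks.perjob',
}

def with_translations(jobconf):
    out = dict(jobconf)
    for old, new in JOBCONF_TRANSLATIONS.items():
        if old in jobconf:
            out.setdefault(new, jobconf[old])   # keep both spellings
    return out

# with_translations({'mapred.jobtracker.maxtasks.per.job': 1})
#   -> {'mapred.jobtracker.maxtasks.per.job': 1,
#       'mapreduce.jobtracker.maxtasks.perjob': 1}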
Example 13: test_hadoop_extra_args_comes_first
# Required import: from mrjob.local import LocalMRJobRunner
# Method under test: LocalMRJobRunner._hadoop_conf_args
def test_hadoop_extra_args_comes_first(self):
    runner = LocalMRJobRunner(
        cmdenv={"FOO": "bar"},
        conf_paths=[],
        hadoop_extra_args=["-libjar", "qux.jar"],
        hadoop_input_format="FooInputFormat",
        hadoop_output_format="BarOutputFormat",
        jobconf={"baz": "quz"},
        partitioner="java.lang.Object",
    )
    # hadoop_extra_args should come first
    conf_args = runner._hadoop_conf_args({}, 0, 1)
    self.assertEqual(conf_args[:2], ["-libjar", "qux.jar"])
    self.assertEqual(len(conf_args), 12)
Example 14: test_hadoop_extra_args_comes_first
# Required import: from mrjob.local import LocalMRJobRunner
# Method under test: LocalMRJobRunner._hadoop_conf_args
def test_hadoop_extra_args_comes_first(self):
    runner = LocalMRJobRunner(
        cmdenv={'FOO': 'bar'},
        conf_paths=[],
        hadoop_extra_args=['-libjar', 'qux.jar'],
        hadoop_input_format='FooInputFormat',
        hadoop_output_format='BarOutputFormat',
        jobconf={'baz': 'quz'},
        partitioner='java.lang.Object',
    )
    # hadoop_extra_args should come first
    conf_args = runner._hadoop_conf_args({}, 0, 1)
    self.assertEqual(conf_args[:2], ['-libjar', 'qux.jar'])
    self.assertEqual(len(conf_args), 14)
Example 15: test_hadoop_extra_args
# Required import: from mrjob.local import LocalMRJobRunner
# Method under test: LocalMRJobRunner._hadoop_conf_args
def test_hadoop_extra_args(self):
    extra_args = ['-foo', 'bar']
    runner = LocalMRJobRunner(conf_path=False,
                              hadoop_extra_args=extra_args)
    assert_equal(runner._hadoop_conf_args(0, 1), extra_args)