This page collects typical usage examples of the Python method mrjob.fs.hadoop.HadoopFilesystem.get_hadoop_bin. If you are wondering what HadoopFilesystem.get_hadoop_bin does, how to call it, or what it looks like in real code, the selected example below should help. You can also read more about the class it belongs to, mrjob.fs.hadoop.HadoopFilesystem.
The section below shows 1 code example of the HadoopFilesystem.get_hadoop_bin method, ordered by popularity by default.
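Before the test case itself, here is a minimal sketch of the behaviour the tests below exercise. It is inferred from the assertions in the example rather than from mrjob's documentation, so treat it as an illustration, not the library's contract:

from mrjob.fs.hadoop import HadoopFilesystem

# With no explicit binary configured, get_hadoop_bin() looks for a
# "hadoop" executable via HADOOP_*-style environment variables and
# $PATH, and falls back to the bare ['hadoop'] command.
fs = HadoopFilesystem()
print(fs.get_hadoop_bin())  # e.g. ['hadoop']

# An explicit hadoop_bin takes precedence and is returned as-is,
# without searching the environment.
fs = HadoopFilesystem(hadoop_bin=['hadoop', '-v'])
print(fs.get_hadoop_bin())  # ['hadoop', '-v']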
Example 1: FindHadoopBinTestCase
# Required import: from mrjob.fs.hadoop import HadoopFilesystem [as alias]
# Or: from mrjob.fs.hadoop.HadoopFilesystem import get_hadoop_bin [as alias]
class FindHadoopBinTestCase(SandboxedTestCase):

    def setUp(self):
        super(FindHadoopBinTestCase, self).setUp()

        # track calls to which()
        self.which = self.start(patch('mrjob.fs.hadoop.which', wraps=which))

        # keep which() from searching in /bin, etc.
        os.environ['PATH'] = self.tmp_dir

        # create basic HadoopFilesystem (okay to overwrite)
        self.fs = HadoopFilesystem()

    def _add_hadoop_bin_for_envvar(self, envvar, *dirnames):
        """Add a fake "Hadoop" binary to its own subdirectory of
        ``self.tmp_dir``, and set *os.environ[envvar]* to point at it. You can
        use *dirnames* to put the binary in a subdirectory of
        *os.environ[envvar]* (e.g. ``'bin'``).

        Return the path to the fake Hadoop binary.
        """
        os.environ[envvar] = join(self.tmp_dir, envvar.lower())

        hadoop_bin_path = join(join(os.environ[envvar], *dirnames), 'hadoop')
        self.makefile(hadoop_bin_path, executable=True)

        return hadoop_bin_path

    # tests without environment variables

    def test_do_nothing_on_init(self):
        self.assertFalse(self.which.called)

    def test_fallback(self):
        self.assertFalse(self.which.called)
        self.assertEqual(self.fs.get_hadoop_bin(), ['hadoop'])
        self.which.assert_called_once_with('hadoop', path=None)

    def test_predefined_hadoop_bin(self):
        self.fs = HadoopFilesystem(hadoop_bin=['hadoop', '-v'])
        self.assertEqual(self.fs.get_hadoop_bin(), ['hadoop', '-v'])
        self.assertFalse(self.which.called)

    # environment variable tests

    def _test_environment_variable(self, envvar, *dirnames):
        """Check if we can find the hadoop binary from *envvar*"""
        # okay to add after HadoopFilesystem() is created; it hasn't looked yet
        hadoop_bin = self._add_hadoop_bin_for_envvar(envvar, *dirnames)
        self.assertEqual(self.fs.get_hadoop_bin(), [hadoop_bin])

    def test_hadoop_prefix(self):
        self._test_environment_variable('HADOOP_PREFIX', 'bin')

    def test_hadoop_home_envvar(self):
        self._test_environment_variable('HADOOP_HOME', 'bin')

    def test_hadoop_install(self):
        self._test_environment_variable('HADOOP_INSTALL', 'bin')

    def test_hadoop_install_hadoop_subdir(self):
        self._test_environment_variable('HADOOP_INSTALL', 'hadoop', 'bin')

    def test_path(self):
        self._test_environment_variable('PATH')

    def test_two_part_path(self):
        hadoop_path1 = join(self.tmp_dir, 'path1')
        hadoop_path1_bin = self.makefile(join(hadoop_path1, 'hadoop'),
                                         executable=True)
        hadoop_path2 = join(self.tmp_dir, 'path2')
        hadoop_path2_bin = self.makefile(join(hadoop_path2, 'hadoop'),
                                         executable=True)

        os.environ['PATH'] = ':'.join([hadoop_path1, hadoop_path2])

        self.assertEqual(self.fs.get_hadoop_bin(), [hadoop_path1_bin])
        self.assertNotEqual(self.fs.get_hadoop_bin(), [hadoop_path2_bin])

    def test_hadoop_mapred_home(self):
        self._test_environment_variable('HADOOP_MAPRED_HOME', 'bin')

    def test_hadoop_anything_home(self):
        self._test_environment_variable('HADOOP_ANYTHING_HOME', 'bin')

    def test_other_environment_variable(self):
        self._add_hadoop_bin_for_envvar('HADOOP_YARN_MRJOB_DIR', 'bin')

        self.assertEqual(self.fs.get_hadoop_bin(), ['hadoop'])

    # precedence tests

    def test_hadoop_prefix_beats_hadoop_home_envvar(self):
        # ......... (rest of this code example omitted) .........