

Python hdfs.default_is_local Function Code Examples

This article collects typical usage examples of the Python function pydoop.hdfs.default_is_local. If you are wondering what default_is_local does, how to call it, or where to find working examples, the curated snippets below should help.


Below are 15 code examples of default_is_local, sorted by popularity by default. You can upvote the examples you like or find useful; your votes help the system recommend better Python code examples.
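
Before the examples, here is a minimal sketch (not taken from any of the projects below) of what the function reports: default_is_local() returns True when Hadoop's configured default file system (fs.defaultFS, formerly fs.default.name) is the local file system rather than HDFS. This is why most of the snippets below use it to switch between local and HDFS expectations.

import pydoop.hdfs as hdfs

# True when the configured default FS is the local file system,
# False when it points at an HDFS namenode.
if hdfs.default_is_local():
    print("default FS is the local file system")
else:
    print("default FS is HDFS")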

Example 1: run_pipes

def run_pipes(
    executable,
    input_path,
    output_path,
    more_args=None,
    properties=None,
    force_pydoop_submitter=False,
    hadoop_conf_dir=None,
    logger=None,
    keep_streams=False,
):
    """
    Run a pipes command.

    ``more_args`` (after setting input/output path) and ``properties``
    are passed to :func:`run_cmd`.

    If not specified otherwise, this function sets the properties
    ``hadoop.pipes.java.recordreader`` and ``hadoop.pipes.java.recordwriter``
    to ``"true"``.

    This function works around a bug in Hadoop pipes that affects
    versions of Hadoop with security when the local file system is
    used as the default FS (no HDFS); see
    https://issues.apache.org/jira/browse/MAPREDUCE-4000.  In those
    set-ups, the function uses Pydoop's own pipes submitter
    application.  You can force the use of Pydoop's submitter by
    passing ``force_pydoop_submitter=True``.
    """
    if logger is None:
        logger = utils.NullLogger()
    if not hdfs.path.exists(executable):
        raise IOError("executable %s not found" % executable)
    if not hdfs.path.exists(input_path) and not (set(input_path) & GLOB_CHARS):
        raise IOError("input path %s not found" % input_path)
    if properties is None:
        properties = {}
    properties.setdefault("hadoop.pipes.java.recordreader", "true")
    properties.setdefault("hadoop.pipes.java.recordwriter", "true")
    if force_pydoop_submitter:
        use_pydoop_submit = True
    else:
        use_pydoop_submit = False
        ver = pydoop.hadoop_version_info()
        if ver.has_security():
            if ver.is_cdh_mrv2() and hdfs.default_is_local():
                raise RuntimeError("mrv2 on local fs not supported yet")
            use_pydoop_submit = hdfs.default_is_local()
    args = ["-program", executable, "-input", input_path, "-output", output_path]
    if more_args is not None:
        args.extend(more_args)
    if use_pydoop_submit:
        submitter = "it.crs4.pydoop.pipes.Submitter"
        pydoop_jar = pydoop.jar_path()
        args.extend(("-libjars", pydoop_jar))
        return run_class(
            submitter, args, properties, classpath=pydoop_jar,
            logger=logger, keep_streams=keep_streams
        )
    else:
        return run_cmd(
            "pipes", args, properties, hadoop_conf_dir=hadoop_conf_dir,
            logger=logger, keep_streams=keep_streams
        )
Developer ID: tivvit; project: pydoop; lines of code: 60; source: hadut.py
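
As a usage illustration, here is a minimal sketch with hypothetical paths (not taken from the pydoop sources): run_pipes lives in pydoop.hadut and takes the pipes executable plus input and output paths, forwarding extra properties to the underlying pipes command. Running it assumes a working Hadoop installation and configuration.

from pydoop.hadut import run_pipes

# All paths and property values below are hypothetical, for illustration only.
run_pipes(
    "hdfs:///user/me/bin/wordcount",   # pipes executable (hypothetical)
    "hdfs:///user/me/wc_input",        # input directory (hypothetical)
    "hdfs:///user/me/wc_output",       # output directory (hypothetical)
    properties={"mapred.reduce.tasks": "2"},
)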

Example 2: without_user

 def without_user(self):
     p = 'foo/bar'
     abs_p = hdfs.path.abspath(p, user=None, local=False)
     if hdfs.default_is_local():
         self.assertEqual(abs_p, '%s%s' % (self.root, os.path.abspath(p)))
     else:
         self.assertEqual(abs_p, '%s/user/%s/%s' % (self.root, DEFAULT_USER, p))
Developer ID: ilveroluca; project: pydoop; lines of code: 7; source: test_path.py

Example 3: stat_on_local

 def stat_on_local(self):
     wd_ = tempfile.mkdtemp(prefix='pydoop_', suffix=UNI_CHR)
     p_ = os.path.join(wd_, make_random_str())
     if hdfs.default_is_local():
         wd, p = wd_, p_
         host = "default"
     else:
         wd, p = ('file:%s' % _ for _ in (wd_, p_))
         host = ""
     fs = hdfs.hdfs(host, 0)
     with fs.open_file(p_, 'w') as fo:
         fo.write(make_random_str())
     info = fs.get_path_info(p_)
     fs.close()
     s = hdfs.path.stat(p)
     os_s = os.stat(p_)
     for n in dir(s):
         if n.startswith('st_'):
             # compare against os.stat when possible, else against path info;
             # the assertion must run in either case
             try:
                 exp_v = getattr(os_s, n)
             except AttributeError:
                 try:
                     exp_v = info[self.NMAP[n]]
                 except KeyError:
                     continue
             self.assertEqual(getattr(s, n), exp_v)
     self.__check_extra_args(s, info)
     self.__check_wrapper_funcs(p)
     hdfs.rmr(wd)
Developer ID: kikkomep; project: pydoop; lines of code: 29; source: test_path.py

Example 4: good

 def good(self):
     cases = [
         ('hdfs://localhost:9000/', ('localhost', 9000, '/')),
         ('hdfs://localhost:9000/a/b', ('localhost', 9000, '/a/b')),
         ('hdfs://localhost/a/b', ('localhost', DEFAULT_PORT, '/a/b')),
         ('hdfs:///a/b', ('default', 0, '/a/b')),
         ('hdfs:/', ('default', 0, '/')),
         ('file:///a/b', ('', 0, '/a/b')),
         ('file:/a/b', ('', 0, '/a/b')),
         ('file:///a', ('', 0, '/a')),
         ('file:/a', ('', 0, '/a')),
         ('file://temp/foo.txt', ('', 0, 'temp/foo.txt')),
         ('file://temp', ('', 0, 'temp')),
     ]
     if hdfs.default_is_local():
         cases.extend([
             ('///a/b', ('', 0, '/a/b')),
             ('/a/b', ('', 0, '/a/b')),
             ('a/b', ('', 0, 'a/b')),
         ])
     else:
         cases.extend([
             ('///a/b', ('default', 0, '/a/b')),
             ('/a/b', ('default', 0, '/a/b')),
             ('a/b', ('default', 0, '/user/%s/a/b' % DEFAULT_USER)),
         ])
     for p, r in cases:
         self.assertEqual(hdfs.path.split(p), r)
     for p, r in cases[1:]:
         self.assertEqual(hdfs.path.split(p+UNI_CHR), uni_last(r))
Developer ID: kikkomep; project: pydoop; lines of code: 30; source: test_path.py
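
The triples returned by hdfs.path.split are (hostname, port, path). A quick sketch based on the cases above (the last call's result depends on whether the default FS is local or HDFS):

import pydoop.hdfs as hdfs

print(hdfs.path.split('hdfs://localhost:9000/a/b'))  # ('localhost', 9000, '/a/b')
print(hdfs.path.split('file:///a/b'))                # ('', 0, '/a/b')
print(hdfs.path.split('a/b'))  # local default FS: ('', 0, 'a/b')
                               # HDFS default FS: ('default', 0, '/user/<user>/a/b')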

Example 5: with_user

 def with_user(self):
     p = 'foo/bar'
     abs_p = hdfs.path.abspath(p, user="pydoop", local=False)
     if hdfs.default_is_local():
         self.assertEqual(abs_p, '%s%s' % (self.root, os.path.abspath(p)))
     else:
         self.assertEqual(abs_p, '%s/user/pydoop/%s' % (self.root, p))
Developer ID: ilveroluca; project: pydoop; lines of code: 7; source: test_path.py

Example 6: setUp

 def setUp(self):
     if hdfs.default_is_local():
         self.root = "file:"
     else:
         fs = hdfs.hdfs("default", 0)
         self.root = "hdfs://%s:%s" % (fs.host, fs.port)
         fs.close()
Developer ID: ilveroluca; project: pydoop; lines of code: 7; source: test_path.py

Example 7: capacity

 def capacity(self):
     fs = hdfs.hdfs("", 0)
     self.assertRaises(RuntimeError, fs.capacity)
     fs.close()
     if not hdfs.default_is_local():
         fs = hdfs.hdfs("default", 0)
         cap = fs.capacity()
         self.assertGreaterEqual(cap, 0)
Developer ID: kikkomep; project: pydoop; lines of code: 8; source: test_hdfs.py

Example 8: expanduser

 def expanduser(self):
     for pre in '~', '~%s' % DEFAULT_USER:
         for rest in '', '/d':
             p = '%s%s' % (pre, rest)
             if hdfs.default_is_local():
                 self.assertEqual(
                     hdfs.path.expanduser(p), os.path.expanduser(p)
                 )
             else:
                 exp_res = '/user/%s%s' % (DEFAULT_USER, rest)
                 self.assertEqual(hdfs.path.expanduser(p), exp_res)
Developer ID: kikkomep; project: pydoop; lines of code: 11; source: test_path.py
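
In other words (a minimal sketch, not from the test suite): on an HDFS default FS, '~' expands to the user's HDFS home under /user, while on a local default FS it behaves like os.path.expanduser:

import pydoop.hdfs as hdfs

# With HDFS as the default FS this prints '/user/<current user>/d';
# with a local default FS it matches os.path.expanduser('~/d').
print(hdfs.path.expanduser('~/d'))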

Example 9: bad

 def bad(self):
     cases = [
         'ftp://localhost:9000/',             # bad scheme
         'hdfs://localhost:spam/',            # port is not an int
         'hdfs://localhost:9000',             # path part is empty
         'hdfs://localhost:9000/a:b',         # colon outside netloc
     ]
     if not hdfs.default_is_local():
         cases.append('/localhost:9000/a/b')  # colon outside netloc
     for p in cases:
         self.assertRaises(ValueError, hdfs.path.split, p)
Developer ID: ilveroluca; project: pydoop; lines of code: 11; source: test_path.py

Example 10: good_with_user

 def good_with_user(self):
     if hdfs.default_is_local():
         cases = [('a/b', u, ('', 0, 'a/b'))
                  for u in (None, DEFAULT_USER, 'foo')]
     else:
         cases = [
             ('a/b', None, ('default', 0, '/user/%s/a/b' % DEFAULT_USER)),
             ('a/b', DEFAULT_USER, ('default', 0, '/user/%s/a/b' % DEFAULT_USER)),
             ('a/b', 'foo', ('default', 0, '/user/foo/a/b')),
         ]
     for p, u, r in cases:
         self.assertEqual(hdfs.path.split(p, u), r)
Developer ID: ilveroluca; project: pydoop; lines of code: 11; source: test_path.py

Example 11: setUp

 def setUp(self):
     self.hp_cases = [("default", 0)]
     self.u_cases = [None, CURRENT_USER]
     if not hdfs.default_is_local():
         self.hp_cases.append((u.HDFS_HOST, u.HDFS_PORT))
         self.u_cases.append("nobody")
         try:
             hdfs_ip = socket.gethostbyname(u.HDFS_HOST)
         except socket.gaierror:
             pass
         else:
             self.hp_cases.append((hdfs_ip, u.HDFS_PORT))
Developer ID: kmatzen; project: pydoop; lines of code: 12; source: test_hdfs_fs.py

Example 12: get_hosts

 def get_hosts(self):
     if hdfs.default_is_local():
         # only run on HDFS
         return
     hdfs.dump(self.data, self.hdfs_paths[0])
     fs = hdfs.hdfs("default", 0)
     hs = fs.get_hosts(self.hdfs_paths[0], 0, 10)
     self.assertTrue(len(hs) > 0)
     self.assertRaises(
         ValueError, fs.get_hosts, self.hdfs_paths[0], -10, 10
     )
     self.assertRaises(ValueError, fs.get_hosts, self.hdfs_paths[0], 0, -10)
Developer ID: kikkomep; project: pydoop; lines of code: 12; source: test_hdfs.py
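
get_hosts reports, for a byte range of a file, which datanodes hold the corresponding blocks; as the test shows, it is only meaningful when the default FS is HDFS, and negative offsets or lengths raise ValueError. A minimal sketch with a hypothetical path:

import pydoop.hdfs as hdfs

if not hdfs.default_is_local():
    fs = hdfs.hdfs("default", 0)
    # One entry per block overlapping bytes [0, 10); each entry lists the
    # hostnames of the datanodes storing that block.
    blocks = fs.get_hosts("/user/me/data.bin", 0, 10)  # hypothetical path
    print(blocks)
    fs.close()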

Example 13: bad

 def bad(self):
     cases = [
         '',                                  # not allowed in the Java API
         'hdfs:',                             # no scheme-specific part
         'hdfs://',                           # path part is empty
         'ftp://localhost:9000/',             # bad scheme
         'hdfs://localhost:spam/',            # port is not an int
         'hdfs://localhost:9000',             # path part is empty
         'hdfs://localhost:9000/a:b',         # colon outside netloc
         '//localhost:9000/a/b',              # null scheme
     ]
     if not hdfs.default_is_local():
         cases.append('/localhost:9000/a/b')  # colon outside netloc
     for p in cases:
         self.assertRaises(ValueError, hdfs.path.split, p)
Developer ID: kikkomep; project: pydoop; lines of code: 15; source: test_path.py

Example 14: setUp

 def setUp(self):
     wd = tempfile.mkdtemp()
     wd_bn = os.path.basename(wd)
     self.local_wd = "file:%s" % wd
     fs = hdfs.hdfs("default", 0)
     fs.create_directory(wd_bn)
     self.hdfs_wd = fs.get_path_info(wd_bn)["name"]
     fs.close()
     basenames = ["test_path_%d" % i for i in xrange(2)]
     self.local_paths = ["%s/%s" % (self.local_wd, bn) for bn in basenames]
     self.hdfs_paths = ["%s/%s" % (self.hdfs_wd, bn) for bn in basenames]
     self.data = make_random_data(4 * BUFSIZE + BUFSIZE / 2)
     for path in self.local_paths:
         self.assertTrue(path.startswith("file:"))
     for path in self.hdfs_paths:
         if not hdfs.default_is_local():
             self.assertTrue(path.startswith("hdfs:"))
Developer ID: ZEMUSHKA; project: pydoop; lines of code: 17; source: test_hdfs.py

Example 15: stat

 def stat(self):
     if hdfs.default_is_local():
         return
     bn = '%s%s' % (make_random_str(), UNI_CHR)
     fn = '/user/%s/%s' % (DEFAULT_USER, bn)
     fs = hdfs.hdfs("default", 0)
     p = "hdfs://%s:%s%s" % (fs.host, fs.port, fn)
     with fs.open_file(fn, 'w') as fo:
         fo.write(make_random_str())
     info = fs.get_path_info(fn)
     fs.close()
     s = hdfs.path.stat(p)
     for n1, n2 in self.NMAP.iteritems():
         attr = getattr(s, n1, None)
         self.assertFalse(attr is None)
         self.assertEqual(attr, info[n2])
     self.__check_extra_args(s, info)
     self.__check_wrapper_funcs(p)
     hdfs.rmr(p)
Developer ID: kikkomep; project: pydoop; lines of code: 19; source: test_path.py


Note: the pydoop.hdfs.default_is_local examples in this article were compiled by 纯净天空 from GitHub, MSDocs, and other open-source code and documentation platforms. The snippets were selected from open-source projects contributed by their respective authors, and copyright of the source code remains with the original authors. Consult each project's License before distributing or using the code; do not republish without permission.