当前位置: 首页>>代码示例>>Python>>正文


Python hdfs.rmr函数代码示例

本文整理汇总了Python中pydoop.hdfs.rmr函数的典型用法代码示例。如果您正苦于以下问题:Python rmr函数的具体用法?Python rmr怎么用?Python rmr使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。


在下文中一共展示了rmr函数的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。

示例1: stat_on_local

 def stat_on_local(self):
     """Check that ``hdfs.path.stat`` on a local path agrees with ``os.stat``.

     Creates a file in a fresh temp dir, then compares every ``st_*``
     attribute of the hdfs stat result against ``os.stat``, falling back
     to the hdfs path-info dict (via ``self.NMAP``) for attributes that
     ``os.stat`` does not expose; attributes in neither are skipped.
     """
     wd_ = tempfile.mkdtemp(prefix='pydoop_', suffix=UNI_CHR)
     p_ = os.path.join(wd_, make_random_str())
     if hdfs.default_is_local():
         wd, p = wd_, p_
         host = "default"
     else:
         wd, p = ('file:%s' % _ for _ in (wd_, p_))
         host = ""
     fs = hdfs.hdfs(host, 0)
     with fs.open_file(p_, 'w') as fo:
         fo.write(make_random_str())
     info = fs.get_path_info(p_)
     fs.close()
     s = hdfs.path.stat(p)
     os_s = os.stat(p_)
     for n in dir(s):
         if not n.startswith('st_'):
             continue
         try:
             exp_v = getattr(os_s, n)
         except AttributeError:
             try:
                 exp_v = info[self.NMAP[n]]
             except KeyError:
                 continue
         # BUG FIX: this assertion was indented inside the except block,
         # so it only ran on the AttributeError fallback path and every
         # attribute that os.stat *does* provide went unchecked. It must
         # execute for both sources of exp_v.
         self.assertEqual(getattr(s, n), exp_v)
     self.__check_extra_args(s, info)
     self.__check_wrapper_funcs(p)
     hdfs.rmr(wd)
开发者ID:kikkomep,项目名称:pydoop,代码行数:29,代码来源:test_path.py

示例2: samefile_link

 def samefile_link(self):
     """A symlink and its target must be reported as the same file."""
     tmp_dir = tempfile.mkdtemp(prefix='pydoop_', suffix=UNI_CHR)
     link_path = os.path.join(tmp_dir, make_random_str())
     os.symlink(tmp_dir, link_path)
     same = hdfs.path.samefile('file:%s' % link_path, 'file:%s' % tmp_dir)
     self.assertTrue(same)
     hdfs.rmr('file:%s' % tmp_dir)
开发者ID:kikkomep,项目名称:pydoop,代码行数:7,代码来源:test_path.py

示例3: good

 def good(self):
     """dump/exists/rmr round-trip for an ASCII and a unicode path."""
     base_path = make_random_str()
     for candidate in (base_path, base_path + UNI_CHR):
         hdfs.dump("foo\n", candidate)
         self.assertTrue(hdfs.path.exists(candidate))
         hdfs.rmr(candidate)
         self.assertFalse(hdfs.path.exists(candidate))
开发者ID:kikkomep,项目名称:pydoop,代码行数:7,代码来源:test_path.py

示例4: _clean_up

 def _clean_up(*paths):
     """Best-effort removal of each given path; errors are logged, never raised."""
     for target in paths:
         try:
             log.debug("Removing path: %s", target)
             phdfs.rmr(target)
         except StandardError as err:
             # Keep going: cleanup must not abort because one path failed.
             # NOTE(review): StandardError is Python 2 only.
             log.warning("Error deleting path %s", target)
             log.exception(err)
开发者ID:crs4,项目名称:hadoop-galaxy,代码行数:8,代码来源:dist_cat_paths.py

示例5: realpath

 def realpath(self):
     """hdfs.path.realpath must resolve a local symlink like os.path.realpath."""
     tmp_dir = tempfile.mkdtemp(prefix='pydoop_', suffix=UNI_CHR)
     link_path = os.path.join(tmp_dir, make_random_str())
     os.symlink(tmp_dir, link_path)
     resolved = 'file:%s' % os.path.realpath(tmp_dir)
     self.assertEqual(hdfs.path.realpath('file:%s' % link_path), resolved)
     hdfs.rmr('file:%s' % tmp_dir)
开发者ID:crs4,项目名称:pydoop,代码行数:8,代码来源:test_path.py

示例6: cleanup

def cleanup(out_pathset):
    """Delete every path in the job's output pathset (best effort).

    Failures are reported on stderr and do not stop the loop.
    """
    for out_path in out_pathset:
        try:
            # Python 2 print-to-stderr statements, kept as-is.
            print >> sys.stderr, "Deleting output path", out_path
            phdfs.rmr(out_path)
        except StandardError as e:
            print >> sys.stderr, "Error!", str(e)
开发者ID:crs4,项目名称:seal-galaxy,代码行数:8,代码来源:recab_table_galaxy.py

示例7: _try_remove_hdfs_dir

def _try_remove_hdfs_dir(path):
    """Remove *path* from HDFS; return True on success, False on error."""
    try:
        phdfs.rmr(path)
    except StandardError as err:
        # NOTE(review): StandardError is Python 2 only.
        logger.error("Error while trying to remove directory %s", path)
        logger.exception(err)
        return False
    return True
开发者ID:ilveroluca,项目名称:flink-pipeline,代码行数:8,代码来源:workflow.py

示例8: __clean_wd

 def __clean_wd(self):
     """Remove the remote temporary working directory, if one was set.

     Removal is best effort: IOError from hdfs.rmr is swallowed.
     """
     if not self.remote_wd:
         return
     try:
         self.logger.debug(
             "Removing temporary working directory %s", self.remote_wd
             )
         hdfs.rmr(self.remote_wd)
     except IOError:
         # Best-effort cleanup: a failed removal is not fatal.
         pass
开发者ID:ilveroluca,项目名称:pydoop,代码行数:9,代码来源:script.py

示例9: realpath

 def realpath(self):
     """realpath of a local symlink must match the platform's resolution."""
     tmp_dir = tempfile.mkdtemp(prefix='pydoop_', suffix=UNI_CHR)
     link_path = os.path.join(tmp_dir, make_random_str())
     os.symlink(tmp_dir, link_path)
     if sys.platform == "darwin":
         # macOS temp dirs live under /private; realpath exposes that prefix.
         expected_path = 'file:%s%s' % ("/private", tmp_dir)
     else:
         expected_path = 'file:%s' % tmp_dir
     self.assertEqual(hdfs.path.realpath('file:%s' % link_path), expected_path)
     hdfs.rmr('file:%s' % tmp_dir)
开发者ID:kikkomep,项目名称:pydoop,代码行数:10,代码来源:test_path.py

示例10: delete_files

def delete_files(remote_basedir, retention):
    """Delete remote files older than *retention* seconds.

    Walks *remote_basedir*, removes every entry of kind 'file' whose
    last modification time is outside the retention window, and returns
    the list of deleted paths.
    """
    entries = walk_remotely(remote_basedir)
    now = time.time()
    removed = []
    for entry in entries:
        expired = now - entry['last_mod'] > retention
        if expired and entry['kind'] == 'file':
            LOGGER.debug("Deleting file {0}".format(entry['path']))
            hdfs.rmr(entry['path'])
            removed.append(entry['path'])
    return removed
开发者ID:hopshadoop,项目名称:hops-hadoop-chef,代码行数:10,代码来源:hadoop_logs_mgm.py

示例11: runTest

 def runTest(self):
     """hdfs.path.utime must update both atime and mtime on a path."""
     path = make_random_str() + UNI_CHR
     hdfs.dump("foo\n", path)
     st = hdfs.path.stat(path)
     new_atime = st.st_atime + 100
     new_mtime = st.st_mtime + 200
     hdfs.path.utime(path, (new_atime, new_mtime))
     st = hdfs.path.stat(path)
     self.assertEqual(st.st_atime, new_atime)
     self.assertEqual(st.st_mtime, new_mtime)
     hdfs.rmr(path)
开发者ID:kikkomep,项目名称:pydoop,代码行数:11,代码来源:test_path.py

示例12: _tear_down_flink_session

def _tear_down_flink_session(app_id):
    """Kill the YARN application *app_id* and remove its temp session files.

    Raises ValueError on an empty app id; propagates CalledProcessError
    if the yarn kill command fails.
    """
    if not app_id:
        raise ValueError("_tear_down_flink_session: empty app id!")
    kill_cmd = ['yarn', 'application', '-kill', app_id]
    logger.info("Killing flink session with app id '%s'", app_id)
    logger.debug("Command: %s", kill_cmd)
    subprocess.check_call(kill_cmd)
    # clean up temporary yarn session files, if any
    session_dir = ".flink/" + app_id
    if phdfs.path.exists(session_dir):
        logger.info("Also removing the session's temporary files in %s", session_dir)
        phdfs.rmr(session_dir)
开发者ID:ilveroluca,项目名称:flink-pipeline,代码行数:13,代码来源:workflow.py

示例13: test_isdir

 def test_isdir(self):
     """isdir is False for a dumped file, True after rmr + mkdir.

     Checked for both the ASCII and the unicode test path.
     """
     for candidate in (self.path, self.u_path):
         self.assertFalse(hdfs.path.isdir(candidate))
         try:
             hdfs.dump("foo\n", candidate)
             self.assertFalse(hdfs.path.isdir(candidate))
             hdfs.rmr(candidate)
             hdfs.mkdir(candidate)
             self.assertTrue(hdfs.path.isdir(candidate))
         finally:
             # Best-effort cleanup; the path may already be gone.
             try:
                 hdfs.rmr(candidate)
             except IOError:
                 pass
开发者ID:kikkomep,项目名称:pydoop,代码行数:14,代码来源:test_path.py

示例14: test_kind

 def test_kind(self):
     """kind() goes None -> 'file' -> 'directory' as the path changes.

     Checked for both the ASCII and the unicode test path.
     """
     for candidate in (self.path, self.u_path):
         self.assertTrue(hdfs.path.kind(candidate) is None)
         try:
             hdfs.dump("foo\n", candidate)
             self.assertEqual('file', hdfs.path.kind(candidate))
             hdfs.rmr(candidate)
             hdfs.mkdir(candidate)
             self.assertEqual('directory', hdfs.path.kind(candidate))
         finally:
             # Best-effort cleanup; the path may already be gone.
             try:
                 hdfs.rmr(candidate)
             except IOError:
                 pass
开发者ID:kikkomep,项目名称:pydoop,代码行数:14,代码来源:test_path.py

示例15: test_isdir

 def test_isdir(self):
     """A dumped file is not a dir; after rmr + mkdir the path is a dir."""
     target = utils.make_random_str()
     self.assertFalse(hdfs.path.isdir(target))
     try:
         hdfs.dump("foo\n", target)
         self.assertFalse(hdfs.path.isdir(target))
         hdfs.rmr(target)
         hdfs.mkdir(target)
         self.assertTrue(hdfs.path.isdir(target))
     finally:
         # Best-effort cleanup; the path may already be gone.
         try:
             hdfs.rmr(target)
         except IOError:
             pass
开发者ID:ilveroluca,项目名称:pydoop,代码行数:14,代码来源:test_path.py


注:本文中的pydoop.hdfs.rmr函数示例由纯净天空整理自Github/MSDocs等开源代码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。