当前位置: 首页>>代码示例>>Java>>正文


Java LightWeightHashSet.add方法代码示例

本文整理汇总了Java中org.apache.hadoop.hdfs.util.LightWeightHashSet.add方法的典型用法代码示例。如果您正苦于以下问题:Java LightWeightHashSet.add方法的具体用法?Java LightWeightHashSet.add怎么用?Java LightWeightHashSet.add使用的例子?那么, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在org.apache.hadoop.hdfs.util.LightWeightHashSet的用法示例。


在下文中一共展示了LightWeightHashSet.add方法的12个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。

示例1: add

import org.apache.hadoop.hdfs.util.LightWeightHashSet; //导入方法依赖的package包/类
/**
 * Record a block as pending invalidation on the given datanode.
 *
 * @param block    the block that should be deleted
 * @param datanode the datanode that will perform the deletion
 * @param log      whether to emit a block-state-change log entry
 */
synchronized void add(final Block block, final DatanodeInfo datanode,
    final boolean log) {
  LightWeightHashSet<Block> perNode = node2blocks.get(datanode);
  if (perNode == null) {
    // First invalidation queued for this datanode: create its set lazily.
    perNode = new LightWeightHashSet<Block>();
    node2blocks.put(datanode, perNode);
  }
  final boolean newlyAdded = perNode.add(block);
  if (newlyAdded) {
    // Count only distinct blocks per datanode.
    numBlocks++;
    if (log) {
      NameNode.blockStateChangeLog.info("BLOCK* {}: add {} to {}",
          getClass().getSimpleName(), block, datanode);
    }
  }
}
 
开发者ID:naver,项目名称:hadoop,代码行数:20,代码来源:InvalidateBlocks.java

示例2: add

import org.apache.hadoop.hdfs.util.LightWeightHashSet; //导入方法依赖的package包/类
/**
 * Record a block as pending invalidation on the given datanode.
 *
 * @param block    the block that should be deleted
 * @param datanode the datanode that will perform the deletion
 * @param log      whether to emit a debug-level block-state-change entry
 */
synchronized void add(final Block block, final DatanodeInfo datanode,
    final boolean log) {
  LightWeightHashSet<Block> pending = node2blocks.get(datanode);
  if (pending == null) {
    // Lazily create the per-datanode invalidation set.
    pending = new LightWeightHashSet<Block>();
    node2blocks.put(datanode, pending);
  }
  if (!pending.add(block)) {
    return; // already queued for this datanode; nothing to count or log
  }
  numBlocks++;
  if (log) {
    NameNode.blockStateChangeLog.debug("BLOCK* {}: add {} to {}",
        getClass().getSimpleName(), block, datanode);
  }
}
 
开发者ID:aliyun-beta,项目名称:aliyun-oss-hadoop-fs,代码行数:20,代码来源:InvalidateBlocks.java

示例3: addToInvalidatesNoLog

import org.apache.hadoop.hdfs.util.LightWeightHashSet; //导入方法依赖的package包/类
/**
 * Adds block to list of blocks which will be invalidated on
 * specified datanode, without writing a block-state-change log entry.
 *
 * @param b block to queue for deletion; when {@code ackRequired} is false
 *          this block's numBytes field is overwritten in place with
 *          {@code BlockFlags.NO_ACK} — the caller's object is mutated
 * @param n datanode that should delete the block
 * @param ackRequired whether the datanode must acknowledge the deletion
 */
void addToInvalidatesNoLog(Block b, DatanodeInfo n, boolean ackRequired) {
  // We are the standby avatar and we don't want to add blocks to the
  // invalidates list.
  // NOTE(review): the guard delegates to shouldRetryAbsentBlocks(); confirm
  // that a true return really identifies the standby avatar here.
  if (this.getNameNode().shouldRetryAbsentBlocks()) {
    return;
  }

  // Lazily create the per-datanode invalidation set, keyed by storage ID.
  LightWeightHashSet<Block> invalidateSet = recentInvalidateSets.get(n
      .getStorageID());
  if (invalidateSet == null) {
    invalidateSet = new LightWeightHashSet<Block>();
    recentInvalidateSets.put(n.getStorageID(), invalidateSet);
  }
  // NO_ACK is encoded by clobbering numBytes; this mutation happens even if
  // the block turns out to be a duplicate in the set below.
  if(!ackRequired){
    b.setNumBytes(BlockFlags.NO_ACK);
  }
  // Count only blocks that were newly queued for deletion.
  if (invalidateSet.add(b)) {
    pendingDeletionBlocksCount++;
  }
}
 
开发者ID:rhli,项目名称:hadoop-EAR,代码行数:28,代码来源:FSNamesystem.java

示例4: getBlocksBeingWrittenInfo

import org.apache.hadoop.hdfs.util.LightWeightHashSet; //导入方法依赖的package包/类
/**
 * Collect the blocks found under the replicas-being-written directory
 * into the supplied set. A no-op when the directory is absent or
 * unreadable.
 *
 * @param blockSet receives every block discovered under rbwDir
 * @throws IOException if block metadata cannot be read
 */
void getBlocksBeingWrittenInfo(LightWeightHashSet<Block> blockSet) throws IOException {
  if (rbwDir == null) {
    return;
  }

  final File[] files = rbwDir.listFiles();
  if (files == null) {
    return;
  }
  final String[] names = getFileNames(files);
  for (int idx = 0; idx < files.length; idx++) {
    // Sub-directories cannot be block files; skip them.
    if (files[idx].isDirectory()) {
      continue;
    }
    final Block found = FSDataset.getBlockFromNames(files, names, idx);
    if (found == null) {
      continue;
    }
    blockSet.add(found);
    if (DataNode.LOG.isDebugEnabled()) {
      DataNode.LOG.debug("recoverBlocksBeingWritten for block " + found);
    }
  }
}
 
开发者ID:rhli,项目名称:hadoop-EAR,代码行数:25,代码来源:FSDataset.java

示例5: getBlockInfo

import org.apache.hadoop.hdfs.util.LightWeightHashSet; //导入方法依赖的package包/类
/**
 * Populate the given blockSet with any child blocks
 * found at this node, recursing into all child directories first.
 *
 * @param blockSet receives every block found at or below this directory
 * @throws IOException if block information cannot be read
 */
public void getBlockInfo(LightWeightHashSet<Block> blockSet) throws IOException {
  FSDir[] children = this.getChildren();
  if (children != null) {
    for (int i = 0; i < children.length; i++) {
      children[i].getBlockInfo(blockSet);
    }
  }

  File blockFiles[] = dir.listFiles();
  // File.listFiles() returns null on I/O error or when dir is not a
  // directory; guard against it instead of failing with an NPE (the
  // sibling getBlocksBeingWrittenInfo already checks for this).
  if (blockFiles == null) {
    return;
  }
  String[] blockFilesNames = getFileNames(blockFiles);

  for (int i = 0; i < blockFiles.length; i++) {
    Block block = getBlockFromNames(blockFiles, blockFilesNames, i);
    if (block != null) {
      blockSet.add(block);
    }
  }
}
 
开发者ID:rhli,项目名称:hadoop-EAR,代码行数:24,代码来源:FSDataset.java

示例6: getBlockAndFileInfo

import org.apache.hadoop.hdfs.util.LightWeightHashSet; //导入方法依赖的package包/类
/**
 * Populate the given blockSet with any child blocks
 * found at this node. With each block, return the full path
 * of the block file. Recurses into all child directories first.
 *
 * @param blockSet receives a BlockAndFile entry for every block found
 * @throws IOException if block information cannot be read
 */
void getBlockAndFileInfo(LightWeightHashSet<BlockAndFile> blockSet) throws IOException {
  FSDir[] children = this.getChildren();
  if (children != null) {
    for (int i = 0; i < children.length; i++) {
      children[i].getBlockAndFileInfo(blockSet);
    }
  }

  File blockFiles[] = dir.listFiles();
  // File.listFiles() returns null on I/O error or when dir is not a
  // directory; guard against it instead of failing with an NPE.
  if (blockFiles == null) {
    return;
  }
  String[] blockFilesNames = getFileNames(blockFiles);
  for (int i = 0; i < blockFiles.length; i++) {
    Block block = getBlockFromNames(blockFiles, blockFilesNames, i);
    if (block != null) {
      blockSet.add(new BlockAndFile(blockFiles[i].getAbsoluteFile(), block));
    }
  }
}
 
开发者ID:rhli,项目名称:hadoop-EAR,代码行数:24,代码来源:FSDataset.java

示例7: add

import org.apache.hadoop.hdfs.util.LightWeightHashSet; //导入方法依赖的package包/类
/**
 * Record a block as pending invalidation on the given datanode.
 * Entries are keyed by the datanode's storage ID.
 *
 * @param block    the block that should be deleted
 * @param datanode the datanode that will perform the deletion
 * @param log      whether to emit a block-state-change log entry
 */
synchronized void add(final Block block, final DatanodeInfo datanode,
    final boolean log) {
  final String storageId = datanode.getStorageID();
  LightWeightHashSet<Block> pending = node2blocks.get(storageId);
  if (pending == null) {
    // First invalidation queued for this storage: create its set lazily.
    pending = new LightWeightHashSet<Block>();
    node2blocks.put(storageId, pending);
  }
  if (!pending.add(block)) {
    return; // duplicate for this datanode; nothing to count or log
  }
  numBlocks++;
  if (log) {
    NameNode.blockStateChangeLog.info("BLOCK* " + getClass().getSimpleName()
        + ": add " + block + " to " + datanode);
  }
}
 
开发者ID:ict-carch,项目名称:hadoop-plus,代码行数:20,代码来源:InvalidateBlocks.java

示例8: add

import org.apache.hadoop.hdfs.util.LightWeightHashSet; //导入方法依赖的package包/类
/**
 * Record a block as pending invalidation on the given datanode.
 *
 * @param block    the block that should be deleted
 * @param datanode the datanode that will perform the deletion
 * @param log      whether to emit a block-state-change log entry
 */
synchronized void add(final Block block, final DatanodeInfo datanode,
    final boolean log) {
  LightWeightHashSet<Block> blocksForNode = node2blocks.get(datanode);
  if (blocksForNode == null) {
    // Lazily create the per-datanode invalidation set.
    blocksForNode = new LightWeightHashSet<Block>();
    node2blocks.put(datanode, blocksForNode);
  }
  final boolean isNew = blocksForNode.add(block);
  if (isNew) {
    numBlocks++;
    if (log) {
      NameNode.blockStateChangeLog.info("BLOCK* " + getClass().getSimpleName()
          + ": add " + block + " to " + datanode);
    }
  }
}
 
开发者ID:yncxcw,项目名称:FlexMap,代码行数:20,代码来源:InvalidateBlocks.java

示例9: add

import org.apache.hadoop.hdfs.util.LightWeightHashSet; //导入方法依赖的package包/类
/**
 * Record a block as pending invalidation on the given datanode.
 * Entries are keyed by the datanode's UUID.
 *
 * @param block    the block that should be deleted
 * @param datanode the datanode that will perform the deletion
 * @param log      whether to emit a block-state-change log entry
 */
synchronized void add(final Block block, final DatanodeInfo datanode,
    final boolean log) {
  final String uuid = datanode.getDatanodeUuid();
  LightWeightHashSet<Block> pending = node2blocks.get(uuid);
  if (pending == null) {
    // First invalidation queued for this datanode: create its set lazily.
    pending = new LightWeightHashSet<Block>();
    node2blocks.put(uuid, pending);
  }
  if (!pending.add(block)) {
    return; // duplicate for this datanode; nothing to count or log
  }
  numBlocks++;
  if (log) {
    NameNode.blockStateChangeLog.info("BLOCK* " + getClass().getSimpleName()
        + ": add " + block + " to " + datanode);
  }
}
 
开发者ID:Seagate,项目名称:hadoop-on-lustre2,代码行数:20,代码来源:InvalidateBlocks.java

示例10: getBlockInfo

import org.apache.hadoop.hdfs.util.LightWeightHashSet; //导入方法依赖的package包/类
/**
 * Populate the given blockSet with any child blocks
 * found at this node, recursing into all child directories first.
 *
 * @param blockSet receives every block found at or below this directory
 */
public void getBlockInfo(LightWeightHashSet<Block> blockSet) {
  FSDir[] children = this.getChildren();
  if (children != null) {
    for (int i = 0; i < children.length; i++) {
      children[i].getBlockInfo(blockSet);
    }
  }

  File blockFiles[] = dir.listFiles();
  // File.listFiles() returns null on I/O error or when dir is not a
  // directory; guard against it instead of failing with an NPE.
  if (blockFiles == null) {
    return;
  }
  String[] blockFilesNames = getFileNames(blockFiles);

  for (int i = 0; i < blockFiles.length; i++) {
    if (Block.isBlockFilename(blockFilesNames[i])) {
      long genStamp = FSDataset.getGenerationStampFromFile(blockFilesNames,
          blockFilesNames[i]);
      blockSet.add(new Block(blockFiles[i], blockFiles[i].length(), genStamp));
    }
  }
}
 
开发者ID:iVCE,项目名称:RDFS,代码行数:24,代码来源:FSDataset.java

示例11: getBlockAndFileInfo

import org.apache.hadoop.hdfs.util.LightWeightHashSet; //导入方法依赖的package包/类
/**
 * Populate the given blockSet with any child blocks
 * found at this node. With each block, return the full path
 * of the block file. Recurses into all child directories first.
 *
 * @param blockSet receives a BlockAndFile entry for every block found
 */
void getBlockAndFileInfo(LightWeightHashSet<BlockAndFile> blockSet) {
  FSDir[] children = this.getChildren();
  if (children != null) {
    for (int i = 0; i < children.length; i++) {
      children[i].getBlockAndFileInfo(blockSet);
    }
  }

  File blockFiles[] = dir.listFiles();
  // File.listFiles() returns null on I/O error or when dir is not a
  // directory; guard against it instead of failing with an NPE.
  if (blockFiles == null) {
    return;
  }
  String[] blockFilesNames = getFileNames(blockFiles);
  for (int i = 0; i < blockFiles.length; i++) {
    if (Block.isBlockFilename(blockFilesNames[i])) {
      long genStamp = FSDataset.getGenerationStampFromFile(blockFilesNames,
          blockFilesNames[i]);
      Block block = new Block(blockFiles[i], blockFiles[i].length(), genStamp);
      blockSet.add(new BlockAndFile(blockFiles[i].getAbsoluteFile(), block));
    }
  }
}
 
开发者ID:iVCE,项目名称:RDFS,代码行数:25,代码来源:FSDataset.java

示例12: getBlocksBeingWrittenInfo

import org.apache.hadoop.hdfs.util.LightWeightHashSet; //导入方法依赖的package包/类
/**
 * Collect the blocks found under the replicas-being-written directory
 * into the supplied set. A no-op when the directory is absent or
 * unreadable.
 *
 * @param blockSet receives every block discovered under rbwDir
 */
void getBlocksBeingWrittenInfo(LightWeightHashSet<Block> blockSet) {
  if (rbwDir == null) {
    return;
  }

  final File[] files = rbwDir.listFiles();
  if (files == null) {
    return;
  }
  final String[] names = getFileNames(files);
  for (int idx = 0; idx < files.length; idx++) {
    // Sub-directories cannot be block files; skip them.
    if (files[idx].isDirectory()) {
      continue;
    }
    if (!Block.isBlockFilename(names[idx])) {
      continue;
    }
    final long genStamp =
        FSDataset.getGenerationStampFromFile(names, names[idx]);
    final Block recovered =
        new Block(files[idx], files[idx].length(), genStamp);

    // Record the block so it can be recovered.
    blockSet.add(recovered);
    if (DataNode.LOG.isDebugEnabled()) {
      DataNode.LOG.debug("recoverBlocksBeingWritten for block " + recovered);
    }
  }
}
 
开发者ID:iVCE,项目名称:RDFS,代码行数:29,代码来源:FSDataset.java


注:本文中的org.apache.hadoop.hdfs.util.LightWeightHashSet.add方法示例由纯净天空整理自Github/MSDocs等开源代码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。