This article collects typical usage examples of the Java method org.apache.hadoop.hdfs.util.LightWeightHashSet.add. If you are wondering what LightWeightHashSet.add does, how to call it, or where to find working examples, the curated samples below may help. You can also explore further usage examples of its enclosing class, org.apache.hadoop.hdfs.util.LightWeightHashSet.
The following shows 12 code examples of LightWeightHashSet.add, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better Java code examples.
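Before diving into the examples, here is a minimal, self-contained sketch of the method's contract. It assumes only that the Hadoop HDFS jar is on the classpath; the class name and values are illustrative, not taken from the examples below. Like java.util.HashSet, LightWeightHashSet.add returns true only when the element was not already present, which is what the NameNode examples below rely on for counting.

import org.apache.hadoop.hdfs.util.LightWeightHashSet;

public class LightWeightHashSetAddDemo {
  public static void main(String[] args) {
    LightWeightHashSet<String> set = new LightWeightHashSet<String>();
    // add returns true when the element is newly inserted
    System.out.println(set.add("blk_1073741825")); // true: first insertion
    System.out.println(set.add("blk_1073741825")); // false: already present
    System.out.println(set.size());                // 1
  }
}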
Example 1: add
import org.apache.hadoop.hdfs.util.LightWeightHashSet; // import the package/class the method depends on
/**
 * Add a block to the block collection
 * which will be invalidated on the specified datanode.
 */
synchronized void add(final Block block, final DatanodeInfo datanode,
    final boolean log) {
  LightWeightHashSet<Block> set = node2blocks.get(datanode);
  if (set == null) {
    set = new LightWeightHashSet<Block>();
    node2blocks.put(datanode, set);
  }
  if (set.add(block)) {
    numBlocks++;
    if (log) {
      NameNode.blockStateChangeLog.info("BLOCK* {}: add {} to {}",
          getClass().getSimpleName(), block, datanode);
    }
  }
}
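Examples 1, 2, and 7 through 9 all follow the same get-or-create multimap idiom: look up the per-datanode set, lazily create it on a miss, and count a block only when set.add reports a genuine insertion. For comparison, here is a hypothetical standalone sketch of the same idiom using plain java.util collections and Java 8's computeIfAbsent; the names are illustrative and not part of Hadoop.

import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

public class InvalidateSetSketch {
  // maps a datanode identifier to the blocks pending invalidation on it
  private final Map<String, Set<Long>> node2blocks = new HashMap<>();
  private int numBlocks = 0;

  synchronized void add(String datanode, long blockId) {
    Set<Long> set =
        node2blocks.computeIfAbsent(datanode, k -> new HashSet<>());
    if (set.add(blockId)) { // true only for a block not already queued
      numBlocks++;
    }
  }
}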
Example 2: add
import org.apache.hadoop.hdfs.util.LightWeightHashSet; // import the package/class the method depends on
/**
 * Add a block to the block collection
 * which will be invalidated on the specified datanode.
 */
synchronized void add(final Block block, final DatanodeInfo datanode,
    final boolean log) {
  LightWeightHashSet<Block> set = node2blocks.get(datanode);
  if (set == null) {
    set = new LightWeightHashSet<Block>();
    node2blocks.put(datanode, set);
  }
  if (set.add(block)) {
    numBlocks++;
    if (log) {
      NameNode.blockStateChangeLog.debug("BLOCK* {}: add {} to {}",
          getClass().getSimpleName(), block, datanode);
    }
  }
}
Example 3: addToInvalidatesNoLog
import org.apache.hadoop.hdfs.util.LightWeightHashSet; // import the package/class the method depends on
/**
 * Adds block to list of blocks which will be invalidated on
 * specified datanode
 *
 * @param b block
 * @param n datanode
 */
void addToInvalidatesNoLog(Block b, DatanodeInfo n, boolean ackRequired) {
  // We are the standby avatar and we don't want to add blocks to the
  // invalidates list.
  if (this.getNameNode().shouldRetryAbsentBlocks()) {
    return;
  }
  LightWeightHashSet<Block> invalidateSet = recentInvalidateSets.get(n
      .getStorageID());
  if (invalidateSet == null) {
    invalidateSet = new LightWeightHashSet<Block>();
    recentInvalidateSets.put(n.getStorageID(), invalidateSet);
  }
  if (!ackRequired) {
    b.setNumBytes(BlockFlags.NO_ACK);
  }
  if (invalidateSet.add(b)) {
    pendingDeletionBlocksCount++;
  }
}
Example 4: getBlocksBeingWrittenInfo
import org.apache.hadoop.hdfs.util.LightWeightHashSet; // import the package/class the method depends on
void getBlocksBeingWrittenInfo(LightWeightHashSet<Block> blockSet) throws IOException {
  if (rbwDir == null) {
    return;
  }
  File[] blockFiles = rbwDir.listFiles();
  if (blockFiles == null) {
    return;
  }
  String[] blockFileNames = getFileNames(blockFiles);
  for (int i = 0; i < blockFiles.length; i++) {
    if (!blockFiles[i].isDirectory()) {
      // get each block in the rbwDir directory
      Block block = FSDataset.getBlockFromNames(blockFiles, blockFileNames, i);
      if (block != null) {
        // add this block to block set
        blockSet.add(block);
        if (DataNode.LOG.isDebugEnabled()) {
          DataNode.LOG.debug("recoverBlocksBeingWritten for block " + block);
        }
      }
    }
  }
}
Example 5: getBlockInfo
import org.apache.hadoop.hdfs.util.LightWeightHashSet; // import the package/class the method depends on
/**
 * Populate the given blockSet with any child blocks
 * found at this node.
 * @throws IOException
 */
public void getBlockInfo(LightWeightHashSet<Block> blockSet) throws IOException {
  FSDir[] children = this.getChildren();
  if (children != null) {
    for (int i = 0; i < children.length; i++) {
      children[i].getBlockInfo(blockSet);
    }
  }
  File blockFiles[] = dir.listFiles();
  String[] blockFilesNames = getFileNames(blockFiles);
  for (int i = 0; i < blockFiles.length; i++) {
    Block block = getBlockFromNames(blockFiles, blockFilesNames, i);
    if (block != null) {
      blockSet.add(block);
    }
  }
}
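Examples 5, 6, 10, and 11 share one traversal pattern: recurse into child FSDir nodes first, then collect the block files at the current level. Stripped of the FSDataset specifics, the recursion looks like the following hypothetical sketch built on java.io.File; the class and method names are illustrative only.

import java.io.File;
import java.util.List;

public class BlockDirScanSketch {
  static void collect(File dir, List<File> result) {
    File[] entries = dir.listFiles();
    if (entries == null) {
      return; // not a directory, or an I/O error occurred
    }
    for (File entry : entries) {
      if (entry.isDirectory()) {
        collect(entry, result); // descend into children first
      } else {
        result.add(entry);      // then record files at this node
      }
    }
  }
}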
Example 6: getBlockAndFileInfo
import org.apache.hadoop.hdfs.util.LightWeightHashSet; // import the package/class the method depends on
/**
 * Populate the given blockSet with any child blocks
 * found at this node. With each block, return the full path
 * of the block file.
 * @throws IOException
 */
void getBlockAndFileInfo(LightWeightHashSet<BlockAndFile> blockSet) throws IOException {
  FSDir[] children = this.getChildren();
  if (children != null) {
    for (int i = 0; i < children.length; i++) {
      children[i].getBlockAndFileInfo(blockSet);
    }
  }
  File blockFiles[] = dir.listFiles();
  String[] blockFilesNames = getFileNames(blockFiles);
  for (int i = 0; i < blockFiles.length; i++) {
    Block block = getBlockFromNames(blockFiles, blockFilesNames, i);
    if (block != null) {
      blockSet.add(new BlockAndFile(blockFiles[i].getAbsoluteFile(), block));
    }
  }
}
Example 7: add
import org.apache.hadoop.hdfs.util.LightWeightHashSet; // import the package/class the method depends on
/**
 * Add a block to the block collection
 * which will be invalidated on the specified datanode.
 */
synchronized void add(final Block block, final DatanodeInfo datanode,
    final boolean log) {
  LightWeightHashSet<Block> set = node2blocks.get(datanode.getStorageID());
  if (set == null) {
    set = new LightWeightHashSet<Block>();
    node2blocks.put(datanode.getStorageID(), set);
  }
  if (set.add(block)) {
    numBlocks++;
    if (log) {
      NameNode.blockStateChangeLog.info("BLOCK* " + getClass().getSimpleName()
          + ": add " + block + " to " + datanode);
    }
  }
}
Example 8: add
import org.apache.hadoop.hdfs.util.LightWeightHashSet; // import the package/class the method depends on
/**
 * Add a block to the block collection
 * which will be invalidated on the specified datanode.
 */
synchronized void add(final Block block, final DatanodeInfo datanode,
    final boolean log) {
  LightWeightHashSet<Block> set = node2blocks.get(datanode);
  if (set == null) {
    set = new LightWeightHashSet<Block>();
    node2blocks.put(datanode, set);
  }
  if (set.add(block)) {
    numBlocks++;
    if (log) {
      NameNode.blockStateChangeLog.info("BLOCK* " + getClass().getSimpleName()
          + ": add " + block + " to " + datanode);
    }
  }
}
Example 9: add
import org.apache.hadoop.hdfs.util.LightWeightHashSet; // import the package/class the method depends on
/**
 * Add a block to the block collection
 * which will be invalidated on the specified datanode.
 */
synchronized void add(final Block block, final DatanodeInfo datanode,
    final boolean log) {
  LightWeightHashSet<Block> set = node2blocks.get(datanode.getDatanodeUuid());
  if (set == null) {
    set = new LightWeightHashSet<Block>();
    node2blocks.put(datanode.getDatanodeUuid(), set);
  }
  if (set.add(block)) {
    numBlocks++;
    if (log) {
      NameNode.blockStateChangeLog.info("BLOCK* " + getClass().getSimpleName()
          + ": add " + block + " to " + datanode);
    }
  }
}
Example 10: getBlockInfo
import org.apache.hadoop.hdfs.util.LightWeightHashSet; // import the package/class the method depends on
/**
 * Populate the given blockSet with any child blocks
 * found at this node.
 */
public void getBlockInfo(LightWeightHashSet<Block> blockSet) {
  FSDir[] children = this.getChildren();
  if (children != null) {
    for (int i = 0; i < children.length; i++) {
      children[i].getBlockInfo(blockSet);
    }
  }
  File blockFiles[] = dir.listFiles();
  String[] blockFilesNames = getFileNames(blockFiles);
  for (int i = 0; i < blockFiles.length; i++) {
    if (Block.isBlockFilename(blockFilesNames[i])) {
      long genStamp = FSDataset.getGenerationStampFromFile(blockFilesNames,
          blockFilesNames[i]);
      blockSet.add(new Block(blockFiles[i], blockFiles[i].length(), genStamp));
    }
  }
}
Example 11: getBlockAndFileInfo
import org.apache.hadoop.hdfs.util.LightWeightHashSet; // import the package/class the method depends on
/**
 * Populate the given blockSet with any child blocks
 * found at this node. With each block, return the full path
 * of the block file.
 */
void getBlockAndFileInfo(LightWeightHashSet<BlockAndFile> blockSet) {
  FSDir[] children = this.getChildren();
  if (children != null) {
    for (int i = 0; i < children.length; i++) {
      children[i].getBlockAndFileInfo(blockSet);
    }
  }
  File blockFiles[] = dir.listFiles();
  String[] blockFilesNames = getFileNames(blockFiles);
  for (int i = 0; i < blockFiles.length; i++) {
    if (Block.isBlockFilename(blockFilesNames[i])) {
      long genStamp = FSDataset.getGenerationStampFromFile(blockFilesNames,
          blockFilesNames[i]);
      Block block = new Block(blockFiles[i], blockFiles[i].length(), genStamp);
      blockSet.add(new BlockAndFile(blockFiles[i].getAbsoluteFile(), block));
    }
  }
}
Example 12: getBlocksBeingWrittenInfo
import org.apache.hadoop.hdfs.util.LightWeightHashSet; // import the package/class the method depends on
void getBlocksBeingWrittenInfo(LightWeightHashSet<Block> blockSet) {
  if (rbwDir == null) {
    return;
  }
  File[] blockFiles = rbwDir.listFiles();
  if (blockFiles == null) {
    return;
  }
  String[] blockFileNames = getFileNames(blockFiles);
  for (int i = 0; i < blockFiles.length; i++) {
    if (!blockFiles[i].isDirectory()) {
      // get each block in the rbwDir directory
      if (Block.isBlockFilename(blockFileNames[i])) {
        long genStamp = FSDataset.getGenerationStampFromFile(
            blockFileNames, blockFileNames[i]);
        Block block =
            new Block(blockFiles[i], blockFiles[i].length(), genStamp);
        // add this block to block set
        blockSet.add(block);
        if (DataNode.LOG.isDebugEnabled()) {
          DataNode.LOG.debug("recoverBlocksBeingWritten for block " + block);
        }
      }
    }
  }
}