

Java PureJavaCrc32.update Method Code Examples

This article collects typical usage examples of the Java method org.apache.hadoop.util.PureJavaCrc32.update. If you are wondering what exactly PureJavaCrc32.update does, how to call it, or what real-world uses look like, the curated examples below may help. You can also explore further usage examples of the enclosing class, org.apache.hadoop.util.PureJavaCrc32.


The sections below present three code examples of the PureJavaCrc32.update method, drawn from open-source projects and ordered by popularity.
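Before the project examples, here is a minimal self-contained sketch of the basic update/getValue cycle. PureJavaCrc32 implements java.util.zip.Checksum and is intended as a pure-Java, drop-in replacement for java.util.zip.CRC32; the class name PureJavaCrc32Demo and the sample input are ours for illustration.

import java.nio.charset.StandardCharsets;
import java.util.zip.CRC32;

import org.apache.hadoop.util.PureJavaCrc32;

public class PureJavaCrc32Demo {          // hypothetical class name, for illustration only
  public static void main(String[] args) {
    byte[] data = "hello, hadoop".getBytes(StandardCharsets.UTF_8);

    // PureJavaCrc32 implements java.util.zip.Checksum, so the usual
    // update(byte[], int, int) / getValue() / reset() cycle applies.
    PureJavaCrc32 crc = new PureJavaCrc32();
    crc.update(data, 0, data.length);

    // It is intended to produce the same CRC-32 values as java.util.zip.CRC32.
    CRC32 jdk = new CRC32();
    jdk.update(data, 0, data.length);

    System.out.println("PureJavaCrc32: " + crc.getValue());
    System.out.println("java.util.zip: " + jdk.getValue());
  }
}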

Example 1: checkImages

import org.apache.hadoop.util.PureJavaCrc32; // import the package/class this method depends on
private void checkImages(FSNamesystem fsn) throws Exception {
  Iterator<StorageDirectory> iter = fsn.
          getFSImage().dirIterator(FSImage.NameNodeDirType.IMAGE);
  List<Long> checksums = new ArrayList<Long>();
  while (iter.hasNext()) {
    StorageDirectory sd = iter.next();
    File fsImage = FSImage.getImageFile(sd, FSImage.NameNodeFile.IMAGE);
    // Stream each fsimage copy through a CRC-32 in 4 KB chunks.
    PureJavaCrc32 crc = new PureJavaCrc32();
    FileInputStream in = new FileInputStream(fsImage);
    try {
      byte[] buff = new byte[4096];
      int read;
      while ((read = in.read(buff)) != -1) {
        crc.update(buff, 0, read);
      }
    } finally {
      in.close(); // release the file handle even if read() throws
    }
    checksums.add(crc.getValue());
  }
  assertTrue("Not enough fsimage copies in MiniDFSCluster " +
             "to test parallel write", checksums.size() > 1);
  // All copies written in parallel must have identical checksums.
  for (int i = 1; i < checksums.size(); i++) {
    assertEquals(checksums.get(i - 1), checksums.get(i));
  }
}
 
Developer: cumulusyebl | Project: cumulus | Lines: 24 | Source: TestParallelImageWrite.java
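The streaming idiom above — feed update() fixed-size buffers until EOF, then read getValue() — can be packaged as a small reusable helper. The sketch below is ours, not part of Hadoop (the names FileCrcUtil and crcOfFile are hypothetical); it uses try-with-resources so the file handle is always released.

import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;

import org.apache.hadoop.util.PureJavaCrc32;

public final class FileCrcUtil {          // hypothetical helper, for illustration only
  private FileCrcUtil() {}

  /** Computes the CRC-32 of a whole file by streaming it in 4 KB buffers. */
  public static long crcOfFile(String path) throws IOException {
    PureJavaCrc32 crc = new PureJavaCrc32();
    try (InputStream in = new FileInputStream(path)) {
      byte[] buff = new byte[4096];
      int read;
      while ((read = in.read(buff)) != -1) {
        crc.update(buff, 0, read);        // feed only the bytes actually read
      }
    }
    return crc.getValue();
  }
}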

Example 2: computePartialChunkCrc

import org.apache.hadoop.util.PureJavaCrc32; // import the package/class this method depends on
/**
 * Reads in the partial CRC chunk and computes the checksum
 * of the pre-existing data in that partial chunk.
 */
private void computePartialChunkCrc(long blkoff, long ckoff, 
                                    int bytesPerChecksum) throws IOException {

  // find offset of the beginning of partial chunk.
  //
  int sizePartialChunk = (int) (blkoff % bytesPerChecksum);
  int checksumSize = checksum.getChecksumSize();
  blkoff = blkoff - sizePartialChunk;
  LOG.info("computePartialChunkCrc sizePartialChunk " + 
            sizePartialChunk +
            " block " + block +
            " offset in block " + blkoff +
            " offset in metafile " + ckoff);

  // create an input stream from the block file
  // and read in partial crc chunk into temporary buffer
  //
  byte[] buf = new byte[sizePartialChunk];
  byte[] crcbuf = new byte[checksumSize];
  FSDataset.BlockInputStreams instr = null;
  try { 
    instr = datanode.data.getTmpInputStreams(block, blkoff, ckoff);
    IOUtils.readFully(instr.dataIn, buf, 0, sizePartialChunk);

    // open meta file and read in crc value computed earlier
    IOUtils.readFully(instr.checksumIn, crcbuf, 0, crcbuf.length);
  } finally {
    IOUtils.closeStream(instr);
  }

  // compute crc of partial chunk from data read in the block file.
  partialCrc = new PureJavaCrc32();
  partialCrc.update(buf, 0, sizePartialChunk);
  LOG.info("Read in partial CRC chunk from disk for block " + block);

  // paranoia! verify that the pre-computed crc matches what we
  // recalculated just now
  if (partialCrc.getValue() != FSInputChecker.checksum2long(crcbuf)) {
    String msg = "Partial CRC " + partialCrc.getValue() +
                 " does not match value computed the last time " +
                 "the file was closed: " +
                 FSInputChecker.checksum2long(crcbuf);
    throw new IOException(msg);
  }
}
 
Developer: Seagate | Project: hadoop-on-lustre | Lines: 52 | Source: BlockReceiver.java
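To see the offset arithmetic and the paranoia check from this example in isolation, here is a standalone simulation under stated assumptions: it fakes a block file in memory, computes the CRC of the trailing partial chunk, round-trips it through a 4-byte big-endian buffer, and verifies the recomputed value. The class and helper names (PartialChunkCrcSketch, toBytes, checksum2long) are ours for illustration, and our checksum2long reflects our reading of what FSInputChecker.checksum2long decodes.

import java.util.Random;

import org.apache.hadoop.util.PureJavaCrc32;

public class PartialChunkCrcSketch {      // hypothetical class, for illustration only
  public static void main(String[] args) {
    int bytesPerChecksum = 512;           // checksum chunk size, as in the method above
    byte[] block = new byte[1300];        // in-memory stand-in for the on-disk block file
    new Random(42).nextBytes(block);

    // Offset arithmetic from computePartialChunkCrc: the partial chunk is the
    // tail of the block that does not fill a whole checksum chunk.
    long blkoff = block.length;
    int sizePartialChunk = (int) (blkoff % bytesPerChecksum);   // 1300 % 512 = 276
    blkoff -= sizePartialChunk;                                 // chunk starts at offset 1024

    // Simulate the 4-byte CRC that an earlier writer would have stored in the meta file.
    PureJavaCrc32 writer = new PureJavaCrc32();
    writer.update(block, (int) blkoff, sizePartialChunk);
    byte[] crcbuf = toBytes(writer.getValue());

    // Recompute the CRC from the "block file" and verify, like the paranoia check above.
    PureJavaCrc32 partialCrc = new PureJavaCrc32();
    partialCrc.update(block, (int) blkoff, sizePartialChunk);
    if (partialCrc.getValue() != checksum2long(crcbuf)) {
      throw new IllegalStateException("partial CRC mismatch");
    }
    System.out.println("partial CRC verified: " + partialCrc.getValue());
  }

  /** Encodes the low 32 bits of a CRC value as 4 big-endian bytes. */
  static byte[] toBytes(long crc) {
    return new byte[] {
      (byte) (crc >>> 24), (byte) (crc >>> 16), (byte) (crc >>> 8), (byte) crc
    };
  }

  /** Decodes big-endian checksum bytes back to an unsigned long. */
  static long checksum2long(byte[] checksum) {
    long crc = 0L;
    for (int i = 0; i < checksum.length; i++) {
      crc |= (0xffL & checksum[i]) << ((checksum.length - i - 1) * 8);
    }
    return crc;
  }
}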

Example 3: computePartialChunkCrc

The body of this example is a near-verbatim duplicate of Example 2 above; the only substantive difference is that it calls the local helper checksum2long(crcbuf) directly instead of FSInputChecker.checksum2long(crcbuf). See Example 2 for the full listing.
 
Developer: cumulusyebl | Project: cumulus | Lines: 50 | Source: BlockReceiver.java


Note: The org.apache.hadoop.util.PureJavaCrc32.update examples in this article were compiled from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from community open-source projects; copyright remains with the original authors, and distribution and use are subject to each project's License. Do not reproduce without permission.