This article collects typical usage examples of the Java method org.apache.hadoop.io.MD5Hash.equals. If you are wondering what MD5Hash.equals does, how to call it, or what it looks like in real code, the curated examples below should help. You can also explore further usage examples of its enclosing class, org.apache.hadoop.io.MD5Hash.
The following presents 8 code examples of the MD5Hash.equals method, sorted by popularity by default.
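Before the examples, here is a minimal, self-contained sketch of how MD5Hash.equals behaves on its own (the class name Md5HashEqualsDemo is made up for illustration): digests computed from the same bytes compare equal, digests of different bytes do not, and the hex string from toString() round-trips through the String constructor.

import org.apache.hadoop.io.MD5Hash;
import java.nio.charset.StandardCharsets;

public class Md5HashEqualsDemo {
  public static void main(String[] args) {
    byte[] data = "hello hadoop".getBytes(StandardCharsets.UTF_8);

    // Digests of identical bytes are equal.
    MD5Hash a = MD5Hash.digest(data);
    MD5Hash b = MD5Hash.digest(data);
    System.out.println(a.equals(b));   // true

    // Digests of different bytes are not.
    MD5Hash c = MD5Hash.digest("other".getBytes(StandardCharsets.UTF_8));
    System.out.println(a.equals(c));   // false

    // The hex string from toString() round-trips through the constructor.
    MD5Hash d = new MD5Hash(a.toString());
    System.out.println(a.equals(d));   // true
  }
}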
Example 1: loadFSImage
import org.apache.hadoop.io.MD5Hash; // import required by the method
/**
 * Load in the filesystem image from file. It's a big list of
 * filenames and blocks.
 */
private void loadFSImage(File curFile, MD5Hash expectedMd5,
    FSNamesystem target, MetaRecoveryContext recovery,
    boolean requireSameLayoutVersion) throws IOException {
  // BlockPoolId is required when the FsImageLoader loads the rolling upgrade
  // information. Make sure the ID is properly set.
  target.setBlockPoolId(this.getBlockPoolID());
  FSImageFormat.LoaderDelegator loader = FSImageFormat.newLoader(conf, target);
  loader.load(curFile, requireSameLayoutVersion);
  // Check that the image digest we loaded matches up with what
  // we expected
  MD5Hash readImageMd5 = loader.getLoadedImageMd5();
  if (expectedMd5 != null &&
      !expectedMd5.equals(readImageMd5)) {
    throw new IOException("Image file " + curFile +
        " is corrupt with MD5 checksum of " + readImageMd5 +
        " but expecting " + expectedMd5);
  }
  long txId = loader.getLoadedImageTxId();
  LOG.info("Loaded image for txid " + txId + " from " + curFile);
  lastAppliedTxId = txId;
  storage.setMostRecentCheckpointInfo(txId, curFile.lastModified());
}
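The guard above takes the freshly loaded image's digest from the loader itself. As a rough sketch of the same expected-vs-actual comparison in isolation, the helper below (ImageDigestCheck and checkDigest are hypothetical names, not part of Hadoop) recomputes a file's digest with MD5Hash.digest(InputStream) and fails on mismatch, skipping the check when no expected digest is supplied, just as loadFSImage does.

import org.apache.hadoop.io.MD5Hash;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;

public class ImageDigestCheck {
  // Recompute the MD5 of imageFile and fail if it differs from expectedMd5.
  // A null expectedMd5 skips the check, mirroring the guard in loadFSImage.
  static void checkDigest(File imageFile, MD5Hash expectedMd5) throws IOException {
    if (expectedMd5 == null) {
      return; // nothing to verify against
    }
    MD5Hash actual;
    try (InputStream in = new FileInputStream(imageFile)) {
      actual = MD5Hash.digest(in);
    }
    if (!expectedMd5.equals(actual)) {
      throw new IOException("Image file " + imageFile +
          " is corrupt with MD5 checksum of " + actual +
          " but expecting " + expectedMd5);
    }
  }
}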
Example 2: saveDigestAndRenameCheckpointImage
import org.apache.hadoop.io.MD5Hash; // import required by the method
/**
 * This is called by the 2NN after having downloaded an image, and by
 * the NN after having received a new image from the 2NN. It
 * renames the image from fsimage_N.ckpt to fsimage_N and also
 * saves the related .md5 file into place.
 */
synchronized void saveDigestAndRenameCheckpointImage(
    long txid, MD5Hash digest) throws IOException {
  if (!digest.equals(storage.getCheckpointImageDigest(txid))) {
    throw new IOException(
        "Checkpoint image is corrupt: expecting an MD5 checksum of " +
        digest + " but is " + storage.getCheckpointImageDigest(txid));
  }
  imageSet.saveDigestAndRenameCheckpointImage(txid, digest);
  // So long as this is the newest image available,
  // advertise it as such to other checkpointers
  // from now on
  storage.setMostRecentCheckpointTxId(txid);
}
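Note the direction of the comparison: equals is called on the incoming digest rather than on the stored one, so a missing stored digest shows up as a checksum mismatch instead of a NullPointerException. The small demo below (class and map names are invented for illustration) shows that MD5Hash.equals simply returns false for a null argument, per the usual equals contract.

import org.apache.hadoop.io.MD5Hash;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Map;

public class NullSafeCompareDemo {
  public static void main(String[] args) {
    Map<Long, MD5Hash> checkpointDigests = new HashMap<>();
    MD5Hash received =
        MD5Hash.digest("fsimage bytes".getBytes(StandardCharsets.UTF_8));

    // No digest was recorded for txid 42, so the lookup returns null.
    MD5Hash stored = checkpointDigests.get(42L);

    // Calling equals on the non-null side yields false for a null argument,
    // so the mismatch is reported as a corrupt checkpoint, not an NPE.
    if (!received.equals(stored)) {
      System.out.println("rejecting checkpoint: stored=" + stored +
          " received=" + received);
    }
  }
}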
Example 3: loadFSImage
import org.apache.hadoop.io.MD5Hash; // import required by the method
/**
 * Load in the filesystem image from file. It's a big list of
 * filenames and blocks.
 */
private void loadFSImage(File curFile, MD5Hash expectedMd5,
    FSNamesystem target, MetaRecoveryContext recovery) throws IOException {
  FSImageFormat.Loader loader = new FSImageFormat.Loader(
      conf, target);
  loader.load(curFile);
  target.setBlockPoolId(this.getBlockPoolID());
  // Check that the image digest we loaded matches up with what
  // we expected
  MD5Hash readImageMd5 = loader.getLoadedImageMd5();
  if (expectedMd5 != null &&
      !expectedMd5.equals(readImageMd5)) {
    throw new IOException("Image file " + curFile +
        " is corrupt with MD5 checksum of " + readImageMd5 +
        " but expecting " + expectedMd5);
  }
  long txId = loader.getLoadedImageTxId();
  LOG.info("Loaded image for txid " + txId + " from " + curFile);
  lastAppliedTxId = txId;
  storage.setMostRecentCheckpointInfo(txId, curFile.lastModified());
}
Example 4: verifySavedMD5
import org.apache.hadoop.io.MD5Hash; // import required by the method
/**
 * Verify that the previously saved md5 for the given file matches
 * expectedMd5.
 * @throws IOException
 */
public static void verifySavedMD5(File dataFile, MD5Hash expectedMD5)
    throws IOException {
  MD5Hash storedHash = readStoredMd5ForFile(dataFile);
  // Check the hash itself
  if (!expectedMD5.equals(storedHash)) {
    throw new IOException(
        "File " + dataFile + " did not match stored MD5 checksum" +
        " (stored: " + storedHash + ", computed: " + expectedMD5 + ")");
  }
}
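This method comes from Hadoop's MD5FileUtils, where readStoredMd5ForFile parses the .md5 sidecar written next to the data file. A rough sketch of the same idea is shown below; the helper name is hypothetical, and it assumes the sidecar's first whitespace-separated token is the 32-character hex digest, while the real MD5FileUtils format also records and validates the file name.

import org.apache.hadoop.io.MD5Hash;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;

public class StoredMd5Check {
  // Read the hex digest from "<dataFile>.md5" and compare it to a digest
  // we computed ourselves.  Hypothetical helper, not the Hadoop method.
  static void verifyAgainstSidecar(File dataFile, MD5Hash computed)
      throws IOException {
    File md5File = new File(dataFile.getParentFile(), dataFile.getName() + ".md5");
    MD5Hash stored;
    try (BufferedReader reader = new BufferedReader(new FileReader(md5File))) {
      String line = reader.readLine();
      if (line == null) {
        throw new IOException("Empty md5 file " + md5File);
      }
      stored = new MD5Hash(line.trim().split("\\s+")[0]);
    }
    if (!computed.equals(stored)) {
      throw new IOException("File " + dataFile +
          " did not match stored MD5 checksum" +
          " (stored: " + stored + ", computed: " + computed + ")");
    }
  }
}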
Example 5: setCheckpointImageDigest
import org.apache.hadoop.io.MD5Hash; // import required by the method
synchronized void setCheckpointImageDigest(long txid, MD5Hash imageDigest)
    throws IOException {
  if (checkpointImageDigests.containsKey(txid)) {
    MD5Hash existing = checkpointImageDigests.get(txid);
    if (!existing.equals(imageDigest)) {
      throw new IOException(
          "Trying to set checkpoint image digest for txid: " + txid + "="
          + imageDigest + " existing " + existing);
    }
  } else {
    checkpointImageDigests.put(txid, imageDigest);
  }
}
Example 6: saveDigestAndRenameCheckpointImage
import org.apache.hadoop.io.MD5Hash; // import required by the method
/**
 * Roll the image.
 */
public void saveDigestAndRenameCheckpointImage(long txid, MD5Hash digest)
    throws IOException {
  MD5Hash storedDigest = checkpointImageDigests.get(txid);
  if (storedDigest == null || !storedDigest.equals(digest)) {
    throw new IOException("Digest of data written: " + storedDigest
        + " does not match requested digest: " + digest + " for txid: "
        + txid + ", journal: " + journalId);
  }
  imageManager.saveDigestAndRenameCheckpointImage(txid, digest);
  checkpointImageDigests.remove(txid);
}
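Examples 5 and 6 together form a small protocol around a Map<Long, MD5Hash>: a digest is recorded once per transaction id (re-recording is only tolerated if it equals what is already stored), and it is checked and consumed again when the checkpoint image is finalized. Below is a condensed, self-contained sketch of that record-then-verify flow; the class, field, and method names are invented for illustration, and the real classes additionally coordinate with storage and image managers.

import org.apache.hadoop.io.MD5Hash;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

public class CheckpointDigestRegistry {
  private final Map<Long, MD5Hash> checkpointImageDigests = new HashMap<>();

  // Record the digest for txid; a second call must agree with the first.
  public synchronized void record(long txid, MD5Hash digest) throws IOException {
    MD5Hash existing = checkpointImageDigests.get(txid);
    if (existing != null && !existing.equals(digest)) {
      throw new IOException("Conflicting digest for txid " + txid +
          ": " + digest + " vs existing " + existing);
    }
    checkpointImageDigests.put(txid, digest);
  }

  // Verify the digest presented at finalization time and consume the entry.
  public synchronized void verifyAndConsume(long txid, MD5Hash digest)
      throws IOException {
    MD5Hash stored = checkpointImageDigests.get(txid);
    if (stored == null || !stored.equals(digest)) {
      throw new IOException("Digest mismatch for txid " + txid +
          ": stored=" + stored + " requested=" + digest);
    }
    checkpointImageDigests.remove(txid);
  }
}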
Example 7: setCheckpointImageDigest
import org.apache.hadoop.io.MD5Hash; // import required by the method
synchronized void setCheckpointImageDigest(long txid, MD5Hash imageDigest)
    throws IOException {
  if (checkpointImageDigests.containsKey(txid)) {
    MD5Hash existing = checkpointImageDigests.get(txid);
    if (!existing.equals(imageDigest)) {
      throw new IOException(
          "Trying to set checkpoint image digest for txid: " + txid + "="
          + imageDigest + " existing " + existing + " for txid: " + txid
          + ", journal: " + journalId);
    }
  } else {
    checkpointImageDigests.put(txid, imageDigest);
    mostRecentCheckpointTxid = Math.max(mostRecentCheckpointTxid, txid);
  }
}
Example 8: verifySavedMD5
import org.apache.hadoop.io.MD5Hash; // import required by the method
/**
 * Verify that the previously saved md5 for the given file matches
 * expectedMd5.
 *
 * @throws IOException
 */
public static void verifySavedMD5(File dataFile, MD5Hash expectedMD5)
    throws IOException {
  MD5Hash storedHash = readStoredMd5ForFile(dataFile);
  // Check the hash itself
  if (!expectedMD5.equals(storedHash)) {
    throw new IOException(
        "File " + dataFile + " did not match stored MD5 checksum" +
        " (stored: " + storedHash + ", computed: " + expectedMD5 + ")");
  }
}
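Across all eight examples, MD5Hash.equals acts as a plain value comparison of the underlying 16-byte digests, and since the class also overrides hashCode consistently, instances work well in hash-based collections. A final small demo (illustrative names only):

import org.apache.hadoop.io.MD5Hash;
import java.nio.charset.StandardCharsets;
import java.util.HashSet;
import java.util.Set;

public class DigestSetDemo {
  public static void main(String[] args) {
    Set<MD5Hash> seen = new HashSet<>();
    byte[] block = "block contents".getBytes(StandardCharsets.UTF_8);

    seen.add(MD5Hash.digest(block));

    // A digest recomputed from the same bytes is equal to the stored one,
    // so the set lookup succeeds.
    System.out.println(seen.contains(MD5Hash.digest(block)));   // true
  }
}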