本文整理汇总了Java中org.apache.commons.codec.digest.DigestUtils.getMd5Digest方法的典型用法代码示例。如果您正苦于以下问题:Java DigestUtils.getMd5Digest方法的具体用法?Java DigestUtils.getMd5Digest怎么用?Java DigestUtils.getMd5Digest使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类org.apache.commons.codec.digest.DigestUtils
的用法示例。
在下文中一共展示了DigestUtils.getMd5Digest方法的3个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: run
import org.apache.commons.codec.digest.DigestUtils; //导入方法依赖的package包/类
// Consumer loop: drains piece payloads from the shared queue, frames each piece
// (4-byte header + content + trailer byte) into a reusable buffer and reports it.
// Exits on queue timeout or when a non-piece control message is encountered.
// NOTE(review): assumes this.pieceSize is large enough for head + content + trailer
// — sizing is enforced by the producer, not visible here; confirm against caller.
@Override
public void run() {
try {
// One MD5 digest instance reused (via reportPiece) across all pieces of this task.
MessageDigest pieceM5 = DigestUtils.getMd5Digest();
int pieceNum;
// byteBuf is the framing buffer, reused for every piece; big-endian so the
// int header is written in network byte order.
ByteBuffer bb, byteBuf = ByteBuffer.allocate(this.pieceSize);
byteBuf.order(ByteOrder.BIG_ENDIAN);
while (true) {
// Bounded wait for the next piece; null means the producer stalled.
ProtocolContent protocolContent = contQu.poll(waitTime, TimeUnit.SECONDS);
if (protocolContent == null) {
logger.warn("taskId:{} get piece timeout", taskId);
break;
}
if (protocolContent.isPieceType()) {
pieceNum = protocolContent.getPieceNum();
bb = protocolContent.getContent();
// Piece header: content length OR-ed with the piece-size bit flags.
byteBuf.putInt(bb.limit() | this.pieceSizeBit);
byteBuf.put(bb);
// Fixed trailer byte marking the end of the piece frame.
byteBuf.put((byte)0x7f);
// Recycle the content buffer back to the producer-side pool.
bb.clear();
reusedCache.offer(bb);
reportPiece(pieceM5, byteBuf, pieceNum);
} else {
// Not piece data (e.g. an end-of-task marker): put it back for whoever
// handles control content, then stop consuming.
contQu.put(protocolContent);
break;
}
}
// Only reached on clean exit paths (timeout or control message), not on exception.
sucCount.incrementAndGet();
} catch (Exception e) {
logger.error("write piece error for taskId:{}", taskId, e);
} finally {
// Always release the latch so the coordinating thread is never blocked forever.
downLatch.countDown();
}
}
示例2: walk
import org.apache.commons.codec.digest.DigestUtils; //导入方法依赖的package包/类
/**
 * Reads one repository object, computes its MD5 while streaming, and uploads it
 * to S3 only when the checksum differs from what the bucket already holds.
 *
 * @param iter remaining bucket listing, consumed by {@code isUploadFile}
 * @param file repository object id to read
 * @param path destination key within the bucket
 * @return {@code true} when the file was uploaded, {@code false} when skipped
 * @throws IOException on repository read or upload failure
 */
private boolean walk(Iterator<S3ObjectSummary> iter, ObjectId file, String path) throws IOException {
	LOG.debug("Start processing file: {}", path);
	final byte[] content;
	final byte[] newHash;
	// Stream the object through a digesting wrapper so content and MD5 are
	// produced in a single pass.
	try (DigestInputStream is = new DigestInputStream(repository.open(file).openStream(), DigestUtils.getMd5Digest())) {
		content = IOUtils.toByteArray(is);
		newHash = is.getMessageDigest().digest();
	}
	// Guard clause: unchanged checksum means nothing to do.
	if (!isUploadFile(iter, path, Hex.encodeHexString(newHash))) {
		LOG.info("Skipping file (same checksum): {}", path);
		return false;
	}
	LOG.info("Uploading file: {}", path);
	final ObjectMetadata bucketMetadata = new ObjectMetadata();
	// Content-MD5 lets S3 verify the payload integrity server-side.
	bucketMetadata.setContentMD5(Base64.encodeAsString(newHash));
	bucketMetadata.setContentLength(content.length);
	// Give Tika a few hints for the content detection
	final Metadata tikaMetadata = new Metadata();
	tikaMetadata.set(Metadata.RESOURCE_NAME_KEY, FilenameUtils.getName(FilenameUtils.normalize(path)));
	// Fire!
	try (InputStream bis = TikaInputStream.get(content, tikaMetadata)) {
		bucketMetadata.setContentType(TIKA_DETECTOR.detect(bis, tikaMetadata).toString());
		s3.putObject(bucket.getName(), path, bis, bucketMetadata);
		return true;
	}
}
示例3: processCacheByChannel
import org.apache.commons.codec.digest.DigestUtils; //导入方法依赖的package包/类
/**
 * Re-reads an already-downloaded task file piece by piece, recomputes and reports
 * each piece's MD5, and (for a fully cached file) recomputes and persists the
 * whole-file MD5 plus the per-piece MD5 list.
 *
 * @param breakNum resume point: -1 means the whole file is cached; a positive
 *        value bounds how many pieces to process
 * @param cacheResult receives the start piece number; may carry a partially
 *        updated whole-file digest in {@code getFileM5()}
 * @param metaData existing metadata; a non-blank realMd5 means the file-level
 *        digest was already finalized and is not recomputed
 * @param task the task whose cached file is processed
 * @throws RuntimeException wrapping any failure while reading or reporting
 */
private void processCacheByChannel(int breakNum, CacheResult cacheResult, FileMetaData metaData,
Task task) {
String taskId = task.getTaskId();
Integer pieceSize = task.getPieceSize();
try (FileInputStream fis = new FileInputStream(PathUtil.getDownloadPath(taskId).toFile());
FileChannel fc = fis.getChannel()) {
List<String> pieceMd5s = new ArrayList<>();
// Digest instance reused for every piece (reset after each one).
MessageDigest pieceMd5 = DigestUtils.getMd5Digest();
MessageDigest fileM5 = cacheResult.getFileM5();
if (breakNum == -1 && StringUtils.isNotBlank(metaData.getRealMd5())) {
// Whole-file MD5 is already known; skip recomputing it below.
fileM5 = null;
}
ByteBuffer bb = generateByteBuffer();
String pieceMd5Value;
long curFileLen = fc.size();
// Either resume up to breakNum pieces, or cover the whole file (ceil division).
int curPieceTotal =
breakNum > 0 ? breakNum : (int)((curFileLen + pieceSize - 1) / pieceSize);
int pieceHead, pieceLen;
for (int pieceNum = 0; pieceNum < curPieceTotal; pieceNum++) {
// Seat the channel at the start of this piece's on-disk frame.
fc.position(pieceNum * (long)pieceSize);
// First read just the piece header to learn the content length.
bb.limit(Constants.PIECE_HEAD_SIZE);
fc.read(bb);
bb.flip();
pieceHead = bb.getInt();
// Low 24 bits of the header carry the content length; upper bits are flags.
pieceLen = pieceHead & 0xffffff;
// Second read pulls content + trailer; limit includes head already in bb.
bb.limit(pieceLen + Constants.PIECE_WRAP_SIZE);
fc.read(bb);
bb.flip();
// Piece MD5 covers the full frame (head + content + trailer).
pieceMd5.update(bb);
pieceMd5Value = Hex.encodeHexString(pieceMd5.digest()) + ":" + bb.limit();
cdnReporter.reportPieceStatus(taskId, pieceNum, pieceMd5Value, PeerPieceStatus.SUCCESS,
FromType.LOCAL.type());
pieceMd5s.add(pieceMd5Value);
pieceMd5.reset();
if (fileM5 != null) {
// File MD5 covers only the raw content: rewind, drop the trailer byte,
// then skip past the header before updating.
bb.flip();
bb.limit(bb.limit() - 1);
bb.position(Constants.PIECE_HEAD_SIZE);
fileM5.update(bb);
}
bb.clear();
}
if (breakNum == -1) {
String fileMd5Value = metaData.getRealMd5();
if (StringUtils.isBlank(fileMd5Value)) {
// First time finishing this file: finalize the digest and persist it.
fileMd5Value = Hex.encodeHexString(fileM5.digest());
fileMetaDataService.updateStatusAndResult(taskId, true, true, fileMd5Value, curFileLen);
}
cdnReporter.reportTaskStatus(taskId, CdnStatus.SUCCESS, fileMd5Value, curFileLen,
FromType.LOCAL.type());
fileMetaDataService.writePieceMd5(taskId, fileMd5Value, pieceMd5s);
}
cacheResult.setStartPieceNum(breakNum);
} catch (Exception e) {
throw new RuntimeException("report cache by channel error for taskId:" + taskId, e);
}
}