本文整理汇总了Java中org.hive2hive.core.security.HashUtil类的典型用法代码示例。如果您正苦于以下问题:Java HashUtil类的具体用法?Java HashUtil怎么用?Java HashUtil使用的例子?那么, 这里精选的类代码示例或许可以为您提供帮助。
HashUtil类属于org.hive2hive.core.security包,在下文中一共展示了HashUtil类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: computeFileContentHash
import org.hive2hive.core.security.HashUtil; //导入依赖的package包/类
/**
 * Computes the hash over a file's content. If the file is not accessible
 * for some reason (e.g. locked by another process), the read is retried
 * up to three times in total, waiting 3 seconds between attempts.
 *
 * @param path path to the file on disk (may be null)
 * @return the hash as a base64 encoded string, or the empty string if the
 *         path is null or every attempt to read the file fails
 */
public static String computeFileContentHash(Path path) {
    String newHash = "";
    if (path != null) {
        final int maxAttempts = 3;
        for (int i = 0; i < maxAttempts; i++) {
            try {
                byte[] rawHash = HashUtil.hash(path.toFile());
                if (rawHash != null) {
                    newHash = base64Encode(rawHash);
                }
                break;
            } catch (IOException e) {
                e.printStackTrace();
                // Only wait if another attempt follows; the original code
                // slept 3s even after the final failure.
                if (i < maxAttempts - 1) {
                    try {
                        Thread.sleep(3000);
                    } catch (InterruptedException e1) {
                        // Restore the interrupt status and stop retrying.
                        Thread.currentThread().interrupt();
                        break;
                    }
                }
            }
        }
    }
    return newHash;
}
示例2: computeStructureHash
import org.hive2hive.core.security.HashUtil; //导入依赖的package包/类
/**
 * Recomputes the structure hash of this component: the path strings of all
 * synchronized children are concatenated and hashed, and the result replaces
 * the previous structure hash.
 *
 * @return true if the recomputed structure hash differs from the previous one
 */
private boolean computeStructureHash() {
    final String previousHash = structureHash;
    // collect the names of all synchronized children as the hash input
    StringBuilder nameInput = new StringBuilder();
    for (Map.Entry<Path, FileComponent> child : children.entrySet()) {
        if (child.getValue().isSynchronized()) {
            nameInput.append(child.getKey().toString());
        }
    }
    structureHash = PathUtils.base64Encode(HashUtil.hash(nameInput.toString().getBytes()));
    return !structureHash.equals(previousHash);
}
示例3: computeContentHash
import org.hive2hive.core.security.HashUtil; //导入依赖的package包/类
/**
 * Recomputes this component's content hash by concatenating the content
 * hashes of all synchronized children and hashing over the result.
 *
 * @return true if the content hash changed, false otherwise
 */
@Override
protected boolean computeContentHash() {
    StringBuilder childHashes = new StringBuilder();
    for (FileComponent child : children.values()) {
        if (child.isSynchronized()) {
            childHashes.append(child.getContentHash());
        }
    }
    String freshHash = PathUtils.base64Encode(HashUtil.hash(childHashes.toString().getBytes()));
    if (getContentHash().equals(freshHash)) {
        return false;
    }
    setContentHash(freshHash);
    return true;
}
示例4: testFileInfo_FileNode
import org.hive2hive.core.security.HashUtil; //导入依赖的package包/类
@Test
public void testFileInfo_FileNode() {
    // regular file: content hash of "hello world" must survive the FileInfo conversion
    Path filePath = Paths.get("/path/to/a/file.txt");
    byte[] contentHash = HashUtil.hash("hello world".getBytes());
    FileNode fileNode = new FileNode(null, filePath.toFile(), filePath.getFileName().toString(),
            contentHash, new HashSet<>());
    FileInfo fileInfo = new FileInfo(fileNode);
    assertEquals(filePath, fileInfo.getPath());
    assertTrue(fileInfo.isFile());
    assertEquals("XrY7u+Ae7tCTyyK7j1rNww==", fileInfo.getContentHash());

    // folder: no content hash, so the info exposes the empty string
    Path folderPath = Paths.get("/path/to/a/folder");
    FileNode folderNode = new FileNode(null, folderPath.toFile(), folderPath.getFileName().toString(),
            null, new HashSet<>());
    FileInfo folderInfo = new FileInfo(folderNode);
    assertEquals(folderPath, folderInfo.getPath());
    assertTrue(folderInfo.isFolder());
    assertEquals("", folderInfo.getContentHash());
}
示例5: getDeletedRemotely
import org.hive2hive.core.security.HashUtil; //导入依赖的package包/类
/**
 * Returns a list of files that have been deleted by another client during the absence of this client.
 * A file counts as deleted remotely when it still exists on disk unmodified, but no longer appears
 * in the user profile.
 *
 * @return a list of files that has been deleted remotely
 */
public List<File> getDeletedRemotely() {
    List<File> deletedRemotely = new ArrayList<File>();
    for (String relativePath : now.keySet()) {
        File onDisk = new File(root, relativePath);
        // present before, gone from the profile, and unmodified on disk
        // (order of checks preserved: the profile lookup only happens for known files)
        if (before.containsKey(relativePath)
                && userProfile.getFileByPath(onDisk, root) == null
                && HashUtil.compare(before.get(relativePath), now.get(relativePath))) {
            deletedRemotely.add(onDisk);
        }
    }
    logger.debug("Found {} files/folders that have been deleted remotely during absence.", deletedRemotely.size());
    return deletedRemotely;
}
示例6: visitFiles
import org.hive2hive.core.security.HashUtil; //导入依赖的package包/类
/**
 * Visit all files recursively and calculate the hash of the file. Folders are also added to the result.
 *
 * @param root the root folder
 * @return a map where the key is the relative file path to the root and the value is the hash
 * @throws IOException if hashing fails
 */
public static Map<String, byte[]> visitFiles(File root) throws IOException {
    Map<String, byte[]> digest = new HashMap<String, byte[]>();
    Iterator<File> iterator = FileUtils.iterateFilesAndDirs(root, TrueFileFilter.TRUE, TrueFileFilter.TRUE);
    while (iterator.hasNext()) {
        File current = iterator.next();
        // the root folder itself is not part of the digest
        if (current.equals(root)) {
            continue;
        }
        String relative = FileUtil.relativize(root, current).toString();
        byte[] hash = HashUtil.hash(current);
        // directories are marked with a trailing file separator in the key
        String key = current.isDirectory() ? relative + FileUtil.getFileSep() : relative;
        digest.put(key, hash);
    }
    return digest;
}
示例7: verifyAndWriteChunk
import org.hive2hive.core.security.HashUtil; //导入依赖的package包/类
/**
 * Verifies the received chunk against its expected hash and, on success,
 * writes it to the temporary download destination and marks it downloaded.
 * On a hash mismatch the peer's location is removed and the sub-process fails.
 */
private void verifyAndWriteChunk(MetaChunk metaChunk, Chunk chunk) {
    // verify the hash before trusting the data
    byte[] respondedHash = HashUtil.hash(chunk.getData());
    if (!HashUtil.compare(respondedHash, metaChunk.getChunkHash())) {
        logger.error("Peer {} sent an invalid content for chunk {}.", context.getSelectedPeer(), metaChunk.getIndex());
        responseException = new ProcessExecutionException(this, "Invalid chunk received");
        removeLocation();
        return;
    }
    logger.debug("Peer {} sent a valid content for chunk {}. Hash verified.", context.getSelectedPeer(),
            metaChunk.getIndex());

    // hash is ok, write it to the file
    try {
        FileUtils.writeByteArrayToFile(context.getTempDestination(), chunk.getData());
        logger.debug("Wrote chunk {} to temporary file {}", context.getMetaChunk().getIndex(),
                context.getTempDestination());
        // finalize the sub-process
        context.getTask().markDownloaded(context.getMetaChunk().getIndex(), context.getTempDestination());
    } catch (IOException e) {
        context.getTask().abortDownload("Cannot write the chunk to the temporary file. Reason: " + e.getMessage());
    }
}
示例8: modifyUserProfile
import org.hive2hive.core.security.HashUtil; //导入依赖的package包/类
/**
 * Updates the file's index in the user profile for a new file version:
 * stores the new meta-file hash, remembers the previous content hash for
 * rollback, and replaces it with the new one.
 *
 * @param userProfile the profile to modify
 * @throws AbortModifyException if the new version has the same content
 *         hash as the current one (nothing to update)
 */
@Override
public void modifyUserProfile(UserProfile userProfile) throws AbortModifyException {
BaseMetaFile metaFile = context.consumeMetaFile();
FileIndex index = (FileIndex) userProfile.getFileById(metaFile.getId());
// store hash of meta file
index.setMetaFileHash(context.consumeHash());
// store for backup
originalHash = index.getHash();
// NOTE(review): the meta-file hash above is already set before this
// same-content check; if the exception is thrown, the index was mutated.
// Confirm whether the aborted profile modification is discarded upstream.
if (HashUtil.compare(originalHash, newHash)) {
throw new AbortModifyException(AbortModificationCode.SAME_CONTENT,
"Try to create new version with same content.");
}
// make modifications
logger.debug("Updating the hash in the user profile.");
index.setHash(newHash);
// store for notification
context.provideIndex(index);
}
示例9: initLargeFile
import org.hive2hive.core.security.HashUtil; //导入依赖的package包/类
/**
 * Splits a large file into chunks, hashes each chunk and adds the resulting
 * meta chunks (id, hash, order index) to the context.
 *
 * @param file the large file to initialize
 * @throws ProcessExecutionException if a chunk cannot be read from disk
 */
private void initLargeFile(File file) throws ProcessExecutionException {
    IFileConfiguration config = context.consumeFileConfiguration();
    int chunkCount = FileChunkUtil.getNumberOfChunks(file, config.getChunkSize());
    logger.trace("{} chunks for large file '{}'.", Integer.toString(chunkCount), file.getName());

    // TODO Hashing is slow --> do this in multiple threads to speedup the initialization.
    // process chunk for chunk, hash it and add the meta information to the context
    for (int index = 0; index < chunkCount; index++) {
        String chunkId = UUID.randomUUID().toString();
        Chunk chunk;
        try {
            chunk = FileChunkUtil.getChunk(file, config.getChunkSize(), index, chunkId);
        } catch (IOException ex) {
            throw new ProcessExecutionException(this, ex, "Cannot read the large file.");
        }
        context.getMetaChunks().add(new MetaChunk(chunkId, HashUtil.hash(chunk.getData()), index));
    }
}
示例10: testUpdatedLocally
import org.hive2hive.core.security.HashUtil; //导入依赖的package包/类
@Test
public void testUpdatedLocally() throws IOException, ClassNotFoundException {
    Map<String, byte[]> snapshotBefore = FileSynchronizer.visitFiles(rootFile);
    // modify two files on disk only
    FileUtils.writeStringToFile(file1f2, randomString());
    FileUtils.writeStringToFile(file2f, randomString());
    Map<String, byte[]> snapshotAfter = FileSynchronizer.visitFiles(rootFile);

    FileSynchronizer fileSynchronizer = new FileSynchronizer(rootFile, userProfile, snapshotBefore, snapshotAfter);
    List<File> updatedLocally = fileSynchronizer.getUpdatedLocally();
    Assert.assertEquals(2, updatedLocally.size());
    Assert.assertTrue(updatedLocally.contains(file1f2));
    Assert.assertTrue(updatedLocally.contains(file2f));

    // change file in user profile as well --> should not occur as updated locally
    node1f2.setHash(HashUtil.hash(randomString().getBytes()));
    fileSynchronizer = new FileSynchronizer(rootFile, userProfile, snapshotBefore, snapshotAfter);
    updatedLocally = fileSynchronizer.getUpdatedLocally();
    Assert.assertEquals(1, updatedLocally.size());
    Assert.assertTrue(updatedLocally.contains(file2f));
}
示例11: testConflictUpdateRemotelyDeleteLocally
import org.hive2hive.core.security.HashUtil; //导入依赖的package包/类
@Test
public void testConflictUpdateRemotelyDeleteLocally() throws IOException, ClassNotFoundException {
    Map<String, byte[]> snapshotBefore = FileSynchronizer.visitFiles(rootFile);
    // delete a file locally
    file1f2.delete();
    // modify the same file remotely
    node1f2.setHash(HashUtil.hash(randomString().getBytes()));
    Map<String, byte[]> snapshotAfter = FileSynchronizer.visitFiles(rootFile);
    FileSynchronizer fileSynchronizer = new FileSynchronizer(rootFile, userProfile, snapshotBefore, snapshotAfter);

    // the remote update wins over the local delete: the file re-appears as added remotely
    List<Index> addedRemotely = fileSynchronizer.getAddedRemotely();
    Assert.assertEquals(1, addedRemotely.size());
    Assert.assertTrue(addedRemotely.contains(node1f2));
    List<FileIndex> updatedRemotely = fileSynchronizer.getUpdatedRemotely();
    Assert.assertTrue(updatedRemotely.isEmpty());
    List<Index> deletedLocally = fileSynchronizer.getDeletedLocally();
    Assert.assertTrue(deletedLocally.isEmpty());
}
示例12: testConflictUpdateRemotelyAndLocally
import org.hive2hive.core.security.HashUtil; //导入依赖的package包/类
@Test
public void testConflictUpdateRemotelyAndLocally() throws IOException, ClassNotFoundException {
    Map<String, byte[]> snapshotBefore = FileSynchronizer.visitFiles(rootFile);
    // change a file in the user profile
    node1f2.setHash(HashUtil.hash(randomString().getBytes()));
    // change file on disk as well --> should occur as updated remotely since there is a conflict and the
    // profile wins
    FileUtils.writeStringToFile(file1f2, randomString());
    Map<String, byte[]> snapshotAfter = FileSynchronizer.visitFiles(rootFile);
    FileSynchronizer fileSynchronizer = new FileSynchronizer(rootFile, userProfile, snapshotBefore, snapshotAfter);

    List<FileIndex> updatedRemotely = fileSynchronizer.getUpdatedRemotely();
    Assert.assertEquals(1, updatedRemotely.size());
    Assert.assertTrue(updatedRemotely.contains(node1f2));
    List<File> updatedLocally = fileSynchronizer.getUpdatedLocally();
    Assert.assertTrue(updatedLocally.isEmpty());
}
示例13: testSynchronizeAddFileFromAUpdateAtA
import org.hive2hive.core.security.HashUtil; //导入依赖的package包/类
/**
 * Client A uploads a new file, waits until B has it, then A updates the file
 * and the test verifies the new version is synchronized back to B.
 */
@Test
public void testSynchronizeAddFileFromAUpdateAtA() throws NoSessionException, NoPeerConnectionException, IOException,
        IllegalArgumentException, GetFailedException {
    // upload a fresh file from client A
    File fileFromAAtA = FileTestUtil.createFileRandomContent("file1FromA", new Random().nextInt(MAX_NUM_CHUNKS) + 1,
            sharedFolderA);
    logger.info("Upload a new file '{}' from A.", fileFromAAtA.toString());
    UseCaseTestUtil.uploadNewFile(network.get(0), fileFromAAtA);

    logger.info("Wait till new file '{}' gets synchronized with B.", fileFromAAtA.toString());
    File fileFromAAtB = new File(sharedFolderB, fileFromAAtA.getName());
    waitTillSynchronized(fileFromAAtB, true);

    // update the same file at A and push the new version
    logger.info("Update file '{}' at A.", fileFromAAtA.toString());
    long lastUpdated = fileFromAAtA.lastModified();
    FileUtils.write(fileFromAAtA, randomString(), false);
    byte[] newHash = HashUtil.hash(fileFromAAtA);
    UseCaseTestUtil.uploadNewVersion(network.get(0), fileFromAAtA);

    logger.info("Wait till update of file '{}' gets synchronized with B.", fileFromAAtA.toString());
    waitTillSynchronizedUpdating(fileFromAAtB, lastUpdated);
    compareFiles(fileFromAAtA, fileFromAAtB);
    checkFileIndex(fileFromAAtA, fileFromAAtB, newHash);
}
示例14: testSynchronizeAddFileFromAUpdateAtB
import org.hive2hive.core.security.HashUtil; //导入依赖的package包/类
/**
 * Client A uploads a new file, waits until B has it, then B updates the file
 * and the test verifies the new version is synchronized back to A.
 */
@Test
public void testSynchronizeAddFileFromAUpdateAtB() throws NoSessionException, NoPeerConnectionException, IOException,
        IllegalArgumentException, GetFailedException {
    // upload a fresh file from client A
    File fileFromAAtA = FileTestUtil.createFileRandomContent("file2FromA", new Random().nextInt(MAX_NUM_CHUNKS) + 1,
            sharedFolderA);
    logger.info("Upload a new file '{}' from A.", fileFromAAtA.toString());
    UseCaseTestUtil.uploadNewFile(network.get(0), fileFromAAtA);

    logger.info("Wait till new file '{}' gets synchronized with B.", fileFromAAtA.toString());
    File fileFromAAtB = new File(sharedFolderB, fileFromAAtA.getName());
    waitTillSynchronized(fileFromAAtB, true);

    // update the file at B and push the new version from B's node
    logger.info("Update file '{}' at B.", fileFromAAtA.toString());
    long lastUpdated = fileFromAAtB.lastModified();
    FileUtils.write(fileFromAAtB, randomString(), false);
    byte[] newHash = HashUtil.hash(fileFromAAtB);
    UseCaseTestUtil.uploadNewVersion(network.get(1), fileFromAAtB);

    logger.info("Wait till update of file '{}' gets synchronized with A.", fileFromAAtA.toString());
    waitTillSynchronizedUpdating(fileFromAAtA, lastUpdated);
    compareFiles(fileFromAAtA, fileFromAAtB);
    checkFileIndex(fileFromAAtA, fileFromAAtB, newHash);
}
示例15: testSynchronizeAddFileFromBUpdateAtA
import org.hive2hive.core.security.HashUtil; //导入依赖的package包/类
/**
 * Client B uploads a new file, waits until A has it, then A updates the file
 * and the test verifies the new version is synchronized back to B.
 */
@Test
public void testSynchronizeAddFileFromBUpdateAtA() throws NoSessionException, NoPeerConnectionException, IOException,
        IllegalArgumentException, GetFailedException {
    // upload a fresh file from client B
    File fileFromBAtB = FileTestUtil.createFileRandomContent("file1FromB", new Random().nextInt(MAX_NUM_CHUNKS) + 1,
            sharedFolderB);
    logger.info("Upload a new file '{}' from B.", fileFromBAtB.toString());
    UseCaseTestUtil.uploadNewFile(network.get(1), fileFromBAtB);

    logger.info("Wait till new file '{}' gets synchronized with A.", fileFromBAtB.toString());
    File fileFromBAtA = new File(sharedFolderA, fileFromBAtB.getName());
    waitTillSynchronized(fileFromBAtA, true);

    // update the file at A and push the new version from A's node
    logger.info("Update file '{}' at A.", fileFromBAtB.toString());
    long lastUpdated = fileFromBAtA.lastModified();
    FileUtils.write(fileFromBAtA, randomString(), false);
    byte[] newHash = HashUtil.hash(fileFromBAtA);
    UseCaseTestUtil.uploadNewVersion(network.get(0), fileFromBAtA);

    logger.info("Wait till update of file '{}' gets synchronized with B.", fileFromBAtB.toString());
    waitTillSynchronizedUpdating(fileFromBAtB, lastUpdated);
    compareFiles(fileFromBAtA, fileFromBAtB);
    checkFileIndex(fileFromBAtA, fileFromBAtB, newHash);
}