This article collects typical usage examples of the Java class org.apache.hadoop.hdfs.server.namenode.snapshot.FileWithSnapshot.FileDiffList. If you are wondering what exactly FileDiffList does, how to use it, or what real code that uses it looks like, the hand-picked examples below may help.
FileDiffList is a nested class of org.apache.hadoop.hdfs.server.namenode.snapshot.FileWithSnapshot. Three code examples of the class are shown below, sorted by popularity by default. You can upvote any example you like or find useful; your feedback helps the system recommend better Java code examples.
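Before the examples, here is a minimal sketch of the FileDiffList surface the snippets below rely on. The helper class and the printing are illustrative assumptions, not part of the original examples; only getDiffs(), asList() and getFileSize() appear in the examples themselves.

import java.util.List;

import org.apache.hadoop.hdfs.server.namenode.snapshot.FileWithSnapshot;
import org.apache.hadoop.hdfs.server.namenode.snapshot.FileWithSnapshot.FileDiff;
import org.apache.hadoop.hdfs.server.namenode.snapshot.FileWithSnapshot.FileDiffList;

// Hypothetical helper: walk the per-snapshot diffs recorded for a snapshotted file.
class FileDiffListSketch {
  static void printDiffs(FileWithSnapshot file) {
    FileDiffList diffs = file.getDiffs();   // one FileDiff per snapshot that captured the file
    List<FileDiff> list = diffs.asList();
    for (FileDiff d : list) {
      System.out.println("file size recorded in this diff: " + d.getFileSize());
    }
  }
}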
Example 1: computeQuotaUsage
import org.apache.hadoop.hdfs.server.namenode.snapshot.FileWithSnapshot.FileDiffList; // import the required package/class
@Override
public final Quota.Counts computeQuotaUsage(Quota.Counts counts,
    boolean useCache, int lastSnapshotId) {
  long nsDelta = 1;
  final long dsDelta;
  if (this instanceof FileWithSnapshot) {
    FileDiffList fileDiffList = ((FileWithSnapshot) this).getDiffs();
    Snapshot last = fileDiffList.getLastSnapshot();
    List<FileDiff> diffs = fileDiffList.asList();
    if (lastSnapshotId == Snapshot.INVALID_ID || last == null) {
      // no snapshot limit given, or no diffs recorded: count the current state,
      // charging one namespace item per diff on top of the file itself
      nsDelta += diffs.size();
      dsDelta = diskspaceConsumed();
    } else if (last.getId() < lastSnapshotId) {
      // every recorded diff is older than the requested snapshot,
      // so the file seen by that snapshot is the current file
      dsDelta = computeFileSize(true, false) * getFileReplication();
    } else {
      // charge the disk space recorded for the requested snapshot
      Snapshot s = fileDiffList.getSnapshotById(lastSnapshotId);
      dsDelta = diskspaceConsumed(s);
    }
  } else {
    dsDelta = diskspaceConsumed();
  }
  counts.add(Quota.NAMESPACE, nsDelta);
  counts.add(Quota.DISKSPACE, dsDelta);
  return counts;
}
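As a rough, hypothetical illustration of the arithmetic above (the numbers are invented, and diskspaceConsumed() is simplified to file size * replication):

// Hypothetical numbers, not taken from the example above.
long fileSize = 100L * 1024 * 1024;    // 100 MB
short replication = 3;
int diffCount = 2;                     // two snapshot diffs recorded for the file

long nsDelta = 1 + diffCount;          // 3: the file itself plus one per diff
long dsDelta = fileSize * replication; // 300 MB, assuming diskspaceConsumed() = size * replication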
Example 2: computeContentSummary4Snapshot
import org.apache.hadoop.hdfs.server.namenode.snapshot.FileWithSnapshot.FileDiffList; // import the required package/class
private void computeContentSummary4Snapshot(final Content.Counts counts) {
  // file length and diskspace only counted for the latest state of the file
  // i.e. either the current state or the last snapshot
  if (this instanceof FileWithSnapshot) {
    final FileWithSnapshot withSnapshot = (FileWithSnapshot) this;
    final FileDiffList diffs = withSnapshot.getDiffs();
    final int n = diffs.asList().size();
    counts.add(Content.FILE, n);
    if (n > 0 && withSnapshot.isCurrentFileDeleted()) {
      counts.add(Content.LENGTH, diffs.getLast().getFileSize());
    }
    if (withSnapshot.isCurrentFileDeleted()) {
      final long lastFileSize = diffs.getLast().getFileSize();
      counts.add(Content.DISKSPACE, lastFileSize * getBlockReplication());
    }
  }
}
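Again as a hypothetical illustration (invented numbers): when the file now lives only in snapshots, i.e. isCurrentFileDeleted() returns true, the summary charges the size recorded in the newest diff.

// Hypothetical numbers, not taken from the example above.
int diffCount = 2;                          // Content.FILE is increased by 2
boolean currentFileDeleted = true;          // the file survives only in snapshots
long lastDiffFileSize = 64L * 1024 * 1024;  // 64 MB recorded in the newest diff
short blockReplication = 3;

long length    = currentFileDeleted ? lastDiffFileSize : 0;                    // Content.LENGTH
long diskspace = currentFileDeleted ? lastDiffFileSize * blockReplication : 0; // Content.DISKSPACE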
Example 3: loadFileDiffList
import org.apache.hadoop.hdfs.server.namenode.snapshot.FileWithSnapshot.FileDiffList; // import the required package/class
public static FileDiffList loadFileDiffList(DataInput in,
    FSImageFormat.Loader loader) throws IOException {
  final int size = in.readInt();
  if (size == -1) {
    // -1 marks a file that has no snapshot diffs
    return null;
  } else {
    final FileDiffList diffs = new FileDiffList();
    FileDiff posterior = null;
    // the image stores diffs newest first; addFirst rebuilds the
    // in-memory list in oldest-to-newest order, chaining each diff
    // to the more recent one loaded before it
    for (int i = 0; i < size; i++) {
      final FileDiff d = loadFileDiff(posterior, in, loader);
      diffs.addFirst(d);
      posterior = d;
    }
    return diffs;
  }
}