This article collects typical usage examples of the Java method org.apache.hadoop.oncrpc.XDR.readHyper. If you are wondering what XDR.readHyper does, how to call it, or where to find examples of it, the curated code samples below should help. You can also look further into the enclosing class, org.apache.hadoop.oncrpc.XDR.
Below are 15 code examples of XDR.readHyper, ordered by popularity by default.
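Before the examples, a minimal sketch of what readHyper does: it consumes the next eight bytes of the XDR buffer as a big-endian 64-bit value (an XDR "hyper", per RFC 4506) and returns it as a Java long. The round trip below is illustrative only and assumes the usual write-side helpers of the same class (writeLongAsHyper, asReadOnlyWrap); it is not taken from the examples that follow.
import org.apache.hadoop.oncrpc.XDR;

public class ReadHyperRoundTrip {
  public static void main(String[] args) {
    XDR out = new XDR();             // write-mode buffer
    out.writeLongAsHyper(42L);       // encode a 64-bit hyper (8 bytes, big-endian)
    XDR in = out.asReadOnlyWrap();   // read-only view over the same bytes
    long value = in.readHyper();     // decode it back as a Java long
    System.out.println(value);       // prints 42
  }
}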
Example 1: deserialize
import org.apache.hadoop.oncrpc.XDR; // import the class the method depends on
public static CREATE3Request deserialize(XDR xdr) throws IOException {
  FileHandle handle = readHandle(xdr);
  String name = xdr.readString();
  int mode = xdr.readInt();
  SetAttr3 objAttr = new SetAttr3();
  long verf = 0;
  if ((mode == Nfs3Constant.CREATE_UNCHECKED)
      || (mode == Nfs3Constant.CREATE_GUARDED)) {
    objAttr.deserialize(xdr);
  } else if (mode == Nfs3Constant.CREATE_EXCLUSIVE) {
    verf = xdr.readHyper();
  } else {
    throw new IOException("Wrong create mode:" + mode);
  }
  return new CREATE3Request(handle, name, mode, objAttr, verf);
}
Example 2: deserialize
import org.apache.hadoop.oncrpc.XDR; // import the class the method depends on
public static READDIR3Response deserialize(XDR xdr) {
  int status = xdr.readInt();
  xdr.readBoolean();
  Nfs3FileAttributes postOpDirAttr = Nfs3FileAttributes.deserialize(xdr);
  long cookieVerf = 0;
  ArrayList<Entry3> entries = new ArrayList<Entry3>();
  DirList3 dirList = null;
  if (status == Nfs3Status.NFS3_OK) {
    cookieVerf = xdr.readHyper();
    while (xdr.readBoolean()) {
      Entry3 e = Entry3.deserialzie(xdr);
      entries.add(e);
    }
    boolean eof = xdr.readBoolean();
    Entry3[] allEntries = new Entry3[entries.size()];
    entries.toArray(allEntries);
    dirList = new DirList3(allEntries, eof);
  }
  return new READDIR3Response(status, postOpDirAttr, cookieVerf, dirList);
}
Example 3: deserialize
import org.apache.hadoop.oncrpc.XDR; // import the class the method depends on
public static FSSTAT3Response deserialize(XDR xdr) {
  int status = xdr.readInt();
  xdr.readBoolean();
  Nfs3FileAttributes postOpAttr = Nfs3FileAttributes.deserialize(xdr);
  long tbytes = 0;
  long fbytes = 0;
  long abytes = 0;
  long tfiles = 0;
  long ffiles = 0;
  long afiles = 0;
  int invarsec = 0;
  if (status == Nfs3Status.NFS3_OK) {
    tbytes = xdr.readHyper();
    fbytes = xdr.readHyper();
    abytes = xdr.readHyper();
    tfiles = xdr.readHyper();
    ffiles = xdr.readHyper();
    afiles = xdr.readHyper();
    invarsec = xdr.readInt();
  }
  return new FSSTAT3Response(status, postOpAttr, tbytes, fbytes, abytes,
      tfiles, ffiles, afiles, invarsec);
}
Example 4: deserialize
import org.apache.hadoop.oncrpc.XDR; // import the class the method depends on
public static READDIRPLUS3Response deserialize(XDR xdr) {
  int status = xdr.readInt();
  xdr.readBoolean();
  Nfs3FileAttributes postOpDirAttr = Nfs3FileAttributes.deserialize(xdr);
  long cookieVerf = 0;
  ArrayList<EntryPlus3> entries = new ArrayList<EntryPlus3>();
  DirListPlus3 dirList = null;
  if (status == Nfs3Status.NFS3_OK) {
    cookieVerf = xdr.readHyper();
    while (xdr.readBoolean()) {
      EntryPlus3 e = EntryPlus3.deseralize(xdr);
      entries.add(e);
    }
    boolean eof = xdr.readBoolean();
    EntryPlus3[] allEntries = new EntryPlus3[entries.size()];
    entries.toArray(allEntries);
    dirList = new DirListPlus3(allEntries, eof);
  }
  return new READDIRPLUS3Response(status, postOpDirAttr, cookieVerf, dirList);
}
Example 5: deserialize
import org.apache.hadoop.oncrpc.XDR; // import the class the method depends on
public static Nfs3FileAttributes deserialize(XDR xdr) {
  Nfs3FileAttributes attr = new Nfs3FileAttributes();
  attr.type = xdr.readInt();
  attr.mode = xdr.readInt();
  attr.nlink = xdr.readInt();
  attr.uid = xdr.readInt();
  attr.gid = xdr.readInt();
  attr.size = xdr.readHyper();
  attr.used = xdr.readHyper();
  attr.rdev = new Specdata3(xdr.readInt(), xdr.readInt());
  attr.fsid = xdr.readHyper();
  attr.fileId = xdr.readHyper();
  attr.atime = NfsTime.deserialize(xdr);
  attr.mtime = NfsTime.deserialize(xdr);
  attr.ctime = NfsTime.deserialize(xdr);
  return attr;
}
Example 6: deserialize
import org.apache.hadoop.oncrpc.XDR; // import the class the method depends on
public static READDIRPLUS3Request deserialize(XDR xdr) throws IOException {
  FileHandle handle = readHandle(xdr);
  long cookie = xdr.readHyper();
  long cookieVerf = xdr.readHyper();
  int dirCount = xdr.readInt();
  int maxCount = xdr.readInt();
  return new READDIRPLUS3Request(handle, cookie, cookieVerf, dirCount,
      maxCount);
}
Example 7: deserialize
import org.apache.hadoop.oncrpc.XDR; // import the class the method depends on
public static WRITE3Request deserialize(XDR xdr) throws IOException {
  FileHandle handle = readHandle(xdr);
  long offset = xdr.readHyper();
  int count = xdr.readInt();
  WriteStableHow stableHow = WriteStableHow.fromValue(xdr.readInt());
  ByteBuffer data = ByteBuffer.wrap(xdr.readFixedOpaque(xdr.readInt()));
  return new WRITE3Request(handle, offset, count, stableHow, data);
}
Example 8: deserialize
import org.apache.hadoop.oncrpc.XDR; // import the class the method depends on
public static READDIR3Request deserialize(XDR xdr) throws IOException {
  FileHandle handle = readHandle(xdr);
  long cookie = xdr.readHyper();
  long cookieVerf = xdr.readHyper();
  int count = xdr.readInt();
  return new READDIR3Request(handle, cookie, cookieVerf, count);
}
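A similar hedged sketch for the two consecutive readHyper calls above: cookie and cookieverf are the 64-bit resume markers of the READDIR protocol, both written with writeLongAsHyper on the sending side. Handle id, count and the write-side calls below are illustrative assumptions, not code from this page.
XDR xdr = new XDR();
new FileHandle(2L).serialize(xdr);   // directory handle (illustrative id)
xdr.writeLongAsHyper(0L);            // cookie: 0 = start listing from the beginning
xdr.writeLongAsHyper(0L);            // cookieverf: 0 on the first call
xdr.writeInt(8192);                  // count: maximum reply size in bytes
READDIR3Request req = READDIR3Request.deserialize(xdr.asReadOnlyWrap());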
Example 9: deserialize
import org.apache.hadoop.oncrpc.XDR; // import the class the method depends on
public static FSINFO3Response deserialize(XDR xdr) {
  int status = xdr.readInt();
  xdr.readBoolean();
  Nfs3FileAttributes postOpObjAttr = Nfs3FileAttributes.deserialize(xdr);
  int rtmax = 0;
  int rtpref = 0;
  int rtmult = 0;
  int wtmax = 0;
  int wtpref = 0;
  int wtmult = 0;
  int dtpref = 0;
  long maxFileSize = 0;
  NfsTime timeDelta = null;
  int properties = 0;
  if (status == Nfs3Status.NFS3_OK) {
    rtmax = xdr.readInt();
    rtpref = xdr.readInt();
    rtmult = xdr.readInt();
    wtmax = xdr.readInt();
    wtpref = xdr.readInt();
    wtmult = xdr.readInt();
    dtpref = xdr.readInt();
    maxFileSize = xdr.readHyper();
    timeDelta = NfsTime.deserialize(xdr);
    properties = xdr.readInt();
  }
  return new FSINFO3Response(status, postOpObjAttr, rtmax, rtpref, rtmult,
      wtmax, wtpref, wtmult, dtpref, maxFileSize, timeDelta, properties);
}
Example 10: deserialize
import org.apache.hadoop.oncrpc.XDR; // import the class the method depends on
public static COMMIT3Response deserialize(XDR xdr) {
  int status = xdr.readInt();
  long verf = 0;
  WccData fileWcc = WccData.deserialize(xdr);
  if (status == Nfs3Status.NFS3_OK) {
    verf = xdr.readHyper();
  }
  return new COMMIT3Response(status, fileWcc, verf);
}
Example 11: deserialize
import org.apache.hadoop.oncrpc.XDR; // import the class the method depends on
public static WRITE3Response deserialize(XDR xdr) {
  int status = xdr.readInt();
  WccData fileWcc = WccData.deserialize(xdr);
  int count = 0;
  WriteStableHow stableHow = null;
  long verifier = 0;
  if (status == Nfs3Status.NFS3_OK) {
    count = xdr.readInt();
    int how = xdr.readInt();
    stableHow = WriteStableHow.values()[how];
    verifier = xdr.readHyper();
  }
  return new WRITE3Response(status, fileWcc, count, stableHow, verifier);
}
Example 12: deseralize
import org.apache.hadoop.oncrpc.XDR; // import the class the method depends on
static EntryPlus3 deseralize(XDR xdr) {
  long fileId = xdr.readHyper();
  String name = xdr.readString();
  long cookie = xdr.readHyper();
  xdr.readBoolean();
  Nfs3FileAttributes nameAttr = Nfs3FileAttributes.deserialize(xdr);
  FileHandle objFileHandle = new FileHandle();
  objFileHandle.deserialize(xdr);
  return new EntryPlus3(fileId, name, cookie, nameAttr, objFileHandle);
}
Example 13: deserialize
import org.apache.hadoop.oncrpc.XDR; // import the class the method depends on
public static COMMIT3Request deserialize(XDR xdr) throws IOException {
  FileHandle handle = readHandle(xdr);
  long offset = xdr.readHyper();
  int count = xdr.readInt();
  return new COMMIT3Request(handle, offset, count);
}
Example 14: deserialize
import org.apache.hadoop.oncrpc.XDR; // import the class the method depends on
public static READ3Request deserialize(XDR xdr) throws IOException {
  FileHandle handle = readHandle(xdr);
  long offset = xdr.readHyper();
  int count = xdr.readInt();
  return new READ3Request(handle, offset, count);
}
Example 15: deserialize
import org.apache.hadoop.oncrpc.XDR; // import the class the method depends on
public static WccAttr deserialize(XDR xdr) {
  long size = xdr.readHyper();
  NfsTime mtime = NfsTime.deserialize(xdr);
  NfsTime ctime = NfsTime.deserialize(xdr);
  return new WccAttr(size, mtime, ctime);
}