This article collects typical usage examples of the Java class org.apache.hadoop.io.DataInputByteBuffer. If you are unsure what DataInputByteBuffer is for or how to use it, the curated class code examples below may help.
The DataInputByteBuffer class belongs to the org.apache.hadoop.io package. The sections below show 15 code examples of the class, sorted by popularity by default.
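Before the examples, here is a minimal stand-alone sketch of the pattern almost all of them follow: a Credentials object is serialized into a DataOutputBuffer, wrapped in a ByteBuffer, and then read back through a DataInputByteBuffer. This sketch is for orientation only and is not taken from the examples below; the class name CredentialsRoundTrip is illustrative.

import java.nio.ByteBuffer;
import org.apache.hadoop.io.DataInputByteBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.security.Credentials;

public class CredentialsRoundTrip {
  public static void main(String[] args) throws Exception {
    // Serialize an (empty) Credentials object into a ByteBuffer,
    // the way YARN passes tokens inside a ContainerLaunchContext.
    Credentials written = new Credentials();
    DataOutputBuffer dob = new DataOutputBuffer();
    written.writeTokenStorageToStream(dob);
    ByteBuffer tokens = ByteBuffer.wrap(dob.getData(), 0, dob.getLength());

    // Read the same bytes back through a DataInputByteBuffer.
    DataInputByteBuffer dibb = new DataInputByteBuffer();
    tokens.rewind();      // position the buffer at the start before reading
    dibb.reset(tokens);   // point the input stream at the buffer's contents
    Credentials read = new Credentials();
    read.readTokenStorageStream(dibb);
    System.out.println("tokens read back: " + read.numberOfTokens());
  }
}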
Example 1: parseCredentials
import org.apache.hadoop.io.DataInputByteBuffer; // import the required package/class
private Credentials parseCredentials(ContainerLaunchContext launchContext)
throws IOException {
Credentials credentials = new Credentials();
// //////////// Parse credentials
ByteBuffer tokens = launchContext.getTokens();
if (tokens != null) {
DataInputByteBuffer buf = new DataInputByteBuffer();
tokens.rewind();
buf.reset(tokens);
credentials.readTokenStorageStream(buf);
if (LOG.isDebugEnabled()) {
for (Token<? extends TokenIdentifier> tk : credentials.getAllTokens()) {
LOG.debug(tk.getService() + " = " + tk.toString());
}
}
}
// //////////// End of parsing credentials
return credentials;
}
Example 2: parseCredentials
import org.apache.hadoop.io.DataInputByteBuffer; // import the required package/class
/**
* Parses the container launch context and returns a Credentials instance that
* contains all the tokens from the launch context.
* @param launchContext the container launch context to parse
* @return the Credentials instance
* @throws IOException
*/
public static Credentials parseCredentials(
ContainerLaunchContext launchContext) throws IOException {
Credentials credentials = new Credentials();
ByteBuffer tokens = launchContext.getTokens();
if (tokens != null) {
DataInputByteBuffer buf = new DataInputByteBuffer();
tokens.rewind();
buf.reset(tokens);
credentials.readTokenStorageStream(buf);
if (LOG.isDebugEnabled()) {
for (Token<? extends TokenIdentifier> tk : credentials
.getAllTokens()) {
LOG.debug(tk.getService() + " = " + tk.toString());
}
}
}
return credentials;
}
Example 3: parseCredentials
import org.apache.hadoop.io.DataInputByteBuffer; // import the required package/class
private Credentials parseCredentials(ContainerLaunchContext launchContext)
throws YarnException {
Credentials credentials = new Credentials();
// //////////// Parse credentials
ByteBuffer tokens = launchContext.getTokens();
if (tokens != null) {
DataInputByteBuffer buf = new DataInputByteBuffer();
tokens.rewind();
buf.reset(tokens);
try {
credentials.readTokenStorageStream(buf);
if (LOG.isDebugEnabled()) {
for (Token<? extends TokenIdentifier> tk : credentials.getAllTokens()) {
LOG.debug(tk.getService() + " = " + tk.toString());
}
}
} catch (IOException e) {
throw RPCUtil.getRemoteException(e);
}
}
// //////////// End of parsing credentials
return credentials;
}
Example 4: storeApplicationAttemptStateInternal
import org.apache.hadoop.io.DataInputByteBuffer; // import the required package/class
@Override
public synchronized void storeApplicationAttemptStateInternal(
ApplicationAttemptId appAttemptId,
ApplicationAttemptStateDataPBImpl attemptStateData)
throws Exception {
Credentials credentials = null;
if (attemptStateData.getAppAttemptTokens() != null) {
DataInputByteBuffer dibb = new DataInputByteBuffer();
credentials = new Credentials();
dibb.reset(attemptStateData.getAppAttemptTokens());
credentials.readTokenStorageStream(dibb);
}
ApplicationAttemptState attemptState =
new ApplicationAttemptState(appAttemptId,
attemptStateData.getMasterContainer(), credentials,
attemptStateData.getStartTime());
ApplicationState appState = state.getApplicationState().get(
attemptState.getAttemptId().getApplicationId());
if (appState == null) {
throw new YarnRuntimeException("Application doesn't exist");
}
appState.attempts.put(attemptState.getAttemptId(), attemptState);
}
Example 5: setupTokens
import org.apache.hadoop.io.DataInputByteBuffer; // import the required package/class
private void setupTokens(
ContainerLaunchContext container, ContainerId containerID)
throws IOException {
Map<String, String> environment = container.getEnvironment();
environment.put(ApplicationConstants.APPLICATION_WEB_PROXY_BASE_ENV,
application.getWebProxyBase());
// Set AppSubmitTime and MaxAppAttempts to be consumable by the AM.
ApplicationId applicationId =
application.getAppAttemptId().getApplicationId();
environment.put(
ApplicationConstants.APP_SUBMIT_TIME_ENV,
String.valueOf(rmContext.getRMApps()
.get(applicationId)
.getSubmitTime()));
environment.put(ApplicationConstants.MAX_APP_ATTEMPTS_ENV,
String.valueOf(rmContext.getRMApps().get(
applicationId).getMaxAppAttempts()));
Credentials credentials = new Credentials();
DataInputByteBuffer dibb = new DataInputByteBuffer();
if (container.getTokens() != null) {
// TODO: Don't do this kind of checks everywhere.
dibb.reset(container.getTokens());
credentials.readTokenStorageStream(dibb);
}
// Add AMRMToken
Token<AMRMTokenIdentifier> amrmToken = createAndSetAMRMToken();
if (amrmToken != null) {
credentials.addToken(amrmToken.getService(), amrmToken);
}
DataOutputBuffer dob = new DataOutputBuffer();
credentials.writeTokenStorageToStream(dob);
container.setTokens(ByteBuffer.wrap(dob.getData(), 0, dob.getLength()));
}
Example 6: parseCredentials
import org.apache.hadoop.io.DataInputByteBuffer; // import the required package/class
protected Credentials parseCredentials() throws IOException {
Credentials credentials = new Credentials();
DataInputByteBuffer dibb = new DataInputByteBuffer();
ByteBuffer tokens = submissionContext.getAMContainerSpec().getTokens();
if (tokens != null) {
dibb.reset(tokens);
credentials.readTokenStorageStream(dibb);
tokens.rewind();
}
return credentials;
}
Example 7: parseCredentials
import org.apache.hadoop.io.DataInputByteBuffer; // import the required package/class
protected Credentials parseCredentials(
ApplicationSubmissionContext application) throws IOException {
Credentials credentials = new Credentials();
DataInputByteBuffer dibb = new DataInputByteBuffer();
ByteBuffer tokens = application.getAMContainerSpec().getTokens();
if (tokens != null) {
dibb.reset(tokens);
credentials.readTokenStorageStream(dibb);
tokens.rewind();
}
return credentials;
}
Example 8: getContainerCredentials
import org.apache.hadoop.io.DataInputByteBuffer; // import the required package/class
public Credentials getContainerCredentials() throws IOException {
Credentials credentials = new Credentials();
DataInputByteBuffer buf = new DataInputByteBuffer();
containerTokens.rewind();
buf.reset(containerTokens);
credentials.readTokenStorageStream(buf);
return credentials;
}
Example 9: addTimelineDelegationToken
import org.apache.hadoop.io.DataInputByteBuffer; // import the required package/class
private void addTimelineDelegationToken(
ContainerLaunchContext clc) throws YarnException, IOException {
Credentials credentials = new Credentials();
DataInputByteBuffer dibb = new DataInputByteBuffer();
ByteBuffer tokens = clc.getTokens();
if (tokens != null) {
dibb.reset(tokens);
credentials.readTokenStorageStream(dibb);
tokens.rewind();
}
// If the timeline delegation token is already in the CLC, no need to add
// one more
for (org.apache.hadoop.security.token.Token<? extends TokenIdentifier> token : credentials
.getAllTokens()) {
if (token.getKind().equals(TimelineDelegationTokenIdentifier.KIND_NAME)) {
return;
}
}
org.apache.hadoop.security.token.Token<TimelineDelegationTokenIdentifier>
timelineDelegationToken = getTimelineDelegationToken();
if (timelineDelegationToken == null) {
return;
}
credentials.addToken(timelineService, timelineDelegationToken);
if (LOG.isDebugEnabled()) {
LOG.debug("Add timline delegation token into credentials: "
+ timelineDelegationToken);
}
DataOutputBuffer dob = new DataOutputBuffer();
credentials.writeTokenStorageToStream(dob);
tokens = ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
clc.setTokens(tokens);
}
Example 10: deserializeMetaData
import org.apache.hadoop.io.DataInputByteBuffer; // import the required package/class
/**
* A helper function to deserialize the metadata returned by ShuffleHandler.
* @param meta the metadata returned by the ShuffleHandler
* @return the port the Shuffle Handler is listening on to serve shuffle data.
*/
public static int deserializeMetaData(ByteBuffer meta) throws IOException {
//TODO this should be returning a class not just an int
DataInputByteBuffer in = new DataInputByteBuffer();
in.reset(meta);
int port = in.readInt();
return port;
}
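For reference (not part of the collected example), the matching serialization side simply writes the port through a DataOutputBuffer. A minimal sketch; the helper name serializeMetaData follows the ShuffleHandler convention but is shown here as an assumption:

// Illustrative counterpart: serialize the shuffle port into the ByteBuffer
// that deserializeMetaData(...) above reads back.
public static ByteBuffer serializeMetaData(int port) throws IOException {
  DataOutputBuffer portDob = new DataOutputBuffer();
  portDob.writeInt(port);
  return ByteBuffer.wrap(portDob.getData(), 0, portDob.getLength());
}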
Example 11: deserializeServiceData
import org.apache.hadoop.io.DataInputByteBuffer; // import the required package/class
static Token<JobTokenIdentifier> deserializeServiceData(ByteBuffer secret) throws IOException {
DataInputByteBuffer in = new DataInputByteBuffer();
in.reset(secret);
Token<JobTokenIdentifier> jt = new Token<JobTokenIdentifier>();
jt.readFields(in);
return jt;
}
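As a companion to the example above (not taken from the article), here is a minimal sketch of how such a service-data buffer can be produced: the job token is a Writable, so it can be written to a DataOutputBuffer and wrapped. The helper name serializeServiceData is an assumption.

// Illustrative counterpart: serialize a job token into the ByteBuffer
// that deserializeServiceData(...) above reads back.
static ByteBuffer serializeServiceData(Token<JobTokenIdentifier> jobToken) throws IOException {
  DataOutputBuffer jobTokenDob = new DataOutputBuffer();
  jobToken.write(jobTokenDob);
  return ByteBuffer.wrap(jobTokenDob.getData(), 0, jobTokenDob.getLength());
}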
Example 12: setupTokens
import org.apache.hadoop.io.DataInputByteBuffer; // import the required package/class
@Private
@VisibleForTesting
protected void setupTokens(
ContainerLaunchContext container, ContainerId containerID)
throws IOException {
Map<String, String> environment = container.getEnvironment();
environment.put(ApplicationConstants.APPLICATION_WEB_PROXY_BASE_ENV,
application.getWebProxyBase());
// Set AppSubmitTime to be consumable by the AM.
ApplicationId applicationId =
application.getAppAttemptId().getApplicationId();
environment.put(
ApplicationConstants.APP_SUBMIT_TIME_ENV,
String.valueOf(rmContext.getRMApps()
.get(applicationId)
.getSubmitTime()));
Credentials credentials = new Credentials();
DataInputByteBuffer dibb = new DataInputByteBuffer();
ByteBuffer tokens = container.getTokens();
if (tokens != null) {
// TODO: Don't do this kind of checks everywhere.
dibb.reset(tokens);
credentials.readTokenStorageStream(dibb);
tokens.rewind();
}
// Add AMRMToken
Token<AMRMTokenIdentifier> amrmToken = createAndSetAMRMToken();
if (amrmToken != null) {
credentials.addToken(amrmToken.getService(), amrmToken);
}
DataOutputBuffer dob = new DataOutputBuffer();
credentials.writeTokenStorageToStream(dob);
container.setTokens(ByteBuffer.wrap(dob.getData(), 0, dob.getLength()));
}
Example 13: decodeCredentials
import org.apache.hadoop.io.DataInputByteBuffer; // import the required package/class
/**
* Decodes {@link Credentials} from the given buffer.
* If the buffer is null or empty, it returns an empty Credentials.
*/
public static Credentials decodeCredentials(ByteBuffer buffer) throws IOException {
Credentials credentials = new Credentials();
if (buffer != null && buffer.hasRemaining()) {
DataInputByteBuffer in = new DataInputByteBuffer();
in.reset(buffer);
credentials.readTokenStorageStream(in);
}
return credentials;
}
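For completeness (not part of the collected example), the encoding direction is the mirror image: write the Credentials to a DataOutputBuffer and wrap the bytes. A minimal sketch; the helper name encodeCredentials is an assumption.

// Illustrative counterpart: encode Credentials into a ByteBuffer
// that decodeCredentials(...) above can read back.
public static ByteBuffer encodeCredentials(Credentials credentials) throws IOException {
  DataOutputBuffer dob = new DataOutputBuffer();
  credentials.writeTokenStorageToStream(dob);
  return ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
}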
Example 14: storeApplicationAttemptStateInternal
import org.apache.hadoop.io.DataInputByteBuffer; // import the required package/class
@Override
public synchronized void storeApplicationAttemptStateInternal(
ApplicationAttemptId appAttemptId,
ApplicationAttemptStateData attemptStateData)
throws Exception {
Credentials credentials = null;
if (attemptStateData.getAppAttemptTokens() != null) {
DataInputByteBuffer dibb = new DataInputByteBuffer();
credentials = new Credentials();
dibb.reset(attemptStateData.getAppAttemptTokens());
credentials.readTokenStorageStream(dibb);
}
ApplicationAttemptState attemptState =
new ApplicationAttemptState(appAttemptId,
attemptStateData.getMasterContainer(), credentials,
attemptStateData.getStartTime(),
attemptStateData.getMemorySeconds(),
attemptStateData.getVcoreSeconds());
ApplicationState appState = state.getApplicationState().get(
attemptState.getAttemptId().getApplicationId());
if (appState == null) {
throw new YarnRuntimeException("Application doesn't exist");
}
appState.attempts.put(attemptState.getAttemptId(), attemptState);
}
Example 15: updateApplicationAttemptStateInternal
import org.apache.hadoop.io.DataInputByteBuffer; // import the required package/class
@Override
public synchronized void updateApplicationAttemptStateInternal(
ApplicationAttemptId appAttemptId,
ApplicationAttemptStateData attemptStateData)
throws Exception {
Credentials credentials = null;
if (attemptStateData.getAppAttemptTokens() != null) {
DataInputByteBuffer dibb = new DataInputByteBuffer();
credentials = new Credentials();
dibb.reset(attemptStateData.getAppAttemptTokens());
credentials.readTokenStorageStream(dibb);
}
ApplicationAttemptState updatedAttemptState =
new ApplicationAttemptState(appAttemptId,
attemptStateData.getMasterContainer(), credentials,
attemptStateData.getStartTime(), attemptStateData.getState(),
attemptStateData.getFinalTrackingUrl(),
attemptStateData.getDiagnostics(),
attemptStateData.getFinalApplicationStatus(),
attemptStateData.getAMContainerExitStatus(),
attemptStateData.getFinishTime(),
attemptStateData.getMemorySeconds(),
attemptStateData.getVcoreSeconds());
ApplicationState appState =
state.getApplicationState().get(
updatedAttemptState.getAttemptId().getApplicationId());
if (appState == null) {
throw new YarnRuntimeException("Application doesn't exist");
}
LOG.info("Updating final state " + updatedAttemptState.getState()
+ " for attempt: " + updatedAttemptState.getAttemptId());
appState.attempts.put(updatedAttemptState.getAttemptId(),
updatedAttemptState);
}