This page collects typical usage examples of the Java class org.apache.hadoop.mapreduce.security.SecureShuffleUtils. If you have been wondering what SecureShuffleUtils is for, or how to use it in practice, the examples selected here should help.
The SecureShuffleUtils class belongs to the org.apache.hadoop.mapreduce.security package. 15 code examples of the class are shown below, sorted by popularity by default.
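Before the individual examples, here is a minimal sketch of the handshake that nearly all of them implement (this sketch is not taken from any of the projects below; the class name, URL, and token bytes are placeholders): the fetcher signs the request URL with the job token secret, sends the signature in an HTTP header, and checks that the server's reply header is an HMAC of that signature.

import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.URL;
import javax.crypto.SecretKey;
import org.apache.hadoop.mapreduce.security.SecureShuffleUtils;
import org.apache.hadoop.mapreduce.security.token.JobTokenSecretManager;

public class ShuffleHandshakeSketch {
  // Signs a shuffle URL, sends the hash, and verifies the server's reply.
  static void signedFetch(URL url, byte[] jobTokenPassword) throws IOException {
    SecretKey key = JobTokenSecretManager.createSecretKey(jobTokenPassword);
    // Canonicalize the URL and HMAC it with the job token secret.
    String msgToEncode = SecureShuffleUtils.buildMsgFrom(url);
    String encHash = SecureShuffleUtils.hashFromString(msgToEncode, key);
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.addRequestProperty(SecureShuffleUtils.HTTP_HEADER_URL_HASH, encHash);
    conn.connect();
    // The server replies with an HMAC of encHash; verifyReply throws an
    // IOException if the reply does not match.
    String replyHash =
        conn.getHeaderField(SecureShuffleUtils.HTTP_HEADER_REPLY_URL_HASH);
    SecureShuffleUtils.verifyReply(replyHash, encHash, key);
  }
}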
Example 1: getShuffleResponseCode
import org.apache.hadoop.mapreduce.security.SecureShuffleUtils; // import the required package/class
private static int getShuffleResponseCode(ShuffleHandler shuffle,
    Token<JobTokenIdentifier> jt) throws IOException {
  URL url = new URL("http://127.0.0.1:"
      + shuffle.getConfig().get(ShuffleHandler.SHUFFLE_PORT_CONFIG_KEY)
      + "/mapOutput?job=job_12345_0001&reduce=0&map=attempt_12345_1_m_1_0");
  HttpURLConnection conn = (HttpURLConnection) url.openConnection();
  String encHash = SecureShuffleUtils.hashFromString(
      SecureShuffleUtils.buildMsgFrom(url),
      JobTokenSecretManager.createSecretKey(jt.getPassword()));
  conn.addRequestProperty(
      SecureShuffleUtils.HTTP_HEADER_URL_HASH, encHash);
  conn.setRequestProperty(ShuffleHeader.HTTP_HEADER_NAME,
      ShuffleHeader.DEFAULT_HTTP_HEADER_NAME);
  conn.setRequestProperty(ShuffleHeader.HTTP_HEADER_VERSION,
      ShuffleHeader.DEFAULT_HTTP_HEADER_VERSION);
  conn.connect();
  int rc = conn.getResponseCode();
  conn.disconnect();
  return rc;
}
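For context, a helper like this is typically asserted against in a test, e.g. (a hypothetical call site, assuming a static JUnit assertEquals import): assertEquals(HttpURLConnection.HTTP_OK, getShuffleResponseCode(shuffle, jt)); — a request signed with the wrong secret should instead come back as HTTP_UNAUTHORIZED.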
Example 2: setupConnectionsWithRetry
import org.apache.hadoop.mapreduce.security.SecureShuffleUtils; // import the required package/class
private void setupConnectionsWithRetry(MapHost host,
    Set<TaskAttemptID> remaining, URL url) throws IOException {
  openConnectionWithRetry(host, remaining, url);
  if (stopped) {
    return;
  }
  // generate hash of the url
  String msgToEncode = SecureShuffleUtils.buildMsgFrom(url);
  String encHash = SecureShuffleUtils.hashFromString(msgToEncode,
      shuffleSecretKey);
  setupShuffleConnection(encHash);
  connect(connection, connectionTimeout);
  // verify that the thread wasn't stopped during calls to connect
  if (stopped) {
    return;
  }
  verifyConnection(url, msgToEncode, encHash);
}
Example 3: verifyConnection
import org.apache.hadoop.mapreduce.security.SecureShuffleUtils; // import the required package/class
private void verifyConnection(URL url, String msgToEncode, String encHash)
    throws IOException {
  // Validate response code
  int rc = connection.getResponseCode();
  if (rc != HttpURLConnection.HTTP_OK) {
    throw new IOException(
        "Got invalid response code " + rc + " from " + url +
        ": " + connection.getResponseMessage());
  }
  // get the shuffle version
  if (!ShuffleHeader.DEFAULT_HTTP_HEADER_NAME.equals(
      connection.getHeaderField(ShuffleHeader.HTTP_HEADER_NAME))
      || !ShuffleHeader.DEFAULT_HTTP_HEADER_VERSION.equals(
      connection.getHeaderField(ShuffleHeader.HTTP_HEADER_VERSION))) {
    throw new IOException("Incompatible shuffle response version");
  }
  // get the replyHash, which is the HMAC of the encHash we sent to the server
  String replyHash =
      connection.getHeaderField(SecureShuffleUtils.HTTP_HEADER_REPLY_URL_HASH);
  if (replyHash == null) {
    throw new IOException("security validation of TT Map output failed");
  }
  LOG.debug("url=" + msgToEncode + ";encHash=" + encHash
      + ";replyHash=" + replyHash);
  // verify that replyHash is the HMAC of encHash
  SecureShuffleUtils.verifyReply(replyHash, encHash, shuffleSecretKey);
  LOG.info("for url=" + msgToEncode + " sent hash and received reply");
}
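The reply header that verifyConnection checks is produced on the server side with SecureShuffleUtils.generateHash, the same call the mocked tests later on this page use to fabricate a valid reply. A rough sketch of the server's half, assuming a hypothetical HTTP response object:

String replyHash = SecureShuffleUtils.generateHash(encHash.getBytes(), key);
response.setHeader(SecureShuffleUtils.HTTP_HEADER_REPLY_URL_HASH, replyHash);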
Example 4: testCopyFromHostConnectionTimeout
import org.apache.hadoop.mapreduce.security.SecureShuffleUtils; // import the required package/class
@Test(timeout=30000)
public void testCopyFromHostConnectionTimeout() throws Exception {
  when(connection.getInputStream()).thenThrow(
      new SocketTimeoutException("This is a fake timeout :)"));
  Fetcher<Text,Text> underTest = new FakeFetcher<Text,Text>(job, id, ss, mm,
      r, metrics, except, key, connection);
  underTest.copyFromHost(host);
  verify(connection).addRequestProperty(
      SecureShuffleUtils.HTTP_HEADER_URL_HASH, encHash);
  verify(allErrs).increment(1);
  verify(ss).copyFailed(map1ID, host, false, false);
  verify(ss).copyFailed(map2ID, host, false, false);
  verify(ss).putBackKnownMapOutput(any(MapHost.class), eq(map1ID));
  verify(ss).putBackKnownMapOutput(any(MapHost.class), eq(map2ID));
}
Example 5: verifyConnection
import org.apache.hadoop.mapreduce.security.SecureShuffleUtils; // import the required package/class
private void verifyConnection(URL url, String msgToEncode, String encHash)
    throws IOException {
  // Validate response code
  int rc = connection.getResponseCode();
  if (rc != HttpURLConnection.HTTP_OK) {
    throw new IOException(
        "Got invalid response code " + rc + " from " + url +
        ": " + connection.getResponseMessage());
  }
  // get the shuffle version
  if (!ShuffleHeader.DEFAULT_HTTP_HEADER_NAME.equals(
      connection.getHeaderField(ShuffleHeader.HTTP_HEADER_NAME))
      || !ShuffleHeader.DEFAULT_HTTP_HEADER_VERSION.equals(
      connection.getHeaderField(ShuffleHeader.HTTP_HEADER_VERSION))) {
    throw new IOException("Incompatible shuffle response version");
  }
  // get the replyHash, which is the HMAC of the encHash we sent to the server
  String replyHash =
      connection.getHeaderField(SecureShuffleUtils.HTTP_HEADER_REPLY_URL_HASH);
  if (replyHash == null) {
    throw new IOException("security validation of TT Map output failed");
  }
  LOG.debug("url=" + msgToEncode + ";encHash=" + encHash
      + ";replyHash=" + replyHash);
  // verify that replyHash is the HMAC of encHash
  SecureShuffleUtils.verifyReply(replyHash, encHash, shuffleSecretKey);
  LOG.debug("for url=" + msgToEncode + " sent hash and received reply");
}
Example 6: setupConnectionsWithRetry
import org.apache.hadoop.mapreduce.security.SecureShuffleUtils; // import the required package/class
private void setupConnectionsWithRetry(URL url) throws IOException {
  openConnectionWithRetry(url);
  if (stopped) {
    return;
  }
  // generate hash of the url
  String msgToEncode = SecureShuffleUtils.buildMsgFrom(url);
  String encHash = SecureShuffleUtils.hashFromString(msgToEncode,
      shuffleSecretKey);
  setupShuffleConnection(encHash);
  connect(connection, connectionTimeout);
  // verify that the thread wasn't stopped during calls to connect
  if (stopped) {
    return;
  }
  verifyConnection(url, msgToEncode, encHash);
}
Example 7: setupShuffleConnection
import org.apache.hadoop.mapreduce.security.SecureShuffleUtils; // import the required package/class
private void setupShuffleConnection(String encHash) {
  // put url hash into http header
  connection.addRequestProperty(
      SecureShuffleUtils.HTTP_HEADER_URL_HASH, encHash);
  // set the read timeout
  connection.setReadTimeout(readTimeout);
  // put shuffle version into http header
  connection.addRequestProperty(ShuffleHeader.HTTP_HEADER_NAME,
      ShuffleHeader.DEFAULT_HTTP_HEADER_NAME);
  connection.addRequestProperty(ShuffleHeader.HTTP_HEADER_VERSION,
      ShuffleHeader.DEFAULT_HTTP_HEADER_VERSION);
}
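For reference, and worth double-checking against your Hadoop version: DEFAULT_HTTP_HEADER_NAME is "mapreduce" and DEFAULT_HTTP_HEADER_VERSION is "1.0.0", which is why the incompatible-version tests below stub values such as "other" and "1.0.1".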
Example 8: testReduceOutOfDiskSpace
import org.apache.hadoop.mapreduce.security.SecureShuffleUtils; // import the required package/class
@Test
public void testReduceOutOfDiskSpace() throws Throwable {
  LOG.info("testReduceOutOfDiskSpace");
  Fetcher<Text,Text> underTest = new FakeFetcher<Text,Text>(job, id, ss, mm,
      r, metrics, except, key, connection);
  String replyHash = SecureShuffleUtils.generateHash(encHash.getBytes(), key);
  ShuffleHeader header = new ShuffleHeader(map1ID.toString(), 10, 10, 1);
  ByteArrayOutputStream bout = new ByteArrayOutputStream();
  header.write(new DataOutputStream(bout));
  ByteArrayInputStream in = new ByteArrayInputStream(bout.toByteArray());
  when(connection.getResponseCode()).thenReturn(200);
  when(connection.getHeaderField(ShuffleHeader.HTTP_HEADER_NAME))
      .thenReturn(ShuffleHeader.DEFAULT_HTTP_HEADER_NAME);
  when(connection.getHeaderField(ShuffleHeader.HTTP_HEADER_VERSION))
      .thenReturn(ShuffleHeader.DEFAULT_HTTP_HEADER_VERSION);
  when(connection.getHeaderField(SecureShuffleUtils.HTTP_HEADER_REPLY_URL_HASH))
      .thenReturn(replyHash);
  when(connection.getInputStream()).thenReturn(in);
  when(mm.reserve(any(TaskAttemptID.class), anyLong(), anyInt()))
      .thenThrow(new DiskErrorException("No disk space available"));
  underTest.copyFromHost(host);
  verify(ss).reportLocalError(any(IOException.class));
}
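The remaining examples follow the same Mockito recipe as this one: stub the connection with a valid replyHash built via SecureShuffleUtils.generateHash so the security handshake succeeds, then force a failure at a later stage (bogus stream data, incompatible version headers, a null or failing memory reservation) and verify how copyFailed, putBackKnownMapOutput, or abort is invoked.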
Example 9: testCopyFromHostBogusHeader
import org.apache.hadoop.mapreduce.security.SecureShuffleUtils; // import the required package/class
@Test
public void testCopyFromHostBogusHeader() throws Exception {
  Fetcher<Text,Text> underTest = new FakeFetcher<Text,Text>(job, id, ss, mm,
      r, metrics, except, key, connection);
  String replyHash = SecureShuffleUtils.generateHash(encHash.getBytes(), key);
  when(connection.getResponseCode()).thenReturn(200);
  when(connection.getHeaderField(ShuffleHeader.HTTP_HEADER_NAME))
      .thenReturn(ShuffleHeader.DEFAULT_HTTP_HEADER_NAME);
  when(connection.getHeaderField(ShuffleHeader.HTTP_HEADER_VERSION))
      .thenReturn(ShuffleHeader.DEFAULT_HTTP_HEADER_VERSION);
  when(connection.getHeaderField(SecureShuffleUtils.HTTP_HEADER_REPLY_URL_HASH))
      .thenReturn(replyHash);
  ByteArrayInputStream in = new ByteArrayInputStream(
      "\u00010 BOGUS DATA\nBOGUS DATA\nBOGUS DATA\n".getBytes());
  when(connection.getInputStream()).thenReturn(in);
  underTest.copyFromHost(host);
  verify(connection).addRequestProperty(
      SecureShuffleUtils.HTTP_HEADER_URL_HASH, encHash);
  verify(allErrs).increment(1);
  verify(ss).copyFailed(map1ID, host, true, false);
  verify(ss).copyFailed(map2ID, host, true, false);
  verify(ss).putBackKnownMapOutput(any(MapHost.class), eq(map1ID));
  verify(ss).putBackKnownMapOutput(any(MapHost.class), eq(map2ID));
}
Example 10: testCopyFromHostIncompatibleShuffleVersion
import org.apache.hadoop.mapreduce.security.SecureShuffleUtils; // import the required package/class
@Test
public void testCopyFromHostIncompatibleShuffleVersion() throws Exception {
  String replyHash = SecureShuffleUtils.generateHash(encHash.getBytes(), key);
  when(connection.getResponseCode()).thenReturn(200);
  when(connection.getHeaderField(ShuffleHeader.HTTP_HEADER_NAME))
      .thenReturn("mapreduce").thenReturn("other").thenReturn("other");
  when(connection.getHeaderField(ShuffleHeader.HTTP_HEADER_VERSION))
      .thenReturn("1.0.1").thenReturn("1.0.0").thenReturn("1.0.1");
  when(connection.getHeaderField(
      SecureShuffleUtils.HTTP_HEADER_REPLY_URL_HASH)).thenReturn(replyHash);
  ByteArrayInputStream in = new ByteArrayInputStream(new byte[0]);
  when(connection.getInputStream()).thenReturn(in);
  for (int i = 0; i < 3; ++i) {
    Fetcher<Text,Text> underTest = new FakeFetcher<Text,Text>(job, id, ss, mm,
        r, metrics, except, key, connection);
    underTest.copyFromHost(host);
  }
  verify(connection, times(3)).addRequestProperty(
      SecureShuffleUtils.HTTP_HEADER_URL_HASH, encHash);
  verify(allErrs, times(3)).increment(1);
  verify(ss, times(3)).copyFailed(map1ID, host, false, false);
  verify(ss, times(3)).copyFailed(map2ID, host, false, false);
  verify(ss, times(3)).putBackKnownMapOutput(any(MapHost.class), eq(map1ID));
  verify(ss, times(3)).putBackKnownMapOutput(any(MapHost.class), eq(map2ID));
}
Example 11: testCopyFromHostIncompatibleShuffleVersionWithRetry
import org.apache.hadoop.mapreduce.security.SecureShuffleUtils; // import the required package/class
@Test
public void testCopyFromHostIncompatibleShuffleVersionWithRetry()
    throws Exception {
  String replyHash = SecureShuffleUtils.generateHash(encHash.getBytes(), key);
  when(connection.getResponseCode()).thenReturn(200);
  when(connection.getHeaderField(ShuffleHeader.HTTP_HEADER_NAME))
      .thenReturn("mapreduce").thenReturn("other").thenReturn("other");
  when(connection.getHeaderField(ShuffleHeader.HTTP_HEADER_VERSION))
      .thenReturn("1.0.1").thenReturn("1.0.0").thenReturn("1.0.1");
  when(connection.getHeaderField(
      SecureShuffleUtils.HTTP_HEADER_REPLY_URL_HASH)).thenReturn(replyHash);
  ByteArrayInputStream in = new ByteArrayInputStream(new byte[0]);
  when(connection.getInputStream()).thenReturn(in);
  for (int i = 0; i < 3; ++i) {
    Fetcher<Text,Text> underTest = new FakeFetcher<Text,Text>(jobWithRetry,
        id, ss, mm, r, metrics, except, key, connection);
    underTest.copyFromHost(host);
  }
  verify(connection, times(3)).addRequestProperty(
      SecureShuffleUtils.HTTP_HEADER_URL_HASH, encHash);
  verify(allErrs, times(3)).increment(1);
  verify(ss, times(3)).copyFailed(map1ID, host, false, false);
  verify(ss, times(3)).copyFailed(map2ID, host, false, false);
  verify(ss, times(3)).putBackKnownMapOutput(any(MapHost.class), eq(map1ID));
  verify(ss, times(3)).putBackKnownMapOutput(any(MapHost.class), eq(map2ID));
}
Example 12: testCopyFromHostWait
import org.apache.hadoop.mapreduce.security.SecureShuffleUtils; // import the required package/class
@Test
public void testCopyFromHostWait() throws Exception {
  Fetcher<Text,Text> underTest = new FakeFetcher<Text,Text>(job, id, ss, mm,
      r, metrics, except, key, connection);
  String replyHash = SecureShuffleUtils.generateHash(encHash.getBytes(), key);
  when(connection.getResponseCode()).thenReturn(200);
  when(connection.getHeaderField(SecureShuffleUtils.HTTP_HEADER_REPLY_URL_HASH))
      .thenReturn(replyHash);
  ShuffleHeader header = new ShuffleHeader(map1ID.toString(), 10, 10, 1);
  ByteArrayOutputStream bout = new ByteArrayOutputStream();
  header.write(new DataOutputStream(bout));
  ByteArrayInputStream in = new ByteArrayInputStream(bout.toByteArray());
  when(connection.getInputStream()).thenReturn(in);
  when(connection.getHeaderField(ShuffleHeader.HTTP_HEADER_NAME))
      .thenReturn(ShuffleHeader.DEFAULT_HTTP_HEADER_NAME);
  when(connection.getHeaderField(ShuffleHeader.HTTP_HEADER_VERSION))
      .thenReturn(ShuffleHeader.DEFAULT_HTTP_HEADER_VERSION);
  // Defaults to null, which is what we want to test
  when(mm.reserve(any(TaskAttemptID.class), anyLong(), anyInt()))
      .thenReturn(null);
  underTest.copyFromHost(host);
  verify(connection)
      .addRequestProperty(SecureShuffleUtils.HTTP_HEADER_URL_HASH,
          encHash);
  verify(allErrs, never()).increment(1);
  verify(ss, never()).copyFailed(map1ID, host, true, false);
  verify(ss, never()).copyFailed(map2ID, host, true, false);
  verify(ss).putBackKnownMapOutput(any(MapHost.class), eq(map1ID));
  verify(ss).putBackKnownMapOutput(any(MapHost.class), eq(map2ID));
}
Example 13: testCopyFromHostCompressFailure
import org.apache.hadoop.mapreduce.security.SecureShuffleUtils; // import the required package/class
@SuppressWarnings("unchecked")
@Test(timeout=10000)
public void testCopyFromHostCompressFailure() throws Exception {
  InMemoryMapOutput<Text, Text> immo = mock(InMemoryMapOutput.class);
  Fetcher<Text,Text> underTest = new FakeFetcher<Text,Text>(job, id, ss, mm,
      r, metrics, except, key, connection);
  String replyHash = SecureShuffleUtils.generateHash(encHash.getBytes(), key);
  when(connection.getResponseCode()).thenReturn(200);
  when(connection.getHeaderField(SecureShuffleUtils.HTTP_HEADER_REPLY_URL_HASH))
      .thenReturn(replyHash);
  ShuffleHeader header = new ShuffleHeader(map1ID.toString(), 10, 10, 1);
  ByteArrayOutputStream bout = new ByteArrayOutputStream();
  header.write(new DataOutputStream(bout));
  ByteArrayInputStream in = new ByteArrayInputStream(bout.toByteArray());
  when(connection.getInputStream()).thenReturn(in);
  when(connection.getHeaderField(ShuffleHeader.HTTP_HEADER_NAME))
      .thenReturn(ShuffleHeader.DEFAULT_HTTP_HEADER_NAME);
  when(connection.getHeaderField(ShuffleHeader.HTTP_HEADER_VERSION))
      .thenReturn(ShuffleHeader.DEFAULT_HTTP_HEADER_VERSION);
  when(mm.reserve(any(TaskAttemptID.class), anyLong(), anyInt()))
      .thenReturn(immo);
  doThrow(new java.lang.InternalError()).when(immo)
      .shuffle(any(MapHost.class), any(InputStream.class), anyLong(),
          anyLong(), any(ShuffleClientMetrics.class), any(Reporter.class));
  underTest.copyFromHost(host);
  verify(connection)
      .addRequestProperty(SecureShuffleUtils.HTTP_HEADER_URL_HASH,
          encHash);
  verify(ss, times(1)).copyFailed(map1ID, host, true, false);
}
Example 14: testCopyFromHostWithRetryThenTimeout
import org.apache.hadoop.mapreduce.security.SecureShuffleUtils; // import the required package/class
@SuppressWarnings("unchecked")
@Test(timeout=10000)
public void testCopyFromHostWithRetryThenTimeout() throws Exception {
  InMemoryMapOutput<Text, Text> immo = mock(InMemoryMapOutput.class);
  Fetcher<Text,Text> underTest = new FakeFetcher<Text,Text>(jobWithRetry,
      id, ss, mm, r, metrics, except, key, connection);
  String replyHash = SecureShuffleUtils.generateHash(encHash.getBytes(), key);
  when(connection.getResponseCode()).thenReturn(200)
      .thenThrow(new SocketTimeoutException("forced timeout"));
  when(connection.getHeaderField(SecureShuffleUtils.HTTP_HEADER_REPLY_URL_HASH))
      .thenReturn(replyHash);
  ShuffleHeader header = new ShuffleHeader(map1ID.toString(), 10, 10, 1);
  ByteArrayOutputStream bout = new ByteArrayOutputStream();
  header.write(new DataOutputStream(bout));
  ByteArrayInputStream in = new ByteArrayInputStream(bout.toByteArray());
  when(connection.getInputStream()).thenReturn(in);
  when(connection.getHeaderField(ShuffleHeader.HTTP_HEADER_NAME))
      .thenReturn(ShuffleHeader.DEFAULT_HTTP_HEADER_NAME);
  when(connection.getHeaderField(ShuffleHeader.HTTP_HEADER_VERSION))
      .thenReturn(ShuffleHeader.DEFAULT_HTTP_HEADER_VERSION);
  when(mm.reserve(any(TaskAttemptID.class), anyLong(), anyInt()))
      .thenReturn(immo);
  doThrow(new IOException("forced error")).when(immo).shuffle(
      any(MapHost.class), any(InputStream.class), anyLong(),
      anyLong(), any(ShuffleClientMetrics.class), any(Reporter.class));
  underTest.copyFromHost(host);
  verify(allErrs).increment(1);
  verify(ss).copyFailed(map1ID, host, false, false);
}
Example 15: testCopyFromHostWithRetryUnreserve
import org.apache.hadoop.mapreduce.security.SecureShuffleUtils; // import the required package/class
@SuppressWarnings("unchecked")
@Test(timeout=10000)
public void testCopyFromHostWithRetryUnreserve() throws Exception {
  InMemoryMapOutput<Text, Text> immo = mock(InMemoryMapOutput.class);
  Fetcher<Text,Text> underTest = new FakeFetcher<Text,Text>(jobWithRetry,
      id, ss, mm, r, metrics, except, key, connection);
  String replyHash = SecureShuffleUtils.generateHash(encHash.getBytes(), key);
  when(connection.getResponseCode()).thenReturn(200);
  when(connection.getHeaderField(SecureShuffleUtils.HTTP_HEADER_REPLY_URL_HASH))
      .thenReturn(replyHash);
  ShuffleHeader header = new ShuffleHeader(map1ID.toString(), 10, 10, 1);
  ByteArrayOutputStream bout = new ByteArrayOutputStream();
  header.write(new DataOutputStream(bout));
  ByteArrayInputStream in = new ByteArrayInputStream(bout.toByteArray());
  when(connection.getInputStream()).thenReturn(in);
  when(connection.getHeaderField(ShuffleHeader.HTTP_HEADER_NAME))
      .thenReturn(ShuffleHeader.DEFAULT_HTTP_HEADER_NAME);
  when(connection.getHeaderField(ShuffleHeader.HTTP_HEADER_VERSION))
      .thenReturn(ShuffleHeader.DEFAULT_HTTP_HEADER_VERSION);
  // Verify that unreserve occurs if an exception happens after shuffle
  // buffer is reserved.
  when(mm.reserve(any(TaskAttemptID.class), anyLong(), anyInt()))
      .thenReturn(immo);
  doThrow(new IOException("forced error")).when(immo).shuffle(
      any(MapHost.class), any(InputStream.class), anyLong(),
      anyLong(), any(ShuffleClientMetrics.class), any(Reporter.class));
  underTest.copyFromHost(host);
  verify(immo).abort();
}