This article collects typical usage examples of the Java method org.apache.hadoop.mapreduce.security.SecureShuffleUtils.generateHash. If you have been wondering how to use SecureShuffleUtils.generateHash in Java, or what it is for, the curated examples below may help. You can also explore further usage examples of its enclosing class, org.apache.hadoop.mapreduce.security.SecureShuffleUtils.
Below are 15 code examples of SecureShuffleUtils.generateHash, sorted by popularity by default. Vote for the examples you find useful; your feedback helps the system recommend better Java samples.
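Before the examples, here is a minimal, self-contained sketch of the round trip these tests exercise: generateHash produces a URL-safe Base64-encoded HMAC of a message under a shared job-token SecretKey, and verifyReply checks such a hash. The class name HashRoundTripSketch, the demo token bytes, and the sample URL are illustrative placeholders, not part of the Hadoop API:

import javax.crypto.SecretKey;
import org.apache.hadoop.mapreduce.security.SecureShuffleUtils;
import org.apache.hadoop.mapreduce.security.token.JobTokenSecretManager;

public class HashRoundTripSketch {
  public static void main(String[] args) throws Exception {
    // Derive a SecretKey from raw token bytes (demo material, not a real job token).
    SecretKey key = JobTokenSecretManager.createSecretKey("demo-token".getBytes("UTF-8"));
    // The message a fetcher would sign, typically the shuffle request URL.
    String msg = "/mapOutput?job=job_demo&reduce=0&map=attempt_demo";
    // generateHash returns a URL-safe Base64-encoded HMAC of msg under key.
    String hash = SecureShuffleUtils.generateHash(msg.getBytes("UTF-8"), key);
    // verifyReply recomputes the hash and throws IOException on mismatch.
    SecureShuffleUtils.verifyReply(hash, msg, key);
    System.out.println("verified: " + hash);
  }
}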
Example 1: testReduceOutOfDiskSpace
import org.apache.hadoop.mapreduce.security.SecureShuffleUtils; // import the package/class the method depends on
@Test
public void testReduceOutOfDiskSpace() throws Throwable {
  LOG.info("testReduceOutOfDiskSpace");

  Fetcher<Text,Text> underTest = new FakeFetcher<Text,Text>(job, id, ss, mm,
      r, metrics, except, key, connection);

  String replyHash = SecureShuffleUtils.generateHash(encHash.getBytes(), key);
  ShuffleHeader header = new ShuffleHeader(map1ID.toString(), 10, 10, 1);
  ByteArrayOutputStream bout = new ByteArrayOutputStream();
  header.write(new DataOutputStream(bout));
  ByteArrayInputStream in = new ByteArrayInputStream(bout.toByteArray());

  when(connection.getResponseCode()).thenReturn(200);
  when(connection.getHeaderField(ShuffleHeader.HTTP_HEADER_NAME))
      .thenReturn(ShuffleHeader.DEFAULT_HTTP_HEADER_NAME);
  when(connection.getHeaderField(ShuffleHeader.HTTP_HEADER_VERSION))
      .thenReturn(ShuffleHeader.DEFAULT_HTTP_HEADER_VERSION);
  when(connection.getHeaderField(SecureShuffleUtils.HTTP_HEADER_REPLY_URL_HASH))
      .thenReturn(replyHash);
  when(connection.getInputStream()).thenReturn(in);
  when(mm.reserve(any(TaskAttemptID.class), anyLong(), anyInt()))
      .thenThrow(new DiskErrorException("No disk space available"));

  underTest.copyFromHost(host);
  verify(ss).reportLocalError(any(IOException.class));
}
Example 2: testCopyFromHostBogusHeader
import org.apache.hadoop.mapreduce.security.SecureShuffleUtils; // import the package/class the method depends on
@Test
public void testCopyFromHostBogusHeader() throws Exception {
  Fetcher<Text,Text> underTest = new FakeFetcher<Text,Text>(job, id, ss, mm,
      r, metrics, except, key, connection);

  String replyHash = SecureShuffleUtils.generateHash(encHash.getBytes(), key);

  when(connection.getResponseCode()).thenReturn(200);
  when(connection.getHeaderField(ShuffleHeader.HTTP_HEADER_NAME))
      .thenReturn(ShuffleHeader.DEFAULT_HTTP_HEADER_NAME);
  when(connection.getHeaderField(ShuffleHeader.HTTP_HEADER_VERSION))
      .thenReturn(ShuffleHeader.DEFAULT_HTTP_HEADER_VERSION);
  when(connection.getHeaderField(SecureShuffleUtils.HTTP_HEADER_REPLY_URL_HASH))
      .thenReturn(replyHash);
  ByteArrayInputStream in = new ByteArrayInputStream(
      "\u00010 BOGUS DATA\nBOGUS DATA\nBOGUS DATA\n".getBytes());
  when(connection.getInputStream()).thenReturn(in);

  underTest.copyFromHost(host);

  verify(connection).addRequestProperty(
      SecureShuffleUtils.HTTP_HEADER_URL_HASH, encHash);
  verify(allErrs).increment(1);
  verify(ss).copyFailed(map1ID, host, true, false);
  verify(ss).copyFailed(map2ID, host, true, false);
  verify(ss).putBackKnownMapOutput(any(MapHost.class), eq(map1ID));
  verify(ss).putBackKnownMapOutput(any(MapHost.class), eq(map2ID));
}
Example 3: testCopyFromHostIncompatibleShuffleVersion
import org.apache.hadoop.mapreduce.security.SecureShuffleUtils; // import the package/class the method depends on
@Test
public void testCopyFromHostIncompatibleShuffleVersion() throws Exception {
  String replyHash = SecureShuffleUtils.generateHash(encHash.getBytes(), key);

  when(connection.getResponseCode()).thenReturn(200);
  when(connection.getHeaderField(ShuffleHeader.HTTP_HEADER_NAME))
      .thenReturn("mapreduce").thenReturn("other").thenReturn("other");
  when(connection.getHeaderField(ShuffleHeader.HTTP_HEADER_VERSION))
      .thenReturn("1.0.1").thenReturn("1.0.0").thenReturn("1.0.1");
  when(connection.getHeaderField(
      SecureShuffleUtils.HTTP_HEADER_REPLY_URL_HASH)).thenReturn(replyHash);
  ByteArrayInputStream in = new ByteArrayInputStream(new byte[0]);
  when(connection.getInputStream()).thenReturn(in);

  for (int i = 0; i < 3; ++i) {
    Fetcher<Text,Text> underTest = new FakeFetcher<Text,Text>(job, id, ss, mm,
        r, metrics, except, key, connection);
    underTest.copyFromHost(host);
  }

  verify(connection, times(3)).addRequestProperty(
      SecureShuffleUtils.HTTP_HEADER_URL_HASH, encHash);
  verify(allErrs, times(3)).increment(1);
  verify(ss, times(3)).copyFailed(map1ID, host, false, false);
  verify(ss, times(3)).copyFailed(map2ID, host, false, false);
  verify(ss, times(3)).putBackKnownMapOutput(any(MapHost.class), eq(map1ID));
  verify(ss, times(3)).putBackKnownMapOutput(any(MapHost.class), eq(map2ID));
}
Example 4: testCopyFromHostIncompatibleShuffleVersionWithRetry
import org.apache.hadoop.mapreduce.security.SecureShuffleUtils; // import the package/class the method depends on
@Test
public void testCopyFromHostIncompatibleShuffleVersionWithRetry()
    throws Exception {
  String replyHash = SecureShuffleUtils.generateHash(encHash.getBytes(), key);

  when(connection.getResponseCode()).thenReturn(200);
  when(connection.getHeaderField(ShuffleHeader.HTTP_HEADER_NAME))
      .thenReturn("mapreduce").thenReturn("other").thenReturn("other");
  when(connection.getHeaderField(ShuffleHeader.HTTP_HEADER_VERSION))
      .thenReturn("1.0.1").thenReturn("1.0.0").thenReturn("1.0.1");
  when(connection.getHeaderField(
      SecureShuffleUtils.HTTP_HEADER_REPLY_URL_HASH)).thenReturn(replyHash);
  ByteArrayInputStream in = new ByteArrayInputStream(new byte[0]);
  when(connection.getInputStream()).thenReturn(in);

  for (int i = 0; i < 3; ++i) {
    Fetcher<Text,Text> underTest = new FakeFetcher<Text,Text>(jobWithRetry,
        id, ss, mm, r, metrics, except, key, connection);
    underTest.copyFromHost(host);
  }

  verify(connection, times(3)).addRequestProperty(
      SecureShuffleUtils.HTTP_HEADER_URL_HASH, encHash);
  verify(allErrs, times(3)).increment(1);
  verify(ss, times(3)).copyFailed(map1ID, host, false, false);
  verify(ss, times(3)).copyFailed(map2ID, host, false, false);
  verify(ss, times(3)).putBackKnownMapOutput(any(MapHost.class), eq(map1ID));
  verify(ss, times(3)).putBackKnownMapOutput(any(MapHost.class), eq(map2ID));
}
Example 5: testCopyFromHostWait
import org.apache.hadoop.mapreduce.security.SecureShuffleUtils; // import the package/class the method depends on
@Test
public void testCopyFromHostWait() throws Exception {
  Fetcher<Text,Text> underTest = new FakeFetcher<Text,Text>(job, id, ss, mm,
      r, metrics, except, key, connection);

  String replyHash = SecureShuffleUtils.generateHash(encHash.getBytes(), key);

  when(connection.getResponseCode()).thenReturn(200);
  when(connection.getHeaderField(SecureShuffleUtils.HTTP_HEADER_REPLY_URL_HASH))
      .thenReturn(replyHash);
  ShuffleHeader header = new ShuffleHeader(map1ID.toString(), 10, 10, 1);
  ByteArrayOutputStream bout = new ByteArrayOutputStream();
  header.write(new DataOutputStream(bout));
  ByteArrayInputStream in = new ByteArrayInputStream(bout.toByteArray());
  when(connection.getInputStream()).thenReturn(in);
  when(connection.getHeaderField(ShuffleHeader.HTTP_HEADER_NAME))
      .thenReturn(ShuffleHeader.DEFAULT_HTTP_HEADER_NAME);
  when(connection.getHeaderField(ShuffleHeader.HTTP_HEADER_VERSION))
      .thenReturn(ShuffleHeader.DEFAULT_HTTP_HEADER_VERSION);
  // Defaults to null, which is what we want to test
  when(mm.reserve(any(TaskAttemptID.class), anyLong(), anyInt()))
      .thenReturn(null);

  underTest.copyFromHost(host);

  verify(connection)
      .addRequestProperty(SecureShuffleUtils.HTTP_HEADER_URL_HASH,
          encHash);
  verify(allErrs, never()).increment(1);
  verify(ss, never()).copyFailed(map1ID, host, true, false);
  verify(ss, never()).copyFailed(map2ID, host, true, false);
  verify(ss).putBackKnownMapOutput(any(MapHost.class), eq(map1ID));
  verify(ss).putBackKnownMapOutput(any(MapHost.class), eq(map2ID));
}
Example 6: testCopyFromHostCompressFailure
import org.apache.hadoop.mapreduce.security.SecureShuffleUtils; // import the package/class the method depends on
@SuppressWarnings("unchecked")
@Test(timeout=10000)
public void testCopyFromHostCompressFailure() throws Exception {
  InMemoryMapOutput<Text, Text> immo = mock(InMemoryMapOutput.class);
  Fetcher<Text,Text> underTest = new FakeFetcher<Text,Text>(job, id, ss, mm,
      r, metrics, except, key, connection);
  String replyHash = SecureShuffleUtils.generateHash(encHash.getBytes(), key);

  when(connection.getResponseCode()).thenReturn(200);
  when(connection.getHeaderField(SecureShuffleUtils.HTTP_HEADER_REPLY_URL_HASH))
      .thenReturn(replyHash);
  ShuffleHeader header = new ShuffleHeader(map1ID.toString(), 10, 10, 1);
  ByteArrayOutputStream bout = new ByteArrayOutputStream();
  header.write(new DataOutputStream(bout));
  ByteArrayInputStream in = new ByteArrayInputStream(bout.toByteArray());
  when(connection.getInputStream()).thenReturn(in);
  when(connection.getHeaderField(ShuffleHeader.HTTP_HEADER_NAME))
      .thenReturn(ShuffleHeader.DEFAULT_HTTP_HEADER_NAME);
  when(connection.getHeaderField(ShuffleHeader.HTTP_HEADER_VERSION))
      .thenReturn(ShuffleHeader.DEFAULT_HTTP_HEADER_VERSION);
  when(mm.reserve(any(TaskAttemptID.class), anyLong(), anyInt()))
      .thenReturn(immo);
  doThrow(new java.lang.InternalError()).when(immo)
      .shuffle(any(MapHost.class), any(InputStream.class), anyLong(),
          anyLong(), any(ShuffleClientMetrics.class), any(Reporter.class));

  underTest.copyFromHost(host);

  verify(connection)
      .addRequestProperty(SecureShuffleUtils.HTTP_HEADER_URL_HASH,
          encHash);
  verify(ss, times(1)).copyFailed(map1ID, host, true, false);
}
Example 7: testCopyFromHostWithRetryThenTimeout
import org.apache.hadoop.mapreduce.security.SecureShuffleUtils; // import the package/class the method depends on
@SuppressWarnings("unchecked")
@Test(timeout=10000)
public void testCopyFromHostWithRetryThenTimeout() throws Exception {
  InMemoryMapOutput<Text, Text> immo = mock(InMemoryMapOutput.class);
  Fetcher<Text,Text> underTest = new FakeFetcher<Text,Text>(jobWithRetry,
      id, ss, mm, r, metrics, except, key, connection);
  String replyHash = SecureShuffleUtils.generateHash(encHash.getBytes(), key);

  when(connection.getResponseCode()).thenReturn(200)
      .thenThrow(new SocketTimeoutException("forced timeout"));
  when(connection.getHeaderField(SecureShuffleUtils.HTTP_HEADER_REPLY_URL_HASH))
      .thenReturn(replyHash);
  ShuffleHeader header = new ShuffleHeader(map1ID.toString(), 10, 10, 1);
  ByteArrayOutputStream bout = new ByteArrayOutputStream();
  header.write(new DataOutputStream(bout));
  ByteArrayInputStream in = new ByteArrayInputStream(bout.toByteArray());
  when(connection.getInputStream()).thenReturn(in);
  when(connection.getHeaderField(ShuffleHeader.HTTP_HEADER_NAME))
      .thenReturn(ShuffleHeader.DEFAULT_HTTP_HEADER_NAME);
  when(connection.getHeaderField(ShuffleHeader.HTTP_HEADER_VERSION))
      .thenReturn(ShuffleHeader.DEFAULT_HTTP_HEADER_VERSION);
  when(mm.reserve(any(TaskAttemptID.class), anyLong(), anyInt()))
      .thenReturn(immo);
  doThrow(new IOException("forced error")).when(immo).shuffle(
      any(MapHost.class), any(InputStream.class), anyLong(),
      anyLong(), any(ShuffleClientMetrics.class), any(Reporter.class));

  underTest.copyFromHost(host);

  verify(allErrs).increment(1);
  verify(ss).copyFailed(map1ID, host, false, false);
}
Example 8: testCopyFromHostWithRetryUnreserve
import org.apache.hadoop.mapreduce.security.SecureShuffleUtils; // import the package/class the method depends on
@SuppressWarnings("unchecked")
@Test(timeout=10000)
public void testCopyFromHostWithRetryUnreserve() throws Exception {
  InMemoryMapOutput<Text, Text> immo = mock(InMemoryMapOutput.class);
  Fetcher<Text,Text> underTest = new FakeFetcher<Text,Text>(jobWithRetry,
      id, ss, mm, r, metrics, except, key, connection);
  String replyHash = SecureShuffleUtils.generateHash(encHash.getBytes(), key);

  when(connection.getResponseCode()).thenReturn(200);
  when(connection.getHeaderField(SecureShuffleUtils.HTTP_HEADER_REPLY_URL_HASH))
      .thenReturn(replyHash);
  ShuffleHeader header = new ShuffleHeader(map1ID.toString(), 10, 10, 1);
  ByteArrayOutputStream bout = new ByteArrayOutputStream();
  header.write(new DataOutputStream(bout));
  ByteArrayInputStream in = new ByteArrayInputStream(bout.toByteArray());
  when(connection.getInputStream()).thenReturn(in);
  when(connection.getHeaderField(ShuffleHeader.HTTP_HEADER_NAME))
      .thenReturn(ShuffleHeader.DEFAULT_HTTP_HEADER_NAME);
  when(connection.getHeaderField(ShuffleHeader.HTTP_HEADER_VERSION))
      .thenReturn(ShuffleHeader.DEFAULT_HTTP_HEADER_VERSION);

  // Verify that unreserve occurs if an exception happens after the shuffle
  // buffer is reserved.
  when(mm.reserve(any(TaskAttemptID.class), anyLong(), anyInt()))
      .thenReturn(immo);
  doThrow(new IOException("forced error")).when(immo).shuffle(
      any(MapHost.class), any(InputStream.class), anyLong(),
      anyLong(), any(ShuffleClientMetrics.class), any(Reporter.class));

  underTest.copyFromHost(host);

  verify(immo).abort();
}
Example 9: verifyRequest
import org.apache.hadoop.mapreduce.security.SecureShuffleUtils; // import the package/class the method depends on
/**
 * Verify that the request carries the correct hash of the URL,
 * and add a field to the reply header with the hash of that hash.
 * @param request the incoming fetcher request
 * @param response the reply being built for the fetcher
 * @param tracker the TaskTracker whose secret manager holds the job token
 * @param jobId the job whose token secret is used
 * @throws IOException
 */
private void verifyRequest(HttpServletRequest request,
    HttpServletResponse response, TaskTracker tracker, String jobId)
    throws IOException {
  SecretKey tokenSecret = tracker.getJobTokenSecretManager()
      .retrieveTokenSecret(jobId);
  // message to hash
  String enc_str = SecureShuffleUtils.buildMsgFrom(request);
  // hash from the fetcher
  String urlHashStr =
      request.getHeader(SecureShuffleUtils.HTTP_HEADER_URL_HASH);
  if (urlHashStr == null) {
    response.sendError(HttpServletResponse.SC_UNAUTHORIZED);
    throw new IOException("fetcher cannot be authenticated " +
        request.getRemoteHost());
  }
  int len = urlHashStr.length();
  LOG.debug("verifying request. enc_str=" + enc_str + "; hash=..." +
      urlHashStr.substring(len - len / 2, len - 1)); // half of the hash for debug
  // verify - throws an exception on mismatch
  try {
    SecureShuffleUtils.verifyReply(urlHashStr, enc_str, tokenSecret);
  } catch (IOException ioe) {
    response.sendError(HttpServletResponse.SC_UNAUTHORIZED);
    throw ioe;
  }
  // verification passed - encode the reply
  String reply = SecureShuffleUtils.generateHash(urlHashStr.getBytes(), tokenSecret);
  response.addHeader(SecureShuffleUtils.HTTP_HEADER_REPLY_URL_HASH, reply);
  len = reply.length();
  LOG.debug("Fetcher request verified. enc_str=" + enc_str + "; reply="
      + reply.substring(len - len / 2, len - 1));
}
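The verifyRequest method above is the server half of the shuffle handshake. For context, here is a condensed sketch of the fetcher half, assuming a pre-opened HttpURLConnection; the class and method names, the urlString argument, and the error message are illustrative, while hashFromString, verifyReply, and the two header constants are real SecureShuffleUtils members:

import java.io.IOException;
import java.net.HttpURLConnection;
import javax.crypto.SecretKey;
import org.apache.hadoop.mapreduce.security.SecureShuffleUtils;

class FetcherHandshakeSketch {
  // Send the URL hash, then check the server's hash-of-the-hash reply.
  static void handshake(HttpURLConnection connection, String urlString,
      SecretKey shuffleKey) throws IOException {
    // Hash the request URL with the shared job-token key...
    String encHash = SecureShuffleUtils.hashFromString(urlString, shuffleKey);
    // ...and send it so the server can authenticate this fetcher.
    connection.addRequestProperty(
        SecureShuffleUtils.HTTP_HEADER_URL_HASH, encHash);
    connection.connect();
    // The server answered with generateHash(encHash, key); verifying it
    // proves the server also holds the job token.
    String replyHash = connection.getHeaderField(
        SecureShuffleUtils.HTTP_HEADER_REPLY_URL_HASH);
    if (replyHash == null) {
      throw new IOException("security validation of shuffle reply failed");
    }
    SecureShuffleUtils.verifyReply(replyHash, encHash, shuffleKey);
  }
}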
Example 10: verifyRequest
import org.apache.hadoop.mapreduce.security.SecureShuffleUtils; // import the package/class the method depends on
/**
 * Verify that the request carries the correct hash of the URL,
 * and add a field to the reply header with the hash of that hash.
 * @param request the incoming fetcher request
 * @param response the reply being built for the fetcher
 * @param tracker the TaskTracker whose secret manager holds the job token
 * @param jobId the job whose token secret is used
 * @throws IOException
 */
private void verifyRequest(HttpServletRequest request,
    HttpServletResponse response, TaskTracker tracker, String jobId)
    throws IOException {
  SecretKey tokenSecret = tracker.getJobTokenSecretManager()
      .retrieveTokenSecret(jobId);
  // message to hash
  String enc_str = SecureShuffleUtils.buildMsgFrom(request);
  // hash from the fetcher
  String urlHashStr =
      request.getHeader(SecureShuffleUtils.HTTP_HEADER_URL_HASH);
  if (urlHashStr == null) {
    response.sendError(HttpServletResponse.SC_UNAUTHORIZED);
    throw new IOException("fetcher cannot be authenticated");
  }
  int len = urlHashStr.length();
  LOG.debug("verifying request. enc_str=" + enc_str + "; hash=..." +
      urlHashStr.substring(len - len / 2, len - 1)); // half of the hash for debug
  // verify - throws an exception on mismatch
  try {
    SecureShuffleUtils.verifyReply(urlHashStr, enc_str, tokenSecret);
  } catch (IOException ioe) {
    response.sendError(HttpServletResponse.SC_UNAUTHORIZED);
    throw ioe;
  }
  // verification passed - encode the reply
  String reply = SecureShuffleUtils.generateHash(urlHashStr.getBytes(), tokenSecret);
  response.addHeader(SecureShuffleUtils.HTTP_HEADER_REPLY_URL_HASH, reply);
  len = reply.length();
  LOG.debug("Fetcher request verified. enc_str=" + enc_str + "; reply="
      + reply.substring(len - len / 2, len - 1));
}
Example 11: testCopyFromHostWithRetry
import org.apache.hadoop.mapreduce.security.SecureShuffleUtils; // import the package/class the method depends on
@SuppressWarnings("unchecked")
@Test(timeout=10000)
public void testCopyFromHostWithRetry() throws Exception {
  InMemoryMapOutput<Text, Text> immo = mock(InMemoryMapOutput.class);
  ss = mock(ShuffleSchedulerImpl.class);
  Fetcher<Text,Text> underTest = new FakeFetcher<Text,Text>(jobWithRetry,
      id, ss, mm, r, metrics, except, key, connection, true);
  String replyHash = SecureShuffleUtils.generateHash(encHash.getBytes(), key);

  when(connection.getResponseCode()).thenReturn(200);
  when(connection.getHeaderField(SecureShuffleUtils.HTTP_HEADER_REPLY_URL_HASH))
      .thenReturn(replyHash);
  ShuffleHeader header = new ShuffleHeader(map1ID.toString(), 10, 10, 1);
  ByteArrayOutputStream bout = new ByteArrayOutputStream();
  header.write(new DataOutputStream(bout));
  ByteArrayInputStream in = new ByteArrayInputStream(bout.toByteArray());
  when(connection.getInputStream()).thenReturn(in);
  when(connection.getHeaderField(ShuffleHeader.HTTP_HEADER_NAME))
      .thenReturn(ShuffleHeader.DEFAULT_HTTP_HEADER_NAME);
  when(connection.getHeaderField(ShuffleHeader.HTTP_HEADER_VERSION))
      .thenReturn(ShuffleHeader.DEFAULT_HTTP_HEADER_VERSION);
  when(mm.reserve(any(TaskAttemptID.class), anyLong(), anyInt()))
      .thenReturn(immo);

  final long retryTime = Time.monotonicNow();
  doAnswer(new Answer<Void>() {
    public Void answer(InvocationOnMock ignore) throws IOException {
      // Emulate the host being down for 3 seconds.
      if ((Time.monotonicNow() - retryTime) <= 3000) {
        throw new java.lang.InternalError();
      }
      return null;
    }
  }).when(immo).shuffle(any(MapHost.class), any(InputStream.class), anyLong(),
      anyLong(), any(ShuffleClientMetrics.class), any(Reporter.class));

  underTest.copyFromHost(host);
  verify(ss, never()).copyFailed(any(TaskAttemptID.class), any(MapHost.class),
      anyBoolean(), anyBoolean());
}
Example 12: testCopyFromHostExtraBytes
import org.apache.hadoop.mapreduce.security.SecureShuffleUtils; // import the package/class the method depends on
@Test
public void testCopyFromHostExtraBytes() throws Exception {
  Fetcher<Text,Text> underTest = new FakeFetcher<Text,Text>(job, id, ss, mm,
      r, metrics, except, key, connection);
  String replyHash = SecureShuffleUtils.generateHash(encHash.getBytes(), key);

  when(connection.getResponseCode()).thenReturn(200);
  when(connection.getHeaderField(ShuffleHeader.HTTP_HEADER_NAME))
      .thenReturn(ShuffleHeader.DEFAULT_HTTP_HEADER_NAME);
  when(connection.getHeaderField(ShuffleHeader.HTTP_HEADER_VERSION))
      .thenReturn(ShuffleHeader.DEFAULT_HTTP_HEADER_VERSION);
  when(connection.getHeaderField(
      SecureShuffleUtils.HTTP_HEADER_REPLY_URL_HASH)).thenReturn(replyHash);
  ShuffleHeader header = new ShuffleHeader(map1ID.toString(), 14, 10, 1);
  ByteArrayOutputStream bout = new ByteArrayOutputStream();
  DataOutputStream dos = new DataOutputStream(bout);
  IFileOutputStream ios = new IFileOutputStream(dos);
  header.write(dos);
  ios.write("MAPDATA123".getBytes());
  ios.finish();
  ShuffleHeader header2 = new ShuffleHeader(map2ID.toString(), 14, 10, 1);
  IFileOutputStream ios2 = new IFileOutputStream(dos);
  header2.write(dos);
  ios2.write("MAPDATA456".getBytes());
  ios2.finish();
  ByteArrayInputStream in = new ByteArrayInputStream(bout.toByteArray());
  when(connection.getInputStream()).thenReturn(in);
  // 8 < 10, so the IFileInputStream appears to contain extra bytes
  InMemoryMapOutput<Text,Text> mapOut = new InMemoryMapOutput<Text, Text>(
      job, map1ID, mm, 8, null, true);
  InMemoryMapOutput<Text,Text> mapOut2 = new InMemoryMapOutput<Text, Text>(
      job, map2ID, mm, 10, null, true);
  when(mm.reserve(eq(map1ID), anyLong(), anyInt())).thenReturn(mapOut);
  when(mm.reserve(eq(map2ID), anyLong(), anyInt())).thenReturn(mapOut2);

  underTest.copyFromHost(host);

  verify(allErrs).increment(1);
  verify(ss).copyFailed(map1ID, host, true, false);
  verify(ss, never()).copyFailed(map2ID, host, true, false);
  verify(ss).putBackKnownMapOutput(any(MapHost.class), eq(map1ID));
  verify(ss).putBackKnownMapOutput(any(MapHost.class), eq(map2ID));
}
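A note on the byte counts in this test: IFileOutputStream.finish() appends a trailing checksum after the payload, which is why each ShuffleHeader above declares 14 compressed bytes for a 10-byte payload, assuming the default 4-byte CRC32 checksum. A minimal sketch of that framing; the class name and the printed message are illustrative:

import java.io.ByteArrayOutputStream;
import org.apache.hadoop.mapred.IFileOutputStream;

public class IFileFramingSketch {
  public static void main(String[] args) throws Exception {
    ByteArrayOutputStream bout = new ByteArrayOutputStream();
    IFileOutputStream ios = new IFileOutputStream(bout);
    ios.write("MAPDATA123".getBytes()); // 10 payload bytes
    ios.finish();                       // appends the trailing checksum
    // Expect 14 = 10 payload bytes + 4 checksum bytes (CRC32, assumed default).
    System.out.println("bytes on the wire: " + bout.size());
  }
}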
Example 13: testInterruptInMemory
import org.apache.hadoop.mapreduce.security.SecureShuffleUtils; // import the package/class the method depends on
@Test(timeout=10000)
public void testInterruptInMemory() throws Exception {
  final int FETCHER = 2;
  InMemoryMapOutput<Text,Text> immo = spy(new InMemoryMapOutput<Text,Text>(
      job, id, mm, 100, null, true));
  when(mm.reserve(any(TaskAttemptID.class), anyLong(), anyInt()))
      .thenReturn(immo);
  doNothing().when(mm).waitForResource();
  when(ss.getHost()).thenReturn(host);

  String replyHash = SecureShuffleUtils.generateHash(encHash.getBytes(), key);
  when(connection.getResponseCode()).thenReturn(200);
  when(connection.getHeaderField(ShuffleHeader.HTTP_HEADER_NAME))
      .thenReturn(ShuffleHeader.DEFAULT_HTTP_HEADER_NAME);
  when(connection.getHeaderField(ShuffleHeader.HTTP_HEADER_VERSION))
      .thenReturn(ShuffleHeader.DEFAULT_HTTP_HEADER_VERSION);
  when(connection.getHeaderField(SecureShuffleUtils.HTTP_HEADER_REPLY_URL_HASH))
      .thenReturn(replyHash);
  ShuffleHeader header = new ShuffleHeader(map1ID.toString(), 10, 10, 1);
  ByteArrayOutputStream bout = new ByteArrayOutputStream();
  header.write(new DataOutputStream(bout));
  final StuckInputStream in =
      new StuckInputStream(new ByteArrayInputStream(bout.toByteArray()));
  when(connection.getInputStream()).thenReturn(in);
  doAnswer(new Answer<Void>() {
    public Void answer(InvocationOnMock ignore) throws IOException {
      in.close();
      return null;
    }
  }).when(connection).disconnect();

  Fetcher<Text,Text> underTest = new FakeFetcher<Text,Text>(job, id, ss, mm,
      r, metrics, except, key, connection, FETCHER);
  underTest.start();
  // wait for the fetcher to block on the input stream read
  in.waitForFetcher();
  underTest.shutDown();
  underTest.join(); // rely on the test timeout to kill it if stuck

  assertTrue(in.wasClosedProperly());
  verify(immo).abort();
}
Example 14: testInterruptOnDisk
import org.apache.hadoop.mapreduce.security.SecureShuffleUtils; // import the package/class the method depends on
@Test(timeout=10000)
public void testInterruptOnDisk() throws Exception {
  final int FETCHER = 7;
  Path p = new Path("file:///tmp/foo");
  Path pTmp = OnDiskMapOutput.getTempPath(p, FETCHER);
  FileSystem mFs = mock(FileSystem.class, RETURNS_DEEP_STUBS);
  MapOutputFile mof = mock(MapOutputFile.class);
  when(mof.getInputFileForWrite(any(TaskID.class), anyLong())).thenReturn(p);
  OnDiskMapOutput<Text,Text> odmo = spy(new OnDiskMapOutput<Text,Text>(map1ID,
      id, mm, 100L, job, mof, FETCHER, true, mFs, p));
  when(mm.reserve(any(TaskAttemptID.class), anyLong(), anyInt()))
      .thenReturn(odmo);
  doNothing().when(mm).waitForResource();
  when(ss.getHost()).thenReturn(host);

  String replyHash = SecureShuffleUtils.generateHash(encHash.getBytes(), key);
  when(connection.getResponseCode()).thenReturn(200);
  when(connection.getHeaderField(
      SecureShuffleUtils.HTTP_HEADER_REPLY_URL_HASH)).thenReturn(replyHash);
  ShuffleHeader header = new ShuffleHeader(map1ID.toString(), 10, 10, 1);
  ByteArrayOutputStream bout = new ByteArrayOutputStream();
  header.write(new DataOutputStream(bout));
  final StuckInputStream in =
      new StuckInputStream(new ByteArrayInputStream(bout.toByteArray()));
  when(connection.getInputStream()).thenReturn(in);
  when(connection.getHeaderField(ShuffleHeader.HTTP_HEADER_NAME))
      .thenReturn(ShuffleHeader.DEFAULT_HTTP_HEADER_NAME);
  when(connection.getHeaderField(ShuffleHeader.HTTP_HEADER_VERSION))
      .thenReturn(ShuffleHeader.DEFAULT_HTTP_HEADER_VERSION);
  doAnswer(new Answer<Void>() {
    public Void answer(InvocationOnMock ignore) throws IOException {
      in.close();
      return null;
    }
  }).when(connection).disconnect();

  Fetcher<Text,Text> underTest = new FakeFetcher<Text,Text>(job, id, ss, mm,
      r, metrics, except, key, connection, FETCHER);
  underTest.start();
  // wait for the fetcher to block on the input stream read
  in.waitForFetcher();
  underTest.shutDown();
  underTest.join(); // rely on the test timeout to kill it if stuck

  assertTrue(in.wasClosedProperly());
  verify(mFs).create(eq(pTmp));
  verify(mFs).delete(eq(pTmp), eq(false));
  verify(odmo).abort();
}
Example 15: testCopyFromHostExtraBytes
import org.apache.hadoop.mapreduce.security.SecureShuffleUtils; // import the package/class the method depends on
@Test
public void testCopyFromHostExtraBytes() throws Exception {
  Fetcher<Text,Text> underTest = new FakeFetcher<Text,Text>(job, id, ss, mm,
      r, metrics, except, key, connection);
  String replyHash = SecureShuffleUtils.generateHash(encHash.getBytes(), key);

  when(connection.getResponseCode()).thenReturn(200);
  when(connection.getHeaderField(ShuffleHeader.HTTP_HEADER_NAME))
      .thenReturn(ShuffleHeader.DEFAULT_HTTP_HEADER_NAME);
  when(connection.getHeaderField(ShuffleHeader.HTTP_HEADER_VERSION))
      .thenReturn(ShuffleHeader.DEFAULT_HTTP_HEADER_VERSION);
  when(connection.getHeaderField(
      SecureShuffleUtils.HTTP_HEADER_REPLY_URL_HASH)).thenReturn(replyHash);
  ShuffleHeader header = new ShuffleHeader(map1ID.toString(), 14, 10, 1);
  ByteArrayOutputStream bout = new ByteArrayOutputStream();
  DataOutputStream dos = new DataOutputStream(bout);
  IFileOutputStream ios = new IFileOutputStream(dos);
  header.write(dos);
  ios.write("MAPDATA123".getBytes());
  ios.finish();
  ShuffleHeader header2 = new ShuffleHeader(map2ID.toString(), 14, 10, 1);
  IFileOutputStream ios2 = new IFileOutputStream(dos);
  header2.write(dos);
  ios2.write("MAPDATA456".getBytes());
  ios2.finish();
  ByteArrayInputStream in = new ByteArrayInputStream(bout.toByteArray());
  when(connection.getInputStream()).thenReturn(in);
  // 8 < 10, so the IFileInputStream appears to contain extra bytes
  IFileWrappedMapOutput<Text,Text> mapOut = new InMemoryMapOutput<Text, Text>(
      job, map1ID, mm, 8, null, true);
  IFileWrappedMapOutput<Text,Text> mapOut2 = new InMemoryMapOutput<Text, Text>(
      job, map2ID, mm, 10, null, true);
  when(mm.reserve(eq(map1ID), anyLong(), anyInt())).thenReturn(mapOut);
  when(mm.reserve(eq(map2ID), anyLong(), anyInt())).thenReturn(mapOut2);

  underTest.copyFromHost(host);

  verify(allErrs).increment(1);
  verify(ss).copyFailed(map1ID, host, true, false);
  verify(ss, never()).copyFailed(map2ID, host, true, false);
  verify(ss).putBackKnownMapOutput(any(MapHost.class), eq(map1ID));
  verify(ss).putBackKnownMapOutput(any(MapHost.class), eq(map2ID));
}