This article collects typical usage examples of the Java method org.apache.hadoop.io.Text.equals. If you have been wondering what Text.equals does, how to call it, or where to find working examples, the curated code samples below may help. You can also read further about the enclosing class, org.apache.hadoop.io.Text.
The following shows 15 code examples of Text.equals, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Java code samples.
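Before the individual examples, here is a minimal, self-contained sketch of the method's semantics (the class name and values are illustrative only): Text extends BinaryComparable, so equals compares the underlying UTF-8 byte content of two Text instances rather than object identity, and it returns false for non-Text arguments such as String.
import org.apache.hadoop.io.Text;

public class TextEqualsDemo {
  public static void main(String[] args) {
    Text a = new Text("hdfs://namenode:8020");
    Text b = new Text("hdfs://namenode:8020");

    // Distinct instances with the same content are equal: equality is
    // defined over the serialized byte content, not object identity.
    System.out.println(a.equals(b));                      // true
    System.out.println(a.equals(new Text("other")));      // false

    // A String with the same characters is not equal to a Text.
    System.out.println(a.equals("hdfs://namenode:8020")); // false
  }
}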
Example 1: selectToken
import org.apache.hadoop.io.Text; // import the package/class this method depends on
@SuppressWarnings("unchecked")
public Token<TimelineDelegationTokenIdentifier> selectToken(Text service,
    Collection<Token<? extends TokenIdentifier>> tokens) {
  if (service == null) {
    return null;
  }
  if (LOG.isDebugEnabled()) {
    LOG.debug("Looking for a token with service " + service.toString());
  }
  for (Token<? extends TokenIdentifier> token : tokens) {
    if (LOG.isDebugEnabled()) {
      LOG.debug("Token kind is " + token.getKind().toString()
          + " and the token's service name is " + token.getService());
    }
    if (TimelineDelegationTokenIdentifier.KIND_NAME.equals(token.getKind())
        && service.equals(token.getService())) {
      return (Token<TimelineDelegationTokenIdentifier>) token;
    }
  }
  return null;
}
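As a usage note for the selector pattern above (and the similar selectors in the following examples), the service argument is normally derived from the target address and the tokens come from the caller's credentials. The fragment below is a hedged sketch of that wiring; the selector variable, host, and port are hypothetical, and it assumes org.apache.hadoop.security.SecurityUtil, org.apache.hadoop.security.UserGroupInformation, and java.net.InetSocketAddress are imported:
// Hedged usage sketch; "selector", the host, and the port are hypothetical.
Text service = SecurityUtil.buildTokenService(new InetSocketAddress("timeline-host", 8188));
Token<TimelineDelegationTokenIdentifier> token =
    selector.selectToken(service, UserGroupInformation.getCurrentUser().getTokens());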
Example 2: selectToken
import org.apache.hadoop.io.Text; // import the package/class this method depends on
@SuppressWarnings("unchecked")
public Token<ClientToAMTokenIdentifier> selectToken(Text service,
    Collection<Token<? extends TokenIdentifier>> tokens) {
  if (service == null) {
    return null;
  }
  LOG.debug("Looking for a token with service " + service.toString());
  for (Token<? extends TokenIdentifier> token : tokens) {
    LOG.debug("Token kind is " + token.getKind().toString()
        + " and the token's service name is " + token.getService());
    if (ClientToAMTokenIdentifier.KIND_NAME.equals(token.getKind())
        && service.equals(token.getService())) {
      return (Token<ClientToAMTokenIdentifier>) token;
    }
  }
  return null;
}
Example 3: selectToken
import org.apache.hadoop.io.Text; // import the package/class this method depends on
@SuppressWarnings("unchecked")
@Override
public Token<NMTokenIdentifier> selectToken(Text service,
    Collection<Token<? extends TokenIdentifier>> tokens) {
  if (service == null) {
    return null;
  }
  for (Token<? extends TokenIdentifier> token : tokens) {
    if (LOG.isDebugEnabled()) {
      LOG.info("Looking for service: " + service + ". Current token is "
          + token);
    }
    if (NMTokenIdentifier.KIND.equals(token.getKind()) &&
        service.equals(token.getService())) {
      return (Token<NMTokenIdentifier>) token;
    }
  }
  return null;
}
Example 4: selectToken
import org.apache.hadoop.io.Text; // import the package/class this method depends on
@SuppressWarnings("unchecked")
@Override
public Token<ContainerTokenIdentifier> selectToken(Text service,
    Collection<Token<? extends TokenIdentifier>> tokens) {
  if (service == null) {
    return null;
  }
  for (Token<? extends TokenIdentifier> token : tokens) {
    if (LOG.isDebugEnabled()) {
      LOG.info("Looking for service: " + service + ". Current token is "
          + token);
    }
    if (ContainerTokenIdentifier.KIND.equals(token.getKind()) &&
        service.equals(token.getService())) {
      return (Token<ContainerTokenIdentifier>) token;
    }
  }
  return null;
}
Example 5: selectToken
import org.apache.hadoop.io.Text; // import the package/class this method depends on
@SuppressWarnings("unchecked")
public Token<MRDelegationTokenIdentifier> selectToken(Text service,
    Collection<Token<? extends TokenIdentifier>> tokens) {
  if (service == null) {
    return null;
  }
  LOG.debug("Looking for a token with service " + service.toString());
  for (Token<? extends TokenIdentifier> token : tokens) {
    if (LOG.isDebugEnabled()) {
      LOG.debug("Token kind is " + token.getKind().toString()
          + " and the token's service name is " + token.getService());
    }
    if (MRDelegationTokenIdentifier.KIND_NAME.equals(token.getKind())
        && service.equals(token.getService())) {
      return (Token<MRDelegationTokenIdentifier>) token;
    }
  }
  return null;
}
Example 6: selectToken
import org.apache.hadoop.io.Text; // import the package/class this method depends on
@Override
public Token<AuthenticationTokenIdentifier> selectToken(Text serviceName,
    Collection<Token<? extends TokenIdentifier>> tokens) {
  if (serviceName != null) {
    for (Token ident : tokens) {
      if (serviceName.equals(ident.getService()) &&
          AuthenticationTokenIdentifier.AUTH_TOKEN_TYPE.equals(ident.getKind())) {
        if (LOG.isDebugEnabled()) {
          LOG.debug("Returning token " + ident);
        }
        return (Token<AuthenticationTokenIdentifier>) ident;
      }
    }
  }
  LOG.debug("No matching token found");
  return null;
}
Example 7: selectToken
import org.apache.hadoop.io.Text; // import the package/class this method depends on
@SuppressWarnings("unchecked")
@Override
public Token<TokenIdent> selectToken(Text service,
    Collection<Token<? extends TokenIdentifier>> tokens) {
  if (service == null) {
    return null;
  }
  for (Token<? extends TokenIdentifier> token : tokens) {
    if (kindName.equals(token.getKind())
        && service.equals(token.getService())) {
      return (Token<TokenIdent>) token;
    }
  }
  return null;
}
Example 8: selectToken
import org.apache.hadoop.io.Text; // import the package/class this method depends on
@SuppressWarnings("unchecked")
@Override
public Token<TestTokenIdentifier> selectToken(Text service,
    Collection<Token<? extends TokenIdentifier>> tokens) {
  if (service == null) {
    return null;
  }
  for (Token<? extends TokenIdentifier> token : tokens) {
    if (TestTokenIdentifier.KIND_NAME.equals(token.getKind())
        && service.equals(token.getService())) {
      return (Token<TestTokenIdentifier>) token;
    }
  }
  return null;
}
Example 9: validateFinalListing
import org.apache.hadoop.io.Text; // import the package/class this method depends on
/**
 * Validates the final resulting path listing. Checks for duplicate entries. If preserving ACLs,
 * checks that the file system can support ACLs; if preserving XAttrs, checks that the file system
 * can support XAttrs.
 *
 * @param pathToListFile path listing built by doBuildListing
 * @param options input options to S3MapReduceCp
 * @throws IOException on any issue encountered while checking for duplicates
 * @throws DuplicateFileException if duplicate entries are found
 */
private void validateFinalListing(Path pathToListFile, S3MapReduceCpOptions options)
    throws DuplicateFileException, IOException {
  Configuration config = getConf();
  FileSystem fs = pathToListFile.getFileSystem(config);
  Path sortedList = sortListing(fs, config, pathToListFile);
  SequenceFile.Reader reader = new SequenceFile.Reader(config, SequenceFile.Reader.file(sortedList));
  try {
    Text lastKey = new Text("*"); // source relative path can never hold *
    CopyListingFileStatus lastFileStatus = new CopyListingFileStatus();
    Text currentKey = new Text();
    while (reader.next(currentKey)) {
      if (currentKey.equals(lastKey)) {
        CopyListingFileStatus currentFileStatus = new CopyListingFileStatus();
        reader.getCurrentValue(currentFileStatus);
        throw new DuplicateFileException("File "
            + lastFileStatus.getPath()
            + " and "
            + currentFileStatus.getPath()
            + " would cause duplicates. Aborting");
      }
      reader.getCurrentValue(lastFileStatus);
      lastKey.set(currentKey);
    }
  } finally {
    IOUtils.closeStream(reader);
  }
}
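The loop above leans on two properties of Text that are easy to miss: equals compares byte content, and set(Text) copies the bytes rather than sharing a reference, so lastKey still holds the previous record's key even though the reader keeps reusing currentKey. A minimal standalone sketch of that interplay (unrelated to the S3MapReduceCp classes):
Text lastKey = new Text("*");
Text currentKey = new Text();

currentKey.set("dir/file1");
System.out.println(currentKey.equals(lastKey)); // false: different byte content
lastKey.set(currentKey);                        // copies the bytes into lastKey

currentKey.set("dir/file1");                    // the reader would overwrite currentKey here
System.out.println(currentKey.equals(lastKey)); // true: same byte content, i.e. a duplicate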
Example 10: reduce
import org.apache.hadoop.io.Text; // import the package/class this method depends on
@Override
public void reduce(Text key, Iterator<IntWritable> values,
    OutputCollector<Text, IntWritable> output, Reporter reporter)
    throws IOException {
  int sum = 0;
  while (values.hasNext()) {
    sum += values.next().get();
  }
  if (key.equals(OPEN_EXECTIME)) {
    executionTime[OPEN] = sum;
  } else if (key.equals(NUMOPS_OPEN)) {
    numOfOps[OPEN] = sum;
  } else if (key.equals(LIST_EXECTIME)) {
    executionTime[LIST] = sum;
  } else if (key.equals(NUMOPS_LIST)) {
    numOfOps[LIST] = sum;
  } else if (key.equals(DELETE_EXECTIME)) {
    executionTime[DELETE] = sum;
  } else if (key.equals(NUMOPS_DELETE)) {
    numOfOps[DELETE] = sum;
  } else if (key.equals(CREATE_EXECTIME)) {
    executionTime[CREATE] = sum;
  } else if (key.equals(NUMOPS_CREATE)) {
    numOfOps[CREATE] = sum;
  } else if (key.equals(WRITE_CLOSE_EXECTIME)) {
    System.out.println(WRITE_CLOSE_EXECTIME + " = " + sum);
    executionTime[WRITE_CLOSE] = sum;
  } else if (key.equals(NUMOPS_WRITE_CLOSE)) {
    numOfOps[WRITE_CLOSE] = sum;
  } else if (key.equals(TOTALOPS)) {
    totalOps = sum;
  } else if (key.equals(ELAPSED_TIME)) {
    totalTime = sum;
  }
  result.set(sum);
  output.collect(key, result);
  // System.out.println("Key = " + key + " Sum is =" + sum);
  // printResults(System.out);
}
Example 11: selectToken
import org.apache.hadoop.io.Text; // import the package/class this method depends on
@SuppressWarnings("unchecked")
@Override
public Token<JobTokenIdentifier> selectToken(Text service,
    Collection<Token<? extends TokenIdentifier>> tokens) {
  if (service == null) {
    return null;
  }
  for (Token<? extends TokenIdentifier> token : tokens) {
    if (JobTokenIdentifier.KIND_NAME.equals(token.getKind())
        && service.equals(token.getService())) {
      return (Token<JobTokenIdentifier>) token;
    }
  }
  return null;
}
Example 12: handleKind
import org.apache.hadoop.io.Text; // import the package/class this method depends on
@Override
public boolean handleKind(Text kind) {
  return kind.equals(HftpFileSystem.TOKEN_KIND)
      || kind.equals(HsftpFileSystem.TOKEN_KIND)
      || kind.equals(WebHdfsFileSystem.TOKEN_KIND)
      || kind.equals(SWebHdfsFileSystem.TOKEN_KIND);
}
Example 13: getSchemeByKind
import org.apache.hadoop.io.Text; // import the package/class this method depends on
private static String getSchemeByKind(Text kind) {
  if (kind.equals(HftpFileSystem.TOKEN_KIND)) {
    return HftpFileSystem.SCHEME;
  } else if (kind.equals(HsftpFileSystem.TOKEN_KIND)) {
    return HsftpFileSystem.SCHEME;
  } else if (kind.equals(WebHdfsFileSystem.TOKEN_KIND)) {
    return WebHdfsFileSystem.SCHEME;
  } else if (kind.equals(SWebHdfsFileSystem.TOKEN_KIND)) {
    return SWebHdfsFileSystem.SCHEME;
  } else {
    throw new IllegalArgumentException("Unsupported scheme");
  }
}
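Since Text derives hashCode from the same byte content that equals compares, the mapping above could equally be expressed as a lookup table keyed by Text. The sketch below is an alternative formulation for illustration only, not the HDFS code, and it assumes java.util.Map and java.util.HashMap are imported:
// Alternative sketch: a Text-keyed map works because hashCode and equals are
// both based on the byte content. Do not mutate keys after insertion, since
// Text is mutable.
private static final Map<Text, String> KIND_TO_SCHEME = new HashMap<>();
static {
  KIND_TO_SCHEME.put(HftpFileSystem.TOKEN_KIND, HftpFileSystem.SCHEME);
  KIND_TO_SCHEME.put(HsftpFileSystem.TOKEN_KIND, HsftpFileSystem.SCHEME);
  KIND_TO_SCHEME.put(WebHdfsFileSystem.TOKEN_KIND, WebHdfsFileSystem.SCHEME);
  KIND_TO_SCHEME.put(SWebHdfsFileSystem.TOKEN_KIND, SWebHdfsFileSystem.SCHEME);
}

private static String getSchemeByKind(Text kind) {
  String scheme = KIND_TO_SCHEME.get(kind);
  if (scheme == null) {
    throw new IllegalArgumentException("Unsupported scheme");
  }
  return scheme;
}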
Example 14: selectToken
import org.apache.hadoop.io.Text; // import the package/class this method depends on
@SuppressWarnings("unchecked")
@Override
public Token<TestTokenIdentifier> selectToken(Text service,
    Collection<Token<? extends TokenIdentifier>> tokens) {
  if (service == null) {
    return null;
  }
  for (Token<? extends TokenIdentifier> token : tokens) {
    if (TestTokenIdentifier.KIND_NAME.equals(token.getKind())
        && service.equals(token.getService())) {
      return (Token<TestTokenIdentifier>) token;
    }
  }
  return null;
}
Example 15: isWALCompressionEnabled
import org.apache.hadoop.io.Text; // import the package/class this method depends on
/**
 * Call this method after init() has been executed.
 * @return whether WAL compression is enabled
 */
static boolean isWALCompressionEnabled(final Metadata metadata) {
  // Check that the stored version is >= COMPRESSION_VERSION.
  Text txt = metadata.get(WAL_VERSION_KEY);
  if (txt == null || Integer.parseInt(txt.toString()) < COMPRESSION_VERSION) {
    return false;
  }
  // Now check that the compression type is present. Currently only one value.
  txt = metadata.get(WAL_COMPRESSION_TYPE_KEY);
  return txt != null && txt.equals(DICTIONARY_COMPRESSION_TYPE);
}
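For a rough idea of what the check above expects, the sketch below builds a SequenceFile.Metadata instance that would satisfy it. This is not HBase code; it assumes the WAL_VERSION_KEY, WAL_COMPRESSION_TYPE_KEY, COMPRESSION_VERSION, and DICTIONARY_COMPRESSION_TYPE constants are visible, and that the key and value constants are Text instances, as the metadata.get and equals calls above imply:
// Hedged sketch, not HBase code; constant names are taken from the example above.
SequenceFile.Metadata metadata = new SequenceFile.Metadata();
metadata.set(WAL_VERSION_KEY, new Text(Integer.toString(COMPRESSION_VERSION)));
metadata.set(WAL_COMPRESSION_TYPE_KEY, DICTIONARY_COMPRESSION_TYPE);
boolean enabled = isWALCompressionEnabled(metadata); // true for this metadata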