This article collects typical usage examples of the Java class org.apache.hadoop.io.UTF8: what the UTF8 class does, how it is used, and what real-world code that uses it looks like.
The UTF8 class belongs to the org.apache.hadoop.io package. Eight code examples are shown below, ordered by popularity. Note that UTF8 has long been deprecated in Hadoop in favor of org.apache.hadoop.io.Text; these examples remain useful mainly for reading legacy code.
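Before the examples, here is a minimal round-trip sketch of the class's static helpers, UTF8.writeString and UTF8.readString; the byte-array streams are just scaffolding for the demo:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import org.apache.hadoop.io.UTF8;

public class Utf8RoundTrip {
  public static void main(String[] args) throws Exception {
    // Serialize a String with UTF8's static helper.
    ByteArrayOutputStream bytes = new ByteArrayOutputStream();
    DataOutputStream out = new DataOutputStream(bytes);
    UTF8.writeString(out, "hello hadoop");

    // Deserialize it back.
    DataInputStream in =
        new DataInputStream(new ByteArrayInputStream(bytes.toByteArray()));
    System.out.println(UTF8.readString(in));  // prints "hello hadoop"
  }
}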
Example 1: setupJob
import org.apache.hadoop.io.UTF8; // import the required package/class
/**
 * Create the job configuration.
 */
private JobConf setupJob(int numMaps, int numReduces, String jarFile) {
  JobConf jobConf = new JobConf(getConf());
  jobConf.setJarByClass(MRBench.class);
  FileInputFormat.addInputPath(jobConf, INPUT_DIR);
  jobConf.setInputFormat(TextInputFormat.class);
  jobConf.setOutputFormat(TextOutputFormat.class);
  jobConf.setOutputValueClass(UTF8.class);
  jobConf.setMapOutputKeyClass(UTF8.class);
  jobConf.setMapOutputValueClass(UTF8.class);
  if (null != jarFile) {
    jobConf.setJar(jarFile);
  }
  jobConf.setMapperClass(Map.class);
  jobConf.setReducerClass(Reduce.class);
  jobConf.setNumMapTasks(numMaps);
  jobConf.setNumReduceTasks(numReduces);
  jobConf
      .setBoolean("mapreduce.job.complete.cancel.delegation.tokens", false);
  return jobConf;
}
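A minimal usage sketch for the method above (hypothetical driver code; the real MRBench parses command-line options and runs the job in a timing loop):

JobConf jobConf = setupJob(2, 1, null); // 2 map tasks, 1 reduce task, no extra jar
JobClient.runJob(jobConf);              // submit and block until completion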
Example 2: writeTest
import org.apache.hadoop.io.UTF8; // import the required package/class
public static void writeTest(FileSystem fs, boolean fastCheck)
    throws Exception {
  fs.delete(DATA_DIR, true);
  fs.delete(WRITE_DIR, true);
  JobConf job = new JobConf(conf, TestFileSystem.class);
  job.setBoolean("fs.test.fastCheck", fastCheck);
  FileInputFormat.setInputPaths(job, CONTROL_DIR);
  job.setInputFormat(SequenceFileInputFormat.class);
  job.setMapperClass(WriteMapper.class);
  job.setReducerClass(LongSumReducer.class);
  FileOutputFormat.setOutputPath(job, WRITE_DIR);
  job.setOutputKeyClass(UTF8.class);
  job.setOutputValueClass(LongWritable.class);
  job.setNumReduceTasks(1);
  JobClient.runJob(job);
}
Example 3: readTest
import org.apache.hadoop.io.UTF8; // import the required package/class
public static void readTest(FileSystem fs, boolean fastCheck)
    throws Exception {
  fs.delete(READ_DIR, true);
  JobConf job = new JobConf(conf, TestFileSystem.class);
  job.setBoolean("fs.test.fastCheck", fastCheck);
  FileInputFormat.setInputPaths(job, CONTROL_DIR);
  job.setInputFormat(SequenceFileInputFormat.class);
  job.setMapperClass(ReadMapper.class);
  job.setReducerClass(LongSumReducer.class);
  FileOutputFormat.setOutputPath(job, READ_DIR);
  job.setOutputKeyClass(UTF8.class);
  job.setOutputValueClass(LongWritable.class);
  job.setNumReduceTasks(1);
  JobClient.runJob(job);
}
Example 4: seekTest
import org.apache.hadoop.io.UTF8; // import the required package/class
public static void seekTest(FileSystem fs, boolean fastCheck)
    throws Exception {
  fs.delete(READ_DIR, true);
  JobConf job = new JobConf(conf, TestFileSystem.class);
  job.setBoolean("fs.test.fastCheck", fastCheck);
  FileInputFormat.setInputPaths(job, CONTROL_DIR);
  job.setInputFormat(SequenceFileInputFormat.class);
  job.setMapperClass(SeekMapper.class);
  job.setReducerClass(LongSumReducer.class);
  FileOutputFormat.setOutputPath(job, READ_DIR);
  job.setOutputKeyClass(UTF8.class);
  job.setOutputValueClass(LongWritable.class);
  job.setNumReduceTasks(1);
  JobClient.runJob(job);
}
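Examples 2-4 share the same harness: a control directory of SequenceFiles drives a mapper, and a LongSumReducer totals the results under a UTF8 key. A minimal invocation sketch (hypothetical; the real TestFileSystem first writes control files into CONTROL_DIR before launching the jobs):

FileSystem fs = FileSystem.get(conf);
writeTest(fs, false); // write DATA_DIR through MapReduce
readTest(fs, false);  // read it back sequentially and verify
seekTest(fs, false);  // re-read with random seeks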
Example 5: setupJob
import org.apache.hadoop.io.UTF8; // import the required package/class
/**
 * Create the job configuration.
 */
private JobConf setupJob(int numMaps, int numReduces, String jarFile) {
  JobConf jobConf = new JobConf(getConf());
  jobConf.setJarByClass(MRBench.class);
  FileInputFormat.addInputPath(jobConf, INPUT_DIR);
  jobConf.setInputFormat(TextInputFormat.class);
  jobConf.setOutputFormat(TextOutputFormat.class);
  jobConf.setOutputValueClass(UTF8.class);
  jobConf.setMapOutputKeyClass(UTF8.class);
  jobConf.setMapOutputValueClass(UTF8.class);
  if (null != jarFile) {
    jobConf.setJar(jarFile);
  }
  jobConf.setMapperClass(Map.class);
  jobConf.setReducerClass(Reduce.class);
  jobConf.setNumMapTasks(numMaps);
  jobConf.setNumReduceTasks(numReduces);
  return jobConf;
}
Example 6: isValidUserName
import org.apache.hadoop.io.UTF8; // import the required package/class
@Override
public boolean isValidUserName(String username) {
  if (username == null || username.length() == 0)
    return false;
  int len = username.length();
  char[] carray = UTF8.getCharArray(len);
  username.getChars(0, len, carray, 0);
  char fc = carray[0];
  if (!((fc >= 'a' && fc <= 'z') || fc == '_')) {
    return false;
  }
  for (int i = 1; i < len; i++) {
    char c = carray[i];
    if (!((c >= 'a' && c <= 'z') || (c >= '0' && c <= '9') || c == '-'
        || c == '_' || (c == '$' && i == len - 1))) {
      return false;
    }
  }
  return true;
}
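The loop enforces the classic POSIX-style username shape: a lowercase letter or underscore first, then lowercase letters, digits, hyphens, or underscores, with an optional trailing '$' (as used for machine accounts). An equivalent regex check, as a sketch (the hand-rolled loop above avoids regex overhead and reuses a char buffer via UTF8.getCharArray):

private static final java.util.regex.Pattern USERNAME =
    java.util.regex.Pattern.compile("[a-z_][a-z0-9_-]*\\$?");

public boolean isValidUserNameRegex(String username) {
  return username != null && USERNAME.matcher(username).matches();
}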
Example 7: readFields
import org.apache.hadoop.io.UTF8; // import the required package/class
@Override
public void readFields(DataInput in) throws IOException {
  this.leaderId = UTF8.readString(in);
  this.partition = in.readInt();
  this.beginOffset = in.readLong();
  this.offset = in.readLong();
  this.checksum = in.readLong();
  this.topic = in.readUTF();
  this.time = in.readLong();
  this.server = in.readUTF();   // left for legacy
  this.service = in.readUTF();  // left for legacy
  this.partitionMap = new MapWritable();
  try {
    this.partitionMap.readFields(in);
  } catch (IOException e) {
    this.setServer(this.server);
    this.setService(this.service);
  }
}
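Writable serialization is symmetric, so the matching write() must emit the same fields in the same order. A sketch of what that counterpart plausibly looks like (assumed for illustration, not taken from the original class):

@Override
public void write(DataOutput out) throws IOException {
  UTF8.writeString(out, this.leaderId);
  out.writeInt(this.partition);
  out.writeLong(this.beginOffset);
  out.writeLong(this.offset);
  out.writeLong(this.checksum);
  out.writeUTF(this.topic);
  out.writeLong(this.time);
  out.writeUTF(this.server);   // legacy field
  out.writeUTF(this.service);  // legacy field
  this.partitionMap.write(out);
}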
Example 8: setupJob
import org.apache.hadoop.io.UTF8; // import the required package/class
/**
 * Create the job configuration.
 */
private static JobConf setupJob(int numMaps, int numReduces, String jarFile) {
  JobConf jobConf = new JobConf(MRBench.class);
  FileInputFormat.addInputPath(jobConf, INPUT_DIR);
  jobConf.setInputFormat(TextInputFormat.class);
  jobConf.setOutputFormat(TextOutputFormat.class);
  jobConf.setOutputValueClass(UTF8.class);
  jobConf.setMapOutputKeyClass(UTF8.class);
  jobConf.setMapOutputValueClass(UTF8.class);
  if (null != jarFile) {
    jobConf.setJar(jarFile);
  }
  jobConf.setMapperClass(Map.class);
  jobConf.setReducerClass(Reduce.class);
  jobConf.setNumMapTasks(numMaps);
  jobConf.setNumReduceTasks(numReduces);
  return jobConf;
}