

Java Writer Class Code Examples

This article collects typical usage examples of the Java class org.apache.hadoop.io.SequenceFile.Writer. If you are wondering what exactly the Writer class does, how to use it, or what real-world usage looks like, the hand-picked class examples below should help.


The Writer class belongs to the org.apache.hadoop.io.SequenceFile package. Fifteen code examples of the class are shown below, sorted by popularity by default.
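Before the examples, here is a minimal sketch of the typical Writer lifecycle (create, append, close) using the Option-based factory method from Hadoop 2.x. The path and key/value types are placeholders chosen for illustration, not taken from any example below.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.SequenceFile.Writer;
import org.apache.hadoop.io.Text;

public class WriterLifecycleSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    Path path = new Path("/tmp/example.seq"); // placeholder path
    Writer writer = SequenceFile.createWriter(conf,
        Writer.file(path),
        Writer.keyClass(Text.class),
        Writer.valueClass(IntWritable.class));
    try {
      writer.append(new Text("key"), new IntWritable(42));
    } finally {
      writer.close(); // always close to flush buffered records to the file
    }
  }
}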

Example 1: testJavaSerialization

import org.apache.hadoop.io.SequenceFile.Writer; // import the required package/class
@Test
public void testJavaSerialization() throws Exception {
  Path file = new Path(System.getProperty("test.build.data",".") +
      "/testseqser.seq");
  
  fs.delete(file, true);
  Writer writer = SequenceFile.createWriter(fs, conf, file, Long.class,
      String.class);
  
  writer.append(1L, "one");
  writer.append(2L, "two");
  
  writer.close();
  
  Reader reader = new Reader(fs, file, conf);
  assertEquals(1L, reader.next((Object) null));
  assertEquals("one", reader.getCurrentValue((Object) null));
  assertEquals(2L, reader.next((Object) null));
  assertEquals("two", reader.getCurrentValue((Object) null));
  assertNull(reader.next((Object) null));
  reader.close();
  
}
 
Developer: nucypher, Project: hadoop-oss, Lines: 24, Source: TestSequenceFileSerialization.java
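This test only works because Java serialization is registered with the Configuration; plain Long and String keys/values are not Writable, so without it createWriter would find no serializer. In the Hadoop test suite this happens in setUp, roughly as in the sketch below.

// Register JavaSerialization so plain Long/String keys and values can be
// (de)serialized; Writable serialization alone would reject these types.
conf.set("io.serializations", "org.apache.hadoop.io.serializer.JavaSerialization");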

Example 2: doBuildListing

import org.apache.hadoop.io.SequenceFile.Writer; // import the required package/class
@Override
public void doBuildListing(Path pathToListFile, DistCpOptions options) throws IOException {
  try (Writer writer = newWriter(pathToListFile)) {

    Path sourceRootPath = getRootPath(getConf());

    for (Path sourcePath : options.getSourcePaths()) {

      FileSystem fileSystem = sourcePath.getFileSystem(getConf());
      FileStatus directory = fileSystem.getFileStatus(sourcePath);

      Map<String, CopyListingFileStatus> children = new FileStatusTreeTraverser(fileSystem)
          .preOrderTraversal(directory)
          .transform(new CopyListingFileStatusFunction(fileSystem, options))
          .uniqueIndex(new RelativePathFunction(sourceRootPath));

      for (Entry<String, CopyListingFileStatus> entry : children.entrySet()) {
        LOG.debug("Adding '{}' with relative path '{}'", entry.getValue().getPath(), entry.getKey());
        writer.append(new Text(entry.getKey()), entry.getValue());
        writer.sync();
      }
    }
  }
}
 
Developer: HotelsDotCom, Project: circus-train, Lines: 25, Source: CircusTrainCopyListing.java
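The writer.sync() call after each append writes a sync marker into the file; these markers are what later let a reader resynchronize at an arbitrary byte offset (for example, at input-split boundaries). A hedged reader-side sketch, where conf, path, and offset are illustrative names and the value type is assumed Writable as in the listing above:

// Seek to an arbitrary offset, then advance to the next sync marker
// before reading records.
SequenceFile.Reader reader = new SequenceFile.Reader(conf, SequenceFile.Reader.file(path));
reader.sync(offset); // positions the reader just past the next sync marker
Text key = new Text();
CopyListingFileStatus value = new CopyListingFileStatus();
while (reader.next(key, value)) {
  // process (key, value)
}
reader.close();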

Example 3: testJavaSerialization

import org.apache.hadoop.io.SequenceFile.Writer; // import the required package/class
public void testJavaSerialization() throws Exception {
  Path file = new Path(System.getProperty("test.build.data",".") +
      "/testseqser.seq");
  
  fs.delete(file, true);
  Writer writer = SequenceFile.createWriter(fs, conf, file, Long.class,
      String.class);
  
  writer.append(1L, "one");
  writer.append(2L, "two");
  
  writer.close();
  
  Reader reader = new Reader(fs, file, conf);
  assertEquals(1L, reader.next((Object) null));
  assertEquals("one", reader.getCurrentValue((Object) null));
  assertEquals(2L, reader.next((Object) null));
  assertEquals("two", reader.getCurrentValue((Object) null));
  assertNull(reader.next((Object) null));
  reader.close();
  
}
 
Developer: naver, Project: hadoop, Lines: 23, Source: TestSequenceFileSerialization.java

Example 4: testSimpleConsumerWithEmptySequenceFile

import org.apache.hadoop.io.SequenceFile.Writer; // import the required package/class
@Test
public void testSimpleConsumerWithEmptySequenceFile() throws Exception {
    if (!canTest()) {
        return;
    }

    final Path file = new Path(new File("target/test/test-camel-sequence-file").getAbsolutePath());
    Configuration conf = new Configuration();
    SequenceFile.Writer writer = createWriter(conf, file, NullWritable.class, BooleanWritable.class);
    writer.sync();
    writer.close();

    MockEndpoint resultEndpoint = context.getEndpoint("mock:result", MockEndpoint.class);
    resultEndpoint.expectedMessageCount(0);

    context.addRoutes(new RouteBuilder() {
        public void configure() {
            from("hdfs2:localhost/" + file.toUri() + "?fileSystemType=LOCAL&fileType=SEQUENCE_FILE&chunkSize=4096&initialDelay=0").to("mock:result");
        }
    });
    context.start();

    resultEndpoint.assertIsSatisfied();
}
 
Developer: HydAu, Project: Camel, Lines: 25, Source: HdfsConsumerTest.java

Example 5: testJavaSerialization

import org.apache.hadoop.io.SequenceFile.Writer; // import the required package/class
public void testJavaSerialization() throws Exception {
  Path file = new Path(System.getProperty("test.build.data",".") +
      "/test.seq");
  
  fs.delete(file, true);
  Writer writer = SequenceFile.createWriter(fs, conf, file, Long.class,
      String.class);
  
  writer.append(1L, "one");
  writer.append(2L, "two");
  
  writer.close();
  
  Reader reader = new Reader(fs, file, conf);
  assertEquals(1L, reader.next((Object) null));
  assertEquals("one", reader.getCurrentValue((Object) null));
  assertEquals(2L, reader.next((Object) null));
  assertEquals("two", reader.getCurrentValue((Object) null));
  assertNull(reader.next((Object) null));
  reader.close();
  
}
 
Developer: rhli, Project: hadoop-EAR, Lines: 23, Source: TestSequenceFileSerialization.java

Example 6: MahoutOutput

import org.apache.hadoop.io.SequenceFile.Writer; // import the required package/class
public MahoutOutput(String output, FileSystem fs, Configuration conf) throws IOException {
  // clear the output dir
  Path basedir = new Path(output);
  if (fs.exists(basedir)) {
    fs.delete(basedir, true); // delete the existing directory
  }
  fs.mkdirs(basedir);

  String dictOutput = output + "/dictionary.file";
  dictWriter = createWriter(conf, Writer.file(new Path(dictOutput)),
      Writer.keyClass(Text.class), Writer.valueClass(IntWritable.class));

  String vectorsPath = output + "/tfidf-vectors";
  tfidfWriter = new SequenceFile.Writer(fs, conf,
      new Path(vectorsPath), Text.class, VectorWritable.class);
}
 
Developer: project-asap, Project: IReS-Platform, Lines: 18, Source: MahoutOutput.java
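Note that tfidfWriter above is built with the direct constructor, which is deprecated in Hadoop 2.x, while dictWriter already uses the Option-based factory. A sketch of the equivalent Option-based call for tfidfWriter, with the same key/value types as above:

// Equivalent Option-based construction (vectorsPath as defined above).
tfidfWriter = SequenceFile.createWriter(conf,
    Writer.file(new Path(vectorsPath)),
    Writer.keyClass(Text.class),
    Writer.valueClass(VectorWritable.class));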

Example 7: SequenceEventWriterInstance

import org.apache.hadoop.io.SequenceFile.Writer; // import the required package/class
public SequenceEventWriterInstance(OutputStream stream,//
		Class<?> keyClass, //
		Class<?> valueClass,//
		CompressionType compressionType) {
	if (!(stream instanceof FSDataOutputStream)) {
		throw new RuntimeException(
				"OutputStream must be a FSDataOutputStream");
	}
	try {
		writer = SequenceFile.createWriter(hdfs.getHadoopConfig(),
				Writer.stream((FSDataOutputStream) stream),
				Writer.keyClass(keyClass),
				Writer.valueClass(valueClass),
				Writer.compression(compressionType));
	} catch (IOException e) {
		throw new RuntimeException(e);
	}
}
 
Developer: pulsarIO, Project: jetstream, Lines: 19, Source: SequenceEventWriter.java
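Since the constructor insists on an FSDataOutputStream, a caller would typically obtain one from a FileSystem. A minimal usage sketch; the path and key/value classes are placeholders, not taken from the jetstream project:

// Illustrative usage: create an FSDataOutputStream, then hand it to the writer.
Configuration conf = new Configuration();
FileSystem fs = FileSystem.get(conf);
FSDataOutputStream out = fs.create(new Path("/tmp/events.seq"), true);
SequenceEventWriterInstance instance = new SequenceEventWriterInstance(
    out, Text.class, Text.class, CompressionType.BLOCK);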

Example 8: createControlFiles

import org.apache.hadoop.io.SequenceFile.Writer; // import the required package/class
/**
 * Create control files before a test run.
 * Number of files created is equal to the number of maps specified
 * 
 * @throws IOException on error
 */
private void createControlFiles() throws IOException {
  LOG.info("Creating " + numberOfMaps + " control files");

  for (int i = 0; i < numberOfMaps; i++) {
    String strFileName = "NNBench_Controlfile_" + i;
    Path filePath = new Path(new Path(baseDir, CONTROL_DIR_NAME),
            strFileName);

    SequenceFile.Writer writer = null;
    try {
      writer = SequenceFile.createWriter(getConf(), Writer.file(filePath),
          Writer.keyClass(Text.class), Writer.valueClass(LongWritable.class),
          Writer.compression(CompressionType.NONE));
      writer.append(new Text(strFileName), new LongWritable(i));
    } finally {
      if (writer != null) {
        writer.close();
      }
    }
  }
}
 
Developer: hopshadoop, Project: hops, Lines: 28, Source: NNBench.java
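Because SequenceFile.Writer implements java.io.Closeable, the null-check-and-close boilerplate above can be tightened with try-with-resources. A sketch of the equivalent loop body:

try (SequenceFile.Writer writer = SequenceFile.createWriter(getConf(),
    Writer.file(filePath),
    Writer.keyClass(Text.class),
    Writer.valueClass(LongWritable.class),
    Writer.compression(CompressionType.NONE))) {
  writer.append(new Text(strFileName), new LongWritable(i));
}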

Example 9: testJavaSerialization

import org.apache.hadoop.io.SequenceFile.Writer; // import the required package/class
@Test
public void testJavaSerialization() throws Exception {
  Path file = new Path(GenericTestUtils.getTempPath("testseqser.seq"));
  
  fs.delete(file, true);
  Writer writer = SequenceFile.createWriter(fs, conf, file, Long.class,
      String.class);
  
  writer.append(1L, "one");
  writer.append(2L, "two");
  
  writer.close();
  
  Reader reader = new Reader(fs, file, conf);
  assertEquals(1L, reader.next((Object) null));
  assertEquals("one", reader.getCurrentValue((Object) null));
  assertEquals(2L, reader.next((Object) null));
  assertEquals("two", reader.getCurrentValue((Object) null));
  assertNull(reader.next((Object) null));
  reader.close();
  
}
 
Developer: hopshadoop, Project: hops, Lines: 23, Source: TestSequenceFileSerialization.java

Example 10: createWriter

import org.apache.hadoop.io.SequenceFile.Writer; // import the required package/class
@SuppressWarnings("unchecked")
private Writer createWriter(Map<String, String> metadata) throws IOException {
	final Metadata md = new Metadata();

	for (final Entry<String, String> e : metadata.entrySet()) {
		md.set(new Text(e.getKey()), new Text(e.getValue()));
	}
	final Class<K> keyClass = (Class<K>) ((ParameterizedType) getClass().getGenericSuperclass())
			.getActualTypeArguments()[0];
	final Class<V> valueClass = (Class<V>) ((ParameterizedType) getClass().getGenericSuperclass())
			.getActualTypeArguments()[1];

	return SequenceFile.createWriter(fileSystem, config, sequenceFilePath, keyClass, valueClass, compressionType,
			new DefaultCodec(), null,
			md);
}
 
Developer: openimaj, Project: openimaj, Lines: 17, Source: SequenceFileUtility.java
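The getGenericSuperclass() trick above only recovers K and V when a concrete subclass binds them directly in its extends clause; otherwise getActualTypeArguments() yields type variables rather than Class objects. A self-contained illustration of the same reflection pattern, with hypothetical class names independent of the OpenIMAJ code:

import java.lang.reflect.ParameterizedType;

abstract class TypedBase<K, V> {
  @SuppressWarnings("unchecked")
  Class<K> keyClass() {
    // Works only when called on an instance of a subclass that binds K directly.
    return (Class<K>) ((ParameterizedType) getClass().getGenericSuperclass())
        .getActualTypeArguments()[0];
  }
}

class StringLongTyped extends TypedBase<String, Long> {}

// new StringLongTyped().keyClass() returns String.class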

Example 11: run

import org.apache.hadoop.io.SequenceFile.Writer; // import the required package/class
public int run(String[] args) throws Exception {
  // Configuration processed by ToolRunner
  Properties p = new Properties();
  p.load(new FileInputStream(new File(args[0])));

  configure(p);
  String inputDirectoryName = p.getProperty("input.directory");
  File f = new File(inputDirectoryName);
  if (!f.exists() || !f.isDirectory()) {
    logger.error("Invalid input directory: " + inputDirectoryName);
    return -1;
  }
  String outputFileName = p.getProperty("output.file");
  Path outputPath = new Path(outputFileName);
  SequenceFile.Writer writer = SequenceFile.createWriter(getConf(), Writer.keyClass(Text.class),
      Writer.valueClass(Text.class), Writer.file(outputPath));
  for (File document : f.listFiles()) {
    String contents = FileUtils.readFileToString(document);
    writer.append(new Text(document.getName()), new Text(contents));
  }
  writer.close();
  return 0;
}
 
Developer: therelaxist, Project: spring-usc, Lines: 25, Source: InputFileDirectoryLoader.java

Example 12: writeRowIds

import org.apache.hadoop.io.SequenceFile.Writer; // import the required package/class
private void writeRowIds(Writer writer, SegmentReader segmentReader) throws IOException {
  Terms terms = segmentReader.terms(BlurConstants.ROW_ID);
  if (terms == null) {
    return;
  }
  TermsEnum termsEnum = terms.iterator(null);
  BytesRef rowId;
  long s = System.nanoTime();
  while ((rowId = termsEnum.next()) != null) {
    long n = System.nanoTime();
    // Report progress at most once every ten seconds; the original
    // condition (n + _10_SECONDS > s) was always true, defeating the throttle.
    if (n - s > _10_SECONDS) {
      _progressable.progress();
      s = System.nanoTime();
    }
    writer.append(new Text(rowId.utf8ToString()), NullWritable.get());
  }
}
 
Developer: apache, Project: incubator-blur, Lines: 18, Source: MergeSortRowIdMatcher.java

Example 13: storeGenerations

import org.apache.hadoop.io.SequenceFile.Writer; // import the required package/class
private synchronized void storeGenerations() throws IOException {
  FileSystem fileSystem = _path.getFileSystem(_configuration);
  FileStatus[] listStatus = fileSystem.listStatus(_path);
  SortedSet<FileStatus> existing = new TreeSet<FileStatus>(Arrays.asList(listStatus));
  long currentFile;
  if (!existing.isEmpty()) {
    FileStatus last = existing.last();
    currentFile = Long.parseLong(last.getPath().getName());
  } else {
    currentFile = 0;
  }
  Path path = new Path(_path, buffer(currentFile + 1));
  LOG.info("Creating new snapshot file [{0}]", path);
  FSDataOutputStream outputStream = fileSystem.create(path, false);
  Writer writer = SequenceFile.createWriter(_configuration, outputStream, Text.class, LongWritable.class,
      CompressionType.NONE, null);
  for (Entry<String, Long> e : _namesToGenerations.entrySet()) {
    writer.append(new Text(e.getKey()), new LongWritable(e.getValue()));
  }
  writer.close();
  outputStream.close();
  cleanupOldFiles(fileSystem, existing);
}
 
Developer: apache, Project: incubator-blur, Lines: 24, Source: SnapshotIndexDeletionPolicy.java

Example 14: traverseAdd

import org.apache.hadoop.io.SequenceFile.Writer; // import the required package/class
/**
 * Traverse the directory tree and add each regular file to the SequenceFile.
 * @param seq the SequenceFile writer to append to
 * @param pFile the file or directory to traverse
 */
private static void traverseAdd(Writer seq, File pFile) {
	
	if(pFile.isDirectory()) {
		for(File file:pFile.listFiles()) {
			traverseAdd(seq, file);
		}
	} else {
		try {
			addFile(seq, pFile);
		} catch (IOException e) {
			e.printStackTrace();
		}
	}
	
}
 
Developer: bl-dpt, Project: geolint, Lines: 21, Source: SequenceFileLoader.java
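The addFile helper is not shown in this excerpt. A minimal hypothetical sketch, assuming the file name is used as the key and the raw bytes as the value; the real implementation in geolint may differ:

// Hypothetical sketch of the helper used above: key = file name,
// value = raw file contents as a BytesWritable.
private static void addFile(Writer seq, File pFile) throws IOException {
  byte[] bytes = java.nio.file.Files.readAllBytes(pFile.toPath());
  seq.append(new Text(pFile.getName()), new BytesWritable(bytes));
}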

Example 15: main

import org.apache.hadoop.io.SequenceFile.Writer; // import the required package/class
/**
 * @param args
 * @throws IOException 
 */
public static void main(String[] args) throws IOException {
	if (args.length != 2) {
		System.err.println("Usage: ConvertFastaForCloud file.fa outfile.br");
		System.exit(-1);
	}
	
	String infile = args[0];
	String outfile = args[1];
	
	System.err.println("Converting " + infile + " into " + outfile);
	
	JobConf config = new JobConf();
	
	SequenceFile.Writer writer = SequenceFile.createWriter(FileSystem.get(config), config,
			new Path(outfile), IntWritable.class, BytesWritable.class);
	
	convertFile(infile, writer);
	
	writer.close();
	
	System.err.println("min_seq_len: " + min_seq_len);
	System.err.println("max_seq_len: " + max_seq_len);
	System.err.println("Using DNAString version: " + DNAString.VERSION);
}
 
Developer: aws-samples, Project: emr-sample-apps, Lines: 29, Source: ConvertFastaForCloud.java
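To spot-check the conversion, the written pairs can be read back with a SequenceFile.Reader using the same old-style API as the example. A hedged sketch, reusing config and outfile from above:

SequenceFile.Reader reader = new SequenceFile.Reader(FileSystem.get(config),
    new Path(outfile), config);
IntWritable key = new IntWritable();
BytesWritable value = new BytesWritable();
while (reader.next(key, value)) {
  // print each record's key and payload size
  System.out.println(key.get() + " -> " + value.getLength() + " bytes");
}
reader.close();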


Note: The org.apache.hadoop.io.SequenceFile.Writer class examples in this article were compiled by 純淨天空 from GitHub, MSDocs, and other open-source code and documentation platforms. The snippets were selected from open-source projects contributed by their original authors, who retain copyright; consult each project's license before distributing or using the code, and do not republish this article without permission.