

Java CodecFactory.fromString Method Code Examples

This article collects and organizes typical usage examples of the Java method org.apache.avro.file.CodecFactory.fromString. If you are wondering what exactly CodecFactory.fromString does, how to call it, or where to find working examples, the curated samples below may help. You can also explore further usage examples for the enclosing class, org.apache.avro.file.CodecFactory.


The sections below present 14 code examples of the CodecFactory.fromString method, ordered roughly by popularity.
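Before the collected examples, here is a minimal, self-contained sketch of the method's basic contract: CodecFactory.fromString resolves a codec name such as "null", "deflate", "snappy", "bzip2", or "xz" into a CodecFactory, and throws AvroRuntimeException for unrecognized names. The record schema and file name below are illustrative only, and codecs like snappy or xz require their compression libraries on the classpath.

import java.io.File;
import java.io.IOException;

import org.apache.avro.Schema;
import org.apache.avro.file.CodecFactory;
import org.apache.avro.file.DataFileWriter;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.GenericRecord;

public class FromStringDemo {
  public static void main(String[] args) throws IOException {
    Schema schema = new Schema.Parser().parse(
        "{\"type\":\"record\",\"name\":\"Rec\","
        + "\"fields\":[{\"name\":\"msg\",\"type\":\"string\"}]}");

    // Resolve a codec by name; an unknown name raises AvroRuntimeException.
    CodecFactory codec = CodecFactory.fromString("deflate");

    DataFileWriter<GenericRecord> writer =
        new DataFileWriter<GenericRecord>(new GenericDatumWriter<GenericRecord>(schema));
    writer.setCodec(codec); // must be set before create()
    writer.create(schema, new File("demo.avro"));

    GenericRecord record = new GenericData.Record(schema);
    record.put("msg", "hello");
    writer.append(record);
    writer.close();
  }
}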

Example 1: configure

import org.apache.avro.file.CodecFactory; // import the dependent package/class for this method
@Override
public void configure(Context context) {

  int syncIntervalBytes =
      context.getInteger(SYNC_INTERVAL_BYTES, DEFAULT_SYNC_INTERVAL_BYTES);
  String compressionCodec =
      context.getString(COMPRESSION_CODEC, DEFAULT_COMPRESSION_CODEC);

  writer = new ReflectDatumWriter<T>(getSchema());
  dataFileWriter = new DataFileWriter<T>(writer);

  dataFileWriter.setSyncInterval(syncIntervalBytes);

  try {
    CodecFactory codecFactory = CodecFactory.fromString(compressionCodec);
    dataFileWriter.setCodec(codecFactory);
  } catch (AvroRuntimeException e) {
    logger.warn("Unable to instantiate avro codec with name (" +
        compressionCodec + "). Compression disabled. Exception follows.", e);
  }
}
 
Developer: moueimei, Project: flume-release-1.7.0, Lines: 22, Source: AbstractAvroEventSerializer.java

Example 2: getCompressionCodec

import org.apache.avro.file.CodecFactory; // import the dependent package/class for this method
private CodecFactory getCompressionCodec(Map<String, String> conf) {
	if (getBoolean(conf, CONF_COMPRESS, false)) {
		int deflateLevel = getInt(conf, CONF_DEFLATE_LEVEL, CodecFactory.DEFAULT_DEFLATE_LEVEL);
		int xzLevel = getInt(conf, CONF_XZ_LEVEL, CodecFactory.DEFAULT_XZ_LEVEL);

		String outputCodec = conf.get(CONF_COMPRESS_CODEC);

		if (DataFileConstants.DEFLATE_CODEC.equals(outputCodec)) {
			return CodecFactory.deflateCodec(deflateLevel);
		} else if (DataFileConstants.XZ_CODEC.equals(outputCodec)) {
			return CodecFactory.xzCodec(xzLevel);
		} else {
			return CodecFactory.fromString(outputCodec);
		}
	}
	return CodecFactory.nullCodec();
}
 
Developer: axbaretto, Project: flink, Lines: 18, Source: AvroKeyValueSinkWriter.java

Example 3: configureDataFileWriter

import org.apache.avro.file.CodecFactory; // import the dependent package/class for this method
static void configureDataFileWriter(DataFileWriter<GenericData.Record> writer,
    JobConf job) throws UnsupportedEncodingException {
  if (FileOutputFormat.getCompressOutput(job)) {
    int level = job.getInt(DEFLATE_LEVEL_KEY,
        DEFAULT_DEFLATE_LEVEL);
    String codecName = job.get(AvroJob.OUTPUT_CODEC, DEFLATE_CODEC);
    CodecFactory factory = codecName.equals(DEFLATE_CODEC)
      ? CodecFactory.deflateCodec(level)
      : CodecFactory.fromString(codecName);
    writer.setCodec(factory);
  }

  writer.setSyncInterval(job.getInt(SYNC_INTERVAL_KEY,
      DEFAULT_SYNC_INTERVAL));

  // copy metadata from job
  for (Map.Entry<String,String> e : job) {
    if (e.getKey().startsWith(AvroJob.TEXT_PREFIX))
      writer.setMeta(e.getKey().substring(AvroJob.TEXT_PREFIX.length()),
                     e.getValue());
    if (e.getKey().startsWith(AvroJob.BINARY_PREFIX))
      writer.setMeta(e.getKey().substring(AvroJob.BINARY_PREFIX.length()),
                     URLDecoder.decode(e.getValue(), "ISO-8859-1")
                     .getBytes("ISO-8859-1"));
  }
}
 
Developer: sigmoidanalytics, Project: spork-streaming, Lines: 27, Source: AvroRecordWriter.java

Example 4: getRecordWriter

import org.apache.avro.file.CodecFactory; // import the dependent package/class for this method
@Override
public RecordWriter<NullWritable, Object> getRecordWriter(TaskAttemptContext context) throws IOException, InterruptedException {

    if (schema == null)
        throw new IOException("Must provide a schema");
    
    Configuration conf = context.getConfiguration();

    DataFileWriter<Object> writer = new DataFileWriter<Object>(new PigAvroDatumWriter(schema));

    if (FileOutputFormat.getCompressOutput(context)) {
        int level = conf.getInt(DEFLATE_LEVEL_KEY, DEFAULT_DEFLATE_LEVEL);
        String codecName = conf.get(OUTPUT_CODEC, DEFLATE_CODEC);
        CodecFactory factory = codecName.equals(DEFLATE_CODEC)
            ? CodecFactory.deflateCodec(level)
            : CodecFactory.fromString(codecName);
        writer.setCodec(factory);
    }

    Path path = getDefaultWorkFile(context, EXT);
    writer.create(schema, path.getFileSystem(conf).create(path));
    return new PigAvroRecordWriter(writer);
}
 
Developer: sigmoidanalytics, Project: spork-streaming, Lines: 24, Source: PigAvroOutputFormat.java

Example 5: configureDataFileWriter

import org.apache.avro.file.CodecFactory; // import the dependent package/class for this method
static <K> void configureDataFileWriter(DataFileWriter<K> writer,
    JobConf job) throws UnsupportedEncodingException {

    if (FileOutputFormat.getCompressOutput(job)) {
        int level = job.getInt(org.apache.avro.mapred.AvroOutputFormat.DEFLATE_LEVEL_KEY,
                org.apache.avro.mapred.AvroOutputFormat.DEFAULT_DEFLATE_LEVEL);
        String codecName = job.get(AvroJob.OUTPUT_CODEC, DEFLATE_CODEC);
        CodecFactory factory = codecName.equals(DEFLATE_CODEC) ?
            CodecFactory.deflateCodec(level) : CodecFactory.fromString(codecName);
        writer.setCodec(factory);
    }

    writer.setSyncInterval(job.getInt(org.apache.avro.mapred.AvroOutputFormat.SYNC_INTERVAL_KEY,
            DEFAULT_SYNC_INTERVAL));

    // copy metadata from job
    for (Map.Entry<String,String> e : job) {
        if (e.getKey().startsWith(AvroJob.TEXT_PREFIX))
            writer.setMeta(e.getKey().substring(AvroJob.TEXT_PREFIX.length()),e.getValue());
        if (e.getKey().startsWith(AvroJob.BINARY_PREFIX))
            writer.setMeta(e.getKey().substring(AvroJob.BINARY_PREFIX.length()),
                   URLDecoder.decode(e.getValue(), "ISO-8859-1")
                   .getBytes("ISO-8859-1"));
    }
}
 
Developer: whale2, Project: iow-hadoop-streaming, Lines: 26, Source: AvroAsJsonOutputFormat.java

Example 6: WriteAvroToByteArray

import org.apache.avro.file.CodecFactory; // import the dependent package/class for this method
public WriteAvroToByteArray(CommandBuilder builder, Config config, Command parent, Command child, MorphlineContext context) {
  super(builder, config, parent, child, context);      
  this.format = new Validator<Format>().validateEnum(
      config,
      getConfigs().getString(config, "format", Format.container.toString()),
      Format.class);
  
  String codec = getConfigs().getString(config, "codec", null);
  if (codec == null) {
    this.codecFactory = null;
  } else {
    this.codecFactory = CodecFactory.fromString(codec);
  }
  
  Config metadataConfig = getConfigs().getConfig(config, "metadata", ConfigFactory.empty());
  for (Map.Entry<String, Object> entry : new Configs().getEntrySet(metadataConfig)) {
    this.metadata.put(entry.getKey(), entry.getValue().toString());
  }
  
  validateArguments();
}
 
Developer: cloudera, Project: cdk, Lines: 22, Source: WriteAvroToByteArrayBuilder.java

Example 7: initialize

import org.apache.avro.file.CodecFactory; // import the dependent package/class for this method
private void initialize(Event event) throws IOException {
  Schema schema = null;
  String schemaUrl = event.getHeaders().get(AVRO_SCHEMA_URL_HEADER);
  String schemaString = event.getHeaders().get(AVRO_SCHEMA_LITERAL_HEADER);

  if (schemaUrl != null) { // if URL_HEADER is there then use it
    schema = schemaCache.get(schemaUrl);
    if (schema == null) {
      schema = loadFromUrl(schemaUrl);
      schemaCache.put(schemaUrl, schema);
    }
  } else if (schemaString != null) { // fallback to LITERAL_HEADER if it was there
    schema = new Schema.Parser().parse(schemaString);
  } else if (staticSchemaURL != null) {   // fallback to static url if it was there
    schema = schemaCache.get(staticSchemaURL);
    if (schema == null) {
      schema = loadFromUrl(staticSchemaURL);
      schemaCache.put(staticSchemaURL, schema);
    }
  } else { // no other options so giving up
    throw new FlumeException("Could not find schema for event " + event);
  }

  writer = new GenericDatumWriter<Object>(schema);
  dataFileWriter = new DataFileWriter<Object>(writer);

  dataFileWriter.setSyncInterval(syncIntervalBytes);

  try {
    CodecFactory codecFactory = CodecFactory.fromString(compressionCodec);
    dataFileWriter.setCodec(codecFactory);
  } catch (AvroRuntimeException e) {
    logger.warn("Unable to instantiate avro codec with name (" +
        compressionCodec + "). Compression disabled. Exception follows.", e);
  }

  dataFileWriter.create(schema, out);
}
 
Developer: moueimei, Project: flume-release-1.7.0, Lines: 39, Source: AvroEventSerializer.java
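
This serializer resolves the schema from event headers before falling back to a static URL. As a hedged illustration (the header-name literals below are the values Flume documents for its Avro event serializer; verify them against your Flume version), a client could attach the schema location like this:

import java.util.HashMap;
import java.util.Map;

import org.apache.flume.Event;
import org.apache.flume.event.EventBuilder;

public class SchemaHeaderDemo {
  public static Event buildEvent() {
    Map<String, String> headers = new HashMap<String, String>();
    // Point the serializer at a schema file ...
    headers.put("flume.avro.schema.url", "hdfs://namenode/schemas/rec.avsc");
    // ... or inline the schema JSON instead:
    // headers.put("flume.avro.schema.literal", "{\"type\":\"record\", ...}");
    return EventBuilder.withBody("payload".getBytes(), headers);
  }
}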

Example 8: initialize

import org.apache.avro.file.CodecFactory; // import the dependent package/class for this method
private void initialize(Event event) throws IOException {
  Schema schema = null;
  String schemaUrl = event.getHeaders().get(AVRO_SCHEMA_URL_HEADER);
  if (schemaUrl != null) {
    schema = schemaCache.get(schemaUrl);
    if (schema == null) {
      schema = loadFromUrl(schemaUrl);
      schemaCache.put(schemaUrl, schema);
    }
  }
  if (schema == null) {
    String schemaString = event.getHeaders().get(AVRO_SCHEMA_LITERAL_HEADER);
    if (schemaString == null) {
      throw new FlumeException("Could not find schema for event " + event);
    }
    schema = new Schema.Parser().parse(schemaString);
  }

  writer = new GenericDatumWriter<Object>(schema);
  dataFileWriter = new DataFileWriter<Object>(writer);

  dataFileWriter.setSyncInterval(syncIntervalBytes);

  try {
    CodecFactory codecFactory = CodecFactory.fromString(compressionCodec);
    dataFileWriter.setCodec(codecFactory);
  } catch (AvroRuntimeException e) {
    logger.warn("Unable to instantiate avro codec with name (" +
        compressionCodec + "). Compression disabled. Exception follows.", e);
  }

  dataFileWriter.create(schema, out);
}
 
Developer: Transwarp-DE, Project: Transwarp-Sample-Code, Lines: 34, Source: AvroEventSerializer.java

Example 9: configureDataFileWriter

import org.apache.avro.file.CodecFactory; // import the dependent package/class for this method
static <T> void configureDataFileWriter(DataFileWriter<T> writer,
  TaskAttemptContext context) throws UnsupportedEncodingException {
  if (FileOutputFormat.getCompressOutput(context)) {
    int level = context.getConfiguration()
      .getInt(DEFLATE_LEVEL_KEY, DEFAULT_DEFLATE_LEVEL);
    String codecName = context.getConfiguration()
      .get(org.apache.avro.mapred.AvroJob.OUTPUT_CODEC, DEFLATE_CODEC);
    CodecFactory factory =
      codecName.equals(DEFLATE_CODEC) ? CodecFactory.deflateCodec(level)
        : CodecFactory.fromString(codecName);
    writer.setCodec(factory);
  }

  writer.setSyncInterval(context.getConfiguration()
    .getInt(SYNC_INTERVAL_KEY, DEFAULT_SYNC_INTERVAL));

  // copy metadata from job
  for (Map.Entry<String, String> e : context.getConfiguration()) {
    if (e.getKey().startsWith(org.apache.avro.mapred.AvroJob.TEXT_PREFIX)) {
      writer.setMeta(e.getKey()
        .substring(org.apache.avro.mapred.AvroJob.TEXT_PREFIX.length()),
        e.getValue());
    }
    if (e.getKey().startsWith(org.apache.avro.mapred.AvroJob.BINARY_PREFIX)) {
      writer.setMeta(e.getKey()
        .substring(org.apache.avro.mapred.AvroJob.BINARY_PREFIX.length()),
        URLDecoder.decode(e.getValue(), "ISO-8859-1").getBytes("ISO-8859-1"));
    }
  }
}
 
Developer: aliyun, Project: aliyun-maxcompute-data-collectors, Lines: 31, Source: AvroOutputFormat.java

Example 10: readExternal

import org.apache.avro.file.CodecFactory; // import the dependent package/class for this method
@Override
public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
  final String codecStr = in.readUTF();

  switch (codecStr) {
    case NULL_CODEC:
    case SNAPPY_CODEC:
    case BZIP2_CODEC:
      codecFactory = CodecFactory.fromString(codecStr);
      return;
  }

  Matcher deflateMatcher = deflatePattern.matcher(codecStr);
  if (deflateMatcher.find()) {
    codecFactory = CodecFactory.deflateCodec(
        Integer.parseInt(deflateMatcher.group("level")));
    return;
  }

  Matcher xzMatcher = xzPattern.matcher(codecStr);
  if (xzMatcher.find()) {
    codecFactory = CodecFactory.xzCodec(
        Integer.parseInt(xzMatcher.group("level")));
    return;
  }

  throw new IllegalStateException(codecStr + " is not supported");
}
 
Developer: apache, Project: beam, Lines: 29, Source: SerializableAvroCodecFactory.java
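
The deflatePattern and xzPattern fields used above fall outside the excerpt. A plausible reconstruction, assuming (this is not verbatim Beam code) that they match the serialized names CodecFactory produces, such as "deflate-6" or "xz-9", with a named level group:

import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.apache.avro.file.DataFileConstants;

class CodecNamePatterns {
  // Hypothetical reconstructions of the fields referenced in readExternal;
  // deflate allows a negative (default) level, hence the optional minus sign.
  static final Pattern deflatePattern =
      Pattern.compile(DataFileConstants.DEFLATE_CODEC + "-(?<level>-?\\d)");
  static final Pattern xzPattern =
      Pattern.compile(DataFileConstants.XZ_CODEC + "-(?<level>\\d)");

  public static void main(String[] args) {
    Matcher m = deflatePattern.matcher("deflate-6");
    if (m.find()) {
      System.out.println("deflate level = " + m.group("level")); // prints 6
    }
  }
}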

Example 11: testDefaultCodecsIn

import org.apache.avro.file.CodecFactory; // import the dependent package/class for this method
@Test
public void testDefaultCodecsIn() throws Exception {
  for (String codec : avroCodecs) {
    SerializableAvroCodecFactory codecFactory = new SerializableAvroCodecFactory(
        CodecFactory.fromString(codec));

    assertEquals((CodecFactory.fromString(codec).toString()), codecFactory.getCodec().toString());
  }
}
 
Developer: apache, Project: beam, Lines: 10, Source: SerializableAvroCodecFactoryTest.java

Example 12: testDefaultCodecsSerDe

import org.apache.avro.file.CodecFactory; // import the dependent package/class for this method
@Test
public void testDefaultCodecsSerDe() throws Exception {
  for (String codec : avroCodecs) {
    SerializableAvroCodecFactory codecFactory = new SerializableAvroCodecFactory(
        CodecFactory.fromString(codec));

    SerializableAvroCodecFactory serdeC = SerializableUtils.clone(codecFactory);

    assertEquals(CodecFactory.fromString(codec).toString(), serdeC.getCodec().toString());
  }
}
 
Developer: apache, Project: beam, Lines: 12, Source: SerializableAvroCodecFactoryTest.java

Example 13: getCodecFactory

import org.apache.avro.file.CodecFactory; // import the dependent package/class for this method
/**
 * Creates a {@link CodecFactory} based on the specified codec name and deflate level. If codecName is absent, then
 * a {@link CodecFactory#deflateCodec(int)} is returned. Otherwise the codecName is converted into a
 * {@link CodecFactory} via the {@link CodecFactory#fromString(String)} method.
 *
 * @param codecName the name of the codec to use (e.g. deflate, snappy, xz, etc.).
 * @param deflateLevel must be an integer from [0-9], and is only applicable if the codecName is "deflate".
 * @return a {@link CodecFactory}.
 */
public static CodecFactory getCodecFactory(Optional<String> codecName, Optional<String> deflateLevel) {
  if (!codecName.isPresent()) {
    return CodecFactory.deflateCodec(ConfigurationKeys.DEFAULT_DEFLATE_LEVEL);
  } else if (codecName.get().equalsIgnoreCase(DataFileConstants.DEFLATE_CODEC)) {
    if (!deflateLevel.isPresent()) {
      return CodecFactory.deflateCodec(ConfigurationKeys.DEFAULT_DEFLATE_LEVEL);
    } else {
      return CodecFactory.deflateCodec(Integer.parseInt(deflateLevel.get()));
    }
  } else {
    return CodecFactory.fromString(codecName.get());
  }
}
 
Developer: Hanmourang, Project: Gobblin, Lines: 23, Source: WriterUtils.java
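
A hedged sketch of how the helper's three branches behave, assuming the Optional in the snippet is Guava's (Gobblin commonly uses it) and that the WriterUtils class shown above is on the classpath; the codec names and level are illustrative:

import com.google.common.base.Optional;
import org.apache.avro.file.CodecFactory;

public class GetCodecFactoryDemo {
  public static void main(String[] args) {
    // No codec name: falls back to deflate at the configured default level.
    CodecFactory byDefault = WriterUtils.getCodecFactory(
        Optional.<String>absent(), Optional.<String>absent());
    // "deflate" with an explicit level.
    CodecFactory deflate7 = WriterUtils.getCodecFactory(
        Optional.of("deflate"), Optional.of("7"));
    // Any other name is resolved through CodecFactory.fromString.
    CodecFactory snappy = WriterUtils.getCodecFactory(
        Optional.of("snappy"), Optional.<String>absent());
    System.out.println(byDefault + " / " + deflate7 + " / " + snappy);
  }
}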

Example 14: getRecordWriter

import org.apache.avro.file.CodecFactory; // import the dependent package/class for this method
@Override
public RecordWriter<Text, Text> getRecordWriter(FileSystem ignored, JobConf job, String name,
    Progressable progress) throws IOException {
  if (schema == null) {
    SchemaLoader loader = new SchemaLoader(job);
    this.schema = loader.load(
      job.get(SCHEMA_LITERAL), job.get(SCHEMA_URL), job.get(SCHEMA_TYPE_NAME));
    this.converter = new JsonConverter(schema);
    this.readKey = job.getBoolean(READ_KEY, true);
  }
  
  DataFileWriter<GenericRecord> writer = new DataFileWriter<GenericRecord>(
      new GenericDatumWriter<GenericRecord>(schema));
  if (getCompressOutput(job)) {
    int level = job.getInt(AvroOutputFormat.DEFLATE_LEVEL_KEY, AvroOutputFormat.DEFAULT_DEFLATE_LEVEL);
    String codecName = job.get(AvroJob.CONF_OUTPUT_CODEC, 
        org.apache.avro.file.DataFileConstants.DEFLATE_CODEC);
    CodecFactory codec = codecName.equals(DataFileConstants.DEFLATE_CODEC)
        ? CodecFactory.deflateCodec(level)
        : CodecFactory.fromString(codecName);
    writer.setCodec(codec);
  }
  writer.setSyncInterval(job.getInt(AvroOutputFormat.SYNC_INTERVAL_KEY,
      DataFileConstants.DEFAULT_SYNC_INTERVAL));
  
  Path path = FileOutputFormat.getTaskOutputPath(job, name + AvroOutputFormat.EXT);
  writer.create(schema, path.getFileSystem(job).create(path));
  
  return new AvroAsJSONRecordWriter(writer, converter, readKey);
}
 
Developer: openaire, Project: iis, Lines: 31, Source: AvroAsJSONOutputFormat.java


Note: The org.apache.avro.file.CodecFactory.fromString examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets are drawn from community-contributed open-source projects; copyright of the source code remains with the original authors, and distribution and use are subject to each project's License. Do not republish without permission.