

Java GenericRecord.get Method Code Examples

This article collects typical usage examples of the Java method org.apache.avro.generic.GenericRecord.get. If you are wondering what GenericRecord.get does, how to call it, or what real-world usage looks like, the curated examples below may help. You can also explore further usage examples of the containing class, org.apache.avro.generic.GenericRecord.


The following section presents 15 code examples of the GenericRecord.get method, sorted by popularity by default.
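Before turning to the examples, here is a minimal, self-contained usage sketch showing the two common ways to call GenericRecord.get: by field name and by positional index. This is an illustrative sketch only; the file name users.avro and the field name "name" are assumptions for this page, not taken from any of the projects below.

import java.io.File;
import java.io.IOException;
import org.apache.avro.file.DataFileReader;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.DatumReader;

public class GenericRecordGetSketch {
  public static void main(String[] args) throws IOException {
    DatumReader<GenericRecord> datumReader = new GenericDatumReader<GenericRecord>();
    // users.avro is a hypothetical Avro data file; replace with a real path.
    try (DataFileReader<GenericRecord> fileReader =
             new DataFileReader<GenericRecord>(new File("users.avro"), datumReader)) {
      GenericRecord record = null;
      while (fileReader.hasNext()) {
        record = fileReader.next(record);    // reuse the record instance across iterations
        Object byName = record.get("name");  // look a field up by name ("name" is assumed to exist)
        Object byIndex = record.get(0);      // or by its position in the schema
        System.out.println(byName + " / " + byIndex);
      }
    }
  }
}

Note that get returns Object; with the generic reader, string fields come back as org.apache.avro.util.Utf8 (a CharSequence), which is why several examples below call toString() or check CharSequence.class.isInstance(value) before using the value.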

Example 1: convertAvroRecordUseBeforeMap

import org.apache.avro.generic.GenericRecord; // import the package/class this method depends on
public static <T extends Object> PairWrapper<String, Object> convertAvroRecordUseBeforeMap(GenericRecord record, Set<T> noorderKeys) {
    Schema schema = record.getSchema();
    List<Schema.Field> fields = schema.getFields();
    PairWrapper<String, Object> wrapper = new PairWrapper<>();

    for (Schema.Field field : fields) {
        String key = field.name();
        Object value = record.get(key);
        // keys whose ordering does not matter are stored separately as properties
        if (noorderKeys.contains(field.name())) {
            wrapper.addProperties(key, value);
        }
    }

    GenericRecord before = getFromRecord(MessageBodyKey.BEFORE, record);

    Map<String, Object> beforeMap = convert2map(before);

    for (Map.Entry<String, Object> entry : beforeMap.entrySet()) {
        if(!entry.getKey().endsWith(MessageBodyKey.IS_MISSING_SUFFIX)) {
            wrapper.addPair(new Pair<>(entry.getKey(), CharSequence.class.isInstance(entry.getValue())?entry.getValue().toString():entry.getValue()));
        }
    }

    return wrapper;
}
 
Developer ID: BriData, Project: DBus, Lines of code: 27, Source: BoltCommandHandlerHelper.java

Example 2: toSqoopRecord

import org.apache.avro.generic.GenericRecord; // import the package/class this method depends on
protected SqoopRecord toSqoopRecord(GenericRecord record) throws IOException {
  Schema avroSchema = record.getSchema();
  for (Map.Entry<Writable, Writable> e : columnTypes.entrySet()) {
    String columnName = e.getKey().toString();
    String columnType = e.getValue().toString();
    String cleanedCol = ClassWriter.toIdentifier(columnName);
    Schema.Field field = getFieldIgnoreCase(avroSchema, cleanedCol);
    if (null == field) {
      throw new IOException("Cannot find field " + cleanedCol
          + " in Avro schema " + avroSchema);
    }

    Object avroObject = record.get(field.name());
    Object fieldVal = AvroUtil.fromAvro(avroObject, field.schema(), columnType);
    recordImpl.setField(cleanedCol, fieldVal);
  }
  return recordImpl;
}
 
Developer ID: aliyun, Project: aliyun-maxcompute-data-collectors, Lines of code: 19, Source: GenericRecordExportMapper.java

Example 3: testBlobAvroImportInline

import org.apache.avro.generic.GenericRecord; // import the package/class this method depends on
/** Import blob data that is smaller than inline lob limit. Blob data
 * should be saved as Avro bytes.
 * @throws IOException
 * @throws SQLException
 */
public void testBlobAvroImportInline() throws IOException, SQLException {
  String [] types = { getBlobType() };
  String expectedVal = "This is short BLOB data";
  String [] vals = { getBlobInsertStr(expectedVal) };

  createTableWithColTypes(types, vals);

  runImport(getArgv());

  Path outputFile = new Path(getTablePath(), "part-m-00000.avro");
  DataFileReader<GenericRecord> reader = read(outputFile);
  GenericRecord record = reader.next();

  // Verify that blob data is imported as Avro bytes.
  ByteBuffer buf = (ByteBuffer) record.get(getColName(0));
  String returnVal = new String(buf.array());

  assertEquals(getColName(0), expectedVal, returnVal);
}
 
Developer ID: aliyun, Project: aliyun-maxcompute-data-collectors, Lines of code: 25, Source: LobAvroImportTestCase.java

Example 4: testBlobCompressedAvroImportInline

import org.apache.avro.generic.GenericRecord; // import the package/class this method depends on
/**
 * Import blob data that is smaller than inline lob limit and compress with
 * deflate codec. Blob data should be encoded and saved as Avro bytes.
 * @throws IOException
 * @throws SQLException
 */
public void testBlobCompressedAvroImportInline()
    throws IOException, SQLException {
  String [] types = { getBlobType() };
  String expectedVal = "This is short BLOB data";
  String [] vals = { getBlobInsertStr(expectedVal) };

  createTableWithColTypes(types, vals);

  runImport(getArgv("--compression-codec", CodecMap.DEFLATE));

  Path outputFile = new Path(getTablePath(), "part-m-00000.avro");
  DataFileReader<GenericRecord> reader = read(outputFile);
  GenericRecord record = reader.next();

  // Verify that the data block of the Avro file is compressed with deflate
  // codec.
  assertEquals(CodecMap.DEFLATE,
      reader.getMetaString(DataFileConstants.CODEC));

  // Verify that all columns are imported correctly.
  ByteBuffer buf = (ByteBuffer) record.get(getColName(0));
  String returnVal = new String(buf.array());

  assertEquals(getColName(0), expectedVal, returnVal);
}
 
Developer ID: aliyun, Project: aliyun-maxcompute-data-collectors, Lines of code: 32, Source: LobAvroImportTestCase.java

Example 5: applyDiff

import org.apache.avro.generic.GenericRecord; // import the package/class this method depends on
public static GenericRecord applyDiff(GenericRecord avroObj, RecordDiff diff, Schema schema) throws IOException {
    GenericRecord modifiedAvroObj = createGenericRecordWithSchema(schema, avroObj);
    Map<String, Object> diffFields = diff.getDiffFields();
    List<Schema.Field> fields = schema.getFields();

    for (Schema.Field field : fields) {
        if (diffFields.containsKey(field.name())) {
            GenericRecord fieldsValue = (GenericRecord) diffFields.get(field.name());
            Class<? extends GenericRecord> fieldsValueClass = fieldsValue.getClass();

            if (fieldsValueClass.isAssignableFrom(PrimitiveDiff.class)) {
                AvroDiffPrimitive.applyPrimitiveDiff(field, avroObj, fieldsValue, modifiedAvroObj, null);
            } else if (fieldsValueClass.isAssignableFrom(MapDiff.class)) {
                AvroDiffMap.applyMapDiff(field, avroObj, fieldsValue, modifiedAvroObj);
            } else if (fieldsValueClass.isAssignableFrom(ArrayDiff.class)) {
                AvroDiffArray.applyArrayDiff(field, avroObj, fieldsValue, modifiedAvroObj);
            } else if (fieldsValueClass.isAssignableFrom(RecordDiff.class)) {
                GenericRecord recordField = (GenericRecord) modifiedAvroObj.get(field.pos());
                GenericRecord genericRecord = applyDiff(recordField, (RecordDiff) fieldsValue, recordField.getSchema());
                modifiedAvroObj.put(field.pos(), genericRecord);
            } else {
                LOGGER.error("Field from RecordDiff has unknown type.");
            }
        } else {
            modifiedAvroObj.put(field.pos(), avroObj.get(field.pos()));
        }
    }

    return SpecificData.get().deepCopy(schema, modifiedAvroObj);
}
 
Developer ID: atlascon, Project: avro-diff, Lines of code: 31, Source: AvroDiff.java

Example 6: applyPrimitiveDiff

import org.apache.avro.generic.GenericRecord; // import the package/class this method depends on
public static void applyPrimitiveDiff(Schema.Field field, GenericRecord avroObj, Object fieldsValue, Object modifiedObj, Object key) throws IOException {
    ByteBuffer diffValue = ((PrimitiveDiff) fieldsValue).getDiffValue();
    Object newValue = ((PrimitiveDiff) fieldsValue).getNewValue();

    if (newValue != null) {
        if (key != null) {
            ((Map) modifiedObj).put(key, newValue);
        } else {
            ((GenericRecord) modifiedObj).put(field.pos(), newValue);
        }
    } else {
        Object avroObjField = avroObj.get(field.pos());
        byte[] avroObjFieldBytes = field.schema().getType().equals(Schema.Type.STRING) ? ((String) avroObjField).getBytes() : ((byte[]) avroObjField);
        byte[] avroObjFieldModifiedBytes = new GDiffPatcher().patch(avroObjFieldBytes, diffValue.array());
        if (key != null) {
            if (field.schema().getType().equals(Schema.Type.STRING)) {
                ((Map) modifiedObj).put(key, new String(avroObjFieldModifiedBytes, StandardCharsets.UTF_8));
            } else {
                ((Map) modifiedObj).put(key, avroObjFieldModifiedBytes);
            }
        } else {
            if (field.schema().getType().equals(Schema.Type.STRING)) {
                ((GenericRecord) modifiedObj).put(field.pos(), new String(avroObjFieldModifiedBytes, StandardCharsets.UTF_8));
            } else {
                ((GenericRecord) modifiedObj).put(field.pos(), avroObjFieldModifiedBytes);
            }
        }
    }
}
 
Developer ID: atlascon, Project: avro-diff, Lines of code: 30, Source: AvroDiffPrimitive.java

Example 7: convertAvroRecord

import org.apache.avro.generic.GenericRecord; // import the package/class this method depends on
public static <T extends Object> PairWrapper<String, Object> convertAvroRecord(GenericRecord record, Set<T> noorderKeys) {
    Schema schema = record.getSchema();
    List<Schema.Field> fields = schema.getFields();
    PairWrapper<String, Object> wrapper = new PairWrapper<>();

    for (Schema.Field field : fields) {
        String key = field.name();
        Object value = record.get(key);
        // keys whose ordering does not matter are stored separately as properties
        if (noorderKeys.contains(field.name())) {
            wrapper.addProperties(key, value);
        }
    }

    GenericRecord before = getFromRecord(MessageBodyKey.BEFORE, record);
    GenericRecord after = getFromRecord(MessageBodyKey.AFTER, record);

    Map<String, Object> beforeMap = convert2map(before);
    Map<String, Object> afterMap = convert2map(after);

    // overwrite the before values with the after values
    mergeMap(beforeMap, afterMap);

    for (Map.Entry<String, Object> entry : beforeMap.entrySet()) {
        if(!entry.getKey().endsWith(MessageBodyKey.IS_MISSING_SUFFIX)) {
            wrapper.addPair(new Pair<>(entry.getKey(), CharSequence.class.isInstance(entry.getValue())?entry.getValue().toString():entry.getValue()));
        }
    }

    return wrapper;
}
 
Developer ID: BriData, Project: DBus, Lines of code: 32, Source: BoltCommandHandlerHelper.java

Example 8: validateAvroFile

import org.apache.avro.generic.GenericRecord; // import the package/class this method depends on
public void validateAvroFile(File file) throws IOException {
  // read the events back using GenericRecord
  DatumReader<GenericRecord> reader = new GenericDatumReader<GenericRecord>();
  DataFileReader<GenericRecord> fileReader =
      new DataFileReader<GenericRecord>(file, reader);
  GenericRecord record = new GenericData.Record(fileReader.getSchema());
  int numEvents = 0;
  while (fileReader.hasNext()) {
    fileReader.next(record);
    ByteBuffer body = (ByteBuffer) record.get("body");
    CharsetDecoder decoder = Charsets.UTF_8.newDecoder();
    String bodyStr = decoder.decode(body).toString();
    System.out.println(bodyStr);
    numEvents++;
  }
  fileReader.close();
  Assert.assertEquals("Should have found a total of 3 events", 3, numEvents);
}
 
Developer ID: moueimei, Project: flume-release-1.7.0, Lines of code: 19, Source: TestFlumeEventAvroEventSerializer.java

Example 9: verifyOutputAvroFiles

import org.apache.avro.generic.GenericRecord; // import the package/class this method depends on
private void verifyOutputAvroFiles(FileSystem fs, Configuration conf, String dir, String prefix,
                                   List<String> bodies) throws IOException {
  int found = 0;
  int expected = bodies.size();
  for (String outputFile : getAllFiles(dir)) {
    String name = (new File(outputFile)).getName();
    if (name.startsWith(prefix)) {
      FSDataInputStream input = fs.open(new Path(outputFile));
      DatumReader<GenericRecord> reader = new GenericDatumReader<GenericRecord>();
      DataFileStream<GenericRecord> avroStream =
          new DataFileStream<GenericRecord>(input, reader);
      GenericRecord record = new GenericData.Record(avroStream.getSchema());
      while (avroStream.hasNext()) {
        avroStream.next(record);
        ByteBuffer body = (ByteBuffer) record.get("body");
        CharsetDecoder decoder = Charsets.UTF_8.newDecoder();
        String bodyStr = decoder.decode(body).toString();
        LOG.debug("Removing event: {}", bodyStr);
        bodies.remove(bodyStr);
        found++;
      }
      avroStream.close();
      input.close();
    }
  }
  Assert.assertTrue("Found = " + found + ", Expected = "  +
      expected + ", Left = " + bodies.size() + " " + bodies,
          bodies.size() == 0);
}
 
Developer ID: moueimei, Project: flume-release-1.7.0, Lines of code: 30, Source: TestHDFSEventSink.java

Example 10: toSqoopRecord

import org.apache.avro.generic.GenericRecord; // import the package/class this method depends on
private SqoopRecord toSqoopRecord(GenericRecord genericRecord) throws IOException {
  Schema avroSchema = genericRecord.getSchema();
  for (Schema.Field field : avroSchema.getFields()) {
    Pair<String, String> sqoopRecordField = sqoopRecordFields.get(field.name().toLowerCase());
    if (null == sqoopRecordField) {
      throw new IOException("Cannot find field '" + field.name() + "' in fields of user class"
          + sqoopRecordImpl.getClass().getName() + ". Fields are: "
          + Arrays.deepToString(sqoopRecordFields.values().toArray()));
    }
    Object avroObject = genericRecord.get(field.name());
    Object fieldVal = AvroUtil.fromAvro(avroObject, field.schema(), sqoopRecordField.value());
    sqoopRecordImpl.setField(sqoopRecordField.key(), fieldVal);
  }
  return sqoopRecordImpl;
}
 
Developer ID: aliyun, Project: aliyun-maxcompute-data-collectors, Lines of code: 16, Source: MergeAvroMapper.java

Example 11: convertGenericRecordToArray

import org.apache.avro.generic.GenericRecord; // import the package/class this method depends on
private static Object[] convertGenericRecordToArray(GenericRecord record) {
  Object[] result = new Object[record.getSchema().getFields().size()];
  for (int i = 0; i < result.length; i++) {
    result[i] = record.get(i);
  }
  return result;
}
 
Developer ID: aliyun, Project: aliyun-maxcompute-data-collectors, Lines of code: 8, Source: TestHiveImport.java

Example 12: testMultiTableImportAsParquetFormat

import org.apache.avro.generic.GenericRecord; // import the package/class this method depends on
public void testMultiTableImportAsParquetFormat() throws IOException {
  String [] argv = getArgv(new String[]{"--as-parquetfile"}, null);
  runImport(new ImportAllTablesTool(), argv);

  Path warehousePath = new Path(this.getWarehouseDir());
  int i = 0;
  for (String tableName : this.tableNames) {
    Path tablePath = new Path(warehousePath, tableName);
    Dataset dataset = Datasets.load("dataset:file:" + tablePath);

    // dequeue the expected value for this table. This
    // list has the same order as the tableNames list.
    String expectedVal = Integer.toString(i++) + ","
        + this.expectedStrings.get(0);
    this.expectedStrings.remove(0);

    DatasetReader<GenericRecord> reader = dataset.newReader();
    try {
      GenericRecord record = reader.next();
      String line = record.get(0) + "," + record.get(1);
      assertEquals("Table " + tableName + " expected a different string",
          expectedVal, line);
      assertFalse(reader.hasNext());
    } finally {
      reader.close();
    }
  }
}
 
Developer ID: aliyun, Project: aliyun-maxcompute-data-collectors, Lines of code: 29, Source: TestAllTables.java

Example 13: createDiff

import org.apache.avro.generic.GenericRecord; // import the package/class this method depends on
public static RecordDiff createDiff(GenericRecord avroObj1, GenericRecord avroObj2, Schema schema) throws IOException {
    if (avroObj1 != null) {
        Preconditions.checkArgument(avroObj1.getSchema().equals(avroObj2.getSchema()), "Schemas of Avro objects must match.");
    }
    Preconditions.checkArgument(schema.equals(avroObj2.getSchema()), "Schema and the Avro object's schema must match.");

    Map<String, Object> diffField = Maps.newHashMap();
    List<Schema.Field> fields = schema.getFields();

    for (Schema.Field field : fields) {
        Type fieldTypeAvroObj1 = field.schema().getType();
        Type fieldTypeAvroObj2 = field.schema().getType();

        if (fieldTypeAvroObj2.equals(Type.UNION)) {
            fieldTypeAvroObj2 = getUsedTypeFromUnion(avroObj2.get(field.pos()));
            if (avroObj1 != null) {
                fieldTypeAvroObj1 = getUsedTypeFromUnion(avroObj1.get(field.pos()));
            } else {
                fieldTypeAvroObj1 = null;
            }
        }

        Object avroObj1Field = null;
        if (avroObj1 != null) {
            avroObj1Field = avroObj1.get(field.pos());
        }
        Object avroObj2Field = avroObj2.get(field.pos());

        if (avroObj1Field == null || !avroObj1Field.equals(avroObj2Field)) {
            if (fieldTypeAvroObj2.equals(Type.STRING) || fieldTypeAvroObj2.equals(Type.BOOLEAN) || fieldTypeAvroObj2.equals(Type.BYTES) ||
                    fieldTypeAvroObj2.equals(Type.DOUBLE) || fieldTypeAvroObj2.equals(Type.FLOAT) || fieldTypeAvroObj2.equals(Type.INT) ||
                    fieldTypeAvroObj2.equals(Type.LONG) || fieldTypeAvroObj2.equals(Type.ENUM)) {
                PrimitiveDiff primitiveDiff = AvroDiffPrimitive.createPrimitiveDiff(avroObj1Field, avroObj2Field, fieldTypeAvroObj1, fieldTypeAvroObj2);
                diffField.put(field.name(), primitiveDiff);
            } else if (fieldTypeAvroObj2.equals(Type.MAP)) {
                MapDiff mapDiff = AvroDiffMap.createMapDiff(avroObj1Field, avroObj2Field);
                diffField.put(field.name(), mapDiff);
            } else if (fieldTypeAvroObj2.equals(Type.ARRAY)) {
                ArrayDiff arrayDiff = AvroDiffArray.createArrayDiff(avroObj1Field, avroObj2Field);
                diffField.put(field.name(), arrayDiff);
            } else if (fieldTypeAvroObj2.equals(Type.RECORD)) {
                RecordDiff diff = createDiff((GenericRecord) avroObj1Field, (GenericRecord) avroObj2Field, ((GenericRecord) avroObj2Field).getSchema());
                diffField.put(field.name(), diff);
            } else {
                LOGGER.error("Schema field has unknown type.");
            }
        }
    }

    return new RecordDiff(diffField);
}
 
Developer ID: atlascon, Project: avro-diff, Lines of code: 52, Source: AvroDiff.java

Example 14: getFromRecord

import org.apache.avro.generic.GenericRecord; // import the package/class this method depends on
private static <T> T getFromRecord(String key, GenericRecord record) {
    return (T) record.get(key);
}
 
Developer ID: BriData, Project: DBus, Lines of code: 4, Source: BoltCommandHandlerHelper.java

Example 15: test

import org.apache.avro.generic.GenericRecord; // import the package/class this method depends on
@Test
public void test() throws FileNotFoundException, IOException {
  // Snappy currently broken on Mac in OpenJDK 7 per FLUME-2012
  Assume.assumeTrue(!"Mac OS X".equals(System.getProperty("os.name")) ||
                    !System.getProperty("java.version").startsWith("1.7."));

  //Schema schema = new Schema.Parser().parse(schemaFile);

  // create the file, write some data
  OutputStream out = new FileOutputStream(testFile);
  String builderName = SyslogAvroEventSerializer.Builder.class.getName();

  Context ctx = new Context();
  ctx.put("syncInterval", "4096");
  ctx.put("compressionCodec", "snappy");

  EventSerializer serializer =
      EventSerializerFactory.getInstance(builderName, ctx, out);
  serializer.afterCreate(); // must call this when a file is newly created

  List<Event> events = generateSyslogEvents();
  for (Event e : events) {
    serializer.write(e);
  }
  serializer.flush();
  serializer.beforeClose();
  out.flush();
  out.close();

  // now try to read the file back

  DatumReader<GenericRecord> reader = new GenericDatumReader<GenericRecord>();
  DataFileReader<GenericRecord> fileReader =
      new DataFileReader<GenericRecord>(testFile, reader);

  GenericRecord record = new GenericData.Record(fileReader.getSchema());
  int numEvents = 0;
  while (fileReader.hasNext()) {
    fileReader.next(record);
    int facility = (Integer) record.get("facility");
    int severity = (Integer) record.get("severity");
    long timestamp = (Long) record.get("timestamp");
    String hostname = record.get("hostname").toString();
    String message = record.get("message").toString();

    Assert.assertEquals("Facility should be 1", 1, facility);
    System.out.println(timestamp + ": " + message);
    numEvents++;
  }

  fileReader.close();
  Assert.assertEquals("Should have found a total of 3 events", 3, numEvents);

  FileUtils.forceDelete(testFile);
}
 
Developer ID: moueimei, Project: flume-release-1.7.0, Lines of code: 56, Source: TestSyslogAvroEventSerializer.java


Note: The org.apache.avro.generic.GenericRecord.get method examples in this article were compiled by 純淨天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by the community; copyright of the source code remains with the original authors. Refer to each project's License before distributing or using the code; do not reproduce this article without permission.