This article collects typical usage examples of ResourceSchema.ResourceFieldSchema in Java (a nested class of org.apache.pig.ResourceSchema). If you are wondering what ResourceSchema.ResourceFieldSchema is for, how to use it, or what real code that uses it looks like, the curated examples below may help. You can also read more about the enclosing class org.apache.pig.ResourceSchema.
The following presents 6 code examples of ResourceSchema.ResourceFieldSchema, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Java code examples.
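Before the collected examples, here is a minimal sketch (not taken from any of the projects below; the field names and types are invented) showing the basic pattern most of them share: wrap a Pig Schema in a ResourceSchema and walk its ResourceFieldSchema entries.
import org.apache.pig.ResourceSchema;
import org.apache.pig.data.DataType;
import org.apache.pig.impl.logicalLayer.schema.Schema;

public class ResourceFieldSchemaDemo {
    public static void main(String[] args) throws Exception {
        // Build a simple two-column Pig schema by hand.
        Schema pigSchema = new Schema();
        pigSchema.add(new Schema.FieldSchema("name", DataType.CHARARRAY));
        pigSchema.add(new Schema.FieldSchema("age", DataType.INTEGER));

        // Wrap it as a ResourceSchema and inspect each ResourceFieldSchema.
        ResourceSchema rs = new ResourceSchema(pigSchema);
        for (ResourceSchema.ResourceFieldSchema field : rs.getFields()) {
            System.out.println(field.getName() + " : " + DataType.findTypeName(field.getType()));
        }
    }
}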
Example 1: checkSchema
import org.apache.pig.ResourceSchema; // import the package/class this example depends on
@Override
public void checkSchema(ResourceSchema s) throws IOException {
    if (myUDFContextSignature == null) {
        throw new IllegalStateException("No UDFContext Signature provided to this UDF! Cannot store field names!");
    }
    ResourceSchema.ResourceFieldSchema[] fields = s.getFields();
    if (fields == null || fields.length == 0) {
        throw new IOException("Input field names not available from schema during front-end processing! " +
            "FusionIndexPipelineStoreFunc must have field names!");
    }
    List<String> fieldNames = new ArrayList<String>(fields.length);
    for (int f = 0; f < fields.length; f++) {
        fieldNames.add(fields[f].getName());
    }
    // Save the field names from the schema in the UDFContext, keyed by our
    // UDFContext Signature so we don't step on other FusionIndexPipelineStoreFunc UDFs
    Properties udfProps =
        UDFContext.getUDFContext().getUDFProperties(getClass(), new String[]{myUDFContextSignature});
    udfProps.put(FIELD_NAMES_FROM_SCHEMA_PROPS_KEY, fieldNames);
    log.info(String.format("Saved %s=%s into UDFContext using signature: %s",
        FIELD_NAMES_FROM_SCHEMA_PROPS_KEY, String.valueOf(fieldNames), myUDFContextSignature));
}
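The stored list is only useful if the back-end half of the StoreFunc reads it back under the same signature. That retrieval code is not part of the snippet above, but a sketch of it, reusing the same key constant and signature field, might look like this (the helper name is hypothetical):
// Hypothetical back-end helper (not shown in the original class): read back the
// field names that checkSchema() stored under this UDF's signature.
@SuppressWarnings("unchecked")
private List<String> loadFieldNamesFromUDFContext() {
    Properties udfProps =
        UDFContext.getUDFContext().getUDFProperties(getClass(), new String[]{myUDFContextSignature});
    return (List<String>) udfProps.get(FIELD_NAMES_FROM_SCHEMA_PROPS_KEY);
}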
Example 2: checkSchema
import org.apache.pig.ResourceSchema; // import the package/class this example depends on
@Override
public void checkSchema(ResourceSchema s) throws IOException {
    if (myUDFContextSignature == null) {
        throw new IllegalStateException("No UDFContext Signature provided to this UDF! Cannot store field names!");
    }
    ResourceSchema.ResourceFieldSchema[] fields = s.getFields();
    if (fields == null || fields.length == 0) {
        throw new IOException("Input field names not available from schema during front-end processing! FusionIndexPipelineStoreFunc must have field names!");
    }
    List<String> fieldNames = new ArrayList<String>(fields.length);
    for (int f = 0; f < fields.length; f++) {
        fieldNames.add(fields[f].getName());
    }
    // Save the field names from the schema in the UDFContext, keyed by our
    // UDFContext Signature so we don't step on other FusionIndexPipelineStoreFunc UDFs
    Properties udfProps =
        UDFContext.getUDFContext().getUDFProperties(getClass(), new String[]{myUDFContextSignature});
    udfProps.put(FIELD_NAMES_FROM_SCHEMA_PROPS_KEY, fieldNames);
    log.info(String.format("Saved %s=%s into UDFContext using signature: %s",
        FIELD_NAMES_FROM_SCHEMA_PROPS_KEY, String.valueOf(fieldNames), myUDFContextSignature));
}
Example 3: testResourceFlatSchemaCreation
import org.apache.pig.ResourceSchema; // import the package/class this example depends on
/**
 * Test that ResourceSchema is correctly created given a
 * pig.Schema and vice versa
 */
@Test
public void testResourceFlatSchemaCreation()
        throws ExecException, SchemaMergeException, FrontendException {
    String[] aliases = {"f1", "f2"};
    byte[] types = {DataType.CHARARRAY, DataType.INTEGER};
    Schema origSchema = TypeCheckingTestUtil.genFlatSchema(aliases, types);
    ResourceSchema rsSchema = new ResourceSchema(origSchema);
    assertEquals("num fields", aliases.length, rsSchema.getFields().length);
    ResourceSchema.ResourceFieldSchema[] fields = rsSchema.getFields();
    for (int i = 0; i < fields.length; i++) {
        assertEquals(fields[i].getName(), aliases[i]);
        assertEquals(fields[i].getType(), types[i]);
    }
    Schema genSchema = Schema.getPigSchema(rsSchema);
    assertTrue("generated schema equals original",
        Schema.equals(genSchema, origSchema, true, false));
}
Example 4: determineFieldSchema
import org.apache.pig.ResourceSchema; // import the package/class this example depends on
/**
 * Determine the field schema of a ResourceFieldSchema
 * @param rcFieldSchema the ResourceFieldSchema we want translated
 * @return the field schema corresponding to the object
 * @throws ExecException
 * @throws FrontendException
 * @throws SchemaMergeException
 */
public static Schema.FieldSchema determineFieldSchema(ResourceSchema.ResourceFieldSchema rcFieldSchema)
        throws ExecException, FrontendException, SchemaMergeException {
    byte dt = rcFieldSchema.getType();
    Iterator<ResourceSchema.ResourceFieldSchema> fieldIter = null;
    long fieldNum = 0;
    if (dt == TUPLE || dt == BAG) {
        fieldIter = Arrays.asList(rcFieldSchema.getSchema().getFields()).iterator();
        fieldNum = rcFieldSchema.getSchema().getFields().length;
    }
    return determineFieldSchema(dt, fieldIter, fieldNum, ResourceSchema.ResourceFieldSchema.class);
}
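As a rough illustration of driving the helper above, assuming the static method is in scope (for example via a static import), one could translate a bag-typed field as in the sketch below; the schema string is invented for the example.
// Sketch only: assumes determineFieldSchema(...) from the snippet above is in scope.
ResourceSchema rs = new ResourceSchema(
        org.apache.pig.impl.util.Utils.getSchemaFromString("name:chararray, scores:{t:(score:int)}"));
ResourceSchema.ResourceFieldSchema bagField = rs.getFields()[1];   // the bag-typed column
Schema.FieldSchema translated = determineFieldSchema(bagField);    // nested FieldSchema for the bag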
Example 5: resourceSchemaToAvroSchema
import org.apache.pig.ResourceSchema; // import the package/class this example depends on
/**
 * Translates a ResourceSchema to an Avro Schema.
 * @param rs Input schema.
 * @param recordName Record name
 * @param recordNameSpace Namespace
 * @param definedRecordNames Map of already defined record names to schema objects
 * @param doubleColonsToDoubleUnderscores whether to rewrite "::" in Pig aliases
 * as "__" in the generated Avro names
 * @return the translated schema
 * @throws IOException
 */
public static Schema resourceSchemaToAvroSchema(final ResourceSchema rs,
        String recordName, final String recordNameSpace,
        final Map<String, List<Schema>> definedRecordNames,
        final Boolean doubleColonsToDoubleUnderscores) throws IOException {
    if (rs == null) {
        return null;
    }
    recordName = toAvroName(recordName, doubleColonsToDoubleUnderscores);
    List<Schema.Field> fields = new ArrayList<Schema.Field>();
    Schema newSchema = Schema.createRecord(
        recordName, null, recordNameSpace, false);
    if (rs.getFields() != null) {
        Integer i = 0;
        for (ResourceSchema.ResourceFieldSchema rfs : rs.getFields()) {
            String rfsName = toAvroName(rfs.getName(),
                doubleColonsToDoubleUnderscores);
            Schema fieldSchema = resourceFieldSchemaToAvroSchema(
                rfsName, recordNameSpace, rfs.getType(),
                rfs.getDescription().equals("autogenerated from Pig Field Schema")
                    ? null : rfs.getDescription(),
                rfs.getSchema(), definedRecordNames,
                doubleColonsToDoubleUnderscores);
            fields.add(new Schema.Field((rfsName != null)
                    ? rfsName : recordName + "_" + i.toString(),
                fieldSchema,
                rfs.getDescription().equals(
                    "autogenerated from Pig Field Schema")
                    ? null : rfs.getDescription(), null));
            i++;
        }
        newSchema.setFields(fields);
    }
    return newSchema;
}
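A hedged sketch of calling the converter, assuming the method above (and the Avro classes it uses) are in scope; the schema string, record name, and namespace are invented for illustration.
// Sketch only: assumes resourceSchemaToAvroSchema(...) from the snippet above is in scope.
ResourceSchema rs = new ResourceSchema(
        org.apache.pig.impl.util.Utils.getSchemaFromString("name:chararray, age:int"));
org.apache.avro.Schema avroSchema = resourceSchemaToAvroSchema(
        rs,                                    // Pig-side schema to translate
        "UserRecord",                          // Avro record name (made up)
        "com.example.avro",                    // namespace (made up)
        new HashMap<String, List<org.apache.avro.Schema>>(),  // no record names defined yet
        true);                                 // rewrite "::" in aliases as "__"
System.out.println(avroSchema.toString(true));  // pretty-printed Avro schema JSON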
Example 6: checkSchema
import org.apache.pig.ResourceSchema; // import the package/class this example depends on
@Override
public void checkSchema(ResourceSchema schema) throws IOException {
    // Get schemaStr and sortColumnNames from the given schema. In the process, we
    // also validate the schema and sorting info.
    ResourceSchema.Order[] orders = schema.getSortKeyOrders();
    boolean descending = false;
    for (ResourceSchema.Order order : orders) {
        if (order == ResourceSchema.Order.DESCENDING) {
            Log LOG = LogFactory.getLog(TableStorer.class);
            LOG.warn("Sorting in descending order is not supported by Zebra and the table will be unsorted.");
            descending = true;
            break;
        }
    }
    StringBuilder sortColumnNames = new StringBuilder();
    if (!descending) {
        ResourceSchema.ResourceFieldSchema[] fields = schema.getFields();
        int[] index = schema.getSortKeys();
        for (int i = 0; i < index.length; i++) {
            ResourceFieldSchema field = fields[index[i]];
            String name = field.getName();
            if (name == null)
                throw new IOException("Zebra does not support column positional reference yet");
            if (!org.apache.pig.data.DataType.isAtomic(field.getType()))
                throw new IOException("Field [" + name + "] is not of simple type as required for a sort column now.");
            if (i > 0)
                sortColumnNames.append(",");
            sortColumnNames.append(name);
        }
    }
    // Convert resource schema to zebra schema
    org.apache.hadoop.zebra.schema.Schema zebraSchema;
    try {
        zebraSchema = SchemaConverter.convertFromResourceSchema(schema);
    } catch (ParseException ex) {
        throw new IOException("Exception thrown from SchemaConverter: " + ex.getMessage());
    }
    Properties properties = UDFContext.getUDFContext().getUDFProperties(
        this.getClass(), new String[]{udfContextSignature});
    properties.setProperty(UDFCONTEXT_OUTPUT_SCHEMA, zebraSchema.toString());
    properties.setProperty(UDFCONTEXT_SORT_INFO, sortColumnNames.toString());
    // This is to turn off type check for potential corner cases - for internal use only
    if (System.getenv("zebra_output_checktype") != null && System.getenv("zebra_output_checktype").equals("no")) {
        properties.setProperty(UDFCONTEXT_OUTPUT_CHECKTYPE, "no");
    }
}