

Java LoadFunc.setLocation Method Code Examples

This article collects typical usage examples of the Java method org.apache.pig.LoadFunc.setLocation. If you are unsure how to call LoadFunc.setLocation, or what it is used for, the hand-picked code examples below should help. You can also explore further examples of org.apache.pig.LoadFunc itself.


The following presents 9 code examples of the LoadFunc.setLocation method, sorted by popularity by default.
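
Before the examples, here is a minimal sketch of the basic calling pattern. It is an illustration only: it assumes PigStorage as the concrete LoadFunc, a hypothetical input path, and the Hadoop 2 MapReduce API; the configuration key that setLocation ends up writing depends on the loader and the Hadoop version.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Job;
import org.apache.pig.LoadFunc;
import org.apache.pig.builtin.PigStorage;

public class SetLocationSketch {
    public static void main(String[] args) throws Exception {
        // PigStorage is one concrete LoadFunc shipped with Pig; any LoadFunc is called the same way.
        LoadFunc loader = new PigStorage(",");

        // setLocation takes a Hadoop Job so the loader can register its input
        // location (and any other input-format settings) on the job's configuration.
        Job job = new Job(new Configuration());
        loader.setLocation("/tmp/input/data.csv", job); // hypothetical path

        // For file-based loaders the input path now lives in the job configuration,
        // e.g. under mapreduce.input.fileinputformat.inputdir on Hadoop 2.
        System.out.println(job.getConfiguration()
                .get("mapreduce.input.fileinputformat.inputdir"));
    }
}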

Example 1: seekInRightStream

import org.apache.pig.LoadFunc; // import the dependent package/class
private void seekInRightStream(Object firstLeftKey) throws IOException{
    rightLoader = (LoadFunc)PigContext.instantiateFuncFromSpec(rightLoaderFuncSpec);
    
    // check if hadoop distributed cache is used
    if (indexFile != null && rightLoader instanceof DefaultIndexableLoader) {
        DefaultIndexableLoader loader = (DefaultIndexableLoader)rightLoader;
        loader.setIndexFile(indexFile);
    }
    
    // Pass signature of the loader to rightLoader
    // make a copy of the conf to use in calls to rightLoader.
    rightLoader.setUDFContextSignature(signature);
    Job job = new Job(new Configuration(PigMapReduce.sJobConfInternal.get()));
    rightLoader.setLocation(rightInputFileName, job);
    ((IndexableLoadFunc)rightLoader).initialize(job.getConfiguration());
    ((IndexableLoadFunc)rightLoader).seekNear(
            firstLeftKey instanceof Tuple ? (Tuple)firstLeftKey : mTupleFactory.newTuple(firstLeftKey));
}
 
Developer: sigmoidanalytics, Project: spork-streaming, Lines: 19, Source: POMergeJoin.java

Example 2: seekInRightStream

import org.apache.pig.LoadFunc; // import the dependent package/class
private void seekInRightStream(Object firstLeftKey) throws IOException{
    rightLoader = (LoadFunc)PigContext.instantiateFuncFromSpec(rightLoaderFuncSpec);

    // check if hadoop distributed cache is used
    if (indexFile != null && rightLoader instanceof DefaultIndexableLoader) {
        DefaultIndexableLoader loader = (DefaultIndexableLoader)rightLoader;
        loader.setIndexFile(indexFile);
    }
    
    // Pass signature of the loader to rightLoader
    // make a copy of the conf to use in calls to rightLoader.
    rightLoader.setUDFContextSignature(signature);
    Job job = new Job(new Configuration(PigMapReduce.sJobConfInternal.get()));
    rightLoader.setLocation(rightInputFileName, job);
    ((IndexableLoadFunc)rightLoader).initialize(job.getConfiguration());
    ((IndexableLoadFunc)rightLoader).seekNear(
            firstLeftKey instanceof Tuple ? (Tuple)firstLeftKey : mTupleFactory.newTuple(firstLeftKey));
}
 
Developer: sigmoidanalytics, Project: spork, Lines: 19, Source: POMergeJoin.java

Example 3: createPOLoadWithSize

import org.apache.pig.LoadFunc; // import the dependent package/class
public static POLoad createPOLoadWithSize(long size, LoadFunc loadFunc) throws Exception {
    File file = File.createTempFile("tempFile", ".tmp");
    file.deleteOnExit();
    RandomAccessFile f = new RandomAccessFile(file, "rw");
    f.setLength(size);
    f.close();

    loadFunc.setLocation(file.getAbsolutePath(), new org.apache.hadoop.mapreduce.Job(CONF));
    FuncSpec funcSpec = new FuncSpec(loadFunc.getClass().getCanonicalName());
    POLoad poLoad = new POLoad(new OperatorKey(), loadFunc);
    poLoad.setLFile(new FileSpec(file.getAbsolutePath(), funcSpec));
    poLoad.setPc(new PigContext());
    poLoad.setUp();

    return poLoad;
}
 
Developer: sigmoidanalytics, Project: spork, Lines: 17, Source: TestJobControlCompiler.java

Example 4: configureLoader

import org.apache.pig.LoadFunc; // import the dependent package/class
/**
 * stolen from JobControlCompiler
 * TODO: refactor it to share this
 * @param physicalPlan
 * @param poLoad
 * @param configuration
 * @return
 * @throws java.io.IOException
 */
private static Configuration configureLoader(PhysicalPlan physicalPlan,
		POLoad poLoad, Configuration configuration, PigContext pigContext) throws IOException {

	Job job = new Job(configuration);
	LoadFunc loadFunc = poLoad.getLoadFunc();

	loadFunc.setLocation(poLoad.getLFile().getFileName(), job);

	// stolen from JobControlCompiler
	ArrayList<FileSpec> pigInputs = new ArrayList<FileSpec>();
	//Store the inp filespecs
	pigInputs.add(poLoad.getLFile());

	ArrayList<List<OperatorKey>> inpTargets = Lists.newArrayList();
	ArrayList<String> inpSignatures = Lists.newArrayList();
	ArrayList<Long> inpLimits = Lists.newArrayList();
	//Store the target operators for tuples read
	//from this input
	List<PhysicalOperator> loadSuccessors = physicalPlan.getSuccessors(poLoad);
	List<OperatorKey> loadSuccessorsKeys = Lists.newArrayList();
	if(loadSuccessors!=null){
		for (PhysicalOperator loadSuccessor : loadSuccessors) {
			loadSuccessorsKeys.add(loadSuccessor.getOperatorKey());
		}
	}
	inpTargets.add(loadSuccessorsKeys);
	inpSignatures.add(poLoad.getSignature());
	inpLimits.add(poLoad.getLimit());

	configuration.set("pig.inputs", ObjectSerializer.serialize(pigInputs));
	configuration.set("pig.inpTargets", ObjectSerializer.serialize(inpTargets));
	configuration.set("pig.inpSignatures", ObjectSerializer.serialize(inpSignatures));
	configuration.set("pig.inpLimits", ObjectSerializer.serialize(inpLimits));
	configuration.set("pig.pigContext", ObjectSerializer.serialize(pigContext));
	configuration.set("udf.import.list", ObjectSerializer.serialize(PigContext.getPackageImportList()));
	return configuration;
}
 
Developer: sigmoidanalytics, Project: spork-streaming, Lines: 47, Source: LoadConverter.java

Example 5: mergeSplitSpecificConf

import org.apache.pig.LoadFunc; // import the dependent package/class
/**
 * get the corresponding configuration for the input on which the split
 * is based and merge it with the Conf supplied
 * 
 * package level access so that this is not publicly used elsewhere
 * @throws IOException 
 */
static void mergeSplitSpecificConf(LoadFunc loadFunc, PigSplit pigSplit, Configuration originalConf) 
throws IOException {
    // set up conf with entries from input specific conf
    Job job = new Job(originalConf);
    loadFunc.setLocation(getLoadLocation(pigSplit.getInputIndex(), 
            originalConf), job);
    // The above setLocation call could write to the conf within
    // the job - merge that updated conf with original conf
    ConfigurationUtil.mergeConf(originalConf, job.getConfiguration());
    
}
 
Developer: sigmoidanalytics, Project: spork-streaming, Lines: 19, Source: PigInputFormat.java

Example 6: createPOLoadWithSize

import org.apache.pig.LoadFunc; // import the dependent package/class
public static POLoad createPOLoadWithSize(long size, LoadFunc loadFunc) throws Exception {
    File file = File.createTempFile("tempFile", ".tmp");
    file.deleteOnExit();
    RandomAccessFile f = new RandomAccessFile(file, "rw");
    f.setLength(size);
    f.close();

    loadFunc.setLocation(file.getAbsolutePath(), new org.apache.hadoop.mapreduce.Job(CONF));
    FuncSpec funcSpec = new FuncSpec(loadFunc.getClass().getCanonicalName());
    POLoad poLoad = new POLoad(new OperatorKey(), loadFunc);
    poLoad.setLFile(new FileSpec(file.getAbsolutePath(), funcSpec));
    poLoad.setPc(new PigContext());
    poLoad.setUp();

    return poLoad;
}
 
Developer: sigmoidanalytics, Project: spork-streaming, Lines: 16, Source: TestJobControlCompiler.java

Example 7: initialize

import org.apache.pig.LoadFunc; // import the dependent package/class
@SuppressWarnings("unchecked")
@Override
public void initialize(InputSplit inputSplit,
        TaskAttemptContext taskAttemptContext) throws IOException,
        InterruptedException {

    FileSplit fileSplit = (FileSplit) inputSplit;

    path = fileSplit.getPath();
    String fileName = path.toUri().toString();

    // select the correct load function and initialise
    loadFuncHelper = new LoadFuncHelper(
            taskAttemptContext.getConfiguration());

    FuncSpec funcSpec = loadFuncHelper.determineFunction(fileName);

    if (funcSpec == null) {
        throw new IOException("Cannot determine LoadFunc for "
                + fileName);
    }

    selectedLoadFunc = (LoadFunc) PigContext
            .instantiateFuncFromSpec(funcSpec);

    selectedLoadFunc.setUDFContextSignature(udfSignature);
    selectedLoadFunc.setLocation(fileName,
            new Job(taskAttemptContext.getConfiguration(),
                    taskAttemptContext.getJobName()));

    selectedReader = selectedLoadFunc.getInputFormat()
            .createRecordReader(fileSplit, taskAttemptContext);

    selectedReader.initialize(fileSplit, taskAttemptContext);

    LOG.info("Using LoadFunc " + selectedLoadFunc.getClass().getName()
            + " on " + fileName);

}
 
Developer: sigmoidanalytics, Project: spork-streaming, Lines: 40, Source: AllLoader.java

Example 8: configureLoader

import org.apache.pig.LoadFunc; // import the dependent package/class
/**
 * stolen from JobControlCompiler TODO: refactor it to share this
 *
 * @param physicalPlan
 * @param poLoad
 * @param jobConf
 * @return
 * @throws java.io.IOException
 */
private static JobConf configureLoader(PhysicalPlan physicalPlan,
        POLoad poLoad, JobConf jobConf) throws IOException {

    Job job = new Job(jobConf);
    LoadFunc loadFunc = poLoad.getLoadFunc();

    loadFunc.setLocation(poLoad.getLFile().getFileName(), job);

    // stolen from JobControlCompiler
    ArrayList<FileSpec> pigInputs = new ArrayList<FileSpec>();
    // Store the inp filespecs
    pigInputs.add(poLoad.getLFile());

    ArrayList<List<OperatorKey>> inpTargets = Lists.newArrayList();
    ArrayList<String> inpSignatures = Lists.newArrayList();
    ArrayList<Long> inpLimits = Lists.newArrayList();
    // Store the target operators for tuples read
    // from this input
    List<PhysicalOperator> loadSuccessors = physicalPlan
            .getSuccessors(poLoad);
    List<OperatorKey> loadSuccessorsKeys = Lists.newArrayList();
    if (loadSuccessors != null) {
        for (PhysicalOperator loadSuccessor : loadSuccessors) {
            loadSuccessorsKeys.add(loadSuccessor.getOperatorKey());
        }
    }
    inpTargets.add(loadSuccessorsKeys);
    inpSignatures.add(poLoad.getSignature());
    inpLimits.add(poLoad.getLimit());

    jobConf.set("pig.inputs", ObjectSerializer.serialize(pigInputs));
    jobConf.set("pig.inpTargets", ObjectSerializer.serialize(inpTargets));
    jobConf.set("pig.inpSignatures",
            ObjectSerializer.serialize(inpSignatures));
    jobConf.set("pig.inpLimits", ObjectSerializer.serialize(inpLimits));

    return jobConf;
}
 
Developer: sigmoidanalytics, Project: spork, Lines: 48, Source: LoadConverter.java

Example 9: mergeSplitSpecificConf

import org.apache.pig.LoadFunc; // import the dependent package/class
/**
 * get the corresponding configuration for the input on which the split
 * is based and merge it with the Conf supplied
 *
 * package level access so that this is not publicly used elsewhere
 * @throws IOException
 */
static void mergeSplitSpecificConf(LoadFunc loadFunc, PigSplit pigSplit, Configuration originalConf)
        throws IOException {
    // set up conf with entries from input specific conf
    Job job = new Job(originalConf);
    loadFunc.setLocation(getLoadLocation(pigSplit.getInputIndex(),
            originalConf), job);
    // The above setLocation call could write to the conf within
    // the job - merge that updated conf with original conf
    ConfigurationUtil.mergeConf(originalConf, job.getConfiguration());

}
 
Developer: sigmoidanalytics, Project: spork, Lines: 19, Source: PigInputFormat.java


Note: The org.apache.pig.LoadFunc.setLocation examples in this article were compiled by 纯净天空 from GitHub, MSDocs, and other open-source code and documentation platforms. The code snippets are taken from open-source projects contributed by their authors; copyright remains with the original authors, and redistribution and use should follow each project's license. Do not reproduce without permission.