当前位置: 首页>>代码示例>>Java>>正文


Java PigException.REMOTE_ENVIRONMENT属性代码示例

本文整理汇总了Java中org.apache.pig.PigException.REMOTE_ENVIRONMENT属性的典型用法代码示例。如果您正苦于以下问题:Java PigException.REMOTE_ENVIRONMENT属性的具体用法?Java PigException.REMOTE_ENVIRONMENT怎么用?Java PigException.REMOTE_ENVIRONMENT使用的例子?那么, 这里精选的属性代码示例或许可以为您提供帮助。您也可以进一步了解该属性所在org.apache.pig.PigException的用法示例。


在下文中一共展示了PigException.REMOTE_ENVIRONMENT属性的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。

示例1: asCollection

/**
 * Expands a glob pattern against the backing filesystem and returns the
 * matching paths, excluding system elements.
 *
 * @param pattern glob pattern to expand
 * @return matching non-system paths; an empty array when nothing matches
 * @throws DataStorageException wrapping any IOException from the
 *         filesystem (error 6008, remote environment)
 */
public HPath[] asCollection(String pattern) throws DataStorageException {
    try {
        FileStatus[] statuses = this.fs.globStatus(new Path(pattern));

        // globStatus returns null when the pattern matches nothing.
        if (statuses == null) {
            return new HPath[0];
        }

        List<HPath> result = new ArrayList<HPath>();
        for (FileStatus status : statuses) {
            HPath candidate = (HPath) this.asElement(status.getPath().toString());
            if (!candidate.systemElement()) {
                result.add(candidate);
            }
        }

        return result.toArray(new HPath[result.size()]);
    } catch (IOException e) {
        int errCode = 6008;
        String msg = "Failed to obtain glob for " + pattern;
        throw new DataStorageException(msg, errCode, PigException.REMOTE_ENVIRONMENT, e);
    }
}
 
开发者ID:sigmoidanalytics,项目名称:spork-streaming,代码行数:23,代码来源:HDataStorage.java

示例2: getNext

/**
 * Fetches the next record from the underlying reader and converts it
 * into a Pig Tuple via OrcUtils.
 *
 * @return the next tuple, or null when input is exhausted
 * @throws IOException on read failure; an InterruptedException from the
 *         reader is wrapped in an ExecException (error 6018)
 */
@Override
public Tuple getNext() throws IOException {
    try {
        // Guard clause: stop as soon as the reader reports end of input.
        if (!in.nextKeyValue()) {
            return null;
        }
        Object row = in.getCurrentValue();
        return (Tuple) OrcUtils.convertOrcToPig(row, oi, mRequiredColumns);
    } catch (InterruptedException e) {
        int errCode = 6018;
        String errMsg = "Error while reading input";
        throw new ExecException(errMsg, errCode,
                PigException.REMOTE_ENVIRONMENT, e);
    }
}
 
开发者ID:sigmoidanalytics,项目名称:spork,代码行数:18,代码来源:OrcStorage.java

示例3: checkStopOnFailure

/**
 * If stop_on_failure is enabled and any job has failed, an ExecException
 * (error 6017, remote environment) is thrown whose message lists every
 * failed job's ID and failure reason, one per line.
 *
 * @param stop_on_failure whether stop-on-failure is enabled
 * @throws ExecException if stop_on_failure is enabled and any job failed
 */
private void checkStopOnFailure(boolean stop_on_failure) throws ExecException{
    if (jc.getFailedJobs().isEmpty())
        return;

    if (stop_on_failure){
        int errCode = 6017;
        StringBuilder msg = new StringBuilder();
        // Hoist the size instead of re-querying the job control per iteration.
        int numFailed = jc.getFailedJobs().size();

        for (int i = 0; i < numFailed; i++) {
            Job j = jc.getFailedJobs().get(i);
            // Chained appends: the original used '+' inside append(), which
            // builds a throwaway StringBuilder per iteration.
            msg.append("JobID: ").append(j.getAssignedJobID())
               .append(" Reason: ").append(j.getMessage());
            if (i != numFailed - 1) {
                msg.append("\n");
            }
        }

        throw new ExecException(msg.toString(), errCode,
                PigException.REMOTE_ENVIRONMENT);
    }
}
 
开发者ID:sigmoidanalytics,项目名称:spork,代码行数:25,代码来源:MapReduceLauncher.java

示例4: getNext

/**
 * Reads lines until a valid RDF triple is found and returns it as a
 * 3-field tuple (Subject, Predicate, Object), or null at end of input.
 *
 * Invalid lines are logged at WARN and skipped. The skipping is done
 * iteratively: the original implementation recursed into getNext() per
 * malformed line, which could overflow the call stack on input with long
 * runs of non-triple lines.
 *
 * @return the next triple tuple, or null when input is exhausted
 * @throws IOException on read failure; an InterruptedException from the
 *         reader is wrapped in an ExecException (error 6018)
 */
@Override
public Tuple getNext() throws IOException {
	try {
		// Loop instead of recursing so malformed lines cannot blow the stack.
		while (in.nextKeyValue()) {
			Text value = (Text) in.getCurrentValue();
			// Use the Parser to parse the input line into a Triple
			String[] triple = rdfParser.parseTriple(value.toString());
			// If the parser returns null this line was not a valid RDF triple
			// We then continue with the next line
			if (triple == null) {
				mLog.warn("This is not an RDF triple -> ignored: " + value.toString());
				continue;
			}
			// Tuples are always triples of Subject, Predicate, Object
			Tuple t = mTupleFactory.newTuple(3);
			// set the fields of the Tuple as Subject, Predicate, Object
			t.set(0, new DataByteArray(triple[0])); // Subject
			t.set(1, new DataByteArray(triple[1])); // Predicate
			t.set(2, new DataByteArray(triple[2])); // Object
			return t;
		}
		return null; // end of input
	} catch (InterruptedException e) {
		int errCode = 6018;
		String errMsg = "Error while reading input";
		throw new ExecException(errMsg, errCode,
				PigException.REMOTE_ENVIRONMENT, e);
	}
}
 
开发者ID:aschaetzle,项目名称:PigSPARQL,代码行数:30,代码来源:NTriplesStorage.java

示例5: rename

/**
 * Renames a DFS element, overwriting an existing destination.
 * A no-op when the old and new names are equal.
 *
 * @param oldName source element name
 * @param newName destination element name
 * @throws IOException if either name cannot be resolved (wrapped as an
 *         ExecException with an error code chosen by error source)
 */
public void rename(String oldName, String newName) throws IOException {
    if (oldName.equals(newName)) {
        return; // nothing to do
    }

    System.out.println("Renaming " + oldName + " to " + newName);

    ElementDescriptor target = null;
    ElementDescriptor source = null;

    try {
        target = dfs.asElement(newName);
        source = dfs.asElement(oldName);
    }
    catch (DataStorageException e) {
        // Map the error source to the corresponding rename error code.
        byte errSrc = getErrorSource();
        int errCode;
        switch (errSrc) {
            case PigException.REMOTE_ENVIRONMENT:
                errCode = 6005;
                break;
            case PigException.USER_ENVIRONMENT:
                errCode = 4005;
                break;
            default:
                errCode = 2038;
                break;
        }
        String msg = "Unable to rename " + oldName + " to " + newName;
        throw new ExecException(msg, errCode, errSrc, e);
    }

    // Overwrite semantics: remove an existing destination first.
    if (target.exists()) {
        target.delete();
    }

    source.rename(target);
}
 
开发者ID:sigmoidanalytics,项目名称:spork-streaming,代码行数:39,代码来源:PigContext.java

示例6: copy

/**
 * Copies a DFS element to another location, optionally targeting the
 * local filesystem instead of the DFS.
 *
 * @param src source element name (resolved against the DFS)
 * @param dst destination element name
 * @param localDst when true, the destination is the local filesystem
 * @throws IOException if either name cannot be resolved (wrapped as an
 *         ExecException with an error code chosen by error source)
 */
public void copy(String src, String dst, boolean localDst) throws IOException {
    // Pick the destination storage: local fs when requested, else DFS.
    DataStorage dstStorage = localDst ? lfs : dfs;

    ElementDescriptor sourceElement = null;
    ElementDescriptor targetElement = null;

    try {
        sourceElement = dfs.asElement(src);
        targetElement = dstStorage.asElement(dst);
    }
    catch (DataStorageException e) {
        // Map the error source to the corresponding copy error code.
        byte errSrc = getErrorSource();
        int errCode;
        switch (errSrc) {
            case PigException.REMOTE_ENVIRONMENT:
                errCode = 6006;
                break;
            case PigException.USER_ENVIRONMENT:
                errCode = 4006;
                break;
            default:
                errCode = 2039;
                break;
        }
        String msg = "Unable to copy " + src + " to " + dst;
        throw new ExecException(msg, errCode, errSrc, e);
    }

    sourceElement.copy(targetElement, this.properties, false);
}
 
开发者ID:sigmoidanalytics,项目名称:spork-streaming,代码行数:34,代码来源:PigContext.java

示例7: getErrorSource

/**
 * Check the execution mode and return the appropriate error source.
 *
 * @return PigException.REMOTE_ENVIRONMENT when running LOCAL or
 *         MAPREDUCE; PigException.BUG for any other execution type
 */
public byte getErrorSource() {
    boolean knownEnvironment =
            execType == ExecType.LOCAL || execType == ExecType.MAPREDUCE;
    return knownEnvironment ? PigException.REMOTE_ENVIRONMENT : PigException.BUG;
}
 
开发者ID:sigmoidanalytics,项目名称:spork-streaming,代码行数:12,代码来源:PigContext.java

示例8: getNext

/**
 * Reads the next line and splits it into fields using the regex field
 * delimiter {@code fieldDel}, returning the fields as a tuple.
 *
 * @return the next tuple, or null at end of input
 * @throws IOException on read failure; an InterruptedException from the
 *         reader is wrapped in an ExecException (error 6018)
 */
@Override
public Tuple getNext() throws IOException {
	if (mProtoTuple == null )
		mProtoTuple = new ArrayList<Object>();
	
    try {
        boolean notDone = in.nextKeyValue();
        if (!notDone)
            return null;
        Text value = (Text) in.getCurrentValue();
        // BUG FIX: Text.getBytes() returns the *backing* array, which may be
        // longer than the valid content when the Text object is reused by the
        // reader. Decode only the first getLength() bytes; the original
        // decoded the whole array and could pick up stale trailing bytes.
        String buf = new String(value.getBytes(), 0, value.getLength(), "UTF-8");
        int len = buf.length();
        int start = 0;
        // Walk the line, emitting one field per delimiter match.
        Matcher matcher = this.fieldDel.matcher(buf);
        for (int i = 0; i < len; i = start) {
            if (matcher.find(i)){
            	readField(buf, start, matcher.start());
            	start = matcher.end();
            } else {
            	break;
            }
        }
        // pick up the last field
        if (start <= len) {
            readField(buf, start, len);
        }
        Tuple t =  mTupleFactory.newTupleNoCopy(mProtoTuple);
        mProtoTuple = null;
        return t;
    } catch (InterruptedException e) {
        int errCode = 6018;
        String errMsg = "Error while reading input";
        throw new ExecException(errMsg, errCode,
        		PigException.REMOTE_ENVIRONMENT, e);
    }
}
 
开发者ID:caesar0301,项目名称:piggybox,代码行数:37,代码来源:STLRegex.java

示例9: configure

/**
 * Prepares the streaming executable and captures job configuration:
 * makes a relative executable runnable (chmod a+x), then saves the
 * JobConf, the script output/log directories, and the task id.
 *
 * @param stream the POStream operator being configured
 * @throws ExecException if the chmod is interrupted (error 6013)
 */
public void configure(POStream stream) 
throws IOException, ExecException {
    super.configure(stream);
    
    // Chmod +x the executable
    File executable = new File(command.getExecutable());
    if (executable.isAbsolute()) {
        // we don't own it. Hope it is executable ...
    } else {
        try {
            FileUtil.chmod(executable.toString(), "a+x");
        } catch (InterruptedException ie) {
            // Restore the interrupt flag before translating the exception so
            // callers can still observe the interruption (the original
            // swallowed the thread's interrupted status).
            Thread.currentThread().interrupt();
            int errCode = 6013;
            String msg = "Unable to chmod " + executable + " . Thread interrupted.";
            throw new ExecException(msg, errCode, PigException.REMOTE_ENVIRONMENT, ie);
        }
    }
    
    // Save a copy of the JobConf
    job = PigMapReduce.sJobConfInternal.get();
    
    // Save the output directory for the Pig Script
    scriptOutputDir = job.get("pig.streaming.task.output.dir");
    scriptLogDir = job.get("pig.streaming.log.dir", "_logs");
    
    // Save the taskid
    taskId = job.get("mapred.task.id");
}
 
开发者ID:sigmoidanalytics,项目名称:spork-streaming,代码行数:28,代码来源:HadoopExecutableManager.java

示例10: close

/**
 * Closes the streaming process, copies its secondary outputs to HDFS,
 * and finishes the task's stderr log (footer + stream close) even when
 * the copy fails.
 *
 * @throws IOException on close/copy failure; a failed secondary-output
 *         copy is wrapped in an ExecException (error 6014)
 */
public void close() throws IOException {
    try {
        super.close();

        // Copy the secondary outputs of the task to HDFS.
        Path outputDir = new Path(this.scriptOutputDir);
        FileSystem fs = outputDir.getFileSystem(job);
        List<HandleSpec> outputSpecs = command.getHandleSpecs(Handle.OUTPUT);
        if (outputSpecs != null) {
            // Starts at 1: spec 0 is presumably the primary output handled
            // elsewhere — TODO confirm against HandleSpec usage.
            for (int i = 1; i < outputSpecs.size(); ++i) {
                String fileName = outputSpecs.get(i).getName();
                try {
                    int partition = job.getInt("mapred.task.partition", -1);
                    Path target = new Path(new Path(outputDir, fileName),
                                           getOutputName(partition));
                    fs.copyFromLocalFile(false, true, new Path(fileName), target);
                } catch (IOException ioe) {
                    int errCode = 6014;
                    String msg = "Failed to save secondary output '" +
                    fileName + "' of task: " + taskId;
                    throw new ExecException(msg, errCode,
                            PigException.REMOTE_ENVIRONMENT, ioe);
                }
            }
        }
    } finally {
        // Footer for stderr file of the task
        writeDebugFooter();

        // Close the stderr file on HDFS
        if (errorStream != null) {
            errorStream.close();
        }
    }
}
 
开发者ID:sigmoidanalytics,项目名称:spork-streaming,代码行数:37,代码来源:HadoopExecutableManager.java

示例11: configure

/**
 * Prepares the streaming executable and captures job configuration:
 * makes a relative executable runnable (chmod a+x), then saves the
 * JobConf, the script output/log directories, and the task id.
 *
 * @param stream the POStream operator being configured
 * @throws ExecException if the chmod is interrupted (error 6013)
 */
public void configure(POStream stream) 
throws IOException, ExecException {
    super.configure(stream);
    
    // Chmod +x the executable
    File executable = new File(command.getExecutable());
    if (executable.isAbsolute()) {
        // we don't own it. Hope it is executable ...
    } else {
        try {
            FileUtil.chmod(executable.toString(), "a+x");
        } catch (InterruptedException ie) {
            // Restore the interrupt flag before translating the exception so
            // callers can still observe the interruption (the original
            // swallowed the thread's interrupted status).
            Thread.currentThread().interrupt();
            int errCode = 6013;
            String msg = "Unable to chmod " + executable + " . Thread interrupted.";
            throw new ExecException(msg, errCode, PigException.REMOTE_ENVIRONMENT, ie);
        }
    }
    
    // Save a copy of the JobConf
    job = PigMapReduce.sJobConfInternal.get();
    
    // Save the output directory for the Pig Script
    scriptOutputDir = job.get("pig.streaming.task.output.dir");
    scriptLogDir = job.get("pig.streaming.log.dir", "_logs");
    
    // Save the taskid
    // TODO Get an equivalent property in Tez mode (currently this returns null)
    taskId = job.get(MRConfiguration.TASK_ID);
}
 
开发者ID:sigmoidanalytics,项目名称:spork,代码行数:29,代码来源:HadoopExecutableManager.java

示例12: getNext

/**
 * Reads the next line and splits it on the single-byte field delimiter,
 * honoring the pushed-down column projection (mRequiredColumns) and
 * optionally prepending the source file name/path when tagging is on.
 *
 * @return the next tuple, or null at end of input
 * @throws IOException on read failure; an InterruptedException from the
 *         reader is wrapped in an ExecException (error 6018)
 */
@Override
public Tuple getNext() throws IOException {
    mProtoTuple = new ArrayList<Object>();

    // Lazily pull the column projection out of the UDF context.
    if (!mRequiredColumnsInitialized) {
        if (signature != null) {
            Properties p = UDFContext.getUDFContext().getUDFProperties(this.getClass());
            mRequiredColumns = (boolean[]) ObjectSerializer.deserialize(p.getProperty(signature));
        }
        mRequiredColumnsInitialized = true;
    }

    // Prepend input source path if source tagging is enabled.
    if (tagFile) {
        mProtoTuple.add(new DataByteArray(sourcePath.getName()));
    } else if (tagPath) {
        mProtoTuple.add(new DataByteArray(sourcePath.toString()));
    }

    try {
        if (!in.nextKeyValue()) {
            return null;
        }
        Text line = (Text) in.getCurrentValue();
        byte[] bytes = line.getBytes();
        int length = line.getLength();
        int fieldStart = 0;
        int fieldID = 0;

        for (int pos = 0; pos < length; pos++) {
            if (bytes[pos] != fieldDel) {
                continue;
            }
            // Emit the field only when it survives column pruning.
            boolean wanted = mRequiredColumns == null
                    || (mRequiredColumns.length > fieldID && mRequiredColumns[fieldID]);
            if (wanted) {
                addTupleValue(mProtoTuple, bytes, fieldStart, pos);
            }
            fieldStart = pos + 1;
            fieldID++;
        }

        // Pick up the trailing field after the last delimiter.
        boolean lastWanted = mRequiredColumns == null
                || (mRequiredColumns.length > fieldID && mRequiredColumns[fieldID]);
        if (fieldStart <= length && lastWanted) {
            addTupleValue(mProtoTuple, bytes, fieldStart, length);
        }

        Tuple t = mTupleFactory.newTupleNoCopy(mProtoTuple);
        return dontLoadSchema ? t : applySchema(t);
    } catch (InterruptedException e) {
        int errCode = 6018;
        String errMsg = "Error while reading input";
        throw new ExecException(errMsg, errCode,
                PigException.REMOTE_ENVIRONMENT, e);
    }
}
 
开发者ID:sigmoidanalytics,项目名称:spork-streaming,代码行数:49,代码来源:PigStorage.java

示例13: setupDistributedCache

/**
 * Registers each given path with the DistributedCache, optionally
 * shipping it to the cluster first (copy to a temp path, then register
 * a dst#srcName symlinked URI).
 *
 * @param pigContext   context used for temp paths and error-source mapping
 * @param conf         job configuration to register cache files in
 * @param paths        candidate paths; blank entries are skipped
 * @param shipToCluster when true, copy each path to the cluster first
 * @throws IOException if shipping fails or a shipped URI is invalid
 */
private static void setupDistributedCache(PigContext pigContext,
        Configuration conf, String[] paths, boolean shipToCluster) throws IOException {
    // Turn on the symlink feature
    DistributedCache.createSymlink(conf);

    for (String rawPath : paths) {
        String path = rawPath.trim();
        if (path.length() == 0) {
            continue; // skip blank entries
        }
        Path src = new Path(path);

        // Ensure that 'src' is a valid URI
        URI srcURI = toURI(src);

        if (!shipToCluster) {
            DistributedCache.addCacheFile(srcURI, conf);
            continue;
        }

        // Ship to the cluster, then register the shipped copy.
        Path dst =
            new Path(FileLocalizer.getTemporaryPath(pigContext).toString());
        FileSystem fs = dst.getFileSystem(conf);
        fs.copyFromLocalFile(src, dst);

        // Construct the dst#srcName uri for DistributedCache
        URI dstURI;
        try {
            dstURI = new URI(dst.toString() + "#" + src.getName());
        } catch (URISyntaxException ue) {
            byte errSrc = pigContext.getErrorSource();
            int errCode;
            switch (errSrc) {
                case PigException.REMOTE_ENVIRONMENT:
                    errCode = 6004;
                    break;
                case PigException.USER_ENVIRONMENT:
                    errCode = 4004;
                    break;
                default:
                    errCode = 2037;
                    break;
            }
            String msg = "Invalid ship specification. " +
            "File doesn't exist: " + dst;
            throw new ExecException(msg, errCode, errSrc);
        }
        DistributedCache.addCacheFile(dstURI, conf);
    }
}
 
开发者ID:sigmoidanalytics,项目名称:spork-streaming,代码行数:50,代码来源:JobControlCompiler.java

示例14: getNext

/**
 * Reads the next input line and parses it as a CSV record into a Tuple.
 *
 * Quote handling (see the per-byte loop below):
 *  - a DOUBLE_QUOTE byte opens a quoted field; inside it, field/record
 *    delimiters are data unless the quote count is odd (field just closed);
 *  - a doubled quote ("") inside a quoted field emits one literal quote.
 *
 * NOTE(review): quote state is reset on every call, so a quoted field
 * spanning multiple input records would not be handled — confirm this is
 * the intended contract for this loader.
 *
 * @return the parsed tuple (possibly with zero fields on a blank line),
 *         or null when input is exhausted
 * @throws IOException on read errors; an InterruptedException from the
 *         record reader is wrapped in an ExecException (error 6018)
 */
@Override
public Tuple getNext() throws IOException {
    mProtoTuple = new ArrayList<Object>();

    boolean inField = false;        // a field is open (bytes seen since last flush)
    boolean inQuotedField = false;  // currently inside a double-quoted field
    boolean evenQuotesSeen = true;  // parity of quotes seen inside a quoted field
    
    // Lazily fetch the pushed-down column projection from the UDF context.
    if (!mRequiredColumnsInitialized) {
        if (signature != null) {
            Properties p = UDFContext.getUDFContext().getUDFProperties(this.getClass());
            mRequiredColumns = (boolean[])ObjectSerializer.deserialize(p.getProperty(signature));
        }
        mRequiredColumnsInitialized = true;
    }
    try {
        if (!in.nextKeyValue()) {
            return null;
        }
        Text value = (Text) in.getCurrentValue();
        byte[] buf = value.getBytes();
        int len = value.getLength();
        int fieldID = 0;

        // Accumulates the bytes of the field currently being read.
        ByteBuffer fieldBuffer = ByteBuffer.allocate(len);

        for (int i = 0; i < len; i++) {
            byte b = buf[i];
            inField = true;
            if (inQuotedField) {
                if (b == DOUBLE_QUOTE) {
                    // Toggle parity; even means this quote completed a ""
                    // pair, so emit one literal quote byte.
                    evenQuotesSeen = !evenQuotesSeen;
                    if (evenQuotesSeen) {
                        fieldBuffer.put(DOUBLE_QUOTE);
                    }
                } else
                    if (!evenQuotesSeen &&
                            (b == FIELD_DEL || b == RECORD_DEL)) {
                        // Odd quote count followed by a delimiter: the
                        // quoted field has ended — flush it.
                        inQuotedField = false;
                        inField = false;
                        readField(fieldBuffer, fieldID++);
                    } else {
                        fieldBuffer.put(b);
                    }
            } else if (b == DOUBLE_QUOTE) {
                // Opening quote of a quoted field.
                inQuotedField = true;
                evenQuotesSeen = true;
            } else if (b == FIELD_DEL) {
                inField = false;
                readField(fieldBuffer, fieldID++); // end of the field
            } else {
                evenQuotesSeen = true;
                fieldBuffer.put(b);
            }
        }
        // Flush the last (unterminated) field of the line.
        if (inField) readField(fieldBuffer, fieldID++);
    } catch (InterruptedException e) {
        int errCode = 6018;
        String errMsg = "Error while reading input";
        throw new ExecException(errMsg, errCode, 
                PigException.REMOTE_ENVIRONMENT, e);
    }

    Tuple t =  mTupleFactory.newTupleNoCopy(mProtoTuple);
    return t;
}
 
开发者ID:sigmoidanalytics,项目名称:spork-streaming,代码行数:66,代码来源:CSVLoader.java

示例15: setupDistributedCache

/**
 * Registers each given path with the distributed cache, optionally
 * shipping it to the cluster first (copy to a temp path with the
 * configured submit replication, then register a dst#srcName URI).
 *
 * @param pigContext   context used for temp paths and error-source mapping
 * @param conf         job configuration to register cache files in
 * @param paths        candidate paths; blank entries are skipped
 * @param shipToCluster when true, copy each path to the cluster first
 * @throws IOException if shipping fails or a shipped URI is invalid
 */
private static void setupDistributedCache(PigContext pigContext,
        Configuration conf, String[] paths, boolean shipToCluster) throws IOException {
    // Turn on the symlink feature
    DistributedCache.createSymlink(conf);

    for (String rawPath : paths) {
        String path = rawPath.trim();
        if (path.length() == 0) {
            continue; // skip blank entries
        }
        Path src = new Path(path);

        // Ensure that 'src' is a valid URI
        URI srcURI = toURI(src);

        if (!shipToCluster) {
            addToDistributedCache(srcURI, conf);
            continue;
        }

        // Ship to the cluster, bump replication, then register the copy.
        Path dst =
                new Path(FileLocalizer.getTemporaryPath(pigContext).toString());
        FileSystem fs = dst.getFileSystem(conf);
        fs.copyFromLocalFile(src, dst);
        fs.setReplication(dst, (short)conf.getInt(MRConfiguration.SUMIT_REPLICATION, 3));

        // Construct the dst#srcName uri for DistributedCache
        URI dstURI;
        try {
            dstURI = new URI(dst.toString() + "#" + src.getName());
        } catch (URISyntaxException ue) {
            byte errSrc = pigContext.getErrorSource();
            int errCode;
            switch (errSrc) {
                case PigException.REMOTE_ENVIRONMENT:
                    errCode = 6004;
                    break;
                case PigException.USER_ENVIRONMENT:
                    errCode = 4004;
                    break;
                default:
                    errCode = 2037;
                    break;
            }
            String msg = "Invalid ship specification. " +
                    "File doesn't exist: " + dst;
            throw new ExecException(msg, errCode, errSrc);
        }
        addToDistributedCache(dstURI, conf);
    }
}
 
开发者ID:sigmoidanalytics,项目名称:spork,代码行数:51,代码来源:JobControlCompiler.java


注:本文中的org.apache.pig.PigException.REMOTE_ENVIRONMENT属性示例由纯净天空整理自Github/MSDocs等开源代码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。