This article collects typical usage examples of the Java class org.apache.pig.tools.pigstats.PigStats. If you have been wondering what the PigStats class does and how it is used in practice, the examples selected below should help.
The PigStats class belongs to the org.apache.pig.tools.pigstats package. 15 code examples of this class are shown below, ordered by popularity by default.
Example 1: runSingle
import org.apache.pig.tools.pigstats.PigStats; // import the required package/class
/**
* Run a pipeline on Hadoop.
* If there are no stores in this pipeline then nothing will be run.
* @param prop Map of properties that Pig should set when running the script.
* This is intended for use with scripting languages that do not support
* the Properties object.
* @return {@link PigStats}, null if there is no bound query to run.
* @throws IOException
*/
public PigStats runSingle(Properties prop) throws IOException {
if (queries.size() > 1) {
throw new IOException(
"This pipeline contains multiple queries. Use run() method instead");
}
if (queries.isEmpty()) {
LOG.info("No bound query to run");
return null;
}
if (prop != null) {
scriptContext.getPigContext().getProperties().putAll(prop);
}
PigStats ret = exec(queries.get(0));
setPigStats(ret);
return ret;
}
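The PigStats object returned above can be inspected directly by the caller. The fragment below is a minimal sketch, not part of the original example: the script variable stands for a hypothetical object exposing runSingle, and it uses common PigStats accessors such as isSuccessful(), getBytesWritten(), getOutputStats(), getReturnCode() and getErrorMessage().
PigStats stats = script.runSingle(null);                  // "script" is hypothetical
if (stats == null) {
    System.out.println("Nothing to run: no bound query");
} else if (stats.isSuccessful()) {
    System.out.println("Bytes written: " + stats.getBytesWritten());
    // OutputStats comes from org.apache.pig.tools.pigstats.OutputStats
    for (OutputStats output : stats.getOutputStats()) {
        System.out.println("Stored " + output.getAlias() + " at " + output.getLocation());
    }
} else {
    System.err.println("Failed, return code " + stats.getReturnCode()
            + ": " + stats.getErrorMessage());
}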
Example 2: testBytesWritten_JIRA_1027
import org.apache.pig.tools.pigstats.PigStats; // import the required package/class
@Test
public void testBytesWritten_JIRA_1027() {
File outputFile = null;
try {
String fileName = this.getClass().getName() + "_" + "testBytesWritten_JIRA_1027";
outputFile = File.createTempFile(fileName, ".out");
String filePath = outputFile.getAbsolutePath();
outputFile.delete();
PigServer pig = new PigServer(ExecType.LOCAL);
pig.registerQuery("A = load 'test/org/apache/pig/test/data/passwd';");
ExecJob job = pig.store("A", filePath);
PigStats stats = job.getStatistics();
File dataFile = new File( outputFile.getAbsoluteFile() + File.separator + "part-00000" );
assertEquals(dataFile.length(), stats.getBytesWritten());
} catch (IOException e) {
LOG.error("Error while generating file", e);
fail("Encountered IOException");
} finally {
if (outputFile != null) {
// Hadoop Local mode creates a directory
// Hence we need to delete a directory recursively
deleteDirectory(outputFile);
}
}
}
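The deleteDirectory helper referenced in the finally block is defined elsewhere in the test class and is not shown here. A minimal sketch, assuming plain java.io.File, could look like this:
// Hypothetical helper (not part of the excerpt above): Hadoop local mode writes a
// directory of part files, so the output must be removed recursively.
private static void deleteDirectory(File dir) {
    File[] children = dir.listFiles();
    if (children != null) {
        for (File child : children) {
            deleteDirectory(child);
        }
    }
    dir.delete();
}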
Example 3: testGroupAllWithParallel
import org.apache.pig.tools.pigstats.PigStats; // import the required package/class
/**
* Test parallelism for group all
* @throws Exception
*/
@Test
public void testGroupAllWithParallel() throws Exception {
PigServer pigServer = new PigServer(cluster.getExecType(), cluster
.getProperties());
pigServer.registerQuery("A = LOAD '" + INPUT_FILE + "' as (x:chararray);");
pigServer.registerQuery("B = group A all parallel 5;");
{
Iterator<Tuple> iter = pigServer.openIterator("B");
List<Tuple> expectedRes =
Util.getTuplesFromConstantTupleStrings(
new String[] {
"('all',{('one'),('two'),('two')})"
});
Util.checkQueryOutputsAfterSort(iter, expectedRes);
JobGraph jGraph = PigStats.get().getJobGraph();
checkGroupAllWithParallelGraphResult(jGraph);
}
}
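The helper checkGroupAllWithParallelGraphResult is likewise defined elsewhere in the test. A hedged sketch of the kind of assertion it might make over the JobGraph, iterating the JobStats nodes the same way Example 5 below does, is shown here; the checks in the real helper may differ.
// Illustrative only: verify every job in the graph succeeded. The real helper
// presumably also asserts that the "group all" stage effectively runs with a
// single reducer, since a single grouping key cannot be split across reducers.
private void checkGroupAllWithParallelGraphResult(JobGraph jGraph) {
    Iterator<JobStats> it = jGraph.iterator();
    while (it.hasNext()) {
        JobStats js = it.next();
        assertTrue("job for alias " + js.getAlias() + " should be successful", js.isSuccessful());
    }
}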
Example 4: runEmbeddedScript
import org.apache.pig.tools.pigstats.PigStats; // import the required package/class
private static int runEmbeddedScript(PigContext pigContext, String file, String engine)
throws IOException {
log.info("Run embedded script: " + engine);
pigContext.connect();
ScriptEngine scriptEngine = ScriptEngine.getInstance(engine);
Map<String, List<PigStats>> statsMap = scriptEngine.run(pigContext, file);
PigStatsUtil.setStatsMap(statsMap);
int failCount = 0;
int totalCount = 0;
for (List<PigStats> lst : statsMap.values()) {
if (lst != null && !lst.isEmpty()) {
for (PigStats stats : lst) {
if (!stats.isSuccessful()) failCount++;
totalCount++;
}
}
}
return (totalCount > 0 && failCount == totalCount) ? ReturnCode.FAILURE
: (failCount > 0) ? ReturnCode.PARTIAL_FAILURE
: ReturnCode.SUCCESS;
}
Example 5: getJobs
import org.apache.pig.tools.pigstats.PigStats; // import the required package/class
/**
* Retrieves a list of Job objects from the PigStats object
* @param stats
* @return A list of ExecJob objects
*/
protected List<ExecJob> getJobs(PigStats stats) {
LinkedList<ExecJob> jobs = new LinkedList<ExecJob>();
JobGraph jGraph = stats.getJobGraph();
Iterator<JobStats> iter = jGraph.iterator();
while (iter.hasNext()) {
JobStats js = iter.next();
for (OutputStats output : js.getOutputs()) {
if (js.isSuccessful()) {
jobs.add(new HJob(HJob.JOB_STATUS.COMPLETED, pigContext, output
.getPOStore(), output.getAlias(), stats));
} else {
HJob hjob = new HJob(HJob.JOB_STATUS.FAILED, pigContext, output
.getPOStore(), output.getAlias(), stats);
hjob.setException(js.getException());
jobs.add(hjob);
}
}
}
return jobs;
}
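A caller of getJobs might walk the returned list to report per-store status and surface failures. The fragment below is a sketch under that assumption; getStatus(), getAlias() and getException() are taken to be available on ExecJob.
// Sketch (not from the original article): summarize each store job.
for (ExecJob job : getJobs(stats)) {
    if (job.getStatus() == ExecJob.JOB_STATUS.COMPLETED) {
        System.out.println("Store for alias " + job.getAlias() + " completed");
    } else {
        System.err.println("Store for alias " + job.getAlias()
                + " failed: " + job.getException());
    }
}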
Example 6: storeEx
import org.apache.pig.tools.pigstats.PigStats; // import the required package/class
private PigStats storeEx(String alias, String filename, String func)
throws IOException {
if ("@".equals(alias)) {
alias = getLastRel();
}
currDAG.parseQuery();
currDAG.buildPlan( alias );
try {
QueryParserUtils.attachStorePlan(scope, currDAG.lp, filename, func, currDAG.getOperator( alias ), alias, pigContext);
currDAG.compile();
return executeCompiledLogicalPlan();
} catch (PigException e) {
int errCode = 1002;
String msg = "Unable to store alias " + alias;
throw new PigException(msg, errCode, PigException.INPUT, e);
}
}
Example 7: runJob
import org.apache.pig.tools.pigstats.PigStats; // import the required package/class
public void runJob() throws JobCreationException {
RunJarSecurityManager secMan = new RunJarSecurityManager();
try {
RunJar.main(getNativeMRParams());
MRPigStatsUtil.addNativeJobStats(PigStats.get(), this, true);
} catch (SecurityException se) {
if(secMan.getExitInvoked()) {
if(secMan.getExitCode() != 0) {
throw new JobCreationException("Native job returned with non-zero return code");
}
else {
MRPigStatsUtil.addNativeJobStats(PigStats.get(), this, true);
}
}
} catch (Throwable t) {
JobCreationException e = new JobCreationException(
"Cannot run native mapreduce job "+ t.getMessage(), t);
MRPigStatsUtil.addNativeJobStats(PigStats.get(), this, false, e);
throw e;
} finally {
secMan.retire();
}
}
Example 8: testEmptyFile
import org.apache.pig.tools.pigstats.PigStats; // import the required package/class
@Test // PIG-2006
public void testEmptyFile() throws IOException {
File f1 = new File(PIG_FILE);
FileWriter fw1 = new FileWriter(f1);
fw1.close();
try {
String[] args = { "-x", "local", "-c", PIG_FILE };
PigStats stats = PigRunner.run(args, null);
assertTrue(stats.isSuccessful());
assertEquals( 0, stats.getReturnCode() );
} finally {
new File(PIG_FILE).delete();
Util.deleteFile(cluster, OUTPUT_FILE);
}
}
Example 9: returnCodeTest2
import org.apache.pig.tools.pigstats.PigStats; // import the required package/class
@Test
public void returnCodeTest2() throws Exception {
PrintWriter w = new PrintWriter(new FileWriter(PIG_FILE));
w.println("A = load 'non-existine.file' as (a0, a1);");
w.println("B = load 'data' as (b0, b1);");
w.println("C = join B by b0, A by a0 using 'repl';");
w.println("store C into '" + OUTPUT_FILE + "';");
w.close();
try {
String[] args = { "-x", execType, PIG_FILE };
PigStats stats = PigRunner.run(args, null);
assertTrue(!stats.isSuccessful());
assertTrue(stats.getReturnCode() != 0);
assertTrue(stats.getOutputStats().size() == 0);
} finally {
new File(PIG_FILE).delete();
Util.deleteFile(cluster, OUTPUT_FILE);
}
}
Example 10: testNagative7
import org.apache.pig.tools.pigstats.PigStats; // import the required package/class
@Test // test error message with file name
public void testNagative7() throws IOException {
File f1 = new File("myscript.pig");
f1.deleteOnExit();
FileWriter fw1 = new FileWriter(f1);
fw1.append("A = loadd '1.txt';");
fw1.close();
String[] args = { "-x", "local", "-c", "myscript.pig" };
PigStats stats = PigRunner.run(args, null);
Assert.assertFalse(stats.isSuccessful());
String expected = "<file myscript.pig, line 1, column 0>";
String msg = stats.getErrorMessage();
Assert.assertFalse(msg == null);
Assert.assertTrue(msg.startsWith(expected));
}
Example 11: launchPig
import org.apache.pig.tools.pigstats.PigStats; // import the required package/class
/**
* Runs the fetch task by executing chain of calls on the PhysicalPlan from the leaf
* up to the LoadFunc
*
* @param pp - Physical plan
* @return SimpleFetchPigStats instance representing the fetched result
* @throws IOException
*/
public PigStats launchPig(PhysicalPlan pp) throws IOException {
try {
POStore poStore = (POStore) pp.getLeaves().get(0);
init(pp, poStore);
// run fetch
runPipeline(poStore);
UDFFinishVisitor udfFinisher = new UDFFinishVisitor(pp,
new DependencyOrderWalker<PhysicalOperator, PhysicalPlan>(pp));
udfFinisher.visit();
return PigStats.start(new EmptyPigStats(pigContext, poStore));
}
finally {
UDFContext.getUDFContext().addJobConf(null);
pigContext.getProperties().remove(PigImplConstants.CONVERTED_TO_FETCH);
}
}
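launchPig here implements Pig's fetch optimization, which answers simple plans without submitting a MapReduce job. The sketch below shows how an end user might exercise that path through PigServer; the property name "opt.fetch" is an assumption and may vary across Pig versions.
// Sketch only; "opt.fetch" is assumed to be the fetch-optimizer switch.
Properties props = new Properties();
props.setProperty("opt.fetch", "true");
PigServer pig = new PigServer(ExecType.LOCAL, props);
pig.registerQuery("A = load 'test/org/apache/pig/test/data/passwd';");
Iterator<Tuple> it = pig.openIterator("A");   // small plans like this can be fetched directly
while (it.hasNext()) {
    System.out.println(it.next());
}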
Example 12: simpleNegativeTest
import org.apache.pig.tools.pigstats.PigStats; // import the required package/class
@Test
public void simpleNegativeTest() throws Exception {
PrintWriter w = new PrintWriter(new FileWriter(PIG_FILE));
w.println("A = load '" + INPUT_FILE + "' as (a0:int, a1:int, a2:int);");
w.println("B = group A by a;");
w.println("C = foreach B generate group, COUNT(A);");
w.println("store C into '" + OUTPUT_FILE + "';");
w.close();
String[] args = { "-c", PIG_FILE };
PigStats stats = PigRunner.run(args, null);
assertTrue(stats.getReturnCode() == ReturnCode.PIG_EXCEPTION);
// TODO: error message has changed. Need to catch the new message generated from the
// new parser.
// assertTrue(stats.getErrorCode() == 1000);
// assertEquals("Error during parsing. Invalid alias: a in {a0: int,a1: int,a2: int}",
// stats.getErrorMessage());
}
Example 13: testRegisterExternalJar
import org.apache.pig.tools.pigstats.PigStats; // import the required package/class
@Test
public void testRegisterExternalJar() throws Exception {
String[] args = { "-Dpig.additional.jars=pig-withouthadoop.jar",
"-Dmapred.job.queue.name=default",
"-e", "A = load '" + INPUT_FILE + "';store A into '" + OUTPUT_FILE + "';\n" };
PigStats stats = PigRunner.run(args, new TestNotificationListener());
Util.deleteFile(cluster, OUTPUT_FILE);
java.lang.reflect.Method getPigContext = stats.getClass()
.getDeclaredMethod("getPigContext");
getPigContext.setAccessible(true);
PigContext ctx = (PigContext) getPigContext.invoke(stats);
Assert.assertNotNull(ctx);
assertTrue(ctx.extraJars.contains(ClassLoader.getSystemResource("pig-withouthadoop.jar")));
assertTrue("default", ctx.getProperties().getProperty("mapred.job.queue.name")!=null && ctx.getProperties().getProperty("mapred.job.queue.name").equals("default")||
ctx.getProperties().getProperty("mapreduce.job.queuename")!=null && ctx.getProperties().getProperty("mapreduce.job.queuename").equals("default"));
}
Example 14: classLoaderTest
import org.apache.pig.tools.pigstats.PigStats; // import the required package/class
@Test
public void classLoaderTest() throws Exception {
// Skip in hadoop 23 test, see PIG-2449
if (Util.isHadoop23() || Util.isHadoop2_0())
return;
PrintWriter w = new PrintWriter(new FileWriter(PIG_FILE));
w.println("register test/org/apache/pig/test/data/pigtestloader.jar");
w.println("A = load '" + INPUT_FILE + "' using org.apache.pig.test.PigTestLoader();");
w.println("store A into '" + OUTPUT_FILE + "';");
w.close();
try {
String[] args = { PIG_FILE };
PigStats stats = PigRunner.run(args, new TestNotificationListener());
assertTrue(stats.isSuccessful());
} finally {
new File(PIG_FILE).delete();
Util.deleteFile(cluster, OUTPUT_FILE);
}
}
Example 15: fsCommandTest
import org.apache.pig.tools.pigstats.PigStats; // import the required package/class
@Test
public void fsCommandTest() throws Exception {
PrintWriter w = new PrintWriter(new FileWriter(PIG_FILE));
w.println("fs -mv nonexist.file dummy.file");
w.close();
try {
String[] args = { PIG_FILE };
PigStats stats = PigRunner.run(args, new TestNotificationListener());
assertTrue(!stats.isSuccessful());
assertTrue(stats.getReturnCode() == PigRunner.ReturnCode.IO_EXCEPTION);
} finally {
new File(PIG_FILE).delete();
}
}