Java ExecType.MAPREDUCE Code Examples

This article collects typical usage examples of the org.apache.pig.ExecType.MAPREDUCE enum constant in Java. If you are unsure what ExecType.MAPREDUCE is for, or how to use it, the curated examples below should help. You can also browse further usage examples of the enclosing enum, org.apache.pig.ExecType.


The 15 code examples of ExecType.MAPREDUCE shown below are ordered by popularity.
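All 15 examples boil down to the same two construction patterns. The minimal sketch below is written for this article (it is not taken from any of the projects listed) and shows both: passing ExecType.MAPREDUCE straight to the PigServer constructor, or wrapping it in a PigContext first. The Hadoop property names mentioned in the comments are only a reminder of what the supplied Properties are expected to describe.

import java.util.Properties;

import org.apache.pig.ExecType;
import org.apache.pig.PigServer;
import org.apache.pig.backend.executionengine.ExecException;
import org.apache.pig.impl.PigContext;

public class MapReduceModeSketch {

    // Builds a PigServer that submits jobs to the cluster described by clusterProps
    // (e.g. fs.default.name and mapred.job.tracker on classic Hadoop).
    public static PigServer direct(Properties clusterProps) throws ExecException {
        return new PigServer(ExecType.MAPREDUCE, clusterProps);
    }

    // Equivalent construction through an explicit PigContext, as Examples 9 and 11 do.
    public static PigServer viaContext(Properties clusterProps) throws ExecException {
        PigContext context = new PigContext(ExecType.MAPREDUCE, clusterProps);
        return new PigServer(context);
    }
}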

Example 1: createPig

protected PigServer createPig() throws ExecException {
    HdpBootstrap.hackHadoopStagingOnWin();

    Properties properties = HdpBootstrap.asProperties(QueryTestParams.provisionQueries(HdpBootstrap.hadoopConfig()));
    String pigHost = properties.getProperty("pig");
    // remote Pig instance
    if (StringUtils.hasText(pigHost) && !"local".equals(pigHost)) {
        LogFactory.getLog(PigWrapper.class).info("Executing Pig in Map/Reduce mode");
        return new PigServer(ExecType.MAPREDUCE, properties);
    }

    // use local instance
    LogFactory.getLog(PigWrapper.class).info("Executing Pig in local mode");
    properties.put("mapred.job.tracker", "local");
    return new PigServer(ExecType.LOCAL, properties);
}
 
Developer: xushjie1987, Project: es-hadoop-v2.2.0, Lines: 16, Source: PigWrapper.java
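A hedged usage sketch, assuming it lives in the same class as createPig(): whichever mode is selected, the returned server is driven the same way. The load path and output name below are invented for illustration.

protected void runSmokeQuery() throws Exception {
    PigServer pig = createPig();
    try {
        // hypothetical input file and output location
        pig.registerQuery("A = LOAD 'src/test/resources/sample.dat' USING PigStorage();");
        pig.store("A", "pig-mr-smoke-output");
    } finally {
        pig.shutdown();
    }
}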

Example 2: DeduplicationJob

public DeduplicationJob(List<String> files, List<String> dimensions) {
    this.files = files;

    Properties props = new Properties();
    props.setProperty("output.compression.enabled", "true");
    props.setProperty("output.compression.codec", "org.apache.hadoop.io.compress.GzipCodec");

    try {
        this.pigServer = new PigServer(ExecType.MAPREDUCE, props);
    } catch (ExecException e) {
        log.error("Couldn't execute pig server: {}", e.getMessage());
        e.printStackTrace();
    }

    this.usingAsDimensions = Joiner.on(":chararray, ").join(dimensions) + ", data:Map[], count:int";
    this.groupByDimensions = Joiner.on(", ").join(dimensions);
    this.loaderDimensions = "'" + Joiner.on("','").join(dimensions) + "'";
}
 
Developer: redBorder, Project: camus-sync, Lines: 18, Source: DeduplicationJob.java
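The three strings assembled at the end of the constructor are Pig Latin fragments: a schema for an AS clause, a grouping key list, and a quoted argument list for a loader. This snippet does not show how camus-sync consumes them; the hypothetical method below (the loader name and the final aggregation are placeholders) only illustrates the role each fragment plays.

private void registerDeduplicationScript(String inputPath) throws IOException {
    // "SomeAvroLoader" is a placeholder, not the loader the real project uses.
    pigServer.registerQuery("events = LOAD '" + inputPath + "' "
            + "USING SomeAvroLoader(" + loaderDimensions + ") "
            + "AS (" + usingAsDimensions + ");");
    pigServer.registerQuery("grouped = GROUP events BY (" + groupByDimensions + ");");
    // The aggregation below is invented; the real job decides how to collapse duplicates.
    pigServer.registerQuery("deduped = FOREACH grouped GENERATE FLATTEN(group), "
            + "SUM(events.count) AS count;");
}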

Example 3: testFsCommand

@Test
public void testFsCommand() throws Throwable {

    PigServer server = new PigServer(ExecType.MAPREDUCE,cluster.getProperties());
    PigContext context = server.getPigContext();

    String strCmd = 
            "fs -ls /;"
                    +"fs -mkdir /fstmp;"
                    +"fs -mkdir /fstmp/foo;"
                    +"cd /fstmp;"                
                    +"fs -copyFromLocal test/org/apache/pig/test/data/passwd bar;"
                    +"a = load 'bar';"
                    +"cd foo;"
                    +"store a into 'baz';"
                    +"cd /;"
                    +"fs -ls .;"
                    +"fs -rmr /fstmp/foo/baz;";

    ByteArrayInputStream cmd = new ByteArrayInputStream(strCmd.getBytes());
    InputStreamReader reader = new InputStreamReader(cmd);

    Grunt grunt = new Grunt(new BufferedReader(reader), context);
    grunt.exec();

}
 
Developer: sigmoidanalytics, Project: spork-streaming, Lines: 26, Source: TestGrunt.java
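Examples 3, 5 through 8, and 13 through 15 all repeat the same boilerplate: build a command string, wrap it in a reader, and hand it to Grunt. A small helper capturing that pattern could look like the sketch below; it is written for this article and is not part of TestGrunt.

import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.InputStreamReader;
import java.util.Properties;

import org.apache.pig.ExecType;
import org.apache.pig.PigServer;
import org.apache.pig.impl.PigContext;
import org.apache.pig.tools.grunt.Grunt;

final class GruntRunner {

    private GruntRunner() {
    }

    // Runs a Grunt command string against a MapReduce-mode PigServer built from the
    // given mini-cluster properties, exactly as the surrounding tests do inline.
    static void run(String commands, Properties clusterProperties) throws Throwable {
        PigServer server = new PigServer(ExecType.MAPREDUCE, clusterProperties);
        PigContext context = server.getPigContext();
        BufferedReader reader = new BufferedReader(
                new InputStreamReader(new ByteArrayInputStream(commands.getBytes())));
        new Grunt(reader, context).exec();
    }
}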

Example 4: setUp

@Before
public void setUp() throws Exception {
    pigServer = new PigServer(ExecType.MAPREDUCE, cluster.getProperties());
    inputFile = "TestBestFitCast-input.txt";
    String[] input = new String[LOOP_SIZE];
    long l = 0;
    for (int i = 1; i <= LOOP_SIZE; i++) {
        input[i - 1] = (l + "\t" + i);
    }
    Util.createInputFile(cluster, inputFile, input);

    inputFile2 = "TestBestFitCast-input2.txt";
    l = 0;
    for (int i = 1; i <= LOOP_SIZE; i++) {
        input[i - 1] = (l + "\t" + i + "\t" + i);
    }
    Util.createInputFile(cluster, inputFile2, input);
}
 
Developer: sigmoidanalytics, Project: spork-streaming, Lines: 18, Source: TestBestFitCast.java

Example 5: testRegisterWithQuotes

@Test    
public void testRegisterWithQuotes() throws Throwable {
    PigServer server = new PigServer(ExecType.MAPREDUCE, cluster.getProperties());
    PigContext context = server.getPigContext();

    String strCmd = "register 'pig-withouthadoop.jar'\n";

    ByteArrayInputStream cmd = new ByteArrayInputStream(strCmd.getBytes());
    InputStreamReader reader = new InputStreamReader(cmd);

    Grunt grunt = new Grunt(new BufferedReader(reader), context);

    grunt.exec();
    assertEquals(context.extraJars+ " of size 1", 1, context.extraJars.size());
    assertTrue(context.extraJars.get(0)+" ends with /pig-withouthadoop.jar", context.extraJars.get(0).toString().endsWith("/pig-withouthadoop.jar"));
}
 
Developer: sigmoidanalytics, Project: spork-streaming, Lines: 16, Source: TestGrunt.java

Example 6: testBagSchemaFail

@Test 
public void testBagSchemaFail() throws Throwable {
    PigServer server = new PigServer(ExecType.MAPREDUCE, cluster.getProperties());
    PigContext context = server.getPigContext();
    
    String strCmd = "a = load 'input1'as (b: bag{t:(i: int, c:chararray, f: float)});\n";
    
    ByteArrayInputStream cmd = new ByteArrayInputStream(strCmd.getBytes());
    InputStreamReader reader = new InputStreamReader(cmd);
    
    Grunt grunt = new Grunt(new BufferedReader(reader), context);

    try {
        grunt.exec();
    } catch (Exception e) {
        assertTrue(e.getMessage().contains("<line 1, column 62>")
                &&  e.getMessage().contains("mismatched input ';' expecting RIGHT_PAREN"));
    }
}
 
Developer: sigmoidanalytics, Project: spork-streaming, Lines: 19, Source: TestGrunt.java

Example 7: testExecStatment

@Test
public void testExecStatment() throws Throwable {
    PigServer server = new PigServer(ExecType.MAPREDUCE, cluster.getProperties());
    PigContext context = server.getPigContext();
    boolean caught = false;
    
    String strCmd = "a = load 'foo' as (foo, fast, regenerate);"
            + " exec -param LIMIT=5 -param FUNCTION=COUNT "
            + "-param FILE=foo " + basedir + "/testsub.pig; explain bar;";
    
    ByteArrayInputStream cmd = new ByteArrayInputStream(strCmd.getBytes());
    InputStreamReader reader = new InputStreamReader(cmd);
    
    Grunt grunt = new Grunt(new BufferedReader(reader), context);
    
    try {
        grunt.exec();
    } catch (Exception e) {
        caught = true;
        assertTrue(e.getMessage().contains("alias bar"));
    }
    assertTrue(caught);
}
 
Developer: sigmoidanalytics, Project: spork-streaming, Lines: 23, Source: TestGrunt.java

Example 8: testBagConstantWithSchemaInForeachBlock

@Test 
public void testBagConstantWithSchemaInForeachBlock() throws Throwable {
    PigServer server = new PigServer(ExecType.MAPREDUCE, cluster.getProperties());
    PigContext context = server.getPigContext();
    
    String strCmd = "a = load 'input1'; "
            + "b = foreach a {generate {(1, '1', 0.4f),(2, '2', 0.45)} "
            + "as b: bag{t:(i: int, c:chararray, d: double)};};\n";
    
    ByteArrayInputStream cmd = new ByteArrayInputStream(strCmd.getBytes());
    InputStreamReader reader = new InputStreamReader(cmd);
    
    Grunt grunt = new Grunt(new BufferedReader(reader), context);

    grunt.exec();
}
 
Developer: sigmoidanalytics, Project: spork-streaming, Lines: 16, Source: TestGrunt.java

Example 9: testPigServer

@Test
public void testPigServer() throws Throwable {
    log.debug("creating pig server");
    PigContext pigContext = new PigContext(ExecType.MAPREDUCE, cluster.getProperties());
    PigServer pig = new PigServer(pigContext);
    System.out.println("testing capacity");
    long capacity = pig.capacity();
    assertTrue(capacity > 0);
    String sampleFileName = "/tmp/fileTest";
    if (!pig.existsFile(sampleFileName)) {
        ElementDescriptor path = pigContext.getDfs().asElement(sampleFileName);
        OutputStream os = path.create();
        os.write("Ben was here!".getBytes());
        os.close();
    }
    long length = pig.fileSize(sampleFileName);
    assertTrue(length > 0);
}
 
Developer: sigmoidanalytics, Project: spork-streaming, Lines: 18, Source: TestMapReduce.java
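A natural follow-up, not present in the original test, would be to remove the sample file through the same PigServer convenience API; existsFile() is already used above and deleteFile() is its counterpart.

// Hedged cleanup sketch for the file created by the test.
if (pig.existsFile(sampleFileName)) {
    pig.deleteFile(sampleFileName);
}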

Example 10: testStreamingStderrLogsShouldNotBePersistedByDefault

@Test
public void testStreamingStderrLogsShouldNotBePersistedByDefault() throws Exception {

    Util.createInputFile(cluster, "mydummyinput.txt", new String[] { "dummy"});

    PigServer pig = new PigServer(ExecType.MAPREDUCE,cluster.getProperties());
    pig.setBatchOn();

    pig.registerQuery("define mycmd `echo dummy` ;");
    pig.registerQuery("A = load 'mydummyinput.txt' as (f1:chararray);");
    pig.registerQuery("B = stream A through mycmd;");
    pig.registerQuery("store B into 'output_dir_001' ;");
    pig.executeBatch();

    Assert.assertTrue(Util.exists(pig.getPigContext(), "output_dir_001"));
    Assert.assertFalse(Util.exists(pig.getPigContext(), "output_dir_001/_logs/mycmd"));

}
 
Developer: sigmoidanalytics, Project: spork-streaming, Lines: 18, Source: TestStreaming.java
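The test pins down the default behaviour: with a bare DEFINE, no stderr logs appear under the job's _logs directory. Pig's DEFINE statement for streaming commands also accepts a stderr clause for persisting those logs; the syntax in the sketch below (stderr('<dir>' limit <n>)) is my best recollection and should be checked against the Pig streaming documentation for your version.

// Assumption: the stderr('mycmd' limit 1) clause asks Pig to keep streaming stderr
// under <output>/_logs/mycmd; verify the exact syntax for your Pig release.
pig.registerQuery("define mycmd `echo dummy` stderr('mycmd' limit 1);");
pig.registerQuery("A = load 'mydummyinput.txt' as (f1:chararray);");
pig.registerQuery("B = stream A through mycmd;");
pig.registerQuery("store B into 'output_dir_002';");
pig.executeBatch();
// With persistence enabled, output_dir_002/_logs/mycmd would then be expected to exist.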

Example 11: init

@Override
protected void init() throws InitUnitException {
    try {
        Configuration configuration = gatherConfigs();
        PigContext pigContext = new PigContext(ExecType.MAPREDUCE, configuration);
        pigServer = new PigServer(pigContext);
    } catch (PigException e) {
        throw new InitUnitException(e);
    }
}
 
Developer: intropro, Project: prairie, Lines: 10, Source: PigUnit.java

Example 12: saveToHadoop

public void saveToHadoop() throws IOException {
    PigServer pigServer = new PigServer(ExecType.MAPREDUCE);
    String pigQuery = "REGISTER 'WebContent/WEB-INF/lib/mongo-hadoop-core-1.3.2.jar';"
            + "REGISTER 'WebContent/WEB-INF/lib/mongo-hadoop-pig-1.3.0.jar';"
            + " A = LOAD 'mongodb://localhost:27017/chatroom.messageBackup'"
            + " USING com.mongodb.hadoop.pig.MongoLoader('address, message, time')"
            + " AS (address:chararray, message:chararray, time:datetime);";
    pigServer.registerQuery(pigQuery);
    pigServer.store("A", "/user/luffy/chatroom/" + this.time.toString().replaceAll(" ", "_").replaceAll(":", "-"));
    pigServer.shutdown();
    deleteBackupCollection();
}
 
Developer: sachinB94, Project: java-chatroom, Lines: 12, Source: ChatroomMessage.java
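Here the backed-up messages land on HDFS as plain text. The same connector can also write back into MongoDB; the sketch below follows the MongoStorage pattern from the mongo-hadoop documentation, with a made-up target collection.

// Hedged sketch: store relation A into MongoDB instead of HDFS.
// The database/collection URI is a placeholder for illustration.
pigServer.store("A", "mongodb://localhost:27017/chatroom.messageArchive",
        "com.mongodb.hadoop.pig.MongoStorage()");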

Example 13: testCopyFromLocal

@Test 
public void testCopyFromLocal() throws Throwable {
    PigServer server = new PigServer(ExecType.MAPREDUCE, cluster.getProperties());
    PigContext context = server.getPigContext();
    
    String strCmd = "copyFromLocal /bin/sh sh_copy ;";
    
    ByteArrayInputStream cmd = new ByteArrayInputStream(strCmd.getBytes());
    InputStreamReader reader = new InputStreamReader(cmd);
    
    Grunt grunt = new Grunt(new BufferedReader(reader), context);

    grunt.exec();
}
 
Developer: sigmoidanalytics, Project: spork-streaming, Lines: 14, Source: TestGrunt.java

Example 14: testBagSchema

@Test 
public void testBagSchema() throws Throwable {
    PigServer server = new PigServer(ExecType.MAPREDUCE, cluster.getProperties());
    PigContext context = server.getPigContext();
    
    String strCmd = "a = load 'input1' as (b: bag{t:(i: int, c:chararray, f: float)});\n";
    
    ByteArrayInputStream cmd = new ByteArrayInputStream(strCmd.getBytes());
    InputStreamReader reader = new InputStreamReader(cmd);
    
    Grunt grunt = new Grunt(new BufferedReader(reader), context);

    grunt.exec();
}
 
Developer: sigmoidanalytics, Project: spork-streaming, Lines: 14, Source: TestGrunt.java

Example 15: testExplainDot

@Test
public void testExplainDot() throws Throwable {
    PigServer server = new PigServer(ExecType.MAPREDUCE, cluster.getProperties());
    PigContext context = server.getPigContext();
    
    String strCmd = "a = load 'foo' as (foo, fast, regenerate); explain -dot -script "
            + basedir + "/testsubnested_run.pig;";
    
    ByteArrayInputStream cmd = new ByteArrayInputStream(strCmd.getBytes());
    InputStreamReader reader = new InputStreamReader(cmd);
    
    Grunt grunt = new Grunt(new BufferedReader(reader), context);

    grunt.exec();
}
 
Developer: sigmoidanalytics, Project: spork-streaming, Lines: 15, Source: TestGrunt.java


Note: The org.apache.pig.ExecType.MAPREDUCE examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by their respective authors; copyright remains with the original authors, and redistribution and use should follow each project's license. Do not reproduce without permission.