当前位置: 首页>>代码示例>>Java>>正文


Java DBConfiguration类代码示例

本文整理汇总了Java中org.apache.hadoop.mapred.lib.db.DBConfiguration的典型用法代码示例。如果您正苦于以下问题:Java DBConfiguration类的具体用法?Java DBConfiguration怎么用?Java DBConfiguration使用的例子?那么, 这里精选的类代码示例或许可以为您提供帮助。


DBConfiguration类属于org.apache.hadoop.mapred.lib.db包,在下文中一共展示了DBConfiguration类的7个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。

示例1: testDBRecordReader

import org.apache.hadoop.mapred.lib.db.DBConfiguration; //导入依赖的package包/类
/**
 * Verifies {@code DBRecordReader} basics against a mocked connection:
 * a fresh reader yields a zero key, a {@code NullDBWritable} value,
 * position 0, and reports no records on the first {@code next()}.
 */
@SuppressWarnings("unchecked")
@Test (timeout = 5000)
public void testDBRecordReader() throws Exception {

  JobConf jobConf = mock(JobConf.class);
  DBConfiguration dbConfiguration = mock(DBConfiguration.class);
  // Field names fed into the generated SELECT; "filed2" is kept verbatim
  // so the fake driver still matches the expected query text.
  String[] fieldNames = { "field1", "filed2" };

  @SuppressWarnings("rawtypes")
  DBRecordReader recordReader = new DBInputFormat<NullDBWritable>().new DBRecordReader(
      new DBInputSplit(), NullDBWritable.class, jobConf,
      DriverForTest.getConnection(), dbConfiguration, "condition", fieldNames,
      "table");

  LongWritable recordKey = recordReader.createKey();
  assertEquals(0, recordKey.get());

  DBWritable recordValue = recordReader.createValue();
  assertEquals(
      "org.apache.hadoop.mapred.lib.db.DBInputFormat$NullDBWritable",
      recordValue.getClass().getName());

  assertEquals(0, recordReader.getPos());
  // The faked result set is empty, so the very first next() returns false.
  assertFalse(recordReader.next(recordKey, recordValue));
}
 
开发者ID:naver,项目名称:hadoop,代码行数:27,代码来源:TestDBInputFormat.java

示例2: configureJobProperties

import org.apache.hadoop.mapred.lib.db.DBConfiguration; //导入依赖的package包/类
/**
 * Copies the JDBC-related table metadata into the MapReduce job properties
 * so the DB input/output formats can find the table name, record class and
 * column list, then forwards any {@code mapred.jdbc.*} table properties as-is.
 *
 * @param tableDesc     Hive table descriptor supplying the name and properties
 * @param jobProperties mutable map of job properties to populate
 */
private void configureJobProperties(TableDesc tableDesc, Map<String, String> jobProperties) {
    if(LOG.isDebugEnabled()) {
        // Fixed log-message typo: "tabelDesc" -> "tableDesc".
        LOG.debug("tableDesc: " + tableDesc);
        LOG.debug("jobProperties: " + jobProperties);
    }

    String tblName = tableDesc.getTableName();
    Properties tblProps = tableDesc.getProperties();
    // NOTE(review): may be null if the columns property is absent — the map
    // would then store a null value; confirm downstream consumers tolerate it.
    String columnNames = tblProps.getProperty(Constants.LIST_COLUMNS);
    jobProperties.put(DBConfiguration.INPUT_CLASS_PROPERTY, DbRecordWritable.class.getName());
    jobProperties.put(DBConfiguration.INPUT_TABLE_NAME_PROPERTY, tblName);
    jobProperties.put(DBConfiguration.OUTPUT_TABLE_NAME_PROPERTY, tblName);
    jobProperties.put(DBConfiguration.INPUT_FIELD_NAMES_PROPERTY, columnNames);
    jobProperties.put(DBConfiguration.OUTPUT_FIELD_NAMES_PROPERTY, columnNames);

    // Pass through any user-supplied JDBC settings (url, driver, credentials…).
    for(String key : tblProps.stringPropertyNames()) {
        if(key.startsWith("mapred.jdbc.")) {
            String value = tblProps.getProperty(key);
            jobProperties.put(key, value);
        }
    }
}
 
开发者ID:myui,项目名称:HiveJdbcStorageHandler,代码行数:23,代码来源:JdbcStorageHandler.java

示例3: setupDriver

import org.apache.hadoop.mapred.lib.db.DBConfiguration; //导入依赖的package包/类
/**
 * Registers the test JDBC driver with {@link DriverManager} and points the
 * job configuration's URL and driver-class properties at it.
 */
private void setupDriver(JobConf jobConf) throws Exception {
  DriverManager.registerDriver(new DriverForTest());
  jobConf.set(DBConfiguration.URL_PROPERTY, "testUrl");
  jobConf.set(DBConfiguration.DRIVER_CLASS_PROPERTY,
      DriverForTest.class.getCanonicalName());
}
 
开发者ID:naver,项目名称:hadoop,代码行数:7,代码来源:TestDBInputFormat.java

示例4: testSetInput

import org.apache.hadoop.mapred.lib.db.DBConfiguration; //导入依赖的package包/类
/**
 * Checks every DBConfiguration.* property written by the two
 * {@code DBInputFormat.setInput} overloads and by both
 * {@code DBConfiguration.configureDB} overloads.
 */
@Test (timeout = 5000)
public void testSetInput() {
  // Table-based variant: class, table, conditions, orderBy, field names.
  JobConf conf = new JobConf();
  String[] columns = { "field1", "field2" };
  DBInputFormat.setInput(conf, NullDBWritable.class, "table",
      "conditions", "orderBy", columns);

  assertEquals(
      "org.apache.hadoop.mapred.lib.db.DBInputFormat$NullDBWritable",
      conf.getClass(DBConfiguration.INPUT_CLASS_PROPERTY, null).getName());
  assertEquals("table",
      conf.get(DBConfiguration.INPUT_TABLE_NAME_PROPERTY, null));

  String[] storedFields =
      conf.getStrings(DBConfiguration.INPUT_FIELD_NAMES_PROPERTY);
  assertEquals("field1", storedFields[0]);
  assertEquals("field2", storedFields[1]);

  assertEquals("conditions",
      conf.get(DBConfiguration.INPUT_CONDITIONS_PROPERTY, null));
  assertEquals("orderBy",
      conf.get(DBConfiguration.INPUT_ORDER_BY_PROPERTY, null));

  // Query-based variant: free-form query plus count query.
  conf = new JobConf();
  DBInputFormat.setInput(conf, NullDBWritable.class, "query", "countQuery");
  assertEquals("query", conf.get(DBConfiguration.INPUT_QUERY, null));
  assertEquals("countQuery",
      conf.get(DBConfiguration.INPUT_COUNT_QUERY, null));

  // configureDB with credentials stores all four connection properties.
  JobConf dbConf = new JobConf();
  DBConfiguration.configureDB(dbConf, "driverClass", "dbUrl", "user",
      "password");
  assertEquals("driverClass",
      dbConf.get(DBConfiguration.DRIVER_CLASS_PROPERTY));
  assertEquals("dbUrl", dbConf.get(DBConfiguration.URL_PROPERTY));
  assertEquals("user", dbConf.get(DBConfiguration.USERNAME_PROPERTY));
  assertEquals("password", dbConf.get(DBConfiguration.PASSWORD_PROPERTY));

  // configureDB without credentials must leave user/password unset.
  dbConf = new JobConf();
  DBConfiguration.configureDB(dbConf, "driverClass", "dbUrl");
  assertEquals("driverClass",
      dbConf.get(DBConfiguration.DRIVER_CLASS_PROPERTY));
  assertEquals("dbUrl", dbConf.get(DBConfiguration.URL_PROPERTY));
  assertNull(dbConf.get(DBConfiguration.USERNAME_PROPERTY));
  assertNull(dbConf.get(DBConfiguration.PASSWORD_PROPERTY));
}
 
开发者ID:aliyun-beta,项目名称:aliyun-oss-hadoop-fs,代码行数:53,代码来源:TestDBInputFormat.java

示例5: run

import org.apache.hadoop.mapred.lib.db.DBConfiguration; //导入依赖的package包/类
@Override
// Usage: DBCountPageView [driverClass dburl]
public int run(String[] args) throws Exception {
  // Default to the built-in driver/URL unless both were supplied on the CLI.
  String driver = DRIVER_CLASS;
  String dbUrl = DB_URL;
  if (args.length > 1) {
    driver = args[0];
    dbUrl = args[1];
  }

  initialize(driver, dbUrl);

  JobConf jobConf = new JobConf(getConf(), DBCountPageView.class);
  jobConf.setJobName("Count Pageviews of URLs");

  jobConf.setMapperClass(PageviewMapper.class);
  jobConf.setCombinerClass(LongSumReducer.class);
  jobConf.setReducerClass(PageviewReducer.class);

  DBConfiguration.configureDB(jobConf, driver, dbUrl);

  // Read the Access table ordered by url; write aggregates to Pageview.
  DBInputFormat.setInput(jobConf, AccessRecord.class, "Access", null, "url",
      AccessFieldNames);
  DBOutputFormat.setOutput(jobConf, "Pageview", PageviewFieldNames);

  jobConf.setMapOutputKeyClass(Text.class);
  jobConf.setMapOutputValueClass(LongWritable.class);

  jobConf.setOutputKeyClass(PageviewRecord.class);
  jobConf.setOutputValueClass(NullWritable.class);

  try {
    JobClient.runJob(jobConf);
    if (!verify()) {
      throw new RuntimeException("Evaluation was not correct!");
    }
  } finally {
    // Always tear down the embedded database, even when the job fails.
    shutdown();
  }
  return 0;
}
 
开发者ID:Nextzero,项目名称:hadoop-2.6.0-cdh5.4.3,代码行数:48,代码来源:DBCountPageView.java

示例6: run

import org.apache.hadoop.mapred.lib.db.DBConfiguration; //导入依赖的package包/类
/**
 * The MapReduce driver - setup and launch the job.
 *
 * @param args the command-line arguments
 * @return the process exit code
 * @throws Exception if something goes wrong
 */
public int run(final String[] args) throws Exception {
  // Parse the command line; propagate a non-zero status without launching.
  Cli cli = Cli.builder().setArgs(args)
      .addOptions(CliCommonOpts.OutputFileOption.values()).build();
  int cliStatus = cli.runCmd();
  if (cliStatus != 0) {
    return cliStatus;
  }

  Path outputPath =
      new Path(cli.getArgValueAsString(CliCommonOpts.OutputFileOption.OUTPUT));

  Configuration baseConf = super.getConf();

  // NOTE(review): credentials are embedded in the JDBC URL — confirm this is
  // acceptable for the target environment before reusing.
  DBConfiguration.configureDB(baseConf, "com.mysql.jdbc.Driver",
      "jdbc:mysql://localhost/sqoop_test" +
          "?user=hip_sqoop_user&password=password");

  JobConf jobConf = new JobConf(baseConf);
  jobConf.setJarByClass(DBImportMapReduce.class);

  // Read rows via JDBC, emit Snappy-compressed Avro records.
  jobConf.setInputFormat(DBInputFormat.class);
  jobConf.setOutputFormat(AvroOutputFormat.class);
  AvroJob.setOutputSchema(jobConf, Stock.SCHEMA$);
  jobConf.set(AvroJob.OUTPUT_CODEC, SnappyCodec.class.getName());

  // Map-only import: four parallel readers, no reduce phase.
  jobConf.setMapperClass(Map.class);
  jobConf.setNumMapTasks(4);
  jobConf.setNumReduceTasks(0);

  jobConf.setMapOutputKeyClass(AvroWrapper.class);
  jobConf.setMapOutputValueClass(NullWritable.class);

  jobConf.setOutputKeyClass(AvroWrapper.class);
  jobConf.setOutputValueClass(NullWritable.class);

  FileOutputFormat.setOutputPath(jobConf, outputPath);

  DBInputFormat.setInput(
      jobConf,
      StockDbWritable.class,
      "select * from stocks",
      "SELECT COUNT(id) FROM stocks");

  RunningJob submittedJob = JobClient.runJob(jobConf);

  return submittedJob.isSuccessful() ? 0 : 1;
}
 
开发者ID:Hanmourang,项目名称:hiped2,代码行数:56,代码来源:DBImportMapReduce.java

示例7: run

import org.apache.hadoop.mapred.lib.db.DBConfiguration; //导入依赖的package包/类
@Override
// Usage: DBCountPageView [driverClass dburl]
public int run(String[] args) throws Exception {

    // Command-line arguments override the built-in driver class and DB URL.
    String jdbcDriver = (args.length > 1) ? args[0] : DRIVER_CLASS;
    String jdbcUrl = (args.length > 1) ? args[1] : DB_URL;

    initialize(jdbcDriver, jdbcUrl);

    JobConf pageviewJob = new JobConf(getConf(), DBCountPageView.class);
    pageviewJob.setJobName("Count Pageviews of URLs");

    pageviewJob.setMapperClass(PageviewMapper.class);
    pageviewJob.setCombinerClass(LongSumReducer.class);
    pageviewJob.setReducerClass(PageviewReducer.class);

    DBConfiguration.configureDB(pageviewJob, jdbcDriver, jdbcUrl);

    // Input: Access table ordered by url; output: aggregated Pageview rows.
    DBInputFormat.setInput(pageviewJob, AccessRecord.class, "Access", null, "url", AccessFieldNames);
    DBOutputFormat.setOutput(pageviewJob, "Pageview", PageviewFieldNames);

    pageviewJob.setMapOutputKeyClass(Text.class);
    pageviewJob.setMapOutputValueClass(LongWritable.class);

    pageviewJob.setOutputKeyClass(PageviewRecord.class);
    pageviewJob.setOutputValueClass(NullWritable.class);

    try {
        JobClient.runJob(pageviewJob);

        if (!verify()) {
            throw new RuntimeException("Evaluation was not correct!");
        }
    } finally {
        // Shut the embedded database down regardless of job outcome.
        shutdown();
    }
    return 0;
}
 
开发者ID:elephantscale,项目名称:hadoop-book,代码行数:47,代码来源:DBCountPageView.java


注:本文中的org.apache.hadoop.mapred.lib.db.DBConfiguration类示例由纯净天空整理自Github/MSDocs等开源代码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。