

Java DBInputFormat Class Code Examples

This article collects typical usage examples of the Java class org.apache.hadoop.mapreduce.lib.db.DBInputFormat. If you are wondering what exactly DBInputFormat does, how to use it, or what working examples look like, the curated snippets below should help.


The DBInputFormat class belongs to the org.apache.hadoop.mapreduce.lib.db package. Five code examples of the class are shown below, sorted by popularity by default.

Example 1: main

import org.apache.hadoop.mapreduce.lib.db.DBInputFormat; // import the required package/class
public static void main(String[] args) throws Exception {
  Configuration conf = new Configuration();

  // Configure the JDBC driver, connection URL and credentials on conf
  // *before* creating the Job: new Job(conf) takes a copy, so changes
  // made to conf afterwards never reach the job.
  DBConfiguration.configureDB(conf, "com.mysql.jdbc.Driver",
      "jdbc:mysql://192.168.3.244:3306/hadoop", "hua", "hadoop");
  // Ship the MySQL JDBC driver jar to the task classpath.
  DistributedCache.addFileToClassPath(new Path(
      "/lib/mysql-connector-java-5.1.0-bin.jar"), conf);

  Job job = new Job(conf);
  job.setJarByClass(DBToFileMapReduce.class);

  job.setMapperClass(DBInputMapper.class);
  job.setReducerClass(Reducer.class); // identity reducer

  job.setMapOutputKeyClass(LongWritable.class);
  job.setMapOutputValueClass(Text.class);
  job.setOutputKeyClass(LongWritable.class);
  job.setOutputValueClass(Text.class);

  job.setInputFormatClass(DBInputFormat.class);

  // Delete any previous output so the job can rerun cleanly.
  Path outputPath = new Path("/hua01");
  FileOutputFormat.setOutputPath(job, outputPath);
  outputPath.getFileSystem(conf).delete(outputPath, true);

  // Read columns id and name from table studentinfo, ordered by id.
  String[] fields = { "id", "name" };
  DBInputFormat.setInput(job, StudentinfoRecord.class, "studentinfo", null, "id", fields);

  job.waitForCompletion(true);
}
 
Author: willddy | Project: bigdata_pattern | Lines: 29 | Source: DBToFileMapReduce.java
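Example 1 references a StudentinfoRecord value class and a DBInputMapper that the snippet does not show. Purely as a sketch (the field layout is assumed from the { "id", "name" } column list; the original project's versions may differ), such a pair could look like this:

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.db.DBWritable;

// Sketch only: one row of the studentinfo table (assumed columns: id, name).
class StudentinfoRecord implements Writable, DBWritable {
  int id;
  String name;

  @Override
  public void readFields(ResultSet rs) throws SQLException {
    id = rs.getInt(1);
    name = rs.getString(2);
  }

  @Override
  public void write(PreparedStatement ps) throws SQLException {
    ps.setInt(1, id);
    ps.setString(2, name);
  }

  @Override
  public void readFields(DataInput in) throws IOException {
    id = in.readInt();
    name = Text.readString(in);
  }

  @Override
  public void write(DataOutput out) throws IOException {
    out.writeInt(id);
    Text.writeString(out, name);
  }
}

// Sketch only: turn each record into a "id name" text line.
class DBInputMapper extends Mapper<LongWritable, StudentinfoRecord, LongWritable, Text> {
  @Override
  protected void map(LongWritable key, StudentinfoRecord value, Context context)
      throws IOException, InterruptedException {
    context.write(key, new Text(value.id + " " + value.name));
  }
}

DBInputFormat hands each mapper a LongWritable record number as the key and one deserialized record as the value, which is why the mapper's input types are LongWritable and StudentinfoRecord.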

Example 2: run

import org.apache.hadoop.mapreduce.lib.db.DBInputFormat; // import the required package/class
@Override
//Usage DBCountPageView [driverClass dburl]
public int run(String[] args) throws Exception {
  
  String driverClassName = DRIVER_CLASS;
  String url = DB_URL;
  
  if(args.length > 1) {
    driverClassName = args[0];
    url = args[1];
  }
  
  initialize(driverClassName, url);
  Configuration conf = getConf();

  DBConfiguration.configureDB(conf, driverClassName, url);

  Job job = new Job(conf);
      
  job.setJobName("Count Pageviews of URLs");
  job.setJarByClass(DBCountPageView.class);
  job.setMapperClass(PageviewMapper.class);
  job.setCombinerClass(LongSumReducer.class);
  job.setReducerClass(PageviewReducer.class);

  DBInputFormat.setInput(job, AccessRecord.class, "Access"
      , null, "url", AccessFieldNames);

  DBOutputFormat.setOutput(job, "Pageview", PageviewFieldNames);
  
  job.setMapOutputKeyClass(Text.class);
  job.setMapOutputValueClass(LongWritable.class);

  job.setOutputKeyClass(PageviewRecord.class);
  job.setOutputValueClass(NullWritable.class);
  int ret;
  try {
    ret = job.waitForCompletion(true) ? 0 : 1;
    boolean correct = verify();
    if(!correct) {
      throw new RuntimeException("Evaluation was not correct!");
    }
  } finally {
    shutdown();    
  }
  return ret;
}
 
Author: naver | Project: hadoop | Lines: 48 | Source: DBCountPageView.java
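AccessRecord, PageviewRecord, PageviewMapper and the verify()/shutdown() helpers are defined elsewhere in DBCountPageView.java and are not reproduced on this page. As a sketch of the input side only (column names assumed from the AccessFieldNames constant; the real class in the Hadoop example may differ), an AccessRecord would look roughly like this:

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapreduce.lib.db.DBWritable;

// Sketch only: one row of the Access table (assumed columns: url, referrer, time).
class AccessRecord implements Writable, DBWritable {
  String url;
  String referrer;
  long time;

  @Override
  public void readFields(ResultSet rs) throws SQLException {
    url = rs.getString(1);
    referrer = rs.getString(2);
    time = rs.getLong(3);
  }

  @Override
  public void write(PreparedStatement ps) throws SQLException {
    ps.setString(1, url);
    ps.setString(2, referrer);
    ps.setLong(3, time);
  }

  @Override
  public void readFields(DataInput in) throws IOException {
    url = Text.readString(in);
    referrer = Text.readString(in);
    time = in.readLong();
  }

  @Override
  public void write(DataOutput out) throws IOException {
    Text.writeString(out, url);
    Text.writeString(out, referrer);
    out.writeLong(time);
  }
}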

Example 3: run

import org.apache.hadoop.mapreduce.lib.db.DBInputFormat; // import the required package/class
@Override
//Usage DBCountPageView [driverClass dburl]
public int run(String[] args) throws Exception {
  
  String driverClassName = DRIVER_CLASS;
  String url = DB_URL;
  
  if(args.length > 1) {
    driverClassName = args[0];
    url = args[1];
  }
  
  initialize(driverClassName, url);
  Configuration conf = getConf();

  DBConfiguration.configureDB(conf, driverClassName, url);

  Job job = Job.getInstance(conf);
      
  job.setJobName("Count Pageviews of URLs");
  job.setJarByClass(DBCountPageView.class);
  job.setMapperClass(PageviewMapper.class);
  job.setCombinerClass(LongSumReducer.class);
  job.setReducerClass(PageviewReducer.class);

  DBInputFormat.setInput(job, AccessRecord.class, "HAccess"
      , null, "url", AccessFieldNames);

  DBOutputFormat.setOutput(job, "Pageview", PageviewFieldNames);
  
  job.setMapOutputKeyClass(Text.class);
  job.setMapOutputValueClass(LongWritable.class);

  job.setOutputKeyClass(PageviewRecord.class);
  job.setOutputValueClass(NullWritable.class);
  int ret;
  try {
    ret = job.waitForCompletion(true) ? 0 : 1;
    boolean correct = verify();
    if(!correct) {
      throw new RuntimeException("Evaluation was not correct!");
    }
  } finally {
    shutdown();    
  }
  return ret;
}
 
Author: aliyun-beta | Project: aliyun-oss-hadoop-fs | Lines: 48 | Source: DBCountPageView.java

Example 4: validateClass

import org.apache.hadoop.mapreduce.lib.db.DBInputFormat; // import the required package/class
/**
 * Validate whether class is {@link DBInputFormat} or a subclass.
 *
 * @param clazz the class to validate
 * @param ctx the context to throw the error in if class is invalid
 * @param scope the scope to throw the error in if class is invalid
 */
public static void validateClass(final Class<?> clazz, final Context ctx, final Scriptable scope) {
    if (!ReflectionUtils.isClassOrSubclass(DBInputFormat.class, clazz)) {
        throw Utils.makeError(ctx, scope, LembosMessages.makeInvalidClassErrorMessage(DBInputFormat.class,
                                                                                      clazz));
    }
}
 
Author: apigee | Project: lembos | Lines: 14 | Source: DBInputFormatHelper.java
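Note that ReflectionUtils here is Lembos's own helper rather than Hadoop's org.apache.hadoop.util.ReflectionUtils. Its isClassOrSubclass check presumably reduces to Class#isAssignableFrom; a standalone sketch of the same validation, minus the Rhino Context/Scriptable error plumbing, might be:

import org.apache.hadoop.mapreduce.lib.db.DBInputFormat;
import org.apache.hadoop.mapreduce.lib.db.DataDrivenDBInputFormat;

// Sketch only: accept DBInputFormat itself or any subclass, reject anything else.
class DBInputFormatCheck {
  static void validate(Class<?> clazz) {
    if (!DBInputFormat.class.isAssignableFrom(clazz)) {
      throw new IllegalArgumentException(clazz.getName()
          + " is not DBInputFormat or a subclass of it");
    }
  }

  public static void main(String[] args) {
    validate(DBInputFormat.class);           // passes
    validate(DataDrivenDBInputFormat.class); // passes: subclass of DBInputFormat
    validate(String.class);                  // throws IllegalArgumentException
  }
}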

Example 5: setInput

import org.apache.hadoop.mapreduce.lib.db.DBInputFormat; // import the required package/class
/**
 * Java wrapper for {@link DBInputFormat#setInput(org.apache.hadoop.mapreduce.Job, Class, String, String)} and
 * {@link DBInputFormat#setInput(org.apache.hadoop.mapreduce.Job, Class, String, String, String, String...)}.
 *
 * @param ctx the JavaScript context
 * @param thisObj the 'this' object
 * @param args the function arguments
 * @param func the function being called
 */
@JSStaticFunction
public static void setInput(final Context ctx, final Scriptable thisObj, final Object[] args,
                                final Function func) {
    DBInputFormatHelper.setInput(DBInputFormat.class, ctx, thisObj, args, func);
}
 
Author: apigee | Project: lembos | Lines: 15 | Source: DBInputFormatWrap.java
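The wrapper forwards to one of the two DBInputFormat.setInput overloads named in its javadoc. For reference, the two call shapes on the Java side are as follows (MyRecord and the table/column names are placeholders, not part of the Lembos project):

import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.db.DBInputFormat;
import org.apache.hadoop.mapreduce.lib.db.DBWritable;

class SetInputOverloads {
  // Placeholder record type; a real one would also implement Writable
  // and read actual columns.
  static class MyRecord implements DBWritable {
    public void readFields(ResultSet rs) throws SQLException { }
    public void write(PreparedStatement ps) throws SQLException { }
  }

  static void configure(Job job) {
    // Overload 1: explicit input query plus a row-count query.
    DBInputFormat.setInput(job, MyRecord.class,
        "SELECT id, name FROM studentinfo",
        "SELECT COUNT(*) FROM studentinfo");

    // Overload 2: table name, optional WHERE conditions, ORDER BY column,
    // then the columns to read (this call replaces the settings above).
    DBInputFormat.setInput(job, MyRecord.class,
        "studentinfo", null, "id", "id", "name");
  }
}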


Note: The org.apache.hadoop.mapreduce.lib.db.DBInputFormat class examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets are selected from open-source projects contributed by various developers; copyright of the source code belongs to the original authors. For distribution and use, please refer to the corresponding project's license. Do not republish without permission.