This article collects and summarizes typical usage of the Java class com.cloudera.sqoop.testutil.HsqldbTestServer. If you are unsure what HsqldbTestServer does or how to use it, the hand-picked code examples below may help.
The HsqldbTestServer class belongs to the com.cloudera.sqoop.testutil package. A total of 15 code examples are shown below, sorted by popularity by default.
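Before the examples, here is a minimal sketch of how HsqldbTestServer is typically touched from test code. It only relies on the static accessors that also appear in the examples below (getUrl(), getTableName()); the assumption that the in-memory HSQLDB instance has already been started and populated by the surrounding test harness is spelled out in the comments.
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;
import com.cloudera.sqoop.testutil.HsqldbTestServer;
public class HsqldbTestServerSketch {
  public static void main(String[] args) throws Exception {
    // Assumption: the surrounding test harness has already started and
    // populated the in-memory HSQLDB instance behind HsqldbTestServer.
    String url = HsqldbTestServer.getUrl();         // JDBC URL of the test database
    String table = HsqldbTestServer.getTableName(); // name of the default test table
    // Plain JDBC against the test database, e.g. to sanity-check its contents.
    try (Connection conn = DriverManager.getConnection(url);
         Statement stmt = conn.createStatement();
         ResultSet rs = stmt.executeQuery("SELECT COUNT(*) FROM " + table)) {
      rs.next();
      System.out.println(table + " has " + rs.getInt(1) + " rows");
    }
  }
}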
Example 1: testUserMapping
import com.cloudera.sqoop.testutil.HsqldbTestServer; // import the dependent package/class
public void testUserMapping() throws Exception {
String[] args = {
"--map-column-hive", "id=STRING,value=INTEGER",
};
Configuration conf = new Configuration();
SqoopOptions options =
new ImportTool().parseArguments(args, null, null, false);
TableDefWriter writer = new TableDefWriter(options,
null, HsqldbTestServer.getTableName(), "outputTable", conf, false);
Map<String, Integer> colTypes = new SqlTypeMap<String, Integer>();
colTypes.put("id", Types.INTEGER);
colTypes.put("value", Types.VARCHAR);
writer.setColumnTypes(colTypes);
String createTable = writer.getCreateTableStmt();
assertNotNull(createTable);
assertTrue(createTable.contains("`id` STRING"));
assertTrue(createTable.contains("`value` INTEGER"));
assertFalse(createTable.contains("`id` INTEGER"));
assertFalse(createTable.contains("`value` STRING"));
}
Example 2: testUserMappingFailWhenCantBeApplied
import com.cloudera.sqoop.testutil.HsqldbTestServer; // import the dependent package/class
public void testUserMappingFailWhenCantBeApplied() throws Exception {
String[] args = {
"--map-column-hive", "id=STRING,value=INTEGER",
};
Configuration conf = new Configuration();
SqoopOptions options =
new ImportTool().parseArguments(args, null, null, false);
TableDefWriter writer = new TableDefWriter(options,
null, HsqldbTestServer.getTableName(), "outputTable", conf, false);
Map<String, Integer> colTypes = new SqlTypeMap<String, Integer>();
colTypes.put("id", Types.INTEGER);
writer.setColumnTypes(colTypes);
try {
String createTable = writer.getCreateTableStmt();
fail("Expected failure on non applied mapping.");
} catch(IllegalArgumentException iae) {
// Expected, ok
}
}
Example 3: testHiveDatabase
import com.cloudera.sqoop.testutil.HsqldbTestServer; // import the dependent package/class
public void testHiveDatabase() throws Exception {
String[] args = {
"--hive-database", "db",
};
Configuration conf = new Configuration();
SqoopOptions options =
new ImportTool().parseArguments(args, null, null, false);
TableDefWriter writer = new TableDefWriter(options,
null, HsqldbTestServer.getTableName(), "outputTable", conf, false);
Map<String, Integer> colTypes = new SqlTypeMap<String, Integer>();
writer.setColumnTypes(colTypes);
String createTable = writer.getCreateTableStmt();
assertNotNull(createTable);
assertTrue(createTable.contains("`db`.`outputTable`"));
String loadStmt = writer.getLoadDataStmt();
assertNotNull(loadStmt);
assertTrue(loadStmt.contains("`db`.`outputTable`"));
}
Example 4: getOutputArgv
import com.cloudera.sqoop.testutil.HsqldbTestServer; // import the dependent package/class
/**
* Create the argv to pass to Sqoop.
*
* @return the argv as an array of strings.
*/
protected String[] getOutputArgv(boolean includeHadoopFlags,
String[] extraArgs) {
ArrayList<String> args = new ArrayList<String>();
if (includeHadoopFlags) {
CommonArgs.addHadoopFlags(args);
}
args.add("--table");
args.add(getTableName());
args.add("--connect");
args.add(HsqldbTestServer.getUrl());
args.add("--warehouse-dir");
args.add(getWarehouseDir());
args.add("--m");
args.add("1");
args.add("--split-by");
args.add("INTFIELD1");
args.add("--as-parquetfile");
if (extraArgs != null) {
args.addAll(Arrays.asList(extraArgs));
}
return args.toArray(new String[args.size()]);
}
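As a usage note for this helper, here is a minimal sketch of how such an argv builder is usually consumed; it assumes the surrounding class extends com.cloudera.sqoop.testutil.ImportJobTestCase, which supplies runImport(), getTableName() and getWarehouseDir().
// Hypothetical caller; runImport(), getTableName() and getWarehouseDir() are
// assumed to come from the ImportJobTestCase base class, not defined here.
public void testParquetImport() throws Exception {
  String[] argv = getOutputArgv(true, null);
  runImport(argv);
  // Verification of the imported files under
  // getWarehouseDir() + "/" + getTableName() would follow here.
}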
Example 5: getOutputQueryArgv
import com.cloudera.sqoop.testutil.HsqldbTestServer; // import the dependent package/class
protected String[] getOutputQueryArgv(boolean includeHadoopFlags, String[] extraArgs) {
ArrayList<String> args = new ArrayList<String>();
if (includeHadoopFlags) {
CommonArgs.addHadoopFlags(args);
}
args.add("--query");
args.add("SELECT * FROM " + getTableName() + " WHERE $CONDITIONS");
args.add("--connect");
args.add(HsqldbTestServer.getUrl());
args.add("--target-dir");
args.add(getWarehouseDir() + "/" + getTableName());
args.add("--m");
args.add("1");
args.add("--split-by");
args.add("INTFIELD1");
args.add("--as-parquetfile");
if (extraArgs != null) {
args.addAll(Arrays.asList(extraArgs));
}
return args.toArray(new String[args.size()]);
}
Example 6: getOutputArgv
import com.cloudera.sqoop.testutil.HsqldbTestServer; // import the dependent package/class
/**
* Create the argv to pass to Sqoop.
*
* @return the argv as an array of strings.
*/
protected String[] getOutputArgv(boolean includeHadoopFlags,
String[] extraArgs) {
ArrayList<String> args = new ArrayList<String>();
if (includeHadoopFlags) {
CommonArgs.addHadoopFlags(args);
}
args.add("-m");
args.add("1");
args.add("--table");
args.add(getTableName());
args.add("--connect");
args.add(HsqldbTestServer.getUrl());
args.add("--warehouse-dir");
args.add(getWarehouseDir());
args.add("--split-by");
args.add("INTFIELD1");
args.add("--as-avrodatafile");
if (extraArgs != null) {
args.addAll(Arrays.asList(extraArgs));
}
return args.toArray(new String[0]);
}
Example 7: getOutputArgv
import com.cloudera.sqoop.testutil.HsqldbTestServer; // import the dependent package/class
/**
* Create the argv to pass to Sqoop.
*
* @return the argv as an array of strings.
*/
protected ArrayList<String> getOutputArgv(boolean includeHadoopFlags) {
ArrayList<String> args = new ArrayList<String>();
if (includeHadoopFlags) {
CommonArgs.addHadoopFlags(args);
}
args.add("--table");
args.add(HsqldbTestServer.getTableName());
args.add("--connect");
args.add(HsqldbTestServer.getUrl());
args.add("--split-by");
args.add("INTFIELD1");
args.add("--as-sequencefile");
return args;
}
Example 8: testSetPackageName
import com.cloudera.sqoop.testutil.HsqldbTestServer; // import the dependent package/class
/**
* Test that we can generate code with a custom class name that includes a
* package.
*/
@Test
public void testSetPackageName() {
// Set the option strings in an "argv" to redirect our srcdir and bindir
String [] argv = {
"--bindir",
JAR_GEN_DIR,
"--outdir",
CODE_GEN_DIR,
"--package-name",
OVERRIDE_PACKAGE_NAME,
};
runGenerationTest(argv, OVERRIDE_PACKAGE_NAME + "."
+ HsqldbTestServer.getTableName());
}
Example 9: runFailedGenerationTest
import com.cloudera.sqoop.testutil.HsqldbTestServer; // import the dependent package/class
private void runFailedGenerationTest(String [] argv,
String classNameToCheck) {
File codeGenDirFile = new File(CODE_GEN_DIR);
File classGenDirFile = new File(JAR_GEN_DIR);
try {
options = new ImportTool().parseArguments(argv,
null, options, true);
} catch (Exception e) {
LOG.error("Could not parse options: " + e.toString());
}
CompilationManager compileMgr = new CompilationManager(options);
ClassWriter writer = new ClassWriter(options, manager,
HsqldbTestServer.getTableName(), compileMgr);
try {
writer.generate();
compileMgr.compile();
fail("ORM class file generation succeeded when it was expected to fail");
} catch (Exception ioe) {
LOG.error("Got Exception from ORM generation as expected : "
+ ioe.toString());
}
}
Example 10: getOutputArgv
import com.cloudera.sqoop.testutil.HsqldbTestServer; // import the dependent package/class
/**
* Create the argv to pass to Sqoop.
*
* @return the argv as an array of strings.
*/
protected String[] getOutputArgv(boolean includeHadoopFlags) {
ArrayList<String> args = new ArrayList<String>();
if (includeHadoopFlags) {
CommonArgs.addHadoopFlags(args);
}
args.add("--table");
args.add(HsqldbTestServer.getTableName());
args.add("--connect");
args.add(HsqldbTestServer.getUrl());
args.add("--warehouse-dir");
args.add(getWarehouseDir());
args.add("--split-by");
args.add("INTFIELD1");
args.add("--as-avrodatafile");
return args.toArray(new String[0]);
}
Example 11: getOutputArgvForQuery
import com.cloudera.sqoop.testutil.HsqldbTestServer; // import the dependent package/class
/**
* Create the argv to pass to Sqoop.
*
* @return the argv as an array of strings.
*/
protected String[] getOutputArgvForQuery(boolean includeHadoopFlags) {
ArrayList<String> args = new ArrayList<String>();
if (includeHadoopFlags) {
CommonArgs.addHadoopFlags(args);
}
args.add("--query");
args.add("select * from " + HsqldbTestServer.getTableName()
+ " where $CONDITIONS");
args.add("--connect");
args.add(HsqldbTestServer.getUrl());
args.add("--target-dir");
args.add(getWarehouseDir() + "/query_result");
args.add("--split-by");
args.add("INTFIELD1");
args.add("--as-avrodatafile");
return args.toArray(new String[0]);
}
Example 12: checkFirstColumnSum
import com.cloudera.sqoop.testutil.HsqldbTestServer; // import the dependent package/class
private void checkFirstColumnSum() throws SQLException {
Connection conn = getConnection();
PreparedStatement statement = conn.prepareStatement(
"SELECT SUM(INTFIELD1) FROM " + getTableName(),
ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
int actualVal = 0;
try {
ResultSet rs = statement.executeQuery();
try {
rs.next();
actualVal = rs.getInt(1);
} finally {
rs.close();
}
} finally {
statement.close();
}
assertEquals("First column column sum", HsqldbTestServer.getFirstColSum(),
actualVal);
}
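On Java 7+ the same check can be written more compactly with try-with-resources; this is a minimal sketch that reuses the getConnection() and getTableName() helpers of the surrounding test class and deliberately leaves the shared connection open.
// Equivalent check using try-with-resources; getConnection() and
// getTableName() are the same helpers the original method relies on.
private void checkFirstColumnSumCompact() throws SQLException {
  Connection conn = getConnection(); // shared test connection; not closed here
  String sql = "SELECT SUM(INTFIELD1) FROM " + getTableName();
  try (PreparedStatement statement = conn.prepareStatement(sql);
       ResultSet rs = statement.executeQuery()) {
    assertTrue(rs.next());
    assertEquals("First column sum", HsqldbTestServer.getFirstColSum(),
        rs.getInt(1));
  }
}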
Example 13: getOutputArgv
import com.cloudera.sqoop.testutil.HsqldbTestServer; // import the dependent package/class
/**
* Create the argv to pass to Sqoop.
*
* @return the argv as an array of strings.
*/
protected String[] getOutputArgv(boolean includeHadoopFlags,
String[] extraArgs) {
ArrayList<String> args = new ArrayList<String>();
if (includeHadoopFlags) {
CommonArgs.addHadoopFlags(args);
}
args.add("--table");
args.add(getTableName());
args.add("--connect");
args.add(HsqldbTestServer.getUrl());
args.add("--warehouse-dir");
args.add(getWarehouseDir());
args.add("--split-by");
args.add("INTFIELD1");
args.add("--as-avrodatafile");
if (extraArgs != null) {
args.addAll(Arrays.asList(extraArgs));
}
return args.toArray(new String[0]);
}
Example 14: getCreateTableArgv
import com.cloudera.sqoop.testutil.HsqldbTestServer; // import the dependent package/class
/**
* @return the argv to supply to a create-table only job for Hive imports.
*/
protected String [] getCreateTableArgv(boolean includeHadoopFlags,
String [] moreArgs) {
ArrayList<String> args = new ArrayList<String>();
if (null != moreArgs) {
for (String arg: moreArgs) {
args.add(arg);
}
}
args.add("--table");
args.add(getTableName());
args.add("--connect");
args.add(HsqldbTestServer.getUrl());
return args.toArray(new String[0]);
}
Example 15: getCreateHiveTableArgs
import com.cloudera.sqoop.testutil.HsqldbTestServer; // import the dependent package/class
/**
* @return the argv to supply to a ddl-executing-only job for Hive imports.
*/
protected String [] getCreateHiveTableArgs(String [] extraArgs) {
ArrayList<String> args = new ArrayList<String>();
args.add("--table");
args.add(getTableName());
args.add("--connect");
args.add(HsqldbTestServer.getUrl());
if (null != extraArgs) {
for (String arg : extraArgs) {
args.add(arg);
}
}
return args.toArray(new String[0]);
}
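To round off the listing, here is a sketch of how this argv builder might be fed to Sqoop's create-hive-table tool. CreateHiveTableTool and the runImport(tool, argv) overload are assumptions about the surrounding Sqoop test harness (ImportJobTestCase), not part of this listing.
// Hypothetical caller; CreateHiveTableTool and runImport(tool, argv) are
// assumed to be provided by the surrounding Sqoop test harness.
public void testCreateHiveTableOnly() throws Exception {
  String[] argv =
      getCreateHiveTableArgs(new String[] { "--hive-table", "outputTable" });
  runImport(new CreateHiveTableTool(), argv);
}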