

Java MLinkConfig Class Code Examples

This article collects typical usage examples of the Java class org.apache.sqoop.model.MLinkConfig. If you are wondering what MLinkConfig is for, how to use it, or what real-world code that uses it looks like, the curated class examples below should help.


The MLinkConfig class belongs to the org.apache.sqoop.model package. A total of 15 code examples of the class are presented below, ordered by popularity.
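Before the examples, here is a minimal, self-contained sketch (not drawn from any of the projects below) of how an MLinkConfig is typically built from a list of MConfig objects and how its inputs are read and written. The input name "linkConfig.connectionString" and the example JDBC URL are hypothetical; the constructors and accessors mirror those used in the examples that follow.

import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;

import org.apache.sqoop.model.InputEditable;
import org.apache.sqoop.model.MConfig;
import org.apache.sqoop.model.MInput;
import org.apache.sqoop.model.MLinkConfig;
import org.apache.sqoop.model.MStringInput;

public class MLinkConfigSketch {
  public static void main(String[] args) {
    // A single hypothetical string input, "linkConfig.connectionString".
    List<MInput<?>> inputs = new ArrayList<MInput<?>>();
    inputs.add(new MStringInput("linkConfig.connectionString", false,
        InputEditable.ANY, "", (short) 255));

    // Group the input under a config named "linkConfig" and wrap it in an MLinkConfig.
    List<MConfig> configs = new LinkedList<MConfig>();
    configs.add(new MConfig("linkConfig", inputs));
    MLinkConfig linkConfig = new MLinkConfig(configs);

    // Inputs are addressed by their fully qualified name and can be read back.
    linkConfig.getStringInput("linkConfig.connectionString")
        .setValue("jdbc:mysql://localhost/test");
    System.out.println(linkConfig.getInput("linkConfig.connectionString").getValue());
  }
}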

Example 1: testLinkConfig

import org.apache.sqoop.model.MLinkConfig; // import the required package/class
@Test
public void testLinkConfig() {
  // No upgrade
  MLinkConfig originalConfigs = new MLinkConfig(ConfigUtils.toConfigs(LinkConfiguration.class));
  MLinkConfig newConfigs = new MLinkConfig(ConfigUtils.toConfigs(LinkConfiguration.class));
  originalConfigs.getInput("linkConfig.jdbcDriver").setValue("test-jdbcDriver");
  originalConfigs.getInput("linkConfig.connectionString").setValue("test-connectionString");
  originalConfigs.getInput("linkConfig.username").setValue("test-username");
  originalConfigs.getInput("linkConfig.password").setValue("test-password");
  originalConfigs.getInput("linkConfig.jdbcProperties").setValue("test-jdbcProperties");
  upgrader.upgradeLinkConfig(originalConfigs, newConfigs);
  assertEquals(originalConfigs, newConfigs);
  assertEquals("test-jdbcDriver", newConfigs.getInput("linkConfig.jdbcDriver").getValue());
  assertEquals("test-connectionString", newConfigs.getInput("linkConfig.connectionString").getValue());
  assertEquals("test-username", newConfigs.getInput("linkConfig.username").getValue());
  assertEquals("test-password", newConfigs.getInput("linkConfig.password").getValue());
  assertEquals("test-jdbcProperties", newConfigs.getInput("linkConfig.jdbcProperties").getValue());
}
 
Developer: vybs, Project: sqoop-on-spark, Lines: 19, Source: TestGenericJdbcConnectorUpgrader.java

Example 2: testUpdateLinkConfig

import org.apache.sqoop.model.MLinkConfig; // import the required package/class
@Test
public void testUpdateLinkConfig() throws Exception {
  loadLinksForLatestVersion();

  assertCountForTable("SQOOP.SQ_LINK", 2);
  assertCountForTable("SQOOP.SQ_LINK_INPUT", 8);
  MLink link = handler.findLink(1, getDerbyDatabaseConnection());

  List<MConfig> configs = link.getConnectorLinkConfig().getConfigs();
  MConfig config = configs.get(0).clone(false);
  MConfig newConfig = new MConfig(config.getName(), config.getInputs());

  ((MStringInput) newConfig.getInputs().get(0)).setValue("LinkConfigUpdated");

  handler.updateLinkConfig(link.getPersistenceId(), newConfig, MConfigUpdateEntityType.USER,
      getDerbyDatabaseConnection());

  MLink updatedLink = handler.findLink(1, getDerbyDatabaseConnection());
  MLinkConfig newConfigs = updatedLink.getConnectorLinkConfig();
  assertEquals(2, newConfigs.getConfigs().size());
  MConfig updatedLinkConfig = newConfigs.getConfigs().get(0);
  assertEquals("LinkConfigUpdated", updatedLinkConfig.getInputs().get(0).getValue());
}
 
Developer: vybs, Project: sqoop-on-spark, Lines: 24, Source: TestLinkHandling.java

Example 3: setUp

import org.apache.sqoop.model.MLinkConfig; // import the required package/class
@BeforeMethod(alwaysRun = true)
public void setUp() throws Exception {
  repoTransactionMock = mock(JdbcRepositoryTransaction.class);
  connectorMgrMock = mock(ConnectorManager.class);
  driverMock = mock(Driver.class);
  repoHandlerMock = mock(JdbcRepositoryHandler.class);
  connectorUpgraderMock = mock(ConnectorConfigurableUpgrader.class);
  driverUpgraderMock = mock(DriverUpgrader.class);
  repoSpy = spy(new JdbcRepository(repoHandlerMock, null));

  // setup transaction and connector manager
  doReturn(repoTransactionMock).when(repoSpy).getTransaction();
  ConnectorManager.setInstance(connectorMgrMock);
  Driver.setInstance(driverMock);

  doNothing().when(connectorUpgraderMock).upgradeLinkConfig(any(MLinkConfig.class),
      any(MLinkConfig.class));
  doNothing().when(connectorUpgraderMock).upgradeFromJobConfig(any(MFromConfig.class),
      any(MFromConfig.class));
  doNothing().when(connectorUpgraderMock).upgradeToJobConfig(any(MToConfig.class),
      any(MToConfig.class));
  doNothing().when(driverUpgraderMock).upgradeJobConfig(any(MDriverConfig.class),
      any(MDriverConfig.class));

}
 
Developer: vybs, Project: sqoop-on-spark, Lines: 26, Source: TestJdbcRepository.java

Example 4: instance

import org.apache.sqoop.model.MLinkConfig; // import the required package/class
public MLink instance() {
    BaseLinkConfig baseLinkConfig = sqoopLinkConfig.getLinkConfig();

    MLink link = sqoopClient.createLink(baseLinkConfig.getConnector());
    link.setName(sqoopLinkConfig.getLinkName());
    link.setCreationUser(sqoopLinkConfig.getCreationUser());
    link.setCreationDate(sqoopLinkConfig.getCreationDate());

    MLinkConfig linkConfig = link.getConnectorLinkConfig();

    baseLinkConfig.linkConfig(linkConfig);

    Status status = null;

    if (SqoopUtil.checkExists(sqoopClient, sqoopLinkConfig.getLinkName())) {
        status = sqoopClient.updateLink(link, sqoopLinkConfig.getLinkName());
        log.info("update link with link Name : " + link.getName());
    } else {
        status = sqoopClient.saveLink(link);
    }

    if (status.canProceed()) {
        log.info("Created Link with Link Name : " + link.getName());
        return link;
    } else {
        log.info("Something went wrong creating the link ");
        return null;
    }
}
 
Developer: babymm, Project: mmsns, Lines: 30, Source: SqoopLinkFactory.java

Example 5: testLinkUpgrade

import org.apache.sqoop.model.MLinkConfig; // import the required package/class
@Test
public void testLinkUpgrade() throws Exception {
  MLinkConfig originalConfigs = new MLinkConfig(new LinkedList<MConfig>());
  MLinkConfig newConfigs = new MLinkConfig(ConfigUtils.toConfigs(LinkConfiguration.class));
  originalConfigs.getConfigs().add(new MConfig("linkConfig", new LinkedList<MInput<?>>()));
  originalConfigs.getConfigs().get(0).getInputs().add(new MStringInput("linkConfig.hdfsHostAndPort", false, InputEditable.ANY, StringUtils.EMPTY, (short)255));
  originalConfigs.getInput("linkConfig.hdfsHostAndPort").setValue("test:8020");
  upgrader.upgradeLinkConfig(originalConfigs, newConfigs);
  assertEquals("test:8020", newConfigs.getInput("linkConfig.authority").getValue());
}
 
Developer: vybs, Project: sqoop-on-spark, Lines: 11, Source: TestKiteConnectorUpgrader.java

Example 6: upgradeLinkConfig

import org.apache.sqoop.model.MLinkConfig; // import the required package/class
/**
 * Upgrade the original link config for the given config type and fill in the upgradeTarget. Note
 * that any data already in {@code upgradeTarget} may be overwritten.
 * @param original - original config as in the repository
 * @param upgradeTarget - the instance that will be filled in with the
 *                      upgraded config
 */
public void upgradeLinkConfig(MLinkConfig original, MLinkConfig upgradeTarget) {
  // The reasoning for throwing an exception by default is as follows.
  // Sqoop calls the upgrade apis for every connector if and only if the
  // corresponding link object that the config is associated with exists in the sqoop
  // repository. In unexpected scenarios, if a link object is created in the
  // sqoop repository without a corresponding upgrade routine for
  // the link config, then this exception will be thrown to indicate an
  // unexpected code path. In normal circumstances this
  // scenario of having a link object for a connector without link config is
  // very unlikely to happen. A likely scenario is that a connector will not have a link config and hence
  // no link object will be created and thus this method will not be invoked.
  throw new SqoopException(ConfigurableError.CONFIGURABLE_0001);

}
 
Developer: vybs, Project: sqoop-on-spark, Lines: 22, Source: ConnectorConfigurableUpgrader.java

Example 7: loadLink

import org.apache.sqoop.model.MLinkConfig; // import the required package/class
private long loadLink(MLink link) {

    // starting by pretending we have a brand new link
    resetPersistenceId(link);

    Repository repository = RepositoryManager.getInstance().getRepository();

    MConnector mConnector = ConnectorManager.getInstance().getConnectorConfigurable(link.getConnectorId());
    ConnectorConfigurableUpgrader connectorConfigUpgrader = ConnectorManager.getInstance().getSqoopConnector(mConnector.getUniqueName()).getConfigurableUpgrader();

    List<MConfig> connectorConfigs = mConnector.getLinkConfig().clone(false).getConfigs();
    MLinkConfig newLinkConfigs = new MLinkConfig(connectorConfigs);

    // upgrading the configs to make sure they match the current repository
    connectorConfigUpgrader.upgradeLinkConfig(link.getConnectorLinkConfig(), newLinkConfigs);
    MLink newLink = new MLink(link, newLinkConfigs);

    // Transform config structures to objects for validations
    SqoopConnector connector = ConnectorManager.getInstance().getSqoopConnector(
        link.getConnectorId());

    Object connectorConfig = ClassUtils.instantiate(connector.getLinkConfigurationClass());

    ConfigUtils.fromConfigs(link.getConnectorLinkConfig().getConfigs(), connectorConfig);

    ConfigValidationRunner validationRunner = new ConfigValidationRunner();
    ConfigValidationResult result = validationRunner.validate(connectorConfig);

    Status finalStatus = Status.getWorstStatus(result.getStatus());

    if (finalStatus.canProceed()) {
      repository.createLink(newLink);

    } else {
      LOG.error("Failed to load link:" + link.getName());
      LOG.error("Status of connector configs:" + result.getStatus().toString());
    }
    return newLink.getPersistenceId();
  }
 
Developer: vybs, Project: sqoop-on-spark, Lines: 40, Source: RepositoryLoadTool.java

Example 8: restoreLink

import org.apache.sqoop.model.MLinkConfig; // import the required package/class
private MLink restoreLink(Object obj) {
  JSONObject object = (JSONObject) obj;
  long connectorId = (Long) object.get(CONNECTOR_ID);
  JSONArray connectorLinkConfig = (JSONArray) object.get(LINK_CONFIG_VALUES);
  List<MConfig> linkConfig = restoreConfigList(connectorLinkConfig);
  MLink link = new MLink(connectorId, new MLinkConfig(linkConfig));
  link.setPersistenceId((Long) object.get(ID));
  link.setName((String) object.get(NAME));
  link.setEnabled((Boolean) object.get(ENABLED));
  link.setCreationUser((String) object.get(CREATION_USER));
  link.setCreationDate(new Date((Long) object.get(CREATION_DATE)));
  link.setLastUpdateUser((String) object.get(UPDATE_USER));
  link.setLastUpdateDate(new Date((Long) object.get(UPDATE_DATE)));
  return link;
}
 
Developer: vybs, Project: sqoop-on-spark, Lines: 16, Source: LinkBean.java

Example 9: getLinkConfig

import org.apache.sqoop.model.MLinkConfig; // import the required package/class
public static MLinkConfig getLinkConfig() {
  List<MInput<?>> inputs;
  MStringInput input;
  MConfig config;
  List<MConfig> linkConfig = new ArrayList<MConfig>();
  inputs = new ArrayList<MInput<?>>();

  input = new MStringInput("url", false, InputEditable.USER_ONLY, StringUtils.EMPTY, (short) 10);
  input.setPersistenceId(1);
  inputs.add(input);

  input = new MStringInput("username", false, InputEditable.USER_ONLY, "password", (short) 10);
  input.setPersistenceId(2);
  input.setValue("test");
  inputs.add(input);

  input = new MStringInput("password", true, InputEditable.USER_ONLY, StringUtils.EMPTY, (short) 10);
  input.setPersistenceId(3);
  input.setValue("test");
  inputs.add(input);

  config = new MConfig("connection", inputs);
  config.setPersistenceId(10);
  linkConfig.add(config);

  return new MLinkConfig(linkConfig);
}
 
Developer: vybs, Project: sqoop-on-spark, Lines: 28, Source: ConfigTestUtil.java

Example 10: main

import org.apache.sqoop.model.MLinkConfig; // import the required package/class
public static void main(String[] args) throws Exception {

    final SqoopSparkJob sparkJob = new SqoopSparkJob();
    CommandLine cArgs = SqoopSparkJob.parseArgs(createOptions(), args);
    SparkConf conf = sparkJob.init(cArgs);
    JavaSparkContext context = new JavaSparkContext(conf);

    MConnector fromConnector = RepositoryManager.getInstance().getRepository()
        .findConnector("generic-jdbc-connector");

    MLinkConfig fromLinkConfig = fromConnector.getLinkConfig();
    MLink fromLink = new MLink(fromConnector.getPersistenceId(), fromLinkConfig);
    fromLink.setName("jdbcLink-" + System.currentTimeMillis());

    fromLink.getConnectorLinkConfig().getStringInput("linkConfig.jdbcDriver")
        .setValue("com.mysql.jdbc.Driver");

    fromLink.getConnectorLinkConfig().getStringInput("linkConfig.connectionString")
        .setValue(cArgs.getOptionValue("jdbcString"));
    fromLink.getConnectorLinkConfig().getStringInput("linkConfig.username")
        .setValue(cArgs.getOptionValue("u"));
    fromLink.getConnectorLinkConfig().getStringInput("linkConfig.password")
        .setValue(cArgs.getOptionValue("p"));

    RepositoryManager.getInstance().getRepository().createLink(fromLink);

    MConnector toConnector = RepositoryManager.getInstance().getRepository()
        .findConnector("kafka-connector");

    MLinkConfig toLinkConfig = toConnector.getLinkConfig();

    MLink toLink = new MLink(toConnector.getPersistenceId(), toLinkConfig);
    toLink.setName("kafkaLink-" + System.currentTimeMillis());

    toLink.getConnectorLinkConfig().getStringInput("linkConfig.brokerList")
        .setValue(cArgs.getOptionValue("broker"));
    toLink.getConnectorLinkConfig().getStringInput("linkConfig.zookeeperConnect")
        .setValue(cArgs.getOptionValue("zk"));

    RepositoryManager.getInstance().getRepository().createLink(toLink);

    MFromConfig fromJobConfig = fromConnector.getFromConfig();
    MToConfig toJobConfig = toConnector.getToConfig();

    MJob sqoopJob = new MJob(fromConnector.getPersistenceId(), toConnector.getPersistenceId(),
        fromLink.getPersistenceId(), toLink.getPersistenceId(), fromJobConfig, toJobConfig, Driver
            .getInstance().getDriver().getDriverConfig());
    // jdbc configs
    MFromConfig fromConfig = sqoopJob.getFromJobConfig();
    fromConfig.getStringInput("fromJobConfig.tableName").setValue(cArgs.getOptionValue("table"));
    fromConfig.getStringInput("fromJobConfig.partitionColumn").setValue(cArgs.getOptionValue("partitionCol"));
    // kafka configs
    MToConfig toConfig = sqoopJob.getToJobConfig();
    toConfig.getStringInput("toJobConfig.topic").setValue("test-spark-topic");

    MDriverConfig driverConfig = sqoopJob.getDriverConfig();
    if (cArgs.getOptionValue("numE") != null) {
      driverConfig.getIntegerInput("throttlingConfig.numExtractors").setValue(
          Integer.valueOf(cArgs.getOptionValue("numE")));
    }
    if (cArgs.getOptionValue("numL") != null) {

      driverConfig.getIntegerInput("throttlingConfig.numLoaders").setValue(
          Integer.valueOf(cArgs.getOptionValue("numL")));
    }
    RepositoryManager.getInstance().getRepository().createJob(sqoopJob);
    sparkJob.setJob(sqoopJob);
    sparkJob.execute(conf, context);
  }
 
Developer: vybs, Project: sqoop-on-spark, Lines: 69, Source: SqoopJDBCKafkaJob.java

Example 11: main

import org.apache.sqoop.model.MLinkConfig; // import the required package/class
public static void main(String[] args) throws Exception {

    final SqoopSparkJob sparkJob = new SqoopSparkJob();
    CommandLine cArgs = SqoopSparkJob.parseArgs(createOptions(), args);
    SparkConf conf = sparkJob.init(cArgs);
    JavaSparkContext context = new JavaSparkContext(conf);

    MConnector fromConnector = RepositoryManager.getInstance().getRepository()
        .findConnector("generic-jdbc-connector");
    MConnector toConnector = RepositoryManager.getInstance().getRepository()
        .findConnector("hdfs-connector");

    MLinkConfig fromLinkConfig = fromConnector.getLinkConfig();
    MLinkConfig toLinkConfig = toConnector.getLinkConfig();

    MLink fromLink = new MLink(fromConnector.getPersistenceId(), fromLinkConfig);
    fromLink.setName("jdbcLink-" + System.currentTimeMillis());
    fromLink.getConnectorLinkConfig().getStringInput("linkConfig.jdbcDriver")
        .setValue("com.mysql.jdbc.Driver");

    fromLink.getConnectorLinkConfig().getStringInput("linkConfig.connectionString")
        .setValue(cArgs.getOptionValue("jdbcString"));
    fromLink.getConnectorLinkConfig().getStringInput("linkConfig.username")
        .setValue(cArgs.getOptionValue("u"));
    fromLink.getConnectorLinkConfig().getStringInput("linkConfig.password")
        .setValue(cArgs.getOptionValue("p"));
    RepositoryManager.getInstance().getRepository().createLink(fromLink);

    MLink toLink = new MLink(toConnector.getPersistenceId(), toLinkConfig);
    toLink.setName("hdfsLink-" + System.currentTimeMillis());
    toLink.getConnectorLinkConfig().getStringInput("linkConfig.confDir")
        .setValue(cArgs.getOptionValue("outputDir"));
    RepositoryManager.getInstance().getRepository().createLink(toLink);

    MFromConfig fromJobConfig = fromConnector.getFromConfig();
    MToConfig toJobConfig = toConnector.getToConfig();

    MJob sqoopJob = new MJob(fromConnector.getPersistenceId(), toConnector.getPersistenceId(),
        fromLink.getPersistenceId(), toLink.getPersistenceId(), fromJobConfig, toJobConfig, Driver
            .getInstance().getDriver().getDriverConfig());

    MConfigList fromConfig = sqoopJob.getJobConfig(Direction.FROM);
    fromConfig.getStringInput("fromJobConfig.tableName").setValue(cArgs.getOptionValue("table"));
    fromConfig.getStringInput("fromJobConfig.partitionColumn").setValue(
        cArgs.getOptionValue("paritionCol"));

    MToConfig toConfig = sqoopJob.getToJobConfig();
    toConfig.getStringInput("toJobConfig.outputDirectory").setValue(
        cArgs.getOptionValue("outputDir") + System.currentTimeMillis());
    MDriverConfig driverConfig = sqoopJob.getDriverConfig();
    if (cArgs.getOptionValue("numE") != null) {
      driverConfig.getIntegerInput("throttlingConfig.numExtractors").setValue(
          Integer.valueOf(cArgs.getOptionValue("numE")));
    }
    if (cArgs.getOptionValue("numL") != null) {

      driverConfig.getIntegerInput("throttlingConfig.numLoaders").setValue(
          Integer.valueOf(cArgs.getOptionValue("numL")));
    }
    RepositoryManager.getInstance().getRepository().createJob(sqoopJob);
    sparkJob.setJob(sqoopJob);
    sparkJob.execute(conf, context);
  }
 
Developer: vybs, Project: sqoop-on-spark, Lines: 64, Source: SqoopJDBCHDFSJob.java

Example 12: upgradeLinkConfig

import org.apache.sqoop.model.MLinkConfig; // import the required package/class
@Override
public void upgradeLinkConfig(MLinkConfig original, MLinkConfig upgradeTarget) {
  ConfigurableUpgradeUtil.doUpgrade(original.getConfigs(), upgradeTarget.getConfigs());
}
 
Developer: vybs, Project: sqoop-on-spark, Lines: 5, Source: GenericJdbcConnectorUpgrader.java

Example 13: getLinkConfig

import org.apache.sqoop.model.MLinkConfig; // import the required package/class
protected MLinkConfig getLinkConfig() {
  return new MLinkConfig(getConfigs("l1", "l2"));
}
 
Developer: vybs, Project: sqoop-on-spark, Lines: 4, Source: DerbyTestCase.java

Example 14: testEntityDataSerialization

import org.apache.sqoop.model.MLinkConfig; // import the required package/class
/**
 * Test that serializing actual data is not an issue.
 */
@Test
public void testEntityDataSerialization() throws Exception {
  MConnector connector = getConnector();
  MDriver driver = getDriver();

  // Register objects for everything and our new connector
  handler.registerConnector(connector, getDerbyDatabaseConnection());
  handler.registerDriver(driver, getDerbyDatabaseConnection());

  // Inserted values
  Map<String, String> map = new HashMap<String, String>();
  map.put("A", "B");

  // Connection object with all various values
  MLink link = new MLink(connector.getPersistenceId(), connector.getLinkConfig());
  MLinkConfig linkConfig = link.getConnectorLinkConfig();
  assertEquals(linkConfig.getStringInput("l1.I1").getEditable(), InputEditable.ANY);
  assertEquals(linkConfig.getStringInput("l1.I1").getOverrides(), "l1.I2");
  assertEquals(linkConfig.getMapInput("l1.I2").getEditable(), InputEditable.CONNECTOR_ONLY);
  assertEquals(linkConfig.getMapInput("l1.I2").getOverrides(), "l1.I5");
  assertEquals(linkConfig.getIntegerInput("l1.I3").getEditable(), InputEditable.ANY);
  assertEquals(linkConfig.getIntegerInput("l1.I3").getOverrides(), "l1.I1");
  assertEquals(linkConfig.getBooleanInput("l1.I4").getEditable(), InputEditable.USER_ONLY);
  assertEquals(linkConfig.getBooleanInput("l1.I4").getOverrides(), "");
  assertEquals(linkConfig.getEnumInput("l1.I5").getEditable(), InputEditable.ANY);
  assertEquals(linkConfig.getEnumInput("l1.I5").getOverrides(), "l1.I4,l1.I3");

  linkConfig.getStringInput("l1.I1").setValue("A");
  linkConfig.getMapInput("l1.I2").setValue(map);
  linkConfig.getIntegerInput("l1.I3").setValue(1);
  linkConfig.getBooleanInput("l1.I4").setValue(true);
  linkConfig.getEnumInput("l1.I5").setValue("YES");

  // Create the link in repository
  handler.createLink(link, getDerbyDatabaseConnection());
  assertNotSame(link.getPersistenceId(), MPersistableEntity.PERSISTANCE_ID_DEFAULT);

  // Retrieve created link
  MLink retrieved = handler.findLink(link.getPersistenceId(), getDerbyDatabaseConnection());
  linkConfig = retrieved.getConnectorLinkConfig();
  assertEquals("A", linkConfig.getStringInput("l1.I1").getValue());
  assertEquals(map, linkConfig.getMapInput("l1.I2").getValue());
  assertEquals(1, (int) linkConfig.getIntegerInput("l1.I3").getValue());
  assertEquals(true, (boolean) linkConfig.getBooleanInput("l1.I4").getValue());
  assertEquals("YES", linkConfig.getEnumInput("l1.I5").getValue());
  assertEquals(linkConfig.getEnumInput("l1.I5").getEditable(), InputEditable.ANY);
  assertEquals(linkConfig.getEnumInput("l1.I5").getOverrides(), "l1.I4,l1.I3");

}
 
Developer: vybs, Project: sqoop-on-spark, Lines: 53, Source: TestInputTypes.java

Example 15: loadLinks

import org.apache.sqoop.model.MLinkConfig; // import the required package/class
private List<MLink> loadLinks(PreparedStatement stmt,
                              Connection conn)
    throws SQLException {
  List<MLink> links = new ArrayList<MLink>();
  ResultSet rsConnection = null;
  PreparedStatement connectorConfigFetchStatement = null;
  PreparedStatement connectorConfigInputStatement = null;

  try {
    rsConnection = stmt.executeQuery();

    connectorConfigFetchStatement = conn.prepareStatement(crudQueries.getStmtSelectConfigForConfigurable());
    connectorConfigInputStatement = conn.prepareStatement(crudQueries.getStmtFetchLinkInput());

    while(rsConnection.next()) {
      long id = rsConnection.getLong(1);
      String name = rsConnection.getString(2);
      long connectorId = rsConnection.getLong(3);
      boolean enabled = rsConnection.getBoolean(4);
      String creationUser = rsConnection.getString(5);
      Date creationDate = rsConnection.getTimestamp(6);
      String updateUser = rsConnection.getString(7);
      Date lastUpdateDate = rsConnection.getTimestamp(8);

      connectorConfigFetchStatement.setLong(1, connectorId);
      connectorConfigInputStatement.setLong(1, id);
      connectorConfigInputStatement.setLong(3, id);

      List<MConfig> connectorLinkConfig = new ArrayList<MConfig>();
      List<MConfig> fromConfig = new ArrayList<MConfig>();
      List<MConfig> toConfig = new ArrayList<MConfig>();

      loadConnectorConfigs(connectorLinkConfig, fromConfig, toConfig, connectorConfigFetchStatement,
          connectorConfigInputStatement, 2, conn);
      MLink link = new MLink(connectorId, new MLinkConfig(connectorLinkConfig));

      link.setPersistenceId(id);
      link.setName(name);
      link.setCreationUser(creationUser);
      link.setCreationDate(creationDate);
      link.setLastUpdateUser(updateUser);
      link.setLastUpdateDate(lastUpdateDate);
      link.setEnabled(enabled);

      links.add(link);
    }
  } finally {
    closeResultSets(rsConnection);
    closeStatements(connectorConfigFetchStatement, connectorConfigInputStatement);
  }

  return links;
}
 
Developer: vybs, Project: sqoop-on-spark, Lines: 54, Source: CommonRepositoryHandler.java


Note: The org.apache.sqoop.model.MLinkConfig class examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets are taken from open-source projects contributed by their respective authors, who retain copyright of the source code; please consult the corresponding project's License before distributing or using it, and do not reproduce this article without permission.