This article collects typical usage examples of the Java class org.apache.storm.jdbc.common.Column. If you are unsure what the Column class does or how to use it, the examples curated here should help.
The Column class belongs to the org.apache.storm.jdbc.common package. Seven code examples are shown below, ordered roughly by popularity.
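Before the examples, here is a minimal, self-contained sketch of constructing Column instances, based only on the two constructors that appear in the examples below (name plus SQL type, or name, value, and SQL type); the column names and values are illustrative assumptions:

import java.sql.Types;
import org.apache.storm.jdbc.common.Column;

public class ColumnSketch {
    public static void main(String[] args) {
        // Schema-only column: name and a java.sql.Types constant (as in Example 2).
        Column<String> schemaColumn = new Column<>("word", Types.VARCHAR);

        // Value-bearing column: name, value, and SQL type (as in Examples 3 and 7).
        Column<Long> valueColumn = new Column<>("count", 42L, Types.BIGINT);

        System.out.println(schemaColumn.getColumnName() + " -> " + valueColumn.getVal());
    }
}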
Example 1: process
import org.apache.storm.jdbc.common.Column; // import the required package/class
@SuppressWarnings("rawtypes")
@Override
protected void process(Tuple tuple) {
try {
List<Column> columns = jdbcMapper.getColumns(tuple);
List<List<Column>> columnLists = new ArrayList<List<Column>>();
columnLists.add(columns);
if(!StringUtils.isBlank(tableName)) {
this.jdbcClient.insert(this.tableName, columnLists);
} else {
this.jdbcClient.executeInsertQuery(this.insertQuery, columnLists);
}
this.collector.ack(tuple);
} catch (Exception e) {
this.collector.reportError(e);
this.collector.fail(tuple);
}
}
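In the method above, each inner list of columnLists represents one row and the outer list is the batch handed to the JDBC client. A minimal, self-contained sketch of building such a batch (the column names, values, and single-row batch are illustrative assumptions):

import java.sql.Types;
import java.util.ArrayList;
import java.util.List;
import org.apache.storm.jdbc.common.Column;

public class RowBatchSketch {
    public static void main(String[] args) {
        // The outer list is the batch passed to JdbcClient.insert(tableName, columnLists);
        // each inner list holds the columns of a single row.
        List<List<Column>> columnLists = new ArrayList<>();

        List<Column> row = new ArrayList<>();
        row.add(new Column<>("word", "storm", Types.VARCHAR));
        row.add(new Column<>("count", 1L, Types.BIGINT));
        columnLists.add(row);

        System.out.println("rows in batch: " + columnLists.size());
    }
}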
Example 2: buildBolt
import org.apache.storm.jdbc.common.Column; // import the required package/class
@Bean("wordCountToMySQLBolt")
public JdbcInsertBolt buildBolt() {
super.setId("wordCountToMySQLBolt");
Map hikariConfigMap = Maps.newHashMap();
hikariConfigMap.put("dataSourceClassName", mySQLProps.getDataSourceClassName());
hikariConfigMap.put("dataSource.url", mySQLProps.getDataSourceUrl());
hikariConfigMap.put("dataSource.user", mySQLProps.getDataSourceUser());
hikariConfigMap.put("dataSource.password", mySQLProps.getDataSourcePassword());
ConnectionProvider connectionProvider = new HikariCPConnectionProvider(hikariConfigMap);
List<Column> columnSchema = Lists.newArrayList(
new Column("targetDate", Types.DATE),
new Column("word", java.sql.Types.VARCHAR),
new Column("count", Types.BIGINT),
new Column("count_0", Types.BIGINT)
);
// JdbcMapper simpleJdbcMapper = new SimpleJdbcMapper(tableName, connectionProvider);
JdbcMapper simpleJdbcMapper = new SimpleJdbcMapper(columnSchema);
JdbcInsertBolt insertBolt = new JdbcInsertBolt(connectionProvider, simpleJdbcMapper)
.withInsertQuery(insertQuery)
.withQueryTimeoutSecs(queryTimeoutSecs);
return insertBolt;
}
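The insertQuery and queryTimeoutSecs values come from configuration fields that are not shown above. Assuming the four-column schema declared in the example, the insert statement would look roughly like the following sketch (the table name word_count is an assumption):

// Hypothetical values matching the columnSchema above; the real ones are
// supplied by the surrounding configuration class.
String insertQuery = "insert into word_count (targetDate, word, count, count_0) values (?, ?, ?, ?)";
int queryTimeoutSecs = 30;

When SimpleJdbcMapper is built from an explicit column schema like this, the bolt is typically paired with withInsertQuery; the commented-out constructor would instead derive the schema from the table's metadata through the connection provider.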
Example 3: getColumns
import org.apache.storm.jdbc.common.Column; // import the required package/class
@Override
public List<Column> getColumns(ITuple tuple) {
    StreamlineEvent event = (StreamlineEvent) tuple.getValueByField(StreamlineEvent.STREAMLINE_EVENT);
    List<Column> res = new ArrayList<>();
    if (fieldsToColumns.isEmpty()) {
        initFieldsToColumns();
    }
    for (String field : fields) {
        Column<?> column = getColumn(field);
        String columnName = column.getColumnName();
        Integer columnSqlType = column.getSqlType();
        Object value = Util.getJavaType(columnSqlType).cast(event.get(field));
        res.add(new Column<>(columnName, value, columnSqlType));
    }
    return res;
}
Example 4: initializeColumns
import org.apache.storm.jdbc.common.Column; // import the required package/class
public void initializeColumns(Map<String, String> columnsTypeMap) {
    if (columnsTypeMap == null || columnsTypeMap.isEmpty()) {
        throw new IllegalArgumentException("Columns and types map not specified");
    }
    this.schemaColumns.clear();
    for (Entry<String, String> e : columnsTypeMap.entrySet()) {
        String colName = e.getKey();
        String colType = e.getValue();
        this.schemaColumns.add(new Column(colName, parseSlqType(colType)));
    }
}
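parseSlqType is a helper defined elsewhere in the same class and is not shown above. A hypothetical sketch of what such a name-to-java.sql.Types mapping might look like (the method name, supported type names, and error handling are assumptions):

import java.sql.Types;

final class SqlTypeParserSketch {
    // Hypothetical stand-in for the parseSlqType helper used above; the real
    // implementation may support a different set of type names.
    static int parseSqlType(String typeName) {
        switch (typeName.trim().toUpperCase()) {
            case "VARCHAR":   return Types.VARCHAR;
            case "INTEGER":
            case "INT":       return Types.INTEGER;
            case "BIGINT":    return Types.BIGINT;
            case "FLOAT":     return Types.FLOAT;
            case "DOUBLE":    return Types.DOUBLE;
            case "DATE":      return Types.DATE;
            case "TIMESTAMP": return Types.TIMESTAMP;
            default:
                throw new IllegalArgumentException("Unsupported column type: " + typeName);
        }
    }
}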
Example 5: initFieldsToColumns
import org.apache.storm.jdbc.common.Column; // import the required package/class
private void initFieldsToColumns() {
    connectionProvider.prepare();
    JdbcClient client = new JdbcClient(connectionProvider, 30);
    for (Column<?> column : client.getColumnSchema(tableName)) {
        fieldsToColumns.put(column.getColumnName().toUpperCase(), column);
    }
    LOG.info("fieldsToColumns {}", fieldsToColumns);
}
Example 6: getColumn
import org.apache.storm.jdbc.common.Column; // import the required package/class
private Column<?> getColumn(String fieldName) {
    Column<?> column = fieldsToColumns.get(fieldName.toUpperCase());
    if (column != null) {
        return column;
    }
    throw new IllegalArgumentException("Could not find database column: " + fieldName);
}
Example 7: getColumns
import org.apache.storm.jdbc.common.Column; // import the required package/class
@SuppressWarnings("unchecked")
@Override
public List<Column> getColumns(ITuple input) {
try {
String jsonString = input.getString(0);
JsonObject jsonObject = parser.parse(jsonString).getAsJsonObject();
List<Column> columns = new ArrayList<Column>();
for(Column column : schemaColumns) {
String columnName = column.getColumnName();
JsonElement elem = jsonObject.get(columnName);
if (elem==null) continue;
Integer columnSqlType = column.getSqlType();
switch (columnSqlType) {
case Types.VARCHAR:
String strValue = elem.getAsString();
columns.add(new Column(columnName, strValue, columnSqlType));
break;
case Types.FLOAT:
Float floatValue = elem.getAsFloat();
columns.add(new Column(columnName, floatValue, columnSqlType));
break;
case Types.DOUBLE:
Double doubleValue = elem.getAsDouble();
columns.add(new Column(columnName, doubleValue, columnSqlType));
break;
case Types.DATE:
Date dateValue = new Date(FMT_DATE.parseDateTime(elem.getAsString()).toDate().getTime());
columns.add(new Column(columnName, dateValue, columnSqlType));
break;
case Types.TIMESTAMP:
Timestamp timeValue = new Timestamp(elem.getAsLong());
columns.add(new Column(columnName, timeValue, columnSqlType));
break;
case Types.SMALLINT:
Short shortValue = elem.getAsShort();
columns.add(new Column(columnName, shortValue, columnSqlType));
break;
case Types.INTEGER:
Integer intValue = elem.getAsInt();
columns.add(new Column(columnName, intValue, columnSqlType));
break;
case Types.BIGINT:
Long longValue = elem.getAsLong();
columns.add(new Column(columnName, longValue, columnSqlType));
break;
default:
throw new RuntimeException("Unsupported java type in tuple " + Util.getJavaType(columnSqlType));
}
}
return columns;
} catch(Exception err) {
throw new RuntimeException("JSON parsing error", err);
}
}
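The parser, schemaColumns, and FMT_DATE members used above are declared elsewhere in the mapper class. A hedged sketch of those declarations (the Joda-Time formatter matches the parseDateTime call above, but the date pattern itself is an assumption):

import java.util.ArrayList;
import java.util.List;
import com.google.gson.JsonParser;
import org.apache.storm.jdbc.common.Column;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;

// Hypothetical declarations backing Example 7; the actual date pattern and
// column schema are defined elsewhere (the schema is filled by Example 4).
class JsonMapperStateSketch {
    private final JsonParser parser = new JsonParser();
    private final List<Column> schemaColumns = new ArrayList<>();
    private static final DateTimeFormatter FMT_DATE = DateTimeFormat.forPattern("yyyy-MM-dd"); // assumed pattern
}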