本文整理汇总了Java中com.datastax.driver.core.ColumnDefinitions类的典型用法代码示例。如果您正苦于以下问题:Java ColumnDefinitions类的具体用法?Java ColumnDefinitions怎么用?Java ColumnDefinitions使用的例子?那么, 这里精选的类代码示例或许可以为您提供帮助。
ColumnDefinitions类属于com.datastax.driver.core包,在下文中一共展示了ColumnDefinitions类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: queryColumns
import com.datastax.driver.core.ColumnDefinitions; //导入依赖的package包/类
/**
 * Queries the column metadata of a table.
 * Created: 2017-11-15 11:29:32
 * @author yi.zhang
 * @param table the table name
 * @return a map of column name to lower-cased column type name, or null when the query fails
 */
public Map<String,String> queryColumns(String table){
    try {
        ResultSet rs = session.execute("select * from "+table);
        ColumnDefinitions definitions = rs.getColumnDefinitions();
        Map<String,String> columns = new HashMap<String,String>();
        // ColumnDefinitions is iterable over its column definitions.
        for (ColumnDefinitions.Definition definition : definitions) {
            // Type name is normalised to lower case, as callers expect.
            columns.put(definition.getName(), definition.getType().getName().name().toLowerCase());
        }
        return columns;
    } catch (Exception e) {
        e.printStackTrace();
        return null;
    }
}
示例2: getValue
import com.datastax.driver.core.ColumnDefinitions; //导入依赖的package包/类
/**
 * Reads the value of a column from a Cassandra row, converting it to the
 * Java type matching the column's CQL type.
 *
 * Supported CQL types: BIGINT, BOOLEAN, BLOB, DOUBLE, INT, TIMESTAMP and
 * VARCHAR. Any other type is logged as unexpected and then read as a
 * string — the default case deliberately falls through to VARCHAR.
 *
 * @param musicRow the row to read from
 * @param colname  the column name
 * @return the column value as a Java object
 */
private Object getValue(Row musicRow, String colname) {
ColumnDefinitions cdef = musicRow.getColumnDefinitions();
// Upper-cased type name drives the switch below.
String type = cdef.getType(colname).getName().toString().toUpperCase();
switch (type) {
case "BIGINT":
return musicRow.getLong(colname);
case "BOOLEAN":
return musicRow.getBool(colname);
case "BLOB":
return musicRow.getBytes(colname);
case "DOUBLE":
return musicRow.getDouble(colname);
case "INT":
return musicRow.getInt(colname);
case "TIMESTAMP":
return musicRow.getTimestamp(colname);
default:
logger.error("UNEXPECTED COLUMN TYPE: columname="+colname+", columntype="+type);
// fall thru
case "VARCHAR":
return musicRow.getString(colname);
}
}
示例3: SchemaStatement
import com.datastax.driver.core.ColumnDefinitions; //导入依赖的package包/类
/**
 * Creates a schema statement wrapper around a prepared statement.
 * Each prepared-statement variable is mapped to the partition generator's
 * index of the column with the same name, and a matching bind buffer is
 * allocated for argument values.
 */
public SchemaStatement(Timer timer, StressSettings settings, DataSpec spec,
                       PreparedStatement statement, Integer thriftId, ConsistencyLevel cl, ValidationType validationType)
{
    super(timer, settings, spec);
    this.statement = statement;
    this.thriftId = thriftId;
    this.cl = cl;
    this.validationType = validationType;
    final int variableCount = statement.getVariables().size();
    argumentIndex = new int[variableCount];
    bindBuffer = new Object[variableCount];
    int slot = 0;
    for (ColumnDefinitions.Definition variable : statement.getVariables())
        argumentIndex[slot++] = spec.partitionGenerator.indexOf(variable.getName());
    statement.setConsistencyLevel(JavaDriverClient.from(cl));
}
示例4: map
import com.datastax.driver.core.ColumnDefinitions; //导入依赖的package包/类
/**
 * Maps a driver row to a RawNumericMetric, collecting every column whose
 * name starts with "tag_" into the metric's tag map.
 */
public RawNumericMetric map(Row row) {
    RawNumericMetric metric = new RawNumericMetric(row.getString(0), row.getString(1), row.getDate(2).getTime(), row.getInt(3));
    Map<String, String> tags = new HashMap<String, String>();
    for (Definition definition : row.getColumnDefinitions().asList()) {
        String name = definition.getName();
        if (name.startsWith("tag_")) {
            tags.put(name, row.getString(name));
        }
    }
    // Only attach the tag map when at least one tag column was present.
    if (!tags.isEmpty()) {
        metric.setTagMap(tags);
    }
    return metric;
}
示例5: testDirect
import com.datastax.driver.core.ColumnDefinitions; //导入依赖的package包/类
/**
 * Verifies direct (native) query execution: a "call native(...)" procedure
 * is translated into a CQL string, and each returned row is delivered to
 * the caller as a single Object[] value.
 */
@Test public void testDirect() throws TranslatorException {
CassandraExecutionFactory cef = new CassandraExecutionFactory();
cef.setSupportsDirectQueryProcedure(true);
// "$1" is the placeholder substituted with the quoted procedure argument 'a'.
String input = "call native('select $1', 'a')";
TranslationUtility util = FakeTranslationFactory.getInstance().getExampleTranslationUtility();
Command command = util.parseCommand(input);
ExecutionContext ec = Mockito.mock(ExecutionContext.class);
RuntimeMetadata rm = Mockito.mock(RuntimeMetadata.class);
CassandraConnection connection = Mockito.mock(CassandraConnection.class);
ResultSet rs = Mockito.mock(ResultSet.class);
Row row = Mockito.mock(Row.class);
ColumnDefinitions cd = Mockito.mock(ColumnDefinitions.class);
Mockito.stub(row.getColumnDefinitions()).toReturn(cd);
// one() yields a single row, then null to mark the end of the result set.
Mockito.stub(rs.one()).toReturn(row).toReturn(null);
// The translator is expected to have substituted the argument into the query.
Mockito.stub(connection.executeQuery("select 'a'")).toReturn(rs);
ResultSetExecution execution = (ResultSetExecution)cef.createExecution(command, ec, rm, connection);
execution.execute();
List<?> vals = execution.next();
assertTrue(vals.get(0) instanceof Object[]);
}
示例6: SchemaStatement
import com.datastax.driver.core.ColumnDefinitions; //导入依赖的package包/类
/**
 * Creates a schema statement wrapper around a prepared statement.
 * Stores the statement's variable definitions and maps each variable to the
 * partition generator's index of the column with the same name.
 */
public SchemaStatement(Timer timer, StressSettings settings, DataSpec spec,
                       PreparedStatement statement, Integer thriftId, ConsistencyLevel cl)
{
    super(timer, settings, spec);
    this.statement = statement;
    this.thriftId = thriftId;
    this.cl = cl;
    definitions = statement.getVariables();
    final int variableCount = definitions.size();
    argumentIndex = new int[variableCount];
    bindBuffer = new Object[variableCount];
    int slot = 0;
    for (ColumnDefinitions.Definition variable : definitions)
        argumentIndex[slot++] = spec.partitionGenerator.indexOf(variable.getName());
    statement.setConsistencyLevel(JavaDriverClient.from(cl));
}
示例7: get
import com.datastax.driver.core.ColumnDefinitions; //导入依赖的package包/类
/**
 * {@inheritDoc}
 *
 * Fetches a single persistent object by key, populating only the requested
 * fields (all mapped fields when {@code fields} is null).
 *
 * @param key    the primary key of the object to fetch
 * @param fields the fields to populate, or null for all
 * @return the populated object, or null when no row matched
 */
@Override
public Persistent get(Object key, String[] fields) {
    String[] requestedFields = (fields == null) ? getFields() : fields;
    ArrayList<String> keyNames = new ArrayList<>();
    ArrayList<Object> keyValues = new ArrayList<>();
    AvroCassandraUtils.processKeys(mapping, key, keyNames, keyValues);
    String cqlQuery = CassandraQueryFactory.getSelectObjectWithFieldsQuery(mapping, requestedFields, keyNames);
    SimpleStatement statement = new SimpleStatement(cqlQuery, keyValues.toArray());
    if (readConsistencyLevel != null) {
        statement.setConsistencyLevel(ConsistencyLevel.valueOf(readConsistencyLevel));
    }
    ResultSet resultSet = this.client.getSession().execute(statement);
    ColumnDefinitions definitions = resultSet.getColumnDefinitions();
    Iterator<Row> rows = resultSet.iterator();
    if (!rows.hasNext()) {
        return null;
    }
    T obj = cassandraDataStore.newPersistent();
    AbstractGettableData row = (AbstractGettableData) rows.next();
    populateValuesToPersistent(row, definitions, obj, requestedFields);
    return obj;
}
示例8: populateValuesToPersistent
import com.datastax.driver.core.ColumnDefinitions; //导入依赖的package包/类
/**
 * Copies the requested column values from a result row into the persistent
 * bean, converting each value to its Avro field type.
 */
private void populateValuesToPersistent(AbstractGettableData row, ColumnDefinitions columnDefinitions, PersistentBase base, String[] fields) {
    for (String fieldName : fields) {
        Schema.Field avroField = base.getSchema().getField(fieldName);
        Field mappedField = mapping.getFieldFromFieldName(fieldName);
        // Skip fields absent from either the Avro schema or the mapping.
        if (mappedField == null || avroField == null) {
            continue;
        }
        Schema fieldSchema = avroField.schema();
        String columnName = mappedField.getColumnName();
        Object rawValue = getValue(row, columnDefinitions.getType(columnName), columnName, fieldSchema);
        base.put(avroField.pos(), AvroCassandraUtils.getAvroFieldValue(rawValue, fieldSchema));
    }
}
示例9: runPostQuery
import com.datastax.driver.core.ColumnDefinitions; //导入依赖的package包/类
/**
 * Writes every row of a Cassandra result set as a result entry, when this
 * query is configured to produce a result.
 */
@Override
public void runPostQuery(Object result, XMLStreamWriter xmlWriter, InternalParamCollection params, int queryLevel)
        throws DataServiceFault {
    ResultSet rs = (ResultSet) result;
    if (!this.hasResult()) {
        return;
    }
    // Column definitions are shared by every row of the result set.
    ColumnDefinitions defs = rs.getColumnDefinitions();
    for (Iterator<Row> rows = rs.iterator(); rows.hasNext(); ) {
        DataEntry dataEntry = this.getDataEntryFromRow(rows.next(), defs);
        this.writeResultEntry(xmlWriter, dataEntry, params, queryLevel);
    }
}
示例10: index
import com.datastax.driver.core.ColumnDefinitions; //导入依赖的package包/类
/**
 * Builds one IndexDocument from the buffered join rows and queues it for
 * indexing. Documents lacking the configured unique field are counted as
 * ignored and dropped; otherwise the pending counter is bumped and the
 * buffer is flushed when it reaches bufferSize.
 */
@Override
void index(Map<Row, ColumnDefinitions> rows) throws Exception {
IndexDocument indexDocument = new IndexDocument(lang);
// Map every buffered row into the same document.
FunctionUtils.forEachEx(rows,
(row, columnDefinitions) -> fieldMap.mapRow(fieldMapContext, row, columnDefinitions, indexDocument,
filePathSet));
// Drop the document when it has no content for the unique field.
if (uniqueField != null && !indexDocument.hasContent(uniqueField)) {
rwl.w.lock();
try {
ignoredDocumentCount++;
} finally {
rwl.w.unlock();
}
return;
}
indexDocumentList.add(indexDocument);
// Counter updates are guarded by the write lock.
rwl.w.lock();
try {
pendingIndexDocumentCount++;
} finally {
rwl.w.unlock();
}
// index(...) reports whether the buffer was flushed to the index.
if (DatabaseCrawlCassandraThread.this.index(indexDocumentList, bufferSize))
setStatus(CrawlStatus.CRAWL);
}
示例11: execute
import com.datastax.driver.core.ColumnDefinitions; //导入依赖的package包/类
/**
 * Executes one query of a join hierarchy and indexes the joined rows.
 *
 * Each fetched row is pushed onto rowStack together with its column
 * definitions, joined sub-queries are then executed recursively with the
 * join-column value, the whole stack is indexed when this query is flagged
 * with index=true, and the row is popped afterwards — rowStack therefore
 * always holds exactly the rows of the current join path.
 *
 * @param joinColumnValue the value bound to the query, or null/blank for the root query
 * @param complexQuery    the query (and its nested joins) to execute
 * @param rowStack        ordered rows of the current join path, keyed by row
 */
private void execute(final Object joinColumnValue, final ComplexQuery complexQuery,
final LinkedHashMap<Row, ColumnDefinitions> rowStack) throws Exception {
// Root queries (no join value) run without a bound parameter.
final ResultSet resultSet = joinColumnValue == null || StringUtils.isBlank(joinColumnValue.toString()) ?
session.executeWithFetchSize(complexQuery.cql, bufferSize) :
session.executeWithFetchSize(complexQuery.cql, bufferSize, joinColumnValue);
if (resultSet == null)
return;
final ColumnDefinitions columnDefinitions = resultSet.getColumnDefinitions();
for (final Row row : resultSet) {
rowStack.put(row, columnDefinitions);
if (complexQuery.join != null) {
// Recurse into every joined query, keyed by this row's join-column value.
FunctionUtils.forEachEx(complexQuery.join, (column, queries) -> {
final Object columnValue = row.getObject(column);
if (queries != null)
for (ComplexQuery query : queries)
execute(columnValue, query, rowStack);
});
}
if (complexQuery.index != null && complexQuery.index)
index(rowStack);
// Pop the row so the stack reflects only the remaining join path.
rowStack.remove(row);
if (abort())
break;
}
}
示例12: handleBlob
import com.datastax.driver.core.ColumnDefinitions; //导入依赖的package包/类
/**
 * Extracts a blob column to a temporary file and maps its content into the
 * target document via the configured field target. The temporary file is
 * always removed, whether or not extraction succeeded.
 */
private void handleBlob(FieldMapContext context, Row row, ColumnDefinitions columns, IndexDocument target,
        Set<String> filePathSet, String columnName, CommonFieldTarget targetField)
        throws IOException, SearchLibException, InterruptedException, ParseException, SyntaxError,
        InstantiationException, URISyntaxException, IllegalAccessException, ClassNotFoundException {
    String filePath = null;
    if (columns.contains(targetField.getFilePathPrefix())) {
        filePath = row.getString(targetField.getFilePathPrefix());
    }
    // Nothing to do when no file path is available for this row.
    if (StringUtils.isBlank(filePath)) {
        return;
    }
    Path tempPath = null;
    try {
        tempPath = Files.createTempFile("oss", FilenameUtils.getName(filePath));
        // Only map the field when the blob was actually written to disk.
        if (doBlob(row, tempPath.toFile(), columnName)) {
            mapFieldTarget(context, targetField, true, tempPath.toString(), target, filePathSet);
        }
    } finally {
        if (tempPath != null)
            Files.deleteIfExists(tempPath);
    }
}
示例13: convertResultSet
import com.datastax.driver.core.ColumnDefinitions; //导入依赖的package包/类
/**
 * Serialises the first row of a result set into a JSON document with
 * "columns" and "values" entries.
 *
 * The driver renders ColumnDefinitions as "Columns[...]" and a Row as
 * "Row[...]"; this method strips those wrappers to keep only the
 * comma-separated contents.
 *
 * @param rs the result set to convert; values are emitted only for a
 *           single-row result
 * @return the JSON document as a string
 */
private String convertResultSet(ResultSet rs) {
    JsonObject response = new JsonObject();
    List<Row> rows = rs.all();
    String rowStr = "";
    // rows.size() == 1 already implies non-empty, so no isEmpty check is needed.
    if (rows.size() == 1) {
        rowStr = rows.get(0).toString();
    }
    String colStr = rs.getColumnDefinitions().toString();
    // Guarded stripping: the original substring(8, ...) / substring(4, ...)
    // threw StringIndexOutOfBoundsException whenever the result had zero or
    // multiple rows (rowStr was "" -> substring(4, -1)).
    response.putString("columns", stripWrapper(colStr, 8));
    response.putString("values", stripWrapper(rowStr, 4));
    return response.toString();
}

/**
 * Removes a fixed-length prefix and one trailing character, or returns ""
 * when the text is too short to carry the wrapper.
 */
private static String stripWrapper(String text, int prefixLength) {
    return text.length() > prefixLength ? text.substring(prefixLength, text.length() - 1) : "";
}
示例14: createOutputRowMeta
import com.datastax.driver.core.ColumnDefinitions; //导入依赖的package包/类
/**
 * Rebuilds the step's output row metadata from the columns of a Cassandra
 * result set, converting each driver type to the corresponding value meta type.
 */
void createOutputRowMeta(RowMetaInterface row, ResultSet rs) {
    row.clear();
    for (ColumnDefinitions.Definition column : rs.getColumnDefinitions()) {
        // Trace the column name, CQL type and its CQL literal form.
        logDebug(column.getName() + ',' + column.getType().getName() + ',' + column.getType().asFunctionParameterString());
        ValueMetaBase meta = new ValueMetaBase(column.getName(), Utils.convertDataType(column.getType()));
        meta.setTrimType(0);
        row.addValueMeta(meta);
    }
}
示例15: syncQuorum
import com.datastax.driver.core.ColumnDefinitions; //导入依赖的package包/类
/**
 * Re-reads a row at quorum consistency and writes it back at "critical"
 * consistency, forcing replicas to converge on the key's current value.
 *
 * The key has the form "keyspace.table.primaryKeyValue"; only tables with
 * a single-column primary key are supported (see inline comment).
 *
 * NOTE(review): the SELECT/UPDATE statements are built by string
 * concatenation; convertToCQLDataType is presumed to render values safely
 * for CQL — verify if key values can come from untrusted input.
 */
private static void syncQuorum(String key){
logger.info("Performing sync operation---");
String[] splitString = key.split("\\.");
String keyspaceName = splitString[0];
String tableName = splitString[1];
String primaryKeyValue = splitString[2];
//get the primary key d
TableMetadata tableInfo = returnColumnMetadata(keyspaceName, tableName);
String primaryKeyName = tableInfo.getPrimaryKey().get(0).getName();//we only support single primary key
DataType primaryKeyType = tableInfo.getPrimaryKey().get(0).getType();
String cqlFormattedPrimaryKeyValue = convertToCQLDataType(primaryKeyType, primaryKeyValue);
//get the row of data from a quorum
String selectQuery = "SELECT * FROM "+keyspaceName+"."+tableName+ " WHERE "+primaryKeyName+"="+cqlFormattedPrimaryKeyValue+";";
ResultSet results = getDSHandle().executeCriticalGet(selectQuery);
//write it back to a quorum
// NOTE(review): results.one() is assumed non-null here; a missing row would
// throw a NullPointerException — confirm callers guarantee the row exists.
Row row = results.one();
ColumnDefinitions colInfo = row.getColumnDefinitions();
int totalColumns = colInfo.size();
int counter =1;
String fieldValueString="";
// Build the "col=value,..." assignment list, skipping the primary key column.
for (Definition definition : colInfo){
String colName = definition.getName();
if(colName.equals(primaryKeyName))
continue;
DataType colType = definition.getType();
Object valueObj = getDSHandle().getColValue(row, colName, colType);
String valueString = convertToCQLDataType(colType,valueObj);
fieldValueString = fieldValueString+ colName+"="+valueString;
// With one primary-key column there are totalColumns-1 assignments; the
// last one reaches counter == totalColumns-1 and gets no trailing comma.
if(counter!=(totalColumns-1))
fieldValueString = fieldValueString+",";
counter = counter +1;
}
String updateQuery = "UPDATE "+keyspaceName+"."+tableName+" SET "+fieldValueString+" WHERE "+primaryKeyName+"="+cqlFormattedPrimaryKeyValue+";";
getDSHandle().executePut(updateQuery, "critical");
}