本文整理汇总了Java中com.datastax.driver.mapping.Mapper类的典型用法代码示例。如果您正苦于以下问题:Java Mapper类的具体用法?Java Mapper怎么用?Java Mapper使用的例子?那么, 这里精选的类代码示例或许可以为您提供帮助。
Mapper类属于com.datastax.driver.mapping包,在下文中一共展示了Mapper类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: open
import com.datastax.driver.mapping.Mapper; //导入依赖的package包/类
@Override
public void open(Configuration configuration) {
    super.open(configuration);
    try {
        // Build the object-mapping layer on top of the session opened by the parent sink.
        MappingManager manager = new MappingManager(session);
        this.mappingManager = manager;
        this.mapper = manager.mapper(clazz);

        // Apply caller-supplied default save options, when any were configured.
        if (options != null) {
            Mapper.Option[] defaults = options.getMapperOptions();
            if (defaults != null) {
                this.mapper.setDefaultSaveOptions(defaults);
            }
        }
    } catch (Exception e) {
        throw new RuntimeException("Cannot create CassandraPojoSink with input: " + clazz.getSimpleName(), e);
    }
}
示例2: main
import com.datastax.driver.mapping.Mapper; //导入依赖的package包/类
public static void main(String[] args) throws Exception {
    // Streaming job whose single source is the static in-memory message collection.
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    DataStreamSource<Message> source = env.fromCollection(messages);

    // Sink the stream into a Cassandra node on localhost; null fields are persisted too.
    CassandraSink.addSink(source)
            .setClusterBuilder(new ClusterBuilder() {
                @Override
                protected Cluster buildCluster(Builder builder) {
                    return builder.addContactPoint("127.0.0.1").build();
                }
            })
            .setMapperOptions(() -> new Mapper.Option[] {Mapper.Option.saveNullFields(true)})
            .build();

    env.execute("Cassandra Sink example");
}
示例3: NativeSerializer
import com.datastax.driver.mapping.Mapper; //导入依赖的package包/类
/**
 * Builds a serializer backed by the DataStax object mapper: analyzes the persistent
 * class, creates the schema, and configures default consistency levels for
 * save/delete (write level) and get (read level) when the client specifies them.
 */
NativeSerializer(CassandraClient cassandraClient, Class<K> keyClass, Class<T> persistentClass, CassandraMapping mapping) {
    super(cassandraClient, keyClass, persistentClass, mapping);
    try {
        analyzePersistent();
    } catch (Exception e) {
        // Preserve the original exception as the cause instead of flattening it
        // into the message string (the original dropped the stack trace entirely).
        throw new RuntimeException("Error occurred while analyzing the persistent class: " + e.getMessage(), e);
    }
    this.createSchema();
    MappingManager mappingManager = new MappingManager(cassandraClient.getSession());
    mapper = mappingManager.mapper(persistentClass);
    // The write consistency level governs both saves and deletes; resolve it once.
    if (cassandraClient.getWriteConsistencyLevel() != null) {
        ConsistencyLevel writeLevel = ConsistencyLevel.valueOf(cassandraClient.getWriteConsistencyLevel());
        mapper.setDefaultDeleteOptions(Mapper.Option.consistencyLevel(writeLevel));
        mapper.setDefaultSaveOptions(Mapper.Option.consistencyLevel(writeLevel));
    }
    if (cassandraClient.getReadConsistencyLevel() != null) {
        mapper.setDefaultGetOptions(Mapper.Option.consistencyLevel(ConsistencyLevel.valueOf(cassandraClient.getReadConsistencyLevel())));
    }
}
示例4: ClusteredLoader
import com.datastax.driver.mapping.Mapper; //导入依赖的package包/类
/**
 * Prepares all CQL statements used to page over a clustered table (or materialized
 * view) named {@code tableName} in the keyspace of {@code mapper}'s table.
 *
 * @param mapper     mapper whose keyspace metadata is used to resolve the table/view
 * @param dataClass  entity class rows are mapped to
 * @param ckeyClass  type of the last clustering column (the paging index)
 * @param tableName  table or materialized view to load from
 * @throws IllegalArgumentException when neither a table nor a materialized view
 *                                  with that name exists in the keyspace
 */
public ClusteredLoader(Mapper<Data> mapper, Class<Data> dataClass, Class<CKey> ckeyClass, String tableName) {
    MappingManager manager = mapper.getManager();
    session = manager.getSession();
    this.mapper = manager.mapper(dataClass);
    String keyspace = mapper.getTableMetadata().getKeyspace().getName();
    // Prefer a materialized view with the given name; fall back to a plain table.
    MaterializedViewMetadata mv = mapper.getTableMetadata().getKeyspace().getMaterializedView(tableName);
    AbstractTableMetadata tableMetadata = mv == null ? mapper.getTableMetadata().getKeyspace().getTable(tableName) : mv;
    if (tableMetadata == null) {
        // Bug fix: the original concatenation was missing the space before "found".
        throw new IllegalArgumentException("No table or materialized view " + keyspace + "." + tableName + " found");
    }
    // Equality predicate over every primary-key column except the paging index.
    List<ColumnMetadata> primaryKey = tableMetadata.getPrimaryKey();
    String pkEq = exceptLast(primaryKey).stream()
            .map(c -> c.getName() + "=?")
            .collect(Collectors.joining(" and "));
    List<ColumnMetadata> clusteringColumns = tableMetadata.getClusteringColumns();
    String orderByDesc = orderBy(clusteringColumns, "DESC");
    String orderByAsc = orderBy(clusteringColumns, "ASC");
    // The last clustering column is used as the before/after paging boundary.
    String indexColumn = clusteringColumns.get(clusteringColumns.size() - 1).getName();
    indexAccessor = CassandraUtil.findProperty(dataClass, ckeyClass, indexColumn);
    selectUnbounded = prepare(String.format("select * from %s.%s where " + pkEq + " order by %s limit ?", keyspace, tableName, orderByDesc));
    selectBefore = prepare(String.format("select * from %s.%s where " + pkEq + " and %s < ? order by %s limit ?", keyspace, tableName, indexColumn, orderByDesc));
    selectAfter = prepare(String.format("select * from %s.%s where " + pkEq + " and %s > ? order by %s limit ?", keyspace, tableName, indexColumn, orderByDesc));
    selectBeforeAfter = prepare(String.format("select * from %s.%s where " + pkEq + " and %s < ? and %s > ? order by %s limit ?", keyspace, tableName, indexColumn, indexColumn, orderByDesc));
    selectUnboundedAsc = prepare(String.format("select * from %s.%s where " + pkEq + " order by %s limit ?", keyspace, tableName, orderByAsc));
    selectBeforeAsc = prepare(String.format("select * from %s.%s where " + pkEq + " and %s < ? order by %s limit ?", keyspace, tableName, indexColumn, orderByAsc));
    selectAfterAsc = prepare(String.format("select * from %s.%s where " + pkEq + " and %s > ? order by %s limit ?", keyspace, tableName, indexColumn, orderByAsc));
    selectBeforeAfterAsc = prepare(String.format("select * from %s.%s where " + pkEq + " and %s < ? and %s > ? order by %s limit ?", keyspace, tableName, indexColumn, indexColumn, orderByAsc));
    selectByIdKey = prepare(String.format("select * from %s.%s where " + pkEq + " and %s=?", keyspace, tableName, indexColumn));
    deleteByIdKey = prepare(String.format("delete from %s.%s where " + pkEq + " and %s=?", keyspace, tableName, indexColumn));
    selectAllById = prepare(String.format("select * from %s.%s where " + pkEq, keyspace, tableName));
    deleteAllById = prepare(String.format("delete from %s.%s where " + pkEq, keyspace, tableName));
}
示例5: save
import com.datastax.driver.mapping.Mapper; //导入依赖的package包/类
/**
 * Saves an entity to Cassandra, persisting null fields as well.
 *
 * @param obj entity object to save
 * @return 1 on success, -1 on any failure
 */
public int save(Object obj) {
    try {
        // Lazily initialize the connection when it has not been opened yet.
        // NOTE(review): the original tested `session != null`, which skips
        // initialization exactly when it is needed and re-runs it when a session
        // already exists — assumed to be an inverted condition; confirm init()'s contract.
        if (session == null) {
            init(servers, keyspace, username, password);
        }
        Mapper mapper = mapping.mapper(obj.getClass());
        mapper.save(obj, Option.saveNullFields(true));
        return 1;
    } catch (Exception e) {
        // Best-effort API: report the failure and signal it via the return code.
        e.printStackTrace();
    }
    return -1;
}
示例6: update
import com.datastax.driver.mapping.Mapper; //导入依赖的package包/类
/**
 * Updates an entity in Cassandra; null fields are left untouched and the row's
 * TTL is refreshed to {@code EXPIRE_TIME}.
 *
 * @param obj entity object to update
 * @return 1 on success, -1 on any failure
 */
public int update(Object obj) {
    try {
        // Lazily initialize the connection when it has not been opened yet.
        // NOTE(review): the original tested `session != null`, which skips
        // initialization exactly when it is needed — assumed to be an inverted
        // condition; confirm init()'s contract.
        if (session == null) {
            init(servers, keyspace, username, password);
        }
        Mapper mapper = mapping.mapper(obj.getClass());
        mapper.save(obj, Option.saveNullFields(false), Option.ttl(EXPIRE_TIME));
        return 1;
    } catch (Exception e) {
        // Best-effort API: report the failure and signal it via the return code.
        e.printStackTrace();
    }
    return -1;
}
示例7: delete
import com.datastax.driver.mapping.Mapper; //导入依赖的package包/类
/**
 * Deletes an entity from Cassandra.
 *
 * @param obj entity object to delete
 * @return 1 on success, -1 on any failure
 */
public int delete(Object obj) {
    try {
        // Lazily initialize the connection when it has not been opened yet.
        // NOTE(review): the original tested `session != null`, which skips
        // initialization exactly when it is needed — assumed to be an inverted
        // condition; confirm init()'s contract.
        if (session == null) {
            init(servers, keyspace, username, password);
        }
        Mapper mapper = mapping.mapper(obj.getClass());
        mapper.delete(obj);
        return 1;
    } catch (Exception e) {
        // Best-effort API: report the failure and signal it via the return code.
        e.printStackTrace();
    }
    return -1;
}
示例8: postProcessBeanFactory
import com.datastax.driver.mapping.Mapper; //导入依赖的package包/类
@SuppressWarnings("unused") // compiler bug?
@Override
public void postProcessBeanFactory(ConfigurableListableBeanFactory context) throws BeansException {
    synchronized (lock) {
        // Scan the configured base package for @Table-annotated entity classes and
        // register one proxy Mapper bean per entity.
        ClassPathScanningCandidateComponentProvider scanner =
                new ClassPathScanningCandidateComponentProvider(false);
        scanner.addIncludeFilter(new AnnotationTypeFilter(Table.class));
        for (BeanDefinition bd : scanner.findCandidateComponents(basePackage)) {
            Class<?> entityCls;
            try {
                entityCls = Class.forName(bd.getBeanClassName());
            } catch (ClassNotFoundException e) {
                // The scanner just produced this class name, so failure to load it
                // indicates a broken classpath, not a recoverable condition.
                throw new AssertionError(e);
            }
            log.info("Creating proxy mapper for entity: " + entityCls.getName());
            CassandraMapper annotation = entityCls.getAnnotation(CassandraMapper.class);
            // Bug fix: the original called annotation.singleton() BEFORE the null
            // check, so an entity without @CassandraMapper threw an NPE and the
            // fallback bean-name branch was unreachable. Resolve both values after
            // the null check instead.
            String beanName;
            boolean singleton;
            if (annotation == null) {
                beanName = StringUtils.uncapitalize(entityCls.getSimpleName()) + "Mapper";
                // NOTE(review): default singleton-ness for unannotated entities is
                // assumed true — confirm against @CassandraMapper's declared default.
                singleton = true;
            } else {
                beanName = annotation.value();
                singleton = annotation.singleton();
            }
            Mapper<?> bean = createProxy(Mapper.class, new MyInterceptor(entityCls, singleton));
            context.registerSingleton(beanName, bean);
            log.info("Bean registered, name=" + beanName + ", bean=" + bean.toString());
        }
    }
}
示例9: getTarget
import com.datastax.driver.mapping.Mapper; //导入依赖的package包/类
private Mapper<?> getTarget() {
    // Non-singleton mappers are produced fresh by the plain initializer.
    if (!singleton) {
        return targetInitializer.get();
    }
    // Singleton mappers come from the lazy initializer, which may fail on first use.
    try {
        return singletonTarget.get();
    } catch (ConcurrentException e) {
        throw Exceptions.propagate(e);
    }
}
示例10: start
import com.datastax.driver.mapping.Mapper; //导入依赖的package包/类
@Override
public boolean start() throws IOException {
    LOG.debug("Starting Cassandra reader");
    // Connect using the settings carried by the source's spec.
    cluster = getCluster(source.spec.hosts(), source.spec.port(), source.spec.username(),
            source.spec.password(), source.spec.localDc(), source.spec.consistencyLevel());
    session = cluster.connect();
    LOG.debug("Query: " + source.splitQuery);
    resultSet = session.execute(source.splitQuery);
    // Map raw rows onto the configured entity class and position on the first record.
    MappingManager manager = new MappingManager(session);
    Mapper entityMapper = manager.mapper(source.spec.entity());
    iterator = entityMapper.map(resultSet).iterator();
    return advance();
}
示例11: initializeTestData
import com.datastax.driver.mapping.Mapper; //导入依赖的package包/类
/**
 * (Re)creates the test keyspace and table, inserts nine deterministic rows via the
 * object mapper, and verifies each row round-trips and that the final count is 9.
 *
 * @param date timestamp value stored in every row's {@code typetimestamp} column
 */
public static void initializeTestData(Date date)
{
    try (Cluster cluster = getCluster()) {
        try (Session session = cluster.connect()) {
            createOrReplaceKeyspace(session, KEYSPACE_NAME);
        }
        try (Session session = cluster.connect(KEYSPACE_NAME)) {
            session.execute("DROP TABLE IF EXISTS " + TABLE_NAME);
            session.execute("CREATE TABLE " + TABLE_NAME + " (" +
                    " key text PRIMARY KEY, " +
                    " typeuuid uuid, " +
                    " typeinteger int, " +
                    " typelong bigint, " +
                    " typebytes blob, " +
                    " typetimestamp timestamp " +
                    ")");
            Mapper<TableRow> mapper = new MappingManager(session).mapper(TableRow.class);
            // Fix: use a primitive loop counter — the original boxed Integer forced
            // repeated auto(un)boxing on every use in the loop body.
            for (int rowNumber = 1; rowNumber < 10; rowNumber++) {
                TableRow tableRow = new TableRow(
                        "key " + rowNumber,
                        UUID.fromString(String.format("00000000-0000-0000-0000-%012d", rowNumber)),
                        rowNumber,
                        rowNumber + 1000L,
                        ByteBuffer.wrap(Ints.toByteArray(rowNumber)).asReadOnlyBuffer(),
                        date
                );
                mapper.save(tableRow);
                // Round-trip check: what we read back must render identically.
                assertEquals(mapper.get(tableRow.getKey()).toString(), tableRow.toString());
            }
            // NOTE(review): "presto_test" is assumed to equal TABLE_NAME — confirm,
            // and prefer the constant so the query cannot drift out of sync.
            assertEquals(session.execute("SELECT COUNT(*) FROM presto_test").all().get(0).getLong(0), 9);
        }
    }
}
示例12: queryAsync
import com.datastax.driver.mapping.Mapper; //导入依赖的package包/类
/**
 * Execute a query and map result to entityClass.
 *
 * @param entityClass Entity class.
 * @param statement Statement to execute.
 * @return A listenable future holding the result.
 */
public <T> ListenableFuture<Result<T>> queryAsync(final Class<T> entityClass,
    final Statement statement) {
    // Run the statement asynchronously on the mapper's session, then wrap the raw
    // result set into a mapped Result<T> once it completes.
    final Mapper<T> entityMapper = mapper(entityClass);
    final Session session = entityMapper.getManager().getSession();
    final ResultSetFuture future = session.executeAsync(statement);
    return Futures.transformAsync(future,
        resultSet -> Futures.immediateFuture(entityMapper.map(resultSet)));
}
示例13: newDatastore
import com.datastax.driver.mapping.Mapper; //导入依赖的package包/类
@Test
public void newDatastore() throws Exception {
    // Constructing a Datastore must succeed given only a mocked MappingManager.
    new MockUnit(MappingManager.class, Mapper.class)
        .run(unit -> new Datastore(unit.get(MappingManager.class)));
}
示例14: mapper
import com.datastax.driver.mapping.Mapper; //导入依赖的package包/类
@SuppressWarnings({"unchecked", "rawtypes" })
private Block mapper(final Class type) {
    // Expectation block: the mocked MappingManager must hand back the mocked
    // Mapper when asked for the given entity type.
    return unit -> {
        MappingManager mockManager = unit.get(MappingManager.class);
        expect(mockManager.mapper(type)).andReturn(unit.get(Mapper.class));
    };
}
示例15: CqlContentStore
import com.datastax.driver.mapping.Mapper; //导入依赖的package包/类
/**
 * Wires the content store: registers UDT codecs, builds the content mapper and
 * accessor, and applies the builder's read/write consistency levels as mapper
 * defaults (writes govern both save and delete).
 */
protected CqlContentStore(Builder builder) {
    this.idGenerator = checkNotNull(builder.idGenerator);
    this.session = checkNotNull(builder.session);
    this.clock = checkNotNull(builder.clock);

    MappingManager mappingManager = new MappingManager(session);
    // TODO: bug in driver 3.1.0 prompting this hackaround. Remove when it's fixed. MBST-16715
    mappingManager.udtCodec(Description.class);
    mappingManager.udtCodec(org.atlasapi.content.v2.model.udt.BroadcastRef.class);
    // Fix: LocationSummary was registered twice (copy-paste duplicate); once suffices.
    mappingManager.udtCodec(org.atlasapi.content.v2.model.udt.LocationSummary.class);
    mappingManager.udtCodec(org.atlasapi.content.v2.model.udt.ItemSummary.class);

    this.mapper = mappingManager.mapper(org.atlasapi.content.v2.model.Content.class);
    this.accessor = mappingManager.createAccessor(ContentAccessor.class);
    mapper.setDefaultGetOptions(Mapper.Option.consistencyLevel(builder.readConsistency));
    mapper.setDefaultSaveOptions(Mapper.Option.consistencyLevel(builder.writeConsistency));
    mapper.setDefaultDeleteOptions(Mapper.Option.consistencyLevel(builder.writeConsistency));

    this.sender = builder.sender;
    this.hasher = checkNotNull(builder.hasher);
    this.graphStore = checkNotNull(builder.graphStore);

    // Metric name prefixes for write instrumentation.
    writeContent = builder.metricPrefix + "writeContent.";
    writeBroadcast = builder.metricPrefix + "writeBroadcast.";
    this.metricRegistry = builder.metricRegistry;
}