本文整理汇总了Java中org.apache.gora.store.DataStore类的典型用法代码示例。如果您正苦于以下问题:Java DataStore类的具体用法?Java DataStore怎么用?Java DataStore使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。
DataStore类属于org.apache.gora.store包,在下文中一共展示了DataStore类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: run
import org.apache.gora.store.DataStore; //导入依赖的package包/类
/**
 * Deletes the {@code CINode} stored under the key given on the command line.
 *
 * @param args exactly one argument: the key of the node to delete, as a
 *             hexadecimal string
 * @return 0 if the node was deleted, 1 on usage error or if nothing was deleted
 * @throws Exception if the datastore cannot be opened or the delete fails
 */
public int run(String[] args) throws Exception {
  if (args.length != 1) {
    LOG.info("Usage : {} <node to delete>", Delete.class.getSimpleName());
    // Usage error: exit non-zero so scripts/callers can detect the failure
    // (the original returned 0 here, which signalled success).
    return 1;
  }
  DataStore<Long,CINode> store = DataStoreFactory.getDataStore(Long.class, CINode.class, new Configuration());
  try {
    // Keys are passed as hex strings; parse to the store's long key type.
    boolean ret = store.delete(new BigInteger(args[0], 16).longValue());
    store.flush();
    LOG.info("Delete returned {}", ret);
    return ret ? 0 : 1;
  } finally {
    // Always release the datastore, even if delete/flush throws.
    store.close();
  }
}
示例2: persist
import org.apache.gora.store.DataStore; //导入依赖的package包/类
/**
 * Writes one batch of {@code CINode} records to the datastore.
 *
 * @param output  task context, ticked periodically so the framework sees progress
 * @param store   destination datastore
 * @param count   base sequence number for this batch
 * @param prev    keys of the previous batch, or null for the first batch
 * @param current keys to persist; one node is written per key
 * @param id      client identifier stamped on every node
 * @throws IOException if a put or the final flush fails
 */
private static void persist(Context output, DataStore<Long,CINode> store, long count, long[] prev, long[] current, Utf8 id) throws IOException {
  int idx = 0;
  for (long key : current) {
    CINode node = store.newPersistent();
    node.setCount(count + idx);
    // First batch has no predecessor; -1 marks the chain head.
    node.setPrev(prev == null ? (long) -1 : prev[idx]);
    node.setClient(id);
    store.put(key, node);
    if (idx % 1000 == 0) {
      // Tickle progress every so often else maprunner will think us hung
      output.progress();
    }
    idx++;
  }
  store.flush();
}
示例3: createJob
import org.apache.gora.store.DataStore; //导入依赖的package包/类
/**
 * Creates and returns the {@link Job} for submitting to Hadoop mapreduce.
 *
 * @param inStore    datastore supplying the {@code Pageview} input records
 * @param outStore   datastore that will receive the {@code MetricDatum} output
 * @param numReducer number of reduce tasks to run
 * @return the fully configured, not-yet-submitted job
 * @throws IOException if the job input/output cannot be configured
 */
public Job createJob(DataStore<Long, Pageview> inStore,
    DataStore<String, MetricDatum> outStore, int numReducer) throws IOException {
  Job job = new Job(getConf());
  job.setJobName("Log Analytics");
  log.info("Creating Hadoop Job: {}", job.getJobName());
  job.setJarByClass(getClass());
  job.setNumReduceTasks(numReducer);
  // Mappers are wired in with GoraMapper.initMapper() / GoraInputFormat.setInput().
  GoraMapper.initMapperJob(job, inStore, TextLong.class, LongWritable.class,
      LogAnalyticsMapper.class, true);
  // Reducers are wired in with GoraReducer#initReducer(); any reducer could be
  // used instead if the output were not persisted through Gora.
  GoraReducer.initReducerJob(job, outStore, LogAnalyticsReducer.class);
  return job;
}
示例4: run
import org.apache.gora.store.DataStore; //导入依赖的package包/类
/**
 * Resolves the input and output datastores from the command-line arguments
 * (falling back to the configured defaults when none are given) and runs
 * the word count.
 */
@Override
public int run(String[] args) throws Exception {
  Configuration conf = new Configuration();
  DataStore<String,WebPage> inStore;
  DataStore<String, TokenDatum> outStore;
  if (args.length == 0) {
    // No arguments: use the default store class configured for each type.
    inStore = DataStoreFactory.getDataStore(String.class, WebPage.class, conf);
    outStore = DataStoreFactory.getDataStore(String.class, TokenDatum.class, conf);
  } else {
    String dataStoreClass = args[0];
    inStore = DataStoreFactory.getDataStore(dataStoreClass,
        String.class, WebPage.class, conf);
    // An optional second argument selects a different store class for the
    // output; otherwise the input's store class is reused.
    if (args.length > 1) {
      dataStoreClass = args[1];
    }
    outStore = DataStoreFactory.getDataStore(dataStoreClass,
        String.class, TokenDatum.class, conf);
  }
  return wordCount(inStore, outStore);
}
示例5: generateOutputConf
import org.apache.gora.store.DataStore; //导入依赖的package包/类
/**
 * Sets the output parameters for the conf that Spark will use.
 *
 * @param job             the job to set the properties for
 * @param dataStoreClass  the datastore class
 * @param keyClass        output key class
 * @param persistentClass output value class
 * @return the job's configuration with the Gora output settings applied
 */
@SuppressWarnings("rawtypes")
public <K, V extends Persistent> Configuration generateOutputConf(Job job,
    Class<? extends DataStore> dataStoreClass,
    Class<K> keyClass, Class<V> persistentClass) {
  job.setOutputFormatClass(GoraOutputFormat.class);
  job.setOutputKeyClass(keyClass);
  job.setOutputValueClass(persistentClass);
  // One configuration object backs the job; fetch it once and reuse it.
  Configuration conf = job.getConfiguration();
  conf.setClass(GoraOutputFormat.DATA_STORE_CLASS, dataStoreClass, DataStore.class);
  conf.setClass(GoraOutputFormat.OUTPUT_KEY_CLASS, keyClass, Object.class);
  conf.setClass(GoraOutputFormat.OUTPUT_VALUE_CLASS, persistentClass, Persistent.class);
  return conf;
}
示例6: getRecordWriter
import org.apache.gora.store.DataStore; //导入依赖的package包/类
/**
 * Builds a {@link RecordWriter} backed by the {@link DataStore} described
 * in the job configuration (store class, key class, value class).
 *
 * @param context the task attempt context carrying the configuration
 * @return a GoraRecordWriter writing into the freshly created store
 * @throws IOException if the datastore cannot be created
 * @throws InterruptedException per the RecordWriter contract
 */
@Override
@SuppressWarnings("unchecked")
public RecordWriter<K, T> getRecordWriter(TaskAttemptContext context)
    throws IOException, InterruptedException {
  Configuration conf = context.getConfiguration();
  Class<? extends DataStore<K,T>> dataStoreClass
      = (Class<? extends DataStore<K,T>>) conf.getClass(DATA_STORE_CLASS, null);
  Class<K> keyClass = (Class<K>) conf.getClass(OUTPUT_KEY_CLASS, null);
  Class<T> rowClass = (Class<T>) conf.getClass(OUTPUT_VALUE_CLASS, null);
  // Reuse the conf already extracted above instead of fetching it from the
  // context a second time (was context.getConfiguration() again).
  final DataStore<K, T> store =
      DataStoreFactory.createDataStore(dataStoreClass, keyClass, rowClass, conf);
  setOutputPath(store, context);
  return new GoraRecordWriter(store, context);
}
示例7: initMapperJob
import org.apache.gora.store.DataStore; //导入依赖的package包/类
/**
 * Initializes the Mapper and sets input parameters for the job. Every
 * record in the datastore becomes map input; to restrict the input to a
 * subset, use one of the overloads that accepts a query.
 *
 * @param job              the job to set the properties for
 * @param dataStoreClass   the datastore class
 * @param inKeyClass       Map input key class
 * @param inValueClass     Map input value class
 * @param outKeyClass      Map output key class
 * @param outValueClass    Map output value class
 * @param mapperClass      the mapper class extending GoraMapper
 * @param partitionerClass optional partitioner class; null keeps Hadoop's default
 * @param reuseObjects     whether to reuse objects in serialization
 * @throws IOException if the input cannot be configured
 */
@SuppressWarnings("rawtypes")
public static <K1, V1 extends Persistent, K2, V2> void initMapperJob(
    Job job,
    Class<? extends DataStore<K1,V1>> dataStoreClass,
    Class<K1> inKeyClass,
    Class<V1> inValueClass,
    Class<K2> outKeyClass,
    Class<V2> outValueClass,
    Class<? extends GoraMapper> mapperClass,
    Class<? extends Partitioner> partitionerClass,
    boolean reuseObjects) throws IOException {
  // Wire the datastore in as the job's input via GoraInputFormat.
  GoraInputFormat.setInput(job, dataStoreClass, inKeyClass, inValueClass, reuseObjects);
  job.setMapOutputKeyClass(outKeyClass);
  job.setMapOutputValueClass(outValueClass);
  job.setMapperClass(mapperClass);
  if (partitionerClass != null) {
    job.setPartitionerClass(partitionerClass);
  }
}
示例8: AccumuloResult
import org.apache.gora.store.DataStore; //导入依赖的package包/类
/**
 * Creates a result that iterates over the rows returned by the given
 * Accumulo scanner for the given query.
 *
 * @param dataStore the datastore the results belong to
 * @param query     the query that produced these results
 * @param scanner   Accumulo scanner already positioned over the matching rows
 */
public AccumuloResult(DataStore<K,T> dataStore, Query<K,T> query, Scanner scanner) {
  super(dataStore, query);
  // TODO set batch size based on limit, and construct iterator later
  iterator = new RowIterator(scanner.iterator());
}
示例9: createDataStore
import org.apache.gora.store.DataStore; //导入依赖的package包/类
/**
 * Creates the DynamoDB store but returns it as a generic {@code DataStore}.
 *
 * <p>Side effects: the created store is cached in {@code personStore} and
 * registered in {@code dataStores} for later cleanup. The unchecked casts
 * assume the configured {@code dataStoreClass} is a {@code DynamoDBStore}
 * keyed/valued as declared — TODO confirm against the field declarations.
 *
 * @param keyClass        key class for the store
 * @param persistentClass persistent (value) class for the store
 * @return the created store, cast to the caller's generic types
 * @throws GoraException if the store cannot be created
 */
@SuppressWarnings("unchecked")
public<K, T extends Persistent> DataStore<K,T>
createDataStore(Class<K> keyClass, Class<T> persistentClass) throws GoraException {
  personStore = (DynamoDBStore<DynamoDBKey, person>) WSDataStoreFactory.createDataStore(
      (Class<? extends DataStore<K,T>>)dataStoreClass, keyClass, persistentClass, auth);
  dataStores.add(personStore);
  return (DataStore<K, T>) personStore;
}
示例10: getDataStore
import org.apache.gora.store.DataStore; //导入依赖的package包/类
/**
 * Gets the cached DynamoDB data store, creating it on first use.
 *
 * @return the datastore, or null if creation failed
 */
public DataStore<DynamoDBKey, person> getDataStore(){
  try {
    // Lazy initialization: reuse the cached store when it already exists.
    return personStore == null ? createDataStore() : personStore;
  } catch (IOException e) {
    // Best-effort example code: report the failure and signal it with null.
    e.printStackTrace();
    return null;
  }
}
示例11: createDataStore
import org.apache.gora.store.DataStore; //导入依赖的package包/类
/**
 * Method to create the data store.
 *
 * <p>Obtains the store from the test driver and creates its schema.
 * On failure the error is logged and the (possibly null/stale)
 * {@code dataStore} field is returned as-is.
 *
 * @return the datastore from the test driver, or the previous field value on error
 */
@Override
protected DataStore<DynamoDBKey, person> createDataStore() {
  log.info("Creating DynamoDB data store.");
  try {
    dataStore = getTestDriver().getDataStore();
    dataStore.createSchema();
  } catch (Exception e) {
    log.error("error while creating DynamoDB data store");
    e.printStackTrace();
  }
  return dataStore;
}
示例12: SolrResult
import org.apache.gora.store.DataStore; //导入依赖的package包/类
/**
 * Executes the given Gora query against Solr and wraps the response.
 *
 * @param dataStore   the SolrStore the results belong to
 * @param query       the query to translate and execute
 * @param server      Solr server to query
 * @param resultsSize maximum number of rows to request
 * @throws IOException if the Solr request fails
 */
public SolrResult(DataStore<K, T> dataStore, Query<K, T> query,
    SolrServer server, int resultsSize) throws IOException {
  super(dataStore, query);
  store = (SolrStore<K, T>)dataStore;
  ModifiableSolrParams params = new ModifiableSolrParams();
  // Partition queries wrap the real query; unwrap before translating to Solr.
  if (query instanceof PartitionQueryImpl) {
    query = ((PartitionQueryImpl<K, T>)query).getBaseQuery();
  }
  params.set(CommonParams.Q, ((SolrQuery<K, T>)query).toSolrQuery());
  fields = query.getFields();
  if (fields == null) {
    // No projection requested: fetch every stored field.
    params.set(CommonParams.FL, "*");
  } else {
    // De-duplicate the projection and always include the primary key,
    // which callers need to identify each document.
    HashSet<String> requested = new HashSet<>(Arrays.asList(fields));
    requested.add(((SolrStore<K, T>)dataStore).getMapping().getPrimaryKey());
    StringBuilder fl = new StringBuilder();
    for (String field : requested) {
      if (fl.length() > 0) {
        fl.append(',');
      }
      fl.append(field);
    }
    params.set(CommonParams.FL, fl.toString());
  }
  params.set(CommonParams.ROWS, resultsSize);
  try {
    QueryResponse rsp = server.query(params);
    list = rsp.getResults();
  } catch (SolrServerException e) {
    // Surface Solr failures through this constructor's IOException contract.
    throw new IOException(e);
  }
}
示例13: createEmployeeDataStore
import org.apache.gora.store.DataStore; //导入依赖的package包/类
/**
 * Creates a Solr-backed Employee datastore initialized with the default
 * datastore properties.
 *
 * @return the initialized store
 * @throws IOException if initialization fails
 */
@Override
protected DataStore<String, Employee> createEmployeeDataStore()
    throws IOException {
  SolrStore<String, Employee> employeeStore = new SolrStore<>();
  employeeStore.initialize(String.class, Employee.class, DataStoreFactory.createProps());
  return employeeStore;
}
示例14: createWebPageDataStore
import org.apache.gora.store.DataStore; //导入依赖的package包/类
/**
 * Creates a Solr-backed WebPage datastore initialized with the default
 * datastore properties.
 *
 * @return the initialized store
 * @throws IOException if initialization fails
 */
@Override
protected DataStore<String, WebPage> createWebPageDataStore()
    throws IOException {
  SolrStore<String, WebPage> webPageStore = new SolrStore<>();
  webPageStore.initialize(String.class, WebPage.class, DataStoreFactory.createProps());
  return webPageStore;
}
示例15: setUp
import org.apache.gora.store.DataStore; //导入依赖的package包/类
/** Creates fresh Camel/Gora mocks and stubs the default exchange behaviour before each test. */
@Before
public void setUp() {
  // Mock out every collaborator so each test starts from an isolated state.
  mockCamelExchange = mock(Exchange.class);
  mockGoraEndpoint = mock(GoraEndpoint.class);
  mockGoraConfiguration = mock(GoraConfiguration.class);
  mockCamelMessage = mock(Message.class);
  mockDatastore = mock(DataStore.class);
  // Default stubbing: the exchange hands back the mocked "in" message and
  // reports an in-only (fire-and-forget) exchange pattern.
  when(mockCamelExchange.getIn()).thenReturn(mockCamelMessage);
  when(mockCamelExchange.getPattern()).thenReturn(ExchangePattern.InOnly);
}