This article collects typical usage examples of the Java enum constant com.sforce.async.BatchStateEnum.NotProcessed. If you are wondering what BatchStateEnum.NotProcessed is, how to use it, or where to find examples of it, the curated code samples below should help; they also give a closer look at the enclosing class com.sforce.async.BatchStateEnum.
The sections below present 3 code examples of BatchStateEnum.NotProcessed, ordered by popularity by default.
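As a quick orientation before the examples, here is a minimal, hypothetical sketch of the state check the examples revolve around; the bulkConnection, jobId, and batchId names are placeholders, not part of any example below:

// NotProcessed is a terminal batch state: Salesforce assigns it when a job is aborted
// while the batch is still queued, and to the original batch of a PK-chunked bulk query.
BatchInfo batch = bulkConnection.getBatchInfo(jobId, batchId);
if (batch.getState() == BatchStateEnum.NotProcessed) {
    // Stop polling: this batch will never be executed.
}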
Example 1: waitBatch
import com.sforce.async.BatchStateEnum; // import for the class this method depends on
private BatchInfo waitBatch(BatchInfo batchInfo)
        throws InterruptedException, AsyncApiException {
    while (true) {
        // Poll at the configured interval until the batch reaches a terminal state.
        Thread.sleep(pollingIntervalMillisecond);
        batchInfo = bulkConnection.getBatchInfo(
                batchInfo.getJobId(),
                batchInfo.getId());
        BatchStateEnum state = batchInfo.getState();
        if (state == BatchStateEnum.Completed ||
                state == BatchStateEnum.Failed ||
                state == BatchStateEnum.NotProcessed) {
            return batchInfo;
        }
    }
}
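A minimal usage sketch for this helper, assuming the enclosing class holds the bulkConnection field used above and that the batch was submitted with the WSC method createBatchFromStream; the job and csvStream variables are illustrative:

// Hypothetical caller: submit a batch, then block until it reaches a terminal state.
BatchInfo submitted = bulkConnection.createBatchFromStream(job, csvStream);
BatchInfo finished = waitBatch(submitted);
if (finished.getState() == BatchStateEnum.NotProcessed) {
    // Terminal but not necessarily an error: the job was aborted, or this is the
    // original batch of a PK-chunked query (see Example 3).
}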
Example 2: awaitCompletion
import com.sforce.async.BatchStateEnum; // import for the class this method depends on
/**
 * Wait for a job to complete by polling the Bulk API.
 *
 * @param connection
 *            BulkConnection used to check results.
 * @param job
 *            The job awaiting completion.
 * @param batchInfoList
 *            List of batches for this job.
 * @throws AsyncApiException
 */
public void awaitCompletion(BulkConnection connection, JobInfo job, List<BatchInfo> batchInfoList)
        throws AsyncApiException {
    long sleepTime = 0L;
    Set<String> incompleteBatchInfos = new HashSet<String>();
    for (BatchInfo bi : batchInfoList) {
        incompleteBatchInfos.add(bi.getId());
    }
    while (!incompleteBatchInfos.isEmpty()) {
        try {
            Thread.sleep(sleepTime);
        } catch (InterruptedException e) {
            ApexUnitUtils
                    .shutDownWithDebugLog(e, "InterruptedException encountered while the thread was attempting to sleep");
        }
        LOG.debug("Awaiting results... Batches remaining for processing: " + incompleteBatchInfos.size());
        sleepTime = 10000L;
        BatchInfo[] statusList = connection.getBatchInfoList(job.getId()).getBatchInfo();
        for (BatchInfo batchInfo : statusList) {
            // Keep the batches that are still InProgress or Queued;
            // remove the rest from incompleteBatchInfos.
            if (batchInfo.getState() == BatchStateEnum.Completed) {
                if (incompleteBatchInfos.remove(batchInfo.getId())) {
                    LOG.debug("BATCH STATUS:" + batchInfo.getStateMessage());
                }
            } else if (batchInfo.getState() == BatchStateEnum.NotProcessed) {
                LOG.info("Batch " + batchInfo.getId() + " did not process, terminating it");
                incompleteBatchInfos.remove(batchInfo.getId());
            } else if (batchInfo.getState() == BatchStateEnum.Failed) {
                ApexUnitUtils.shutDownWithErrMsg("BATCH STATUS:" + batchInfo.getStateMessage());
            }
        }
    }
}
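A hedged calling sketch for awaitCompletion, assuming the job and its batches were created earlier on the same BulkConnection; the variable names are illustrative:

// Hypothetical caller: gather the job's batches and wait for all of them to finish.
List<BatchInfo> batches = Arrays.asList(connection.getBatchInfoList(job.getId()).getBatchInfo());
awaitCompletion(connection, job, batches);
// On return, every batch ended as Completed or NotProcessed; a Failed batch shuts the run down.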
Example 3: doBulkQuery
import com.sforce.async.BatchStateEnum; // import for the class this method depends on
/**
 * Creates and executes a job for a bulk query. The job must finish within 2 minutes on the Salesforce side.<br/>
 * According to the Salesforce documentation, two scenarios are possible here:
 * <ul>
 * <li>simple bulk query. Its batch should end with status {@link BatchStateEnum#Completed}.</li>
 * <li>bulk query with primary key chunking. Its original batch is returned with status {@link BatchStateEnum#NotProcessed},<br/>
 * while the remaining batches should end with status {@link BatchStateEnum#Completed}.</li>
 * </ul>
 *
 * @param moduleName - input module name.
 * @param queryStatement - to be executed.
 * @throws AsyncApiException
 * @throws InterruptedException
 * @throws ConnectionException
 */
public void doBulkQuery(String moduleName, String queryStatement)
        throws AsyncApiException, InterruptedException, ConnectionException {
    job = new JobInfo();
    job.setObject(moduleName);
    job.setOperation(OperationEnum.query);
    if (concurrencyMode != null) {
        job.setConcurrencyMode(concurrencyMode);
    }
    job.setContentType(ContentType.CSV);
    job = createJob(job);
    if (job.getId() == null) { // job creation failed
        throw new ComponentException(new DefaultErrorCode(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, "failedBatch"),
                ExceptionContext.build().put("failedBatch", job));
    }
    ByteArrayInputStream bout = new ByteArrayInputStream(queryStatement.getBytes());
    BatchInfo info = createBatchFromStream(job, bout);
    int secToWait = 1;
    int tryCount = 0;
    while (true) {
        LOGGER.debug("Awaiting " + secToWait + " seconds for results ...\n" + info);
        Thread.sleep(secToWait * 1000);
        info = getBatchInfo(job.getId(), info.getId());
        if (info.getState() == BatchStateEnum.Completed
                || (BatchStateEnum.NotProcessed == info.getState() && 0 < chunkSize)) {
            break;
        } else if (info.getState() == BatchStateEnum.Failed) {
            throw new ComponentException(new DefaultErrorCode(HttpServletResponse.SC_BAD_REQUEST, "failedBatch"),
                    ExceptionContext.build().put("failedBatch", info));
        }
        tryCount++;
        if (tryCount % 3 == 0) { // after 3 attempts to get the result, double the wait time
            secToWait = secToWait * 2;
        }
        // There is also a 2-minute limit on the time to process the query.
        // If the query takes more than 2 minutes to process, a QUERY_TIMEOUT error is returned.
        // https://developer.salesforce.com/docs/atlas.en-us.api_asynch.meta/api_asynch/asynch_api_concepts_limits.htm
        int processingTime = (int) ((System.currentTimeMillis() - job.getCreatedDate().getTimeInMillis()) / 1000);
        if (processingTime > 120) {
            throw new ComponentException(new DefaultErrorCode(HttpServletResponse.SC_REQUEST_TIMEOUT, "failedBatch"),
                    ExceptionContext.build().put("failedBatch", info));
        }
    }
    retrieveResultsOfQuery(info);
}
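The NotProcessed branch above only triggers when PK chunking is enabled (chunkSize > 0). A hedged sketch of how that might be requested before calling doBulkQuery, assuming the enclosing class exposes the underlying WSC BulkConnection as a bulkConnection field (the field name and chunk size are illustrative):

// Illustrative only: ask Salesforce to split the query into primary-key ranges.
// The original batch then finishes as NotProcessed and the generated batches carry the data.
bulkConnection.addHeader("Sforce-Enable-PKChunking", "chunkSize=100000");
doBulkQuery("Account", "SELECT Id, Name FROM Account");
// doBulkQuery then calls retrieveResultsOfQuery(info) internally to read the CSV results of the chunked batches.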