This page collects typical usage examples of the Java enum value com.sforce.async.BatchStateEnum.Completed. If you are wondering what BatchStateEnum.Completed means, how it is used, or where to find real-world usages, the curated code examples below should help. You can also read more about its enclosing class, com.sforce.async.BatchStateEnum.
Seven code examples of BatchStateEnum.Completed are shown below, ordered roughly by popularity.
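Before the examples themselves, here is a minimal, self-contained sketch (not taken from any of the projects below) of the pattern they all share: poll a batch through BulkConnection.getBatchInfo until its state becomes BatchStateEnum.Completed (or Failed). The class name BatchPoller and the 5-second interval are illustrative choices, not part of any library API.

import com.sforce.async.AsyncApiException;
import com.sforce.async.BatchInfo;
import com.sforce.async.BatchStateEnum;
import com.sforce.async.BulkConnection;

public class BatchPoller {

    // Poll a single batch until it reaches a terminal state.
    // connection, jobId and batchId are expected to come from your own setup code.
    static BatchInfo waitUntilDone(BulkConnection connection, String jobId, String batchId)
            throws AsyncApiException, InterruptedException {
        while (true) {
            BatchInfo info = connection.getBatchInfo(jobId, batchId);
            if (info.getState() == BatchStateEnum.Completed
                    || info.getState() == BatchStateEnum.Failed) {
                return info;
            }
            Thread.sleep(5000L); // wait 5 seconds between polls
        }
    }
}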
Example 1: awaitCompletion
import com.sforce.async.BatchStateEnum; // import the package/class this example depends on
private void awaitCompletion(JobBatches jb)
        throws AsyncApiException {
    long sleepTime = 0L;
    Set<String> incomplete = new HashSet<>();
    for (BatchInfo bi : jb.batchInfoList) {
        incomplete.add(bi.getId());
    }
    while (!incomplete.isEmpty()) {
        try {
            Thread.sleep(sleepTime);
        } catch (InterruptedException e) {
            // interrupted while waiting; fall through and poll again
        }
        LOG.info("Awaiting Bulk API results... {}", incomplete.size());
        sleepTime = 1000L;
        BatchInfo[] statusList =
                bulkConnection.getBatchInfoList(jb.job.getId()).getBatchInfo();
        for (BatchInfo b : statusList) {
            if (b.getState() == BatchStateEnum.Completed
                    || b.getState() == BatchStateEnum.Failed) {
                if (incomplete.remove(b.getId())) {
                    LOG.info("Batch status: {}", b);
                }
            }
        }
    }
}
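The JobBatches type passed into this method is project-specific and not shown on this page. Judging only from the fields the method reads (job and batchInfoList), a minimal holder along these lines would be enough to compile the example; the real class in the project may look different.

import com.sforce.async.BatchInfo;
import com.sforce.async.JobInfo;
import java.util.List;

// Hypothetical holder inferred from the usage above; the original class is not shown.
class JobBatches {
    final JobInfo job;
    final List<BatchInfo> batchInfoList;

    JobBatches(JobInfo job, List<BatchInfo> batchInfoList) {
        this.job = job;
        this.batchInfoList = batchInfoList;
    }
}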
Example 2: awaitCompletion
import com.sforce.async.BatchStateEnum; // import the package/class this example depends on
/**
 * Wait for a job to complete by polling the Bulk API.
 *
 * @throws AsyncApiException
 * @throws ConnectionException
 */
private void awaitCompletion() throws AsyncApiException, ConnectionException {
    long sleepTime = 0L;
    Set<String> incomplete = new HashSet<String>();
    for (BatchInfo bi : batchInfoList) {
        incomplete.add(bi.getId());
    }
    while (!incomplete.isEmpty()) {
        try {
            Thread.sleep(sleepTime);
        } catch (InterruptedException e) {
            // interrupted while waiting; poll again immediately
        }
        sleepTime = awaitTime;
        BatchInfo[] statusList = getBatchInfoList(job.getId()).getBatchInfo();
        for (BatchInfo b : statusList) {
            if (b.getState() == BatchStateEnum.Completed || b.getState() == BatchStateEnum.Failed) {
                incomplete.remove(b.getId());
            }
        }
    }
}
Example 3: syncQuery
import com.sforce.async.BatchStateEnum; // import the package/class this example depends on
public List<Map<String, String>> syncQuery(String objectType, String query)
        throws InterruptedException, AsyncApiException, IOException {
    // create the job
    JobInfo jobInfo = new JobInfo();
    jobInfo.setObject(objectType);
    if (queryAll) {
        jobInfo.setOperation(OperationEnum.queryAll);
    } else {
        jobInfo.setOperation(OperationEnum.query);
    }
    jobInfo.setContentType(ContentType.CSV);
    jobInfo = bulkConnection.createJob(jobInfo);
    // create the batch
    InputStream is = new ByteArrayInputStream(query.getBytes());
    BatchInfo batchInfo = bulkConnection.createBatchFromStream(jobInfo, is);
    // close the job
    JobInfo closeJob = new JobInfo();
    closeJob.setId(jobInfo.getId());
    closeJob.setState(JobStateEnum.Closed);
    bulkConnection.updateJob(closeJob);
    // poll until the batch reaches a terminal state
    batchInfo = waitBatch(batchInfo);
    BatchStateEnum state = batchInfo.getState();
    // fetch the results
    if (state == BatchStateEnum.Completed) {
        QueryResultList queryResultList =
                bulkConnection.getQueryResultList(
                        batchInfo.getJobId(),
                        batchInfo.getId());
        return getQueryResultMapList(batchInfo, queryResultList);
    } else {
        throw new AsyncApiException(batchInfo.getStateMessage(), AsyncExceptionCode.InvalidBatch);
    }
}
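The helper getQueryResultMapList is not shown on this page. As a rough sketch of what such a helper could do, the result ids from the QueryResultList can be streamed back with BulkConnection.getQueryResultStream and each CSV row turned into a column-name-to-value map; the CSVReader used here ships in the same com.sforce.async package. This is a hypothetical reconstruction, not the project's actual code.

import com.sforce.async.AsyncApiException;
import com.sforce.async.BatchInfo;
import com.sforce.async.CSVReader;
import com.sforce.async.QueryResultList;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

// Hypothetical helper: download each result set and map every CSV row to header -> value.
private List<Map<String, String>> getQueryResultMapList(BatchInfo batchInfo, QueryResultList queryResultList)
        throws AsyncApiException, IOException {
    List<Map<String, String>> rows = new ArrayList<>();
    for (String resultId : queryResultList.getResult()) {
        InputStream resultStream = bulkConnection.getQueryResultStream(
                batchInfo.getJobId(), batchInfo.getId(), resultId);
        CSVReader reader = new CSVReader(resultStream);
        List<String> header = reader.nextRecord();
        List<String> record;
        while ((record = reader.nextRecord()) != null) {
            Map<String, String> row = new HashMap<>();
            for (int i = 0; i < header.size(); i++) {
                row.put(header.get(i), record.get(i));
            }
            rows.add(row);
        }
    }
    return rows;
}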
Example 4: waitBatch
import com.sforce.async.BatchStateEnum; // import the package/class this example depends on
private BatchInfo waitBatch(BatchInfo batchInfo)
        throws InterruptedException, AsyncApiException {
    while (true) {
        Thread.sleep(pollingIntervalMillisecond);
        batchInfo = bulkConnection.getBatchInfo(
                batchInfo.getJobId(),
                batchInfo.getId());
        BatchStateEnum state = batchInfo.getState();
        if (state == BatchStateEnum.Completed ||
                state == BatchStateEnum.Failed ||
                state == BatchStateEnum.NotProcessed) {
            return batchInfo;
        }
    }
}
Example 5: awaitCompletion
import com.sforce.async.BatchStateEnum; // import the package/class this example depends on
/**
 * Wait for a job to complete by polling the Bulk API.
 *
 * @param connection
 *            BulkConnection used to check results.
 * @param job
 *            The job awaiting completion.
 * @param batchInfoList
 *            List of batches for this job.
 * @throws AsyncApiException
 */
public void awaitCompletion(BulkConnection connection, JobInfo job, List<BatchInfo> batchInfoList)
        throws AsyncApiException {
    long sleepTime = 0L;
    Set<String> incompleteBatchInfos = new HashSet<String>();
    for (BatchInfo bi : batchInfoList) {
        incompleteBatchInfos.add(bi.getId());
    }
    while (!incompleteBatchInfos.isEmpty()) {
        try {
            Thread.sleep(sleepTime);
        } catch (InterruptedException e) {
            ApexUnitUtils
                    .shutDownWithDebugLog(e, "InterruptedException encountered while the thread was attempting to sleep");
        }
        LOG.debug("Awaiting results... Batches remaining for processing: " + incompleteBatchInfos.size());
        sleepTime = 10000L;
        BatchInfo[] statusList = connection.getBatchInfoList(job.getId()).getBatchInfo();
        for (BatchInfo batchInfo : statusList) {
            // Keep the batches that are still InProgress or Queued;
            // remove the rest from incompleteBatchInfos.
            if (batchInfo.getState() == BatchStateEnum.Completed) {
                if (incompleteBatchInfos.remove(batchInfo.getId())) {
                    LOG.debug("BATCH STATUS:" + batchInfo.getStateMessage());
                }
            } else if (batchInfo.getState() == BatchStateEnum.NotProcessed) {
                LOG.info("Batch " + batchInfo.getId() + " did not process, terminating it");
                incompleteBatchInfos.remove(batchInfo.getId());
            } else if (batchInfo.getState() == BatchStateEnum.Failed) {
                ApexUnitUtils.shutDownWithErrMsg("BATCH STATUS:" + batchInfo.getStateMessage());
            }
        }
    }
}
Example 6: waitForPkBatches
import com.sforce.async.BatchStateEnum; // import the package/class this example depends on
/**
 * Waits for the PK-chunking batches to complete. The wait stops after all batches are complete or on the first failed batch.
 * @param batchInfoList list of batch info
 * @param retryInterval the polling interval in seconds
 * @return the last {@link BatchInfo} processed
 * @throws InterruptedException
 * @throws AsyncApiException
 */
private BatchInfo waitForPkBatches(BatchInfoList batchInfoList, int retryInterval)
        throws InterruptedException, AsyncApiException {
    BatchInfo batchInfo = null;
    BatchInfo[] batchInfos = batchInfoList.getBatchInfo();
    // Wait for all batches other than the first one; the first batch is not processed in PK chunking mode.
    for (int i = 1; i < batchInfos.length; i++) {
        BatchInfo bi = batchInfos[i];
        // get the refreshed batch status
        bi = this.bulkConnection.getBatchInfo(this.bulkJob.getId(), bi.getId());
        while ((bi.getState() != BatchStateEnum.Completed)
                && (bi.getState() != BatchStateEnum.Failed)) {
            Thread.sleep(retryInterval * 1000);
            bi = this.bulkConnection.getBatchInfo(this.bulkJob.getId(), bi.getId());
            log.debug("Bulk Api Batch Info:" + bi);
            log.info("Waiting for bulk resultSetIds");
        }
        batchInfo = bi;
        // exit early if a batch failed
        if (batchInfo.getState() == BatchStateEnum.Failed) {
            break;
        }
    }
    return batchInfo;
}
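What happens after waitForPkBatches returns is not shown here. Typically, once the chunked batches are Completed, each one exposes its own list of result set ids (the "bulk resultSetIds" mentioned in the log line above) that can then be downloaded one by one. The sketch below is illustrative only; it reuses the bulkConnection and bulkJob fields from the example, and collectResultSetIds is not a method of the original project.

import com.sforce.async.AsyncApiException;
import com.sforce.async.BatchInfo;
import com.sforce.async.QueryResultList;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

// Illustrative sketch: gather the result set ids of every completed PK-chunking batch.
private List<String> collectResultSetIds(BatchInfo[] batchInfos) throws AsyncApiException {
    List<String> resultSetIds = new ArrayList<>();
    for (int i = 1; i < batchInfos.length; i++) { // batch 0 stays NotProcessed under PK chunking
        QueryResultList results = this.bulkConnection.getQueryResultList(
                this.bulkJob.getId(), batchInfos[i].getId());
        resultSetIds.addAll(Arrays.asList(results.getResult()));
    }
    return resultSetIds;
}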
Example 7: doBulkQuery
import com.sforce.async.BatchStateEnum; // import the package/class this example depends on
/**
 * Creates and executes a job for a bulk query. The job must finish within 2 minutes on the Salesforce side.<br/>
 * According to the Salesforce documentation, two scenarios are possible here:
 * <ul>
 * <li>A simple bulk query. Its batch should end up in status {@link BatchStateEnum#Completed}.</li>
 * <li>A primary-key-chunking bulk query. Its first batch should be returned with status {@link BatchStateEnum#NotProcessed},<br/>
 * while all other batches should end up in status {@link BatchStateEnum#Completed}.</li>
 * </ul>
 *
 * @param moduleName - input module name.
 * @param queryStatement - query to be executed.
 * @throws AsyncApiException
 * @throws InterruptedException
 * @throws ConnectionException
 */
public void doBulkQuery(String moduleName, String queryStatement)
        throws AsyncApiException, InterruptedException, ConnectionException {
    job = new JobInfo();
    job.setObject(moduleName);
    job.setOperation(OperationEnum.query);
    if (concurrencyMode != null) {
        job.setConcurrencyMode(concurrencyMode);
    }
    job.setContentType(ContentType.CSV);
    job = createJob(job);
    if (job.getId() == null) { // job creation failed
        throw new ComponentException(new DefaultErrorCode(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, "failedBatch"),
                ExceptionContext.build().put("failedBatch", job));
    }
    ByteArrayInputStream bout = new ByteArrayInputStream(queryStatement.getBytes());
    BatchInfo info = createBatchFromStream(job, bout);
    int secToWait = 1;
    int tryCount = 0;
    while (true) {
        LOGGER.debug("Awaiting " + secToWait + " seconds for results ...\n" + info);
        Thread.sleep(secToWait * 1000);
        info = getBatchInfo(job.getId(), info.getId());
        if (info.getState() == BatchStateEnum.Completed
                || (BatchStateEnum.NotProcessed == info.getState() && 0 < chunkSize)) {
            break;
        } else if (info.getState() == BatchStateEnum.Failed) {
            throw new ComponentException(new DefaultErrorCode(HttpServletResponse.SC_BAD_REQUEST, "failedBatch"),
                    ExceptionContext.build().put("failedBatch", info));
        }
        tryCount++;
        if (tryCount % 3 == 0) { // after every 3 polling attempts, double the wait time
            secToWait = secToWait * 2;
        }
        // There is also a 2-minute limit on the time to process the query.
        // If the query takes more than 2 minutes to process, a QUERY_TIMEOUT error is returned.
        // https://developer.salesforce.com/docs/atlas.en-us.api_asynch.meta/api_asynch/asynch_api_concepts_limits.htm
        int processingTime = (int) ((System.currentTimeMillis() - job.getCreatedDate().getTimeInMillis()) / 1000);
        if (processingTime > 120) {
            throw new ComponentException(new DefaultErrorCode(HttpServletResponse.SC_REQUEST_TIMEOUT, "failedBatch"),
                    ExceptionContext.build().put("failedBatch", info));
        }
    }
    retrieveResultsOfQuery(info);
}
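The NotProcessed branch in the loop above only makes sense when PK chunking was requested for the job (chunkSize > 0): Salesforce then leaves the original batch in NotProcessed and creates additional batches, one per primary-key range. PK chunking is requested through the Sforce-Enable-PKChunking request header; whether your client exposes addHeader exactly like this depends on the Bulk API client version, so treat the following as a sketch rather than the project's actual setup code.

import com.sforce.async.BulkConnection;

// Sketch: request PK chunking before the job is created (assumes BulkConnection.addHeader is available).
private void enablePkChunking(BulkConnection connection, int chunkSize) {
    if (chunkSize > 0) {
        connection.addHeader("Sforce-Enable-PKChunking", "chunkSize=" + chunkSize);
    }
}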