This article collects typical usage examples of the Java class io.digdag.spi.TaskRequest. If you are wondering what the TaskRequest class does, how to use it, or what real code that uses it looks like, the curated examples below may help.
The TaskRequest class belongs to the io.digdag.spi package. 15 code examples of the class are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Java code examples.
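For orientation, the snippets on this page exercise the following accessors on TaskRequest. This is a minimal sketch inferred from the examples below, not the complete io.digdag.spi.TaskRequest interface:

// Sketch of the TaskRequest accessors used in the examples below (inferred, not exhaustive).
public interface TaskRequest
{
    int getSiteId();                    // examples 5, 6, 7, 9
    int getProjectId();                 // example 1
    Optional<String> getProjectName();  // examples 5, 9 (Guava Optional)
    Optional<String> getRevision();     // example 1
    long getTaskId();                   // examples 5, 9
    long getAttemptId();                // examples 7, 12
    String getTaskName();               // examples 5, 7, 9, 12
    String getLockId();                 // examples 6, 9
    Instant getSessionTime();           // example 15
    Config getConfig();                 // examples 1, 10, 11, 13
}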
Example 1: uniqueImageName
import io.digdag.spi.TaskRequest; // import the required package/class
private static String uniqueImageName(TaskRequest request,
String baseImageName, List<String> buildCommands)
{
// The name should include the project id for security reasons: a
// colliding SHA1 hash would otherwise let an attacker reuse an image
// built by someone else.
String name = "digdag-project-" + Integer.toString(request.getProjectId());
Config config = request.getConfig().getFactory().create();
config.set("image", baseImageName);
config.set("build", buildCommands);
config.set("revision", request.getRevision().or(UUID.randomUUID().toString()));
String tag = Hashing.sha1().hashString(config.toString(), UTF_8).toString();
return name + ':' + tag;
}
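A rough usage sketch follows; the request variable is a hypothetical TaskRequest stub, not part of the original snippet. The resulting name has the form digdag-project-<projectId>:<sha1-of-serialized-config>.

// Hypothetical usage; "request" is a stub TaskRequest used for illustration only.
String image = uniqueImageName(request, "ubuntu:20.04",
        ImmutableList.of("apt-get update", "apt-get install -y python3"));
// e.g. "digdag-project-42:<40-character-sha1-tag>"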
Example 2: getCopyConfig
import io.digdag.spi.TaskRequest; // import the required package/class
private String getCopyConfig(Map<String, Object> configInput, boolean maskConfig)
throws IOException
{
TaskRequest taskRequest = testHelper.createTaskRequest(configInput, Optional.absent());
OperatorContext operatorContext = mock(OperatorContext.class);
when(operatorContext.getProjectPath()).thenReturn(testHelper.projectPath());
when(operatorContext.getTaskRequest()).thenReturn(taskRequest);
RedshiftLoadOperatorFactory.RedshiftLoadOperator operator = (RedshiftLoadOperatorFactory.RedshiftLoadOperator) operatorFactory.newOperator(operatorContext);
assertThat(operator, is(instanceOf(RedshiftLoadOperatorFactory.RedshiftLoadOperator.class)));
AWSSessionCredentials credentials = mock(AWSSessionCredentials.class);
when(credentials.getAWSAccessKeyId()).thenReturn("my-access-key-id");
when(credentials.getAWSSecretKey()).thenReturn("my-secret-access-key");
RedshiftConnection.CopyConfig copyConfig = operator.createCopyConfig(testHelper.createConfig(configInput), credentials);
Connection connection = mock(Connection.class);
RedshiftConnection redshiftConnection = new RedshiftConnection(connection);
return redshiftConnection.buildCopyStatement(copyConfig, maskConfig);
}
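A hypothetical invocation of this test helper, reusing the config keys from Example 3 below, might look like this (the maskConfig flag presumably controls whether sensitive values are masked in the generated COPY statement):

// Hypothetical call; the config keys match those used in Example 3 below.
Map<String, Object> configInput = ImmutableMap.of(
        "table", "my_table",
        "from", "s3://my-bucket/my-path",
        "csv", "");
String maskedCopySql = getCopyConfig(configInput, true);   // mask sensitive values
String rawCopySql = getCopyConfig(configInput, false);     // keep values as-is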
Example 3: newOperator
import io.digdag.spi.TaskRequest; // import the required package/class
@Test
public void newOperator()
throws IOException
{
Map<String, Object> configInput = ImmutableMap.of(
"table", "my_table",
"from", "s3://my-bucket/my-path",
"csv", ""
);
TaskRequest taskRequest = testHelper.createTaskRequest(configInput, Optional.absent());
OperatorContext operatorContext = mock(OperatorContext.class);
when(operatorContext.getProjectPath()).thenReturn(testHelper.projectPath());
when(operatorContext.getTaskRequest()).thenReturn(taskRequest);
RedshiftLoadOperatorFactory.RedshiftLoadOperator operator = (RedshiftLoadOperatorFactory.RedshiftLoadOperator) operatorFactory.newOperator(operatorContext);
assertThat(operator, is(instanceOf(RedshiftLoadOperatorFactory.RedshiftLoadOperator.class)));
}
Example 4: getUnloadConfig
import io.digdag.spi.TaskRequest; // import the required package/class
private String getUnloadConfig(Map<String, Object> configInput, String queryId, boolean maskConfig)
throws IOException
{
TaskRequest taskRequest = testHelper.createTaskRequest(configInput, Optional.absent());
OperatorContext operatorContext = mock(OperatorContext.class);
when(operatorContext.getProjectPath()).thenReturn(testHelper.projectPath());
when(operatorContext.getTaskRequest()).thenReturn(taskRequest);
RedshiftUnloadOperatorFactory.RedshiftUnloadOperator operator = (RedshiftUnloadOperatorFactory.RedshiftUnloadOperator) operatorFactory.newOperator(operatorContext);
assertThat(operator, is(instanceOf(RedshiftUnloadOperatorFactory.RedshiftUnloadOperator.class)));
AWSSessionCredentials credentials = mock(AWSSessionCredentials.class);
when(credentials.getAWSAccessKeyId()).thenReturn("my-access-key-id");
when(credentials.getAWSSecretKey()).thenReturn("my-secret-access-key");
RedshiftConnection.UnloadConfig unloadConfig = operator.createUnloadConfig(testHelper.createConfig(configInput), credentials, queryId);
Connection connection = mock(Connection.class);
RedshiftConnection redshiftConnection = new RedshiftConnection(connection);
return redshiftConnection.buildUnloadStatement(unloadConfig, maskConfig);
}
Example 5: run
import io.digdag.spi.TaskRequest; // import the required package/class
public void run(TaskRequest request)
{
long taskId = request.getTaskId();
String origThreadName = String.format("[%d:%s]%s", request.getSiteId(), request.getProjectName().or("----"), request.getTaskName());
// set task name to thread name so that logger shows it
try (SetThreadName threadName = new SetThreadName(origThreadName)) {
try (TaskLogger taskLogger = callback.newTaskLogger(request)) {
TaskContextLogging.enter(LogLevel.DEBUG, taskLogger);
try {
runningTaskMap.put(taskId, request);
try {
runWithHeartbeat(request);
}
finally {
runningTaskMap.remove(taskId);
}
}
finally {
TaskContextLogging.leave();
}
}
}
}
Example 6: heartbeat
import io.digdag.spi.TaskRequest; // import the required package/class
private void heartbeat()
{
try {
Map<Integer, List<String>> sites = runningTaskMap.values().stream()
.collect(Collectors.groupingBy(
TaskRequest::getSiteId,
Collectors.mapping(TaskRequest::getLockId, Collectors.toList())
));
for (Map.Entry<Integer, List<String>> pair : sites.entrySet()) {
int siteId = pair.getKey();
List<String> lockIds = pair.getValue();
callback.taskHeartbeat(siteId, lockIds, agentId, agentConfig.getLockRetentionTime());
}
}
catch (Throwable t) {
logger.error("Uncaught exception during sending task heartbeats to a server. Ignoring. Heartbeat thread will be retried.", t);
errorReporter.reportUncaughtError(t);
}
}
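Examples 5 and 6 share a runningTaskMap field that is not shown in the snippets. The sketch below shows one plausible way to declare that state and schedule heartbeat(); the field name matches the snippets, while the ConcurrentHashMap choice, the scheduler, and the interval are assumptions:

// Assumed supporting state for Examples 5 and 6 (not part of the original snippets):
// task threads add and remove entries while a background thread sends heartbeats.
private final Map<Long, TaskRequest> runningTaskMap = new ConcurrentHashMap<>();

// Assumed scheduling: invoke heartbeat() periodically on a single background thread.
private final ScheduledExecutorService heartbeatScheduler =
        Executors.newSingleThreadScheduledExecutor();

private void startHeartbeat(int intervalSeconds)
{
    heartbeatScheduler.scheduleAtFixedRate(this::heartbeat,
            intervalSeconds, intervalSeconds, TimeUnit.SECONDS);
}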
Example 7: newTaskLogger
import io.digdag.spi.TaskRequest; // import the required package/class
@Override
public TaskLogger newTaskLogger(TaskRequest request)
{
long attemptId = request.getAttemptId();
String taskName = request.getTaskName();
LogFilePrefix prefix;
try {
StoredSessionAttemptWithSession attempt =
tm.begin(() -> sm.getSessionStore(request.getSiteId()).getAttemptById(attemptId),
ResourceNotFoundException.class);
prefix = logFilePrefixFromSessionAttempt(attempt);
}
catch (ResourceNotFoundException ex) {
throw new RuntimeException(ex);
}
return lm.newInProcessTaskLogger(agentId, prefix, taskName);
}
Example 8: getTaskRequests
import io.digdag.spi.TaskRequest; // import the required package/class
public List<TaskRequest> getTaskRequests(List<TaskQueueLock> locks)
{
ImmutableList.Builder<TaskRequest> builder = ImmutableList.builder();
for (TaskQueueLock lock : locks) {
try {
long taskId = parseTaskIdFromEncodedQueuedTaskName(lock.getUniqueName());
Optional<TaskRequest> request = getTaskRequest(taskId, lock.getLockId());
if (request.isPresent()) {
builder.add(request.get());
}
else {
dispatcher.deleteInconsistentTask(lock.getLockId());
}
}
catch (RuntimeException ex) {
tm.reset();
logger.error("Invalid association of task queue lock id: {}", lock, ex);
}
}
return builder.build();
}
Example 9: run
import io.digdag.spi.TaskRequest; // import the required package/class
@Override
public void run(TaskRequest request)
{
String fullName = request.getTaskName();
TaskResult result = cmd.skipTaskReports.apply(fullName);
String origThreadName = String.format("[%d:%s]%s", request.getSiteId(), request.getProjectName().or("----"), request.getTaskName());
if (result != null) {
try (SetThreadName threadName = new SetThreadName(origThreadName)) {
logger.warn("Skipped");
}
callback.taskSucceeded(request.getSiteId(),
request.getTaskId(), request.getLockId(), agentId,
result);
}
else {
super.run(request);
}
}
Example 10: callExecutor
import io.digdag.spi.TaskRequest; // import the required package/class
@Override
protected TaskResult callExecutor(Path workspacePath, String type, TaskRequest mergedRequest)
{
if (cmd.showParams) {
StringBuilder sb = new StringBuilder();
for (String line : yamlMapper.toYaml(mergedRequest.getConfig()).split("\n")) {
sb.append(" ").append(line).append("\n");
}
logger.warn("\n{}", sb.toString());
}
if (cmd.dryRun) {
return TaskResult.empty(cf);
}
else {
return super.callExecutor(workspacePath, type, mergedRequest);
}
}
Example 11: RetzOperatorConfig
import io.digdag.spi.TaskRequest; // import the required package/class
RetzOperatorConfig(TaskRequest taskRequest, Config systemConfig) {
this.taskRequest = taskRequest;
this.retzConfig = taskRequest.getConfig().mergeDefault(
taskRequest.getConfig().getNestedOrGetEmpty(KEY_CONFIG_ROOT));
this.systemConfig = systemConfig;
// TODO interim fix for error log: "Parameter 'xx' is not used at task ..."
dumpConfig();
}
Example 12: generateDefaultJobName
import io.digdag.spi.TaskRequest; // import the required package/class
private String generateDefaultJobName(TaskRequest request) {
/*
* prefix of default job name:
* <attempt-id>
*
* default job name:
* (1) <prefix-head>..
* (2) <prefix>
* (3) <prefix>:..<task-name-tail>
* (4) <prefix>:<task-name>
*/
String prefix = String.format("%d", request.getAttemptId());
String taskName = request.getTaskName();
// (1) <prefix-head>..
if (prefix.length() > RETZ_NAME_MAX) {
return prefix.substring(0, RETZ_NAME_MAX - NAME_ELLIPSIS.length()) + NAME_ELLIPSIS;
}
// (2) <prefix>
// we avoid non-ASCII characters for Retz database
if (prefix.length() > RETZ_NAME_MAX - 2 - NAME_ELLIPSIS.length()
|| taskName.chars().anyMatch(it -> it > 0x7f)) {
return prefix;
}
String candidate = String.format("%s:%s", prefix, taskName);
// (3) <prefix>:..<task-name-tail>
if (candidate.length() > RETZ_NAME_MAX) {
String prefixPlus = String.format("%s:%s", prefix, NAME_ELLIPSIS);
String taskNameTail = taskName.substring(
taskName.length() - (RETZ_NAME_MAX - prefixPlus.length()), taskName.length());
return String.format("%s%s", prefixPlus, taskNameTail);
}
// (4) <prefix>:<task-name>
return candidate;
}
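To make the four naming cases concrete, here is a small worked sketch assuming RETZ_NAME_MAX = 32 and NAME_ELLIPSIS = ".."; both constants are defined elsewhere in the original class, so these values are assumptions:

// Worked sketch, assuming RETZ_NAME_MAX = 32, NAME_ELLIPSIS = "..", attemptId = 1234.
// (4) a short ASCII task name fits as-is:
//     "+wf+step1"                          -> "1234:+wf+step1"
// (3) a long ASCII task name keeps the prefix plus the tail of the task name:
//     "+my_workflow+load_data+insert_rows" -> "1234:..low+load_data+insert_rows"
// (2) a non-ASCII task name falls back to the prefix only:
//     "+wf+日次集計"                        -> "1234"
// (1) a prefix longer than RETZ_NAME_MAX would itself be truncated to "<prefix-head>.."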
Example 13: BaseBqOperator
import io.digdag.spi.TaskRequest; // import the required package/class
protected BaseBqOperator(Path projectPath, TaskRequest request, BqWaitClient.Factory clientFactory, GcpCredentialProvider credentialProvider)
{
super(projectPath, request, credentialProvider);
this.clientFactory = clientFactory;
this.params = request.getConfig()
.mergeDefault(request.getConfig().getNestedOrGetEmpty("bq"));
}
Example 14: BqJobRunner
import io.digdag.spi.TaskRequest; // import the required package/class
BqJobRunner(TaskRequest request, BqClient bq, String projectId)
{
this.request = Objects.requireNonNull(request, "request");
this.bq = Objects.requireNonNull(bq, "bq");
this.state = TaskState.of(request);
this.projectId = Objects.requireNonNull(projectId, "projectId");
}
Example 15: startBulkLoadSession
import io.digdag.spi.TaskRequest; // import the required package/class
private String startBulkLoadSession(TDOperator op, Config params, String name, TaskRequest request, String domainKey)
{
// TODO: TDOperator requires database to be configured but the database param is not necessary when using a connector session
TDBulkLoadSessionStartRequest req = TDBulkLoadSessionStartRequest.builder()
.setScheduledTime(request.getSessionTime().getEpochSecond())
.setDomainKey(domainKey)
.build();
String jobId = op.submitNewJobWithRetry(client -> client.startBulkLoadSession(name, req).getJobId());
logger.info("Started bulk load session job name={}, id={}", name, jobId);
return jobId;
}