This article collects typical usage examples of the Java annotation org.apache.nifi.annotation.lifecycle.OnScheduled. If you have been wondering what OnScheduled is for, how to use it, or where to find real-world examples, the curated code samples below may help.
OnScheduled belongs to the org.apache.nifi.annotation.lifecycle package. The 15 code examples below are sorted by popularity by default.
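Before the examples, a minimal sketch of where @OnScheduled sits in the component lifecycle may be useful: the framework invokes every method carrying the annotation each time the component is scheduled to run, before any call to onTrigger, which makes it the conventional place to read properties and initialize expensive resources. The processor, property, and field names below are illustrative assumptions, not taken from any of the examples that follow:

import java.util.Collections;
import java.util.List;
import java.util.concurrent.atomic.AtomicReference;
import org.apache.nifi.annotation.lifecycle.OnScheduled;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.processor.AbstractProcessor;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.processor.util.StandardValidators;

public class GreetingProcessor extends AbstractProcessor {

    // Hypothetical property, present only to show configuration being read once per schedule.
    static final PropertyDescriptor GREETING = new PropertyDescriptor.Builder()
            .name("Greeting")
            .description("Text cached when the processor is scheduled")
            .required(true)
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .build();

    // Written by the framework thread running @OnScheduled, read by onTrigger threads.
    private final AtomicReference<String> greeting = new AtomicReference<>();

    @Override
    protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
        return Collections.singletonList(GREETING);
    }

    @OnScheduled
    public void onScheduled(final ProcessContext context) {
        // Runs once when the processor is scheduled, before any onTrigger invocation:
        // cache the property value so per-flow-file work does not re-read it.
        greeting.set(context.getProperty(GREETING).getValue());
    }

    @Override
    public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
        // Per-flow-file work would use the value cached above.
    }
}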
Example 1: onScheduled
import org.apache.nifi.annotation.lifecycle.OnScheduled; // import the dependent package/class
@OnScheduled
public void onScheduled(final ProcessContext context) {
    try {
        topic = context.getProperty(TOPIC).getValue();
        groupName = context.getProperty(CONSUMER_GROUP_NAME).getValue();
        brokerIP = context.getProperty(BROKERIP).getValue();
        // Build the Kafka consumer configuration from the processor properties.
        props = new Properties();
        props.put("bootstrap.servers", brokerIP);
        props.put("group.id", groupName);
        props.put("enable.auto.commit", "true");
        props.put("auto.commit.interval.ms", "1000");
        props.put("session.timeout.ms", "30000");
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("auto.offset.reset", "earliest");
        consumer = new KafkaConsumer<>(props);
        consumer.subscribe(Arrays.asList(topic));
    } catch (Exception e) {
        e.printStackTrace();
    }
}
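Note that this example opens a KafkaConsumer but never closes it. In a complete processor, the companion lifecycle annotation @OnStopped (from the same package) is the usual place to release such resources. A minimal sketch, assuming the consumer field above; the method name is illustrative:

import org.apache.nifi.annotation.lifecycle.OnStopped;

@OnStopped
public void onStopped() {
    // Release the consumer created in onScheduled once the processor stops.
    if (consumer != null) {
        consumer.close();
        consumer = null;
    }
}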
Example 2: onScheduled
import org.apache.nifi.annotation.lifecycle.OnScheduled; // import the dependent package/class
@OnScheduled
public void onScheduled(final ProcessContext context) {
    try {
        topic = context.getProperty(TOPIC).getValue();
        brokerIP = context.getProperty(BROKERIP).getValue();
        // Build the Kafka producer configuration from the processor properties.
        props = new Properties();
        props.put("bootstrap.servers", brokerIP);
        props.put("acks", "all");
        props.put("retries", 0);
        props.put("batch.size", 16384);
        props.put("linger.ms", 1);
        props.put("buffer.memory", 33554432);
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        producer = new KafkaProducer<>(props);
    } catch (Exception e) {
        e.printStackTrace();
    }
}
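As with the consumer in Example 1, the producer created here would normally be released when the processor stops. A sketch under the same assumptions; KafkaProducer.close() also flushes buffered records before returning:

import org.apache.nifi.annotation.lifecycle.OnStopped;

@OnStopped
public void onStopped() {
    // Flush pending records and release the producer created in onScheduled.
    if (producer != null) {
        producer.close();
        producer = null;
    }
}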
Example 3: setup
import org.apache.nifi.annotation.lifecycle.OnScheduled; // import the dependent package/class
/**
 * Performs setup operations when the processor is scheduled to run. This includes evaluating the processor's
 * properties, as well as reloading the script (from file or the "Script Body" property).
 *
 * @param context the context in which to perform the setup operations
 */
@OnScheduled
public void setup(final ProcessContext context) {
    scriptingComponentHelper.setupVariables(context);
    // Create a script engine for each possible task
    int maxTasks = context.getMaxConcurrentTasks();
    scriptingComponentHelper.setup(maxTasks, getLogger());
    scriptToRun = scriptingComponentHelper.getScriptBody();
    try {
        if (scriptToRun == null && scriptingComponentHelper.getScriptPath() != null) {
            try (final FileInputStream scriptStream = new FileInputStream(scriptingComponentHelper.getScriptPath())) {
                scriptToRun = IOUtils.toString(scriptStream, Charset.defaultCharset());
            }
        }
    } catch (IOException ioe) {
        throw new ProcessException(ioe);
    }
}
Example 4: onScheduled
import org.apache.nifi.annotation.lifecycle.OnScheduled; // import the dependent package/class
@OnScheduled
public void onScheduled(final ProcessContext context) {
    // Parse the comma-separated "field=mappedName" pairs into an immutable map.
    Map<String, String> mFieldMappings = new HashMap<>();
    String fieldMappingsProp = context.getProperty(FIELD_MAPPINGS).getValue();
    String[] fieldMappingsSplit = fieldMappingsProp.split(",");
    for (String fieldMapping : fieldMappingsSplit) {
        int equalsIndex = fieldMapping.indexOf('=');
        String fieldName = fieldMapping.substring(0, equalsIndex);
        String mappedName = fieldMapping.substring(equalsIndex + 1);
        mFieldMappings.put(fieldName, mappedName);
    }
    this.fieldMappings = Collections.unmodifiableMap(mFieldMappings);
    // Parse the optional comma-separated list of fields to exclude.
    Set<String> mExcludeFields = new HashSet<>();
    String excludeFields = context.getProperty(EXCLUDE_FIELDS).getValue();
    if (excludeFields != null && !excludeFields.isEmpty()) {
        String[] excludeFieldsSplit = excludeFields.split(",");
        for (String excludeField : excludeFieldsSplit) {
            mExcludeFields.add(excludeField);
        }
    }
    this.excludeFields = Collections.unmodifiableSet(mExcludeFields);
}
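For instance, an illustrative FIELD_MAPPINGS value of ssn=socialSecurityNumber,dob=dateOfBirth yields a two-entry map, and an EXCLUDE_FIELDS value of password,secret excludes both fields. Note that the parsing assumes every mapping contains an '=': for a malformed pair, indexOf returns -1 and the substring call throws a StringIndexOutOfBoundsException.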
Example 5: onScheduled
import org.apache.nifi.annotation.lifecycle.OnScheduled; // import the dependent package/class
@OnScheduled
public void onScheduled(final ProcessContext context) {
    // Capture the enclosing processor so the anonymous resource locator can call back into it.
    final PutFileFromTemplate processor = this;
    jsonMapper = new ObjectMapper();
    jsonFactory = jsonMapper.getFactory();
    JinjavaConfig config = new JinjavaConfig();
    jinjava = new Jinjava(config);
    jinjava.setResourceLocator(new ResourceLocator() {
        @Override
        public String getString(String fullName, Charset encoding, JinjavaInterpreter interpreter) throws IOException {
            String pathToResource = processor.pathToResource(context, fullName);
            try {
                return FileUtils.readFileToString(new File(pathToResource), encoding);
            } catch (IOException e) {
                return null;
            }
        }
    });
}
Example 6: setupExecutor
import org.apache.nifi.annotation.lifecycle.OnScheduled; // import the dependent package/class
@OnScheduled
public void setupExecutor(final ProcessContext context) {
    batchMap = new HashMap<>();
    // Allocate two worker threads per concurrent task, each named after this processor's identifier.
    executor = Executors.newFixedThreadPool(context.getMaxConcurrentTasks() * 2, new ThreadFactory() {
        private final ThreadFactory defaultFactory = Executors.defaultThreadFactory();

        @Override
        public Thread newThread(final Runnable r) {
            final Thread t = defaultFactory.newThread(r);
            t.setName("ExecuteProcess " + getIdentifier() + " Task");
            return t;
        }
    });
}
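The pool created here is never torn down in the snippet; a complete processor would typically shut it down when stopped. A minimal sketch, with the @OnStopped method being an assumption rather than part of the original example:

import org.apache.nifi.annotation.lifecycle.OnStopped;

@OnStopped
public void shutdownExecutor() {
    // Stop accepting new tasks; worker threads exit once queued work drains.
    executor.shutdown();
}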
Example 7: onScheduled
import org.apache.nifi.annotation.lifecycle.OnScheduled; // import the dependent package/class
@OnScheduled
public void onScheduled(final ProcessContext context) {
    print_indentation = context.getProperty(PRINT_INDENTATION).getValue();
    max_recursiveDepth = Integer.valueOf(context.getProperty(RECURSIVE_DEPTH).getValue());
    starting_node = context.getProperty(STARTING_NODE).getValue();
}
Example 8: setup
import org.apache.nifi.annotation.lifecycle.OnScheduled; // import the dependent package/class
@OnScheduled
public void setup(ProcessContext context) {
    // If the query is not set, an incoming flow file is required; otherwise fail initialization.
    if (!context.getProperty(SQL_SELECT_QUERY).isSet() && !context.hasIncomingConnection()) {
        final String errorString = "Either the Select Query must be specified or there must be an incoming " +
                "connection providing flowfile(s) containing a SQL select query";
        getLogger().error(errorString);
        throw new ProcessException(errorString);
    }
}
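An exception thrown from an @OnScheduled method prevents the component from starting; NiFi logs the failure and retries the method after a delay, so this check keeps a misconfigured processor from ever reaching onTrigger.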
Example 9: updateState
import org.apache.nifi.annotation.lifecycle.OnScheduled; // import the dependent package/class
@OnScheduled
public final void updateState(final ProcessContext context) throws IOException {
    final String path = getPath(context);
    final DistributedMapCacheClient client = context.getProperty(DISTRIBUTED_CACHE_SERVICE).asControllerService(DistributedMapCacheClient.class);

    // Check if state already exists for this path. If so, we have already migrated the state.
    final StateMap stateMap = context.getStateManager().getState(getStateScope(context));
    if (stateMap.getVersion() == -1L) {
        try {
            // Migrate state from the old way of managing state (distributed cache service and local file)
            // to the new mechanism (State Manager).
            migrateState(path, client, context.getStateManager(), getStateScope(context));
        } catch (final IOException ioe) {
            throw new IOException("Failed to properly migrate state to State Manager", ioe);
        }
    }

    // When scheduled to run, check if the associated timestamp is null, signifying a clearing of state,
    // and reset the internal timestamp.
    if (lastListingTime != null && stateMap.get(LISTING_TIMESTAMP_KEY) == null) {
        getLogger().info("Detected that state was cleared for this component. Resetting internal values.");
        resetTimeStates();
    }

    if (resetState) {
        context.getStateManager().clear(getStateScope(context));
        resetState = false;
    }
}
Example 10: setup
import org.apache.nifi.annotation.lifecycle.OnScheduled; // import the dependent package/class
@OnScheduled
public void setup(ConfigurationContext context) throws IOException {
    // initAtlasClient has to be done first as it loads AtlasProperty.
    initAtlasClient(context);
    initProvenanceConsumer(context);
    String strategy = context.getProperty(NIFI_LINEAGE_STRATEGY).getValue();
    lineageStrategy = "ByFlowFile".equals(strategy) ? LineageStrategy.BY_FILE : LineageStrategy.BY_PATH;
}
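Note the parameter type: this setup method receives a ConfigurationContext rather than a ProcessContext, because @OnScheduled applies to reporting tasks as well as processors, and the framework supplies whichever context type matches the component.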
Example 11: onScheduled
import org.apache.nifi.annotation.lifecycle.OnScheduled; // import the dependent package/class
@OnScheduled
public void onScheduled(final ProcessContext context) {
    // Pre-parse the JSON template and record which paths require templating.
    String content = context.getProperty(CONTENT_FIELD).getValue().trim();
    JsonObject json = (JsonObject) JSON_PROVIDER.parse(content);
    pathsForTemplating = JsonUtil.getJsonPathsForTemplating(json);
}
Example 12: onScheduled
import org.apache.nifi.annotation.lifecycle.OnScheduled; // import the dependent package/class
@OnScheduled
public void onScheduled(final ProcessContext context) {
    url = context.getProperty(WEBSITE_URL).getValue();
    timeout = context.getProperty(TIMEOUT).asTimePeriod(TimeUnit.SECONDS);
    selectorType = context.getProperty(SELECTOR_TYPE).getValue();
    selector = context.getProperty(SELECTOR).getValue();
}
Example 13: onScheduled
import org.apache.nifi.annotation.lifecycle.OnScheduled; // import the dependent package/class
@OnScheduled
public void onScheduled(final ProcessContext context) {
    batchSize = context.getProperty(BATCH_SIZE).asInteger();
    reportingIntervalMillis = context.getProperty(REPORTING_INTERVAL).asTimePeriod(TimeUnit.MILLISECONDS);
    PropertyValue correlationAttrProp = context.getProperty(CORRELATION_ATTR);
    correlationKey = correlationAttrProp.isSet() ? correlationAttrProp.getValue() : DEFAULT_MOMENT_AGGREGATOR_KEY;
    momentsMap = new ConcurrentHashMap<>();
    latestStats = new ConcurrentHashMap<>();
}
Example 14: onScheduled
import org.apache.nifi.annotation.lifecycle.OnScheduled; // import the dependent package/class
@OnScheduled
public void onScheduled(final ProcessContext context) throws IOException {
    // Start a fresh time window each time the processor is scheduled.
    context.getStateManager().clear(Scope.LOCAL);
    this.stopWatch = new CheckedStopWatch(context.getProperty(TIME_WINDOW).asTimePeriod(TimeUnit.MILLISECONDS));
    this.stopWatch.start();
}
Example 15: onScheduled
import org.apache.nifi.annotation.lifecycle.OnScheduled; // import the dependent package/类
@OnScheduled
public void onScheduled(final ProcessContext context) {
    // Cache property references and establish the MongoDB connection up front.
    queryProperty = context.getProperty(QUERY);
    projectionProperty = context.getProperty(PROJECTION);
    sortProperty = context.getProperty(SORT);
    limit = context.getProperty(LIMIT).isSet() ? context.getProperty(LIMIT).asInteger() : null;
    createMongoConnection(context);
    ensureIndexes(context, collection);
}