

Java OffsetStorageReader Class Code Examples

This article collects and summarizes typical usage examples of the Java class org.apache.kafka.connect.storage.OffsetStorageReader. If you have been wondering what the OffsetStorageReader class is for, how to use it, or what real-world usage looks like, the curated class examples below should help.


The OffsetStorageReader class belongs to the org.apache.kafka.connect.storage package. This article presents 15 code examples of the class, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Java code examples.
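
Before diving into the examples, here is a minimal usage sketch (not taken from any of the projects below; the partition key "filename" and offset key "position" are hypothetical). A SourceTask receives a SourceTaskContext through initialize(), obtains an OffsetStorageReader from it, and looks up the last committed offset for a logical source partition so it can resume where it left off:

import java.util.Collections;
import java.util.List;
import java.util.Map;

import org.apache.kafka.connect.source.SourceRecord;
import org.apache.kafka.connect.source.SourceTask;
import org.apache.kafka.connect.storage.OffsetStorageReader;

public class ExampleSourceTask extends SourceTask {
    private long position = 0L;

    @Override
    public void start(Map<String, String> props) {
        // The Connect framework calls initialize(context) before start(),
        // so the inherited "context" field is already set here.
        OffsetStorageReader reader = context.offsetStorageReader();
        // Look up the last committed offset for this logical source partition.
        Map<String, Object> offset =
                reader.offset(Collections.singletonMap("filename", props.get("filename")));
        if (offset != null && offset.get("position") != null) {
            position = (Long) offset.get("position"); // resume from the stored position
        }
    }

    @Override
    public List<SourceRecord> poll() throws InterruptedException {
        return null; // record production is omitted in this sketch
    }

    @Override
    public void stop() {
    }

    @Override
    public String version() {
        return "0.0.1";
    }
}

Note that offset() returns null when no offset has yet been committed for that partition, which is why example 1 below guards the lookup with a null check.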

Example 1: offer

import org.apache.kafka.connect.storage.OffsetStorageReader; // import the required package/class
@Override
public FileReader offer(FileMetadata metadata, OffsetStorageReader offsetStorageReader) throws IOException {
    Map<String, Object> partition = new HashMap<String, Object>() {{
        put("path", metadata.getPath());
        //TODO manage blocks
        //put("blocks", metadata.getBlocks().toString());
    }};

    FileSystem current = fileSystems.stream()
            .filter(fs -> metadata.getPath().startsWith(fs.getWorkingDirectory().toString()))
            .findFirst().orElse(null);

    FileReader reader;
    try {
        reader = ReflectionUtils.makeReader((Class<? extends FileReader>) conf.getClass(FsSourceTaskConfig.FILE_READER_CLASS),
                current, new Path(metadata.getPath()), conf.originals());
    } catch (Throwable t) {
        throw new ConnectException("An error has occurred when creating reader for file: " + metadata.getPath(), t);
    }

    Map<String, Object> offset = offsetStorageReader.offset(partition);
    if (offset != null && offset.get("offset") != null) {
        reader.seek(() -> (Long) offset.get("offset"));
    }
    return reader;
}
 
Developer: mmolimar, Project: kafka-connect-fs, Lines: 27, Source: AbstractPolicy.java
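
A hedged complement to example 1 (this sketch is not part of the kafka-connect-fs project; the topic name, path, and offset value are made up): the map returned by offsetStorageReader.offset(partition) is simply the sourceOffset map that the task previously attached to a SourceRecord with a matching sourcePartition, so the "path"/"offset" keys above must agree with the keys used when records were produced.

import java.util.Collections;
import java.util.Map;

import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.source.SourceRecord;

public class OffsetWritingSketch {
    // Builds a record whose committed offset can later be read back via
    // offsetStorageReader.offset(Collections.singletonMap("path", path)).
    static SourceRecord recordWithOffset(String path, long offsetInFile, String line) {
        Map<String, String> sourcePartition = Collections.singletonMap("path", path);      // same key as example 1
        Map<String, Long> sourceOffset = Collections.singletonMap("offset", offsetInFile); // read back as offset.get("offset")
        return new SourceRecord(sourcePartition, sourceOffset,
                "files-topic", Schema.STRING_SCHEMA, line);
    }
}

Once the framework commits such a record, a later offset(partition) lookup with the same "path" map returns the stored "offset" value, which example 1 then passes to reader.seek(...).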

Example 2: before

import org.apache.kafka.connect.storage.OffsetStorageReader; // import the required package/class
@BeforeEach
public void before() {
  this.sourceTaskContext = mock(SourceTaskContext.class);
  this.offsetStorageReader = mock(OffsetStorageReader.class);
  when(this.sourceTaskContext.offsetStorageReader()).thenReturn(this.offsetStorageReader);
  this.task = new KinesisSourceTask();
  this.task.initialize(this.sourceTaskContext);
  this.kinesisClient = mock(AmazonKinesis.class);
  this.task.time = mock(Time.class);
  this.task.kinesisClientFactory = mock(KinesisClientFactory.class);
  when(this.task.kinesisClientFactory.create(any())).thenReturn(this.kinesisClient);

  this.settings = TestData.settings();
  this.config = new KinesisSourceConnectorConfig(this.settings);

}
 
Developer: jcustenborder, Project: kafka-connect-kinesis, Lines: 17, Source: KinesisSourceTaskTest.java

Example 3: setup

import org.apache.kafka.connect.storage.OffsetStorageReader; // import the required package/class
@Before
public void setup() throws IOException, SQLException {
    String mysqlHost = "10.100.172.86";
    connection = DriverManager.getConnection("jdbc:mysql://" + mysqlHost + ":3306/mysql", "root", "passwd");
    
    config = new HashMap<>();
    config.put(MySqlSourceConnector.USER_CONFIG, "maxwell");
    config.put(MySqlSourceConnector.PASSWORD_CONFIG, "XXXXXX");
    config.put(MySqlSourceConnector.PORT_CONFIG, "3306");
    config.put(MySqlSourceConnector.HOST_CONFIG, mysqlHost);
    
    task = new MySqlSourceTask();
    offsetStorageReader = PowerMock.createMock(OffsetStorageReader.class);
    context = PowerMock.createMock(SourceTaskContext.class);
    task.initialize(context);

    runSql("drop table if exists test.users");
    runSql("drop database if exists test");
}
 
Developer: wushujames, Project: kafka-mysql-connector, Lines: 20, Source: MySqlSourceTaskTest.java

Example 4: setup

import org.apache.kafka.connect.storage.OffsetStorageReader; // import the required package/class
@Before
public void setup() throws IOException {
    tempFile = File.createTempFile("file-stream-source-task-test", null);
    config = new HashMap<>();
    config.put(FileStreamSourceConnector.FILE_CONFIG, tempFile.getAbsolutePath());
    config.put(FileStreamSourceConnector.TOPIC_CONFIG, TOPIC);
    task = new FileStreamSourceTask();
    offsetStorageReader = PowerMock.createMock(OffsetStorageReader.class);
    context = PowerMock.createMock(SourceTaskContext.class);
    task.initialize(context);
}
 
Developer: wngn123, Project: wngn-jms-kafka, Lines: 12, Source: FileStreamSourceTaskTest.java

Example 5: WorkerSourceTask

import org.apache.kafka.connect.storage.OffsetStorageReader; // import the required package/class
public WorkerSourceTask(ConnectorTaskId id,
                        SourceTask task,
                        TaskStatus.Listener statusListener,
                        TargetState initialState,
                        Converter keyConverter,
                        Converter valueConverter,
                        TransformationChain<SourceRecord> transformationChain,
                        KafkaProducer<byte[], byte[]> producer,
                        OffsetStorageReader offsetReader,
                        OffsetStorageWriter offsetWriter,
                        WorkerConfig workerConfig,
                        ClassLoader loader,
                        Time time) {
    super(id, statusListener, initialState, loader);

    this.workerConfig = workerConfig;
    this.task = task;
    this.keyConverter = keyConverter;
    this.valueConverter = valueConverter;
    this.transformationChain = transformationChain;
    this.producer = producer;
    this.offsetReader = offsetReader;
    this.offsetWriter = offsetWriter;
    this.time = time;

    this.toSend = null;
    this.lastSendFailed = false;
    this.outstandingMessages = new IdentityHashMap<>();
    this.outstandingMessagesBacklog = new IdentityHashMap<>();
    this.flushing = false;
    this.stopRequestedLatch = new CountDownLatch(1);
}
 
Developer: YMCoding, Project: kafka-0.11.0.0-src-with-comment, Lines: 33, Source: WorkerSourceTask.java

Example 6: setup

import org.apache.kafka.connect.storage.OffsetStorageReader; // import the required package/class
@BeforeEach
public void setup(
    @OracleSettings Map<String, String> settings
) throws Exception {
  this.config = new OracleSourceConnectorConfig(settings);
  this.offsetStorageReader = mock(OffsetStorageReader.class);
  this.changeWriter = mock(ChangeWriter.class);
  this.queryService = new QueryService(this.config, this.offsetStorageReader, this.changeWriter);
  this.serviceManager = new ServiceManager(Arrays.asList(this.queryService));
}
 
Developer: jcustenborder, Project: kafka-connect-cdc-oracle, Lines: 11, Source: QueryServiceTest.java

Example 7: setup

import org.apache.kafka.connect.storage.OffsetStorageReader; // import the required package/class
@BeforeEach
public void setup(
    @OracleSettings
        Map<String, String> settings
) {
  this.config = new OracleSourceConnectorConfig(settings);
  this.offsetStorageReader = mock(OffsetStorageReader.class);
  this.tableMetadataProvider = new Oracle12cTableMetadataProvider(this.config, this.offsetStorageReader);
}
 
Developer: jcustenborder, Project: kafka-connect-cdc-oracle, Lines: 10, Source: Oracle12cTableMetadataProviderTest.java

Example 8: loadAndGetOffsets

import org.apache.kafka.connect.storage.OffsetStorageReader; // import the required package/class
private Map<Map<String, String>, Map<String, Object>> loadAndGetOffsets(OffsetStorageReader reader, String jobUrls) {
    String[] jobUrlArray = jobUrls.split(",");

    logger.debug("Total jobs: {}. Loading offsets from Connect.", jobUrlArray.length);
    Collection<Map<String, String>> partitions = new ArrayList<>(jobUrlArray.length);
    for (String jobUrl : jobUrlArray) {
        partitions.add(Collections.singletonMap(JenkinsSourceTask.JOB_NAME, urlDecode(extractJobName(jobUrl))));
    }
    return reader.offsets(partitions);
}
 
Developer: yaravind, Project: kafka-connect-jenkins, Lines: 11, Source: ReadYourWritesOffsetStorageAdapter.java

Example 9: initTask

import org.apache.kafka.connect.storage.OffsetStorageReader; // import the required package/class
@Before
public void initTask() {
    task = new FsSourceTask();
    taskConfig = new HashMap<String, String>() {{
        String uris[] = directories.stream().map(dir -> dir.toString())
                .toArray(size -> new String[size]);
        put(FsSourceTaskConfig.FS_URIS, String.join(",", uris));
        put(FsSourceTaskConfig.TOPIC, "topic_test");
        put(FsSourceTaskConfig.POLICY_CLASS, SimplePolicy.class.getName());
        put(FsSourceTaskConfig.FILE_READER_CLASS, TextFileReader.class.getName());
        put(FsSourceTaskConfig.POLICY_REGEXP, "^[0-9]*\\.txt$");
    }};

    //Mock initialization
    taskContext = PowerMock.createMock(SourceTaskContext.class);
    offsetStorageReader = PowerMock.createMock(OffsetStorageReader.class);

    EasyMock.expect(taskContext.offsetStorageReader())
            .andReturn(offsetStorageReader);

    EasyMock.expect(taskContext.offsetStorageReader())
            .andReturn(offsetStorageReader);

    EasyMock.expect(offsetStorageReader.offset(EasyMock.anyObject()))
            .andReturn(new HashMap<String, Object>() {{
                put("offset", 5L);
            }});
    EasyMock.expect(offsetStorageReader.offset(EasyMock.anyObject()))
            .andReturn(new HashMap<String, Object>() {{
                put("offset", 5L);
            }});

    EasyMock.checkOrder(taskContext, false);
    EasyMock.replay(taskContext);

    EasyMock.checkOrder(offsetStorageReader, false);
    EasyMock.replay(offsetStorageReader);

    task.initialize(taskContext);

}
 
Developer: mmolimar, Project: kafka-connect-fs, Lines: 42, Source: FsSourceTaskTestBase.java

Example 10: MsSqlTableMetadataProvider

import org.apache.kafka.connect.storage.OffsetStorageReader; // import the required package/class
public MsSqlTableMetadataProvider(MsSqlSourceConnectorConfig config, OffsetStorageReader offsetStorageReader) {
  super(config, offsetStorageReader);
}
 
Developer: jcustenborder, Project: kafka-connect-cdc-mssql, Lines: 4, Source: MsSqlTableMetadataProvider.java

Example 11: service

import org.apache.kafka.connect.storage.OffsetStorageReader; // import the required package/class
@Override
protected Service service(ChangeWriter changeWriter, OffsetStorageReader offsetStorageReader) {
  this.changeWriter = changeWriter;
  this.tableMetadataProvider = new MsSqlTableMetadataProvider(this.config, offsetStorageReader);
  return new QueryService(this.time, this.tableMetadataProvider, this.config, this.changeWriter);
}
 
Developer: jcustenborder, Project: kafka-connect-cdc-mssql, Lines: 7, Source: MsSqlSourceTask.java

Example 12: before

import org.apache.kafka.connect.storage.OffsetStorageReader; // import the required package/class
@BeforeEach
public void before(@MsSqlSettings Map<String, String> settings) {
  this.config = new MsSqlSourceConnectorConfig(settings);
  this.offsetStorageReader = mock(OffsetStorageReader.class);
  this.tableMetadataProvider = new MsSqlTableMetadataProvider(this.config, this.offsetStorageReader);
}
 
Developer: jcustenborder, Project: kafka-connect-cdc-mssql, Lines: 7, Source: MsSqlTableMetadataProviderTest.java

Example 13: WorkerSourceTaskContext

import org.apache.kafka.connect.storage.OffsetStorageReader; // import the required package/class
public WorkerSourceTaskContext(OffsetStorageReader reader) {
    this.reader = reader;
}
 
Developer: YMCoding, Project: kafka-0.11.0.0-src-with-comment, Lines: 4, Source: WorkerSourceTaskContext.java

Example 14: offsetStorageReader

import org.apache.kafka.connect.storage.OffsetStorageReader; // import the required package/class
@Override
public OffsetStorageReader offsetStorageReader() {
    return reader;
}
 
Developer: YMCoding, Project: kafka-0.11.0.0-src-with-comment, Lines: 5, Source: WorkerSourceTaskContext.java

Example 15: testAddRemoveTask

import org.apache.kafka.connect.storage.OffsetStorageReader; // import the required package/class
@Test
public void testAddRemoveTask() throws Exception {
    expectConverters();
    expectStartStorage();

    // Create
    TestSourceTask task = PowerMock.createMock(TestSourceTask.class);
    WorkerSourceTask workerTask = PowerMock.createMock(WorkerSourceTask.class);
    EasyMock.expect(workerTask.id()).andStubReturn(TASK_ID);

    EasyMock.expect(plugins.currentThreadLoader()).andReturn(delegatingLoader).times(2);
    PowerMock.expectNew(
            WorkerSourceTask.class, EasyMock.eq(TASK_ID),
            EasyMock.eq(task),
            EasyMock.anyObject(TaskStatus.Listener.class),
            EasyMock.eq(TargetState.STARTED),
            EasyMock.anyObject(JsonConverter.class),
            EasyMock.anyObject(JsonConverter.class),
            EasyMock.eq(TransformationChain.<SourceRecord>noOp()),
            EasyMock.anyObject(KafkaProducer.class),
            EasyMock.anyObject(OffsetStorageReader.class),
            EasyMock.anyObject(OffsetStorageWriter.class),
            EasyMock.eq(config),
            EasyMock.anyObject(ClassLoader.class),
            EasyMock.anyObject(Time.class))
            .andReturn(workerTask);
    Map<String, String> origProps = new HashMap<>();
    origProps.put(TaskConfig.TASK_CLASS_CONFIG, TestSourceTask.class.getName());

    TaskConfig taskConfig = new TaskConfig(origProps);
    // We should expect this call, but the pluginLoader being swapped in is only mocked.
    // EasyMock.expect(pluginLoader.loadClass(TestSourceTask.class.getName()))
    //        .andReturn((Class) TestSourceTask.class);
    EasyMock.expect(plugins.newTask(TestSourceTask.class)).andReturn(task);
    EasyMock.expect(task.version()).andReturn("1.0");

    workerTask.initialize(taskConfig);
    EasyMock.expectLastCall();
    // We should expect this call, but the pluginLoader being swapped in is only mocked.
    // Serializers for the Producer that the task generates. These are loaded while the PluginClassLoader is active
    // and then delegated to the system classloader. This is only called once due to caching
    // EasyMock.expect(pluginLoader.loadClass(ByteArraySerializer.class.getName()))
    //        .andReturn((Class) ByteArraySerializer.class);

    workerTask.run();
    EasyMock.expectLastCall();

    EasyMock.expect(plugins.delegatingLoader()).andReturn(delegatingLoader);
    EasyMock.expect(delegatingLoader.connectorLoader(WorkerTestConnector.class.getName()))
            .andReturn(pluginLoader);

    EasyMock.expect(Plugins.compareAndSwapLoaders(pluginLoader)).andReturn(delegatingLoader)
            .times(2);

    EasyMock.expect(workerTask.loader()).andReturn(pluginLoader);

    EasyMock.expect(Plugins.compareAndSwapLoaders(delegatingLoader)).andReturn(pluginLoader)
            .times(2);

    // Remove
    workerTask.stop();
    EasyMock.expectLastCall();
    EasyMock.expect(workerTask.awaitStop(EasyMock.anyLong())).andStubReturn(true);
    EasyMock.expectLastCall();

    expectStopStorage();

    PowerMock.replayAll();

    worker = new Worker(WORKER_ID, new MockTime(), plugins, config, offsetBackingStore);
    worker.start();
    assertEquals(Collections.emptySet(), worker.taskIds());
    worker.startTask(TASK_ID, anyConnectorConfigMap(), origProps, taskStatusListener, TargetState.STARTED);
    assertEquals(new HashSet<>(Arrays.asList(TASK_ID)), worker.taskIds());
    worker.stopAndAwaitTask(TASK_ID);
    assertEquals(Collections.emptySet(), worker.taskIds());
    // Nothing should be left, so this should effectively be a nop
    worker.stop();

    PowerMock.verifyAll();
}
 
Developer: YMCoding, Project: kafka-0.11.0.0-src-with-comment, Lines: 82, Source: WorkerTest.java


Note: The org.apache.kafka.connect.storage.OffsetStorageReader class examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by their respective authors, and copyright of the source code remains with the original authors; consult each project's license before distributing or reusing the code. Do not reproduce this article without permission.