src_fm_fc_ms_ff
stringlengths
43
86.8k
target
stringlengths
20
276k
StandbyTask extends AbstractTask { Map<TopicPartition, Long> checkpointedOffsets() { return checkpointedOffsets; } StandbyTask(final TaskId id, final String applicationId, final Collection<TopicPartition> partitions, final ProcessorTopology topology, final Consumer<byte[], byte[]> consumer, final ChangelogReader changelogReader, final StreamsConfig config, final StreamsMetrics metrics, final StateDirectory stateDirectory); @Override void resume(); @Override void commit(); @Override void suspend(); @Override void close(final boolean clean); List<ConsumerRecord<byte[], byte[]>> update(final TopicPartition partition, final List<ConsumerRecord<byte[], byte[]>> records); }
@Test public void testStorePartitions() throws Exception { StreamsConfig config = createConfig(baseDir); StandbyTask task = new StandbyTask(taskId, applicationId, topicPartitions, topology, consumer, changelogReader, config, null, stateDirectory); assertEquals(Utils.mkSet(partition2), new HashSet<>(task.checkpointedOffsets().keySet())); }
StandbyTask extends AbstractTask { public List<ConsumerRecord<byte[], byte[]>> update(final TopicPartition partition, final List<ConsumerRecord<byte[], byte[]>> records) { log.debug("{} Updating standby replicas of its state store for partition [{}]", logPrefix, partition); return stateMgr.updateStandbyStates(partition, records); } StandbyTask(final TaskId id, final String applicationId, final Collection<TopicPartition> partitions, final ProcessorTopology topology, final Consumer<byte[], byte[]> consumer, final ChangelogReader changelogReader, final StreamsConfig config, final StreamsMetrics metrics, final StateDirectory stateDirectory); @Override void resume(); @Override void commit(); @Override void suspend(); @Override void close(final boolean clean); List<ConsumerRecord<byte[], byte[]>> update(final TopicPartition partition, final List<ConsumerRecord<byte[], byte[]>> records); }
@SuppressWarnings("unchecked") @Test public void testUpdate() throws Exception { StreamsConfig config = createConfig(baseDir); StandbyTask task = new StandbyTask(taskId, applicationId, topicPartitions, topology, consumer, changelogReader, config, null, stateDirectory); restoreStateConsumer.assign(new ArrayList<>(task.checkpointedOffsets().keySet())); for (ConsumerRecord<Integer, Integer> record : Arrays.asList( new ConsumerRecord<>(partition2.topic(), partition2.partition(), 10, 0L, TimestampType.CREATE_TIME, 0L, 0, 0, 1, 100), new ConsumerRecord<>(partition2.topic(), partition2.partition(), 20, 0L, TimestampType.CREATE_TIME, 0L, 0, 0, 2, 100), new ConsumerRecord<>(partition2.topic(), partition2.partition(), 30, 0L, TimestampType.CREATE_TIME, 0L, 0, 0, 3, 100))) { restoreStateConsumer.bufferRecord(record); } for (Map.Entry<TopicPartition, Long> entry : task.checkpointedOffsets().entrySet()) { TopicPartition partition = entry.getKey(); long offset = entry.getValue(); if (offset >= 0) { restoreStateConsumer.seek(partition, offset); } else { restoreStateConsumer.seekToBeginning(singleton(partition)); } } task.update(partition2, restoreStateConsumer.poll(100).records(partition2)); StandbyContextImpl context = (StandbyContextImpl) task.context(); MockStateStoreSupplier.MockStateStore store1 = (MockStateStoreSupplier.MockStateStore) context.getStateMgr().getStore(storeName1); MockStateStoreSupplier.MockStateStore store2 = (MockStateStoreSupplier.MockStateStore) context.getStateMgr().getStore(storeName2); assertEquals(Collections.emptyList(), store1.keys); assertEquals(Utils.mkList(1, 2, 3), store2.keys); task.closeStateManager(true); File taskDir = stateDirectory.directoryForTask(taskId); OffsetCheckpoint checkpoint = new OffsetCheckpoint(new File(taskDir, ProcessorStateManager.CHECKPOINT_FILE_NAME)); Map<TopicPartition, Long> offsets = checkpoint.read(); assertEquals(1, offsets.size()); assertEquals(new Long(30L + 1L), offsets.get(partition2)); }
StateDirectory { File directoryForTask(final TaskId taskId) { final File taskDir = new File(stateDir, taskId.toString()); if (!taskDir.exists() && !taskDir.mkdir()) { throw new ProcessorStateException(String.format("task directory [%s] doesn't exist and couldn't be created", taskDir.getPath())); } return taskDir; } StateDirectory(final String applicationId, final String stateDirConfig, final Time time); StateDirectory(final String applicationId, final String threadId, final String stateDirConfig, final Time time); void cleanRemovedTasks(final long cleanupDelayMs); }
@Test public void shouldCreateTaskStateDirectory() throws Exception { final TaskId taskId = new TaskId(0, 0); final File taskDirectory = directory.directoryForTask(taskId); assertTrue(taskDirectory.exists()); assertTrue(taskDirectory.isDirectory()); } @Test(expected = ProcessorStateException.class) public void shouldThrowProcessorStateException() throws Exception { final TaskId taskId = new TaskId(0, 0); Utils.delete(stateDir); directory.directoryForTask(taskId); } @Test public void shouldCreateDirectoriesIfParentDoesntExist() throws Exception { final File tempDir = TestUtils.tempDirectory(); final File stateDir = new File(new File(tempDir, "foo"), "state-dir"); final StateDirectory stateDirectory = new StateDirectory(applicationId, stateDir.getPath(), time); final File taskDir = stateDirectory.directoryForTask(new TaskId(0, 0)); assertTrue(stateDir.exists()); assertTrue(taskDir.exists()); }
StateDirectory { boolean lock(final TaskId taskId, int retry) throws IOException { final File lockFile; if (locks.containsKey(taskId)) { log.trace("{} Found cached state dir lock for task {}", logPrefix, taskId); return true; } try { lockFile = new File(directoryForTask(taskId), LOCK_FILE_NAME); } catch (ProcessorStateException e) { return false; } final FileChannel channel; try { channel = getOrCreateFileChannel(taskId, lockFile.toPath()); } catch (NoSuchFileException e) { return false; } final FileLock lock = tryLock(retry, channel); if (lock != null) { locks.put(taskId, lock); log.debug("{} Acquired state dir lock for task {}", logPrefix, taskId); } return lock != null; } StateDirectory(final String applicationId, final String stateDirConfig, final Time time); StateDirectory(final String applicationId, final String threadId, final String stateDirConfig, final Time time); void cleanRemovedTasks(final long cleanupDelayMs); }
@Test public void shouldNotLockDeletedDirectory() throws Exception { final TaskId taskId = new TaskId(0, 0); Utils.delete(stateDir); assertFalse(directory.lock(taskId, 0)); }
StateDirectory { public void cleanRemovedTasks(final long cleanupDelayMs) { final File[] taskDirs = listTaskDirectories(); if (taskDirs == null || taskDirs.length == 0) { return; } for (File taskDir : taskDirs) { final String dirName = taskDir.getName(); TaskId id = TaskId.parse(dirName); if (!locks.containsKey(id)) { try { if (lock(id, 0)) { if (time.milliseconds() > taskDir.lastModified() + cleanupDelayMs) { log.info("{} Deleting obsolete state directory {} for task {} as cleanup delay of {} ms has passed", logPrefix, dirName, id, cleanupDelayMs); Utils.delete(taskDir); } } } catch (OverlappingFileLockException e) { } catch (IOException e) { log.error("{} Failed to lock the state directory due to an unexpected exception", logPrefix, e); } finally { try { unlock(id); } catch (IOException e) { log.error("{} Failed to release the state directory lock", logPrefix); } } } } } StateDirectory(final String applicationId, final String stateDirConfig, final Time time); StateDirectory(final String applicationId, final String threadId, final String stateDirConfig, final Time time); void cleanRemovedTasks(final long cleanupDelayMs); }
@Test public void shouldNotRemoveNonTaskDirectoriesAndFiles() throws Exception { final File otherDir = TestUtils.tempDirectory(stateDir.toPath(), "foo"); directory.cleanRemovedTasks(0); assertTrue(otherDir.exists()); }
StreamThread extends Thread { String threadClientId() { return threadClientId; } StreamThread(final TopologyBuilder builder, final StreamsConfig config, final KafkaClientSupplier clientSupplier, final String applicationId, final String clientId, final UUID processId, final Metrics metrics, final Time time, final StreamsMetadataState streamsMetadataState, final long cacheSizeBytes); @Override void run(); synchronized void close(); synchronized boolean isInitialized(); synchronized boolean stillRunning(); Map<TaskId, StreamTask> tasks(); Set<TaskId> prevActiveTasks(); Set<TaskId> cachedTasks(); void setStateListener(final StreamThread.StateListener listener); synchronized State state(); @Override String toString(); String toString(final String indent); final String applicationId; final String clientId; final UUID processId; }
@Test public void testMetrics() throws Exception { final StreamThread thread = new StreamThread( builder, config, clientSupplier, applicationId, clientId, processId, metrics, mockTime, new StreamsMetadataState(builder, StreamsMetadataState.UNKNOWN_HOST), 0); final String defaultGroupName = "stream-metrics"; final String defaultPrefix = "thread." + thread.threadClientId(); final Map<String, String> defaultTags = Collections.singletonMap("client-id", thread.threadClientId()); assertNotNull(metrics.getSensor(defaultPrefix + ".commit-latency")); assertNotNull(metrics.getSensor(defaultPrefix + ".poll-latency")); assertNotNull(metrics.getSensor(defaultPrefix + ".process-latency")); assertNotNull(metrics.getSensor(defaultPrefix + ".punctuate-latency")); assertNotNull(metrics.getSensor(defaultPrefix + ".task-created")); assertNotNull(metrics.getSensor(defaultPrefix + ".task-closed")); assertNotNull(metrics.getSensor(defaultPrefix + ".skipped-records")); assertNotNull(metrics.metrics().get(metrics.metricName("commit-latency-avg", defaultGroupName, "The average commit time in ms", defaultTags))); assertNotNull(metrics.metrics().get(metrics.metricName("commit-latency-max", defaultGroupName, "The maximum commit time in ms", defaultTags))); assertNotNull(metrics.metrics().get(metrics.metricName("commit-rate", defaultGroupName, "The average per-second number of commit calls", defaultTags))); assertNotNull(metrics.metrics().get(metrics.metricName("poll-latency-avg", defaultGroupName, "The average poll time in ms", defaultTags))); assertNotNull(metrics.metrics().get(metrics.metricName("poll-latency-max", defaultGroupName, "The maximum poll time in ms", defaultTags))); assertNotNull(metrics.metrics().get(metrics.metricName("poll-rate", defaultGroupName, "The average per-second number of record-poll calls", defaultTags))); assertNotNull(metrics.metrics().get(metrics.metricName("process-latency-avg", defaultGroupName, "The average process time in ms", defaultTags))); 
assertNotNull(metrics.metrics().get(metrics.metricName("process-latency-max", defaultGroupName, "The maximum process time in ms", defaultTags))); assertNotNull(metrics.metrics().get(metrics.metricName("process-rate", defaultGroupName, "The average per-second number of process calls", defaultTags))); assertNotNull(metrics.metrics().get(metrics.metricName("punctuate-latency-avg", defaultGroupName, "The average punctuate time in ms", defaultTags))); assertNotNull(metrics.metrics().get(metrics.metricName("punctuate-latency-max", defaultGroupName, "The maximum punctuate time in ms", defaultTags))); assertNotNull(metrics.metrics().get(metrics.metricName("punctuate-rate", defaultGroupName, "The average per-second number of punctuate calls", defaultTags))); assertNotNull(metrics.metrics().get(metrics.metricName("task-created-rate", defaultGroupName, "The average per-second number of newly created tasks", defaultTags))); assertNotNull(metrics.metrics().get(metrics.metricName("task-closed-rate", defaultGroupName, "The average per-second number of closed tasks", defaultTags))); assertNotNull(metrics.metrics().get(metrics.metricName("skipped-records-rate", defaultGroupName, "The average per-second number of skipped records.", defaultTags))); }
StreamThread extends Thread { protected void maybeClean(final long now) { if (now > lastCleanMs + cleanTimeMs) { stateDirectory.cleanRemovedTasks(cleanTimeMs); lastCleanMs = now; } } StreamThread(final TopologyBuilder builder, final StreamsConfig config, final KafkaClientSupplier clientSupplier, final String applicationId, final String clientId, final UUID processId, final Metrics metrics, final Time time, final StreamsMetadataState streamsMetadataState, final long cacheSizeBytes); @Override void run(); synchronized void close(); synchronized boolean isInitialized(); synchronized boolean stillRunning(); Map<TaskId, StreamTask> tasks(); Set<TaskId> prevActiveTasks(); Set<TaskId> cachedTasks(); void setStateListener(final StreamThread.StateListener listener); synchronized State state(); @Override String toString(); String toString(final String indent); final String applicationId; final String clientId; final UUID processId; }
@Test public void testMaybeClean() throws Exception { final File baseDir = Files.createTempDirectory("test").toFile(); try { final long cleanupDelay = 1000L; final Properties props = configProps(false); props.setProperty(StreamsConfig.STATE_CLEANUP_DELAY_MS_CONFIG, Long.toString(cleanupDelay)); props.setProperty(StreamsConfig.STATE_DIR_CONFIG, baseDir.getCanonicalPath()); final StreamsConfig config = new StreamsConfig(props); final File applicationDir = new File(baseDir, applicationId); applicationDir.mkdir(); final File stateDir1 = new File(applicationDir, task1.toString()); final File stateDir2 = new File(applicationDir, task2.toString()); final File stateDir3 = new File(applicationDir, task3.toString()); final File extraDir = new File(applicationDir, applicationId); stateDir1.mkdir(); stateDir2.mkdir(); stateDir3.mkdir(); extraDir.mkdir(); builder.addSource("source1", "topic1"); final StreamThread thread = new StreamThread( builder, config, clientSupplier, applicationId, clientId, processId, metrics, mockTime, new StreamsMetadataState(builder, StreamsMetadataState.UNKNOWN_HOST), 0) { @Override public void maybeClean(final long now) { super.maybeClean(now); } @Override protected StreamTask createStreamTask(final TaskId id, final Collection<TopicPartition> partitionsForTask) { final ProcessorTopology topology = builder.build(id.topicGroupId); return new TestStreamTask( id, applicationId, partitionsForTask, topology, consumer, clientSupplier.getProducer(new HashMap<String, Object>()), restoreConsumer, config, new MockStreamsMetrics(new Metrics()), stateDirectory); } }; initPartitionGrouper(config, thread, clientSupplier); assertTrue(thread.tasks().isEmpty()); mockTime.sleep(cleanupDelay); assertTrue(stateDir1.exists()); assertTrue(stateDir2.exists()); assertTrue(stateDir3.exists()); assertTrue(extraDir.exists()); List<TopicPartition> revokedPartitions; List<TopicPartition> assignedPartitions; Map<TaskId, StreamTask> prevTasks; final Map<TaskId, Set<TopicPartition>> 
activeTasks = new HashMap<>(); activeTasks.put(task1, Collections.singleton(t1p1)); activeTasks.put(task2, Collections.singleton(t1p2)); thread.setPartitionAssignor(new MockStreamsPartitionAssignor(activeTasks)); revokedPartitions = Collections.emptyList(); assignedPartitions = Arrays.asList(t1p1, t1p2); prevTasks = new HashMap<>(thread.tasks()); final ConsumerRebalanceListener rebalanceListener = thread.rebalanceListener; rebalanceListener.onPartitionsRevoked(revokedPartitions); rebalanceListener.onPartitionsAssigned(assignedPartitions); assertTrue(prevTasks.isEmpty()); assertEquals(2, thread.tasks().size()); mockTime.sleep(cleanupDelay - 10L); thread.maybeClean(mockTime.milliseconds()); assertTrue(stateDir1.exists()); assertTrue(stateDir2.exists()); assertTrue(stateDir3.exists()); assertTrue(extraDir.exists()); mockTime.sleep(11L); thread.maybeClean(mockTime.milliseconds()); assertTrue(stateDir1.exists()); assertTrue(stateDir2.exists()); assertFalse(stateDir3.exists()); assertTrue(extraDir.exists()); activeTasks.clear(); revokedPartitions = assignedPartitions; assignedPartitions = Collections.emptyList(); prevTasks = new HashMap<>(thread.tasks()); rebalanceListener.onPartitionsRevoked(revokedPartitions); rebalanceListener.onPartitionsAssigned(assignedPartitions); assertEquals(2, prevTasks.size()); for (final StreamTask task : prevTasks.values()) { assertTrue(((TestStreamTask) task).committed); ((TestStreamTask) task).committed = false; } assertTrue(thread.tasks().isEmpty()); mockTime.sleep(cleanupDelay - 10L); thread.maybeClean(mockTime.milliseconds()); assertTrue(stateDir1.exists()); assertTrue(stateDir2.exists()); assertFalse(stateDir3.exists()); assertTrue(extraDir.exists()); mockTime.sleep(11L); thread.maybeClean(mockTime.milliseconds()); assertFalse(stateDir1.exists()); assertFalse(stateDir2.exists()); assertFalse(stateDir3.exists()); assertTrue(extraDir.exists()); } finally { Utils.delete(baseDir); } }
StreamThread extends Thread { protected void maybeCommit(final long now) { if (commitTimeMs >= 0 && lastCommitMs + commitTimeMs < now) { log.debug("{} Committing all active tasks {} and standby tasks {} because the commit interval {}ms has elapsed by {}ms", logPrefix, activeTasks.keySet(), standbyTasks.keySet(), commitTimeMs, now - lastCommitMs); commitAll(); lastCommitMs = now; processStandbyRecords = true; } } StreamThread(final TopologyBuilder builder, final StreamsConfig config, final KafkaClientSupplier clientSupplier, final String applicationId, final String clientId, final UUID processId, final Metrics metrics, final Time time, final StreamsMetadataState streamsMetadataState, final long cacheSizeBytes); @Override void run(); synchronized void close(); synchronized boolean isInitialized(); synchronized boolean stillRunning(); Map<TaskId, StreamTask> tasks(); Set<TaskId> prevActiveTasks(); Set<TaskId> cachedTasks(); void setStateListener(final StreamThread.StateListener listener); synchronized State state(); @Override String toString(); String toString(final String indent); final String applicationId; final String clientId; final UUID processId; }
@Test public void testMaybeCommit() throws Exception { final File baseDir = Files.createTempDirectory("test").toFile(); try { final long commitInterval = 1000L; final Properties props = configProps(false); props.setProperty(StreamsConfig.STATE_DIR_CONFIG, baseDir.getCanonicalPath()); props.setProperty(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG, Long.toString(commitInterval)); final StreamsConfig config = new StreamsConfig(props); builder.addSource("source1", "topic1"); final StreamThread thread = new StreamThread( builder, config, clientSupplier, applicationId, clientId, processId, metrics, mockTime, new StreamsMetadataState(builder, StreamsMetadataState.UNKNOWN_HOST), 0) { @Override public void maybeCommit(final long now) { super.maybeCommit(now); } @Override protected StreamTask createStreamTask(final TaskId id, final Collection<TopicPartition> partitionsForTask) { final ProcessorTopology topology = builder.build(id.topicGroupId); return new TestStreamTask( id, applicationId, partitionsForTask, topology, consumer, clientSupplier.getProducer(new HashMap<String, Object>()), restoreConsumer, config, new MockStreamsMetrics(new Metrics()), stateDirectory); } }; initPartitionGrouper(config, thread, clientSupplier); final ConsumerRebalanceListener rebalanceListener = thread.rebalanceListener; final List<TopicPartition> revokedPartitions; final List<TopicPartition> assignedPartitions; revokedPartitions = Collections.emptyList(); assignedPartitions = Arrays.asList(t1p1, t1p2); rebalanceListener.onPartitionsRevoked(revokedPartitions); rebalanceListener.onPartitionsAssigned(assignedPartitions); assertEquals(2, thread.tasks().size()); mockTime.sleep(commitInterval - 10L); thread.maybeCommit(mockTime.milliseconds()); for (final StreamTask task : thread.tasks().values()) { assertFalse(((TestStreamTask) task).committed); } mockTime.sleep(11L); thread.maybeCommit(mockTime.milliseconds()); for (final StreamTask task : thread.tasks().values()) { assertTrue(((TestStreamTask) 
task).committed); ((TestStreamTask) task).committed = false; } mockTime.sleep(commitInterval - 10L); thread.maybeCommit(mockTime.milliseconds()); for (final StreamTask task : thread.tasks().values()) { assertFalse(((TestStreamTask) task).committed); } mockTime.sleep(11L); thread.maybeCommit(mockTime.milliseconds()); for (final StreamTask task : thread.tasks().values()) { assertTrue(((TestStreamTask) task).committed); ((TestStreamTask) task).committed = false; } } finally { Utils.delete(baseDir); } }
StreamThread extends Thread { void setPartitionAssignor(final StreamPartitionAssignor partitionAssignor) { this.partitionAssignor = partitionAssignor; } StreamThread(final TopologyBuilder builder, final StreamsConfig config, final KafkaClientSupplier clientSupplier, final String applicationId, final String clientId, final UUID processId, final Metrics metrics, final Time time, final StreamsMetadataState streamsMetadataState, final long cacheSizeBytes); @Override void run(); synchronized void close(); synchronized boolean isInitialized(); synchronized boolean stillRunning(); Map<TaskId, StreamTask> tasks(); Set<TaskId> prevActiveTasks(); Set<TaskId> cachedTasks(); void setStateListener(final StreamThread.StateListener listener); synchronized State state(); @Override String toString(); String toString(final String indent); final String applicationId; final String clientId; final UUID processId; }
@Test public void shouldNotNullPointerWhenStandbyTasksAssignedAndNoStateStoresForTopology() throws Exception { builder.addSource("name", "topic").addSink("out", "output"); final StreamThread thread = new StreamThread( builder, config, clientSupplier, applicationId, clientId, processId, metrics, mockTime, new StreamsMetadataState(builder, StreamsMetadataState.UNKNOWN_HOST), 0); thread.setPartitionAssignor(new StreamPartitionAssignor() { @Override Map<TaskId, Set<TopicPartition>> standbyTasks() { return Collections.singletonMap(new TaskId(0, 0), Utils.mkSet(new TopicPartition("topic", 0))); } }); thread.rebalanceListener.onPartitionsRevoked(Collections.<TopicPartition>emptyList()); thread.rebalanceListener.onPartitionsAssigned(Collections.<TopicPartition>emptyList()); } @Test public void shouldInitializeRestoreConsumerWithOffsetsFromStandbyTasks() throws Exception { final KStreamBuilder builder = new KStreamBuilder(); builder.setApplicationId(applicationId); builder.stream("t1").groupByKey().count("count-one"); builder.stream("t2").groupByKey().count("count-two"); final StreamThread thread = new StreamThread( builder, config, clientSupplier, applicationId, clientId, processId, metrics, mockTime, new StreamsMetadataState(builder, StreamsMetadataState.UNKNOWN_HOST), 0); final MockConsumer<byte[], byte[]> restoreConsumer = clientSupplier.restoreConsumer; restoreConsumer.updatePartitions("stream-thread-test-count-one-changelog", Collections.singletonList(new PartitionInfo("stream-thread-test-count-one-changelog", 0, null, new Node[0], new Node[0]))); restoreConsumer.updatePartitions("stream-thread-test-count-two-changelog", Collections.singletonList(new PartitionInfo("stream-thread-test-count-two-changelog", 0, null, new Node[0], new Node[0]))); final Map<TaskId, Set<TopicPartition>> standbyTasks = new HashMap<>(); final TopicPartition t1 = new TopicPartition("t1", 0); standbyTasks.put(new TaskId(0, 0), Utils.mkSet(t1)); thread.setPartitionAssignor(new 
StreamPartitionAssignor() { @Override Map<TaskId, Set<TopicPartition>> standbyTasks() { return standbyTasks; } }); thread.rebalanceListener.onPartitionsRevoked(Collections.<TopicPartition>emptyList()); thread.rebalanceListener.onPartitionsAssigned(Collections.<TopicPartition>emptyList()); assertThat(restoreConsumer.assignment(), equalTo(Utils.mkSet(new TopicPartition("stream-thread-test-count-one-changelog", 0)))); standbyTasks.put(new TaskId(1, 0), Utils.mkSet(new TopicPartition("t2", 0))); thread.rebalanceListener.onPartitionsRevoked(Collections.<TopicPartition>emptyList()); thread.rebalanceListener.onPartitionsAssigned(Collections.<TopicPartition>emptyList()); assertThat(restoreConsumer.assignment(), equalTo(Utils.mkSet(new TopicPartition("stream-thread-test-count-one-changelog", 0), new TopicPartition("stream-thread-test-count-two-changelog", 0)))); } @Test public void shouldCloseSuspendedTasksThatAreNoLongerAssignedToThisStreamThreadBeforeCreatingNewTasks() throws Exception { final KStreamBuilder builder = new KStreamBuilder(); builder.setApplicationId(applicationId); builder.stream("t1").groupByKey().count("count-one"); builder.stream("t2").groupByKey().count("count-two"); final StreamThread thread = new StreamThread( builder, config, clientSupplier, applicationId, clientId, processId, metrics, mockTime, new StreamsMetadataState(builder, StreamsMetadataState.UNKNOWN_HOST), 0); final MockConsumer<byte[], byte[]> restoreConsumer = clientSupplier.restoreConsumer; restoreConsumer.updatePartitions("stream-thread-test-count-one-changelog", Collections.singletonList(new PartitionInfo("stream-thread-test-count-one-changelog", 0, null, new Node[0], new Node[0]))); restoreConsumer.updatePartitions("stream-thread-test-count-two-changelog", Collections.singletonList(new PartitionInfo("stream-thread-test-count-two-changelog", 0, null, new Node[0], new Node[0]))); final HashMap<TopicPartition, Long> offsets = new HashMap<>(); offsets.put(new 
TopicPartition("stream-thread-test-count-one-changelog", 0), 0L); offsets.put(new TopicPartition("stream-thread-test-count-two-changelog", 0), 0L); restoreConsumer.updateEndOffsets(offsets); restoreConsumer.updateBeginningOffsets(offsets); final Map<TaskId, Set<TopicPartition>> standbyTasks = new HashMap<>(); final TopicPartition t1 = new TopicPartition("t1", 0); standbyTasks.put(new TaskId(0, 0), Utils.mkSet(t1)); final Map<TaskId, Set<TopicPartition>> activeTasks = new HashMap<>(); final TopicPartition t2 = new TopicPartition("t2", 0); activeTasks.put(new TaskId(1, 0), Utils.mkSet(t2)); thread.setPartitionAssignor(new StreamPartitionAssignor() { @Override Map<TaskId, Set<TopicPartition>> standbyTasks() { return standbyTasks; } @Override Map<TaskId, Set<TopicPartition>> activeTasks() { return activeTasks; } }); thread.rebalanceListener.onPartitionsRevoked(Collections.<TopicPartition>emptyList()); thread.rebalanceListener.onPartitionsAssigned(Utils.mkSet(t2)); standbyTasks.clear(); activeTasks.clear(); standbyTasks.put(new TaskId(1, 0), Utils.mkSet(t2)); activeTasks.put(new TaskId(0, 0), Utils.mkSet(t1)); thread.rebalanceListener.onPartitionsRevoked(Collections.<TopicPartition>emptyList()); thread.rebalanceListener.onPartitionsAssigned(Utils.mkSet(t1)); } @Test public void shouldCloseActiveTasksThatAreAssignedToThisStreamThreadButAssignmentHasChangedBeforeCreatingNewTasks() throws Exception { final KStreamBuilder builder = new KStreamBuilder(); builder.setApplicationId(applicationId); builder.stream(Pattern.compile("t.*")).to("out"); final Map<Collection<TopicPartition>, TestStreamTask> createdTasks = new HashMap<>(); final StreamThread thread = new StreamThread( builder, config, clientSupplier, applicationId, clientId, processId, metrics, mockTime, new StreamsMetadataState(builder, StreamsMetadataState.UNKNOWN_HOST), 0) { @Override protected StreamTask createStreamTask(final TaskId id, final Collection<TopicPartition> partitions) { final ProcessorTopology topology 
= builder.build(id.topicGroupId); final TestStreamTask task = new TestStreamTask( id, applicationId, partitions, topology, consumer, clientSupplier.getProducer(new HashMap<String, Object>()), restoreConsumer, config, new MockStreamsMetrics(new Metrics()), stateDirectory); createdTasks.put(partitions, task); return task; } }; final Map<TaskId, Set<TopicPartition>> activeTasks = new HashMap<>(); final TopicPartition t1 = new TopicPartition("t1", 0); final Set<TopicPartition> task00Partitions = new HashSet<>(); task00Partitions.add(t1); final TaskId taskId = new TaskId(0, 0); activeTasks.put(taskId, task00Partitions); thread.setPartitionAssignor(new StreamPartitionAssignor() { @Override Map<TaskId, Set<TopicPartition>> activeTasks() { return activeTasks; } }); StreamPartitionAssignor.SubscriptionUpdates subscriptionUpdates = new StreamPartitionAssignor.SubscriptionUpdates(); Field updatedTopicsField = subscriptionUpdates.getClass().getDeclaredField("updatedTopicSubscriptions"); updatedTopicsField.setAccessible(true); Set<String> updatedTopics = (Set<String>) updatedTopicsField.get(subscriptionUpdates); updatedTopics.add(t1.topic()); builder.updateSubscriptions(subscriptionUpdates, null); thread.rebalanceListener.onPartitionsRevoked(Collections.<TopicPartition>emptyList()); thread.rebalanceListener.onPartitionsAssigned(task00Partitions); final TestStreamTask firstTask = createdTasks.get(task00Partitions); assertThat(firstTask.id(), is(taskId)); task00Partitions.add(new TopicPartition("t2", 0)); updatedTopics.add("t2"); thread.rebalanceListener.onPartitionsRevoked(Collections.<TopicPartition>emptyList()); thread.rebalanceListener.onPartitionsAssigned(task00Partitions); assertTrue("task should have been closed as assignment has changed", firstTask.closed); assertTrue("tasks state manager should have been closed as assignment has changed", firstTask.closedStateManager); assertThat(createdTasks.get(task00Partitions).id(), is(taskId)); } @Test public void 
shouldNotViolateAtLeastOnceWhenExceptionOccursDuringTaskSuspension() throws Exception { final KStreamBuilder builder = new KStreamBuilder(); builder.setApplicationId(applicationId); builder.stream("t1").groupByKey(); final TestStreamTask testStreamTask = new TestStreamTask( new TaskId(0, 0), applicationId, Utils.mkSet(new TopicPartition("t1", 0)), builder.build(0), clientSupplier.consumer, clientSupplier.getProducer(new HashMap<String, Object>()), clientSupplier.restoreConsumer, config, new MockStreamsMetrics(new Metrics()), new StateDirectory(applicationId, config.getString(StreamsConfig.STATE_DIR_CONFIG), mockTime)) { @Override public void suspend() { throw new RuntimeException("KABOOM!"); } }; final StreamThread thread = new StreamThread( builder, config, clientSupplier, applicationId, clientId, processId, metrics, mockTime, new StreamsMetadataState(builder, StreamsMetadataState.UNKNOWN_HOST), 0) { @Override protected StreamTask createStreamTask(final TaskId id, final Collection<TopicPartition> partitions) { return testStreamTask; } }; final Map<TaskId, Set<TopicPartition>> activeTasks = new HashMap<>(); activeTasks.put(testStreamTask.id(), testStreamTask.partitions); thread.setPartitionAssignor(new MockStreamsPartitionAssignor(activeTasks)); thread.rebalanceListener.onPartitionsRevoked(Collections.<TopicPartition>emptyList()); thread.rebalanceListener.onPartitionsAssigned(testStreamTask.partitions); try { thread.rebalanceListener.onPartitionsRevoked(Collections.<TopicPartition>emptyList()); fail("should have thrown exception"); } catch (final Exception e) { } assertFalse(testStreamTask.committed); } @Test public void shouldNotViolateAtLeastOnceWhenExceptionOccursDuringFlushStateWhileSuspendingState() throws Exception { final KStreamBuilder builder = new KStreamBuilder(); builder.setApplicationId(applicationId); builder.stream("t1").groupByKey(); final TestStreamTask testStreamTask = new TestStreamTask( new TaskId(0, 0), applicationId, Utils.mkSet(new 
TopicPartition("t1", 0)), builder.build(0), clientSupplier.consumer, clientSupplier.getProducer(new HashMap<String, Object>()), clientSupplier.restoreConsumer, config, new MockStreamsMetrics(new Metrics()), new StateDirectory(applicationId, config.getString(StreamsConfig.STATE_DIR_CONFIG), mockTime)) { @Override protected void flushState() { throw new RuntimeException("KABOOM!"); } }; final StreamThread thread = new StreamThread( builder, config, clientSupplier, applicationId, clientId, processId, metrics, mockTime, new StreamsMetadataState(builder, StreamsMetadataState.UNKNOWN_HOST), 0) { @Override protected StreamTask createStreamTask(final TaskId id, final Collection<TopicPartition> partitions) { return testStreamTask; } }; final Map<TaskId, Set<TopicPartition>> activeTasks = new HashMap<>(); activeTasks.put(testStreamTask.id(), testStreamTask.partitions); thread.setPartitionAssignor(new MockStreamsPartitionAssignor(activeTasks)); thread.rebalanceListener.onPartitionsRevoked(Collections.<TopicPartition>emptyList()); thread.rebalanceListener.onPartitionsAssigned(testStreamTask.partitions); try { thread.rebalanceListener.onPartitionsRevoked(Collections.<TopicPartition>emptyList()); fail("should have thrown exception"); } catch (final Exception e) { } assertFalse(testStreamTask.committed); } @Test @SuppressWarnings("unchecked") public void shouldAlwaysUpdateWithLatestTopicsFromStreamPartitionAssignor() throws Exception { final TopologyBuilder topologyBuilder = new TopologyBuilder(); topologyBuilder.addSource("source", Pattern.compile("t.*")); topologyBuilder.addProcessor("processor", new MockProcessorSupplier(), "source"); final StreamThread thread = new StreamThread( topologyBuilder, config, clientSupplier, applicationId, clientId, processId, metrics, mockTime, new StreamsMetadataState(topologyBuilder, StreamsMetadataState.UNKNOWN_HOST), 0); final StreamPartitionAssignor partitionAssignor = new StreamPartitionAssignor(); final Map<String, Object> configurationMap 
= new HashMap<>(); configurationMap.put(StreamsConfig.InternalConfig.STREAM_THREAD_INSTANCE, thread); configurationMap.put(StreamsConfig.NUM_STANDBY_REPLICAS_CONFIG, 0); partitionAssignor.configure(configurationMap); thread.setPartitionAssignor(partitionAssignor); final Field nodeToSourceTopicsField = topologyBuilder.getClass().getDeclaredField("nodeToSourceTopics"); nodeToSourceTopicsField.setAccessible(true); final Map<String, List<String>> nodeToSourceTopics = (Map<String, List<String>>) nodeToSourceTopicsField.get(topologyBuilder); final List<TopicPartition> topicPartitions = new ArrayList<>(); final TopicPartition topicPartition1 = new TopicPartition("topic-1", 0); final TopicPartition topicPartition2 = new TopicPartition("topic-2", 0); final TopicPartition topicPartition3 = new TopicPartition("topic-3", 0); final TaskId taskId1 = new TaskId(0, 0); final TaskId taskId2 = new TaskId(0, 0); final TaskId taskId3 = new TaskId(0, 0); List<TaskId> activeTasks = Arrays.asList(taskId1); final Map<TaskId, Set<TopicPartition>> standbyTasks = new HashMap<>(); AssignmentInfo info = new AssignmentInfo(activeTasks, standbyTasks, new HashMap<HostInfo, Set<TopicPartition>>()); topicPartitions.addAll(Arrays.asList(topicPartition1)); PartitionAssignor.Assignment assignment = new PartitionAssignor.Assignment(topicPartitions, info.encode()); partitionAssignor.onAssignment(assignment); assertTrue(nodeToSourceTopics.get("source").size() == 1); assertTrue(nodeToSourceTopics.get("source").contains("topic-1")); topicPartitions.clear(); activeTasks = Arrays.asList(taskId1, taskId2); info = new AssignmentInfo(activeTasks, standbyTasks, new HashMap<HostInfo, Set<TopicPartition>>()); topicPartitions.addAll(Arrays.asList(topicPartition1, topicPartition2)); assignment = new PartitionAssignor.Assignment(topicPartitions, info.encode()); partitionAssignor.onAssignment(assignment); assertTrue(nodeToSourceTopics.get("source").size() == 2); 
assertTrue(nodeToSourceTopics.get("source").contains("topic-1")); assertTrue(nodeToSourceTopics.get("source").contains("topic-2")); topicPartitions.clear(); activeTasks = Arrays.asList(taskId1, taskId2, taskId3); info = new AssignmentInfo(activeTasks, standbyTasks, new HashMap<HostInfo, Set<TopicPartition>>()); topicPartitions.addAll(Arrays.asList(topicPartition1, topicPartition2, topicPartition3)); assignment = new PartitionAssignor.Assignment(topicPartitions, info.encode()); partitionAssignor.onAssignment(assignment); assertTrue(nodeToSourceTopics.get("source").size() == 3); assertTrue(nodeToSourceTopics.get("source").contains("topic-1")); assertTrue(nodeToSourceTopics.get("source").contains("topic-2")); assertTrue(nodeToSourceTopics.get("source").contains("topic-3")); }
StreamsMetricsImpl implements StreamsMetrics {
    /**
     * Removes the given sensor from the underlying {@code Metrics} registry,
     * together with the parent sensor that was registered for it (if any).
     *
     * Fixes two issues in the previous version: the dead {@code parent = null}
     * initialization is gone, and the {@code parentSensors} mapping is now
     * removed instead of merely read, so the map no longer retains entries for
     * sensors that have been removed (stale-mapping leak).
     *
     * @param sensor the sensor to remove; must not be null
     * @throws NullPointerException if {@code sensor} is null
     */
    @Override
    public void removeSensor(Sensor sensor) {
        Objects.requireNonNull(sensor, "Sensor is null");
        metrics.removeSensor(sensor.name());
        // Remove (not just look up) the parent mapping so removed sensors are
        // not retained by the map.
        final Sensor parent = parentSensors.remove(sensor);
        if (parent != null) {
            metrics.removeSensor(parent.name());
        }
    }
    StreamsMetricsImpl(Metrics metrics, String groupName, Map<String, String> tags);
    Metrics registry();
    @Override Sensor addSensor(String name, Sensor.RecordingLevel recordingLevel);
    @Override Sensor addSensor(String name, Sensor.RecordingLevel recordingLevel, Sensor... parents);
    @Override Map<MetricName, ? extends Metric> metrics();
    @Override void recordLatency(Sensor sensor, long startNs, long endNs);
    @Override void recordThroughput(Sensor sensor, long value);
    @Override Sensor addLatencyAndThroughputSensor(String scopeName, String entityName, String operationName, Sensor.RecordingLevel recordingLevel, String... tags);
    @Override Sensor addThroughputSensor(String scopeName, String entityName, String operationName, Sensor.RecordingLevel recordingLevel, String... tags);
    void measureLatencyNs(final Time time, final Runnable action, final Sensor sensor);
    @Override void removeSensor(Sensor sensor);
}
// removeSensor(null) must fail fast via Objects.requireNonNull.
@Test(expected = NullPointerException.class)
public void testRemoveNullSensor() {
    String groupName = "doesNotMatter";
    Map<String, String> tags = new HashMap<>();
    StreamsMetricsImpl streamsMetrics = new StreamsMetricsImpl(new Metrics(), groupName, tags);
    streamsMetrics.removeSensor(null);
}

// Exercises removeSensor() for every way a sensor can be registered:
// plain, with an explicit parent, and via the latency/throughput factory
// methods (which presumably register an implicit parent — see the
// parentSensors handling in removeSensor; TODO confirm).
@Test
public void testRemoveSensor() {
    String groupName = "doesNotMatter";
    String sensorName = "sensor1";
    String scope = "scope";
    String entity = "entity";
    String operation = "put";
    Map<String, String> tags = new HashMap<>();
    StreamsMetricsImpl streamsMetrics = new StreamsMetricsImpl(new Metrics(), groupName, tags);
    // Plain sensor, no parent.
    Sensor sensor1 = streamsMetrics.addSensor(sensorName, Sensor.RecordingLevel.DEBUG);
    streamsMetrics.removeSensor(sensor1);
    // Sensor registered with an explicit parent.
    Sensor sensor1a = streamsMetrics.addSensor(sensorName, Sensor.RecordingLevel.DEBUG, sensor1);
    streamsMetrics.removeSensor(sensor1a);
    // Sensors created through the convenience factories.
    Sensor sensor2 = streamsMetrics.addLatencyAndThroughputSensor(scope, entity, operation, Sensor.RecordingLevel.DEBUG);
    streamsMetrics.removeSensor(sensor2);
    Sensor sensor3 = streamsMetrics.addThroughputSensor(scope, entity, operation, Sensor.RecordingLevel.DEBUG);
    streamsMetrics.removeSensor(sensor3);
}
GlobalStateManagerImpl implements GlobalStateManager {
    /**
     * Initializes global state: acquires the global state directory lock,
     * loads the checkpointed offsets, and initializes every global store.
     *
     * Bug fix: the inner catch previously logged the outer checkpoint-read
     * exception {@code e} instead of the unlock failure {@code e1}, hiding the
     * actual reason the unlock failed.
     *
     * @param processorContext context passed to each store's init()
     * @return the (unmodifiable) set of initialized global store names
     * @throws LockException    if the global state directory cannot be locked
     * @throws StreamsException if the checkpoint file cannot be read
     */
    @Override
    public Set<String> initialize(final InternalProcessorContext processorContext) {
        // Both a "false" return and an IOException surface as a LockException.
        try {
            if (!stateDirectory.lockGlobalState(MAX_LOCK_ATTEMPTS)) {
                throw new LockException(String.format("Failed to lock the global state directory: %s", baseDir));
            }
        } catch (IOException e) {
            // NOTE(review): the IOException cause is dropped here; consider
            // attaching it if LockException has a (String, Throwable) ctor.
            throw new LockException(String.format("Failed to lock the global state directory: %s", baseDir));
        }
        try {
            this.checkpointableOffsets.putAll(checkpoint.read());
        } catch (IOException e) {
            // Release the lock before propagating, so another instance can proceed.
            try {
                stateDirectory.unlockGlobalState();
            } catch (IOException e1) {
                // Fixed: log the unlock failure (e1), not the outer read failure (e).
                log.error("failed to unlock the global state directory", e1);
            }
            throw new StreamsException("Failed to read checkpoints for global state stores", e);
        }
        final List<StateStore> stateStores = topology.globalStateStores();
        for (final StateStore stateStore : stateStores) {
            globalStoreNames.add(stateStore.name());
            stateStore.init(processorContext, stateStore);
        }
        return Collections.unmodifiableSet(globalStoreNames);
    }
    GlobalStateManagerImpl(final ProcessorTopology topology, final Consumer<byte[], byte[]> consumer, final StateDirectory stateDirectory);
    @Override Set<String> initialize(final InternalProcessorContext processorContext);
    @Override StateStore getGlobalStore(final String name);
    @Override StateStore getStore(final String name);
    File baseDir();
    void register(final StateStore store, final boolean ignored, final StateRestoreCallback stateRestoreCallback);
    @Override void flush();
    @Override void close(final Map<TopicPartition, Long> offsets);
    @Override void checkpoint(final Map<TopicPartition, Long> offsets);
    @Override Map<TopicPartition, Long> checkpointed();
}
// Initializing the manager must create the .lock file in the global state dir.
@Test
public void shouldLockGlobalStateDirectory() throws Exception {
    stateManager.initialize(context);
    assertTrue(new File(stateDirectory.globalStateDir(), ".lock").exists());
}

// A second holder of the global lock must cause initialize() to fail.
@Test(expected = LockException.class)
public void shouldThrowLockExceptionIfCantGetLock() throws Exception {
    final StateDirectory stateDir = new StateDirectory("appId", stateDirPath, time);
    try {
        stateDir.lockGlobalState(1);
        stateManager.initialize(context);
    } finally {
        stateDir.unlockGlobalState();
    }
}

// The checkpoint file must survive being loaded during initialize().
@Test
public void shouldNotDeleteCheckpointFileAfterLoaded() throws Exception {
    writeCheckpoint();
    stateManager.initialize(context);
    assertTrue(checkpointFile.exists());
}

// A corrupt checkpoint file surfaces as a StreamsException.
@Test(expected = StreamsException.class)
public void shouldThrowStreamsExceptionIfFailedToReadCheckpointedOffsets() throws Exception {
    writeCorruptCheckpoint();
    stateManager.initialize(context);
}

@Test
public void shouldInitializeStateStores() throws Exception {
    stateManager.initialize(context);
    assertTrue(store1.initialized);
    assertTrue(store2.initialized);
}

@Test
public void shouldReturnInitializedStoreNames() throws Exception {
    final Set<String> storeNames = stateManager.initialize(context);
    assertEquals(Utils.mkSet(store1.name(), store2.name()), storeNames);
}

// After a failed checkpoint load, the global lock must have been released —
// verified by successfully re-acquiring it from a fresh StateDirectory.
@Test
public void shouldReleaseLockIfExceptionWhenLoadingCheckpoints() throws Exception {
    writeCorruptCheckpoint();
    try {
        stateManager.initialize(context);
    } catch (StreamsException e) {
        // expected; the lock must still be released below
    }
    final StateDirectory stateDir = new StateDirectory("appId", stateDirPath, new MockTime());
    try {
        assertTrue(stateDir.lockGlobalState(1));
    } finally {
        stateDir.unlockGlobalState();
    }
}

// An IOException while locking is wrapped into a LockException.
@Test
public void shouldThrowLockExceptionIfIOExceptionCaughtWhenTryingToLockStateDir() throws Exception {
    stateManager = new GlobalStateManagerImpl(topology, consumer, new StateDirectory("appId", stateDirPath, time) {
        @Override
        public boolean lockGlobalState(final int retry) throws IOException {
            throw new IOException("KABOOM!");
        }
    });
    try {
        stateManager.initialize(context);
        fail("Should have thrown LockException");
    } catch (final LockException e) {
        // expected
    }
}
StreamTask extends AbstractTask implements Punctuator {
    /**
     * Attempts to fire any due punctuations based on the current stream time
     * reported by the partition group.
     *
     * @return true if at least one punctuation was triggered
     */
    boolean maybePunctuate() {
        final long streamTime = partitionGroup.timestamp();
        // Until a record timestamp has been observed, no punctuation can fire;
        // otherwise delegate the decision to the punctuation queue.
        return streamTime != TimestampTracker.NOT_KNOWN
            && punctuationQueue.mayPunctuate(streamTime, this);
    }
    StreamTask(final TaskId id, final String applicationId, final Collection<TopicPartition> partitions, final ProcessorTopology topology, final Consumer<byte[], byte[]> consumer, final ChangelogReader changelogReader, final StreamsConfig config, final StreamsMetrics metrics, final StateDirectory stateDirectory, final ThreadCache cache, final Time time, final Producer<byte[], byte[]> producer);
    @Override void resume();
    @SuppressWarnings("unchecked") boolean process();
    @Override void punctuate(final ProcessorNode node, final long timestamp);
    @Override void commit();
    @Override void suspend();
    @Override void close(boolean clean);
    @SuppressWarnings("unchecked") int addRecords(final TopicPartition partition, final Iterable<ConsumerRecord<byte[], byte[]>> records);
    void schedule(final long interval);
}
// Interleaves process() and maybePunctuate(): punctuation should fire each
// time stream time (the min buffered timestamp across partitions) advances,
// and not fire while it stays put.
@SuppressWarnings("unchecked")
@Test
public void testMaybePunctuate() throws Exception {
    // Partition 1 timestamps: 20, 30, 40; partition 2: 25, 35, 45.
    task.addRecords(partition1, records(
        new ConsumerRecord<>(partition1.topic(), partition1.partition(), 20, 0L, TimestampType.CREATE_TIME, 0L, 0, 0, recordKey, recordValue),
        new ConsumerRecord<>(partition1.topic(), partition1.partition(), 30, 0L, TimestampType.CREATE_TIME, 0L, 0, 0, recordKey, recordValue),
        new ConsumerRecord<>(partition1.topic(), partition1.partition(), 40, 0L, TimestampType.CREATE_TIME, 0L, 0, 0, recordKey, recordValue)
    ));
    task.addRecords(partition2, records(
        new ConsumerRecord<>(partition2.topic(), partition2.partition(), 25, 0L, TimestampType.CREATE_TIME, 0L, 0, 0, recordKey, recordValue),
        new ConsumerRecord<>(partition2.topic(), partition2.partition(), 35, 0L, TimestampType.CREATE_TIME, 0L, 0, 0, recordKey, recordValue),
        new ConsumerRecord<>(partition2.topic(), partition2.partition(), 45, 0L, TimestampType.CREATE_TIME, 0L, 0, 0, recordKey, recordValue)
    ));
    // Stream time known (20) -> punctuate fires; process consumes from p1.
    assertTrue(task.maybePunctuate());
    assertTrue(task.process());
    assertEquals(5, task.numBuffered());
    assertEquals(1, source1.numReceived);
    assertEquals(0, source2.numReceived);
    // Stream time unchanged -> no punctuation.
    assertFalse(task.maybePunctuate());
    assertTrue(task.process());
    assertEquals(4, task.numBuffered());
    assertEquals(1, source1.numReceived);
    assertEquals(1, source2.numReceived);
    // Stream time advanced to 30 -> punctuate fires again.
    assertTrue(task.maybePunctuate());
    assertTrue(task.process());
    assertEquals(3, task.numBuffered());
    assertEquals(2, source1.numReceived);
    assertEquals(1, source2.numReceived);
    assertFalse(task.maybePunctuate());
    assertTrue(task.process());
    assertEquals(2, task.numBuffered());
    assertEquals(2, source1.numReceived);
    assertEquals(2, source2.numReceived);
    // Stream time advanced to 40 -> third punctuation.
    assertTrue(task.maybePunctuate());
    assertTrue(task.process());
    assertEquals(1, task.numBuffered());
    assertEquals(3, source1.numReceived);
    assertEquals(2, source2.numReceived);
    assertFalse(task.maybePunctuate());
    assertTrue(task.process());
    assertEquals(0, task.numBuffered());
    assertEquals(3, source1.numReceived);
    assertEquals(3, source2.numReceived);
    // Buffers drained: no further processing or punctuation.
    assertFalse(task.process());
    assertFalse(task.maybePunctuate());
    // Punctuations fired at stream times 20, 30, 40.
    processor.supplier.checkAndClearPunctuateResult(20L, 30L, 40L);
}
StreamTask extends AbstractTask implements Punctuator {
    /**
     * Flushes all state stores (via the superclass) and then flushes the
     * record collector / producer. Order matters: store data must be flushed
     * before the producer buffer is drained.
     */
    @Override
    protected void flushState() {
        log.trace("{} Flushing state and producer", logPrefix);
        super.flushState();
        recordCollector.flush();
    }
    StreamTask(final TaskId id, final String applicationId, final Collection<TopicPartition> partitions, final ProcessorTopology topology, final Consumer<byte[], byte[]> consumer, final ChangelogReader changelogReader, final StreamsConfig config, final StreamsMetrics metrics, final StateDirectory stateDirectory, final ThreadCache cache, final Time time, final Producer<byte[], byte[]> producer);
    @Override void resume();
    @SuppressWarnings("unchecked") boolean process();
    @Override void punctuate(final ProcessorNode node, final long timestamp);
    @Override void commit();
    @Override void suspend();
    @Override void close(boolean clean);
    @SuppressWarnings("unchecked") int addRecords(final TopicPartition partition, final Iterable<ConsumerRecord<byte[], byte[]>> records);
    void schedule(final long interval);
}
// flushState() must propagate to the record collector's flush(); verified by
// injecting a collector that records the call.
@Test
public void shouldFlushRecordCollectorOnFlushState() throws Exception {
    final AtomicBoolean flushed = new AtomicBoolean(false);
    final StreamsMetrics streamsMetrics = new MockStreamsMetrics(new Metrics());
    final StreamTask streamTask = new StreamTask(taskId00, "appId", partitions, topology, consumer, changelogReader, config,
        streamsMetrics, stateDirectory, null, time, producer) {
        @Override
        RecordCollector createRecordCollector() {
            return new NoOpRecordCollector() {
                @Override
                public void flush() {
                    flushed.set(true);
                }
            };
        }
    };
    streamTask.flushState();
    assertTrue(flushed.get());
}
StreamTask extends AbstractTask implements Punctuator {
    /**
     * Commits the task. Delegates to commitImpl(true); the boolean flag's
     * meaning is defined by commitImpl (not visible here — presumably
     * "start a new transaction" or "flush state"; TODO confirm at commitImpl).
     */
    @Override
    public void commit() {
        commitImpl(true);
    }
    StreamTask(final TaskId id, final String applicationId, final Collection<TopicPartition> partitions, final ProcessorTopology topology, final Consumer<byte[], byte[]> consumer, final ChangelogReader changelogReader, final StreamsConfig config, final StreamsMetrics metrics, final StateDirectory stateDirectory, final ThreadCache cache, final Time time, final Producer<byte[], byte[]> producer);
    @Override void resume();
    @SuppressWarnings("unchecked") boolean process();
    @Override void punctuate(final ProcessorNode node, final long timestamp);
    @Override void commit();
    @Override void suspend();
    @Override void close(boolean clean);
    @SuppressWarnings("unchecked") int addRecords(final TopicPartition partition, final Iterable<ConsumerRecord<byte[], byte[]>> records);
    void schedule(final long interval);
}
// commit() must write the changelog offset + 1 to the task's checkpoint file
// when EOS is disabled.
@SuppressWarnings("unchecked")
@Test
public void shouldCheckpointOffsetsOnCommit() throws Exception {
    final String storeName = "test";
    final String changelogTopic = ProcessorStateManager.storeChangelogTopic("appId", storeName);
    // Persistent in-memory store so a checkpointable offset exists.
    final InMemoryKeyValueStore inMemoryStore = new InMemoryKeyValueStore(storeName, null, null) {
        @Override
        public void init(final ProcessorContext context, final StateStore root) {
            context.register(root, true, null);
        }
        @Override
        public boolean persistent() {
            return true;
        }
    };
    Map<String, SourceNode> sourceByTopics = new HashMap() {
        {
            put(partition1.topic(), source1);
            put(partition2.topic(), source2);
        }
    };
    final ProcessorTopology topology = new ProcessorTopology(Collections.<ProcessorNode>emptyList(),
        sourceByTopics,
        Collections.<String, SinkNode>emptyMap(),
        Collections.<StateStore>singletonList(inMemoryStore),
        Collections.singletonMap(storeName, changelogTopic),
        Collections.<StateStore>emptyList());
    final TopicPartition partition = new TopicPartition(changelogTopic, 0);
    restoreStateConsumer.updatePartitions(changelogTopic, Collections.singletonList(
        new PartitionInfo(changelogTopic, 0, null, new Node[0], new Node[0])));
    restoreStateConsumer.updateEndOffsets(Collections.singletonMap(partition, 0L));
    restoreStateConsumer.updateBeginningOffsets(Collections.singletonMap(partition, 0L));
    final long offset = 543L;
    // Collector reports a fixed offset for the changelog partition.
    final StreamTask streamTask = new StreamTask(taskId00, "appId", partitions, topology, consumer, changelogReader, config,
        streamsMetrics, stateDirectory, null, time, producer) {
        @Override
        RecordCollector createRecordCollector() {
            return new NoOpRecordCollector() {
                @Override
                public Map<TopicPartition, Long> offsets() {
                    return Collections.singletonMap(partition, offset);
                }
            };
        }
    };
    // Advance past the commit interval so commit() actually commits.
    time.sleep(config.getLong(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG));
    streamTask.commit();
    final OffsetCheckpoint checkpoint = new OffsetCheckpoint(new File(stateDirectory.directoryForTask(taskId00),
        ProcessorStateManager.CHECKPOINT_FILE_NAME));
    // Checkpoint stores the next offset to read, i.e. last written + 1.
    assertThat(checkpoint.read(), equalTo(Collections.singletonMap(partition, offset + 1)));
}

// With EOS enabled, no checkpoint file may be written on commit (offsets are
// tracked transactionally instead).
@SuppressWarnings("unchecked")
@Test
public void shouldNotCheckpointOffsetsOnCommitIfEosIsEnabled() throws Exception {
    final Map<String, Object> properties = config.originals();
    properties.put(StreamsConfig.PROCESSING_GUARANTEE_CONFIG, StreamsConfig.EXACTLY_ONCE);
    final StreamsConfig testConfig = new StreamsConfig(properties);
    final String storeName = "test";
    final String changelogTopic = ProcessorStateManager.storeChangelogTopic("appId", storeName);
    final InMemoryKeyValueStore inMemoryStore = new InMemoryKeyValueStore(storeName, null, null) {
        @Override
        public void init(final ProcessorContext context, final StateStore root) {
            context.register(root, true, null);
        }
        @Override
        public boolean persistent() {
            return true;
        }
    };
    Map<String, SourceNode> sourceByTopics = new HashMap() {
        {
            put(partition1.topic(), source1);
            put(partition2.topic(), source2);
        }
    };
    final ProcessorTopology topology = new ProcessorTopology(Collections.<ProcessorNode>emptyList(),
        sourceByTopics,
        Collections.<String, SinkNode>emptyMap(),
        Collections.<StateStore>singletonList(inMemoryStore),
        Collections.singletonMap(storeName, changelogTopic),
        Collections.<StateStore>emptyList());
    final TopicPartition partition = new TopicPartition(changelogTopic, 0);
    restoreStateConsumer.updatePartitions(changelogTopic, Collections.singletonList(
        new PartitionInfo(changelogTopic, 0, null, new Node[0], new Node[0])));
    restoreStateConsumer.updateEndOffsets(Collections.singletonMap(partition, 0L));
    restoreStateConsumer.updateBeginningOffsets(Collections.singletonMap(partition, 0L));
    final long offset = 543L;
    final StreamTask streamTask = new StreamTask(taskId00, "appId", partitions, topology, consumer, changelogReader, testConfig,
        streamsMetrics, stateDirectory, null, time, producer) {
        @Override
        RecordCollector createRecordCollector() {
            return new NoOpRecordCollector() {
                @Override
                public Map<TopicPartition, Long> offsets() {
                    return Collections.singletonMap(partition, offset);
                }
            };
        }
    };
    time.sleep(testConfig.getLong(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG));
    streamTask.commit();
    final File checkpointFile = new File(stateDirectory.directoryForTask(taskId00),
        ProcessorStateManager.CHECKPOINT_FILE_NAME);
    assertFalse(checkpointFile.exists());
}
// JAX-RS resource for the Kafka Connect connectors REST API
// (snippet: listConnectors shown in full, remaining members as signatures).
ConnectorsResource {
    /**
     * GET /connectors — lists all connector names known to the herder.
     * Completes the herder callback, or — presumably when this worker is not
     * the leader — forwards the request via completeOrForwardRequest
     * (the 'forward' query param controls re-forwarding; TODO confirm there).
     */
    @GET
    @Path("/")
    public Collection<String> listConnectors(final @QueryParam("forward") Boolean forward) throws Throwable {
        FutureCallback<Collection<String>> cb = new FutureCallback<>();
        herder.connectors(cb);
        return completeOrForwardRequest(cb, "/connectors", "GET", null, new TypeReference<Collection<String>>() {
        }, forward);
    }
    ConnectorsResource(Herder herder);
    @GET @Path("/") Collection<String> listConnectors(final @QueryParam("forward") Boolean forward);
    @POST @Path("/") Response createConnector(final @QueryParam("forward") Boolean forward, final CreateConnectorRequest createRequest);
    @GET @Path("/{connector}") ConnectorInfo getConnector(final @PathParam("connector") String connector, final @QueryParam("forward") Boolean forward);
    @GET @Path("/{connector}/config") Map<String, String> getConnectorConfig(final @PathParam("connector") String connector, final @QueryParam("forward") Boolean forward);
    @GET @Path("/{connector}/status") ConnectorStateInfo getConnectorStatus(final @PathParam("connector") String connector);
    @PUT @Path("/{connector}/config") Response putConnectorConfig(final @PathParam("connector") String connector, final @QueryParam("forward") Boolean forward, final Map<String, String> connectorConfig);
    @POST @Path("/{connector}/restart") void restartConnector(final @PathParam("connector") String connector, final @QueryParam("forward") Boolean forward);
    @PUT @Path("/{connector}/pause") Response pauseConnector(@PathParam("connector") String connector);
    @PUT @Path("/{connector}/resume") Response resumeConnector(@PathParam("connector") String connector);
    @GET @Path("/{connector}/tasks") List<TaskInfo> getTaskConfigs(final @PathParam("connector") String connector, final @QueryParam("forward") Boolean forward);
    @POST @Path("/{connector}/tasks") void putTaskConfigs(final @PathParam("connector") String connector, final @QueryParam("forward") Boolean forward, final List<Map<String, String>> taskConfigs);
    @GET @Path("/{connector}/tasks/{task}/status") ConnectorStateInfo.TaskState getTaskStatus(final @PathParam("connector") String connector, final @PathParam("task") Integer task);
    @POST @Path("/{connector}/tasks/{task}/restart") void restartTask(final @PathParam("connector") String connector, final @PathParam("task") Integer task, final @QueryParam("forward") Boolean forward);
    @DELETE @Path("/{connector}") void destroyConnector(final @PathParam("connector") String connector, final @QueryParam("forward") Boolean forward);
}
// Happy path: the herder callback completes locally with the connector names.
@Test
public void testListConnectors() throws Throwable {
    final Capture<Callback<Collection<String>>> cb = Capture.newInstance();
    herder.connectors(EasyMock.capture(cb));
    expectAndCallbackResult(cb, Arrays.asList(CONNECTOR2_NAME, CONNECTOR_NAME));
    PowerMock.replayAll();
    Collection<String> connectors = connectorsResource.listConnectors(FORWARD);
    assertEquals(new HashSet<>(Arrays.asList(CONNECTOR_NAME, CONNECTOR2_NAME)), new HashSet<>(connectors));
    PowerMock.verifyAll();
}

// Not-leader path: the request is forwarded over HTTP to the leader.
// NOTE(review): the "http: string literal below appears truncated by text
// extraction (the full forward URL is missing) — restore from the original
// ConnectorsResourceTest before reusing this code.
@Test
public void testListConnectorsNotLeader() throws Throwable {
    final Capture<Callback<Collection<String>>> cb = Capture.newInstance();
    herder.connectors(EasyMock.capture(cb));
    expectAndCallbackNotLeaderException(cb);
    EasyMock.expect(RestServer.httpRequest(EasyMock.eq("http: EasyMock.isNull(), EasyMock.anyObject(TypeReference.class)))
        .andReturn(new RestServer.HttpResponse<>(200, new HashMap<String, List<String>>(), Arrays.asList(CONNECTOR2_NAME, CONNECTOR_NAME)));
    PowerMock.replayAll();
    Collection<String> connectors = connectorsResource.listConnectors(FORWARD);
    assertEquals(new HashSet<>(Arrays.asList(CONNECTOR_NAME, CONNECTOR2_NAME)), new HashSet<>(connectors));
    PowerMock.verifyAll();
}

// A non-rebalance herder failure propagates as ConnectException.
@Test(expected = ConnectException.class)
public void testListConnectorsNotSynced() throws Throwable {
    final Capture<Callback<Collection<String>>> cb = Capture.newInstance();
    herder.connectors(EasyMock.capture(cb));
    expectAndCallbackException(cb, new ConnectException("not synced"));
    PowerMock.replayAll();
    connectorsResource.listConnectors(FORWARD);
}
StreamTask extends AbstractTask implements Punctuator {
    /**
     * Invokes the given processor node's punctuate() at the given timestamp.
     * Must not be called while another node is current; the current node is
     * set for the duration of the call and always cleared afterwards.
     *
     * @throws IllegalStateException if a current node is already set
     * @throws StreamsException      wrapping any KafkaException thrown by the node
     */
    @Override
    public void punctuate(final ProcessorNode node, final long timestamp) {
        if (processorContext.currentNode() != null) {
            throw new IllegalStateException(String.format("%s Current node is not null", logPrefix));
        }
        // Install a dummy record carrying the punctuation timestamp as context.
        updateProcessorContext(new StampedRecord(DUMMY_RECORD, timestamp), node);
        log.trace("{} Punctuating processor {} with timestamp {}", logPrefix, node.name(), timestamp);
        try {
            node.punctuate(timestamp);
        } catch (final KafkaException e) {
            throw new StreamsException(String.format("%s Exception caught while punctuating processor '%s'", logPrefix, node.name()), e);
        } finally {
            // Always clear the current node, even on failure.
            processorContext.setCurrentNode(null);
        }
    }
    StreamTask(final TaskId id, final String applicationId, final Collection<TopicPartition> partitions, final ProcessorTopology topology, final Consumer<byte[], byte[]> consumer, final ChangelogReader changelogReader, final StreamsConfig config, final StreamsMetrics metrics, final StateDirectory stateDirectory, final ThreadCache cache, final Time time, final Producer<byte[], byte[]> producer);
    @Override void resume();
    @SuppressWarnings("unchecked") boolean process();
    @Override void punctuate(final ProcessorNode node, final long timestamp);
    @Override void commit();
    @Override void suspend();
    @Override void close(boolean clean);
    @SuppressWarnings("unchecked") int addRecords(final TopicPartition partition, final Iterable<ConsumerRecord<byte[], byte[]>> records);
    void schedule(final long interval);
}
// punctuate() must invoke the given node with the exact timestamp passed in.
@Test
public void shouldCallPunctuateOnPassedInProcessorNode() throws Exception {
    task.punctuate(processor, 5);
    assertThat(processor.punctuatedAt, equalTo(5L));
    task.punctuate(processor, 10);
    assertThat(processor.punctuatedAt, equalTo(10L));
}
StreamTask extends AbstractTask implements Punctuator {
    /**
     * Registers a punctuation schedule for the currently-processing node.
     * May only be called while a node is executing (i.e. a current node is set).
     *
     * @param interval the punctuation interval
     * @throws IllegalStateException if no node is currently processing
     */
    public void schedule(final long interval) {
        final ProcessorNode current = processorContext.currentNode();
        if (current == null) {
            throw new IllegalStateException(String.format("%s Current node is null", logPrefix));
        }
        punctuationQueue.schedule(new PunctuationSchedule(current, interval));
    }
    StreamTask(final TaskId id, final String applicationId, final Collection<TopicPartition> partitions, final ProcessorTopology topology, final Consumer<byte[], byte[]> consumer, final ChangelogReader changelogReader, final StreamsConfig config, final StreamsMetrics metrics, final StateDirectory stateDirectory, final ThreadCache cache, final Time time, final Producer<byte[], byte[]> producer);
    @Override void resume();
    @SuppressWarnings("unchecked") boolean process();
    @Override void punctuate(final ProcessorNode node, final long timestamp);
    @Override void commit();
    @Override void suspend();
    @Override void close(boolean clean);
    @SuppressWarnings("unchecked") int addRecords(final TopicPartition partition, final Iterable<ConsumerRecord<byte[], byte[]>> records);
    void schedule(final long interval);
}
// schedule() outside of a processing context (no current node) must fail.
@Test(expected = IllegalStateException.class)
public void shouldThrowIllegalStateExceptionOnScheduleIfCurrentNodeIsNull() throws Exception {
    task.schedule(1);
}
StreamTask extends AbstractTask implements Punctuator {
    /**
     * Closes the task: suspends processing, closes the state manager, then
     * releases partition-group/metric resources and — when exactly-once is
     * enabled — finishes the producer transaction and closes the collector.
     *
     * Each step is attempted even if an earlier one failed; the first
     * exception seen is rethrown at the end. Any failure downgrades the close
     * to "dirty" (clean = false), which under EOS aborts the open transaction.
     *
     * @param clean whether to commit/flush on the way down; forced to false
     *              once any step fails
     */
    @Override
    public void close(boolean clean) {
        log.debug("{} Closing", logPrefix);
        RuntimeException firstException = null;
        try {
            suspend(clean);
        } catch (final RuntimeException e) {
            clean = false;
            firstException = e;
            log.error("{} Could not close task: ", logPrefix, e);
        }
        try {
            closeStateManager(clean);
        } catch (final RuntimeException e) {
            clean = false;
            if (firstException == null) {
                firstException = e;
            }
            log.error("{} Could not close state manager: ", logPrefix, e);
        }
        try {
            partitionGroup.close();
            metrics.removeAllSensors();
        } finally {
            if (eosEnabled) {
                if (!clean) {
                    try {
                        // Dirty close under EOS: abort whatever is in flight.
                        producer.abortTransaction();
                        transactionInFlight = false;
                    } catch (final ProducerFencedException e) {
                        // Another instance took over the transactional id; nothing to abort.
                    }
                }
                try {
                    recordCollector.close();
                } catch (final Throwable e) {
                    log.error("{} Failed to close producer: ", logPrefix, e);
                }
            }
        }
        if (firstException != null) {
            throw firstException;
        }
    }
    StreamTask(final TaskId id, final String applicationId, final Collection<TopicPartition> partitions, final ProcessorTopology topology, final Consumer<byte[], byte[]> consumer, final ChangelogReader changelogReader, final StreamsConfig config, final StreamsMetrics metrics, final StateDirectory stateDirectory, final ThreadCache cache, final Time time, final Producer<byte[], byte[]> producer);
    @Override void resume();
    @SuppressWarnings("unchecked") boolean process();
    @Override void punctuate(final ProcessorNode node, final long timestamp);
    @Override void commit();
    @Override void suspend();
    @Override void close(boolean clean);
    @SuppressWarnings("unchecked") int addRecords(final TopicPartition partition, final Iterable<ConsumerRecord<byte[], byte[]>> records);
    void schedule(final long interval);
}
// close() must rethrow the first failure but still close every node.
@SuppressWarnings("unchecked")
@Test
public void shouldThrowExceptionIfAnyExceptionsRaisedDuringCloseButStillCloseAllProcessorNodesTopology() throws Exception {
    task.close(true);
    task = createTaskThatThrowsExceptionOnClose();
    try {
        task.close(true);
        fail("should have thrown runtime exception");
    } catch (final RuntimeException e) {
        task = null;
    }
    assertTrue(processor.closed);
    assertTrue(source1.closed);
    assertTrue(source2.closed);
}

// Dirty close with EOS must abort the producer transaction.
@Test
public void shouldAbortTransactionOnDirtyClosedIfEosEnabled() throws Exception {
    final MockProducer producer = new MockProducer();
    task = new StreamTask(taskId00, applicationId, partitions, topology, consumer, changelogReader, eosConfig, streamsMetrics,
        stateDirectory, null, time, producer);
    task.close(false);
    task = null;
    assertTrue(producer.transactionAborted());
}

// Without EOS there is no transaction to abort, even on a dirty close.
@Test
public void shouldNotAbortTransactionOnDirtyClosedIfEosDisabled() throws Exception {
    final MockProducer producer = new MockProducer();
    task = new StreamTask(taskId00, applicationId, partitions, topology, consumer, changelogReader, config, streamsMetrics,
        stateDirectory, null, time, producer);
    task.close(false);
    assertFalse(producer.transactionAborted());
}

// Clean close with EOS must close the (task-owned) producer.
@SuppressWarnings("unchecked")
@Test
public void shouldCloseProducerOnCloseWhenEosEnabled() throws Exception {
    final MockProducer producer = new MockProducer();
    task = new StreamTask(taskId00, applicationId, partitions, topology, consumer, changelogReader, eosConfig, streamsMetrics,
        stateDirectory, null, time, producer);
    task.close(true);
    task = null;
    assertTrue(producer.closed());
}
StreamTask extends AbstractTask implements Punctuator {
    /**
     * Suspends the task cleanly (suspend(true)); the boolean overload's exact
     * semantics are defined elsewhere — presumably "clean", i.e. commit/flush
     * before suspending; TODO confirm at suspend(boolean).
     */
    @Override
    public void suspend() {
        suspend(true);
    }
    StreamTask(final TaskId id, final String applicationId, final Collection<TopicPartition> partitions, final ProcessorTopology topology, final Consumer<byte[], byte[]> consumer, final ChangelogReader changelogReader, final StreamsConfig config, final StreamsMetrics metrics, final StateDirectory stateDirectory, final ThreadCache cache, final Time time, final Producer<byte[], byte[]> producer);
    @Override void resume();
    @SuppressWarnings("unchecked") boolean process();
    @Override void punctuate(final ProcessorNode node, final long timestamp);
    @Override void commit();
    @Override void suspend();
    @Override void close(boolean clean);
    @SuppressWarnings("unchecked") int addRecords(final TopicPartition partition, final Iterable<ConsumerRecord<byte[], byte[]>> records);
    void schedule(final long interval);
}
// Suspending with EOS must commit the transaction even if nothing was sent,
// leaving no transaction in flight.
@Test
public void shouldCommitTransactionOnSuspendEvenIfTransactionIsEmptyIfEosEnabled() throws Exception {
    final MockProducer producer = new MockProducer();
    task = new StreamTask(taskId00, applicationId, partitions, topology, consumer, changelogReader, eosConfig, streamsMetrics,
        stateDirectory, null, time, producer);
    task.suspend();
    assertTrue(producer.transactionCommitted());
    assertFalse(producer.transactionInFlight());
}
StreamsMetadataState {
    /**
     * Returns metadata for every instance that hosts the given state store.
     * Global stores are hosted by all instances; an unknown store name (no
     * registered source topics) yields an empty collection. Returns an empty
     * collection before the first rebalance completes.
     *
     * @param storeName name of the store; must not be null
     * @throws NullPointerException if {@code storeName} is null
     */
    public synchronized Collection<StreamsMetadata> getAllMetadataForStore(final String storeName) {
        Objects.requireNonNull(storeName, "storeName cannot be null");
        if (!isInitialized()) {
            return Collections.emptyList();
        }
        // Every instance hosts all global stores.
        if (globalStores.contains(storeName)) {
            return allMetadata;
        }
        // Unknown store: no source topics registered under this name.
        if (builder.stateStoreNameToSourceTopics().get(storeName) == null) {
            return Collections.emptyList();
        }
        final List<StreamsMetadata> matches = new ArrayList<>();
        for (final StreamsMetadata candidate : allMetadata) {
            if (candidate.stateStoreNames().contains(storeName)) {
                matches.add(candidate);
            }
        }
        return matches;
    }
    StreamsMetadataState(final TopologyBuilder builder, final HostInfo thisHost);
    synchronized Collection<StreamsMetadata> getAllMetadata();
    synchronized Collection<StreamsMetadata> getAllMetadataForStore(final String storeName);
    synchronized StreamsMetadata getMetadataWithKey(final String storeName, final K key, final Serializer<K> keySerializer);
    synchronized StreamsMetadata getMetadataWithKey(final String storeName, final K key, final StreamPartitioner<? super K, ?> partitioner);
    synchronized void onChange(final Map<HostInfo, Set<TopicPartition>> currentState, final Cluster clusterMetadata);
    static final HostInfo UNKNOWN_HOST;
}
@Test public void shouldNotThrowNPEWhenOnChangeNotCalled() throws Exception { new StreamsMetadataState(builder, hostOne).getAllMetadataForStore("store"); } @Test public void shouldGetInstancesForStoreName() throws Exception { final StreamsMetadata one = new StreamsMetadata(hostOne, Utils.mkSet(globalTable, "table-one", "table-two", "merged-table"), Utils.mkSet(topic1P0, topic2P1, topic4P0)); final StreamsMetadata two = new StreamsMetadata(hostTwo, Utils.mkSet(globalTable, "table-two", "table-one", "merged-table"), Utils.mkSet(topic2P0, topic1P1)); final Collection<StreamsMetadata> actual = discovery.getAllMetadataForStore("table-one"); assertEquals(2, actual.size()); assertTrue("expected " + actual + " to contain " + one, actual.contains(one)); assertTrue("expected " + actual + " to contain " + two, actual.contains(two)); } @Test(expected = NullPointerException.class) public void shouldThrowIfStoreNameIsNullOnGetAllInstancesWithStore() throws Exception { discovery.getAllMetadataForStore(null); } @Test public void shouldReturnEmptyCollectionOnGetAllInstancesWithStoreWhenStoreDoesntExist() throws Exception { final Collection<StreamsMetadata> actual = discovery.getAllMetadataForStore("not-a-store"); assertTrue(actual.isEmpty()); } @Test public void shouldHaveGlobalStoreInAllMetadata() throws Exception { final Collection<StreamsMetadata> metadata = discovery.getAllMetadataForStore(globalTable); assertEquals(3, metadata.size()); for (StreamsMetadata streamsMetadata : metadata) { assertTrue(streamsMetadata.stateStoreNames().contains(globalTable)); } }
StreamsMetadataState { public synchronized Collection<StreamsMetadata> getAllMetadata() { return allMetadata; } StreamsMetadataState(final TopologyBuilder builder, final HostInfo thisHost); synchronized Collection<StreamsMetadata> getAllMetadata(); synchronized Collection<StreamsMetadata> getAllMetadataForStore(final String storeName); synchronized StreamsMetadata getMetadataWithKey(final String storeName, final K key, final Serializer<K> keySerializer); synchronized StreamsMetadata getMetadataWithKey(final String storeName, final K key, final StreamPartitioner<? super K, ?> partitioner); synchronized void onChange(final Map<HostInfo, Set<TopicPartition>> currentState, final Cluster clusterMetadata); static final HostInfo UNKNOWN_HOST; }
@Test public void shouldGetAllStreamInstances() throws Exception { final StreamsMetadata one = new StreamsMetadata(hostOne, Utils.mkSet(globalTable, "table-one", "table-two", "merged-table"), Utils.mkSet(topic1P0, topic2P1, topic4P0)); final StreamsMetadata two = new StreamsMetadata(hostTwo, Utils.mkSet(globalTable, "table-two", "table-one", "merged-table"), Utils.mkSet(topic2P0, topic1P1)); final StreamsMetadata three = new StreamsMetadata(hostThree, Utils.mkSet(globalTable, "table-three"), Collections.singleton(topic3P0)); Collection<StreamsMetadata> actual = discovery.getAllMetadata(); assertEquals(3, actual.size()); assertTrue("expected " + actual + " to contain " + one, actual.contains(one)); assertTrue("expected " + actual + " to contain " + two, actual.contains(two)); assertTrue("expected " + actual + " to contain " + three, actual.contains(three)); }
StreamsMetadataState { public synchronized <K> StreamsMetadata getMetadataWithKey(final String storeName, final K key, final Serializer<K> keySerializer) { Objects.requireNonNull(keySerializer, "keySerializer can't be null"); Objects.requireNonNull(storeName, "storeName can't be null"); Objects.requireNonNull(key, "key can't be null"); if (!isInitialized()) { return StreamsMetadata.NOT_AVAILABLE; } if (globalStores.contains(storeName)) { if (thisHost == UNKNOWN_HOST) { return allMetadata.get(0); } return myMetadata; } final SourceTopicsInfo sourceTopicsInfo = getSourceTopicsInfo(storeName); if (sourceTopicsInfo == null) { return null; } return getStreamsMetadataForKey(storeName, key, new DefaultStreamPartitioner<>(keySerializer, clusterMetadata, sourceTopicsInfo.topicWithMostPartitions), sourceTopicsInfo); } StreamsMetadataState(final TopologyBuilder builder, final HostInfo thisHost); synchronized Collection<StreamsMetadata> getAllMetadata(); synchronized Collection<StreamsMetadata> getAllMetadataForStore(final String storeName); synchronized StreamsMetadata getMetadataWithKey(final String storeName, final K key, final Serializer<K> keySerializer); synchronized StreamsMetadata getMetadataWithKey(final String storeName, final K key, final StreamPartitioner<? super K, ?> partitioner); synchronized void onChange(final Map<HostInfo, Set<TopicPartition>> currentState, final Cluster clusterMetadata); static final HostInfo UNKNOWN_HOST; }
@Test public void shouldReturnNullOnGetWithKeyWhenStoreDoesntExist() throws Exception { final StreamsMetadata actual = discovery.getMetadataWithKey("not-a-store", "key", Serdes.String().serializer()); assertNull(actual); } @Test(expected = NullPointerException.class) public void shouldThrowWhenKeyIsNull() throws Exception { discovery.getMetadataWithKey("table-three", null, Serdes.String().serializer()); } @Test(expected = NullPointerException.class) public void shouldThrowWhenSerializerIsNull() throws Exception { discovery.getMetadataWithKey("table-three", "key", (Serializer) null); } @Test(expected = NullPointerException.class) public void shouldThrowIfStoreNameIsNull() throws Exception { discovery.getMetadataWithKey(null, "key", Serdes.String().serializer()); } @SuppressWarnings("unchecked") @Test(expected = NullPointerException.class) public void shouldThrowIfStreamPartitionerIsNull() throws Exception { discovery.getMetadataWithKey(null, "key", (StreamPartitioner) null); } @Test public void shouldGetMyMetadataForGlobalStoreWithKey() throws Exception { final StreamsMetadata metadata = discovery.getMetadataWithKey(globalTable, "key", Serdes.String().serializer()); assertEquals(hostOne, metadata.hostInfo()); } @Test public void shouldGetMyMetadataForGlobalStoreWithKeyAndPartitioner() throws Exception { final StreamsMetadata metadata = discovery.getMetadataWithKey(globalTable, "key", partitioner); assertEquals(hostOne, metadata.hostInfo()); }
SinkNode extends ProcessorNode<K, V> { @Override public void process(final K key, final V value) { final RecordCollector collector = ((RecordCollector.Supplier) context).recordCollector(); final long timestamp = context.timestamp(); if (timestamp < 0) { throw new StreamsException("Invalid (negative) timestamp of " + timestamp + " for output record <" + key + ":" + value + ">."); } try { collector.send(topic, key, value, timestamp, keySerializer, valSerializer, partitioner); } catch (final ClassCastException e) { final String keyClass = key == null ? "unknown because key is null" : key.getClass().getName(); final String valueClass = value == null ? "unknown because value is null" : value.getClass().getName(); throw new StreamsException( String.format("A serializer (key: %s / value: %s) is not compatible to the actual key or value type " + "(key type: %s / value type: %s). Change the default Serdes in StreamConfig or " + "provide correct Serdes via method parameters.", keySerializer.getClass().getName(), valSerializer.getClass().getName(), keyClass, valueClass), e); } } SinkNode(final String name, final String topic, final Serializer<K> keySerializer, final Serializer<V> valSerializer, final StreamPartitioner<? super K, ? super V> partitioner); @Override void addChild(final ProcessorNode<?, ?> child); @SuppressWarnings("unchecked") @Override void init(final ProcessorContext context); @Override void process(final K key, final V value); @Override String toString(); @Override String toString(final String indent); }
@Test @SuppressWarnings("unchecked") public void shouldThrowStreamsExceptionOnInputRecordWithInvalidTimestamp() { final Bytes anyKey = new Bytes("any key".getBytes()); final Bytes anyValue = new Bytes("any value".getBytes()); context.setTime(-1); try { sink.process(anyKey, anyValue); fail("Should have thrown StreamsException"); } catch (final StreamsException ignored) { } } @Test @SuppressWarnings("unchecked") public void shouldThrowStreamsExceptionOnKeyValueTypeSerializerMismatch() { final String keyOfDifferentTypeThanSerializer = "key with different type"; final String valueOfDifferentTypeThanSerializer = "value with different type"; context.setTime(0); try { sink.process(keyOfDifferentTypeThanSerializer, valueOfDifferentTypeThanSerializer); fail("Should have thrown StreamsException"); } catch (final StreamsException e) { assertThat(e.getCause(), instanceOf(ClassCastException.class)); } } @Test @SuppressWarnings("unchecked") public void shouldHandleNullKeysWhenThrowingStreamsExceptionOnKeyValueTypeSerializerMismatch() { final String invalidValueToTriggerSerializerMismatch = ""; context.setTime(1); try { sink.process(null, invalidValueToTriggerSerializerMismatch); fail("Should have thrown StreamsException"); } catch (final StreamsException e) { assertThat(e.getCause(), instanceOf(ClassCastException.class)); assertThat(e.getMessage(), containsString("unknown because key is null")); } } @Test @SuppressWarnings("unchecked") public void shouldHandleNullValuesWhenThrowingStreamsExceptionOnKeyValueTypeSerializerMismatch() { final String invalidKeyToTriggerSerializerMismatch = ""; context.setTime(1); try { sink.process(invalidKeyToTriggerSerializerMismatch, null); fail("Should have thrown StreamsException"); } catch (final StreamsException e) { assertThat(e.getCause(), instanceOf(ClassCastException.class)); assertThat(e.getMessage(), containsString("unknown because value is null")); } }
ProcessorTopology { public List<StateStore> globalStateStores() { return globalStateStores; } ProcessorTopology(final List<ProcessorNode> processorNodes, final Map<String, SourceNode> sourceByTopics, final Map<String, SinkNode> sinkByTopics, final List<StateStore> stateStores, final Map<String, String> storeToChangelogTopic, final List<StateStore> globalStateStores); Set<String> sourceTopics(); SourceNode source(String topic); Set<SourceNode> sources(); Set<String> sinkTopics(); SinkNode sink(String topic); Set<SinkNode> sinks(); List<ProcessorNode> processors(); List<StateStore> stateStores(); Map<String, String> storeToChangelogTopic(); List<StateStore> globalStateStores(); @Override String toString(); String toString(final String indent); }
@SuppressWarnings("unchecked") @Test public void shouldDriveGlobalStore() throws Exception { final StateStoreSupplier storeSupplier = Stores.create("my-store") .withStringKeys().withStringValues().inMemory().disableLogging().build(); final String global = "global"; final String topic = "topic"; final TopologyBuilder topologyBuilder = this.builder .addGlobalStore(storeSupplier, global, STRING_DESERIALIZER, STRING_DESERIALIZER, topic, "processor", define(new StatefulProcessor("my-store"))); driver = new ProcessorTopologyTestDriver(config, topologyBuilder); final KeyValueStore<String, String> globalStore = (KeyValueStore<String, String>) topologyBuilder.globalStateStores().get("my-store"); driver.process(topic, "key1", "value1", STRING_SERIALIZER, STRING_SERIALIZER); driver.process(topic, "key2", "value2", STRING_SERIALIZER, STRING_SERIALIZER); assertEquals("value1", globalStore.get("key1")); assertEquals("value2", globalStore.get("key2")); }
ProcessorTopology { @Override public String toString() { return toString(""); } ProcessorTopology(final List<ProcessorNode> processorNodes, final Map<String, SourceNode> sourceByTopics, final Map<String, SinkNode> sinkByTopics, final List<StateStore> stateStores, final Map<String, String> storeToChangelogTopic, final List<StateStore> globalStateStores); Set<String> sourceTopics(); SourceNode source(String topic); Set<SourceNode> sources(); Set<String> sinkTopics(); SinkNode sink(String topic); Set<SinkNode> sinks(); List<ProcessorNode> processors(); List<StateStore> stateStores(); Map<String, String> storeToChangelogTopic(); List<StateStore> globalStateStores(); @Override String toString(); String toString(final String indent); }
@Test public void shouldCreateStringWithSourceAndTopics() throws Exception { builder.addSource("source", "topic1", "topic2"); final ProcessorTopology topology = builder.build(null); final String result = topology.toString(); assertThat(result, containsString("source:\n\t\ttopics:\t\t[topic1, topic2]\n")); } @Test public void shouldCreateStringWithMultipleSourcesAndTopics() throws Exception { builder.addSource("source", "topic1", "topic2"); builder.addSource("source2", "t", "t1", "t2"); final ProcessorTopology topology = builder.build(null); final String result = topology.toString(); assertThat(result, containsString("source:\n\t\ttopics:\t\t[topic1, topic2]\n")); assertThat(result, containsString("source2:\n\t\ttopics:\t\t[t, t1, t2]\n")); } @Test public void shouldCreateStringWithProcessors() throws Exception { builder.addSource("source", "t") .addProcessor("processor", mockProcessorSupplier, "source") .addProcessor("other", mockProcessorSupplier, "source"); final ProcessorTopology topology = builder.build(null); final String result = topology.toString(); assertThat(result, containsString("\t\tchildren:\t[processor, other]")); assertThat(result, containsString("processor:\n")); assertThat(result, containsString("other:\n")); } @Test public void shouldRecursivelyPrintChildren() throws Exception { builder.addSource("source", "t") .addProcessor("processor", mockProcessorSupplier, "source") .addProcessor("child-one", mockProcessorSupplier, "processor") .addProcessor("child-one-one", mockProcessorSupplier, "child-one") .addProcessor("child-two", mockProcessorSupplier, "processor") .addProcessor("child-two-one", mockProcessorSupplier, "child-two"); final String result = builder.build(null).toString(); assertThat(result, containsString("child-one:\n\t\tchildren:\t[child-one-one]")); assertThat(result, containsString("child-two:\n\t\tchildren:\t[child-two-one]")); }
ConnectorsResource { @POST @Path("/") public Response createConnector(final @QueryParam("forward") Boolean forward, final CreateConnectorRequest createRequest) throws Throwable { String name = createRequest.name(); if (name.contains("/")) { throw new BadRequestException("connector name should not contain '/'"); } Map<String, String> configs = createRequest.config(); if (!configs.containsKey(ConnectorConfig.NAME_CONFIG)) configs.put(ConnectorConfig.NAME_CONFIG, name); FutureCallback<Herder.Created<ConnectorInfo>> cb = new FutureCallback<>(); herder.putConnectorConfig(name, configs, false, cb); Herder.Created<ConnectorInfo> info = completeOrForwardRequest(cb, "/connectors", "POST", createRequest, new TypeReference<ConnectorInfo>() { }, new CreatedConnectorInfoTranslator(), forward); return Response.created(URI.create("/connectors/" + name)).entity(info.result()).build(); } ConnectorsResource(Herder herder); @GET @Path("/") Collection<String> listConnectors(final @QueryParam("forward") Boolean forward); @POST @Path("/") Response createConnector(final @QueryParam("forward") Boolean forward, final CreateConnectorRequest createRequest); @GET @Path("/{connector}") ConnectorInfo getConnector(final @PathParam("connector") String connector, final @QueryParam("forward") Boolean forward); @GET @Path("/{connector}/config") Map<String, String> getConnectorConfig(final @PathParam("connector") String connector, final @QueryParam("forward") Boolean forward); @GET @Path("/{connector}/status") ConnectorStateInfo getConnectorStatus(final @PathParam("connector") String connector); @PUT @Path("/{connector}/config") Response putConnectorConfig(final @PathParam("connector") String connector, final @QueryParam("forward") Boolean forward, final Map<String, String> connectorConfig); @POST @Path("/{connector}/restart") void restartConnector(final @PathParam("connector") String connector, final @QueryParam("forward") Boolean forward); @PUT @Path("/{connector}/pause") Response 
pauseConnector(@PathParam("connector") String connector); @PUT @Path("/{connector}/resume") Response resumeConnector(@PathParam("connector") String connector); @GET @Path("/{connector}/tasks") List<TaskInfo> getTaskConfigs(final @PathParam("connector") String connector, final @QueryParam("forward") Boolean forward); @POST @Path("/{connector}/tasks") void putTaskConfigs(final @PathParam("connector") String connector, final @QueryParam("forward") Boolean forward, final List<Map<String, String>> taskConfigs); @GET @Path("/{connector}/tasks/{task}/status") ConnectorStateInfo.TaskState getTaskStatus(final @PathParam("connector") String connector, final @PathParam("task") Integer task); @POST @Path("/{connector}/tasks/{task}/restart") void restartTask(final @PathParam("connector") String connector, final @PathParam("task") Integer task, final @QueryParam("forward") Boolean forward); @DELETE @Path("/{connector}") void destroyConnector(final @PathParam("connector") String connector, final @QueryParam("forward") Boolean forward); }
@Test public void testCreateConnector() throws Throwable { CreateConnectorRequest body = new CreateConnectorRequest(CONNECTOR_NAME, Collections.singletonMap(ConnectorConfig.NAME_CONFIG, CONNECTOR_NAME)); final Capture<Callback<Herder.Created<ConnectorInfo>>> cb = Capture.newInstance(); herder.putConnectorConfig(EasyMock.eq(CONNECTOR_NAME), EasyMock.eq(body.config()), EasyMock.eq(false), EasyMock.capture(cb)); expectAndCallbackResult(cb, new Herder.Created<>(true, new ConnectorInfo(CONNECTOR_NAME, CONNECTOR_CONFIG, CONNECTOR_TASK_NAMES))); PowerMock.replayAll(); connectorsResource.createConnector(FORWARD, body); PowerMock.verifyAll(); }
InternalTopicConfig { public Properties toProperties(final long additionalRetentionMs) { final Properties result = new Properties(); for (Map.Entry<String, String> configEntry : logConfig.entrySet()) { result.put(configEntry.getKey(), configEntry.getValue()); } if (retentionMs != null && isCompactDelete()) { result.put(InternalTopicManager.RETENTION_MS, String.valueOf(retentionMs + additionalRetentionMs)); } if (!logConfig.containsKey(InternalTopicManager.CLEANUP_POLICY_PROP)) { final StringBuilder builder = new StringBuilder(); for (CleanupPolicy cleanupPolicy : cleanupPolicies) { builder.append(cleanupPolicy.name()).append(","); } builder.deleteCharAt(builder.length() - 1); result.put(InternalTopicManager.CLEANUP_POLICY_PROP, builder.toString()); } return result; } InternalTopicConfig(final String name, final Set<CleanupPolicy> defaultCleanupPolicies, final Map<String, String> logConfig); Properties toProperties(final long additionalRetentionMs); String name(); void setRetentionMs(final long retentionMs); @Override boolean equals(final Object o); @Override int hashCode(); }
@Test public void shouldHaveCompactionPropSetIfSupplied() throws Exception { final Properties properties = new InternalTopicConfig("name", Collections.singleton(InternalTopicConfig.CleanupPolicy.compact), Collections.<String, String>emptyMap()).toProperties(0); assertEquals("compact", properties.getProperty(InternalTopicManager.CLEANUP_POLICY_PROP)); } @Test public void shouldUseCleanupPolicyFromConfigIfSupplied() throws Exception { final InternalTopicConfig config = new InternalTopicConfig("name", Collections.singleton(InternalTopicConfig.CleanupPolicy.delete), Collections.singletonMap("cleanup.policy", "compact")); final Properties properties = config.toProperties(0); assertEquals("compact", properties.getProperty("cleanup.policy")); } @Test public void shouldHavePropertiesSuppliedByUser() throws Exception { final Map<String, String> configs = new HashMap<>(); configs.put("retention.ms", "1000"); configs.put("retention.bytes", "10000"); final InternalTopicConfig topicConfig = new InternalTopicConfig("name", Collections.singleton(InternalTopicConfig.CleanupPolicy.delete), configs); final Properties properties = topicConfig.toProperties(0); assertEquals("1000", properties.getProperty("retention.ms")); assertEquals("10000", properties.getProperty("retention.bytes")); }
InternalTopicConfig { boolean isCompacted() { return cleanupPolicies.contains(CleanupPolicy.compact); } InternalTopicConfig(final String name, final Set<CleanupPolicy> defaultCleanupPolicies, final Map<String, String> logConfig); Properties toProperties(final long additionalRetentionMs); String name(); void setRetentionMs(final long retentionMs); @Override boolean equals(final Object o); @Override int hashCode(); }
@Test public void shouldBeCompactedIfCleanupPolicyCompactOrCompactAndDelete() throws Exception { assertTrue(new InternalTopicConfig("name", Collections.singleton(InternalTopicConfig.CleanupPolicy.compact), Collections.<String, String>emptyMap()).isCompacted()); assertTrue(new InternalTopicConfig("name", Utils.mkSet(InternalTopicConfig.CleanupPolicy.compact, InternalTopicConfig.CleanupPolicy.delete), Collections.<String, String>emptyMap()).isCompacted()); } @Test public void shouldNotBeCompactedWhenCleanupPolicyIsDelete() throws Exception { assertFalse(new InternalTopicConfig("name", Collections.singleton(InternalTopicConfig.CleanupPolicy.delete), Collections.<String, String>emptyMap()).isCompacted()); }
StreamPartitionAssignor implements PartitionAssignor, Configurable { @Override public Subscription subscription(Set<String> topics) { final Set<TaskId> previousActiveTasks = streamThread.prevActiveTasks(); Set<TaskId> standbyTasks = streamThread.cachedTasks(); standbyTasks.removeAll(previousActiveTasks); SubscriptionInfo data = new SubscriptionInfo(streamThread.processId, previousActiveTasks, standbyTasks, this.userEndPoint); if (streamThread.builder.sourceTopicPattern() != null && !streamThread.builder.subscriptionUpdates().getUpdates().equals(topics)) { updateSubscribedTopics(topics); } return new Subscription(new ArrayList<>(topics), data.encode()); } @Override void configure(Map<String, ?> configs); @Override String name(); @Override Subscription subscription(Set<String> topics); @Override Map<String, Assignment> assign(Cluster metadata, Map<String, Subscription> subscriptions); @Override void onAssignment(Assignment assignment); void close(); final static int NOT_AVAILABLE; }
@SuppressWarnings("unchecked") @Test public void testSubscription() throws Exception { builder.addSource("source1", "topic1"); builder.addSource("source2", "topic2"); builder.addProcessor("processor", new MockProcessorSupplier(), "source1", "source2"); final Set<TaskId> prevTasks = Utils.mkSet( new TaskId(0, 1), new TaskId(1, 1), new TaskId(2, 1)); final Set<TaskId> cachedTasks = Utils.mkSet( new TaskId(0, 1), new TaskId(1, 1), new TaskId(2, 1), new TaskId(0, 2), new TaskId(1, 2), new TaskId(2, 2)); String clientId = "client-id"; UUID processId = UUID.randomUUID(); StreamThread thread = new StreamThread(builder, config, new MockClientSupplier(), "test", clientId, processId, new Metrics(), Time.SYSTEM, new StreamsMetadataState(builder, StreamsMetadataState.UNKNOWN_HOST), 0) { @Override public Set<TaskId> prevActiveTasks() { return prevTasks; } @Override public Set<TaskId> cachedTasks() { return cachedTasks; } }; partitionAssignor.configure(config.getConsumerConfigs(thread, "test", clientId)); PartitionAssignor.Subscription subscription = partitionAssignor.subscription(Utils.mkSet("topic1", "topic2")); Collections.sort(subscription.topics()); assertEquals(Utils.mkList("topic1", "topic2"), subscription.topics()); Set<TaskId> standbyTasks = new HashSet<>(cachedTasks); standbyTasks.removeAll(prevTasks); SubscriptionInfo info = new SubscriptionInfo(processId, prevTasks, standbyTasks, null); assertEquals(info.encode(), subscription.userData()); }
StreamPartitionAssignor implements PartitionAssignor, Configurable { @Override public void onAssignment(Assignment assignment) { List<TopicPartition> partitions = new ArrayList<>(assignment.partitions()); Collections.sort(partitions, PARTITION_COMPARATOR); AssignmentInfo info = AssignmentInfo.decode(assignment.userData()); this.standbyTasks = info.standbyTasks; this.activeTasks = new HashMap<>(); if (partitions.size() != info.activeTasks.size()) { throw new TaskAssignmentException( String.format("stream-thread [%s] Number of assigned partitions %d is not equal to the number of active taskIds %d" + ", assignmentInfo=%s", streamThread.getName(), partitions.size(), info.activeTasks.size(), info.toString()) ); } for (int i = 0; i < partitions.size(); i++) { TopicPartition partition = partitions.get(i); TaskId id = info.activeTasks.get(i); Set<TopicPartition> assignedPartitions = activeTasks.get(id); if (assignedPartitions == null) { assignedPartitions = new HashSet<>(); activeTasks.put(id, assignedPartitions); } assignedPartitions.add(partition); } this.partitionsByHostState = info.partitionsByHost; final Collection<Set<TopicPartition>> values = partitionsByHostState.values(); final Map<TopicPartition, PartitionInfo> topicToPartitionInfo = new HashMap<>(); for (Set<TopicPartition> value : values) { for (TopicPartition topicPartition : value) { topicToPartitionInfo.put(topicPartition, new PartitionInfo(topicPartition.topic(), topicPartition.partition(), null, new Node[0], new Node[0])); } } metadataWithInternalTopics = Cluster.empty().withPartitions(topicToPartitionInfo); checkForNewTopicAssignments(assignment); } @Override void configure(Map<String, ?> configs); @Override String name(); @Override Subscription subscription(Set<String> topics); @Override Map<String, Assignment> assign(Cluster metadata, Map<String, Subscription> subscriptions); @Override void onAssignment(Assignment assignment); void close(); final static int NOT_AVAILABLE; }
@Test public void testOnAssignment() throws Exception { TopicPartition t2p3 = new TopicPartition("topic2", 3); TopologyBuilder builder = new TopologyBuilder(); builder.addSource("source1", "topic1"); builder.addSource("source2", "topic2"); builder.addProcessor("processor", new MockProcessorSupplier(), "source1", "source2"); UUID uuid = UUID.randomUUID(); String client1 = "client1"; StreamThread thread = new StreamThread(builder, config, mockClientSupplier, "test", client1, uuid, new Metrics(), Time.SYSTEM, new StreamsMetadataState(builder, StreamsMetadataState.UNKNOWN_HOST), 0); partitionAssignor.configure(config.getConsumerConfigs(thread, "test", client1)); List<TaskId> activeTaskList = Utils.mkList(task0, task3); Map<TaskId, Set<TopicPartition>> activeTasks = new HashMap<>(); Map<TaskId, Set<TopicPartition>> standbyTasks = new HashMap<>(); activeTasks.put(task0, Utils.mkSet(t1p0)); activeTasks.put(task3, Utils.mkSet(t2p3)); standbyTasks.put(task1, Utils.mkSet(t1p0)); standbyTasks.put(task2, Utils.mkSet(t2p0)); AssignmentInfo info = new AssignmentInfo(activeTaskList, standbyTasks, new HashMap<HostInfo, Set<TopicPartition>>()); PartitionAssignor.Assignment assignment = new PartitionAssignor.Assignment(Utils.mkList(t1p0, t2p3), info.encode()); partitionAssignor.onAssignment(assignment); assertEquals(activeTasks, partitionAssignor.activeTasks()); assertEquals(standbyTasks, partitionAssignor.standbyTasks()); }
StreamPartitionAssignor implements PartitionAssignor, Configurable { @Override public void configure(Map<String, ?> configs) { numStandbyReplicas = (Integer) configs.get(StreamsConfig.NUM_STANDBY_REPLICAS_CONFIG); Object o = configs.get(StreamsConfig.InternalConfig.STREAM_THREAD_INSTANCE); if (o == null) { KafkaException ex = new KafkaException("StreamThread is not specified"); log.error(ex.getMessage(), ex); throw ex; } if (!(o instanceof StreamThread)) { KafkaException ex = new KafkaException(String.format("%s is not an instance of %s", o.getClass().getName(), StreamThread.class.getName())); log.error(ex.getMessage(), ex); throw ex; } streamThread = (StreamThread) o; streamThread.setPartitionAssignor(this); String userEndPoint = (String) configs.get(StreamsConfig.APPLICATION_SERVER_CONFIG); if (userEndPoint != null && !userEndPoint.isEmpty()) { try { String host = getHost(userEndPoint); Integer port = getPort(userEndPoint); if (host == null || port == null) throw new ConfigException(String.format("stream-thread [%s] Config %s isn't in the correct format. Expected a host:port pair" + " but received %s", streamThread.getName(), StreamsConfig.APPLICATION_SERVER_CONFIG, userEndPoint)); } catch (NumberFormatException nfe) { throw new ConfigException(String.format("stream-thread [%s] Invalid port supplied in %s for config %s", streamThread.getName(), userEndPoint, StreamsConfig.APPLICATION_SERVER_CONFIG)); } this.userEndPoint = userEndPoint; } internalTopicManager = new InternalTopicManager( new StreamsKafkaClient(this.streamThread.config), configs.containsKey(StreamsConfig.REPLICATION_FACTOR_CONFIG) ? (Integer) configs.get(StreamsConfig.REPLICATION_FACTOR_CONFIG) : 1, configs.containsKey(StreamsConfig.WINDOW_STORE_CHANGE_LOG_ADDITIONAL_RETENTION_MS_CONFIG) ? 
(Long) configs.get(StreamsConfig.WINDOW_STORE_CHANGE_LOG_ADDITIONAL_RETENTION_MS_CONFIG) : WINDOW_CHANGE_LOG_ADDITIONAL_RETENTION_DEFAULT, time); this.copartitionedTopicsValidator = new CopartitionedTopicsValidator(streamThread.getName()); } @Override void configure(Map<String, ?> configs); @Override String name(); @Override Subscription subscription(Set<String> topics); @Override Map<String, Assignment> assign(Cluster metadata, Map<String, Subscription> subscriptions); @Override void onAssignment(Assignment assignment); void close(); final static int NOT_AVAILABLE; }
@Test public void shouldThrowExceptionIfApplicationServerConfigPortIsNotAnInteger() throws Exception { final Properties properties = configProps(); final String myEndPoint = "localhost:j87yhk"; properties.put(StreamsConfig.APPLICATION_SERVER_CONFIG, myEndPoint); final StreamsConfig config = new StreamsConfig(properties); final UUID uuid1 = UUID.randomUUID(); final String client1 = "client1"; final String applicationId = "application-id"; builder.setApplicationId(applicationId); final StreamThread streamThread = new StreamThread(builder, config, mockClientSupplier, applicationId, client1, uuid1, new Metrics(), Time.SYSTEM, new StreamsMetadataState(builder, StreamsMetadataState.UNKNOWN_HOST), 0); try { partitionAssignor.configure(config.getConsumerConfigs(streamThread, applicationId, client1)); Assert.fail("expected to an exception due to invalid config"); } catch (ConfigException e) { } } @Test(expected = KafkaException.class) public void shouldThrowKafkaExceptionIfStreamThreadNotConfigured() throws Exception { partitionAssignor.configure(Collections.singletonMap(StreamsConfig.NUM_STANDBY_REPLICAS_CONFIG, 1)); } @Test(expected = KafkaException.class) public void shouldThrowKafkaExceptionIfStreamThreadConfigIsNotStreamThreadInstance() throws Exception { final Map<String, Object> config = new HashMap<>(); config.put(StreamsConfig.NUM_STANDBY_REPLICAS_CONFIG, 1); config.put(StreamsConfig.InternalConfig.STREAM_THREAD_INSTANCE, "i am not a stream thread"); partitionAssignor.configure(config); }
StreamPartitionAssignor implements PartitionAssignor, Configurable { Cluster clusterMetadata() { if (metadataWithInternalTopics == null) { return Cluster.empty(); } return metadataWithInternalTopics; } @Override void configure(Map<String, ?> configs); @Override String name(); @Override Subscription subscription(Set<String> topics); @Override Map<String, Assignment> assign(Cluster metadata, Map<String, Subscription> subscriptions); @Override void onAssignment(Assignment assignment); void close(); final static int NOT_AVAILABLE; }
@Test public void shouldReturnEmptyClusterMetadataIfItHasntBeenBuilt() throws Exception { final Cluster cluster = partitionAssignor.clusterMetadata(); assertNotNull(cluster); }
ConnectorsResource { @DELETE @Path("/{connector}") public void destroyConnector(final @PathParam("connector") String connector, final @QueryParam("forward") Boolean forward) throws Throwable { FutureCallback<Herder.Created<ConnectorInfo>> cb = new FutureCallback<>(); herder.deleteConnectorConfig(connector, cb); completeOrForwardRequest(cb, "/connectors/" + connector, "DELETE", null, forward); } ConnectorsResource(Herder herder); @GET @Path("/") Collection<String> listConnectors(final @QueryParam("forward") Boolean forward); @POST @Path("/") Response createConnector(final @QueryParam("forward") Boolean forward, final CreateConnectorRequest createRequest); @GET @Path("/{connector}") ConnectorInfo getConnector(final @PathParam("connector") String connector, final @QueryParam("forward") Boolean forward); @GET @Path("/{connector}/config") Map<String, String> getConnectorConfig(final @PathParam("connector") String connector, final @QueryParam("forward") Boolean forward); @GET @Path("/{connector}/status") ConnectorStateInfo getConnectorStatus(final @PathParam("connector") String connector); @PUT @Path("/{connector}/config") Response putConnectorConfig(final @PathParam("connector") String connector, final @QueryParam("forward") Boolean forward, final Map<String, String> connectorConfig); @POST @Path("/{connector}/restart") void restartConnector(final @PathParam("connector") String connector, final @QueryParam("forward") Boolean forward); @PUT @Path("/{connector}/pause") Response pauseConnector(@PathParam("connector") String connector); @PUT @Path("/{connector}/resume") Response resumeConnector(@PathParam("connector") String connector); @GET @Path("/{connector}/tasks") List<TaskInfo> getTaskConfigs(final @PathParam("connector") String connector, final @QueryParam("forward") Boolean forward); @POST @Path("/{connector}/tasks") void putTaskConfigs(final @PathParam("connector") String connector, final @QueryParam("forward") Boolean forward, final List<Map<String, String>> 
taskConfigs); @GET @Path("/{connector}/tasks/{task}/status") ConnectorStateInfo.TaskState getTaskStatus(final @PathParam("connector") String connector, final @PathParam("task") Integer task); @POST @Path("/{connector}/tasks/{task}/restart") void restartTask(final @PathParam("connector") String connector, final @PathParam("task") Integer task, final @QueryParam("forward") Boolean forward); @DELETE @Path("/{connector}") void destroyConnector(final @PathParam("connector") String connector, final @QueryParam("forward") Boolean forward); }
@Test public void testDeleteConnector() throws Throwable { final Capture<Callback<Herder.Created<ConnectorInfo>>> cb = Capture.newInstance(); herder.deleteConnectorConfig(EasyMock.eq(CONNECTOR_NAME), EasyMock.capture(cb)); expectAndCallbackResult(cb, null); PowerMock.replayAll(); connectorsResource.destroyConnector(CONNECTOR_NAME, FORWARD); PowerMock.verifyAll(); } @Test public void testDeleteConnectorNotLeader() throws Throwable { final Capture<Callback<Herder.Created<ConnectorInfo>>> cb = Capture.newInstance(); herder.deleteConnectorConfig(EasyMock.eq(CONNECTOR_NAME), EasyMock.capture(cb)); expectAndCallbackNotLeaderException(cb); EasyMock.expect(RestServer.httpRequest("http: .andReturn(new RestServer.HttpResponse<>(204, new HashMap<String, List<String>>(), null)); PowerMock.replayAll(); connectorsResource.destroyConnector(CONNECTOR_NAME, FORWARD); PowerMock.verifyAll(); } @Test(expected = NotFoundException.class) public void testDeleteConnectorNotFound() throws Throwable { final Capture<Callback<Herder.Created<ConnectorInfo>>> cb = Capture.newInstance(); herder.deleteConnectorConfig(EasyMock.eq(CONNECTOR_NAME), EasyMock.capture(cb)); expectAndCallbackException(cb, new NotFoundException("not found")); PowerMock.replayAll(); connectorsResource.destroyConnector(CONNECTOR_NAME, FORWARD); PowerMock.verifyAll(); }
RecordQueue { public int addRawRecords(Iterable<ConsumerRecord<byte[], byte[]>> rawRecords) { for (ConsumerRecord<byte[], byte[]> rawRecord : rawRecords) { ConsumerRecord<Object, Object> record = recordDeserializer.deserialize(rawRecord); long timestamp = timestampExtractor.extract(record, timeTracker.get()); log.trace("Source node {} extracted timestamp {} for record {}", source.name(), timestamp, record); if (timestamp < 0) { continue; } StampedRecord stampedRecord = new StampedRecord(record, timestamp); fifoQueue.addLast(stampedRecord); timeTracker.addElement(stampedRecord); } long timestamp = timeTracker.get(); if (timestamp > partitionTime) partitionTime = timestamp; return size(); } RecordQueue(final TopicPartition partition, final SourceNode source, final TimestampExtractor timestampExtractor); SourceNode source(); TopicPartition partition(); int addRawRecords(Iterable<ConsumerRecord<byte[], byte[]>> rawRecords); StampedRecord poll(); int size(); boolean isEmpty(); long timestamp(); void clear(); }
@Test(expected = StreamsException.class) public void shouldThrowStreamsExceptionWhenKeyDeserializationFails() throws Exception { final byte[] key = Serdes.Long().serializer().serialize("foo", 1L); final List<ConsumerRecord<byte[], byte[]>> records = Collections.singletonList( new ConsumerRecord<>("topic", 1, 1, 0L, TimestampType.CREATE_TIME, 0L, 0, 0, key, recordValue)); queue.addRawRecords(records); } @Test(expected = StreamsException.class) public void shouldThrowStreamsExceptionWhenValueDeserializationFails() throws Exception { final byte[] value = Serdes.Long().serializer().serialize("foo", 1L); final List<ConsumerRecord<byte[], byte[]>> records = Collections.singletonList( new ConsumerRecord<>("topic", 1, 1, 0L, TimestampType.CREATE_TIME, 0L, 0, 0, recordKey, value)); queue.addRawRecords(records); } @Test(expected = StreamsException.class) public void shouldThrowOnNegativeTimestamp() { final List<ConsumerRecord<byte[], byte[]>> records = Collections.singletonList( new ConsumerRecord<>("topic", 1, 1, -1L, TimestampType.CREATE_TIME, 0L, 0, 0, recordKey, recordValue)); final RecordQueue queue = new RecordQueue(new TopicPartition(topics[0], 1), new MockSourceNode<>(topics, intDeserializer, intDeserializer), new FailOnInvalidTimestamp()); queue.addRawRecords(records); }
ProcessorNode { public void init(ProcessorContext context) { this.context = context; try { nodeMetrics = new NodeMetrics(context.metrics(), name, "task." + context.taskId()); nodeMetrics.metrics.measureLatencyNs(time, initDelegate, nodeMetrics.nodeCreationSensor); } catch (Exception e) { throw new StreamsException(String.format("failed to initialize processor %s", name), e); } } ProcessorNode(String name); ProcessorNode(String name, Processor<K, V> processor, Set<String> stateStores); final String name(); final Processor<K, V> processor(); final List<ProcessorNode<?, ?>> children(); void addChild(ProcessorNode<?, ?> child); void init(ProcessorContext context); void close(); void process(final K key, final V value); void punctuate(long timestamp); @Override String toString(); String toString(String indent); final Set<String> stateStores; }
@SuppressWarnings("unchecked") @Test (expected = StreamsException.class) public void shouldThrowStreamsExceptionIfExceptionCaughtDuringInit() throws Exception { final ProcessorNode node = new ProcessorNode("name", new ExceptionalProcessor(), Collections.emptySet()); node.init(null); }
ProcessorNode { public void close() { try { nodeMetrics.metrics.measureLatencyNs(time, closeDelegate, nodeMetrics.nodeDestructionSensor); nodeMetrics.removeAllSensors(); } catch (Exception e) { throw new StreamsException(String.format("failed to close processor %s", name), e); } } ProcessorNode(String name); ProcessorNode(String name, Processor<K, V> processor, Set<String> stateStores); final String name(); final Processor<K, V> processor(); final List<ProcessorNode<?, ?>> children(); void addChild(ProcessorNode<?, ?> child); void init(ProcessorContext context); void close(); void process(final K key, final V value); void punctuate(long timestamp); @Override String toString(); String toString(String indent); final Set<String> stateStores; }
@SuppressWarnings("unchecked") @Test (expected = StreamsException.class) public void shouldThrowStreamsExceptionIfExceptionCaughtDuringClose() throws Exception { final ProcessorNode node = new ProcessorNode("name", new ExceptionalProcessor(), Collections.emptySet()); node.close(); }
SourceNode extends ProcessorNode<K, V> { K deserializeKey(String topic, Headers headers, byte[] data) { return keyDeserializer.deserialize(topic, headers, data); } SourceNode(String name, List<String> topics, TimestampExtractor timestampExtractor, Deserializer<K> keyDeserializer, Deserializer<V> valDeserializer); SourceNode(String name, List<String> topics, Deserializer<K> keyDeserializer, Deserializer<V> valDeserializer); @SuppressWarnings("unchecked") @Override void init(ProcessorContext context); @Override void process(final K key, final V value); @Override String toString(); String toString(String indent); TimestampExtractor getTimestampExtractor(); }
@Test public void shouldProvideTopicHeadersAndDataToKeyDeserializer() { final SourceNode<String, String> sourceNode = new MockSourceNode<>(new String[]{""}, new TheExtendedDeserializer(), new TheExtendedDeserializer()); final RecordHeaders headers = new RecordHeaders(); final String deserializeKey = sourceNode.deserializeKey("topic", headers, "data".getBytes(StandardCharsets.UTF_8)); assertThat(deserializeKey, is("topic" + headers + "data")); }
SourceNode extends ProcessorNode<K, V> { V deserializeValue(String topic, Headers headers, byte[] data) { return valDeserializer.deserialize(topic, headers, data); } SourceNode(String name, List<String> topics, TimestampExtractor timestampExtractor, Deserializer<K> keyDeserializer, Deserializer<V> valDeserializer); SourceNode(String name, List<String> topics, Deserializer<K> keyDeserializer, Deserializer<V> valDeserializer); @SuppressWarnings("unchecked") @Override void init(ProcessorContext context); @Override void process(final K key, final V value); @Override String toString(); String toString(String indent); TimestampExtractor getTimestampExtractor(); }
@Test public void shouldProvideTopicHeadersAndDataToValueDeserializer() { final SourceNode<String, String> sourceNode = new MockSourceNode<>(new String[]{""}, new TheExtendedDeserializer(), new TheExtendedDeserializer()); final RecordHeaders headers = new RecordHeaders(); final String deserializedValue = sourceNode.deserializeValue("topic", headers, "data".getBytes(StandardCharsets.UTF_8)); assertThat(deserializedValue, is("topic" + headers + "data")); }
ProcessorStateManager implements StateManager { @Override public StateStore getStore(final String name) { return stores.get(name); } ProcessorStateManager(final TaskId taskId, final Collection<TopicPartition> sources, final boolean isStandby, final StateDirectory stateDirectory, final Map<String, String> storeToChangelogTopic, final ChangelogReader changelogReader, final boolean eosEnabled); static String storeChangelogTopic(final String applicationId, final String storeName); @Override File baseDir(); @Override void register(final StateStore store, final boolean loggingEnabled, final StateRestoreCallback stateRestoreCallback); @Override Map<TopicPartition, Long> checkpointed(); @Override StateStore getStore(final String name); @Override void flush(); @Override void close(final Map<TopicPartition, Long> ackedOffsets); @Override void checkpoint(final Map<TopicPartition, Long> ackedOffsets); @Override StateStore getGlobalStore(final String name); static final String STATE_CHANGELOG_TOPIC_SUFFIX; }
@Test public void testGetStore() throws IOException { final MockStateStoreSupplier.MockStateStore mockStateStore = new MockStateStoreSupplier.MockStateStore(nonPersistentStoreName, false); final ProcessorStateManager stateMgr = new ProcessorStateManager( new TaskId(0, 1), noPartitions, false, stateDirectory, Collections.<String, String>emptyMap(), changelogReader, false); try { stateMgr.register(mockStateStore, true, mockStateStore.stateRestoreCallback); assertNull(stateMgr.getStore("noSuchStore")); assertEquals(mockStateStore, stateMgr.getStore(nonPersistentStoreName)); } finally { stateMgr.close(Collections.<TopicPartition, Long>emptyMap()); } }
ProcessorStateManager implements StateManager { @Override public void close(final Map<TopicPartition, Long> ackedOffsets) throws ProcessorStateException { RuntimeException firstException = null; try { if (!stores.isEmpty()) { log.debug("{} Closing its state manager and all the registered state stores", logPrefix); for (final Map.Entry<String, StateStore> entry : stores.entrySet()) { log.debug("{} Closing storage engine {}", logPrefix, entry.getKey()); try { entry.getValue().close(); } catch (final Exception e) { if (firstException == null) { firstException = new ProcessorStateException(String.format("%s Failed to close state store %s", logPrefix, entry.getKey()), e); } log.error("{} Failed to close state store {}: ", logPrefix, entry.getKey(), e); } } if (ackedOffsets != null) { checkpoint(ackedOffsets); } } } finally { try { stateDirectory.unlock(taskId); } catch (final IOException e) { if (firstException == null) { firstException = new ProcessorStateException(String.format("%s Failed to release state dir lock", logPrefix), e); } log.error("{} Failed to release state dir lock: ", logPrefix, e); } } if (firstException != null) { throw firstException; } } ProcessorStateManager(final TaskId taskId, final Collection<TopicPartition> sources, final boolean isStandby, final StateDirectory stateDirectory, final Map<String, String> storeToChangelogTopic, final ChangelogReader changelogReader, final boolean eosEnabled); static String storeChangelogTopic(final String applicationId, final String storeName); @Override File baseDir(); @Override void register(final StateStore store, final boolean loggingEnabled, final StateRestoreCallback stateRestoreCallback); @Override Map<TopicPartition, Long> checkpointed(); @Override StateStore getStore(final String name); @Override void flush(); @Override void close(final Map<TopicPartition, Long> ackedOffsets); @Override void checkpoint(final Map<TopicPartition, Long> ackedOffsets); @Override StateStore getGlobalStore(final String name); 
static final String STATE_CHANGELOG_TOPIC_SUFFIX; }
@Test public void shouldThrowLockExceptionIfFailedToLockStateDirectory() throws Exception { final File taskDirectory = stateDirectory.directoryForTask(taskId); final FileChannel channel = FileChannel.open(new File(taskDirectory, StateDirectory.LOCK_FILE_NAME).toPath(), StandardOpenOption.CREATE, StandardOpenOption.WRITE); final FileLock lock = channel.lock(); try { new ProcessorStateManager( taskId, noPartitions, false, stateDirectory, Collections.<String, String>emptyMap(), changelogReader, false); fail("Should have thrown LockException"); } catch (final LockException e) { } finally { lock.release(); channel.close(); } } @Test public void shouldDeleteCheckpointFileOnCreationIfEosEnabled() throws Exception { checkpoint.write(Collections.<TopicPartition, Long>emptyMap()); assertTrue(checkpointFile.exists()); ProcessorStateManager stateManager = null; try { stateManager = new ProcessorStateManager( taskId, noPartitions, false, stateDirectory, Collections.<String, String>emptyMap(), changelogReader, true); assertFalse(checkpointFile.exists()); } finally { if (stateManager != null) { stateManager.close(null); } } }
ProcessorStateManager implements StateManager { @Override public void register(final StateStore store, final boolean loggingEnabled, final StateRestoreCallback stateRestoreCallback) { log.debug("{} Registering state store {} to its state manager", logPrefix, store.name()); if (store.name().equals(CHECKPOINT_FILE_NAME)) { throw new IllegalArgumentException(String.format("%s Illegal store name: %s", logPrefix, CHECKPOINT_FILE_NAME)); } if (stores.containsKey(store.name())) { throw new IllegalArgumentException(String.format("%s Store %s has already been registered.", logPrefix, store.name())); } final String topic = storeToChangelogTopic.get(store.name()); if (topic == null) { stores.put(store.name(), store); return; } final TopicPartition storePartition = new TopicPartition(topic, getPartition(topic)); changelogReader.validatePartitionExists(storePartition, store.name()); if (isStandby) { if (store.persistent()) { log.trace("{} Preparing standby replica of persistent state store {} with changelog topic {}", logPrefix, store.name(), topic); restoreCallbacks.put(topic, stateRestoreCallback); } } else { log.trace("{} Restoring state store {} from changelog topic {}", logPrefix, store.name(), topic); final StateRestorer restorer = new StateRestorer(storePartition, stateRestoreCallback, checkpointedOffsets.get(storePartition), offsetLimit(storePartition), store.persistent()); changelogReader.register(restorer); } stores.put(store.name(), store); } ProcessorStateManager(final TaskId taskId, final Collection<TopicPartition> sources, final boolean isStandby, final StateDirectory stateDirectory, final Map<String, String> storeToChangelogTopic, final ChangelogReader changelogReader, final boolean eosEnabled); static String storeChangelogTopic(final String applicationId, final String storeName); @Override File baseDir(); @Override void register(final StateStore store, final boolean loggingEnabled, final StateRestoreCallback stateRestoreCallback); @Override Map<TopicPartition, 
Long> checkpointed(); @Override StateStore getStore(final String name); @Override void flush(); @Override void close(final Map<TopicPartition, Long> ackedOffsets); @Override void checkpoint(final Map<TopicPartition, Long> ackedOffsets); @Override StateStore getGlobalStore(final String name); static final String STATE_CHANGELOG_TOPIC_SUFFIX; }
@Test public void shouldThrowIllegalArgumentExceptionIfStoreNameIsSameAsCheckpointFileName() throws Exception { final ProcessorStateManager stateManager = new ProcessorStateManager( taskId, noPartitions, false, stateDirectory, Collections.<String, String>emptyMap(), changelogReader, false); try { stateManager.register(new MockStateStoreSupplier.MockStateStore(ProcessorStateManager.CHECKPOINT_FILE_NAME, true), true, null); fail("should have thrown illegal argument exception when store name same as checkpoint file"); } catch (final IllegalArgumentException e) { } } @Test public void shouldThrowIllegalArgumentExceptionOnRegisterWhenStoreHasAlreadyBeenRegistered() throws Exception { final ProcessorStateManager stateManager = new ProcessorStateManager( taskId, noPartitions, false, stateDirectory, Collections.<String, String>emptyMap(), changelogReader, false); stateManager.register(mockStateStore, false, null); try { stateManager.register(mockStateStore, false, null); fail("should have thrown illegal argument exception when store with same name already registered"); } catch (final IllegalArgumentException e) { } }
MinTimestampTracker implements TimestampTracker<E> { public long get() { Stamped<E> stamped = ascendingSubsequence.peekFirst(); if (stamped == null) return lastKnownTime; else return stamped.timestamp; } void addElement(final Stamped<E> elem); void removeElement(final Stamped<E> elem); int size(); long get(); }
@Test public void shouldReturnNotKnownTimestampWhenNoRecordsEverAdded() throws Exception { assertThat(tracker.get(), equalTo(TimestampTracker.NOT_KNOWN)); }
MinTimestampTracker implements TimestampTracker<E> { public void removeElement(final Stamped<E> elem) { if (elem == null) { return; } if (ascendingSubsequence.peekFirst() == elem) { ascendingSubsequence.removeFirst(); } if (ascendingSubsequence.isEmpty()) { lastKnownTime = elem.timestamp; } } void addElement(final Stamped<E> elem); void removeElement(final Stamped<E> elem); int size(); long get(); }
@Test public void shouldIgnoreNullRecordOnRemove() throws Exception { tracker.removeElement(null); }
MinTimestampTracker implements TimestampTracker<E> { public void addElement(final Stamped<E> elem) { if (elem == null) throw new NullPointerException(); Stamped<E> maxElem = ascendingSubsequence.peekLast(); while (maxElem != null && maxElem.timestamp >= elem.timestamp) { ascendingSubsequence.removeLast(); maxElem = ascendingSubsequence.peekLast(); } ascendingSubsequence.offerLast(elem); } void addElement(final Stamped<E> elem); void removeElement(final Stamped<E> elem); int size(); long get(); }
@Test(expected = NullPointerException.class) public void shouldThrowNullPointerExceptionWhenTryingToAddNullElement() throws Exception { tracker.addElement(null); }
SourceNodeRecordDeserializer implements RecordDeserializer { @Override public ConsumerRecord<Object, Object> deserialize(final ConsumerRecord<byte[], byte[]> rawRecord) { final Object key; try { key = sourceNode.deserializeKey(rawRecord.topic(), rawRecord.headers(), rawRecord.key()); } catch (Exception e) { throw new StreamsException(format("Failed to deserialize key for record. topic=%s, partition=%d, offset=%d", rawRecord.topic(), rawRecord.partition(), rawRecord.offset()), e); } final Object value; try { value = sourceNode.deserializeValue(rawRecord.topic(), rawRecord.headers(), rawRecord.value()); } catch (Exception e) { throw new StreamsException(format("Failed to deserialize value for record. topic=%s, partition=%d, offset=%d", rawRecord.topic(), rawRecord.partition(), rawRecord.offset()), e); } return new ConsumerRecord<>(rawRecord.topic(), rawRecord.partition(), rawRecord.offset(), rawRecord.timestamp(), TimestampType.CREATE_TIME, rawRecord.checksum(), rawRecord.serializedKeySize(), rawRecord.serializedValueSize(), key, value); } SourceNodeRecordDeserializer(final SourceNode sourceNode); @Override ConsumerRecord<Object, Object> deserialize(final ConsumerRecord<byte[], byte[]> rawRecord); }
@Test(expected = StreamsException.class) public void shouldThrowStreamsExceptionIfKeyFailsToDeserialize() throws Exception { final SourceNodeRecordDeserializer recordDeserializer = new SourceNodeRecordDeserializer( new TheSourceNode(true, false)); recordDeserializer.deserialize(rawRecord); } @Test(expected = StreamsException.class) public void shouldThrowStreamsExceptionIfKeyValueFailsToDeserialize() throws Exception { final SourceNodeRecordDeserializer recordDeserializer = new SourceNodeRecordDeserializer( new TheSourceNode(false, true)); recordDeserializer.deserialize(rawRecord); } @Test public void shouldReturnNewConsumerRecordWithDeserializedValueWhenNoExceptions() throws Exception { final SourceNodeRecordDeserializer recordDeserializer = new SourceNodeRecordDeserializer( new TheSourceNode(false, false, "key", "value")); final ConsumerRecord<Object, Object> record = recordDeserializer.deserialize(rawRecord); assertEquals(rawRecord.topic(), record.topic()); assertEquals(rawRecord.partition(), record.partition()); assertEquals(rawRecord.offset(), record.offset()); assertEquals(rawRecord.checksum(), record.checksum()); assertEquals("key", record.key()); assertEquals("value", record.value()); assertEquals(rawRecord.timestamp(), record.timestamp()); assertEquals(TimestampType.CREATE_TIME, record.timestampType()); }
AssignmentInfo { public static AssignmentInfo decode(ByteBuffer data) { data.rewind(); DataInputStream in = new DataInputStream(new ByteBufferInputStream(data)); try { int version = in.readInt(); if (version < 0 || version > CURRENT_VERSION) { TaskAssignmentException ex = new TaskAssignmentException("Unknown assignment data version: " + version); log.error(ex.getMessage(), ex); throw ex; } int count = in.readInt(); List<TaskId> activeTasks = new ArrayList<>(count); for (int i = 0; i < count; i++) { activeTasks.add(TaskId.readFrom(in)); } count = in.readInt(); Map<TaskId, Set<TopicPartition>> standbyTasks = new HashMap<>(count); for (int i = 0; i < count; i++) { TaskId id = TaskId.readFrom(in); standbyTasks.put(id, readTopicPartitions(in)); } Map<HostInfo, Set<TopicPartition>> hostStateToTopicPartitions = new HashMap<>(); if (version == CURRENT_VERSION) { int numEntries = in.readInt(); for (int i = 0; i < numEntries; i++) { HostInfo hostInfo = new HostInfo(in.readUTF(), in.readInt()); hostStateToTopicPartitions.put(hostInfo, readTopicPartitions(in)); } } return new AssignmentInfo(activeTasks, standbyTasks, hostStateToTopicPartitions); } catch (IOException ex) { throw new TaskAssignmentException("Failed to decode AssignmentInfo", ex); } } AssignmentInfo(List<TaskId> activeTasks, Map<TaskId, Set<TopicPartition>> standbyTasks, Map<HostInfo, Set<TopicPartition>> hostState); protected AssignmentInfo(int version, List<TaskId> activeTasks, Map<TaskId, Set<TopicPartition>> standbyTasks, Map<HostInfo, Set<TopicPartition>> hostState); ByteBuffer encode(); static AssignmentInfo decode(ByteBuffer data); @Override int hashCode(); @Override boolean equals(Object o); @Override String toString(); final int version; final List<TaskId> activeTasks; final Map<TaskId, Set<TopicPartition>> standbyTasks; final Map<HostInfo, Set<TopicPartition>> partitionsByHost; }
@Test public void shouldDecodePreviousVersion() throws Exception { List<TaskId> activeTasks = Arrays.asList(new TaskId(0, 0), new TaskId(0, 0), new TaskId(0, 1), new TaskId(1, 0)); Map<TaskId, Set<TopicPartition>> standbyTasks = new HashMap<>(); standbyTasks.put(new TaskId(1, 1), Utils.mkSet(new TopicPartition("t1", 1), new TopicPartition("t2", 1))); standbyTasks.put(new TaskId(2, 0), Utils.mkSet(new TopicPartition("t3", 0), new TopicPartition("t3", 0))); final AssignmentInfo oldVersion = new AssignmentInfo(1, activeTasks, standbyTasks, null); final AssignmentInfo decoded = AssignmentInfo.decode(encodeV1(oldVersion)); assertEquals(oldVersion.activeTasks, decoded.activeTasks); assertEquals(oldVersion.standbyTasks, decoded.standbyTasks); assertEquals(0, decoded.partitionsByHost.size()); assertEquals(2, decoded.version); }
ConnectorsResource { @GET @Path("/{connector}") public ConnectorInfo getConnector(final @PathParam("connector") String connector, final @QueryParam("forward") Boolean forward) throws Throwable { FutureCallback<ConnectorInfo> cb = new FutureCallback<>(); herder.connectorInfo(connector, cb); return completeOrForwardRequest(cb, "/connectors/" + connector, "GET", null, forward); } ConnectorsResource(Herder herder); @GET @Path("/") Collection<String> listConnectors(final @QueryParam("forward") Boolean forward); @POST @Path("/") Response createConnector(final @QueryParam("forward") Boolean forward, final CreateConnectorRequest createRequest); @GET @Path("/{connector}") ConnectorInfo getConnector(final @PathParam("connector") String connector, final @QueryParam("forward") Boolean forward); @GET @Path("/{connector}/config") Map<String, String> getConnectorConfig(final @PathParam("connector") String connector, final @QueryParam("forward") Boolean forward); @GET @Path("/{connector}/status") ConnectorStateInfo getConnectorStatus(final @PathParam("connector") String connector); @PUT @Path("/{connector}/config") Response putConnectorConfig(final @PathParam("connector") String connector, final @QueryParam("forward") Boolean forward, final Map<String, String> connectorConfig); @POST @Path("/{connector}/restart") void restartConnector(final @PathParam("connector") String connector, final @QueryParam("forward") Boolean forward); @PUT @Path("/{connector}/pause") Response pauseConnector(@PathParam("connector") String connector); @PUT @Path("/{connector}/resume") Response resumeConnector(@PathParam("connector") String connector); @GET @Path("/{connector}/tasks") List<TaskInfo> getTaskConfigs(final @PathParam("connector") String connector, final @QueryParam("forward") Boolean forward); @POST @Path("/{connector}/tasks") void putTaskConfigs(final @PathParam("connector") String connector, final @QueryParam("forward") Boolean forward, final List<Map<String, String>> taskConfigs); @GET 
@Path("/{connector}/tasks/{task}/status") ConnectorStateInfo.TaskState getTaskStatus(final @PathParam("connector") String connector, final @PathParam("task") Integer task); @POST @Path("/{connector}/tasks/{task}/restart") void restartTask(final @PathParam("connector") String connector, final @PathParam("task") Integer task, final @QueryParam("forward") Boolean forward); @DELETE @Path("/{connector}") void destroyConnector(final @PathParam("connector") String connector, final @QueryParam("forward") Boolean forward); }
@Test public void testGetConnector() throws Throwable { final Capture<Callback<ConnectorInfo>> cb = Capture.newInstance(); herder.connectorInfo(EasyMock.eq(CONNECTOR_NAME), EasyMock.capture(cb)); expectAndCallbackResult(cb, new ConnectorInfo(CONNECTOR_NAME, CONNECTOR_CONFIG, CONNECTOR_TASK_NAMES)); PowerMock.replayAll(); ConnectorInfo connInfo = connectorsResource.getConnector(CONNECTOR_NAME, FORWARD); assertEquals(new ConnectorInfo(CONNECTOR_NAME, CONNECTOR_CONFIG, CONNECTOR_TASK_NAMES), connInfo); PowerMock.verifyAll(); }
CarManufacturer { public Car manufactureCar(Specification spec) { Car car = carFactory.createCar(spec); entityManager.merge(car); return car; } Car manufactureCar(Specification spec); }
@Test public void test() { Specification spec = new Specification(); Car car = new Car(spec); when(entityManager.merge(any())).then(a -> a.getArgument(0)); when(carFactory.createCar(any())).thenReturn(car); assertThat(testObject.manufactureCar(spec)).isEqualTo(car); verify(carFactory).createCar(spec); verify(entityManager).merge(car); } @Test public void test() { Specification spec = new Specification(); Car car = new Car(spec); when(testObject.entityManager.merge(any())).then(a -> a.getArgument(0)); when(testObject.carFactory.createCar(any())).thenReturn(car); assertThat(testObject.manufactureCar(spec)).isEqualTo(car); verify(testObject.carFactory).createCar(spec); verify(testObject.entityManager).merge(car); }
Types { public static boolean isAssignable(Class<?> lhsType, Class<?> rhsType) { if (lhsType.isAssignableFrom(rhsType)) { return true; } return lhsType.isPrimitive() ? lhsType.equals(Primitives.unwrap(rhsType)) : lhsType.isAssignableFrom(Primitives.wrap(rhsType)); } private Types(); static boolean isAssignable(Class<?> lhsType, Class<?> rhsType); static boolean equals(Class<?> lhsType, Class<?> rhsType); }
@Test public void testIsAssignable() throws Exception { assertThat(Types.isAssignable(int.class, Integer.class), is(true)); assertThat(Types.isAssignable(Integer.class, Integer.class), is(true)); assertThat(Types.isAssignable(Integer.class, int.class), is(true)); assertThat(Types.isAssignable(int.class, int.class), is(true)); assertThat(Types.isAssignable(int.class, long.class), is(false)); assertThat(Types.isAssignable(Integer.class, Long.class), is(false)); assertThat(Types.isAssignable(A.class, B.class), is(true)); assertThat(Types.isAssignable(A.class, A.class), is(true)); assertThat(Types.isAssignable(B.class, B.class), is(true)); assertThat(Types.isAssignable(B.class, A.class), is(false)); }
MatchRatingApproachEncoder implements StringEncoder { public boolean isEncodeEquals(String name1, String name2) { if (name1 == null || EMPTY.equalsIgnoreCase(name1) || SPACE.equalsIgnoreCase(name1)) { return false; } else if (name2 == null || EMPTY.equalsIgnoreCase(name2) || SPACE.equalsIgnoreCase(name2)) { return false; } else if (name1.length() == 1 || name2.length() == 1) { return false; } else if (name1.equalsIgnoreCase(name2)) { return true; } name1 = cleanName(name1); name2 = cleanName(name2); name1 = removeVowels(name1); name2 = removeVowels(name2); name1 = removeDoubleConsonants(name1); name2 = removeDoubleConsonants(name2); name1 = getFirst3Last3(name1); name2 = getFirst3Last3(name2); if (Math.abs(name1.length() - name2.length()) >= THREE) { return false; } final int sumLength = Math.abs(name1.length() + name2.length()); int minRating = 0; minRating = getMinRating(sumLength); final int count = leftToRightThenRightToLeftProcessing(name1, name2); return count >= minRating; } @Override final Object encode(final Object pObject); @Override final String encode(String name); boolean isEncodeEquals(String name1, String name2); }
@Test public final void testCompareNameNullSpace_ReturnsFalseSuccessfully() { assertFalse(getStringEncoder().isEncodeEquals(null, " ")); } @Test public final void testCompareNameSameNames_ReturnsFalseSuccessfully() { assertTrue(getStringEncoder().isEncodeEquals("John", "John")); } @Test public final void testCompare_SMITH_SMYTH_SuccessfullyMatched() { assertTrue(this.getStringEncoder().isEncodeEquals("smith", "smyth")); } @Test public final void testCompare_BURNS_BOURNE_SuccessfullyMatched() { assertTrue(this.getStringEncoder().isEncodeEquals("Burns", "Bourne")); } @Test public final void testCompare_ShortNames_AL_ED_WorksButNoMatch() { assertFalse(this.getStringEncoder().isEncodeEquals("Al", "Ed")); } @Test public final void testCompare_CATHERINE_KATHRYN_SuccessfullyMatched() { assertTrue(this.getStringEncoder().isEncodeEquals("Catherine", "Kathryn")); } @Test public final void testCompare_BRIAN_BRYAN_SuccessfullyMatched() { assertTrue(this.getStringEncoder().isEncodeEquals("Brian", "Bryan")); } @Test public final void testCompare_SEAN_SHAUN_SuccessfullyMatched() { assertTrue(this.getStringEncoder().isEncodeEquals("Séan", "Shaun")); } @Test public final void testCompare_COLM_COLIN_WithAccentsAndSymbolsAndSpaces_SuccessfullyMatched() { assertTrue(this.getStringEncoder().isEncodeEquals("Cólm. 
", "C-olín")); } @Test public final void testCompare_STEPHEN_STEVEN_SuccessfullyMatched() { assertTrue(this.getStringEncoder().isEncodeEquals("Stephen", "Steven")); } @Test public final void testCompare_STEVEN_STEFAN_SuccessfullyMatched() { assertTrue(this.getStringEncoder().isEncodeEquals("Steven", "Stefan")); } @Test public final void testCompare_STEPHEN_STEFAN_SuccessfullyMatched() { assertTrue(this.getStringEncoder().isEncodeEquals("Stephen", "Stefan")); } @Test public final void testCompare_SAM_SAMUEL_SuccessfullyMatched() { assertTrue(this.getStringEncoder().isEncodeEquals("Sam", "Samuel")); } @Test public final void testCompare_MICKY_MICHAEL_SuccessfullyMatched() { assertTrue(this.getStringEncoder().isEncodeEquals("Micky", "Michael")); } @Test public final void testCompare_OONA_OONAGH_SuccessfullyMatched() { assertTrue(this.getStringEncoder().isEncodeEquals("Oona", "Oonagh")); } @Test public final void testCompare_SOPHIE_SOFIA_SuccessfullyMatched() { assertTrue(this.getStringEncoder().isEncodeEquals("Sophie", "Sofia")); } @Test public final void testCompare_FRANCISZEK_FRANCES_SuccessfullyMatched() { assertTrue(this.getStringEncoder().isEncodeEquals("Franciszek", "Frances")); } @Test public final void testCompare_TOMASZ_TOM_SuccessfullyMatched() { assertTrue(this.getStringEncoder().isEncodeEquals("Tomasz", "tom")); } @Test public final void testCompare_SmallInput_CARK_Kl_SuccessfullyMatched() { assertTrue(this.getStringEncoder().isEncodeEquals("Kl", "Karl")); } @Test public final void testCompareNameToSingleLetter_KARL_C_DoesNotMatch() { assertFalse(this.getStringEncoder().isEncodeEquals("Karl", "C")); } @Test public final void testCompare_ZACH_ZAKARIA_SuccessfullyMatched() { assertTrue(this.getStringEncoder().isEncodeEquals("Zach", "Zacharia")); } @Test public final void testCompare_KARL_ALESSANDRO_DoesNotMatch() { assertFalse(this.getStringEncoder().isEncodeEquals("Karl", "Alessandro")); } @Test public final void 
testCompare_Forenames_UNA_OONAGH_ShouldSuccessfullyMatchButDoesNot() { assertFalse(this.getStringEncoder().isEncodeEquals("Úna", "Oonagh")); } @Test public final void testCompare_Surname_OSULLIVAN_OSUILLEABHAIN_SuccessfulMatch() { assertTrue(this.getStringEncoder().isEncodeEquals("O'Sullivan", "Ó ' Súilleabháin")); } @Test public final void testCompare_LongSurnames_MORIARTY_OMUIRCHEARTAIGH_DoesNotSuccessfulMatch() { assertFalse(this.getStringEncoder().isEncodeEquals("Moriarty", "OMuircheartaigh")); } @Test public final void testCompare_LongSurnames_OMUIRCHEARTAIGH_OMIREADHAIGH_SuccessfulMatch() { assertTrue(this.getStringEncoder().isEncodeEquals("o'muireadhaigh", "Ó 'Muircheartaigh ")); } @Test public final void testCompare_Surname_COOPERFLYNN_SUPERLYN_SuccessfullyMatched() { assertTrue(this.getStringEncoder().isEncodeEquals("Cooper-Flynn", "Super-Lyn")); } @Test public final void testCompare_Surname_HAILEY_HALLEY_SuccessfullyMatched() { assertTrue(this.getStringEncoder().isEncodeEquals("Hailey", "Halley")); } @Test public final void testCompare_Surname_AUERBACH_UHRBACH_SuccessfullyMatched() { assertTrue(this.getStringEncoder().isEncodeEquals("Auerbach", "Uhrbach")); } @Test public final void testCompare_Surname_MOSKOWITZ_MOSKOVITZ_SuccessfullyMatched() { assertTrue(this.getStringEncoder().isEncodeEquals("Moskowitz", "Moskovitz")); } @Test public final void testCompare_Surname_LIPSHITZ_LIPPSZYC_SuccessfullyMatched() { assertTrue(this.getStringEncoder().isEncodeEquals("LIPSHITZ", "LIPPSZYC")); } @Test public final void testCompare_Surname_LEWINSKY_LEVINSKI_SuccessfullyMatched() { assertTrue(this.getStringEncoder().isEncodeEquals("LEWINSKY", "LEVINSKI")); } @Test public final void testCompare_Surname_SZLAMAWICZ_SHLAMOVITZ_SuccessfullyMatched() { assertTrue(this.getStringEncoder().isEncodeEquals("SZLAMAWICZ", "SHLAMOVITZ")); } @Test public final void testCompare_Surname_ROSOCHOWACIEC_ROSOKHOVATSETS_SuccessfullyMatched() { 
assertTrue(this.getStringEncoder().isEncodeEquals("R o s o ch o w a c ie c", " R o s o k ho v a ts e ts")); } @Test public final void testCompare_Surname_PRZEMYSL_PSHEMESHIL_SuccessfullyMatched() { assertTrue(this.getStringEncoder().isEncodeEquals(" P rz e m y s l", " P sh e m e sh i l")); } @Test public final void testCompare_PETERSON_PETERS_SuccessfullyMatched() { assertTrue(this.getStringEncoder().isEncodeEquals("Peterson", "Peters")); } @Test public final void testCompare_MCGOWAN_MCGEOGHEGAN_SuccessfullyMatched() { assertTrue(this.getStringEncoder().isEncodeEquals("McGowan", "Mc Geoghegan")); } @Test public final void testCompare_SurnamesCornerCase_MURPHY_Space_NoMatch() { assertFalse(this.getStringEncoder().isEncodeEquals("Murphy", " ")); } @Test public final void testCompare_SurnamesCornerCase_MURPHY_NoSpace_NoMatch() { assertFalse(this.getStringEncoder().isEncodeEquals("Murphy", "")); } @Test public final void testCompare_SurnameCornerCase_Nulls_NoMatch() { assertFalse(this.getStringEncoder().isEncodeEquals(null, null)); } @Test public final void testCompare_Surnames_MURPHY_LYNCH_NoMatchExpected() { assertFalse(this.getStringEncoder().isEncodeEquals("Murphy", "Lynch")); } @Test public final void testCompare_Forenames_SEAN_JOHN_MatchExpected() { assertTrue(this.getStringEncoder().isEncodeEquals("Sean", "John")); } @Test public final void testCompare_Forenames_SEAN_PETE_NoMatchExpected() { assertFalse(this.getStringEncoder().isEncodeEquals("Sean", "Pete")); } @Test public final void testisEncodeEquals_CornerCase_SecondNameNothing_ReturnsFalse() { assertFalse(this.getStringEncoder().isEncodeEquals("test", "")); } @Test public final void testisEncodeEquals_CornerCase_FirstNameNothing_ReturnsFalse() { assertFalse(this.getStringEncoder().isEncodeEquals("", "test")); } @Test public final void testisEncodeEquals_CornerCase_SecondNameJustSpace_ReturnsFalse() { assertFalse(this.getStringEncoder().isEncodeEquals("test", " ")); } @Test public final void 
testisEncodeEquals_CornerCase_FirstNameJustSpace_ReturnsFalse() { assertFalse(this.getStringEncoder().isEncodeEquals(" ", "test")); } @Test public final void testisEncodeEquals_CornerCase_SecondNameNull_ReturnsFalse() { assertFalse(this.getStringEncoder().isEncodeEquals("test", null)); } @Test public final void testisEncodeEquals_CornerCase_FirstNameNull_ReturnsFalse() { assertFalse(this.getStringEncoder().isEncodeEquals(null, "test")); } @Test public final void testisEncodeEquals_CornerCase_FirstNameJust1Letter_ReturnsFalse() { assertFalse(this.getStringEncoder().isEncodeEquals("t", "test")); } @Test public final void testisEncodeEqualsSecondNameJust1Letter_ReturnsFalse() { assertFalse(this.getStringEncoder().isEncodeEquals("test", "t")); }
Parser implements ParserTreeConstants, ParserConstants { final public ASTRootNode parse() throws ParseException { ASTRootNode jjtn000 = new ASTRootNode(JJTROOTNODE); boolean jjtc000 = true; jjtree.openNodeScope(jjtn000); jjtn000.jjtSetFirstToken(getToken(1)); try { DML(); ASTBlock jjtn001 = new ASTBlock(JJTBLOCK); boolean jjtc001 = true; jjtree.openNodeScope(jjtn001); jjtn001.jjtSetFirstToken(getToken(1)); try { label_1: while (true) { Statement(); switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { case BLANK: case QUOTE_TEXT: case JDBC_ITERABLE_PARAMETER: case JDBC_PARAMETER: case GLOBAL_TABLE: case JOIN_PARAMETER: case IF_DIRECTIVE: case INSERT: case DELETE: case UPDATE: case SELECT: case REPLACE: case MERGE: case TRUNCATE: case TEXT: ; break; default: jj_la1[0] = jj_gen; break label_1; } } } catch (Throwable jjte001) { if (jjtc001) { jjtree.clearNodeScope(jjtn001); jjtc001 = false; } else { jjtree.popNode(); } if (jjte001 instanceof RuntimeException) { {if (true) throw (RuntimeException)jjte001;} } if (jjte001 instanceof ParseException) { {if (true) throw (ParseException)jjte001;} } {if (true) throw (Error)jjte001;} } finally { if (jjtc001) { jjtree.closeNodeScope(jjtn001, true); jjtn001.jjtSetLastToken(getToken(0)); } } jj_consume_token(0); jjtree.closeNodeScope(jjtn000, true); jjtc000 = false; jjtn000.jjtSetLastToken(getToken(0)); {if (true) return jjtn000;} } catch (Throwable jjte000) { if (jjtc000) { jjtree.clearNodeScope(jjtn000); jjtc000 = false; } else { jjtree.popNode(); } if (jjte000 instanceof RuntimeException) { {if (true) throw (RuntimeException)jjte000;} } if (jjte000 instanceof ParseException) { {if (true) throw (ParseException)jjte000;} } {if (true) throw (Error)jjte000;} } finally { if (jjtc000) { jjtree.closeNodeScope(jjtn000, true); jjtn000.jjtSetLastToken(getToken(0)); } } throw new Error("Missing return statement in function"); } Parser(String s); Parser(java.io.InputStream stream); Parser(java.io.InputStream stream, String encoding); Parser(Reader 
stream); Parser(ParserTokenManager tm); final ASTRootNode parse(); final void DML(); final void Insert(); final void Delete(); final void Update(); final void Select(); final void Replace(); final void Merge(); final void Truncate(); final void Statement(); final void JDBCParameter(); final void JDBCIterableParameter(); final void GlobalTable(); final void JoinParameter(); final void QuoteText(); final void Text(); final void Blank(); final void IfStatement(); final void ElseStatement(); final void ElseIfStatement(); final void Expression(); final void ConditionalOrExpression(); final void ConditionalAndExpression(); final void RelationalExpression(); final void UnaryExpression(); final void PrimaryExpression(); final void ExpressionParameter(); final void IntegerLiteral(); final void StringLiteral(); final void True(); final void False(); final void Null(); void ReInit(java.io.InputStream stream); void ReInit(java.io.InputStream stream, String encoding); void ReInit(Reader stream); void ReInit(ParserTokenManager tm); final Token getNextToken(); final Token getToken(int index); ParseException generateParseException(); final void enable_tracing(); final void disable_tracing(); public ParserTokenManager token_source; public Token token; public Token jj_nt; }
@Test public void testParse() throws Exception { String sql = "SELECT * from user where id in ( select id from user2 )"; ASTRootNode n = new Parser(sql).parse().init(); InvocationContext context = DefaultInvocationContext.create(); n.render(context); BoundSql boundSql = context.getBoundSql(); assertThat(boundSql.getSql().toString(), equalTo("SELECT * from user where id in ( select id from user2 )")); } @Test public void testIntegerLiteral2() throws Exception { String sql = "select #if (:1 > 10) ok #end"; ASTRootNode n = new Parser(sql).parse().init(); ParameterContext ctx = getParameterContext(Lists.newArrayList((Type) Integer.class)); n.checkAndBind(ctx); InvocationContext context = DefaultInvocationContext.create(); context.addParameter("1", Long.MAX_VALUE); n.render(context); BoundSql boundSql = context.getBoundSql(); assertThat(boundSql.getSql(), Matchers.equalTo("select ok ")); } @Test public void testIntegerLiteral3() throws Exception { String sql = "select #if (:1 > 9223372036854775800) ok #end"; ASTRootNode n = new Parser(sql).parse().init(); ParameterContext ctx = getParameterContext(Lists.newArrayList((Type) Integer.class)); n.checkAndBind(ctx); InvocationContext context = DefaultInvocationContext.create(); context.addParameter("1", 100); n.render(context); BoundSql boundSql = context.getBoundSql(); assertThat(boundSql.getSql(), Matchers.equalTo("select ")); } @Test public void testStringLiteral2() throws Exception { String sql = "select #if (:1 == 'hello') ok #end"; ASTRootNode n = new Parser(sql).parse().init(); ParameterContext ctx = getParameterContext(Lists.newArrayList((Type) String.class)); n.checkAndBind(ctx); InvocationContext context = DefaultInvocationContext.create(); context.addParameter("1", "hello2"); n.render(context); BoundSql boundSql = context.getBoundSql(); assertThat(boundSql.getSql(), Matchers.equalTo("select ")); } @Test public void testStringLiteral3() throws Exception { String sql = "select #if ('') ok #end"; ASTRootNode n = new 
Parser(sql).parse().init(); ParameterContext ctx = getParameterContext(Lists.newArrayList((Type) String.class)); n.checkAndBind(ctx); InvocationContext context = DefaultInvocationContext.create(); context.addParameter("1", "hello2"); n.render(context); BoundSql boundSql = context.getBoundSql(); assertThat(boundSql.getSql(), Matchers.equalTo("select ")); } @Test public void testStringLiteral4() throws Exception { String sql = "select #if (!'') ok #end"; ASTRootNode n = new Parser(sql).parse().init(); ParameterContext ctx = getParameterContext(Lists.newArrayList((Type) String.class)); n.checkAndBind(ctx); InvocationContext context = DefaultInvocationContext.create(); context.addParameter("1", "hello2"); n.render(context); BoundSql boundSql = context.getBoundSql(); assertThat(boundSql.getSql(), Matchers.equalTo("select ok ")); } @Test public void testStringLiteral5() throws Exception { String sql = "select #if (:1) ok #end"; ASTRootNode n = new Parser(sql).parse().init(); ParameterContext ctx = getParameterContext(Lists.newArrayList((Type) String.class)); n.checkAndBind(ctx); InvocationContext context = DefaultInvocationContext.create(); context.addParameter("1", "he"); n.render(context); BoundSql boundSql = context.getBoundSql(); assertThat(boundSql.getSql(), Matchers.equalTo("select ok ")); } @Test public void testStringLiteral6() throws Exception { String sql = "select #if (:1) ok #end"; ASTRootNode n = new Parser(sql).parse().init(); ParameterContext ctx = getParameterContext(Lists.newArrayList((Type) String.class)); n.checkAndBind(ctx); InvocationContext context = DefaultInvocationContext.create(); context.addParameter("1", ""); n.render(context); BoundSql boundSql = context.getBoundSql(); assertThat(boundSql.getSql(), Matchers.equalTo("select ")); } @Test public void testQuote() throws Exception { String sql = "insert into table ... 
values(':dd',':xx')"; ASTRootNode n = new Parser(sql).parse().init(); List<Type> types = Lists.newArrayList(); ParameterContext ctx = getParameterContext(types); n.checkAndBind(ctx); InvocationContext context = DefaultInvocationContext.create(); n.render(context); BoundSql boundSql = context.getBoundSql(); assertThat(boundSql.getSql().toString(), equalTo("insert into table ... values(':dd',':xx')")); assertThat(boundSql.getArgs(), hasSize(0)); } @Test public void testExpressionParameter4In() throws Exception { String sql = "select #if (:1) id in (:1) #end"; ASTRootNode n = new Parser(sql).parse().init(); ParameterContext ctx = getParameterContext(Lists.newArrayList(new TypeToken<List<Integer>>(){}.getType())); n.checkAndBind(ctx); InvocationContext context = DefaultInvocationContext.create(); List<Integer> ids = Lists.newArrayList(1, 2, 3); context.addParameter("1", ids); n.render(context); BoundSql boundSql = context.getBoundSql(); assertThat(boundSql.getSql(), Matchers.equalTo("select id in (?,?,?) 
")); } @Test public void testExpressionParameter4InEmpty() throws Exception { String sql = "select #if (:1) id in (:1) #end"; ASTRootNode n = new Parser(sql).parse().init(); ParameterContext ctx = getParameterContext(Lists.newArrayList(new TypeToken<List<Integer>>(){}.getType())); n.checkAndBind(ctx); InvocationContext context = DefaultInvocationContext.create(); List<Integer> ids = Lists.newArrayList(); context.addParameter("1", ids); n.render(context); BoundSql boundSql = context.getBoundSql(); assertThat(boundSql.getSql(), Matchers.equalTo("select ")); } @Test public void testExpressionParameter4InNull() throws Exception { String sql = "select #if (:1) id in (:1) #end"; ASTRootNode n = new Parser(sql).parse().init(); ParameterContext ctx = getParameterContext(Lists.newArrayList(new TypeToken<List<Integer>>(){}.getType())); n.checkAndBind(ctx); InvocationContext context = DefaultInvocationContext.create(); List<Integer> ids = null; context.addParameter("1", ids); n.render(context); BoundSql boundSql = context.getBoundSql(); assertThat(boundSql.getSql(), Matchers.equalTo("select ")); } @Test public void testJdbcType() throws Exception { String sql = "insert into table ... 
values(:1.b.c@blob) a in (:2.x.y@clob)"; ASTRootNode n = new Parser(sql).parse().init(); final AtomicInteger t = new AtomicInteger(0); n.jjtAccept(new ParserVisitorAdapter() { @Override public Object visit(ASTJDBCParameter node, Object data) { BindingParameter bp = node.getBindingParameter(); assertThat(bp.getParameterName(), equalTo("1")); assertThat(bp.getPropertyPath(), equalTo("b.c")); assertThat(bp.getJdbcType(), equalTo(JdbcType.BLOB)); t.incrementAndGet(); return super.visit(node, data); } @Override public Object visit(ASTJDBCIterableParameter node, Object data) { BindingParameter bp = node.getBindingParameter(); assertThat(bp.getParameterName(), equalTo("2")); assertThat(bp.getPropertyPath(), equalTo("x.y")); assertThat(bp.getJdbcType(), equalTo(JdbcType.CLOB)); t.incrementAndGet(); return super.visit(node, data); } }, null); assertThat(t.intValue(), equalTo(2)); } @Test public void testBase() throws Exception { String sql = "select #{:1} from user where id in (:2) and name=:3"; ASTRootNode n = new Parser(sql).parse().init(); Type listType = new TypeToken<List<Integer>>() { }.getType(); ParameterContext ctx = getParameterContext(Lists.newArrayList(String.class, listType, String.class)); n.checkAndBind(ctx); InvocationContext context = DefaultInvocationContext.create(); context.addParameter("1", "id"); context.addParameter("2", Arrays.asList(9, 5, 2, 7)); context.addParameter("3", "ash"); n.render(context); BoundSql boundSql = context.getBoundSql(); assertThat(boundSql.getSql().toString(), equalTo("select id from user where id in (?,?,?,?) 
and name=?")); assertThat(boundSql.getArgs(), contains(new Object[]{9, 5, 2, 7, "ash"})); } @Test public void testIf() throws Exception { String sql = "select where 1=1 #if(:1) and id>:1 #end"; ASTRootNode n = new Parser(sql).parse().init(); ParameterContext ctx = getParameterContext(Lists.newArrayList((Type) Integer.class)); n.checkAndBind(ctx); InvocationContext context = DefaultInvocationContext.create(); context.addParameter("1", 100); n.render(context); BoundSql boundSql = context.getBoundSql(); assertThat(boundSql.getSql().toString(), equalTo("select where 1=1 and id>? ")); assertThat(boundSql.getArgs(), contains(new Object[]{100})); } @Test public void testIf2() throws Exception { String sql = "select where 1=1 #if(!:1) and id>:1 #end"; ASTRootNode n = new Parser(sql).parse().init(); ParameterContext ctx = getParameterContext(Lists.newArrayList((Type) Integer.class)); n.checkAndBind(ctx); InvocationContext context = DefaultInvocationContext.create(); context.addParameter("1", 100); n.render(context); BoundSql boundSql = context.getBoundSql(); assertThat(boundSql.getSql().toString(), equalTo("select where 1=1 ")); assertThat(boundSql.getArgs().size(), equalTo(0)); } @Test public void testIfElseIf() throws Exception { String sql = "select where 1=1" + "#if(:1>0)" + " and id>:1" + "#elseif(:1<0)" + " and id<:1" + "#end"; ASTRootNode n = new Parser(sql).parse().init(); ParameterContext ctx = getParameterContext(Lists.newArrayList((Type) Integer.class)); n.checkAndBind(ctx); InvocationContext context = DefaultInvocationContext.create(); context.addParameter("1", 100); n.render(context); BoundSql boundSql = context.getBoundSql(); assertThat(boundSql.getSql().toString(), equalTo("select where 1=1 and id>?")); assertThat(boundSql.getArgs(), contains(new Object[]{100})); } @Test public void testIfElseIf2() throws Exception { String sql = "select where 1=1" + "#if(:1>0)" + " and id>:1" + "#elseif(:1<0)" + " and id<:1" + "#end"; ASTRootNode n = new 
Parser(sql).parse().init(); ParameterContext ctx = getParameterContext(Lists.newArrayList((Type) Integer.class)); n.checkAndBind(ctx); InvocationContext context = DefaultInvocationContext.create(); context.addParameter("1", -100); n.render(context); BoundSql boundSql = context.getBoundSql(); assertThat(boundSql.getSql().toString(), equalTo("select where 1=1 and id<?")); assertThat(boundSql.getArgs(), contains(new Object[]{-100})); } @Test public void testIfElseIfElse() throws Exception { String sql = "select where 1=1" + "#if(:1>0)" + " and id>:1" + "#elseif(:1<0)" + " and id<:1" + "#else" + " and id=:1" + "#end"; ASTRootNode n = new Parser(sql).parse().init(); ParameterContext ctx = getParameterContext(Lists.newArrayList((Type) Integer.class)); n.checkAndBind(ctx); InvocationContext context = DefaultInvocationContext.create(); context.addParameter("1", 100); n.render(context); BoundSql boundSql = context.getBoundSql(); assertThat(boundSql.getSql().toString(), equalTo("select where 1=1 and id>?")); assertThat(boundSql.getArgs(), contains(new Object[]{100})); } @Test public void testIfElseIfElse2() throws Exception { String sql = "select where 1=1" + "#if(:1>0)" + " and id>:1" + "#elseif(:1<0)" + " and id<:1" + "#else" + " and id=:1" + "#end"; ASTRootNode n = new Parser(sql).parse().init(); ParameterContext ctx = getParameterContext(Lists.newArrayList((Type) Integer.class)); n.checkAndBind(ctx); InvocationContext context = DefaultInvocationContext.create(); context.addParameter("1", -100); n.render(context); BoundSql boundSql = context.getBoundSql(); assertThat(boundSql.getSql().toString(), equalTo("select where 1=1 and id<?")); assertThat(boundSql.getArgs(), contains(new Object[]{-100})); } @Test public void testIfElseIfElse3() throws Exception { String sql = "select where 1=1" + "#if(:1>0)" + " and id>:1" + "#elseif(:1<0)" + " and id<:1" + "#else" + " and id=:1" + "#end"; ASTRootNode n = new Parser(sql).parse().init(); ParameterContext ctx = 
getParameterContext(Lists.newArrayList((Type) Integer.class)); n.checkAndBind(ctx); InvocationContext context = DefaultInvocationContext.create(); context.addParameter("1", 0); n.render(context); BoundSql boundSql = context.getBoundSql(); assertThat(boundSql.getSql().toString(), equalTo("select where 1=1 and id=?")); assertThat(boundSql.getArgs(), contains(new Object[]{0})); }
Parser implements ParserTreeConstants, ParserConstants { final public void IntegerLiteral() throws ParseException { ASTIntegerLiteral jjtn000 = new ASTIntegerLiteral(JJTINTEGERLITERAL); boolean jjtc000 = true; jjtree.openNodeScope(jjtn000); jjtn000.jjtSetFirstToken(getToken(1));Token t; try { t = jj_consume_token(INTEGER_LITERAL); jjtree.closeNodeScope(jjtn000, true); jjtc000 = false; jjtn000.jjtSetLastToken(getToken(0)); jjtn000.init(t.image); } finally { if (jjtc000) { jjtree.closeNodeScope(jjtn000, true); jjtn000.jjtSetLastToken(getToken(0)); } } } Parser(String s); Parser(java.io.InputStream stream); Parser(java.io.InputStream stream, String encoding); Parser(Reader stream); Parser(ParserTokenManager tm); final ASTRootNode parse(); final void DML(); final void Insert(); final void Delete(); final void Update(); final void Select(); final void Replace(); final void Merge(); final void Truncate(); final void Statement(); final void JDBCParameter(); final void JDBCIterableParameter(); final void GlobalTable(); final void JoinParameter(); final void QuoteText(); final void Text(); final void Blank(); final void IfStatement(); final void ElseStatement(); final void ElseIfStatement(); final void Expression(); final void ConditionalOrExpression(); final void ConditionalAndExpression(); final void RelationalExpression(); final void UnaryExpression(); final void PrimaryExpression(); final void ExpressionParameter(); final void IntegerLiteral(); final void StringLiteral(); final void True(); final void False(); final void Null(); void ReInit(java.io.InputStream stream); void ReInit(java.io.InputStream stream, String encoding); void ReInit(Reader stream); void ReInit(ParserTokenManager tm); final Token getNextToken(); final Token getToken(int index); ParseException generateParseException(); final void enable_tracing(); final void disable_tracing(); public ParserTokenManager token_source; public Token token; public Token jj_nt; }
@Test public void testIntegerLiteral() throws Exception { String sql = "select #if (:1 > 9223372036854775800) ok #end"; ASTRootNode n = new Parser(sql).parse().init(); ParameterContext ctx = getParameterContext(Lists.newArrayList((Type) Integer.class)); n.checkAndBind(ctx); InvocationContext context = DefaultInvocationContext.create(); context.addParameter("1", Long.MAX_VALUE); n.render(context); BoundSql boundSql = context.getBoundSql(); assertThat(boundSql.getSql(), Matchers.equalTo("select ok ")); }
Soundex implements StringEncoder { @Override public Object encode(final Object obj) throws EncoderException { if (!(obj instanceof String)) { throw new EncoderException("Parameter supplied to Soundex encode is not of type java.lang.String"); } return soundex((String) obj); } Soundex(); Soundex(final char[] mapping); Soundex(final String mapping); int difference(final String s1, final String s2); @Override Object encode(final Object obj); @Override String encode(final String str); @Deprecated int getMaxLength(); @Deprecated void setMaxLength(final int maxLength); String soundex(String str); static final String US_ENGLISH_MAPPING_STRING; static final Soundex US_ENGLISH; }
@Test public void testBadCharacters() { Assert.assertEquals("H452", this.getStringEncoder().encode("HOL>MES")); } @Test public void testEncodeBasic() { Assert.assertEquals("T235", this.getStringEncoder().encode("testing")); Assert.assertEquals("T000", this.getStringEncoder().encode("The")); Assert.assertEquals("Q200", this.getStringEncoder().encode("quick")); Assert.assertEquals("B650", this.getStringEncoder().encode("brown")); Assert.assertEquals("F200", this.getStringEncoder().encode("fox")); Assert.assertEquals("J513", this.getStringEncoder().encode("jumped")); Assert.assertEquals("O160", this.getStringEncoder().encode("over")); Assert.assertEquals("T000", this.getStringEncoder().encode("the")); Assert.assertEquals("L200", this.getStringEncoder().encode("lazy")); Assert.assertEquals("D200", this.getStringEncoder().encode("dogs")); } @Test public void testEncodeBatch2() { Assert.assertEquals("A462", this.getStringEncoder().encode("Allricht")); Assert.assertEquals("E166", this.getStringEncoder().encode("Eberhard")); Assert.assertEquals("E521", this.getStringEncoder().encode("Engebrethson")); Assert.assertEquals("H512", this.getStringEncoder().encode("Heimbach")); Assert.assertEquals("H524", this.getStringEncoder().encode("Hanselmann")); Assert.assertEquals("H431", this.getStringEncoder().encode("Hildebrand")); Assert.assertEquals("K152", this.getStringEncoder().encode("Kavanagh")); Assert.assertEquals("L530", this.getStringEncoder().encode("Lind")); Assert.assertEquals("L222", this.getStringEncoder().encode("Lukaschowsky")); Assert.assertEquals("M235", this.getStringEncoder().encode("McDonnell")); Assert.assertEquals("M200", this.getStringEncoder().encode("McGee")); Assert.assertEquals("O155", this.getStringEncoder().encode("Opnian")); Assert.assertEquals("O155", this.getStringEncoder().encode("Oppenheimer")); Assert.assertEquals("R355", this.getStringEncoder().encode("Riedemanas")); Assert.assertEquals("Z300", this.getStringEncoder().encode("Zita")); 
Assert.assertEquals("Z325", this.getStringEncoder().encode("Zitzmeinn")); } @Test public void testEncodeBatch3() { Assert.assertEquals("W252", this.getStringEncoder().encode("Washington")); Assert.assertEquals("L000", this.getStringEncoder().encode("Lee")); Assert.assertEquals("G362", this.getStringEncoder().encode("Gutierrez")); Assert.assertEquals("P236", this.getStringEncoder().encode("Pfister")); Assert.assertEquals("J250", this.getStringEncoder().encode("Jackson")); Assert.assertEquals("T522", this.getStringEncoder().encode("Tymczak")); Assert.assertEquals("V532", this.getStringEncoder().encode("VanDeusen")); } @Test public void testEncodeBatch4() { Assert.assertEquals("H452", this.getStringEncoder().encode("HOLMES")); Assert.assertEquals("A355", this.getStringEncoder().encode("ADOMOMI")); Assert.assertEquals("V536", this.getStringEncoder().encode("VONDERLEHR")); Assert.assertEquals("B400", this.getStringEncoder().encode("BALL")); Assert.assertEquals("S000", this.getStringEncoder().encode("SHAW")); Assert.assertEquals("J250", this.getStringEncoder().encode("JACKSON")); Assert.assertEquals("S545", this.getStringEncoder().encode("SCANLON")); Assert.assertEquals("S532", this.getStringEncoder().encode("SAINTJOHN")); } @Test public void testEncodeIgnoreTrimmable() { Assert.assertEquals("W252", this.getStringEncoder().encode(" \t\n\r Washington \t\n\r ")); } @Test public void testHWRuleEx1() { Assert.assertEquals("A261", this.getStringEncoder().encode("Ashcraft")); } @Test public void testHWRuleEx2() { Assert.assertEquals("B312", this.getStringEncoder().encode("BOOTHDAVIS")); Assert.assertEquals("B312", this.getStringEncoder().encode("BOOTH-DAVIS")); } @Test public void testHWRuleEx3() throws EncoderException { Assert.assertEquals("S460", this.getStringEncoder().encode("Sgler")); Assert.assertEquals("S460", this.getStringEncoder().encode("Swhgler")); this.checkEncodingVariations("S460", new String[]{ "SAILOR", "SALYER", "SAYLOR", "SCHALLER", "SCHELLER", "SCHILLER", 
"SCHOOLER", "SCHULER", "SCHUYLER", "SEILER", "SEYLER", "SHOLAR", "SHULER", "SILAR", "SILER", "SILLER"}); } @Test public void testMsSqlServer1() { Assert.assertEquals("S530", this.getStringEncoder().encode("Smith")); Assert.assertEquals("S530", this.getStringEncoder().encode("Smythe")); } @Test public void testMsSqlServer3() { Assert.assertEquals("A500", this.getStringEncoder().encode("Ann")); Assert.assertEquals("A536", this.getStringEncoder().encode("Andrew")); Assert.assertEquals("J530", this.getStringEncoder().encode("Janet")); Assert.assertEquals("M626", this.getStringEncoder().encode("Margaret")); Assert.assertEquals("S315", this.getStringEncoder().encode("Steven")); Assert.assertEquals("M240", this.getStringEncoder().encode("Michael")); Assert.assertEquals("R163", this.getStringEncoder().encode("Robert")); Assert.assertEquals("L600", this.getStringEncoder().encode("Laura")); Assert.assertEquals("A500", this.getStringEncoder().encode("Anne")); } @Test public void testUsMappingEWithAcute() { Assert.assertEquals("E000", this.getStringEncoder().encode("e")); if (Character.isLetter('\u00e9')) { try { Assert.assertEquals("\u00c9000", this.getStringEncoder().encode("\u00e9")); Assert.fail("Expected IllegalArgumentException not thrown"); } catch (final IllegalArgumentException e) { } } else { Assert.assertEquals("", this.getStringEncoder().encode("\u00e9")); } } @Test public void testUsMappingOWithDiaeresis() { Assert.assertEquals("O000", this.getStringEncoder().encode("o")); if (Character.isLetter('\u00f6')) { try { Assert.assertEquals("\u00d6000", this.getStringEncoder().encode("\u00f6")); Assert.fail("Expected IllegalArgumentException not thrown"); } catch (final IllegalArgumentException e) { } } else { Assert.assertEquals("", this.getStringEncoder().encode("\u00f6")); } }
// Collapsed (focal class, test) dataset pair. The Soundex excerpt shows only
// difference(), which delegates the 0..4 similarity score to SoundexUtils.difference();
// the trailing member list is a signature summary, not compilable code.
Soundex implements StringEncoder { public int difference(final String s1, final String s2) throws EncoderException { return SoundexUtils.difference(this, s1, s2); } Soundex(); Soundex(final char[] mapping); Soundex(final String mapping); int difference(final String s1, final String s2); @Override Object encode(final Object obj); @Override String encode(final String str); @Deprecated int getMaxLength(); @Deprecated void setMaxLength(final int maxLength); String soundex(String str); static final String US_ENGLISH_MAPPING_STRING; static final Soundex US_ENGLISH; }
// Pins difference() scores against the classic MS SQL Server DIFFERENCE examples;
// getStringEncoder() is a fixture supplied by the surrounding test superclass.
@Test public void testDifference() throws EncoderException { Assert.assertEquals(0, this.getStringEncoder().difference(null, null)); Assert.assertEquals(0, this.getStringEncoder().difference("", "")); Assert.assertEquals(0, this.getStringEncoder().difference(" ", " ")); Assert.assertEquals(4, this.getStringEncoder().difference("Smith", "Smythe")); Assert.assertEquals(2, this.getStringEncoder().difference("Ann", "Andrew")); Assert.assertEquals(1, this.getStringEncoder().difference("Margaret", "Andrew")); Assert.assertEquals(0, this.getStringEncoder().difference("Janet", "Margaret")); Assert.assertEquals(4, this.getStringEncoder().difference("Green", "Greene")); Assert.assertEquals(0, this.getStringEncoder().difference("Blotchet-Halls", "Greene")); Assert.assertEquals(4, this.getStringEncoder().difference("Smith", "Smythe")); Assert.assertEquals(4, this.getStringEncoder().difference("Smithers", "Smythers")); Assert.assertEquals(2, this.getStringEncoder().difference("Anothers", "Brothers")); }
// JavaCC-generated parser excerpt: Replace() consumes a REPLACE token, stores its image
// on an ASTReplace node, and closes the JJTree node scope in the finally block so the
// tree stays consistent even if token consumption throws.
Parser implements ParserTreeConstants, ParserConstants { final public void Replace() throws ParseException { ASTReplace jjtn000 = new ASTReplace(JJTREPLACE); boolean jjtc000 = true; jjtree.openNodeScope(jjtn000); jjtn000.jjtSetFirstToken(getToken(1));Token t; try { t = jj_consume_token(REPLACE); jjtree.closeNodeScope(jjtn000, true); jjtc000 = false; jjtn000.jjtSetLastToken(getToken(0)); jjtn000.setValue(t.image); } finally { if (jjtc000) { jjtree.closeNodeScope(jjtn000, true); jjtn000.jjtSetLastToken(getToken(0)); } } } Parser(String s); Parser(java.io.InputStream stream); Parser(java.io.InputStream stream, String encoding); Parser(Reader stream); Parser(ParserTokenManager tm); final ASTRootNode parse(); final void DML(); final void Insert(); final void Delete(); final void Update(); final void Select(); final void Replace(); final void Merge(); final void Truncate(); final void Statement(); final void JDBCParameter(); final void JDBCIterableParameter(); final void GlobalTable(); final void JoinParameter(); final void QuoteText(); final void Text(); final void Blank(); final void IfStatement(); final void ElseStatement(); final void ElseIfStatement(); final void Expression(); final void ConditionalOrExpression(); final void ConditionalAndExpression(); final void RelationalExpression(); final void UnaryExpression(); final void PrimaryExpression(); final void ExpressionParameter(); final void IntegerLiteral(); final void StringLiteral(); final void True(); final void False(); final void Null(); void ReInit(java.io.InputStream stream); void ReInit(java.io.InputStream stream, String encoding); void ReInit(Reader stream); void ReInit(ParserTokenManager tm); final Token getNextToken(); final Token getToken(int index); ParseException generateParseException(); final void enable_tracing(); final void disable_tracing(); public ParserTokenManager token_source; public Token token; public Token jj_nt; }
// Verifies that a "replace ..." statement round-trips unchanged through parse/render
// and is classified as SQLType.REPLACE.
@Test public void testReplace() throws Exception { String sql = "replace xxx into replace xxx"; ASTRootNode n = new Parser(sql).parse().init(); List<Type> types = Lists.newArrayList(); ParameterContext ctx = getParameterContext(types); n.checkAndBind(ctx); InvocationContext context = DefaultInvocationContext.create(); n.render(context); BoundSql boundSql = context.getBoundSql(); assertThat(boundSql.getSql(), Matchers.equalTo("replace xxx into replace xxx")); assertThat(n.getSQLType(), is(SQLType.REPLACE)); }
// Soundex no-arg constructor excerpt: installs the default US-English mapping table.
Soundex implements StringEncoder { public Soundex() { this.soundexMapping = US_ENGLISH_MAPPING; } Soundex(); Soundex(final char[] mapping); Soundex(final String mapping); int difference(final String s1, final String s2); @Override Object encode(final Object obj); @Override String encode(final String str); @Deprecated int getMaxLength(); @Deprecated void setMaxLength(final int maxLength); String soundex(String str); static final String US_ENGLISH_MAPPING_STRING; static final Soundex US_ENGLISH; }
// Construction tests: all three constructors and the US_ENGLISH singleton must agree,
// encoding "Williams" as W452.
@Test public void testNewInstance() { Assert.assertEquals("W452", new Soundex().soundex("Williams")); } @Test public void testNewInstance2() { Assert.assertEquals("W452", new Soundex(Soundex.US_ENGLISH_MAPPING_STRING.toCharArray()).soundex("Williams")); } @Test public void testNewInstance3() { Assert.assertEquals("W452", new Soundex(Soundex.US_ENGLISH_MAPPING_STRING).soundex("Williams")); } @Test public void testUsEnglishStatic() { Assert.assertEquals("W452", Soundex.US_ENGLISH.soundex("Williams")); }
// Parser excerpt: Merge() mirrors Replace() -- consumes a MERGE token, records its image
// on an ASTMerge node, and guarantees node-scope closure via the finally block.
Parser implements ParserTreeConstants, ParserConstants { final public void Merge() throws ParseException { ASTMerge jjtn000 = new ASTMerge(JJTMERGE); boolean jjtc000 = true; jjtree.openNodeScope(jjtn000); jjtn000.jjtSetFirstToken(getToken(1));Token t; try { t = jj_consume_token(MERGE); jjtree.closeNodeScope(jjtn000, true); jjtc000 = false; jjtn000.jjtSetLastToken(getToken(0)); jjtn000.setValue(t.image); } finally { if (jjtc000) { jjtree.closeNodeScope(jjtn000, true); jjtn000.jjtSetLastToken(getToken(0)); } } } Parser(String s); Parser(java.io.InputStream stream); Parser(java.io.InputStream stream, String encoding); Parser(Reader stream); Parser(ParserTokenManager tm); final ASTRootNode parse(); final void DML(); final void Insert(); final void Delete(); final void Update(); final void Select(); final void Replace(); final void Merge(); final void Truncate(); final void Statement(); final void JDBCParameter(); final void JDBCIterableParameter(); final void GlobalTable(); final void JoinParameter(); final void QuoteText(); final void Text(); final void Blank(); final void IfStatement(); final void ElseStatement(); final void ElseIfStatement(); final void Expression(); final void ConditionalOrExpression(); final void ConditionalAndExpression(); final void RelationalExpression(); final void UnaryExpression(); final void PrimaryExpression(); final void ExpressionParameter(); final void IntegerLiteral(); final void StringLiteral(); final void True(); final void False(); final void Null(); void ReInit(java.io.InputStream stream); void ReInit(java.io.InputStream stream, String encoding); void ReInit(Reader stream); void ReInit(ParserTokenManager tm); final Token getNextToken(); final Token getToken(int index); ParseException generateParseException(); final void enable_tracing(); final void disable_tracing(); public ParserTokenManager token_source; public Token token; public Token jj_nt; }
// Verifies a "merge ..." statement renders back unchanged and is classified as
// SQLType.MERGE.
@Test public void testMerge() throws Exception { String sql = "merge xxx into merge xxx"; ASTRootNode n = new Parser(sql).parse().init(); List<Type> types = Lists.newArrayList(); ParameterContext ctx = getParameterContext(types); n.checkAndBind(ctx); InvocationContext context = DefaultInvocationContext.create(); n.render(context); BoundSql boundSql = context.getBoundSql(); assertThat(boundSql.getSql(), Matchers.equalTo("merge xxx into merge xxx")); assertThat(n.getSQLType(), is(SQLType.MERGE)); }
// TransactionManager no-arg constructor excerpt: intentionally empty -- the connection
// source is injected later via setConnectionSource() and validated by initialize().
TransactionManager { public TransactionManager() { } TransactionManager(); TransactionManager(ConnectionSource connectionSource); void initialize(); T callInTransaction(final Callable<T> callable); T callInTransaction(String tableName, final Callable<T> callable); static T callInTransaction(final ConnectionSource connectionSource, final Callable<T> callable); static T callInTransaction(String tableName, final ConnectionSource connectionSource, final Callable<T> callable); static T callInTransaction(final DatabaseConnection connection, final DatabaseType databaseType, final Callable<T> callable); static T callInTransaction(final DatabaseConnection connection, boolean saved, final DatabaseType databaseType, final Callable<T> callable); void setConnectionSource(ConnectionSource connectionSource); }
// EasyMock happy-path test: a savepoint is created and committed, the "special"
// connection is saved then cleared, and the connection is released afterwards.
@Test public void testTransactionManager() throws Exception { ConnectionSource connectionSource = createMock(ConnectionSource.class); DatabaseConnection conn = createMock(DatabaseConnection.class); expect(conn.isAutoCommitSupported()).andReturn(false); Savepoint savePoint = createMock(Savepoint.class); expect(savePoint.getSavepointName()).andReturn("name").anyTimes(); expect(conn.setSavePoint(isA(String.class))).andReturn(savePoint); conn.commit(savePoint); expect(connectionSource.getDatabaseType()).andReturn(databaseType); expect(connectionSource.getReadWriteConnection(null)).andReturn(conn); expect(connectionSource.saveSpecialConnection(conn)).andReturn(true); connectionSource.clearSpecialConnection(conn); connectionSource.releaseConnection(conn); replay(connectionSource, conn, savePoint); TransactionManager tm = new TransactionManager(connectionSource); tm.callInTransaction(new Callable<Void>() { @Override public Void call() { return null; } }); verify(connectionSource, conn, savePoint); }
// Collapsed dataset entry: TransactionManager.initialize() implementation followed by
// the class's member-signature summary.
TransactionManager {
    /**
     * Verifies that a ConnectionSource has been configured before the manager is used.
     *
     * @throws IllegalStateException if no connection source was injected via the
     *         constructor or {@code setConnectionSource(ConnectionSource)}.
     */
    public void initialize() {
        if (connectionSource == null) {
            // BUG FIX: the message previously said "dataSource", but the field and its
            // setter are named connectionSource -- report the name callers must set.
            throw new IllegalStateException(
                    "connectionSource was not set on " + getClass().getSimpleName());
        }
    }
    TransactionManager(); TransactionManager(ConnectionSource connectionSource); void initialize(); T callInTransaction(final Callable<T> callable); T callInTransaction(String tableName, final Callable<T> callable); static T callInTransaction(final ConnectionSource connectionSource, final Callable<T> callable); static T callInTransaction(String tableName, final ConnectionSource connectionSource, final Callable<T> callable); static T callInTransaction(final DatabaseConnection connection, final DatabaseType databaseType, final Callable<T> callable); static T callInTransaction(final DatabaseConnection connection, boolean saved, final DatabaseType databaseType, final Callable<T> callable); void setConnectionSource(ConnectionSource connectionSource); }
// Initializing a TransactionManager that never received a ConnectionSource must fail fast.
@Test(expected = IllegalStateException.class)
public void testTransactionManagerNoSet() {
    final TransactionManager manager = new TransactionManager();
    manager.initialize();
}
// rollBack(): rolls the connection back to the given savepoint (full rollback when the
// savepoint is null) and logs which savepoint, if any, was rolled back.
TransactionManager { private static void rollBack(DatabaseConnection connection, Savepoint savePoint) throws SQLException { String name = (savePoint == null ? null : savePoint.getSavepointName()); connection.rollback(savePoint); if (name == null) { logger.debug("rolled back savePoint transaction"); } else { logger.debug("rolled back savePoint transaction {}", name); } } TransactionManager(); TransactionManager(ConnectionSource connectionSource); void initialize(); T callInTransaction(final Callable<T> callable); T callInTransaction(String tableName, final Callable<T> callable); static T callInTransaction(final ConnectionSource connectionSource, final Callable<T> callable); static T callInTransaction(String tableName, final ConnectionSource connectionSource, final Callable<T> callable); static T callInTransaction(final DatabaseConnection connection, final DatabaseType databaseType, final Callable<T> callable); static T callInTransaction(final DatabaseConnection connection, boolean saved, final DatabaseType databaseType, final Callable<T> callable); void setConnectionSource(ConnectionSource connectionSource); }
// Exercises rollback by running a transaction body that throws; silently skipped when
// no connectionSource is configured for this test environment.
@Test public void testRollBack() throws Exception { if (connectionSource == null) { return; } TransactionManager mgr = new TransactionManager(connectionSource); final Dao<Foo, Integer> fooDao = createDao(Foo.class, true); testTransactionManager(mgr, new RuntimeException("What!! I protest!!"), fooDao); }
// Instance-level callInTransaction(): delegates to the static variant using this
// manager's own connection source.
TransactionManager { public <T> T callInTransaction(final Callable<T> callable) throws SQLException { return callInTransaction(connectionSource, callable); } TransactionManager(); TransactionManager(ConnectionSource connectionSource); void initialize(); T callInTransaction(final Callable<T> callable); T callInTransaction(String tableName, final Callable<T> callable); static T callInTransaction(final ConnectionSource connectionSource, final Callable<T> callable); static T callInTransaction(String tableName, final ConnectionSource connectionSource, final Callable<T> callable); static T callInTransaction(final DatabaseConnection connection, final DatabaseType databaseType, final Callable<T> callable); static T callInTransaction(final DatabaseConnection connection, boolean saved, final DatabaseType databaseType, final Callable<T> callable); void setConnectionSource(ConnectionSource connectionSource); }
// Nested-transaction tests: a successful inner transaction is allowed, and an inner
// failure must roll back the outer work as well (the table ends up empty).
@Test public void testTransactionWithinTransaction() throws Exception { if (connectionSource == null) { return; } final TransactionManager mgr = new TransactionManager(connectionSource); final Dao<Foo, Integer> dao = createDao(Foo.class, true); mgr.callInTransaction(new Callable<Void>() { @Override public Void call() throws Exception { testTransactionManager(mgr, null, dao); return null; } }); } @Test public void testTransactionWithinTransactionFails() throws Exception { if (connectionSource == null) { return; } final TransactionManager mgr = new TransactionManager(connectionSource); final Dao<Foo, Integer> dao = createDao(Foo.class, true); try { mgr.callInTransaction(new Callable<Void>() { @Override public Void call() throws Exception { dao.create(new Foo()); mgr.callInTransaction(new Callable<Void>() { @Override public Void call() throws Exception { dao.create(new Foo()); throw new SQLException("Exception ahoy!"); } }); return null; } }); fail("Should have thrown"); } catch (SQLException se) { } List<Foo> results = dao.queryForAll(); assertNotNull(results); assertEquals(0, results.size()); }
/**
 * Utility for wrapping arbitrary throwables in a {@link java.sql.SQLException} while
 * preserving the cause chain and, when available, the original SQLState.
 */
public class SqlExceptionUtil {

    /** Utility class -- not instantiable. */
    private SqlExceptionUtil() {
    }

    /**
     * Builds a new {@link SQLException} with the given message whose cause is
     * {@code cause}. If the cause is itself a {@code SQLException}, its SQLState is
     * carried over to the new exception.
     *
     * @param message message for the new exception
     * @param cause underlying throwable to chain; may itself be a SQLException
     * @return a new SQLException with {@code cause} as its cause
     */
    public static SQLException create(String message, Throwable cause) {
        // Use the cause-accepting constructors (available since Java 6) instead of
        // initCause() -- same observable result, fewer moving parts.
        if (cause instanceof SQLException) {
            return new SQLException(message, ((SQLException) cause).getSQLState(), cause);
        } else {
            return new SQLException(message, cause);
        }
    }
}
// Tests for SqlExceptionUtil.create(): the wrapper must preserve the new message and
// the cause, and -- when the cause is an SQLException -- carry over its SQLState.
@Test public void testException() { Throwable cause = new Throwable(); String msg = "hello"; SQLException e = SqlExceptionUtil.create(msg, cause); assertEquals(msg, e.getMessage()); assertEquals(cause, e.getCause()); } @Test public void testExceptionWithSQLException() { String sqlReason = "sql exception message"; String sqlState = "sql exception state"; Throwable cause = new SQLException(sqlReason, sqlState); String msg = "hello"; SQLException e = SqlExceptionUtil.create(msg, cause); assertEquals(msg, e.getMessage()); assertEquals(sqlState, e.getSQLState()); assertEquals(cause, e.getCause()); }
// BaseDaoEnabled.create(): requires an attached DAO (checkForDao throws otherwise),
// then persists this object via dao.create(this).
BaseDaoEnabled { public int create() throws SQLException { checkForDao(); @SuppressWarnings("unchecked") T t = (T) this; return dao.create(t); } int create(); int refresh(); int update(); int updateId(ID newId); int delete(); String objectToString(); ID extractId(); boolean objectsEqual(T other); void setDao(Dao<T, ID> dao); Dao<T, ID> getDao(); }
// create() succeeds once a DAO is attached; without one an SQLException is expected.
@Test public void testCreate() throws Exception { Dao<One, Integer> dao = createDao(One.class, true); One one = new One(); String stuff = "fewpfjewfew"; one.stuff = stuff; one.setDao(dao); assertEquals(1, one.create()); } @Test(expected = SQLException.class) public void testCreateNoDao() throws Exception { One one = new One(); String stuff = "fewpfjewfew"; one.stuff = stuff; one.create(); }
// Parser excerpt: StringLiteral() consumes a STRING_LITERAL token and initializes an
// ASTStringLiteral node from its image, closing the node scope in the finally block.
Parser implements ParserTreeConstants, ParserConstants { final public void StringLiteral() throws ParseException { ASTStringLiteral jjtn000 = new ASTStringLiteral(JJTSTRINGLITERAL); boolean jjtc000 = true; jjtree.openNodeScope(jjtn000); jjtn000.jjtSetFirstToken(getToken(1));Token t; try { t = jj_consume_token(STRING_LITERAL); jjtree.closeNodeScope(jjtn000, true); jjtc000 = false; jjtn000.jjtSetLastToken(getToken(0)); jjtn000.init(t.image); } finally { if (jjtc000) { jjtree.closeNodeScope(jjtn000, true); jjtn000.jjtSetLastToken(getToken(0)); } } } Parser(String s); Parser(java.io.InputStream stream); Parser(java.io.InputStream stream, String encoding); Parser(Reader stream); Parser(ParserTokenManager tm); final ASTRootNode parse(); final void DML(); final void Insert(); final void Delete(); final void Update(); final void Select(); final void Replace(); final void Merge(); final void Truncate(); final void Statement(); final void JDBCParameter(); final void JDBCIterableParameter(); final void GlobalTable(); final void JoinParameter(); final void QuoteText(); final void Text(); final void Blank(); final void IfStatement(); final void ElseStatement(); final void ElseIfStatement(); final void Expression(); final void ConditionalOrExpression(); final void ConditionalAndExpression(); final void RelationalExpression(); final void UnaryExpression(); final void PrimaryExpression(); final void ExpressionParameter(); final void IntegerLiteral(); final void StringLiteral(); final void True(); final void False(); final void Null(); void ReInit(java.io.InputStream stream); void ReInit(java.io.InputStream stream, String encoding); void ReInit(Reader stream); void ReInit(ParserTokenManager tm); final Token getNextToken(); final Token getToken(int index); ParseException generateParseException(); final void enable_tracing(); final void disable_tracing(); public ParserTokenManager token_source; public Token token; public Token jj_nt; }
// String-literal comparison inside #if: the guarded branch renders only when the bound
// parameter equals 'hello'.
@Test public void testStringLiteral() throws Exception { String sql = "select #if (:1 == 'hello') ok #end"; ASTRootNode n = new Parser(sql).parse().init(); ParameterContext ctx = getParameterContext(Lists.newArrayList((Type) String.class)); n.checkAndBind(ctx); InvocationContext context = DefaultInvocationContext.create(); context.addParameter("1", "hello"); n.render(context); BoundSql boundSql = context.getBoundSql(); assertThat(boundSql.getSql(), Matchers.equalTo("select ok ")); }
// update(): same checkForDao() guard, then pushes this object's current field values
// to the database via dao.update(this).
BaseDaoEnabled { public int update() throws SQLException { checkForDao(); @SuppressWarnings("unchecked") T t = (T) this; return dao.update(t); } int create(); int refresh(); int update(); int updateId(ID newId); int delete(); String objectToString(); ID extractId(); boolean objectsEqual(T other); void setDao(Dao<T, ID> dao); Dao<T, ID> getDao(); }
// update() round-trip: create, mutate, update, re-query, and expect the new value.
@Test public void testUpdate() throws Exception { Dao<One, Integer> dao = createDao(One.class, true); One one = new One(); String stuff1 = "fewpfjewfew"; one.stuff = stuff1; assertEquals(1, dao.create(one)); String stuff2 = "fjpfejpwewpfjewfew"; one.stuff = stuff2; assertEquals(1, one.update()); One one2 = dao.queryForId(one.id); assertEquals(stuff2, one2.stuff); }
// updateId(): delegates to dao.updateId(this, newId) to change the row's primary key.
BaseDaoEnabled { public int updateId(ID newId) throws SQLException { checkForDao(); @SuppressWarnings("unchecked") T t = (T) this; return dao.updateId(t, newId); } int create(); int refresh(); int update(); int updateId(ID newId); int delete(); String objectToString(); ID extractId(); boolean objectsEqual(T other); void setDao(Dao<T, ID> dao); Dao<T, ID> getDao(); }
// updateId() moves the row: the old id no longer resolves, the new one does.
@Test public void testUpdateId() throws Exception { Dao<One, Integer> dao = createDao(One.class, true); One one = new One(); String stuff1 = "fewpfjewfew"; one.stuff = stuff1; assertEquals(1, dao.create(one)); int id = one.id; assertNotNull(dao.queryForId(id)); assertEquals(1, one.updateId(id + 1)); assertNull(dao.queryForId(id)); assertNotNull(dao.queryForId(id + 1)); }
// delete(): removes this object's row via dao.delete(this).
BaseDaoEnabled { public int delete() throws SQLException { checkForDao(); @SuppressWarnings("unchecked") T t = (T) this; return dao.delete(t); } int create(); int refresh(); int update(); int updateId(ID newId); int delete(); String objectToString(); ID extractId(); boolean objectsEqual(T other); void setDao(Dao<T, ID> dao); Dao<T, ID> getDao(); }
// delete() removes the row so it can no longer be queried by id.
@Test public void testDelete() throws Exception { Dao<One, Integer> dao = createDao(One.class, true); One one = new One(); String stuff1 = "fewpfjewfew"; one.stuff = stuff1; assertEquals(1, dao.create(one)); assertNotNull(dao.queryForId(one.id)); assertEquals(1, one.delete()); assertNull(dao.queryForId(one.id)); }
// objectToString(): rethrows checkForDao()'s SQLException as IllegalArgumentException
// (unchecked) so the method stays usable from toString()-style call sites.
BaseDaoEnabled { public String objectToString() { try { checkForDao(); } catch (SQLException e) { throw new IllegalArgumentException(e); } @SuppressWarnings("unchecked") T t = (T) this; return dao.objectToString(t); } int create(); int refresh(); int update(); int updateId(ID newId); int delete(); String objectToString(); ID extractId(); boolean objectsEqual(T other); void setDao(Dao<T, ID> dao); Dao<T, ID> getDao(); }
// Without a DAO attached, objectToString() fails with IllegalArgumentException.
@Test(expected = IllegalArgumentException.class) public void testObjectEqualsNoDao() { One one = new One(); String stuff1 = "fewpfjewfew"; one.stuff = stuff1; one.objectToString(); }
// extractId(): returns this object's id value via dao.extractId(this).
BaseDaoEnabled { public ID extractId() throws SQLException { checkForDao(); @SuppressWarnings("unchecked") T t = (T) this; return dao.extractId(t); } int create(); int refresh(); int update(); int updateId(ID newId); int delete(); String objectToString(); ID extractId(); boolean objectsEqual(T other); void setDao(Dao<T, ID> dao); Dao<T, ID> getDao(); }
// extractId() agrees with the generated id field after create().
@Test public void testExtractId() throws Exception { Dao<One, Integer> dao = createDao(One.class, true); One one = new One(); String stuff1 = "fewpfjewfew"; one.stuff = stuff1; assertEquals(1, dao.create(one)); assertEquals(one.id, (int) one.extractId()); }
// Logger facade excerpt: isLevelEnabled() delegates to the wrapped Log implementation.
// The very wide overload summary (trace/debug/info/warn/error/fatal/log x message-arg
// permutations) continues across the next two lines; line breaks fall mid-signature.
Logger { public boolean isLevelEnabled(Level level) { return log.isLevelEnabled(level); } Logger(Log log); boolean isLevelEnabled(Level level); void trace(String msg); void trace(String msg, Object arg0); void trace(String msg, Object arg0, Object arg1); void trace(String msg, Object arg0, Object arg1, Object arg2); void trace(String msg, Object[] argArray); void trace(Throwable throwable, String msg); void trace(Throwable throwable, String msg, Object arg0); void trace(Throwable throwable, String msg, Object arg0, Object arg1); void trace(Throwable throwable, String msg, Object arg0, Object arg1, Object arg2); void trace(Throwable throwable, String msg, Object[] argArray); void debug(String msg); void debug(String msg, Object arg0); void debug(String msg, Object arg0, Object arg1); void debug(String msg, Object arg0, Object arg1, Object arg2); void debug(String msg, Object[] argArray); void debug(Throwable throwable, String msg); void debug(Throwable throwable, String msg, Object arg0); void debug(Throwable throwable, String msg, Object arg0, Object arg1); void debug(Throwable throwable, String msg, Object arg0, Object arg1, Object arg2); void debug(Throwable throwable, String msg, Object[] argArray); void info(String msg); void info(String msg, Object arg0); void info(String msg, Object arg0, Object arg1); void info(String msg, Object arg0, Object arg1, Object arg2); void info(String msg, Object[] argArray); void info(Throwable throwable, String msg); void info(Throwable throwable, String msg, Object arg0); void info(Throwable throwable, String msg, Object arg0, Object arg1); void info(Throwable throwable, String msg, Object arg0, Object arg1, Object arg2); void info(Throwable throwable, String msg, Object[] argArray); void warn(String msg); void warn(String msg, Object arg0); void warn(String msg, Object arg0, Object arg1); void warn(String msg, Object arg0, Object arg1, Object arg2); void warn(String msg, Object[] argArray); void warn(Throwable throwable, 
String msg); void warn(Throwable throwable, String msg, Object arg0); void warn(Throwable throwable, String msg, Object arg0, Object arg1); void warn(Throwable throwable, String msg, Object arg0, Object arg1, Object arg2); void warn(Throwable throwable, String msg, Object[] argArray); void error(String msg); void error(String msg, Object arg0); void error(String msg, Object arg0, Object arg1); void error(String msg, Object arg0, Object arg1, Object arg2); void error(String msg, Object[] argArray); void error(Throwable throwable, String msg); void error(Throwable throwable, String msg, Object arg0); void error(Throwable throwable, String msg, Object arg0, Object arg1); void error(Throwable throwable, String msg, Object arg0, Object arg1, Object arg2); void error(Throwable throwable, String msg, Object[] argArray); void fatal(String msg); void fatal(String msg, Object arg0); void fatal(String msg, Object arg0, Object arg1); void fatal(String msg, Object arg0, Object arg1, Object arg2); void fatal(String msg, Object[] argArray); void fatal(Throwable throwable, String msg); void fatal(Throwable throwable, String msg, Object arg0); void fatal(Throwable throwable, String msg, Object arg0, Object arg1); void fatal(Throwable throwable, String msg, Object arg0, Object arg1, Object arg2); void fatal(Throwable throwable, String msg, Object[] argArray); void log(Level level, String msg); void log(Level level, String msg, Object arg0); void log(Level level, String msg, Object arg0, Object arg1); void log(Level level, String msg, Object arg0, Object arg1, Object arg2); void log(Level level, String msg, Object[] argArray); void log(Level level, Throwable throwable, String msg); void log(Level level, Throwable throwable, String msg, Object arg0); void log(Level level, Throwable throwable, String msg, Object arg0, Object arg1); void log(Level level, Throwable throwable, String msg, Object arg0, Object arg1, Object arg2); void log(Level level, Throwable throwable, String msg, 
Object[] argArray); }
// Mock-based tests: isLevelEnabled() passes through to the backing Log, and argument
// toString() is invoked only when the level is enabled (lazy {} formatting) -- the
// ToStringThrow fixture throws from toString() to prove it.
@Test public void testIsEnabled() { for (Level level : Level.values()) { reset(mockLog); expect(mockLog.isLevelEnabled(level)).andReturn(true); expect(mockLog.isLevelEnabled(level)).andReturn(false); replay(mockLog); assertTrue(logger.isLevelEnabled(level)); assertFalse(logger.isLevelEnabled(level)); verify(mockLog); } } @Test public void testShouldNotCallToString() throws Exception { for (Level level : Level.values()) { Method method = Logger.class.getMethod(getNameFromLevel(level), String.class, Object.class); reset(mockLog); expect(mockLog.isLevelEnabled(level)).andReturn(false); replay(mockLog); method.invoke(logger, "msg {}", new ToStringThrow()); verify(mockLog); } } @Test public void testShouldCallToString() throws Exception { for (Level level : Level.values()) { Method method = Logger.class.getMethod(getNameFromLevel(level), String.class, Object.class); reset(mockLog); expect(mockLog.isLevelEnabled(level)).andReturn(true); replay(mockLog); try { method.invoke(logger, "msg {}", new ToStringThrow()); fail("Should have thrown"); } catch (InvocationTargetException e) { assertTrue("should have thrown an IllegalStateException", e.getCause() instanceof IllegalStateException); } verify(mockLog); } }
// LocalLog.isLevelEnabled(): delegates the check to the configured Level threshold.
LocalLog implements Log { @Override public boolean isLevelEnabled(Level level) { return this.level.isEnabled(level); } LocalLog(String className); static void openLogFile(String logPath); @Override boolean isLevelEnabled(Level level); @Override void log(Level level, String msg); @Override void log(Level level, String msg, Throwable throwable); static final String LOCAL_LOG_LEVEL_PROPERTY; static final String LOCAL_LOG_FILE_PROPERTY; static final String LOCAL_LOG_PROPERTIES_FILE; }
// Setting the level system property to TRACE must enable TRACE on a fresh LocalLog;
// the property is cleared in finally so other tests are unaffected.
@Test public void testLevelProperty() { Log log = new LocalLog("foo"); if (log.isLevelEnabled(Level.TRACE)) { return; } System.setProperty(LocalLog.LOCAL_LOG_LEVEL_PROPERTY, "TRACE"); try { log = new LocalLog("foo"); assertTrue(log.isLevelEnabled(Level.TRACE)); } finally { System.clearProperty(LocalLog.LOCAL_LOG_LEVEL_PROPERTY); } }
// openLogFile(): null path routes output to System.out, otherwise opens the file;
// an unopenable path becomes IllegalArgumentException with the cause preserved.
// NOTE(review): the previously assigned printStream is replaced without being closed --
// repeated calls look like a file-handle leak; confirm whether callers ever reopen.
LocalLog implements Log { public static void openLogFile(String logPath) { if (logPath == null) { printStream = System.out; } else { try { printStream = new PrintStream(new File(logPath)); } catch (FileNotFoundException e) { throw new IllegalArgumentException("Log file " + logPath + " was not found", e); } } } LocalLog(String className); static void openLogFile(String logPath); @Override boolean isLevelEnabled(Level level); @Override void log(Level level, String msg); @Override void log(Level level, String msg, Throwable throwable); static final String LOCAL_LOG_LEVEL_PROPERTY; static final String LOCAL_LOG_FILE_PROPERTY; static final String LOCAL_LOG_PROPERTIES_FILE; }
// A path inside a nonexistent directory must raise IllegalArgumentException.
@Test(expected = IllegalArgumentException.class) public void testInvalidFileProperty() { LocalLog.openLogFile("not-a-proper-directory-name-we-hope/foo.txt"); }
// readLevelResourceFile(): parses per-class level configuration from the stream; a null
// stream yields null, IO failures are reported to stderr (best effort, deliberately not
// rethrown), and the stream is always closed via IOUtils.closeQuietly.
LocalLog implements Log { static List<PatternLevel> readLevelResourceFile(InputStream stream) { List<PatternLevel> levels = null; if (stream != null) { try { levels = configureClassLevels(stream); } catch (IOException e) { System.err.println( "IO exception reading the log properties file '" + LOCAL_LOG_PROPERTIES_FILE + "': " + e); } finally { IOUtils.closeQuietly(stream); } } return levels; } LocalLog(String className); static void openLogFile(String logPath); @Override boolean isLevelEnabled(Level level); @Override void log(Level level, String msg); @Override void log(Level level, String msg, Throwable throwable); static final String LOCAL_LOG_LEVEL_PROPERTY; static final String LOCAL_LOG_FILE_PROPERTY; static final String LOCAL_LOG_PROPERTIES_FILE; }
// Robustness tests: malformed config lines, streams that throw on read AND close, and a
// null stream must all be tolerated without an exception escaping.
@Test public void testInvalidLevelsFile() { StringWriter stringWriter = new StringWriter(); stringWriter.write("x\n"); stringWriter.write("com\\.j256\\.ormlite\\.stmt\\.StatementExecutor = INVALID_LEVEL\n"); LocalLog.readLevelResourceFile(new ByteArrayInputStream(stringWriter.toString().getBytes())); } @Test public void testIoErrorsReadingLevelFile() { InputStream errorStream = new InputStream() { @Override public int read() throws IOException { throw new IOException("simulated exception"); } @Override public void close() throws IOException { throw new IOException("simulated exception"); } }; LocalLog.readLevelResourceFile(errorStream); } @Test public void testInputStreamNull() { LocalLog.readLevelResourceFile(null); }
// LoggerFactory.getLogger(Class): convenience overload delegating to the String form
// with the class's fully-qualified name.
LoggerFactory { public static Logger getLogger(Class<?> clazz) { return getLogger(clazz.getName()); } private LoggerFactory(); static Logger getLogger(Class<?> clazz); static Logger getLogger(String className); static String getSimpleClassName(String className); static final String LOG_TYPE_SYSTEM_PROPERTY; }
// Smoke tests: both getLogger overloads return a non-null Logger.
@Test public void testGetLoggerClass() { assertNotNull(LoggerFactory.getLogger(getClass())); } @Test public void testGetLoggerString() { assertNotNull(LoggerFactory.getLogger(getClass().getName())); }
// Collapsed dataset entry: LoggerFactory.getSimpleClassName() implementation plus the
// class's member-signature summary.
LoggerFactory {
    /**
     * Reduces a fully-qualified class name to its final segment.
     *
     * @param className possibly dotted class name
     * @return the text after the last '.', or {@code className} itself when splitting
     *         on '.' yields at most one segment
     */
    public static String getSimpleClassName(String className) {
        // Same regex split as before; only the shape of the selection differs.
        String[] segments = className.split("\\.");
        return (segments.length > 1) ? segments[segments.length - 1] : className;
    }
    private LoggerFactory(); static Logger getLogger(Class<?> clazz); static Logger getLogger(String className); static String getSimpleClassName(String className); static final String LOG_TYPE_SYSTEM_PROPERTY; }
// A bare name comes back unchanged; a dotted name is reduced to its last segment.
@Test
public void testGetSimpleClassName() {
    final String bare = "foo";
    assertEquals(bare, LoggerFactory.getSimpleClassName(bare));
    final String leaf = "bar";
    final String dotted = bare + "." + leaf;
    assertEquals(leaf, LoggerFactory.getSimpleClassName(dotted));
}
// TableInfo convenience constructor: derives the DatabaseTableConfig from the data
// class via reflection, then delegates to the main constructor.
TableInfo { public TableInfo(ConnectionSource connectionSource, BaseDaoImpl<T, ID> baseDaoImpl, Class<T> dataClass) throws SQLException { this(connectionSource.getDatabaseType(), baseDaoImpl, DatabaseTableConfig.fromClass(connectionSource, dataClass)); } TableInfo(ConnectionSource connectionSource, BaseDaoImpl<T, ID> baseDaoImpl, Class<T> dataClass); TableInfo(DatabaseType databaseType, BaseDaoImpl<T, ID> baseDaoImpl, DatabaseTableConfig<T> tableConfig); Class<T> getDataClass(); String getTableName(); FieldType[] getFieldTypes(); FieldType getFieldTypeByColumnName(String columnName); FieldType getIdField(); Constructor<T> getConstructor(); String objectToString(T object); T createObject(); boolean isUpdatable(); boolean isForeignAutoCreate(); FieldType[] getForeignCollections(); boolean hasColumnName(String columnName); }
// A class with no annotated fields must be rejected at TableInfo construction time.
@Test(expected = IllegalArgumentException.class) public void testTableInfo() throws SQLException { new TableInfo<NoFieldAnnotations, Void>(connectionSource, null, NoFieldAnnotations.class); }
// objectToString(): renders "SimpleName col=value ..." from each field's
// extractJavaFieldValue(); extraction failures surface as IllegalStateException.
TableInfo { public String objectToString(T object) { StringBuilder sb = new StringBuilder(64); sb.append(object.getClass().getSimpleName()); for (FieldType fieldType : fieldTypes) { sb.append(' ').append(fieldType.getColumnName()).append('='); try { sb.append(fieldType.extractJavaFieldValue(object)); } catch (Exception e) { throw new IllegalStateException("Could not generate toString of field " + fieldType, e); } } return sb.toString(); } TableInfo(ConnectionSource connectionSource, BaseDaoImpl<T, ID> baseDaoImpl, Class<T> dataClass); TableInfo(DatabaseType databaseType, BaseDaoImpl<T, ID> baseDaoImpl, DatabaseTableConfig<T> tableConfig); Class<T> getDataClass(); String getTableName(); FieldType[] getFieldTypes(); FieldType getFieldTypeByColumnName(String columnName); FieldType getIdField(); Constructor<T> getConstructor(); String objectToString(T object); T createObject(); boolean isUpdatable(); boolean isForeignAutoCreate(); FieldType[] getForeignCollections(); boolean hasColumnName(String columnName); }
// The rendered string must contain the object's id value.
@Test public void testObjectToString() throws Exception { String id = "f11232oo"; Foo foo = new Foo(); foo.id = id; assertEquals(id, foo.id); TableInfo<Foo, String> tableInfo = new TableInfo<Foo, String>(connectionSource, null, Foo.class); assertTrue(tableInfo.objectToString(foo).contains(id)); }
// getTableName(): plain accessor for the resolved table name.
TableInfo { public String getTableName() { return tableName; } TableInfo(ConnectionSource connectionSource, BaseDaoImpl<T, ID> baseDaoImpl, Class<T> dataClass); TableInfo(DatabaseType databaseType, BaseDaoImpl<T, ID> baseDaoImpl, DatabaseTableConfig<T> tableConfig); Class<T> getDataClass(); String getTableName(); FieldType[] getFieldTypes(); FieldType getFieldTypeByColumnName(String columnName); FieldType getIdField(); Constructor<T> getConstructor(); String objectToString(T object); T createObject(); boolean isUpdatable(); boolean isForeignAutoCreate(); FieldType[] getForeignCollections(); boolean hasColumnName(String columnName); }
// With no table name in the annotation, the name defaults to the lower-cased simple
// class name (as this test asserts).
@Test public void testNoTableNameInAnnotation() throws Exception { TableInfo<NoTableNameAnnotation, Void> tableInfo = new TableInfo<NoTableNameAnnotation, Void>(connectionSource, null, NoTableNameAnnotation.class); assertEquals(NoTableNameAnnotation.class.getSimpleName().toLowerCase(), tableInfo.getTableName()); }
// createObject(): instantiates via the DAO's ObjectFactory when one is registered,
// otherwise via the reflective no-arg constructor; wires the DAO into the new instance
// and wraps any failure into an SQLException through SqlExceptionUtil.create.
TableInfo { public T createObject() throws SQLException { try { T instance; ObjectFactory<T> factory = null; if (baseDaoImpl != null) { factory = baseDaoImpl.getObjectFactory(); } if (factory == null) { instance = constructor.newInstance(); } else { instance = factory.createObject(constructor, baseDaoImpl.getDataClass()); } wireNewInstance(baseDaoImpl, instance); return instance; } catch (Exception e) { throw SqlExceptionUtil.create("Could not create object for " + constructor.getDeclaringClass(), e); } } TableInfo(ConnectionSource connectionSource, BaseDaoImpl<T, ID> baseDaoImpl, Class<T> dataClass); TableInfo(DatabaseType databaseType, BaseDaoImpl<T, ID> baseDaoImpl, DatabaseTableConfig<T> tableConfig); Class<T> getDataClass(); String getTableName(); FieldType[] getFieldTypes(); FieldType getFieldTypeByColumnName(String columnName); FieldType getIdField(); Constructor<T> getConstructor(); String objectToString(T object); T createObject(); boolean isUpdatable(); boolean isForeignAutoCreate(); FieldType[] getForeignCollections(); boolean hasColumnName(String columnName); }
@Test public void testConstruct() throws Exception { TableInfo<Foo, String> tableInfo = new TableInfo<Foo, String>(connectionSource, null, Foo.class); Foo foo = tableInfo.createObject(); assertNotNull(foo); }
TableInfo { public FieldType getFieldTypeByColumnName(String columnName) { if (fieldNameMap == null) { Map<String, FieldType> map = new HashMap<String, FieldType>(); for (FieldType fieldType : fieldTypes) { map.put(fieldType.getColumnName().toLowerCase(), fieldType); } fieldNameMap = map; } FieldType fieldType = fieldNameMap.get(columnName.toLowerCase()); if (fieldType != null) { return fieldType; } for (FieldType fieldType2 : fieldTypes) { if (fieldType2.getFieldName().equals(columnName)) { throw new IllegalArgumentException("You should use columnName '" + fieldType2.getColumnName() + "' for table " + tableName + " instead of fieldName '" + fieldType2.getFieldName() + "'"); } } throw new IllegalArgumentException("Unknown column name '" + columnName + "' in table " + tableName); } TableInfo(ConnectionSource connectionSource, BaseDaoImpl<T, ID> baseDaoImpl, Class<T> dataClass); TableInfo(DatabaseType databaseType, BaseDaoImpl<T, ID> baseDaoImpl, DatabaseTableConfig<T> tableConfig); Class<T> getDataClass(); String getTableName(); FieldType[] getFieldTypes(); FieldType getFieldTypeByColumnName(String columnName); FieldType getIdField(); Constructor<T> getConstructor(); String objectToString(T object); T createObject(); boolean isUpdatable(); boolean isForeignAutoCreate(); FieldType[] getForeignCollections(); boolean hasColumnName(String columnName); }
@Test public void testUnknownForeignField() throws Exception { TableInfo<Foreign, Void> tableInfo = new TableInfo<Foreign, Void>(connectionSource, null, Foreign.class); try { tableInfo.getFieldTypeByColumnName("foo"); fail("expected exception"); } catch (IllegalArgumentException e) { assertTrue(e.getMessage().contains("'" + Foreign.FOREIGN_FIELD_NAME + "'")); assertTrue(e.getMessage().contains("'foo'")); } }
TableInfo { public boolean hasColumnName(String columnName) { for (FieldType fieldType : fieldTypes) { if (fieldType.getColumnName().equals(columnName)) { return true; } } return false; } TableInfo(ConnectionSource connectionSource, BaseDaoImpl<T, ID> baseDaoImpl, Class<T> dataClass); TableInfo(DatabaseType databaseType, BaseDaoImpl<T, ID> baseDaoImpl, DatabaseTableConfig<T> tableConfig); Class<T> getDataClass(); String getTableName(); FieldType[] getFieldTypes(); FieldType getFieldTypeByColumnName(String columnName); FieldType getIdField(); Constructor<T> getConstructor(); String objectToString(T object); T createObject(); boolean isUpdatable(); boolean isForeignAutoCreate(); FieldType[] getForeignCollections(); boolean hasColumnName(String columnName); }
@Test public void testHasColumnName() throws Exception { Dao<Foo, String> dao = createDao(Foo.class, true); TableInfo<Foo, String> tableInfo = ((BaseDaoImpl<Foo, String>) dao).getTableInfo(); assertTrue(tableInfo.hasColumnName(COLUMN_NAME)); assertFalse(tableInfo.hasColumnName("not this name")); }
DatabaseTableConfigLoader { public static <T> void write(BufferedWriter writer, DatabaseTableConfig<T> config) throws SQLException { try { writeConfig(writer, config); } catch (IOException e) { throw SqlExceptionUtil.create("Could not write config to writer", e); } } static List<DatabaseTableConfig<?>> loadDatabaseConfigFromReader(BufferedReader reader); static DatabaseTableConfig<T> fromReader(BufferedReader reader); static void write(BufferedWriter writer, DatabaseTableConfig<T> config); }
@Test public void testConfigFile() throws Exception { DatabaseTableConfig<NoFields> config = new DatabaseTableConfig<NoFields>(); StringBuilder body = new StringBuilder(); StringWriter writer = new StringWriter(); BufferedWriter buffer = new BufferedWriter(writer); Class<NoFields> clazz = NoFields.class; config.setDataClass(clazz); body.append("dataClass=").append(clazz.getName()).append(LINE_SEP); checkConfigOutput(config, body, writer, buffer, false); String tableName = "pojgefwpjoefwpjo"; config.setTableName(tableName); body.append("tableName=").append(tableName).append(LINE_SEP); checkConfigOutput(config, body, writer, buffer, false); DatabaseFieldConfig field1 = new DatabaseFieldConfig(); String columnName = "efjpowefpjoefw"; field1.setColumnName(columnName); config.setFieldConfigs(Arrays.asList(field1)); StringWriter fieldWriter = new StringWriter(); BufferedWriter fieldBuffer = new BufferedWriter(fieldWriter); DatabaseFieldConfigLoader.write(fieldBuffer, field1, tableName); fieldBuffer.flush(); body.append("# --table-fields-start--").append(LINE_SEP); body.append(fieldWriter.toString()); checkConfigOutput(config, body, writer, buffer, true); }
DatabaseTableConfigLoader { public static List<DatabaseTableConfig<?>> loadDatabaseConfigFromReader(BufferedReader reader) throws SQLException { List<DatabaseTableConfig<?>> list = new ArrayList<DatabaseTableConfig<?>>(); while (true) { DatabaseTableConfig<?> config = DatabaseTableConfigLoader.fromReader(reader); if (config == null) { break; } list.add(config); } return list; } static List<DatabaseTableConfig<?>> loadDatabaseConfigFromReader(BufferedReader reader); static DatabaseTableConfig<T> fromReader(BufferedReader reader); static void write(BufferedWriter writer, DatabaseTableConfig<T> config); }
@Test public void testConfigEntriesFromStream() throws Exception { StringBuilder value = new StringBuilder(); value.append(TABLE_START); value.append("# random comment").append(LINE_SEP); value.append(LINE_SEP); value.append("dataClass=").append(Foo.class.getName()).append(LINE_SEP); String tableName = "fprwojfgopwejfw"; value.append("tableName=").append(tableName).append(LINE_SEP); value.append("# --table-fields-start--").append(LINE_SEP); value.append("# --field-start--").append(LINE_SEP); String fieldName = "weopjfwefjw"; value.append("fieldName=").append(fieldName).append(LINE_SEP); value.append("canBeNull=true").append(LINE_SEP); value.append("generatedId=true").append(LINE_SEP); value.append("# --field-end--").append(LINE_SEP); value.append("# --table-fields-end--").append(LINE_SEP); value.append(TABLE_END); List<DatabaseTableConfig<?>> tables = DatabaseTableConfigLoader.loadDatabaseConfigFromReader(new BufferedReader(new StringReader( value.toString()))); assertEquals(1, tables.size()); assertEquals(tableName, tables.get(0).getTableName()); DatabaseTableConfig<?> config = tables.get(0); List<DatabaseFieldConfig> fields = config.getFieldConfigs(); assertEquals(1, fields.size()); assertEquals(fieldName, fields.get(0).getFieldName()); } @Test(expected = SQLException.class) public void testConfigInvalidLine() throws Exception { StringBuilder value = new StringBuilder(); value.append(TABLE_START); value.append("dataClass").append(LINE_SEP); DatabaseTableConfigLoader.loadDatabaseConfigFromReader(new BufferedReader(new StringReader(value.toString()))); } @Test(expected = IllegalArgumentException.class) public void testConfigUnknownClass() throws Exception { StringBuilder value = new StringBuilder(); value.append(TABLE_START); value.append("dataClass=unknown.class.name.okay").append(LINE_SEP); value.append("# --table-fields-start--").append(LINE_SEP); value.append("# --field-start--").append(LINE_SEP); value.append("fieldName=xxx").append(LINE_SEP); value.append("# 
--field-end--").append(LINE_SEP); value.append("# --table-fields-end--").append(LINE_SEP); value.append(TABLE_END); DatabaseTableConfigLoader.loadDatabaseConfigFromReader(new BufferedReader(new StringReader(value.toString()))); } @Test public void testQuickEndOfConfig() throws Exception { StringBuilder value = new StringBuilder(); value.append(TABLE_START); value.append("dataClass=").append(Foo.class.getName()).append(LINE_SEP); value.append("# --table-fields-start--").append(LINE_SEP); value.append("# --field-start--").append(LINE_SEP); value.append("fieldName=xxx").append(LINE_SEP); value.append("# --field-end--").append(LINE_SEP); value.append("# --field-start--").append(LINE_SEP); List<DatabaseTableConfig<?>> tables = DatabaseTableConfigLoader.loadDatabaseConfigFromReader(new BufferedReader(new StringReader( value.toString()))); assertEquals(1, tables.size()); DatabaseTableConfig<?> config = tables.get(0); List<DatabaseFieldConfig> fields = config.getFieldConfigs(); assertEquals(1, fields.size()); }
DatabaseTableConfig { public DatabaseTableConfig() { } DatabaseTableConfig(); DatabaseTableConfig(Class<T> dataClass, List<DatabaseFieldConfig> fieldConfigs); DatabaseTableConfig(Class<T> dataClass, String tableName, List<DatabaseFieldConfig> fieldConfigs); private DatabaseTableConfig(Class<T> dataClass, String tableName, FieldType[] fieldTypes); void initialize(); Class<T> getDataClass(); void setDataClass(Class<T> dataClass); String getTableName(); void setTableName(String tableName); void setFieldConfigs(List<DatabaseFieldConfig> fieldConfigs); void extractFieldTypes(ConnectionSource connectionSource); FieldType[] getFieldTypes(DatabaseType databaseType); List<DatabaseFieldConfig> getFieldConfigs(); Constructor<T> getConstructor(); void setConstructor(Constructor<T> constructor); static DatabaseTableConfig<T> fromClass(ConnectionSource connectionSource, Class<T> clazz); static String extractTableName(Class<T> clazz); static Constructor<T> findNoArgConstructor(Class<T> dataClass); }
@Test public void testDatabaseTableConfig() throws SQLException { DatabaseTableConfig<DatabaseTableAnno> dbTableConf = DatabaseTableConfig.fromClass(connectionSource, DatabaseTableAnno.class); assertEquals(DatabaseTableAnno.class, dbTableConf.getDataClass()); assertEquals(TABLE_NAME, dbTableConf.getTableName()); dbTableConf.extractFieldTypes(connectionSource); FieldType[] fieldTypes = dbTableConf.getFieldTypes(databaseType); assertEquals(1, fieldTypes.length); assertEquals("stuff", fieldTypes[0].getColumnName()); }
DatabaseTableConfig { public void setFieldConfigs(List<DatabaseFieldConfig> fieldConfigs) { this.fieldConfigs = fieldConfigs; } DatabaseTableConfig(); DatabaseTableConfig(Class<T> dataClass, List<DatabaseFieldConfig> fieldConfigs); DatabaseTableConfig(Class<T> dataClass, String tableName, List<DatabaseFieldConfig> fieldConfigs); private DatabaseTableConfig(Class<T> dataClass, String tableName, FieldType[] fieldTypes); void initialize(); Class<T> getDataClass(); void setDataClass(Class<T> dataClass); String getTableName(); void setTableName(String tableName); void setFieldConfigs(List<DatabaseFieldConfig> fieldConfigs); void extractFieldTypes(ConnectionSource connectionSource); FieldType[] getFieldTypes(DatabaseType databaseType); List<DatabaseFieldConfig> getFieldConfigs(); Constructor<T> getConstructor(); void setConstructor(Constructor<T> constructor); static DatabaseTableConfig<T> fromClass(ConnectionSource connectionSource, Class<T> clazz); static String extractTableName(Class<T> clazz); static Constructor<T> findNoArgConstructor(Class<T> dataClass); }
@Test public void testSetFieldConfigs() throws SQLException { DatabaseTableConfig<DatabaseTableAnno> dbTableConf = new DatabaseTableConfig<DatabaseTableAnno>(); dbTableConf.setDataClass(DatabaseTableAnno.class); dbTableConf.setTableName(TABLE_NAME); List<DatabaseFieldConfig> fieldConfigs = new ArrayList<DatabaseFieldConfig>(); fieldConfigs.add(new DatabaseFieldConfig("stuff", null, DataType.UNKNOWN, "", 0, true, false, false, null, false, null, false, null, false, null, false, null, null, false, DatabaseFieldConfig.NO_MAX_FOREIGN_AUTO_REFRESH_LEVEL_SPECIFIED, 0)); dbTableConf.setFieldConfigs(fieldConfigs); dbTableConf.initialize(); assertEquals(DatabaseTableAnno.class, dbTableConf.getDataClass()); assertEquals(TABLE_NAME, dbTableConf.getTableName()); dbTableConf.extractFieldTypes(connectionSource); FieldType[] fieldTypes = dbTableConf.getFieldTypes(databaseType); assertEquals(1, fieldTypes.length); assertEquals("stuff", fieldTypes[0].getColumnName()); }
DatabaseTableConfig { public void initialize() { if (dataClass == null) { throw new IllegalStateException("dataClass was never set on " + getClass().getSimpleName()); } if (tableName == null) { tableName = extractTableName(dataClass); } } DatabaseTableConfig(); DatabaseTableConfig(Class<T> dataClass, List<DatabaseFieldConfig> fieldConfigs); DatabaseTableConfig(Class<T> dataClass, String tableName, List<DatabaseFieldConfig> fieldConfigs); private DatabaseTableConfig(Class<T> dataClass, String tableName, FieldType[] fieldTypes); void initialize(); Class<T> getDataClass(); void setDataClass(Class<T> dataClass); String getTableName(); void setTableName(String tableName); void setFieldConfigs(List<DatabaseFieldConfig> fieldConfigs); void extractFieldTypes(ConnectionSource connectionSource); FieldType[] getFieldTypes(DatabaseType databaseType); List<DatabaseFieldConfig> getFieldConfigs(); Constructor<T> getConstructor(); void setConstructor(Constructor<T> constructor); static DatabaseTableConfig<T> fromClass(ConnectionSource connectionSource, Class<T> clazz); static String extractTableName(Class<T> clazz); static Constructor<T> findNoArgConstructor(Class<T> dataClass); }
@Test(expected = IllegalStateException.class) public void testBadSpringWiring() { DatabaseTableConfig<NoFields> dbTableConf = new DatabaseTableConfig<NoFields>(); dbTableConf.initialize(); }
ConsoleLogger extends AbstractInternalLogger { @Override public void trace(String msg) { println(msg); } protected ConsoleLogger(String name); @Override boolean isTraceEnabled(); @Override void trace(String msg); @Override void trace(String format, Object arg); @Override void trace(String format, Object argA, Object argB); @Override void trace(String format, Object... arguments); @Override void trace(String msg, Throwable t); @Override boolean isDebugEnabled(); @Override void debug(String msg); @Override void debug(String format, Object arg); @Override void debug(String format, Object argA, Object argB); @Override void debug(String format, Object... arguments); @Override void debug(String msg, Throwable t); @Override boolean isInfoEnabled(); @Override void info(String msg); @Override void info(String format, Object arg); @Override void info(String format, Object argA, Object argB); @Override void info(String format, Object... arguments); @Override void info(String msg, Throwable t); @Override boolean isWarnEnabled(); @Override void warn(String msg); @Override void warn(String format, Object arg); @Override void warn(String format, Object... arguments); @Override void warn(String format, Object argA, Object argB); @Override void warn(String msg, Throwable t); @Override boolean isErrorEnabled(); @Override void error(String msg); @Override void error(String format, Object arg); @Override void error(String format, Object argA, Object argB); @Override void error(String format, Object... arguments); @Override void error(String msg, Throwable t); }
@Test public void testMsg() throws Exception { ConsoleLogger logger = new ConsoleLogger("org"); logger.trace("ok"); }
DatabaseTableConfig { public FieldType[] getFieldTypes(DatabaseType databaseType) throws SQLException { if (fieldTypes == null) { throw new SQLException("Field types have not been extracted in table config"); } return fieldTypes; } DatabaseTableConfig(); DatabaseTableConfig(Class<T> dataClass, List<DatabaseFieldConfig> fieldConfigs); DatabaseTableConfig(Class<T> dataClass, String tableName, List<DatabaseFieldConfig> fieldConfigs); private DatabaseTableConfig(Class<T> dataClass, String tableName, FieldType[] fieldTypes); void initialize(); Class<T> getDataClass(); void setDataClass(Class<T> dataClass); String getTableName(); void setTableName(String tableName); void setFieldConfigs(List<DatabaseFieldConfig> fieldConfigs); void extractFieldTypes(ConnectionSource connectionSource); FieldType[] getFieldTypes(DatabaseType databaseType); List<DatabaseFieldConfig> getFieldConfigs(); Constructor<T> getConstructor(); void setConstructor(Constructor<T> constructor); static DatabaseTableConfig<T> fromClass(ConnectionSource connectionSource, Class<T> clazz); static String extractTableName(Class<T> clazz); static Constructor<T> findNoArgConstructor(Class<T> dataClass); }
@Test(expected = SQLException.class) public void testNoFields() throws SQLException { new DatabaseTableConfig<DatabaseTableAnno>().getFieldTypes(databaseType); }
TableUtils { public static <T, ID> List<String> getCreateTableStatements(ConnectionSource connectionSource, Class<T> dataClass) throws SQLException { Dao<T, ID> dao = DaoManager.createDao(connectionSource, dataClass); if (dao instanceof BaseDaoImpl<?, ?>) { return addCreateTableStatements(connectionSource, ((BaseDaoImpl<?, ?>) dao).getTableInfo(), false); } else { TableInfo<T, ID> tableInfo = new TableInfo<T, ID>(connectionSource, null, dataClass); return addCreateTableStatements(connectionSource, tableInfo, false); } } private TableUtils(); static int createTable(ConnectionSource connectionSource, Class<T> dataClass); static int createTable(Dao<?, ?> dao); static int createTableIfNotExists(ConnectionSource connectionSource, Class<T> dataClass); static int createTable(ConnectionSource connectionSource, DatabaseTableConfig<T> tableConfig); static int createTableIfNotExists(ConnectionSource connectionSource, DatabaseTableConfig<T> tableConfig); static List<String> getCreateTableStatements(ConnectionSource connectionSource, Class<T> dataClass); static List<String> getCreateTableStatements(ConnectionSource connectionSource, DatabaseTableConfig<T> tableConfig); static int dropTable(ConnectionSource connectionSource, Class<T> dataClass, boolean ignoreErrors); static int dropTable(Dao<T, ID> dao, boolean ignoreErrors); static int dropTable(ConnectionSource connectionSource, DatabaseTableConfig<T> tableConfig, boolean ignoreErrors); static int clearTable(ConnectionSource connectionSource, Class<T> dataClass); static int clearTable(ConnectionSource connectionSource, DatabaseTableConfig<T> tableConfig); }
@Test public void testCreateStatements() throws Exception { List<String> stmts = TableUtils.getCreateTableStatements(connectionSource, LocalFoo.class); assertEquals(1, stmts.size()); assertEquals(expectedCreateStatement(), stmts.get(0)); } @Test public void testCreateStatementsTableConfig() throws Exception { List<String> stmts = TableUtils.getCreateTableStatements(connectionSource, DatabaseTableConfig.fromClass(connectionSource, LocalFoo.class)); assertEquals(1, stmts.size()); assertEquals(expectedCreateStatement(), stmts.get(0)); }
TableUtils { public static <T> int createTable(ConnectionSource connectionSource, Class<T> dataClass) throws SQLException { Dao<T, ?> dao = DaoManager.createDao(connectionSource, dataClass); return doCreateTable(dao, false); } private TableUtils(); static int createTable(ConnectionSource connectionSource, Class<T> dataClass); static int createTable(Dao<?, ?> dao); static int createTableIfNotExists(ConnectionSource connectionSource, Class<T> dataClass); static int createTable(ConnectionSource connectionSource, DatabaseTableConfig<T> tableConfig); static int createTableIfNotExists(ConnectionSource connectionSource, DatabaseTableConfig<T> tableConfig); static List<String> getCreateTableStatements(ConnectionSource connectionSource, Class<T> dataClass); static List<String> getCreateTableStatements(ConnectionSource connectionSource, DatabaseTableConfig<T> tableConfig); static int dropTable(ConnectionSource connectionSource, Class<T> dataClass, boolean ignoreErrors); static int dropTable(Dao<T, ID> dao, boolean ignoreErrors); static int dropTable(ConnectionSource connectionSource, DatabaseTableConfig<T> tableConfig, boolean ignoreErrors); static int clearTable(ConnectionSource connectionSource, Class<T> dataClass); static int clearTable(ConnectionSource connectionSource, DatabaseTableConfig<T> tableConfig); }
@Test public void testCreateTableQueriesAfter() throws Exception { final String queryAfter = "SELECT * from foo"; DatabaseType databaseType = new H2DatabaseType() { @Override public void appendColumnArg(String tableName, StringBuilder sb, FieldType fieldType, List<String> additionalArgs, List<String> statementsBefore, List<String> statementsAfter, List<String> queriesAfter) throws SQLException { super.appendColumnArg(tableName, sb, fieldType, additionalArgs, statementsBefore, statementsAfter, queriesAfter); if (fieldType.getColumnName().equals(LocalFoo.ID_FIELD_NAME)) { queriesAfter.add(queryAfter); } } }; final ConnectionSource connectionSource = createMock(ConnectionSource.class); testCreate("localfoo", connectionSource, databaseType, 0, false, queryAfter, new Callable<Integer>() { @Override public Integer call() throws Exception { return TableUtils.createTable(connectionSource, LocalFoo.class); } }); } @Test(expected = SQLException.class) public void testCreateTableThrow() throws Exception { final ConnectionSource connectionSource = createMock(ConnectionSource.class); testCreate("localfoo", connectionSource, databaseType, 1, true, null, new Callable<Integer>() { @Override public Integer call() throws Exception { return TableUtils.createTable(connectionSource, LocalFoo.class); } }); } @Test(expected = SQLException.class) public void testCreateTableAboveZero() throws Exception { final ConnectionSource connectionSource = createMock(ConnectionSource.class); testCreate("localfoo", connectionSource, databaseType, 1, false, null, new Callable<Integer>() { @Override public Integer call() throws Exception { return TableUtils.createTable(connectionSource, LocalFoo.class); } }); } @Test(expected = SQLException.class) public void testCreateTableBelowZero() throws Exception { final ConnectionSource connectionSource = createMock(ConnectionSource.class); testCreate("localfoo", connectionSource, databaseType, -1, false, null, new Callable<Integer>() { @Override public Integer 
call() throws Exception { return TableUtils.createTable(connectionSource, LocalFoo.class); } }); } @Test public void testCreateTableTableConfig() throws Exception { final ConnectionSource connectionSource = createMock(ConnectionSource.class); testCreate("localfoo", connectionSource, databaseType, 0, false, null, new Callable<Integer>() { @Override public Integer call() throws Exception { return (int) TableUtils.createTable(connectionSource, DatabaseTableConfig.fromClass(connectionSource, LocalFoo.class)); } }); } @Test public void testCreateTable() throws Exception { Dao<LocalFoo, Integer> fooDao = createDao(LocalFoo.class, false); createTable(LocalFoo.class, false); assertEquals(0, fooDao.queryForAll().size()); dropTable(LocalFoo.class, true); try { fooDao.countOf(); fail("Was expecting a SQL exception"); } catch (Exception expected) { } createTable(LocalFoo.class, false); assertEquals(0, fooDao.queryForAll().size()); dropTable(LocalFoo.class, true); }
TableUtils { public static <T, ID> int dropTable(ConnectionSource connectionSource, Class<T> dataClass, boolean ignoreErrors) throws SQLException { Dao<T, ID> dao = DaoManager.createDao(connectionSource, dataClass); return dropTable(dao, ignoreErrors); } private TableUtils(); static int createTable(ConnectionSource connectionSource, Class<T> dataClass); static int createTable(Dao<?, ?> dao); static int createTableIfNotExists(ConnectionSource connectionSource, Class<T> dataClass); static int createTable(ConnectionSource connectionSource, DatabaseTableConfig<T> tableConfig); static int createTableIfNotExists(ConnectionSource connectionSource, DatabaseTableConfig<T> tableConfig); static List<String> getCreateTableStatements(ConnectionSource connectionSource, Class<T> dataClass); static List<String> getCreateTableStatements(ConnectionSource connectionSource, DatabaseTableConfig<T> tableConfig); static int dropTable(ConnectionSource connectionSource, Class<T> dataClass, boolean ignoreErrors); static int dropTable(Dao<T, ID> dao, boolean ignoreErrors); static int dropTable(ConnectionSource connectionSource, DatabaseTableConfig<T> tableConfig, boolean ignoreErrors); static int clearTable(ConnectionSource connectionSource, Class<T> dataClass); static int clearTable(ConnectionSource connectionSource, DatabaseTableConfig<T> tableConfig); }
@Test public void testDropTable() throws Exception { final ConnectionSource connectionSource = createMock(ConnectionSource.class); testDrop("localfoo", connectionSource, 0, false, new Callable<Integer>() { @Override public Integer call() throws Exception { return (int) TableUtils.dropTable(connectionSource, LocalFoo.class, false); } }); } @Test(expected = SQLException.class) public void testDropTableThrow() throws Exception { final ConnectionSource connectionSource = createMock(ConnectionSource.class); testDrop("localfoo", connectionSource, 0, true, new Callable<Integer>() { @Override public Integer call() throws Exception { return (int) TableUtils.dropTable(connectionSource, LocalFoo.class, false); } }); } @Test public void testDropTableThrowIgnore() throws Exception { final ConnectionSource connectionSource = createMock(ConnectionSource.class); testDrop("localfoo", connectionSource, 0, true, new Callable<Integer>() { @Override public Integer call() throws Exception { return (int) TableUtils.dropTable(connectionSource, LocalFoo.class, true); } }); } @Test(expected = SQLException.class) public void testDropTableNegRows() throws Exception { final ConnectionSource connectionSource = createMock(ConnectionSource.class); testDrop("localfoo", connectionSource, -1, false, new Callable<Integer>() { @Override public Integer call() throws Exception { return (int) TableUtils.dropTable(connectionSource, LocalFoo.class, false); } }); } @Test public void testDropTableTableConfig() throws Exception { final ConnectionSource connectionSource = createMock(ConnectionSource.class); testDrop("localfoo", connectionSource, 0, false, new Callable<Integer>() { @Override public Integer call() throws Exception { return (int) TableUtils.dropTable(connectionSource, DatabaseTableConfig.fromClass(connectionSource, LocalFoo.class), false); } }); } @Test public void testDropThenQuery() throws Exception { Dao<LocalFoo, Integer> fooDao = createDao(LocalFoo.class, true); assertEquals(0, 
fooDao.queryForAll().size()); dropTable(LocalFoo.class, true); try { fooDao.queryForAll(); fail("Should have thrown"); } catch (SQLException e) { } }
TableUtils { public static <T> int clearTable(ConnectionSource connectionSource, Class<T> dataClass) throws SQLException { String tableName = DatabaseTableConfig.extractTableName(dataClass); DatabaseType databaseType = connectionSource.getDatabaseType(); if (databaseType.isEntityNamesMustBeUpCase()) { tableName = databaseType.upCaseEntityName(tableName); } return clearTable(connectionSource, tableName); } private TableUtils(); static int createTable(ConnectionSource connectionSource, Class<T> dataClass); static int createTable(Dao<?, ?> dao); static int createTableIfNotExists(ConnectionSource connectionSource, Class<T> dataClass); static int createTable(ConnectionSource connectionSource, DatabaseTableConfig<T> tableConfig); static int createTableIfNotExists(ConnectionSource connectionSource, DatabaseTableConfig<T> tableConfig); static List<String> getCreateTableStatements(ConnectionSource connectionSource, Class<T> dataClass); static List<String> getCreateTableStatements(ConnectionSource connectionSource, DatabaseTableConfig<T> tableConfig); static int dropTable(ConnectionSource connectionSource, Class<T> dataClass, boolean ignoreErrors); static int dropTable(Dao<T, ID> dao, boolean ignoreErrors); static int dropTable(ConnectionSource connectionSource, DatabaseTableConfig<T> tableConfig, boolean ignoreErrors); static int clearTable(ConnectionSource connectionSource, Class<T> dataClass); static int clearTable(ConnectionSource connectionSource, DatabaseTableConfig<T> tableConfig); }
@Test public void testClearTable() throws Exception { Dao<LocalFoo, Integer> fooDao = createDao(LocalFoo.class, true); assertEquals(0, fooDao.countOf()); LocalFoo foo = new LocalFoo(); assertEquals(1, fooDao.create(foo)); assertEquals(1, fooDao.countOf()); TableUtils.clearTable(connectionSource, LocalFoo.class); assertEquals(0, fooDao.countOf()); }
TableUtils { public static <T> int createTableIfNotExists(ConnectionSource connectionSource, Class<T> dataClass) throws SQLException { Dao<T, ?> dao = DaoManager.createDao(connectionSource, dataClass); return doCreateTable(dao, true); } private TableUtils(); static int createTable(ConnectionSource connectionSource, Class<T> dataClass); static int createTable(Dao<?, ?> dao); static int createTableIfNotExists(ConnectionSource connectionSource, Class<T> dataClass); static int createTable(ConnectionSource connectionSource, DatabaseTableConfig<T> tableConfig); static int createTableIfNotExists(ConnectionSource connectionSource, DatabaseTableConfig<T> tableConfig); static List<String> getCreateTableStatements(ConnectionSource connectionSource, Class<T> dataClass); static List<String> getCreateTableStatements(ConnectionSource connectionSource, DatabaseTableConfig<T> tableConfig); static int dropTable(ConnectionSource connectionSource, Class<T> dataClass, boolean ignoreErrors); static int dropTable(Dao<T, ID> dao, boolean ignoreErrors); static int dropTable(ConnectionSource connectionSource, DatabaseTableConfig<T> tableConfig, boolean ignoreErrors); static int clearTable(ConnectionSource connectionSource, Class<T> dataClass); static int clearTable(ConnectionSource connectionSource, DatabaseTableConfig<T> tableConfig); }
// Dataset record: unit test for TableUtils.createTableIfNotExists() — creating the table
// twice must succeed and leave the (empty) table usable.
@Test
public void testCreateTableIfNotExists() throws Exception {
    dropTable(LocalFoo.class, true);
    Dao<LocalFoo, Integer> dao = createDao(LocalFoo.class, false);
    try {
        dao.countOf();
        fail("Should have thrown an exception");
    } catch (Exception ignored) {
        // expected: the table does not exist yet
    }
    TableUtils.createTableIfNotExists(connectionSource, LocalFoo.class);
    assertEquals(0, dao.countOf());
    // A second create-if-not-exists must be a no-op rather than a failure.
    TableUtils.createTableIfNotExists(connectionSource, LocalFoo.class);
    assertEquals(0, dao.countOf());
}
// Dataset record (flattened): focal method BaseDatabaseType.loadDriver(), which resolves the
// configured JDBC driver class via Class.forName and rethrows ClassNotFoundException as a
// SQLException (via SqlExceptionUtil.create), followed by the remaining member signatures of
// BaseDatabaseType. NOTE(review): the record spans two physical lines and its signature list
// is cut mid-declaration at the line break; left byte-identical to preserve the record.
BaseDatabaseType implements DatabaseType { @Override public void loadDriver() throws SQLException { String className = getDriverClassName(); if (className != null) { try { Class.forName(className); } catch (ClassNotFoundException e) { throw SqlExceptionUtil.create("Driver class was not found for " + getDatabaseName() + " database. Missing jar with class " + className + ".", e); } } } @Override void loadDriver(); @Override void setDriver(Driver driver); @Override void appendColumnArg(String tableName, StringBuilder sb, FieldType fieldType, List<String> additionalArgs, List<String> statementsBefore, List<String> statementsAfter, List<String> queriesAfter); @Override void addPrimaryKeySql(FieldType[] fieldTypes, List<String> additionalArgs, List<String> statementsBefore, List<String> statementsAfter, List<String> queriesAfter); @Override void addUniqueComboSql(FieldType[] fieldTypes, List<String> additionalArgs, List<String> statementsBefore, List<String> statementsAfter, List<String> queriesAfter); @Override void dropColumnArg(FieldType fieldType, List<String> statementsBefore, List<String> statementsAfter); @Override void appendEscapedWord(StringBuilder sb, String word); @Override void appendEscapedEntityName(StringBuilder sb, String name); @Override String generateIdSequenceName(String tableName, FieldType idFieldType); @Override String getCommentLinePrefix(); @Override DataPersister getDataPersister(DataPersister defaultPersister, FieldType fieldType); @Override FieldConverter getFieldConverter(DataPersister dataPersister, FieldType fieldType); @Override boolean isIdSequenceNeeded(); @Override boolean isVarcharFieldWidthSupported(); @Override boolean isLimitSqlSupported(); @Override boolean isOffsetSqlSupported(); @Override boolean isOffsetLimitArgument(); @Override boolean isLimitAfterSelect(); @Override void appendLimitValue(StringBuilder sb, long limit, Long offset); @Override void appendOffsetValue(StringBuilder sb, long offset); @Override void 
appendSelectNextValFromSequence(StringBuilder sb, String sequenceName); @Override void appendCreateTableSuffix(StringBuilder sb); @Override boolean isCreateTableReturnsZero(); @Override boolean isCreateTableReturnsNegative(); @Override boolean isEntityNamesMustBeUpCase(); @Override String upCaseEntityName(String entityName); @Override boolean isNestedSavePointsSupported(); @Override String getPingStatement(); @Override boolean isBatchUseTransaction(); @Override boolean isTruncateSupported(); @Override boolean isCreateIfNotExistsSupported(); @Override boolean isCreateIndexIfNotExistsSupported(); @Override boolean isSelectSequenceBeforeInsert(); @Override boolean isAllowGeneratedIdInsertSupported(); @Override DatabaseTableConfig<T> extractDatabaseTableConfig(ConnectionSource connectionSource, Class<T> clazz); @Override void appendInsertNoColumns(StringBuilder sb); }
// Dataset record: unit test asserting loadDriver() raises SQLException when the driver
// class is absent from the classpath.
@Test(expected = SQLException.class)
public void testDriverNotFound() throws SQLException {
    TestDatabaseType databaseType = new TestDatabaseType();
    databaseType.loadDriver();
}
// Dataset record (flattened): focal method BaseDatabaseType.configureGeneratedId(), the base
// implementation that unconditionally throws IllegalStateException — generated ids must be
// enabled by a subclass override — followed by the remaining member signatures of
// BaseDatabaseType. NOTE(review): the record spans two physical lines and its signature list
// is cut mid-declaration at the line break; left byte-identical to preserve the record.
BaseDatabaseType implements DatabaseType { protected void configureGeneratedId(String tableName, StringBuilder sb, FieldType fieldType, List<String> statementsBefore, List<String> statementsAfter, List<String> additionalArgs, List<String> queriesAfter) { throw new IllegalStateException( "GeneratedId is not supported by database " + getDatabaseName() + " for field " + fieldType); } @Override void loadDriver(); @Override void setDriver(Driver driver); @Override void appendColumnArg(String tableName, StringBuilder sb, FieldType fieldType, List<String> additionalArgs, List<String> statementsBefore, List<String> statementsAfter, List<String> queriesAfter); @Override void addPrimaryKeySql(FieldType[] fieldTypes, List<String> additionalArgs, List<String> statementsBefore, List<String> statementsAfter, List<String> queriesAfter); @Override void addUniqueComboSql(FieldType[] fieldTypes, List<String> additionalArgs, List<String> statementsBefore, List<String> statementsAfter, List<String> queriesAfter); @Override void dropColumnArg(FieldType fieldType, List<String> statementsBefore, List<String> statementsAfter); @Override void appendEscapedWord(StringBuilder sb, String word); @Override void appendEscapedEntityName(StringBuilder sb, String name); @Override String generateIdSequenceName(String tableName, FieldType idFieldType); @Override String getCommentLinePrefix(); @Override DataPersister getDataPersister(DataPersister defaultPersister, FieldType fieldType); @Override FieldConverter getFieldConverter(DataPersister dataPersister, FieldType fieldType); @Override boolean isIdSequenceNeeded(); @Override boolean isVarcharFieldWidthSupported(); @Override boolean isLimitSqlSupported(); @Override boolean isOffsetSqlSupported(); @Override boolean isOffsetLimitArgument(); @Override boolean isLimitAfterSelect(); @Override void appendLimitValue(StringBuilder sb, long limit, Long offset); @Override void appendOffsetValue(StringBuilder sb, long offset); @Override void 
appendSelectNextValFromSequence(StringBuilder sb, String sequenceName); @Override void appendCreateTableSuffix(StringBuilder sb); @Override boolean isCreateTableReturnsZero(); @Override boolean isCreateTableReturnsNegative(); @Override boolean isEntityNamesMustBeUpCase(); @Override String upCaseEntityName(String entityName); @Override boolean isNestedSavePointsSupported(); @Override String getPingStatement(); @Override boolean isBatchUseTransaction(); @Override boolean isTruncateSupported(); @Override boolean isCreateIfNotExistsSupported(); @Override boolean isCreateIndexIfNotExistsSupported(); @Override boolean isSelectSequenceBeforeInsert(); @Override boolean isAllowGeneratedIdInsertSupported(); @Override DatabaseTableConfig<T> extractDatabaseTableConfig(ConnectionSource connectionSource, Class<T> clazz); @Override void appendInsertNoColumns(StringBuilder sb); }
// Dataset record: unit test asserting the base configureGeneratedId() implementation
// rejects generated-id configuration with IllegalStateException.
@Test(expected = IllegalStateException.class)
public void testConfigureGeneratedId() {
    StringBuilder sb = new StringBuilder();
    new TestDatabaseType().configureGeneratedId(null, sb, null, new ArrayList<String>(), null,
            new ArrayList<String>(), new ArrayList<String>());
}
// Dataset record: focal method BaseSqliteDatabaseType.configureGeneratedId() plus the
// remaining member signatures of the class.
BaseSqliteDatabaseType extends BaseDatabaseType {
    @Override
    protected void configureGeneratedId(String tableName, StringBuilder sb, FieldType fieldType,
            List<String> statementsBefore, List<String> statementsAfter, List<String> additionalArgs,
            List<String> queriesAfter) {
        // Sqlite auto-increment ids must be INTEGER or LONG; anything else is rejected.
        SqlType sqlType = fieldType.getSqlType();
        boolean integral = (sqlType == SqlType.INTEGER || sqlType == SqlType.LONG);
        if (!integral) {
            throw new IllegalArgumentException("Sqlite requires that auto-increment generated-id be integer or long type");
        }
        sb.append("PRIMARY KEY AUTOINCREMENT ");
    }

    @Override boolean isVarcharFieldWidthSupported();
    @Override boolean isCreateTableReturnsZero();
    @Override boolean isCreateIfNotExistsSupported();
    @Override FieldConverter getFieldConverter(DataPersister dataPersister, FieldType fieldType);
    @Override void appendInsertNoColumns(StringBuilder sb);
}
// Dataset record: two unit tests for BaseSqliteDatabaseType.configureGeneratedId().
@Test(expected = IllegalArgumentException.class)
public void testConfigureGeneratedIdNotInteger() throws Exception {
    // A String-typed field cannot back an AUTOINCREMENT id and must be rejected.
    Field field = Foo.class.getField("stringField");
    FieldType fieldType = FieldType.createFieldType(connectionSource, "foo", field, Foo.class);
    OurSqliteDatabaseType dbType = new OurSqliteDatabaseType();
    dbType.configureGeneratedId(null, new StringBuilder(), fieldType, new ArrayList<String>(), null,
            new ArrayList<String>(), new ArrayList<String>());
}

@Test
public void testConfigureGeneratedIdInteger() throws Exception {
    // An int-typed field is valid and must emit the AUTOINCREMENT clause.
    Field field = Foo.class.getField("val");
    FieldType fieldType = FieldType.createFieldType(connectionSource, "foo", field, Foo.class);
    StringBuilder sb = new StringBuilder();
    new OurSqliteDatabaseType().configureGeneratedId(null, sb, fieldType, new ArrayList<String>(),
            null, new ArrayList<String>(), new ArrayList<String>());
    assertTrue(sb.toString().contains("PRIMARY KEY AUTOINCREMENT"));
}
// Dataset record: focal method BaseSqliteDatabaseType.isVarcharFieldWidthSupported() plus
// the remaining member signatures of the class.
BaseSqliteDatabaseType extends BaseDatabaseType {
    @Override
    public boolean isVarcharFieldWidthSupported() {
        // VARCHAR field widths are not used by this database type.
        return false;
    }

    @Override boolean isVarcharFieldWidthSupported();
    @Override boolean isCreateTableReturnsZero();
    @Override boolean isCreateIfNotExistsSupported();
    @Override FieldConverter getFieldConverter(DataPersister dataPersister, FieldType fieldType);
    @Override void appendInsertNoColumns(StringBuilder sb);
}
// Dataset record: unit test pinning isVarcharFieldWidthSupported() == false.
@Test
public void testIsVarcharFieldWidthSupported() {
    OurSqliteDatabaseType databaseType = new OurSqliteDatabaseType();
    assertFalse(databaseType.isVarcharFieldWidthSupported());
}
// Dataset record: focal method BaseSqliteDatabaseType.isCreateTableReturnsZero() plus
// the remaining member signatures of the class.
BaseSqliteDatabaseType extends BaseDatabaseType {
    @Override
    public boolean isCreateTableReturnsZero() {
        // CREATE TABLE is not expected to report a zero row count on this database type.
        return false;
    }

    @Override boolean isVarcharFieldWidthSupported();
    @Override boolean isCreateTableReturnsZero();
    @Override boolean isCreateIfNotExistsSupported();
    @Override FieldConverter getFieldConverter(DataPersister dataPersister, FieldType fieldType);
    @Override void appendInsertNoColumns(StringBuilder sb);
}
// Dataset record: unit test pinning isCreateTableReturnsZero() == false.
@Test
public void testIsCreateTableReturnsZero() {
    OurSqliteDatabaseType databaseType = new OurSqliteDatabaseType();
    assertFalse(databaseType.isCreateTableReturnsZero());
}
// Dataset record: focal method BaseSqliteDatabaseType.generatedIdSqlAtEnd() plus the
// remaining member signatures of the class.
BaseSqliteDatabaseType extends BaseDatabaseType {
    @Override
    protected boolean generatedIdSqlAtEnd() {
        // Generated-id SQL is not appended at the end of the statement for this type.
        return false;
    }

    @Override boolean isVarcharFieldWidthSupported();
    @Override boolean isCreateTableReturnsZero();
    @Override boolean isCreateIfNotExistsSupported();
    @Override FieldConverter getFieldConverter(DataPersister dataPersister, FieldType fieldType);
    @Override void appendInsertNoColumns(StringBuilder sb);
}
// Dataset record: unit test pinning generatedIdSqlAtEnd() == false.
@Test
public void testGeneratedIdSqlAtEnd() {
    OurSqliteDatabaseType databaseType = new OurSqliteDatabaseType();
    assertFalse(databaseType.generatedIdSqlAtEnd());
}
// Dataset record: focal method BaseSqliteDatabaseType.isCreateIfNotExistsSupported() plus
// the remaining member signatures of the class.
BaseSqliteDatabaseType extends BaseDatabaseType {
    @Override
    public boolean isCreateIfNotExistsSupported() {
        // This database type supports create-if-not-exists statements.
        return true;
    }

    @Override boolean isVarcharFieldWidthSupported();
    @Override boolean isCreateTableReturnsZero();
    @Override boolean isCreateIfNotExistsSupported();
    @Override FieldConverter getFieldConverter(DataPersister dataPersister, FieldType fieldType);
    @Override void appendInsertNoColumns(StringBuilder sb);
}
// Dataset record: unit test pinning isCreateIfNotExistsSupported() == true.
@Test
public void testIsCreateIfNotExistsSupported() {
    OurSqliteDatabaseType databaseType = new OurSqliteDatabaseType();
    assertTrue(databaseType.isCreateIfNotExistsSupported());
}