repo stringclasses 1k
values | file_url stringlengths 96 373 | file_path stringlengths 11 294 | content stringlengths 0 32.8k | language stringclasses 1
value | license stringclasses 6
values | commit_sha stringclasses 1k
values | retrieved_at stringdate 2026-01-04 14:45:56 2026-01-04 18:30:23 | truncated bool 2
classes |
|---|---|---|---|---|---|---|---|---|
michelin/kstreamplify | https://github.com/michelin/kstreamplify/blob/7b912c3bc711629f1745ae4a60bffeaed649d07a/kstreamplify-spring-boot/src/main/java/com/michelin/kstreamplify/controller/KubernetesController.java | kstreamplify-spring-boot/src/main/java/com/michelin/kstreamplify/controller/KubernetesController.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.michelin.kstreamplify.controller;
import com.michelin.kstreamplify.initializer.KafkaStreamsStarter;
import com.michelin.kstreamplify.service.KubernetesService;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.media.Content;
import io.swagger.v3.oas.annotations.media.Schema;
import io.swagger.v3.oas.annotations.responses.ApiResponse;
import io.swagger.v3.oas.annotations.responses.ApiResponses;
import io.swagger.v3.oas.annotations.tags.Tag;
import org.springframework.boot.autoconfigure.condition.ConditionalOnBean;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RestController;
/** REST controller exposing the Kubernetes liveness and readiness probes of the Kafka Streams application. */
@RestController
@ConditionalOnBean(KafkaStreamsStarter.class)
@Tag(name = "Kubernetes", description = "Kubernetes Controller")
public class KubernetesController {
    /** Service that derives probe HTTP status codes from the Kafka Streams state. */
    private final KubernetesService kubernetesService;

    /**
     * Constructor.
     *
     * @param kubernetesService The Kubernetes service used to evaluate probe statuses
     */
    public KubernetesController(KubernetesService kubernetesService) {
        this.kubernetesService = kubernetesService;
    }

    /**
     * Readiness Kubernetes probe endpoint.
     *
     * <p>Returns an empty body; the HTTP status alone carries the probe result.
     *
     * @return An HTTP response based on the Kafka Streams state
     */
    @Operation(summary = "Kubernetes readiness probe")
    @ApiResponses(
            value = {
                @ApiResponse(responseCode = "200", description = "Kafka Streams running"),
                @ApiResponse(
                        responseCode = "204",
                        description = "Kafka Streams starting",
                        content = {
                            @Content(
                                    mediaType = MediaType.APPLICATION_JSON_VALUE,
                                    schema = @Schema(implementation = String.class))
                        }),
                @ApiResponse(
                        responseCode = "400",
                        description = "Kafka Streams not instantiated",
                        content = {
                            @Content(
                                    mediaType = MediaType.APPLICATION_JSON_VALUE,
                                    schema = @Schema(implementation = String.class))
                        }),
                @ApiResponse(
                        responseCode = "503",
                        description = "Kafka Streams not running",
                        content = {
                            @Content(
                                    mediaType = MediaType.APPLICATION_JSON_VALUE,
                                    schema = @Schema(implementation = String.class))
                        }),
            })
    @GetMapping("/${kubernetes.readiness.path:ready}")
    public ResponseEntity<Void> readiness() {
        // The status code is fully determined by the service; no response body is needed
        return ResponseEntity.status(kubernetesService.getReadiness()).build();
    }

    /**
     * Liveness Kubernetes probe endpoint.
     *
     * <p>Returns an empty body; the HTTP status alone carries the probe result.
     *
     * @return An HTTP response based on the Kafka Streams state
     */
    @Operation(summary = "Kubernetes liveness probe")
    @ApiResponses(
            value = {
                @ApiResponse(responseCode = "200", description = "Kafka Streams running"),
                @ApiResponse(
                        responseCode = "400",
                        description = "Kafka Streams not instantiated",
                        content = {
                            @Content(
                                    mediaType = MediaType.APPLICATION_JSON_VALUE,
                                    schema = @Schema(implementation = String.class))
                        }),
                @ApiResponse(
                        responseCode = "503",
                        description = "Kafka Streams not running",
                        content = {
                            @Content(
                                    mediaType = MediaType.APPLICATION_JSON_VALUE,
                                    schema = @Schema(implementation = String.class))
                        }),
            })
    @GetMapping("/${kubernetes.liveness.path:liveness}")
    public ResponseEntity<Void> liveness() {
        // The status code is fully determined by the service; no response body is needed
        return ResponseEntity.status(kubernetesService.getLiveness()).build();
    }
}
| java | Apache-2.0 | 7b912c3bc711629f1745ae4a60bffeaed649d07a | 2026-01-05T02:38:06.284501Z | false |
michelin/kstreamplify | https://github.com/michelin/kstreamplify/blob/7b912c3bc711629f1745ae4a60bffeaed649d07a/kstreamplify-spring-boot/src/main/java/com/michelin/kstreamplify/initializer/SpringBootKafkaStreamsInitializer.java | kstreamplify-spring-boot/src/main/java/com/michelin/kstreamplify/initializer/SpringBootKafkaStreamsInitializer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.michelin.kstreamplify.initializer;
import com.michelin.kstreamplify.context.KafkaStreamsExecutionContext;
import com.michelin.kstreamplify.property.KafkaProperties;
import io.micrometer.core.instrument.MeterRegistry;
import io.micrometer.core.instrument.binder.kafka.KafkaStreamsMetrics;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.errors.StreamsUncaughtExceptionHandler;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.ApplicationArguments;
import org.springframework.boot.ApplicationRunner;
import org.springframework.boot.autoconfigure.condition.ConditionalOnBean;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.stereotype.Component;
/** The Kafka Streams initializer. */
@Slf4j
@Component
@ConditionalOnBean(KafkaStreamsStarter.class)
public class SpringBootKafkaStreamsInitializer extends KafkaStreamsInitializer implements ApplicationRunner {
private final ConfigurableApplicationContext applicationContext;
private final MeterRegistry registry;
private final KafkaProperties springBootKafkaProperties;
@Value("${server.port:8080}")
private int springBootServerPort;
/**
* Constructor.
*
* @param applicationContext The application context
* @param kafkaStreamsStarter The Kafka Streams starter
* @param springBootKafkaProperties The Spring Boot Kafka properties
* @param registry The Micrometer registry
*/
public SpringBootKafkaStreamsInitializer(
ConfigurableApplicationContext applicationContext,
KafkaStreamsStarter kafkaStreamsStarter,
KafkaProperties springBootKafkaProperties,
MeterRegistry registry) {
super(kafkaStreamsStarter);
this.applicationContext = applicationContext;
this.springBootKafkaProperties = springBootKafkaProperties;
this.registry = registry;
}
/**
* Start Kstreamplify.
*
* @param args The program arguments
*/
@Override
public void run(ApplicationArguments args) {
start();
}
/** {@inheritDoc} */
@Override
protected void startHttpServer() {
// Nothing to do here as the server is already started by Spring Boot
}
/** {@inheritDoc} */
@Override
protected void initProperties() {
serverPort = springBootServerPort;
kafkaProperties = springBootKafkaProperties.asProperties();
KafkaStreamsExecutionContext.registerProperties(kafkaProperties);
}
/** {@inheritDoc} */
@Override
protected StreamsUncaughtExceptionHandler.StreamThreadExceptionResponse onStreamsUncaughtException(
Throwable exception) {
closeApplicationContext();
return super.onStreamsUncaughtException(exception);
}
/** {@inheritDoc} */
@Override
protected void onStateChange(KafkaStreams.State newState, KafkaStreams.State oldState) {
if (newState.equals(KafkaStreams.State.ERROR)) {
closeApplicationContext();
}
}
/** {@inheritDoc} */
@Override
protected void registerMetrics(KafkaStreams kafkaStreams) {
// As the Kafka Streams metrics are not picked up by the OpenTelemetry Java agent automatically,
// register them manually to the Spring Boot registry as the agent will pick metrics up from there
KafkaStreamsMetrics kafkaStreamsMetrics = new KafkaStreamsMetrics(kafkaStreams);
kafkaStreamsMetrics.bindTo(registry);
}
/** Close the application context. */
private void closeApplicationContext() {
if (applicationContext != null) {
applicationContext.close();
} else {
log.warn("Spring Boot context is not set, cannot close it");
}
}
}
| java | Apache-2.0 | 7b912c3bc711629f1745ae4a60bffeaed649d07a | 2026-01-05T02:38:06.284501Z | false |
michelin/kstreamplify | https://github.com/michelin/kstreamplify/blob/7b912c3bc711629f1745ae4a60bffeaed649d07a/kstreamplify-spring-boot/src/main/java/com/michelin/kstreamplify/opentelemetry/OpenTelemetryConfig.java | kstreamplify-spring-boot/src/main/java/com/michelin/kstreamplify/opentelemetry/OpenTelemetryConfig.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.michelin.kstreamplify.opentelemetry;
import io.micrometer.core.instrument.MeterRegistry;
import io.micrometer.core.instrument.Tag;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import lombok.Getter;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.actuate.autoconfigure.metrics.MeterRegistryCustomizer;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.util.StringUtils;
/** The OpenTelemetry configuration class. */
@Slf4j
@Getter
@Configuration
public class OpenTelemetryConfig {
    /** Raw value of the {@code otel.resource.attributes} property (comma-separated "key=value" pairs), or null. */
    private final String otelResourceAttributes;

    /**
     * Constructor.
     *
     * @param otelResourceAttributes The OpenTelemetry resource attributes
     */
    public OpenTelemetryConfig(@Value("${otel.resource.attributes:#{null}}") String otelResourceAttributes) {
        this.otelResourceAttributes = otelResourceAttributes;
    }

    /**
     * Register tags in Open Telemetry meter registry. It enables to add custom tags given in the property
     * otel.resource.attributes to metrics.
     *
     * @return A meter registry customizer
     */
    @Bean
    @ConditionalOnProperty(value = "otel.resource.attributes")
    public MeterRegistryCustomizer<MeterRegistry> addTagsOnMetrics() {
        List<Tag> tags = StringUtils.hasText(otelResourceAttributes)
                ? Arrays.stream(otelResourceAttributes.split(","))
                        .map(OpenTelemetryConfig::toTag)
                        .toList()
                : Collections.emptyList();

        return registry -> {
            // Only add tags and Kafka metrics to Open Telemetry meter registry whose Java agent reads from it
            if (registry.getClass().getName().contains("OpenTelemetryMeterRegistry")) {
                log.info(
                        "Adding tags {} to registry {}",
                        tags.stream()
                                .map(tag -> tag.getKey() + "=" + tag.getValue())
                                .toList(),
                        registry.getClass().getName());
                registry.config().commonTags(tags);
            }
        };
    }

    /**
     * Convert a single "key=value" resource attribute into a Micrometer tag.
     *
     * <p>Was previously splitting twice per attribute and without a limit, which truncated values
     * containing an '=' sign; splitting once with a limit of 2 keeps such values whole.
     *
     * @param resourceAttribute One "key=value" entry from otel.resource.attributes
     * @return The corresponding tag
     */
    private static Tag toTag(String resourceAttribute) {
        // Limit of 2: split on the first '=' only, so "key=a=b" yields value "a=b"
        String[] keyValue = resourceAttribute.split("=", 2);
        return Tag.of(keyValue[0], keyValue[1]);
    }
}
| java | Apache-2.0 | 7b912c3bc711629f1745ae4a60bffeaed649d07a | 2026-01-05T02:38:06.284501Z | false |
michelin/kstreamplify | https://github.com/michelin/kstreamplify/blob/7b912c3bc711629f1745ae4a60bffeaed649d07a/kstreamplify-spring-boot/src/main/java/com/michelin/kstreamplify/property/KafkaProperties.java | kstreamplify-spring-boot/src/main/java/com/michelin/kstreamplify/property/KafkaProperties.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.michelin.kstreamplify.property;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import lombok.Getter;
import lombok.Setter;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Configuration;
/** Holder for the {@code kafka.properties.*} configuration section. */
@Getter
@Setter
@Configuration
@ConfigurationProperties(prefix = "kafka")
public class KafkaProperties {
    // Key/value pairs bound by Spring Boot from the "kafka.properties" configuration prefix
    private Map<String, String> properties = new HashMap<>();

    /** Constructor. */
    public KafkaProperties() {
        // Default constructor
    }

    /**
     * Return the Kafka properties as {@link java.util.Properties}.
     *
     * @return The Kafka properties
     */
    public Properties asProperties() {
        Properties converted = new Properties();
        properties.forEach(converted::setProperty);
        return converted;
    }
}
| java | Apache-2.0 | 7b912c3bc711629f1745ae4a60bffeaed649d07a | 2026-01-05T02:38:06.284501Z | false |
michelin/kstreamplify | https://github.com/michelin/kstreamplify/blob/7b912c3bc711629f1745ae4a60bffeaed649d07a/kstreamplify-spring-boot/src/main/java/com/michelin/kstreamplify/config/BeanConfig.java | kstreamplify-spring-boot/src/main/java/com/michelin/kstreamplify/config/BeanConfig.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.michelin.kstreamplify.config;
import com.michelin.kstreamplify.initializer.KafkaStreamsStarter;
import com.michelin.kstreamplify.initializer.SpringBootKafkaStreamsInitializer;
import com.michelin.kstreamplify.service.KubernetesService;
import com.michelin.kstreamplify.service.TopologyService;
import com.michelin.kstreamplify.service.interactivequeries.keyvalue.KeyValueStoreService;
import com.michelin.kstreamplify.service.interactivequeries.keyvalue.TimestampedKeyValueStoreService;
import com.michelin.kstreamplify.service.interactivequeries.window.TimestampedWindowStoreService;
import com.michelin.kstreamplify.service.interactivequeries.window.WindowStoreService;
import org.springframework.boot.autoconfigure.condition.ConditionalOnBean;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
/** Bean configuration registering the Kstreamplify services, active only when a {@link KafkaStreamsStarter} exists. */
@Configuration
@ConditionalOnBean(KafkaStreamsStarter.class)
public class BeanConfig {
    /** Default constructor. */
    public BeanConfig() {
        // Default constructor
    }

    /**
     * Register the Kubernetes service as a bean.
     *
     * @param kafkaStreamsInitializer The Kafka Streams initializer
     * @return The Kubernetes service
     */
    @Bean
    KubernetesService kubernetesService(SpringBootKafkaStreamsInitializer kafkaStreamsInitializer) {
        return new KubernetesService(kafkaStreamsInitializer);
    }

    /**
     * Register the Topology service as a bean.
     *
     * @param kafkaStreamsInitializer The Kafka Streams initializer
     * @return The Topology service
     */
    @Bean
    TopologyService topologyService(SpringBootKafkaStreamsInitializer kafkaStreamsInitializer) {
        return new TopologyService(kafkaStreamsInitializer);
    }

    /**
     * Register the key-value store service as a bean.
     *
     * @param kafkaStreamsInitializer The Kafka Streams initializer
     * @return The key-value store service
     */
    @Bean
    KeyValueStoreService keyValueStoreService(SpringBootKafkaStreamsInitializer kafkaStreamsInitializer) {
        return new KeyValueStoreService(kafkaStreamsInitializer);
    }

    /**
     * Register the timestamped key-value store service as a bean.
     *
     * @param kafkaStreamsInitializer The Kafka Streams initializer
     * @return The timestamped key-value store service
     */
    @Bean
    TimestampedKeyValueStoreService timestampedKeyValueStoreService(
            SpringBootKafkaStreamsInitializer kafkaStreamsInitializer) {
        return new TimestampedKeyValueStoreService(kafkaStreamsInitializer);
    }

    /**
     * Register the window store service as a bean.
     *
     * @param kafkaStreamsInitializer The Kafka Streams initializer
     * @return The window store service
     */
    @Bean
    WindowStoreService windowStoreService(SpringBootKafkaStreamsInitializer kafkaStreamsInitializer) {
        return new WindowStoreService(kafkaStreamsInitializer);
    }

    /**
     * Register the timestamped window store service as a bean.
     *
     * @param kafkaStreamsInitializer The Kafka Streams initializer
     * @return The timestamped window store service
     */
    @Bean
    TimestampedWindowStoreService timestampedWindowStoreService(
            SpringBootKafkaStreamsInitializer kafkaStreamsInitializer) {
        return new TimestampedWindowStoreService(kafkaStreamsInitializer);
    }
}
| java | Apache-2.0 | 7b912c3bc711629f1745ae4a60bffeaed649d07a | 2026-01-05T02:38:06.284501Z | false |
michelin/kstreamplify | https://github.com/michelin/kstreamplify/blob/7b912c3bc711629f1745ae4a60bffeaed649d07a/kstreamplify-core/src/test/java/com/michelin/kstreamplify/topic/TopicUtilsTest.java | kstreamplify-core/src/test/java/com/michelin/kstreamplify/topic/TopicUtilsTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.michelin.kstreamplify.topic;
import static org.junit.jupiter.api.Assertions.assertEquals;
import com.michelin.kstreamplify.context.KafkaStreamsExecutionContext;
import java.util.Properties;
import org.junit.jupiter.api.Test;
/** Tests for topic remapping and prefixing via {@code TopicUtils.remapAndPrefix}. */
class TopicUtilsTest {
    @Test
    void shouldRemapTopic() {
        // Register a remap rule in the execution context, then resolve without any prefix
        Properties contextProperties = new Properties();
        contextProperties.put("topic.remap.myTopic", "myRemappedTopic");
        KafkaStreamsExecutionContext.setProperties(contextProperties);

        assertEquals("myRemappedTopic", TopicUtils.remapAndPrefix("myTopic", ""));
    }

    @Test
    void shouldRemapAndPrefixTopic() {
        // Register both a remap rule and a namespace prefix; the prefix applies after the remap
        Properties contextProperties = new Properties();
        contextProperties.put("topic.remap.myTopic", "myRemappedTopic");
        contextProperties.put("prefix.myNamespace", "myNamespacePrefix.");
        KafkaStreamsExecutionContext.setProperties(contextProperties);

        assertEquals("myNamespacePrefix.myRemappedTopic", TopicUtils.remapAndPrefix("myTopic", "myNamespace"));
    }
}
| java | Apache-2.0 | 7b912c3bc711629f1745ae4a60bffeaed649d07a | 2026-01-05T02:38:06.284501Z | false |
michelin/kstreamplify | https://github.com/michelin/kstreamplify/blob/7b912c3bc711629f1745ae4a60bffeaed649d07a/kstreamplify-core/src/test/java/com/michelin/kstreamplify/service/KubernetesServiceTest.java | kstreamplify-core/src/test/java/com/michelin/kstreamplify/service/KubernetesServiceTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.michelin.kstreamplify.service;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.mockito.Mockito.when;
import com.michelin.kstreamplify.context.KafkaStreamsExecutionContext;
import com.michelin.kstreamplify.initializer.KafkaStreamsInitializer;
import java.net.HttpURLConnection;
import java.util.Properties;
import java.util.Set;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.processor.internals.StreamThread;
import org.apache.kafka.streams.processor.internals.ThreadMetadataImpl;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
@ExtendWith(MockitoExtension.class)
class KubernetesServiceTest {
@Mock
private KafkaStreamsInitializer kafkaStreamsInitializer;
@Mock
private KafkaStreams kafkaStreams;
@InjectMocks
private KubernetesService kubernetesService;
@Test
void shouldGetReadinessProbeWhenRunning() {
KafkaStreamsExecutionContext.registerProperties(new Properties());
when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
when(kafkaStreams.state()).thenReturn(KafkaStreams.State.RUNNING);
int response = kubernetesService.getReadiness();
assertEquals(HttpURLConnection.HTTP_OK, response);
}
@Test
void shouldGetReadinessProbeWhenNotRunning() {
KafkaStreamsExecutionContext.registerProperties(new Properties());
when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
when(kafkaStreams.state()).thenReturn(KafkaStreams.State.NOT_RUNNING);
int response = kubernetesService.getReadiness();
assertEquals(HttpURLConnection.HTTP_UNAVAILABLE, response);
}
@Test
void shouldGetReadinessProbeWhenNull() {
when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(null);
int response = kubernetesService.getReadiness();
assertEquals(HttpURLConnection.HTTP_BAD_REQUEST, response);
}
@Test
void shouldGetReadinessProbeWhenRebalancingAndAllThreadsCreated() {
KafkaStreamsExecutionContext.registerProperties(new Properties());
when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
when(kafkaStreams.state()).thenReturn(KafkaStreams.State.REBALANCING);
when(kafkaStreams.metadataForLocalThreads())
.thenReturn(Set.of(new ThreadMetadataImpl(
"thread-1",
StreamThread.State.CREATED.name(),
"mainConsumerClientId",
"restoreConsumerClientId",
Set.of(),
"adminClientId",
Set.of(),
Set.of())));
int response = kubernetesService.getReadiness();
assertEquals(HttpURLConnection.HTTP_NO_CONTENT, response);
}
@Test
void shouldGetReadinessProbeWhenRebalancingAndAllThreadsNotStartingOrCreated() {
KafkaStreamsExecutionContext.registerProperties(new Properties());
when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
when(kafkaStreams.state()).thenReturn(KafkaStreams.State.REBALANCING);
when(kafkaStreams.metadataForLocalThreads())
.thenReturn(Set.of(
new ThreadMetadataImpl(
"thread-1",
StreamThread.State.CREATED.name(),
"mainConsumerClientId",
"restoreConsumerClientId",
Set.of(),
"adminClientId",
Set.of(),
Set.of()),
new ThreadMetadataImpl(
"thread-2",
StreamThread.State.STARTING.name(),
"mainConsumerClientId",
"restoreConsumerClientId",
Set.of(),
"adminClientId",
Set.of(),
Set.of()),
new ThreadMetadataImpl(
"thread-3",
StreamThread.State.PARTITIONS_ASSIGNED.name(),
"mainConsumerClientId",
"restoreConsumerClientId",
Set.of(),
"adminClientId",
Set.of(),
Set.of())));
int response = kubernetesService.getReadiness();
assertEquals(HttpURLConnection.HTTP_UNAVAILABLE, response);
}
@Test
void shouldGetLivenessProbeWithWhenStreamsRunning() {
when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
when(kafkaStreams.state()).thenReturn(KafkaStreams.State.RUNNING);
int response = kubernetesService.getLiveness();
assertEquals(HttpURLConnection.HTTP_OK, response);
}
@Test
void shouldGetLivenessProbeWithWhenStreamsNotRunning() {
when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
when(kafkaStreams.state()).thenReturn(KafkaStreams.State.NOT_RUNNING);
int response = kubernetesService.getLiveness();
assertEquals(HttpURLConnection.HTTP_INTERNAL_ERROR, response);
}
@Test
void shouldGetLivenessProbeWithWhenStreamsNull() {
when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(null);
int response = kubernetesService.getLiveness();
assertEquals(HttpURLConnection.HTTP_NO_CONTENT, response);
}
}
| java | Apache-2.0 | 7b912c3bc711629f1745ae4a60bffeaed649d07a | 2026-01-05T02:38:06.284501Z | false |
michelin/kstreamplify | https://github.com/michelin/kstreamplify/blob/7b912c3bc711629f1745ae4a60bffeaed649d07a/kstreamplify-core/src/test/java/com/michelin/kstreamplify/service/TopologyServiceTest.java | kstreamplify-core/src/test/java/com/michelin/kstreamplify/service/TopologyServiceTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.michelin.kstreamplify.service;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.mockito.Mockito.when;
import com.michelin.kstreamplify.initializer.KafkaStreamsInitializer;
import com.michelin.kstreamplify.initializer.KafkaStreamsStarter;
import org.apache.kafka.streams.StreamsBuilder;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
/** Tests for the topology description exposed by {@code TopologyService}. */
@ExtendWith(MockitoExtension.class)
class TopologyServiceTest {
    @Mock
    private KafkaStreamsInitializer kafkaStreamsInitializer;

    @InjectMocks
    private TopologyService topologyService;

    @Test
    void shouldExposeTopology() {
        // Build a real (stub) topology and hand it to the mocked initializer,
        // then check the service returns Kafka's textual topology description verbatim
        StreamsBuilder streamsBuilder = new StreamsBuilder();
        KafkaStreamsStarter starter = new KafkaStreamsStarterStub();
        starter.topology(streamsBuilder);
        when(kafkaStreamsInitializer.getTopology()).thenReturn(streamsBuilder.build());
        String response = topologyService.getTopology();
        // Expected value is Topology#describe() output for the single source->sink sub-topology
        assertEquals("""
                Topologies:
                Sub-topology: 0
                Source: KSTREAM-SOURCE-0000000000 (topics: [INPUT_TOPIC])
                --> KSTREAM-SINK-0000000001
                Sink: KSTREAM-SINK-0000000001 (topic: OUTPUT_TOPIC)
                <-- KSTREAM-SOURCE-0000000000
                """, response);
    }

    /** Minimal starter wiring INPUT_TOPIC straight into OUTPUT_TOPIC, enough to produce a describable topology. */
    static class KafkaStreamsStarterStub extends KafkaStreamsStarter {
        @Override
        public void topology(StreamsBuilder streamsBuilder) {
            streamsBuilder.stream("INPUT_TOPIC").to("OUTPUT_TOPIC");
        }

        @Override
        public String dlqTopic() {
            return "DLQ_TOPIC";
        }
    }
}
| java | Apache-2.0 | 7b912c3bc711629f1745ae4a60bffeaed649d07a | 2026-01-05T02:38:06.284501Z | false |
michelin/kstreamplify | https://github.com/michelin/kstreamplify/blob/7b912c3bc711629f1745ae4a60bffeaed649d07a/kstreamplify-core/src/test/java/com/michelin/kstreamplify/service/interactivequeries/window/WindowStoreServiceTest.java | kstreamplify-core/src/test/java/com/michelin/kstreamplify/service/interactivequeries/window/WindowStoreServiceTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.michelin.kstreamplify.service.interactivequeries.window;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertIterableEquals;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.doCallRealMethod;
import static org.mockito.Mockito.when;
import com.michelin.kstreamplify.exception.OtherInstanceResponseException;
import com.michelin.kstreamplify.exception.UnknownKeyException;
import com.michelin.kstreamplify.initializer.KafkaStreamsInitializer;
import com.michelin.kstreamplify.store.StateStoreRecord;
import java.net.http.HttpClient;
import java.net.http.HttpResponse;
import java.time.Instant;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import org.apache.kafka.common.serialization.Serializer;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.KeyQueryMetadata;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.StreamsMetadata;
import org.apache.kafka.streams.errors.StreamsNotStartedException;
import org.apache.kafka.streams.errors.UnknownStateStoreException;
import org.apache.kafka.streams.kstream.Windowed;
import org.apache.kafka.streams.kstream.internals.TimeWindow;
import org.apache.kafka.streams.query.QueryResult;
import org.apache.kafka.streams.query.StateQueryRequest;
import org.apache.kafka.streams.query.StateQueryResult;
import org.apache.kafka.streams.state.HostInfo;
import org.apache.kafka.streams.state.KeyValueIterator;
import org.apache.kafka.streams.state.WindowStoreIterator;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.ArgumentMatchers;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
/**
 * Unit tests for {@code WindowStoreService}, the interactive-queries service backed by a
 * (non-timestamped) window store.
 *
 * <p>All collaborators are Mockito mocks; no real Kafka Streams instance is started. The tests
 * cover three areas: store discovery ({@code getStateStores}, {@code getStreamsMetadataForStore}),
 * range queries ({@code getAll}, {@code getAllOnLocalInstance}) and key queries
 * ({@code getByKey}), each for the local-instance path, the remote-instance (HTTP) path and the
 * error paths (streams not started, unknown store, unknown key, remote-call failure).
 *
 * <p>NOTE: MockitoExtension enables strict stubbing, so every {@code when(...)} in a test is
 * expected to be consumed — stubbing order and counts are significant.
 */
@ExtendWith(MockitoExtension.class)
class WindowStoreServiceTest {
    // Message produced by the service when Kafka Streams is still REBALANCING.
    private static final String STREAMS_NOT_STARTED = "Cannot process request while instance is in REBALANCING state";
    @Mock
    private KafkaStreamsInitializer kafkaStreamsInitializer;
    @Mock
    private HttpClient httpClient;
    @Mock
    private StreamsMetadata streamsMetadata;
    @Mock
    private KafkaStreams kafkaStreams;
    // Result/iterator pair used by the window *range* query stubs (getAll paths).
    @Mock
    private StateQueryResult<KeyValueIterator<Windowed<String>, Object>> stateWindowRangeQueryResult;
    @Mock
    private KeyValueIterator<Windowed<String>, Object> iterator;
    // Result/iterator pair used by the window *key* query stubs (getByKey paths).
    @Mock
    private StateQueryResult<WindowStoreIterator<Object>> stateWindowKeyQueryResult;
    @Mock
    private WindowStoreIterator<Object> windowStoreIterator;
    @Mock
    private HttpResponse<String> httpResponse;
    @InjectMocks
    private WindowStoreService windowStoreService;
    @Test
    void shouldValidatePath() {
        // REST sub-path under which this service is exposed.
        assertEquals("window", windowStoreService.path());
    }
    @Test
    void shouldNotGetStoresWhenStreamsIsNotStarted() {
        when(kafkaStreamsInitializer.isNotRunning()).thenReturn(true);
        when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
        when(kafkaStreams.state()).thenReturn(KafkaStreams.State.REBALANCING);
        StreamsNotStartedException exception =
                assertThrows(StreamsNotStartedException.class, () -> windowStoreService.getStateStores());
        assertEquals(STREAMS_NOT_STARTED, exception.getMessage());
    }
    @Test
    void shouldGetStores() {
        when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
        when(kafkaStreams.metadataForAllStreamsClients()).thenReturn(List.of(streamsMetadata));
        when(streamsMetadata.stateStoreNames()).thenReturn(Set.of("store1", "store2"));
        Set<String> stores = windowStoreService.getStateStores();
        assertTrue(stores.contains("store1"));
        assertTrue(stores.contains("store2"));
    }
    @Test
    void shouldGetStoresWhenNull() {
        // A null client-metadata collection must degrade to an empty store set, not an NPE.
        when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
        when(kafkaStreams.metadataForAllStreamsClients()).thenReturn(null);
        Set<String> stores = windowStoreService.getStateStores();
        assertTrue(stores.isEmpty());
    }
    @Test
    void shouldGetStoresWhenEmpty() {
        when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
        when(kafkaStreams.metadataForAllStreamsClients()).thenReturn(Collections.emptyList());
        Set<String> stores = windowStoreService.getStateStores();
        assertTrue(stores.isEmpty());
    }
    @Test
    void shouldNotGetStreamsMetadataForStoreWhenStreamsIsNotStarted() {
        when(kafkaStreamsInitializer.isNotRunning()).thenReturn(true);
        when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
        when(kafkaStreams.state()).thenReturn(KafkaStreams.State.REBALANCING);
        StreamsNotStartedException exception = assertThrows(
                StreamsNotStartedException.class, () -> windowStoreService.getStreamsMetadataForStore("store"));
        assertEquals(STREAMS_NOT_STARTED, exception.getMessage());
    }
    @Test
    void shouldGetStreamsMetadataForStore() {
        when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
        when(kafkaStreams.streamsMetadataForStore(any())).thenReturn(List.of(streamsMetadata));
        Collection<StreamsMetadata> streamsMetadataResponse = windowStoreService.getStreamsMetadataForStore("store");
        assertIterableEquals(List.of(streamsMetadata), streamsMetadataResponse);
    }
    @Test
    void shouldNotGetAllWhenStreamsIsNotStarted() {
        when(kafkaStreamsInitializer.isNotRunning()).thenReturn(true);
        when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
        when(kafkaStreams.state()).thenReturn(KafkaStreams.State.REBALANCING);
        Instant instant = Instant.now();
        StreamsNotStartedException exception = assertThrows(
                StreamsNotStartedException.class, () -> windowStoreService.getAll("store", instant, instant));
        assertEquals(STREAMS_NOT_STARTED, exception.getMessage());
    }
    @Test
    void shouldGetAllThrowsUnknownStoreExceptionWhenMetadataNull() {
        when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
        when(kafkaStreams.streamsMetadataForStore(any())).thenReturn(null);
        Instant instant = Instant.now();
        assertThrows(UnknownStateStoreException.class, () -> windowStoreService.getAll("store", instant, instant));
    }
    @Test
    void shouldGetAllThrowsUnknownStoreExceptionWhenMetadataEmpty() {
        when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
        when(kafkaStreams.streamsMetadataForStore(any())).thenReturn(Collections.emptyList());
        Instant instant = Instant.now();
        assertThrows(UnknownStateStoreException.class, () -> windowStoreService.getAll("store", instant, instant));
    }
    @Test
    void shouldGetAll() {
        when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
        when(kafkaStreams.streamsMetadataForStore(any())).thenReturn(List.of(streamsMetadata));
        // Same host for metadata and local instance: the query is served locally, no HTTP call.
        HostInfo hostInfo = new HostInfo("localhost", 8080);
        when(streamsMetadata.hostInfo()).thenReturn(hostInfo);
        when(kafkaStreamsInitializer.getHostInfo()).thenReturn(hostInfo);
        when(kafkaStreams.query(ArgumentMatchers.<StateQueryRequest<KeyValueIterator<Windowed<String>, Object>>>any()))
                .thenReturn(stateWindowRangeQueryResult);
        when(stateWindowRangeQueryResult.getPartitionResults()).thenReturn(Map.of(0, QueryResult.forResult(iterator)));
        // forEachRemaining is a default method; let it drive the hasNext()/next() stubs below.
        doCallRealMethod().when(iterator).forEachRemaining(any());
        when(iterator.hasNext()).thenReturn(true).thenReturn(false);
        when(iterator.next())
                .thenReturn(
                        KeyValue.pair(new Windowed<>("key", new TimeWindow(0L, 150L)), new UserStub("John", "Doe")));
        List<StateStoreRecord> responses = windowStoreService.getAll("store", Instant.EPOCH, Instant.now());
        assertEquals("key", responses.get(0).getKey());
        assertEquals("John", ((Map<?, ?>) responses.get(0).getValue()).get("firstName"));
        assertEquals("Doe", ((Map<?, ?>) responses.get(0).getValue()).get("lastName"));
        // Non-timestamped store: no timestamp is attached to the record.
        assertNull(responses.get(0).getTimestamp());
    }
    @Test
    void shouldGetAllWithRemoteCall() {
        when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
        when(kafkaStreams.streamsMetadataForStore(any())).thenReturn(List.of(streamsMetadata));
        // Metadata host differs from the local host: the service must fetch over HTTP.
        when(streamsMetadata.hostInfo()).thenReturn(new HostInfo("localhost", 8080));
        when(kafkaStreamsInitializer.getHostInfo()).thenReturn(new HostInfo("anotherHost", 8080));
        when(httpClient.sendAsync(any(), eq(HttpResponse.BodyHandlers.ofString())))
                .thenReturn(CompletableFuture.completedFuture(httpResponse));
        when(httpResponse.body()).thenReturn("""
                [
                  {
                    "key": "key",
                    "value": {
                      "firstName": "John",
                      "lastName": "Doe"
                    },
                    "timestamp": 150
                  }
                ]""");
        List<StateStoreRecord> responses = windowStoreService.getAll("store", Instant.EPOCH, Instant.now());
        assertEquals("key", responses.get(0).getKey());
        assertEquals("John", ((Map<?, ?>) responses.get(0).getValue()).get("firstName"));
        assertEquals("Doe", ((Map<?, ?>) responses.get(0).getValue()).get("lastName"));
        // The timestamp comes straight from the remote instance's JSON payload.
        assertEquals(150L, responses.get(0).getTimestamp());
    }
    @Test
    void shouldGetAllOnLocalHostThrowsUnknownStoreExceptionWhenMetadataNull() {
        when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
        when(kafkaStreams.streamsMetadataForStore(any())).thenReturn(null);
        Instant instant = Instant.now();
        assertThrows(
                UnknownStateStoreException.class,
                () -> windowStoreService.getAllOnLocalInstance("store", instant, instant));
    }
    @Test
    void shouldGetAllOnLocalHostThrowsUnknownStoreExceptionWhenMetadataEmpty() {
        when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
        when(kafkaStreams.streamsMetadataForStore(any())).thenReturn(Collections.emptyList());
        Instant instant = Instant.now();
        assertThrows(
                UnknownStateStoreException.class,
                () -> windowStoreService.getAllOnLocalInstance("store", instant, instant));
    }
    @Test
    void shouldGetAllOnLocalHost() {
        when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
        when(kafkaStreams.streamsMetadataForStore(any())).thenReturn(List.of(streamsMetadata));
        when(kafkaStreams.query(ArgumentMatchers.<StateQueryRequest<KeyValueIterator<Windowed<String>, Object>>>any()))
                .thenReturn(stateWindowRangeQueryResult);
        when(stateWindowRangeQueryResult.getPartitionResults()).thenReturn(Map.of(0, QueryResult.forResult(iterator)));
        doCallRealMethod().when(iterator).forEachRemaining(any());
        when(iterator.hasNext()).thenReturn(true).thenReturn(false);
        when(iterator.next())
                .thenReturn(
                        KeyValue.pair(new Windowed<>("key", new TimeWindow(0L, 150L)), new UserStub("John", "Doe")));
        Instant instant = Instant.now();
        List<StateStoreRecord> responses = windowStoreService.getAllOnLocalInstance("store", instant, instant);
        assertEquals("key", responses.get(0).getKey());
        assertEquals("John", ((Map<?, ?>) responses.get(0).getValue()).get("firstName"));
        assertEquals("Doe", ((Map<?, ?>) responses.get(0).getValue()).get("lastName"));
        assertNull(responses.get(0).getTimestamp());
    }
    @Test
    void shouldHandleRuntimeExceptionWhenGettingAllOtherInstance() {
        when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
        when(kafkaStreams.streamsMetadataForStore(any())).thenReturn(List.of(streamsMetadata));
        when(streamsMetadata.hostInfo()).thenReturn(new HostInfo("localhost", 8080));
        when(kafkaStreamsInitializer.getHostInfo()).thenReturn(new HostInfo("anotherHost", 8080));
        // Any failure of the remote call is wrapped into OtherInstanceResponseException.
        when(httpClient.sendAsync(any(), eq(HttpResponse.BodyHandlers.ofString())))
                .thenThrow(new RuntimeException("Error"));
        Instant instant = Instant.now();
        OtherInstanceResponseException exception = assertThrows(
                OtherInstanceResponseException.class, () -> windowStoreService.getAll("store", instant, instant));
        assertEquals("Fail to read other instance response", exception.getMessage());
    }
    @Test
    void shouldNotGetByKeyWhenStreamsIsNotStarted() {
        when(kafkaStreamsInitializer.isNotRunning()).thenReturn(true);
        when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
        when(kafkaStreams.state()).thenReturn(KafkaStreams.State.REBALANCING);
        Instant instant = Instant.now();
        StreamsNotStartedException exception = assertThrows(
                StreamsNotStartedException.class, () -> windowStoreService.getByKey("store", "key", instant, instant));
        assertEquals(STREAMS_NOT_STARTED, exception.getMessage());
    }
    @Test
    void shouldGetByKeyThrowsUnknownStoreExceptionWhenMetadataNull() {
        when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
        when(kafkaStreams.queryMetadataForKey(anyString(), any(), ArgumentMatchers.<Serializer<Object>>any()))
                .thenReturn(null);
        Instant instant = Instant.now();
        assertThrows(
                UnknownStateStoreException.class, () -> windowStoreService.getByKey("store", "key", instant, instant));
    }
    @Test
    void shouldGetByKeyCurrentInstance() {
        when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
        // Key metadata points at the local host, so the key query runs in-process.
        when(kafkaStreams.queryMetadataForKey(anyString(), any(), ArgumentMatchers.<Serializer<Object>>any()))
                .thenReturn(new KeyQueryMetadata(new HostInfo("localhost", 8080), Collections.emptySet(), 0));
        when(kafkaStreamsInitializer.getHostInfo()).thenReturn(new HostInfo("localhost", 8080));
        when(kafkaStreams.query(ArgumentMatchers.<StateQueryRequest<WindowStoreIterator<Object>>>any()))
                .thenReturn(stateWindowKeyQueryResult);
        when(stateWindowKeyQueryResult.getOnlyPartitionResult()).thenReturn(QueryResult.forResult(windowStoreIterator));
        doCallRealMethod().when(windowStoreIterator).forEachRemaining(any());
        // NOTE(review): hasNext() is stubbed true twice, so the single stubbed pair is emitted
        // twice; only responses.get(0) is asserted — TODO confirm the double emission is intended.
        when(windowStoreIterator.hasNext()).thenReturn(true).thenReturn(true).thenReturn(false);
        when(windowStoreIterator.next()).thenReturn(KeyValue.pair(0L, new UserStub("John", "Doe")));
        List<StateStoreRecord> responses = windowStoreService.getByKey("store", "key", Instant.EPOCH, Instant.now());
        assertEquals("key", responses.get(0).getKey());
        assertEquals("John", ((Map<?, ?>) responses.get(0).getValue()).get("firstName"));
        assertEquals("Doe", ((Map<?, ?>) responses.get(0).getValue()).get("lastName"));
        assertNull(responses.get(0).getTimestamp());
    }
    @Test
    void shouldGetByKeyOtherInstance() {
        when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
        // Key lives on port 8085 while we are on 8080: the service must delegate over HTTP.
        when(kafkaStreams.queryMetadataForKey(anyString(), any(), ArgumentMatchers.<Serializer<Object>>any()))
                .thenReturn(new KeyQueryMetadata(new HostInfo("localhost", 8085), Collections.emptySet(), 0));
        when(kafkaStreamsInitializer.getHostInfo()).thenReturn(new HostInfo("localhost", 8080));
        when(httpClient.sendAsync(any(), eq(HttpResponse.BodyHandlers.ofString())))
                .thenReturn(CompletableFuture.completedFuture(httpResponse));
        when(httpResponse.body()).thenReturn("""
                [
                  {
                    "key": "key",
                    "value": {
                      "firstName": "John",
                      "lastName": "Doe"
                    },
                    "timestamp": 150
                  }
                ]
                """);
        List<StateStoreRecord> responses = windowStoreService.getByKey("store", "key", Instant.EPOCH, Instant.now());
        assertEquals("key", responses.get(0).getKey());
        assertEquals("John", ((Map<?, ?>) responses.get(0).getValue()).get("firstName"));
        assertEquals("Doe", ((Map<?, ?>) responses.get(0).getValue()).get("lastName"));
        assertEquals(150L, responses.get(0).getTimestamp());
    }
    @Test
    void shouldGetUnknownKeyCurrentInstance() {
        when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
        when(kafkaStreams.queryMetadataForKey(anyString(), any(), ArgumentMatchers.<Serializer<Object>>any()))
                .thenReturn(new KeyQueryMetadata(new HostInfo("localhost", 8080), Collections.emptySet(), 0));
        when(kafkaStreamsInitializer.getHostInfo()).thenReturn(new HostInfo("localhost", 8080));
        when(kafkaStreams.query(ArgumentMatchers.<StateQueryRequest<WindowStoreIterator<Object>>>any()))
                .thenReturn(stateWindowKeyQueryResult);
        // The iterator mock yields nothing (hasNext() defaults to false), simulating a missing key.
        when(stateWindowKeyQueryResult.getOnlyPartitionResult()).thenReturn(QueryResult.forResult(windowStoreIterator));
        Instant instant = Instant.now();
        UnknownKeyException exception = assertThrows(
                UnknownKeyException.class, () -> windowStoreService.getByKey("store", "unknownKey", instant, instant));
        assertEquals("Key unknownKey not found", exception.getMessage());
    }
    @Test
    void shouldHandleRuntimeExceptionWhenGettingByKeyOtherInstance() {
        when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
        when(kafkaStreams.queryMetadataForKey(anyString(), any(), ArgumentMatchers.<Serializer<Object>>any()))
                .thenReturn(new KeyQueryMetadata(new HostInfo("localhost", 8085), Collections.emptySet(), 0));
        when(kafkaStreamsInitializer.getHostInfo()).thenReturn(new HostInfo("localhost", 8080));
        when(httpClient.sendAsync(any(), eq(HttpResponse.BodyHandlers.ofString())))
                .thenThrow(new RuntimeException("Error"));
        Instant instant = Instant.now();
        OtherInstanceResponseException exception = assertThrows(
                OtherInstanceResponseException.class,
                () -> windowStoreService.getByKey("store", "key", instant, instant));
        assertEquals("Fail to read other instance response", exception.getMessage());
    }
    // Minimal value type stored in the mocked state store; serialized to a Map in responses.
    record UserStub(String firstName, String lastName) {}
}
| java | Apache-2.0 | 7b912c3bc711629f1745ae4a60bffeaed649d07a | 2026-01-05T02:38:06.284501Z | false |
michelin/kstreamplify | https://github.com/michelin/kstreamplify/blob/7b912c3bc711629f1745ae4a60bffeaed649d07a/kstreamplify-core/src/test/java/com/michelin/kstreamplify/service/interactivequeries/window/TimestampedWindowStoreServiceTest.java | kstreamplify-core/src/test/java/com/michelin/kstreamplify/service/interactivequeries/window/TimestampedWindowStoreServiceTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.michelin.kstreamplify.service.interactivequeries.window;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertIterableEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.doCallRealMethod;
import static org.mockito.Mockito.when;
import com.michelin.kstreamplify.exception.OtherInstanceResponseException;
import com.michelin.kstreamplify.exception.UnknownKeyException;
import com.michelin.kstreamplify.initializer.KafkaStreamsInitializer;
import com.michelin.kstreamplify.store.StateStoreRecord;
import java.net.http.HttpClient;
import java.net.http.HttpResponse;
import java.time.Instant;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import org.apache.kafka.common.serialization.Serializer;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.KeyQueryMetadata;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.StreamsMetadata;
import org.apache.kafka.streams.errors.StreamsNotStartedException;
import org.apache.kafka.streams.errors.UnknownStateStoreException;
import org.apache.kafka.streams.kstream.Windowed;
import org.apache.kafka.streams.kstream.internals.TimeWindow;
import org.apache.kafka.streams.query.QueryResult;
import org.apache.kafka.streams.query.StateQueryRequest;
import org.apache.kafka.streams.query.StateQueryResult;
import org.apache.kafka.streams.state.HostInfo;
import org.apache.kafka.streams.state.KeyValueIterator;
import org.apache.kafka.streams.state.ValueAndTimestamp;
import org.apache.kafka.streams.state.WindowStoreIterator;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.ArgumentMatchers;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
/**
 * Unit tests for {@code TimestampedWindowStoreService}, the interactive-queries service backed by
 * a timestamped window store.
 *
 * <p>Mirrors {@code WindowStoreServiceTest} but the store values are
 * {@link ValueAndTimestamp}-wrapped, so local queries are expected to surface a non-null
 * timestamp (150L in these fixtures) on each {@code StateStoreRecord}. All collaborators are
 * Mockito mocks; no real Kafka Streams instance is started.
 *
 * <p>NOTE: MockitoExtension enables strict stubbing, so every {@code when(...)} in a test is
 * expected to be consumed — stubbing order and counts are significant.
 */
@ExtendWith(MockitoExtension.class)
class TimestampedWindowStoreServiceTest {
    // Message produced by the service when Kafka Streams is still REBALANCING.
    private static final String STREAMS_NOT_STARTED = "Cannot process request while instance is in REBALANCING state";
    @Mock
    private KafkaStreamsInitializer kafkaStreamsInitializer;
    @Mock
    private HttpClient httpClient;
    @Mock
    private StreamsMetadata streamsMetadata;
    @Mock
    private KafkaStreams kafkaStreams;
    // Result/iterator pair used by the window *range* query stubs (getAll paths).
    @Mock
    private StateQueryResult<KeyValueIterator<Windowed<String>, ValueAndTimestamp<Object>>> stateWindowRangeQueryResult;
    @Mock
    private KeyValueIterator<Windowed<String>, ValueAndTimestamp<Object>> iterator;
    // Result/iterator pair used by the window *key* query stubs (getByKey paths).
    @Mock
    private StateQueryResult<WindowStoreIterator<ValueAndTimestamp<Object>>> stateWindowKeyQueryResult;
    @Mock
    private WindowStoreIterator<ValueAndTimestamp<Object>> windowStoreIterator;
    @Mock
    private HttpResponse<String> httpResponse;
    @InjectMocks
    private TimestampedWindowStoreService timestampedWindowStoreService;
    @Test
    void shouldValidatePath() {
        // REST sub-path under which this service is exposed.
        assertEquals("window/timestamped", timestampedWindowStoreService.path());
    }
    @Test
    void shouldNotGetStoresWhenStreamsIsNotStarted() {
        when(kafkaStreamsInitializer.isNotRunning()).thenReturn(true);
        when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
        when(kafkaStreams.state()).thenReturn(KafkaStreams.State.REBALANCING);
        StreamsNotStartedException exception =
                assertThrows(StreamsNotStartedException.class, () -> timestampedWindowStoreService.getStateStores());
        assertEquals(STREAMS_NOT_STARTED, exception.getMessage());
    }
    @Test
    void shouldGetStores() {
        when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
        when(kafkaStreams.metadataForAllStreamsClients()).thenReturn(List.of(streamsMetadata));
        when(streamsMetadata.stateStoreNames()).thenReturn(Set.of("store1", "store2"));
        Set<String> stores = timestampedWindowStoreService.getStateStores();
        assertTrue(stores.contains("store1"));
        assertTrue(stores.contains("store2"));
    }
    @Test
    void shouldGetStoresWhenNull() {
        // A null client-metadata collection must degrade to an empty store set, not an NPE.
        when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
        when(kafkaStreams.metadataForAllStreamsClients()).thenReturn(null);
        Set<String> stores = timestampedWindowStoreService.getStateStores();
        assertTrue(stores.isEmpty());
    }
    @Test
    void shouldGetStoresWhenEmpty() {
        when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
        when(kafkaStreams.metadataForAllStreamsClients()).thenReturn(Collections.emptyList());
        Set<String> stores = timestampedWindowStoreService.getStateStores();
        assertTrue(stores.isEmpty());
    }
    @Test
    void shouldNotGetStreamsMetadataForStoreWhenStreamsIsNotStarted() {
        when(kafkaStreamsInitializer.isNotRunning()).thenReturn(true);
        when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
        when(kafkaStreams.state()).thenReturn(KafkaStreams.State.REBALANCING);
        StreamsNotStartedException exception = assertThrows(
                StreamsNotStartedException.class,
                () -> timestampedWindowStoreService.getStreamsMetadataForStore("store"));
        assertEquals(STREAMS_NOT_STARTED, exception.getMessage());
    }
    @Test
    void shouldGetStreamsMetadataForStore() {
        when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
        when(kafkaStreams.streamsMetadataForStore(any())).thenReturn(List.of(streamsMetadata));
        Collection<StreamsMetadata> streamsMetadataResponse =
                timestampedWindowStoreService.getStreamsMetadataForStore("store");
        assertIterableEquals(List.of(streamsMetadata), streamsMetadataResponse);
    }
    @Test
    void shouldNotGetAllWhenStreamsIsNotStarted() {
        when(kafkaStreamsInitializer.isNotRunning()).thenReturn(true);
        when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
        when(kafkaStreams.state()).thenReturn(KafkaStreams.State.REBALANCING);
        Instant instant = Instant.now();
        StreamsNotStartedException exception = assertThrows(
                StreamsNotStartedException.class,
                () -> timestampedWindowStoreService.getAll("store", instant, instant));
        assertEquals(STREAMS_NOT_STARTED, exception.getMessage());
    }
    @Test
    void shouldGetAllThrowsUnknownStoreExceptionWhenMetadataNull() {
        when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
        when(kafkaStreams.streamsMetadataForStore(any())).thenReturn(null);
        Instant instant = Instant.now();
        assertThrows(
                UnknownStateStoreException.class,
                () -> timestampedWindowStoreService.getAll("store", instant, instant));
    }
    @Test
    void shouldGetAllThrowsUnknownStoreExceptionWhenMetadataEmpty() {
        when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
        when(kafkaStreams.streamsMetadataForStore(any())).thenReturn(Collections.emptyList());
        Instant instant = Instant.now();
        assertThrows(
                UnknownStateStoreException.class,
                () -> timestampedWindowStoreService.getAll("store", instant, instant));
    }
    @Test
    void shouldGetAll() {
        when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
        when(kafkaStreams.streamsMetadataForStore(any())).thenReturn(List.of(streamsMetadata));
        // Same host for metadata and local instance: the query is served locally, no HTTP call.
        HostInfo hostInfo = new HostInfo("localhost", 8080);
        when(streamsMetadata.hostInfo()).thenReturn(hostInfo);
        when(kafkaStreamsInitializer.getHostInfo()).thenReturn(hostInfo);
        when(kafkaStreams.query(
                        ArgumentMatchers
                                .<StateQueryRequest<KeyValueIterator<Windowed<String>, ValueAndTimestamp<Object>>>>
                                any()))
                .thenReturn(stateWindowRangeQueryResult);
        when(stateWindowRangeQueryResult.getPartitionResults()).thenReturn(Map.of(0, QueryResult.forResult(iterator)));
        // forEachRemaining is a default method; let it drive the hasNext()/next() stubs below.
        doCallRealMethod().when(iterator).forEachRemaining(any());
        when(iterator.hasNext()).thenReturn(true).thenReturn(false);
        when(iterator.next())
                .thenReturn(KeyValue.pair(
                        new Windowed<>("key", new TimeWindow(0L, 150L)),
                        ValueAndTimestamp.make(new UserStub("John", "Doe"), 150L)));
        List<StateStoreRecord> responses = timestampedWindowStoreService.getAll("store", Instant.EPOCH, Instant.now());
        assertEquals("key", responses.get(0).getKey());
        assertEquals("John", ((Map<?, ?>) responses.get(0).getValue()).get("firstName"));
        assertEquals("Doe", ((Map<?, ?>) responses.get(0).getValue()).get("lastName"));
        // Timestamped store: the ValueAndTimestamp's timestamp is surfaced on the record.
        assertEquals(150L, responses.get(0).getTimestamp());
    }
    @Test
    void shouldGetAllWithRemoteCall() {
        when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
        when(kafkaStreams.streamsMetadataForStore(any())).thenReturn(List.of(streamsMetadata));
        // Metadata host differs from the local host: the service must fetch over HTTP.
        when(streamsMetadata.hostInfo()).thenReturn(new HostInfo("localhost", 8080));
        when(kafkaStreamsInitializer.getHostInfo()).thenReturn(new HostInfo("anotherHost", 8080));
        when(httpClient.sendAsync(any(), eq(HttpResponse.BodyHandlers.ofString())))
                .thenReturn(CompletableFuture.completedFuture(httpResponse));
        when(httpResponse.body()).thenReturn("""
                [
                  {
                    "key": "key",
                    "value": {
                      "firstName": "John",
                      "lastName": "Doe"
                    },
                    "timestamp": 150
                  }
                ]""");
        List<StateStoreRecord> responses = timestampedWindowStoreService.getAll("store", Instant.EPOCH, Instant.now());
        assertEquals("key", responses.get(0).getKey());
        assertEquals("John", ((Map<?, ?>) responses.get(0).getValue()).get("firstName"));
        assertEquals("Doe", ((Map<?, ?>) responses.get(0).getValue()).get("lastName"));
        assertEquals(150L, responses.get(0).getTimestamp());
    }
    @Test
    void shouldGetAllOnLocalHostThrowsUnknownStoreExceptionWhenMetadataNull() {
        when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
        when(kafkaStreams.streamsMetadataForStore(any())).thenReturn(null);
        Instant instant = Instant.now();
        assertThrows(
                UnknownStateStoreException.class,
                () -> timestampedWindowStoreService.getAllOnLocalInstance("store", instant, instant));
    }
    @Test
    void shouldGetAllOnLocalHostThrowsUnknownStoreExceptionWhenMetadataEmpty() {
        when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
        when(kafkaStreams.streamsMetadataForStore(any())).thenReturn(Collections.emptyList());
        Instant instant = Instant.now();
        assertThrows(
                UnknownStateStoreException.class,
                () -> timestampedWindowStoreService.getAllOnLocalInstance("store", instant, instant));
    }
    @Test
    void shouldGetAllOnLocalHost() {
        when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
        when(kafkaStreams.streamsMetadataForStore(any())).thenReturn(List.of(streamsMetadata));
        when(kafkaStreams.query(
                        ArgumentMatchers
                                .<StateQueryRequest<KeyValueIterator<Windowed<String>, ValueAndTimestamp<Object>>>>
                                any()))
                .thenReturn(stateWindowRangeQueryResult);
        when(stateWindowRangeQueryResult.getPartitionResults()).thenReturn(Map.of(0, QueryResult.forResult(iterator)));
        doCallRealMethod().when(iterator).forEachRemaining(any());
        when(iterator.hasNext()).thenReturn(true).thenReturn(false);
        when(iterator.next())
                .thenReturn(KeyValue.pair(
                        new Windowed<>("key", new TimeWindow(0L, 150L)),
                        ValueAndTimestamp.make(new UserStub("John", "Doe"), 150L)));
        Instant instant = Instant.now();
        List<StateStoreRecord> responses =
                timestampedWindowStoreService.getAllOnLocalInstance("store", instant, instant);
        assertEquals("key", responses.get(0).getKey());
        assertEquals("John", ((Map<?, ?>) responses.get(0).getValue()).get("firstName"));
        assertEquals("Doe", ((Map<?, ?>) responses.get(0).getValue()).get("lastName"));
        assertEquals(150L, responses.get(0).getTimestamp());
    }
    @Test
    void shouldHandleRuntimeExceptionWhenGettingAllOtherInstance() {
        when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
        when(kafkaStreams.streamsMetadataForStore(any())).thenReturn(List.of(streamsMetadata));
        when(streamsMetadata.hostInfo()).thenReturn(new HostInfo("localhost", 8080));
        when(kafkaStreamsInitializer.getHostInfo()).thenReturn(new HostInfo("anotherHost", 8080));
        // Any failure of the remote call is wrapped into OtherInstanceResponseException.
        when(httpClient.sendAsync(any(), eq(HttpResponse.BodyHandlers.ofString())))
                .thenThrow(new RuntimeException("Error"));
        Instant instant = Instant.now();
        OtherInstanceResponseException exception = assertThrows(
                OtherInstanceResponseException.class,
                () -> timestampedWindowStoreService.getAll("store", instant, instant));
        assertEquals("Fail to read other instance response", exception.getMessage());
    }
    @Test
    void shouldNotGetByKeyWhenStreamsIsNotStarted() {
        when(kafkaStreamsInitializer.isNotRunning()).thenReturn(true);
        when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
        when(kafkaStreams.state()).thenReturn(KafkaStreams.State.REBALANCING);
        Instant instant = Instant.now();
        StreamsNotStartedException exception = assertThrows(
                StreamsNotStartedException.class,
                () -> timestampedWindowStoreService.getByKey("store", "key", instant, instant));
        assertEquals(STREAMS_NOT_STARTED, exception.getMessage());
    }
    @Test
    void shouldGetByKeyThrowsUnknownStoreExceptionWhenMetadataNull() {
        when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
        when(kafkaStreams.queryMetadataForKey(anyString(), any(), ArgumentMatchers.<Serializer<Object>>any()))
                .thenReturn(null);
        Instant instant = Instant.now();
        assertThrows(
                UnknownStateStoreException.class,
                () -> timestampedWindowStoreService.getByKey("store", "key", instant, instant));
    }
    @Test
    void shouldGetByKeyCurrentInstance() {
        when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
        // Key metadata points at the local host, so the key query runs in-process.
        when(kafkaStreams.queryMetadataForKey(anyString(), any(), ArgumentMatchers.<Serializer<Object>>any()))
                .thenReturn(new KeyQueryMetadata(new HostInfo("localhost", 8080), Collections.emptySet(), 0));
        when(kafkaStreamsInitializer.getHostInfo()).thenReturn(new HostInfo("localhost", 8080));
        when(kafkaStreams.query(
                        ArgumentMatchers.<StateQueryRequest<WindowStoreIterator<ValueAndTimestamp<Object>>>>any()))
                .thenReturn(stateWindowKeyQueryResult);
        when(stateWindowKeyQueryResult.getOnlyPartitionResult()).thenReturn(QueryResult.forResult(windowStoreIterator));
        doCallRealMethod().when(windowStoreIterator).forEachRemaining(any());
        // NOTE(review): hasNext() is stubbed true twice, so the single stubbed pair is emitted
        // twice; only responses.get(0) is asserted — TODO confirm the double emission is intended.
        when(windowStoreIterator.hasNext()).thenReturn(true).thenReturn(true).thenReturn(false);
        when(windowStoreIterator.next())
                .thenReturn(KeyValue.pair(0L, ValueAndTimestamp.make(new UserStub("John", "Doe"), 150L)));
        List<StateStoreRecord> responses =
                timestampedWindowStoreService.getByKey("store", "key", Instant.EPOCH, Instant.now());
        assertEquals("key", responses.get(0).getKey());
        assertEquals("John", ((Map<?, ?>) responses.get(0).getValue()).get("firstName"));
        assertEquals("Doe", ((Map<?, ?>) responses.get(0).getValue()).get("lastName"));
        assertEquals(150L, responses.get(0).getTimestamp());
    }
    @Test
    void shouldGetByKeyOtherInstance() {
        when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
        // Key lives on port 8085 while we are on 8080: the service must delegate over HTTP.
        when(kafkaStreams.queryMetadataForKey(anyString(), any(), ArgumentMatchers.<Serializer<Object>>any()))
                .thenReturn(new KeyQueryMetadata(new HostInfo("localhost", 8085), Collections.emptySet(), 0));
        when(kafkaStreamsInitializer.getHostInfo()).thenReturn(new HostInfo("localhost", 8080));
        when(httpClient.sendAsync(any(), eq(HttpResponse.BodyHandlers.ofString())))
                .thenReturn(CompletableFuture.completedFuture(httpResponse));
        when(httpResponse.body()).thenReturn("""
                [
                  {
                    "key": "key",
                    "value": {
                      "firstName": "John",
                      "lastName": "Doe"
                    },
                    "timestamp": 150
                  }
                ]
                """);
        List<StateStoreRecord> responses =
                timestampedWindowStoreService.getByKey("store", "key", Instant.EPOCH, Instant.now());
        assertEquals("key", responses.get(0).getKey());
        assertEquals("John", ((Map<?, ?>) responses.get(0).getValue()).get("firstName"));
        assertEquals("Doe", ((Map<?, ?>) responses.get(0).getValue()).get("lastName"));
        assertEquals(150L, responses.get(0).getTimestamp());
    }
    @Test
    void shouldGetUnknownKeyCurrentInstance() {
        when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
        when(kafkaStreams.queryMetadataForKey(anyString(), any(), ArgumentMatchers.<Serializer<Object>>any()))
                .thenReturn(new KeyQueryMetadata(new HostInfo("localhost", 8080), Collections.emptySet(), 0));
        when(kafkaStreamsInitializer.getHostInfo()).thenReturn(new HostInfo("localhost", 8080));
        when(kafkaStreams.query(
                        ArgumentMatchers.<StateQueryRequest<WindowStoreIterator<ValueAndTimestamp<Object>>>>any()))
                .thenReturn(stateWindowKeyQueryResult);
        // The iterator mock yields nothing (hasNext() defaults to false), simulating a missing key.
        when(stateWindowKeyQueryResult.getOnlyPartitionResult()).thenReturn(QueryResult.forResult(windowStoreIterator));
        Instant instant = Instant.now();
        UnknownKeyException exception = assertThrows(
                UnknownKeyException.class,
                () -> timestampedWindowStoreService.getByKey("store", "unknownKey", instant, instant));
        assertEquals("Key unknownKey not found", exception.getMessage());
    }
    @Test
    void shouldHandleRuntimeExceptionWhenGettingByKeyOtherInstance() {
        when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
        when(kafkaStreams.queryMetadataForKey(anyString(), any(), ArgumentMatchers.<Serializer<Object>>any()))
                .thenReturn(new KeyQueryMetadata(new HostInfo("localhost", 8085), Collections.emptySet(), 0));
        when(kafkaStreamsInitializer.getHostInfo()).thenReturn(new HostInfo("localhost", 8080));
        when(httpClient.sendAsync(any(), eq(HttpResponse.BodyHandlers.ofString())))
                .thenThrow(new RuntimeException("Error"));
        Instant instant = Instant.now();
        OtherInstanceResponseException exception = assertThrows(
                OtherInstanceResponseException.class,
                () -> timestampedWindowStoreService.getByKey("store", "key", instant, instant));
        assertEquals("Fail to read other instance response", exception.getMessage());
    }
    // Minimal value type stored in the mocked state store; serialized to a Map in responses.
    record UserStub(String firstName, String lastName) {}
}
| java | Apache-2.0 | 7b912c3bc711629f1745ae4a60bffeaed649d07a | 2026-01-05T02:38:06.284501Z | false |
michelin/kstreamplify | https://github.com/michelin/kstreamplify/blob/7b912c3bc711629f1745ae4a60bffeaed649d07a/kstreamplify-core/src/test/java/com/michelin/kstreamplify/service/interactivequeries/keyvalue/TimestampedKeyValueServiceTest.java | kstreamplify-core/src/test/java/com/michelin/kstreamplify/service/interactivequeries/keyvalue/TimestampedKeyValueServiceTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.michelin.kstreamplify.service.interactivequeries.keyvalue;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertIterableEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.doCallRealMethod;
import static org.mockito.Mockito.when;
import com.michelin.kstreamplify.exception.OtherInstanceResponseException;
import com.michelin.kstreamplify.exception.UnknownKeyException;
import com.michelin.kstreamplify.initializer.KafkaStreamsInitializer;
import com.michelin.kstreamplify.store.StateStoreRecord;
import java.net.http.HttpClient;
import java.net.http.HttpResponse;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import org.apache.kafka.common.serialization.Serializer;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.KeyQueryMetadata;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.StreamsMetadata;
import org.apache.kafka.streams.errors.StreamsNotStartedException;
import org.apache.kafka.streams.errors.UnknownStateStoreException;
import org.apache.kafka.streams.query.QueryResult;
import org.apache.kafka.streams.query.StateQueryRequest;
import org.apache.kafka.streams.query.StateQueryResult;
import org.apache.kafka.streams.state.HostInfo;
import org.apache.kafka.streams.state.KeyValueIterator;
import org.apache.kafka.streams.state.ValueAndTimestamp;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.ArgumentMatchers;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
@ExtendWith(MockitoExtension.class)
class TimestampedKeyValueServiceTest {
private static final String STREAMS_NOT_STARTED = "Cannot process request while instance is in REBALANCING state";
@Mock
private KafkaStreamsInitializer kafkaStreamsInitializer;
@Mock
private HttpClient httpClient;
@Mock
private StreamsMetadata streamsMetadata;
@Mock
private KafkaStreams kafkaStreams;
@Mock
private StateQueryResult<KeyValueIterator<Object, ValueAndTimestamp<Object>>> stateRangeQueryResult;
@Mock
private KeyValueIterator<Object, ValueAndTimestamp<Object>> iterator;
@Mock
private StateQueryResult<ValueAndTimestamp<Object>> stateKeyQueryResult;
@Mock
private HttpResponse<String> httpResponse;
@InjectMocks
private TimestampedKeyValueStoreService timestampedKeyValueStoreService;
@Test
void shouldValidatePath() {
assertEquals("key-value/timestamped", timestampedKeyValueStoreService.path());
}
@Test
void shouldNotGetStoresWhenStreamsIsNotStarted() {
when(kafkaStreamsInitializer.isNotRunning()).thenReturn(true);
when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
when(kafkaStreams.state()).thenReturn(KafkaStreams.State.REBALANCING);
StreamsNotStartedException exception =
assertThrows(StreamsNotStartedException.class, () -> timestampedKeyValueStoreService.getStateStores());
assertEquals(STREAMS_NOT_STARTED, exception.getMessage());
}
@Test
void shouldGetStores() {
when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
when(kafkaStreams.metadataForAllStreamsClients()).thenReturn(List.of(streamsMetadata));
when(streamsMetadata.stateStoreNames()).thenReturn(Set.of("store1", "store2"));
Set<String> stores = timestampedKeyValueStoreService.getStateStores();
assertTrue(stores.contains("store1"));
assertTrue(stores.contains("store2"));
}
@Test
void shouldGetStoresWhenNull() {
when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
when(kafkaStreams.metadataForAllStreamsClients()).thenReturn(null);
Set<String> stores = timestampedKeyValueStoreService.getStateStores();
assertTrue(stores.isEmpty());
}
@Test
void shouldGetStoresWhenEmpty() {
when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
when(kafkaStreams.metadataForAllStreamsClients()).thenReturn(Collections.emptyList());
Set<String> stores = timestampedKeyValueStoreService.getStateStores();
assertTrue(stores.isEmpty());
}
@Test
void shouldNotGetStreamsMetadataForStoreWhenStreamsIsNotStarted() {
when(kafkaStreamsInitializer.isNotRunning()).thenReturn(true);
when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
when(kafkaStreams.state()).thenReturn(KafkaStreams.State.REBALANCING);
StreamsNotStartedException exception = assertThrows(
StreamsNotStartedException.class,
() -> timestampedKeyValueStoreService.getStreamsMetadataForStore("store"));
assertEquals(STREAMS_NOT_STARTED, exception.getMessage());
}
@Test
void shouldGetStreamsMetadataForStore() {
when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
when(kafkaStreams.streamsMetadataForStore(any())).thenReturn(List.of(streamsMetadata));
Collection<StreamsMetadata> streamsMetadataResponse =
timestampedKeyValueStoreService.getStreamsMetadataForStore("store");
assertIterableEquals(List.of(streamsMetadata), streamsMetadataResponse);
}
@Test
void shouldNotGetAllWhenStreamsIsNotStarted() {
when(kafkaStreamsInitializer.isNotRunning()).thenReturn(true);
when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
when(kafkaStreams.state()).thenReturn(KafkaStreams.State.REBALANCING);
StreamsNotStartedException exception =
assertThrows(StreamsNotStartedException.class, () -> timestampedKeyValueStoreService.getAll("store"));
assertEquals(STREAMS_NOT_STARTED, exception.getMessage());
}
@Test
void shouldGetAllThrowsUnknownStoreExceptionWhenMetadataNull() {
when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
when(kafkaStreams.streamsMetadataForStore(any())).thenReturn(null);
assertThrows(UnknownStateStoreException.class, () -> timestampedKeyValueStoreService.getAll("store"));
}
@Test
void shouldGetAllThrowsUnknownStoreExceptionWhenMetadataEmpty() {
when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
when(kafkaStreams.streamsMetadataForStore(any())).thenReturn(Collections.emptyList());
assertThrows(UnknownStateStoreException.class, () -> timestampedKeyValueStoreService.getAll("store"));
}
@Test
void shouldGetAll() {
when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
when(kafkaStreams.streamsMetadataForStore(any())).thenReturn(List.of(streamsMetadata));
HostInfo hostInfo = new HostInfo("localhost", 8080);
when(streamsMetadata.hostInfo()).thenReturn(hostInfo);
when(kafkaStreamsInitializer.getHostInfo()).thenReturn(hostInfo);
when(kafkaStreams.query(
ArgumentMatchers.<StateQueryRequest<KeyValueIterator<Object, ValueAndTimestamp<Object>>>>any()))
.thenReturn(stateRangeQueryResult);
when(stateRangeQueryResult.getPartitionResults()).thenReturn(Map.of(0, QueryResult.forResult(iterator)));
doCallRealMethod().when(iterator).forEachRemaining(any());
when(iterator.hasNext()).thenReturn(true).thenReturn(false);
when(iterator.next())
.thenReturn(KeyValue.pair("key", ValueAndTimestamp.make(new UserStub("John", "Doe"), 150L)));
List<StateStoreRecord> responses = timestampedKeyValueStoreService.getAll("store");
assertEquals("key", responses.get(0).getKey());
assertEquals("John", ((Map<?, ?>) responses.get(0).getValue()).get("firstName"));
assertEquals("Doe", ((Map<?, ?>) responses.get(0).getValue()).get("lastName"));
assertEquals(150L, responses.get(0).getTimestamp());
}
@Test
void shouldGetAllWithRemoteCall() {
when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
when(kafkaStreams.streamsMetadataForStore(any())).thenReturn(List.of(streamsMetadata));
when(streamsMetadata.hostInfo()).thenReturn(new HostInfo("localhost", 8080));
when(kafkaStreamsInitializer.getHostInfo()).thenReturn(new HostInfo("anotherHost", 8080));
when(httpClient.sendAsync(any(), eq(HttpResponse.BodyHandlers.ofString())))
.thenReturn(CompletableFuture.completedFuture(httpResponse));
when(httpResponse.body()).thenReturn("""
[
{
"key": "key",
"value": {
"firstName": "John",
"lastName": "Doe"
},
"timestamp": 150
}
]""");
List<StateStoreRecord> responses = timestampedKeyValueStoreService.getAll("store");
assertEquals("key", responses.get(0).getKey());
assertEquals("John", ((Map<?, ?>) responses.get(0).getValue()).get("firstName"));
assertEquals("Doe", ((Map<?, ?>) responses.get(0).getValue()).get("lastName"));
assertEquals(150L, responses.get(0).getTimestamp());
}
@Test
void shouldGetAllOnLocalHostThrowsUnknownStoreExceptionWhenMetadataNull() {
when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
when(kafkaStreams.streamsMetadataForStore(any())).thenReturn(null);
assertThrows(
UnknownStateStoreException.class, () -> timestampedKeyValueStoreService.getAllOnLocalInstance("store"));
}
@Test
void shouldGetAllOnLocalHostThrowsUnknownStoreExceptionWhenMetadataEmpty() {
when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
when(kafkaStreams.streamsMetadataForStore(any())).thenReturn(Collections.emptyList());
assertThrows(
UnknownStateStoreException.class, () -> timestampedKeyValueStoreService.getAllOnLocalInstance("store"));
}
@Test
void shouldGetAllOnLocalHost() {
when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
when(kafkaStreams.streamsMetadataForStore(any())).thenReturn(List.of(streamsMetadata));
when(kafkaStreams.query(
ArgumentMatchers.<StateQueryRequest<KeyValueIterator<Object, ValueAndTimestamp<Object>>>>any()))
.thenReturn(stateRangeQueryResult);
when(stateRangeQueryResult.getPartitionResults()).thenReturn(Map.of(0, QueryResult.forResult(iterator)));
doCallRealMethod().when(iterator).forEachRemaining(any());
when(iterator.hasNext()).thenReturn(true).thenReturn(false);
when(iterator.next())
.thenReturn(KeyValue.pair("key", ValueAndTimestamp.make(new UserStub("John", "Doe"), 150L)));
List<StateStoreRecord> responses = timestampedKeyValueStoreService.getAllOnLocalInstance("store");
assertEquals("key", responses.get(0).getKey());
assertEquals("John", ((Map<?, ?>) responses.get(0).getValue()).get("firstName"));
assertEquals("Doe", ((Map<?, ?>) responses.get(0).getValue()).get("lastName"));
assertEquals(150L, responses.get(0).getTimestamp());
}
@Test
void shouldHandleRuntimeExceptionWhenGettingAllOtherInstance() {
when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
when(kafkaStreams.streamsMetadataForStore(any())).thenReturn(List.of(streamsMetadata));
when(streamsMetadata.hostInfo()).thenReturn(new HostInfo("localhost", 8080));
when(kafkaStreamsInitializer.getHostInfo()).thenReturn(new HostInfo("anotherHost", 8080));
when(httpClient.sendAsync(any(), eq(HttpResponse.BodyHandlers.ofString())))
.thenThrow(new RuntimeException("Error"));
OtherInstanceResponseException exception = assertThrows(
OtherInstanceResponseException.class, () -> timestampedKeyValueStoreService.getAll("store"));
assertEquals("Fail to read other instance response", exception.getMessage());
}
@Test
void shouldNotGetByKeyWhenStreamsIsNotStarted() {
when(kafkaStreamsInitializer.isNotRunning()).thenReturn(true);
when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
when(kafkaStreams.state()).thenReturn(KafkaStreams.State.REBALANCING);
StreamsNotStartedException exception = assertThrows(
StreamsNotStartedException.class, () -> timestampedKeyValueStoreService.getByKey("store", "key"));
assertEquals(STREAMS_NOT_STARTED, exception.getMessage());
}
@Test
void shouldGetByKeyThrowsUnknownStoreExceptionWhenMetadataNull() {
when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
when(kafkaStreams.queryMetadataForKey(anyString(), any(), ArgumentMatchers.<Serializer<Object>>any()))
.thenReturn(null);
assertThrows(UnknownStateStoreException.class, () -> timestampedKeyValueStoreService.getByKey("store", "key"));
}
@Test
void shouldGetByKeyCurrentInstance() {
when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
when(kafkaStreams.queryMetadataForKey(anyString(), any(), ArgumentMatchers.<Serializer<Object>>any()))
.thenReturn(new KeyQueryMetadata(new HostInfo("localhost", 8080), Collections.emptySet(), 0));
when(kafkaStreamsInitializer.getHostInfo()).thenReturn(new HostInfo("localhost", 8080));
when(kafkaStreams.query(ArgumentMatchers.<StateQueryRequest<ValueAndTimestamp<Object>>>any()))
.thenReturn(stateKeyQueryResult);
when(stateKeyQueryResult.getOnlyPartitionResult())
.thenReturn(QueryResult.forResult(ValueAndTimestamp.make(new UserStub("John", "Doe"), 150L)));
StateStoreRecord response = timestampedKeyValueStoreService.getByKey("store", "key");
assertEquals("key", response.getKey());
assertEquals("John", ((Map<?, ?>) response.getValue()).get("firstName"));
assertEquals("Doe", ((Map<?, ?>) response.getValue()).get("lastName"));
assertEquals(150L, response.getTimestamp());
}
@Test
void shouldGetByKeyOtherInstance() {
when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
when(kafkaStreams.queryMetadataForKey(anyString(), any(), ArgumentMatchers.<Serializer<Object>>any()))
.thenReturn(new KeyQueryMetadata(new HostInfo("localhost", 8085), Collections.emptySet(), 0));
when(kafkaStreamsInitializer.getHostInfo()).thenReturn(new HostInfo("localhost", 8080));
when(httpClient.sendAsync(any(), eq(HttpResponse.BodyHandlers.ofString())))
.thenReturn(CompletableFuture.completedFuture(httpResponse));
when(httpResponse.body()).thenReturn("""
{
"key": "key",
"value": {
"firstName": "John",
"lastName": "Doe"
},
"timestamp": 150
}
""");
StateStoreRecord response = timestampedKeyValueStoreService.getByKey("store", "key");
assertEquals("key", response.getKey());
assertEquals("John", ((Map<?, ?>) response.getValue()).get("firstName"));
assertEquals("Doe", ((Map<?, ?>) response.getValue()).get("lastName"));
assertEquals(150L, response.getTimestamp());
}
@Test
void shouldGetUnknownKeyCurrentInstance() {
when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
when(kafkaStreams.queryMetadataForKey(anyString(), any(), ArgumentMatchers.<Serializer<Object>>any()))
.thenReturn(new KeyQueryMetadata(new HostInfo("localhost", 8080), Collections.emptySet(), 0));
when(kafkaStreamsInitializer.getHostInfo()).thenReturn(new HostInfo("localhost", 8080));
when(kafkaStreams.query(ArgumentMatchers.<StateQueryRequest<ValueAndTimestamp<Object>>>any()))
.thenReturn(stateKeyQueryResult);
when(stateKeyQueryResult.getOnlyPartitionResult()).thenReturn(null);
UnknownKeyException exception = assertThrows(
UnknownKeyException.class, () -> timestampedKeyValueStoreService.getByKey("store", "unknownKey"));
assertEquals("Key unknownKey not found", exception.getMessage());
}
@Test
void shouldHandleRuntimeExceptionWhenGettingByKeyOtherInstance() {
when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
when(kafkaStreams.queryMetadataForKey(anyString(), any(), ArgumentMatchers.<Serializer<Object>>any()))
.thenReturn(new KeyQueryMetadata(new HostInfo("localhost", 8085), Collections.emptySet(), 0));
when(kafkaStreamsInitializer.getHostInfo()).thenReturn(new HostInfo("localhost", 8080));
when(httpClient.sendAsync(any(), eq(HttpResponse.BodyHandlers.ofString())))
.thenThrow(new RuntimeException("Error"));
OtherInstanceResponseException exception = assertThrows(
OtherInstanceResponseException.class, () -> timestampedKeyValueStoreService.getByKey("store", "key"));
assertEquals("Fail to read other instance response", exception.getMessage());
}
record UserStub(String firstName, String lastName) {}
}
| java | Apache-2.0 | 7b912c3bc711629f1745ae4a60bffeaed649d07a | 2026-01-05T02:38:06.284501Z | false |
michelin/kstreamplify | https://github.com/michelin/kstreamplify/blob/7b912c3bc711629f1745ae4a60bffeaed649d07a/kstreamplify-core/src/test/java/com/michelin/kstreamplify/service/interactivequeries/keyvalue/KeyValueServiceTest.java | kstreamplify-core/src/test/java/com/michelin/kstreamplify/service/interactivequeries/keyvalue/KeyValueServiceTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.michelin.kstreamplify.service.interactivequeries.keyvalue;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertIterableEquals;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.doCallRealMethod;
import static org.mockito.Mockito.when;
import com.michelin.kstreamplify.exception.OtherInstanceResponseException;
import com.michelin.kstreamplify.exception.UnknownKeyException;
import com.michelin.kstreamplify.initializer.KafkaStreamsInitializer;
import com.michelin.kstreamplify.store.StateStoreRecord;
import java.net.http.HttpClient;
import java.net.http.HttpResponse;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import org.apache.kafka.common.serialization.Serializer;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.KeyQueryMetadata;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.StreamsMetadata;
import org.apache.kafka.streams.errors.StreamsNotStartedException;
import org.apache.kafka.streams.errors.UnknownStateStoreException;
import org.apache.kafka.streams.query.QueryResult;
import org.apache.kafka.streams.query.StateQueryRequest;
import org.apache.kafka.streams.query.StateQueryResult;
import org.apache.kafka.streams.state.HostInfo;
import org.apache.kafka.streams.state.KeyValueIterator;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.ArgumentMatchers;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
@ExtendWith(MockitoExtension.class)
class KeyValueServiceTest {
private static final String STREAMS_NOT_STARTED = "Cannot process request while instance is in REBALANCING state";
@Mock
private KafkaStreamsInitializer kafkaStreamsInitializer;
@Mock
private HttpClient httpClient;
@Mock
private StreamsMetadata streamsMetadata;
@Mock
private KafkaStreams kafkaStreams;
@Mock
private StateQueryResult<KeyValueIterator<Object, Object>> stateRangeQueryResult;
@Mock
private KeyValueIterator<Object, Object> iterator;
@Mock
private StateQueryResult<Object> stateKeyQueryResult;
@Mock
private HttpResponse<String> httpResponse;
@InjectMocks
private KeyValueStoreService keyValueService;
@Test
void shouldValidatePath() {
assertEquals("key-value", keyValueService.path());
}
@Test
void shouldNotGetStoresWhenStreamsIsNotStarted() {
when(kafkaStreamsInitializer.isNotRunning()).thenReturn(true);
when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
when(kafkaStreams.state()).thenReturn(KafkaStreams.State.REBALANCING);
StreamsNotStartedException exception =
assertThrows(StreamsNotStartedException.class, () -> keyValueService.getStateStores());
assertEquals(STREAMS_NOT_STARTED, exception.getMessage());
}
@Test
void shouldGetStores() {
when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
when(kafkaStreams.metadataForAllStreamsClients()).thenReturn(List.of(streamsMetadata));
when(streamsMetadata.stateStoreNames()).thenReturn(Set.of("store1", "store2"));
Set<String> stores = keyValueService.getStateStores();
assertTrue(stores.contains("store1"));
assertTrue(stores.contains("store2"));
}
@Test
void shouldGetStoresWhenNull() {
when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
when(kafkaStreams.metadataForAllStreamsClients()).thenReturn(null);
Set<String> stores = keyValueService.getStateStores();
assertTrue(stores.isEmpty());
}
@Test
void shouldGetStoresWhenEmpty() {
when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
when(kafkaStreams.metadataForAllStreamsClients()).thenReturn(Collections.emptyList());
Set<String> stores = keyValueService.getStateStores();
assertTrue(stores.isEmpty());
}
@Test
void shouldNotGetStreamsMetadataForStoreWhenStreamsIsNotStarted() {
when(kafkaStreamsInitializer.isNotRunning()).thenReturn(true);
when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
when(kafkaStreams.state()).thenReturn(KafkaStreams.State.REBALANCING);
StreamsNotStartedException exception = assertThrows(
StreamsNotStartedException.class, () -> keyValueService.getStreamsMetadataForStore("store"));
assertEquals(STREAMS_NOT_STARTED, exception.getMessage());
}
@Test
void shouldGetStreamsMetadataForStore() {
when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
when(kafkaStreams.streamsMetadataForStore(any())).thenReturn(List.of(streamsMetadata));
Collection<StreamsMetadata> streamsMetadataResponse = keyValueService.getStreamsMetadataForStore("store");
assertIterableEquals(List.of(streamsMetadata), streamsMetadataResponse);
}
@Test
void shouldNotGetAllWhenStreamsIsNotStarted() {
when(kafkaStreamsInitializer.isNotRunning()).thenReturn(true);
when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
when(kafkaStreams.state()).thenReturn(KafkaStreams.State.REBALANCING);
StreamsNotStartedException exception =
assertThrows(StreamsNotStartedException.class, () -> keyValueService.getAll("store"));
assertEquals(STREAMS_NOT_STARTED, exception.getMessage());
}
@Test
void shouldGetAllThrowsUnknownStoreExceptionWhenMetadataNull() {
when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
when(kafkaStreams.streamsMetadataForStore(any())).thenReturn(null);
assertThrows(UnknownStateStoreException.class, () -> keyValueService.getAll("store"));
}
@Test
void shouldGetAllThrowsUnknownStoreExceptionWhenMetadataEmpty() {
when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
when(kafkaStreams.streamsMetadataForStore(any())).thenReturn(Collections.emptyList());
assertThrows(UnknownStateStoreException.class, () -> keyValueService.getAll("store"));
}
@Test
void shouldGetAll() {
when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
when(kafkaStreams.streamsMetadataForStore(any())).thenReturn(List.of(streamsMetadata));
HostInfo hostInfo = new HostInfo("localhost", 8080);
when(streamsMetadata.hostInfo()).thenReturn(hostInfo);
when(kafkaStreamsInitializer.getHostInfo()).thenReturn(hostInfo);
when(kafkaStreams.query(ArgumentMatchers.<StateQueryRequest<KeyValueIterator<Object, Object>>>any()))
.thenReturn(stateRangeQueryResult);
when(stateRangeQueryResult.getPartitionResults()).thenReturn(Map.of(0, QueryResult.forResult(iterator)));
doCallRealMethod().when(iterator).forEachRemaining(any());
when(iterator.hasNext()).thenReturn(true).thenReturn(false);
when(iterator.next()).thenReturn(KeyValue.pair("key", new UserStub("John", "Doe")));
List<StateStoreRecord> responses = keyValueService.getAll("store");
assertEquals("key", responses.get(0).getKey());
assertEquals("John", ((Map<?, ?>) responses.get(0).getValue()).get("firstName"));
assertEquals("Doe", ((Map<?, ?>) responses.get(0).getValue()).get("lastName"));
assertNull(responses.get(0).getTimestamp());
}
@Test
void shouldGetAllWithRemoteCall() {
when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
when(kafkaStreams.streamsMetadataForStore(any())).thenReturn(List.of(streamsMetadata));
when(streamsMetadata.hostInfo()).thenReturn(new HostInfo("localhost", 8080));
when(kafkaStreamsInitializer.getHostInfo()).thenReturn(new HostInfo("anotherHost", 8080));
when(httpClient.sendAsync(any(), eq(HttpResponse.BodyHandlers.ofString())))
.thenReturn(CompletableFuture.completedFuture(httpResponse));
when(httpResponse.body()).thenReturn("""
[
{
"key": "key",
"value": {
"firstName": "John",
"lastName": "Doe"
},
"timestamp": 150
}
]""");
List<StateStoreRecord> responses = keyValueService.getAll("store");
assertEquals("key", responses.get(0).getKey());
assertEquals("John", ((Map<?, ?>) responses.get(0).getValue()).get("firstName"));
assertEquals("Doe", ((Map<?, ?>) responses.get(0).getValue()).get("lastName"));
assertEquals(150L, responses.get(0).getTimestamp());
}
@Test
void shouldGetAllOnLocalHostThrowsUnknownStoreExceptionWhenMetadataNull() {
when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
when(kafkaStreams.streamsMetadataForStore(any())).thenReturn(null);
assertThrows(UnknownStateStoreException.class, () -> keyValueService.getAllOnLocalInstance("store"));
}
@Test
void shouldGetAllOnLocalHostThrowsUnknownStoreExceptionWhenMetadataEmpty() {
when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
when(kafkaStreams.streamsMetadataForStore(any())).thenReturn(Collections.emptyList());
assertThrows(UnknownStateStoreException.class, () -> keyValueService.getAllOnLocalInstance("store"));
}
@Test
void shouldGetAllOnLocalHost() {
when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
when(kafkaStreams.streamsMetadataForStore(any())).thenReturn(List.of(streamsMetadata));
when(kafkaStreams.query(ArgumentMatchers.<StateQueryRequest<KeyValueIterator<Object, Object>>>any()))
.thenReturn(stateRangeQueryResult);
when(stateRangeQueryResult.getPartitionResults()).thenReturn(Map.of(0, QueryResult.forResult(iterator)));
doCallRealMethod().when(iterator).forEachRemaining(any());
when(iterator.hasNext()).thenReturn(true).thenReturn(false);
when(iterator.next()).thenReturn(KeyValue.pair("key", new UserStub("John", "Doe")));
List<StateStoreRecord> responses = keyValueService.getAllOnLocalInstance("store");
assertEquals("key", responses.get(0).getKey());
assertEquals("John", ((Map<?, ?>) responses.get(0).getValue()).get("firstName"));
assertEquals("Doe", ((Map<?, ?>) responses.get(0).getValue()).get("lastName"));
assertNull(responses.get(0).getTimestamp());
}
@Test
void shouldHandleRuntimeExceptionWhenGettingAllOtherInstance() {
when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
when(kafkaStreams.streamsMetadataForStore(any())).thenReturn(List.of(streamsMetadata));
when(streamsMetadata.hostInfo()).thenReturn(new HostInfo("localhost", 8080));
when(kafkaStreamsInitializer.getHostInfo()).thenReturn(new HostInfo("anotherHost", 8080));
when(httpClient.sendAsync(any(), eq(HttpResponse.BodyHandlers.ofString())))
.thenThrow(new RuntimeException("Error"));
OtherInstanceResponseException exception =
assertThrows(OtherInstanceResponseException.class, () -> keyValueService.getAll("store"));
assertEquals("Fail to read other instance response", exception.getMessage());
}
@Test
void shouldNotGetByKeyWhenStreamsIsNotStarted() {
when(kafkaStreamsInitializer.isNotRunning()).thenReturn(true);
when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
when(kafkaStreams.state()).thenReturn(KafkaStreams.State.REBALANCING);
StreamsNotStartedException exception =
assertThrows(StreamsNotStartedException.class, () -> keyValueService.getByKey("store", "key"));
assertEquals(STREAMS_NOT_STARTED, exception.getMessage());
}
@Test
void shouldGetByKeyThrowsUnknownStoreExceptionWhenMetadataNull() {
when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
when(kafkaStreams.queryMetadataForKey(anyString(), any(), ArgumentMatchers.<Serializer<Object>>any()))
.thenReturn(null);
assertThrows(UnknownStateStoreException.class, () -> keyValueService.getByKey("store", "key"));
}
@Test
void shouldGetByKeyCurrentInstance() {
when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
when(kafkaStreams.queryMetadataForKey(anyString(), any(), ArgumentMatchers.<Serializer<Object>>any()))
.thenReturn(new KeyQueryMetadata(new HostInfo("localhost", 8080), Collections.emptySet(), 0));
when(kafkaStreamsInitializer.getHostInfo()).thenReturn(new HostInfo("localhost", 8080));
when(kafkaStreams.query(any())).thenReturn(stateKeyQueryResult);
when(stateKeyQueryResult.getOnlyPartitionResult())
.thenReturn(QueryResult.forResult(new UserStub("John", "Doe")));
StateStoreRecord response = keyValueService.getByKey("store", "key");
assertEquals("key", response.getKey());
assertEquals("John", ((Map<?, ?>) response.getValue()).get("firstName"));
assertEquals("Doe", ((Map<?, ?>) response.getValue()).get("lastName"));
assertNull(response.getTimestamp());
}
@Test
void shouldGetByKeyOtherInstance() {
    // The key lives on another host (port 8085): the service forwards the call over HTTP
    // and deserializes the remote JSON response.
    when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
    when(kafkaStreamsInitializer.getHostInfo()).thenReturn(new HostInfo("localhost", 8080));
    when(kafkaStreams.queryMetadataForKey(anyString(), any(), ArgumentMatchers.<Serializer<Object>>any()))
            .thenReturn(new KeyQueryMetadata(new HostInfo("localhost", 8085), Collections.emptySet(), 0));
    when(httpClient.sendAsync(any(), eq(HttpResponse.BodyHandlers.ofString())))
            .thenReturn(CompletableFuture.completedFuture(httpResponse));
    when(httpResponse.body())
            .thenReturn(
                    """
                    {
                      "key": "key",
                      "value": {
                        "firstName": "John",
                        "lastName": "Doe"
                      },
                      "timestamp": 150
                    }
                    """);

    StateStoreRecord response = keyValueService.getByKey("store", "key");

    var value = (Map<?, ?>) response.getValue();
    assertEquals("key", response.getKey());
    assertEquals("John", value.get("firstName"));
    assertEquals("Doe", value.get("lastName"));
    assertEquals(150L, response.getTimestamp());
}
@Test
void shouldGetUnknownKeyCurrentInstance() {
    // The local store is queried but yields no partition result for the key.
    var self = new HostInfo("localhost", 8080);

    when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
    when(kafkaStreamsInitializer.getHostInfo()).thenReturn(self);
    when(kafkaStreams.queryMetadataForKey(anyString(), any(), ArgumentMatchers.<Serializer<Object>>any()))
            .thenReturn(new KeyQueryMetadata(self, Collections.emptySet(), 0));
    when(kafkaStreams.query(ArgumentMatchers.any())).thenReturn(stateKeyQueryResult);
    when(stateKeyQueryResult.getOnlyPartitionResult()).thenReturn(null);

    var exception =
            assertThrows(UnknownKeyException.class, () -> keyValueService.getByKey("store", "unknownKey"));

    assertEquals("Key unknownKey not found", exception.getMessage());
}
@Test
void shouldHandleRuntimeExceptionWhenGettingByKeyOtherInstance() {
    // The HTTP client blows up while forwarding to the instance owning the key:
    // the failure is wrapped into OtherInstanceResponseException.
    when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
    when(kafkaStreamsInitializer.getHostInfo()).thenReturn(new HostInfo("localhost", 8080));
    when(kafkaStreams.queryMetadataForKey(anyString(), any(), ArgumentMatchers.<Serializer<Object>>any()))
            .thenReturn(new KeyQueryMetadata(new HostInfo("localhost", 8085), Collections.emptySet(), 0));
    when(httpClient.sendAsync(any(), eq(HttpResponse.BodyHandlers.ofString())))
            .thenThrow(new RuntimeException("Error"));

    var exception =
            assertThrows(OtherInstanceResponseException.class, () -> keyValueService.getByKey("store", "key"));

    assertEquals("Fail to read other instance response", exception.getMessage());
}
record UserStub(String firstName, String lastName) {}
}
| java | Apache-2.0 | 7b912c3bc711629f1745ae4a60bffeaed649d07a | 2026-01-05T02:38:06.284501Z | false |
michelin/kstreamplify | https://github.com/michelin/kstreamplify/blob/7b912c3bc711629f1745ae4a60bffeaed649d07a/kstreamplify-core/src/test/java/com/michelin/kstreamplify/integration/WebServicesPathIntegrationTest.java | kstreamplify-core/src/test/java/com/michelin/kstreamplify/integration/WebServicesPathIntegrationTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.michelin.kstreamplify.integration;
import static com.michelin.kstreamplify.property.PropertiesUtils.KAFKA_PROPERTIES_PREFIX;
import static com.michelin.kstreamplify.property.PropertiesUtils.PROPERTY_SEPARATOR;
import static org.apache.kafka.streams.StreamsConfig.BOOTSTRAP_SERVERS_CONFIG;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
import com.michelin.kstreamplify.context.KafkaStreamsExecutionContext;
import com.michelin.kstreamplify.integration.container.KafkaIntegrationTest;
import java.io.IOException;
import java.net.URI;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.StreamsMetadata;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.testcontainers.junit.jupiter.Testcontainers;
/**
 * Integration test checking that the readiness, liveness and topology web services can be exposed
 * on custom paths configured through properties.
 */
@Slf4j
@Testcontainers
class WebServicesPathIntegrationTest extends KafkaIntegrationTest {
    @BeforeAll
    static void globalSetUp() {
        // Topics must exist before the streams application starts consuming.
        createTopics(
                broker.getBootstrapServers(),
                new TopicPartition("INPUT_TOPIC", 2),
                new TopicPartition("OUTPUT_TOPIC", 2));

        // Start the application on port 8081 with the probe and topology endpoints
        // remapped to custom paths via properties.
        initializer = new KafkaStreamInitializerStub(
                new KafkaStreamsInitializerIntegrationTest.KafkaStreamsStarterStub(),
                8081,
                Map.of(
                        KAFKA_PROPERTIES_PREFIX + PROPERTY_SEPARATOR + BOOTSTRAP_SERVERS_CONFIG,
                        broker.getBootstrapServers(),
                        "kubernetes.readiness.path",
                        "custom-readiness",
                        "kubernetes.liveness.path",
                        "custom-liveness",
                        "topology.path",
                        "custom-topology"));
        initializer.start();
    }

    @BeforeEach
    void setUp() throws InterruptedException {
        // Block until the streams application is RUNNING before any HTTP assertion.
        waitingForKafkaStreamsToStart();
    }

    @Test
    void shouldInitAndRunWithWebServicesExposedOnCustomPaths() throws InterruptedException, IOException {
        assertEquals(KafkaStreams.State.RUNNING, initializer.getKafkaStreams().state());

        List<StreamsMetadata> streamsMetadata =
                new ArrayList<>(initializer.getKafkaStreams().metadataForAllStreamsClients());

        // Assert Kafka Streams initialization
        assertEquals("localhost", streamsMetadata.get(0).hostInfo().host());
        assertEquals(8081, streamsMetadata.get(0).hostInfo().port());
        assertTrue(streamsMetadata.get(0).stateStoreNames().isEmpty());

        // Only INPUT_TOPIC partitions are assigned to the client; OUTPUT_TOPIC is a sink.
        Set<TopicPartition> topicPartitions = streamsMetadata.get(0).topicPartitions();
        assertTrue(Set.of(new TopicPartition("INPUT_TOPIC", 0), new TopicPartition("INPUT_TOPIC", 1))
                .containsAll(topicPartitions));

        assertEquals("DLQ_TOPIC", KafkaStreamsExecutionContext.getDlqTopicName());
        assertEquals(
                "org.apache.kafka.common.serialization.Serdes$StringSerde",
                KafkaStreamsExecutionContext.getSerdesConfig().get("default.key.serde"));
        assertEquals(
                "org.apache.kafka.common.serialization.Serdes$StringSerde",
                KafkaStreamsExecutionContext.getSerdesConfig().get("default.value.serde"));
        assertEquals(
                "localhost:8081", KafkaStreamsExecutionContext.getProperties().get("application.server"));

        // Assert HTTP probes, served on the custom paths configured above
        HttpRequest requestReady = HttpRequest.newBuilder()
                .uri(URI.create("http://localhost:8081/custom-readiness"))
                .GET()
                .build();

        HttpResponse<Void> responseReady = httpClient.send(requestReady, HttpResponse.BodyHandlers.discarding());

        assertEquals(200, responseReady.statusCode());

        HttpRequest requestLiveness = HttpRequest.newBuilder()
                .uri(URI.create("http://localhost:8081/custom-liveness"))
                .GET()
                .build();

        HttpResponse<Void> responseLiveness = httpClient.send(requestLiveness, HttpResponse.BodyHandlers.discarding());

        assertEquals(200, responseLiveness.statusCode());

        HttpRequest requestTopology = HttpRequest.newBuilder()
                .uri(URI.create("http://localhost:8081/custom-topology"))
                .GET()
                .build();

        HttpResponse<String> responseTopology = httpClient.send(requestTopology, HttpResponse.BodyHandlers.ofString());

        assertEquals(200, responseTopology.statusCode());
        // NOTE(review): whitespace inside this text block is part of the asserted string.
        assertEquals("""
                Topologies:
                Sub-topology: 0
                Source: KSTREAM-SOURCE-0000000000 (topics: [INPUT_TOPIC])
                --> KSTREAM-SINK-0000000001
                Sink: KSTREAM-SINK-0000000001 (topic: OUTPUT_TOPIC)
                <-- KSTREAM-SOURCE-0000000000
                """, responseTopology.body());
    }
}
| java | Apache-2.0 | 7b912c3bc711629f1745ae4a60bffeaed649d07a | 2026-01-05T02:38:06.284501Z | false |
michelin/kstreamplify | https://github.com/michelin/kstreamplify/blob/7b912c3bc711629f1745ae4a60bffeaed649d07a/kstreamplify-core/src/test/java/com/michelin/kstreamplify/integration/KafkaStreamsInitializerIntegrationTest.java | kstreamplify-core/src/test/java/com/michelin/kstreamplify/integration/KafkaStreamsInitializerIntegrationTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.michelin.kstreamplify.integration;
import static com.michelin.kstreamplify.property.PropertiesUtils.KAFKA_PROPERTIES_PREFIX;
import static com.michelin.kstreamplify.property.PropertiesUtils.PROPERTY_SEPARATOR;
import static org.apache.kafka.streams.StreamsConfig.BOOTSTRAP_SERVERS_CONFIG;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
import com.michelin.kstreamplify.context.KafkaStreamsExecutionContext;
import com.michelin.kstreamplify.initializer.KafkaStreamsStarter;
import com.michelin.kstreamplify.integration.container.KafkaIntegrationTest;
import java.io.IOException;
import java.net.URI;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsMetadata;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.testcontainers.junit.jupiter.Testcontainers;
/**
 * Integration test checking that a Kafka Streams application started through the initializer runs
 * end to end with the default web service paths and default server port.
 */
@Slf4j
@Testcontainers
class KafkaStreamsInitializerIntegrationTest extends KafkaIntegrationTest {
    @BeforeAll
    static void globalSetUp() {
        // Topics must exist before the streams application starts consuming.
        createTopics(
                broker.getBootstrapServers(),
                new TopicPartition("INPUT_TOPIC", 2),
                new TopicPartition("OUTPUT_TOPIC", 2));

        // Start the application with default port (8080) and default probe paths.
        initializer = new KafkaStreamInitializerStub(
                new KafkaStreamsStarterStub(),
                Map.of(
                        KAFKA_PROPERTIES_PREFIX + PROPERTY_SEPARATOR + BOOTSTRAP_SERVERS_CONFIG,
                        broker.getBootstrapServers()));
        initializer.start();
    }

    @BeforeEach
    void setUp() throws InterruptedException {
        // Block until the streams application is RUNNING before any HTTP assertion.
        waitingForKafkaStreamsToStart();
    }

    @Test
    void shouldStartAndRun() throws InterruptedException, IOException {
        assertEquals(KafkaStreams.State.RUNNING, initializer.getKafkaStreams().state());

        List<StreamsMetadata> streamsMetadata =
                new ArrayList<>(initializer.getKafkaStreams().metadataForAllStreamsClients());

        // Assert Kafka Streams initialization
        assertEquals("localhost", streamsMetadata.get(0).hostInfo().host());
        assertEquals(8080, streamsMetadata.get(0).hostInfo().port());
        assertTrue(streamsMetadata.get(0).stateStoreNames().isEmpty());

        // Only INPUT_TOPIC partitions are assigned to the client; OUTPUT_TOPIC is a sink.
        Set<TopicPartition> topicPartitions = streamsMetadata.get(0).topicPartitions();
        assertTrue(Set.of(new TopicPartition("INPUT_TOPIC", 0), new TopicPartition("INPUT_TOPIC", 1))
                .containsAll(topicPartitions));

        assertEquals("DLQ_TOPIC", KafkaStreamsExecutionContext.getDlqTopicName());
        assertEquals(
                "org.apache.kafka.common.serialization.Serdes$StringSerde",
                KafkaStreamsExecutionContext.getSerdesConfig().get("default.key.serde"));
        assertEquals(
                "org.apache.kafka.common.serialization.Serdes$StringSerde",
                KafkaStreamsExecutionContext.getSerdesConfig().get("default.value.serde"));
        assertEquals(
                "localhost:8080", KafkaStreamsExecutionContext.getProperties().get("application.server"));

        // Assert HTTP probes on the default paths
        HttpRequest requestReady = HttpRequest.newBuilder()
                .uri(URI.create("http://localhost:8080/ready"))
                .GET()
                .build();

        HttpResponse<Void> responseReady = httpClient.send(requestReady, HttpResponse.BodyHandlers.discarding());

        assertEquals(200, responseReady.statusCode());

        HttpRequest requestLiveness = HttpRequest.newBuilder()
                .uri(URI.create("http://localhost:8080/liveness"))
                .GET()
                .build();

        HttpResponse<Void> responseLiveness = httpClient.send(requestLiveness, HttpResponse.BodyHandlers.discarding());

        assertEquals(200, responseLiveness.statusCode());

        HttpRequest requestTopology = HttpRequest.newBuilder()
                .uri(URI.create("http://localhost:8080/topology"))
                .GET()
                .build();

        HttpResponse<String> responseTopology = httpClient.send(requestTopology, HttpResponse.BodyHandlers.ofString());

        assertEquals(200, responseTopology.statusCode());
        // NOTE(review): whitespace inside this text block is part of the asserted string.
        assertEquals("""
                Topologies:
                Sub-topology: 0
                Source: KSTREAM-SOURCE-0000000000 (topics: [INPUT_TOPIC])
                --> KSTREAM-SINK-0000000001
                Sink: KSTREAM-SINK-0000000001 (topic: OUTPUT_TOPIC)
                <-- KSTREAM-SOURCE-0000000000
                """, responseTopology.body());
    }

    /** Minimal topology for the test: pipe INPUT_TOPIC straight to OUTPUT_TOPIC. */
    @Slf4j
    static class KafkaStreamsStarterStub extends KafkaStreamsStarter {
        @Override
        public void topology(StreamsBuilder streamsBuilder) {
            streamsBuilder.stream("INPUT_TOPIC").to("OUTPUT_TOPIC");
        }

        @Override
        public String dlqTopic() {
            return "DLQ_TOPIC";
        }

        @Override
        public void onStart(KafkaStreams kafkaStreams) {
            log.info("Starting Kafka Streams from integration tests!");
        }
    }
}
| java | Apache-2.0 | 7b912c3bc711629f1745ae4a60bffeaed649d07a | 2026-01-05T02:38:06.284501Z | false |
michelin/kstreamplify | https://github.com/michelin/kstreamplify/blob/7b912c3bc711629f1745ae4a60bffeaed649d07a/kstreamplify-core/src/test/java/com/michelin/kstreamplify/integration/container/KafkaIntegrationTest.java | kstreamplify-core/src/test/java/com/michelin/kstreamplify/integration/container/KafkaIntegrationTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.michelin.kstreamplify.integration.container;
import static org.apache.kafka.streams.StreamsConfig.BOOTSTRAP_SERVERS_CONFIG;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.michelin.kstreamplify.context.KafkaStreamsExecutionContext;
import com.michelin.kstreamplify.initializer.KafkaStreamsInitializer;
import com.michelin.kstreamplify.initializer.KafkaStreamsStarter;
import com.michelin.kstreamplify.property.PropertiesUtils;
import java.net.http.HttpClient;
import java.util.Arrays;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.ExecutionException;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.NewTopic;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.LagInfo;
import org.testcontainers.containers.GenericContainer;
import org.testcontainers.containers.Network;
import org.testcontainers.containers.wait.strategy.Wait;
import org.testcontainers.junit.jupiter.Container;
import org.testcontainers.kafka.ConfluentKafkaContainer;
import org.testcontainers.utility.DockerImageName;
/**
 * Base class for Kafka integration tests.
 *
 * <p>Spins up a Kafka broker and a Schema Registry with Testcontainers, and provides helpers to
 * create topics and wait for the Kafka Streams application under test to be ready.
 */
@Slf4j
public abstract class KafkaIntegrationTest {
    protected static final String CONFLUENT_PLATFORM_VERSION = "7.7.0";

    protected static final Network NETWORK = Network.newNetwork();

    protected final HttpClient httpClient = HttpClient.newBuilder().build();

    protected final ObjectMapper objectMapper = new ObjectMapper();

    // Shared with subclasses; assigned once in each test class' @BeforeAll.
    protected static KafkaStreamsInitializer initializer;

    @Container
    protected static ConfluentKafkaContainer broker = new ConfluentKafkaContainer(
                    DockerImageName.parse("confluentinc/cp-kafka:" + CONFLUENT_PLATFORM_VERSION))
            .withNetwork(NETWORK)
            .withNetworkAliases("broker");

    @Container
    protected static GenericContainer<?> schemaRegistry = new GenericContainer<>(
                    DockerImageName.parse("confluentinc/cp-schema-registry:" + CONFLUENT_PLATFORM_VERSION))
            .dependsOn(broker)
            .withNetwork(NETWORK)
            .withNetworkAliases("schema-registry")
            .withExposedPorts(8081)
            .withEnv("SCHEMA_REGISTRY_HOST_NAME", "schema-registry")
            .withEnv("SCHEMA_REGISTRY_LISTENERS", "http://0.0.0.0:8081")
            .withEnv("SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS", "broker:9093")
            .waitingFor(Wait.forHttp("/subjects").forStatusCode(200));

    /**
     * Create the given topics and wait for the creation to be acknowledged by the broker.
     *
     * <p>The partition index of each {@link TopicPartition} is reused as the partition count of the
     * created topic (replication factor is always 1).
     *
     * @param bootstrapServers The bootstrap servers of the broker
     * @param topicPartitions One entry per topic to create: topic name and number of partitions
     */
    protected static void createTopics(String bootstrapServers, TopicPartition... topicPartitions) {
        var newTopics = Arrays.stream(topicPartitions)
                .map(topicPartition -> new NewTopic(topicPartition.topic(), topicPartition.partition(), (short) 1))
                .toList();
        try (var admin = AdminClient.create(Map.of(BOOTSTRAP_SERVERS_CONFIG, bootstrapServers))) {
            try {
                // createTopics() is asynchronous: without waiting on the result, tests could start
                // producing/consuming before the topics actually exist on the broker.
                admin.createTopics(newTopics).all().get();
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
                throw new IllegalStateException("Interrupted while creating topics", e);
            } catch (ExecutionException e) {
                throw new IllegalStateException("Failed to create topics", e);
            }
        }
    }

    /** Poll every two seconds until the Kafka Streams application reaches RUNNING. */
    protected void waitingForKafkaStreamsToStart() throws InterruptedException {
        while (!initializer.getKafkaStreams().state().equals(KafkaStreams.State.RUNNING)) {
            log.info("Waiting for Kafka Streams to start...");
            Thread.sleep(2000); // NOSONAR
        }
    }

    /**
     * Poll every five seconds until the local state stores have caught up with the expected
     * offsets.
     *
     * @param topicPartitionOffset Store name to (partition to expected current offset position)
     */
    protected void waitingForLocalStoreToReachOffset(Map<String, Map<Integer, Long>> topicPartitionOffset)
            throws InterruptedException {
        while (hasLag(topicPartitionOffset)) {
            log.info(
                    "Waiting for local stores {} to reach offsets",
                    topicPartitionOffset.keySet().stream().toList());
            Thread.sleep(5000); // NOSONAR
        }
    }

    // True while at least one store has not reached the expected offsets.
    // NOTE(review): per store this uses anyMatch, i.e. a single matching partition is enough to
    // consider the store caught up — confirm this is intended rather than allMatch. It also
    // assumes allLocalStorePartitionLags() already reports every requested store/partition,
    // otherwise the inner get() chain throws NPE — verify against the callers' timing.
    private boolean hasLag(Map<String, Map<Integer, Long>> topicPartitionOffset) {
        Map<String, Map<Integer, LagInfo>> currentLag =
                initializer.getKafkaStreams().allLocalStorePartitionLags();

        return !topicPartitionOffset.entrySet().stream()
                .allMatch(topicPartitionOffsetEntry -> topicPartitionOffsetEntry.getValue().entrySet().stream()
                        .anyMatch(partitionOffsetEntry -> currentLag
                                        .get(topicPartitionOffsetEntry.getKey())
                                        .get(partitionOffsetEntry.getKey())
                                        .currentOffsetPosition()
                                == partitionOffsetEntry.getValue()));
    }

    /**
     * Define a KafkaStreamsInitializer stub for testing. This stub allows to override some properties of the
     * application.properties file or to set some properties dynamically from Testcontainers.
     */
    public static class KafkaStreamInitializerStub extends KafkaStreamsInitializer {
        // Optional server port override; null keeps the port from application properties.
        private Integer newServerPort;

        private final Map<String, String> additionalProperties;

        /**
         * Constructor keeping the default server port.
         *
         * @param kafkaStreamsStarter The starter providing the topology under test
         * @param properties Properties overriding/completing the application properties
         */
        public KafkaStreamInitializerStub(KafkaStreamsStarter kafkaStreamsStarter, Map<String, String> properties) {
            super(kafkaStreamsStarter);
            this.additionalProperties = properties;
        }

        /**
         * Constructor overriding the server port.
         *
         * @param kafkaStreamsStarter The starter providing the topology under test
         * @param newServerPort The HTTP server port to use instead of the configured one
         * @param additionalProperties Properties overriding/completing the application properties
         */
        public KafkaStreamInitializerStub(
                KafkaStreamsStarter kafkaStreamsStarter,
                Integer newServerPort,
                Map<String, String> additionalProperties) {
            super(kafkaStreamsStarter);
            this.newServerPort = newServerPort;
            this.additionalProperties = additionalProperties;
        }

        /**
         * Override properties of the application.properties file. Some properties are dynamically set from
         * Testcontainers.
         */
        @Override
        protected void initProperties() {
            super.initProperties();

            if (newServerPort != null) {
                serverPort = newServerPort;
            }

            properties.putAll(additionalProperties);

            // Re-run the kafka.* prefix extraction over the overrides so they land in the
            // Kafka Streams configuration as well, then re-register the merged properties.
            Properties convertedAdditionalProperties = new Properties();
            convertedAdditionalProperties.putAll(additionalProperties);
            kafkaProperties.putAll(PropertiesUtils.loadKafkaProperties(convertedAdditionalProperties));
            KafkaStreamsExecutionContext.registerProperties(kafkaProperties);
        }
    }
}
| java | Apache-2.0 | 7b912c3bc711629f1745ae4a60bffeaed649d07a | 2026-01-05T02:38:06.284501Z | false |
michelin/kstreamplify | https://github.com/michelin/kstreamplify/blob/7b912c3bc711629f1745ae4a60bffeaed649d07a/kstreamplify-core/src/test/java/com/michelin/kstreamplify/integration/interactivequeries/window/WindowIntegrationTest.java | kstreamplify-core/src/test/java/com/michelin/kstreamplify/integration/interactivequeries/window/WindowIntegrationTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.michelin.kstreamplify.integration.interactivequeries.window;
import static com.michelin.kstreamplify.property.PropertiesUtils.KAFKA_PROPERTIES_PREFIX;
import static com.michelin.kstreamplify.property.PropertiesUtils.PROPERTY_SEPARATOR;
import static io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG;
import static org.apache.kafka.clients.producer.ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG;
import static org.apache.kafka.clients.producer.ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG;
import static org.apache.kafka.streams.StreamsConfig.APPLICATION_ID_CONFIG;
import static org.apache.kafka.streams.StreamsConfig.BOOTSTRAP_SERVERS_CONFIG;
import static org.apache.kafka.streams.StreamsConfig.STATE_DIR_CONFIG;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import com.fasterxml.jackson.core.type.TypeReference;
import com.michelin.kstreamplify.avro.KafkaUserStub;
import com.michelin.kstreamplify.initializer.KafkaStreamsStarter;
import com.michelin.kstreamplify.integration.container.KafkaIntegrationTest;
import com.michelin.kstreamplify.serde.SerdesUtils;
import com.michelin.kstreamplify.service.interactivequeries.window.WindowStoreService;
import com.michelin.kstreamplify.store.StateStoreRecord;
import com.michelin.kstreamplify.store.StreamsMetadata;
import io.confluent.kafka.serializers.KafkaAvroSerializer;
import java.io.IOException;
import java.net.URI;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.time.Duration;
import java.time.Instant;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.serialization.StringSerializer;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.kstream.Consumed;
import org.apache.kafka.streams.processor.api.Processor;
import org.apache.kafka.streams.processor.api.ProcessorContext;
import org.apache.kafka.streams.processor.api.ProcessorSupplier;
import org.apache.kafka.streams.processor.api.Record;
import org.apache.kafka.streams.state.KeyValueStore;
import org.apache.kafka.streams.state.StoreBuilder;
import org.apache.kafka.streams.state.Stores;
import org.apache.kafka.streams.state.WindowStore;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.CsvSource;
import org.testcontainers.junit.jupiter.Testcontainers;
@Slf4j
@Testcontainers
class WindowIntegrationTest extends KafkaIntegrationTest {
private final WindowStoreService windowService = new WindowStoreService(initializer);
@BeforeAll
static void globalSetUp() throws ExecutionException, InterruptedException {
    // Topics must exist before records are produced and the streams app starts.
    createTopics(
            broker.getBootstrapServers(),
            new TopicPartition("STRING_TOPIC", 3),
            new TopicPartition("AVRO_TOPIC", 2));

    // Seed one String record; send(...).get() blocks until the broker acknowledges it.
    try (KafkaProducer<String, String> stringKafkaProducer = new KafkaProducer<>(Map.of(
            BOOTSTRAP_SERVERS_CONFIG,
            broker.getBootstrapServers(),
            KEY_SERIALIZER_CLASS_CONFIG,
            StringSerializer.class.getName(),
            VALUE_SERIALIZER_CLASS_CONFIG,
            StringSerializer.class.getName()))) {

        ProducerRecord<String, String> message = new ProducerRecord<>("STRING_TOPIC", "user", "Doe");

        stringKafkaProducer.send(message).get();
    }

    // Seed one Avro record, registering its schema against the test Schema Registry.
    try (KafkaProducer<String, KafkaUserStub> avroKafkaProducer = new KafkaProducer<>(Map.of(
            BOOTSTRAP_SERVERS_CONFIG,
            broker.getBootstrapServers(),
            KEY_SERIALIZER_CLASS_CONFIG,
            StringSerializer.class.getName(),
            VALUE_SERIALIZER_CLASS_CONFIG,
            KafkaAvroSerializer.class.getName(),
            SCHEMA_REGISTRY_URL_CONFIG,
            "http://" + schemaRegistry.getHost() + ":" + schemaRegistry.getFirstMappedPort()))) {

        KafkaUserStub kafkaUserStub = KafkaUserStub.newBuilder()
                .setId(1L)
                .setFirstName("John")
                .setLastName("Doe")
                .setBirthDate(Instant.parse("2000-01-01T01:00:00Z"))
                .build();

        ProducerRecord<String, KafkaUserStub> message = new ProducerRecord<>("AVRO_TOPIC", "user", kafkaUserStub);

        avroKafkaProducer.send(message).get();
    }

    // Start the window-store application on port 8085 with a dedicated application id
    // and state directory so it does not clash with the other integration tests.
    initializer = new KafkaStreamInitializerStub(
            new KafkaStreamsStarterStub(),
            8085,
            Map.of(
                    KAFKA_PROPERTIES_PREFIX + PROPERTY_SEPARATOR + BOOTSTRAP_SERVERS_CONFIG,
                    broker.getBootstrapServers(),
                    KAFKA_PROPERTIES_PREFIX + PROPERTY_SEPARATOR + APPLICATION_ID_CONFIG,
                    "appWindowInteractiveQueriesId",
                    KAFKA_PROPERTIES_PREFIX + PROPERTY_SEPARATOR + SCHEMA_REGISTRY_URL_CONFIG,
                    "http://" + schemaRegistry.getHost() + ":" + schemaRegistry.getFirstMappedPort(),
                    KAFKA_PROPERTIES_PREFIX + PROPERTY_SEPARATOR + STATE_DIR_CONFIG,
                    "/tmp/kstreamplify/kstreamplify-core-test/interactive-queries/window"));
    initializer.start();
}
@BeforeEach
void setUp() throws InterruptedException {
    // Block until the streams app is RUNNING and each store has ingested the seeded
    // record (store -> partition -> expected offset) before any HTTP assertion.
    waitingForKafkaStreamsToStart();
    waitingForLocalStoreToReachOffset(Map.of(
            "STRING_STRING_WINDOW_STORE", Map.of(2, 1L),
            "STRING_AVRO_WINDOW_STORE", Map.of(0, 1L),
            "STRING_AVRO_KV_STORE", Map.of(0, 1L)));
}
@Test
void shouldGetStoresAndStoreMetadata() throws IOException, InterruptedException {
    // List every store exposed by the application.
    var storesRequest = HttpRequest.newBuilder()
            .uri(URI.create("http://localhost:8085/store"))
            .GET()
            .build();

    HttpResponse<String> storesResponse = httpClient.send(storesRequest, HttpResponse.BodyHandlers.ofString());
    List<String> storeNames = objectMapper.readValue(storesResponse.body(), new TypeReference<>() {});

    assertEquals(200, storesResponse.statusCode());
    assertTrue(storeNames.containsAll(
            List.of("STRING_STRING_WINDOW_STORE", "STRING_AVRO_WINDOW_STORE", "STRING_AVRO_KV_STORE")));

    // Fetch the streams metadata of one store: host, store names and topic-partitions.
    var metadataRequest = HttpRequest.newBuilder()
            .uri(URI.create("http://localhost:8085/store/metadata/STRING_STRING_WINDOW_STORE"))
            .GET()
            .build();

    HttpResponse<String> metadataResponse = httpClient.send(metadataRequest, HttpResponse.BodyHandlers.ofString());
    List<StreamsMetadata> metadata = objectMapper.readValue(metadataResponse.body(), new TypeReference<>() {});

    assertEquals(200, metadataResponse.statusCode());

    StreamsMetadata first = metadata.get(0);
    assertEquals(
            Set.of("STRING_STRING_WINDOW_STORE", "STRING_AVRO_WINDOW_STORE", "STRING_AVRO_KV_STORE"),
            first.getStateStoreNames());
    assertEquals("localhost", first.getHostInfo().host());
    assertEquals(8085, first.getHostInfo().port());
    assertEquals(
            Set.of("AVRO_TOPIC-0", "AVRO_TOPIC-1", "STRING_TOPIC-0", "STRING_TOPIC-1", "STRING_TOPIC-2"),
            first.getTopicPartitions());
}
@ParameterizedTest
@CsvSource({
    "http://localhost:8085/store/window/WRONG_STORE/user,State store WRONG_STORE not found",
    "http://localhost:8085/store/window/STRING_STRING_WINDOW_STORE/wrongKey,Key wrongKey not found",
    "http://localhost:8085/store/window/WRONG_STORE,State store WRONG_STORE not found"
})
void shouldNotFoundWhenKeyOrStoreNotFound(String url, String message) throws IOException, InterruptedException {
    // Unknown stores and unknown keys both surface as 404 with an explanatory body.
    var request = HttpRequest.newBuilder().uri(URI.create(url)).GET().build();

    var response = httpClient.send(request, HttpResponse.BodyHandlers.ofString());

    assertEquals(404, response.statusCode());
    assertEquals(message, response.body());
}
@Test
void shouldGetErrorWhenQueryingWrongStoreType() throws IOException, InterruptedException {
    // Querying a key-value store through the window endpoint is a client error.
    var request = HttpRequest.newBuilder()
            .uri(URI.create("http://localhost:8085/store/window/STRING_AVRO_KV_STORE/user"))
            .GET()
            .build();

    var response = httpClient.send(request, HttpResponse.BodyHandlers.ofString());

    assertEquals(400, response.statusCode());
    assertNotNull(response.body());
}
@Test
void shouldGetByKeyInStringStringStore() throws IOException, InterruptedException {
    // The seeded String record is returned as-is; no timestamp is exposed by this endpoint.
    var request = HttpRequest.newBuilder()
            .uri(URI.create("http://localhost:8085/store/window/STRING_STRING_WINDOW_STORE/user"))
            .GET()
            .build();

    var response = httpClient.send(request, HttpResponse.BodyHandlers.ofString());
    List<StateStoreRecord> records = objectMapper.readValue(response.body(), new TypeReference<>() {});

    assertEquals(200, response.statusCode());

    StateStoreRecord first = records.get(0);
    assertEquals("user", first.getKey());
    assertEquals("Doe", first.getValue());
    assertNull(first.getTimestamp());
}
@Test
void shouldGetByKeyInStringAvroStore() throws IOException, InterruptedException {
    // The seeded Avro record is returned as a JSON map of its fields.
    var request = HttpRequest.newBuilder()
            .uri(URI.create("http://localhost:8085/store/window/STRING_AVRO_WINDOW_STORE/user"))
            .GET()
            .build();

    var response = httpClient.send(request, HttpResponse.BodyHandlers.ofString());
    List<StateStoreRecord> records = objectMapper.readValue(response.body(), new TypeReference<>() {});

    assertEquals(200, response.statusCode());

    StateStoreRecord first = records.get(0);
    var value = (Map<?, ?>) first.getValue();
    assertEquals("user", first.getKey());
    assertEquals(1, value.get("id"));
    assertEquals("John", value.get("firstName"));
    assertEquals("Doe", value.get("lastName"));
    assertEquals("2000-01-01T01:00:00Z", value.get("birthDate"));
    assertNull(first.getTimestamp());
}
@ParameterizedTest
@CsvSource({
    "http://localhost:8085/store/window/STRING_STRING_WINDOW_STORE/user",
    "http://localhost:8085/store/window/STRING_AVRO_WINDOW_STORE/user"
})
void shouldNotFoundWhenStartTimeIsTooLate(String url) throws IOException, InterruptedException {
    // A window-start lower bound in the future cannot match any stored window.
    var startTime = Instant.now().plus(Duration.ofDays(1));
    var request = HttpRequest.newBuilder()
            .uri(URI.create(url + "?startTime=" + startTime))
            .GET()
            .build();

    var response = httpClient.send(request, HttpResponse.BodyHandlers.ofString());

    assertEquals(404, response.statusCode());
}
@ParameterizedTest
@CsvSource({
    "http://localhost:8085/store/window/STRING_STRING_WINDOW_STORE/user",
    "http://localhost:8085/store/window/STRING_AVRO_WINDOW_STORE/user"
})
void shouldNotFoundWhenEndTimeIsTooEarly(String url) throws IOException, InterruptedException {
    // A window-end upper bound in the past cannot match any stored window.
    var endTime = Instant.now().minus(Duration.ofDays(1));
    var request = HttpRequest.newBuilder()
            .uri(URI.create(url + "?endTime=" + endTime))
            .GET()
            .build();

    var response = httpClient.send(request, HttpResponse.BodyHandlers.ofString());

    assertEquals(404, response.statusCode());
}
@ParameterizedTest
@CsvSource({
"http://localhost:8085/store/window/STRING_STRING_WINDOW_STORE",
"http://localhost:8085/store/window/local/STRING_STRING_WINDOW_STORE"
})
void shouldGetAllInStringStringStore(String url) throws IOException, InterruptedException {
HttpRequest request =
HttpRequest.newBuilder().uri(URI.create(url)).GET().build();
HttpResponse<String> response = httpClient.send(request, HttpResponse.BodyHandlers.ofString());
List<StateStoreRecord> body = objectMapper.readValue(response.body(), new TypeReference<>() {});
assertEquals(200, response.statusCode());
assertEquals("user", body.get(0).getKey());
assertEquals("Doe", body.get(0).getValue());
assertNull(body.get(0).getTimestamp());
}
@ParameterizedTest
@CsvSource({
"http://localhost:8085/store/window/STRING_AVRO_WINDOW_STORE",
"http://localhost:8085/store/window/local/STRING_AVRO_WINDOW_STORE"
})
void shouldGetAllFromStringAvroStores(String url) throws IOException, InterruptedException {
HttpRequest request =
HttpRequest.newBuilder().uri(URI.create(url)).GET().build();
HttpResponse<String> response = httpClient.send(request, HttpResponse.BodyHandlers.ofString());
List<StateStoreRecord> body = objectMapper.readValue(response.body(), new TypeReference<>() {});
assertEquals(200, response.statusCode());
assertEquals("user", body.get(0).getKey());
assertEquals(1, ((Map<?, ?>) body.get(0).getValue()).get("id"));
assertEquals("John", ((Map<?, ?>) body.get(0).getValue()).get("firstName"));
assertEquals("Doe", ((Map<?, ?>) body.get(0).getValue()).get("lastName"));
assertEquals("2000-01-01T01:00:00Z", ((Map<?, ?>) body.get(0).getValue()).get("birthDate"));
assertNull(body.get(0).getTimestamp());
}
@Test
void shouldGetByKeyInStringAvroStoreFromService() {
List<StateStoreRecord> stateStoreRecord =
windowService.getByKey("STRING_AVRO_WINDOW_STORE", "user", Instant.EPOCH, Instant.now());
assertEquals("user", stateStoreRecord.get(0).getKey());
assertEquals(1L, ((Map<?, ?>) stateStoreRecord.get(0).getValue()).get("id"));
assertEquals("John", ((Map<?, ?>) stateStoreRecord.get(0).getValue()).get("firstName"));
assertEquals("Doe", ((Map<?, ?>) stateStoreRecord.get(0).getValue()).get("lastName"));
assertEquals(
"2000-01-01T01:00:00Z", ((Map<?, ?>) stateStoreRecord.get(0).getValue()).get("birthDate"));
assertNull(stateStoreRecord.get(0).getTimestamp());
}
@Test
void shouldGetAllInStringAvroStoreFromService() {
List<StateStoreRecord> stateQueryData =
windowService.getAll("STRING_AVRO_WINDOW_STORE", Instant.EPOCH, Instant.now());
assertEquals("user", stateQueryData.get(0).getKey());
assertEquals(1L, ((Map<?, ?>) stateQueryData.get(0).getValue()).get("id"));
assertEquals("John", ((Map<?, ?>) stateQueryData.get(0).getValue()).get("firstName"));
assertEquals("Doe", ((Map<?, ?>) stateQueryData.get(0).getValue()).get("lastName"));
assertEquals("2000-01-01T01:00:00Z", ((Map<?, ?>) stateQueryData.get(0).getValue()).get("birthDate"));
assertNull(stateQueryData.get(0).getTimestamp());
}
    /**
     * Kafka Streams starter implementation for integration tests. The topology consumes events from two topics
     * (string and Avro) and stores them in dedicated window stores so that they can be queried.
     */
    @Slf4j
    static class KafkaStreamsStarterStub extends KafkaStreamsStarter {
        @Override
        public void topology(StreamsBuilder streamsBuilder) {
            // String topic: each record is written as-is into a plain persistent window store.
            streamsBuilder.stream("STRING_TOPIC", Consumed.with(Serdes.String(), Serdes.String()))
                    .process(new ProcessorSupplier<String, String, String, String>() {
                        @Override
                        public Set<StoreBuilder<?>> stores() {
                            // Retention 5 min, window size 1 min, duplicates disabled.
                            StoreBuilder<WindowStore<String, String>> stringStringWindowStoreBuilder =
                                    Stores.windowStoreBuilder(
                                            Stores.persistentWindowStore(
                                                    "STRING_STRING_WINDOW_STORE",
                                                    Duration.ofMinutes(5),
                                                    Duration.ofMinutes(1),
                                                    false),
                                            Serdes.String(),
                                            Serdes.String());
                            return Set.of(stringStringWindowStoreBuilder);
                        }

                        @Override
                        public Processor<String, String, String, String> get() {
                            return new Processor<>() {
                                private WindowStore<String, String> stringStringWindowStore;

                                @Override
                                public void init(ProcessorContext<String, String> context) {
                                    this.stringStringWindowStore = context.getStateStore("STRING_STRING_WINDOW_STORE");
                                }

                                @Override
                                public void process(Record<String, String> message) {
                                    // The record timestamp is used as the window start time.
                                    stringStringWindowStore.put(message.key(), message.value(), message.timestamp());
                                }
                            };
                        }
                    });

            // Avro topic: each record is written to both a window store and a key-value store.
            streamsBuilder.stream(
                            "AVRO_TOPIC", Consumed.with(Serdes.String(), SerdesUtils.<KafkaUserStub>getValueSerdes()))
                    .process(new ProcessorSupplier<String, KafkaUserStub, String, KafkaUserStub>() {
                        @Override
                        public Set<StoreBuilder<?>> stores() {
                            StoreBuilder<WindowStore<String, KafkaUserStub>> stringAvroWindowStoreBuilder =
                                    Stores.windowStoreBuilder(
                                            Stores.persistentWindowStore(
                                                    "STRING_AVRO_WINDOW_STORE",
                                                    Duration.ofMinutes(5),
                                                    Duration.ofMinutes(1),
                                                    false),
                                            Serdes.String(),
                                            SerdesUtils.getValueSerdes());
                            // Extra key-value store used to verify wrong-store-type error handling.
                            StoreBuilder<KeyValueStore<String, KafkaUserStub>> stringAvroKeyValueStoreBuilder =
                                    Stores.keyValueStoreBuilder(
                                            Stores.persistentKeyValueStore("STRING_AVRO_KV_STORE"),
                                            Serdes.String(),
                                            SerdesUtils.getValueSerdes());
                            return Set.of(stringAvroWindowStoreBuilder, stringAvroKeyValueStoreBuilder);
                        }

                        @Override
                        public Processor<String, KafkaUserStub, String, KafkaUserStub> get() {
                            return new Processor<>() {
                                private WindowStore<String, KafkaUserStub> stringAvroWindowStore;
                                private KeyValueStore<String, KafkaUserStub> stringAvroKeyValueStore;

                                @Override
                                public void init(ProcessorContext<String, KafkaUserStub> context) {
                                    this.stringAvroWindowStore = context.getStateStore("STRING_AVRO_WINDOW_STORE");
                                    this.stringAvroKeyValueStore = context.getStateStore("STRING_AVRO_KV_STORE");
                                }

                                @Override
                                public void process(Record<String, KafkaUserStub> message) {
                                    stringAvroWindowStore.put(message.key(), message.value(), message.timestamp());
                                    stringAvroKeyValueStore.put(message.key(), message.value());
                                }
                            };
                        }
                    });
        }

        @Override
        public String dlqTopic() {
            return "DLQ_TOPIC";
        }

        @Override
        public void onStart(KafkaStreams kafkaStreams) {
            // Wipe local state so each test run starts from a clean store directory.
            kafkaStreams.cleanUp();
        }
    }
}
| java | Apache-2.0 | 7b912c3bc711629f1745ae4a60bffeaed649d07a | 2026-01-05T02:38:06.284501Z | false |
michelin/kstreamplify | https://github.com/michelin/kstreamplify/blob/7b912c3bc711629f1745ae4a60bffeaed649d07a/kstreamplify-core/src/test/java/com/michelin/kstreamplify/integration/interactivequeries/window/TimestampedWindowIntegrationTest.java | kstreamplify-core/src/test/java/com/michelin/kstreamplify/integration/interactivequeries/window/TimestampedWindowIntegrationTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.michelin.kstreamplify.integration.interactivequeries.window;
import static com.michelin.kstreamplify.property.PropertiesUtils.KAFKA_PROPERTIES_PREFIX;
import static com.michelin.kstreamplify.property.PropertiesUtils.PROPERTY_SEPARATOR;
import static io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG;
import static org.apache.kafka.clients.producer.ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG;
import static org.apache.kafka.clients.producer.ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG;
import static org.apache.kafka.streams.StreamsConfig.APPLICATION_ID_CONFIG;
import static org.apache.kafka.streams.StreamsConfig.BOOTSTRAP_SERVERS_CONFIG;
import static org.apache.kafka.streams.StreamsConfig.STATE_DIR_CONFIG;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import com.fasterxml.jackson.core.type.TypeReference;
import com.michelin.kstreamplify.avro.KafkaUserStub;
import com.michelin.kstreamplify.initializer.KafkaStreamsStarter;
import com.michelin.kstreamplify.integration.container.KafkaIntegrationTest;
import com.michelin.kstreamplify.serde.SerdesUtils;
import com.michelin.kstreamplify.service.interactivequeries.window.TimestampedWindowStoreService;
import com.michelin.kstreamplify.store.StateStoreRecord;
import com.michelin.kstreamplify.store.StreamsMetadata;
import io.confluent.kafka.serializers.KafkaAvroSerializer;
import java.io.IOException;
import java.net.URI;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.time.Duration;
import java.time.Instant;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.serialization.StringSerializer;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.kstream.Consumed;
import org.apache.kafka.streams.processor.api.Processor;
import org.apache.kafka.streams.processor.api.ProcessorContext;
import org.apache.kafka.streams.processor.api.ProcessorSupplier;
import org.apache.kafka.streams.processor.api.Record;
import org.apache.kafka.streams.state.KeyValueStore;
import org.apache.kafka.streams.state.StoreBuilder;
import org.apache.kafka.streams.state.Stores;
import org.apache.kafka.streams.state.TimestampedWindowStore;
import org.apache.kafka.streams.state.ValueAndTimestamp;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.CsvSource;
import org.testcontainers.junit.jupiter.Testcontainers;
/**
 * Integration tests for interactive queries on timestamped window stores. A record is produced to a string topic
 * and to an Avro topic; the stub topology below writes them into timestamped window stores, and the tests query
 * them back through the HTTP endpoints exposed on port 8084 and through the {@code TimestampedWindowStoreService}.
 */
@Slf4j
@Testcontainers
class TimestampedWindowIntegrationTest extends KafkaIntegrationTest {
    // Service under test, bound to the shared initializer created in globalSetUp().
    private final TimestampedWindowStoreService timestampedWindowService =
            new TimestampedWindowStoreService(initializer);

    /** Creates the topics, produces one record per topic, then boots the Kafka Streams application. */
    @BeforeAll
    static void globalSetUp() throws ExecutionException, InterruptedException {
        createTopics(
                broker.getBootstrapServers(),
                new TopicPartition("STRING_TOPIC", 3),
                new TopicPartition("AVRO_TOPIC", 2));
        try (KafkaProducer<String, String> stringKafkaProducer = new KafkaProducer<>(Map.of(
                BOOTSTRAP_SERVERS_CONFIG,
                broker.getBootstrapServers(),
                KEY_SERIALIZER_CLASS_CONFIG,
                StringSerializer.class.getName(),
                VALUE_SERIALIZER_CLASS_CONFIG,
                StringSerializer.class.getName()))) {
            ProducerRecord<String, String> message = new ProducerRecord<>("STRING_TOPIC", "user", "Doe");
            stringKafkaProducer.send(message).get();
        }
        try (KafkaProducer<String, KafkaUserStub> avroKafkaProducer = new KafkaProducer<>(Map.of(
                BOOTSTRAP_SERVERS_CONFIG,
                broker.getBootstrapServers(),
                KEY_SERIALIZER_CLASS_CONFIG,
                StringSerializer.class.getName(),
                VALUE_SERIALIZER_CLASS_CONFIG,
                KafkaAvroSerializer.class.getName(),
                SCHEMA_REGISTRY_URL_CONFIG,
                "http://" + schemaRegistry.getHost() + ":" + schemaRegistry.getFirstMappedPort()))) {
            KafkaUserStub kafkaUserStub = KafkaUserStub.newBuilder()
                    .setId(1L)
                    .setFirstName("John")
                    .setLastName("Doe")
                    .setBirthDate(Instant.parse("2000-01-01T01:00:00Z"))
                    .build();
            ProducerRecord<String, KafkaUserStub> message = new ProducerRecord<>("AVRO_TOPIC", "user", kafkaUserStub);
            avroKafkaProducer.send(message).get();
        }
        // Dedicated port, application id and state dir so this suite does not clash with the other IQ suites.
        initializer = new KafkaStreamInitializerStub(
                new KafkaStreamsStarterStub(),
                8084,
                Map.of(
                        KAFKA_PROPERTIES_PREFIX + PROPERTY_SEPARATOR + BOOTSTRAP_SERVERS_CONFIG,
                        broker.getBootstrapServers(),
                        KAFKA_PROPERTIES_PREFIX + PROPERTY_SEPARATOR + APPLICATION_ID_CONFIG,
                        "appTimestampedWindowInteractiveQueriesId",
                        KAFKA_PROPERTIES_PREFIX + PROPERTY_SEPARATOR + SCHEMA_REGISTRY_URL_CONFIG,
                        "http://" + schemaRegistry.getHost() + ":" + schemaRegistry.getFirstMappedPort(),
                        KAFKA_PROPERTIES_PREFIX + PROPERTY_SEPARATOR + STATE_DIR_CONFIG,
                        "/tmp/kstreamplify/kstreamplify-core-test/interactive-queries/timestamped-window"));
        initializer.start();
    }

    /** Waits for the streams app to run and for every store to have processed the produced records. */
    @BeforeEach
    void setUp() throws InterruptedException {
        waitingForKafkaStreamsToStart();
        waitingForLocalStoreToReachOffset(Map.of(
                "STRING_STRING_TIMESTAMPED_STORE", Map.of(2, 1L),
                "STRING_AVRO_TIMESTAMPED_STORE", Map.of(0, 1L),
                "STRING_AVRO_KV_STORE", Map.of(0, 1L)));
    }

    /** The store listing and metadata endpoints expose all stores, the host info and the assigned partitions. */
    @Test
    void shouldGetStoresAndStoreMetadata() throws IOException, InterruptedException {
        // Get stores
        HttpRequest storesRequest = HttpRequest.newBuilder()
                .uri(URI.create("http://localhost:8084/store"))
                .GET()
                .build();
        HttpResponse<String> storesResponse = httpClient.send(storesRequest, HttpResponse.BodyHandlers.ofString());
        List<String> stores = objectMapper.readValue(storesResponse.body(), new TypeReference<>() {});
        assertEquals(200, storesResponse.statusCode());
        assertTrue(stores.containsAll(
                List.of("STRING_STRING_TIMESTAMPED_STORE", "STRING_AVRO_TIMESTAMPED_STORE", "STRING_AVRO_KV_STORE")));
        // Get store metadata
        HttpRequest streamsMetadataRequest = HttpRequest.newBuilder()
                .uri(URI.create("http://localhost:8084/store/metadata/STRING_STRING_TIMESTAMPED_STORE"))
                .GET()
                .build();
        HttpResponse<String> streamsMetadataResponse =
                httpClient.send(streamsMetadataRequest, HttpResponse.BodyHandlers.ofString());
        List<StreamsMetadata> streamsMetadata =
                objectMapper.readValue(streamsMetadataResponse.body(), new TypeReference<>() {});
        assertEquals(200, streamsMetadataResponse.statusCode());
        assertEquals(
                Set.of("STRING_STRING_TIMESTAMPED_STORE", "STRING_AVRO_TIMESTAMPED_STORE", "STRING_AVRO_KV_STORE"),
                streamsMetadata.get(0).getStateStoreNames());
        assertEquals("localhost", streamsMetadata.get(0).getHostInfo().host());
        assertEquals(8084, streamsMetadata.get(0).getHostInfo().port());
        assertEquals(
                Set.of("AVRO_TOPIC-0", "AVRO_TOPIC-1", "STRING_TOPIC-0", "STRING_TOPIC-1", "STRING_TOPIC-2"),
                streamsMetadata.get(0).getTopicPartitions());
    }

    /** Unknown stores and unknown keys must both map to a 404 with an explanatory body. */
    @ParameterizedTest
    @CsvSource({
        "http://localhost:8084/store/window/timestamped/WRONG_STORE/user,State store WRONG_STORE not found",
        "http://localhost:8084/store/window/timestamped/STRING_STRING_TIMESTAMPED_STORE/wrongKey,Key wrongKey not found",
        "http://localhost:8084/store/window/timestamped/WRONG_STORE,State store WRONG_STORE not found"
    })
    void shouldNotFoundWhenKeyOrStoreNotFound(String url, String message) throws IOException, InterruptedException {
        HttpRequest request =
                HttpRequest.newBuilder().uri(URI.create(url)).GET().build();
        HttpResponse<String> response = httpClient.send(request, HttpResponse.BodyHandlers.ofString());
        assertEquals(404, response.statusCode());
        assertEquals(message, response.body());
    }

    /** Querying a key-value store through the timestamped-window endpoint must fail with a 400. */
    @Test
    void shouldGetErrorWhenQueryingWrongStoreType() throws IOException, InterruptedException {
        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create("http://localhost:8084/store/window/timestamped/STRING_AVRO_KV_STORE/user"))
                .GET()
                .build();
        HttpResponse<String> response = httpClient.send(request, HttpResponse.BodyHandlers.ofString());
        assertEquals(400, response.statusCode());
        assertNotNull(response.body());
    }

    /** Fetching by key from the String/String store returns the value together with its timestamp. */
    @Test
    void shouldGetByKeyInStringStringStore() throws IOException, InterruptedException {
        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create("http://localhost:8084/store/window/timestamped/STRING_STRING_TIMESTAMPED_STORE/user"))
                .GET()
                .build();
        HttpResponse<String> response = httpClient.send(request, HttpResponse.BodyHandlers.ofString());
        List<StateStoreRecord> body = objectMapper.readValue(response.body(), new TypeReference<>() {});
        assertEquals(200, response.statusCode());
        assertEquals("user", body.get(0).getKey());
        assertEquals("Doe", body.get(0).getValue());
        // Timestamped stores expose the record timestamp, unlike plain window stores.
        assertNotNull(body.get(0).getTimestamp());
    }

    /** Fetching by key from the String/Avro store returns the user as a JSON map with its timestamp. */
    @Test
    void shouldGetByKeyInStringAvroStore() throws IOException, InterruptedException {
        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create("http://localhost:8084/store/window/timestamped/STRING_AVRO_TIMESTAMPED_STORE/user"))
                .GET()
                .build();
        HttpResponse<String> response = httpClient.send(request, HttpResponse.BodyHandlers.ofString());
        List<StateStoreRecord> body = objectMapper.readValue(response.body(), new TypeReference<>() {});
        assertEquals(200, response.statusCode());
        assertEquals("user", body.get(0).getKey());
        // JSON round-trip turns the Avro long id into an Integer.
        assertEquals(1, ((Map<?, ?>) body.get(0).getValue()).get("id"));
        assertEquals("John", ((Map<?, ?>) body.get(0).getValue()).get("firstName"));
        assertEquals("Doe", ((Map<?, ?>) body.get(0).getValue()).get("lastName"));
        assertEquals("2000-01-01T01:00:00Z", ((Map<?, ?>) body.get(0).getValue()).get("birthDate"));
        assertNotNull(body.get(0).getTimestamp());
    }

    /** A 404 is expected when the requested window range starts after every stored window. */
    @ParameterizedTest
    @CsvSource({
        "http://localhost:8084/store/window/timestamped/STRING_STRING_TIMESTAMPED_STORE/user",
        "http://localhost:8084/store/window/timestamped/STRING_AVRO_TIMESTAMPED_STORE/user"
    })
    void shouldNotFoundWhenStartTimeIsTooLate(String url) throws IOException, InterruptedException {
        Instant tooLate = Instant.now().plus(Duration.ofDays(1));
        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create(url + "?startTime=" + tooLate))
                .GET()
                .build();
        HttpResponse<String> response = httpClient.send(request, HttpResponse.BodyHandlers.ofString());
        assertEquals(404, response.statusCode());
    }

    /** A 404 is expected when the requested window range ends before every stored window. */
    @ParameterizedTest
    @CsvSource({
        "http://localhost:8084/store/window/timestamped/STRING_STRING_TIMESTAMPED_STORE/user",
        "http://localhost:8084/store/window/timestamped/STRING_AVRO_TIMESTAMPED_STORE/user"
    })
    void shouldNotFoundWhenEndTimeIsTooEarly(String url) throws IOException, InterruptedException {
        Instant tooEarly = Instant.now().minus(Duration.ofDays(1));
        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create(url + "?endTime=" + tooEarly))
                .GET()
                .build();
        HttpResponse<String> response = httpClient.send(request, HttpResponse.BodyHandlers.ofString());
        assertEquals(404, response.statusCode());
    }

    /** Listing the String/String store (remote and local endpoints) returns the single produced record. */
    @ParameterizedTest
    @CsvSource({
        "http://localhost:8084/store/window/timestamped/STRING_STRING_TIMESTAMPED_STORE",
        "http://localhost:8084/store/window/timestamped/local/STRING_STRING_TIMESTAMPED_STORE"
    })
    void shouldGetAllInStringStringStore(String url) throws IOException, InterruptedException {
        HttpRequest request =
                HttpRequest.newBuilder().uri(URI.create(url)).GET().build();
        HttpResponse<String> response = httpClient.send(request, HttpResponse.BodyHandlers.ofString());
        List<StateStoreRecord> body = objectMapper.readValue(response.body(), new TypeReference<>() {});
        assertEquals(200, response.statusCode());
        assertEquals("user", body.get(0).getKey());
        assertEquals("Doe", body.get(0).getValue());
        assertNotNull(body.get(0).getTimestamp());
    }

    /** Listing the String/Avro store (remote and local endpoints) returns the user as a JSON map. */
    @ParameterizedTest
    @CsvSource({
        "http://localhost:8084/store/window/timestamped/STRING_AVRO_TIMESTAMPED_STORE",
        "http://localhost:8084/store/window/timestamped/local/STRING_AVRO_TIMESTAMPED_STORE"
    })
    void shouldGetAllFromStringAvroStores(String url) throws IOException, InterruptedException {
        HttpRequest request =
                HttpRequest.newBuilder().uri(URI.create(url)).GET().build();
        HttpResponse<String> response = httpClient.send(request, HttpResponse.BodyHandlers.ofString());
        List<StateStoreRecord> body = objectMapper.readValue(response.body(), new TypeReference<>() {});
        assertEquals(200, response.statusCode());
        assertEquals("user", body.get(0).getKey());
        assertEquals(1, ((Map<?, ?>) body.get(0).getValue()).get("id"));
        assertEquals("John", ((Map<?, ?>) body.get(0).getValue()).get("firstName"));
        assertEquals("Doe", ((Map<?, ?>) body.get(0).getValue()).get("lastName"));
        assertEquals("2000-01-01T01:00:00Z", ((Map<?, ?>) body.get(0).getValue()).get("birthDate"));
        assertNotNull(body.get(0).getTimestamp());
    }

    /** Querying the service directly by key keeps Avro's long typing for the id. */
    @Test
    void shouldGetByKeyInStringAvroStoreFromService() {
        List<StateStoreRecord> stateStoreRecord = timestampedWindowService.getByKey(
                "STRING_AVRO_TIMESTAMPED_STORE", "user", Instant.EPOCH, Instant.now());
        assertEquals("user", stateStoreRecord.get(0).getKey());
        assertEquals(1L, ((Map<?, ?>) stateStoreRecord.get(0).getValue()).get("id"));
        assertEquals("John", ((Map<?, ?>) stateStoreRecord.get(0).getValue()).get("firstName"));
        assertEquals("Doe", ((Map<?, ?>) stateStoreRecord.get(0).getValue()).get("lastName"));
        assertEquals(
                "2000-01-01T01:00:00Z", ((Map<?, ?>) stateStoreRecord.get(0).getValue()).get("birthDate"));
        assertNotNull(stateStoreRecord.get(0).getTimestamp());
    }

    /** Listing the store through the service directly returns the single Avro user with its timestamp. */
    @Test
    void shouldGetAllInStringAvroStoreFromService() {
        List<StateStoreRecord> stateQueryData =
                timestampedWindowService.getAll("STRING_AVRO_TIMESTAMPED_STORE", Instant.EPOCH, Instant.now());
        assertEquals("user", stateQueryData.get(0).getKey());
        assertEquals(1L, ((Map<?, ?>) stateQueryData.get(0).getValue()).get("id"));
        assertEquals("John", ((Map<?, ?>) stateQueryData.get(0).getValue()).get("firstName"));
        assertEquals("Doe", ((Map<?, ?>) stateQueryData.get(0).getValue()).get("lastName"));
        assertEquals("2000-01-01T01:00:00Z", ((Map<?, ?>) stateQueryData.get(0).getValue()).get("birthDate"));
        assertNotNull(stateQueryData.get(0).getTimestamp());
    }

    /**
     * Kafka Streams starter implementation for integration tests. The topology consumes events from two topics
     * (string and Avro) and stores them in dedicated timestamped stores so that they can be queried.
     */
    @Slf4j
    static class KafkaStreamsStarterStub extends KafkaStreamsStarter {
        @Override
        public void topology(StreamsBuilder streamsBuilder) {
            // String topic: each record is written into a timestamped persistent window store.
            streamsBuilder.stream("STRING_TOPIC", Consumed.with(Serdes.String(), Serdes.String()))
                    .process(new ProcessorSupplier<String, String, String, String>() {
                        @Override
                        public Set<StoreBuilder<?>> stores() {
                            // Retention 5 min, window size 1 min, duplicates disabled.
                            StoreBuilder<TimestampedWindowStore<String, String>> stringStringWindowStoreBuilder =
                                    Stores.timestampedWindowStoreBuilder(
                                            Stores.persistentTimestampedWindowStore(
                                                    "STRING_STRING_TIMESTAMPED_STORE",
                                                    Duration.ofMinutes(5),
                                                    Duration.ofMinutes(1),
                                                    false),
                                            Serdes.String(),
                                            Serdes.String());
                            return Set.of(stringStringWindowStoreBuilder);
                        }

                        @Override
                        public Processor<String, String, String, String> get() {
                            return new Processor<>() {
                                private TimestampedWindowStore<String, String> stringStringWindowStore;

                                @Override
                                public void init(ProcessorContext<String, String> context) {
                                    this.stringStringWindowStore =
                                            context.getStateStore("STRING_STRING_TIMESTAMPED_STORE");
                                }

                                @Override
                                public void process(Record<String, String> message) {
                                    // The record timestamp is stored in the value and used as window start time.
                                    stringStringWindowStore.put(
                                            message.key(),
                                            ValueAndTimestamp.make(message.value(), message.timestamp()),
                                            message.timestamp());
                                }
                            };
                        }
                    });

            // Avro topic: each record is written to both a timestamped window store and a key-value store.
            streamsBuilder.stream(
                            "AVRO_TOPIC", Consumed.with(Serdes.String(), SerdesUtils.<KafkaUserStub>getValueSerdes()))
                    .process(new ProcessorSupplier<String, KafkaUserStub, String, KafkaUserStub>() {
                        @Override
                        public Set<StoreBuilder<?>> stores() {
                            StoreBuilder<TimestampedWindowStore<String, KafkaUserStub>> stringAvroWindowStoreBuilder =
                                    Stores.timestampedWindowStoreBuilder(
                                            Stores.persistentTimestampedWindowStore(
                                                    "STRING_AVRO_TIMESTAMPED_STORE",
                                                    Duration.ofMinutes(5),
                                                    Duration.ofMinutes(1),
                                                    false),
                                            Serdes.String(),
                                            SerdesUtils.getValueSerdes());
                            // Extra key-value store used to verify wrong-store-type error handling.
                            StoreBuilder<KeyValueStore<String, KafkaUserStub>> stringAvroKeyValueStoreBuilder =
                                    Stores.keyValueStoreBuilder(
                                            Stores.persistentKeyValueStore("STRING_AVRO_KV_STORE"),
                                            Serdes.String(),
                                            SerdesUtils.getValueSerdes());
                            return Set.of(stringAvroWindowStoreBuilder, stringAvroKeyValueStoreBuilder);
                        }

                        @Override
                        public Processor<String, KafkaUserStub, String, KafkaUserStub> get() {
                            return new Processor<>() {
                                private TimestampedWindowStore<String, KafkaUserStub> stringAvroWindowStore;
                                private KeyValueStore<String, KafkaUserStub> stringAvroKeyValueStore;

                                @Override
                                public void init(ProcessorContext<String, KafkaUserStub> context) {
                                    this.stringAvroWindowStore = context.getStateStore("STRING_AVRO_TIMESTAMPED_STORE");
                                    this.stringAvroKeyValueStore = context.getStateStore("STRING_AVRO_KV_STORE");
                                }

                                @Override
                                public void process(Record<String, KafkaUserStub> message) {
                                    stringAvroWindowStore.put(
                                            message.key(),
                                            ValueAndTimestamp.make(message.value(), message.timestamp()),
                                            message.timestamp());
                                    stringAvroKeyValueStore.put(message.key(), message.value());
                                }
                            };
                        }
                    });
        }

        @Override
        public String dlqTopic() {
            return "DLQ_TOPIC";
        }

        @Override
        public void onStart(KafkaStreams kafkaStreams) {
            // Wipe local state so each test run starts from a clean store directory.
            kafkaStreams.cleanUp();
        }
    }
}
| java | Apache-2.0 | 7b912c3bc711629f1745ae4a60bffeaed649d07a | 2026-01-05T02:38:06.284501Z | false |
michelin/kstreamplify | https://github.com/michelin/kstreamplify/blob/7b912c3bc711629f1745ae4a60bffeaed649d07a/kstreamplify-core/src/test/java/com/michelin/kstreamplify/integration/interactivequeries/keyvalue/TimestampedKeyValueIntegrationTest.java | kstreamplify-core/src/test/java/com/michelin/kstreamplify/integration/interactivequeries/keyvalue/TimestampedKeyValueIntegrationTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.michelin.kstreamplify.integration.interactivequeries.keyvalue;
import static com.michelin.kstreamplify.property.PropertiesUtils.KAFKA_PROPERTIES_PREFIX;
import static com.michelin.kstreamplify.property.PropertiesUtils.PROPERTY_SEPARATOR;
import static io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG;
import static org.apache.kafka.clients.producer.ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG;
import static org.apache.kafka.clients.producer.ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG;
import static org.apache.kafka.streams.StreamsConfig.APPLICATION_ID_CONFIG;
import static org.apache.kafka.streams.StreamsConfig.BOOTSTRAP_SERVERS_CONFIG;
import static org.apache.kafka.streams.StreamsConfig.STATE_DIR_CONFIG;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import com.fasterxml.jackson.core.type.TypeReference;
import com.michelin.kstreamplify.avro.KafkaUserStub;
import com.michelin.kstreamplify.initializer.KafkaStreamsStarter;
import com.michelin.kstreamplify.integration.container.KafkaIntegrationTest;
import com.michelin.kstreamplify.serde.SerdesUtils;
import com.michelin.kstreamplify.service.interactivequeries.keyvalue.TimestampedKeyValueStoreService;
import com.michelin.kstreamplify.store.StateStoreRecord;
import io.confluent.kafka.serializers.KafkaAvroSerializer;
import java.io.IOException;
import java.net.URI;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.time.Duration;
import java.time.Instant;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.serialization.StringSerializer;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.kstream.Consumed;
import org.apache.kafka.streams.processor.api.Processor;
import org.apache.kafka.streams.processor.api.ProcessorContext;
import org.apache.kafka.streams.processor.api.ProcessorSupplier;
import org.apache.kafka.streams.processor.api.Record;
import org.apache.kafka.streams.state.StoreBuilder;
import org.apache.kafka.streams.state.Stores;
import org.apache.kafka.streams.state.TimestampedKeyValueStore;
import org.apache.kafka.streams.state.ValueAndTimestamp;
import org.apache.kafka.streams.state.WindowStore;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.CsvSource;
import org.testcontainers.junit.jupiter.Testcontainers;
@Slf4j
@Testcontainers
class TimestampedKeyValueIntegrationTest extends KafkaIntegrationTest {
private final TimestampedKeyValueStoreService timestampedKeyValueService =
new TimestampedKeyValueStoreService(initializer);
    /** Creates the topics, produces one record per topic, then boots the Kafka Streams application. */
    @BeforeAll
    static void globalSetUp() throws ExecutionException, InterruptedException {
        createTopics(
                broker.getBootstrapServers(),
                new TopicPartition("STRING_TOPIC", 3),
                new TopicPartition("AVRO_TOPIC", 2));
        try (KafkaProducer<String, String> stringKafkaProducer = new KafkaProducer<>(Map.of(
                BOOTSTRAP_SERVERS_CONFIG,
                broker.getBootstrapServers(),
                KEY_SERIALIZER_CLASS_CONFIG,
                StringSerializer.class.getName(),
                VALUE_SERIALIZER_CLASS_CONFIG,
                StringSerializer.class.getName()))) {
            ProducerRecord<String, String> message = new ProducerRecord<>("STRING_TOPIC", "user", "Doe");
            stringKafkaProducer.send(message).get();
        }
        try (KafkaProducer<String, KafkaUserStub> avroKafkaProducer = new KafkaProducer<>(Map.of(
                BOOTSTRAP_SERVERS_CONFIG,
                broker.getBootstrapServers(),
                KEY_SERIALIZER_CLASS_CONFIG,
                StringSerializer.class.getName(),
                VALUE_SERIALIZER_CLASS_CONFIG,
                KafkaAvroSerializer.class.getName(),
                SCHEMA_REGISTRY_URL_CONFIG,
                "http://" + schemaRegistry.getHost() + ":" + schemaRegistry.getFirstMappedPort()))) {
            KafkaUserStub kafkaUserStub = KafkaUserStub.newBuilder()
                    .setId(1L)
                    .setFirstName("John")
                    .setLastName("Doe")
                    .setBirthDate(Instant.parse("2000-01-01T01:00:00Z"))
                    .build();
            ProducerRecord<String, KafkaUserStub> message = new ProducerRecord<>("AVRO_TOPIC", "user", kafkaUserStub);
            avroKafkaProducer.send(message).get();
        }
        // Dedicated port, application id and state dir so this suite does not clash with the other IQ suites.
        initializer = new KafkaStreamInitializerStub(
                new KafkaStreamsStarterStub(),
                8083,
                Map.of(
                        KAFKA_PROPERTIES_PREFIX + PROPERTY_SEPARATOR + BOOTSTRAP_SERVERS_CONFIG,
                        broker.getBootstrapServers(),
                        KAFKA_PROPERTIES_PREFIX + PROPERTY_SEPARATOR + APPLICATION_ID_CONFIG,
                        "appTimestampedKeyValueInteractiveQueriesId",
                        KAFKA_PROPERTIES_PREFIX + PROPERTY_SEPARATOR + SCHEMA_REGISTRY_URL_CONFIG,
                        "http://" + schemaRegistry.getHost() + ":" + schemaRegistry.getFirstMappedPort(),
                        KAFKA_PROPERTIES_PREFIX + PROPERTY_SEPARATOR + STATE_DIR_CONFIG,
                        "/tmp/kstreamplify/kstreamplify-core-test/interactive-queries/timestamped-key-value"));
        initializer.start();
    }
    /** Waits for the streams app to run and for every store to have processed the produced records. */
    @BeforeEach
    void setUp() throws InterruptedException {
        waitingForKafkaStreamsToStart();
        waitingForLocalStoreToReachOffset(Map.of(
                "STRING_STRING_TIMESTAMPED_STORE", Map.of(2, 1L),
                "STRING_AVRO_TIMESTAMPED_STORE", Map.of(0, 1L),
                "STRING_AVRO_WINDOW_STORE", Map.of(0, 1L)));
    }
@ParameterizedTest
@CsvSource({
"http://localhost:8083/store/key-value/timestamped/WRONG_STORE/user,State store WRONG_STORE not found",
"http://localhost:8083/store/key-value/timestamped/STRING_STRING_TIMESTAMPED_STORE/wrongKey,Key wrongKey not found",
"http://localhost:8083/store/key-value/timestamped/WRONG_STORE,State store WRONG_STORE not found"
})
void shouldNotFoundWhenKeyOrStoreNotFound(String url, String message) throws IOException, InterruptedException {
HttpRequest request =
HttpRequest.newBuilder().uri(URI.create(url)).GET().build();
HttpResponse<String> response = httpClient.send(request, HttpResponse.BodyHandlers.ofString());
assertEquals(404, response.statusCode());
assertEquals(message, response.body());
}
/** Asserts that querying a window store through the timestamped key-value endpoint is rejected with a 400. */
@Test
void shouldGetErrorWhenQueryingWrongStoreType() throws IOException, InterruptedException {
    URI windowStoreUri =
            URI.create("http://localhost:8083/store/key-value/timestamped/STRING_AVRO_WINDOW_STORE/user");
    HttpRequest getRequest = HttpRequest.newBuilder().uri(windowStoreUri).GET().build();

    HttpResponse<String> httpResponse = httpClient.send(getRequest, HttpResponse.BodyHandlers.ofString());

    assertEquals(400, httpResponse.statusCode());
    assertNotNull(httpResponse.body());
}
/** Asserts that a record can be fetched by key from the String/String timestamped store over HTTP. */
@Test
void shouldGetByKeyInStringStringStore() throws IOException, InterruptedException {
    URI storeUri =
            URI.create("http://localhost:8083/store/key-value/timestamped/STRING_STRING_TIMESTAMPED_STORE/user");
    HttpRequest getRequest = HttpRequest.newBuilder().uri(storeUri).GET().build();

    HttpResponse<String> httpResponse = httpClient.send(getRequest, HttpResponse.BodyHandlers.ofString());
    StateStoreRecord stateStoreRecord = objectMapper.readValue(httpResponse.body(), StateStoreRecord.class);

    assertEquals(200, httpResponse.statusCode());
    assertEquals("user", stateStoreRecord.getKey());
    assertEquals("Doe", stateStoreRecord.getValue());
    // Timestamped stores expose the record timestamp alongside the value.
    assertNotNull(stateStoreRecord.getTimestamp());
}
/** Asserts that an Avro record can be fetched by key and is rendered as a JSON object of its fields. */
@Test
void shouldGetByKeyInStringAvroStore() throws IOException, InterruptedException {
    URI storeUri =
            URI.create("http://localhost:8083/store/key-value/timestamped/STRING_AVRO_TIMESTAMPED_STORE/user");
    HttpRequest getRequest = HttpRequest.newBuilder().uri(storeUri).GET().build();

    HttpResponse<String> httpResponse = httpClient.send(getRequest, HttpResponse.BodyHandlers.ofString());
    StateStoreRecord stateStoreRecord = objectMapper.readValue(httpResponse.body(), StateStoreRecord.class);

    assertEquals(200, httpResponse.statusCode());
    assertEquals("user", stateStoreRecord.getKey());

    // The Avro value comes back as a generic map of field name -> field value.
    Map<?, ?> userFields = (Map<?, ?>) stateStoreRecord.getValue();
    assertEquals(1, userFields.get("id"));
    assertEquals("John", userFields.get("firstName"));
    assertEquals("Doe", userFields.get("lastName"));
    assertEquals("2000-01-01T01:00:00Z", userFields.get("birthDate"));
    assertNotNull(stateStoreRecord.getTimestamp());
}
/** Asserts that all records of the String/String store are returned via both the distributed and the local endpoint. */
@ParameterizedTest
@CsvSource({
    "http://localhost:8083/store/key-value/timestamped/STRING_STRING_TIMESTAMPED_STORE",
    "http://localhost:8083/store/key-value/timestamped/local/STRING_STRING_TIMESTAMPED_STORE"
})
void shouldGetAllInStringStringStore(String url) throws IOException, InterruptedException {
    HttpRequest getRequest =
            HttpRequest.newBuilder().uri(URI.create(url)).GET().build();

    HttpResponse<String> httpResponse = httpClient.send(getRequest, HttpResponse.BodyHandlers.ofString());
    List<StateStoreRecord> records = objectMapper.readValue(httpResponse.body(), new TypeReference<>() {});

    assertEquals(200, httpResponse.statusCode());

    StateStoreRecord first = records.get(0);
    assertEquals("user", first.getKey());
    assertEquals("Doe", first.getValue());
    assertNotNull(first.getTimestamp());
}
/** Asserts that all Avro records are returned, as field maps, via both the distributed and the local endpoint. */
@ParameterizedTest
@CsvSource({
    "http://localhost:8083/store/key-value/timestamped/STRING_AVRO_TIMESTAMPED_STORE",
    "http://localhost:8083/store/key-value/timestamped/local/STRING_AVRO_TIMESTAMPED_STORE"
})
void shouldGetFromStringAvroStores(String url) throws IOException, InterruptedException {
    HttpRequest getRequest =
            HttpRequest.newBuilder().uri(URI.create(url)).GET().build();

    HttpResponse<String> httpResponse = httpClient.send(getRequest, HttpResponse.BodyHandlers.ofString());
    List<StateStoreRecord> records = objectMapper.readValue(httpResponse.body(), new TypeReference<>() {});

    assertEquals(200, httpResponse.statusCode());

    StateStoreRecord first = records.get(0);
    assertEquals("user", first.getKey());

    Map<?, ?> userFields = (Map<?, ?>) first.getValue();
    assertEquals(1, userFields.get("id"));
    assertEquals("John", userFields.get("firstName"));
    assertEquals("Doe", userFields.get("lastName"));
    assertEquals("2000-01-01T01:00:00Z", userFields.get("birthDate"));
    assertNotNull(first.getTimestamp());
}
/** Asserts that the service API returns the Avro record by key, keeping the Avro long id as {@code 1L}. */
@Test
void shouldGetByKeyInStringAvroStoreFromService() {
    StateStoreRecord stateStoreRecord =
            timestampedKeyValueService.getByKey("STRING_AVRO_TIMESTAMPED_STORE", "user");

    assertEquals("user", stateStoreRecord.getKey());

    Map<?, ?> userFields = (Map<?, ?>) stateStoreRecord.getValue();
    // The service path does not go through JSON, so the Avro long id stays a Long.
    assertEquals(1L, userFields.get("id"));
    assertEquals("John", userFields.get("firstName"));
    assertEquals("Doe", userFields.get("lastName"));
    assertEquals("2000-01-01T01:00:00Z", userFields.get("birthDate"));
    assertNotNull(stateStoreRecord.getTimestamp());
}
/** Asserts that the service API returns all records of the Avro store with their fields intact. */
@Test
void shouldGetAllInStringAvroStoreFromService() {
    List<StateStoreRecord> stateQueryData = timestampedKeyValueService.getAll("STRING_AVRO_TIMESTAMPED_STORE");

    StateStoreRecord first = stateQueryData.get(0);
    assertEquals("user", first.getKey());

    Map<?, ?> userFields = (Map<?, ?>) first.getValue();
    assertEquals(1L, userFields.get("id"));
    assertEquals("John", userFields.get("firstName"));
    assertEquals("Doe", userFields.get("lastName"));
    assertEquals("2000-01-01T01:00:00Z", userFields.get("birthDate"));
    assertNotNull(first.getTimestamp());
}
/**
 * Kafka Streams starter implementation for integration tests. The topology consumes events from the string
 * and Avro topics and materializes them in dedicated timestamped and window stores so that they can be queried.
 */
@Slf4j
static class KafkaStreamsStarterStub extends KafkaStreamsStarter {
    @Override
    public void topology(StreamsBuilder streamsBuilder) {
        // STRING_TOPIC -> STRING_STRING_TIMESTAMPED_STORE (persistent timestamped key-value store).
        streamsBuilder.stream("STRING_TOPIC", Consumed.with(Serdes.String(), Serdes.String()))
                .process(new ProcessorSupplier<String, String, String, String>() {
                    @Override
                    public Set<StoreBuilder<?>> stores() {
                        // Declaring the store here lets Kafka Streams create it and connect it to the processor.
                        StoreBuilder<TimestampedKeyValueStore<String, String>> stringStringKeyValueStoreBuilder =
                                Stores.timestampedKeyValueStoreBuilder(
                                        Stores.persistentTimestampedKeyValueStore(
                                                "STRING_STRING_TIMESTAMPED_STORE"),
                                        Serdes.String(),
                                        Serdes.String());
                        return Set.of(stringStringKeyValueStoreBuilder);
                    }

                    @Override
                    public Processor<String, String, String, String> get() {
                        return new Processor<>() {
                            private TimestampedKeyValueStore<String, String> stringStringKeyValueStore;

                            @Override
                            public void init(ProcessorContext<String, String> context) {
                                this.stringStringKeyValueStore =
                                        context.getStateStore("STRING_STRING_TIMESTAMPED_STORE");
                            }

                            @Override
                            public void process(Record<String, String> message) {
                                // Keep the record timestamp next to the value so queries can expose it.
                                stringStringKeyValueStore.put(
                                        message.key(),
                                        ValueAndTimestamp.make(message.value(), message.timestamp()));
                            }
                        };
                    }
                });

        // AVRO_TOPIC -> STRING_AVRO_TIMESTAMPED_STORE and STRING_AVRO_WINDOW_STORE.
        streamsBuilder.stream(
                        "AVRO_TOPIC", Consumed.with(Serdes.String(), SerdesUtils.<KafkaUserStub>getValueSerdes()))
                .process(new ProcessorSupplier<String, KafkaUserStub, String, KafkaUserStub>() {
                    @Override
                    public Set<StoreBuilder<?>> stores() {
                        StoreBuilder<TimestampedKeyValueStore<String, KafkaUserStub>>
                                stringAvroKeyValueStoreBuilder = Stores.timestampedKeyValueStoreBuilder(
                                        Stores.persistentTimestampedKeyValueStore("STRING_AVRO_TIMESTAMPED_STORE"),
                                        Serdes.String(),
                                        SerdesUtils.getValueSerdes());
                        // Window store: 5 minutes retention, 1 minute window size, duplicates not retained.
                        StoreBuilder<WindowStore<String, KafkaUserStub>> stringAvroWindowStoreBuilder =
                                Stores.windowStoreBuilder(
                                        Stores.persistentWindowStore(
                                                "STRING_AVRO_WINDOW_STORE",
                                                Duration.ofMinutes(5),
                                                Duration.ofMinutes(1),
                                                false),
                                        Serdes.String(),
                                        SerdesUtils.getValueSerdes());
                        return Set.of(stringAvroKeyValueStoreBuilder, stringAvroWindowStoreBuilder);
                    }

                    @Override
                    public Processor<String, KafkaUserStub, String, KafkaUserStub> get() {
                        return new Processor<>() {
                            private TimestampedKeyValueStore<String, KafkaUserStub> stringAvroKeyValueStore;
                            private WindowStore<String, KafkaUserStub> stringAvroWindowStore;

                            @Override
                            public void init(ProcessorContext<String, KafkaUserStub> context) {
                                this.stringAvroKeyValueStore =
                                        context.getStateStore("STRING_AVRO_TIMESTAMPED_STORE");
                                this.stringAvroWindowStore = context.getStateStore("STRING_AVRO_WINDOW_STORE");
                            }

                            @Override
                            public void process(Record<String, KafkaUserStub> message) {
                                // Write every record to both stores so both query endpoints can be exercised.
                                stringAvroKeyValueStore.put(
                                        message.key(),
                                        ValueAndTimestamp.make(message.value(), message.timestamp()));
                                stringAvroWindowStore.put(message.key(), message.value(), message.timestamp());
                            }
                        };
                    }
                });
    }

    /** Dead letter queue topic used by the error handling machinery. */
    @Override
    public String dlqTopic() {
        return "DLQ_TOPIC";
    }

    /** Wipes local state on start so every run begins from freshly restored stores. */
    @Override
    public void onStart(KafkaStreams kafkaStreams) {
        kafkaStreams.cleanUp();
    }
}
}
| java | Apache-2.0 | 7b912c3bc711629f1745ae4a60bffeaed649d07a | 2026-01-05T02:38:06.284501Z | false |
michelin/kstreamplify | https://github.com/michelin/kstreamplify/blob/7b912c3bc711629f1745ae4a60bffeaed649d07a/kstreamplify-core/src/test/java/com/michelin/kstreamplify/integration/interactivequeries/keyvalue/KeyValueIntegrationTest.java | kstreamplify-core/src/test/java/com/michelin/kstreamplify/integration/interactivequeries/keyvalue/KeyValueIntegrationTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.michelin.kstreamplify.integration.interactivequeries.keyvalue;
import static com.michelin.kstreamplify.property.PropertiesUtils.KAFKA_PROPERTIES_PREFIX;
import static com.michelin.kstreamplify.property.PropertiesUtils.PROPERTY_SEPARATOR;
import static io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG;
import static org.apache.kafka.clients.producer.ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG;
import static org.apache.kafka.clients.producer.ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG;
import static org.apache.kafka.streams.StreamsConfig.APPLICATION_ID_CONFIG;
import static org.apache.kafka.streams.StreamsConfig.BOOTSTRAP_SERVERS_CONFIG;
import static org.apache.kafka.streams.StreamsConfig.STATE_DIR_CONFIG;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import com.fasterxml.jackson.core.type.TypeReference;
import com.michelin.kstreamplify.avro.KafkaUserStub;
import com.michelin.kstreamplify.initializer.KafkaStreamsStarter;
import com.michelin.kstreamplify.integration.container.KafkaIntegrationTest;
import com.michelin.kstreamplify.serde.SerdesUtils;
import com.michelin.kstreamplify.service.interactivequeries.keyvalue.KeyValueStoreService;
import com.michelin.kstreamplify.store.StateStoreRecord;
import com.michelin.kstreamplify.store.StreamsMetadata;
import io.confluent.kafka.serializers.KafkaAvroSerializer;
import java.io.IOException;
import java.net.URI;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.time.Duration;
import java.time.Instant;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.serialization.StringSerializer;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.kstream.Consumed;
import org.apache.kafka.streams.processor.api.Processor;
import org.apache.kafka.streams.processor.api.ProcessorContext;
import org.apache.kafka.streams.processor.api.ProcessorSupplier;
import org.apache.kafka.streams.processor.api.Record;
import org.apache.kafka.streams.state.KeyValueStore;
import org.apache.kafka.streams.state.StoreBuilder;
import org.apache.kafka.streams.state.Stores;
import org.apache.kafka.streams.state.WindowStore;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.CsvSource;
import org.testcontainers.junit.jupiter.Testcontainers;
/**
 * Integration tests for the (non-timestamped) key-value interactive query endpoints and service.
 *
 * <p>A Kafka broker and a schema registry run in Testcontainers (provided by {@code KafkaIntegrationTest}).
 * One String record and one Avro record are produced, a Kafka Streams application materializes them into
 * key-value stores, and the tests query those stores over HTTP (port 8082) and through
 * {@link KeyValueStoreService}.
 */
@Slf4j
@Testcontainers
class KeyValueIntegrationTest extends KafkaIntegrationTest {
    // Service-level access to the key-value stores, backed by the shared initializer.
    private final KeyValueStoreService keyValueService = new KeyValueStoreService(initializer);

    /** Creates the topics, produces one String and one Avro record, then starts the Streams application. */
    @BeforeAll
    static void globalSetUp() throws ExecutionException, InterruptedException {
        createTopics(
                broker.getBootstrapServers(),
                new TopicPartition("STRING_TOPIC", 3),
                new TopicPartition("AVRO_TOPIC", 2));

        // One String record; it will end up in STRING_STRING_KV_STORE.
        try (KafkaProducer<String, String> stringKafkaProducer = new KafkaProducer<>(Map.of(
                BOOTSTRAP_SERVERS_CONFIG,
                broker.getBootstrapServers(),
                KEY_SERIALIZER_CLASS_CONFIG,
                StringSerializer.class.getName(),
                VALUE_SERIALIZER_CLASS_CONFIG,
                StringSerializer.class.getName()))) {
            ProducerRecord<String, String> message = new ProducerRecord<>("STRING_TOPIC", "user", "Doe");
            stringKafkaProducer.send(message).get();
        }

        // One Avro record; it will end up in STRING_AVRO_KV_STORE and STRING_AVRO_WINDOW_STORE.
        try (KafkaProducer<String, KafkaUserStub> avroKafkaProducer = new KafkaProducer<>(Map.of(
                BOOTSTRAP_SERVERS_CONFIG,
                broker.getBootstrapServers(),
                KEY_SERIALIZER_CLASS_CONFIG,
                StringSerializer.class.getName(),
                VALUE_SERIALIZER_CLASS_CONFIG,
                KafkaAvroSerializer.class.getName(),
                SCHEMA_REGISTRY_URL_CONFIG,
                "http://" + schemaRegistry.getHost() + ":" + schemaRegistry.getFirstMappedPort()))) {
            KafkaUserStub kafkaUserStub = KafkaUserStub.newBuilder()
                    .setId(1L)
                    .setFirstName("John")
                    .setLastName("Doe")
                    .setBirthDate(Instant.parse("2000-01-01T01:00:00Z"))
                    .build();
            ProducerRecord<String, KafkaUserStub> message = new ProducerRecord<>("AVRO_TOPIC", "user", kafkaUserStub);
            avroKafkaProducer.send(message).get();
        }

        // Start the Streams application on port 8082 with a dedicated application id and state directory.
        initializer = new KafkaStreamInitializerStub(
                new KafkaStreamsStarterStub(),
                8082,
                Map.of(
                        KAFKA_PROPERTIES_PREFIX + PROPERTY_SEPARATOR + BOOTSTRAP_SERVERS_CONFIG,
                        broker.getBootstrapServers(),
                        KAFKA_PROPERTIES_PREFIX + PROPERTY_SEPARATOR + APPLICATION_ID_CONFIG,
                        "appKeyValueInteractiveQueriesId",
                        KAFKA_PROPERTIES_PREFIX + PROPERTY_SEPARATOR + SCHEMA_REGISTRY_URL_CONFIG,
                        "http://" + schemaRegistry.getHost() + ":" + schemaRegistry.getFirstMappedPort(),
                        KAFKA_PROPERTIES_PREFIX + PROPERTY_SEPARATOR + STATE_DIR_CONFIG,
                        "/tmp/kstreamplify/kstreamplify-core-test/interactive-queries/key-value"));

        initializer.start();
    }

    /**
     * Waits, before each test, for the Kafka Streams instance to start and for the stores to have
     * consumed the records produced during the global set up (store -> partition -> expected offset).
     */
    @BeforeEach
    void setUp() throws InterruptedException {
        waitingForKafkaStreamsToStart();
        waitingForLocalStoreToReachOffset(Map.of(
                "STRING_STRING_KV_STORE", Map.of(2, 1L),
                "STRING_AVRO_KV_STORE", Map.of(0, 1L),
                "STRING_AVRO_WINDOW_STORE", Map.of(0, 1L)));
    }

    /** Asserts the store list endpoint and the store metadata endpoint (host, port, stores, partitions). */
    @Test
    void shouldGetStoresAndStoreMetadata() throws IOException, InterruptedException {
        // Get stores
        HttpRequest storesRequest = HttpRequest.newBuilder()
                .uri(URI.create("http://localhost:8082/store"))
                .GET()
                .build();

        HttpResponse<String> storesResponse = httpClient.send(storesRequest, HttpResponse.BodyHandlers.ofString());
        List<String> stores = objectMapper.readValue(storesResponse.body(), new TypeReference<>() {});

        assertEquals(200, storesResponse.statusCode());
        assertTrue(stores.containsAll(
                List.of("STRING_STRING_KV_STORE", "STRING_AVRO_KV_STORE", "STRING_AVRO_WINDOW_STORE")));

        // Get store metadata
        HttpRequest streamsMetadataRequest = HttpRequest.newBuilder()
                .uri(URI.create("http://localhost:8082/store/metadata/STRING_STRING_KV_STORE"))
                .GET()
                .build();

        HttpResponse<String> streamsMetadataResponse =
                httpClient.send(streamsMetadataRequest, HttpResponse.BodyHandlers.ofString());
        List<StreamsMetadata> streamsMetadata =
                objectMapper.readValue(streamsMetadataResponse.body(), new TypeReference<>() {});

        assertEquals(200, streamsMetadataResponse.statusCode());
        assertEquals(
                Set.of("STRING_STRING_KV_STORE", "STRING_AVRO_KV_STORE", "STRING_AVRO_WINDOW_STORE"),
                streamsMetadata.get(0).getStateStoreNames());
        // The single Streams instance hosts every partition of both topics.
        assertEquals("localhost", streamsMetadata.get(0).getHostInfo().host());
        assertEquals(8082, streamsMetadata.get(0).getHostInfo().port());
        assertEquals(
                Set.of("AVRO_TOPIC-0", "AVRO_TOPIC-1", "STRING_TOPIC-0", "STRING_TOPIC-1", "STRING_TOPIC-2"),
                streamsMetadata.get(0).getTopicPartitions());
    }

    /** Asserts that unknown stores or keys answer 404 with an explicit plain-text message. */
    @ParameterizedTest
    @CsvSource({
        "http://localhost:8082/store/key-value/WRONG_STORE/user,State store WRONG_STORE not found",
        "http://localhost:8082/store/key-value/STRING_STRING_KV_STORE/wrongKey,Key wrongKey not found",
        "http://localhost:8082/store/key-value/WRONG_STORE,State store WRONG_STORE not found"
    })
    void shouldNotFoundWhenKeyOrStoreNotFound(String url, String message) throws IOException, InterruptedException {
        HttpRequest request =
                HttpRequest.newBuilder().uri(URI.create(url)).GET().build();

        HttpResponse<String> response = httpClient.send(request, HttpResponse.BodyHandlers.ofString());

        assertEquals(404, response.statusCode());
        assertEquals(message, response.body());
    }

    /** Asserts that querying a window store through the key-value endpoint is rejected with a 400. */
    @Test
    void shouldGetErrorWhenQueryingWrongStoreType() throws IOException, InterruptedException {
        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create("http://localhost:8082/store/key-value/STRING_AVRO_WINDOW_STORE/user"))
                .GET()
                .build();

        HttpResponse<String> response = httpClient.send(request, HttpResponse.BodyHandlers.ofString());

        assertEquals(400, response.statusCode());
        assertNotNull(response.body());
    }

    /** Asserts that a record can be fetched by key; plain key-value stores expose no timestamp. */
    @Test
    void shouldGetByKeyInStringStringStore() throws IOException, InterruptedException {
        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create("http://localhost:8082/store/key-value/STRING_STRING_KV_STORE/user"))
                .GET()
                .build();

        HttpResponse<String> response = httpClient.send(request, HttpResponse.BodyHandlers.ofString());
        StateStoreRecord body = objectMapper.readValue(response.body(), StateStoreRecord.class);

        assertEquals(200, response.statusCode());
        assertEquals("user", body.getKey());
        assertEquals("Doe", body.getValue());
        assertNull(body.getTimestamp());
    }

    /** Asserts that an Avro record is returned as a map of fields; id is an Integer after JSON round-trip. */
    @Test
    void shouldGetByKeyInStringAvroStore() throws IOException, InterruptedException {
        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create("http://localhost:8082/store/key-value/STRING_AVRO_KV_STORE/user"))
                .GET()
                .build();

        HttpResponse<String> response = httpClient.send(request, HttpResponse.BodyHandlers.ofString());
        StateStoreRecord body = objectMapper.readValue(response.body(), StateStoreRecord.class);

        assertEquals(200, response.statusCode());
        assertEquals("user", body.getKey());
        assertEquals(1, ((Map<?, ?>) body.getValue()).get("id"));
        assertEquals("John", ((Map<?, ?>) body.getValue()).get("firstName"));
        assertEquals("Doe", ((Map<?, ?>) body.getValue()).get("lastName"));
        assertEquals("2000-01-01T01:00:00Z", ((Map<?, ?>) body.getValue()).get("birthDate"));
        assertNull(body.getTimestamp());
    }

    /** Asserts that all records are returned via both the distributed and the local endpoint. */
    @ParameterizedTest
    @CsvSource({
        "http://localhost:8082/store/key-value/STRING_STRING_KV_STORE",
        "http://localhost:8082/store/key-value/local/STRING_STRING_KV_STORE"
    })
    void shouldGetAllInStringStringStore(String url) throws IOException, InterruptedException {
        HttpRequest request =
                HttpRequest.newBuilder().uri(URI.create(url)).GET().build();

        HttpResponse<String> response = httpClient.send(request, HttpResponse.BodyHandlers.ofString());
        List<StateStoreRecord> body = objectMapper.readValue(response.body(), new TypeReference<>() {});

        assertEquals(200, response.statusCode());
        assertEquals("user", body.get(0).getKey());
        assertEquals("Doe", body.get(0).getValue());
        assertNull(body.get(0).getTimestamp());
    }

    /** Asserts that all Avro records are returned, as field maps, via both endpoints. */
    @ParameterizedTest
    @CsvSource({
        "http://localhost:8082/store/key-value/STRING_AVRO_KV_STORE",
        "http://localhost:8082/store/key-value/local/STRING_AVRO_KV_STORE"
    })
    void shouldGetAllFromStringAvroStores(String url) throws IOException, InterruptedException {
        HttpRequest request =
                HttpRequest.newBuilder().uri(URI.create(url)).GET().build();

        HttpResponse<String> response = httpClient.send(request, HttpResponse.BodyHandlers.ofString());
        List<StateStoreRecord> body = objectMapper.readValue(response.body(), new TypeReference<>() {});

        assertEquals(200, response.statusCode());
        assertEquals("user", body.get(0).getKey());
        assertEquals(1, ((Map<?, ?>) body.get(0).getValue()).get("id"));
        assertEquals("John", ((Map<?, ?>) body.get(0).getValue()).get("firstName"));
        assertEquals("Doe", ((Map<?, ?>) body.get(0).getValue()).get("lastName"));
        assertEquals("2000-01-01T01:00:00Z", ((Map<?, ?>) body.get(0).getValue()).get("birthDate"));
        assertNull(body.get(0).getTimestamp());
    }

    /** Asserts the service API returns the record by key; the Avro long id stays a Long (no JSON round-trip). */
    @Test
    void shouldGetByKeyInStringAvroStoreFromService() {
        StateStoreRecord stateStoreRecord = keyValueService.getByKey("STRING_AVRO_KV_STORE", "user");

        assertEquals("user", stateStoreRecord.getKey());
        assertEquals(1L, ((Map<?, ?>) stateStoreRecord.getValue()).get("id"));
        assertEquals("John", ((Map<?, ?>) stateStoreRecord.getValue()).get("firstName"));
        assertEquals("Doe", ((Map<?, ?>) stateStoreRecord.getValue()).get("lastName"));
        assertEquals("2000-01-01T01:00:00Z", ((Map<?, ?>) stateStoreRecord.getValue()).get("birthDate"));
        assertNull(stateStoreRecord.getTimestamp());
    }

    /** Asserts the service API returns all records of the Avro store with their fields intact. */
    @Test
    void shouldGetAllInStringAvroStoreFromService() {
        List<StateStoreRecord> stateQueryData = keyValueService.getAll("STRING_AVRO_KV_STORE");

        assertEquals("user", stateQueryData.get(0).getKey());
        assertEquals(1L, ((Map<?, ?>) stateQueryData.get(0).getValue()).get("id"));
        assertEquals("John", ((Map<?, ?>) stateQueryData.get(0).getValue()).get("firstName"));
        assertEquals("Doe", ((Map<?, ?>) stateQueryData.get(0).getValue()).get("lastName"));
        assertEquals("2000-01-01T01:00:00Z", ((Map<?, ?>) stateQueryData.get(0).getValue()).get("birthDate"));
        assertNull(stateQueryData.get(0).getTimestamp());
    }

    /**
     * Kafka Streams starter implementation for integration tests. The topology consumes events from the string
     * and Avro topics and materializes them in dedicated key-value and window stores so that they can be queried.
     */
    @Slf4j
    static class KafkaStreamsStarterStub extends KafkaStreamsStarter {
        @Override
        public void topology(StreamsBuilder streamsBuilder) {
            // STRING_TOPIC -> STRING_STRING_KV_STORE (persistent key-value store, no timestamp kept).
            streamsBuilder.stream("STRING_TOPIC", Consumed.with(Serdes.String(), Serdes.String()))
                    .process(new ProcessorSupplier<String, String, String, String>() {
                        @Override
                        public Set<StoreBuilder<?>> stores() {
                            StoreBuilder<KeyValueStore<String, String>> stringStringKeyValueStoreBuilder =
                                    Stores.keyValueStoreBuilder(
                                            Stores.persistentKeyValueStore("STRING_STRING_KV_STORE"),
                                            Serdes.String(),
                                            Serdes.String());
                            return Set.of(stringStringKeyValueStoreBuilder);
                        }

                        @Override
                        public Processor<String, String, String, String> get() {
                            return new Processor<>() {
                                private KeyValueStore<String, String> stringStringKeyValueStore;

                                @Override
                                public void init(ProcessorContext<String, String> context) {
                                    this.stringStringKeyValueStore = context.getStateStore("STRING_STRING_KV_STORE");
                                }

                                @Override
                                public void process(Record<String, String> message) {
                                    stringStringKeyValueStore.put(message.key(), message.value());
                                }
                            };
                        }
                    });

            // AVRO_TOPIC -> STRING_AVRO_KV_STORE and STRING_AVRO_WINDOW_STORE.
            streamsBuilder.stream(
                            "AVRO_TOPIC", Consumed.with(Serdes.String(), SerdesUtils.<KafkaUserStub>getValueSerdes()))
                    .process(new ProcessorSupplier<String, KafkaUserStub, String, KafkaUserStub>() {
                        @Override
                        public Set<StoreBuilder<?>> stores() {
                            StoreBuilder<KeyValueStore<String, KafkaUserStub>> stringAvroKeyValueStoreBuilder =
                                    Stores.keyValueStoreBuilder(
                                            Stores.persistentKeyValueStore("STRING_AVRO_KV_STORE"),
                                            Serdes.String(),
                                            SerdesUtils.getValueSerdes());
                            // Window store: 5 minutes retention, 1 minute window size, duplicates not retained.
                            StoreBuilder<WindowStore<String, KafkaUserStub>> stringAvroWindowStoreBuilder =
                                    Stores.windowStoreBuilder(
                                            Stores.persistentWindowStore(
                                                    "STRING_AVRO_WINDOW_STORE",
                                                    Duration.ofMinutes(5),
                                                    Duration.ofMinutes(1),
                                                    false),
                                            Serdes.String(),
                                            SerdesUtils.getValueSerdes());
                            return Set.of(stringAvroKeyValueStoreBuilder, stringAvroWindowStoreBuilder);
                        }

                        @Override
                        public Processor<String, KafkaUserStub, String, KafkaUserStub> get() {
                            return new Processor<>() {
                                private KeyValueStore<String, KafkaUserStub> stringAvroKeyValueStore;
                                private WindowStore<String, KafkaUserStub> stringAvroWindowStore;

                                @Override
                                public void init(ProcessorContext<String, KafkaUserStub> context) {
                                    this.stringAvroKeyValueStore = context.getStateStore("STRING_AVRO_KV_STORE");
                                    this.stringAvroWindowStore = context.getStateStore("STRING_AVRO_WINDOW_STORE");
                                }

                                @Override
                                public void process(Record<String, KafkaUserStub> message) {
                                    // Write to both stores so both query endpoints can be exercised.
                                    stringAvroKeyValueStore.put(message.key(), message.value());
                                    stringAvroWindowStore.put(message.key(), message.value(), message.timestamp());
                                }
                            };
                        }
                    });
        }

        /** Dead letter queue topic used by the error handling machinery. */
        @Override
        public String dlqTopic() {
            return "DLQ_TOPIC";
        }

        /** Wipes local state on start so every run begins from freshly restored stores. */
        @Override
        public void onStart(KafkaStreams kafkaStreams) {
            kafkaStreams.cleanUp();
        }
    }
}
| java | Apache-2.0 | 7b912c3bc711629f1745ae4a60bffeaed649d07a | 2026-01-05T02:38:06.284501Z | false |
michelin/kstreamplify | https://github.com/michelin/kstreamplify/blob/7b912c3bc711629f1745ae4a60bffeaed649d07a/kstreamplify-core/src/test/java/com/michelin/kstreamplify/error/ProcessingErrorTest.java | kstreamplify-core/src/test/java/com/michelin/kstreamplify/error/ProcessingErrorTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.michelin.kstreamplify.error;
import static org.junit.jupiter.api.Assertions.assertEquals;
import com.michelin.kstreamplify.avro.KafkaError;
import org.junit.jupiter.api.Test;
/** Unit tests for {@code ProcessingError}. */
class ProcessingErrorTest {
    /** A processing error built from a String record keeps exception, context message and record as-is. */
    @Test
    void shouldCreateProcessingErrorFromStringRecord() {
        String contextMessage = "Some context message";
        Exception exception = new Exception("Test Exception");
        String kafkaRecord = "Sample Kafka Record";

        ProcessingError<String> processingError = new ProcessingError<>(exception, contextMessage, kafkaRecord);

        // Assert
        assertEquals(exception, processingError.getException());
        assertEquals(contextMessage, processingError.getContextMessage());
        assertEquals(kafkaRecord, processingError.getKafkaRecord());
    }

    /** Omitting the context message falls back to the "No context message" default. */
    @Test
    void shouldCreateProcessingErrorWithNoContextMessage() {
        Exception exception = new Exception("Test Exception");
        String kafkaRecord = "Sample Kafka Record";

        ProcessingError<String> processingError = new ProcessingError<>(exception, kafkaRecord);

        // Assert
        assertEquals(exception, processingError.getException());
        assertEquals("No context message", processingError.getContextMessage());
        assertEquals(kafkaRecord, processingError.getKafkaRecord());
    }

    /** An Avro record is stored as its JSON representation rather than the Avro object itself. */
    @Test
    void shouldCreateProcessingErrorFromAvroRecord() {
        String contextMessage = "Some context message";
        Exception exception = new Exception("Test Exception");
        KafkaError kafkaRecord = KafkaError.newBuilder()
                .setCause("Cause")
                .setOffset(1L)
                .setPartition(1)
                .setTopic("Topic")
                .setValue("Value")
                .setApplicationId("ApplicationId")
                .build();

        ProcessingError<KafkaError> processingError = new ProcessingError<>(exception, contextMessage, kafkaRecord);

        assertEquals(exception, processingError.getException());
        assertEquals(contextMessage, processingError.getContextMessage());
        // NOTE(review): the text block's internal indentation (2-space JSON fields) was
        // reconstructed — confirm it matches ProcessingError's pretty-printed output.
        assertEquals("""
            {
              "partition": 1,
              "offset": 1,
              "cause": "Cause",
              "topic": "Topic",
              "applicationId": "ApplicationId",
              "value": "Value"
            }""", processingError.getKafkaRecord());
    }
}
| java | Apache-2.0 | 7b912c3bc711629f1745ae4a60bffeaed649d07a | 2026-01-05T02:38:06.284501Z | false |
michelin/kstreamplify | https://github.com/michelin/kstreamplify/blob/7b912c3bc711629f1745ae4a60bffeaed649d07a/kstreamplify-core/src/test/java/com/michelin/kstreamplify/error/ProcessingResultTest.java | kstreamplify-core/src/test/java/com/michelin/kstreamplify/error/ProcessingResultTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.michelin.kstreamplify.error;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.nio.charset.StandardCharsets;
import java.util.Collections;
import org.apache.kafka.common.header.Headers;
import org.apache.kafka.common.header.internals.RecordHeader;
import org.apache.kafka.common.header.internals.RecordHeaders;
import org.apache.kafka.streams.processor.api.Record;
import org.junit.jupiter.api.Test;
class ProcessingResultTest {
/** A success result is valid, carries the wrapped value, and has no error. */
@Test
void shouldCreateProcessingResultSuccess() {
    ProcessingResult<String, Integer> result = ProcessingResult.success("Success");

    assertTrue(result.isValid());
    assertEquals("Success", result.getValue());
    assertNull(result.getError());
}
/** Wrapping a record keeps its key and timestamp and wraps its value in a valid success result. */
@Test
void shouldCreateWrappedProcessingResultFromRecord() {
    long now = System.currentTimeMillis();
    Record<String, String> source = new Record<>("key", "value", now);

    Record<String, ProcessingResult<String, Integer>> wrapped = ProcessingResult.wrapRecordSuccess(source);

    assertTrue(wrapped.value().isValid());
    assertEquals(source.key(), wrapped.key());
    assertNotNull(wrapped.value());
    assertEquals(source.value(), wrapped.value().getValue());
    assertEquals(now, wrapped.timestamp());
    assertNull(wrapped.value().getError());
}
/** Wrapping key/value/timestamp parameters produces a valid success record with no error. */
@Test
void shouldCreateWrappedProcessingResultFromParameters() {
    long now = System.currentTimeMillis();

    Record<String, ProcessingResult<String, Integer>> wrapped =
            ProcessingResult.wrapRecordSuccess("key", "value", now);

    assertTrue(wrapped.value().isValid());
    assertEquals("key", wrapped.key());
    assertNotNull(wrapped.value());
    assertEquals("value", wrapped.value().getValue());
    assertEquals(now, wrapped.timestamp());
    assertNull(wrapped.value().getError());
}
/** Wrapping a record with headers preserves key, value, timestamp and the headers. */
@Test
void shouldWrapRecordSuccessWithHeadersFromRecord() {
    byte[] headerBytes = "header_value".getBytes(StandardCharsets.UTF_8);
    Headers sourceHeaders =
            new RecordHeaders(Collections.singletonList(new RecordHeader("header_key", headerBytes)));
    Record<String, String> source = new Record<>("key", "value", System.currentTimeMillis(), sourceHeaders);

    Record<String, ProcessingResult<String, Integer>> wrapped =
            ProcessingResult.wrapRecordSuccessWithHeaders(source);

    assertTrue(wrapped.value().isValid());
    assertEquals(source.key(), wrapped.key());
    assertNotNull(wrapped.value());
    assertEquals(source.value(), wrapped.value().getValue());
    assertEquals(source.timestamp(), wrapped.timestamp());
    assertEquals(1, wrapped.headers().toArray().length);
    assertEquals(
            source.headers().lastHeader("header_key").value(),
            wrapped.headers().lastHeader("header_key").value());
    assertNull(wrapped.value().getError());
}
/** Wrapping key/value/timestamp/headers parameters produces a valid success record carrying the headers. */
@Test
void shouldWrapRecordSuccessWithHeadersFromParameters() {
    long now = System.currentTimeMillis();
    byte[] headerBytes = "header_value".getBytes(StandardCharsets.UTF_8);
    Headers headers =
            new RecordHeaders(Collections.singletonList(new RecordHeader("header_key", headerBytes)));

    Record<String, ProcessingResult<String, Integer>> wrapped =
            ProcessingResult.wrapRecordSuccess("key", "value", now, headers);

    assertTrue(wrapped.value().isValid());
    assertEquals("key", wrapped.key());
    assertNotNull(wrapped.value());
    assertEquals("value", wrapped.value().getValue());
    assertEquals(now, wrapped.timestamp());
    assertEquals(1, wrapped.headers().toArray().length);
    assertEquals(
            headers.lastHeader("header_key").value(),
            wrapped.headers().lastHeader("header_key").value());
    assertNull(wrapped.value().getError());
}
@Test
void shouldCreateFailedProcessingResult() {
    // Given an exception and the value that failed to be processed
    Exception cause = new Exception("Exception");

    // When building a failed result without a context message
    ProcessingResult<String, String> result = ProcessingResult.fail(cause, "value");

    // Then the result is invalid, has no value, and carries the error with a default context
    assertFalse(result.isValid());
    assertNull(result.getValue());
    assertNotNull(result.getError());
    assertEquals(cause, result.getError().getException());
    assertEquals("value", result.getError().getKafkaRecord());
    assertEquals("No context message", result.getError().getContextMessage());
}
@Test
void shouldCreateFailedProcessingResultWithContextMessage() {
    // Given an exception, the failed value, and an explicit context message
    Exception cause = new Exception("Exception");
    String context = "Context message";

    // When building a failed result with that context
    ProcessingResult<String, String> result = ProcessingResult.fail(cause, "value", context);

    // Then the error exposes the exception, the failed value and the supplied context
    assertFalse(result.isValid());
    assertNull(result.getValue());
    assertNotNull(result.getError());
    assertEquals(cause, result.getError().getException());
    assertEquals("value", result.getError().getKafkaRecord());
    assertEquals(context, result.getError().getContextMessage());
}
@Test
void shouldCreateWrappedFailedProcessingResultFromRecord() {
    // Given a source record and the exception it triggered
    Exception cause = new Exception("Exception");
    Record<String, String> source = new Record<>("key", "value", System.currentTimeMillis());

    // When wrapping the failure around the record
    Record<String, ProcessingResult<String, String>> wrapped =
            ProcessingResult.wrapRecordFailure(cause, source);

    // Then the key/timestamp are preserved and the payload becomes the error
    assertEquals(source.key(), wrapped.key());
    assertEquals(source.timestamp(), wrapped.timestamp());
    assertNotNull(wrapped.value());
    assertFalse(wrapped.value().isValid());
    assertNull(wrapped.value().getValue());
    assertNotNull(wrapped.value().getError());
    assertEquals(cause, wrapped.value().getError().getException());
    assertEquals(source.value(), wrapped.value().getError().getKafkaRecord());
    assertEquals("No context message", wrapped.value().getError().getContextMessage());
}
@Test
void shouldCreateWrappedFailedProcessingResultWithContextMessageFromRecord() {
    // Given a source record, the exception it triggered, and an explicit context message
    Exception cause = new Exception("Exception");
    String context = "Context message";
    Record<String, String> source = new Record<>("key", "value", System.currentTimeMillis());

    // When wrapping the failure around the record with that context
    Record<String, ProcessingResult<String, String>> wrapped =
            ProcessingResult.wrapRecordFailure(cause, source, context);

    // Then the key/timestamp are preserved and the error carries the supplied context
    assertEquals(source.key(), wrapped.key());
    assertEquals(source.timestamp(), wrapped.timestamp());
    assertNotNull(wrapped.value());
    assertFalse(wrapped.value().isValid());
    assertNull(wrapped.value().getValue());
    assertNotNull(wrapped.value().getError());
    assertEquals(cause, wrapped.value().getError().getException());
    assertEquals(source.value(), wrapped.value().getError().getKafkaRecord());
    assertEquals(context, wrapped.value().getError().getContextMessage());
}
@Test
void shouldCreateWrappedFailedProcessingResultFromParameters() {
    // Given plain key/value/timestamp parameters and the exception that occurred
    final String recordKey = "key";
    final String recordValue = "value";
    final long recordTimestamp = System.currentTimeMillis();
    Exception cause = new Exception("Exception");

    // When wrapping the failure from the raw parameters
    Record<String, ProcessingResult<String, String>> wrapped =
            ProcessingResult.wrapRecordFailure(cause, recordKey, recordValue, recordTimestamp);

    // Then the coordinates are preserved and the error carries the default context
    assertEquals(recordKey, wrapped.key());
    assertEquals(recordTimestamp, wrapped.timestamp());
    assertNotNull(wrapped.value());
    assertFalse(wrapped.value().isValid());
    assertNull(wrapped.value().getValue());
    assertNotNull(wrapped.value().getError());
    assertEquals(cause, wrapped.value().getError().getException());
    assertEquals(recordValue, wrapped.value().getError().getKafkaRecord());
    assertEquals("No context message", wrapped.value().getError().getContextMessage());
}
@Test
void shouldCreateWrappedFailedProcessingResultFromParametersWithContextMessage() {
    // Given plain parameters, the exception that occurred, and an explicit context message
    final String recordKey = "key";
    final String recordValue = "value";
    final long recordTimestamp = System.currentTimeMillis();
    Exception cause = new Exception("Exception");
    String context = "Context message";

    // When wrapping the failure from the raw parameters with that context
    Record<String, ProcessingResult<String, String>> wrapped =
            ProcessingResult.wrapRecordFailure(cause, recordKey, recordValue, recordTimestamp, context);

    // Then the coordinates are preserved and the error carries the supplied context
    assertEquals(recordKey, wrapped.key());
    assertEquals(recordTimestamp, wrapped.timestamp());
    assertNotNull(wrapped.value());
    assertFalse(wrapped.value().isValid());
    assertNull(wrapped.value().getValue());
    assertNotNull(wrapped.value().getError());
    assertEquals(cause, wrapped.value().getError().getException());
    assertEquals(recordValue, wrapped.value().getError().getKafkaRecord());
    assertEquals(context, wrapped.value().getError().getContextMessage());
}
@Test
void shouldProcessingResultBeValid() {
    // A success result reports valid; a failed result does not
    ProcessingResult<String, Integer> success = ProcessingResult.success("Value");
    assertTrue(success.isValid());

    ProcessingResult<String, Integer> failure = ProcessingResult.fail(new Exception(), 42);
    assertFalse(failure.isValid());
}
}
| java | Apache-2.0 | 7b912c3bc711629f1745ae4a60bffeaed649d07a | 2026-01-05T02:38:06.284501Z | false |
michelin/kstreamplify | https://github.com/michelin/kstreamplify/blob/7b912c3bc711629f1745ae4a60bffeaed649d07a/kstreamplify-core/src/test/java/com/michelin/kstreamplify/error/DlqProductionExceptionHandlerTest.java | kstreamplify-core/src/test/java/com/michelin/kstreamplify/error/DlqProductionExceptionHandlerTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.michelin.kstreamplify.error;
import static org.apache.kafka.streams.StreamsConfig.APPLICATION_ID_CONFIG;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.mockito.Mockito.when;
import com.michelin.kstreamplify.avro.KafkaError;
import com.michelin.kstreamplify.context.KafkaStreamsExecutionContext;
import io.confluent.kafka.serializers.KafkaAvroSerializer;
import io.confluent.kafka.serializers.KafkaAvroSerializerConfig;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import org.apache.kafka.clients.consumer.RetriableCommitFailedException;
import org.apache.kafka.clients.producer.MockProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.KafkaException;
import org.apache.kafka.common.serialization.ByteArraySerializer;
import org.apache.kafka.common.serialization.Serializer;
import org.apache.kafka.streams.errors.ErrorHandlerContext;
import org.apache.kafka.streams.errors.ProductionExceptionHandler;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
@ExtendWith(MockitoExtension.class)
class DlqProductionExceptionHandlerTest {

    private static final String DLQ_TOPIC = "DLQ_TOPIC";

    @Mock
    private ErrorHandlerContext errorHandlerContext;

    @Mock
    private ProducerRecord<byte[], byte[]> producerRecord;

    private Producer<byte[], KafkaError> producer;

    /** Builds a mock DLQ producer and resets the shared execution context before each test. */
    @BeforeEach
    @SuppressWarnings("unchecked")
    void setUp() {
        // Raw cast: KafkaAvroSerializer implements Serializer<Object>, not Serializer<KafkaError>
        Serializer<KafkaError> errorSerializer = (Serializer) new KafkaAvroSerializer();
        errorSerializer.configure(Map.of(KafkaAvroSerializerConfig.SCHEMA_REGISTRY_URL_CONFIG, "mock://"), false);
        producer = new MockProducer<>(true, new ByteArraySerializer(), errorSerializer);

        // Clear any DLQ topic left over from a previous test
        KafkaStreamsExecutionContext.setDlqTopicName(null);

        Properties properties = new Properties();
        properties.setProperty(APPLICATION_ID_CONFIG, "test-app");
        KafkaStreamsExecutionContext.setProperties(properties);
    }

    @Test
    void shouldReturnFailIfNoDlq() {
        // No DLQ topic is configured, so the handler cannot forward and must fail
        DlqProductionExceptionHandler handler = new DlqProductionExceptionHandler(producer);

        ProductionExceptionHandler.ProductionExceptionHandlerResponse response =
                handler.handle(errorHandlerContext, producerRecord, new RuntimeException("Exception..."));

        assertEquals(ProductionExceptionHandler.ProductionExceptionHandlerResponse.FAIL, response);
    }

    @Test
    void shouldReturnContinueOnExceptionDuringHandle() {
        // The record mock returns nulls, making the DLQ forwarding itself throw; the
        // handler swallows that and continues
        DlqProductionExceptionHandler handler = new DlqProductionExceptionHandler(producer);
        KafkaStreamsExecutionContext.setDlqTopicName(DLQ_TOPIC);

        ProductionExceptionHandler.ProductionExceptionHandlerResponse response =
                handler.handle(errorHandlerContext, producerRecord, new KafkaException("Exception..."));

        assertEquals(ProductionExceptionHandler.ProductionExceptionHandlerResponse.CONTINUE, response);
    }

    @Test
    void shouldReturnContinueOnKafkaException() {
        // A non-retriable KafkaException on a well-formed record is routed to the DLQ
        DlqProductionExceptionHandler handler = new DlqProductionExceptionHandler(producer);
        KafkaStreamsExecutionContext.setDlqTopicName(DLQ_TOPIC);

        when(producerRecord.key()).thenReturn("key".getBytes(StandardCharsets.UTF_8));
        when(producerRecord.value()).thenReturn("value".getBytes(StandardCharsets.UTF_8));
        when(producerRecord.topic()).thenReturn("topic");

        ProductionExceptionHandler.ProductionExceptionHandlerResponse response =
                handler.handle(errorHandlerContext, producerRecord, new KafkaException("Exception..."));

        assertEquals(ProductionExceptionHandler.ProductionExceptionHandlerResponse.CONTINUE, response);
    }

    @Test
    void shouldReturnFailOnRetriableException() {
        // Retriable exceptions are not dead-lettered: the handler lets Streams retry/fail
        DlqProductionExceptionHandler handler = new DlqProductionExceptionHandler(producer);
        KafkaStreamsExecutionContext.setDlqTopicName(DLQ_TOPIC);

        ProductionExceptionHandler.ProductionExceptionHandlerResponse response =
                handler.handle(errorHandlerContext, producerRecord, new RetriableCommitFailedException("Exception..."));

        assertEquals(ProductionExceptionHandler.ProductionExceptionHandlerResponse.FAIL, response);
    }

    @Test
    void shouldConfigure() {
        // configure() must instantiate the shared static DLQ producer
        Map<String, Object> configs = new HashMap<>();
        configs.put("bootstrap.servers", "localhost:9092");
        configs.put("schema.registry.url", "localhost:8080");
        configs.put("acks", "all");

        DlqProductionExceptionHandler handler = new DlqProductionExceptionHandler();
        handler.configure(configs);

        assertNotNull(DlqExceptionHandler.getProducer());
    }
}
| java | Apache-2.0 | 7b912c3bc711629f1745ae4a60bffeaed649d07a | 2026-01-05T02:38:06.284501Z | false |
michelin/kstreamplify | https://github.com/michelin/kstreamplify/blob/7b912c3bc711629f1745ae4a60bffeaed649d07a/kstreamplify-core/src/test/java/com/michelin/kstreamplify/error/GenericErrorProcessorTest.java | kstreamplify-core/src/test/java/com/michelin/kstreamplify/error/GenericErrorProcessorTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.michelin.kstreamplify.error;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import com.michelin.kstreamplify.avro.KafkaError;
import java.util.Optional;
import org.apache.kafka.streams.processor.api.FixedKeyProcessorContext;
import org.apache.kafka.streams.processor.api.FixedKeyRecord;
import org.apache.kafka.streams.processor.api.RecordMetadata;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
@ExtendWith(MockitoExtension.class)
class GenericErrorProcessorTest {

    private final GenericErrorProcessor<String> errorProcessor = new GenericErrorProcessor<>();

    @Mock
    private FixedKeyProcessorContext<String, KafkaError> mockContext;

    @Mock
    private FixedKeyRecord<String, ProcessingError<String>> mockRecord;

    @Mock
    private RecordMetadata mockRecordMetadata;

    @Test
    void shouldProcessError() {
        // Given a record wrapping a processing error...
        when(mockRecord.value())
                .thenReturn(new ProcessingError<>(new RuntimeException("Exception..."), "Context message", "Record"));

        // ...and context metadata describing where the record came from
        when(mockRecordMetadata.topic()).thenReturn("test-topic");
        when(mockRecordMetadata.partition()).thenReturn(0);
        when(mockRecordMetadata.offset()).thenReturn(10L);
        when(mockContext.recordMetadata()).thenReturn(Optional.of(mockRecordMetadata));

        // When the processor handles the record
        errorProcessor.init(mockContext);
        errorProcessor.process(mockRecord);

        // Then the enriched error is forwarded downstream
        verify(mockContext).forward(any());
    }
}
| java | Apache-2.0 | 7b912c3bc711629f1745ae4a60bffeaed649d07a | 2026-01-05T02:38:06.284501Z | false |
michelin/kstreamplify | https://github.com/michelin/kstreamplify/blob/7b912c3bc711629f1745ae4a60bffeaed649d07a/kstreamplify-core/src/test/java/com/michelin/kstreamplify/error/DlqDeserializationExceptionHandlerTest.java | kstreamplify-core/src/test/java/com/michelin/kstreamplify/error/DlqDeserializationExceptionHandlerTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.michelin.kstreamplify.error;
import static com.michelin.kstreamplify.property.KstreamplifyConfig.DLQ_DESERIALIZATION_HANDLER_FORWARD_REST_CLIENT_EXCEPTION;
import static org.apache.kafka.streams.StreamsConfig.APPLICATION_ID_CONFIG;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.mockito.Mockito.when;
import com.michelin.kstreamplify.avro.KafkaError;
import com.michelin.kstreamplify.context.KafkaStreamsExecutionContext;
import io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException;
import io.confluent.kafka.serializers.KafkaAvroSerializer;
import io.confluent.kafka.serializers.KafkaAvroSerializerConfig;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.producer.MockProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.common.KafkaException;
import org.apache.kafka.common.errors.RecordTooLargeException;
import org.apache.kafka.common.serialization.ByteArraySerializer;
import org.apache.kafka.common.serialization.Serializer;
import org.apache.kafka.streams.errors.DeserializationExceptionHandler;
import org.apache.kafka.streams.errors.ErrorHandlerContext;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
@ExtendWith(MockitoExtension.class)
class DlqDeserializationExceptionHandlerTest {
@Mock
private ErrorHandlerContext errorHandlerContext;
@Mock
private ConsumerRecord<byte[], byte[]> consumerRecord;
private Producer<byte[], KafkaError> producer;
@BeforeEach
@SuppressWarnings("unchecked")
void setUp() {
Serializer<KafkaError> serializer = (Serializer) new KafkaAvroSerializer();
serializer.configure(Map.of(KafkaAvroSerializerConfig.SCHEMA_REGISTRY_URL_CONFIG, "mock://"), false);
producer = new MockProducer<>(true, new ByteArraySerializer(), serializer);
KafkaStreamsExecutionContext.setDlqTopicName(null);
Properties properties = new Properties();
properties.setProperty(APPLICATION_ID_CONFIG, "test-app");
KafkaStreamsExecutionContext.setProperties(properties);
}
@Test
void shouldReturnFailIfNoDlq() {
DlqDeserializationExceptionHandler handler = new DlqDeserializationExceptionHandler(producer);
DeserializationExceptionHandler.DeserializationHandlerResponse response =
handler.handle(errorHandlerContext, consumerRecord, new RuntimeException("Exception..."));
assertEquals(DeserializationExceptionHandler.DeserializationHandlerResponse.FAIL, response);
}
@Test
void shouldReturnFailOnExceptionDuringHandle() {
DlqDeserializationExceptionHandler handler = new DlqDeserializationExceptionHandler(producer);
KafkaStreamsExecutionContext.setDlqTopicName("DLQ_TOPIC");
DeserializationExceptionHandler.DeserializationHandlerResponse response =
handler.handle(errorHandlerContext, consumerRecord, new KafkaException("Exception..."));
assertEquals(DeserializationExceptionHandler.DeserializationHandlerResponse.FAIL, response);
}
@Test
void shouldReturnContinueOnKafkaException() {
DlqDeserializationExceptionHandler handler = new DlqDeserializationExceptionHandler(producer);
KafkaStreamsExecutionContext.setDlqTopicName("DLQ_TOPIC");
when(consumerRecord.key()).thenReturn("key".getBytes(StandardCharsets.UTF_8));
when(consumerRecord.value()).thenReturn("value".getBytes(StandardCharsets.UTF_8));
when(consumerRecord.topic()).thenReturn("topic");
// Wrap the KafkaException so that getCause() instanceof KafkaException
Exception wrapped = new Exception("Wrapper", new KafkaException("Exception..."));
DeserializationExceptionHandler.DeserializationHandlerResponse response =
handler.handle(errorHandlerContext, consumerRecord, wrapped);
assertEquals(DeserializationExceptionHandler.DeserializationHandlerResponse.CONTINUE, response);
}
@Test
void shouldContinueOnRestClientExceptionWhenFeatureFlagEnabled() {
DlqDeserializationExceptionHandler handler = new DlqDeserializationExceptionHandler(producer);
// Enable the feature flag
Properties props = new Properties();
props.setProperty(APPLICATION_ID_CONFIG, "test-app");
props.setProperty(DLQ_DESERIALIZATION_HANDLER_FORWARD_REST_CLIENT_EXCEPTION, "true");
KafkaStreamsExecutionContext.registerProperties(props);
KafkaStreamsExecutionContext.setDlqTopicName("DLQ_TOPIC");
handler.configure(Map.of());
when(consumerRecord.key()).thenReturn("key".getBytes(StandardCharsets.UTF_8));
when(consumerRecord.value()).thenReturn("value".getBytes(StandardCharsets.UTF_8));
when(consumerRecord.topic()).thenReturn("topic");
// Wrap the RestClientException so getCause() is an instance of RestClientException
Exception wrapped = new Exception("Wrapper", new RestClientException("schema error", 500, 500));
DeserializationExceptionHandler.DeserializationHandlerResponse response =
handler.handle(errorHandlerContext, consumerRecord, wrapped);
assertEquals(DeserializationExceptionHandler.DeserializationHandlerResponse.CONTINUE, response);
}
@Test
void shouldFailOnRestClientExceptionWhenFeatureFlagDisabled() {
DlqDeserializationExceptionHandler handler = new DlqDeserializationExceptionHandler(producer);
// Disable the feature flag
Properties props = new Properties();
props.setProperty(APPLICATION_ID_CONFIG, "test-app");
props.setProperty(DLQ_DESERIALIZATION_HANDLER_FORWARD_REST_CLIENT_EXCEPTION, "false");
KafkaStreamsExecutionContext.registerProperties(props);
KafkaStreamsExecutionContext.setDlqTopicName("DLQ_TOPIC");
handler.configure(Map.of());
when(consumerRecord.key()).thenReturn("key".getBytes(StandardCharsets.UTF_8));
when(consumerRecord.value()).thenReturn("value".getBytes(StandardCharsets.UTF_8));
when(consumerRecord.topic()).thenReturn("topic");
Exception wrapped = new Exception(
"Wrapper",
new io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException(
"schema error", 500, 500));
DeserializationExceptionHandler.DeserializationHandlerResponse response =
handler.handle(errorHandlerContext, consumerRecord, wrapped);
assertEquals(DeserializationExceptionHandler.DeserializationHandlerResponse.FAIL, response);
}
@Test
void shouldFailOnRestClientExceptionWhenFeatureFlagNotProvided() {
DlqDeserializationExceptionHandler handler = new DlqDeserializationExceptionHandler(producer);
// Do NOT set the property (default should be false)
Properties props = new Properties();
props.setProperty(APPLICATION_ID_CONFIG, "test-app");
KafkaStreamsExecutionContext.registerProperties(props);
KafkaStreamsExecutionContext.setDlqTopicName("DLQ_TOPIC");
handler.configure(Map.of());
when(consumerRecord.key()).thenReturn("key".getBytes(StandardCharsets.UTF_8));
when(consumerRecord.value()).thenReturn("value".getBytes(StandardCharsets.UTF_8));
when(consumerRecord.topic()).thenReturn("topic");
Exception wrapped = new Exception(
"Wrapper",
new io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException(
"schema error", 500, 500));
DeserializationExceptionHandler.DeserializationHandlerResponse response =
handler.handle(errorHandlerContext, consumerRecord, wrapped);
// Default behavior without property should be FAIL
assertEquals(DeserializationExceptionHandler.DeserializationHandlerResponse.FAIL, response);
}
@Test
void shouldConfigure() {
Map<String, Object> configs = new HashMap<>();
configs.put("bootstrap.servers", "localhost:9092");
configs.put("schema.registry.url", "localhost:8080");
configs.put("acks", "all");
DlqDeserializationExceptionHandler handler = new DlqDeserializationExceptionHandler();
handler.configure(configs);
assertNotNull(DlqExceptionHandler.getProducer());
}
@Test
void shouldEnrichWithException() {
KafkaError.Builder kafkaError = KafkaError.newBuilder()
.setTopic("topic")
.setStack("stack")
.setPartition(0)
.setOffset(0)
.setCause("cause")
.setValue("value");
DlqDeserializationExceptionHandler handler = new DlqDeserializationExceptionHandler();
KafkaError.Builder enrichedBuilder = handler.enrichWithException(
kafkaError,
new RuntimeException("Exception..."),
"key".getBytes(StandardCharsets.UTF_8),
"value".getBytes(StandardCharsets.UTF_8));
KafkaError error = enrichedBuilder.build();
assertEquals("Unknown cause", error.getCause());
assertNull(error.getContextMessage());
}
@Test
void shouldEnrichWithRecordTooLargeException() {
KafkaError.Builder kafkaError = KafkaError.newBuilder()
.setTopic("topic")
.setStack("stack")
.setPartition(0)
.setOffset(0)
.setCause("cause")
.setValue("value");
DlqDeserializationExceptionHandler handler = new DlqDeserializationExceptionHandler();
KafkaError.Builder enrichedBuilder = handler.enrichWithException(
kafkaError,
new RecordTooLargeException("Exception..."),
"key".getBytes(StandardCharsets.UTF_8),
"value".getBytes(StandardCharsets.UTF_8));
KafkaError error = enrichedBuilder.build();
assertEquals("Unknown cause", error.getCause());
assertEquals(
"The record is too large to be set as value (5 bytes). " + "The key will be used instead",
error.getValue());
}
}
| java | Apache-2.0 | 7b912c3bc711629f1745ae4a60bffeaed649d07a | 2026-01-05T02:38:06.284501Z | false |
michelin/kstreamplify | https://github.com/michelin/kstreamplify/blob/7b912c3bc711629f1745ae4a60bffeaed649d07a/kstreamplify-core/src/test/java/com/michelin/kstreamplify/error/DlqExceptionHandlerTest.java | kstreamplify-core/src/test/java/com/michelin/kstreamplify/error/DlqExceptionHandlerTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.michelin.kstreamplify.error;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import java.util.HashMap;
import java.util.Map;
import org.junit.jupiter.api.Test;
class DlqExceptionHandlerTest {

    @Test
    void shouldInstantiateProducer() {
        // Given a minimal producer configuration
        Map<String, Object> producerConfigs = new HashMap<>();
        producerConfigs.put("bootstrap.servers", "localhost:9092");
        producerConfigs.put("schema.registry.url", "localhost:8080");
        producerConfigs.put("acks", "all");

        // When instantiating the shared DLQ producer
        DlqExceptionHandler.instantiateProducer("test-client", producerConfigs);

        // Then the static producer accessor returns a live instance
        assertNotNull(DlqExceptionHandler.getProducer());
    }
}
| java | Apache-2.0 | 7b912c3bc711629f1745ae4a60bffeaed649d07a | 2026-01-05T02:38:06.284501Z | false |
michelin/kstreamplify | https://github.com/michelin/kstreamplify/blob/7b912c3bc711629f1745ae4a60bffeaed649d07a/kstreamplify-core/src/test/java/com/michelin/kstreamplify/store/WindowStateStoreUtilsTest.java | kstreamplify-core/src/test/java/com/michelin/kstreamplify/store/WindowStateStoreUtilsTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.michelin.kstreamplify.store;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Mockito.anyLong;
import static org.mockito.Mockito.eq;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.state.WindowStore;
import org.apache.kafka.streams.state.WindowStoreIterator;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
@ExtendWith(MockitoExtension.class)
class WindowStateStoreUtilsTest {

    @Mock
    private WindowStore<String, String> windowStore;

    @Mock
    private WindowStoreIterator<String> iterator;

    @Test
    void shouldReturnNull() {
        // A null iterator from the store must translate into a null lookup result
        when(windowStore.backwardFetch(anyString(), any(), any())).thenReturn(null);

        assertNull(WindowStateStoreUtils.get(windowStore, "testKey", 1));
    }

    @Test
    void shouldPutAndGetFromWindowStore() {
        String key = "testKey";
        String value = "testValue";

        // The mock iterator yields exactly one window entry, then is exhausted
        when(iterator.hasNext()).thenReturn(true).thenReturn(false);
        when(iterator.next()).thenReturn(KeyValue.pair(1L, value));
        when(windowStore.backwardFetch(anyString(), any(), any())).thenReturn(iterator);

        WindowStateStoreUtils.put(windowStore, key, value);

        // First lookup consumes the single entry; the second finds nothing
        String result = WindowStateStoreUtils.get(windowStore, key, 1);
        String missing = WindowStateStoreUtils.get(windowStore, "nothing", 1);

        assertEquals(value, result);
        assertNull(missing);
        verify(windowStore).put(eq(key), eq(value), anyLong());
    }
}
| java | Apache-2.0 | 7b912c3bc711629f1745ae4a60bffeaed649d07a | 2026-01-05T02:38:06.284501Z | false |
michelin/kstreamplify | https://github.com/michelin/kstreamplify/blob/7b912c3bc711629f1745ae4a60bffeaed649d07a/kstreamplify-core/src/test/java/com/michelin/kstreamplify/initializer/KafkaStreamsInitializerTest.java | kstreamplify-core/src/test/java/com/michelin/kstreamplify/initializer/KafkaStreamsInitializerTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.michelin.kstreamplify.initializer;
import static com.michelin.kstreamplify.initializer.KafkaStreamsInitializer.SERVER_PORT_PROPERTY_NAME;
import static com.michelin.kstreamplify.property.PropertiesUtils.KAFKA_PROPERTIES_PREFIX;
import static com.michelin.kstreamplify.property.PropertiesUtils.PROPERTY_SEPARATOR;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.mockStatic;
import com.michelin.kstreamplify.context.KafkaStreamsExecutionContext;
import com.michelin.kstreamplify.property.PropertiesUtils;
import java.util.Properties;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.errors.StreamsUncaughtExceptionHandler;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mock;
import org.mockito.MockedStatic;
import org.mockito.junit.jupiter.MockitoExtension;
@ExtendWith(MockitoExtension.class)
class KafkaStreamsInitializerTest {

    @Mock
    private KafkaStreamsStarter kafkaStreamsStarter;

    /**
     * Builds the initializer under test.
     *
     * <p>Deliberately not a field initializer: field initializers run during test-instance
     * construction, before {@code MockitoExtension} injects the {@code @Mock} fields, so a
     * field-initialized {@code KafkaStreamsInitializer} would receive a {@code null} starter.
     */
    private KafkaStreamsInitializer newInitializer() {
        return new KafkaStreamsInitializer(kafkaStreamsStarter);
    }

    @Test
    void shouldStartProperties() {
        try (MockedStatic<PropertiesUtils> propertiesUtilsMockedStatic = mockStatic(PropertiesUtils.class)) {
            Properties properties = new Properties();
            properties.put(SERVER_PORT_PROPERTY_NAME, 8080);
            properties.put(KAFKA_PROPERTIES_PREFIX + PROPERTY_SEPARATOR + StreamsConfig.APPLICATION_ID_CONFIG, "appId");
            properties.put(KAFKA_PROPERTIES_PREFIX + PROPERTY_SEPARATOR + "prefix.self", "abc.");

            propertiesUtilsMockedStatic.when(PropertiesUtils::loadProperties).thenReturn(properties);
            propertiesUtilsMockedStatic
                    .when(() -> PropertiesUtils.loadKafkaProperties(any()))
                    .thenCallRealMethod();

            KafkaStreamsInitializer initializer = newInitializer();
            initializer.initProperties();

            // Server port and Kafka properties are extracted from the flat property file
            assertNotNull(initializer.getProperties());
            assertEquals(8080, initializer.getServerPort());
            assertTrue(initializer.getKafkaProperties().containsKey(StreamsConfig.APPLICATION_ID_CONFIG));

            // The "prefix.self" value is applied to the application id in the execution context
            assertEquals("abc.", KafkaStreamsExecutionContext.getPrefix());
            assertEquals(
                    "abc.appId", KafkaStreamsExecutionContext.getProperties().get(StreamsConfig.APPLICATION_ID_CONFIG));
        }
    }

    @Test
    void shouldShutdownClientOnUncaughtException() {
        try (MockedStatic<PropertiesUtils> propertiesUtilsMockedStatic = mockStatic(PropertiesUtils.class)) {
            Properties properties = new Properties();
            properties.put(SERVER_PORT_PROPERTY_NAME, 8080);
            properties.put(KAFKA_PROPERTIES_PREFIX + PROPERTY_SEPARATOR + StreamsConfig.APPLICATION_ID_CONFIG, "appId");

            propertiesUtilsMockedStatic.when(PropertiesUtils::loadProperties).thenReturn(properties);
            propertiesUtilsMockedStatic
                    .when(() -> PropertiesUtils.loadKafkaProperties(any()))
                    .thenCallRealMethod();

            KafkaStreamsInitializer initializer = newInitializer();
            initializer.initProperties();

            // Any uncaught stream exception must shut down the whole Kafka Streams client
            StreamsUncaughtExceptionHandler.StreamThreadExceptionResponse response =
                    initializer.onStreamsUncaughtException(new RuntimeException("Test Exception"));

            assertEquals(StreamsUncaughtExceptionHandler.StreamThreadExceptionResponse.SHUTDOWN_CLIENT, response);
        }
    }
}
| java | Apache-2.0 | 7b912c3bc711629f1745ae4a60bffeaed649d07a | 2026-01-05T02:38:06.284501Z | false |
michelin/kstreamplify | https://github.com/michelin/kstreamplify/blob/7b912c3bc711629f1745ae4a60bffeaed649d07a/kstreamplify-core/src/test/java/com/michelin/kstreamplify/initializer/KafkaStreamsStarterTest.java | kstreamplify-core/src/test/java/com/michelin/kstreamplify/initializer/KafkaStreamsStarterTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.michelin.kstreamplify.initializer;
import static org.apache.kafka.streams.errors.StreamsUncaughtExceptionHandler.StreamThreadExceptionResponse.REPLACE_THREAD;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import com.michelin.kstreamplify.avro.KafkaError;
import com.michelin.kstreamplify.context.KafkaStreamsExecutionContext;
import com.michelin.kstreamplify.deduplication.DeduplicationUtils;
import com.michelin.kstreamplify.error.ProcessingResult;
import com.michelin.kstreamplify.error.TopologyErrorHandler;
import com.michelin.kstreamplify.serde.SerdesUtils;
import com.michelin.kstreamplify.serde.TopicWithSerde;
import io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig;
import java.io.IOException;
import java.time.Duration;
import java.util.Map;
import java.util.Properties;
import lombok.Getter;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.common.serialization.Serde;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.errors.StreamsUncaughtExceptionHandler;
import org.junit.jupiter.api.Test;
class KafkaStreamsStarterTest {
@Test
void shouldInstantiateKafkaStreamsStarter() {
KafkaStreamsExecutionContext.registerProperties(new Properties());
KafkaStreamsExecutionContext.setSerdesConfig(
Map.of(AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, "mock://"));
StreamsBuilder builder = new StreamsBuilder();
KafkaStreamsStarterStub starter = new KafkaStreamsStarterStub();
starter.topology(builder);
assertNotNull(builder.build().describe());
assertEquals("DLQ_TOPIC", starter.dlqTopic());
starter.onStart(null);
assertTrue(starter.isStarted());
}
@Test
void shouldStartWithCustomUncaughtExceptionHandler() {
KafkaStreamsExecutionContext.registerProperties(new Properties());
KafkaStreamsExecutionContext.setSerdesConfig(
Map.of(AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, "mock://"));
StreamsBuilder builder = new StreamsBuilder();
KafkaStreamsStarterStub starter = new KafkaStreamsStarterStub();
starter.topology(builder);
assertNotNull(builder.build().describe());
assertEquals("DLQ_TOPIC", starter.dlqTopic());
assertEquals(
starter.uncaughtExceptionHandler()
.handle(new Exception("Register a custom uncaught exception handler test.")),
REPLACE_THREAD);
starter.onStart(null);
assertTrue(starter.isStarted());
}
/** Kafka Streams Starter implementation used for unit tests purpose. */
@Getter
static class KafkaStreamsStarterStub extends KafkaStreamsStarter {
private boolean started;
@Override
public void topology(StreamsBuilder streamsBuilder) {
var streams = TopicWithSerdeStub.inputTopicWithSerde().stream(streamsBuilder);
DeduplicationUtils.deduplicateKeys(
streamsBuilder,
streams,
"deduplicateKeysStoreName",
"deduplicateKeysRepartitionName",
Duration.ZERO);
DeduplicationUtils.deduplicateKeyValues(
streamsBuilder,
streams,
"deduplicateKeyValuesStoreName",
"deduplicateKeyValuesRepartitionName",
Duration.ZERO);
DeduplicationUtils.deduplicateWithPredicate(streamsBuilder, streams, Duration.ofMillis(1), null);
var enrichedStreams = streams.mapValues(KafkaStreamsStarterStub::enrichValue);
var enrichedStreams2 = streams.mapValues(KafkaStreamsStarterStub::enrichValue2);
var processingResults = TopologyErrorHandler.catchErrors(enrichedStreams);
TopologyErrorHandler.catchErrors(enrichedStreams2, true);
TopicWithSerdeStub.outputTopicWithSerde().produce(processingResults);
}
@Override
public String dlqTopic() {
return "DLQ_TOPIC";
}
@Override
public void onStart(KafkaStreams kafkaStreams) {
started = true;
}
@Override
public StreamsUncaughtExceptionHandler uncaughtExceptionHandler() {
return new UncaughtExceptionHandlerStub();
}
private static ProcessingResult<String, String> enrichValue(KafkaError input) {
if (input != null) {
return ProcessingResult.success("output field");
} else {
return ProcessingResult.fail(new IOException("an exception occurred"), "output error");
}
}
private static ProcessingResult<String, String> enrichValue2(KafkaError input) {
if (input != null) {
return ProcessingResult.success("output field 2");
} else {
return ProcessingResult.fail(new IOException("an exception occurred"), "output error 2");
}
}
}
/**
* Topic with serdes helper used for unit tests purpose.
*
* @param <K> The key type
* @param <V> The value type
*/
static class TopicWithSerdeStub<K, V> extends TopicWithSerde<K, V> {
private TopicWithSerdeStub(String name, String appName, Serde<K> keySerde, Serde<V> valueSerde) {
super(name, appName, keySerde, valueSerde);
}
public static TopicWithSerdeStub<String, String> outputTopicWithSerde() {
return new TopicWithSerdeStub<>("OUTPUT_TOPIC", "APP_NAME", Serdes.String(), Serdes.String());
}
public static TopicWithSerdeStub<String, KafkaError> inputTopicWithSerde() {
return new TopicWithSerdeStub<>("INPUT_TOPIC", "APP_NAME", Serdes.String(), SerdesUtils.getValueSerdes());
}
}
@Slf4j
static class UncaughtExceptionHandlerStub implements StreamsUncaughtExceptionHandler {
@Override
public StreamThreadExceptionResponse handle(final Throwable t) {
log.error("!Custom uncaught exception handler test!");
return REPLACE_THREAD;
}
}
}
| java | Apache-2.0 | 7b912c3bc711629f1745ae4a60bffeaed649d07a | 2026-01-05T02:38:06.284501Z | false |
michelin/kstreamplify | https://github.com/michelin/kstreamplify/blob/7b912c3bc711629f1745ae4a60bffeaed649d07a/kstreamplify-core/src/test/java/com/michelin/kstreamplify/property/PropertiesUtilsTest.java | kstreamplify-core/src/test/java/com/michelin/kstreamplify/property/PropertiesUtilsTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.michelin.kstreamplify.property;
import static com.michelin.kstreamplify.property.KstreamplifyConfig.DLQ_DESERIALIZATION_HANDLER_FORWARD_REST_CLIENT_EXCEPTION;
import static org.apache.kafka.streams.StreamsConfig.APPLICATION_ID_CONFIG;
import static org.junit.jupiter.api.Assertions.*;
import java.util.Properties;
import org.junit.jupiter.api.Test;
class PropertiesUtilsTest {
@Test
void shouldLoadProperties() {
Properties properties = PropertiesUtils.loadProperties();
assertTrue(properties.containsKey("server.port"));
assertEquals(8080, properties.get("server.port"));
assertTrue(properties.containsKey("kafka.properties." + APPLICATION_ID_CONFIG));
assertEquals("appId", properties.get("kafka.properties." + APPLICATION_ID_CONFIG));
assertTrue(properties.containsKey(
"kafka.properties." + DLQ_DESERIALIZATION_HANDLER_FORWARD_REST_CLIENT_EXCEPTION));
assertEquals(
true, properties.get("kafka.properties." + DLQ_DESERIALIZATION_HANDLER_FORWARD_REST_CLIENT_EXCEPTION));
}
@Test
void shouldLoadKafkaProperties() {
Properties properties = PropertiesUtils.loadKafkaProperties(PropertiesUtils.loadProperties());
assertTrue(properties.containsKey(APPLICATION_ID_CONFIG));
assertTrue(properties.containsValue("appId"));
}
@Test
void shouldExtractPropertiesByPrefix() {
Properties props = new Properties();
props.put("dlq.feature1", "true");
props.put("dlq.feature2", "false");
props.put("other.feature", "ignored");
Properties extracted = PropertiesUtils.extractPropertiesByPrefix(props, "dlq.");
assertEquals(2, extracted.size());
assertEquals("true", extracted.getProperty("dlq.feature1"));
assertEquals("false", extracted.getProperty("dlq.feature2"));
assertNull(extracted.getProperty("other.feature"));
}
@Test
void shouldReturnTrueWhenFeatureEnabled() {
Properties props = new Properties();
props.put("my.feature", "true");
assertTrue(PropertiesUtils.isFeatureEnabled(props, "my.feature", false));
}
@Test
void shouldReturnFalseWhenFeatureDisabled() {
Properties props = new Properties();
props.put("my.feature", "false");
assertFalse(PropertiesUtils.isFeatureEnabled(props, "my.feature", true));
}
@Test
void shouldReturnDefaultWhenFeatureMissing() {
Properties props = new Properties();
assertTrue(PropertiesUtils.isFeatureEnabled(props, "missing.feature", true));
assertFalse(PropertiesUtils.isFeatureEnabled(props, "missing.feature", false));
}
}
| java | Apache-2.0 | 7b912c3bc711629f1745ae4a60bffeaed649d07a | 2026-01-05T02:38:06.284501Z | false |
michelin/kstreamplify | https://github.com/michelin/kstreamplify/blob/7b912c3bc711629f1745ae4a60bffeaed649d07a/kstreamplify-core/src/test/java/com/michelin/kstreamplify/property/RocksDbConfigTest.java | kstreamplify-core/src/test/java/com/michelin/kstreamplify/property/RocksDbConfigTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.michelin.kstreamplify.property;
import static org.mockito.Mockito.any;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import com.michelin.kstreamplify.context.KafkaStreamsExecutionContext;
import com.michelin.kstreamplify.store.RocksDbConfig;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import org.rocksdb.BlockBasedTableConfig;
import org.rocksdb.CompressionType;
import org.rocksdb.Options;
@ExtendWith(MockitoExtension.class)
class RocksDbConfigTest {
@Mock
private Options options;
@BeforeEach
void setUp() {
when(options.tableFormatConfig()).thenReturn(new BlockBasedTableConfig());
}
@Test
void testSetConfigWithDefaultValues() {
Map<String, Object> configs = new HashMap<>();
RocksDbConfig rocksDbConfig = new RocksDbConfig();
KafkaStreamsExecutionContext.registerProperties(new Properties());
rocksDbConfig.setConfig("storeName", options, configs);
verify(options).tableFormatConfig();
verify(options).setTableFormatConfig(any());
verify(options).setMaxWriteBufferNumber(RocksDbConfig.ROCKSDB_MAX_WRITE_BUFFER_DEFAULT);
verify(options).setWriteBufferSize(RocksDbConfig.ROCKSDB_WRITE_BUFFER_SIZE_DEFAULT);
verify(options).setCompressionType(CompressionType.NO_COMPRESSION);
}
@Test
void testSetConfigWithCustomValues() {
long cacheSize = 64 * 1024L * 1024L;
long writeBufferSize = 8 * 1024L * 1024L;
long blockSize = 8 * 1024L;
int maxWriteBuffer = 4;
boolean cacheIndexBlock = false;
String compressionType = "lz4";
Map<String, Object> configs = new HashMap<>();
configs.put(RocksDbConfig.ROCKSDB_CACHE_SIZE_CONFIG, String.valueOf(cacheSize));
configs.put(RocksDbConfig.ROCKSDB_WRITE_BUFFER_SIZE_CONFIG, String.valueOf(writeBufferSize));
configs.put(RocksDbConfig.ROCKSDB_BLOCK_SIZE_CONFIG, String.valueOf(blockSize));
configs.put(RocksDbConfig.ROCKSDB_MAX_WRITE_BUFFER_CONFIG, String.valueOf(maxWriteBuffer));
configs.put(RocksDbConfig.ROCKSDB_CACHE_INDEX_BLOCK_ENABLED_CONFIG, String.valueOf(cacheIndexBlock));
configs.put(RocksDbConfig.ROCKSDB_COMPRESSION_TYPE_CONFIG, compressionType);
Properties properties = new Properties();
properties.putAll(configs);
KafkaStreamsExecutionContext.registerProperties(properties);
RocksDbConfig rocksDbConfig = new RocksDbConfig();
rocksDbConfig.setConfig("storeName", options, configs);
verify(options).tableFormatConfig();
verify(options).setTableFormatConfig(any());
verify(options).setMaxWriteBufferNumber(maxWriteBuffer);
verify(options).setWriteBufferSize(writeBufferSize);
verify(options).setCompressionType(CompressionType.getCompressionType(compressionType));
}
}
| java | Apache-2.0 | 7b912c3bc711629f1745ae4a60bffeaed649d07a | 2026-01-05T02:38:06.284501Z | false |
michelin/kstreamplify | https://github.com/michelin/kstreamplify/blob/7b912c3bc711629f1745ae4a60bffeaed649d07a/kstreamplify-core/src/test/java/com/michelin/kstreamplify/property/KstreamplifyConfigTest.java | kstreamplify-core/src/test/java/com/michelin/kstreamplify/property/KstreamplifyConfigTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.michelin.kstreamplify.property;
import static org.junit.jupiter.api.Assertions.*;
import org.junit.jupiter.api.Test;
class KstreamplifyConfigTest {
@Test
void shouldHaveCorrectDlqPropertiesPrefix() {
assertEquals("dlq", KstreamplifyConfig.DLQ_PROPERTIES_PREFIX);
}
@Test
void shouldHaveCorrectDeserializationHandlerRestClientExceptionEnabledKey() {
assertEquals(
"dlq.deserialization-handler.forward-restclient-exception",
KstreamplifyConfig.DLQ_DESERIALIZATION_HANDLER_FORWARD_REST_CLIENT_EXCEPTION);
}
}
| java | Apache-2.0 | 7b912c3bc711629f1745ae4a60bffeaed649d07a | 2026-01-05T02:38:06.284501Z | false |
michelin/kstreamplify | https://github.com/michelin/kstreamplify/blob/7b912c3bc711629f1745ae4a60bffeaed649d07a/kstreamplify-core/src/test/java/com/michelin/kstreamplify/converter/JsonToAvroConverterTest.java | kstreamplify-core/src/test/java/com/michelin/kstreamplify/converter/JsonToAvroConverterTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.michelin.kstreamplify.converter;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNull;
import com.michelin.kstreamplify.avro.EnumField;
import com.michelin.kstreamplify.avro.KafkaRecordStub;
import java.util.Map;
import lombok.extern.slf4j.Slf4j;
import org.junit.jupiter.api.Test;
@Slf4j
class JsonToAvroConverterTest {
private static final String JSON = "{"
+ "\"decimalField\":10.5,"
+ "\"intField\":123,"
+ "\"stringField\":\"test\","
+ "\"booleanField\":false,"
+ "\"uuidField\":\"dc306935-d720-427f-9ecd-ff87c0b15189\","
+ "\"timestampMillisField\":\"2024-03-27T19:51:01.815Z\","
+ "\"timestampMicrosField\":\"2024-03-27T19:51:01.815832Z\","
+ "\"localTimestampMillisField\":\"2024-03-27T20:51:01.815832\","
+ "\"localTimestampMicrosField\":\"2024-03-27T20:51:01.815832123\","
+ "\"timeMillisField\":\"20:51:01.815\","
+ "\"timeMicrosField\":\"20:51:01.815832\","
+ "\"enumField\":\"b\","
+ "\"dateField\":\"2024-03-27\","
+ "\"membersString\":{\"key1\":\"val1\",\"key2\":\"val2\"},"
+ "\"split\":[{"
+ "\"subSplit\":[{\"subSubIntField\":8,\"subSubField\":\"subSubTest\"}],"
+ "\"subField\":\"subTest\"}],"
+ "\"members\":{\"key1\":{\"mapQuantityField\":1}},"
+ "\"listString\":[\"val1\",\"val2\"]"
+ "}";
@Test
void shouldConvertJsonToObject() {
assertEquals(
Map.of("firstName", "John", "lastName", "Doe"),
JsonToAvroConverter.jsonToObject("{\"firstName\":\"John\",\"lastName\":\"Doe\"}"));
}
@Test
void shouldConvertJsonToObjectNull() {
assertNull(JsonToAvroConverter.jsonToObject(null));
}
@Test
void shouldConvertJsonToAvro() {
KafkaRecordStub kafkaTest =
(KafkaRecordStub) JsonToAvroConverter.jsonToAvro(JSON, KafkaRecordStub.getClassSchema());
assertEquals("val1", kafkaTest.getMembersString().get("key1"));
assertEquals(8, kafkaTest.getSplit().get(0).getSubSplit().get(0).getSubSubIntField());
assertEquals(
"subSubTest", kafkaTest.getSplit().get(0).getSubSplit().get(0).getSubSubField());
assertEquals("subTest", kafkaTest.getSplit().get(0).getSubField());
assertFalse(kafkaTest.getBooleanField());
assertEquals(
"1.0000",
kafkaTest.getMembers().get("key1").getMapQuantityField().toString());
assertEquals("10.5000", kafkaTest.getDecimalField().toString());
assertEquals("123", String.valueOf(kafkaTest.getIntField()));
assertEquals("test", kafkaTest.getStringField());
assertEquals("val1", kafkaTest.getListString().get(0));
assertEquals("val2", kafkaTest.getListString().get(1));
assertEquals("2024-03-27", kafkaTest.getDateField().toString());
assertEquals("20:51:01.815", kafkaTest.getTimeMillisField().toString());
assertEquals("20:51:01.815832", kafkaTest.getTimeMicrosField().toString());
assertEquals(
"2024-03-27T20:51:01.815832",
kafkaTest.getLocalTimestampMillisField().toString());
assertEquals(
"2024-03-27T20:51:01.815832123",
kafkaTest.getLocalTimestampMicrosField().toString());
assertEquals(
"2024-03-27T19:51:01.815Z", kafkaTest.getTimestampMillisField().toString());
assertEquals(
"2024-03-27T19:51:01.815832Z",
kafkaTest.getTimestampMicrosField().toString());
assertEquals(EnumField.b, kafkaTest.getEnumField());
}
}
| java | Apache-2.0 | 7b912c3bc711629f1745ae4a60bffeaed649d07a | 2026-01-05T02:38:06.284501Z | false |
michelin/kstreamplify | https://github.com/michelin/kstreamplify/blob/7b912c3bc711629f1745ae4a60bffeaed649d07a/kstreamplify-core/src/test/java/com/michelin/kstreamplify/converter/AvroToJsonConverterTest.java | kstreamplify-core/src/test/java/com/michelin/kstreamplify/converter/AvroToJsonConverterTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.michelin.kstreamplify.converter;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNull;
import com.michelin.kstreamplify.avro.EnumField;
import com.michelin.kstreamplify.avro.KafkaRecordStub;
import com.michelin.kstreamplify.avro.MapElement;
import com.michelin.kstreamplify.avro.SubKafkaRecordStub;
import com.michelin.kstreamplify.avro.SubSubKafkaRecordStub;
import java.math.BigDecimal;
import java.time.Instant;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import lombok.extern.slf4j.Slf4j;
import org.junit.jupiter.api.Test;
@Slf4j
class AvroToJsonConverterTest {
@Test
void shouldConvertObjectNull() {
assertNull(AvroToJsonConverter.convertObject((Object) null));
}
@Test
void shouldConvertObject() {
String json = AvroToJsonConverter.convertObject(new UserStub("John", "Doe"));
assertEquals("""
{
"firstName": "John",
"lastName": "Doe"
}""", json);
}
@Test
void shouldConvertGenericRecord() {
String json = AvroToJsonConverter.convertRecord(buildKafkaRecordStub());
assertEquals("""
{
"localTimestampMillisField": "2024-03-27T20:51:01.815832",
"membersString": {
"key1": "val1"
},
"decimalField": 10,
"timeMillisField": "20:51:01.815",
"booleanField": false,
"dateField": "2024-03-27",
"timestampMillisField": "2024-03-27T19:51:01.815Z",
"intField": 5,
"localTimestampMicrosField": "2024-03-27T20:51:01.815832123",
"listString": [
"val1",
"val2"
],
"timestampMicrosField": "2024-03-27T19:51:01.815832Z",
"uuidField": "dc306935-d720-427f-9ecd-ff87c0b15189",
"split": [
{
"subSplit": [
{
"subSubIntField": 8,
"subSubDateField": "1970-01-01T00:00:00.002Z",
"subSubField": "subSubTest"
}
],
"subField": "subTest"
}
],
"members": {
"key1": {
"mapDateField": "1970-01-01T00:00:00.003Z",
"mapQuantityField": 1
}
},
"timeMicrosField": "20:51:01.815832",
"stringField": "test",
"enumField": "b"
}""", json);
}
@Test
void shouldConvertListObject() {
String json = AvroToJsonConverter.convertObject(List.of(new UserStub("John", "Doe")));
assertEquals("""
[{
"firstName": "John",
"lastName": "Doe"
}]""", json);
}
private KafkaRecordStub buildKafkaRecordStub() {
return KafkaRecordStub.newBuilder()
.setDecimalField(BigDecimal.TEN)
.setIntField(5)
.setStringField("test")
.setBooleanField(false)
.setUuidField(UUID.fromString("dc306935-d720-427f-9ecd-ff87c0b15189"))
.setTimeMillisField(LocalTime.parse("20:51:01.815"))
.setTimeMicrosField(LocalTime.parse("20:51:01.815832"))
.setTimestampMillisField(Instant.parse("2024-03-27T19:51:01.815Z"))
.setTimestampMicrosField(Instant.parse("2024-03-27T19:51:01.815832Z"))
.setLocalTimestampMillisField(LocalDateTime.parse("2024-03-27T20:51:01.815832"))
.setLocalTimestampMicrosField(LocalDateTime.parse("2024-03-27T20:51:01.815832123"))
.setDateField(LocalDate.parse("2024-03-27"))
.setEnumField(EnumField.b)
.setMembers(Map.of(
"key1",
MapElement.newBuilder()
.setMapDateField(Instant.ofEpochMilli(3))
.setMapQuantityField(BigDecimal.ONE)
.build()))
.setMembersString(Map.of("key1", "val1"))
.setListString(List.of("val1", "val2"))
.setSplit(List.of(SubKafkaRecordStub.newBuilder()
.setSubField("subTest")
.setSubSplit(List.of(SubSubKafkaRecordStub.newBuilder()
.setSubSubField("subSubTest")
.setSubSubDateField(Instant.ofEpochMilli(2))
.setSubSubIntField(8)
.build()))
.build()))
.build();
}
record UserStub(String firstName, String lastName) {}
}
| java | Apache-2.0 | 7b912c3bc711629f1745ae4a60bffeaed649d07a | 2026-01-05T02:38:06.284501Z | false |
michelin/kstreamplify | https://github.com/michelin/kstreamplify/blob/7b912c3bc711629f1745ae4a60bffeaed649d07a/kstreamplify-core/src/test/java/com/michelin/kstreamplify/serde/TopicWithSerdeTest.java | kstreamplify-core/src/test/java/com/michelin/kstreamplify/serde/TopicWithSerdeTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.michelin.kstreamplify.serde;
import static org.junit.jupiter.api.Assertions.assertEquals;
import com.michelin.kstreamplify.context.KafkaStreamsExecutionContext;
import java.util.Properties;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.StreamsBuilder;
import org.junit.jupiter.api.Test;
class TopicWithSerdeTest {
@Test
void shouldCreateTopicWithSerde() {
KafkaStreamsExecutionContext.registerProperties(new Properties());
TopicWithSerde<String, String> topicWithSerde =
new TopicWithSerde<>("INPUT_TOPIC", Serdes.String(), Serdes.String());
assertEquals("INPUT_TOPIC", topicWithSerde.getUnPrefixedName());
assertEquals("INPUT_TOPIC", topicWithSerde.toString());
}
@Test
void shouldCreateTopicWithSerdeWithPrefix() {
Properties properties = new Properties();
properties.put("prefix.self", "abc.");
KafkaStreamsExecutionContext.registerProperties(properties);
TopicWithSerde<String, String> topicWithSerde =
new TopicWithSerde<>("INPUT_TOPIC", Serdes.String(), Serdes.String());
assertEquals("INPUT_TOPIC", topicWithSerde.getUnPrefixedName());
assertEquals("abc.INPUT_TOPIC", topicWithSerde.toString());
}
@Test
void shouldCreateStream() {
KafkaStreamsExecutionContext.registerProperties(new Properties());
TopicWithSerde<String, String> topicWithSerde =
new TopicWithSerde<>("INPUT_TOPIC", Serdes.String(), Serdes.String());
StreamsBuilder streamsBuilder = new StreamsBuilder();
topicWithSerde.stream(streamsBuilder);
assertEquals("""
Topologies:
Sub-topology: 0
Source: KSTREAM-SOURCE-0000000000 (topics: [INPUT_TOPIC])
--> none
""", streamsBuilder.build().describe().toString());
}
@Test
void shouldCreateTable() {
KafkaStreamsExecutionContext.registerProperties(new Properties());
TopicWithSerde<String, String> topicWithSerde =
new TopicWithSerde<>("INPUT_TOPIC", Serdes.String(), Serdes.String());
StreamsBuilder streamsBuilder = new StreamsBuilder();
topicWithSerde.table(streamsBuilder, "myStore");
assertEquals("""
Topologies:
Sub-topology: 0
Source: KSTREAM-SOURCE-0000000000 (topics: [INPUT_TOPIC])
--> KTABLE-SOURCE-0000000001
Processor: KTABLE-SOURCE-0000000001 (stores: [myStore])
--> none
<-- KSTREAM-SOURCE-0000000000
""", streamsBuilder.build().describe().toString());
}
@Test
void shouldCreateGlobalKtable() {
KafkaStreamsExecutionContext.registerProperties(new Properties());
TopicWithSerde<String, String> topicWithSerde =
new TopicWithSerde<>("INPUT_TOPIC", Serdes.String(), Serdes.String());
StreamsBuilder streamsBuilder = new StreamsBuilder();
topicWithSerde.globalTable(streamsBuilder, "myStore");
assertEquals("""
Topologies:
Sub-topology: 0 for global store (will not generate tasks)
Source: KSTREAM-SOURCE-0000000000 (topics: [INPUT_TOPIC])
--> KTABLE-SOURCE-0000000001
Processor: KTABLE-SOURCE-0000000001 (stores: [myStore])
--> none
<-- KSTREAM-SOURCE-0000000000
""", streamsBuilder.build().describe().toString());
}
}
| java | Apache-2.0 | 7b912c3bc711629f1745ae4a60bffeaed649d07a | 2026-01-05T02:38:06.284501Z | false |
michelin/kstreamplify | https://github.com/michelin/kstreamplify/blob/7b912c3bc711629f1745ae4a60bffeaed649d07a/kstreamplify-core/src/test/java/com/michelin/kstreamplify/server/KafkaStreamsHttpServerTest.java | kstreamplify-core/src/test/java/com/michelin/kstreamplify/server/KafkaStreamsHttpServerTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.michelin.kstreamplify.server;
import static org.junit.jupiter.api.Assertions.assertNotEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import com.michelin.kstreamplify.initializer.KafkaStreamsInitializer;
import com.michelin.kstreamplify.initializer.KafkaStreamsStarter;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
/** Tests for {@link KafkaStreamsHttpServer}. */
@ExtendWith(MockitoExtension.class)
class KafkaStreamsHttpServerTest {
    @Mock
    private KafkaStreamsStarter kafkaStreamsStarter;

    @Test
    void shouldCreateServerWithDefaultHostAndPort() {
        // Build the HTTP server from a fresh initializer and start it
        var initializer = new KafkaStreamsInitializer(kafkaStreamsStarter);
        var httpServer = new KafkaStreamsHttpServer(initializer);
        httpServer.start();

        // The underlying server must be bound to a resolvable host and a non-zero port
        var address = httpServer.server.getAddress();
        assertNotNull(address.getHostName());
        assertNotEquals(0, address.getPort());
    }
}
| java | Apache-2.0 | 7b912c3bc711629f1745ae4a60bffeaed649d07a | 2026-01-05T02:38:06.284501Z | false |
michelin/kstreamplify | https://github.com/michelin/kstreamplify/blob/7b912c3bc711629f1745ae4a60bffeaed649d07a/kstreamplify-core/src/test/java/com/michelin/kstreamplify/deduplication/DedupKeyProcessorTest.java | kstreamplify-core/src/test/java/com/michelin/kstreamplify/deduplication/DedupKeyProcessorTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.michelin.kstreamplify.deduplication;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyLong;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.ArgumentMatchers.argThat;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import com.michelin.kstreamplify.avro.KafkaError;
import com.michelin.kstreamplify.error.ProcessingResult;
import java.time.Duration;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.processor.api.ProcessorContext;
import org.apache.kafka.streams.processor.api.Record;
import org.apache.kafka.streams.state.WindowStore;
import org.apache.kafka.streams.state.WindowStoreIterator;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
/** Tests for {@link DedupKeyProcessor}. */
@ExtendWith(MockitoExtension.class)
class DedupKeyProcessorTest {
    private DedupKeyProcessor<KafkaError> processor;

    @Mock
    private ProcessorContext<String, ProcessingResult<KafkaError, KafkaError>> context;

    @Mock
    private WindowStore<String, String> windowStore;

    @Mock
    private WindowStoreIterator<String> windowStoreIterator;

    @BeforeEach
    void setUp() {
        // Deduplicate on the record key within a one-hour window, backed by "testStore"
        processor = new DedupKeyProcessor<>("testStore", Duration.ofHours(1));

        // The processor resolves its window store from the context during init
        when(context.getStateStore("testStore")).thenReturn(windowStore);

        processor.init(context);
    }

    @Test
    void shouldProcessNewRecord() {
        var inputRecord = new Record<String, KafkaError>("key", new KafkaError(), 0);

        processor.process(inputRecord);

        // First occurrence: the key is stored and the record is forwarded downstream
        verify(windowStore).put("key", "key", inputRecord.timestamp());
        verify(context).forward(argThat(forwarded -> forwarded.value().getValue().equals(inputRecord.value())));
    }

    @Test
    void shouldProcessDuplicate() {
        var inputRecord = new Record<String, KafkaError>("key", new KafkaError(), 0L);

        // The backward fetch yields an entry matching the key, flagging a duplicate
        when(windowStoreIterator.hasNext()).thenReturn(true);
        when(windowStoreIterator.next()).thenReturn(KeyValue.pair(0L, "key"));
        when(windowStore.backwardFetch(any(), any(), any())).thenReturn(windowStoreIterator);

        processor.process(inputRecord);

        // Duplicates are neither stored again nor forwarded
        verify(windowStore, never()).put(anyString(), any(), anyLong());
        verify(context, never()).forward(any());
    }

    @Test
    void shouldThrowException() {
        var inputRecord = new Record<String, KafkaError>("key", new KafkaError(), 0L);

        // Make the store misbehave so the processor takes its error path
        when(windowStore.backwardFetch(any(), any(), any()))
                .thenReturn(null)
                .thenThrow(new RuntimeException("Exception..."));
        doThrow(new RuntimeException("Exception...")).when(windowStore).put(anyString(), any(), anyLong());

        processor.process(inputRecord);

        // The failure is wrapped into a ProcessingResult error and forwarded
        verify(context).forward(argThat(forwarded -> forwarded.value()
                .getError()
                .getContextMessage()
                .equals("Could not figure out what to do with the current payload: "
                        + "An unlikely error occurred during deduplication transform")));
    }
}
| java | Apache-2.0 | 7b912c3bc711629f1745ae4a60bffeaed649d07a | 2026-01-05T02:38:06.284501Z | false |
michelin/kstreamplify | https://github.com/michelin/kstreamplify/blob/7b912c3bc711629f1745ae4a60bffeaed649d07a/kstreamplify-core/src/test/java/com/michelin/kstreamplify/deduplication/DedupKeyValueProcessorTest.java | kstreamplify-core/src/test/java/com/michelin/kstreamplify/deduplication/DedupKeyValueProcessorTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.michelin.kstreamplify.deduplication;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyLong;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.ArgumentMatchers.argThat;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import com.michelin.kstreamplify.avro.KafkaError;
import com.michelin.kstreamplify.error.ProcessingResult;
import java.time.Duration;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.processor.api.ProcessorContext;
import org.apache.kafka.streams.processor.api.Record;
import org.apache.kafka.streams.state.WindowStore;
import org.apache.kafka.streams.state.WindowStoreIterator;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
/** Tests for {@link DedupKeyValueProcessor}. */
@ExtendWith(MockitoExtension.class)
class DedupKeyValueProcessorTest {
    private DedupKeyValueProcessor<KafkaError> processor;

    @Mock
    private ProcessorContext<String, ProcessingResult<KafkaError, KafkaError>> context;

    @Mock
    private WindowStore<String, KafkaError> windowStore;

    @Mock
    private WindowStoreIterator<KafkaError> windowStoreIterator;

    @BeforeEach
    void setUp() {
        // Deduplicate on key and value within a one-hour window, backed by "testStore"
        processor = new DedupKeyValueProcessor<>("testStore", Duration.ofHours(1));

        // The processor resolves its window store from the context during init
        when(context.getStateStore("testStore")).thenReturn(windowStore);

        processor.init(context);
    }

    @Test
    void shouldProcessNewRecord() {
        var inputRecord = new Record<String, KafkaError>("key", new KafkaError(), 0);

        processor.process(inputRecord);

        // First occurrence: the key/value pair is stored and the record is forwarded
        verify(windowStore).put(inputRecord.key(), inputRecord.value(), inputRecord.timestamp());
        verify(context).forward(argThat(forwarded -> forwarded.value().getValue().equals(inputRecord.value())));
    }

    @Test
    void shouldProcessDuplicate() {
        var duplicateValue = new KafkaError();
        var inputRecord = new Record<String, KafkaError>("key", duplicateValue, 0L);

        // The backward fetch yields a matching entry, flagging the record as a duplicate
        when(windowStoreIterator.hasNext()).thenReturn(true);
        when(windowStoreIterator.next()).thenReturn(KeyValue.pair(0L, duplicateValue));
        when(windowStore.backwardFetch(any(), any(), any())).thenReturn(windowStoreIterator);

        processor.process(inputRecord);

        // Duplicates are neither stored again nor forwarded
        verify(windowStore, never()).put(anyString(), any(), anyLong());
        verify(context, never()).forward(any());
    }

    @Test
    void shouldThrowException() {
        var inputRecord = new Record<String, KafkaError>("key", new KafkaError(), 0L);

        // Make the store misbehave so the processor takes its error path
        when(windowStore.backwardFetch(any(), any(), any()))
                .thenReturn(null)
                .thenThrow(new RuntimeException("Exception..."));
        doThrow(new RuntimeException("Exception...")).when(windowStore).put(anyString(), any(), anyLong());

        processor.process(inputRecord);

        // The failure is wrapped into a ProcessingResult error and forwarded
        verify(context).forward(argThat(forwarded -> forwarded.value()
                .getError()
                .getContextMessage()
                .equals("Could not figure out what to do with the current payload: "
                        + "An unlikely error occurred during deduplication transform")));
    }
}
| java | Apache-2.0 | 7b912c3bc711629f1745ae4a60bffeaed649d07a | 2026-01-05T02:38:06.284501Z | false |
michelin/kstreamplify | https://github.com/michelin/kstreamplify/blob/7b912c3bc711629f1745ae4a60bffeaed649d07a/kstreamplify-core/src/test/java/com/michelin/kstreamplify/deduplication/DedupWithPredicateProcessorTest.java | kstreamplify-core/src/test/java/com/michelin/kstreamplify/deduplication/DedupWithPredicateProcessorTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.michelin.kstreamplify.deduplication;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyLong;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.ArgumentMatchers.argThat;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import com.michelin.kstreamplify.avro.KafkaError;
import com.michelin.kstreamplify.error.ProcessingResult;
import java.time.Duration;
import org.apache.avro.specific.SpecificRecord;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.processor.api.ProcessorContext;
import org.apache.kafka.streams.processor.api.Record;
import org.apache.kafka.streams.state.WindowStore;
import org.apache.kafka.streams.state.WindowStoreIterator;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
/** Tests for {@link DedupWithPredicateProcessor}. */
@ExtendWith(MockitoExtension.class)
class DedupWithPredicateProcessorTest {
    private DedupWithPredicateProcessor<String, KafkaError> processor;

    @Mock
    private ProcessorContext<String, ProcessingResult<KafkaError, KafkaError>> context;

    @Mock
    private WindowStore<String, KafkaError> windowStore;

    @Mock
    private WindowStoreIterator<KafkaError> windowStoreIterator;

    @BeforeEach
    void setUp() {
        // Deduplicate on the extracted key (the stub always yields "") within a one-hour window
        processor = new DedupWithPredicateProcessor<>("testStore", Duration.ofHours(1), KeyExtractorStub::extract);

        // The processor resolves its window store from the context during init
        when(context.getStateStore("testStore")).thenReturn(windowStore);

        processor.init(context);
    }

    @Test
    void shouldProcessNewRecord() {
        var inputRecord = new Record<String, KafkaError>("key", new KafkaError(), 0);

        processor.process(inputRecord);

        // First occurrence: stored under the extracted key ("" from the stub) and forwarded
        verify(windowStore).put("", inputRecord.value(), inputRecord.timestamp());
        verify(context).forward(argThat(forwarded -> forwarded.value().getValue().equals(inputRecord.value())));
    }

    @Test
    void shouldProcessDuplicate() {
        var duplicateValue = new KafkaError();
        var inputRecord = new Record<String, KafkaError>("key", duplicateValue, 0L);

        // The backward fetch yields a matching entry, flagging the record as a duplicate
        when(windowStoreIterator.hasNext()).thenReturn(true);
        when(windowStoreIterator.next()).thenReturn(KeyValue.pair(0L, duplicateValue));
        when(windowStore.backwardFetch(any(), any(), any())).thenReturn(windowStoreIterator);

        processor.process(inputRecord);

        // Duplicates are neither stored again nor forwarded
        verify(windowStore, never()).put(anyString(), any(), anyLong());
        verify(context, never()).forward(any());
    }

    @Test
    void shouldThrowException() {
        var inputRecord = new Record<String, KafkaError>("key", new KafkaError(), 0L);

        // Make the store misbehave so the processor takes its error path
        when(windowStore.backwardFetch(any(), any(), any()))
                .thenReturn(null)
                .thenThrow(new RuntimeException("Exception..."));
        doThrow(new RuntimeException("Exception...")).when(windowStore).put(anyString(), any(), anyLong());

        processor.process(inputRecord);

        // The failure is wrapped into a ProcessingResult error and forwarded
        verify(context).forward(argThat(forwarded -> forwarded.value()
                .getError()
                .getContextMessage()
                .equals("Could not figure out what to do with the current payload: "
                        + "An unlikely error occurred during deduplication transform")));
    }

    /** Stub key extractor that always returns an empty string. */
    static class KeyExtractorStub {
        public static <V extends SpecificRecord> String extract(V v) {
            return "";
        }
    }
}
| java | Apache-2.0 | 7b912c3bc711629f1745ae4a60bffeaed649d07a | 2026-01-05T02:38:06.284501Z | false |
michelin/kstreamplify | https://github.com/michelin/kstreamplify/blob/7b912c3bc711629f1745ae4a60bffeaed649d07a/kstreamplify-core/src/test/java/com/michelin/kstreamplify/context/KafkaStreamsExecutionContextTest.java | kstreamplify-core/src/test/java/com/michelin/kstreamplify/context/KafkaStreamsExecutionContextTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.michelin.kstreamplify.context;
import static org.junit.jupiter.api.Assertions.*;
import java.util.Properties;
import org.apache.kafka.streams.StreamsConfig;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
/** Tests for {@link KafkaStreamsExecutionContext}. */
class KafkaStreamsExecutionContextTest {
    @BeforeEach
    void setUp() {
        // Reset the static context state between tests
        KafkaStreamsExecutionContext.setProperties(null);
        KafkaStreamsExecutionContext.setDlqProperties(new Properties());
    }

    @Test
    void shouldNotRegisterPropertiesWhenNull() {
        KafkaStreamsExecutionContext.registerProperties(null);

        assertNull(KafkaStreamsExecutionContext.getProperties());
    }

    @Test
    void shouldAddPrefixToAppId() {
        var props = new Properties();
        props.put(StreamsConfig.APPLICATION_ID_CONFIG, "appId");
        props.put("prefix.self", "abc.");

        KafkaStreamsExecutionContext.registerProperties(props);

        // The self prefix is captured and prepended to the application id
        assertEquals("abc.", KafkaStreamsExecutionContext.getPrefix());
        assertEquals(
                "abc.appId", KafkaStreamsExecutionContext.getProperties().get(StreamsConfig.APPLICATION_ID_CONFIG));
    }

    @Test
    void shouldNotAddPrefixToAppIdIfNoPrefix() {
        var props = new Properties();
        props.put(StreamsConfig.APPLICATION_ID_CONFIG, "appId");

        KafkaStreamsExecutionContext.registerProperties(props);

        // Without a configured prefix the application id is left untouched
        assertEquals("", KafkaStreamsExecutionContext.getPrefix());
        assertEquals("appId", KafkaStreamsExecutionContext.getProperties().get(StreamsConfig.APPLICATION_ID_CONFIG));
    }

    @Test
    void shouldNotAddPrefixToAppIdIfNotAppId() {
        var props = new Properties();
        props.put("prefix.self", "abc.");

        KafkaStreamsExecutionContext.registerProperties(props);

        // The prefix is kept even though there is no application id to decorate
        assertEquals("abc.", KafkaStreamsExecutionContext.getPrefix());
        assertNull(KafkaStreamsExecutionContext.getProperties().get(StreamsConfig.APPLICATION_ID_CONFIG));
    }

    @Test
    void shouldExtractDlqProperties() {
        var props = new Properties();
        props.put("dlq.some.feature", "true");
        props.put("dlq.other.feature", "false");

        KafkaStreamsExecutionContext.registerProperties(props);

        // All "dlq."-prefixed entries end up in the dedicated DLQ properties
        var dlqProps = KafkaStreamsExecutionContext.getDlqProperties();
        assertEquals("true", dlqProps.getProperty("dlq.some.feature"));
        assertEquals("false", dlqProps.getProperty("dlq.other.feature"));
    }

    @Test
    void shouldReturnTrueWhenDlqFeatureEnabled() {
        var props = new Properties();
        props.put("dlq.test.feature", "true");

        KafkaStreamsExecutionContext.registerProperties(props);

        assertTrue(KafkaStreamsExecutionContext.isDlqFeatureEnabled("dlq.test.feature"));
    }

    @Test
    void shouldReturnFalseWhenDlqFeatureDisabled() {
        var props = new Properties();
        props.put("dlq.test.feature", "false");

        KafkaStreamsExecutionContext.registerProperties(props);

        assertFalse(KafkaStreamsExecutionContext.isDlqFeatureEnabled("dlq.test.feature"));
    }

    @Test
    void shouldReturnFalseWhenDlqFeatureNotPresent() {
        KafkaStreamsExecutionContext.registerProperties(new Properties());

        assertFalse(KafkaStreamsExecutionContext.isDlqFeatureEnabled("dlq.missing.feature"));
    }

    @Test
    void shouldUseDefaultFalseWhenDlqFeatureMissing() {
        KafkaStreamsExecutionContext.setDlqProperties(new Properties());

        assertFalse(KafkaStreamsExecutionContext.isDlqFeatureEnabled("dlq.non.existing"));
    }
}
| java | Apache-2.0 | 7b912c3bc711629f1745ae4a60bffeaed649d07a | 2026-01-05T02:38:06.284501Z | false |
michelin/kstreamplify | https://github.com/michelin/kstreamplify/blob/7b912c3bc711629f1745ae4a60bffeaed649d07a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/topic/TopicUtils.java | kstreamplify-core/src/main/java/com/michelin/kstreamplify/topic/TopicUtils.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.michelin.kstreamplify.topic;
import static com.michelin.kstreamplify.property.PropertiesUtils.PROPERTY_SEPARATOR;
import com.michelin.kstreamplify.context.KafkaStreamsExecutionContext;
import java.util.Properties;
/** The topic utils class. */
public final class TopicUtils {
    /** The topic property name. */
    public static final String TOPIC_PROPERTY_NAME = "topic";

    /** The prefix property name. */
    public static final String PREFIX_PROPERTY_NAME = "prefix";

    /** The remap property name. */
    public static final String REMAP_PROPERTY_NAME = "remap";

    private TopicUtils() {}

    /**
     * Remap and prefix the topic name. The remap is read from the configuration, e.g.:
     *
     * <pre>{@code
     * kafka:
     *   properties:
     *     topic:
     *       remap:
     *         myTopic: "myRemappedTopicName"
     * }</pre>
     *
     * The prefix is read from the configuration, e.g.:
     *
     * <pre>{@code
     * kafka:
     *   properties:
     *     prefix:
     *       anyPrefix: "myPrefix."
     * }</pre>
     *
     * @param topicName The topic name to remap and prefix
     * @param prefixKey The prefix key
     * @return The prefixed and/or remapped topic.
     */
    public static String remapAndPrefix(String topicName, String prefixKey) {
        Properties properties = KafkaStreamsExecutionContext.getProperties();

        // Resolve a dynamic remap for this topic, falling back to the original name
        String remapKey = String.join(PROPERTY_SEPARATOR, TOPIC_PROPERTY_NAME, REMAP_PROPERTY_NAME, topicName);
        String effectiveTopic = properties.getProperty(remapKey, topicName);

        // Resolve the prefix for the given key, defaulting to no prefix at all
        String prefix = properties.getProperty(PREFIX_PROPERTY_NAME + PROPERTY_SEPARATOR + prefixKey, "");

        return prefix + effectiveTopic;
    }
}
| java | Apache-2.0 | 7b912c3bc711629f1745ae4a60bffeaed649d07a | 2026-01-05T02:38:06.284501Z | false |
michelin/kstreamplify | https://github.com/michelin/kstreamplify/blob/7b912c3bc711629f1745ae4a60bffeaed649d07a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/service/KubernetesService.java | kstreamplify-core/src/main/java/com/michelin/kstreamplify/service/KubernetesService.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.michelin.kstreamplify.service;
import com.michelin.kstreamplify.context.KafkaStreamsExecutionContext;
import com.michelin.kstreamplify.initializer.KafkaStreamsInitializer;
import java.net.HttpURLConnection;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.processor.internals.StreamThread;
/** Kafka Streams Kubernetes service exposing readiness and liveness probes. */
@Slf4j
public final class KubernetesService {
    /** The readiness path property name. */
    public static final String READINESS_PATH_PROPERTY_NAME = "kubernetes.readiness.path";

    /** The liveness path property name. */
    public static final String LIVENESS_PATH_PROPERTY_NAME = "kubernetes.liveness.path";

    /** The default readiness path. */
    public static final String DEFAULT_READINESS_PATH = "ready";

    /** The default liveness path. */
    public static final String DEFAULT_LIVENESS_PATH = "liveness";

    private final KafkaStreamsInitializer kafkaStreamsInitializer;

    /**
     * Constructor.
     *
     * @param kafkaStreamsInitializer The Kafka Streams initializer
     */
    public KubernetesService(KafkaStreamsInitializer kafkaStreamsInitializer) {
        this.kafkaStreamsInitializer = kafkaStreamsInitializer;
    }

    /**
     * Kubernetes' readiness probe.
     *
     * <p>Returns 200 when the Kafka Streams instance is RUNNING, 204 while every stream thread is
     * still starting up during a rebalance, 503 for any other state, and 400 when no Kafka Streams
     * instance exists.
     *
     * @return An HTTP response code
     */
    public int getReadiness() {
        // Hoisted: the original re-fetched the KafkaStreams instance on every use
        KafkaStreams kafkaStreams = kafkaStreamsInitializer.getKafkaStreams();
        if (kafkaStreams == null) {
            return HttpURLConnection.HTTP_BAD_REQUEST;
        }

        log.debug(
                "Kafka Stream \"{}\" state: {}",
                KafkaStreamsExecutionContext.getProperties().getProperty(StreamsConfig.APPLICATION_ID_CONFIG),
                kafkaStreams.state());

        if (kafkaStreams.state() == KafkaStreams.State.REBALANCING) {
            // Hoisted: metadataForLocalThreads() was previously evaluated twice per probe
            var threadMetadata = kafkaStreams.metadataForLocalThreads();

            // equalsIgnoreCase replaces the less direct compareToIgnoreCase(...) == 0
            long startingThreadCount = threadMetadata.stream()
                    .filter(t -> StreamThread.State.STARTING.name().equalsIgnoreCase(t.threadState())
                            || StreamThread.State.CREATED.name().equalsIgnoreCase(t.threadState()))
                    .count();

            // All threads still starting: report "no content" so Kubernetes keeps waiting
            if (startingThreadCount == threadMetadata.size()) {
                return HttpURLConnection.HTTP_NO_CONTENT;
            }
        }

        // Enum comparison with == for consistency with the REBALANCING check above
        return kafkaStreams.state() == KafkaStreams.State.RUNNING
                ? HttpURLConnection.HTTP_OK
                : HttpURLConnection.HTTP_UNAVAILABLE;
    }

    /**
     * Kubernetes' liveness probe.
     *
     * <p>Returns 200 while the Kafka Streams instance has not reached NOT_RUNNING, 500 once it
     * has, and 204 when no Kafka Streams instance exists.
     *
     * @return An HTTP response code
     */
    public int getLiveness() {
        KafkaStreams kafkaStreams = kafkaStreamsInitializer.getKafkaStreams();
        if (kafkaStreams == null) {
            return HttpURLConnection.HTTP_NO_CONTENT;
        }
        return kafkaStreams.state() != KafkaStreams.State.NOT_RUNNING
                ? HttpURLConnection.HTTP_OK
                : HttpURLConnection.HTTP_INTERNAL_ERROR;
    }
}
| java | Apache-2.0 | 7b912c3bc711629f1745ae4a60bffeaed649d07a | 2026-01-05T02:38:06.284501Z | false |
michelin/kstreamplify | https://github.com/michelin/kstreamplify/blob/7b912c3bc711629f1745ae4a60bffeaed649d07a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/service/TopologyService.java | kstreamplify-core/src/main/java/com/michelin/kstreamplify/service/TopologyService.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.michelin.kstreamplify.service;
import com.michelin.kstreamplify.initializer.KafkaStreamsInitializer;
import lombok.extern.slf4j.Slf4j;
/** Kafka Streams topology service. */
@Slf4j
public class TopologyService {
    /** The topology path property. */
    public static final String TOPOLOGY_PATH_PROPERTY_NAME = "topology.path";

    /** The default topology path. */
    public static final String TOPOLOGY_DEFAULT_PATH = "topology";

    private final KafkaStreamsInitializer kafkaStreamsInitializer;

    /**
     * Constructor.
     *
     * @param kafkaStreamsInitializer The Kafka Streams initializer
     */
    public TopologyService(KafkaStreamsInitializer kafkaStreamsInitializer) {
        this.kafkaStreamsInitializer = kafkaStreamsInitializer;
    }

    /**
     * Get the Kafka Streams topology as its human-readable description.
     *
     * @return The Kafka Streams topology
     */
    public String getTopology() {
        var topologyDescription = kafkaStreamsInitializer.getTopology().describe();
        return topologyDescription.toString();
    }
}
| java | Apache-2.0 | 7b912c3bc711629f1745ae4a60bffeaed649d07a | 2026-01-05T02:38:06.284501Z | false |
michelin/kstreamplify | https://github.com/michelin/kstreamplify/blob/7b912c3bc711629f1745ae4a60bffeaed649d07a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/service/interactivequeries/CommonStoreService.java | kstreamplify-core/src/main/java/com/michelin/kstreamplify/service/interactivequeries/CommonStoreService.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.michelin.kstreamplify.service.interactivequeries;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.michelin.kstreamplify.exception.OtherInstanceResponseException;
import com.michelin.kstreamplify.initializer.KafkaStreamsInitializer;
import com.michelin.kstreamplify.store.StateStoreRecord;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import java.util.stream.Collectors;
import lombok.AllArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.common.serialization.Serializer;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.KeyQueryMetadata;
import org.apache.kafka.streams.StreamsMetadata;
import org.apache.kafka.streams.errors.StreamsNotStartedException;
import org.apache.kafka.streams.state.HostInfo;
/** Interactive queries service. */
@Slf4j
@AllArgsConstructor
public abstract class CommonStoreService {
private static final String STREAMS_NOT_STARTED = "Cannot process request while instance is in %s state";
/** Error message when the state store is not found. */
protected static final String UNKNOWN_STATE_STORE = "State store %s not found";
private final ObjectMapper objectMapper = new ObjectMapper();
private final HttpClient httpClient;
/** The Kafka Streams initializer. */
protected final KafkaStreamsInitializer kafkaStreamsInitializer;
/**
 * Constructor.
 *
 * @param kafkaStreamsInitializer The Kafka Streams initializer
 */
protected CommonStoreService(KafkaStreamsInitializer kafkaStreamsInitializer) {
    this.kafkaStreamsInitializer = kafkaStreamsInitializer;
    // Default HTTP client used to query other instances hosting remote partitions
    this.httpClient = HttpClient.newHttpClient();
}
/**
 * Get the state store names known across all Kafka Streams clients of this application.
 *
 * @return The state store names, empty when no client metadata is available
 */
public Set<String> getStateStores() {
    checkStreamsRunning();

    Collection<StreamsMetadata> metadata =
            kafkaStreamsInitializer.getKafkaStreams().metadataForAllStreamsClients();

    if (metadata == null || metadata.isEmpty()) {
        return Collections.emptySet();
    }

    // Flatten the per-client store name sets into a single set
    return metadata.stream()
            .map(StreamsMetadata::stateStoreNames)
            .flatMap(Collection::stream)
            .collect(Collectors.toSet());
}
/**
 * Get the metadata of all instances hosting the given store.
 *
 * @param store The store
 * @return The streams metadata of the hosting instances
 */
public Collection<StreamsMetadata> getStreamsMetadataForStore(final String store) {
    checkStreamsRunning();

    KafkaStreams kafkaStreams = kafkaStreamsInitializer.getKafkaStreams();
    return kafkaStreams.streamsMetadataForStore(store);
}
/**
 * Get the metadata of the instance hosting the given key for a store.
 *
 * @param store The store
 * @param key The key
 * @param serializer The key serializer
 * @return The key query metadata
 * @param <K> The key type
 */
protected <K> KeyQueryMetadata getKeyQueryMetadata(String store, K key, Serializer<K> serializer) {
    checkStreamsRunning();

    KafkaStreams kafkaStreams = kafkaStreamsInitializer.getKafkaStreams();
    return kafkaStreams.queryMetadataForKey(store, key, serializer);
}
/**
* Request remote instance.
*
* @param host The host instance
* @param endpointPath The endpoint path to request
* @return The response
*/
protected List<StateStoreRecord> getAllOnRemoteHost(HostInfo host, String endpointPath) {
try {
String jsonResponse = sendRequest(host, endpointPath);
return objectMapper.readValue(jsonResponse, new TypeReference<>() {});
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
return Collections.emptyList();
} catch (Exception e) {
throw new OtherInstanceResponseException(e);
}
}
/**
* Request remote instance.
*
* @param host The host instance
* @param endpointPath The endpoint path to request
* @return The response
*/
protected StateStoreRecord getByKeyOnRemoteHost(HostInfo host, String endpointPath) {
try {
String jsonResponse = sendRequest(host, endpointPath);
return objectMapper.readValue(jsonResponse, StateStoreRecord.class);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
return null;
} catch (Exception e) {
throw new OtherInstanceResponseException(e);
}
}
/**
* Send request to the remote host.
*
* @param host The host
* @param endpointPath The endpoint path
* @return The response
* @throws URISyntaxException URI syntax exception
* @throws ExecutionException Execution exception
* @throws InterruptedException Interrupted exception
*/
private String sendRequest(HostInfo host, String endpointPath)
throws URISyntaxException, ExecutionException, InterruptedException {
HttpRequest request = HttpRequest.newBuilder()
.header("Accept", "application/json")
.uri(new URI("http://%s:%d/%s".formatted(host.host(), host.port(), endpointPath)))
.GET()
.build();
return httpClient
.sendAsync(request, HttpResponse.BodyHandlers.ofString())
.thenApply(HttpResponse::body)
.get();
}
/**
* Check if given host is equals to the current stream host.
*
* @param compareHostInfo The host to compare
* @return True if the host is not the current host
*/
protected boolean isNotCurrentHost(HostInfo compareHostInfo) {
return !kafkaStreamsInitializer.getHostInfo().host().equals(compareHostInfo.host())
|| kafkaStreamsInitializer.getHostInfo().port() != compareHostInfo.port();
}
/** Check if the streams are started. */
private void checkStreamsRunning() {
if (kafkaStreamsInitializer.isNotRunning()) {
KafkaStreams.State state = kafkaStreamsInitializer.getKafkaStreams().state();
throw new StreamsNotStartedException(STREAMS_NOT_STARTED.formatted(state));
}
}
/**
* The path for RPC.
*
* @return The path
*/
protected abstract String path();
}
| java | Apache-2.0 | 7b912c3bc711629f1745ae4a60bffeaed649d07a | 2026-01-05T02:38:06.284501Z | false |
michelin/kstreamplify | https://github.com/michelin/kstreamplify/blob/7b912c3bc711629f1745ae4a60bffeaed649d07a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/service/interactivequeries/window/TimestampedWindowStoreService.java | kstreamplify-core/src/main/java/com/michelin/kstreamplify/service/interactivequeries/window/TimestampedWindowStoreService.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.michelin.kstreamplify.service.interactivequeries.window;
import com.michelin.kstreamplify.exception.UnknownKeyException;
import com.michelin.kstreamplify.initializer.KafkaStreamsInitializer;
import com.michelin.kstreamplify.store.StateStoreRecord;
import java.net.http.HttpClient;
import java.time.Instant;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.streams.KeyQueryMetadata;
import org.apache.kafka.streams.kstream.Windowed;
import org.apache.kafka.streams.query.QueryResult;
import org.apache.kafka.streams.query.StateQueryRequest;
import org.apache.kafka.streams.query.StateQueryResult;
import org.apache.kafka.streams.query.WindowKeyQuery;
import org.apache.kafka.streams.query.WindowRangeQuery;
import org.apache.kafka.streams.state.KeyValueIterator;
import org.apache.kafka.streams.state.ValueAndTimestamp;
import org.apache.kafka.streams.state.WindowStoreIterator;
/** Timestamped window store service, reading {@code ValueAndTimestamp} values from window stores. */
@Slf4j
public class TimestampedWindowStoreService extends CommonWindowStoreService {
    /**
     * Constructor.
     *
     * @param kafkaStreamsInitializer The Kafka Streams initializer
     */
    public TimestampedWindowStoreService(KafkaStreamsInitializer kafkaStreamsInitializer) {
        super(kafkaStreamsInitializer);
    }

    /**
     * Constructor.
     *
     * @param kafkaStreamsInitializer The Kafka Streams initializer
     * @param httpClient The HTTP client used to query other instances
     */
    @SuppressWarnings("unused")
    public TimestampedWindowStoreService(KafkaStreamsInitializer kafkaStreamsInitializer, HttpClient httpClient) {
        super(httpClient, kafkaStreamsInitializer);
    }

    /** {@inheritDoc} */
    @Override
    protected String path() {
        return "window/timestamped";
    }

    /** {@inheritDoc} */
    @Override
    protected List<StateStoreRecord> executeWindowRangeQuery(String store, Instant startTime, Instant endTime) {
        WindowRangeQuery<String, ValueAndTimestamp<Object>> windowRangeQuery =
                WindowRangeQuery.withWindowStartRange(startTime, endTime);

        StateQueryResult<KeyValueIterator<Windowed<String>, ValueAndTimestamp<Object>>> result = kafkaStreamsInitializer
                .getKafkaStreams()
                .query(StateQueryRequest.inStore(store).withQuery(windowRangeQuery));

        // Flatten the per-partition iterators into a single list of records.
        List<StateStoreRecord> partitionsResult = new ArrayList<>();
        result.getPartitionResults().forEach((key, queryResult) -> queryResult
                .getResult()
                .forEachRemaining(kv -> partitionsResult.add(
                        new StateStoreRecord(kv.key.key(), kv.value.value(), kv.value.timestamp()))));

        return partitionsResult;
    }

    /** {@inheritDoc} */
    @Override
    protected List<StateStoreRecord> executeWindowKeyQuery(
            KeyQueryMetadata keyQueryMetadata, String store, String key, Instant startTime, Instant endTime) {
        WindowKeyQuery<String, ValueAndTimestamp<Object>> windowKeyQuery =
                WindowKeyQuery.withKeyAndWindowStartRange(key, startTime, endTime);

        StateQueryResult<WindowStoreIterator<ValueAndTimestamp<Object>>> result = kafkaStreamsInitializer
                .getKafkaStreams()
                .query(StateQueryRequest.inStore(store)
                        .withQuery(windowKeyQuery)
                        .withPartitions(Collections.singleton(keyQueryMetadata.partition())));

        // The query is scoped to the key's partition, which is usually not partition 0. Look up the
        // actual failing partition result instead of reading index 0, which would be null and NPE.
        result.getPartitionResults().values().stream()
                .filter(QueryResult::isFailure)
                .findFirst()
                .ifPresent(failure -> {
                    throw new IllegalArgumentException(failure.getFailureMessage());
                });

        // No partition result, or an empty iterator, both mean the key is absent from the window.
        if (result.getOnlyPartitionResult() == null
                || !result.getOnlyPartitionResult().getResult().hasNext()) {
            throw new UnknownKeyException(key);
        }

        List<StateStoreRecord> partitionsResult = new ArrayList<>();
        result.getOnlyPartitionResult()
                .getResult()
                .forEachRemaining(
                        kv -> partitionsResult.add(new StateStoreRecord(key, kv.value.value(), kv.value.timestamp())));

        return partitionsResult;
    }
}
| java | Apache-2.0 | 7b912c3bc711629f1745ae4a60bffeaed649d07a | 2026-01-05T02:38:06.284501Z | false |
michelin/kstreamplify | https://github.com/michelin/kstreamplify/blob/7b912c3bc711629f1745ae4a60bffeaed649d07a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/service/interactivequeries/window/CommonWindowStoreService.java | kstreamplify-core/src/main/java/com/michelin/kstreamplify/service/interactivequeries/window/CommonWindowStoreService.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.michelin.kstreamplify.service.interactivequeries.window;
import com.michelin.kstreamplify.initializer.KafkaStreamsInitializer;
import com.michelin.kstreamplify.service.interactivequeries.CommonStoreService;
import com.michelin.kstreamplify.store.StateStoreRecord;
import java.net.http.HttpClient;
import java.time.Instant;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.common.serialization.StringSerializer;
import org.apache.kafka.streams.KeyQueryMetadata;
import org.apache.kafka.streams.StreamsMetadata;
import org.apache.kafka.streams.errors.UnknownStateStoreException;
import org.apache.kafka.streams.state.HostInfo;
@Slf4j
abstract class CommonWindowStoreService extends CommonStoreService {
    /**
     * Constructor.
     *
     * @param kafkaStreamsInitializer The Kafka Streams initializer
     */
    protected CommonWindowStoreService(KafkaStreamsInitializer kafkaStreamsInitializer) {
        super(kafkaStreamsInitializer);
    }

    /**
     * Constructor.
     *
     * @param httpClient The HTTP client
     * @param kafkaStreamsInitializer The Kafka Streams initializer
     */
    protected CommonWindowStoreService(HttpClient httpClient, KafkaStreamsInitializer kafkaStreamsInitializer) {
        super(httpClient, kafkaStreamsInitializer);
    }

    /**
     * Get all values from the store, gathering records from every instance hosting a partition.
     *
     * @param store The store
     * @param startTime The start time
     * @param endTime The end time
     * @return The values
     */
    public List<StateStoreRecord> getAll(String store, Instant startTime, Instant endTime) {
        final Collection<StreamsMetadata> streamsMetadata = getStreamsMetadataForStore(store);
        if (streamsMetadata == null || streamsMetadata.isEmpty()) {
            throw new UnknownStateStoreException(UNKNOWN_STATE_STORE.formatted(store));
        }

        List<StateStoreRecord> records = new ArrayList<>();
        for (StreamsMetadata metadata : streamsMetadata) {
            if (isNotCurrentHost(metadata.hostInfo())) {
                log.debug("Fetching data on other instance ({}:{})", metadata.host(), metadata.port());
                String endpoint =
                        "store/" + path() + "/local/" + store + "?startTime=" + startTime + "&endTime=" + endTime;
                records.addAll(getAllOnRemoteHost(metadata.hostInfo(), endpoint));
            } else {
                log.debug("Fetching data on this instance ({}:{})", metadata.host(), metadata.port());
                records.addAll(executeWindowRangeQuery(store, startTime, endTime));
            }
        }
        return records;
    }

    /**
     * Get the value by key from the store, forwarding to the owning instance when necessary.
     *
     * @param store The store name
     * @param key The key
     * @param startTime The start time
     * @param endTime The end time
     * @return The value
     */
    public List<StateStoreRecord> getByKey(String store, String key, Instant startTime, Instant endTime) {
        KeyQueryMetadata keyQueryMetadata = getKeyQueryMetadata(store, key, new StringSerializer());
        if (keyQueryMetadata == null) {
            throw new UnknownStateStoreException(UNKNOWN_STATE_STORE.formatted(store));
        }

        HostInfo owner = keyQueryMetadata.activeHost();
        if (!isNotCurrentHost(owner)) {
            log.debug("The key {} has been located on the current instance ({}:{})", key, owner.host(), owner.port());
            return executeWindowKeyQuery(keyQueryMetadata, store, key, startTime, endTime);
        }

        log.debug("The key {} has been located on another instance ({}:{})", key, owner.host(), owner.port());
        return getAllOnRemoteHost(
                owner, "store/" + path() + "/" + store + "/" + key + "?startTime=" + startTime + "&endTime=" + endTime);
    }

    /**
     * Get all values from the store on the local instance only.
     *
     * @param store The store
     * @param startTime The start time
     * @param endTime The end time
     * @return The values
     */
    public List<StateStoreRecord> getAllOnLocalInstance(String store, Instant startTime, Instant endTime) {
        final Collection<StreamsMetadata> hosts = getStreamsMetadataForStore(store);
        if (hosts == null || hosts.isEmpty()) {
            throw new UnknownStateStoreException(UNKNOWN_STATE_STORE.formatted(store));
        }
        return executeWindowRangeQuery(store, startTime, endTime);
    }

    /**
     * Execute a window range query on the store.
     *
     * @param store The store
     * @param startTime The start time
     * @param endTime The end time
     * @return The values
     */
    protected abstract List<StateStoreRecord> executeWindowRangeQuery(String store, Instant startTime, Instant endTime);

    /**
     * Execute a window key query on the store.
     *
     * @param keyQueryMetadata The key query metadata
     * @param store The store
     * @param key The key
     * @param startTime The start time
     * @param endTime The end time
     * @return The values
     */
    protected abstract List<StateStoreRecord> executeWindowKeyQuery(
            KeyQueryMetadata keyQueryMetadata, String store, String key, Instant startTime, Instant endTime);
}
| java | Apache-2.0 | 7b912c3bc711629f1745ae4a60bffeaed649d07a | 2026-01-05T02:38:06.284501Z | false |
michelin/kstreamplify | https://github.com/michelin/kstreamplify/blob/7b912c3bc711629f1745ae4a60bffeaed649d07a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/service/interactivequeries/window/WindowStoreService.java | kstreamplify-core/src/main/java/com/michelin/kstreamplify/service/interactivequeries/window/WindowStoreService.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.michelin.kstreamplify.service.interactivequeries.window;
import com.michelin.kstreamplify.exception.UnknownKeyException;
import com.michelin.kstreamplify.initializer.KafkaStreamsInitializer;
import com.michelin.kstreamplify.store.StateStoreRecord;
import java.net.http.HttpClient;
import java.time.Instant;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.streams.KeyQueryMetadata;
import org.apache.kafka.streams.kstream.Windowed;
import org.apache.kafka.streams.query.QueryResult;
import org.apache.kafka.streams.query.StateQueryRequest;
import org.apache.kafka.streams.query.StateQueryResult;
import org.apache.kafka.streams.query.WindowKeyQuery;
import org.apache.kafka.streams.query.WindowRangeQuery;
import org.apache.kafka.streams.state.KeyValueIterator;
import org.apache.kafka.streams.state.WindowStoreIterator;
/** Window store service. */
@Slf4j
public class WindowStoreService extends CommonWindowStoreService {
    /**
     * Constructor.
     *
     * @param kafkaStreamsInitializer The Kafka Streams initializer
     */
    public WindowStoreService(KafkaStreamsInitializer kafkaStreamsInitializer) {
        super(kafkaStreamsInitializer);
    }

    /** {@inheritDoc} */
    @Override
    protected String path() {
        return "window";
    }

    /**
     * Constructor.
     *
     * @param kafkaStreamsInitializer The Kafka Streams initializer
     * @param httpClient The HTTP client used to query other instances
     */
    @SuppressWarnings("unused")
    public WindowStoreService(KafkaStreamsInitializer kafkaStreamsInitializer, HttpClient httpClient) {
        super(httpClient, kafkaStreamsInitializer);
    }

    /** {@inheritDoc} */
    @Override
    protected List<StateStoreRecord> executeWindowRangeQuery(String store, Instant startTime, Instant endTime) {
        WindowRangeQuery<String, Object> windowRangeQuery = WindowRangeQuery.withWindowStartRange(startTime, endTime);

        StateQueryResult<KeyValueIterator<Windowed<String>, Object>> result = kafkaStreamsInitializer
                .getKafkaStreams()
                .query(StateQueryRequest.inStore(store).withQuery(windowRangeQuery));

        // Flatten the per-partition iterators into a single list of records.
        List<StateStoreRecord> partitionsResult = new ArrayList<>();
        result.getPartitionResults().forEach((key, queryResult) -> queryResult
                .getResult()
                .forEachRemaining(kv -> partitionsResult.add(new StateStoreRecord(kv.key.key(), kv.value))));

        return partitionsResult;
    }

    /** {@inheritDoc} */
    @Override
    protected List<StateStoreRecord> executeWindowKeyQuery(
            KeyQueryMetadata keyQueryMetadata, String store, String key, Instant startTime, Instant endTime) {
        WindowKeyQuery<String, Object> windowKeyQuery =
                WindowKeyQuery.withKeyAndWindowStartRange(key, startTime, endTime);

        StateQueryResult<WindowStoreIterator<Object>> result = kafkaStreamsInitializer
                .getKafkaStreams()
                .query(StateQueryRequest.inStore(store)
                        .withQuery(windowKeyQuery)
                        .withPartitions(Collections.singleton(keyQueryMetadata.partition())));

        // The query is scoped to the key's partition, which is usually not partition 0. Look up the
        // actual failing partition result instead of reading index 0, which would be null and NPE.
        result.getPartitionResults().values().stream()
                .filter(QueryResult::isFailure)
                .findFirst()
                .ifPresent(failure -> {
                    throw new IllegalArgumentException(failure.getFailureMessage());
                });

        // No partition result, or an empty iterator, both mean the key is absent from the window.
        if (result.getOnlyPartitionResult() == null
                || !result.getOnlyPartitionResult().getResult().hasNext()) {
            throw new UnknownKeyException(key);
        }

        List<StateStoreRecord> partitionsResult = new ArrayList<>();
        result.getOnlyPartitionResult()
                .getResult()
                .forEachRemaining(kv -> partitionsResult.add(new StateStoreRecord(key, kv.value)));

        return partitionsResult;
    }
}
| java | Apache-2.0 | 7b912c3bc711629f1745ae4a60bffeaed649d07a | 2026-01-05T02:38:06.284501Z | false |
michelin/kstreamplify | https://github.com/michelin/kstreamplify/blob/7b912c3bc711629f1745ae4a60bffeaed649d07a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/service/interactivequeries/keyvalue/CommonKeyValueStoreService.java | kstreamplify-core/src/main/java/com/michelin/kstreamplify/service/interactivequeries/keyvalue/CommonKeyValueStoreService.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.michelin.kstreamplify.service.interactivequeries.keyvalue;
import com.michelin.kstreamplify.initializer.KafkaStreamsInitializer;
import com.michelin.kstreamplify.service.interactivequeries.CommonStoreService;
import com.michelin.kstreamplify.store.StateStoreRecord;
import java.net.http.HttpClient;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.common.serialization.StringSerializer;
import org.apache.kafka.streams.KeyQueryMetadata;
import org.apache.kafka.streams.StreamsMetadata;
import org.apache.kafka.streams.errors.UnknownStateStoreException;
import org.apache.kafka.streams.state.HostInfo;
@Slf4j
abstract class CommonKeyValueStoreService extends CommonStoreService {
    /**
     * Constructor.
     *
     * @param kafkaStreamsInitializer The Kafka Streams initializer
     */
    protected CommonKeyValueStoreService(KafkaStreamsInitializer kafkaStreamsInitializer) {
        super(kafkaStreamsInitializer);
    }

    /**
     * Constructor.
     *
     * @param httpClient The HTTP client
     * @param kafkaStreamsInitializer The Kafka Streams initializer
     */
    protected CommonKeyValueStoreService(HttpClient httpClient, KafkaStreamsInitializer kafkaStreamsInitializer) {
        super(httpClient, kafkaStreamsInitializer);
    }

    /**
     * Get all values from the store, gathering records from every instance hosting a partition.
     *
     * @param store The store
     * @return The values
     */
    public List<StateStoreRecord> getAll(String store) {
        final Collection<StreamsMetadata> streamsMetadata = getStreamsMetadataForStore(store);
        if (streamsMetadata == null || streamsMetadata.isEmpty()) {
            throw new UnknownStateStoreException(UNKNOWN_STATE_STORE.formatted(store));
        }

        List<StateStoreRecord> records = new ArrayList<>();
        for (StreamsMetadata metadata : streamsMetadata) {
            if (isNotCurrentHost(metadata.hostInfo())) {
                log.debug("Fetching data on other instance ({}:{})", metadata.host(), metadata.port());
                records.addAll(getAllOnRemoteHost(metadata.hostInfo(), "store/" + path() + "/local/" + store));
            } else {
                log.debug("Fetching data on this instance ({}:{})", metadata.host(), metadata.port());
                records.addAll(executeRangeQuery(store));
            }
        }
        return records;
    }

    /**
     * Get the value by key from the store, forwarding to the owning instance when necessary.
     *
     * @param store The store name
     * @param key The key
     * @return The value
     */
    public StateStoreRecord getByKey(String store, String key) {
        KeyQueryMetadata keyQueryMetadata = getKeyQueryMetadata(store, key, new StringSerializer());
        if (keyQueryMetadata == null) {
            throw new UnknownStateStoreException(UNKNOWN_STATE_STORE.formatted(store));
        }

        HostInfo owner = keyQueryMetadata.activeHost();
        if (!isNotCurrentHost(owner)) {
            log.debug("The key {} has been located on the current instance ({}:{})", key, owner.host(), owner.port());
            return executeKeyQuery(keyQueryMetadata, store, key);
        }

        log.debug("The key {} has been located on another instance ({}:{})", key, owner.host(), owner.port());
        return getByKeyOnRemoteHost(owner, "store/" + path() + "/" + store + "/" + key);
    }

    /**
     * Get all values from the store on the local instance only.
     *
     * @param store The store
     * @return The values
     */
    public List<StateStoreRecord> getAllOnLocalInstance(String store) {
        final Collection<StreamsMetadata> hosts = getStreamsMetadataForStore(store);
        if (hosts == null || hosts.isEmpty()) {
            throw new UnknownStateStoreException(UNKNOWN_STATE_STORE.formatted(store));
        }
        return executeRangeQuery(store);
    }

    /**
     * Execute a range query on the store.
     *
     * @param store The store
     * @return The results
     */
    protected abstract List<StateStoreRecord> executeRangeQuery(String store);

    /**
     * Execute a key query on the store.
     *
     * @param keyQueryMetadata The key query metadata
     * @param store The store
     * @param key The key
     * @return The result
     */
    protected abstract StateStoreRecord executeKeyQuery(KeyQueryMetadata keyQueryMetadata, String store, String key);
}
| java | Apache-2.0 | 7b912c3bc711629f1745ae4a60bffeaed649d07a | 2026-01-05T02:38:06.284501Z | false |
michelin/kstreamplify | https://github.com/michelin/kstreamplify/blob/7b912c3bc711629f1745ae4a60bffeaed649d07a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/service/interactivequeries/keyvalue/TimestampedKeyValueStoreService.java | kstreamplify-core/src/main/java/com/michelin/kstreamplify/service/interactivequeries/keyvalue/TimestampedKeyValueStoreService.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.michelin.kstreamplify.service.interactivequeries.keyvalue;
import com.michelin.kstreamplify.exception.UnknownKeyException;
import com.michelin.kstreamplify.initializer.KafkaStreamsInitializer;
import com.michelin.kstreamplify.store.StateStoreRecord;
import java.net.http.HttpClient;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.streams.KeyQueryMetadata;
import org.apache.kafka.streams.query.QueryResult;
import org.apache.kafka.streams.query.StateQueryRequest;
import org.apache.kafka.streams.query.StateQueryResult;
import org.apache.kafka.streams.query.TimestampedKeyQuery;
import org.apache.kafka.streams.query.TimestampedRangeQuery;
import org.apache.kafka.streams.state.KeyValueIterator;
import org.apache.kafka.streams.state.ValueAndTimestamp;
/** Timestamped key-value store service. */
@Slf4j
public class TimestampedKeyValueStoreService extends CommonKeyValueStoreService {
    /**
     * Constructor.
     *
     * @param kafkaStreamsInitializer The Kafka Streams initializer
     */
    public TimestampedKeyValueStoreService(KafkaStreamsInitializer kafkaStreamsInitializer) {
        super(kafkaStreamsInitializer);
    }

    /**
     * Constructor.
     *
     * @param kafkaStreamsInitializer The Kafka Streams initializer
     * @param httpClient The HTTP client used to query other instances
     */
    @SuppressWarnings("unused")
    public TimestampedKeyValueStoreService(KafkaStreamsInitializer kafkaStreamsInitializer, HttpClient httpClient) {
        super(httpClient, kafkaStreamsInitializer);
    }

    /** {@inheritDoc} */
    @Override
    protected String path() {
        return "key-value/timestamped";
    }

    /** {@inheritDoc} */
    @Override
    protected List<StateStoreRecord> executeRangeQuery(String store) {
        TimestampedRangeQuery<String, Object> rangeQuery = TimestampedRangeQuery.withNoBounds();

        StateQueryResult<KeyValueIterator<String, ValueAndTimestamp<Object>>> result = kafkaStreamsInitializer
                .getKafkaStreams()
                .query(StateQueryRequest.inStore(store).withQuery(rangeQuery));

        // Flatten the per-partition iterators into a single list of records. The list is freshly
        // built here, so no defensive copy is needed before returning it.
        List<StateStoreRecord> partitionsResult = new ArrayList<>();
        result.getPartitionResults().forEach((key, queryResult) -> queryResult
                .getResult()
                .forEachRemaining(kv ->
                        partitionsResult.add(new StateStoreRecord(kv.key, kv.value.value(), kv.value.timestamp()))));

        return partitionsResult;
    }

    /** {@inheritDoc} */
    @Override
    protected StateStoreRecord executeKeyQuery(KeyQueryMetadata keyQueryMetadata, String store, String key) {
        TimestampedKeyQuery<String, Object> keyQuery = TimestampedKeyQuery.withKey(key);

        StateQueryResult<ValueAndTimestamp<Object>> result = kafkaStreamsInitializer
                .getKafkaStreams()
                .query(StateQueryRequest.inStore(store)
                        .withQuery(keyQuery)
                        .withPartitions(Collections.singleton(keyQueryMetadata.partition())));

        // The query is scoped to the key's partition, which is usually not partition 0. Look up the
        // actual failing partition result instead of reading index 0, which would be null and NPE.
        result.getPartitionResults().values().stream()
                .filter(QueryResult::isFailure)
                .findFirst()
                .ifPresent(failure -> {
                    throw new IllegalArgumentException(failure.getFailureMessage());
                });

        if (result.getOnlyPartitionResult() == null) {
            throw new UnknownKeyException(key);
        }

        return new StateStoreRecord(
                key,
                result.getOnlyPartitionResult().getResult().value(),
                result.getOnlyPartitionResult().getResult().timestamp());
    }
}
| java | Apache-2.0 | 7b912c3bc711629f1745ae4a60bffeaed649d07a | 2026-01-05T02:38:06.284501Z | false |
michelin/kstreamplify | https://github.com/michelin/kstreamplify/blob/7b912c3bc711629f1745ae4a60bffeaed649d07a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/service/interactivequeries/keyvalue/KeyValueStoreService.java | kstreamplify-core/src/main/java/com/michelin/kstreamplify/service/interactivequeries/keyvalue/KeyValueStoreService.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.michelin.kstreamplify.service.interactivequeries.keyvalue;
import com.michelin.kstreamplify.exception.UnknownKeyException;
import com.michelin.kstreamplify.initializer.KafkaStreamsInitializer;
import com.michelin.kstreamplify.store.StateStoreRecord;
import java.net.http.HttpClient;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.streams.KeyQueryMetadata;
import org.apache.kafka.streams.query.KeyQuery;
import org.apache.kafka.streams.query.QueryResult;
import org.apache.kafka.streams.query.RangeQuery;
import org.apache.kafka.streams.query.StateQueryRequest;
import org.apache.kafka.streams.query.StateQueryResult;
import org.apache.kafka.streams.state.KeyValueIterator;
/** Key-value store service. */
@Slf4j
public class KeyValueStoreService extends CommonKeyValueStoreService {
    /**
     * Constructor.
     *
     * @param kafkaStreamsInitializer The Kafka Streams initializer
     */
    public KeyValueStoreService(KafkaStreamsInitializer kafkaStreamsInitializer) {
        super(kafkaStreamsInitializer);
    }

    /** {@inheritDoc} */
    @Override
    protected String path() {
        return "key-value";
    }

    /**
     * Constructor.
     *
     * @param kafkaStreamsInitializer The Kafka Streams initializer
     * @param httpClient The HTTP client used to query other instances
     */
    @SuppressWarnings("unused")
    public KeyValueStoreService(KafkaStreamsInitializer kafkaStreamsInitializer, HttpClient httpClient) {
        super(httpClient, kafkaStreamsInitializer);
    }

    /** {@inheritDoc} */
    @Override
    protected List<StateStoreRecord> executeRangeQuery(String store) {
        RangeQuery<String, Object> rangeQuery = RangeQuery.withNoBounds();

        StateQueryResult<KeyValueIterator<String, Object>> result = kafkaStreamsInitializer
                .getKafkaStreams()
                .query(StateQueryRequest.inStore(store).withQuery(rangeQuery));

        // Flatten the per-partition iterators into a single list of records. The list is freshly
        // built here, so no defensive copy is needed before returning it.
        List<StateStoreRecord> partitionsResult = new ArrayList<>();
        result.getPartitionResults().forEach((key, queryResult) -> queryResult
                .getResult()
                .forEachRemaining(kv -> partitionsResult.add(new StateStoreRecord(kv.key, kv.value))));

        return partitionsResult;
    }

    /** {@inheritDoc} */
    @Override
    protected StateStoreRecord executeKeyQuery(KeyQueryMetadata keyQueryMetadata, String store, String key) {
        KeyQuery<String, Object> keyQuery = KeyQuery.withKey(key);

        StateQueryResult<Object> result = kafkaStreamsInitializer
                .getKafkaStreams()
                .query(StateQueryRequest.inStore(store)
                        .withQuery(keyQuery)
                        .withPartitions(Collections.singleton(keyQueryMetadata.partition())));

        // The query is scoped to the key's partition, which is usually not partition 0. Look up the
        // actual failing partition result instead of reading index 0, which would be null and NPE.
        result.getPartitionResults().values().stream()
                .filter(QueryResult::isFailure)
                .findFirst()
                .ifPresent(failure -> {
                    throw new IllegalArgumentException(failure.getFailureMessage());
                });

        if (result.getOnlyPartitionResult() == null) {
            throw new UnknownKeyException(key);
        }

        return new StateStoreRecord(key, result.getOnlyPartitionResult().getResult());
    }
}
| java | Apache-2.0 | 7b912c3bc711629f1745ae4a60bffeaed649d07a | 2026-01-05T02:38:06.284501Z | false |
michelin/kstreamplify | https://github.com/michelin/kstreamplify/blob/7b912c3bc711629f1745ae4a60bffeaed649d07a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/TopologyErrorHandler.java | kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/TopologyErrorHandler.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.michelin.kstreamplify.error;
import com.michelin.kstreamplify.context.KafkaStreamsExecutionContext;
import com.michelin.kstreamplify.serde.SerdesUtils;
import java.util.Map;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.kstream.Branched;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.Named;
import org.apache.kafka.streams.kstream.Produced;
/** Utility routing processing errors of a topology to the configured DLQ topic. */
@Slf4j
public class TopologyErrorHandler {

    private static final String BRANCHING_NAME_NOMINAL = "branch-nominal";

    private TopologyErrorHandler() {}

    /**
     * Catch the errors from the given stream, dropping tombstones.
     *
     * @param stream The stream of processing result that may contain processing errors
     * @param <K> The key type
     * @param <V> The type of the successful record
     * @param <V2> The type of the failed record
     * @return A stream filtered from all processing errors
     */
    public static <K, V, V2> KStream<K, V> catchErrors(KStream<K, ProcessingResult<V, V2>> stream) {
        return catchErrors(stream, false);
    }

    /**
     * Catch the errors from the given stream.
     *
     * @param stream The stream of processing result that may contain processing errors
     * @param allowTombstone Allow sending tombstone in DLQ topic or to be returned
     * @param <K> The key type
     * @param <V> The type of the successful record
     * @param <V2> The type of the failed record
     * @return A stream filtered from all processing errors
     */
    public static <K, V, V2> KStream<K, V> catchErrors(
            KStream<K, ProcessingResult<V, V2>> stream, boolean allowTombstone) {
        // Derive a unique branch name prefix from the stream's identity hash (toString is "Class@hash")
        String branchNamePrefix = stream.toString().split("@")[1];

        // Null processing results never carry information, drop them in both modes
        KStream<K, ProcessingResult<V, V2>> filteredStream = stream.filter((key, value) -> value != null);
        if (!allowTombstone) {
            // Without tombstone support, results holding neither a value nor an error are discarded
            filteredStream =
                    filteredStream.filterNot((key, value) -> value.getValue() == null && value.getError() == null);
        }

        // Nominal records go to the named branch; everything else is an error routed to the DLQ.
        // With tombstones allowed, a null value is nominal as long as no error is attached.
        Map<String, KStream<K, ProcessingResult<V, V2>>> branches = filteredStream
                .split(Named.as(branchNamePrefix))
                .branch(
                        allowTombstone
                                ? (key, value) -> value.getError() == null
                                : (key, value) -> value.isValid(),
                        Branched.as(BRANCHING_NAME_NOMINAL))
                .defaultBranch(Branched.withConsumer(
                        ks -> TopologyErrorHandler.handleErrors(ks.mapValues(ProcessingResult::getError))));

        // Branch map keys are the split prefix concatenated with the branch name
        return branches.get(branchNamePrefix + BRANCHING_NAME_NOMINAL).mapValues(ProcessingResult::getValue);
    }

    /**
     * Process a stream of processing errors and route it to the configured DLQ topic.
     *
     * @param errorsStream The stream of processing errors
     * @param <K> The key type
     * @param <V> The value type
     */
    private static <K, V> void handleErrors(KStream<K, ProcessingError<V>> errorsStream) {
        // Without a DLQ topic there is nowhere to route the errors: warn and drop the branch
        if (StringUtils.isBlank(KafkaStreamsExecutionContext.getDlqTopicName())) {
            log.warn("Failed to route topology error to the designated DLQ (Dead Letter Queue) topic. "
                    + "Please make sure to define a DLQ topic in your KafkaStreamsStarter bean configuration.");
            return;
        }

        errorsStream
                // String keys are required by the DLQ topic; null keys become the literal "null"
                .map((key, value) -> new KeyValue<>(key == null ? "null" : key.toString(), value))
                .processValues(GenericErrorProcessor<V>::new)
                .to(
                        KafkaStreamsExecutionContext.getDlqTopicName(),
                        Produced.with(Serdes.String(), SerdesUtils.getValueSerdes()));
    }
}
| java | Apache-2.0 | 7b912c3bc711629f1745ae4a60bffeaed649d07a | 2026-01-05T02:38:06.284501Z | false |
michelin/kstreamplify | https://github.com/michelin/kstreamplify/blob/7b912c3bc711629f1745ae4a60bffeaed649d07a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/DlqDeserializationExceptionHandler.java | kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/DlqDeserializationExceptionHandler.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.michelin.kstreamplify.error;
import static com.michelin.kstreamplify.property.KstreamplifyConfig.*;
import static org.apache.kafka.streams.StreamsConfig.APPLICATION_ID_CONFIG;
import com.michelin.kstreamplify.avro.KafkaError;
import com.michelin.kstreamplify.context.KafkaStreamsExecutionContext;
import io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException;
import java.util.Map;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.KafkaException;
import org.apache.kafka.streams.errors.DeserializationExceptionHandler;
import org.apache.kafka.streams.errors.ErrorHandlerContext;
/** The class managing deserialization exceptions. */
@Slf4j
public class DlqDeserializationExceptionHandler extends DlqExceptionHandler implements DeserializationExceptionHandler {

    // Lock object guarding lazy initialization of the shared static DLQ producer in configure()
    private static final Object GUARD = new Object();

    // When true, schema registry REST client exceptions are also routed to the DLQ instead of
    // failing the stream (set from the Kstreamplify DLQ configuration in configure())
    private boolean handleSchemaRegistryRestException = false;

    /** Constructor. */
    public DlqDeserializationExceptionHandler() {
        // Default constructor
    }

    /**
     * Constructor.
     *
     * @param producer A Kafka producer.
     */
    public DlqDeserializationExceptionHandler(Producer<byte[], KafkaError> producer) {
        // The producer is a static field on DlqExceptionHandler, shared by all handler instances
        DlqExceptionHandler.producer = producer;
    }

    /**
     * Manage deserialization exceptions.
     *
     * <p>Builds a {@link KafkaError} from the failed consumer record and sends it to the configured DLQ topic.
     * Returns CONTINUE when the error was routed to the DLQ; returns FAIL when no DLQ topic is configured, when the
     * exception cause is neither Kafka-related nor an (enabled) schema registry REST exception, or when the DLQ
     * production itself failed.
     *
     * @param errorHandlerContext The error handler context
     * @param consumerRecord The record to deserialize
     * @param exception The exception for the deserialization
     * @return FAIL or CONTINUE
     */
    @Override
    public DeserializationHandlerResponse handle(
            ErrorHandlerContext errorHandlerContext,
            ConsumerRecord<byte[], byte[]> consumerRecord,
            Exception exception) {
        // Without a DLQ topic there is nowhere to route the error: fail the stream
        if (StringUtils.isBlank(KafkaStreamsExecutionContext.getDlqTopicName())) {
            log.warn("Failed to route deserialization error to the designated DLQ topic. "
                    + "Please make sure to define a DLQ topic in your KafkaStreamsStarter bean configuration.");
            return DeserializationHandlerResponse.FAIL;
        }

        try {
            var builder = KafkaError.newBuilder();
            enrichWithException(builder, exception, consumerRecord.key(), consumerRecord.value())
                    .setContextMessage("An exception occurred during the stream internal deserialization")
                    .setOffset(consumerRecord.offset())
                    .setPartition(consumerRecord.partition())
                    .setTopic(consumerRecord.topic())
                    .setApplicationId(
                            KafkaStreamsExecutionContext.getProperties().getProperty(APPLICATION_ID_CONFIG));

            boolean isCausedByKafka = exception.getCause() instanceof KafkaException;
            boolean isRestClientSchemaRegistryException = exception.getCause() instanceof RestClientException;

            // Route to the DLQ only for Kafka-level causes (or no cause at all), or for schema registry
            // REST exceptions when explicitly enabled via configuration
            if (isCausedByKafka
                    || exception.getCause() == null
                    || (isRestClientSchemaRegistryException && handleSchemaRegistryRestException)) {
                // Synchronous send (.get()) so a DLQ production failure is caught below and turns into FAIL
                producer.send(new ProducerRecord<>(
                                KafkaStreamsExecutionContext.getDlqTopicName(), consumerRecord.key(), builder.build()))
                        .get();
                return DeserializationHandlerResponse.CONTINUE;
            }
        } catch (InterruptedException ie) {
            // NOTE(review): key/value are byte arrays, so these placeholders log the array reference
            // (e.g. "[B@1a2b3c"), not the payload contents — confirm this is the intended output
            log.error(
                    "Interruption while sending the deserialization exception {} for key {}, "
                            + "value {} and topic {} to DLQ topic {}",
                    exception,
                    consumerRecord.key(),
                    consumerRecord.value(),
                    consumerRecord.topic(),
                    KafkaStreamsExecutionContext.getDlqTopicName(),
                    ie);
            // Restore the interrupt flag so upstream code can observe the interruption
            Thread.currentThread().interrupt();
        } catch (Exception e) {
            log.error(
                    "Cannot send the deserialization exception {} for key {}, value {} and topic {} to DLQ topic {}",
                    exception,
                    consumerRecord.key(),
                    consumerRecord.value(),
                    consumerRecord.topic(),
                    KafkaStreamsExecutionContext.getDlqTopicName(),
                    e);
        }

        // here we only have exception like UnknownHostException for example or TimeoutException ...
        // situation example: we cannot ask schema registry because the url is unavailable
        return DeserializationHandlerResponse.FAIL;
    }

    /** {@inheritDoc} */
    @Override
    public void configure(Map<String, ?> configs) {
        synchronized (GUARD) {
            // Lazily create the shared DLQ producer once across all handler instances
            if (producer == null) {
                instantiateProducer(DlqDeserializationExceptionHandler.class.getName(), configs);
            }
            handleSchemaRegistryRestException = KafkaStreamsExecutionContext.isDlqFeatureEnabled(
                    DLQ_DESERIALIZATION_HANDLER_FORWARD_REST_CLIENT_EXCEPTION);
        }
    }
}
| java | Apache-2.0 | 7b912c3bc711629f1745ae4a60bffeaed649d07a | 2026-01-05T02:38:06.284501Z | false |
michelin/kstreamplify | https://github.com/michelin/kstreamplify/blob/7b912c3bc711629f1745ae4a60bffeaed649d07a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/DlqProductionExceptionHandler.java | kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/DlqProductionExceptionHandler.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.michelin.kstreamplify.error;
import static org.apache.kafka.streams.StreamsConfig.APPLICATION_ID_CONFIG;
import com.michelin.kstreamplify.avro.KafkaError;
import com.michelin.kstreamplify.context.KafkaStreamsExecutionContext;
import java.util.Map;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.errors.RetriableException;
import org.apache.kafka.streams.errors.ErrorHandlerContext;
import org.apache.kafka.streams.errors.ProductionExceptionHandler;
/** The class managing DLQ production exceptions. */
@Slf4j
public class DlqProductionExceptionHandler extends DlqExceptionHandler implements ProductionExceptionHandler {

    private static final Object GUARD = new Object();

    /** Constructor. */
    public DlqProductionExceptionHandler() {
        // Default constructor
    }

    /**
     * Constructor.
     *
     * @param producer A Kafka producer
     */
    public DlqProductionExceptionHandler(Producer<byte[], KafkaError> producer) {
        DlqExceptionHandler.producer = producer;
    }

    /**
     * Manage production exceptions by routing the failed record to the configured DLQ topic.
     *
     * @param errorHandlerContext The error handler context
     * @param producerRecord The record to produce
     * @param productionException The exception on producing
     * @return FAIL when no DLQ topic is configured or the exception is retriable, CONTINUE otherwise
     */
    @Override
    public ProductionExceptionHandlerResponse handle(
            ErrorHandlerContext errorHandlerContext,
            ProducerRecord<byte[], byte[]> producerRecord,
            Exception productionException) {
        // No DLQ topic configured: nothing can be routed, fail the stream
        if (StringUtils.isBlank(KafkaStreamsExecutionContext.getDlqTopicName())) {
            log.warn("Failed to route production error to the designated DLQ topic. "
                    + "Please make sure to define a DLQ topic in your KafkaStreamsStarter bean configuration.");
            return ProductionExceptionHandlerResponse.FAIL;
        }

        // Transient failures are left to Kafka Streams' own retry mechanism
        if (productionException instanceof RetriableException) {
            return ProductionExceptionHandlerResponse.FAIL;
        }

        try {
            var errorBuilder = KafkaError.newBuilder();
            enrichWithException(errorBuilder, productionException, producerRecord.key(), producerRecord.value())
                    .setContextMessage("An exception occurred during the stream internal production")
                    .setOffset(-1)
                    .setPartition(producerRecord.partition() == null ? -1 : producerRecord.partition())
                    .setTopic(producerRecord.topic())
                    .setApplicationId(
                            KafkaStreamsExecutionContext.getProperties().getProperty(APPLICATION_ID_CONFIG));

            // Send synchronously so any DLQ production failure surfaces right here
            producer.send(new ProducerRecord<>(
                            KafkaStreamsExecutionContext.getDlqTopicName(),
                            producerRecord.key(),
                            errorBuilder.build()))
                    .get();
        } catch (InterruptedException ie) {
            log.error(
                    "Interruption while sending the production exception {} for key {}, value {} "
                            + "and topic {} to DLQ topic {}",
                    productionException,
                    producerRecord.key(),
                    producerRecord.value(),
                    producerRecord.topic(),
                    KafkaStreamsExecutionContext.getDlqTopicName(),
                    ie);
            // Restore the interrupt flag so upstream code can observe the interruption
            Thread.currentThread().interrupt();
        } catch (Exception e) {
            log.error(
                    "Cannot send the production exception {} for key {}, value {} and topic {} to DLQ topic {}",
                    productionException,
                    producerRecord.key(),
                    producerRecord.value(),
                    producerRecord.topic(),
                    KafkaStreamsExecutionContext.getDlqTopicName(),
                    e);
        }

        // Non-retriable errors are considered handled once logged/routed: the stream keeps running
        return ProductionExceptionHandlerResponse.CONTINUE;
    }

    /** {@inheritDoc} */
    @Override
    public void configure(Map<String, ?> configs) {
        synchronized (GUARD) {
            // Lazily create the shared DLQ producer once across all handler instances
            if (producer == null) {
                instantiateProducer(DlqProductionExceptionHandler.class.getName(), configs);
            }
        }
    }
}
| java | Apache-2.0 | 7b912c3bc711629f1745ae4a60bffeaed649d07a | 2026-01-05T02:38:06.284501Z | false |
michelin/kstreamplify | https://github.com/michelin/kstreamplify/blob/7b912c3bc711629f1745ae4a60bffeaed649d07a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/DlqExceptionHandler.java | kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/DlqExceptionHandler.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.michelin.kstreamplify.error;
import com.michelin.kstreamplify.avro.KafkaError;
import io.confluent.kafka.serializers.KafkaAvroSerializer;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.nio.ByteBuffer;
import java.util.Map;
import java.util.Properties;
import lombok.Getter;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.errors.RecordTooLargeException;
import org.apache.kafka.common.serialization.ByteArraySerializer;
/** The class to manage DLQ exception. */
@Slf4j
public abstract class DlqExceptionHandler {
    /** The DLQ producer, shared statically across all DLQ exception handler instances. */
    @Getter
    protected static Producer<byte[], KafkaError> producer;

    /** Constructor. */
    protected DlqExceptionHandler() {}

    /**
     * Create a producer.
     *
     * <p>Keys are serialized as raw bytes and values as Avro so {@link KafkaError} records can be written to the DLQ
     * topic.
     *
     * @param clientId The producer client id
     * @param configs The producer configs
     */
    public static void instantiateProducer(String clientId, Map<String, ?> configs) {
        Properties properties = new Properties();
        properties.putAll(configs);
        properties.setProperty(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class.getName());
        properties.setProperty(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, KafkaAvroSerializer.class.getName());
        properties.setProperty(ProducerConfig.CLIENT_ID_CONFIG, clientId);
        producer = new KafkaProducer<>(properties);
    }

    /**
     * Enrich with exception.
     *
     * @param builder the error builder
     * @param exception the exception to add
     * @param key the record key
     * @param value the record value (may be null, e.g. for a tombstone record)
     * @return the error enriched by the exception
     */
    public KafkaError.Builder enrichWithException(
            KafkaError.Builder builder, Exception exception, byte[] key, byte[] value) {
        StringWriter sw = new StringWriter();
        PrintWriter pw = new PrintWriter(sw);
        exception.printStackTrace(pw);

        boolean tooLarge = exception instanceof RecordTooLargeException;

        // When the record is too large, store the key instead of the oversized value
        byte[] payload = tooLarge ? key : value;

        return builder.setCause(
                        exception.getCause() != null ? exception.getCause().getMessage() : "Unknown cause")
                .setValue(
                        tooLarge
                                ? "The record is too large to be set as value (" + value.length
                                        + " bytes). The key will be used instead"
                                : null)
                .setStack(sw.toString())
                // Wrap an empty buffer when the payload is null (e.g. tombstone value) to avoid the
                // NullPointerException the previous ByteBuffer.wrap(value) call would throw
                .setByteValue(ByteBuffer.wrap(payload == null ? new byte[0] : payload));
    }
}
| java | Apache-2.0 | 7b912c3bc711629f1745ae4a60bffeaed649d07a | 2026-01-05T02:38:06.284501Z | false |
michelin/kstreamplify | https://github.com/michelin/kstreamplify/blob/7b912c3bc711629f1745ae4a60bffeaed649d07a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/ProcessingError.java | kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/ProcessingError.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.michelin.kstreamplify.error;
import com.michelin.kstreamplify.converter.AvroToJsonConverter;
import lombok.Getter;
import org.apache.avro.generic.GenericRecord;
/**
 * Holder for the details of a record that failed during processing.
 *
 * @param <V> The type of the failed record
 */
@Getter
public class ProcessingError<V> {
    /** The exception that occurred. */
    private final Exception exception;

    /** The failed Kafka record, rendered as a string. */
    private final String kafkaRecord;

    /** A context message defined when the error is caught. */
    private final String contextMessage;

    /**
     * Constructor.
     *
     * @param exception The exception
     * @param contextMessage The context message
     * @param kafkaRecord The failed Kafka record
     */
    public ProcessingError(Exception exception, String contextMessage, V kafkaRecord) {
        this.exception = exception;
        this.contextMessage = contextMessage;
        // Avro records are converted to their JSON form; anything else falls back to String.valueOf
        this.kafkaRecord = kafkaRecord instanceof GenericRecord genericRecord
                ? AvroToJsonConverter.convertRecord(genericRecord)
                : String.valueOf(kafkaRecord);
    }

    /**
     * Constructor using a default context message.
     *
     * @param exception The exception
     * @param kafkaRecord The failed Kafka record
     */
    public ProcessingError(Exception exception, V kafkaRecord) {
        this(exception, "No context message", kafkaRecord);
    }
}
| java | Apache-2.0 | 7b912c3bc711629f1745ae4a60bffeaed649d07a | 2026-01-05T02:38:06.284501Z | false |
michelin/kstreamplify | https://github.com/michelin/kstreamplify/blob/7b912c3bc711629f1745ae4a60bffeaed649d07a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/GenericErrorProcessor.java | kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/GenericErrorProcessor.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.michelin.kstreamplify.error;
import com.michelin.kstreamplify.avro.KafkaError;
import java.io.PrintWriter;
import java.io.StringWriter;
import org.apache.kafka.streams.processor.api.ContextualFixedKeyProcessor;
import org.apache.kafka.streams.processor.api.FixedKeyRecord;
import org.apache.kafka.streams.processor.api.RecordMetadata;
/**
* Generic error processor.
*
* @param <V> The type of the failed record
*/
class GenericErrorProcessor<V> extends ContextualFixedKeyProcessor<String, ProcessingError<V>, KafkaError> {
/**
* Process the error.
*
* @param fixedKeyRecord the record to process an error
*/
@Override
public void process(FixedKeyRecord<String, ProcessingError<V>> fixedKeyRecord) {
StringWriter sw = new StringWriter();
PrintWriter pw = new PrintWriter(sw);
fixedKeyRecord.value().getException().printStackTrace(pw);
RecordMetadata recordMetadata = context().recordMetadata().orElse(null);
KafkaError error = KafkaError.newBuilder()
.setCause(fixedKeyRecord.value().getException().getMessage())
.setContextMessage(fixedKeyRecord.value().getContextMessage())
.setOffset(recordMetadata != null ? recordMetadata.offset() : -1)
.setPartition(recordMetadata != null ? recordMetadata.partition() : -1)
.setStack(sw.toString())
.setTopic(
recordMetadata != null && recordMetadata.topic() != null
? recordMetadata.topic()
: "Outside topic context")
.setValue(fixedKeyRecord.value().getKafkaRecord())
.setApplicationId(context().applicationId())
.build();
context().forward(fixedKeyRecord.withValue(error));
}
/** {@inheritDoc} */
@Override
public void close() {
// may close resource opened in init
}
}
| java | Apache-2.0 | 7b912c3bc711629f1745ae4a60bffeaed649d07a | 2026-01-05T02:38:06.284501Z | false |
michelin/kstreamplify | https://github.com/michelin/kstreamplify/blob/7b912c3bc711629f1745ae4a60bffeaed649d07a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/ProcessingResult.java | kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/ProcessingResult.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.michelin.kstreamplify.error;
import lombok.Getter;
import org.apache.kafka.common.header.Headers;
import org.apache.kafka.streams.processor.api.Record;
/**
 * Result of a processing step: either a successful value or a {@link ProcessingError}.
 *
 * @param <V> The type of the successful record
 * @param <V2> The type of the failed record
 */
@Getter
public class ProcessingResult<V, V2> {
    /** The successful record. */
    private V value;

    /** The failed record wrapped in a processing error. */
    private ProcessingError<V2> error;

    /**
     * Private constructor that sets the success value.
     *
     * @param value The success value
     */
    private ProcessingResult(V value) {
        this.value = value;
    }

    /**
     * Private constructor that sets the error value.
     *
     * @param error the ProcessingError wrapping the failed record
     */
    private ProcessingResult(ProcessingError<V2> error) {
        this.error = error;
    }

    /**
     * Create a successful {@link ProcessingResult}.
     *
     * @param value The successful record value
     * @param <V> The type of the successful record
     * @param <V2> The type of the failed record
     * @return A {@link ProcessingResult} containing a successful value
     */
    public static <V, V2> ProcessingResult<V, V2> success(V value) {
        return new ProcessingResult<>(value);
    }

    /**
     * Create a {@link Record} with a successful {@link ProcessingResult}.
     *
     * @param message The successful record
     * @param <K> The type of the record key
     * @param <V> The type of the ProcessingResult successful value
     * @param <V2> The type of the ProcessingResult error value
     * @return A {@link Record} with a {@link ProcessingResult} containing a successful value
     */
    public static <K, V, V2> Record<K, ProcessingResult<V, V2>> wrapRecordSuccess(Record<K, V> message) {
        // Delegate to the key/value/timestamp overload
        return wrapRecordSuccess(message.key(), message.value(), message.timestamp());
    }

    /**
     * Create a {@link Record} with a successful {@link ProcessingResult}.
     *
     * @param key The key to put in the resulting record
     * @param value The successful value to put in the resulting record
     * @param timestamp The timestamp to apply on the resulting record
     * @param <K> The type of the record key
     * @param <V> The type of the ProcessingResult successful value
     * @param <V2> The type of the ProcessingResult error value
     * @return A {@link Record} with a {@link ProcessingResult} containing a successful value
     */
    public static <K, V, V2> Record<K, ProcessingResult<V, V2>> wrapRecordSuccess(K key, V value, long timestamp) {
        return new Record<>(key, success(value), timestamp);
    }

    /**
     * Create a {@link Record} with headers and a successful {@link ProcessingResult}.
     *
     * @param key The key to put in the resulting record
     * @param value The successful value to put in the resulting record
     * @param timestamp The timestamp to apply on the resulting record
     * @param headers The headers values to put in the resulting record
     * @param <K> The type of the record key
     * @param <V> The type of the ProcessingResult successful value
     * @param <V2> The type of the ProcessingResult error value
     * @return A {@link Record} with a {@link ProcessingResult} containing a successful value
     */
    public static <K, V, V2> Record<K, ProcessingResult<V, V2>> wrapRecordSuccess(
            K key, V value, long timestamp, Headers headers) {
        return new Record<>(key, success(value), timestamp, headers);
    }

    /**
     * Create a {@link Record} with headers and a successful {@link ProcessingResult}.
     *
     * @param message The successful record
     * @param <K> The type of the record key
     * @param <V> The type of the ProcessingResult successful value
     * @param <V2> The type of the ProcessingResult error value
     * @return A {@link Record} with a {@link ProcessingResult} containing a successful value
     */
    public static <K, V, V2> Record<K, ProcessingResult<V, V2>> wrapRecordSuccessWithHeaders(Record<K, V> message) {
        // Delegate to the headers-aware overload
        return wrapRecordSuccess(message.key(), message.value(), message.timestamp(), message.headers());
    }

    /**
     * Create a failed {@link ProcessingResult}.
     *
     * @param exception The exception
     * @param value The failed record value
     * @param <V> The type of the successful record
     * @param <V2> The type of the failed record
     * @return A {@link ProcessingResult} containing a failed value
     */
    public static <V, V2> ProcessingResult<V, V2> fail(Exception exception, V2 value) {
        return new ProcessingResult<>(new ProcessingError<>(exception, value));
    }

    /**
     * Create a failed {@link ProcessingResult} with a custom context message.
     *
     * @param exception The exception
     * @param value The failed record value
     * @param contextMessage The custom context message
     * @param <V> The type of the successful record
     * @param <V2> The type of the failed record
     * @return A {@link ProcessingResult} containing a failed value
     */
    public static <V, V2> ProcessingResult<V, V2> fail(Exception exception, V2 value, String contextMessage) {
        return new ProcessingResult<>(new ProcessingError<>(exception, contextMessage, value));
    }

    /**
     * Create a {@link Record} with a failed {@link ProcessingResult}.
     *
     * @param exception The initial exception
     * @param message The failed record
     * @param <K> The type of the record key
     * @param <V> The type of the ProcessingResult successful value
     * @param <V2> The type of the ProcessingResult error value
     * @return A {@link Record} with a {@link ProcessingResult} containing a failed value
     */
    public static <K, V, V2> Record<K, ProcessingResult<V, V2>> wrapRecordFailure(
            Exception exception, Record<K, V2> message) {
        // Delegate to the key/value/timestamp overload
        return wrapRecordFailure(exception, message.key(), message.value(), message.timestamp());
    }

    /**
     * Create a {@link Record} with a failed {@link ProcessingResult} with a custom context message.
     *
     * @param exception The initial exception
     * @param message The failed record
     * @param contextMessage The custom context message that will be added in the stack trace
     * @param <K> The type of the record key
     * @param <V> The type of the ProcessingResult successful value
     * @param <V2> The type of the ProcessingResult error value
     * @return A {@link Record} with a {@link ProcessingResult} containing a failed value
     */
    public static <K, V, V2> Record<K, ProcessingResult<V, V2>> wrapRecordFailure(
            Exception exception, Record<K, V2> message, String contextMessage) {
        // Delegate to the key/value/timestamp overload carrying the context message
        return wrapRecordFailure(exception, message.key(), message.value(), message.timestamp(), contextMessage);
    }

    /**
     * Create a {@link Record} with a failed {@link ProcessingResult}.
     *
     * @param exception The initial exception
     * @param key The key to put in the resulting record
     * @param value The failed record value
     * @param timestamp The timestamp to apply on the resulting record
     * @param <K> The type of the record key
     * @param <V> The type of the ProcessingResult successful value
     * @param <V2> The type of the ProcessingResult error value
     * @return A {@link Record} with a {@link ProcessingResult} containing a failed value
     */
    public static <K, V, V2> Record<K, ProcessingResult<V, V2>> wrapRecordFailure(
            Exception exception, K key, V2 value, long timestamp) {
        return new Record<>(key, fail(exception, value), timestamp);
    }

    /**
     * Create a {@link Record} with a failed {@link ProcessingResult} with a custom context message.
     *
     * @param exception The initial exception
     * @param key The key to put in the resulting record
     * @param value The failed record value
     * @param timestamp The timestamp to apply on the resulting record
     * @param contextMessage The custom context message that will be added in the stack trace
     * @param <K> The type of the record key
     * @param <V> The type of the ProcessingResult successful value
     * @param <V2> The type of the ProcessingResult error value
     * @return A {@link Record} with a {@link ProcessingResult} containing a failed value
     */
    public static <K, V, V2> Record<K, ProcessingResult<V, V2>> wrapRecordFailure(
            Exception exception, K key, V2 value, long timestamp, String contextMessage) {
        return new Record<>(key, fail(exception, value, contextMessage), timestamp);
    }

    /**
     * Is the processing result valid. It is valid only when it carries a successful value and no error.
     *
     * @return true if valid, false otherwise
     */
    public boolean isValid() {
        return error == null && value != null;
    }
}
| java | Apache-2.0 | 7b912c3bc711629f1745ae4a60bffeaed649d07a | 2026-01-05T02:38:06.284501Z | false |
michelin/kstreamplify | https://github.com/michelin/kstreamplify/blob/7b912c3bc711629f1745ae4a60bffeaed649d07a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/store/WindowStateStoreUtils.java | kstreamplify-core/src/main/java/com/michelin/kstreamplify/store/WindowStateStoreUtils.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.michelin.kstreamplify.store;
import java.time.Duration;
import java.time.Instant;
import org.apache.kafka.streams.state.WindowStore;
/** The window state store utils. */
public final class WindowStateStoreUtils {
    private WindowStateStoreUtils() {}

    /**
     * Put the key/value into the state store, timestamped with the current wall-clock time.
     *
     * @param stateStore The stateStore
     * @param key The key
     * @param value The value
     * @param <K> The template for the key
     * @param <V> The template for the value
     */
    public static <K, V> void put(WindowStore<K, V> stateStore, K key, V value) {
        stateStore.put(key, value, Instant.now().toEpochMilli());
    }

    /**
     * Get the value by the key from the state store.
     *
     * @param stateStore The stateStore
     * @param key The key
     * @param retentionDays The delay of retention
     * @param <K> The template for the key
     * @param <V> The template for the value
     * @return The last value inserted in the state store for the key, or null if none
     */
    public static <K, V> V get(WindowStore<K, V> stateStore, K key, int retentionDays) {
        // Use a single reference time so both window bounds are computed from the same instant
        Instant now = Instant.now();
        // WindowStoreIterator is Closeable: close it to release underlying store resources
        try (var resultIterator = stateStore.backwardFetch(key, now.minus(Duration.ofDays(retentionDays)), now)) {
            if (resultIterator != null && resultIterator.hasNext()) {
                return resultIterator.next().value;
            }
            return null;
        }
    }
}
| java | Apache-2.0 | 7b912c3bc711629f1745ae4a60bffeaed649d07a | 2026-01-05T02:38:06.284501Z | false |
michelin/kstreamplify | https://github.com/michelin/kstreamplify/blob/7b912c3bc711629f1745ae4a60bffeaed649d07a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/store/RocksDbConfig.java | kstreamplify-core/src/main/java/com/michelin/kstreamplify/store/RocksDbConfig.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.michelin.kstreamplify.store;
import com.michelin.kstreamplify.context.KafkaStreamsExecutionContext;
import java.util.Map;
import org.apache.kafka.streams.state.RocksDBConfigSetter;
import org.rocksdb.BlockBasedTableConfig;
import org.rocksdb.CompressionType;
import org.rocksdb.Options;
/** The RocksDB configuration class. */
public class RocksDbConfig implements RocksDBConfigSetter {
    /** The RocksDB cache size config key. */
    public static final String ROCKSDB_CACHE_SIZE_CONFIG = "rocksdb.config.cache.size";

    /** The RocksDB write buffer size config key. */
    public static final String ROCKSDB_WRITE_BUFFER_SIZE_CONFIG = "rocksdb.config.write.buffer.size";

    /** The RocksDB block size config key. */
    public static final String ROCKSDB_BLOCK_SIZE_CONFIG = "rocksdb.config.block.size";

    /** The RocksDB max write buffer config. */
    public static final String ROCKSDB_MAX_WRITE_BUFFER_CONFIG = "rocksdb.config.max.write.buffer";

    /** The RocksDB compression type config key. */
    public static final String ROCKSDB_COMPRESSION_TYPE_CONFIG = "rocksdb.config.compression.type";

    /** The RocksDB cache index block enabled config. */
    public static final String ROCKSDB_CACHE_INDEX_BLOCK_ENABLED_CONFIG = "rocksdb.config.cache.index.block.enabled";

    /** One KB in B. */
    private static final long ONE_KB = 1024L;

    /** The RocksDB default cache size (16 MB). */
    public static final Long ROCKSDB_CACHE_SIZE_DEFAULT = 16 * ONE_KB * ONE_KB;

    /** The RocksDB default write buffer size (4 MB). */
    public static final Long ROCKSDB_WRITE_BUFFER_SIZE_DEFAULT = 4 * ONE_KB * ONE_KB;

    /** The RocksDB default block size (4 KB). */
    public static final Long ROCKSDB_BLOCK_SIZE_DEFAULT = 4 * ONE_KB;

    /** The RocksDB default max write buffer. */
    public static final Integer ROCKSDB_MAX_WRITE_BUFFER_DEFAULT = 2;

    /** The RocksDB default compression type (empty string: no compression). */
    public static final String ROCKSDB_COMPRESSION_TYPE_DEFAULT = "";

    /** The RocksDB default cache index block enabled. */
    public static final Boolean ROCKSDB_CACHE_INDEX_BLOCK_ENABLED_DEFAULT = true;

    /** Native block cache, lazily created on the first setConfig call and released in close. */
    private org.rocksdb.Cache cache = null;

    /** Constructor. */
    public RocksDbConfig() {
        // Default constructor
    }

    /**
     * Set the RocksDB configuration, reading each tuning knob from the Kstreamplify properties and
     * falling back to the class defaults.
     *
     * @param storeName The store name
     * @param options The options
     * @param configs The configs
     */
    @Override
    public void setConfig(final String storeName, final Options options, final Map<String, Object> configs) {
        if (cache == null) {
            cache = new org.rocksdb.LRUCache(getLong(ROCKSDB_CACHE_SIZE_CONFIG, ROCKSDB_CACHE_SIZE_DEFAULT));
        }

        BlockBasedTableConfig tableConfig = (BlockBasedTableConfig) options.tableFormatConfig();
        tableConfig.setBlockCache(cache);
        tableConfig.setBlockSize(getLong(ROCKSDB_BLOCK_SIZE_CONFIG, ROCKSDB_BLOCK_SIZE_DEFAULT));
        tableConfig.setCacheIndexAndFilterBlocks(
                getBoolean(ROCKSDB_CACHE_INDEX_BLOCK_ENABLED_CONFIG, ROCKSDB_CACHE_INDEX_BLOCK_ENABLED_DEFAULT));
        options.setTableFormatConfig(tableConfig);

        options.setMaxWriteBufferNumber(getInt(ROCKSDB_MAX_WRITE_BUFFER_CONFIG, ROCKSDB_MAX_WRITE_BUFFER_DEFAULT));
        options.setWriteBufferSize(getLong(ROCKSDB_WRITE_BUFFER_SIZE_CONFIG, ROCKSDB_WRITE_BUFFER_SIZE_DEFAULT));
        options.setCompressionType(CompressionType.getCompressionType(KafkaStreamsExecutionContext.getProperties()
                .getProperty(ROCKSDB_COMPRESSION_TYPE_CONFIG, ROCKSDB_COMPRESSION_TYPE_DEFAULT)));
    }

    /** Read a long property from the execution context, falling back to the given default. */
    private static long getLong(String key, long defaultValue) {
        var properties = KafkaStreamsExecutionContext.getProperties();
        return properties.containsKey(key) ? Long.parseLong(properties.getProperty(key)) : defaultValue;
    }

    /** Read an int property from the execution context, falling back to the given default. */
    private static int getInt(String key, int defaultValue) {
        var properties = KafkaStreamsExecutionContext.getProperties();
        return properties.containsKey(key) ? Integer.parseInt(properties.getProperty(key)) : defaultValue;
    }

    /** Read a boolean property from the execution context, falling back to the given default. */
    private static boolean getBoolean(String key, boolean defaultValue) {
        var properties = KafkaStreamsExecutionContext.getProperties();
        return properties.containsKey(key) ? Boolean.parseBoolean(properties.getProperty(key)) : defaultValue;
    }

    /**
     * Release the native block cache when the store is closed.
     *
     * @param storeName The store name
     * @param options The options
     */
    @Override
    public void close(String storeName, Options options) {
        // Guard against close being invoked without a prior setConfig, which would leave cache null
        if (cache != null) {
            cache.close();
        }
    }
}
| java | Apache-2.0 | 7b912c3bc711629f1745ae4a60bffeaed649d07a | 2026-01-05T02:38:06.284501Z | false |
michelin/kstreamplify | https://github.com/michelin/kstreamplify/blob/7b912c3bc711629f1745ae4a60bffeaed649d07a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/store/StreamsMetadata.java | kstreamplify-core/src/main/java/com/michelin/kstreamplify/store/StreamsMetadata.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.michelin.kstreamplify.store;
import java.util.Set;
import java.util.stream.Collectors;
import lombok.Getter;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.streams.state.HostInfo;
/** Metadata describing a Kafka Streams instance: its state stores, host and assigned partitions. */
@Getter
public class StreamsMetadata {
    private Set<String> stateStoreNames;
    private StreamsHostInfo hostInfo;
    private Set<String> topicPartitions;

    /** Default constructor, used by deserialization frameworks. */
    public StreamsMetadata() {
        // Default constructor
    }

    /**
     * Build the metadata from the Kafka Streams types.
     *
     * @param stateStoreNames The state store names
     * @param host The host
     * @param topicPartitions The topic partitions
     */
    public StreamsMetadata(Set<String> stateStoreNames, HostInfo host, Set<TopicPartition> topicPartitions) {
        this.stateStoreNames = stateStoreNames;
        this.hostInfo = new StreamsHostInfo(host.host(), host.port());
        // Flatten each partition as "<topic>-<partition>" for a serialization-friendly representation
        this.topicPartitions = topicPartitions.stream()
                .map(tp -> "%s-%d".formatted(tp.topic(), tp.partition()))
                .collect(Collectors.toSet());
    }

    /**
     * State store host information.
     *
     * @param host The host
     * @param port The port
     */
    public record StreamsHostInfo(String host, Integer port) {}
}
| java | Apache-2.0 | 7b912c3bc711629f1745ae4a60bffeaed649d07a | 2026-01-05T02:38:06.284501Z | false |
michelin/kstreamplify | https://github.com/michelin/kstreamplify/blob/7b912c3bc711629f1745ae4a60bffeaed649d07a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/store/StateStoreRecord.java | kstreamplify-core/src/main/java/com/michelin/kstreamplify/store/StateStoreRecord.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.michelin.kstreamplify.store;
import static com.michelin.kstreamplify.converter.AvroToJsonConverter.convertObject;
import static com.michelin.kstreamplify.converter.JsonToAvroConverter.jsonToObject;
import com.fasterxml.jackson.annotation.JsonInclude;
import lombok.Getter;
/** The state store record class: a key/value pair read from a store, in a Jackson-friendly form. */
@Getter
@JsonInclude(JsonInclude.Include.NON_NULL)
public class StateStoreRecord {
    private String key;
    private Object value;
    private Long timestamp;

    /** Constructor. */
    public StateStoreRecord() {
        // Default constructor
    }

    /**
     * Constructor without timestamp.
     *
     * @param key The key
     * @param value The value
     */
    public StateStoreRecord(String key, Object value) {
        this(key, value, null);
    }

    /**
     * Constructor.
     *
     * @param key The key
     * @param value The value
     * @param timestamp The timestamp
     */
    public StateStoreRecord(String key, Object value, Long timestamp) {
        this.key = key;
        // Round-trip the value through JSON to avoid issues between Avro and Jackson
        this.value = jsonToObject(convertObject(value));
        this.timestamp = timestamp;
    }
}
| java | Apache-2.0 | 7b912c3bc711629f1745ae4a60bffeaed649d07a | 2026-01-05T02:38:06.284501Z | false |
michelin/kstreamplify | https://github.com/michelin/kstreamplify/blob/7b912c3bc711629f1745ae4a60bffeaed649d07a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/initializer/KafkaStreamsInitializer.java | kstreamplify-core/src/main/java/com/michelin/kstreamplify/initializer/KafkaStreamsInitializer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.michelin.kstreamplify.initializer;
import static java.util.Optional.ofNullable;
import com.michelin.kstreamplify.context.KafkaStreamsExecutionContext;
import com.michelin.kstreamplify.property.PropertiesUtils;
import com.michelin.kstreamplify.server.KafkaStreamsHttpServer;
import java.util.HashMap;
import java.util.Properties;
import java.util.stream.Collectors;
import lombok.Getter;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.Topology;
import org.apache.kafka.streams.errors.StreamsUncaughtExceptionHandler;
import org.apache.kafka.streams.state.HostInfo;
/** The Kafka Streams initializer class: loads properties, builds the topology and starts the streams. */
@Slf4j
@Getter
public class KafkaStreamsInitializer {
    /** The application server property name. */
    public static final String APPLICATION_SERVER_PROPERTY_NAME = "application.server.var.name";

    /** The server port property name. */
    public static final String SERVER_PORT_PROPERTY_NAME = "server.port";

    /** The default application server variable name. */
    public static final String DEFAULT_APPLICATION_SERVER_VARIABLE_NAME = "APPLICATION_SERVER";

    /** The Kafka Streams instance. */
    private KafkaStreams kafkaStreams;

    /** The Kafka Streams starter. */
    protected KafkaStreamsStarter kafkaStreamsStarter;

    /** The topology. */
    private Topology topology;

    /** The Kafka properties. */
    protected Properties kafkaProperties;

    /** The application properties. */
    protected Properties properties = new Properties();

    /** The DLQ topic. */
    private String dlq;

    /** The host info. */
    private HostInfo hostInfo;

    /** The server port. */
    protected int serverPort;

    /**
     * Constructor.
     *
     * @param kafkaStreamsStarter The Kafka Streams starter
     */
    public KafkaStreamsInitializer(KafkaStreamsStarter kafkaStreamsStarter) {
        this.kafkaStreamsStarter = kafkaStreamsStarter;
    }

    /**
     * Start Kstreamplify: init properties, serdes, DLQ and host info, build the topology, then start
     * Kafka Streams and the HTTP server.
     */
    public void start() {
        initProperties();
        initSerdeConfig();
        initDlq();
        initHostInfo();

        StreamsBuilder streamsBuilder = new StreamsBuilder();
        kafkaStreamsStarter.topology(streamsBuilder);

        topology = streamsBuilder.build(KafkaStreamsExecutionContext.getProperties());
        log.info("Topology description:\n {}", topology.describe());

        kafkaStreams = new KafkaStreams(topology, KafkaStreamsExecutionContext.getProperties());
        registerMetrics(kafkaStreams);
        kafkaStreamsStarter.onStart(kafkaStreams);

        Runtime.getRuntime().addShutdownHook(new Thread(kafkaStreams::close));

        // The user-provided handler, if any, takes precedence over the default shutdown-client handler
        kafkaStreams.setUncaughtExceptionHandler(
                ofNullable(kafkaStreamsStarter.uncaughtExceptionHandler()).orElse(this::onStreamsUncaughtException));

        kafkaStreams.setStateListener(this::onStateChange);
        kafkaStreams.start();
        startHttpServer();
    }

    /** Init the Kafka Streams execution context serdes config from the Kafka properties. */
    private void initSerdeConfig() {
        KafkaStreamsExecutionContext.setSerdesConfig(kafkaProperties.entrySet().stream()
                .collect(Collectors.toMap(
                        e -> String.valueOf(e.getKey()),
                        e -> String.valueOf(e.getValue()),
                        (prev, next) -> next,
                        HashMap::new)));
    }

    /** Init the Kafka Streams default DLQ. */
    private void initDlq() {
        dlq = kafkaStreamsStarter.dlqTopic();
        KafkaStreamsExecutionContext.setDlqTopicName(dlq);
    }

    /** Init the host information, resolving the host from an environment variable when available. */
    private void initHostInfo() {
        String applicationServerVarName = (String) kafkaProperties.getOrDefault(
                APPLICATION_SERVER_PROPERTY_NAME, DEFAULT_APPLICATION_SERVER_VARIABLE_NAME);

        String applicationServer = System.getenv(applicationServerVarName);
        String host = StringUtils.isNotBlank(applicationServer) ? applicationServer : "localhost";

        hostInfo = new HostInfo(host, serverPort);

        log.info(
                "The Kafka Streams \"{}\" is running on {}:{}",
                KafkaStreamsExecutionContext.getProperties().getProperty(StreamsConfig.APPLICATION_ID_CONFIG),
                hostInfo.host(),
                hostInfo.port());

        KafkaStreamsExecutionContext.getProperties()
                .put(StreamsConfig.APPLICATION_SERVER_CONFIG, "%s:%s".formatted(hostInfo.host(), hostInfo.port()));
    }

    /** Init the HTTP server. */
    protected void startHttpServer() {
        KafkaStreamsHttpServer server = new KafkaStreamsHttpServer(this);
        server.start();
    }

    /** Init all properties. */
    protected void initProperties() {
        properties = PropertiesUtils.loadProperties();
        // NOTE(review): assumes the loaded property is already an Integer — confirm PropertiesUtils contract
        serverPort = (Integer) properties.get(SERVER_PORT_PROPERTY_NAME);
        kafkaProperties = PropertiesUtils.loadKafkaProperties(properties);
        KafkaStreamsExecutionContext.registerProperties(kafkaProperties);
    }

    /**
     * Default uncaught exception handler: logs the error and shuts down the Kafka Streams client.
     *
     * @param exception The exception
     * @return The execution
     */
    protected StreamsUncaughtExceptionHandler.StreamThreadExceptionResponse onStreamsUncaughtException(
            Throwable exception) {
        log.error(
                "A not covered exception occurred in {} Kafka Streams. Shutting down...",
                kafkaProperties.get(StreamsConfig.APPLICATION_ID_CONFIG),
                exception);
        return StreamsUncaughtExceptionHandler.StreamThreadExceptionResponse.SHUTDOWN_CLIENT;
    }

    /**
     * Default state change listener: exits the JVM when the streams reach the ERROR state.
     *
     * @param newState The new state
     * @param oldState The old state
     */
    protected void onStateChange(KafkaStreams.State newState, KafkaStreams.State oldState) {
        if (newState.equals(KafkaStreams.State.ERROR)) {
            log.error(
                    "The {} Kafka Streams is in error state...",
                    kafkaProperties.get(StreamsConfig.APPLICATION_ID_CONFIG));
            System.exit(3);
        }
    }

    /**
     * Register the metrics. No-op by default; meant to be overridden by subclasses.
     *
     * @param kafkaStreams The Kafka Streams instance
     */
    protected void registerMetrics(KafkaStreams kafkaStreams) {
        // Nothing to do here
    }

    /**
     * Check if the Kafka Streams is NOT running.
     *
     * @return True if the Kafka Streams is not in the RUNNING state, false otherwise
     */
    public boolean isNotRunning() {
        return !kafkaStreams.state().equals(KafkaStreams.State.RUNNING);
    }
}
| java | Apache-2.0 | 7b912c3bc711629f1745ae4a60bffeaed649d07a | 2026-01-05T02:38:06.284501Z | false |
michelin/kstreamplify | https://github.com/michelin/kstreamplify/blob/7b912c3bc711629f1745ae4a60bffeaed649d07a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/initializer/KafkaStreamsStarter.java | kstreamplify-core/src/main/java/com/michelin/kstreamplify/initializer/KafkaStreamsStarter.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.michelin.kstreamplify.initializer;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.errors.StreamsUncaughtExceptionHandler;
/** Base abstract class to extend for defining a Kafka Streams application started by Kstreamplify. */
public abstract class KafkaStreamsStarter {
    /** Constructor. */
    protected KafkaStreamsStarter() {}
    /**
     * Define the topology of the Kafka Streams.
     *
     * @param streamsBuilder The streams builder
     */
    public abstract void topology(StreamsBuilder streamsBuilder);
    /**
     * Define the dead letter queue (DLQ) topic. If you don't want to use the DLQ topic, you can return
     * {@link org.apache.commons.lang3.StringUtils#EMPTY}.
     *
     * @return The dead letter queue (DLQ) topic
     */
    public abstract String dlqTopic();
    /**
     * Define runnable code after the Kafka Streams startup. No-op by default.
     *
     * @param kafkaStreams The Kafka Streams instance
     */
    public void onStart(KafkaStreams kafkaStreams) {}
    /**
     * Register a custom uncaught exception handler. Returns null by default, meaning the
     * initializer falls back to its own handler.
     *
     * @return StreamsUncaughtExceptionHandler The custom uncaught exception handler
     */
    public StreamsUncaughtExceptionHandler uncaughtExceptionHandler() {
        return null;
    }
}
| java | Apache-2.0 | 7b912c3bc711629f1745ae4a60bffeaed649d07a | 2026-01-05T02:38:06.284501Z | false |
michelin/kstreamplify | https://github.com/michelin/kstreamplify/blob/7b912c3bc711629f1745ae4a60bffeaed649d07a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/exception/HttpServerException.java | kstreamplify-core/src/main/java/com/michelin/kstreamplify/exception/HttpServerException.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.michelin.kstreamplify.exception;
/** Thrown when the embedded HTTP server cannot be created. */
public class HttpServerException extends RuntimeException {
    /** Message carried by every instance of this exception. */
    private static final String MESSAGE = "Fail to create the HTTP server";

    /**
     * Constructor.
     *
     * @param cause The cause of the exception
     */
    public HttpServerException(Throwable cause) {
        super(MESSAGE, cause);
    }
}
| java | Apache-2.0 | 7b912c3bc711629f1745ae4a60bffeaed649d07a | 2026-01-05T02:38:06.284501Z | false |
michelin/kstreamplify | https://github.com/michelin/kstreamplify/blob/7b912c3bc711629f1745ae4a60bffeaed649d07a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/exception/UnknownKeyException.java | kstreamplify-core/src/main/java/com/michelin/kstreamplify/exception/UnknownKeyException.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.michelin.kstreamplify.exception;
/** Thrown when a requested key cannot be found in a state store. */
public class UnknownKeyException extends RuntimeException {
    /** Message template; the placeholder receives the missing key. */
    private static final String MESSAGE_TEMPLATE = "Key %s not found";

    /**
     * Constructor.
     *
     * @param key The key that was not found
     */
    public UnknownKeyException(String key) {
        super(String.format(MESSAGE_TEMPLATE, key));
    }
}
| java | Apache-2.0 | 7b912c3bc711629f1745ae4a60bffeaed649d07a | 2026-01-05T02:38:06.284501Z | false |
michelin/kstreamplify | https://github.com/michelin/kstreamplify/blob/7b912c3bc711629f1745ae4a60bffeaed649d07a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/exception/PropertiesFileException.java | kstreamplify-core/src/main/java/com/michelin/kstreamplify/exception/PropertiesFileException.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.michelin.kstreamplify.exception;
/** Thrown when a properties file cannot be read. */
public class PropertiesFileException extends RuntimeException {
    /** Message carried by every instance of this exception. */
    private static final String MESSAGE = "Cannot read properties file";

    /**
     * Constructor.
     *
     * @param cause The cause of the exception
     */
    public PropertiesFileException(Throwable cause) {
        super(MESSAGE, cause);
    }
}
| java | Apache-2.0 | 7b912c3bc711629f1745ae4a60bffeaed649d07a | 2026-01-05T02:38:06.284501Z | false |
michelin/kstreamplify | https://github.com/michelin/kstreamplify/blob/7b912c3bc711629f1745ae4a60bffeaed649d07a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/exception/OtherInstanceResponseException.java | kstreamplify-core/src/main/java/com/michelin/kstreamplify/exception/OtherInstanceResponseException.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.michelin.kstreamplify.exception;
/** Thrown when the response from another Kafka Streams instance cannot be read. */
public class OtherInstanceResponseException extends RuntimeException {
    /** Message carried by every instance of this exception. */
    private static final String MESSAGE = "Fail to read other instance response";

    /**
     * Constructor.
     *
     * @param cause The cause of the exception
     */
    public OtherInstanceResponseException(Throwable cause) {
        super(MESSAGE, cause);
    }
}
| java | Apache-2.0 | 7b912c3bc711629f1745ae4a60bffeaed649d07a | 2026-01-05T02:38:06.284501Z | false |
michelin/kstreamplify | https://github.com/michelin/kstreamplify/blob/7b912c3bc711629f1745ae4a60bffeaed649d07a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/utils/SerdesUtils.java | kstreamplify-core/src/main/java/com/michelin/kstreamplify/utils/SerdesUtils.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.michelin.kstreamplify.utils;
import io.confluent.kafka.streams.serdes.avro.SpecificAvroSerde;
import org.apache.avro.specific.SpecificRecord;
/**
 * The Serdes utils class. Delegates to the replacement class in the {@code serde} package.
 *
 * @deprecated Use {@link com.michelin.kstreamplify.serde.SerdesUtils} instead.
 */
@Deprecated(since = "1.1.0")
public final class SerdesUtils {
    // Utility class: no instantiation
    private SerdesUtils() {}
    /**
     * Return a key serdes for a requested class.
     *
     * <p>Delegates to {@link com.michelin.kstreamplify.serde.SerdesUtils#getKeySerdes()}.
     *
     * @param <T> The class of requested serdes
     * @return a serdes for requested class
     */
    public static <T extends SpecificRecord> SpecificAvroSerde<T> getSerdesForKey() {
        return com.michelin.kstreamplify.serde.SerdesUtils.getKeySerdes();
    }
    /**
     * Return a value serdes for a requested class.
     *
     * <p>Delegates to {@link com.michelin.kstreamplify.serde.SerdesUtils#getValueSerdes()}.
     *
     * @param <T> The class of requested serdes
     * @return a serdes for requested class
     */
    public static <T extends SpecificRecord> SpecificAvroSerde<T> getSerdesForValue() {
        return com.michelin.kstreamplify.serde.SerdesUtils.getValueSerdes();
    }
}
| java | Apache-2.0 | 7b912c3bc711629f1745ae4a60bffeaed649d07a | 2026-01-05T02:38:06.284501Z | false |
michelin/kstreamplify | https://github.com/michelin/kstreamplify/blob/7b912c3bc711629f1745ae4a60bffeaed649d07a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/utils/WindowStateStoreUtils.java | kstreamplify-core/src/main/java/com/michelin/kstreamplify/utils/WindowStateStoreUtils.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.michelin.kstreamplify.utils;
import java.time.Duration;
import java.time.Instant;
import org.apache.kafka.streams.state.WindowStore;
/**
 * The window state store utils.
 *
 * @deprecated Use {@link com.michelin.kstreamplify.store.WindowStateStoreUtils}.
 */
@Deprecated(since = "1.1.0")
public class WindowStateStoreUtils {
    private WindowStateStoreUtils() {}

    /**
     * Put the key/value into the state store, timestamped with the current wall-clock time.
     *
     * @param stateStore The stateStore
     * @param key The key
     * @param value The value
     * @param <K> The template for the key
     * @param <V> The template for the value
     */
    public static <K, V> void put(WindowStore<K, V> stateStore, K key, V value) {
        stateStore.put(key, value, Instant.now().toEpochMilli());
    }

    /**
     * Get the value by the key from the state store.
     *
     * @param stateStore The stateStore
     * @param key The key
     * @param retentionDays The delay of retention
     * @param <K> The template for the key
     * @param <V> The template for the value
     * @return The last value inserted in the state store for the key, or null if none
     */
    public static <K, V> V get(WindowStore<K, V> stateStore, K key, int retentionDays) {
        // Use a single reference time so both window bounds are computed from the same instant
        Instant now = Instant.now();
        // WindowStoreIterator is Closeable: close it to release underlying store resources
        try (var resultIterator = stateStore.backwardFetch(key, now.minus(Duration.ofDays(retentionDays)), now)) {
            if (resultIterator != null && resultIterator.hasNext()) {
                return resultIterator.next().value;
            }
            return null;
        }
    }
}
| java | Apache-2.0 | 7b912c3bc711629f1745ae4a60bffeaed649d07a | 2026-01-05T02:38:06.284501Z | false |
michelin/kstreamplify | https://github.com/michelin/kstreamplify/blob/7b912c3bc711629f1745ae4a60bffeaed649d07a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/utils/TopicWithSerde.java | kstreamplify-core/src/main/java/com/michelin/kstreamplify/utils/TopicWithSerde.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.michelin.kstreamplify.utils;
import static com.michelin.kstreamplify.serde.TopicWithSerde.SELF;
import com.michelin.kstreamplify.topic.TopicUtils;
import lombok.Getter;
import org.apache.kafka.common.serialization.Serde;
import org.apache.kafka.common.utils.Bytes;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.kstream.Consumed;
import org.apache.kafka.streams.kstream.GlobalKTable;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.KTable;
import org.apache.kafka.streams.kstream.Materialized;
import org.apache.kafka.streams.kstream.Produced;
import org.apache.kafka.streams.state.KeyValueStore;
/**
 * Wrapper that bundles a topic name with its key/value serdes and exposes convenience
 * methods for the usual Kafka Streams interactions (stream, table, global table, produce).
 *
 * @param <K> The model used as the key avro of the topic. Can be String (Recommended)
 * @param <V> The model used as the value avro of the topic.
 * @deprecated Use {@link com.michelin.kstreamplify.serde.TopicWithSerde}.
 */
@Deprecated(since = "1.1.0")
public class TopicWithSerde<K, V> {
    private final String topicName;
    private final String prefixKey;

    @Getter
    private final Serde<K> keySerde;

    @Getter
    private final Serde<V> valueSerde;

    /**
     * Constructor using the default ({@code self}) prefix key.
     *
     * @param topicName The name of the topic
     * @param keySerde The key serde
     * @param valueSerde The value serde
     */
    public TopicWithSerde(String topicName, Serde<K> keySerde, Serde<V> valueSerde) {
        // Delegate to the full constructor to keep initialization in one place
        this(topicName, SELF, keySerde, valueSerde);
    }

    /**
     * Constructor.
     *
     * @param topicName The name of the topic
     * @param prefixKey The prefix key
     * @param keySerde The key serde
     * @param valueSerde The value serde
     */
    public TopicWithSerde(String topicName, String prefixKey, Serde<K> keySerde, Serde<V> valueSerde) {
        this.topicName = topicName;
        this.prefixKey = prefixKey;
        this.keySerde = keySerde;
        this.valueSerde = valueSerde;
    }

    /**
     * Get the unprefixed topic name.
     *
     * @return The unprefixed topic name
     */
    public String getUnPrefixedName() {
        return topicName;
    }

    /**
     * Remap and prefix the topic name.
     *
     * @return The remapped and prefixed topic name
     */
    @Override
    public String toString() {
        return TopicUtils.remapAndPrefix(topicName, prefixKey);
    }

    /**
     * Wrapper for {@link StreamsBuilder#stream(String, Consumed)}.
     *
     * @param streamsBuilder The streams builder
     * @return A ${@link KStream} from the given topic
     */
    public KStream<K, V> stream(StreamsBuilder streamsBuilder) {
        return streamsBuilder.stream(toString(), Consumed.with(keySerde, valueSerde));
    }

    /**
     * Wrapper for {@link StreamsBuilder#table(String, Consumed, Materialized)}.
     *
     * @param streamsBuilder The streams builder
     * @param storeName The store name
     * @return A ${@link KTable} from the given topic
     */
    public KTable<K, V> table(StreamsBuilder streamsBuilder, String storeName) {
        return streamsBuilder.table(toString(), Consumed.with(keySerde, valueSerde), materializedAs(storeName));
    }

    /**
     * Wrapper for {@link StreamsBuilder#globalTable(String, Consumed, Materialized)}.
     *
     * @param streamsBuilder The streams builder
     * @param storeName The store name
     * @return A ${@link GlobalKTable} from the given topic
     */
    public GlobalKTable<K, V> globalTable(StreamsBuilder streamsBuilder, String storeName) {
        return streamsBuilder.globalTable(toString(), Consumed.with(keySerde, valueSerde), materializedAs(storeName));
    }

    /**
     * Wrapper for {@link KStream#to(String, Produced)}.
     *
     * @param stream The stream to produce
     */
    public void produce(KStream<K, V> stream) {
        stream.to(toString(), Produced.with(keySerde, valueSerde));
    }

    /** Builds the key/value store materialization shared by {@code table} and {@code globalTable}. */
    private Materialized<K, V, KeyValueStore<Bytes, byte[]>> materializedAs(String storeName) {
        return Materialized.<K, V, KeyValueStore<Bytes, byte[]>>as(storeName)
                .withKeySerde(keySerde)
                .withValueSerde(valueSerde);
    }
}
| java | Apache-2.0 | 7b912c3bc711629f1745ae4a60bffeaed649d07a | 2026-01-05T02:38:06.284501Z | false |
michelin/kstreamplify | https://github.com/michelin/kstreamplify/blob/7b912c3bc711629f1745ae4a60bffeaed649d07a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/properties/RocksDbConfig.java | kstreamplify-core/src/main/java/com/michelin/kstreamplify/properties/RocksDbConfig.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.michelin.kstreamplify.properties;
import com.michelin.kstreamplify.context.KafkaStreamsExecutionContext;
import java.util.Map;
import org.apache.kafka.streams.state.RocksDBConfigSetter;
import org.rocksdb.BlockBasedTableConfig;
import org.rocksdb.CompressionType;
import org.rocksdb.Options;
/**
 * The RockDB configuration class.
 *
 * @deprecated Use {@link com.michelin.kstreamplify.store.RocksDbConfig}.
 */
@Deprecated(since = "1.1.0")
public class RocksDbConfig implements RocksDBConfigSetter {
    /** The RocksDB cache size config key. */
    public static final String ROCKSDB_CACHE_SIZE_CONFIG = "rocksdb.config.cache.size";

    /** The RocksDB write buffer size config key. */
    public static final String ROCKSDB_WRITE_BUFFER_SIZE_CONFIG = "rocksdb.config.write.buffer.size";

    /** The RocksDB block size config key. */
    public static final String ROCKSDB_BLOCK_SIZE_CONFIG = "rocksdb.config.block.size";

    /** The RocksDB max write buffer config. */
    public static final String ROCKSDB_MAX_WRITE_BUFFER_CONFIG = "rocksdb.config.max.write.buffer";

    /** The RocksDB compression type config key. */
    public static final String ROCKSDB_COMPRESSION_TYPE_CONFIG = "rocksdb.config.compression.type";

    /** The RocksDB cache index block enabled config. */
    public static final String ROCKSDB_CACHE_INDEX_BLOCK_ENABLED_CONFIG = "rocksdb.config.cache.index.block.enabled";

    /** One KB in B. */
    private static final long ONE_KB = 1024L;

    /** The RocksDB default cache size (16 MB). */
    public static final Long ROCKSDB_CACHE_SIZE_DEFAULT = 16 * ONE_KB * ONE_KB;

    /** The RocksDB default write buffer size (4 MB). */
    public static final Long ROCKSDB_WRITE_BUFFER_SIZE_DEFAULT = 4 * ONE_KB * ONE_KB;

    /** The RocksDB default block size (4 KB). */
    public static final Long ROCKSDB_BLOCK_SIZE_DEFAULT = 4 * ONE_KB;

    /** The RocksDB default max write buffer. */
    public static final Integer ROCKSDB_MAX_WRITE_BUFFER_DEFAULT = 2;

    /** The RocksDB default compression type (empty string = no compression override). */
    public static final String ROCKSDB_COMPRESSION_TYPE_DEFAULT = "";

    /** The RocksDB default cache index block enabled. */
    public static final Boolean ROCKSDB_CACHE_INDEX_BLOCK_ENABLED_DEFAULT = true;

    /** The RocksDB cache, created lazily on the first {@link #setConfig} call and shared across stores. */
    private org.rocksdb.Cache cache = null;

    /** Constructor. */
    public RocksDbConfig() {
        // Default constructor
    }

    /**
     * Set the RocksDB configuration from the Kstreamplify properties, falling back to defaults.
     *
     * @param storeName The store name
     * @param options The options
     * @param configs The configs
     */
    @Override
    public void setConfig(final String storeName, final Options options, final Map<String, Object> configs) {
        long blockCacheSize = longProperty(ROCKSDB_CACHE_SIZE_CONFIG, ROCKSDB_CACHE_SIZE_DEFAULT);
        if (cache == null) {
            cache = new org.rocksdb.LRUCache(blockCacheSize);
        }

        BlockBasedTableConfig tableConfig = (BlockBasedTableConfig) options.tableFormatConfig();
        tableConfig.setBlockCache(cache);
        tableConfig.setBlockSize(longProperty(ROCKSDB_BLOCK_SIZE_CONFIG, ROCKSDB_BLOCK_SIZE_DEFAULT));
        tableConfig.setCacheIndexAndFilterBlocks(booleanProperty(
                ROCKSDB_CACHE_INDEX_BLOCK_ENABLED_CONFIG, ROCKSDB_CACHE_INDEX_BLOCK_ENABLED_DEFAULT));
        options.setTableFormatConfig(tableConfig);

        options.setMaxWriteBufferNumber(
                intProperty(ROCKSDB_MAX_WRITE_BUFFER_CONFIG, ROCKSDB_MAX_WRITE_BUFFER_DEFAULT));
        options.setWriteBufferSize(
                longProperty(ROCKSDB_WRITE_BUFFER_SIZE_CONFIG, ROCKSDB_WRITE_BUFFER_SIZE_DEFAULT));

        String compressionType = KafkaStreamsExecutionContext.getProperties()
                .getProperty(ROCKSDB_COMPRESSION_TYPE_CONFIG, ROCKSDB_COMPRESSION_TYPE_DEFAULT);
        options.setCompressionType(CompressionType.getCompressionType(compressionType));
    }

    /** Reads a long property from the execution context, or returns the default when absent. */
    private static long longProperty(String key, long defaultValue) {
        var props = KafkaStreamsExecutionContext.getProperties();
        return props.containsKey(key) ? Long.parseLong(props.getProperty(key)) : defaultValue;
    }

    /** Reads an int property from the execution context, or returns the default when absent. */
    private static int intProperty(String key, int defaultValue) {
        var props = KafkaStreamsExecutionContext.getProperties();
        return props.containsKey(key) ? Integer.parseInt(props.getProperty(key)) : defaultValue;
    }

    /** Reads a boolean property from the execution context, or returns the default when absent. */
    private static boolean booleanProperty(String key, boolean defaultValue) {
        var props = KafkaStreamsExecutionContext.getProperties();
        return props.containsKey(key) ? Boolean.parseBoolean(props.getProperty(key)) : defaultValue;
    }

    /**
     * Release the shared cache.
     *
     * @param storeName The store name
     * @param options The options
     */
    @Override
    public void close(String storeName, Options options) {
        // Guard against an NPE when close() is invoked without setConfig() ever having run
        if (cache != null) {
            cache.close();
        }
    }
}
| java | Apache-2.0 | 7b912c3bc711629f1745ae4a60bffeaed649d07a | 2026-01-05T02:38:06.284501Z | false |
michelin/kstreamplify | https://github.com/michelin/kstreamplify/blob/7b912c3bc711629f1745ae4a60bffeaed649d07a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/property/PropertiesUtils.java | kstreamplify-core/src/main/java/com/michelin/kstreamplify/property/PropertiesUtils.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.michelin.kstreamplify.property;
import com.michelin.kstreamplify.exception.PropertiesFileException;
import java.io.IOException;
import java.io.InputStream;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Properties;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.Strings;
import org.yaml.snakeyaml.Yaml;
/** Properties utils. */
public final class PropertiesUtils {
    /** The Kafka properties prefix. */
    public static final String KAFKA_PROPERTIES_PREFIX = "kafka.properties";

    /** The default property file. */
    public static final String DEFAULT_PROPERTY_FILE = "application.yml";

    /** The property separator. */
    public static final String PROPERTY_SEPARATOR = ".";

    private PropertiesUtils() {}

    /**
     * Load the properties from the default properties file.
     *
     * @return The properties (empty if the YAML document is empty)
     * @throws PropertiesFileException if the file is missing from the classpath or cannot be read
     */
    public static Properties loadProperties() {
        Yaml yaml = new Yaml();
        try (InputStream inputStream =
                PropertiesUtils.class.getClassLoader().getResourceAsStream(DEFAULT_PROPERTY_FILE)) {
            if (inputStream == null) {
                // getResourceAsStream returns null when the resource is absent; fail fast with
                // a clear cause instead of the obscure NPE SnakeYAML would raise on a null stream
                throw new PropertiesFileException(
                        new IOException(DEFAULT_PROPERTY_FILE + " not found on the classpath"));
            }
            LinkedHashMap<String, Object> propsMap = yaml.load(inputStream);
            // SnakeYAML loads an empty document as null; treat that as "no properties"
            // rather than letting parsePropertiesMap NPE on it
            return propsMap == null ? new Properties() : parsePropertiesMap(propsMap);
        } catch (IOException e) {
            throw new PropertiesFileException(e);
        }
    }

    /**
     * Get the Kafka properties only from the given properties, with the
     * {@code kafka.properties.} prefix stripped from each key.
     *
     * @param props The properties
     * @return The Kafka properties
     */
    public static Properties loadKafkaProperties(Properties props) {
        Properties resultProperties = new Properties();
        for (var prop : props.entrySet()) {
            if (prop.getKey().toString().contains(KAFKA_PROPERTIES_PREFIX)) {
                resultProperties.put(
                        Strings.CS.remove(prop.getKey().toString(), KAFKA_PROPERTIES_PREFIX + PROPERTY_SEPARATOR),
                        prop.getValue());
            }
        }
        return resultProperties;
    }

    /**
     * Parse a map into Properties, flattening nested maps into dotted keys.
     *
     * @param map The map
     * @return The properties
     */
    private static Properties parsePropertiesMap(LinkedHashMap<String, Object> map) {
        return parseKey("", map, null);
    }

    /**
     * Recursively flatten a YAML value into dotted property keys.
     *
     * @param key The key accumulated so far
     * @param map The underlying map or leaf value
     * @param properties The properties accumulator (created on the first call)
     * @return The properties
     */
    private static Properties parseKey(String key, Object map, Properties properties) {
        if (properties == null) {
            properties = new Properties();
        }
        // No separator in front of the very first (root) key segment
        String separator = PROPERTY_SEPARATOR;
        if (StringUtils.isBlank(key)) {
            separator = "";
        }
        if (map instanceof LinkedHashMap<?, ?> hashMap) {
            for (Map.Entry<?, ?> entry : hashMap.entrySet()) {
                parseKey(key + separator + entry.getKey(), entry.getValue(), properties);
            }
        } else {
            // Leaf value: record it under the accumulated dotted key
            properties.put(key, map);
        }
        return properties;
    }

    /**
     * Extract properties by prefix.
     *
     * @param properties The properties
     * @param prefix The prefix to filter by
     * @return The filtered properties (prefix is kept on the keys)
     */
    public static Properties extractPropertiesByPrefix(Properties properties, String prefix) {
        Properties result = new Properties();
        for (String key : properties.stringPropertyNames()) {
            if (key.startsWith(prefix)) {
                result.setProperty(key, properties.getProperty(key));
            }
        }
        return result;
    }

    /**
     * Check if a feature is enabled based on properties.
     *
     * @param properties The properties
     * @param key The property key
     * @param defaultValue The default value if the property is not set
     * @return true if the feature is enabled, false otherwise
     */
    public static boolean isFeatureEnabled(Properties properties, String key, boolean defaultValue) {
        return Boolean.parseBoolean(properties.getProperty(key, Boolean.toString(defaultValue)));
    }
}
| java | Apache-2.0 | 7b912c3bc711629f1745ae4a60bffeaed649d07a | 2026-01-05T02:38:06.284501Z | false |
michelin/kstreamplify | https://github.com/michelin/kstreamplify/blob/7b912c3bc711629f1745ae4a60bffeaed649d07a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/property/KstreamplifyConfig.java | kstreamplify-core/src/main/java/com/michelin/kstreamplify/property/KstreamplifyConfig.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.michelin.kstreamplify.property;
import lombok.AccessLevel;
import lombok.NoArgsConstructor;
/** Kstreamplify configuration constants. */
public abstract class KstreamplifyConfig {
    /** The DLQ properties prefix. */
    public static final String DLQ_PROPERTIES_PREFIX = "dlq";

    /** Property key to configure handling of Schema Registry RestClient exceptions in DLQ deserialization. */
    public static final String DLQ_DESERIALIZATION_HANDLER_FORWARD_REST_CLIENT_EXCEPTION =
            "dlq.deserialization-handler.forward-restclient-exception";

    /** Constants holder: not instantiable. */
    private KstreamplifyConfig() {}
}
| java | Apache-2.0 | 7b912c3bc711629f1745ae4a60bffeaed649d07a | 2026-01-05T02:38:06.284501Z | false |
michelin/kstreamplify | https://github.com/michelin/kstreamplify/blob/7b912c3bc711629f1745ae4a60bffeaed649d07a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/converter/JsonToAvroConverter.java | kstreamplify-core/src/main/java/com/michelin/kstreamplify/converter/JsonToAvroConverter.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.michelin.kstreamplify.converter;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.JsonArray;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
import com.google.gson.ToNumberPolicy;
import java.math.BigDecimal;
import java.math.MathContext;
import java.math.RoundingMode;
import java.time.Instant;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.time.temporal.ChronoUnit;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import java.util.UUID;
import org.apache.avro.LogicalTypes;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.specific.SpecificRecordBase;
/** The class to convert Json to Avro. */
public class JsonToAvroConverter {
    // Shared, thread-safe Gson instance. LONG_OR_DOUBLE keeps integral JSON numbers as Long
    // instead of mapping every number to Double.
    private static final Gson gson = new GsonBuilder()
            .setObjectToNumberStrategy(ToNumberPolicy.LONG_OR_DOUBLE)
            .setPrettyPrinting()
            .create();

    private JsonToAvroConverter() {}

    /**
     * Convert a json string to an object.
     *
     * @param json the json string
     * @return the object, or {@code null} when the input is {@code null}
     */
    public static Object jsonToObject(String json) {
        if (json == null) {
            return null;
        }
        return gson.fromJson(json, Object.class);
    }

    /**
     * Convert a file in json to avro.
     *
     * @param file the file in json
     * @param schema the avro schema to use
     * @return the record in avro
     */
    public static SpecificRecordBase jsonToAvro(String file, Schema schema) {
        return jsonToAvro(JsonParser.parseString(file).getAsJsonObject(), schema);
    }

    /**
     * Convert json to avro.
     *
     * @param jsonEvent the json record
     * @param schema the avro schema to use
     * @return the record in avro, or {@code null} if the conversion fails for any reason
     */
    public static SpecificRecordBase jsonToAvro(JsonObject jsonEvent, Schema schema) {
        try {
            // The generated SpecificRecord class is located by reflection from the
            // schema namespace + name, then instantiated via its no-arg constructor
            SpecificRecordBase message = baseClass(schema.getNamespace(), schema.getName())
                    .getDeclaredConstructor()
                    .newInstance();
            populateGenericRecordFromJson(jsonEvent, message);
            return message;
        } catch (Exception e) {
            // NOTE(review): every failure (missing class, bad field, parse error) is silently
            // swallowed and surfaced as null — callers must null-check; consider logging
            return null;
        }
    }

    /**
     * Populate avro records from json.
     *
     * <p>Recursively walks the JSON object and fills the matching Avro fields, dispatching on the
     * field's schema type (RECORD, MAP, ARRAY, or scalar).
     *
     * @param jsonObject json data to provide to the avro record
     * @param message the avro record to populate
     */
    private static void populateGenericRecordFromJson(JsonObject jsonObject, SpecificRecordBase message) {
        // Iterate over object attributes
        jsonObject.keySet().forEach(currentKey -> {
            try {
                var currentValue = jsonObject.get(currentKey);

                // If this is an object, add to prefix and call method again
                if (currentValue instanceof JsonObject currentValueJsonObject) {
                    Schema currentSchema =
                            message.getSchema().getField(currentKey).schema();

                    // If the current value is a UNION
                    if (currentSchema.getType().equals(Schema.Type.UNION)) {
                        // Then research the first NOT NULL sub value
                        Optional<Schema> notNullSchema = currentSchema.getTypes().stream()
                                .filter(s -> !s.getType().equals(Schema.Type.NULL))
                                .findAny();

                        if (notNullSchema.isPresent()) {
                            currentSchema = notNullSchema.get();
                        }
                    }

                    switch (currentSchema.getType()) {
                        case RECORD -> {
                            // Nested record: instantiate the generated class and recurse.
                            // NOTE(review): assumes the nested type lives in the SAME namespace
                            // as the parent record — TODO confirm for cross-namespace schemas
                            SpecificRecordBase currentRecord = baseClass(
                                            message.getSchema().getNamespace(), currentSchema.getName())
                                    .getDeclaredConstructor()
                                    .newInstance();
                            populateGenericRecordFromJson(currentValueJsonObject, currentRecord);
                            message.put(currentKey, currentRecord);
                        }
                        case MAP -> {
                            Map<String, Object> map = new HashMap<>();
                            if (!currentSchema.getValueType().getType().equals(Schema.Type.RECORD)) {
                                // Map of scalars: convert each entry by the value schema type
                                for (String key : currentValueJsonObject.keySet()) {
                                    Object value = populateFieldWithCorrespondingType(
                                            currentValueJsonObject.get(key),
                                            currentSchema.getValueType().getType());
                                    map.put(key, value);
                                }
                            } else {
                                // Map of records: recurse into each entry
                                for (String key : currentValueJsonObject.keySet()) {
                                    SpecificRecordBase mapValueRecord = baseClass(
                                                    message.getSchema().getNamespace(),
                                                    currentSchema.getValueType().getName())
                                            .getDeclaredConstructor()
                                            .newInstance();
                                    populateGenericRecordFromJson(
                                            currentValueJsonObject.get(key).getAsJsonObject(), mapValueRecord);
                                    map.put(key, mapValueRecord);
                                }
                            }
                            message.put(currentKey, map);
                        }
                        default ->
                            message.put(
                                    currentKey,
                                    populateFieldWithCorrespondingType(currentValue, currentSchema.getType()));
                    }
                } else if (currentValue instanceof JsonArray jsonArray) {
                    // If this is an Array, call method for each one of them
                    var arraySchema = message.getSchema().getField(currentKey).schema();
                    // NOTE(review): .get() without isPresent() — throws if the union is all-NULL
                    Schema arrayType = arraySchema.getType() != Schema.Type.UNION
                            ? arraySchema
                            : arraySchema.getTypes().stream()
                                    .filter(s -> s.getType() != Schema.Type.NULL)
                                    .findFirst()
                                    .get();
                    Schema elementType = arrayType.getElementType();

                    if (elementType != null && Schema.Type.RECORD.equals(elementType.getType())) {
                        // Array of records: build each element recursively
                        ArrayList<GenericRecord> recordArray = new ArrayList<>();
                        for (int i = 0; i < jsonArray.size(); i++) {
                            SpecificRecordBase currentRecord = baseClass(
                                            message.getSchema().getNamespace(), elementType.getName())
                                    .getDeclaredConstructor()
                                    .newInstance();
                            populateGenericRecordFromJson((JsonObject) jsonArray.get(i), currentRecord);
                            recordArray.add(currentRecord);
                        }
                        message.put(currentKey, recordArray);
                    } else {
                        // Array of scalars: convert each element by the element schema type
                        ArrayList<Object> objArray = new ArrayList<>();
                        for (int i = 0; i < ((JsonArray) currentValue).size(); i++) {
                            Object obj = populateFieldWithCorrespondingType(
                                    (((JsonArray) currentValue).get(i)), elementType.getType());
                            objArray.add(obj);
                        }
                        message.put(currentKey, objArray);
                    }
                } else {
                    // Otherwise, put the value in the record after parsing according to its
                    // corresponding schema type
                    if (!jsonObject.get(currentKey).isJsonNull()) {
                        populateFieldInRecordWithCorrespondingType(jsonObject, currentKey, message);
                    }
                }
            } catch (Exception e) {
                throw new IllegalStateException(e);
            }
        });
    }

    /**
     * Populate field with corresponding type.
     *
     * @param jsonElement the json element to convert
     * @param type the type of the element
     * @return the element converted with the corresponding type (falls back to its string form
     *     for any type not listed)
     */
    private static Object populateFieldWithCorrespondingType(JsonElement jsonElement, Schema.Type type) {
        return switch (type) {
            case INT -> jsonElement.getAsInt();
            case LONG -> jsonElement.getAsLong();
            case FLOAT -> jsonElement.getAsFloat();
            case DOUBLE -> jsonElement.getAsDouble();
            case BOOLEAN -> jsonElement.getAsBoolean();
            default -> jsonElement.getAsString();
        };
    }

    /**
     * Populate field in record with corresponding type, honoring Avro logical types
     * (date, time, timestamps, decimal, uuid) where declared on the field schema.
     *
     * <p>For UNION fields the first non-NULL branch is used. Numeric logical types accept either
     * the numeric epoch form or an ISO string (the string parse is the NumberFormatException
     * fallback).
     *
     * @param jsonObject data to provide to the avro record
     * @param fieldName the name to populate
     * @param result the avro record populated
     */
    @SuppressWarnings("unchecked")
    private static void populateFieldInRecordWithCorrespondingType(
            JsonObject jsonObject, String fieldName, GenericRecord result) {
        Schema fieldSchema = result.getSchema().getField(fieldName).schema();
        Optional<Schema> optionalFieldType = fieldSchema.getType() != Schema.Type.UNION
                ? Optional.of(fieldSchema)
                : fieldSchema.getTypes().stream()
                        .filter(s -> s.getType() != Schema.Type.NULL)
                        .findFirst();

        if (optionalFieldType.isPresent()) {
            Schema fieldType = optionalFieldType.get();
            switch (fieldType.getType()) {
                case INT -> {
                    if (fieldType.getLogicalType() != null
                            && fieldType.getLogicalType().getName().equals("date")) {
                        result.put(
                                fieldName,
                                LocalDate.parse(jsonObject.get(fieldName).getAsString()));
                    } else if (fieldType.getLogicalType() != null
                            && fieldType.getLogicalType().getName().equals("time-millis")) {
                        result.put(
                                fieldName,
                                LocalTime.parse(
                                        jsonObject.get(fieldName).getAsString(), DateTimeFormatter.ISO_LOCAL_TIME));
                    } else {
                        result.put(fieldName, jsonObject.get(fieldName).getAsInt());
                    }
                }
                case LONG -> {
                    if (fieldType.getLogicalType() != null
                            && fieldType.getLogicalType().getName().equals("timestamp-millis")) {
                        try {
                            result.put(
                                    fieldName,
                                    Instant.ofEpochMilli(
                                            jsonObject.get(fieldName).getAsLong()));
                        } catch (NumberFormatException e) {
                            // Not a number: fall back to parsing the ISO-8601 string form
                            result.put(
                                    fieldName,
                                    Instant.parse(jsonObject.get(fieldName).getAsString()));
                        }
                    } else if (fieldType.getLogicalType() != null
                            && fieldType.getLogicalType().getName().equals("timestamp-micros")) {
                        try {
                            result.put(
                                    fieldName,
                                    Instant.EPOCH.plus(jsonObject.get(fieldName).getAsLong(), ChronoUnit.MICROS));
                        } catch (NumberFormatException e) {
                            result.put(
                                    fieldName,
                                    Instant.parse(jsonObject.get(fieldName).getAsString()));
                        }
                    } else if (fieldType.getLogicalType() != null
                            && fieldType.getLogicalType().getName().equals("local-timestamp-millis")) {
                        try {
                            // NOTE(review): epoch millis are interpreted in the JVM's default
                            // time zone here — verify this matches producer expectations
                            result.put(
                                    fieldName,
                                    LocalDateTime.ofInstant(
                                            Instant.ofEpochMilli(
                                                    jsonObject.get(fieldName).getAsLong()),
                                            ZoneId.systemDefault()));
                        } catch (NumberFormatException e) {
                            result.put(
                                    fieldName,
                                    LocalDateTime.parse(
                                            jsonObject.get(fieldName).getAsString()));
                        }
                    } else if (fieldType.getLogicalType() != null
                            && fieldType.getLogicalType().getName().equals("local-timestamp-micros")) {
                        try {
                            result.put(
                                    fieldName,
                                    LocalDateTime.ofInstant(
                                            Instant.EPOCH.plus(
                                                    jsonObject.get(fieldName).getAsLong(), ChronoUnit.MICROS),
                                            ZoneId.systemDefault()));
                        } catch (NumberFormatException e) {
                            result.put(
                                    fieldName,
                                    LocalDateTime.parse(
                                            jsonObject.get(fieldName).getAsString()));
                        }
                    } else if (fieldType.getLogicalType() != null
                            && fieldType.getLogicalType().getName().equals("time-micros")) {
                        try {
                            result.put(
                                    fieldName,
                                    Instant.EPOCH.plus(jsonObject.get(fieldName).getAsLong(), ChronoUnit.MICROS));
                        } catch (NumberFormatException e) {
                            result.put(
                                    fieldName,
                                    LocalTime.parse(
                                            jsonObject.get(fieldName).getAsString(), DateTimeFormatter.ISO_LOCAL_TIME));
                        }
                    } else {
                        result.put(fieldName, jsonObject.get(fieldName).getAsLong());
                    }
                }
                case FLOAT -> result.put(fieldName, jsonObject.get(fieldName).getAsFloat());
                case DOUBLE -> result.put(fieldName, jsonObject.get(fieldName).getAsDouble());
                case BOOLEAN -> result.put(fieldName, jsonObject.get(fieldName).getAsBoolean());
                case BYTES -> {
                    if (fieldType.getLogicalType() != null
                            && fieldType.getLogicalType().getName().equals("decimal")) {
                        // Apply the scale and precision declared by the decimal logical type
                        result.put(
                                fieldName,
                                new BigDecimal(jsonObject.get(fieldName).getAsString())
                                        .setScale(
                                                ((LogicalTypes.Decimal) fieldType.getLogicalType()).getScale(),
                                                RoundingMode.HALF_UP)
                                        .round(new MathContext(
                                                ((LogicalTypes.Decimal) fieldType.getLogicalType()).getPrecision())));
                    } else {
                        // This is not supposed to happen, that would mean that the given field is in Byte format
                        result.put(fieldName, jsonObject.get(fieldName).getAsByte());
                    }
                }
                case STRING -> {
                    if (fieldType.getLogicalType() != null
                            && fieldType.getLogicalType().getName().equals("uuid")) {
                        result.put(
                                fieldName,
                                UUID.fromString(jsonObject.get(fieldName).getAsString()));
                    } else {
                        result.put(fieldName, jsonObject.get(fieldName).getAsString());
                    }
                }
                case ENUM -> {
                    try {
                        // Raw Class is required here: the enum type is only known at runtime
                        Class clazz = Class.forName(fieldSchema.getFullName());
                        var value =
                                Enum.valueOf(clazz, jsonObject.get(fieldName).getAsString());
                        result.put(fieldName, value);
                    } catch (ClassNotFoundException e) {
                        throw new IllegalStateException(e);
                    }
                }
                default -> result.put(fieldName, jsonObject.get(fieldName).getAsString());
            }
        }
    }

    /**
     * Get base class.
     *
     * @param baseNamespace the namespace of the class
     * @param typeName the class type
     * @return the base class
     * @throws IllegalArgumentException if no generated class exists for the namespace + type name
     */
    @SuppressWarnings("unchecked")
    private static Class<SpecificRecordBase> baseClass(String baseNamespace, String typeName) {
        try {
            return (Class<SpecificRecordBase>) Class.forName(baseNamespace + "." + typeName);
        } catch (ClassNotFoundException e) {
            throw new IllegalArgumentException(e);
        }
    }
}
| java | Apache-2.0 | 7b912c3bc711629f1745ae4a60bffeaed649d07a | 2026-01-05T02:38:06.284501Z | false |
michelin/kstreamplify | https://github.com/michelin/kstreamplify/blob/7b912c3bc711629f1745ae4a60bffeaed649d07a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/converter/AvroToJsonConverter.java | kstreamplify-core/src/main/java/com/michelin/kstreamplify/converter/AvroToJsonConverter.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.michelin.kstreamplify.converter;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.JsonDeserializationContext;
import com.google.gson.JsonDeserializer;
import com.google.gson.JsonElement;
import com.google.gson.JsonParseException;
import com.google.gson.JsonPrimitive;
import com.google.gson.JsonSerializationContext;
import com.google.gson.JsonSerializer;
import com.google.gson.ToNumberPolicy;
import java.lang.reflect.Type;
import java.time.Instant;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.time.format.DateTimeFormatter;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.avro.Schema.Field;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.util.Utf8;
/** The class to convert Avro to Json. */
public class AvroToJsonConverter {

    // Single shared Gson instance: adapters and formatters are thread-safe, build it once
    private static final Gson gson = new GsonBuilder()
            .registerTypeAdapter(LocalDate.class, new LocalDateTypeAdapter())
            .registerTypeAdapter(LocalDateTime.class, new LocalDateTimeTypeAdapter())
            .registerTypeAdapter(LocalTime.class, new LocalTimeTypeAdapter())
            .setObjectToNumberStrategy(ToNumberPolicy.LONG_OR_DOUBLE)
            .setPrettyPrinting()
            .create();

    /** Utility class: not instantiable. */
    private AvroToJsonConverter() {}

    /**
     * Convert the value to JSON.
     *
     * @param value The value
     * @return The JSON, or null when the value is null
     */
    public static String convertObject(Object value) {
        if (value == null) {
            return null;
        }

        if (value instanceof GenericRecord genericRecord) {
            return convertRecord(genericRecord);
        }

        return gson.toJson(value);
    }

    /**
     * Convert the values to JSON.
     *
     * @param values The values
     * @return The JSON representations of the values, rendered as a bracketed, comma-separated list
     */
    public static String convertObject(List<Object> values) {
        return values.stream().map(AvroToJsonConverter::convertObject).toList().toString();
    }

    /**
     * Convert the record from avro format to json format.
     *
     * @param inputRecord the record in avro format
     * @return the record in json format
     */
    public static String convertRecord(GenericRecord inputRecord) {
        return gson.toJson(recordAsMap(inputRecord));
    }

    /**
     * Convert avro to a map for json format.
     *
     * @param inputRecord record in avro
     * @return map for json format, keyed by field name
     */
    private static Map<String, Object> recordAsMap(GenericRecord inputRecord) {
        Map<String, Object> recordMapping = new HashMap<>();
        for (Field field : inputRecord.getSchema().getFields()) {
            recordMapping.put(field.name(), fieldValueAsJsonValue(inputRecord.get(field.name())));
        }
        return recordMapping;
    }

    /**
     * Convert a single Avro field value into its JSON-friendly representation.
     *
     * <p>Utf8 and Instant values become strings; lists and maps are converted element-wise, where nested records
     * recurse and every other non-null element is stringified (matching the historical output format); nested records
     * recurse; any other value (numbers, booleans, null, ...) is returned as-is.
     *
     * @param recordValue The raw Avro field value, possibly null
     * @return The JSON-friendly value
     */
    private static Object fieldValueAsJsonValue(Object recordValue) {
        if (recordValue instanceof Utf8 || recordValue instanceof Instant) {
            return recordValue.toString();
        }

        if (recordValue instanceof List<?> recordValueAsList) {
            return recordValueAsList.stream()
                    .map(value -> {
                        if (value instanceof GenericRecord genericRecord) {
                            return recordAsMap(genericRecord);
                        }
                        // Arrays of nullable unions can contain null elements; avoid the NPE of value.toString()
                        return value == null ? null : value.toString();
                    })
                    .toList();
        }

        if (recordValue instanceof Map<?, ?> recordValueAsMap) {
            Map<Object, Object> jsonMap = new HashMap<>();
            recordValueAsMap.forEach((key, value) -> {
                if (value instanceof GenericRecord genericRecord) {
                    jsonMap.put(key, recordAsMap(genericRecord));
                } else {
                    // Maps of nullable unions can contain null values; avoid the NPE of value.toString()
                    jsonMap.put(key, value == null ? null : value.toString());
                }
            });
            return jsonMap;
        }

        if (recordValue instanceof GenericRecord genericRecord) {
            return recordAsMap(genericRecord);
        }

        return recordValue;
    }

    /** Gson adapter rendering/parsing {@link LocalDate} with the yyyy-MM-dd pattern. */
    private static class LocalDateTypeAdapter implements JsonSerializer<LocalDate>, JsonDeserializer<LocalDate> {
        private final DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd");

        @Override
        public JsonElement serialize(
                final LocalDate date, final Type typeOfSrc, final JsonSerializationContext context) {
            return new JsonPrimitive(date.format(formatter));
        }

        @Override
        public LocalDate deserialize(JsonElement json, Type typeOfT, JsonDeserializationContext context)
                throws JsonParseException {
            return LocalDate.parse(json.getAsString(), formatter);
        }
    }

    /** Gson adapter rendering/parsing {@link LocalDateTime} with micro- or nanosecond precision. */
    private static class LocalDateTimeTypeAdapter
            implements JsonSerializer<LocalDateTime>, JsonDeserializer<LocalDateTime> {
        private static final DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss.SSSSSS");
        private static final DateTimeFormatter formatterNano =
                DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss.SSSSSSSSS");

        @Override
        public JsonElement serialize(LocalDateTime localDateTime, Type srcType, JsonSerializationContext context) {
            // NOTE(review): relies on toString() producing 29 chars for nano precision; LocalDateTime.toString()
            // truncates trailing zeros, so nano values ending in zeros fall back to the micro pattern — confirm intended
            if (localDateTime.toString().length() == 29) {
                return new JsonPrimitive(formatterNano.format(localDateTime));
            }
            return new JsonPrimitive(formatter.format(localDateTime));
        }

        @Override
        public LocalDateTime deserialize(JsonElement json, Type typeOfT, JsonDeserializationContext context)
                throws JsonParseException {
            return LocalDateTime.parse(json.getAsString(), formatter);
        }
    }

    /** Gson adapter rendering/parsing {@link LocalTime} with milli- or microsecond precision. */
    private static class LocalTimeTypeAdapter implements JsonSerializer<LocalTime>, JsonDeserializer<LocalTime> {
        private static final DateTimeFormatter formatter = DateTimeFormatter.ofPattern("HH:mm:ss.SSS");
        private static final DateTimeFormatter formatterNano = DateTimeFormatter.ofPattern("HH:mm:ss.SSSSSS");

        @Override
        public JsonElement serialize(LocalTime localTime, Type srcType, JsonSerializationContext context) {
            // NOTE(review): same toString()-length heuristic as above (15 chars == micro precision) — confirm intended
            if (localTime.toString().length() == 15) {
                return new JsonPrimitive(formatterNano.format(localTime));
            }
            return new JsonPrimitive(formatter.format(localTime));
        }

        @Override
        public LocalTime deserialize(JsonElement json, Type typeOfT, JsonDeserializationContext context)
                throws JsonParseException {
            return LocalTime.parse(json.getAsString(), formatter);
        }
    }
}
| java | Apache-2.0 | 7b912c3bc711629f1745ae4a60bffeaed649d07a | 2026-01-05T02:38:06.284501Z | false |
michelin/kstreamplify | https://github.com/michelin/kstreamplify/blob/7b912c3bc711629f1745ae4a60bffeaed649d07a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/serde/SerdesUtils.java | kstreamplify-core/src/main/java/com/michelin/kstreamplify/serde/SerdesUtils.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.michelin.kstreamplify.serde;
import com.michelin.kstreamplify.context.KafkaStreamsExecutionContext;
import io.confluent.kafka.streams.serdes.avro.SpecificAvroSerde;
import org.apache.avro.specific.SpecificRecord;
/** The Serde utils class. */
public final class SerdesUtils {

    /** Utility class: not instantiable. */
    private SerdesUtils() {}

    /**
     * Return a key serde for a requested class.
     *
     * @param <T> The class of requested serdes
     * @return a serdes for requested class
     */
    public static <T extends SpecificRecord> SpecificAvroSerde<T> getKeySerdes() {
        return getSerdes(true);
    }

    /**
     * Return a value serdes for a requested class.
     *
     * @param <T> The class of requested serdes
     * @return a serdes for requested class
     */
    public static <T extends SpecificRecord> SpecificAvroSerde<T> getValueSerdes() {
        return getSerdes(false);
    }

    /**
     * Build a specific Avro serde configured from the shared Kafka Streams execution context.
     *
     * @param isSerdeForKey Whether the serde is configured for record keys (true) or values (false)
     * @param <T> The class of requested serdes
     * @return a configured serde for the requested class
     */
    private static <T extends SpecificRecord> SpecificAvroSerde<T> getSerdes(boolean isSerdeForKey) {
        final SpecificAvroSerde<T> avroSerde = new SpecificAvroSerde<>();
        avroSerde.configure(KafkaStreamsExecutionContext.getSerdesConfig(), isSerdeForKey);
        return avroSerde;
    }
}
| java | Apache-2.0 | 7b912c3bc711629f1745ae4a60bffeaed649d07a | 2026-01-05T02:38:06.284501Z | false |
michelin/kstreamplify | https://github.com/michelin/kstreamplify/blob/7b912c3bc711629f1745ae4a60bffeaed649d07a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/serde/TopicWithSerde.java | kstreamplify-core/src/main/java/com/michelin/kstreamplify/serde/TopicWithSerde.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.michelin.kstreamplify.serde;
import com.michelin.kstreamplify.topic.TopicUtils;
import lombok.Getter;
import org.apache.kafka.common.serialization.Serde;
import org.apache.kafka.common.utils.Bytes;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.kstream.Consumed;
import org.apache.kafka.streams.kstream.GlobalKTable;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.KTable;
import org.apache.kafka.streams.kstream.Materialized;
import org.apache.kafka.streams.kstream.Produced;
import org.apache.kafka.streams.state.KeyValueStore;
/**
 * TopicWithSerde API.
 *
 * @param <K> The type of the key.
 * @param <V> The type of the value.
 */
public class TopicWithSerde<K, V> {
    /** Default prefix property name. */
    public static final String SELF = "self";

    // Unprefixed topic name; the effective name is computed in toString()
    private final String topicName;

    // Key under which the environment prefix for this topic is configured
    private final String prefixKey;

    @Getter
    private final Serde<K> keySerde;

    @Getter
    private final Serde<V> valueSerde;

    /**
     * Constructor using the default {@link #SELF} prefix key.
     *
     * @param topicName The name of the topic
     * @param keySerde The key serde
     * @param valueSerde The value serde
     */
    public TopicWithSerde(String topicName, Serde<K> keySerde, Serde<V> valueSerde) {
        // Delegate to the main constructor so field initialization lives in one place
        this(topicName, SELF, keySerde, valueSerde);
    }

    /**
     * Constructor.
     *
     * @param topicName The name of the topic
     * @param prefixKey The prefix key
     * @param keySerde The key serde
     * @param valueSerde The value serde
     */
    public TopicWithSerde(String topicName, String prefixKey, Serde<K> keySerde, Serde<V> valueSerde) {
        this.topicName = topicName;
        this.prefixKey = prefixKey;
        this.keySerde = keySerde;
        this.valueSerde = valueSerde;
    }

    /**
     * Get the unprefixed topic name.
     *
     * @return The unprefixed topic name
     */
    public String getUnPrefixedName() {
        return topicName;
    }

    /**
     * Remap and prefix the topic name.
     *
     * @return The remapped and prefixed topic name
     */
    @Override
    public String toString() {
        return TopicUtils.remapAndPrefix(topicName, prefixKey);
    }

    /**
     * Wrapper for {@link StreamsBuilder#stream(String, Consumed)}.
     *
     * @param streamsBuilder The streams builder
     * @return A ${@link KStream} from the given topic
     */
    public KStream<K, V> stream(StreamsBuilder streamsBuilder) {
        return streamsBuilder.stream(this.toString(), Consumed.with(keySerde, valueSerde));
    }

    /**
     * Wrapper for {@link StreamsBuilder#table(String, Consumed, Materialized)}.
     *
     * @param streamsBuilder The streams builder
     * @param storeName The store name
     * @return A ${@link KTable} from the given topic
     */
    public KTable<K, V> table(StreamsBuilder streamsBuilder, String storeName) {
        return streamsBuilder.table(
                this.toString(),
                Consumed.with(keySerde, valueSerde),
                Materialized.<K, V, KeyValueStore<Bytes, byte[]>>as(storeName)
                        .withKeySerde(keySerde)
                        .withValueSerde(valueSerde));
    }

    /**
     * Wrapper for {@link StreamsBuilder#globalTable(String, Consumed, Materialized)}.
     *
     * @param streamsBuilder The streams builder
     * @param storeName The store name
     * @return A ${@link GlobalKTable} from the given topic
     */
    public GlobalKTable<K, V> globalTable(StreamsBuilder streamsBuilder, String storeName) {
        return streamsBuilder.globalTable(
                this.toString(),
                Consumed.with(keySerde, valueSerde),
                Materialized.<K, V, KeyValueStore<Bytes, byte[]>>as(storeName)
                        .withKeySerde(keySerde)
                        .withValueSerde(valueSerde));
    }

    /**
     * Wrapper for {@link KStream#to(String, Produced)}.
     *
     * @param stream The stream to produce
     */
    public void produce(KStream<K, V> stream) {
        stream.to(this.toString(), Produced.with(keySerde, valueSerde));
    }
}
| java | Apache-2.0 | 7b912c3bc711629f1745ae4a60bffeaed649d07a | 2026-01-05T02:38:06.284501Z | false |
michelin/kstreamplify | https://github.com/michelin/kstreamplify/blob/7b912c3bc711629f1745ae4a60bffeaed649d07a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/server/KafkaStreamsHttpServer.java | kstreamplify-core/src/main/java/com/michelin/kstreamplify/server/KafkaStreamsHttpServer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.michelin.kstreamplify.server;
import static com.michelin.kstreamplify.service.KubernetesService.DEFAULT_LIVENESS_PATH;
import static com.michelin.kstreamplify.service.KubernetesService.DEFAULT_READINESS_PATH;
import static com.michelin.kstreamplify.service.KubernetesService.LIVENESS_PATH_PROPERTY_NAME;
import static com.michelin.kstreamplify.service.KubernetesService.READINESS_PATH_PROPERTY_NAME;
import static com.michelin.kstreamplify.service.TopologyService.TOPOLOGY_DEFAULT_PATH;
import static com.michelin.kstreamplify.service.TopologyService.TOPOLOGY_PATH_PROPERTY_NAME;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.net.HttpHeaders;
import com.google.common.net.MediaType;
import com.michelin.kstreamplify.exception.HttpServerException;
import com.michelin.kstreamplify.exception.UnknownKeyException;
import com.michelin.kstreamplify.initializer.KafkaStreamsInitializer;
import com.michelin.kstreamplify.service.KubernetesService;
import com.michelin.kstreamplify.service.TopologyService;
import com.michelin.kstreamplify.service.interactivequeries.keyvalue.KeyValueStoreService;
import com.michelin.kstreamplify.service.interactivequeries.keyvalue.TimestampedKeyValueStoreService;
import com.michelin.kstreamplify.service.interactivequeries.window.TimestampedWindowStoreService;
import com.michelin.kstreamplify.service.interactivequeries.window.WindowStoreService;
import com.michelin.kstreamplify.store.StreamsMetadata;
import com.sun.net.httpserver.HttpExchange;
import com.sun.net.httpserver.HttpServer;
import java.io.IOException;
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.InetSocketAddress;
import java.nio.charset.StandardCharsets;
import java.time.Instant;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Properties;
import java.util.function.IntSupplier;
import java.util.stream.Collectors;
import org.apache.kafka.streams.errors.StreamsNotStartedException;
import org.apache.kafka.streams.errors.UnknownStateStoreException;
/** Kafka Streams HTTP server exposing Kubernetes probes, the topology and interactive-query endpoints. */
public class KafkaStreamsHttpServer {
    private static final String DEFAULT_STORE_PATH = "store";
    private static final String DEFAULT_KEY_VALUE_STORE_PATH = "key-value";
    private static final String DEFAULT_WINDOW_STORE_PATH = "window";
    private static final String START_TIME_REQUEST_PARAM = "startTime";
    private static final String END_TIME_REQUEST_PARAM = "endTime";

    private final KafkaStreamsInitializer kafkaStreamsInitializer;
    private final ObjectMapper objectMapper;
    private final KubernetesService kubernetesService;
    private final TopologyService topologyService;
    private final KeyValueStoreService keyValueService;
    private final TimestampedKeyValueStoreService timestampedKeyValueService;
    private final WindowStoreService windowStoreService;
    private final TimestampedWindowStoreService timestampedWindowStoreService;

    /** The HTTP server. */
    protected HttpServer server;

    /**
     * Constructor.
     *
     * @param kafkaStreamsInitializer The Kafka Streams initializer
     */
    public KafkaStreamsHttpServer(KafkaStreamsInitializer kafkaStreamsInitializer) {
        this.kafkaStreamsInitializer = kafkaStreamsInitializer;
        this.objectMapper = new ObjectMapper();
        this.kubernetesService = new KubernetesService(kafkaStreamsInitializer);
        this.topologyService = new TopologyService(kafkaStreamsInitializer);
        this.keyValueService = new KeyValueStoreService(kafkaStreamsInitializer);
        this.timestampedKeyValueService = new TimestampedKeyValueStoreService(kafkaStreamsInitializer);
        this.windowStoreService = new WindowStoreService(kafkaStreamsInitializer);
        this.timestampedWindowStoreService = new TimestampedWindowStoreService(kafkaStreamsInitializer);
    }

    /**
     * Start the HTTP server and register all endpoints.
     *
     * @throws HttpServerException If the server cannot be created or started
     */
    public void start() {
        try {
            server = HttpServer.create(new InetSocketAddress(kafkaStreamsInitializer.getServerPort()), 0);
            Properties properties = kafkaStreamsInitializer.getProperties();

            createKubernetesEndpoint(
                    (String) properties.getOrDefault(READINESS_PATH_PROPERTY_NAME, DEFAULT_READINESS_PATH),
                    kubernetesService::getReadiness);

            createKubernetesEndpoint(
                    (String) properties.getOrDefault(LIVENESS_PATH_PROPERTY_NAME, DEFAULT_LIVENESS_PATH),
                    kubernetesService::getLiveness);

            createTopologyEndpoint();
            createStoreEndpoints();
            addEndpoint(kafkaStreamsInitializer);

            server.start();
        } catch (Exception e) {
            throw new HttpServerException(e);
        }
    }

    /** Register a probe endpoint answering with the status code computed by the given supplier and an empty body. */
    private void createKubernetesEndpoint(String path, IntSupplier kubernetesSupplier) {
        server.createContext("/" + path, (exchange -> {
            int code = kubernetesSupplier.getAsInt();
            exchange.sendResponseHeaders(code, 0);
            exchange.close();
        }));
    }

    /** Register the endpoint that serves the topology description as plain text. */
    private void createTopologyEndpoint() {
        String topologyEndpointPath = (String) kafkaStreamsInitializer
                .getProperties()
                .getOrDefault(TOPOLOGY_PATH_PROPERTY_NAME, TOPOLOGY_DEFAULT_PATH);

        server.createContext("/" + topologyEndpointPath, (exchange -> {
            try {
                byte[] response = topologyService.getTopology().getBytes(StandardCharsets.UTF_8);
                // Headers must be set BEFORE sendResponseHeaders, otherwise they are silently dropped
                exchange.getResponseHeaders().set(HttpHeaders.CONTENT_TYPE, MediaType.PLAIN_TEXT_UTF_8.toString());
                writeResponse(exchange, HttpURLConnection.HTTP_OK, response);
            } finally {
                exchange.close();
            }
        }));
    }

    /** Register the interactive-query endpoints under /store, mapping service exceptions to HTTP status codes. */
    private void createStoreEndpoints() {
        server.createContext("/" + DEFAULT_STORE_PATH, (exchange -> {
            try {
                Object response = getResponseForStoreEndpoints(exchange);
                byte[] jsonResponse = objectMapper.writeValueAsBytes(response);
                // Headers must be set BEFORE sendResponseHeaders, otherwise they are silently dropped
                exchange.getResponseHeaders().set(HttpHeaders.CONTENT_TYPE, MediaType.JSON_UTF_8.toString());
                writeResponse(exchange, HttpURLConnection.HTTP_OK, jsonResponse);
            } catch (StreamsNotStartedException e) {
                writeResponse(exchange, HttpURLConnection.HTTP_UNAVAILABLE, errorBody(e));
            } catch (UnknownStateStoreException | UnknownKeyException e) {
                writeResponse(exchange, HttpURLConnection.HTTP_NOT_FOUND, errorBody(e));
            } catch (IllegalArgumentException e) {
                writeResponse(exchange, HttpURLConnection.HTTP_BAD_REQUEST, errorBody(e));
            } finally {
                exchange.close();
            }
        }));
    }

    /** Render an exception as a UTF-8 response body, tolerating exceptions with a null message. */
    private static byte[] errorBody(Exception e) {
        String message = e.getMessage() != null ? e.getMessage() : e.toString();
        return message.getBytes(StandardCharsets.UTF_8);
    }

    /**
     * Send the status line with the exact body length in bytes, then write the body.
     *
     * <p>Using the byte count (not {@code String.length()}) keeps the Content-Length correct for multibyte UTF-8.
     */
    private void writeResponse(HttpExchange exchange, int statusCode, byte[] body) throws IOException {
        exchange.sendResponseHeaders(statusCode, body.length);
        OutputStream output = exchange.getResponseBody();
        output.write(body);
    }

    /**
     * Route a /store request to the matching interactive-query service. Route order matters: "timestamped" and
     * "local" patterns are tested before the generic ones they would otherwise match.
     *
     * @param exchange The HTTP exchange holding the request URI
     * @return The service response to serialize, or null when no route matches
     */
    private Object getResponseForStoreEndpoints(HttpExchange exchange) {
        String uri = exchange.getRequestURI().toString();

        if (uri.equals("/" + DEFAULT_STORE_PATH)) {
            return keyValueService.getStateStores();
        }

        String store;
        if (uri.matches("/" + DEFAULT_STORE_PATH + "/metadata/.*")) {
            store = parsePathParam(exchange, 3);
            return keyValueService.getStreamsMetadataForStore(store).stream()
                    .map(streamsMetadata -> new StreamsMetadata(
                            streamsMetadata.stateStoreNames(),
                            streamsMetadata.hostInfo(),
                            streamsMetadata.topicPartitions()))
                    .toList();
        }

        // Get all on local host for key-value store
        if (uri.matches("/" + DEFAULT_STORE_PATH + "/" + DEFAULT_KEY_VALUE_STORE_PATH + "/local/.*")) {
            store = parsePathParam(exchange, 4);
            return keyValueService.getAllOnLocalInstance(store);
        }

        // Get all on local host for timestamped key-value store
        if (uri.matches("/" + DEFAULT_STORE_PATH + "/" + DEFAULT_KEY_VALUE_STORE_PATH + "/timestamped/local/.*")) {
            store = parsePathParam(exchange, 5);
            return timestampedKeyValueService.getAllOnLocalInstance(store);
        }

        // Get all on local host for window store
        if (uri.matches("/" + DEFAULT_STORE_PATH + "/" + DEFAULT_WINDOW_STORE_PATH + "/local/.*")) {
            store = parsePathParam(exchange, 4);
            return windowStoreService.getAllOnLocalInstance(store, startTime(exchange), endTime(exchange));
        }

        // Get all on local host for timestamped window store
        if (uri.matches("/" + DEFAULT_STORE_PATH + "/" + DEFAULT_WINDOW_STORE_PATH + "/timestamped/local/.*")) {
            store = parsePathParam(exchange, 5);
            return timestampedWindowStoreService.getAllOnLocalInstance(store, startTime(exchange), endTime(exchange));
        }

        // Get by key for timestamped key-value store
        if (uri.matches("/" + DEFAULT_STORE_PATH + "/" + DEFAULT_KEY_VALUE_STORE_PATH + "/timestamped/.*/.*")) {
            store = parsePathParam(exchange, 4);
            String key = parsePathParam(exchange, 5);
            return timestampedKeyValueService.getByKey(store, key);
        }

        // Get all for timestamped key-value store
        if (uri.matches("/" + DEFAULT_STORE_PATH + "/" + DEFAULT_KEY_VALUE_STORE_PATH + "/timestamped/.*")) {
            store = parsePathParam(exchange, 4);
            return timestampedKeyValueService.getAll(store);
        }

        // Get by key for key-value store
        if (uri.matches("/" + DEFAULT_STORE_PATH + "/" + DEFAULT_KEY_VALUE_STORE_PATH + "/.*/.*")) {
            store = parsePathParam(exchange, 3);
            String key = parsePathParam(exchange, 4);
            return keyValueService.getByKey(store, key);
        }

        // Get all for key-value store
        if (uri.matches("/" + DEFAULT_STORE_PATH + "/" + DEFAULT_KEY_VALUE_STORE_PATH + "/.*")) {
            store = parsePathParam(exchange, 3);
            return keyValueService.getAll(store);
        }

        // Get by key for timestamped window store
        if (uri.matches("/" + DEFAULT_STORE_PATH + "/" + DEFAULT_WINDOW_STORE_PATH + "/timestamped/.*/.*")) {
            store = parsePathParam(exchange, 4);
            String key = parsePathParam(exchange, 5);
            return timestampedWindowStoreService.getByKey(store, key, startTime(exchange), endTime(exchange));
        }

        // Get all for timestamped window store
        if (uri.matches("/" + DEFAULT_STORE_PATH + "/" + DEFAULT_WINDOW_STORE_PATH + "/timestamped/.*")) {
            store = parsePathParam(exchange, 4);
            return timestampedWindowStoreService.getAll(store, startTime(exchange), endTime(exchange));
        }

        // Get by key for window store
        if (uri.matches("/" + DEFAULT_STORE_PATH + "/" + DEFAULT_WINDOW_STORE_PATH + "/.*/.*")) {
            store = parsePathParam(exchange, 3);
            String key = parsePathParam(exchange, 4);
            return windowStoreService.getByKey(store, key, startTime(exchange), endTime(exchange));
        }

        // Get all for window store
        if (uri.matches("/" + DEFAULT_STORE_PATH + "/" + DEFAULT_WINDOW_STORE_PATH + "/.*")) {
            store = parsePathParam(exchange, 3);
            return windowStoreService.getAll(store, startTime(exchange), endTime(exchange));
        }

        return null;
    }

    /** Parse the startTime request parameter as an ISO instant, defaulting to the epoch when absent. */
    private Instant startTime(HttpExchange exchange) {
        return parseRequestParam(exchange, START_TIME_REQUEST_PARAM)
                .map(Instant::parse)
                .orElse(Instant.EPOCH);
    }

    /** Parse the endTime request parameter as an ISO instant, defaulting to now when absent. */
    private Instant endTime(HttpExchange exchange) {
        return parseRequestParam(exchange, END_TIME_REQUEST_PARAM)
                .map(Instant::parse)
                .orElse(Instant.now());
    }

    /** Extract the path segment at the given index, ignoring any query string. */
    private String parsePathParam(HttpExchange exchange, int index) {
        return exchange.getRequestURI().toString().split("\\?")[0].split("/")[index];
    }

    /** Extract a query-string parameter by name, if present. */
    private Optional<String> parseRequestParam(HttpExchange exchange, String key) {
        String[] uriAndParams = exchange.getRequestURI().toString().split("\\?");
        if (uriAndParams.length == 1) {
            return Optional.empty();
        }

        List<String> params = Arrays.asList(uriAndParams[1].split("&"));
        Map<String, String> keyValue = params.stream()
                .map(param -> param.split("="))
                .collect(Collectors.toMap(param -> param[0], param -> param[1]));

        return Optional.ofNullable(keyValue.get(key));
    }

    /**
     * Callback to override in case of adding endpoints.
     *
     * @param kafkaStreamsInitializer The Kafka Streams initializer
     */
    protected void addEndpoint(KafkaStreamsInitializer kafkaStreamsInitializer) {
        // Nothing to do here
    }
}
| java | Apache-2.0 | 7b912c3bc711629f1745ae4a60bffeaed649d07a | 2026-01-05T02:38:06.284501Z | false |
michelin/kstreamplify | https://github.com/michelin/kstreamplify/blob/7b912c3bc711629f1745ae4a60bffeaed649d07a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/deduplication/DedupKeyValueProcessor.java | kstreamplify-core/src/main/java/com/michelin/kstreamplify/deduplication/DedupKeyValueProcessor.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.michelin.kstreamplify.deduplication;
import com.michelin.kstreamplify.error.ProcessingResult;
import java.time.Duration;
import java.time.Instant;
import org.apache.avro.specific.SpecificRecord;
import org.apache.kafka.streams.processor.api.Processor;
import org.apache.kafka.streams.processor.api.ProcessorContext;
import org.apache.kafka.streams.processor.api.Record;
import org.apache.kafka.streams.state.WindowStore;
/**
 * Processor deduplicating records on both key and value over a sliding time window.
 *
 * @param <V> The type of the value
 */
public class DedupKeyValueProcessor<V extends SpecificRecord>
        implements Processor<String, V, String, ProcessingResult<V, V>> {

    /** Processor context, captured at init time to forward results downstream. */
    private ProcessorContext<String, ProcessingResult<V, V>> processorContext;

    /** Window store holding every key/value pair already seen within the retention window. */
    private WindowStore<String, V> dedupWindowStore;

    /** Name of the window store, fixed at construction. */
    private final String windowStoreName;

    /** Retention window used to bound the duplicate lookup. */
    private final Duration retentionWindowDuration;

    /**
     * Constructor.
     *
     * @param windowStoreName The window store name
     * @param retentionWindowDuration The retention window duration
     */
    public DedupKeyValueProcessor(String windowStoreName, Duration retentionWindowDuration) {
        this.windowStoreName = windowStoreName;
        this.retentionWindowDuration = retentionWindowDuration;
    }

    @Override
    public void init(ProcessorContext<String, ProcessingResult<V, V>> context) {
        processorContext = context;
        dedupWindowStore = processorContext.getStateStore(windowStoreName);
    }

    @Override
    public void process(Record<String, V> message) {
        try {
            // Duplicates are dropped silently; only first-seen records are stored and forwarded
            if (isDuplicate(message)) {
                return;
            }

            dedupWindowStore.put(message.key(), message.value(), message.timestamp());
            processorContext.forward(ProcessingResult.wrapRecordSuccess(message));
        } catch (Exception e) {
            processorContext.forward(ProcessingResult.wrapRecordFailure(
                    e,
                    message,
                    "Could not figure out what to do with the current payload: "
                            + "An unlikely error occurred during deduplication transform"));
        }
    }

    /**
     * Look for an identical key/value pair already stored within the retention window around the record timestamp.
     *
     * @param message The incoming record
     * @return true when an equal value under the same key was already seen
     */
    private boolean isDuplicate(Record<String, V> message) {
        Instant recordTime = Instant.ofEpochMilli(message.timestamp());

        try (var seenRecords = dedupWindowStore.backwardFetch(
                message.key(),
                recordTime.minus(retentionWindowDuration),
                recordTime.plus(retentionWindowDuration))) {
            while (seenRecords != null && seenRecords.hasNext()) {
                if (message.value().equals(seenRecords.next().value)) {
                    return true;
                }
            }
        }

        return false;
    }
}
| java | Apache-2.0 | 7b912c3bc711629f1745ae4a60bffeaed649d07a | 2026-01-05T02:38:06.284501Z | false |
michelin/kstreamplify | https://github.com/michelin/kstreamplify/blob/7b912c3bc711629f1745ae4a60bffeaed649d07a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/deduplication/DeduplicationUtils.java | kstreamplify-core/src/main/java/com/michelin/kstreamplify/deduplication/DeduplicationUtils.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.michelin.kstreamplify.deduplication;
import com.michelin.kstreamplify.error.ProcessingResult;
import com.michelin.kstreamplify.serde.SerdesUtils;
import java.time.Duration;
import java.util.function.Function;
import org.apache.avro.specific.SpecificRecord;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.Repartitioned;
import org.apache.kafka.streams.state.StoreBuilder;
import org.apache.kafka.streams.state.Stores;
import org.apache.kafka.streams.state.WindowStore;
/** Deduplication utility class. Only streams with String keys are supported. */
public final class DeduplicationUtils {
    // Prefix shared by all default state-store and repartition names generated by this utility
    private static final String DEFAULT_DEDUP_NAME = "Dedup_";

    // Suffix of the default window-store name
    private static final String DEFAULT_WINDOWSTORE = "WindowStore";

    // Suffix of the default repartition name
    private static final String DEFAULT_REPARTITION = "Repartition";

    /** Utility class: not instantiable. */
    private DeduplicationUtils() {}
/**
* Deduplicate the input stream on the input key using a window store for the given period of time. This constructor
* should not be used if using the deduplicator multiple times in the same topology.
*
* @param streamsBuilder Stream builder instance for topology editing
* @param initialStream Stream containing the events that should be deduplicated
* @param windowDuration Window of time on which we should watch out for duplicates
* @param <V> Generic Type of the Stream value. Key type is not implemented because using anything other than a
* String as the key is retarded. You can quote me on this.
* @return KStream with a processingResult
*/
public static <V extends SpecificRecord> KStream<String, ProcessingResult<V, V>> deduplicateKeys(
StreamsBuilder streamsBuilder, KStream<String, V> initialStream, Duration windowDuration) {
return deduplicateKeys(
streamsBuilder,
initialStream,
DEFAULT_DEDUP_NAME + DEFAULT_WINDOWSTORE,
DEFAULT_DEDUP_NAME + DEFAULT_REPARTITION,
windowDuration);
}
/**
* Deduplicate the input stream on the input key using a window store for the given period of time.
*
* @param streamsBuilder Stream builder instance for topology editing
* @param initialStream Stream containing the events that should be deduplicated
* @param storeName State store name
* @param repartitionName Repartition topic name
* @param windowDuration Window of time to keep in the window store
* @param <V> Generic Type of the Stream value. Key type is not implemented because using anything other than a
* String as the key is retarded. You can quote me on this.
* @return Resulting de-duplicated Stream
*/
public static <V extends SpecificRecord> KStream<String, ProcessingResult<V, V>> deduplicateKeys(
StreamsBuilder streamsBuilder,
KStream<String, V> initialStream,
String storeName,
String repartitionName,
Duration windowDuration) {
StoreBuilder<WindowStore<String, String>> dedupWindowStore = Stores.windowStoreBuilder(
Stores.persistentWindowStore(storeName, windowDuration, windowDuration, false),
Serdes.String(),
Serdes.String());
streamsBuilder.addStateStore(dedupWindowStore);
var repartitioned =
initialStream.repartition(Repartitioned.with(Serdes.String(), SerdesUtils.<V>getValueSerdes())
.withName(repartitionName));
return repartitioned.process(() -> new DedupKeyProcessor<>(storeName, windowDuration), storeName);
}
/**
* Deduplicate the input stream on the input key and value using a window store for the given period of time. This
* constructor should not be used if using the deduplicator multiple times in the same topology.
*
* @param streamsBuilder Stream builder instance for topology editing
* @param initialStream Stream containing the events that should be deduplicated
* @param windowDuration Window of time on which we should watch out for duplicates
* @param <V> Generic Type of the Stream value. Key type is not implemented because using anything other than a
* String as the key is retarded. You can quote me on this.
* @return KStream with a processingResult
*/
public static <V extends SpecificRecord> KStream<String, ProcessingResult<V, V>> deduplicateKeyValues(
StreamsBuilder streamsBuilder, KStream<String, V> initialStream, Duration windowDuration) {
return deduplicateKeyValues(
streamsBuilder,
initialStream,
DEFAULT_DEDUP_NAME + DEFAULT_WINDOWSTORE,
DEFAULT_DEDUP_NAME + DEFAULT_REPARTITION,
windowDuration);
}
/**
* Deduplicate the input stream on the input key and Value using a window store for the given period of time. The
* input stream should have a String key.
*
* @param streamsBuilder Stream builder instance for topology editing
* @param initialStream Stream containing the events that should be deduplicated
* @param storeName State store name
* @param repartitionName Repartition topic name
* @param windowDuration Window of time to keep in the window store
* @param <V> Generic Type of the Stream value. Key type is not implemented because using anything other than a
* String as the key is retarded. You can quote me on this.
* @return Resulting de-duplicated Stream
*/
public static <V extends SpecificRecord> KStream<String, ProcessingResult<V, V>> deduplicateKeyValues(
StreamsBuilder streamsBuilder,
KStream<String, V> initialStream,
String storeName,
String repartitionName,
Duration windowDuration) {
StoreBuilder<WindowStore<String, V>> dedupWindowStore = Stores.windowStoreBuilder(
Stores.persistentWindowStore(storeName, windowDuration, windowDuration, false),
Serdes.String(),
SerdesUtils.getValueSerdes());
streamsBuilder.addStateStore(dedupWindowStore);
var repartitioned =
initialStream.repartition(Repartitioned.with(Serdes.String(), SerdesUtils.<V>getValueSerdes())
.withName(repartitionName));
return repartitioned.process(() -> new DedupKeyValueProcessor<>(storeName, windowDuration), storeName);
}
/**
* Deduplicate the input stream by applying the deduplicationKeyExtractor function on each record to generate a
* unique signature for the record. Uses a window store for the given period of time. The input stream should have a
* String key. This constructor should not be used if using the deduplicator multiple times in the same topology.
* Use {@link DeduplicationUtils#deduplicateWithPredicate(StreamsBuilder, KStream, String storeName, String
* repartitionName, Duration, Function)} in this scenario.
*
* @param streamsBuilder Stream builder instance for topology editing
* @param initialStream Stream containing the events that should be deduplicated
* @param windowDuration Window of time to keep in the window store
* @param deduplicationKeyExtractor Function that should extract a deduplication key in String format. This key acts
* like a comparison vector. A recommended approach is to concatenate all necessary fields in String format to
* provide a unique identifier for comparison between records.
* @param <V> Generic Type of the Stream value. Key type is not implemented because using anything other than a
* String as the key is retarded. You can quote me on this.
* @return Resulting de-duplicated Stream
*/
public static <V extends SpecificRecord> KStream<String, ProcessingResult<V, V>> deduplicateWithPredicate(
StreamsBuilder streamsBuilder,
KStream<String, V> initialStream,
Duration windowDuration,
Function<V, String> deduplicationKeyExtractor) {
return deduplicateWithPredicate(
streamsBuilder,
initialStream,
DEFAULT_DEDUP_NAME + DEFAULT_WINDOWSTORE,
DEFAULT_DEDUP_NAME + DEFAULT_REPARTITION,
windowDuration,
deduplicationKeyExtractor);
}
/**
* Deduplicate the input stream by applying the deduplicationKeyExtractor function on each record to generate a
* unique signature for the record. Uses a window store for the given period of time. The input stream should have a
* String key.
*
* @param streamsBuilder Stream builder instance for topology editing
* @param initialStream Stream containing the events that should be deduplicated
* @param storeName State store name
* @param repartitionName Repartition topic name
* @param windowDuration Window of time to keep in the window store
* @param deduplicationKeyExtractor Function that should extract a deduplication key in String format. This key acts
* like a comparison vector. A recommended approach is to concatenate all necessary fields in String format to
* provide a unique identifier for comparison between records.
* @param <V> Generic Type of the Stream value. Key type is not implemented because using anything other than a
* String as the key is retarded. You can quote me on this.
* @return Resulting de-duplicated Stream
*/
public static <V extends SpecificRecord> KStream<String, ProcessingResult<V, V>> deduplicateWithPredicate(
StreamsBuilder streamsBuilder,
KStream<String, V> initialStream,
String storeName,
String repartitionName,
Duration windowDuration,
Function<V, String> deduplicationKeyExtractor) {
StoreBuilder<WindowStore<String, V>> dedupWindowStore = Stores.windowStoreBuilder(
Stores.persistentWindowStore(storeName, windowDuration, windowDuration, false),
Serdes.String(),
SerdesUtils.getValueSerdes());
streamsBuilder.addStateStore(dedupWindowStore);
var repartitioned =
initialStream.repartition(Repartitioned.with(Serdes.String(), SerdesUtils.<V>getValueSerdes())
.withName(repartitionName));
return repartitioned.process(
() -> new DedupWithPredicateProcessor<>(storeName, windowDuration, deduplicationKeyExtractor),
storeName);
}
}
| java | Apache-2.0 | 7b912c3bc711629f1745ae4a60bffeaed649d07a | 2026-01-05T02:38:06.284501Z | false |
michelin/kstreamplify | https://github.com/michelin/kstreamplify/blob/7b912c3bc711629f1745ae4a60bffeaed649d07a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/deduplication/DedupWithPredicateProcessor.java | kstreamplify-core/src/main/java/com/michelin/kstreamplify/deduplication/DedupWithPredicateProcessor.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.michelin.kstreamplify.deduplication;
import com.michelin.kstreamplify.error.ProcessingResult;
import java.time.Duration;
import java.time.Instant;
import java.util.function.Function;
import org.apache.avro.specific.SpecificRecord;
import org.apache.kafka.streams.processor.api.Processor;
import org.apache.kafka.streams.processor.api.ProcessorContext;
import org.apache.kafka.streams.processor.api.Record;
import org.apache.kafka.streams.state.WindowStore;
/**
 * Processor class (Kafka Streams Processor API) for the deduplication mechanism based on a deduplication-key
 * predicate applied to each record of a given topic.
 *
 * @param <K> The type of the key
 * @param <V> The type of the value
 */
public class DedupWithPredicateProcessor<K, V extends SpecificRecord>
        implements Processor<K, V, K, ProcessingResult<V, V>> {
    /** Processor context captured at init time; used to look up the state store and forward records downstream. */
    private ProcessorContext<K, ProcessingResult<V, V>> processorContext;

    /** Window store containing all the records seen on the given window, keyed by extracted deduplication key. */
    private WindowStore<String, V> dedupWindowStore;

    /** Window store name, initialized at construction. Must match a store registered on the topology. */
    private final String windowStoreName;

    /** Retention window for the state store. Used for fetching data. */
    private final Duration retentionWindowDuration;

    /** Deduplication key extractor: maps a record value to the String signature used for duplicate detection. */
    private final Function<V, String> deduplicationKeyExtractor;

    /**
     * Constructor.
     *
     * @param windowStoreName Name of the deduplication state store
     * @param retentionWindowDuration Retention window duration
     * @param deduplicationKeyExtractor Deduplication function
     */
    public DedupWithPredicateProcessor(
            String windowStoreName, Duration retentionWindowDuration, Function<V, String> deduplicationKeyExtractor) {
        this.windowStoreName = windowStoreName;
        this.retentionWindowDuration = retentionWindowDuration;
        this.deduplicationKeyExtractor = deduplicationKeyExtractor;
    }

    @Override
    public void init(ProcessorContext<K, ProcessingResult<V, V>> context) {
        this.processorContext = context;
        dedupWindowStore = this.processorContext.getStateStore(windowStoreName);
    }

    @Override
    public void process(Record<K, V> message) {
        try {
            // Use the record's own timestamp (not wall clock) so dedup is stable on reprocessing
            var currentInstant = Instant.ofEpochMilli(message.timestamp());
            String identifier = deduplicationKeyExtractor.apply(message.value());

            // Scan the store around the record timestamp; if a record with the same extracted key is
            // already present, this record is a duplicate and is silently dropped (early return)
            try (var resultIterator = dedupWindowStore.backwardFetch(
                    identifier,
                    currentInstant.minus(retentionWindowDuration),
                    currentInstant.plus(retentionWindowDuration))) {
                while (resultIterator != null && resultIterator.hasNext()) {
                    var currentKeyValue = resultIterator.next();
                    if (identifier.equals(deduplicationKeyExtractor.apply(currentKeyValue.value))) {
                        return;
                    }
                }
            }

            // First time we see this record, store entry in the window store and forward the record to the output
            dedupWindowStore.put(identifier, message.value(), message.timestamp());
            processorContext.forward(ProcessingResult.wrapRecordSuccess(message));
        } catch (Exception e) {
            // Any failure is forwarded as a ProcessingResult error instead of crashing the stream thread
            processorContext.forward(ProcessingResult.wrapRecordFailure(
                    e,
                    message,
                    "Could not figure out what to do with the current payload: "
                            + "An unlikely error occurred during deduplication transform"));
        }
    }
}
| java | Apache-2.0 | 7b912c3bc711629f1745ae4a60bffeaed649d07a | 2026-01-05T02:38:06.284501Z | false |
michelin/kstreamplify | https://github.com/michelin/kstreamplify/blob/7b912c3bc711629f1745ae4a60bffeaed649d07a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/deduplication/DedupKeyProcessor.java | kstreamplify-core/src/main/java/com/michelin/kstreamplify/deduplication/DedupKeyProcessor.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.michelin.kstreamplify.deduplication;
import com.michelin.kstreamplify.error.ProcessingResult;
import java.time.Duration;
import java.time.Instant;
import org.apache.avro.specific.SpecificRecord;
import org.apache.kafka.streams.processor.api.Processor;
import org.apache.kafka.streams.processor.api.ProcessorContext;
import org.apache.kafka.streams.processor.api.Record;
import org.apache.kafka.streams.state.WindowStore;
/**
 * Processor class (Kafka Streams Processor API) for the deduplication mechanism on keys of a given topic.
 *
 * @param <V> The type of the value
 */
public class DedupKeyProcessor<V extends SpecificRecord>
        implements Processor<String, V, String, ProcessingResult<V, V>> {
    /** Processor context captured at init time; used to look up the state store and forward records downstream. */
    private ProcessorContext<String, ProcessingResult<V, V>> processorContext;

    /** Window store containing all the keys seen on the given window (key stored as both store key and value). */
    private WindowStore<String, String> dedupWindowStore;

    /** Window store name, initialized at construction. Must match a store registered on the topology. */
    private final String windowStoreName;

    /** Retention window for the state store. Used for fetching data. */
    private final Duration retentionWindowDuration;

    /**
     * Constructor.
     *
     * @param windowStoreName The name of the deduplication window store
     * @param retentionWindowDuration The retention window duration
     */
    public DedupKeyProcessor(String windowStoreName, Duration retentionWindowDuration) {
        this.windowStoreName = windowStoreName;
        this.retentionWindowDuration = retentionWindowDuration;
    }

    @Override
    public void init(ProcessorContext<String, ProcessingResult<V, V>> context) {
        processorContext = context;
        dedupWindowStore = this.processorContext.getStateStore(windowStoreName);
    }

    @Override
    public void process(Record<String, V> message) {
        try {
            // Use the record's own timestamp (not wall clock) so dedup is stable on reprocessing
            var currentInstant = Instant.ofEpochMilli(message.timestamp());

            // Scan the store around the record timestamp; if the same key is already present,
            // this record is a duplicate and is silently dropped (early return)
            try (var resultIterator = dedupWindowStore.backwardFetch(
                    message.key(),
                    currentInstant.minus(retentionWindowDuration),
                    currentInstant.plus(retentionWindowDuration))) {
                while (resultIterator != null && resultIterator.hasNext()) {
                    var currentKeyValue = resultIterator.next();
                    if (message.key().equals(currentKeyValue.value)) {
                        return;
                    }
                }
            }

            // First time we see this record, store entry in the window store and forward the record to the output
            dedupWindowStore.put(message.key(), message.key(), message.timestamp());
            processorContext.forward(ProcessingResult.wrapRecordSuccess(message));
        } catch (Exception e) {
            // Any failure is forwarded as a ProcessingResult error instead of crashing the stream thread
            processorContext.forward(ProcessingResult.wrapRecordFailure(
                    e,
                    message,
                    "Could not figure out what to do with the current payload: "
                            + "An unlikely error occurred during deduplication transform"));
        }
    }
}
| java | Apache-2.0 | 7b912c3bc711629f1745ae4a60bffeaed649d07a | 2026-01-05T02:38:06.284501Z | false |
michelin/kstreamplify | https://github.com/michelin/kstreamplify/blob/7b912c3bc711629f1745ae4a60bffeaed649d07a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/context/KafkaStreamsExecutionContext.java | kstreamplify-core/src/main/java/com/michelin/kstreamplify/context/KafkaStreamsExecutionContext.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.michelin.kstreamplify.context;
import static com.michelin.kstreamplify.property.KstreamplifyConfig.DLQ_PROPERTIES_PREFIX;
import static com.michelin.kstreamplify.property.PropertiesUtils.PROPERTY_SEPARATOR;
import static com.michelin.kstreamplify.serde.TopicWithSerde.SELF;
import static com.michelin.kstreamplify.topic.TopicUtils.PREFIX_PROPERTY_NAME;
import com.michelin.kstreamplify.property.PropertiesUtils;
import java.util.Map;
import java.util.Properties;
import lombok.Getter;
import lombok.Setter;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.apache.kafka.streams.StreamsConfig;
/** The class to represent the context of the KStream. Holds application-wide static configuration state. */
@Slf4j
public class KafkaStreamsExecutionContext {
    /** Name of the Dead Letter Queue (DLQ) topic used for failed records. */
    @Getter
    @Setter
    private static String dlqTopicName;

    /** Configuration map handed to serializers/deserializers (serdes). */
    @Getter
    @Setter
    private static Map<String, String> serdesConfig;

    /** The Kafka Streams properties registered for this application. */
    @Getter
    @Setter
    private static Properties properties;

    /** Properties extracted from the main properties under the DLQ prefix. */
    @Getter
    @Setter
    private static Properties dlqProperties;

    /** Prefix resolved from the "self" prefix property at registration time; prepended to the application id. */
    @Getter
    private static String prefix;

    /** Static context holder; not instantiable. */
    private KafkaStreamsExecutionContext() {}

    /**
     * Register Kafka properties.
     *
     * <p>NOTE(review): this mutates the given {@code Properties} instance in place when a non-blank prefix is
     * configured — the caller's {@code application.id} is rewritten with the prefix prepended.
     *
     * @param properties The Kafka properties
     */
    public static void registerProperties(Properties properties) {
        if (properties == null) {
            return;
        }

        // Resolve the "self" prefix (empty string when not configured)
        prefix = properties.getProperty(PREFIX_PROPERTY_NAME + PROPERTY_SEPARATOR + SELF, "");
        if (StringUtils.isNotBlank(prefix) && properties.containsKey(StreamsConfig.APPLICATION_ID_CONFIG)) {
            properties.setProperty(
                    StreamsConfig.APPLICATION_ID_CONFIG,
                    prefix.concat(properties.getProperty(StreamsConfig.APPLICATION_ID_CONFIG)));
        }

        KafkaStreamsExecutionContext.properties = properties;

        // Extract all Dead Letter Queue (DLQ) properties from the main properties using the DLQ prefix
        dlqProperties =
                PropertiesUtils.extractPropertiesByPrefix(properties, DLQ_PROPERTIES_PREFIX + PROPERTY_SEPARATOR);
    }

    /**
     * Checks if a DLQ (Dead Letter Queue) feature flag is enabled based on the given key.
     *
     * @param key The DLQ feature property key to check.
     * @return {@code true} if the feature is enabled; {@code false} otherwise (default when the key is absent).
     */
    public static boolean isDlqFeatureEnabled(String key) {
        return PropertiesUtils.isFeatureEnabled(dlqProperties, key, false);
    }
}
| java | Apache-2.0 | 7b912c3bc711629f1745ae4a60bffeaed649d07a | 2026-01-05T02:38:06.284501Z | false |
michelin/kstreamplify | https://github.com/michelin/kstreamplify/blob/7b912c3bc711629f1745ae4a60bffeaed649d07a/kstreamplify-core-test/src/test/java/com/michelin/kstreamplify/TopologyErrorHandlerTest.java | kstreamplify-core-test/src/test/java/com/michelin/kstreamplify/TopologyErrorHandlerTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.michelin.kstreamplify;
import static io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG;
import static org.apache.kafka.streams.StreamsConfig.STATE_DIR_CONFIG;
import static org.junit.jupiter.api.Assertions.assertEquals;
import com.michelin.kstreamplify.avro.KafkaError;
import com.michelin.kstreamplify.context.KafkaStreamsExecutionContext;
import com.michelin.kstreamplify.error.ProcessingResult;
import com.michelin.kstreamplify.error.TopologyErrorHandler;
import com.michelin.kstreamplify.initializer.KafkaStreamsStarter;
import com.michelin.kstreamplify.serde.SerdesUtils;
import com.michelin.kstreamplify.serde.TopicWithSerde;
import java.util.List;
import java.util.Properties;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.TestInputTopic;
import org.apache.kafka.streams.TestOutputTopic;
import org.apache.kafka.streams.kstream.Consumed;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.Produced;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
/**
 * Tests for {@link TopologyErrorHandler}: failed {@link ProcessingResult}s are routed to the DLQ topic
 * while successful ones continue to the output topics, for both String and Avro value streams.
 */
class TopologyErrorHandlerTest extends KafkaStreamsStarterTest {
    private static final String AVRO_TOPIC = "avroTopic";
    private static final String STRING_TOPIC = "stringTopic";
    private static final String OUTPUT_AVRO_TOPIC = "outputAvroTopic";
    private static final String OUTPUT_STRING_TOPIC = "outputStringTopic";
    private static final String DLQ_TOPIC = "dlqTopic";

    private TestInputTopic<String, KafkaError> avroInputTopic;
    private TestInputTopic<String, String> stringInputTopic;
    private TestOutputTopic<String, KafkaError> avroOutputTopic;
    private TestOutputTopic<String, String> stringOutputTopic;
    private TestOutputTopic<String, KafkaError> dlqTopic;

    @Override
    protected KafkaStreamsStarter getKafkaStreamsStarter() {
        return new KafkaStreamsStarter() {
            @Override
            public String dlqTopic() {
                return DLQ_TOPIC;
            }

            @Override
            public void topology(StreamsBuilder streamsBuilder) {
                // String stream: the literal value "error" is mapped to a failed ProcessingResult
                KStream<String, ProcessingResult<String, String>> stringStream = streamsBuilder.stream(
                                STRING_TOPIC, Consumed.with(Serdes.String(), Serdes.String()))
                        .mapValues(value -> "error".equals(value)
                                ? ProcessingResult.fail(new NullPointerException(), value)
                                : ProcessingResult.success(value));

                TopologyErrorHandler.catchErrors(stringStream)
                        .to(OUTPUT_STRING_TOPIC, Produced.with(Serdes.String(), Serdes.String()));

                // Avro stream: null values are mapped to a failed ProcessingResult
                KStream<String, ProcessingResult<KafkaError, KafkaError>> avroStream = streamsBuilder.stream(
                                AVRO_TOPIC, Consumed.with(Serdes.String(), SerdesUtils.<KafkaError>getValueSerdes()))
                        .mapValues(value -> value == null
                                ? ProcessingResult.fail(new NullPointerException(), null)
                                : ProcessingResult.success(value));

                TopologyErrorHandler.catchErrors(avroStream)
                        .to(OUTPUT_AVRO_TOPIC, Produced.with(Serdes.String(), SerdesUtils.getValueSerdes()));
            }
        };
    }

    /** Wires the test driver topics before each test. {@code testDriver} comes from the base class. */
    @BeforeEach
    void setUp() {
        stringInputTopic = testDriver.createInputTopic(STRING_TOPIC, new StringSerializer(), new StringSerializer());
        avroInputTopic = testDriver.createInputTopic(
                AVRO_TOPIC,
                new StringSerializer(),
                SerdesUtils.<KafkaError>getValueSerdes().serializer());

        stringOutputTopic =
                testDriver.createOutputTopic(OUTPUT_STRING_TOPIC, new StringDeserializer(), new StringDeserializer());
        avroOutputTopic = testDriver.createOutputTopic(
                OUTPUT_AVRO_TOPIC,
                new StringDeserializer(),
                SerdesUtils.<KafkaError>getValueSerdes().deserializer());
        dlqTopic = testDriver.createOutputTopic(
                DLQ_TOPIC,
                new StringDeserializer(),
                SerdesUtils.<KafkaError>getValueSerdes().deserializer());
    }

    /** A valid String value goes straight to the output topic and nothing reaches the DLQ. */
    @Test
    void shouldContinueWhenProcessingValueIsValid() {
        stringInputTopic.pipeInput("key", "message");

        var resultDlq = dlqTopic.readValuesToList();
        var resultOutput = stringOutputTopic.readValuesToList();

        assertEquals(0, resultDlq.size());
        assertEquals(1, resultOutput.size());
    }

    /** A failing String value ("error") produces a single, fully-populated DLQ record and no output. */
    @Test
    void shouldSendExceptionToDlqWhenProcessingValueIsInvalid() {
        stringInputTopic.pipeInput("key", "error");

        var resultDlq = dlqTopic.readValuesToList();
        assertEquals(1, resultDlq.size());

        var record = resultDlq.get(0);
        assertEquals("test", record.getApplicationId());
        assertEquals("stringTopic", record.getTopic());
        assertEquals("error", record.getValue());

        var resultOutput = stringOutputTopic.readValuesToList();
        assertEquals(0, resultOutput.size());
    }

    /** A valid Avro value goes straight to the output topic and nothing reaches the DLQ. */
    @Test
    void shouldContinueWhenProcessingValueIsValidAvro() {
        KafkaError avroModel = KafkaError.newBuilder()
                .setTopic("topic")
                .setStack("stack")
                .setPartition(0)
                .setOffset(0)
                .setCause("cause")
                .setValue("value")
                .build();

        avroInputTopic.pipeInput("key", avroModel);

        List<KafkaError> resultDlq = dlqTopic.readValuesToList();
        List<KafkaError> resultOutput = avroOutputTopic.readValuesToList();

        assertEquals(1, resultDlq.size());
        assertEquals(0, resultOutput.size());
    }

    /** A null Avro value is routed to the DLQ and produces no output record. */
    @Test
    void shouldContinueWhenProcessingValueIsInvalidAvro() {
        avroInputTopic.pipeInput("key", null);

        List<KafkaError> resultDlq = dlqTopic.readValuesToList();
        List<KafkaError> resultOutput = avroOutputTopic.readValuesToList();

        assertEquals(1, resultDlq.size());
        assertEquals(0, resultOutput.size());
    }

    /** Backward-compatibility check for the deprecated {@code utils.TopicWithSerde} type. */
    @Test
    @SuppressWarnings("deprecation")
    void shouldCreateInputAndOutputTopicsWithDeprecatedSerde() {
        TestInputTopic<String, String> inputTopic =
                createInputTestTopic(new com.michelin.kstreamplify.utils.TopicWithSerde<>(
                        "INPUT_TOPIC", "APP_NAME", Serdes.String(), Serdes.String()));

        assertEquals(
                "TestInputTopic[topic='INPUT_TOPIC', keySerializer=StringSerializer, "
                        + "valueSerializer=StringSerializer]",
                inputTopic.toString());

        TestOutputTopic<String, String> outputTopic =
                createOutputTestTopic(new com.michelin.kstreamplify.utils.TopicWithSerde<>(
                        "OUTPUT_TOPIC", "APP_NAME", Serdes.String(), Serdes.String()));

        assertEquals(
                "TestOutputTopic[topic='OUTPUT_TOPIC', keyDeserializer=StringDeserializer, "
                        + "valueDeserializer=StringDeserializer, size=0]",
                outputTopic.toString());
    }

    /** Same check as above, with the current {@code serde.TopicWithSerde} type. */
    @Test
    void shouldCreateInputAndOutputTopicsWithSerde() {
        TestInputTopic<String, String> inputTopic =
                createInputTestTopic(new TopicWithSerde<>("INPUT_TOPIC", "APP_NAME", Serdes.String(), Serdes.String()));

        assertEquals(
                "TestInputTopic[topic='INPUT_TOPIC', keySerializer=StringSerializer, "
                        + "valueSerializer=StringSerializer]",
                inputTopic.toString());

        TestOutputTopic<String, String> outputTopic = createOutputTestTopic(
                new TopicWithSerde<>("OUTPUT_TOPIC", "APP_NAME", Serdes.String(), Serdes.String()));

        assertEquals(
                "TestOutputTopic[topic='OUTPUT_TOPIC', keyDeserializer=StringDeserializer, "
                        + "valueDeserializer=StringDeserializer, size=0]",
                outputTopic.toString());
    }

    /** Test the default storage path. */
    @Test
    void shouldValidateDefaultStorageDir() {
        Properties properties = KafkaStreamsExecutionContext.getProperties();
        assertEquals("/tmp/kafka-streams/" + getClass().getSimpleName(), properties.getProperty(STATE_DIR_CONFIG));
    }

    /** Test the default schema registry url. */
    @Test
    void shouldValidateDefaultSchemaRegistryUrl() {
        Properties properties = KafkaStreamsExecutionContext.getProperties();
        assertEquals("mock://" + getClass().getSimpleName(), properties.getProperty(SCHEMA_REGISTRY_URL_CONFIG));
    }
}
| java | Apache-2.0 | 7b912c3bc711629f1745ae4a60bffeaed649d07a | 2026-01-05T02:38:06.284501Z | false |
michelin/kstreamplify | https://github.com/michelin/kstreamplify/blob/7b912c3bc711629f1745ae4a60bffeaed649d07a/kstreamplify-core-test/src/test/java/com/michelin/kstreamplify/GetSpecificPropertiesTest.java | kstreamplify-core-test/src/test/java/com/michelin/kstreamplify/GetSpecificPropertiesTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.michelin.kstreamplify;
import static io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG;
import static org.apache.kafka.streams.StreamsConfig.STATE_DIR_CONFIG;
import static org.junit.jupiter.api.Assertions.assertEquals;
import com.michelin.kstreamplify.context.KafkaStreamsExecutionContext;
import com.michelin.kstreamplify.initializer.KafkaStreamsStarter;
import com.michelin.kstreamplify.serde.TopicWithSerde;
import java.util.Map;
import java.util.Properties;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.StreamsBuilder;
import org.junit.jupiter.api.Test;
/**
 * Verifies that properties supplied through {@code getSpecificProperties()} override the test defaults,
 * and that per-owner prefixes ("abc", "def") are applied to topic names while the self-owned topic stays bare.
 */
class GetSpecificPropertiesTest extends KafkaStreamsStarterTest {
    private static final String DLQ_TOPIC = "dlqTopic";
    private static final String SPECIFIC_STORAGE_PATH = "/tmp/personal-path";
    private static final String SPECIFIC_SCHEMA_REGISTRY_URL = "mock://specific-schema-registry-url";
    private static final String INPUT_TOPIC = "INPUT_TOPIC";
    private static final String OUTPUT_TOPIC = "OUTPUT_TOPIC";
    private static final String SELF_TOPIC = "SELF_TOPIC";

    @Override
    protected KafkaStreamsStarter getKafkaStreamsStarter() {
        // A minimal starter: only the DLQ name matters here, no topology is built
        return new KafkaStreamsStarter() {
            @Override
            public String dlqTopic() {
                return DLQ_TOPIC;
            }

            @Override
            public void topology(StreamsBuilder streamsBuilder) {
                // Intentionally empty: these tests only exercise property handling
            }
        };
    }

    /**
     * Overrides the default properties with specific values for the test.
     *
     * @return a map containing the overridden properties
     */
    @Override
    protected Map<String, String> getSpecificProperties() {
        return Map.of(
                STATE_DIR_CONFIG, SPECIFIC_STORAGE_PATH,
                SCHEMA_REGISTRY_URL_CONFIG, SPECIFIC_SCHEMA_REGISTRY_URL,
                "prefix.abc", "abc.",
                "prefix.def", "def.");
    }

    /** The overridden state dir and schema registry URL must win over the defaults. */
    @Test
    void shouldValidateOverriddenProperties() {
        Properties registeredProperties = KafkaStreamsExecutionContext.getProperties();

        assertEquals(SPECIFIC_STORAGE_PATH, registeredProperties.getProperty(STATE_DIR_CONFIG));
        assertEquals(SPECIFIC_SCHEMA_REGISTRY_URL, registeredProperties.getProperty(SCHEMA_REGISTRY_URL_CONFIG));
    }

    /** Topics owned by "abc"/"def" are prefixed accordingly; the self-owned topic keeps its bare name. */
    @Test
    void shouldCreateInputAndOutputTopicsWithPrefixes() {
        TopicWithSerde<String, String> abcOwnedTopic =
                new TopicWithSerde<>(INPUT_TOPIC, "abc", Serdes.String(), Serdes.String());
        TopicWithSerde<String, String> defOwnedTopic =
                new TopicWithSerde<>(OUTPUT_TOPIC, "def", Serdes.String(), Serdes.String());
        TopicWithSerde<String, String> selfOwnedTopic =
                new TopicWithSerde<>(SELF_TOPIC, Serdes.String(), Serdes.String());

        TestInputTopic<String, String> inputTopic = createInputTestTopic(abcOwnedTopic);
        TestOutputTopic<String, String> outputTopic = createOutputTestTopic(defOwnedTopic);
        TestInputTopic<String, String> selfTopic = createInputTestTopic(selfOwnedTopic);

        assertEquals(
                "TestInputTopic[topic='abc.INPUT_TOPIC', keySerializer=StringSerializer, "
                        + "valueSerializer=StringSerializer]",
                inputTopic.toString());
        assertEquals(
                "TestOutputTopic[topic='def.OUTPUT_TOPIC', keyDeserializer=StringDeserializer, "
                        + "valueDeserializer=StringDeserializer, size=0]",
                outputTopic.toString());
        assertEquals(
                "TestInputTopic[topic='SELF_TOPIC', keySerializer=StringSerializer, "
                        + "valueSerializer=StringSerializer]",
                selfTopic.toString());
    }
}
| java | Apache-2.0 | 7b912c3bc711629f1745ae4a60bffeaed649d07a | 2026-01-05T02:38:06.284501Z | false |
michelin/kstreamplify | https://github.com/michelin/kstreamplify/blob/7b912c3bc711629f1745ae4a60bffeaed649d07a/kstreamplify-core-test/src/main/java/com/michelin/kstreamplify/KafkaStreamsStarterTest.java | kstreamplify-core-test/src/main/java/com/michelin/kstreamplify/KafkaStreamsStarterTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.michelin.kstreamplify;
import static io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG;
import static org.apache.kafka.streams.StreamsConfig.STATE_DIR_CONFIG;
import com.michelin.kstreamplify.avro.KafkaError;
import com.michelin.kstreamplify.context.KafkaStreamsExecutionContext;
import com.michelin.kstreamplify.initializer.KafkaStreamsStarter;
import com.michelin.kstreamplify.serde.SerdesUtils;
import com.michelin.kstreamplify.serde.TopicWithSerde;
import io.confluent.kafka.schemaregistry.testutil.MockSchemaRegistry;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.time.Instant;
import java.util.Collections;
import java.util.Map;
import java.util.Properties;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.TestInputTopic;
import org.apache.kafka.streams.TestOutputTopic;
import org.apache.kafka.streams.TopologyTestDriver;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
/**
* The main test class to extend to execute unit tests on topology. It provides a {@link TopologyTestDriver} and a
* {@link TestOutputTopic} for the DLQ.
*/
/**
 * The main test class to extend to execute unit tests on topology. It provides a {@link TopologyTestDriver} and a
 * {@link TestOutputTopic} for the DLQ.
 */
public abstract class KafkaStreamsStarterTest {
    // Base directory for the test driver's state stores; suffixed per test class in getProperties()
    private static final String STATE_DIR = "/tmp/kafka-streams/";
    /** The topology test driver. */
    protected TopologyTestDriver testDriver;
    /** The dlq topic, initialized in {@link #generalSetUp()}. */
    protected TestOutputTopic<String, KafkaError> dlqTopic;
    /** Constructor. */
    protected KafkaStreamsStarterTest() {}
    /**
     * Set up topology test driver.
     *
     * <p>Registers the test properties and serdes config in the execution context, builds the topology from
     * {@link #getKafkaStreamsStarter()}, creates the {@link TopologyTestDriver} and the DLQ output topic.
     */
    @BeforeEach
    void generalSetUp() {
        Properties properties = getProperties();
        KafkaStreamsExecutionContext.registerProperties(properties);
        String schemaRegistryUrl = properties.getProperty(SCHEMA_REGISTRY_URL_CONFIG);
        KafkaStreamsExecutionContext.setSerdesConfig(
                Collections.singletonMap(SCHEMA_REGISTRY_URL_CONFIG, schemaRegistryUrl));
        KafkaStreamsStarter starter = getKafkaStreamsStarter();
        KafkaStreamsExecutionContext.setDlqTopicName(starter.dlqTopic());
        StreamsBuilder streamsBuilder = new StreamsBuilder();
        starter.topology(streamsBuilder);
        testDriver = new TopologyTestDriver(streamsBuilder.build(), properties, getInitialWallClockTime());
        dlqTopic = testDriver.createOutputTopic(
                KafkaStreamsExecutionContext.getDlqTopicName(),
                new StringDeserializer(),
                SerdesUtils.<KafkaError>getValueSerdes().deserializer());
    }
    /**
     * Get the properties for the test. Defaults are derived from the test class name so parallel test
     * classes do not share state dirs or mock schema registry scopes; {@link #getSpecificProperties()}
     * entries overwrite the defaults.
     *
     * @return The properties for the test
     */
    private Properties getProperties() {
        Properties properties = new Properties();
        // Default properties
        properties.setProperty(StreamsConfig.APPLICATION_ID_CONFIG, "test");
        properties.setProperty(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "mock:1234");
        properties.setProperty(
                StreamsConfig.STATE_DIR_CONFIG, STATE_DIR + getClass().getSimpleName());
        properties.setProperty(
                SCHEMA_REGISTRY_URL_CONFIG, "mock://" + getClass().getSimpleName());
        // Add specific properties or overwrite default properties
        Map<String, String> propertiesMap = getSpecificProperties();
        if (propertiesMap != null && !propertiesMap.isEmpty()) {
            properties.putAll(propertiesMap);
        }
        return properties;
    }
    /**
     * Method to override to provide the KafkaStreamsStarter to test.
     *
     * @return The KafkaStreamsStarter to test
     */
    protected abstract KafkaStreamsStarter getKafkaStreamsStarter();
    /**
     * Default base wall clock time for topology test driver.
     *
     * @return The default wall clock time as instant (2020-01-01T00:00:00Z)
     */
    protected Instant getInitialWallClockTime() {
        return Instant.ofEpochMilli(1577836800000L);
    }
    /**
     * Create/Overwrite properties. Override to add properties on top of the defaults or replace them.
     *
     * @return new/overwrite properties
     */
    protected Map<String, String> getSpecificProperties() {
        return Collections.emptyMap();
    }
    /** Method to close everything properly at the end of the test. */
    @AfterEach
    void generalTearDown() throws IOException {
        testDriver.close();
        // NOTE(review): Files.deleteIfExists cannot remove a non-empty directory
        // (it would throw DirectoryNotEmptyException); this presumably relies on
        // testDriver.close() having emptied the state dir — confirm.
        Files.deleteIfExists(
                Path.of(KafkaStreamsExecutionContext.getProperties().getProperty(STATE_DIR_CONFIG)));
        MockSchemaRegistry.dropScope("mock://" + getClass().getSimpleName());
    }
    /**
     * Creates an input test topic on the testDriver using the provided topicWithSerde.
     *
     * @param topicWithSerde The topic with serde used to create the test topic
     * @param <K> The serializable type of the key
     * @param <V> The serializable type of the value
     * @return The corresponding TestInputTopic
     */
    protected <K, V> TestInputTopic<K, V> createInputTestTopic(TopicWithSerde<K, V> topicWithSerde) {
        return this.testDriver.createInputTopic(
                topicWithSerde.toString(),
                topicWithSerde.getKeySerde().serializer(),
                topicWithSerde.getValueSerde().serializer());
    }
    /**
     * Creates an input test topic on the testDriver using the provided topicWithSerde.
     *
     * @param topicWithSerde The topic with serde used to create the test topic
     * @param <K> The serializable type of the key
     * @param <V> The serializable type of the value
     * @return The corresponding TestInputTopic
     * @deprecated Use {@link #createInputTestTopic(TopicWithSerde)}
     */
    @Deprecated(since = "1.1.0")
    protected <K, V> TestInputTopic<K, V> createInputTestTopic(
            com.michelin.kstreamplify.utils.TopicWithSerde<K, V> topicWithSerde) {
        return createInputTestTopic(new TopicWithSerde<>(
                topicWithSerde.getUnPrefixedName(), topicWithSerde.getKeySerde(), topicWithSerde.getValueSerde()));
    }
    /**
     * Creates an output test topic on the testDriver using the provided topicWithSerde.
     *
     * @param topicWithSerde The topic with serde used to create the test topic
     * @param <K> The serializable type of the key
     * @param <V> The serializable type of the value
     * @return The corresponding TestOutputTopic
     */
    protected <K, V> TestOutputTopic<K, V> createOutputTestTopic(TopicWithSerde<K, V> topicWithSerde) {
        return this.testDriver.createOutputTopic(
                topicWithSerde.toString(),
                topicWithSerde.getKeySerde().deserializer(),
                topicWithSerde.getValueSerde().deserializer());
    }
    /**
     * Creates an output test topic on the testDriver using the provided topicWithSerde.
     *
     * @param topicWithSerde The topic with serde used to create the test topic
     * @param <K> The serializable type of the key
     * @param <V> The serializable type of the value
     * @return The corresponding TestOutputTopic
     * @deprecated Use {@link #createOutputTestTopic(TopicWithSerde)}
     */
    @Deprecated(since = "1.1.0")
    protected <K, V> TestOutputTopic<K, V> createOutputTestTopic(
            com.michelin.kstreamplify.utils.TopicWithSerde<K, V> topicWithSerde) {
        return createOutputTestTopic(new TopicWithSerde<>(
                topicWithSerde.getUnPrefixedName(), topicWithSerde.getKeySerde(), topicWithSerde.getValueSerde()));
    }
}
| java | Apache-2.0 | 7b912c3bc711629f1745ae4a60bffeaed649d07a | 2026-01-05T02:38:06.284501Z | false |
xiaoymin/LlmInAction | https://github.com/xiaoymin/LlmInAction/blob/11723c550071102640f26e251cd6564bedde1fd4/llm_chat_java_hello/src/test/java/com/github/xiaoymin/llm/LlmChatJavaHelloApplicationTests.java | llm_chat_java_hello/src/test/java/com/github/xiaoymin/llm/LlmChatJavaHelloApplicationTests.java | package com.github.xiaoymin.llm;
import com.github.xiaoymin.llm.domain.llm.EmbeddingResult;
import com.github.xiaoymin.llm.llm.ZhipuAI;
import com.google.gson.Gson;
import lombok.extern.slf4j.Slf4j;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
@Slf4j
@SpringBootTest
class LlmChatJavaHelloApplicationTests {
    /** Smoke test: passes iff the Spring application context starts without errors. */
    @Test
    void contextLoads() {
    }
}
| java | Apache-2.0 | 11723c550071102640f26e251cd6564bedde1fd4 | 2026-01-05T02:38:26.882818Z | false |
xiaoymin/LlmInAction | https://github.com/xiaoymin/LlmInAction/blob/11723c550071102640f26e251cd6564bedde1fd4/llm_chat_java_hello/src/main/java/com/github/xiaoymin/llm/LlmChatJavaHelloApplication.java | llm_chat_java_hello/src/main/java/com/github/xiaoymin/llm/LlmChatJavaHelloApplication.java | package com.github.xiaoymin.llm;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
@SpringBootApplication
public class LlmChatJavaHelloApplication {

    /**
     * Bootstraps the Spring application context.
     *
     * @param args command-line arguments forwarded to Spring Boot
     */
    public static void main(String[] args) {
        new SpringApplication(LlmChatJavaHelloApplication.class).run(args);
    }
}
| java | Apache-2.0 | 11723c550071102640f26e251cd6564bedde1fd4 | 2026-01-05T02:38:26.882818Z | false |
xiaoymin/LlmInAction | https://github.com/xiaoymin/LlmInAction/blob/11723c550071102640f26e251cd6564bedde1fd4/llm_chat_java_hello/src/main/java/com/github/xiaoymin/llm/command/AddTxtCommand.java | llm_chat_java_hello/src/main/java/com/github/xiaoymin/llm/command/AddTxtCommand.java | package com.github.xiaoymin.llm.command;
import com.github.xiaoymin.llm.compoents.TxtChunk;
import com.github.xiaoymin.llm.compoents.VectorStorage;
import com.github.xiaoymin.llm.domain.llm.ChunkResult;
import com.github.xiaoymin.llm.domain.llm.EmbeddingResult;
import com.github.xiaoymin.llm.llm.ZhipuAI;
import lombok.AllArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.shell.standard.ShellComponent;
import org.springframework.shell.standard.ShellMethod;
import java.util.List;
/**
 * Shell command that ingests a local text document into the vector store:
 * chunk, embed, then persist.
 *
 * @author <a href="xiaoymin@foxmail.com">xiaoymin@foxmail.com</a>
 * 2023/10/06 12:50
 * @since llm_chat_java_hello
 */
@Slf4j
@AllArgsConstructor
@ShellComponent
public class AddTxtCommand {
    final TxtChunk txtChunk;
    final VectorStorage vectorStorage;
    final ZhipuAI zhipuAI;

    /**
     * Loads a classpath text document, embeds its chunks and stores the
     * resulting vectors in the current collection.
     *
     * @param doc document identifier (resolved by {@link TxtChunk#chunk(String)})
     * @return a completion message containing the document identifier
     */
    @ShellMethod(value = "add local txt data")
    public String add(String doc){
        log.info("start add doc.");
        // 1. Load the document and split it into chunks
        List<ChunkResult> chunkResults= txtChunk.chunk(doc);
        // 2. Embed each chunk
        List<EmbeddingResult> embeddingResults=zhipuAI.embedding(chunkResults);
        // 3. Store the vectors
        String collection= vectorStorage.getCollectionName();
        vectorStorage.store(collection,embeddingResults);
        log.info("finished");
        // Fix: the original returned "finished docId:{}" + doc, concatenating a
        // literal "{}" placeholder into the user-visible message.
        return "finished docId:" + doc;
    }
}
| java | Apache-2.0 | 11723c550071102640f26e251cd6564bedde1fd4 | 2026-01-05T02:38:26.882818Z | false |
xiaoymin/LlmInAction | https://github.com/xiaoymin/LlmInAction/blob/11723c550071102640f26e251cd6564bedde1fd4/llm_chat_java_hello/src/main/java/com/github/xiaoymin/llm/command/ChatCommand.java | llm_chat_java_hello/src/main/java/com/github/xiaoymin/llm/command/ChatCommand.java | package com.github.xiaoymin.llm.command;
import cn.hutool.core.util.StrUtil;
import com.github.xiaoymin.llm.compoents.VectorStorage;
import com.github.xiaoymin.llm.llm.ZhipuAI;
import com.github.xiaoymin.llm.utils.LLMUtils;
import lombok.AllArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.shell.standard.ShellComponent;
import org.springframework.shell.standard.ShellMethod;
/**
 * Shell command implementing the RAG chat flow: embed the question, retrieve
 * related chunks, build a prompt and stream the model's answer.
 *
 * @author <a href="xiaoymin@foxmail.com">xiaoymin@foxmail.com</a>
 * 2023/10/06 13:04
 * @since llm_chat_java_hello
 */
@AllArgsConstructor
@Slf4j
@ShellComponent
public class ChatCommand {
    final VectorStorage vectorStorage;
    final ZhipuAI zhipuAI;

    /**
     * Answers a question against the ingested documents.
     *
     * @param question the user question
     * @return an error message for invalid input or no retrieval hit; otherwise
     *         an empty string (the answer itself is streamed by the LLM client)
     */
    @ShellMethod(value = "chat with files")
    public String chat(String question) {
        // Guard: reject blank input up front
        if (StrUtil.isBlank(question)) {
            return "You must send a question";
        }

        // Embed the question into a vector
        double[] questionEmbedding = zhipuAI.sentence(question);

        // Retrieve the closest chunks from the vector store
        String collection = vectorStorage.getCollectionName();
        String retrievedContext = vectorStorage.retrieval(collection, questionEmbedding);
        if (StrUtil.isBlank(retrievedContext)) {
            return "No Answer!";
        }

        // Build the prompt and let the model stream its answer
        String prompt = LLMUtils.buildPrompt(question, retrievedContext);
        zhipuAI.chat(prompt);
        return StrUtil.EMPTY;
    }
}
| java | Apache-2.0 | 11723c550071102640f26e251cd6564bedde1fd4 | 2026-01-05T02:38:26.882818Z | false |
xiaoymin/LlmInAction | https://github.com/xiaoymin/LlmInAction/blob/11723c550071102640f26e251cd6564bedde1fd4/llm_chat_java_hello/src/main/java/com/github/xiaoymin/llm/domain/store/ElasticVectorData.java | llm_chat_java_hello/src/main/java/com/github/xiaoymin/llm/domain/store/ElasticVectorData.java | package com.github.xiaoymin.llm.domain.store;
import lombok.Data;
/**
 * Document stored in the Elasticsearch vector index: one text chunk together
 * with its embedding vector.
 *
 * @author <a href="xiaoymin@foxmail.com">xiaoymin@foxmail.com</a>
 * 2023/10/06 14:40
 * @since llm_chat_java_hello
 */
@Data
public class ElasticVectorData {
    /** Identifier of the chunk within its source document. */
    private String chunkId;
    /** Raw text content of the chunk. */
    private String content;
    /** Identifier of the source document. */
    private String docId;
    /** Embedding vector of the chunk. */
    private double[] vector;
}
| java | Apache-2.0 | 11723c550071102640f26e251cd6564bedde1fd4 | 2026-01-05T02:38:26.882818Z | false |
xiaoymin/LlmInAction | https://github.com/xiaoymin/LlmInAction/blob/11723c550071102640f26e251cd6564bedde1fd4/llm_chat_java_hello/src/main/java/com/github/xiaoymin/llm/domain/llm/EmbeddingResult.java | llm_chat_java_hello/src/main/java/com/github/xiaoymin/llm/domain/llm/EmbeddingResult.java | package com.github.xiaoymin.llm.domain.llm;
import lombok.Getter;
import lombok.Setter;
/**
 * Request/response payload for the Zhipu text-embedding API: carries the text
 * to embed on the way out and the embedding vector plus task metadata on the
 * way back.
 *
 * @author <a href="xiaoymin@foxmail.com">xiaoymin@foxmail.com</a>
 * 2023/10/06 13:36
 * @since llm_chat_java_hello
 */
@Getter
@Setter
public class EmbeddingResult {
    /**
     * Original text content that was embedded.
     */
    private String prompt;
    /**
     * Embedding result: the vector representation of the text. Per the Zhipu
     * API documentation the array length is 1024.
     */
    private double[] embedding;
    /**
     * Task number submitted by the client, or one generated by the platform.
     */
    private String requestId;
    /**
     * Order number generated by the Zhipu AI open platform; use it when
     * querying the task result endpoint.
     */
    private String taskId;
    /**
     * Processing status: PROCESSING, SUCCESS or FAIL.
     * Note: a PROCESSING task requires polling to obtain the final result.
     */
    private String taskStatus;
}
| java | Apache-2.0 | 11723c550071102640f26e251cd6564bedde1fd4 | 2026-01-05T02:38:26.882818Z | false |
xiaoymin/LlmInAction | https://github.com/xiaoymin/LlmInAction/blob/11723c550071102640f26e251cd6564bedde1fd4/llm_chat_java_hello/src/main/java/com/github/xiaoymin/llm/domain/llm/ChunkResult.java | llm_chat_java_hello/src/main/java/com/github/xiaoymin/llm/domain/llm/ChunkResult.java | package com.github.xiaoymin.llm.domain.llm;
import lombok.Data;
/**
 * One fixed-size chunk of a source document, produced by the text splitter.
 *
 * @author <a href="xiaoymin@foxmail.com">xiaoymin@foxmail.com</a>
 * 2023/10/06 14:33
 * @since llm_chat_java_hello
 */
@Data
public class ChunkResult {
    /** Identifier of the source document. */
    private String docId;
    /** Sequential chunk number within the document (1-based). */
    private int chunkId;
    /** Text content of the chunk. */
    private String content;
}
| java | Apache-2.0 | 11723c550071102640f26e251cd6564bedde1fd4 | 2026-01-05T02:38:26.882818Z | false |
xiaoymin/LlmInAction | https://github.com/xiaoymin/LlmInAction/blob/11723c550071102640f26e251cd6564bedde1fd4/llm_chat_java_hello/src/main/java/com/github/xiaoymin/llm/domain/llm/ZhipuResult.java | llm_chat_java_hello/src/main/java/com/github/xiaoymin/llm/domain/llm/ZhipuResult.java | package com.github.xiaoymin.llm.domain.llm;
import lombok.Data;
/**
 * Response envelope returned by the Zhipu embedding endpoint: status code,
 * message and the embedding payload.
 *
 * @author <a href="xiaoymin@foxmail.com">xiaoymin@foxmail.com</a>
 * 2023/10/06 14:11
 * @since llm_chat_java_hello
 */
@Data
public class ZhipuResult {
    /** API status code. */
    private int code;
    /** Human-readable status message. */
    private String msg;
    /** Whether the call succeeded. */
    private boolean success;
    /** Embedding payload; populated on success. */
    private EmbeddingResult data;
}
| java | Apache-2.0 | 11723c550071102640f26e251cd6564bedde1fd4 | 2026-01-05T02:38:26.882818Z | false |
xiaoymin/LlmInAction | https://github.com/xiaoymin/LlmInAction/blob/11723c550071102640f26e251cd6564bedde1fd4/llm_chat_java_hello/src/main/java/com/github/xiaoymin/llm/domain/llm/ZhipuChatCompletion.java | llm_chat_java_hello/src/main/java/com/github/xiaoymin/llm/domain/llm/ZhipuChatCompletion.java | /*
* THIS FILE IS PART OF Zhejiang LiShi Technology CO.,LTD.
* Copyright (c) 2019-2023 Zhejiang LiShi Technology CO.,LTD.
* It is forbidden to distribute or copy the code under this software without the consent of the Zhejiang LiShi Technology
*
* https://www.lishiots.com/
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.xiaoymin.llm.domain.llm;
import lombok.Data;
import java.util.LinkedList;
import java.util.List;
/**
 * Request body for the Zhipu ChatGLM SSE chat-completion endpoint: the prompt
 * messages plus sampling and streaming options.
 *
 * @Classname ZpChatglmReq
 * @Date 2023/7/13 15:21
 * @Author jerrylin
 */
@Data
public class ZhipuChatCompletion {
    // Conversation messages sent to the model, in order
    private List<Prompt> prompt=new LinkedList<>();
    // Sampling temperature; higher values give more random output
    private float temperature;
    // Nucleus-sampling probability mass
    private float top_p;
    private String request_id;
    /**
     * For SSE calls, controls whether each event carries incremental or full
     * content; when omitted the server defaults to incremental.
     * - true: incremental chunks
     * - false: full content on every event
     */
    private boolean incremental = true;
    /** Appends a user message with the given content. */
    public void addPrompt(String content){
        this.prompt.add(Prompt.buildOne(content));
    }
    /** A single chat message; the role defaults to "user". */
    @Data
    public static class Prompt {
        private String role = "user";
        private String content;
        /** Factory for a user message with the given content. */
        public static Prompt buildOne(String content){
            Prompt prompt1=new Prompt();
            prompt1.setContent(content);
            return prompt1;
        }
    }
}
| java | Apache-2.0 | 11723c550071102640f26e251cd6564bedde1fd4 | 2026-01-05T02:38:26.882818Z | false |
xiaoymin/LlmInAction | https://github.com/xiaoymin/LlmInAction/blob/11723c550071102640f26e251cd6564bedde1fd4/llm_chat_java_hello/src/main/java/com/github/xiaoymin/llm/utils/LLMUtils.java | llm_chat_java_hello/src/main/java/com/github/xiaoymin/llm/utils/LLMUtils.java | package com.github.xiaoymin.llm.utils;
import cn.hutool.jwt.JWTUtil;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Map;
/**
 * Helpers for building RAG prompts and generating Zhipu API authentication
 * tokens.
 *
 * @author <a href="xiaoymin@foxmail.com">xiaoymin@foxmail.com</a>
 * 2023/10/06 13:42
 * @since llm_chat_java_hello
 */
public class LLMUtils {
    /**
     * Builds a prompt that instructs the model to answer the question using
     * only the supplied context.
     *
     * @param question the user question
     * @param context  the retrieved context passages
     * @return the assembled prompt
     */
    public static String buildPrompt(String question,String context){
        return "请利用如下上下文的信息回答问题:" + "\n" +
                question + "\n" +
                "上下文信息如下:" + "\n" +
                context + "\n" +
                "如果上下文信息中没有帮助,则不允许胡乱回答!";
    }

    /**
     * Generates a signed HS256 JWT for the Zhipu open platform from an API key
     * of the form {@code <id>.<secret>}.
     *
     * @param apiKey     the API key in "id.secret" format
     * @param expSeconds token validity in seconds from now
     * @return the signed token
     * @throws RuntimeException if the key is not in "id.secret" format
     */
    public static String gen(String apiKey, int expSeconds) {
        String[] parts = apiKey.split("\\.");
        if (parts.length != 2) {
            throw new RuntimeException("智谱invalid key");
        }
        String id = parts[0];
        String secret = parts[1];
        Map<String, Object> payload = new HashMap<>();
        long currentTimeMillis = System.currentTimeMillis();
        // Fix: honor the expSeconds argument; the original hard-coded a 60 s
        // expiry and ignored the parameter entirely.
        long expirationTimeMillis = currentTimeMillis + expSeconds * 1000L;
        payload.put("api_key", id);
        payload.put("exp", expirationTimeMillis);
        payload.put("timestamp", currentTimeMillis);
        Map<String, Object> headerMap = new HashMap<>();
        headerMap.put("alg", "HS256");
        headerMap.put("sign_type", "SIGN");
        return JWTUtil.createToken(headerMap,payload,secret.getBytes(StandardCharsets.UTF_8));
    }
}
| java | Apache-2.0 | 11723c550071102640f26e251cd6564bedde1fd4 | 2026-01-05T02:38:26.882818Z | false |
xiaoymin/LlmInAction | https://github.com/xiaoymin/LlmInAction/blob/11723c550071102640f26e251cd6564bedde1fd4/llm_chat_java_hello/src/main/java/com/github/xiaoymin/llm/llm/ZhipuAI.java | llm_chat_java_hello/src/main/java/com/github/xiaoymin/llm/llm/ZhipuAI.java | package com.github.xiaoymin.llm.llm;
import cn.hutool.core.collection.CollectionUtil;
import cn.hutool.core.util.RandomUtil;
import cn.hutool.core.util.StrUtil;
import cn.hutool.http.ContentType;
import cn.hutool.http.Header;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.github.xiaoymin.llm.config.LLmProperties;
import com.github.xiaoymin.llm.domain.llm.ChunkResult;
import com.github.xiaoymin.llm.domain.llm.EmbeddingResult;
import com.github.xiaoymin.llm.domain.llm.ZhipuChatCompletion;
import com.github.xiaoymin.llm.domain.llm.ZhipuResult;
import com.github.xiaoymin.llm.listener.ConsoleEventSourceListener;
import com.github.xiaoymin.llm.utils.LLMUtils;
import com.google.gson.Gson;
import lombok.AllArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import okhttp3.*;
import okhttp3.sse.EventSource;
import okhttp3.sse.EventSources;
import org.jetbrains.annotations.NotNull;
import org.springframework.stereotype.Component;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
/**
* @author <a href="xiaoymin@foxmail.com">xiaoymin@foxmail.com</a>
* 2023/10/06 13:35
* @since llm_chat_java_hello
*/
@Component
@AllArgsConstructor
@Slf4j
public class ZhipuAI {
final LLmProperties lLmProperties;
final Gson GSON=new Gson();
public String getApiKey(){
String apiKey= lLmProperties.getZpKey();
if (StrUtil.isBlank(apiKey)){
apiKey=System.getenv("CHAT2CMD_KEY_ZP");
}
return apiKey;
}
public void chat(String prompt){
try {
OkHttpClient.Builder builder = new OkHttpClient.Builder()
.connectTimeout(20000, TimeUnit.MILLISECONDS)
.readTimeout(20000, TimeUnit.MILLISECONDS)
.writeTimeout(20000, TimeUnit.MILLISECONDS)
.addInterceptor(new ZhipuHeaderInterceptor(this.getApiKey()));
OkHttpClient okHttpClient = builder.build();
ZhipuChatCompletion zhipuChatCompletion=new ZhipuChatCompletion();
zhipuChatCompletion.addPrompt(prompt);
// 采样温度,控制输出的随机性,必须为正数
// 值越大,会使输出更随机,更具创造性;值越小,输出会更加稳定或确定
zhipuChatCompletion.setTemperature(0.7f);
zhipuChatCompletion.setTop_p(0.7f);
EventSource.Factory factory = EventSources.createFactory(okHttpClient);
ObjectMapper mapper = new ObjectMapper();
String requestBody = mapper.writeValueAsString(zhipuChatCompletion);
Request request = new Request.Builder()
.url("https://open.bigmodel.cn/api/paas/v3/model-api/chatglm_std/sse-invoke")
.post(RequestBody.create(MediaType.parse(ContentType.JSON.getValue()), requestBody))
.build();
CountDownLatch countDownLatch=new CountDownLatch(1);
// 创建事件,控制台输出
EventSource eventSource = factory.newEventSource(request, new ConsoleEventSourceListener(countDownLatch));
countDownLatch.await();
} catch (Exception e) {
log.error("llm-chat异常:{}", e.getMessage());
}
}
/**
* 获取句子的向量
* @param sentence 句子
* @return 向量
*/
public double[] sentence(String sentence){
ChunkResult chunkResult=new ChunkResult();
chunkResult.setContent(sentence);
chunkResult.setChunkId(RandomUtil.randomInt());
EmbeddingResult embeddingResult=this.embedding(chunkResult);
return embeddingResult.getEmbedding();
}
/**
* 批量
* @param chunkResults 批量文本
* @return 向量
*/
public List<EmbeddingResult> embedding(List<ChunkResult> chunkResults){
log.info("start embedding,size:{}",CollectionUtil.size(chunkResults));
if (CollectionUtil.isEmpty(chunkResults)){
return new ArrayList<>();
}
List<EmbeddingResult> embeddingResults=new ArrayList<>();
for (ChunkResult chunkResult:chunkResults){
embeddingResults.add(this.embedding(chunkResult));
}
return embeddingResults;
}
public EmbeddingResult embedding(ChunkResult chunkResult){
String apiKey= this.getApiKey();
//log.info("zp-key:{}",apiKey);
OkHttpClient.Builder builder = new OkHttpClient.Builder()
.connectTimeout(20000, TimeUnit.MILLISECONDS)
.readTimeout(20000, TimeUnit.MILLISECONDS)
.writeTimeout(20000, TimeUnit.MILLISECONDS)
.addInterceptor(new ZhipuHeaderInterceptor(apiKey));
OkHttpClient okHttpClient = builder.build();
EmbeddingResult embedRequest=new EmbeddingResult();
embedRequest.setPrompt(chunkResult.getContent());
embedRequest.setRequestId(Objects.toString(chunkResult.getChunkId()));
// 智谱embedding
Request request = new Request.Builder()
.url("https://open.bigmodel.cn/api/paas/v3/model-api/text_embedding/invoke")
.post(RequestBody.create(MediaType.parse(ContentType.JSON.getValue()), GSON.toJson(embedRequest)))
.build();
try {
Response response= okHttpClient.newCall(request).execute();
String result=response.body().string();
ZhipuResult zhipuResult= GSON.fromJson(result, ZhipuResult.class);
EmbeddingResult ret= zhipuResult.getData();
ret.setPrompt(embedRequest.getPrompt());
ret.setRequestId(embedRequest.getRequestId());
return ret;
} catch (IOException e) {
throw new RuntimeException(e);
}
}
@AllArgsConstructor
private static class ZhipuHeaderInterceptor implements Interceptor {
final String apiKey;
@NotNull
@Override
public Response intercept(@NotNull Chain chain) throws IOException {
Request original = chain.request();
String authorization=LLMUtils.gen(apiKey,60);
//log.info("authorization:{}",authorization);
Request request = original.newBuilder()
.header(Header.AUTHORIZATION.getValue(), authorization)
.header(Header.CONTENT_TYPE.getValue(), ContentType.JSON.getValue())
.method(original.method(), original.body())
.build();
return chain.proceed(request);
}
}
}
| java | Apache-2.0 | 11723c550071102640f26e251cd6564bedde1fd4 | 2026-01-05T02:38:26.882818Z | false |
xiaoymin/LlmInAction | https://github.com/xiaoymin/LlmInAction/blob/11723c550071102640f26e251cd6564bedde1fd4/llm_chat_java_hello/src/main/java/com/github/xiaoymin/llm/compoents/TxtChunk.java | llm_chat_java_hello/src/main/java/com/github/xiaoymin/llm/compoents/TxtChunk.java | package com.github.xiaoymin.llm.compoents;
import cn.hutool.core.io.IoUtil;
import cn.hutool.core.util.ArrayUtil;
import cn.hutool.core.util.StrUtil;
import com.github.xiaoymin.llm.domain.llm.ChunkResult;
import lombok.AllArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.core.io.ClassPathResource;
import org.springframework.stereotype.Component;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;
/**
* @author <a href="xiaoymin@foxmail.com">xiaoymin@foxmail.com</a>
* 2023/10/06 12:48
* @since llm_chat_java_hello
*/
@Slf4j
@Component
@AllArgsConstructor
public class TxtChunk {
public List<ChunkResult> chunk(String docId){
String path="data/"+docId+".txt";
log.info("start chunk---> docId:{},path:{}",docId,path);
ClassPathResource classPathResource=new ClassPathResource(path);
try {
String txt=IoUtil.read(classPathResource.getInputStream(), StandardCharsets.UTF_8);
//按固定字数分割,256
String[] lines=StrUtil.split(txt,256);
log.info("chunk size:{}", ArrayUtil.length(lines));
List<ChunkResult> results=new ArrayList<>();
AtomicInteger atomicInteger=new AtomicInteger(0);
for (String line:lines){
ChunkResult chunkResult=new ChunkResult();
chunkResult.setDocId(docId);
chunkResult.setContent(line);
chunkResult.setChunkId(atomicInteger.incrementAndGet());
results.add(chunkResult);
}
return results;
} catch (IOException e) {
log.error(e.getMessage());
}
return new ArrayList<>();
}
}
| java | Apache-2.0 | 11723c550071102640f26e251cd6564bedde1fd4 | 2026-01-05T02:38:26.882818Z | false |
xiaoymin/LlmInAction | https://github.com/xiaoymin/LlmInAction/blob/11723c550071102640f26e251cd6564bedde1fd4/llm_chat_java_hello/src/main/java/com/github/xiaoymin/llm/compoents/LoadStartup.java | llm_chat_java_hello/src/main/java/com/github/xiaoymin/llm/compoents/LoadStartup.java | package com.github.xiaoymin.llm.compoents;
import cn.hutool.core.date.DateUtil;
import lombok.AllArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.stereotype.Component;
import java.time.Instant;
import java.util.Date;
/**
 * Ensures the vector collection exists once the application context is ready.
 *
 * @author <a href="xiaoymin@foxmail.com">xiaoymin@foxmail.com</a>
 * 2023/10/06 13:26
 * @since llm_chat_java_hello
 */
@AllArgsConstructor
@Component
@Slf4j
public class LoadStartup implements InitializingBean {
    final VectorStorage vectorStorage;

    /** Creates today's vector collection if it does not exist yet. */
    public void startup() {
        log.info("init vector collection");
        String name = vectorStorage.getCollectionName();
        log.info("init collection:{}", name);
        // The dimension is fixed to 1024 here because the chosen Zhipu embedding
        // model produces 1024-dimensional vectors; it must match that model.
        vectorStorage.initCollection(name, 1024);
        log.info("init collection success.");
    }

    /** Spring lifecycle hook: run the collection bootstrap at startup. */
    @Override
    public void afterPropertiesSet() throws Exception {
        log.info("start load.....");
        this.startup();
    }
}
| java | Apache-2.0 | 11723c550071102640f26e251cd6564bedde1fd4 | 2026-01-05T02:38:26.882818Z | false |
xiaoymin/LlmInAction | https://github.com/xiaoymin/LlmInAction/blob/11723c550071102640f26e251cd6564bedde1fd4/llm_chat_java_hello/src/main/java/com/github/xiaoymin/llm/compoents/VectorStorage.java | llm_chat_java_hello/src/main/java/com/github/xiaoymin/llm/compoents/VectorStorage.java | package com.github.xiaoymin.llm.compoents;
import cn.hutool.core.collection.CollectionUtil;
import cn.hutool.core.date.DateUtil;
import cn.hutool.core.map.MapUtil;
import com.github.xiaoymin.llm.domain.llm.EmbeddingResult;
import com.github.xiaoymin.llm.domain.store.ElasticVectorData;
import lombok.AllArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.functionscore.ScriptScoreQueryBuilder;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptType;
import org.springframework.data.domain.Pageable;
import org.springframework.data.elasticsearch.core.*;
import org.springframework.data.elasticsearch.core.document.Document;
import org.springframework.data.elasticsearch.core.mapping.IndexCoordinates;
import org.springframework.data.elasticsearch.core.query.IndexQuery;
import org.springframework.data.elasticsearch.core.query.IndexQueryBuilder;
import org.springframework.data.elasticsearch.core.query.NativeSearchQuery;
import org.springframework.data.elasticsearch.core.query.NativeSearchQueryBuilder;
import org.springframework.stereotype.Component;
import java.time.Instant;
import java.util.*;
/**
 * Elasticsearch-backed vector store: index bootstrap, bulk storage of
 * embeddings, and cosine-similarity retrieval via a script-score query.
 *
 * @author <a href="xiaoymin@foxmail.com">xiaoymin@foxmail.com</a>
 * 2023/10/06 12:39
 * @since llm_chat_java_hello
 */
@Slf4j
@Component
@AllArgsConstructor
public class VectorStorage {
    final ElasticsearchRestTemplate elasticsearchRestTemplate;
    /**
     * Index name used for the demo: a fixed prefix plus the current date, so
     * each day writes to its own index.
     *
     * @return index name, e.g. {@code llm_action_rag_20231006}
     */
    public String getCollectionName(){
        // Demo convenience: fixed prefix + yyyyMMdd date suffix
        return "llm_action_rag_"+ DateUtil.format(Date.from(Instant.now()),"yyyyMMdd");
    }
    /**
     * Creates the vector index if it does not exist.
     *
     * @param collectionName index name
     * @param dim dimension of the dense vector field
     * @return always {@code true} (both the "already exists" and "created" paths return true)
     */
    public boolean initCollection(String collectionName,int dim){
        log.info("collection:{}", collectionName);
        // Check whether the index already exists; the mapping below uses fixed field names
        IndexOperations indexOperations = elasticsearchRestTemplate.indexOps(IndexCoordinates.of(collectionName));
        if (!indexOperations.exists()) {
            // Index is missing: create it directly
            log.info("index not exists,create");
            // Build the ES mapping (simplified handling)
            Document document = Document.from(this.elasticMapping(dim));
            // Create with default settings
            indexOperations.create(new HashMap<>(), document);
            return true;
        }
        return true;
    }
    /**
     * Bulk-stores embedding results into the given index.
     *
     * <p>NOTE(review): {@code docId} is never set on the stored documents —
     * {@link EmbeddingResult} carries no document id, so the field stays null;
     * confirm this is intended.
     *
     * @param collectionName index name
     * @param embeddingResults embeddings to persist (vector, chunk id, content)
     */
    public void store(String collectionName,List<EmbeddingResult> embeddingResults){
        // Persist the vectors
        log.info("save vector,collection:{},size:{}",collectionName, CollectionUtil.size(embeddingResults));
        List<IndexQuery> results = new ArrayList<>();
        for (EmbeddingResult embeddingResult : embeddingResults) {
            ElasticVectorData ele = new ElasticVectorData();
            ele.setVector(embeddingResult.getEmbedding());
            ele.setChunkId(embeddingResult.getRequestId());
            ele.setContent(embeddingResult.getPrompt());
            results.add(new IndexQueryBuilder().withObject(ele).build());
        }
        // Send the bulk request
        List<IndexedObjectInformation> bulkedResult = elasticsearchRestTemplate.bulkIndex(results, IndexCoordinates.of(collectionName));
        int size = CollectionUtil.size(bulkedResult);
        log.info("保存向量成功-size:{}", size);
    }
    /**
     * Retrieves the 3 chunks most similar to the query vector and concatenates
     * their text content.
     *
     * @param collectionName index name
     * @param vector query embedding
     * @return concatenated content of the top hits; empty string if none
     */
    public String retrieval(String collectionName,double[] vector){
        // Build the script-score query over the stored vectors
        Map<String, Object> params = new HashMap<>();
        params.put("query_vector", vector);
        // cosineSimilarity + 1 keeps scores non-negative (ES rejects negative
        // scores); subtract 1 from the returned score to recover the cosine value
        Script script = new Script(ScriptType.INLINE, Script.DEFAULT_SCRIPT_LANG, "cosineSimilarity(params.query_vector, 'vector')+1", params);
        ScriptScoreQueryBuilder scriptScoreQueryBuilder = new ScriptScoreQueryBuilder(QueryBuilders.boolQuery(), script);
        // Build the search request, top-3 hits
        NativeSearchQuery nativeSearchQuery = new NativeSearchQueryBuilder()
                .withQuery(scriptScoreQueryBuilder)
                .withPageable(Pageable.ofSize(3)).build();
        SearchHits<ElasticVectorData> dataSearchHits = this.elasticsearchRestTemplate.search(nativeSearchQuery, ElasticVectorData.class, IndexCoordinates.of(collectionName));
        List<SearchHit<ElasticVectorData>> data = dataSearchHits.getSearchHits();
        List<String> results = new LinkedList<>();
        for (SearchHit<ElasticVectorData> ele : data) {
            results.add(ele.getContent().getContent());
        }
        return CollectionUtil.join(results,"");
    }
    /**
     * Builds the index mapping: keyword fields for metadata and a
     * {@code dense_vector} field of the given dimension.
     *
     * @param dims dense vector dimension
     * @return the mapping as a nested map under "properties"
     */
    private Map<String, Object> elasticMapping(int dims) {
        Map<String, Object> properties = new HashMap<>();
        properties.put("_class", MapUtil.builder("type", "keyword").put("doc_values", "false").put("index", "false").build());
        properties.put("chunkId", MapUtil.builder("type", "keyword").build());
        properties.put("content", MapUtil.builder("type", "keyword").build());
        properties.put("docId", MapUtil.builder("type", "keyword").build());
        // The embedding vector field
        properties.put("vector", MapUtil.builder("type", "dense_vector").put("dims", Objects.toString(dims)).build());
        Map<String, Object> root = new HashMap<>();
        root.put("properties", properties);
        return root;
    }
}
| java | Apache-2.0 | 11723c550071102640f26e251cd6564bedde1fd4 | 2026-01-05T02:38:26.882818Z | false |
package com.github.xiaoymin.llm.config;
import lombok.Data;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.stereotype.Component;
/**
 * Configuration properties for the LLM integration, bound from the
 * {@code llm.*} prefix of the Spring environment.
 *
 * @author <a href="xiaoymin@foxmail.com">xiaoymin@foxmail.com</a>
 * 2023/10/06 13:38
 * @since llm_chat_java_hello
 */
@Data
@Component
@ConfigurationProperties(prefix = "llm")
public class LLmProperties {
    /**
     * Zhipu AI developer API key, see https://open.bigmodel.cn/dev/api#text_embedding.
     * Obtain it by registering on the Zhipu AI open platform.
     */
    private String zpKey;
}
| java | Apache-2.0 | 11723c550071102640f26e251cd6564bedde1fd4 | 2026-01-05T02:38:26.882818Z | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.