Skip to content

Commit 7ced50c

Browse files
authored
chore(kafka): Implement kafka binding with support for Domain Events (#116)
chore(kafka): Implement kafka binding with support for Domain Events --------- Co-authored-by: AndresFelipe11 Co-authored-by: jespinosas
1 parent 0233d1a commit 7ced50c

File tree

41 files changed

+1934
-11
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

41 files changed

+1934
-11
lines changed

.gitignore

+2-1
Original file line numberDiff line numberDiff line change
@@ -644,4 +644,5 @@ MigrationBackup/
644644
# End of https://www.toptal.com/developers/gitignore/api/macos,linux,windows,gradle,java,intellij,visualstudio,eclipse
645645
contiperf-report
646646

647-
samples/async/local-example/
647+
samples/async/local-example/
648+
.kafka-env

async/async-commons-api/src/test/java/org/reactivecommons/async/api/HandlerRegistryTest.java

+9-4
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,12 @@
55
import org.junit.jupiter.api.Test;
66
import org.reactivecommons.api.domain.Command;
77
import org.reactivecommons.api.domain.DomainEvent;
8-
import org.reactivecommons.async.api.handlers.*;
8+
import org.reactivecommons.async.api.handlers.CloudCommandHandler;
9+
import org.reactivecommons.async.api.handlers.CloudEventHandler;
10+
import org.reactivecommons.async.api.handlers.DomainCommandHandler;
11+
import org.reactivecommons.async.api.handlers.DomainEventHandler;
12+
import org.reactivecommons.async.api.handlers.QueryHandler;
13+
import org.reactivecommons.async.api.handlers.QueryHandlerDelegate;
914
import org.reactivecommons.async.api.handlers.registered.RegisteredCommandHandler;
1015
import org.reactivecommons.async.api.handlers.registered.RegisteredEventListener;
1116
import org.reactivecommons.async.api.handlers.registered.RegisteredQueryHandler;
@@ -152,7 +157,7 @@ void handleDomainCommand() {
152157

153158
@Test
154159
void handleCloudEventCommand() {
155-
SomeCloudEventCommandHandler cloudCommandHandler = new SomeCloudEventCommandHandler();
160+
SomeCloudCommandHandler cloudCommandHandler = new SomeCloudCommandHandler();
156161

157162
registry.handleCloudEventCommand(name, cloudCommandHandler);
158163

@@ -197,7 +202,7 @@ void serveQueryWithLambda() {
197202
@Test
198203
void serveQueryWithTypeInference() {
199204
QueryHandler<SomeDataClass, SomeDataClass> handler = new SomeQueryHandler();
200-
registry.serveQuery(name, handler,SomeDataClass.class);
205+
registry.serveQuery(name, handler, SomeDataClass.class);
201206
assertThat(registry.getHandlers()).anySatisfy(registered -> {
202207
assertThat(registered).extracting(RegisteredQueryHandler::getPath, RegisteredQueryHandler::getQueryClass)
203208
.containsExactly(name, SomeDataClass.class);
@@ -252,7 +257,7 @@ public Mono<Void> handle(Command<SomeDataClass> message) {
252257
}
253258
}
254259

255-
private static class SomeCloudEventCommandHandler implements CloudCommandHandler {
260+
private static class SomeCloudCommandHandler implements CloudCommandHandler {
256261
@Override
257262
public Mono<Void> handle(CloudEvent message) {
258263
return null;

async/async-kafka/async-kafka.gradle

+12
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,12 @@
// Module coordinates consumed by the shared publishing configuration.
ext {
    artifactId = 'async-kafka'
    artifactDescription = 'Async Kafka'
}

dependencies {
    // Reactive Commons modules this binding builds on.
    api project(':async-commons-api')
    api project(':domain-events-api')
    api project(':async-commons')
    // Reactive Kafka client and CloudEvents JSON serialization support.
    api 'io.projectreactor.kafka:reactor-kafka:1.3.23'
    api 'io.cloudevents:cloudevents-json-jackson:4.0.1'
}
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,23 @@
1+
package org.reactivecommons.async.kafka;
2+
3+
import io.cloudevents.CloudEvent;
4+
import lombok.AllArgsConstructor;
5+
import org.reactivecommons.api.domain.DomainEvent;
6+
import org.reactivecommons.api.domain.DomainEventBus;
7+
import org.reactivecommons.async.kafka.communications.ReactiveMessageSender;
8+
import org.reactivestreams.Publisher;
9+
10+
@AllArgsConstructor
11+
public class KafkaDomainEventBus implements DomainEventBus {
12+
private final ReactiveMessageSender sender;
13+
14+
@Override
15+
public <T> Publisher<Void> emit(DomainEvent<T> event) {
16+
return sender.send(event);
17+
}
18+
19+
@Override
20+
public Publisher<Void> emit(CloudEvent event) {
21+
return sender.send(event);
22+
}
23+
}
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,52 @@
package org.reactivecommons.async.kafka;

import lombok.Data;
import org.apache.kafka.common.header.Headers;
import org.reactivecommons.async.commons.communications.Message;
import reactor.kafka.receiver.ReceiverRecord;

import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Map;

import static org.reactivecommons.async.kafka.converters.json.KafkaJacksonMessageConverter.CONTENT_TYPE;


/**
 * {@link Message} adapter over a consumed Kafka record: the raw payload bytes
 * plus topic/key/header metadata.
 */
@Data
public class KafkaMessage implements Message {
    private final byte[] body;
    private final Properties properties;

    /** Kafka-specific message properties: record key, topic and decoded headers. */
    @Data
    public static class KafkaMessageProperties implements Properties {
        private long contentLength;
        private String key;
        private String topic;
        private Map<String, Object> headers = new HashMap<>();

        /** Content type is carried as a regular Kafka header. */
        @Override
        public String getContentType() {
            return (String) headers.get(CONTENT_TYPE);
        }
    }

    /**
     * Builds a {@code KafkaMessage} from a consumed record.
     * NOTE(review): assumes {@code record.value()} is non-null — a tombstone
     * record (null value) would NPE on {@code value().length}; confirm upstream
     * filtering.
     */
    public static KafkaMessage fromDelivery(ReceiverRecord<String, byte[]> record) {
        return new KafkaMessage(record.value(), createMessageProps(record));
    }

    /** Copies key, topic, length and headers out of the record. */
    private static Properties createMessageProps(ReceiverRecord<String, byte[]> record) {
        Map<String, Object> headers = parseHeaders(record.headers());

        final KafkaMessageProperties properties = new KafkaMessageProperties();
        properties.setHeaders(headers);
        properties.setKey(record.key());
        properties.setTopic(record.topic());
        properties.setContentLength(record.value().length);
        return properties;
    }

    /** Decodes each header value to a String. */
    private static Map<String, Object> parseHeaders(Headers headers) {
        Map<String, Object> parsedHeaders = new HashMap<>();
        // Decode explicitly with UTF-8: the producer side (ReactiveMessageSender)
        // encodes header values with StandardCharsets.UTF_8, and the bare
        // new String(byte[]) would use the platform default charset instead.
        headers.forEach(header -> parsedHeaders.put(header.key(),
                new String(header.value(), StandardCharsets.UTF_8)));
        return parsedHeaders;
    }
}
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,30 @@
package org.reactivecommons.async.kafka.communications;

import lombok.AllArgsConstructor;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import reactor.core.publisher.Flux;
import reactor.kafka.receiver.KafkaReceiver;
import reactor.kafka.receiver.ReceiverOptions;
import reactor.kafka.receiver.ReceiverRecord;

import java.util.List;


/**
 * Thin wrapper around {@link KafkaReceiver} that creates a consumer
 * for a given group id and topic subscription.
 */
@AllArgsConstructor
public class ReactiveMessageListener {
    private final ReceiverOptions<String, byte[]> receiverOptions;

    /**
     * Starts consuming the given topics with the given consumer group.
     * Each call creates an independent receiver. // Notification events
     */
    public Flux<ReceiverRecord<String, byte[]>> listen(String groupId, List<String> topics) {
        ReceiverOptions<String, byte[]> options = receiverOptions.consumerProperty(ConsumerConfig.GROUP_ID_CONFIG, groupId);
        return KafkaReceiver.create(options.subscription(topics))
                .receive();
    }

    /**
     * Resolves the configured {@code max.poll.records} to size downstream
     * concurrency; falls back to the Kafka default when absent or unparsable.
     */
    public int getMaxConcurrency() {
        Object property = receiverOptions.consumerProperty(ConsumerConfig.MAX_POLL_RECORDS_CONFIG);
        if (property instanceof Integer) {
            return (int) property;
        }
        // Properties loaded from config files commonly arrive as Strings;
        // previously such values were silently ignored and the default used.
        if (property instanceof String) {
            try {
                return Integer.parseInt((String) property);
            } catch (NumberFormatException ignored) {
                // Malformed value: fall through to the Kafka default below.
            }
        }
        return ConsumerConfig.DEFAULT_MAX_POLL_RECORDS;
    }
}
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,91 @@
package org.reactivecommons.async.kafka.communications;

import lombok.SneakyThrows;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.header.Header;
import org.apache.kafka.common.header.internals.RecordHeader;
import org.reactivecommons.async.commons.converters.MessageConverter;
import org.reactivecommons.async.kafka.KafkaMessage;
import org.reactivecommons.async.kafka.communications.topology.TopologyCreator;
import reactor.core.publisher.Flux;
import reactor.core.publisher.FluxSink;
import reactor.core.publisher.Mono;
import reactor.core.publisher.MonoSink;
import reactor.kafka.sender.KafkaSender;
import reactor.kafka.sender.SenderRecord;
import reactor.kafka.sender.SenderResult;

import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.atomic.AtomicLong;
import java.util.stream.Collectors;

/**
 * Sends messages to Kafka through a pool of reactive sender pipelines and
 * completes each caller's Mono when the broker confirms the corresponding record.
 *
 * <p>Flow: {@link #send(Object)} converts the payload to a {@link KafkaMessage},
 * registers a {@link MonoSink} keyed by the record key, and pushes the record
 * into one of {@code senderCount} long-lived send pipelines; {@link #confirm}
 * resolves the sink when the matching {@link SenderResult} arrives.
 */
public class ReactiveMessageSender {
    // Pending acknowledgements, keyed by record key (the correlation metadata).
    // NOTE(review): two in-flight messages with the same key overwrite each
    // other's sink here — the first caller would never be completed; confirm
    // whether keys are unique per message.
    private final ConcurrentHashMap<String, MonoSink<Void>> confirmations = new ConcurrentHashMap<>();
    // One FluxSink per sender pipeline; populated when each pipeline's source
    // Flux is subscribed in the constructor.
    // NOTE(review): if KafkaSender subscribes asynchronously, a send() racing
    // the constructor could index an element not yet added — verify.
    private final CopyOnWriteArrayList<FluxSink<SenderRecord<String, byte[], String>>> fluxSinks = new CopyOnWriteArrayList<>();
    private final AtomicLong counter = new AtomicLong();

    // Separate pools so confirmation handling never blocks behind emissions.
    // The pool size 13 appears arbitrary — presumably tuned empirically.
    private final ExecutorService executorServiceConfirm = Executors.newFixedThreadPool(13, r -> new Thread(r, "KMessageSender1-" + counter.getAndIncrement()));
    private final ExecutorService executorServiceEmit = Executors.newFixedThreadPool(13, r -> new Thread(r, "KMessageSender2-" + counter.getAndIncrement()));

    // Number of parallel send pipelines created over the single KafkaSender.
    private final int senderCount = 4;

    private final MessageConverter messageConverter;
    private final TopologyCreator topologyCreator;

    /**
     * Wires {@code senderCount} infinite send pipelines onto the given sender.
     * Each pipeline stays subscribed for the lifetime of this object.
     */
    public ReactiveMessageSender(KafkaSender<String, byte[]> sender, MessageConverter messageConverter,
                                 TopologyCreator topologyCreator) {
        this.messageConverter = messageConverter;
        this.topologyCreator = topologyCreator;
        for (int i = 0; i < senderCount; ++i) {
            // Flux.create hands us the sink on subscription; we keep it to push
            // records into this pipeline later.
            Flux<SenderRecord<String, byte[], String>> source = Flux.create(fluxSinks::add);
            sender.send(source)
                    .doOnNext(this::confirm)
                    .subscribe();
        }
    }

    /**
     * Converts and enqueues {@code message}; the returned Mono completes (or
     * errors) when the broker result for the record is received.
     */
    public <V> Mono<Void> send(V message) {
        return Mono.create(sink -> {
            SenderRecord<String, byte[], String> record = createRecord(message);
            confirmations.put(record.key(), sink);
            // Cheap pseudo-round-robin: pick a pipeline from the current time.
            executorServiceEmit.submit(() -> fluxSinks.get((int) (System.currentTimeMillis() % senderCount)).next(record));
        });
    }

    /** Resolves the pending sink matching this result's correlation metadata. */
    private void confirm(SenderResult<String> result) {
        executorServiceConfirm.submit(() -> {
            MonoSink<Void> sink = confirmations.remove(result.correlationMetadata());
            if (sink != null) {
                if (result.exception() != null) {
                    sink.error(result.exception());
                } else {
                    sink.success();
                }
            }
        });
    }

    /** Serializes the payload and wraps it as a SenderRecord correlated by key. */
    private <V> SenderRecord<String, byte[], String> createRecord(V object) {
        KafkaMessage message = (KafkaMessage) messageConverter.toMessage(object);
        ProducerRecord<String, byte[]> record = createProducerRecord(message);
        return SenderRecord.create(record, message.getProperties().getKey()); // TODO: Review for Request-Reply
    }

    /**
     * Builds the ProducerRecord: validates the topic exists, then copies the
     * message headers as UTF-8 encoded Kafka headers. Partition is left null so
     * the partitioner assigns one from the key.
     */
    @SneakyThrows
    private ProducerRecord<String, byte[]> createProducerRecord(KafkaMessage message) {
        // Throws TopicNotFoundException if the topic was never created.
        topologyCreator.checkTopic(message.getProperties().getTopic());

        List<Header> headers = message.getProperties().getHeaders().entrySet().stream()
                .map(entry -> new RecordHeader(entry.getKey(), entry.getValue()
                        .toString().getBytes(StandardCharsets.UTF_8)))
                .collect(Collectors.toList());

        return new ProducerRecord<>(message.getProperties().getTopic(), null,
                message.getProperties().getKey(), message.getBody(), headers);
    }
}
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,7 @@
package org.reactivecommons.async.kafka.communications.exceptions;

/**
 * Thrown when a message is about to be sent to a Kafka topic that does not
 * exist in the cluster (as seen by the topology creator's topic cache).
 */
public class TopicNotFoundException extends RuntimeException {
    public TopicNotFoundException(String message) {
        super(message);
    }
}
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,26 @@
package org.reactivecommons.async.kafka.communications.topology;

import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;

import java.util.HashMap;
import java.util.Map;

/**
 * Per-topic creation overrides (partitions, replication, config), keyed by
 * topic name. Topics without an entry use the binding's defaults.
 */
@Data
@AllArgsConstructor
@NoArgsConstructor
public class KafkaCustomizations {
    private Map<String, TopicCustomization> topics = new HashMap<>();

    /** Static factory: a customizations set seeded with a single topic entry. */
    public static KafkaCustomizations withTopic(String topic, TopicCustomization customization) {
        return new KafkaCustomizations().addTopic(topic, customization);
    }

    /** Registers an override for {@code topic}; returns this for chaining. */
    public KafkaCustomizations addTopic(String topic, TopicCustomization customization) {
        topics.put(topic, customization);
        return this;
    }
}
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,19 @@
package org.reactivecommons.async.kafka.communications.topology;

import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;

import java.util.Map;

/**
 * Creation parameters for a single Kafka topic.
 * Used by {@code TopologyCreator}; partitions/replicationFactor of -1 mean
 * "use the broker defaults".
 */
@Data
@Builder
@AllArgsConstructor
@NoArgsConstructor
public class TopicCustomization {
    // Topic name this customization applies to.
    private String topic;
    // Number of partitions; -1 delegates to the broker default.
    private int partitions;
    // Replication factor; -1 delegates to the broker default.
    private short replicationFactor;
    // Extra topic-level configs (e.g. retention.ms); may be null.
    private Map<String, String> config;
}
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,76 @@
package org.reactivecommons.async.kafka.communications.topology;

import lombok.SneakyThrows;
import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.ListTopicsOptions;
import org.apache.kafka.clients.admin.ListTopicsResult;
import org.apache.kafka.clients.admin.NewTopic;
import org.apache.kafka.common.errors.TopicExistsException;
import org.reactivecommons.async.kafka.communications.exceptions.TopicNotFoundException;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;

import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

/**
 * Creates the Kafka topics required by the binding and keeps an in-memory
 * cache of topic names that exist, used to fail fast before sending.
 */
public class TopologyCreator {
    public static final int TIMEOUT_MS = 60_000;
    private final AdminClient adminClient;
    private final KafkaCustomizations customizations;
    // Cache of known-existing topic names (value is always true; used as a set).
    private final Map<String, Boolean> existingTopics;

    public TopologyCreator(AdminClient adminClient, KafkaCustomizations customizations) {
        this.adminClient = adminClient;
        this.customizations = customizations;
        this.existingTopics = getTopics();
    }

    /**
     * Lists the cluster's current topics, blocking up to {@link #TIMEOUT_MS}.
     * SneakyThrows covers the checked Interrupted/ExecutionException from get().
     */
    @SneakyThrows
    public Map<String, Boolean> getTopics() {
        ListTopicsResult topics = adminClient.listTopics(new ListTopicsOptions().timeoutMs(TIMEOUT_MS));
        return topics.names().get().stream().collect(Collectors.toConcurrentMap(name -> name, name -> true));
    }

    /**
     * Ensures each of the given topics exists, applying per-topic
     * customizations when present and broker defaults (-1/-1) otherwise.
     * Newly created topics are added to the local cache.
     */
    public Mono<Void> createTopics(List<String> topics) {
        TopicCustomization.TopicCustomizationBuilder defaultBuilder = TopicCustomization.builder()
                .partitions(-1)
                .replicationFactor((short) -1);

        return Flux.fromIterable(topics)
                .map(topic -> {
                    if (customizations.getTopics().containsKey(topic)) {
                        return customizations.getTopics().get(topic);
                    }
                    return defaultBuilder.topic(topic).build();
                })
                .map(this::toNewTopic)
                .flatMap(this::createTopic)
                .doOnNext(topic -> existingTopics.put(topic.name(), true))
                .then();
    }

    /** Creates one topic; an already-existing topic is treated as success. */
    protected Mono<NewTopic> createTopic(NewTopic topic) {
        return Mono.fromFuture(adminClient.createTopics(List.of(topic))
                        .all()
                        .toCompletionStage()
                        .toCompletableFuture())
                .thenReturn(topic)
                // CompletableFuture failures can surface wrapped in a
                // CompletionException, in which case a class-based
                // onErrorResume(TopicExistsException.class, ...) would not
                // match; check the cause as well.
                .onErrorResume(TopologyCreator::isTopicExists, e -> Mono.just(topic));
    }

    /** True when the error, directly or as a cause, is TopicExistsException. */
    private static boolean isTopicExists(Throwable e) {
        return e instanceof TopicExistsException || e.getCause() instanceof TopicExistsException;
    }

    /** Maps a customization to the AdminClient's NewTopic representation. */
    protected NewTopic toNewTopic(TopicCustomization customization) {
        NewTopic topic = new NewTopic(customization.getTopic(), customization.getPartitions(), customization.getReplicationFactor());
        if (customization.getConfig() != null) {
            return topic.configs(customization.getConfig());
        }
        return topic;
    }

    /**
     * Fails fast with {@link TopicNotFoundException} when {@code topicName}
     * is not in the cache of existing topics.
     */
    public void checkTopic(String topicName) {
        if (!existingTopics.containsKey(topicName)) {
            throw new TopicNotFoundException("Topic not found: " + topicName + ". Please create it before sending a message.");
            // TODO: should refresh topics?? getTopics();
        }
    }
}

0 commit comments

Comments
 (0)