Skip to content

Commit 96efd3f

Browse files
garyrussell authored and artembilan committed
GH-1494: Fix NPE in DeadLetterPublishingRecoverer
Resolves #1494
1 parent 0281b77 commit 96efd3f

File tree

3 files changed

+57
-6
lines changed

3 files changed

+57
-6
lines changed

spring-kafka/src/main/java/org/springframework/kafka/listener/DeadLetterPublishingRecoverer.java

Lines changed: 16 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -129,7 +129,9 @@ public DeadLetterPublishingRecoverer(KafkaTemplate<? extends Object, ? extends O
129129
* original topic. The templates map keys are classes and the value the corresponding
130130
* template to use for objects (producer record values) of that type. A
131131
* {@link java.util.LinkedHashMap} is recommended when there is more than one
132-
* template, to ensure the map is traversed in order.
132+
* template, to ensure the map is traversed in order. To send records with a null
133+
* value, add a template with the {@link Void} class as a key; otherwise the first
134+
* template from the map values iterator will be used.
133135
* @param templates the {@link KafkaOperations}s to use for publishing.
134136
*/
135137
public DeadLetterPublishingRecoverer(Map<Class<?>, KafkaOperations<? extends Object, ? extends Object>> templates) {
@@ -143,7 +145,9 @@ public DeadLetterPublishingRecoverer(Map<Class<?>, KafkaOperations<? extends Obj
143145
* 0, no partition is set when publishing to the topic. The templates map keys are
144146
* classes and the value the corresponding template to use for objects (producer
145147
* record values) of that type. A {@link java.util.LinkedHashMap} is recommended when
146-
* there is more than one template, to ensure the map is traversed in order.
148+
* there is more than one template, to ensure the map is traversed in order. To send
149+
* records with a null value, add a template with the {@link Void} class as a key;
150+
* otherwise the first template from the map values iterator will be used.
147151
* @param templates the {@link KafkaOperations}s to use for publishing.
148152
* @param destinationResolver the resolving function.
149153
*/
@@ -212,10 +216,19 @@ public void accept(ConsumerRecord<?, ?> record, Exception exception) {
212216
}
213217

214218
@SuppressWarnings("unchecked")
215-
private KafkaOperations<Object, Object> findTemplateForValue(Object value) {
219+
private KafkaOperations<Object, Object> findTemplateForValue(@Nullable Object value) {
216220
if (this.template != null) {
217221
return this.template;
218222
}
223+
if (value == null) {
224+
KafkaOperations<?, ?> operations = this.templates.get(Void.class);
225+
if (operations == null) {
226+
return (KafkaOperations<Object, Object>) this.templates.values().iterator().next();
227+
}
228+
else {
229+
return (KafkaOperations<Object, Object>) operations;
230+
}
231+
}
219232
Optional<Class<?>> key = this.templates.keySet()
220233
.stream()
221234
.filter((k) -> k.isAssignableFrom(value.getClass()))

spring-kafka/src/test/java/org/springframework/kafka/listener/DeadLetterPublishingRecovererTests.java

Lines changed: 39 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -29,6 +29,8 @@
2929
import java.io.IOException;
3030
import java.io.ObjectOutputStream;
3131
import java.io.UncheckedIOException;
32+
import java.util.HashMap;
33+
import java.util.Map;
3234

3335
import org.apache.kafka.clients.consumer.ConsumerRecord;
3436
import org.apache.kafka.clients.producer.ProducerRecord;
@@ -52,38 +54,41 @@
5254
*/
5355
public class DeadLetterPublishingRecovererTests {
5456

55-
@SuppressWarnings("unchecked")
57+
@SuppressWarnings({ "unchecked", "rawtypes" })
5658
@Test
5759
void testTxNoTx() {
5860
KafkaOperations<?, ?> template = mock(KafkaOperations.class);
5961
given(template.isTransactional()).willReturn(true);
6062
given(template.inTransaction()).willReturn(false);
6163
given(template.isAllowNonTransactional()).willReturn(true);
64+
given(template.send(any(ProducerRecord.class))).willReturn(new SettableListenableFuture());
6265
DeadLetterPublishingRecoverer recoverer = new DeadLetterPublishingRecoverer(template);
6366
ConsumerRecord<String, String> record = new ConsumerRecord<>("foo", 0, 0L, "bar", "baz");
6467
recoverer.accept(record, new RuntimeException());
6568
verify(template, never()).executeInTransaction(any());
6669
verify(template).send(any(ProducerRecord.class));
6770
}
6871

69-
@SuppressWarnings("unchecked")
72+
@SuppressWarnings({ "unchecked", "rawtypes" })
7073
@Test
7174
void testTxExisting() {
7275
KafkaOperations<?, ?> template = mock(KafkaOperations.class);
7376
given(template.isTransactional()).willReturn(true);
7477
given(template.inTransaction()).willReturn(true);
78+
given(template.send(any(ProducerRecord.class))).willReturn(new SettableListenableFuture());
7579
DeadLetterPublishingRecoverer recoverer = new DeadLetterPublishingRecoverer(template);
7680
ConsumerRecord<String, String> record = new ConsumerRecord<>("foo", 0, 0L, "bar", "baz");
7781
recoverer.accept(record, new RuntimeException());
7882
verify(template, never()).executeInTransaction(any());
7983
verify(template).send(any(ProducerRecord.class));
8084
}
8185

82-
@SuppressWarnings("unchecked")
86+
@SuppressWarnings({ "unchecked", "rawtypes" })
8387
@Test
8488
void testNonTx() {
8589
KafkaOperations<?, ?> template = mock(KafkaOperations.class);
8690
given(template.isTransactional()).willReturn(false);
91+
given(template.send(any(ProducerRecord.class))).willReturn(new SettableListenableFuture());
8792
DeadLetterPublishingRecoverer recoverer = new DeadLetterPublishingRecoverer(template);
8893
ConsumerRecord<String, String> record = new ConsumerRecord<>("foo", 0, 0L, "bar", "baz");
8994
recoverer.accept(record, new RuntimeException());
@@ -103,6 +108,7 @@ void testTxNewTx() {
103108
((OperationsCallback) inv.getArgument(0)).doInOperations(template);
104109
return null;
105110
}).given(template).executeInTransaction(any());
111+
given(template.send(any(ProducerRecord.class))).willReturn(new SettableListenableFuture());
106112
DeadLetterPublishingRecoverer recoverer = new DeadLetterPublishingRecoverer(template);
107113
ConsumerRecord<String, String> record = new ConsumerRecord<>("foo", 0, 0L, "bar", "baz");
108114
recoverer.accept(record, new RuntimeException());
@@ -166,6 +172,36 @@ void headersNotStripped() {
166172
assertThat(headers.lastHeader(ErrorHandlingDeserializer.KEY_DESERIALIZER_EXCEPTION_HEADER)).isNotNull();
167173
}
168174

175+
@SuppressWarnings({ "unchecked", "rawtypes" })
176+
@Test
177+
void tombstoneWithMultiTemplates() {
178+
KafkaOperations<?, ?> template1 = mock(KafkaOperations.class);
179+
given(template1.send(any(ProducerRecord.class))).willReturn(new SettableListenableFuture());
180+
KafkaOperations<?, ?> template2 = mock(KafkaOperations.class);
181+
Map<Class<?>, KafkaOperations<?, ?>> templates = new HashMap<>();
182+
templates.put(String.class, template1);
183+
templates.put(Integer.class, template2);
184+
DeadLetterPublishingRecoverer recoverer = new DeadLetterPublishingRecoverer(templates);
185+
ConsumerRecord<String, String> record = new ConsumerRecord<>("foo", 0, 0L, "bar", null);
186+
recoverer.accept(record, new RuntimeException());
187+
verify(template1).send(any(ProducerRecord.class));
188+
}
189+
190+
@SuppressWarnings({ "unchecked", "rawtypes" })
191+
@Test
192+
void tombstoneWithMultiTemplatesExplicit() {
193+
KafkaOperations<?, ?> template1 = mock(KafkaOperations.class);
194+
KafkaOperations<?, ?> template2 = mock(KafkaOperations.class);
195+
given(template2.send(any(ProducerRecord.class))).willReturn(new SettableListenableFuture());
196+
Map<Class<?>, KafkaOperations<?, ?>> templates = new HashMap<>();
197+
templates.put(String.class, template1);
198+
templates.put(Void.class, template2);
199+
DeadLetterPublishingRecoverer recoverer = new DeadLetterPublishingRecoverer(templates);
200+
ConsumerRecord<String, String> record = new ConsumerRecord<>("foo", 0, 0L, "bar", null);
201+
recoverer.accept(record, new RuntimeException());
202+
verify(template2).send(any(ProducerRecord.class));
203+
}
204+
169205
private byte[] header(boolean isKey) {
170206
ByteArrayOutputStream baos = new ByteArrayOutputStream();
171207
try {

src/reference/asciidoc/kafka.adoc

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -4859,6 +4859,8 @@ public DeadLetterPublishingRecoverer publisher(KafkaTemplate<?, ?> stringTemplat
48594859
The publisher uses the map keys to locate a template that is suitable for the `value()` about to be published.
48604860
A `LinkedHashMap` is recommended so that the keys are examined in order.
48614861

4862+
When publishing `null` values, when there are multiple templates, the recoverer will look for a template for the `Void` class; if none is present, the first template from the `values().iterator()` will be used.
4863+
48624864
IMPORTANT: If the recoverer fails (throws an exception), the record will be included in the seeks and recovery will be attempted again during the next delivery.
48634865

48644866
Starting with version 2.3, the recoverer can also be used with Kafka Streams - see <<streams-deser-recovery>> for more information.

0 commit comments

Comments (0)