DBZ-1604 Documentation update

This commit is contained in:
Gunnar Morling 2019-11-08 10:18:43 +01:00
parent 884def88d7
commit 2c86b1c84f
4 changed files with 34 additions and 11 deletions

View File

@ -132,3 +132,4 @@ WenZe Hu
William Pursell William Pursell
Willie Cheong Willie Cheong
Wout Scheepers Wout Scheepers
Yang Yang

View File

@ -5,6 +5,9 @@
*/ */
package io.debezium.converters; package io.debezium.converters;
import java.nio.ByteBuffer;
import java.util.Map;
import org.apache.kafka.common.config.ConfigDef; import org.apache.kafka.common.config.ConfigDef;
import org.apache.kafka.connect.data.Schema; import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.SchemaAndValue; import org.apache.kafka.connect.data.SchemaAndValue;
@ -13,9 +16,6 @@
import org.apache.kafka.connect.storage.ConverterConfig; import org.apache.kafka.connect.storage.ConverterConfig;
import org.apache.kafka.connect.storage.HeaderConverter; import org.apache.kafka.connect.storage.HeaderConverter;
import java.nio.ByteBuffer;
import java.util.Map;
/** /**
* A customized value converter to allow avro message to be delivered as it is (byte[]) to kafka, this is used * A customized value converter to allow avro message to be delivered as it is (byte[]) to kafka, this is used
* for outbox pattern where payload is serialized by KafkaAvroSerializer, the consumer needs to get the deserialized payload. * for outbox pattern where payload is serialized by KafkaAvroSerializer, the consumer needs to get the deserialized payload.

View File

@ -6,18 +6,18 @@
package io.debezium.converters; package io.debezium.converters;
import org.apache.kafka.connect.data.Schema; import static junit.framework.TestCase.fail;
import org.apache.kafka.connect.data.SchemaAndValue; import static org.fest.assertions.Assertions.assertThat;
import org.apache.kafka.connect.errors.DataException;
import org.junit.Before;
import org.junit.Test;
import java.nio.ByteBuffer; import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets; import java.nio.charset.StandardCharsets;
import java.util.Collections; import java.util.Collections;
import static junit.framework.TestCase.fail; import org.apache.kafka.connect.data.Schema;
import static org.fest.assertions.Assertions.assertThat; import org.apache.kafka.connect.data.SchemaAndValue;
import org.apache.kafka.connect.errors.DataException;
import org.junit.Before;
import org.junit.Test;
public class ByteBufferConverterTest { public class ByteBufferConverterTest {

View File

@ -201,3 +201,25 @@ See option `table.fields.additional.placement` for more details.
transforms=outbox,... transforms=outbox,...
transforms.outbox.type=io.debezium.transforms.outbox.EventRouter transforms.outbox.type=io.debezium.transforms.outbox.EventRouter
---- ----
=== Using Avro as the payload format
The outbox routing SMT supports arbitrary payload formats, as the payload column value is passed on transparently.
As an alternative to working with JSON as shown above, it's therefore also possible to use Avro.
This can be beneficial for the purposes of message format governance and making sure outbox event schemas evolve in a backwards-compatible way.
How a source application produces Avro messages as an outbox event payload is out of the scope of this documentation.
One possibility could be to leverage the `KafkaAvroSerializer` class and use it to serialize `GenericRecord` instances.
In order to ensure that the Kafka message value is the exact Avro binary data,
apply the following configuration to the connector:
[source]
----
transforms=outbox,...
transforms.outbox.type=io.debezium.transforms.outbox.EventRouter
transforms.outbox.table.fields.additional.placement=type:header:eventType
value.converter=io.debezium.converters.ByteBufferConverter
----
This moves the `eventType` value into a Kafka message header, leaving the `payload` column value (the Avro data) as the sole message value.
Using `ByteBufferConverter` as the value converter will propagate that value as-is into the Kafka message value.