DBZ-409 Indentation fix

Gunnar Morling 2017-12-11 21:53:03 +01:00
parent d6dbf02f4c
commit 1cfc8c3596
2 changed files with 40 additions and 38 deletions

MongoDataConverter.java

@@ -8,23 +8,23 @@
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map.Entry;
+import org.apache.kafka.connect.data.Schema;
 import org.apache.kafka.connect.data.SchemaBuilder;
 import org.apache.kafka.connect.data.Struct;
 import org.bson.BsonDocument;
 import org.bson.BsonType;
 import org.bson.BsonValue;
-import org.slf4j.LoggerFactory;
 import org.slf4j.Logger;
-import org.apache.kafka.connect.data.Schema;
+import org.slf4j.LoggerFactory;

 /**
  * MongoDataConverter handles translating MongoDB strings to Kafka Connect schemas and row data to Kafka
  * Connect records.
+ *
  * @author Sairam Polavarapu
  */
 public class MongoDataConverter {
-    static SchemaBuilder builder = SchemaBuilder.struct();
     private static final Logger LOG = LoggerFactory.getLogger(MongoDataConverter.class);
@@ -189,7 +189,6 @@ public static void convertFieldValue(Entry<String, BsonValue> keyvalueforStruct,
     }
-
     public static void addFieldSchema(Entry<String, BsonValue> keyValuesforSchema, SchemaBuilder builder) {
         String key = keyValuesforSchema.getKey();
         BsonType type = keyValuesforSchema.getValue().getBsonType();
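For context on what this converter does: the two static hooks visible in this diff build a Kafka Connect schema and struct from a parsed BSON document, one top-level field at a time. Below is a minimal sketch of driving addFieldSchema(), whose full signature appears in the hunk above; the package for MongoDataConverter is assumed to be io.debezium.connector.mongodb.transforms (the same as the SMT in the next file), and the document literal is arbitrary.

    import java.util.Map.Entry;

    import org.apache.kafka.connect.data.Schema;
    import org.apache.kafka.connect.data.SchemaBuilder;
    import org.bson.BsonDocument;
    import org.bson.BsonValue;

    import io.debezium.connector.mongodb.transforms.MongoDataConverter;

    public class SchemaFromBsonSketch {
        public static void main(String[] args) {
            // Parse a MongoDB document from its JSON string representation.
            BsonDocument doc = BsonDocument.parse("{\"name\": \"debezium\", \"version\": 1}");

            // Feed each top-level field to addFieldSchema(), matching the
            // signature shown in the diff above.
            SchemaBuilder builder = SchemaBuilder.struct().name("sketch");
            for (Entry<String, BsonValue> entry : doc.entrySet()) {
                MongoDataConverter.addFieldSchema(entry, builder);
            }
            Schema schema = builder.build();
            System.out.println(schema.fields());
        }
    }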

StringToJSON.java

@@ -5,6 +5,12 @@
  */
 package io.debezium.connector.mongodb.transforms;

+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+
 import org.apache.kafka.common.config.ConfigDef;
 import org.apache.kafka.connect.connector.ConnectRecord;
 import org.apache.kafka.connect.data.Schema;
@@ -20,24 +26,19 @@
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.node.ObjectNode;

-import java.io.IOException;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Set;
-import java.util.Map.Entry;
-
 /**
  * Debezium Mongo Connector generates the CDC records in String format. Sink connectors usually are not able to parse
  * the string and insert the document as it is represented in the Source. so a user use this SMT to parse the String
- * and insert the MongoDB document in the JSON format..
+ * and insert the MongoDB document in the JSON format.
+ *
  * @param <R> the subtype of {@link ConnectRecord} on which this transformation will operate
- * @author Sairam Polavarapu.
+ * @author Sairam Polavarapu
  */
 public class StringToJSON<R extends ConnectRecord<R>> implements Transformation<R> {

-    final ExtractField<R> afterExtractor = new ExtractField.Value<R>();
-    final ExtractField<R> patchExtractor = new ExtractField.Value<R>();
-    final ExtractField<R> keyExtractor = new ExtractField.Key<R>();
+    private final ExtractField<R> afterExtractor = new ExtractField.Value<R>();
+    private final ExtractField<R> patchExtractor = new ExtractField.Value<R>();
+    private final ExtractField<R> keyExtractor = new ExtractField.Key<R>();

     @Override
     public R apply(R r) {
@@ -54,6 +55,7 @@ public R apply(R r) {
         final R key = keyExtractor.apply(r);
         ObjectNode patchEventWKey = mapper.createObjectNode();
         JsonNode patchEvent = null;
+
         try {
             patchEvent = mapper.readTree(patchRecord.value().toString());
         } catch (JsonProcessingException e) {
@@ -128,6 +130,7 @@ public ConfigDef config() {
         return new ConfigDef();
     }

+    @Override
     public void close() {
     }
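As a usage note for the SMT itself: Kafka Connect transforms are registered in the connector configuration by fully qualified class name. A minimal sketch, assuming the class keeps the io.debezium.connector.mongodb.transforms.StringToJSON name shown in this diff (the alias parsejson is arbitrary):

    # Register the transform under an arbitrary alias and point it at the class.
    transforms=parsejson
    transforms.parsejson.type=io.debezium.connector.mongodb.transforms.StringToJSON

Connect instantiates the class and runs every record through apply(); config() and close(), both visible in the hunks above, complete the Transformation contract.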