Merge pull request #6 from rhauch/dbz-1

DBZ-1 Added the MySQL source connector
This commit is contained in:
Randall Hauch 2016-02-02 15:02:58 -06:00
commit baaf2ee6e9
91 changed files with 10616 additions and 376 deletions

View File

@ -1,5 +1,14 @@
## Change log # Change log
All notable changes are documented in this file. Release numbers follow [Semantic Versioning](http://semver.org) All notable changes are documented in this file. Release numbers follow [Semantic Versioning](http://semver.org)
### Unreleased ## Unreleased
### Added
* MySQL connector for ingesting change events from MySQL databases ([DBZ-1](https://issues.jboss.org/projects/DBZ/issues/DBZ-1))
* Simple DDL parsing framework that can be extended and used by various connectors (as part of [DBZ-1](https://issues.jboss.org/projects/DBZ/issues/DBZ-1))
### Changed
### Fixed

View File

@ -25,6 +25,10 @@
<groupId>org.apache.kafka</groupId> <groupId>org.apache.kafka</groupId>
<artifactId>connect-api</artifactId> <artifactId>connect-api</artifactId>
</dependency> </dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-core</artifactId>
</dependency>
<!-- Testing --> <!-- Testing -->
<dependency> <dependency>
<groupId>org.slf4j</groupId> <groupId>org.slf4j</groupId>

View File

@ -36,7 +36,9 @@
/** /**
* An immutable representation of a Debezium configuration. A {@link Configuration} instance can be obtained * An immutable representation of a Debezium configuration. A {@link Configuration} instance can be obtained
* {@link #from(Properties) from Properties} or loaded from a {@link #load(File) file}, {@link #load(InputStream) stream}, * {@link #from(Properties) from Properties} or loaded from a {@link #load(File) file}, {@link #load(InputStream) stream},
* {@link #load(Reader) reader}, {@link #load(URL) URL}, or from a {@link #load(String, ClassLoader) resource on the classpath}. * {@link #load(Reader) reader}, {@link #load(URL) URL}, or {@link #load(String, ClassLoader) classpath resource}. They can
* also be built by first {@link #create() creating a builder} and then using that builder to populate and
* {@link Builder#build() return} the immutable Configuration instance.
* <p> * <p>
* A Configuration object is basically a decorator around a {@link Properties} object. It has methods to get and convert * A Configuration object is basically a decorator around a {@link Properties} object. It has methods to get and convert
* individual property values to numeric, boolean and String types, optionally using a default value if the given property value * individual property values to numeric, boolean and String types, optionally using a default value if the given property value
@ -48,51 +50,6 @@
@Immutable @Immutable
public interface Configuration { public interface Configuration {
public static Field field(String name, String description) {
return new Field(name, description, null);
}
public static Field field(String name, String description, String defaultValue) {
return new Field(name, description, defaultValue);
}
public static Field field(String name, String description, int defaultValue) {
return new Field(name, description, Integer.toString(defaultValue));
}
public static Field field(String name, String description, long defaultValue) {
return new Field(name, description, Long.toString(defaultValue));
}
public static Field field(String name, String description, boolean defaultValue) {
return new Field(name, description, Boolean.toString(defaultValue));
}
public static class Field {
private final String name;
private final String desc;
private final String defaultValue;
public Field(String name, String description, String defaultValue) {
this.name = name;
this.desc = description;
this.defaultValue = defaultValue;
assert this.name != null;
}
public String name() {
return name;
}
public String defaultValue() {
return defaultValue;
}
public String description() {
return desc;
}
}
/** /**
* The basic interface for configuration builders. * The basic interface for configuration builders.
* *
@ -164,6 +121,70 @@ default B with(String key, boolean value) {
return with(key, Boolean.toString(value)); return with(key, Boolean.toString(value));
} }
/**
 * If there is no field with the specified key, then associate the given value with the specified key.
 *
 * @param key the key
 * @param value the value to record as the default for the key
 * @return this builder object so methods can be chained together; never null
 */
B withDefault(String key, String value);
/**
 * If there is no field with the specified key, then associate the given value with the specified key.
 *
 * @param key the key
 * @param value the value to record as the default for the key
 * @return this builder object so methods can be chained together; never null
 */
default B withDefault(String key, int value) {
    return withDefault(key, String.valueOf(value));
}

/**
 * If there is no field with the specified key, then associate the given value with the specified key.
 *
 * @param key the key
 * @param value the value to record as the default for the key
 * @return this builder object so methods can be chained together; never null
 */
default B withDefault(String key, float value) {
    return withDefault(key, String.valueOf(value));
}

/**
 * If there is no field with the specified key, then associate the given value with the specified key.
 *
 * @param key the key
 * @param value the value to record as the default for the key
 * @return this builder object so methods can be chained together; never null
 */
default B withDefault(String key, double value) {
    return withDefault(key, String.valueOf(value));
}

/**
 * If there is no field with the specified key, then associate the given value with the specified key.
 *
 * @param key the key
 * @param value the value to record as the default for the key
 * @return this builder object so methods can be chained together; never null
 */
default B withDefault(String key, long value) {
    return withDefault(key, String.valueOf(value));
}

/**
 * If there is no field with the specified key, then associate the given value with the specified key.
 *
 * @param key the key
 * @param value the value to record as the default for the key
 * @return this builder object so methods can be chained together; never null
 */
default B withDefault(String key, boolean value) {
    return withDefault(key, String.valueOf(value));
}
/** /**
* Associate the given value with the key of the specified field. * Associate the given value with the key of the specified field.
* *
@ -172,7 +193,7 @@ default B with(String key, boolean value) {
* @return this builder object so methods can be chained together; never null * @return this builder object so methods can be chained together; never null
*/ */
default B with(Field field, String value) { default B with(Field field, String value) {
return with(field.name(),value); return with(field.name(), value);
} }
/** /**
@ -183,7 +204,7 @@ default B with(Field field, String value) {
* @return this builder object so methods can be chained together; never null * @return this builder object so methods can be chained together; never null
*/ */
default B with(Field field, int value) { default B with(Field field, int value) {
return with(field.name(),value); return with(field.name(), value);
} }
/** /**
@ -194,7 +215,7 @@ default B with(Field field, int value) {
* @return this builder object so methods can be chained together; never null * @return this builder object so methods can be chained together; never null
*/ */
default B with(Field field, float value) { default B with(Field field, float value) {
return with(field.name(),value); return with(field.name(), value);
} }
/** /**
@ -205,7 +226,7 @@ default B with(Field field, float value) {
* @return this builder object so methods can be chained together; never null * @return this builder object so methods can be chained together; never null
*/ */
default B with(Field field, double value) { default B with(Field field, double value) {
return with(field.name(),value); return with(field.name(), value);
} }
/** /**
@ -216,7 +237,7 @@ default B with(Field field, double value) {
* @return this builder object so methods can be chained together; never null * @return this builder object so methods can be chained together; never null
*/ */
default B with(Field field, long value) { default B with(Field field, long value) {
return with(field.name(),value); return with(field.name(), value);
} }
/** /**
@ -227,9 +248,83 @@ default B with(Field field, long value) {
* @return this builder object so methods can be chained together; never null * @return this builder object so methods can be chained together; never null
*/ */
default B with(Field field, boolean value) { default B with(Field field, boolean value) {
return with(field.name(),value); return with(field.name(), value);
} }
/**
 * If the field does not have a value, then associate the given value with the key of the specified field.
 *
 * @param field the predefined field for the key
 * @param value the value to record as the default for the field
 * @return this builder object so methods can be chained together; never null
 */
default B withDefault(Field field, String value) {
    return withDefault(field.name(), value);
}

/**
 * If the field does not have a value, then associate the given value with the key of the specified field.
 *
 * @param field the predefined field for the key
 * @param value the value to record as the default for the field
 * @return this builder object so methods can be chained together; never null
 */
default B withDefault(Field field, int value) {
    return withDefault(field.name(), value);
}

/**
 * If the field does not have a value, then associate the given value with the key of the specified field.
 *
 * @param field the predefined field for the key
 * @param value the value to record as the default for the field
 * @return this builder object so methods can be chained together; never null
 */
default B withDefault(Field field, float value) {
    return withDefault(field.name(), value);
}

/**
 * If the field does not have a value, then associate the given value with the key of the specified field.
 *
 * @param field the predefined field for the key
 * @param value the value to record as the default for the field
 * @return this builder object so methods can be chained together; never null
 */
default B withDefault(Field field, double value) {
    return withDefault(field.name(), value);
}

/**
 * If the field does not have a value, then associate the given value with the key of the specified field.
 *
 * @param field the predefined field for the key
 * @param value the value to record as the default for the field
 * @return this builder object so methods can be chained together; never null
 */
default B withDefault(Field field, long value) {
    return withDefault(field.name(), value);
}

/**
 * If the field does not have a value, then associate the given value with the key of the specified field.
 *
 * @param field the predefined field for the key
 * @param value the value to record as the default for the field
 * @return this builder object so methods can be chained together; never null
 */
default B withDefault(Field field, boolean value) {
    return withDefault(field.name(), value);
}
/**
 * Apply the function to this builder.
 *
 * @param function the function that operates upon this builder; may not be null
 * @return this builder object so methods can be chained together; never null
 */
B apply(Consumer<B> function);
/** /**
* Build and return the immutable configuration. * Build and return the immutable configuration.
* *
@ -257,6 +352,20 @@ public Builder with(String key, String value) {
return this; return this;
} }
@Override
public Builder withDefault(String key, String value) {
    // Record the value only when no entry for the key already exists; an existing
    // value always wins over a default.
    props.putIfAbsent(key, value);
    return this;
}
@Override
public Builder apply(Consumer<Builder> function) {
    // Let the caller-supplied function mutate this builder in place, then keep chaining.
    function.accept(this);
    return this;
}
@Override @Override
public Configuration build() { public Configuration build() {
return Configuration.from(props); return Configuration.from(props);
@ -474,6 +583,14 @@ public static Configuration load(String path, ClassLoader classLoader, Consumer<
} }
} }
/**
 * Obtain an editor for a copy of this configuration. Changes made via the returned builder do not
 * affect this (immutable) configuration instance.
 *
 * @return a builder that is populated with this configuration's key-value pairs; never null
 */
default Builder edit() {
    return copy(this);
}
/** /**
* Determine whether this configuration contains a key-value pair with the given key and the value is non-null * Determine whether this configuration contains a key-value pair with the given key and the value is non-null
* *
@ -536,6 +653,19 @@ default String getString(Field field) {
return getString(field.name(), field.defaultValue()); return getString(field.name(), field.defaultValue());
} }
/**
 * Get the string value associated with the given field, returning the supplied default value if there is
 * no such key-value pair in this configuration.
 *
 * @param field the field; may not be null
 * @param defaultValue the value to return when the configuration has no value for the field
 * @return the configuration's value for the field, or {@code defaultValue} if there is no such key-value
 *         pair in the configuration
 */
default String getString(Field field, String defaultValue) {
    // Bug fix: the supplied defaultValue was previously ignored and the field's own
    // default value was used instead, making this overload indistinguishable from getString(Field).
    return getString(field.name(), () -> defaultValue);
}
/** /**
* Get the string value(s) associated with the given key, where the supplied regular expression is used to parse the single * Get the string value(s) associated with the given key, where the supplied regular expression is used to parse the single
* string value into multiple values. * string value into multiple values.
@ -693,9 +823,10 @@ default Boolean getBoolean(String key, BooleanSupplier defaultValueSupplier) {
* @return the integer value, or null if the key is null, there is no such key-value pair in the configuration and there is * @return the integer value, or null if the key is null, there is no such key-value pair in the configuration and there is
* no default value in the field or the default value could not be parsed as a long, or there is a key-value pair in * no default value in the field or the default value could not be parsed as a long, or there is a key-value pair in
* the configuration but the value could not be parsed as an integer value * the configuration but the value could not be parsed as an integer value
* @throws NumberFormatException if there is no name-value pair and the field has no default value
*/ */
default int getInteger(Field field) { default int getInteger(Field field) {
return getInteger(field.name(), Integer.valueOf(field.defaultValue())); return getInteger(field.name(), ()->Integer.valueOf(field.defaultValue())).intValue();
} }
/** /**
@ -706,9 +837,52 @@ default int getInteger(Field field) {
* @return the integer value, or null if the key is null, there is no such key-value pair in the configuration and there is * @return the integer value, or null if the key is null, there is no such key-value pair in the configuration and there is
* no default value in the field or the default value could not be parsed as a long, or there is a key-value pair in * no default value in the field or the default value could not be parsed as a long, or there is a key-value pair in
* the configuration but the value could not be parsed as a long value * the configuration but the value could not be parsed as a long value
* @throws NumberFormatException if there is no name-value pair and the field has no default value
*/ */
default long getLong(Field field) { default long getLong(Field field) {
return getLong(field.name(), Long.valueOf(field.defaultValue())); return getLong(field.name(), ()->Long.valueOf(field.defaultValue())).longValue();
}
/**
 * Get the boolean value associated with the given field, returning the field's default value when the
 * configuration has no name-value pair with the same name as the field.
 *
 * @param field the field
 * @return the boolean value for the field, or the boolean form of the field's default value if there is no
 *         such key-value pair; note that {@link Boolean#valueOf(String)} yields {@code false} for any
 *         unrecognized (or null) default, so no exception is thrown when the field has no default
 */
default boolean getBoolean(Field field) {
    return getBoolean(field.name(), ()->Boolean.valueOf(field.defaultValue())).booleanValue();
}
/**
 * Get the integer value associated with the given field, returning the supplied default value if there is
 * no such key-value pair.
 *
 * @param field the field
 * @param defaultValue the value to use when the configuration has no value for the field
 * @return the integer value for the field, or {@code defaultValue} if there is no such key-value pair
 */
default int getInteger(Field field, int defaultValue) {
    return getInteger(field.name(), defaultValue);
}
/**
 * Get the long value associated with the given field, returning the supplied default value if there is
 * no such key-value pair.
 *
 * @param field the field
 * @param defaultValue the value to use when the configuration has no value for the field
 * @return the long value for the field, or {@code defaultValue} if there is no such key-value pair
 */
default long getLong(Field field, long defaultValue) {
    return getLong(field.name(), defaultValue);
}
/** /**
@ -716,44 +890,91 @@ default long getLong(Field field) {
* key-value pair. * key-value pair.
* *
* @param field the field * @param field the field
* @param defaultValue the default value
* @return the boolean value, or null if the key is null, there is no such key-value pair in the configuration and there is * @return the boolean value, or null if the key is null, there is no such key-value pair in the configuration and there is
* no default value in the field or the default value could not be parsed as a long, or there is a key-value pair in * no default value in the field or the default value could not be parsed as a long, or there is a key-value pair in
* the configuration but the value could not be parsed as a boolean value * the configuration but the value could not be parsed as a boolean value
*/ */
default boolean getBoolean(Field field) { default boolean getBoolean(Field field, boolean defaultValue) {
return getBoolean(field.name(), Boolean.valueOf(field.defaultValue())); return getBoolean(field.name(), defaultValue);
} }
/** /**
* Get an instance of the class given by the value in the configuration associated with the given key. * Get an instance of the class given by the value in the configuration associated with the given key.
* *
* @param key the key for the configuration property * @param key the key for the configuration property
* @param clazz the Class of which the resulting object is expected to be an instance of; may not be null * @param type the Class of which the resulting object is expected to be an instance of; may not be null
* @return the new instance, or null if there is no such key-value pair in the configuration or if there is a key-value * @return the new instance, or null if there is no such key-value pair in the configuration or if there is a key-value
* configuration but the value could not be converted to an existing class with a zero-argument constructor * configuration but the value could not be converted to an existing class with a zero-argument constructor
*/ */
default <T> T getInstance(String key, Class<T> clazz) { default <T> T getInstance(String key, Class<T> type) {
return getInstance(key, clazz, () -> getClass().getClassLoader()); return getInstance(key, type, () -> getClass().getClassLoader());
} }
/** /**
* Get an instance of the class given by the value in the configuration associated with the given key. * Get an instance of the class given by the value in the configuration associated with the given key.
* *
* @param key the key for the configuration property * @param key the key for the configuration property
* @param clazz the Class of which the resulting object is expected to be an instance of; may not be null * @param type the Class of which the resulting object is expected to be an instance of; may not be null
* @param classloaderSupplier the supplier of the ClassLoader to be used to load the resulting class; may be null if this * @param classloaderSupplier the supplier of the ClassLoader to be used to load the resulting class; may be null if this
* class' ClassLoader should be used * class' ClassLoader should be used
* @return the new instance, or null if there is no such key-value pair in the configuration or if there is a key-value * @return the new instance, or null if there is no such key-value pair in the configuration or if there is a key-value
* configuration but the value could not be converted to an existing class with a zero-argument constructor * configuration but the value could not be converted to an existing class with a zero-argument constructor
*/ */
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
default <T> T getInstance(String key, Class<T> clazz, Supplier<ClassLoader> classloaderSupplier) { default <T> T getInstance(String key, Class<T> type, Supplier<ClassLoader> classloaderSupplier) {
String className = getString(key); String className = getString(key);
if (className != null) { if (className != null) {
ClassLoader classloader = classloaderSupplier != null ? classloaderSupplier.get() : getClass().getClassLoader(); ClassLoader classloader = classloaderSupplier != null ? classloaderSupplier.get() : getClass().getClassLoader();
try { try {
return (T) classloader.loadClass(className); Class<? extends T> clazz = (Class<? extends T>)classloader.loadClass(className);
return clazz.newInstance();
} catch (ClassNotFoundException e) { } catch (ClassNotFoundException e) {
LoggerFactory.getLogger(getClass()).error("Unable to find class {}",className,e);
} catch (InstantiationException e) {
LoggerFactory.getLogger(getClass()).error("Unable to instantiate class {}",className,e);
} catch (IllegalAccessException e) {
LoggerFactory.getLogger(getClass()).error("Unable to access class {}",className,e);
}
}
return null;
}
/**
 * Get an instance of the class given by the value in the configuration associated with the given field.
 * Uses this class' ClassLoader; see the 3-argument overload to supply a different one.
 *
 * @param field the field for the configuration property
 * @param clazz the Class of which the resulting object is expected to be an instance; may not be null
 * @return the new instance, or null if there is no such key-value pair in the configuration or if the
 *         value could not be loaded and instantiated via a zero-argument constructor
 */
default <T> T getInstance(Field field, Class<T> clazz) {
    return getInstance(field, clazz, () -> getClass().getClassLoader());
}
/**
* Get an instance of the class given by the value in the configuration associated with the given field.
*
* @param field the field for the configuration property
* @param type the Class of which the resulting object is expected to be an instance of; may not be null
* @param classloaderSupplier the supplier of the ClassLoader to be used to load the resulting class; may be null if this
* class' ClassLoader should be used
* @return the new instance, or null if there is no such key-value pair in the configuration or if there is a key-value
* configuration but the value could not be converted to an existing class with a zero-argument constructor
*/
@SuppressWarnings("unchecked")
default <T> T getInstance(Field field, Class<T> type, Supplier<ClassLoader> classloaderSupplier) {
String className = getString(field);
if (className != null) {
ClassLoader classloader = classloaderSupplier != null ? classloaderSupplier.get() : getClass().getClassLoader();
try {
Class<? extends T> clazz = (Class<? extends T>)classloader.loadClass(className);
return clazz.newInstance();
} catch (ClassNotFoundException e) {
LoggerFactory.getLogger(getClass()).error("Unable to find class {}",className,e);
} catch (InstantiationException e) {
LoggerFactory.getLogger(getClass()).error("Unable to instantiate class {}",className,e);
} catch (IllegalAccessException e) {
LoggerFactory.getLogger(getClass()).error("Unable to access class {}",className,e);
} }
} }
return null; return null;
@ -858,6 +1079,20 @@ default Properties asProperties() {
return props; return props;
} }
/**
 * Get a copy of these configuration properties as a Map.
 *
 * @return the map of key-value pairs; never null, but possibly empty
 */
default Map<String, String> asMap() {
    Map<String, String> props = new HashMap<>();
    keys().forEach(key -> {
        if (key == null) return; // skip before the lookup so getString(null) is never called
        String value = getString(key);
        if (value != null) props.put(key, value);
    });
    return props;
}
/** /**
* Return a copy of this configuration except where acceptable system properties are used to overwrite properties copied from * Return a copy of this configuration except where acceptable system properties are used to overwrite properties copied from
* this configuration. All system properties whose name has the given prefix are added, where the prefix is removed from the * this configuration. All system properties whose name has the given prefix are added, where the prefix is removed from the
@ -903,4 +1138,33 @@ default Configuration withSystemProperties(Function<String, String> propertyName
} }
return from(props); return from(props);
} }
/**
 * Validate the supplied fields in this configuration.
 *
 * @param fields the fields to validate
 * @param problems the consumer to be called with each problem; never null
 * @return {@code true} if every field is valid, or {@code false} if at least one is not
 */
default boolean validate(Iterable<Field> fields, Consumer<String> problems) {
    boolean valid = true;
    for (Field field : fields) {
        // Deliberately no short-circuit: every field is validated so ALL problems are reported.
        valid &= field.validate(this, problems);
    }
    return valid;
}
/**
 * Validate the supplied fields in this configuration.
 *
 * @param fields the fields to validate
 * @param problems the consumer to be called with each problem; never null
 * @return {@code true} if every field is valid, or {@code false} if at least one is not
 */
default boolean validate(Field[] fields, Consumer<String> problems) {
    boolean allValid = true;
    for (int i = 0; i != fields.length; ++i) {
        // Validate first so every field is checked even after an earlier failure.
        allValid = fields[i].validate(this, problems) && allValid;
    }
    return allValid;
}
} }

View File

@ -0,0 +1,317 @@
/*
* Copyright Debezium Authors.
*
* Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
*/
package io.debezium.config;
import java.util.Objects;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.function.Predicate;
import io.debezium.annotation.Immutable;
/**
* An immutable definition of a field that make appear within a {@link Configuration} instance.
*
* @author Randall Hauch
*/
@Immutable
public final class Field {
/**
 * A functional interface that can be used to validate field values.
 */
public static interface Validator {
    /**
     * Validate the supplied value for the field, and report any problems to the designated consumer.
     *
     * @param config the configuration containing the field to be validated; may not be null
     * @param field the {@link Field} being validated; never null
     * @param problems the consumer to be called with a message describing each problem; never null
     * @return the number of problems that were found, or 0 if the value is valid
     */
    int validate(Configuration config, Field field, Consumer<String> problems);
}
/**
 * Create an immutable {@link Field} instance with the given property name and no description, default value,
 * or validator.
 *
 * @param name the name of the field; may not be null
 * @return the field; never null
 */
public static Field create(String name) {
    return create(name, null);
}

/**
 * Create an immutable {@link Field} instance with the given property name and description, but no default
 * value or validator.
 *
 * @param name the name of the field; may not be null
 * @param description the description
 * @return the field; never null
 */
public static Field create(String name, String description) {
    return create(name, description, (String) null);
}

/**
 * Create an immutable {@link Field} instance with the given property name, description, and default value.
 *
 * @param name the name of the field; may not be null
 * @param description the description
 * @param defaultValue the default value for the field
 * @return the field; never null
 */
public static Field create(String name, String description, String defaultValue) {
    return new Field(name, description, defaultValue, null);
}

/**
 * Create an immutable {@link Field} instance with the given property name, description, and default value.
 *
 * @param name the name of the field; may not be null
 * @param description the description
 * @param defaultValue the default value for the field, stored in string form
 * @return the field; never null
 */
public static Field create(String name, String description, int defaultValue) {
    return create(name, description, Integer.toString(defaultValue));
}

/**
 * Create an immutable {@link Field} instance with the given property name, description, and default value.
 *
 * @param name the name of the field; may not be null
 * @param description the description
 * @param defaultValue the default value for the field, stored in string form
 * @return the field; never null
 */
public static Field create(String name, String description, long defaultValue) {
    return create(name, description, Long.toString(defaultValue));
}

/**
 * Create an immutable {@link Field} instance with the given property name, description, and default value.
 *
 * @param name the name of the field; may not be null
 * @param description the description
 * @param defaultValue the default value for the field, stored in string form
 * @return the field; never null
 */
public static Field create(String name, String description, boolean defaultValue) {
    return create(name, description, Boolean.toString(defaultValue));
}
// The property name this field maps to; never null.
private final String name;
// Human-readable description; may be null.
private final String desc;
// String form of the default value; may be null when the field has no default.
private final String defaultValue;
// Validation logic; may be null when the field requires no validation.
private final Validator validator;

/**
 * Create a field definition.
 *
 * @param name the property name; may not be null
 * @param description the human-readable description; may be null
 * @param defaultValue the string form of the default value; may be null
 * @param validator the validation logic; may be null if no validation is needed
 * @throws NullPointerException if {@code name} is null
 */
protected Field(String name, String description, String defaultValue, Validator validator) {
    // requireNonNull both checks and returns the name, so the old redundant `assert` was removed.
    this.name = Objects.requireNonNull(name, "The field name is required");
    this.desc = description;
    this.defaultValue = defaultValue;
    this.validator = validator;
}
/**
 * Get the name of the field.
 *
 * @return the name; never null
 */
public String name() {
    return name;
}

/**
 * Get the default value of the field.
 *
 * @return the default value as a string; may be null when the field was created without a default
 *         (e.g. via {@link Field#create(String)})
 */
public String defaultValue() {
    return defaultValue;
}

/**
 * Get the description of the field.
 *
 * @return the description; may be null when the field was created without one
 */
public String description() {
    return desc;
}
/**
 * Validate the supplied value for this field, and report any problems to the designated consumer.
 *
 * @param config the field values keyed by their name; may not be null
 * @param problems the consumer to be called with each problem; never null
 * @return {@code true} if the value is considered valid, or {@code false} if it is not valid
 */
public boolean validate(Configuration config, Consumer<String> problems) {
    if (validator == null) {
        return true; // no validator means every value is acceptable
    }
    return validator.validate(config, this, problems) == 0;
}
/**
 * Create and return a new Field instance that is a copy of this field but with the given description.
 *
 * @param description the new description for the new field
 * @return the new field; never null
 */
public Field withDescription(String description) {
    // Bug fix: this previously passed description() — the OLD description — silently ignoring the parameter.
    // NOTE(review): like the other with* copy methods, the validator is not carried over (Field.create
    // always passes a null validator) — confirm that is intentional.
    return Field.create(name(), description, defaultValue);
}
/**
* Create and return a new Field instance that is a copy of this field but with the given default value.
*
* @param defaultValue the new default value for the new field
* @return the new field; never null
*/
public Field withDefault(String defaultValue) {
return Field.create(name(), description(), defaultValue);
}
/**
* Create and return a new Field instance that is a copy of this field but with the given default value.
*
* @param defaultValue the new default value for the new field
* @return the new field; never null
*/
public Field withDefault(boolean defaultValue) {
return Field.create(name(), description(), defaultValue);
}
/**
 * Create and return a new Field instance that is a copy of this field but with the given integer default value.
 *
 * @param defaultValue the new default value for the new field
 * @return the new field; never null
 */
public Field withDefault(int defaultValue) {
    String fieldName = name();
    String fieldDescription = description();
    return Field.create(fieldName, fieldDescription, defaultValue);
}
/**
 * Create and return a new Field instance that is a copy of this field but with the given long default value.
 *
 * @param defaultValue the new default value for the new field
 * @return the new field; never null
 */
public Field withDefault(long defaultValue) {
    String fieldName = name();
    String fieldDescription = description();
    return Field.create(fieldName, fieldDescription, defaultValue);
}
/**
 * Create and return a new Field instance that is a copy of this field but that performs no validation.
 *
 * @return the new field; never null
 */
public Field withNoValidation() {
    String fieldName = name();
    String fieldDescription = description();
    return new Field(fieldName, fieldDescription, defaultValue, null);
}
/**
 * Create and return a new Field instance that is a copy of this field but that uses the supplied validation
 * function during {@link Field#validate(Configuration, Consumer)}.
 *
 * @param validator the validation function; may be null
 * @return the new field; never null
 */
public Field withValidation(Validator validator) {
    String fieldName = name();
    String fieldDescription = description();
    return new Field(fieldName, fieldDescription, defaultValue, validator);
}
/**
 * Create and return a new Field instance that is a copy of this field but that uses the supplied conversion
 * check function(s) during {@link Field#validate(Configuration, Consumer)}. A check passes when applying it
 * to the configured value does not throw; the first check that throws reports one problem and stops.
 *
 * @param conversionCheck the functions that attempt to validate the object; may be null
 * @return the new field; never null
 */
@SuppressWarnings("unchecked")
public Field withValidation(Function<String, ?>... conversionCheck) {
    return new Field(name(), description(), defaultValue, (config, field, problems) -> {
        String stringValue = config.getString(field);
        for (Function<String, ?> converter : conversionCheck) {
            if (converter == null) {
                continue; // null checks are simply skipped
            }
            try {
                converter.apply(stringValue);
            } catch (Throwable t) {
                problems.accept("The " + field.name() + " value '" + stringValue + "' is not allowed: " + t.getMessage());
                return 1; // short-circuit on the first failed conversion
            }
        }
        return 0; // no problems found
    });
}
/**
 * Create and return a new Field instance that is a copy of this field but that uses the supplied predicate(s)
 * during {@link Field#validate(Configuration, Consumer)}. Every failed predicate is reported as a problem;
 * a predicate that throws reports one problem and stops.
 *
 * @param predicates the functions that attempt to validate the object; may be null
 * @return the new field; never null
 */
@SuppressWarnings("unchecked")
public Field withValidation(Predicate<String>... predicates) {
    return new Field(name(), description(), defaultValue, (config, field, problems) -> {
        String value = config.getString(field);
        // Fix: count failed predicates so the validator returns a non-zero problem count.
        // Previously a failed predicate reported a problem but the validator still returned 0,
        // so validate(...) incorrectly considered the value valid.
        int failures = 0;
        for (Predicate<String> predicate : predicates) {
            if (predicate != null) {
                try {
                    if (!predicate.test(value)) {
                        problems.accept("The " + field.name() + " value '" + value + "' is not valid");
                        ++failures;
                    }
                } catch (Throwable t) {
                    problems.accept("The " + field.name() + " value '" + value + "' is not allowed: " + t.getMessage());
                    return failures + 1;
                }
            }
        }
        return failures;
    });
}
@Override
public int hashCode() {
    // Identity is based solely on the field name, matching equals(Object).
    return this.name.hashCode();
}
@Override
public boolean equals(Object obj) {
    if (obj == this) {
        return true;
    }
    if (!(obj instanceof Field)) {
        return false;
    }
    Field other = (Field) obj;
    // Two fields are equal iff they have the same name.
    return this.name().equals(other.name());
}
@Override
public String toString() {
    // A field is fully identified by its name.
    return this.name();
}
/**
 * Determine whether the supplied value is non-null and contains at least one non-whitespace character.
 *
 * @param value the configuration value; may be null
 * @return {@code true} if a non-blank value is present, or {@code false} otherwise
 */
public static boolean isRequired(String value) {
    if (value == null) {
        return false;
    }
    return !value.trim().isEmpty();
}
/**
 * Determine whether the supplied value can be treated as a boolean.
 *
 * <p>NOTE(review): {@code Boolean.parseBoolean} never throws — it returns {@code false} for
 * anything other than "true" (including null) — so this check accepts every possible value
 * and always returns {@code true}. Confirm whether this leniency is intentional.
 *
 * @param value the configuration value; may be null
 * @return always {@code true}
 */
public static boolean isBoolean(String value) {
    Boolean.parseBoolean(value);
    return true;
}
/**
 * Determine whether the supplied value can be parsed as an integer. A null value is accepted;
 * a non-null, non-integer value causes a {@link NumberFormatException}, which the
 * {@code withValidation} wrappers catch and report as a problem.
 *
 * @param value the configuration value; may be null
 * @return {@code true} if the value is null or parses as an integer
 * @throws NumberFormatException if the non-null value is not a valid integer
 */
public static boolean isInteger(String value) {
    if (value != null) {
        Integer.parseInt(value);
    }
    return true;
}
/**
 * Determine whether the supplied value parses as a strictly positive integer. A null value is accepted;
 * a non-null, non-integer value causes a {@link NumberFormatException}, which the
 * {@code withValidation} wrappers catch and report as a problem.
 *
 * @param value the configuration value; may be null
 * @return {@code true} if the value is null or a positive integer, {@code false} if zero or negative
 * @throws NumberFormatException if the non-null value is not a valid integer
 */
public static boolean isPositiveInteger(String value) {
    if (value == null) {
        return true;
    }
    return Integer.parseInt(value) > 0;
}
}

View File

@ -0,0 +1,831 @@
/*
* Copyright Debezium Authors.
*
* Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
*/
package io.debezium.document;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.Arrays;
import java.util.List;
import java.util.function.BiFunction;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;
import io.debezium.util.Iterators;
/**
 * An array of {@link Value}s. The array can also be viewed as a stream of {@link Entry} instances, each of which contain the
 * index and the value.
 *
 * @author Randall Hauch
 *
 */
public interface Array extends Iterable<Array.Entry>, Comparable<Array> {

    /**
     * A pairing of an index within the array and the value at that index.
     */
    static interface Entry extends Comparable<Entry> {
        /**
         * Get the index of the entry
         *
         * @return the entry's index
         */
        int getIndex();

        /**
         * Get the value of the entry.
         *
         * @return the entry's value; may be null
         */
        Value getValue();

        @Override
        default int compareTo(Entry that) {
            if (that == null) return 1;
            // Order primarily by index, then by value.
            int diff = this.getIndex() - that.getIndex();
            if (diff != 0) return diff;
            // NOTE(review): assumes getValue() is non-null here even though the getter documents
            // the value as "may be null" -- a null value would throw NPE. Confirm with implementations.
            return this.getValue().compareTo(that.getValue());
        }
    }

    /**
     * Create a new, empty array.
     *
     * @return the new array; never null
     */
    static Array create() {
        return new BasicArray();
    }

    /**
     * Create a new array containing the given number of null values.
     *
     * @param number the number of null entries
     * @return the new array; never null
     */
    static Array createWithNulls(int number) {
        Value[] vals = new Value[number];
        Arrays.fill(vals, Value.nullValue());
        return new BasicArray(vals);
    }

    /**
     * Create an array whose entries wrap the supplied objects, in order.
     *
     * @param values the objects to be wrapped as {@link Value}s; may be null or empty
     * @return the new array; never null
     */
    static Array create(Object... values) {
        if (values == null || values.length == 0) {
            return create();
        }
        Value[] vals = new Value[values.length];
        for (int i = 0; i != values.length; ++i) {
            vals[i] = Value.create(values[i]);
        }
        return new BasicArray(vals);
    }

    /**
     * Create an array containing the supplied values, in order.
     *
     * @param values the values; may be null or empty
     * @return the new array; never null
     */
    static Array create(Value[] values) {
        if (values == null || values.length == 0) {
            return create();
        }
        return new BasicArray(values);
    }

    /**
     * Create an array containing two or more values.
     *
     * @param firstValue the first value
     * @param secondValue the second value
     * @param additionalValues any additional values; may not be null
     * @return the new array; never null
     */
    static Array create(Value firstValue, Value secondValue, Value... additionalValues) {
        Value[] values = new Value[additionalValues.length + 2];
        // NOTE(review): the arguments are already Values; presumably Value.create(Object) passes a
        // Value through unchanged -- confirm against Value.create.
        values[0] = Value.create(firstValue);
        values[1] = Value.create(secondValue);
        for (int i = 0; i != additionalValues.length; ++i) {
            values[i + 2] = Value.create(additionalValues[i]);
        }
        return new BasicArray(values);
    }

    /**
     * Create an array whose entries wrap the supplied objects, in order.
     *
     * @param values the objects to be wrapped as {@link Value}s; may be null
     * @return the new array; never null
     */
    static Array create(Iterable<?> values) {
        if (values == null) return create();
        BasicArray array = new BasicArray();
        values.forEach(obj -> array.add(Value.create(obj)));
        return array;
    }

    /**
     * Create an array containing the supplied values, in order.
     *
     * @param values the values; may be null or empty
     * @return the new array; never null
     */
    static Array create(List<Value> values) {
        return (values == null || values.isEmpty()) ? create() : new BasicArray(values);
    }

    /**
     * Return the number of values in this array.
     *
     * @return the number of values; never negative
     */
    int size();

    /**
     * Return whether this array contains no values and is therefore empty.
     *
     * @return true if there are no values in this array, or false if there is at least one.
     */
    boolean isEmpty();

    /**
     * Determine if this contains an entry at the given index.
     *
     * @param index the index
     * @return true if the entry exists, or false otherwise
     */
    boolean has(int index);

    /**
     * Gets the value in this array at the given index.
     *
     * @param index the index
     * @return The value, if found, or null otherwise
     */
    Value get(int index);

    /**
     * Gets the value in this array at the given index, or a default if there is no such entry.
     *
     * @param index the index
     * @param defaultValue the default value to return if there is no such entry
     * @return The value if found or <code>defaultValue</code> if there is no such entry
     */
    default Value get(int index, Object defaultValue) {
        Value value = get(index);
        return value != null ? value : Value.create(defaultValue);
    }

    /**
     * Determine whether this array has an entry at the given index and the value is null.
     *
     * @param index the index
     * @return <code>true</code> if the entry exists but is null, or false otherwise
     * @see #isNullOrMissing(int)
     */
    default boolean isNull(int index) {
        Value value = get(index);
        return value != null ? value.isNull() : false;
    }

    /**
     * Determine whether this array has an entry at the given index and the value is null, or if this array has no entry at
     * the given index.
     *
     * @param index the index
     * @return <code>true</code> if the value at the index is null or if there is no such entry.
     * @see #isNull(int)
     */
    default boolean isNullOrMissing(int index) {
        Value value = get(index);
        return value != null ? value.isNull() : true;
    }

    /**
     * Remove the specified entry from this array
     *
     * @param index the index
     * @return the value in the removed entry, or null if there is no such entry
     */
    Value remove(int index);

    /**
     * Remove all entries from this array.
     *
     * @return this array to allow for chaining methods
     */
    Array removeAll();

    /**
     * Adds to this array the {@link Value} form of each of the supplied objects. If the supplied array is null, this
     * method does nothing.
     *
     * @param values the values to be added to this array
     * @return this array to allow for chaining methods
     */
    default Array addAll(Object... values) {
        if (values != null) {
            for (Object obj : values) {
                add(Value.create(obj));
            }
        }
        return this;
    }

    /**
     * Adds to this array all of the supplied values. If the supplied array is null, this method does nothing.
     *
     * @param values the values to be added to this array
     * @return this array to allow for chaining methods
     */
    default Array addAll(Value... values) {
        if (values != null) {
            addAll(Stream.of(values));
        }
        return this;
    }

    /**
     * Adds to this array a clone of each of the supplied values; null elements are added as
     * {@link Value#nullValue() null values}. If the supplied iterable is null, this method does nothing.
     *
     * @param values the values to be added to this array
     * @return this array to allow for chaining methods
     */
    default Array addAll(Iterable<Value> values) {
        if (values != null) {
            values.forEach(value -> add(value != null ? value.clone() : Value.nullValue()));
        }
        return this;
    }

    /**
     * Adds to this array a clone of each of the supplied values; null elements are added as
     * {@link Value#nullValue() null values}. If the supplied stream is null, this method does nothing.
     *
     * @param values the values to be added to this array
     * @return this array to allow for chaining methods
     */
    default Array addAll(Stream<Value> values) {
        if (values != null) {
            values.forEach(value -> add(value != null ? value.clone() : Value.nullValue()));
        }
        return this;
    }

    /**
     * Adds the value to the end of this array.
     *
     * @param value the value; may not be null
     * @return this array to allow for chaining methods
     */
    Array add(Value value);

    /**
     * Adds a null value to the end of this array.
     *
     * @return this array to allow for chaining methods
     */
    default Array addNull() {
        add(Value.nullValue());
        return this;
    }

    /**
     * Adds the string value to the end of this array.
     *
     * @param value the string value; may be null if a {@link #addNull() null value} should be added
     * @return this array to allow for chaining methods
     */
    default Array add(String value) {
        add(Value.create(value));
        return this;
    }

    /**
     * Adds the boolean value to the end of this array.
     *
     * @param value the boolean value; may not be null
     * @return this array to allow for chaining methods
     */
    default Array add(boolean value) {
        add(Value.create(value));
        return this;
    }

    /**
     * Adds the boolean value to the end of this array.
     *
     * @param value the boolean value; may be null if a {@link #addNull() null value} should be added
     * @return this array to allow for chaining methods
     */
    default Array add(Boolean value) {
        add(Value.create(value));
        return this;
    }

    /**
     * Adds the integer value to the end of this array.
     *
     * @param value the integer value; may not be null
     * @return this array to allow for chaining methods
     */
    default Array add(int value) {
        add(Value.create(value));
        return this;
    }

    /**
     * Adds the long value to the end of this array.
     *
     * @param value the long value; may not be null
     * @return this array to allow for chaining methods
     */
    default Array add(long value) {
        add(Value.create(value));
        return this;
    }

    /**
     * Adds the float value to the end of this array.
     *
     * @param value the float value; may not be null
     * @return this array to allow for chaining methods
     */
    default Array add(float value) {
        add(Value.create(value));
        return this;
    }

    /**
     * Adds the double value to the end of this array.
     *
     * @param value the double value; may not be null
     * @return this array to allow for chaining methods
     */
    default Array add(double value) {
        add(Value.create(value));
        return this;
    }

    /**
     * Adds the big integer value to the end of this array.
     *
     * @param value the big integer value; may be null if a {@link #addNull() null value} should be added
     * @return this array to allow for chaining methods
     */
    default Array add(BigInteger value) {
        add(Value.create(value));
        return this;
    }

    /**
     * Adds the decimal value to the end of this array.
     *
     * @param value the decimal value; may be null if a {@link #addNull() null value} should be added
     * @return this array to allow for chaining methods
     */
    default Array add(BigDecimal value) {
        add(Value.create(value));
        return this;
    }

    /**
     * Adds the integer value to the end of this array.
     *
     * @param value the integer value; may be null if a {@link #addNull() null value} should be added
     * @return this array to allow for chaining methods
     */
    default Array add(Integer value) {
        add(Value.create(value));
        return this;
    }

    /**
     * Adds the long value to the end of this array.
     *
     * @param value the long value; may be null if a {@link #addNull() null value} should be added
     * @return this array to allow for chaining methods
     */
    default Array add(Long value) {
        add(Value.create(value));
        return this;
    }

    /**
     * Adds the float value to the end of this array.
     *
     * @param value the float value; may be null if a {@link #addNull() null value} should be added
     * @return this array to allow for chaining methods
     */
    default Array add(Float value) {
        add(Value.create(value));
        return this;
    }

    /**
     * Adds the double value to the end of this array.
     *
     * @param value the double value; may be null if a {@link #addNull() null value} should be added
     * @return this array to allow for chaining methods
     */
    default Array add(Double value) {
        add(Value.create(value));
        return this;
    }

    /**
     * Adds the document value to the end of this array.
     *
     * @param value the document value; may be null if a {@link #addNull() null value} should be added
     * @return this array to allow for chaining methods
     */
    default Array add(Document value) {
        add(Value.create(value));
        return this;
    }

    /**
     * Adds the array value to the end of this array.
     *
     * @param value the array value; may be null if a {@link #addNull() null value} should be added
     * @return this array to allow for chaining methods
     */
    default Array add(Array value) {
        add(Value.create(value));
        return this;
    }

    /**
     * Sets on this array, at the index of each supplied entry, a clone of that entry's value. If the supplied
     * iterable is null, this method does nothing.
     *
     * @param entries the entries that are to be used to modify this array
     * @return this array to allow for chaining methods
     */
    default Array putAll(Iterable<Entry> entries) {
        if (entries != null) {
            entries.forEach(entry -> {
                if (entry != null) {
                    Value value = entry.getValue().clone();
                    setValue(entry.getIndex(), value);
                }
            });
        }
        return this;
    }

    /**
     * View this array's entries as a sequence of just their values, in index order.
     *
     * @return an iterable over the values; never null
     */
    default Iterable<Value> values() {
        // Adapt the Iterable<Entry> view of this array into an Iterable<Value> by projecting each entry's value.
        return Iterators.around(Iterators.around(this, (entry) -> entry.getValue()));
    }

    /**
     * Returns a sequential {@code Stream} of this array's entries.
     *
     * @return a sequential {@code Stream} over the entries in this array
     */
    default Stream<Entry> streamEntries() {
        return StreamSupport.stream(spliterator(), false);
    }

    /**
     * Returns a sequential {@code Stream} of this array's values.
     *
     * @return a sequential {@code Stream} over the values in this array
     */
    default Stream<Value> streamValues() {
        return StreamSupport.stream(values().spliterator(), false);
    }

    /**
     * Transform all of the values using the supplied {@link BiFunction transformer function}.
     *
     * @param transformer the transformer that should be used to transform each value; may not be null
     * @return this array with transformed values, or this array if the transformer changed none of the values
     */
    default Array transform(BiFunction<Integer, Value, Value> transformer) {
        for (int i = 0; i != size(); ++i) {
            Value existing = get(i);
            Value updated = transformer.apply(Integer.valueOf(i), existing);
            // A null result from the transformer is normalized to an explicit null value.
            if (updated == null) updated = Value.nullValue();
            // Only write back when the transformer produced a different instance.
            if (updated != existing) {
                setValue(i, updated);
            }
        }
        return this;
    }

    /**
     * Set the value at the given index to be a null value. The {@link #isNull(int)} methods can be used to
     * determine if an entry has been set to null, or {@link #isNullOrMissing(int)} if the entry has not been set or if it has
     * been set to null.
     *
     * @param index the index of the entry; must be greater than or equal to 0 or less than or equal to {@link #size() size}
     * @return this array to allow for chaining methods
     * @see #isNull(int)
     * @see #isNullOrMissing(int)
     */
    default Array setNull(int index) {
        return setValue(index, Value.nullValue());
    }

    /**
     * Set the value at the given index to the supplied boolean value.
     *
     * @param index the index of the entry; must be greater than or equal to 0 or less than or equal to {@link #size() size}
     * @param value the new value for the entry
     * @return this array to allow for chaining methods
     */
    default Array setBoolean(int index,
                             boolean value) {
        return setValue(index, Value.create(value));
    }

    /**
     * Set the value at the given index to the supplied integer value.
     *
     * @param index the index of the entry; must be greater than or equal to 0 or less than or equal to {@link #size() size}
     * @param value the new value for the entry
     * @return this array to allow for chaining methods
     */
    default Array setNumber(int index,
                            int value) {
        return setValue(index, Value.create(value));
    }

    /**
     * Set the value at the given index to the supplied long value.
     *
     * @param index the index of the entry; must be greater than or equal to 0 or less than or equal to {@link #size() size}
     * @param value the new value for the entry
     * @return this array to allow for chaining methods
     */
    default Array setNumber(int index,
                            long value) {
        return setValue(index, Value.create(value));
    }

    /**
     * Set the value at the given index to the supplied float value.
     *
     * @param index the index of the entry; must be greater than or equal to 0 or less than or equal to {@link #size() size}
     * @param value the new value for the entry
     * @return this array to allow for chaining methods
     */
    default Array setNumber(int index,
                            float value) {
        return setValue(index, Value.create(value));
    }

    /**
     * Set the value at the given index to the supplied double value.
     *
     * @param index the index of the entry; must be greater than or equal to 0 or less than or equal to {@link #size() size}
     * @param value the new value for the entry
     * @return this array to allow for chaining methods
     */
    default Array setNumber(int index,
                            double value) {
        return setValue(index, Value.create(value));
    }

    /**
     * Set the value at the given index to the supplied big integer value.
     *
     * @param index the index of the entry; must be greater than or equal to 0 or less than or equal to {@link #size() size}
     * @param value the new value for the entry
     * @return this array to allow for chaining methods
     */
    default Array setNumber(int index,
                            BigInteger value) {
        return setValue(index, Value.create(value));
    }

    /**
     * Set the value at the given index to the supplied decimal value.
     *
     * @param index the index of the entry; must be greater than or equal to 0 or less than or equal to {@link #size() size}
     * @param value the new value for the entry
     * @return this array to allow for chaining methods
     */
    default Array setNumber(int index,
                            BigDecimal value) {
        return setValue(index, Value.create(value));
    }

    /**
     * Set the value at the given index to the supplied string value.
     *
     * @param index the index of the entry; must be greater than or equal to 0 or less than or equal to {@link #size() size}
     * @param value the new value for the entry
     * @return this array to allow for chaining methods
     */
    default Array setString(int index,
                            String value) {
        return setValue(index, Value.create(value));
    }

    /**
     * Set the value at the given index to be a binary value. The value will be encoded as Base64.
     *
     * @param index the index of the entry; must be greater than or equal to 0 or less than or equal to {@link #size() size}
     * @param data the bytes for the binary value
     * @return this array to allow for chaining methods
     */
    default Array setBinary(int index,
                            byte[] data) {
        return setValue(index, Value.create(data));
    }

    /**
     * Set the value at the given index to be the given value.
     *
     * @param index the index of the entry; must be greater than or equal to 0 and less than or equal to {@link #size() size}
     * @param value the new value
     * @return this array to allow for chaining methods
     */
    Array setValue(int index, Value value);

    /**
     * If the current size of the array is smaller than the given size, expand it and use a null value for all new entries.
     * This method does nothing if the current size is larger than the supplied {@code desiredSize}.
     *
     * @param desiredSize the desired size of the array; may be negative
     * @return this array to allow for chaining methods
     */
    default Array expand(int desiredSize) {
        return expand(desiredSize,Value.nullValue());
    }

    /**
     * If the current size of the array is smaller than the given size, expand it and use the supplied value for all new entries.
     * This method does nothing if the current size is larger than the supplied {@code desiredSize}.
     *
     * @param desiredSize the desired size of the array; may be negative
     * @param value the new value for any new entries
     * @return this array to allow for chaining methods
     */
    Array expand(int desiredSize, Value value);

    /**
     * If the current size of the array is smaller than the given size, expand it and use the supplied value for all new entries.
     * This method does nothing if the current size is larger than the supplied {@code desiredSize}.
     *
     * @param desiredSize the desired size of the array; may be negative
     * @param value the new value for any new entries
     * @return this array to allow for chaining methods
     */
    default Array expand(int desiredSize, boolean value) {
        return expand(desiredSize,Value.create(value));
    }

    /**
     * If the current size of the array is smaller than the given size, expand it and use the supplied value for all new entries.
     * This method does nothing if the current size is larger than the supplied {@code desiredSize}.
     *
     * @param desiredSize the desired size of the array; may be negative
     * @param value the new value for any new entries
     * @return this array to allow for chaining methods
     */
    default Array expand(int desiredSize, int value) {
        return expand(desiredSize,Value.create(value));
    }

    /**
     * If the current size of the array is smaller than the given size, expand it and use the supplied value for all new entries.
     * This method does nothing if the current size is larger than the supplied {@code desiredSize}.
     *
     * @param desiredSize the desired size of the array; may be negative
     * @param value the new value for any new entries
     * @return this array to allow for chaining methods
     */
    default Array expand(int desiredSize, long value) {
        return expand(desiredSize,Value.create(value));
    }

    /**
     * If the current size of the array is smaller than the given size, expand it and use the supplied value for all new entries.
     * This method does nothing if the current size is larger than the supplied {@code desiredSize}.
     *
     * @param desiredSize the desired size of the array; may be negative
     * @param value the new value for any new entries
     * @return this array to allow for chaining methods
     */
    default Array expand(int desiredSize, float value) {
        return expand(desiredSize,Value.create(value));
    }

    /**
     * If the current size of the array is smaller than the given size, expand it and use the supplied value for all new entries.
     * This method does nothing if the current size is larger than the supplied {@code desiredSize}.
     *
     * @param desiredSize the desired size of the array; may be negative
     * @param value the new value for any new entries
     * @return this array to allow for chaining methods
     */
    default Array expand(int desiredSize, double value) {
        return expand(desiredSize,Value.create(value));
    }

    /**
     * If the current size of the array is smaller than the given size, expand it and use the supplied value for all new entries.
     * This method does nothing if the current size is larger than the supplied {@code desiredSize}.
     *
     * @param desiredSize the desired size of the array; may be negative
     * @param value the new value for any new entries
     * @return this array to allow for chaining methods
     */
    default Array expand(int desiredSize, String value) {
        return expand(desiredSize,Value.create(value));
    }

    /**
     * Increment the numeric value at the given location by the designated amount.
     * @param index the index of the entry; must be greater than or equal to 0 and less than or equal to {@link #size() size}
     * @param increment the amount to increment the existing value; may be negative to decrement
     * @return this array to allow for chaining methods
     * @throws IllegalArgumentException if the current value is not a number
     */
    default Array increment( int index, int increment ) {
        return increment(index,Value.create(increment));
    }

    /**
     * Increment the numeric value at the given location by the designated amount.
     * @param index the index of the entry; must be greater than or equal to 0 and less than or equal to {@link #size() size}
     * @param increment the amount to increment the existing value; may be negative to decrement
     * @return this array to allow for chaining methods
     * @throws IllegalArgumentException if the current value is not a number
     */
    default Array increment( int index, long increment ) {
        return increment(index,Value.create(increment));
    }

    /**
     * Increment the numeric value at the given location by the designated amount.
     * @param index the index of the entry; must be greater than or equal to 0 and less than or equal to {@link #size() size}
     * @param increment the amount to increment the existing value; may be negative to decrement
     * @return this array to allow for chaining methods
     * @throws IllegalArgumentException if the current value is not a number
     */
    default Array increment( int index, double increment ) {
        return increment(index,Value.create(increment));
    }

    /**
     * Increment the numeric value at the given location by the designated amount.
     * @param index the index of the entry; must be greater than or equal to 0 and less than or equal to {@link #size() size}
     * @param increment the amount to increment the existing value; may be negative to decrement
     * @return this array to allow for chaining methods
     * @throws IllegalArgumentException if the current value is not a number
     */
    default Array increment( int index, float increment ) {
        return increment(index,Value.create(increment));
    }

    /**
     * Increment the numeric value at the given location by the designated amount.
     * @param index the index of the entry; must be greater than or equal to 0 and less than or equal to {@link #size() size}
     * @param increment the amount to increment the existing value; may be negative to decrement
     * @return this array to allow for chaining methods
     * @throws IllegalArgumentException if the current value is not a number
     */
    Array increment( int index, Value increment );

    /**
     * Set the value at the given index to be a new, empty Document.
     *
     * @param index the index of the entry; must be greater than or equal to 0 and less than or equal to {@link #size() size}
     * @return The editable document that was just created; never null
     */
    default Document setDocument(int index) {
        return setDocument(index, Document.create());
    }

    /**
     * Set the value at the given index to be the supplied Document.
     *
     * @param index the index of the entry; must be greater than or equal to 0 or less than or equal to {@link #size() size}
     * @param document the document
     * @return The document that was just set as the value at the given index; never null and may or may not be the same
     *         instance as the supplied <code>document</code>.
     */
    default Document setDocument(int index,
                                 Document document) {
        if (document == null) document = Document.create();
        setValue(index, Value.create(document));
        return document;
    }

    /**
     * Set the value at the given index to be a new, empty array.
     *
     * @param index the index of the entry; must be greater than or equal to 0 or less than or equal to {@link #size() size}
     * @return The array that was just created; never null
     */
    default Array setArray(int index) {
        return setArray(index, Array.create());
    }

    /**
     * Set the value at the given index to be the supplied array.
     *
     * @param index the index of the entry; must be greater than or equal to 0 or less than or equal to {@link #size() size}
     * @param array the array
     * @return The array that was just set as the value at the given index; never null and may or may not be the same
     *         instance as the supplied <code>array</code>.
     */
    default Array setArray(int index,
                           Array array) {
        if (array == null) array = Array.create();
        setValue(index, Value.create(array));
        return array;
    }

    /**
     * Set the value at the given index to be an array containing the supplied values.
     *
     * @param index the index of the entry; must be greater than or equal to 0 or less than or equal to {@link #size() size}
     * @param values the (valid) values for the array
     * @return The array that was just set as the value at the given index; never null and may or may not be the same
     *         instance as the supplied <code>array</code>.
     */
    default Array setArray(int index,
                           Value... values) {
        Array array = Array.create(values);
        setValue(index, Value.create(array));
        return array;
    }

    /**
     * Obtain a complete copy of this array.
     *
     * @return the clone of this array; never null
     */
    Array clone();
}

View File

@ -0,0 +1,98 @@
/*
* Copyright Debezium Authors.
*
* Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
*/
package io.debezium.document;
import java.io.BufferedInputStream;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.Reader;
import java.net.URL;
import io.debezium.annotation.ThreadSafe;
/**
 * Reads {@link Array} instances from a variety of input forms.
 *
 * @author Randall Hauch
 */
@ThreadSafe
public interface ArrayReader {

    /**
     * Get the default {@link ArrayReader} instance.
     *
     * @return the shared default reader instance; never null
     */
    static ArrayReader defaultReader() {
        return JacksonReader.INSTANCE;
    }

    /**
     * Read an array from the supplied stream.
     *
     * @param jsonStream the input stream to be read; may not be null
     * @return the array instance; never null
     * @throws IOException if an array could not be read from the supplied stream
     */
    Array readArray(InputStream jsonStream) throws IOException;

    /**
     * Read an array from the supplied {@link Reader}.
     *
     * @param jsonReader the reader to be read; may not be null
     * @return the array instance; never null
     * @throws IOException if an array could not be read from the supplied reader
     */
    Array readArray(Reader jsonReader) throws IOException;

    /**
     * Read an array from the supplied JSON-formatted string.
     *
     * @param json the JSON string representation to be read; may not be null
     * @return the array instance; never null
     * @throws IOException if an array could not be read from the supplied string
     */
    Array readArray(String json) throws IOException;

    /**
     * Read an array from the content at the given URL.
     *
     * @param jsonUrl the URL to the content that is to be read; may not be null
     * @return the array instance; never null
     * @throws IOException if an array could not be read from the supplied content
     */
    default Array readArray(URL jsonUrl) throws IOException {
        // Fix: close the stream when done; the original opened it and never closed it.
        try (InputStream stream = jsonUrl.openStream()) {
            return readArray(stream);
        }
    }

    /**
     * Read an array from the supplied file.
     *
     * @param jsonFile the file to be read; may not be null
     * @return the array instance; never null
     * @throws IOException if an array could not be read from the supplied file
     */
    default Array readArray(File jsonFile) throws IOException {
        // Fix: close the stream when done; the original opened it and never closed it.
        try (InputStream stream = new BufferedInputStream(new FileInputStream(jsonFile))) {
            return readArray(stream);
        }
    }

    /**
     * Read an array from the supplied bytes.
     *
     * @param rawBytes the UTF-8 bytes to be read; may not be null
     * @return the array instance; never null
     * @throws IOException if an array could not be read from the supplied bytes
     */
    default Array readArray(byte[] rawBytes) throws IOException {
        try (ByteArrayInputStream stream = new ByteArrayInputStream(rawBytes)) {
            // Fix: dispatch to this reader rather than always to the default reader, so that
            // custom ArrayReader implementations also handle byte input with their own logic.
            return readArray(stream);
        }
    }
}

View File

@ -0,0 +1,54 @@
/*
* Copyright Debezium Authors.
*
* Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
*/
package io.debezium.document;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.Map;
import org.apache.kafka.common.serialization.Deserializer;
import org.apache.kafka.common.serialization.Serializer;
import io.debezium.annotation.Immutable;
/**
 * A Kafka {@link Serializer} and {@link Deserializer} that operates upon Debezium {@link Array}s.
 * <p>
 * Arrays are serialized as UTF-8 encoded JSON. Per the Kafka serde convention, null payloads
 * (e.g., tombstone records) pass through as null in both directions.
 *
 * @author Randall Hauch
 */
@Immutable
public class ArraySerdes implements Serializer<Array>, Deserializer<Array> {

    private static final ArrayWriter ARRAY_WRITER = ArrayWriter.defaultWriter();
    private static final ArrayReader ARRAY_READER = ArrayReader.defaultReader();

    @Override
    public void configure(Map<String, ?> configs, boolean isKey) {
        // No configuration is needed
    }

    @Override
    public byte[] serialize(String topic, Array data) {
        if (data == null) return null; // pass null payloads through, per Kafka convention
        return ARRAY_WRITER.writeAsBytes(data);
    }

    @Override
    public Array deserialize(String topic, byte[] data) {
        if (data == null) return null; // tombstone records deserialize to null
        try {
            return ARRAY_READER.readArray(bytesToString(data));
        } catch (IOException e) {
            // Should never happen since the bytes were produced by a compatible serializer,
            // but surface it if it does ...
            throw new RuntimeException(e);
        }
    }

    @Override
    public void close() {
        // Nothing to release
    }

    // Decode as UTF-8, matching the encoding produced by ArrayWriter
    private String bytesToString(byte[] bytes) {
        return new String(bytes, StandardCharsets.UTF_8);
    }
}

View File

@ -0,0 +1,79 @@
/*
* Copyright Debezium Authors.
*
* Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
*/
package io.debezium.document;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.io.Writer;
import io.debezium.annotation.ThreadSafe;
/**
* Writes {@link Array} instances to a variety of output forms.
*
* @author Randall Hauch
*/
@ThreadSafe
public interface ArrayWriter {
/**
 * Get the default {@link ArrayWriter} instance.
 *
 * @return the shared default writer instance; never null
 */
static ArrayWriter defaultWriter() {
return JacksonWriter.INSTANCE;
}
/**
 * Get the default {@link ArrayWriter} instance that outputs nicely-formatted JSON arrays.
 *
 * @return the shared default pretty writer instance; never null
 */
static ArrayWriter prettyWriter() {
return JacksonWriter.PRETTY_WRITER;
}
/**
 * Write the supplied array to bytes using UTF-8.
 *
 * @param array the array to be written; may not be null
 * @return the bytes containing the output JSON-formatted array; never null
 */
default byte[] writeAsBytes( Array array ) {
try (ByteArrayOutputStream stream = new ByteArrayOutputStream()) {
write(array, stream);
return stream.toByteArray();
} catch ( IOException e ) {
// Writing to an in-memory stream should not fail with an IOException, so this path is
// effectively unreachable in practice; an empty array is returned as a best-effort fallback.
// NOTE(review): the implementation could still raise an IOException of its own — confirm.
e.printStackTrace();
return new byte[]{};
}
}
/**
 * Write the supplied array to the provided output stream using UTF-8.
 *
 * @param array the array to be written; may not be null
 * @param jsonStream the stream to which the array is to be written; may not be null
 * @throws IOException if an array could not be written to the supplied stream
 */
void write( Array array, OutputStream jsonStream ) throws IOException;
/**
 * Write the supplied array to the provided character writer.
 * (Note: this overload writes characters, not UTF-8 bytes; the encoding is determined
 * by the supplied {@link Writer}.)
 *
 * @param array the array to be written; may not be null
 * @param jsonWriter the IO writer to which the array is to be written; may not be null
 * @throws IOException if an array could not be written to the supplied writer
 */
void write( Array array, Writer jsonWriter ) throws IOException;
/**
 * Write the supplied array to a JSON-formatted string.
 *
 * @param array the array to be written; may not be null
 * @return the string containing the output JSON-formatted array; never null
 * @throws IOException if an array could not be written to a string
 */
String write( Array array ) throws IOException;
}

View File

@ -0,0 +1,192 @@
/*
* Copyright Debezium Authors.
*
* Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
*/
package io.debezium.document;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.function.BiFunction;
import io.debezium.annotation.NotThreadSafe;
import io.debezium.util.Iterators;
import io.debezium.util.MathOps;
import io.debezium.util.Sequences;
/**
* Package-level implementation of {@link Array}.
*
* @author Randall Hauch
*/
@NotThreadSafe
final class BasicArray implements Array {

    // Pairs a zero-based index with its value to produce an Entry during iteration
    private static final BiFunction<Integer, Value, Entry> CONVERT_PAIR_TO_ENTRY =
            (index, value) -> new BasicEntry(index.intValue(), value);

    private final List<Value> values;

    BasicArray() {
        this.values = new ArrayList<>();
    }

    BasicArray(List<Value> values) {
        assert values != null;
        this.values = values;
    }

    BasicArray(Value[] values) {
        if (values == null || values.length == 0) {
            this.values = new ArrayList<>();
        } else {
            this.values = new ArrayList<>(values.length);
            for (Value value : values) {
                // Normalize nulls to the canonical null value ...
                this.values.add(value != null ? value : Value.nullValue());
            }
        }
    }

    // Parse a field name (e.g. "3") as an array index
    protected final int indexFrom(CharSequence name) {
        return Integer.parseInt(name.toString());
    }

    protected final boolean isValidIndex(int index) {
        return index >= 0 && index < size();
    }

    @Override
    public int size() {
        return values.size();
    }

    @Override
    public boolean isEmpty() {
        return values.isEmpty();
    }

    @Override
    public int compareTo(Array that) {
        if (that == null) return 1;
        int size = this.size();
        // Shorter arrays sort before longer ones ...
        if (size != that.size()) return size - that.size();
        for (int i = 0; i != size; ++i) {
            Value thisValue = get(i);
            Value thatValue = that.get(i);
            // Compare this element against that element. (The original compared
            // 'thatValue.compareTo(thisValue)', which inverted the sign of the result
            // and disagreed with the size-based ordering above.)
            int diff = thisValue.compareTo(thatValue);
            if (diff != 0) return diff;
        }
        return 0;
    }

    @Override
    public Iterator<Entry> iterator() {
        // Zip an infinite index sequence with the values; iteration stops with the values
        return Iterators.around(Sequences.infiniteIntegers(0), values, CONVERT_PAIR_TO_ENTRY);
    }

    @Override
    public Value remove(int index) {
        if (isValidIndex(index)) {
            // The index is in bounds ...
            return values.remove(index);
        }
        return null;
    }

    @Override
    public Array removeAll() {
        this.values.clear();
        return this;
    }

    @Override
    public boolean has(int index) {
        return isValidIndex(index);
    }

    @Override
    public Value get(int index) {
        return isValidIndex(index) ? values.get(index) : null;
    }

    @Override
    public Array setValue(int index, Value value) {
        if (value == null) value = Value.nullValue();
        if (isValidIndex(index)) {
            // The index is in bounds, so replace ...
            values.set(index, value);
        } else if (index == size()) {
            // The index is the next available position, so append. (Checking 'index == size()'
            // instead of 'isValidIndex(index - 1)' also allows appending at index 0 when the
            // array is empty, which the original wrongly rejected.)
            values.add(value);
        } else {
            // The index is invalid ...
            throw new IllegalArgumentException("The index " + index + " is too large for this array, which has only " + size() + " values");
        }
        return this;
    }

    @Override
    public Array expand(int desiredSize, Value value) {
        if (desiredSize <= values.size()) return this;
        // Otherwise, we have to expand the array, padding with the given value ...
        if (value == null) value = Value.nullValue();
        for (int i = values.size(); i < desiredSize; ++i) {
            values.add(value);
        }
        return this;
    }

    @Override
    public Array increment(int index, Value increment) {
        if (!increment.isNumber()) throw new IllegalArgumentException("The increment must be a number but is " + increment);
        Value current = get(index);
        // Out-of-range indexes (null) and non-numeric values are left unchanged ...
        if (current != null && current.isNumber()) {
            Value updated = Value.create(MathOps.add(current.asNumber(), increment.asNumber()));
            // 'updated' is already a Value, so no need to re-wrap it with Value.create
            setValue(index, updated);
        }
        return this;
    }

    @Override
    public Array add(Value value) {
        if (value == null) value = Value.nullValue();
        this.values.add(value);
        return this;
    }

    @Override
    public Iterable<Value> values() {
        return values;
    }

    @Override
    public Array clone() {
        return new BasicArray().addAll(this.values);
    }

    @Override
    public int hashCode() {
        return values.hashCode();
    }

    @Override
    public boolean equals(Object obj) {
        if (obj instanceof BasicArray) {
            BasicArray that = (BasicArray) obj;
            return values.equals(that.values);
        }
        return false;
    }

    @Override
    public String toString() {
        return values.toString();
    }
}

View File

@ -0,0 +1,216 @@
/*
* Copyright Debezium Authors.
*
* Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
*/
package io.debezium.document;
import java.io.IOException;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.function.Function;
import io.debezium.annotation.NotThreadSafe;
import io.debezium.util.Iterators;
import io.debezium.util.MathOps;
/**
* Package-level implementation of {@link Document}.
*
* @author Randall Hauch
*/
@NotThreadSafe
final class BasicDocument implements Document {

    // Adapts map entries to Field instances during iteration
    static final Function<Map.Entry<? extends CharSequence, Value>, Field> CONVERT_ENTRY_TO_FIELD = new Function<Map.Entry<? extends CharSequence, Value>, Field>() {
        @Override
        public Field apply(Entry<? extends CharSequence, Value> entry) {
            return new BasicField(entry.getKey(), entry.getValue());
        }
    };

    // LinkedHashMap preserves field insertion order, which compareTo may enforce
    private final Map<CharSequence, Value> fields = new LinkedHashMap<>();

    BasicDocument() {
    }

    @Override
    public int size() {
        return fields.size();
    }

    @Override
    public boolean isEmpty() {
        return fields.isEmpty();
    }

    @Override
    public int compareTo(Document that) {
        return compareTo(that, true);
    }

    @Override
    public int compareToWithoutFieldOrder(Document that) {
        return compareTo(that, false);
    }

    @Override
    public int compareTo(Document that, boolean enforceFieldOrder) {
        if (that == null) return 1;
        // Documents of different sizes are ordered by size ...
        if (this.size() != that.size()) {
            return this.size() - that.size();
        }
        int diff = 0;
        if (enforceFieldOrder) {
            Iterator<CharSequence> thisIter = this.keySet().iterator(); // ordered
            Iterator<CharSequence> thatIter = that.keySet().iterator(); // ordered
            while (thisIter.hasNext() && thatIter.hasNext()) {
                String thisKey = thisIter.next().toString();
                String thatKey = thatIter.next().toString();
                diff = thisKey.compareTo(thatKey);
                if (diff != 0) return diff;
                diff = compare(this.get(thisKey), that.get(thatKey));
                if (diff != 0) return diff;
            }
            if (thisIter.hasNext()) return 1;
            if (thatIter.hasNext()) return -1;
        } else {
            // We don't care about order, so just go through by this Document's fields ...
            for (Map.Entry<CharSequence, Value> entry : fields.entrySet()) {
                CharSequence key = entry.getKey();
                // that.get(key) may be null when 'that' lacks this field; compare() handles it
                diff = compare(this.get(key), that.get(key));
                if (diff != 0) return diff;
            }
            if (that.size() > this.size()) return 1;
        }
        return 0;
    }

    /**
     * Semantically compare two values. This includes comparing numeric values of different types (e.g., an integer and long),
     * and {@code null} and {@link Value#nullValue()} references.
     *
     * @param value1 the first value; may be null
     * @param value2 the second value; may be null
     * @return a negative integer, zero, or a positive integer as this object
     * is less than, equal to, or greater than the specified object.
     */
    protected int compare(Value value1, Value value2) {
        if (value1 == null) return Value.isNull(value2) ? 0 : 1;
        // Guard the symmetric case: without this, a null value2 (e.g., a field missing from
        // the other document in the unordered comparison above) caused a NullPointerException
        if (value2 == null) return Value.isNull(value1) ? 0 : -1;
        return value1.comparable().compareTo(value2.comparable());
    }

    @Override
    public Iterable<CharSequence> keySet() {
        return fields.keySet();
    }

    @Override
    public Iterator<Field> iterator() {
        return Iterators.around(fields.entrySet(), CONVERT_ENTRY_TO_FIELD);
    }

    @Override
    public void clear() {
        fields.clear();
    }

    @Override
    public boolean has(CharSequence fieldName) {
        return fields.containsKey(fieldName);
    }

    @Override
    public boolean hasAll(Document that) {
        if (that == null) return true;
        if (this.size() < that.size()) {
            // Can't have all of 'that' if 'that' is bigger ...
            return false;
        }
        return that.stream().allMatch(field -> {
            Value thatValue = field.getValue();
            Value thisValue = this.get(field.getName());
            return Value.compareTo(thisValue, thatValue) == 0;
        });
    }

    @Override
    public Value get(CharSequence fieldName, Comparable<?> defaultValue) {
        Value value = fields.get(fieldName);
        return value != null ? value : Value.create(defaultValue);
    }

    @Override
    public Document putAll(Iterable<Field> object) {
        object.forEach(this::setValue);
        return this;
    }

    @Override
    public Document removeAll() {
        fields.clear();
        return this;
    }

    @Override
    public Value remove(CharSequence name) {
        if (!fields.containsKey(name)) return null;
        // NOTE(review): relies on Value.create returning a passed-in Value unchanged — confirm
        Comparable<?> removedValue = fields.remove(name);
        return Value.create(removedValue);
    }

    @Override
    public Document setValue(CharSequence name, Value value) {
        // Store a defensive clone; normalize null to the canonical null value
        this.fields.put(name, value != null ? value.clone() : Value.nullValue());
        return this;
    }

    @Override
    public Document increment(CharSequence name, Value increment) {
        if (!increment.isNumber()) throw new IllegalArgumentException("The increment must be a number but is " + increment);
        if (fields.containsKey(name)) {
            Number current = getNumber(name);
            if (current != null) {
                Value updated = Value.create(MathOps.add(current, increment.asNumber()));
                // 'updated' is already a Value, so no need to re-wrap it with Value.create
                setValue(name, updated);
            }
        } else {
            // Absent fields are treated as zero: the field is set to the increment itself
            setValue(name, increment);
        }
        return this;
    }

    @Override
    public Document clone() {
        return new BasicDocument().putAll(this);
    }

    @Override
    public int hashCode() {
        return fields.hashCode();
    }

    @Override
    public boolean equals(Object obj) {
        if (obj instanceof BasicDocument) {
            BasicDocument that = (BasicDocument) obj;
            return fields.equals(that.fields);
        }
        if (obj instanceof Document) {
            // Different implementation: equal iff each contains all fields of the other
            Document that = (Document) obj;
            return this.hasAll(that) && that.hasAll(this);
        }
        return false;
    }

    @Override
    public String toString() {
        try {
            return DocumentWriter.prettyWriter().write(this);
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }
}

View File

@ -0,0 +1,64 @@
/*
* Copyright Debezium Authors.
*
* Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
*/
package io.debezium.document;
import java.util.Objects;
import io.debezium.annotation.Immutable;
/**
* Package-level implementation of {@link Array.Entry} in an {@link Array}.
*
* @author Randall Hauch
*/
@Immutable
final class BasicEntry implements Array.Entry, Comparable<Array.Entry> {

    private final int index;
    private final Value value;

    BasicEntry(int index, Value value) {
        this.index = index;
        this.value = value;
    }

    @Override
    public int getIndex() {
        return index;
    }

    @Override
    public Value getValue() {
        return value;
    }

    @Override
    public String toString() {
        return "@" + index + "=" + value;
    }

    @Override
    public int hashCode() {
        return index;
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == this) return true;
        if (obj instanceof Array.Entry) {
            Array.Entry that = (Array.Entry) obj;
            return this.getIndex() == that.getIndex() && Objects.equals(this.getValue(), that.getValue());
        }
        return false;
    }

    @Override
    public int compareTo(Array.Entry that) {
        if (this == that) return 0;
        // Use Integer.compare rather than subtraction, which can overflow for extreme values
        if (this.getIndex() != that.getIndex()) return Integer.compare(this.getIndex(), that.getIndex());
        // Null-safe value comparison ...
        return Value.compareTo(this.getValue(), that.getValue());
    }
}

View File

@ -0,0 +1,66 @@
/*
* Copyright Debezium Authors.
*
* Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
*/
package io.debezium.document;
import java.util.Objects;
import io.debezium.annotation.Immutable;
import io.debezium.util.Strings;
/**
* Package-level implementation of a {@link Document.Field} inside a {@link Document}.
*
* @author Randall Hauch
*/
@Immutable
final class BasicField implements Document.Field, Comparable<Document.Field> {

    private final CharSequence name;
    private final Value value;

    BasicField(CharSequence name, Value value) {
        this.name = name;
        this.value = value;
    }

    @Override
    public CharSequence getName() {
        return name;
    }

    @Override
    public Value getValue() {
        return value;
    }

    @Override
    public String toString() {
        return name + "=" + value;
    }

    @Override
    public int hashCode() {
        // Hash only the name; two fields with the same name land in the same bucket
        return name.hashCode();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) return true;
        if (!(obj instanceof Document.Field)) return false;
        // NOTE(review): CharSequence.equals across different implementations (e.g., String vs
        // StringBuilder) may not compare by content — confirm names are always Strings
        Document.Field other = (Document.Field) obj;
        return getName().equals(other.getName()) && Objects.equals(getValue(), other.getValue());
    }

    @Override
    public int compareTo(Document.Field that) {
        if (that == this) return 0;
        // Order by name first, then by value ...
        int nameDiff = Strings.compareTo(getName(), that.getName());
        return nameDiff != 0 ? nameDiff : Value.compareTo(getValue(), that.getValue());
    }
}

View File

@ -0,0 +1,205 @@
/*
* Copyright Debezium Authors.
*
* Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
*/
package io.debezium.document;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.Arrays;
import io.debezium.annotation.Immutable;
/**
* A specialization of {@link Value} that represents a binary value.
*
* @author Randall Hauch
*/
@Immutable
final class BinaryValue implements Value {

    private final byte[] value;

    BinaryValue(byte[] value) {
        assert value != null;
        this.value = value;
    }

    @Override
    public int hashCode() {
        // Hash the *contents*; byte[].hashCode() is identity-based, which made equal binary
        // values hash differently and broke the equals/hashCode contract
        return Arrays.hashCode(value);
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == this) return true;
        if (obj instanceof Value) {
            Value that = (Value) obj;
            if (that.isNull()) return false;
            // Compare contents; byte[].equals() is identity-based and reported equal
            // contents as unequal
            if (that.isBinary()) return Arrays.equals(this.value, that.asBytes());
            // NOTE(review): uses the platform default charset, as the original did — confirm
            // whether UTF-8 should be used here for consistency with the JSON serdes
            if (that.isString()) return Arrays.equals(this.value, that.asString().getBytes());
            return false;
        }
        return false;
    }

    @Override
    public String toString() {
        // NOTE(review): decodes with the platform default charset — confirm UTF-8 isn't required
        return new String(value);
    }

    @Override
    public int compareTo(Value that) {
        if (that.isNull()) return 1;
        // Binary values are ordered by length only (not content); all other types sort lower
        if (that.isBinary()) return this.value.length - that.asBytes().length;
        return 1;
    }

    @Override
    public Type getType() {
        return Type.BINARY;
    }

    @Override
    public Object asObject() {
        return value;
    }

    @Override
    public String asString() {
        return null;
    }

    @Override
    public Integer asInteger() {
        return null;
    }

    @Override
    public Long asLong() {
        return null;
    }

    @Override
    public Boolean asBoolean() {
        return null;
    }

    @Override
    public Number asNumber() {
        return null;
    }

    @Override
    public BigInteger asBigInteger() {
        return null;
    }

    @Override
    public BigDecimal asBigDecimal() {
        return null;
    }

    @Override
    public Float asFloat() {
        return null;
    }

    @Override
    public Double asDouble() {
        return null;
    }

    @Override
    public byte[] asBytes() {
        return value;
    }

    @Override
    public Document asDocument() {
        return null;
    }

    @Override
    public Array asArray() {
        return null;
    }

    @Override
    public boolean isNull() {
        return false;
    }

    @Override
    public boolean isString() {
        return false;
    }

    @Override
    public boolean isBoolean() {
        return false;
    }

    @Override
    public boolean isInteger() {
        return false;
    }

    @Override
    public boolean isLong() {
        return false;
    }

    @Override
    public boolean isFloat() {
        return false;
    }

    @Override
    public boolean isDouble() {
        return false;
    }

    @Override
    public boolean isNumber() {
        return false;
    }

    @Override
    public boolean isBigInteger() {
        return false;
    }

    @Override
    public boolean isBigDecimal() {
        return false;
    }

    @Override
    public boolean isBinary() {
        return true;
    }

    @Override
    public boolean isDocument() {
        return false;
    }

    @Override
    public boolean isArray() {
        return false;
    }

    @Override
    public Value convert() {
        return new ConvertingValue(this);
    }

    @Override
    public Value clone() {
        // Defensive copy so the clone cannot observe later mutation of this value's bytes
        return new BinaryValue(Arrays.copyOf(value, value.length));
    }
}

View File

@ -0,0 +1,309 @@
/*
* Copyright Debezium Authors.
*
* Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
*/
package io.debezium.document;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.HashMap;
import java.util.Map;
import io.debezium.annotation.Immutable;
/**
* A specialization of {@link Value} that wraps another {@link Value} that is not comparable.
*
* @author Randall Hauch
*/
@Immutable
final class ComparableValue implements Value {

    // Fast lookup of Type by the concrete class of the *wrapped* object
    private static final Map<Class<?>, Type> TYPES_BY_CLASS;
    static {
        Map<Class<?>, Type> types = new HashMap<>();
        types.put(String.class, Type.STRING);
        types.put(Boolean.class, Type.BOOLEAN);
        types.put(byte[].class, Type.BINARY);
        types.put(Integer.class, Type.INTEGER);
        types.put(Long.class, Type.LONG);
        types.put(Float.class, Type.FLOAT);
        types.put(Double.class, Type.DOUBLE);
        types.put(BigInteger.class, Type.BIG_INTEGER);
        types.put(BigDecimal.class, Type.DECIMAL);
        types.put(BasicDocument.class, Type.DOCUMENT);
        types.put(BasicArray.class, Type.ARRAY);
        TYPES_BY_CLASS = types;
    }

    /**
     * Determine the {@link Type} of the supplied value.
     *
     * @param value the value; may not be null
     * @return the type; never null
     */
    static Type typeForValue(Value value) {
        assert value != null;
        if (value.isNull()) return Type.NULL;
        // Check by the exact class of the wrapped object. (The original looked up
        // value.getClass() — a Value implementation class — which never matched the map
        // and always fell through to the instanceof checks below.)
        Type type = TYPES_BY_CLASS.get(value.asObject().getClass());
        if (type != null) return type;
        // Otherwise, check using instanceof ...
        if (value.isString()) return Type.STRING;
        if (value.isBoolean()) return Type.BOOLEAN;
        if (value.isBinary()) return Type.BINARY;
        if (value.isInteger()) return Type.INTEGER;
        if (value.isLong()) return Type.LONG;
        if (value.isFloat()) return Type.FLOAT;
        if (value.isDouble()) return Type.DOUBLE;
        if (value.isBigInteger()) return Type.BIG_INTEGER;
        if (value.isBigDecimal()) return Type.DECIMAL;
        if (value.isDocument()) return Type.DOCUMENT;
        if (value.isArray()) return Type.ARRAY;
        if (value.isNull()) return Type.NULL;
        assert false;
        throw new IllegalStateException();
    }

    private final Comparable<?> value;

    ComparableValue(Comparable<?> value) {
        assert value != null;
        this.value = value;
    }

    @Override
    public int hashCode() {
        return value.hashCode();
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == this) return true;
        if (obj instanceof Value) {
            Value that = (Value) obj;
            if (this.isNumber() && that.isNumber()) {
                // Compare numerically across numeric types (e.g., Integer 1 equals Long 1L) ...
                if (this.isLong()) return this.asLong().equals(that.asLong());
                if (this.isDouble()) return this.asDouble().equals(that.asDouble());
                if (this.isInteger()) return this.asInteger().equals(that.asInteger());
                if (this.isFloat()) return this.asFloat().equals(that.asFloat());
                if (this.isBigDecimal()) return this.asBigDecimal().equals(that.asBigDecimal());
                if (this.isBigInteger()) return this.asBigInteger().equals(that.asBigInteger());
            }
            return this.value.equals(that.asObject());
        }
        // Compare the value straight away ...
        return this.value.equals(obj);
    }

    @Override
    public String toString() {
        return value.toString();
    }

    @SuppressWarnings("unchecked")
    @Override
    public int compareTo(Value that) {
        if (Value.isNull(that)) return 1;
        if (this.isBoolean() && that.isBoolean()) {
            return this.asBoolean().compareTo(that.asBoolean());
        }
        if (this.isNumber() && that.isNumber()) {
            // Compare in the widest applicable representation ...
            if (this.isLong()) return this.asLong().compareTo(that.asLong());
            if (this.isDouble()) return this.asDouble().compareTo(that.asDouble());
            if (this.isInteger()) return this.asInteger().compareTo(that.asInteger());
            if (this.isFloat()) return this.asFloat().compareTo(that.asFloat());
            if (this.isBigDecimal()) return this.asBigDecimal().compareTo(that.asBigDecimal());
            return this.asBigInteger().compareTo(that.asBigInteger());
        }
        if (this.isDocument() && that.isDocument()) {
            return this.asDocument().compareTo(that.asDocument());
        }
        if (this.isArray() && that.isArray()) {
            return this.asArray().compareTo(that.asArray());
        }
        // Fall back to comparing the wrapped objects when the classes are compatible ...
        Comparable<Object> thisValue = (Comparable<Object>) this.asObject();
        Comparable<Object> thatValue = (Comparable<Object>) ((ComparableValue) that.comparable()).asObject();
        if (thisValue.getClass().isAssignableFrom(thatValue.getClass())) {
            return thisValue.compareTo(thatValue);
        } else if (thatValue.getClass().isAssignableFrom(thisValue.getClass())) {
            return thatValue.compareTo(thisValue) * -1; // correct for the reversed comparison
        }
        return ((Comparable<Object>) this.value).compareTo(that.asObject());
    }

    @Override
    public Type getType() {
        Type type = TYPES_BY_CLASS.get(value.getClass());
        if (type == null) {
            // Didn't match by exact class, so then figure out the extensible types by instanceof ...
            if (isDocument()) return Type.DOCUMENT;
            if (isArray()) return Type.ARRAY;
            if (isNull()) return Type.NULL;
        }
        assert type != null;
        return type;
    }

    @Override
    public Comparable<?> asObject() {
        return value;
    }

    @Override
    public String asString() {
        return isString() ? (String) value : null;
    }

    @Override
    public Integer asInteger() {
        if (value instanceof Integer) return (Integer) value;
        if (value instanceof Long) {
            long raw = ((Long) value).longValue();
            if (isValidInteger(raw)) return Integer.valueOf((int) raw);
        }
        return null;
    }

    private static boolean isValidInteger(long value) {
        return value >= Integer.MIN_VALUE && value <= Integer.MAX_VALUE;
    }

    private static boolean isValidFloat(double value) {
        // Use -Float.MAX_VALUE for the lower bound: Float.MIN_VALUE is the smallest
        // *positive* float, so the original comparison wrongly rejected every negative value
        return value >= -Float.MAX_VALUE && value <= Float.MAX_VALUE;
    }

    @Override
    public Long asLong() {
        if (value instanceof Long) return (Long) value;
        if (value instanceof Integer) return Long.valueOf(((Integer) value).longValue());
        return null;
    }

    @Override
    public Boolean asBoolean() {
        return isBoolean() ? (Boolean) value : null;
    }

    @Override
    public Number asNumber() {
        return isNumber() ? (Number) value : null;
    }

    @Override
    public BigInteger asBigInteger() {
        return isBigInteger() ? (BigInteger) value : null;
    }

    @Override
    public BigDecimal asBigDecimal() {
        return isBigDecimal() ? (BigDecimal) value : null;
    }

    @Override
    public Float asFloat() {
        if (value instanceof Float) return (Float) value;
        if (value instanceof Double) {
            double raw = ((Double) value).doubleValue();
            if (isValidFloat(raw)) return Float.valueOf((float) raw);
        }
        return null;
    }

    @Override
    public Double asDouble() {
        if (value instanceof Double) return (Double) value;
        if (value instanceof Float) return Double.valueOf(((Float) value).doubleValue());
        return null;
    }

    @Override
    public Document asDocument() {
        return isDocument() ? (Document) value : null;
    }

    @Override
    public Array asArray() {
        return isArray() ? (Array) value : null;
    }

    @Override
    public boolean isNull() {
        return false;
    }

    @Override
    public boolean isString() {
        return value instanceof String;
    }

    @Override
    public boolean isBoolean() {
        return value instanceof Boolean;
    }

    @Override
    public boolean isInteger() {
        return value instanceof Integer || (value instanceof Long && isValidInteger(((Long) value).longValue()));
    }

    @Override
    public boolean isLong() {
        return value instanceof Long || value instanceof Integer; // all integers are longs
    }

    @Override
    public boolean isFloat() {
        return value instanceof Float || (value instanceof Double && isValidFloat(((Double) value).doubleValue()));
    }

    @Override
    public boolean isDouble() {
        return value instanceof Double || value instanceof Float; // all floats are doubles
    }

    @Override
    public boolean isNumber() {
        return value instanceof Number;
    }

    @Override
    public boolean isBigInteger() {
        return value instanceof BigInteger || value instanceof Integer || value instanceof Long;
    }

    @Override
    public boolean isBigDecimal() {
        return value instanceof BigDecimal || value instanceof Float || value instanceof Double;
    }

    @Override
    public boolean isDocument() {
        return value instanceof Document;
    }

    @Override
    public boolean isArray() {
        return value instanceof Array;
    }

    @Override
    public boolean isBinary() {
        return false;
    }

    @Override
    public byte[] asBytes() {
        return null;
    }

    @Override
    public Value convert() {
        return new ConvertingValue(this);
    }

    @Override
    public Value clone() {
        // Deep-clone mutable containers; all other wrapped values are immutable ...
        if (isArray()) return new ComparableValue(asArray().clone());
        if (isDocument()) return new ComparableValue(asDocument().clone());
        return this;
    }
}

View File

@ -0,0 +1,280 @@
/*
* Copyright Debezium Authors.
*
* Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
*/
package io.debezium.document;
import java.math.BigDecimal;
import java.math.BigInteger;
import io.debezium.annotation.Immutable;
/**
* A specialization of {@link Value} that wraps another {@link Value} to allow conversion of types.
*
* @author Randall Hauch
*/
@Immutable
final class ConvertingValue implements Value {

    private final Value value;

    ConvertingValue(Value value) {
        assert value != null;
        this.value = value;
    }

    @Override
    public int hashCode() {
        return value.hashCode();
    }

    @Override
    public boolean equals(Object obj) {
        return value.equals(obj);
    }

    @Override
    public String toString() {
        return value.toString();
    }

    @Override
    public int compareTo(Value that) {
        return value.compareTo(that);
    }

    @Override
    public Type getType() {
        return value.getType();
    }

    @Override
    public Object asObject() {
        return value.asObject();
    }

    @Override
    public String asString() {
        return value.isNull() ? null : value.toString();
    }

    @Override
    public Boolean asBoolean() {
        if (value.isBoolean()) return value.asBoolean();
        // Any non-zero number is true ...
        if (value.isNumber()) return value.asNumber().intValue() == 0 ? Boolean.FALSE : Boolean.TRUE;
        if (value.isString()) {
            return Boolean.valueOf(asString());
        }
        return null;
    }

    @Override
    public Integer asInteger() {
        if (value.isInteger()) return value.asInteger();
        if (value.isNumber()) return Integer.valueOf(asNumber().intValue());
        if (value.isString()) {
            try {
                return Integer.valueOf(asString());
            } catch (NumberFormatException e) {
                // conversion is best-effort; fall through to null
            }
        }
        return null;
    }

    @Override
    public Long asLong() {
        if (value.isLong()) return value.asLong();
        if (value.isNumber()) return Long.valueOf(asNumber().longValue());
        if (value.isString()) {
            try {
                return Long.valueOf(asString());
            } catch (NumberFormatException e) {
                // conversion is best-effort; fall through to null
            }
        }
        return null;
    }

    @Override
    public Float asFloat() {
        if (value.isFloat()) return value.asFloat();
        if (value.isNumber()) return Float.valueOf(asNumber().floatValue());
        if (value.isString()) {
            try {
                return Float.valueOf(asString());
            } catch (NumberFormatException e) {
                // conversion is best-effort; fall through to null
            }
        }
        return null;
    }

    @Override
    public Double asDouble() {
        if (value.isDouble()) return value.asDouble();
        if (value.isNumber()) return Double.valueOf(asNumber().doubleValue());
        if (value.isString()) {
            try {
                return Double.valueOf(asString());
            } catch (NumberFormatException e) {
                // conversion is best-effort; fall through to null
            }
        }
        return null;
    }

    @Override
    public Number asNumber() {
        if (value.isNumber()) return value.asNumber();
        if (value.isString()) {
            // Try progressively wider numeric types; the first representation that parses wins
            String str = value.asString();
            try {
                return Integer.valueOf(str);
            } catch (NumberFormatException e) {
                try {
                    return Long.valueOf(str);
                } catch (NumberFormatException e1) {
                    try {
                        return Float.valueOf(str);
                    } catch (NumberFormatException e2) {
                        try {
                            return Double.valueOf(str);
                        } catch (NumberFormatException e3) {
                            try {
                                return new BigInteger(str);
                            } catch (NumberFormatException e4) {
                                try {
                                    return new BigDecimal(str);
                                } catch (NumberFormatException e5) {
                                    // not a number in any representation; fall through to null
                                }
                            }
                        }
                    }
                }
            }
        }
        return null;
    }

    @Override
    public BigInteger asBigInteger() {
        if (value.isBigInteger()) return value.asBigInteger();
        if (value.isBigDecimal()) return value.asBigDecimal().toBigInteger();
        // Use value.isNumber(): the original tested 'value instanceof Number', but 'value'
        // is a Value (never a Number), so numeric values were never converted here
        if (value.isNumber()) return BigInteger.valueOf(asLong().longValue());
        if (value.isString()) {
            try {
                return new BigInteger(asString());
            } catch (NumberFormatException e) {
                // conversion is best-effort; fall through to null
            }
        }
        return null;
    }

    @Override
    public BigDecimal asBigDecimal() {
        if (value.isBigDecimal()) return value.asBigDecimal();
        if (value.isBigInteger()) return new BigDecimal(value.asBigInteger());
        if (value.isInteger() || value.isLong()) return BigDecimal.valueOf(asLong().longValue());
        if (value.isFloat() || value.isDouble()) return BigDecimal.valueOf(asDouble().doubleValue());
        if (value.isString()) {
            try {
                return new BigDecimal(asString());
            } catch (NumberFormatException e) {
                // conversion is best-effort; fall through to null
            }
        }
        return null;
    }

    @Override
    public byte[] asBytes() {
        if (value.isBinary()) return value.asBytes();
        // NOTE(review): encodes with the platform default charset — confirm UTF-8 isn't required
        if (value.isString()) return value.asString().getBytes();
        return null;
    }

    @Override
    public Document asDocument() {
        return value.isDocument() ? value.asDocument() : null;
    }

    @Override
    public Array asArray() {
        return value.isArray() ? value.asArray() : null;
    }

    @Override
    public boolean isNull() {
        return value.isNull();
    }

    @Override
    public boolean isString() {
        return value.isString();
    }

    @Override
    public boolean isBoolean() {
        return value.isBoolean();
    }

    @Override
    public boolean isInteger() {
        return value.isInteger();
    }

    @Override
    public boolean isLong() {
        return value.isLong();
    }

    @Override
    public boolean isFloat() {
        return value.isFloat();
    }

    @Override
    public boolean isDouble() {
        return value.isDouble();
    }

    @Override
    public boolean isNumber() {
        return value.isNumber();
    }

    @Override
    public boolean isBigInteger() {
        return value.isBigInteger();
    }

    @Override
    public boolean isBigDecimal() {
        return value.isBigDecimal();
    }

    @Override
    public boolean isDocument() {
        return value.isDocument();
    }

    @Override
    public boolean isArray() {
        return value.isArray();
    }

    @Override
    public boolean isBinary() {
        return value.isBinary();
    }

    @Override
    public Value convert() {
        return this;
    }

    @Override
    public Value clone() {
        Value clonedValue = value.clone();
        if (clonedValue == value) return this;
        return new ConvertingValue(clonedValue);
    }
}

View File

@ -0,0 +1,1152 @@
/*
* Copyright Debezium Authors.
*
* Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
*/
package io.debezium.document;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.Iterator;
import java.util.Map;
import java.util.Optional;
import java.util.function.BiConsumer;
import java.util.function.BiFunction;
import java.util.function.Consumer;
import java.util.function.Predicate;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;
import io.debezium.annotation.NotThreadSafe;
/**
* A document contains multiple {@link Field}s, each with a name and possibly-null {@link Value}. A single document can only
* contain a single field with a given name.
*
* @author Randall Hauch
*/
@NotThreadSafe
public interface Document extends Iterable<Document.Field>, Comparable<Document> {
static interface Field extends Comparable<Field> {

    /**
     * Get the name of the field.
     *
     * @return the field's name; never null
     */
    CharSequence getName();

    /**
     * Get the value of the field.
     *
     * @return the field's value; may be null
     */
    Value getValue();

    /**
     * Determine whether this field's value is null, either because the {@link #getValue() value reference} is null
     * or because it is a {@link Value#isNull() null value}.
     *
     * @return true if the value is null, or false otherwise
     */
    default boolean isNull() {
        Value v = getValue();
        return v == null || v.isNull();
    }

    /**
     * Determine whether this field's value is not null.
     *
     * @return true if the value is not null, or false otherwise
     */
    default boolean isNotNull() {
        return !isNull();
    }

    @Override
    default int compareTo(Field that) {
        if (that == null) return 1;
        // Order primarily by field name ...
        int diff = this.getName().toString().compareTo(that.getName().toString());
        if (diff != 0) return diff;
        // Then order by value; guard against nulls since getValue() is documented as possibly null,
        // so calling compareTo on it directly could throw a NullPointerException ...
        Value thisValue = this.getValue();
        Value thatValue = that.getValue();
        if (thisValue == null) return thatValue == null ? 0 : -1;
        if (thatValue == null) return 1;
        return thisValue.compareTo(thatValue);
    }
}
/**
 * Create a field with the given name and {@link Value}.
 *
 * @param name the name of the field
 * @param value the value of the field
 * @return the new field; never null
 */
static Field field(String name, Value value) {
    return new BasicField(name, value);
}

/**
 * Create a field with the given name and a value wrapped via {@link Value#create(Object)}.
 *
 * @param name the name of the field
 * @param value the raw value of the field
 * @return the new field; never null
 */
static Field field(String name, Object value) {
    return new BasicField(name, Value.create(value));
}

/**
 * Create a new, empty document.
 *
 * @return the new document; never null
 */
static Document create() {
    return new BasicDocument();
}

/**
 * Create a new document with a single field.
 *
 * @param fieldName the name of the field
 * @param value the value of the field
 * @return the new document; never null
 */
static Document create(CharSequence fieldName, Object value) {
    return new BasicDocument().set(fieldName, value);
}

/**
 * Create a new document with two fields.
 *
 * @param fieldName1 the name of the first field
 * @param value1 the value of the first field
 * @param fieldName2 the name of the second field
 * @param value2 the value of the second field
 * @return the new document; never null
 */
static Document create(CharSequence fieldName1, Object value1, CharSequence fieldName2, Object value2) {
    return create(fieldName1, value1).set(fieldName2, value2);
}

/**
 * Create a new document with three fields.
 *
 * @param fieldName1 the name of the first field
 * @param value1 the value of the first field
 * @param fieldName2 the name of the second field
 * @param value2 the value of the second field
 * @param fieldName3 the name of the third field
 * @param value3 the value of the third field
 * @return the new document; never null
 */
static Document create(CharSequence fieldName1, Object value1, CharSequence fieldName2, Object value2, CharSequence fieldName3,
                       Object value3) {
    return create(fieldName1, value1, fieldName2, value2).set(fieldName3, value3);
}

/**
 * Create a new document with four fields.
 *
 * @param fieldName1 the name of the first field
 * @param value1 the value of the first field
 * @param fieldName2 the name of the second field
 * @param value2 the value of the second field
 * @param fieldName3 the name of the third field
 * @param value3 the value of the third field
 * @param fieldName4 the name of the fourth field
 * @param value4 the value of the fourth field
 * @return the new document; never null
 */
static Document create(CharSequence fieldName1, Object value1, CharSequence fieldName2, Object value2, CharSequence fieldName3,
                       Object value3, CharSequence fieldName4, Object value4) {
    return create(fieldName1, value1, fieldName2, value2, fieldName3, value3).set(fieldName4, value4);
}
/**
 * Return the number of name-value fields in this object.
 *
 * @return the number of name-value fields; never negative
 */
int size();

/**
 * Return whether this document contains no fields and is therefore empty.
 *
 * @return true if there are no fields in this document, or false if there is at least one.
 */
boolean isEmpty();

/**
 * Remove all fields from this document.
 */
void clear();

/**
 * Determine whether this document contains a field with the given name.
 *
 * @param fieldName The name of the field
 * @return true if the field exists, or false otherwise
 */
boolean has(CharSequence fieldName);

/**
 * Check whether this document contains all of the fields in the supplied document.
 *
 * @param document The document with the fields that should be in this document
 * @return true if this document contains all of the fields in the supplied document, or false otherwise
 */
boolean hasAll(Document document);
/**
 * Set the value at the given path resolved against this document, optionally adding any missing intermediary documents
 * or arrays based upon the format of the path segments.
 *
 * @param path the path at which the value is to be set
 * @param addIntermediaries true if any missing intermediary fields should be created, or false if any missing
 *            intermediary fields should be handled as an error via {@code invalid}
 * @param value the value that should be set at the given path; may be null or a {@link Value#nullValue() null value}
 * @param invalid the function that should be called if the supplied path cannot be resolved; may not be null
 * @return the {@code value} if successful or the {@link Optional#empty() empty (not present)} optional value if
 *         the path was invalid and could not be resolved (and {@code invalid} is invoked)
 */
default Optional<Value> set(Path path, boolean addIntermediaries, Value value, Consumer<Path> invalid) {
    if (path == null) return Optional.empty();
    if (path.isRoot()) {
        // This is an invalid path, since we don't know what to do with the value given just a root path ...
        invalid.accept(path);
        return Optional.empty();
    }
    if (path.isSingle()) {
        // Perform a simple set ...
        set(path.lastSegment().get(), value);
        return Optional.ofNullable(value);
    }
    // Otherwise, we need to find the parent that will contain the value ...
    Path parentPath = path.parent().get();
    Optional<Value> parent = Optional.empty();
    if (!addIntermediaries) {
        // Any missing intermediaries is considered invalid ...
        parent = find(parentPath, (missingPath, missingIndex) -> {
            invalid.accept(missingPath); // invoke the invalid handler
            return Optional.empty();
        } , invalid);
    } else {
        // Create any missing intermediaries using the segment after the missing segment to determine which
        // type of intermediate value to add ...
        parent = find(parentPath, (missingPath, missingIndex) -> {
            String nextSegment = path.segment(missingIndex + 1); // can always find next segment 'path' (not 'parentPath')...
            if (Path.Segments.isArrayIndex(nextSegment)) {
                return Optional.of(Value.create(Array.create()));
            } else {
                return Optional.of(Value.create(Document.create()));
            }
        } , invalid);
    }
    // If the parent could not be found (or created), the whole set fails ...
    if (!parent.isPresent()) return Optional.empty();
    // Apply the value to the parent using the last segment of the original path ...
    String lastSegment = path.lastSegment().get();
    Value parentValue = parent.get();
    if (parentValue.isDocument()) {
        parentValue.asDocument().set(lastSegment, value);
    } else if (parentValue.isArray()) {
        Array array = parentValue.asArray();
        if (Path.Segments.isAfterLastIndex(lastSegment)) {
            // The segment denotes "after the last index", so append ...
            array.add(value);
        } else {
            int index = Path.Segments.asInteger(lastSegment).get();
            array.setValue(index, value);
        }
    } else {
        // The parent is not a document or array ...
        invalid.accept(path);
        return Optional.empty();
    }
    return Optional.of(value);
}
/**
 * Attempt to find the value at the given path.
 *
 * @param path the path to find
 * @return the optional value at this path, which is {@link Optional#isPresent() present} if the value was found at that
 *         path or is {@link Optional#empty() empty (not present)} if there is no value at the path or if the path was not
 *         valid
 */
default Optional<Value> find(Path path) {
    // Delegate to the full form: never create missing segments, and ignore invalid paths ...
    return find(path, (missingPath, missingIndex) -> Optional.empty(), invalidPath -> {});
}
/**
 * Attempt to find the value at the given path, optionally creating missing segments.
 *
 * @param path the path to find
 * @param missingSegment function called when a segment in the path does not exist, and which should return a new value
 *            if one should be created or {@link Optional#empty()} if nothing should be created and {@code invalid} function
 *            should be called by this method
 * @param invalid function called when the supplied path is invalid; in this case, this method also returns
 *            {@link Optional#empty()}
 * @return the optional value at this path, which is {@link Optional#isPresent() present} if the value was found at that
 *         path or is {@link Optional#empty() empty (not present)} if there is no value at the path or if the path was not
 *         valid
 */
default Optional<Value> find(Path path, BiFunction<Path, Integer, Optional<Value>> missingSegment, Consumer<Path> invalid) {
    if (path == null) return Optional.empty();
    if (path.isRoot()) {
        // The root path resolves to this document itself ...
        return Optional.of(Value.create(this));
    }
    // Walk the path one segment at a time, descending from this document ...
    Value value = Value.create(this);
    int i = 0;
    for (String segment : path) {
        if (value.isDocument()) {
            Value existingValue = value.asDocument().get(segment);
            if (Value.isNull(existingValue)) {
                // It does not exist ...
                Optional<Value> newValue = missingSegment.apply(path, i);
                if (newValue.isPresent()) {
                    // Add the new value (whatever it is) ...
                    Document doc = value.asDocument();
                    doc.set(segment, newValue.get());
                    value = doc.get(segment);
                } else {
                    return Optional.empty();
                }
            } else {
                value = existingValue;
            }
        } else if (value.isArray()) {
            Array array = value.asArray();
            if (Path.Segments.isAfterLastIndex(segment)) {
                // This means "after the last index", so call it as missing ...
                Optional<Value> newValue = missingSegment.apply(path, i);
                if (newValue.isPresent()) {
                    // Add the new value (whatever it is) ...
                    value = newValue.get();
                    array.add(value);
                } else {
                    return Optional.empty();
                }
            } else {
                Optional<Integer> index = Path.Segments.asInteger(segment);
                if (index.isPresent()) {
                    // This is an index ...
                    if (array.has(index.get())) {
                        value = array.get(index.get());
                    } else if (array.size() == index.get()) {
                        // We can add at this index ...
                        Optional<Value> newValue = missingSegment.apply(path, i);
                        if (newValue.isPresent()) {
                            // Add the new value (whatever it is) ...
                            // NOTE(review): unlike the sibling branches above, 'value' is not advanced to the newly
                            // added element here, so subsequent segments still resolve against the array — confirm
                            // whether this is intentional.
                            array.add(newValue.get());
                        } else {
                            return Optional.empty();
                        }
                    } else {
                        // The index is not valid (it's too big to be an existing or the next index) ...
                        invalid.accept(path.subpath(i));
                        return Optional.empty();
                    }
                } else {
                    // This is not an array index but we're expecting it to be, so this is a bad path
                    invalid.accept(path.subpath(i));
                    return Optional.empty();
                }
            }
        } else {
            // We're supposed to find the segment within this value, but it's not a document or array ...
            invalid.accept(path.subpath(i));
            return Optional.empty();
        }
        ++i;
    }
    return Optional.of(value);
}
/**
 * Find a document at the given path and obtain a stream over its fields. This will return an empty stream when:
 * <ul>
 * <li>a value does not exist in this document at the supplied path; or</li>
 * <li>a non-document value does exist in this document at the supplied path; or</li>
 * <li>a document value does exist in this document at the supplied path, but that document is empty</li>
 * </ul>
 *
 * @param path the path to the contained document
 * @return the stream of fields in the document at the given path; never null
 */
default Stream<Field> children(Path path) {
    Optional<Value> found = find(path);
    if (found.isPresent() && found.get().isDocument()) {
        return found.get().asDocument().stream();
    }
    // Missing value or non-document value ...
    return Stream.empty();
}

/**
 * Find the document at the given field name and obtain a stream over its fields. This will return an empty stream when:
 * <ul>
 * <li>a field with the given name does not exist in this document; or</li>
 * <li>a field with the given name does exist in this document but the value is not a document; or</li>
 * <li>a field with the given name does exist in this document and the value is an empty document</li>
 * </ul>
 *
 * @param fieldName the name of the field containing the nested document
 * @return the stream of fields within the nested document; never null
 */
default Stream<Field> children(String fieldName) {
    Document nested = getDocument(fieldName);
    return nested == null ? Stream.<Field>empty() : nested.stream();
}
/**
 * Gets the field in this document with the given field name.
 *
 * @param fieldName The name of the field
 * @return The field, if found, or null otherwise
 */
default Field getField(CharSequence fieldName) {
    Value value = get(fieldName);
    if (value == null) {
        return null;
    }
    return new BasicField(fieldName, value);
}

/**
 * Gets the value in this document for the given field name.
 *
 * @param fieldName The name of the field
 * @return The field value, if found, or null otherwise
 */
default Value get(CharSequence fieldName) {
    return get(fieldName, null);
}

/**
 * Gets the value in this document for the given field name.
 *
 * @param fieldName The name of the field
 * @param defaultValue the default value to return if there is no such field
 * @return The field value, if found, or <code>defaultValue</code> if there is no such field
 */
Value get(CharSequence fieldName, Comparable<?> defaultValue);
/**
 * Get the boolean value in this document for the given field name.
 *
 * @param fieldName The name of the field
 * @return The boolean field value, if found, or null if there is no such field or if the value is not a boolean
 */
default Boolean getBoolean(CharSequence fieldName) {
    Value value = get(fieldName);
    if (value == null || !value.isBoolean()) return null;
    return value.asBoolean();
}

/**
 * Get the boolean value in this document for the given field name.
 *
 * @param fieldName The name of the field
 * @param defaultValue the default value to return if there is no such field or if the value is not a boolean
 * @return The boolean field value if found, or <code>defaultValue</code> if there is no such field or if the value is not a
 *         boolean
 */
default boolean getBoolean(CharSequence fieldName,
                           boolean defaultValue) {
    Value value = get(fieldName);
    if (value == null || !value.isBoolean()) return defaultValue;
    return value.asBoolean().booleanValue();
}

/**
 * Get the integer value in this document for the given field name.
 *
 * @param fieldName The name of the field
 * @return The integer field value, if found, or null if there is no such field or if the value is not an integer
 */
default Integer getInteger(CharSequence fieldName) {
    Value value = get(fieldName);
    if (value == null || !value.isInteger()) return null;
    return value.asInteger();
}

/**
 * Get the integer value in this document for the given field name.
 *
 * @param fieldName The name of the field
 * @param defaultValue the default value to return if there is no such field or if the value is not an integer
 * @return The integer field value if found, or <code>defaultValue</code> if there is no such field or if the value is not an
 *         integer
 */
default int getInteger(CharSequence fieldName,
                       int defaultValue) {
    Value value = get(fieldName);
    if (value == null || !value.isInteger()) return defaultValue;
    return value.asInteger().intValue();
}

/**
 * Get the long value in this document for the given field name.
 *
 * @param fieldName The name of the field
 * @return The long field value, if found, or null if there is no such field or if the value is not a long value
 */
default Long getLong(CharSequence fieldName) {
    Value value = get(fieldName);
    if (value == null || !value.isLong()) return null;
    return value.asLong();
}

/**
 * Get the long value in this document for the given field name.
 *
 * @param fieldName The name of the field
 * @param defaultValue the default value to return if there is no such field or if the value is not a long value
 * @return The long field value if found, or <code>defaultValue</code> if there is no such field or if the value is not a long
 *         value
 */
default long getLong(CharSequence fieldName,
                     long defaultValue) {
    Value value = get(fieldName);
    if (value == null || !value.isLong()) return defaultValue;
    return value.asLong().longValue();
}

/**
 * Get the double value in this document for the given field name.
 *
 * @param fieldName The name of the field
 * @return The double field value, if found, or null if there is no such field or if the value is not a double
 */
default Double getDouble(CharSequence fieldName) {
    Value value = get(fieldName);
    if (value == null || !value.isDouble()) return null;
    return value.asDouble();
}

/**
 * Get the double value in this document for the given field name.
 *
 * @param fieldName The name of the field
 * @param defaultValue the default value to return if there is no such field or if the value is not a double
 * @return The double field value if found, or <code>defaultValue</code> if there is no such field or if the value is not a
 *         double
 */
default double getDouble(CharSequence fieldName,
                         double defaultValue) {
    Value value = get(fieldName);
    if (value == null || !value.isDouble()) return defaultValue;
    return value.asDouble().doubleValue();
}

/**
 * Get the float value in this document for the given field name.
 *
 * @param fieldName The name of the field
 * @return The float field value, if found, or null if there is no such field or if the value is not a float
 */
default Float getFloat(CharSequence fieldName) {
    Value value = get(fieldName);
    if (value == null || !value.isFloat()) return null;
    return value.asFloat();
}

/**
 * Get the float value in this document for the given field name.
 *
 * @param fieldName The name of the field
 * @param defaultValue the default value to return if there is no such field or if the value is not a float
 * @return The float field value if found, or <code>defaultValue</code> if there is no such field or if the value is not a
 *         float
 */
default float getFloat(CharSequence fieldName,
                       float defaultValue) {
    Value value = get(fieldName);
    if (value == null || !value.isFloat()) return defaultValue;
    return value.asFloat().floatValue();
}
/**
 * Get the number value in this document for the given field name.
 *
 * @param fieldName The name of the field
 * @return The number field value, if found, or null if there is no such field or if the value is not a number
 */
default Number getNumber(CharSequence fieldName) {
    return getNumber(fieldName, null);
}

/**
 * Get the number value in this document for the given field name.
 *
 * @param fieldName The name of the field
 * @param defaultValue the default value to return if there is no such field or if the value is not a number
 * @return The number field value if found, or <code>defaultValue</code> if there is no such field or if the value is not a
 *         number
 */
default Number getNumber(CharSequence fieldName,
                         Number defaultValue) {
    Value value = get(fieldName);
    if (value == null || !value.isNumber()) return defaultValue;
    return value.asNumber();
}

/**
 * Get the big integer value in this document for the given field name.
 *
 * @param fieldName The name of the field
 * @return The big integer field value, if found, or null if there is no such field or if the value is not a big integer
 */
default BigInteger getBigInteger(CharSequence fieldName) {
    return getBigInteger(fieldName, null);
}

/**
 * Get the big integer value in this document for the given field name.
 *
 * @param fieldName The name of the field
 * @param defaultValue the default value to return if there is no such field or if the value is not a big integer
 * @return The big integer field value if found, or <code>defaultValue</code> if there is no such field or if the value is
 *         not a big integer
 */
default BigInteger getBigInteger(CharSequence fieldName, BigInteger defaultValue) {
    Value value = get(fieldName);
    if (value == null || !value.isBigInteger()) return defaultValue;
    return value.asBigInteger();
}

/**
 * Get the big decimal value in this document for the given field name.
 *
 * @param fieldName The name of the field
 * @return The big decimal field value, if found, or null if there is no such field or if the value is not a big decimal
 */
default BigDecimal getBigDecimal(CharSequence fieldName) {
    return getBigDecimal(fieldName, null);
}

/**
 * Get the big decimal value in this document for the given field name.
 *
 * @param fieldName The name of the field
 * @param defaultValue the default value to return if there is no such field or if the value is not a big decimal
 * @return The big decimal field value if found, or <code>defaultValue</code> if there is no such field or if the value is
 *         not a big decimal
 */
default BigDecimal getBigDecimal(CharSequence fieldName, BigDecimal defaultValue) {
    Value value = get(fieldName);
    if (value == null || !value.isBigDecimal()) return defaultValue;
    return value.asBigDecimal();
}
/**
 * Get the string value in this document for the given field name.
 *
 * @param fieldName The name of the field
 * @return The string field value, if found, or null if there is no such field or if the value is not a string
 */
default String getString(CharSequence fieldName) {
    return getString(fieldName, null);
}

/**
 * Get the string value in this document for the given field name.
 *
 * @param fieldName The name of the field
 * @param defaultValue the default value to return if there is no such field or if the value is not a string
 * @return The string field value if found, or <code>defaultValue</code> if there is no such field or if the value is not a
 *         string
 */
default String getString(CharSequence fieldName,
                         String defaultValue) {
    Value value = get(fieldName);
    if (value == null || !value.isString()) return defaultValue;
    return value.asString();
}

/**
 * Get the binary value in this document for the given field name.
 *
 * @param fieldName The name of the field
 * @return The binary field value, if found, or null if there is no such field or if the value is not a binary value
 */
default byte[] getBytes(CharSequence fieldName) {
    Value value = get(fieldName);
    if (value == null || !value.isBinary()) return null;
    return value.asBytes();
}
/**
 * Get the array value in this document for the given field name.
 *
 * @param fieldName The name of the field
 * @return The array field value, if found, or null if there is no such field or if the value is not an array
 */
default Array getArray(CharSequence fieldName) {
    return getArray(fieldName, null);
}

/**
 * Get the array value in this document for the given field name.
 *
 * @param fieldName The name of the field
 * @param defaultValue the default array that should be returned if there is no such field
 * @return The array field value, if found, or the default value if there is no such field or if the value is not
 *         an array
 */
default Array getArray(CharSequence fieldName, Array defaultValue) {
    Value value = get(fieldName);
    if (value == null || !value.isArray()) return defaultValue;
    return value.asArray();
}

/**
 * Get the existing array value in this document for the given field name, or create a new array if there is no existing array
 * at this field.
 *
 * @param fieldName The name of the field
 * @return The editable array field value; never null
 */
default Array getOrCreateArray(CharSequence fieldName) {
    Value existing = get(fieldName);
    // A missing or null field gets a new array installed via setArray ...
    return (existing == null || existing.isNull()) ? setArray(fieldName, (Array) null) : existing.asArray();
}
/**
 * Get the document value in this document for the given field name.
 *
 * @param fieldName The name of the field
 * @return The document field value, if found, or null if there is no such field or if the value is not a document
 */
default Document getDocument(CharSequence fieldName) {
    Value value = get(fieldName);
    if (value == null || !value.isDocument()) return null;
    return value.asDocument();
}

/**
 * Get the existing document value in this document for the given field name, or create a new document if there is no existing
 * document at this field.
 *
 * @param fieldName The name of the field
 * @return The editable document field value; null if the field exists but is not a document
 */
default Document getOrCreateDocument(CharSequence fieldName) {
    Value existing = get(fieldName);
    // A missing or null field gets a new document installed via setDocument ...
    return (existing == null || existing.isNull()) ? setDocument(fieldName, (Document) null) : existing.asDocument();
}
/**
 * Determine whether this object has a field with the given name whose value is null. This is equivalent to calling:
 *
 * <pre>
 * this.get(name) instanceof Null;
 * </pre>
 *
 * @param fieldName The name of the field
 * @return <code>true</code> if the field exists but is null, or false otherwise
 * @see #isNullOrMissing(CharSequence)
 */
default boolean isNull(CharSequence fieldName) {
    Value value = get(fieldName);
    if (value == null) {
        // Missing field is NOT considered null here ...
        return false;
    }
    return value.isNull();
}

/**
 * Determine whether this object has a field with the given name whose value is null, or has no field with the given name
 * at all. This is equivalent to calling:
 *
 * <pre>
 * Null.matches(this.get(name));
 * </pre>
 *
 * @param fieldName The name of the field
 * @return <code>true</code> if the field value for the name is null or if there is no such field.
 * @see #isNull(CharSequence)
 */
default boolean isNullOrMissing(CharSequence fieldName) {
    Value value = get(fieldName);
    if (value == null) {
        // Missing field counts as null here ...
        return true;
    }
    return value.isNull();
}
/**
 * Returns this object's fields' names.
 *
 * @return The names of the fields in this object
 */
Iterable<CharSequence> keySet();

/**
 * Obtain a clone of this document.
 *
 * @return the clone of this document; never null
 */
Document clone();

/**
 * Remove the field with the supplied name, and return the value.
 *
 * @param name The name of the field
 * @return the value that was removed, or null if there was no such value
 */
Value remove(CharSequence name);

/**
 * If the supplied name is present, remove the field with that name and return the removed value.
 *
 * @param name The optional name of the field
 * @return the value that was removed, or null if the name was absent, the field was not present, or there was no such value
 */
default Value remove(Optional<? extends CharSequence> name) {
    return name.map(this::remove).orElse(null);
}

/**
 * Remove all fields from this document.
 *
 * @return This document, to allow for chaining methods
 */
Document removeAll();
/**
 * Sets on this object all name/value pairs from the supplied iterator.
 *
 * @param fields the name/value pairs to be set on this object; may not be null
 * @return This document, to allow for chaining methods
 */
default Document putAll(Iterator<Field> fields) {
    fields.forEachRemaining(field -> setValue(field.getName(), field.getValue()));
    return this;
}

/**
 * Sets on this object all name/value pairs from the supplied iterable.
 *
 * @param fields the name/value pairs to be set on this object; may not be null
 * @return This document, to allow for chaining methods
 */
default Document putAll(Iterable<Field> fields) {
    fields.forEach(field -> setValue(field.getName(), field.getValue()));
    return this;
}

/**
 * Copies onto this document all of the acceptable fields from the source, overwriting any existing values.
 *
 * @param fields the name/value pairs to be set on this object; may not be null
 * @param acceptableFieldNames the predicate to determine which fields from the source should be copied; may not be null
 * @return This document, to allow for chaining methods
 */
default Document putAll(Iterable<Field> fields, Predicate<CharSequence> acceptableFieldNames) {
    for (Field field : fields) {
        CharSequence name = field.getName();
        if (acceptableFieldNames.test(name)) {
            setValue(name, field.getValue());
        }
    }
    return this;
}

/**
 * Sets on this object all key/value pairs from the supplied map. If the supplied map is null, this method does nothing.
 *
 * @param fields the map containing the name/value pairs to be set on this object
 * @return This document, to allow for chaining methods
 */
default Document putAll(Map<? extends CharSequence, Object> fields) {
    if (fields != null) {
        fields.forEach(this::set);
    }
    return this;
}
/**
 * Returns a sequential {@code Stream} with this document's fields as its source.
 *
 * @return a sequential {@code Stream} over the fields in this document
 */
default Stream<Field> stream() {
    return StreamSupport.stream(spliterator(), false);
}

/**
 * Walk this document's fields, invoking the consumer with the path and value of each entry. Nested documents are
 * recursed into (the consumer sees the full path from this document's root); array fields are reported one entry at a
 * time, using the entry's index as the final path segment.
 *
 * @param consumer the function invoked with each path/value pair; may not be null
 */
default void forEach(BiConsumer<Path, Value> consumer) {
    Path root = Path.root();
    stream().forEach((field) -> {
        Path path = root.append(field.getName().toString());
        Value value = field.getValue();
        if (value.isDocument()) {
            // Recurse into the nested document, prefixing each nested path with this field's name ...
            value.asDocument().forEach((p, v) -> {
                consumer.accept(path.append(p), v);
            });
        } else if (value.isArray()) {
            // Report each array entry, using its index as the next path segment; note array entries are
            // not themselves recursed into here ...
            value.asArray().forEach((entry) -> {
                consumer.accept(path.append(Integer.toString(entry.getIndex())), entry.getValue());
            });
        } else {
            // A simple leaf value ...
            consumer.accept(path, value);
        }
    });
}
/**
 * Transform all of the field values using the supplied {@link BiFunction transformer function}.
 *
 * @param transformer the transformer that should be used to transform each field value; may not be null
 * @return this document with transformed fields, or this document if the transformer changed none of the values
 */
default Document transform(BiFunction<CharSequence, Value, Value> transformer) {
    for (Field field : this) {
        CharSequence name = field.getName();
        Value current = get(name);
        Value replacement = transformer.apply(name, current);
        if (replacement == null) replacement = Value.nullValue();
        // Only write back when the transformer produced a different instance ...
        if (replacement != current) {
            setValue(name, replacement);
        }
    }
    return this;
}

/**
 * Set the value for the field with the given name. A raw object is first wrapped via {@link Value#create(Object)};
 * a {@link Value} is stored as-is.
 *
 * @param name The name of the field
 * @param value the new value
 * @return This document, to allow for chaining methods
 */
default Document set(CharSequence name, Object value) {
    Value wrapped = (value instanceof Value) ? (Value) value : Value.create(value);
    setValue(name, wrapped);
    return this;
}
/**
 * Set the value for the field with the given name to be a null value. The {@link #isNull(CharSequence)} method can be used
 * to determine if a field has been set to null, or {@link #isNullOrMissing(CharSequence)} if the field has not been set or
 * if it has been set to null.
 *
 * @param name The name of the field
 * @return This document, to allow for chaining methods
 * @see #isNull(CharSequence)
 * @see #isNullOrMissing(CharSequence)
 */
default Document setNull(CharSequence name) {
    return set(name, Value.nullValue());
}

/**
 * Set the value for the field with the given name to the supplied boolean value.
 *
 * @param name The name of the field
 * @param value the new value for the field
 * @return This document, to allow for chaining methods
 */
default Document setBoolean(CharSequence name,
                            boolean value) {
    return set(name, Value.create(value));
}

/**
 * Set the value for the field with the given name to the supplied integer value.
 *
 * @param name The name of the field
 * @param value the new value for the field
 * @return This document, to allow for chaining methods
 */
default Document setNumber(CharSequence name,
                           int value) {
    return set(name, Value.create(value));
}

/**
 * Set the value for the field with the given name to the supplied long value.
 *
 * @param name The name of the field
 * @param value the new value for the field
 * @return This document, to allow for chaining methods
 */
default Document setNumber(CharSequence name,
                           long value) {
    return set(name, Value.create(value));
}

/**
 * Set the value for the field with the given name to the supplied float value.
 *
 * @param name The name of the field
 * @param value the new value for the field
 * @return This document, to allow for chaining methods
 */
default Document setNumber(CharSequence name,
                           float value) {
    return set(name, Value.create(value));
}

/**
 * Set the value for the field with the given name to the supplied double value.
 *
 * @param name The name of the field
 * @param value the new value for the field
 * @return This document, to allow for chaining methods
 */
default Document setNumber(CharSequence name,
                           double value) {
    return set(name, Value.create(value));
}

/**
 * Set the value for the field with the given name to the supplied big integer value.
 *
 * @param name The name of the field
 * @param value the new value for the field
 * @return This document, to allow for chaining methods
 */
default Document setNumber(CharSequence name,
                           BigInteger value) {
    return set(name, Value.create(value));
}

/**
 * Set the value for the field with the given name to the supplied big decimal value.
 *
 * @param name The name of the field
 * @param value the new value for the field
 * @return This document, to allow for chaining methods
 */
default Document setNumber(CharSequence name,
                           BigDecimal value) {
    return set(name, Value.create(value));
}

/**
 * Set the value for the field with the given name to the supplied string value.
 *
 * @param name The name of the field
 * @param value the new value for the field
 * @return This document, to allow for chaining methods
 */
default Document setString(CharSequence name,
                           String value) {
    return set(name, Value.create(value));
}
    /**
     * Increment the numeric value in the given field by the designated amount.
     *
     * @param name The name of the field
     * @param increment the amount to increment the existing value; may be negative to decrement
     * @return this document to allow for chaining methods
     * @throws IllegalArgumentException if the current value is not a number
     */
    default Document increment(CharSequence name, int increment) {
        return increment(name, Value.create(increment));
    }
    /**
     * Increment the numeric value in the given field by the designated amount.
     *
     * @param name The name of the field
     * @param increment the amount to increment the existing value; may be negative to decrement
     * @return this document to allow for chaining methods
     * @throws IllegalArgumentException if the current value is not a number
     */
    default Document increment(CharSequence name, long increment) {
        return increment(name, Value.create(increment));
    }
    /**
     * Increment the numeric value in the given field by the designated amount.
     *
     * @param name The name of the field
     * @param increment the amount to increment the existing value; may be negative to decrement
     * @return this document to allow for chaining methods
     * @throws IllegalArgumentException if the current value is not a number
     */
    default Document increment(CharSequence name, double increment) {
        return increment(name, Value.create(increment));
    }
    /**
     * Increment the numeric value in the given field by the designated amount.
     *
     * @param name The name of the field
     * @param increment the amount to increment the existing value; may be negative to decrement
     * @return this document to allow for chaining methods
     * @throws IllegalArgumentException if the current value is not a number
     */
    default Document increment(CharSequence name, float increment) {
        return increment(name, Value.create(increment));
    }
    /**
     * Increment the numeric value in the given field by the designated amount. This is the primitive
     * operation that the other {@code increment(...)} overloads delegate to.
     *
     * @param name The name of the field
     * @param increment the amount to increment the existing value; may be negative to decrement
     * @return this document to allow for chaining methods
     * @throws IllegalArgumentException if the current value is not a number
     */
    Document increment(CharSequence name, Value increment);
/**
* Set the value for the field with the given name to be a binary value. The value will be encoded as Base64.
*
* @param name The name of the field
* @param data the bytes for the binary value
* @return This document, to allow for chaining methods
*/
default Document setBinary(CharSequence name,
byte[] data) {
setValue(name, Value.create(data));
return this;
}
    /**
     * Set the value for the field with the given name. This is the primitive mutator that all of
     * the typed {@code set*(...)} convenience methods delegate to.
     *
     * @param name The name of the field
     * @param value the new value
     * @return This document, to allow for chaining methods
     */
    Document setValue(CharSequence name,
                      Value value);
/**
* Set the field on this document.
*
* @param field The field
* @return This document, to allow for chaining methods
*/
default Document setValue(Field field) {
return setValue(field.getName(), field.getValue());
}
    /**
     * Set the value for the field with the given name to be a new, empty Document.
     *
     * @param name The name of the field
     * @return The editable document that was just created; never null
     */
    default Document setDocument(CharSequence name) {
        return setDocument(name, Document.create());
    }
/**
* Set the value for the field with the given name to be the supplied Document.
*
* @param name The name of the field
* @param document the document; if null, a new document will be created
* @return The document that was just set as the value for the named field; never null and may or may not be the same
* instance as the supplied <code>document</code>.
*/
default Document setDocument(CharSequence name,
Document document) {
if (document == null) document = Document.create();
setValue(name, Value.create(document));
return getDocument(name);
}
    /**
     * Set the value for the field with the given name to be a new, empty array.
     *
     * @param name The name of the field
     * @return The array that was just created; never null
     */
    default Array setArray(CharSequence name) {
        return setArray(name, Array.create());
    }
/**
* Set the value for the field with the given name to be the supplied array.
*
* @param name The name of the field
* @param array the array
* @return The array that was just set as the value for the named field; never null and may or may not be the same
* instance as the supplied <code>array</code>.
*/
default Array setArray(CharSequence name,
Array array) {
if (array == null) array = Array.create();
setValue(name, Value.create(array));
return getArray(name);
}
/**
* Set the value for the field with the given name to be the supplied array.
*
* @param name The name of the field
* @param values the (valid) values for the array
* @return The array that was just set as the value for the named field; never null and may or may not be the same
* instance as the supplied <code>array</code>.
*/
default Array setArray(CharSequence name,
Object... values) {
return setArray(name, Value.create(Array.create(values)));
}
    /**
     * Compare this Document to the specified Document, taking into account the order of the fields.
     * Two documents with the same fields in a different order do not compare as equal here.
     *
     * @param that the other Document to be compared to this object
     * @return a negative integer, zero, or a positive integer as this object
     *         is less than, equal to, or greater than the specified object.
     */
    @Override
    int compareTo(Document that);
    /**
     * Compare this Document to the specified Document, without regard to the order of the fields.
     * Equivalent to {@code compareTo(that, false)}.
     *
     * @param that the other Document to be compared to this object
     * @return a negative integer, zero, or a positive integer as this object
     *         is less than, equal to, or greater than the specified object.
     */
    int compareToWithoutFieldOrder(Document that);
    /**
     * Compare this Document to the specified Document, optionally comparing the fields in the same order.
     *
     * @param that the other Document to be compared to this object
     * @param enforceFieldOrder {@code true} if the documents should be compared using their existing field order, or
     *            {@code false} if the field order should not affect the result.
     * @return a negative integer, zero, or a positive integer as this object
     *         is less than, equal to, or greater than the specified object.
     */
    int compareTo(Document that, boolean enforceFieldOrder);
}

View File

@ -0,0 +1,98 @@
/*
* Copyright Debezium Authors.
*
* Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
*/
package io.debezium.document;
import java.io.BufferedInputStream;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.Reader;
import java.net.URL;
import io.debezium.annotation.ThreadSafe;
/**
* Reads {@link Document} instances from a variety of input forms.
*
* @author Randall Hauch
*/
@ThreadSafe
public interface DocumentReader {
    /**
     * Get the default {@link DocumentReader} instance.
     *
     * @return the shared default reader instance; never null
     */
    static DocumentReader defaultReader() {
        return JacksonReader.INSTANCE;
    }
    /**
     * Read a document from the supplied stream.
     *
     * @param jsonStream the input stream to be read; may not be null
     * @return the document instance; never null
     * @throws IOException if a document could not be read from the supplied stream
     */
    Document read(InputStream jsonStream) throws IOException;
    /**
     * Read a document from the supplied {@link Reader}.
     *
     * @param jsonReader the reader to be read; may not be null
     * @return the document instance; never null
     * @throws IOException if a document could not be read from the supplied reader
     */
    Document read(Reader jsonReader) throws IOException;
    /**
     * Read a document from the supplied JSON-formatted string.
     *
     * @param json the JSON string representation to be read; may not be null
     * @return the document instance; never null
     * @throws IOException if a document could not be read from the supplied string
     */
    Document read(String json) throws IOException;
    /**
     * Read a document from the content at the given URL.
     *
     * @param jsonUrl the URL to the content that is to be read; may not be null
     * @return the document instance; never null
     * @throws IOException if a document could not be read from the supplied content
     */
    default Document read(URL jsonUrl) throws IOException {
        // Close the opened stream even if reading fails (the original leaked it on error).
        try (InputStream stream = jsonUrl.openStream()) {
            return read(stream);
        }
    }
    /**
     * Read a document from the supplied file.
     *
     * @param jsonFile the file to be read; may not be null
     * @return the document instance; never null
     * @throws IOException if a document could not be read from the supplied file
     */
    default Document read(File jsonFile) throws IOException {
        // Close the opened stream even if reading fails (the original leaked it on error).
        try (InputStream stream = new BufferedInputStream(new FileInputStream(jsonFile))) {
            return read(stream);
        }
    }
    /**
     * Read a document from the supplied bytes.
     *
     * @param rawBytes the UTF-8 bytes to be read; may not be null
     * @return the document instance; never null
     * @throws IOException if a document could not be read from the supplied bytes
     */
    default Document read(byte[] rawBytes) throws IOException {
        try (ByteArrayInputStream stream = new ByteArrayInputStream(rawBytes)) {
            // Delegate to THIS reader; the original called DocumentReader.defaultReader(),
            // which silently bypassed any custom implementation of read(InputStream).
            return read(stream);
        }
    }
}

View File

@ -0,0 +1,56 @@
/*
* Copyright Debezium Authors.
*
* Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
*/
package io.debezium.document;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.Map;
import org.apache.kafka.common.serialization.Deserializer;
import org.apache.kafka.common.serialization.Serializer;
import io.debezium.annotation.Immutable;
/**
* A Kafka {@link Deserializer} and {@link Serializer} that operates upon Debezium {@link Document}s.
*
* @author Randall Hauch
*/
@Immutable
public class DocumentSerdes implements Serializer<Document>, Deserializer<Document> {
    /** Shared stateless instance; safe to reuse because this class holds no mutable state. */
    public static final DocumentSerdes INSTANCE = new DocumentSerdes();
    private static final DocumentReader DOCUMENT_READER = DocumentReader.defaultReader();
    private static final DocumentWriter DOCUMENT_WRITER = DocumentWriter.defaultWriter();
    @Override
    public void configure(Map<String, ?> configs, boolean isKey) {
        // No configuration is needed.
    }
    @Override
    public byte[] serialize(String topic, Document data) {
        // Per Kafka convention, a null record value (tombstone) serializes to null.
        if (data == null) return null;
        return DOCUMENT_WRITER.writeAsBytes(data);
    }
    @Override
    public Document deserialize(String topic, byte[] data) {
        // Per Kafka convention, a null payload (tombstone) deserializes to null; the original
        // threw a NullPointerException here.
        if (data == null) return null;
        try {
            return DOCUMENT_READER.read(bytesToString(data));
        } catch (IOException e) {
            // The bytes should always have been produced by serialize(...) and therefore parse
            // cleanly; surface malformed input as an unchecked error rather than swallowing it.
            throw new RuntimeException(e);
        }
    }
    @Override
    public void close() {
        // Nothing to release.
    }
    private String bytesToString(byte[] bytes) {
        return new String(bytes, StandardCharsets.UTF_8);
    }
}

View File

@ -0,0 +1,81 @@
/*
* Copyright Debezium Authors.
*
* Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
*/
package io.debezium.document;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.io.Writer;
import io.debezium.annotation.ThreadSafe;
/**
* Writes {@link Document} instances to a variety of output forms.
*
* @author Randall Hauch
*/
@ThreadSafe
public interface DocumentWriter {
    /**
     * Get the default DocumentWriter instance.
     *
     * @return the shared default writer instance; never null
     */
    static DocumentWriter defaultWriter() {
        return JacksonWriter.INSTANCE;
    }
    /**
     * Get the default DocumentWriter instance that outputs nicely-formatted JSON documents.
     *
     * @return the shared default pretty writer instance; never null
     */
    static DocumentWriter prettyWriter() {
        return JacksonWriter.PRETTY_WRITER;
    }
    /**
     * Write the supplied document to bytes using UTF-8.
     *
     * @param document the document to be written; may not be null
     * @return the bytes containing the output JSON-formatted document; never null
     */
    default byte[] writeAsBytes(Document document) {
        try (ByteArrayOutputStream stream = new ByteArrayOutputStream()) {
            write(document, stream);
            return stream.toByteArray();
        } catch (IOException e) {
            // A ByteArrayOutputStream does not actually perform I/O, so this is effectively
            // unreachable; if it ever happens, fail loudly rather than silently returning an
            // empty byte array (which would corrupt downstream data). The original printed the
            // stack trace and returned empty bytes; this now matches JacksonWriter's override.
            throw new RuntimeException(e);
        }
    }
    /**
     * Write the supplied document to the given stream using UTF-8.
     *
     * @param document the document to be written; may not be null
     * @param jsonStream the stream to which the document is to be written; may not be null
     * @throws IOException if a document could not be written to the supplied stream
     */
    void write(Document document, OutputStream jsonStream) throws IOException;
    /**
     * Write the supplied document to the given character writer.
     *
     * @param document the document to be written; may not be null
     * @param jsonWriter the writer to which the document is to be written; may not be null
     * @throws IOException if a document could not be written to the supplied writer
     */
    void write(Document document, Writer jsonWriter) throws IOException;
    /**
     * Write the supplied document to a JSON-formatted string.
     *
     * @param document the document to be written; may not be null
     * @return the string containing the output JSON-formatted document; never null
     * @throws IOException if a document could not be written
     */
    String write(Document document) throws IOException;
}

View File

@ -0,0 +1,249 @@
/*
* Copyright Debezium Authors.
*
* Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
*/
package io.debezium.document;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.Reader;
import java.net.URL;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonParseException;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonToken;
import io.debezium.annotation.ThreadSafe;
/**
* A {@link DocumentReader} and {@link ArrayReader} that uses the Jackson library to read JSON.
*
* @author Randall Hauch
*/
@ThreadSafe
final class JacksonReader implements DocumentReader, ArrayReader {
    public static final JacksonReader INSTANCE = new JacksonReader();
    // Jackson's JsonFactory is thread-safe, so a single shared instance serves all reads.
    private static final JsonFactory factory;
    static {
        factory = new JsonFactory();
        factory.enable(JsonParser.Feature.ALLOW_COMMENTS);
    }
    private JacksonReader() {
        // Singleton; use INSTANCE.
    }
    @Override
    public Document read(InputStream jsonStream) throws IOException {
        return parse(factory.createParser(jsonStream));
    }
    @Override
    public Document read(Reader jsonReader) throws IOException {
        return parse(factory.createParser(jsonReader));
    }
    @Override
    public Document read(String json) throws IOException {
        return parse(factory.createParser(json));
    }
    @Override
    public Document read(File jsonFile) throws IOException {
        return parse(factory.createParser(jsonFile));
    }
    @Override
    public Document read(URL jsonUrl) throws IOException {
        return parse(factory.createParser(jsonUrl));
    }
    @Override
    public Document read(byte[] rawBytes) throws IOException {
        return parse(factory.createParser(rawBytes));
    }
    @Override
    public Array readArray(InputStream jsonStream) throws IOException {
        return parseArray(factory.createParser(jsonStream), false);
    }
    @Override
    public Array readArray(Reader jsonReader) throws IOException {
        return parseArray(factory.createParser(jsonReader), false);
    }
    @Override
    public Array readArray(URL jsonUrl) throws IOException {
        return parseArray(factory.createParser(jsonUrl), false);
    }
    @Override
    public Array readArray(File jsonFile) throws IOException {
        return parseArray(factory.createParser(jsonFile), false);
    }
    @Override
    public Array readArray(String jsonArray) throws IOException {
        return parseArray(factory.createParser(jsonArray), false);
    }
    // Parses a top-level document and guarantees the parser (and thus its source) is closed.
    private Document parse(JsonParser parser) throws IOException {
        try {
            return parseDocument(parser, false);
        } finally {
            parser.close();
        }
    }
    // Reads one JSON object into a BasicDocument. When 'nested' is true, the caller has already
    // consumed the START_OBJECT token, so parsing begins directly with the object's contents.
    private Document parseDocument(JsonParser parser, boolean nested) throws IOException {
        // Iterate over the fields in the top-level document ...
        BasicDocument doc = new BasicDocument();
        JsonToken token = null;
        if (!nested) {
            // We expect the START_OBJECT token ...
            token = parser.nextToken();
            if (!nested && token != JsonToken.START_OBJECT) {
                throw new IOException("Expected data to start with an Object, but was " + token);
            }
        }
        // FIELD_NAME tokens set 'fieldName'; the following value token consumes it.
        String fieldName = null;
        token = parser.nextToken();
        while (token != JsonToken.END_OBJECT) {
            switch (token) {
                case FIELD_NAME:
                    fieldName = parser.getCurrentName();
                    break;
                case START_OBJECT:
                    // Recurse for a nested object; its START_OBJECT token is already consumed.
                    doc.setDocument(fieldName, parseDocument(parser, true));
                    break;
                case START_ARRAY:
                    // Recurse for a nested array; its START_ARRAY token is already consumed.
                    doc.setArray(fieldName, parseArray(parser, true));
                    break;
                case VALUE_STRING:
                    doc.setString(fieldName, parser.getValueAsString());
                    break;
                case VALUE_TRUE:
                    doc.setBoolean(fieldName, true);
                    break;
                case VALUE_FALSE:
                    doc.setBoolean(fieldName, false);
                    break;
                case VALUE_NULL:
                    doc.setNull(fieldName);
                    break;
                case VALUE_NUMBER_FLOAT:
                case VALUE_NUMBER_INT:
                    // Preserve the narrowest numeric representation Jackson reports.
                    switch (parser.getNumberType()) {
                        case FLOAT:
                            doc.setNumber(fieldName, parser.getFloatValue());
                            break;
                        case DOUBLE:
                            doc.setNumber(fieldName, parser.getDoubleValue());
                            break;
                        case BIG_DECIMAL:
                            doc.setNumber(fieldName, parser.getDecimalValue());
                            break;
                        case INT:
                            doc.setNumber(fieldName, parser.getIntValue());
                            break;
                        case LONG:
                            doc.setNumber(fieldName, parser.getLongValue());
                            break;
                        case BIG_INTEGER:
                            doc.setNumber(fieldName, parser.getBigIntegerValue());
                            break;
                    }
                    break;
                case VALUE_EMBEDDED_OBJECT:
                    // disregard this, since it's an extension ...
                    break;
                case NOT_AVAILABLE:
                    throw new JsonParseException("Non-blocking parsers are not supported", parser.getCurrentLocation());
                case END_ARRAY:
                    throw new JsonParseException("Not expecting an END_ARRAY token", parser.getCurrentLocation());
                case END_OBJECT:
                    throw new JsonParseException("Not expecting an END_OBJECT token", parser.getCurrentLocation());
            }
            token = parser.nextToken();
        }
        return doc;
    }
    // Reads one JSON array into a BasicArray. When 'nested' is true, the caller has already
    // consumed the START_ARRAY token, so parsing begins directly with the array's elements.
    private Array parseArray(JsonParser parser, boolean nested) throws IOException {
        // Iterate over the values in the array ...
        BasicArray array = new BasicArray();
        JsonToken token = null;
        if (!nested) {
            // We expect the START_ARRAY token ...
            token = parser.nextToken();
            if (!nested && token != JsonToken.START_ARRAY) {
                throw new IOException("Expected data to start with an Array, but was " + token);
            }
        }
        token = parser.nextToken();
        while (token != JsonToken.END_ARRAY) {
            switch (token) {
                case START_OBJECT:
                    array.add(parseDocument(parser, true));
                    break;
                case START_ARRAY:
                    array.add(parseArray(parser, true));
                    break;
                case VALUE_STRING:
                    array.add(parser.getValueAsString());
                    break;
                case VALUE_TRUE:
                    array.add(true);
                    break;
                case VALUE_FALSE:
                    array.add(false);
                    break;
                case VALUE_NULL:
                    array.addNull();
                    break;
                case VALUE_NUMBER_FLOAT:
                case VALUE_NUMBER_INT:
                    // Preserve the narrowest numeric representation Jackson reports.
                    switch (parser.getNumberType()) {
                        case FLOAT:
                            array.add(parser.getFloatValue());
                            break;
                        case DOUBLE:
                            array.add(parser.getDoubleValue());
                            break;
                        case BIG_DECIMAL:
                            array.add(parser.getDecimalValue());
                            break;
                        case INT:
                            array.add(parser.getIntValue());
                            break;
                        case LONG:
                            array.add(parser.getLongValue());
                            break;
                        case BIG_INTEGER:
                            array.add(parser.getBigIntegerValue());
                            break;
                    }
                    break;
                case VALUE_EMBEDDED_OBJECT:
                    // disregard this, since it's an extension ...
                    break;
                case NOT_AVAILABLE:
                    throw new JsonParseException("Non-blocking parsers are not supported", parser.getCurrentLocation());
                case FIELD_NAME:
                    throw new JsonParseException("Not expecting a FIELD_NAME token", parser.getCurrentLocation());
                case END_ARRAY:
                    throw new JsonParseException("Not expecting an END_ARRAY token", parser.getCurrentLocation());
                case END_OBJECT:
                    throw new JsonParseException("Not expecting an END_OBJECT token", parser.getCurrentLocation());
            }
            token = parser.nextToken();
        }
        return array;
    }
}

View File

@ -0,0 +1,200 @@
/*
* Copyright Debezium Authors.
*
* Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
*/
package io.debezium.document;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.io.StringWriter;
import java.io.Writer;
import com.fasterxml.jackson.core.JsonEncoding;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.util.DefaultPrettyPrinter;
import io.debezium.annotation.ThreadSafe;
/**
* A {@link DocumentWriter} and {@link ArrayWriter} that uses the Jackson library to write JSON.
*
* @author Randall Hauch
*/
@ThreadSafe
final class JacksonWriter implements DocumentWriter, ArrayWriter {
    public static final JacksonWriter INSTANCE = new JacksonWriter(false);
    public static final JacksonWriter PRETTY_WRITER = new JacksonWriter(true);
    // Jackson's JsonFactory is thread-safe, so a single shared instance serves both writers.
    private static final JsonFactory factory;
    static {
        factory = new JsonFactory();
    }
    // When true, generators are configured with a DefaultPrettyPrinter for indented output.
    private final boolean pretty;
    private JacksonWriter(boolean pretty) {
        this.pretty = pretty;
    }
    @Override
    public void write(Document document, OutputStream jsonStream) throws IOException {
        try (JsonGenerator jsonGenerator = factory.createGenerator(jsonStream)) {
            configure(jsonGenerator);
            writeDocument(document, jsonGenerator);
        }
    }
    @Override
    public void write(Document document, Writer jsonWriter) throws IOException {
        try (JsonGenerator jsonGenerator = factory.createGenerator(jsonWriter)) {
            configure(jsonGenerator);
            writeDocument(document, jsonGenerator);
        }
    }
    @Override
    public String write(Document document) throws IOException {
        StringWriter writer = new StringWriter();
        try (JsonGenerator jsonGenerator = factory.createGenerator(writer)) {
            configure(jsonGenerator);
            writeDocument(document, jsonGenerator);
        }
        return writer.getBuffer().toString();
    }
    @Override
    public byte[] writeAsBytes(Document document) {
        try (ByteArrayOutputStream stream = new ByteArrayOutputStream()) {
            try (JsonGenerator jsonGenerator = factory.createGenerator(stream, JsonEncoding.UTF8)) {
                configure(jsonGenerator);
                writeDocument(document, jsonGenerator);
            }
            return stream.toByteArray();
        } catch (IOException e) {
            // Writing to an in-memory stream should never fail; surface any failure loudly.
            throw new RuntimeException(e);
        }
    }
    @Override
    public void write(Array array, OutputStream jsonStream) throws IOException {
        try (JsonGenerator jsonGenerator = factory.createGenerator(jsonStream)) {
            configure(jsonGenerator);
            writeArray(array, jsonGenerator);
        }
    }
    @Override
    public void write(Array array, Writer jsonWriter) throws IOException {
        try (JsonGenerator jsonGenerator = factory.createGenerator(jsonWriter)) {
            configure(jsonGenerator);
            writeArray(array, jsonGenerator);
        }
    }
    @Override
    public String write(Array array) throws IOException {
        StringWriter writer = new StringWriter();
        try (JsonGenerator jsonGenerator = factory.createGenerator(writer)) {
            configure(jsonGenerator);
            writeArray(array, jsonGenerator);
        }
        return writer.getBuffer().toString();
    }
    // Applies this writer's formatting preferences to a freshly-created generator.
    protected void configure(JsonGenerator generator) {
        if (pretty) generator.setPrettyPrinter(new DefaultPrettyPrinter());
    }
    protected void writeDocument(Document document, JsonGenerator generator) throws IOException {
        generator.writeStartObject();
        try {
            document.stream().forEach((field) -> {
                try {
                    generator.writeFieldName(field.getName().toString());
                    writeValue(field.getValue(), generator);
                } catch (IOException e) {
                    // Tunnel the checked IOException out of the lambda; unwrapped below.
                    throw new WritingError(e);
                }
            });
            generator.writeEndObject();
        } catch (WritingError e) {
            throw e.wrapped();
        }
    }
    protected void writeArray(Array array, JsonGenerator generator) throws IOException {
        generator.writeStartArray();
        try {
            array.streamValues().forEach((value) -> {
                try {
                    writeValue(value, generator);
                } catch (IOException e) {
                    // Tunnel the checked IOException out of the lambda; unwrapped below.
                    throw new WritingError(e);
                }
            });
            generator.writeEndArray();
        } catch (WritingError e) {
            throw e.wrapped();
        }
    }
    // Dispatches on the value's type; documents and arrays recurse.
    protected void writeValue(Value value, JsonGenerator generator) throws IOException {
        switch (value.getType()) {
            case NULL:
                generator.writeNull();
                break;
            case STRING:
                generator.writeString(value.asString());
                break;
            case BOOLEAN:
                generator.writeBoolean(value.asBoolean());
                break;
            case BINARY:
                generator.writeBinary(value.asBytes());
                break;
            case INTEGER:
                generator.writeNumber(value.asInteger());
                break;
            case LONG:
                generator.writeNumber(value.asLong());
                break;
            case FLOAT:
                generator.writeNumber(value.asFloat());
                break;
            case DOUBLE:
                generator.writeNumber(value.asDouble());
                break;
            case BIG_INTEGER:
                generator.writeNumber(value.asBigInteger());
                break;
            case DECIMAL:
                generator.writeNumber(value.asBigDecimal());
                break;
            case DOCUMENT:
                writeDocument(value.asDocument(), generator);
                break;
            case ARRAY:
                writeArray(value.asArray(), generator);
                break;
        }
    }
    /**
     * Unchecked carrier that smuggles an {@link IOException} out of a {@code forEach} lambda;
     * callers catch it and rethrow the wrapped checked exception.
     */
    protected static final class WritingError extends RuntimeException {
        private static final long serialVersionUID = 1L;
        private final IOException wrapped;
        protected WritingError(IOException wrapped) {
            this.wrapped = wrapped;
        }
        public IOException wrapped() {
            return wrapped;
        }
    }
}

View File

@ -0,0 +1,192 @@
/*
* Copyright Debezium Authors.
*
* Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
*/
package io.debezium.document;
import java.math.BigDecimal;
import java.math.BigInteger;
import io.debezium.annotation.Immutable;
/**
* A specialization of {@link Value} that represents a null value.
* @author Randall Hauch
*/
@Immutable
final class NullValue implements Value {
    // Shared singleton; every null value is represented by this one instance, so identity
    // comparison (see equals) is sufficient.
    public static final Value INSTANCE = new NullValue();
    private NullValue() {
        // prevent instantiation
    }
    @Override
    public int hashCode() {
        return 0;
    }
    @Override
    public boolean equals(Object obj) {
        // Identity works because this class is a singleton.
        return obj == this;
    }
    @Override
    public String toString() {
        return "null";
    }
    @Override
    public int compareTo(Value that) {
        // A null value sorts before every other value.
        if (this == that) return 0;
        return -1;
    }
    @Override
    public Type getType() {
        return Type.NULL;
    }
    // All conversion methods below return null: a null value has no representation in any type.
    @Override
    public Object asObject() {
        return null;
    }
    @Override
    public String asString() {
        return null;
    }
    @Override
    public Integer asInteger() {
        return null;
    }
    @Override
    public Long asLong() {
        return null;
    }
    @Override
    public Boolean asBoolean() {
        return null;
    }
    @Override
    public Number asNumber() {
        return null;
    }
    @Override
    public BigInteger asBigInteger() {
        return null;
    }
    @Override
    public BigDecimal asBigDecimal() {
        return null;
    }
    @Override
    public Float asFloat() {
        return null;
    }
    @Override
    public Double asDouble() {
        return null;
    }
    @Override
    public byte[] asBytes() {
        return null;
    }
    @Override
    public Document asDocument() {
        return null;
    }
    @Override
    public Array asArray() {
        return null;
    }
    // Only isNull() is true; every other type predicate is false.
    @Override
    public boolean isNull() {
        return true;
    }
    @Override
    public boolean isString() {
        return false;
    }
    @Override
    public boolean isBoolean() {
        return false;
    }
    @Override
    public boolean isInteger() {
        return false;
    }
    @Override
    public boolean isLong() {
        return false;
    }
    @Override
    public boolean isFloat() {
        return false;
    }
    @Override
    public boolean isDouble() {
        return false;
    }
    @Override
    public boolean isNumber() {
        return false;
    }
    @Override
    public boolean isBigInteger() {
        return false;
    }
    @Override
    public boolean isBigDecimal() {
        return false;
    }
    @Override
    public boolean isBinary() {
        return false;
    }
    @Override
    public boolean isDocument() {
        return false;
    }
    @Override
    public boolean isArray() {
        return false;
    }
    // Immutable singleton: conversion and cloning both return this same instance.
    @Override
    public Value convert() {
        return this;
    }
    @Override
    public Value clone() {
        return this;
    }
}

View File

@ -0,0 +1,188 @@
/*
* Copyright Debezium Authors.
*
* Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
*/
package io.debezium.document;
import java.util.Optional;
import java.util.function.Consumer;
import io.debezium.annotation.Immutable;
/**
* A representation of multiple name segments that together form a path within {@link Document}.
*
* @author Randall Hauch
*/
@Immutable
public interface Path extends Iterable<String> {
public static interface Segments {
public static boolean isAfterLastIndex(String segment) {
return "-".equals(segment);
}
public static boolean isArrayIndex(String segment) {
return isAfterLastIndex(segment) || asInteger(segment).isPresent();
}
public static boolean isFieldName(String segment) {
return !isArrayIndex(segment);
}
public static Optional<Integer> asInteger(String segment) {
try {
return Optional.of(Integer.parseInt(segment));
} catch (NumberFormatException e) {
return Optional.empty();
}
}
public static Optional<Integer> asInteger(Optional<String> segment) {
return segment.isPresent() ? asInteger(segment.get()) : Optional.empty();
}
}
/**
* Get the zero-length path.
*
* @return the shared root path; never null
*/
public static Path root() {
return Paths.RootPath.INSTANCE;
}
/**
* Get an {@link Optional} reference to the root path. The resulting Optional will always {@link Optional#isPresent() be
* present}.
*
* @return the shared optional root path; never null
*/
static Optional<Path> optionalRoot() {
return Paths.RootPath.OPTIONAL_OF_ROOT;
}
/**
* Parse a JSON Path expression. Segments are separated by a single forward slash ('{@code /}'); any '{@code ~}' or '{@code /}
* ' literals must be escaped. Trailing slashes are ignored.
*
* @param path the path as a string; may not be null, but may be an empty string or "/" for a root path
* @return the path object; never null
*/
static Path parse(String path) {
return Paths.parse(path, true);
}
/**
* Parse a JSON Path expression. Segments are separated by a single forward slash ('{@code /}'); any '{@code ~}' or '{@code /}
* ' literals must be escaped. Trailing slashes are ignored.
*
* @param path the path as a string; may not be null, but may be an empty string or "/" for a root path
* @param resolveJsonPointerEscapes {@code true} if '{@code ~}' and '{@code /} ' literals are to be escaped as '{@code ~0}'
* and '{@code ~1}', respectively, or {@code false} if they are not to be escaped
* @return the path object; never null
*/
static Path parse(String path, boolean resolveJsonPointerEscapes) {
return Paths.parse(path, resolveJsonPointerEscapes);
}
/**
* Return whether this path is the root path with no segments. This method is equivalent to {@code size() == 0}.
*
* @return true if this path contains exactly one segment, or false otherwise
*/
default boolean isRoot() {
return size() == 0;
}
/**
* Return whether this path has a single segment. This method is equivalent to {@code size() == 1}.
*
* @return true if this path contains exactly one segment, or false otherwise
*/
default boolean isSingle() {
return size() == 1;
}
/**
* Return whether this path has more than one segment. This method is equivalent to {@code size() > 1}.
*
* @return true if this path contains exactly one segment, or false otherwise
*/
default boolean isMultiple() {
return size() > 1;
}
/**
 * Get the number of segments in the path. The root path has a size of 0.
 *
 * @return the size of the path; never negative
 */
int size();
/**
 * Get the optional parent path. The parent of a single-segment path is the root path.
 *
 * @return an optional containing the parent (if this is not the root path), or an empty optional if this is the root path.
 */
Optional<Path> parent();
/**
 * Get the last segment, if there is one. Only the root path has no last segment.
 *
 * @return an optional containing the last segment of this path (if this is not the root path), or an empty optional if this
 *         is the root path.
 */
Optional<String> lastSegment();
/**
 * Get a portion of this path that has a specified number of segments, counted from the root.
 *
 * @param length the number of segments
 * @return the subpath, or this path if the supplied length is equal to {@code this.size()}
 * @throws IllegalArgumentException if the length is negative or greater than {@code this.size()}
 */
Path subpath(int length);
/**
 * Get the segment at the given index.
 *
 * @param index the 0-based index of the segment
 * @return the segment
 * @throws IllegalArgumentException if the index value is negative, or equal to or greater than {@link #size()}
 */
String segment(int index);
/**
 * Create a new path consisting of this path with one or more additional segments given by the relative path.
 *
 * @param relPath the relative path to be appended to this path; may not be null. It is parsed with JSON Pointer
 *            escape resolution enabled, like {@link Path#parse(String)}.
 * @return the new path
 */
default Path append(String relPath) {
    return append(Path.parse(relPath));
}
/**
 * Create a new path consisting of this path appended with the given path that will be treated as a relative path.
 * Appending the root path yields a path equal to this path.
 *
 * @param relPath the relative path to be appended to this path; may not be null
 * @return the new path
 */
Path append(Path relPath);
/**
 * Obtain the representation of this path as a relative path without the leading '/'. The root path yields an
 * empty string.
 *
 * @return the relative path; never null but may be empty
 */
String toRelativePath();
/**
 * Call the consumer with the path of every ancestor (except root) down to this path, in order from the shortest
 * ancestor to this path itself.
 *
 * @param consumer the function to call on each ancestor path
 */
default void fromRoot(Consumer<Path> consumer) {
    // Grow an ancestor path one segment at a time, invoking the consumer after each extension.
    Path ancestor = root();
    for (String segmentName : this) {
        ancestor = ancestor.append(segmentName);
        consumer.accept(ancestor);
    }
}
}

View File

@ -0,0 +1,417 @@
/*
* Copyright Debezium Authors.
*
* Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
*/
package io.debezium.document;
import java.util.Arrays;
import java.util.Iterator;
import java.util.Optional;
import java.util.function.Consumer;
import io.debezium.annotation.Immutable;
import io.debezium.util.HashCode;
import io.debezium.util.Iterators;
import io.debezium.util.Joiner;
import io.debezium.util.Strings;
/**
* A package-level utility that implements useful operations to create paths.
*
* @author Randall Hauch
*/
@Immutable
final class Paths {
static Path parse(String path, boolean resolveJsonPointerEscapes) {
// Remove leading and trailing whitespace and '/' characters ...
path = Strings.trim(path, (c) -> c < ' ' || c == '/');
if (path.length() == 0) return RootPath.INSTANCE;
String[] segments = path.split("/");
if (segments.length == 1) {
return new SingleSegmentPath(parseSegment(segments[0], resolveJsonPointerEscapes));
}
if (resolveJsonPointerEscapes) {
for (int i = 0; i != segments.length; ++i)
segments[i] = parseSegment(segments[i], true);
}
return new MultiSegmentPath(segments);
}
private static String parseSegment(String segment, boolean resolveJsonPointerEscapes) {
if (resolveJsonPointerEscapes) {
segment = segment.replaceAll("\\~1", "/").replaceAll("\\~0", "~");
}
return segment;
}
static interface InnerPath {
int copyInto(String[] segments, int start);
}
static final class RootPath implements Path, InnerPath {
public static final Path INSTANCE = new RootPath();
public static final Optional<Path> OPTIONAL_OF_ROOT = Optional.of(RootPath.INSTANCE);
private RootPath() {
}
@Override
public Optional<Path> parent() {
return Optional.empty();
}
@Override
public Optional<String> lastSegment() {
return Optional.empty();
}
@Override
public int size() {
return 0;
}
@Override
public int hashCode() {
return 1;
}
@Override
public boolean equals(Object obj) {
return obj == this;
}
@Override
public String toString() {
return "/";
}
@Override
public String toRelativePath() {
return "";
}
@Override
public Iterator<String> iterator() {
return Iterators.empty();
}
@Override
public void forEach(Consumer<? super String> consumer) {
}
@Override
public Path subpath(int length) {
if (length != 0) throw new IllegalArgumentException("Invalid subpath length: " + length);
return this;
}
@Override
public String segment(int index) {
throw new IllegalArgumentException("Invalid segment index: " + index);
}
@Override
public Path append(Path relPath) {
return relPath;
}
@Override
public int copyInto(String[] segments, int start) {
return 0;
}
}
static final class SingleSegmentPath implements Path, InnerPath {
private final Optional<String> segment;
protected SingleSegmentPath(String segment) {
assert segment != null;
this.segment = Optional.of(segment); // wrap because we're always giving it away
}
@Override
public Optional<Path> parent() {
return Path.optionalRoot();
}
@Override
public Optional<String> lastSegment() {
return segment;
}
@Override
public int size() {
return 1;
}
@Override
public int hashCode() {
return segment.get().hashCode();
}
@Override
public boolean equals(Object obj) {
if (obj == this) return true;
if (obj instanceof Path) {
Path that = (Path) obj;
if (this.size() != that.size()) return false;
return this.lastSegment().get().equals(that.lastSegment().get());
}
return false;
}
@Override
public String toString() {
return "/" + segment.get();
}
@Override
public String toRelativePath() {
return segment.get();
}
@Override
public Iterator<String> iterator() {
return Iterators.with(segment.get());
}
@Override
public void forEach(Consumer<? super String> consumer) {
consumer.accept(segment.get());
}
@Override
public Path subpath(int length) {
if (length > size() || length < 0) throw new IllegalArgumentException("Invalid subpath length: " + length);
return length == 1 ? this : Path.root();
}
@Override
public String segment(int index) {
if (index >= size() || index < 0) throw new IllegalArgumentException("Invalid segment index: " + index);
return segment.get();
}
@Override
public Path append(Path relPath) {
if (relPath.isRoot()) return this;
if (relPath.isSingle()) return new ChildPath(this, relPath.lastSegment().get());
String[] segments = new String[size() + relPath.size()];
int offset = this.copyInto(segments, 0);
copyPathInto(relPath, segments, offset);
return new MultiSegmentPath(segments);
}
@Override
public int copyInto(String[] segments, int start) {
segments[start] = segment.get();
return 1;
}
}
static final class MultiSegmentPath implements Path, InnerPath {
private final String[] segments;
private final int hc;
protected MultiSegmentPath(String[] segments) {
this.segments = segments;
assert size() > 1;
this.hc = HashCode.compute(segments[0].hashCode(),segments[1].hashCode());
}
@Override
public Optional<Path> parent() {
if (size() == 2) return Optional.of(new SingleSegmentPath(segments[0]));
return Optional.of(new MultiSegmentPath(Arrays.copyOf(segments, segments.length - 1)));
}
@Override
public Optional<String> lastSegment() {
return Optional.of(segments[segments.length - 1]);
}
@Override
public int size() {
return segments.length;
}
@Override
public int hashCode() {
return hc;
}
@Override
public boolean equals(Object obj) {
if (obj == this) return true;
if (obj instanceof Path) {
Path that = (Path) obj;
if (this.size() != that.size()) return false;
Iterator<String> thisIter = this.iterator();
Iterator<String> thatIter = that.iterator();
while (thisIter.hasNext()) {
if (!thisIter.next().equals(thatIter.next())) return false;
}
return true;
}
return false;
}
@Override
public String toString() {
return Joiner.on("/", "/").join(segments);
}
@Override
public String toRelativePath() {
return Joiner.on("", "/").join(segments);
}
@Override
public Iterator<String> iterator() {
return Iterators.with(segments);
}
@Override
public void forEach(Consumer<? super String> consumer) {
for (String segment : segments) {
consumer.accept(segment);
}
}
@Override
public Path subpath(int length) {
if (length > size() || length < 0) throw new IllegalArgumentException("Invalid subpath length: " + length);
if (length == 0) return RootPath.INSTANCE;
if (length == 1) return new SingleSegmentPath(segments[0]);
if (length == size()) return this;
return new MultiSegmentPath(Arrays.copyOf(segments, length));
}
@Override
public String segment(int index) {
if (index >= size() || index < 0) throw new IllegalArgumentException("Invalid segment index: " + index);
return segments[index];
}
@Override
public Path append(Path relPath) {
if (relPath.isRoot()) return this;
if (relPath.isSingle()) return new ChildPath(this, relPath.lastSegment().get());
String[] segments = new String[size() + relPath.size()];
int offset = this.copyInto(segments, 0);
copyPathInto(relPath, segments, offset);
return new MultiSegmentPath(segments);
}
@Override
public int copyInto(String[] segments, int start) {
System.arraycopy(this.segments, 0, segments, start, this.segments.length);
return this.segments.length;
}
}
static final class ChildPath implements Path, InnerPath {
private final Path parent;
private final String segment;
protected ChildPath(Path parent, String segment) {
assert parent instanceof InnerPath;
this.parent = parent;
this.segment = segment;
}
@Override
public Iterator<String> iterator() {
return Iterators.join(parent, segment);
}
@Override
public Optional<String> lastSegment() {
return Optional.of(segment);
}
@Override
public Optional<Path> parent() {
return Optional.of(parent);
}
@Override
public int size() {
return parent.size() + 1;
}
@Override
public int hashCode() {
return HashCode.compute(parent, segment);
}
@Override
public boolean equals(Object obj) {
if (obj == this) return true;
if (obj instanceof Path) {
Path that = (Path) obj;
if (this.size() != that.size()) return false;
if (!this.parent.equals(that.parent())) return false;
return this.segment.equals(that.lastSegment().get());
}
return false;
}
@Override
public String toString() {
return Joiner.on("/","/").join(parent.toString(), segment);
}
@Override
public String toRelativePath() {
return Joiner.on("/").join(parent.toRelativePath(), segment);
}
@Override
public String segment(int index) {
if (index >= size() || index < 0) throw new IllegalArgumentException("Invalid segment index: " + index);
return index < parent.size() ? parent.segment(index) : segment;
}
@Override
public Path subpath(int length) {
if (length > size() || length < 0) throw new IllegalArgumentException("Invalid subpath length: " + length);
return length <= parent.size() ? parent.subpath(length) : this;
}
@Override
public Path append(Path relPath) {
if (relPath.isRoot()) return this;
if (relPath.isSingle()) return new ChildPath(this, relPath.lastSegment().get());
String[] segments = new String[size() + relPath.size() + 1];
int offset = copyInto(segments, 0);
copyPathInto(relPath, segments, offset);
return new MultiSegmentPath(segments);
}
@Override
public int copyInto(String[] segments, int start) {
int copied = ((InnerPath) parent).copyInto(segments, start);
segments[copied] = this.segment;
return size();
}
}
static int copyPathInto(Path path, String[] segments, int start) {
if (path instanceof InnerPath) {
return ((InnerPath) path).copyInto(segments, start);
}
int i = start;
for (String segment : path)
segments[i++] = segment;
return i;
}
private Paths() {
}
}

View File

@ -0,0 +1,397 @@
/*
* Copyright Debezium Authors.
*
* Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
*/
package io.debezium.document;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.function.Consumer;
import java.util.function.DoubleConsumer;
import java.util.function.IntConsumer;
import java.util.function.LongConsumer;
import io.debezium.annotation.Immutable;
/**
 * A value in a {@link Document} or {@link Array}. Note that {@link #compareTo(Value)} might perform literal comparisons;
 * to perform semantic comparisons, use {@link #comparable()} to obtain a wrapped value with semantic comparison capability.
 *
 * @author Randall Hauch
 */
@Immutable
public interface Value extends Comparable<Value> {

    /**
     * The type of a value.
     */
    static enum Type {
        NULL, STRING, BOOLEAN, BINARY, INTEGER, LONG, FLOAT, DOUBLE, BIG_INTEGER, DECIMAL, DOCUMENT, ARRAY;
    }

    /**
     * Determine whether the supplied reference is null or represents a {@link #isNull() null value}.
     *
     * @param value the value to check; may be null
     * @return true if the value is null or is a null value
     */
    static boolean isNull(Value value) {
        return value == null || value.isNull();
    }

    /**
     * Determine whether the supplied reference is non-null and does not represent a {@link #isNull() null value}.
     *
     * @param value the value to check; may be null
     * @return true if the value is neither null nor a null value
     */
    static boolean notNull(Value value) {
        return value != null && !value.isNull();
    }

    /**
     * Determine whether the supplied object is of a type that can be represented as a {@link Value}.
     *
     * @param value the object to check; may be null, which is always valid
     * @return true if the object can be represented as a Value
     */
    static boolean isValid(Object value) {
        return value == null || value instanceof Value ||
                value instanceof String || value instanceof Boolean ||
                value instanceof Integer || value instanceof Long ||
                value instanceof Float || value instanceof Double ||
                value instanceof Document || value instanceof Array ||
                value instanceof BigInteger || value instanceof BigDecimal;
    }

    /**
     * Compare two {@link Value} objects, which may or may not be null.
     *
     * @param value1 the first value object, may be null
     * @param value2 the second value object, which may be null
     * @return a negative integer if the first value is less than the second, zero if the values are equivalent (including if both
     *         are null), or a positive integer if the first value is greater than the second
     */
    static int compareTo(Value value1, Value value2) {
        if (value1 == null) return isNull(value2) ? 0 : -1;
        return value1.compareTo(value2);
    }

    /**
     * Create a value from the supplied object, which must be a {@link #isValid(Object) valid} type.
     *
     * @param value the object to wrap; may be null, in which case the null value is returned
     * @return the value; never null
     * @throws IllegalArgumentException if the object is of an unsupported type
     */
    static Value create(Object value) {
        if (value instanceof Value) return (Value) value;
        if (!isValid(value)) {
            assert value != null; // null is always valid, so an invalid value can never be null
            // Fixed the previously mismatched quotes in the error message ...
            throw new IllegalArgumentException("Unexpected value '" + value + "' of type " + value.getClass());
        }
        return value == null ? NullValue.INSTANCE : new ComparableValue((Comparable<?>) value);
    }

    /** Create a value wrapping the supplied primitive boolean. */
    static Value create(boolean value) {
        return new ComparableValue(Boolean.valueOf(value));
    }

    /** Create a value wrapping the supplied primitive int. */
    static Value create(int value) {
        return new ComparableValue(Integer.valueOf(value));
    }

    /** Create a value wrapping the supplied primitive long. */
    static Value create(long value) {
        return new ComparableValue(Long.valueOf(value));
    }

    /** Create a value wrapping the supplied primitive float. */
    static Value create(float value) {
        return new ComparableValue(Float.valueOf(value));
    }

    /** Create a value wrapping the supplied primitive double. */
    static Value create(double value) {
        return new ComparableValue(Double.valueOf(value));
    }

    /** Create a value wrapping the supplied BigInteger, or the null value if the argument is null. */
    static Value create(BigInteger value) {
        return value == null ? NullValue.INSTANCE : new ComparableValue(value);
    }

    /** Create a value wrapping the supplied BigDecimal, or the null value if the argument is null. */
    static Value create(BigDecimal value) {
        return value == null ? NullValue.INSTANCE : new ComparableValue(value);
    }

    /** Create a value wrapping the supplied Integer, or the null value if the argument is null. */
    static Value create(Integer value) {
        return value == null ? NullValue.INSTANCE : new ComparableValue(value);
    }

    /** Create a value wrapping the supplied Long, or the null value if the argument is null. */
    static Value create(Long value) {
        return value == null ? NullValue.INSTANCE : new ComparableValue(value);
    }

    /** Create a value wrapping the supplied Float, or the null value if the argument is null. */
    static Value create(Float value) {
        return value == null ? NullValue.INSTANCE : new ComparableValue(value);
    }

    /** Create a value wrapping the supplied Double, or the null value if the argument is null. */
    static Value create(Double value) {
        return value == null ? NullValue.INSTANCE : new ComparableValue(value);
    }

    /** Create a value wrapping the supplied String, or the null value if the argument is null. */
    static Value create(String value) {
        return value == null ? NullValue.INSTANCE : new ComparableValue(value);
    }

    /** Create a binary value wrapping the supplied byte array, or the null value if the argument is null. */
    static Value create(byte[] value) {
        return value == null ? NullValue.INSTANCE : new BinaryValue(value);
    }

    /** Create a value wrapping the supplied Document, or the null value if the argument is null. */
    static Value create(Document value) {
        return value == null ? NullValue.INSTANCE : new ComparableValue(value);
    }

    /** Create a value wrapping the supplied Array, or the null value if the argument is null. */
    static Value create(Array value) {
        return value == null ? NullValue.INSTANCE : new ComparableValue(value);
    }

    /** Obtain the singleton null value. */
    static Value nullValue() {
        return NullValue.INSTANCE;
    }

    /** Get the {@link Type} of this value. */
    default Type getType() {
        return ComparableValue.typeForValue(this);
    }

    /**
     * Get the raw value.
     *
     * @return the raw value; may be null
     */
    Object asObject();

    // Typed accessors; each returns null when this value is not of (or convertible to) the requested type —
    // see the implementation for the exact conversion behavior.
    String asString();

    Integer asInteger();

    Long asLong();

    Boolean asBoolean();

    Number asNumber();

    BigInteger asBigInteger();

    BigDecimal asBigDecimal();

    Float asFloat();

    Double asDouble();

    Document asDocument();

    Array asArray();

    byte[] asBytes();

    /** Determine whether this value represents a null value. */
    boolean isNull();

    /** Determine whether this value does not represent a null value. */
    default boolean isNotNull() {
        return !isNull();
    }

    // Type predicates; each returns true if this value holds the corresponding type.
    boolean isString();

    boolean isInteger();

    boolean isLong();

    boolean isBoolean();

    boolean isNumber();

    boolean isBigInteger();

    boolean isBigDecimal();

    boolean isFloat();

    boolean isDouble();

    boolean isDocument();

    boolean isArray();

    boolean isBinary();

    /**
     * Get a Value representation that will attempt to convert values to the requested format.
     *
     * @return a value that can convert actual values to the requested format
     */
    Value convert();

    /**
     * Get a Value representation that will allow semantic comparison of values, rather than the literal comparison normally
     * performed by {@link #compareTo(Value)}.
     *
     * @return the Value that will perform semantic comparisons; never null
     */
    default Value comparable() {
        if (this instanceof ComparableValue) return this;
        return new ComparableValue(this);
    }

    /**
     * Obtain a clone of this value.
     *
     * @return the clone of this value; never null, but possibly the same instance if the underlying value is immutable
     *         and not a document or array
     */
    Value clone();

    /**
     * If a value is a document, invoke the specified consumer with the value, otherwise do nothing.
     *
     * @param consumer block to be executed if the value is a document
     * @return true if the block was called, or false otherwise
     */
    default boolean ifDocument(Consumer<Document> consumer) {
        if (isDocument()) {
            consumer.accept(asDocument());
            return true;
        }
        return false;
    }

    /**
     * If a value is an array, invoke the specified consumer with the value, otherwise do nothing.
     *
     * @param consumer block to be executed if the value is an array
     * @return true if the block was called, or false otherwise
     */
    default boolean ifArray(Consumer<Array> consumer) {
        if (isArray()) {
            consumer.accept(asArray());
            return true;
        }
        return false;
    }

    /**
     * If a value is a string, invoke the specified consumer with the value, otherwise do nothing.
     *
     * @param consumer block to be executed if the value is a string
     * @return true if the block was called, or false otherwise
     */
    default boolean ifString(Consumer<String> consumer) {
        if (isString()) {
            consumer.accept(asString());
            return true;
        }
        return false;
    }

    /**
     * If a value is a boolean value, invoke the specified consumer with the value, otherwise do nothing.
     *
     * @param consumer block to be executed if the value is a boolean
     * @return true if the block was called, or false otherwise
     */
    default boolean ifBoolean(Consumer<Boolean> consumer) {
        if (isBoolean()) {
            consumer.accept(asBoolean());
            return true;
        }
        return false;
    }

    /**
     * If a value is a byte array, invoke the specified consumer with the value, otherwise do nothing.
     *
     * @param consumer block to be executed if the value is a byte array
     * @return true if the block was called, or false otherwise
     */
    default boolean ifBinary(Consumer<byte[]> consumer) {
        if (isBinary()) {
            consumer.accept(asBytes());
            return true;
        }
        return false;
    }

    /**
     * If a value is an integer, invoke the specified consumer with the value, otherwise do nothing.
     *
     * @param consumer block to be executed if the value is an integer
     * @return true if the block was called, or false otherwise
     */
    default boolean ifInteger(IntConsumer consumer) {
        if (isInteger()) {
            consumer.accept(asInteger().intValue());
            return true;
        }
        return false;
    }

    /**
     * If a value is a long, invoke the specified consumer with the value, otherwise do nothing.
     *
     * @param consumer block to be executed if the value is a long
     * @return true if the block was called, or false otherwise
     */
    default boolean ifLong(LongConsumer consumer) {
        if (isLong()) {
            consumer.accept(asLong().longValue());
            return true;
        }
        return false;
    }

    /**
     * If a value is a float, invoke the specified consumer with the value, otherwise do nothing.
     *
     * @param consumer block to be executed if the value is a float
     * @return true if the block was called, or false otherwise
     */
    default boolean ifFloat(DoubleConsumer consumer) {
        if (isFloat()) {
            consumer.accept(asFloat().doubleValue());
            return true;
        }
        return false;
    }

    /**
     * If a value is a double, invoke the specified consumer with the value, otherwise do nothing.
     *
     * @param consumer block to be executed if the value is a double
     * @return true if the block was called, or false otherwise
     */
    default boolean ifDouble(DoubleConsumer consumer) {
        if (isDouble()) {
            // Fixed: pass the full double value; the previous 'intValue()' silently truncated it ...
            consumer.accept(asDouble().doubleValue());
            return true;
        }
        return false;
    }

    /**
     * If a value is a variable-sized integer, invoke the specified consumer with the value, otherwise do nothing.
     *
     * @param consumer block to be executed if the value is a big integer
     * @return true if the block was called, or false otherwise
     */
    default boolean ifBigInteger(Consumer<BigInteger> consumer) {
        if (isBigInteger()) {
            consumer.accept(asBigInteger());
            return true;
        }
        return false;
    }

    /**
     * If a value is a variable-sized decimal, invoke the specified consumer with the value, otherwise do nothing.
     *
     * @param consumer block to be executed if the value is a big decimal
     * @return true if the block was called, or false otherwise
     */
    default boolean ifBigDecimal(Consumer<BigDecimal> consumer) {
        if (isBigDecimal()) {
            consumer.accept(asBigDecimal());
            return true;
        }
        return false;
    }

    /**
     * If a value is a null value, invoke the specified handler, otherwise do nothing.
     *
     * @param consumer block to be executed if the value is a null value
     * @return true if the block was called, or false otherwise
     */
    default boolean ifNull(NullHandler consumer) {
        if (isNull()) {
            consumer.call();
            return true;
        }
        return false;
    }

    /**
     * A handler that is invoked when a value is null; see {@link #ifNull(NullHandler)}.
     */
    @FunctionalInterface
    static interface NullHandler {
        void call();
    }
}

View File

@ -7,35 +7,53 @@
import java.util.Properties; import java.util.Properties;
import java.util.Set; import java.util.Set;
import java.util.function.Consumer;
import java.util.function.Predicate; import java.util.function.Predicate;
import io.debezium.annotation.Immutable; import io.debezium.annotation.Immutable;
import io.debezium.config.Configuration; import io.debezium.config.Configuration;
import io.debezium.config.Field;
import io.debezium.util.Collect; import io.debezium.util.Collect;
/** /**
* A specialized configuration for the Debezium driver. * A specialized configuration for the Debezium driver. This defines several known {@link io.debezium.config.Field
* fields} that are common to all JDBC configurations.
* *
* @author Randall Hauch * @author Randall Hauch
*/ */
@Immutable @Immutable
public interface JdbcConfiguration extends Configuration { public interface JdbcConfiguration extends Configuration {
public static final Field DATABASE = Configuration.field("dbname", /**
* A field for the name of the database. This field has no default value.
*/
public static final Field DATABASE = Field.create("dbname",
"Name of the database"); "Name of the database");
public static final Field USER = Configuration.field("user", /**
* A field for the user of the database. This field has no default value.
*/
public static final Field USER = Field.create("user",
"Name of the database user to be used when connecting to the database"); "Name of the database user to be used when connecting to the database");
public static final Field PASSWORD = Configuration.field("password", /**
* A field for the password of the database. This field has no default value.
*/
public static final Field PASSWORD = Field.create("password",
"Password to be used when connecting to the database"); "Password to be used when connecting to the database");
public static final Field HOSTNAME = Configuration.field("hostname", "IP address of the database"); /**
public static final Field PORT = Configuration.field("port", "Port of the database", 5432); * A field for the hostname of the database server. This field has no default value.
*/
public static final Field HOSTNAME = Field.create("hostname", "IP address of the database");
/**
* A field for the port of the database server. There is no default value.
*/
public static final Field PORT = Field.create("port", "Port of the database");
/** /**
* The set of pre-defined fields for JDBC configurations. * The set of names of the pre-defined JDBC configuration fields, including {@link #DATABASE}, {@link #USER},
* {@link #PASSWORD}, {@link #HOSTNAME}, and {@link #PORT}.
*/ */
public static Set<String> ALL_KNOWN_FIELDS = Collect.unmodifiableSet(Field::name, DATABASE, USER, PASSWORD, HOSTNAME, PORT); public static Set<String> ALL_KNOWN_FIELDS = Collect.unmodifiableSet(Field::name, DATABASE, USER, PASSWORD, HOSTNAME, PORT);
/** /**
* Obtain a {@link JdbcConfiguration} adapter for the given {@link Configuration}. * Obtain a {@link JdbcConfiguration} adapter for the given {@link Configuration}.
* *
@ -136,6 +154,20 @@ public Builder with(String key, String value) {
return this; return this;
} }
@Override
public Builder withDefault(String key, String value) {
if (!props.containsKey(key)) {
props.setProperty(key, value);
}
return this;
}
@Override
public Builder apply(Consumer<Builder> function) {
function.accept(this);
return this;
}
@Override @Override
public JdbcConfiguration build() { public JdbcConfiguration build() {
return JdbcConfiguration.adapt(Configuration.from(props)); return JdbcConfiguration.adapt(Configuration.from(props));
@ -163,6 +195,20 @@ public Builder with(String key, String value) {
return this; return this;
} }
@Override
public Builder withDefault(String key, String value) {
if (!props.containsKey(key)) {
props.setProperty(key, value);
}
return this;
}
@Override
public Builder apply(Consumer<Builder> function) {
function.accept(this);
return this;
}
@Override @Override
public JdbcConfiguration build() { public JdbcConfiguration build() {
return JdbcConfiguration.adapt(Configuration.from(props)); return JdbcConfiguration.adapt(Configuration.from(props));

View File

@ -14,7 +14,9 @@
import java.sql.Statement; import java.sql.Statement;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Collections; import java.util.Collections;
import java.util.HashMap;
import java.util.List; import java.util.List;
import java.util.Map;
import java.util.Properties; import java.util.Properties;
import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap; import java.util.concurrent.ConcurrentMap;
@ -25,6 +27,7 @@
import io.debezium.annotation.ThreadSafe; import io.debezium.annotation.ThreadSafe;
import io.debezium.config.Configuration; import io.debezium.config.Configuration;
import io.debezium.config.Field;
import io.debezium.relational.Column; import io.debezium.relational.Column;
import io.debezium.relational.ColumnEditor; import io.debezium.relational.ColumnEditor;
import io.debezium.relational.TableEditor; import io.debezium.relational.TableEditor;
@ -85,18 +88,20 @@ public static interface Operations {
* </ul> * </ul>
* *
* @param urlPattern the URL pattern string; may not be null * @param urlPattern the URL pattern string; may not be null
* @param variables any custom or overridden configuration variables
* @return the connection factory * @return the connection factory
*/ */
protected static ConnectionFactory patternBasedFactory(String urlPattern) { protected static ConnectionFactory patternBasedFactory(String urlPattern, Field... variables) {
return (config) -> { return (config) -> {
LOGGER.trace("Config: {}", config.asProperties()); LOGGER.trace("Config: {}", config.asProperties());
Properties props = config.asProperties(); Properties props = config.asProperties();
String url = findAndReplace(urlPattern, props, Field[] varsWithDefaults = combineVariables(variables,
JdbcConfiguration.HOSTNAME, JdbcConfiguration.HOSTNAME,
JdbcConfiguration.PORT, JdbcConfiguration.PORT,
JdbcConfiguration.USER, JdbcConfiguration.USER,
JdbcConfiguration.PASSWORD, JdbcConfiguration.PASSWORD,
JdbcConfiguration.DATABASE); JdbcConfiguration.DATABASE);
String url = findAndReplace(urlPattern, props, varsWithDefaults);
LOGGER.trace("Props: {}", props); LOGGER.trace("Props: {}", props);
LOGGER.trace("URL: {}", url); LOGGER.trace("URL: {}", url);
Connection conn = DriverManager.getConnection(url, props); Connection conn = DriverManager.getConnection(url, props);
@ -105,13 +110,29 @@ protected static ConnectionFactory patternBasedFactory(String urlPattern) {
}; };
} }
private static String findAndReplace(String url, Properties props, Configuration.Field... variables) { private static Field[] combineVariables(Field[] overriddenVariables,
for (Configuration.Field field : variables ) { Field... defaultVariables) {
Map<String, Field> fields = new HashMap<>();
if (defaultVariables != null) {
for (Field variable : defaultVariables) {
fields.put(variable.name(), variable);
}
}
if (overriddenVariables != null) {
for (Field variable : overriddenVariables) {
fields.put(variable.name(), variable);
}
}
return fields.values().toArray(new Field[fields.size()]);
}
private static String findAndReplace(String url, Properties props, Field... variables) {
for (Field field : variables) {
String variable = field.name(); String variable = field.name();
if (variable != null && url.contains("${" + variable + "}")) { if (variable != null && url.contains("${" + variable + "}")) {
// Otherwise, we have to remove it from the properties ... // Otherwise, we have to remove it from the properties ...
String value = props.getProperty(variable); String value = props.getProperty(variable);
if ( value != null ) { if (value != null) {
props.remove(variable); props.remove(variable);
// And replace the variable ... // And replace the variable ...
url = url.replaceAll("\\$\\{" + variable + "\\}", value); url = url.replaceAll("\\$\\{" + variable + "\\}", value);
@ -145,12 +166,34 @@ public JdbcConnection(Configuration config, ConnectionFactory connectionFactory)
* @param initialOperations the initial operations that should be run on each new connection; may be null * @param initialOperations the initial operations that should be run on each new connection; may be null
*/ */
public JdbcConnection(Configuration config, ConnectionFactory connectionFactory, Operations initialOperations) { public JdbcConnection(Configuration config, ConnectionFactory connectionFactory, Operations initialOperations) {
this.config = config; this(config,connectionFactory,initialOperations,null);
}
/**
* Create a new instance with the given configuration and connection factory, and specify the operations that should be
* run against each newly-established connection.
*
* @param config the configuration; may not be null
* @param connectionFactory the connection factory; may not be null
* @param initialOperations the initial operations that should be run on each new connection; may be null
* @param adapter the function that can be called to update the configuration with defaults
*/
protected JdbcConnection(Configuration config, ConnectionFactory connectionFactory, Operations initialOperations, Consumer<Configuration.Builder> adapter) {
this.config = adapter == null ? config : config.edit().apply(adapter).build();
this.factory = connectionFactory; this.factory = connectionFactory;
this.initialOps = initialOperations; this.initialOps = initialOperations;
this.conn = null; this.conn = null;
} }
/**
* Obtain the configuration for this connection.
*
* @return the JDBC configuration; never null
*/
public JdbcConfiguration config() {
return JdbcConfiguration.adapt(config);
}
/** /**
* Ensure a connection to the database is established. * Ensure a connection to the database is established.
* *
@ -215,22 +258,22 @@ public JdbcConnection query(String query, Consumer<ResultSet> resultConsumer) th
return this; return this;
} }
public void print(ResultSet resultSet ) { public void print(ResultSet resultSet) {
// CHECKSTYLE:OFF // CHECKSTYLE:OFF
print(resultSet,System.out::println); print(resultSet, System.out::println);
// CHECKSTYLE:ON // CHECKSTYLE:ON
} }
public void print(ResultSet resultSet, Consumer<String> lines ) { public void print(ResultSet resultSet, Consumer<String> lines) {
try { try {
ResultSetMetaData rsmd = resultSet.getMetaData(); ResultSetMetaData rsmd = resultSet.getMetaData();
int columnCount = rsmd.getColumnCount(); int columnCount = rsmd.getColumnCount();
int[] columnSizes = findMaxLength(resultSet); int[] columnSizes = findMaxLength(resultSet);
lines.accept(delimiter(columnCount, columnSizes)); lines.accept(delimiter(columnCount, columnSizes));
StringBuilder sb = new StringBuilder(); StringBuilder sb = new StringBuilder();
for ( int i=1; i<=columnCount; i++ ) { for (int i = 1; i <= columnCount; i++) {
if (i > 1) sb.append(" | "); if (i > 1) sb.append(" | ");
sb.append(Strings.setLength(rsmd.getColumnLabel(i),columnSizes[i],' ')); sb.append(Strings.setLength(rsmd.getColumnLabel(i), columnSizes[i], ' '));
} }
lines.accept(sb.toString()); lines.accept(sb.toString());
sb.setLength(0); sb.setLength(0);
@ -239,7 +282,7 @@ public void print(ResultSet resultSet, Consumer<String> lines ) {
sb.setLength(0); sb.setLength(0);
for (int i = 1; i <= columnCount; i++) { for (int i = 1; i <= columnCount; i++) {
if (i > 1) sb.append(" | "); if (i > 1) sb.append(" | ");
sb.append(Strings.setLength(resultSet.getString(i),columnSizes[i],' ')); sb.append(Strings.setLength(resultSet.getString(i), columnSizes[i], ' '));
} }
lines.accept(sb.toString()); lines.accept(sb.toString());
sb.setLength(0); sb.setLength(0);
@ -250,26 +293,26 @@ public void print(ResultSet resultSet, Consumer<String> lines ) {
} }
} }
private String delimiter( int columnCount, int[] columnSizes ) { private String delimiter(int columnCount, int[] columnSizes) {
StringBuilder sb = new StringBuilder(); StringBuilder sb = new StringBuilder();
for ( int i=1; i<=columnCount; i++ ) { for (int i = 1; i <= columnCount; i++) {
if (i > 1) sb.append("---"); if (i > 1) sb.append("---");
sb.append(Strings.createString('-',columnSizes[i])); sb.append(Strings.createString('-', columnSizes[i]));
} }
return sb.toString(); return sb.toString();
} }
private int[] findMaxLength( ResultSet resultSet ) throws SQLException { private int[] findMaxLength(ResultSet resultSet) throws SQLException {
ResultSetMetaData rsmd = resultSet.getMetaData(); ResultSetMetaData rsmd = resultSet.getMetaData();
int columnCount = rsmd.getColumnCount(); int columnCount = rsmd.getColumnCount();
int[] columnSizes = new int[columnCount+1]; int[] columnSizes = new int[columnCount + 1];
for ( int i=1; i<=columnCount; i++ ) { for (int i = 1; i <= columnCount; i++) {
columnSizes[i] = Math.max(columnSizes[i], rsmd.getColumnLabel(i).length()); columnSizes[i] = Math.max(columnSizes[i], rsmd.getColumnLabel(i).length());
} }
while (resultSet.next()) { while (resultSet.next()) {
for (int i = 1; i <= columnCount; i++) { for (int i = 1; i <= columnCount; i++) {
String value = resultSet.getString(i); String value = resultSet.getString(i);
if ( value != null ) columnSizes[i] = Math.max(columnSizes[i], value.length()); if (value != null) columnSizes[i] = Math.max(columnSizes[i], value.length());
} }
} }
resultSet.beforeFirst(); resultSet.beforeFirst();
@ -375,7 +418,7 @@ public void readSchema(Tables tables, String databaseCatalog, String schemaNameP
*/ */
public static void columnsFor(ResultSet resultSet, TableEditor editor) throws SQLException { public static void columnsFor(ResultSet resultSet, TableEditor editor) throws SQLException {
List<Column> columns = new ArrayList<>(); List<Column> columns = new ArrayList<>();
columnsFor(resultSet,columns::add); columnsFor(resultSet, columns::add);
editor.setColumns(columns); editor.setColumns(columns);
} }
@ -407,6 +450,4 @@ private static boolean isNullable(int jdbcNullable) {
return jdbcNullable == ResultSetMetaData.columnNullable || jdbcNullable == ResultSetMetaData.columnNullableUnknown; return jdbcNullable == ResultSetMetaData.columnNullable || jdbcNullable == ResultSetMetaData.columnNullableUnknown;
} }
} }

View File

@ -62,6 +62,23 @@ public Column columnWithName(String name) {
return columnsByLowercaseName.get(name.toLowerCase()); return columnsByLowercaseName.get(name.toLowerCase());
} }
@Override
public int hashCode() {
return id.hashCode();
}
@Override
public boolean equals(Object obj) {
if ( obj== this) return true;
if ( obj instanceof Table ) {
Table that = (Table)obj;
return this.id().equals(that.id())
&& this.columns().equals(that.columns())
&& this.primaryKeyColumnNames().equals(that.primaryKeyColumnNames());
}
return false;
}
@Override @Override
public String toString() { public String toString() {
StringBuilder sb = new StringBuilder(); StringBuilder sb = new StringBuilder();

View File

@ -6,11 +6,11 @@
package io.debezium.relational; package io.debezium.relational;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet; import java.util.HashSet;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.Set; import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Function; import java.util.function.Function;
import org.apache.kafka.connect.data.Schema; import org.apache.kafka.connect.data.Schema;
@ -65,7 +65,7 @@ public static interface ColumnFilter {
} }
private final FunctionalReadWriteLock lock = FunctionalReadWriteLock.reentrant(); private final FunctionalReadWriteLock lock = FunctionalReadWriteLock.reentrant();
private final Map<TableId, TableImpl> tablesByTableId = new HashMap<>(); private final Map<TableId, TableImpl> tablesByTableId = new ConcurrentHashMap<>();
private final Set<TableId> changes = new HashSet<>(); private final Set<TableId> changes = new HashSet<>();
/** /**
@ -299,6 +299,21 @@ public TableEditor editOrCreateTable(String catalogName, String schemaName, Stri
return editOrCreateTable(new TableId(catalogName, schemaName, tableName)); return editOrCreateTable(new TableId(catalogName, schemaName, tableName));
} }
@Override
public int hashCode() {
return tablesByTableId.hashCode();
}
@Override
public boolean equals(Object obj) {
if ( obj == this ) return true;
if ( obj instanceof Tables ) {
Tables that = (Tables)obj;
return this.tablesByTableId.equals(that.tablesByTableId);
}
return false;
}
@Override @Override
public String toString() { public String toString() {
return lock.read(() -> { return lock.read(() -> {

View File

@ -108,7 +108,7 @@ protected int determineTokenType(int type, String token) {
* *
* @param name the name of the current schema; may be null * @param name the name of the current schema; may be null
*/ */
protected void setCurrentSchema(String name) { public void setCurrentSchema(String name) {
this.currentSchema = name; this.currentSchema = name;
} }
@ -117,7 +117,7 @@ protected void setCurrentSchema(String name) {
* *
* @return the current schema name, or null if the current schema name has not been {@link #setCurrentSchema(String) set} * @return the current schema name, or null if the current schema name has not been {@link #setCurrentSchema(String) set}
*/ */
protected String currentSchema() { public String currentSchema() {
return currentSchema; return currentSchema;
} }

View File

@ -575,7 +575,7 @@ protected void parseAlterTable(Marker start) {
// Update the table ... // Update the table ...
Column newColumnDefn = column.create(); Column newColumnDefn = column.create();
table.setColumns(newColumnDefn); table.addColumn(newColumnDefn);
if (isPrimaryKey.get()) { if (isPrimaryKey.get()) {
table.setPrimaryKeyNames(newColumnDefn.name()); table.setPrimaryKeyNames(newColumnDefn.name());
} }

View File

@ -0,0 +1,62 @@
/*
* Copyright Debezium Authors.
*
* Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
*/
package io.debezium.relational.history;
import java.util.Map;
import java.util.function.Consumer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import io.debezium.config.Configuration;
import io.debezium.relational.Tables;
import io.debezium.relational.ddl.DdlParser;
/**
 * Base class for {@link DatabaseHistory} implementations: converts the record/recover API into
 * storage and replay of individual {@link HistoryRecord}s, leaving only the storage mechanism
 * to subclasses.
 *
 * @author Randall Hauch
 */
public abstract class AbstractDatabaseHistory implements DatabaseHistory {

    // Configuration captured in configure(); available to subclasses
    protected Configuration config;
    protected final Logger logger = LoggerFactory.getLogger(getClass());

    protected AbstractDatabaseHistory() {
    }

    @Override
    public void configure(Configuration config) {
        this.config = config;
    }

    @Override
    public final void record(Map<String, ?> source, Map<String, ?> position, String databaseName, Tables schema, String ddl) {
        // Wrap the change in a HistoryRecord and delegate storage to the subclass ...
        storeRecord(new HistoryRecord(source, position, databaseName, ddl));
    }

    @Override
    public final void recover(Map<String, ?> source, Map<String, ?> position, Tables schema, DdlParser ddlParser) {
        // Replay every stored record at or before the given position, applying each record's
        // DDL to the schema via the parser ...
        HistoryRecord stopPoint = new HistoryRecord(source, position, null, null);
        recoverRecords(schema,ddlParser,recovered->{
            if (recovered.isAtOrBefore(stopPoint)) {
                ddlParser.setCurrentSchema(recovered.databaseName()); // may be null
                String ddl = recovered.ddl();
                if (ddl != null) {
                    ddlParser.parse(ddl, schema);
                }
            }
        });
    }

    /**
     * Store the given record in this history's storage.
     *
     * @param record the record to store; never null
     */
    protected abstract void storeRecord(HistoryRecord record);

    /**
     * Supply previously-stored records to the given consumer, in the order they were stored.
     *
     * @param schema the schema being recovered; never null
     * @param ddlParser the DDL parser used during recovery; never null
     * @param records the consumer to which each recovered record is passed; never null
     */
    protected abstract void recoverRecords(Tables schema, DdlParser ddlParser, Consumer<HistoryRecord> records);

    @Override
    public void shutdown() {
        // do nothing
    }
}

View File

@ -0,0 +1,59 @@
/*
* Copyright Debezium Authors.
*
* Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
*/
package io.debezium.relational.history;
import java.util.Map;
import io.debezium.config.Configuration;
import io.debezium.relational.Tables;
import io.debezium.relational.ddl.DdlParser;
/**
 * A history of the database schema described by a {@link Tables}. Changes to the database schema can be
 * {@link #record(Map, Map, String, Tables, String) recorded}, and a {@link Tables database schema} can be
 * {@link #recover(Map, Map, Tables, DdlParser) recovered} to various points in that history.
 *
 * @author Randall Hauch
 */
public interface DatabaseHistory {

    /**
     * Configure this instance.
     *
     * @param config the configuration for this history store
     */
    void configure(Configuration config);

    /**
     * Record a change to the schema of the named database, and store it in the schema storage.
     *
     * @param source the information about the source database; may not be null
     * @param position the point in history where these DDL changes were made, which may be used when
     *            {@link #recover(Map, Map, Tables, DdlParser) recovering} the schema to some point in history; may not be
     *            null
     * @param databaseName the name of the database whose schema is being changed; may not be null
     * @param schema the current definition of the database schema; may not be null
     * @param ddl the DDL statements that describe the changes to the database schema; may not be null
     */
    void record(Map<String, ?> source, Map<String, ?> position, String databaseName, Tables schema, String ddl);

    /**
     * Recover the {@link Tables database schema} to a known point in its history. Note that it is possible to recover the
     * database schema to a point in history that is earlier than what has been {@link #record(Map, Map, String, Tables, String)
     * recorded}. Likewise, when recovering to a point in history <em>later</em> than what was recorded, the database schema will
     * reflect the latest state known to the history.
     *
     * @param source the information about the source database; may not be null
     * @param position the point in history at which the {@link Tables database schema} should be recovered; may not be null
     * @param schema the definition of the schema for the named {@code database}; may not be null
     * @param ddlParser the DDL parser that can be used to apply DDL statements to the given {@code schema}; may not be null
     */
    void recover(Map<String, ?> source, Map<String, ?> position, Tables schema, DdlParser ddlParser);

    /**
     * Stop recording history and release any resources acquired since {@link #configure(Configuration)}.
     */
    void shutdown();
}

View File

@ -0,0 +1,85 @@
/*
* Copyright Debezium Authors.
*
* Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
*/
package io.debezium.relational.history;
import java.io.IOException;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.LinkOption;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;
import java.util.Collection;
import java.util.function.Consumer;
import io.debezium.annotation.ThreadSafe;
import io.debezium.config.Configuration;
import io.debezium.config.Field;
import io.debezium.document.DocumentReader;
import io.debezium.document.DocumentWriter;
import io.debezium.relational.Tables;
import io.debezium.relational.ddl.DdlParser;
import io.debezium.util.Collect;
import io.debezium.util.FunctionalReadWriteLock;
/**
* A {@link DatabaseHistory} implementation that stores the schema history in a local file.
*
* @author Randall Hauch
*/
@ThreadSafe
public final class FileDatabaseHistory extends AbstractDatabaseHistory {
@SuppressWarnings("unchecked")
public static final Field FILE_PATH = Field.create("path")
.withDescription("The path to the file that will be used to record the database history")
.withValidation(Field::isRequired);
public static Collection<Field> ALL_FIELDS = Collect.arrayListOf(FILE_PATH);
private static final Charset UTF8 = StandardCharsets.UTF_8;
private final FunctionalReadWriteLock lock = FunctionalReadWriteLock.reentrant();
private final DocumentWriter writer = DocumentWriter.defaultWriter();
private final DocumentReader reader = DocumentReader.defaultReader();
private Path path;
@Override
public void configure(Configuration config) {
config.validate(ALL_FIELDS, logger::error);
super.configure(config);
path = Paths.get(config.getString(FILE_PATH));
}
@Override
protected void storeRecord(HistoryRecord record) {
lock.write(() -> {
try {
String line = writer.write(record.document());
if (!Files.exists(path, LinkOption.NOFOLLOW_LINKS)) {
Files.createDirectories(path.getParent());
Files.createFile(path);
}
Files.write(path, Collect.arrayListOf(line), UTF8, StandardOpenOption.APPEND);
} catch (IOException e) {
logger.error("Failed to add record to history at {}: {}", path, record, e);
}
});
}
@Override
protected void recoverRecords(Tables schema, DdlParser ddlParser, Consumer<HistoryRecord> records) {
lock.write(() -> {
try {
for ( String line : Files.readAllLines(path)) {
records.accept(new HistoryRecord(reader.read(line)));
}
} catch (IOException e) {
logger.error("Failed to add recover records from history at {}", path, e);
}
});
}
}

View File

@ -0,0 +1,64 @@
/*
* Copyright Debezium Authors.
*
* Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
*/
package io.debezium.relational.history;
import java.util.Map;
import io.debezium.document.Document;
/**
 * A single entry in the database schema history, wrapping a {@link Document} with
 * "source", "position", "databaseName", and "ddl" fields.
 */
public class HistoryRecord {

    private final Document doc;

    /**
     * Create a record around a previously-serialized document (e.g. one read back from storage).
     *
     * @param document the document; may not be null
     */
    public HistoryRecord(Document document) {
        this.doc = document;
    }

    /**
     * Create a record from its constituent parts.
     *
     * @param source information about the source database; may be null
     * @param position the position in the source's history; may be null
     * @param databaseName the name of the affected database; may be null
     * @param ddl the DDL statement(s); may be null
     */
    public HistoryRecord(Map<String, ?> source, Map<String, ?> position, String databaseName, String ddl) {
        this.doc = Document.create();
        Document src = doc.setDocument("source");
        if (source != null) source.forEach(src::set);
        Document pos = doc.setDocument("position");
        if (position != null) position.forEach(pos::set);
        // the next two fields are optional and absent when null ...
        if (databaseName != null) doc.setString("databaseName", databaseName);
        if (ddl != null) doc.setString("ddl", ddl);
    }

    public Document document() {
        return this.doc;
    }

    /**
     * Determine whether this record's position is at or before the other record's position,
     * with both records having the same source.
     *
     * @param other the record to compare against; may not be null
     * @return true if this record is at or before {@code other}
     */
    public boolean isAtOrBefore(HistoryRecord other) {
        if (other == this) return true;
        return this.position().compareToWithoutFieldOrder(other.position()) <= 0
                && source().equals(other.source());
    }

    protected Document source() {
        return doc.getDocument("source");
    }

    protected Document position() {
        return doc.getDocument("position");
    }

    protected String databaseName() {
        return doc.getString("databaseName"); // may be null; only set when supplied
    }

    protected String ddl() {
        return doc.getString("ddl"); // may be null; only set when supplied
    }

    protected boolean hasSameSource(HistoryRecord other) {
        if (this == other) return true;
        return other != null && source().equals(other.source());
    }

    protected boolean hasSameDatabase(HistoryRecord other) {
        if (this == other) return true;
        if (other == null) return false;
        // 'databaseName' is optional, so compare null-safely; the original NPE'd when absent
        String thisName = databaseName();
        String thatName = other.databaseName();
        return thisName == null ? thatName == null : thisName.equals(thatName);
    }

    @Override
    public String toString() {
        // Recovery (e.g. the Kafka-based history) parses the stored string back into a Document,
        // so toString() must render the document itself rather than Object's identity string.
        // NOTE(review): assumes Document.toString() yields the serialized form — confirm.
        return doc.toString();
    }
}

View File

@ -0,0 +1,125 @@
/*
* Copyright Debezium Authors.
*
* Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
*/
package io.debezium.relational.history;
import java.io.IOException;
import java.util.Collection;
import java.util.UUID;
import java.util.function.Consumer;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.apache.kafka.connect.source.SourceRecord;

import io.debezium.annotation.NotThreadSafe;
import io.debezium.config.Configuration;
import io.debezium.config.Field;
import io.debezium.document.DocumentReader;
import io.debezium.relational.Tables;
import io.debezium.relational.ddl.DdlParser;
import io.debezium.util.Collect;
/**
 * A {@link DatabaseHistory} implementation that records schema changes as normal {@link SourceRecord}s on the specified topic,
 * and that recovers the history by establishing a Kafka Consumer re-processing all messages on that topic.
 *
 * @author Randall Hauch
 */
@NotThreadSafe
public class KafkaDatabaseHistory extends AbstractDatabaseHistory {

    @SuppressWarnings("unchecked")
    public static final Field TOPIC = Field.create("topic")
                                           .withDescription("The name of the topic for the database schema history")
                                           .withValidation(Field::isRequired);

    @SuppressWarnings("unchecked")
    public static final Field BOOTSTRAP_SERVERS = Field.create("bootstrap.servers")
                                                       .withDescription("A list of host/port pairs that the connector will use for establishing the initial "
                                                               + "connection to the Kafka cluster for retrieving database schema history previously stored "
                                                               + "by the connector. This should point to the same Kafka cluster used by the Kafka Connect "
                                                               + "process.")
                                                       .withValidation(Field::isRequired);

    public static Collection<Field> ALL_FIELDS = Collect.arrayListOf(TOPIC, BOOTSTRAP_SERVERS);

    private final DocumentReader reader = DocumentReader.defaultReader();
    // The history topic is written to and read from a single, fixed partition
    private final int partition = 0;
    private String topicName;
    private Configuration consumerConfig;
    private Configuration producerConfig;
    private KafkaProducer<String, String> producer;

    @Override
    public void configure(Configuration config) {
        config.validate(ALL_FIELDS, logger::error);
        super.configure(config);
        this.topicName = config.getString(TOPIC);
        String bootstrapServers = config.getString(BOOTSTRAP_SERVERS);
        // Copy the relevant portions of the configuration and add useful defaults ...
        this.consumerConfig = config.subset("consumer.", true).edit()
                                    .withDefault("bootstrap.servers", bootstrapServers)
                                    .withDefault("group.id", UUID.randomUUID().toString())
                                    .withDefault("enable.auto.commit", false)
                                    .withDefault("session.timeout.ms", 30000)
                                    .withDefault("key.deserializer", StringDeserializer.class.getName())
                                    .withDefault("value.deserializer", StringDeserializer.class.getName())
                                    .build();
        this.producerConfig = config.subset("producer.", true).edit()
                                    .withDefault("bootstrap.servers", bootstrapServers)
                                    .withDefault("acks", "all")
                                    .withDefault("retries", 1) // may result in duplicate messages, but that's okay
                                    .withDefault("batch.size", 1024) // enough 1024 byte messages per batch
                                    .withDefault("linger.ms", 1)
                                    .withDefault("buffer.memory", 1048576) // 1MB
                                    // producers take '*.serializer' settings; the original mistakenly
                                    // configured 'key.deserializer'/'value.deserializer' here
                                    .withDefault("key.serializer", StringSerializer.class.getName())
                                    .withDefault("value.serializer", StringSerializer.class.getName())
                                    .build();
        this.producer = new KafkaProducer<>(this.producerConfig.asProperties());
    }

    @Override
    protected void storeRecord(HistoryRecord record) {
        // NOTE(review): relies on HistoryRecord.toString() producing the serialized document,
        // since recovery parses the stored value with a DocumentReader — confirm.
        this.producer.send(new ProducerRecord<String, String>(topicName, partition, null, record.toString()));
    }

    @Override
    protected void recoverRecords(Tables schema, DdlParser ddlParser, Consumer<HistoryRecord> records) {
        try (KafkaConsumer<String, String> historyConsumer = new KafkaConsumer<>(consumerConfig.asProperties())) {
            // Subscribe to the only partition for this topic, and seek to the beginning of that partition ...
            TopicPartition topicPartition = new TopicPartition(topicName, partition);
            historyConsumer.assign(Collect.arrayListOf(topicPartition));
            historyConsumer.seekToBeginning(topicPartition);
            // Read all messages in the topic. The original looped forever here; treat an
            // empty poll as having reached the end of the previously-recorded history ...
            while (true) {
                ConsumerRecords<String, String> recoveredRecords = historyConsumer.poll(100);
                if (recoveredRecords.isEmpty()) {
                    break;
                }
                for (ConsumerRecord<String, String> record : recoveredRecords) {
                    try {
                        records.accept(new HistoryRecord(reader.read(record.value())));
                    } catch (IOException e) {
                        logger.error("Error while deserializing history record", e);
                    }
                }
            }
        }
    }

    @Override
    public void shutdown() {
        try {
            if (this.producer != null) this.producer.close();
        } finally {
            this.producer = null;
            super.shutdown();
        }
    }
}

View File

@ -0,0 +1,43 @@
/*
* Copyright Debezium Authors.
*
* Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
*/
package io.debezium.relational.history;
import java.util.ArrayList;
import java.util.List;
import java.util.function.Consumer;
import io.debezium.annotation.ThreadSafe;
import io.debezium.relational.Tables;
import io.debezium.relational.ddl.DdlParser;
import io.debezium.util.FunctionalReadWriteLock;
/**
 * A {@link DatabaseHistory} implementation that keeps the schema history entirely in memory;
 * the history does not survive the lifetime of this instance.
 *
 * @author Randall Hauch
 */
@ThreadSafe
public final class MemoryDatabaseHistory extends AbstractDatabaseHistory {

    // All stored records, in insertion order; guarded by 'lock'
    private final List<HistoryRecord> records = new ArrayList<>();
    private final FunctionalReadWriteLock lock = FunctionalReadWriteLock.reentrant();

    /**
     * Create an instance that keeps the history in memory.
     */
    public MemoryDatabaseHistory() {
    }

    @Override
    protected void storeRecord(HistoryRecord record) {
        lock.write(() -> records.add(record));
    }

    @Override
    protected void recoverRecords(Tables schema, DdlParser ddlParser, Consumer<HistoryRecord> records) {
        // Replay every stored record to the consumer; the write lock gives a consistent snapshot
        lock.write(() -> this.records.forEach(records));
    }
}

View File

@ -337,6 +337,7 @@
* {@link Tokenizer}s with exactly this behavior can actually be created using the {@link #basicTokenizer(boolean)} method. So * {@link Tokenizer}s with exactly this behavior can actually be created using the {@link #basicTokenizer(boolean)} method. So
* while this very basic implementation is not meant to be used in all situations, it may be useful in some situations. * while this very basic implementation is not meant to be used in all situations, it may be useful in some situations.
* </p> * </p>
*
* @author Randall Hauch * @author Randall Hauch
* @author Horia Chiorean * @author Horia Chiorean
* @author Daniel Kelleher * @author Daniel Kelleher

View File

@ -0,0 +1,52 @@
/*
* Copyright Debezium Authors.
*
* Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
*/
package io.debezium.util;
/**
 * An abstraction for a clock, allowing time-dependent code to be supplied with a
 * controllable time source.
 *
 * @author Randall Hauch
 */
public interface Clock {

    /**
     * The {@link Clock} instance that delegates to the {@link System} methods.
     */
    static final Clock SYSTEM = new Clock() {
        @Override
        public long currentTimeInMillis() {
            return System.currentTimeMillis();
        }

        @Override
        public long currentTimeInNanos() {
            return System.nanoTime();
        }
    };

    /**
     * Get the {@link Clock} instance that uses the {@link System} methods.
     *
     * @return the system clock; never null
     */
    static Clock system() {
        return SYSTEM;
    }

    /**
     * Get the current time in milliseconds.
     *
     * @return the current time in milliseconds.
     */
    public long currentTimeInMillis();

    /**
     * Get the current time in nanoseconds; by default this is derived from
     * {@link #currentTimeInMillis()}.
     *
     * @return the current time in nanoseconds.
     */
    default long currentTimeInNanos() {
        return currentTimeInMillis() * 1000000L;
    }
}

View File

@ -140,6 +140,37 @@ public static <K, V> Map<K, V> hashMapOf(K key1, V value1, K key2, V value2, K k
return map; return map;
} }
/**
 * Create an insertion-ordered {@link LinkedHashMap} containing one entry.
 *
 * @param key the key
 * @param value the value
 * @return the mutable, insertion-ordered map; never null
 */
public static <K, V> Map<K, V> linkMapOf(K key, V value) {
    Map<K, V> result = new LinkedHashMap<>();
    result.put(key, value);
    return result;
}
/**
 * Create an insertion-ordered {@link LinkedHashMap} containing two entries.
 *
 * @param key1 the first key
 * @param value1 the value for {@code key1}
 * @param key2 the second key
 * @param value2 the value for {@code key2}
 * @return the mutable, insertion-ordered map; never null
 */
public static <K, V> Map<K, V> linkMapOf(K key1, V value1, K key2, V value2) {
    Map<K, V> result = new LinkedHashMap<>();
    result.put(key1, value1);
    result.put(key2, value2);
    return result;
}
/**
 * Create an insertion-ordered {@link LinkedHashMap} containing three entries.
 *
 * @param key1 the first key
 * @param value1 the value for {@code key1}
 * @param key2 the second key
 * @param value2 the value for {@code key2}
 * @param key3 the third key
 * @param value3 the value for {@code key3}
 * @return the mutable, insertion-ordered map; never null
 */
public static <K, V> Map<K, V> linkMapOf(K key1, V value1, K key2, V value2, K key3, V value3) {
    Map<K, V> result = new LinkedHashMap<>();
    result.put(key1, value1);
    result.put(key2, value2);
    result.put(key3, value3);
    return result;
}
/**
 * Create an insertion-ordered {@link LinkedHashMap} containing four entries.
 *
 * @param key1 the first key
 * @param value1 the value for {@code key1}
 * @param key2 the second key
 * @param value2 the value for {@code key2}
 * @param key3 the third key
 * @param value3 the value for {@code key3}
 * @param key4 the fourth key
 * @param value4 the value for {@code key4}
 * @return the mutable, insertion-ordered map; never null
 */
public static <K, V> Map<K, V> linkMapOf(K key1, V value1, K key2, V value2, K key3, V value3, K key4, V value4) {
    Map<K, V> result = new LinkedHashMap<>();
    result.put(key1, value1);
    result.put(key2, value2);
    result.put(key3, value3);
    result.put(key4, value4);
    return result;
}
/** /**
* Set the value at the given position in the list, expanding the list as required to accommodate the new position. * Set the value at the given position in the list, expanding the list as required to accommodate the new position.
* <p> * <p>

View File

@ -309,6 +309,11 @@ public static File createFile(Path path) {
throw new IllegalStateException("Expecting '" + path + "' to be a file but found a directory"); throw new IllegalStateException("Expecting '" + path + "' to be a file but found a directory");
} }
file.getParentFile().mkdirs(); file.getParentFile().mkdirs();
try {
Files.createFile(path);
} catch (IOException e) {
throw new IllegalStateException("Unable to create the file '" + path + "': " + e.getMessage(), e);
}
return file; return file;
} }
@ -426,6 +431,7 @@ private static void logMessage(InputStream stream, Consumer<String> logger, Stri
/** /**
* Atomically load the properties file at the given location within the designated class loader. * Atomically load the properties file at the given location within the designated class loader.
*
* @param classLoader the supplier for the class loader; may not be null or return null * @param classLoader the supplier for the class loader; may not be null or return null
* @param classpathResource the path to the resource file; may not be null * @param classpathResource the path to the resource file; may not be null
* @return the properties object; never null, but possibly empty * @return the properties object; never null, but possibly empty
@ -445,24 +451,26 @@ public static Properties loadProperties(Supplier<ClassLoader> classLoader, Strin
/** /**
* Atomically load the properties file at the given location within the designated class loader. * Atomically load the properties file at the given location within the designated class loader.
*
* @param classLoader the class loader; may not be null * @param classLoader the class loader; may not be null
* @param classpathResource the path to the resource file; may not be null * @param classpathResource the path to the resource file; may not be null
* @return the properties object; never null, but possibly empty * @return the properties object; never null, but possibly empty
* @throws IllegalStateException if the file could not be found or read * @throws IllegalStateException if the file could not be found or read
*/ */
public static Properties loadProperties(ClassLoader classLoader, String classpathResource) { public static Properties loadProperties(ClassLoader classLoader, String classpathResource) {
return loadProperties(()->classLoader,classpathResource); return loadProperties(() -> classLoader, classpathResource);
} }
/** /**
* Atomically load the properties file at the given location within the designated class' class loader. * Atomically load the properties file at the given location within the designated class' class loader.
*
* @param clazz the class whose class loader is to be used; may not be null * @param clazz the class whose class loader is to be used; may not be null
* @param classpathResource the path to the resource file; may not be null * @param classpathResource the path to the resource file; may not be null
* @return the properties object; never null, but possibly empty * @return the properties object; never null, but possibly empty
* @throws IllegalStateException if the file could not be found or read * @throws IllegalStateException if the file could not be found or read
*/ */
public static Properties loadProperties(Class<?> clazz, String classpathResource) { public static Properties loadProperties(Class<?> clazz, String classpathResource) {
return loadProperties(clazz::getClassLoader,classpathResource); return loadProperties(clazz::getClassLoader, classpathResource);
} }
private IoUtil() { private IoUtil() {

View File

@ -0,0 +1,316 @@
/*
* Copyright Debezium Authors.
*
* Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
*/
package io.debezium.util;
import java.util.Iterator;
import java.util.NoSuchElementException;
import java.util.function.BiFunction;
import java.util.function.Function;
import io.debezium.annotation.Immutable;
/**
* A utility for creating iterators.
*
* @author Randall Hauch
*/
@Immutable
public class Iterators {
/**
 * Create an iterator over no elements.
 *
 * @return the empty iterator; never null
 */
public static <T> Iterator<T> empty() {
    return new Iterator<T>() {
        @Override
        public boolean hasNext() {
            return false;
        }

        @Override
        public T next() {
            throw new NoSuchElementException();
        }
    };
}
/**
 * Create an iterator over a single value.
 *
 * @param value the sole value
 * @return the single-element iterator; never null
 */
public static <T> Iterator<T> with(final T value) {
    return new Iterator<T>() {
        private boolean consumed = false; // true once 'value' has been returned

        @Override
        public boolean hasNext() {
            return !consumed;
        }

        @Override
        public T next() {
            if (consumed) throw new NoSuchElementException();
            consumed = true;
            return value;
        }
    };
}
/**
 * Create an iterator over two values, in order.
 *
 * @param value1 the first value
 * @param value2 the second value
 * @return the iterator; never null
 */
public static <T> Iterator<T> with(T value1, T value2) {
    return new Iterator<T>() {
        private int returned = 0; // how many values have been handed out so far

        @Override
        public boolean hasNext() {
            return returned < 2;
        }

        @Override
        public T next() {
            switch (returned) {
                case 0:
                    ++returned;
                    return value1;
                case 1:
                    ++returned;
                    return value2;
                default:
                    throw new NoSuchElementException();
            }
        }
    };
}
/**
 * Create an iterator over three values, in order.
 *
 * @param value1 the first value
 * @param value2 the second value
 * @param value3 the third value
 * @return the iterator; never null
 */
public static <T> Iterator<T> with(T value1, T value2, T value3) {
    return new Iterator<T>() {
        private int returned = 0; // how many values have been handed out so far

        @Override
        public boolean hasNext() {
            return returned < 3;
        }

        @Override
        public T next() {
            switch (returned) {
                case 0:
                    ++returned;
                    return value1;
                case 1:
                    ++returned;
                    return value2;
                case 2:
                    ++returned;
                    return value3;
                default:
                    throw new NoSuchElementException();
            }
        }
    };
}
/**
 * Create an iterator over three explicit values followed by any number of additional values.
 *
 * @param value1 the first value
 * @param value2 the second value
 * @param value3 the third value
 * @param additional the values after the first three; may be empty but not null
 * @return the iterator; never null
 */
@SafeVarargs
public static <T> Iterator<T> with(T value1, T value2, T value3, T... additional) {
    return new Iterator<T>() {
        private int position = 0; // index of the next value to return

        @Override
        public boolean hasNext() {
            return position < additional.length + 3;
        }

        @Override
        public T next() {
            if (!hasNext()) throw new NoSuchElementException();
            int current = position++;
            if (current == 0) return value1;
            if (current == 1) return value2;
            if (current == 2) return value3;
            return additional[current - 3];
        }
    };
}
/**
 * Create an iterator over the elements of the given array, in order.
 *
 * @param values the array of values; may not be null
 * @return the iterator; never null
 */
public static <T> Iterator<T> with(T[] values) {
    return new Iterator<T>() {
        private int position = 0; // index of the next element to return

        @Override
        public boolean hasNext() {
            return position < values.length;
        }

        @Override
        public T next() {
            if (position >= values.length) throw new NoSuchElementException();
            return values[position++];
        }
    };
}
/**
 * Create an iterator that combines pairwise the elements of the two iterables using the given function.
 *
 * @param first the iterable producing the first argument of each combination
 * @param second the iterable producing the second argument of each combination
 * @param conversion the combining function
 * @return the combining iterator; never null
 */
public static <T, U, V> Iterator<V> around(Iterable<? extends T> first,
                                           Iterable<? extends U> second,
                                           BiFunction<T, U, V> conversion) {
    return around(first.iterator(), second.iterator(), conversion);
}
/**
 * Create an iterator that combines pairwise the elements of the two iterators using the given function.
 * <p>
 * NOTE(review): iteration is driven solely by {@code second}; if {@code first} has fewer elements,
 * {@code next()} will throw even though {@code hasNext()} returned true — confirm this is intended.
 *
 * @param first the iterator producing the first argument of each combination
 * @param second the iterator producing the second argument of each combination; drives {@code hasNext()}
 * @param combineFirstAndSecond the combining function
 * @return the combining iterator; never null
 */
public static <T, U, V> Iterator<V> around(final Iterator<? extends T> first,
                                           final Iterator<? extends U> second,
                                           final BiFunction<T, U, V> combineFirstAndSecond) {
    return new Iterator<V>() {
        @Override
        public boolean hasNext() {
            return second.hasNext();
        }

        @Override
        public V next() {
            return combineFirstAndSecond.apply(first.next(), second.next());
        }
    };
}
/**
 * Create an iterator over the given iterable's elements, converting each with the given function.
 *
 * @param iterable the source of the elements
 * @param conversion the function applied to each element
 * @return the converting iterator; never null
 */
public static <V, T> Iterator<T> around(final Iterable<? extends V> iterable, Function<V, T> conversion) {
    return around(iterable.iterator(), conversion);
}
public static <V, T> Iterator<T> around(final Iterator<? extends V> iterator, Function<V, T> conversion) {
return new Iterator<T>() {
@Override
public boolean hasNext() {
return iterator.hasNext();
}
@Override
public T next() {
return conversion.apply(iterator.next());
}
@Override
public void remove() {
iterator.remove();
}
};
}
/**
 * Expose the supplied iterator as an {@link Iterable}.
 * <p>
 * NOTE: the returned Iterable is single-use — every call to {@code iterator()} returns
 * the same underlying {@link Iterator} instance, so it can be iterated only once.
 *
 * @param iterator the iterator to expose; may not be null
 * @return the single-use iterable; never null
 */
public static <T> Iterable<T> around(final Iterator<T> iterator) {
    return () -> iterator;
}
/**
 * Wrap the supplied iterator in a read-only view.
 * <p>
 * The wrapper deliberately does not override {@code remove()}; the default
 * implementation of {@link Iterator#remove()} throws
 * {@link UnsupportedOperationException}, which makes the view read-only.
 *
 * @param delegate the iterator to wrap; may not be null
 * @return the read-only iterator; never null
 */
public static <T> Iterator<T> readOnly(final Iterator<T> delegate) {
    return new Iterator<T>() {
        @Override
        public boolean hasNext() {
            return delegate.hasNext();
        }

        @Override
        public T next() {
            return delegate.next();
        }
    };
}

/**
 * Wrap the supplied iterator in a read-only view that converts each value with the
 * given function.
 *
 * @param delegate the iterator to wrap; may not be null
 * @param conversion the function applied to each value; may not be null
 * @return the read-only iterator over the converted values; never null
 */
public static <V, T> Iterator<T> readOnly(final Iterator<? extends V> delegate, Function<V, T> conversion) {
    return new Iterator<T>() {
        @Override
        public boolean hasNext() {
            return delegate.hasNext();
        }

        @Override
        public T next() {
            V element = delegate.next();
            return conversion.apply(element);
        }
    };
}
/**
 * Create a read-only iterator over the iterable's values.
 *
 * @param iterable the source of values; may not be null
 * @return the read-only iterator; never null
 */
public static <T> Iterator<T> readOnly(final Iterable<T> iterable) {
    return readOnly(iterable.iterator());
}

/**
 * Create a read-only iterator over the iterable's values, converting each one with the
 * given function.
 *
 * @param iterable the source of values; may not be null
 * @param conversion the function applied to each value; may not be null
 * @return the read-only iterator over the converted values; never null
 */
public static <V, T> Iterator<T> readOnly(final Iterable<V> iterable, Function<V, T> conversion) {
    return readOnly(iterable.iterator(), conversion);
}

/**
 * Create an {@link Iterable} whose iterators are read-only views of the supplied
 * iterable's iterators.
 *
 * @param iterable the iterable to wrap; may not be null
 * @return the read-only iterable; never null
 */
public static <T> Iterable<T> readOnlyIterable(final Iterable<T> iterable) {
    return () -> readOnly(iterable.iterator());
}

/**
 * Create an {@link Iterable} whose iterators are read-only, converting views of the
 * supplied iterable's iterators.
 *
 * @param iterable the iterable to wrap; may not be null
 * @param conversion the function applied to each value; may not be null
 * @return the read-only iterable over the converted values; never null
 */
public static <V, T> Iterable<T> readOnlyIterable(final Iterable<? extends V> iterable, Function<V, T> conversion) {
    return () -> readOnly(iterable.iterator(), conversion);
}
/**
 * Create an iterator over the iterable's values followed by the single extra value.
 *
 * @param first the source of the leading values; may not be null
 * @param last the value appended after all of {@code first}'s values
 * @return the joined iterator; never null
 */
public static <T> Iterator<T> join(Iterable<T> first, T last) {
return join(first.iterator(), with(last));
}
/**
 * Create an iterator over the iterable's values followed by the two extra values, in order.
 *
 * @param first the source of the leading values; may not be null
 * @param last1 the first appended value
 * @param last2 the second appended value
 * @return the joined iterator; never null
 */
public static <T> Iterator<T> join(Iterable<T> first, T last1, T last2) {
return join(first.iterator(), with(last1, last2));
}
/**
 * Create an iterator over the iterable's values followed by the three extra values, in order.
 *
 * @param first the source of the leading values; may not be null
 * @param last1 the first appended value
 * @param last2 the second appended value
 * @param last3 the third appended value
 * @return the joined iterator; never null
 */
public static <T> Iterator<T> join(Iterable<T> first, T last1, T last2, T last3) {
return join(first.iterator(), with(last1, last2, last3));
}
/**
 * Create an iterator over the iterable's values followed by the four extra values, in order.
 *
 * @param first the source of the leading values; may not be null
 * @param last1 the first appended value
 * @param last2 the second appended value
 * @param last3 the third appended value
 * @param last4 the fourth appended value
 * @return the joined iterator; never null
 */
public static <T> Iterator<T> join(Iterable<T> first, T last1, T last2, T last3, T last4) {
return join(first.iterator(), with(last1, last2, last3, last4));
}
/**
 * Create an iterator over the first iterable's values followed by the second iterable's values.
 *
 * @param first the source of the leading values; may not be null
 * @param second the source of the trailing values; may not be null
 * @return the joined iterator; never null
 */
public static <T> Iterator<T> join(Iterable<T> first, Iterable<T> second) {
return join(first.iterator(), second.iterator());
}
/**
 * Create an iterator that exposes all values of the first iterator followed by all values
 * of the second. {@code remove()} is forwarded to whichever underlying iterator supplied
 * the most recent value.
 *
 * @param first the source of the leading values; may not be null
 * @param second the source of the trailing values; may not be null
 * @return the joined iterator; never null
 */
public static <T> Iterator<T> join(Iterator<T> first, Iterator<T> second) {
    return new Iterator<T>() {
        // True once 'first' has been exhausted and we have switched to 'second'.
        private boolean completedFirst = false;

        @Override
        public boolean hasNext() {
            if (!completedFirst) {
                if (first.hasNext()) return true;
                completedFirst = true;
            }
            return second.hasNext();
        }

        @Override
        public T next() {
            if (!completedFirst) {
                if (first.hasNext()) return first.next();
                completedFirst = true;
            }
            return second.next();
        }

        @Override
        public void remove() {
            // BUGFIX: the original fell through and always invoked second.remove(),
            // even while 'first' was still being consumed (causing a spurious
            // IllegalStateException from 'second', or removing from both iterators).
            if (!completedFirst) {
                first.remove();
            } else {
                second.remove();
            }
        }
    };
}
}

View File

@ -0,0 +1,81 @@
/*
* Copyright Debezium Authors.
*
* Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
*/
package io.debezium.util;
import java.util.Iterator;
import java.util.StringJoiner;
import io.debezium.annotation.Immutable;
/**
* A utility for joining multiple {@link CharSequence character sequences} together. One major difference compared to
* {@link StringJoiner} is that this class ignores null values (rather than appending "null").
*
* @author Randall Hauch
*/
@Immutable
public final class Joiner {

    /**
     * Create a joiner that separates values with the given delimiter.
     *
     * @param delimiter the sequence placed between joined values; may not be null
     * @return the new joiner; never null
     */
    public static Joiner on(CharSequence delimiter) {
        return new Joiner(new StringJoiner(delimiter));
    }

    /**
     * Create a joiner that prepends the given prefix and separates values with the given
     * delimiter.
     *
     * @param prefix the sequence placed before the first value; may not be null
     * @param delimiter the sequence placed between joined values; may not be null
     * @return the new joiner; never null
     */
    public static Joiner on(CharSequence prefix, CharSequence delimiter) {
        return new Joiner(new StringJoiner(delimiter, prefix, ""));
    }

    /**
     * Create a joiner that surrounds the result with the given prefix and suffix and
     * separates values with the given delimiter.
     *
     * @param prefix the sequence placed before the first value; may not be null
     * @param delimiter the sequence placed between joined values; may not be null
     * @param suffix the sequence placed after the last value; may not be null
     * @return the new joiner; never null
     */
    public static Joiner on(CharSequence prefix, CharSequence delimiter, CharSequence suffix) {
        return new Joiner(new StringJoiner(delimiter, prefix, suffix));
    }

    // Underlying accumulator. Because it is mutable and shared by every join(...) call,
    // each Joiner instance is effectively single-use: reusing it accumulates values.
    private final StringJoiner joiner;

    private Joiner(StringJoiner joiner) {
        this.joiner = joiner;
    }

    /**
     * Join the array's values, skipping nulls.
     *
     * @param values the values to join; may not be null
     * @return the joined string; never null
     */
    public String join(Object[] values) {
        for (Object value : values) {
            append(value);
        }
        return joiner.toString();
    }

    /**
     * Join the supplied values, skipping nulls.
     *
     * @param firstValue the first value; may be null
     * @param additionalValues the remaining values; may not be null
     * @return the joined string; never null
     */
    public String join(CharSequence firstValue, CharSequence... additionalValues) {
        append(firstValue);
        for (CharSequence value : additionalValues) {
            append(value);
        }
        return joiner.toString();
    }

    /**
     * Join the iterable's values, skipping nulls.
     *
     * @param values the values to join; may not be null
     * @return the joined string; never null
     */
    public String join(Iterable<?> values) {
        for (Object value : values) {
            append(value);
        }
        return joiner.toString();
    }

    /**
     * Join the iterable's values followed by the supplied values, skipping nulls.
     *
     * @param values the leading values; may not be null
     * @param nextValue the next value; may be null
     * @param additionalValues the remaining values; may not be null
     * @return the joined string; never null
     */
    public String join(Iterable<?> values, CharSequence nextValue, CharSequence... additionalValues) {
        for (Object value : values) {
            append(value);
        }
        append(nextValue);
        for (CharSequence value : additionalValues) {
            append(value);
        }
        return joiner.toString();
    }

    /**
     * Join the iterator's values, skipping nulls.
     *
     * @param values the values to join; may not be null
     * @return the joined string; never null
     */
    public String join(Iterator<?> values) {
        values.forEachRemaining(this::append);
        return joiner.toString();
    }

    // Add a single value to the accumulator, ignoring nulls (unlike StringJoiner itself,
    // which would append the literal text "null").
    private void append(Object value) {
        if (value != null) {
            joiner.add(value.toString());
        }
    }
}

View File

@ -0,0 +1,721 @@
/*
* Copyright Debezium Authors.
*
* Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
*/
package io.debezium.util;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import io.debezium.annotation.Immutable;
/**
* Utilities for performing math operations with mixed native and advanced numeric types.
*
* @author Randall Hauch
*/
@Immutable
public final class MathOps {

    /**
     * Add two numbers, dispatching on their runtime types and boxing the result in the
     * smallest {@link Number} subtype that can represent it. Either argument may be null,
     * in which case the other argument is returned as-is.
     *
     * @param first the first addend; may be null
     * @param second the second addend; may be null
     * @return the sum; null only if both arguments are null
     * @throws IllegalArgumentException if an argument is an unsupported {@link Number} subtype
     */
    public static Number add(Number first, Number second) {
        if (second == null)
            return first;
        else if (first == null) return second;
        if (first instanceof Short) return add((Short) first, second);
        if (first instanceof Integer) return add((Integer) first, second);
        if (first instanceof Long) return add((Long) first, second);
        if (first instanceof Float) return add((Float) first, second);
        if (first instanceof Double) return add((Double) first, second);
        if (first instanceof BigInteger) return add((BigInteger) first, second);
        if (first instanceof BigDecimal) return add((BigDecimal) first, second);
        if (first instanceof AtomicLong) return add((AtomicLong) first, second);
        if (first instanceof AtomicInteger) return add((AtomicInteger) first, second);
        throw new IllegalArgumentException();
    }

    /**
     * Box an integral sum as the smallest of Short, Integer, or Long that can hold it.
     * The caller performs the arithmetic in {@code long}; sums that overflow {@code long}
     * are not detected (same limitation as the code this replaces).
     */
    private static Number narrowExact(long sum) {
        if (sum >= Short.MIN_VALUE && sum <= Short.MAX_VALUE) return Short.valueOf((short) sum);
        if (sum >= Integer.MIN_VALUE && sum <= Integer.MAX_VALUE) return Integer.valueOf((int) sum);
        return Long.valueOf(sum);
    }

    /**
     * Box a floating-point sum as a Float when it lies within the finite float range,
     * otherwise as a Double. (BUGFIX: the previous code compared against
     * {@code Float.MIN_VALUE} — the smallest <em>positive</em> float — so zero and all
     * negative in-range sums were incorrectly boxed as Double.)
     */
    private static Number narrowFloating(double sum) {
        if (sum >= -Float.MAX_VALUE && sum <= Float.MAX_VALUE) return Float.valueOf((float) sum);
        return Double.valueOf(sum);
    }

    // ---------------- Short + x ----------------

    public static Number add(Short first, Number second) {
        if (second instanceof Short) return add(first, (Short) second);
        if (second instanceof Integer) return add(first, (Integer) second);
        if (second instanceof Long) return add(first, (Long) second);
        if (second instanceof Float) return add(first, (Float) second);
        if (second instanceof Double) return add(first, (Double) second);
        if (second instanceof BigInteger) return add(first, (BigInteger) second);
        if (second instanceof BigDecimal) return add(first, (BigDecimal) second);
        if (second instanceof AtomicInteger) return add(first, (AtomicInteger) second);
        if (second instanceof AtomicLong) return add(first, (AtomicLong) second);
        throw new IllegalArgumentException();
    }

    public static Number add(Short first, short second) {
        return narrowExact(first.longValue() + second);
    }

    public static Number add(Short first, int second) {
        return narrowExact(first.longValue() + second);
    }

    public static Number add(Short first, long second) {
        return narrowExact(first.longValue() + second);
    }

    public static Number add(Short first, float second) {
        return narrowFloating(first.doubleValue() + second);
    }

    public static Number add(Short first, double second) {
        return narrowFloating(first.doubleValue() + second);
    }

    public static Number add(Short first, Short second) {
        return add(first, second.shortValue());
    }

    public static Number add(Short first, Integer second) {
        return add(first, second.intValue());
    }

    public static Number add(Short first, Long second) {
        return add(first, second.longValue());
    }

    public static Number add(Short first, Float second) {
        return add(first, second.floatValue());
    }

    public static Number add(Short first, Double second) {
        return add(first, second.doubleValue());
    }

    public static Number add(Short first, BigDecimal second) {
        return second.add(BigDecimal.valueOf(first.longValue()));
    }

    public static Number add(Short first, BigInteger second) {
        return second.add(BigInteger.valueOf(first.longValue()));
    }

    public static Number add(Short first, AtomicInteger second) {
        return add(first, second.intValue());
    }

    public static Number add(Short first, AtomicLong second) {
        return add(first, second.longValue());
    }

    // ---------------- Integer + x ----------------

    public static Number add(Integer first, Number second) {
        if (second instanceof Short) return add(first, (Short) second);
        if (second instanceof Integer) return add(first, (Integer) second);
        if (second instanceof Long) return add(first, (Long) second);
        if (second instanceof Float) return add(first, (Float) second);
        if (second instanceof Double) return add(first, (Double) second);
        if (second instanceof BigInteger) return add(first, (BigInteger) second);
        if (second instanceof BigDecimal) return add(first, (BigDecimal) second);
        if (second instanceof AtomicInteger) return add(first, (AtomicInteger) second);
        if (second instanceof AtomicLong) return add(first, (AtomicLong) second);
        throw new IllegalArgumentException();
    }

    public static Number add(Integer first, short second) {
        return narrowExact(first.longValue() + second);
    }

    public static Number add(Integer first, int second) {
        return narrowExact(first.longValue() + second);
    }

    public static Number add(Integer first, long second) {
        return narrowExact(first.longValue() + second);
    }

    public static Number add(Integer first, float second) {
        return narrowFloating(first.doubleValue() + second);
    }

    public static Number add(Integer first, double second) {
        return narrowFloating(first.doubleValue() + second);
    }

    public static Number add(Integer first, Short second) {
        return add(first, second.shortValue());
    }

    public static Number add(Integer first, Integer second) {
        return add(first, second.intValue());
    }

    public static Number add(Integer first, Long second) {
        return add(first, second.longValue());
    }

    public static Number add(Integer first, Float second) {
        return add(first, second.floatValue());
    }

    public static Number add(Integer first, Double second) {
        return add(first, second.doubleValue());
    }

    public static Number add(Integer first, BigDecimal second) {
        return second.add(BigDecimal.valueOf(first.longValue()));
    }

    public static Number add(Integer first, BigInteger second) {
        return second.add(BigInteger.valueOf(first.longValue()));
    }

    public static Number add(Integer first, AtomicInteger second) {
        return add(first, second.intValue());
    }

    public static Number add(Integer first, AtomicLong second) {
        return add(first, second.longValue());
    }

    // ---------------- Long + x ----------------

    public static Number add(Long first, Number second) {
        if (second instanceof Short) return add(first, (Short) second);
        if (second instanceof Integer) return add(first, (Integer) second);
        if (second instanceof Long) return add(first, (Long) second);
        if (second instanceof Float) return add(first, (Float) second);
        if (second instanceof Double) return add(first, (Double) second);
        if (second instanceof BigInteger) return add(first, (BigInteger) second);
        if (second instanceof BigDecimal) return add(first, (BigDecimal) second);
        if (second instanceof AtomicInteger) return add(first, (AtomicInteger) second);
        if (second instanceof AtomicLong) return add(first, (AtomicLong) second);
        throw new IllegalArgumentException();
    }

    public static Number add(Long first, short second) {
        return narrowExact(first.longValue() + second);
    }

    public static Number add(Long first, int second) {
        return narrowExact(first.longValue() + second);
    }

    // NOTE: long + long may overflow silently; same limitation as the original code.
    public static Number add(Long first, long second) {
        return narrowExact(first.longValue() + second);
    }

    public static Number add(Long first, float second) {
        return narrowFloating(first.doubleValue() + second);
    }

    public static Number add(Long first, double second) {
        return narrowFloating(first.doubleValue() + second);
    }

    public static Number add(Long first, Short second) {
        return add(first, second.shortValue());
    }

    public static Number add(Long first, Integer second) {
        return add(first, second.intValue());
    }

    public static Number add(Long first, Long second) {
        return add(first, second.longValue());
    }

    public static Number add(Long first, Float second) {
        return add(first, second.floatValue());
    }

    public static Number add(Long first, Double second) {
        return add(first, second.doubleValue());
    }

    public static Number add(Long first, BigDecimal second) {
        return second.add(BigDecimal.valueOf(first.longValue()));
    }

    public static Number add(Long first, BigInteger second) {
        return second.add(BigInteger.valueOf(first.longValue()));
    }

    public static Number add(Long first, AtomicInteger second) {
        return add(first, second.intValue());
    }

    public static Number add(Long first, AtomicLong second) {
        return add(first, second.longValue());
    }

    // ---------------- Float + x ----------------

    public static Number add(Float first, Number second) {
        if (second instanceof Short) return add(first, (Short) second);
        if (second instanceof Integer) return add(first, (Integer) second);
        if (second instanceof Long) return add(first, (Long) second);
        if (second instanceof Float) return add(first, (Float) second);
        if (second instanceof Double) return add(first, (Double) second);
        if (second instanceof BigInteger) return add(first, (BigInteger) second);
        if (second instanceof BigDecimal) return add(first, (BigDecimal) second);
        if (second instanceof AtomicInteger) return add(first, (AtomicInteger) second);
        if (second instanceof AtomicLong) return add(first, (AtomicLong) second);
        throw new IllegalArgumentException();
    }

    public static Number add(Float first, short second) {
        return narrowFloating(first.doubleValue() + second);
    }

    public static Number add(Float first, int second) {
        return narrowFloating(first.doubleValue() + second);
    }

    public static Number add(Float first, long second) {
        return narrowFloating(first.doubleValue() + second);
    }

    public static Number add(Float first, float second) {
        return narrowFloating(first.doubleValue() + second);
    }

    public static Number add(Float first, double second) {
        return narrowFloating(first.doubleValue() + second);
    }

    public static Number add(Float first, Short second) {
        return add(first, second.shortValue());
    }

    public static Number add(Float first, Integer second) {
        return add(first, second.intValue());
    }

    public static Number add(Float first, Long second) {
        return add(first, second.longValue());
    }

    public static Number add(Float first, Float second) {
        return add(first, second.floatValue());
    }

    public static Number add(Float first, Double second) {
        return add(first, second.doubleValue());
    }

    // BUGFIX: the original used first.longValue(), silently discarding the fraction.
    public static Number add(Float first, BigDecimal second) {
        return second.add(BigDecimal.valueOf(first.doubleValue()));
    }

    // BUGFIX: the original truncated the float to a long; preserve the fraction by
    // promoting the BigInteger to a BigDecimal.
    public static Number add(Float first, BigInteger second) {
        return new BigDecimal(second).add(BigDecimal.valueOf(first.doubleValue()));
    }

    public static Number add(Float first, AtomicInteger second) {
        return add(first, second.intValue());
    }

    public static Number add(Float first, AtomicLong second) {
        return add(first, second.longValue());
    }

    // ---------------- Double + x ----------------

    public static Number add(Double first, Number second) {
        if (second instanceof Short) return add(first, (Short) second);
        if (second instanceof Integer) return add(first, (Integer) second);
        if (second instanceof Long) return add(first, (Long) second);
        if (second instanceof Float) return add(first, (Float) second);
        if (second instanceof Double) return add(first, (Double) second);
        if (second instanceof BigInteger) return add(first, (BigInteger) second);
        if (second instanceof BigDecimal) return add(first, (BigDecimal) second);
        if (second instanceof AtomicInteger) return add(first, (AtomicInteger) second);
        if (second instanceof AtomicLong) return add(first, (AtomicLong) second);
        throw new IllegalArgumentException();
    }

    // BUGFIX for all Double overloads: the original used first.floatValue(), losing
    // precision and producing Infinity for values beyond the float range.
    public static Number add(Double first, short second) {
        return narrowFloating(first.doubleValue() + second);
    }

    public static Number add(Double first, int second) {
        return narrowFloating(first.doubleValue() + second);
    }

    public static Number add(Double first, long second) {
        return narrowFloating(first.doubleValue() + second);
    }

    public static Number add(Double first, float second) {
        return narrowFloating(first.doubleValue() + second);
    }

    public static Number add(Double first, double second) {
        return narrowFloating(first.doubleValue() + second);
    }

    public static Number add(Double first, Short second) {
        return add(first, second.shortValue());
    }

    public static Number add(Double first, Integer second) {
        return add(first, second.intValue());
    }

    public static Number add(Double first, Long second) {
        return add(first, second.longValue());
    }

    public static Number add(Double first, Float second) {
        return add(first, second.floatValue());
    }

    public static Number add(Double first, Double second) {
        return add(first, second.doubleValue());
    }

    // BUGFIX: the original used first.longValue(), silently discarding the fraction.
    public static Number add(Double first, BigDecimal second) {
        return second.add(BigDecimal.valueOf(first.doubleValue()));
    }

    // BUGFIX: the original truncated the double to a long; preserve the fraction by
    // promoting the BigInteger to a BigDecimal.
    public static Number add(Double first, BigInteger second) {
        return new BigDecimal(second).add(BigDecimal.valueOf(first.doubleValue()));
    }

    public static Number add(Double first, AtomicInteger second) {
        return add(first, second.intValue());
    }

    public static Number add(Double first, AtomicLong second) {
        return add(first, second.longValue());
    }

    // ---------------- BigInteger + x ----------------

    public static Number add(BigInteger first, Number second) {
        if (second instanceof Short) return add(first, (Short) second);
        if (second instanceof Integer) return add(first, (Integer) second);
        if (second instanceof Long) return add(first, (Long) second);
        if (second instanceof Float) return add(first, (Float) second);
        if (second instanceof Double) return add(first, (Double) second);
        if (second instanceof BigInteger) return add(first, (BigInteger) second);
        if (second instanceof BigDecimal) return add(first, (BigDecimal) second);
        if (second instanceof AtomicInteger) return add(first, (AtomicInteger) second);
        if (second instanceof AtomicLong) return add(first, (AtomicLong) second);
        throw new IllegalArgumentException();
    }

    public static Number add(BigInteger first, short second) {
        return first.add(BigInteger.valueOf(second));
    }

    public static Number add(BigInteger first, int second) {
        return first.add(BigInteger.valueOf(second));
    }

    public static Number add(BigInteger first, long second) {
        return first.add(BigInteger.valueOf(second));
    }

    public static Number add(BigInteger first, float second) {
        return new BigDecimal(first).add(BigDecimal.valueOf(second));
    }

    public static Number add(BigInteger first, double second) {
        return new BigDecimal(first).add(BigDecimal.valueOf(second));
    }

    public static Number add(BigInteger first, Short second) {
        return add(first, second.shortValue());
    }

    public static Number add(BigInteger first, Integer second) {
        return add(first, second.intValue());
    }

    public static Number add(BigInteger first, Long second) {
        return add(first, second.longValue());
    }

    public static Number add(BigInteger first, Float second) {
        return add(first, second.floatValue());
    }

    public static Number add(BigInteger first, Double second) {
        return add(first, second.doubleValue());
    }

    public static Number add(BigInteger first, BigDecimal second) {
        return second.add(new BigDecimal(first));
    }

    // BUGFIX: the original returned second.add(second), i.e. 2 * second, ignoring 'first'.
    public static Number add(BigInteger first, BigInteger second) {
        return first.add(second);
    }

    public static Number add(BigInteger first, AtomicInteger second) {
        return add(first, second.intValue());
    }

    public static Number add(BigInteger first, AtomicLong second) {
        return add(first, second.longValue());
    }

    // ---------------- BigDecimal + x ----------------

    public static Number add(BigDecimal first, Number second) {
        if (second instanceof Short) return add(first, (Short) second);
        if (second instanceof Integer) return add(first, (Integer) second);
        if (second instanceof Long) return add(first, (Long) second);
        if (second instanceof Float) return add(first, (Float) second);
        if (second instanceof Double) return add(first, (Double) second);
        if (second instanceof BigInteger) return add(first, (BigInteger) second);
        if (second instanceof BigDecimal) return add(first, (BigDecimal) second);
        if (second instanceof AtomicInteger) return add(first, (AtomicInteger) second);
        if (second instanceof AtomicLong) return add(first, (AtomicLong) second);
        throw new IllegalArgumentException();
    }

    public static Number add(BigDecimal first, short second) {
        return first.add(BigDecimal.valueOf(second));
    }

    public static Number add(BigDecimal first, int second) {
        return first.add(BigDecimal.valueOf(second));
    }

    public static Number add(BigDecimal first, long second) {
        return first.add(BigDecimal.valueOf(second));
    }

    public static Number add(BigDecimal first, float second) {
        return first.add(BigDecimal.valueOf(second));
    }

    public static Number add(BigDecimal first, double second) {
        return first.add(BigDecimal.valueOf(second));
    }

    public static Number add(BigDecimal first, Short second) {
        return add(first, second.shortValue());
    }

    public static Number add(BigDecimal first, Integer second) {
        return add(first, second.intValue());
    }

    public static Number add(BigDecimal first, Long second) {
        return add(first, second.longValue());
    }

    public static Number add(BigDecimal first, Float second) {
        return add(first, second.floatValue());
    }

    public static Number add(BigDecimal first, Double second) {
        return add(first, second.doubleValue());
    }

    public static Number add(BigDecimal first, BigDecimal second) {
        return first.add(second);
    }

    // BUGFIX: the original returned second.add(second), i.e. 2 * second, ignoring 'first'.
    public static Number add(BigDecimal first, BigInteger second) {
        return first.add(new BigDecimal(second));
    }

    public static Number add(BigDecimal first, AtomicInteger second) {
        return add(first, second.intValue());
    }

    public static Number add(BigDecimal first, AtomicLong second) {
        return add(first, second.longValue());
    }

    // ---------------- AtomicInteger + x ----------------

    public static Number add(AtomicInteger first, Number second) {
        if (second instanceof Short) return add(first, (Short) second);
        if (second instanceof Integer) return add(first, (Integer) second);
        if (second instanceof Long) return add(first, (Long) second);
        if (second instanceof Float) return add(first, (Float) second);
        if (second instanceof Double) return add(first, (Double) second);
        if (second instanceof BigInteger) return add(first, (BigInteger) second);
        if (second instanceof BigDecimal) return add(first, (BigDecimal) second);
        if (second instanceof AtomicInteger) return add(first, (AtomicInteger) second);
        if (second instanceof AtomicLong) return add(first, (AtomicLong) second);
        throw new IllegalArgumentException();
    }

    public static Number add(AtomicInteger first, short second) {
        return narrowExact(first.longValue() + second);
    }

    public static Number add(AtomicInteger first, int second) {
        return narrowExact(first.longValue() + second);
    }

    public static Number add(AtomicInteger first, long second) {
        return narrowExact(first.longValue() + second);
    }

    public static Number add(AtomicInteger first, float second) {
        return narrowFloating(first.doubleValue() + second);
    }

    public static Number add(AtomicInteger first, double second) {
        return narrowFloating(first.doubleValue() + second);
    }

    public static Number add(AtomicInteger first, Short second) {
        return add(first, second.shortValue());
    }

    public static Number add(AtomicInteger first, Integer second) {
        return add(first, second.intValue());
    }

    public static Number add(AtomicInteger first, Long second) {
        return add(first, second.longValue());
    }

    public static Number add(AtomicInteger first, Float second) {
        return add(first, second.floatValue());
    }

    public static Number add(AtomicInteger first, Double second) {
        return add(first, second.doubleValue());
    }

    public static Number add(AtomicInteger first, BigDecimal second) {
        return add(second, first);
    }

    public static Number add(AtomicInteger first, BigInteger second) {
        return add(second, first);
    }

    public static Number add(AtomicInteger first, AtomicInteger second) {
        return add(first, second.intValue());
    }

    public static Number add(AtomicInteger first, AtomicLong second) {
        return add(first, second.longValue());
    }

    // ---------------- AtomicLong + x ----------------

    public static Number add(AtomicLong first, Number second) {
        if (second instanceof Short) return add(first, (Short) second);
        if (second instanceof Integer) return add(first, (Integer) second);
        if (second instanceof Long) return add(first, (Long) second);
        if (second instanceof Float) return add(first, (Float) second);
        if (second instanceof Double) return add(first, (Double) second);
        if (second instanceof BigInteger) return add(first, (BigInteger) second);
        if (second instanceof BigDecimal) return add(first, (BigDecimal) second);
        if (second instanceof AtomicInteger) return add(first, (AtomicInteger) second);
        if (second instanceof AtomicLong) return add(first, (AtomicLong) second);
        throw new IllegalArgumentException();
    }

    public static Number add(AtomicLong first, short second) {
        return narrowExact(first.longValue() + second);
    }

    public static Number add(AtomicLong first, int second) {
        return narrowExact(first.longValue() + second);
    }

    public static Number add(AtomicLong first, long second) {
        return narrowExact(first.longValue() + second);
    }

    public static Number add(AtomicLong first, float second) {
        return narrowFloating(first.doubleValue() + second);
    }

    public static Number add(AtomicLong first, double second) {
        return narrowFloating(first.doubleValue() + second);
    }

    public static Number add(AtomicLong first, Short second) {
        return add(first, second.shortValue());
    }

    public static Number add(AtomicLong first, Integer second) {
        return add(first, second.intValue());
    }

    public static Number add(AtomicLong first, Long second) {
        return add(first, second.longValue());
    }

    public static Number add(AtomicLong first, Float second) {
        return add(first, second.floatValue());
    }

    public static Number add(AtomicLong first, Double second) {
        return add(first, second.doubleValue());
    }

    public static Number add(AtomicLong first, BigDecimal second) {
        return add(second, first);
    }

    public static Number add(AtomicLong first, BigInteger second) {
        return add(second, first);
    }

    public static Number add(AtomicLong first, AtomicInteger second) {
        return add(first, second.intValue());
    }

    public static Number add(AtomicLong first, AtomicLong second) {
        return add(first, second.longValue());
    }

    // Utility class; prevent instantiation.
    private MathOps() {
    }
}

View File

@ -0,0 +1,99 @@
/*
* Copyright Debezium Authors.
*
* Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
*/
package io.debezium.util;
import java.util.Iterator;
import java.util.Random;
import java.util.function.Supplier;
import java.util.stream.IntStream;
import io.debezium.annotation.Immutable;
/**
* Utility methods for obtaining streams of integers.
*
* @author Randall Hauch
*/
@Immutable
public class Sequences {
    /**
     * Create a stream of <em>number</em> monotonically increasing numbers starting at 0, useful when performing an operation
     * <em>number</em> times.
     *
     * @param number the number of values to include in the stream; must be positive
     * @return the sequence; never null
     */
    public static IntStream times(int number) {
        return IntStream.range(0, number);
    }

    /**
     * Create an iterator over an infinite number of monotonically increasing numbers starting at 0, useful when performing an
     * operation an unknown number of times.
     *
     * @return the sequence; never null
     */
    public static Iterable<Integer> infiniteIntegers() {
        return infiniteIntegers(0);
    }

    /**
     * Create an iterator over an infinite number monotonically increasing numbers starting at the given number, useful when
     * performing an operation an unknown number of times.
     *
     * @param startingAt the first number to include in the resulting stream
     * @return the sequence; never null
     */
    public static Iterable<Integer> infiniteIntegers(int startingAt) {
        return Iterators.around(new Iterator<Integer>() {
            private int counter = startingAt;

            @Override
            public boolean hasNext() {
                // Always true: the sequence is unbounded.
                return true;
            }

            @Override
            public Integer next() {
                return Integer.valueOf(counter++);
            }
        });
    }

    /**
     * Obtain a supplier function that randomly selects from the given values. If the supplied values contain nulls, then
     * the resulting supplier function may return null values.
     *
     * @param first the first value that may be randomly picked
     * @param additional the additional values to randomly pick from; may be null or empty
     * @return the supplier function; never null
     */
    @SafeVarargs
    public static <T> Supplier<T> randomlySelect(T first, T... additional) {
        if (additional == null || additional.length == 0) return () -> first;
        Random rng = new Random(System.currentTimeMillis());
        int max = additional.length + 1;
        return () -> {
            int index = rng.nextInt(max);
            return index == 0 ? first : additional[index - 1];
        };
    }

    /**
     * Obtain a supplier function that randomly selects from the given values. If the supplied values contain nulls, then
     * the resulting supplier function may return null values.
     *
     * @param values the values to randomly pick from; may not be null, should not be empty
     * @return the supplier function; never null
     * @throws IllegalArgumentException if the values array is null or empty
     */
    @SafeVarargs
    public static <T> Supplier<T> randomlySelect(T... values) {
        if (values == null || values.length == 0) throw new IllegalArgumentException("The values array may not be null or empty");
        Random rng = new Random(System.currentTimeMillis());
        return () -> values[rng.nextInt(values.length)];
    }

    // Utility class; prevent instantiation (consistent with MathOps in this module).
    private Sequences() {
    }
}

View File

@ -0,0 +1,42 @@
/*
* Copyright Debezium Authors.
*
* Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
*/
package io.debezium.document;
import java.io.IOException;
import org.junit.Test;
import static org.fest.assertions.Assertions.assertThat;
import io.debezium.util.Testing;
/**
 * Round-trip tests for {@link ArraySerdes}: each JSON resource is parsed into an {@link Array},
 * serialized to bytes, deserialized back, and compared with the original.
 *
 * @author Randall Hauch
 */
public class ArraySerdesTest implements Testing {

    private static final ArraySerdes SERDES = new ArraySerdes();

    @Test
    public void shouldConvertFromBytesToArray1() throws IOException {
        readAsStringAndBytes("json/array1.json");
    }

    @Test
    public void shouldConvertFromBytesToArray2() throws IOException {
        readAsStringAndBytes("json/array2.json");
    }

    protected void readAsStringAndBytes( String resourceFile ) throws IOException {
        // Parse the resource, then verify that serialize + deserialize yields an equal array.
        String json = Testing.Files.readResourceAsString(resourceFile);
        Array original = ArrayReader.defaultReader().readArray(json);
        byte[] serialized = SERDES.serialize("topicA", original);
        Array roundTripped = SERDES.deserialize("topicA", serialized);
        assertThat((Object) roundTripped).isEqualTo(original);
    }
}

View File

@ -0,0 +1,81 @@
/*
* Copyright Debezium Authors.
*
* Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
*/
package io.debezium.document;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.function.IntConsumer;
import org.junit.Test;
import static org.fest.assertions.Assertions.assertThat;
import io.debezium.util.Testing;
/**
 * Round-trip tests for {@link DocumentSerdes}: each JSON resource is parsed into a {@link Document},
 * serialized to bytes, deserialized back, and compared with the original.
 *
 * @author Randall Hauch
 */
public class DocumentSerdesTest implements Testing {

    private static final DocumentSerdes SERDES = new DocumentSerdes();

    @Test
    public void shouldConvertFromBytesToDocument1() throws IOException {
        readAsStringAndBytes("json/sample1.json");
    }

    @Test
    public void shouldUseSerdeMethodToConvertFromBytesToDocument2() throws IOException {
        readAsStringAndBytes("json/sample2.json");
    }

    @Test
    public void shouldUseSerdeMethodToConvertFromBytesToDocument3() throws IOException {
        readAsStringAndBytes("json/sample3.json");
    }

    @Test
    public void shouldUseSerdeMethodToConvertFromBytesToDocumentForResponse1() throws IOException {
        readAsStringAndBytes("json/response1.json");
    }

    @Test
    public void shouldUseSerdeMethodToConvertFromBytesToDocumentForResponse2() throws IOException {
        readAsStringAndBytes("json/response2.json");
    }

    protected void readAsStringAndBytes(String resourceFile) throws IOException {
        String json = Testing.Files.readResourceAsString(resourceFile);
        Document doc = DocumentReader.defaultReader().read(json);
        roundTrip(doc, size -> Testing.print("message size " + size + " bytes: \n" + doc));
    }

    protected void roundTrip(Document doc, IntConsumer sizeAccumulator) {
        // Serialize, optionally report the byte size, then verify deserialization restores an equal document.
        byte[] bytes = SERDES.serialize("topicA", doc);
        if (sizeAccumulator != null) {
            sizeAccumulator.accept(bytes.length);
        }
        Document reconstituted = SERDES.deserialize("topicA", bytes);
        assertThat((Object) reconstituted).isEqualTo(doc);
    }

    protected List<Document> readResources(String prefix, String... resources) throws IOException {
        List<Document> documents = new ArrayList<>();
        for (String resource : resources) {
            String json = Testing.Files.readResourceAsString(prefix + resource);
            Array changes;
            try {
                // Prefer the document form with an "entityChanges" array ...
                changes = DocumentReader.defaultReader().read(json).getArray("entityChanges");
            } catch (IOException e) {
                // ... falling back to a bare JSON array.
                changes = ArrayReader.defaultReader().readArray(json);
            }
            changes.forEach(entry -> documents.add(entry.getValue().asDocument()));
        }
        return documents;
    }
}

View File

@ -0,0 +1,54 @@
/*
* Copyright Debezium Authors.
*
* Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
*/
package io.debezium.document;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.Map;
import org.junit.Before;
import org.junit.Test;
import static org.fest.assertions.Assertions.assertThat;
/**
 * Tests for {@link Document} iteration: verifies that {@code forEach} visits fields in order
 * with correctly formed {@link Path} keys.
 *
 * @author Randall Hauch
 */
public class DocumentTest {

    private Document doc;
    private Map<Path, Value> found = new LinkedHashMap<>();
    private Iterator<Map.Entry<Path, Value>> iterator;

    @Before
    public void beforeEach() {
        doc = null;
        found = new LinkedHashMap<>();
        iterator = null;
    }

    @Test
    public void shouldPerformForEachOnFlatDocument() {
        doc = Document.create("a", "A", "b", "B");
        doc.forEach(found::put);
        iterator = found.entrySet().iterator();
        assertPair(iterator, "/a", "A");
        assertPair(iterator, "/b", "B");
        assertNoMore(iterator);
    }

    protected void assertPair( Iterator<Map.Entry<Path, Value>> iterator, String path, Object value ) {
        // Consume the next entry and check both the parsed path and the wrapped value.
        Map.Entry<Path, Value> pair = iterator.next();
        assertThat((Object) pair.getKey()).isEqualTo(Path.parse(path));
        assertThat(pair.getValue()).isEqualTo(Value.create(value));
    }

    protected void assertNoMore( Iterator<Map.Entry<Path, Value>> iterator ) {
        assertThat(iterator.hasNext()).isFalse();
    }
}

View File

@ -0,0 +1,82 @@
/*
* Copyright Debezium Authors.
*
* Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
*/
package io.debezium.document;
import org.junit.Before;
import org.junit.Test;
import static org.fest.assertions.Assertions.assertThat;
import io.debezium.util.Testing;
/**
 * Round-trip tests that write an {@link Array} with {@link JacksonWriter} and read it back
 * with {@link JacksonReader}, verifying the reconstructed contents.
 *
 * @author Randall Hauch
 */
public class JacksonArrayReadingAndWritingTest implements Testing {

    private Array array;
    private Array after;
    private JacksonWriter writer = JacksonWriter.INSTANCE;
    private JacksonReader reader = JacksonReader.INSTANCE;

    @Before
    public void beforeEach() {
        array = Array.create();
        after = null;
    }

    /** Serialize the current array and parse the result back into {@link #after}. */
    private void writeAndReadBack() throws Exception {
        after = reader.readArray(writer.write(array));
    }

    @Test
    public void shouldWriteDocumentWithSingleField() throws Exception {
        array.add("value1");
        writeAndReadBack();
        assertThat(after.get(0)).isEqualTo("value1");
        assertThat(after.size()).isEqualTo(1);
    }

    @Test
    public void shouldWriteDocumentWithTwoFields() throws Exception {
        array.add("value1");
        array.add("value2");
        writeAndReadBack();
        assertThat(after.get(0)).isEqualTo("value1");
        assertThat(after.get(1)).isEqualTo("value2");
        assertThat(after.size()).isEqualTo(2);
    }

    @Test
    public void shouldWriteDocumentWithNestedDocument() throws Exception {
        array.add("value1");
        array.add("value2");
        array.add(Document.create("a", "A", "b", "B"));
        writeAndReadBack();
        assertThat(after.get(0)).isEqualTo("value1");
        assertThat(after.get(1)).isEqualTo("value2");
        assertThat(after.size()).isEqualTo(3);
        Document nested = after.get(2).asDocument();
        assertThat(nested.getString("a")).isEqualTo("A");
        assertThat(nested.getString("b")).isEqualTo("B");
        assertThat(nested.size()).isEqualTo(2);
    }

    @Test
    public void shouldWriteDocumentWithDeeplyNestedDocument() throws Exception {
        array.add("value1");
        array.add("value2");
        array.add(Document.create("a", "A", "b", "B", "c", Document.create("x", "X")));
        writeAndReadBack();
        assertThat(after.get(0)).isEqualTo("value1");
        assertThat(after.get(1)).isEqualTo("value2");
        assertThat(after.size()).isEqualTo(3);
        Document nested = after.get(2).asDocument();
        assertThat(nested.getString("a")).isEqualTo("A");
        assertThat(nested.getString("b")).isEqualTo("B");
        assertThat(nested.size()).isEqualTo(3);
        Document deepNested = nested.getDocument("c");
        assertThat(deepNested.getString("x")).isEqualTo("X");
        assertThat(deepNested.size()).isEqualTo(1);
    }
}

View File

@ -0,0 +1,83 @@
/*
* Copyright Debezium Authors.
*
* Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
*/
package io.debezium.document;
import org.junit.Before;
import org.junit.Test;
import static org.fest.assertions.Assertions.assertThat;
import io.debezium.util.Testing;
/**
 * Round-trip tests that write a {@link Document} with {@link JacksonWriter} and read it back
 * with {@link JacksonReader}, verifying the reconstructed contents.
 *
 * @author Randall Hauch
 */
public class JacksonWriterTest implements Testing {

    private Document doc;
    private Document after;
    private JacksonWriter writer = JacksonWriter.INSTANCE;
    private JacksonReader reader = JacksonReader.INSTANCE;

    @Before
    public void beforeEach() {
        doc = Document.create();
        after = null;
    }

    /** Serialize the current document and parse the result back into {@link #after}. */
    private void writeAndReadBack() throws Exception {
        after = reader.read(writer.write(doc));
    }

    @Test
    public void shouldWriteDocumentWithSingleField() throws Exception {
        doc.set("field", "value");
        writeAndReadBack();
        assertThat(after.getString("field")).isEqualTo("value");
        assertThat(after.size()).isEqualTo(1);
    }

    @Test
    public void shouldWriteDocumentWithTwoFields() throws Exception {
        doc.set("field1", "value");
        doc.set("field2", 22);
        writeAndReadBack();
        assertThat(after.getString("field1")).isEqualTo("value");
        assertThat(after.getInteger("field2")).isEqualTo(22);
        assertThat(after.size()).isEqualTo(2);
    }

    @Test
    public void shouldWriteDocumentWithNestedDocument() throws Exception {
        doc.set("field1", "value");
        doc.set("field2", 22);
        doc.set("field3", Document.create("a", "A", "b", "B"));
        writeAndReadBack();
        Testing.print(after);
        assertThat(after.getString("field1")).isEqualTo("value");
        assertThat(after.getInteger("field2")).isEqualTo(22);
        assertThat(after.size()).isEqualTo(3);
        Document nested = after.getDocument("field3");
        assertThat(nested.getString("a")).isEqualTo("A");
        assertThat(nested.getString("b")).isEqualTo("B");
        assertThat(nested.size()).isEqualTo(2);
    }

    @Test
    public void shouldWriteDocumentWithDeeplyNestedDocument() throws Exception {
        doc.set("field1", "value");
        doc.set("field2", 22);
        doc.set("field3", Document.create("a", "A", "b", "B", "c", Document.create("x", "X")));
        writeAndReadBack();
        Testing.print(after);
        assertThat(after.getString("field1")).isEqualTo("value");
        assertThat(after.getInteger("field2")).isEqualTo(22);
        assertThat(after.size()).isEqualTo(3);
        Document nested = after.getDocument("field3");
        assertThat(nested.getString("a")).isEqualTo("A");
        assertThat(nested.getString("b")).isEqualTo("B");
        assertThat(nested.size()).isEqualTo(3);
        Document deepNested = nested.getDocument("c");
        assertThat(deepNested.getString("x")).isEqualTo("X");
        assertThat(deepNested.size()).isEqualTo(1);
    }
}

View File

@ -0,0 +1,101 @@
/*
* Copyright Debezium Authors.
*
* Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
*/
package io.debezium.document;
import org.junit.Before;
import org.junit.Test;
import static org.fest.assertions.Assertions.assertThat;
import io.debezium.util.Testing;
/**
 * Tests for {@link Paths#parse(String, boolean)}: verifies root, single-segment, and multi-segment
 * paths in both relative and absolute forms.
 *
 * @author Randall Hauch
 */
public class PathsTest implements Testing {

    private Path path;

    @Before
    public void beforeEach() {
        this.path = null;
    }

    @Test
    public void shouldParseRootPath() {
        path = parse("/");
        assertThat(path.isRoot()).isTrue();
        assertThat(path.isSingle()).isFalse();
        assertThat(path.size()).isEqualTo(0);
    }

    @Test
    public void shouldParseSingleRelativePath() {
        path = parse("a");
        assertThat(path.isRoot()).isFalse();
        assertThat(path.isSingle()).isTrue();
        assertThat(path.size()).isEqualTo(1);
        assertThat(path.segment(0)).isEqualTo("a");
    }

    @Test
    public void shouldParseSingleAbsolutePath() {
        path = parse("/a");
        assertThat(path.isRoot()).isFalse();
        assertThat(path.isSingle()).isTrue();
        assertThat(path.size()).isEqualTo(1);
        assertThat(path.segment(0)).isEqualTo("a");
    }

    @Test
    public void shouldParseDoubleRelativePath() {
        path = parse("a/b");
        assertThat(path.isRoot()).isFalse();
        assertThat(path.isSingle()).isFalse();
        assertThat(path.size()).isEqualTo(2);
        assertThat(path.segment(0)).isEqualTo("a");
        assertThat(path.segment(1)).isEqualTo("b");
    }

    @Test
    public void shouldParseDoubleAbsolutePath() {
        path = parse("/a/b");
        assertThat(path.isRoot()).isFalse();
        assertThat(path.isSingle()).isFalse();
        assertThat(path.size()).isEqualTo(2);
        assertThat(path.segment(0)).isEqualTo("a");
        assertThat(path.segment(1)).isEqualTo("b");
    }

    @Test
    public void shouldParseMultiRelativePath() {
        // Was previously parsing "a/b", duplicating shouldParseDoubleRelativePath; use an
        // actual multi-segment relative path to match the test's name and intent.
        path = parse("a/b/c/d/e");
        assertThat(path.isRoot()).isFalse();
        assertThat(path.isSingle()).isFalse();
        assertThat(path.size()).isEqualTo(5);
        assertThat(path.segment(0)).isEqualTo("a");
        assertThat(path.segment(1)).isEqualTo("b");
        assertThat(path.segment(2)).isEqualTo("c");
        assertThat(path.segment(3)).isEqualTo("d");
        assertThat(path.segment(4)).isEqualTo("e");
    }

    @Test
    public void shouldParseMultiAbsolutePath() {
        path = parse("/a/b/c/d/e");
        assertThat(path.isRoot()).isFalse();
        assertThat(path.isSingle()).isFalse();
        assertThat(path.size()).isEqualTo(5);
        assertThat(path.segment(0)).isEqualTo("a");
        assertThat(path.segment(1)).isEqualTo("b");
        assertThat(path.segment(2)).isEqualTo("c");
        assertThat(path.segment(3)).isEqualTo("d");
        assertThat(path.segment(4)).isEqualTo("e");
    }

    protected Path parse( String path ) {
        return Paths.parse(path, false);
    }
}

View File

@ -1,19 +0,0 @@
/*
* Copyright Debezium Authors.
*
* Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
*/
package io.debezium.jdbc;
import io.debezium.config.Configuration;
public class TestDatabase {
public static JdbcConfiguration testConfig( String databaseName ) {
    // Start from the shared system-property-based configuration, then scope it to the named database.
    JdbcConfiguration.Builder builder = buildTestConfig();
    return builder.withDatabase(databaseName).build();
}
public static JdbcConfiguration.Builder buildTestConfig() {
    // Seed the JDBC configuration from system properties prefixed with "database.".
    Configuration fromSystem = Configuration.fromSystemProperties("database.");
    return JdbcConfiguration.copy(fromSystem);
}
}

View File

@ -0,0 +1,119 @@
/*
* Copyright Debezium Authors.
*
* Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
*/
package io.debezium.relational.history;
import java.util.Map;
import org.junit.Before;
import org.junit.Test;
import static org.fest.assertions.Assertions.assertThat;
import io.debezium.relational.Tables;
import io.debezium.relational.ddl.DdlParser;
import io.debezium.relational.ddl.DdlParserSql2003;
import io.debezium.util.Collect;
import io.debezium.util.Testing;
/**
 * Base class for {@link DatabaseHistory} implementation tests. Records a series of DDL statements at
 * increasing log positions and verifies that recovering to various points in the history reproduces
 * the expected schema snapshot.
 *
 * @author Randall Hauch
 */
public abstract class AbstractDatabaseHistoryTest {

    protected DatabaseHistory history;
    protected Map<String, Object> source1;
    protected Map<String, Object> source2;
    protected Tables tables;
    protected Tables t0;
    protected Tables t1;
    protected Tables t2;
    protected Tables t3;
    protected Tables t4;
    protected Tables all;
    protected DdlParser parser;

    @Before
    public void beforeEach() {
        parser = new DdlParserSql2003();
        tables = new Tables();
        t0 = new Tables();
        t1 = new Tables();
        t2 = new Tables();
        t3 = new Tables();
        t4 = new Tables();
        all = new Tables();
        source1 = server("abc");
        source2 = server("xyz");
        history = createHistory();
    }

    /** Create the {@link DatabaseHistory} implementation under test. */
    protected abstract DatabaseHistory createHistory();

    protected Map<String, Object> server(String serverName) {
        return Collect.linkMapOf("server", serverName);
    }

    protected Map<String, Object> position(String filename, long position, int entry) {
        return Collect.linkMapOf("file", filename, "position", position, "entry", entry);
    }

    /**
     * Record the DDL in the history at the given position, and apply it to each of the supplied
     * schema snapshots so they reflect the expected state at that point.
     */
    protected void record(long pos, int entry, String ddl, Tables... update) {
        history.record(source1, position("a.log", pos, entry), "db", tables, ddl);
        for (Tables schema : update) { // renamed from 'tables' to avoid shadowing the field
            if (schema == null) continue;
            parser.setCurrentSchema("db");
            parser.parse(ddl, schema);
        }
    }

    /** Recover the schema state as of the given position. */
    protected Tables recover(long pos, int entry) {
        Tables result = new Tables();
        history.recover(source1, position("a.log", pos, entry), result, parser);
        return result;
    }

    @Test
    public void shouldRecordChangesAndRecoverToVariousPoints() {
        record(1, 0, "CREATE TABLE foo ( first VARCHAR(22) NOT NULL );", all, t3, t2, t1, t0);
        record(23, 1, "CREATE TABLE person ( name VARCHAR(22) NOT NULL );", all, t3, t2, t1);
        record(30, 2, "CREATE TABLE address ( street VARCHAR(22) NOT NULL );", all, t3, t2);
        record(32, 3, "ALTER TABLE address ADD city VARCHAR(22) NOT NULL;", all, t3);
        // Testing.Print.enable();
        if (Testing.Print.isEnabled()) {
            Testing.print("t0 = " + t0);
            Testing.print("t1 = " + t1);
            Testing.print("t2 = " + t2);
            Testing.print("t3 = " + t3);
        }
        assertThat(recover(1, 0)).isEqualTo(t0);
        assertThat(recover(1, 3)).isEqualTo(t0);
        assertThat(recover(10, 1)).isEqualTo(t0);
        assertThat(recover(22, 999999)).isEqualTo(t0);
        assertThat(recover(23, 0)).isEqualTo(t0);
        assertThat(recover(23, 1)).isEqualTo(t1);
        assertThat(recover(23, 2)).isEqualTo(t1);
        assertThat(recover(23, 3)).isEqualTo(t1);
        assertThat(recover(29, 999)).isEqualTo(t1);
        assertThat(recover(30, 1)).isEqualTo(t1);
        assertThat(recover(30, 2)).isEqualTo(t2);
        assertThat(recover(30, 3)).isEqualTo(t2);
        assertThat(recover(32, 2)).isEqualTo(t2);
        assertThat(recover(32, 3)).isEqualTo(t3);
        assertThat(recover(32, 4)).isEqualTo(t3);
        assertThat(recover(33, 0)).isEqualTo(t3);
        assertThat(recover(33, 0)).isEqualTo(all);
        assertThat(recover(1033, 4)).isEqualTo(t3);
        assertThat(recover(1033, 4)).isEqualTo(t3);
    }
}

View File

@ -0,0 +1,37 @@
/*
* Copyright Debezium Authors.
*
* Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
*/
package io.debezium.relational.history;
import java.nio.file.Path;
import org.junit.Before;
import io.debezium.config.Configuration;
import io.debezium.util.Testing;
/**
 * Tests {@link FileDatabaseHistory} against the common {@link AbstractDatabaseHistoryTest} scenarios,
 * backed by a history file under the test target directory.
 *
 * @author Randall Hauch
 */
public class FileDatabaseHistoryTest extends AbstractDatabaseHistoryTest {

    private static final Path TEST_FILE_PATH = Testing.Files.createTestingPath("dbHistory.log");

    @Override
    @Before
    public void beforeEach() {
        // Start every test with a clean history file.
        Testing.Files.delete(TEST_FILE_PATH);
        super.beforeEach();
    }

    @Override
    protected DatabaseHistory createHistory() {
        Configuration config = Configuration.create()
                                            .with(FileDatabaseHistory.FILE_PATH, TEST_FILE_PATH.toAbsolutePath().toString())
                                            .build();
        DatabaseHistory history = new FileDatabaseHistory();
        history.configure(config);
        return history;
    }
}

View File

@ -0,0 +1,85 @@
/*
* Copyright Debezium Authors.
*
* Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
*/
package io.debezium.relational.history;
import java.util.Map;
import org.junit.Before;
import org.junit.Test;
import static org.fest.assertions.Assertions.assertThat;
import io.debezium.util.Collect;
/**
 * Tests for {@link HistoryRecord}: source/database equality and position ordering.
 *
 * @author Randall Hauch
 */
public class HistoryRecordTest {

    private Map<String, Object> source1;
    private Map<String, Object> position1;
    private HistoryRecord record1;
    private Map<String, Object> source2;
    private Map<String, Object> position2;
    private HistoryRecord record2;
    private Map<String, Object> source3;
    private Map<String, Object> position3;
    private HistoryRecord record3;

    /** Build a binlog-style position map. */
    private static Map<String, Object> filePosition(String file, long position, int entry) {
        return Collect.linkMapOf("file", file, "position", position, "entry", entry);
    }

    @Before
    public void beforeEach() {
        // Records 1 and 2 share a server and log file; record 3 uses a different server and database.
        source1 = Collect.linkMapOf("server", "abc");
        position1 = filePosition("x.log", 100L, 1);
        record1 = new HistoryRecord(source1, position1, "db", "CREATE TABLE foo ( first VARCHAR(22) NOT NULL );");
        source2 = Collect.linkMapOf("server", "abc");
        position2 = filePosition("x.log", 300L, 2);
        record2 = new HistoryRecord(source2, position2, "db", "DROP TABLE foo;");
        source3 = Collect.linkMapOf("server", "xyx");
        position3 = filePosition("y.log", 10000L, 1);
        record3 = new HistoryRecord(source3, position3, "other", "DROP TABLE foo;");
    }

    @Test
    public void shouldConsiderOneSourceTheSame() {
        assertThat(record1.hasSameSource(record1)).isTrue();
        assertThat(record2.hasSameSource(record2)).isTrue();
        assertThat(record3.hasSameSource(record3)).isTrue();
    }

    @Test
    public void shouldConsiderTwoDifferentSourcesNotSame() {
        assertThat(record1.hasSameSource(null)).isFalse();
        assertThat(record1.hasSameSource(record3)).isFalse();
        assertThat(record2.hasSameSource(record3)).isFalse();
    }

    @Test
    public void shouldConsiderTwoDifferentSourcesTheSame() {
        assertThat(record1.hasSameSource(record2)).isTrue();
    }

    @Test
    public void shouldConsiderOneDatabaseTheSame() {
        assertThat(record1.hasSameDatabase(record1)).isTrue();
    }

    @Test
    public void shouldConsiderTwoDifferentDatabasesNotSame() {
        assertThat(record1.hasSameDatabase(record3)).isFalse();
        assertThat(record2.hasSameDatabase(record3)).isFalse();
    }

    @Test
    public void shouldCorrectlyComparePositions() {
        assertThat(record1.isAtOrBefore(record1)).isTrue();
        assertThat(record2.isAtOrBefore(record2)).isTrue();
        assertThat(record1.isAtOrBefore(record2)).isTrue();
        assertThat(record2.isAtOrBefore(record1)).isFalse();
    }
}

View File

@ -0,0 +1,17 @@
/*
* Copyright Debezium Authors.
*
* Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
*/
package io.debezium.relational.history;
/**
 * Tests {@link MemoryDatabaseHistory} against the common {@link AbstractDatabaseHistoryTest} scenarios.
 *
 * @author Randall Hauch
 */
public class MemoryDatabaseHistoryTest extends AbstractDatabaseHistoryTest {
    @Override
    protected DatabaseHistory createHistory() {
        // The in-memory implementation requires no configuration.
        return new MemoryDatabaseHistory();
    }
}

View File

@ -0,0 +1,353 @@
/*
* Copyright Debezium Authors.
*
* Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
*/
package io.debezium.util;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.ServerSocket;
import java.nio.file.FileSystems;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.concurrent.Callable;
import java.util.function.Consumer;
import org.fest.assertions.Fail;
import org.junit.Before;
import static org.fest.assertions.Assertions.assertThat;
import static org.fest.assertions.Fail.fail;
import io.debezium.util.Stopwatch.Statistics;
import io.debezium.util.Stopwatch.StopwatchSet;
/**
 * A set of utility methods for test cases.
 *
 * @author Randall Hauch
 */
public interface Testing {

    /** Reset the per-test state (output flags and timers) before each test. */
    @Before
    default void resetBeforeEachTest() {
        Print.enabled = false;
        Debug.enabled = false;
        Timer.reset();
    }

    /** Controls whether {@link Testing#print(Object)} writes to standard output. */
    public static final class Print {
        private static boolean enabled = false;

        public static void enable() {
            enabled = true;
        }

        public static void disable() {
            // Fixed: previously set 'enabled = true', making it impossible to turn printing off.
            enabled = false;
        }

        public static boolean isEnabled() {
            return enabled;
        }
    }

    /**
     * Print the message to standard output, but only when {@link Print} is enabled.
     *
     * @param message the message to print; ignored if null
     */
    public static void print(Object message) {
        if (message != null && Print.enabled) {
            System.out.println(message);
        }
    }

    /**
     * Print a leader padded to the given width, followed by the message, but only when {@link Print} is enabled.
     *
     * @param length the minimum width of the leader column
     * @param leader the leader text
     * @param message the message to print; ignored if null
     */
    public static void print(int length, String leader, Object message) {
        if (message != null && Print.enabled) {
            int len = leader.length();
            System.out.print(leader);
            if (len < length) {
                for (int i = len; i != length; ++i) {
                    System.out.print(" ");
                }
            }
            System.out.println(message);
        }
    }

    /** Controls whether {@link Testing#debug(Object)} writes to standard output. */
    public static final class Debug {
        private static boolean enabled = false;

        public static void enable() {
            enabled = true;
        }

        public static void disable() {
            // Fixed: previously set 'enabled = true', making it impossible to turn debugging off.
            enabled = false;
        }

        public static boolean isEnabled() {
            return enabled;
        }
    }

    /**
     * Print the message to standard output, but only when {@link Debug} is enabled.
     *
     * @param message the message to print; ignored if null
     */
    public static void debug(Object message) {
        if (message != null && Debug.enabled) {
            System.out.println(message);
        }
    }

    /**
     * Print the message to standard error; always enabled.
     *
     * @param message the message to print; ignored if null
     */
    public static void printError(Object message) {
        if (message != null) {
            System.err.println(message);
        }
    }

    /**
     * Print the throwable's stack trace to standard error; always enabled.
     *
     * @param throwable the exception to print; ignored if null
     */
    public static void printError(Throwable throwable) {
        if (throwable != null) {
            throwable.printStackTrace();
        }
    }

    /**
     * Print the message and the throwable's stack trace to standard error; always enabled.
     *
     * @param message the message to print; ignored if null
     * @param throwable the exception to print; ignored if null
     */
    public static void printError(String message, Throwable throwable) {
        printError(message);
        printError(throwable);
    }

    /**
     * Network-related utility methods.
     */
    public static interface Network {
        /**
         * Find a port that is available. This method starts a {@link ServerSocket} and obtains the port on which the socket is
         * listening, and then shuts down the socket so the port becomes available.
         *
         * @return the number of the now-available port
         */
        public static int getAvailablePort() {
            return IoUtil.getAvailablePort();
        }
    }

    /**
     * File system utility methods.
     */
    public static interface Files {
        /**
         * Obtain an InputStream to a named resource on the given classpath.
         *
         * @param pathOnClasspath the path of the resource on the classpath
         * @param testClass the test class, used for accessing the class loader
         * @return the stream; never null (the assertion fails if the resource is missing)
         */
        public static InputStream readResourceAsStream(String pathOnClasspath, Class<?> testClass) {
            InputStream stream = testClass.getClassLoader().getResourceAsStream(pathOnClasspath);
            assertThat(stream).isNotNull();
            return stream;
        }

        /**
         * Obtain an InputStream to a named resource on the classpath used to load this {@link Testing} class.
         *
         * @param pathOnClasspath the path of the resource on the classpath
         * @return the stream; never null
         */
        public static InputStream readResourceAsStream(String pathOnClasspath) {
            return readResourceAsStream(pathOnClasspath, Testing.class);
        }

        /**
         * Read a classpath resource into a string.
         *
         * @param pathOnClasspath the path of the resource on the classpath
         * @return the string representation, or null if the read failed (the assertion already failed)
         */
        public static String readResourceAsString(String pathOnClasspath) {
            try (InputStream stream = readResourceAsStream(pathOnClasspath)) {
                return IoUtil.read(stream);
            } catch (IOException e) {
                Fail.fail("Unable to read '" + pathOnClasspath + "'", e);
                return null;
            }
        }

        /**
         * Create a directory within the test data directory at the given relative path.
         *
         * @param relativePath the path of the directory within the test data directory; may not be null
         * @return the reference to the existing readable and writable directory
         */
        public static File createTestingDirectory(String relativePath) {
            Path dirPath = Paths.get("target/data", relativePath).toAbsolutePath();
            return IoUtil.createDirectory(dirPath);
        }

        /**
         * Create a file within the test data directory at the given relative path.
         *
         * @param relativePath the path of the file within the test data directory; may not be null
         * @return the reference to the existing readable and writable file
         */
        public static File createTestingFile(String relativePath) {
            Path path = Paths.get("target/data", relativePath).toAbsolutePath();
            return IoUtil.createFile(path);
        }

        /**
         * Create a file at the given path, which must be within the test target directory.
         *
         * @param relativePath the path of the file; may not be null
         * @return the reference to the existing readable and writable file
         * @throws IllegalStateException if the path is outside the test target directory
         */
        public static File createTestingFile(Path relativePath) {
            Path path = relativePath.toAbsolutePath();
            if ( !inTargetDir(path) ) {
                throw new IllegalStateException("Expecting '" + relativePath + "' to be within the testing directory");
            }
            return IoUtil.createFile(path);
        }

        /**
         * Create the path to a file within the test data directory at the given relative path.
         *
         * @param relativePath the path of the file within the test data directory; may not be null
         * @return the absolute path; the file itself is not created
         */
        public static Path createTestingPath(String relativePath) {
            return Paths.get("target/data", relativePath).toAbsolutePath();
        }

        /**
         * Create a directory within the test data directory at the given relative path.
         *
         * @param relativePath the path of the directory within the test data directory; may not be null
         * @param removeExistingContent true if any existing content should be removed
         * @return the reference to the existing readable and writable directory
         * @throws IOException if there is a problem deleting the files at this path
         */
        public static File createTestingDirectory(String relativePath, boolean removeExistingContent) throws IOException {
            Path dirPath = Paths.get("target/data", relativePath).toAbsolutePath();
            return IoUtil.createDirectory(dirPath, removeExistingContent);
        }

        /**
         * A method that will delete a file or folder only if it is within the 'target' directory (for safety).
         * Folders are removed recursively.
         *
         * @param path the path to the file or folder in the target directory
         */
        public static void delete(String path) {
            if (path != null) delete(Paths.get(path));
        }

        /**
         * A method that will delete a file or folder only if it is within the 'target' directory (for safety).
         * Folders are removed recursively.
         *
         * @param fileOrFolder the file or folder in the target directory
         */
        public static void delete(File fileOrFolder) {
            if (fileOrFolder != null) delete(fileOrFolder.toPath());
        }

        /**
         * A method that will delete a file or folder only if it is within the 'target' directory (for safety).
         * Folders are removed recursively.
         *
         * @param path the path to the file or folder in the target directory
         */
        public static void delete(Path path) {
            if (path != null) {
                path = path.toAbsolutePath();
                if (inTargetDir(path)) {
                    try {
                        IoUtil.delete(path);
                    } catch (IOException e) {
                        printError("Unable to remove '" + path.toAbsolutePath() + "'", e);
                    }
                } else {
                    printError("Will not remove directory that is outside test target area: " + path);
                }
            }
        }

        /**
         * Verify that the supplied file or directory is within the target directory.
         *
         * @param file the file or directory; may not be null
         * @return true if inside the target directory, or false otherwise
         */
        public static boolean inTargetDir(File file) {
            return inTargetDir(file.toPath());
        }

        /**
         * Verify that the supplied file or directory is within the target directory.
         *
         * @param path the path to the file or directory; may not be null
         * @return true if inside the target directory, or false otherwise
         */
        public static boolean inTargetDir(Path path) {
            Path target = FileSystems.getDefault().getPath("target").toAbsolutePath();
            return path.toAbsolutePath().startsWith(target);
        }
    }

    /** Time a single invocation of the function. */
    default public Statistics once(InterruptableFunction runnable) throws InterruptedException {
        return Timer.time(null, 1, runnable, null);
    }

    /** Time a single invocation of the function, running the cleanup on its result. */
    default public <T> Statistics once(Callable<T> runnable, Consumer<T> cleanup) throws InterruptedException {
        return Timer.time(null, 1, runnable, cleanup);
    }

    /** Time {@code repeat} invocations of the function. */
    default public Statistics time(String desc, int repeat, InterruptableFunction runnable) throws InterruptedException {
        return Timer.time(desc, repeat, runnable, null);
    }

    /** Time {@code repeat} invocations of the function, running the cleanup on each result. */
    default public <T> Statistics time(String desc, int repeat, Callable<T> runnable, Consumer<T> cleanup) throws InterruptedException {
        return Timer.time(desc, repeat, runnable, cleanup);
    }

    /** Accumulates wall-clock and per-operation timing across a test. */
    public static final class Timer {
        private static Stopwatch sw = Stopwatch.accumulating();
        private static StopwatchSet sws = Stopwatch.multiple();

        public static void reset() {
            sw = Stopwatch.accumulating();
            sws = Stopwatch.multiple();
        }

        public static Statistics completionTime() {
            return sw.durations().statistics();
        }

        public static Statistics operationTimes() {
            return sws.statistics();
        }

        protected static <T> Statistics time(String desc, int repeat, Callable<T> runnable, Consumer<T> cleanup)
                throws InterruptedException {
            sw.start();
            try {
                sws.time(repeat, runnable, result -> {
                    if (cleanup != null) cleanup.accept(result);
                });
            } catch (Throwable t) {
                t.printStackTrace();
                fail(t.getMessage());
            }
            sw.stop();
            // Uncomment for timing output while debugging:
            // if (desc != null) Testing.print(60, "Time to " + desc + ":", sw.durations().statistics().getTotalAsString());
            // Testing.print(60,"Total clock time:",sw.durations().statistics().getTotalAsString());
            // Testing.print(54,"Time to invoke the functions:",sws);
            return sw.durations().statistics();
        }
    }

    /** A {@link Callable} that returns nothing and may be interrupted. */
    @FunctionalInterface
    public static interface InterruptableFunction extends Callable<Void> {
        @Override
        public Void call() throws InterruptedException;
    }
}

View File

@ -0,0 +1,32 @@
/*
* Copyright Debezium Authors.
*
* Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
*/
package io.debezium.util;
import java.io.File;
import java.nio.file.Path;
import java.nio.file.Paths;
import org.junit.Test;
import static org.fest.assertions.Assertions.assertThat;
/**
 * Tests for the {@link Testing.Files} utilities.
 */
public class TestingTest implements Testing {

    @Test
    public void shouldKnowDirectoriesInsideTarget() {
        assertThat(Testing.Files.inTargetDir(new File("target/classes").toPath())).isTrue();
        assertThat(Testing.Files.inTargetDir(new File("../debezium").toPath())).isFalse();
    }

    @Test
    public void shouldRemoveDirectory() throws Exception {
        Path path = Paths.get("target/test-dir");
        path.toFile().mkdir();
        Path file = Paths.get("target/test-dir/file.txt");
        file.toFile().createNewFile();
        Testing.Files.delete(path);
        // The original test never verified the deletion; assert that the directory and its file are gone.
        assertThat(file.toFile().exists()).isFalse();
        assertThat(path.toFile().exists()).isFalse();
    }
}

View File

@ -0,0 +1 @@
[ "string", "number", "description", 1, true, -1 ]

View File

@ -0,0 +1 @@
[]

View File

@ -0,0 +1 @@
{"db":"my-db","collection":"Contacts","zone":"default","entity":"2be2f6ed-1560-4793-9890-76623ca81c48","clientid":"96886878-2add-4d9e-aa76-f968a4aa0cde","request":2,"user":"jsmith","begun":1415747136050,"parts":2,"status":1,"ops":[{"op":"add","path":"firstName","value":"William"},{"op":"add","path":"lastName","value":"Johnson"},{"op":"add","path":"mobilePhone","value":"1-222-555-9876"}],"before":{},"after":{"firstName":"William","lastName":"Johnson","mobilePhone":"1-222-555-9876"}}

View File

@ -0,0 +1,35 @@
{
"begun": 1415747136050,
"clientid": "96886878-2add-4d9e-aa76-f968a4aa0cde",
"collection": "Contacts",
"db": "my-db",
"entity": "2be2f6ed-1560-4793-9890-76623ca81c48",
"ops": [
{
"op": "add",
"path": "firstName",
"value": "William"
},
{
"op": "add",
"path": "lastName",
"value": "Johnson"
},
{
"op": "add",
"path": "mobilePhone",
"value": "1-222-555-9876"
}
],
"parts": 2,
"request": 2,
"status": 1,
"user": "jsmith",
"zone": "default",
"after": {
"firstName": "William",
"lastName": "Johnson",
"mobilePhone": "1-222-555-9876"
},
"before": {}
}

View File

@ -0,0 +1 @@
{"db":"my-db","ops":[{"op":"add","path":"/","value":{}}],"clientid":"a3eebe76-b15f-490d-872a-ccf6a9033ff8","request":1,"user":"jsmith","begun":1415724207051}

View File

@ -0,0 +1,14 @@
{
"begun": 1415724207051,
"clientid": "a3eebe76-b15f-490d-872a-ccf6a9033ff8",
"db": "my-db",
"ops": [
{
"op": "add",
"path": "/",
"value": {}
}
],
"request": 1,
"user": "jsmith"
}

View File

@ -0,0 +1,4 @@
{
"values": [ "str1", "str2", 103, true, false, { "field1" : "value1", "field2" : true }, [ "nestedValue1", 203 ] ],
"user": "jsmith"
}

79
debezium-embedded/pom.xml Normal file
View File

@ -0,0 +1,79 @@
<?xml version="1.0"?>
<!--
~ Copyright 2014 Red Hat, Inc. and/or its affiliates.
~
~ Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
-->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<parent>
<groupId>io.debezium</groupId>
<artifactId>debezium-parent</artifactId>
<version>0.1-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>debezium-embedded</artifactId>
<name>Debezium Embedded</name>
<packaging>jar</packaging>
<dependencies>
<dependency>
<groupId>io.debezium</groupId>
<artifactId>debezium-core</artifactId>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
</dependency>
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>connect-api</artifactId>
</dependency>
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>connect-runtime</artifactId>
</dependency>
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>connect-json</artifactId>
</dependency>
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>connect-file</artifactId>
</dependency>
<!-- Testing -->
<dependency>
<groupId>io.debezium</groupId>
<artifactId>debezium-core</artifactId>
<type>test-jar</type>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
</dependency>
<dependency>
<groupId>org.easytesting</groupId>
<artifactId>fest-assert</artifactId>
</dependency>
</dependencies>
<build>
<resources>
<!-- Apply the properties set in the POM to the resource files -->
<resource>
<filtering>true</filtering>
<directory>src/main/resources</directory>
<includes>
<include>**/build.properties</include>
</includes>
</resource>
</resources>
</build>
</project>

View File

@ -0,0 +1,475 @@
/*
* Copyright Debezium Authors.
*
* Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
*/
package io.debezium.embedded;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executor;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.Consumer;
import org.apache.kafka.connect.connector.ConnectorContext;
import org.apache.kafka.connect.connector.Task;
import org.apache.kafka.connect.json.JsonConverter;
import org.apache.kafka.connect.source.SourceConnector;
import org.apache.kafka.connect.source.SourceRecord;
import org.apache.kafka.connect.source.SourceTask;
import org.apache.kafka.connect.source.SourceTaskContext;
import org.apache.kafka.connect.storage.Converter;
import org.apache.kafka.connect.storage.MemoryOffsetBackingStore;
import org.apache.kafka.connect.storage.OffsetBackingStore;
import org.apache.kafka.connect.storage.OffsetStorageReader;
import org.apache.kafka.connect.storage.OffsetStorageReaderImpl;
import org.apache.kafka.connect.storage.OffsetStorageWriter;
import org.apache.kafka.connect.storage.StringConverter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import io.debezium.annotation.ThreadSafe;
import io.debezium.config.Configuration;
import io.debezium.config.Field;
import io.debezium.util.Clock;
import io.debezium.util.VariableLatch;
/**
* A mechanism for running a single Kafka Connect {@link SourceConnector} within an application's process. An embedded connector
* is entirely standalone and only talks with the source system; no Kafka, Kafka Connect, or Zookeeper processes are needed.
* Applications using an embedded connector simply set one up and supply a {@link Consumer consumer function} to which the
* connector will pass all {@link SourceRecord}s containing database change events.
* <p>
* With an embedded connector, the application that runs the connector assumes all responsibility for fault tolerance,
* scalability, and durability. Additionally, applications must specify how the connector can store its relational database
* schema history and offsets. By default, this information will be stored in memory and will thus be lost upon application
* restart.
* <p>
* Embedded connectors are designed to be submitted to an {@link Executor} or {@link ExecutorService} for execution by a single
* thread, and a running connector can be stopped either by calling {@link #stop()} from another thread or by interrupting
* the running thread (e.g., as is the case with {@link ExecutorService#shutdownNow()}).
*
* @author Randall Hauch
*/
@ThreadSafe
public final class EmbeddedConnector implements Runnable {

    /**
     * A required field for an embedded connector that specifies the unique name for the connector instance.
     */
    @SuppressWarnings("unchecked")
    public static final Field CONNECTOR_NAME = Field.create("name")
                                                    .withDescription("Unique name for this connector instance.")
                                                    .withValidation(Field::isRequired);

    /**
     * A required field for an embedded connector that specifies the name of the normal Debezium connector's Java class.
     */
    @SuppressWarnings("unchecked")
    public static final Field CONNECTOR_CLASS = Field.create("connector.class")
                                                     .withDescription("The Java class for the connector")
                                                     .withValidation(Field::isRequired);

    /**
     * An optional advanced field that specifies the maximum amount of time that the embedded connector should wait
     * for an offset commit to complete.
     */
    @SuppressWarnings("unchecked")
    public static final Field OFFSET_COMMIT_TIMEOUT_MS_CONFIG = Field.create("offset.flush.timeout.ms")
                                                                     .withDescription("Maximum number of milliseconds to wait for records to flush and partition offset data to be"
                                                                             + " committed to offset storage before cancelling the process and restoring the offset "
                                                                             + "data to be committed in a future attempt.")
                                                                     .withDefault(5000L)
                                                                     .withValidation(Field::isPositiveInteger);

    // Internal converters used only to serialize/deserialize offsets in the offset store ...
    protected static final Field INTERNAL_KEY_CONVERTER_CLASS = Field.create("internal.key.converter")
                                                                     .withDescription("The Converter class that should be used to serialize and deserialize key data for offsets.")
                                                                     .withDefault(StringConverter.class.getName());
    protected static final Field INTERNAL_VALUE_CONVERTER_CLASS = Field.create("internal.value.converter")
                                                                       .withDescription("The Converter class that should be used to serialize and deserialize value data for offsets.")
                                                                       .withDefault(JsonConverter.class.getName());

    /**
     * The array of fields that are required by each connectors.
     */
    public static final Field[] CONNECTOR_FIELDS = { CONNECTOR_NAME, CONNECTOR_CLASS };

    /**
     * A builder to set up and create {@link EmbeddedConnector} instances.
     */
    public static interface Builder {

        /**
         * Call the specified function for every {@link SourceRecord data change event} read from the source database.
         * This method must be called with a non-null consumer.
         *
         * @param consumer the consumer function
         * @return this builder object so methods can be chained together; never null
         */
        Builder notifying(Consumer<SourceRecord> consumer);

        /**
         * Use the specified {@link Configuration#validate(Field[], Consumer) valid} configuration for the connector. This method
         * must be called with a non-null configuration.
         *
         * @param config the configuration
         * @return this builder object so methods can be chained together; never null
         */
        Builder using(Configuration config);

        /**
         * Use the specified {@link OffsetCommitPolicy} to determine when offsets should be written to offset storage.
         * <p>
         * Passing <code>null</code> or not calling this method results in the connector using all offsets
         * {@link OffsetCommitPolicy#always() always} being committed after each batch of records are received from the source
         * system and processed by the {@link #notifying(Consumer) consumer function}.
         *
         * @param policy the policy for when to commit offsets to the offset store
         * @return this builder object so methods can be chained together; never null
         */
        Builder using(OffsetCommitPolicy policy);

        /**
         * Use the specified storage mechanism for tracking how much data change history in the source database the connector
         * has processed.
         * <p>
         * Passing <code>null</code> or not calling this method results in the connector storing offsets in-memory, which means
         * when the application stops it will lose all record of how far the connector has read from the source database. If the
         * application upon restart should resume reading the source database where it left off, then a durable store must be
         * supplied.
         *
         * @param offsetStorage the store for recording connector offsets
         * @return this builder object so methods can be chained together; never null
         */
        Builder using(OffsetBackingStore offsetStorage);

        /**
         * Use the specified class loader to find all necessary classes. Passing <code>null</code> or not calling this method
         * results in the connector using this class's class loader.
         *
         * @param classLoader the class loader
         * @return this builder object so methods can be chained together; never null
         */
        Builder using(ClassLoader classLoader);

        /**
         * Use the specified clock when needing to determine the current time. Passing <code>null</code> or not calling this
         * method results in the connector using the {@link Clock#system() system clock}.
         *
         * @param clock the clock
         * @return this builder object so methods can be chained together; never null
         */
        Builder using(Clock clock);

        /**
         * Build a new connector with the information previously supplied to this builder.
         *
         * @return the embedded connector; never null
         * @throws IllegalArgumentException if a {@link #using(Configuration) configuration} or {@link #notifying(Consumer)
         *             consumer function} were not supplied before this method is called
         */
        EmbeddedConnector build();
    }

    /**
     * Obtain a new {@link Builder} instance that can be used to construct runnable {@link EmbeddedConnector} instances.
     *
     * @return the new builder; never null
     */
    public static Builder create() {
        return new Builder() {
            private Configuration config;
            private OffsetBackingStore offsetStore;
            private OffsetCommitPolicy offsetCommitPolicy;
            private Consumer<SourceRecord> consumer;
            private ClassLoader classLoader;
            private Clock clock;

            @Override
            public Builder using(Configuration config) {
                this.config = config;
                return this;
            }

            @Override
            public Builder using(OffsetBackingStore offsetStore) {
                this.offsetStore = offsetStore;
                return this;
            }

            @Override
            public Builder using(OffsetCommitPolicy policy) {
                this.offsetCommitPolicy = policy;
                return this;
            }

            @Override
            public Builder using(ClassLoader classLoader) {
                this.classLoader = classLoader;
                return this;
            }

            @Override
            public Builder using(Clock clock) {
                this.clock = clock;
                return this;
            }

            @Override
            public Builder notifying(Consumer<SourceRecord> consumer) {
                this.consumer = consumer;
                return this;
            }

            @Override
            public EmbeddedConnector build() {
                // Apply documented defaults for everything that is optional ...
                if (offsetStore == null) offsetStore = new MemoryOffsetBackingStore();
                if (offsetCommitPolicy == null) offsetCommitPolicy = OffsetCommitPolicy.always();
                if (classLoader == null) classLoader = getClass().getClassLoader();
                if (clock == null) clock = Clock.system();
                // ... but the configuration and consumer are required ...
                Objects.requireNonNull(config, "A connector configuration must be specified.");
                Objects.requireNonNull(consumer, "A connector consumer must be specified.");
                return new EmbeddedConnector(config, offsetStore, offsetCommitPolicy, classLoader, clock, consumer);
            }
        };
    }

    private final Logger logger = LoggerFactory.getLogger(getClass());
    private final Configuration config;
    private final OffsetBackingStore offsetStore;
    private final OffsetCommitPolicy offsetCommitPolicy;
    private final Clock clock;
    private final ClassLoader classLoader;
    private final Consumer<SourceRecord> consumer;
    // True while run() is executing; stop() and run() coordinate through this flag ...
    private final AtomicBoolean running = new AtomicBoolean(false);
    // Counts up when run() starts and down when it finishes, so await(..) can block until completion ...
    private final VariableLatch latch = new VariableLatch(0);
    private final Converter keyConverter;
    private final Converter valueConverter;
    // Accessed only from the thread executing run(), so no synchronization is required ...
    private long recordsSinceLastCommit = 0;
    private long timeSinceLastCommitMillis = 0;

    private EmbeddedConnector(Configuration config, OffsetBackingStore offsetStore,
            OffsetCommitPolicy offsetCommitPolicy, ClassLoader classLoader, Clock clock, Consumer<SourceRecord> consumer) {
        this.config = config;
        this.offsetStore = offsetStore;
        this.offsetCommitPolicy = offsetCommitPolicy;
        this.consumer = consumer;
        this.classLoader = classLoader;
        this.clock = clock;
        assert this.config != null;
        assert this.offsetStore != null;
        assert this.offsetCommitPolicy != null;
        assert this.consumer != null;
        assert this.classLoader != null;
        assert this.clock != null;
        // Instantiate and configure the converters used to (de)serialize offsets ...
        keyConverter = config.getInstance(INTERNAL_KEY_CONVERTER_CLASS, Converter.class, () -> this.classLoader);
        keyConverter.configure(config.subset(INTERNAL_KEY_CONVERTER_CLASS.name() + ".", true).asMap(), false);
        valueConverter = config.getInstance(INTERNAL_VALUE_CONVERTER_CLASS, Converter.class, () -> this.classLoader);
        Configuration valueConverterConfig = config;
        if (valueConverter instanceof JsonConverter) {
            // Make sure that the JSON converter is configured to NOT enable schemas.
            // Use the field's name() explicitly (the original concatenated the Field object itself,
            // which depended on Field.toString() producing the name) ...
            valueConverterConfig = config.edit().with(INTERNAL_VALUE_CONVERTER_CLASS.name() + ".schemas.enable", false).build();
        }
        valueConverter.configure(valueConverterConfig.subset(INTERNAL_VALUE_CONVERTER_CLASS.name() + ".", true).asMap(), false);
    }

    /**
     * Determine if this embedded connector is currently running.
     *
     * @return {@code true} if running, or {@code false} otherwise
     */
    protected boolean isRunning() {
        return this.running.get();
    }

    /**
     * Run this embedded connector and deliver database changes to the registered {@link Consumer}.
     * <p>
     * First, the method checks to see if this instance is currently {@link #run() running}, and if so immediately returns.
     * <p>
     * If the configuration is valid, this method starts the wrapped connector, which connects to the source system and begins
     * reading its change stream. All messages are delivered in batches to the {@link Consumer} registered with this embedded
     * connector. The batch size, polling frequency, and other parameters are controlled via configuration settings. This
     * continues until this connector is {@link #stop() stopped}.
     * <p>
     * Note that there are two ways to stop a connector running on a thread: calling {@link #stop()} from another thread, or
     * interrupting the thread (e.g., via {@link ExecutorService#shutdownNow()}).
     */
    @Override
    public void run() {
        if (running.compareAndSet(false, true)) {
            // Only one thread can be in this part of the method at a time ...
            latch.countUp();
            try {
                if (config.validate(CONNECTOR_FIELDS, logger::error)) {
                    // Instantiate the connector ...
                    final String connectorName = config.getString(CONNECTOR_NAME);
                    final String connectorClassName = config.getString(CONNECTOR_CLASS);
                    SourceConnector connector = null;
                    try {
                        @SuppressWarnings("unchecked")
                        Class<? extends SourceConnector> connectorClass = (Class<SourceConnector>) classLoader.loadClass(connectorClassName);
                        connector = connectorClass.newInstance();
                    } catch (Throwable t) {
                        logger.error("Unable to instantiate connector class {}", connectorClassName, t);
                        return;
                    }

                    // Initialize the connector using a context that does NOT respond to requests to reconfigure tasks ...
                    ConnectorContext context = () -> {};
                    connector.initialize(context);
                    OffsetStorageWriter offsetWriter = new OffsetStorageWriter(offsetStore, connectorName,
                            keyConverter, valueConverter);
                    OffsetStorageReader offsetReader = new OffsetStorageReaderImpl(offsetStore, connectorName,
                            keyConverter, valueConverter);
                    long commitTimeoutMs = config.getLong(OFFSET_COMMIT_TIMEOUT_MS_CONFIG);

                    try {
                        // Start the connector with the given properties and get the task configurations ...
                        connector.start(config.asMap());
                        List<Map<String, String>> taskConfigs = connector.taskConfigs(1);
                        Class<? extends Task> taskClass = connector.taskClass();
                        SourceTask task = null;
                        try {
                            task = (SourceTask) taskClass.newInstance();
                        } catch (IllegalAccessException | InstantiationException t) {
                            logger.error("Unable to instantiate connector's task class {}", taskClass.getName(), t);
                            return;
                        }
                        try {
                            SourceTaskContext taskContext = () -> offsetReader;
                            task.initialize(taskContext);
                            task.start(taskConfigs.get(0));
                        } catch (Throwable t) {
                            logger.error("Unable to initialize and start connector's task class {} with config: {}",
                                    taskClass.getName(), taskConfigs.get(0), t);
                            return;
                        }

                        recordsSinceLastCommit = 0;
                        timeSinceLastCommitMillis = clock.currentTimeInMillis();
                        try {
                            while (running.get()) {
                                try {
                                    List<SourceRecord> changeRecords = task.poll(); // blocks until there are values ...
                                    if (changeRecords != null && !changeRecords.isEmpty()) {
                                        // Stage the offset of the last record in this batch for the next flush ...
                                        SourceRecord lastRecord = changeRecords.get(changeRecords.size() - 1);
                                        offsetWriter.offset(lastRecord.sourcePartition(), lastRecord.sourceOffset());
                                        // Now forward the records to the connector's consumer ...
                                        for (SourceRecord record : changeRecords) {
                                            consumer.accept(record);
                                        }
                                        // Flush the offsets to storage if necessary ...
                                        recordsSinceLastCommit += changeRecords.size();
                                        maybeFlush(offsetWriter, commitTimeoutMs);
                                    }
                                } catch (InterruptedException e) {
                                    // This thread was interrupted, which signals that the thread should stop work,
                                    // but first try to commit the offsets ...
                                    maybeFlush(offsetWriter, commitTimeoutMs);
                                    // Restore the interrupted status so whoever submitted this runnable can observe it
                                    // (the original cleared it, hiding the interruption from the executor) ...
                                    Thread.currentThread().interrupt();
                                    return;
                                }
                            }
                        } finally {
                            // Always stop the task, no matter how the polling loop is exited; the original
                            // never stopped the task, leaving its resources open after stop() ...
                            task.stop();
                        }
                    } catch (Throwable t) {
                        logger.error("Error while running connector class {}", connectorClassName, t);
                    } finally {
                        connector.stop();
                    }
                }
            } finally {
                latch.countDown();
                running.set(false);
            }
        }
    }

    /**
     * Determine if we should flush offsets to storage, and if so then attempt to flush offsets.
     *
     * @param offsetWriter the offset storage writer; may not be null
     * @param commitTimeoutMs the timeout to wait for commit results
     */
    protected void maybeFlush(OffsetStorageWriter offsetWriter, long commitTimeoutMs) {
        // Determine if we need to commit to offset storage ...
        if (this.offsetCommitPolicy.performCommit(recordsSinceLastCommit, timeSinceLastCommitMillis,
                                                  TimeUnit.MILLISECONDS)) {
            long started = clock.currentTimeInMillis();
            long timeout = started + commitTimeoutMs;
            // NOTE(review): beginFlush()'s return value is ignored; doFlush() returning null below
            // covers the "nothing to flush" case, but confirm beginFlush cannot fail here ...
            offsetWriter.beginFlush();
            Future<Void> flush = offsetWriter.doFlush(this::completedFlush);
            if (flush == null) return; // no offsets to commit ...

            // Wait until the offsets are flushed ...
            try {
                flush.get(Math.max(timeout - clock.currentTimeInMillis(), 0), TimeUnit.MILLISECONDS);
                recordsSinceLastCommit = 0;
                timeSinceLastCommitMillis = clock.currentTimeInMillis();
            } catch (InterruptedException e) {
                logger.warn("Flush of {} offsets interrupted, cancelling", this);
                offsetWriter.cancelFlush();
            } catch (ExecutionException e) {
                logger.error("Flush of {} offsets threw an unexpected exception: ", this, e);
                offsetWriter.cancelFlush();
            } catch (TimeoutException e) {
                logger.error("Timed out waiting to flush {} offsets to storage", this);
                offsetWriter.cancelFlush();
            }
        }
    }

    /**
     * Callback invoked when an asynchronous offset flush completes, successfully or not.
     *
     * @param error the failure, or null if the flush succeeded
     * @param result always null for a {@code Future<Void>}
     */
    protected void completedFlush(Throwable error, Void result) {
        if (error != null) {
            logger.error("Failed to flush {} offsets to storage: ", this, error);
        } else {
            logger.trace("Finished flushing {} offsets to storage", this);
        }
    }

    /**
     * Stop the execution of this embedded connector. This method does not block until the connector is stopped; use
     * {@link #await(long, TimeUnit)} for this purpose.
     *
     * @return {@code true} if the connector was {@link #run() running} and will eventually stop, or {@code false} if it was not
     *         running when this method is called
     * @see #await(long, TimeUnit)
     */
    public boolean stop() {
        return running.getAndSet(false);
    }

    /**
     * Wait for the connector to complete processing. If the processor is not running, this method returns immediately; however,
     * if the processor is {@link #stop() stopped} and restarted before this method is called, this method will return only
     * when it completes the second time.
     *
     * @param timeout the maximum amount of time to wait before returning
     * @param unit the unit of time; may not be null
     * @return {@code true} if the connector completed within the timeout (or was not running), or {@code false} if it is still
     *         running when the timeout occurred
     * @throws InterruptedException if this thread is interrupted while waiting for the completion of the connector
     */
    public boolean await(long timeout, TimeUnit unit) throws InterruptedException {
        return latch.await(timeout, unit);
    }

    @Override
    public String toString() {
        return "EmbeddedConnector{id=" + config.getString(CONNECTOR_NAME) + '}';
    }
}

View File

@ -0,0 +1,75 @@
/*
* Copyright Debezium Authors.
*
* Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
*/
package io.debezium.embedded;
import java.util.concurrent.TimeUnit;
import org.apache.kafka.connect.storage.OffsetBackingStore;
/**
* The policy that defines when the offsets should be committed to {@link OffsetBackingStore offset storage}.
*
* @author Randall Hauch
*/
@FunctionalInterface
public interface OffsetCommitPolicy {

    /**
     * Determine if a commit of the offsets should be performed.
     *
     * @param numberOfMessagesSinceLastCommit the number of messages received from the connector since the offsets were
     *            last committed; never negative
     * @param timeSinceLastCommit the time that has elapsed since the offsets were last committed; never negative
     * @param timeUnit the unit of time used for {@code timeSinceLastCommit}; never null
     * @return {@code true} if the offsets should be committed, or {@code false} otherwise
     */
    boolean performCommit(long numberOfMessagesSinceLastCommit, long timeSinceLastCommit, TimeUnit timeUnit);

    /**
     * Get an {@link OffsetCommitPolicy} that commits offsets as frequently as possible. This may reduce performance,
     * but it has the least potential for seeing source records more than once upon restart.
     *
     * @return the offset commit policy; never null
     */
    static OffsetCommitPolicy always() {
        return (records, elapsedTime, elapsedUnit) -> true;
    }

    /**
     * Get an {@link OffsetCommitPolicy} that commits offsets no more often than the specified time period.
     *
     * @param minimumTime the minimum amount of time between committing offsets; must be positive
     * @param timeUnit the time unit for {@code minimumTime}; may not be null
     * @return the offset commit policy; never null
     */
    static OffsetCommitPolicy periodic(long minimumTime, TimeUnit timeUnit) {
        // Convert the elapsed time into this policy's unit before comparing against the minimum ...
        return (records, elapsedTime, elapsedUnit) -> timeUnit.convert(elapsedTime, elapsedUnit) >= minimumTime;
    }

    /**
     * Obtain a new {@link OffsetCommitPolicy} that commits offsets if either this policy OR the other requests it.
     *
     * @param other the other commit policy; if null, then this policy instance is returned as is
     * @return the resulting policy; never null
     */
    default OffsetCommitPolicy or(OffsetCommitPolicy other) {
        return other == null ? this
                : (records, elapsedTime, elapsedUnit) -> performCommit(records, elapsedTime, elapsedUnit)
                        || other.performCommit(records, elapsedTime, elapsedUnit);
    }

    /**
     * Obtain a new {@link OffsetCommitPolicy} that commits offsets only if both this policy AND the other request it.
     *
     * @param other the other commit policy; if null, then this policy instance is returned as is
     * @return the resulting policy; never null
     */
    default OffsetCommitPolicy and(OffsetCommitPolicy other) {
        return other == null ? this
                : (records, elapsedTime, elapsedUnit) -> performCommit(records, elapsedTime, elapsedUnit)
                        && other.performCommit(records, elapsedTime, elapsedUnit);
    }
}

View File

@ -0,0 +1,60 @@
/*
* Copyright Debezium Authors.
*
* Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
*/
package io.debezium.embedded;
/**
* The Debezium Embedded API provides a simple and lightweight way for an application to directly monitor an external database
* and receive all of the data changes that are made to that database.
* <p>
* The primary benefit of embedding a connector is that the application and connector no longer require nor use the external
* systems normally required by Debezium (e.g., Kafka, Kafka Connect, and Zookeeper), so the application architecture becomes
* more straightforward.
* <p>
* However, an embedded connector is not as fault tolerant or scalable, and the connector only monitors the database while
* the application is running. Furthermore, the embedding application is completely responsible for the connector's lifecycle
* and storage of its internal state (e.g., offsets, database schema history, etc.) so that, after the application stops and
* restarts its connector, the connector can continue processing exactly where it left off.
* <h2>Usage</h2>
* <p>
* Applications do not directly work with Debezium connectors, but instead use the {@link io.debezium.embedded.EmbeddedConnector}
* class to configure and build an {@link io.debezium.embedded.EmbeddedConnector} instance that wraps and completely manages
* a standard Debezium connector. The application also provides, among other things, a function that the EmbeddedConnector will
* use to deliver data change events to the application.
* <p>
* Once the application has configured its {@link io.debezium.embedded.EmbeddedConnector} instance and is ready to start receiving
* data change events, the application submits the EmbeddedConnector to an {@link java.util.concurrent.Executor} or
* {@link java.util.concurrent.ExecutorService} managed by the application. The EmbeddedConnector's
* {@link io.debezium.embedded.EmbeddedConnector#run()} method will start the standard Debezium connector and continuously
 * deliver any data change events to the application.
* <p>
* When the application is ready to shut down the connector, it should call {@link EmbeddedConnector#stop()} on the
* EmbeddedConnector, which will then stop monitoring the source database, complete any current work, and gracefully shut down.
* The application can wait for the connector to complete by using the
* {@link io.debezium.embedded.EmbeddedConnector#await(long, java.util.concurrent.TimeUnit)} method.
* <h2>Storing connector state</h2>
* <p>
* As Debezium connectors operate, they keep track of which information from the source database they have processed, and they
* record this <em>offset information</em> in an {@link org.apache.kafka.connect.storage.OffsetBackingStore}. Kafka Connect
* provides several implementations that can be used by an application, including a
* {@link org.apache.kafka.connect.storage.FileOffsetBackingStore file-based store} and an
* {@link org.apache.kafka.connect.storage.MemoryOffsetBackingStore memory-based store}. For most applications the memory-based
* store will not be sufficient, since when the application shuts down all offset information will be lost. Instead, most
* applications should use the file-based store (or another persistent implementation of
* {@link org.apache.kafka.connect.storage.OffsetBackingStore}) so that all offset information is persisted after the application
* terminates and can be read upon restart.
* <p>
* Some Debezium connectors to relational databases may also keep track of all changes to the database's schema so that it has
* the correct table structure for any point in time as it reads the transaction logs. This is critical information, since the
* data being read from the transaction log reflects the database structure at the time those records were written in the log,
* and the database's table structure may have changed since that point in time. These connectors use a
* {@link io.debezium.relational.history.DatabaseHistory} store to persist the database schema changes and the offsets at which
* the changes are recorded. This way, no matter at which offset the database connector starts reading the transaction log, the
* connector will have the correct database schema for that point in time. And, just like with the
* {@link org.apache.kafka.connect.storage.OffsetBackingStore}, the application must provide the EmbeddedConnector with a
* {@link io.debezium.relational.history.DatabaseHistory} implementation such as the
* {@link io.debezium.relational.history.FileDatabaseHistory} that stores the schema changes on the local file system.
*/

View File

@ -0,0 +1,177 @@
/*
* Copyright Debezium Authors.
*
* Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
*/
package io.debezium.embedded;
import java.io.File;
import java.io.IOException;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;
import java.util.List;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import org.apache.kafka.connect.file.FileStreamSourceConnector;
import org.apache.kafka.connect.source.SourceRecord;
import org.apache.kafka.connect.storage.MemoryOffsetBackingStore;
import org.apache.kafka.connect.storage.OffsetBackingStore;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import static org.fest.assertions.Assertions.assertThat;
import io.debezium.config.Configuration;
import io.debezium.util.Collect;
import io.debezium.util.Testing;
/**
* @author Randall Hauch
*/
public class EmbeddedConnectorTest implements Testing {
private static final int NUMBER_OF_LINES = 10;
private static final Path TEST_FILE_PATH = Testing.Files.createTestingPath("file-connector-input.txt");
private static final Charset UTF8 = StandardCharsets.UTF_8;
private ExecutorService executor;
private EmbeddedConnector connector;
private File inputFile;
private BlockingQueue<SourceRecord> consumedLines;
private int nextConsumedLineNumber;
private int linesAdded;
private OffsetBackingStore offsetStore;
@Before
public void beforeEach() throws Exception {
    // Reset the per-test line counters: line 1 is the next expected line, and none have been written yet ...
    nextConsumedLineNumber = 1;
    linesAdded = 0;
    // Bounded queue into which the connector's consumer function places records ...
    consumedLines = new ArrayBlockingQueue<>(100);
    // Delete any leftover file from a previous run BEFORE creating a fresh empty one ...
    Testing.Files.delete(TEST_FILE_PATH);
    inputFile = Testing.Files.createTestingFile(TEST_FILE_PATH);
    // Single thread on which the embedded connector will be run ...
    executor = Executors.newFixedThreadPool(1);
}
@After
public void afterEach() {
    // Ensure the connector is stopped even when a test fails mid-way ...
    stopConnector();
}
@Test
public void shouldStartAndUseFileConnectorUsingMemoryOffsetStorage() throws Exception {
    // Set up the offset store. It is in-memory, so offsets survive a connector restart within this test
    // but would be lost if the JVM restarted ...
    offsetStore = new MemoryOffsetBackingStore();
    // Add initial content to the file ...
    appendLinesToSource(NUMBER_OF_LINES);
    // Create the connector that tails the input file and publishes each line as a record ...
    connector = EmbeddedConnector.create()
                                 .using(Configuration.create()
                                                     .with(EmbeddedConnector.CONNECTOR_NAME, "file-connector")
                                                     .with(EmbeddedConnector.CONNECTOR_CLASS, FileStreamSourceConnector.class.getName())
                                                     .with(FileStreamSourceConnector.FILE_CONFIG, inputFile.getAbsolutePath())
                                                     .with(FileStreamSourceConnector.TOPIC_CONFIG, "topicX")
                                                     .build())
                                 .using(offsetStore)
                                 .using(OffsetCommitPolicy.always())
                                 .notifying(consumedLines::add)
                                 .build();
    // Submit the connector for asynchronous execution ...
    executor.execute(connector);
    // Verify the first 10 lines were found ...
    consumeOutput(NUMBER_OF_LINES);
    assertNothingToConsume();
    for (int i = 1; i != 5; ++i) {
        // Add a few more lines, and then verify they are consumed ...
        appendLinesToSource(NUMBER_OF_LINES);
        consumeOutput(NUMBER_OF_LINES);
        assertNothingToConsume();
    }
    // Stop the connector ...
    assertThat(connector.stop()).isTrue();
    // Add several more lines while the connector is stopped; nothing should be consumed yet ...
    appendLinesToSource(NUMBER_OF_LINES);
    assertNothingToConsume();
    // Start the connector again; it should resume from the stored offsets ...
    executor.execute(connector);
    // Verify that we see the correct line number, meaning that offsets were recorded correctly ...
    consumeOutput(NUMBER_OF_LINES);
    assertNothingToConsume();
}
protected void appendLinesToSource(int numberOfLines) throws IOException {
CharSequence[] lines = new CharSequence[numberOfLines];
for (int i = 0; i != numberOfLines; ++i) {
lines[i] = generateLine(linesAdded + i + 1);
}
java.nio.file.Files.write(inputFile.toPath(), Collect.arrayListOf(lines), UTF8, StandardOpenOption.APPEND);
linesAdded += numberOfLines;
}
protected void appendLinesToSource(CharSequence... lines) throws IOException {
}
protected String generateLine(int lineNumber) {
return "Generated line number " + lineNumber;
}
protected void consumeOutput(int numberOfLines) throws InterruptedException {
for (int i = 0; i != numberOfLines; ++i) {
SourceRecord record = consumedLines.poll(5, TimeUnit.SECONDS);
String line = record.value().toString();
assertThat(line).isEqualTo(generateLine(nextConsumedLineNumber));
++nextConsumedLineNumber;
}
}
protected void assertNothingToConsume() {
assertThat(consumedLines.isEmpty()).isTrue();
}
protected void stopConnector() {
try {
// Try to stop the connector ...
if ( connector != null ) {
connector.stop();
try {
connector.await(5, TimeUnit.SECONDS);
} catch ( InterruptedException e ) {
Thread.interrupted();
}
}
List<Runnable> neverRunTasks = executor.shutdownNow();
assertThat(neverRunTasks).isEmpty();
try {
executor.awaitTermination(10, TimeUnit.SECONDS); // wait for completion ...
} catch (InterruptedException e) {
Thread.interrupted();
}
assertStopped();
} finally {
connector = null;
executor = null;
}
}
protected void assertStopped() {
if (connector != null) {
assertThat(connector.isRunning()).isFalse();
}
}
}

View File

@ -0,0 +1,84 @@
/*
* Copyright Debezium Authors.
*
* Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
*/
package io.debezium.embedded;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import org.junit.Test;
import static org.fest.assertions.Assertions.assertThat;
/**
* @author Randall Hauch
*/
/**
 * Tests of the {@link OffsetCommitPolicy} factory methods and combinators.
 *
 * @author Randall Hauch
 */
public class OffsetCommitPolicyTest {

    @Test
    public void shouldAlwaysCommit() {
        OffsetCommitPolicy policy = OffsetCommitPolicy.always();
        // The 'always' policy must commit regardless of the record count or elapsed time ...
        assertThat(policy.performCommit(0, 0, TimeUnit.NANOSECONDS)).isTrue();
        assertThat(policy.performCommit(10000, 1000, TimeUnit.DAYS)).isTrue();
    }

    @Test
    public void shouldCommitPeriodically() {
        OffsetCommitPolicy policy = OffsetCommitPolicy.periodic(10, TimeUnit.HOURS);
        // Commits only once the elapsed time reaches the 10 hour period, regardless of record count ...
        assertThat(policy.performCommit(0, 0, TimeUnit.NANOSECONDS)).isFalse();
        assertThat(policy.performCommit(10000, 9, TimeUnit.HOURS)).isFalse();
        assertThat(policy.performCommit(0, 10, TimeUnit.HOURS)).isTrue();
    }

    @Test
    public void shouldCombineTwoPolicies() {
        AtomicBoolean firstResult = new AtomicBoolean(false);
        AtomicBoolean secondResult = new AtomicBoolean(false);
        OffsetCommitPolicy first = (num, time, unit) -> firstResult.get();
        OffsetCommitPolicy second = (num, time, unit) -> secondResult.get();
        OffsetCommitPolicy conjunctionA = first.and(second);
        OffsetCommitPolicy conjunctionB = second.and(first);
        OffsetCommitPolicy disjunctionA = first.or(second);
        OffsetCommitPolicy disjunctionB = second.or(first);

        // Walk through every combination of the two underlying results, checking both orderings ...
        assertCombined(conjunctionA, conjunctionB, disjunctionA, disjunctionB, false, false);
        firstResult.set(true);
        assertCombined(conjunctionA, conjunctionB, disjunctionA, disjunctionB, false, true);
        secondResult.set(true);
        assertCombined(conjunctionA, conjunctionB, disjunctionA, disjunctionB, true, true);
        firstResult.set(false);
        assertCombined(conjunctionA, conjunctionB, disjunctionA, disjunctionB, false, true);
        secondResult.set(false);
        assertCombined(conjunctionA, conjunctionB, disjunctionA, disjunctionB, false, false);
    }

    @Test
    public void shouldCombineOnePolicyWithNull() {
        AtomicBoolean commit = new AtomicBoolean(false);
        OffsetCommitPolicy policy = (num, time, unit) -> commit.get();
        // Combining with null should simply return the non-null policy itself ...
        assertThat(policy.and(null)).isSameAs(policy);
        assertThat(policy.or(null)).isSameAs(policy);
    }

    /**
     * Assert the expected outcome of both orderings of the 'and' combination and both orderings
     * of the 'or' combination.
     *
     * @param andA the first conjunction to check
     * @param andB the conjunction with operands in the opposite order
     * @param orA the first disjunction to check
     * @param orB the disjunction with operands in the opposite order
     * @param expectAnd the expected result of both conjunctions
     * @param expectOr the expected result of both disjunctions
     */
    protected void assertCombined(OffsetCommitPolicy andA, OffsetCommitPolicy andB,
                                  OffsetCommitPolicy orA, OffsetCommitPolicy orB,
                                  boolean expectAnd, boolean expectOr) {
        assertThat(andA.performCommit(0, 0, TimeUnit.NANOSECONDS)).isEqualTo(expectAnd);
        assertThat(andB.performCommit(0, 0, TimeUnit.NANOSECONDS)).isEqualTo(expectAnd);
        assertThat(orA.performCommit(0, 0, TimeUnit.NANOSECONDS)).isEqualTo(expectOr);
        assertThat(orB.performCommit(0, 0, TimeUnit.NANOSECONDS)).isEqualTo(expectOr);
    }
}

View File

@ -0,0 +1,11 @@
# Direct log messages to stdout
log4j.appender.stdout=org.apache.log4j.ConsoleAppender
log4j.appender.stdout.Target=System.out
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
log4j.appender.stdout.layout.ConversionPattern=%d{ISO8601} %-5p %m (%c)%n
# Root logger option
log4j.rootLogger=INFO, stdout
# Set up the default logging to be INFO level, then override specific units
log4j.logger.io.debezium=INFO

View File

@ -1,15 +1,30 @@
## Ingesting MySQL # Ingesting MySQL change events
This module defines the connector that ingests change events from MySQL databases.
## Using the MySQL connector with Kafka Connect
The MySQL connector is designed to work with [Kafka Connect](http://kafka.apache.org/documentation.html#connect) and to be deployed to a Kafka Connect runtime service. The deployed connector will monitor one or more databases and write all change events to Kafka topics, which can be independently consumed by one or more clients. Kafka Connect can be distributed to provide fault tolerance to ensure the connectors are running and continually keeping up with changes in the database.
Kafka Connect can also be run standalone as a single process, although doing so is not tolerant of failures.
## Embedding the MySQL connector
The MySQL connector can also be used as a library without Kafka or Kafka Connect, enabling applications and services to directly connect to a MySQL database and obtain the ordered change events. This approach requires the application to record the progress of the connector so that upon restart the connect can continue where it left off. Therefore, this may be a useful approach for less critical use cases. For production use cases, we highly recommend using this connector with Kafka and Kafka Connect.
## Unit and integration tests
## Testing
This module contains both unit tests and integration tests. This module contains both unit tests and integration tests.
A *unit test* is a JUnit test class named `*Test.java` or `Test*.java` that never requires or uses external services, though it can use the file system and can run any components within the same JVM process. They should run very quickly, be independent of each other, and clean up after itself. A *unit test* is a JUnit test class named `*Test.java` or `Test*.java` that never requires or uses external services, though it can use the file system and can run any components within the same JVM process. They should run very quickly, be independent of each other, and clean up after itself.
An *integration test* is a JUnit test class named `*IT.java` or `IT*.java` that uses one or more MySQL databases running in a custom Docker container automatically started before the integration tests are run and automatically stopped and removed after all of the integration tests complete (regardless of whether they suceed or fail). All databases used in the integration tests are defined and populated using `*.sql` files and `*.sh` scripts in the `src/test/docker` directory, which are copied into the Docker image and run (in lexicographical order) by MySQL upon startup. Multiple test methods within a single integration test class can reuse the same database, but generally each integration test class should use its own dedicated database(s). An *integration test* is a JUnit test class named `*IT.java` or `IT*.java` that uses a MySQL database server running in a custom Docker container. The build will automatically start the MySQL container before the integration tests are run and automatically stop and remove it after all of the integration tests complete (regardless of whether they suceed or fail). All databases used in the integration tests are defined and populated using `*.sql` files and `*.sh` scripts in the `src/test/docker/init` directory, which are copied into the Docker image and run in lexicographical order by MySQL upon startup. Multiple test methods within a single integration test class can reuse the same database, but generally each integration test class should use its own dedicated database(s).
Running `mvn install` will compile all code and run the unit tests. If there are any problems, such as failing unit tests, the build will stop immediately. Otherwise, the build will create the module's artifacts, create the Docker image with MySQL, start the Docker container, run the integration tests, and stop the container even if there are integration test failures. If there are no problems, the build will end by installing the artifacts into the local Maven repository. Running `mvn install` will compile all code and run the unit tests. If there are any compile problems or any of the unit tests fail, the build will stop immediately. Otherwise, the command will continue to create the module's artifacts, create the Docker image with MySQL and custom scripts, start the Docker container, run the integration tests, stop the container (even if there are integration test failures), and run checkstyle on the code. If there are still no problems, the build will then install the module's artifacts into the local Maven repository.
You should always default to using `mvn install`, especially prior to committing changes to Git. However, there are a few situations where you may want to run a different Maven command. You should always default to using `mvn install`, especially prior to committing changes to Git. However, there are a few situations where you may want to run a different Maven command.
@ -23,26 +38,38 @@ Of course, wildcards also work:
$ mvn -Dit.test=Connect*IT install $ mvn -Dit.test=Connect*IT install
These commands will automatically manage the MySQL Docker container.
### Debugging tests ### Debugging tests
Normally, the MySQL Docker container is stopped and removed after the integration tests are run. One way to debug tests is to configure the build to wait for a remote debugging client, but then you also have to set up your IDE to connect. It's often far easier to debug a single test directly from within your IDE. To do that, you want to start the MySQL Docker container and keep it running: If you want to debug integration tests by stepping through them in your IDE, using the `mvn install` command will be problematic since it will not wait for your IDE's breakpoints. There are ways of doing this, but it is typically far easier to simply start the Docker container and leave it running so that it is available when you run the integration test(s). To create and start the Docker container, simply run:
$ mvn docker:start $ mvn docker:start
Then use your IDE to run one or more unit tests, optionally debugging them as needed. Just be sure that the unit tests clean up their database before (and after) each test. Again, the container and database server will be initialized as usual but will continue to run. Now you can use your IDE to run/debug one or more integration tests. Just be sure that the integration tests clean up their database before (and after) each test, and that you run the tests with VM arguments that define the required system properties, including:
To stop the container, simply use Docker to stop and remove the MySQL Docker container named `database`: * `database.dbname` - the name of the database that your integration test will use; there is no default
* `database.hostname` - the IP address or name of the host where the Docker container is running; defaults to `localhost` which is likely for Linux, but on OS X and Windows Docker it will have to be set to the IP address of the VM that runs Docker (which you can find by looking at the `DOCKER_HOST` environment variable).
* `database.port` - the port on which MySQL is listening; defaults to `3306` and is what this module's Docker container uses
* `database.user` - the name of the database user; defaults to `mysql` and is correct unless your database script uses something different
* `database.password` - the password of the database user; defaults to `mysqlpw` and is correct unless your database script uses something different
For example, you can define these properties by passing these arguments to the VM:
-Ddatabase.dbname=<DATABASE_NAME> -Ddatabase.hostname=<DOCKER_HOST> -Ddatabase.port=3306 -Ddatabase.user=mysqluser -Ddatabase.password=mysqlpw
When you are finished running the integration tests from your IDE, you have to stop and remove the Docker container (conveniently named "database") before you can run the next build:
$ docker stop database $ docker stop database
$ docker rm database $ docker rm database
### Analyzing the database ### Analyzing the database
Sometimes you may want to inspect the state of the database(s) after one or more integration tests are run. The `mvn install` command runs the tests but shuts down and removes the container after the tests complete. To keep the container running after the tests complete, use this Maven command: Sometimes you may want to inspect the state of the database(s) after one or more integration tests are run. The `mvn install` command runs the tests but shuts down and removes the container after the integration tests complete. To keep the container running after the integration tests complete, use this Maven command:
$ mvn integration-test $ mvn integration-test
This instructs Maven to run the normal Maven lifecycle through `integration-test`, and to stop before the `post-integration-test` phase when the Docker container is normally shut down and removed. Be aware that you will need to manually stop and remove the container before running the build again, and to make this more convenient we give the MySQL container the alias `database`: This instructs Maven to run the normal Maven lifecycle through `integration-test`, and to stop before the `post-integration-test` phase when the Docker container is normally shut down and removed. Be aware that you will need to manually stop and remove the container (conveniently named "database") before running the build again:
$ docker stop database $ docker stop database
$ docker rm database $ docker rm database

View File

@ -3,7 +3,7 @@
* *
* Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0 * Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
*/ */
package io.debezium.mysql.ingest; package io.debezium.mysql;
import java.util.Properties; import java.util.Properties;

View File

@ -1,39 +0,0 @@
/*
* Copyright Debezium Authors.
*
* Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
*/
package io.debezium.mysql;
import io.debezium.config.Configuration;
import io.debezium.config.Configuration.Field;
/**
* The configuration properties.
*/
/**
 * The configuration properties for the MySQL connector.
 */
public class MySqlConfiguration {

    public static final Field USER = Configuration.field("database.user",
                                                         "Name of the database user to be used when connecting to the database");
    public static final Field PASSWORD = Configuration.field("database.password",
                                                             "Password to be used when connecting to the database");
    public static final Field HOSTNAME = Configuration.field("database.hostname", "IP address of the database");
    // MySQL's default port is 3306; the previous default of 5432 is PostgreSQL's port.
    public static final Field PORT = Configuration.field("database.port", "Port of the database", 3306);
    public static final Field SERVER_ID = Configuration.field("connect.id",
                                                              "ID of this database client, which must be unique across all database processes in the cluster.");
    public static final Field CONNECTION_TIMEOUT_MS = Configuration.field("connect.timeout.ms",
                                                                          "Maximum time in milliseconds to wait after trying to connect to the database before timing out.",
                                                                          30 * 1000);
    public static final Field KEEP_ALIVE = Configuration.field("connect.keep.alive",
                                                               "Whether a separate thread should be used to ensure the connection is kept alive.",
                                                               true);
    public static final Field MAX_QUEUE_SIZE = Configuration.field("max.queue.size",
                                                                   "Maximum size of the queue for change events read from the database log but not yet recorded or forwarded. Should be larger than the maximum batch size.",
                                                                   2048);
    public static final Field MAX_BATCH_SIZE = Configuration.field("max.batch.size", "Maximum size of each batch of source records.",
                                                                   1024);
    public static final Field POLL_INTERVAL_MS = Configuration.field("poll.interval.ms",
                                                                     "Frequency in milliseconds to poll for new change events", 1 * 1000);
    public static final Field LOGICAL_ID = Configuration.field("database.logical.id",
                                                               "Logical unique identifier for this database. Defaults to host:port");
}

View File

@ -1,43 +0,0 @@
/*
* Copyright Debezium Authors.
*
* Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
*/
package io.debezium.mysql.ingest;
/**
* A function that determines the name of a topic given the table name and database name.
*
* @author Randall Hauch
*/
@FunctionalInterface
public interface TopicSelector {

    /**
     * Determine the name of the topic for the given database and table names.
     *
     * @param databaseName the name of the database; may not be null
     * @param tableName the name of the table; may not be null
     * @return the topic name; never null
     */
    String getTopic(String databaseName, String tableName);

    /**
     * Obtain the default topic selector, which joins the database name and table name with a '.' character.
     *
     * @return the topic selector; never null
     */
    static TopicSelector defaultSelector() {
        // Delegate to the parameterized variant with the standard delimiter ...
        return defaultSelector(".");
    }

    /**
     * Obtain a topic selector that joins the database name and table name with the supplied delimiter.
     *
     * @param delimiter the string placed between the database name and the table name; may not be null
     * @return the topic selector; never null
     */
    static TopicSelector defaultSelector(String delimiter) {
        return (db, table) -> db + delimiter + table;
    }
}

View File

@ -3,23 +3,37 @@
* *
* Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0 * Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
*/ */
package io.debezium.mysql.ingest; package io.debezium.mysql.source;
import java.util.Collections;
import java.util.HashMap;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import org.apache.kafka.connect.connector.Task; import org.apache.kafka.connect.connector.Task;
import org.apache.kafka.connect.source.SourceConnector; import org.apache.kafka.connect.source.SourceConnector;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import io.debezium.config.Configuration;
import io.debezium.mysql.Module;
/** /**
* A Kafka Connect source connector that creates tasks that read the MySQL binary log and generate the corresponding * A Kafka Connect source connector that creates tasks that read the MySQL binary log and generate the corresponding
* data change events. * data change events.
* <h2>Configuration</h2>
* <p>
* This connector is configured with the set of properties described in {@link ConnectorConfig}.
*
* *
* @author Randall Hauch * @author Randall Hauch
*/ */
public class MySqlConnector extends SourceConnector { public class Connector extends SourceConnector {
public MySqlConnector() { private final Logger logger = LoggerFactory.getLogger(getClass());
private Map<String, String> props;
public Connector() {
} }
@Override @Override
@ -28,21 +42,24 @@ public String version() {
} }
@Override @Override
public void start(Map<String, String> props) { public Class<? extends Task> taskClass() {
return LogReader.class;
} }
@Override @Override
public Class<? extends Task> taskClass() { public void start(Map<String, String> props) {
return null; this.props = props;
Configuration.from(props).validate(ConnectorConfig.ALL_FIELDS, logger::error);
} }
@Override @Override
public List<Map<String, String>> taskConfigs(int maxTasks) { public List<Map<String, String>> taskConfigs(int maxTasks) {
return null; return props == null ? Collections.emptyList() : Collections.singletonList(new HashMap<String, String>(props));
} }
@Override @Override
public void stop() { public void stop() {
this.props = null;
} }
} }

View File

@ -0,0 +1,104 @@
/*
* Copyright Debezium Authors.
*
* Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
*/
package io.debezium.mysql.source;
import java.util.Collection;
import java.util.function.Consumer;
import io.debezium.config.Configuration;
import io.debezium.config.Field;
import io.debezium.relational.history.KafkaDatabaseHistory;
import io.debezium.util.Collect;
/**
* The configuration properties.
*/
/**
 * The configuration properties for the MySQL connector.
 */
@SuppressWarnings("unchecked")
public class ConnectorConfig {

    public static final Field USER = Field.create("database.user")
                                          .withDescription("Name of the database user to be used when connecting to the database.")
                                          .withValidation(Field::isRequired);

    public static final Field PASSWORD = Field.create("database.password")
                                              .withDescription("Password to be used when connecting to the database.")
                                              .withValidation(Field::isRequired);

    public static final Field HOSTNAME = Field.create("database.hostname")
                                              .withDescription("IP address of the MySQL database server.")
                                              .withValidation(Field::isRequired);

    public static final Field PORT = Field.create("database.port")
                                          .withDescription("Port of the MySQL database server.")
                                          .withDefault(3306)
                                          .withValidation(Field::isRequired, Field::isInteger);

    public static final Field SERVER_ID = Field.create("database.server.id")
                                               .withDescription("A numeric ID of this database client, which must be unique across all currently-running database processes in the cluster. This is required because this connector essentially joins the MySQL database cluster as another server (with this unique ID) so it can read the binlog.")
                                               .withValidation(Field::isRequired, Field::isInteger);

    public static final Field SERVER_NAME = Field.create("database.server.name")
                                                 .withDescription("A unique name that identifies the database server that this connector monitors. Each database server should be monitored by at most one Debezium connector, since this server name delineates all persisted data eminating from this server. Defaults to 'host:port'")
                                                 .withValidation(Field::isRequired);

    public static final Field CONNECTION_TIMEOUT_MS = Field.create("connect.timeout.ms")
                                                           .withDescription("Maximum time in milliseconds to wait after trying to connect to the database before timing out.")
                                                           .withDefault(30 * 1000)
                                                           .withValidation(Field::isPositiveInteger);

    public static final Field KEEP_ALIVE = Field.create("connect.keep.alive")
                                                .withDescription("Whether a separate thread should be used to ensure the connection is kept alive.")
                                                .withDefault(true)
                                                .withValidation(Field::isBoolean);

    public static final Field MAX_QUEUE_SIZE = Field.create("max.queue.size")
                                                    .withDescription("Maximum size of the queue for change events read from the database log but not yet recorded or forwarded. Defaults to 2048, and should always be larger than the maximum batch size.")
                                                    .withDefault(2048)
                                                    .withValidation(ConnectorConfig::validateMaxQueueSize);

    public static final Field MAX_BATCH_SIZE = Field.create("max.batch.size")
                                                    .withDescription("Maximum size of each batch of source records. Defaults to 1024.")
                                                    .withDefault(1024)
                                                    .withValidation(Field::isPositiveInteger);

    public static final Field POLL_INTERVAL_MS = Field.create("poll.interval.ms")
                                                      .withDescription("Frequency in milliseconds to poll for new change events.")
                                                      .withDefault(1 * 1000)
                                                      .withValidation(Field::isPositiveInteger);

    public static final Field DATABASE_HISTORY = Field.create("database.history")
                                                      .withDescription("The name of the DatabaseHistory class that should be used to store and recover database schema changes. "
                                                              + "The configuration properties for the history can be specified with the 'database.history.' prefix.")
                                                      .withDefault(KafkaDatabaseHistory.class.getName());

    public static final Field INCLUDE_SCHEMA_CHANGES = Field.create("include.schema.changes")
                                                            .withDescription("Whether schema changes should be included in the change events")
                                                            .withDefault(false)
                                                            .withValidation(Field::isBoolean);

    /** All of the fields defined by this connector's configuration. */
    public static final Collection<Field> ALL_FIELDS = Collect.arrayListOf(USER, PASSWORD, HOSTNAME, PORT, SERVER_ID,
                                                                           SERVER_NAME, CONNECTION_TIMEOUT_MS, KEEP_ALIVE,
                                                                           MAX_QUEUE_SIZE, MAX_BATCH_SIZE, POLL_INTERVAL_MS,
                                                                           DATABASE_HISTORY, INCLUDE_SCHEMA_CHANGES);

    /**
     * Validate that the 'max.queue.size' value is positive and strictly larger than 'max.batch.size'.
     * <p>
     * Note: the original implementation overwrote {@code maxBatchSize} with {@code maxQueueSize / 2}
     * before reporting it, so the error message showed a fabricated batch size rather than the one
     * that was actually configured; it also had an unbalanced quote in the message.
     *
     * @param config the configuration being validated
     * @param field the 'max.queue.size' field
     * @param problems the consumer to which validation problem messages are reported
     * @return the number of problems found
     */
    private static int validateMaxQueueSize(Configuration config, Field field, Consumer<String> problems) {
        int maxQueueSize = config.getInteger(field);
        int maxBatchSize = config.getInteger(MAX_BATCH_SIZE);
        int count = 0;
        if (maxQueueSize <= 0) {
            problems.accept("The " + MAX_QUEUE_SIZE + " value '" + maxQueueSize + "' must be positive");
            ++count;
        }
        if (maxQueueSize <= maxBatchSize) {
            problems.accept("The " + MAX_QUEUE_SIZE + " value '" + maxQueueSize + "' must be larger than the " +
                    MAX_BATCH_SIZE + " value '" + maxBatchSize + "'.");
            ++count;
        }
        return count;
    }
}

View File

@ -3,14 +3,14 @@
* *
* Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0 * Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
*/ */
package io.debezium.mysql.ingest; package io.debezium.mysql.source;
import java.io.IOException; import java.io.IOException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.EnumMap; import java.util.EnumMap;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.Set; import java.util.Objects;
import java.util.concurrent.BlockingQueue; import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingDeque; import java.util.concurrent.LinkedBlockingDeque;
import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeUnit;
@ -34,25 +34,31 @@
import com.github.shyiko.mysql.binlog.event.deserialization.EventDeserializer; import com.github.shyiko.mysql.binlog.event.deserialization.EventDeserializer;
import com.github.shyiko.mysql.binlog.network.AuthenticationException; import com.github.shyiko.mysql.binlog.network.AuthenticationException;
import io.debezium.annotation.NotThreadSafe;
import io.debezium.config.Configuration; import io.debezium.config.Configuration;
import io.debezium.mysql.MySqlConfiguration; import io.debezium.mysql.Module;
import io.debezium.relational.TableId; import io.debezium.mysql.MySqlDdlParser;
import io.debezium.relational.Tables; import io.debezium.relational.Tables;
import io.debezium.relational.ddl.DdlParser;
import io.debezium.relational.history.DatabaseHistory;
/** /**
* A Kafka Connect source task reads the MySQL binary log and generate the corresponding data change events. * A Kafka Connect source task reads the MySQL binary log and generate the corresponding data change events.
* *
* @see MySqlConnector * @see Connector
* @author Randall Hauch * @author Randall Hauch
*/ */
public class MySqlChangeDetector extends SourceTask { @NotThreadSafe
final class LogReader extends SourceTask {
private final Logger logger = LoggerFactory.getLogger(getClass()); private final Logger logger = LoggerFactory.getLogger(getClass());
private final EnumMap<EventType, EventHandler> eventHandlers = new EnumMap<>(EventType.class); private final TopicSelector topicSelector;
private final Tables tables;
private final TableConverters tableConverters;
// These are all effectively constants between start(...) and stop(...) // These are all effectively constants between start(...) and stop(...)
private DatabaseHistory dbHistory;
private EnumMap<EventType, EventHandler> eventHandlers = new EnumMap<>(EventType.class);
private Tables tables;
private TableConverters tableConverters;
private BinaryLogClient client; private BinaryLogClient client;
private BlockingQueue<Event> events; private BlockingQueue<Event> events;
private List<Event> batchEvents; private List<Event> batchEvents;
@ -60,21 +66,30 @@ public class MySqlChangeDetector extends SourceTask {
private long pollIntervalMs; private long pollIntervalMs;
// Used in the methods that process events ... // Used in the methods that process events ...
private final SourceInfo sourceInfo = new SourceInfo(); private final SourceInfo source = new SourceInfo();
public MySqlChangeDetector() { /**
this(null); * Create an instance of the log reader that uses Kafka to store database schema history and the
* {@link TopicSelector#defaultSelector() default topic selector} of "{@code <serverName>.<databaseName>.<tableName>}" for
* data and "{@code <serverName>}" for metadata.
*/
public LogReader() {
this.topicSelector = TopicSelector.defaultSelector();
this.dbHistory = null; // delay creating the history until startup, which is only allowed by default constructor
} }
public MySqlChangeDetector( TopicSelector topicSelector ) { /**
topicSelector = topicSelector != null ? topicSelector : TopicSelector.defaultSelector(); * Create an instance of the log reader that uses the supplied {@link TopicSelector} and the supplied storage for database
tables = new Tables(); * schema history.
tableConverters = new TableConverters(topicSelector, tables, this::signalTablesChanged); *
eventHandlers.put(EventType.TABLE_MAP, tableConverters::updateTableMetadata); * @param dbHistory the history storage for the database's schema; may not be null
eventHandlers.put(EventType.QUERY, tableConverters::updateTableCommand); * @param dataTopicSelector the selector for topics where data and metadata changes are to be written; if null the
eventHandlers.put(EventType.EXT_WRITE_ROWS, tableConverters::handleInsert); * {@link TopicSelector#defaultSelector() default topic selector} will be used
eventHandlers.put(EventType.EXT_UPDATE_ROWS, tableConverters::handleUpdate); */
eventHandlers.put(EventType.EXT_DELETE_ROWS, tableConverters::handleDelete); protected LogReader(DatabaseHistory dbHistory, TopicSelector dataTopicSelector) {
Objects.requireNonNull(dbHistory, "The storage for database schema history is required");
this.topicSelector = dataTopicSelector != null ? dataTopicSelector : TopicSelector.defaultSelector();
this.dbHistory = dbHistory;
} }
@Override @Override
@ -82,30 +97,55 @@ public String version() {
return Module.version(); return Module.version();
} }
protected void signalTablesChanged( Set<TableId> changedTables ) {
// TODO: do something
}
@Override @Override
public void start(Map<String, String> props) { public void start(Map<String, String> props) {
// Read and verify the configuration ... // Validate the configuration ...
final Configuration config = Configuration.from(props); final Configuration config = Configuration.from(props);
final String user = config.getString(MySqlConfiguration.USER); if ( config.validate(ConnectorConfig.ALL_FIELDS,logger::error) ) {
final String password = config.getString(MySqlConfiguration.PASSWORD); return;
final String host = config.getString(MySqlConfiguration.HOSTNAME); }
final int port = config.getInteger(MySqlConfiguration.PORT);
final Long serverId = config.getLong(MySqlConfiguration.SERVER_ID); // Create and configure the database history ...
final String logicalId = config.getString(MySqlConfiguration.LOGICAL_ID.name(), "" + host + ":" + port); this.dbHistory = config.getInstance(ConnectorConfig.DATABASE_HISTORY, DatabaseHistory.class);
final boolean keepAlive = config.getBoolean(MySqlConfiguration.KEEP_ALIVE); if ( this.dbHistory == null ) {
final int maxQueueSize = config.getInteger(MySqlConfiguration.MAX_QUEUE_SIZE); this.logger.error("Unable to instantiate the database history class {}",config.getString(ConnectorConfig.DATABASE_HISTORY));
final long timeoutInMilliseconds = config.getLong(MySqlConfiguration.CONNECTION_TIMEOUT_MS); return;
maxBatchSize = config.getInteger(MySqlConfiguration.MAX_BATCH_SIZE); }
pollIntervalMs = config.getLong(MySqlConfiguration.POLL_INTERVAL_MS); Configuration dbHistoryConfig = config.subset(ConnectorConfig.DATABASE_HISTORY.name() + ".", true);
this.dbHistory.configure(dbHistoryConfig);
// Read the configuration ...
final String user = config.getString(ConnectorConfig.USER);
final String password = config.getString(ConnectorConfig.PASSWORD);
final String host = config.getString(ConnectorConfig.HOSTNAME);
final int port = config.getInteger(ConnectorConfig.PORT);
final Long serverId = config.getLong(ConnectorConfig.SERVER_ID);
final String serverName = config.getString(ConnectorConfig.SERVER_NAME.name(), host + ":" + port);
final boolean keepAlive = config.getBoolean(ConnectorConfig.KEEP_ALIVE);
final int maxQueueSize = config.getInteger(ConnectorConfig.MAX_QUEUE_SIZE);
final long timeoutInMilliseconds = config.getLong(ConnectorConfig.CONNECTION_TIMEOUT_MS);
final boolean includeSchemaChanges = config.getBoolean(ConnectorConfig.INCLUDE_SCHEMA_CHANGES);
maxBatchSize = config.getInteger(ConnectorConfig.MAX_BATCH_SIZE);
pollIntervalMs = config.getLong(ConnectorConfig.POLL_INTERVAL_MS);
if (maxQueueSize <= maxBatchSize) {
maxBatchSize = maxQueueSize / 2;
logger.error("The {} value must be larger than {}, so changing {} to {}", ConnectorConfig.MAX_QUEUE_SIZE,
ConnectorConfig.MAX_BATCH_SIZE, ConnectorConfig.MAX_QUEUE_SIZE, maxBatchSize);
}
// Create the queue ... // Create the queue ...
events = new LinkedBlockingDeque<>(maxQueueSize); events = new LinkedBlockingDeque<>(maxQueueSize);
batchEvents = new ArrayList<>(maxBatchSize); batchEvents = new ArrayList<>(maxBatchSize);
// Set up our handlers ...
tables = new Tables();
tableConverters = new TableConverters(topicSelector, dbHistory, includeSchemaChanges, tables);
eventHandlers.put(EventType.TABLE_MAP, tableConverters::updateTableMetadata);
eventHandlers.put(EventType.QUERY, tableConverters::updateTableCommand);
eventHandlers.put(EventType.EXT_WRITE_ROWS, tableConverters::handleInsert);
eventHandlers.put(EventType.EXT_UPDATE_ROWS, tableConverters::handleUpdate);
eventHandlers.put(EventType.EXT_DELETE_ROWS, tableConverters::handleDelete);
// Set up the log reader ... // Set up the log reader ...
client = new BinaryLogClient(host, port, user, password); client = new BinaryLogClient(host, port, user, password);
client.setServerId(serverId); client.setServerId(serverId);
@ -115,20 +155,29 @@ public void start(Map<String, String> props) {
client.registerLifecycleListener(traceLifecycleListener()); client.registerLifecycleListener(traceLifecycleListener());
// Check if we've already processed some of the log for this database ... // Check if we've already processed some of the log for this database ...
sourceInfo.setDatabase(logicalId); source.setServerName(serverName);
if (context != null) { if (context != null) {
// TODO: Figure out how to load the table definitions from previous runs. Can it be read from each of the output
// topics? Does it need to be serialized locally?
// Get the offsets for our partition ... // Get the offsets for our partition ...
sourceInfo.setOffset(context.offsetStorageReader().offset(sourceInfo.partition())); source.setOffset(context.offsetStorageReader().offset(source.partition()));
// And set the client to start from that point ... // And set the client to start from that point ...
client.setBinlogFilename(sourceInfo.binlogFilename()); client.setBinlogFilename(source.binlogFilename());
client.setBinlogPosition(sourceInfo.binlogPosition()); client.setBinlogPosition(source.binlogPosition());
// The event row number will be used when processing the first event ... // The event row number will be used when processing the first event ...
// We have to make our Tables reflect the state of the database at the above source partition (e.g., the location
// in the MySQL log where we last stopped reading. Since the TableConverts writes out all DDL statements to the
// TopicSelector.getTopic(serverName) topic, we can consume that topic and apply each of the DDL statements
// to our Tables object. Each of those DDL messages is keyed by the database name, and contains a single string
// of DDL. However, we should consume no further than offset we recovered above.
try {
DdlParser ddlParser = new MySqlDdlParser();
dbHistory.recover(source.partition(), source.offset(), tables, ddlParser);
} catch (Throwable t) {
logger.error("Error while recovering database schemas", t);
}
} else { } else {
// initializes this position, though it will be reset when we see the first event (should be a rotate event) ... // initializes this position, though it will be reset when we see the first event (should be a rotate event) ...
sourceInfo.setBinlogPosition(client.getBinlogPosition()); source.setBinlogPosition(client.getBinlogPosition());
} }
// Start the log reader, which starts background threads ... // Start the log reader, which starts background threads ...
@ -169,22 +218,22 @@ public List<SourceRecord> poll() throws InterruptedException {
} else { } else {
rotateEventData = (RotateEventData) eventData; rotateEventData = (RotateEventData) eventData;
} }
sourceInfo.setBinlogFilename(rotateEventData.getBinlogFilename()); source.setBinlogFilename(rotateEventData.getBinlogFilename());
sourceInfo.setBinlogPosition(rotateEventData.getBinlogPosition()); source.setBinlogPosition(rotateEventData.getBinlogPosition());
sourceInfo.setRowInEvent(0); source.setRowInEvent(0);
} else if (eventHeader instanceof EventHeaderV4) { } else if (eventHeader instanceof EventHeaderV4) {
EventHeaderV4 trackableEventHeader = (EventHeaderV4) eventHeader; EventHeaderV4 trackableEventHeader = (EventHeaderV4) eventHeader;
long nextBinlogPosition = trackableEventHeader.getNextPosition(); long nextBinlogPosition = trackableEventHeader.getNextPosition();
if (nextBinlogPosition > 0) { if (nextBinlogPosition > 0) {
sourceInfo.setBinlogPosition(nextBinlogPosition); source.setBinlogPosition(nextBinlogPosition);
sourceInfo.setRowInEvent(0); source.setRowInEvent(0);
} }
} }
// If there is a handler for this event, forward the event to it ... // If there is a handler for this event, forward the event to it ...
EventHandler handler = eventHandlers.get(eventType); EventHandler handler = eventHandlers.get(eventType);
if (handler != null) { if (handler != null) {
handler.handle(event, sourceInfo, records::add); handler.handle(event, source, records::add);
} }
} }
// We've processed them all, so clear the batch and return the records ... // We've processed them all, so clear the batch and return the records ...

View File

@ -3,7 +3,7 @@
* *
* Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0 * Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
*/ */
package io.debezium.mysql.ingest; package io.debezium.mysql.source;
import java.util.Map; import java.util.Map;
@ -40,7 +40,7 @@
@NotThreadSafe @NotThreadSafe
final class SourceInfo { final class SourceInfo {
public static final String DATABASE_PARTITION_KEY = "db"; public static final String SERVER_PARTITION_KEY = "server";
public static final String BINLOG_FILENAME_OFFSET_KEY = "file"; public static final String BINLOG_FILENAME_OFFSET_KEY = "file";
public static final String BINLOG_POSITION_OFFSET_KEY = "pos"; public static final String BINLOG_POSITION_OFFSET_KEY = "pos";
public static final String BINLOG_EVENT_ROW_NUMBER_OFFSET_KEY = "row"; public static final String BINLOG_EVENT_ROW_NUMBER_OFFSET_KEY = "row";
@ -48,7 +48,7 @@ final class SourceInfo {
private String binlogFilename; private String binlogFilename;
private long binlogPosition = 4; private long binlogPosition = 4;
private int eventRowNumber = 0; private int eventRowNumber = 0;
private String databaseId; private String serverName;
private Map<String, ?> sourcePartition; private Map<String, ?> sourcePartition;
public SourceInfo() { public SourceInfo() {
@ -59,15 +59,15 @@ public SourceInfo() {
* *
* @param logicalId the logical identifier for the database; may not be null * @param logicalId the logical identifier for the database; may not be null
*/ */
public void setDatabase(String logicalId) { public void setServerName(String logicalId) {
this.databaseId = logicalId; this.serverName = logicalId;
sourcePartition = Collect.hashMapOf(DATABASE_PARTITION_KEY, databaseId); sourcePartition = Collect.hashMapOf(SERVER_PARTITION_KEY, serverName);
} }
/** /**
* Get the Kafka Connect detail about the source "partition", which describes the portion of the source that we are * Get the Kafka Connect detail about the source "partition", which describes the portion of the source that we are
* consuming. Since we're reading the binary log for a single database, the source partition specifies the * consuming. Since we're reading the binary log for a single database, the source partition specifies the
* {@link #setDatabase database server}. * {@link #setServerName(String) database server}.
* <p> * <p>
* The resulting map is mutable for efficiency reasons (this information rarely changes), but should not be mutated. * The resulting map is mutable for efficiency reasons (this information rarely changes), but should not be mutated.
* *
@ -79,11 +79,11 @@ public void setDatabase(String logicalId) {
/** /**
* Get the Kafka Connect detail about the source "offset", which describes the position within the source where we last * Get the Kafka Connect detail about the source "offset", which describes the position within the source where we last
* stopped reading. * have last read.
* *
* @return a copy of the current offset; never null * @return a copy of the current offset; never null
*/ */
public Map<String, Object> offset() { public Map<String, ?> offset() {
return Collect.hashMapOf(BINLOG_FILENAME_OFFSET_KEY, binlogFilename, return Collect.hashMapOf(BINLOG_FILENAME_OFFSET_KEY, binlogFilename,
BINLOG_POSITION_OFFSET_KEY, binlogPosition, BINLOG_POSITION_OFFSET_KEY, binlogPosition,
BINLOG_EVENT_ROW_NUMBER_OFFSET_KEY, eventRowNumber); BINLOG_EVENT_ROW_NUMBER_OFFSET_KEY, eventRowNumber);
@ -91,12 +91,12 @@ public Map<String, Object> offset() {
/** /**
* Set the current row number within a given event, and then get the Kafka Connect detail about the source "offset", which * Set the current row number within a given event, and then get the Kafka Connect detail about the source "offset", which
* describes the position within the source where we last stopped reading. * describes the position within the source where we have last read.
* *
* @param eventRowNumber the row number within the last event that was successfully processed * @param eventRowNumber the 0-based row number within the last event that was successfully processed
* @return a copy of the current offset; never null * @return a copy of the current offset; never null
*/ */
public Map<String, Object> offset(int eventRowNumber) { public Map<String, ?> offset(int eventRowNumber) {
setRowInEvent(eventRowNumber); setRowInEvent(eventRowNumber);
return offset(); return offset();
} }
@ -134,7 +134,7 @@ public void setRowInEvent(int rowNumber) {
* *
* @param sourceOffset the previously-recorded Kafka Connect source offset * @param sourceOffset the previously-recorded Kafka Connect source offset
*/ */
public void setOffset(Map<String, Object> sourceOffset) { public void setOffset(Map<String, ?> sourceOffset) {
if (sourceOffset != null) { if (sourceOffset != null) {
// We have previously recorded an offset ... // We have previously recorded an offset ...
binlogFilename = (String) sourceOffset.get(BINLOG_FILENAME_OFFSET_KEY); binlogFilename = (String) sourceOffset.get(BINLOG_FILENAME_OFFSET_KEY);
@ -174,9 +174,9 @@ public int eventRowNumber() {
/** /**
* Get the logical identifier of the database that is the source of the events. * Get the logical identifier of the database that is the source of the events.
* @return the database name; null if it has not been {@link #setDatabase(String) set} * @return the database name; null if it has not been {@link #setServerName(String) set}
*/ */
public String database() { public String serverName() {
return databaseId; return serverName;
} }
} }

View File

@ -3,12 +3,13 @@
* *
* Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0 * Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
*/ */
package io.debezium.mysql.ingest; package io.debezium.mysql.source;
import java.io.Serializable; import java.io.Serializable;
import java.util.BitSet; import java.util.BitSet;
import java.util.HashMap; import java.util.HashMap;
import java.util.Map; import java.util.Map;
import java.util.Objects;
import java.util.Set; import java.util.Set;
import java.util.function.Consumer; import java.util.function.Consumer;
@ -32,6 +33,8 @@
import io.debezium.relational.TableSchema; import io.debezium.relational.TableSchema;
import io.debezium.relational.TableSchemaBuilder; import io.debezium.relational.TableSchemaBuilder;
import io.debezium.relational.Tables; import io.debezium.relational.Tables;
import io.debezium.relational.history.DatabaseHistory;
import io.debezium.relational.history.HistoryRecord;
import io.debezium.text.ParsingException; import io.debezium.text.ParsingException;
/** /**
@ -42,43 +45,61 @@
final class TableConverters { final class TableConverters {
private final Logger logger = LoggerFactory.getLogger(getClass()); private final Logger logger = LoggerFactory.getLogger(getClass());
private final DatabaseHistory dbHistory;
private final TopicSelector topicSelector; private final TopicSelector topicSelector;
private final MySqlDdlParser ddlParser; private final MySqlDdlParser ddlParser;
private final Tables tables; private final Tables tables;
private final TableSchemaBuilder schemaBuilder = new TableSchemaBuilder(); private final TableSchemaBuilder schemaBuilder = new TableSchemaBuilder();
private final Consumer<Set<TableId>> tablesChangedHandler; private final Map<TableId, TableSchema> tableSchemaByTableId = new HashMap<>();
private final Map<String, TableSchema> tableSchemaByTableName = new HashMap<>();
private final Map<Long, Converter> convertersByTableId = new HashMap<>(); private final Map<Long, Converter> convertersByTableId = new HashMap<>();
private final Map<String, Long> tableNumbersByTableName = new HashMap<>(); private final Map<String, Long> tableNumbersByTableName = new HashMap<>();
private final boolean recordSchemaChangesInSourceRecords;
public TableConverters( TopicSelector topicSelector, Tables tables, Consumer<Set<TableId>> tablesChangedHandler ) { public TableConverters(TopicSelector topicSelector, DatabaseHistory dbHistory,
boolean recordSchemaChangesInSourceRecords, Tables tables) {
Objects.requireNonNull(topicSelector, "A topic selector is required");
Objects.requireNonNull(dbHistory, "Database history storage is required");
Objects.requireNonNull(tables, "A Tables object is required");
this.topicSelector = topicSelector; this.topicSelector = topicSelector;
this.tablesChangedHandler = tablesChangedHandler != null ? tablesChangedHandler : (ids)->{}; this.dbHistory = dbHistory;
this.tables = tables != null ? tables : new Tables(); this.tables = tables;
this.ddlParser = new MySqlDdlParser(false); // don't include views this.ddlParser = new MySqlDdlParser(false); // don't include views
this.recordSchemaChangesInSourceRecords = recordSchemaChangesInSourceRecords;
} }
public void updateTableCommand(Event event, SourceInfo source, Consumer<SourceRecord> recorder) { public void updateTableCommand(Event event, SourceInfo source, Consumer<SourceRecord> recorder) {
QueryEventData command = event.getData(); QueryEventData command = event.getData();
String databaseName = command.getDatabase();
String ddlStatements = command.getSql(); String ddlStatements = command.getSql();
try { try {
this.ddlParser.setCurrentSchema(databaseName);
this.ddlParser.parse(ddlStatements, tables); this.ddlParser.parse(ddlStatements, tables);
} catch ( ParsingException e) { } catch (ParsingException e) {
logger.error("Error parsing DDL statement and updating tables", e); logger.error("Error parsing DDL statement and updating tables", e);
} finally { } finally {
// Record the DDL statement so that we can later recover them if needed ...
dbHistory.record(source.partition(), source.offset(), databaseName, tables, ddlStatements);
if (recordSchemaChangesInSourceRecords) {
String serverName = source.serverName();
String topicName = topicSelector.getTopic(serverName);
HistoryRecord historyRecord = new HistoryRecord(source.partition(), source.offset(), databaseName, ddlStatements);
recorder.accept(new SourceRecord(source.partition(), source.offset(), topicName, 0,
Schema.STRING_SCHEMA, databaseName, Schema.STRING_SCHEMA, historyRecord.document().toString()));
}
}
// Figure out what changed ... // Figure out what changed ...
Set<TableId> changes = tables.drainChanges(); Set<TableId> changes = tables.drainChanges();
changes.forEach(tableId->{ changes.forEach(tableId -> {
Table table = tables.forTable(tableId); Table table = tables.forTable(tableId);
if ( table == null ) { // removed if (table == null) { // removed
tableSchemaByTableName.remove(tableId.table()); tableSchemaByTableId.remove(tableId);
} else { } else {
TableSchema schema = schemaBuilder.create(table, false); TableSchema schema = schemaBuilder.create(table, false);
tableSchemaByTableName.put(tableId.table(), schema); tableSchemaByTableId.put(tableId, schema);
} }
}); });
tablesChangedHandler.accept(changes); // notify
}
} }
/** /**
@ -102,12 +123,14 @@ public void updateTableMetadata(Event event, SourceInfo source, Consumer<SourceR
long tableNumber = metadata.getTableId(); long tableNumber = metadata.getTableId();
if (!convertersByTableId.containsKey(tableNumber)) { if (!convertersByTableId.containsKey(tableNumber)) {
// We haven't seen this table ID, so we need to rebuild our converter functions ... // We haven't seen this table ID, so we need to rebuild our converter functions ...
String serverName = source.serverName();
String databaseName = metadata.getDatabase(); String databaseName = metadata.getDatabase();
String tableName = metadata.getTable(); String tableName = metadata.getTable();
String topicName = topicSelector.getTopic(databaseName, tableName); String topicName = topicSelector.getTopic(serverName, databaseName, tableName);
// Just get the current schema, which should be up-to-date ... // Just get the current schema, which should be up-to-date ...
TableSchema tableSchema = tableSchemaByTableName.get(tableName); TableId tableId = new TableId(databaseName, null, tableName);
TableSchema tableSchema = tableSchemaByTableId.get(tableId);
// Generate this table's insert, update, and delete converters ... // Generate this table's insert, update, and delete converters ...
Converter converter = new Converter() { Converter converter = new Converter() {
@ -115,44 +138,50 @@ public void updateTableMetadata(Event event, SourceInfo source, Consumer<SourceR
public String topic() { public String topic() {
return topicName; return topicName;
} }
@Override @Override
public Integer partition() { public Integer partition() {
return null; return null;
} }
@Override @Override
public Schema keySchema() { public Schema keySchema() {
return tableSchema.keySchema(); return tableSchema.keySchema();
} }
@Override @Override
public Schema valueSchema() { public Schema valueSchema() {
return tableSchema.valueSchema(); return tableSchema.valueSchema();
} }
@Override @Override
public Object createKey(Serializable[] row, BitSet includedColumns) { public Object createKey(Serializable[] row, BitSet includedColumns) {
// assume all columns in the table are included ... // assume all columns in the table are included ...
return tableSchema.keyFromColumnData(row); return tableSchema.keyFromColumnData(row);
} }
@Override @Override
public Struct inserted(Serializable[] row, BitSet includedColumns) { public Struct inserted(Serializable[] row, BitSet includedColumns) {
// assume all columns in the table are included ... // assume all columns in the table are included ...
return tableSchema.valueFromColumnData(row); return tableSchema.valueFromColumnData(row);
} }
@Override @Override
public Struct updated(Serializable[] after, BitSet includedColumns, Serializable[] before, public Struct updated(Serializable[] after, BitSet includedColumns, Serializable[] before,
BitSet includedColumnsBeforeUpdate) { BitSet includedColumnsBeforeUpdate) {
// assume all columns in the table are included, and we'll write out only the updates ... // assume all columns in the table are included, and we'll write out only the after state ...
return tableSchema.valueFromColumnData(after); return tableSchema.valueFromColumnData(after);
} }
@Override @Override
public Struct deleted(Serializable[] deleted, BitSet includedColumns) { public Struct deleted(Serializable[] deleted, BitSet includedColumns) {
// TODO: Should we write out the old values or null? // We current write out null to signal that the row was removed ...
// assume all columns in the table are included ...
return null; // tableSchema.valueFromColumnData(row); return null; // tableSchema.valueFromColumnData(row);
} }
}; };
convertersByTableId.put(tableNumber, converter); convertersByTableId.put(tableNumber, converter);
Long previousTableNumber = tableNumbersByTableName.put(tableName, tableNumber); Long previousTableNumber = tableNumbersByTableName.put(tableName, tableNumber);
if ( previousTableNumber != null ) { if (previousTableNumber != null) {
convertersByTableId.remove(previousTableNumber); convertersByTableId.remove(previousTableNumber);
} }
} }
@ -168,9 +197,9 @@ public void handleInsert(Event event, SourceInfo source, Consumer<SourceRecord>
for (int row = 0; row <= source.eventRowNumber(); ++row) { for (int row = 0; row <= source.eventRowNumber(); ++row) {
Serializable[] values = write.getRows().get(row); Serializable[] values = write.getRows().get(row);
Schema keySchema = converter.keySchema(); Schema keySchema = converter.keySchema();
Object key = converter.createKey(values,includedColumns); Object key = converter.createKey(values, includedColumns);
Schema valueSchema = converter.valueSchema(); Schema valueSchema = converter.valueSchema();
Struct value = converter.inserted(values,includedColumns); Struct value = converter.inserted(values, includedColumns);
SourceRecord record = new SourceRecord(source.partition(), source.offset(row), topic, partition, SourceRecord record = new SourceRecord(source.partition(), source.offset(row), topic, partition,
keySchema, key, valueSchema, value); keySchema, key, valueSchema, value);
recorder.accept(record); recorder.accept(record);
@ -197,9 +226,9 @@ public void handleUpdate(Event event, SourceInfo source, Consumer<SourceRecord>
Serializable[] before = changes.getKey(); Serializable[] before = changes.getKey();
Serializable[] after = changes.getValue(); Serializable[] after = changes.getValue();
Schema keySchema = converter.keySchema(); Schema keySchema = converter.keySchema();
Object key = converter.createKey(after,includedColumns); Object key = converter.createKey(after, includedColumns);
Schema valueSchema = converter.valueSchema(); Schema valueSchema = converter.valueSchema();
Struct value = converter.updated(before,includedColumnsBefore, after,includedColumns); Struct value = converter.updated(before, includedColumnsBefore, after, includedColumns);
SourceRecord record = new SourceRecord(source.partition(), source.offset(row), topic, partition, SourceRecord record = new SourceRecord(source.partition(), source.offset(row), topic, partition,
keySchema, key, valueSchema, value); keySchema, key, valueSchema, value);
recorder.accept(record); recorder.accept(record);
@ -216,9 +245,9 @@ public void handleDelete(Event event, SourceInfo source, Consumer<SourceRecord>
for (int row = 0; row <= source.eventRowNumber(); ++row) { for (int row = 0; row <= source.eventRowNumber(); ++row) {
Serializable[] values = deleted.getRows().get(row); Serializable[] values = deleted.getRows().get(row);
Schema keySchema = converter.keySchema(); Schema keySchema = converter.keySchema();
Object key = converter.createKey(values,includedColumns); Object key = converter.createKey(values, includedColumns);
Schema valueSchema = converter.valueSchema(); Schema valueSchema = converter.valueSchema();
Struct value = converter.inserted(values,includedColumns); Struct value = converter.inserted(values, includedColumns);
SourceRecord record = new SourceRecord(source.partition(), source.offset(row), topic, partition, SourceRecord record = new SourceRecord(source.partition(), source.offset(row), topic, partition,
keySchema, key, valueSchema, value); keySchema, key, valueSchema, value);
recorder.accept(record); recorder.accept(record);
@ -238,7 +267,7 @@ protected static interface Converter {
Struct inserted(Serializable[] row, BitSet includedColumns); Struct inserted(Serializable[] row, BitSet includedColumns);
Struct updated(Serializable[] after, BitSet includedColumns, Serializable[] before, BitSet includedColumnsBeforeUpdate ); Struct updated(Serializable[] after, BitSet includedColumns, Serializable[] before, BitSet includedColumnsBeforeUpdate);
Struct deleted(Serializable[] deleted, BitSet includedColumns); Struct deleted(Serializable[] deleted, BitSet includedColumns);
} }

View File

@ -0,0 +1,78 @@
/*
* Copyright Debezium Authors.
*
* Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
*/
package io.debezium.mysql.source;
import io.debezium.annotation.ThreadSafe;
/**
* A function that determines the name of topics for data and metadata.
*
* @author Randall Hauch
*/
@ThreadSafe
public interface TopicSelector {
/**
* Get the default topic selector logic, which uses a '.' delimiter character when needed.
*
* @return the topic selector; never null
*/
static TopicSelector defaultSelector() {
return defaultSelector(".");
}
/**
* Get the default topic selector logic, which uses the supplied delimiter character when needed.
*
* @param delimiter the string delineating the server, database, and table names; may not be null
* @return the topic selector; never null
*/
static TopicSelector defaultSelector(String delimiter) {
return new TopicSelector() {
/**
* Get the name of the topic for the given server, database, and table names. This method returns
* "{@code <serverName>}".
*
* @param serverName the name of the database server; may not be null
* @return the topic name; never null
*/
@Override
public String getTopic(String serverName) {
return serverName;
}
/**
* Get the name of the topic for the given server name. This method returns
* "{@code <serverName>.<databaseName>.<tableName>}".
*
* @param serverName the name of the database server; may not be null
* @param databaseName the name of the database; may not be null
* @param tableName the name of the table; may not be null
* @return the topic name; never null
*/
@Override
public String getTopic(String serverName, String databaseName, String tableName) {
return String.join(delimiter, serverName, databaseName, tableName);
}
};
}
/**
* Get the name of the topic for the given server name.
*
* @param serverName the name of the database server; may not be null
* @param databaseName the name of the database; may not be null
* @param tableName the name of the table; may not be null
* @return the topic name; never null
*/
String getTopic(String serverName, String databaseName, String tableName);
/**
* Get the name of the topic for the given server, database, and table names.
*
* @param serverName the name of the database server; may not be null
* @return the topic name; never null
*/
String getTopic(String serverName);
}

View File

@ -11,21 +11,19 @@
import org.junit.Ignore; import org.junit.Ignore;
import org.junit.Test; import org.junit.Test;
import io.debezium.jdbc.TestDatabase;
public class ConnectionIT { public class ConnectionIT {
@Ignore @Ignore
@Test @Test
public void shouldConnectToDefaulDatabase() throws SQLException { public void shouldConnectToDefaulDatabase() throws SQLException {
try (MySQLConnection conn = new MySQLConnection(TestDatabase.testConfig("mysql"));) { try (MySQLConnection conn = MySQLConnection.forTestDatabase("mysql");) {
conn.connect(); conn.connect();
} }
} }
@Test @Test
public void shouldDoStuffWithDatabase() throws SQLException { public void shouldDoStuffWithDatabase() throws SQLException {
try (MySQLConnection conn = new MySQLConnection(TestDatabase.testConfig("readbinlog_test"));) { try (MySQLConnection conn = MySQLConnection.forTestDatabase("readbinlog_test");) {
conn.connect(); conn.connect();
// Set up the table as one transaction and wait to see the events ... // Set up the table as one transaction and wait to see the events ...
conn.execute("DROP TABLE IF EXISTS person", conn.execute("DROP TABLE IF EXISTS person",
@ -46,7 +44,7 @@ public void shouldDoStuffWithDatabase() throws SQLException {
@Ignore @Ignore
@Test @Test
public void shouldConnectToEmptyDatabase() throws SQLException { public void shouldConnectToEmptyDatabase() throws SQLException {
try (MySQLConnection conn = new MySQLConnection(TestDatabase.testConfig("emptydb"));) { try (MySQLConnection conn = MySQLConnection.forTestDatabase("emptydb");) {
conn.connect(); conn.connect();
} }
} }

View File

@ -6,14 +6,36 @@
package io.debezium.mysql; package io.debezium.mysql;
import io.debezium.config.Configuration; import io.debezium.config.Configuration;
import io.debezium.jdbc.JdbcConfiguration;
import io.debezium.jdbc.JdbcConnection; import io.debezium.jdbc.JdbcConnection;
/** /**
* A utility for working with MySQL connections. * A utility for integration test cases to connect the MySQL server running in the Docker container created by this module's
* build.
*
* @author Randall Hauch * @author Randall Hauch
*/ */
public class MySQLConnection extends JdbcConnection { public class MySQLConnection extends JdbcConnection {
/**
* Obtain a connection instance to the named test database.
*
* @param databaseName the name of the test database
* @return the MySQLConnection instance; never null
*/
public static MySQLConnection forTestDatabase(String databaseName) {
return new MySQLConnection(JdbcConfiguration.copy(Configuration.fromSystemProperties("database."))
.withDatabase(databaseName)
.build());
}
protected static void addDefaults(Configuration.Builder builder) {
builder.withDefault(JdbcConfiguration.HOSTNAME, "localhost")
.withDefault(JdbcConfiguration.PORT, 3306)
.withDefault(JdbcConfiguration.USER, "mysql")
.withDefault(JdbcConfiguration.PASSWORD, "mysqlpw");
}
protected static ConnectionFactory FACTORY = JdbcConnection.patternBasedFactory("jdbc:mysql://${hostname}:${port}/${dbname}"); protected static ConnectionFactory FACTORY = JdbcConnection.patternBasedFactory("jdbc:mysql://${hostname}:${port}/${dbname}");
/** /**
@ -22,7 +44,7 @@ public class MySQLConnection extends JdbcConnection {
* @param config the configuration; may not be null * @param config the configuration; may not be null
*/ */
public MySQLConnection(Configuration config) { public MySQLConnection(Configuration config) {
super(config, FACTORY); super(config, FACTORY, null, MySQLConnection::addDefaults);
} }
/** /**
@ -33,6 +55,6 @@ public MySQLConnection(Configuration config) {
* @param initialOperations the initial operations that should be run on each new connection; may be null * @param initialOperations the initial operations that should be run on each new connection; may be null
*/ */
public MySQLConnection(Configuration config, Operations initialOperations) { public MySQLConnection(Configuration config, Operations initialOperations) {
super(config, FACTORY, initialOperations); super(config, FACTORY, initialOperations, MySQLConnection::addDefaults);
} }
} }

View File

@ -23,6 +23,7 @@
import io.debezium.relational.Tables; import io.debezium.relational.Tables;
import io.debezium.relational.ddl.DdlParser; import io.debezium.relational.ddl.DdlParser;
import io.debezium.util.IoUtil; import io.debezium.util.IoUtil;
import io.debezium.util.Testing;
public class MySqlDdlParserTest { public class MySqlDdlParserTest {
@ -86,13 +87,13 @@ public void shouldParseCreateTableStatementWithSingleGeneratedColumnAsPrimaryKey
@Test @Test
public void shouldParseCreateStatements() { public void shouldParseCreateStatements() {
parser.parse(readFile("ddl/mysql-test-create.ddl"), tables); parser.parse(readFile("ddl/mysql-test-create.ddl"), tables);
//System.out.println(tables); Testing.print(tables);
} }
@Test @Test
public void shouldParseTestStatements() { public void shouldParseTestStatements() {
parser.parse(readFile("ddl/mysql-test-statements.ddl"), tables); parser.parse(readFile("ddl/mysql-test-statements.ddl"), tables);
System.out.println(tables); Testing.print(tables);
} }
@Test @Test

View File

@ -3,7 +3,7 @@
* *
* Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0 * Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
*/ */
package io.debezium.mysql.ingest; package io.debezium.mysql.source;
import java.sql.SQLException; import java.sql.SQLException;
import java.sql.Types; import java.sql.Types;
@ -12,7 +12,6 @@
import static org.fest.assertions.Assertions.assertThat; import static org.fest.assertions.Assertions.assertThat;
import io.debezium.jdbc.TestDatabase;
import io.debezium.mysql.MySQLConnection; import io.debezium.mysql.MySQLConnection;
import io.debezium.relational.Column; import io.debezium.relational.Column;
import io.debezium.relational.Table; import io.debezium.relational.Table;
@ -27,7 +26,7 @@ public class MetadataIT {
*/ */
@Test @Test
public void shouldLoadMetadataViaJdbc() throws SQLException { public void shouldLoadMetadataViaJdbc() throws SQLException {
try (MySQLConnection conn = new MySQLConnection(TestDatabase.testConfig("readbinlog_test"));) { try (MySQLConnection conn = MySQLConnection.forTestDatabase("readbinlog_test");) {
conn.connect(); conn.connect();
// Set up the table as one transaction and wait to see the events ... // Set up the table as one transaction and wait to see the events ...
conn.execute("DROP TABLE IF EXISTS person", conn.execute("DROP TABLE IF EXISTS person",

View File

@ -3,7 +3,7 @@
* *
* Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0 * Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
*/ */
package io.debezium.mysql.ingest; package io.debezium.mysql.source;
import static org.junit.Assert.fail; import static org.junit.Assert.fail;
@ -45,7 +45,6 @@
import static org.fest.assertions.Assertions.assertThat; import static org.fest.assertions.Assertions.assertThat;
import io.debezium.jdbc.JdbcConfiguration; import io.debezium.jdbc.JdbcConfiguration;
import io.debezium.jdbc.TestDatabase;
import io.debezium.mysql.MySQLConnection; import io.debezium.mysql.MySQLConnection;
public class ReadBinLogIT { public class ReadBinLogIT {
@ -59,7 +58,6 @@ private static final class AnyValue implements Serializable {
private static final Serializable ANY_OBJECT = new AnyValue(); private static final Serializable ANY_OBJECT = new AnyValue();
private JdbcConfiguration config;
private EventQueue counters; private EventQueue counters;
private BinaryLogClient client; private BinaryLogClient client;
private MySQLConnection conn; private MySQLConnection conn;
@ -69,12 +67,13 @@ private static final class AnyValue implements Serializable {
public void beforeEach() throws TimeoutException, IOException, SQLException, InterruptedException { public void beforeEach() throws TimeoutException, IOException, SQLException, InterruptedException {
events.clear(); events.clear();
config = TestDatabase.buildTestConfig().withDatabase("readbinlog_test").build();
// Connect the normal SQL client ... // Connect the normal SQL client ...
conn = new MySQLConnection(config); conn = MySQLConnection.forTestDatabase("readbinlog_test");
conn.connect(); conn.connect();
// Get the configuration that we used ...
JdbcConfiguration config = conn.config();
// Connect the bin log client ... // Connect the bin log client ...
counters = new EventQueue(DEFAULT_TIMEOUT, this::logConsumedEvent, this::logIgnoredEvent); counters = new EventQueue(DEFAULT_TIMEOUT, this::logConsumedEvent, this::logIgnoredEvent);
client = new BinaryLogClient(config.getHostname(), config.getPort(), "replicator", "replpass"); client = new BinaryLogClient(config.getHostname(), config.getPort(), "replicator", "replpass");

View File

@ -3,27 +3,24 @@
* *
* Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0 * Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
*/ */
package io.debezium.ingest.postgresql; package io.debezium.postgresql;
import java.sql.SQLException; import java.sql.SQLException;
import org.junit.Test; import org.junit.Test;
import io.debezium.ingest.postgres.PostgresConnection;
import io.debezium.jdbc.TestDatabase;
public class ConnectionIT { public class ConnectionIT {
@Test @Test
public void shouldConnectToDefaulDatabase() throws SQLException { public void shouldConnectToDefaulDatabase() throws SQLException {
try (PostgresConnection conn = new PostgresConnection( TestDatabase.testConfig("postgres") );) { try (PostgresConnection conn = PostgresConnection.forTestDatabase("postgres");) {
conn.connect(); conn.connect();
} }
} }
@Test @Test
public void shouldConnectToEmptyDatabase() throws SQLException { public void shouldConnectToEmptyDatabase() throws SQLException {
try (PostgresConnection conn = new PostgresConnection( TestDatabase.testConfig("emptydb") );) { try (PostgresConnection conn = PostgresConnection.forTestDatabase("emptydb");) {
conn.connect(); conn.connect();
} }
} }

View File

@ -3,17 +3,39 @@
* *
* Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0 * Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
*/ */
package io.debezium.ingest.postgres; package io.debezium.postgresql;
import io.debezium.config.Configuration; import io.debezium.config.Configuration;
import io.debezium.jdbc.JdbcConfiguration;
import io.debezium.jdbc.JdbcConnection; import io.debezium.jdbc.JdbcConnection;
/** /**
* A utility for working with MySQL connections. * A utility for integration test cases to connect the PostgreSQL server running in the Docker container created by this
* module's build.
*
* @author Randall Hauch * @author Randall Hauch
*/ */
public class PostgresConnection extends JdbcConnection { public class PostgresConnection extends JdbcConnection {
/**
* Obtain a connection instance to the named test database.
*
* @param databaseName the name of the test database
* @return the PostgresConnection instance; never null
*/
public static PostgresConnection forTestDatabase(String databaseName) {
return new PostgresConnection(JdbcConfiguration.copy(Configuration.fromSystemProperties("database."))
.withDatabase(databaseName)
.build());
}
protected static void addDefaults(Configuration.Builder builder) {
builder.withDefault(JdbcConfiguration.HOSTNAME, "localhost")
.withDefault(JdbcConfiguration.PORT, 5432)
.withDefault(JdbcConfiguration.USER, "postgres")
.withDefault(JdbcConfiguration.PASSWORD, "postgres");
}
protected static ConnectionFactory FACTORY = JdbcConnection.patternBasedFactory("jdbc:postgresql://${hostname}:${port}/${dbname}"); protected static ConnectionFactory FACTORY = JdbcConnection.patternBasedFactory("jdbc:postgresql://${hostname}:${port}/${dbname}");
/** /**

10
pom.xml
View File

@ -68,7 +68,7 @@
<version.fest>1.4</version.fest> <version.fest>1.4</version.fest>
<!-- Maven Plugins --> <!-- Maven Plugins -->
<version.resources.plugin>2.4.2</version.resources.plugin> <version.resources.plugin>2.7</version.resources.plugin>
<version.dependency.plugin>2.10</version.dependency.plugin> <version.dependency.plugin>2.10</version.dependency.plugin>
<version.assembly.plugin>2.4</version.assembly.plugin> <version.assembly.plugin>2.4</version.assembly.plugin>
<version.war.plugin>2.5</version.war.plugin> <version.war.plugin>2.5</version.war.plugin>
@ -83,9 +83,10 @@
<modules> <modules>
<module>support/checkstyle</module> <module>support/checkstyle</module>
<module>debezium-core</module> <module>debezium-core</module>
<module>debezium-embedded</module>
<module>debezium-ingest-mysql</module>
<!--module>debezium-ingest-jdbc</module--> <!--module>debezium-ingest-jdbc</module-->
<!--module>debezium-ingest-postgres</module--> <!--module>debezium-ingest-postgres</module-->
<module>debezium-ingest-mysql</module>
<!--module>debezium-kafka-connect</module--> <!--module>debezium-kafka-connect</module-->
</modules> </modules>
<dependencyManagement> <dependencyManagement>
@ -177,6 +178,11 @@
<artifactId>debezium-core</artifactId> <artifactId>debezium-core</artifactId>
<version>${project.version}</version> <version>${project.version}</version>
</dependency> </dependency>
<dependency>
<groupId>io.debezium</groupId>
<artifactId>debezium-embedded</artifactId>
<version>${project.version}</version>
</dependency>
<dependency> <dependency>
<groupId>io.debezium</groupId> <groupId>io.debezium</groupId>
<artifactId>debezium-ingest-jdbc</artifactId> <artifactId>debezium-ingest-jdbc</artifactId>

View File

@ -1,7 +1,13 @@
<?xml version='1.0' encoding='UTF-8'?> <?xml version='1.0' encoding='UTF-8'?>
<project xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd" xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"> <project xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd" xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<modelVersion>4.0.0</modelVersion> <parent>
<groupId>org.jboss</groupId>
<artifactId>jboss-parent</artifactId>
<version>19</version>
<!-- same as parent POM -->
</parent>
<modelVersion>4.0.0</modelVersion>
<groupId>io.debezium</groupId> <groupId>io.debezium</groupId>
<artifactId>debezium-checkstyle</artifactId> <artifactId>debezium-checkstyle</artifactId>
<version>0.1-SNAPSHOT</version> <version>0.1-SNAPSHOT</version>
@ -33,7 +39,6 @@
<plugin> <plugin>
<groupId>org.apache.maven.plugins</groupId> <groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-checkstyle-plugin</artifactId> <artifactId>maven-checkstyle-plugin</artifactId>
<version>2.15</version>
</plugin> </plugin>
<!-- <!--
This is not deployed into a Maven repository. It is merely installed into the local Maven repository This is not deployed into a Maven repository. It is merely installed into the local Maven repository