DBZ-3349 Updated kube-client version to 5.x

This commit is contained in:
jcechace 2021-03-30 11:30:29 +02:00 committed by Jiri Pechanec
parent 47f380b565
commit 3f84a70ff0
8 changed files with 86 additions and 83 deletions

View File

@@ -13,10 +13,10 @@
<name>Debezium OpenShift integration test-suite</name> <name>Debezium OpenShift integration test-suite</name>
<properties> <properties>
<version.fabric8.client>4.10.1</version.fabric8.client> <version.fabric8.client>5.2.1</version.fabric8.client>
<version.commons.codec>1.14</version.commons.codec> <version.commons.codec>1.14</version.commons.codec>
<version.commons.compress>1.20</version.commons.compress> <version.commons.compress>1.20</version.commons.compress>
<version.strimzi>0.18.0</version.strimzi> <version.strimzi>0.22.0</version.strimzi>
<version.junit5>5.5.1</version.junit5> <version.junit5>5.5.1</version.junit5>
<version.junit5.pioneer>0.5.1</version.junit5.pioneer> <version.junit5.pioneer>0.5.1</version.junit5.pioneer>
<version.assertj>3.11.1</version.assertj> <version.assertj>3.11.1</version.assertj>
@@ -247,7 +247,7 @@
<dependency> <dependency>
<groupId>com.github.jcechace.apicurio-model-generator</groupId> <groupId>com.github.jcechace.apicurio-model-generator</groupId>
<artifactId>kubernetes-apicurio-registry-model</artifactId> <artifactId>kubernetes-apicurio-registry-model</artifactId>
<version>v0.0.4</version> <version>a5c02b71</version>
</dependency> </dependency>
</dependencies> </dependencies>
@@ -258,7 +258,7 @@
<properties> <properties>
<image.push.skip>true</image.push.skip> <image.push.skip>true</image.push.skip>
<image.version.strimzi>latest</image.version.strimzi> <image.version.strimzi>latest</image.version.strimzi>
<image.base.name>strimzi/kafka:${image.version.strimzi}-kafka-${version.kafka}</image.base.name> <image.base.name>quay.io/strimzi/kafka:${image.version.strimzi}-kafka-${version.kafka}</image.base.name>
<version.db2-connector>${project.version}</version.db2-connector> <version.db2-connector>${project.version}</version.db2-connector>
</properties> </properties>

View File

@@ -27,10 +27,14 @@
import io.fabric8.kubernetes.api.model.PodList; import io.fabric8.kubernetes.api.model.PodList;
import io.fabric8.kubernetes.api.model.Service; import io.fabric8.kubernetes.api.model.Service;
import io.fabric8.kubernetes.api.model.ServiceAccount; import io.fabric8.kubernetes.api.model.ServiceAccount;
import io.fabric8.kubernetes.api.model.ServiceAccountBuilder;
import io.fabric8.kubernetes.api.model.ServiceBuilder;
import io.fabric8.kubernetes.api.model.apps.Deployment; import io.fabric8.kubernetes.api.model.apps.Deployment;
import io.fabric8.kubernetes.api.model.networking.NetworkPolicy; import io.fabric8.kubernetes.api.model.networking.v1.NetworkPolicy;
import io.fabric8.kubernetes.api.model.networking.NetworkPolicyPort; import io.fabric8.kubernetes.api.model.networking.v1.NetworkPolicyBuilder;
import io.fabric8.kubernetes.api.model.networking.v1.NetworkPolicyPort;
import io.fabric8.openshift.api.model.Route; import io.fabric8.openshift.api.model.Route;
import io.fabric8.openshift.api.model.RouteBuilder;
import io.fabric8.openshift.client.OpenShiftClient; import io.fabric8.openshift.client.OpenShiftClient;
/** /**
@@ -56,38 +60,39 @@ public OpenShiftUtils(OpenShiftClient client) {
* @return {@link Route} object for created route * @return {@link Route} object for created route
*/ */
public Route createRoute(String project, String name, String service, String port, Map<String, String> labels) { public Route createRoute(String project, String name, String service, String port, Map<String, String> labels) {
Route route = client.routes().inNamespace(project).createOrReplaceWithNew() return client.routes().inNamespace(project).createOrReplace(
.withNewMetadata() new RouteBuilder()
.withName(name) .withNewMetadata()
.withLabels(labels) .withName(name)
.endMetadata() .withLabels(labels)
.withNewSpec() .endMetadata()
.withNewTo() .withNewSpec()
.withKind("Service") .withNewTo()
.withName(service) .withKind("Service")
.endTo() .withName(service)
.withNewPort() .endTo()
.withNewTargetPort(port) .withNewPort()
.endPort() .withNewTargetPort(port)
.endSpec() .endPort()
.done(); .endSpec()
return route; .build());
} }
public Service createService(String project, String name, String portName, int port, Map<String, String> selector, Map<String, String> labels) { public Service createService(String project, String name, String portName, int port, Map<String, String> selector, Map<String, String> labels) {
Service service = client.services().inNamespace(project).createOrReplaceWithNew() return client.services().inNamespace(project).createOrReplace(
.withNewMetadata() new ServiceBuilder()
.withName(name) .withNewMetadata()
.withLabels(labels) .withName(name)
.endMetadata() .withLabels(labels)
.withNewSpec() .endMetadata()
.addNewPort() .withNewSpec()
.withProtocol("TCP") .addNewPort()
.withName(portName).withPort(port).withTargetPort(new IntOrString(port)) .withProtocol("TCP")
.endPort() .withName(portName).withPort(port).withTargetPort(new IntOrString(port))
.withSelector(selector) .endPort()
.endSpec().done(); .withSelector(selector)
return service; .endSpec()
.build());
} }
/** /**
@@ -99,21 +104,21 @@ public Service createService(String project, String name, String portName, int p
* @return {@link NetworkPolicy} object for created policy * @return {@link NetworkPolicy} object for created policy
*/ */
public NetworkPolicy createNetworkPolicy(String project, String name, Map<String, String> podSelectorLabels, List<NetworkPolicyPort> ports) { public NetworkPolicy createNetworkPolicy(String project, String name, Map<String, String> podSelectorLabels, List<NetworkPolicyPort> ports) {
NetworkPolicy policy = client.network().networkPolicies().inNamespace(project) NetworkPolicy policy = client.network().networkPolicies().inNamespace(project).createOrReplace(
.createOrReplaceWithNew() new NetworkPolicyBuilder()
.withNewMetadata() .withNewMetadata()
.withName(name) .withName(name)
.endMetadata() .endMetadata()
.withNewSpec() .withNewSpec()
.withNewPodSelector() .withNewPodSelector()
.withMatchLabels(podSelectorLabels) .withMatchLabels(podSelectorLabels)
.endPodSelector() .endPodSelector()
.addNewIngress() .addNewIngress()
.addToPorts(ports.toArray(new NetworkPolicyPort[ports.size()])) .addToPorts(ports.toArray(new NetworkPolicyPort[0]))
.endIngress() .endIngress()
.withPolicyTypes("Ingress") .withPolicyTypes("Ingress")
.endSpec() .endSpec()
.done(); .build());
return policy; return policy;
} }
@@ -121,18 +126,18 @@ public NetworkPolicy createNetworkPolicy(String project, String name, Map<String
/** /**
* Links pull secret to service account * Links pull secret to service account
* @param project project where this operation happens * @param project project where this operation happens
* @param sa service account name * @param account service account name
* @param secret secret name * @param secret secret name
* @return {@link} Service account object to which this secret was linked * @return {@link} Service account object to which this secret was linked
*/ */
public ServiceAccount linkPullSecret(String project, String sa, String secret) { public ServiceAccount linkPullSecret(String project, String account, String secret) {
ServiceAccount serviceAccount = client.serviceAccounts().inNamespace(project).withName(sa).get(); ServiceAccount serviceAccount = client.serviceAccounts().inNamespace(project).withName(account).get();
boolean linked = serviceAccount.getImagePullSecrets().stream().anyMatch(r -> r.getName().equals(secret)); boolean linked = serviceAccount.getImagePullSecrets().stream().anyMatch(r -> r.getName().equals(secret));
if (!linked) { if (!linked) {
return client.serviceAccounts().inNamespace(project).withName(sa).edit() return client.serviceAccounts().inNamespace(project).withName(account).edit(sa -> new ServiceAccountBuilder(sa)
.addNewImagePullSecret().withName(secret).endImagePullSecret() .addNewImagePullSecret().withName(secret).endImagePullSecret()
.addNewSecret().withName(secret).endSecret() .addNewSecret().withName(secret).endSecret()
.done(); .build());
} }
return serviceAccount; return serviceAccount;
} }

View File

@@ -13,9 +13,9 @@
import io.fabric8.kubernetes.api.model.apps.Deployment; import io.fabric8.kubernetes.api.model.apps.Deployment;
import io.fabric8.kubernetes.api.model.apps.DeploymentCondition; import io.fabric8.kubernetes.api.model.apps.DeploymentCondition;
import io.fabric8.kubernetes.api.model.apps.DeploymentStatus; import io.fabric8.kubernetes.api.model.apps.DeploymentStatus;
import io.fabric8.kubernetes.client.CustomResource;
import io.fabric8.openshift.api.model.DeploymentConfig; import io.fabric8.openshift.api.model.DeploymentConfig;
import io.fabric8.openshift.api.model.DeploymentConfigStatus; import io.fabric8.openshift.api.model.DeploymentConfigStatus;
import io.strimzi.api.kafka.model.status.HasStatus;
import io.strimzi.api.kafka.model.status.Status; import io.strimzi.api.kafka.model.status.Status;
/** /**
@@ -32,7 +32,7 @@ public class WaitConditions {
* @param <T> resource type * @param <T> resource type
* @return true if resource is ready * @return true if resource is ready
*/ */
public static <T extends Status> boolean kafkaReadyCondition(HasStatus<T> resource) { public static <T extends Status> boolean kafkaReadyCondition(CustomResource<?, T> resource) {
T status = resource.getStatus(); T status = resource.getStatus();
if (status == null) { if (status == null) {
return false; return false;

View File

@@ -27,16 +27,15 @@
import io.debezium.testing.openshift.tools.WaitConditions; import io.debezium.testing.openshift.tools.WaitConditions;
import io.fabric8.kubernetes.api.model.IntOrString; import io.fabric8.kubernetes.api.model.IntOrString;
import io.fabric8.kubernetes.api.model.Service; import io.fabric8.kubernetes.api.model.Service;
import io.fabric8.kubernetes.api.model.networking.NetworkPolicy; import io.fabric8.kubernetes.api.model.networking.v1.NetworkPolicy;
import io.fabric8.kubernetes.api.model.networking.NetworkPolicyPort; import io.fabric8.kubernetes.api.model.networking.v1.NetworkPolicyPort;
import io.fabric8.kubernetes.api.model.networking.NetworkPolicyPortBuilder; import io.fabric8.kubernetes.api.model.networking.v1.NetworkPolicyPortBuilder;
import io.fabric8.kubernetes.client.dsl.NonNamespaceOperation; import io.fabric8.kubernetes.client.dsl.NonNamespaceOperation;
import io.fabric8.kubernetes.client.dsl.Resource; import io.fabric8.kubernetes.client.dsl.Resource;
import io.fabric8.openshift.api.model.Route; import io.fabric8.openshift.api.model.Route;
import io.fabric8.openshift.client.OpenShiftClient; import io.fabric8.openshift.client.OpenShiftClient;
import io.strimzi.api.kafka.Crds; import io.strimzi.api.kafka.Crds;
import io.strimzi.api.kafka.KafkaConnectorList; import io.strimzi.api.kafka.KafkaConnectorList;
import io.strimzi.api.kafka.model.DoneableKafkaConnector;
import io.strimzi.api.kafka.model.KafkaConnect; import io.strimzi.api.kafka.model.KafkaConnect;
import io.strimzi.api.kafka.model.KafkaConnector; import io.strimzi.api.kafka.model.KafkaConnector;
@@ -86,8 +85,9 @@ public KafkaConnectController(KafkaConnect kafkaConnect, OpenShiftClient ocp, Ok
*/ */
public void disable() { public void disable() {
LOGGER.info("Disabling KafkaConnect deployment (scaling to ZERO)."); LOGGER.info("Disabling KafkaConnect deployment (scaling to ZERO).");
ocp.apps().deployments().inNamespace(project).withName(this.kafkaConnect.getMetadata().getName() + "-connect") ocp.apps().deployments().inNamespace(project)
.edit().editSpec().withReplicas(0).endSpec().done(); .withName(this.kafkaConnect.getMetadata().getName() + "-connect")
.scale(0);
await() await()
.atMost(30, SECONDS) .atMost(30, SECONDS)
.pollDelay(5, SECONDS) .pollDelay(5, SECONDS)
@@ -133,8 +133,7 @@ public NetworkPolicy allowServiceAccess() {
.map(p -> new NetworkPolicyPortBuilder().withProtocol("TCP").withPort(p).build()) .map(p -> new NetworkPolicyPortBuilder().withProtocol("TCP").withPort(p).build())
.collect(Collectors.toList()); .collect(Collectors.toList());
NetworkPolicy policy = ocpUtils.createNetworkPolicy(project, kafkaConnect.getMetadata().getName() + "-allowed", labels, ports); return ocpUtils.createNetworkPolicy(project, kafkaConnect.getMetadata().getName() + "-allowed", labels, ports);
return policy;
} }
/** /**
@@ -232,7 +231,7 @@ public KafkaConnector waitForKafkaConnector(String name) throws InterruptedExcep
return kafkaConnectorOperation().withName(name).waitUntilCondition(WaitConditions::kafkaReadyCondition, 5, MINUTES); return kafkaConnectorOperation().withName(name).waitUntilCondition(WaitConditions::kafkaReadyCondition, 5, MINUTES);
} }
private NonNamespaceOperation<KafkaConnector, KafkaConnectorList, DoneableKafkaConnector, Resource<KafkaConnector, DoneableKafkaConnector>> kafkaConnectorOperation() { private NonNamespaceOperation<KafkaConnector, KafkaConnectorList, Resource<KafkaConnector>> kafkaConnectorOperation() {
return Crds.kafkaConnectorOperation(ocp).inNamespace(project); return Crds.kafkaConnectorOperation(ocp).inNamespace(project);
} }

View File

@@ -20,7 +20,6 @@
import io.fabric8.openshift.client.OpenShiftClient; import io.fabric8.openshift.client.OpenShiftClient;
import io.strimzi.api.kafka.Crds; import io.strimzi.api.kafka.Crds;
import io.strimzi.api.kafka.KafkaTopicList; import io.strimzi.api.kafka.KafkaTopicList;
import io.strimzi.api.kafka.model.DoneableKafkaTopic;
import io.strimzi.api.kafka.model.Kafka; import io.strimzi.api.kafka.model.Kafka;
import io.strimzi.api.kafka.model.KafkaTopic; import io.strimzi.api.kafka.model.KafkaTopic;
import io.strimzi.api.kafka.model.status.ListenerAddress; import io.strimzi.api.kafka.model.status.ListenerAddress;
@@ -85,7 +84,7 @@ public KafkaTopic waitForKafkaTopic(String name) throws InterruptedException {
return topicOperation().withName(name).waitUntilCondition(WaitConditions::kafkaReadyCondition, 5, MINUTES); return topicOperation().withName(name).waitUntilCondition(WaitConditions::kafkaReadyCondition, 5, MINUTES);
} }
private NonNamespaceOperation<KafkaTopic, KafkaTopicList, DoneableKafkaTopic, Resource<KafkaTopic, DoneableKafkaTopic>> topicOperation() { private NonNamespaceOperation<KafkaTopic, KafkaTopicList, Resource<KafkaTopic>> topicOperation() {
return Crds.topicOperation(ocp).inNamespace(project); return Crds.topicOperation(ocp).inNamespace(project);
} }
} }

View File

@@ -23,8 +23,6 @@
import io.strimzi.api.kafka.Crds; import io.strimzi.api.kafka.Crds;
import io.strimzi.api.kafka.KafkaConnectList; import io.strimzi.api.kafka.KafkaConnectList;
import io.strimzi.api.kafka.KafkaList; import io.strimzi.api.kafka.KafkaList;
import io.strimzi.api.kafka.model.DoneableKafka;
import io.strimzi.api.kafka.model.DoneableKafkaConnect;
import io.strimzi.api.kafka.model.Kafka; import io.strimzi.api.kafka.model.Kafka;
import io.strimzi.api.kafka.model.KafkaConnect; import io.strimzi.api.kafka.model.KafkaConnect;
import io.strimzi.api.kafka.model.KafkaConnectBuilder; import io.strimzi.api.kafka.model.KafkaConnectBuilder;
@@ -120,11 +118,11 @@ public Secret deployPullSecret(String yamlPath) {
return ocp.secrets().inNamespace(project).createOrReplace(YAML.from(yamlPath, Secret.class)); return ocp.secrets().inNamespace(project).createOrReplace(YAML.from(yamlPath, Secret.class));
} }
private NonNamespaceOperation<Kafka, KafkaList, DoneableKafka, Resource<Kafka, DoneableKafka>> kafkaOperation() { private NonNamespaceOperation<Kafka, KafkaList, Resource<Kafka>> kafkaOperation() {
return Crds.kafkaOperation(ocp).inNamespace(project); return Crds.kafkaOperation(ocp).inNamespace(project);
} }
private NonNamespaceOperation<KafkaConnect, KafkaConnectList, DoneableKafkaConnect, Resource<KafkaConnect, DoneableKafkaConnect>> kafkaConnectOperation() { private NonNamespaceOperation<KafkaConnect, KafkaConnectList, Resource<KafkaConnect>> kafkaConnectOperation() {
return Crds.kafkaConnectOperation(ocp).inNamespace(project); return Crds.kafkaConnectOperation(ocp).inNamespace(project);
} }

View File

@@ -12,11 +12,11 @@
import io.apicurio.registry.operator.api.model.ApicurioRegistry; import io.apicurio.registry.operator.api.model.ApicurioRegistry;
import io.apicurio.registry.operator.api.model.ApicurioRegistryList; import io.apicurio.registry.operator.api.model.ApicurioRegistryList;
import io.apicurio.registry.operator.api.model.DoneableApicurioRegistry;
import io.debezium.testing.openshift.tools.ConfigProperties; import io.debezium.testing.openshift.tools.ConfigProperties;
import io.debezium.testing.openshift.tools.OpenShiftUtils; import io.debezium.testing.openshift.tools.OpenShiftUtils;
import io.fabric8.kubernetes.api.model.Service; import io.fabric8.kubernetes.api.model.Service;
import io.fabric8.kubernetes.api.model.apiextensions.CustomResourceDefinition; import io.fabric8.kubernetes.api.model.apiextensions.v1.CustomResourceDefinition;
import io.fabric8.kubernetes.client.dsl.base.CustomResourceDefinitionContext;
import io.fabric8.openshift.api.model.Route; import io.fabric8.openshift.api.model.Route;
import io.fabric8.openshift.client.OpenShiftClient; import io.fabric8.openshift.client.OpenShiftClient;
@@ -84,7 +84,9 @@ private Service getRegistryService() {
* @return true if the CR was found and deleted * @return true if the CR was found and deleted
*/ */
public boolean undeployRegistry() { public boolean undeployRegistry() {
CustomResourceDefinition crd = ocp.customResourceDefinitions().load(RegistryDeployer.class.getResourceAsStream("/apicur.io_apicurioregistries_crd.yaml")).get(); CustomResourceDefinition crd = ocp.apiextensions().v1().customResourceDefinitions()
return ocp.customResources(crd, ApicurioRegistry.class, ApicurioRegistryList.class, DoneableApicurioRegistry.class).inNamespace(project).delete(registry); .load(RegistryDeployer.class.getResourceAsStream("/apicur.io_apicurioregistries_crd.yaml")).get();
CustomResourceDefinitionContext context = CustomResourceDefinitionContext.fromCrd(crd);
return ocp.customResources(context, ApicurioRegistry.class, ApicurioRegistryList.class).inNamespace(project).delete(registry);
} }
} }

View File

@@ -18,17 +18,16 @@
import io.apicurio.registry.operator.api.model.ApicurioRegistry; import io.apicurio.registry.operator.api.model.ApicurioRegistry;
import io.apicurio.registry.operator.api.model.ApicurioRegistryList; import io.apicurio.registry.operator.api.model.ApicurioRegistryList;
import io.apicurio.registry.operator.api.model.DoneableApicurioRegistry;
import io.debezium.testing.openshift.tools.OpenShiftUtils; import io.debezium.testing.openshift.tools.OpenShiftUtils;
import io.debezium.testing.openshift.tools.OperatorController; import io.debezium.testing.openshift.tools.OperatorController;
import io.debezium.testing.openshift.tools.WaitConditions; import io.debezium.testing.openshift.tools.WaitConditions;
import io.debezium.testing.openshift.tools.YAML; import io.debezium.testing.openshift.tools.YAML;
import io.debezium.testing.openshift.tools.kafka.KafkaController; import io.debezium.testing.openshift.tools.kafka.KafkaController;
import io.fabric8.kubernetes.api.model.apiextensions.CustomResourceDefinition; import io.fabric8.kubernetes.api.model.apiextensions.v1.CustomResourceDefinition;
import io.fabric8.kubernetes.api.model.apps.Deployment; import io.fabric8.kubernetes.api.model.apps.Deployment;
import io.fabric8.kubernetes.client.dsl.NonNamespaceOperation; import io.fabric8.kubernetes.client.dsl.NonNamespaceOperation;
import io.fabric8.kubernetes.client.dsl.Resource; import io.fabric8.kubernetes.client.dsl.Resource;
import io.fabric8.kubernetes.internal.KubernetesDeserializer; import io.fabric8.kubernetes.client.dsl.base.CustomResourceDefinitionContext;
import io.fabric8.openshift.api.model.DeploymentConfig; import io.fabric8.openshift.api.model.DeploymentConfig;
import io.fabric8.openshift.client.OpenShiftClient; import io.fabric8.openshift.client.OpenShiftClient;
@@ -82,11 +81,12 @@ public RegistryController deployRegistry(String yamlPath, String storageTopicYam
return new RegistryController(registry, ocp, http); return new RegistryController(registry, ocp, http);
} }
public NonNamespaceOperation<ApicurioRegistry, ApicurioRegistryList, DoneableApicurioRegistry, Resource<ApicurioRegistry, DoneableApicurioRegistry>> registryOperation() { public NonNamespaceOperation<ApicurioRegistry, ApicurioRegistryList, Resource<ApicurioRegistry>> registryOperation() {
CustomResourceDefinition crd = ocp.customResourceDefinitions().load(RegistryDeployer.class.getResourceAsStream("/crds/apicur.io_apicurioregistries_crd.yaml")) CustomResourceDefinition crd = ocp.apiextensions().v1().customResourceDefinitions()
.load(RegistryDeployer.class.getResourceAsStream("/crds/apicur.io_apicurioregistries_crd.yaml"))
.get(); .get();
KubernetesDeserializer.registerCustomKind("apicur.io/v1alpha1", "ApicurioRegistry", ApicurioRegistry.class); CustomResourceDefinitionContext context = CustomResourceDefinitionContext.fromCrd(crd);
return ocp.customResources(crd, ApicurioRegistry.class, ApicurioRegistryList.class, DoneableApicurioRegistry.class).inNamespace(project); return ocp.customResources(context, ApicurioRegistry.class, ApicurioRegistryList.class).inNamespace(project);
} }
public ApicurioRegistry waitForRegistry(String name) throws InterruptedException { public ApicurioRegistry waitForRegistry(String name) throws InterruptedException {