DBZ-3349 Updated kube-client version to 5.x

This commit is contained in:
jcechace 2021-03-30 11:30:29 +02:00 committed by Jiri Pechanec
parent 47f380b565
commit 3f84a70ff0
8 changed files with 86 additions and 83 deletions

View File

@ -13,10 +13,10 @@
<name>Debezium OpenShift integration test-suite</name>
<properties>
-<version.fabric8.client>4.10.1</version.fabric8.client>
+<version.fabric8.client>5.2.1</version.fabric8.client>
<version.commons.codec>1.14</version.commons.codec>
<version.commons.compress>1.20</version.commons.compress>
-<version.strimzi>0.18.0</version.strimzi>
+<version.strimzi>0.22.0</version.strimzi>
<version.junit5>5.5.1</version.junit5>
<version.junit5.pioneer>0.5.1</version.junit5.pioneer>
<version.assertj>3.11.1</version.assertj>
@ -247,7 +247,7 @@
<dependency>
<groupId>com.github.jcechace.apicurio-model-generator</groupId>
<artifactId>kubernetes-apicurio-registry-model</artifactId>
-<version>v0.0.4</version>
+<version>a5c02b71</version>
</dependency>
</dependencies>
@ -258,7 +258,7 @@
<properties>
<image.push.skip>true</image.push.skip>
<image.version.strimzi>latest</image.version.strimzi>
-<image.base.name>strimzi/kafka:${image.version.strimzi}-kafka-${version.kafka}</image.base.name>
+<image.base.name>quay.io/strimzi/kafka:${image.version.strimzi}-kafka-${version.kafka}</image.base.name>
<version.db2-connector>${project.version}</version.db2-connector>
</properties>

View File

@ -27,10 +27,14 @@
import io.fabric8.kubernetes.api.model.PodList;
import io.fabric8.kubernetes.api.model.Service;
import io.fabric8.kubernetes.api.model.ServiceAccount;
+import io.fabric8.kubernetes.api.model.ServiceAccountBuilder;
+import io.fabric8.kubernetes.api.model.ServiceBuilder;
import io.fabric8.kubernetes.api.model.apps.Deployment;
-import io.fabric8.kubernetes.api.model.networking.NetworkPolicy;
-import io.fabric8.kubernetes.api.model.networking.NetworkPolicyPort;
+import io.fabric8.kubernetes.api.model.networking.v1.NetworkPolicy;
+import io.fabric8.kubernetes.api.model.networking.v1.NetworkPolicyBuilder;
+import io.fabric8.kubernetes.api.model.networking.v1.NetworkPolicyPort;
import io.fabric8.openshift.api.model.Route;
+import io.fabric8.openshift.api.model.RouteBuilder;
import io.fabric8.openshift.client.OpenShiftClient;
/**
@ -56,38 +60,39 @@ public OpenShiftUtils(OpenShiftClient client) {
* @return {@link Route} object for created route
*/
public Route createRoute(String project, String name, String service, String port, Map<String, String> labels) {
-Route route = client.routes().inNamespace(project).createOrReplaceWithNew()
-.withNewMetadata()
-.withName(name)
-.withLabels(labels)
-.endMetadata()
-.withNewSpec()
-.withNewTo()
-.withKind("Service")
-.withName(service)
-.endTo()
-.withNewPort()
-.withNewTargetPort(port)
-.endPort()
-.endSpec()
-.done();
-return route;
+return client.routes().inNamespace(project).createOrReplace(
+new RouteBuilder()
+.withNewMetadata()
+.withName(name)
+.withLabels(labels)
+.endMetadata()
+.withNewSpec()
+.withNewTo()
+.withKind("Service")
+.withName(service)
+.endTo()
+.withNewPort()
+.withNewTargetPort(port)
+.endPort()
+.endSpec()
+.build());
}
public Service createService(String project, String name, String portName, int port, Map<String, String> selector, Map<String, String> labels) {
-Service service = client.services().inNamespace(project).createOrReplaceWithNew()
-.withNewMetadata()
-.withName(name)
-.withLabels(labels)
-.endMetadata()
-.withNewSpec()
-.addNewPort()
-.withProtocol("TCP")
-.withName(portName).withPort(port).withTargetPort(new IntOrString(port))
-.endPort()
-.withSelector(selector)
-.endSpec().done();
-return service;
+return client.services().inNamespace(project).createOrReplace(
+new ServiceBuilder()
+.withNewMetadata()
+.withName(name)
+.withLabels(labels)
+.endMetadata()
+.withNewSpec()
+.addNewPort()
+.withProtocol("TCP")
+.withName(portName).withPort(port).withTargetPort(new IntOrString(port))
+.endPort()
+.withSelector(selector)
+.endSpec()
+.build());
}
/**
@ -99,21 +104,21 @@ public Service createService(String project, String name, String portName, int p
* @return {@link NetworkPolicy} object for created policy
*/
public NetworkPolicy createNetworkPolicy(String project, String name, Map<String, String> podSelectorLabels, List<NetworkPolicyPort> ports) {
-NetworkPolicy policy = client.network().networkPolicies().inNamespace(project)
-.createOrReplaceWithNew()
-.withNewMetadata()
-.withName(name)
-.endMetadata()
-.withNewSpec()
-.withNewPodSelector()
-.withMatchLabels(podSelectorLabels)
-.endPodSelector()
-.addNewIngress()
-.addToPorts(ports.toArray(new NetworkPolicyPort[ports.size()]))
-.endIngress()
-.withPolicyTypes("Ingress")
-.endSpec()
-.done();
+NetworkPolicy policy = client.network().networkPolicies().inNamespace(project).createOrReplace(
+new NetworkPolicyBuilder()
+.withNewMetadata()
+.withName(name)
+.endMetadata()
+.withNewSpec()
+.withNewPodSelector()
+.withMatchLabels(podSelectorLabels)
+.endPodSelector()
+.addNewIngress()
+.addToPorts(ports.toArray(new NetworkPolicyPort[0]))
+.endIngress()
+.withPolicyTypes("Ingress")
+.endSpec()
+.build());
return policy;
}
@ -121,18 +126,18 @@ public NetworkPolicy createNetworkPolicy(String project, String name, Map<String
/**
* Links pull secret to service account
* @param project project where this operation happens
-* @param sa service account name
+* @param account service account name
* @param secret secret name
* @return {@link} Service account object to which this secret was linked
*/
-public ServiceAccount linkPullSecret(String project, String sa, String secret) {
-ServiceAccount serviceAccount = client.serviceAccounts().inNamespace(project).withName(sa).get();
+public ServiceAccount linkPullSecret(String project, String account, String secret) {
+ServiceAccount serviceAccount = client.serviceAccounts().inNamespace(project).withName(account).get();
boolean linked = serviceAccount.getImagePullSecrets().stream().anyMatch(r -> r.getName().equals(secret));
if (!linked) {
-return client.serviceAccounts().inNamespace(project).withName(sa).edit()
+return client.serviceAccounts().inNamespace(project).withName(account).edit(sa -> new ServiceAccountBuilder(sa)
.addNewImagePullSecret().withName(secret).endImagePullSecret()
.addNewSecret().withName(secret).endSecret()
-.done();
+.build());
}
return serviceAccount;
}

View File

@ -13,9 +13,9 @@
import io.fabric8.kubernetes.api.model.apps.Deployment;
import io.fabric8.kubernetes.api.model.apps.DeploymentCondition;
import io.fabric8.kubernetes.api.model.apps.DeploymentStatus;
+import io.fabric8.kubernetes.client.CustomResource;
import io.fabric8.openshift.api.model.DeploymentConfig;
import io.fabric8.openshift.api.model.DeploymentConfigStatus;
-import io.strimzi.api.kafka.model.status.HasStatus;
import io.strimzi.api.kafka.model.status.Status;
/**
@ -32,7 +32,7 @@ public class WaitConditions {
* @param <T> resource type
* @return true if resource is ready
*/
-public static <T extends Status> boolean kafkaReadyCondition(HasStatus<T> resource) {
+public static <T extends Status> boolean kafkaReadyCondition(CustomResource<?, T> resource) {
T status = resource.getStatus();
if (status == null) {
return false;

View File

@ -27,16 +27,15 @@
import io.debezium.testing.openshift.tools.WaitConditions;
import io.fabric8.kubernetes.api.model.IntOrString;
import io.fabric8.kubernetes.api.model.Service;
-import io.fabric8.kubernetes.api.model.networking.NetworkPolicy;
-import io.fabric8.kubernetes.api.model.networking.NetworkPolicyPort;
-import io.fabric8.kubernetes.api.model.networking.NetworkPolicyPortBuilder;
+import io.fabric8.kubernetes.api.model.networking.v1.NetworkPolicy;
+import io.fabric8.kubernetes.api.model.networking.v1.NetworkPolicyPort;
+import io.fabric8.kubernetes.api.model.networking.v1.NetworkPolicyPortBuilder;
import io.fabric8.kubernetes.client.dsl.NonNamespaceOperation;
import io.fabric8.kubernetes.client.dsl.Resource;
import io.fabric8.openshift.api.model.Route;
import io.fabric8.openshift.client.OpenShiftClient;
import io.strimzi.api.kafka.Crds;
import io.strimzi.api.kafka.KafkaConnectorList;
-import io.strimzi.api.kafka.model.DoneableKafkaConnector;
import io.strimzi.api.kafka.model.KafkaConnect;
import io.strimzi.api.kafka.model.KafkaConnector;
@ -86,8 +85,9 @@ public KafkaConnectController(KafkaConnect kafkaConnect, OpenShiftClient ocp, Ok
*/
public void disable() {
LOGGER.info("Disabling KafkaConnect deployment (scaling to ZERO).");
-ocp.apps().deployments().inNamespace(project).withName(this.kafkaConnect.getMetadata().getName() + "-connect")
-.edit().editSpec().withReplicas(0).endSpec().done();
+ocp.apps().deployments().inNamespace(project)
+.withName(this.kafkaConnect.getMetadata().getName() + "-connect")
+.scale(0);
await()
.atMost(30, SECONDS)
.pollDelay(5, SECONDS)
@ -133,8 +133,7 @@ public NetworkPolicy allowServiceAccess() {
.map(p -> new NetworkPolicyPortBuilder().withProtocol("TCP").withPort(p).build())
.collect(Collectors.toList());
-NetworkPolicy policy = ocpUtils.createNetworkPolicy(project, kafkaConnect.getMetadata().getName() + "-allowed", labels, ports);
-return policy;
+return ocpUtils.createNetworkPolicy(project, kafkaConnect.getMetadata().getName() + "-allowed", labels, ports);
}
/**
@ -232,7 +231,7 @@ public KafkaConnector waitForKafkaConnector(String name) throws InterruptedExcep
return kafkaConnectorOperation().withName(name).waitUntilCondition(WaitConditions::kafkaReadyCondition, 5, MINUTES);
}
-private NonNamespaceOperation<KafkaConnector, KafkaConnectorList, DoneableKafkaConnector, Resource<KafkaConnector, DoneableKafkaConnector>> kafkaConnectorOperation() {
+private NonNamespaceOperation<KafkaConnector, KafkaConnectorList, Resource<KafkaConnector>> kafkaConnectorOperation() {
return Crds.kafkaConnectorOperation(ocp).inNamespace(project);
}

View File

@ -20,7 +20,6 @@
import io.fabric8.openshift.client.OpenShiftClient;
import io.strimzi.api.kafka.Crds;
import io.strimzi.api.kafka.KafkaTopicList;
-import io.strimzi.api.kafka.model.DoneableKafkaTopic;
import io.strimzi.api.kafka.model.Kafka;
import io.strimzi.api.kafka.model.KafkaTopic;
import io.strimzi.api.kafka.model.status.ListenerAddress;
@ -85,7 +84,7 @@ public KafkaTopic waitForKafkaTopic(String name) throws InterruptedException {
return topicOperation().withName(name).waitUntilCondition(WaitConditions::kafkaReadyCondition, 5, MINUTES);
}
-private NonNamespaceOperation<KafkaTopic, KafkaTopicList, DoneableKafkaTopic, Resource<KafkaTopic, DoneableKafkaTopic>> topicOperation() {
+private NonNamespaceOperation<KafkaTopic, KafkaTopicList, Resource<KafkaTopic>> topicOperation() {
return Crds.topicOperation(ocp).inNamespace(project);
}
}

View File

@ -23,8 +23,6 @@
import io.strimzi.api.kafka.Crds;
import io.strimzi.api.kafka.KafkaConnectList;
import io.strimzi.api.kafka.KafkaList;
-import io.strimzi.api.kafka.model.DoneableKafka;
-import io.strimzi.api.kafka.model.DoneableKafkaConnect;
import io.strimzi.api.kafka.model.Kafka;
import io.strimzi.api.kafka.model.KafkaConnect;
import io.strimzi.api.kafka.model.KafkaConnectBuilder;
@ -120,11 +118,11 @@ public Secret deployPullSecret(String yamlPath) {
return ocp.secrets().inNamespace(project).createOrReplace(YAML.from(yamlPath, Secret.class));
}
-private NonNamespaceOperation<Kafka, KafkaList, DoneableKafka, Resource<Kafka, DoneableKafka>> kafkaOperation() {
+private NonNamespaceOperation<Kafka, KafkaList, Resource<Kafka>> kafkaOperation() {
return Crds.kafkaOperation(ocp).inNamespace(project);
}
-private NonNamespaceOperation<KafkaConnect, KafkaConnectList, DoneableKafkaConnect, Resource<KafkaConnect, DoneableKafkaConnect>> kafkaConnectOperation() {
+private NonNamespaceOperation<KafkaConnect, KafkaConnectList, Resource<KafkaConnect>> kafkaConnectOperation() {
return Crds.kafkaConnectOperation(ocp).inNamespace(project);
}

View File

@ -12,11 +12,11 @@
import io.apicurio.registry.operator.api.model.ApicurioRegistry;
import io.apicurio.registry.operator.api.model.ApicurioRegistryList;
-import io.apicurio.registry.operator.api.model.DoneableApicurioRegistry;
import io.debezium.testing.openshift.tools.ConfigProperties;
import io.debezium.testing.openshift.tools.OpenShiftUtils;
import io.fabric8.kubernetes.api.model.Service;
-import io.fabric8.kubernetes.api.model.apiextensions.CustomResourceDefinition;
+import io.fabric8.kubernetes.api.model.apiextensions.v1.CustomResourceDefinition;
+import io.fabric8.kubernetes.client.dsl.base.CustomResourceDefinitionContext;
import io.fabric8.openshift.api.model.Route;
import io.fabric8.openshift.client.OpenShiftClient;
@ -84,7 +84,9 @@ private Service getRegistryService() {
* @return true if the CR was found and deleted
*/
public boolean undeployRegistry() {
-CustomResourceDefinition crd = ocp.customResourceDefinitions().load(RegistryDeployer.class.getResourceAsStream("/apicur.io_apicurioregistries_crd.yaml")).get();
-return ocp.customResources(crd, ApicurioRegistry.class, ApicurioRegistryList.class, DoneableApicurioRegistry.class).inNamespace(project).delete(registry);
+CustomResourceDefinition crd = ocp.apiextensions().v1().customResourceDefinitions()
+.load(RegistryDeployer.class.getResourceAsStream("/apicur.io_apicurioregistries_crd.yaml")).get();
+CustomResourceDefinitionContext context = CustomResourceDefinitionContext.fromCrd(crd);
+return ocp.customResources(context, ApicurioRegistry.class, ApicurioRegistryList.class).inNamespace(project).delete(registry);
}
}

View File

@ -18,17 +18,16 @@
import io.apicurio.registry.operator.api.model.ApicurioRegistry;
import io.apicurio.registry.operator.api.model.ApicurioRegistryList;
-import io.apicurio.registry.operator.api.model.DoneableApicurioRegistry;
import io.debezium.testing.openshift.tools.OpenShiftUtils;
import io.debezium.testing.openshift.tools.OperatorController;
import io.debezium.testing.openshift.tools.WaitConditions;
import io.debezium.testing.openshift.tools.YAML;
import io.debezium.testing.openshift.tools.kafka.KafkaController;
-import io.fabric8.kubernetes.api.model.apiextensions.CustomResourceDefinition;
+import io.fabric8.kubernetes.api.model.apiextensions.v1.CustomResourceDefinition;
import io.fabric8.kubernetes.api.model.apps.Deployment;
import io.fabric8.kubernetes.client.dsl.NonNamespaceOperation;
import io.fabric8.kubernetes.client.dsl.Resource;
-import io.fabric8.kubernetes.internal.KubernetesDeserializer;
+import io.fabric8.kubernetes.client.dsl.base.CustomResourceDefinitionContext;
import io.fabric8.openshift.api.model.DeploymentConfig;
import io.fabric8.openshift.client.OpenShiftClient;
@ -82,11 +81,12 @@ public RegistryController deployRegistry(String yamlPath, String storageTopicYam
return new RegistryController(registry, ocp, http);
}
-public NonNamespaceOperation<ApicurioRegistry, ApicurioRegistryList, DoneableApicurioRegistry, Resource<ApicurioRegistry, DoneableApicurioRegistry>> registryOperation() {
-CustomResourceDefinition crd = ocp.customResourceDefinitions().load(RegistryDeployer.class.getResourceAsStream("/crds/apicur.io_apicurioregistries_crd.yaml"))
+public NonNamespaceOperation<ApicurioRegistry, ApicurioRegistryList, Resource<ApicurioRegistry>> registryOperation() {
+CustomResourceDefinition crd = ocp.apiextensions().v1().customResourceDefinitions()
+.load(RegistryDeployer.class.getResourceAsStream("/crds/apicur.io_apicurioregistries_crd.yaml"))
.get();
-KubernetesDeserializer.registerCustomKind("apicur.io/v1alpha1", "ApicurioRegistry", ApicurioRegistry.class);
-return ocp.customResources(crd, ApicurioRegistry.class, ApicurioRegistryList.class, DoneableApicurioRegistry.class).inNamespace(project);
+CustomResourceDefinitionContext context = CustomResourceDefinitionContext.fromCrd(crd);
+return ocp.customResources(context, ApicurioRegistry.class, ApicurioRegistryList.class).inNamespace(project);
}
public ApicurioRegistry waitForRegistry(String name) throws InterruptedException {