Deprecated auth fields were removed & mTLS certificate creation was implemented
tengu-alt committed Aug 11, 2023
1 parent 4f54849 commit ec15027
Showing 14 changed files with 564 additions and 81 deletions.
85 changes: 39 additions & 46 deletions apis/clusters/v1beta1/kafka_types.go
@@ -79,14 +79,12 @@ type KafkaSpec struct {

// Provision additional dedicated nodes for Apache Zookeeper to run on.
// Zookeeper nodes will be co-located with Kafka if this is not provided
DedicatedZookeeper []*DedicatedZookeeper `json:"dedicatedZookeeper,omitempty"`
ClientBrokerAuthWithMTLS bool `json:"clientBrokerAuthWithMtls,omitempty"`
ClientAuthBrokerWithoutEncryption bool `json:"clientAuthBrokerWithoutEncryption,omitempty"`
ClientAuthBrokerWithEncryption bool `json:"clientAuthBrokerWithEncryption,omitempty"`
KarapaceRestProxy []*KarapaceRestProxy `json:"karapaceRestProxy,omitempty"`
KarapaceSchemaRegistry []*KarapaceSchemaRegistry `json:"karapaceSchemaRegistry,omitempty"`
BundledUseOnly bool `json:"bundledUseOnly,omitempty"`
UserRefs []*UserReference `json:"userRefs,omitempty"`
DedicatedZookeeper []*DedicatedZookeeper `json:"dedicatedZookeeper,omitempty"`
ClientBrokerAuthWithMTLS bool `json:"clientBrokerAuthWithMtls,omitempty"`
KarapaceRestProxy []*KarapaceRestProxy `json:"karapaceRestProxy,omitempty"`
KarapaceSchemaRegistry []*KarapaceSchemaRegistry `json:"karapaceSchemaRegistry,omitempty"`
BundledUseOnly bool `json:"bundledUseOnly,omitempty"`
UserRefs []*UserReference `json:"userRefs,omitempty"`
}
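
With the deprecated flags gone, clientBrokerAuthWithMtls is the only remaining client-broker authentication toggle on the spec. A minimal, hypothetical sketch of constructing the trimmed-down spec in Go (the module path and values are assumptions, not taken from this commit):

package main

import (
    "fmt"

    "github.com/instaclustr/operator/apis/clusters/v1beta1" // assumed module path
)

func main() {
    // Only the mTLS toggle is left; ClientAuthBrokerWithEncryption and
    // ClientAuthBrokerWithoutEncryption no longer exist on KafkaSpec.
    spec := v1beta1.KafkaSpec{
        ClientToClusterEncryption: true,
        ClientBrokerAuthWithMTLS:  true,
    }
    fmt.Printf("%+v\n", spec)
}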

type KafkaDataCentre struct {
@@ -140,27 +138,25 @@ func (k *Kafka) NewPatch() client.Patch {

func (k *KafkaSpec) ToInstAPI() *models.KafkaCluster {
return &models.KafkaCluster{
SchemaRegistry: k.schemaRegistryToInstAPI(),
RestProxy: k.restProxyToInstAPI(),
PCIComplianceMode: k.PCICompliance,
DefaultReplicationFactor: k.ReplicationFactor,
DefaultNumberOfPartitions: k.PartitionsNumber,
TwoFactorDelete: k.TwoFactorDeletesToInstAPI(),
AllowDeleteTopics: k.AllowDeleteTopics,
AutoCreateTopics: k.AutoCreateTopics,
ClientToClusterEncryption: k.ClientToClusterEncryption,
DedicatedZookeeper: k.dedicatedZookeeperToInstAPI(),
PrivateNetworkCluster: k.PrivateNetworkCluster,
Name: k.Name,
SLATier: k.SLATier,
KafkaVersion: k.Version,
DataCentres: k.dcToInstAPI(),
ClientBrokerAuthWithMtls: k.ClientBrokerAuthWithMTLS,
ClientAuthBrokerWithoutEncryption: k.ClientAuthBrokerWithoutEncryption,
ClientAuthBrokerWithEncryption: k.ClientAuthBrokerWithEncryption,
BundledUseOnly: k.BundledUseOnly,
KarapaceRestProxy: k.karapaceRestProxyToInstAPI(),
KarapaceSchemaRegistry: k.karapaceSchemaRegistryToInstAPI(),
SchemaRegistry: k.schemaRegistryToInstAPI(),
RestProxy: k.restProxyToInstAPI(),
PCIComplianceMode: k.PCICompliance,
DefaultReplicationFactor: k.ReplicationFactor,
DefaultNumberOfPartitions: k.PartitionsNumber,
TwoFactorDelete: k.TwoFactorDeletesToInstAPI(),
AllowDeleteTopics: k.AllowDeleteTopics,
AutoCreateTopics: k.AutoCreateTopics,
ClientToClusterEncryption: k.ClientToClusterEncryption,
DedicatedZookeeper: k.dedicatedZookeeperToInstAPI(),
PrivateNetworkCluster: k.PrivateNetworkCluster,
Name: k.Name,
SLATier: k.SLATier,
KafkaVersion: k.Version,
DataCentres: k.dcToInstAPI(),
ClientBrokerAuthWithMtls: k.ClientBrokerAuthWithMTLS,
BundledUseOnly: k.BundledUseOnly,
KarapaceRestProxy: k.karapaceRestProxyToInstAPI(),
KarapaceSchemaRegistry: k.karapaceSchemaRegistryToInstAPI(),
}
}

@@ -283,21 +279,20 @@ func (ks *KafkaSpec) FromInstAPI(iKafka *models.KafkaCluster) KafkaSpec {
SLATier: iKafka.SLATier,
TwoFactorDelete: ks.Cluster.TwoFactorDeleteFromInstAPI(iKafka.TwoFactorDelete),
},
SchemaRegistry: ks.SchemaRegistryFromInstAPI(iKafka.SchemaRegistry),
ReplicationFactor: iKafka.DefaultReplicationFactor,
PartitionsNumber: iKafka.DefaultNumberOfPartitions,
RestProxy: ks.RestProxyFromInstAPI(iKafka.RestProxy),
AllowDeleteTopics: iKafka.AllowDeleteTopics,
AutoCreateTopics: iKafka.AutoCreateTopics,
ClientToClusterEncryption: iKafka.ClientToClusterEncryption,
DataCentres: ks.DCsFromInstAPI(iKafka.DataCentres),
DedicatedZookeeper: ks.DedicatedZookeeperFromInstAPI(iKafka.DedicatedZookeeper),
ClientBrokerAuthWithMTLS: iKafka.ClientBrokerAuthWithMtls,
ClientAuthBrokerWithoutEncryption: iKafka.ClientAuthBrokerWithoutEncryption,
ClientAuthBrokerWithEncryption: iKafka.ClientAuthBrokerWithEncryption,
KarapaceRestProxy: ks.KarapaceRestProxyFromInstAPI(iKafka.KarapaceRestProxy),
KarapaceSchemaRegistry: ks.KarapaceSchemaRegistryFromInstAPI(iKafka.KarapaceSchemaRegistry),
BundledUseOnly: iKafka.BundledUseOnly,
SchemaRegistry: ks.SchemaRegistryFromInstAPI(iKafka.SchemaRegistry),
ReplicationFactor: iKafka.DefaultReplicationFactor,
PartitionsNumber: iKafka.DefaultNumberOfPartitions,
RestProxy: ks.RestProxyFromInstAPI(iKafka.RestProxy),
AllowDeleteTopics: iKafka.AllowDeleteTopics,
AutoCreateTopics: iKafka.AutoCreateTopics,
ClientToClusterEncryption: iKafka.ClientToClusterEncryption,
DataCentres: ks.DCsFromInstAPI(iKafka.DataCentres),
DedicatedZookeeper: ks.DedicatedZookeeperFromInstAPI(iKafka.DedicatedZookeeper),
ClientBrokerAuthWithMTLS: iKafka.ClientBrokerAuthWithMtls,

KarapaceRestProxy: ks.KarapaceRestProxyFromInstAPI(iKafka.KarapaceRestProxy),
KarapaceSchemaRegistry: ks.KarapaceSchemaRegistryFromInstAPI(iKafka.KarapaceSchemaRegistry),
BundledUseOnly: iKafka.BundledUseOnly,
}
}

@@ -399,8 +394,6 @@ func (a *KafkaSpec) IsEqual(b KafkaSpec) bool {
a.AutoCreateTopics == b.AutoCreateTopics &&
a.ClientToClusterEncryption == b.ClientToClusterEncryption &&
a.ClientBrokerAuthWithMTLS == b.ClientBrokerAuthWithMTLS &&
a.ClientAuthBrokerWithoutEncryption == b.ClientAuthBrokerWithoutEncryption &&
a.ClientAuthBrokerWithEncryption == b.ClientAuthBrokerWithEncryption &&
a.BundledUseOnly == b.BundledUseOnly &&
isKafkaAddonsEqual[SchemaRegistry](a.SchemaRegistry, b.SchemaRegistry) &&
isKafkaAddonsEqual[RestProxy](a.RestProxy, b.RestProxy) &&
36 changes: 16 additions & 20 deletions apis/clusters/v1beta1/kafka_webhook.go
@@ -318,31 +318,27 @@ type immutableKafkaFields struct {
}

type specificKafkaFields struct {
replicationFactor int
partitionsNumber int
allowDeleteTopics bool
autoCreateTopics bool
clientToClusterEncryption bool
bundledUseOnly bool
privateNetworkCluster bool
clientAuthBrokerWithEncryption bool
clientAuthBrokerWithoutEncryption bool
clientBrokerAuthWithMtls bool
replicationFactor int
partitionsNumber int
allowDeleteTopics bool
autoCreateTopics bool
clientToClusterEncryption bool
bundledUseOnly bool
privateNetworkCluster bool
clientBrokerAuthWithMtls bool
}

func (ks *KafkaSpec) newKafkaImmutableFields() *immutableKafkaFields {
return &immutableKafkaFields{
specificFields: specificKafkaFields{
replicationFactor: ks.ReplicationFactor,
partitionsNumber: ks.PartitionsNumber,
allowDeleteTopics: ks.AllowDeleteTopics,
autoCreateTopics: ks.AutoCreateTopics,
clientToClusterEncryption: ks.ClientToClusterEncryption,
bundledUseOnly: ks.BundledUseOnly,
privateNetworkCluster: ks.PrivateNetworkCluster,
clientAuthBrokerWithEncryption: ks.ClientAuthBrokerWithEncryption,
clientAuthBrokerWithoutEncryption: ks.ClientAuthBrokerWithoutEncryption,
clientBrokerAuthWithMtls: ks.ClientBrokerAuthWithMTLS,
replicationFactor: ks.ReplicationFactor,
partitionsNumber: ks.PartitionsNumber,
allowDeleteTopics: ks.AllowDeleteTopics,
autoCreateTopics: ks.AutoCreateTopics,
clientToClusterEncryption: ks.ClientToClusterEncryption,
bundledUseOnly: ks.BundledUseOnly,
privateNetworkCluster: ks.PrivateNetworkCluster,
clientBrokerAuthWithMtls: ks.ClientBrokerAuthWithMTLS,
},
cluster: ks.Cluster.newImmutableFields(),
}
41 changes: 38 additions & 3 deletions apis/kafkamanagement/v1beta1/kafkauser_types.go
@@ -17,6 +17,7 @@ limitations under the License.
package v1beta1

import (
k8sCore "k8s.io/api/core/v1"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"sigs.k8s.io/controller-runtime/pkg/client"

@@ -26,9 +27,10 @@ import (

// KafkaUserSpec defines the desired state of KafkaUser
type KafkaUserSpec struct {
Options *KafkaUserOptions `json:"options"`
SecretRef *v1beta1.SecretReference `json:"secretRef"`
InitialPermissions string `json:"initialPermissions"`
Options *KafkaUserOptions `json:"options"`
SecretRef *v1beta1.SecretReference `json:"secretRef"`
CertificateCreationData []*CertificateCreationData `json:"certificateCreationData,omitempty"`
InitialPermissions string `json:"initialPermissions"`
}

type KafkaUserOptions struct {
@@ -41,6 +43,24 @@ type KafkaUserStatus struct {
ClustersEvents map[string]string `json:"clustersEvents,omitempty"`
}

type CertificateData struct {
ID string `json:"id,omitempty"`
ExpiryDate string `json:"expiryDate,omitempty"`
SignedCertificate string `json:"signedCertificate,omitempty"`
}

type CertificateCreationData struct {
SecretName string `json:"secretName"`
SecretNamespace string `json:"secretNamespace"`
ClusterID string `json:"clusterId,omitempty"`
CSR string `json:"csr,omitempty"`
ValidPeriod int `json:"validPeriod,omitempty"`
CommonName string `json:"commonName,omitempty"`
Country string `json:"country,omitempty"`
Organization string `json:"organization,omitempty"`
OrganizationalUnit string `json:"organizationalUnit,omitempty"`
}
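
The struct above lets a user either supply a ready-made CSR or leave only the subject fields (commonName, country, organization, organizationalUnit) for the operator to work with. As a hedged illustration only (not code from this commit), a PEM-encoded value for the csr field could be produced like this, reusing the values from the commented sample manifest further down:

package main

import (
    "crypto/rand"
    "crypto/rsa"
    "crypto/x509"
    "crypto/x509/pkix"
    "encoding/pem"
    "fmt"
)

func main() {
    // Key size and subject values are illustrative; the subject mirrors the
    // commonName/country/organization/organizationalUnit spec fields.
    key, err := rsa.GenerateKey(rand.Reader, 2048)
    if err != nil {
        panic(err)
    }

    template := x509.CertificateRequest{
        Subject: pkix.Name{
            CommonName:         "Sanch-two",
            Country:            []string{"VN"},
            Organization:       []string{"Instaclustr"},
            OrganizationalUnit: []string{"IT"},
        },
        SignatureAlgorithm: x509.SHA256WithRSA,
    }

    der, err := x509.CreateCertificateRequest(rand.Reader, &template, key)
    if err != nil {
        panic(err)
    }

    // This PEM block is what could be placed into certificateCreationData[].csr.
    fmt.Println(string(pem.EncodeToMemory(&pem.Block{Type: "CERTIFICATE REQUEST", Bytes: der})))
}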

//+kubebuilder:object:root=true
//+kubebuilder:subresource:status

@@ -83,6 +103,21 @@ func (ku *KafkaUser) GetID(clusterID, name string) string {
return clusterID + "_" + name
}

func (ku *KafkaUser) NewCertificateSecret(name, namespace string) *k8sCore.Secret {
return &k8sCore.Secret{
TypeMeta: metav1.TypeMeta{
Kind: models.SecretKind,
APIVersion: models.K8sAPIVersionV1,
},
ObjectMeta: metav1.ObjectMeta{
Name: name,
Namespace: namespace,
},

StringData: map[string]string{},
}
}
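
NewCertificateSecret only scaffolds an empty Secret with an initialized StringData map; presumably the kafkauser controller then fills it with the signed certificate data returned by the Instaclustr API (see CertificateData above) and creates it in the cluster. A hedged sketch of such a step — the helper and its key names are assumptions, not code from this commit:

package main

import (
    "context"

    k8sCore "k8s.io/api/core/v1"
    "sigs.k8s.io/controller-runtime/pkg/client"
)

// storeSignedCertificate is a hypothetical helper: it copies the fields of a
// CertificateData-style API response into the Secret built by
// NewCertificateSecret and persists it via the controller-runtime client.
func storeSignedCertificate(ctx context.Context, c client.Client, secret *k8sCore.Secret, id, expiryDate, signedCert string) error {
    secret.StringData["id"] = id
    secret.StringData["expiryDate"] = expiryDate
    secret.StringData["signedCertificate"] = signedCert
    return c.Create(ctx, secret)
}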

func init() {
SchemeBuilder.Register(&KafkaUser{}, &KafkaUserList{})
}
41 changes: 41 additions & 0 deletions apis/kafkamanagement/v1beta1/zz_generated.deepcopy.go

Some generated files are not rendered by default.

4 changes: 0 additions & 4 deletions config/crd/bases/clusters.instaclustr.com_kafkas.yaml
@@ -41,10 +41,6 @@ spec:
type: boolean
bundledUseOnly:
type: boolean
clientAuthBrokerWithEncryption:
type: boolean
clientAuthBrokerWithoutEncryption:
type: boolean
clientBrokerAuthWithMtls:
type: boolean
clientToClusterEncryption:
26 changes: 26 additions & 0 deletions config/crd/bases/kafkamanagement.instaclustr.com_kafkausers.yaml
@@ -35,6 +35,32 @@ spec:
spec:
description: KafkaUserSpec defines the desired state of KafkaUser
properties:
certificateCreationData:
items:
properties:
clusterId:
type: string
commonName:
type: string
country:
type: string
csr:
type: string
organization:
type: string
organizationalUnit:
type: string
secretName:
type: string
secretNamespace:
type: string
validPeriod:
type: integer
required:
- secretName
- secretNamespace
type: object
type: array
initialPermissions:
type: string
options:
8 changes: 3 additions & 5 deletions config/samples/clusters_v1beta1_kafka.yaml
@@ -1,9 +1,9 @@
apiVersion: clusters.instaclustr.com/v1beta1
kind: Kafka
metadata:
name: kafka-two
name: kafka
spec:
name: "Kafka-example"
name: "oleksandr-Kafka"
version: "3.3.1"
pciCompliance: false
replicationFactor: 3
@@ -14,9 +14,7 @@ spec:
privateNetworkCluster: false
slaTier: "NON_PRODUCTION"
# bundledUseOnly: true
# clientAuthBrokerWithEncryption: true
# clientAuthBrokerWithoutEncryption: true
# clientBrokerAuthWithMtls: true
clientBrokerAuthWithMtls: true
# dedicatedZookeeper:
# - nodeSize: "KDZ-DEV-t4g.small-30"
# nodesNumber: 3
11 changes: 10 additions & 1 deletion config/samples/kafkamanagement_v1beta1_kafkauser.yaml
@@ -14,7 +14,16 @@ spec:
secretRef:
name: "secret-test"
namespace: "default"
# certificateCreationData:
# - secretName: "cert-secret-teste"
# secretNamespace: "default"
# clusterId: "5d89fce4-951c-4e19-a76b-b28538b3c4a3"
# commonName: "Sanch-two"
# country: "VN"
# organization: "Instaclustr"
# organizationalUnit: "IT"
# validPeriod: 6
initialPermissions: "standard"
options:
overrideExistingUser: true
saslScramMechanism: "SCRAM-SHA-256"