Commit

Deprecated auth fields were removed & mTLS certificate creation was implemented

tengu-alt committed Aug 21, 2023
1 parent 4f54849 commit 5cab976
Showing 15 changed files with 724 additions and 79 deletions.
84 changes: 38 additions & 46 deletions apis/clusters/v1beta1/kafka_types.go
@@ -79,14 +79,12 @@ type KafkaSpec struct {

// Provision additional dedicated nodes for Apache Zookeeper to run on.
// Zookeeper nodes will be co-located with Kafka if this is not provided
DedicatedZookeeper []*DedicatedZookeeper `json:"dedicatedZookeeper,omitempty"`
ClientBrokerAuthWithMTLS bool `json:"clientBrokerAuthWithMtls,omitempty"`
ClientAuthBrokerWithoutEncryption bool `json:"clientAuthBrokerWithoutEncryption,omitempty"`
ClientAuthBrokerWithEncryption bool `json:"clientAuthBrokerWithEncryption,omitempty"`
KarapaceRestProxy []*KarapaceRestProxy `json:"karapaceRestProxy,omitempty"`
KarapaceSchemaRegistry []*KarapaceSchemaRegistry `json:"karapaceSchemaRegistry,omitempty"`
BundledUseOnly bool `json:"bundledUseOnly,omitempty"`
UserRefs []*UserReference `json:"userRefs,omitempty"`
DedicatedZookeeper []*DedicatedZookeeper `json:"dedicatedZookeeper,omitempty"`
ClientBrokerAuthWithMTLS bool `json:"clientBrokerAuthWithMtls,omitempty"`
KarapaceRestProxy []*KarapaceRestProxy `json:"karapaceRestProxy,omitempty"`
KarapaceSchemaRegistry []*KarapaceSchemaRegistry `json:"karapaceSchemaRegistry,omitempty"`
BundledUseOnly bool `json:"bundledUseOnly,omitempty"`
UserRefs []*UserReference `json:"userRefs,omitempty"`
}

type KafkaDataCentre struct {
@@ -140,27 +138,25 @@ func (k *Kafka) NewPatch() client.Patch {

func (k *KafkaSpec) ToInstAPI() *models.KafkaCluster {
return &models.KafkaCluster{
SchemaRegistry: k.schemaRegistryToInstAPI(),
RestProxy: k.restProxyToInstAPI(),
PCIComplianceMode: k.PCICompliance,
DefaultReplicationFactor: k.ReplicationFactor,
DefaultNumberOfPartitions: k.PartitionsNumber,
TwoFactorDelete: k.TwoFactorDeletesToInstAPI(),
AllowDeleteTopics: k.AllowDeleteTopics,
AutoCreateTopics: k.AutoCreateTopics,
ClientToClusterEncryption: k.ClientToClusterEncryption,
DedicatedZookeeper: k.dedicatedZookeeperToInstAPI(),
PrivateNetworkCluster: k.PrivateNetworkCluster,
Name: k.Name,
SLATier: k.SLATier,
KafkaVersion: k.Version,
DataCentres: k.dcToInstAPI(),
ClientBrokerAuthWithMtls: k.ClientBrokerAuthWithMTLS,
ClientAuthBrokerWithoutEncryption: k.ClientAuthBrokerWithoutEncryption,
ClientAuthBrokerWithEncryption: k.ClientAuthBrokerWithEncryption,
BundledUseOnly: k.BundledUseOnly,
KarapaceRestProxy: k.karapaceRestProxyToInstAPI(),
KarapaceSchemaRegistry: k.karapaceSchemaRegistryToInstAPI(),
SchemaRegistry: k.schemaRegistryToInstAPI(),
RestProxy: k.restProxyToInstAPI(),
PCIComplianceMode: k.PCICompliance,
DefaultReplicationFactor: k.ReplicationFactor,
DefaultNumberOfPartitions: k.PartitionsNumber,
TwoFactorDelete: k.TwoFactorDeletesToInstAPI(),
AllowDeleteTopics: k.AllowDeleteTopics,
AutoCreateTopics: k.AutoCreateTopics,
ClientToClusterEncryption: k.ClientToClusterEncryption,
DedicatedZookeeper: k.dedicatedZookeeperToInstAPI(),
PrivateNetworkCluster: k.PrivateNetworkCluster,
Name: k.Name,
SLATier: k.SLATier,
KafkaVersion: k.Version,
DataCentres: k.dcToInstAPI(),
ClientBrokerAuthWithMtls: k.ClientBrokerAuthWithMTLS,
BundledUseOnly: k.BundledUseOnly,
KarapaceRestProxy: k.karapaceRestProxyToInstAPI(),
KarapaceSchemaRegistry: k.karapaceSchemaRegistryToInstAPI(),
}
}

@@ -283,21 +279,19 @@ func (ks *KafkaSpec) FromInstAPI(iKafka *models.KafkaCluster) KafkaSpec {
SLATier: iKafka.SLATier,
TwoFactorDelete: ks.Cluster.TwoFactorDeleteFromInstAPI(iKafka.TwoFactorDelete),
},
SchemaRegistry: ks.SchemaRegistryFromInstAPI(iKafka.SchemaRegistry),
ReplicationFactor: iKafka.DefaultReplicationFactor,
PartitionsNumber: iKafka.DefaultNumberOfPartitions,
RestProxy: ks.RestProxyFromInstAPI(iKafka.RestProxy),
AllowDeleteTopics: iKafka.AllowDeleteTopics,
AutoCreateTopics: iKafka.AutoCreateTopics,
ClientToClusterEncryption: iKafka.ClientToClusterEncryption,
DataCentres: ks.DCsFromInstAPI(iKafka.DataCentres),
DedicatedZookeeper: ks.DedicatedZookeeperFromInstAPI(iKafka.DedicatedZookeeper),
ClientBrokerAuthWithMTLS: iKafka.ClientBrokerAuthWithMtls,
ClientAuthBrokerWithoutEncryption: iKafka.ClientAuthBrokerWithoutEncryption,
ClientAuthBrokerWithEncryption: iKafka.ClientAuthBrokerWithEncryption,
KarapaceRestProxy: ks.KarapaceRestProxyFromInstAPI(iKafka.KarapaceRestProxy),
KarapaceSchemaRegistry: ks.KarapaceSchemaRegistryFromInstAPI(iKafka.KarapaceSchemaRegistry),
BundledUseOnly: iKafka.BundledUseOnly,
SchemaRegistry: ks.SchemaRegistryFromInstAPI(iKafka.SchemaRegistry),
ReplicationFactor: iKafka.DefaultReplicationFactor,
PartitionsNumber: iKafka.DefaultNumberOfPartitions,
RestProxy: ks.RestProxyFromInstAPI(iKafka.RestProxy),
AllowDeleteTopics: iKafka.AllowDeleteTopics,
AutoCreateTopics: iKafka.AutoCreateTopics,
ClientToClusterEncryption: iKafka.ClientToClusterEncryption,
DataCentres: ks.DCsFromInstAPI(iKafka.DataCentres),
DedicatedZookeeper: ks.DedicatedZookeeperFromInstAPI(iKafka.DedicatedZookeeper),
ClientBrokerAuthWithMTLS: iKafka.ClientBrokerAuthWithMtls,
KarapaceRestProxy: ks.KarapaceRestProxyFromInstAPI(iKafka.KarapaceRestProxy),
KarapaceSchemaRegistry: ks.KarapaceSchemaRegistryFromInstAPI(iKafka.KarapaceSchemaRegistry),
BundledUseOnly: iKafka.BundledUseOnly,
}
}

@@ -399,8 +393,6 @@ func (a *KafkaSpec) IsEqual(b KafkaSpec) bool {
a.AutoCreateTopics == b.AutoCreateTopics &&
a.ClientToClusterEncryption == b.ClientToClusterEncryption &&
a.ClientBrokerAuthWithMTLS == b.ClientBrokerAuthWithMTLS &&
a.ClientAuthBrokerWithoutEncryption == b.ClientAuthBrokerWithoutEncryption &&
a.ClientAuthBrokerWithEncryption == b.ClientAuthBrokerWithEncryption &&
a.BundledUseOnly == b.BundledUseOnly &&
isKafkaAddonsEqual[SchemaRegistry](a.SchemaRegistry, b.SchemaRegistry) &&
isKafkaAddonsEqual[RestProxy](a.RestProxy, b.RestProxy) &&
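
For reference, a minimal sketch of how client-broker authentication now looks on KafkaSpec: only the clientBrokerAuthWithMtls flag remains, since the deprecated clientAuthBrokerWithEncryption and clientAuthBrokerWithoutEncryption fields are gone. The field values and the import path below are illustrative assumptions, not part of this commit.

package main

import (
	v1beta1 "github.com/instaclustr/operator/apis/clusters/v1beta1" // import path assumed for illustration
)

func main() {
	spec := v1beta1.KafkaSpec{
		// Cluster, data centre and add-on fields omitted for brevity.
		ClientToClusterEncryption: true,
		// The two deprecated broker-auth flags no longer exist on the type;
		// mTLS client-broker authentication is toggled with this single field.
		ClientBrokerAuthWithMTLS: true,
	}
	_ = spec
}
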
36 changes: 16 additions & 20 deletions apis/clusters/v1beta1/kafka_webhook.go
@@ -318,31 +318,27 @@ type immutableKafkaFields struct {
}

type specificKafkaFields struct {
replicationFactor int
partitionsNumber int
allowDeleteTopics bool
autoCreateTopics bool
clientToClusterEncryption bool
bundledUseOnly bool
privateNetworkCluster bool
clientAuthBrokerWithEncryption bool
clientAuthBrokerWithoutEncryption bool
clientBrokerAuthWithMtls bool
replicationFactor int
partitionsNumber int
allowDeleteTopics bool
autoCreateTopics bool
clientToClusterEncryption bool
bundledUseOnly bool
privateNetworkCluster bool
clientBrokerAuthWithMtls bool
}

func (ks *KafkaSpec) newKafkaImmutableFields() *immutableKafkaFields {
return &immutableKafkaFields{
specificFields: specificKafkaFields{
replicationFactor: ks.ReplicationFactor,
partitionsNumber: ks.PartitionsNumber,
allowDeleteTopics: ks.AllowDeleteTopics,
autoCreateTopics: ks.AutoCreateTopics,
clientToClusterEncryption: ks.ClientToClusterEncryption,
bundledUseOnly: ks.BundledUseOnly,
privateNetworkCluster: ks.PrivateNetworkCluster,
clientAuthBrokerWithEncryption: ks.ClientAuthBrokerWithEncryption,
clientAuthBrokerWithoutEncryption: ks.ClientAuthBrokerWithoutEncryption,
clientBrokerAuthWithMtls: ks.ClientBrokerAuthWithMTLS,
replicationFactor: ks.ReplicationFactor,
partitionsNumber: ks.PartitionsNumber,
allowDeleteTopics: ks.AllowDeleteTopics,
autoCreateTopics: ks.AutoCreateTopics,
clientToClusterEncryption: ks.ClientToClusterEncryption,
bundledUseOnly: ks.BundledUseOnly,
privateNetworkCluster: ks.PrivateNetworkCluster,
clientBrokerAuthWithMtls: ks.ClientBrokerAuthWithMTLS,
},
cluster: ks.Cluster.newImmutableFields(),
}
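
These snapshots feed the update webhook's immutability check: if any listed field differs between the stored spec and the incoming spec, the update should be rejected. The validation code itself is outside this diff; the helper below is only a hedged sketch of that comparison (the function name and error text are assumptions, and it presumes every snapshot field is comparable).

// Hedged sketch, assumed to live in the same v1beta1 package (needs "fmt").
func validateImmutableKafkaFields(oldSpec, newSpec *KafkaSpec) error {
	oldImmutable := oldSpec.newKafkaImmutableFields()
	newImmutable := newSpec.newKafkaImmutableFields()
	// Plain struct comparison, assuming all snapshot fields are comparable scalars.
	if *oldImmutable != *newImmutable {
		return fmt.Errorf("immutable fields have been changed, old: %+v, new: %+v",
			oldImmutable, newImmutable)
	}
	return nil
}
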
51 changes: 48 additions & 3 deletions apis/kafkamanagement/v1beta1/kafkauser_types.go
@@ -17,6 +17,7 @@ limitations under the License.
package v1beta1

import (
k8sCore "k8s.io/api/core/v1"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"sigs.k8s.io/controller-runtime/pkg/client"

@@ -26,9 +27,10 @@ import (

// KafkaUserSpec defines the desired state of KafkaUser
type KafkaUserSpec struct {
Options *KafkaUserOptions `json:"options"`
SecretRef *v1beta1.SecretReference `json:"secretRef"`
InitialPermissions string `json:"initialPermissions"`
Options *KafkaUserOptions `json:"options"`
SecretRef *v1beta1.SecretReference `json:"secretRef"`
CertificateRequests []*CertificateRequest `json:"certificateRequests,omitempty"`
InitialPermissions string `json:"initialPermissions"`
}

type KafkaUserOptions struct {
@@ -41,6 +43,25 @@ type KafkaUserStatus struct {
ClustersEvents map[string]string `json:"clustersEvents,omitempty"`
}

type Certificate struct {
ID string `json:"id,omitempty"`
ExpiryDate string `json:"expiryDate,omitempty"`
SignedCertificate string `json:"signedCertificate,omitempty"`
}

type CertificateRequest struct {
SecretName string `json:"secretName"`
SecretNamespace string `json:"secretNamespace"`
ClusterID string `json:"clusterId"`
CSR string `json:"csr,omitempty"`
ValidPeriod int `json:"validPeriod"`
CommonName string `json:"commonName"`
Country string `json:"country"`
Organization string `json:"organization"`
OrganizationalUnit string `json:"organizationalUnit"`
AutoRenew bool `json:"autoRenew"`
}

//+kubebuilder:object:root=true
//+kubebuilder:subresource:status

@@ -83,6 +104,21 @@ func (ku *KafkaUser) GetID(clusterID, name string) string {
return clusterID + "_" + name
}

func (ku *KafkaUser) NewCertificateSecret(name, namespace string) *k8sCore.Secret {
return &k8sCore.Secret{
TypeMeta: metav1.TypeMeta{
Kind: models.SecretKind,
APIVersion: models.K8sAPIVersionV1,
},
ObjectMeta: metav1.ObjectMeta{
Name: name,
Namespace: namespace,
},

StringData: map[string]string{},
}
}

func init() {
SchemeBuilder.Register(&KafkaUser{}, &KafkaUserList{})
}
@@ -104,3 +140,12 @@ func (ko *KafkaUserOptions) ToInstAPI() *models.KafkaUserOptions {
}

}

func (cr *CertificateRequest) ToInstAPI(username string) *models.CertificateRequest {
return &models.CertificateRequest{
ClusterID: cr.ClusterID,
CSR: cr.CSR,
KafkaUsername: username,
ValidPeriod: cr.ValidPeriod,
}
}
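
Taken together, the additions in this file outline the mTLS certificate flow: each CertificateRequest on a KafkaUser is converted with ToInstAPI and submitted for signing, and the returned certificate is stored in the Secret produced by NewCertificateSecret. The reconciler that drives this is not shown here; the fragment below is a hedged illustration only, and signCertificate, k8sClient, ctx, username, and the secret data keys are assumed names.

// Hedged sketch of the expected reconciliation step (illustrative, not from this commit).
for _, certRequest := range kafkaUser.Spec.CertificateRequests {
	// signCertificate stands in for the Instaclustr API call that signs the CSR
	// and returns a v1beta1.Certificate; it is an assumed helper, not a real API.
	signedCert, err := signCertificate(certRequest.ToInstAPI(username))
	if err != nil {
		return err
	}
	secret := kafkaUser.NewCertificateSecret(certRequest.SecretName, certRequest.SecretNamespace)
	secret.StringData["id"] = signedCert.ID
	secret.StringData["expiryDate"] = signedCert.ExpiryDate
	secret.StringData["signedCertificate"] = signedCert.SignedCertificate
	if err := k8sClient.Create(ctx, secret); err != nil {
		return err
	}
}
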
41 changes: 41 additions & 0 deletions apis/kafkamanagement/v1beta1/zz_generated.deepcopy.go

Some generated files are not rendered by default.

4 changes: 0 additions & 4 deletions config/crd/bases/clusters.instaclustr.com_kafkas.yaml
@@ -41,10 +41,6 @@ spec:
type: boolean
bundledUseOnly:
type: boolean
clientAuthBrokerWithEncryption:
type: boolean
clientAuthBrokerWithoutEncryption:
type: boolean
clientBrokerAuthWithMtls:
type: boolean
clientToClusterEncryption:
35 changes: 35 additions & 0 deletions config/crd/bases/kafkamanagement.instaclustr.com_kafkausers.yaml
@@ -35,6 +35,41 @@ spec:
spec:
description: KafkaUserSpec defines the desired state of KafkaUser
properties:
certificateRequests:
items:
properties:
autoRenew:
type: boolean
clusterId:
type: string
commonName:
type: string
country:
type: string
csr:
type: string
organization:
type: string
organizationalUnit:
type: string
secretName:
type: string
secretNamespace:
type: string
validPeriod:
type: integer
required:
- autoRenew
- clusterId
- commonName
- country
- organization
- organizationalUnit
- secretName
- secretNamespace
- validPeriod
type: object
type: array
initialPermissions:
type: string
options:
4 changes: 1 addition & 3 deletions config/samples/clusters_v1beta1_kafka.yaml
@@ -1,7 +1,7 @@
apiVersion: clusters.instaclustr.com/v1beta1
kind: Kafka
metadata:
name: kafka-two
name: kafka
spec:
name: "Kafka-example"
version: "3.3.1"
@@ -14,8 +14,6 @@ spec:
privateNetworkCluster: false
slaTier: "NON_PRODUCTION"
# bundledUseOnly: true
# clientAuthBrokerWithEncryption: true
# clientAuthBrokerWithoutEncryption: true
# clientBrokerAuthWithMtls: true
# dedicatedZookeeper:
# - nodeSize: "KDZ-DEV-t4g.small-30"
21 changes: 20 additions & 1 deletion config/samples/kafkamanagement_v1beta1_kafkauser.yaml
@@ -14,7 +14,26 @@ spec:
secretRef:
name: "secret-test"
namespace: "default"
certificateRequests:
# - secretName: "cert-secret-test"
# secretNamespace: "default"
# clusterId: "9f422a96-5e53-4e09-b789-efc852fd9d4a"
# commonName: "Sanch-two"
# country: "VN"
# organization: "Instaclustr"
# organizationalUnit: "IT"
# validPeriod: 6
# autoRenew: true
# - secretName: "cert-secret-test-three"
# secretNamespace: "default"
# clusterId: "9f422a96-5e53-4e09-b789-efc852fd9d4a"
# commonName: "Sanch-two"
# country: "VN"
# organization: "Instaclustr"
# organizationalUnit: "IT"
# validPeriod: 6
# autoRenew: false
initialPermissions: "standard"
options:
overrideExistingUser: true
saslScramMechanism: "SCRAM-SHA-256"
saslScramMechanism: "SCRAM-SHA-256"