diff --git a/_topic_maps/_topic_map.yml b/_topic_maps/_topic_map.yml index d7b626aba4..93ec5ae018 100644 --- a/_topic_maps/_topic_map.yml +++ b/_topic_maps/_topic_map.yml @@ -3845,7 +3845,7 @@ Topics: File: installing-knative-serving - Name: Installing Knative Eventing File: installing-knative-eventing - - Name: Configuring Knative Kafka + - Name: Configuring Knative for Apache Kafka File: serverless-kafka-admin - Name: Configuring Serverless Functions File: configuring-serverless-functions @@ -3975,7 +3975,7 @@ Topics: File: serverless-apiserversource - Name: Creating a ping source File: serverless-pingsource - - Name: Kafka source + - Name: Source for Apache Kafka File: serverless-kafka-developer-source - Name: Custom event sources File: serverless-custom-event-sources @@ -3988,7 +3988,7 @@ Topics: File: serverless-event-sinks - Name: Creating event sinks File: serverless-creating-sinks - - Name: Kafka sink + - Name: Sink for Apache Kafka File: serverless-kafka-developer-sink - Name: Brokers Dir: brokers @@ -4003,7 +4003,7 @@ Topics: File: serverless-broker-backing-channel-default - Name: Configuring the default broker class File: serverless-global-config-broker-class-default - - Name: Kafka broker + - Name: Knative broker for Apache Kafka File: kafka-broker - Name: Managing brokers File: serverless-using-brokers-managing-brokers @@ -4039,7 +4039,7 @@ Topics: File: connecting-channels-sinks - Name: Default channel implementation File: serverless-channel-default - - Name: Security configuration for Knative Kafka channels + - Name: Security configuration for channels File: serverless-kafka-admin-security-channels - Name: Subscriptions Dir: subscriptions diff --git a/_topic_maps/_topic_map_osd.yml b/_topic_maps/_topic_map_osd.yml index 8cda03b7e5..6bda1e3db8 100644 --- a/_topic_maps/_topic_map_osd.yml +++ b/_topic_maps/_topic_map_osd.yml @@ -399,7 +399,7 @@ Topics: File: installing-knative-serving - Name: Installing Knative Eventing File: installing-knative-eventing - - Name: Configuring Knative Kafka + - Name: Configuring Knative for Apache Kafka File: serverless-kafka-admin - Name: Configuring Serverless Functions File: configuring-serverless-functions @@ -529,7 +529,7 @@ Topics: File: serverless-apiserversource - Name: Creating a ping source File: serverless-pingsource - - Name: Kafka source + - Name: Source for Apache Kafka File: serverless-kafka-developer-source - Name: Custom event sources File: serverless-custom-event-sources @@ -542,7 +542,7 @@ Topics: File: serverless-event-sinks - Name: Creating event sinks File: serverless-creating-sinks - - Name: Kafka sink + - Name: Sink for Apache Kafka File: serverless-kafka-developer-sink - Name: Brokers Dir: brokers @@ -557,7 +557,7 @@ Topics: File: serverless-broker-backing-channel-default - Name: Configuring the default broker class File: serverless-global-config-broker-class-default - - Name: Kafka broker + - Name: Knative broker for Apache Kafka File: kafka-broker - Name: Managing brokers File: serverless-using-brokers-managing-brokers @@ -593,7 +593,7 @@ Topics: File: connecting-channels-sinks - Name: Default channel implementation File: serverless-channel-default - - Name: Security configuration for Knative Kafka channels + - Name: Security configuration for channels File: serverless-kafka-admin-security-channels - Name: Subscriptions Dir: subscriptions diff --git a/_topic_maps/_topic_map_rosa.yml b/_topic_maps/_topic_map_rosa.yml index 784b1c8674..396eb2b811 100644 --- a/_topic_maps/_topic_map_rosa.yml +++ 
b/_topic_maps/_topic_map_rosa.yml @@ -594,7 +594,7 @@ Topics: File: installing-knative-serving - Name: Installing Knative Eventing File: installing-knative-eventing - - Name: Configuring Knative Kafka + - Name: Configuring Knative for Apache Kafka File: serverless-kafka-admin - Name: Configuring Serverless Functions File: configuring-serverless-functions @@ -724,7 +724,7 @@ Topics: File: serverless-apiserversource - Name: Creating a ping source File: serverless-pingsource - - Name: Kafka source + - Name: Source for Apache Kafka File: serverless-kafka-developer-source - Name: Custom event sources File: serverless-custom-event-sources @@ -737,7 +737,7 @@ Topics: File: serverless-event-sinks - Name: Creating event sinks File: serverless-creating-sinks - - Name: Kafka sink + - Name: Sink for Apache Kafka File: serverless-kafka-developer-sink - Name: Brokers Dir: brokers @@ -752,7 +752,7 @@ Topics: File: serverless-broker-backing-channel-default - Name: Configuring the default broker class File: serverless-global-config-broker-class-default - - Name: Kafka broker + - Name: Knative broker for Apache Kafka File: kafka-broker - Name: Managing brokers File: serverless-using-brokers-managing-brokers @@ -788,7 +788,7 @@ Topics: File: connecting-channels-sinks - Name: Default channel implementation File: serverless-channel-default - - Name: Security configuration for Knative Kafka channels + - Name: Security configuration for channels File: serverless-kafka-admin-security-channels - Name: Subscriptions Dir: subscriptions diff --git a/modules/serverless-channel-default.adoc b/modules/serverless-channel-default.adoc index 42a62a265d..06eca79e6c 100644 --- a/modules/serverless-channel-default.adoc +++ b/modules/serverless-channel-default.adoc @@ -10,7 +10,7 @@ * You have administrator permissions on {product-title}. * You have installed the {ServerlessOperatorName} and Knative Eventing on your cluster. -* If you want to use Kafka channels as the default channel implementation, you must also install the `KnativeKafka` CR on your cluster. +* If you want to use Knative channels for Apache Kafka as the default channel implementation, you must also install the `KnativeKafka` CR on your cluster. .Procedure diff --git a/modules/serverless-config-replicas-kafka.adoc b/modules/serverless-config-replicas-kafka.adoc index c0120caf7e..c80c07f313 100644 --- a/modules/serverless-config-replicas-kafka.adoc +++ b/modules/serverless-config-replicas-kafka.adoc @@ -4,9 +4,9 @@ :_content-type: PROCEDURE [id="serverless-config-replicas-kafka_{context}"] -= Configuring high availability replicas for Knative Kafka += Configuring high availability replicas for the Knative broker implementation for Apache Kafka -High availability (HA) is available by default for the Knative Kafka `kafka-controller` and `kafka-webhook-eventing` components, which are configured to have two each replicas by default. You can change the number of replicas for these components by modifying the `spec.high-availability.replicas` value in the `KnativeKafka` custom resource (CR). +High availability (HA) is available by default for the Knative broker implementation for Apache Kafka components `kafka-controller` and `kafka-webhook-eventing`, which are configured to have two replicas each by default. You can change the number of replicas for these components by modifying the `spec.high-availability.replicas` value in the `KnativeKafka` custom resource (CR).
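For reference, the `spec.high-availability.replicas` field described in this module sits in the `KnativeKafka` custom resource. A minimal sketch follows; the resource name and namespace match the defaults used elsewhere in these docs, and the replica count of `3` is illustrative:

[source,yaml]
----
apiVersion: operator.serverless.openshift.io/v1alpha1
kind: KnativeKafka
metadata:
  name: knative-kafka
  namespace: knative-eventing
spec:
  high-availability:
    replicas: 3 # applies to both kafka-controller and kafka-webhook-eventing
----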
.Prerequisites @@ -18,7 +18,7 @@ ifdef::openshift-dedicated,openshift-rosa[] * You have access to an {product-title} account with cluster administrator or dedicated administrator access. endif::[] -* The {ServerlessOperatorName} and Knative Kafka are installed on your cluster. +* The {ServerlessOperatorName} and the Knative broker for Apache Kafka are installed on your cluster. .Procedure diff --git a/modules/serverless-create-kafka-channel-yaml.adoc b/modules/serverless-create-kafka-channel-yaml.adoc index 86cd7e3caa..e5f529185c 100644 --- a/modules/serverless-create-kafka-channel-yaml.adoc +++ b/modules/serverless-create-kafka-channel-yaml.adoc @@ -5,7 +5,7 @@ :_content-type: PROCEDURE [id="serverless-create-kafka-channel-yaml_{context}"] -= Creating a Kafka channel by using YAML += Creating a channel for Apache Kafka by using YAML Creating Knative resources by using YAML files uses a declarative API, which enables you to describe channels declaratively and in a reproducible manner. You can create a Knative Eventing channel that is backed by Kafka topics by creating a Kafka channel. To create a Kafka channel by using YAML, you must create a YAML file that defines a `KafkaChannel` object, then apply it by using the `oc apply` command. diff --git a/modules/serverless-creating-a-kafka-event-sink.adoc b/modules/serverless-creating-a-kafka-event-sink.adoc index 45067d8192..a63401c9af 100644 --- a/modules/serverless-creating-a-kafka-event-sink.adoc +++ b/modules/serverless-creating-a-kafka-event-sink.adoc @@ -4,13 +4,16 @@ :_content-type: PROCEDURE [id="serverless-creating-a-kafka-event-sink_{context}"] -= Creating a Kafka sink by using the {product-title} web console += Creating an event sink for Apache Kafka by using the {product-title} web console You can create a Kafka sink that sends events to a Kafka topic by using the *Developer* perspective in the {product-title} web console. By default, a Kafka sink uses the binary content mode, which is more efficient than the structured mode. + +As a developer, you can create an event sink to receive events from a particular source and send them to a Kafka topic. + .Prerequisites -* You have installed the {ServerlessOperatorName}, with Knative Serving, Knative Eventing, and Knative Kafka APIs, from the OperatorHub. +* You have installed the {ServerlessOperatorName}, with Knative Serving, Knative Eventing, and the Knative broker for Apache Kafka APIs, from the OperatorHub. * You have created a Kafka topic in your Kafka environment. .Procedure diff --git a/modules/serverless-creating-channel-admin-web-console.adoc b/modules/serverless-creating-channel-admin-web-console.adoc index 9e9012b3b3..d56b899161 100644 --- a/modules/serverless-creating-channel-admin-web-console.adoc +++ b/modules/serverless-creating-channel-admin-web-console.adoc @@ -30,6 +30,6 @@ endif::[] + [NOTE] ==== -Currently only `InMemoryChannel` channel objects are supported by default. Kafka channels are available if you have installed Knative Kafka on {ServerlessProductName}. +Currently only `InMemoryChannel` channel objects are supported by default. Knative channels for Apache Kafka are available if you have installed the Knative broker implementation for Apache Kafka on {ServerlessProductName}. ==== . Click *Create*.
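As an illustration of the `KafkaChannel` object that the "Creating a channel for Apache Kafka by using YAML" module above describes, a minimal sketch might look like the following; the name, namespace, and the partition and replication values are illustrative assumptions:

[source,yaml]
----
apiVersion: messaging.knative.dev/v1beta1
kind: KafkaChannel
metadata:
  name: example-channel
  namespace: default
spec:
  numPartitions: 3      # partitions for the backing Kafka topic
  replicationFactor: 1  # replication factor for the backing Kafka topic
----

Applying a file like this with `oc apply -f <filename>` creates the channel, as the module describes.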
diff --git a/modules/serverless-creating-subscriptions-kn.adoc b/modules/serverless-creating-subscriptions-kn.adoc index 845cf013de..9921b70e5b 100644 --- a/modules/serverless-creating-subscriptions-kn.adoc +++ b/modules/serverless-creating-subscriptions-kn.adoc @@ -25,7 +25,7 @@ $ kn subscription create <subscription_name> \ --channel <group:version:kind>:<channel_name> \ <1> --sink <sink_prefix>:<sink_name> \ <2> --sink-dead-letter <sink_prefix>:<sink_name> <3> ---- -<1> `--channel` specifies the source for cloud events that should be processed. You must provide the channel name. If you are not using the default `InMemoryChannel` channel that is backed by the `Channel` custom resource, you must prefix the channel name with the `<group:version:kind>` for the specified channel type. For example, this will be `messaging.knative.dev:v1beta1:KafkaChannel` for a Kafka backed channel. +<1> `--channel` specifies the source for cloud events that should be processed. You must provide the channel name. If you are not using the default `InMemoryChannel` channel that is backed by the `Channel` custom resource, you must prefix the channel name with the `<group:version:kind>` for the specified channel type. For example, this will be `messaging.knative.dev:v1beta1:KafkaChannel` for an Apache Kafka backed channel. <2> `--sink` specifies the target destination to which the event should be delivered. By default, the `<sink_name>` is interpreted as a Knative service of this name, in the same namespace as the subscription. You can specify the type of the sink by using one of the following prefixes: `ksvc`:: A Knative service. `channel`:: A channel that should be used as destination. Only default channel types can be referenced here. diff --git a/modules/serverless-event-delivery-component-behaviors.adoc b/modules/serverless-event-delivery-component-behaviors.adoc index 7e43802e7f..d6fcacd07d 100644 --- a/modules/serverless-event-delivery-component-behaviors.adoc +++ b/modules/serverless-event-delivery-component-behaviors.adoc @@ -9,7 +9,7 @@ Different channel and broker types have their own behavior patterns that are followed for event delivery. [id="serverless-event-delivery-component-behaviors-kafka_{context}"] -== Knative Kafka channels and brokers +== Knative channels and brokers for Apache Kafka If an event is successfully delivered to a Kafka channel or broker receiver, the receiver responds with a `202` status code, which means that the event has been safely stored inside a Kafka topic and is not lost. diff --git a/modules/serverless-install-kafka-odc.adoc b/modules/serverless-install-kafka-odc.adoc index f1ca984eb8..0405b5669b 100644 --- a/modules/serverless-install-kafka-odc.adoc +++ b/modules/serverless-install-kafka-odc.adoc @@ -4,9 +4,9 @@ :_content-type: PROCEDURE [id="serverless-install-kafka-odc_{context}"] -= Installing Knative Kafka += Installing Knative broker for Apache Kafka -Knative Kafka provides integration options for you to use supported versions of the Apache Kafka message streaming platform with {ServerlessProductName}. Knative Kafka functionality is available in an {ServerlessProductName} installation if you have installed the `KnativeKafka` custom resource. +The Knative broker implementation for Apache Kafka provides integration options for you to use supported versions of the Apache Kafka message streaming platform with {ServerlessProductName}. Knative broker for Apache Kafka functionality is available in an {ServerlessProductName} installation if you have installed the `KnativeKafka` custom resource. .Prerequisites @@ -67,7 +67,7 @@ spec: <1> Enables developers to use the `KafkaChannel` channel type in the cluster.
<2> A comma-separated list of bootstrap servers from your AMQ Streams cluster. <3> Enables developers to use the `KafkaSource` event source type in the cluster. -<4> Enables developers to use the Knative Kafka broker implementation in the cluster. +<4> Enables developers to use the Knative broker implementation for Apache Kafka in the cluster. <5> A comma-separated list of bootstrap servers from your Red Hat AMQ Streams cluster. <6> Defines the number of partitions of the Kafka topics, backed by the `Broker` objects. The default is `10`. <7> Defines the replication factor of the Kafka topics, backed by the `Broker` objects. The default is `3`. @@ -94,7 +94,7 @@ image::knative-kafka-overview.png[Kafka Knative Overview page showing Conditions + If the conditions have a status of *Unknown* or *False*, wait a few moments to refresh the page. -. Check that the Knative Kafka resources have been created: +. Check that the Knative broker for Apache Kafka resources have been created: + [source,terminal] ---- diff --git a/modules/serverless-kafka-broker-configmap.adoc b/modules/serverless-kafka-broker-configmap.adoc index e39bb8742d..fd6540a31f 100644 --- a/modules/serverless-kafka-broker-configmap.adoc +++ b/modules/serverless-kafka-broker-configmap.adoc @@ -4,7 +4,7 @@ :_content-type: PROCEDURE [id="serverless-kafka-broker-configmap_{context}"] -= Configuring Kafka broker settings += Configuring Apache Kafka broker settings You can configure the replication factor, bootstrap servers, and the number of topic partitions for a Kafka broker, by creating a config map and referencing this config map in the Kafka `Broker` object. diff --git a/modules/serverless-kafka-broker-sasl-default-config.adoc b/modules/serverless-kafka-broker-sasl-default-config.adoc index a7be740bb9..1f4aa481ff 100644 --- a/modules/serverless-kafka-broker-sasl-default-config.adoc +++ b/modules/serverless-kafka-broker-sasl-default-config.adoc @@ -4,7 +4,7 @@ :_content-type: PROCEDURE [id="serverless-kafka-broker-sasl-default-config_{context}"] -= Configuring SASL authentication for Kafka brokers += Configuring SASL authentication for Apache Kafka brokers _Simple Authentication and Security Layer_ (SASL) is used by Apache Kafka for authentication. If you use SASL authentication on your cluster, users must provide credentials to Knative for communicating with the Kafka cluster; otherwise events cannot be produced or consumed. diff --git a/modules/serverless-kafka-broker-tls-default-config.adoc b/modules/serverless-kafka-broker-tls-default-config.adoc index 7865335773..cb2c13b125 100644 --- a/modules/serverless-kafka-broker-tls-default-config.adoc +++ b/modules/serverless-kafka-broker-tls-default-config.adoc @@ -5,9 +5,9 @@ :_content-type: PROCEDURE [id="serverless-kafka-broker-tls-default-config_{context}"] -= Configuring TLS authentication for Kafka brokers += Configuring TLS authentication for Apache Kafka brokers -_Transport Layer Security_ (TLS) is used by Apache Kafka clients and servers to encrypt traffic between Knative and Kafka, as well as for authentication. TLS is the only supported method of traffic encryption for Knative Kafka. +_Transport Layer Security_ (TLS) is used by Apache Kafka clients and servers to encrypt traffic between Knative and Kafka, as well as for authentication. TLS is the only supported method of traffic encryption for the Knative broker implementation for Apache Kafka. 
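As a sketch of the kind of TLS credentials the module above describes, a secret for the Knative broker implementation for Apache Kafka might take roughly this shape; the secret name and the placeholder values are illustrative assumptions, and the modules themselves create the secret with `oc create secret`:

[source,yaml]
----
apiVersion: v1
kind: Secret
metadata:
  name: tls-secret
  namespace: knative-eventing
type: Opaque
stringData:
  protocol: SSL                 # TLS-only; a SASL_SSL variant also exists
data:
  ca.crt: <base64_encoded_CA_certificate>
  user.crt: <base64_encoded_client_certificate>
  user.key: <base64_encoded_client_key>
----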
.Prerequisites diff --git a/modules/serverless-kafka-broker-with-kafka-topic.adoc b/modules/serverless-kafka-broker-with-kafka-topic.adoc index 989bba176a..bab2f50c39 100644 --- a/modules/serverless-kafka-broker-with-kafka-topic.adoc +++ b/modules/serverless-kafka-broker-with-kafka-topic.adoc @@ -4,7 +4,7 @@ :_content-type: PROCEDURE [id="serverless-kafka-broker-with-kafka-topic_{context}"] -= Creating a Kafka broker that uses an externally managed Kafka topic += Creating an Apache Kafka broker that uses an externally managed Kafka topic If you want to use a Kafka broker without allowing it to create its own internal topic, you can use an externally managed Kafka topic instead. To do this, you must create a Kafka `Broker` object that uses the `kafka.eventing.knative.dev/external.topic` annotation. diff --git a/modules/serverless-kafka-broker.adoc b/modules/serverless-kafka-broker.adoc index b5c6652a2e..1144d8e5f5 100644 --- a/modules/serverless-kafka-broker.adoc +++ b/modules/serverless-kafka-broker.adoc @@ -4,7 +4,7 @@ :_content-type: PROCEDURE [id="serverless-kafka-broker_{context}"] -= Creating a Kafka broker by using YAML += Creating an Apache Kafka broker by using YAML Creating Knative resources by using YAML files uses a declarative API, which enables you to describe applications declaratively and in a reproducible manner. To create a Kafka broker by using YAML, you must create a YAML file that defines a `Broker` object, then apply it by using the `oc apply` command. @@ -36,7 +36,7 @@ spec: namespace: knative-eventing ---- <1> The broker class. If not specified, brokers use the default class as configured by cluster administrators. To use the Kafka broker, this value must be `Kafka`. -<2> The default config map for Knative Kafka brokers. This config map is created when the Kafka broker functionality is enabled on the cluster by a cluster administrator. +<2> The default config map for Knative brokers for Apache Kafka. This config map is created when the Kafka broker functionality is enabled on the cluster by a cluster administrator. . Apply the Kafka-based broker YAML file: + diff --git a/modules/serverless-kafka-developer.adoc b/modules/serverless-kafka-developer.adoc index 009340622b..fcd0d6c030 100644 --- a/modules/serverless-kafka-developer.adoc +++ b/modules/serverless-kafka-developer.adoc @@ -4,21 +4,21 @@ :_content-type: CONCEPT [id="serverless-kafka-developer_{context}"] -= Using Knative Kafka += Using the Knative broker for Apache Kafka -Knative Kafka provides integration options for you to use supported versions of the Apache Kafka message streaming platform with {ServerlessProductName}. Kafka provides options for event source, channel, broker, and event sink capabilities. +The Knative broker implementation for Apache Kafka provides integration options for you to use supported versions of the Apache Kafka message streaming platform with {ServerlessProductName}. Kafka provides options for event source, channel, broker, and event sink capabilities. // OCP ifdef::openshift-enterprise[] [NOTE] ==== -Knative Kafka is not currently supported for {ibmzProductName} and {ibmpowerProductName}. +The Knative broker implementation for Apache Kafka is not currently supported for {ibmzProductName} and {ibmpowerProductName}.
==== endif::[] -Knative Kafka provides additional options, such as: +The Knative broker for Apache Kafka provides additional options, such as: * Kafka source * Kafka channel diff --git a/modules/serverless-kafka-event-delivery.adoc b/modules/serverless-kafka-event-delivery.adoc index d7d0852e7b..6a62692364 100644 --- a/modules/serverless-kafka-event-delivery.adoc +++ b/modules/serverless-kafka-event-delivery.adoc @@ -4,8 +4,8 @@ :_content-type: CONCEPT [id="serverless-kafka-delivery-retries_{context}"] -= Kafka event delivery and retries += Apache Kafka event delivery and retries -Using Kafka components in an event-driven architecture provides "at least once" event delivery. This means that operations are retried until a return code value is received. This makes applications more resilient to lost events; however, it might result in duplicate events being sent. +Using Apache Kafka components in an event-driven architecture provides "at least once" event delivery. This means that operations are retried until a return code value is received. This makes applications more resilient to lost events; however, it might result in duplicate events being sent. For the Kafka event source, there is a fixed number of retries for event delivery by default. For Kafka channels, retries are only performed if they are configured in the Kafka channel `Delivery` spec. diff --git a/modules/serverless-kafka-sasl-channels.adoc b/modules/serverless-kafka-sasl-channels.adoc index 8137357958..6bf4d05c49 100644 --- a/modules/serverless-kafka-sasl-channels.adoc +++ b/modules/serverless-kafka-sasl-channels.adoc @@ -4,7 +4,7 @@ :_content-type: PROCEDURE [id="serverless-kafka-sasl-channels_{context}"] -= Configuring SASL authentication for Kafka channels += Configuring SASL authentication for Knative channels for Apache Kafka _Simple Authentication and Security Layer_ (SASL) is used by Apache Kafka for authentication. If you use SASL authentication on your cluster, users must provide credentials to Knative for communicating with the Kafka cluster; otherwise events cannot be produced or consumed. diff --git a/modules/serverless-kafka-sasl-source.adoc b/modules/serverless-kafka-sasl-source.adoc index 11018a591f..9a16a1ae21 100644 --- a/modules/serverless-kafka-sasl-source.adoc +++ b/modules/serverless-kafka-sasl-source.adoc @@ -4,7 +4,7 @@ :_content-type: PROCEDURE [id="serverless-kafka-sasl-source_{context}"] -= Configuring SASL authentication for Kafka sources += Configuring SASL authentication for Apache Kafka sources _Simple Authentication and Security Layer_ (SASL) is used by Apache Kafka for authentication. If you use SASL authentication on your cluster, users must provide credentials to Knative for communicating with the Kafka cluster; otherwise events cannot be produced or consumed. diff --git a/modules/serverless-kafka-sink-security-config.adoc b/modules/serverless-kafka-sink-security-config.adoc index 89d9047bee..53fc8c0fed 100644 --- a/modules/serverless-kafka-sink-security-config.adoc +++ b/modules/serverless-kafka-sink-security-config.adoc @@ -4,9 +4,9 @@ :_content-type: PROCEDURE [id="serverless-kafka-sink-security-config_{context}"] -= Configuring security for Kafka sinks += Configuring security for Apache Kafka sinks -_Transport Layer Security_ (TLS) is used by Apache Kafka clients and servers to encrypt traffic between Knative and Kafka, as well as for authentication. TLS is the only supported method of traffic encryption for Knative Kafka.
+_Transport Layer Security_ (TLS) is used by Apache Kafka clients and servers to encrypt traffic between Knative and Kafka, as well as for authentication. TLS is the only supported method of traffic encryption for the Knative broker implementation for Apache Kafka. _Simple Authentication and Security Layer_ (SASL) is used by Apache Kafka for authentication. If you use SASL authentication on your cluster, users must provide credentials to Knative for communicating with the Kafka cluster; otherwise events cannot be produced or consumed. diff --git a/modules/serverless-kafka-sink.adoc b/modules/serverless-kafka-sink.adoc index 5f94dc1001..66bff7cfee 100644 --- a/modules/serverless-kafka-sink.adoc +++ b/modules/serverless-kafka-sink.adoc @@ -4,7 +4,7 @@ :_content-type: PROCEDURE [id="serverless-kafka-sink_{context}"] -= Creating a Kafka sink by using YAML += Creating an Apache Kafka sink by using YAML You can create a Kafka sink that sends events to a Kafka topic. By default, a Kafka sink uses the binary content mode, which is more efficient than the structured mode. To create a Kafka sink by using YAML, you must create a YAML file that defines a `KafkaSink` object, then apply it by using the `oc apply` command. diff --git a/modules/serverless-kafka-source-kn.adoc b/modules/serverless-kafka-source-kn.adoc index 6b9292e2fc..6386f58f3b 100644 --- a/modules/serverless-kafka-source-kn.adoc +++ b/modules/serverless-kafka-source-kn.adoc @@ -5,7 +5,7 @@ :_content-type: PROCEDURE [id="serverless-kafka-source-kn_{context}"] -= Creating a Kafka event source by using the Knative CLI += Creating an Apache Kafka event source by using the Knative CLI You can use the `kn source kafka create` command to create a Kafka source by using the Knative (`kn`) CLI. Using the Knative CLI to create event sources provides a more streamlined and intuitive user interface than modifying YAML files directly. diff --git a/modules/serverless-kafka-source-odc.adoc b/modules/serverless-kafka-source-odc.adoc index c4c53bdcd9..aa3f55d663 100644 --- a/modules/serverless-kafka-source-odc.adoc +++ b/modules/serverless-kafka-source-odc.adoc @@ -4,9 +4,9 @@ :_content-type: PROCEDURE [id="serverless-kafka-source-odc_{context}"] -= Creating a Kafka event source by using the web console += Creating an Apache Kafka event source by using the web console -After Knative Kafka is installed on your cluster, you can create a Kafka source by using the web console. Using the {product-title} web console provides a streamlined and intuitive user interface to create a Kafka source. +After the Knative broker implementation for Apache Kafka is installed on your cluster, you can create an Apache Kafka source by using the web console. Using the {product-title} web console provides a streamlined and intuitive user interface to create a Kafka source. .Prerequisites diff --git a/modules/serverless-kafka-source-yaml.adoc b/modules/serverless-kafka-source-yaml.adoc index fe8dbb83b7..34b4703f52 100644 --- a/modules/serverless-kafka-source-yaml.adoc +++ b/modules/serverless-kafka-source-yaml.adoc @@ -4,7 +4,7 @@ :_content-type: PROCEDURE [id="serverless-kafka-source-yaml_{context}"] -= Creating a Kafka event source by using YAML += Creating an Apache Kafka event source by using YAML Creating Knative resources by using YAML files uses a declarative API, which enables you to describe applications declaratively and in a reproducible manner. 
To create a Kafka source by using YAML, you must create a YAML file that defines a `KafkaSource` object, then apply it by using the `oc apply` command. diff --git a/modules/serverless-kafka-tls-channels.adoc b/modules/serverless-kafka-tls-channels.adoc index a727a753db..2da85bc7bf 100644 --- a/modules/serverless-kafka-tls-channels.adoc +++ b/modules/serverless-kafka-tls-channels.adoc @@ -5,9 +5,9 @@ :_content-type: PROCEDURE [id="serverless-kafka-tls-channels_{context}"] -= Configuring TLS authentication for Kafka channels += Configuring TLS authentication for Knative channels for Apache Kafka -_Transport Layer Security_ (TLS) is used by Apache Kafka clients and servers to encrypt traffic between Knative and Kafka, as well as for authentication. TLS is the only supported method of traffic encryption for Knative Kafka. +_Transport Layer Security_ (TLS) is used by Apache Kafka clients and servers to encrypt traffic between Knative and Kafka, as well as for authentication. TLS is the only supported method of traffic encryption for the Knative broker implementation for Apache Kafka. .Prerequisites diff --git a/modules/serverless-rn-1-28-0.adoc b/modules/serverless-rn-1-28-0.adoc index f166414661..c7010e6cc2 100644 --- a/modules/serverless-rn-1-28-0.adoc +++ b/modules/serverless-rn-1-28-0.adoc @@ -15,7 +15,7 @@ * {ServerlessProductName} now uses Knative Eventing 1.7. * {ServerlessProductName} now uses Kourier 1.7. * {ServerlessProductName} now uses Knative (`kn`) CLI 1.7. -* {ServerlessProductName} now uses Knative Kafka 1.7. +* {ServerlessProductName} now uses the Knative broker implementation for Apache Kafka 1.7. * The `kn func` CLI plug-in now uses `func` 1.9.1 version. * Node.js and TypeScript runtimes for {ServerlessProductName} Functions are now Generally Available (GA). diff --git a/modules/trigger-event-delivery-config.adoc b/modules/trigger-event-delivery-config.adoc index d0e49ebce4..d3d6071a73 100644 --- a/modules/trigger-event-delivery-config.adoc +++ b/modules/trigger-event-delivery-config.adoc @@ -10,7 +10,7 @@ If you are using a Kafka broker, you can configure the delivery order of events .Prerequisites -* The {ServerlessOperatorName}, Knative Eventing, and Knative Kafka are installed on your {product-title} cluster. +* The {ServerlessOperatorName}, Knative Eventing, and the Knative broker implementation for Apache Kafka are installed on your {product-title} cluster. * Kafka broker is enabled for use on your cluster, and you have created a Kafka broker. * You have created a project or have access to a project with the appropriate roles and permissions to create applications and other workloads in {product-title}. * You have installed the OpenShift (`oc`) CLI. diff --git a/serverless/about/about-knative-eventing.adoc b/serverless/about/about-knative-eventing.adoc index 30158a86d7..108d0799d7 100644 --- a/serverless/about/about-knative-eventing.adoc +++ b/serverless/about/about-knative-eventing.adoc @@ -29,5 +29,5 @@ include::modules/serverless-kafka-developer.adoc[leveloffset=+1] == Additional resources * xref:../../serverless/install/installing-knative-eventing.adoc#serverless-install-kafka-odc_installing-knative-eventing[Installing the `KnativeKafka` custom resource].
* link:https://access.redhat.com/documentation/en-us/red_hat_amq/7.6/html/amq_streams_on_openshift_overview/kafka-concepts_str#kafka-concepts-key_str[Red Hat AMQ Streams documentation] -* link:https://access.redhat.com/documentation/en-us/red_hat_amq/7.6/html-single/using_amq_streams_on_rhel/index#assembly-kafka-encryption-and-authentication-str[Red Hat AMQ Streams TLS and SASL on Kafka documentation] +* link:https://access.redhat.com/documentation/en-us/red_hat_amq/7.6/html-single/using_amq_streams_on_rhel/index#assembly-kafka-encryption-and-authentication-str[Red Hat AMQ Streams TLS and SASL on Apache Kafka documentation] * xref:../../serverless/eventing/brokers/serverless-event-delivery.adoc#serverless-event-delivery[Event delivery] diff --git a/serverless/eventing/brokers/kafka-broker.adoc b/serverless/eventing/brokers/kafka-broker.adoc index 15bb436175..5aa351fc8f 100644 --- a/serverless/eventing/brokers/kafka-broker.adoc +++ b/serverless/eventing/brokers/kafka-broker.adoc @@ -1,7 +1,7 @@ :_content-type: ASSEMBLY include::_attributes/common-attributes.adoc[] [id="kafka-broker"] -= Kafka broker += Knative broker implementation for Apache Kafka :context: kafka-broker toc::[] @@ -10,7 +10,7 @@ include::snippets/serverless-about-kafka-broker.adoc[] [id="creating-kafka-broker"] -== Creating a Kafka broker when it is not configured as the default broker type +== Creating an Apache Kafka broker when it is not configured as the default broker type If your {ServerlessProductName} deployment is not configured to use Kafka broker as the default broker type, you can use one of the following procedures to create a Kafka-based broker. @@ -24,7 +24,7 @@ include::modules/serverless-kafka-broker-configmap.adoc[leveloffset=+1] [id="serverless-kafka-admin-security"] -== Security configuration for Knative Kafka brokers +== Security configuration for the Knative broker implementation for Apache Kafka Kafka clusters are generally secured by using the TLS or SASL authentication methods. You can configure a Kafka broker or channel to work against a protected Red Hat AMQ Streams cluster by using TLS or SASL. diff --git a/serverless/eventing/brokers/serverless-broker-backing-channel-default.adoc b/serverless/eventing/brokers/serverless-broker-backing-channel-default.adoc index f70bcdca55..5eeacb9adc 100644 --- a/serverless/eventing/brokers/serverless-broker-backing-channel-default.adoc +++ b/serverless/eventing/brokers/serverless-broker-backing-channel-default.adoc @@ -11,7 +11,7 @@ If you are using a channel-based broker, you can set the default backing channel * You have administrator permissions on {product-title}. * You have installed the {ServerlessOperatorName} and Knative Eventing on your cluster. * You have installed the OpenShift (`oc`) CLI. -* If you want to use Kafka channels as the default backing channel type, you must also install the `KnativeKafka` CR on your cluster. +* If you want to use Apache Kafka channels as the default backing channel type, you must also install the `KnativeKafka` CR on your cluster. .Procedure diff --git a/serverless/eventing/brokers/serverless-broker-types.adoc b/serverless/eventing/brokers/serverless-broker-types.adoc index 98b5b727f5..9be26bbcc0 100644 --- a/serverless/eventing/brokers/serverless-broker-types.adoc +++ b/serverless/eventing/brokers/serverless-broker-types.adoc @@ -12,9 +12,9 @@ Cluster administrators can set the default broker implementation for a cluster. Knative provides a default, channel-based broker implementation. 
This channel-based broker can be used for development and testing purposes, but does not provide adequate event delivery guarantees for production environments. The default broker is backed by the `InMemoryChannel` channel implementation by default. -If you want to use Kafka to reduce network hops, use the Kafka broker implementation. Do not configure the channel-based broker to be backed by the `KafkaChannel` channel implementation. +If you want to use Apache Kafka to reduce network hops, use the Knative broker implementation for Apache Kafka. Do not configure the channel-based broker to be backed by the `KafkaChannel` channel implementation. [id="serverless-broker-types-production_{context}"] -== Production-ready Kafka broker implementation +== Production-ready Knative broker implementation for Apache Kafka include::snippets/serverless-about-kafka-broker.adoc[] diff --git a/serverless/eventing/brokers/serverless-global-config-broker-class-default.adoc b/serverless/eventing/brokers/serverless-global-config-broker-class-default.adoc index 389d18f58f..6ef322bef7 100644 --- a/serverless/eventing/brokers/serverless-global-config-broker-class-default.adoc +++ b/serverless/eventing/brokers/serverless-global-config-broker-class-default.adoc @@ -11,7 +11,7 @@ You can use the `config-br-defaults` config map to specify default broker class * You have administrator permissions on {product-title}. * You have installed the {ServerlessOperatorName} and Knative Eventing on your cluster. -* If you want to use Kafka broker as the default broker implementation, you must also install the `KnativeKafka` CR on your cluster. +* If you want to use the Knative broker for Apache Kafka as the default broker implementation, you must also install the `KnativeKafka` CR on your cluster. .Procedure @@ -48,7 +48,7 @@ spec: <2> In `spec.config`, you can specify the config maps that you want to add modified configurations for. <3> The `config-br-defaults` config map specifies the default settings for any broker that does not specify `spec.config` settings or a broker class. <4> The cluster-wide default broker class configuration. In this example, the default broker class implementation for the cluster is `Kafka`. -<5> The `kafka-broker-config` config map specifies default settings for the Kafka broker. See "Configuring Kafka broker settings" in the "Additional resources" section. +<5> The `kafka-broker-config` config map specifies default settings for the Kafka broker. See "Configuring Apache Kafka broker settings" in the "Additional resources" section. <6> The namespace where the `kafka-broker-config` config map exists. <7> The namespace-scoped default broker class configuration. In this example, the default broker class implementation for the `my-namespace` namespace is `MTChannelBasedBroker`. You can specify default broker class implementations for multiple namespaces. <8> The `config-br-default-channel` config map specifies the default backing channel for the broker. See "Configuring the default broker backing channel" in the "Additional resources" section. diff --git a/serverless/eventing/brokers/serverless-using-brokers.adoc b/serverless/eventing/brokers/serverless-using-brokers.adoc index 6a94b4b791..511c55f2f4 100644 --- a/serverless/eventing/brokers/serverless-using-brokers.adoc +++ b/serverless/eventing/brokers/serverless-using-brokers.adoc @@ -8,9 +8,9 @@ toc::[] Knative provides a default, channel-based broker implementation.
This channel-based broker can be used for development and testing purposes, but does not provide adequate event delivery guarantees for production environments. -If a cluster administrator has configured your {ServerlessProductName} deployment to use Kafka as the default broker type, creating a broker by using the default settings creates a Kafka-based broker. +If a cluster administrator has configured your {ServerlessProductName} deployment to use Apache Kafka as the default broker type, creating a broker by using the default settings creates a Knative broker for Apache Kafka. -If your {ServerlessProductName} deployment is not configured to use Kafka broker as the default broker type, the channel-based broker is created when you use the default settings in the following procedures. +If your {ServerlessProductName} deployment is not configured to use the Knative broker for Apache Kafka as the default broker type, the channel-based broker is created when you use the default settings in the following procedures. include::modules/serverless-create-broker-kn.adoc[leveloffset=+1] include::modules/serverless-creating-broker-annotation.adoc[leveloffset=+1] diff --git a/serverless/eventing/channels/serverless-channels.adoc b/serverless/eventing/channels/serverless-channels.adoc index da99d3397e..9dc691162b 100644 --- a/serverless/eventing/channels/serverless-channels.adoc +++ b/serverless/eventing/channels/serverless-channels.adoc @@ -30,7 +30,7 @@ The channel controller then creates the backing channel instance based on the `s The `spec.channelTemplate` properties cannot be changed after creation, because they are set by the default channel mechanism rather than by the user. ==== -When this mechanism is used with the preceding example, two objects are created: a generic backing channel and an `InMemoryChannel` channel. If you are using a different default channel implementation, the `InMemoryChannel` is replaced with one that is specific to your implementation. For example, with Knative Kafka, the `KafkaChannel` channel is created. +When this mechanism is used with the preceding example, two objects are created: a generic backing channel and an `InMemoryChannel` channel. If you are using a different default channel implementation, the `InMemoryChannel` is replaced with one that is specific to your implementation. For example, with the Knative broker for Apache Kafka, the `KafkaChannel` channel is created. The backing channel acts as a proxy that copies its subscriptions to the user-created channel object, and sets the user-created channel object status to reflect the status of the backing channel. 
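A short sketch of the default channel mechanism discussed above: the user creates only a generic `Channel` object, and the channel controller fills in `spec.channelTemplate` from the cluster default and creates the backing channel, for example a `KafkaChannel`. The name and namespace here are illustrative:

[source,yaml]
----
apiVersion: messaging.knative.dev/v1
kind: Channel
metadata:
  name: example-channel
  namespace: default
# After creation, the controller sets spec.channelTemplate from the
# cluster default, for example:
#
# spec:
#   channelTemplate:
#     apiVersion: messaging.knative.dev/v1beta1
#     kind: KafkaChannel
----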
diff --git a/serverless/eventing/channels/serverless-kafka-admin-security-channels.adoc b/serverless/eventing/channels/serverless-kafka-admin-security-channels.adoc index 0a858c422b..a4c9702df3 100644 --- a/serverless/eventing/channels/serverless-kafka-admin-security-channels.adoc +++ b/serverless/eventing/channels/serverless-kafka-admin-security-channels.adoc @@ -1,7 +1,7 @@ :_content-type: ASSEMBLY include::_attributes/common-attributes.adoc[] [id="serverless-kafka-admin-security-channels"] -= Security configuration for Knative Kafka channels += Security configuration for channels :context: serverless-kafka-admin-security-channels toc::[] diff --git a/serverless/eventing/event-sinks/serverless-kafka-developer-sink.adoc b/serverless/eventing/event-sinks/serverless-kafka-developer-sink.adoc index 63d791ed53..77862e494e 100644 --- a/serverless/eventing/event-sinks/serverless-kafka-developer-sink.adoc +++ b/serverless/eventing/event-sinks/serverless-kafka-developer-sink.adoc @@ -1,12 +1,12 @@ :_content-type: ASSEMBLY include::_attributes/common-attributes.adoc[] [id="serverless-kafka-developer-sink"] -= Kafka sink += Sink for Apache Kafka :context: serverless-kafka-developer-sink toc::[] -Kafka sinks are a type of xref:../../../serverless/eventing/event-sinks/serverless-event-sinks.adoc#serverless-event-sinks[event sink] that are available if a cluster administrator has enabled Kafka on your cluster. You can send events directly from an xref:../../../serverless/eventing/event-sources/knative-event-sources.adoc#knative-event-sources[event source] to a Kafka topic by using a Kafka sink. +Apache Kafka sinks are a type of xref:../../../serverless/eventing/event-sinks/serverless-event-sinks.adoc#serverless-event-sinks[event sink] that are available if a cluster administrator has enabled Apache Kafka on your cluster. You can send events directly from an xref:../../../serverless/eventing/event-sources/knative-event-sources.adoc#knative-event-sources[event source] to a Kafka topic by using a Kafka sink. // Kafka sink via YAML include::modules/serverless-kafka-sink.adoc[leveloffset=+1] diff --git a/serverless/eventing/event-sources/knative-event-sources.adoc b/serverless/eventing/event-sources/knative-event-sources.adoc index 13afc88490..a25d7a1364 100644 --- a/serverless/eventing/event-sources/knative-event-sources.adoc +++ b/serverless/eventing/event-sources/knative-event-sources.adoc @@ -16,6 +16,6 @@ xref:../../../serverless/eventing/event-sources/serverless-apiserversource.adoc# xref:../../../serverless/eventing/event-sources/serverless-pingsource.adoc#serverless-pingsource[Ping source]:: Produces events with a fixed payload on a specified cron schedule. -xref:../../../serverless/eventing/event-sources/serverless-kafka-developer-source.adoc#serverless-kafka-developer-source[Kafka event source]:: Connects a Kafka cluster to a sink as an event source. +xref:../../../serverless/eventing/event-sources/serverless-kafka-developer-source.adoc#serverless-kafka-developer-source[Kafka event source]:: Connects an Apache Kafka cluster to a sink as an event source. You can also create a xref:../../../serverless/eventing/event-sources/serverless-custom-event-sources.adoc#serverless-custom-event-sources[custom event source]. 
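For the "Sink for Apache Kafka" assembly above, a minimal `KafkaSink` object of the kind its modules create might look like this sketch; the topic name and bootstrap server address are illustrative assumptions:

[source,yaml]
----
apiVersion: eventing.knative.dev/v1alpha1
kind: KafkaSink
metadata:
  name: example-sink
  namespace: default
spec:
  topic: example-topic  # existing Kafka topic that receives the events
  bootstrapServers:
    - my-cluster-kafka-bootstrap.kafka:9092
----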
diff --git a/serverless/eventing/event-sources/serverless-kafka-developer-source.adoc b/serverless/eventing/event-sources/serverless-kafka-developer-source.adoc index eaa205d20b..deb539119a 100644 --- a/serverless/eventing/event-sources/serverless-kafka-developer-source.adoc +++ b/serverless/eventing/event-sources/serverless-kafka-developer-source.adoc @@ -1,12 +1,12 @@ :_content-type: ASSEMBLY include::_attributes/common-attributes.adoc[] [id="serverless-kafka-developer-source"] -= Kafka source += Source for Apache Kafka :context: serverless-kafka-developer-source toc::[] -You can create a Kafka source that reads events from an Apache Kafka cluster and passes these events to a sink. You can create a Kafka source by using the {product-title} web console, the Knative (`kn`) CLI, or by creating a `KafkaSource` object directly as a YAML file and using the OpenShift CLI (`oc`) to apply it. +You can create an Apache Kafka source that reads events from an Apache Kafka cluster and passes these events to a sink. You can create a Kafka source by using the {product-title} web console, the Knative (`kn`) CLI, or by creating a `KafkaSource` object directly as a YAML file and using the OpenShift CLI (`oc`) to apply it. // dev console include::modules/serverless-kafka-source-odc.adoc[leveloffset=+1] diff --git a/serverless/eventing/triggers/connect-trigger-sink.adoc b/serverless/eventing/triggers/connect-trigger-sink.adoc index 8fca218d1a..19d72d7379 100644 --- a/serverless/eventing/triggers/connect-trigger-sink.adoc +++ b/serverless/eventing/triggers/connect-trigger-sink.adoc @@ -7,7 +7,7 @@ include::_attributes/common-attributes.adoc[] You can connect a trigger to a sink, so that events from a broker are filtered before they are sent to the sink. A sink that is connected to a trigger is configured as a `subscriber` in the `Trigger` object's resource spec. -.Example of a `Trigger` object connected to a Kafka sink +.Example of a `Trigger` object connected to an Apache Kafka sink [source,yaml] ---- apiVersion: eventing.knative.dev/v1 diff --git a/serverless/eventing/triggers/serverless-triggers.adoc b/serverless/eventing/triggers/serverless-triggers.adoc index 21d4de836a..53b3379668 100644 --- a/serverless/eventing/triggers/serverless-triggers.adoc +++ b/serverless/eventing/triggers/serverless-triggers.adoc @@ -8,7 +8,7 @@ include::_attributes/common-attributes.adoc[] include::snippets/serverless-brokers-intro.adoc[] -If you are using a Kafka broker, you can configure the delivery order of events from triggers to event sinks. See xref:../../../serverless/eventing/triggers/serverless-triggers.adoc#trigger-event-delivery-config_serverless-triggers[Configuring event delivery ordering for triggers]. +If you are using a Knative broker for Apache Kafka, you can configure the delivery order of events from triggers to event sinks. See xref:../../../serverless/eventing/triggers/serverless-triggers.adoc#trigger-event-delivery-config_serverless-triggers[Configuring event delivery ordering for triggers]. diff --git a/serverless/install/install-serverless-operator.adoc b/serverless/install/install-serverless-operator.adoc index 4b4cbccf1c..e7ae098018 100644 --- a/serverless/install/install-serverless-operator.adoc +++ b/serverless/install/install-serverless-operator.adoc @@ -6,7 +6,7 @@ include::_attributes/common-attributes.adoc[] toc::[] -Installing the {ServerlessOperatorName} enables you to install and use Knative Serving, Knative Eventing, and Knative Kafka on a {product-title} cluster. 
The {ServerlessOperatorName} manages Knative custom resource definitions (CRDs) for your cluster and enables you to configure them without directly modifying individual config maps for each component. +Installing the {ServerlessOperatorName} enables you to install and use Knative Serving, Knative Eventing, and the Knative broker for Apache Kafka on a {product-title} cluster. The {ServerlessOperatorName} manages Knative custom resource definitions (CRDs) for your cluster and enables you to configure them without directly modifying individual config maps for each component. // universal install doc include::modules/serverless-install-web-console.adoc[leveloffset=+1] diff --git a/serverless/install/serverless-kafka-admin.adoc b/serverless/install/serverless-kafka-admin.adoc index ece23054d2..092beae46b 100644 --- a/serverless/install/serverless-kafka-admin.adoc +++ b/serverless/install/serverless-kafka-admin.adoc @@ -1,12 +1,12 @@ :_content-type: ASSEMBLY include::_attributes/common-attributes.adoc[] [id="serverless-kafka-admin"] -= Configuring Knative Kafka += Configuring Knative broker for Apache Kafka :context: serverless-kafka-admin toc::[] -Knative Kafka provides integration options for you to use supported versions of the Apache Kafka message streaming platform with {ServerlessProductName}. Kafka provides options for event source, channel, broker, and event sink capabilities. +The Knative broker implementation for Apache Kafka provides integration options for you to use supported versions of the Apache Kafka message streaming platform with {ServerlessProductName}. Kafka provides options for event source, channel, broker, and event sink capabilities. // OCP ifdef::openshift-enterprise[] @@ -14,7 +14,7 @@ In addition to the Knative Eventing components that are provided as part of a co [NOTE] ==== -Knative Kafka is not currently supported for {ibmzProductName} and {ibmpowerProductName}. +The Knative broker for Apache Kafka is not currently supported for {ibmzProductName} and {ibmpowerProductName}. ==== endif::[] diff --git a/serverless/knative-serving/config-applications/serverless-config-tls.adoc b/serverless/knative-serving/config-applications/serverless-config-tls.adoc index fecc76527d..4e9dddfc22 100644 --- a/serverless/knative-serving/config-applications/serverless-config-tls.adoc +++ b/serverless/knative-serving/config-applications/serverless-config-tls.adoc @@ -8,7 +8,7 @@ toc::[] You can use _Transport Layer Security_ (TLS) to encrypt Knative traffic and for authentication. -TLS is the only supported method of traffic encryption for Knative Kafka. Red Hat recommends using both SASL and TLS together for Knative Kafka resources. +TLS is the only supported method of traffic encryption for the Knative broker implementation for Apache Kafka. Red Hat recommends using both SASL and TLS together for Knative broker for Apache Kafka resources.
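As a sketch of the combined SASL and TLS credentials recommended above, a secret consumed by the Knative broker implementation for Apache Kafka might take roughly this shape; all names and values are illustrative assumptions, and the SASL modules in this changeset create the secret with `oc create secret`:

[source,yaml]
----
apiVersion: v1
kind: Secret
metadata:
  name: sasl-secret
  namespace: knative-eventing
stringData:
  protocol: SASL_SSL            # SASL over TLS, per the recommendation above
  sasl.mechanism: SCRAM-SHA-512
  user: <username>
  password: <password>
data:
  ca.crt: <base64_encoded_CA_certificate>
----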
[NOTE] ==== @@ -22,8 +22,8 @@ include::modules/serverless-enabling-tls-internal-traffic.adoc[leveloffset=+1] [role="_additional-resources"] .Additional resources -* xref:../../../serverless/eventing/brokers/kafka-broker.adoc#serverless-kafka-broker-tls-default-config_kafka-broker[Configuring TLS authentication for Kafka brokers] -* xref:../../../serverless/eventing/channels/serverless-kafka-admin-security-channels.adoc#serverless-kafka-tls-channels_serverless-kafka-admin-security-channels[Configuring TLS authentication for Kafka channels] +* xref:../../../serverless/eventing/brokers/kafka-broker.adoc#serverless-kafka-broker-tls-default-config_kafka-broker[Configuring TLS authentication for the Knative broker for Apache Kafka] +* xref:../../../serverless/eventing/channels/serverless-kafka-admin-security-channels.adoc#serverless-kafka-tls-channels_serverless-kafka-admin-security-channels[Configuring TLS authentication for channels for Apache Kafka] ifndef::openshift-dedicated[] * xref:../../../serverless/integrations/serverless-ossm-setup.adoc#serverless-ossm-enabling-serving-metrics_serverless-ossm-setup[Enabling Knative Serving metrics when using Service Mesh with mTLS] -endif::[] \ No newline at end of file +endif::[] diff --git a/snippets/serverless-about-kafka-broker.adoc b/snippets/serverless-about-kafka-broker.adoc index d9a63afa7b..b0b4e20f5a 100644 --- a/snippets/serverless-about-kafka-broker.adoc +++ b/snippets/serverless-about-kafka-broker.adoc @@ -5,13 +5,13 @@ :_content-type: SNIPPET -For production-ready Knative Eventing deployments, Red Hat recommends using the Knative Kafka broker implementation. The Kafka broker is an Apache Kafka native implementation of the Knative broker, which sends CloudEvents directly to the Kafka instance. +For production-ready Knative Eventing deployments, Red Hat recommends using the Knative broker implementation for Apache Kafka. The broker is an Apache Kafka native implementation of the Knative broker, which sends CloudEvents directly to the Kafka instance. -The Kafka broker has a native integration with Kafka for storing and routing events. This allows better integration with Kafka for the broker and trigger model over other broker types, and reduces network hops. Other benefits of the Kafka broker implementation include: +The Knative broker has a native integration with Kafka for storing and routing events. This allows better integration with Kafka for the broker and trigger model over other broker types, and reduces network hops. Other benefits of the Knative broker implementation include: * At-least-once delivery guarantees * Ordered delivery of events, based on the CloudEvents partitioning extension * Control plane high availability * A horizontally scalable data plane -The Knative Kafka broker stores incoming CloudEvents as Kafka records, using the binary content mode. This means that all CloudEvent attributes and extensions are mapped as headers on the Kafka record, while the `data` spec of the CloudEvent corresponds to the value of the Kafka record. +The Knative broker implementation for Apache Kafka stores incoming CloudEvents as Kafka records, using the binary content mode. This means that all CloudEvent attributes and extensions are mapped as headers on the Kafka record, while the `data` spec of the CloudEvent corresponds to the value of the Kafka record.
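For reference, the Kafka `Broker` object that the snippet above and the "Creating an Apache Kafka broker by using YAML" module describe takes roughly this shape; the broker name is illustrative, while the `Kafka` class and the `kafka-broker-config` reference follow the module's callouts:

[source,yaml]
----
apiVersion: eventing.knative.dev/v1
kind: Broker
metadata:
  name: example-kafka-broker
  annotations:
    eventing.knative.dev/broker.class: Kafka # selects the Kafka broker implementation
spec:
  config:
    apiVersion: v1
    kind: ConfigMap
    name: kafka-broker-config   # default config map for Knative brokers for Apache Kafka
    namespace: knative-eventing
----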