Commit 642af52d authored by Stephane Nicoll

Polish "Apache Kafka support" contribution

Closes gh-6961
parent c4188c8e
@@ -22,11 +22,10 @@ import org.springframework.kafka.core.ConsumerFactory;
 import org.springframework.kafka.listener.config.ContainerProperties;
 
 /**
- * Default configurer for Kafka listener container factories.
+ * Configure {@link ConcurrentKafkaListenerContainerFactory} with sensible defaults.
  *
  * @author Gary Russell
- * @since 1.5
- *
+ * @since 1.5.0
  */
 public class ConcurrentKafkaListenerContainerFactoryConfigurer {
@@ -43,8 +42,8 @@ public class ConcurrentKafkaListenerContainerFactoryConfigurer {
     /**
      * Configure the specified Kafka listener container factory. The factory can be
      * further tuned and default settings can be overridden.
-     * @param listenerContainerFactory the {@link SimpleKafkaListenerContainerFactory} instance to
-     * configure
+     * @param listenerContainerFactory the {@link ConcurrentKafkaListenerContainerFactory}
+     * instance to configure
      * @param consumerFactory the {@link ConsumerFactory} to use
      */
     public void configure(ConcurrentKafkaListenerContainerFactory<Object, Object> listenerContainerFactory,
...
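For context, this configurer is what a user-defined listener container factory bean can delegate to in order to pick up the auto-configured defaults before tuning them further. The following is only an illustrative sketch and not part of this commit; the bean method name, parameter names and generics are assumptions.

[source,java,indent=0]
----
	// Sketch: build a factory, let the configurer apply the Boot defaults,
	// then customise it further before returning it.
	@Bean
	public ConcurrentKafkaListenerContainerFactory<Object, Object> kafkaListenerContainerFactory(
			ConcurrentKafkaListenerContainerFactoryConfigurer configurer,
			ConsumerFactory<Object, Object> kafkaConsumerFactory) {
		ConcurrentKafkaListenerContainerFactory<Object, Object> factory =
				new ConcurrentKafkaListenerContainerFactory<Object, Object>();
		configurer.configure(factory, kafkaConsumerFactory);
		return factory;
	}
----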
@@ -26,11 +26,10 @@ import org.springframework.kafka.config.KafkaListenerConfigUtils;
 import org.springframework.kafka.core.ConsumerFactory;
 
 /**
- * Adds {@link EnableKafka} if present on the classpath.
+ * Configuration for Kafka annotation-driven support.
  *
  * @author Gary Russell
- * @since 1.5
- *
+ * @since 1.5.0
  */
 @Configuration
 @ConditionalOnClass(EnableKafka.class)
...
@@ -16,6 +16,7 @@
 package org.springframework.boot.autoconfigure.kafka;
 
+import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
 import org.springframework.boot.autoconfigure.condition.ConditionalOnClass;
 import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
 import org.springframework.boot.context.properties.EnableConfigurationProperties;
@@ -31,11 +32,10 @@ import org.springframework.kafka.support.LoggingProducerListener;
 import org.springframework.kafka.support.ProducerListener;
 
 /**
- * Auto-configuration for Spring for Apache Kafka.
+ * {@link EnableAutoConfiguration Auto-configuration} for Apache Kafka.
  *
  * @author Gary Russell
- * @since 1.5
- *
+ * @since 1.5.0
  */
 @Configuration
 @ConditionalOnClass(KafkaTemplate.class)
@@ -51,9 +51,11 @@ public class KafkaAutoConfiguration {
     @Bean
     @ConditionalOnMissingBean(KafkaTemplate.class)
-    public KafkaTemplate<?, ?> kafkaTemplate(ProducerFactory<Object, Object> kafkaProducerFactory,
+    public KafkaTemplate<?, ?> kafkaTemplate(
+            ProducerFactory<Object, Object> kafkaProducerFactory,
             ProducerListener<Object, Object> kafkaProducerListener) {
-        KafkaTemplate<Object, Object> kafkaTemplate = new KafkaTemplate<Object, Object>(kafkaProducerFactory);
+        KafkaTemplate<Object, Object> kafkaTemplate =
+                new KafkaTemplate<Object, Object>(kafkaProducerFactory);
         kafkaTemplate.setProducerListener(kafkaProducerListener);
         kafkaTemplate.setDefaultTopic(this.properties.getTemplate().getDefaultTopic());
         return kafkaTemplate;
@@ -65,21 +67,18 @@ public class KafkaAutoConfiguration {
         return new LoggingProducerListener<Object, Object>();
     }
 
-    @Configuration
-    protected static class ConnectionConfig {
-
-        @Bean
-        @ConditionalOnMissingBean(ConsumerFactory.class)
-        public ConsumerFactory<?, ?> kafkaConsumerFactory(KafkaProperties properties) {
-            return new DefaultKafkaConsumerFactory<Object, Object>(properties.buildConsumerProperties());
-        }
-
-        @Bean
-        @ConditionalOnMissingBean(ProducerFactory.class)
-        public ProducerFactory<?, ?> kafkaProducerFactory(KafkaProperties properties) {
-            return new DefaultKafkaProducerFactory<Object, Object>(properties.buildProducerProperties());
-        }
-
-    }
+    @Bean
+    @ConditionalOnMissingBean(ConsumerFactory.class)
+    public ConsumerFactory<?, ?> kafkaConsumerFactory() {
+        return new DefaultKafkaConsumerFactory<Object, Object>(
+                this.properties.buildConsumerProperties());
+    }
+
+    @Bean
+    @ConditionalOnMissingBean(ProducerFactory.class)
+    public ProducerFactory<?, ?> kafkaProducerFactory() {
+        return new DefaultKafkaProducerFactory<Object, Object>(
+                this.properties.buildProducerProperties());
+    }
 
 }
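Because the template bean above is guarded by `@ConditionalOnMissingBean(KafkaTemplate.class)`, the auto-configuration backs off as soon as the application defines its own template. A hypothetical override, shown here only to illustrate that back-off and not part of the commit (the topic name is made up), would simply be another bean definition:

[source,java,indent=0]
----
	// Defining this bean makes the auto-configured kafkaTemplate back off.
	@Bean
	public KafkaTemplate<Object, Object> kafkaTemplate(
			ProducerFactory<Object, Object> kafkaProducerFactory) {
		KafkaTemplate<Object, Object> template =
				new KafkaTemplate<Object, Object>(kafkaProducerFactory);
		template.setDefaultTopic("someTopic"); // illustrative topic name
		return template;
	}
----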
@@ -16,6 +16,5 @@
 /**
  * Auto-configuration for Apache Kafka.
- *
  */
 package org.springframework.boot.autoconfigure.kafka;
@@ -409,6 +409,76 @@
         }
       ]
     },
+    {
+      "name": "spring.kafka.consumer.auto-offset-reset",
+      "values": [
+        {
+          "value": "earliest",
+          "description": "Automatically reset the offset to the earliest offset."
+        },
+        {
+          "value": "latest",
+          "description": "Automatically reset the offset to the latest offset."
+        },
+        {
+          "value": "none",
+          "description": "Throw exception to the consumer if no previous offset is found for the consumer's group."
+        },
+        {
+          "value": "exception",
+          "description": "Throw exception to the consumer."
+        }
+      ],
+      "providers": [
+        {
+          "name": "any"
+        }
+      ]
+    },
+    {
+      "name": "spring.kafka.consumer.key-deserializer",
+      "providers": [
+        {
+          "name": "handle-as",
+          "parameters": {
+            "target": "org.apache.kafka.common.serialization.Deserializer"
+          }
+        }
+      ]
+    },
+    {
+      "name": "spring.kafka.consumer.value-deserializer",
+      "providers": [
+        {
+          "name": "handle-as",
+          "parameters": {
+            "target": "org.apache.kafka.common.serialization.Deserializer"
+          }
+        }
+      ]
+    },
+    {
+      "name": "spring.kafka.producer.key-serializer",
+      "providers": [
+        {
+          "name": "handle-as",
+          "parameters": {
+            "target": "org.apache.kafka.common.serialization.Serializer"
+          }
+        }
+      ]
+    },
+    {
+      "name": "spring.kafka.producer.value-serializer",
+      "providers": [
+        {
+          "name": "handle-as",
+          "parameters": {
+            "target": "org.apache.kafka.common.serialization.Serializer"
+          }
+        }
+      ]
+    },
     {
       "name": "spring.http.converters.preferred-json-mapper",
       "values": [
...
@@ -35,18 +35,17 @@ import org.springframework.messaging.handler.annotation.Header;
 import static org.assertj.core.api.Assertions.assertThat;
 
 /**
- * Tests for Kafka Auto-configuration.
+ * Integration tests for {@link KafkaAutoConfiguration}.
  *
  * @author Gary Russell
- * @since 1.5
- *
  */
 public class KafkaAutoConfigurationIntegrationTests {
 
     private static final String TEST_TOPIC = "testTopic";
 
     @ClassRule
-    public static final KafkaEmbedded kafkaEmbedded = new KafkaEmbedded(1, true, TEST_TOPIC);
+    public static final KafkaEmbedded kafkaEmbedded =
+            new KafkaEmbedded(1, true, TEST_TOPIC);
 
     private AnnotationConfigApplicationContext context;
@@ -59,7 +58,8 @@ public class KafkaAutoConfigurationIntegrationTests {
     @Test
     public void testEndToEnd() throws Exception {
-        load(KafkaConfig.class, "spring.kafka.bootstrap-servers:" + kafkaEmbedded.getBrokersAsString(),
+        load(KafkaConfig.class,
+                "spring.kafka.bootstrap-servers:" + kafkaEmbedded.getBrokersAsString(),
                 "spring.kafka.consumer.group-id=testGroup",
                 "spring.kafka.consumer.auto-offset-reset=earliest");
         @SuppressWarnings("unchecked")
@@ -103,7 +103,8 @@ public class KafkaAutoConfigurationIntegrationTests {
         private volatile String key;
 
         @KafkaListener(topics = TEST_TOPIC)
-        public void listen(String foo, @Header(KafkaHeaders.RECEIVED_MESSAGE_KEY) String key) {
+        public void listen(String foo,
+                @Header(KafkaHeaders.RECEIVED_MESSAGE_KEY) String key) {
            this.received = foo;
            this.key = key;
            this.latch.countDown();
...
@@ -857,53 +857,39 @@ content into your application; rather pick only the properties that you need.
 spring.jms.template.time-to-live= # Time-to-live of a message when sending in milliseconds. Enable QoS when set.
 
 # APACHE KAFKA ({sc-spring-boot-autoconfigure}/kafka/KafkaProperties.{sc-ext}[KafkaProperties])
-spring.kafka.bootstrap-servers=localhost:9092 # Comma-delimited list of host:port pairs.
+spring.kafka.bootstrap-servers= # Comma-delimited list of host:port pairs to use for establishing the initial connection to the Kafka cluster.
 spring.kafka.client-id= # Id to pass to the server when making requests; used for server-side logging.
-spring.kafka.ssl.key-password= # Password of the private key in the key store file.
-spring.kafka.ssl.keystore-location= # Location (resource) of the key store file (e.g. file:my.ks).
-spring.kafka.ssl.keystore-password= # Store password for the key store file.
-spring.kafka.ssl.truststore-location= # Location (resource) of the trust store file (e.g. file:my.ts).
-spring.kafka.ssl.truststore-password= # Store password for the trust store file.
-# Consumer-specific properties:
-spring.kafka.consumer.auto-commit-interval-ms= # Frequency in milliseconds that the consumer offsets are auto-committed.
+spring.kafka.consumer.auto-commit-interval= # Frequency in milliseconds that the consumer offsets are auto-committed to Kafka if 'enable.auto.commit' true.
 spring.kafka.consumer.auto-offset-reset= # What to do when there is no initial offset in Kafka or if the current offset does not exist any more on the server.
-spring.kafka.consumer.bootstrap-servers= # Comma-delimited list of host:port pairs.
+spring.kafka.consumer.bootstrap-servers= # Comma-delimited list of host:port pairs to use for establishing the initial connection to the Kafka cluster.
 spring.kafka.consumer.client-id= # Id to pass to the server when making requests; used for server-side logging.
 spring.kafka.consumer.enable-auto-commit= # If true the consumer's offset will be periodically committed in the background.
-spring.kafka.consumer.fetch-max-wait-ms= # Maximum amount of time the server will block before answering the fetch request.
-spring.kafka.consumer.fetch-min-bytes= # Minimum amount of data the server should return for a fetch request.
+spring.kafka.consumer.fetch-max-wait= # Maximum amount of time in milliseconds the server will block before answering the fetch request if there isn't sufficient data to immediately satisfy the requirement given by "fetch.min.bytes".
+spring.kafka.consumer.fetch-min-size= # Minimum amount of data the server should return for a fetch request in bytes.
 spring.kafka.consumer.group-id= # Unique string that identifies the consumer group this consumer belongs to.
-spring.kafka.consumer.heartbeat-interval-ms= # Expected time between heartbeats to the consumer coordinator.
-spring.kafka.consumer.key-deserializer=StringDeserializer # Deserializer class for keys.
-spring.kafka.consumer.ssl.key-password= # Password of the private key in the key store file.
-spring.kafka.consumer.ssl.keystore-location= # Location (resource) of the key store file (e.g. file:my.ks).
-spring.kafka.consumer.ssl.keystore-password= # Store password for the key store file.
-spring.kafka.consumer.ssl.truststore-location= # Location (resource) of the trust store file (e.g. file:my.ts).
-spring.kafka.consumer.ssl.truststore-password= # Store password for the trust store file.
-spring.kafka.consumer.value-deserializer=StringDeserializer # Deserializer class for values.
-# Listener properties - Refer to the Spring for Apache Kafka documentation
-spring.kafka.listener.ack-mode=BATCH # AckMode - see the spring-kafka documentation.
-spring.kafka.listener.ack-count= # Number of records between offset commits when ack-mode is COUNT or COUNT_TIME.
-spring.kafka.listener.ack-time= # Time in milliseconds between offset commits when ack-mode is TIME or COUNT_TIME.
-spring.kafka.listener.concurrency=1 # Number of threads to run in the listener container(s).
-spring.kafka.listener.pollTimeout=1000 # Timeout in milliseconds to use when polling the consumer.
-# Producer-specific properties:
-spring.kafka.producer.acks= # Number of acknowledgments the producer requires the leader to have received.
+spring.kafka.consumer.heartbeat-interval= # Expected time in milliseconds between heartbeats to the consumer coordinator.
+spring.kafka.consumer.key-deserializer= # Deserializer class for keys.
+spring.kafka.consumer.value-deserializer= # Deserializer class for values.
+spring.kafka.listener.ack-count= # Number of records between offset commits when ackMode is "COUNT" or "COUNT_TIME".
+spring.kafka.listener.ack-mode= # Listener AckMode; see the spring-kafka documentation.
+spring.kafka.listener.ack-time= # Time in milliseconds between offset commits when ackMode is "TIME" or "COUNT_TIME".
+spring.kafka.listener.concurrency= # Number of threads to run in the listener containers.
+spring.kafka.listener.poll-timeout= # Timeout in milliseconds to use when polling the consumer.
+spring.kafka.producer.acks= # Number of acknowledgments the producer requires the leader to have received before considering a request complete.
 spring.kafka.producer.batch-size= # Number of records to batch before sending.
-spring.kafka.producer.bootstrap-servers= # Comma-delimited list of host:port pairs.
+spring.kafka.producer.bootstrap-servers= # Comma-delimited list of host:port pairs to use for establishing the initial connection to the Kafka cluster.
 spring.kafka.producer.buffer-memory= # Total bytes of memory the producer can use to buffer records waiting to be sent to the server.
 spring.kafka.producer.client-id= # Id to pass to the server when making requests; used for server-side logging.
 spring.kafka.producer.compression-type= # Compression type for all data generated by the producer.
-spring.kafka.producer.key-serializer=StringSerializer # Serializer class for keys.
+spring.kafka.producer.key-serializer= # Serializer class for keys.
 spring.kafka.producer.retries= # When greater than zero, enables retrying of failed sends.
-spring.kafka.producer.ssl.key-password= # Password of the private key in the key store file.
-spring.kafka.producer.ssl.keystore-location= # Location (resource) of the key store file (e.g. file:my.ks).
-spring.kafka.producer.ssl.keystore-password= # Store password for the key store file.
-spring.kafka.producer.ssl.truststore-location= # Location (resource) of the trust store file (e.g. file:my.ts).
-spring.kafka.producer.ssl.truststore-password= # Store password for the trust store file.
-spring.kafka.producer.value-serializer=StringSerializer # Serializer class for values.
-# template properties
-spring.kafka.template.default-topic= # Default topic to which messages are sent
+spring.kafka.producer.value-serializer= # Serializer class for values.
+spring.kafka.ssl.key-password= # Password of the private key in the key store file.
+spring.kafka.ssl.keystore-location= # Location of the key store file.
+spring.kafka.ssl.keystore-password= # Store password for the key store file.
+spring.kafka.ssl.truststore-location= # Location of the trust store file.
+spring.kafka.ssl.truststore-password= # Store password for the trust store file.
+spring.kafka.template.default-topic= # Default topic to which messages will be sent.
 
 # RABBIT ({sc-spring-boot-autoconfigure}/amqp/RabbitProperties.{sc-ext}[RabbitProperties])
 spring.rabbitmq.addresses= # Comma-separated list of addresses to which the client should connect.
...
@@ -4452,27 +4452,34 @@
 throw an `AmqpRejectAndDontRequeueException` to signal the message should be rejected.
 This is the mechanism used when retries are enabled and the maximum delivery attempts are
 reached.
 
 [[boot-features-kafka]]
 === Apache Kafka Support
-http://kafka.apache.org/[Apache Kafa] is supported by providing auto-configuration of the `spring-kafka` project.
+http://kafka.apache.org/[Apache Kafa] is supported by providing auto-configuration of the
+`spring-kafka` project.
 
-Kafka configuration is controlled by external configuration properties in `spring.kafka.*`. For example, you might
-declare the following section in `application.properties`:
+Kafka configuration is controlled by external configuration properties in
+`spring.kafka.*`. For example, you might declare the following section in
+`application.properties`:
 
 [source,properties,indent=0]
 ----
	spring.kafka.bootstrap-servers=localhost:9092
	spring.kafka.consumer.group-id=myGroup
 ----
 
 See {sc-spring-boot-autoconfigure}/kafka/KafkaProperties.{sc-ext}[`KafkaProperties`]
 for more of the supported options.
 
 === Sending a Message
-Spring's `KafkaTemplate` is auto-configured and you can autowire them directly in your own beans:
+Spring's `KafkaTemplate` is auto-configured and you can autowire them directly in your own
+beans:
 
 [source,java,indent=0]
 ----
@@ -4491,8 +4498,18 @@ public class MyBean {
	}
 ----
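The body of the sending example is collapsed in this diff view. As a rough sketch only, and not the snippet from the commit (class, field and topic names are illustrative), injecting the auto-configured template and sending a record looks like this:

[source,java,indent=0]
----
	@Component
	public class MyBean {

		private final KafkaTemplate<Object, Object> kafkaTemplate;

		public MyBean(KafkaTemplate<Object, Object> kafkaTemplate) {
			this.kafkaTemplate = kafkaTemplate;
		}

		public void someMethod() {
			// send(topic, payload) publishes asynchronously and returns a future
			this.kafkaTemplate.send("someTopic", "payload");
		}

	}
----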
 === Receiving a Message
+When the Apache Kafka infrastructure is present, any bean can be annotated with
+`@KafkaListener` to create a listener endpoint. If no `KafkaListenerContainerFactory`
+has been defined, a default one is configured automatically with keys defined in
+`spring.kafka.listener.*`.
+
+The following component creates a listener endpoint on the `someTopic` topic:
 
 [source,java,indent=0]
 ----
	@Component
@@ -4506,31 +4523,39 @@ public class MyBean {
	}
 ----
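The listener example itself is likewise collapsed in the hunk above. A minimal sketch of such an endpoint, not taken from the commit (the method name is illustrative, and the String payload assumes the default String deserialization), could be:

[source,java,indent=0]
----
	@Component
	public class MyBean {

		// Uses the auto-configured (or user-defined) listener container factory
		@KafkaListener(topics = "someTopic")
		public void processMessage(String content) {
			// handle the record payload here
		}

	}
----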
 [[kafka-extra-props]]
 === Additional Kafka Properties
-The properties supported by auto configuration are shown in <<common-application-properties>>.
-Note that these properties (hyphenated or camelCase) map directly to the Apache Kafka dotted properties, refer
-to the Apache Kafka documentation for details.
-The first few of these properties apply to both producers and consumers, but can be specified at the producer or
-consumer level if you wish to use different values for each.
-Apache Kafka designates properties with an importance: HIGH, MEDIUM and LOW.
-Spring Boot auto configuration supports all HIGH importance properties, some selected MEDIUM and LOW,
+The properties supported by auto configuration are shown in
+<<common-application-properties>>. Note that these properties (hyphenated or camelCase)
+map directly to the Apache Kafka dotted properties for the most part, refer to the Apache
+Kafka documentation for details.
+
+The first few of these properties apply to both producers and consumers, but can be
+specified at the producer or consumer level if you wish to use different values for each.
+
+Apache Kafka designates properties with an importance: HIGH, MEDIUM and LOW. Spring Boot
+auto configuration supports all HIGH importance properties, some selected MEDIUM and LOW,
 and any that do not have a default value.
-Only a subset of the properties supported by Kafka are available via the `KafkaProperties` class.
-If you wish to configure the producer or consumer with additional properties, you can override the producer factory
-and/or consumer factory bean, adding additional properties, for example:
+
+Only a subset of the properties supported by Kafka are available via the `KafkaProperties`
+class. If you wish to configure the producer or consumer with additional properties, you
+can override the producer factory and/or consumer factory bean, adding additional
+properties, for example:
 
 [source,java,indent=0]
 ----
	@Bean
	public ProducerFactory<?, ?> kafkaProducerFactory(KafkaProperties properties) {
		Map<String, Object> producerProperties = properties.buildProducerProperties();
		producerProperties.put("some.property", "some.value");
		return new DefaultKafkaProducerFactory<Object, Object>(producerProperties);
	}
 ----
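The consumer side follows the same override pattern. A counterpart sketch, not part of the commit and using an equally illustrative property name, would be:

[source,java,indent=0]
----
	@Bean
	public ConsumerFactory<?, ?> kafkaConsumerFactory(KafkaProperties properties) {
		Map<String, Object> consumerProperties = properties.buildConsumerProperties();
		consumerProperties.put("some.property", "some.value");
		return new DefaultKafkaConsumerFactory<Object, Object>(consumerProperties);
	}
----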
 [[boot-features-restclient]]
 == Calling REST services
 If you need to call remote REST services from your application, you can use Spring
...