Below is a detailed code walkthrough for a Spring Boot Kafka utility class.
Environment Setup
- Make sure JDK, Maven, and Kafka are installed
- Add the Kafka dependency to the Maven pom
<dependency>
    <groupId>org.springframework.kafka</groupId>
    <artifactId>spring-kafka</artifactId>
    <version>2.1.13.RELEASE</version>
</dependency>
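In a Spring Boot project the broker address and (de)serializers can also be kept in application.properties instead of being hard-coded in the utility class. The following is only a minimal sketch, assuming a local broker at localhost:9092 and the test-group consumer group used later in this article:

spring.kafka.bootstrap-servers=localhost:9092
spring.kafka.consumer.group-id=test-group
spring.kafka.consumer.key-deserializer=org.apache.kafka.common.serialization.StringDeserializer
spring.kafka.consumer.value-deserializer=org.apache.kafka.common.serialization.StringDeserializer
spring.kafka.producer.key-serializer=org.apache.kafka.common.serialization.StringSerializer
spring.kafka.producer.value-serializer=org.apache.kafka.common.serialization.StringSerializer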
Utility Class Design
The Kafka utility class should contain:
1. A KafkaProducer for sending messages
2. A KafkaConsumer for receiving messages
3. ProducerRecord as the outgoing message object
4. ConsumerRecord as the incoming message object
5. Lazy initialization methods for the producer and consumer
import java.util.Collections;
import java.util.Properties;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.producer.Callback;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class KafkaUtils {

    private static final Logger LOGGER = LoggerFactory.getLogger(KafkaUtils.class);

    private static KafkaProducer<String, String> producer = null;
    private static KafkaConsumer<String, String> consumer = null;

    // Topic used for both producing and consuming
    private static final String TOPIC = "test-topic";

    /**
     * Get the producer, creating it lazily on first use.
     */
    public static KafkaProducer<String, String> getProducer() {
        if (producer == null) {
            Properties props = new Properties();
            props.put("bootstrap.servers", "localhost:9092");
            props.put("acks", "all");
            props.put("retries", 0);
            props.put("batch.size", 16384);
            props.put("linger.ms", 1);
            props.put("buffer.memory", 33554432);
            props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
            props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
            producer = new KafkaProducer<>(props);
        }
        return producer;
    }

    /**
     * Get the consumer, creating it lazily and subscribing it to the topic on first use.
     */
    public static KafkaConsumer<String, String> getConsumer() {
        if (consumer == null) {
            Properties props = new Properties();
            props.put("bootstrap.servers", "localhost:9092");
            props.put("group.id", "test-group");
            props.put("enable.auto.commit", "true");
            props.put("auto.commit.interval.ms", "1000");
            props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
            props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
            consumer = new KafkaConsumer<>(props);
            consumer.subscribe(Collections.singleton(TOPIC));
        }
        return consumer;
    }

    /**
     * Send a message asynchronously and log the result.
     */
    public static void sendMessage(String message) {
        KafkaProducer<String, String> producer = getProducer();
        ProducerRecord<String, String> record = new ProducerRecord<>(TOPIC, message);
        // KafkaProducer.send takes the Kafka client Callback interface;
        // success and failure are distinguished by whether the exception is null.
        producer.send(record, new Callback() {
            @Override
            public void onCompletion(RecordMetadata metadata, Exception exception) {
                if (exception != null) {
                    LOGGER.error("Send message failed.", exception);
                } else {
                    LOGGER.info("Send message success: offset = {}", metadata.offset());
                }
            }
        });
        producer.flush();
    }

    /**
     * Poll for messages in an endless loop and log every record received.
     */
    public static void receiveMessage() {
        KafkaConsumer<String, String> consumer = getConsumer();
        while (true) {
            ConsumerRecords<String, String> records = consumer.poll(100);
            for (ConsumerRecord<String, String> record : records) {
                LOGGER.info("Received message: partition = {}, offset = {}, key = {}, value = {}",
                        record.partition(), record.offset(), record.key(), record.value());
            }
        }
    }
}
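The utility above uses the raw Kafka client directly. In a Spring Boot application it is often more convenient to let spring-kafka manage the producer and inject a KafkaTemplate, which is what the Spring imports in the original code hint at. The sketch below is only an illustration of that alternative (the KafkaSender class name is made up for this example), assuming the auto-configured template uses the same String serializers:

import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.support.SendResult;
import org.springframework.stereotype.Component;
import org.springframework.util.concurrent.ListenableFuture;
import org.springframework.util.concurrent.ListenableFutureCallback;

@Component
public class KafkaSender {

    private final KafkaTemplate<String, String> kafkaTemplate;

    public KafkaSender(KafkaTemplate<String, String> kafkaTemplate) {
        this.kafkaTemplate = kafkaTemplate;
    }

    public void send(String topic, String message) {
        // KafkaTemplate.send returns a ListenableFuture, so the Spring-style
        // callback with separate success/failure methods applies here.
        ListenableFuture<SendResult<String, String>> future = kafkaTemplate.send(topic, message);
        future.addCallback(new ListenableFutureCallback<SendResult<String, String>>() {
            @Override
            public void onFailure(Throwable ex) {
                // handle the failure, e.g. log the exception
            }

            @Override
            public void onSuccess(SendResult<String, String> result) {
                // handle success, e.g. log result.getRecordMetadata().offset()
            }
        });
    }
}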
Usage Examples
Producer example
public class KafkaProducerDemo {
    public static void main(String[] args) {
        String message = "Hello World";
        KafkaUtils.sendMessage(message);
    }
}
Consumer example
public class KafkaConsumerDemo {
    public static void main(String[] args) {
        KafkaUtils.receiveMessage();
    }
}
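For consumption, a Spring Boot application would normally replace the manual poll loop with an @KafkaListener method driven by spring-kafka's listener container. A minimal sketch, assuming the same test-topic and test-group as above (the KafkaReceiver class name is illustrative):

import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.stereotype.Component;

@Component
public class KafkaReceiver {

    // Invoked by the spring-kafka listener container for each record on test-topic
    @KafkaListener(topics = "test-topic", groupId = "test-group")
    public void listen(String message) {
        System.out.println("Received message: " + message);
    }
}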
That covers the detailed code for the Spring Boot Kafka utility class, together with the two usage examples above. Hopefully it helps.