package com.atguigu.kafka.bean;

import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;

import java.util.Date;

@Data
@NoArgsConstructor
@AllArgsConstructor
public class UserDTO {
    private Long userId;
    private String userName;
    private String phone;
    private Date joinDate;
}
server:
  port: 8110
# v1
spring:
  kafka:
    bootstrap-servers: 192.168.74.148:9095,192.168.74.148:9096,192.168.74.148:9097
    producer: # producer settings
      retries: 1 # number of retries; 0 means no retry
      acks: -1 # ack level: how many partition replicas must be written before the broker acks (0, 1, -1/all)
      # transaction-id-prefix: tx_ # transaction id prefix: once configured, the producer runs transactionally
      batch-size: 16384 # batch size, in bytes
      buffer-memory: 33554432 # producer buffer size, in bytes
      key-serializer: org.apache.kafka.common.serialization.StringSerializer # key serializer
      # value serializer: use the JSON serializer
      value-serializer: org.springframework.kafka.support.serializer.JsonSerializer
      # value-serializer: org.apache.kafka.common.serialization.StringSerializer # plain String value serializer
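Because the value serializer is JsonSerializer, each UserDTO is written to the topic as a JSON document. The following is a minimal sketch of roughly what ends up on the wire, using a plain Jackson ObjectMapper (an assumption: JsonSerializer builds its own mapper internally, but java.util.Date is serialized as epoch milliseconds in both cases, which matches the joinDate value in the tool output at the end of this post):

package com.atguigu.kafka.bean;

import com.fasterxml.jackson.databind.ObjectMapper;

import java.util.Date;

// Sketch only: previews the JSON payload that the JsonSerializer will produce for a UserDTO.
public class JsonPreview {
    public static void main(String[] args) throws Exception {
        UserDTO user = new UserDTO(1L, "张三", "13800000000", new Date());
        System.out.println(new ObjectMapper().writeValueAsString(user));
        // e.g. {"userId":1,"userName":"张三","phone":"13800000000","joinDate":1717675257046}
    }
}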
package com.atguigu.kafka.config;

import org.apache.kafka.clients.admin.NewTopic;
import org.springframework.context.annotation.Bean;
import org.springframework.kafka.config.TopicBuilder;
import org.springframework.stereotype.Component;

@Component
public class KafkaTopicConfig {

    @Bean
    public NewTopic myTopic1() {
        // A topic with the same name is only created once; declaring it again with a
        // different configuration can apply incremental updates (partitions, replicas).
        return TopicBuilder.name("my_topic1") // topic name
                .partitions(3)                // number of partitions
                .replicas(3)                  // replicas per partition
                .build();                     // build the NewTopic
    }
}
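To double-check that this bean actually produced the expected layout on the brokers, a small verification sketch with the plain Kafka AdminClient can be run against the same bootstrap servers (the TopicCheck class and its expected output are illustrative, not part of the original project):

package com.atguigu.kafka.config;

import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.AdminClientConfig;
import org.apache.kafka.clients.admin.TopicDescription;

import java.util.List;
import java.util.Properties;

// Sketch only: describes my_topic1 and prints its partition count (expected: 3).
public class TopicCheck {
    public static void main(String[] args) throws Exception {
        Properties props = new Properties();
        props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG,
                "192.168.74.148:9095,192.168.74.148:9096,192.168.74.148:9097");
        try (AdminClient admin = AdminClient.create(props)) {
            TopicDescription desc = admin.describeTopics(List.of("my_topic1"))
                    .allTopicNames().get().get("my_topic1");
            System.out.println("partitions = " + desc.partitions().size());
        }
    }
}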
package com.atguigu.kafka.interceptor;

import org.apache.kafka.clients.producer.ProducerInterceptor;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;
import org.springframework.stereotype.Component;

import java.util.Map;

// The interceptor must be registered with the Kafka producer (KafkaTemplate) manually.
@Component
public class MyKafkaInterceptor implements ProducerInterceptor<String, Object> {

    // Runs before the producer sends a message: intercept and preprocess the outgoing record.
    @Override
    public ProducerRecord<String, Object> onSend(ProducerRecord<String, Object> producerRecord) {
        System.out.println("About to send message: topic = " + producerRecord.topic()
                + ", partition = " + producerRecord.partition()
                + ", key = " + producerRecord.key()
                + ", value = " + producerRecord.value());
        // Return the record to be sent; returning null would drop the message.
        return producerRecord;
    }

    // Runs after the broker acknowledges (or the send fails).
    @Override
    public void onAcknowledgement(RecordMetadata recordMetadata, Exception e) {
        // A null exception means the message was sent successfully.
        if (e == null) {
            System.out.println("Message sent successfully: topic = " + recordMetadata.topic()
                    + ", partition = " + recordMetadata.partition()
                    + ", offset = " + recordMetadata.offset()
                    + ", timestamp = " + recordMetadata.timestamp());
        }
    }

    @Override
    public void close() {
    }

    @Override
    public void configure(Map<String, ?> map) {
    }
}
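onSend is also allowed to return a modified record instead of the one it received. A hedged sketch of what that could look like inside onSend, attaching a purely illustrative trace-id header (the header name and the copy-then-return pattern are assumptions, not from the original):

// Inside onSend(...), instead of returning producerRecord unchanged:
ProducerRecord<String, Object> copy = new ProducerRecord<>(
        producerRecord.topic(), producerRecord.partition(), producerRecord.key(),
        producerRecord.value(), producerRecord.headers());
copy.headers().add("trace-id",
        java.util.UUID.randomUUID().toString().getBytes(java.nio.charset.StandardCharsets.UTF_8));
return copy;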
package com.atguigu.kafka.listener;

import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;
import org.springframework.kafka.support.ProducerListener;
import org.springframework.stereotype.Component;

@Component
public class MyKafkaProducerListener implements ProducerListener {

    // With acks = 0, the send counts as successful as soon as it is sent.
    // With acks = 1, it is successful once the leader has written it and the broker acks.
    // With acks = -1/all, it is successful once all partition replicas have written it and the broker acks.
    @Override
    public void onSuccess(ProducerRecord producerRecord, RecordMetadata recordMetadata) {
        System.out.println("MyKafkaProducerListener message sent successfully: topic = " + producerRecord.topic()
                + ", partition = " + producerRecord.partition()
                + ", key = " + producerRecord.key()
                + ", value = " + producerRecord.value()
                + ", offset = " + recordMetadata.offset());
    }

    // Callback for failed sends: the listener receives the failed record, so it can be logged or stored.
    @Override
    public void onError(ProducerRecord producerRecord, RecordMetadata recordMetadata, Exception exception) {
        // recordMetadata may be null when the send fails, so it is not dereferenced here.
        System.out.println("MyKafkaProducerListener message failed to send: topic = " + producerRecord.topic()
                + ", partition = " + producerRecord.partition()
                + ", key = " + producerRecord.key()
                + ", value = " + producerRecord.value());
        System.out.println("Exception: " + exception.getMessage());
    }
}
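Unlike the interceptor, a single ProducerListener bean is normally picked up by Spring Boot's auto-configured KafkaTemplate on its own, which is why MyKafkaProducerListener fires in the run below without any manual wiring. If that auto-registration does not apply in a given setup, it can be wired explicitly, mirroring the interceptor registration in the test class (a sketch, assuming the same injected beans):

// Sketch only: explicit wiring, e.g. in a @PostConstruct method next to the interceptor registration.
kafkaTemplate.setProducerListener(myKafkaProducerListener);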
package com.atguigu.kafka.producer;

import com.atguigu.kafka.bean.UserDTO;
import com.atguigu.kafka.interceptor.MyKafkaInterceptor;
import jakarta.annotation.PostConstruct;
import jakarta.annotation.Resource;
import org.junit.jupiter.api.Test;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.kafka.core.KafkaTemplate;

import java.io.IOException;
import java.util.Date;

@SpringBootTest
class KafkaProducerApplicationTests {

    // Inject the KafkaTemplate: on startup Spring Boot builds it from the configuration file
    // and registers it in the container.
    @Resource
    KafkaTemplate kafkaTemplate;

    @Resource
    MyKafkaInterceptor myKafkaInterceptor;

    // Register the producer interceptor with the template (it is not picked up automatically).
    @PostConstruct
    public void init() {
        kafkaTemplate.setProducerInterceptor(myKafkaInterceptor);
    }

    @Test
    void contextLoads() throws IOException {
        UserDTO userDTO = new UserDTO(1L, "张三", "13800000000", new Date());
        kafkaTemplate.send("my_topic1", userDTO);
    }
}
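For completeness, the returned future can also be observed directly. In spring-kafka 3.x, send(...) returns a CompletableFuture<SendResult<K, V>>, so a send with an explicit key and a completion callback could look like the following sketch (KafkaCallbackTests, the 李四 test data, and the typed template field are illustrative assumptions):

package com.atguigu.kafka.producer;

import com.atguigu.kafka.bean.UserDTO;
import jakarta.annotation.Resource;
import org.apache.kafka.clients.producer.RecordMetadata;
import org.junit.jupiter.api.Test;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.kafka.core.KafkaTemplate;

import java.util.Date;

@SpringBootTest
class KafkaCallbackTests {

    // Typed template so the SendResult in the callback is usable without casts.
    @Resource
    KafkaTemplate<String, Object> kafkaTemplate;

    @Test
    void sendWithCallback() {
        UserDTO userDTO = new UserDTO(2L, "李四", "13900000000", new Date());
        kafkaTemplate.send("my_topic1", String.valueOf(userDTO.getUserId()), userDTO)
                .whenComplete((result, ex) -> {
                    if (ex == null) {
                        RecordMetadata meta = result.getRecordMetadata();
                        System.out.println("sent: partition = " + meta.partition() + ", offset = " + meta.offset());
                    } else {
                        System.err.println("send failed: " + ex.getMessage());
                    }
                })
                .join(); // block so the test JVM does not exit before the callback runs
    }
}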
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <parent>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-starter-parent</artifactId>
        <version>3.0.5</version>
        <relativePath/> <!-- lookup parent from repository -->
    </parent>
    <!-- Generated by https://start.springboot.io -->
    <groupId>com.atguigu.kafka</groupId>
    <artifactId>kafka-producer</artifactId>
    <version>0.0.1-SNAPSHOT</version>
    <name>kafka-producer</name>
    <description>kafka-producer</description>
    <properties>
        <java.version>17</java.version>
    </properties>
    <dependencies>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-test</artifactId>
            <scope>test</scope>
        </dependency>
        <dependency>
            <groupId>org.springframework.kafka</groupId>
            <artifactId>spring-kafka</artifactId>
        </dependency>
        <dependency>
            <groupId>org.projectlombok</groupId>
            <artifactId>lombok</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-web</artifactId>
        </dependency>
    </dependencies>
    <build>
        <plugins>
            <plugin>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-maven-plugin</artifactId>
            </plugin>
        </plugins>
    </build>
</project>
. ____ _ __ _ _
/\\ / ___'_ __ _ _(_)_ __ __ _ \ \ \ \
( ( )\___ | '_ | '_| | '_ \/ _` | \ \ \ \
\\/ ___)| |_)| | | | | || (_| | ) ) ) )
' |____| .__|_| |_|_| |_\__, | / / / /
=========|_|==============|___/=/_/_/_/
:: Spring Boot :: (v3.0.5)
About to send message: topic = my_topic1, partition = null, key = null, value = UserDTO(userId=1, userName=张三, phone=13800000000, joinDate=Thu Jun 06 20:00:57 CST 2024)
Message sent successfully: topic = my_topic1, partition = 0, offset = 0, timestamp = 1717675257112
MyKafkaProducerListener message sent successfully: topic = my_topic1, partition = null, key = null, value = UserDTO(userId=1, userName=张三, phone=13800000000, joinDate=Thu Jun 06 20:00:57 CST 2024), offset = 0
[
    [
        {
            "partition": 0,
            "offset": 0,
            "msg": "{\"userId\":1,\"userName\":\"张三\",\"phone\":\"13800000000\",\"joinDate\":1717675257046}",
            "timespan": 1717675257112,
            "date": "2024-06-06 12:00:57"
        }
    ]
]