一个partition同一个时刻在一个consumer group中只能有一个consumer instance在消费,从而保证消费顺序。
consumer group中的consumer instance的数量不应比一个Topic中的partition的数量多,否则,多出来的consumer会处于空闲状态,消费不到消息。
Kafka只在partition的范围内保证消息消费的局部顺序性,不能在同一个topic中的多个partition中保证总的消费顺序性。
如果有在总体上保证消费顺序的需求,那么我们可以通过将topic的partition数量设置为1,将consumer group中的consumer instance数量也设置为1,但是这样会影响性能,所以kafka的顺序消费很少用。
有的时候,我们需要根据某个主键来实现消费的有序性,那我们只需要让 Producer 将相关联的消息发送到 Topic 下的相同的 Partition 即可。 Kafka Producer 发送消息使用默认分区策略的话,会根据 key 的哈希值取模来获取到其在 Topic 下对应的 Partition 。 所以我们发送消息的时候指定 key 即可。
<dependencies>
    <dependency>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-starter-web</artifactId>
    </dependency>
    <dependency>
        <groupId>org.springframework.kafka</groupId>
        <artifactId>spring-kafka</artifactId>
    </dependency>
    <dependency>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-starter-test</artifactId>
        <scope>test</scope>
    </dependency>
    <dependency>
        <groupId>junit</groupId>
        <artifactId>junit</artifactId>
        <scope>test</scope>
    </dependency>
</dependencies>
spring:
  # Kafka 配置项,对应 KafkaProperties 配置类
  kafka:
    bootstrap-servers: 192.168.126.140:9092 # 指定 Kafka Broker 地址,可以设置多个,以逗号分隔
    # Kafka Producer 配置项
    producer:
      acks: 1 # 0-不应答。1-leader 应答。all-所有 leader 和 follower 应答。
      retries: 3 # 发送失败时,重试发送的次数
      key-serializer: org.apache.kafka.common.serialization.StringSerializer # 消息的 key 的序列化
      value-serializer: org.springframework.kafka.support.serializer.JsonSerializer # 消息的 value 的序列化
    # Kafka Consumer 配置项
    consumer:
      auto-offset-reset: earliest # 设置消费者分组最初的消费进度为 earliest
      key-deserializer: org.apache.kafka.common.serialization.StringDeserializer
      value-deserializer: org.springframework.kafka.support.serializer.JsonDeserializer
      properties:
        spring:
          json:
            trusted:
              packages: com.artisan.springkafka.domain
    # Kafka Consumer Listener 监听器配置
    listener:
      missing-topics-fatal: false # 消费监听接口监听的主题不存在时,默认会报错。所以通过设置为 false ,解决报错
logging:
  level:
    org:
      springframework:
        kafka: ERROR # spring-kafka
      apache:
        kafka: ERROR # kafka
package com.artisan.springkafka.producer;
import com.artisan.springkafka.constants.TOPIC;
import com.artisan.springkafka.domain.MessageMock;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.support.SendResult;
import org.springframework.stereotype.Component;
import org.springframework.util.concurrent.ListenableFuture;
import java.util.Random;
import java.util.concurrent.ExecutionException;
/**
 * Mock producer that synchronously sends keyed messages so related records
 * land on the same partition, preserving per-key consumption order.
 *
 * @author 小工匠
 * @version 1.0
 * @date 2021/2/17 22:25
 * @mark: show me the code , change the world
 */
@Component
public class ArtisanProducerMock {

    @Autowired
    private KafkaTemplate<Object, Object> kafkaTemplate;

    /**
     * Synchronously sends one mock message keyed by a fixed id.
     *
     * @return broker acknowledgement with partition/offset metadata
     * @throws ExecutionException   if the send fails
     * @throws InterruptedException if the calling thread is interrupted while waiting
     */
    public SendResult sendMsgSync() throws ExecutionException, InterruptedException {
        return sendKeyedMessage(6687421);
    }

    /**
     * Synchronously sends one mock message keyed by a second fixed id, so the
     * two keys can be observed landing on (potentially) different partitions.
     *
     * @return broker acknowledgement with partition/offset metadata
     * @throws ExecutionException   if the send fails
     * @throws InterruptedException if the calling thread is interrupted while waiting
     */
    public SendResult sendMsgSync2() throws ExecutionException, InterruptedException {
        return sendKeyedMessage(1255447);
    }

    /**
     * Builds a MessageMock for the given id and sends it synchronously,
     * using the id as the record key. Messages sharing a key hash to the
     * same partition, which is what guarantees their relative order.
     */
    private SendResult sendKeyedMessage(int id) throws ExecutionException, InterruptedException {
        MessageMock messageMock = new MessageMock(id, "artisanTestMessage-" + id);
        // The key serializer is StringSerializer, so the int id must be sent as a String.
        return kafkaTemplate.send(TOPIC.TOPIC, String.valueOf(id), messageMock).get();
    }
}
package com.artisan.springkafka.consumer;
import com.artisan.springkafka.domain.MessageMock;
import com.artisan.springkafka.constants.TOPIC;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.stereotype.Component;
/**
 * Mock consumer demonstrating partition-level ordering: with concurrency = 2,
 * each partition is consumed by a single listener thread, so logging the
 * thread id shows that records of the same key (same partition) are handled
 * in order by one thread.
 *
 * @author 小工匠
 * @version 1.0
 * @date 2021/2/17 22:33
 * @mark: show me the code , change the world
 */
@Component
public class ArtisanCosumerMock {

    private final Logger logger = LoggerFactory.getLogger(getClass());

    private static final String CONSUMER_GROUP_PREFIX = "MOCK-A";

    /**
     * Receives deserialized MessageMock payloads; two concurrent listener
     * threads share the partitions of the topic.
     */
    @KafkaListener(topics = TOPIC.TOPIC, groupId = CONSUMER_GROUP_PREFIX + TOPIC.TOPIC,
            concurrency = "2")
    public void onMessage(MessageMock messageMock) {
        // Restored: the logger field was otherwise unused, and the thread id
        // is what demonstrates single-threaded consumption per partition.
        logger.info("【接受到消息][线程ID:{} 消息内容:{}]", Thread.currentThread().getId(), messageMock);
    }
}
package com.artisan.springkafka.produceTest;
import com.artisan.springkafka.SpringkafkaApplication;
import com.artisan.springkafka.producer.ArtisanProducerMock;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.kafka.support.SendResult;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.util.concurrent.ListenableFuture;
import org.springframework.util.concurrent.ListenableFutureCallback;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
/**
 * Sends pairs of keyed messages and logs the partition each key is routed to,
 * demonstrating that a fixed key always maps to the same partition.
 *
 * @author 小工匠
 * @version 1.0
 * @date 2021/2/17 22:40
 * @mark: show me the code , change the world
 */
@RunWith(SpringRunner.class)
@SpringBootTest(classes = SpringkafkaApplication.class)
public class ProduceMockTest {

    private final Logger logger = LoggerFactory.getLogger(getClass());

    /** Upper bound on how long to wait for asynchronous consumption before the test ends. */
    private static final long CONSUME_WAIT_SECONDS = 10;

    @Autowired
    private ArtisanProducerMock artisanProducerMock;

    @Test
    public void testAsynSend() throws ExecutionException, InterruptedException {
        logger.info("开始发送");
        // 模拟发送多条消息:相同 key 的消息应始终落在同一个分区
        for (int i = 0; i < 5; i++) {
            SendResult sendResult = artisanProducerMock.sendMsgSync();
            logger.info("key {} . 分区:{}", sendResult.getProducerRecord().key(), sendResult.getRecordMetadata().partition());
            SendResult sendResult1 = artisanProducerMock.sendMsgSync2();
            logger.info("key {} . 分区:{}", sendResult1.getProducerRecord().key(), sendResult1.getRecordMetadata().partition());
        }
        // Bounded wait so the @KafkaListener threads get time to consume.
        // The original awaited a CountDownLatch(1) that was never counted
        // down, which blocked the test forever.
        new CountDownLatch(1).await(CONSUME_WAIT_SECONDS, TimeUnit.SECONDS);
    }
}
2021-02-18 23:05:46.903 INFO 24568 --- [ main] c.a.s.produceTest.ProduceMockTest : 开始发送
2021-02-18 23:05:47.088 INFO 24568 --- [ main] c.a.s.produceTest.ProduceMockTest : key 6687421 . 分区:1
2021-02-18 23:05:47.090 INFO 24568 --- [ main] c.a.s.produceTest.ProduceMockTest : key 1255447 . 分区:0
2021-02-18 23:05:47.093 INFO 24568 --- [ main] c.a.s.produceTest.ProduceMockTest : key 6687421 . 分区:1
2021-02-18 23:05:47.096 INFO 24568 --- [ main] c.a.s.produceTest.ProduceMockTest : key 1255447 . 分区:0
2021-02-18 23:05:47.097 INFO 24568 --- [ main] c.a.s.produceTest.ProduceMockTest : key 6687421 . 分区:1
2021-02-18 23:05:47.099 INFO 24568 --- [ main] c.a.s.produceTest.ProduceMockTest : key 1255447 . 分区:0
2021-02-18 23:05:47.101 INFO 24568 --- [ main] c.a.s.produceTest.ProduceMockTest : key 6687421 . 分区:1
2021-02-18 23:05:47.103 INFO 24568 --- [ main] c.a.s.produceTest.ProduceMockTest : key 1255447 . 分区:0
2021-02-18 23:05:47.105 INFO 24568 --- [ main] c.a.s.produceTest.ProduceMockTest : key 6687421 . 分区:1
2021-02-18 23:05:47.106 INFO 24568 --- [ main] c.a.s.produceTest.ProduceMockTest : key 1255447 . 分区:0
懂了么,老兄 ~
https://github.com/yangshangwei/boot2/tree/master/springkafkaSerialConsume