package com.viontech.keliu.consumer;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.viontech.keliu.constants.KafkaConstants;
import com.viontech.keliu.constants.RedisConstants;
import com.viontech.keliu.dao.DPersonLabelDao;
import com.viontech.keliu.entity.PersonLabelContent;
import com.viontech.keliu.service.KafkaProducerService;
import com.viontech.keliu.service.SpeedStatService;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.OffsetAndMetadata;
import org.apache.kafka.common.TopicPartition;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.stereotype.Component;
import org.springframework.util.CollectionUtils;
import javax.annotation.Resource;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.stream.Collectors;
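
/**
 * Kafka batch consumer that persists mall person-label records.
 * <p>
 * Records from the {@code Mall_PersonLabel_*} topics are grouped by
 * topic-partition, deserialized into {@link PersonLabelContent}, and written to
 * the database in bulk; on insert failure the batch is re-queued to a retry
 * topic, and offsets are committed manually per partition.
 */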
@Component
@Slf4j
public class MallPersonLabelConsumer {

    @Value("${vion.consumer.mallPersonLabel.batchEnable:0}")
    private Integer batchEnable;
    @Value("${vion.consumer.mallPersonLabel.batchSize:0}")
    private Integer batchSize;
    @Value("${vion.consumer.mallPersonLabel.batchThreadNum:0}")
    private Integer batchThreadNum;

    @Autowired
    private ObjectMapper objectMapper;
    @Resource
    private DPersonLabelDao dPersonLabelDao;
    @Resource
    private KafkaProducerService kafkaProducerService;
    @Resource
    private SpeedStatService speedStatService;
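
    /**
     * Consumes a batch of Mall_PersonLabel records, persists them, and commits
     * offsets manually per topic-partition.
     * <p>
     * Receiving a {@code List<ConsumerRecord>} together with the raw
     * {@link Consumer} and calling {@code commitSync} here assumes the listener
     * container factory is configured for batch listening with auto-commit
     * disabled. A minimal sketch of such a factory, using the spring-kafka API
     * (the surrounding bean wiring is an assumption, not shown in this file):
     * <pre>{@code
     * ConcurrentKafkaListenerContainerFactory<String, String> factory =
     *         new ConcurrentKafkaListenerContainerFactory<>();
     * factory.setConsumerFactory(consumerFactory); // an existing ConsumerFactory<String, String>
     * factory.setBatchListener(true);              // deliver a List<ConsumerRecord> per poll
     * factory.getContainerProperties().setAckMode(ContainerProperties.AckMode.MANUAL);
     * }</pre>
     *
     * @param recordList the records polled from the Mall_PersonLabel_* topics
     * @param consumer   the underlying Kafka consumer, used for manual offset commits
     */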
    @KafkaListener(topicPattern = "Mall_PersonLabel_.*"
            , autoStartup = "${vion.consumer.mallPersonLabel.autoStartup:false}"
            , groupId = "MallPersonLabelToDb"
            , concurrency = "${vion.consumer.mallPersonLabel.concurrency:1}")
    public void consumerMallPersonLabel(List<ConsumerRecord<String, String>> recordList, Consumer<?, ?> consumer) {
        if (CollectionUtils.isEmpty(recordList)) {
            return;
        }
        try {
            // Group the polled records by topic-partition so offsets can be committed per partition.
            Map<String, List<ConsumerRecord<String, String>>> topicPartitionDataMap = recordList.stream()
                    .collect(Collectors.groupingBy(d -> d.topic() + "-" + d.partition()));
            for (Map.Entry<String, List<ConsumerRecord<String, String>>> entry : topicPartitionDataMap.entrySet()) {
                try {
                    long startTime = System.currentTimeMillis();
                    List<ConsumerRecord<String, String>> recordValues = entry.getValue();
                    if (!CollectionUtils.isEmpty(recordValues)) {
                        ConsumerRecord<String, String> lastRecord = recordValues.get(recordValues.size() - 1);
                        List<PersonLabelContent> labelList = new ArrayList<>();
                        for (ConsumerRecord<String, String> consumerRecord : recordValues) {
                            try {
                                PersonLabelContent dataContent = objectMapper.readValue(consumerRecord.value(), PersonLabelContent.class);
                                if (dataContent != null) {
                                    labelList.add(dataContent);
                                }
                            } catch (Exception ee) {
                                log.error("Processing Mall_PersonLabel[{}], JsonDeserializer.Exception={}", entry.getKey(), ee.getMessage(), ee);
                            }
                        }
                        if (!CollectionUtils.isEmpty(labelList)) {
                            if (batchEnable == 1) {
                                // Split the batch across worker threads.
                                batchHandle(labelList);
                            } else {
                                try {
                                    dPersonLabelDao.batchInsert(labelList);
                                    speedStatService.stat(RedisConstants.PDS_MALLPERSONLABEL_WRITE, labelList.size());
                                } catch (Exception ex) {
                                    log.error("Processing Mall_PersonLabel[{}], batchSize={}, batchInsert.Exception={}", entry.getKey(), labelList.size(), ex.getMessage(), ex);
                                    // On insert failure, re-queue the whole batch to the retry topic.
                                    long startSendTime = System.currentTimeMillis();
                                    kafkaProducerService.sendMessages(KafkaConstants.TOPIC_MALL_RETRY_PERSONLABEL, labelList);
                                    log.info("Processing Mall_PersonLabel[{}], batchSendFinish took {} ms", entry.getKey(), System.currentTimeMillis() - startSendTime);
                                }
                            }
                        }
                        // Commit the offset for this partition manually.
                        Map<TopicPartition, OffsetAndMetadata> offsets = new HashMap<>();
                        offsets.put(
                                new TopicPartition(lastRecord.topic(), lastRecord.partition()),
                                new OffsetAndMetadata(lastRecord.offset() + 1) // commit the offset of the next record to consume
                        );
                        consumer.commitSync(offsets);
                    }
                    log.info("Processing Mall_PersonLabel[{}], batchHandle, {} records took {} ms", entry.getKey(), recordValues.size(), System.currentTimeMillis() - startTime);
                } catch (Throwable e) {
                    log.error("Processing Mall_PersonLabel[{}], Throwable={}", entry.getKey(), e.getMessage(), e);
                }
            }
        } catch (Throwable exx) {
            log.error("Processing Mall_PersonLabel.Throwable={}", exx.getMessage(), exx);
        }
    }
    /**
     * Splits the labels into fixed-size slices and inserts them concurrently.
     *
     * @param recordList the deserialized label records to persist
     */
    private void batchHandle(List<PersonLabelContent> recordList) {
        // Total number of records to persist.
        int total = recordList.size();
        // Guard against misconfiguration: both properties default to 0, which would
        // otherwise stall the loop below and make the pool creation throw.
        int sliceSize = batchSize > 0 ? batchSize : total;
        int threadNum = batchThreadNum > 0 ? batchThreadNum : 1;
        // Note: a fresh pool is created per invocation; see the note after this method.
        ExecutorService threadPool = Executors.newFixedThreadPool(threadNum);
        List<Future<?>> futureList = new ArrayList<>();
        for (int i = 0; i < total; i += sliceSize) {
            List<PersonLabelContent> labelList = recordList.subList(i, Math.min(i + sliceSize, total));
            Future<?> future = threadPool.submit(() -> {
                try {
                    dPersonLabelDao.batchInsert(labelList);
                    speedStatService.stat(RedisConstants.PDS_MALLPERSONLABEL_WRITE, labelList.size());
                } catch (Exception ex) {
                    log.error("Mall_PersonLabel batch processing, batchSize={}, batchInsert.Exception={}", labelList.size(), ex.getMessage(), ex);
                    // On insert failure, re-queue this slice to the retry topic.
                    long startSendTime = System.currentTimeMillis();
                    kafkaProducerService.sendMessages(KafkaConstants.TOPIC_MALL_RETRY_PERSONLABEL, labelList);
                    log.info("Mall_PersonLabel batch processing, batchSendFinish, {} records took {} ms", labelList.size(), System.currentTimeMillis() - startSendTime);
                }
            });
            futureList.add(future);
        }
        threadPool.shutdown();
        // Block until every slice has been handled before returning, so offsets are
        // only committed once the whole batch is persisted or re-queued.
        for (Future<?> future : futureList) {
            try {
                future.get();
            } catch (Exception e) {
                log.error("batchHandle.getFuture.Exception={}", e.getMessage(), e);
            }
        }
    }
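
    // Possible refinement (a sketch, not wired in here): create the executor once,
    // e.g. in a @PostConstruct method sized from batchThreadNum, and shut it down
    // in a @PreDestroy hook, instead of paying thread start-up and teardown cost
    // on every consumed batch.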
}