Commit a682c58c by 毛树良

Initial commit

0 parents
Showing 29 changed files with 2109 additions and 0 deletions
HELP.md
target/
!.mvn/wrapper/maven-wrapper.jar
!**/src/main/**/target/
!**/src/test/**/target/
### STS ###
.apt_generated
.classpath
.factorypath
.project
.settings
.springBeans
.sts4-cache
### IntelliJ IDEA ###
.idea
*.iws
*.iml
*.ipr
### NetBeans ###
/nbproject/private/
/nbbuild/
/dist/
/nbdist/
/.nb-gradle/
build/
!**/src/main/**/build/
!**/src/test/**/build/
### VS Code ###
.vscode/
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-parent</artifactId>
<version>2.7.18</version>
<relativePath/> <!-- lookup parent from repository -->
</parent>
<groupId>com.viontech.keliu</groupId>
<artifactId>VVAS-DataCenter-DBWriter</artifactId>
<version>1.0-SNAPSHOT</version>
<name>VVAS-DataCenter-DBWriter</name>
<description>VVAS-DataCenter-DBWriter</description>
<properties>
<java.version>1.8</java.version>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
</properties>
<dependencies>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-web</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.kafka</groupId>
<artifactId>spring-kafka</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-jdbc</artifactId>
</dependency>
<dependency>
<groupId>org.postgresql</groupId>
<artifactId>postgresql</artifactId>
<version>42.2.2</version>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-data-redis</artifactId>
</dependency>
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
<optional>true</optional>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-test</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
<build>
<finalName>VVAS-DataCenter-DBWriter</finalName>
<plugins>
<plugin>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-maven-plugin</artifactId>
<configuration>
<excludes>
<exclude>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
</exclude>
</excludes>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-jar-plugin</artifactId>
<configuration>
<excludes>
<exclude>application.properties</exclude>
</excludes>
</configuration>
</plugin>
</plugins>
</build>
</project>
package com.viontech.keliu;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
@SpringBootApplication
public class PassengerDataStorageApplication {
public static void main(String[] args) {
SpringApplication.run(PassengerDataStorageApplication.class, args);
}
}
package com.viontech.keliu.config;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.redis.connection.RedisConnectionFactory;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.data.redis.serializer.GenericJackson2JsonRedisSerializer;
import org.springframework.data.redis.serializer.StringRedisSerializer;
@Configuration
public class RedisConfig {
@Bean
public RedisTemplate<String, Object> redisTemplate(RedisConnectionFactory factory) {
RedisTemplate<String, Object> template = new RedisTemplate<>();
template.setConnectionFactory(factory);
// Key serialization
template.setKeySerializer(new StringRedisSerializer());
template.setHashKeySerializer(new StringRedisSerializer());
// Custom ObjectMapper with Java 8 date/time support
ObjectMapper objectMapper = new ObjectMapper();
objectMapper.registerModule(new JavaTimeModule()); // register the Java 8 time module
objectMapper.findAndRegisterModules(); // discover and register any other available modules
// Value serialization
GenericJackson2JsonRedisSerializer serializer = new GenericJackson2JsonRedisSerializer(objectMapper);
template.setValueSerializer(serializer);
template.setHashValueSerializer(serializer);
template.afterPropertiesSet();
return template;
}
}
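A minimal usage sketch of the template configured above; the key names and values are illustrative only. One caveat, hedged: GenericJackson2JsonRedisSerializer is handed a plain ObjectMapper here (no default typing), so complex values are written as plain JSON and generally come back as maps rather than their original classes; the hash counters SpeedStatService maintains are unaffected.

package com.viontech.keliu.config;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.stereotype.Component;
// Hypothetical smoke test for the RedisTemplate bean above ("pds:demo:*" keys are invented).
@Component
public class RedisTemplateUsageSketch {
@Autowired
private RedisTemplate<String, Object> redisTemplate;
public void demo() {
// String key/field with a numeric increment: the same shape SpeedStatService uses.
redisTemplate.opsForHash().increment("pds:demo:counter", "2024-01-01 10:30", 5);
// Plain values are serialized to JSON; JavaTimeModule covers java.time types.
redisTemplate.opsForValue().set("pds:demo:value", java.time.LocalDateTime.now());
}
}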
package com.viontech.keliu.constants;
public class KafkaConstants {
// Retry topic for mall face captures
public static final String TOPIC_MALL_RETRY_FACECAPTURE = "Mall_Retry_FaceCapture";
// Retry topic for mall person labels
public static final String TOPIC_MALL_RETRY_PERSONLABEL = "Mall_Retry_PersonLabel";
/**
* Topic for d_gate_minute_count_data statistics
*/
public static final String MALL_GATE_MINUTE_COUNT_DATA_TOPIC = "Mall_gate_minute_count_data";
/**
* Topic for d_gate_hour_count_data statistics
*/
public static final String MALL_GATE_HOUR_COUNT_DATA_TOPIC = "Mall_gate_hour_count_data";
/**
* Topic for d_gate_day_count_data statistics
*/
public static final String MALL_GATE_DAY_COUNT_DATA_TOPIC = "Mall_gate_day_count_data";
/**
* Topic for d_zone_minute_count_data statistics
*/
public static final String MALL_ZONE_MINUTE_COUNT_DATA_TOPIC = "Mall_zone_minute_count_data";
/**
* Topic for d_zone_hour_count_data statistics
*/
public static final String MALL_ZONE_HOUR_COUNT_DATA_TOPIC = "Mall_zone_hour_count_data";
/**
* Topic for d_zone_day_count_data statistics
*/
public static final String MALL_ZONE_DAY_COUNT_DATA_TOPIC = "Mall_zone_day_count_data";
}
package com.viontech.keliu.constants;
public class ProcessConstants {
// Database operation types
public static class DbOperationType {
public static final String INSERT = "insert";
public static final String UPDATE = "update";
}
}
package com.viontech.keliu.constants;
public class RedisConstants {
/**
* Key for mall face capture DB writes
*/
public static final String PDS_MALLFACECAPTURE_WRITE = "pds:mallFaceCapture:write";
/**
* Key for retried mall face capture DB writes
*/
public static final String PDS_MALLRETRYFACECAPTURE_WRITE = "pds:mallRetryFaceCapture:write";
/**
* Key for mall person label DB writes
*/
public static final String PDS_MALLPERSONLABEL_WRITE = "pds:mallPersonLabel:write";
/**
* Key for retried mall person label DB writes
*/
public static final String PDS_MALLRETRYPERSONLABEL_WRITE = "pds:mallRetryPersonLabel:write";
/**
* Key for d_gate_minute_count_data DB writes
*/
public static final String PDS_MALLGATEMINUTECOUNT_WRITE = "pds:mallGateMinuteCount:write";
/**
* Key for d_zone_minute_count_data DB writes
*/
public static final String PDS_MALLZONEMINUTECOUNT_WRITE = "pds:mallZoneMinuteCount:write";
}
package com.viontech.keliu.consumer;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.viontech.keliu.constants.KafkaConstants;
import com.viontech.keliu.constants.RedisConstants;
import com.viontech.keliu.dao.DFaceRecognitionDao;
import com.viontech.keliu.entity.FaceDataContent;
import com.viontech.keliu.service.KafkaProducerService;
import com.viontech.keliu.service.SpeedStatService;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.OffsetAndMetadata;
import org.apache.kafka.common.TopicPartition;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.stereotype.Component;
import org.springframework.util.CollectionUtils;
import javax.annotation.Resource;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.stream.Collectors;
@Component
@Slf4j
public class MallFaceCaptureConsumer {
@Value("${vion.consumer.mallFaceCapture.batchEnable:0}")
private Integer batchEnable;
@Value("${vion.consumer.mallFaceCapture.batchSize:0}")
private Integer batchSize;
@Value("${vion.consumer.mallFaceCapture.batchThreadNum:0}")
private Integer batchThreadNum;
@Autowired
private ObjectMapper objectMapper;
@Resource
private DFaceRecognitionDao dFaceRecognitionDao;
@Resource
private KafkaProducerService kafkaProducerService;
@Resource
private SpeedStatService speedStatService;
@KafkaListener(topicPattern = "Mall_FaceCapture_.*"
, autoStartup = "${vion.consumer.mallFaceCapture.autoStartup:false}"
, groupId = "MallFaceCaptureToDb"
, concurrency = "${vion.consumer.mallFaceCapture.concurrency:1}")
public void consumerMallFaceCapture(List<ConsumerRecord<String, String>> recordList, Consumer<?, ?> consumer) {
if (CollectionUtils.isEmpty(recordList)) {
return;
}
try {
Map<String, List<ConsumerRecord<String, String>>> topicPartitionDataMap = recordList.stream().collect(Collectors.groupingBy(d -> d.topic() + "-" + d.partition()));
for (Map.Entry<String, List<ConsumerRecord<String, String>>> entry : topicPartitionDataMap.entrySet()) {
try {
long startTime = System.currentTimeMillis();
List<ConsumerRecord<String, String>> recordValues = entry.getValue();
if (!CollectionUtils.isEmpty(recordValues)) {
ConsumerRecord<String, String> lastRecord = recordValues.get(recordValues.size() - 1);
List<FaceDataContent> faceDataList = new ArrayList<>();
for (ConsumerRecord<String, String> consumerRecord : recordValues) {
try {
FaceDataContent faceDataContent = objectMapper.readValue(consumerRecord.value(), FaceDataContent.class);
if (faceDataContent != null) {
faceDataList.add(faceDataContent);
}
} catch (Exception ee) {
log.error("处理Mall_FaceCapture[{}], JsonDeserializerThrowable={}", entry.getKey(), ee.getMessage(), ee);
}
}
// Insert the data
if (!CollectionUtils.isEmpty(faceDataList)) {
if (batchEnable == 1) {
// process in batches
batchHandle(faceDataList);
} else {
try {
dFaceRecognitionDao.batchInsert(faceDataList);
speedStatService.stat(RedisConstants.PDS_MALLFACECAPTURE_WRITE, faceDataList.size());
} catch (Exception ex) {
log.error("处理Mall_FaceCapture[{}], batchSize={}, batchInsert.Exception={}", entry.getKey(), faceDataList.size(), ex.getMessage(), ex);
// 批量插入重试队列
long startSendTime = System.currentTimeMillis();
kafkaProducerService.sendMessages(KafkaConstants.TOPIC_MALL_RETRY_FACECAPTURE, faceDataList);
log.info("处理Mall_FaceCapture[{}], batchSendFinish, {}条,耗时:{} ms", entry.getKey(), faceDataList.size(), System.currentTimeMillis() - startSendTime);
}
}
}
// Commit the offsets
Map<TopicPartition, OffsetAndMetadata> offsets = new HashMap<>();
offsets.put(
new TopicPartition(lastRecord.topic(), lastRecord.partition()),
new OffsetAndMetadata(lastRecord.offset() + 1) // commit the next offset
);
consumer.commitSync(offsets);
}
log.info("处理Mall_FaceCapture[{}], batchHandle, {}条,耗时:{} ms", entry.getKey(), recordValues.size(), System.currentTimeMillis() - startTime);
} catch (Throwable e) {
log.error("处理Mall_FaceCapture[{}], Throwable={}", entry.getKey(), e.getMessage(), e);
}
}
} catch (Throwable exx) {
log.error("处理Mall_FaceCapture.Throwable={}", exx.getMessage(), exx);
}
}
/**
* Process records in batches
* @param recordList
*/
private void batchHandle(List<FaceDataContent> recordList) {
// total number of records
int total = recordList.size();
ExecutorService threadPool = Executors.newFixedThreadPool(batchThreadNum);
List<Future> futureList = new ArrayList<>();
for (int i = 0; i < total; i += batchSize) {
List<FaceDataContent> faceDataList = recordList.subList(i, Math.min(i + batchSize, total));
Future<?> future = threadPool.submit(() -> {
try {
dFaceRecognitionDao.batchInsert(faceDataList);
speedStatService.stat(RedisConstants.PDS_MALLFACECAPTURE_WRITE, faceDataList.size());
} catch (Exception ex) {
log.error("处理Mall_FaceCapture分批处理, batchSize={}, batchInsert.Exception={}", faceDataList.size(), ex.getMessage(), ex);
// 批量插入重试队列
long startSendTime = System.currentTimeMillis();
kafkaProducerService.sendMessages(KafkaConstants.TOPIC_MALL_RETRY_FACECAPTURE, faceDataList);
log.info("处理Mall_FaceCapture分批处理, batchSendFinish, {}条,耗时:{} ms", faceDataList.size(), System.currentTimeMillis() - startSendTime);
}
});
futureList.add(future);
}
threadPool.shutdown();
for (Future future : futureList) {
try {
future.get();
} catch (Exception e) {
log.error("batchHandle.getFuture.Exception={}", e.getMessage(), e);
}
}
}
}
package com.viontech.keliu.consumer;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.viontech.keliu.constants.KafkaConstants;
import com.viontech.keliu.constants.ProcessConstants;
import com.viontech.keliu.constants.RedisConstants;
import com.viontech.keliu.dao.DGateMinuteCountDataDao;
import com.viontech.keliu.entity.GateCountData;
import com.viontech.keliu.service.SpeedStatService;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.support.Acknowledgment;
import org.springframework.stereotype.Component;
import org.springframework.util.CollectionUtils;
import javax.annotation.Resource;
import java.util.*;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.stream.Collectors;
@Slf4j
@Component
public class MallGateMinuteCountDataConsumer {
@Value("${vion.consumer.mallGateMinuteCount.batchSize:20}")
private Integer batchSize;
@Value("${vion.consumer.mallGateMinuteCount.batchThreadNum:5}")
private Integer batchThreadNum;
@Autowired
private ObjectMapper objectMapper;
@Resource
private DGateMinuteCountDataDao dGateMinuteCountDataDao;
@Resource
private SpeedStatService speedStatService;
@KafkaListener(topics = KafkaConstants.MALL_GATE_MINUTE_COUNT_DATA_TOPIC
, autoStartup = "${vion.consumer.mallGateMinuteCount.autoStartup:false}"
, groupId = "MallGateMinuteCountToDb"
, concurrency = "${vion.consumer.mallGateMinuteCount.concurrency:1}")
public void consumerMallGateMinuteCount(List<ConsumerRecord<String, String>> recordList, Acknowledgment ack) {
if (CollectionUtils.isEmpty(recordList)) {
return;
}
try {
long startTime = System.currentTimeMillis();
List<GateCountData> dataList = new ArrayList<>();
for (ConsumerRecord<String, String> consumerRecord : recordList) {
try {
GateCountData dataContent = objectMapper.readValue(consumerRecord.value(), GateCountData.class);
if (dataContent != null) {
dataList.add(dataContent);
}
} catch (Exception ee) {
log.error("处理Mall_GateMinuteCount.offset={}, JsonDeserializerThrowable={}", consumerRecord.offset(), ee.getMessage(), ee);
}
}
if (!CollectionUtils.isEmpty(dataList)) {
// inserts first, then updates
List<GateCountData> insertList = dataList.stream().filter(d -> ProcessConstants.DbOperationType.INSERT.equals(d.getOperationType())).collect(Collectors.toList());
batchHandle(insertList, ProcessConstants.DbOperationType.INSERT);
List<GateCountData> updateList = dataList.stream().filter(d -> ProcessConstants.DbOperationType.UPDATE.equals(d.getOperationType())).collect(Collectors.toList());
batchHandle(updateList, ProcessConstants.DbOperationType.UPDATE);
}
log.info("处理Mall_GateMinuteCount, batchHandle, {}条,耗时:{} ms", dataList.size(), System.currentTimeMillis() - startTime);
} catch (Throwable exx) {
log.error("处理Mall_GateMinuteCount.Throwable={}", exx.getMessage(), exx);
}
ack.acknowledge();
}
/**
* Process records in batches
* @param recordList
*/
private void batchHandle(List<GateCountData> recordList, String operationType) {
if (CollectionUtils.isEmpty(recordList)) {
return;
}
// total number of records
int total = recordList.size();
ExecutorService threadPool = Executors.newFixedThreadPool(batchThreadNum);
List<Future> futureList = new ArrayList<>();
for (int i = 0; i < total; i += batchSize) {
List<GateCountData> subList = recordList.subList(i, Math.min(i + batchSize, total));
Future<?> future = threadPool.submit(() -> {
try {
if (ProcessConstants.DbOperationType.INSERT.equals(operationType)) {
insertHandle(subList);
} else if (ProcessConstants.DbOperationType.UPDATE.equals(operationType)) {
updateHandle(subList);
}
} catch (Exception ex) {
log.error("处理Mall_GateMinuteCount分批处理, batchSize={}, batchException={}", subList.size(), ex.getMessage(), ex);
}
});
futureList.add(future);
}
threadPool.shutdown();
for (Future future : futureList) {
try {
future.get();
} catch (Exception e) {
log.error("batchHandle.getFuture.Exception={}", e.getMessage(), e);
}
}
}
/**
* Handle inserts.
* Tries a batch insert first; on failure, falls back to inserting rows one by one.
* @param insertList
*/
private void insertHandle(List<GateCountData> insertList) {
if (CollectionUtils.isEmpty(insertList)) {
return;
}
// number of rows successfully inserted
Integer insertCount = 0;
try {
dGateMinuteCountDataDao.batchInsert(insertList);
insertCount = insertList.size();
} catch (Exception ex) {
log.error("处理Mall_GateMinuteCount.insertHandle={}, batchInsert.Exception={}", insertList.size(), ex.getMessage(), ex);
log.info("准备二次单条插入,处理Mall_GateMinuteCount.insertHandle={}, batchInsert.Exception={}", insertList.size(), ex.getMessage());
for (GateCountData gateCountData : insertList) {
try {
dGateMinuteCountDataDao.insert(gateCountData);
insertCount = insertCount + 1;
} catch (Exception e) {
try {
log.info("数据二次写入错误:{}", objectMapper.writeValueAsString(gateCountData));
log.error("数据二次写入错误:{}", e.getMessage(), e);
} catch (Exception ee) {
log.error("数据二次写入错误日志:{}", ee.getMessage(), ee);
}
}
}
}
speedStatService.stat(RedisConstants.PDS_MALLGATEMINUTECOUNT_WRITE, insertCount);
}
/**
* Handle updates.
* Tries a batch update first; on failure, falls back to updating rows one by one.
* @param updateList
*/
private void updateHandle(List<GateCountData> updateList) {
if (CollectionUtils.isEmpty(updateList)) {
return;
}
// number of rows successfully updated
Integer updateCount = 0;
try {
dGateMinuteCountDataDao.batchUpdate(updateList);
updateCount = updateList.size();
} catch (Exception ex) {
log.error("处理Mall_GateMinuteCount.updateHandle={}, batchUpdate.Exception={}", updateList.size(), ex.getMessage(), ex);
log.info("准备二次单条更新,处理Mall_GateMinuteCount.updateHandle={}, batchUpdate.Exception={}", updateList.size(), ex.getMessage());
for (GateCountData gateCountData : updateList) {
try {
dGateMinuteCountDataDao.update(gateCountData);
updateCount = updateCount + 1;
} catch (Exception e) {
try {
log.info("数据二次更新错误:{}", objectMapper.writeValueAsString(gateCountData));
log.error("数据二次更新错误:{}", e.getMessage(), e);
} catch (Exception ee) {
log.error("数据二次更新错误日志:{}", ee.getMessage(), ee);
}
}
}
}
speedStatService.stat(RedisConstants.PDS_MALLGATEMINUTECOUNT_WRITE, updateCount);
}
}
package com.viontech.keliu.consumer;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.viontech.keliu.constants.KafkaConstants;
import com.viontech.keliu.constants.RedisConstants;
import com.viontech.keliu.dao.DPersonLabelDao;
import com.viontech.keliu.entity.PersonLabelContent;
import com.viontech.keliu.service.KafkaProducerService;
import com.viontech.keliu.service.SpeedStatService;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.OffsetAndMetadata;
import org.apache.kafka.common.TopicPartition;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.stereotype.Component;
import org.springframework.util.CollectionUtils;
import javax.annotation.Resource;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.stream.Collectors;
@Component
@Slf4j
public class MallPersonLabelConsumer {
@Value("${vion.consumer.mallPersonLabel.batchEnable:0}")
private Integer batchEnable;
@Value("${vion.consumer.mallPersonLabel.batchSize:0}")
private Integer batchSize;
@Value("${vion.consumer.mallPersonLabel.batchThreadNum:0}")
private Integer batchThreadNum;
@Autowired
private ObjectMapper objectMapper;
@Resource
private DPersonLabelDao dPersonLabelDao;
@Resource
private KafkaProducerService kafkaProducerService;
@Resource
private SpeedStatService speedStatService;
@KafkaListener(topicPattern = "Mall_PersonLabel_.*"
, autoStartup = "${vion.consumer.mallPersonLabel.autoStartup:false}"
, groupId = "MallPersonLabelToDb"
, concurrency = "${vion.consumer.mallPersonLabel.concurrency:1}")
public void consumerMallPersonLabel(List<ConsumerRecord<String, String>> recordList, Consumer<?, ?> consumer) {
if (CollectionUtils.isEmpty(recordList)) {
return;
}
try {
Map<String, List<ConsumerRecord<String, String>>> topicPartitionDataMap = recordList.stream().collect(Collectors.groupingBy(d -> d.topic() + "-" + d.partition()));
for (Map.Entry<String, List<ConsumerRecord<String, String>>> entry : topicPartitionDataMap.entrySet()) {
try {
long startTime = System.currentTimeMillis();
List<ConsumerRecord<String, String>> recordValues = entry.getValue();
if (!CollectionUtils.isEmpty(recordValues)) {
ConsumerRecord<String, String> lastRecord = recordValues.get(recordValues.size() - 1);
List<PersonLabelContent> labelList = new ArrayList<>();
for (ConsumerRecord<String, String> consumerRecord : recordValues) {
try {
PersonLabelContent dataContent = objectMapper.readValue(consumerRecord.value(), PersonLabelContent.class);
if (dataContent != null) {
labelList.add(dataContent);
}
} catch (Exception ee) {
log.error("处理Mall_PersonLabel[{}], JsonDeserializerThrowable={}", entry.getKey(), ee.getMessage(), ee);
}
}
if (!CollectionUtils.isEmpty(labelList)) {
if (batchEnable == 1) {
// process in batches
batchHandle(labelList);
} else {
try {
dPersonLabelDao.batchInsert(labelList);
speedStatService.stat(RedisConstants.PDS_MALLPERSONLABEL_WRITE, labelList.size());
} catch (Exception ex) {
log.error("处理Mall_PersonLabel[{}], batchSize={}, batchInsert.Exception={}", entry.getKey(), labelList.size(), ex.getMessage(), ex);
// 批量插入重试队列
long startSendTime = System.currentTimeMillis();
kafkaProducerService.sendMessages(KafkaConstants.TOPIC_MALL_RETRY_PERSONLABEL, labelList);
log.info("处理Mall_PersonLabel[{}], batchSendFinish耗时:{} ms", entry.getKey(), System.currentTimeMillis() - startSendTime);
}
}
}
// Commit the offsets
Map<TopicPartition, OffsetAndMetadata> offsets = new HashMap<>();
offsets.put(
new TopicPartition(lastRecord.topic(), lastRecord.partition()),
new OffsetAndMetadata(lastRecord.offset() + 1) // commit the next offset
);
consumer.commitSync(offsets);
}
log.info("处理Mall_PersonLabel[{}], batchHandle, {}条,耗时:{} ms", entry.getKey(), recordValues.size(), System.currentTimeMillis() - startTime);
} catch (Throwable e) {
log.error("处理Mall_PersonLabel[{}], Throwable={}", entry.getKey(), e.getMessage(), e);
}
}
} catch (Throwable exx) {
log.error("处理Mall_PersonLabel.Throwable={}", exx.getMessage(), exx);
}
}
/**
* Process records in batches
* @param recordList
*/
private void batchHandle(List<PersonLabelContent> recordList) {
// total number of records
int total = recordList.size();
ExecutorService threadPool = Executors.newFixedThreadPool(batchThreadNum);
List<Future> futureList = new ArrayList<>();
for (int i = 0; i < total; i += batchSize) {
List<PersonLabelContent> labelList = recordList.subList(i, Math.min(i + batchSize, total));
Future<?> future = threadPool.submit(() -> {
try {
dPersonLabelDao.batchInsert(labelList);
speedStatService.stat(RedisConstants.PDS_MALLPERSONLABEL_WRITE, labelList.size());
} catch (Exception ex) {
log.error("处理Mall_PersonLabel分批处理, batchSize={}, batchInsert.Exception={}", labelList.size(), ex.getMessage(), ex);
// 批量插入重试队列
long startSendTime = System.currentTimeMillis();
kafkaProducerService.sendMessages(KafkaConstants.TOPIC_MALL_RETRY_PERSONLABEL, labelList);
log.info("处理Mall_PersonLabel分批处理, batchSendFinish, {}条,耗时:{} ms", labelList.size(), System.currentTimeMillis() - startSendTime);
}
});
futureList.add(future);
}
threadPool.shutdown();
for (Future future : futureList) {
try {
future.get();
} catch (Exception e) {
log.error("batchHandle.getFuture.Exception={}", e.getMessage(), e);
}
}
}
}
package com.viontech.keliu.consumer;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.viontech.keliu.constants.KafkaConstants;
import com.viontech.keliu.constants.RedisConstants;
import com.viontech.keliu.dao.DFaceRecognitionDao;
import com.viontech.keliu.entity.FaceDataContent;
import com.viontech.keliu.service.SpeedStatService;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.OffsetAndMetadata;
import org.apache.kafka.common.TopicPartition;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.stereotype.Component;
import org.springframework.util.CollectionUtils;
import javax.annotation.Resource;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
@Component
@Slf4j
public class MallRetryFaceCaptureConsumer {
@Autowired
private ObjectMapper objectMapper;
@Resource
private DFaceRecognitionDao dFaceRecognitionDao;
@Resource
private SpeedStatService speedStatService;
@KafkaListener(topics = KafkaConstants.TOPIC_MALL_RETRY_FACECAPTURE
, autoStartup = "${vion.consumer.mallRetryFaceCapture.autoStartup:false}"
, groupId = "MallRetryFaceCaptureToDb"
, concurrency = "${vion.consumer.mallRetryFaceCapture.concurrency:1}")
public void consumerMallRetryFaceCapture(List<ConsumerRecord<String, String>> recordList, Consumer<?, ?> consumer) {
if (CollectionUtils.isEmpty(recordList)) {
return;
}
try {
Map<String, List<ConsumerRecord<String, String>>> topicPartitionDataMap = recordList.stream().collect(Collectors.groupingBy(d -> d.topic() + "-" + d.partition()));
for (Map.Entry<String, List<ConsumerRecord<String, String>>> entry : topicPartitionDataMap.entrySet()) {
try {
long startTime = System.currentTimeMillis();
List<ConsumerRecord<String, String>> recordValues = entry.getValue();
if (!CollectionUtils.isEmpty(recordValues)) {
ConsumerRecord<String, String> lastRecord = recordValues.get(recordValues.size() - 1);
List<FaceDataContent> faceDataList = new ArrayList<>();
for (ConsumerRecord<String, String> consumerRecord : recordValues) {
try {
FaceDataContent faceDataContent = objectMapper.readValue(consumerRecord.value(), FaceDataContent.class);
if (faceDataContent != null) {
faceDataList.add(faceDataContent);
}
} catch (Exception ee) {
log.error("处理Mall_Retry_FaceCapture[{}], JsonDeserializerThrowable={}", entry.getKey(), ee.getMessage(), ee);
}
}
if (!CollectionUtils.isEmpty(faceDataList)) {
for (FaceDataContent faceDataContent : faceDataList) {
try {
dFaceRecognitionDao.insert(faceDataContent);
speedStatService.stat(RedisConstants.PDS_MALLRETRYFACECAPTURE_WRITE, 1);
} catch (Exception ex) {
log.error("处理Mall_Retry_FaceCapture[{}], mallid={}, unid={}, insert.Exception={}", entry.getKey(), faceDataContent.getMallId(), faceDataContent.getUnid(), ex.getMessage(), ex);
}
}
}
// Commit the offsets
Map<TopicPartition, OffsetAndMetadata> offsets = new HashMap<>();
offsets.put(
new TopicPartition(lastRecord.topic(), lastRecord.partition()),
new OffsetAndMetadata(lastRecord.offset() + 1) // commit the next offset
);
consumer.commitSync(offsets);
}
log.info("处理Mall_Retry_FaceCapture[{}], batchHandle耗时:{} ms", entry.getKey(), System.currentTimeMillis() - startTime);
} catch (Throwable e) {
log.error("处理Mall_Retry_FaceCapture[{}], Throwable={}", entry.getKey(), e.getMessage(), e);
}
}
} catch (Throwable exx) {
log.error("处理Mall_Retry_FaceCapture.Throwable={}", exx.getMessage(), exx);
}
}
}
package com.viontech.keliu.consumer;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.viontech.keliu.constants.KafkaConstants;
import com.viontech.keliu.constants.RedisConstants;
import com.viontech.keliu.dao.DPersonLabelDao;
import com.viontech.keliu.entity.PersonLabelContent;
import com.viontech.keliu.service.SpeedStatService;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.OffsetAndMetadata;
import org.apache.kafka.common.TopicPartition;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.stereotype.Component;
import org.springframework.util.CollectionUtils;
import javax.annotation.Resource;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
@Component
@Slf4j
public class MallRetryPersonLabelConsumer {
@Autowired
private ObjectMapper objectMapper;
@Resource
private DPersonLabelDao dPersonLabelDao;
@Resource
private SpeedStatService speedStatService;
@KafkaListener(topics = KafkaConstants.TOPIC_MALL_RETRY_PERSONLABEL
, autoStartup = "${vion.consumer.mallRetryPersonLabel.autoStartup:false}"
, groupId = "MallRetryPersonLabelToDb"
, concurrency = "${vion.consumer.mallRetryPersonLabel.concurrency:1}")
public void consumerMallRetryPersonLabel(List<ConsumerRecord<String, String>> recordList, Consumer<?, ?> consumer) {
if (CollectionUtils.isEmpty(recordList)) {
return;
}
try {
Map<String, List<ConsumerRecord<String, String>>> topicPartitionDataMap = recordList.stream().collect(Collectors.groupingBy(d -> d.topic() + "-" + d.partition()));
for (Map.Entry<String, List<ConsumerRecord<String, String>>> entry : topicPartitionDataMap.entrySet()) {
try {
long startTime = System.currentTimeMillis();
List<ConsumerRecord<String, String>> recordValues = entry.getValue();
if (!CollectionUtils.isEmpty(recordValues)) {
ConsumerRecord<String, String> lastRecord = recordValues.get(recordValues.size() - 1);
List<PersonLabelContent> labelList = new ArrayList<>();
for (ConsumerRecord<String, String> consumerRecord : recordValues) {
try {
PersonLabelContent dataContent = objectMapper.readValue(consumerRecord.value(), PersonLabelContent.class);
if (dataContent != null) {
labelList.add(dataContent);
}
} catch (Exception ee) {
log.error("处理Mall_Retry_PersonLabel[{}], JsonDeserializerThrowable={}", entry.getKey(), ee.getMessage(), ee);
}
}
if (!CollectionUtils.isEmpty(labelList)) {
for (PersonLabelContent content : labelList) {
try {
dPersonLabelDao.insert(content);
speedStatService.stat(RedisConstants.PDS_MALLRETRYPERSONLABEL_WRITE, 1);
} catch (Exception ex) {
log.error("处理Mall_Retry_PersonLabel[{}], mallid={}, unid={}, insert.Exception={}", entry.getKey(), content.getMallId(), content.getRecognitionUnid(), ex.getMessage(), ex);
}
}
}
// Commit the offsets
Map<TopicPartition, OffsetAndMetadata> offsets = new HashMap<>();
offsets.put(
new TopicPartition(lastRecord.topic(), lastRecord.partition()),
new OffsetAndMetadata(lastRecord.offset() + 1) // commit the next offset
);
consumer.commitSync(offsets);
}
log.info("处理Mall_Retry_PersonLabel[{}], batchHandle耗时:{} ms", entry.getKey(), System.currentTimeMillis() - startTime);
} catch (Throwable e) {
log.error("处理Mall_Retry_PersonLabel[{}], Throwable={}", entry.getKey(), e.getMessage(), e);
}
}
} catch (Throwable exx) {
log.error("处理Mall_Retry_PersonLabel.Throwable={}", exx.getMessage(), exx);
}
}
}
package com.viontech.keliu.consumer;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.viontech.keliu.constants.KafkaConstants;
import com.viontech.keliu.constants.ProcessConstants;
import com.viontech.keliu.constants.RedisConstants;
import com.viontech.keliu.dao.DZoneMinuteCountDataDao;
import com.viontech.keliu.entity.ZoneCountData;
import com.viontech.keliu.service.SpeedStatService;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.support.Acknowledgment;
import org.springframework.stereotype.Component;
import org.springframework.util.CollectionUtils;
import javax.annotation.Resource;
import java.util.*;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.stream.Collectors;
@Slf4j
@Component
public class MallZoneMinuteCountDataConsumer {
@Value("${vion.consumer.mallZoneMinuteCount.batchSize:20}")
private Integer batchSize;
@Value("${vion.consumer.mallZoneMinuteCount.batchThreadNum:5}")
private Integer batchThreadNum;
@Autowired
private ObjectMapper objectMapper;
@Resource
private DZoneMinuteCountDataDao dZoneMinuteCountDataDao;
@Resource
private SpeedStatService speedStatService;
@KafkaListener(topics = KafkaConstants.MALL_ZONE_MINUTE_COUNT_DATA_TOPIC
, autoStartup = "${vion.consumer.mallZoneMinuteCount.autoStartup:false}"
, groupId = "MallZoneMinuteCountToDb"
, concurrency = "${vion.consumer.mallZoneMinuteCount.concurrency:1}")
public void consumerMallZoneMinuteCount(List<ConsumerRecord<String, String>> recordList, Acknowledgment ack) {
if (CollectionUtils.isEmpty(recordList)) {
return;
}
try {
long startTime = System.currentTimeMillis();
List<ZoneCountData> dataList = new ArrayList<>();
for (ConsumerRecord<String, String> consumerRecord : recordList) {
try {
ZoneCountData dataContent = objectMapper.readValue(consumerRecord.value(), ZoneCountData.class);
if (dataContent != null) {
dataList.add(dataContent);
}
} catch (Exception ee) {
log.error("处理Mall_ZoneMinuteCount.offset={}, JsonDeserializerThrowable={}", consumerRecord.offset(), ee.getMessage(), ee);
}
}
if (!CollectionUtils.isEmpty(dataList)) {
// inserts first, then updates
List<ZoneCountData> insertList = dataList.stream().filter(d -> ProcessConstants.DbOperationType.INSERT.equals(d.getOperationType())).collect(Collectors.toList());
batchHandle(insertList, ProcessConstants.DbOperationType.INSERT);
List<ZoneCountData> updateList = dataList.stream().filter(d -> ProcessConstants.DbOperationType.UPDATE.equals(d.getOperationType())).collect(Collectors.toList());
batchHandle(updateList, ProcessConstants.DbOperationType.UPDATE);
}
log.info("处理Mall_ZoneMinuteCount, batchHandle, {}条,耗时:{} ms", dataList.size(), System.currentTimeMillis() - startTime);
} catch (Throwable exx) {
log.error("处理Mall_ZoneMinuteCount.Throwable={}", exx.getMessage(), exx);
}
ack.acknowledge();
}
/**
* Process records in batches
* @param recordList
*/
private void batchHandle(List<ZoneCountData> recordList, String operationType) {
if (CollectionUtils.isEmpty(recordList)) {
return;
}
// total number of records
int total = recordList.size();
ExecutorService threadPool = Executors.newFixedThreadPool(batchThreadNum);
List<Future> futureList = new ArrayList<>();
for (int i = 0; i < total; i += batchSize) {
List<ZoneCountData> subList = recordList.subList(i, Math.min(i + batchSize, total));
Future<?> future = threadPool.submit(() -> {
try {
if (ProcessConstants.DbOperationType.INSERT.equals(operationType)) {
insertHandle(subList);
} else if (ProcessConstants.DbOperationType.UPDATE.equals(operationType)) {
updateHandle(subList);
}
} catch (Exception ex) {
log.error("处理Mall_ZoneMinuteCount分批处理, batchSize={}, batchException={}", subList.size(), ex.getMessage(), ex);
}
});
futureList.add(future);
}
threadPool.shutdown();
for (Future future : futureList) {
try {
future.get();
} catch (Exception e) {
log.error("batchHandle.getFuture.Exception={}", e.getMessage(), e);
}
}
}
/**
* Handle inserts.
* Tries a batch insert first; on failure, falls back to inserting rows one by one.
* @param insertList
*/
private void insertHandle(List<ZoneCountData> insertList) {
if (CollectionUtils.isEmpty(insertList)) {
return;
}
// number of rows successfully inserted
Integer insertCount = 0;
try {
dZoneMinuteCountDataDao.batchInsert(insertList);
insertCount = insertList.size();
} catch (Exception ex) {
log.error("处理Mall_ZoneMinuteCount.insertHandle={}, batchInsert.Exception={}", insertList.size(), ex.getMessage(), ex);
log.info("准备二次单条插入,处理Mall_ZoneMinuteCount.insertHandle={}, batchInsert.Exception={}", insertList.size(), ex.getMessage());
for (ZoneCountData countData : insertList) {
try {
dZoneMinuteCountDataDao.insert(countData);
insertCount = insertCount + 1;
} catch (Exception e) {
try {
log.info("数据二次写入错误:{}", objectMapper.writeValueAsString(countData));
log.error("数据二次写入错误:{}", e.getMessage(), e);
} catch (Exception ee) {
log.error("数据二次写入错误日志:{}", ee.getMessage(), ee);
}
}
}
}
speedStatService.stat(RedisConstants.PDS_MALLZONEMINUTECOUNT_WRITE, insertCount);
}
/**
* Handle updates.
* Tries a batch update first; on failure, falls back to updating rows one by one.
* @param updateList
*/
private void updateHandle(List<ZoneCountData> updateList) {
if (CollectionUtils.isEmpty(updateList)) {
return;
}
// number of rows successfully updated
Integer updateCount = 0;
try {
dZoneMinuteCountDataDao.batchUpdate(updateList);
updateCount = updateList.size();
} catch (Exception ex) {
log.error("处理Mall_ZoneMinuteCount.updateHandle={}, batchUpdate.Exception={}", updateList.size(), ex.getMessage(), ex);
log.info("准备二次单条更新,处理Mall_ZoneMinuteCount.updateHandle={}, batchUpdate.Exception={}", updateList.size(), ex.getMessage());
for (ZoneCountData countData : updateList) {
try {
dZoneMinuteCountDataDao.update(countData);
updateCount = updateCount + 1;
} catch (Exception e) {
try {
log.info("数据二次更新错误:{}", objectMapper.writeValueAsString(countData));
log.error("数据二次更新错误:{}", e.getMessage(), e);
} catch (Exception ee) {
log.error("数据二次更新错误日志:{}", ee.getMessage(), ee);
}
}
}
}
speedStatService.stat(RedisConstants.PDS_MALLZONEMINUTECOUNT_WRITE, updateCount);
}
}
package com.viontech.keliu.controller;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RestController;
@RestController
public class ServiceHealthController {
/**
* Service health status
* @return
*/
@GetMapping("/health")
public String getServiceHealthStatus() {
return "success";
}
}
package com.viontech.keliu.dao;
import com.viontech.keliu.entity.FaceDataContent;
import org.springframework.jdbc.core.namedparam.BeanPropertySqlParameterSource;
import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate;
import org.springframework.jdbc.core.namedparam.SqlParameterSource;
import org.springframework.jdbc.core.namedparam.SqlParameterSourceUtils;
import org.springframework.stereotype.Repository;
import javax.annotation.Resource;
import java.util.List;
@Repository
public class DFaceRecognitionDao {
@Resource
private NamedParameterJdbcTemplate namedParameterJdbcTemplate;
public final static String INSERT_FACE_RECOGNITION_SQL = "INSERT INTO d_face_recognition (device_id, channel_id, gate_id, person_unid,person_type, device_serialnum, channel_serialnum, face_pic, body_pic, mood, age, gender, direction, counttime, countdate, mall_id, account_id,status,track_info,track_time,happy_conf,unid,history_arrival_count,face_score,face_type,face_pic_num,body_pic_num ,face_feature_type ,body_feature_type,body_type,intersect_x,intersect_y,intersect_time,age_group,duration,event_image,event_coordinate,pay_type,pick_up_goods,together_count,event_video) VALUES "
+ "(:deviceId, :channelId, :gateId, :personUnid, :personType ,:deviceSerialnum, :channelSerialnum, :facePic, :bodyPic, :mood, :age, :gender, :direction, :counttime, :countdate, :mallId, :accountId,:status,:trackInfo,:trackTime,:happyConf,:unid,:historyArrivalCount,:faceScore,:faceType,:facePicNum,:bodyPicNum,:faceFeatureType,:bodyFeatureType,:bodyType,:intersectX,:intersectY,:intersectTime,:ageGroup,:duration,:eventImage,:eventCoordinate,:payType,:pickUpGoods,:togetherCount,:eventVideo);";
public void insert(FaceDataContent faceDataContent) {
SqlParameterSource parameterSource = new BeanPropertySqlParameterSource(faceDataContent);
namedParameterJdbcTemplate.update(INSERT_FACE_RECOGNITION_SQL, parameterSource);
}
public void batchInsert(List<FaceDataContent> faceDataContents) {
SqlParameterSource[] batch = SqlParameterSourceUtils.createBatch(faceDataContents.toArray());
namedParameterJdbcTemplate.batchUpdate(INSERT_FACE_RECOGNITION_SQL, batch);
}
}
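For reference, a minimal caller sketch (all values invented): BeanPropertySqlParameterSource resolves each :placeholder in INSERT_FACE_RECOGNITION_SQL from the FaceDataContent getter of the same name, and SqlParameterSourceUtils.createBatch builds one parameter source per list element so batchUpdate executes the statement once per row.

package com.viontech.keliu.dao;
import com.viontech.keliu.entity.FaceDataContent;
import java.util.Collections;
// Hypothetical usage of the DAO above; field values are illustrative only.
public class DFaceRecognitionDaoSketch {
public static void demo(DFaceRecognitionDao dao) {
FaceDataContent row = new FaceDataContent();
row.setDeviceId(1L);         // -> :deviceId
row.setMallId(100L);         // -> :mallId
row.setUnid("capture-0001"); // -> :unid (invented id)
dao.insert(row);                                 // single-row insert
dao.batchInsert(Collections.singletonList(row)); // one JDBC batch
}
}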
package com.viontech.keliu.dao;
import com.viontech.keliu.entity.GateCountData;
import org.springframework.jdbc.core.namedparam.BeanPropertySqlParameterSource;
import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate;
import org.springframework.jdbc.core.namedparam.SqlParameterSource;
import org.springframework.jdbc.core.namedparam.SqlParameterSourceUtils;
import org.springframework.stereotype.Repository;
import javax.annotation.Resource;
import java.util.List;
@Repository
public class DGateMinuteCountDataDao {
@Resource
private NamedParameterJdbcTemplate namedParameterJdbcTemplate;
public final static String INSERT_SQL = "INSERT INTO d_gate_minute_count_data(mall_id, account_id, gate_id, innum, outnum, outside_innum, outside_outnum, countdate, counttime, hour) VALUES (:mallId, :accountId, :gateId, :innum, :outnum, :outsideInnum, :outsideOutnum, :countdate, :counttime, :hour);";
public final static String UPDATE_SQL = "UPDATE d_gate_minute_count_data SET modify_time=now(),innum=:innum,outnum=:outnum,outside_innum=:outsideInnum,outside_outnum=:outsideOutnum where mall_id=:mallId and countdate=:countdate and gate_id=:gateId and counttime = :counttime;";
public void insert(GateCountData data) {
SqlParameterSource parameterSource = new BeanPropertySqlParameterSource(data);
namedParameterJdbcTemplate.update(INSERT_SQL, parameterSource);
}
public void batchInsert(List<GateCountData> dataList) {
SqlParameterSource[] batch = SqlParameterSourceUtils.createBatch(dataList.toArray());
namedParameterJdbcTemplate.batchUpdate(INSERT_SQL, batch);
}
public void update(GateCountData data) {
SqlParameterSource sqlParameterSource = new BeanPropertySqlParameterSource(data);
namedParameterJdbcTemplate.update(UPDATE_SQL, sqlParameterSource);
}
public void batchUpdate(List<GateCountData> dataList) {
SqlParameterSource[] batch = SqlParameterSourceUtils.createBatch(dataList.toArray());
namedParameterJdbcTemplate.batchUpdate(UPDATE_SQL, batch);
}
}
package com.viontech.keliu.dao;
import com.viontech.keliu.entity.PersonLabelContent;
import org.springframework.jdbc.core.namedparam.BeanPropertySqlParameterSource;
import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate;
import org.springframework.jdbc.core.namedparam.SqlParameterSource;
import org.springframework.jdbc.core.namedparam.SqlParameterSourceUtils;
import org.springframework.stereotype.Repository;
import javax.annotation.Resource;
import java.util.List;
@Repository
public class DPersonLabelDao {
@Resource
private NamedParameterJdbcTemplate namedParameterJdbcTemplate;
private final static String INSERT_PERSON_LABEL_SQL = "INSERT INTO d_person_label (recognition_unid, account_id, mall_id, count_date, count_time, hair_style,hair_color,hat,hat_color,glasses,mask,earring,jacket_type, jacket_color, " +
"bottoms_type, bottoms_color,suit_type,suit_color,clothing_brand,necklace,watch,bracelet,phone,rucksack_type,rucksack_color,handbag_type,handbag_color,shopping_bag_type,shopping_bag_color,shoe_type,shoe_color) "
+ "VALUES (:recognitionUnid, :accountId, :mallId,:countDate,:countTime,:hairStyle,:hairColor,:hat,:hatColor,:glasses,:mask,:earring,:jacketType,:jacketColor,:bottomsType,:bottomsColor,:suitType,:suitColor,:clothingBrand,:necklace," +
":watch,:bracelet,:phone,:rucksackType,:rucksackColor,:handbagType,:handbagColor,:shoppingBagType,:shoppingBagColor,:shoeType,:shoeColor);";
public void insert(PersonLabelContent personLabel) {
SqlParameterSource parameterSource = new BeanPropertySqlParameterSource(personLabel);
namedParameterJdbcTemplate.update(INSERT_PERSON_LABEL_SQL, parameterSource);
}
public void batchInsert(List<PersonLabelContent> personLabels) {
SqlParameterSource[] batch = SqlParameterSourceUtils.createBatch(personLabels.toArray());
namedParameterJdbcTemplate.batchUpdate(INSERT_PERSON_LABEL_SQL, batch);
}
}
package com.viontech.keliu.dao;
import com.viontech.keliu.entity.ZoneCountData;
import org.springframework.jdbc.core.namedparam.BeanPropertySqlParameterSource;
import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate;
import org.springframework.jdbc.core.namedparam.SqlParameterSource;
import org.springframework.jdbc.core.namedparam.SqlParameterSourceUtils;
import org.springframework.stereotype.Repository;
import javax.annotation.Resource;
import java.util.List;
@Repository
public class DZoneMinuteCountDataDao {
@Resource
private NamedParameterJdbcTemplate namedParameterJdbcTemplate;
public final static String INSERT_SQL = "INSERT INTO d_zone_minute_count_data(mall_id, account_id, floor_id, zone_id, innum, outnum, outside_innum, outside_outnum, countdate, counttime, hour,bunk_no) VALUES (:mallId, :accountId, :floorId, :zoneId, :innum, :outnum, :outsideInnum, :outsideOutnum, :countdate, :counttime, :hour, :bunkNo);";
public final static String UPDATE_SQL = "UPDATE d_zone_minute_count_data SET modify_time=now(),innum=:innum,outnum=:outnum,outside_innum=:outsideInnum,outside_outnum=:outsideOutnum where mall_id=:mallId and countdate=:countdate and zone_id=:zoneId and counttime = :counttime;";
public void insert(ZoneCountData data) {
SqlParameterSource parameterSource = new BeanPropertySqlParameterSource(data);
namedParameterJdbcTemplate.update(INSERT_SQL, parameterSource);
}
public void batchInsert(List<ZoneCountData> dataList) {
SqlParameterSource[] batch = SqlParameterSourceUtils.createBatch(dataList.toArray());
namedParameterJdbcTemplate.batchUpdate(INSERT_SQL, batch);
}
public void update(ZoneCountData data) {
SqlParameterSource sqlParameterSource = new BeanPropertySqlParameterSource(data);
namedParameterJdbcTemplate.update(UPDATE_SQL, sqlParameterSource);
}
public void batchUpdate(List<ZoneCountData> dataList) {
SqlParameterSource[] batch = SqlParameterSourceUtils.createBatch(dataList.toArray());
namedParameterJdbcTemplate.batchUpdate(UPDATE_SQL, batch);
}
}
package com.viontech.keliu.entity;
import com.fasterxml.jackson.annotation.JsonFormat;
import lombok.Getter;
import lombok.Setter;
import java.util.Date;
@Getter
@Setter
public class FaceDataContent {
/**
 * Device id
 */
private Long deviceId;
/**
 * Channel id
 */
private Long channelId;
/**
 * Gate id
 */
private Long gateId;
/**
 * Device serial number
 */
private String deviceSerialnum;
/**
 * Channel serial number
 */
private String channelSerialnum;
/**
 * Person type (0 new customer, 1 second visit, 2 repeat visitor, 9 staff)
 */
private Integer personType;
/**
 * Face picture
 */
private String facePic;
/**
 * Body picture
 */
private String bodyPic;
/**
 * Mood
 */
private Integer mood;
/**
 * Age
 */
private Integer age;
/**
 * Gender
 */
private Integer gender;
/**
 * Direction
 */
private Integer direction;
/**
 * Count time
 */
@JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
private Date counttime;
/**
 * Count date
 */
@JsonFormat(pattern = "yyyy-MM-dd", timezone = "GMT+8")
private Date countdate;
private Long mallId;
private Long accountId;
private String personUnid;
/**
 * Processing progress status
 */
private Integer status;
/**
 * Storage path of the track info file
 */
private String trackInfo;
/**
 * Track duration
 */
private Integer trackTime;
/**
 * Happiness score
 */
private Integer happyConf;
/**
 * Historical number of store visits
 */
private Integer historyArrivalCount;
/**
 * Number of store visits today
 */
// private Integer todayArrivalCount;
/**
 * Original capture id
 */
private String unid;
private Float faceScore;
private Integer faceType;
private Integer facePicNum;
private Integer bodyPicNum;
private Integer faceFeatureType;
private Integer bodyFeatureType;
private Integer bodyType;
private Integer intersectX;
private Integer intersectY;
@JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
private Date intersectTime;
/**
 * 0: child, 1: adult, -1: unknown
 */
private Integer ageGroup;
/**
 * Whether this is pass-through traffic
 */
// private Integer isAcross;
/**
 * Duration in seconds
 */
private Long duration;
/**
 * Payment type (0: unpaid, 1: POS payment, 2: mobile payment)
 */
private Integer payType;
/**
 * Whether goods were picked up
 */
private Integer pickUpGoods;
/**
 * Event image
 */
private String eventImage;
/**
 * Head and foot coordinates in the event image
 */
private String eventCoordinate;
/**
 * Number of companions in the checkout event
 */
private Integer togetherCount;
private String eventVideo;
}
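A hedged deserialization sketch (the payload below is invented): it shows that the @JsonFormat patterns on counttime and countdate parse the date strings the Kafka consumers receive, even with a plain ObjectMapper.

package com.viontech.keliu.entity;
import com.fasterxml.jackson.databind.ObjectMapper;
// Hypothetical Mall_FaceCapture_* message body mapped onto FaceDataContent.
public class FaceDataContentJsonSketch {
public static void main(String[] args) throws Exception {
String json = "{\"deviceId\":1,\"gateId\":3,\"personUnid\":\"p-0001\","
+ "\"counttime\":\"2024-01-01 10:30:00\",\"countdate\":\"2024-01-01\","
+ "\"mallId\":100,\"unid\":\"capture-0001\"}";
FaceDataContent data = new ObjectMapper().readValue(json, FaceDataContent.class);
System.out.println(data.getCounttime()); // parsed as GMT+8 per @JsonFormat
}
}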
package com.viontech.keliu.entity;
import com.fasterxml.jackson.annotation.JsonFormat;
import lombok.Getter;
import lombok.Setter;
import java.util.Date;
@Getter
@Setter
public class GateCountData {
/**
 * Mall id
 */
private Long mallId;
/**
 * Account id
 */
private Long accountId;
/**
 * Gate id
 */
private Long gateId;
/**
 * Entry count
 */
private Integer innum;
/**
 * Exit count
 */
private Integer outnum;
/**
 * Count date
 */
@JsonFormat(pattern = "yyyy-MM-dd", timezone = "GMT+8")
private Date countdate;
/**
 * Count time
 */
@JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
private Date counttime;
private Integer hour;
/**
 * Outside inbound traffic
 */
private Integer outsideInnum;
/**
 * Outside outbound traffic
 */
private Integer outsideOutnum;
/**
 * Database operation type: insert or update
 */
private String operationType;
}
package com.viontech.keliu.entity;
import com.fasterxml.jackson.annotation.JsonFormat;
import lombok.Getter;
import lombok.Setter;
import java.util.Date;
@Getter
@Setter
public class PersonLabelContent {
private Long accountId;
private Long mallId;
private String recognitionUnid;
@JsonFormat(pattern = "yyyy-MM-dd", timezone = "GMT+8")
private Date countDate;
@JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
private Date countTime;
/**
 * Hairstyle
 */
private Integer hairStyle;
/**
 * Hair color
 */
private Integer hairColor;
/**
 * Hat
 */
private Integer hat;
/**
 * Hat color
 */
private Integer hatColor;
/**
 * Glasses
 */
private Integer glasses;
/**
 * Face mask
 */
private Integer mask;
/**
 * Earrings
 */
private Integer earring;
/**
 * Jacket type
 */
private Integer jacketType;
/**
 * Jacket color
 */
private Integer jacketColor;
/**
 * Bottoms type
 */
private Integer bottomsType;
/**
 * Bottoms color
 */
private Integer bottomsColor;
/**
 * Suit type
 */
private Integer suitType;
/**
 * Suit color
 */
private Integer suitColor;
/**
 * Clothing brand
 */
private Integer clothingBrand;
/**
 * Necklace
 */
private Integer necklace;
/**
 * Watch
 */
private Integer watch;
/**
 * Bracelet
 */
private Integer bracelet;
/**
 * Phone
 */
private Integer phone;
/**
 * Backpack type
 */
private Integer rucksackType;
/**
 * Backpack color
 */
private Integer rucksackColor;
/**
 * Handbag type
 */
private Integer handbagType;
/**
 * Handbag color
 */
private Integer handbagColor;
/**
 * Shopping bag type
 */
private Integer shoppingBagType;
/**
 * Shopping bag color
 */
private Integer shoppingBagColor;
/**
 * Shoe type
 */
private Integer shoeType;
/**
 * Shoe color
 */
private Integer shoeColor;
}
package com.viontech.keliu.entity;
import com.fasterxml.jackson.annotation.JsonFormat;
import lombok.Getter;
import lombok.Setter;
import java.util.Date;
@Getter
@Setter
public class ZoneCountData {
/**
 * Mall id
 */
private Long mallId;
/**
 * Account id
 */
private Long accountId;
/**
 * Floor id
 */
private Long floorId;
/**
 * Zone id
 */
private Long zoneId;
/**
 * Entry count
 */
private Integer innum;
/**
 * Exit count
 */
private Integer outnum;
/**
 * Outside entry count
 */
private Integer outsideInnum;
/**
 * Outside exit count
 */
private Integer outsideOutnum;
/**
 * Count time
 */
@JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
private Date counttime;
/**
 * Count date
 */
@JsonFormat(pattern = "yyyy-MM-dd", timezone = "GMT+8")
private Date countdate;
/**
 * Hour of day
 */
private Integer hour;
/**
 * Bunk (booth) number
 */
private Long bunkNo;
/**
 * Database operation type: insert or update
 */
private String operationType;
}
package com.viontech.keliu.service;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.support.SendResult;
import org.springframework.stereotype.Service;
import org.springframework.util.CollectionUtils;
import org.springframework.util.concurrent.ListenableFuture;
import java.util.List;
@Service
@Slf4j
public class KafkaProducerService {
@Autowired
private KafkaTemplate kafkaTemplate;
public <V> ListenableFuture<SendResult<String, V>> sendMessage(String topic, V t){
return kafkaTemplate.send(topic, t);
}
public <V> void sendMessages(String topic, List<V> list){
if (!CollectionUtils.isEmpty(list)) {
for (V v : list) {
kafkaTemplate.send(topic, v);
}
}
}
}
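A hypothetical caller (values invented) mirroring what the consumers do in their catch blocks: with the JsonSerializer configured in application.properties, each list element is serialized to JSON and sent as its own record; sendMessages provides no batching or transactional guarantee beyond the producer configuration.

package com.viontech.keliu.service;
import com.viontech.keliu.constants.KafkaConstants;
import com.viontech.keliu.entity.FaceDataContent;
import java.util.Collections;
// Usage sketch for KafkaProducerService; the payload is illustrative only.
public class RetryProducerSketch {
public static void demo(KafkaProducerService producer) {
FaceDataContent failedRow = new FaceDataContent();
failedRow.setUnid("capture-0001"); // invented id
producer.sendMessages(KafkaConstants.TOPIC_MALL_RETRY_FACECAPTURE,
Collections.singletonList(failedRow));
}
}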
package com.viontech.keliu.service;
import com.viontech.keliu.utils.DateUtil;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.stereotype.Service;
import java.time.LocalDateTime;
import java.util.concurrent.TimeUnit;
@Service
@Slf4j
public class SpeedStatService {
@Autowired
private RedisTemplate redisTemplate;
public void stat(String key, long number) {
try {
LocalDateTime now = LocalDateTime.now();
key = key + ":" + DateUtil.formatYYYYMMDD(now);
String field = DateUtil.formatYYYYMMDDHHMM(now);
redisTemplate.opsForHash().increment(key, field, number);
redisTemplate.expire(key, 2L, TimeUnit.DAYS);
} catch (Exception e) {
log.error("统计处理速度异常:{}", e.getMessage(), e);
}
}
}
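A hedged sketch of the resulting Redis layout (the timestamp is invented): each stat call increments a per-minute field inside a per-day hash and refreshes a two-day TTL on that hash, so throughput can be read back per minute and old keys expire on their own.

package com.viontech.keliu.service;
import com.viontech.keliu.constants.RedisConstants;
// Illustrative call site for SpeedStatService.
public class SpeedStatSketch {
public static void demo(SpeedStatService stats) {
// At 2024-01-01 10:30 this is effectively:
//   HINCRBY pds:mallFaceCapture:write:2024-01-01 "2024-01-01 10:30" 100
//   EXPIRE  pds:mallFaceCapture:write:2024-01-01 172800
stats.stat(RedisConstants.PDS_MALLFACECAPTURE_WRITE, 100);
}
}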
package com.viontech.keliu.utils;
import org.springframework.util.StringUtils;
import java.text.SimpleDateFormat;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.Date;
public class DateUtil {
private static final DateTimeFormatter YYYYMMDD_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd").withZone(ZoneId.of("Asia/Shanghai"));
private static final DateTimeFormatter YYYYMMDDHHMM_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm").withZone(ZoneId.of("Asia/Shanghai"));
/**
*
* @param dateTime
* @return the date formatted as yyyy-MM-dd
*/
public static String formatYYYYMMDD(LocalDateTime dateTime) {
if (null == dateTime) {
return null;
}
return dateTime.format(YYYYMMDD_FORMATTER);
}
/**
*
* @param dateTime
* @return the date formatted as yyyy-MM-dd HH:mm
*/
public static String formatYYYYMMDDHHMM(LocalDateTime dateTime) {
if (null == dateTime) {
return null;
}
return dateTime.format(YYYYMMDDHHMM_FORMATTER);
}
/**
* Generic date formatting
* @param date
* @param format
* @return
*/
public static String format(Date date, String format) {
if (null == date || StringUtils.isEmpty(format)) {
return null;
}
SimpleDateFormat sdf = new SimpleDateFormat(format);
return sdf.format(date);
}
}
# app info
server.port=16060
spring.application.name=VVAS-DataCenter-DBWriter
# Db
spring.datasource.driver-class-name=org.postgresql.Driver
spring.datasource.url=jdbc:postgresql://39.155.171.242:5432/ShoppingMall
spring.datasource.username=postgres
spring.datasource.password=vion
spring.datasource.hikari.connection-timeout=30000
spring.datasource.hikari.idle-timeout=600000
spring.datasource.hikari.max-lifetime=1800000
spring.datasource.hikari.minimum-idle=5
spring.datasource.hikari.maximum-pool-size=50
# redis
spring.redis.host=39.155.171.242
spring.redis.port=6379
spring.redis.password=vionredis
spring.redis.database=10
spring.redis.lettuce.pool.min-idle=1
spring.redis.lettuce.pool.max-active=10
# kafka
spring.kafka.bootstrap-servers=182.92.177.43:9092
spring.kafka.properties.spring.json.add.type.headers=false
spring.kafka.producer.key-serializer=org.springframework.kafka.support.serializer.JsonSerializer
spring.kafka.producer.value-serializer=org.springframework.kafka.support.serializer.JsonSerializer
spring.kafka.producer.batch-size=16384
spring.kafka.producer.properties.linger.ms=50
spring.kafka.producer.retries=1
spring.kafka.producer.acks=all
#spring.kafka.consumer.key-deserializer=org.springframework.kafka.support.serializer.JsonDeserializer
#spring.kafka.consumer.value-deserializer=org.springframework.kafka.support.serializer.JsonDeserializer
spring.kafka.consumer.auto-offset-reset=earliest
spring.kafka.consumer.enable-auto-commit=false
spring.kafka.consumer.fetch-max-wait=500
spring.kafka.consumer.max-poll-records=100
#spring.kafka.consumer.properties.spring.json.trusted.packages=com.viontech.keliu.entity
spring.kafka.listener.ack-mode=manual_immediate
spring.kafka.listener.type=batch
# jackson
spring.jackson.time-zone=Asia/Shanghai
spring.jackson.date-format=yyyy-MM-dd HH:mm:ss
# topic consumer config
# FaceCapture
vion.consumer.mallFaceCapture.autoStartup=true
vion.consumer.mallFaceCapture.concurrency=1
vion.consumer.mallFaceCapture.batchEnable=1
vion.consumer.mallFaceCapture.batchSize=100
vion.consumer.mallFaceCapture.batchThreadNum=10
vion.consumer.mallRetryFaceCapture.autoStartup=false
vion.consumer.mallRetryFaceCapture.concurrency=1
# PersonLabel
vion.consumer.mallPersonLabel.autoStartup=false
vion.consumer.mallPersonLabel.concurrency=1
vion.consumer.mallPersonLabel.batchEnable=1
vion.consumer.mallPersonLabel.batchSize=100
vion.consumer.mallPersonLabel.batchThreadNum=10
vion.consumer.mallRetryPersonLabel.autoStartup=false
vion.consumer.mallRetryPersonLabel.concurrency=1
# d_gate_minute_count_data
vion.consumer.mallGateMinuteCount.autoStartup=true
vion.consumer.mallGateMinuteCount.concurrency=1
vion.consumer.mallGateMinuteCount.batchSize=20
vion.consumer.mallGateMinuteCount.batchThreadNum=5
# d_zone_minute_count_data
vion.consumer.mallZoneMinuteCount.autoStartup=true
vion.consumer.mallZoneMinuteCount.concurrency=1
vion.consumer.mallZoneMinuteCount.batchSize=20
vion.consumer.mallZoneMinuteCount.batchThreadNum=5
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<!-- 日志级别从低到高分为TRACE < DEBUG < INFO < WARN < ERROR < FATAL,如果设置为WARN,则低于WARN的信息都不会输出 -->
<!-- scan:当此属性设置为true时,配置文件如果发生改变,将会被重新加载,默认值为true -->
<!-- scanPeriod:设置监测配置文件是否有修改的时间间隔,如果没有给出时间单位,默认单位是毫秒。当scan为true时,此属性生效。默认的时间间隔为1分钟。 -->
<!-- debug:当此属性设置为true时,将打印出logback内部日志信息,实时查看logback运行状态。默认值为false。 -->
<configuration scan="true" scanPeriod="10 seconds">
<!--<include resource="org/springframework/boot/logging/logback/base.xml" />-->
<contextName>logback</contextName>
<!-- name的值是变量的名称,value的值时变量定义的值。通过定义的值会被插入到logger上下文中。定义变量后,可以使“${}”来使用变量。 -->
<property name="log.path" value="logs"/>
<property name="pattern" value="[%d{yyyy-MM-dd HH:mm:ss.SSS}] [%-5level] [%-5thread] [%49c{1}:%-4L] %msg%n"/>
<!--
<property name="pattern" value="[%d{yyyy-MM-dd HH:mm:ss.SSS}] [%-5level] [%thread] %logger{50} - %msg%n"/>
-->
<property name="datapattern" value="%msg%n"/>
<!--输出到控制台-->
<appender name="CONSOLE" class="ch.qos.logback.core.ConsoleAppender">
<!--此日志appender是为开发使用,只配置最底级别,控制台输出的日志级别是大于或等于此级别的日志信息-->
<filter class="ch.qos.logback.classic.filter.ThresholdFilter">
<level>debug</level>
</filter>
<encoder>
<Pattern>${pattern}</Pattern>
<!-- 设置字符集 -->
</encoder>
</appender>
<!--输出到文件-->
<!-- 时间滚动输出 level为 DEBUG 日志 -->
<appender name="DEBUG_FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<!-- 正在记录的日志文件的路径及文件名 -->
<file>${log.path}/log_debug.log</file>
<!--日志文件输出格式-->
<encoder>
<Pattern>${pattern}</Pattern>
<charset>UTF-8</charset> <!-- 设置字符集 -->
</encoder>
<!-- 日志记录器的滚动策略,按日期,按大小记录 -->
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<!-- 日志归档 -->
<fileNamePattern>${log.path}/debug/log-debug-%d{yyyy-MM-dd}.%i.log.gz</fileNamePattern>
<timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
<maxFileSize>100MB</maxFileSize>
</timeBasedFileNamingAndTriggeringPolicy>
<!--日志文件保留天数-->
<maxHistory>5</maxHistory>
</rollingPolicy>
<!-- 此日志文件只记录debug级别的 -->
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<level>debug</level>
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
</appender>
<!-- 时间滚动输出 level为 INFO 日志 -->
<appender name="INFO_FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<!-- 正在记录的日志文件的路径及文件名 -->
<file>${log.path}/log_info.log</file>
<!--日志文件输出格式-->
<encoder>
<Pattern>${pattern}</Pattern>
<charset>UTF-8</charset>
</encoder>
<!-- 日志记录器的滚动策略,按日期,按大小记录 -->
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<!-- 每天日志归档路径以及格式 -->
<fileNamePattern>${log.path}/info/log-info-%d{yyyy-MM-dd}.%i.log.gz</fileNamePattern>
<timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
<maxFileSize>100MB</maxFileSize>
</timeBasedFileNamingAndTriggeringPolicy>
<!--日志文件保留天数-->
<maxHistory>5</maxHistory>
</rollingPolicy>
<!-- 此日志文件只记录info级别的 -->
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<level>info</level>
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
</appender>
<!-- 时间滚动输出 level为 WARN 日志 -->
<appender name="WARN_FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<!-- 正在记录的日志文件的路径及文件名 -->
<file>${log.path}/log_warn.log</file>
<!--日志文件输出格式-->
<encoder>
<Pattern>${pattern}</Pattern>
<charset>UTF-8</charset> <!-- 此处设置字符集 -->
</encoder>
<!-- 日志记录器的滚动策略,按日期,按大小记录 -->
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<fileNamePattern>${log.path}/warn/log-warn-%d{yyyy-MM-dd}.%i.log.gz</fileNamePattern>
<timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
<maxFileSize>100MB</maxFileSize>
</timeBasedFileNamingAndTriggeringPolicy>
<!--日志文件保留天数-->
<maxHistory>5</maxHistory>
</rollingPolicy>
<!-- 此日志文件只记录warn级别的 -->
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<level>warn</level>
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
</appender>
<!-- 时间滚动输出 level为 ERROR 日志 -->
<appender name="ERROR_FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<!-- 正在记录的日志文件的路径及文件名 -->
<file>${log.path}/log_error.log</file>
<!--日志文件输出格式-->
<encoder>
<Pattern>${pattern}</Pattern>
<charset>UTF-8</charset> <!-- 此处设置字符集 -->
</encoder>
<!-- 日志记录器的滚动策略,按日期,按大小记录 -->
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<fileNamePattern>${log.path}/error/log-error-%d{yyyy-MM-dd}.%i.log.gz</fileNamePattern>
<timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
<maxFileSize>100MB</maxFileSize>
</timeBasedFileNamingAndTriggeringPolicy>
<!--日志文件保留天数-->
<maxHistory>5</maxHistory>
</rollingPolicy>
<!-- 此日志文件只记录ERROR级别的 -->
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<level>ERROR</level>
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
</appender>
<!--
<logger>用来设置某一个包或者具体的某一个类的日志打印级别、
以及指定<appender>。<logger>仅有一个name属性,
一个可选的level和一个可选的addtivity属性。
name:用来指定受此logger约束的某一个包或者具体的某一个类。
level:用来设置打印级别,大小写无关:TRACE, DEBUG, INFO, WARN, ERROR, ALL 和 OFF,
还有一个特俗值INHERITED或者同义词NULL,代表强制执行上级的级别。
如果未设置此属性,那么当前logger将会继承上级的级别。
addtivity:是否向上级logger传递打印信息。默认是true。
-->
<!--<logger name="org.springframework.web" level="info"/>-->
<!--<logger name="org.springframework.scheduling.annotation.ScheduledAnnotationBeanPostProcessor" level="INFO"/>-->
<!--
使用mybatis的时候,sql语句是debug下才会打印,而这里我们只配置了info,所以想要查看sql语句的话,有以下两种操作:
第一种把<root level="info">改成<root level="DEBUG">这样就会打印sql,不过这样日志那边会出现很多其他消息
第二种就是单独给dao下目录配置debug模式,代码如下,这样配置sql语句会打印,其他还是正常info级别:
-->
<!--
root节点是必选节点,用来指定最基础的日志输出级别,只有一个level属性
level:用来设置打印级别,大小写无关:TRACE, DEBUG, INFO, WARN, ERROR, ALL 和 OFF,
不能设置为INHERITED或者同义词NULL。默认是DEBUG
可以包含零个或多个元素,标识这个appender将会添加到这个logger。
-->
<logger name="jdbc.sqlonly" level="off">
</logger>
<logger name="jdbc.audit" level="off">
</logger>
<logger name="jdbc.resultset" level="off">
</logger>
<logger name="jdbc.connection" level="off">
</logger>
<logger name="jdbc.sqltiming" level="off">
</logger>
<logger name="com.viontech" level="debug">
<appender-ref ref="DEBUG_FILE"/>
</logger>
<root level="INFO">
<appender-ref ref="CONSOLE"/>
<appender-ref ref="INFO_FILE"/>
<appender-ref ref="WARN_FILE"/>
<appender-ref ref="ERROR_FILE"/>
</root>
</configuration>
\ No newline at end of file
<html>
<body>
<h1>hello word!!!</h1>
<p>this is a html page</p>
</body>
</html>
\ No newline at end of file
package com.viontech.keliu;
import org.junit.jupiter.api.Test;
import org.springframework.boot.test.context.SpringBootTest;
@SpringBootTest
class PassengerDataStorageApplicationTests {
@Test
void contextLoads() {
}
}