diff --git a/axzo-log-server/src/main/java/cn/axzo/log/platform/server/service/impl/LogSinkELKServiceImpl.java b/axzo-log-server/src/main/java/cn/axzo/log/platform/server/service/impl/LogSinkELKServiceImpl.java
index 589bd43..cba1380 100644
--- a/axzo-log-server/src/main/java/cn/axzo/log/platform/server/service/impl/LogSinkELKServiceImpl.java
+++ b/axzo-log-server/src/main/java/cn/axzo/log/platform/server/service/impl/LogSinkELKServiceImpl.java
@@ -19,6 +19,7 @@ import org.springframework.beans.factory.annotation.Value;
 import org.springframework.stereotype.Service;
 import org.springframework.util.StringUtils;
 
+import java.nio.charset.StandardCharsets;
 import java.util.HashMap;
 import java.util.LinkedHashMap;
 import java.util.Map;
@@ -43,6 +44,9 @@ public class LogSinkELKServiceImpl implements LogSinkELKService {
     @Value("${log-plat.kafka.send.async:false}")
     private Boolean isAsync;
 
+    @Value("${log-plat.max.request.size:1040000}")
+    private Long maxSize;
+
     private Map appKeyConfigMap = new HashMap<>();
 
     @Value("${log-plat.sign.keysecret.config}")
@@ -57,11 +61,18 @@ public class LogSinkELKServiceImpl implements LogSinkELKService {
     public void sinkELk(LogSinkELKReqDTO req) throws BizException, ExecutionException, InterruptedException, JsonProcessingException {
         //decrypt && convert
         String json = signAndConvert(req);
+
+        long jsonSize = json.getBytes(StandardCharsets.UTF_8).length;
+        if (jsonSize > maxSize) {
+            logger.warn("json size {} exceeds max size {}, skip sending to kafka", jsonSize, maxSize);
+            return;
+        }
+
         if (isAsync) {
             kafkaProducer.send(new ProducerRecord<>(topic, json), new KafkaSendCallBack());
         } else {
             RecordMetadata recordMetadata = kafkaProducer.send(new ProducerRecord<>(topic, json)).get();
-            logger.info("send msg to kafka success,offset={},partition={}.", recordMetadata.offset(), recordMetadata.partition());
+            logger.info("send msg to kafka success, size={}, offset={}, partition={}.", jsonSize, recordMetadata.offset(), recordMetadata.partition());
         }
     }
 
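
The 1,040,000-byte default for log-plat.max.request.size presumably keeps a single payload safely below the Kafka producer's default max.request.size of 1,048,576 bytes, so anything that passes this application-level guard should not be rejected by the producer itself. Below is a minimal sketch of how the producer could be configured to stay consistent with the guard; the buildProducer helper, the bootstrap-servers parameter, and the String serializers are assumptions, since the actual producer wiring is not part of this diff.

import java.util.Properties;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringSerializer;

public class LogKafkaProducerFactory {

    // Hypothetical helper: builds a String/String producer and keeps the producer-side
    // max.request.size at or above the application-level guard (log-plat.max.request.size),
    // with some headroom for record/batch overhead, so payloads that pass the guard are
    // not rejected by the producer itself.
    public static KafkaProducer<String, String> buildProducer(String bootstrapServers, long appMaxSize) {
        Properties props = new Properties();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        // Kafka's producer default for max.request.size is 1048576 bytes (1 MiB).
        props.put(ProducerConfig.MAX_REQUEST_SIZE_CONFIG, (int) Math.max(1_048_576L, appMaxSize + 8_192L));
        return new KafkaProducer<>(props);
    }
}

With the default guard of 1,040,000 bytes this resolves to 1,048,576, i.e. the sketch simply keeps Kafka's out-of-the-box limit; only a larger configured guard would raise the producer setting.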