Merge branch 'feature/REQ-3557' into 'master'

Feature/req 3557

See merge request universal/infrastructure/backend/axzo-log-plat!148
金海洋 2025-01-13 11:52:22 +00:00
commit 3576af098e
3 changed files with 175 additions and 0 deletions

View File

@@ -173,6 +173,10 @@
<version>1.0.0-SNAPSHOT</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>com.xuxueli</groupId>
<artifactId>xxl-job-core</artifactId>
</dependency>
</dependencies>
<build>

View File

@@ -0,0 +1,67 @@
package cn.axzo.log.platform.server.config;
import cn.azxo.framework.common.annotation.OnlyPodsEnvironment;
import com.xxl.job.core.executor.impl.XxlJobSpringExecutor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
/**
* xxl-job executor configuration.
* @author chenwenjian
* @version 1.0
* @date 2025/1/9 15:35
*/
@OnlyPodsEnvironment
@Configuration(value = "xxlJobConfig")
public class XxlJobConfig {
private static final Logger logger = LoggerFactory.getLogger(XxlJobConfig.class);
/**
 * xxl-job admin console address, e.g. http://dev-xxl-job.axzo.cn/xxl-job-admin
 */
@Value("${xxl.job.admin.addresses}")
private String adminAddresses;
@Value("${xxl.job.executor.appname}")
private String appName;
@Value("")
private String ip;
@Value("${xxl.job.executor.port}")
private int port;
/**
 * admin access token; left empty here (could be wired to ${xxl.job.accessToken} instead)
 */
@Value("")
private String accessToken;
@Value("")
private String logPath;
@Value("-1")
private int logRetentionDays;
@Bean
public XxlJobSpringExecutor xxlJobExecutor() {
logger.info(">>>>>>>>>>> xxl-job config init.");
XxlJobSpringExecutor xxlJobSpringExecutor = new XxlJobSpringExecutor();
xxlJobSpringExecutor.setAdminAddresses(adminAddresses);
xxlJobSpringExecutor.setAppname(appName);
xxlJobSpringExecutor.setIp(ip);
xxlJobSpringExecutor.setPort(port);
xxlJobSpringExecutor.setAccessToken(accessToken);
xxlJobSpringExecutor.setLogPath(logPath);
xxlJobSpringExecutor.setLogRetentionDays(logRetentionDays);
return xxlJobSpringExecutor;
}
}
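
Note: the executor bean above resolves xxl.job.admin.addresses, xxl.job.executor.appname and xxl.job.executor.port from the Spring environment, while ip, accessToken, logPath and logRetentionDays are hard-coded. A minimal application.yml sketch matching those @Value expressions; the admin address is the example from the code comment, while the appname and port values are illustrative placeholders rather than values taken from this merge request:

# illustrative sketch only; keys mirror the @Value expressions in XxlJobConfig
xxl:
  job:
    admin:
      addresses: http://dev-xxl-job.axzo.cn/xxl-job-admin   # example from the commented-out value above
    executor:
      appname: axzo-log-plat-executor   # placeholder executor name
      port: 9999                        # placeholder executor port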

View File

@@ -0,0 +1,104 @@
package cn.axzo.log.platform.server.job;
import cn.axzo.log.platform.server.entity.LogEntity;
import com.alibaba.fastjson.JSON;
import com.mongodb.client.result.DeleteResult;
import com.xxl.job.core.biz.model.ReturnT;
import com.xxl.job.core.handler.annotation.XxlJob;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.stereotype.Component;
import java.text.SimpleDateFormat;
import java.util.Date;
/**
* Log cleanup job.
*
* @author chenwenjian
* @version 1.0
* @date 2025/1/9 11:25
*/
@Slf4j
@Component
@RequiredArgsConstructor
public class LogCleanupJob {
private final MongoTemplate mongoTemplate;
/**
* Clean up logs.
*
* @param param example: {"batchSize":1000,"daysToKeep":15,"scene":"networkLog"}
* @return SUCCESS when the cleanup completes, FAIL if an error occurs
*/
@XxlJob("logCleanupJobHandler")
public ReturnT<String> cleanupLogs(String param) {
log.info("Starting log cleanup job with parameters: {}", param);
// Default parameters: clean up logs whose scene is "networkLog" and that are older than 30 days
ParamDTO paramDTO = ParamDTO.builder().build();
// Parse the job parameter; keep the defaults if it is blank or malformed
try {
ParamDTO parsed = JSON.parseObject(param, ParamDTO.class);
if (parsed != null) {
paramDTO = parsed;
}
} catch (Exception e) {
log.warn("Invalid parameter format, using default values. error info: {}", e.getMessage());
}
long deletedCount = 0;
try {
// Build the query: logs in the given scene whose timestamp is older than daysToKeep days
Date cutoffDate = new Date(System.currentTimeMillis() - (paramDTO.getDaysToKeep() * 24 * 60 * 60 * 1000L));
Criteria criteria = Criteria.where("scene").is(paramDTO.getScene())
.and("timestamp").lte(cutoffDate.getTime());
Query query = new Query(criteria);
query.limit(paramDTO.getBatchSize());
SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS");
while (true) {
DeleteResult result = mongoTemplate.remove(query, LogEntity.class);
if (result.getDeletedCount() == 0) {
break;
}
deletedCount += result.getDeletedCount();
log.info("Cleaned up {} logs for scene: {} before data: {}", result.getDeletedCount(), paramDTO.getScene(), format.format(cutoffDate));
}
} catch (Exception e) {
log.error("Error during log cleanup job", e);
return ReturnT.FAIL;
}
log.info("Ending log cleanup job, total cleaned up logs for scene {}: {}", paramDTO.getScene(), deletedCount);
return ReturnT.SUCCESS;
}
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
private static class ParamDTO {
/**
* 每次删除的日志数量页大小
*/
@Builder.Default
private int batchSize = 1000;
/**
* 保留天数
*/
@Builder.Default
private int daysToKeep = 30;
/**
* 场景
*/
@Builder.Default
private String scene = "networkLog";
}
}
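
A caveat on the cleanup loop above: MongoDB's delete command only supports removing either a single document or all matching documents, so the query.limit(batchSize) may not actually bound each MongoTemplate.remove call. A hedged sketch of an alternative that enforces the batch size by first selecting up to batchSize _id values and then deleting exactly those documents; BatchedLogCleanupSketch is a hypothetical class, and it assumes LogEntity exposes its identifier via getId():

package cn.axzo.log.platform.server.job;

import cn.axzo.log.platform.server.entity.LogEntity;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;

import java.util.List;
import java.util.stream.Collectors;

/**
 * Illustrative sketch only: deletes expired logs in rounds that are explicitly
 * bounded by batchSize (select the ids first, then delete exactly that batch).
 */
public class BatchedLogCleanupSketch {

    private final MongoTemplate mongoTemplate;

    public BatchedLogCleanupSketch(MongoTemplate mongoTemplate) {
        this.mongoTemplate = mongoTemplate;
    }

    public long cleanup(String scene, int daysToKeep, int batchSize) {
        long cutoff = System.currentTimeMillis() - daysToKeep * 24L * 60 * 60 * 1000;
        long deleted = 0;
        while (true) {
            // limit() is honored for reads, so this selects at most batchSize matching documents
            Query select = new Query(Criteria.where("scene").is(scene).and("timestamp").lte(cutoff))
                    .limit(batchSize);
            select.fields().include("_id");
            List<Object> ids = mongoTemplate.find(select, LogEntity.class).stream()
                    .map(LogEntity::getId) // assumption: LogEntity has a getId() accessor for its _id
                    .collect(Collectors.toList());
            if (ids.isEmpty()) {
                break;
            }
            // delete exactly the selected batch by _id
            deleted += mongoTemplate.remove(new Query(Criteria.where("_id").in(ids)), LogEntity.class)
                    .getDeletedCount();
        }
        return deleted;
    }
}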