feat: add dao-support and RateLimiter

This commit is contained in:
lilong 2024-03-15 16:51:57 +08:00
parent fa47f6c5fd
commit 74171b683d
24 changed files with 2417 additions and 0 deletions

View File

@ -104,6 +104,15 @@
<groupId>cn.axzo.framework</groupId>
<artifactId>axzo-common-domain</artifactId>
</dependency>
<dependency>
<groupId>mysql</groupId>
<artifactId>mysql-connector-java</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.redisson</groupId>
<artifactId>redisson-spring-boot-starter</artifactId>
</dependency>
</dependencies>
<repositories>

View File

@ -0,0 +1,84 @@
package cn.axzo.pokonyan.client;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.Getter;
import lombok.ToString;
import java.util.List;
import java.util.Objects;
public interface RateLimiter {
/**
* Try to acquire a permit for the given business key.
* Returns false when the limit for the current window has been reached.
*
* @param value business identifier used to build the rate-limit key
* @return true if a permit was acquired, false otherwise
*/
boolean tryAcquire(Object value);
/**
* Get the window type.
*
* @return the window type of this limiter
*/
WindowType getWindowType();
class Permit {
private List<Runnable> cancelRunners;
@Builder
public Permit(List<Runnable> cancelRunners) {
Objects.requireNonNull(cancelRunners);
this.cancelRunners = cancelRunners;
}
public void cancel() {
if (!cancelRunners.isEmpty()) {
cancelRunners.forEach(Runnable::run);
}
}
}
@Getter
@AllArgsConstructor
enum WindowType {
/**
* Sliding window; window range: start = currentMillis - windowDuration, end = currentMillis
*/
SLIDING("s");
// short name to keep the Redis key compact
private final String shortName;
}
/**
* Rate-limit rule
* <pre>
* seconds: window duration in seconds
* permits: number of permits allowed within the window
* </pre>
*/
@Data
@Builder
@AllArgsConstructor
@ToString
class LimitRule {
long seconds;
int permits;
public boolean isValid() {
return seconds > 0 && permits > 0;
}
}
}

View File

@ -0,0 +1,43 @@
package cn.axzo.pokonyan.client;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
public interface RateLimiterClient {
/**
* Build a Redis-backed RateLimiter.
*
* @return the configured RateLimiter
*/
RateLimiter build(RateLimiterReq rateLimiterReq);
/**
* Build a Redis-backed RateLimiter from the window type and the rule parameters.
* @param limiterKey key isolating this limiter's Redis entries
* @param windowType window type
* @param seconds    window duration in seconds
* @param permits    permits allowed within the window
* @return the configured RateLimiter
*/
default RateLimiter build(String limiterKey, RateLimiter.WindowType windowType, long seconds, int permits) {
return build(RateLimiterReq.builder()
.windowType(windowType)
.rule(RateLimiter.LimitRule.builder()
.seconds(seconds)
.permits(permits)
.build())
.limiterKey(limiterKey)
.build());
}
@Data
@Builder
@AllArgsConstructor
class RateLimiterReq {
RateLimiter.WindowType windowType;
RateLimiter.LimitRule rule;
String limiterKey;
}
}
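For reference, a minimal usage sketch of this interface (not part of the commit). It assumes a RateLimiterClient instance has already been built elsewhere; the "sms-send" limiter key and the user id are hypothetical.

package cn.axzo.pokonyan.client;

public class RateLimiterUsageSketch {
    // the client instance would normally be injected by the application's configuration
    public boolean trySendSms(RateLimiterClient rateLimiterClient, long userId) {
        RateLimiter limiter = rateLimiterClient.build(
                "sms-send",                      // limiterKey (hypothetical business key)
                RateLimiter.WindowType.SLIDING,  // window type
                60,                              // window duration in seconds
                100);                            // permits allowed per window
        // true when a permit was granted for this user within the current window
        return limiter.tryAcquire(userId);
    }
}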

View File

@ -0,0 +1,52 @@
package cn.axzo.pokonyan.client.impl;
import cn.axzo.pokonyan.client.RateLimiter;
import cn.axzo.pokonyan.client.RateLimiterClient;
import lombok.AccessLevel;
import lombok.Data;
import lombok.Setter;
import lombok.extern.slf4j.Slf4j;
import org.redisson.api.RedissonClient;
import java.util.Objects;
@Slf4j
public class RateLimiterClientImpl implements RateLimiterClient {
@Setter(AccessLevel.PROTECTED)
private RedissonClient redissonClient;
@Override
public RateLimiter build(RateLimiterReq rateLimiterReq) {
return RedisRateLimiterImpl.builder()
.windowType(rateLimiterReq.getWindowType())
.limitRule(rateLimiterReq.getRule())
.limiterKey(rateLimiterReq.getLimiterKey())
.redissonClient(redissonClient)
.build();
}
public static Builder builder() {
return new Builder();
}
@Data
public static class Builder {
private RedissonClient redissonClient;
public Builder redisTemplate(RedissonClient redissonClient) {
this.redissonClient = redissonClient;
return this;
}
public RateLimiterClient build() {
// the same RateLimiterClient is built in the unit-test environment as well
Objects.requireNonNull(redissonClient);
RateLimiterClientImpl client = new RateLimiterClientImpl();
client.setRedissonClient(redissonClient);
return client;
}
}
}
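A minimal wiring sketch for the builder above (not part of the commit). The Redis address is an assumption; in the application the RedissonClient would normally come from the redisson-spring-boot-starter auto-configuration added in the pom. Note the builder setter is named redisTemplate in this commit even though it accepts a RedissonClient.

package cn.axzo.pokonyan.client.impl;

import cn.axzo.pokonyan.client.RateLimiterClient;
import org.redisson.Redisson;
import org.redisson.api.RedissonClient;
import org.redisson.config.Config;

public class RateLimiterClientWiringSketch {
    public RateLimiterClient createClient() {
        // hypothetical standalone Redis address, only for the sketch
        Config config = new Config();
        config.useSingleServer().setAddress("redis://127.0.0.1:6379");
        RedissonClient redissonClient = Redisson.create(config);
        // the builder method is called redisTemplate here, but it takes a RedissonClient
        return RateLimiterClientImpl.builder()
                .redisTemplate(redissonClient)
                .build();
    }
}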

View File

@ -0,0 +1,108 @@
package cn.axzo.pokonyan.client.impl;
import cn.axzo.pokonyan.client.RateLimiter;
import com.alibaba.fastjson.JSONObject;
import com.google.common.hash.Hashing;
import lombok.Builder;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.codec.Charsets;
import org.redisson.api.RRateLimiter;
import org.redisson.api.RateIntervalUnit;
import org.redisson.api.RateType;
import org.redisson.api.RedissonClient;
import java.util.Objects;
@Slf4j
public class RedisRateLimiterImpl implements RateLimiter {
private RedissonClient redissonClient;
private RateLimiterWorker rateLimiterWorker;
/**
* Caller-defined key used to avoid Redis key collisions. Required.
*/
private String limiterKey;
private LimitRule limitRule;
private WindowType windowType;
@Builder
RedisRateLimiterImpl(RedissonClient redissonClient,
WindowType windowType,
String limiterKey,
LimitRule limitRule,
Integer maxWindowDurationHour) {
Objects.requireNonNull(redissonClient);
Objects.requireNonNull(windowType);
Objects.requireNonNull(limitRule);
Objects.requireNonNull(limiterKey);
if (!limitRule.isValid()) {
throw new RuntimeException(String.format("invalid rate expression, limitRule = %s", JSONObject.toJSONString(limitRule)));
}
this.windowType = windowType;
this.redissonClient = redissonClient;
this.limitRule = limitRule;
this.limiterKey = limiterKey;
this.rateLimiterWorker = buildWorker(windowType);
}
@Override
public boolean tryAcquire(Object value) {
return rateLimiterWorker.tryAcquire(value);
}
@Override
public WindowType getWindowType() {
return windowType;
}
private RateLimiterWorker buildWorker(WindowType windowType) {
if (windowType == WindowType.SLIDING) {
return new SlidingWindowRateLimiter();
}
throw new RuntimeException(String.format("unsupported window type, window type = %s", windowType));
}
private String buildRedisKey(Object value) {
String hash = Hashing.murmur3_128().newHasher()
.putString(limiterKey, Charsets.UTF_8)
.putString(String.valueOf(value), Charsets.UTF_8)
.hash()
.toString();
return new StringBuilder("rl").append(getWindowType().getShortName()).append(hash).toString();
}
/**
* Sliding-window rate limiting. Original design note: every successful acquisition is recorded in a zset,
* and later acquisitions count the entries within the window duration before granting a permit.
* <pre>
* key = value
* zset value = currentMillis, score = currentMillis
* on acquire, count elements with score in [currentMillis - windowDuration, currentMillis)
* </pre>
* The current implementation delegates to Redisson's RRateLimiter, configured from the LimitRule
* (permits per `seconds` interval).
*/
class SlidingWindowRateLimiter implements RateLimiterWorker {
public boolean tryAcquire(Object value) {
String key = buildRedisKey(value);
RRateLimiter rateLimiter = redissonClient.getRateLimiter(key);
if (!rateLimiter.isExists()) {
rateLimiter.trySetRate(RateType.OVERALL, limitRule.getPermits(), limitRule.getSeconds(), RateIntervalUnit.SECONDS);
}
return rateLimiter.tryAcquire(1);
}
}
interface RateLimiterWorker {
/**
* Try to acquire a permit.
*
* @param value business identifier
* @return true if the permit was acquired, false otherwise
*/
boolean tryAcquire(Object value);
}
}

View File

@ -0,0 +1,124 @@
package cn.axzo.pokonyan.dao.converter;
import cn.axzo.pokonyan.dao.mysql.MybatisPlusConverterUtils;
import cn.axzo.pokonyan.dao.page.IPageParam;
import com.baomidou.mybatisplus.core.metadata.OrderItem;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import lombok.experimental.UtilityClass;
import org.apache.commons.lang3.BooleanUtils;
import org.apache.commons.lang3.StringUtils;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.function.Function;
import java.util.stream.Collectors;
@UtilityClass
public class PageConverter {
/**
* Convert an IPageParam (bfs page) into a MyBatis-Plus Page.
* Sorting by properties declared on the entity is supported.
*
* @param page      page parameters
* @param entityClz entity class used to map sort properties to database columns
* @param <R>       record type
* @return the MyBatis-Plus page
*/
public static <R> Page<R> convertToMybatis(IPageParam page, Class<R> entityClz) {
int pageSize = Math.min(Optional.ofNullable(page.getPageSize()).orElse(IPageParam.DEFAULT_PAGE_SIZE), IPageParam.MAX_PAGE_SIZE);
Integer current = Optional.ofNullable(page.getPageNumber()).orElse(IPageParam.DEFAULT_PAGE_NUMBER);
Page<R> myBatisPage = new Page<>(current, pageSize);
Map<String, String> fieldColumnMap = entityClz == null ? ImmutableMap.of() : MybatisPlusConverterUtils.getFieldMapping(entityClz);
List<OrderItem> orderItems = Optional.ofNullable(page.getSort()).orElse(ImmutableList.of()).stream()
.map(e -> {
String property = StringUtils.substringBefore(e, IPageParam.SORT_DELIMITER);
// try to map the entity property to its database column
if (fieldColumnMap.containsKey(property)) {
property = fieldColumnMap.get(property);
}
String direction = StringUtils.substringAfter(e, IPageParam.SORT_DELIMITER);
if (direction != null && IPageParam.SORT_DESC.equals(direction)) {
return OrderItem.desc(property);
}
return OrderItem.asc(property);
})
.collect(Collectors.toList());
myBatisPage.setOrders(orderItems);
return myBatisPage;
}
/**
* Drain all records through the paged query into a single list and return it.
* The function receives the next page number; loading starts from the first page and stops once a
* page returns fewer records than the requested page size.
*/
public static <T> List<T> drainAll(Function<Integer, Page<T>> function) {
return drainAll(function, null);
}
/**
* Drain all records through the paged query into a single list and return it.
* The function receives the next page number; loading starts from the first page and stops once a
* page returns fewer records than the requested page size.
* The breaker can stop the loop early; it may be null, in which case all pages are fetched.
*/
public static <T> List<T> drainAll(Function<Integer, Page<T>> function, Function<List<T>, Boolean> breaker) {
List<T> totalData = Lists.newArrayList();
int pageNum = IPageParam.DEFAULT_PAGE_NUMBER;
while (true) {
Page<T> result = function.apply(pageNum);
totalData.addAll(result.getRecords());
if (result.getRecords().size() < result.getSize()) {
break;
}
if (breaker != null && BooleanUtils.isTrue(breaker.apply(totalData))) {
break;
}
pageNum += 1;
}
return totalData;
}
/**
* Convert a MyBatis-Plus IPage into a Spring-style Page for the response (implementation currently commented out).
*
* @param page source page
* @param <T>  record type
* @return the converted page
*/
// public static <T> Page convertToBfs(IPage<T> page) {
// List<String> sorts = page.orders().stream()
// .map(e -> e.getColumn().concat(IPageParam.SORT_DELIMITER).concat(e.isAsc() ? IPageParam.SORT_ASC : IPageParam.SORT_DESC))
// .collect(Collectors.toList());
// Page result = Page.builder()
// .total(page.getTotal())
// .current(page.getCurrent())
// .size(page.getSize())
// .build();
//
// result.setTotal(page.getTotal());
// result.setRecords(page.getRecords());
//
// return result;
// }
//
// /**
// * 读取 bfs page 请求读取 mybatis 并将结果转换成 bfs page
// * mybatisPage(page, p->xxxDao.selectPage(p, query));
// * XXX 针对排序字段作出优化通过传入entityClz用于确定排序sql中的真实字段名
// */
// public static <T, R> Page<R> mybatisPage(IPageParam page, Function<IPage, IPage> pageLoader, Class<T> entityClz) {
// final IPage<T> p = PageConverter.convertToMybatis(page, entityClz);
// IPage<T> iPage = pageLoader.apply(p);
// return PageConverter.convertToBfs(iPage);
// }
}
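A short usage sketch for PageConverter (not part of the commit); the entity type T and its BaseMapper are hypothetical placeholders, and the page size of 200 in loadAll is an arbitrary choice.

package cn.axzo.pokonyan.dao.converter;

import cn.axzo.pokonyan.dao.page.IPageParam;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.baomidou.mybatisplus.core.toolkit.Wrappers;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import java.util.List;

public class PageConverterUsageSketch {
    // single page: sort expressions such as "createTime__DESC" are mapped to db columns via entityClz
    public <T> Page<T> pageQuery(IPageParam pageParam, BaseMapper<T> mapper, Class<T> entityClz) {
        Page<T> page = PageConverter.convertToMybatis(pageParam, entityClz);
        return mapper.selectPage(page, Wrappers.emptyWrapper());
    }

    // drainAll keeps asking for the next page number until a page comes back short
    public <T> List<T> loadAll(BaseMapper<T> mapper) {
        return PageConverter.drainAll(pageNum ->
                mapper.selectPage(new Page<>(pageNum, 200), Wrappers.emptyWrapper()));
    }
}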

View File

@ -0,0 +1,265 @@
package cn.axzo.pokonyan.dao.mysql;
import cn.axzo.pokonyan.dao.mysql.type.SetTypeHandler;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.baomidou.mybatisplus.core.metadata.TableFieldInfo;
import com.baomidou.mybatisplus.core.metadata.TableInfo;
import com.baomidou.mybatisplus.core.metadata.TableInfoHelper;
import com.google.common.base.Function;
import com.google.common.base.Preconditions;
import com.google.common.base.Splitter;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import org.apache.commons.lang3.tuple.Pair;
import org.springframework.util.CollectionUtils;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.BiConsumer;
import java.util.stream.Collectors;
/**
* A small utility: export rows from a test environment as a JSON string (for example with the IntelliJ data
* extractor), then run this helper to import them into the target environment.
* In IntelliJ, use the JSON-Groovy extractor format to produce the JSON.
* Note: the exported keys are database column names and are converted to entity properties.
* <pre>
* [
* {
* "id": 2,
* "app_ids": "9999",
* "name": "集成测试告警",
* "subject": "集成测试告警",
* "priority": 98,
* "recipients": "",
* "send_threshold": 1,
* "send_interval": 1,
* "description": "集成测试告警",
* "ext": "",
* "status": "ENABLED",
* "create_time": "2019-10-25 07:10:43",
* "update_time": "2019-11-23 19:51:50"
* }
* ....
* ]
* </pre>
* sample
* <pre>
* importHelper = JsonImportHelper.<AlertRuleDao, AlertRule>builder().baseMapper(alertRuleDao)
* .bizKeyNames(List.of("name"))
* .excludeFields(Set.of())
* .clz(AlertRule.class)
* .saveOrUpdate(null).build();
* </pre>
*
* @param <M>
* @param <T>
*/
public class JsonImportHelper<M extends BaseMapper<T>, T> {
private static final Set<String> DEFAULT_EXCLUDE_FIELDS = ImmutableSet.of("", "id", "rowid", "updatedTime", "createTime", "updateTime", "modifyTime");
private static final String DEFAULT_ID_FIELD_NAME = "id";
private static final int MAX_IMPORT_COUNT = 100;
private boolean jsonSmart;
private String idFieldName;
private HashSet<String> excludeFields;
private M baseMapper;
/**
* Business key(s), e.g. a code or a unique name, that uniquely identify a row.
*/
private List<String> bizKeyNames;
private Class<T> clz;
private BiConsumer<T, Boolean> saveOrUpdate;
/**
* @param baseMapper
* @param excludeFields fields to exclude, e.g. update time, create time, id; the defaults are always included
* @param bizKeyNames
* @param clz
* @param saveOrUpdate
*/
@Builder
public JsonImportHelper(M baseMapper, Set<String> excludeFields, List<String> bizKeyNames, Class<T> clz,
BiConsumer<T, Boolean> saveOrUpdate, String idFieldName) {
Preconditions.checkArgument(!CollectionUtils.isEmpty(bizKeyNames));
Preconditions.checkArgument(baseMapper != null);
Preconditions.checkArgument(clz != null);
this.baseMapper = baseMapper;
this.excludeFields = new HashSet<>(DEFAULT_EXCLUDE_FIELDS);
if (!CollectionUtils.isEmpty(excludeFields)) {
this.excludeFields.addAll(excludeFields);
}
this.bizKeyNames = bizKeyNames;
this.clz = clz;
this.saveOrUpdate = saveOrUpdate;
this.jsonSmart = true;
this.idFieldName = Strings.isNullOrEmpty(idFieldName) ? DEFAULT_ID_FIELD_NAME : idFieldName;
}
public void setJsonSmart(boolean jsonSmart) {
this.jsonSmart = jsonSmart;
}
/**
* Run the import and return the rows to be inserted, the rows to be updated, and the unchanged rows.
* If limit is 0 the result is only previewed and nothing is written.
* At most 100 rows (MAX_IMPORT_COUNT) may be imported per call.
*
* @param rawJson rows to import
* @param limit   maximum number of rows to insert/update; 0 means preview only
* @return the diff result with insert/update/unchanged rows
*/
public ImportResp<T> run(List<JSONObject> rawJson, int limit) {
Preconditions.checkArgument(rawJson.size() <= MAX_IMPORT_COUNT);
List<JSONObject> importRows = resolveRows(rawJson);
QueryWrapper<T> query = new QueryWrapper<>();
query.last("limit 1000");
TableInfo tableInfo = TableInfoHelper.getTableInfo(clz);
Map<String, String> columnMap = tableInfo.getFieldList().stream().collect(Collectors.toMap(e -> e.getColumn(), e -> e.getProperty()));
// the primary key is not included in columnMap, so declare it explicitly
columnMap.put(tableInfo.getKeyColumn(), tableInfo.getKeyProperty());
// selectList is not used directly because JSON-typed columns are not handled correctly that way
// baseMapper.selectMaps(query) returns db column names, which need to be converted to properties
List<JSONObject> dbRows = baseMapper.selectMaps(query).stream().map(e -> {
final Map<String, Object> p = e.entrySet().stream()
.collect(Collectors.toMap(entry -> columnMap.get(entry.getKey().toLowerCase()), entry -> entry.getValue()));
return new JSONObject(p);
}).collect(Collectors.toList());
ImportResp<T> diffRes = diff(dbRows, importRows);
diffRes.insertRows.stream().limit(limit).forEach(e -> doSaveOrUpdate(e, true));
diffRes.updateRows.stream().limit(limit).forEach(e -> doSaveOrUpdate(e, false));
return diffRes;
}
private void doSaveOrUpdate(T entity, boolean insert) {
if (saveOrUpdate != null) {
saveOrUpdate.accept(entity, insert);
} else {
if (insert) {
baseMapper.insert(entity);
} else {
baseMapper.updateById(entity);
}
}
}
/**
* Resolve database column names to entity property names, dropping unknown and excluded columns.
*
* @param rows
* @return
*/
List<JSONObject> resolveRows(List<JSONObject> rows) {
TableInfo tableInfo = TableInfoHelper.getTableInfo(clz);
// accept either table column names (snake_case) or property names (camelCase) in the uploaded data
ImmutableMap<String, TableFieldInfo> columnMap = Maps.uniqueIndex(tableInfo.getFieldList(), TableFieldInfo::getColumn);
ImmutableMap<String, TableFieldInfo> propertyMap = Maps.uniqueIndex(tableInfo.getFieldList(), TableFieldInfo::getProperty);
Map<String, TableFieldInfo> columnPropertyMap = new HashMap<>(columnMap);
columnPropertyMap.putAll(propertyMap);
return rows.stream().map(e -> {
Map<String, Object> collect = e.entrySet().stream().map(node -> {
TableFieldInfo column = columnPropertyMap.get(node.getKey());
if (column == null) {
return Pair.of("", "");
}
Object nodeValue = node.getValue();
if (jsonSmart && nodeValue != null && (node.getValue() instanceof String)) {
String value = (String) (node.getValue());
if (value.startsWith("{") && JSON.isValidObject(value)) {
nodeValue = JSONObject.parseObject(value);
}
if (value.startsWith("[") && JSON.isValidArray(value)) {
nodeValue = JSONObject.parseArray(value);
}
if (column.getTypeHandler() == SetTypeHandler.class) {
nodeValue = ImmutableSet.copyOf(Splitter.on(",").omitEmptyStrings().trimResults().splitToList(value));
}
}
return Pair.of(column.getProperty(), nodeValue);
})
.filter(x -> x.getValue() != null)
.filter(x -> !excludeFields.contains(x.getKey()))
.collect(Collectors.toMap(x -> x.getKey(), x -> x.getValue()));
return new JSONObject(collect);
}).collect(Collectors.toList());
}
ImportResp<T> diff(List<JSONObject> dbRows, List<JSONObject> importRows) {
// join db rows and imported rows by the business key to work out which rows to insert or update
ImmutableMap<String, JSONObject> dbRowMap = Maps.uniqueIndex(dbRows, e -> {
return bizKeyNames.stream().map(key -> e.getString(key)).collect(Collectors.joining(","));
});
ImmutableMap<String, JSONObject> importRowMap = Maps.uniqueIndex(importRows, e -> {
return bizKeyNames.stream().map(key -> e.getString(key)).collect(Collectors.joining(","));
});
ImportResp<T> res = new ImportResp<>();
for (Map.Entry<String, JSONObject> entry : importRowMap.entrySet()) {
JSONObject dbRow = dbRowMap.get(entry.getKey());
JSONObject importRow = entry.getValue();
if (dbRow != null) {
// matched an existing row
if (isSame(dbRow, importRow)) {
res.sameRows.add(JSONObject.toJavaObject(importRow, clz));
} else {
// carry over the id from the matching db row
JSONObject updated = new JSONObject(importRow);
updated.put(this.idFieldName, dbRow.getOrDefault(this.idFieldName, null));
res.updateRows.add(JSONObject.toJavaObject(updated, clz));
}
} else {
// no match in the db, so this row needs to be inserted
res.insertRows.add(JSONObject.toJavaObject(importRow, clz));
}
}
return res;
}
/**
* Serialize each JSON object into a canonical string and compare the two for equality.
*
* @param src
* @param target
* @return
*/
boolean isSame(JSONObject src, JSONObject target) {
Function<JSONObject, String> mixer = i -> {
return i.entrySet().stream().filter(e -> !excludeFields.contains(e.getKey()))
.sorted(Comparator.comparing(Map.Entry::getKey))
.filter(e -> e.getValue() != null)
.filter(e -> !Strings.isNullOrEmpty(e.getValue().toString()))
.map(e -> e.getKey() + "=" + e.getValue()).collect(Collectors.joining(","));
};
return mixer.apply(src).equals(mixer.apply(target));
}
@NoArgsConstructor
@AllArgsConstructor
@Data
@Builder
public final static class ImportResp<T> {
List<T> updateRows = Lists.newArrayList();
List<T> insertRows = Lists.newArrayList();
List<T> sameRows = Lists.newArrayList();
}
}
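A usage sketch following the sample in the class Javadoc (not part of the commit); the concrete mapper/entity pair and the "name" business key are assumptions.

package cn.axzo.pokonyan.dao.mysql;

import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.google.common.collect.ImmutableList;
import java.util.List;

public class JsonImportSketch {
    // M/T stand in for a concrete mapper/entity pair such as AlertRuleDao/AlertRule from the Javadoc sample
    public <M extends BaseMapper<T>, T> JsonImportHelper.ImportResp<T> preview(M mapper, Class<T> entityClz,
                                                                               String exportedJson) {
        List<JSONObject> rows = JSONArray.parseArray(exportedJson, JSONObject.class);
        JsonImportHelper<M, T> helper = JsonImportHelper.<M, T>builder()
                .baseMapper(mapper)
                .bizKeyNames(ImmutableList.of("name"))  // assumed business key property
                .clz(entityClz)
                .build();
        // limit = 0 previews the diff without writing; pass a positive limit to actually insert/update
        return helper.run(rows, 0);
    }
}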

View File

@ -0,0 +1,132 @@
package cn.axzo.pokonyan.dao.mysql;
import com.baomidou.mybatisplus.core.conditions.Wrapper;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.baomidou.mybatisplus.core.toolkit.Wrappers;
import com.baomidou.mybatisplus.core.toolkit.support.SFunction;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Maps;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.NonNull;
import java.io.Serializable;
import java.time.Duration;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.function.Function;
import java.util.function.Predicate;
import java.util.stream.Collectors;
/**
* Reads rows from MySQL through a BaseMapper and caches them in a Guava LoadingCache.
* Two builders are provided:
* <ul>
* <li>
* <p>KeyBuilder</p><br/>
* LoadingCache<Long, Optional<User>> cache = MybatisPlusCacheHelper.KeyBuilder.<User, Long>builder()
* .expire(Duration.ofMinutes(5)).maxSize(100L).baseMapper(mapper)
* .keyFunction(User::getId).build().toLoadingCache();
* </li>
* <li>
* <p>AllBuilder</p><br/>
* LoadingCache<Long, List<User>> cache = MybatisPlusCacheHelper.AllBuilder.<User, Long>builder()
* .expire(Duration.ofMinutes(5)).maxSize(100L).baseMapper(mapper)
* .queryBuilder(k -> new QueryWrapper<User>())
* .entityFilter(e -> false)
* .build()
* .toLoadingCache();
* </li>
* </ul>
*/
public class MybatisPlusCacheHelper {
@Builder
@AllArgsConstructor
public static class KeyBuilder<T, K extends Serializable> {
@NonNull
private BaseMapper<T> baseMapper;
@NonNull
private Long maxSize;
@NonNull
private Duration expire;
@NonNull
SFunction<T, K> keyFunction;
public LoadingCache<K, Optional<T>> toLoadingCache() {
return CacheBuilder.newBuilder()
.expireAfterWrite(expire)
.maximumSize(maxSize)
.recordStats()
.build(new CacheLoader<K, Optional<T>>() {
@Override
public Optional<T> load(K key) throws Exception {
return Optional.ofNullable(baseMapper.selectOne(Wrappers.<T>lambdaQuery().eq(keyFunction, key)));
}
@Override
public Map<K, Optional<T>> loadAll(Iterable<? extends K> keys) throws Exception {
LambdaQueryWrapper<T> query = Wrappers.<T>lambdaQuery().in(keyFunction, ImmutableList.copyOf(keys));
Map<K, T> rows = baseMapper.selectList(query).stream().collect(Collectors.toMap(keyFunction, e -> e));
Map<K, Optional<T>> res = Maps.newHashMapWithExpectedSize(rows.size());
for (K key : keys) {
res.put(key, Optional.ofNullable(rows.get(key)));
}
return res;
}
});
}
}
@Builder
@AllArgsConstructor
public static class AllBuilder<T, K extends Serializable> {
@NonNull
private BaseMapper<T> baseMapper;
@NonNull
private Long maxSize;
@NonNull
private Duration expire;
/**
* builds the QueryWrapper used when loading entries
*/
Function<K, Wrapper<T>> queryBuilder;
/**
* filter applied to the loaded entities
*/
Predicate<T> entityFilter;
public <R> LoadingCache<K, List<R>> toLoadingCache(@NonNull Function<T, R> entityConverter) {
return CacheBuilder.newBuilder()
.expireAfterWrite(expire)
.maximumSize(maxSize)
.recordStats()
.build(new CacheLoader<K, List<R>>() {
@Override
public List<R> load(K key) throws Exception {
Wrapper<T> query = null;
if (queryBuilder != null) {
query = queryBuilder.apply(key);
}
List<T> res = baseMapper.selectList(query);
if (entityFilter != null) {
res = res.stream().filter(entityFilter).collect(Collectors.toList());
}
return res.stream().map(e -> entityConverter.apply(e)).collect(Collectors.toList());
}
});
}
public LoadingCache<K, List<T>> toLoadingCache() {
return toLoadingCache(e->e);
}
}
}

View File

@ -0,0 +1,61 @@
package cn.axzo.pokonyan.dao.mysql;
import cn.axzo.pokonyan.dao.wrapper.SimpleWrapperConverter;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.core.metadata.TableFieldInfo;
import com.baomidou.mybatisplus.core.metadata.TableInfo;
import com.baomidou.mybatisplus.core.metadata.TableInfoHelper;
import com.google.common.base.Strings;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.stream.Collectors;
public class MybatisPlusConverterUtils {
private static Map<Class, SimpleWrapperConverter<QueryWrapper>> converters = new ConcurrentHashMap<>(32);
public static SimpleWrapperConverter<QueryWrapper> getWrapperConverter(Class entityClass) {
return converters.computeIfAbsent(entityClass, clazz ->
SimpleWrapperConverter.<QueryWrapper>builder()
.operatorProcessor(new MybatisPlusOperatorProcessor())
.fieldColumnMap(getFieldMapping(clazz))
.fieldTypeMap(getFieldTypeMapping(clazz))
.build());
}
/**
* Returns the property-to-column mapping for the given class.
*
* @param clazz
* @return
*/
public static Map<String, String> getFieldMapping(Class clazz) {
// XXX: the mapping returned by TableInfoHelper.getTableInfo(clazz).getFieldList() does not include the
// @TableId field, so id-based lookups could not resolve the column; merge the field list with the key property here
TableInfo tableInfo = TableInfoHelper.getTableInfo(clazz);
Map<String, String> fieldMap = tableInfo.getFieldList().stream()
.collect(Collectors.toMap(TableFieldInfo::getProperty, TableFieldInfo::getColumn));
if (!Strings.isNullOrEmpty(tableInfo.getKeyProperty())) {
fieldMap.put(tableInfo.getKeyProperty(), tableInfo.getKeyColumn());
}
return fieldMap;
}
/**
* Returns the property-to-property-type mapping for the given class.
*
* @param clazz
* @return
*/
public static Map<String, Class<?>> getFieldTypeMapping(Class clazz) {
TableInfo tableInfo = TableInfoHelper.getTableInfo(clazz);
Map<String, Class<?>> fieldTypeMap = tableInfo.getFieldList().stream()
.collect(Collectors.toMap(TableFieldInfo::getProperty, TableFieldInfo::getPropertyType));
if (!Strings.isNullOrEmpty(tableInfo.getKeyProperty())) {
fieldTypeMap.put(tableInfo.getKeyProperty(), tableInfo.getKeyType());
}
return fieldTypeMap;
}
}

View File

@ -0,0 +1,73 @@
package cn.axzo.pokonyan.dao.mysql;
import com.baomidou.mybatisplus.core.conditions.Wrapper;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.baomidou.mybatisplus.core.toolkit.support.SFunction;
import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
import org.apache.commons.lang3.StringUtils;
import java.util.List;
import java.util.function.Function;
import java.util.function.Supplier;
/**
* @author yuanyi
* Created on 2020/9/27.
*/
public class MybatisPlusHelper {
private static final Integer LIMIT = 1_000;
public static <T> List<T> drainAll(BaseMapper<T> baseMapper, QueryWrapper<T> queryWrapper,
SFunction<T, Long> idFunction) {
return drainAll(baseMapper, queryWrapper::lambda, idFunction, LIMIT);
}
public static <T> List<T> drainAll(BaseMapper<T> baseMapper, QueryWrapper<T> queryWrapper,
SFunction<T, Long> idFunction, int limit) {
return drainAll(baseMapper, queryWrapper::lambda, idFunction, limit);
}
public static <T> List<T> drainAll(BaseMapper<T> baseMapper, Supplier<LambdaQueryWrapper<T>> wrapperSupplier,
SFunction<T, Long> idFunction) {
return drainAll(baseMapper, wrapperSupplier, idFunction, LIMIT);
}
public static <T> List<T> drainAll(BaseMapper<T> baseMapper, Supplier<LambdaQueryWrapper<T>> wrapperSupplier,
SFunction<T, Long> idFunction, int limit) {
LambdaQueryWrapper<T> queryWrapper = wrapperSupplier.get();
Preconditions.checkArgument(!StringUtils.containsIgnoreCase(queryWrapper.getSqlSegment(), "order by"),
"queryWrapper不能含有order by");
LambdaQueryWrapper<T> nextQueryWrapper = wrapperSupplier.get();
Preconditions.checkArgument(queryWrapper != nextQueryWrapper,
"wrapperSupplier需要返回不同的LambdaQueryWrapper实例");
Function<Long, Wrapper<T>> wrapperFunc = startId -> wrapperSupplier.get()
.gt(idFunction, startId)
.orderByAsc(idFunction)
.last(" limit " + limit);
return drainAll(baseMapper, wrapperFunc, idFunction::apply, limit);
}
private static <T> List<T> drainAll(BaseMapper<T> baseMapper, Function<Long, Wrapper<T>> wrapperFunc,
Function<T, Long> startIdFunc, int limit) {
List<T> totalData = Lists.newArrayList();
Long startId = 0L;
while (true) {
List<T> records = baseMapper.selectList(wrapperFunc.apply(startId));
totalData.addAll(records);
if (records.size() < limit) {
break;
}
T lastOne = records.get(records.size() - 1);
startId = startIdFunc.apply(lastOne);
}
return totalData;
}
}
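A usage sketch for drainAll (not part of the commit); the Demo entity and its mapper are hypothetical, and the supplier must return a fresh wrapper on every call, as the precondition above enforces.

package cn.axzo.pokonyan.dao.mysql;

import com.baomidou.mybatisplus.annotation.TableName;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.baomidou.mybatisplus.core.toolkit.Wrappers;
import java.util.List;

public class MybatisPlusHelperSketch {
    // hypothetical MyBatis-Plus entity; only the id getter matters for the keyset pagination
    @TableName("demo")
    public static class Demo {
        private Long id;
        private String status;
        public Long getId() { return id; }
        public String getStatus() { return status; }
    }

    public List<Demo> loadEnabled(BaseMapper<Demo> demoMapper) {
        // the supplier builds a fresh LambdaQueryWrapper per call, since each batch adds its own
        // "id > lastId ... limit N" on top of the base condition
        return MybatisPlusHelper.drainAll(
                demoMapper,
                () -> Wrappers.<Demo>lambdaQuery().eq(Demo::getStatus, "ENABLED"),
                Demo::getId);
    }
}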

View File

@ -0,0 +1,141 @@
package cn.axzo.pokonyan.dao.mysql;
import cn.axzo.pokonyan.dao.wrapper.CriteriaWrapper;
import cn.axzo.pokonyan.dao.wrapper.Operator;
import cn.axzo.pokonyan.dao.wrapper.OperatorProcessor;
import cn.axzo.pokonyan.dao.wrapper.TriConsumer;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.core.toolkit.Wrappers;
import com.google.common.collect.ImmutableListMultimap;
import org.springframework.data.domain.Sort;
import java.util.Collection;
import java.util.List;
public class MybatisPlusOperatorProcessor<T> implements OperatorProcessor<QueryWrapper<T>> {
@Override
public QueryWrapper<T> assembleAllQueryWrapper(ImmutableListMultimap<String, CriteriaWrapper.QueryField> queryColumnMap, boolean andOperator) {
QueryWrapper<T> queryWrapper = Wrappers.query();
queryColumnMap.asMap().forEach((column, queryFields) -> {
// look up the operator handler and apply it to the wrapper
if (andOperator) {
queryFields.forEach(queryField -> get(queryField.getOperator())
.accept(queryWrapper, queryField.getColumnWithPrefix(column), queryField.getValue()));
} else {
queryFields.forEach(queryField -> {
get(queryField.getOperator())
.accept(queryWrapper, queryField.getColumnWithPrefix(column), queryField.getValue());
queryWrapper.or();
});
}
});
return queryWrapper;
}
public TriConsumer<QueryWrapper<T>, String, Object> get(Operator operator) {
switch (operator) {
case LIKE:
return QueryWrapper::like;
case EQ:
return QueryWrapper::eq;
case LT:
return QueryWrapper::lt;
case LE:
return QueryWrapper::le;
case GT:
return QueryWrapper::gt;
case GE:
return QueryWrapper::ge;
case NE:
return QueryWrapper::ne;
case SW:
return QueryWrapper::likeRight;
case EW:
return QueryWrapper::likeLeft;
case IN:
return this::in;
case IS_NULL:
return this::isNull;
case IS_NOT_NULL:
return this::isNotNull;
case BETWEEN:
return this::between;
case ORDER:
return this::order;
case OR:
return this::or;
case JSON:
return this::json;
case JSON_OR:
return this::jsonOr;
case FS:
throw new UnsupportedOperationException("暂不支持的操作符");
default:
throw new UnsupportedOperationException("暂不支持的操作符");
}
}
private void or(QueryWrapper<T> queryWrapper, String column, Object value) {
List<CriteriaWrapper.QueryField> criterials = (List<CriteriaWrapper.QueryField>) value;
queryWrapper.and(e -> {
for (CriteriaWrapper.QueryField q : criterials) {
get(q.getOperator()).accept(e, q.getFieldWithPrefix(), q.getValue());
e.or();
}
});
}
private void json(QueryWrapper<T> queryWrapper, String column, Object value) {
List<CriteriaWrapper.QueryField> criterials = (List<CriteriaWrapper.QueryField>) value;
queryWrapper.and(e -> {
for (CriteriaWrapper.QueryField q : criterials) {
get(q.getOperator()).accept(e, q.getFieldWithPrefix(), q.getValue());
}
});
}
private void jsonOr(QueryWrapper<T> queryWrapper, String column, Object value) {
List<CriteriaWrapper.QueryField> criterials = (List<CriteriaWrapper.QueryField>) value;
queryWrapper.and(e -> {
for (CriteriaWrapper.QueryField q : criterials) {
get(q.getOperator()).accept(e, q.getFieldWithPrefix(), q.getValue());
e.or();
}
});
}
private void isNull(QueryWrapper<T> queryWrapper, String column, Object value) {
queryWrapper.isNull(column);
}
private void isNotNull(QueryWrapper<T> queryWrapper, String column, Object value) {
queryWrapper.isNotNull(column);
}
private void between(QueryWrapper queryWrapper, String column, Object value) {
List valueList = (List) value;
queryWrapper.between(column, valueList.get(0), valueList.get(1));
}
private void in(QueryWrapper<T> queryWrapper, String column, Object v) {
Collection values = (Collection) v;
boolean isJson = CriteriaWrapper.QueryField.isJsonQueryField(column);
if (isJson) {
queryWrapper.and(w -> values.forEach(value -> w.or().apply(column + "={0}", value)));
} else {
queryWrapper.in(column, values);
}
}
private void order(QueryWrapper<T> queryWrapper, String column, Object value) {
// Sort.Direction.fromString throws if value cannot be parsed as a sort direction
if (Sort.Direction.fromString(String.valueOf(value)).isDescending()) {
queryWrapper.orderByDesc(column);
return;
}
queryWrapper.orderByAsc(column);
}
}

View File

@ -0,0 +1,117 @@
package cn.axzo.pokonyan.dao.mysql;
import com.alibaba.fastjson.JSONObject;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.google.common.base.Joiner;
import com.google.common.base.Preconditions;
import com.google.common.base.Splitter;
import com.google.common.collect.Lists;
import lombok.NonNull;
import lombok.experimental.UtilityClass;
import org.apache.commons.lang3.StringUtils;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
/**
* A MySQL JSON helper that makes it easier to build JSON manipulation SQL fragments.
*/
@UtilityClass
public class MysqlJsonHelper {
/**
* Build a JSON_SET update fragment; updating nested content is supported.
* MySQL JSON_SET requires every intermediate object on the path to exist, otherwise the update fails:
* https://stackoverflow.com/questions/40896920/mysql-json-set-cant-insert-into-column-with-null-value5-7
* <p>
* String sql = MysqlJsonHelper.buildJsonSetSql("ext",
* ImmutableMap.of("test2.test4.test5", "100", "test2.test3", "55", "test2.test4.test6", "999"));
* which produces:
* ext = COALESCE(ext, JSON_OBJECT()),
* ext = JSON_SET(ext, '$.test2', IFNULL(ext->'$.test2',JSON_OBJECT())),
* ext = JSON_SET(ext, '$.test2.test4', IFNULL(ext->'$.test2.test4',JSON_OBJECT())),
* ext = JSON_SET(ext, '$.test2.test4.test5', '100'),
* ext = JSON_SET(ext, '$.test2.test3', '55'),
* ext = JSON_SET(ext, '$.test2.test4.test6', '999')
*
* @return the JSON_SET SQL fragment
*/
public String buildJsonSetSql(@NonNull String colName, @NonNull Map<String, Object> values) {
Preconditions.checkArgument(!values.isEmpty());
return values.entrySet().stream().flatMap(e -> {
List<String> nodes = Splitter.on(".").splitToList(e.getKey());
List<String> sqls = Lists.newArrayList();
String jsonPath = "$";
// JSON_SET cannot write under a null parent node, so emit SQL that initializes each parent first
sqls.add(String.format("%s = COALESCE(%s, JSON_OBJECT())", colName, colName));
for (int i = 0; i < nodes.size() - 1; i++) {
jsonPath = Joiner.on(".").join(jsonPath, nodes.get(i));
String sql = String.format("%s = JSON_SET(%s, '%s', IFNULL(%s,JSON_OBJECT()))", colName, colName, jsonPath, buildJsonField(colName, jsonPath));
sqls.add(sql);
}
String sqlValue = String.format("%s = JSON_SET(%s, '%s', %s)", colName, colName, "$." + e.getKey(), resolveValue(e.getValue()));
sqls.add(sqlValue);
return sqls.stream();
}).distinct().collect(Collectors.joining(",\n"));
}
private Object resolveValue(Object value) {
if (value instanceof String) {
return "'" + value + "'";
}
if (value instanceof List) {
return "JSON_ARRAY(JSON_OBJECT" + Joiner.on(",JSON_OBJECT").join((List) value)
.replace("\":\"", "\",\"")
.replace("{", "(")
.replace("}", ")") + ")";
}
return value;
}
public String buildJsonField(@NonNull String colName, @NonNull String path) {
Preconditions.checkArgument(StringUtils.isNotBlank(colName));
Preconditions.checkArgument(StringUtils.isNotBlank(path));
return String.format("%s->'%s'", colName, (path.startsWith("$") ? path : "$." + path));
}
/**
* MySQL does not support IN over JSON values,
* so the IN condition is rewritten as a chain of OR conditions.
* https://dev.mysql.com/doc/refman/5.7/en/json.html#json-comparison
*/
public <T> QueryWrapper<T> buildJsonIn(@NonNull QueryWrapper<T> wrapper, @NonNull String colName, @NonNull String path, @NonNull Collection values) {
Preconditions.checkArgument(StringUtils.isNotBlank(colName));
Preconditions.checkArgument(StringUtils.isNotBlank(path));
wrapper.and(w -> values.forEach(value -> {
w.or().apply(buildJsonField(colName, path) + "={0}", value);
}));
return wrapper;
}
/**
* MySQL does not support IN over JSON arrays,
* so the IN condition is rewritten as a chain of OR'ed JSON_CONTAINS conditions.
* https://dev.mysql.com/doc/refman/5.7/en/json.html#json-comparison
*/
public <T> QueryWrapper<T> buildJsonContains(@NonNull QueryWrapper<T> wrapper, @NonNull String colName, @NonNull String path, @NonNull Collection values) {
Preconditions.checkArgument(StringUtils.isNotBlank(colName));
Preconditions.checkArgument(StringUtils.isNotBlank(path));
String normalizedPath = (path.startsWith("$") ? path : "$." + path);
wrapper.and(w -> values.forEach(value -> {
String sql = String.format("JSON_CONTAINS(%s, '%s', '%s')", colName,
// XXX Long values may otherwise be serialized as quoted strings (for frontend compatibility), so Long is not serialized via JSON here
(value instanceof Long) ? value : JSONObject.toJSONString(value), normalizedPath);
w.or().apply(sql);
}));
return wrapper;
}
}
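A sketch of how the generated fragment might be applied (not part of the commit); the ext column, the gray.* paths and the id filter are assumptions, and setSql is the standard MyBatis-Plus hook for a raw SET fragment.

package cn.axzo.pokonyan.dao.mysql;

import com.baomidou.mybatisplus.core.conditions.update.UpdateWrapper;
import com.google.common.collect.ImmutableMap;

public class MysqlJsonHelperSketch {
    // T is a hypothetical entity with a JSON column named "ext" and a numeric id
    public <T> UpdateWrapper<T> buildExtUpdate(long id) {
        // produces COALESCE/JSON_SET statements that also initialize missing parent nodes
        String setSql = MysqlJsonHelper.buildJsonSetSql("ext",
                ImmutableMap.of("gray.enabled", "true", "gray.ratio", 10));
        return new UpdateWrapper<T>()
                .setSql(setSql)   // raw SET fragment produced above
                .eq("id", id);
    }
}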

View File

@ -0,0 +1,50 @@
package cn.axzo.pokonyan.dao.mysql;
import cn.axzo.pokonyan.dao.wrapper.CriteriaWrapper;
import cn.axzo.pokonyan.dao.wrapper.SimpleWrapperConverter;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
public class QueryWrapperHelper {
public static <T> QueryWrapper<T> fromBean(Object bean, Class<T> clazz) {
SimpleWrapperConverter<QueryWrapper> converter = MybatisPlusConverterUtils.getWrapperConverter(clazz);
return converter.toWrapper(CriteriaWrapper.fromBean(bean));
}
public static <T> QueryWrapper<T> fromMap(Map<String, Object> map, Class<T> clazz) {
SimpleWrapperConverter<QueryWrapper> converter = MybatisPlusConverterUtils.getWrapperConverter(clazz);
return converter.toWrapper(CriteriaWrapper.builder().queryField(map).build());
}
public static <T> QueryWrapper<T> fromBean(Object bean, Class<T> clazz,
Function<CriteriaWrapper.QueryField, List<CriteriaWrapper.QueryField>> fieldConverter) {
SimpleWrapperConverter<QueryWrapper> converter = MybatisPlusConverterUtils.getWrapperConverter(clazz);
return converter.toWrapper(CriteriaWrapper.fromBean(bean, true, fieldConverter));
}
public static <T> QueryWrapper<T> fromBean(Object bean, Class<T> clazz, Class... others) {
SimpleWrapperConverter<QueryWrapper> converter = MybatisPlusConverterUtils.getWrapperConverter(clazz);
return converter.toWrapper(CriteriaWrapper.fromBean(bean), Arrays.stream(others)
.map(MybatisPlusConverterUtils::getWrapperConverter).toArray(SimpleWrapperConverter[]::new));
}
public static <T> QueryWrapper<T> fromMap(Map<String, Object> map, Class<T> clazz, Class... others) {
SimpleWrapperConverter<QueryWrapper> converter = MybatisPlusConverterUtils.getWrapperConverter(clazz);
return converter.toWrapper(CriteriaWrapper.builder().queryField(map).build(), Arrays.stream(others)
.map(MybatisPlusConverterUtils::getWrapperConverter).toArray(SimpleWrapperConverter[]::new));
}
public static <T> QueryWrapper<T> fromBean(Object bean, Class<T> clazz,
Function<CriteriaWrapper.QueryField,
List<CriteriaWrapper.QueryField>> fieldConverter,
Class... others) {
SimpleWrapperConverter<QueryWrapper> converter = MybatisPlusConverterUtils.getWrapperConverter(clazz);
return converter.toWrapper(CriteriaWrapper.fromBean(bean, true, fieldConverter), Arrays.stream(others)
.map(MybatisPlusConverterUtils::getWrapperConverter).toArray(SimpleWrapperConverter[]::new));
}
}
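A usage sketch for fromMap (not part of the commit); the status/appId properties are hypothetical, plain keys fall back to a default operator, and keys may also use the "Field__Operator" form documented on CriteriaField.

package cn.axzo.pokonyan.dao.mysql;

import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.google.common.collect.ImmutableMap;
import java.util.Map;

public class QueryWrapperHelperSketch {
    // T stands for a MyBatis-Plus entity with status/appId properties (hypothetical)
    public <T> QueryWrapper<T> enabledByApp(Class<T> entityClz) {
        // null values would be filtered out before the wrapper is built
        Map<String, Object> params = ImmutableMap.of(
                "status", "ENABLED",
                "appId", 9999L);
        return QueryWrapperHelper.fromMap(params, entityClz);
    }
}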

View File

@ -0,0 +1,51 @@
package cn.axzo.pokonyan.dao.mysql.type;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.serializer.SerializerFeature;
import org.apache.ibatis.type.BaseTypeHandler;
import org.apache.ibatis.type.JdbcType;
import org.apache.ibatis.type.MappedJdbcTypes;
import org.apache.ibatis.type.MappedTypes;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.sql.CallableStatement;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.List;
@MappedTypes({List.class})
@MappedJdbcTypes(JdbcType.VARCHAR)
public abstract class BaseListTypeHandler<T> extends BaseTypeHandler<List<T>> {
private Class<T> type = getGenericType();
@Override
public void setNonNullParameter(PreparedStatement preparedStatement, int i,
List<T> list, JdbcType jdbcType) throws SQLException {
preparedStatement.setString(i, JSONArray.toJSONString(list, SerializerFeature.WriteMapNullValue,
SerializerFeature.WriteNullListAsEmpty, SerializerFeature.WriteNullStringAsEmpty));
}
@Override
public List<T> getNullableResult(ResultSet resultSet, String s) throws SQLException {
return JSONArray.parseArray(resultSet.getString(s), type);
}
@Override
public List<T> getNullableResult(ResultSet resultSet, int i) throws SQLException {
return JSONArray.parseArray(resultSet.getString(i), type);
}
@Override
public List<T> getNullableResult(CallableStatement callableStatement, int i) throws SQLException {
return JSONArray.parseArray(callableStatement.getString(i), type);
}
private Class<T> getGenericType() {
Type t = getClass().getGenericSuperclass();
Type[] params = ((ParameterizedType) t).getActualTypeArguments();
return (Class<T>) params[0];
}
}
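A minimal concrete handler as a sketch (not part of the commit): the element type is resolved from the superclass declaration, so an empty subclass is enough.

package cn.axzo.pokonyan.dao.mysql.type;

// Stores a List<Long> as a JSON array string and reads it back via the base handler.
public class LongListTypeHandler extends BaseListTypeHandler<Long> {
}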

View File

@ -0,0 +1,54 @@
package cn.axzo.pokonyan.dao.mysql.type;
import com.google.common.base.Joiner;
import com.google.common.base.Splitter;
import com.google.common.base.Strings;
import com.google.common.collect.Sets;
import org.apache.ibatis.type.BaseTypeHandler;
import org.apache.ibatis.type.JdbcType;
import org.apache.ibatis.type.MappedJdbcTypes;
import java.sql.CallableStatement;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.LinkedHashSet;
import java.util.Optional;
/**
* 1. Converts a comma-separated String column from the database into a LinkedHashSet.
* 2. When storing, the LinkedHashSet is written back as a single comma-separated String.
*/
@MappedJdbcTypes({JdbcType.VARCHAR})
public class LinkedHashSetTypeHandler extends BaseTypeHandler<LinkedHashSet<String>> {
private static final String DELIMITER = ",";
@Override
public void setNonNullParameter(PreparedStatement ps, int i, LinkedHashSet<String> parameter, JdbcType jdbcType) throws SQLException {
String value = Joiner.on(DELIMITER).join(Optional.ofNullable(parameter).orElseGet(Sets::newLinkedHashSet));
ps.setString(i, value);
}
@Override
public LinkedHashSet<String> getNullableResult(ResultSet rs, String columnName) throws SQLException {
return getSet(rs.getString(columnName));
}
@Override
public LinkedHashSet<String> getNullableResult(ResultSet rs, int columnIndex) throws SQLException {
return getSet(rs.getString(columnIndex));
}
@Override
public LinkedHashSet<String> getNullableResult(CallableStatement cs, int columnIndex) throws SQLException {
return getSet(cs.getString(columnIndex));
}
private LinkedHashSet<String> getSet(String dbValue) {
return Sets.newLinkedHashSet(Splitter.on(DELIMITER)
.omitEmptyStrings()
.trimResults()
.splitToList(Strings.nullToEmpty(dbValue)));
}
}

View File

@ -0,0 +1,60 @@
package cn.axzo.pokonyan.dao.mysql.type;
import com.google.common.base.Splitter;
import com.google.common.collect.Sets;
import org.apache.commons.lang3.StringUtils;
import org.apache.ibatis.type.BaseTypeHandler;
import org.apache.ibatis.type.JdbcType;
import org.apache.ibatis.type.MappedJdbcTypes;
import java.sql.CallableStatement;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.Collections;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
/**
* 1. Converts a comma-separated String column from the database into a Set.
* 2. When storing, the Set is written back as a single comma-separated String.
*/
@MappedJdbcTypes({JdbcType.VARCHAR})
public class SetTypeHandler extends BaseTypeHandler<Set<String>> {
private static final String DELIMITER = ",";
@Override
public void setNonNullParameter(PreparedStatement ps, int i, Set<String> parameter, JdbcType jdbcType) throws SQLException {
String value = Sets.newLinkedHashSet(Optional.ofNullable(parameter).orElse(Collections.emptySet()))
.stream()
.collect(Collectors.joining(DELIMITER));
ps.setString(i, value);
}
@Override
public Set<String> getNullableResult(ResultSet rs, String columnName) throws SQLException {
return getSet(rs.getString(columnName));
}
@Override
public Set<String> getNullableResult(ResultSet rs, int columnIndex) throws SQLException {
return getSet(rs.getString(columnIndex));
}
@Override
public Set<String> getNullableResult(CallableStatement cs, int columnIndex) throws SQLException {
return getSet(cs.getString(columnIndex));
}
private Set<String> getSet(String dbValue) {
return Splitter.on(",")
.omitEmptyStrings()
.trimResults()
.splitToList(Optional.of(dbValue).orElse(StringUtils.EMPTY))
.stream()
.collect(Collectors.toSet());
}
}
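A sketch of how such a handler is typically attached to an entity field with standard MyBatis-Plus annotations (not part of the commit); the demo_rule table and appIds field are hypothetical.

package cn.axzo.pokonyan.dao.mysql.type;

import com.baomidou.mybatisplus.annotation.TableField;
import com.baomidou.mybatisplus.annotation.TableName;
import java.util.Set;

// autoResultMap is needed so the custom handler is also applied when reading query results
@TableName(value = "demo_rule", autoResultMap = true)
public class DemoRule {
    private Long id;

    // "a,b,c" in the varchar column <-> Set<String> on the entity
    @TableField(typeHandler = SetTypeHandler.class)
    private Set<String> appIds;
}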

View File

@ -0,0 +1,37 @@
package cn.axzo.pokonyan.dao.page;
import com.google.common.collect.ImmutableList;
import lombok.Getter;
import lombok.RequiredArgsConstructor;
import java.util.List;
public interface IPageParam {
Integer DEFAULT_PAGE_NUMBER = 1;
Integer DEFAULT_PAGE_SIZE = 20;
Integer MAX_PAGE_SIZE = 1000;
String SORT_DELIMITER = "__";
String SORT_DESC = OrderEnum.DESC.name();
String SORT_ASC = OrderEnum.ASC.name();
default Integer getPageNumber() {
return DEFAULT_PAGE_NUMBER;
}
default Integer getPageSize() {
return DEFAULT_PAGE_SIZE;
}
default List<String> getSort() {
return ImmutableList.of();
}
enum OrderEnum {
DESC,
ASC
}
}
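A hypothetical request object implementing IPageParam (not part of the commit), showing the "property__DESC" sort form that PageConverter.convertToMybatis parses (SORT_DELIMITER = "__").

package cn.axzo.pokonyan.dao.page;

import com.google.common.collect.ImmutableList;
import java.util.List;

public class DemoPageQuery implements IPageParam {
    private Integer pageNumber = 1;
    private Integer pageSize = 20;

    @Override
    public Integer getPageNumber() {
        return pageNumber;
    }

    @Override
    public Integer getPageSize() {
        return pageSize;
    }

    @Override
    public List<String> getSort() {
        // newest first; "createTime" is mapped to its db column via the entity class passed to convertToMybatis
        return ImmutableList.of("createTime__DESC");
    }
}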

View File

@ -0,0 +1,255 @@
package cn.axzo.pokonyan.dao.utils;
import cn.axzo.pokonyan.dao.mysql.MybatisPlusConverterUtils;
import cn.axzo.pokonyan.dao.wrapper.SimpleWrapperConverter;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.core.conditions.update.UpdateWrapper;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.baomidou.mybatisplus.core.metadata.TableInfo;
import com.baomidou.mybatisplus.core.metadata.TableInfoHelper;
import com.baomidou.mybatisplus.core.toolkit.Wrappers;
import com.google.common.base.Preconditions;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.cglib.beans.BeanMap;
import org.springframework.util.CollectionUtils;
import java.io.Serializable;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;
/**
* Helper for repairing data in MySQL: rows are selected in batches starting from a given id and repaired.
*
* @param mapper       the {@link BaseMapper} to read and write through
* @param clz          entity class
* @param updater      transforms the selected records into the records that should be updated
* @param queryWrapper optional query condition; it is cloned per batch, so its parameters should be Serializable
* @param selectFields optional set of fields to select from the database
*/
@Slf4j
public class RepairDataHelper<T> {
private RepositoryWrapper<T> repositoryWrapper;
private Function<List<T>, List<T>> updater;
@Builder
public RepairDataHelper(BaseMapper<T> mapper, Class<T> clz, Function<List<T>, List<T>> updater,
QueryWrapper<T> queryWrapper, Set<String> selectFields) {
Preconditions.checkArgument(mapper != null, "mapper不能为空");
Preconditions.checkArgument(clz != null, "clz不能为空");
Preconditions.checkArgument(updater != null, "updater不能为空");
if (queryWrapper == null) {
queryWrapper = Wrappers.query();
}
repositoryWrapper = new MysqlRepositoryWrapper(mapper, clz, queryWrapper, selectFields);
this.updater = updater;
log.info("---Repair Data Helper--- created with repository: {}, queryWrapper: {}, selectFields: {}",
mapper, queryWrapper, selectFields);
}
/**
* Run the data repair.
*
* @param req startId   start repairing from this id; defaults to the first record
*            batchSize  records per batch; defaults to 20
*            limit      maximum number of records to process (required)
*            onlyPrint  true only logs the rows that would be repaired; by default nothing is written
* @return total number of repaired records
*/
public int repair(RepairReq req) {
req = Optional.ofNullable(req).orElse(RepairReq.DEFAULT);
Preconditions.checkArgument(req.getBatchSize() > 0, "batchCount必须大于0");
Preconditions.checkArgument(req.getLimit() > 0, "limit必须大于0");
log.info("---Repair Data Helper--- repair with req: {}", req);
int batchIndex = 0;
int totalProcessed = 0;
int totalRepaired = 0;
Serializable startId = req.getStartId();
while (totalProcessed < req.getLimit()) {
log.info("[{}]---Repair Data Helper--- start process, startId: {}", batchIndex, startId);
boolean includeStartId = batchIndex == 0;
int batchSize = Math.min(req.getBatchSize(), req.getLimit() - totalProcessed);
List<T> selectedRecords = repositoryWrapper.selectByStartId(startId, batchSize, includeStartId);
log.info("[{}]---Repair Data Helper--- selected records count: {}", batchIndex, selectedRecords.size());
if (CollectionUtils.isEmpty(selectedRecords)) {
break;
}
totalProcessed += selectedRecords.size();
List<T> updateRecords = updater.apply(selectedRecords);
log.info("[{}]---Repair Data Helper--- update records count: {}", batchIndex, updateRecords.size());
List<Object> updateIds = updateRecords.stream()
.map(r -> getIdValue(r, repositoryWrapper.getIdFieldName()))
.collect(Collectors.toList());
if (req.getOnlyPrint() || CollectionUtils.isEmpty(updateIds)) {
log.info("[{}]---Repair Data Helper--- to be updated ids: {}", batchIndex, updateIds);
} else {
int updatedCount;
if (needBatchUpdate(updateRecords)) {
updatedCount = repositoryWrapper.batchUpdate(updateRecords);
} else {
// all pending updates carry identical values, so one conditional update over the ids is enough
updatedCount = repositoryWrapper.updateByIds(updateRecords.get(0), updateIds);
}
log.info("[{}]---Repair Data Helper--- updated records count: {}", batchIndex, updatedCount);
totalRepaired += updatedCount;
}
startId = getIdValue(selectedRecords.get(selectedRecords.size() - 1), repositoryWrapper.getIdFieldName());
batchIndex += 1;
}
log.info("---Repair Data Helper--- repair finished, req: {}, processed: {}, repaired: {}, ignored: {}",
req, totalProcessed, totalRepaired, totalProcessed - totalRepaired);
return totalRepaired;
}
private interface RepositoryWrapper<T> {
String getIdFieldName();
List<T> selectByStartId(Serializable startId, int count, boolean includeStartId);
int updateByIds(T updateEntity, List<Object> ids);
int batchUpdate(List<T> updateEntities);
}
private class MysqlRepositoryWrapper implements RepositoryWrapper<T> {
private QueryWrapper<T> queryWrapper;
private Set<String> selectFields;
private BaseMapper<T> baseMapper;
private String idFieldName;
private String idColumnName;
private SimpleWrapperConverter<QueryWrapper> converter;
private MysqlRepositoryWrapper(BaseMapper<T> baseMapper, Class<T> entityClass,
QueryWrapper<T> queryWrapper, Set<String> selectFields) {
try {
TableInfo tableInfo = TableInfoHelper.getTableInfo(entityClass);
idFieldName = tableInfo.getKeyProperty();
idColumnName = tableInfo.getKeyColumn();
converter = MybatisPlusConverterUtils.getWrapperConverter(entityClass);
} catch (Exception e) {
log.warn("---Repair Data Helper--- failed to resolve table info for {}", entityClass, e);
}
Preconditions.checkArgument(baseMapper != null, "repository找不到baseMapper");
Preconditions.checkArgument(entityClass != null, "repository找不到entityClass");
Preconditions.checkArgument(idFieldName != null, "repository找不到idFieldName");
Preconditions.checkArgument(idColumnName != null, "repository找不到idColumnName");
Preconditions.checkArgument(converter != null, "repository找不到converter");
this.baseMapper = baseMapper;
this.queryWrapper = queryWrapper;
this.selectFields = selectFields;
}
@Override
public String getIdFieldName() {
return idFieldName;
}
@Override
public List<T> selectByStartId(Serializable startId, int count, boolean includeStartId) {
// mutating queryWrapper directly would have side effects, so work on a clone
QueryWrapper<T> clonedWrapper = queryWrapper.clone();
if (startId != null) {
if (includeStartId) {
clonedWrapper.ge(idColumnName, startId);
} else {
clonedWrapper.gt(idColumnName, startId);
}
}
clonedWrapper.orderByAsc(idColumnName).last("LIMIT " + count);
if (selectFields != null) {
Set<String> columns = selectFields.stream()
.map(converter::getColumnNotNull)
.collect(Collectors.toSet());
columns.add(idColumnName);
// note: the wrapper's select() must not be called repeatedly, so pass all columns in a single array
clonedWrapper.select(columns.toArray(new String[0]));
}
return baseMapper.selectList(clonedWrapper);
}
@Override
public int updateByIds(T updateEntity, List<Object> ids) {
UpdateWrapper<T> updateWrapper = Wrappers.<T>update().in(idColumnName, ids);
return baseMapper.update(updateEntity, updateWrapper);
}
@Override
public int batchUpdate(List<T> updateEntities) {
return updateEntities.stream().mapToInt(baseMapper::updateById).sum();
}
}
private Serializable getIdValue(Object entity, String idFieldName) {
Map<String, Object> entityFields = BeanMap.create(entity);
Serializable value = (Serializable) entityFields.get(idFieldName);
Preconditions.checkState(value != null, "记录的id不能为空");
return value;
}
private boolean needBatchUpdate(List<T> records) {
return records.stream()
.map(record -> ((Map<String, Object>) BeanMap.create(record)).entrySet().stream()
.filter(e -> !repositoryWrapper.getIdFieldName().equals(e.getKey()))
.collect(Collectors.toList()))
.distinct()
.count() > 1;
}
@Data
@Builder
@AllArgsConstructor
@NoArgsConstructor
public static class RepairReq {
private String startId;
private Integer batchSize;
private Integer limit;
private Boolean onlyPrint;
private static final RepairReq DEFAULT = RepairReq.builder()
.batchSize(20)
.limit(1)
.onlyPrint(true)
.build();
public Integer getBatchSize() {
return Optional.ofNullable(batchSize).orElse(DEFAULT.batchSize);
}
public Integer getLimit() {
return Optional.ofNullable(limit).orElse(DEFAULT.limit);
}
public Boolean getOnlyPrint() {
return Optional.ofNullable(onlyPrint).orElse(DEFAULT.onlyPrint);
}
}
}
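A usage sketch for RepairDataHelper (not part of the commit); the entity type, the needsFix predicate and the batch/limit numbers are assumptions.

package cn.axzo.pokonyan.dao.utils;

import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import java.util.stream.Collectors;

public class RepairDataSketch {
    // T is a hypothetical entity mapped by MyBatis-Plus
    public <T> int repairAll(BaseMapper<T> mapper, Class<T> entityClz) {
        RepairDataHelper<T> helper = RepairDataHelper.<T>builder()
                .mapper(mapper)
                .clz(entityClz)
                // the updater returns only the records that actually need to be written back
                .updater(records -> records.stream()
                        .filter(this::needsFix)   // hypothetical predicate
                        .collect(Collectors.toList()))
                .build();
        // dry run first: onlyPrint=true logs the ids that would be updated without writing
        RepairDataHelper.RepairReq preview = RepairDataHelper.RepairReq.builder()
                .batchSize(100)
                .limit(1000)
                .onlyPrint(true)
                .build();
        return helper.repair(preview);
    }

    private <T> boolean needsFix(T record) {
        // placeholder predicate for the sketch
        return false;
    }
}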

View File

@ -0,0 +1,52 @@
package cn.axzo.pokonyan.dao.wrapper;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
@Retention(RetentionPolicy.RUNTIME)
@Target({ElementType.FIELD})
public @interface CriteriaField {
/**
* The field name on the MySQL entity, or a "Field__Operator" expression
* @return String
*/
String field() default "";
Operator operator() default Operator.EQ;
/**
* Defaults to true: when the value is an empty collection or map, the condition is dropped.
* Callers serializing with fastjson tend to turn null collections into empty ones,
* so empty collections are filtered out by default.
*
* @return boolean
*/
boolean filterEmpty() default true;
/**
* Defaults to true: when the value is null, the condition is dropped.
*
* @return boolean
*/
boolean filterNull() default true;
/**
* Whether to ignore this field when building the query.
*
* @return
*/
boolean ignore() default false;
/**
* Column prefix added when the field is rendered into SQL; with prefix 'a' the column becomes a.id.
* Useful for joined queries, e.g.:
* @Select("select bill.* from bms_plus_pay_bill as bill, bms_plus_pay_audit as audit ${ew.customSqlSegment} " +
* "group by bill.id")
* IPage<PayBill> pageWithQueryPayAudit(IPage<PayBill> page, @Param(Constants.WRAPPER) Wrapper wrapper);
*
* @return
*/
String prefix() default "";
}
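A hypothetical query bean using this annotation (not part of the commit), converted through QueryWrapperHelper.fromBean; the properties and the target entity class are assumptions.

package cn.axzo.pokonyan.dao.wrapper;

import cn.axzo.pokonyan.dao.mysql.QueryWrapperHelper;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import lombok.Data;
import java.util.List;

// Each annotated property becomes a condition; null values are dropped by default (filterNull = true).
@Data
public class DemoQuery {
    @CriteriaField(operator = Operator.EQ)
    private String status;

    @CriteriaField(field = "name", operator = Operator.LIKE)
    private String nameKeyword;

    @CriteriaField(operator = Operator.IN)
    private List<Long> ids;

    // convert to a MyBatis-Plus wrapper for some entity class T (hypothetical)
    public <T> QueryWrapper<T> toWrapper(Class<T> entityClz) {
        return QueryWrapperHelper.fromBean(this, entityClz);
    }
}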

View File

@ -0,0 +1,408 @@
package cn.axzo.pokonyan.dao.wrapper;
import com.google.common.base.Preconditions;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableListMultimap;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.Getter;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.reflect.FieldUtils;
import org.springframework.cglib.beans.BeanMap;
import org.springframework.util.CollectionUtils;
import java.lang.reflect.Field;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.ConcurrentMap;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.Stream;
@Slf4j
public class CriteriaWrapper {
private static final String DELIMITER = "__";
/**
* Use a ConcurrentMap; a plain HashMap would not be thread-safe here.
*/
private static ConcurrentMap<Class, Map<String, CriteriaField>> criteriaFieldAnnotations = Maps.newConcurrentMap();
@Getter
private Boolean andOperator = true;
@Getter
private ImmutableListMultimap<String, QueryField> queryFieldMap;
public CriteriaWrapper() {
queryFieldMap = ImmutableListMultimap.of();
}
public CriteriaWrapper(ImmutableListMultimap<String, QueryField> queryFieldMap, boolean andOperator) {
this.queryFieldMap = queryFieldMap;
this.andOperator = andOperator;
}
public static CriteriaWrapperBuilder builder() {
return new CriteriaWrapperBuilder();
}
public static class CriteriaWrapperBuilder {
private List<QueryField> queryFields = Lists.newArrayList();
private boolean andOperator = true;
/**
* @param key   the java bean field name, or a "Field__Operator" expression
* @param value the condition is dropped when value is null, except for IS_NULL / IS_NOT_NULL which accept null
* @return CriteriaWrapperBuilder
*/
public CriteriaWrapperBuilder queryField(String key, Object value) {
Preconditions.checkArgument(!Strings.isNullOrEmpty(key));
return queryField(key, QueryField.getDefaultOperator(key, value), value);
}
/**
*
* @param key             the java bean field name, or a "Field__Operator" expression
* @param defaultOperator Operator
* @param value           the condition is dropped when value is null, except for IS_NULL / IS_NOT_NULL which accept null
* @return CriteriaWrapperBuilder
*/
public CriteriaWrapperBuilder queryField(String key, Operator defaultOperator, Object value) {
Preconditions.checkArgument(!Strings.isNullOrEmpty(key));
QueryField queryField = QueryField.buildWithNullFilter(key, defaultOperator, value);
if (queryField != null) {
this.queryFields.add(queryField);
}
return this;
}
/**
*
* @param condition the condition is dropped when this is false
* @param key       the java bean field name, or a "Field__Operator" expression
* @param value     the condition is dropped when value is null
* @return CriteriaWrapperBuilder
*/
public CriteriaWrapperBuilder queryField(boolean condition, String key, Object value) {
Preconditions.checkArgument(!Strings.isNullOrEmpty(key));
return queryField(condition, key, QueryField.getDefaultOperator(key, value), value);
}
/**
*
* @param condition       the condition is dropped when this is false
* @param key             the java bean field name, or a "Field__Operator" expression
* @param defaultOperator default query Operator
* @param value           the condition is dropped when value is null
* @return CriteriaWrapperBuilder
*/
public CriteriaWrapperBuilder queryField(boolean condition, String key, Operator defaultOperator, Object value) {
Preconditions.checkArgument(!Strings.isNullOrEmpty(key));
return queryField(condition, key, defaultOperator, value, StringUtils.EMPTY);
}
/**
*
* @param condition when false, this condition is skipped
* @param key the java bean property name, or a key in "Field__Operator" form
* @param defaultOperator the operator used when the key carries no "__Operator" suffix
* @param value the query value
* @param prefix a prefix prepended to the field name (e.g. a table alias)
* @return CriteriaWrapperBuilder
*/
public CriteriaWrapperBuilder queryField(boolean condition, String key, Operator defaultOperator,
Object value, String prefix) {
Preconditions.checkArgument(!Strings.isNullOrEmpty(key));
if (!condition) {
return this;
}
queryFields.add(QueryField.build(key, defaultOperator, value, prefix));
return this;
}
/**
*
* @param params {key, value} map; keys support the "Field__Operator" form, and entries with a null value are filtered out
* @return CriteriaWrapperBuilder
*/
public CriteriaWrapperBuilder queryField(Map<String, Object> params) {
return queryField(params, null);
}
/**
*
* @param params {key, value} map; keys support the "Field__Operator" form, and entries with a null value are filtered out
* @param converter transforms each QueryField (e.g. its operator or value); returning null drops the field
* @return CriteriaWrapperBuilder
*/
public CriteriaWrapperBuilder queryField(Map<String, Object> params, Function<QueryField, QueryField> converter) {
List<QueryField> queryFields = params.entrySet().stream()
.map(entry -> QueryField.build(entry.getKey(), entry.getValue()))
.map(queryField -> converter == null ? queryField : converter.apply(queryField))
.filter(Objects::nonNull)
.filter(queryField -> queryField.getValue() != null || queryField.getOperator().allowNullValue())
.collect(Collectors.toList());
this.queryFields.addAll(queryFields);
return this;
}
public CriteriaWrapperBuilder queryFields(List<QueryField> queryFields) {
this.queryFields.addAll(queryFields);
return this;
}
public CriteriaWrapperBuilder andOperator(boolean andOperator) {
this.andOperator = andOperator;
return this;
}
public CriteriaWrapper build() {
ImmutableListMultimap<String, QueryField> multimap = queryFields.stream()
.collect(ImmutableListMultimap.toImmutableListMultimap(QueryField::getField, Function.identity()));
return new CriteriaWrapper(multimap, this.andOperator);
}
}
/**
* Build a CriteriaWrapper from a bean object.
* The queryable properties of the bean must be declared via {@link CriteriaField}.
* @param bean the bean carrying the query conditions
* @return the criteria wrapper
*/
public static CriteriaWrapper fromBean(Object bean) {
return fromBean(bean, true);
}
/**
* Build a CriteriaWrapper from a bean object.
* The queryable properties of the bean must be declared via {@link CriteriaField}.
* @param bean the bean carrying the query conditions
* @param andOperator true to join the conditions with AND, false to join them with OR
* @return the criteria wrapper
*/
public static CriteriaWrapper fromBean(Object bean, boolean andOperator) {
return fromBean(bean, andOperator, null);
}
/**
* Build a CriteriaWrapper from a bean object.
* The queryable properties of the bean must be declared via {@link CriteriaField}.
* @param bean the bean carrying the query conditions
* @param andOperator true to join the conditions with AND, false to join them with OR
* @param converter maps each QueryField to zero or more QueryFields; null results are dropped
* @return the criteria wrapper
*/
public static CriteriaWrapper fromBean(Object bean, boolean andOperator,
Function<QueryField, List<QueryField>> converter) {
return fromBean(bean, andOperator, converter, ImmutableMap.of());
}
/**
* Build a CriteriaWrapper from a bean object.
* The queryable properties of the bean must be declared via {@link CriteriaField}.
*
* @param bean the bean carrying the query conditions
* @param andOperator true to join the conditions with AND, false to join them with OR
* @param converter maps each QueryField to zero or more QueryFields; null results are dropped
* @param defaultOperators per-property default operator, used for properties without a {@link CriteriaField} annotation
* @return the criteria wrapper
*/
public static CriteriaWrapper fromBean(Object bean, boolean andOperator,
Function<QueryField, List<QueryField>> converter, Map<String, Operator> defaultOperators) {
Map<String, CriteriaField> annotations = getFieldAnnotations(bean.getClass());
Map<String, Object> beanMap = (bean instanceof Map) ? (Map<String, Object>) bean : BeanMap.create(bean);
List<QueryField> queryFields = beanMap.entrySet().stream()
.map(entry -> QueryField.build(entry.getKey(), entry.getValue(), annotations,
(defaultOperators != null ? defaultOperators.get(entry.getKey()): null)))
.filter(Objects::nonNull)
.flatMap(queryField -> converter == null ? Stream.of(queryField) : converter.apply(queryField).stream())
.filter(Objects::nonNull)
.collect(Collectors.toList());
return CriteriaWrapper.builder().queryFields(queryFields).andOperator(andOperator).build();
}
private static Map<String, CriteriaField> getFieldAnnotations(Class beanClazz) {
return criteriaFieldAnnotations.computeIfAbsent(beanClazz, clazz ->
FieldUtils.getFieldsListWithAnnotation(clazz, CriteriaField.class).stream()
.collect(Collectors.toMap(Field::getName, field -> field.getAnnotation(CriteriaField.class))));
}
@Builder(toBuilder = true)
@Data
@AllArgsConstructor
public static class QueryField {
/**
* Template for building a JSON field query key, e.g. content->'$.field1'__LIKE
*/
private final static String JSON_QUERY_FILED_TEMPLATE = "%s->'$.%s'%s";
/**
* Marker that identifies a JSON query expression when it is used as a field
*/
private final static String JSON_QUERY_FILED_FLAG = "->";
/**
* Name prefix for logical-control (virtual) fields
*/
private final static String LOGICAL_CONTROL_FIELD_PREFIX = "$$";
/**
* the java bean property name
*/
private String field;
/**
* the query operator
*/
private Operator operator;
/**
* the query value
*/
private Object value;
/**
* the field name prefix (e.g. a table alias)
*/
private String prefix;
public boolean isLogicalControlField() {
return operator == Operator.OR;
}
/**
* Whether the given field name denotes a logical-control field (e.g. an or/and group)
* @param fieldName the field name
* @return true if the name starts with the logical-control prefix
*/
public static boolean isLogicalControlField(String fieldName) {
return fieldName.startsWith(LOGICAL_CONTROL_FIELD_PREFIX);
}
/**
* Whether the field name is a JSON query field; a name containing "->" marks a JSON query
* @param fieldName the field name
* @return true if the name contains the JSON query marker
*/
public static boolean isJsonQueryField(String fieldName) {
return fieldName.contains(JSON_QUERY_FILED_FLAG);
}
public String getFieldWithPrefix() {
if (Strings.isNullOrEmpty(prefix)) {
return field;
}
return prefix + '.' + field;
}
public String getColumnWithPrefix(String column) {
if (Strings.isNullOrEmpty(prefix)) {
return column;
}
return prefix + '.' + column;
}
private static QueryField build(String field, Object value, Map<String, CriteriaField> annotations, Operator defaultOperator) {
CriteriaField fieldAnnotation = annotations.get(field);
if (fieldAnnotation == null) {
// Without a CriteriaField annotation, null values are filtered out by default
return QueryField.buildWithNullFilter(field, (defaultOperator != null ? defaultOperator : getDefaultOperator(field, value)), value);
}
if (fieldAnnotation.ignore()) {
return null;
}
if (fieldAnnotation.filterEmpty()
&& (value instanceof Collection)
&& CollectionUtils.isEmpty((Collection)value)) {
return null;
}
if (fieldAnnotation.filterEmpty()
&& (value instanceof Map)
&& CollectionUtils.isEmpty((Map) value)) {
return null;
}
if (fieldAnnotation.filterNull() && Objects.isNull(value)) {
return null;
}
String fieldName = Strings.isNullOrEmpty(fieldAnnotation.field()) ? field : fieldAnnotation.field();
// XXX If the annotation's operator is EQ, we cannot tell whether the user set EQ explicitly or it is the default Operator.EQ; keep it as-is for now
return build(fieldName, fieldAnnotation.operator(), value, fieldAnnotation.prefix());
}
private static QueryField build(String key, Object value) {
return QueryField.build(key, getDefaultOperator(key, value), value, StringUtils.EMPTY);
}
private static QueryField build(String key, Operator defaultOperator, Object value, String prefix) {
String field = StringUtils.substringBefore(key, DELIMITER);
String expression = StringUtils.substringAfter(key, DELIMITER);
Operator operator = Strings.isNullOrEmpty(expression) ? defaultOperator : Operator.valueOf(expression);
if (operator == Operator.OR) {
List<QueryField> nestedFields = ((Map<String, Object>)value).entrySet()
.stream().map(e -> build(e.getKey(), Operator.EQ, e.getValue(), prefix)).collect(Collectors.toList());
// FIXME: to keep the change small, the field name is rewritten here and the logical-control state is passed along through the name
return new QueryField(LOGICAL_CONTROL_FIELD_PREFIX + field, operator, nestedFields, prefix);
}
if (operator == Operator.JSON || operator == Operator.JSON_OR) {
List<QueryField> nestedFields = ((Map<String, Object>)value).entrySet()
.stream().map(e -> build(buildJsonQueryFieldKey(field, e.getKey()), Operator.EQ, e.getValue(), prefix)).collect(Collectors.toList());
// FIXME: to keep the change small, the field name is rewritten here and the logical-control state is passed along through the name
return new QueryField(LOGICAL_CONTROL_FIELD_PREFIX + field, operator, nestedFields, prefix);
}
return new QueryField(field, operator, value, prefix);
}
private static QueryField buildWithNullFilter(String key, Operator defaultOperator, Object value) {
QueryField queryField = build(key, defaultOperator, value, StringUtils.EMPTY);
if (queryField.getValue() == null && !queryField.getOperator().allowNullValue()) {
return null;
}
return queryField;
}
private static Operator getDefaultOperator(String field, Object value) {
if (!(value instanceof Collection)) {
return Operator.EQ;
}
// value is a Collection here; check whether it is empty
if (((Collection)value).isEmpty()) {
log.error("value is collection and is empty, field={}", field);
}
return Operator.IN;
}
/**
* Build the key for a JSON query condition.
* For example, fieldName=content and jsonFieldName=text__LIKE yield content->'$.text'__LIKE
*
* @param fieldName the database column name
* @param jsonFieldName the field name inside the JSON document, optionally with an "__Operator" suffix
* @return the composed JSON query key
*/
private static String buildJsonQueryFieldKey(String fieldName, String jsonFieldName) {
String jsonField = StringUtils.substringBefore(jsonFieldName, DELIMITER);
String jsonExpression = StringUtils.substringAfter(jsonFieldName, DELIMITER);
if (Strings.isNullOrEmpty(jsonExpression)) {
return String.format(JSON_QUERY_FILED_TEMPLATE, fieldName, jsonField, "");
}
return String.format(JSON_QUERY_FILED_TEMPLATE, fieldName, jsonField, DELIMITER + jsonExpression);
}
}
}
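// Usage sketch (illustrative, not part of this commit): two ways of assembling conditions.
// Keys may carry the operator in "Field__Operator" form; null values are filtered out unless
// the operator allows them. The field names below are hypothetical.
class CriteriaWrapperUsageExample {
    static CriteriaWrapper fromBuilder() {
        return CriteriaWrapper.builder()
                .queryField("workerName__LIKE", "tom")            // LIKE taken from the key suffix
                .queryField("status", Lists.newArrayList(1, 2))   // Collection value defaults to IN
                .queryField("deletedAt__IS_NULL", null)           // IS_NULL is allowed to carry a null value
                .andOperator(true)
                .build();
    }

    static CriteriaWrapper fromParams(Map<String, Object> params) {
        // e.g. params = {"workerName__LIKE": "tom", "age__GT": 18}
        return CriteriaWrapper.builder().queryField(params).build();
    }
}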

View File

@ -0,0 +1,47 @@
package cn.axzo.pokonyan.dao.wrapper;
import lombok.AllArgsConstructor;
import lombok.Getter;
@Getter
@AllArgsConstructor
public enum Operator {
IN,
LIKE,
EQ,
NE,
GT,
GE,
LT,
LE,
IS_NULL,
IS_NOT_NULL,
BETWEEN,
/**
* starts with (prefix match)
*/
SW,
/**
* ends with (suffix match)
*/
EW,
ORDER,
OR,
/**
* full search (FULL_SEARCH)
*/
FS,
/**
* JSON query; currently only MySQL JSON columns are supported.
* Example condition: {"name":"张三","time__GT":"1595235995326","carNo__LIKE":"川A"} (see the usage sketch after this enum)
*/
JSON,
JSON_OR,
@Deprecated
CUSTOM;
public boolean allowNullValue() {
return this == IS_NULL || this == IS_NOT_NULL;
}
}
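// Usage sketch (illustrative, not part of this commit): how the JSON operator is meant to be used
// through CriteriaWrapper. The column name "content" and the JSON fields below are hypothetical;
// each map entry becomes a content->'$.xxx' condition with its own operator suffix.
class OperatorJsonUsageExample {
    static CriteriaWrapper example() {
        java.util.Map<String, Object> jsonConditions = new java.util.LinkedHashMap<>();
        jsonConditions.put("name", "tom");               // content->'$.name' = 'tom'
        jsonConditions.put("time__GT", 1595235995326L);  // content->'$.time' > 1595235995326
        return CriteriaWrapper.builder()
                .queryField("content__JSON", jsonConditions)
                .build();
    }
}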

View File

@ -0,0 +1,19 @@
package cn.axzo.pokonyan.dao.wrapper;
import com.google.common.collect.ImmutableListMultimap;
/**
* Abstracts the underlying query implementation (a toy sketch follows this interface)
*
*/
public interface OperatorProcessor<T> {
/**
* Assemble all query conditions into the target query object
*
* @param queryColumnMap multimap of column name to its query fields
* @param andOperator true to join the conditions with AND, false to join them with OR
* @return the assembled query object
*/
T assembleAllQueryWrapper(ImmutableListMultimap<String, CriteriaWrapper.QueryField> queryColumnMap, boolean andOperator);
}
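// Minimal sketch (illustrative, not part of this commit): a toy OperatorProcessor that renders the
// conditions as a debug string. A production implementation would target a real query object (for
// example a MyBatis-Plus QueryWrapper); the loop below only shows how the column -> QueryField
// multimap and the and/or flag are meant to be consumed.
class DebugOperatorProcessor implements OperatorProcessor<String> {
    @Override
    public String assembleAllQueryWrapper(ImmutableListMultimap<String, CriteriaWrapper.QueryField> queryColumnMap,
                                          boolean andOperator) {
        StringBuilder condition = new StringBuilder();
        String joiner = andOperator ? " AND " : " OR ";
        for (java.util.Map.Entry<String, CriteriaWrapper.QueryField> entry : queryColumnMap.entries()) {
            if (condition.length() > 0) {
                condition.append(joiner);
            }
            CriteriaWrapper.QueryField queryField = entry.getValue();
            condition.append(entry.getKey())            // resolved column name
                    .append(' ')
                    .append(queryField.getOperator())   // e.g. EQ, LIKE, IN
                    .append(' ')
                    .append(queryField.getValue());
        }
        return condition.toString();
    }
}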

View File

@ -0,0 +1,158 @@
package cn.axzo.pokonyan.dao.wrapper;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.ImmutableListMultimap;
import com.google.common.collect.ImmutableTable;
import com.google.common.collect.ListMultimap;
import com.google.common.collect.Multimaps;
import com.google.common.collect.Table;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.extern.slf4j.Slf4j;
import java.time.Instant;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.util.Collection;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collector;
import java.util.stream.Collectors;
/**
* Converts a CriteriaWrapper into the underlying query wrapper (e.g. a MyBatis-Plus QueryWrapper for MySQL) through an OperatorProcessor
*/
@Slf4j
@Builder
@AllArgsConstructor
public class SimpleWrapperConverter<T> {
private OperatorProcessor<T> operatorProcessor;
/**
* key = property name, value = column name
*/
private Map<String, String> fieldColumnMap;
/**
* key = property name, value = property type
*/
private Map<String, Class<?>> fieldTypeMap;
/**
* Value converters: row key = the value's original type, column key = the target property type, value = the conversion function
*/
private static final Table<Class<?>, Class<?>, Function<Object, Object>> VALUE_CONVERTERS =
ImmutableTable.<Class<?>, Class<?>, Function<Object, Object>>builder()
.put(Long.class, LocalDateTime.class, o -> Instant.ofEpochMilli((Long) o).atZone(ZoneId.systemDefault()).toLocalDateTime())
.put(Long.class, LocalDate.class, o -> Instant.ofEpochMilli((Long) o).atZone(ZoneId.systemDefault()).toLocalDate())
.put(Long.class, Date.class, o -> new Date((Long) o))
.build();
public T toWrapper(CriteriaWrapper criteriaWrapper) {
ListMultimap<String, CriteriaWrapper.QueryField> multimap = criteriaWrapper.getQueryFieldMap()
.asMap().entrySet().stream()
.collect(Multimaps.flatteningToMultimap(
query -> getColumnNotNull(query.getKey()),
query -> convertValue(query.getValue()).stream(),
ArrayListMultimap::create));
return operatorProcessor.assembleAllQueryWrapper(ImmutableListMultimap.copyOf(multimap), criteriaWrapper.getAndOperator());
}
public T toWrapper(CriteriaWrapper criteriaWrapper, SimpleWrapperConverter... anotherConverters) {
ListMultimap<String, CriteriaWrapper.QueryField> multimap = criteriaWrapper.getQueryFieldMap()
.asMap().entrySet().stream()
.collect(Multimaps.flatteningToMultimap(
query -> getColumnByConverters(query.getKey(), anotherConverters),
query -> convertValue(query.getValue()).stream(),
ArrayListMultimap::create));
return operatorProcessor.assembleAllQueryWrapper(ImmutableListMultimap.copyOf(multimap), criteriaWrapper.getAndOperator());
}
public Optional<String> getColumn(String fieldName) {
if (CriteriaWrapper.QueryField.isLogicalControlField(fieldName) ||
CriteriaWrapper.QueryField.isJsonQueryField(fieldName)) {
// Logical-control fields are virtual and JSON query fields are built with a special syntax; neither needs a column lookup
return Optional.of(fieldName);
}
return Optional.ofNullable(fieldColumnMap.get(fieldName));
}
public String getColumnNotNull(String fieldName) {
return getColumn(fieldName).orElseThrow(() -> new RuntimeException("parameter validation failed: unknown query field " + fieldName));
}
public String getColumnByConverters(String fieldName, SimpleWrapperConverter... anotherConverters) {
// Look up the field on the current class first
Optional<String> column = getColumn(fieldName);
if (column.isPresent()) {
return column.get();
}
for (SimpleWrapperConverter anotherConverter : anotherConverters) {
column = anotherConverter.getColumn(fieldName);
if (column.isPresent()) {
return column.get();
}
}
throw new RuntimeException("parameter validation failed: unknown query field " + fieldName);
}
/**
* Convert each QueryField's value to the type of the target property
*
* @param queryFields the query fields to convert
* @return the query fields with converted values
*/
private List<CriteriaWrapper.QueryField> convertValue(Collection<CriteriaWrapper.QueryField> queryFields) {
return queryFields.stream()
.map(queryField -> queryField.toBuilder().value(getConvertedValue(queryField)).build())
.collect(Collectors.toList());
}
private Object getConvertedValue(CriteriaWrapper.QueryField queryField) {
if (queryField.isLogicalControlField()) {
List<CriteriaWrapper.QueryField> subfields = (List<CriteriaWrapper.QueryField>) queryField.getValue();
// resolve the column names of the nested fields
for (final CriteriaWrapper.QueryField subfield : subfields) {
subfield.setField(getColumnNotNull(subfield.getField()));
}
}
if (queryField.getValue() == null) {
return null;
}
// Look up the property's declared type and the value's runtime type; if a converter is registered for that pair, apply it, otherwise return the value as-is
Class<?> fieldType = fieldTypeMap.get(queryField.getField());
Class<?> valueType = queryField.getValue().getClass();
// If the source value is a collection, convert each element individually
if (Collection.class.isAssignableFrom(valueType)) {
Collection collection = (Collection) queryField.getValue();
return collection.stream()
.filter(Objects::nonNull)
.map(e -> {
Function<Object, Object> valueConverter = VALUE_CONVERTERS.get(e.getClass(), fieldType);
return valueConverter == null ? e : valueConverter.apply(e);
}).collect(getCollectionCollector(valueType));
}
Function<Object, Object> valueConverter = VALUE_CONVERTERS.get(valueType, fieldType);
return valueConverter == null ? queryField.getValue() : valueConverter.apply(queryField.getValue());
}
private Collector getCollectionCollector(Class clazz) {
if (List.class.isAssignableFrom(clazz)) {
return Collectors.toList();
}
if (Set.class.isAssignableFrom(clazz)) {
return Collectors.toSet();
}
throw new UnsupportedOperationException(String.format("unsupported collection type of %s", clazz.getSimpleName()));
}
}
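// Usage sketch (illustrative, not part of this commit): wiring a SimpleWrapperConverter by hand.
// The property/column names, the entity they describe and the passed-in processor are hypothetical;
// note how a Long timestamp is converted to LocalDateTime because of the registered VALUE_CONVERTERS.
class SimpleWrapperConverterUsageExample {
    static String example(OperatorProcessor<String> processor) {
        SimpleWrapperConverter<String> converter = SimpleWrapperConverter.<String>builder()
                .operatorProcessor(processor)
                // java bean property -> database column
                .fieldColumnMap(com.google.common.collect.ImmutableMap.of(
                        "workerName", "worker_name",
                        "createdAt", "created_at"))
                // java bean property -> property type, used to pick a value converter
                .fieldTypeMap(com.google.common.collect.ImmutableMap.<String, Class<?>>of(
                        "workerName", String.class,
                        "createdAt", LocalDateTime.class))
                .build();
        CriteriaWrapper criteria = CriteriaWrapper.builder()
                .queryField("workerName__LIKE", "tom")
                .queryField("createdAt__GT", System.currentTimeMillis()) // Long converted to LocalDateTime
                .build();
        return converter.toWrapper(criteria);
    }
}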

View File

@ -0,0 +1,17 @@
package cn.axzo.pokonyan.dao.wrapper;
/**
* Intended for package-internal use only
*
*/
@FunctionalInterface
public interface TriConsumer<K, V, S> {
/**
* A consumer that accepts three arguments
*
* @param k the first argument
* @param v the second argument
* @param s the third argument
*/
void accept(K k, V v, S s);
}