diff --git a/docs/test.md b/docs/test.md index eef42e72..35253ff7 100644 --- a/docs/test.md +++ b/docs/test.md @@ -121,7 +121,6 @@ public class RedisEnvTest extends BaseTestCase { | Redis | `@RedisEnv` | `@RedisProperties` | | | DataSource | `@DataSourceEnv` | `@DataSourceProperties` | | | ElasticSearch | `@EsEnv` | `@EsProperties` | | -| Feign | `@FeignEnv` | | | | Mybatis | `@MybatisEnv` | `@MybatisProperties` | | | MybatisPlus | `@MybatisPlusEnv` | `@MybatisPlusProperties` | | | ... | | | | diff --git a/uno-bom/pom.xml b/uno-bom/pom.xml index 77e51d0e..88ca1c6d 100644 --- a/uno-bom/pom.xml +++ b/uno-bom/pom.xml @@ -5,8 +5,8 @@ org.springframework.boot spring-boot-starter-parent - 2.3.12.RELEASE - + 3.1.5 + 4.0.0 @@ -24,48 +24,46 @@ 1.0.1 2.11.4 - 1.2.78 1.12.2 3.3.0 1.18.22 5.6.2 - 4.0.0 - 30.1.1-jre + 5.6.0 + 31.1-jre 2.2.17 1.6.2 - 3.2.0 + 3.0.0 3.0.2 3.5.3 - - 4.0.11 - 1.2.5 + 1.2.20 8.0.22 12.2.0.1 42.2.22 8.4.1.jre8 - 3.3.6 - - 0.31.0 - 3.0.0 + 4.1.3 + 27.0 + 3.7.0 2.5.1 - 4.4.0 - 1.8 8.7.1 6.7.0 5.3.1 - 2.4.0 3.2.4 + 3.2.1 - 2.0.1 + + + 2.0.1 + + 0.4.6 @@ -264,18 +262,18 @@ ${swagger-v2.version} provided - - - com.alibaba - QLExpress - ${qlexpress.version} - org.mybatis.spring.boot mybatis-spring-boot-starter ${mybatis-spring-boot.version} provided + + org.mybatis.spring.boot + mybatis-spring-boot-starter-test + ${mybatis-spring-boot.version} + test + com.baomidou mybatis-plus-boot-starter @@ -328,36 +326,6 @@ ${mybatis-plus-dynamic.version} provided - - org.hswebframework.web - hsweb-commons-crud - ${hsweb-framework.version} - - - io.opentracing - opentracing-api - ${oepntracing.version} - - - io.opentracing - opentracing-util - ${oepntracing.version} - - - com.playtika.reactivefeign - feign-reactor-cloud - ${feign-reactor.version} - - - com.playtika.reactivefeign - feign-reactor-webclient - ${feign-reactor.version} - - - com.playtika.reactivefeign - feign-reactor-spring-configuration - ${feign-reactor.version} - io.springfox springfox-swagger2 @@ -383,26 +351,6 @@ postgis-jdbc ${postgis-jdbc.version} - - cn.afterturn - easypoi-annotation - ${easypoi.version} - - - cn.afterturn - easypoi-base - ${easypoi.version} - - - cn.afterturn - easypoi-web - ${easypoi-version} - - - org.apache.commons - commons-csv - ${appache-commons-csv.version} - com.influxdb influxdb-client-java @@ -437,7 +385,7 @@ jakarta.json jakarta.json-api - ${jakarta.json-api.version} + ${jakarta.version} com.taosdata.jdbc diff --git a/uno-components/uno-component-sequential/pom.xml b/uno-components/uno-component-sequential/pom.xml index 36c2441b..9cffbd4e 100644 --- a/uno-components/uno-component-sequential/pom.xml +++ b/uno-components/uno-component-sequential/pom.xml @@ -43,20 +43,10 @@ elasticsearch-java test - - org.elasticsearch.client - elasticsearch-rest-high-level-client - test - org.elasticsearch.client elasticsearch-rest-client test - - org.elasticsearch - elasticsearch - test - \ No newline at end of file diff --git a/uno-data/pom.xml b/uno-data/pom.xml index 742d29ce..e081571e 100644 --- a/uno-data/pom.xml +++ b/uno-data/pom.xml @@ -51,20 +51,11 @@ elasticsearch-java provided - - org.elasticsearch.client - elasticsearch-rest-high-level-client - provided - org.elasticsearch.client elasticsearch-rest-client provided - - org.elasticsearch - elasticsearch - jakarta.persistence jakarta.persistence-api @@ -81,10 +72,6 @@ taos-jdbcdriver provided - - cn.afterturn - easypoi-base - org.springframework.boot spring-boot-starter-data-jpa @@ -126,9 +113,8 @@ provided - dev.miku + io.asyncer r2dbc-mysql - provided 
io.r2dbc @@ -136,9 +122,8 @@ provided - io.r2dbc + org.postgresql r2dbc-postgresql - provided io.r2dbc @@ -184,11 +169,6 @@ - - org.apache.commons - commons-csv - test - org.springframework spring-webmvc diff --git a/uno-data/src/main/java/cc/allio/uno/data/query/excel/CellDataDeal.java b/uno-data/src/main/java/cc/allio/uno/data/query/excel/CellDataDeal.java deleted file mode 100644 index 5ee1267d..00000000 --- a/uno-data/src/main/java/cc/allio/uno/data/query/excel/CellDataDeal.java +++ /dev/null @@ -1,24 +0,0 @@ -package cc.allio.uno.data.query.excel; - -import java.util.Collection; -import java.util.List; -import java.util.Map; - -/** - * 实现设备历史监测数据进行导出时候,调用该类方法进行数据处理 - * - * @author cxd - * @date 2022/12/30 22:49 - * @since 1.1.2 - */ - -public interface CellDataDeal { - - /** - * @param dataMap - * @param cellHeader - * @return - */ - List> makeExcelData(Map> dataMap, CellHeader cellHeader); - -} diff --git a/uno-data/src/main/java/cc/allio/uno/data/query/excel/CellDataDealFactory.java b/uno-data/src/main/java/cc/allio/uno/data/query/excel/CellDataDealFactory.java deleted file mode 100644 index 9c4c0a43..00000000 --- a/uno-data/src/main/java/cc/allio/uno/data/query/excel/CellDataDealFactory.java +++ /dev/null @@ -1,14 +0,0 @@ -package cc.allio.uno.data.query.excel; - -import cc.allio.uno.data.query.param.DateDimension; - -public class CellDataDealFactory { - - public static CellDataDeal create(DateDimension[] contemporaneous) { - if (null != contemporaneous && contemporaneous.length > 0) { - return new ContemporaneousCellDataDeal(); - } else { - return new CollectionCellDataDeal(); - } - } -} diff --git a/uno-data/src/main/java/cc/allio/uno/data/query/excel/CellHeader.java b/uno-data/src/main/java/cc/allio/uno/data/query/excel/CellHeader.java deleted file mode 100644 index d803c790..00000000 --- a/uno-data/src/main/java/cc/allio/uno/data/query/excel/CellHeader.java +++ /dev/null @@ -1,62 +0,0 @@ -package cc.allio.uno.data.query.excel; - -import cn.afterturn.easypoi.excel.entity.params.ExcelExportEntity; -import cc.allio.uno.data.query.param.DateDimension; -import com.google.common.collect.Maps; -import lombok.Data; - -import java.util.List; -import java.util.Map; -import java.util.stream.Collectors; - -/** - * 实现设备历史监测数据进行导出时候,调用该类方法进行导出表格表头的生成 - * - * @author cxd - * @date 2022/12/30 22:49 - * @since 1.1.2 - */ -@Data -public class CellHeader { - - final Map headers; - final String timeField; - final String[] dataField; - final DateDimension[] contemporaneous; - - public CellHeader(DateDimension[] contemporaneous, String[] dataField, String timeField, List cellIndices) { - this.headers = Maps.newLinkedHashMap(); - Map cellIndicesMap = cellIndices.stream().collect(Collectors.toMap(CellIndex::getKey, CellIndex::getName)); - ExcelExportEntity colEntity = new ExcelExportEntity("采集时间", timeField); - colEntity.setNeedMerge(true); - colEntity.setWidth(20.00); - headers.put((String) colEntity.getKey(), colEntity); - if (null != contemporaneous && contemporaneous.length > 0) { - //同期数据 - for (int i = 0; i < dataField.length; i++) { - for (int i1 = 0; i1 < contemporaneous.length; i1++) { - colEntity = new ExcelExportEntity(cellIndicesMap.get(dataField[i]) + contemporaneous[i1].getDate(), dataField[i] + contemporaneous[i1].getDate()); - colEntity.setNeedMerge(true); - colEntity.setWidth(30.00); - headers.put((String) colEntity.getKey(), colEntity); - } - } - } else { - //非同期数据 - for (int i = 0; i < dataField.length; i++) { - colEntity = new ExcelExportEntity(cellIndicesMap.get(dataField[i]), 
dataField[i]); - colEntity.setWidth(20.00); - colEntity.setNeedMerge(true); - headers.put((String) colEntity.getKey(), colEntity); - } - } - this.timeField = timeField; - this.dataField = dataField; - this.contemporaneous = contemporaneous; - } - - - public ExcelExportEntity get(String field) { - return headers.get(field); - } -} diff --git a/uno-data/src/main/java/cc/allio/uno/data/query/excel/CellIndex.java b/uno-data/src/main/java/cc/allio/uno/data/query/excel/CellIndex.java deleted file mode 100644 index edb5e6e1..00000000 --- a/uno-data/src/main/java/cc/allio/uno/data/query/excel/CellIndex.java +++ /dev/null @@ -1,23 +0,0 @@ -package cc.allio.uno.data.query.excel; - -import lombok.*; - -import java.io.Serializable; - -@Getter -@Setter -@ToString -@RequiredArgsConstructor -@AllArgsConstructor -public class CellIndex implements Serializable { - - /** - * 指标值 - */ - private String key; - - /** - * 指标名称 - */ - private String name; -} diff --git a/uno-data/src/main/java/cc/allio/uno/data/query/excel/CollectionCellDataDeal.java b/uno-data/src/main/java/cc/allio/uno/data/query/excel/CollectionCellDataDeal.java deleted file mode 100644 index cb96dc12..00000000 --- a/uno-data/src/main/java/cc/allio/uno/data/query/excel/CollectionCellDataDeal.java +++ /dev/null @@ -1,46 +0,0 @@ -package cc.allio.uno.data.query.excel; - -import cn.afterturn.easypoi.excel.entity.params.ExcelExportEntity; -import cc.allio.uno.core.bean.ValueWrapper; -import cc.allio.uno.core.util.DateUtil; - -import java.util.*; - -/** - * 实现设备历史监测数据进行导出时候,调用该类方法进行{非同期的}数据处理 - * - * @author cxd - * @date 2022/12/30 22:49 - * @since 1.1.2 - */ -public class CollectionCellDataDeal implements CellDataDeal { - - @Override - public List> makeExcelData(Map> dataMap, CellHeader cellHeader) { - List colList = new ArrayList<>(cellHeader.getHeaders().values()); - String timeField = cellHeader.getTimeField(); - //非同期数据 - List> list = new ArrayList<>(); - Collection dataList = dataMap.values().stream().findFirst().orElse(new ArrayList<>()); - for (Object dataDatum : dataList) { - ValueWrapper valueWrapper = ValueWrapper.get(dataDatum); - Map valMap = new HashMap<>(); - for (ExcelExportEntity excelExportEntity : colList) { - if (timeField.equals(excelExportEntity.getKey())) { - Object maybeTime = valueWrapper.getForce(timeField); - Date dateTime = null; - if (maybeTime.getClass().isAssignableFrom(String.class)) { - dateTime = DateUtil.parse(maybeTime.toString()); - } else if (maybeTime.getClass().isAssignableFrom(Date.class)) { - dateTime = (Date) maybeTime; - } - valMap.put(timeField, DateUtil.format(dateTime, TimeFormatDeal.getTimeFieldFormat(null))); - } else { - valMap.put((String) excelExportEntity.getKey(), valueWrapper.getForce((String) excelExportEntity.getKey())); - } - } - list.add(valMap); - } - return list; - } -} diff --git a/uno-data/src/main/java/cc/allio/uno/data/query/excel/ContemporaneousCellDataDeal.java b/uno-data/src/main/java/cc/allio/uno/data/query/excel/ContemporaneousCellDataDeal.java deleted file mode 100644 index 5b1fef08..00000000 --- a/uno-data/src/main/java/cc/allio/uno/data/query/excel/ContemporaneousCellDataDeal.java +++ /dev/null @@ -1,61 +0,0 @@ -package cc.allio.uno.data.query.excel; - -import cc.allio.uno.core.bean.ValueWrapper; -import cc.allio.uno.core.util.DateUtil; -import cc.allio.uno.data.query.param.DateDimension; -import com.google.common.collect.Maps; - -import java.util.*; -import java.util.stream.Collectors; - -/** - * 实现设备历史监测数据进行导出时候,调用该类方法进行{同期的}数据处理 - * - * @author cxd - * @date 2022/12/30 
22:49 - * @since 1.1.2 - */ -public class ContemporaneousCellDataDeal extends CollectionCellDataDeal { - - @Override - public List> makeExcelData(Map> dataMap, CellHeader cellHeader) { - String timeField = cellHeader.getTimeField(); - DateDimension[] contemporaneous = cellHeader.getContemporaneous(); - String[] dataField = cellHeader.getDataField(); - //同期数据 - return dataMap.entrySet().stream() - .flatMap(entry -> - entry.getValue().stream().map(dataDatum -> { - ValueWrapper valueWrapper = ValueWrapper.get(dataDatum); - Object maybeTime = valueWrapper.getForce(timeField); - Map valMap = Maps.newHashMap(); - Date dateTime = null; - if (maybeTime.getClass().isAssignableFrom(String.class)) { - dateTime = DateUtil.parse(maybeTime.toString()); - } else if (maybeTime.getClass().isAssignableFrom(Date.class)) { - dateTime = (Date) maybeTime; - } - valMap.put(timeField, DateUtil.format(dateTime, TimeFormatDeal.getTimeFieldFormat(contemporaneous[0].getDimension()))); - String timeHeadFormat = TimeFormatDeal.getTimeHeadFormat(contemporaneous[0].getDimension()); - for (int i = 0; i < dataField.length; i++) { - valMap.put(dataField[i] + DateUtil.format(dateTime, timeHeadFormat), valueWrapper.getForce(dataField[i])); - } - return valMap; - })) - .collect(Collectors.groupingBy(k -> k.get(timeField))) - .values() - .stream() - .map(g -> g.stream().reduce(Maps.newHashMap(), (a, b) -> { - a.putAll(b); - return a; - })) - .sorted((o1, o2) -> { - Date time = DateUtil.parse((String) o2.get(timeField), TimeFormatDeal.getTimeFieldFormat(contemporaneous[0].getDimension())); - Date timeCom = DateUtil.parse((String) o1.get(timeField), TimeFormatDeal.getTimeFieldFormat(contemporaneous[0].getDimension())); - return time.compareTo(timeCom); - }) - .collect(Collectors.toList()); - } - -} - diff --git a/uno-data/src/main/java/cc/allio/uno/data/query/excel/TimeFormatDeal.java b/uno-data/src/main/java/cc/allio/uno/data/query/excel/TimeFormatDeal.java deleted file mode 100644 index 6b30fd4f..00000000 --- a/uno-data/src/main/java/cc/allio/uno/data/query/excel/TimeFormatDeal.java +++ /dev/null @@ -1,39 +0,0 @@ -package cc.allio.uno.data.query.excel; - -import cc.allio.uno.core.util.DateUtil; -import cc.allio.uno.data.query.param.TimeDimension; - -/** - * 导出表头和表数据的时间格式处理类 - * - * @author cxd - * @date 2022/12/30 22:49 - * @since 1.1.2 - */ -public class TimeFormatDeal { - - public static String getTimeFieldFormat(TimeDimension timeDimension) { - if (null != timeDimension) { - if (TimeDimension.MONTH.equals(timeDimension)) { - return "dd HH:mm"; - } else if (TimeDimension.DAY.equals(timeDimension)) { - return "HH:mm"; - } else { - throw new IllegalArgumentException("请输入合法参数"); - } - } else { - return "MM-dd HH:mm"; - } - } - - public static String getTimeHeadFormat(TimeDimension timeDimension) { - if (TimeDimension.MONTH.equals(timeDimension)) { - return DateUtil.PATTERN_MONTH; - } else if (TimeDimension.DAY.equals(timeDimension)) { - return DateUtil.PATTERN_DATE; - } else { - throw new IllegalArgumentException("请输入合法参数"); - } - } - -} diff --git a/uno-data/src/main/java/cc/allio/uno/data/query/excel/ValueTimeExcelWriter.java b/uno-data/src/main/java/cc/allio/uno/data/query/excel/ValueTimeExcelWriter.java deleted file mode 100644 index 058818cb..00000000 --- a/uno-data/src/main/java/cc/allio/uno/data/query/excel/ValueTimeExcelWriter.java +++ /dev/null @@ -1,34 +0,0 @@ -package cc.allio.uno.data.query.excel; - -import cn.afterturn.easypoi.excel.ExcelExportUtil; -import cn.afterturn.easypoi.excel.entity.ExportParams; -import 
cn.afterturn.easypoi.excel.entity.params.ExcelExportEntity; -import cc.allio.uno.data.query.param.DateDimension; -import org.apache.poi.ss.usermodel.Workbook; - -import java.util.*; - -/** - * 历史监测数据的导出调用接口 - * - * @author cxd - * @date 2022/12/30 22:49 - * @since 1.1.2 - */ -public class ValueTimeExcelWriter { - - /** - * 根据入参构建导出表格,入参主要有是否同期,统计数据,统计字段 - * 添加数据(同期数据和非同期数据) - */ - public static Workbook exportHisExcel(CellHeader cellHeader, Map> dataMap) { - //生成表头 - DateDimension[] contemporaneous = cellHeader.getContemporaneous(); - List colList = new ArrayList<>(cellHeader.getHeaders().values()); - //插入表数据 - CellDataDeal cellDataDeal = CellDataDealFactory.create(contemporaneous); - List> list = cellDataDeal.makeExcelData(dataMap, cellHeader); - return ExcelExportUtil.exportExcel(new ExportParams(), colList, list); - } - -} diff --git a/uno-starters/pom.xml b/uno-starters/pom.xml index 8220d66f..94a6a270 100644 --- a/uno-starters/pom.xml +++ b/uno-starters/pom.xml @@ -20,7 +20,6 @@ uno-starter-core uno-starter-kafka - uno-starter-liquibase uno-starter-websocket uno-starter-sequential uno-starter-redis diff --git a/uno-starters/uno-starter-liquibase/README.md b/uno-starters/uno-starter-liquibase/README.md deleted file mode 100644 index ebffbfc6..00000000 --- a/uno-starters/uno-starter-liquibase/README.md +++ /dev/null @@ -1,294 +0,0 @@ -# Liquibase数据库版本控制 - -版本号:1.1.5.RELEASE - -liquibase是一个数据库变更的版本控制工具。项目中通过liquibase解析用户编写的liquibase的配置文件,生成sql语句,并执行和记录。执行是根据记录确定sql语句是否曾经执行过,和配置文件里的预判断语句确定sql是否执行。 - -特性: - -1、配置文件支持SQL、XML、JSON 或者 YAML - 2、版本控制按序执行 - 3、可以用上下文控制sql在何时何地如何执行。 - 4、支持schmea的变更 - 5、根据配置文件自动生成sql语句用于预览 - 6、可重复执行迁移 - 7、可插件拓展 - 8、可回滚 - 9、可兼容14中主流数据库如oracle,mysql,pg等,支持平滑迁移 - 10、支持schema方式的多租户(multi-tenant) - -## 为什么使用Liquibase - -在大多数项目仍然提供`sql`脚本甚至有时不这样(哪里漏水堵哪里): - -- What state is the database in onTrigger this machine?(不知道在这台机器数据库的状态) -- Has this script already been applied or not?(这个脚本是否被使用过) -- Has the quick fix in production been applied in test afterwards?(生产用了,测试是否用了?) -- How do you set up a new database instance?(怎么从头开始创建一个新的数据库) - -所以数据库版本管理框架是用来解决这种混乱的局面:他们允许你: - -- Recreate a database from scratch(重新开始创建一个数据库) -- Make it clear at all times what state a database is in(在任何时刻知道数据库状态) -- Migrate in a deterministic way from your current version of the database to a newer one(以一种确定的方式从你当前的数据库版本迁移到一个较新的版本) - -### 为什么不使用flyway - -[Flyway](https://flywaydb.org/) 是一款开源的数据库版本管理工具,使用方式和学习成本与Liquibase差不多,但它无法很好适用于微服务形式,原因在于它的版本控制多团队开发可能存在冲突。但Liquibase可以自定义版本,这点就很适合用于多团队多服务数据库版本管理 - -## 使用 - -blade的Liqubase目前仅支持change-log文件读取,暂不支持指定rollback等更多配置内容。 - -Liquibase的核心原理是通过读取`change-log`文件。所以在使用时,我们首先创建这个文件,这个文件应该创建在哪里? 
- -默认需要创建在类路径下:**/db/migrations/#{dbType}/db_migration.yaml**。 - -### 文件路径 - -#### 单数据源 - -我们可以看到这个目录中存在一个占位符,这个占位符的类型表示当前服务使用的数据源类型,比如当前数据源是**mysql**,文件存放路径如:/db/migrations/mysql/db_migration.yaml(后缀也可以是`.xml/.json/.sql`,怎么配置详细看官网文档)。同理如果切换其他数据源我们就需要创建相应的目录文件。 - -有时候我们又想指定目录就能读取到,基于这个需要,单数据源只需要在spring 配置中增加`spring.liquibase.change-log`并且填写对应的目录 - -```yaml -spring: - liquibase: - change-log: classpath:test.yaml -``` - -#### 多数据源 - -会存在一个情况我们使用的服务存在多数据源,这个时候可以对不同的数据源执行不同的版本控制,但前提我们的文件名称需要变一下。比如说:想使用master数据源进行版本控制,那么此时你的文件名称就变为`db_migration-master.yaml`,同理其他数据源也是一样的配置。就如下面这张图一样 - - - -指定目录在多数据源配置如下: - -```yaml -spring: - #排除DruidDataSourceAutoConfigure - autoconfigure: - exclude: com.alibaba.druid.spring.boot.autoconfigure.DruidDataSourceAutoConfigure - datasource: - dynamic: - druid: - #通用校验配置 - validation-query: select 1 - #启用sql日志拦截器 - proxy-filters: - - sqlLogInterceptor - #设置默认的数据源或者数据源组,默认值即为master - primary: master - datasource: - master: - druid: - #独立校验配置 - validation-query: select 1 - #oracle校验 - #validation-query: select 1 from dual - url: ${blade.datasource.dev.master.url} - username: ${blade.datasource.dev.master.username} - password: ${blade.datasource.dev.master.password} - liquibase: - change-log: classpath:test.yaml -``` - -### 文件内容 - -在知道change-log文件路径在哪后,我们来具体看看它里面存在什么内容: - -```yaml -databaseChangeLog: - # 一次数据库版本变化都是一个变更集 - - changeSet: - # 命名规则:框架(系统名)-服务-版本号-功能(或者目的) - id: jw-test-V1-Person - author: jiangwei - comment: 创建Person表 - runInTransaction: true - changes: - # 创建表结构 - - sqlFile: - path: classpath:db/migrations/mysql/sql/V1__Person.sql - encoding: utf8 - # 创建表数据 - - sqlFile: - path: classpath:db/migrations/mysql/sql/V1__Person_Data.sql - encoding: utf8 - - changeSet: - id: jw-test-V1-Person-task - author: jiangwei - changes: - # 创建自定义数据变更类 - - customChange: { "class": "com.jw.liquibase.change.PersonChangeTask" } - -``` - -Liquibase每次数据库变更都是以`change-set`作为一个变更集,所以我们每次一个功能版本都是一个`change-log`。在一个`change-log`中需要指定`id`、`author`。其中: - -- id:命名规则:框架(系统名)-服务-版本号-功能(或者目的),比如:blade-auth-V1-init(blade系统-认证服务-第一个版本-初始化) - -在`change-set`中`changes`是为最重要,他表示当前变更的内容。它有两种好使用的方式: - -- sqlFile:指定一个sql文件路径 - -- customChange:指定一个实现`CustomTaskChange`接口的权限类名,如下: - - ```java - public class PersonChangeTask implements CustomTaskChange { - - @Override - public void execute(Database database) throws CustomChangeException { - Person person = new Person(4, "Jiang Wei"); - PersonMapper mapper = SpringUtil.getContext().getBean(PersonMapper.class); - mapper.insert(person); - } - - @Override - public String getConfirmationMessage() { - return null; - } - - @Override - public void setUp() throws SetupException { - - } - - @Override - public void setFileOpener(ResourceAccessor resourceAccessor) { - - } - - @Override - public ValidationErrors validate(Database database) { - return null; - } - } - - ``` - -需要注意的点:如果使用自定义变更类(`CustomTaskChange`)他不能读取和他在一个变更集执行的库表数据内容(比如我当前sqlFile是创建`Person`表,但是如果在`CustomTaskChange`中将读取不到这张表的内容,因为他们处于同一个事物中)解决办法是放在不同的变更集中。如上面所示。 - -## uno-starter-liquibase原理 - -在具体实现最重要的是两点: - -1. 读取到change-log的内容 -2. 
不同的数据源具有不同的change-log内容 - -解决办法是,制定如下规则: - -1.自动读取配置文件(也可以人工配置,如果有):分数据库类型 - -​ 单数据源:以db_migration.yaml或者db_migration.xml作为查找条件 - -​ 多数据源:以db_migration-#{dynamic}.yaml或者db_migration-#{dynamic}.xml作为查找条件 - -2.指定配置读取 - -​ 单数据源:读取spring.liquibase.changelog - -​ 多数据源:读取spring.datasource.dynamic.datasource.\#{dynamic}.liquibase.changelog配置 - -因为考虑到数据源类型的多种多样,所以采取适配器设计模式,它的顶层接口是`LiquibaseDataSourceAdapter`。 - -我们在获取到`change-log`路径后需要创建`SpringLiquibase`注册到Spring中,让他自动执行变更集,具体代码如下: - -```java - /** - * 向Spring注册Liquibase对象 - * - * @param dataSource 数据源对象 - * @param beanFactory bean工厂 - */ - default void registerLiquibase(DataSource dataSource, DefaultListableBeanFactory beanFactory) { - String changeLog = getChangeLog(dataSource); - if (StringUtils.isEmpty(changeLog)) { - return; - } - SpringLiquibase liquibase = new SpringLiquibase(); - liquibase.setChangeLog(changeLog); - liquibase.setDataSource(dataSource); - BeanDefinitionBuilder beanDefinitionBuilder = BeanDefinitionBuilder.genericBeanDefinition(SpringLiquibase.class, () -> liquibase); - beanFactory.registerBeanDefinition("liquibase-".concat(dbType(dataSource)), beanDefinitionBuilder.getRawBeanDefinition()); - } -``` - -## change-log - -changelog是LiquiBase用来记录数据库的变更,一般放在`CLASSPATH`下,然后配置到执行路径中。 - -changelog支持多种格式,主要有`XML/JSON/YAML/SQL,其中XML/JSON/YAML除了具体格式语法不同,节点配置很类似,SQL格式中主要记录SQL语句,这里仅给出XML格式和SQL格式的示例: - -`change-log.xml`: - -```xml - - - 3 - title 3 - content 3 - - -``` - -`change-log.sql` - -```sql ---liquibase formatted sql ---changeset daniel:16040707 -CREATE TABLE `role_authority_sum` ( - `row_id` int(11) unsigned NOT NULL AUTO_INCREMENT COMMENT '自增id', - `role_id` int(11) unsigned NOT NULL DEFAULT '0' COMMENT '关联role的role_id', - `authority_sum` int(11) unsigned NOT NULL DEFAULT '0' COMMENT 'perms的值的和', - `data_type_id` int(11) unsigned NOT NULL DEFAULT '0' COMMENT '关联data_type的id', - PRIMARY KEY (`row_id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8 COMMENT='角色的权限值的和,如角色有RD权限,则和为2+8=10'; -``` - -### change-set - -一个``标签对应一个变更集,由id、name、以及changelog的文件路径组成唯一标识。changelog在执行的时候并不是按照id的顺序,而是按照changeSet在changelog中出现的顺序。 - -LiquiBase在执行changelog时,会在数据库中插入两张表:`DATABASECHANGELOG`和`DATABASECHANGELOGLOCK`,分别记录changelog的执行日志和锁日志。 - -LiquiBase在执行changelog中的changeSet时,会首先查看`DATABASECHANGELOG`表,如果已经执行过,则会跳过(除非changeSet的`runAlways`属性为true,后面会介绍),如果没有执行过,则执行并记录changelog日志; - -changelog中的一个changeSet对应一个事务,在changeSet执行完后commit,如果出现错误则rollback; - -- runAlways:即使已经执行过,仍然每次都执行;**注意**: 由于`DATABASECHANGELOG`表中还记录了changeSet的MD5校验值MD5SUM,如果changeSet的`id`和`name`没变,而内容变了,则由于MD5值变了,即使runAlways的值为True,执行也是失败的,会报错。这种情况应该使用`runOnChange`属性。 -- runOnChange:第一次的时候执行以及当changeSet的内容发生变化时执行。不受MD5校验值的约束。 -- runInTransaction:是否作为一个事务执行,默认为true。设置为false时需要**小心**:如果执行过程中出错了则不会rollback,数据库很可能处于不一致的状态; - -``下有一个重要的子标签``,即定义回滚的SQL语句。对于`create table`, `rename column`和`add column`等,LiquiBase会自动生成对应的rollback语句,而对于`drop table`、`insert data`等则需要显示定义rollback语句。 - -### include与include-all - -当changelog文件越来越多时,可以使用``将文件管理起来,如: - -```xml - - - - -``` -## Spring-Liquibase配置文件 - -| 属性名 | 说明 | 默认值 | -| ------------------------------------------ | ------------------------------------------------------------ | ----------------------------------------------------- | -| spring.liquibase.change-log | 变更日志配置路径 | classpath:/db/changelog/db.changelog-master.
yaml | -| spring.liquibase.check-change-log-location | 检查变更日志位置是否存在 | true | -| spring.liquibase.liquibase.contexts | 要使用的运行时上下文的逗号分隔列表。 | | -| spring.liquibase.default-schema | 默认的数据库Schema | | -| spring.liquibase.drop-first | 先删除数据库Schema | | -| spring.liquibase.enabled | 是否开启liquibase | | -| spring.liquibase.password | 待迁移数据库的登录密码。 | | -| spring.liquibase.url | 待迁移数据库的JDBC URL。如果没有设置,就使用配置的主数据源。 | | -| spring.liquibase.user | 待迁移数据库的登录用户。 | | - diff --git a/uno-starters/uno-starter-liquibase/doc/images/example1.png b/uno-starters/uno-starter-liquibase/doc/images/example1.png deleted file mode 100644 index 36a32176..00000000 Binary files a/uno-starters/uno-starter-liquibase/doc/images/example1.png and /dev/null differ diff --git a/uno-starters/uno-starter-liquibase/pom.xml b/uno-starters/uno-starter-liquibase/pom.xml deleted file mode 100644 index c2191972..00000000 --- a/uno-starters/uno-starter-liquibase/pom.xml +++ /dev/null @@ -1,66 +0,0 @@ - - - - uno-starters - cc.allio - 1.1.5.RELEASE - - 4.0.0 - - uno-starter-liquibase - - - 8 - 8 - - - - - cc.allio - uno-core - - - cc.allio - uno-test - test - - - org.liquibase - liquibase-core - - - - com.alibaba - druid-spring-boot-starter - - - - mysql - mysql-connector-java - provided - - - - org.postgresql - postgresql - provided - - - - com.microsoft.sqlserver - mssql-jdbc - provided - - - com.h2database - h2 - provided - - - com.baomidou - dynamic-datasource-spring-boot-starter - - - \ No newline at end of file diff --git a/uno-starters/uno-starter-liquibase/src/main/java/cc/allio/uno/starter/liquibase/BaseLiquibaseDataSourceAdapter.java b/uno-starters/uno-starter-liquibase/src/main/java/cc/allio/uno/starter/liquibase/BaseLiquibaseDataSourceAdapter.java deleted file mode 100644 index 8c27d54d..00000000 --- a/uno-starters/uno-starter-liquibase/src/main/java/cc/allio/uno/starter/liquibase/BaseLiquibaseDataSourceAdapter.java +++ /dev/null @@ -1,143 +0,0 @@ -package cc.allio.uno.starter.liquibase; - -import cc.allio.uno.core.util.template.GenericTokenParser; -import cc.allio.uno.core.util.template.Tokenizer; -import lombok.extern.slf4j.Slf4j; -import org.springframework.context.ApplicationContext; -import org.springframework.util.StringUtils; - -import javax.sql.DataSource; -import java.io.File; -import java.util.Objects; -import java.util.function.Function; -import java.util.function.Predicate; - -/** - * 提高是否有change-log文件的基本适配器 - * - * @author jiangwei - * @date 2022/1/19 20:34 - * @since 1.0 - */ -@Slf4j -public abstract class BaseLiquibaseDataSourceAdapter implements LiquibaseDataSourceAdapter { - - private ApplicationContext applicationContext; - - private static final String XML = ".xml"; - - private static final String YAML = ".yaml"; - - private static final String JSON = ".json"; - - private static final String SQL = ".sql"; - - @Override - public LiquibaseDataSourceAdapter setApplicationContext(ApplicationContext applicationContext) { - this.applicationContext = applicationContext; - return this; - } - - @Override - public ApplicationContext getApplicationContext() { - return applicationContext; - } - - @Override - public String getChangeLog(DataSource dataSource) { - String propertiesChangeLog = getPropertiesChangeLog(dataSource); - if (!StringUtils.isEmpty(propertiesChangeLog)) { - return propertiesChangeLog; - } - GenericTokenParser parser = getDbParser().apply(dataSource); - String changeLogPath = changeLogPath(dataSource, parser); - String changeLogName = changeLogName(dataSource, parser); - return 
getMaybeChangeLog(changeLogPath, changeLogName); - } - - /** - * 获取可能的change-log文件路径 - * - * @param changeLogPath change-path - * @param expectedChangeLogName 期望change-log名称,无后缀文件名称 - * @return 某个具体文件路径 - */ - protected String getMaybeChangeLog(String changeLogPath, String expectedChangeLogName) { - String changeLog = changeLog(changeLogPath, expectedChangeLogName.concat(YAML)); - if (!StringUtils.isEmpty(changeLog)) { - return changeLog; - } - changeLog = changeLog(changeLogPath, expectedChangeLogName.concat(XML)); - if (!StringUtils.isEmpty(changeLog)) { - return changeLog; - } - changeLog = changeLog(changeLogPath, expectedChangeLogName.concat(JSON)); - if (!StringUtils.isEmpty(changeLog)) { - return changeLog; - } - changeLog = changeLog(changeLogPath, expectedChangeLogName.concat(SQL)); - if (!StringUtils.isEmpty(changeLog)) { - return changeLog; - } - return ""; - } - - /** - * 获取change-log名称 - * - * @param changeLogPath change-log文件存放的路径 - * @param changeLogName change-log文件具体名称 - * @return 可能能在目标目录找到的change-log文件 - */ - private String changeLog(String changeLogPath, String changeLogName) { - Predicate predicate = hasChangeLogFile(changeLogName); - if (predicate.test(changeLogPath)) { - Function expect = getFile(changeLogName); - File file = expect.apply(changeLogPath); - if (Objects.nonNull(file)) { - return changeLogPath.concat("/").concat(file.getName()); - } - } - return ""; - } - - /** - * 获取change-path路径名称 - * - * @param dataSource 数据源对象 - * @param parser 占位符解析器 - * @return change-log路径名称 - */ - protected String changeLogPath(DataSource dataSource, GenericTokenParser parser) { - return parser.parse(CHANGE_LOG_PATH, content -> dbType(dataSource)); - } - - /** - * 获取change-log文件名称 - * - * @param dataSource 数据源对象 - * @param parser 占位符解析器 - * @return change-log文件名称 - */ - protected String changeLogName(DataSource dataSource, GenericTokenParser parser) { - return parser.parse(CHANGE_LOG_NAME, content -> dbType(dataSource)); - } - - protected Function getDbParser() { - return dataSource -> new GenericTokenParser(Tokenizer.HASH_BRACE); - } - - /** - * 获取配置文件中的change-log路径, - *
 - *     • 当spring.liquibase.changelog有值则取该路径的change-log
 - *     • 否则取db/migrations/#{dbType}/db_migration.yaml
- * - * @param dataSource 数据源对象 - * @return 可能存在的change-log路径 - */ - protected String getPropertiesChangeLog(DataSource dataSource) { - return applicationContext.getEnvironment().getProperty("spring.liquibase.changelog"); - } -} diff --git a/uno-starters/uno-starter-liquibase/src/main/java/cc/allio/uno/starter/liquibase/DataSourceAdapterDispatcher.java b/uno-starters/uno-starter-liquibase/src/main/java/cc/allio/uno/starter/liquibase/DataSourceAdapterDispatcher.java deleted file mode 100644 index 9d520007..00000000 --- a/uno-starters/uno-starter-liquibase/src/main/java/cc/allio/uno/starter/liquibase/DataSourceAdapterDispatcher.java +++ /dev/null @@ -1,54 +0,0 @@ -package cc.allio.uno.starter.liquibase; - -import org.springframework.context.ApplicationContext; - -import javax.sql.DataSource; -import java.util.ArrayList; -import java.util.List; -import java.util.ServiceLoader; - -/** - * 数据源适配处理器,数据源分派 - * - * @author jiangwei - * @date 2022/1/19 18:04 - * @since 1.0 - */ -public class DataSourceAdapterDispatcher { - - /** - * 适配器缓存 - */ - private final List adapters = new ArrayList<>(); - - /** - * 单例 - */ - private static final DataSourceAdapterDispatcher DISPATCHER = new DataSourceAdapterDispatcher(); - - private DataSourceAdapterDispatcher() { - ServiceLoader load = ServiceLoader.load(LiquibaseDataSourceAdapter.class); - for (LiquibaseDataSourceAdapter liquibaseDataSourceAdapter : load) { - adapters.add(liquibaseDataSourceAdapter); - } - } - - /** - * 数据源适配派发 - * - * @param clazz 数据源Class对象 - * @throws NullPointerException 当没有找到合适的适配器时抛出 - */ - public LiquibaseDataSourceAdapter handle(ApplicationContext applicationContext, Class clazz) { - return adapters.stream() - .filter(adapter -> adapter.isAdapter().test(clazz)) - .findFirst() - .orElseThrow(NullPointerException::new) - .setApplicationContext(applicationContext); - } - - public static DataSourceAdapterDispatcher getInstance() { - return DISPATCHER; - } - -} diff --git a/uno-starters/uno-starter-liquibase/src/main/java/cc/allio/uno/starter/liquibase/DruidDataSourceAdapter.java b/uno-starters/uno-starter-liquibase/src/main/java/cc/allio/uno/starter/liquibase/DruidDataSourceAdapter.java deleted file mode 100644 index f366045e..00000000 --- a/uno-starters/uno-starter-liquibase/src/main/java/cc/allio/uno/starter/liquibase/DruidDataSourceAdapter.java +++ /dev/null @@ -1,29 +0,0 @@ -package cc.allio.uno.starter.liquibase; - -import com.alibaba.druid.pool.DruidDataSource; -import com.google.auto.service.AutoService; - -import javax.sql.DataSource; -import java.util.function.Predicate; - -/** - * Druid数据源适配器 - * - * @author jiangwei - * @date 2022/1/19 18:06 - * @since 1.0 - */ -@AutoService(LiquibaseDataSourceAdapter.class) -public class DruidDataSourceAdapter extends BaseLiquibaseDataSourceAdapter { - - @Override - public String dbType(DataSource dataSource) { - DruidDataSource druidDataSource = (DruidDataSource) dataSource; - return druidDataSource.getDbType(); - } - - @Override - public Predicate> isAdapter() { - return DruidDataSource.class::isAssignableFrom; - } -} diff --git a/uno-starters/uno-starter-liquibase/src/main/java/cc/allio/uno/starter/liquibase/DynamicRoutingDataSourceAdapter.java b/uno-starters/uno-starter-liquibase/src/main/java/cc/allio/uno/starter/liquibase/DynamicRoutingDataSourceAdapter.java deleted file mode 100644 index 048827ff..00000000 --- a/uno-starters/uno-starter-liquibase/src/main/java/cc/allio/uno/starter/liquibase/DynamicRoutingDataSourceAdapter.java +++ /dev/null @@ -1,74 +0,0 @@ -package 
cc.allio.uno.starter.liquibase; - -import cc.allio.uno.core.util.template.GenericTokenParser; -import cc.allio.uno.core.util.template.Tokenizer; -import com.baomidou.dynamic.datasource.DynamicRoutingDataSource; -import com.baomidou.dynamic.datasource.ds.ItemDataSource; -import com.google.auto.service.AutoService; -import lombok.extern.slf4j.Slf4j; -import org.springframework.beans.factory.support.DefaultListableBeanFactory; -import org.springframework.context.ApplicationContext; -import org.springframework.util.ClassUtils; - -import javax.sql.DataSource; -import java.util.Map; -import java.util.function.Predicate; - -/** - * 多数据源适配器 - * - * @author jiangwei - * @date 2022/1/19 21:10 - * @since 1.0 - */ -@Slf4j -@AutoService(LiquibaseDataSourceAdapter.class) -public class DynamicRoutingDataSourceAdapter extends BaseLiquibaseDataSourceAdapter { - - private static final String CHANGE_LOG_PROPERTIES = "spring.datasource.dynamic.datasource.#{dynamic}.liquibase.changelog"; - - @Override - public String dbType(DataSource dataSource) { - ItemDataSource itemDataSource = (ItemDataSource) dataSource; - DataSource realDataSource = itemDataSource.getRealDataSource(); - return DataSourceAdapterDispatcher.getInstance().handle(null, realDataSource.getClass()).dbType(realDataSource); - } - - /** - * @param dataSource 数据源对象 - * @param parser 占位符解析器 - * @return 获取以多数据源的db_migration-#{dynamicType}名称的文件名称 - */ - @Override - protected String changeLogName(DataSource dataSource, GenericTokenParser parser) { - String changeLogName = super.changeLogName(dataSource, parser); - ItemDataSource itemDataSource = (ItemDataSource) dataSource; - return changeLogName.concat("-").concat(itemDataSource.getName()); - } - - @Override - protected String getPropertiesChangeLog(DataSource dataSource) { - ApplicationContext applicationContext = getApplicationContext(); - GenericTokenParser parser = new GenericTokenParser(Tokenizer.HASH_BRACE); - String changeLogProperties = parser.parse(CHANGE_LOG_PROPERTIES, content -> ((ItemDataSource) dataSource).getName()); - return applicationContext.getEnvironment().getProperty(changeLogProperties); - } - - @Override - public void registerLiquibase(DataSource dataSource, DefaultListableBeanFactory beanFactory) { - DynamicRoutingDataSource dynamicRoutingDataSource = (DynamicRoutingDataSource) dataSource; - Map currentDataSources = dynamicRoutingDataSource.getCurrentDataSources(); - currentDataSources.forEach((k, v) -> super.registerLiquibase(v, beanFactory)); - } - - @Override - public Predicate> isAdapter() { - return clazz -> { - boolean present = ClassUtils.isPresent("com.baomidou.dynamic.datasource.DynamicRoutingDataSource", ClassUtils.getDefaultClassLoader()); - if (present) { - return DynamicRoutingDataSource.class.isAssignableFrom(clazz); - } - return false; - }; - } -} diff --git a/uno-starters/uno-starter-liquibase/src/main/java/cc/allio/uno/starter/liquibase/HikariDataSourceAdapter.java b/uno-starters/uno-starter-liquibase/src/main/java/cc/allio/uno/starter/liquibase/HikariDataSourceAdapter.java deleted file mode 100644 index 8bf43ab6..00000000 --- a/uno-starters/uno-starter-liquibase/src/main/java/cc/allio/uno/starter/liquibase/HikariDataSourceAdapter.java +++ /dev/null @@ -1,46 +0,0 @@ -package cc.allio.uno.starter.liquibase; - -import cc.allio.uno.core.StringPool; -import cc.allio.uno.core.util.StringUtils; -import com.google.auto.service.AutoService; -import com.zaxxer.hikari.HikariDataSource; - -import javax.sql.DataSource; -import java.util.function.Predicate; - -/** - * 
HikariDataSource数据源适配器 - * - * @author jiangwei - * @date 2022/8/29 13:05 - * @since 1.0.9 - */ -@AutoService(LiquibaseDataSourceAdapter.class) -public class HikariDataSourceAdapter extends BaseLiquibaseDataSourceAdapter { - @Override - public String dbType(DataSource dataSource) { - // org.postgresql.Driver - // com.mysql.cj.jdbc.Driver - // org.h2.Driver - // com.microsoft.sqlserver.jdbc.SQLServerDriver - // oracle.jdbc.driver.OracleDriver - String driverClassName = ((HikariDataSource) dataSource).getDriverClassName(); - if (StringUtils.isEmpty(driverClassName)) { - throw new NullPointerException(String.format("DataSource Driver Class is empty %s", dataSource)); - } - String maybeDriverClassName = driverClassName.split(StringPool.DOT)[1]; - - // sqlserver做特殊处理 - if (maybeDriverClassName.equals("microsoft")) { - return "mssql"; - } else if (maybeDriverClassName.equals("jdbc")) { - return "oracle"; - } - return maybeDriverClassName; - } - - @Override - public Predicate> isAdapter() { - return HikariDataSource.class::isAssignableFrom; - } -} diff --git a/uno-starters/uno-starter-liquibase/src/main/java/cc/allio/uno/starter/liquibase/LiquibaseDataSourceAdapter.java b/uno-starters/uno-starter-liquibase/src/main/java/cc/allio/uno/starter/liquibase/LiquibaseDataSourceAdapter.java deleted file mode 100644 index 8452548a..00000000 --- a/uno-starters/uno-starter-liquibase/src/main/java/cc/allio/uno/starter/liquibase/LiquibaseDataSourceAdapter.java +++ /dev/null @@ -1,153 +0,0 @@ -package cc.allio.uno.starter.liquibase; - -import liquibase.integration.spring.SpringLiquibase; -import org.springframework.beans.factory.support.BeanDefinitionBuilder; -import org.springframework.beans.factory.support.DefaultListableBeanFactory; -import org.springframework.context.ApplicationContext; -import org.springframework.util.ObjectUtils; -import org.springframework.util.ResourceUtils; -import org.springframework.util.StringUtils; - -import javax.sql.DataSource; -import java.io.File; -import java.io.FileNotFoundException; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; -import java.util.function.Function; -import java.util.function.Predicate; -import java.util.stream.Collectors; - -/** - * Liquibase适配器,对于不同的数据源类型来创建Liquibase对象 - * - * @author jiangwei - * @date 2022/1/19 16:28 - * @since 1.0 - */ -public interface LiquibaseDataSourceAdapter { - - /** - * change-log配置文件的目录 - */ - String CHANGE_LOG_PATH = "classpath:db/migrations/#{dbType}"; - - /** - * change-log文件名称 - */ - String CHANGE_LOG_NAME = "db_migration"; - - /** - * 获取change-log文件路径。
- * 默认优先级顺序:db_migration_#{dbType}(或者db_migration_#{dbType}-#{dynamic})>.yaml>.yml>.xml - * - * @param dataSource 数据源对象 - * @return change-log文件目录 - * @see BaseLiquibaseDataSourceAdapter - */ - String getChangeLog(DataSource dataSource); - - /** - * 获取dbType - * - * @param dataSource 数据源对象 - * @return dbType字符串 - */ - String dbType(DataSource dataSource); - - /** - * 测试当前数据源适配器是否可以适配 - * - * @return 返回一个断言对象 - */ - Predicate> isAdapter(); - - /** - * 设置application实例 - * - * @param applicationContext 上下文实例 - * @return 当前实例 - */ - LiquibaseDataSourceAdapter setApplicationContext(ApplicationContext applicationContext); - - /** - * 获取ApplicationContext上下文 - * - * @return application上下文实例 - */ - ApplicationContext getApplicationContext(); - - // ---------------- default ---------------- - - /** - * 获取指定目录下的所以文件 - * - * @return 入参为文件路径,获取文件List - */ - default Function> getFiles() { - return path -> { - try { - File changeFile = ResourceUtils.getFile(path); - // 判断边界条件 - File[] listableFile = changeFile.listFiles(); - if (ObjectUtils.isEmpty(listableFile)) { - return Collections.emptyList(); - } - return Arrays.stream(listableFile) - .filter(File::isFile) - .collect(Collectors.toList()); - } catch (FileNotFoundException e) { - return Collections.emptyList(); - } - }; - } - - /** - * 根据文件目录判断目标路径存在有change-log文件:
- * 1.判断是否有目录
- * 2.如果有目录,判断目录下是否有以db_migration.yaml或者db_migration.xml或者db_migration.json为文件名
- * - * @param maybeName change-log文件名称或者.yaml文件 - * @return 返回一个断言对象 - * @see BaseLiquibaseDataSourceAdapter - * @see DynamicRoutingDataSourceAdapter - */ - default Predicate hasChangeLogFile(String maybeName) { - return path -> - getFiles().apply(path) - .stream() - .anyMatch(file -> file.getName().equals(maybeName)); - } - - /** - * 获取文件,调用{@link String#contains(CharSequence)}方法进行判断。 - * - * @param expectedFileName 期望获取的文件名称,可能不是具体文件名称。 - * @return 以文件路径作为入参,获取具体文件(当找不到时返回null) - */ - default Function getFile(String expectedFileName) { - return path -> getFiles().apply(path) - .stream() - .filter(file -> file.getName().equals(expectedFileName)) - .findFirst() - .orElse(null); - } - - /** - * 向Spring注册Liquibase对象 - * - * @param dataSource 数据源对象 - * @param beanFactory bean工厂 - */ - default void registerLiquibase(DataSource dataSource, DefaultListableBeanFactory beanFactory) { - String changeLog = getChangeLog(dataSource); - if (StringUtils.isEmpty(changeLog)) { - return; - } - SpringLiquibase liquibase = new SpringLiquibase(); - liquibase.setChangeLog(changeLog); - liquibase.setDataSource(dataSource); - BeanDefinitionBuilder beanDefinitionBuilder = BeanDefinitionBuilder.genericBeanDefinition(SpringLiquibase.class, () -> liquibase); - beanFactory.registerBeanDefinition("liquibase-".concat(dbType(dataSource)), beanDefinitionBuilder.getRawBeanDefinition()); - } -} diff --git a/uno-starters/uno-starter-liquibase/src/main/java/cc/allio/uno/starter/liquibase/config/UnoLiquibaseAutoConfiguration.java b/uno-starters/uno-starter-liquibase/src/main/java/cc/allio/uno/starter/liquibase/config/UnoLiquibaseAutoConfiguration.java deleted file mode 100644 index 5eda6b5a..00000000 --- a/uno-starters/uno-starter-liquibase/src/main/java/cc/allio/uno/starter/liquibase/config/UnoLiquibaseAutoConfiguration.java +++ /dev/null @@ -1,83 +0,0 @@ -package cc.allio.uno.starter.liquibase.config; - -import cc.allio.uno.starter.liquibase.DataSourceAdapterDispatcher; -import liquibase.integration.spring.SpringLiquibase; -import lombok.extern.slf4j.Slf4j; -import org.springframework.beans.BeansException; -import org.springframework.beans.factory.BeanFactoryUtils; -import org.springframework.beans.factory.InitializingBean; -import org.springframework.beans.factory.support.DefaultListableBeanFactory; -import org.springframework.boot.autoconfigure.AutoConfigureAfter; -import org.springframework.boot.autoconfigure.AutoConfigureBefore; -import org.springframework.boot.autoconfigure.EnableAutoConfiguration; -import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; -import org.springframework.boot.autoconfigure.jdbc.DataSourceAutoConfiguration; -import org.springframework.boot.autoconfigure.liquibase.LiquibaseAutoConfiguration; -import org.springframework.boot.autoconfigure.orm.jpa.HibernateJpaAutoConfiguration; -import org.springframework.context.ApplicationContext; -import org.springframework.context.ApplicationContextAware; -import org.springframework.context.ConfigurableApplicationContext; -import org.springframework.core.Ordered; -import org.springframework.core.annotation.Order; -import org.springframework.util.CollectionUtils; - -import javax.sql.DataSource; -import java.util.Map; - -/** - * Blade-Liquibase自动配置类。
- *
    - *
 - * 1. 扫描在/db/migrations/#{dbType}目录下是否存在db_migration格式的文件
 - * 2. 代码逻辑将会根据当前服务数据源的配置创建Liquibase对象。
 - * 3. 如果当前项目配置多数据源,需要在db_migration-#{dynamic}。以此适配多数据源
- * - * @author jiangwei - * @date 2022/1/19 10:47 - * @since 1.0 - */ -@Slf4j -@EnableAutoConfiguration -@ConditionalOnProperty(prefix = "spring.liquibase", name = "enabled", havingValue = "true") -@AutoConfigureBefore({LiquibaseAutoConfiguration.class}) -@AutoConfigureAfter({DataSourceAutoConfiguration.class, HibernateJpaAutoConfiguration.class}) -@Order(Ordered.HIGHEST_PRECEDENCE) -public class UnoLiquibaseAutoConfiguration implements ApplicationContextAware, InitializingBean { - - private ApplicationContext applicationContext; - - @Override - public void setApplicationContext(ApplicationContext applicationContext) throws BeansException { - this.applicationContext = applicationContext; - } - - @Override - public void afterPropertiesSet() throws Exception { - ConfigurableApplicationContext configurableApplicationContext = (ConfigurableApplicationContext) applicationContext; - DefaultListableBeanFactory beanFactory = (DefaultListableBeanFactory) configurableApplicationContext.getBeanFactory(); - Map dataSourceMap = BeanFactoryUtils.beansOfTypeIncludingAncestors(applicationContext, DataSource.class); - if (CollectionUtils.isEmpty(dataSourceMap)) { - return; - } - if (dataSourceMap.size() == 1) { - DataSource dataSource = applicationContext.getBean(DataSource.class); - try { - DataSourceAdapterDispatcher.getInstance().handle(applicationContext, dataSource.getClass()).registerLiquibase(dataSource, beanFactory); - } catch (Throwable e) { - log.error("register liquibase failed", e); - } - - } else { - dataSourceMap.forEach((k, v) -> { - try { - DataSourceAdapterDispatcher.getInstance().handle(applicationContext, v.getClass()).registerLiquibase(v, beanFactory); - } catch (Throwable e) { - log.error("register liquibase failed", e); - } - }); - } - // 使注册的Liquibase生效 - BeanFactoryUtils.beansOfTypeIncludingAncestors(applicationContext, SpringLiquibase.class); - } - -} diff --git a/uno-starters/uno-starter-liquibase/src/test/java/cc/allio/uno/starter/liquibase/DruidDataSourceAdapterTest.java b/uno-starters/uno-starter-liquibase/src/test/java/cc/allio/uno/starter/liquibase/DruidDataSourceAdapterTest.java deleted file mode 100644 index 38dd4511..00000000 --- a/uno-starters/uno-starter-liquibase/src/test/java/cc/allio/uno/starter/liquibase/DruidDataSourceAdapterTest.java +++ /dev/null @@ -1,49 +0,0 @@ -package cc.allio.uno.starter.liquibase; - -import com.alibaba.druid.pool.DruidDataSource; -import cc.allio.uno.core.util.template.GenericTokenParser; -import cc.allio.uno.test.BaseTestCase; -import org.junit.jupiter.api.Test; - -class DruidDataSourceAdapterTest extends BaseTestCase { - - DruidDataSource dataSource; - - DruidDataSourceAdapter adapter; - - @Override - protected void onInit() throws Throwable { - dataSource = new DruidDataSource(); - dataSource.setDbType("postgres"); - adapter = new DruidDataSourceAdapter(); - } - - @Test - void testDbType() { - String dbType = adapter.dbType(dataSource); - assertEquals("postgres", dbType); - } - - @Test - void testChangeFilePath() { - GenericTokenParser parser = adapter.getDbParser().apply(dataSource); - String changeLogPath = adapter.changeLogPath(dataSource, parser); - assertEquals("classpath:db/migrations/postgres", changeLogPath); - } - - @Test - void testChangeFileName() { - GenericTokenParser parser = adapter.getDbParser().apply(dataSource); - String changeLogName = adapter.changeLogName(dataSource, parser); - assertEquals("db_migration", changeLogName); - } - - @Test - void testGetChangeLog() { - 
assertEquals("classpath:db/migrations/postgres/db_migration.yaml", adapter.getChangeLog(dataSource)); - } - - @Override - protected void onDown() throws Throwable { - } -} diff --git a/uno-starters/uno-starter-liquibase/src/test/java/cc/allio/uno/starter/liquibase/DynamicRoutingDataSourceAdapterTest.java b/uno-starters/uno-starter-liquibase/src/test/java/cc/allio/uno/starter/liquibase/DynamicRoutingDataSourceAdapterTest.java deleted file mode 100644 index 0804faf4..00000000 --- a/uno-starters/uno-starter-liquibase/src/test/java/cc/allio/uno/starter/liquibase/DynamicRoutingDataSourceAdapterTest.java +++ /dev/null @@ -1,67 +0,0 @@ -package cc.allio.uno.starter.liquibase; - -import com.alibaba.druid.pool.DruidDataSource; -import cc.allio.uno.core.util.template.GenericTokenParser; -import cc.allio.uno.test.BaseTestCase; -import com.baomidou.dynamic.datasource.DynamicRoutingDataSource; -import com.baomidou.dynamic.datasource.ds.ItemDataSource; -import com.baomidou.dynamic.datasource.enums.SeataMode; -import org.junit.jupiter.api.Test; - -class DynamicRoutingDataSourceAdapterTest extends BaseTestCase { - - DynamicRoutingDataSource dataSource; - - DynamicRoutingDataSourceAdapter adapter; - - ItemDataSource master; - - ItemDataSource second; - - @Override - protected void onInit() throws Throwable { - dataSource = new DynamicRoutingDataSource(); - DruidDataSource masterDruidDataSource = new DruidDataSource(); - masterDruidDataSource.setDbType("postgres"); - master = new ItemDataSource("master", masterDruidDataSource, masterDruidDataSource, false, false, SeataMode.AT); - dataSource.addDataSource("master", master); - DruidDataSource secondDruidDataSource = new DruidDataSource(); - secondDruidDataSource.setDbType("mysql"); - second = new ItemDataSource("second", secondDruidDataSource, secondDruidDataSource, false, false, SeataMode.AT); - dataSource.addDataSource("second", second); - adapter = new DynamicRoutingDataSourceAdapter(); - } - - @Test - void testDbType() { - assertEquals("postgres", adapter.dbType(master)); - assertEquals("mysql", adapter.dbType(second)); - } - - @Test - void testChangeFilePath() { - GenericTokenParser masterParser = adapter.getDbParser().apply(master); - assertEquals("classpath:db/migrations/postgres", adapter.changeLogPath(master, masterParser)); - GenericTokenParser secondParser = adapter.getDbParser().apply(second); - assertEquals("classpath:db/migrations/mysql", adapter.changeLogPath(second, secondParser)); - } - - @Test - void testChangeName() { - GenericTokenParser masterParser = adapter.getDbParser().apply(master); - assertEquals("db_migration-master", adapter.changeLogName(master, masterParser)); - GenericTokenParser secondParser = adapter.getDbParser().apply(second); - assertEquals("db_migration-second", adapter.changeLogName(second, secondParser)); - } - - @Test - void testGetChangeLog() { - assertEquals("classpath:db/migrations/postgres/db_migration-master.yaml", adapter.getChangeLog(master)); - assertEquals("classpath:db/migrations/mysql/db_migration-second.yaml", adapter.getChangeLog(second)); - } - - @Override - protected void onDown() throws Throwable { - - } -} diff --git a/uno-starters/uno-starter-liquibase/src/test/java/cc/allio/uno/starter/liquibase/HikariDataSourceAdapterTest.java b/uno-starters/uno-starter-liquibase/src/test/java/cc/allio/uno/starter/liquibase/HikariDataSourceAdapterTest.java deleted file mode 100644 index f83658ab..00000000 --- 
a/uno-starters/uno-starter-liquibase/src/test/java/cc/allio/uno/starter/liquibase/HikariDataSourceAdapterTest.java +++ /dev/null @@ -1,54 +0,0 @@ -package cc.allio.uno.starter.liquibase; - -import cc.allio.uno.test.BaseTestCase; -import com.microsoft.sqlserver.jdbc.SQLServerDriver; -import com.zaxxer.hikari.HikariDataSource; -import org.junit.jupiter.api.Test; -import org.postgresql.Driver; - -/** - * HikariDataSource测试 - * - * @author jiangwei - * @date 2022/8/29 13:46 - * @since 1.0.9 - */ -class HikariDataSourceAdapterTest extends BaseTestCase { - - - @Override - protected void onInit() throws Throwable { - - } - - @Override - protected void onDown() throws Throwable { - - } - - @Test - void testDbType() { - HikariDataSourceAdapter dataSourceAdapter = new HikariDataSourceAdapter(); - HikariDataSource hikariDataSource = new HikariDataSource(); - hikariDataSource.setDriverClassName(com.mysql.cj.jdbc.Driver.class.getName()); - - // 验证mysql - String dbType = dataSourceAdapter.dbType(hikariDataSource); - assertEquals("mysql", dbType); - - // 验证Postgresql - hikariDataSource.setDriverClassName(Driver.class.getName()); - dbType = dataSourceAdapter.dbType(hikariDataSource); - assertEquals("postgresql", dbType); - - // 验证h2 - hikariDataSource.setDriverClassName(org.h2.Driver.class.getName()); - dbType = dataSourceAdapter.dbType(hikariDataSource); - assertEquals("h2", dbType); - - // sqlserver - hikariDataSource.setDriverClassName(SQLServerDriver.class.getName()); - dbType = dataSourceAdapter.dbType(hikariDataSource); - assertEquals("mssql", dbType); - } -} diff --git a/uno-starters/uno-starter-liquibase/src/test/java/cc/allio/uno/starter/liquibase/MigrationTest.java b/uno-starters/uno-starter-liquibase/src/test/java/cc/allio/uno/starter/liquibase/MigrationTest.java deleted file mode 100644 index 382d9b13..00000000 --- a/uno-starters/uno-starter-liquibase/src/test/java/cc/allio/uno/starter/liquibase/MigrationTest.java +++ /dev/null @@ -1,62 +0,0 @@ -package cc.allio.uno.starter.liquibase; - -import cc.allio.uno.starter.liquibase.config.UnoLiquibaseAutoConfiguration; -import cc.allio.uno.test.CoreTest; -import cc.allio.uno.test.env.Environment; -import liquibase.integration.spring.SpringLiquibase; -import org.junit.jupiter.api.Test; -import org.springframework.boot.autoconfigure.jdbc.DataSourceAutoConfiguration; - -import javax.sql.DataSource; -import java.util.Map; - -/** - * 迁移单元测试 - * - * @author jiangwei - * @date 2022/8/29 11:20 - * @since 1.0.9 - */ -public class MigrationTest extends CoreTest { - - @Override - protected void onEnvBuild() { - registerComponent( - UnoLiquibaseAutoConfiguration.class, - DataSourceAutoConfiguration.class - ); - } - - @Override - public Environment supportEnv() { - // TODO 更改测试类 -// DataSourceProperties dataSourceProperties = new DataSourceProperties(); -// dataSourceProperties.setDriverClassName("com.mysql.cj.jdbc.Driver"); -// dataSourceProperties.setUrl("jdbc:mysql://192.168.2.29:3306/migration?useSSL=false&useUnicode=true&characterEncoding=utf-8&zeroDateTimeBehavior=convertToNull&transformedBitIsBoolean=true&tinyInt1isBit=false&allowMultiQueries=true&serverTimezone=GMT%2B8&allowPublicKeyRetrieval=true"); -// dataSourceProperties.setUsername("root"); -// dataSourceProperties.setPassword("123456"); -// return new EnvironmentFacade(new DataSourceEnvironment(dataSourceProperties)); - return null; - } - - @Override - protected void onRefreshComplete() throws Throwable { - - } - - @Override - protected void onContextClose() throws Throwable { - - } - - /** - * Test 
Case: 测试数据源与Liquibase的数量 - */ - @Test - void testSpringLiquibaseSize() { - Map dataSourceMap = getContext().getBeansOfType(DataSource.class); - Map liquibaseMap = getContext().getBeansOfType(SpringLiquibase.class); - assertEquals(dataSourceMap.size(), liquibaseMap.size()); - } - -} diff --git a/uno-starters/uno-starter-liquibase/src/test/java/cc/allio/uno/starter/liquibase/changetask/TestChangeTask.java b/uno-starters/uno-starter-liquibase/src/test/java/cc/allio/uno/starter/liquibase/changetask/TestChangeTask.java deleted file mode 100644 index c21f3971..00000000 --- a/uno-starters/uno-starter-liquibase/src/test/java/cc/allio/uno/starter/liquibase/changetask/TestChangeTask.java +++ /dev/null @@ -1,34 +0,0 @@ -package cc.allio.uno.starter.liquibase.changetask; - -import liquibase.change.custom.CustomTaskChange; -import liquibase.database.Database; -import liquibase.exception.CustomChangeException; -import liquibase.exception.SetupException; -import liquibase.exception.ValidationErrors; -import liquibase.resource.ResourceAccessor; - -public class TestChangeTask implements CustomTaskChange { - @Override - public void execute(Database database) throws CustomChangeException { - } - - @Override - public String getConfirmationMessage() { - return null; - } - - @Override - public void setUp() throws SetupException { - - } - - @Override - public void setFileOpener(ResourceAccessor resourceAccessor) { - - } - - @Override - public ValidationErrors validate(Database database) { - return null; - } -} diff --git a/uno-starters/uno-starter-liquibase/src/test/resources/db/migrations/mysql/db_migration-second.yaml b/uno-starters/uno-starter-liquibase/src/test/resources/db/migrations/mysql/db_migration-second.yaml deleted file mode 100644 index e69de29b..00000000 diff --git a/uno-starters/uno-starter-liquibase/src/test/resources/db/migrations/mysql/db_migration.yaml b/uno-starters/uno-starter-liquibase/src/test/resources/db/migrations/mysql/db_migration.yaml deleted file mode 100644 index 35a31c89..00000000 --- a/uno-starters/uno-starter-liquibase/src/test/resources/db/migrations/mysql/db_migration.yaml +++ /dev/null @@ -1,17 +0,0 @@ -databaseChangeLog: - # 一次数据库版本变化都是一个变更集 - - changeSet: - # 命名规则:框架(系统名)-服务-版本号-功能(或者目的) - id: jw-test-V1-Person - author: jiangwei - comment: 创建Person表 - runInTransaction: true - changes: - # 创建表结构 - - sqlFile: - path: classpath:db/migrations/mysql/sql/V1__Person.sql - encoding: utf8 - # 创建表数据 - - sqlFile: - path: classpath:db/migrations/mysql/sql/V1__Person_Data.sql - encoding: utf8 diff --git a/uno-starters/uno-starter-liquibase/src/test/resources/db/migrations/mysql/sql/V1__Person.sql b/uno-starters/uno-starter-liquibase/src/test/resources/db/migrations/mysql/sql/V1__Person.sql deleted file mode 100644 index 21b2146a..00000000 --- a/uno-starters/uno-starter-liquibase/src/test/resources/db/migrations/mysql/sql/V1__Person.sql +++ /dev/null @@ -1,6 +0,0 @@ -CREATE TABLE `person` -( - `id` int(11) NOT NULL, - `name` varchar(255) COLLATE utf8mb4_bin DEFAULT NULL, - PRIMARY KEY (`id`) -) \ No newline at end of file diff --git a/uno-starters/uno-starter-liquibase/src/test/resources/db/migrations/mysql/sql/V1__Person_Data.sql b/uno-starters/uno-starter-liquibase/src/test/resources/db/migrations/mysql/sql/V1__Person_Data.sql deleted file mode 100644 index e6a41c85..00000000 --- a/uno-starters/uno-starter-liquibase/src/test/resources/db/migrations/mysql/sql/V1__Person_Data.sql +++ /dev/null @@ -1 +0,0 @@ -INSERT INTO person(id, name) VALUES (1, 'test') \ No newline at end of file 
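The hunks above delete the uno-starter-liquibase module together with its sample changelog and SQL fixtures. For a service that still needs those migrations after this removal, a minimal sketch using Spring Boot's own `SpringLiquibase` support is shown below; the configuration class name and the changelog path are assumptions following the convention the removed adapter scanned for, not anything configured by this change set. Only `setDataSource`/`setChangeLog` are taken from the removed adapter code visible in this diff.

```java
// Minimal sketch, assuming a consumer falls back to declaring SpringLiquibase itself
// once uno-starter-liquibase is gone. Class name and changelog location are illustrative.
import javax.sql.DataSource;

import liquibase.integration.spring.SpringLiquibase;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
public class ManualLiquibaseConfig {

    @Bean
    public SpringLiquibase liquibase(DataSource dataSource) {
        SpringLiquibase liquibase = new SpringLiquibase();
        liquibase.setDataSource(dataSource);
        // same layout the removed starter resolved: db/migrations/<dbType>/db_migration.yaml
        liquibase.setChangeLog("classpath:db/migrations/mysql/db_migration.yaml");
        return liquibase;
    }
}
```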
diff --git a/uno-starters/uno-starter-liquibase/src/test/resources/db/migrations/postgres/db_migration-master.yaml b/uno-starters/uno-starter-liquibase/src/test/resources/db/migrations/postgres/db_migration-master.yaml deleted file mode 100644 index e69de29b..00000000 diff --git a/uno-starters/uno-starter-liquibase/src/test/resources/db/migrations/postgres/db_migration.yaml b/uno-starters/uno-starter-liquibase/src/test/resources/db/migrations/postgres/db_migration.yaml deleted file mode 100644 index e69de29b..00000000 diff --git a/uno-starters/uno-starter-liquibase/src/test/resources/uno.yaml b/uno-starters/uno-starter-liquibase/src/test/resources/uno.yaml deleted file mode 100644 index 2b52649e..00000000 --- a/uno-starters/uno-starter-liquibase/src/test/resources/uno.yaml +++ /dev/null @@ -1,4 +0,0 @@ -spring: - liquibase: - enabled: true - changelog: classpath:db/migrations/mysql/db_migration.yaml \ No newline at end of file diff --git a/uno-test/src/main/java/cc/allio/uno/test/env/annotation/properties/FeignClientProperties.java b/uno-test/src/main/java/cc/allio/uno/test/env/annotation/properties/FeignClientProperties.java deleted file mode 100644 index d8e911c9..00000000 --- a/uno-test/src/main/java/cc/allio/uno/test/env/annotation/properties/FeignClientProperties.java +++ /dev/null @@ -1,24 +0,0 @@ -package cc.allio.uno.test.env.annotation.properties; - -import java.lang.annotation.*; - -/** - * {@link org.springframework.cloud.openfeign.FeignClientProperties}的注解描述 - * - * @author jiangwei - * @date 2023/3/9 12:08 - * @since 1.1.4 - */ -@Documented -@Retention(RetentionPolicy.RUNTIME) -@Target(ElementType.TYPE) -@Properties("feign.client") -public @interface FeignClientProperties { - - boolean defaultToProperties() default true; - - String defaultConfig() default "default"; - - boolean decodeSlash() default true; - -} diff --git a/uno-test/src/main/java/cc/allio/uno/test/env/annotation/properties/FeignEncoderProperties.java b/uno-test/src/main/java/cc/allio/uno/test/env/annotation/properties/FeignEncoderProperties.java deleted file mode 100644 index fd8581e2..00000000 --- a/uno-test/src/main/java/cc/allio/uno/test/env/annotation/properties/FeignEncoderProperties.java +++ /dev/null @@ -1,22 +0,0 @@ -package cc.allio.uno.test.env.annotation.properties; - -import java.lang.annotation.*; - -/** - * {@link org.springframework.cloud.openfeign.support.FeignEncoderProperties}的注解描述 - * - * @author jiangwei - * @date 2023/3/9 12:40 - * @since 1.1.4 - */ -@Documented -@Retention(RetentionPolicy.RUNTIME) -@Target(ElementType.TYPE) -@Properties("feign.encoder") -public @interface FeignEncoderProperties { - - /** - * Indicates whether the charset should be derived from the Content-Type header. - */ - boolean charsetFromContentType() default false; -} diff --git a/uno-test/src/test/java/cc/allio/uno/test/annotation/MybatisEnvTest.java b/uno-test/src/test/java/cc/allio/uno/test/annotation/MybatisEnvTest.java index 85081fd6..b2f95cda 100644 --- a/uno-test/src/test/java/cc/allio/uno/test/annotation/MybatisEnvTest.java +++ b/uno-test/src/test/java/cc/allio/uno/test/annotation/MybatisEnvTest.java @@ -3,12 +3,11 @@ import cc.allio.uno.test.Inject; import cc.allio.uno.test.env.annotation.MybatisEnv; import cc.allio.uno.test.RunTest; +import jakarta.annotation.Resource; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; import org.springframework.beans.factory.annotation.Autowired; -import javax.annotation.Resource; - @RunTest @MybatisEnv public class MybatisEnvTest {
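The final hunk swaps `javax.annotation.Resource` for `jakarta.annotation.Resource`, matching the Spring Boot 2.3.12 → 3.1.5 bump in uno-bom. A minimal sketch of that javax → jakarta namespace shift as it applies to an ordinary component follows; the class is hypothetical and only the package moves mirror the hunks above.

```java
// Minimal sketch of the javax -> jakarta shift implied by the Spring Boot 3 upgrade.
// The component is illustrative; it is not part of this change set.
import jakarta.annotation.PostConstruct; // was javax.annotation.PostConstruct
import jakarta.annotation.Resource;      // was javax.annotation.Resource

import org.springframework.stereotype.Component;

@Component
public class JakartaMigrationExample {

    @Resource // same injection semantics; only the import package changed
    private javax.sql.DataSource dataSource; // plain JDBC types stay under javax.sql

    @PostConstruct
    void verify() {
        // lifecycle callbacks keep working after the namespace move
    }
}
```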