zhaojinyu 1 month ago
commit
4dff2d8b1c
100 changed files with 5514 additions and 0 deletions
  1. 8 0
      .idea/.gitignore
  2. 21 0
      .idea/compiler.xml
  3. 11 0
      .idea/encodings.xml
  4. 20 0
      .idea/jarRepositories.xml
  5. 12 0
      .idea/misc.xml
  6. 4 0
      .idea/vcs.xml
  7. 45 0
      README.md
  8. BIN
      allatori/allatori.jar
  9. 31 0
      allatori/allatori.xml
  10. BIN
      allatori/class-winter-core-enhance-2.9.4.jar
  11. BIN
      allatori/class-winter-maven-plugin-enhance-2.9.4.jar
  12. 37 0
      allatori/class-winter-maven-plugin-pom.xml
  13. 32 0
      jnpf-workflow-activiti/pom.xml
  14. 139 0
      jnpf-workflow-activiti/src/main/java/jnpf/workflow/activiti/cmd/JumpCmd.java
  15. 285 0
      jnpf-workflow-activiti/src/main/java/jnpf/workflow/activiti/util/ActivitiUtil.java
  16. 187 0
      jnpf-workflow-common/pom.xml
  17. 35 0
      jnpf-workflow-common/src/main/java/jnpf/workflow/common/exception/BizException.java
  18. 48 0
      jnpf-workflow-common/src/main/java/jnpf/workflow/common/exception/ResultCode.java
  19. 24 0
      jnpf-workflow-common/src/main/java/jnpf/workflow/common/model/fo/CompensateFo.java
  20. 29 0
      jnpf-workflow-common/src/main/java/jnpf/workflow/common/model/fo/DefinitionDeleteFo.java
  21. 34 0
      jnpf-workflow-common/src/main/java/jnpf/workflow/common/model/fo/DefinitionDeployFo.java
  22. 27 0
      jnpf-workflow-common/src/main/java/jnpf/workflow/common/model/fo/FlowTargetTaskFo.java
  23. 27 0
      jnpf-workflow-common/src/main/java/jnpf/workflow/common/model/fo/InfoModel.java
  24. 29 0
      jnpf-workflow-common/src/main/java/jnpf/workflow/common/model/fo/InstanceDeleteFo.java
  25. 30 0
      jnpf-workflow-common/src/main/java/jnpf/workflow/common/model/fo/InstanceStartFo.java
  26. 35 0
      jnpf-workflow-common/src/main/java/jnpf/workflow/common/model/fo/JumpFo.java
  27. 38 0
      jnpf-workflow-common/src/main/java/jnpf/workflow/common/model/fo/MoveMultiToSingleFo.java
  28. 38 0
      jnpf-workflow-common/src/main/java/jnpf/workflow/common/model/fo/MoveSingleToMultiFo.java
  29. 29 0
      jnpf-workflow-common/src/main/java/jnpf/workflow/common/model/fo/TaskAfterFo.java
  30. 29 0
      jnpf-workflow-common/src/main/java/jnpf/workflow/common/model/fo/TaskBackFo.java
  31. 30 0
      jnpf-workflow-common/src/main/java/jnpf/workflow/common/model/fo/TaskCompleteFo.java
  32. 32 0
      jnpf-workflow-common/src/main/java/jnpf/workflow/common/model/fo/TaskNextFo.java
  33. 32 0
      jnpf-workflow-common/src/main/java/jnpf/workflow/common/model/fo/TaskOutgoingFo.java
  34. 32 0
      jnpf-workflow-common/src/main/java/jnpf/workflow/common/model/fo/TaskPrevFo.java
  35. 42 0
      jnpf-workflow-common/src/main/java/jnpf/workflow/common/model/vo/DefinitionVo.java
  36. 22 0
      jnpf-workflow-common/src/main/java/jnpf/workflow/common/model/vo/DeploymentVo.java
  37. 34 0
      jnpf-workflow-common/src/main/java/jnpf/workflow/common/model/vo/FlowElementVo.java
  38. 19 0
      jnpf-workflow-common/src/main/java/jnpf/workflow/common/model/vo/FlowVo.java
  39. 46 0
      jnpf-workflow-common/src/main/java/jnpf/workflow/common/model/vo/HistoricInstanceVo.java
  40. 28 0
      jnpf-workflow-common/src/main/java/jnpf/workflow/common/model/vo/HistoricNodeVo.java
  41. 22 0
      jnpf-workflow-common/src/main/java/jnpf/workflow/common/model/vo/InstanceVo.java
  42. 38 0
      jnpf-workflow-common/src/main/java/jnpf/workflow/common/model/vo/NodeElementVo.java
  43. 37 0
      jnpf-workflow-common/src/main/java/jnpf/workflow/common/model/vo/TaskVo.java
  44. 52 0
      jnpf-workflow-common/src/main/java/jnpf/workflow/common/service/IDefinitionService.java
  45. 43 0
      jnpf-workflow-common/src/main/java/jnpf/workflow/common/service/IInstanceService.java
  46. 197 0
      jnpf-workflow-common/src/main/java/jnpf/workflow/common/service/ITaskService.java
  47. 8 0
      jnpf-workflow-common/src/main/java/jnpf/workflow/common/util/FlowUtil.java
  48. BIN
      jnpf-workflow-common/target/classes/jnpf/workflow/common/exception/BizException.class
  49. BIN
      jnpf-workflow-common/target/classes/jnpf/workflow/common/exception/ResultCode.class
  50. BIN
      jnpf-workflow-common/target/classes/jnpf/workflow/common/model/fo/CompensateFo.class
  51. BIN
      jnpf-workflow-common/target/classes/jnpf/workflow/common/model/fo/DefinitionDeleteFo.class
  52. BIN
      jnpf-workflow-common/target/classes/jnpf/workflow/common/model/fo/DefinitionDeployFo.class
  53. BIN
      jnpf-workflow-common/target/classes/jnpf/workflow/common/model/fo/FlowTargetTaskFo.class
  54. BIN
      jnpf-workflow-common/target/classes/jnpf/workflow/common/model/fo/InfoModel.class
  55. BIN
      jnpf-workflow-common/target/classes/jnpf/workflow/common/model/fo/InstanceDeleteFo.class
  56. BIN
      jnpf-workflow-common/target/classes/jnpf/workflow/common/model/fo/InstanceStartFo.class
  57. BIN
      jnpf-workflow-common/target/classes/jnpf/workflow/common/model/fo/JumpFo.class
  58. BIN
      jnpf-workflow-common/target/classes/jnpf/workflow/common/model/fo/MoveMultiToSingleFo.class
  59. BIN
      jnpf-workflow-common/target/classes/jnpf/workflow/common/model/fo/MoveSingleToMultiFo.class
  60. BIN
      jnpf-workflow-common/target/classes/jnpf/workflow/common/model/fo/TaskAfterFo.class
  61. BIN
      jnpf-workflow-common/target/classes/jnpf/workflow/common/model/fo/TaskBackFo.class
  62. BIN
      jnpf-workflow-common/target/classes/jnpf/workflow/common/model/fo/TaskCompleteFo.class
  63. BIN
      jnpf-workflow-common/target/classes/jnpf/workflow/common/model/fo/TaskNextFo.class
  64. BIN
      jnpf-workflow-common/target/classes/jnpf/workflow/common/model/fo/TaskOutgoingFo.class
  65. BIN
      jnpf-workflow-common/target/classes/jnpf/workflow/common/model/fo/TaskPrevFo.class
  66. BIN
      jnpf-workflow-common/target/classes/jnpf/workflow/common/model/vo/DefinitionVo.class
  67. BIN
      jnpf-workflow-common/target/classes/jnpf/workflow/common/model/vo/DeploymentVo.class
  68. BIN
      jnpf-workflow-common/target/classes/jnpf/workflow/common/model/vo/FlowElementVo.class
  69. BIN
      jnpf-workflow-common/target/classes/jnpf/workflow/common/model/vo/FlowVo.class
  70. BIN
      jnpf-workflow-common/target/classes/jnpf/workflow/common/model/vo/HistoricInstanceVo.class
  71. BIN
      jnpf-workflow-common/target/classes/jnpf/workflow/common/model/vo/HistoricNodeVo.class
  72. BIN
      jnpf-workflow-common/target/classes/jnpf/workflow/common/model/vo/InstanceVo.class
  73. BIN
      jnpf-workflow-common/target/classes/jnpf/workflow/common/model/vo/NodeElementVo.class
  74. BIN
      jnpf-workflow-common/target/classes/jnpf/workflow/common/model/vo/TaskVo.class
  75. BIN
      jnpf-workflow-common/target/classes/jnpf/workflow/common/service/IDefinitionService.class
  76. BIN
      jnpf-workflow-common/target/classes/jnpf/workflow/common/service/IInstanceService.class
  77. BIN
      jnpf-workflow-common/target/classes/jnpf/workflow/common/service/ITaskService.class
  78. BIN
      jnpf-workflow-common/target/classes/jnpf/workflow/common/util/FlowUtil.class
  79. BIN
      jnpf-workflow-common/target/jnpf-workflow-common-1.0.0-RELEASE.jar
  80. 3 0
      jnpf-workflow-common/target/maven-archiver/pom.properties
  81. 31 0
      jnpf-workflow-common/target/maven-status/maven-compiler-plugin/compile/default-compile/createdFiles.lst
  82. 31 0
      jnpf-workflow-common/target/maven-status/maven-compiler-plugin/compile/default-compile/inputFiles.lst
  83. 133 0
      jnpf-workflow-flowable/pom.xml
  84. 145 0
      jnpf-workflow-flowable/src/main/java/jnpf/workflow/flowable/cmd/JumpCmd.java
  85. 132 0
      jnpf-workflow-flowable/src/main/java/jnpf/workflow/flowable/service/DefinitionServiceImpl.java
  86. 87 0
      jnpf-workflow-flowable/src/main/java/jnpf/workflow/flowable/service/InstanceServiceImpl.java
  87. 592 0
      jnpf-workflow-flowable/src/main/java/jnpf/workflow/flowable/service/TaskServiceImpl.java
  88. 370 0
      jnpf-workflow-flowable/src/main/java/jnpf/workflow/flowable/util/FlowableUtil.java
  89. 1992 0
      jnpf-workflow-flowable/src/main/java/liquibase/snapshot/JdbcDatabaseSnapshot.java
  90. BIN
      jnpf-workflow-flowable/target/classes/jnpf/workflow/flowable/cmd/JumpCmd.class
  91. BIN
      jnpf-workflow-flowable/target/classes/jnpf/workflow/flowable/service/DefinitionServiceImpl.class
  92. BIN
      jnpf-workflow-flowable/target/classes/jnpf/workflow/flowable/service/InstanceServiceImpl.class
  93. BIN
      jnpf-workflow-flowable/target/classes/jnpf/workflow/flowable/service/TaskServiceImpl.class
  94. BIN
      jnpf-workflow-flowable/target/classes/jnpf/workflow/flowable/util/FlowableUtil.class
  95. BIN
      jnpf-workflow-flowable/target/classes/liquibase/snapshot/JdbcDatabaseSnapshot$CachingDatabaseMetaData$1.class
  96. BIN
      jnpf-workflow-flowable/target/classes/liquibase/snapshot/JdbcDatabaseSnapshot$CachingDatabaseMetaData$2.class
  97. BIN
      jnpf-workflow-flowable/target/classes/liquibase/snapshot/JdbcDatabaseSnapshot$CachingDatabaseMetaData$3.class
  98. BIN
      jnpf-workflow-flowable/target/classes/liquibase/snapshot/JdbcDatabaseSnapshot$CachingDatabaseMetaData$4.class
  99. BIN
      jnpf-workflow-flowable/target/classes/liquibase/snapshot/JdbcDatabaseSnapshot$CachingDatabaseMetaData$5.class
  100. BIN
      jnpf-workflow-flowable/target/classes/liquibase/snapshot/JdbcDatabaseSnapshot$CachingDatabaseMetaData$ForeignKeysResultSetCache.class

+ 8 - 0
.idea/.gitignore

@@ -0,0 +1,8 @@
+# Default ignored files
+/shelf/
+/workspace.xml
+# Editor-based HTTP Client requests
+/httpRequests/
+# Datasource local storage ignored files
+/dataSources/
+/dataSources.local.xml

+ 21 - 0
.idea/compiler.xml

@@ -0,0 +1,21 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project version="4">
+  <component name="CompilerConfiguration">
+    <annotationProcessing>
+      <profile name="Maven default annotation processors profile" enabled="true">
+        <sourceOutputDir name="target/generated-sources/annotations" />
+        <sourceTestOutputDir name="target/generated-test-sources/test-annotations" />
+        <outputRelativeToContentRoot value="true" />
+        <module name="jnpf-workflow-common" />
+        <module name="jnpf-workflow-flowable" />
+      </profile>
+    </annotationProcessing>
+  </component>
+  <component name="JavacSettings">
+    <option name="ADDITIONAL_OPTIONS_OVERRIDE">
+      <module name="jnpf-workflow-common" options="-parameters" />
+      <module name="jnpf-workflow-core" options="-parameters" />
+      <module name="jnpf-workflow-flowable" options="-parameters" />
+    </option>
+  </component>
+</project>

+ 11 - 0
.idea/encodings.xml

@@ -0,0 +1,11 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project version="4">
+  <component name="Encoding">
+    <file url="file://$PROJECT_DIR$/jnpf-workflow-common/src/main/java" charset="UTF-8" />
+    <file url="file://$PROJECT_DIR$/jnpf-workflow-common/src/main/resources" charset="UTF-8" />
+    <file url="file://$PROJECT_DIR$/jnpf-workflow-flowable/src/main/java" charset="UTF-8" />
+    <file url="file://$PROJECT_DIR$/jnpf-workflow-flowable/src/main/resources" charset="UTF-8" />
+    <file url="file://$PROJECT_DIR$/src/main/java" charset="UTF-8" />
+    <file url="file://$PROJECT_DIR$/src/main/resources" charset="UTF-8" />
+  </component>
+</project>

+ 20 - 0
.idea/jarRepositories.xml

@@ -0,0 +1,20 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project version="4">
+  <component name="RemoteRepositoriesConfiguration">
+    <remote-repository>
+      <option name="id" value="central" />
+      <option name="name" value="Central Repository" />
+      <option name="url" value="http://127.0.0.1:9999/repository/maven-public/" />
+    </remote-repository>
+    <remote-repository>
+      <option name="id" value="central" />
+      <option name="name" value="Maven Central repository" />
+      <option name="url" value="https://repo1.maven.org/maven2" />
+    </remote-repository>
+    <remote-repository>
+      <option name="id" value="jboss.community" />
+      <option name="name" value="JBoss Community repository" />
+      <option name="url" value="https://repository.jboss.org/nexus/content/repositories/public/" />
+    </remote-repository>
+  </component>
+</project>

+ 12 - 0
.idea/misc.xml

@@ -0,0 +1,12 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project version="4">
+  <component name="ExternalStorageConfigurationManager" enabled="true" />
+  <component name="MavenProjectsManager">
+    <option name="originalFiles">
+      <list>
+        <option value="$PROJECT_DIR$/pom.xml" />
+      </list>
+    </option>
+  </component>
+  <component name="ProjectRootManager" version="2" languageLevel="JDK_21" default="true" project-jdk-name="21" project-jdk-type="JavaSDK" />
+</project>

+ 4 - 0
.idea/vcs.xml

@@ -0,0 +1,4 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project version="4">
+  <component name="VcsDirectoryMappings" defaultProject="true" />
+</project>

+ 45 - 0
README.md

@@ -0,0 +1,45 @@
+> Note: installation or storage paths for the source code, JDK, database, Redis, etc. must not contain Chinese characters, spaces, or other special characters
+
+## 1 Environment Requirements
+
+| Category | Version notes or recommendations |
+| --- |--------------------------------------------------|
+| Hardware | Development machine with an i3 CPU or better and 16 GB RAM or more is recommended |
+| OS | Windows 10/11, macOS |
+| JDK | JDK 21 by default; to switch to JDK 8/11/17, adjust the code as described in the docs. An `OpenJDK` distribution such as `Liberica JDK`, `Eclipse Temurin`, `Alibaba Dragonwell`, or `BiSheng` is recommended |
+| Maven | Dependency management tool; version `3.6.3` or later is recommended |
+| IDE | Integrated development environment; `IDEA 2024` or later is recommended, compatible with `Eclipse`, `Spring Tool Suite`, and other IDEs |
+
+## 2 Related Projects
+> Provides base dependencies for the following projects
+
+| Project       | Branch        | Description |
+|---------------|---------------|------------|
+| jnpf-workflow | v1.0.0-stable | Workflow engine backend project source code |
+
+## 3 Choosing Whether to Encrypt
+
+> Whether you encrypt affects how the `jnpf-workflow` project is started
+> If this project is encrypted, the `jnpf-workflow` project must also be encrypted
+
+### 3.1 Without Encryption
+
+In IDEA, expand `Profiles` under `Maven` on the right, uncheck the `encrypted` option, then click the Maven `Refresh` icon to reload Maven
+
+### 3.2 With Encryption
+
+In IDEA, expand `Profiles` under `Maven` on the right, check the `encrypted` option, then click the Maven `Refresh` icon to reload Maven
+
+#### 3.2.1 Installing the Encryption Plugin
+
+In IDEA, double-click `jnpf-workflow-core` > `clean` under `Maven` on the right; this automatically installs the encryption packaging plugins
+
+## 4 Usage
+
+### 4.1 Local Install
+
+In IDEA, double-click `jnpf-workflow-core` > `Lifecycle` > `install` under `Maven` on the right to install the `jnpf-workflow-core` artifact into the local repository
+
+### 4.2 Deploying to a Private Repository
+
+In IDEA, double-click `jnpf-workflow-core` > `Lifecycle` > `deploy` under `Maven` on the right to deploy to the private repository.

BIN
allatori/allatori.jar


+ 31 - 0
allatori/allatori.xml

@@ -0,0 +1,31 @@
+<config>
+    <input>
+        <jar in="${project.build.finalName}.jar" out="${project.build.finalName}.jar"/>
+    </input>
+
+    <keep-names>
+        <class access="protected+">
+            <field access="protected+"/>
+            <method access="protected+"/>
+        </class>
+    </keep-names>
+
+
+    <!-- Obfuscation settings -->
+    <!-- Keep randomized class names lowercase -->
+    <property name="classes-naming" value="abc"/>
+    <!-- Keep interface parameter names unchanged -->
+    <property name="local-variables-naming" value="keep-parameters"/>
+    <!-- String encryption -->
+    <property name="string-encryption" value="maximum"/>
+    <property name="string-encryption-type" value="strong"/>
+    <property name="string-encryption-version" value="v4"/>
+    <!-- Line-number obfuscation -->
+    <property name="line-numbers" value="obfuscate"/>
+    <!-- Member reordering -->
+    <property name="member-reorder" value="enable"/>
+    <!-- Output jar compression level -->
+    <property name="output-jar-compression-level" value="9"/>
+
+    <property name="log-file" value="log.xml"/>
+</config>

BIN
allatori/class-winter-core-enhance-2.9.4.jar


BIN
allatori/class-winter-maven-plugin-enhance-2.9.4.jar


+ 37 - 0
allatori/class-winter-maven-plugin-pom.xml

@@ -0,0 +1,37 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+
+    <groupId>com.idea-aedi</groupId>
+    <artifactId>class-winter-maven-plugin</artifactId>
+    <version>enhance-2.9.4</version>
+
+    <dependencies>
+        <dependency>
+            <groupId>com.idea-aedi</groupId>
+            <artifactId>class-winter-core</artifactId>
+            <version>${project.version}</version>
+        </dependency>
+
+        <dependency>
+            <groupId>org.apache.maven</groupId>
+            <artifactId>maven-plugin-api</artifactId>
+            <version>3.8.1</version>
+        </dependency>
+
+        <dependency>
+            <groupId>org.apache.maven.plugin-tools</groupId>
+            <artifactId>maven-plugin-annotations</artifactId>
+            <version>3.6.1</version>
+            <scope>provided</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>org.apache.maven</groupId>
+            <artifactId>maven-project</artifactId>
+            <version>2.2.1</version>
+        </dependency>
+    </dependencies>
+</project>

+ 32 - 0
jnpf-workflow-activiti/pom.xml

@@ -0,0 +1,32 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+    <parent>
+        <groupId>com.jnpf</groupId>
+        <artifactId>jnpf-workflow-core</artifactId>
+        <version>1.0.0-RELEASE</version>
+    </parent>
+
+    <artifactId>jnpf-workflow-activiti</artifactId>
+
+    <dependencies>
+        <dependency>
+            <groupId>org.activiti</groupId>
+            <artifactId>activiti-spring-boot-starter</artifactId>
+            <version>${activiti.version}</version>
+            <exclusions>
+                <exclusion>
+                    <artifactId>mybatis</artifactId>
+                    <groupId>org.mybatis</groupId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        <dependency>
+            <groupId>com.jnpf</groupId>
+            <artifactId>jnpf-workflow-common</artifactId>
+            <version>${project.version}</version>
+        </dependency>
+    </dependencies>
+</project>

+ 139 - 0
jnpf-workflow-activiti/src/main/java/jnpf/workflow/activiti/cmd/JumpCmd.java

@@ -0,0 +1,139 @@
+package jnpf.workflow.activiti.cmd;
+
+import cn.hutool.core.collection.CollectionUtil;
+import org.activiti.bpmn.model.BpmnModel;
+import org.activiti.bpmn.model.UserTask;
+import org.activiti.engine.RuntimeService;
+import org.activiti.engine.history.HistoricActivityInstance;
+import org.activiti.engine.history.HistoricTaskInstance;
+import org.activiti.engine.impl.HistoricActivityInstanceQueryImpl;
+import org.activiti.engine.impl.HistoricTaskInstanceQueryImpl;
+import org.activiti.engine.impl.Page;
+import org.activiti.engine.impl.interceptor.Command;
+import org.activiti.engine.impl.interceptor.CommandContext;
+import org.activiti.engine.impl.persistence.entity.*;
+import org.activiti.engine.runtime.Execution;
+
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+
+/**
+ * Jump command class: moves a running process instance from source tasks to target flow nodes
+ *
+ * @author yanghuixing
+ * @author YMPaaS Cloud@YinMai Info. Co., Ltd
+ * @version 0.2.0
+ * @since 2023/8/22 17:56
+ */
+public class JumpCmd implements Command<Void> {
+    private final String processInstanceId;
+
+    private List<String> sourceTaskDefIdList;
+    private List<String> targetFlowNodeIdList;
+
+    private String deleteReason;
+
+    private final BpmnModel bpmnModel;
+    private final RuntimeService runtimeService;
+
+    /**
+     * Map holding the variables of the withdrawn nodes
+     */
+    private Map<String, List<VariableInstanceEntity>> varMap = new ConcurrentHashMap<>();
+
+    public JumpCmd(String processInstanceId, List<String> sourceTaskDefIdList, List<String> targetFlowNodeIdList,
+                   String deleteReason, BpmnModel bpmnModel, RuntimeService runtimeService) {
+        this.processInstanceId = processInstanceId;
+        this.sourceTaskDefIdList = sourceTaskDefIdList;
+        this.deleteReason = deleteReason;
+        this.targetFlowNodeIdList = targetFlowNodeIdList;
+        this.bpmnModel = bpmnModel;
+        this.runtimeService = runtimeService;
+    }
+
+    @Override
+    public Void execute(CommandContext commandContext) {
+        ExecutionEntityManager executionEntityManager = commandContext.getExecutionEntityManager();
+        // Handle act_ru_execution
+        handleExecution(commandContext);
+        // Handle act_hi_actinst
+        handleActInst(commandContext);
+
+        targetFlowNodeIdList.forEach(targetId -> {
+            UserTask userTask = (UserTask) bpmnModel.getFlowElement(targetId);
+            // Create a child execution and start the task
+            ExecutionEntity processExecution = executionEntityManager.findById(processInstanceId);
+            ExecutionEntity childExecution = executionEntityManager.createChildExecution(processExecution);
+            childExecution.setCurrentFlowElement(userTask);
+
+            // Set execution variables
+            VariableInstanceEntityManager variableManage = commandContext.getVariableInstanceEntityManager();
+            List<VariableInstanceEntity> variableInstanceEntities = varMap.get(userTask.getId());
+            if (CollectionUtil.isNotEmpty(variableInstanceEntities)) {
+                variableInstanceEntities.forEach(var -> {
+                    var.setExecutionId(childExecution.getId());
+                    variableManage.insert(var);
+                });
+            }
+            executionEntityManager.insert(childExecution);
+            // Hand the child execution to the engine to continue the process
+            commandContext.getAgenda().planContinueProcessOperation(childExecution);
+        });
+        return null;
+    }
+
+    private void handleActInst(CommandContext commandContext) {
+        for (String str : sourceTaskDefIdList) {
+            HistoricActivityInstanceQueryImpl query =
+                    new HistoricActivityInstanceQueryImpl().activityId(str).processInstanceId(processInstanceId).unfinished();
+            List<HistoricActivityInstance> activityInstances = commandContext.getHistoricActivityInstanceEntityManager()
+                    .findHistoricActivityInstancesByQueryCriteria(query, new Page(0, Integer.MAX_VALUE));
+            for (HistoricActivityInstance activity : activityInstances) {
+                HistoricActivityInstanceEntity activityEntity = (HistoricActivityInstanceEntity) activity;
+                // Update the act_hi_actinst table
+                activityEntity.setDeleted(true);
+                activityEntity.setDeleteReason(deleteReason);
+                commandContext.getHistoricActivityInstanceEntityManager().update(activityEntity);
+            }
+        }
+    }
+
+    private void handleExecution(CommandContext commandContext) {
+        ExecutionEntityManager executionEntityManager = commandContext.getExecutionEntityManager();
+        HistoricTaskInstanceEntityManager historicTaskManager = commandContext.getHistoricTaskInstanceEntityManager();
+        VariableInstanceEntityManager variableManager = commandContext.getVariableInstanceEntityManager();
+        for (String str : sourceTaskDefIdList) {
+            List<Execution> executionEntities = runtimeService.createExecutionQuery().processInstanceId(processInstanceId).activityId(str).list();
+            for (Execution parentExecution : executionEntities) {
+                // Close executions of unfinished tasks
+                // Get child executions; sub-process nodes etc. also need handling
+                List<ExecutionEntity> childExecutions =
+                        executionEntityManager.findChildExecutionsByParentExecutionId(parentExecution.getId());
+                for (ExecutionEntity childExecution : childExecutions) {
+                    // Because of foreign-key constraints, delete the execution's rows in the variable table first
+                    List<VariableInstanceEntity> variableInstances = variableManager.findVariableInstancesByExecutionId(childExecution.getId());
+                    varMap.put(parentExecution.getActivityId(), variableInstances);
+                    variableInstances.forEach(variableManager::delete);
+                    executionEntityManager.deleteExecutionAndRelatedData(childExecution, deleteReason, false);
+                    // Update the historic task instances
+                    HistoricTaskInstanceQueryImpl query = new HistoricTaskInstanceQueryImpl().executionId(childExecution.getId()).processInstanceId(processInstanceId);
+                    List<HistoricTaskInstance> historicTaskInstances = historicTaskManager.findHistoricTaskInstancesByQueryCriteria(query);
+                    if (CollectionUtil.isNotEmpty(historicTaskInstances)) {
+                        for (HistoricTaskInstance historicTaskInstance : historicTaskInstances) {
+                            HistoricTaskInstanceEntity entity = (HistoricTaskInstanceEntity) historicTaskInstance;
+                            entity.setDeleteReason(deleteReason);
+                            commandContext.getHistoricTaskInstanceEntityManager().update(entity);
+                        }
+                    }
+                }
+                // Close the parent execution
+                List<VariableInstanceEntity> variableInstances = variableManager.findVariableInstancesByExecutionId(parentExecution.getId());
+                varMap.put(parentExecution.getActivityId(), variableInstances);
+                variableInstances.forEach(variableManager::delete);
+                ExecutionEntity parentExecution1 = (ExecutionEntity) parentExecution;
+                executionEntityManager.deleteExecutionAndRelatedData(parentExecution1, deleteReason, false);
+            }
+        }
+    }
+}
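
For orientation, a minimal sketch of how a command like this is typically run through Activiti's command interceptor chain, assuming injected `ManagementService`, `RepositoryService`, and `RuntimeService` beans; the instance/definition IDs and node keys are placeholders, not values from this commit:

```java
// Hypothetical caller; identifiers are placeholders, not part of this commit.
import java.util.List;

import org.activiti.bpmn.model.BpmnModel;
import org.activiti.engine.ManagementService;
import org.activiti.engine.RepositoryService;
import org.activiti.engine.RuntimeService;

import jnpf.workflow.activiti.cmd.JumpCmd;

public class JumpCmdUsageSketch {

    public void jump(ManagementService managementService,
                     RepositoryService repositoryService,
                     RuntimeService runtimeService,
                     String processInstanceId,
                     String processDefinitionId) {
        // The BpmnModel is normally loaded from the deployed definition.
        BpmnModel bpmnModel = repositoryService.getBpmnModel(processDefinitionId);

        JumpCmd cmd = new JumpCmd(
                processInstanceId,
                List.of("userTask1"),   // source task definition keys (placeholders)
                List.of("userTask2"),   // target flow node ids (placeholders)
                "jump",                 // delete reason recorded in history
                bpmnModel,
                runtimeService);

        // Activiti executes the command inside a command context / transaction.
        managementService.executeCommand(cmd);
    }
}
```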

+ 285 - 0
jnpf-workflow-activiti/src/main/java/jnpf/workflow/activiti/util/ActivitiUtil.java

@@ -0,0 +1,285 @@
+package jnpf.workflow.activiti.util;
+
+import org.activiti.bpmn.model.*;
+
+import java.util.*;
+
+/**
+ * Activiti utility class
+ *
+ * @author yanghuixing
+ * @author YMPaaS Cloud@YinMai Info. Co., Ltd
+ * @version 0.2.0
+ * @since 2023/8/22 14:12
+ */
+public class ActivitiUtil {
+    /**
+     * 获取全部节点元素
+     *
+     * @param flowElements {@link Collection<FlowElement>}
+     * @param allElements  {@link Collection<FlowElement>}
+     * @return {@link Collection<FlowElement>}
+     * @author yanghuixing
+     * @since 2023/8/22 16:08
+     **/
+    public static Collection<FlowElement> getAllElements(Collection<FlowElement> flowElements, Collection<FlowElement> allElements) {
+        allElements = allElements == null ? new ArrayList<>() : allElements;
+        for (FlowElement flowElement : flowElements) {
+            allElements.add(flowElement);
+            if (flowElement instanceof SubProcess) {
+                // Collect the sub-process's elements
+                allElements = getAllElements(((SubProcess) flowElement).getFlowElements(), allElements);
+            }
+        }
+        return allElements;
+    }
+
+    /**
+     * Get the incoming sequence flows of a node
+     *
+     * @param element {@link FlowElement}
+     * @return {@link List<SequenceFlow>}
+     * @author yanghuixing
+     * @since 2023/8/22 16:09
+     **/
+    public static List<SequenceFlow> getElementIncomingFlows(FlowElement element) {
+        List<SequenceFlow> sequenceFlows = null;
+        if (element instanceof FlowNode) {
+            sequenceFlows = ((FlowNode) element).getIncomingFlows();
+        }
+        return sequenceFlows;
+    }
+
+    /**
+     * Get the outgoing sequence flows of a node
+     *
+     * @param element {@link FlowElement}
+     * @return {@link List<SequenceFlow>}
+     * @author yanghuixing
+     * @since 2023/8/22 16:10
+     **/
+    public static List<SequenceFlow> getElementOutgoingFlows(FlowElement element) {
+        List<SequenceFlow> sequenceFlows = null;
+        if (element instanceof FlowNode) {
+            sequenceFlows = ((FlowNode) element).getOutgoingFlows();
+        }
+        return sequenceFlows;
+    }
+
+    /**
+     * Get the nodes that can be returned to (user tasks, sub-processes)
+     *
+     * @param source    {@link FlowElement}
+     * @param passFlows {@link Set<String>}
+     * @param passActs  {@link List<Activity>}
+     * @return {@link List<Activity>}
+     * @author yanghuixing
+     * @since 2023/8/22 16:11
+     **/
+    public static List<Activity> getPassActs(FlowElement source, Set<String> passFlows, List<Activity> passActs) {
+        passFlows = passFlows == null ? new HashSet<>() : passFlows;
+        passActs = passActs == null ? new ArrayList<>() : passActs;
+
+        List<SequenceFlow> sequenceFlows = getElementIncomingFlows(source);
+        if (null != sequenceFlows && sequenceFlows.size() > 0) {
+            for (SequenceFlow sequenceFlow : sequenceFlows) {
+                // Sequence flow already visited
+                if (passFlows.contains(sequenceFlow.getId())) {
+                    continue;
+                }
+                // Record the visited sequence flow
+                passFlows.add(sequenceFlow.getId());
+                // Record visited user tasks and sub-processes
+                if (sequenceFlow.getSourceFlowElement() instanceof UserTask) {
+                    passActs.add((UserTask) sequenceFlow.getSourceFlowElement());
+                }
+                if (sequenceFlow.getSourceFlowElement() instanceof SubProcess) {
+                    passActs.add((SubProcess) sequenceFlow.getSourceFlowElement());
+                }
+                // Recurse
+                getPassActs(sequenceFlow.getSourceFlowElement(), passFlows, passActs);
+            }
+        }
+        return passActs;
+    }
+
+    /**
+     * Get the immediate preceding nodes (user tasks, sub-processes)
+     *
+     * @param source     {@link FlowElement}
+     * @param passFlows  {@link Set<String>}
+     * @param parentActs {@link List<Activity>}
+     * @return {@link List<Activity>}
+     * @author yanghuixing
+     * @since 2023/8/22 16:13
+     **/
+    public static List<Activity> getParentActs(FlowElement source, Set<String> passFlows, List<Activity> parentActs) {
+        passFlows = passFlows == null ? new HashSet<>() : passFlows;
+        parentActs = parentActs == null ? new ArrayList<>() : parentActs;
+
+        List<SequenceFlow> sequenceFlows = getElementIncomingFlows(source);
+        if (null != sequenceFlows && sequenceFlows.size() > 0) {
+            for (SequenceFlow sequenceFlow : sequenceFlows) {
+                // Sequence flow already visited
+                if (passFlows.contains(sequenceFlow.getId())) {
+                    continue;
+                }
+                // Record the visited sequence flow
+                passFlows.add(sequenceFlow.getId());
+                // Record user tasks and sub-processes
+                if (sequenceFlow.getSourceFlowElement() instanceof UserTask) {
+                    parentActs.add((UserTask) sequenceFlow.getSourceFlowElement());
+                    continue;
+                }
+                if (sequenceFlow.getSourceFlowElement() instanceof SubProcess) {
+                    parentActs.add((SubProcess) sequenceFlow.getSourceFlowElement());
+                    continue;
+                }
+                // Recurse
+                getParentActs(sequenceFlow.getSourceFlowElement(), passFlows, parentActs);
+            }
+        }
+        return parentActs;
+    }
+
+    /**
+     * Get the nodes that need to be withdrawn
+     *
+     * @param source         {@link FlowElement}
+     * @param runTaskKeyList {@link List<String>}
+     * @param passFlows      {@link Set<String>}
+     * @param userTasks      {@link List<UserTask>}
+     * @return {@link List<UserTask>}
+     * @author yanghuixing
+     * @since 2023/8/22 16:13
+     **/
+    public static List<UserTask> getChildUserTasks(FlowElement source, List<String> runTaskKeyList, Set<String> passFlows, List<UserTask> userTasks) {
+        passFlows = passFlows == null ? new HashSet<>() : passFlows;
+        userTasks = userTasks == null ? new ArrayList<>() : userTasks;
+        List<SequenceFlow> sequenceFlows = getElementOutgoingFlows(source);
+        if (null != sequenceFlows && sequenceFlows.size() > 0) {
+            for (SequenceFlow sequenceFlow : sequenceFlows) {
+                // Sequence flow already visited
+                if (passFlows.contains(sequenceFlow.getId())) {
+                    continue;
+                }
+                // Record the visited sequence flow
+                passFlows.add(sequenceFlow.getId());
+                // User task
+                if (sequenceFlow.getTargetFlowElement() instanceof UserTask
+                        && runTaskKeyList.contains(sequenceFlow.getTargetFlowElement().getId())) {
+                    userTasks.add((UserTask) sequenceFlow.getTargetFlowElement());
+                    continue;
+                }
+                // Sub-process: start from its first element
+                if (sequenceFlow.getTargetFlowElement() instanceof SubProcess) {
+                    FlowElement flowElement = (FlowElement) ((SubProcess) sequenceFlow.getTargetFlowElement()).getFlowElements().toArray()[0];
+                    List<UserTask> tasks = getChildUserTasks(flowElement, runTaskKeyList, passFlows, null);
+                    // User tasks found; do not search further down this branch
+                    if (tasks.size() > 0) {
+                        userTasks.addAll(tasks);
+                        continue;
+                    }
+                }
+                // Recurse
+                getChildUserTasks(sequenceFlow.getTargetFlowElement(), runTaskKeyList, passFlows, userTasks);
+            }
+        }
+        return userTasks;
+    }
+
+    /**
+     * Get the next-level user tasks
+     *
+     * @param source          {@link FlowElement}
+     * @param hasSequenceFlow {@link Set<String>}
+     * @param userTaskList    {@link List<UserTask>}
+     * @return {@link List<UserTask>}
+     * @author yanghuixing
+     * @since 2023/8/22 16:15
+     **/
+    public static List<UserTask> getNextUserTasks(FlowElement source, Set<String> hasSequenceFlow, List<UserTask> userTaskList) {
+        hasSequenceFlow = Optional.ofNullable(hasSequenceFlow).orElse(new HashSet<>());
+        userTaskList = Optional.ofNullable(userTaskList).orElse(new ArrayList<>());
+        // Get the outgoing sequence flows
+        List<SequenceFlow> sequenceFlows = getElementOutgoingFlows(source);
+        if (null != sequenceFlows) {
+            for (SequenceFlow sequenceFlow : sequenceFlows) {
+                // A repeated sequence flow means a loop; skip it
+                if (hasSequenceFlow.contains(sequenceFlow.getId())) {
+                    continue;
+                }
+                // Record the visited sequence flow
+                hasSequenceFlow.add(sequenceFlow.getId());
+                FlowElement targetFlowElement = sequenceFlow.getTargetFlowElement();
+                if (targetFlowElement instanceof UserTask) {
+                    // If the node is a user task, add it to the result list
+                    userTaskList.add((UserTask) targetFlowElement);
+                } else {
+                    // Otherwise keep recursing to the next node
+                    getNextUserTasks(targetFlowElement, hasSequenceFlow, userTaskList);
+                }
+            }
+        }
+        return userTaskList;
+    }
+
+    /**
+     * If a node's outgoing target is a gateway, get the gateway's outgoing sequence flows
+     *
+     * @param source {@link FlowElement}
+     * @return {@link List<SequenceFlow>}
+     * @author yanghuixing
+     * @since 2023/8/22 16:15
+     **/
+    public static List<SequenceFlow> getOutFlowsOfGateway(FlowElement source) {
+        List<SequenceFlow> flows = new ArrayList<>();
+        // Get the outgoing sequence flows
+        List<SequenceFlow> sequenceFlows = getElementOutgoingFlows(source);
+        if (null != sequenceFlows) {
+            for (SequenceFlow sequenceFlow : sequenceFlows) {
+                FlowElement targetFlowElement = sequenceFlow.getTargetFlowElement();
+                if (targetFlowElement instanceof Gateway) {
+                    List<SequenceFlow> outgoingFlows = ((Gateway) targetFlowElement).getOutgoingFlows();
+                    flows.addAll(outgoingFlows);
+                }
+            }
+            return flows;
+        }
+        return null;
+    }
+
+    /**
+     * Get the keys of the preceding nodes
+     *
+     * @param source    {@link FlowElement}
+     * @param passFlows {@link Set<String>}
+     * @param keys      {@link List<String>}
+     * @return {@link List<String>}
+     * @author yanghuixing
+     * @since 2023/8/22 16:16
+     **/
+    public static List<String> getBefore(FlowElement source, Set<String> passFlows, List<String> keys) {
+        passFlows = passFlows == null ? new HashSet<>() : passFlows;
+        keys = keys == null ? new ArrayList<>() : keys;
+        List<SequenceFlow> sequenceFlows = getElementIncomingFlows(source);
+        if (null != sequenceFlows && sequenceFlows.size() > 0) {
+            for (SequenceFlow sequenceFlow : sequenceFlows) {
+                // Sequence flow already visited
+                if (passFlows.contains(sequenceFlow.getId())) {
+                    continue;
+                }
+                // Record the visited sequence flow
+                passFlows.add(sequenceFlow.getId());
+                // Record the node key
+                keys.add(sequenceFlow.getSourceFlowElement().getId());
+                if (sequenceFlow.getSourceFlowElement() instanceof StartEvent) {
+                    continue;
+                }
+                // Recurse
+                getBefore(sequenceFlow.getSourceFlowElement(), passFlows, keys);
+            }
+        }
+        return keys;
+    }
+}
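
A short usage sketch for these helpers, assuming the `BpmnModel` is loaded from a deployed definition via `RepositoryService`; the definition ID and task key are placeholders, not values from this commit:

```java
// Hypothetical caller; identifiers are placeholders, not part of this commit.
import java.util.List;

import org.activiti.bpmn.model.BpmnModel;
import org.activiti.bpmn.model.FlowElement;
import org.activiti.bpmn.model.UserTask;
import org.activiti.engine.RepositoryService;

import jnpf.workflow.activiti.util.ActivitiUtil;

public class ActivitiUtilUsageSketch {

    public List<UserTask> nextUserTasks(RepositoryService repositoryService,
                                        String processDefinitionId,
                                        String currentTaskKey) {
        BpmnModel bpmnModel = repositoryService.getBpmnModel(processDefinitionId);
        // Look up the current node by its key, then walk its outgoing flows.
        FlowElement current = bpmnModel.getMainProcess().getFlowElement(currentTaskKey);
        // Null collections are tolerated; the helper initializes them itself.
        return ActivitiUtil.getNextUserTasks(current, null, null);
    }
}
```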

+ 187 - 0
jnpf-workflow-common/pom.xml

@@ -0,0 +1,187 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+
+    <artifactId>jnpf-workflow-common</artifactId>
+
+    <parent>
+        <groupId>com.jnpf</groupId>
+        <artifactId>jnpf-workflow-core</artifactId>
+        <version>1.0.0-RELEASE</version>
+    </parent>
+
+    <dependencies>
+        <!-- Lombok -->
+        <dependency>
+            <groupId>org.projectlombok</groupId>
+            <artifactId>lombok</artifactId>
+        </dependency>
+        <!-- Hutool utilities -->
+        <dependency>
+            <groupId>cn.hutool</groupId>
+            <artifactId>hutool-all</artifactId>
+            <version>${hutool.version}</version>
+        </dependency>
+        <!--<dependency>
+            <groupId>org.springdoc</groupId>
+            <artifactId>springdoc-openapi-starter-webmvc-ui</artifactId>
+            <version>${springdoc.openapi.version}</version>
+        </dependency>-->
+
+    </dependencies>
+
+
+
+    <profiles>
+        <profile>
+            <!-- JAR encryption -->
+            <id>encrypted</id>
+            <build>
+                <plugins>
+                    <!-- Install the encryption packaging plugins -->
+                    <plugin>
+                        <groupId>org.apache.maven.plugins</groupId>
+                        <artifactId>maven-install-plugin</artifactId>
+                        <executions>
+                            <execution>
+                                <id>install-core</id>
+                                <phase>clean</phase>
+                                <configuration>
+                                    <file>${project.basedir}/../allatori/class-winter-core-${class-winter-maven-plugin.version}.jar</file>
+                                    <repositoryLayout>default</repositoryLayout>
+                                    <groupId>com.idea-aedi</groupId>
+                                    <artifactId>class-winter-core</artifactId>
+                                    <version>enhance-2.9.4</version>
+                                    <packaging>jar</packaging>
+                                    <generatePom>true</generatePom>
+                                </configuration>
+                                <goals>
+                                    <goal>install-file</goal>
+                                </goals>
+                            </execution>
+                            <execution>
+                                <id>install-maven</id>
+                                <phase>clean</phase>
+                                <configuration>
+                                    <file>${project.basedir}/../allatori/class-winter-maven-plugin-${class-winter-maven-plugin.version}.jar</file>
+                                    <repositoryLayout>default</repositoryLayout>
+                                    <groupId>com.idea-aedi</groupId>
+                                    <artifactId>class-winter-maven-plugin</artifactId>
+                                    <version>${class-winter-maven-plugin.version}</version>
+                                    <packaging>jar</packaging>
+                                    <generatePom>false</generatePom>
+                                    <pomFile>${project.basedir}/../allatori/class-winter-maven-plugin-pom.xml</pomFile>
+                                </configuration>
+                                <goals>
+                                    <goal>install-file</goal>
+                                </goals>
+                            </execution>
+                        </executions>
+                    </plugin>
+                    <!-- Copy the Allatori config and filter the jar path placeholders inside it -->
+                    <plugin>
+                        <groupId>org.apache.maven.plugins</groupId>
+                        <artifactId>maven-resources-plugin</artifactId>
+                        <executions>
+                            <execution>
+                                <id>copy-and-filter-allatori-config</id>
+                                <phase>package</phase>
+                                <goals>
+                                    <goal>copy-resources</goal>
+                                </goals>
+                                <configuration>
+                                    <outputDirectory>${project.build.directory}</outputDirectory>
+                                    <resources>
+                                        <resource>
+                                            <directory>${project.basedir}/../allatori</directory>
+                                            <includes>
+                                                <include>allatori.xml</include>
+                                            </includes>
+                                            <filtering>true</filtering>
+                                        </resource>
+                                    </resources>
+                                </configuration>
+                            </execution>
+                        </executions>
+                    </plugin>
+                    <!-- Jar obfuscation -->
+                    <plugin>
+                        <groupId>org.codehaus.mojo</groupId>
+                        <artifactId>exec-maven-plugin</artifactId>
+                        <executions>
+                            <execution>
+                                <id>run-allatori</id>
+                                <phase>package</phase>
+                                <goals>
+                                    <goal>exec</goal>
+                                </goals>
+                            </execution>
+                        </executions>
+                        <configuration>
+                            <executable>java</executable>
+                            <arguments>
+                                <argument>-Xms128m</argument>
+                                <argument>-Xmx512m</argument>
+                                <argument>-jar</argument>
+                                <!-- Copy allatori.jar to 'allatori' directory to use the commented line -->
+                                <argument>${project.basedir}/../allatori/allatori.jar</argument>
+                                <!-- <argument>${basedir}/allatori/allatori.jar</argument> -->
+                                <argument>${project.build.directory}/allatori.xml</argument>
+                            </arguments>
+                        </configuration>
+                    </plugin>
+                    <!-- Jar encryption -->
+                    <plugin>
+                        <groupId>com.idea-aedi</groupId>
+                        <artifactId>class-winter-maven-plugin</artifactId>
+                        <!-- Plugin configuration -->
+                        <configuration>
+                            <!-- Package prefix to encrypt -->
+                            <includePrefix>jnpf.workflow</includePrefix>
+                            <!-- Overwrite the original artifact -->
+                            <finalName>${build.finalName}</finalName>
+                            <!-- Required JVM argument check at startup -->
+                            <jvmArgCheck>-XX:+DisableAttachMechanism</jvmArgCheck>
+                        </configuration>
+                        <executions>
+                            <execution>
+                                <phase>package</phase>
+                                <goals>
+                                    <goal>class-winter</goal>
+                                </goals>
+                            </execution>
+                        </executions>
+                    </plugin>
+                </plugins>
+            </build>
+        </profile>
+        <profile>
+            <id>boot3</id>
+            <activation>
+                <jdk>[17,)</jdk>
+            </activation>
+            <dependencies>
+                <dependency>
+                    <groupId>com.github.xiaoymin</groupId>
+                    <artifactId>knife4j-openapi3-jakarta-spring-boot-starter</artifactId>
+                </dependency>
+            </dependencies>
+        </profile>
+        <profile>
+            <id>boot2</id>
+            <activation>
+                <jdk>(,17)</jdk>
+            </activation>
+            <dependencies>
+                <!-- Official Knife4j starter; note that this profile targets Spring Boot versions below 3.0 -->
+                <dependency>
+                    <groupId>com.github.xiaoymin</groupId>
+                    <artifactId>knife4j-openapi3-spring-boot-starter</artifactId>
+                </dependency>
+            </dependencies>
+        </profile>
+    </profiles>
+
+</project>

+ 35 - 0
jnpf-workflow-common/src/main/java/jnpf/workflow/common/exception/BizException.java

@@ -0,0 +1,35 @@
+package jnpf.workflow.common.exception;
+
+import lombok.Getter;
+
+/**
+ * Business exception class
+ *
+ * @author JNPF Flowable@YinMai Info. Co., Ltd
+ * @version 1.0.0
+ * @since 2024/4/3 15:31
+ */
+@Getter
+public class BizException extends RuntimeException {
+    public ResultCode resultCode;
+
+    public BizException(ResultCode errorCode) {
+        super(errorCode.getMsg());
+        this.resultCode = errorCode;
+    }
+
+    public BizException(String message) {
+        super(message);
+        this.resultCode = ResultCode.SYSTEM_EXECUTION_ERROR;
+    }
+
+    public BizException(String message, Throwable cause) {
+        super(message, cause);
+        this.resultCode = ResultCode.SYSTEM_EXECUTION_ERROR;
+    }
+
+    public BizException(Throwable cause) {
+        super(cause);
+        this.resultCode = ResultCode.SYSTEM_EXECUTION_ERROR;
+    }
+}

+ 48 - 0
jnpf-workflow-common/src/main/java/jnpf/workflow/common/exception/ResultCode.java

@@ -0,0 +1,48 @@
+package jnpf.workflow.common.exception;
+
+import lombok.AllArgsConstructor;
+import lombok.Getter;
+import lombok.NoArgsConstructor;
+
+import java.io.Serializable;
+
+/**
+ * Unified result code enum
+ *
+ * @author JNPF Flowable@YinMai Info. Co., Ltd
+ * @version 1.0.0
+ * @since 2024/4/3 15:08
+ */
+@AllArgsConstructor
+@NoArgsConstructor
+@Getter
+public enum ResultCode implements Serializable {
+
+    SUCCESS("200", "请求成功"),
+    FAILURE("999", "请求失败"),
+
+    DELETE_SUCCESS("1101", "删除成功"),
+    DELETE_FAILURE("1102", "删除失败"),
+    DEPLOY_FAILURE("1103", "部署失败"),
+    START_FAILURE("1104", "启动失败"),
+    COMPLETE_SUCCESS("1105", "任务完成成功"),
+    COMPLETE_FAILURE("1106", "任务完成失败"),
+    RETRACT_SUCCESS("1107", "撤回成功"),
+    RETRACT_FAILURE("1108", "撤回失败"),
+    JUMP_SUCCESS("1109", "跳转成功"),
+    JUMP_FAILURE("1110", "跳转失败"),
+
+    DEPLOY_ERROR("9001", "部署错误,请检查XML格式、内容等是否有误"),
+    DEFINITION_NOT_EXIST("9002", "找不到流程模板,请重新发布该流程"),
+    INSTANCE_NOT_EXIST("9003", "实例不存在"),
+    TASK_NOT_EXIST("9004", "任务不存在"),
+    TASK_COMPLETE_ERROR("9005", "任务完成错误"),
+    TASK_JUMP_ERROR("9006", "节点跳转错误"),
+
+    SYSTEM_EXECUTION_ERROR("9901", "系统执行出错"),
+    REQUEST_PARAM_IS_NULL("9701", "请求必填参数为空");
+
+    private String code;
+    private String msg;
+
+}
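
A minimal sketch of how `BizException` and `ResultCode` are intended to be paired in service code; the task lookup below is purely illustrative, not part of this commit:

```java
// Hypothetical usage; the lookup method is a placeholder.
import jnpf.workflow.common.exception.BizException;
import jnpf.workflow.common.exception.ResultCode;

public class TaskLookupSketch {

    public String requireTask(String taskId) {
        String task = findTaskOrNull(taskId); // placeholder lookup
        if (task == null) {
            // Carries both the code ("9004") and the message for the API layer.
            throw new BizException(ResultCode.TASK_NOT_EXIST);
        }
        return task;
    }

    private String findTaskOrNull(String taskId) {
        return null; // stand-in for a real query
    }
}
```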

+ 24 - 0
jnpf-workflow-common/src/main/java/jnpf/workflow/common/model/fo/CompensateFo.java

@@ -0,0 +1,24 @@
+package jnpf.workflow.common.model.fo;
+
+import lombok.Data;
+
+import java.util.List;
+
+/**
+ * Compensation parameter class
+ *
+ * @author JNPF@YinMai Info. Co., Ltd
+ * @version 5.0.x
+ * @since 2024/6/4 11:31
+ */
+@Data
+public class CompensateFo {
+    /**
+     * Instance primary key
+     */
+    private String instanceId;
+    /**
+     * Original (source) nodes
+     */
+    private List<String> source;
+}

+ 29 - 0
jnpf-workflow-common/src/main/java/jnpf/workflow/common/model/fo/DefinitionDeleteFo.java

@@ -0,0 +1,29 @@
+package jnpf.workflow.common.model.fo;
+
+import io.swagger.v3.oas.annotations.media.Schema;
+import jakarta.validation.constraints.NotBlank;
+import lombok.Data;
+
+import java.io.Serializable;
+
+/**
+ * Process definition delete parameter class
+ *
+ * @author JNPF Flowable@YinMai Info. Co., Ltd
+ * @version 1.0.0
+ * @since 2024/4/7 11:37
+ */
+@Data
+public class DefinitionDeleteFo implements Serializable {
+    /**
+     * Engine deployment ID
+     */
+    @NotBlank(message = "Engine deployment ID must not be blank")
+    @Schema(name = "deploymentId", description = "Engine deployment ID")
+    private String deploymentId;
+    /**
+     * Whether to cascade-delete the process instances etc. under this definition
+     */
+    @Schema(name = "cascade", description = "Whether to cascade-delete the process instances etc. under this definition")
+    private Boolean cascade;
+}

+ 34 - 0
jnpf-workflow-common/src/main/java/jnpf/workflow/common/model/fo/DefinitionDeployFo.java

@@ -0,0 +1,34 @@
+package jnpf.workflow.common.model.fo;
+
+import io.swagger.v3.oas.annotations.media.Schema;
+import jakarta.validation.constraints.NotBlank;
+import lombok.Data;
+
+import java.io.Serializable;
+
+/**
+ * Process definition deploy parameter class
+ *
+ * @author JNPF Flowable@YinMai Info. Co., Ltd
+ * @version 1.0.0
+ * @since 2024/4/3 11:45
+ */
+@Data
+public class DefinitionDeployFo implements Serializable {
+    /**
+     * BPMN XML string
+     */
+    @NotBlank(message = "BPMN XML string must not be blank")
+    @Schema(name = "bpmnXml", description = "BPMN XML string")
+    private String bpmnXml;
+    /**
+     * Business name
+     */
+    @Schema(name = "name", description = "Business name")
+    private String name;
+    /**
+     * Business key
+     */
+    @Schema(name = "key", description = "Business key")
+    private String key;
+}
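
These form objects carry standard Bean Validation constraints; below is a small sketch of validating one programmatically, assuming a Bean Validation provider such as Hibernate Validator is on the classpath (the values are placeholders):

```java
// Hypothetical validation sketch; not part of this commit.
import java.util.Set;

import jakarta.validation.ConstraintViolation;
import jakarta.validation.Validation;
import jakarta.validation.Validator;

import jnpf.workflow.common.model.fo.DefinitionDeployFo;

public class DeployFoValidationSketch {

    public static void main(String[] args) {
        Validator validator = Validation.buildDefaultValidatorFactory().getValidator();

        DefinitionDeployFo fo = new DefinitionDeployFo();
        fo.setName("leave-request");      // setters generated by Lombok @Data
        // bpmnXml is left null, so the @NotBlank constraint is reported.

        Set<ConstraintViolation<DefinitionDeployFo>> violations = validator.validate(fo);
        violations.forEach(v -> System.out.println(v.getPropertyPath() + ": " + v.getMessage()));
    }
}
```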

+ 27 - 0
jnpf-workflow-common/src/main/java/jnpf/workflow/common/model/fo/FlowTargetTaskFo.java

@@ -0,0 +1,27 @@
+package jnpf.workflow.common.model.fo;
+
+import io.swagger.v3.oas.annotations.media.Schema;
+import lombok.Data;
+
+import java.io.Serializable;
+
+/**
+ * Target task of a sequence flow
+ *
+ * @author JNPF@YinMai Info. Co., Ltd
+ * @version 5.0.x
+ * @since 2024/4/17 17:36
+ */
+@Data
+public class FlowTargetTaskFo implements Serializable {
+    /**
+     * Deployment ID
+     */
+    @Schema(name = "deploymentId", description = "Deployment ID")
+    private String deploymentId;
+    /**
+     * Sequence flow key
+     */
+    @Schema(name = "flowKey", description = "Sequence flow key")
+    private String flowKey;
+}

+ 27 - 0
jnpf-workflow-common/src/main/java/jnpf/workflow/common/model/fo/InfoModel.java

@@ -0,0 +1,27 @@
+package jnpf.workflow.common.model.fo;
+
+import io.swagger.v3.oas.annotations.media.Schema;
+import lombok.Data;
+
+import java.io.Serializable;
+
+/**
+ * Node info model
+ *
+ * @author JNPF@YinMai Info. Co., Ltd
+ * @version 5.0.x
+ * @since 2024/9/30 15:09
+ */
+@Data
+public class InfoModel implements Serializable {
+    /**
+     * Deployment ID
+     */
+    @Schema(name = "deploymentId", description = "Deployment ID")
+    private String deploymentId;
+    /**
+     * Node key
+     */
+    @Schema(name = "key", description = "Node key")
+    private String key;
+}

+ 29 - 0
jnpf-workflow-common/src/main/java/jnpf/workflow/common/model/fo/InstanceDeleteFo.java

@@ -0,0 +1,29 @@
+package jnpf.workflow.common.model.fo;
+
+import io.swagger.v3.oas.annotations.media.Schema;
+import jakarta.validation.constraints.NotBlank;
+import lombok.Data;
+
+import java.io.Serializable;
+
+/**
+ * Process instance delete parameter class
+ *
+ * @author JNPF Flowable@YinMai Info. Co., Ltd
+ * @version 1.0.0
+ * @since 2024/4/7 15:55
+ */
+@Data
+public class InstanceDeleteFo implements Serializable {
+    /**
+     * Instance ID
+     */
+    @NotBlank(message = "Instance ID must not be blank")
+    @Schema(name = "instanceId", description = "Instance ID")
+    private String instanceId;
+    /**
+     * Delete reason
+     */
+    @Schema(name = "deleteReason", description = "Delete reason")
+    private String deleteReason;
+}

+ 30 - 0
jnpf-workflow-common/src/main/java/jnpf/workflow/common/model/fo/InstanceStartFo.java

@@ -0,0 +1,30 @@
+package jnpf.workflow.common.model.fo;
+
+import io.swagger.v3.oas.annotations.media.Schema;
+import jakarta.validation.constraints.NotBlank;
+import lombok.Data;
+
+import java.io.Serializable;
+import java.util.Map;
+
+/**
+ * Process instance start parameter class
+ *
+ * @author JNPF Flowable@YinMai Info. Co., Ltd
+ * @version 1.0.0
+ * @since 2024/4/7 15:14
+ */
+@Data
+public class InstanceStartFo implements Serializable {
+    /**
+     * Deployment ID
+     */
+    @NotBlank(message = "Deployment ID must not be blank")
+    @Schema(name = "deploymentId", description = "Deployment ID")
+    private String deploymentId;
+    /**
+     * Variables
+     */
+    @Schema(name = "variables", description = "Variables")
+    private Map<String, Object> variables;
+}

+ 35 - 0
jnpf-workflow-common/src/main/java/jnpf/workflow/common/model/fo/JumpFo.java

@@ -0,0 +1,35 @@
+package jnpf.workflow.common.model.fo;
+
+import io.swagger.v3.oas.annotations.media.Schema;
+import jakarta.validation.constraints.NotBlank;
+import lombok.Data;
+
+import java.io.Serializable;
+import java.util.List;
+
+/**
+ * Jump parameter class
+ *
+ * @author JNPF Flowable@YinMai Info. Co., Ltd
+ * @version 1.0.0
+ * @since 2024/4/10 11:18
+ */
+@Data
+public class JumpFo implements Serializable {
+    /**
+     * Instance ID
+     */
+    @NotBlank(message = "Instance ID must not be blank")
+    @Schema(name = "instanceId", description = "Instance ID")
+    private String instanceId;
+    /**
+     * Source node collection
+     */
+    @Schema(name = "source", description = "Source node collection")
+    private List<String> source;
+    /**
+     * Target node collection
+     */
+    @Schema(name = "target", description = "Target node collection")
+    private List<String> target;
+}

+ 38 - 0
jnpf-workflow-common/src/main/java/jnpf/workflow/common/model/fo/MoveMultiToSingleFo.java

@@ -0,0 +1,38 @@
+package jnpf.workflow.common.model.fo;
+
+import io.swagger.v3.oas.annotations.media.Schema;
+import jakarta.validation.constraints.NotBlank;
+import jakarta.validation.constraints.NotNull;
+import lombok.Data;
+
+import java.io.Serializable;
+import java.util.List;
+
+/**
+ * Parameter class for jumping from multiple nodes to a single node
+ *
+ * @author JNPF Flowable@YinMai Info. Co., Ltd
+ * @version 1.0.0
+ * @since 2024/4/9 14:23
+ */
+@Data
+public class MoveMultiToSingleFo implements Serializable {
+    /**
+     * Instance ID
+     */
+    @NotBlank(message = "Instance ID must not be blank")
+    @Schema(name = "instanceId", description = "Instance ID")
+    private String instanceId;
+    /**
+     * Current node collection
+     */
+    @NotNull(message = "Current node collection must not be null")
+    @Schema(name = "sourceKeys", description = "Current node collection")
+    private List<String> sourceKeys;
+    /**
+     * Target node
+     */
+    @NotBlank(message = "Target node must not be blank")
+    @Schema(name = "targetKey", description = "Target node")
+    private String targetKey;
+}

+ 38 - 0
jnpf-workflow-common/src/main/java/jnpf/workflow/common/model/fo/MoveSingleToMultiFo.java

@@ -0,0 +1,38 @@
+package jnpf.workflow.common.model.fo;
+
+import io.swagger.v3.oas.annotations.media.Schema;
+import jakarta.validation.constraints.NotBlank;
+import jakarta.validation.constraints.NotNull;
+import lombok.Data;
+
+import java.io.Serializable;
+import java.util.List;
+
+/**
+ * Parameter class for jumping from a single node to multiple nodes
+ *
+ * @author JNPF Flowable@YinMai Info. Co., Ltd
+ * @version 1.0.0
+ * @since 2024/4/8 16:36
+ */
+@Data
+public class MoveSingleToMultiFo implements Serializable {
+    /**
+     * Instance ID
+     */
+    @NotBlank(message = "Instance ID must not be blank")
+    @Schema(name = "instanceId", description = "Instance ID")
+    private String instanceId;
+    /**
+     * Current node
+     */
+    @NotBlank(message = "Current node must not be blank")
+    @Schema(name = "sourceKey", description = "Current node")
+    private String sourceKey;
+    /**
+     * Target node collection
+     */
+    @NotNull(message = "Target node collection must not be null")
+    @Schema(name = "targetKeys", description = "Target node collection")
+    private List<String> targetKeys;
+}

+ 29 - 0
jnpf-workflow-common/src/main/java/jnpf/workflow/common/model/fo/TaskAfterFo.java

@@ -0,0 +1,29 @@
+package jnpf.workflow.common.model.fo;
+
+import io.swagger.v3.oas.annotations.media.Schema;
+import lombok.Data;
+
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Parameter class for querying the tasks after the given nodes
+ *
+ * @author JNPF@YinMai Info. Co., Ltd
+ * @version 5.0.x
+ * @since 2024/5/8 20:03
+ */
+@Data
+public class TaskAfterFo implements Serializable {
+    /**
+     * 部署ID
+     */
+    @Schema(name = "deploymentId", description = "部署ID")
+    private String deploymentId;
+    /**
+     * 节点Key集合
+     */
+    @Schema(name = "taskKeys", description = "节点Key集合")
+    private List<String> taskKeys = new ArrayList<>();
+}

+ 29 - 0
jnpf-workflow-common/src/main/java/jnpf/workflow/common/model/fo/TaskBackFo.java

@@ -0,0 +1,29 @@
+package jnpf.workflow.common.model.fo;
+
+import io.swagger.v3.oas.annotations.media.Schema;
+import jakarta.validation.constraints.NotBlank;
+import lombok.Data;
+
+import java.io.Serializable;
+
+/**
+ * 任务退回参数类
+ *
+ * @author JNPF Flowable@YinMai Info. Co., Ltd
+ * @version 1.0.0
+ * @since 2024/4/8 16:05
+ */
+@Data
+public class TaskBackFo implements Serializable {
+    /**
+     * 任务ID
+     */
+    @NotBlank(message = "任务ID不能为空")
+    @Schema(name = "taskId", description = "任务ID")
+    private String taskId;
+    /**
+     * 目标节点ID
+     */
+    @Schema(name = "targetKey", description = "目标节点ID")
+    private String targetKey;
+}

+ 30 - 0
jnpf-workflow-common/src/main/java/jnpf/workflow/common/model/fo/TaskCompleteFo.java

@@ -0,0 +1,30 @@
+package jnpf.workflow.common.model.fo;
+
+import io.swagger.v3.oas.annotations.media.Schema;
+import jakarta.validation.constraints.NotBlank;
+import lombok.Data;
+
+import java.io.Serializable;
+import java.util.Map;
+
+/**
+ * 任务完成参数类
+ *
+ * @author JNPF Flowable@YinMai Info. Co., Ltd
+ * @version 1.0.0
+ * @since 2024/4/8 13:57
+ */
+@Data
+public class TaskCompleteFo implements Serializable {
+    /**
+     * 任务ID
+     */
+    @NotBlank(message = "任务ID不能为空")
+    @Schema(name = "taskId", description = "任务ID")
+    private String taskId;
+    /**
+     * 变量
+     */
+    @Schema(name = "variables", description = "变量")
+    private Map<String, Object> variables;
+}

+ 32 - 0
jnpf-workflow-common/src/main/java/jnpf/workflow/common/model/fo/TaskNextFo.java

@@ -0,0 +1,32 @@
+package jnpf.workflow.common.model.fo;
+
+import io.swagger.v3.oas.annotations.media.Schema;
+import lombok.Data;
+
+import java.io.Serializable;
+
+/**
+ * 下一级任务参数类
+ *
+ * @author JNPF Flowable@YinMai Info. Co., Ltd
+ * @version 1.0.0
+ * @since 2024/4/8 17:03
+ */
+@Data
+public class TaskNextFo implements Serializable {
+    /**
+     * 部署ID
+     */
+    @Schema(name = "deploymentId", description = "部署ID")
+    private String deploymentId;
+    /**
+     * 节点Key
+     */
+    @Schema(name = "taskKey", description = "节点Key")
+    private String taskKey;
+    /**
+     * 任务ID
+     */
+    @Schema(name = "taskId", description = "任务ID")
+    private String taskId;
+}

+ 32 - 0
jnpf-workflow-common/src/main/java/jnpf/workflow/common/model/fo/TaskOutgoingFo.java

@@ -0,0 +1,32 @@
+package jnpf.workflow.common.model.fo;
+
+import io.swagger.v3.oas.annotations.media.Schema;
+import lombok.Data;
+
+import java.io.Serializable;
+
+/**
+ * 查询参数类
+ *
+ * @author JNPF Flowable@YinMai Info. Co., Ltd
+ * @version 1.0.0
+ * @since 2024/4/9 9:53
+ */
+@Data
+public class TaskOutgoingFo implements Serializable {
+    /**
+     * 部署ID
+     */
+    @Schema(name = "deploymentId", description = "部署ID")
+    private String deploymentId;
+    /**
+     * 节点Key
+     */
+    @Schema(name = "taskKey", description = "节点Key")
+    private String taskKey;
+    /**
+     * 任务ID
+     */
+    @Schema(name = "taskId", description = "任务ID")
+    private String taskId;
+}

+ 32 - 0
jnpf-workflow-common/src/main/java/jnpf/workflow/common/model/fo/TaskPrevFo.java

@@ -0,0 +1,32 @@
+package jnpf.workflow.common.model.fo;
+
+import io.swagger.v3.oas.annotations.media.Schema;
+import lombok.Data;
+
+import java.io.Serializable;
+
+/**
+ * 上一级任务参数类
+ *
+ * @author JNPF@YinMai Info. Co., Ltd
+ * @version 5.0.x
+ * @since 2024/4/23 10:38
+ */
+@Data
+public class TaskPrevFo implements Serializable {
+    /**
+     * 部署ID
+     */
+    @Schema(name = "deploymentId", description = "部署ID")
+    private String deploymentId;
+    /**
+     * 节点Key
+     */
+    @Schema(name = "taskKey", description = "节点Key")
+    private String taskKey;
+    /**
+     * 任务ID
+     */
+    @Schema(name = "taskId", description = "任务ID")
+    private String taskId;
+}

+ 42 - 0
jnpf-workflow-common/src/main/java/jnpf/workflow/common/model/vo/DefinitionVo.java

@@ -0,0 +1,42 @@
+package jnpf.workflow.common.model.vo;
+
+import io.swagger.v3.oas.annotations.media.Schema;
+import lombok.Data;
+
+import java.io.Serializable;
+
+/**
+ * 流程定义VO
+ *
+ * @author JNPF Flowable@YinMai Info. Co., Ltd
+ * @version 1.0.0
+ * @since 2024/4/3 14:26
+ */
+@Data
+public class DefinitionVo implements Serializable {
+    /**
+     * 定义ID
+     */
+    @Schema(name = "id", description = "定义ID")
+    private String definitionId;
+    /**
+     * 定义名称
+     */
+    @Schema(name = "name", description = "定义名称")
+    private String definitionName;
+    /**
+     * 定义Key
+     */
+    @Schema(name = "key", description = "定义Key")
+    private String definitionKey;
+    /**
+     * 定义版本
+     */
+    @Schema(name = "version", description = "定义版本")
+    private Integer definitionVersion;
+    /**
+     * 定义部署ID
+     */
+    @Schema(name = "deploymentId", description = "定义部署ID")
+    private String deploymentId;
+}

+ 22 - 0
jnpf-workflow-common/src/main/java/jnpf/workflow/common/model/vo/DeploymentVo.java

@@ -0,0 +1,22 @@
+package jnpf.workflow.common.model.vo;
+
+import io.swagger.v3.oas.annotations.media.Schema;
+import lombok.Data;
+
+import java.io.Serializable;
+
+/**
+ * 定义部署Vo
+ *
+ * @author JNPF Flowable@YinMai Info. Co., Ltd
+ * @version 1.0.0
+ * @since 2024/4/7 9:35
+ */
+@Data
+public class DeploymentVo implements Serializable {
+    /**
+     * 部署ID
+     */
+    @Schema(name = "deploymentId", description = "部署ID")
+    private String deploymentId;
+}

+ 34 - 0
jnpf-workflow-common/src/main/java/jnpf/workflow/common/model/vo/FlowElementVo.java

@@ -0,0 +1,34 @@
+package jnpf.workflow.common.model.vo;
+
+import lombok.Data;
+
+import java.util.List;
+
+/**
+ * 流程元素Vo
+ *
+ * @author JNPF@YinMai Info. Co., Ltd
+ * @version 5.0.x
+ * @since 2024/6/11 10:33
+ */
+@Data
+public class FlowElementVo {
+    private String id;
+    private String name;
+    /**
+     * 线的源
+     */
+    private String sourceRef;
+    /**
+     * 线的目标
+     */
+    private String targetRef;
+    /**
+     * 节点进线
+     */
+    private List<String> incomingList;
+    /**
+     * 节点出线
+     */
+    private List<String> outgoingList;
+}

+ 19 - 0
jnpf-workflow-common/src/main/java/jnpf/workflow/common/model/vo/FlowVo.java

@@ -0,0 +1,19 @@
+package jnpf.workflow.common.model.vo;
+
+import lombok.Data;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * 出线树Vo
+ *
+ * @author JNPF@YinMai Info. Co., Ltd
+ * @version 5.0.x
+ * @since 2024/8/13 13:31
+ */
+@Data
+public class FlowVo {
+    private String key;
+    private List<FlowVo> children = new ArrayList<>();
+}

+ 46 - 0
jnpf-workflow-common/src/main/java/jnpf/workflow/common/model/vo/HistoricInstanceVo.java

@@ -0,0 +1,46 @@
+package jnpf.workflow.common.model.vo;
+
+import com.fasterxml.jackson.annotation.JsonFormat;
+import io.swagger.v3.oas.annotations.media.Schema;
+import lombok.Data;
+
+import java.io.Serializable;
+import java.time.LocalDateTime;
+
+/**
+ * 历史流程实例Vo
+ *
+ * @author JNPF Flowable@YinMai Info. Co., Ltd
+ * @version 1.0.0
+ * @since 2024/4/7 17:04
+ */
+@Data
+public class HistoricInstanceVo implements Serializable {
+    /**
+     * 实例ID
+     */
+    @Schema(name = "instanceId", description = "实例ID")
+    private String instanceId;
+    /**
+     * 开始时间
+     */
+    @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
+    @Schema(name = "startTime", description = "开始时间")
+    private LocalDateTime startTime;
+    /**
+     * 结束时间
+     */
+    @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
+    @Schema(name = "endTime", description = "结束时间")
+    private LocalDateTime endTime;
+    /**
+     * 耗时
+     */
+    @Schema(name = "durationInMillis", description = "耗时")
+    private Long durationInMillis;
+    /**
+     * 删除原因
+     */
+    @Schema(name = "deleteReason", description = "删除原因")
+    private String deleteReason;
+}

+ 28 - 0
jnpf-workflow-common/src/main/java/jnpf/workflow/common/model/vo/HistoricNodeVo.java

@@ -0,0 +1,28 @@
+package jnpf.workflow.common.model.vo;
+
+import lombok.Data;
+
+import java.io.Serializable;
+
+/**
+ * 历史节点Vo
+ *
+ * @author JNPF@YinMai Info. Co., Ltd
+ * @version 5.0.x
+ * @since 2024/6/17 16:06
+ */
+@Data
+public class HistoricNodeVo implements Serializable {
+    /**
+     * 任务ID
+     */
+    private String taskId;
+    /**
+     * 节点编码
+     */
+    private String code;
+    /**
+     * 开始时间
+     */
+    private Long startTime;
+}

+ 22 - 0
jnpf-workflow-common/src/main/java/jnpf/workflow/common/model/vo/InstanceVo.java

@@ -0,0 +1,22 @@
+package jnpf.workflow.common.model.vo;
+
+import io.swagger.v3.oas.annotations.media.Schema;
+import lombok.Data;
+
+import java.io.Serializable;
+
+/**
+ * 流程实例Vo
+ *
+ * @author JNPF Flowable@YinMai Info. Co., Ltd
+ * @version 1.0.0
+ * @since 2024/4/7 14:42
+ */
+@Data
+public class InstanceVo implements Serializable {
+    /**
+     * 实例ID
+     */
+    @Schema(name = "instanceId", description = "实例ID")
+    private String instanceId;
+}

+ 38 - 0
jnpf-workflow-common/src/main/java/jnpf/workflow/common/model/vo/NodeElementVo.java

@@ -0,0 +1,38 @@
+package jnpf.workflow.common.model.vo;
+
+import io.swagger.v3.oas.annotations.media.Schema;
+import lombok.Data;
+
+import java.io.Serializable;
+import java.util.List;
+
+/**
+ * 元素Vo
+ *
+ * @author JNPF Flowable@YinMai Info. Co., Ltd
+ * @version 1.0.0
+ * @since 2024/4/8 17:48
+ */
+@Data
+public class NodeElementVo implements Serializable {
+    /**
+     * 元素ID
+     */
+    @Schema(name = "id", description = "元素ID")
+    private String id;
+    /**
+     * 元素名称
+     */
+    @Schema(name = "name", description = "元素名称")
+    private String name;
+    /**
+     * 进线ID
+     */
+    @Schema(name = "incoming", description = "进线ID")
+    private List<String> incomingList;
+    /**
+     * 出线ID
+     */
+    @Schema(name = "outgoingList", description = "出线ID")
+    private List<String> outgoingList;
+}

+ 37 - 0
jnpf-workflow-common/src/main/java/jnpf/workflow/common/model/vo/TaskVo.java

@@ -0,0 +1,37 @@
+package jnpf.workflow.common.model.vo;
+
+import io.swagger.v3.oas.annotations.media.Schema;
+import lombok.Data;
+
+import java.io.Serializable;
+
+/**
+ * 流程任务VO
+ *
+ * @author JNPF Flowable@YinMai Info. Co., Ltd
+ * @version 1.0.0
+ * @since 2024/4/8 11:22
+ */
+@Data
+public class TaskVo implements Serializable {
+    /**
+     * 任务ID
+     */
+    @Schema(name = "taskId", description = "任务ID")
+    private String taskId;
+    /**
+     * 任务名称
+     */
+    @Schema(name = "taskName", description = "任务名称")
+    private String taskName;
+    /**
+     * 任务Key
+     */
+    @Schema(name = "taskKey", description = "任务Key")
+    private String taskKey;
+    /**
+     * 实例ID
+     */
+    @Schema(name = "instanceId", description = "实例ID")
+    private String instanceId;
+}

+ 52 - 0
jnpf-workflow-common/src/main/java/jnpf/workflow/common/service/IDefinitionService.java

@@ -0,0 +1,52 @@
+package jnpf.workflow.common.service;
+
+
+import jnpf.workflow.common.model.fo.DefinitionDeleteFo;
+import jnpf.workflow.common.model.fo.DefinitionDeployFo;
+import jnpf.workflow.common.model.vo.DefinitionVo;
+import jnpf.workflow.common.model.vo.DeploymentVo;
+import jnpf.workflow.common.model.vo.FlowElementVo;
+
+import java.util.List;
+
+/**
+ * 流程定义服务接口
+ *
+ * @author JNPF Flowable@YinMai Info. Co., Ltd
+ * @version 1.0.0
+ * @since 2024/4/3 11:36
+ */
+public interface IDefinitionService {
+    /**
+     * 部署流程定义
+     *
+     * @param fo {@link DefinitionDeployFo}
+     * @return {@link DeploymentVo}
+     * @since 2024/4/7 10:51
+     **/
+    DeploymentVo deployDefinition(DefinitionDeployFo fo);
+
+    /**
+     * 列表查询流程定义
+     *
+     * @return {@link List<DefinitionVo>}
+     * @since 2024/4/7 11:23
+     **/
+    List<DefinitionVo> listDefinition();
+
+    /**
+     * 删除流程定义
+     *
+     * @param fo {@link DefinitionDeleteFo}
+     * @return {@link boolean}
+     * @since 2024/4/7 13:51
+     **/
+    boolean deleteDefinition(DefinitionDeleteFo fo);
+
+    /**
+     * 获取流程元素
+     *
+     * @param deploymentId 部署ID
+     */
+    List<FlowElementVo> getStructure(String deploymentId);
+}
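
A minimal caller-side sketch of this interface. The `definitionService` field and the sample name/key values are assumptions for illustration, and the FO setters are taken to be the Lombok-generated counterparts of the getters used in DefinitionServiceImpl further below:

    import java.util.List;
    import jnpf.workflow.common.model.fo.DefinitionDeleteFo;
    import jnpf.workflow.common.model.fo.DefinitionDeployFo;
    import jnpf.workflow.common.model.vo.DeploymentVo;
    import jnpf.workflow.common.model.vo.FlowElementVo;
    import jnpf.workflow.common.service.IDefinitionService;

    public class DefinitionUsageSketch {
        private final IDefinitionService definitionService;

        public DefinitionUsageSketch(IDefinitionService definitionService) {
            this.definitionService = definitionService;
        }

        public void roundTrip(String bpmnXml) {
            // Deploy a definition from a BPMN 2.0 XML string (name/key are hypothetical).
            DefinitionDeployFo deployFo = new DefinitionDeployFo();
            deployFo.setName("leave-process");
            deployFo.setKey("leave");
            deployFo.setBpmnXml(bpmnXml);
            DeploymentVo deployment = definitionService.deployDefinition(deployFo);

            // Read back the element structure of the deployed process.
            List<FlowElementVo> structure = definitionService.getStructure(deployment.getDeploymentId());
            System.out.println("elements: " + structure.size());

            // Delete the deployment (cascade defaults to true in the implementation).
            DefinitionDeleteFo deleteFo = new DefinitionDeleteFo();
            deleteFo.setDeploymentId(deployment.getDeploymentId());
            definitionService.deleteDefinition(deleteFo);
        }
    }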

+ 43 - 0
jnpf-workflow-common/src/main/java/jnpf/workflow/common/service/IInstanceService.java

@@ -0,0 +1,43 @@
+package jnpf.workflow.common.service;
+
+
+import jnpf.workflow.common.model.fo.InstanceDeleteFo;
+import jnpf.workflow.common.model.fo.InstanceStartFo;
+import jnpf.workflow.common.model.vo.HistoricInstanceVo;
+import jnpf.workflow.common.model.vo.InstanceVo;
+
+/**
+ * 流程实例服务接口
+ *
+ * @author JNPF Flowable@YinMai Info. Co., Ltd
+ * @version 1.0.0
+ * @since 2024/4/7 14:31
+ */
+public interface IInstanceService {
+    /**
+     * 根据ID启动实例
+     *
+     * @param fo {@link InstanceStartFo}
+     * @return {@link InstanceVo}
+     * @since 2024/4/7 15:44
+     **/
+    InstanceVo startById(InstanceStartFo fo);
+
+    /**
+     * 获取历史流程实例
+     *
+     * @param processInstanceId {@link String}
+     * @return {@link HistoricInstanceVo}
+     * @since 2024/4/7 17:30
+     **/
+    HistoricInstanceVo getHistoricProcessInstance(String processInstanceId);
+
+    /**
+     * 删除流程实例
+     *
+     * @param fo {@link InstanceDeleteFo}
+     * @return {@link boolean}
+     * @since 2024/4/7 16:07
+     **/
+    boolean deleteInstance(InstanceDeleteFo fo);
+}
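
A caller-side sketch of the instance service. The `instanceService` field and the variable values are assumptions for illustration; the FO setters mirror the getters used in InstanceServiceImpl further below:

    import java.util.HashMap;
    import java.util.Map;
    import jnpf.workflow.common.model.fo.InstanceDeleteFo;
    import jnpf.workflow.common.model.fo.InstanceStartFo;
    import jnpf.workflow.common.model.vo.HistoricInstanceVo;
    import jnpf.workflow.common.model.vo.InstanceVo;
    import jnpf.workflow.common.service.IInstanceService;

    public class InstanceUsageSketch {
        private final IInstanceService instanceService;

        public InstanceUsageSketch(IInstanceService instanceService) {
            this.instanceService = instanceService;
        }

        public void roundTrip(String deploymentId) {
            // Start an instance of the definition under the given deployment,
            // optionally passing process variables (values here are hypothetical).
            InstanceStartFo startFo = new InstanceStartFo();
            startFo.setDeploymentId(deploymentId);
            Map<String, Object> variables = new HashMap<>();
            variables.put("applicant", "zhangsan");
            startFo.setVariables(variables);
            InstanceVo instance = instanceService.startById(startFo);

            // Query the historic record of the instance (start/end time, duration, delete reason).
            HistoricInstanceVo history = instanceService.getHistoricProcessInstance(instance.getInstanceId());
            System.out.println("duration: " + history.getDurationInMillis());

            // Remove the running instance with a delete reason.
            InstanceDeleteFo deleteFo = new InstanceDeleteFo();
            deleteFo.setInstanceId(instance.getInstanceId());
            deleteFo.setDeleteReason("cleanup");
            instanceService.deleteInstance(deleteFo);
        }
    }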

+ 197 - 0
jnpf-workflow-common/src/main/java/jnpf/workflow/common/service/ITaskService.java

@@ -0,0 +1,197 @@
+package jnpf.workflow.common.service;
+
+
+import jnpf.workflow.common.model.fo.*;
+import jnpf.workflow.common.model.vo.FlowVo;
+import jnpf.workflow.common.model.vo.HistoricNodeVo;
+import jnpf.workflow.common.model.vo.NodeElementVo;
+import jnpf.workflow.common.model.vo.TaskVo;
+
+import java.util.List;
+
+/**
+ * 流程任务服务接口
+ *
+ * @author JNPF Flowable@YinMai Info. Co., Ltd
+ * @version 1.0.0
+ * @since 2024/4/8 11:11
+ */
+public interface ITaskService {
+
+    /**
+     * 根据流程实例ID获取任务
+     *
+     * @param instanceId {@link String}
+     * @return {@link List<TaskVo>}
+     * @since 2024/4/8 14:15
+     **/
+    List<TaskVo> getTask(String instanceId);
+
+    /**
+     * 完成任务
+     *
+     * @param fo {@link TaskCompleteFo}
+     * @return {@link boolean}
+     * @since 2024/4/8 14:21
+     **/
+    boolean complete(TaskCompleteFo fo);
+
+    /**
+     * 单节点跳转多节点
+     *
+     * @param fo {@link MoveSingleToMultiFo}
+     * @return {@link boolean}
+     * @since 2024/4/9 14:48
+     **/
+    boolean moveSingleToMulti(MoveSingleToMultiFo fo);
+
+    /**
+     * 多节点跳转单节点
+     *
+     * @param fo {@link MoveMultiToSingleFo}
+     * @return {@link boolean}
+     * @since 2024/4/9 14:48
+     **/
+    boolean moveMultiToSingle(MoveMultiToSingleFo fo);
+
+    /**
+     * 节点跳转
+     *
+     * @param fo {@link JumpFo}
+     * @return {@link boolean}
+     * @since 2024/4/10 11:35
+     **/
+    boolean jump(JumpFo fo);
+
+    /**
+     * 获取可回退的节点ID
+     *
+     * @param taskId {@link String}
+     * @return {@link List<String>}
+     * @since 2024/4/8 15:39
+     **/
+    List<String> getFallbacks(String taskId);
+
+    /**
+     * 回退目标节点
+     *
+     * @param fo {@link TaskBackFo}
+     * @return {@link List<String>}
+     * @since 2024/4/8 16:11
+     **/
+    List<String> back(TaskBackFo fo);
+
+    /**
+     * 获取上一级任务节点ID集合,用于自动处置的相邻选项
+     *
+     * @param fo {@link TaskPrevFo}
+     * @return {@link List<String>}
+     * @since 2024/4/8 16:30
+     **/
+    List<String> getPrevUserTask(TaskPrevFo fo);
+
+    /**
+     * 获取下一级任务节点集合
+     *
+     * @param fo {@link TaskNextFo}
+     * @return {@link List< NodeElementVo >}
+     * @since 2024/4/9 9:20
+     **/
+    List<NodeElementVo> getNextUserTask(TaskNextFo fo);
+
+    /**
+     * 获取线之后的任务节点
+     *
+     * @param fo {@link FlowTargetTaskFo}
+     * @return {@link String}
+     * @since 2024/4/17 17:45
+     **/
+    List<String> getTaskKeyAfterFlow(FlowTargetTaskFo fo);
+
+    /**
+     * 撤回
+     *
+     * @param taskId {@link String}
+     * @return {@link boolean}
+     * @since 2024/4/9 9:30
+     **/
+    boolean retract(String taskId);
+
+    /**
+     * 获取出线Key集合(若出线的出口为网关,则一并获取网关的出线)
+     *
+     * @param fo {@link TaskOutgoingFo}
+     * @return {@link List<String>}
+     * @since 2024/4/9 10:54
+     **/
+    List<String> getOutgoingFlows(TaskOutgoingFo fo);
+
+    /**
+     * 获取出线
+     *
+     * @param fo 参数
+     */
+    List<FlowVo> getOutgoing(TaskOutgoingFo fo);
+
+    /**
+     * 获取完成的节点Key
+     *
+     * @param instanceId {@link String}
+     * @return {@link List<String>}
+     * @since 2024/4/9 13:51
+     **/
+    List<String> getKeysOfFinished(String instanceId);
+
+    /**
+     * 获取进线的Key
+     *
+     * @param taskId {@link String}
+     * @return {@link List<String>}
+     * @since 2024/4/9 13:56
+     **/
+    List<String> getIncomingFlows(String taskId);
+
+    /**
+     * 获取未经过的节点
+     *
+     * @param instanceId {@link String}
+     * @return {@link List<String>}
+     * @since 2024/4/29 10:01
+     **/
+    List<String> getToBePass(String instanceId);
+
+    /**
+     * 获取节点的后续节点
+     *
+     * @param fo 参数
+     */
+    List<String> getAfter(TaskAfterFo fo);
+
+    /**
+     * 异常补偿
+     *
+     * @param fo 参数
+     */
+    List<TaskVo> compensate(CompensateFo fo);
+
+    /**
+     * 获取历史节点
+     *
+     * @param instanceId 实例主键
+     */
+    List<HistoricNodeVo> getHistoric(String instanceId);
+
+    /**
+     * 获取历史结束节点
+     *
+     * @param instanceId 实例主键
+     */
+    List<String> getHistoricEnd(String instanceId);
+
+    /**
+     * 获取元素信息
+     *
+     * @param model 参数
+     */
+    NodeElementVo getElementInfo(InfoModel model);
+}
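
A caller-side sketch covering the most common task operations (list, complete, fall back). The `workflowTaskService` field and the variable values are assumptions for illustration; the Flowable-backed implementation appears later as TaskServiceImpl:

    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;
    import jnpf.workflow.common.model.fo.TaskBackFo;
    import jnpf.workflow.common.model.fo.TaskCompleteFo;
    import jnpf.workflow.common.model.vo.TaskVo;
    import jnpf.workflow.common.service.ITaskService;

    public class TaskUsageSketch {
        private final ITaskService workflowTaskService;

        public TaskUsageSketch(ITaskService workflowTaskService) {
            this.workflowTaskService = workflowTaskService;
        }

        public void approveFirstTask(String instanceId) {
            // List the active tasks of the instance and complete the first one with variables.
            List<TaskVo> tasks = workflowTaskService.getTask(instanceId);
            if (tasks.isEmpty()) {
                return;
            }
            TaskCompleteFo completeFo = new TaskCompleteFo();
            completeFo.setTaskId(tasks.get(0).getTaskId());
            Map<String, Object> variables = new HashMap<>();
            variables.put("approved", true);
            completeFo.setVariables(variables);
            workflowTaskService.complete(completeFo);
        }

        public void sendBack(String taskId) {
            // Ask the engine which nodes the task may fall back to, then return to the first one.
            List<String> fallbacks = workflowTaskService.getFallbacks(taskId);
            if (fallbacks.isEmpty()) {
                return;
            }
            TaskBackFo backFo = new TaskBackFo();
            backFo.setTaskId(taskId);
            backFo.setTargetKey(fallbacks.get(0));
            workflowTaskService.back(backFo);
        }
    }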

+ 8 - 0
jnpf-workflow-common/src/main/java/jnpf/workflow/common/util/FlowUtil.java

@@ -0,0 +1,8 @@
+package jnpf.workflow.common.util;
+
+public class FlowUtil {
+
+    public static boolean isDM(String url) {
+        return url != null && url.startsWith("jdbc:dm");
+
+    }
+}
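
FlowUtil.isDM is a JDBC-URL prefix check used to detect the DM (Dameng) database. A quick usage sketch, with hypothetical connection strings:

    // jdbc:dm URLs are recognised, anything else is not.
    boolean dameng = FlowUtil.isDM("jdbc:dm://127.0.0.1:5236");        // true
    boolean mysql = FlowUtil.isDM("jdbc:mysql://127.0.0.1:3306/jnpf"); // false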

BIN
jnpf-workflow-common/target/classes/jnpf/workflow/common/exception/BizException.class


BIN
jnpf-workflow-common/target/classes/jnpf/workflow/common/exception/ResultCode.class


BIN
jnpf-workflow-common/target/classes/jnpf/workflow/common/model/fo/CompensateFo.class


BIN
jnpf-workflow-common/target/classes/jnpf/workflow/common/model/fo/DefinitionDeleteFo.class


BIN
jnpf-workflow-common/target/classes/jnpf/workflow/common/model/fo/DefinitionDeployFo.class


BIN
jnpf-workflow-common/target/classes/jnpf/workflow/common/model/fo/FlowTargetTaskFo.class


BIN
jnpf-workflow-common/target/classes/jnpf/workflow/common/model/fo/InfoModel.class


BIN
jnpf-workflow-common/target/classes/jnpf/workflow/common/model/fo/InstanceDeleteFo.class


BIN
jnpf-workflow-common/target/classes/jnpf/workflow/common/model/fo/InstanceStartFo.class


BIN
jnpf-workflow-common/target/classes/jnpf/workflow/common/model/fo/JumpFo.class


BIN
jnpf-workflow-common/target/classes/jnpf/workflow/common/model/fo/MoveMultiToSingleFo.class


BIN
jnpf-workflow-common/target/classes/jnpf/workflow/common/model/fo/MoveSingleToMultiFo.class


BIN
jnpf-workflow-common/target/classes/jnpf/workflow/common/model/fo/TaskAfterFo.class


BIN
jnpf-workflow-common/target/classes/jnpf/workflow/common/model/fo/TaskBackFo.class


BIN
jnpf-workflow-common/target/classes/jnpf/workflow/common/model/fo/TaskCompleteFo.class


BIN
jnpf-workflow-common/target/classes/jnpf/workflow/common/model/fo/TaskNextFo.class


BIN
jnpf-workflow-common/target/classes/jnpf/workflow/common/model/fo/TaskOutgoingFo.class


BIN
jnpf-workflow-common/target/classes/jnpf/workflow/common/model/fo/TaskPrevFo.class


BIN
jnpf-workflow-common/target/classes/jnpf/workflow/common/model/vo/DefinitionVo.class


BIN
jnpf-workflow-common/target/classes/jnpf/workflow/common/model/vo/DeploymentVo.class


BIN
jnpf-workflow-common/target/classes/jnpf/workflow/common/model/vo/FlowElementVo.class


BIN
jnpf-workflow-common/target/classes/jnpf/workflow/common/model/vo/FlowVo.class


BIN
jnpf-workflow-common/target/classes/jnpf/workflow/common/model/vo/HistoricInstanceVo.class


BIN
jnpf-workflow-common/target/classes/jnpf/workflow/common/model/vo/HistoricNodeVo.class


BIN
jnpf-workflow-common/target/classes/jnpf/workflow/common/model/vo/InstanceVo.class


BIN
jnpf-workflow-common/target/classes/jnpf/workflow/common/model/vo/NodeElementVo.class


BIN
jnpf-workflow-common/target/classes/jnpf/workflow/common/model/vo/TaskVo.class


BIN
jnpf-workflow-common/target/classes/jnpf/workflow/common/service/IDefinitionService.class


BIN
jnpf-workflow-common/target/classes/jnpf/workflow/common/service/IInstanceService.class


BIN
jnpf-workflow-common/target/classes/jnpf/workflow/common/service/ITaskService.class


BIN
jnpf-workflow-common/target/classes/jnpf/workflow/common/util/FlowUtil.class


BIN
jnpf-workflow-common/target/jnpf-workflow-common-1.0.0-RELEASE.jar


+ 3 - 0
jnpf-workflow-common/target/maven-archiver/pom.properties

@@ -0,0 +1,3 @@
+artifactId=jnpf-workflow-common
+groupId=com.jnpf
+version=1.0.0-RELEASE

+ 31 - 0
jnpf-workflow-common/target/maven-status/maven-compiler-plugin/compile/default-compile/createdFiles.lst

@@ -0,0 +1,31 @@
+jnpf\workflow\common\model\vo\TaskVo.class
+jnpf\workflow\common\model\fo\InstanceDeleteFo.class
+jnpf\workflow\common\model\fo\TaskNextFo.class
+jnpf\workflow\common\model\vo\FlowVo.class
+jnpf\workflow\common\service\IDefinitionService.class
+jnpf\workflow\common\model\fo\TaskBackFo.class
+jnpf\workflow\common\model\fo\DefinitionDeployFo.class
+jnpf\workflow\common\model\fo\MoveSingleToMultiFo.class
+jnpf\workflow\common\model\fo\InfoModel.class
+jnpf\workflow\common\model\vo\FlowElementVo.class
+jnpf\workflow\common\model\fo\TaskOutgoingFo.class
+jnpf\workflow\common\model\vo\DeploymentVo.class
+jnpf\workflow\common\model\fo\TaskAfterFo.class
+jnpf\workflow\common\model\fo\InstanceStartFo.class
+jnpf\workflow\common\model\vo\HistoricNodeVo.class
+jnpf\workflow\common\service\ITaskService.class
+jnpf\workflow\common\model\fo\FlowTargetTaskFo.class
+jnpf\workflow\common\model\vo\InstanceVo.class
+jnpf\workflow\common\service\IInstanceService.class
+jnpf\workflow\common\model\fo\JumpFo.class
+jnpf\workflow\common\exception\ResultCode.class
+jnpf\workflow\common\model\vo\HistoricInstanceVo.class
+jnpf\workflow\common\model\vo\DefinitionVo.class
+jnpf\workflow\common\model\fo\TaskCompleteFo.class
+jnpf\workflow\common\util\FlowUtil.class
+jnpf\workflow\common\model\fo\CompensateFo.class
+jnpf\workflow\common\model\fo\DefinitionDeleteFo.class
+jnpf\workflow\common\model\fo\MoveMultiToSingleFo.class
+jnpf\workflow\common\model\fo\TaskPrevFo.class
+jnpf\workflow\common\exception\BizException.class
+jnpf\workflow\common\model\vo\NodeElementVo.class

+ 31 - 0
jnpf-workflow-common/target/maven-status/maven-compiler-plugin/compile/default-compile/inputFiles.lst

@@ -0,0 +1,31 @@
+C:\Users\zhaojinyu\Desktop\USKY\jnpf6.0\jnpf-workflow-core-v6.x\jnpf-workflow-common\src\main\java\jnpf\workflow\common\exception\BizException.java
+C:\Users\zhaojinyu\Desktop\USKY\jnpf6.0\jnpf-workflow-core-v6.x\jnpf-workflow-common\src\main\java\jnpf\workflow\common\exception\ResultCode.java
+C:\Users\zhaojinyu\Desktop\USKY\jnpf6.0\jnpf-workflow-core-v6.x\jnpf-workflow-common\src\main\java\jnpf\workflow\common\model\fo\CompensateFo.java
+C:\Users\zhaojinyu\Desktop\USKY\jnpf6.0\jnpf-workflow-core-v6.x\jnpf-workflow-common\src\main\java\jnpf\workflow\common\model\fo\DefinitionDeleteFo.java
+C:\Users\zhaojinyu\Desktop\USKY\jnpf6.0\jnpf-workflow-core-v6.x\jnpf-workflow-common\src\main\java\jnpf\workflow\common\model\fo\DefinitionDeployFo.java
+C:\Users\zhaojinyu\Desktop\USKY\jnpf6.0\jnpf-workflow-core-v6.x\jnpf-workflow-common\src\main\java\jnpf\workflow\common\model\fo\FlowTargetTaskFo.java
+C:\Users\zhaojinyu\Desktop\USKY\jnpf6.0\jnpf-workflow-core-v6.x\jnpf-workflow-common\src\main\java\jnpf\workflow\common\model\fo\InfoModel.java
+C:\Users\zhaojinyu\Desktop\USKY\jnpf6.0\jnpf-workflow-core-v6.x\jnpf-workflow-common\src\main\java\jnpf\workflow\common\model\fo\InstanceDeleteFo.java
+C:\Users\zhaojinyu\Desktop\USKY\jnpf6.0\jnpf-workflow-core-v6.x\jnpf-workflow-common\src\main\java\jnpf\workflow\common\model\fo\InstanceStartFo.java
+C:\Users\zhaojinyu\Desktop\USKY\jnpf6.0\jnpf-workflow-core-v6.x\jnpf-workflow-common\src\main\java\jnpf\workflow\common\model\fo\JumpFo.java
+C:\Users\zhaojinyu\Desktop\USKY\jnpf6.0\jnpf-workflow-core-v6.x\jnpf-workflow-common\src\main\java\jnpf\workflow\common\model\fo\MoveMultiToSingleFo.java
+C:\Users\zhaojinyu\Desktop\USKY\jnpf6.0\jnpf-workflow-core-v6.x\jnpf-workflow-common\src\main\java\jnpf\workflow\common\model\fo\MoveSingleToMultiFo.java
+C:\Users\zhaojinyu\Desktop\USKY\jnpf6.0\jnpf-workflow-core-v6.x\jnpf-workflow-common\src\main\java\jnpf\workflow\common\model\fo\TaskAfterFo.java
+C:\Users\zhaojinyu\Desktop\USKY\jnpf6.0\jnpf-workflow-core-v6.x\jnpf-workflow-common\src\main\java\jnpf\workflow\common\model\fo\TaskBackFo.java
+C:\Users\zhaojinyu\Desktop\USKY\jnpf6.0\jnpf-workflow-core-v6.x\jnpf-workflow-common\src\main\java\jnpf\workflow\common\model\fo\TaskCompleteFo.java
+C:\Users\zhaojinyu\Desktop\USKY\jnpf6.0\jnpf-workflow-core-v6.x\jnpf-workflow-common\src\main\java\jnpf\workflow\common\model\fo\TaskNextFo.java
+C:\Users\zhaojinyu\Desktop\USKY\jnpf6.0\jnpf-workflow-core-v6.x\jnpf-workflow-common\src\main\java\jnpf\workflow\common\model\fo\TaskOutgoingFo.java
+C:\Users\zhaojinyu\Desktop\USKY\jnpf6.0\jnpf-workflow-core-v6.x\jnpf-workflow-common\src\main\java\jnpf\workflow\common\model\fo\TaskPrevFo.java
+C:\Users\zhaojinyu\Desktop\USKY\jnpf6.0\jnpf-workflow-core-v6.x\jnpf-workflow-common\src\main\java\jnpf\workflow\common\model\vo\DefinitionVo.java
+C:\Users\zhaojinyu\Desktop\USKY\jnpf6.0\jnpf-workflow-core-v6.x\jnpf-workflow-common\src\main\java\jnpf\workflow\common\model\vo\DeploymentVo.java
+C:\Users\zhaojinyu\Desktop\USKY\jnpf6.0\jnpf-workflow-core-v6.x\jnpf-workflow-common\src\main\java\jnpf\workflow\common\model\vo\FlowElementVo.java
+C:\Users\zhaojinyu\Desktop\USKY\jnpf6.0\jnpf-workflow-core-v6.x\jnpf-workflow-common\src\main\java\jnpf\workflow\common\model\vo\FlowVo.java
+C:\Users\zhaojinyu\Desktop\USKY\jnpf6.0\jnpf-workflow-core-v6.x\jnpf-workflow-common\src\main\java\jnpf\workflow\common\model\vo\HistoricInstanceVo.java
+C:\Users\zhaojinyu\Desktop\USKY\jnpf6.0\jnpf-workflow-core-v6.x\jnpf-workflow-common\src\main\java\jnpf\workflow\common\model\vo\HistoricNodeVo.java
+C:\Users\zhaojinyu\Desktop\USKY\jnpf6.0\jnpf-workflow-core-v6.x\jnpf-workflow-common\src\main\java\jnpf\workflow\common\model\vo\InstanceVo.java
+C:\Users\zhaojinyu\Desktop\USKY\jnpf6.0\jnpf-workflow-core-v6.x\jnpf-workflow-common\src\main\java\jnpf\workflow\common\model\vo\NodeElementVo.java
+C:\Users\zhaojinyu\Desktop\USKY\jnpf6.0\jnpf-workflow-core-v6.x\jnpf-workflow-common\src\main\java\jnpf\workflow\common\model\vo\TaskVo.java
+C:\Users\zhaojinyu\Desktop\USKY\jnpf6.0\jnpf-workflow-core-v6.x\jnpf-workflow-common\src\main\java\jnpf\workflow\common\service\IDefinitionService.java
+C:\Users\zhaojinyu\Desktop\USKY\jnpf6.0\jnpf-workflow-core-v6.x\jnpf-workflow-common\src\main\java\jnpf\workflow\common\service\IInstanceService.java
+C:\Users\zhaojinyu\Desktop\USKY\jnpf6.0\jnpf-workflow-core-v6.x\jnpf-workflow-common\src\main\java\jnpf\workflow\common\service\ITaskService.java
+C:\Users\zhaojinyu\Desktop\USKY\jnpf6.0\jnpf-workflow-core-v6.x\jnpf-workflow-common\src\main\java\jnpf\workflow\common\util\FlowUtil.java

+ 133 - 0
jnpf-workflow-flowable/pom.xml

@@ -0,0 +1,133 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+    <parent>
+        <groupId>com.jnpf</groupId>
+        <artifactId>jnpf-workflow-core</artifactId>
+        <version>1.0.0-RELEASE</version>
+    </parent>
+
+    <artifactId>jnpf-workflow-flowable</artifactId>
+
+    <dependencies>
+        <dependency>
+            <groupId>org.flowable</groupId>
+            <artifactId>flowable-spring-boot-starter</artifactId>
+            <version>${flowable.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>com.jnpf</groupId>
+            <artifactId>jnpf-workflow-common</artifactId>
+            <version>${project.version}</version>
+        </dependency>
+    </dependencies>
+
+
+    <profiles>
+
+        <profile>
+            <id>encrypted</id>
+            <build>
+                <plugins>
+                    <!--复制驱动包, 供后续加密使用-->
+                    <plugin>
+                        <groupId>org.apache.maven.plugins</groupId>
+                        <artifactId>maven-dependency-plugin</artifactId>
+                        <executions>
+                            <execution>
+                                <id>copy-dependencies</id>
+                                <phase>package</phase>
+                                <goals>
+                                    <goal>copy-dependencies</goal>
+                                </goals>
+                                <configuration>
+                                    <includeGroupIds>org.flowable,com.jnpf</includeGroupIds>
+                                    <includeArtifactIds>jnpf-workflow-common,flowable-bpmn-model,flowable-engine-common,flowable-engine,flowable-task-service-api,flowable-variable-service</includeArtifactIds>
+                                    <outputDirectory>${project.build.directory}/copylib</outputDirectory>
+                                </configuration>
+                            </execution>
+                        </executions>
+                    </plugin>
+                    <!--复制allatori配置重写配置中的包路径-->
+                    <plugin>
+                        <groupId>org.apache.maven.plugins</groupId>
+                        <artifactId>maven-resources-plugin</artifactId>
+                        <executions>
+                            <execution>
+                                <id>copy-and-filter-allatori-config</id>
+                                <phase>package</phase>
+                                <goals>
+                                    <goal>copy-resources</goal>
+                                </goals>
+                                <configuration>
+                                    <outputDirectory>${project.build.directory}</outputDirectory>
+                                    <resources>
+                                        <resource>
+                                            <directory>${project.basedir}/../allatori</directory>
+                                            <includes>
+                                                <include>allatori.xml</include>
+                                            </includes>
+                                            <filtering>true</filtering>
+                                        </resource>
+                                    </resources>
+                                </configuration>
+                            </execution>
+                        </executions>
+                    </plugin>
+                    <!--Jar混淆-->
+                    <plugin>
+                        <groupId>org.codehaus.mojo</groupId>
+                        <artifactId>exec-maven-plugin</artifactId>
+                        <executions>
+                            <execution>
+                                <id>run-allatori</id>
+                                <phase>package</phase>
+                                <goals>
+                                    <goal>exec</goal>
+                                </goals>
+                            </execution>
+                        </executions>
+                        <configuration>
+                            <executable>java</executable>
+                            <arguments>
+                                <argument>-Xms128m</argument>
+                                <argument>-Xmx512m</argument>
+                                <argument>-jar</argument>
+                                <!-- Copy allatori.jar to 'allatori' directory to use the commented line -->
+                                <argument>${project.basedir}/../allatori/allatori.jar</argument>
+                                <!-- <argument>${basedir}/allatori/allatori.jar</argument> -->
+                                <argument>${project.build.directory}/allatori.xml</argument>
+                            </arguments>
+                        </configuration>
+                    </plugin>
+                    <!--Jar加密-->
+                    <plugin>
+                        <groupId>com.idea-aedi</groupId>
+                        <artifactId>class-winter-maven-plugin</artifactId>
+                        <!-- 相关配置 -->
+                        <configuration>
+                            <!--需要处理的包名-->
+                            <includePrefix>jnpf.workflow</includePrefix>
+                            <!--直接覆盖原文件-->
+                            <finalName>${build.finalName}</finalName>
+                            <!--编译中依赖的第三方包路径-->
+                            <supportFile>${project.build.directory}/copylib</supportFile>
+                            <!--检查启动参数-->
+                            <jvmArgCheck>-XX:+DisableAttachMechanism</jvmArgCheck>
+                        </configuration>
+                        <executions>
+                            <execution>
+                                <phase>package</phase>
+                                <goals>
+                                    <goal>class-winter</goal>
+                                </goals>
+                            </execution>
+                        </executions>
+                    </plugin>
+                </plugins>
+            </build>
+        </profile>
+    </profiles>
+</project>

+ 145 - 0
jnpf-workflow-flowable/src/main/java/jnpf/workflow/flowable/cmd/JumpCmd.java

@@ -0,0 +1,145 @@
+package jnpf.workflow.flowable.cmd;
+
+import cn.hutool.core.collection.CollectionUtil;
+import org.flowable.bpmn.model.BpmnModel;
+import org.flowable.bpmn.model.FlowNode;
+import org.flowable.common.engine.impl.interceptor.Command;
+import org.flowable.common.engine.impl.interceptor.CommandContext;
+import org.flowable.engine.RuntimeService;
+import org.flowable.engine.history.HistoricActivityInstance;
+import org.flowable.engine.impl.HistoricActivityInstanceQueryImpl;
+import org.flowable.engine.impl.persistence.entity.ExecutionEntity;
+import org.flowable.engine.impl.persistence.entity.ExecutionEntityManager;
+import org.flowable.engine.impl.persistence.entity.HistoricActivityInstanceEntity;
+import org.flowable.engine.impl.util.CommandContextUtil;
+import org.flowable.engine.runtime.Execution;
+import org.flowable.task.api.history.HistoricTaskInstance;
+import org.flowable.task.service.HistoricTaskService;
+import org.flowable.task.service.impl.HistoricTaskInstanceQueryImpl;
+import org.flowable.task.service.impl.persistence.entity.HistoricTaskInstanceEntity;
+import org.flowable.variable.service.VariableService;
+import org.flowable.variable.service.impl.persistence.entity.VariableInstanceEntity;
+
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+
+/**
+ * 跳转命令类
+ * 参考:https://blog.csdn.net/zhsp419/article/details/114264451
+ *
+ * @author JNPF Flowable@YinMai Info. Co., Ltd
+ * @version 1.0.0
+ * @since 2024/4/9 17:45
+ */
+public class JumpCmd implements Command<Void> {
+    private final String processInstanceId;
+
+    private final List<String> sourceTaskDefIdList;
+    private final List<String> targetFlowNodeIdList;
+
+    private final String deleteReason;
+
+    private final BpmnModel bpmnModel;
+    private final RuntimeService runtimeService;
+
+    /**
+     * 保存撤回节点的变量map
+     */
+    private final Map<String, List<VariableInstanceEntity>> varMap = new ConcurrentHashMap<>();
+
+    public JumpCmd(String processInstanceId, List<String> sourceTaskDefIdList, List<String> targetFlowNodeIdList,
+                   String deleteReason, BpmnModel bpmnModel, RuntimeService runtimeService) {
+        this.processInstanceId = processInstanceId;
+        this.sourceTaskDefIdList = sourceTaskDefIdList;
+        this.deleteReason = deleteReason;
+        this.targetFlowNodeIdList = targetFlowNodeIdList;
+        this.bpmnModel = bpmnModel;
+        this.runtimeService = runtimeService;
+    }
+
+    @Override
+    public Void execute(CommandContext commandContext) {
+        ExecutionEntityManager executionEntityManager = CommandContextUtil.getExecutionEntityManager();
+        // 处理act_ru_execution
+        handleExecution(commandContext);
+        // 处理act_hi_actinst
+        handleActInst(commandContext);
+
+        targetFlowNodeIdList.forEach(targetId -> {
+            FlowNode flowNode = (FlowNode) bpmnModel.getFlowElement(targetId);
+            // 创建子执行流,开启任务
+            ExecutionEntity processExecution = executionEntityManager.findById(processInstanceId);
+            ExecutionEntity childExecution = executionEntityManager.createChildExecution(processExecution);
+            childExecution.setCurrentFlowElement(flowNode);
+
+            // 设置执行变量
+            VariableService variableService = CommandContextUtil.getVariableService();
+            List<VariableInstanceEntity> variableInstanceEntities = varMap.get(flowNode.getId());
+            if (CollectionUtil.isNotEmpty(variableInstanceEntities)) {
+                variableInstanceEntities.forEach(var -> {
+                    var.setExecutionId(childExecution.getId());
+                    variableService.insertVariableInstance(var);
+                });
+            }
+            executionEntityManager.insert(childExecution);
+            // 交给引擎流转
+            CommandContextUtil.getAgenda().planContinueProcessOperation(childExecution);
+        });
+        return null;
+    }
+
+    private void handleActInst(CommandContext commandContext) {
+        for (String str : sourceTaskDefIdList) {
+            HistoricActivityInstanceQueryImpl query = new HistoricActivityInstanceQueryImpl()
+                    .activityId(str).processInstanceId(processInstanceId).unfinished();
+            List<HistoricActivityInstance> activityInstances = CommandContextUtil.getHistoricActivityInstanceEntityManager()
+                    .findHistoricActivityInstancesByQueryCriteria(query);
+            for (HistoricActivityInstance activity : activityInstances) {
+                HistoricActivityInstanceEntity activityEntity = (HistoricActivityInstanceEntity) activity;
+                // 修改act_hi_actinst表
+                activityEntity.setDeleted(true);
+                activityEntity.setDeleteReason(deleteReason);
+                CommandContextUtil.getHistoricActivityInstanceEntityManager().update(activityEntity);
+            }
+        }
+    }
+
+    private void handleExecution(CommandContext commandContext) {
+        ExecutionEntityManager executionEntityManager = CommandContextUtil.getExecutionEntityManager();
+        HistoricTaskService historicTaskService = CommandContextUtil.getHistoricTaskService();
+        VariableService variableService = CommandContextUtil.getVariableService();
+        for (String str : sourceTaskDefIdList) {
+            List<Execution> executionEntities = runtimeService.createExecutionQuery().processInstanceId(processInstanceId).activityId(str).list();
+            for (Execution parentExecution : executionEntities) {
+                //关闭未完成的任务执行流
+                // 获取子级Executions,如子流程节点等需要处理
+                List<ExecutionEntity> childExecutions =
+                        executionEntityManager.findChildExecutionsByParentExecutionId(parentExecution.getId());
+                for (ExecutionEntity childExecution : childExecutions) {
+                    //因为外键约束,首先要删除variable表中的execution相关数据
+                    List<VariableInstanceEntity> variableInstances = variableService.findVariableInstancesByExecutionId(childExecution.getId());
+                    varMap.put(parentExecution.getActivityId(), variableInstances);
+                    variableInstances.forEach(variableService::deleteVariableInstance);
+                    executionEntityManager.deleteExecutionAndRelatedData(childExecution, deleteReason, false);
+                    // 修改历史实例
+                    HistoricTaskInstanceQueryImpl query = new HistoricTaskInstanceQueryImpl().executionId(childExecution.getId()).processInstanceId(processInstanceId);
+                    List<HistoricTaskInstance> HistoricTaskInstances = historicTaskService.findHistoricTaskInstancesByQueryCriteria(query);
+                    if (CollectionUtil.isNotEmpty(HistoricTaskInstances)) {
+                        for (HistoricTaskInstance HistoricTaskInstance : HistoricTaskInstances) {
+                            HistoricTaskInstanceEntity entity = (HistoricTaskInstanceEntity) HistoricTaskInstance;
+                            entity.setDeleteReason(deleteReason);
+                            historicTaskService.updateHistoricTask(entity, true);
+                        }
+                    }
+                }
+                //父执行流关闭
+                List<VariableInstanceEntity> variableInstances = variableService.findVariableInstancesByExecutionId(parentExecution.getId());
+                varMap.put(parentExecution.getActivityId(), variableInstances);
+                variableInstances.forEach(variableService::deleteVariableInstance);
+                ExecutionEntity parentExecution1 = (ExecutionEntity) parentExecution;
+                executionEntityManager.deleteExecutionAndRelatedData(parentExecution1, deleteReason, false);
+            }
+        }
+    }
+}
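
JumpCmd runs inside Flowable's command interceptor chain rather than being called directly; the service layer submits it through ManagementService. A fragment mirroring TaskServiceImpl.jump further below, with instanceId, the source/target lists, and the injected services assumed to be in scope:

    BpmnModel bpmnModel = repositoryService.getBpmnModel(instance.getProcessDefinitionId());
    JumpCmd jumpCmd = new JumpCmd(instanceId, sourceTaskKeys, targetNodeIds, "custom jump", bpmnModel, runtimeService);
    managementService.executeCommand(jumpCmd);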

+ 132 - 0
jnpf-workflow-flowable/src/main/java/jnpf/workflow/flowable/service/DefinitionServiceImpl.java

@@ -0,0 +1,132 @@
+package jnpf.workflow.flowable.service;
+
+import cn.hutool.core.bean.BeanUtil;
+import cn.hutool.core.collection.CollectionUtil;
+import cn.hutool.core.util.IdUtil;
+import cn.hutool.core.util.StrUtil;
+import jnpf.workflow.common.exception.BizException;
+import jnpf.workflow.common.exception.ResultCode;
+import jnpf.workflow.common.model.fo.DefinitionDeleteFo;
+import jnpf.workflow.common.model.fo.DefinitionDeployFo;
+import jnpf.workflow.common.model.vo.DefinitionVo;
+import jnpf.workflow.common.model.vo.DeploymentVo;
+import jnpf.workflow.common.model.vo.FlowElementVo;
+import jnpf.workflow.common.service.IDefinitionService;
+import jnpf.workflow.flowable.util.FlowableUtil;
+import lombok.AllArgsConstructor;
+import lombok.extern.slf4j.Slf4j;
+import org.flowable.bpmn.model.Process;
+import org.flowable.bpmn.model.*;
+import org.flowable.engine.RepositoryService;
+import org.flowable.engine.repository.Deployment;
+import org.flowable.engine.repository.ProcessDefinition;
+import org.springframework.stereotype.Service;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+import java.util.stream.Collectors;
+
+/**
+ * 流程定义实现层
+ *
+ * @author JNPF Flowable@YinMai Info. Co., Ltd
+ * @version 1.0.0
+ * @since 2024/4/3 11:36
+ */
+@Slf4j
+@Service
+@AllArgsConstructor
+public class DefinitionServiceImpl implements IDefinitionService {
+    private final RepositoryService repositoryService;
+
+    @Override
+    public DeploymentVo deployDefinition(DefinitionDeployFo fo) {
+        Deployment deployment;
+        try {
+            String resourceName;
+            if (StrUtil.isNotBlank(fo.getKey())) {
+                resourceName = fo.getKey();
+            } else {
+                resourceName = IdUtil.getSnowflakeNextIdStr();
+            }
+            deployment = repositoryService
+                    .createDeployment()
+                    .name(fo.getName())
+                    .key(fo.getKey())
+                    .addString(resourceName + ".bpmn20.xml", fo.getBpmnXml())
+                    .disableSchemaValidation()
+                    .deploy();
+        } catch (Exception e) {
+            throw new BizException(ResultCode.DEPLOY_ERROR.getMsg(), e);
+        }
+        DeploymentVo vo = new DeploymentVo();
+        vo.setDeploymentId(deployment.getId());
+        return vo;
+    }
+
+    @Override
+    public List<DefinitionVo> listDefinition() {
+        List<ProcessDefinition> definitions = repositoryService.createProcessDefinitionQuery().list();
+        List<DefinitionVo> list = new ArrayList<>();
+        if (CollectionUtil.isNotEmpty(definitions)) {
+            for (ProcessDefinition definition : definitions) {
+                DefinitionVo vo = new DefinitionVo();
+                vo.setDefinitionId(definition.getId());
+                vo.setDefinitionName(definition.getName());
+                vo.setDefinitionKey(definition.getKey());
+                vo.setDefinitionVersion(definition.getVersion());
+                vo.setDeploymentId(definition.getDeploymentId());
+                list.add(vo);
+            }
+        }
+        return list;
+    }
+
+    @Override
+    public boolean deleteDefinition(DefinitionDeleteFo fo) {
+        if (null == fo.getCascade()) {
+            fo.setCascade(true);
+        }
+        try {
+            // 根据部署ID删除,并级联删除当前流程定义下的所有流程实例、job
+            repositoryService.deleteDeployment(fo.getDeploymentId(), fo.getCascade());
+            return true;
+        } catch (Exception e) {
+            log.error(ResultCode.DELETE_FAILURE.getMsg(), e);
+        }
+        return false;
+    }
+
+    @Override
+    public List<FlowElementVo> getStructure(String deploymentId) {
+        List<FlowElementVo> vos = new ArrayList<>();
+
+        ProcessDefinition definition = repositoryService.createProcessDefinitionQuery().deploymentId(deploymentId).singleResult();
+        if (null != definition) {
+            Process process = repositoryService.getBpmnModel(definition.getId()).getProcesses().get(0);
+            Collection<FlowElement> elements = FlowableUtil.getAllElements(process.getFlowElements(), null);
+            for (FlowElement element : elements) {
+                FlowElementVo vo = BeanUtil.copyProperties(element, FlowElementVo.class);
+                if (element instanceof Event) {
+                    Event el = (Event) element;
+                    vo.setIncomingList(el.getIncomingFlows().stream().map(SequenceFlow::getId).collect(Collectors.toList()));
+                    vo.setOutgoingList(el.getOutgoingFlows().stream().map(SequenceFlow::getId).collect(Collectors.toList()));
+                }
+                if (element instanceof Activity) {
+                    Activity el = (Activity) element;
+                    vo.setIncomingList(el.getIncomingFlows().stream().map(SequenceFlow::getId).collect(Collectors.toList()));
+                    vo.setOutgoingList(el.getOutgoingFlows().stream().map(SequenceFlow::getId).collect(Collectors.toList()));
+                }
+                if (element instanceof Gateway) {
+                    Gateway el = (Gateway) element;
+                    vo.setIncomingList(el.getIncomingFlows().stream().map(SequenceFlow::getId).collect(Collectors.toList()));
+                    vo.setOutgoingList(el.getOutgoingFlows().stream().map(SequenceFlow::getId).collect(Collectors.toList()));
+                }
+                vos.add(vo);
+            }
+        }
+
+        return vos;
+    }
+}
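
A small observation on getStructure above: in the Flowable BPMN object model, Event, Activity, and Gateway all extend FlowNode, which already exposes getIncomingFlows()/getOutgoingFlows(), so the three instanceof branches could likely be collapsed without changing behaviour (sketch only):

    if (element instanceof FlowNode) {
        FlowNode node = (FlowNode) element;
        vo.setIncomingList(node.getIncomingFlows().stream().map(SequenceFlow::getId).collect(Collectors.toList()));
        vo.setOutgoingList(node.getOutgoingFlows().stream().map(SequenceFlow::getId).collect(Collectors.toList()));
    }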

+ 87 - 0
jnpf-workflow-flowable/src/main/java/jnpf/workflow/flowable/service/InstanceServiceImpl.java

@@ -0,0 +1,87 @@
+package jnpf.workflow.flowable.service;
+
+import cn.hutool.core.collection.CollectionUtil;
+import jnpf.workflow.common.exception.BizException;
+import jnpf.workflow.common.exception.ResultCode;
+import jnpf.workflow.common.model.fo.InstanceDeleteFo;
+import jnpf.workflow.common.model.fo.InstanceStartFo;
+import jnpf.workflow.common.model.vo.HistoricInstanceVo;
+import jnpf.workflow.common.model.vo.InstanceVo;
+import jnpf.workflow.common.service.IInstanceService;
+import lombok.AllArgsConstructor;
+import lombok.extern.slf4j.Slf4j;
+import org.flowable.engine.HistoryService;
+import org.flowable.engine.RepositoryService;
+import org.flowable.engine.RuntimeService;
+import org.flowable.engine.history.HistoricProcessInstance;
+import org.flowable.engine.repository.ProcessDefinition;
+import org.flowable.engine.runtime.ProcessInstance;
+import org.springframework.stereotype.Service;
+
+import java.time.ZoneId;
+
+/**
+ * 流程实例实现层
+ *
+ * @author JNPF Flowable@YinMai Info. Co., Ltd
+ * @version 1.0.0
+ * @since 2024/4/7 14:34
+ */
+@Slf4j
+@Service
+@AllArgsConstructor
+public class InstanceServiceImpl implements IInstanceService {
+    private final RepositoryService repositoryService;
+    private final RuntimeService runtimeService;
+    private final HistoryService historyService;
+
+    @Override
+    public InstanceVo startById(InstanceStartFo fo) {
+        ProcessDefinition definition = repositoryService
+                .createProcessDefinitionQuery()
+                .deploymentId(fo.getDeploymentId()).singleResult();
+        if (null == definition) {
+            throw new BizException(ResultCode.DEFINITION_NOT_EXIST);
+        }
+        InstanceVo vo = new InstanceVo();
+        ProcessInstance instance;
+        if (CollectionUtil.isNotEmpty(fo.getVariables())) {
+            instance = runtimeService.startProcessInstanceById(definition.getId(), fo.getVariables());
+        } else {
+            instance = runtimeService.startProcessInstanceById(definition.getId());
+        }
+        if (null != instance) {
+            vo.setInstanceId(instance.getId());
+        }
+        return vo;
+    }
+
+    @Override
+    public HistoricInstanceVo getHistoricProcessInstance(String processInstanceId) {
+        HistoricProcessInstance historicInstance = historyService
+                .createHistoricProcessInstanceQuery()
+                .processInstanceId(processInstanceId)
+                .singleResult();
+        if (null == historicInstance) {
+            throw new BizException(ResultCode.INSTANCE_NOT_EXIST);
+        }
+        HistoricInstanceVo vo = new HistoricInstanceVo();
+        vo.setInstanceId(historicInstance.getId());
+        vo.setStartTime(historicInstance.getStartTime().toInstant().atZone(ZoneId.systemDefault()).toLocalDateTime());
+        vo.setEndTime(historicInstance.getEndTime() == null ? null : historicInstance.getEndTime().toInstant().atZone(ZoneId.systemDefault()).toLocalDateTime());
+        vo.setDurationInMillis(historicInstance.getDurationInMillis());
+        vo.setDeleteReason(historicInstance.getDeleteReason());
+        return vo;
+    }
+
+    @Override
+    public boolean deleteInstance(InstanceDeleteFo fo) {
+        try {
+            runtimeService.deleteProcessInstance(fo.getInstanceId(), fo.getDeleteReason());
+            return true;
+        } catch (Exception e) {
+            log.error(ResultCode.DELETE_FAILURE.getMsg(), e);
+        }
+        return false;
+    }
+}

+ 592 - 0
jnpf-workflow-flowable/src/main/java/jnpf/workflow/flowable/service/TaskServiceImpl.java

@@ -0,0 +1,592 @@
+package jnpf.workflow.flowable.service;
+
+import cn.hutool.core.collection.CollUtil;
+import cn.hutool.core.collection.CollectionUtil;
+import cn.hutool.core.util.StrUtil;
+import jnpf.workflow.common.exception.BizException;
+import jnpf.workflow.common.exception.ResultCode;
+import jnpf.workflow.common.model.fo.*;
+import jnpf.workflow.common.model.vo.*;
+import jnpf.workflow.common.service.IInstanceService;
+import jnpf.workflow.common.service.ITaskService;
+import jnpf.workflow.flowable.cmd.JumpCmd;
+import jnpf.workflow.flowable.util.FlowableUtil;
+import lombok.AllArgsConstructor;
+import lombok.extern.slf4j.Slf4j;
+import org.flowable.bpmn.constants.BpmnXMLConstants;
+import org.flowable.bpmn.model.Process;
+import org.flowable.bpmn.model.*;
+import org.flowable.engine.*;
+import org.flowable.engine.history.HistoricActivityInstance;
+import org.flowable.engine.history.HistoricProcessInstance;
+import org.flowable.engine.repository.ProcessDefinition;
+import org.flowable.engine.runtime.ProcessInstance;
+import org.flowable.task.api.Task;
+import org.flowable.task.api.TaskInfo;
+import org.flowable.task.api.history.HistoricTaskInstance;
+import org.flowable.variable.api.history.HistoricVariableInstance;
+import org.springframework.stereotype.Service;
+
+import java.util.*;
+import java.util.stream.Collectors;
+
+/**
+ * 流程任务实现层
+ *
+ * @author JNPF Flowable@YinMai Info. Co., Ltd
+ * @version 1.0.0
+ * @since 2024/4/8 11:12
+ */
+@Slf4j
+@Service
+@AllArgsConstructor
+public class TaskServiceImpl implements ITaskService {
+    private final TaskService taskService;
+    private final HistoryService historyService;
+    private final RuntimeService runtimeService;
+    private final RepositoryService repositoryService;
+    private final ManagementService managementService;
+    private final IInstanceService instanceService;
+
+    @Override
+    public List<TaskVo> getTask(String instanceId) {
+        List<Task> list = taskService.createTaskQuery().processInstanceId(instanceId).list();
+        List<TaskVo> vos = new ArrayList<>();
+        if (CollectionUtil.isNotEmpty(list)) {
+            for (Task task : list) {
+                TaskVo vo = new TaskVo();
+                vo.setTaskId(task.getId());
+                vo.setTaskName(task.getName());
+                vo.setTaskKey(task.getTaskDefinitionKey());
+                vo.setInstanceId(task.getProcessInstanceId());
+                vos.add(vo);
+            }
+        }
+        return vos;
+    }
+
+    @Override
+    public boolean complete(TaskCompleteFo fo) {
+        Task task = taskService.createTaskQuery().taskId(fo.getTaskId()).singleResult();
+        if (null == task) {
+            throw new BizException(ResultCode.TASK_NOT_EXIST);
+        }
+        try {
+            if (CollectionUtil.isNotEmpty(fo.getVariables())) {
+                taskService.complete(fo.getTaskId(), fo.getVariables());
+            } else {
+                taskService.complete(fo.getTaskId());
+            }
+            return true;
+        } catch (Exception e) {
+            log.error(ResultCode.TASK_COMPLETE_ERROR.getMsg(), e);
+        }
+        return false;
+    }
+
+    @Override
+    public boolean moveSingleToMulti(MoveSingleToMultiFo fo) {
+        ProcessInstance instance = runtimeService.createProcessInstanceQuery().processInstanceId(fo.getInstanceId()).singleResult();
+        if (null == instance) {
+            throw new BizException(ResultCode.INSTANCE_NOT_EXIST);
+        }
+        try {
+            this.moveSingleActivityIdToActivityIds(fo.getInstanceId(), fo.getSourceKey(), fo.getTargetKeys());
+            return true;
+        } catch (Exception e) {
+            log.error(ResultCode.TASK_JUMP_ERROR.getMsg(), e);
+        }
+        return false;
+    }
+
+    /**
+     * 节点跳转
+     * Set the activity id that should be changed to multiple activity ids
+     */
+    public void moveSingleActivityIdToActivityIds(String processInstanceId, String activityId, List<String> activityIds) {
+        runtimeService.createChangeActivityStateBuilder()
+                .processInstanceId(processInstanceId)
+                .moveSingleActivityIdToActivityIds(activityId, activityIds).changeState();
+    }
+
+    @Override
+    public boolean moveMultiToSingle(MoveMultiToSingleFo fo) {
+        ProcessInstance instance = runtimeService.createProcessInstanceQuery().processInstanceId(fo.getInstanceId()).singleResult();
+        if (null == instance) {
+            throw new BizException(ResultCode.INSTANCE_NOT_EXIST);
+        }
+        try {
+            this.moveActivityIdsToSingleActivityId(fo.getInstanceId(), fo.getSourceKeys(), fo.getTargetKey());
+            return true;
+        } catch (Exception e) {
+            log.error(ResultCode.TASK_JUMP_ERROR.getMsg(), e);
+        }
+        return false;
+    }
+
+    /**
+     * 节点跳转
+     * Set the activity ids that should be changed to a single activity id
+     */
+    public void moveActivityIdsToSingleActivityId(String processInstanceId, List<String> activityIds, String activityId) {
+        runtimeService.createChangeActivityStateBuilder()
+                .processInstanceId(processInstanceId)
+                .moveActivityIdsToSingleActivityId(activityIds, activityId).changeState();
+    }
+
+    @Override
+    public boolean jump(JumpFo fo) {
+        ProcessInstance instance = runtimeService.createProcessInstanceQuery().processInstanceId(fo.getInstanceId()).singleResult();
+        if (null == instance) {
+            throw new BizException(ResultCode.INSTANCE_NOT_EXIST);
+        }
+        try {
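+            // Build a custom jump command against the instance's BPMN model and run it through the management service.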
+            BpmnModel bpmnModel = repositoryService.getBpmnModel(instance.getProcessDefinitionId());
+            JumpCmd jumpCmd = new JumpCmd(fo.getInstanceId(), fo.getSource(), fo.getTarget(), "custom jump", bpmnModel, runtimeService);
+            managementService.executeCommand(jumpCmd);
+            return true;
+        } catch (Exception e) {
+            log.error(ResultCode.TASK_JUMP_ERROR.getMsg(), e);
+        }
+        return false;
+    }
+
+    @Override
+    public List<String> getFallbacks(String taskId) {
+        Task task = taskService.createTaskQuery().taskId(taskId).singleResult();
+        if (null == task) {
+            throw new BizException(ResultCode.TASK_NOT_EXIST);
+        }
+        FlowElement source = getFlowElement(task.getProcessDefinitionId(), task.getTaskDefinitionKey());
+        List<String> list = FlowableUtil.getPassActs(source, null, null);
+        return list.stream().distinct().collect(Collectors.toList());
+    }
+
+    /**
+     * Get the flow element for the given process definition ID and task definition key
+     */
+    public FlowElement getFlowElement(String processDefinitionId, String taskDefinitionKey) {
+        ProcessDefinition definition = repositoryService.createProcessDefinitionQuery()
+                .processDefinitionId(processDefinitionId).singleResult();
+        if (null == definition) {
+            throw new BizException(ResultCode.DEFINITION_NOT_EXIST);
+        }
+        Process process = repositoryService.getBpmnModel(definition.getId()).getProcesses().get(0);
+        Collection<FlowElement> elements = FlowableUtil.getAllElements(process.getFlowElements(), null);
+        FlowElement source = null;
+        if (null != elements && !elements.isEmpty()) {
+            for (FlowElement element : elements) {
+                if (element.getId().equals(taskDefinitionKey)) {
+                    source = element;
+                }
+            }
+        }
+        return source;
+    }
+
+    @Override
+    public List<String> back(TaskBackFo fo) {
+        Task task = taskService.createTaskQuery().taskId(fo.getTaskId()).singleResult();
+        String definitionId;
+        String instanceId;
+        if (null == task) {
+            HistoricTaskInstance historicTask = historyService.createHistoricTaskInstanceQuery().taskId(fo.getTaskId()).singleResult();
+            definitionId = historicTask.getProcessDefinitionId();
+            instanceId = historicTask.getProcessInstanceId();
+        } else {
+            definitionId = task.getProcessDefinitionId();
+            instanceId = task.getProcessInstanceId();
+        }
+        if (StrUtil.isNotBlank(fo.getTargetKey())) {
+            String[] split = fo.getTargetKey().split(",");
+            if (split.length == 0) {
+                throw new BizException("The target node key must not be empty");
+            }
+            List<String> list = Arrays.asList(split);
+            return this.back(definitionId, instanceId, list);
+        }
+        return null;
+    }
+
+    public List<String> back(String definitionId, String instanceId, List<String> targetList) {
+        List<String> currentIds = new ArrayList<>();
+        for (String targetKey : targetList) {
+            FlowElement target = getFlowElement(definitionId, targetKey);
+            // Collect the keys of all currently active tasks, used to work out which tasks have to be recalled
+            List<Task> runTaskList = taskService.createTaskQuery().processInstanceId(instanceId).list();
+            List<String> runTaskKeyList = runTaskList.stream().map(Task::getTaskDefinitionKey).collect(Collectors.toList());
+            // The user tasks that need to be sent back
+            List<UserTask> userTaskList = FlowableUtil.getChildUserTasks(target, runTaskKeyList, null, null);
+            List<String> collect = userTaskList.stream().map(UserTask::getId).collect(Collectors.toList());
+            currentIds.addAll(collect);
+        }
+        currentIds = currentIds.stream().distinct().collect(Collectors.toList());
+
+        JumpFo jumpFo = new JumpFo();
+        jumpFo.setInstanceId(instanceId);
+        jumpFo.setSource(currentIds);
+        jumpFo.setTarget(targetList);
+        this.jump(jumpFo);
+
+        return currentIds;
+    }
+
+    @Override
+    public List<String> getPrevUserTask(TaskPrevFo fo) {
+        List<String> list = new ArrayList<>();
+        if (StrUtil.isNotBlank(fo.getDeploymentId())) {
+            ProcessDefinition definition = repositoryService.createProcessDefinitionQuery().deploymentId(fo.getDeploymentId()).singleResult();
+            if (null == definition) {
+                throw new BizException(ResultCode.DEFINITION_NOT_EXIST);
+            }
+            // Get the current node
+            FlowElement source = getFlowElement(definition.getId(), fo.getTaskKey());
+            // Get the previous-level (parent) user tasks
+            list = FlowableUtil.getParentActs(source, null, null);
+        } else {
+            Task task = taskService.createTaskQuery().taskId(fo.getTaskId()).singleResult();
+            if (null == task) {
+                throw new BizException(ResultCode.TASK_NOT_EXIST);
+            }
+            FlowElement source = getFlowElement(task.getProcessDefinitionId(), task.getTaskDefinitionKey());
+            list = FlowableUtil.getParentActs(source, null, null);
+        }
+        return list;
+    }
+
+    @Override
+    public List<NodeElementVo> getNextUserTask(TaskNextFo fo) {
+        List<UserTask> nextUserTasks = new ArrayList<>();
+        if (StrUtil.isNotBlank(fo.getDeploymentId())) {
+            ProcessDefinition definition = repositoryService.createProcessDefinitionQuery().deploymentId(fo.getDeploymentId()).singleResult();
+            if (null == definition) {
+                throw new BizException(ResultCode.DEFINITION_NOT_EXIST);
+            }
+            // Get the current node
+            FlowElement source = getFlowElement(definition.getId(), fo.getTaskKey());
+            // Get the next-level user tasks
+            nextUserTasks = FlowableUtil.getNextUserTasks(source, null, null);
+        } else {
+            HistoricTaskInstance taskInst = historyService.createHistoricTaskInstanceQuery().taskId(fo.getTaskId()).singleResult();
+            if (null == taskInst) {
+                throw new BizException(ResultCode.TASK_NOT_EXIST);
+            }
+            FlowElement source = getFlowElement(taskInst.getProcessDefinitionId(), taskInst.getTaskDefinitionKey());
+            // Get the next-level user tasks
+            nextUserTasks = FlowableUtil.getNextUserTasks(source, null, null);
+        }
+        List<NodeElementVo> vos = new ArrayList<>();
+        if (CollectionUtil.isNotEmpty(nextUserTasks)) {
+            for (UserTask task : nextUserTasks) {
+                NodeElementVo vo = new NodeElementVo();
+                vo.setId(task.getId());
+                vo.setName(task.getName());
+                vo.setIncomingList(task.getIncomingFlows().stream().map(SequenceFlow::getId).collect(Collectors.toList()));
+                vo.setOutgoingList(task.getOutgoingFlows().stream().map(SequenceFlow::getId).collect(Collectors.toList()));
+                vos.add(vo);
+            }
+        }
+        return vos;
+    }
+
+    @Override
+    public List<String> getTaskKeyAfterFlow(FlowTargetTaskFo fo) {
+        ProcessDefinition definition = repositoryService.createProcessDefinitionQuery().deploymentId(fo.getDeploymentId()).singleResult();
+        if (null == definition) {
+            throw new BizException(ResultCode.DEFINITION_NOT_EXIST);
+        }
+        List<String> list = new ArrayList<>();
+        // Get the current node
+        FlowElement source = getFlowElement(definition.getId(), fo.getFlowKey());
+        String taskKey = FlowableUtil.getTaskKeyAfterFlow(source);
+        list.add(taskKey);
+        return list;
+    }
+
+    @Override
+    public boolean retract(String taskId) {
+        // The historic task instance to be recalled
+        HistoricTaskInstance taskInst = historyService.createHistoricTaskInstanceQuery().taskId(taskId).singleResult();
+        if (null != taskInst) {
+            ProcessInstance procInst = runtimeService.createProcessInstanceQuery()
+                    .processInstanceId(taskInst.getProcessInstanceId())
+                    .active().singleResult();
+            if (null != procInst) {
+                // Get the current node
+                FlowElement source = getFlowElement(taskInst.getProcessDefinitionId(), taskInst.getTaskDefinitionKey());
+                // Get the next-level user tasks
+                List<UserTask> nextUserTasks = FlowableUtil.getNextUserTasks(source, null, null);
+                List<String> nextUserTaskKeys = nextUserTasks.stream().map(UserTask::getId).collect(Collectors.toList());
+                // Get all active task nodes and find the ones that need to be recalled
+                List<Task> activateTasks = taskService.createTaskQuery().processInstanceId(taskInst.getProcessInstanceId()).list();
+                List<String> currentIds = new ArrayList<>();
+                for (Task task : activateTasks) {
+                    // If an active task node is among the next-level tasks, add it to the nodes to be recalled
+                    if (CollUtil.contains(nextUserTaskKeys, task.getTaskDefinitionKey())) {
+                        currentIds.add(task.getTaskDefinitionKey());
+                    }
+                }
+                this.moveActivityIdsToSingleActivityId(taskInst.getProcessInstanceId(), currentIds, taskInst.getTaskDefinitionKey());
+                return true;
+            }
+        }
+        return false;
+    }
+
+    @Override
+    public List<String> getOutgoingFlows(TaskOutgoingFo fo) {
+        if (StrUtil.isNotBlank(fo.getDeploymentId())) {
+            ProcessDefinition definition = repositoryService.createProcessDefinitionQuery().deploymentId(fo.getDeploymentId()).singleResult();
+            if (null == definition) {
+                throw new BizException(ResultCode.DEFINITION_NOT_EXIST);
+            }
+            return this.getOutgoingFlows(definition.getId(), fo.getTaskKey());
+        } else {
+            Task task = taskService.createTaskQuery().taskId(fo.getTaskId()).singleResult();
+            if (null == task) {
+                throw new BizException(ResultCode.TASK_NOT_EXIST);
+            }
+            return this.getOutgoingFlows(task.getProcessDefinitionId(), task.getTaskDefinitionKey());
+        }
+    }
+
+    /**
+     * Get the outgoing flow keys (if an outgoing flow ends in a gateway, the gateway's outgoing flows are included as well)
+     */
+    public List<String> getOutgoingFlows(String processDefinitionId, String taskDefinitionKey) {
+        FlowElement source = getFlowElement(processDefinitionId, taskDefinitionKey);
+        List<SequenceFlow> flows = FlowableUtil.getOutFlowsWithGateway(source, new ArrayList<>());
+        List<String> list = new ArrayList<>();
+        if (!flows.isEmpty()) {
+            for (SequenceFlow flow : flows) {
+                list.add(flow.getId());
+            }
+        }
+        return list.stream().distinct().collect(Collectors.toList());
+    }
+
+    @Override
+    public List<FlowVo> getOutgoing(TaskOutgoingFo fo) {
+        if (StrUtil.isNotBlank(fo.getDeploymentId())) {
+            ProcessDefinition definition = repositoryService.createProcessDefinitionQuery().deploymentId(fo.getDeploymentId()).singleResult();
+            if (null == definition) {
+                throw new BizException(ResultCode.DEFINITION_NOT_EXIST);
+            }
+            return this.getOutgoing(definition.getId(), fo.getTaskKey());
+        } else {
+            Task task = taskService.createTaskQuery().taskId(fo.getTaskId()).singleResult();
+            if (null == task) {
+                throw new BizException(ResultCode.TASK_NOT_EXIST);
+            }
+            return this.getOutgoing(task.getProcessDefinitionId(), task.getTaskDefinitionKey());
+        }
+    }
+
+    public List<FlowVo> getOutgoing(String processDefinitionId, String taskDefinitionKey) {
+        FlowElement source = getFlowElement(processDefinitionId, taskDefinitionKey);
+        return FlowableUtil.getOutFlows(source, null);
+    }
+
+    @Override
+    public List<String> getKeysOfFinished(String instanceId) {
+        ProcessInstance instance = runtimeService.createProcessInstanceQuery().processInstanceId(instanceId).singleResult();
+        if (null == instance) {
+            throw new BizException(ResultCode.INSTANCE_NOT_EXIST);
+        }
+        // Get the nodes that come before the current nodes
+        List<String> keysOfBefore = getKeysOfBefore(instance);
+        // Get the finished historic activities of the process instance
+        List<HistoricActivityInstance> list = historyService.createHistoricActivityInstanceQuery()
+                .processInstanceId(instanceId).finished().list();
+        if (CollectionUtil.isNotEmpty(list)) {
+            // Drop sequence flows and remove duplicates
+            List<String> keysOfFinished = list.stream()
+                    .filter(e -> !BpmnXMLConstants.ELEMENT_SEQUENCE_FLOW.equals(e.getActivityType()))
+                    .sorted(Comparator.comparing(HistoricActivityInstance::getStartTime))
+                    .map(HistoricActivityInstance::getActivityId)
+                    .distinct().collect(Collectors.toList());
+            if (CollectionUtil.isNotEmpty(keysOfBefore)) {
+                keysOfFinished.retainAll(keysOfBefore);
+            }
+            return keysOfFinished;
+        }
+        return null;
+    }
+
+    /**
+     * Walk back from the instance's current nodes and collect the nodes before them
+     */
+    public List<String> getKeysOfBefore(ProcessInstance instance) {
+        List<String> res = new ArrayList<>();
+        // Get the current tasks of the instance
+        List<Task> taskList = taskService.createTaskQuery().processInstanceId(instance.getId()).list();
+        if (CollectionUtil.isNotEmpty(taskList)) {
+            List<String> keys = taskList.stream().map(TaskInfo::getTaskDefinitionKey).collect(Collectors.toList());
+            for (String key : keys) {
+                FlowElement source = getFlowElement(instance.getProcessDefinitionId(), key);
+                List<String> list = FlowableUtil.getBefore(source, null, null);
+                res.addAll(list);
+            }
+        }
+        return res.stream().distinct().collect(Collectors.toList());
+    }
+
+    @Override
+    public List<String> getIncomingFlows(String taskId) {
+        HistoricTaskInstance taskInst = historyService.createHistoricTaskInstanceQuery().taskId(taskId).singleResult();
+        if (null != taskInst) {
+            FlowElement source = getFlowElement(taskInst.getProcessDefinitionId(), taskInst.getTaskDefinitionKey());
+            List<SequenceFlow> flows = FlowableUtil.getElementIncomingFlows(source);
+            return flows.stream().map(BaseElement::getId).collect(Collectors.toList());
+        }
+        return null;
+    }
+
+    // Get the nodes that have not been passed yet
+    @Override
+    public List<String> getToBePass(String instanceId) {
+        List<String> list = new ArrayList<>();
+        List<Task> currentList = taskService.createTaskQuery().processInstanceId(instanceId).list();
+        if (CollectionUtil.isNotEmpty(currentList)) {
+            List<String> collect = currentList.stream().map(Task::getTaskDefinitionKey).collect(Collectors.toList());
+            // Starting from the current nodes, recursively collect all subsequent nodes
+            for (Task task : currentList) {
+                FlowElement source = getFlowElement(task.getProcessDefinitionId(), task.getTaskDefinitionKey());
+                List<String> after = FlowableUtil.getAfter(source, null, null);
+                list.addAll(after);
+            }
+            list = list.stream().filter(e -> !collect.contains(e)).collect(Collectors.toList());
+        }
+        return list.stream().distinct().collect(Collectors.toList());
+    }
+
+    @Override
+    public List<String> getAfter(TaskAfterFo fo) {
+        String deploymentId = fo.getDeploymentId();
+        List<String> taskKeys = fo.getTaskKeys();
+        List<String> list = new ArrayList<>();
+        ProcessDefinition definition = repositoryService.createProcessDefinitionQuery().deploymentId(deploymentId).singleResult();
+        if (null == definition) {
+            throw new BizException(ResultCode.DEFINITION_NOT_EXIST);
+        }
+        for (String taskKey : taskKeys) {
+            FlowElement source = getFlowElement(definition.getId(), taskKey);
+            List<String> after = FlowableUtil.getAfter(source, null, null);
+            list.addAll(after);
+        }
+        return list.stream().distinct().collect(Collectors.toList());
+    }
+
+    // Compensation for a failed complete(): re-align the instance's active nodes with the expected ones
+    @Override
+    public List<TaskVo> compensate(CompensateFo fo) {
+        String instanceId = fo.getInstanceId();
+        ProcessInstance instance = runtimeService.createProcessInstanceQuery().processInstanceId(instanceId).singleResult();
+        if (null == instance) {
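+            // The runtime instance has already ended: restart the process from its original deployment with the historic variables restored.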
+            HistoricProcessInstance historicInstance = historyService.createHistoricProcessInstanceQuery().processInstanceId(instanceId).singleResult();
+            Map<String, Object> variables = new HashMap<>();
+            List<HistoricVariableInstance> list = historyService.createHistoricVariableInstanceQuery().processInstanceId(instanceId).list();
+            for (HistoricVariableInstance var : list) {
+                variables.put(var.getVariableName(), var.getValue());
+            }
+            String deploymentId = historicInstance.getDeploymentId();
+
+            InstanceStartFo startFo = new InstanceStartFo();
+            startFo.setDeploymentId(deploymentId);
+            startFo.setVariables(variables);
+            InstanceVo instanceVo = instanceService.startById(startFo);
+            instanceId = instanceVo.getInstanceId();
+        }
+        List<String> sourceList = fo.getSource().stream().sorted().collect(Collectors.toList());
+
+        List<TaskVo> taskVoList = this.getTask(instanceId);
+        List<String> currentList = taskVoList.stream().map(TaskVo::getTaskKey).sorted().collect(Collectors.toList());
+
+//        if (ObjectUtil.equals(sourceList, currentList)) {
+//            return null == instance ? taskVoList : new ArrayList<>();
+//        }
+
+        // Work out the nodes to jump away from and the target nodes to jump to
+        List<String> createList = sourceList.stream().filter(e -> !currentList.contains(e)).collect(Collectors.toList());
+        List<String> deleteList = currentList.stream().filter(e -> !sourceList.contains(e)).collect(Collectors.toList());
+
+        JumpFo jumpFo = new JumpFo();
+        jumpFo.setInstanceId(instanceId);
+        jumpFo.setSource(deleteList);
+        jumpFo.setTarget(createList);
+        this.jump(jumpFo);
+
+        List<TaskVo> vos = this.getTask(instanceId);
+        if (null != instance) {
+            vos.forEach(e -> e.setInstanceId(null));
+        }
+        return vos;
+    }
+
+    @Override
+    public List<HistoricNodeVo> getHistoric(String instanceId) {
+        List<HistoricNodeVo> vos;
+
+        Set<String> set = new HashSet<>();
+        set.add("userTask");
+        set.add("startEvent");
+
+        vos = this.getHistoricVos(instanceId, set);
+
+        return vos.stream().sorted(Comparator.comparing(HistoricNodeVo::getStartTime)).collect(Collectors.toList());
+    }
+
+    // Get the historic end-event nodes
+    @Override
+    public List<String> getHistoricEnd(String instanceId) {
+        List<String> list = new ArrayList<>();
+        List<HistoricNodeVo> vos = this.getHistoricVos(instanceId, null);
+        if (CollectionUtil.isNotEmpty(vos)) {
+            list = vos.stream().map(HistoricNodeVo::getCode).distinct().collect(Collectors.toList());
+        }
+        return list;
+    }
+
+    public List<HistoricNodeVo> getHistoricVos(String instanceId, Set<String> set) {
+        List<HistoricNodeVo> vos = new ArrayList<>();
+        if (CollectionUtil.isEmpty(set)) {
+            set = new HashSet<>();
+            set.add("endEvent");
+        }
+        List<HistoricActivityInstance> list = historyService.createHistoricActivityInstanceQuery()
+                .activityTypes(set)
+                .processInstanceId(instanceId).list();
+        if (CollectionUtil.isNotEmpty(list)) {
+            for (HistoricActivityInstance act : list) {
+                HistoricNodeVo vo = new HistoricNodeVo();
+                vo.setCode(act.getActivityId());
+                vo.setTaskId(act.getTaskId());
+                vo.setStartTime(act.getStartTime().getTime());
+                vos.add(vo);
+            }
+        }
+        return vos;
+    }
+
+    @Override
+    public NodeElementVo getElementInfo(InfoModel model) {
+        String deploymentId = model.getDeploymentId();
+        ProcessDefinition definition = repositoryService.createProcessDefinitionQuery().deploymentId(deploymentId).singleResult();
+        if (null == definition) {
+            throw new BizException(ResultCode.DEFINITION_NOT_EXIST);
+        }
+        String definitionId = definition.getId();
+        String key = model.getKey();
+        NodeElementVo vo = new NodeElementVo();
+        FlowElement source = getFlowElement(definitionId, key);
+        if (null != source) {
+            vo.setId(source.getId());
+            List<SequenceFlow> outgoingFlows = FlowableUtil.getElementOutgoingFlows(source);
+            vo.setOutgoingList(outgoingFlows.stream().map(SequenceFlow::getId).collect(Collectors.toList()));
+            List<SequenceFlow> incomingFlows = FlowableUtil.getElementIncomingFlows(source);
+            vo.setIncomingList(incomingFlows.stream().map(SequenceFlow::getId).collect(Collectors.toList()));
+        }
+        return vo;
+    }
+}
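For orientation, a minimal caller-side sketch of the task service implemented above. The facade class, the Spring wiring, the jnpf TaskService interface name (not org.flowable.engine.TaskService) and the Lombok-style setters on the Fo objects are assumptions made for illustration only; they are not part of this commit.

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import jnpf.workflow.common.model.fo.TaskBackFo;
import jnpf.workflow.common.model.fo.TaskCompleteFo;

@Service
public class ApprovalFacade {

    // The workflow task service shown above, assumed to be exposed as a Spring bean.
    @Autowired
    private TaskService flowTaskService;

    public void approve(String taskId, boolean approved) {
        if (approved) {
            // Complete the task and hand the decision over as a process variable.
            Map<String, Object> variables = new HashMap<>();
            variables.put("approved", Boolean.TRUE);
            TaskCompleteFo completeFo = new TaskCompleteFo();
            completeFo.setTaskId(taskId);
            completeFo.setVariables(variables);
            flowTaskService.complete(completeFo);
        } else {
            // Roll the flow back to one of the nodes reported as reachable from this task
            // (for brevity we assume at least one fallback node exists).
            List<String> fallbacks = flowTaskService.getFallbacks(taskId);
            TaskBackFo backFo = new TaskBackFo();
            backFo.setTaskId(taskId);
            backFo.setTargetKey(fallbacks.get(0));
            flowTaskService.back(backFo);
        }
    }
}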

+ 370 - 0
jnpf-workflow-flowable/src/main/java/jnpf/workflow/flowable/util/FlowableUtil.java

@@ -0,0 +1,370 @@
+package jnpf.workflow.flowable.util;
+
+import cn.hutool.core.collection.CollectionUtil;
+import jnpf.workflow.common.model.vo.FlowVo;
+import org.flowable.bpmn.model.*;
+
+import java.util.*;
+
+/**
+ * Flowable utility class
+ *
+ * @author JNPF Flowable@YinMai Info. Co., Ltd
+ * @version 1.0.0
+ * @since 2024/4/8 11:06
+ */
+public class FlowableUtil {
+    /**
+     * Get all flow elements, including those nested in sub-processes
+     *
+     * @param flowElements {@link Collection<FlowElement>}
+     * @param allElements  {@link Collection<FlowElement>}
+     * @return {@link Collection<FlowElement>}
+     * @since 2024/4/8 11:07
+     **/
+    public static Collection<FlowElement> getAllElements(Collection<FlowElement> flowElements, Collection<FlowElement> allElements) {
+        allElements = allElements == null ? new ArrayList<>() : allElements;
+        for (FlowElement flowElement : flowElements) {
+            allElements.add(flowElement);
+            if (flowElement instanceof SubProcess) {
+                // Collect the elements of the sub-process as well
+                allElements = getAllElements(((SubProcess) flowElement).getFlowElements(), allElements);
+            }
+        }
+        return allElements;
+    }
+
+    /**
+     * Get the incoming sequence flows of an element
+     *
+     * @param element {@link FlowElement}
+     * @return {@link List<SequenceFlow>}
+     * @since 2024/4/8 11:10
+     **/
+    public static List<SequenceFlow> getElementIncomingFlows(FlowElement element) {
+        List<SequenceFlow> flows = null;
+        if (element instanceof FlowNode) {
+            flows = ((FlowNode) element).getIncomingFlows();
+        }
+        return flows;
+    }
+
+    /**
+     * Get the outgoing sequence flows of an element
+     *
+     * @param element {@link FlowElement}
+     * @return {@link List<SequenceFlow>}
+     * @since 2024/4/8 11:10
+     **/
+    public static List<SequenceFlow> getElementOutgoingFlows(FlowElement element) {
+        List<SequenceFlow> flows = null;
+        if (element instanceof FlowNode) {
+            flows = ((FlowNode) element).getOutgoingFlows();
+        }
+        return flows;
+    }
+
+    /**
+     * Get the nodes that can be rolled back to (user tasks and the start event)
+     *
+     * @param source    {@link FlowElement}
+     * @param passFlows {@link Set<String>}
+     * @param passActs  {@link List<String>}
+     * @return {@link List<String>}
+     * @since 2024/4/8 15:27
+     **/
+    public static List<String> getPassActs(FlowElement source, Set<String> passFlows, List<String> passActs) {
+        passFlows = passFlows == null ? new HashSet<>() : passFlows;
+        passActs = passActs == null ? new ArrayList<>() : passActs;
+
+        List<SequenceFlow> sequenceFlows = getElementIncomingFlows(source);
+        if (null != sequenceFlows && !sequenceFlows.isEmpty()) {
+            for (SequenceFlow sequenceFlow : sequenceFlows) {
+                // Sequence flow already visited, skip to avoid loops
+                if (passFlows.contains(sequenceFlow.getId())) {
+                    continue;
+                }
+                // Record the visited sequence flow
+                passFlows.add(sequenceFlow.getId());
+                // Record user tasks that were passed through
+                if (sequenceFlow.getSourceFlowElement() instanceof UserTask) {
+                    passActs.add(sequenceFlow.getSourceFlowElement().getId());
+                }
+                if (sequenceFlow.getSourceFlowElement() instanceof StartEvent) {
+                    passActs.add(sequenceFlow.getSourceFlowElement().getId());
+                    continue;
+                }
+                // Recurse upstream
+                getPassActs(sequenceFlow.getSourceFlowElement(), passFlows, passActs);
+            }
+        }
+        return passActs;
+    }
+
+    /**
+     * Get the nodes that need to be recalled
+     *
+     * @param source         {@link FlowElement}
+     * @param runTaskKeyList {@link List<String>}
+     * @param passFlows      {@link Set<String>}
+     * @param userTasks      {@link List<UserTask>}
+     * @return {@link List<UserTask>}
+     * @since 2024/4/8 15:42
+     **/
+    public static List<UserTask> getChildUserTasks(FlowElement source, List<String> runTaskKeyList, Set<String> passFlows, List<UserTask> userTasks) {
+        passFlows = passFlows == null ? new HashSet<>() : passFlows;
+        userTasks = userTasks == null ? new ArrayList<>() : userTasks;
+        List<SequenceFlow> sequenceFlows = getElementOutgoingFlows(source);
+        if (null != sequenceFlows && !sequenceFlows.isEmpty()) {
+            for (SequenceFlow sequenceFlow : sequenceFlows) {
+                // Sequence flow already visited, skip to avoid loops
+                if (passFlows.contains(sequenceFlow.getId())) {
+                    continue;
+                }
+                // Record the visited sequence flow
+                passFlows.add(sequenceFlow.getId());
+                // A user task that is currently active
+                if (sequenceFlow.getTargetFlowElement() instanceof UserTask
+                        && runTaskKeyList.contains(sequenceFlow.getTargetFlowElement().getId())) {
+                    userTasks.add((UserTask) sequenceFlow.getTargetFlowElement());
+                    continue;
+                }
+                // Sub-process: start searching from its first element
+                if (sequenceFlow.getTargetFlowElement() instanceof SubProcess) {
+                    FlowElement flowElement = (FlowElement) ((SubProcess) sequenceFlow.getTargetFlowElement()).getFlowElements().toArray()[0];
+                    List<UserTask> tasks = getChildUserTasks(flowElement, runTaskKeyList, passFlows, null);
+                    // User tasks found, stop searching further down this branch
+                    if (!tasks.isEmpty()) {
+                        userTasks.addAll(tasks);
+                        continue;
+                    }
+                }
+                // Recurse downstream
+                getChildUserTasks(sequenceFlow.getTargetFlowElement(), runTaskKeyList, passFlows, userTasks);
+            }
+        }
+        return userTasks;
+    }
+
+    /**
+     * Get the previous-level (parent) nodes
+     *
+     * @param source     {@link FlowElement}
+     * @param passFlows  {@link Set<String>}
+     * @param parentActs {@link List<String>}
+     * @return {@link List<String>}
+     * @since 2024/4/8 15:53
+     **/
+    public static List<String> getParentActs(FlowElement source, Set<String> passFlows, List<String> parentActs) {
+        passFlows = passFlows == null ? new HashSet<>() : passFlows;
+        parentActs = parentActs == null ? new ArrayList<>() : parentActs;
+
+        List<SequenceFlow> sequenceFlows = getElementIncomingFlows(source);
+        if (null != sequenceFlows && !sequenceFlows.isEmpty()) {
+            for (SequenceFlow sequenceFlow : sequenceFlows) {
+                // Sequence flow already visited, skip to avoid loops
+                if (passFlows.contains(sequenceFlow.getId())) {
+                    continue;
+                }
+                // Record the visited sequence flow
+                passFlows.add(sequenceFlow.getId());
+                // Record user tasks and the start event
+                if (sequenceFlow.getSourceFlowElement() instanceof UserTask) {
+                    parentActs.add(sequenceFlow.getSourceFlowElement().getId());
+                    continue;
+                }
+                if (sequenceFlow.getSourceFlowElement() instanceof StartEvent) {
+                    parentActs.add(sequenceFlow.getSourceFlowElement().getId());
+                    continue;
+                }
+                // Recurse upstream
+                getParentActs(sequenceFlow.getSourceFlowElement(), passFlows, parentActs);
+            }
+        }
+        return parentActs;
+    }
+
+    /**
+     * Get the next-level user tasks
+     *
+     * @param source          {@link FlowElement}
+     * @param hasSequenceFlow {@link Set<String>}
+     * @param userTaskList    {@link List<UserTask>}
+     * @return {@link List<UserTask>}
+     * @since 2024/4/8 16:34
+     **/
+    public static List<UserTask> getNextUserTasks(FlowElement source, Set<String> hasSequenceFlow, List<UserTask> userTaskList) {
+        hasSequenceFlow = Optional.ofNullable(hasSequenceFlow).orElse(new HashSet<>());
+        userTaskList = Optional.ofNullable(userTaskList).orElse(new ArrayList<>());
+        // Get the outgoing sequence flows
+        List<SequenceFlow> sequenceFlows = getElementOutgoingFlows(source);
+        if (null != sequenceFlows) {
+            for (SequenceFlow sequenceFlow : sequenceFlows) {
+                // A repeated sequence flow means a loop, skip it
+                if (hasSequenceFlow.contains(sequenceFlow.getId())) {
+                    continue;
+                }
+                // Record the visited sequence flow
+                hasSequenceFlow.add(sequenceFlow.getId());
+                FlowElement targetFlowElement = sequenceFlow.getTargetFlowElement();
+                if (targetFlowElement instanceof UserTask) {
+                    // The target is a user task, add it to the result list
+                    userTaskList.add((UserTask) targetFlowElement);
+                } else {
+                    // Not a user task, keep recursing to the next node
+                    getNextUserTasks(targetFlowElement, hasSequenceFlow, userTaskList);
+                }
+            }
+        }
+        return userTaskList;
+    }
+
+    /**
+     * Get all nodes after the given element
+     *
+     * @param source          {@link FlowElement}
+     * @param hasSequenceFlow {@link Set<String>}
+     * @param list            {@link List<String>}
+     * @return {@link List<String>}
+     * @since 2024/4/29 16:00
+     **/
+    public static List<String> getAfter(FlowElement source, Set<String> hasSequenceFlow, List<String> list) {
+        hasSequenceFlow = Optional.ofNullable(hasSequenceFlow).orElse(new HashSet<>());
+        list = Optional.ofNullable(list).orElse(new ArrayList<>());
+        // Get the outgoing sequence flows
+        List<SequenceFlow> sequenceFlows = getElementOutgoingFlows(source);
+        if (null != sequenceFlows) {
+            for (SequenceFlow sequenceFlow : sequenceFlows) {
+                // A repeated sequence flow means a loop, skip it
+                if (hasSequenceFlow.contains(sequenceFlow.getId())) {
+                    continue;
+                }
+                // Record the visited sequence flow
+                hasSequenceFlow.add(sequenceFlow.getId());
+                FlowElement targetFlowElement = sequenceFlow.getTargetFlowElement();
+                if (targetFlowElement instanceof UserTask) {
+                    // The target is a user task, add it to the result list
+                    list.add(targetFlowElement.getId());
+                } else if (targetFlowElement instanceof EndEvent) {
+                    list.add(targetFlowElement.getId());
+                    continue;
+                }
+                // Keep recursing to the next node
+                getAfter(targetFlowElement, hasSequenceFlow, list);
+            }
+        }
+        return list;
+    }
+
+    /**
+     * Get the outgoing sequence flows of a node; if a flow ends in a gateway, the gateway's outgoing flows are collected as well
+     *
+     * @param source {@link FlowElement}
+     * @return {@link List<SequenceFlow>}
+     * @since 2024/4/9 9:58
+     **/
+    public static List<SequenceFlow> getOutFlowsWithGateway(FlowElement source, List<SequenceFlow> flows) {
+        flows = flows == null ? new ArrayList<>() : flows;
+        // Get the outgoing sequence flows
+        List<SequenceFlow> sequenceFlows = getElementOutgoingFlows(source);
+        if (null != sequenceFlows) {
+            for (SequenceFlow sequenceFlow : sequenceFlows) {
+                flows.add(sequenceFlow);
+                FlowElement targetFlowElement = sequenceFlow.getTargetFlowElement();
+                if (targetFlowElement instanceof UserTask) {
+                    continue;
+                }
+                if (targetFlowElement instanceof Gateway) {
+                    Gateway gateway = (Gateway) targetFlowElement;
+                    List<SequenceFlow> outgoingFlows = gateway.getOutgoingFlows();
+                    flows.addAll(outgoingFlows);
+                    getOutFlowsWithGateway(gateway, flows);
+                }
+            }
+        }
+        return flows;
+    }
+
+    /**
+     * Get the outgoing flows, preserving the parent/child relationship through gateways
+     *
+     * @param source the source element
+     * @param flows  the result collection
+     */
+    public static List<FlowVo> getOutFlows(FlowElement source, List<FlowVo> flows) {
+        flows = flows == null ? new ArrayList<>() : flows;
+        // Get the outgoing sequence flows
+        List<SequenceFlow> sequenceFlows = getElementOutgoingFlows(source);
+        if (null != sequenceFlows) {
+            for (SequenceFlow sequenceFlow : sequenceFlows) {
+                FlowVo vo = new FlowVo();
+                vo.setKey(sequenceFlow.getId());
+
+                FlowElement targetFlowElement = sequenceFlow.getTargetFlowElement();
+                if (targetFlowElement instanceof Gateway) {
+                    Gateway gateway = (Gateway) targetFlowElement;
+                    List<FlowVo> list = getOutFlows(gateway, null);
+                    vo.setChildren(list);
+                }
+                flows.add(vo);
+            }
+        }
+        return flows;
+    }
+
+    /**
+     * Get the nodes before the given element
+     *
+     * @param source    {@link FlowElement}
+     * @param passFlows {@link Set<String>}
+     * @param keys      {@link List<String>}
+     * @return {@link List<String>}
+     * @since 2024/4/9 11:51
+     **/
+    public static List<String> getBefore(FlowElement source, Set<String> passFlows, List<String> keys) {
+        passFlows = passFlows == null ? new HashSet<>() : passFlows;
+        keys = keys == null ? new ArrayList<>() : keys;
+        List<SequenceFlow> sequenceFlows = getElementIncomingFlows(source);
+        if (null != sequenceFlows && !sequenceFlows.isEmpty()) {
+            for (SequenceFlow sequenceFlow : sequenceFlows) {
+                // Sequence flow already visited, skip to avoid loops
+                if (passFlows.contains(sequenceFlow.getId())) {
+                    continue;
+                }
+                // Record the visited sequence flow
+                passFlows.add(sequenceFlow.getId());
+                // Record the node key
+                keys.add(sequenceFlow.getSourceFlowElement().getId());
+                if (sequenceFlow.getSourceFlowElement() instanceof StartEvent) {
+                    continue;
+                }
+                // Recurse upstream
+                getBefore(sequenceFlow.getSourceFlowElement(), passFlows, keys);
+            }
+        }
+        return keys;
+    }
+
+    /**
+     * Get the user task that follows a sequence flow, skipping over gateways
+     *
+     * @return the id of the first user task reached, or null if none is found
+     */
+    public static String getTaskKeyAfterFlow(FlowElement source) {
+        if (source instanceof SequenceFlow) {
+            SequenceFlow sequenceFlow = (SequenceFlow) source;
+            FlowElement target = sequenceFlow.getTargetFlowElement();
+            if (target instanceof Gateway) {
+                List<SequenceFlow> outgoingFlows = ((Gateway) target).getOutgoingFlows();
+                if (CollectionUtil.isNotEmpty(outgoingFlows)) {
+                    SequenceFlow flow = outgoingFlows.get(0);
+                    return getTaskKeyAfterFlow(flow);
+                }
+            }
+            if (target instanceof UserTask) {
+                return target.getId();
+            }
+        }
+        return null;
+    }
+}
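A small, self-contained sketch of how the traversal helpers above are typically combined. The wrapper class and method are hypothetical; they only assume a BpmnModel has already been loaded (for example via RepositoryService.getBpmnModel).

import java.util.List;
import java.util.stream.Collectors;
import org.flowable.bpmn.model.BpmnModel;
import org.flowable.bpmn.model.FlowElement;
import org.flowable.bpmn.model.Process;
import org.flowable.bpmn.model.UserTask;
import jnpf.workflow.flowable.util.FlowableUtil;

public class FlowableUtilExample {

    /** Returns the ids of the user tasks reachable next from the given node key. */
    public static List<String> nextUserTaskIds(BpmnModel model, String currentNodeKey) {
        Process process = model.getProcesses().get(0);
        // Flatten the model (sub-processes included) and locate the current node by id.
        FlowElement source = FlowableUtil.getAllElements(process.getFlowElements(), null).stream()
                .filter(element -> element.getId().equals(currentNodeKey))
                .findFirst()
                .orElseThrow(() -> new IllegalArgumentException("Unknown node: " + currentNodeKey));
        // Follow the outgoing sequence flows until user tasks are reached; the visited-flow set guards against loops.
        return FlowableUtil.getNextUserTasks(source, null, null).stream()
                .map(UserTask::getId)
                .collect(Collectors.toList());
    }
}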

+ 1992 - 0
jnpf-workflow-flowable/src/main/java/liquibase/snapshot/JdbcDatabaseSnapshot.java

@@ -0,0 +1,1992 @@
+package liquibase.snapshot;
+
+import jnpf.workflow.common.util.FlowUtil;
+import liquibase.CatalogAndSchema;
+import liquibase.Scope;
+import liquibase.database.AbstractJdbcDatabase;
+import liquibase.database.Database;
+import liquibase.database.DatabaseConnection;
+import liquibase.database.LiquibaseTableNamesFactory;
+import liquibase.database.core.*;
+import liquibase.database.jvm.JdbcConnection;
+import liquibase.exception.DatabaseException;
+import liquibase.executor.jvm.ColumnMapRowMapper;
+import liquibase.executor.jvm.RowMapperNotNullConstraintsResultSetExtractor;
+import liquibase.structure.DatabaseObject;
+import liquibase.structure.core.Catalog;
+import liquibase.structure.core.Schema;
+import liquibase.structure.core.Table;
+import liquibase.structure.core.View;
+import liquibase.util.JdbcUtil;
+import liquibase.util.StringUtil;
+
+import java.sql.*;
+import java.util.*;
+
+public class JdbcDatabaseSnapshot extends DatabaseSnapshot {
+
+    private boolean warnedAboutDbaRecycleBin;
+    private static final boolean ignoreWarnAboutDbaRecycleBin = Boolean.getBoolean("liquibase.ignoreRecycleBinWarning");
+
+    private CachingDatabaseMetaData cachingDatabaseMetaData;
+
+    private Map<String, CachedRow> cachedExpressionMap = null;
+
+    private Set<String> userDefinedTypes;
+
+    public JdbcDatabaseSnapshot(DatabaseObject[] examples, Database database, SnapshotControl snapshotControl) throws DatabaseException, InvalidExampleException {
+        super(examples, database, snapshotControl);
+    }
+
+    public JdbcDatabaseSnapshot(DatabaseObject[] examples, Database database) throws DatabaseException, InvalidExampleException {
+        super(examples, database);
+    }
+
+    public CachingDatabaseMetaData getMetaDataFromCache() throws SQLException {
+        if (cachingDatabaseMetaData == null) {
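+            // Lazily wrap the JDBC connection's DatabaseMetaData in the caching helper defined below.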
+            DatabaseMetaData databaseMetaData = null;
+            if (getDatabase().getConnection() != null) {
+                databaseMetaData = ((JdbcConnection) getDatabase().getConnection()).getUnderlyingConnection().getMetaData();
+            }
+
+            cachingDatabaseMetaData = new CachingDatabaseMetaData(this.getDatabase(), databaseMetaData);
+        }
+        return cachingDatabaseMetaData;
+    }
+
+    public class CachingDatabaseMetaData {
+        private static final String SQL_FILTER_MATCH_ALL = "%";
+        private final DatabaseMetaData databaseMetaData;
+        private final Database database;
+
+        public CachingDatabaseMetaData(Database database, DatabaseMetaData metaData) {
+            this.databaseMetaData = metaData;
+            this.database = database;
+        }
+
+        public DatabaseMetaData getDatabaseMetaData() {
+            return databaseMetaData;
+        }
+
+        public List<CachedRow> getForeignKeys(final String catalogName, final String schemaName, final String tableName,
+                                              final String fkName) throws DatabaseException {
+            ForeignKeysResultSetCache foreignKeysResultSetCache = new ForeignKeysResultSetCache(database, catalogName, schemaName, tableName, fkName);
+            ResultSetCache importedKeys = getResultSetCache("getImportedKeys");
+            importedKeys.setBulkTracking(!(database instanceof MSSQLDatabase));
+
+            return importedKeys.get(foreignKeysResultSetCache);
+        }
+
+        public List<CachedRow> getIndexInfo(final String catalogName, final String schemaName, final String tableName, final String indexName) throws DatabaseException, SQLException {
+
+            return getResultSetCache("getIndexInfo").get(new ResultSetCache.UnionResultSetExtractor(database) {
+
+                public boolean isBulkFetchMode;
+
+                @Override
+                public ResultSetCache.RowData rowKeyParameters(CachedRow row) {
+                    return new ResultSetCache.RowData(row.getString("TABLE_CAT"), row.getString("TABLE_SCHEM"), database, row.getString("TABLE_NAME"), row.getString("INDEX_NAME"));
+                }
+
+                @Override
+                public ResultSetCache.RowData wantedKeyParameters() {
+                    return new ResultSetCache.RowData(catalogName, schemaName, database, tableName, indexName);
+                }
+
+                @Override
+                public boolean bulkContainsSchema(String schemaKey) {
+                    return getAllCatalogsStringScratchData() != null && database instanceof OracleDatabase;
+                }
+
+                @Override
+                public String getSchemaKey(CachedRow row) {
+                    return row.getString("TABLE_SCHEM");
+                }
+
+                @Override
+                public List<CachedRow> fastFetch() throws SQLException, DatabaseException {
+                    List<CachedRow> returnList = new ArrayList<>();
+
+                    CatalogAndSchema catalogAndSchema = new CatalogAndSchema(catalogName, schemaName).customize(database);
+                    if (database instanceof OracleDatabase) {
+                        warnAboutDbaRecycleBin();
+
+                        //oracle getIndexInfo is buggy and slow.  See Issue 1824548 and http://forums.oracle.com/forums/thread.jspa?messageID=578383&#578383
+                        String sql =
+                                "SELECT " +
+                                        "c.INDEX_NAME, " +
+                                        "3 AS TYPE, " +
+                                        "c.TABLE_OWNER AS TABLE_SCHEM, " +
+                                        "c.TABLE_NAME, " +
+                                        "c.COLUMN_NAME, " +
+                                        "c.COLUMN_POSITION AS ORDINAL_POSITION, " +
+                                        "NULL AS FILTER_CONDITION, " +
+                                        "c.INDEX_OWNER, " +
+                                        "CASE I.UNIQUENESS WHEN 'UNIQUE' THEN 0 ELSE 1 END AS NON_UNIQUE, " +
+                                        "CASE c.DESCEND WHEN 'Y' THEN 'D' WHEN 'DESC' THEN 'D' WHEN 'N' THEN 'A' WHEN 'ASC' THEN 'A' END AS ASC_OR_DESC, " +
+                                        "CASE WHEN tablespace_name = (SELECT default_tablespace FROM user_users) " +
+                                        "THEN NULL ELSE tablespace_name END AS tablespace_name  " +
+                                        "FROM ALL_IND_COLUMNS c " +
+                                        "JOIN ALL_INDEXES i ON i.owner=c.index_owner AND i.index_name = c.index_name and i.table_owner = c.table_owner " +
+                                        "LEFT OUTER JOIN " + (((OracleDatabase) database).canAccessDbaRecycleBin() ? "dba_recyclebin" : "user_recyclebin") + " d ON d.object_name=c.table_name ";
+                        if (!isBulkFetchMode || getAllCatalogsStringScratchData() == null) {
+                            sql += "WHERE c.TABLE_OWNER = '" + database.correctObjectName(catalogAndSchema.getCatalogName(), Schema.class) + "' ";
+                        } else {
+                            sql += "WHERE c.TABLE_OWNER IN ('" + database.correctObjectName(catalogAndSchema.getCatalogName(), Schema.class) + "', " + getAllCatalogsStringScratchData() + ")";
+                        }
+                        sql += "AND i.OWNER = c.TABLE_OWNER " +
+                                "AND d.object_name IS NULL ";
+
+
+                        if (!isBulkFetchMode && (tableName != null)) {
+                            sql += " AND c.TABLE_NAME='" + tableName + "'";
+                        }
+
+                        if (!isBulkFetchMode && (indexName != null)) {
+                            sql += " AND c.INDEX_NAME='" + indexName + "'";
+                        }
+
+                        sql += " ORDER BY c.INDEX_NAME, ORDINAL_POSITION";
+
+                        returnList.addAll(setIndexExpressions(executeAndExtract(sql, database)));
+                    } else if (database instanceof MSSQLDatabase) {
+                        String tableCat = "original_db_name()";
+
+                        if (9 <= database.getDatabaseMajorVersion()) {
+                            tableCat = "db_name()";
+                        }
+                        //fetch additional index info
+                        String sql = "SELECT " +
+                                tableCat + " as TABLE_CAT, " +
+                                "object_schema_name(i.object_id) as TABLE_SCHEM, " +
+                                "object_name(i.object_id) as TABLE_NAME, " +
+                                "CASE is_unique WHEN 1 then 0 else 1 end as NON_UNIQUE, " +
+                                "object_name(i.object_id) as INDEX_QUALIFIER, " +
+                                "i.name as INDEX_NAME, " +
+                                "case i.type when 1 then 1 ELSE 3 end as TYPE, " +
+                                "key_ordinal as ORDINAL_POSITION, " +
+                                "COL_NAME(c.object_id,c.column_id) AS COLUMN_NAME, " +
+                                "case is_descending_key when 0 then 'A' else 'D' end as ASC_OR_DESC, " +
+                                "null as CARDINALITY, " +
+                                "null as PAGES, " +
+                                "i.filter_definition as FILTER_CONDITION, " +
+                                "o.type AS INTERNAL_OBJECT_TYPE, " +
+                                "i.*, " +
+                                "c.*, " +
+                                "s.* " +
+                                "FROM sys.indexes i " +
+                                "join sys.index_columns c on i.object_id=c.object_id and i.index_id=c.index_id " +
+                                "join sys.stats s on i.object_id=s.object_id and i.name=s.name " +
+                                "join sys.objects o on i.object_id=o.object_id " +
+                                "WHERE object_schema_name(i.object_id)='" + database.correctObjectName(catalogAndSchema.getSchemaName(), Schema.class) + "'";
+
+                        if (!isBulkFetchMode && (tableName != null)) {
+                            sql += " AND object_name(i.object_id)='" + database.escapeStringForDatabase(tableName) + "'";
+                        }
+
+                        if (!isBulkFetchMode && (indexName != null)) {
+                            sql += " AND i.name='" + database.escapeStringForDatabase(indexName) + "'";
+                        }
+
+                        sql += "ORDER BY i.object_id, i.index_id, c.key_ordinal";
+
+                        returnList.addAll(executeAndExtract(sql, database));
+
+                    } else if (database instanceof Db2zDatabase) {
+                        List<String> parameters = new ArrayList<>(3);
+                        String sql = "SELECT i.CREATOR AS TABLE_SCHEM, " +
+                                "i.TBNAME AS TABLE_NAME, " +
+                                "i.NAME AS INDEX_NAME, " +
+                                "3 AS TYPE, " +
+                                "k.COLNAME AS COLUMN_NAME, " +
+                                "k.COLSEQ AS ORDINAL_POSITION, " +
+                                "CASE UNIQUERULE WHEN 'D' then 1 else 0 end as NON_UNIQUE, " +
+                                "k.ORDERING AS ORDER, " +
+                                "i.CREATOR AS INDEX_QUALIFIER " +
+                                "FROM SYSIBM.SYSKEYS k " +
+                                "JOIN SYSIBM.SYSINDEXES i " +
+                                "ON k.IXNAME = i.NAME " +
+                                "AND k.IXCREATOR = i.CREATOR " +
+                                "WHERE  i.CREATOR = ?";
+                        parameters.add(database.correctObjectName(catalogAndSchema.getSchemaName(), Schema.class));
+                        if (!isBulkFetchMode && tableName != null) {
+                            sql += " AND i.TBNAME = ?";
+                            parameters.add(database.escapeStringForDatabase(tableName));
+                        }
+
+                        if (!isBulkFetchMode && indexName != null) {
+                            sql += " AND i.NAME = ?";
+                            parameters.add(database.escapeStringForDatabase(indexName));
+                        }
+
+                        sql += "ORDER BY i.NAME, k.COLSEQ";
+
+                        returnList.addAll(executeAndExtract(database, sql, parameters.toArray()));
+                    } else if (!(database instanceof MariaDBDatabase) && database instanceof MySQLDatabase) {
+
+                        //mysql 8.0.13 introduced support for indexes on `lower(first_name)` which comes back in an "expression" column
+                        String filterConditionValue = "NULL";
+                        if (database.getDatabaseMajorVersion() > 8 || (database.getDatabaseMajorVersion() == 8 && ((MySQLDatabase) database).getDatabasePatchVersion() >= 13)) {
+                            filterConditionValue = "EXPRESSION";
+                        }
+
+                        StringBuilder sql = new StringBuilder("SELECT TABLE_CATALOG AS TABLE_CAT, TABLE_SCHEMA AS TABLE_SCHEM,");
+                        sql.append(" TABLE_NAME, NON_UNIQUE, NULL AS INDEX_QUALIFIER, INDEX_NAME,");
+                        sql.append(DatabaseMetaData.tableIndexOther);
+                        sql.append(" AS TYPE, SEQ_IN_INDEX AS ORDINAL_POSITION, COLUMN_NAME,");
+                        sql.append("COLLATION AS ASC_OR_DESC, CARDINALITY, 0 AS PAGES, " + filterConditionValue + " AS FILTER_CONDITION FROM INFORMATION_SCHEMA.STATISTICS WHERE");
+                        sql.append(" TABLE_SCHEMA = '").append(database.correctObjectName(catalogAndSchema.getCatalogName(), Catalog.class)).append("'");
+
+                        if (!isBulkFetchMode && tableName != null) {
+                            sql.append(" AND TABLE_NAME = '").append(database.escapeStringForDatabase(tableName)).append("'");
+                        }
+
+                        if (!isBulkFetchMode && indexName != null) {
+                            sql.append(" AND INDEX_NAME='").append(database.escapeStringForDatabase(indexName)).append("'");
+                        }
+
+                        sql.append("ORDER BY NON_UNIQUE, INDEX_NAME, SEQ_IN_INDEX");
+
+                        returnList.addAll(executeAndExtract(sql.toString(), database));
+                    } else {
+                        /*
+                         * If we do not know in which table to look for the index, things get a little bit ugly.
+                         * First, we get a collection of all tables within the catalogAndSchema, then iterate through
+                         * them until we (hopefully) find the index we are looking for.
+                         */
+                        List<String> tables = new ArrayList<>();
+                        if (tableName == null) {
+                            // Build a list of all candidate tables in the catalog/schema that might contain the index
+                            for (CachedRow row : getTables(((AbstractJdbcDatabase) database).getJdbcCatalogName(catalogAndSchema), ((AbstractJdbcDatabase) database).getJdbcSchemaName(catalogAndSchema), null)) {
+                                tables.add(row.getString("TABLE_NAME"));
+                            }
+                        } else {
+                            tables.add(tableName);
+                        }
+
+                        // Iterate through all the candidate tables and try to find the index.
+                        for (String tableName1 : tables) {
+                            ResultSet rs = databaseMetaData.getIndexInfo(
+                                    ((AbstractJdbcDatabase) database).getJdbcCatalogName(catalogAndSchema),
+                                    ((AbstractJdbcDatabase) database).getJdbcSchemaName(catalogAndSchema),
+                                    tableName1,
+                                    false,
+                                    true);
+                            List<CachedRow> rows = extract(rs, (database instanceof InformixDatabase));
+                            returnList.addAll(rows);
+                        }
+                    }
+
+                    return returnList;
+                }
+
+                private List<CachedRow> setIndexExpressions(List<CachedRow> c) throws DatabaseException, SQLException {
+                    Map<String, CachedRow> expressionMap = getCachedExpressionMap();
+                    c.forEach(row -> {
+                        row.set("FILTER_CONDITION", null);
+                        String key = row.getString("INDEX_OWNER") + "::" + row.getString("INDEX_NAME") + "::" +
+                                row.getInt("ORDINAL_POSITION");
+                        CachedRow fromMap = expressionMap.get(key);
+                        if (fromMap != null) {
+                            row.set("FILTER_CONDITION", fromMap.get("COLUMN_EXPRESSION"));
+                        }
+                    });
+                    return c;
+                }
+
+                private Map<String, CachedRow> getCachedExpressionMap() throws DatabaseException, SQLException {
+                    if (cachedExpressionMap != null) {
+                        return cachedExpressionMap;
+                    }
+                    String expSql = "SELECT e.column_expression, e.index_owner, e.index_name, e.column_position FROM all_ind_expressions e";
+                    List<CachedRow> ec = executeAndExtract(expSql, database);
+                    cachedExpressionMap = new HashMap<>();
+                    ec.forEach(row -> {
+                        String key = row.getString("INDEX_OWNER") + "::" + row.getString("INDEX_NAME") + "::" +
+                                row.getInt("COLUMN_POSITION");
+                        cachedExpressionMap.put(key, row);
+                    });
+                    return cachedExpressionMap;
+                }
+
+                @Override
+                public List<CachedRow> bulkFetch() throws SQLException, DatabaseException {
+                    this.isBulkFetchMode = true;
+                    return fastFetch();
+                }
+
+                @Override
+                protected boolean shouldBulkSelect(String schemaKey, ResultSetCache resultSetCache) {
+                    if (database instanceof OracleDatabase || database instanceof MSSQLDatabase) {
+                        return JdbcDatabaseSnapshot.this.getAllCatalogsStringScratchData() != null || (tableName == null && indexName == null) || super.shouldBulkSelect(schemaKey, resultSetCache);
+                    }
+                    return false;
+                }
+            });
+        }
+
+
+        protected void warnAboutDbaRecycleBin() {
+            if (!ignoreWarnAboutDbaRecycleBin && !warnedAboutDbaRecycleBin && !(((OracleDatabase) database).canAccessDbaRecycleBin())) {
+                Scope.getCurrentScope().getLog(getClass()).warning(((OracleDatabase) database).getDbaRecycleBinWarning());
+                warnedAboutDbaRecycleBin = true;
+            }
+        }
+
+        /**
+         * Return the columns for the given catalog, schema, table, and column.
+         */
+        public List<CachedRow> getColumns(final String catalogName, final String schemaName, final String tableName, final String columnName) throws SQLException, DatabaseException {
+
+            if ((database instanceof MSSQLDatabase) && (userDefinedTypes == null)) {
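+                // On SQL Server, load the user-defined type names once so later column snapshots can recognise them.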
+                userDefinedTypes = new HashSet<>();
+                DatabaseConnection databaseConnection = database.getConnection();
+                if (databaseConnection instanceof JdbcConnection) {
+                    Statement stmt = null;
+                    ResultSet resultSet = null;
+                    try {
+                        stmt = ((JdbcConnection) databaseConnection).getUnderlyingConnection().createStatement();
+                        resultSet = stmt.executeQuery("select name from " + (catalogName == null ? "" : "[" + catalogName + "].") + "sys.types where is_user_defined=1");
+                        while (resultSet.next()) {
+                            userDefinedTypes.add(resultSet.getString("name").toLowerCase());
+                        }
+                    } finally {
+                        JdbcUtil.close(resultSet, stmt);
+                    }
+                }
+            }
+            GetColumnResultSetCache getColumnResultSetCache = new GetColumnResultSetCache(database, catalogName,
+                    schemaName, tableName, columnName);
+            return getResultSetCache("getColumns").get(getColumnResultSetCache);
+        }
+
+        /**
+         * Return the NOT NULL constraints for the given catalog, schema, and table. Only Oracle is supported; other databases return an empty list.
+         */
+        public List<CachedRow> getNotNullConst(final String catalogName, final String schemaName,
+                                               final String tableName) throws DatabaseException {
+            if (!(database instanceof OracleDatabase)) {
+                return Collections.emptyList();
+            }
+            GetNotNullConstraintsResultSetCache getNotNullConstraintsResultSetCache = new GetNotNullConstraintsResultSetCache(database, catalogName,
+                    schemaName, tableName);
+            return getResultSetCache("getNotNullConst").get(getNotNullConstraintsResultSetCache);
+        }
+
+        private class GetColumnResultSetCache extends ResultSetCache.SingleResultSetExtractor {
+            final String catalogName;
+            final String schemaName;
+            final String tableName;
+            final String columnName;
+
+            private GetColumnResultSetCache(Database database, String catalogName, String schemaName, String tableName, String columnName) {
+                super(database);
+                this.catalogName = catalogName;
+                this.schemaName = schemaName;
+                this.tableName = tableName;
+                this.columnName = columnName;
+            }
+
+            @Override
+            public ResultSetCache.RowData rowKeyParameters(CachedRow row) {
+                return new ResultSetCache.RowData(row.getString("TABLE_CAT"), row.getString("TABLE_SCHEM"), database, row.getString("TABLE_NAME"), row.getString("COLUMN_NAME"));
+            }
+
+            @Override
+            public ResultSetCache.RowData wantedKeyParameters() {
+                return new ResultSetCache.RowData(catalogName, schemaName, database, tableName, columnName);
+            }
+
+            @Override
+            public boolean bulkContainsSchema(String schemaKey) {
+                String catalogs = getAllCatalogsStringScratchData();
+                return catalogs != null && schemaKey != null
+                        && catalogs.contains("'" + schemaKey.toUpperCase() + "'")
+                        && database instanceof OracleDatabase;
+            }
+
+            @Override
+            public String getSchemaKey(CachedRow row) {
+                return row.getString("TABLE_SCHEM");
+            }
+
+            @Override
+            protected boolean shouldBulkSelect(String schemaKey, ResultSetCache resultSetCache) {
+                LiquibaseTableNamesFactory liquibaseTableNamesFactory = Scope.getCurrentScope().getSingleton(LiquibaseTableNamesFactory.class);
+                List<String> liquibaseTableNames = liquibaseTableNamesFactory.getLiquibaseTableNames(database);
+                return liquibaseTableNames.stream().noneMatch(tableName::equalsIgnoreCase);
+            }
+
+            @Override
+            public List<CachedRow> fastFetchQuery() throws SQLException, DatabaseException {
+                if (database instanceof OracleDatabase) {
+                    return oracleQuery(false);
+                } else if (database instanceof MSSQLDatabase) {
+                    return mssqlQuery(false);
+                }
+                CatalogAndSchema catalogAndSchema = new CatalogAndSchema(catalogName, schemaName).customize(database);
+
+                try {
+                    List<CachedRow> returnList =
+                            extract(
+                                    databaseMetaData.getColumns(
+                                            ((AbstractJdbcDatabase) database).getJdbcCatalogName(catalogAndSchema),
+                                            escapeForLike(((AbstractJdbcDatabase) database).getJdbcSchemaName(catalogAndSchema), database),
+                                            escapeForLike(tableName, database),
+                                            SQL_FILTER_MATCH_ALL)
+                            );
+                    //
+                    // IF MARIADB OR SQL ANYWHERE
+                    // Query to get actual data types and then map each column to its CachedRow
+                    //
+                    determineActualDataTypes(returnList, tableName);
+                    return returnList;
+                } catch (SQLException e) {
+                    if (shouldReturnEmptyColumns(e)) { //view with table already dropped. Act like it has no columns.
+                        return new ArrayList<>();
+                    } else {
+                        throw e;
+                    }
+                }
+            }
+
+            @Override
+            public List<CachedRow> bulkFetchQuery() throws SQLException, DatabaseException {
+                if (database instanceof OracleDatabase) {
+                    return oracleQuery(true);
+                } else if (database instanceof MSSQLDatabase) {
+                    return mssqlQuery(true);
+                }
+
+                CatalogAndSchema catalogAndSchema = new CatalogAndSchema(catalogName, schemaName).customize(database);
+
+                try {
+                    List<CachedRow> returnList =
+                            extract(databaseMetaData.getColumns(((AbstractJdbcDatabase) database)
+                                            .getJdbcCatalogName(catalogAndSchema),
+                                    escapeForLike(((AbstractJdbcDatabase) database).getJdbcSchemaName(catalogAndSchema), database),
+                                    SQL_FILTER_MATCH_ALL, SQL_FILTER_MATCH_ALL));
+                    //
+                    // IF MARIADB OR SQL ANYWHERE
+                    // Query to get actual data types and then map each column to its CachedRow
+                    //
+                    determineActualDataTypes(returnList, null);
+                    return returnList;
+                } catch (SQLException e) {
+                    if (shouldReturnEmptyColumns(e)) {
+                        return new ArrayList<>();
+                    } else {
+                        throw e;
+                    }
+                }
+            }
+
+            //
+            // For MariaDB, query for the data type column so that we can correctly
+            // set the DATETIME(6) type if specified
+            //
+            // For SQL Anywhere, query for the scale column so we can correctly
+            // set the size unit
+            //
+            private void determineActualDataTypes(List<CachedRow> returnList, String tableName) throws SQLException {
+                //
+                // If not MariaDB / SQL Anywhere then just return
+                //
+                if (!(database instanceof MariaDBDatabase || database instanceof SybaseASADatabase)) {
+                    return;
+                }
+
+                if (database instanceof SybaseASADatabase) {
+                    //
+                    // Query for actual data type for column. The actual SYSTABCOL.scale column value is
+                    // not reported by the DatabaseMetaData.getColumns() query for CHAR-limited (in contrast
+                    // to BYTE-limited) columns, and it is needed to capture the kind of limitation.
+                    // The actual SYSTABCOL.column_type is not reported by the DatabaseMetaData.getColumns()
+                    // query as the IS_GENERATEDCOLUMN column is missing in the result set, and it is needed to
+                    // capture the kind of column (regular or computed).
+                    //
+                    // See https://help.sap.com/docs/SAP_SQL_Anywhere/93079d4ba8e44920ae63ffb4def91f5b/3beaa3956c5f1014883cb0c3e3559cc9.html.
+                    //
+                    String selectStatement =
+                            "SELECT table_name, column_name, scale, column_type FROM SYSTABCOL KEY JOIN SYSTAB KEY JOIN SYSUSER " +
+                                    "WHERE user_name = ? AND (? IS NULL OR table_name = ?)";
+                    Connection underlyingConnection = ((JdbcConnection) database.getConnection()).getUnderlyingConnection();
+                    try (PreparedStatement stmt = underlyingConnection.prepareStatement(selectStatement)) {
+                        stmt.setString(1, schemaName);
+                        stmt.setString(2, tableName);
+                        stmt.setString(3, tableName);
+                        try (ResultSet columnSelectRS = stmt.executeQuery()) {
+                            while (columnSelectRS.next()) {
+                                String selectedTableName = columnSelectRS.getString("table_name");
+                                String selectedColumnName = columnSelectRS.getString("column_name");
+                                int selectedScale = columnSelectRS.getInt("scale");
+                                String selectedColumnType = columnSelectRS.getString("column_type");
+                                for (CachedRow row : returnList) {
+                                    String rowTableName = row.getString("TABLE_NAME");
+                                    String rowColumnName = row.getString("COLUMN_NAME");
+                                    if (rowTableName.equalsIgnoreCase(selectedTableName) &&
+                                            rowColumnName.equalsIgnoreCase(selectedColumnName)) {
+                                        int rowDataType = row.getInt("DATA_TYPE");
+                                        if (rowDataType == Types.VARCHAR || rowDataType == Types.CHAR) {
+                                            row.set("scale", selectedScale);
+                                        }
+                                        row.set("IS_GENERATEDCOLUMN", "C".equals(selectedColumnType) ? "YES" : "NO");
+                                        break;
+                                    }
+                                }
+                            }
+                        }
+                    } catch (SQLException sqle) {
+                        throw new RuntimeException(sqle);
+                    }
+                    return;
+                }
+
+                //
+                // Query for actual data type for column. The actual DATA_TYPE column string is
+                // not returned by the DatabaseMetaData.getColumns() query, and it is needed
+                // to capture DATETIME(<precision>) data types.
+                //
+                StringBuilder selectStatement = new StringBuilder(
+                        "SELECT TABLE_SCHEMA, TABLE_NAME, COLUMN_NAME, DATA_TYPE FROM INFORMATION_SCHEMA.COLUMNS WHERE TABLE_SCHEMA = ?");
+                if(tableName != null) {
+                    selectStatement.append(" AND TABLE_NAME = ?");
+                }
+                Connection underlyingConnection = ((JdbcConnection) database.getConnection()).getUnderlyingConnection();
+                PreparedStatement statement = underlyingConnection.prepareStatement(selectStatement.toString());
+                statement.setString(1, schemaName);
+                if (tableName != null) {
+                    statement.setString(2, tableName);
+                }
+                try {
+                    ResultSet columnSelectRS = statement.executeQuery();
+                    //
+                    // Iterate the result set from the query and match the rows
+                    // to the rows that were returned by getColumns() in order
+                    // to assign the actual DATA_TYPE string to the appropriate row.
+                    //
+                    while (columnSelectRS.next()) {
+                        String selectedTableName = columnSelectRS.getString("TABLE_NAME");
+                        String selectedColumnName = columnSelectRS.getString("COLUMN_NAME");
+                        String actualDataType = columnSelectRS.getString("DATA_TYPE");
+                        for (CachedRow row : returnList) {
+                            String rowTableName = row.getString("TABLE_NAME");
+                            String rowColumnName = row.getString("COLUMN_NAME");
+                            String rowTypeName = row.getString("TYPE_NAME");
+                            int rowDataType = row.getInt("DATA_TYPE");
+                            if (rowTableName.equalsIgnoreCase(selectedTableName) &&
+                                    rowColumnName.equalsIgnoreCase(selectedColumnName) &&
+                                    rowTypeName.equalsIgnoreCase("datetime") &&
+                                    rowDataType == Types.OTHER &&
+                                    !rowTypeName.equalsIgnoreCase(actualDataType)) {
+                                row.set("TYPE_NAME", actualDataType);
+                                row.set("DATA_TYPE", Types.TIMESTAMP);
+                                break;
+                            }
+                        }
+                    }
+                } catch (SQLException sqle) {
+                    //
+                    // Do not stop
+                    //
+                } finally {
+                    JdbcUtil.closeStatement(statement);
+                }
+            }
+
+            protected boolean shouldReturnEmptyColumns(SQLException e) {
+                return e.getMessage().contains("references invalid table"); //view with table already dropped. Act like it has no columns.
+            }
+
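+            // Column lookup that bypasses DatabaseMetaData and reads ALL_TAB_COLS / ALL_COL_COMMENTS
+            // directly, mapping Oracle type names to JDBC type codes in SQL. On Oracle 12c and later
+            // it also joins ALL_TAB_IDENTITY_COLS to pick up identity column metadata.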
+            protected List<CachedRow> oracleQuery(boolean bulk) throws DatabaseException, SQLException {
+                CatalogAndSchema catalogAndSchema = new CatalogAndSchema(catalogName, schemaName).customize(database);
+
+                String jdbcSchemaName = ((AbstractJdbcDatabase) database).getJdbcSchemaName(catalogAndSchema);
+                boolean collectIdentityData = database.getDatabaseMajorVersion() >= OracleDatabase.ORACLE_12C_MAJOR_VERSION;
+
+                String sql = "select NULL AS TABLE_CAT, OWNER AS TABLE_SCHEM, 'NO' as IS_AUTOINCREMENT, cc.COMMENTS AS REMARKS," +
+                        "OWNER, TABLE_NAME, COLUMN_NAME, DATA_TYPE AS DATA_TYPE_NAME, DATA_TYPE_MOD, DATA_TYPE_OWNER, " +
+                        // note: oracle reports DATA_LENGTH=4*CHAR_LENGTH when using VARCHAR( <N> CHAR ), thus BYTEs
+                        "DECODE (c.data_type, 'CHAR', 1, 'VARCHAR2', 12, 'NUMBER', 3, 'LONG', -1, 'DATE', " + "93" + ", 'RAW', -3, 'LONG RAW', -4, 'BLOB', 2004, 'CLOB', 2005, 'BFILE', -13, 'FLOAT', 6, 'TIMESTAMP(6)', 93, 'TIMESTAMP(6) WITH TIME ZONE', -101, 'TIMESTAMP(6) WITH LOCAL TIME ZONE', -102, 'INTERVAL YEAR(2) TO MONTH', -103, 'INTERVAL DAY(2) TO SECOND(6)', -104, 'BINARY_FLOAT', 100, 'BINARY_DOUBLE', 101, 'XMLTYPE', 2009, 1111) AS data_type, " +
+                        "DECODE( CHAR_USED, 'C',CHAR_LENGTH, DATA_LENGTH ) as DATA_LENGTH, " +
+                        "DATA_PRECISION, DATA_SCALE, NULLABLE, COLUMN_ID as ORDINAL_POSITION, DEFAULT_LENGTH, " +
+                        "DATA_DEFAULT, " +
+                        "NUM_BUCKETS, CHARACTER_SET_NAME, " +
+                        "CHAR_COL_DECL_LENGTH, CHAR_LENGTH, " +
+                        "CHAR_USED, VIRTUAL_COLUMN ";
+                if (collectIdentityData) {
+                    sql += ", DEFAULT_ON_NULL, IDENTITY_COLUMN, ic.GENERATION_TYPE ";
+                }
+                sql += "FROM ALL_TAB_COLS c " +
+                        "JOIN ALL_COL_COMMENTS cc USING ( OWNER, TABLE_NAME, COLUMN_NAME ) ";
+                if (collectIdentityData) {
+                    sql += "LEFT JOIN ALL_TAB_IDENTITY_COLS ic USING (OWNER, TABLE_NAME, COLUMN_NAME ) ";
+                }
+                if (!bulk || getAllCatalogsStringScratchData() == null) {
+                    sql += "WHERE OWNER='" + jdbcSchemaName + "' AND hidden_column='NO'";
+                } else {
+                    sql += "WHERE OWNER IN ('" + jdbcSchemaName + "', " + getAllCatalogsStringScratchData() + ") AND hidden_column='NO'";
+                }
+
+                if (!bulk) {
+                    if (tableName != null) {
+                        sql += " AND TABLE_NAME='" + database.escapeStringForDatabase(tableName) + "'";
+                    }
+                    if (columnName != null) {
+                        sql += " AND COLUMN_NAME='" + database.escapeStringForDatabase(columnName) + "'";
+                    }
+                }
+                sql += " AND " + ((OracleDatabase) database).getSystemTableWhereClause("TABLE_NAME");
+                sql += " ORDER BY OWNER, TABLE_NAME, c.COLUMN_ID";
+
+                return this.executeAndExtract(sql, database);
+            }
+
+
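+            // Column lookup against sys.columns / sys.types with an explicit SQL Server name to
+            // JDBC type-code mapping. Column remarks come from sys.extended_properties (MS_Description)
+            // when the server supports it; see the placeholder substitution further down.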
+            protected List<CachedRow> mssqlQuery(boolean bulk) throws DatabaseException, SQLException {
+                CatalogAndSchema catalogAndSchema = new CatalogAndSchema(catalogName, schemaName).customize(database);
+
+                String databaseName = StringUtil.trimToNull(database.correctObjectName(catalogAndSchema.getCatalogName(), Catalog.class));
+                String dbIdParam;
+                String databasePrefix;
+                if (databaseName == null) {
+                    databasePrefix = "";
+                    dbIdParam = "";
+                } else {
+                    dbIdParam = ", db_id('" + databaseName + "')";
+                    databasePrefix = "[" + databaseName + "].";
+                }
+
+                String sql = "select " +
+                        "db_name(" + (databaseName == null ? "" : "db_id('" + databaseName + "')") + ") AS TABLE_CAT, " +
+                        "object_schema_name(c.object_id" + dbIdParam + ") AS TABLE_SCHEM, " +
+                        "object_name(c.object_id" + dbIdParam + ") AS TABLE_NAME, " +
+                        "c.name AS COLUMN_NAME, " +
+                        "is_filestream AS IS_FILESTREAM, " +
+                        "is_rowguidcol AS IS_ROWGUIDCOL, " +
+                        "CASE WHEN c.is_identity = 'true' THEN 'YES' ELSE 'NO' END as IS_AUTOINCREMENT, " +
+                        "{REMARKS_COLUMN_PLACEHOLDER}" +
+                        "t.name AS TYPE_NAME, " +
+                        "dc.name as COLUMN_DEF_NAME, " +
+                        "dc.definition as COLUMN_DEF, " +
+                        // data type mapping from https://msdn.microsoft.com/en-us/library/ms378878(v=sql.110).aspx
+                        "CASE t.name " +
+                        "WHEN 'bigint' THEN " + Types.BIGINT + " " +
+                        "WHEN 'binary' THEN " + Types.BINARY + " " +
+                        "WHEN 'bit' THEN " + Types.BIT + " " +
+                        "WHEN 'char' THEN " + Types.CHAR + " " +
+                        "WHEN 'date' THEN " + Types.DATE + " " +
+                        "WHEN 'datetime' THEN " + Types.TIMESTAMP + " " +
+                        "WHEN 'datetime2' THEN " + Types.TIMESTAMP + " " +
+                        "WHEN 'datetimeoffset' THEN -155 " +
+                        "WHEN 'decimal' THEN " + Types.DECIMAL + " " +
+                        "WHEN 'float' THEN " + Types.DOUBLE + " " +
+                        "WHEN 'image' THEN " + Types.LONGVARBINARY + " " +
+                        "WHEN 'int' THEN " + Types.INTEGER + " " +
+                        "WHEN 'money' THEN " + Types.DECIMAL + " " +
+                        "WHEN 'nchar' THEN " + Types.NCHAR + " " +
+                        "WHEN 'ntext' THEN " + Types.LONGNVARCHAR + " " +
+                        "WHEN 'numeric' THEN " + Types.NUMERIC + " " +
+                        "WHEN 'nvarchar' THEN " + Types.NVARCHAR + " " +
+                        "WHEN 'real' THEN " + Types.REAL + " " +
+                        "WHEN 'smalldatetime' THEN " + Types.TIMESTAMP + " " +
+                        "WHEN 'smallint' THEN " + Types.SMALLINT + " " +
+                        "WHEN 'smallmoney' THEN " + Types.DECIMAL + " " +
+                        "WHEN 'text' THEN " + Types.LONGVARCHAR + " " +
+                        "WHEN 'time' THEN " + Types.TIME + " " +
+                        "WHEN 'timestamp' THEN " + Types.BINARY + " " +
+                        "WHEN 'tinyint' THEN " + Types.TINYINT + " " +
+                        "WHEN 'udt' THEN " + Types.VARBINARY + " " +
+                        "WHEN 'uniqueidentifier' THEN " + Types.CHAR + " " +
+                        "WHEN 'varbinary' THEN " + Types.VARBINARY + " " +
+                        "WHEN 'varbinary(max)' THEN " + Types.VARBINARY + " " +
+                        "WHEN 'varchar' THEN " + Types.VARCHAR + " " +
+                        "WHEN 'varchar(max)' THEN " + Types.VARCHAR + " " +
+                        "WHEN 'xml' THEN " + Types.LONGVARCHAR + " " +
+                        "WHEN 'LONGNVARCHAR' THEN " + Types.SQLXML + " " +
+                        "ELSE " + Types.OTHER + " END AS DATA_TYPE, " +
+                        "CASE WHEN c.is_nullable = 'true' THEN 1 ELSE 0 END AS NULLABLE, " +
+                        "10 as NUM_PREC_RADIX, " +
+                        "c.column_id as ORDINAL_POSITION, " +
+                        "c.scale as DECIMAL_DIGITS, " +
+                        "c.max_length as COLUMN_SIZE, " +
+                        "c.precision as DATA_PRECISION, " +
+                        "c.is_computed as IS_COMPUTED " +
+                        "FROM " + databasePrefix + "sys.columns c " +
+                        "inner join " + databasePrefix + "sys.types t on c.user_type_id=t.user_type_id " +
+                        "{REMARKS_JOIN_PLACEHOLDER}" +
+                        "left outer join " + databasePrefix + "sys.default_constraints dc on dc.parent_column_id = c.column_id AND dc.parent_object_id=c.object_id AND type_desc='DEFAULT_CONSTRAINT' " +
+                        "WHERE object_schema_name(c.object_id" + dbIdParam + ")='" + ((AbstractJdbcDatabase) database).getJdbcSchemaName(catalogAndSchema) + "'";
+
+
+                if (!bulk) {
+                    if (tableName != null) {
+                        sql += " and object_name(c.object_id" + dbIdParam + ")='" + database.escapeStringForDatabase(tableName) + "'";
+                    }
+                    if (columnName != null) {
+                        sql += " and c.name='" + database.escapeStringForDatabase(columnName) + "'";
+                    }
+                }
+                sql += "order by object_schema_name(c.object_id" + dbIdParam + "), object_name(c.object_id" + dbIdParam + "), c.column_id";
+
+
+                // sys.extended_properties is added to Azure on V12: https://feedback.azure.com/forums/217321-sql-database/suggestions/6549815-add-sys-extended-properties-for-meta-data-support
+                if ((!((MSSQLDatabase) database).isAzureDb()) // Either NOT AzureDB (=SQL Server 2008 or higher)
+                        || (database.getDatabaseMajorVersion() >= 12)) { // or at least AzureDB v12
+                    // SQL Server 2005 or later
+                    // https://technet.microsoft.com/en-us/library/ms177541.aspx
+                    sql = sql.replace("{REMARKS_COLUMN_PLACEHOLDER}", "CAST([ep].[value] AS [nvarchar](MAX)) AS [REMARKS], ");
+                    sql = sql.replace("{REMARKS_JOIN_PLACEHOLDER}", "left outer join " + databasePrefix + "[sys].[extended_properties] AS [ep] ON [ep].[class] = 1 " +
+                            "AND [ep].[major_id] = c.object_id " +
+                            "AND [ep].[minor_id] = column_id " +
+                            "AND [ep].[name] = 'MS_Description' ");
+                } else {
+                    sql = sql.replace("{REMARKS_COLUMN_PLACEHOLDER}", "");
+                    sql = sql.replace("{REMARKS_JOIN_PLACEHOLDER}", "");
+                }
+
+                List<CachedRow> rows = this.executeAndExtract(sql, database);
+
+                for (CachedRow row : rows) {
+                    String typeName = row.getString("TYPE_NAME");
+                    if ("nvarchar".equals(typeName) || "nchar".equals(typeName)) {
+                        Integer size = row.getInt("COLUMN_SIZE");
+                        if (size > 0) {
+                            row.set("COLUMN_SIZE", size / 2);
+                        }
+                    } else if ((row.getInt("DATA_PRECISION") != null) && (row.getInt("DATA_PRECISION") > 0)) {
+                        row.set("COLUMN_SIZE", row.getInt("DATA_PRECISION"));
+                    }
+                }
+
+                return rows;
+            }
+
+            @Override
+            protected List<CachedRow> extract(ResultSet resultSet, boolean informixIndexTrimHint) throws SQLException {
+                List<CachedRow> rows = super.extract(resultSet, informixIndexTrimHint);
+                if ((database instanceof MSSQLDatabase) && !userDefinedTypes.isEmpty()) { //UDT types in MSSQL don't take parameters
+                    for (CachedRow row : rows) {
+                        String dataType = (String) row.get("TYPE_NAME");
+                        if (userDefinedTypes.contains(dataType.toLowerCase())) {
+                            row.set("COLUMN_SIZE", null);
+                            row.set("DECIMAL_DIGITS", null);
+                        }
+                    }
+                }
+                return rows;
+            }
+        }
+
+        private class ForeignKeysResultSetCache extends ResultSetCache.UnionResultSetExtractor {
+            final String catalogName;
+            final String schemaName;
+            final String tableName;
+            final String fkName;
+
+            private ForeignKeysResultSetCache(Database database, String catalogName, String schemaName, String tableName, String fkName) {
+                super(database);
+                this.catalogName = catalogName;
+                this.schemaName = schemaName;
+                this.tableName = tableName;
+                this.fkName = fkName;
+            }
+
+            @Override
+            public ResultSetCache.RowData rowKeyParameters(CachedRow row) {
+                return new ResultSetCache.RowData(row.getString("FKTABLE_CAT"), row.getString("FKTABLE_SCHEM"), database, row.getString("FKTABLE_NAME"), row.getString("FK_NAME"));
+            }
+
+            @Override
+            public ResultSetCache.RowData wantedKeyParameters() {
+                return new ResultSetCache.RowData(catalogName, schemaName, database, tableName, fkName);
+            }
+
+            @Override
+            public boolean bulkContainsSchema(String schemaKey) {
+                return database instanceof OracleDatabase;
+            }
+
+            @Override
+            public String getSchemaKey(CachedRow row) {
+                return row.getString("FKTABLE_SCHEM");
+            }
+
+            @Override
+            public List<CachedRow> fastFetch() throws SQLException, DatabaseException {
+                CatalogAndSchema catalogAndSchema = new CatalogAndSchema(catalogName, schemaName).customize(database);
+
+                String jdbcCatalogName = ((AbstractJdbcDatabase) database).getJdbcCatalogName(catalogAndSchema);
+                String jdbcSchemaName = ((AbstractJdbcDatabase) database).getJdbcSchemaName(catalogAndSchema);
+
+                if (database instanceof DB2Database) {
+                    if (database.getDatabaseProductName().startsWith("DB2 UDB for AS/400")) {
+                        return executeAndExtract(getDB2ForAs400Sql(jdbcSchemaName, tableName), database);
+                    }
+                    return queryDB2Luw(jdbcSchemaName, tableName);
+                } else if (database instanceof Db2zDatabase) {
+                    return queryDb2Zos(catalogAndSchema, tableName);
+                } else {
+                    List<String> tables = new ArrayList<>();
+                    if (tableName == null) {
+                        for (CachedRow row : getTables(jdbcCatalogName, jdbcSchemaName, null)) {
+                            tables.add(row.getString("TABLE_NAME"));
+                        }
+                    } else {
+                        tables.add(tableName);
+                    }
+
+                    List<CachedRow> returnList = new ArrayList<>();
+                    for (String foundTable : tables) {
+                        if (database instanceof OracleDatabase) {
+                            throw new RuntimeException("Should have bulk selected");
+                        } else {
+                            returnList.addAll(extract(databaseMetaData.getImportedKeys(jdbcCatalogName, jdbcSchemaName, foundTable)));
+                        }
+                    }
+
+                    return returnList;
+                }
+            }
+
+            @Override
+            public List<CachedRow> bulkFetch() throws SQLException, DatabaseException {
+                if (database instanceof OracleDatabase) {
+                    CatalogAndSchema catalogAndSchema = new CatalogAndSchema(catalogName, schemaName).customize(database);
+                    String jdbcSchemaName = ((AbstractJdbcDatabase) database).getJdbcSchemaName(catalogAndSchema);
+                    String sql = getOracleSql(jdbcSchemaName);
+                    return executeAndExtract(sql, database);
+                } else if (database instanceof DB2Database) {
+                    CatalogAndSchema catalogAndSchema = new CatalogAndSchema(catalogName, schemaName).customize(database);
+                    String jdbcSchemaName = ((AbstractJdbcDatabase) database).getJdbcSchemaName(catalogAndSchema);
+                    if (database.getDatabaseProductName().startsWith("DB2 UDB for AS/400")) {
+                        return executeAndExtract(getDB2ForAs400Sql(jdbcSchemaName, null), database);
+                    }
+                    return queryDB2Luw(jdbcSchemaName, null);
+                } else if (database instanceof Db2zDatabase) {
+                    CatalogAndSchema catalogAndSchema = new CatalogAndSchema(catalogName, schemaName).customize(database);
+                    return queryDb2Zos(catalogAndSchema, null);
+                } else if (database instanceof MSSQLDatabase) {
+                    CatalogAndSchema catalogAndSchema = new CatalogAndSchema(catalogName, schemaName).customize(database);
+                    String jdbcSchemaName = ((AbstractJdbcDatabase) database).getJdbcSchemaName(catalogAndSchema);
+                    String sql = getMSSQLSql(jdbcSchemaName, tableName);
+                    return executeAndExtract(sql, database);
+                } else {
+                    throw new RuntimeException("Cannot bulk select");
+                }
+            }
+
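+            // Builds the Oracle foreign key query from ALL_CONSTRAINTS / ALL_CONS_COLUMNS,
+            // matching 'R' constraints to their referenced 'P'/'U' constraints and skipping
+            // recycle-bin (BIN$...) tables.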
+            protected String getOracleSql(String jdbcSchemaName) {
+                String sql = "SELECT  /*+rule*/" +
+                        "  NULL AS pktable_cat,  " +
+                        "  p.owner as pktable_schem,  " +
+                        "  p.table_name as pktable_name,  " +
+                        "  pc.column_name as pkcolumn_name,  " +
+                        "  NULL as fktable_cat,  " +
+                        "  f.owner as fktable_schem,  " +
+                        "  f.table_name as fktable_name,  " +
+                        "  fc.column_name as fkcolumn_name,  " +
+                        "  fc.position as key_seq,  " +
+                        "  NULL as update_rule,  " +
+                        "  decode (f.delete_rule, 'CASCADE', 0, 'SET NULL', 2, 1) as delete_rule,  " +
+                        "  f.constraint_name as fk_name,  " +
+                        "  p.constraint_name as pk_name,  " +
+                        "  decode(f.deferrable, 'DEFERRABLE', 5, 'NOT DEFERRABLE', 7, 'DEFERRED', 6) deferrability,  " +
+                        "  f.validated as fk_validate " +
+                        "FROM " +
+                        "all_cons_columns pc " +
+                        "INNER JOIN all_constraints p " +
+                        "ON pc.owner = p.owner " +
+                        "AND pc.constraint_name = p.constraint_name " +
+                        "INNER JOIN all_constraints f " +
+                        "ON pc.owner = f.r_owner " +
+                        "AND pc.constraint_name = f.r_constraint_name " +
+                        "INNER JOIN all_cons_columns fc " +
+                        "ON fc.owner = f.owner " +
+                        "AND fc.constraint_name = f.constraint_name " +
+                        "AND fc.position = pc.position ";
+                if (getAllCatalogsStringScratchData() == null) {
+                    sql += "WHERE f.owner = '" + jdbcSchemaName + "' ";
+                } else {
+                    sql += "WHERE f.owner IN ('" + jdbcSchemaName + "', " + getAllCatalogsStringScratchData() + ") ";
+                }
+                sql += "AND p.constraint_type in ('P', 'U') " +
+                        "AND f.constraint_type = 'R' " +
+                        "AND p.table_name NOT LIKE 'BIN$%' " +
+                        "ORDER BY fktable_schem, fktable_name, key_seq";
+                return sql;
+            }
+
+            protected String getMSSQLSql(String jdbcSchemaName, String tableName) {
+                //comes from select object_definition(object_id('sp_fkeys'))
+                return "select " +
+                        "convert(sysname,db_name()) AS PKTABLE_CAT, " +
+                        "convert(sysname,schema_name(o1.schema_id)) AS PKTABLE_SCHEM, " +
+                        "convert(sysname,o1.name) AS PKTABLE_NAME, " +
+                        "convert(sysname,c1.name) AS PKCOLUMN_NAME, " +
+                        "convert(sysname,db_name()) AS FKTABLE_CAT, " +
+                        "convert(sysname,schema_name(o2.schema_id)) AS FKTABLE_SCHEM, " +
+                        "convert(sysname,o2.name) AS FKTABLE_NAME, " +
+                        "convert(sysname,c2.name) AS FKCOLUMN_NAME, " +
+                        "isnull(convert(smallint,k.constraint_column_id), convert(smallint,0)) AS KEY_SEQ, " +
+                        "convert(smallint, case ObjectProperty(f.object_id, 'CnstIsUpdateCascade') when 1 then 0 else 1 end) AS UPDATE_RULE, " +
+                        "convert(smallint, case ObjectProperty(f.object_id, 'CnstIsDeleteCascade') when 1 then 0 else 1 end) AS DELETE_RULE, " +
+                        "convert(sysname,object_name(f.object_id)) AS FK_NAME, " +
+                        "convert(sysname,i.name) AS PK_NAME, " +
+                        "convert(smallint, 7) AS DEFERRABILITY " +
+                        "from " +
+                        "sys.objects o1, " +
+                        "sys.objects o2, " +
+                        "sys.columns c1, " +
+                        "sys.columns c2, " +
+                        "sys.foreign_keys f inner join " +
+                        "sys.foreign_key_columns k on (k.constraint_object_id = f.object_id) inner join " +
+                        "sys.indexes i on (f.referenced_object_id = i.object_id and f.key_index_id = i.index_id) " +
+                        "where " +
+                        "o1.object_id = f.referenced_object_id and " +
+                        "o2.object_id = f.parent_object_id and " +
+                        "c1.object_id = f.referenced_object_id and " +
+                        "c2.object_id = f.parent_object_id and " +
+                        "c1.column_id = k.referenced_column_id and " +
+                        "c2.column_id = k.parent_column_id and " +
+                        "((object_schema_name(o1.object_id)='" + jdbcSchemaName + "'" +
+                        " and convert(sysname,schema_name(o2.schema_id))='" + jdbcSchemaName + "' and " +
+                        "convert(sysname,o2.name)='" + tableName + "' ) or ( convert(sysname,schema_name" +
+                        "(o2.schema_id))='" + jdbcSchemaName + "' and convert(sysname,o2.name)='" + tableName +
+                        "' )) order by 5, 6, 7, 9, 8";
+            }
+
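+            // Foreign key query for DB2 LUW built on SYSCAT.REFERENCES / SYSCAT.KEYCOLUSE;
+            // schema and (optionally) table name are bound as parameters rather than inlined.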
+            private List<CachedRow> queryDB2Luw(String jdbcSchemaName, String tableName) throws DatabaseException, SQLException {
+                List<String> parameters = new ArrayList<>(2);
+                StringBuilder sql = new StringBuilder ("SELECT " +
+                        "  pk_col.tabschema AS pktable_cat,  " +
+                        "  pk_col.tabname as pktable_name,  " +
+                        "  pk_col.colname as pkcolumn_name, " +
+                        "  fk_col.tabschema as fktable_cat,  " +
+                        "  fk_col.tabname as fktable_name,  " +
+                        "  fk_col.colname as fkcolumn_name, " +
+                        "  fk_col.colseq as key_seq,  " +
+                        "  decode (ref.updaterule, 'A', 3, 'R', 1, 1) as update_rule,  " +
+                        "  decode (ref.deleterule, 'A', 3, 'C', 0, 'N', 2, 'R', 1, 1) as delete_rule,  " +
+                        "  ref.constname as fk_name,  " +
+                        "  ref.refkeyname as pk_name,  " +
+                        "  7 as deferrability  " +
+                        "FROM " +
+                        "syscat.references ref " +
+                        "join syscat.keycoluse fk_col on ref.constname=fk_col.constname and ref.tabschema=fk_col.tabschema and ref.tabname=fk_col.tabname " +
+                        "join syscat.keycoluse pk_col on ref.refkeyname=pk_col.constname and ref.reftabschema=pk_col.tabschema and ref.reftabname=pk_col.tabname and pk_col.colseq=fk_col.colseq " +
+                        "WHERE ref.tabschema = ? ");
+                parameters.add(jdbcSchemaName);
+                if (tableName != null) {
+                    sql.append("and fk_col.tabname = ? ");
+                    parameters.add(tableName);
+                }
+                sql.append("ORDER BY fk_col.colseq");
+                return executeAndExtract(database, sql.toString(), parameters.toArray());
+            }
+
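+            // Foreign key query for DB2 UDB for AS/400 based on SYSIBM.SQLFORKEYS; schema and table
+            // name are concatenated directly into the statement rather than bound as parameters.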
+            private String getDB2ForAs400Sql(String jdbcSchemaName, String tableName) {
+                return "SELECT " +
+                        "pktable_cat, " +
+                        "pktable_name, " +
+                        "pkcolumn_name, " +
+                        "fktable_cat, " +
+                        "fktable_name, " +
+                        "fkcolumn_name, " +
+                        "key_seq, " +
+                        "update_rule, " +
+                        "delete_rule, " +
+                        "fk_name, " +
+                        "pk_name, " +
+                        "deferrability " +
+                        "FROM " +
+                        "sysibm.SQLFORKEYS " +
+                        "WHERE " +
+                        "FKTABLE_SCHEM = '" + jdbcSchemaName + "' " +
+                        "AND FKTABLE_NAME = '" + tableName + "'";
+            }
+
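+            // Foreign key query for DB2 for z/OS built on SYSIBM.SYSRELS, SYSIBM.SYSFOREIGNKEYS and
+            // SYSIBM.SYSKEYCOLUSE, with the creator (and optional table name) supplied as bind parameters.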
+            protected List<CachedRow> queryDb2Zos(CatalogAndSchema catalogAndSchema, String tableName) throws DatabaseException, SQLException {
+
+                List<String> parameters = new ArrayList<>(2);
+                StringBuilder sql = new StringBuilder("SELECT  " +
+                        "  ref.REFTBCREATOR AS pktable_cat,  " +
+                        "  ref.REFTBNAME as pktable_name,  " +
+                        "  pk_col.colname as pkcolumn_name, " +
+                        "  ref.CREATOR as fktable_cat,  " +
+                        "  ref.TBNAME as fktable_name,  " +
+                        "  fk_col.colname as fkcolumn_name, " +
+                        "  fk_col.colseq as key_seq,  " +
+                        "  decode (ref.deleterule, 'A', 3, 'C', 0, 'N', 2, 'R', 1, 1) as delete_rule,  " +
+                        "  ref.relname as fk_name,  " +
+                        "  pk_col.colname as pk_name,  " +
+                        "  7 as deferrability  " +
+                        "FROM " +
+                        "SYSIBM.SYSRELS ref " +
+                        "join SYSIBM.SYSFOREIGNKEYS fk_col " +
+                        "on ref.relname = fk_col.RELNAME " +
+                        "and ref.CREATOR = fk_col.CREATOR " +
+                        "and ref.TBNAME = fk_col.TBNAME " +
+                        "join SYSIBM.SYSKEYCOLUSE pk_col " +
+                        "on ref.REFTBCREATOR = pk_col.TBCREATOR " +
+                        "and ref.REFTBNAME = pk_col.TBNAME " +
+                        "and pk_col.colseq=fk_col.colseq " +
+                        "WHERE ref.CREATOR = ? ");
+                parameters.add(((AbstractJdbcDatabase) CachingDatabaseMetaData.this.database).getJdbcSchemaName(catalogAndSchema));
+                if (tableName != null) {
+                    sql.append("AND ref.TBNAME = ? ");
+                    parameters.add(tableName);
+                }
+                sql.append("ORDER BY fk_col.colseq");
+
+                return executeAndExtract(CachingDatabaseMetaData.this.database, sql.toString(), parameters.toArray());
+            }
+
+            @Override
+            protected boolean shouldBulkSelect(String schemaKey, ResultSetCache resultSetCache) {
+                if (database instanceof AbstractDb2Database || database instanceof MSSQLDatabase) {
+                    return super.shouldBulkSelect(schemaKey, resultSetCache); //can bulk and fast fetch
+                } else {
+                    return database instanceof OracleDatabase; //oracle is slow, always bulk select while you are at it. Other databases need to go through all tables.
+                }
+            }
+        }
+
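+        /**
+         * Result-set cache for Oracle NOT NULL check constraints; non-Oracle databases always
+         * yield an empty list from both the fast and bulk fetch paths.
+         */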
+        private class GetNotNullConstraintsResultSetCache extends ResultSetCache.SingleResultSetExtractor {
+            final String catalogName;
+            final String schemaName;
+            final String tableName;
+
+            private GetNotNullConstraintsResultSetCache(Database database, String catalogName, String schemaName, String tableName) {
+                super(database);
+                this.catalogName = catalogName;
+                this.schemaName = schemaName;
+                this.tableName = tableName;
+            }
+
+            @Override
+            public ResultSetCache.RowData rowKeyParameters(CachedRow row) {
+                return new ResultSetCache.RowData(row.getString("TABLE_CAT"), row.getString("TABLE_SCHEMA"),
+                        database, row.getString("TABLE_NAME"));
+            }
+
+            @Override
+            public ResultSetCache.RowData wantedKeyParameters() {
+                return new ResultSetCache.RowData(catalogName, schemaName, database, tableName);
+            }
+
+            @Override
+            public boolean bulkContainsSchema(String schemaKey) {
+                return database instanceof OracleDatabase;
+            }
+
+            @Override
+            public String getSchemaKey(CachedRow row) {
+                return row.getString("TABLE_SCHEMA");
+            }
+
+            @Override
+            protected boolean shouldBulkSelect(String schemaKey, ResultSetCache resultSetCache) {
+                LiquibaseTableNamesFactory liquibaseTableNamesFactory = Scope.getCurrentScope().getSingleton(LiquibaseTableNamesFactory.class);
+                List<String> liquibaseTableNames = liquibaseTableNamesFactory.getLiquibaseTableNames(database);
+                return liquibaseTableNames.stream().noneMatch(tableName::equalsIgnoreCase);
+            }
+
+            @Override
+            public List<CachedRow> fastFetchQuery() throws SQLException, DatabaseException {
+                if (database instanceof OracleDatabase) {
+                    return oracleQuery(false);
+                }
+                return Collections.emptyList();
+            }
+
+            @Override
+            public List<CachedRow> bulkFetchQuery() throws SQLException, DatabaseException {
+                if (database instanceof OracleDatabase) {
+                    return oracleQuery(true);
+                }
+                return Collections.emptyList();
+            }
+
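+            // Selects check constraints (CONSTRAINT_TYPE='C' with a non-null search condition) from
+            // ALL_TAB_COLS joined to ALL_CONS_COLUMNS / ALL_CONSTRAINTS, scoped to the requested
+            // owner(s) and, for non-bulk calls, to a single table.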
+            private List<CachedRow> oracleQuery(boolean bulk) throws DatabaseException, SQLException {
+                CatalogAndSchema catalogAndSchema = new CatalogAndSchema(catalogName, schemaName).customize(database);
+
+                String jdbcSchemaName = ((AbstractJdbcDatabase) database).getJdbcSchemaName(catalogAndSchema);
+                String jdbcTableName = database.escapeStringForDatabase(tableName);
+                String sqlToSelectNotNullConstraints = "SELECT  NULL AS TABLE_CAT, atc.OWNER AS TABLE_SCHEMA, atc.OWNER, atc.TABLE_NAME, " +
+                        "atc.COLUMN_NAME, NULLABLE, ac.VALIDATED as VALIDATED, ac.SEARCH_CONDITION, ac.CONSTRAINT_NAME " +
+                        "FROM ALL_TAB_COLS atc " +
+                        "JOIN all_cons_columns acc ON atc.OWNER = acc.OWNER AND atc.TABLE_NAME = acc.TABLE_NAME AND atc.COLUMN_NAME = acc.COLUMN_NAME " +
+                        "JOIN all_constraints ac ON atc.OWNER = ac.OWNER AND atc.TABLE_NAME = ac.TABLE_NAME AND acc.CONSTRAINT_NAME = ac.CONSTRAINT_NAME ";
+
+                if (!bulk || getAllCatalogsStringScratchData() == null) {
+                    sqlToSelectNotNullConstraints += " WHERE atc.OWNER='" + jdbcSchemaName + "' AND atc.hidden_column='NO' AND ac.CONSTRAINT_TYPE='C'  and ac.search_condition is not null ";
+                } else {
+                    sqlToSelectNotNullConstraints += " WHERE atc.OWNER IN ('" + jdbcSchemaName + "', " + getAllCatalogsStringScratchData() + ") "
+                            + " AND atc.hidden_column='NO' AND ac.CONSTRAINT_TYPE='C'  and ac.search_condition is not null ";
+                }
+
+                sqlToSelectNotNullConstraints += (!bulk && tableName != null && !tableName.isEmpty()) ? " AND atc.TABLE_NAME='" + jdbcTableName + "'" : "";
+
+                return this.executeAndExtract(sqlToSelectNotNullConstraints, database);
+            }
+
+            @Override
+            protected List<CachedRow> extract(ResultSet resultSet, boolean informixIndexTrimHint) throws SQLException {
+                List<CachedRow> cachedRowList = new ArrayList<>();
+                if (!(database instanceof OracleDatabase)) {
+                    return cachedRowList;
+                }
+
+                resultSet.setFetchSize(database.getFetchSize());
+
+                try {
+                    List<Map> result = (List<Map>) new RowMapperNotNullConstraintsResultSetExtractor(new ColumnMapRowMapper(database.isCaseSensitive()) {
+                        @Override
+                        protected Object getColumnValue(ResultSet rs, int index) throws SQLException {
+                            Object value = super.getColumnValue(rs, index);
+                            if (!(value instanceof String)) {
+                                return value;
+                            }
+                            return value.toString().trim();
+                        }
+                    }).extractData(resultSet);
+
+                    for (Map row : result) {
+                        cachedRowList.add(new CachedRow(row));
+                    }
+                } finally {
+                    JdbcUtil.closeResultSet(resultSet);
+                }
+                return cachedRowList;
+
+            }
+        }
+
+        public List<CachedRow> getTables(final String catalogName, final String schemaName, final String table) throws DatabaseException {
+            return getResultSetCache("getTables").get(new ResultSetCache.SingleResultSetExtractor(database) {
+
+                @Override
+                protected boolean shouldBulkSelect(String schemaKey, ResultSetCache resultSetCache) {
+                    return table == null || getAllCatalogsStringScratchData() != null || super.shouldBulkSelect(schemaKey, resultSetCache);
+                }
+
+                @Override
+                public ResultSetCache.RowData rowKeyParameters(CachedRow row) {
+                    return new ResultSetCache.RowData(row.getString("TABLE_CAT"), row.getString("TABLE_SCHEM"), database, row.getString("TABLE_NAME"));
+                }
+
+                @Override
+                public ResultSetCache.RowData wantedKeyParameters() {
+                    return new ResultSetCache.RowData(catalogName, schemaName, database, table);
+                }
+
+                @Override
+                public boolean bulkContainsSchema(String schemaKey) {
+                    return database instanceof OracleDatabase;
+                }
+
+                @Override
+                public String getSchemaKey(CachedRow row) {
+                    return row.getString("TABLE_SCHEM");
+                }
+
+                @Override
+                public List<CachedRow> fastFetchQuery() throws SQLException, DatabaseException {
+                    CatalogAndSchema catalogAndSchema = new CatalogAndSchema(catalogName, schemaName).customize(database);
+
+                    if (database instanceof OracleDatabase) {
+                        // Modified 0827: route DM (Dameng) connections to a DM-specific table query
+                        if (FlowUtil.isDM(database.getConnection().getURL())) {
+                            return queryDM(catalogAndSchema, table);
+                        } else {
+                            return queryOracle(catalogAndSchema, table);
+                        }
+                    } else if (database instanceof MSSQLDatabase) {
+                        return queryMssql(catalogAndSchema, table);
+                    } else if (database instanceof Db2zDatabase) {
+                        return queryDb2Zos(catalogAndSchema, table);
+                    } else if (database instanceof PostgresDatabase) {
+                        return queryPostgres(catalogAndSchema, table);
+                    }
+
+                    String catalog = ((AbstractJdbcDatabase) database).getJdbcCatalogName(catalogAndSchema);
+                    String schema = ((AbstractJdbcDatabase) database).getJdbcSchemaName(catalogAndSchema);
+                    return extract(databaseMetaData.getTables(catalog, escapeForLike(schema, database), ((table == null) ?
+                            SQL_FILTER_MATCH_ALL : escapeForLike(table, database)), new String[]{"TABLE"}));
+                }
+
+                @Override
+                public List<CachedRow> bulkFetchQuery() throws SQLException, DatabaseException {
+                    CatalogAndSchema catalogAndSchema = new CatalogAndSchema(catalogName, schemaName).customize(database);
+
+                    if (database instanceof OracleDatabase) {
+                        // Modified 0827: route DM (Dameng) connections to a DM-specific table query
+                        if (FlowUtil.isDM(database.getConnection().getURL())) {
+                            return queryDM(catalogAndSchema, null);
+                        } else {
+                            return queryOracle(catalogAndSchema, null);
+                        }
+                    } else if (database instanceof MSSQLDatabase) {
+                        return queryMssql(catalogAndSchema, null);
+                    } else if (database instanceof Db2zDatabase) {
+                        return queryDb2Zos(catalogAndSchema, null);
+                    } else if (database instanceof PostgresDatabase) {
+                        return queryPostgres(catalogAndSchema, table);
+                    }
+
+                    String catalog = ((AbstractJdbcDatabase) database).getJdbcCatalogName(catalogAndSchema);
+                    String schema = ((AbstractJdbcDatabase) database).getJdbcSchemaName(catalogAndSchema);
+                    return extract(databaseMetaData.getTables(catalog, escapeForLike(schema, database), SQL_FILTER_MATCH_ALL, new String[]{"TABLE"}));
+                }
+
+                private List<CachedRow> queryMssql(CatalogAndSchema catalogAndSchema, String tableName) throws DatabaseException, SQLException {
+                    String ownerName = database.correctObjectName(catalogAndSchema.getSchemaName(), Schema.class);
+
+                    String databaseName = StringUtil.trimToNull(database.correctObjectName(catalogAndSchema.getCatalogName(), Catalog.class));
+                    String dbIdParam;
+                    String databasePrefix;
+                    if (databaseName == null) {
+                        databasePrefix = "";
+                        dbIdParam = "";
+                    } else {
+                        dbIdParam = ", db_id('" + databaseName + "')";
+                        databasePrefix = "[" + databaseName + "].";
+                    }
+
+
+                    //From select object_definition(object_id('sp_tables'))
+                    String sql = "select " +
+                            "db_name(" + (databaseName == null ? "" : "db_id('" + databaseName + "')") + ") AS TABLE_CAT, " +
+                            "convert(sysname,object_schema_name(o.object_id" + dbIdParam + ")) AS TABLE_SCHEM, " +
+                            "convert(sysname,o.name) AS TABLE_NAME, " +
+                            "'TABLE' AS TABLE_TYPE, " +
+                            "CAST(ep.value as varchar(max)) as REMARKS " +
+                            "from " + databasePrefix + "sys.all_objects o " +
+                            "left outer join sys.extended_properties ep on ep.name='MS_Description' and major_id=o.object_id and minor_id=0 " +
+                            "where " +
+                            "o.type in ('U') " +
+                            "and has_perms_by_name(" + (databaseName == null ? "" : "quotename('" + databaseName + "') + '.' + ") + "quotename(object_schema_name(o.object_id" + dbIdParam + ")) + '.' + quotename(o.name), 'object', 'select') = 1 " +
+                            "and charindex(substring(o.type,1,1),'U') <> 0 " +
+                            "and object_schema_name(o.object_id" + dbIdParam + ")='" + database.escapeStringForDatabase(ownerName) + "'";
+                    if (tableName != null) {
+                        sql += " AND o.name='" + database.escapeStringForDatabase(tableName) + "' ";
+                    }
+                    sql += "order by 4, 1, 2, 3";
+
+                    return executeAndExtract(sql, database);
+                }
+
+
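+                // Table lookup for DM (Dameng), which exposes Oracle-compatible ALL_TABLES /
+                // ALL_TAB_COMMENTS views. It mirrors queryOracle below but omits the ALL_QUEUE_TABLES
+                // join, presumably because DM has no equivalent of Oracle AQ queue tables.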
+                private List<CachedRow> queryDM(CatalogAndSchema catalogAndSchema, String tableName) throws DatabaseException, SQLException {
+                    String ownerName = database.correctObjectName(catalogAndSchema.getCatalogName(), Schema.class);
+
+                    String sql = "SELECT null as TABLE_CAT, a.OWNER as TABLE_SCHEM, a.TABLE_NAME as TABLE_NAME, " +
+                            "a.TEMPORARY as TEMPORARY, a.DURATION as DURATION, 'TABLE' as TABLE_TYPE, " +
+                            "c.COMMENTS as REMARKS, A.tablespace_name as tablespace_name, CASE WHEN A.tablespace_name = " +
+                            "(SELECT DEFAULT_TABLESPACE FROM USER_USERS) THEN 'true' ELSE null END as default_tablespace " +
+                            "from ALL_TABLES a " +
+                            "join ALL_TAB_COMMENTS c on a.TABLE_NAME=c.table_name and a.owner=c.owner ";
+                    String allCatalogsString = getAllCatalogsStringScratchData();
+                    if (tableName != null || allCatalogsString == null) {
+                        sql += "WHERE a.OWNER='" + ownerName + "'";
+                    } else {
+                        sql += "WHERE a.OWNER IN ('" + ownerName + "', " + allCatalogsString + ")";
+                    }
+                    if (tableName != null) {
+                        sql += " AND a.TABLE_NAME='" + tableName + "'";
+                    }
+
+                    return executeAndExtract(sql, database);
+                }
+
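+                // Oracle table lookup via ALL_TABLES / ALL_TAB_COMMENTS, excluding AQ queue tables
+                // through the outer join to ALL_QUEUE_TABLES and flagging tables stored in the
+                // user's default tablespace.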
+                private List<CachedRow> queryOracle(CatalogAndSchema catalogAndSchema, String tableName) throws DatabaseException, SQLException {
+                    String ownerName = database.correctObjectName(catalogAndSchema.getCatalogName(), Schema.class);
+
+                    String sql = "SELECT null as TABLE_CAT, a.OWNER as TABLE_SCHEM, a.TABLE_NAME as TABLE_NAME, " +
+                            "a.TEMPORARY as TEMPORARY, a.DURATION as DURATION, 'TABLE' as TABLE_TYPE, " +
+                            "c.COMMENTS as REMARKS, A.tablespace_name as tablespace_name, CASE WHEN A.tablespace_name = " +
+                            "(SELECT DEFAULT_TABLESPACE FROM USER_USERS) THEN 'true' ELSE null END as default_tablespace " +
+                            "from ALL_TABLES a " +
+                            "join ALL_TAB_COMMENTS c on a.TABLE_NAME=c.table_name and a.owner=c.owner " +
+                            "left outer join ALL_QUEUE_TABLES q ON a.TABLE_NAME = q.QUEUE_TABLE and a.OWNER = q.OWNER " +
+                            "WHERE q.QUEUE_TABLE is null ";
+                    String allCatalogsString = getAllCatalogsStringScratchData();
+                    if (tableName != null || allCatalogsString == null) {
+                        sql += "AND a.OWNER='" + ownerName + "'";
+                    } else {
+                        sql += "AND a.OWNER IN ('" + ownerName + "', " + allCatalogsString + ")";
+                    }
+                    if (tableName != null) {
+                        sql += " AND a.TABLE_NAME='" + tableName + "'";
+                    }
+
+                    return executeAndExtract(sql, database);
+                }
+
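+                // DB2 for z/OS: base tables (TYPE = 'T') from SYSIBM.SYSTABLES, using bind
+                // parameters for the schema and table name.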
+                private List<CachedRow> queryDb2Zos(CatalogAndSchema catalogAndSchema, String tableName) throws DatabaseException, SQLException {
+                    String ownerName = database.correctObjectName(catalogAndSchema.getCatalogName(), Schema.class);
+
+                    String sql = "SELECT CREATOR AS TABLE_SCHEM, " +
+                            "NAME AS TABLE_NAME, " +
+                            "'TABLE' AS TABLE_TYPE, " +
+                            "REMARKS " +
+                            "FROM SYSIBM.SYSTABLES " +
+                            "WHERE TYPE = 'T'";
+                    List<String> parameters = new ArrayList<>(2);
+                    if (ownerName != null) {
+                        sql += " AND CREATOR = ?";
+                        parameters.add(ownerName);
+                    }
+                    if (tableName != null) {
+                        sql += " AND NAME = ?";
+                        parameters.add(tableName);
+                    }
+
+                    return executeAndExtract(database, sql, parameters.toArray());
+                }
+
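+                // PostgreSQL: delegate to JDBC metadata and include partitioned tables.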
+                private List<CachedRow> queryPostgres(CatalogAndSchema catalogAndSchema, String tableName) throws SQLException {
+                    String catalog = ((AbstractJdbcDatabase) database).getJdbcCatalogName(catalogAndSchema);
+                    String schema = ((AbstractJdbcDatabase) database).getJdbcSchemaName(catalogAndSchema);
+                    return extract(databaseMetaData.getTables(catalog, escapeForLike(schema, database), ((tableName == null) ?
+                            SQL_FILTER_MATCH_ALL : escapeForLike(tableName, database)), new String[]{"TABLE", "PARTITIONED TABLE"}));
+
+                }
+            });
+        }
+
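+        // View metadata, cached per (catalog, schema, view name). Oracle and MSSQL use
+        // hand-written dictionary queries below; other databases fall back to JDBC metadata.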
+        public List<CachedRow> getViews(final String catalogName, final String schemaName, String viewName) throws DatabaseException {
+            final String view;
+            if (database instanceof DB2Database) {
+                view = database.correctObjectName(viewName, View.class);
+            } else {
+                view = viewName;
+            }
+            return getResultSetCache("getViews").get(new ResultSetCache.SingleResultSetExtractor(database) {
+
+                @Override
+                protected boolean shouldBulkSelect(String schemaKey, ResultSetCache resultSetCache) {
+                    return view == null || getAllCatalogsStringScratchData() != null || super.shouldBulkSelect(schemaKey, resultSetCache);
+                }
+
+                @Override
+                public ResultSetCache.RowData rowKeyParameters(CachedRow row) {
+                    return new ResultSetCache.RowData(row.getString("TABLE_CAT"), row.getString("TABLE_SCHEM"), database, row.getString("TABLE_NAME"));
+                }
+
+
+                @Override
+                public ResultSetCache.RowData wantedKeyParameters() {
+                    return new ResultSetCache.RowData(catalogName, schemaName, database, view);
+                }
+
+                @Override
+                public boolean bulkContainsSchema(String schemaKey) {
+                    return database instanceof OracleDatabase;
+                }
+
+                @Override
+                public String getSchemaKey(CachedRow row) {
+                    return row.getString("TABLE_SCHEM");
+                }
+
+
+                @Override
+                public List<CachedRow> fastFetchQuery() throws SQLException, DatabaseException {
+                    CatalogAndSchema catalogAndSchema = new CatalogAndSchema(catalogName, schemaName).customize(database);
+
+                    if (database instanceof OracleDatabase) {
+                        return queryOracle(catalogAndSchema, view);
+                    } else if (database instanceof MSSQLDatabase) {
+                        return queryMssql(catalogAndSchema, view);
+                    }
+
+                    String catalog = ((AbstractJdbcDatabase) database).getJdbcCatalogName(catalogAndSchema);
+                    String schema = ((AbstractJdbcDatabase) database).getJdbcSchemaName(catalogAndSchema);
+                    return extract(databaseMetaData.getTables(catalog, escapeForLike(schema, database), ((view == null) ? SQL_FILTER_MATCH_ALL
+                            : escapeForLike(view, database)), new String[]{"VIEW"}));
+                }
+
+                @Override
+                public List<CachedRow> bulkFetchQuery() throws SQLException, DatabaseException {
+                    CatalogAndSchema catalogAndSchema = new CatalogAndSchema(catalogName, schemaName).customize(database);
+
+                    if (database instanceof OracleDatabase) {
+                        return queryOracle(catalogAndSchema, null);
+                    } else if (database instanceof MSSQLDatabase) {
+                        return queryMssql(catalogAndSchema, null);
+                    }
+
+                    String catalog = ((AbstractJdbcDatabase) database).getJdbcCatalogName(catalogAndSchema);
+                    String schema = ((AbstractJdbcDatabase) database).getJdbcSchemaName(catalogAndSchema);
+                    return extract(databaseMetaData.getTables(catalog, escapeForLike(schema, database), SQL_FILTER_MATCH_ALL, new String[]{"VIEW"}));
+                }
+
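+                // SQL Server view query: sys.all_objects rows of type 'V', with the
+                // MS_Description extended property exposed as REMARKS.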
+                private List<CachedRow> queryMssql(CatalogAndSchema catalogAndSchema, String viewName) throws DatabaseException, SQLException {
+                    String ownerName = database.correctObjectName(catalogAndSchema.getSchemaName(), Schema.class);
+                    String databaseName = StringUtil.trimToNull(database.correctObjectName(catalogAndSchema.getCatalogName(), Catalog.class));
+                    String dbIdParam = "";
+                    String databasePrefix = "";
+                    boolean haveDatabaseName = databaseName != null;
+
+                    if (haveDatabaseName) {
+                        dbIdParam = ", db_id('" + databaseName + "')";
+                        databasePrefix = "[" + databaseName + "].";
+                    }
+                    String tableCatParam = haveDatabaseName ? "db_id('" + databaseName + "')" : "";
+                    String permsParam = haveDatabaseName ? "quotename('" + databaseName + "') + '.' + " : "";
+
+                    String sql = "select " +
+                            "db_name(" + tableCatParam + ") AS TABLE_CAT, " +
+                            "convert(sysname,object_schema_name(o.object_id" + dbIdParam + ")) AS TABLE_SCHEM, " +
+                            "convert(sysname,o.name) AS TABLE_NAME, " +
+                            "'VIEW' AS TABLE_TYPE, " +
+                            "CAST(ep.value as varchar(max)) as REMARKS " +
+                            "from " + databasePrefix + "sys.all_objects o " +
+                            "left join sys.extended_properties ep on ep.name='MS_Description' and major_id=o.object_id and minor_id=0 " +
+                            "where " +
+                            "o.type in ('V') " +
+                            "and has_perms_by_name(" + permsParam + "quotename(object_schema_name(o.object_id" + dbIdParam + ")) + '.' + quotename(o.name), 'object', 'select') = 1 " +
+                            "and charindex(substring(o.type,1,1),'V') <> 0 " +
+                            "and object_schema_name(o.object_id" + dbIdParam + ")='" + database.escapeStringForDatabase(ownerName) + "'";
+                    if (viewName != null) {
+                        sql += " AND o.name='" + database.escapeStringForDatabase(viewName) + "' ";
+                    }
+                    sql += "order by 4, 1, 2, 3";
+
+                    return executeAndExtract(sql, database);
+                }
+
+
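+                // Oracle view query: ALL_VIEWS joined to ALL_TAB_COMMENTS, excluding registered
+                // materialized views; EDITIONING_VIEW is only selected on 11g and later.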
+                private List<CachedRow> queryOracle(CatalogAndSchema catalogAndSchema, String viewName) throws DatabaseException, SQLException {
+                    String ownerName = database.correctObjectName(catalogAndSchema.getCatalogName(), Schema.class);
+
+                    String sql = "SELECT null as TABLE_CAT, a.OWNER as TABLE_SCHEM, a.VIEW_NAME as TABLE_NAME, 'TABLE' as TABLE_TYPE, c.COMMENTS as REMARKS, TEXT as OBJECT_BODY";
+                    if (database.getDatabaseMajorVersion() > 10) {
+                        sql += ", EDITIONING_VIEW";
+                    }
+                    sql += " from ALL_VIEWS a " +
+                            "join ALL_TAB_COMMENTS c on a.VIEW_NAME=c.table_name and a.owner=c.owner ";
+                    if (viewName != null || getAllCatalogsStringScratchData() == null) {
+                        sql += "WHERE a.OWNER='" + ownerName + "'";
+                    } else {
+                        sql += "WHERE a.OWNER IN ('" + ownerName + "', " + getAllCatalogsStringScratchData() + ")";
+                    }
+                    if (viewName != null) {
+                        sql += " AND a.VIEW_NAME='" + database.correctObjectName(viewName, View.class) + "'";
+                    }
+                    sql += " AND a.VIEW_NAME not in (select mv.name from all_registered_mviews mv where mv.owner=a.owner)";
+
+                    return executeAndExtract(sql, database);
+                }
+            });
+        }
+
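+        // Primary key metadata; per-database SQL is built in getPkInfo() and mssqlSql().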
+        public List<CachedRow> getPrimaryKeys(final String catalogName, final String schemaName, final String table) throws DatabaseException {
+            return getResultSetCache("getPrimaryKeys").get(new ResultSetCache.SingleResultSetExtractor(database) {
+
+                @Override
+                public ResultSetCache.RowData rowKeyParameters(CachedRow row) {
+                    return new ResultSetCache.RowData(row.getString("TABLE_CAT"), row.getString("TABLE_SCHEM"), database, row.getString("TABLE_NAME"));
+                }
+
+                @Override
+                public ResultSetCache.RowData wantedKeyParameters() {
+                    return new ResultSetCache.RowData(catalogName, schemaName, database, table);
+                }
+
+                @Override
+                public boolean bulkContainsSchema(String schemaKey) {
+                    return database instanceof OracleDatabase;
+                }
+
+
+                @Override
+                public String getSchemaKey(CachedRow row) {
+                    return row.getString("TABLE_SCHEM");
+                }
+
+                @Override
+                public List<CachedRow> fastFetchQuery() throws SQLException {
+                    CatalogAndSchema catalogAndSchema = new CatalogAndSchema(catalogName, schemaName).customize(database);
+                    try {
+                        List<CachedRow> foundPks = new ArrayList<>();
+                        if (table == null) {
+                            List<CachedRow> tables = CachingDatabaseMetaData.this.getTables(catalogName, schemaName, null);
+                            for (CachedRow table : tables) {
+                                List<CachedRow> pkInfo = getPkInfo(catalogAndSchema, table.getString("TABLE_NAME"));
+                                if (pkInfo != null) {
+                                    foundPks.addAll(pkInfo);
+                                }
+                            }
+                            return foundPks;
+                        } else {
+                            List<CachedRow> pkInfo = getPkInfo(catalogAndSchema, table);
+                            if (pkInfo != null) {
+                                foundPks.addAll(pkInfo);
+                            }
+                        }
+                        return foundPks;
+                    } catch (DatabaseException e) {
+                        throw new SQLException(e);
+                    }
+                }
+
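+                // Per-database primary key lookup: MSSQL and Oracle use custom dictionary SQL,
+                // DB2 z/OS and CockroachDB have dedicated queries, and everything else falls
+                // back to DatabaseMetaData.getPrimaryKeys().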
+                private List<CachedRow> getPkInfo(CatalogAndSchema catalogAndSchema, String tableName) throws DatabaseException, SQLException {
+                    List<CachedRow> pkInfo;
+                    if (database instanceof MSSQLDatabase) {
+                        String sql = mssqlSql(catalogAndSchema, tableName);
+                        pkInfo = executeAndExtract(sql, database);
+                    } else {
+                        if (database instanceof Db2zDatabase) {
+                            String sql = "SELECT 'NULL' AS TABLE_CAT," +
+                                    " SYSTAB.TBCREATOR AS TABLE_SCHEM, " +
+                                    "SYSTAB.TBNAME AS TABLE_NAME, " +
+                                    "COLUSE.COLNAME AS COLUMN_NAME, " +
+                                    "COLUSE.COLSEQ AS KEY_SEQ, " +
+                                    "SYSTAB.CONSTNAME AS PK_NAME " +
+                                    "FROM SYSIBM.SYSTABCONST SYSTAB " +
+                                    "JOIN SYSIBM.SYSKEYCOLUSE COLUSE " +
+                                    "ON SYSTAB.TBCREATOR = COLUSE.TBCREATOR " +
+                                    "WHERE SYSTAB.TYPE = 'P' " +
+                                    "AND SYSTAB.TBNAME = ? " +
+                                    "AND SYSTAB.TBCREATOR = ? " +
+                                    "AND SYSTAB.TBNAME=COLUSE.TBNAME " +
+                                    "AND SYSTAB.CONSTNAME=COLUSE.CONSTNAME " +
+                                    "ORDER BY COLUSE.COLNAME";
+                            try {
+                                return executeAndExtract(database, sql, table, ((AbstractJdbcDatabase) database).getJdbcSchemaName(catalogAndSchema));
+                            } catch (DatabaseException e) {
+                                throw new SQLException(e);
+                            }
+                        } else if (database instanceof OracleDatabase) {
+                            warnAboutDbaRecycleBin();
+
+                            String sql = "SELECT NULL AS table_cat, c.owner AS table_schem, c.table_name, c.column_name as COLUMN_NAME, c.position AS key_seq, c.constraint_name AS pk_name, k.VALIDATED as VALIDATED " +
+                                    "FROM all_cons_columns c, all_constraints k " +
+                                    "LEFT JOIN " + (((OracleDatabase) database).canAccessDbaRecycleBin() ? "dba_recyclebin" : "user_recyclebin") + " d ON d.object_name=k.table_name " +
+                                    "WHERE k.constraint_type = 'P' " +
+                                    "AND d.object_name IS NULL " +
+                                    "AND k.table_name = '" + table + "' " +
+                                    "AND k.owner = '" + ((AbstractJdbcDatabase) database).getJdbcSchemaName(catalogAndSchema) + "' " +
+                                    "AND k.constraint_name = c.constraint_name " +
+                                    "AND k.table_name = c.table_name " +
+                                    "AND k.owner = c.owner " +
+                                    "ORDER BY column_name";
+                            try {
+                                return executeAndExtract(sql, database);
+                            } catch (DatabaseException e) {
+                                throw new SQLException(e);
+                            }
+                        } else if (database instanceof CockroachDatabase) {
+                            // This is the same as the query generated by PGJDBC's getPrimaryKeys method, except it
+                            // also adds an `asc_or_desc` column to the result.
+                            String sql = "SELECT " +
+                                    "  result.table_cat, " +
+                                    "  result.table_schem, " +
+                                    "  result.table_name, " +
+                                    "  result.column_name, " +
+                                    "  result.key_seq, " +
+                                    "  result.pk_name, " +
+                                    "  CASE result.indoption[result.key_seq - 1] & 1 " +
+                                    "    WHEN 1 THEN 'D' " +
+                                    "    ELSE 'A' " +
+                                    "    END AS asc_or_desc " +
+                                    "FROM " +
+                                    "  (" +
+                                    "    SELECT " +
+                                    "      NULL AS table_cat, " +
+                                    "      n.nspname AS table_schem, " +
+                                    "      ct.relname AS table_name, " +
+                                    "      a.attname AS column_name, " +
+                                    "      (information_schema._pg_expandarray(i.indkey)).n " +
+                                    "        AS key_seq, " +
+                                    "      ci.relname AS pk_name, " +
+                                    "      information_schema._pg_expandarray(i.indkey) AS keys, " +
+                                    "      i.indoption, " +
+                                    "      a.attnum AS a_attnum " +
+                                    "    FROM " +
+                                    "      pg_catalog.pg_class AS ct " +
+                                    "      JOIN pg_catalog.pg_attribute AS a ON (ct.oid = a.attrelid) " +
+                                    "      JOIN pg_catalog.pg_namespace AS n ON " +
+                                    "          (ct.relnamespace = n.oid) " +
+                                    "      JOIN pg_catalog.pg_index AS i ON (a.attrelid = i.indrelid) " +
+                                    "      JOIN pg_catalog.pg_class AS ci ON (ci.oid = i.indexrelid) " +
+                                    "    WHERE " +
+                                    "      true " +
+                                    "      AND n.nspname = '" + ((AbstractJdbcDatabase) database).getJdbcSchemaName(catalogAndSchema) + "' " +
+                                    "      AND ct.relname = '" + table + "' " +
+                                    "      AND i.indisprimary" +
+                                    "  ) " +
+                                    "    AS result " +
+                                    "WHERE " +
+                                    "  result.a_attnum = (result.keys).x " +
+                                    "ORDER BY " +
+                                    "  result.table_name, result.pk_name, result.key_seq";
+
+                            try {
+                                return executeAndExtract(sql, database);
+                            } catch (DatabaseException e) {
+                                throw new SQLException(e);
+                            }
+                        } else {
+                            return extract(
+                                    databaseMetaData.getPrimaryKeys(
+                                            ((AbstractJdbcDatabase) database).getJdbcCatalogName(catalogAndSchema),
+                                            ((AbstractJdbcDatabase) database).getJdbcSchemaName(catalogAndSchema),
+                                            table
+                                    )
+                            );
+                        }
+                    }
+                    return pkInfo;
+                }
+
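+                // SQL Server primary key query over sys.key_constraints / sys.index_columns;
+                // a null tableName fetches primary keys for the whole schema (bulk path).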
+                private String mssqlSql(CatalogAndSchema catalogAndSchema, String tableName) throws DatabaseException {
+                    String sql;
+                    sql =
+                            "SELECT " +
+                                    "DB_NAME() AS [TABLE_CAT], " +
+                                    "[s].[name] AS [TABLE_SCHEM], " +
+                                    "[t].[name] AS [TABLE_NAME], " +
+                                    "[c].[name] AS [COLUMN_NAME], " +
+                                    "CASE [ic].[is_descending_key] WHEN 0 THEN N'A' WHEN 1 THEN N'D' END AS [ASC_OR_DESC], " +
+                                    "[ic].[key_ordinal] AS [KEY_SEQ], " +
+                                    "[kc].[name] AS [PK_NAME] " +
+                                    "FROM [sys].[schemas] AS [s] " +
+                                    "INNER JOIN [sys].[tables] AS [t] " +
+                                    "ON [t].[schema_id] = [s].[schema_id] " +
+                                    "INNER JOIN [sys].[key_constraints] AS [kc] " +
+                                    "ON [kc].[parent_object_id] = [t].[object_id] " +
+                                    "INNER JOIN [sys].[indexes] AS [i] " +
+                                    "ON [i].[object_id] = [kc].[parent_object_id] " +
+                                    "AND [i].[index_id] = [kc].[unique_index_id] " +
+                                    "INNER JOIN [sys].[index_columns] AS [ic] " +
+                                    "ON [ic].[object_id] = [i].[object_id] " +
+                                    "AND [ic].[index_id] = [i].[index_id] " +
+                                    "INNER JOIN [sys].[columns] AS [c] " +
+                                    "ON [c].[object_id] = [ic].[object_id] " +
+                                    "AND [c].[column_id] = [ic].[column_id] " +
+                                    "WHERE [s].[name] = N'" + database.escapeStringForDatabase(catalogAndSchema.getSchemaName()) + "' " + // The schema name was corrected in the customized CatalogAndSchema
+                                    (tableName == null ? "" : "AND [t].[name] = N'" + database.escapeStringForDatabase(database.correctObjectName(tableName, Table.class)) + "' ") +
+                                    "AND [kc].[type] = 'PK' " +
+                                    "AND [ic].[key_ordinal] > 0 " +
+                                    "ORDER BY " +
+                                    "[ic].[key_ordinal]";
+                    return sql;
+                }
+
+                @Override
+                public List<CachedRow> bulkFetchQuery() throws SQLException {
+                    if (database instanceof OracleDatabase) {
+                        CatalogAndSchema catalogAndSchema = new CatalogAndSchema(catalogName, schemaName).customize(database);
+
+                        warnAboutDbaRecycleBin();
+                        try {
+                            String sql = "SELECT NULL AS table_cat, c.owner AS table_schem, c.table_name, c.column_name, c.position AS key_seq,c.constraint_name AS pk_name, k.VALIDATED as VALIDATED FROM " +
+                                    "all_cons_columns c, " +
+                                    "all_constraints k " +
+                                    "LEFT JOIN " + (((OracleDatabase) database).canAccessDbaRecycleBin() ? "dba_recyclebin" : "user_recyclebin") + " d ON d.object_name=k.table_name " +
+                                    "WHERE k.constraint_type = 'P' " +
+                                    "AND d.object_name IS NULL ";
+                            if (getAllCatalogsStringScratchData() == null) {
+                                sql += "AND k.owner='" + catalogAndSchema.getCatalogName() + "' ";
+                            } else {
+                                sql += "AND k.owner IN ('" + catalogAndSchema.getCatalogName() + "', " + getAllCatalogsStringScratchData() + ")";
+                            }
+                            sql += "AND k.constraint_name = c.constraint_name " +
+                                    "AND k.table_name = c.table_name " +
+                                    "AND k.owner = c.owner " +
+                                    "ORDER BY column_name";
+                            return executeAndExtract(sql, database);
+                        } catch (DatabaseException e) {
+                            throw new SQLException(e);
+                        }
+                    } else if (database instanceof MSSQLDatabase) {
+                        CatalogAndSchema catalogAndSchema = new CatalogAndSchema(catalogName, schemaName).customize(database);
+                        try {
+                            return executeAndExtract(mssqlSql(catalogAndSchema, null), database);
+                        } catch (DatabaseException e) {
+                            throw new SQLException(e);
+                        }
+                    }
+                    return null;
+                }
+
+                @Override
+                protected boolean shouldBulkSelect(String schemaKey, ResultSetCache resultSetCache) {
+                    if ((database instanceof OracleDatabase) || (database instanceof MSSQLDatabase)) {
+                        return table == null || getAllCatalogsStringScratchData() != null || super.shouldBulkSelect(schemaKey, resultSetCache);
+                    } else {
+                        return false;
+                    }
+                }
+            });
+        }
+
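+        // Unique constraint metadata; queryDb() below holds one SQL variant per supported database.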
+        public List<CachedRow> getUniqueConstraints(final String catalogName, final String schemaName, final String tableName) throws DatabaseException {
+            return getResultSetCache("getUniqueConstraints").get(new ResultSetCache.SingleResultSetExtractor(database) {
+
+                @Override
+                protected boolean shouldBulkSelect(String schemaKey, ResultSetCache resultSetCache) {
+                    return tableName == null || getAllCatalogsStringScratchData() != null || super.shouldBulkSelect(schemaKey, resultSetCache);
+                }
+
+                @Override
+                public boolean bulkContainsSchema(String schemaKey) {
+                    return database instanceof OracleDatabase;
+                }
+
+                @Override
+                public String getSchemaKey(CachedRow row) {
+                    return row.getString("CONSTRAINT_SCHEM");
+                }
+
+                @Override
+                public ResultSetCache.RowData rowKeyParameters(CachedRow row) {
+                    return new ResultSetCache.RowData(catalogName, schemaName, database, row.getString("TABLE_NAME"));
+                }
+
+                @Override
+                public ResultSetCache.RowData wantedKeyParameters() {
+                    return new ResultSetCache.RowData(catalogName, schemaName, database, tableName);
+                }
+
+                @Override
+                public List<CachedRow> fastFetchQuery() throws SQLException, DatabaseException {
+                    CatalogAndSchema catalogAndSchema = new CatalogAndSchema(catalogName, schemaName).customize(database);
+
+                    return queryDb(catalogAndSchema, tableName);
+                }
+
+                @Override
+                public List<CachedRow> bulkFetchQuery() throws SQLException, DatabaseException {
+                    CatalogAndSchema catalogAndSchema = new CatalogAndSchema(catalogName, schemaName).customize(database);
+
+                    return queryDb(catalogAndSchema, null);
+                }
+
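+                // One unique-constraint query per database family. Most variants inline the
+                // schema/table identifiers into the SQL; the DB2 flavors use bind parameters.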
+                private List<CachedRow> queryDb(CatalogAndSchema catalogAndSchema, String tableName) throws SQLException, DatabaseException {
+
+                    String jdbcCatalogName = catalogAndSchema.getCatalogName();
+                    String jdbcSchemaName = catalogAndSchema.getSchemaName();
+
+                    Database database = getDatabase();
+                    List<String> parameters = new ArrayList<>(3);
+                    String sql = null;
+                    if (database instanceof Ingres9Database) {
+                        sql = "select CONSTRAINT_NAME, TABLE_NAME from iiconstraints where schema_name ='"
+                                + schemaName + "' and constraint_type='U'";
+                        if (tableName != null) {
+                            sql += " and table_name='" + tableName + "'";
+                        }
+                    } else if ((database instanceof MySQLDatabase) || (database instanceof HsqlDatabase) || (database
+                            instanceof MariaDBDatabase)) {
+                        sql = "select CONSTRAINT_NAME, TABLE_NAME "
+                                + "from " + database.getSystemSchema() + ".table_constraints "
+                                + "where constraint_schema='" + jdbcCatalogName + "' "
+                                + "and constraint_type='UNIQUE'";
+                        if (tableName != null) {
+                            sql += " and table_name='" + tableName + "'";
+                        }
+                    } else if (database instanceof PostgresDatabase) {
+                        sql = "select CONSTRAINT_NAME, TABLE_NAME "
+                                + "from " + database.getSystemSchema() + ".table_constraints "
+                                + "where constraint_catalog='" + jdbcCatalogName + "' "
+                                + "and constraint_schema='" + jdbcSchemaName + "' "
+                                + "and constraint_type='UNIQUE'";
+                        if (tableName != null) {
+                            sql += " and table_name='" + tableName + "'";
+                        }
+                    } else if (database.getClass().getName().contains("MaxDB")) { // have to check the class name because MaxDB support is currently an extension
+                        sql = "select distinct tablename AS TABLE_NAME, constraintname AS CONSTRAINT_NAME from CONSTRAINTCOLUMNS WHERE CONSTRAINTTYPE = 'UNIQUE_CONST'";
+                        if (tableName != null) {
+                            sql += " and tablename='" + tableName + "'";
+                        }
+                    } else if (database instanceof MSSQLDatabase) {
+                        sql =
+                                "SELECT " +
+                                        "[TC].[CONSTRAINT_NAME], " +
+                                        "[TC].[TABLE_NAME], " +
+                                        "[TC].[CONSTRAINT_CATALOG] AS INDEX_CATALOG, " +
+                                        "[TC].[CONSTRAINT_SCHEMA] AS INDEX_SCHEMA, " +
+                                        "[IDX].[TYPE_DESC], " +
+                                        "[IDX].[name] AS INDEX_NAME " +
+                                        "FROM [INFORMATION_SCHEMA].[TABLE_CONSTRAINTS] AS [TC] " +
+                                        "JOIN sys.indexes AS IDX ON IDX.name=[TC].[CONSTRAINT_NAME] AND object_schema_name(object_id)=[TC].[CONSTRAINT_SCHEMA] " +
+                                        "WHERE [TC].[CONSTRAINT_TYPE] = 'UNIQUE' " +
+                                        "AND [TC].[CONSTRAINT_CATALOG] = N'" + database.escapeStringForDatabase(jdbcCatalogName) + "' " +
+                                        "AND [TC].[CONSTRAINT_SCHEMA] = N'" + database.escapeStringForDatabase(jdbcSchemaName) + "'";
+                        if (tableName != null) {
+                            sql += " AND [TC].[TABLE_NAME] = N'" + database.escapeStringForDatabase(database.correctObjectName(tableName, Table.class)) + "'";
+                        }
+                    } else if (database instanceof OracleDatabase) {
+                        warnAboutDbaRecycleBin();
+
+                        sql = "select uc.owner AS CONSTRAINT_SCHEM, uc.constraint_name, uc.table_name,uc.status,uc.deferrable,uc.deferred,ui.tablespace_name, ui.index_name, ui.owner as INDEX_CATALOG, uc.VALIDATED as VALIDATED, ac.COLUMN_NAME as COLUMN_NAME " +
+                                "from all_constraints uc " +
+                                "join all_indexes ui on uc.index_name = ui.index_name and uc.owner=ui.table_owner and uc.table_name=ui.table_name " +
+                                "LEFT OUTER JOIN " + (((OracleDatabase) database).canAccessDbaRecycleBin() ? "dba_recyclebin" : "user_recyclebin") + " d ON d.object_name=ui.table_name " +
+                                "LEFT JOIN all_cons_columns ac ON ac.OWNER = uc.OWNER AND ac.TABLE_NAME = uc.TABLE_NAME AND ac.CONSTRAINT_NAME = uc.CONSTRAINT_NAME " +
+                                "where uc.constraint_type='U' ";
+                        if (tableName != null || getAllCatalogsStringScratchData() == null) {
+                            sql += "and uc.owner = '" + jdbcSchemaName + "'";
+                        } else {
+                            sql += "and uc.owner IN ('" + jdbcSchemaName + "', " + getAllCatalogsStringScratchData() + ")";
+                        }
+                        sql += "AND d.object_name IS NULL ";
+
+                        if (tableName != null) {
+                            sql += " and uc.table_name = '" + tableName + "'";
+                        }
+                    } else if (database instanceof DB2Database) {
+                        // if we are on DB2 AS400 iSeries
+                        if (database.getDatabaseProductName().startsWith("DB2 UDB for AS/400")) {
+                            sql = "select constraint_name as constraint_name, table_name as table_name from QSYS2.TABLE_CONSTRAINTS where table_schema='" + jdbcSchemaName + "' and constraint_type='UNIQUE'";
+                            if (tableName != null) {
+                                sql += " and table_name = '" + tableName + "'";
+                            }
+                        }
+                        // here we are on DB2 LUW/UDB (DB2 for z/OS is handled by the separate Db2zDatabase branch below)
+                        else {
+                            sql = "select distinct k.constname as constraint_name, t.tabname as TABLE_NAME "
+                                    + "from syscat.keycoluse k "
+                                    + "inner join syscat.tabconst t "
+                                    + "on k.constname = t.constname "
+                                    + "where t.tabschema = ? "
+                                    + "and t.type = 'U'";
+                            parameters.add(jdbcSchemaName);
+                            if (tableName != null) {
+                                sql += " and t.tabname = ?";
+                                parameters.add(tableName);
+                            }
+                        }
+                    } else if (database instanceof Db2zDatabase) {
+                        sql = "select k.constname as constraint_name, t.tbname as TABLE_NAME"
+                                + " from SYSIBM.SYSKEYCOLUSE k"
+                                + " inner join SYSIBM.SYSTABCONST t"
+                                + " on k.constname = t.constname"
+                                + " and k.TBCREATOR = t.TBCREATOR"
+                                + " and k.TBNAME = t.TBNAME"
+                                + " where t.TBCREATOR = ?"
+                                + " and t.TYPE = 'U'";
+                        parameters.add(jdbcSchemaName);
+                        if (tableName != null) {
+                            sql += " and t.TBNAME = ?";
+                            parameters.add(tableName);
+                        }
+                    } else if (database instanceof FirebirdDatabase) {
+                        sql = "SELECT TRIM(RDB$INDICES.RDB$INDEX_NAME) AS CONSTRAINT_NAME, " +
+                                "TRIM(RDB$INDICES.RDB$RELATION_NAME) AS TABLE_NAME " +
+                                "FROM RDB$INDICES "
+                                + "LEFT JOIN RDB$RELATION_CONSTRAINTS "
+                                + "ON RDB$RELATION_CONSTRAINTS.RDB$INDEX_NAME = RDB$INDICES.RDB$INDEX_NAME "
+                                + "WHERE RDB$INDICES.RDB$UNIQUE_FLAG IS NOT NULL "
+                                + "AND ("
+                                + "RDB$RELATION_CONSTRAINTS.RDB$CONSTRAINT_TYPE IS NULL "
+                                + "OR TRIM(RDB$RELATION_CONSTRAINTS.RDB$CONSTRAINT_TYPE)='UNIQUE') "
+                                + "AND NOT(RDB$INDICES.RDB$INDEX_NAME LIKE 'RDB$%')";
+                        if (tableName != null) {
+                            sql += " AND TRIM(RDB$INDICES.RDB$RELATION_NAME)='" + tableName + "'";
+                        }
+                    } else if (database instanceof DerbyDatabase) {
+                        sql = "select c.constraintname as CONSTRAINT_NAME, tablename AS TABLE_NAME "
+                                + "from sys.systables t, sys.sysconstraints c, sys.sysschemas s "
+                                + "where s.schemaname='" + jdbcCatalogName + "' "
+                                + "and t.tableid = c.tableid "
+                                + "and t.schemaid=s.schemaid "
+                                + "and c.type = 'U'";
+                        if (tableName != null) {
+                            sql += " AND t.tablename = '" + tableName + "'";
+                        }
+                    } else if (database instanceof InformixDatabase) {
+                        sql = "select unique sysindexes.idxname as CONSTRAINT_NAME, sysindexes.idxtype, systables.tabname as TABLE_NAME "
+                                + "from sysindexes, systables "
+                                + "left outer join sysconstraints on sysconstraints.tabid = systables.tabid and sysconstraints.constrtype = 'P' "
+                                + "where sysindexes.tabid = systables.tabid and sysindexes.idxtype = 'U' "
+                                + "and sysconstraints.idxname != sysindexes.idxname "
+                                + "and sysconstraints.tabid = sysindexes.tabid";
+                        if (tableName != null) {
+                            sql += " and systables.tabname = '" + database.correctObjectName(tableName, Table.class) + "'";
+                        }
+                    } else if (database instanceof SybaseDatabase) {
+                        sql = "select idx.name as CONSTRAINT_NAME, tbl.name as TABLE_NAME "
+                                + "from sysindexes idx "
+                                + "inner join sysobjects tbl on tbl.id = idx.id "
+                                + "where idx.indid between 1 and 254 "
+                                + "and (idx.status & 2) = 2 "
+                                + "and tbl.type = 'U'";
+                        if (tableName != null) {
+                            sql += " and tbl.name = '" + database.correctObjectName(tableName, Table.class) + "'";
+                        }
+                    } else if (database instanceof SybaseASADatabase) {
+                        sql = "select sysconstraint.constraint_name, sysconstraint.constraint_type, systable.table_name " +
+                                "from sysconstraint, systable " +
+                                "where sysconstraint.table_object_id = systable.object_id " +
+                                "and sysconstraint.constraint_type = 'U'";
+                        if (tableName != null) {
+                            sql += " and systable.table_name = '" + tableName + "'";
+                        }
+                    } else {
+                        if (database instanceof H2Database) {
+                            try {
+                                if (database.getDatabaseMajorVersion() >= 2) {
+                                    sql = "select CONSTRAINT_NAME, CONSTRAINT_TYPE, TABLE_NAME "
+                                            + "from " + database.getSystemSchema() + ".table_constraints "
+                                            + "where constraint_schema='" + jdbcSchemaName + "' "
+                                            + "and constraint_catalog='" + jdbcCatalogName + "' "
+                                            + "and constraint_type='UNIQUE'";
+                                    if (tableName != null) {
+                                        sql += " and table_name='" + tableName + "'";
+                                    }
+                                }
+                            } catch (DatabaseException e) {
+                                Scope.getCurrentScope().getLog(getClass()).fine("Cannot determine h2 version, using default unique constraint query");
+                            }
+                        }
+                        if (sql == null) {
+
+                            sql = "select CONSTRAINT_NAME, CONSTRAINT_TYPE, TABLE_NAME "
+                                    + "from " + database.getSystemSchema() + ".constraints "
+                                    + "where constraint_schema='" + jdbcSchemaName + "' "
+                                    + "and constraint_catalog='" + jdbcCatalogName + "' "
+                                    + "and constraint_type='UNIQUE'";
+                            if (tableName != null) {
+                                sql += " and table_name='" + tableName + "'";
+                            }
+                        }
+                    }
+
+                    return executeAndExtract(database, database instanceof InformixDatabase, sql, parameters.toArray());
+                }
+            });
+        }
+    }
+
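+    // Comma-separated list of extra catalogs stashed in scratch data and spliced into the
+    // "owner IN (...)" filters above; null means only the requested schema is queried.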
+    private String getAllCatalogsStringScratchData() {
+        return (String) getScratchData(ALL_CATALOGS_STRING_SCRATCH_KEY);
+    }
+
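+    // Escapes LIKE wildcards in names passed to DatabaseMetaData lookups; SQLite and DM
+    // connections are returned unescaped (see the customization below).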
+    private String escapeForLike(String string, Database database) {
+        if (string == null) {
+            return null;
+        }
+
+        // Modified 0827: also skip LIKE escaping for DM (Dameng) connections
+        if (database instanceof SQLiteDatabase
+                || FlowUtil.isDM(database.getConnection().getURL())) {
+            // The SQLite JDBC driver does not support escaped LIKE patterns.
+            return string;
+        }
+
+        return string
+                .replace("%", "\\%")
+                .replace("_", "\\_");
+    }
+}

BIN
jnpf-workflow-flowable/target/classes/jnpf/workflow/flowable/cmd/JumpCmd.class


BIN
jnpf-workflow-flowable/target/classes/jnpf/workflow/flowable/service/DefinitionServiceImpl.class


BIN
jnpf-workflow-flowable/target/classes/jnpf/workflow/flowable/service/InstanceServiceImpl.class


BIN
jnpf-workflow-flowable/target/classes/jnpf/workflow/flowable/service/TaskServiceImpl.class


BIN
jnpf-workflow-flowable/target/classes/jnpf/workflow/flowable/util/FlowableUtil.class


BIN
jnpf-workflow-flowable/target/classes/liquibase/snapshot/JdbcDatabaseSnapshot$CachingDatabaseMetaData$1.class


BIN
jnpf-workflow-flowable/target/classes/liquibase/snapshot/JdbcDatabaseSnapshot$CachingDatabaseMetaData$2.class


BIN
jnpf-workflow-flowable/target/classes/liquibase/snapshot/JdbcDatabaseSnapshot$CachingDatabaseMetaData$3.class


BIN
jnpf-workflow-flowable/target/classes/liquibase/snapshot/JdbcDatabaseSnapshot$CachingDatabaseMetaData$4.class


BIN
jnpf-workflow-flowable/target/classes/liquibase/snapshot/JdbcDatabaseSnapshot$CachingDatabaseMetaData$5.class


BIN
jnpf-workflow-flowable/target/classes/liquibase/snapshot/JdbcDatabaseSnapshot$CachingDatabaseMetaData$ForeignKeysResultSetCache.class


Some files were not shown because too many files changed in this diff