refactor: 初始化dev-v2

This commit is contained in:
fit2cloud-chenyw 2023-10-23 22:00:14 +08:00
parent 910d5b26ab
commit afd05faf63
4962 changed files with 199249 additions and 0 deletions

View File

294
core/core-backend/pom.xml Normal file
View File

@ -0,0 +1,294 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<parent>
<artifactId>core</artifactId>
<groupId>io.dataease</groupId>
<version>2.0.0</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<packaging>jar</packaging>
<artifactId>core-backend</artifactId>
<dependencies>
<dependency>
<groupId>io.dataease</groupId>
<artifactId>api-base</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>io.dataease</groupId>
<artifactId>api-permissions</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>com.mysql</groupId>
<artifactId>mysql-connector-j</artifactId>
</dependency>
<!--calcite核心包-->
<dependency>
<groupId>org.apache.calcite</groupId>
<artifactId>calcite-core</artifactId>
<version>${calcite-core.version}</version>
<classifier>de</classifier>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-dbcp2</artifactId>
<version>${commons-dbcp2.version}</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.antlr/antlr -->
<dependency>
<groupId>org.antlr</groupId>
<artifactId>antlr</artifactId>
<version>${antlr.version}</version>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>${junit.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.jayway.jsonpath</groupId>
<artifactId>json-path</artifactId>
<version>2.4.0</version>
<exclusions>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-test</artifactId>
</dependency>
<dependency>
<groupId>com.fit2cloud</groupId>
<artifactId>quartz-spring-boot-starter</artifactId>
<version>1.0.8</version>
<exclusions>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
</exclusion>
</exclusions>
</dependency>
</dependencies>
<profiles>
<!-- 单机版 默认就是这个版本 -->
<profile>
<id>standalone</id>
<activation>
<activeByDefault>true</activeByDefault>
</activation>
<properties>
<profiles.active>standalone</profiles.active>
</properties>
<dependencies>
</dependencies>
<build>
<plugins>
<plugin>
<artifactId>maven-clean-plugin</artifactId>
<configuration>
<filesets>
<fileset>
<directory>src/main/resources/static</directory>
<includes>
<include>**</include>
</includes>
<followSymlinks>false</followSymlinks>
</fileset>
</filesets>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-antrun-plugin</artifactId>
<executions>
<execution>
<id>copy-front-2-back</id>
<phase>generate-resources</phase>
<configuration>
<target>
<move todir="src/main/resources/static">
<fileset dir="../core-frontend/dist">
<include name="**"/>
</fileset>
</move>
</target>
</configuration>
<goals>
<goal>run</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
<resources>
<resource>
<directory>src/main/resources</directory>
<filtering>true</filtering>
<includes>
<include>static/**</include>
</includes>
</resource>
</resources>
</build>
</profile>
<!-- 简单版(桌面版/社区版) 使用substitute包内的替补权限实现 -->
<profile>
<id>desktop</id>
<properties>
<profiles.active>desktop</profiles.active>
</properties>
<dependencies>
<dependency>
<groupId>com.h2database</groupId>
<artifactId>h2</artifactId>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<artifactId>maven-clean-plugin</artifactId>
<configuration>
<filesets>
<fileset>
<directory>src/main/resources/static</directory>
<includes>
<include>**</include>
</includes>
<followSymlinks>false</followSymlinks>
</fileset>
</filesets>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-antrun-plugin</artifactId>
<executions>
<execution>
<id>copy-front-2-back</id>
<phase>generate-resources</phase>
<configuration>
<target>
<move todir="src/main/resources/static">
<fileset dir="../core-frontend/dist">
<include name="**"/>
</fileset>
</move>
</target>
</configuration>
<goals>
<goal>run</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
<resources>
<resource>
<directory>src/main/resources</directory>
<filtering>true</filtering>
<includes>
<include>static/**</include>
</includes>
</resource>
</resources>
</build>
</profile>
<!-- 分布式版(企业版/saas版) -->
<profile>
<id>distributed</id>
<properties>
<profiles.active>distributed</profiles.active>
</properties>
<dependencies>
<!-- 分布式版(企业版/saas版) 引入分布式组件 -->
<dependency>
<groupId>io.dataease</groupId>
<artifactId>distributed</artifactId>
<version>${project.version}</version>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<configuration>
<excludes>
<!-- 分布式版(企业版/saas版) 需要排除权限替补实现 否则就会出现多个权限实现 报错 -->
<exclude>io/dataease/substitute/**</exclude>
</excludes>
</configuration>
</plugin>
</plugins>
</build>
</profile>
</profiles>
<build>
<!-- 打包时必须要包含resources下配置文件 -->
<resources>
<resource>
<directory>src/main/resources</directory>
<filtering>true</filtering>
<includes>
<include>**/*.properties</include>
<include>**/*.xml</include>
<include>**/*.yml</include>
<include>**/*.sql</include>
<include>**/*.xlsx</include>
</includes>
</resource>
</resources>
<!-- springboot打包插件 -->
<plugins>
<plugin>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-maven-plugin</artifactId>
<!--<configuration>
<mainClass>io.dataease.CoreApplication</mainClass>
</configuration>-->
<executions>
<execution>
<goals>
<goal>repackage</goal>
</goals>
<configuration>
<finalName>CoreApplication</finalName>
<layout>ZIP</layout>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
<repositories>
<repository>
<id>fit2cloud-public</id>
<name>Fit2cloud Public</name>
<url>https://repository.fit2cloud.com/repository/fit2cloud-public/</url>
</repository>
</repositories>
</project>

View File

@ -0,0 +1,17 @@
package io.dataease;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.autoconfigure.quartz.QuartzAutoConfiguration;
import org.springframework.cache.annotation.EnableCaching;
import org.springframework.scheduling.annotation.EnableScheduling;
// Main Spring Boot entry point of the DataEase core backend.
// Quartz auto-configuration is excluded — scheduling presumably comes from the
// custom quartz-spring-boot-starter instead; TODO confirm against the pom.
@SpringBootApplication(exclude = {QuartzAutoConfiguration.class})
@EnableCaching      // enable Spring's annotation-driven caching support
@EnableScheduling   // enable @Scheduled task execution
public class CoreApplication {
    public static void main(String[] args) {
        SpringApplication.run(CoreApplication.class, args);
    }
}

View File

@ -0,0 +1,70 @@
package io.dataease;
import com.baomidou.mybatisplus.generator.FastAutoGenerator;
import com.baomidou.mybatisplus.generator.config.DataSourceConfig;
import com.baomidou.mybatisplus.generator.config.TemplateType;
import com.baomidou.mybatisplus.generator.config.converts.MySqlTypeConvert;
/**
 * Dev-only MyBatis-Plus code generator (研发使用请勿提交 — for developer use only,
 * do not commit generated output). Generates entity and mapper classes for a
 * single table into this module's source tree; run {@link #main(String[])}
 * manually from the IDE.
 */
public class MybatisPlusGenerator {
    /*
     * Connection settings are read from the environment first
     * (DE_GENERATOR_URL / DE_GENERATOR_USERNAME / DE_GENERATOR_PASSWORD),
     * falling back to the legacy in-code values for backward compatibility.
     *
     * SECURITY NOTE(review): the fallback credentials below were committed to
     * version control and must be considered compromised — rotate them and
     * prefer the environment variables.
     */
    private static final String url = envOrDefault("DE_GENERATOR_URL",
            "jdbc:mysql://39.98.78.97:3306/dataease?autoReconnect=false&useUnicode=true&characterEncoding=UTF-8&characterSetResults=UTF-8&zeroDateTimeBehavior=convertToNull&useSSL=false");
    private static final String username = envOrDefault("DE_GENERATOR_USERNAME", "root");
    private static final String password = envOrDefault("DE_GENERATOR_PASSWORD", "Password123@mysql");

    /**
     * Business module, e.g. datasource, dataset, panel.
     */
    private static final String busi = "operation";

    /**
     * Table to generate code for.
     */
    private static final String TABLE_NAME = "core_opt_recent";

    /**
     * Output locations — these two rarely need changing.
     */
    private static final String codeDir = "src/main/java/";
    private static final String AUTO_DAO = ".dao.auto";

    public static void main(String[] args) throws Exception {
        String path = System.getProperty("java.class.path");
        int markerIdx = path.indexOf("target/classes");
        if (markerIdx < 0) {
            // Guard: previously this crashed with an obscure
            // StringIndexOutOfBoundsException when not launched from the
            // module's compiled classes.
            throw new IllegalStateException(
                    "Run from the module so 'target/classes' is on the classpath; got: " + path);
        }
        path = path.substring(0, markerIdx);
        String packageName = packageName() + "." + busi + AUTO_DAO;
        String outPath = path + codeDir;
        DataSourceConfig.Builder dsc = new DataSourceConfig.Builder(url, username, password);
        dsc.typeConvert(MySqlTypeConvert.INSTANCE);
        FastAutoGenerator.create(dsc)
                .globalConfig(builder -> builder.author("fit2cloud").outputDir(outPath))
                .packageConfig(builder -> builder.parent(packageName))
                .strategyConfig(builder ->
                        // table(s) to generate; overwrite existing files on re-run
                        builder.addInclude(TABLE_NAME)
                                .entityBuilder().enableFileOverride()
                                .mapperBuilder().mapperAnnotation(org.apache.ibatis.annotations.Mapper.class)
                                .enableFileOverride())
                .templateConfig(builder ->
                        // only entity + mapper are wanted; skip controller/service/xml
                        builder.disable(TemplateType.CONTROLLER)
                                .disable(TemplateType.SERVICE)
                                .disable(TemplateType.SERVICE_IMPL)
                                .disable(TemplateType.XML)
                                .build())
                .execute();
    }

    /** Environment value when set and non-blank, otherwise the given default. */
    private static String envOrDefault(String key, String def) {
        String value = System.getenv(key);
        return (value == null || value.isBlank()) ? def : value;
    }

    /** Package of this class; replaces the old anonymous-object reflection trick. */
    private static String packageName() {
        return MybatisPlusGenerator.class.getPackageName();
    }
}

View File

@ -0,0 +1,23 @@
package io.dataease.chart.constant;
/**
 * Constants used by chart compare calculations and view result modes.
 *
 * @Author gin
 */
public final class ChartConstants {
    public static final String YEAR_MOM = "year_mom";
    public static final String MONTH_MOM = "month_mom";
    public static final String YEAR_YOY = "year_yoy";
    public static final String DAY_MOM = "day_mom";
    public static final String MONTH_YOY = "month_yoy";

    // NOTE(review): public mutable array — callers could overwrite entries.
    // Kept as-is for interface compatibility; treat as read-only.
    public static final String[] M_Y = {YEAR_MOM, MONTH_MOM, YEAR_YOY, DAY_MOM, MONTH_YOY};

    private ChartConstants() {
        // constants holder — not instantiable
    }

    // 视图数据查询模式 — view data query mode
    public static final class VIEW_RESULT_MODE {
        // all rows
        public static final String ALL = "all";
        // custom (user-configured) row count
        public static final String CUSTOM = "custom";

        private VIEW_RESULT_MODE() {
            // constants holder — not instantiable
        }
    }
}

View File

@ -0,0 +1,536 @@
package io.dataease.chart.dao.auto.entity;
import com.baomidou.mybatisplus.annotation.TableName;
import java.io.Serializable;
/**
 * <p>
 * Persistent entity for the {@code core_chart_view} table: one saved chart
 * view with all of its serialized configuration. The axis/style/filter fields
 * hold JSON strings — see {@code ChartViewManege.transDTO2Record}, which
 * serializes DTO lists into them with Jackson.
 * </p>
 * <p>
 * Generated by MyBatis-Plus. The non-standard accessor casing
 * ({@code getxAxis()}, {@code setyAxisExt()}, …) is relied on by callers such
 * as {@code ChartViewManege} — do not rename.
 * </p>
 *
 * @author fit2cloud
 * @since 2023-08-20
 */
@TableName("core_chart_view")
public class CoreChartView implements Serializable {

    private static final long serialVersionUID = 1L;

    /**
     * ID
     */
    private Long id;

    /**
     * Title
     */
    private String title;

    /**
     * Scene ID; when chartType is "private" this is the dashboard (panel) id
     */
    private Long sceneId;

    /**
     * Dataset table ID
     */
    private Long tableId;

    /**
     * Chart type
     */
    private String type;

    /**
     * Chart render engine
     */
    private String render;

    /**
     * Number of result rows to display
     */
    private Integer resultCount;

    /**
     * Result display mode (see ChartConstants.VIEW_RESULT_MODE)
     */
    private String resultMode;

    /**
     * X-axis fields (JSON list)
     */
    private String xAxis;

    /**
     * Extended x-axis / table-row fields (JSON list)
     */
    private String xAxisExt;

    /**
     * Y-axis fields (JSON list)
     */
    private String yAxis;

    /**
     * Secondary y-axis fields (JSON list)
     */
    private String yAxisExt;

    /**
     * Stack dimension (JSON)
     */
    private String extStack;

    /**
     * Bubble size field (JSON)
     */
    private String extBubble;

    /**
     * Dynamic label fields (JSON)
     */
    private String extLabel;

    /**
     * Dynamic tooltip fields (JSON)
     */
    private String extTooltip;

    /**
     * Chart attributes (JSON)
     */
    private String customAttr;

    /**
     * Component style (JSON)
     */
    private String customStyle;

    /**
     * Result filter (JSON)
     */
    private String customFilter;

    /**
     * Drill-down fields (JSON list)
     */
    private String drillFields;

    /**
     * Advanced settings (JSON)
     */
    private String senior;

    /**
     * Creator ID
     */
    private String createBy;

    /**
     * Create time (epoch millis — TODO confirm unit)
     */
    private Long createTime;

    /**
     * Update time (epoch millis — TODO confirm unit)
     */
    private Long updateTime;

    /**
     * Snapshot (thumbnail) image
     */
    private String snapshot;

    /**
     * Style priority: "panel" = dashboard style wins, "view" = chart style wins
     */
    private String stylePriority;

    /**
     * Chart scope: "public" = reusable historical chart, "private" = owned by a single dashboard
     */
    private String chartType;

    /**
     * Whether the chart comes from a plugin
     */
    private Boolean isPlugin;

    /**
     * Data origin: "template" = template data, "dataset" = dataset data
     */
    private String dataFrom;

    /**
     * Chart field collection (JSON list)
     */
    private String viewFields;

    /**
     * Whether auto-refresh is enabled
     */
    private Boolean refreshViewEnable;

    /**
     * Refresh interval unit
     */
    private String refreshUnit;

    /**
     * Refresh interval value
     */
    private Integer refreshTime;

    /**
     * Whether linkage is enabled
     */
    private Boolean linkageActive;

    /**
     * Whether jump (drill-through navigation) is enabled
     */
    private Boolean jumpActive;

    // Provenance of copied charts; no column comments available in source
    private Long copyFrom;
    private Long copyId;

    public Long getId() {
        return id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    public String getTitle() {
        return title;
    }

    public void setTitle(String title) {
        this.title = title;
    }

    public Long getSceneId() {
        return sceneId;
    }

    public void setSceneId(Long sceneId) {
        this.sceneId = sceneId;
    }

    public Long getTableId() {
        return tableId;
    }

    public void setTableId(Long tableId) {
        this.tableId = tableId;
    }

    public String getType() {
        return type;
    }

    public void setType(String type) {
        this.type = type;
    }

    public String getRender() {
        return render;
    }

    public void setRender(String render) {
        this.render = render;
    }

    public Integer getResultCount() {
        return resultCount;
    }

    public void setResultCount(Integer resultCount) {
        this.resultCount = resultCount;
    }

    public String getResultMode() {
        return resultMode;
    }

    public void setResultMode(String resultMode) {
        this.resultMode = resultMode;
    }

    // Note the lowercase 'x'/'y' accessor naming below — generated this way and
    // referenced verbatim elsewhere in the codebase.
    public String getxAxis() {
        return xAxis;
    }

    public void setxAxis(String xAxis) {
        this.xAxis = xAxis;
    }

    public String getxAxisExt() {
        return xAxisExt;
    }

    public void setxAxisExt(String xAxisExt) {
        this.xAxisExt = xAxisExt;
    }

    public String getyAxis() {
        return yAxis;
    }

    public void setyAxis(String yAxis) {
        this.yAxis = yAxis;
    }

    public String getyAxisExt() {
        return yAxisExt;
    }

    public void setyAxisExt(String yAxisExt) {
        this.yAxisExt = yAxisExt;
    }

    public String getExtStack() {
        return extStack;
    }

    public void setExtStack(String extStack) {
        this.extStack = extStack;
    }

    public String getExtBubble() {
        return extBubble;
    }

    public void setExtBubble(String extBubble) {
        this.extBubble = extBubble;
    }

    public String getExtLabel() {
        return extLabel;
    }

    public void setExtLabel(String extLabel) {
        this.extLabel = extLabel;
    }

    public String getExtTooltip() {
        return extTooltip;
    }

    public void setExtTooltip(String extTooltip) {
        this.extTooltip = extTooltip;
    }

    public String getCustomAttr() {
        return customAttr;
    }

    public void setCustomAttr(String customAttr) {
        this.customAttr = customAttr;
    }

    public String getCustomStyle() {
        return customStyle;
    }

    public void setCustomStyle(String customStyle) {
        this.customStyle = customStyle;
    }

    public String getCustomFilter() {
        return customFilter;
    }

    public void setCustomFilter(String customFilter) {
        this.customFilter = customFilter;
    }

    public String getDrillFields() {
        return drillFields;
    }

    public void setDrillFields(String drillFields) {
        this.drillFields = drillFields;
    }

    public String getSenior() {
        return senior;
    }

    public void setSenior(String senior) {
        this.senior = senior;
    }

    public String getCreateBy() {
        return createBy;
    }

    public void setCreateBy(String createBy) {
        this.createBy = createBy;
    }

    public Long getCreateTime() {
        return createTime;
    }

    public void setCreateTime(Long createTime) {
        this.createTime = createTime;
    }

    public Long getUpdateTime() {
        return updateTime;
    }

    public void setUpdateTime(Long updateTime) {
        this.updateTime = updateTime;
    }

    public String getSnapshot() {
        return snapshot;
    }

    public void setSnapshot(String snapshot) {
        this.snapshot = snapshot;
    }

    public String getStylePriority() {
        return stylePriority;
    }

    public void setStylePriority(String stylePriority) {
        this.stylePriority = stylePriority;
    }

    public String getChartType() {
        return chartType;
    }

    public void setChartType(String chartType) {
        this.chartType = chartType;
    }

    public Boolean getIsPlugin() {
        return isPlugin;
    }

    public void setIsPlugin(Boolean isPlugin) {
        this.isPlugin = isPlugin;
    }

    public String getDataFrom() {
        return dataFrom;
    }

    public void setDataFrom(String dataFrom) {
        this.dataFrom = dataFrom;
    }

    public String getViewFields() {
        return viewFields;
    }

    public void setViewFields(String viewFields) {
        this.viewFields = viewFields;
    }

    public Boolean getRefreshViewEnable() {
        return refreshViewEnable;
    }

    public void setRefreshViewEnable(Boolean refreshViewEnable) {
        this.refreshViewEnable = refreshViewEnable;
    }

    public String getRefreshUnit() {
        return refreshUnit;
    }

    public void setRefreshUnit(String refreshUnit) {
        this.refreshUnit = refreshUnit;
    }

    public Integer getRefreshTime() {
        return refreshTime;
    }

    public void setRefreshTime(Integer refreshTime) {
        this.refreshTime = refreshTime;
    }

    public Boolean getLinkageActive() {
        return linkageActive;
    }

    public void setLinkageActive(Boolean linkageActive) {
        this.linkageActive = linkageActive;
    }

    public Boolean getJumpActive() {
        return jumpActive;
    }

    public void setJumpActive(Boolean jumpActive) {
        this.jumpActive = jumpActive;
    }

    public Long getCopyFrom() {
        return copyFrom;
    }

    public void setCopyFrom(Long copyFrom) {
        this.copyFrom = copyFrom;
    }

    public Long getCopyId() {
        return copyId;
    }

    public void setCopyId(Long copyId) {
        this.copyId = copyId;
    }

    @Override
    public String toString() {
        return "CoreChartView{" +
        "id = " + id +
        ", title = " + title +
        ", sceneId = " + sceneId +
        ", tableId = " + tableId +
        ", type = " + type +
        ", render = " + render +
        ", resultCount = " + resultCount +
        ", resultMode = " + resultMode +
        ", xAxis = " + xAxis +
        ", xAxisExt = " + xAxisExt +
        ", yAxis = " + yAxis +
        ", yAxisExt = " + yAxisExt +
        ", extStack = " + extStack +
        ", extBubble = " + extBubble +
        ", extLabel = " + extLabel +
        ", extTooltip = " + extTooltip +
        ", customAttr = " + customAttr +
        ", customStyle = " + customStyle +
        ", customFilter = " + customFilter +
        ", drillFields = " + drillFields +
        ", senior = " + senior +
        ", createBy = " + createBy +
        ", createTime = " + createTime +
        ", updateTime = " + updateTime +
        ", snapshot = " + snapshot +
        ", stylePriority = " + stylePriority +
        ", chartType = " + chartType +
        ", isPlugin = " + isPlugin +
        ", dataFrom = " + dataFrom +
        ", viewFields = " + viewFields +
        ", refreshViewEnable = " + refreshViewEnable +
        ", refreshUnit = " + refreshUnit +
        ", refreshTime = " + refreshTime +
        ", linkageActive = " + linkageActive +
        ", jumpActive = " + jumpActive +
        ", copyFrom = " + copyFrom +
        ", copyId = " + copyId +
        "}";
    }
}

View File

@ -0,0 +1,18 @@
package io.dataease.chart.dao.auto.mapper;
import io.dataease.chart.dao.auto.entity.CoreChartView;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import org.apache.ibatis.annotations.Mapper;
/**
 * <p>
 * MyBatis-Plus mapper for the {@code core_chart_view} table. All CRUD is
 * inherited from {@link com.baomidou.mybatisplus.core.mapper.BaseMapper};
 * no custom SQL is declared here.
 * </p>
 *
 * @author fit2cloud
 * @since 2023-08-20
 */
@Mapper
public interface CoreChartViewMapper extends BaseMapper<CoreChartView> {

}

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,275 @@
package io.dataease.chart.manage;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import io.dataease.api.chart.dto.*;
import io.dataease.api.dataset.union.model.SQLObj;
import io.dataease.chart.dao.auto.entity.CoreChartView;
import io.dataease.chart.dao.auto.mapper.CoreChartViewMapper;
import io.dataease.dataset.dao.auto.entity.CoreDatasetTableField;
import io.dataease.dataset.dao.auto.mapper.CoreDatasetTableFieldMapper;
import io.dataease.dataset.manage.PermissionManage;
import io.dataease.dto.dataset.DatasetTableFieldDTO;
import io.dataease.engine.constant.ExtFieldConstant;
import io.dataease.engine.func.FunctionConstant;
import io.dataease.engine.utils.Utils;
import io.dataease.exception.DEException;
import io.dataease.i18n.Translator;
import io.dataease.utils.BeanUtils;
import io.dataease.utils.JsonUtil;
import jakarta.annotation.Resource;
import org.apache.commons.lang3.ObjectUtils;
import org.apache.commons.lang3.StringUtils;
import org.springframework.stereotype.Component;
import org.springframework.transaction.annotation.Transactional;
import java.util.*;
import java.util.stream.Collectors;
/**
 * Management layer for chart views ({@code core_chart_view}): CRUD, field
 * listing for a dataset, and DTO &lt;-&gt; record JSON (de)serialization.
 *
 * @Author Junjun
 */
@Component
public class ChartViewManege {
    @Resource
    private CoreChartViewMapper coreChartViewMapper;
    @Resource
    private ChartDataManage chartDataManage;
    @Resource
    private CoreDatasetTableFieldMapper coreDatasetTableFieldMapper;
    @Resource
    private PermissionManage permissionManage;

    // Jackson's ObjectMapper is thread-safe after configuration; share one
    // instance instead of holding a mutable per-bean field.
    private static final ObjectMapper objectMapper = new ObjectMapper();

    /**
     * Upsert a chart view. The caller must supply the id; titles are limited
     * to 100 characters.
     *
     * @return the DTO that was passed in
     * @throws Exception from JSON serialization of the DTO's axis/style fields
     */
    @Transactional
    public ChartViewDTO save(ChartViewDTO chartViewDTO) throws Exception {
        if (chartViewDTO.getTitle().length() > 100) {
            DEException.throwException(Translator.get("i18n_name_limit_100"));
        }
        Long id = chartViewDTO.getId();
        if (id == null) {
            DEException.throwException(Translator.get("i18n_no_id"));
        }
        CoreChartView coreChartView = coreChartViewMapper.selectById(id);
        CoreChartView record = transDTO2Record(chartViewDTO);
        if (ObjectUtils.isEmpty(coreChartView)) {
            coreChartViewMapper.insert(record);
        } else {
            coreChartViewMapper.updateById(record);
        }
        return chartViewDTO;
    }

    /** Delete a single chart view by id. */
    public void delete(Long id) {
        coreChartViewMapper.deleteById(id);
    }

    /**
     * Delete every chart of a scene except the given ids.
     * NOTE(review): behavior when {@code chartIds} is empty depends on how
     * MyBatis-Plus renders an empty NOT IN — TODO confirm callers never pass
     * an empty list.
     */
    @Transactional
    public void deleteBySceneId(Long sceneId, List<Long> chartIds) {
        QueryWrapper<CoreChartView> wrapper = new QueryWrapper<>();
        wrapper.eq("scene_id", sceneId);
        wrapper.notIn("id", chartIds);
        coreChartViewMapper.delete(wrapper);
    }

    /** Load a chart view and deserialize it, or {@code null} when absent. */
    public ChartViewDTO getDetails(Long id) {
        CoreChartView coreChartView = coreChartViewMapper.selectById(id);
        if (ObjectUtils.isEmpty(coreChartView)) {
            return null;
        }
        return transRecord2DTO(coreChartView);
    }

    /**
     * List all chart views of a scene; sceneId is the id of a dashboard or
     * data screen.
     */
    public List<ChartViewDTO> listBySceneId(Long sceneId) {
        QueryWrapper<CoreChartView> wrapper = new QueryWrapper<>();
        wrapper.eq("scene_id", sceneId);
        return transChart(coreChartViewMapper.selectList(wrapper));
    }

    /** Convert records to DTOs; empty/null input yields an empty list. */
    public List<ChartViewDTO> transChart(List<CoreChartView> list) {
        if (ObjectUtils.isEmpty(list)) {
            return Collections.emptyList();
        }
        return list.stream().map(this::transRecord2DTO).collect(Collectors.toList());
    }

    /**
     * Load a view and compute its data; {@code null} when the view does not
     * exist.
     */
    public ChartViewDTO getChart(Long id) throws Exception {
        ChartViewDTO details = getDetails(id);
        if (details == null) {
            return null;
        }
        return chartDataManage.calcData(details);
    }

    /**
     * List the dimension ("d") and quota ("q") fields available to a chart:
     * the dataset's checked fields (with column permissions applied), a
     * synthetic "record count" field, plus the chart's own calculated fields.
     * Calculated fields whose expression already contains an aggregate
     * function get an empty summary so they are not re-aggregated.
     *
     * @param id      dataset group id
     * @param chartId chart id whose calculated fields are appended
     * @return map with keys "dimensionList" and "quotaList"
     */
    public Map<String, List<ChartViewFieldDTO>> listByDQ(Long id, Long chartId) {
        QueryWrapper<CoreDatasetTableField> wrapper = new QueryWrapper<>();
        wrapper.eq("dataset_group_id", id);
        wrapper.eq("checked", true);
        List<CoreDatasetTableField> fields = coreDatasetTableFieldMapper.selectList(wrapper);
        List<DatasetTableFieldDTO> collect = fields.stream().map(ele -> {
            DatasetTableFieldDTO dto = new DatasetTableFieldDTO();
            BeanUtils.copyBean(dto, ele);
            return dto;
        }).collect(Collectors.toList());
        // drop columns the current user is not allowed to see
        Map<String, ColumnPermissionItem> desensitizationList = new HashMap<>();
        List<DatasetTableFieldDTO> datasetTableFieldDTOS =
                permissionManage.filterColumnPermissions(collect, desensitizationList, id, null);
        datasetTableFieldDTOS.add(createCountField(id));
        List<ChartViewFieldDTO> list = transFieldDTO(datasetTableFieldDTOS);
        // append the chart's own calculated fields (wrapper reused after clear())
        wrapper.clear();
        wrapper.eq("chart_id", chartId);
        List<DatasetTableFieldDTO> chartFields = coreDatasetTableFieldMapper.selectList(wrapper).stream().map(ele -> {
            DatasetTableFieldDTO dto = new DatasetTableFieldDTO();
            BeanUtils.copyBean(dto, ele);
            return dto;
        }).collect(Collectors.toList());
        list.addAll(transFieldDTO(chartFields));
        // blank the summary of calc fields that already aggregate
        SQLObj tableObj = new SQLObj();
        tableObj.setTableAlias("");
        for (ChartViewFieldDTO ele : list) {
            if (Objects.equals(ele.getExtField(), ExtFieldConstant.EXT_CALC)) {
                String originField = Utils.calcFieldRegex(ele.getOriginName(), tableObj, list.stream().peek(e -> {
                    DatasetTableFieldDTO dto = new DatasetTableFieldDTO();
                    BeanUtils.copyBean(dto, e);
                }).collect(Collectors.toList()));
                for (String func : FunctionConstant.AGG_FUNC) {
                    if (Utils.matchFunction(func, originField)) {
                        ele.setSummary("");
                        break;
                    }
                }
            }
        }
        List<ChartViewFieldDTO> dimensionList = list.stream()
                .filter(ele -> StringUtils.equalsIgnoreCase(ele.getGroupType(), "d"))
                .collect(Collectors.toList());
        List<ChartViewFieldDTO> quotaList = list.stream()
                .filter(ele -> StringUtils.equalsIgnoreCase(ele.getGroupType(), "q"))
                .collect(Collectors.toList());
        Map<String, List<ChartViewFieldDTO>> map = new LinkedHashMap<>();
        map.put("dimensionList", dimensionList);
        map.put("quotaList", quotaList);
        return map;
    }

    /** Synthetic "record count" (COUNT(*)) quota field with the sentinel id -1. */
    public DatasetTableFieldDTO createCountField(Long id) {
        DatasetTableFieldDTO dto = new DatasetTableFieldDTO();
        dto.setId(-1L);
        dto.setDatasetGroupId(id);
        dto.setOriginName("*");
        dto.setName("记录数*");
        dto.setDataeaseName("*");
        dto.setType("INT");
        dto.setChecked(true);
        dto.setColumnIndex(999);
        dto.setDeType(2);
        dto.setExtField(1);
        dto.setGroupType("q");
        return dto;
    }

    /**
     * Convert dataset fields into chart view fields with UI defaults
     * (date style, summary, compare/formatter/sort settings).
     */
    public List<ChartViewFieldDTO> transFieldDTO(List<DatasetTableFieldDTO> list) {
        return list.stream().map(ele -> {
            // Fixed ordering: check for null before touching the element.
            if (ele == null) return null;
            ChartViewFieldDTO dto = new ChartViewFieldDTO();
            BeanUtils.copyBean(dto, ele);
            dto.setDateStyle("y_M_d");
            dto.setDatePattern("date_sub");
            dto.setChartType("bar");
            // count field (-1) and text/time de-types default to count, numbers to sum
            if (dto.getId() == -1L || dto.getDeType() == 0 || dto.getDeType() == 1) {
                dto.setSummary("count");
            } else {
                dto.setSummary("sum");
            }
            ChartFieldCompareDTO chartFieldCompareDTO = new ChartFieldCompareDTO();
            chartFieldCompareDTO.setType("none");
            dto.setCompareCalc(chartFieldCompareDTO);
            dto.setFormatterCfg(new FormatterCfgDTO());
            dto.setSort("none");
            dto.setFilter(Collections.emptyList());
            return dto;
        }).collect(Collectors.toList());
    }

    /**
     * Serialize a DTO into a DB record: every list/map-valued field is stored
     * as a JSON string.
     *
     * @throws Exception on JSON serialization failure
     */
    public CoreChartView transDTO2Record(ChartViewDTO dto) throws Exception {
        CoreChartView record = new CoreChartView();
        BeanUtils.copyBean(record, dto);
        record.setxAxis(objectMapper.writeValueAsString(dto.getXAxis()));
        record.setxAxisExt(objectMapper.writeValueAsString(dto.getXAxisExt()));
        record.setyAxis(objectMapper.writeValueAsString(dto.getYAxis()));
        record.setyAxisExt(objectMapper.writeValueAsString(dto.getYAxisExt()));
        record.setExtStack(objectMapper.writeValueAsString(dto.getExtStack()));
        record.setExtBubble(objectMapper.writeValueAsString(dto.getExtBubble()));
        record.setExtLabel(objectMapper.writeValueAsString(dto.getExtLabel()));
        record.setExtTooltip(objectMapper.writeValueAsString(dto.getExtTooltip()));
        record.setCustomAttr(objectMapper.writeValueAsString(dto.getCustomAttr()));
        record.setCustomStyle(objectMapper.writeValueAsString(dto.getCustomStyle()));
        record.setSenior(objectMapper.writeValueAsString(dto.getSenior()));
        record.setDrillFields(objectMapper.writeValueAsString(dto.getDrillFields()));
        record.setCustomFilter(objectMapper.writeValueAsString(dto.getCustomFilter()));
        record.setViewFields(objectMapper.writeValueAsString(dto.getViewFields()));
        return record;
    }

    /** Deserialize a DB record's JSON columns back into a DTO. */
    public ChartViewDTO transRecord2DTO(CoreChartView record) {
        ChartViewDTO dto = new ChartViewDTO();
        BeanUtils.copyBean(dto, record);
        TypeReference<List<ChartViewFieldDTO>> tokenType = new TypeReference<>() {
        };
        TypeReference<List<ChartFieldCustomFilterDTO>> filterTokenType = new TypeReference<>() {
        };
        dto.setXAxis(JsonUtil.parseList(record.getxAxis(), tokenType));
        dto.setXAxisExt(JsonUtil.parseList(record.getxAxisExt(), tokenType));
        dto.setYAxis(JsonUtil.parseList(record.getyAxis(), tokenType));
        dto.setYAxisExt(JsonUtil.parseList(record.getyAxisExt(), tokenType));
        dto.setExtStack(JsonUtil.parseList(record.getExtStack(), tokenType));
        dto.setExtBubble(JsonUtil.parseList(record.getExtBubble(), tokenType));
        dto.setExtLabel(JsonUtil.parseList(record.getExtLabel(), tokenType));
        dto.setExtTooltip(JsonUtil.parseList(record.getExtTooltip(), tokenType));
        dto.setCustomAttr(JsonUtil.parse(record.getCustomAttr(), Map.class));
        dto.setCustomStyle(JsonUtil.parse(record.getCustomStyle(), Map.class));
        dto.setSenior(JsonUtil.parse(record.getSenior(), Map.class));
        dto.setDrillFields(JsonUtil.parseList(record.getDrillFields(), tokenType));
        dto.setCustomFilter(JsonUtil.parseList(record.getCustomFilter(), filterTokenType));
        dto.setViewFields(JsonUtil.parseList(record.getViewFields(), tokenType));
        return dto;
    }

    /**
     * Whether two views are based on the same dataset table.
     *
     * @return "YES" when both views share a single distinct table_id, else "NO"
     */
    public String checkSameDataSet(String viewIdSource, String viewIdTarget) {
        QueryWrapper<CoreChartView> wrapper = new QueryWrapper<>();
        wrapper.select("distinct table_id");
        wrapper.in("id", Arrays.asList(viewIdSource, viewIdTarget));
        // Fixed: the count query was previously executed twice with the first
        // result discarded — one round-trip is enough.
        if (coreChartViewMapper.selectCount(wrapper) == 1) {
            return "YES";
        } else {
            return "NO";
        }
    }
}

View File

@ -0,0 +1,222 @@
package io.dataease.chart.server;
import cn.hutool.core.util.ArrayUtil;
import io.dataease.api.chart.ChartDataApi;
import io.dataease.api.chart.dto.ChartViewDTO;
import io.dataease.api.chart.dto.ViewDetailField;
import io.dataease.api.chart.request.ChartExcelRequest;
import io.dataease.chart.manage.ChartDataManage;
import io.dataease.engine.constant.DeTypeConstants;
import io.dataease.exception.DEException;
import io.dataease.result.ResultCode;
import io.dataease.utils.LogUtil;
import jakarta.annotation.Resource;
import jakarta.servlet.http.HttpServletResponse;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.ObjectUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.poi.hssf.usermodel.HSSFClientAnchor;
import org.apache.poi.hssf.usermodel.HSSFWorkbook;
import org.apache.poi.ss.usermodel.*;
import org.apache.poi.ss.util.CellRangeAddress;
import org.apache.poi.xssf.streaming.SXSSFWorkbook;
import org.springframework.util.Base64Utils;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import java.io.IOException;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
/**
* @Author Junjun
*/
@RestController
@RequestMapping("chartData")
public class ChartDataServer implements ChartDataApi {
@Resource
private ChartDataManage chartDataManage;
@Override
public ChartViewDTO getData(ChartViewDTO chartViewDTO) throws Exception {
    try {
        // delegate the actual query/aggregation to the manage layer
        return chartDataManage.calcData(chartViewDTO);
    } catch (Exception e) {
        // Re-raise as a business error carrying DATA_IS_WRONG. throwException
        // presumably always throws, making the return below unreachable — TODO confirm.
        DEException.throwException(ResultCode.DATA_IS_WRONG.code(), e.getMessage());
    }
    return null;
}
/**
 * Runs the view query for an Excel export and flattens each result row of
 * {@code data["tableRow"]} into an Object[] ordered by
 * {@code request.getExcelHeaderKeys()}, storing the rows back on the request
 * via {@code setDetails}.
 *
 * @param request export request carrying the view definition and header keys
 * @throws RuntimeException wrapping any failure from the data query
 */
public void findExcelData(ChartExcelRequest request) {
    try {
        ChartViewDTO viewDTO = request.getViewInfo();
        viewDTO.setIsExcelExport(true);
        List<String> excelHeaderKeys = request.getExcelHeaderKeys();
        ChartViewDTO chartViewInfo = getData(viewDTO);
        if (chartViewInfo == null || chartViewInfo.getData() == null) {
            // Previously this surfaced as an opaque NullPointerException wrapped
            // in RuntimeException; fail with an explicit message instead.
            throw new IllegalStateException("chart view returned no data, cannot export");
        }
        List<Map> tableRow = (List) chartViewInfo.getData().get("tableRow");
        List<Object[]> result = new ArrayList<>();
        for (Map detailMap : tableRow) {
            // project each row into the export column order
            Object[] row = new Object[excelHeaderKeys.size()];
            for (int i = 0; i < excelHeaderKeys.size(); i++) {
                row[i] = detailMap.get(excelHeaderKeys.get(i));
            }
            result.add(row);
        }
        request.setDetails(result);
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}
@Override
public void innerExportDetails(ChartExcelRequest request, HttpServletResponse response) throws Exception {
OutputStream outputStream = response.getOutputStream();
try {
findExcelData(request);
List<Object[]> details = request.getDetails();
Integer[] excelTypes = request.getExcelTypes();
details.add(0, request.getHeader());
Workbook wb = new SXSSFWorkbook();
//明细sheet
Sheet detailsSheet = wb.createSheet("数据");
//给单元格设置样式
CellStyle cellStyle = wb.createCellStyle();
Font font = wb.createFont();
//设置字体大小
font.setFontHeightInPoints((short) 12);
//设置字体加粗
font.setBold(true);
//给字体设置样式
cellStyle.setFont(font);
//设置单元格背景颜色
cellStyle.setFillForegroundColor(IndexedColors.GREY_25_PERCENT.getIndex());
//设置单元格填充样式(使用纯色背景颜色填充)
cellStyle.setFillPattern(FillPatternType.SOLID_FOREGROUND);
Boolean mergeHead = false;
ViewDetailField[] detailFields = request.getDetailFields();
if (ArrayUtil.isNotEmpty(detailFields)) {
cellStyle.setBorderTop(BorderStyle.THIN);
cellStyle.setBorderRight(BorderStyle.THIN);
cellStyle.setBorderBottom(BorderStyle.THIN);
cellStyle.setBorderLeft(BorderStyle.THIN);
String[] detailField = Arrays.stream(detailFields).map(field -> field.getName()).collect(Collectors.toList()).toArray(new String[detailFields.length]);
Object[] header = request.getHeader();
Row row = detailsSheet.createRow(0);
int headLen = header.length;
int detailFieldLen = detailField.length;
for (int i = 0; i < headLen; i++) {
Cell cell = row.createCell(i);
cell.setCellValue(header[i].toString());
if (i < headLen - 1) {
CellRangeAddress cellRangeAddress = new CellRangeAddress(0, 1, i, i);
detailsSheet.addMergedRegion(cellRangeAddress);
} else {
for (int j = i + 1; j < detailFieldLen + i; j++) {
row.createCell(j).setCellStyle(cellStyle);
}
CellRangeAddress cellRangeAddress = new CellRangeAddress(0, 0, i, i + detailFieldLen - 1);
detailsSheet.addMergedRegion(cellRangeAddress);
}
cell.setCellStyle(cellStyle);
detailsSheet.setColumnWidth(i, 255 * 20);
}
Row detailRow = detailsSheet.createRow(1);
for (int i = 0; i < headLen - 1; i++) {
Cell cell = detailRow.createCell(i);
cell.setCellStyle(cellStyle);
}
for (int i = 0; i < detailFieldLen; i++) {
int colIndex = headLen - 1 + i;
Cell cell = detailRow.createCell(colIndex);
cell.setCellValue(detailField[i]);
cell.setCellStyle(cellStyle);
detailsSheet.setColumnWidth(colIndex, 255 * 20);
}
details.add(1, detailField);
mergeHead = true;
}
if (CollectionUtils.isNotEmpty(details) && (!mergeHead || details.size() > 2)) {
int realDetailRowIndex = 2;
for (int i = (mergeHead ? 2 : 0); i < details.size(); i++) {
Row row = detailsSheet.createRow(realDetailRowIndex > 2 ? realDetailRowIndex : i);
Object[] rowData = details.get(i);
if (rowData != null) {
for (int j = 0; j < rowData.length; j++) {
Object cellValObj = rowData[j];
if (mergeHead && j == rowData.length - 1 && (cellValObj.getClass().isArray() || cellValObj instanceof ArrayList)) {
Object[] detailRowArray = ((List<Object>) cellValObj).toArray(new Object[((List<?>) cellValObj).size()]);
int detailRowArrayLen = detailRowArray.length;
int temlJ = j;
while (detailRowArrayLen > 1 && temlJ-- > 0) {
CellRangeAddress cellRangeAddress = new CellRangeAddress(realDetailRowIndex, realDetailRowIndex + detailRowArrayLen - 1, temlJ, temlJ);
detailsSheet.addMergedRegion(cellRangeAddress);
}
for (int k = 0; k < detailRowArrayLen; k++) {
List<Object> detailRows = (List<Object>) detailRowArray[k];
Row curRow = row;
if (k > 0) {
curRow = detailsSheet.createRow(realDetailRowIndex + k);
}
for (int l = 0; l < detailRows.size(); l++) {
Object col = detailRows.get(l);
Cell cell = curRow.createCell(j + l);
cell.setCellValue(col.toString());
}
}
realDetailRowIndex += detailRowArrayLen;
break;
}
Cell cell = row.createCell(j);
if (i == 0) {// 头部
cell.setCellValue(cellValObj.toString());
cell.setCellStyle(cellStyle);
//设置列的宽度
detailsSheet.setColumnWidth(j, 255 * 20);
} else if (cellValObj != null) {
try {
// with DataType
if ((excelTypes[j].equals(DeTypeConstants.DE_INT) || excelTypes[j] .equals(DeTypeConstants.DE_FLOAT) ) && StringUtils.isNotEmpty(cellValObj.toString())) {
cell.setCellValue(Double.valueOf(cellValObj.toString()));
} else {
cell.setCellValue(cellValObj.toString());
}
} catch (Exception e) {
LogUtil.warn("export excel data transform error");
}
}
}
}
}
}
response.setContentType("application/vnd.ms-excel");
//文件名称
response.setHeader("Content-disposition", "attachment;filename=" + request.getViewName() + ".xlsx");
wb.write(outputStream);
outputStream.flush();
outputStream.close();
} catch (Exception e) {
DEException.throwException(e);
}
}
@Override
public List<String> getFieldData(ChartViewDTO view, Long fieldId, String fieldType) throws Exception {
return chartDataManage.getFieldData(view, fieldId, fieldType);
}
}

View File

@ -0,0 +1,56 @@
package io.dataease.chart.server;

import io.dataease.api.chart.ChartViewApi;
import io.dataease.api.chart.dto.ChartViewDTO;
import io.dataease.api.chart.dto.ChartViewFieldDTO;
import io.dataease.chart.manage.ChartViewManege;
import io.dataease.exception.DEException;
import io.dataease.result.ResultCode;
import jakarta.annotation.Resource;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;

import java.util.List;
import java.util.Map;

/**
 * REST entry point for chart views; every operation is delegated to
 * {@code ChartViewManege}.
 *
 * @Author Junjun
 */
@RestController
@RequestMapping("chart")
public class ChartViewServer implements ChartViewApi {

    @Resource
    private ChartViewManege chartViewManege;

    /**
     * Load a chart together with its computed data.
     */
    @Override
    public ChartViewDTO getData(Long id) throws Exception {
        try {
            return chartViewManege.getChart(id);
        } catch (Exception e) {
            // Translate any failure into a uniform "data is wrong" API error.
            DEException.throwException(ResultCode.DATA_IS_WRONG.code(), e.getMessage());
        }
        // NOTE(review): reached only if DEException.throwException does not throw —
        // presumably it always does; confirm, otherwise failures return null.
        return null;
    }

    /** Fields of the given table/chart grouped by dimension/quota. */
    @Override
    public Map<String, List<ChartViewFieldDTO>> listByDQ(Long id, Long chartId) {
        return chartViewManege.listByDQ(id, chartId);
    }

    /** Create or update a chart definition. */
    @Override
    public ChartViewDTO save(ChartViewDTO dto) throws Exception {
        return chartViewManege.save(dto);
    }

    /** Check whether two views are built on the same dataset. */
    @Override
    public String checkSameDataSet(String viewIdSource, String viewIdTarget) {
        return chartViewManege.checkSameDataSet(viewIdSource, viewIdTarget);
    }

    /** Chart definition details (without executing the data query). */
    @Override
    public ChartViewDTO getDetail(Long id) {
        return chartViewManege.getDetails(id);
    }
}

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,11 @@
package io.dataease.commons;
import java.util.UUID;
/**
 * Utility for generating random UUID strings.
 */
public class UUIDUtils {

    // Static utility class: prevent instantiation.
    private UUIDUtils() {
    }

    /**
     * @return a random (version 4) UUID in canonical 36-character form
     */
    public static String getUUID() {
        return UUID.randomUUID().toString();
    }
}

View File

@ -0,0 +1,37 @@
package io.dataease.commons.constants;
/** Constants used by data-visualization resources. */
public class DataVisualizationConstants {

    // Soft-delete flag values.
    public static final class DELETE_FLAG {
        // Deleted.
        public static final boolean DELETED = true;
        // Not deleted / available.
        public static final boolean AVAILABLE = false;
    }

    // Tree node types.
    public static final class NODE_TYPE {
        // Directory node.
        public static final String FOLDER = "folder";
        // Resource (leaf) node.
        public static final String LEAF = "leaf";
    }

    // Resource operations.
    public static final class RESOURCE_OPT_TYPE {
        // Create a resource (leaf) node.
        public static final String NEW_LEAF = "newLeaf";
        // Create a folder.
        public static final String NEW_FOLDER = "newFolder";
        // Move.
        public static final String MOVE = "move";
        // Rename.
        public static final String RENAME = "rename";
        // Copy.
        public static final String COPY = "copy";
    }
}

View File

@ -0,0 +1,31 @@
package io.dataease.commons.constants;
/**
 * Operation-log constants: operation types and the resource types they target.
 *
 * @author : WangJiaHao
 * @date : 2023/10/8 09:37
 */
public class OptConstants {

    // Operation types.
    public static final class OPT_TYPE {
        // Create.
        public static final int NEW = 1;
        // Update. (Original comment said "create" — copy/paste slip.)
        public static final int UPDATE = 2;
        // Delete.
        public static final int DELETE = 3;
    }

    // Resource types an operation can target.
    public static final class OPT_RESOURCE_TYPE {
        // Visualization resource.
        public static final int VISUALIZATION = 1;
        // Dashboard.
        public static final int DASHBOARD = 2;
        // Data (large-screen) visualization.
        public static final int DATA_VISUALIZATION = 3;
        // Dataset.
        public static final int DATASET = 4;
        // Datasource.
        public static final int DATASOURCE = 5;
    }
}

View File

@ -0,0 +1,12 @@
package io.dataease.commons.constants;
/** Lifecycle states of a scheduled task. */
public enum TaskStatus {
    WaitingForExecution, // waiting to be executed
    Stopped, // stopped
    Suspend, // suspended / paused
    UnderExecution, // currently executing
    Completed, // finished successfully
    Error, // failed
    Warning // finished with warnings
}

View File

@ -0,0 +1,210 @@
package io.dataease.commons.utils;
import org.apache.commons.codec.binary.Base64;
import org.apache.commons.lang3.StringUtils;
import javax.crypto.*;
import javax.crypto.spec.IvParameterSpec;
import javax.crypto.spec.SecretKeySpec;
import java.security.MessageDigest;
import java.util.UUID;
/**
 * Encryption / hashing helpers: MD5, Base64 and AES-CBC.
 *
 * <p>Base64 handling uses the JDK's {@code java.util.Base64} (fully qualified to
 * avoid clashing with the commons-codec import) instead of commons-codec; the
 * encoded output is identical for standard, non-chunked encoding, and the MIME
 * decoder matches commons-codec's lenient decoding.
 *
 * @author kun.mo
 */
public class CodingUtil {

    private static final String UTF_8 = "UTF-8";

    private static final char[] HEX_DIGITS = {'0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f'};

    /** Base-62 alphabet used by {@link #shortUuid()}; kept public for backward compatibility. */
    public static String[] chars = new String[]{"a", "b", "c", "d", "e", "f",
            "g", "h", "i", "j", "k", "l", "m", "n", "o", "p", "q", "r", "s",
            "t", "u", "v", "w", "x", "y", "z", "0", "1", "2", "3", "4", "5",
            "6", "7", "8", "9", "A", "B", "C", "D", "E", "F", "G", "H", "I",
            "J", "K", "L", "M", "N", "O", "P", "Q", "R", "S", "T", "U", "V",
            "W", "X", "Y", "Z"};

    /**
     * MD5-hash a string using UTF-8.
     *
     * @param src string to hash
     * @return lower-case hex digest
     */
    public static String md5(String src) {
        return md5(src, UTF_8);
    }

    /**
     * MD5-hash a string.
     *
     * @param src     string to hash
     * @param charset charset used to get the bytes; platform default when null/empty
     * @return lower-case hex digest
     */
    public static String md5(String src, String charset) {
        try {
            byte[] strTemp = (charset == null || charset.isEmpty()) ? src.getBytes() : src.getBytes(charset);
            MessageDigest mdTemp = MessageDigest.getInstance("MD5");
            mdTemp.update(strTemp);
            byte[] md = mdTemp.digest();
            char[] str = new char[md.length * 2];
            int k = 0;
            for (byte byte0 : md) {
                str[k++] = HEX_DIGITS[byte0 >>> 4 & 0xf];
                str[k++] = HEX_DIGITS[byte0 & 0xf];
            }
            return new String(str);
        } catch (Exception e) {
            throw new RuntimeException("MD5 encrypt error:", e);
        }
    }

    /**
     * Base64-decode to a UTF-8 string.
     *
     * @param src encoded text (may be null)
     * @return decoded string, or null when src is null
     */
    public static String base64Decoding(String src) {
        String result = null;
        if (src != null) {
            try {
                // MIME decoder skips whitespace/non-alphabet characters, matching the
                // lenient behaviour of the previous commons-codec implementation.
                byte[] b = java.util.Base64.getMimeDecoder().decode(src);
                result = new String(b, UTF_8);
            } catch (Exception e) {
                throw new RuntimeException("BASE64 decoding error:", e);
            }
        }
        return result;
    }

    /**
     * Base64-encode a UTF-8 string.
     *
     * @param src plain text (may be null)
     * @return encoded string, or null when src is null
     */
    public static String base64Encoding(String src) {
        String result = null;
        if (src != null) {
            try {
                result = java.util.Base64.getEncoder().encodeToString(src.getBytes(UTF_8));
            } catch (Exception e) {
                throw new RuntimeException("BASE64 encoding error:", e);
            }
        }
        return result;
    }

    /**
     * AES/CBC/PKCS5Padding encrypt, Base64-encoding the result.
     *
     * @param src       plain text
     * @param secretKey key (must be a valid AES key length, e.g. 16 bytes)
     * @param iv        initialization vector (16 bytes)
     */
    public static String aesEncrypt(String src, String secretKey, String iv) {
        if (secretKey == null || secretKey.trim().isEmpty()) {
            throw new RuntimeException("secretKey is empty");
        }
        try {
            SecretKeySpec secretKeySpec = new SecretKeySpec(secretKey.getBytes(UTF_8), "AES");
            Cipher cipher = Cipher.getInstance("AES/CBC/PKCS5Padding");
            cipher.init(Cipher.ENCRYPT_MODE, secretKeySpec, new IvParameterSpec(iv.getBytes()));
            byte[] encrypted = cipher.doFinal(src.getBytes(UTF_8));
            return java.util.Base64.getEncoder().encodeToString(encrypted);
        } catch (Exception e) {
            throw new RuntimeException("AES encrypt error:", e);
        }
    }

    /**
     * AES/CBC/PKCS5Padding decrypt of a Base64 payload.
     *
     * <p>If src is not valid ciphertext it is returned unchanged (legacy
     * behaviour for values that were stored unencrypted).
     */
    public static String aesDecrypt(String src, String secretKey, String iv) {
        if (secretKey == null || secretKey.trim().isEmpty()) {
            throw new RuntimeException("secretKey is empty");
        }
        try {
            SecretKeySpec secretKeySpec = new SecretKeySpec(secretKey.getBytes(UTF_8), "AES");
            Cipher cipher = Cipher.getInstance("AES/CBC/PKCS5Padding");
            cipher.init(Cipher.DECRYPT_MODE, secretKeySpec, new IvParameterSpec(iv.getBytes()));
            byte[] encrypted = java.util.Base64.getMimeDecoder().decode(src);
            return new String(cipher.doFinal(encrypted), UTF_8);
        } catch (BadPaddingException | IllegalBlockSizeException | IllegalArgumentException e) {
            // Not an encrypted value (bad Base64 or bad padding): return the original.
            return src;
        } catch (Exception e) {
            throw new RuntimeException("decrypt error, please check parameters", e);
        }
    }

    /**
     * Generate a random 128-bit AES key, Base64-encoded.
     */
    public static String secretKey() {
        try {
            KeyGenerator keyGen = KeyGenerator.getInstance("AES");
            keyGen.init(128);
            SecretKey secretKey = keyGen.generateKey();
            return java.util.Base64.getEncoder().encodeToString(secretKey.getEncoded());
        } catch (Exception e) {
            throw new RuntimeException("generate secretKey error", e);
        }
    }

    /**
     * @return true when str is non-empty and every character is a digit.
     * Null/empty now yields false (commons-lang semantics); the old version
     * returned true for "".
     */
    public static boolean isNumeric(String str) {
        if (str == null || str.isEmpty()) {
            return false;
        }
        for (int i = str.length(); --i >= 0; ) {
            if (!Character.isDigit(str.charAt(i))) {
                return false;
            }
        }
        return true;
    }

    /**
     * 8-character short id derived from a random UUID (base-62 alphabet).
     */
    public static String shortUuid() {
        StringBuilder shortBuffer = new StringBuilder();
        String uuid = UUID.randomUUID().toString().replace("-", "");
        for (int i = 0; i < 8; i++) {
            // Each 4 hex digits are folded into one alphabet character.
            int x = Integer.parseInt(uuid.substring(i * 4, i * 4 + 4), 16);
            shortBuffer.append(chars[x % 0x3E]);
        }
        return shortBuffer.toString();
    }

    /**
     * Keep only the digit characters of str and parse them as an Integer.
     *
     * @return parsed value, or null when str is blank or contains no digits
     */
    public static Integer string2Integer(String str) {
        if (str == null || str.trim().isEmpty()) {
            return null;
        }
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < str.length(); i++) {
            char c = str.charAt(i);
            if (Character.isDigit(c)) {
                sb.append(c);
            }
        }
        return sb.length() > 0 ? Integer.parseInt(sb.toString()) : null;
    }
}

View File

@ -0,0 +1,101 @@
package io.dataease.commons.utils;
import io.dataease.utils.LogUtil;
import jakarta.annotation.PostConstruct;
import jakarta.annotation.PreDestroy;
import java.util.concurrent.*;
/**
* @Author gin
* @Date 2021/4/13 4:08 下午
*/
public class CommonThreadPool {
private int corePoolSize = 10;
private int maxQueueSize = 10;
private int keepAliveSeconds = 600;
private ScheduledThreadPoolExecutor scheduledThreadPoolExecutor;
@PostConstruct
public void init() {
scheduledThreadPoolExecutor = new ScheduledThreadPoolExecutor(corePoolSize);
scheduledThreadPoolExecutor.setKeepAliveTime(keepAliveSeconds, TimeUnit.SECONDS);
}
@PreDestroy
public void shutdown() {
if (scheduledThreadPoolExecutor != null) {
scheduledThreadPoolExecutor.shutdown();
}
}
/**
* 线程池是否可用(实际队列数是否小于最大队列数)
*
* @return true为可用false不可用
*/
public boolean available() {
return scheduledThreadPoolExecutor.getQueue().size() <= maxQueueSize;
}
/**
* 添加任务不强制限制队列数
*
* @param task 任务
*/
public void addTask(Runnable task) {
scheduledThreadPoolExecutor.execute(task);
}
/**
* 添加延迟执行任务不强制限制队列数
*
* @param task 任务
* @param delay 延迟时间
* @param unit 延迟时间单位
*/
public void scheduleTask(Runnable task, long delay, TimeUnit unit) {
scheduledThreadPoolExecutor.schedule(task, delay, unit);
}
/**
* 添加任务和超时时间超时时间内未执行完的任务将被终止并移除线程池防止任务执行时间过长而占用线程池
*
* @param task 任务
* @param timeOut 超时时间
* @param timeUnit 超时时间单位
*/
public void addTask(Runnable task, long timeOut, TimeUnit timeUnit) {
scheduledThreadPoolExecutor.execute(() -> {
ExecutorService executorService = Executors.newSingleThreadExecutor();
try {
Future future = executorService.submit(task);
future.get(timeOut, timeUnit); // 此行会阻塞直到任务执行完或超时
} catch (TimeoutException timeoutException) {
LogUtil.getLogger().error("timeout to execute task", timeoutException);
} catch (Exception exception) {
LogUtil.getLogger().error("failed to execute task", exception);
} finally {
if (!executorService.isShutdown()) {
executorService.shutdown();
}
}
});
}
public void setCorePoolSize(int corePoolSize) {
this.corePoolSize = corePoolSize;
}
public void setMaxQueueSize(int maxQueueSize) {
this.maxQueueSize = maxQueueSize;
}
public void setKeepAliveSeconds(int keepAliveSeconds) {
this.keepAliveSeconds = keepAliveSeconds;
}
}

View File

@ -0,0 +1,128 @@
package io.dataease.commons.utils;
import io.dataease.task.dao.auto.entity.CoreSysTask;
import org.apache.commons.lang3.ObjectUtils;
import org.quartz.CronExpression;
import org.quartz.CronScheduleBuilder;
import org.quartz.CronTrigger;
import org.quartz.TriggerBuilder;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
/**
 * CRON helpers: validate/parse expressions and build expressions from task
 * schedule settings.
 *
 * @author song.tianyang
 * @Date 2020/12/17 4:06 下午
 * @Description CRON parsing utilities
 */
public class CronUtils {

    /**
     * Parse an expression into a CronTrigger.
     *
     * @param cron cron expression
     * @return trigger built from the expression
     * @throws RuntimeException when the expression is invalid
     */
    public static CronTrigger getCronTrigger(String cron) {
        if (!CronExpression.isValidExpression(cron)) {
            throw new RuntimeException("cron :" + cron + "表达式解析错误");
        }
        return TriggerBuilder.newTrigger().withIdentity("Calculate Date").withSchedule(CronScheduleBuilder.cronSchedule(cron)).build();
    }

    /**
     * Next fire time after the given start time (after now when start is null).
     *
     * @param cron  cron expression
     * @param start start time, may be null
     * @return next fire time
     */
    public static Date getNextTriggerTime(String cron, Date start) {
        if (start == null) {
            return getNextTriggerTime(cron);
        }
        CronTrigger trigger = getCronTrigger(cron);
        return trigger.getFireTimeAfter(start);
    }

    /**
     * Next fire time relative to the trigger's own start time.
     *
     * @param cron cron expression
     * @return next fire time, or null when the expression cannot be parsed
     */
    public static Date getNextTriggerTime(String cron) {
        Date date = null;
        try {
            CronTrigger trigger = getCronTrigger(cron);
            date = trigger.getFireTimeAfter(trigger.getStartTime());
        } catch (Exception e) {
            // Deliberate best effort: an invalid expression simply yields null.
        }
        return date;
    }

    /**
     * Build a cron expression from a task's rate settings.
     * rateType: -1 = raw cron string, 0 = daily, 1 = weekly, 2 = monthly.
     *
     * @return cron expression, or null for an unknown rate type
     */
    public static String cron(CoreSysTask taskEntity) {
        if (taskEntity.getRateType() == -1) {
            return taskEntity.getRateVal();
        }
        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
        Date date;
        try {
            date = sdf.parse(taskEntity.getRateVal());
        } catch (ParseException e) {
            // Previously printStackTrace() followed by a guaranteed NPE below;
            // fail fast with context instead.
            throw new IllegalArgumentException("invalid rateVal: " + taskEntity.getRateVal(), e);
        }
        Calendar instance = Calendar.getInstance();
        instance.setTime(date);
        if (taskEntity.getRateType() == 0) {
            return instance.get(Calendar.SECOND) + " " +
                    instance.get(Calendar.MINUTE) + " " +
                    instance.get(Calendar.HOUR_OF_DAY) + " * * ?";
        }
        if (taskEntity.getRateType() == 1) {
            return instance.get(Calendar.SECOND) + " " +
                    instance.get(Calendar.MINUTE) + " " +
                    instance.get(Calendar.HOUR_OF_DAY) + " ? * " +
                    getDayOfWeek(instance);
        }
        if (taskEntity.getRateType() == 2) {
            return instance.get(Calendar.SECOND) + " " +
                    instance.get(Calendar.MINUTE) + " " +
                    instance.get(Calendar.HOUR_OF_DAY) + " " +
                    instance.get(Calendar.DATE) + " * ?";
        }
        return null;
    }

    /** Cron expression for a time five seconds from now (daily at that time). */
    public static String cron() {
        Calendar instance = Calendar.getInstance();
        instance.add(Calendar.SECOND, 5);
        return instance.get(Calendar.SECOND) + " " +
                instance.get(Calendar.MINUTE) + " " +
                instance.get(Calendar.HOUR_OF_DAY) + " * * ?";
    }

    private static String getDayOfWeek(Calendar instance) {
        int index = instance.get(Calendar.DAY_OF_WEEK);
        // NOTE(review): Calendar and Quartz both number days 1=SUN..7=SAT, so this
        // +1 rotation (SAT wraps to 1) appears to shift the weekday by one —
        // confirm this is intentional.
        index = (index % 7) + 1;
        return String.valueOf(index);
    }

    /**
     * Whether a task's end time has passed.
     *
     * @param endTime end timestamp in epoch millis; null means "never expires"
     */
    public static Boolean taskExpire(Long endTime) {
        if (ObjectUtils.isEmpty(endTime)) {
            return false;
        }
        return System.currentTimeMillis() > endTime;
    }
}

View File

@ -0,0 +1,44 @@
package io.dataease.commons.utils;
import io.dataease.utils.BeanUtils;
import java.util.List;
import java.util.stream.Collectors;
/**
 * Convenience wrappers around {@link CodingUtil} using a built-in AES key/IV.
 */
public class EncryptUtils extends CodingUtil {

    // NOTE(review): hard-coded AES key and IV shared by every deployment — any
    // ciphertext is decryptable by anyone with the source. Consider per-install keys.
    private static final String secretKey = "www.fit2cloud.co";
    private static final String iv = "1234567890123456";

    /** AES-encrypt o.toString() with the built-in key/IV; null passes through. */
    public static Object aesEncrypt(Object o) {
        if (o == null) {
            return null;
        }
        return aesEncrypt(o.toString(), secretKey, iv);
    }

    /** AES-decrypt o.toString() with the built-in key/IV; null passes through. */
    public static Object aesDecrypt(Object o) {
        if (o == null) {
            return null;
        }
        return aesDecrypt(o.toString(), secretKey, iv);
    }

    /**
     * Decrypt attribute {@code attrName} in place for each list element.
     *
     * <p>NOTE(review): elements whose attribute is null are filtered OUT of the
     * returned list entirely (not merely skipped) — confirm that is intended.
     */
    public static <T> Object aesDecrypt(List<T> o, String attrName) {
        if (o == null) {
            return null;
        }
        return o.stream()
                .filter(element -> BeanUtils.getFieldValueByName(attrName, element) != null)
                .peek(element -> BeanUtils.setFieldValueByName(element, attrName, aesDecrypt(BeanUtils.getFieldValueByName(attrName, element).toString(), secretKey, iv), String.class))
                .collect(Collectors.toList());
    }

    /** MD5-hash o.toString(); null passes through. */
    public static Object md5Encrypt(Object o) {
        if (o == null) {
            return null;
        }
        return md5(o.toString());
    }
}

View File

@ -0,0 +1,46 @@
package io.dataease.commons.utils;
import lombok.Getter;
import lombok.Setter;
@Getter
@Setter
public class MybatisInterceptorConfig {

    // Fully-qualified entity class the interceptor applies to.
    private String modelName;
    // Attribute to transform on a single object.
    private String attrName;
    // Attribute name used when the intercepted value is a list.
    private String attrNameForList;
    // Class/method performing the forward transform (e.g. encrypt on write).
    private String interceptorClass;
    private String interceptorMethod;
    // Class/method reversing the transform (e.g. decrypt on read).
    private String undoClass;
    private String undoMethod;

    public MybatisInterceptorConfig() {
    }

    /**
     * Convenience constructor wiring AES encrypt/decrypt via {@link EncryptUtils}.
     * Use with care; mainly to reduce parameters when configuring many entries.
     *
     * @param modelClass entity class
     * @param attrName   attribute to encrypt/decrypt
     */
    public MybatisInterceptorConfig(Class<?> modelClass, String attrName) {
        this.modelName = modelClass.getName();
        this.attrName = attrName;
        this.interceptorClass = EncryptUtils.class.getName();
        this.interceptorMethod = "aesEncrypt";
        this.undoClass = EncryptUtils.class.getName();
        this.undoMethod = "aesDecrypt";
    }

    /** Fully-specified variant; undo is looked up on the same interceptor class. */
    public MybatisInterceptorConfig(Class<?> modelClass, String attrName, Class<?> interceptorClass, String interceptorMethod, String undoMethod) {
        this.modelName = modelClass.getName();
        this.attrName = attrName;
        this.interceptorClass = interceptorClass.getName();
        this.interceptorMethod = interceptorMethod;
        this.undoClass = interceptorClass.getName();
        this.undoMethod = undoMethod;
    }
}

View File

@ -0,0 +1,174 @@
package io.dataease.commons.utils;
import com.fasterxml.jackson.core.type.TypeReference;
import io.dataease.api.dataset.dto.SqlVariableDetails;
import io.dataease.exception.DEException;
import io.dataease.i18n.Translator;
import io.dataease.utils.JsonUtil;
import org.apache.calcite.config.Lex;
import org.apache.calcite.sql.*;
import org.apache.calcite.sql.parser.SqlParseException;
import org.apache.calcite.sql.parser.SqlParser;
import org.apache.calcite.sql.util.SqlShuttle;
import org.apache.commons.lang3.ObjectUtils;
import org.apache.commons.lang3.StringUtils;
import org.checkerframework.checker.nullness.qual.Nullable;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import static org.apache.calcite.sql.SqlKind.*;
/**
 * SQL variable handling: substitutes ${var} placeholders with runtime filter
 * values or configured defaults, and neutralises any placeholders left over.
 */
public class SqlparserUtils {
    // Matches ${variable} placeholders inside SQL text.
    public static final String regex = "\\$\\{(.*?)\\}";
    // NOTE(review): "PATAMS" looks like a typo for "PARAMS", but this literal is a
    // runtime marker value — renaming it would change behaviour; leave as-is.
    private static final String SubstitutedParams = "DATAEASE_PATAMS_BI";
    // Always-true predicate used to replace conditions containing a placeholder.
    private static final String SubstitutedSql = " 'DE-BI' = 'DE-BI' ";
    private static final String SubstitutedSqlVirtualData = " 1 > 2 ";

    /**
     * Strip ${...} variables from a SQL statement: each WHERE/FROM predicate that
     * still contains a placeholder is replaced with an always-true dummy condition.
     *
     * @param sql raw SQL, possibly containing ${var} placeholders
     * @return SQL with placeholder predicates neutralised
     */
    public static String removeVariables(final String sql) {
        String tmpSql = sql;
        Pattern pattern = Pattern.compile(regex);
        Matcher matcher = pattern.matcher(sql);
        boolean hasVariables = false;
        while (matcher.find()) {
            hasVariables = true;
            tmpSql = tmpSql.replace(matcher.group(), SubstitutedParams);
        }
        if (!hasVariables && !tmpSql.contains(SubstitutedParams)) {
            return tmpSql;
        }
        SqlParser.Config config =
                SqlParser.configBuilder()
                        .setLex(Lex.JAVA)
                        .setIdentifierMaxLength(256)
                        .build();
        SqlParser sqlParser = SqlParser.create(tmpSql, config);
        SqlNode sqlNode;
        try {
            sqlNode = sqlParser.parseStmt();
        } catch (SqlParseException e) {
            throw new RuntimeException("使用 Calcite 进行语法分析发生了异常", e);
        }
        // Recursively rewrite the parse tree in place.
        getDependencies(sqlNode, false);
        return sqlNode.toString();
    }

    private static void getDependencies(SqlNode sqlNode, Boolean fromOrJoin) {
        if (sqlNode.getKind() == SELECT) {
            SqlSelect select = (SqlSelect) sqlNode;
            for (SqlNode item : select.getSelectList().getList()) {
                getDependencies(item, false);
            }
            select.setFrom(select.getFrom().accept(getSqlShuttle()));
            if (select.getWhere() != null) {
                select.setWhere(select.getWhere().accept(getSqlShuttle()));
            }
        }
        // JOIN / IDENTIFIER / AS and other kinds need no rewriting today
        // (the old branches only performed unused casts); extend here when
        // more node types must be handled.
    }

    /** Shuttle replacing any call still containing the marker with a dummy predicate. */
    public static SqlShuttle getSqlShuttle() {
        return new SqlShuttle() {
            @Override
            public @Nullable SqlNode visit(final SqlCall call) {
                CallCopyingArgHandler argHandler = new CallCopyingArgHandler(call, false);
                call.getOperator().acceptCall(this, call, false, argHandler);
                if (argHandler.result().toString().contains(SubstitutedParams)) {
                    SqlNode sqlNode1 = null;
                    try {
                        sqlNode1 = SqlParser.create(SubstitutedSql).parseExpression();
                    } catch (Exception e) {
                        // Best effort: fall through with null when the dummy predicate
                        // itself fails to parse (should not happen — it is a constant).
                    }
                    return sqlNode1;
                }
                return argHandler.result();
            }
        };
    }

    /**
     * Substitute ${var} placeholders with incoming filter parameters or with the
     * variable's configured default value, then strip any placeholders that remain.
     *
     * @param sql                raw SQL
     * @param sqlVariableDetails JSON array of variable definitions (defaults)
     * @param isEdit             true when invoked from the dataset editor
     * @param isFromDataSet      true when invoked for a dataset query
     * @param parameters         runtime filter parameters, may be null/empty
     */
    public static String handleVariableDefaultValue(String sql, String sqlVariableDetails, boolean isEdit, boolean isFromDataSet, List<SqlVariableDetails> parameters) {
        if (StringUtils.isEmpty(sql)) {
            DEException.throwException(Translator.get("i18n_sql_not_empty"));
        }
        if (sql.trim().endsWith(";")) {
            sql = sql.substring(0, sql.length() - 1);
        }
        if (StringUtils.isNotEmpty(sqlVariableDetails)) {
            TypeReference<List<SqlVariableDetails>> listTypeReference = new TypeReference<List<SqlVariableDetails>>() {
            };
            List<SqlVariableDetails> defaultsSqlVariableDetails = JsonUtil.parseList(sqlVariableDetails, listTypeReference);
            Matcher matcher = Pattern.compile(regex).matcher(sql);
            while (matcher.find()) {
                // Variable definition matching this placeholder, if any.
                SqlVariableDetails defaultsSqlVariableDetail = null;
                for (SqlVariableDetails sqlVariableDetail : defaultsSqlVariableDetails) {
                    if (matcher.group().substring(2, matcher.group().length() - 1).equalsIgnoreCase(sqlVariableDetail.getVariableName())) {
                        defaultsSqlVariableDetail = sqlVariableDetail;
                        break;
                    }
                }
                SqlVariableDetails filterParameter = null;
                // Null-guard added: the old code dereferenced defaultsSqlVariableDetail
                // here and threw NPE when a placeholder had no matching definition but
                // runtime parameters were supplied.
                if (defaultsSqlVariableDetail != null && ObjectUtils.isNotEmpty(parameters)) {
                    for (SqlVariableDetails parameter : parameters) {
                        if (parameter.getVariableName().equalsIgnoreCase(defaultsSqlVariableDetail.getVariableName())) {
                            filterParameter = parameter;
                        }
                    }
                }
                if (filterParameter != null) {
                    sql = sql.replace(matcher.group(), transFilter(filterParameter));
                } else {
                    if (defaultsSqlVariableDetail != null && StringUtils.isNotEmpty(defaultsSqlVariableDetail.getDefaultValue())) {
                        if (!isEdit && isFromDataSet && defaultsSqlVariableDetail.getDefaultValueScope().equals(SqlVariableDetails.DefaultValueScope.ALLSCOPE)) {
                            sql = sql.replace(matcher.group(), defaultsSqlVariableDetail.getDefaultValue());
                        }
                        if (isEdit) {
                            sql = sql.replace(matcher.group(), defaultsSqlVariableDetail.getDefaultValue());
                        }
                    }
                }
            }
        }
        try {
            sql = removeVariables(sql);
        } catch (Exception e) {
            // Best effort: if Calcite cannot parse, keep the substituted SQL as-is.
            e.printStackTrace();
        }
        return sql;
    }

    /** Render a filter parameter's value as SQL text according to its operator. */
    private static String transFilter(SqlVariableDetails sqlVariableDetails) {
        if (sqlVariableDetails.getOperator().equals("in")) {
            return "'" + String.join("','", sqlVariableDetails.getValue()) + "'";
        } else if (sqlVariableDetails.getOperator().equals("between")) {
            // Use calendar-year "yyyy", not week-based "YYYY": the old pattern
            // produced wrong dates around year boundaries. "DD" (day-of-year) is
            // likewise mapped to "dd" (day-of-month).
            String fmtPattern = sqlVariableDetails.getType().size() > 1
                    ? ((String) sqlVariableDetails.getType().get(1)).replace("DD", "dd").replace("YYYY", "yyyy")
                    : "yyyy";
            SimpleDateFormat simpleDateFormat = new SimpleDateFormat(fmtPattern);
            // NOTE(review): only value[0] is formatted even for "between" — confirm
            // the second bound is supplied by the caller elsewhere.
            return simpleDateFormat.format(new Date(Long.parseLong((String) sqlVariableDetails.getValue().get(0))));
        } else {
            return (String) sqlVariableDetails.getValue().get(0);
        }
    }
}

View File

@ -0,0 +1,22 @@
package io.dataease.config;
import com.fit2cloud.autoconfigure.QuartzAutoConfiguration;
import io.dataease.commons.utils.CommonThreadPool;
import org.springframework.boot.autoconfigure.AutoConfigureBefore;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
@Configuration
@AutoConfigureBefore(QuartzAutoConfiguration.class)
public class CommonConfig {

    /**
     * Shared thread pool for background resource tasks.
     * destroyMethod ensures the pool is shut down when the context closes.
     */
    @Bean(destroyMethod = "shutdown")
    public CommonThreadPool resourcePoolThreadPool() {
        CommonThreadPool commonThreadPool = new CommonThreadPool();
        commonThreadPool.setCorePoolSize(20);
        commonThreadPool.setMaxQueueSize(100);
        commonThreadPool.setKeepAliveSeconds(3600);
        return commonThreadPool;
    }
}

View File

@ -0,0 +1,32 @@
package io.dataease.config;
import io.dataease.constant.AuthConstant;
import org.springframework.context.annotation.Configuration;
import org.springframework.web.servlet.config.annotation.ResourceHandlerRegistry;
import org.springframework.web.servlet.config.annotation.WebMvcConfigurer;
import static io.dataease.constant.StaticResourceConstants.*;
import static io.dataease.utils.StaticResourceUtils.ensureBoth;
import static io.dataease.utils.StaticResourceUtils.ensureSuffix;
@Configuration
public class DeMvcConfig implements WebMvcConfigurer {

    /**
     * Configuring static resource path.
     * Maps the local upload working directory and the geo-map directory onto
     * their respective URL patterns under the DataEase API prefix.
     *
     * @param registry registry
     */
    @Override
    public void addResourceHandlers(ResourceHandlerRegistry registry) {
        // Uploaded files served from the local work dir.
        String workDir = FILE_PROTOCOL + ensureSuffix(WORK_DIR, FILE_SEPARATOR);
        String uploadUrlPattern = ensureBoth(URL_SEPARATOR + UPLOAD_URL_PREFIX, AuthConstant.DE_API_PREFIX, URL_SEPARATOR) + "**";
        registry.addResourceHandler(uploadUrlPattern)
                .addResourceLocations(workDir);
        // map
        String mapDir = FILE_PROTOCOL + ensureSuffix(MAP_DIR, FILE_SEPARATOR);
        String mapUrlPattern = ensureBoth(MAP_URL, AuthConstant.DE_API_PREFIX, URL_SEPARATOR) + "**";
        registry.addResourceHandler(mapUrlPattern)
                .addResourceLocations(mapDir);
    }
}

View File

@ -0,0 +1,28 @@
package io.dataease.config;
import io.dataease.commons.utils.MybatisInterceptorConfig;
import io.dataease.datasource.dao.auto.entity.CoreDeEngine;
import io.dataease.interceptor.MybatisInterceptor;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.transaction.annotation.EnableTransactionManagement;
import java.util.ArrayList;
import java.util.List;
@Configuration
@EnableTransactionManagement
public class MybatisConfig {

    /**
     * Interceptor that transparently encrypts/decrypts configured entity
     * attributes (currently {@code CoreDeEngine.configuration}) on write/read.
     */
    @Bean
    @ConditionalOnMissingBean
    public MybatisInterceptor dbInterceptor() {
        MybatisInterceptor interceptor = new MybatisInterceptor();
        List<MybatisInterceptorConfig> configList = new ArrayList<>();
        configList.add(new MybatisInterceptorConfig(CoreDeEngine.class, "configuration"));
        interceptor.setInterceptorConfigList(configList);
        return interceptor;
    }
}

View File

@ -0,0 +1,9 @@
package io.dataease.dataset.constant;
/**
 * Dataset table type identifiers.
 *
 * @Author Junjun
 */
public class DatasetTableType {
    /** Table bound directly to a database table. Now final: these are constants. */
    public static final String DB = "db";
    /** Table defined by a custom SQL statement. */
    public static final String SQL = "sql";
}

View File

@ -0,0 +1,231 @@
package io.dataease.dataset.dao.auto.entity;
import com.baomidou.mybatisplus.annotation.TableName;
import java.io.Serializable;
/**
 * Entity for table {@code core_dataset_group}: a node in the dataset tree
 * (folder or dataset). MyBatis-Plus generated — prefer regeneration over
 * hand-editing.
 *
 * @author fit2cloud
 * @since 2023-08-28
 */
@TableName("core_dataset_group")
public class CoreDatasetGroup implements Serializable {

    private static final long serialVersionUID = 1L;

    /**
     * ID
     */
    private Long id;

    /**
     * Display name
     */
    private String name;

    /**
     * Parent node ID
     */
    private Long pid;

    /**
     * Depth of this node in the tree
     */
    private Integer level;

    /**
     * Node type: folder or dataset
     */
    private String nodeType;

    /**
     * Dataset type: sql, union
     */
    private String type;

    /**
     * Connection mode: 0 = direct, 1 = synced (Excel/API data stored in DE)
     */
    private Integer mode;

    /**
     * Serialized union/join relation tree
     */
    private String info;

    /**
     * Creator user ID
     */
    private String createBy;

    /**
     * Creation timestamp
     */
    private Long createTime;

    // Quartz instance that owns the sync job for this dataset.
    private String qrtzInstance;

    /**
     * Sync status
     */
    private String syncStatus;

    /**
     * Last updater user ID
     */
    private String updateBy;

    /**
     * Last sync timestamp
     */
    private Long lastUpdateTime;

    /**
     * Generated union SQL
     */
    private String unionSql;

    public Long getId() {
        return id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public Long getPid() {
        return pid;
    }

    public void setPid(Long pid) {
        this.pid = pid;
    }

    public Integer getLevel() {
        return level;
    }

    public void setLevel(Integer level) {
        this.level = level;
    }

    public String getNodeType() {
        return nodeType;
    }

    public void setNodeType(String nodeType) {
        this.nodeType = nodeType;
    }

    public String getType() {
        return type;
    }

    public void setType(String type) {
        this.type = type;
    }

    public Integer getMode() {
        return mode;
    }

    public void setMode(Integer mode) {
        this.mode = mode;
    }

    public String getInfo() {
        return info;
    }

    public void setInfo(String info) {
        this.info = info;
    }

    public String getCreateBy() {
        return createBy;
    }

    public void setCreateBy(String createBy) {
        this.createBy = createBy;
    }

    public Long getCreateTime() {
        return createTime;
    }

    public void setCreateTime(Long createTime) {
        this.createTime = createTime;
    }

    public String getQrtzInstance() {
        return qrtzInstance;
    }

    public void setQrtzInstance(String qrtzInstance) {
        this.qrtzInstance = qrtzInstance;
    }

    public String getSyncStatus() {
        return syncStatus;
    }

    public void setSyncStatus(String syncStatus) {
        this.syncStatus = syncStatus;
    }

    public String getUpdateBy() {
        return updateBy;
    }

    public void setUpdateBy(String updateBy) {
        this.updateBy = updateBy;
    }

    public Long getLastUpdateTime() {
        return lastUpdateTime;
    }

    public void setLastUpdateTime(Long lastUpdateTime) {
        this.lastUpdateTime = lastUpdateTime;
    }

    public String getUnionSql() {
        return unionSql;
    }

    public void setUnionSql(String unionSql) {
        this.unionSql = unionSql;
    }

    @Override
    public String toString() {
        return "CoreDatasetGroup{" +
                "id = " + id +
                ", name = " + name +
                ", pid = " + pid +
                ", level = " + level +
                ", nodeType = " + nodeType +
                ", type = " + type +
                ", mode = " + mode +
                ", info = " + info +
                ", createBy = " + createBy +
                ", createTime = " + createTime +
                ", qrtzInstance = " + qrtzInstance +
                ", syncStatus = " + syncStatus +
                ", updateBy = " + updateBy +
                ", lastUpdateTime = " + lastUpdateTime +
                ", unionSql = " + unionSql +
                "}";
    }
}

View File

@ -0,0 +1,136 @@
package io.dataease.dataset.dao.auto.entity;
import com.baomidou.mybatisplus.annotation.TableName;
import java.io.Serializable;
/**
 * Entity for table {@code core_dataset_table}: a physical/logical table that
 * belongs to a dataset group. MyBatis-Plus generated — prefer regeneration
 * over hand-editing.
 *
 * @author fit2cloud
 * @since 2023-04-14
 */
@TableName("core_dataset_table")
public class CoreDatasetTable implements Serializable {

    private static final long serialVersionUID = 1L;

    /**
     * ID
     */
    private Long id;

    /**
     * Display name
     */
    private String name;

    /**
     * Physical table name
     */
    private String tableName;

    /**
     * Datasource ID
     */
    private Long datasourceId;

    /**
     * Dataset (group) ID
     */
    private Long datasetGroupId;

    /**
     * Table type: db, sql, union, excel, api
     */
    private String type;

    /**
     * Original table info (table name, SQL, etc.)
     */
    private String info;

    /**
     * SQL variable definitions
     */
    private String sqlVariableDetails;

    public Long getId() {
        return id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getTableName() {
        return tableName;
    }

    public void setTableName(String tableName) {
        this.tableName = tableName;
    }

    public Long getDatasourceId() {
        return datasourceId;
    }

    public void setDatasourceId(Long datasourceId) {
        this.datasourceId = datasourceId;
    }

    public Long getDatasetGroupId() {
        return datasetGroupId;
    }

    public void setDatasetGroupId(Long datasetGroupId) {
        this.datasetGroupId = datasetGroupId;
    }

    public String getType() {
        return type;
    }

    public void setType(String type) {
        this.type = type;
    }

    public String getInfo() {
        return info;
    }

    public void setInfo(String info) {
        this.info = info;
    }

    public String getSqlVariableDetails() {
        return sqlVariableDetails;
    }

    public void setSqlVariableDetails(String sqlVariableDetails) {
        this.sqlVariableDetails = sqlVariableDetails;
    }

    @Override
    public String toString() {
        return "CoreDatasetTable{" +
                "id = " + id +
                ", name = " + name +
                ", tableName = " + tableName +
                ", datasourceId = " + datasourceId +
                ", datasetGroupId = " + datasetGroupId +
                ", type = " + type +
                ", info = " + info +
                ", sqlVariableDetails = " + sqlVariableDetails +
                "}";
    }
}

View File

@ -0,0 +1,326 @@
package io.dataease.dataset.dao.auto.entity;
import com.baomidou.mybatisplus.annotation.TableName;
import java.io.Serializable;
/**
 * Entity mapped to table {@code core_dataset_table_field}: one field of a
 * dataset table, carrying both the raw datasource type and the derived
 * DataEase type/grouping metadata.
 *
 * @author fit2cloud
 * @since 2023-07-10
 */
@TableName("core_dataset_table_field")
public class CoreDatasetTableField implements Serializable {

    private static final long serialVersionUID = 1L;

    /** Primary key. */
    private Long id;

    /** Owning datasource id. */
    private Long datasourceId;

    /** Owning dataset table id. */
    private Long datasetTableId;

    /** Owning dataset (group) id. */
    private Long datasetGroupId;

    /** Owning chart id (for chart calc fields). */
    private Long chartId;

    /** Raw field name from the datasource. */
    private String originName;

    /** Display name. */
    private String name;

    /** Description. */
    private String description;

    /** DataEase field name, used as unique identifier. */
    private String dataeaseName;

    /** DataEase field alias. */
    private String fieldShortName;

    /** Dimension/measure flag: d = dimension, q = measure. */
    private String groupType;

    /** Raw field type. */
    private String type;

    private Integer size;

    /** DataEase type: 0 text, 1 time, 2 int, 3 float, 4 bool, 5 geo, 6 binary. */
    private Integer deType;

    /** DataEase-recorded original type. */
    private Integer deExtractType;

    /** Extension flag: 0 original, 1 copy, 2 calc field, ... */
    private Integer extField;

    /** Whether the field is selected. */
    private Boolean checked;

    /** Column position. */
    private Integer columnIndex;

    /** Last sync time. */
    private Long lastSyncTime;

    /** Precision. */
    private Integer accuracy;

    private String dateFormat;

    /** Date format type. */
    private String dateFormatType;

    public Long getId() {
        return this.id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    public Long getDatasourceId() {
        return this.datasourceId;
    }

    public void setDatasourceId(Long datasourceId) {
        this.datasourceId = datasourceId;
    }

    public Long getDatasetTableId() {
        return this.datasetTableId;
    }

    public void setDatasetTableId(Long datasetTableId) {
        this.datasetTableId = datasetTableId;
    }

    public Long getDatasetGroupId() {
        return this.datasetGroupId;
    }

    public void setDatasetGroupId(Long datasetGroupId) {
        this.datasetGroupId = datasetGroupId;
    }

    public Long getChartId() {
        return this.chartId;
    }

    public void setChartId(Long chartId) {
        this.chartId = chartId;
    }

    public String getOriginName() {
        return this.originName;
    }

    public void setOriginName(String originName) {
        this.originName = originName;
    }

    public String getName() {
        return this.name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getDescription() {
        return this.description;
    }

    public void setDescription(String description) {
        this.description = description;
    }

    public String getDataeaseName() {
        return this.dataeaseName;
    }

    public void setDataeaseName(String dataeaseName) {
        this.dataeaseName = dataeaseName;
    }

    public String getFieldShortName() {
        return this.fieldShortName;
    }

    public void setFieldShortName(String fieldShortName) {
        this.fieldShortName = fieldShortName;
    }

    public String getGroupType() {
        return this.groupType;
    }

    public void setGroupType(String groupType) {
        this.groupType = groupType;
    }

    public String getType() {
        return this.type;
    }

    public void setType(String type) {
        this.type = type;
    }

    public Integer getSize() {
        return this.size;
    }

    public void setSize(Integer size) {
        this.size = size;
    }

    public Integer getDeType() {
        return this.deType;
    }

    public void setDeType(Integer deType) {
        this.deType = deType;
    }

    public Integer getDeExtractType() {
        return this.deExtractType;
    }

    public void setDeExtractType(Integer deExtractType) {
        this.deExtractType = deExtractType;
    }

    public Integer getExtField() {
        return this.extField;
    }

    public void setExtField(Integer extField) {
        this.extField = extField;
    }

    public Boolean getChecked() {
        return this.checked;
    }

    public void setChecked(Boolean checked) {
        this.checked = checked;
    }

    public Integer getColumnIndex() {
        return this.columnIndex;
    }

    public void setColumnIndex(Integer columnIndex) {
        this.columnIndex = columnIndex;
    }

    public Long getLastSyncTime() {
        return this.lastSyncTime;
    }

    public void setLastSyncTime(Long lastSyncTime) {
        this.lastSyncTime = lastSyncTime;
    }

    public Integer getAccuracy() {
        return this.accuracy;
    }

    public void setAccuracy(Integer accuracy) {
        this.accuracy = accuracy;
    }

    public String getDateFormat() {
        return this.dateFormat;
    }

    public void setDateFormat(String dateFormat) {
        this.dateFormat = dateFormat;
    }

    public String getDateFormatType() {
        return this.dateFormatType;
    }

    public void setDateFormatType(String dateFormatType) {
        this.dateFormatType = dateFormatType;
    }

    @Override
    public String toString() {
        // Builds the exact same representation as the generated concatenation form.
        StringBuilder sb = new StringBuilder("CoreDatasetTableField{");
        sb.append("id = ").append(id);
        sb.append(", datasourceId = ").append(datasourceId);
        sb.append(", datasetTableId = ").append(datasetTableId);
        sb.append(", datasetGroupId = ").append(datasetGroupId);
        sb.append(", chartId = ").append(chartId);
        sb.append(", originName = ").append(originName);
        sb.append(", name = ").append(name);
        sb.append(", description = ").append(description);
        sb.append(", dataeaseName = ").append(dataeaseName);
        sb.append(", fieldShortName = ").append(fieldShortName);
        sb.append(", groupType = ").append(groupType);
        sb.append(", type = ").append(type);
        sb.append(", size = ").append(size);
        sb.append(", deType = ").append(deType);
        sb.append(", deExtractType = ").append(deExtractType);
        sb.append(", extField = ").append(extField);
        sb.append(", checked = ").append(checked);
        sb.append(", columnIndex = ").append(columnIndex);
        sb.append(", lastSyncTime = ").append(lastSyncTime);
        sb.append(", accuracy = ").append(accuracy);
        sb.append(", dateFormat = ").append(dateFormat);
        sb.append(", dateFormatType = ").append(dateFormatType);
        sb.append("}");
        return sb.toString();
    }
}

View File

@ -0,0 +1,122 @@
package io.dataease.dataset.dao.auto.entity;
import com.baomidou.mybatisplus.annotation.TableName;
import java.io.Serializable;
/**
 * Entity mapped to table {@code core_dataset_table_sql_log}: one execution
 * record of an ad-hoc SQL run against a dataset SQL node.
 *
 * @author fit2cloud
 * @since 2023-08-08
 */
@TableName("core_dataset_table_sql_log")
public class CoreDatasetTableSqlLog implements Serializable {

    private static final long serialVersionUID = 1L;

    /** Primary key. */
    private String id;

    /** Dataset SQL node id. */
    private String tableId;

    /** Start timestamp (epoch millis). */
    private Long startTime;

    /** End timestamp (epoch millis). */
    private Long endTime;

    /** Elapsed time in milliseconds. */
    private Long spend;

    /** The executed SQL text. */
    private String sql;

    /** Execution status. */
    private String status;

    public String getId() {
        return this.id;
    }

    public void setId(String id) {
        this.id = id;
    }

    public String getTableId() {
        return this.tableId;
    }

    public void setTableId(String tableId) {
        this.tableId = tableId;
    }

    public Long getStartTime() {
        return this.startTime;
    }

    public void setStartTime(Long startTime) {
        this.startTime = startTime;
    }

    public Long getEndTime() {
        return this.endTime;
    }

    public void setEndTime(Long endTime) {
        this.endTime = endTime;
    }

    public Long getSpend() {
        return this.spend;
    }

    public void setSpend(Long spend) {
        this.spend = spend;
    }

    public String getSql() {
        return this.sql;
    }

    public void setSql(String sql) {
        this.sql = sql;
    }

    public String getStatus() {
        return this.status;
    }

    public void setStatus(String status) {
        this.status = status;
    }

    @Override
    public String toString() {
        // Builds the exact same representation as the generated concatenation form.
        StringBuilder sb = new StringBuilder("CoreDatasetTableSqlLog{");
        sb.append("id = ").append(id);
        sb.append(", tableId = ").append(tableId);
        sb.append(", startTime = ").append(startTime);
        sb.append(", endTime = ").append(endTime);
        sb.append(", spend = ").append(spend);
        sb.append(", sql = ").append(sql);
        sb.append(", status = ").append(status);
        sb.append("}");
        return sb.toString();
    }
}

View File

@ -0,0 +1,18 @@
package io.dataease.dataset.dao.auto.mapper;
import io.dataease.dataset.dao.auto.entity.CoreDatasetGroup;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import org.apache.ibatis.annotations.Mapper;
/**
 * <p>
 * MyBatis-Plus mapper for table {@code core_dataset_group}; inherits generic
 * CRUD operations from {@link BaseMapper}. No custom methods.
 * </p>
 *
 * @author fit2cloud
 * @since 2023-08-28
 */
@Mapper
public interface CoreDatasetGroupMapper extends BaseMapper<CoreDatasetGroup> {
}

View File

@ -0,0 +1,18 @@
package io.dataease.dataset.dao.auto.mapper;
import io.dataease.dataset.dao.auto.entity.CoreDatasetTableField;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import org.apache.ibatis.annotations.Mapper;
/**
 * <p>
 * MyBatis-Plus mapper for table {@code core_dataset_table_field}; inherits
 * generic CRUD operations from {@link BaseMapper}. No custom methods.
 * </p>
 *
 * @author fit2cloud
 * @since 2023-04-20
 */
@Mapper
public interface CoreDatasetTableFieldMapper extends BaseMapper<CoreDatasetTableField> {
}

View File

@ -0,0 +1,18 @@
package io.dataease.dataset.dao.auto.mapper;
import io.dataease.dataset.dao.auto.entity.CoreDatasetTable;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import org.apache.ibatis.annotations.Mapper;
/**
 * <p>
 * MyBatis-Plus mapper for table {@code core_dataset_table}; inherits generic
 * CRUD operations from {@link BaseMapper}. No custom methods.
 * </p>
 *
 * @author fit2cloud
 * @since 2023-04-14
 */
@Mapper
public interface CoreDatasetTableMapper extends BaseMapper<CoreDatasetTable> {
}

View File

@ -0,0 +1,18 @@
package io.dataease.dataset.dao.auto.mapper;
import io.dataease.dataset.dao.auto.entity.CoreDatasetTableSqlLog;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import org.apache.ibatis.annotations.Mapper;
/**
 * <p>
 * MyBatis-Plus mapper for table {@code core_dataset_table_sql_log}; inherits
 * generic CRUD operations from {@link BaseMapper}. No custom methods.
 * </p>
 *
 * @author fit2cloud
 * @since 2023-08-08
 */
@Mapper
public interface CoreDatasetTableSqlLogMapper extends BaseMapper<CoreDatasetTableSqlLog> {
}

View File

@ -0,0 +1,24 @@
package io.dataease.dataset.dao.ext.mapper;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import io.dataease.api.dataset.vo.DataSetBarVO;
import io.dataease.dataset.dao.ext.po.DataSetNodePO;
import io.dataease.model.BusiNodeRequest;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
import org.apache.ibatis.annotations.Select;
import java.util.List;
/**
 * Hand-written SQL queries against {@code core_dataset_group} that are not
 * covered by the generated MyBatis-Plus mappers.
 */
@Mapper
public interface CoreDataSetExtMapper {

    /**
     * Lists dataset tree nodes (id, name, node_type, pid). The caller-built
     * {@code QueryWrapper}'s custom SQL segment (where/order clause) is
     * appended verbatim via {@code ${ew.customSqlSegment}}.
     */
    @Select("""
            select id, name, node_type, pid from core_dataset_group
            ${ew.customSqlSegment}
            """)
    List<DataSetNodePO> query(@Param("ew") QueryWrapper queryWrapper);

    /**
     * Loads the bar-info columns (name, node type, create/update audit fields)
     * for a single dataset group by id.
     */
    @Select("select id, name, node_type, create_by, create_time, update_by, last_update_time from core_dataset_group where id = #{id}")
    DataSetBarVO queryBarInfo(@Param("id") Long id);
}

View File

@ -0,0 +1,22 @@
package io.dataease.dataset.dao.ext.po;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.io.Serial;
import java.io.Serializable;
/**
 * Persistence object for one dataset tree node, as selected by
 * {@code CoreDataSetExtMapper#query} (columns: id, name, node_type, pid of
 * {@code core_dataset_group}).
 */
@Data
@AllArgsConstructor
@NoArgsConstructor
public class DataSetNodePO implements Serializable {

    @Serial
    private static final long serialVersionUID = -4457506330575500164L;

    // primary key of core_dataset_group
    private Long id;
    // node display name
    private String name;
    // node type column; presumably folder vs dataset leaf — TODO confirm values
    private String nodeType;
    // parent node id
    private Long pid;
}

View File

@ -0,0 +1,25 @@
package io.dataease.dataset.dto;
import io.dataease.model.TreeBaseModel;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.io.Serial;
/**
 * Business object for one node of the dataset tree; implements
 * {@link TreeBaseModel} so the generic tree-building utilities can consume it.
 */
@Data
@AllArgsConstructor
@NoArgsConstructor
public class DataSetNodeBO implements TreeBaseModel {

    @Serial
    private static final long serialVersionUID = 728340676442387790L;

    // node id
    private Long id;
    // node display name
    private String name;
    // true when the node is a leaf (dataset) rather than a folder
    private Boolean leaf;
    // weight defaults to 3; presumably a permission level — TODO confirm semantics
    private Integer weight = 3;
    // parent node id
    private Long pid;
    // extra flag carried alongside the node; semantics not visible here
    private Integer extraFlag;
}

View File

@ -0,0 +1,12 @@
package io.dataease.dataset.dto;
import io.dataease.datasource.dao.auto.entity.CoreDatasource;
import lombok.Data;
/**
 * A {@link CoreDatasource} enriched with the schema alias used when the
 * datasource participates in a cross-source (calcite) query.
 *
 * NOTE(review): {@code @Data} on a subclass generates equals/hashCode that
 * ignore the inherited CoreDatasource fields; consider
 * {@code @EqualsAndHashCode(callSuper = true)} if value equality matters.
 *
 * @Author Junjun
 */
@Data
public class DatasourceSchemaDTO extends CoreDatasource {

    // schema alias, e.g. String.format(SQLConstants.SCHEMA, id); used to
    // qualify table names in the generated SQL
    private String schemaAlias;
}

View File

@ -0,0 +1,463 @@
package io.dataease.dataset.manage;
import io.dataease.api.chart.dto.ChartViewDTO;
import io.dataease.api.chart.dto.ColumnPermissionItem;
import io.dataease.api.chart.request.ChartExtRequest;
import io.dataease.api.dataset.dto.DatasetTableDTO;
import io.dataease.api.dataset.dto.PreviewSqlDTO;
import io.dataease.api.dataset.dto.SqlLogDTO;
import io.dataease.api.dataset.union.DatasetGroupInfoDTO;
import io.dataease.api.dataset.union.DatasetTableInfoDTO;
import io.dataease.api.dataset.union.model.SQLMeta;
import io.dataease.api.ds.vo.TableField;
import io.dataease.api.permissions.dataset.dto.DataSetRowPermissionsTreeDTO;
import io.dataease.auth.bo.TokenUserBO;
import io.dataease.chart.manage.ChartViewManege;
import io.dataease.chart.utils.ChartDataBuild;
import io.dataease.commons.utils.SqlparserUtils;
import io.dataease.dataset.constant.DatasetTableType;
import io.dataease.dataset.dto.DatasourceSchemaDTO;
import io.dataease.dataset.utils.FieldUtils;
import io.dataease.dataset.utils.SqlUtils;
import io.dataease.dataset.utils.TableUtils;
import io.dataease.datasource.dao.auto.entity.CoreDatasource;
import io.dataease.datasource.dao.auto.mapper.CoreDatasourceMapper;
import io.dataease.datasource.provider.CalciteProvider;
import io.dataease.datasource.request.DatasourceRequest;
import io.dataease.datasource.server.EngineServer;
import io.dataease.dto.dataset.DatasetTableFieldDTO;
import io.dataease.engine.constant.ExtFieldConstant;
import io.dataease.engine.constant.SQLConstants;
import io.dataease.engine.func.FunctionConstant;
import io.dataease.engine.sql.SQLProvider;
import io.dataease.engine.trans.Field2SQLObj;
import io.dataease.engine.trans.Order2SQLObj;
import io.dataease.engine.trans.Table2SQLObj;
import io.dataease.engine.trans.WhereTree2Str;
import io.dataease.engine.utils.SQLUtils;
import io.dataease.engine.utils.Utils;
import io.dataease.exception.DEException;
import io.dataease.i18n.Translator;
import io.dataease.utils.AuthUtils;
import io.dataease.utils.BeanUtils;
import io.dataease.utils.JsonUtil;
import jakarta.annotation.Resource;
import org.apache.commons.lang3.ObjectUtils;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
import org.springframework.util.CollectionUtils;
import java.util.*;
import java.util.stream.Collectors;
/**
 * Manages dataset data retrieval: resolving field metadata from datasources,
 * previewing dataset/SQL data with limits, counting rows, and collecting
 * distinct field values. All queries ultimately go through
 * {@code CalciteProvider} using a per-datasource schema alias.
 *
 * @Author Junjun
 */
@Component
public class DatasetDataManage {
    @Resource
    private DatasetSQLManage datasetSQLManage;
    @Resource
    private CalciteProvider calciteProvider;
    @Resource
    private CoreDatasourceMapper coreDatasourceMapper;
    @Resource
    private DatasetTableFieldManage datasetTableFieldManage;
    @Resource
    private DatasetTableManage datasetTableManage;
    @Resource
    private EngineServer engineServer;
    @Resource
    private DatasetGroupManage datasetGroupManage;
    @Resource
    private PermissionManage permissionManage;
    @Resource
    private DatasetTableSqlLogManage datasetTableSqlLogManage;
    @Resource
    private ChartViewManege chartViewManege;

    // NOTE(review): conventionally this would be private static final
    private static Logger logger = LoggerFactory.getLogger(DatasetDataManage.class);

    /**
     * Resolves the raw field list of a dataset table node by issuing a
     * zero-row query (LIMIT 0 OFFSET 0) against the underlying datasource
     * (or the internal engine for excel/api sources) and converting the
     * returned TableFields into DatasetTableFieldDTOs.
     *
     * @param datasetTableDTO the table node whose fields are requested
     * @return converted field DTOs, all marked as checked
     * @throws Exception propagated from datasource access
     */
    public List<DatasetTableFieldDTO> getTableFields(DatasetTableDTO datasetTableDTO) throws Exception {
        // NOTE(review): 'list' is never used in this method
        List<DatasetTableFieldDTO> list = null;
        List<TableField> tableFields = null;
        String type = datasetTableDTO.getType();
        DatasetTableInfoDTO tableInfoDTO = JsonUtil.parseObject(datasetTableDTO.getInfo(), DatasetTableInfoDTO.class);
        if (StringUtils.equalsIgnoreCase(type, DatasetTableType.DB) || StringUtils.equalsIgnoreCase(type, DatasetTableType.SQL)) {
            CoreDatasource coreDatasource = coreDatasourceMapper.selectById(datasetTableDTO.getDatasourceId());
            DatasourceSchemaDTO datasourceSchemaDTO = new DatasourceSchemaDTO();
            // excel/api data lives in the internal engine datasource, not the user datasource
            if (StringUtils.equalsIgnoreCase("excel", coreDatasource.getType()) || StringUtils.equalsIgnoreCase("api", coreDatasource.getType())) {
                coreDatasource = engineServer.getDeEngine();
            }
            BeanUtils.copyBean(datasourceSchemaDTO, coreDatasource);
            datasourceSchemaDTO.setSchemaAlias(String.format(SQLConstants.SCHEMA, datasourceSchemaDTO.getId()));

            DatasourceRequest datasourceRequest = new DatasourceRequest();
            datasourceRequest.setDsList(Map.of(datasourceSchemaDTO.getId(), datasourceSchemaDTO));
            if (StringUtils.equalsIgnoreCase(type, DatasetTableType.DB)) {
                // add table schema
                datasourceRequest.setQuery(TableUtils.tableName2Sql(datasourceSchemaDTO, tableInfoDTO.getTable()) + " LIMIT 0 OFFSET 0");
            } else {
                // parser sql params and replace default value
                String sql = SqlparserUtils.handleVariableDefaultValue(new String(Base64.getDecoder().decode(tableInfoDTO.getSql())), datasetTableDTO.getSqlVariableDetails(), false, false, null);
                // add sql table schema
                sql = SqlUtils.addSchema(sql, datasourceSchemaDTO.getSchemaAlias());
                sql = SQLUtils.buildOriginPreviewSql(sql, 0, 0);
                datasourceRequest.setQuery(sql);
            }
            logger.info("calcite data table field sql: " + datasourceRequest.getQuery());
            // fetch the raw fields of the datasource table
            tableFields = (List<TableField>) calciteProvider.fetchResultField(datasourceRequest).get("fields");
        } else {
            // excel,api
            CoreDatasource coreDatasource = engineServer.getDeEngine();
            DatasourceSchemaDTO datasourceSchemaDTO = new DatasourceSchemaDTO();
            BeanUtils.copyBean(datasourceSchemaDTO, coreDatasource);
            datasourceSchemaDTO.setSchemaAlias(String.format(SQLConstants.SCHEMA, datasourceSchemaDTO.getId()));

            DatasourceRequest datasourceRequest = new DatasourceRequest();
            datasourceRequest.setDsList(Map.of(datasourceSchemaDTO.getId(), datasourceSchemaDTO));
            datasourceRequest.setQuery(TableUtils.tableName2Sql(datasourceSchemaDTO, tableInfoDTO.getTable()) + " LIMIT 0 OFFSET 0");
            logger.info("calcite data table field sql: " + datasourceRequest.getQuery());
            tableFields = (List<TableField>) calciteProvider.fetchResultField(datasourceRequest).get("fields");
        }
        return transFields(tableFields, true);
    }

    /**
     * Converts raw datasource TableFields into DatasetTableFieldDTOs,
     * deriving the DataEase type and dimension/measure group from the raw
     * type and marking each field with the given checked status.
     *
     * @param tableFields   raw fields from the datasource
     * @param defaultStatus initial checked flag for every field
     * @return converted field DTOs
     */
    public List<DatasetTableFieldDTO> transFields(List<TableField> tableFields, boolean defaultStatus) {
        return tableFields.stream().map(ele -> {
            DatasetTableFieldDTO dto = new DatasetTableFieldDTO();
            // prefer the display name; fall back to the raw name when absent
            dto.setName(StringUtils.isNotEmpty(ele.getName()) ? ele.getName() : ele.getOriginName());
            dto.setOriginName(ele.getOriginName());
            dto.setChecked(defaultStatus);
            dto.setType(ele.getType());
            int deType = FieldUtils.transType2DeType(ele.getType());
            dto.setDeExtractType(deType);
            dto.setDeType(deType);
            dto.setGroupType(FieldUtils.transDeType2DQ(deType));
            dto.setExtField(0);
            return dto;
        }).collect(Collectors.toList());
    }

    /**
     * Previews dataset data. Builds the union SQL for the dataset, applies
     * column/row permission filtering (when checkPermission is true) and
     * desensitization, and returns a map with keys: "data" (fields + rows),
     * "allFields", "sql" (base64-encoded query) and "total" (row count).
     *
     * @param datasetGroupInfoDTO the dataset definition
     * @param start               offset; when null (with count) no limit is applied
     * @param count               row count limit
     * @param checkPermission     whether to apply column/row permissions
     * @throws Exception propagated from SQL building or datasource access
     */
    public Map<String, Object> previewDataWithLimit(DatasetGroupInfoDTO datasetGroupInfoDTO, Integer start, Integer count, boolean checkPermission) throws Exception {
        Map<String, Object> sqlMap = datasetSQLManage.getUnionSQLForEdit(datasetGroupInfoDTO, null);
        String sql = (String) sqlMap.get("sql");
        // collect allFields
        List<DatasetTableFieldDTO> fields = datasetGroupInfoDTO.getAllFields();
        if (ObjectUtils.isEmpty(fields)) {
            DEException.throwException(Translator.get("i18n_no_fields"));
        }
        Map<String, ColumnPermissionItem> desensitizationList = new HashMap<>();
        if (checkPermission) {
            fields = permissionManage.filterColumnPermissions(fields, desensitizationList, datasetGroupInfoDTO.getId(), null);
            if (ObjectUtils.isEmpty(fields)) {
                DEException.throwException(Translator.get("i18n_no_column_permission"));
            }
        }
        buildFieldName(sqlMap, fields);

        Map<Long, DatasourceSchemaDTO> dsMap = (Map<Long, DatasourceSchemaDTO>) sqlMap.get("dsMap");
        List<String> dsList = new ArrayList<>();
        for (Map.Entry<Long, DatasourceSchemaDTO> next : dsMap.entrySet()) {
            dsList.add(next.getValue().getType());
        }
        boolean needOrder = Utils.isNeedOrder(dsList);

        List<DataSetRowPermissionsTreeDTO> rowPermissionsTree = new ArrayList<>();
        TokenUserBO user = AuthUtils.getUser();
        if (user != null && checkPermission) {
            rowPermissionsTree = permissionManage.getRowPermissionsTree(datasetGroupInfoDTO.getId(), user.getUserId());
        }

        // build query sql
        SQLMeta sqlMeta = new SQLMeta();
        Table2SQLObj.table2sqlobj(sqlMeta, null, "(" + sql + ")");
        Field2SQLObj.field2sqlObj(sqlMeta, fields);
        WhereTree2Str.transFilterTrees(sqlMeta, rowPermissionsTree, fields);
        Order2SQLObj.getOrders(sqlMeta, fields, datasetGroupInfoDTO.getSortFields());
        String querySQL;
        if (start == null || count == null) {
            querySQL = SQLProvider.createQuerySQL(sqlMeta, false, false, needOrder);
        } else {
            querySQL = SQLProvider.createQuerySQLWithLimit(sqlMeta, false, needOrder, false, start, count);
        }
        logger.info("calcite data preview sql: " + querySQL);

        // request data through the datasource
        // call the datasource's calcite to obtain data
        DatasourceRequest datasourceRequest = new DatasourceRequest();
        datasourceRequest.setQuery(querySQL);
        datasourceRequest.setDsList(dsMap);
        Map<String, Object> data = calciteProvider.fetchResultField(datasourceRequest);

        Map<String, Object> map = new LinkedHashMap<>();
        // rebuild data
        Map<String, Object> previewData = buildPreviewData(data, fields, desensitizationList);
        map.put("data", previewData);
        if (ObjectUtils.isEmpty(datasetGroupInfoDTO.getId())) {
            // unsaved dataset: return the in-memory fields
            map.put("allFields", fields);
        } else {
            List<DatasetTableFieldDTO> fieldList = datasetTableFieldManage.selectByDatasetGroupId(datasetGroupInfoDTO.getId());
            map.put("allFields", fieldList);
        }
        map.put("sql", Base64.getEncoder().encodeToString(querySQL.getBytes()));
        map.put("total", getDatasetTotal(datasetGroupInfoDTO));
        return map;
    }

    /**
     * Counts the rows of a dataset by wrapping its union SQL in a
     * SELECT COUNT(*) and executing it through calcite.
     *
     * @return total row count, or 0 when no result is returned
     * @throws Exception propagated from SQL building or datasource access
     */
    public Long getDatasetTotal(DatasetGroupInfoDTO datasetGroupInfoDTO) throws Exception {
        Map<String, Object> sqlMap = datasetSQLManage.getUnionSQLForEdit(datasetGroupInfoDTO, null);
        String sql = (String) sqlMap.get("sql");
        Map<Long, DatasourceSchemaDTO> dsMap = (Map<Long, DatasourceSchemaDTO>) sqlMap.get("dsMap");
        // NOTE(review): dsList is built but not used afterwards in this method
        List<String> dsList = new ArrayList<>();
        for (Map.Entry<Long, DatasourceSchemaDTO> next : dsMap.entrySet()) {
            dsList.add(next.getValue().getType());
        }
        String querySQL = "SELECT COUNT(*) FROM (" + sql + ") t_a_0";
        logger.info("calcite data count sql: " + querySQL);
        // request data through the datasource
        // call the datasource's calcite to obtain data
        DatasourceRequest datasourceRequest = new DatasourceRequest();
        datasourceRequest.setQuery(querySQL);
        datasourceRequest.setDsList(dsMap);
        Map<String, Object> data = calciteProvider.fetchResultField(datasourceRequest);
        List<String[]> dataList = (List<String[]>) data.get("data");
        if (ObjectUtils.isNotEmpty(dataList) && ObjectUtils.isNotEmpty(dataList.get(0)) && ObjectUtils.isNotEmpty(dataList.get(0)[0])) {
            return Long.valueOf(dataList.get(0)[0]);
        }
        return 0L;
    }

    /**
     * Runs an ad-hoc SQL preview and records an execution log entry
     * (start/end time, elapsed millis, status) when the dto carries a table id.
     *
     * @param dto base64-encoded SQL plus context; null returns null
     * @return the preview result from {@link #previewSql(PreviewSqlDTO)}
     */
    public Map<String, Object> previewSqlWithLog(PreviewSqlDTO dto) {
        if (dto == null) {
            return null;
        }
        SqlLogDTO sqlLogDTO = new SqlLogDTO();
        String sql = new String(Base64.getDecoder().decode(dto.getSql()));
        sqlLogDTO.setSql(sql);
        Map<String, Object> map = null;
        try {
            sqlLogDTO.setStartTime(System.currentTimeMillis());
            map = previewSql(dto);
            sqlLogDTO.setEndTime(System.currentTimeMillis());
            sqlLogDTO.setSpend(sqlLogDTO.getEndTime() - sqlLogDTO.getStartTime());
            sqlLogDTO.setStatus("Completed");
        } catch (Exception e) {
            // NOTE(review): on failure endTime/spend stay unset, and rethrowing
            // only e.getMessage() drops the original stack trace/cause
            sqlLogDTO.setStatus("Error");
            DEException.throwException(e.getMessage());
        } finally {
            if (ObjectUtils.isNotEmpty(dto.getTableId())) {
                sqlLogDTO.setTableId(dto.getTableId());
                datasetTableSqlLogManage.save(sqlLogDTO);
            }
        }
        return map;
    }

    /**
     * Previews an ad-hoc SQL against a datasource: substitutes SQL variables,
     * qualifies table names with the schema alias, limits the result to 100
     * rows (adding an ORDER BY on the first column for datasources that
     * require one), and returns a map with key "data" (fields + rows).
     */
    public Map<String, Object> previewSql(PreviewSqlDTO dto) {
        CoreDatasource coreDatasource = coreDatasourceMapper.selectById(dto.getDatasourceId());
        DatasourceSchemaDTO datasourceSchemaDTO = new DatasourceSchemaDTO();
        // excel/api data lives in the internal engine datasource
        if (coreDatasource.getType().equalsIgnoreCase("API") || coreDatasource.getType().equalsIgnoreCase("Excel")) {
            BeanUtils.copyBean(datasourceSchemaDTO, engineServer.getDeEngine());
        } else {
            BeanUtils.copyBean(datasourceSchemaDTO, coreDatasource);
        }
        String alias = String.format(SQLConstants.SCHEMA, datasourceSchemaDTO.getId());
        datasourceSchemaDTO.setSchemaAlias(alias);
        // parser sql params and replace default value
        String sql = SqlparserUtils.handleVariableDefaultValue(datasetSQLManage.subPrefixSuffixChar(new String(Base64.getDecoder().decode(dto.getSql()))), dto.getSqlVariableDetails(), true, true, null);
        sql = SqlUtils.addSchema(sql, alias);

        Map<Long, DatasourceSchemaDTO> dsMap = new LinkedHashMap<>();
        dsMap.put(datasourceSchemaDTO.getId(), datasourceSchemaDTO);
        DatasourceRequest datasourceRequest = new DatasourceRequest();
        datasourceRequest.setDsList(dsMap);

        // wrap the sql as a temporary table and apply a limit on the outer query
        if (Utils.isNeedOrder(List.of(datasourceSchemaDTO.getType()))) {
            // first fetch the table fields from the sql
            String sqlField = SQLUtils.buildOriginPreviewSql(sql, 0, 0);
            datasourceRequest.setQuery(sqlField);
            // fetch the raw fields of the datasource table
            List<TableField> list = (List<TableField>) calciteProvider.fetchResultField(datasourceRequest).get("fields");
            if (ObjectUtils.isEmpty(list)) {
                return null;
            }
            // order by the first column so the limit is deterministic
            sql = SQLUtils.buildOriginPreviewSqlWithOrderBy(sql, 100, 0, list.get(0).getOriginName() + " ASC ");
        } else {
            sql = SQLUtils.buildOriginPreviewSql(sql, 100, 0);
        }
        logger.info("calcite data preview sql: " + sql);

        datasourceRequest.setQuery(sql);
        Map<String, Object> data = calciteProvider.fetchResultField(datasourceRequest);
        // rebuild data
        List<TableField> fList = (List<TableField>) data.get("fields");
        List<DatasetTableFieldDTO> fields = transFields(fList, false);
        Map<String, Object> previewData = buildPreviewData(data, fields, new HashMap<>());
        Map<String, Object> map = new LinkedHashMap<>();
        map.put("data", previewData);
        return map;
    }

    /**
     * Reshapes raw row arrays into a list of field-name keyed maps, applying
     * desensitization masks to any column listed in desensitizationList.
     * Returns a map with keys "fields" and "data".
     *
     * @param data               raw result containing a "data" list of String[]
     * @param fields             field metadata, positionally aligned with row columns
     * @param desensitizationList dataeaseName -> masking rule
     */
    public Map<String, Object> buildPreviewData(Map<String, Object> data, List<DatasetTableFieldDTO> fields, Map<String, ColumnPermissionItem> desensitizationList) {
        Map<String, Object> map = new LinkedHashMap<>();
        List<String[]> dataList = (List<String[]>) data.get("data");
        List<LinkedHashMap<String, Object>> dataObjectList = new ArrayList<>();
        if (ObjectUtils.isNotEmpty(dataList)) {
            for (int i = 0; i < dataList.size(); i++) {
                String[] row = dataList.get(i);
                LinkedHashMap<String, Object> obj = new LinkedHashMap<>();
                if (row.length > 0) {
                    for (int j = 0; j < row.length; j++) {
                        // NOTE(review): keySet().contains is equivalent to containsKey
                        if (desensitizationList.keySet().contains(fields.get(j).getDataeaseName())) {
                            obj.put(fields.get(j).getDataeaseName(), ChartDataBuild.desensitizationValue(desensitizationList.get(fields.get(j).getDataeaseName()), String.valueOf(row[j])));
                        } else {
                            // fall back to originName when the dataeaseName is absent
                            obj.put(ObjectUtils.isNotEmpty(fields.get(j).getDataeaseName()) ?
                                    fields.get(j).getDataeaseName() : fields.get(j).getOriginName(), row[j]);
                        }
                    }
                }
                dataObjectList.add(obj);
            }
        }
        map.put("fields", fields);
        map.put("data", dataObjectList);
        return map;
    }

    /**
     * Fills dataeaseName/fieldShortName on each field, resolving them from the
     * persisted field record when it exists, otherwise from the inner union
     * SQL fields (for normal fields) or a freshly derived short name (for
     * calc fields). Mutates the passed fields in place.
     */
    public void buildFieldName(Map<String, Object> sqlMap, List<DatasetTableFieldDTO> fields) {
        // get the inner union sql and its fields
        List<DatasetTableFieldDTO> unionFields = (List<DatasetTableFieldDTO>) sqlMap.get("field");
        for (DatasetTableFieldDTO datasetTableFieldDTO : fields) {
            DatasetTableFieldDTO dto = datasetTableFieldManage.selectById(datasetTableFieldDTO.getId());
            if (ObjectUtils.isEmpty(dto)) {
                // field not persisted yet: resolve names from the union fields or derive them
                if (Objects.equals(datasetTableFieldDTO.getExtField(), ExtFieldConstant.EXT_NORMAL)) {
                    for (DatasetTableFieldDTO fieldDTO : unionFields) {
                        if (Objects.equals(datasetTableFieldDTO.getDatasetTableId(), fieldDTO.getDatasetTableId())
                                && Objects.equals(datasetTableFieldDTO.getOriginName(), fieldDTO.getOriginName())) {
                            datasetTableFieldDTO.setDataeaseName(fieldDTO.getDataeaseName());
                            datasetTableFieldDTO.setFieldShortName(fieldDTO.getFieldShortName());
                        }
                    }
                }
                if (Objects.equals(datasetTableFieldDTO.getExtField(), ExtFieldConstant.EXT_CALC)) {
                    String dataeaseName = TableUtils.fieldNameShort(datasetTableFieldDTO.getId() + "_" + datasetTableFieldDTO.getOriginName());
                    datasetTableFieldDTO.setDataeaseName(dataeaseName);
                    datasetTableFieldDTO.setFieldShortName(dataeaseName);
                    datasetTableFieldDTO.setDeExtractType(datasetTableFieldDTO.getDeType());
                }
            } else {
                datasetTableFieldDTO.setDataeaseName(dto.getDataeaseName());
                datasetTableFieldDTO.setFieldShortName(dto.getFieldShortName());
            }
        }
    }

    /**
     * For the field ids passed from the front-end query component, fetches up
     * to 1000 distinct-ish values per field (with permission filtering and
     * desensitization applied), then dedupes and merges them across fields
     * preserving encounter order.
     *
     * @param ids field ids
     * @return merged, deduplicated values
     * @throws Exception propagated from SQL building or datasource access
     */
    public List<String> getFieldEnum(List<Long> ids) throws Exception {
        // for the field ids passed from the front end, fetch all enum values, dedupe and merge
        List<List<String>> list = new ArrayList<>();
        for (Long id : ids) {
            DatasetTableFieldDTO field = datasetTableFieldManage.selectById(id);
            if (field == null) {
                DEException.throwException(Translator.get("i18n_no_field"));
            }
            List<DatasetTableFieldDTO> allFields = new ArrayList<>();
            // resolve the dataset from the chart calc field when needed
            Long datasetGroupId;
            if (field.getDatasetGroupId() == null && field.getChartId() != null) {
                ChartViewDTO chart = chartViewManege.getChart(field.getChartId());
                datasetGroupId = chart.getTableId();
                allFields.addAll(datasetTableFieldManage.getChartCalcFields(field.getChartId()));
            } else {
                datasetGroupId = field.getDatasetGroupId();
            }
            DatasetGroupInfoDTO datasetGroupInfoDTO = datasetGroupManage.get(datasetGroupId, null);
            Map<String, Object> sqlMap = datasetSQLManage.getUnionSQLForEdit(datasetGroupInfoDTO, new ChartExtRequest());
            String sql = (String) sqlMap.get("sql");
            allFields.addAll(datasetGroupInfoDTO.getAllFields());

            // build query sql
            SQLMeta sqlMeta = new SQLMeta();
            Table2SQLObj.table2sqlobj(sqlMeta, null, "(" + sql + ")");

            // calc fields: substitute their expression content first
            if (Objects.equals(field.getExtField(), ExtFieldConstant.EXT_CALC)) {
                String originField = Utils.calcFieldRegex(field.getOriginName(), sqlMeta.getTable(), allFields);
                // this path is used by dataset preview / raw enum-value fetching;
                // if an aggregate function is present, set originField to null
                for (String func : FunctionConstant.AGG_FUNC) {
                    if (Utils.matchFunction(func, originField)) {
                        originField = null;
                        break;
                    }
                }
                field.setOriginName(originField);
            }

            // collect allFields
            List<DatasetTableFieldDTO> fields = Collections.singletonList(field);
            Map<String, ColumnPermissionItem> desensitizationList = new HashMap<>();
            fields = permissionManage.filterColumnPermissions(fields, desensitizationList, datasetGroupInfoDTO.getId(), null);
            if (ObjectUtils.isEmpty(fields)) {
                DEException.throwException(Translator.get("i18n_no_column_permission"));
            }
            buildFieldName(sqlMap, fields);

            Map<Long, DatasourceSchemaDTO> dsMap = (Map<Long, DatasourceSchemaDTO>) sqlMap.get("dsMap");
            List<String> dsList = new ArrayList<>();
            for (Map.Entry<Long, DatasourceSchemaDTO> next : dsMap.entrySet()) {
                dsList.add(next.getValue().getType());
            }
            boolean needOrder = Utils.isNeedOrder(dsList);

            List<DataSetRowPermissionsTreeDTO> rowPermissionsTree = new ArrayList<>();
            TokenUserBO user = AuthUtils.getUser();
            if (user != null) {
                rowPermissionsTree = permissionManage.getRowPermissionsTree(datasetGroupInfoDTO.getId(), user.getUserId());
            }

            Field2SQLObj.field2sqlObj(sqlMeta, fields);
            WhereTree2Str.transFilterTrees(sqlMeta, rowPermissionsTree, fields);
            Order2SQLObj.getOrders(sqlMeta, fields, datasetGroupInfoDTO.getSortFields());
            String querySQL = SQLProvider.createQuerySQLWithLimit(sqlMeta, false, needOrder, true, 0, 1000);

            // request data through the datasource
            // call the datasource's calcite to obtain data
            DatasourceRequest datasourceRequest = new DatasourceRequest();
            datasourceRequest.setQuery(querySQL);
            datasourceRequest.setDsList(dsMap);
            Map<String, Object> data = calciteProvider.fetchResultField(datasourceRequest);
            List<String[]> dataList = (List<String[]>) data.get("data");
            List<String> previewData = new ArrayList<>();
            if (ObjectUtils.isNotEmpty(dataList)) {
                // only the first column of each row is the enum value
                List<String> tmpData = dataList.stream().map(ele -> (ObjectUtils.isNotEmpty(ele) && ele.length > 0) ? ele[0] : null).collect(Collectors.toList());
                if (!CollectionUtils.isEmpty(tmpData)) {
                    if (desensitizationList.keySet().contains(field.getDataeaseName())) {
                        for (int i = 0; i < tmpData.size(); i++) {
                            previewData.add(ChartDataBuild.desensitizationValue(desensitizationList.get(field.getDataeaseName()), tmpData.get(i)));
                        }
                    } else {
                        previewData = tmpData;
                    }
                }
                list.add(previewData);
            }
            logger.info("calcite data enum sql: " + querySQL);
        }
        // rebuild data: merge all per-field value lists, dropping duplicates
        Set<String> result = new LinkedHashSet<>();
        for (List<String> l : list) {
            result.addAll(l);
        }
        return result.stream().toList();
    }
}

View File

@ -0,0 +1,491 @@
package io.dataease.dataset.manage;
import cn.hutool.core.collection.CollectionUtil;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.fasterxml.jackson.core.type.TypeReference;
import io.dataease.api.dataset.dto.DatasetTableDTO;
import io.dataease.api.dataset.dto.SqlVariableDetails;
import io.dataease.api.dataset.union.DatasetGroupInfoDTO;
import io.dataease.api.dataset.union.UnionDTO;
import io.dataease.api.dataset.vo.DataSetBarVO;
import io.dataease.api.ds.vo.DatasourceDTO;
import io.dataease.api.permissions.user.api.UserApi;
import io.dataease.api.permissions.user.vo.UserFormVO;
import io.dataease.commons.constants.OptConstants;
import io.dataease.dataset.dao.auto.entity.CoreDatasetGroup;
import io.dataease.dataset.dao.auto.entity.CoreDatasetTable;
import io.dataease.dataset.dao.auto.mapper.CoreDatasetGroupMapper;
import io.dataease.dataset.dao.auto.mapper.CoreDatasetTableMapper;
import io.dataease.dataset.dao.ext.mapper.CoreDataSetExtMapper;
import io.dataease.dataset.dao.ext.po.DataSetNodePO;
import io.dataease.dataset.dto.DataSetNodeBO;
import io.dataease.dataset.utils.FieldUtils;
import io.dataease.dataset.utils.TableUtils;
import io.dataease.datasource.dao.auto.entity.CoreDatasource;
import io.dataease.datasource.dao.auto.mapper.CoreDatasourceMapper;
import io.dataease.dto.dataset.DatasetTableFieldDTO;
import io.dataease.engine.constant.ExtFieldConstant;
import io.dataease.exception.DEException;
import io.dataease.i18n.Translator;
import io.dataease.license.config.XpackInteract;
import io.dataease.model.BusiNodeRequest;
import io.dataease.model.BusiNodeVO;
import io.dataease.operation.manage.CoreOptRecentManage;
import io.dataease.utils.*;
import jakarta.annotation.Resource;
import org.apache.commons.lang3.ObjectUtils;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import org.springframework.transaction.annotation.Transactional;
import java.util.*;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import java.util.stream.Collectors;
/**
 * Service-layer manager for the dataset tree. A node is either a folder
 * ({@code node_type = "folder"}) or a dataset leaf ({@code node_type = "dataset"}).
 * Handles create/update/rename/move/delete of nodes, keeps the per-dataset
 * table and field records in sync, and builds the resource tree for the UI.
 * <p>
 * Several mutating operations are invoked through the Spring proxy via
 * {@code CommonBeanFactory.getBean(this.getClass())} instead of {@code this},
 * so that the {@link XpackInteract} aspect still fires on self-invocation.
 *
 * @Author Junjun
 */
@Component
@Transactional(rollbackFor = Exception.class)
public class DatasetGroupManage {
    @Resource
    private CoreDatasetGroupMapper coreDatasetGroupMapper;
    @Resource
    private DatasetSQLManage datasetSQLManage;
    @Resource
    private DatasetDataManage datasetDataManage;
    @Resource
    private DatasetTableManage datasetTableManage;
    @Resource
    private DatasetTableFieldManage datasetTableFieldManage;
    @Resource
    private PermissionManage permissionManage;
    @Resource
    private CoreDataSetExtMapper coreDataSetExtMapper;
    @Resource
    private CoreDatasetTableMapper coreDatasetTableMapper;
    @Resource
    private CoreDatasourceMapper coreDatasourceMapper;
    // Optional dependency: only injected when the xpack permission module is on
    // the classpath; every use below is guarded by a null check.
    @Autowired(required = false)
    private UserApi userApi;
    @Resource
    private CoreOptRecentManage coreOptRecentManage;
    // node_type value that marks a leaf (a dataset, as opposed to a folder).
    // NOTE(review): some branches below compare against the literal "dataset"
    // instead of this constant — consider unifying.
    private static final String leafType = "dataset";
    // Serializes save() so concurrent saves cannot race the duplicate-name
    // check / id assignment. Instance-level, so it only guards this JVM.
    private Lock lock = new ReentrantLock();
    /**
     * Creates or updates a dataset/folder node and, for a dataset leaf,
     * persists its union definition plus the derived table and field records.
     *
     * @param datasetGroupInfoDTO node payload; a missing id means "create"
     * @param rename              true when only renaming (skips the empty-fields check)
     * @return the saved DTO, or null if an exception was swallowed and rethrown
     * @throws Exception propagated from union-SQL generation / persistence
     */
    public DatasetGroupInfoDTO save(DatasetGroupInfoDTO datasetGroupInfoDTO, boolean rename) throws Exception {
        lock.lock();
        try {
            boolean isCreate;
            // Rename flow sends no pid: recover it from the stored record so the
            // duplicate-name check runs against the correct parent folder.
            if (ObjectUtils.isEmpty(datasetGroupInfoDTO.getPid()) && ObjectUtils.isNotEmpty(datasetGroupInfoDTO.getId())) {
                CoreDatasetGroup coreDatasetGroup = coreDatasetGroupMapper.selectById(datasetGroupInfoDTO.getId());
                datasetGroupInfoDTO.setPid(coreDatasetGroup.getPid());
            }
            // Without the permission module the name check happens here;
            // otherwise the xpack aspect is expected to take care of it.
            if (userApi == null) {
                checkName(datasetGroupInfoDTO);
            }
            if (userApi != null) {
                datasetGroupInfoDTO.setUpdateBy(userApi.info().getId() + "");
                datasetGroupInfoDTO.setLastUpdateTime(System.currentTimeMillis());
            }
            if (StringUtils.equalsIgnoreCase(datasetGroupInfoDTO.getNodeType(), leafType)) {
                // A dataset must carry at least one field unless this is a pure rename.
                if (!rename && ObjectUtils.isEmpty(datasetGroupInfoDTO.getAllFields())) {
                    DEException.throwException(Translator.get("i18n_no_fields"));
                }
                // get union sql
                Map<String, Object> sqlMap = datasetSQLManage.getUnionSQLForEdit(datasetGroupInfoDTO, null);
                if (ObjectUtils.isNotEmpty(sqlMap)) {
                    String sql = (String) sqlMap.get("sql");
                    datasetGroupInfoDTO.setUnionSql(sql);
                    // Persist the union tree as JSON in the "info" column.
                    datasetGroupInfoDTO.setInfo(Objects.requireNonNull(JsonUtil.toJSONString(datasetGroupInfoDTO.getUnion())).toString());
                }
            }
            // save dataset/group
            long time = System.currentTimeMillis();
            if (ObjectUtils.isEmpty(datasetGroupInfoDTO.getId())) {
                isCreate = true;
                datasetGroupInfoDTO.setId(IDUtils.snowID());
                if (userApi != null) {
                    datasetGroupInfoDTO.setCreateBy(userApi.info().getId() + "");
                    datasetGroupInfoDTO.setUpdateBy(userApi.info().getId() + "");
                }
                datasetGroupInfoDTO.setCreateTime(time);
                datasetGroupInfoDTO.setLastUpdateTime(time);
                // Root nodes get pid 0.
                datasetGroupInfoDTO.setPid(datasetGroupInfoDTO.getPid() == null ? 0L : datasetGroupInfoDTO.getPid());
                // Call through the proxy so @XpackInteract on innerSave fires.
                Objects.requireNonNull(CommonBeanFactory.getBean(this.getClass())).innerSave(datasetGroupInfoDTO);
            } else {
                isCreate = false;
                // A node must never be its own parent.
                if (Objects.equals(datasetGroupInfoDTO.getId(), datasetGroupInfoDTO.getPid())) {
                    DEException.throwException(Translator.get("i18n_pid_not_eq_id"));
                }
                Objects.requireNonNull(CommonBeanFactory.getBean(this.getClass())).innerEdit(datasetGroupInfoDTO);
            }
            // For node_type = "dataset" the dataset_table and field rows must be (re)created.
            if (StringUtils.equalsIgnoreCase(datasetGroupInfoDTO.getNodeType(), "dataset")) {
                List<Long> tableIds = new ArrayList<>();
                List<Long> fieldIds = new ArrayList<>();
                // Walk the union tree and persist each table / field, collecting kept ids.
                saveTable(datasetGroupInfoDTO, datasetGroupInfoDTO.getUnion(), tableIds, isCreate);
                saveField(datasetGroupInfoDTO, fieldIds);
                // Remove tables/fields that are no longer referenced by the new definition.
                datasetTableManage.deleteByDatasetGroupUpdate(datasetGroupInfoDTO.getId(), tableIds);
                datasetTableFieldManage.deleteByDatasetGroupUpdate(datasetGroupInfoDTO.getId(), fieldIds);
            }
            return datasetGroupInfoDTO;
        } catch (Exception e) {
            // NOTE(review): rethrowing only e.getMessage() discards the original
            // exception type and stack trace, which makes failures here hard to
            // diagnose — consider passing the cause through.
            DEException.throwException(e.getMessage());
        } finally {
            lock.unlock();
        }
        // Unreachable in practice: throwException above always throws.
        return null;
    }
    /** Updates an existing node row; the aspect maintains the auth resource tree. */
    @XpackInteract(value = "authResourceTree", before = false)
    public void innerEdit(DatasetGroupInfoDTO datasetGroupInfoDTO) {
        CoreDatasetGroup coreDatasetGroup = BeanUtils.copyBean(new CoreDatasetGroup(), datasetGroupInfoDTO);
        coreDatasetGroup.setLastUpdateTime(System.currentTimeMillis());
        coreDatasetGroupMapper.updateById(coreDatasetGroup);
        coreOptRecentManage.saveOpt(datasetGroupInfoDTO.getId(), OptConstants.OPT_RESOURCE_TYPE.DATASET,OptConstants.OPT_TYPE.UPDATE);
    }
    /** Inserts a new node row; the aspect maintains the auth resource tree. */
    @XpackInteract(value = "authResourceTree", before = false)
    public void innerSave(DatasetGroupInfoDTO datasetGroupInfoDTO) {
        CoreDatasetGroup coreDatasetGroup = BeanUtils.copyBean(new CoreDatasetGroup(), datasetGroupInfoDTO);
        coreDatasetGroupMapper.insert(coreDatasetGroup);
        coreOptRecentManage.saveOpt(coreDatasetGroup.getId(), OptConstants.OPT_RESOURCE_TYPE.DATASET,OptConstants.OPT_TYPE.NEW);
    }
    /**
     * Moves a node to a new parent after validating the target: the node must
     * not become its own parent nor be moved under one of its descendants.
     */
    @XpackInteract(value = "authResourceTree", before = false)
    public DatasetGroupInfoDTO move(DatasetGroupInfoDTO datasetGroupInfoDTO) {
        if (userApi == null) {
            checkName(datasetGroupInfoDTO);
        }
        // Moving to root (pid 0) cannot create a cycle, so skip the check.
        if (datasetGroupInfoDTO.getPid() != 0) {
            checkMove(datasetGroupInfoDTO);
        }
        // save dataset/group
        long time = System.currentTimeMillis();
        CoreDatasetGroup coreDatasetGroup = new CoreDatasetGroup();
        BeanUtils.copyBean(coreDatasetGroup, datasetGroupInfoDTO);
        if (userApi != null) {
            // NOTE(review): this updates the DTO after the entity copy, so the
            // new updateBy is not persisted by the updateById below — confirm intent.
            datasetGroupInfoDTO.setUpdateBy(userApi.info().getId() + "");
        }
        coreDatasetGroup.setLastUpdateTime(time);
        coreDatasetGroupMapper.updateById(coreDatasetGroup);
        coreOptRecentManage.saveOpt(coreDatasetGroup.getId(), OptConstants.OPT_RESOURCE_TYPE.DATASET,OptConstants.OPT_TYPE.UPDATE);
        return datasetGroupInfoDTO;
    }
    /**
     * Deletes a node and, recursively, all of its descendants together with
     * their table and field records.
     */
    @XpackInteract(value = "authResourceTree", before = false)
    public void delete(Long id) {
        CoreDatasetGroup coreDatasetGroup = coreDatasetGroupMapper.selectById(id);
        if (ObjectUtils.isEmpty(coreDatasetGroup)) {
            DEException.throwException("resource not exist");
        }
        // Call through the proxy so the recursive delete stays inside the transaction proxy.
        Objects.requireNonNull(CommonBeanFactory.getBean(this.getClass())).recursionDel(id);
        coreOptRecentManage.saveOpt(coreDatasetGroup.getId(), OptConstants.OPT_RESOURCE_TYPE.DATASET,OptConstants.OPT_TYPE.DELETE);
    }
    /** Depth-first delete of a node, its table/field rows, and all children. */
    public void recursionDel(Long id) {
        coreDatasetGroupMapper.deleteById(id);
        datasetTableManage.deleteByDatasetGroupDelete(id);
        datasetTableFieldManage.deleteByDatasetGroupDelete(id);
        QueryWrapper<CoreDatasetGroup> wrapper = new QueryWrapper<>();
        wrapper.eq("pid", id);
        List<CoreDatasetGroup> coreDatasetGroups = coreDatasetGroupMapper.selectList(wrapper);
        if (ObjectUtils.isNotEmpty(coreDatasetGroups)) {
            for (CoreDatasetGroup record : coreDatasetGroups) {
                recursionDel(record.getId());
            }
        }
    }
    /**
     * Builds the dataset tree for the UI. With xpack present the aspect
     * replaces this implementation (replace = true) to apply permissions.
     *
     * @param request leaf == true → datasets only; false/null → include folders
     */
    @XpackInteract(value = "authResourceTree", replace = true)
    public List<BusiNodeVO> tree(BusiNodeRequest request) {
        QueryWrapper<Object> queryWrapper = new QueryWrapper<>();
        if (ObjectUtils.isNotEmpty(request.getLeaf())) {
            queryWrapper.eq("node_type", request.getLeaf() ? "dataset" : "folder");
        }
        queryWrapper.orderByDesc("create_time");
        List<DataSetNodePO> pos = coreDataSetExtMapper.query(queryWrapper);
        List<DataSetNodeBO> nodes = new ArrayList<>();
        // The synthetic root is only needed when folders are part of the result.
        if (ObjectUtils.isEmpty(request.getLeaf()) || !request.getLeaf()) nodes.add(rootNode());
        List<DataSetNodeBO> bos = pos.stream().map(this::convert).toList();
        if (CollectionUtil.isNotEmpty(bos)) {
            nodes.addAll(bos);
        }
        return TreeUtils.mergeTree(nodes, BusiNodeVO.class, false);
    }
    /**
     * Returns the info-bar payload for a dataset: creator/updater display
     * names (when the user module is present) and the datasources it uses.
     */
    public DataSetBarVO queryBarInfo(Long id) {
        DataSetBarVO dataSetBarVO = coreDataSetExtMapper.queryBarInfo(id);
        // get creator
        if (userApi != null) {
            UserFormVO userFormVO = userApi.queryById(Long.valueOf(dataSetBarVO.getCreateBy()));
            if (userFormVO != null) {
                dataSetBarVO.setCreator(userFormVO.getName());
            }
            UserFormVO userFormVOUpdateBy = userApi.queryById(Long.valueOf(dataSetBarVO.getUpdateBy()));
            if (userFormVOUpdateBy != null) {
                dataSetBarVO.setUpdater(userFormVOUpdateBy.getName());
            }
        }
        dataSetBarVO.setDatasourceDTOList(getDatasource(id));
        return dataSetBarVO;
    }
    /**
     * Collects the distinct datasources referenced by a dataset's tables,
     * with connection configuration stripped before returning to the caller.
     * Throws when any referenced datasource row has been deleted.
     */
    private List<DatasourceDTO> getDatasource(Long datasetId) {
        QueryWrapper<CoreDatasetTable> wrapper = new QueryWrapper<>();
        wrapper.eq("dataset_group_id", datasetId);
        List<CoreDatasetTable> coreDatasetTables = coreDatasetTableMapper.selectList(wrapper);
        // NOTE(review): raw type — should be new LinkedHashSet<>().
        Set<Long> ids = new LinkedHashSet();
        coreDatasetTables.forEach(ele -> ids.add(ele.getDatasourceId()));
        QueryWrapper<CoreDatasource> datasourceQueryWrapper = new QueryWrapper<>();
        // NOTE(review): if the dataset has no tables, ids is empty and IN ()
        // may produce invalid SQL — confirm this path is unreachable.
        datasourceQueryWrapper.in("id", ids);
        List<DatasourceDTO> datasourceDTOList = coreDatasourceMapper.selectList(datasourceQueryWrapper).stream().map(ele -> {
            DatasourceDTO dto = new DatasourceDTO();
            BeanUtils.copyBean(dto, ele);
            // Never leak connection credentials/configuration to the front end.
            dto.setConfiguration(null);
            return dto;
        }).collect(Collectors.toList());
        if(ids.size() != datasourceDTOList.size()){
            DEException.throwException("由于数据集所用的数据源已被删除,无法显示数据集");
        }
        return datasourceDTOList;
    }
    /** Synthetic root node (id 0, pid -1) anchoring the folder tree. */
    private DataSetNodeBO rootNode() {
        return new DataSetNodeBO(0L, "root", false, 7, -1L, 0);
    }
    /** Maps a persistence-layer node to the tree BO; leaf iff node_type is "dataset". */
    private DataSetNodeBO convert(DataSetNodePO po) {
        return new DataSetNodeBO(po.getId(), po.getName(), StringUtils.equals(po.getNodeType(), leafType), 7, po.getPid(), 0);
    }
    /**
     * Rejects the save when a sibling with the same name, level and node type
     * already exists under the same parent (excluding the node itself).
     */
    public void checkName(DatasetGroupInfoDTO dto) {
        QueryWrapper<CoreDatasetGroup> wrapper = new QueryWrapper<>();
        if (ObjectUtils.isNotEmpty(dto.getPid())) {
            wrapper.eq("pid", dto.getPid());
        }
        if (StringUtils.isNotEmpty(dto.getName())) {
            wrapper.eq("name", dto.getName());
        }
        if (ObjectUtils.isNotEmpty(dto.getId())) {
            wrapper.ne("id", dto.getId());
        }
        if (ObjectUtils.isNotEmpty(dto.getLevel())) {
            wrapper.eq("level", dto.getLevel());
        }
        if (ObjectUtils.isNotEmpty(dto.getNodeType())) {
            wrapper.eq("node_type", dto.getNodeType());
        }
        List<CoreDatasetGroup> list = coreDatasetGroupMapper.selectList(wrapper);
        if (list.size() > 0) {
            DEException.throwException(Translator.get("i18n_ds_name_exists"));
        }
    }
    /**
     * Recursively persists every table in the union tree and records its id
     * in tableIds (used later to prune removed tables).
     * Table/field ids are generated by the front end.
     *
     * @param isCreate on create, an already-existing table id means a duplicate
     */
    public void saveTable(DatasetGroupInfoDTO datasetGroupInfoDTO, List<UnionDTO> union, List<Long> tableIds, boolean isCreate) {
        Long datasetGroupId = datasetGroupInfoDTO.getId();
        if (ObjectUtils.isNotEmpty(union)) {
            for (UnionDTO unionDTO : union) {
                DatasetTableDTO currentDs = unionDTO.getCurrentDs();
                CoreDatasetTable coreDatasetTable = datasetTableManage.selectById(currentDs.getId());
                if (coreDatasetTable != null && isCreate) {
                    DEException.throwException(Translator.get("i18n_table_duplicate"));
                }
                currentDs.setDatasetGroupId(datasetGroupId);
                datasetTableManage.save(currentDs);
                tableIds.add(currentDs.getId());
                // Recurse into joined child tables.
                saveTable(datasetGroupInfoDTO, unionDTO.getChildrenDs(), tableIds, isCreate);
            }
        }
    }
    /**
     * Persists the dataset's fields, reconciling physical fields against the
     * union SQL's column list and assigning generated names to calculated
     * fields. Collected ids are used later to prune removed fields.
     *
     * @throws Exception from the validation preview / union SQL generation
     */
    public void saveField(DatasetGroupInfoDTO datasetGroupInfoDTO, List<Long> fieldIds) throws Exception {
        if (ObjectUtils.isEmpty(datasetGroupInfoDTO.getUnion())) {
            return;
        }
        // Run a 1-row preview first: validates that the definition actually executes.
        datasetDataManage.previewDataWithLimit(datasetGroupInfoDTO, 0, 1, false);
        // Table/field ids are generated by the front end.
        Long datasetGroupId = datasetGroupInfoDTO.getId();
        List<DatasetTableFieldDTO> allFields = datasetGroupInfoDTO.getAllFields();
        if (ObjectUtils.isNotEmpty(allFields)) {
            // Regenerate the inner union SQL to obtain the authoritative column list.
            Map<String, Object> map = datasetSQLManage.getUnionSQLForEdit(datasetGroupInfoDTO, null);
            List<DatasetTableFieldDTO> unionFields = (List<DatasetTableFieldDTO>) map.get("field");
            for (DatasetTableFieldDTO datasetTableFieldDTO : allFields) {
                DatasetTableFieldDTO dto = datasetTableFieldManage.selectById(datasetTableFieldDTO.getId());
                if (ObjectUtils.isEmpty(dto)) {
                    // New physical field: copy the generated names from the matching union column.
                    if (Objects.equals(datasetTableFieldDTO.getExtField(), ExtFieldConstant.EXT_NORMAL)) {
                        for (DatasetTableFieldDTO fieldDTO : unionFields) {
                            if (Objects.equals(datasetTableFieldDTO.getDatasetTableId(), fieldDTO.getDatasetTableId())
                                    && Objects.equals(datasetTableFieldDTO.getOriginName(), fieldDTO.getOriginName())) {
                                datasetTableFieldDTO.setDataeaseName(fieldDTO.getDataeaseName());
                                datasetTableFieldDTO.setFieldShortName(fieldDTO.getFieldShortName());
                            }
                        }
                    }
                    // New calculated field: derive a short name from id + origin name.
                    if (Objects.equals(datasetTableFieldDTO.getExtField(), ExtFieldConstant.EXT_CALC)) {
                        String dataeaseName = TableUtils.fieldNameShort(datasetTableFieldDTO.getId() + "_" + datasetTableFieldDTO.getOriginName());
                        datasetTableFieldDTO.setDataeaseName(dataeaseName);
                        datasetTableFieldDTO.setFieldShortName(dataeaseName);
                        datasetTableFieldDTO.setDeExtractType(datasetTableFieldDTO.getDeType());
                    }
                    datasetTableFieldDTO.setDatasetGroupId(datasetGroupId);
                } else {
                    // Existing field: keep the previously persisted generated names.
                    datasetTableFieldDTO.setDataeaseName(dto.getDataeaseName());
                    datasetTableFieldDTO.setFieldShortName(dto.getFieldShortName());
                }
                datasetTableFieldDTO = datasetTableFieldManage.save(datasetTableFieldDTO);
                fieldIds.add(datasetTableFieldDTO.getId());
            }
        }
    }
    /**
     * Loads a node. For datasets this also rebuilds the union tree and field
     * list, and when type == "preview" additionally runs a 100-row preview,
     * returning the data plus the Base64-encoded SQL.
     *
     * @return the populated DTO, or null when the node does not exist
     */
    public DatasetGroupInfoDTO get(Long id, String type) throws Exception {
        CoreDatasetGroup coreDatasetGroup = coreDatasetGroupMapper.selectById(id);
        if (coreDatasetGroup == null) {
            return null;
        }
        DatasetGroupInfoDTO dto = new DatasetGroupInfoDTO();
        BeanUtils.copyBean(dto, coreDatasetGroup);
        // get creator
        if (userApi != null) {
            UserFormVO userFormVO = userApi.queryById(Long.valueOf(dto.getCreateBy()));
            if (userFormVO != null) {
                dto.setCreator(userFormVO.getName());
            }
            UserFormVO userFormVOUpdateBy = userApi.queryById(Long.valueOf(dto.getUpdateBy()));
            if (userFormVOUpdateBy != null) {
                dto.setUpdater(userFormVOUpdateBy.getName());
            }
        }
        dto.setUnionSql(null);
        if (StringUtils.equalsIgnoreCase(dto.getNodeType(), "dataset")) {
            // The union tree was persisted as JSON in the "info" column.
            List<UnionDTO> unionDTOList = JsonUtil.parseList(coreDatasetGroup.getInfo(), new TypeReference<>() {
            });
            dto.setUnion(unionDTOList);
            // Rebuild the field list, mirroring dataeaseName into fieldShortName.
            List<DatasetTableFieldDTO> dsFields = datasetTableFieldManage.selectByDatasetGroupId(id);
            List<DatasetTableFieldDTO> allFields = dsFields.stream().map(ele -> {
                DatasetTableFieldDTO datasetTableFieldDTO = new DatasetTableFieldDTO();
                BeanUtils.copyBean(datasetTableFieldDTO, ele);
                datasetTableFieldDTO.setFieldShortName(ele.getDataeaseName());
                return datasetTableFieldDTO;
            }).collect(Collectors.toList());
            dto.setAllFields(allFields);
            if ("preview".equalsIgnoreCase(type)) {
                // Run the preview query (first 100 rows).
                Map<String, Object> map = datasetDataManage.previewDataWithLimit(dto, 0, 100, true);
                // Unpack data / sql / total from the preview result.
                Map<String, List> data = (Map<String, List>) map.get("data");
                String sql = (String) map.get("sql");
                Long total = (Long) map.get("total");
                dto.setData(data);
                // SQL is Base64-encoded for transport to the front end.
                dto.setSql(Base64.getEncoder().encodeToString(sql.getBytes()));
                dto.setTotal(total);
            }
        }
        return dto;
    }
    /**
     * Batch detail lookup. The result list is positional: a missing id yields
     * a null entry at the same index.
     */
    public List<DatasetTableDTO> getDetail(List<Long> ids) {
        if (ObjectUtils.isEmpty(ids)) {
            DEException.throwException(Translator.get("i18n_table_id_can_not_empty"));
        }
        List<DatasetTableDTO> list = new ArrayList<>();
        for (Long id : ids) {
            CoreDatasetGroup coreDatasetGroup = coreDatasetGroupMapper.selectById(id);
            if (coreDatasetGroup == null) {
                list.add(null);
            } else {
                DatasetTableDTO dto = new DatasetTableDTO();
                BeanUtils.copyBean(dto, coreDatasetGroup);
                Map<String, List<DatasetTableFieldDTO>> listByDQ = datasetTableFieldManage.listByDQ(id);
                dto.setFields(listByDQ);
                list.add(dto);
            }
        }
        return list;
    }
    /**
     * Collects the SQL variables declared by every table of the given
     * datasets, tagging each with its dataset/table id, the dataset's full
     * folder path, a composite id, and the mapped DE type.
     */
    public List<SqlVariableDetails> getSqlParams(List<Long> ids) {
        List<SqlVariableDetails> list = new ArrayList<>();
        if (ObjectUtils.isEmpty(ids)) {
            return list;
        }
        TypeReference<List<SqlVariableDetails>> listTypeReference = new TypeReference<List<SqlVariableDetails>>() {
        };
        for (Long id : ids) {
            List<CoreDatasetTable> datasetTables = datasetTableManage.selectByDatasetGroupId(id);
            for (CoreDatasetTable datasetTable : datasetTables) {
                if (StringUtils.isNotEmpty(datasetTable.getSqlVariableDetails())) {
                    List<SqlVariableDetails> defaultsSqlVariableDetails = JsonUtil.parseList(datasetTable.getSqlVariableDetails(), listTypeReference);
                    if(CollectionUtil.isNotEmpty(defaultsSqlVariableDetails)){
                        // Build "folder/subfolder/dataset" by walking up and reversing.
                        List<String> fullName = new ArrayList<>();
                        geFullName(id, fullName);
                        List<String> finalFullName = CollectionUtil.reverse(fullName);
                        defaultsSqlVariableDetails.forEach(sqlVariableDetails -> {
                            sqlVariableDetails.setDatasetGroupId(id);
                            sqlVariableDetails.setDatasetTableId(datasetTable.getId());
                            sqlVariableDetails.setDatasetFullName(String.join("/", finalFullName));
                        });
                    }
                    // NOTE(review): addAll is outside the non-empty guard; if
                    // parseList can return null this NPEs — confirm.
                    list.addAll(defaultsSqlVariableDetails);
                }
            }
        }
        list.forEach(sqlVariableDetail -> {
            // Composite id: "<tableId>|DE|<variableName>".
            sqlVariableDetail.setId(sqlVariableDetail.getDatasetTableId() + "|DE|" + sqlVariableDetail.getVariableName());
            // Any DATETIME-prefixed declared type collapses to DATETIME before mapping.
            sqlVariableDetail.setDeType(FieldUtils.transType2DeType(sqlVariableDetail.getType().get(0).contains("DATETIME") ? "DATETIME" : sqlVariableDetail.getType().get(0)));
        });
        return list;
    }
    /**
     * Validates a move target: the node must not be its own parent, and the
     * target must not be one of the node's own descendants (cycle check).
     */
    public void checkMove(DatasetGroupInfoDTO datasetGroupInfoDTO) {
        if (Objects.equals(datasetGroupInfoDTO.getId(), datasetGroupInfoDTO.getPid())) {
            DEException.throwException(Translator.get("i18n_pid_not_eq_id"));
        }
        List<Long> ids = new ArrayList<>();
        getParents(datasetGroupInfoDTO.getPid(), ids);
        if (ids.contains(datasetGroupInfoDTO.getId())) {
            DEException.throwException(Translator.get("i18n_pid_not_eq_id"));
        }
    }
    /** Walks up the folder chain collecting ancestor ids (including pid itself). */
    private void getParents(Long pid, List<Long> ids) {
        CoreDatasetGroup parent = coreDatasetGroupMapper.selectById(pid);// look up the parent folder
        // NOTE(review): NPEs if pid references a deleted node — confirm callers guarantee existence.
        ids.add(parent.getId());
        if (parent.getPid() != null && parent.getPid() != 0) {
            getParents(parent.getPid(), ids);
        }
    }
    /** Walks up the folder chain collecting names, leaf first (caller reverses). */
    private void geFullName(Long pid, List<String> fullName) {
        CoreDatasetGroup parent = coreDatasetGroupMapper.selectById(pid);// look up the parent folder
        fullName.add(parent.getName());
        if (parent.getPid() != null && parent.getPid() != 0) {
            geFullName(parent.getPid(), fullName);
        }
    }
}

View File

@ -0,0 +1,370 @@
package io.dataease.dataset.manage;
import io.dataease.api.chart.dto.ChartExtFilterDTO;
import io.dataease.api.chart.request.ChartExtRequest;
import io.dataease.api.dataset.dto.DatasetTableDTO;
import io.dataease.api.dataset.dto.SqlVariableDetails;
import io.dataease.api.dataset.union.*;
import io.dataease.api.dataset.union.model.SQLObj;
import io.dataease.api.permissions.auth.api.InteractiveAuthApi;
import io.dataease.api.permissions.auth.dto.BusiPerCheckDTO;
import io.dataease.commons.utils.SqlparserUtils;
import io.dataease.constant.AuthEnum;
import io.dataease.dataset.constant.DatasetTableType;
import io.dataease.dataset.dao.auto.mapper.CoreDatasetTableMapper;
import io.dataease.dataset.dto.DatasourceSchemaDTO;
import io.dataease.dataset.utils.DatasetTableTypeConstants;
import io.dataease.dataset.utils.SqlUtils;
import io.dataease.dataset.utils.TableUtils;
import io.dataease.datasource.dao.auto.entity.CoreDatasource;
import io.dataease.datasource.dao.auto.mapper.CoreDatasourceMapper;
import io.dataease.datasource.server.EngineServer;
import io.dataease.dto.dataset.DatasetTableFieldDTO;
import io.dataease.engine.constant.ExtFieldConstant;
import io.dataease.engine.constant.SQLConstants;
import io.dataease.exception.DEException;
import io.dataease.i18n.Translator;
import io.dataease.utils.BeanUtils;
import io.dataease.utils.JsonUtil;
import jakarta.annotation.Resource;
import org.apache.commons.lang3.ObjectUtils;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import org.springframework.util.CollectionUtils;
import java.text.MessageFormat;
import java.util.*;
import java.util.stream.Collectors;
/**
 * Builds the union (join) SQL for a dataset out of its union tree: one SQLObj
 * per table, SELECT columns from the checked fields, and JOIN clauses from the
 * configured union relations. Identifiers are quoted with backticks for the
 * Calcite dialect used by the query engine.
 *
 * @Author Junjun
 */
@Component
public class DatasetSQLManage {
    @Resource
    private CoreDatasetTableMapper coreDatasetTableMapper;
    @Resource
    private DatasetTableFieldManage datasetTableFieldManage;
    @Resource
    private CoreDatasourceMapper coreDatasourceMapper;
    @Resource
    private EngineServer engineServer;
    // Optional dependency: only present with the xpack permission module.
    @Autowired(required = false)
    private InteractiveAuthApi interactiveAuthApi;
    private static Logger logger = LoggerFactory.getLogger(DatasetSQLManage.class);
    /**
     * Extracts from the chart request the SQL-variable parameters that target
     * the given dataset table, copying the filter's value and operator onto
     * each matching parameter. Returns an empty list when nothing matches.
     */
    private List<SqlVariableDetails> filterParameters(ChartExtRequest chartExtRequest, Long datasetTableId) {
        List<SqlVariableDetails> parameters = new ArrayList<>();
        if (chartExtRequest != null && ObjectUtils.isNotEmpty(chartExtRequest.getFilter())) {
            for (ChartExtFilterDTO filterDTO : chartExtRequest.getFilter()) {
                // Filters without a value cannot bind a variable.
                if (CollectionUtils.isEmpty(filterDTO.getValue())) {
                    continue;
                }
                if (ObjectUtils.isNotEmpty(filterDTO.getParameters())) {
                    for (SqlVariableDetails parameter : filterDTO.getParameters()) {
                        if (parameter.getDatasetTableId().equals(datasetTableId)) {
                            parameter.setValue(filterDTO.getValue());
                            parameter.setOperator(filterDTO.getOperator());
                            parameters.add(parameter);
                        }
                    }
                }
            }
        }
        return parameters;
    }
    /**
     * Builds the full union SQL for a dataset definition.
     *
     * @param dataTableInfoDTO dataset definition carrying the union tree
     * @param chartExtRequest  optional chart context supplying SQL-variable values;
     *                         null means "editing from the dataset page"
     * @return map with keys "sql" (the SELECT statement), "field" (checked fields),
     *         "join" (union relations) and "dsMap" (datasource id → schema info);
     *         null when the union tree is empty
     * @throws Exception on datasource/permission problems
     */
    public Map<String, Object> getUnionSQLForEdit(DatasetGroupInfoDTO dataTableInfoDTO, ChartExtRequest chartExtRequest) throws Exception {
        Map<Long, DatasourceSchemaDTO> dsMap = new LinkedHashMap<>();
        List<UnionDTO> union = dataTableInfoDTO.getUnion();
        // All checked fields, i.e. the columns that end up after SELECT.
        Map<String, String[]> checkedInfo = new LinkedHashMap<>();
        List<UnionParamDTO> unionList = new ArrayList<>();
        List<DatasetTableFieldDTO> checkedFields = new ArrayList<>();
        String sql = "";
        if (ObjectUtils.isEmpty(union)) {
            return null;
        }
        // The first entry of the union tree is the root (FROM) table.
        DatasetTableDTO currentDs = union.get(0).getCurrentDs();
        // get datasource and schema,put map
        String tableSchema = putObj2Map(dsMap, currentDs);
        // get table
        DatasetTableInfoDTO infoDTO = JsonUtil.parseObject(currentDs.getInfo(), DatasetTableInfoDTO.class);
        SQLObj tableName = getUnionTable(currentDs, infoDTO, tableSchema, 0, filterParameters(chartExtRequest, currentDs.getId()), chartExtRequest == null);
        for (int i = 0; i < union.size(); i++) {
            UnionDTO unionDTO = union.get(i);
            DatasetTableDTO datasetTable = unionDTO.getCurrentDs();
            DatasetTableInfoDTO tableInfo = JsonUtil.parseObject(datasetTable.getInfo(), DatasetTableInfoDTO.class);
            String schema;
            if (dsMap.containsKey(datasetTable.getDatasourceId())) {
                schema = dsMap.get(datasetTable.getDatasourceId()).getSchemaAlias();
            } else {
                schema = putObj2Map(dsMap, datasetTable);
            }
            // NOTE(review): filterParameters is called with currentDs.getId()
            // (the root table) instead of datasetTable.getId(); getUnionForEdit
            // below uses the per-iteration table id — looks like a slip, confirm.
            SQLObj table = getUnionTable(datasetTable, tableInfo, schema, i, filterParameters(chartExtRequest, currentDs.getId()), chartExtRequest == null);
            // Fields the front end marked as checked for this table.
            List<DatasetTableFieldDTO> fields = unionDTO.getCurrentDsFields();
            fields = fields.stream().filter(DatasetTableFieldDTO::getChecked).collect(Collectors.toList());
            String[] array = fields.stream()
                    .map(f -> {
                        String alias;
                        // Reuse a previously generated alias, otherwise derive one
                        // from the table alias and origin column name.
                        if (StringUtils.isEmpty(f.getDataeaseName())) {
                            alias = TableUtils.fieldNameShort(table.getTableAlias() + "_" + f.getOriginName());
                        } else {
                            alias = f.getDataeaseName();
                        }
                        f.setFieldShortName(alias);
                        f.setDataeaseName(f.getFieldShortName());
                        f.setDatasetTableId(datasetTable.getId());
                        // Only physical columns get backtick-quoted; calculated
                        // fields are expressions and must not be quoted.
                        String prefix = "";
                        if (Objects.equals(f.getExtField(), ExtFieldConstant.EXT_NORMAL)) {
                            prefix = "`";
                        }
                        return table.getTableAlias() + "." + prefix + f.getOriginName() + prefix + " AS " + alias;
                    })
                    .toArray(String[]::new);
            checkedInfo.put(table.getTableAlias(), array);
            checkedFields.addAll(fields);
            // Recurse into the children to collect their fields and union relations.
            if (!CollectionUtils.isEmpty(unionDTO.getChildrenDs())) {
                getUnionForEdit(datasetTable, table, unionDTO.getChildrenDs(), checkedInfo, unionList, checkedFields, dsMap, chartExtRequest);
            }
        }
        // build sql
        if (!CollectionUtils.isEmpty(unionList)) {
            // field
            StringBuilder field = new StringBuilder();
            for (Map.Entry<String, String[]> next : checkedInfo.entrySet()) {
                if (next.getValue().length > 0) {
                    field.append(StringUtils.join(next.getValue(), ",")).append(",");
                }
            }
            // Strip the trailing comma left by the append loop.
            String f = subPrefixSuffixChar(field.toString());
            // join
            StringBuilder join = new StringBuilder();
            for (UnionParamDTO unionParamDTO : unionList) {
                // get join type
                String joinType = convertUnionTypeToSQL(unionParamDTO.getUnionType());
                SQLObj parentSQLObj = unionParamDTO.getParentSQLObj();
                SQLObj currentSQLObj = unionParamDTO.getCurrentSQLObj();
                String ts = "";
                String tablePrefix = "";
                // Sub-select tables carry an empty schema and must not be quoted.
                if (ObjectUtils.isNotEmpty(currentSQLObj.getTableSchema())) {
                    ts = currentSQLObj.getTableSchema() + ".";
                    tablePrefix = "`";
                }
                // build join
                join.append(" ").append(joinType).append(" ")
                        .append(ts)
                        .append(tablePrefix + currentSQLObj.getTableName() + tablePrefix)
                        .append(" ").append(currentSQLObj.getTableAlias()).append(" ")
                        .append(" ON ");
                if (unionParamDTO.getUnionFields().size() == 0) {
                    DEException.throwException(Translator.get("i18n_union_field_can_not_empty"));
                }
                for (int i = 0; i < unionParamDTO.getUnionFields().size(); i++) {
                    UnionItemDTO unionItemDTO = unionParamDTO.getUnionFields().get(i);
                    // Resolve both sides of the join condition by field id;
                    // the first group determines the dataset table.
                    DatasetTableFieldDTO parentField = unionItemDTO.getParentField();
                    DatasetTableFieldDTO currentField = unionItemDTO.getCurrentField();
                    String pPrefix = "";
                    if (Objects.equals(parentField.getExtField(), ExtFieldConstant.EXT_NORMAL)) {
                        pPrefix = "`";
                    }
                    String cPrefix = "";
                    if (Objects.equals(currentField.getExtField(), ExtFieldConstant.EXT_NORMAL)) {
                        cPrefix = "`";
                    }
                    join.append(parentSQLObj.getTableAlias()).append(".")
                            .append(pPrefix + parentField.getOriginName() + pPrefix)
                            .append(" = ")
                            .append(currentSQLObj.getTableAlias()).append(".")
                            .append(cPrefix + currentField.getOriginName() + cPrefix);
                    // AND between conditions, none after the last.
                    if (i < unionParamDTO.getUnionFields().size() - 1) {
                        join.append(" AND ");
                    }
                }
            }
            if (StringUtils.isEmpty(f)) {
                DEException.throwException(Translator.get("i18n_union_ds_no_checked"));
            }
            sql = MessageFormat.format("SELECT {0} FROM {1}", f, TableUtils.getTableAndAlias(tableName)) + join.toString();
        } else {
            // Single-table dataset: no joins, select straight from the root table.
            String f = StringUtils.join(checkedInfo.get(tableName.getTableAlias()), ",");
            if (StringUtils.isEmpty(f)) {
                DEException.throwException(Translator.get("i18n_union_ds_no_checked"));
            }
            sql = MessageFormat.format("SELECT {0} FROM {1}", f, TableUtils.getTableAndAlias(tableName));
        }
        logger.info("calcite origin sql: " + sql);
        Map<String, Object> map = new HashMap<>();
        map.put("sql", sql);
        map.put("field", checkedFields);
        map.put("join", unionList);
        map.put("dsMap", dsMap);
        return map;
    }
    /**
     * Recursively collects the checked fields and union relations of all child
     * tables, wiring each child's UnionParamDTO to its parent/current SQLObj.
     */
    private void getUnionForEdit(DatasetTableDTO parentTable, SQLObj parentSQLObj,
                                 List<UnionDTO> childrenDs, Map<String, String[]> checkedInfo,
                                 List<UnionParamDTO> unionList, List<DatasetTableFieldDTO> checkedFields,
                                 Map<Long, DatasourceSchemaDTO> dsMap, ChartExtRequest chartExtRequest) throws Exception {
        for (int i = 0; i < childrenDs.size(); i++) {
            // Alias index continues after every union registered so far,
            // keeping table aliases unique across the whole tree.
            int index = unionList.size() + 1;
            UnionDTO unionDTO = childrenDs.get(i);
            DatasetTableDTO datasetTable = unionDTO.getCurrentDs();
            DatasetTableInfoDTO tableInfo = JsonUtil.parseObject(datasetTable.getInfo(), DatasetTableInfoDTO.class);
            String schema;
            if (dsMap.containsKey(datasetTable.getDatasourceId())) {
                schema = dsMap.get(datasetTable.getDatasourceId()).getSchemaAlias();
            } else {
                schema = putObj2Map(dsMap, datasetTable);
            }
            SQLObj table = getUnionTable(datasetTable, tableInfo, schema, index, filterParameters(chartExtRequest, datasetTable.getId()), chartExtRequest == null);
            List<DatasetTableFieldDTO> fields = unionDTO.getCurrentDsFields();
            fields = fields.stream().filter(DatasetTableFieldDTO::getChecked).collect(Collectors.toList());
            String[] array = fields.stream()
                    .map(f -> {
                        String alias;
                        if (StringUtils.isEmpty(f.getDataeaseName())) {
                            alias = TableUtils.fieldNameShort(table.getTableAlias() + "_" + f.getOriginName());
                        } else {
                            alias = f.getDataeaseName();
                        }
                        f.setFieldShortName(alias);
                        f.setDataeaseName(f.getFieldShortName());
                        f.setDatasetTableId(datasetTable.getId());
                        // Quote physical columns only (see getUnionSQLForEdit).
                        String prefix = "";
                        if (Objects.equals(f.getExtField(), ExtFieldConstant.EXT_NORMAL)) {
                            prefix = "`";
                        }
                        return table.getTableAlias() + "." + prefix + f.getOriginName() + prefix + " AS " + alias;
                    })
                    .toArray(String[]::new);
            checkedInfo.put(table.getTableAlias(), array);
            checkedFields.addAll(fields);
            UnionParamDTO unionToParent = unionDTO.getUnionToParent();
            // Attach both sides of the relation: parent and current table info.
            unionToParent.setParentDs(parentTable);
            unionToParent.setParentSQLObj(parentSQLObj);
            unionToParent.setCurrentDs(datasetTable);
            unionToParent.setCurrentSQLObj(table);
            unionList.add(unionToParent);
            if (!CollectionUtils.isEmpty(unionDTO.getChildrenDs())) {
                getUnionForEdit(datasetTable, table, unionDTO.getChildrenDs(), checkedInfo, unionList, checkedFields, dsMap, chartExtRequest);
            }
        }
    }
    /** Strips any leading and trailing commas from the built field list. */
    public String subPrefixSuffixChar(String str) {
        while (StringUtils.startsWith(str, ",")) {
            str = str.substring(1, str.length());
        }
        while (StringUtils.endsWith(str, ",")) {
            str = str.substring(0, str.length() - 1);
        }
        return str;
    }
    /**
     * Maps a union-type code (cardinality form "1:1"/"1:N"/"N:1"/"N:N" or the
     * keyword form "inner"/"left"/"right"/"full") to a SQL JOIN keyword;
     * unknown values fall back to INNER JOIN.
     */
    private String convertUnionTypeToSQL(String unionType) {
        switch (unionType) {
            case "1:1":
            case "inner":
                return " INNER JOIN ";
            case "1:N":
            case "left":
                return " LEFT JOIN ";
            case "N:1":
            case "right":
                return " RIGHT JOIN ";
            case "N:N":
            case "full":
                return " FULL JOIN ";
            default:
                return " INNER JOIN ";
        }
    }
    /**
     * Builds the SQLObj for one union table. DB tables reference schema.table;
     * SQL tables become an inline sub-select (variables substituted, schema
     * prefixed); excel/api tables behave like DB tables in the engine schema.
     *
     * @param index         position used to build a unique table alias
     * @param parameters    SQL-variable values coming from chart filters
     * @param isFromDataSet true when editing from the dataset page (no chart context)
     */
    private SQLObj getUnionTable(DatasetTableDTO currentDs, DatasetTableInfoDTO infoDTO, String tableSchema, int index, List<SqlVariableDetails> parameters, boolean isFromDataSet) {
        SQLObj tableObj;
        String tableAlias = String.format(SQLConstants.TABLE_ALIAS_PREFIX, index);
        if (StringUtils.equalsIgnoreCase(currentDs.getType(), DatasetTableTypeConstants.DATASET_TABLE_DB)) {
            tableObj = SQLObj.builder().tableSchema(tableSchema).tableName(infoDTO.getTable()).tableAlias(tableAlias).build();
        } else if (StringUtils.equalsIgnoreCase(currentDs.getType(), DatasetTableTypeConstants.DATASET_TABLE_SQL)) {
            // The stored SQL is Base64-encoded; decode, then substitute variable
            // defaults/values before wrapping it as a sub-select.
            String sql = SqlparserUtils.handleVariableDefaultValue(new String(Base64.getDecoder().decode(infoDTO.getSql())), currentDs.getSqlVariableDetails(), false, isFromDataSet, parameters);
            // add table schema
            sql = SqlUtils.addSchema(sql, tableSchema);
            tableObj = SQLObj.builder().tableSchema("").tableName("(" + sql + ")").tableAlias(tableAlias).build();
        } else {
            // excel,api
            tableObj = SQLObj.builder().tableSchema(tableSchema).tableName(infoDTO.getTable()).tableAlias(tableAlias).build();
        }
        return tableObj;
    }
    /**
     * Resolves the datasource for a table, checks read permission (when the
     * xpack auth module is present), registers the datasource in dsMap and
     * returns its schema alias. Excel/api-backed sources are redirected to the
     * internal engine datasource.
     */
    private String putObj2Map(Map<Long, DatasourceSchemaDTO> dsMap, DatasetTableDTO ds) throws Exception {
        // Verify datasource read permission by datasource id.
        if (interactiveAuthApi != null) {
            BusiPerCheckDTO dto = new BusiPerCheckDTO();
            dto.setId(ds.getDatasourceId());
            dto.setAuthEnum(AuthEnum.READ);
            try {
                interactiveAuthApi.checkAuth(dto);
            } catch (Exception e) {
                // Translate any auth failure into a uniform "no permission" error.
                DEException.throwException(Translator.get("i18n_no_datasource_permission"));
            }
        }
        String schemaAlias;
        if (StringUtils.equalsIgnoreCase(ds.getType(), DatasetTableType.DB) || StringUtils.equalsIgnoreCase(ds.getType(), DatasetTableType.SQL)) {
            CoreDatasource coreDatasource = coreDatasourceMapper.selectById(ds.getDatasourceId());
            if (coreDatasource == null) {
                DEException.throwException(Translator.get("i18n_dataset_ds_error") + ",ID:" + ds.getDatasourceId());
            }
            // Excel/api data actually lives in the engine database.
            if (StringUtils.equalsIgnoreCase("excel", coreDatasource.getType()) || StringUtils.equalsIgnoreCase("api", coreDatasource.getType())) {
                coreDatasource = engineServer.getDeEngine();
            }
            schemaAlias = String.format(SQLConstants.SCHEMA, coreDatasource.getId());
            if (!dsMap.containsKey(coreDatasource.getId())) {
                DatasourceSchemaDTO datasourceSchemaDTO = new DatasourceSchemaDTO();
                BeanUtils.copyBean(datasourceSchemaDTO, coreDatasource);
                datasourceSchemaDTO.setSchemaAlias(schemaAlias);
                dsMap.put(coreDatasource.getId(), datasourceSchemaDTO);
            }
        } else {
            // Non-DB/SQL table types always use the engine datasource.
            CoreDatasource coreDatasource = engineServer.getDeEngine();
            schemaAlias = String.format(SQLConstants.SCHEMA, coreDatasource.getId());
            if (!dsMap.containsKey(coreDatasource.getId())) {
                DatasourceSchemaDTO datasourceSchemaDTO = new DatasourceSchemaDTO();
                BeanUtils.copyBean(datasourceSchemaDTO, coreDatasource);
                datasourceSchemaDTO.setSchemaAlias(schemaAlias);
                dsMap.put(coreDatasource.getId(), datasourceSchemaDTO);
            }
        }
        return schemaAlias;
    }
}

View File

@ -0,0 +1,204 @@
package io.dataease.dataset.manage;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import io.dataease.dataset.dao.auto.entity.CoreDatasetTableField;
import io.dataease.dataset.dao.auto.mapper.CoreDatasetGroupMapper;
import io.dataease.dataset.dao.auto.mapper.CoreDatasetTableFieldMapper;
import io.dataease.dataset.utils.TableUtils;
import io.dataease.datasource.provider.CalciteProvider;
import io.dataease.dto.dataset.DatasetTableFieldDTO;
import io.dataease.exception.DEException;
import io.dataease.i18n.Translator;
import io.dataease.utils.BeanUtils;
import io.dataease.utils.IDUtils;
import jakarta.annotation.Resource;
import org.apache.commons.lang3.ObjectUtils;
import org.apache.commons.lang3.StringUtils;
import org.springframework.stereotype.Component;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.util.CollectionUtils;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
/**
 * CRUD manage layer for dataset table fields (table core_dataset_table_field).
 * Handles both dataset-owned fields and chart-level calculated fields.
 *
 * @Author Junjun
 */
@Component
@Transactional
public class DatasetTableFieldManage {
    @Resource
    private CoreDatasetTableFieldMapper coreDatasetTableFieldMapper;
    @Resource
    private PermissionManage permissionManage;
    @Resource
    private CoreDatasetGroupMapper coreDatasetGroupMapper;
    @Resource
    private CalciteProvider calciteProvider;

    /**
     * Upsert an entity: insert with a freshly generated snowflake id when the
     * id is empty, otherwise update by id. Name length is validated first.
     */
    public void save(CoreDatasetTableField coreDatasetTableField) {
        checkNameLength(coreDatasetTableField.getName());
        if (ObjectUtils.isEmpty(coreDatasetTableField.getId())) {
            coreDatasetTableField.setId(IDUtils.snowID());
            coreDatasetTableFieldMapper.insert(coreDatasetTableField);
        } else {
            coreDatasetTableFieldMapper.updateById(coreDatasetTableField);
        }
    }

    /**
     * Save a chart-level calculated field, rejecting a duplicate field name
     * within the same chart. On update, the record itself is excluded from
     * the duplicate check. The dataset group id is cleared because the field
     * belongs to the chart, not to a dataset group.
     *
     * @throws io.dataease.exception.DEException when the name is duplicated
     */
    public DatasetTableFieldDTO chartFieldSave(DatasetTableFieldDTO datasetTableFieldDTO) {
        checkNameLength(datasetTableFieldDTO.getName());
        CoreDatasetTableField coreDatasetTableField = coreDatasetTableFieldMapper.selectById(datasetTableFieldDTO.getId());
        QueryWrapper<CoreDatasetTableField> wrapper = new QueryWrapper<>();
        wrapper.eq("name", datasetTableFieldDTO.getName());
        wrapper.eq("chart_id", datasetTableFieldDTO.getChartId());
        if (ObjectUtils.isNotEmpty(coreDatasetTableField)) {
            wrapper.ne("id", datasetTableFieldDTO.getId());
        }
        List<CoreDatasetTableField> fields = coreDatasetTableFieldMapper.selectList(wrapper);
        if (ObjectUtils.isNotEmpty(fields)) {
            DEException.throwException(Translator.get("i18n_field_name_duplicated"));
        }
        datasetTableFieldDTO.setDatasetGroupId(null);
        return save(datasetTableFieldDTO);
    }

    /**
     * Used when saving a dataset. Upsert keyed on the DTO id: the decision to
     * insert or update is made by looking the id up first, so a
     * caller-supplied id not present in the table results in an insert with
     * that id. A missing dataease name is derived from the id (short md5
     * form) and mirrored into the short-name column.
     *
     * @param datasetTableFieldDTO field to persist
     * @return the same DTO instance that was passed in
     */
    public DatasetTableFieldDTO save(DatasetTableFieldDTO datasetTableFieldDTO) {
        checkNameLength(datasetTableFieldDTO.getName());
        CoreDatasetTableField coreDatasetTableField = coreDatasetTableFieldMapper.selectById(datasetTableFieldDTO.getId());
        CoreDatasetTableField record = new CoreDatasetTableField();
        BeanUtils.copyBean(record, datasetTableFieldDTO);
        if (ObjectUtils.isEmpty(record.getDataeaseName())) {
            String n = TableUtils.fieldNameShort(record.getId() + "");
            record.setFieldShortName(n);
            record.setDataeaseName(n);
        }
        if (ObjectUtils.isEmpty(coreDatasetTableField)) {
            coreDatasetTableFieldMapper.insert(record);
        } else {
            coreDatasetTableFieldMapper.updateById(record);
        }
        return datasetTableFieldDTO;
    }

    /**
     * Plain upsert without name validation or dataease-name derivation:
     * insert with a new snowflake id when the DTO has none, otherwise update.
     */
    public DatasetTableFieldDTO saveField(DatasetTableFieldDTO datasetTableFieldDTO) {
        CoreDatasetTableField record = new CoreDatasetTableField();
        if (ObjectUtils.isEmpty(datasetTableFieldDTO.getId())) {
            datasetTableFieldDTO.setId(IDUtils.snowID());
            BeanUtils.copyBean(record, datasetTableFieldDTO);
            coreDatasetTableFieldMapper.insert(record);
        } else {
            BeanUtils.copyBean(record, datasetTableFieldDTO);
            coreDatasetTableFieldMapper.updateById(record);
        }
        return datasetTableFieldDTO;
    }

    /** All calculated fields attached to the given chart. */
    public List<DatasetTableFieldDTO> getChartCalcFields(Long chartId) {
        QueryWrapper<CoreDatasetTableField> wrapper = new QueryWrapper<>();
        wrapper.eq("chart_id", chartId);
        return transDTO(coreDatasetTableFieldMapper.selectList(wrapper));
    }

    /** Delete a single field by primary key. */
    public void deleteById(Long id) {
        coreDatasetTableFieldMapper.deleteById(id);
    }

    /**
     * After a dataset table update, drop the table's fields whose ids are NOT
     * in {@code fieldIds} (i.e. fields removed by the edit). A null/empty id
     * list is a no-op to avoid wiping all fields by accident.
     */
    public void deleteByDatasetTableUpdate(Long datasetTableId, List<Long> fieldIds) {
        if (!CollectionUtils.isEmpty(fieldIds)) {
            QueryWrapper<CoreDatasetTableField> wrapper = new QueryWrapper<>();
            wrapper.eq("dataset_table_id", datasetTableId);
            wrapper.notIn("id", fieldIds);
            coreDatasetTableFieldMapper.delete(wrapper);
        }
    }

    /**
     * After a dataset group update, drop the group's fields whose ids are NOT
     * in {@code fieldIds}. A null/empty id list is a no-op.
     */
    public void deleteByDatasetGroupUpdate(Long datasetGroupId, List<Long> fieldIds) {
        if (!CollectionUtils.isEmpty(fieldIds)) {
            QueryWrapper<CoreDatasetTableField> wrapper = new QueryWrapper<>();
            wrapper.eq("dataset_group_id", datasetGroupId);
            wrapper.notIn("id", fieldIds);
            coreDatasetTableFieldMapper.delete(wrapper);
        }
    }

    /** Delete every field belonging to a deleted dataset group. */
    public void deleteByDatasetGroupDelete(Long datasetGroupId) {
        QueryWrapper<CoreDatasetTableField> wrapper = new QueryWrapper<>();
        wrapper.eq("dataset_group_id", datasetGroupId);
        coreDatasetTableFieldMapper.delete(wrapper);
    }

    /** Delete every calculated field belonging to a chart. */
    public void deleteByChartId(Long chartId) {
        QueryWrapper<CoreDatasetTableField> wrapper = new QueryWrapper<>();
        wrapper.eq("chart_id", chartId);
        coreDatasetTableFieldMapper.delete(wrapper);
    }

    /** Fields of a single dataset table. */
    public List<DatasetTableFieldDTO> selectByDatasetTableId(Long id) {
        QueryWrapper<CoreDatasetTableField> wrapper = new QueryWrapper<>();
        wrapper.eq("dataset_table_id", id);
        return transDTO(coreDatasetTableFieldMapper.selectList(wrapper));
    }

    /** Checked (visible) fields of a dataset group. */
    public List<DatasetTableFieldDTO> selectByDatasetGroupId(Long id) {
        QueryWrapper<CoreDatasetTableField> wrapper = new QueryWrapper<>();
        wrapper.eq("dataset_group_id", id);
        wrapper.eq("checked", true);
        return transDTO(coreDatasetTableFieldMapper.selectList(wrapper));
    }

    /** Fields matching the given primary keys. */
    public List<DatasetTableFieldDTO> selectByFieldIds(List<Long> ids) {
        QueryWrapper<CoreDatasetTableField> wrapper = new QueryWrapper<>();
        wrapper.in("id", ids);
        return transDTO(coreDatasetTableFieldMapper.selectList(wrapper));
    }

    /** Single field as a DTO, or null when the id does not exist. */
    public DatasetTableFieldDTO selectById(Long id) {
        CoreDatasetTableField coreDatasetTableField = coreDatasetTableFieldMapper.selectById(id);
        if (coreDatasetTableField == null) return null;
        DatasetTableFieldDTO dto = new DatasetTableFieldDTO();
        BeanUtils.copyBean(dto, coreDatasetTableField);
        return dto;
    }

    /**
     * Return the checked fields of a dataset group split into dimension and
     * quota lists, keyed "dimensionList" / "quotaList" (insertion order
     * preserved for a stable JSON payload).
     */
    public Map<String, List<DatasetTableFieldDTO>> listByDQ(Long id) {
        QueryWrapper<CoreDatasetTableField> wrapper = new QueryWrapper<>();
        wrapper.eq("dataset_group_id", id);
        wrapper.eq("checked", true);
        List<DatasetTableFieldDTO> list = transDTO(coreDatasetTableFieldMapper.selectList(wrapper));
        List<DatasetTableFieldDTO> dimensionList = list.stream().filter(ele -> StringUtils.equalsIgnoreCase(ele.getGroupType(), "d")).collect(Collectors.toList());
        List<DatasetTableFieldDTO> quotaList = list.stream().filter(ele -> StringUtils.equalsIgnoreCase(ele.getGroupType(), "q")).collect(Collectors.toList());
        Map<String, List<DatasetTableFieldDTO>> map = new LinkedHashMap<>();
        map.put("dimensionList", dimensionList);
        map.put("quotaList", quotaList);
        return map;
    }

    /**
     * Convert entities to DTOs. Null elements are carried through as null.
     * Fix: the null check now runs before the DTO is allocated (previously a
     * DTO was created and immediately discarded for null rows).
     */
    public List<DatasetTableFieldDTO> transDTO(List<CoreDatasetTableField> list) {
        return list.stream().map(ele -> {
            if (ele == null) return null;
            DatasetTableFieldDTO dto = new DatasetTableFieldDTO();
            BeanUtils.copyBean(dto, ele);
            return dto;
        }).collect(Collectors.toList());
    }

    /** Reject names longer than 100 characters (DB column limit). */
    private void checkNameLength(String name) {
        if (name != null && name.length() > 100) {
            DEException.throwException(Translator.get("i18n_name_limit_100"));
        }
    }
}

View File

@ -0,0 +1,80 @@
package io.dataease.dataset.manage;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import io.dataease.api.dataset.dto.DatasetTableDTO;
import io.dataease.dataset.dao.auto.entity.CoreDatasetTable;
import io.dataease.dataset.dao.auto.mapper.CoreDatasetTableMapper;
import io.dataease.exception.DEException;
import io.dataease.i18n.Translator;
import io.dataease.utils.BeanUtils;
import io.dataease.utils.IDUtils;
import jakarta.annotation.Resource;
import org.apache.commons.lang3.ObjectUtils;
import org.springframework.stereotype.Component;
import org.springframework.util.CollectionUtils;
import java.util.List;
/**
 * CRUD manage layer for dataset tables (table core_dataset_table).
 *
 * @Author Junjun
 */
@Component
public class DatasetTableManage {
    @Resource
    private CoreDatasetTableMapper coreDatasetTableMapper;

    /**
     * Upsert an entity: insert with a freshly generated snowflake id when the
     * id is empty, otherwise update by id. Both the display name and the
     * physical table name are length-checked first.
     */
    public void save(CoreDatasetTable coreDatasetTable) {
        checkNameLength(coreDatasetTable.getName());
        checkNameLength(coreDatasetTable.getTableName());
        if (ObjectUtils.isEmpty(coreDatasetTable.getId())) {
            coreDatasetTable.setId(IDUtils.snowID());
            coreDatasetTableMapper.insert(coreDatasetTable);
        } else {
            coreDatasetTableMapper.updateById(coreDatasetTable);
        }
    }

    /**
     * Upsert keyed on the DTO id: existence is decided by a lookup, so a
     * caller-supplied id that is absent from the table is inserted as-is
     * (unlike the entity overload above, no id is generated here).
     */
    public void save(DatasetTableDTO currentDs) {
        checkNameLength(currentDs.getName());
        checkNameLength(currentDs.getTableName());
        CoreDatasetTable coreDatasetTable = coreDatasetTableMapper.selectById(currentDs.getId());
        CoreDatasetTable record = new CoreDatasetTable();
        BeanUtils.copyBean(record, currentDs);
        if (ObjectUtils.isEmpty(coreDatasetTable)) {
            coreDatasetTableMapper.insert(record);
        } else {
            coreDatasetTableMapper.updateById(record);
        }
    }

    /** All tables belonging to the given dataset group. */
    public List<CoreDatasetTable> selectByDatasetGroupId(Long datasetGroupId) {
        QueryWrapper<CoreDatasetTable> wrapper = new QueryWrapper<>();
        wrapper.eq("dataset_group_id", datasetGroupId);
        return coreDatasetTableMapper.selectList(wrapper);
    }

    /** Single table by primary key, or null when absent. */
    public CoreDatasetTable selectById(Long id) {
        return coreDatasetTableMapper.selectById(id);
    }

    /**
     * After a dataset group update, drop the group's tables whose ids are NOT
     * in {@code ids} (tables removed by the edit). A null/empty id list is a
     * no-op to avoid wiping all tables by accident.
     */
    public void deleteByDatasetGroupUpdate(Long datasetGroupId, List<Long> ids) {
        if (!CollectionUtils.isEmpty(ids)) {
            QueryWrapper<CoreDatasetTable> wrapper = new QueryWrapper<>();
            wrapper.eq("dataset_group_id", datasetGroupId);
            wrapper.notIn("id", ids);
            coreDatasetTableMapper.delete(wrapper);
        }
    }

    /** Delete every table belonging to a deleted dataset group. */
    public void deleteByDatasetGroupDelete(Long datasetGroupId) {
        QueryWrapper<CoreDatasetTable> wrapper = new QueryWrapper<>();
        wrapper.eq("dataset_group_id", datasetGroupId);
        coreDatasetTableMapper.delete(wrapper);
    }

    /** Reject names longer than 100 characters (DB column limit). */
    private void checkNameLength(String name) {
        if (name != null && name.length() > 100) {
            DEException.throwException(Translator.get("i18n_name_limit_100"));
        }
    }
}

View File

@ -0,0 +1,59 @@
package io.dataease.dataset.manage;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import io.dataease.api.dataset.dto.SqlLogDTO;
import io.dataease.dataset.dao.auto.entity.CoreDatasetTableSqlLog;
import io.dataease.dataset.dao.auto.mapper.CoreDatasetTableSqlLogMapper;
import io.dataease.utils.BeanUtils;
import jakarta.annotation.Resource;
import org.apache.commons.lang3.ObjectUtils;
import org.springframework.stereotype.Component;
import org.springframework.transaction.annotation.Transactional;
import java.util.List;
import java.util.UUID;
import java.util.stream.Collectors;
/**
 * Manage layer for the SQL execution log of SQL-defined dataset tables
 * (table core_dataset_table_sql_log).
 *
 * @Author Junjun
 */
@Component
@Transactional(rollbackFor = Exception.class)
public class DatasetTableSqlLogManage {
    @Resource
    private CoreDatasetTableSqlLogMapper coreDatasetTableSqlLogMapper;

    /**
     * Upsert a log entry: insert with a random UUID id when the DTO carries
     * no id, otherwise update by id. A null DTO is silently ignored.
     */
    public void save(SqlLogDTO dto) {
        if (dto == null) {
            return;
        }
        CoreDatasetTableSqlLog coreDatasetTableSqlLog = new CoreDatasetTableSqlLog();
        BeanUtils.copyBean(coreDatasetTableSqlLog, dto);
        if (ObjectUtils.isEmpty(coreDatasetTableSqlLog.getId())) {
            coreDatasetTableSqlLog.setId(UUID.randomUUID().toString());
            coreDatasetTableSqlLogMapper.insert(coreDatasetTableSqlLog);
        } else {
            coreDatasetTableSqlLogMapper.updateById(coreDatasetTableSqlLog);
        }
    }

    /**
     * All log entries for the table referenced by {@code dto.tableId}.
     *
     * NOTE(review): returns null (not an empty list) when the DTO or its
     * table id is empty; callers appear to depend on that — confirm before
     * changing it to an empty list.
     */
    public List<SqlLogDTO> listByTableId(SqlLogDTO dto) {
        if (dto == null || ObjectUtils.isEmpty(dto.getTableId())) {
            return null;
        }
        QueryWrapper<CoreDatasetTableSqlLog> wrapper = new QueryWrapper<>();
        wrapper.eq("table_id", dto.getTableId());
        List<CoreDatasetTableSqlLog> coreDatasetTableSqlLogs = coreDatasetTableSqlLogMapper.selectList(wrapper);
        return coreDatasetTableSqlLogs.stream().map(ele -> {
            SqlLogDTO s = new SqlLogDTO();
            BeanUtils.copyBean(s, ele);
            return s;
        }).collect(Collectors.toList());
    }

    /** Delete every log entry for the given table id. */
    public void deleteByTableId(String id) {
        QueryWrapper<CoreDatasetTableSqlLog> wrapper = new QueryWrapper<>();
        wrapper.eq("table_id", id);
        coreDatasetTableSqlLogMapper.delete(wrapper);
    }
}

View File

@ -0,0 +1,235 @@
package io.dataease.dataset.manage;
import com.fasterxml.jackson.core.type.TypeReference;
import io.dataease.api.chart.dto.ColumnPermissionItem;
import io.dataease.api.chart.dto.ColumnPermissions;
import io.dataease.api.permissions.dataset.api.ColumnPermissionsApi;
import io.dataease.api.permissions.dataset.api.RowPermissionsApi;
import io.dataease.api.permissions.dataset.dto.*;
import io.dataease.api.permissions.user.vo.UserFormVO;
import io.dataease.constant.ColumnPermissionConstants;
import io.dataease.dto.dataset.DatasetTableFieldDTO;
import io.dataease.utils.AuthUtils;
import io.dataease.utils.JsonUtil;
import jakarta.annotation.Resource;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.ObjectUtils;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
/**
 * Resolves dataset row/column permission rules for a user by combining
 * user-scoped, role-scoped and system-parameter rules from the optional
 * permissions module. When that module is absent the optional APIs are not
 * wired and every lookup degrades to "no restrictions".
 */
@Service
public class PermissionManage {
    // Optional xpack APIs: only injected when the permissions module is on
    // the classpath, hence required = false and the null checks below.
    @Autowired(required = false)
    private RowPermissionsApi rowPermissionsApi;
    @Autowired(required = false)
    private ColumnPermissionsApi columnPermissionsApi = null;
    @Resource
    private DatasetTableFieldManage datasetTableFieldManage;

    private RowPermissionsApi getRowPermissionsApi() {
        return rowPermissionsApi;
    }

    private ColumnPermissionsApi getColumnPermissionsApi() {
        return columnPermissionsApi;
    }

    /**
     * Filter a field list by column permissions. A field restricted with the
     * Desensitization option is kept but registered in
     * {@code desensitizationList} (keyed by dataease name) so its values can
     * be masked later; any other restriction removes the field. User-scoped
     * rules take precedence over role-scoped rules for the same field.
     *
     * @param fields              candidate fields
     * @param desensitizationList out-param collecting fields to desensitize
     * @param datasetTableId      dataset the fields belong to
     * @param user                user id, or null for the current user
     * @return the visible fields
     */
    public List<DatasetTableFieldDTO> filterColumnPermissions(List<DatasetTableFieldDTO> fields, Map<String, ColumnPermissionItem> desensitizationList, Long datasetTableId, Long user) {
        List<DatasetTableFieldDTO> result = new ArrayList<>();
        List<ColumnPermissionItem> userColumnPermissionItems = new ArrayList<>();
        List<ColumnPermissionItem> roleColumnPermissionItems = new ArrayList<>();
        for (DataSetColumnPermissionsDTO dataSetColumnPermissionsDTO : columnPermissions(datasetTableId, user)) {
            ColumnPermissions columnPermissions = JsonUtil.parseObject(dataSetColumnPermissionsDTO.getPermissions(), ColumnPermissions.class);
            if (!columnPermissions.getEnable()) {
                continue;
            }
            if (dataSetColumnPermissionsDTO.getAuthTargetType().equalsIgnoreCase("user")) {
                userColumnPermissionItems.addAll(columnPermissions.getColumns().stream().filter(columnPermissionItem -> columnPermissionItem.getSelected()).collect(Collectors.toList()));
            }
            if (dataSetColumnPermissionsDTO.getAuthTargetType().equalsIgnoreCase("role")) {
                roleColumnPermissionItems.addAll(columnPermissions.getColumns().stream().filter(columnPermissionItem -> columnPermissionItem.getSelected()).collect(Collectors.toList()));
            }
        }
        fields.forEach(field -> {
            List<ColumnPermissionItem> fieldUserColumnPermissionItems = userColumnPermissionItems.stream().filter(columnPermissionItem -> columnPermissionItem.getId().equals(field.getId())).collect(Collectors.toList());
            List<ColumnPermissionItem> fieldRoleColumnPermissionItems = roleColumnPermissionItems.stream().filter(columnPermissionItem -> columnPermissionItem.getId().equals(field.getId())).collect(Collectors.toList());
            if (CollectionUtils.isNotEmpty(fieldUserColumnPermissionItems)) {
                if (fieldUserColumnPermissionItems.stream().map(ColumnPermissionItem::getOpt).collect(Collectors.toList()).contains(ColumnPermissionConstants.Desensitization)) {
                    desensitizationList.put(field.getDataeaseName(), fieldUserColumnPermissionItems.get(0));
                    result.add(field);
                }
                return;
            }
            if (CollectionUtils.isNotEmpty(fieldRoleColumnPermissionItems)) {
                if (fieldRoleColumnPermissionItems.stream().map(ColumnPermissionItem::getOpt).collect(Collectors.toList()).contains(ColumnPermissionConstants.Desensitization)) {
                    desensitizationList.put(field.getDataeaseName(), fieldRoleColumnPermissionItems.get(0));
                    result.add(field);
                }
                return;
            }
            // No rule touches this field: keep it untouched.
            result.add(field);
        });
        return result;
    }

    /**
     * Collect the column permission rules that apply to the given user on the
     * given dataset: the user's own rules plus the rules of roles the user
     * holds that are bound to the dataset, excluding role rules that
     * whitelist the user. System admins and a missing permissions module
     * yield an empty list (no restrictions).
     */
    private List<DataSetColumnPermissionsDTO> columnPermissions(Long datasetId, Long userId) {
        List<DataSetColumnPermissionsDTO> datasetColumnPermissions = new ArrayList<>();
        userId = userId != null ? userId : AuthUtils.getUser().getUserId();
        if (getRowPermissionsApi() == null || getColumnPermissionsApi() == null) {
            return new ArrayList<>();
        }
        if (AuthUtils.isSysAdmin(userId)) {
            return new ArrayList<>();
        }
        DataSetColumnPermissionsDTO dataSetColumnPermissionsDTO = new DataSetColumnPermissionsDTO();
        dataSetColumnPermissionsDTO.setDatasetId(datasetId);
        dataSetColumnPermissionsDTO.setAuthTargetIds(Collections.singletonList(userId));
        dataSetColumnPermissionsDTO.setAuthTargetType("user");
        List<DataSetColumnPermissionsDTO> dataSetColumnPermissionsDTOS = getColumnPermissionsApi().list(dataSetColumnPermissionsDTO);
        if (dataSetColumnPermissionsDTOS != null && CollectionUtils.isNotEmpty(dataSetColumnPermissionsDTOS)) {
            datasetColumnPermissions.addAll(dataSetColumnPermissionsDTOS);
        }
        List<Long> roleIds = getRowPermissionsApi().getUserById(userId).getRoleIds().stream().map(x -> Long.valueOf(x)).collect(Collectors.toList());
        if (CollectionUtils.isNotEmpty(roleIds)) {
            // Keep only the user's roles that are actually bound to this dataset.
            List<Item> items = (List<Item>) getRowPermissionsApi().authObjs(datasetId, "role");
            roleIds = roleIds.stream().filter(id -> {
                return items.stream().map(Item::getId).collect(Collectors.toList()).contains(id);
            }).collect(Collectors.toList());
            if (CollectionUtils.isNotEmpty(roleIds)) {
                dataSetColumnPermissionsDTO.setAuthTargetIds(roleIds);
                dataSetColumnPermissionsDTO.setAuthTargetType("role");
                List<DataSetColumnPermissionsDTO> roleColumnPermissionsDTOS = new ArrayList<>();
                for (DataSetColumnPermissionsDTO columnPermissionsDTO : getColumnPermissionsApi().list(dataSetColumnPermissionsDTO)) {
                    TypeReference<List<Long>> listTypeReference = new TypeReference<List<Long>>() {
                    };
                    // A role rule that whitelists this user does not apply.
                    List<Long> userIdList = JsonUtil.parseList(columnPermissionsDTO.getWhiteListUser(), listTypeReference);
                    if (CollectionUtils.isEmpty(userIdList) || !userIdList.contains(userId)) {
                        roleColumnPermissionsDTOS.add(columnPermissionsDTO);
                    }
                }
                datasetColumnPermissions.addAll(roleColumnPermissionsDTOS);
            }
        }
        return datasetColumnPermissions;
    }

    /**
     * Row permission trees for a dataset/user, with each tree's field
     * references resolved (a missing field resolves to null).
     */
    public List<DataSetRowPermissionsTreeDTO> getRowPermissionsTree(Long datasetId, Long user) {
        // All enabled, non-whitelisted row permissions for the current
        // user / roles / organization under this dataset.
        List<DataSetRowPermissionsTreeDTO> records = rowPermissionsTree(datasetId, user);
        // Attach field objects to the tree nodes; unresolved fields become null.
        for (DataSetRowPermissionsTreeDTO record : records) {
            getField(record.getTree());
        }
        return records;
    }

    /**
     * Collect enabled row permission rules for the user (user-scoped,
     * role-scoped and system-parameter rules), dropping rules that whitelist
     * the user or one of the user's roles, and expanding ${sysParams.*}
     * variables in system-parameter rules.
     */
    private List<DataSetRowPermissionsTreeDTO> rowPermissionsTree(Long datasetId, Long userId) {
        List<DataSetRowPermissionsTreeDTO> datasetRowPermissions = new ArrayList<>();
        userId = userId != null ? userId : AuthUtils.getUser().getUserId();
        if (AuthUtils.isSysAdmin(userId)) {
            return datasetRowPermissions;
        }
        // Fix: guard against the permissions module being absent, consistent
        // with columnPermissions(). Without this, getUserById below would
        // throw an NPE when rowPermissionsApi is not wired.
        if (getRowPermissionsApi() == null) {
            return datasetRowPermissions;
        }
        UserFormVO userEntity = getRowPermissionsApi().getUserById(userId);
        List<Long> roleIds = userEntity.getRoleIds().stream().map(x -> Long.valueOf(x)).collect(Collectors.toList());
        DatasetRowPermissionsTreeRequest dataSetRowPermissionsDTO = new DatasetRowPermissionsTreeRequest();
        dataSetRowPermissionsDTO.setDatasetId(datasetId);
        dataSetRowPermissionsDTO.setEnable(true);
        if (ObjectUtils.isNotEmpty(userId)) {
            dataSetRowPermissionsDTO.setAuthTargetIds(Collections.singletonList(userId));
            dataSetRowPermissionsDTO.setAuthTargetType("user");
            datasetRowPermissions.addAll(getRowPermissionsApi().list(dataSetRowPermissionsDTO));
        }
        if (ObjectUtils.isNotEmpty(roleIds)) {
            dataSetRowPermissionsDTO.setAuthTargetIds(roleIds);
            dataSetRowPermissionsDTO.setAuthTargetType("role");
            datasetRowPermissions.addAll(getRowPermissionsApi().list(dataSetRowPermissionsDTO));
        }
        dataSetRowPermissionsDTO.setAuthTargetIds(null);
        dataSetRowPermissionsDTO.setAuthTargetType("sysParams");
        datasetRowPermissions.addAll(getRowPermissionsApi().list(dataSetRowPermissionsDTO));
        // Skip rules that whitelist the current user (or one of their roles);
        // for system-parameter rules, substitute the ${sysParams.*} variables.
        List<DataSetRowPermissionsTreeDTO> result = new ArrayList<>();
        TypeReference<List<Long>> listTypeReference = new TypeReference<List<Long>>() {
        };
        for (DataSetRowPermissionsTreeDTO record : datasetRowPermissions) {
            List<Long> userIdList = JsonUtil.parseList(record.getWhiteListUser(), listTypeReference);
            List<Long> roleIdList = JsonUtil.parseList(record.getWhiteListRole(), listTypeReference);
            List<Long> deptIdList = JsonUtil.parseList(record.getWhiteListDept(), listTypeReference);
            if (ObjectUtils.isNotEmpty(userId) && ObjectUtils.isNotEmpty(userIdList) && userIdList.contains(userId)) {
                continue;
            }
            if (ObjectUtils.isNotEmpty(roleIds) && ObjectUtils.isNotEmpty(roleIdList) && ObjectUtils.isNotEmpty(intersectionForList(roleIds, roleIdList))) {
                continue;
            }
            // Substitute system variables in the serialized expression tree.
            if (StringUtils.equalsIgnoreCase(record.getAuthTargetType(), "sysParams")) {
                String expressionTree = record.getExpressionTree();
                if (StringUtils.isNotEmpty(userEntity.getAccount())) {
                    expressionTree = expressionTree.replaceAll("\\$\\{sysParams\\.userId}", userEntity.getAccount());
                }
                if (StringUtils.isNotEmpty(userEntity.getEmail())) {
                    expressionTree = expressionTree.replaceAll("\\$\\{sysParams\\.userEmail}", userEntity.getEmail());
                }
                if (StringUtils.isNotEmpty(userEntity.getName())) {
                    expressionTree = expressionTree.replaceAll("\\$\\{sysParams\\.userName}", userEntity.getName());
                }
                record.setExpressionTree(expressionTree);
                DatasetRowPermissionsTreeObj tree = JsonUtil.parseObject(expressionTree, DatasetRowPermissionsTreeObj.class);
                record.setTree(tree);
            }
            result.add(record);
        }
        return result;
    }

    /** Elements of list1 that also occur in list2 (order of list1 kept). */
    private List<Long> intersectionForList(List<Long> list1, List<Long> list2) {
        List<Long> result = new ArrayList<>();
        for (Long id : list1) {
            if (list2.contains(id)) {
                result.add(id);
            }
        }
        return result;
    }

    /**
     * Recursively resolve the field object for every leaf item of a row
     * permission tree; sub-trees are descended into. A null tree or item is
     * skipped.
     */
    public void getField(DatasetRowPermissionsTreeObj tree) {
        if (ObjectUtils.isNotEmpty(tree)) {
            if (ObjectUtils.isNotEmpty(tree.getItems())) {
                for (DatasetRowPermissionsTreeItem item : tree.getItems()) {
                    if (ObjectUtils.isNotEmpty(item)) {
                        if (StringUtils.equalsIgnoreCase(item.getType(), "item") || ObjectUtils.isEmpty(item.getSubTree())) {
                            item.setField(datasetTableFieldManage.selectById(item.getFieldId()));
                        } else if (StringUtils.equalsIgnoreCase(item.getType(), "tree") || (ObjectUtils.isNotEmpty(item.getSubTree()) && StringUtils.isNotEmpty(item.getSubTree().getLogic()))) {
                            getField(item.getSubTree());
                        }
                    }
                }
            }
        }
    }
}

View File

@ -0,0 +1,49 @@
package io.dataease.dataset.server;
import io.dataease.api.dataset.DatasetDataApi;
import io.dataease.api.dataset.dto.DatasetTableDTO;
import io.dataease.api.dataset.dto.PreviewSqlDTO;
import io.dataease.api.dataset.union.DatasetGroupInfoDTO;
import io.dataease.dataset.manage.DatasetDataManage;
import io.dataease.dto.dataset.DatasetTableFieldDTO;
import jakarta.annotation.Resource;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import java.util.List;
import java.util.Map;
/**
 * REST endpoints for dataset data preview and field metadata. Thin
 * controller: every endpoint delegates directly to {@link DatasetDataManage}.
 *
 * @Author Junjun
 */
@RestController
@RequestMapping("datasetData")
public class DatasetDataServer implements DatasetDataApi {
    @Resource
    private DatasetDataManage datasetDataManage;

    /** Preview dataset data, hard-limited to the first 100 rows (offset 0). */
    @Override
    public Map<String, Object> previewData(DatasetGroupInfoDTO datasetGroupInfoDTO) throws Exception {
        return datasetDataManage.previewDataWithLimit(datasetGroupInfoDTO, 0, 100, false);
    }

    /** Field list of a single dataset table. */
    @Override
    public List<DatasetTableFieldDTO> tableField(DatasetTableDTO datasetTableDTO) throws Exception {
        return datasetDataManage.getTableFields(datasetTableDTO);
    }

    /** Preview a SQL statement and record it in the SQL execution log. */
    @Override
    public Map<String, Object> previewSql(PreviewSqlDTO dto) throws Exception {
        return datasetDataManage.previewSqlWithLog(dto);
    }

    /** Preview a SQL statement without logging it. */
    @Override
    public Map<String, Object> previewSqlCheck(PreviewSqlDTO dto) throws Exception {
        return datasetDataManage.previewSql(dto);
    }

    /** Enumerated values for the given field ids (presumably for filter option lists — confirm with callers). */
    @Override
    public List<String> getFieldEnum(List<Long> ids) throws Exception {
        return datasetDataManage.getFieldEnum(ids);
    }
}

View File

@ -0,0 +1,79 @@
package io.dataease.dataset.server;
import io.dataease.api.dataset.DatasetTableApi;
import io.dataease.api.dataset.dto.MultFieldValuesRequest;
import io.dataease.api.dataset.engine.SQLFunctionDTO;
import io.dataease.api.dataset.engine.SQLFunctionsEnum;
import io.dataease.dataset.manage.DatasetDataManage;
import io.dataease.dataset.manage.DatasetTableFieldManage;
import io.dataease.dto.dataset.DatasetTableFieldDTO;
import jakarta.annotation.Resource;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
/**
 * REST endpoints for dataset field operations: calculated fields, field
 * listing, enumeration values and the SQL function catalog. Delegates to
 * {@link DatasetTableFieldManage} and {@link DatasetDataManage}.
 *
 * @Author Junjun
 */
@RestController
@RequestMapping("datasetField")
public class DatasetFieldServer implements DatasetTableApi {
    @Resource
    private DatasetTableFieldManage datasetTableFieldManage;
    @Resource
    private DatasetDataManage datasetDataManage;

    /** Create or update a chart-level calculated field. */
    @Override
    public DatasetTableFieldDTO save(DatasetTableFieldDTO datasetTableFieldDTO) throws Exception {
        return datasetTableFieldManage.chartFieldSave(datasetTableFieldDTO);
    }

    /** Fetch a single field by id. */
    @Override
    public DatasetTableFieldDTO get(Long id) {
        return datasetTableFieldManage.selectById(id);
    }

    /** Checked fields of a dataset group. */
    @Override
    public List<DatasetTableFieldDTO> listByDatasetGroup(Long id) {
        return datasetTableFieldManage.selectByDatasetGroupId(id);
    }

    /** Delete a single field by id. */
    @Override
    public void delete(Long id) {
        datasetTableFieldManage.deleteById(id);
    }

    /** Dimension/quota split of a dataset group's checked fields. */
    @Override
    public Map<String, List<DatasetTableFieldDTO>> listByDQ(Long id) {
        return datasetTableFieldManage.listByDQ(id);
    }

    /** Enumerated values for the fields referenced by the request. */
    @Override
    public List<String> multFieldValuesForPermissions(@RequestBody MultFieldValuesRequest multFieldValuesRequest) throws Exception {
        return datasetDataManage.getFieldEnum(multFieldValuesRequest.getFieldIds());
    }

    /** Expose the SQL function catalog defined by {@link SQLFunctionsEnum} as DTOs. */
    @Override
    public List<SQLFunctionDTO> getFunction() {
        return Arrays.stream(SQLFunctionsEnum.values())
                .map(DatasetFieldServer::toFunctionDTO)
                .collect(Collectors.toList());
    }

    /** Copy one catalog enum entry into its transport DTO. */
    private static SQLFunctionDTO toFunctionDTO(SQLFunctionsEnum func) {
        SQLFunctionDTO dto = new SQLFunctionDTO();
        dto.setName(func.getName());
        dto.setFunc(func.getFunc());
        dto.setType(func.getType());
        dto.setDesc(func.getDesc());
        dto.setCustom(func.isCustom());
        return dto;
    }

    /** Delete every calculated field attached to a chart. */
    @Override
    public void deleteByChartId(Long id) {
        datasetTableFieldManage.deleteByChartId(id);
    }
}

View File

@ -0,0 +1,35 @@
package io.dataease.dataset.server;
import io.dataease.api.dataset.DatasetTableSqlLogApi;
import io.dataease.api.dataset.dto.SqlLogDTO;
import io.dataease.dataset.manage.DatasetTableSqlLogManage;
import jakarta.annotation.Resource;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import java.util.List;
/**
 * REST endpoints for the SQL execution log of SQL-defined dataset tables.
 * Thin controller: every endpoint delegates to
 * {@link DatasetTableSqlLogManage}.
 *
 * @Author Junjun
 */
@RestController
@RequestMapping("datasetTableSqlLog")
public class DatasetTableSqlLogServer implements DatasetTableSqlLogApi {
    @Resource
    private DatasetTableSqlLogManage datasetTableSqlLogManage;

    /** Insert or update a single SQL log entry. */
    @Override
    public void save(SqlLogDTO sqlLogDTO) throws Exception {
        datasetTableSqlLogManage.save(sqlLogDTO);
    }

    /** All log entries for the table referenced by the DTO's tableId. */
    @Override
    public List<SqlLogDTO> listByTableId(SqlLogDTO sqlLogDTO) throws Exception {
        return datasetTableSqlLogManage.listByTableId(sqlLogDTO);
    }

    /** Delete every log entry for the given table id. */
    @Override
    public void deleteByTableId(String id) throws Exception {
        datasetTableSqlLogManage.deleteByTableId(id);
    }
}

View File

@ -0,0 +1,80 @@
package io.dataease.dataset.server;
import io.dataease.api.dataset.DatasetTreeApi;
import io.dataease.api.dataset.dto.DatasetNodeDTO;
import io.dataease.api.dataset.dto.DatasetTableDTO;
import io.dataease.api.dataset.dto.SqlVariableDetails;
import io.dataease.api.dataset.union.DatasetGroupInfoDTO;
import io.dataease.api.dataset.vo.DataSetBarVO;
import io.dataease.commons.constants.OptConstants;
import io.dataease.dataset.manage.DatasetGroupManage;
import io.dataease.model.BusiNodeRequest;
import io.dataease.model.BusiNodeVO;
import jakarta.annotation.Resource;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import java.util.List;
/**
 * REST endpoints for the dataset tree: node CRUD, moving, bar info and
 * SQL-parameter lookups. Thin controller: every endpoint delegates to
 * {@link DatasetGroupManage}.
 */
@RestController
@RequestMapping("datasetTree")
public class DatasetTreeServer implements DatasetTreeApi {
    @Resource
    private DatasetGroupManage datasetGroupManage;

    /** Full save of a dataset group/node (not a rename). */
    @Override
    public DatasetGroupInfoDTO save(DatasetGroupInfoDTO datasetNodeDTO) throws Exception {
        return datasetGroupManage.save(datasetNodeDTO, false);
    }

    /** Rename an existing node (rename flag set). */
    @Override
    public DatasetNodeDTO rename(DatasetGroupInfoDTO dto) throws Exception {
        return datasetGroupManage.save(dto, true);
    }

    /** Create a new node. */
    @Override
    public DatasetNodeDTO create(DatasetGroupInfoDTO dto) throws Exception {
        return datasetGroupManage.save(dto, false);
    }

    /** Move a node within the tree. */
    @Override
    public DatasetNodeDTO move(DatasetGroupInfoDTO dto) throws Exception {
        return datasetGroupManage.move(dto);
    }

    /** Delete a node by id. */
    @Override
    public void delete(Long id) {
        datasetGroupManage.delete(id);
    }

    // Fix: added the missing @Override annotation — tree() implements the
    // DatasetTreeApi endpoint like every other method in this controller.
    /** Business tree for the given request. */
    @Override
    public List<BusiNodeVO> tree(BusiNodeRequest request) {
        return datasetGroupManage.tree(request);
    }

    /** Sidebar/bar info for a node. */
    @Override
    public DataSetBarVO barInfo(Long id) {
        return datasetGroupManage.queryBarInfo(id);
    }

    /** Node details prepared for preview. */
    @Override
    public DatasetGroupInfoDTO get(Long id) throws Exception {
        return datasetGroupManage.get(id, "preview");
    }

    /** Raw node details (no preview preparation). */
    @Override
    public DatasetGroupInfoDTO details(Long id) throws Exception {
        return datasetGroupManage.get(id, null);
    }

    /** Dataset details for a panel, by id list. */
    @Override
    public List<DatasetTableDTO> panelGetDsDetails(List<Long> ids) throws Exception {
        return datasetGroupManage.getDetail(ids);
    }

    /** SQL variable definitions for the given dataset ids. */
    @Override
    public List<SqlVariableDetails> getSqlParams(List<Long> ids) throws Exception {
        return datasetGroupManage.getSqlParams(ids);
    }
}

View File

@ -0,0 +1,9 @@
package io.dataease.dataset.utils;
/**
 * String constants for the two dataset table origins: a physical database
 * table ("db") or a user-written SQL statement ("sql").
 *
 * Fix: the constants are now {@code final} (they were mutable public
 * statics) and the constant holder can no longer be instantiated.
 *
 * @Author Junjun
 */
public class DatasetTableTypeConstants {

    /** Constant holder: no instances. */
    private DatasetTableTypeConstants() {
    }

    /** Dataset table backed directly by a database table. */
    public static final String DATASET_TABLE_DB = "db";
    /** Dataset table defined by a user-written SQL statement. */
    public static final String DATASET_TABLE_SQL = "sql";
}

View File

@ -0,0 +1,58 @@
package io.dataease.dataset.utils;
/**
 * Mapping helpers between raw database column type names, DataEase "deType"
 * codes, and the dimension/quota ("d"/"q") grouping used by charts.
 *
 * deType codes: 0 = text, 1 = time, 2 = integer, 3 = float, 4 = boolean.
 *
 * @Author Junjun
 */
public class FieldUtils {

    /** Utility class: no instances. */
    private FieldUtils() {
    }

    /**
     * Map a raw database type name to a deType code. Matching is
     * case-sensitive (upper-case names, as delivered by the drivers used
     * here — TODO confirm for new datasource types). Unknown types fall back
     * to 0 (text); fix: a null type now also falls back to 0 instead of
     * throwing a NullPointerException from the switch.
     *
     * @param type raw column type name, may be null
     * @return deType code (0..4)
     */
    public static int transType2DeType(String type) {
        if (type == null) {
            return 0; // treat a missing type as text rather than NPE
        }
        switch (type) {
            case "CHAR":
            case "VARCHAR":
            case "TEXT":
            case "TINYTEXT":
            case "MEDIUMTEXT":
            case "LONGTEXT":
            case "ENUM":
            case "ANY":
                return 0;// text
            case "DATE":
            case "TIME":
            case "YEAR":
            case "DATETIME":
            case "TIMESTAMP":
                return 1;// time
            case "INT":
            case "SMALLINT":
            case "MEDIUMINT":
            case "INTEGER":
            case "BIGINT":
            case "LONG": // LONG added for drivers reporting it
                return 2;// integer
            case "FLOAT":
            case "DOUBLE":
            case "DECIMAL":
            case "REAL":
                return 3;// float
            case "BIT":
            case "TINYINT":
                return 4;// boolean
            default:
                return 0;
        }
    }

    /**
     * Map a deType code to its chart grouping: dimension ("d") for
     * text/time/5, quota ("q") for numeric/boolean codes; anything else
     * defaults to dimension.
     *
     * @param deType deType code
     * @return "d" or "q"
     */
    public static String transDeType2DQ(int deType) {
        switch (deType) {
            case 0:
            case 1:
            case 5:
                return "d";
            case 2:
            case 3:
            case 4:
                return "q";
            default:
                return "d";
        }
    }
}

View File

@ -0,0 +1,111 @@
package io.dataease.dataset.utils;
import com.google.common.collect.ImmutableList;
import io.dataease.exception.DEException;
import org.apache.calcite.config.Lex;
import org.apache.calcite.sql.*;
import org.apache.calcite.sql.parser.SqlParseException;
import org.apache.calcite.sql.parser.SqlParser;
import java.util.List;
import static org.apache.calcite.sql.SqlKind.*;
/**
 * Rewrites a user-supplied SQL statement so that every table reference is
 * qualified with a datasource schema, by parsing the SQL with Calcite and
 * mutating identifier nodes in the AST.
 *
 * @Author Junjun
 */
public class SqlUtils {
    /**
     * Prefix every table referenced in {@code sql} with {@code schema}.
     *
     * @param sql    the statement to rewrite; a single trailing ';' is stripped
     *               because the Calcite parser rejects it
     * @param schema schema/alias to qualify table names with
     * @return the rewritten SQL rendered from the mutated AST
     */
    public static String addSchema(String sql, String schema) {
        if (sql.trim().endsWith(";")) {
            sql = sql.substring(0, sql.length() - 1);
        }
        SqlParser.Config config =
                SqlParser.configBuilder()
                        .setLex(Lex.JAVA)
                        .setIdentifierMaxLength(256)
                        .build();
        // Create the parser
        SqlParser sqlParser = SqlParser
                .create(sql, config);
        // Parse into an AST
        SqlNode sqlNode = null;
        try {
            sqlNode = sqlParser.parseStmt();
            addTableSchema(sqlNode, false, schema, config);
        } catch (SqlParseException e) {
            DEException.throwException("使用 Calcite 进行语法分析发生了异常:" + e);
        }
        String sqlRender = sqlNode.toString();
        // Collapse any run of backticks in the rendered SQL down to a single
        // one (the identifier rewrite below injects "`.`", which the renderer
        // can double up).
        sqlRender = sqlRender.replaceAll("(`+)", "`");
        return sqlRender;
    }

    /**
     * Recursively walk the AST and prefix table identifiers with the schema.
     *
     * @param sqlNode    current node
     * @param fromOrJoin true when this node was reached from a FROM/JOIN
     *                   position, i.e. an IDENTIFIER here is a table name
     *                   (not a column reference) and must be qualified
     * @param schema     schema to prepend
     * @param config     parser config (passed through, unused at leaves)
     */
    private static void addTableSchema(SqlNode sqlNode, Boolean fromOrJoin, String schema, SqlParser.Config config) {
        try {
            if (sqlNode.getKind() == JOIN) {
                // Both sides of a JOIN are table positions.
                SqlJoin sqlKind = (SqlJoin) sqlNode;
                addTableSchema(sqlKind.getLeft(), true, schema, config);
                addTableSchema(sqlKind.getRight(), true, schema, config);
            } else if (sqlNode.getKind() == IDENTIFIER) {
                if (fromOrJoin) {
                    // Get the table name
                    String tableName = sqlNode.toString();
                    SqlIdentifier sqlKind = (SqlIdentifier) sqlNode;
                    // Rewrites the identifier to "<schema>`.`<table>"; the
                    // surrounding backticks are normalized in addSchema().
                    sqlKind.setNames(ImmutableList.of(schema + "`.`" + tableName), null);
                }
            } else if (sqlNode.getKind() == AS) {
                // "x AS alias": only the first operand (the aliased thing)
                // can contain a table reference.
                SqlBasicCall sqlKind = (SqlBasicCall) sqlNode;
                if (sqlKind.getOperandList().size() >= 2) {
                    addTableSchema(sqlKind.getOperandList().get(0), fromOrJoin, schema, config);
                }
            } else if (sqlNode.getKind() == SELECT) {
                SqlSelect sqlKind = (SqlSelect) sqlNode;
                // Handle FROM
                addTableSchema(sqlKind.getFrom(), true, schema, config);
                // Handle WHERE (may contain sub-selects)
                SqlBasicCall where = (SqlBasicCall) sqlKind.getWhere();
                if (where != null && where.getOperandList().size() >= 2) {
                    for (int i = 0; i < where.getOperandList().size(); i++) {
                        addTableSchema(where.getOperandList().get(i), false, schema, config);
                    }
                }
            } else if (sqlNode.getKind() == UNION) {
                SqlBasicCall sqlKind = (SqlBasicCall) sqlNode;
                // A UNION has at least 2 sub-queries, otherwise the SQL is invalid
                if (sqlKind.getOperandList().size() >= 2) {
                    for (int i = 0; i < sqlKind.getOperandList().size(); i++) {
                        addTableSchema(sqlKind.getOperandList().get(i), fromOrJoin, schema, config);
                    }
                }
            } else if (sqlNode.getKind() == ORDER_BY) {
                // Descend into the query the ORDER BY wraps (operand 0).
                SqlOrderBy sqlKind = (SqlOrderBy) sqlNode;
                List<SqlNode> operandList = sqlKind.getOperandList();
                if (operandList.size() > 0) {
                    addTableSchema(operandList.get(0), fromOrJoin, schema, config);
                }
            } else if (sqlNode.getKind() == IN
                    || sqlNode.getKind() == NOT_IN
                    || sqlNode.getKind() == AND
                    || sqlNode.getKind() == OR
                    || sqlNode.getKind() == LESS_THAN
                    || sqlNode.getKind() == GREATER_THAN
                    || sqlNode.getKind() == LESS_THAN_OR_EQUAL
                    || sqlNode.getKind() == GREATER_THAN_OR_EQUAL
                    || sqlNode.getKind() == EQUALS
                    || sqlNode.getKind() == NOT_EQUALS) {
                // Boolean/comparison operators: recurse into operands so
                // sub-queries inside IN (...) etc. get their tables qualified.
                SqlBasicCall where = (SqlBasicCall) sqlNode;
                if (where.getOperandList().size() >= 2) {
                    for (int i = 0; i < where.getOperandList().size(); i++) {
                        addTableSchema(where.getOperandList().get(i), fromOrJoin, schema, config);
                    }
                }
            }
        } catch (Exception e) {
            DEException.throwException("使用 Calcite 进行语法分析发生了异常:" + e);
        }
    }
}

View File

@ -0,0 +1,54 @@
package io.dataease.dataset.utils;
import io.dataease.api.dataset.union.model.SQLObj;
import io.dataease.dataset.dto.DatasourceSchemaDTO;
import io.dataease.utils.Md5Utils;
import org.apache.calcite.avatica.util.Quoting;
import org.apache.commons.lang3.StringUtils;
/**
 * Naming helpers for physical/staging tables and hashed field/column names
 * used by the extraction engine.
 */
public class TableUtils {

    /** Template that wraps an identifier in back-ticks, e.g. {@code `name`}. */
    public static String format = Quoting.BACK_TICK.string + "%s" + Quoting.BACK_TICK.string;

    /** The physical table name is used unchanged. */
    public static String tableName(String name) {
        return name;
    }

    /** Name of the temporary table used while syncing. */
    public static String tmpName(String name) {
        return "tmp_" + name;
    }

    /** Name of the staging table holding rows to delete. */
    public static String deleteName(String baseName) {
        return "delete_" + baseName;
    }

    /** Name of the staging table holding rows to add. */
    public static String addName(String baseName) {
        return "add_" + baseName;
    }

    /** Stable field name derived from the full MD5 of the raw name. */
    public static String fieldName(String rawName) {
        return "f_" + Md5Utils.md5(rawName);
    }

    /** Shorter variant: 16 hex chars taken from the middle of the MD5. */
    public static String fieldNameShort(String rawName) {
        return "f_" + Md5Utils.md5(rawName).substring(8, 24);
    }

    /** Column name derived from the MD5 of the raw name ("C_" prefix). */
    public static String columnName(String rawName) {
        return "C_" + Md5Utils.md5(rawName);
    }

    /**
     * Renders {@code schema.`table` alias}; the schema part and the back-tick
     * quoting are only emitted when the SQLObj carries a schema.
     */
    public static String getTableAndAlias(SQLObj sqlObj) {
        StringBuilder rendered = new StringBuilder();
        boolean hasSchema = StringUtils.isNotEmpty(sqlObj.getTableSchema());
        if (hasSchema) {
            rendered.append(sqlObj.getTableSchema()).append(".`");
        }
        rendered.append(sqlObj.getTableName());
        if (hasSchema) {
            rendered.append('`');
        }
        return rendered.append(' ').append(sqlObj.getTableAlias()).toString();
    }

    /** Builds a SELECT * over the schema alias and back-tick-quoted table. */
    public static String tableName2Sql(DatasourceSchemaDTO ds, String tableName) {
        return String.format("SELECT * FROM %s.%s", ds.getSchemaAlias(), String.format(format, tableName));
    }
}

View File

@ -0,0 +1,223 @@
package io.dataease.datasource.dao.auto.entity;
import com.baomidou.mybatisplus.annotation.IdType;
import com.baomidou.mybatisplus.annotation.TableId;
import com.baomidou.mybatisplus.annotation.TableName;
import java.io.Serializable;
/**
 * <p>
 * Entity for the {@code core_datasource} table: a registered data source
 * (or a folder node grouping data sources).
 * </p>
 *
 * @author fit2cloud
 * @since 2023-09-26
 */
@TableName("core_datasource")
public class CoreDatasource implements Serializable {
    private static final long serialVersionUID = 1L;
    /** Primary key */
    @TableId(value = "id", type = IdType.AUTO)
    private Long id;
    /** Name */
    private String name;
    /** Description */
    private String description;
    /** Datasource type (e.g. mysql; "folder" for grouping nodes) */
    private String type;
    /** Parent (folder) ID */
    private Long pid;
    /** Update mode: 0 = replace, 1 = append */
    private String editType;
    /** Serialized connection configuration */
    private String configuration;
    /** Creation time */
    private Long createTime;
    /** Last update time (epoch millis) */
    private Long updateTime;
    /** User ID of the last updater */
    private Long updateBy;
    /** Creator user ID */
    // NOTE(review): createBy is a String while updateBy is a Long — confirm intended.
    private String createBy;
    /** Status */
    private String status;
    /** Quartz instance — original comment said "status", likely a copy-paste; confirm */
    private String qrtzInstance;
    /** Sync task status */
    private String taskStatus;
    public Long getId() {
        return id;
    }
    public void setId(Long id) {
        this.id = id;
    }
    public String getName() {
        return name;
    }
    public void setName(String name) {
        this.name = name;
    }
    public String getDescription() {
        return description;
    }
    public void setDescription(String description) {
        this.description = description;
    }
    public String getType() {
        return type;
    }
    public void setType(String type) {
        this.type = type;
    }
    public Long getPid() {
        return pid;
    }
    public void setPid(Long pid) {
        this.pid = pid;
    }
    public String getEditType() {
        return editType;
    }
    public void setEditType(String editType) {
        this.editType = editType;
    }
    public String getConfiguration() {
        return configuration;
    }
    public void setConfiguration(String configuration) {
        this.configuration = configuration;
    }
    public Long getCreateTime() {
        return createTime;
    }
    public void setCreateTime(Long createTime) {
        this.createTime = createTime;
    }
    public Long getUpdateTime() {
        return updateTime;
    }
    public void setUpdateTime(Long updateTime) {
        this.updateTime = updateTime;
    }
    public Long getUpdateBy() {
        return updateBy;
    }
    public void setUpdateBy(Long updateBy) {
        this.updateBy = updateBy;
    }
    public String getCreateBy() {
        return createBy;
    }
    public void setCreateBy(String createBy) {
        this.createBy = createBy;
    }
    public String getStatus() {
        return status;
    }
    public void setStatus(String status) {
        this.status = status;
    }
    public String getQrtzInstance() {
        return qrtzInstance;
    }
    public void setQrtzInstance(String qrtzInstance) {
        this.qrtzInstance = qrtzInstance;
    }
    public String getTaskStatus() {
        return taskStatus;
    }
    public void setTaskStatus(String taskStatus) {
        this.taskStatus = taskStatus;
    }
    @Override
    public String toString() {
        return "CoreDatasource{" +
                "id = " + id +
                ", name = " + name +
                ", description = " + description +
                ", type = " + type +
                ", pid = " + pid +
                ", editType = " + editType +
                ", configuration = " + configuration +
                ", createTime = " + createTime +
                ", updateTime = " + updateTime +
                ", updateBy = " + updateBy +
                ", createBy = " + createBy +
                ", status = " + status +
                ", qrtzInstance = " + qrtzInstance +
                ", taskStatus = " + taskStatus +
                "}";
    }
}

View File

@ -0,0 +1,248 @@
package io.dataease.datasource.dao.auto.entity;
import com.baomidou.mybatisplus.annotation.IdType;
import com.baomidou.mybatisplus.annotation.TableId;
import com.baomidou.mybatisplus.annotation.TableName;
import java.io.Serializable;
/**
 * <p>
 * Entity for the {@code core_datasource_task} table: a scheduled sync task
 * attached to a data source.
 * </p>
 *
 * @author fit2cloud
 * @since 2023-09-28
 */
@TableName("core_datasource_task")
public class CoreDatasourceTask implements Serializable {
    private static final long serialVersionUID = 1L;
    /** Primary key */
    @TableId(value = "id", type = IdType.AUTO)
    private Long id;
    /** Datasource ID */
    private Long dsId;
    /** Task name */
    private String name;
    /** Update mode */
    private String updateType;
    /** Start time */
    private Long startTime;
    /** Execution frequency: 0 = one-shot, 1 = cron */
    private String syncRate;
    /** Cron expression */
    private String cron;
    /** Simple-repeat interval value */
    private Long simpleCronValue;
    /** Simple-repeat interval unit */
    private String simpleCronType;
    /** End limit: 0 = unlimited, 1 = explicit end time */
    private String endLimit;
    /** End time */
    private Long endTime;
    /** Creation time */
    private Long createTime;
    /** Last execution time */
    private Long lastExecTime;
    /** Last execution result */
    private String lastExecStatus;
    /** Extra payload — schema not documented in the original; confirm against writers */
    private String extraData;
    /** Task status */
    private String taskStatus;
    public Long getId() {
        return id;
    }
    public void setId(Long id) {
        this.id = id;
    }
    public Long getDsId() {
        return dsId;
    }
    public void setDsId(Long dsId) {
        this.dsId = dsId;
    }
    public String getName() {
        return name;
    }
    public void setName(String name) {
        this.name = name;
    }
    public String getUpdateType() {
        return updateType;
    }
    public void setUpdateType(String updateType) {
        this.updateType = updateType;
    }
    public Long getStartTime() {
        return startTime;
    }
    public void setStartTime(Long startTime) {
        this.startTime = startTime;
    }
    public String getSyncRate() {
        return syncRate;
    }
    public void setSyncRate(String syncRate) {
        this.syncRate = syncRate;
    }
    public String getCron() {
        return cron;
    }
    public void setCron(String cron) {
        this.cron = cron;
    }
    public Long getSimpleCronValue() {
        return simpleCronValue;
    }
    public void setSimpleCronValue(Long simpleCronValue) {
        this.simpleCronValue = simpleCronValue;
    }
    public String getSimpleCronType() {
        return simpleCronType;
    }
    public void setSimpleCronType(String simpleCronType) {
        this.simpleCronType = simpleCronType;
    }
    public String getEndLimit() {
        return endLimit;
    }
    public void setEndLimit(String endLimit) {
        this.endLimit = endLimit;
    }
    public Long getEndTime() {
        return endTime;
    }
    public void setEndTime(Long endTime) {
        this.endTime = endTime;
    }
    public Long getCreateTime() {
        return createTime;
    }
    public void setCreateTime(Long createTime) {
        this.createTime = createTime;
    }
    public Long getLastExecTime() {
        return lastExecTime;
    }
    public void setLastExecTime(Long lastExecTime) {
        this.lastExecTime = lastExecTime;
    }
    public String getLastExecStatus() {
        return lastExecStatus;
    }
    public void setLastExecStatus(String lastExecStatus) {
        this.lastExecStatus = lastExecStatus;
    }
    public String getExtraData() {
        return extraData;
    }
    public void setExtraData(String extraData) {
        this.extraData = extraData;
    }
    public String getTaskStatus() {
        return taskStatus;
    }
    public void setTaskStatus(String taskStatus) {
        this.taskStatus = taskStatus;
    }
    @Override
    public String toString() {
        return "CoreDatasourceTask{" +
                "id = " + id +
                ", dsId = " + dsId +
                ", name = " + name +
                ", updateType = " + updateType +
                ", startTime = " + startTime +
                ", syncRate = " + syncRate +
                ", cron = " + cron +
                ", simpleCronValue = " + simpleCronValue +
                ", simpleCronType = " + simpleCronType +
                ", endLimit = " + endLimit +
                ", endTime = " + endTime +
                ", createTime = " + createTime +
                ", lastExecTime = " + lastExecTime +
                ", lastExecStatus = " + lastExecStatus +
                ", extraData = " + extraData +
                ", taskStatus = " + taskStatus +
                "}";
    }
}

View File

@ -0,0 +1,164 @@
package io.dataease.datasource.dao.auto.entity;
import com.baomidou.mybatisplus.annotation.IdType;
import com.baomidou.mybatisplus.annotation.TableId;
import com.baomidou.mybatisplus.annotation.TableName;
import java.io.Serializable;
/**
 * <p>
 * Entity for the {@code core_datasource_task_log} table: one execution record
 * of a datasource sync task.
 * </p>
 *
 * @author fit2cloud
 * @since 2023-09-28
 */
@TableName("core_datasource_task_log")
public class CoreDatasourceTaskLog implements Serializable {
    private static final long serialVersionUID = 1L;
    /** Primary key */
    @TableId(value = "id", type = IdType.AUTO)
    private Long id;
    /** Datasource ID */
    private Long dsId;
    /** Task ID */
    private Long taskId;
    /** Start time */
    private Long startTime;
    /** End time */
    private Long endTime;
    /** Execution status */
    private String taskStatus;
    /** Table name — original comment duplicated "execution status", likely a copy-paste */
    private String tableName;
    /** Error message */
    private String info;
    /** Creation time */
    private Long createTime;
    /** Trigger type — undocumented in the original; presumably manual vs. scheduled, confirm */
    private String triggerType;
    public Long getId() {
        return id;
    }
    public void setId(Long id) {
        this.id = id;
    }
    public Long getDsId() {
        return dsId;
    }
    public void setDsId(Long dsId) {
        this.dsId = dsId;
    }
    public Long getTaskId() {
        return taskId;
    }
    public void setTaskId(Long taskId) {
        this.taskId = taskId;
    }
    public Long getStartTime() {
        return startTime;
    }
    public void setStartTime(Long startTime) {
        this.startTime = startTime;
    }
    public Long getEndTime() {
        return endTime;
    }
    public void setEndTime(Long endTime) {
        this.endTime = endTime;
    }
    public String getTaskStatus() {
        return taskStatus;
    }
    public void setTaskStatus(String taskStatus) {
        this.taskStatus = taskStatus;
    }
    public String getTableName() {
        return tableName;
    }
    public void setTableName(String tableName) {
        this.tableName = tableName;
    }
    public String getInfo() {
        return info;
    }
    public void setInfo(String info) {
        this.info = info;
    }
    public Long getCreateTime() {
        return createTime;
    }
    public void setCreateTime(Long createTime) {
        this.createTime = createTime;
    }
    public String getTriggerType() {
        return triggerType;
    }
    public void setTriggerType(String triggerType) {
        this.triggerType = triggerType;
    }
    @Override
    public String toString() {
        return "CoreDatasourceTaskLog{" +
                "id = " + id +
                ", dsId = " + dsId +
                ", taskId = " + taskId +
                ", startTime = " + startTime +
                ", endTime = " + endTime +
                ", taskStatus = " + taskStatus +
                ", tableName = " + tableName +
                ", info = " + info +
                ", createTime = " + createTime +
                ", triggerType = " + triggerType +
                "}";
    }
}

View File

@ -0,0 +1,153 @@
package io.dataease.datasource.dao.auto.entity;
import com.baomidou.mybatisplus.annotation.IdType;
import com.baomidou.mybatisplus.annotation.TableId;
import com.baomidou.mybatisplus.annotation.TableName;
import java.io.Serializable;
/**
 * <p>
 * Entity for the {@code core_de_engine} table: the built-in calculation
 * engine configuration.
 * </p>
 *
 * @author fit2cloud
 * @since 2023-04-18
 */
@TableName("core_de_engine")
public class CoreDeEngine implements Serializable {
    private static final long serialVersionUID = 1L;
    /** Primary key */
    @TableId(value = "id", type = IdType.AUTO)
    private Long id;
    /** Name */
    private String name;
    /** Description */
    private String description;
    /** Engine type */
    private String type;
    /** Serialized configuration details */
    private String configuration;
    /** Create timestamp */
    private Long createTime;
    /** Update timestamp */
    private Long updateTime;
    /** Creator user ID */
    private String createBy;
    /** Status */
    private String status;
    public Long getId() {
        return id;
    }
    public void setId(Long id) {
        this.id = id;
    }
    public String getName() {
        return name;
    }
    public void setName(String name) {
        this.name = name;
    }
    public String getDescription() {
        return description;
    }
    public void setDescription(String description) {
        this.description = description;
    }
    public String getType() {
        return type;
    }
    public void setType(String type) {
        this.type = type;
    }
    public String getConfiguration() {
        return configuration;
    }
    public void setConfiguration(String configuration) {
        this.configuration = configuration;
    }
    public Long getCreateTime() {
        return createTime;
    }
    public void setCreateTime(Long createTime) {
        this.createTime = createTime;
    }
    public Long getUpdateTime() {
        return updateTime;
    }
    public void setUpdateTime(Long updateTime) {
        this.updateTime = updateTime;
    }
    public String getCreateBy() {
        return createBy;
    }
    public void setCreateBy(String createBy) {
        this.createBy = createBy;
    }
    public String getStatus() {
        return status;
    }
    public void setStatus(String status) {
        this.status = status;
    }
    @Override
    public String toString() {
        return "CoreDeEngine{" +
                "id = " + id +
                ", name = " + name +
                ", description = " + description +
                ", type = " + type +
                ", configuration = " + configuration +
                ", createTime = " + createTime +
                ", updateTime = " + updateTime +
                ", createBy = " + createBy +
                ", status = " + status +
                "}";
    }
}

View File

@ -0,0 +1,111 @@
package io.dataease.datasource.dao.auto.entity;
import com.baomidou.mybatisplus.annotation.IdType;
import com.baomidou.mybatisplus.annotation.TableId;
import com.baomidou.mybatisplus.annotation.TableName;
import java.io.Serializable;
/**
 * <p>
 * Entity for the {@code core_driver} table: a user-registered JDBC driver.
 * </p>
 *
 * @author fit2cloud
 * @since 2023-04-18
 */
@TableName("core_driver")
public class CoreDriver implements Serializable {
    private static final long serialVersionUID = 1L;
    /** Primary key */
    @TableId(value = "id", type = IdType.AUTO)
    private Long id;
    /** Name */
    private String name;
    /** Creation time */
    private Long createTime;
    /** Datasource type this driver serves */
    private String type;
    /** Fully qualified driver class name */
    private String driverClass;
    /** Description */
    private String description;
    public Long getId() {
        return id;
    }
    public void setId(Long id) {
        this.id = id;
    }
    public String getName() {
        return name;
    }
    public void setName(String name) {
        this.name = name;
    }
    public Long getCreateTime() {
        return createTime;
    }
    public void setCreateTime(Long createTime) {
        this.createTime = createTime;
    }
    public String getType() {
        return type;
    }
    public void setType(String type) {
        this.type = type;
    }
    public String getDriverClass() {
        return driverClass;
    }
    public void setDriverClass(String driverClass) {
        this.driverClass = driverClass;
    }
    public String getDescription() {
        return description;
    }
    public void setDescription(String description) {
        this.description = description;
    }
    @Override
    public String toString() {
        return "CoreDriver{" +
                "id = " + id +
                ", name = " + name +
                ", createTime = " + createTime +
                ", type = " + type +
                ", driverClass = " + driverClass +
                ", description = " + description +
                "}";
    }
}

View File

@ -0,0 +1,119 @@
package io.dataease.datasource.dao.auto.entity;
import com.baomidou.mybatisplus.annotation.IdType;
import com.baomidou.mybatisplus.annotation.TableId;
import com.baomidou.mybatisplus.annotation.TableName;
import java.io.Serializable;
/**
 * <p>
 * Entity for the {@code core_driver_jar} table: one jar file belonging to a
 * registered driver.
 * </p>
 *
 * @author fit2cloud
 * @since 2023-04-17
 */
@TableName("core_driver_jar")
public class CoreDriverJar implements Serializable {
    private static final long serialVersionUID = 1L;
    /** Primary key */
    @TableId(value = "id", type = IdType.AUTO)
    private Long id;
    /** Owning driver's primary key */
    // NOTE(review): stored as String while CoreDriver.id is a Long — confirm intended.
    private String deDriverId;
    /** File name */
    private String fileName;
    /** Version */
    private String version;
    /** Fully qualified driver class name */
    private String driverClass;
    /** Translated (stored) file name — undocumented in the original; confirm */
    private String transName;
    /** Whether transName is in effect — undocumented in the original; confirm */
    private Boolean isTransName;
    public Long getId() {
        return id;
    }
    public void setId(Long id) {
        this.id = id;
    }
    public String getDeDriverId() {
        return deDriverId;
    }
    public void setDeDriverId(String deDriverId) {
        this.deDriverId = deDriverId;
    }
    public String getFileName() {
        return fileName;
    }
    public void setFileName(String fileName) {
        this.fileName = fileName;
    }
    public String getVersion() {
        return version;
    }
    public void setVersion(String version) {
        this.version = version;
    }
    public String getDriverClass() {
        return driverClass;
    }
    public void setDriverClass(String driverClass) {
        this.driverClass = driverClass;
    }
    public String getTransName() {
        return transName;
    }
    public void setTransName(String transName) {
        this.transName = transName;
    }
    public Boolean getIsTransName() {
        return isTransName;
    }
    public void setIsTransName(Boolean isTransName) {
        this.isTransName = isTransName;
    }
    @Override
    public String toString() {
        return "CoreDriverJar{" +
                "id = " + id +
                ", deDriverId = " + deDriverId +
                ", fileName = " + fileName +
                ", version = " + version +
                ", driverClass = " + driverClass +
                ", transName = " + transName +
                ", isTransName = " + isTransName +
                "}";
    }
}

View File

@ -0,0 +1,41 @@
package io.dataease.datasource.dao.auto.entity;
import com.baomidou.mybatisplus.annotation.IdType;
import com.baomidou.mybatisplus.annotation.TableId;
import com.baomidou.mybatisplus.annotation.TableName;
import java.io.Serializable;
/**
 * <p>
 * Entity for the {@code core_ds_finish_page} table. Only a primary key is
 * stored — presumably a marker that a guide/setup page was completed; confirm.
 * </p>
 *
 * @author fit2cloud
 * @since 2023-10-08
 */
@TableName("core_ds_finish_page")
public class CoreDsFinishPage implements Serializable {
    private static final long serialVersionUID = 1L;
    /** Primary key */
    @TableId(value = "id", type = IdType.AUTO)
    private Long id;
    public Long getId() {
        return id;
    }
    public void setId(Long id) {
        this.id = id;
    }
    @Override
    public String toString() {
        return "CoreDsFinishPage{" +
                "id = " + id +
                "}";
    }
}

View File

@ -0,0 +1,68 @@
package io.dataease.datasource.dao.auto.entity;
import com.baomidou.mybatisplus.annotation.TableName;
import java.io.Serializable;
/**
 * <p>
 * Read-only mapping of Quartz's {@code QRTZ_SCHEDULER_STATE} table, which
 * tracks live scheduler instances via periodic check-ins.
 * </p>
 *
 * @author fit2cloud
 * @since 2023-09-28
 */
@TableName("QRTZ_SCHEDULER_STATE")
public class QrtzSchedulerState implements Serializable {
    private static final long serialVersionUID = 1L;
    /** Scheduler name */
    private String schedName;
    /** Cluster instance name */
    private String instanceName;
    /** Time of the instance's last check-in, as recorded by Quartz */
    private Long lastCheckinTime;
    /** Expected interval between check-ins */
    private Long checkinInterval;
    public String getSchedName() {
        return schedName;
    }
    public void setSchedName(String schedName) {
        this.schedName = schedName;
    }
    public String getInstanceName() {
        return instanceName;
    }
    public void setInstanceName(String instanceName) {
        this.instanceName = instanceName;
    }
    public Long getLastCheckinTime() {
        return lastCheckinTime;
    }
    public void setLastCheckinTime(Long lastCheckinTime) {
        this.lastCheckinTime = lastCheckinTime;
    }
    public Long getCheckinInterval() {
        return checkinInterval;
    }
    public void setCheckinInterval(Long checkinInterval) {
        this.checkinInterval = checkinInterval;
    }
    @Override
    public String toString() {
        return "QrtzSchedulerState{" +
                "schedName = " + schedName +
                ", instanceName = " + instanceName +
                ", lastCheckinTime = " + lastCheckinTime +
                ", checkinInterval = " + checkinInterval +
                "}";
    }
}

View File

@ -0,0 +1,18 @@
package io.dataease.datasource.dao.auto.mapper;
import io.dataease.datasource.dao.auto.entity.CoreDatasource;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import org.apache.ibatis.annotations.Mapper;
/**
 * <p>
 * MyBatis-Plus mapper for the {@code core_datasource} table.
 * </p>
 *
 * @author fit2cloud
 * @since 2023-09-26
 */
@Mapper
public interface CoreDatasourceMapper extends BaseMapper<CoreDatasource> {
}

View File

@ -0,0 +1,18 @@
package io.dataease.datasource.dao.auto.mapper;
import io.dataease.datasource.dao.auto.entity.CoreDatasourceTaskLog;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import org.apache.ibatis.annotations.Mapper;
/**
 * <p>
 * MyBatis-Plus mapper for the {@code core_datasource_task_log} table.
 * </p>
 *
 * @author fit2cloud
 * @since 2023-09-28
 */
@Mapper
public interface CoreDatasourceTaskLogMapper extends BaseMapper<CoreDatasourceTaskLog> {
}

View File

@ -0,0 +1,18 @@
package io.dataease.datasource.dao.auto.mapper;
import io.dataease.datasource.dao.auto.entity.CoreDatasourceTask;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import org.apache.ibatis.annotations.Mapper;
/**
 * <p>
 * MyBatis-Plus mapper for the {@code core_datasource_task} table.
 * </p>
 *
 * @author fit2cloud
 * @since 2023-09-28
 */
@Mapper
public interface CoreDatasourceTaskMapper extends BaseMapper<CoreDatasourceTask> {
}

View File

@ -0,0 +1,18 @@
package io.dataease.datasource.dao.auto.mapper;
import io.dataease.datasource.dao.auto.entity.CoreDeEngine;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import org.apache.ibatis.annotations.Mapper;
/**
 * <p>
 * MyBatis-Plus mapper for the {@code core_de_engine} table.
 * </p>
 *
 * @author fit2cloud
 * @since 2023-04-18
 */
@Mapper
public interface CoreDeEngineMapper extends BaseMapper<CoreDeEngine> {
}

View File

@ -0,0 +1,18 @@
package io.dataease.datasource.dao.auto.mapper;
import io.dataease.datasource.dao.auto.entity.CoreDriverJar;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import org.apache.ibatis.annotations.Mapper;
/**
 * <p>
 * MyBatis-Plus mapper for the {@code core_driver_jar} (driver jar detail) table.
 * </p>
 *
 * @author fit2cloud
 * @since 2023-04-17
 */
@Mapper
public interface CoreDriverJarMapper extends BaseMapper<CoreDriverJar> {
}

View File

@ -0,0 +1,18 @@
package io.dataease.datasource.dao.auto.mapper;
import io.dataease.datasource.dao.auto.entity.CoreDriver;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import org.apache.ibatis.annotations.Mapper;
/**
 * <p>
 * MyBatis-Plus mapper for the {@code core_driver} table.
 * </p>
 *
 * @author fit2cloud
 * @since 2023-04-18
 */
@Mapper
public interface CoreDriverMapper extends BaseMapper<CoreDriver> {
}

View File

@ -0,0 +1,18 @@
package io.dataease.datasource.dao.auto.mapper;
import io.dataease.datasource.dao.auto.entity.CoreDsFinishPage;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import org.apache.ibatis.annotations.Mapper;
/**
 * <p>
 * MyBatis-Plus mapper for the {@code core_ds_finish_page} table.
 * </p>
 *
 * @author fit2cloud
 * @since 2023-10-08
 */
@Mapper
public interface CoreDsFinishPageMapper extends BaseMapper<CoreDsFinishPage> {
}

View File

@ -0,0 +1,18 @@
package io.dataease.datasource.dao.auto.mapper;
import io.dataease.datasource.dao.auto.entity.QrtzSchedulerState;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import org.apache.ibatis.annotations.Mapper;
/**
 * <p>
 * MyBatis-Plus mapper for Quartz's {@code QRTZ_SCHEDULER_STATE} table.
 * </p>
 *
 * @author fit2cloud
 * @since 2023-09-28
 */
@Mapper
public interface QrtzSchedulerStateMapper extends BaseMapper<QrtzSchedulerState> {
}

View File

@ -0,0 +1,20 @@
package io.dataease.datasource.dao.ext.mapper;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import io.dataease.datasource.dto.DatasourceNodePO;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
import org.apache.ibatis.annotations.Select;
import java.util.List;
@Mapper
public interface CoreDatasourceExtMapper {
    /**
     * Lightweight projection of datasource rows (id, name, type, pid) used for
     * tree building; filtering and ordering come from the wrapper's custom SQL
     * segment supplied by the caller.
     */
    // NOTE(review): raw QueryWrapper type — left as-is since the signature is
    // visible to callers; consider QueryWrapper<DatasourceNodePO> later.
    @Select("""
            select id, name, type, pid from core_datasource
            ${ew.customSqlSegment}
            """)
    List<DatasourceNodePO> query(@Param("ew") QueryWrapper queryWrapper);
}

View File

@ -0,0 +1,22 @@
package io.dataease.datasource.dao.ext.mapper;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import io.dataease.datasource.dao.ext.po.Ctimestamp;
import io.dataease.datasource.dao.ext.po.DataSourceNodePO;
import io.dataease.dto.dataset.DatasetTableFieldDTO;
import org.apache.ibatis.annotations.*;
@Mapper
public interface DataSourceExtMapper extends BaseMapper<DataSourceNodePO> {
    /**
     * Reads the database server's current time as a Unix epoch (seconds) via
     * unix_timestamp(current_timestamp()) — lets callers compare against DB
     * time instead of JVM time.
     */
    @Select("select unix_timestamp(current_timestamp()) as currentTimestamp")
    @Results(
            id = "selectTimestamp",
            value = {
                    @Result(property = "currentTimestamp", column = "currentTimestamp")
            }
    )
    Ctimestamp selectTimestamp();
}

View File

@ -0,0 +1,39 @@
package io.dataease.datasource.dao.ext.mapper;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import io.dataease.datasource.dto.CoreDatasourceTaskDTO;
import io.dataease.request.GridExample;
import org.apache.ibatis.annotations.*;
import java.util.List;
/**
 * Custom queries joining datasource tasks with their datasource and Quartz
 * trigger information.
 *
 * @Author gin
 * @Date 2021/3/9 3:26 PM
 */
@Mapper
public interface ExtDatasourceTaskMapper {
    /**
     * Lists tasks joined with the owning datasource's name and the Quartz
     * trigger's next fire time (trigger name == task id). Filtering/ordering
     * come from the wrapper's custom SQL segment.
     */
    @Select(
            """
            SELECT core_datasource.name as datasource_name,core_datasource_task.* , QRTZ_TRIGGERS.NEXT_FIRE_TIME as NEXT_FIRE_TIME
            FROM core_datasource_task
            left join core_datasource on core_datasource.id=core_datasource_task.ds_id
            left join QRTZ_TRIGGERS on core_datasource_task.id=QRTZ_TRIGGERS.TRIGGER_NAME
            ${ew.customSqlSegment}
            """
    )
    @Results(
            id = "taskWithTriggers",
            value = {
                    @Result(property = "id", column = "id"),
                    @Result(property = "datasourceName", column = "datasource_name"),
                    @Result(property = "dsId", column = "ds_id"),
                    @Result(property = "nextExecTime", column = "NEXT_FIRE_TIME")
            }
    )
    List<CoreDatasourceTaskDTO> taskWithTriggers(@Param("ew") QueryWrapper queryWrapper);
}

View File

@ -0,0 +1 @@
package io.dataease.datasource.dao.ext.mapper; import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper; import com.baomidou.mybatisplus.core.mapper.BaseMapper; import com.baomidou.mybatisplus.core.metadata.IPage; import io.dataease.api.ds.vo.CoreDatasourceTaskLogDTO; import org.apache.ibatis.annotations.*; @Mapper public interface TaskLogExtMapper extends BaseMapper<CoreDatasourceTaskLogDTO> { @Select( """ select * from core_datasource_task_log ${ew.customSqlSegment} """ ) @Results( id = "taskLog", value = { @Result(property = "id", column = "id"), @Result(property = "ds_id", column = "dsId"), @Result(property = "task_id", column = "taskId"), @Result(property = "start_time", column = "startTime"), @Result(property = "end_time", column = "endTime"), @Result(property = "task_status", column = "taskStatus"), @Result(property = "trigger_type", column = "triggerType"), @Result(property = "table_name", column = "tableName"), @Result(property = "info", column = "info") } ) IPage<CoreDatasourceTaskLogDTO> pager(IPage<CoreDatasourceTaskLogDTO> page, @Param("ew") QueryWrapper queryWrapper); }

View File

@ -0,0 +1,8 @@
package io.dataease.datasource.dao.ext.po;
import lombok.Data;
@Data
public class Ctimestamp {
    // Database server time as a Unix epoch in seconds, selected via
    // unix_timestamp(current_timestamp()) in DataSourceExtMapper.
    private Long currentTimestamp;
}

View File

@ -0,0 +1,25 @@
package io.dataease.datasource.dao.ext.po;
import com.baomidou.mybatisplus.annotation.TableId;
import com.baomidou.mybatisplus.annotation.TableName;
import lombok.Data;
import java.io.Serializable;
@Data
@TableName("core_datasource")
public class DataSourceNodePO implements Serializable {
    // Tree-node projection of core_datasource (subset of CoreDatasource's columns).
    @TableId
    private Long id;
    // Parent (folder) ID
    private Long pid;
    private String name;
    // Datasource type; "folder" marks grouping nodes
    private String type;
    // Used to negate the type flag when the datasource is in "error" state
    private String status;
    private Long createTime;
}

View File

@ -0,0 +1,19 @@
package io.dataease.datasource.dto;
import io.dataease.datasource.dao.auto.entity.CoreDatasourceTask;
import lombok.Getter;
import lombok.Setter;
/**
 * Row model for the datasource-task grid: a {@link CoreDatasourceTask}
 * enriched with display-only fields (owning datasource name, next fire time,
 * permissions).
 *
 * @Author gin
 * @Date 2021/3/9 3:19 PM
 */
@Getter
@Setter
public class CoreDatasourceTaskDTO extends CoreDatasourceTask {
    /** Name of the owning datasource (joined from core_datasource). */
    private String datasourceName;
    /** Next scheduled fire time (QRTZ_TRIGGERS.NEXT_FIRE_TIME). */
    private Long nextExecTime;
    // Fix: removed the duplicate "taskStatus" field — it shadowed
    // CoreDatasourceTask.taskStatus, and the Lombok accessors generated here
    // operated on the shadow, so a value set through the parent's setter was
    // invisible through this class's getter. The inherited accessors remain,
    // so the external interface is unchanged.
    /** Error/result message for display. */
    private String msg;
    /** Caller's permission string on this task. */
    private String privileges;
}

View File

@ -0,0 +1,29 @@
package io.dataease.datasource.dto;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import com.fasterxml.jackson.databind.ser.std.ToStringSerializer;
import io.dataease.model.TreeBaseModel;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.io.Serial;
@Data
@AllArgsConstructor
@NoArgsConstructor
public class DatasourceNodeBO implements TreeBaseModel<DatasourceNodeBO> {
    // Tree-node business object for the datasource resource tree.
    @Serial
    private static final long serialVersionUID = 728340676442387790L;
    // Serialized as a string so long ids survive JavaScript number precision
    @JsonSerialize(using = ToStringSerializer.class)
    private Long id;
    private String name;
    // True for datasource nodes, false for folders/root (see DataSourceManage.convert)
    private Boolean leaf;
    // Permission weight; defaults to 3, tree building passes 7 — confirm semantics
    private Integer weight = 3;
    // Parent node id (-1 for the virtual root's own parent)
    private Long pid;
    // Datasource-type flag; negated when the datasource status is "error"
    private Integer extraFlag;
    private String type;
}

View File

@ -0,0 +1,22 @@
package io.dataease.datasource.dto;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.io.Serial;
import java.io.Serializable;
@Data
@AllArgsConstructor
@NoArgsConstructor
public class DatasourceNodePO implements Serializable {
    // Minimal projection (id, name, type, pid) returned by
    // CoreDatasourceExtMapper.query for tree building.
    @Serial
    private static final long serialVersionUID = -4457506330575500164L;
    private Long id;
    private String name;
    private String type;
    private Long pid;
}

View File

@ -0,0 +1,104 @@
package io.dataease.datasource.manage;
import cn.hutool.core.collection.CollectionUtil;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.core.conditions.update.UpdateWrapper;
import io.dataease.api.ds.vo.DatasourceDTO;
import io.dataease.commons.constants.OptConstants;
import io.dataease.constant.DataSourceType;
import io.dataease.datasource.dao.auto.entity.CoreDatasource;
import io.dataease.datasource.dao.auto.mapper.CoreDatasourceMapper;
import io.dataease.datasource.dao.ext.mapper.DataSourceExtMapper;
import io.dataease.datasource.dao.ext.po.DataSourceNodePO;
import io.dataease.datasource.dto.DatasourceNodeBO;
import io.dataease.exception.DEException;
import io.dataease.license.config.XpackInteract;
import io.dataease.model.BusiNodeRequest;
import io.dataease.model.BusiNodeVO;
import io.dataease.operation.manage.CoreOptRecentManage;
import io.dataease.utils.AuthUtils;
import io.dataease.utils.TreeUtils;
import jakarta.annotation.Resource;
import org.apache.commons.lang3.ObjectUtils;
import org.apache.commons.lang3.StringUtils;
import org.springframework.stereotype.Component;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
@Component
public class DataSourceManage {
@Resource
private DataSourceExtMapper dataSourceExtMapper;
@Resource
private CoreDatasourceMapper coreDatasourceMapper;
@Resource
private CoreOptRecentManage coreOptRecentManage;
private DatasourceNodeBO rootNode() {
return new DatasourceNodeBO(0L, "root", false, 7, -1L, 0, "mysql");
}
private DatasourceNodeBO convert(DataSourceNodePO po) {
DataSourceType dataSourceType = DataSourceType.valueOf(po.getType());
if (ObjectUtils.isEmpty(dataSourceType)) {
dataSourceType = DataSourceType.mysql;
}
Integer flag = dataSourceType.getFlag();
int extraFlag = StringUtils.equalsIgnoreCase("error", po.getStatus()) ? Math.negateExact(flag) : flag;
return new DatasourceNodeBO(po.getId(), po.getName(), !StringUtils.equals(po.getType(), "folder"), 7, po.getPid(), extraFlag, dataSourceType.name());
}
@XpackInteract(value = "datasourceResourceTree", replace = true)
public List<BusiNodeVO> tree(BusiNodeRequest request) {
QueryWrapper<DataSourceNodePO> queryWrapper = new QueryWrapper<>();
if (ObjectUtils.isNotEmpty(request.getLeaf()) && !request.getLeaf()) {
queryWrapper.eq("type", "folder");
}
queryWrapper.orderByDesc("create_time");
List<DatasourceNodeBO> nodes = new ArrayList<>();
List<DataSourceNodePO> pos = dataSourceExtMapper.selectList(queryWrapper);
if (ObjectUtils.isEmpty(request.getLeaf()) || !request.getLeaf()) nodes.add(rootNode());
if (CollectionUtil.isNotEmpty(pos)) {
nodes.addAll(pos.stream().map(this::convert).toList());
}
return TreeUtils.mergeTree(nodes, BusiNodeVO.class, false);
}
@XpackInteract(value = "datasourceResourceTree", before = false)
public void innerSave(CoreDatasource coreDatasource) {
coreDatasourceMapper.insert(coreDatasource);
coreOptRecentManage.saveOpt(coreDatasource.getId(), OptConstants.OPT_RESOURCE_TYPE.DATASOURCE,OptConstants.OPT_TYPE.NEW);
}
@XpackInteract(value = "datasourceResourceTree", before = false)
public void innerEdit(CoreDatasource coreDatasource) {
UpdateWrapper<CoreDatasource> updateWrapper = new UpdateWrapper<>();
updateWrapper.eq("id", coreDatasource.getId());
coreDatasource.setUpdateTime(System.currentTimeMillis());
coreDatasource.setUpdateBy(AuthUtils.getUser().getUserId());
coreDatasourceMapper.update(coreDatasource, updateWrapper);
coreOptRecentManage.saveOpt(coreDatasource.getId(), OptConstants.OPT_RESOURCE_TYPE.DATASOURCE,OptConstants.OPT_TYPE.UPDATE);
}
/**
 * Moves and/or renames a datasource node in the resource tree.
 * Only the parent id and name are taken from the DTO; all other columns keep
 * their stored values. Audit fields are stamped before persisting.
 *
 * @param dataSourceDTO carries the node id plus the target pid and name
 * @throws RuntimeException via DEException when the id is missing or no such row exists
 */
@XpackInteract(value = "datasourceResourceTree", before = false)
public void move(DatasourceDTO dataSourceDTO) {
    Long id = dataSourceDTO.getId();
    // Resolve the stored row; a missing id or missing row is a caller error.
    CoreDatasource sourceData = ObjectUtils.isEmpty(id) ? null : coreDatasourceMapper.selectById(id);
    if (ObjectUtils.isEmpty(sourceData)) {
        DEException.throwException("resource not exist");
    }
    sourceData.setUpdateTime(System.currentTimeMillis());
    sourceData.setUpdateBy(AuthUtils.getUser().getUserId());
    sourceData.setPid(dataSourceDTO.getPid());
    sourceData.setName(dataSourceDTO.getName());
    coreDatasourceMapper.updateById(sourceData);
    coreOptRecentManage.saveOpt(sourceData.getId(), OptConstants.OPT_RESOURCE_TYPE.DATASOURCE,OptConstants.OPT_TYPE.UPDATE);
}
}

View File

@ -0,0 +1,353 @@
package io.dataease.datasource.manage;
import com.baomidou.mybatisplus.core.conditions.update.UpdateWrapper;
import io.dataease.api.dataset.dto.DatasetTableDTO;
import io.dataease.api.ds.vo.TableField;
import io.dataease.commons.constants.TaskStatus;
import io.dataease.dataset.utils.TableUtils;
import io.dataease.datasource.dao.auto.entity.CoreDatasource;
import io.dataease.datasource.dao.auto.entity.CoreDatasourceTask;
import io.dataease.datasource.dao.auto.entity.CoreDatasourceTaskLog;
import io.dataease.datasource.dao.auto.entity.CoreDeEngine;
import io.dataease.datasource.dao.auto.mapper.CoreDatasourceMapper;
import io.dataease.datasource.provider.ApiUtils;
import io.dataease.datasource.provider.EngineProvider;
import io.dataease.datasource.provider.ExcelUtils;
import io.dataease.datasource.provider.ProviderUtil;
import io.dataease.datasource.request.DatasourceRequest;
import io.dataease.datasource.request.EngineRequest;
import io.dataease.datasource.server.DatasourceServer;
import io.dataease.datasource.server.DatasourceTaskServer;
import io.dataease.datasource.server.EngineServer;
import io.dataease.exception.DEException;
import io.dataease.job.sechedule.ExtractDataJob;
import io.dataease.job.sechedule.ScheduleManager;
import io.dataease.utils.JsonUtil;
import io.dataease.utils.LogUtil;
import jakarta.annotation.Resource;
import org.apache.commons.lang3.StringUtils;
import org.quartz.JobExecutionContext;
import org.quartz.JobKey;
import org.quartz.TriggerKey;
import org.springframework.stereotype.Component;
import java.util.Date;
import java.util.List;
import java.util.Map;
import static io.dataease.datasource.server.DatasourceTaskServer.ScheduleType.CRON;
import static io.dataease.datasource.server.DatasourceTaskServer.ScheduleType.MANUAL;
@Component
public class DatasourceSyncManage {
@Resource
private CoreDatasourceMapper datasourceMapper;
@Resource
private EngineServer engineServer;
@Resource
private DatasourceTaskServer datasourceTaskServer;
@Resource
private ScheduleManager scheduleManager;
public void extractExcelData(CoreDatasource coreDatasource, String type) {
if (coreDatasource == null) {
LogUtil.error("Can not find CoreDatasource: " + coreDatasource.getName());
return;
}
DatasourceServer.UpdateType updateType = DatasourceServer.UpdateType.valueOf(type);
DatasourceRequest datasourceRequest = new DatasourceRequest();
datasourceRequest.setDatasource(coreDatasource);
List<DatasetTableDTO> tables = ExcelUtils.getTables(datasourceRequest);
for (DatasetTableDTO tableDTO : tables) {
CoreDatasourceTaskLog datasetTableTaskLog = datasourceTaskServer.initTaskLog(coreDatasource.getId(), null, tableDTO.getTableName(), CRON.toString());
datasourceRequest.setTable(tableDTO.getTableName());
List<TableField> tableFields = ExcelUtils.getTableFields(datasourceRequest);
try {
datasetTableTaskLog.setInfo(datasetTableTaskLog.getInfo() + "/n Begin to sync datatable: " + datasourceRequest.getTable());
createEngineTable(datasourceRequest.getTable(), tableFields);
if (updateType.equals(DatasourceServer.UpdateType.all_scope)) {
createEngineTable(TableUtils.tmpName(datasourceRequest.getTable()), tableFields);
}
extractExcelData(datasourceRequest, updateType);
if (updateType.equals(DatasourceServer.UpdateType.all_scope)) {
replaceTable(datasourceRequest.getTable());
}
datasetTableTaskLog.setInfo(datasetTableTaskLog.getInfo() + "/n End to sync datatable: " + datasourceRequest.getTable());
datasetTableTaskLog.setTaskStatus(TaskStatus.Completed.toString());
} catch (Exception e) {
try {
if (updateType.equals(DatasourceServer.UpdateType.all_scope)) {
dropEngineTable(TableUtils.tmpName(datasourceRequest.getTable()));
}
}catch (Exception ignore){}
datasetTableTaskLog.setTaskStatus(TaskStatus.Error.toString());
datasetTableTaskLog.setInfo(datasetTableTaskLog.getInfo() + "/n Failed to sync datatable: " + datasourceRequest.getTable() + ", " + e.getMessage());
} finally {
datasourceTaskServer.saveLog(datasetTableTaskLog);
}
}
}
public void extractData(Long datasourceId, Long taskId, JobExecutionContext context) {
CoreDatasource coreDatasource = datasourceMapper.selectById(datasourceId);
if (coreDatasource == null) {
LogUtil.error("Can not find datasource: " + datasourceId);
return;
}
CoreDatasourceTask coreDatasourceTask = datasourceTaskServer.selectById(taskId);
if (coreDatasourceTask == null) {
return;
}
datasourceTaskServer.checkTaskIsStopped(coreDatasourceTask);
if (StringUtils.isNotEmpty(coreDatasourceTask.getTaskStatus()) && (coreDatasourceTask.getTaskStatus().equalsIgnoreCase(TaskStatus.Stopped.name()) || coreDatasourceTask.getTaskStatus().equalsIgnoreCase(TaskStatus.Suspend.name()))) {
LogUtil.info("Skip synchronization task: {} ,due to task status is {}", coreDatasourceTask.getId(), coreDatasourceTask.getTaskStatus());
return;
}
if (datasourceTaskServer.existUnderExecutionTask(datasourceId, coreDatasourceTask.getId())) {
LogUtil.info("Skip synchronization task for datasource due to exist others, datasource ID : " + datasourceId);
return;
}
DatasourceServer.UpdateType updateType = DatasourceServer.UpdateType.valueOf(coreDatasourceTask.getUpdateType());
if (context != null) {
UpdateWrapper<CoreDatasource> updateWrapper = new UpdateWrapper<>();
updateWrapper.eq("id", datasourceId);
CoreDatasource record = new CoreDatasource();
record.setQrtzInstance(context.getFireInstanceId());
datasourceMapper.update(record, updateWrapper);
}
extractedData(taskId, coreDatasource, updateType, coreDatasourceTask.getSyncRate());
try {
datasourceTaskServer.updateTaskStatus(coreDatasourceTask);
updateDsTaskStatus(datasourceId);
} catch (Exception ignore) {
LogUtil.error(ignore);
}
}
public void extractedData(Long taskId, CoreDatasource coreDatasource, DatasourceServer.UpdateType updateType, String scheduleType ) {
DatasourceRequest datasourceRequest = new DatasourceRequest();
datasourceRequest.setDatasource(coreDatasource);
List<DatasetTableDTO> tables = ApiUtils.getTables(datasourceRequest);
for (DatasetTableDTO api : tables) {
CoreDatasourceTaskLog datasetTableTaskLog = datasourceTaskServer.initTaskLog(coreDatasource.getId(), taskId, api.getName(), scheduleType);
datasourceRequest.setTable(api.getTableName());
List<TableField> tableFields = ApiUtils.getTableFields(datasourceRequest);
try {
datasetTableTaskLog.setInfo(datasetTableTaskLog.getInfo() + "/n Begin to sync datatable: " + datasourceRequest.getTable());
createEngineTable(datasourceRequest.getTable(), tableFields);
if (updateType.equals(DatasourceServer.UpdateType.all_scope)) {
createEngineTable(TableUtils.tmpName(datasourceRequest.getTable()), tableFields);
}
extractApiData(datasourceRequest, updateType);
if (updateType.equals(DatasourceServer.UpdateType.all_scope)) {
replaceTable(datasourceRequest.getTable());
}
datasetTableTaskLog.setInfo(datasetTableTaskLog.getInfo() + "/n End to sync datatable: " + datasourceRequest.getTable());
datasetTableTaskLog.setTaskStatus(TaskStatus.Completed.toString());
datasetTableTaskLog.setEndTime(System.currentTimeMillis());
} catch (Exception e) {
try {
if (updateType.equals(DatasourceServer.UpdateType.all_scope)) {
dropEngineTable(TableUtils.tmpName(datasourceRequest.getTable()));
}
} catch (Exception ignore) {
}
datasetTableTaskLog.setInfo(datasetTableTaskLog.getInfo() + "/n Failed to sync datatable: " + datasourceRequest.getTable() + ", " + e.getMessage());
datasetTableTaskLog.setTaskStatus(TaskStatus.Error.toString());
datasetTableTaskLog.setEndTime(System.currentTimeMillis());
} finally {
datasourceTaskServer.saveLog(datasetTableTaskLog);
}
}
}
private void updateDsTaskStatus(Long datasourceId){
UpdateWrapper<CoreDatasource> updateWrapper = new UpdateWrapper<>();
updateWrapper.eq("id", datasourceId);
CoreDatasource record = new CoreDatasource();
record.setTaskStatus(TaskStatus.WaitingForExecution.name());
datasourceMapper.update(record, updateWrapper);
}
public void extractDataForTable(Long datasourceId, String name, String tableName, String type) {
DatasourceServer.UpdateType updateType = DatasourceServer.UpdateType.valueOf(type);
CoreDatasource coreDatasource = datasourceMapper.selectById(datasourceId);
if (coreDatasource == null) {
LogUtil.error("Can not find datasource: " + datasourceId);
return;
}
CoreDatasourceTaskLog datasetTableTaskLog = datasourceTaskServer.initTaskLog(datasourceId, null, name, MANUAL.toString());
DatasourceRequest datasourceRequest = new DatasourceRequest();
datasourceRequest.setDatasource(coreDatasource);
List<DatasetTableDTO> tables = ApiUtils.getTables(datasourceRequest);
for (DatasetTableDTO api : tables) {
if(api.getTableName().equalsIgnoreCase(tableName)){
datasourceRequest.setTable(api.getTableName());
List<TableField> tableFields = ApiUtils.getTableFields(datasourceRequest);
try {
datasetTableTaskLog.setInfo(datasetTableTaskLog.getInfo() + "/n Begin to sync datatable: " + datasourceRequest.getTable());
createEngineTable(datasourceRequest.getTable(), tableFields);
if (updateType.equals(DatasourceServer.UpdateType.all_scope)) {
createEngineTable(TableUtils.tmpName(datasourceRequest.getTable()), tableFields);
}
extractApiData(datasourceRequest, updateType);
if (updateType.equals(DatasourceServer.UpdateType.all_scope)) {
replaceTable(datasourceRequest.getTable());
}
datasetTableTaskLog.setInfo(datasetTableTaskLog.getInfo() + "/n End to sync datatable: " + datasourceRequest.getTable());
datasetTableTaskLog.setTaskStatus(TaskStatus.Completed.name());
datasetTableTaskLog.setEndTime(System.currentTimeMillis());
} catch (Exception e) {
try {
if (updateType.equals(DatasourceServer.UpdateType.all_scope)) {
dropEngineTable(TableUtils.tmpName(datasourceRequest.getTable()));
}
}catch (Exception ignore){}
datasetTableTaskLog.setInfo(datasetTableTaskLog.getInfo() + "/n Failed to sync datatable: " + datasourceRequest.getTable() + ", " + e.getMessage());
datasetTableTaskLog.setTaskStatus(TaskStatus.Error.name());
datasetTableTaskLog.setEndTime(System.currentTimeMillis());
}finally {
datasourceTaskServer.saveLog(datasetTableTaskLog);
}
}
}
}
private void extractApiData(DatasourceRequest datasourceRequest, DatasourceServer.UpdateType extractType) throws Exception {
Map<String, Object> result = ApiUtils.fetchResultField(datasourceRequest);
List<String[]> dataList = (List<String[]>) result.get("dataList");
String engineTableName;
switch (extractType) {
case all_scope:
engineTableName = TableUtils.tmpName(TableUtils.tableName(datasourceRequest.getTable()));
break;
default:
engineTableName = TableUtils.tableName(datasourceRequest.getTable());
break;
}
CoreDeEngine engine = engineServer.info();
EngineRequest engineRequest = new EngineRequest();
engineRequest.setEngine(engine);
EngineProvider engineProvider = ProviderUtil.getEngineProvider(engine.getType());
int pageNumber = 1000; //一次插入 1000条
int totalPage;
if (dataList.size() % pageNumber > 0) {
totalPage = dataList.size() / pageNumber + 1;
} else {
totalPage = dataList.size() / pageNumber;
}
for (int page = 1; page <= totalPage; page++) {
engineRequest.setQuery(engineProvider.insertSql(engineTableName, dataList, page, pageNumber));
engineProvider.exec(engineRequest);
}
}
private void extractExcelData(DatasourceRequest datasourceRequest, DatasourceServer.UpdateType extractType) throws Exception {
ExcelUtils excelUtils = new ExcelUtils();
List<String[]> dataList = excelUtils.fetchDataList(datasourceRequest);
String engineTableName;
switch (extractType) {
case all_scope:
engineTableName = TableUtils.tmpName(TableUtils.tableName(datasourceRequest.getTable()));
break;
default:
engineTableName = TableUtils.tableName(datasourceRequest.getTable());
break;
}
CoreDeEngine engine = engineServer.info();
EngineRequest engineRequest = new EngineRequest();
engineRequest.setEngine(engine);
EngineProvider engineProvider = ProviderUtil.getEngineProvider(engine.getType());
int pageNumber = 1000; //一次插入 1000条
int totalPage;
if (dataList.size() % pageNumber > 0) {
totalPage = dataList.size() / pageNumber + 1;
} else {
totalPage = dataList.size() / pageNumber;
}
for (int page = 1; page <= totalPage; page++) {
engineRequest.setQuery(engineProvider.insertSql(engineTableName, dataList, page, pageNumber));
engineProvider.exec(engineRequest);
}
}
private void replaceTable(String tableName) throws Exception {
CoreDeEngine engine = engineServer.info();
EngineRequest engineRequest = new EngineRequest();
engineRequest.setEngine(engine);
EngineProvider engineProvider = ProviderUtil.getEngineProvider(engine.getType());
String[] replaceTableSql = engineProvider.replaceTable(tableName).split(";");
for (int i = 0; i < replaceTableSql.length; i++) {
if (StringUtils.isNotEmpty(replaceTableSql[i])) {
engineRequest.setQuery(replaceTableSql[i]);
engineProvider.exec(engineRequest);
}
}
}
public void createEngineTable(String tableName, List<TableField> tableFields) throws Exception {
CoreDeEngine engine = engineServer.info();
EngineRequest engineRequest = new EngineRequest();
engineRequest.setEngine(engine);
EngineProvider engineProvider = ProviderUtil.getEngineProvider(engine.getType());
engineRequest.setQuery(engineProvider.createTableSql(tableName, tableFields, engine));
engineProvider.exec(engineRequest);
}
public void dropEngineTable(String tableName) throws Exception{
CoreDeEngine engine = engineServer.info();
EngineRequest engineRequest = new EngineRequest();
engineRequest.setEngine(engine);
EngineProvider engineProvider = ProviderUtil.getEngineProvider(engine.getType());
engineRequest.setQuery(engineProvider.dropTable(tableName));
engineProvider.exec(engineRequest);
}
public void addSchedule(CoreDatasourceTask datasourceTask) throws DEException {
if (StringUtils.equalsIgnoreCase(datasourceTask.getSyncRate(), DatasourceTaskServer.ScheduleType.RIGHTNOW.toString())) {
scheduleManager.addOrUpdateSingleJob(new JobKey(datasourceTask.getId().toString(), datasourceTask.getDsId().toString()),
new TriggerKey(datasourceTask.getId().toString(), datasourceTask.getDsId().toString()),
ExtractDataJob.class,
new Date(datasourceTask.getStartTime()),
scheduleManager.getDefaultJobDataMap(datasourceTask.getDsId().toString(), datasourceTask.getCron(), datasourceTask.getId().toString(), datasourceTask.getUpdateType()));
} else {
Date endTime;
if (StringUtils.equalsIgnoreCase(datasourceTask.getEndLimit().toString(), "1")) {
if (datasourceTask.getEndTime() == null || datasourceTask.getEndTime() == 0) {
endTime = null;
} else {
endTime = new Date(datasourceTask.getEndTime());
if (endTime.before(new Date())) {
deleteSchedule(datasourceTask);
return;
}
}
} else {
endTime = null;
}
scheduleManager.addOrUpdateCronJob(new JobKey(datasourceTask.getId().toString(), datasourceTask.getDsId().toString()),
new TriggerKey(datasourceTask.getId().toString(), datasourceTask.getDsId().toString()),
ExtractDataJob.class,
datasourceTask.getCron(), new Date(datasourceTask.getStartTime()), endTime,
scheduleManager.getDefaultJobDataMap(datasourceTask.getDsId().toString(), datasourceTask.getCron(), datasourceTask.getId().toString(), datasourceTask.getUpdateType()));
}
}
public void deleteSchedule(CoreDatasourceTask datasourceTask) {
scheduleManager.removeJob(new JobKey(datasourceTask.getId().toString(), datasourceTask.getDsId().toString()), new TriggerKey(datasourceTask.getId().toString(), datasourceTask.getDsId().toString()));
}
public void fireNow(CoreDatasourceTask datasourceTask) throws Exception {
scheduleManager.fireNow(datasourceTask.getId().toString(), datasourceTask.getDsId().toString());
}
}

View File

@ -0,0 +1,555 @@
package io.dataease.datasource.provider;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.jayway.jsonpath.JsonPath;
import io.dataease.api.dataset.dto.DatasetTableDTO;
import io.dataease.api.ds.vo.ApiDefinition;
import io.dataease.api.ds.vo.ApiDefinitionRequest;
import io.dataease.api.ds.vo.TableField;
import io.dataease.datasource.request.DatasourceRequest;
import io.dataease.exception.DEException;
import io.dataease.utils.CommonBeanFactory;
import io.dataease.utils.HttpClientConfig;
import io.dataease.utils.HttpClientUtil;
import io.dataease.utils.JsonUtil;
import net.minidev.json.JSONArray;
import org.apache.commons.lang3.StringUtils;
import org.springframework.util.CollectionUtils;
import org.springframework.util.ObjectUtils;

import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;
import java.util.*;
import java.util.stream.Collectors;
public class ApiUtils {
    // JsonPath segment template for one field name, bracket-quoted so names with
    // dots or special characters still address correctly.
    private static String path = "['%s']";
    // Shared Jackson mapper resolved from the Spring context.
    public static ObjectMapper objectMapper = CommonBeanFactory.getBean(ObjectMapper.class);
    // Reusable Jackson type tokens for deserializing generic lists.
    private static TypeReference<List<Object>> listTypeReference = new TypeReference<List<Object>>() {
    };
    private static TypeReference<List<Map<String, Object>>> listForMapTypeReference = new TypeReference<List<Map<String, Object>>>() {
    };
/**
 * Lists the virtual tables exposed by an API datasource: one entry per API
 * definition stored in the datasource's JSON configuration.
 *
 * @param datasourceRequest carries the datasource whose configuration is parsed
 * @return table descriptors (table name, display name, datasource id)
 */
public static List<DatasetTableDTO> getTables(DatasourceRequest datasourceRequest) throws DEException {
    TypeReference<List<ApiDefinition>> apiListType = new TypeReference<List<ApiDefinition>>() {
    };
    List<ApiDefinition> definitions = JsonUtil.parseList(datasourceRequest.getDatasource().getConfiguration(), apiListType);
    List<DatasetTableDTO> tableDescs = new ArrayList<>();
    for (ApiDefinition definition : definitions) {
        DatasetTableDTO dto = new DatasetTableDTO();
        dto.setTableName(definition.getDeTableName());
        dto.setName(definition.getName());
        dto.setDatasourceId(datasourceRequest.getDatasource().getId());
        tableDescs.add(dto);
    }
    return tableDescs;
}
/**
 * Executes the API call for the requested table and returns both its field
 * metadata and the extracted data rows.
 *
 * @param datasourceRequest identifies the datasource and the target table
 * @return map with "fieldList" (List&lt;TableField&gt;) and "dataList" (List&lt;String[]&gt;)
 */
public static Map<String, Object> fetchResultField(DatasourceRequest datasourceRequest) throws DEException {
    ApiDefinition apiDefinition = checkApiDefinition(datasourceRequest);
    if (apiDefinition == null) {
        DEException.throwException("未找到");
    }
    // 10-second socket timeout for the data fetch.
    String response = execHttpRequest(apiDefinition, 10);
    Map<String, Object> result = new HashMap<>();
    result.put("fieldList", getTableFields(apiDefinition));
    result.put("dataList", fetchResult(response, apiDefinition));
    return result;
}
// Returns the field list the user configured for this API definition.
// The fields are stored on the definition itself, so no HTTP call is needed.
private static List<TableField> getTableFields(ApiDefinition apiDefinition) throws DEException {
    return apiDefinition.getFields();
}
/**
 * Looks up the configured fields of the API definition whose table name matches
 * the request (case-insensitively).
 *
 * @param datasourceRequest carries the datasource configuration and the table name
 * @return the matching definition's fields, or an empty list when no definition
 *         matches or the configuration cannot be parsed
 */
public static List<TableField> getTableFields(DatasourceRequest datasourceRequest) throws DEException {
    TypeReference<List<ApiDefinition>> listTypeReference = new TypeReference<List<ApiDefinition>>() {
    };
    List<TableField> tableFields = new ArrayList<>();
    try {
        List<ApiDefinition> lists = objectMapper.readValue(datasourceRequest.getDatasource().getConfiguration(), listTypeReference);
        for (ApiDefinition apiDefinition : lists) {
            if (datasourceRequest.getTable().equalsIgnoreCase(apiDefinition.getDeTableName())) {
                tableFields = getTableFields(apiDefinition);
            }
        }
    } catch (Exception e) {
        // FIX: the original swallowed parse failures silently, hiding broken
        // configurations. Keep the best-effort contract (return empty list) but
        // record the failure, matching the file's existing error-reporting style.
        e.printStackTrace();
    }
    return tableFields;
}
/**
 * Probes every API item of the datasource by actually fetching its data, and
 * reports per-item health.
 *
 * @param datasourceRequest carries the datasource; its table field is mutated
 *                          per probed item
 * @return JSON array string of {"name": ..., "status": "Success"|"Error"} entries
 */
public static String checkStatus(DatasourceRequest datasourceRequest) throws Exception {
    TypeReference<List<ApiDefinition>> listTypeReference = new TypeReference<List<ApiDefinition>>() {
    };
    List<ApiDefinition> apiDefinitionList = JsonUtil.parseList(datasourceRequest.getDatasource().getConfiguration(), listTypeReference);
    // FIX: was a raw `new ArrayList()`; use the parameterized type.
    List<ObjectNode> status = new ArrayList<>();
    for (ApiDefinition apiDefinition : apiDefinitionList) {
        datasourceRequest.setTable(apiDefinition.getName());
        ObjectNode apiItemStatuses = objectMapper.createObjectNode();
        apiItemStatuses.put("name", apiDefinition.getName());
        try {
            // Any failure while fetching or parsing marks this item as Error.
            getData(datasourceRequest);
            apiItemStatuses.put("status", "Success");
        } catch (Exception ignore) {
            apiItemStatuses.put("status", "Error");
        }
        status.add(apiItemStatuses);
    }
    return JsonUtil.toJSONString(status).toString();
}
/**
 * Fetches the raw data rows for the API table named in the request.
 * Used by checkStatus(...) to probe whether each API item is reachable.
 *
 * @throws Exception when the definition is missing, the HTTP call fails,
 *                   or the response cannot be parsed
 */
private static List<String[]> getData(DatasourceRequest datasourceRequest) throws Exception {
    ApiDefinition apiDefinition = checkApiDefinition(datasourceRequest);
    if (apiDefinition == null) {
        // NOTE(review): DEException.throwException presumably always throws; otherwise
        // the call below would NPE on a null apiDefinition — confirm against DEException.
        DEException.throwException("未找到");
    }
    String response = execHttpRequest(apiDefinition, 10);
    return fetchResult(response, apiDefinition);
}
/**
 * Executes the HTTP call described by an API definition and returns the raw
 * response body. Applies configured headers, optional Basic auth, and query
 * arguments; supports GET and POST (raw JSON/XML/Raw bodies or form bodies).
 * Note: appending query params mutates {@code apiDefinition.url}.
 *
 * @param apiDefinition the request description (method, url, headers, body, auth)
 * @param socketTimeout socket timeout in seconds
 * @return the response body, or "" for unsupported methods / skipped bodies
 */
public static String execHttpRequest(ApiDefinition apiDefinition, int socketTimeout) {
    String response = "";
    HttpClientConfig httpClientConfig = new HttpClientConfig();
    httpClientConfig.setSocketTimeout(socketTimeout * 1000);
    ApiDefinitionRequest apiDefinitionRequest = apiDefinition.getRequest();
    // Copy configured headers; both name and value must be non-empty.
    for (Map header : apiDefinitionRequest.getHeaders()) {
        if (header.get("name") != null && StringUtils.isNotEmpty(header.get("name").toString()) && header.get("value") != null && StringUtils.isNotEmpty(header.get("value").toString())) {
            httpClientConfig.addHeader(header.get("name").toString(), header.get("value").toString());
        }
    }
    if (apiDefinitionRequest.getAuthManager() != null
            && StringUtils.isNotBlank(apiDefinitionRequest.getAuthManager().getUsername())
            && StringUtils.isNotBlank(apiDefinitionRequest.getAuthManager().getPassword())
            && apiDefinitionRequest.getAuthManager().getVerification().equals("Basic Auth")) {
        // FIX: RFC 7617 Basic auth requires standard Base64; the original used the
        // URL-safe encoder, which emits '-'/'_' instead of '+'/'/' and breaks
        // authentication for some credentials. Also encode the bytes explicitly as
        // UTF-8 instead of the platform default charset.
        String authValue = "Basic " + Base64.getEncoder().encodeToString((apiDefinitionRequest.getAuthManager().getUsername()
                + ":" + apiDefinitionRequest.getAuthManager().getPassword()).getBytes(StandardCharsets.UTF_8));
        httpClientConfig.addHeader("Authorization", authValue);
    }
    // Build the query string from configured arguments.
    List<String> params = new ArrayList<>();
    for (Map<String, String> argument : apiDefinition.getRequest().getArguments()) {
        if (StringUtils.isNotEmpty(argument.get("name")) && StringUtils.isNotEmpty(argument.get("value"))) {
            // FIX: the one-arg URLEncoder.encode is deprecated and uses the platform
            // default charset; encode as UTF-8 explicitly (Charset overload, Java 10+).
            params.add(argument.get("name") + "=" + URLEncoder.encode(argument.get("value"), StandardCharsets.UTF_8));
        }
    }
    if (org.apache.commons.collections4.CollectionUtils.isNotEmpty(params)) {
        apiDefinition.setUrl(apiDefinition.getUrl() + "?" + StringUtils.join(params, "&"));
    }
    switch (apiDefinition.getMethod()) {
        case "GET":
            response = HttpClientUtil.get(apiDefinition.getUrl().trim(), httpClientConfig);
            break;
        case "POST":
            if (!apiDefinitionRequest.getBody().containsKey("type")) {
                DEException.throwException("请求类型不能为空");
            }
            String type = apiDefinitionRequest.getBody().get("type").toString();
            if (StringUtils.equalsAny(type, "JSON", "XML", "Raw")) {
                // Raw body: forwarded as-is; no request is sent when "raw" is absent.
                String raw = null;
                if (apiDefinitionRequest.getBody().get("raw") != null) {
                    raw = apiDefinitionRequest.getBody().get("raw").toString();
                    response = HttpClientUtil.post(apiDefinition.getUrl(), raw, httpClientConfig);
                }
            }
            if (StringUtils.equalsAny(type, "Form_Data", "WWW_FORM")) {
                // Form body: "kvs" holds a JSON array of {name, value} pairs.
                if (apiDefinitionRequest.getBody().get("kvs") != null) {
                    Map<String, String> body = new HashMap<>();
                    TypeReference<List<JsonNode>> listTypeReference = new TypeReference<List<JsonNode>>() {
                    };
                    List<JsonNode> rootNode = null;
                    try {
                        rootNode = objectMapper.readValue(JsonUtil.toJSONString(apiDefinition.getRequest().getBody().get("kvs")).toString(), listTypeReference);
                    } catch (Exception e) {
                        e.printStackTrace();
                        DEException.throwException(e);
                    }
                    for (JsonNode jsonNode : rootNode) {
                        if (jsonNode.has("name")) {
                            body.put(jsonNode.get("name").asText(), jsonNode.get("value").asText());
                        }
                    }
                    response = HttpClientUtil.post(apiDefinition.getUrl(), body, httpClientConfig);
                }
            }
            break;
        default:
            break;
    }
    return response;
}
/**
 * Validates an API definition against a live response and populates its JSON
 * field metadata ({@code jsonFields}).
 *
 * Two modes:
 * - structure-discovery (showApiStructure, or JsonPath not in use): recursively
 *   walks the whole JSON tree via handleStr(...) and records every field;
 * - JsonPath mode: evaluates the configured JsonPath, treats the result as rows,
 *   derives the column set from the first row and collects up to previewNum
 *   preview values per column (all typed STRING).
 *
 * @param apiDefinition definition to validate; mutated (jsonFields set) and returned
 * @param response      raw response body of the API call
 * @throws RuntimeException via DEException on empty responses, unparsable JSON,
 *         or non-row-shaped JsonPath results
 */
public static ApiDefinition checkApiDefinition(ApiDefinition apiDefinition, String response) throws DEException {
    if (StringUtils.isEmpty(response)) {
        DEException.throwException("该请求返回数据为空");
    }
    List<Map<String, Object>> fields = new ArrayList<>();
    if (apiDefinition.isShowApiStructure() || !apiDefinition.isUseJsonPath()) {
        // Structure-discovery mode: derive the field tree from the raw response.
        String rootPath;
        if (response.startsWith("[")) {
            // Top-level array: every element contributes fields under $[*].
            rootPath = "$[*]";
            JsonNode jsonArray = null;
            try {
                jsonArray = objectMapper.readTree(response);
            } catch (Exception e) {
                DEException.throwException(e);
            }
            for (Object o : jsonArray) {
                handleStr(apiDefinition, o.toString(), fields, rootPath);
            }
        } else {
            rootPath = "$";
            handleStr(apiDefinition, response, fields, rootPath);
        }
        apiDefinition.setJsonFields(fields);
        return apiDefinition;
    } else {
        // JsonPath mode: extract the row set addressed by the configured path.
        List<LinkedHashMap> currentData = new ArrayList<>();
        try {
            Object object = JsonPath.read(response, apiDefinition.getJsonPath());
            if (object instanceof List) {
                currentData = (List<LinkedHashMap>) object;
            } else {
                // Single object: wrap it as a one-row result.
                currentData.add((LinkedHashMap) object);
            }
        } catch (Exception e) {
            DEException.throwException(e);
        }
        int i = 0;
        try {
            // Probe the first row purely to fail fast with a clear message when the
            // extracted data is empty or not row-shaped; the value itself is unused.
            LinkedHashMap data = currentData.get(0);
        }catch (Exception e){
            DEException.throwException("数据不符合规范, " + e.getMessage());
        }
        for (LinkedHashMap data : currentData) {
            if (i >= apiDefinition.getPreviewNum()) {
                // Only the first previewNum rows are sampled.
                break;
            }
            if (i == 0) {
                // The first row defines the column set; every column is previewed as STRING.
                for (Object o : data.keySet()) {
                    Map<String, Object> field = new HashMap<>();
                    field.put("originName", o.toString());
                    field.put("name", o.toString());
                    field.put("type", "STRING");
                    field.put("checked", true);
                    field.put("size", 65535);
                    field.put("deExtractType", 0);
                    field.put("deType", 0);
                    field.put("extField", 0);
                    fields.add(field);
                }
            }
            // Append this row's value for each column to the column's preview array,
            // flattening newlines so previews stay single-line.
            for (Map<String, Object> field : fields) {
                JSONArray array = new JSONArray();
                if (field.get("value") != null) {
                    // Re-parse the accumulated preview array before appending.
                    try {
                        TypeReference<JSONArray> listTypeReference = new TypeReference<JSONArray>() {
                        };
                        array = objectMapper.readValue(field.get("value").toString(), listTypeReference);
                    } catch (Exception e) {
                        e.printStackTrace();
                        DEException.throwException(e);
                    }
                    array.add(Optional.ofNullable(data.get(field.get("originName"))).orElse("").toString().replaceAll("\n", " ").replaceAll("\r", " "));
                } else {
                    array.add(Optional.ofNullable(data.get(field.get("originName"))).orElse("").toString().replaceAll("\n", " ").replaceAll("\r", " "));
                }
                field.put("value", array);
            }
            i++;
        }
        apiDefinition.setJsonFields(fields);
        return apiDefinition;
    }
}
/**
 * Recursively walks a JSON string and appends a metadata map to {@code fields}
 * for every field found, tracking each field's JsonPath relative to {@code rootPath}.
 * Arrays recurse per element; objects recurse into their members as "children";
 * scalars record a single preview value. Duplicate paths are merged via hasItem(...).
 *
 * @param apiDefinition supplies previously-saved field selections (see setProperty)
 * @param jsonStr       the JSON fragment to inspect
 * @param fields        output list of field descriptors (mutated)
 * @param rootPath      JsonPath prefix of this fragment
 */
private static void handleStr(ApiDefinition apiDefinition, String jsonStr, List<Map<String, Object>> fields, String rootPath) throws DEException {
    if (jsonStr.startsWith("[")) {
        // JSON array: recurse into every element under the same rootPath.
        TypeReference<List<Object>> listTypeReference = new TypeReference<List<Object>>() {
        };
        List<Object> jsonArray = null;
        try {
            jsonArray = objectMapper.readValue(jsonStr, listTypeReference);
        } catch (Exception e) {
            DEException.throwException(e);
        }
        for (Object o : jsonArray) {
            handleStr(apiDefinition, o.toString(), fields, rootPath);
        }
    } else {
        JsonNode jsonNode = null;
        try {
            jsonNode = objectMapper.readTree(jsonStr);
        } catch (Exception e) {
            DEException.throwException(e);
        }
        Iterator<String> fieldNames = jsonNode.fieldNames();
        while (fieldNames.hasNext()) {
            String fieldName = fieldNames.next();
            String value = jsonNode.get(fieldName).toString();
            if (StringUtils.isNotEmpty(value) && !value.startsWith("[") && !value.startsWith("{")) {
                // Scalar: use the unquoted text form for the preview value.
                value = jsonNode.get(fieldName).asText();
            }
            if (StringUtils.isNotEmpty(value) && value.startsWith("[")) {
                // Array-valued field: try to treat every element as nested JSON and
                // record them as LIST children; a non-JSON element aborts via the
                // thrown exception and the raw text is kept as a preview value instead.
                Map<String, Object> o = new HashMap<>();
                try {
                    JsonNode jsonArray = objectMapper.readTree(value);
                    List<Map<String, Object>> childrenField = new ArrayList<>();
                    for (JsonNode node : jsonArray) {
                        if (StringUtils.isNotEmpty(node.toString()) && !node.toString().startsWith("[") && !node.toString().startsWith("{")) {
                            throw new Exception(node + "is not json type");
                        }
                    }
                    for (JsonNode node : jsonArray) {
                        handleStr(apiDefinition, node.toString(), childrenField, rootPath + "." + fieldName + "[*]");
                    }
                    o.put("children", childrenField);
                    o.put("childrenDataType", "LIST");
                } catch (Exception e) {
                    // Not an array of objects; keep the raw text as the preview value.
                    JSONArray array = new JSONArray();
                    array.add(StringUtils.isNotEmpty(jsonNode.get(fieldName).toString()) ? jsonNode.get(fieldName).toString() : "");
                    o.put("value", array);
                }
                o.put("jsonPath", rootPath + "." + String.format(path, fieldName));
                setProperty(apiDefinition, o, fieldName);
                if (!hasItem(apiDefinition, fields, o)) {
                    fields.add(o);
                }
            } else if (StringUtils.isNotEmpty(value) && value.startsWith("{")) {
                // Object-valued field: recurse into its members as OBJECT children.
                try {
                    JsonNode jsonNode1 = objectMapper.readTree(value);
                    List<Map<String, Object>> children = new ArrayList<>();
                    handleStr(apiDefinition, value, children, rootPath + "." + String.format(path, fieldName));
                    Map<String, Object> o = new HashMap<>();
                    o.put("children", children);
                    o.put("childrenDataType", "OBJECT");
                    o.put("jsonPath", rootPath + "." + fieldName);
                    setProperty(apiDefinition, o, fieldName);
                    if (!hasItem(apiDefinition, fields, o)) {
                        fields.add(o);
                    }
                } catch (Exception e) {
                    // Malformed object: fall back to recording the raw text as a scalar.
                    Map<String, Object> o = new HashMap<>();
                    o.put("jsonPath", rootPath + "." + String.format(path, fieldName));
                    setProperty(apiDefinition, o, fieldName);
                    JSONArray array = new JSONArray();
                    array.add(StringUtils.isNotEmpty(value) ? value : "");
                    o.put("value", array);
                    if (!hasItem(apiDefinition, fields, o)) {
                        fields.add(o);
                    }
                }
            } else {
                // Plain scalar field.
                Map<String, Object> o = new HashMap<>();
                o.put("jsonPath", rootPath + "." + String.format(path, fieldName));
                setProperty(apiDefinition, o, fieldName);
                JSONArray array = new JSONArray();
                array.add(StringUtils.isNotEmpty(value) ? value : "");
                o.put("value", array);
                if (!hasItem(apiDefinition, fields, o)) {
                    fields.add(o);
                }
            }
        }
    }
}
/**
 * Fills the default metadata for a discovered JSON field (STRING type, unchecked)
 * and, when the definition is not in JsonPath mode, re-applies the user's saved
 * selection (checked flag, display name, extract type) for the same jsonPath.
 *
 * @param apiDefinition source of previously-saved field selections
 * @param o             the field descriptor to populate (must already carry "jsonPath")
 * @param s             the raw JSON field name
 */
private static void setProperty(ApiDefinition apiDefinition, Map<String, Object> o, String s) {
    o.put("originName", s);
    o.put("name", s);
    o.put("type", "STRING");
    o.put("size", 65535);
    o.put("deExtractType", 0);
    o.put("deType", 0);
    o.put("checked", false);
    if (apiDefinition.isUseJsonPath()) {
        return;
    }
    Object jsonPath = o.get("jsonPath");
    if (ObjectUtils.isEmpty(jsonPath)) {
        return;
    }
    // Restore the saved selection for this path, if the user configured one.
    for (TableField saved : apiDefinition.getFields()) {
        if (StringUtils.isNotEmpty(saved.getJsonPath()) && saved.getJsonPath().equals(jsonPath.toString())) {
            o.put("checked", true);
            o.put("name", saved.getName());
            o.put("deExtractType", saved.getDeExtractType());
        }
    }
}
/**
 * Checks whether a field with the same jsonPath was already collected; when it
 * was, merges the new item's children and preview values into the existing entry.
 *
 * @return true when an existing entry absorbed the item (caller must not add it again)
 */
private static boolean hasItem(ApiDefinition apiDefinition, List<Map<String, Object>> fields, Map<String, Object> item) throws DEException {
    for (Map<String, Object> existing : fields) {
        if (existing.get("jsonPath").equals(item.get("jsonPath"))) {
            mergeField(existing, item);
            mergeValue(existing, apiDefinition, item);
            return true;
        }
    }
    return false;
}
/**
 * Merges the item's child-field descriptors into the existing field's children,
 * keyed by jsonPath: matching children are merged recursively, new ones appended.
 * No-op when the item has no children.
 *
 * NOTE(review): the merged fieldChildren list is never written back into
 * {@code field} here — presumably mergeValue(...) covers the value side; confirm
 * whether the child-structure merge is intentionally discarded.
 */
private static void mergeField(Map<String, Object> field, Map<String, Object> item) throws DEException {
    if (item.get("children") != null) {
        // Round-trip through JSON to get independent, uniformly-typed child lists.
        List<Map<String, Object>> fieldChildren = null;
        List<Map<String, Object>> itemChildren = null;
        try {
            fieldChildren = objectMapper.readValue(JsonUtil.toJSONString(field.get("children")).toString(), listForMapTypeReference);
            itemChildren = objectMapper.readValue(JsonUtil.toJSONString(item.get("children")).toString(), listForMapTypeReference);
        } catch (Exception e) {
            DEException.throwException(e);
        }
        if (fieldChildren == null) {
            fieldChildren = new ArrayList<>();
        }
        for (Map<String, Object> itemChild : itemChildren) {
            boolean hasKey = false;
            for (Map<String, Object> fieldChild : fieldChildren) {
                if (itemChild.get("jsonPath").toString().equals(fieldChild.get("jsonPath").toString())) {
                    // Same path on both sides: merge recursively.
                    mergeField(fieldChild, itemChild);
                    hasKey = true;
                }
            }
            if (!hasKey) {
                // Path only present in the item: append it.
                fieldChildren.add(itemChild);
            }
        }
    }
}
/**
 * Merges sample values of {@code item} into the matching entry {@code field}:
 * the duplicate's first sample value is appended to the existing "value"
 * array, and children are merged recursively by jsonPath.
 */
private static void mergeValue(Map<String, Object> field, ApiDefinition apiDefinition, Map<String, Object> item) throws DEException {
    TypeReference<JSONArray> listTypeReference = new TypeReference<JSONArray>() {
    };
    try {
        if (!ObjectUtils.isEmpty(field.get("value")) && !ObjectUtils.isEmpty(item.get("value"))) {
            JSONArray array = objectMapper.readValue(JsonUtil.toJSONString(field.get("value")).toString(), listTypeReference);
            array.add(objectMapper.readValue(JsonUtil.toJSONString(item.get("value")).toString(), listTypeReference).get(0));
            field.put("value", array);
        }
        if (!ObjectUtils.isEmpty(field.get("children")) && !ObjectUtils.isEmpty(item.get("children"))) {
            List<Map<String, Object>> fieldChildren = objectMapper.readValue(JsonUtil.toJSONString(field.get("children")).toString(), listForMapTypeReference);
            List<Map<String, Object>> itemChildren = objectMapper.readValue(JsonUtil.toJSONString(item.get("children")).toString(), listForMapTypeReference);
            List<Map<String, Object>> mergedChildren = new ArrayList<>();
            for (Map<String, Object> fieldChild : fieldChildren) {
                Map<String, Object> match = null;
                for (Map<String, Object> itemChild : itemChildren) {
                    if (fieldChild.get("jsonPath").toString().equals(itemChild.get("jsonPath").toString())) {
                        match = itemChild;
                    }
                }
                if (match != null) {
                    mergeValue(fieldChild, apiDefinition, match);
                }
                mergedChildren.add(fieldChild);
            }
            field.put("children", mergedChildren);
        }
    } catch (Exception e) {
        // Removed printStackTrace(): DEException.throwException already propagates the cause.
        DEException.throwException(e);
    }
}
/**
 * Converts a raw API response into rows for the configured fields.
 * In whole-result JsonPath mode the configured path must select a list (or a
 * single object) of records; otherwise each field's own jsonPath is evaluated
 * column by column and the columns are zipped into rows, padding short columns
 * with nulls. Newlines in values are flattened to spaces.
 */
private static List<String[]> fetchResult(String result, ApiDefinition apiDefinition) {
    List<String[]> dataList = new LinkedList<>();
    if (apiDefinition.isUseJsonPath()) {
        List<LinkedHashMap> currentData = new ArrayList<>();
        Object object = JsonPath.read(result, apiDefinition.getJsonPath());
        if (object instanceof List) {
            currentData = (List<LinkedHashMap>) object;
        } else {
            currentData.add((LinkedHashMap) object);
        }
        for (LinkedHashMap data : currentData) {
            String[] row = new String[apiDefinition.getFields().size()];
            int i = 0;
            for (TableField field : apiDefinition.getFields()) {
                row[i] = Optional.ofNullable(data.get(field.getName())).orElse("").toString().replaceAll("\n", " ").replaceAll("\r", " ");
                i++;
            }
            dataList.add(row);
        }
    } else {
        List<String> jsonPaths = apiDefinition.getFields().stream().map(TableField::getJsonPath).collect(Collectors.toList());
        // int + Math.max instead of a boxed Long compared against int sizes.
        int maxLength = 0;
        List<List<String>> columnDataList = new ArrayList<>();
        for (int i = 0; i < jsonPaths.size(); i++) {
            List<String> data = new ArrayList<>();
            Object object = JsonPath.read(result, jsonPaths.get(i));
            if (object instanceof List && jsonPaths.get(i).contains("[*]")) {
                data = (List<String>) object;
            } else {
                if (object != null) {
                    data.add(object.toString());
                }
            }
            maxLength = Math.max(maxLength, data.size());
            columnDataList.add(data);
        }
        for (int i = 0; i < maxLength; i++) {
            String[] row = new String[apiDefinition.getFields().size()];
            dataList.add(row);
        }
        for (int i = 0; i < columnDataList.size(); i++) {
            for (int j = 0; j < columnDataList.get(i).size(); j++) {
                // String.valueOf never returns null, so no Optional wrapper is needed.
                dataList.get(j)[i] = String.valueOf(columnDataList.get(i).get(j)).replaceAll("\n", " ").replaceAll("\r", " ");
            }
        }
    }
    return dataList;
}
/**
 * Resolves the single API definition matching the requested table (by de-table
 * name or display name) from the datasource configuration JSON. Throws when no
 * definition or more than one definition matches.
 */
private static ApiDefinition checkApiDefinition(DatasourceRequest datasourceRequest) throws DEException {
    TypeReference<List<ApiDefinition>> listTypeReference = new TypeReference<List<ApiDefinition>>() {
    };
    List<ApiDefinition> allDefinitions = null;
    try {
        allDefinitions = objectMapper.readValue(datasourceRequest.getDatasource().getConfiguration(), listTypeReference);
    } catch (Exception e) {
        DEException.throwException(e);
    }
    String table = datasourceRequest.getTable();
    List<ApiDefinition> matches = new ArrayList<>();
    if (!CollectionUtils.isEmpty(allDefinitions)) {
        matches = allDefinitions.stream()
                .filter(def -> def.getDeTableName().equalsIgnoreCase(table) || def.getName().equalsIgnoreCase(table))
                .collect(Collectors.toList());
    }
    if (CollectionUtils.isEmpty(matches)) {
        DEException.throwException("未找到API数据表");
    }
    if (matches.size() > 1) {
        DEException.throwException("存在重名的API数据表");
    }
    ApiDefinition found = null;
    for (ApiDefinition def : matches) {
        if (def.getName().equalsIgnoreCase(table) || def.getDeTableName().equalsIgnoreCase(table)) {
            found = def;
        }
    }
    return found;
}
}

View File

@ -0,0 +1,726 @@
package io.dataease.datasource.provider;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import io.dataease.api.dataset.dto.DatasetTableDTO;
import io.dataease.api.ds.vo.DatasourceConfiguration;
import io.dataease.api.ds.vo.DatasourceConfiguration.DatasourceType;
import io.dataease.api.ds.vo.DatasourceDTO;
import io.dataease.api.ds.vo.TableField;
import io.dataease.commons.utils.CommonThreadPool;
import io.dataease.dataset.dto.DatasourceSchemaDTO;
import io.dataease.dataset.utils.FieldUtils;
import io.dataease.datasource.dao.auto.entity.CoreDatasource;
import io.dataease.datasource.dao.auto.entity.CoreDriver;
import io.dataease.datasource.dao.auto.mapper.CoreDatasourceMapper;
import io.dataease.datasource.request.DatasourceRequest;
import io.dataease.datasource.server.EngineServer;
import io.dataease.datasource.type.*;
import io.dataease.engine.constant.SQLConstants;
import io.dataease.engine.func.scalar.ScalarFunctions;
import io.dataease.exception.DEException;
import io.dataease.i18n.Translator;
import io.dataease.utils.BeanUtils;
import io.dataease.utils.CommonBeanFactory;
import io.dataease.utils.JsonUtil;
import io.dataease.utils.LogUtil;
import jakarta.annotation.PostConstruct;
import jakarta.annotation.Resource;
import org.apache.calcite.adapter.jdbc.JdbcSchema;
import org.apache.calcite.jdbc.CalciteConnection;
import org.apache.calcite.schema.Schema;
import org.apache.calcite.schema.SchemaPlus;
import org.apache.calcite.schema.impl.ScalarFunctionImpl;
import org.apache.commons.dbcp2.BasicDataSource;
import org.apache.commons.lang3.StringUtils;
import org.springframework.stereotype.Component;
import org.springframework.util.CollectionUtils;
import java.io.File;
import java.io.IOException;
import java.lang.reflect.Method;
import java.net.URL;
import java.sql.*;
import java.util.*;
import java.util.stream.Collectors;
@Component("calciteProvider")
public class CalciteProvider {
//TODO mongo impala es hive
@Resource
protected CoreDatasourceMapper coreDatasourceMapper;
@Resource
private EngineServer engineServer;
protected ExtendedJdbcClassLoader extendedJdbcClassLoader;
private Map<Long, ExtendedJdbcClassLoader> customJdbcClassLoaders = new HashMap<>();
private final String FILE_PATH = "/opt/dataease/drivers";
private final String CUSTOM_PATH = "/opt/dataease/custom-drivers/";
private static String split = "DE";
@Resource
private CommonThreadPool commonThreadPool;
@PostConstruct
public void init() throws Exception {
    // Builds the shared classloader over /opt/dataease/drivers and loads every
    // jar found there so JDBC drivers become resolvable.
    try {
        String jarPath = FILE_PATH;
        ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
        extendedJdbcClassLoader = new ExtendedJdbcClassLoader(new URL[]{new File(jarPath).toURI().toURL()}, classLoader);
        File[] jars = new File(jarPath).listFiles();
        if (jars != null) {
            for (File jar : jars) {
                if (jar.getName().endsWith(".jar")) {
                    try {
                        extendedJdbcClassLoader.addFile(jar);
                    } catch (IOException e) {
                        LogUtil.error("Failed to load driver jar " + jar.getName() + ": " + e.getMessage());
                    }
                }
            }
        }
    } catch (Exception e) {
        // Previously swallowed silently — at least record why driver loading failed.
        LogUtil.error("Failed to init extended JDBC class loader: " + e.getMessage());
    }
}
/** Lists the schema names of the target datasource via its dialect-specific schema SQL. */
public List<String> getSchema(DatasourceRequest datasourceRequest) {
    List<String> schemas = new ArrayList<>();
    String schemaSql = getSchemaSql(datasourceRequest.getDatasource());
    try (Connection con = getConnection(datasourceRequest.getDatasource());
         Statement statement = getStatement(con, 30);
         ResultSet rs = statement.executeQuery(schemaSql)) {
        while (rs.next()) {
            schemas.add(rs.getString(1));
        }
    } catch (Exception e) {
        DEException.throwException(e);
    }
    return schemas;
}
/** Lists the datasource's tables by running each dialect-specific listing statement. */
public List<DatasetTableDTO> getTables(DatasourceRequest datasourceRequest) {
    List<DatasetTableDTO> tables = new ArrayList<>();
    for (String listSql : getTablesSql(datasourceRequest)) {
        try (Connection con = getConnection(datasourceRequest.getDatasource());
             Statement statement = getStatement(con, 30);
             ResultSet rs = statement.executeQuery(listSql)) {
            while (rs.next()) {
                tables.add(getTableDesc(datasourceRequest, rs));
            }
        } catch (Exception e) {
            DEException.throwException(e);
        }
    }
    return tables;
}
/** Builds a table descriptor from the first column of the current result row. */
private DatasetTableDTO getTableDesc(DatasourceRequest datasourceRequest, ResultSet resultSet) throws SQLException {
    String tableName = resultSet.getString(1);
    DatasetTableDTO dto = new DatasetTableDTO();
    dto.setDatasourceId(datasourceRequest.getDatasource().getId());
    dto.setType("db");
    dto.setTableName(tableName);
    dto.setName(tableName);
    return dto;
}
/** Collects the JDBC driver class names declared by every registered datasource configuration bean. */
private List<String> getDriver() {
    Map<String, DatasourceConfiguration> beans =
            CommonBeanFactory.getApplicationContext().getBeansOfType(DatasourceConfiguration.class);
    return beans.values().stream()
            .map(DatasourceConfiguration::getDriver)
            .collect(Collectors.toList());
}
/**
 * Validates connectivity of the datasource: for Postgres the configured schema
 * must exist; for every type the first table-listing statement must execute.
 * Returns "Success" when the probe succeeds.
 */
public String checkStatus(DatasourceRequest datasourceRequest) throws Exception {
    DatasourceConfiguration.DatasourceType datasourceType =
            DatasourceConfiguration.DatasourceType.valueOf(datasourceRequest.getDatasource().getType());
    if (datasourceType == DatasourceConfiguration.DatasourceType.pg) {
        // Postgres additionally requires a valid schema to be configured.
        DatasourceConfiguration configuration = JsonUtil.parseObject(datasourceRequest.getDatasource().getConfiguration(), Pg.class);
        List<String> schemas = getSchema(datasourceRequest);
        if (CollectionUtils.isEmpty(schemas) || !schemas.contains(configuration.getSchema())) {
            DEException.throwException("无效的 schema");
        }
    }
    String querySql = getTablesSql(datasourceRequest).get(0);
    try (Connection con = getConnection(datasourceRequest.getDatasource());
         Statement statement = getStatement(con, 30);
         ResultSet ignored = statement.executeQuery(querySql)) {
        // Result intentionally discarded — reaching this point means the datasource answered.
    } catch (Exception e) {
        DEException.throwException(e);
    }
    return "Success";
}
/**
 * Field metadata lookup is not implemented for the Calcite provider; field
 * information is produced by {@code fetchResultField} instead.
 * NOTE(review): returns null rather than an empty list — confirm all callers
 * handle a null result before changing this.
 */
public List<TableField> getTableFields(DatasourceRequest datasourceRequest) throws Exception {
    return null;
}
/**
 * Executes the query from {@code datasourceRequest} on the shared Calcite
 * connection and returns a map with "fields" (column metadata as TableField)
 * and "data" (rows rendered as String[]).
 */
public Map<String, Object> fetchResultField(DatasourceRequest datasourceRequest) throws DEException {
    List<TableField> datasetTableFields = new ArrayList<>();
    List<String[]> list = new LinkedList<>();
    Connection connection = take(); // shared long-lived connection — must NOT be closed here
    try {
        CalciteConnection calciteConnection = connection.unwrap(CalciteConnection.class);
        // try-with-resources replaces the manual finally block that swallowed close() failures.
        try (PreparedStatement statement = calciteConnection.prepareStatement(datasourceRequest.getQuery());
             ResultSet resultSet = statement.executeQuery()) {
            ResultSetMetaData metaData = resultSet.getMetaData();
            int columnCount = metaData.getColumnCount();
            for (int i = 1; i <= columnCount; i++) {
                TableField tableField = new TableField();
                tableField.setOriginName(metaData.getColumnLabel(i));
                tableField.setType(metaData.getColumnTypeName(i));
                tableField.setPrecision(metaData.getPrecision(i));
                int deType = FieldUtils.transType2DeType(tableField.getType());
                tableField.setDeExtractType(deType);
                tableField.setDeType(deType);
                tableField.setScale(metaData.getScale(i));
                datasetTableFields.add(tableField);
            }
            list = getDataResult(resultSet);
        }
    } catch (Exception e) {
        DEException.throwException(Translator.get("i18n_fetch_error") + e.getMessage());
    }
    Map<String, Object> map = new LinkedHashMap<>();
    map.put("fields", datasetTableFields);
    map.put("data", list);
    return map;
}
/**
 * Creates a Calcite connection whose root schema contains a sub-schema for
 * every datasource in {@code dsMap}, with the custom scalar functions registered.
 */
public Connection initConnection(Map<Long, DatasourceSchemaDTO> dsMap) {
    Connection connection = getCalciteConnection();
    CalciteConnection calciteConnection = null;
    try {
        calciteConnection = connection.unwrap(CalciteConnection.class);
    } catch (Exception e) {
        DEException.throwException(e);
    }
    DatasourceRequest request = new DatasourceRequest();
    request.setDsList(dsMap);
    addCustomFunctions(buildSchema(request, calciteConnection));
    return connection;
}
/** Registers every public method of ScalarFunctions as an upper-cased scalar SQL function. */
private void addCustomFunctions(SchemaPlus rootSchema) {
    for (Method method : ScalarFunctions.class.getMethods()) {
        rootSchema.add(method.getName().toUpperCase(),
                ScalarFunctionImpl.create(ScalarFunctions.class, method.getName()));
    }
}
/**
 * Loads every known driver class through the extended classloader and
 * registers it with DriverManager via DriverShim (DriverManager only accepts
 * drivers visible to the caller's classloader).
 */
private void registerDriver() {
    for (String driverClass : getDriver()) {
        try {
            // getDeclaredConstructor().newInstance() replaces the deprecated Class.newInstance().
            Driver driver = (Driver) extendedJdbcClassLoader.loadClass(driverClass).getDeclaredConstructor().newInstance();
            DriverManager.registerDriver(new DriverShim(driver));
        } catch (Exception e) {
            LogUtil.error("Failed to register JDBC driver " + driverClass + ": " + e.getMessage());
        }
    }
}
/** Opens a fresh root Calcite JDBC connection configured with DataEase parser settings. */
private Connection getCalciteConnection() {
    registerDriver();
    Properties info = new Properties();
    info.setProperty("lex", "JAVA");
    info.setProperty("caseSensitive", "false");
    info.setProperty("remarks", "true");
    info.setProperty("parserFactory", "org.apache.calcite.sql.parser.impl.SqlParserImpl#FACTORY");
    Connection connection = null;
    try {
        Class.forName("org.apache.calcite.jdbc.Driver");
        connection = DriverManager.getConnection("jdbc:calcite:", info);
    } catch (Exception e) {
        DEException.throwException(e);
    }
    return connection;
}
/**
 * (Re)registers every datasource in the request as a Calcite sub-schema on the
 * root schema. Registration runs asynchronously on the shared thread pool; an
 * existing sub-schema with the same alias is closed and replaced first.
 */
private SchemaPlus buildSchema(DatasourceRequest datasourceRequest, CalciteConnection calciteConnection) {
    SchemaPlus rootSchema = calciteConnection.getRootSchema();
    Map<Long, DatasourceSchemaDTO> dsList = datasourceRequest.getDsList();
    for (Map.Entry<Long, DatasourceSchemaDTO> next : dsList.entrySet()) {
        DatasourceSchemaDTO ds = next.getValue();
        commonThreadPool.addTask(() -> {
            try {
                closeExistingSubSchema(rootSchema, ds.getSchemaAlias());
                DatasourceConfiguration configuration = parseConfiguration(ds);
                BasicDataSource dataSource = createDataSource(configuration);
                Schema schema = JdbcSchema.create(rootSchema, ds.getSchemaAlias(), dataSource, null, schemaOrDatabase(ds, configuration));
                rootSchema.add(ds.getSchemaAlias(), schema);
            } catch (Exception e) {
                LogUtil.error("Failed to register schema for datasource " + ds.getId() + ": " + e.getMessage());
            }
        });
    }
    return rootSchema;
}

/** Closes and removes a previously registered sub-schema so its DBCP pool does not leak. */
private void closeExistingSubSchema(SchemaPlus rootSchema, String schemaAlias) throws SQLException {
    if (rootSchema.getSubSchema(schemaAlias) != null) {
        JdbcSchema jdbcSchema = rootSchema.getSubSchema(schemaAlias).unwrap(JdbcSchema.class);
        BasicDataSource basicDataSource = (BasicDataSource) jdbcSchema.getDataSource();
        basicDataSource.close();
        rootSchema.removeSubSchema(schemaAlias);
    }
}

/** Parses the raw configuration JSON into the concrete configuration type for the datasource. */
private DatasourceConfiguration parseConfiguration(DatasourceSchemaDTO ds) {
    switch (DatasourceType.valueOf(ds.getType())) {
        case impala:
            return JsonUtil.parseObject(ds.getConfiguration(), Impala.class);
        case sqlServer:
            return JsonUtil.parseObject(ds.getConfiguration(), Sqlserver.class);
        case oracle:
            return JsonUtil.parseObject(ds.getConfiguration(), Oracle.class);
        case db2:
            return JsonUtil.parseObject(ds.getConfiguration(), Db2.class);
        case ck:
            return JsonUtil.parseObject(ds.getConfiguration(), CK.class);
        case pg:
            return JsonUtil.parseObject(ds.getConfiguration(), Pg.class);
        case redshift:
            return JsonUtil.parseObject(ds.getConfiguration(), Redshift.class);
        case h2:
            return JsonUtil.parseObject(ds.getConfiguration(), H2.class);
        case mysql:
        case mongo:
        case mariadb:
        case TiDB:
        case StarRocks:
        case doris:
        default:
            // MySQL-compatible family and unknown types fall back to the Mysql configuration.
            return JsonUtil.parseObject(ds.getConfiguration(), Mysql.class);
    }
}

/** Builds the DBCP pool with the connection/pool settings that every branch previously duplicated verbatim. */
private BasicDataSource createDataSource(DatasourceConfiguration configuration) {
    BasicDataSource dataSource = new BasicDataSource();
    dataSource.setUrl(configuration.getJdbc());
    dataSource.setUsername(configuration.getUsername());
    dataSource.setPassword(configuration.getPassword());
    dataSource.setDefaultQueryTimeout(Integer.valueOf(configuration.getQueryTimeout()));
    dataSource.setInitialSize(configuration.getInitialPoolSize());
    dataSource.setMaxTotal(configuration.getMaxPoolSize());
    dataSource.setMinIdle(configuration.getMinPoolSize());
    return dataSource;
}

/** sqlServer/oracle/db2/pg/redshift scope tables by schema; all other types by database. */
private String schemaOrDatabase(DatasourceSchemaDTO ds, DatasourceConfiguration configuration) {
    switch (DatasourceType.valueOf(ds.getType())) {
        case sqlServer:
        case oracle:
        case db2:
        case pg:
        case redshift:
            return configuration.getSchema();
        default:
            return configuration.getDataBase();
    }
}
/**
 * Renders every row of the result set as a String[]: DATE columns via
 * toString, BOOLEAN as "1"/"0", blob columns via Blob.toString, everything
 * else via getString.
 */
private List<String[]> getDataResult(ResultSet rs) {
    List<String[]> list = new LinkedList<>();
    try {
        ResultSetMetaData metaData = rs.getMetaData();
        int columnCount = metaData.getColumnCount();
        while (rs.next()) {
            String[] row = new String[columnCount];
            for (int j = 0; j < columnCount; j++) {
                int columnType = metaData.getColumnType(j + 1);
                switch (columnType) {
                    case Types.DATE:
                        if (rs.getDate(j + 1) != null) {
                            row[j] = rs.getDate(j + 1).toString();
                        }
                        break;
                    case Types.BOOLEAN:
                        row[j] = rs.getBoolean(j + 1) ? "1" : "0";
                        break;
                    default:
                        // equalsIgnoreCase is already case-insensitive; the former toLowerCase() was redundant.
                        if (metaData.getColumnTypeName(j + 1).equalsIgnoreCase("blob")) {
                            row[j] = rs.getBlob(j + 1) == null ? "" : rs.getBlob(j + 1).toString();
                        } else {
                            row[j] = rs.getString(j + 1);
                        }
                        break;
                }
            }
            list.add(row);
        }
    } catch (Exception e) {
        DEException.throwException(e);
    }
    return list;
}
/**
 * Returns the dialect-specific SQL statement(s) that list the datasource's
 * tables (SQL Server produces two statements: views and base tables).
 * Schema-based dialects require a non-empty schema in the configuration.
 */
private List<String> getTablesSql(DatasourceRequest datasourceRequest) throws DEException {
    List<String> tableSqls = new ArrayList<>();
    DatasourceConfiguration.DatasourceType datasourceType = DatasourceConfiguration.DatasourceType.valueOf(datasourceRequest.getDatasource().getType());
    DatasourceConfiguration configuration = null;
    switch (datasourceType) {
        case mysql:
        case mongo:
        case mariadb:
        case TiDB:
        case StarRocks:
        case doris:
            configuration = JsonUtil.parseObject(datasourceRequest.getDatasource().getConfiguration(), Mysql.class);
            tableSqls.add(String.format("SELECT TABLE_NAME,TABLE_COMMENT FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_SCHEMA = '%s' ;", configuration.getDataBase()));
            break;
        case oracle:
            configuration = JsonUtil.parseObject(datasourceRequest.getDatasource().getConfiguration(), Oracle.class);
            if (StringUtils.isEmpty(configuration.getSchema())) {
                DEException.throwException(Translator.get("i18n_schema_is_empty"));
            }
            tableSqls.add("select table_name, owner, comments from all_tab_comments where owner='" + configuration.getSchema() + "' AND table_type = 'TABLE'");
            break;
        case db2:
            configuration = JsonUtil.parseObject(datasourceRequest.getDatasource().getConfiguration(), Db2.class);
            if (StringUtils.isEmpty(configuration.getSchema())) {
                DEException.throwException(Translator.get("i18n_schema_is_empty"));
            }
            tableSqls.add("select TABNAME from syscat.tables WHERE TABSCHEMA ='DE_SCHEMA' AND \"TYPE\" = 'T'".replace("DE_SCHEMA", configuration.getSchema()));
            break;
        case sqlServer:
            configuration = JsonUtil.parseObject(datasourceRequest.getDatasource().getConfiguration(), Sqlserver.class);
            if (StringUtils.isEmpty(configuration.getSchema())) {
                DEException.throwException(Translator.get("i18n_schema_is_empty"));
            }
            tableSqls.add("SELECT TABLE_NAME FROM \"DATABASE\".INFORMATION_SCHEMA.VIEWS WHERE TABLE_SCHEMA = 'DS_SCHEMA' ;"
                    .replace("DATABASE", configuration.getDataBase())
                    .replace("DS_SCHEMA", configuration.getSchema()));
            tableSqls.add("SELECT TABLE_NAME FROM \"DATABASE\".INFORMATION_SCHEMA.TABLES WHERE TABLE_TYPE = 'BASE TABLE' AND TABLE_SCHEMA = 'DS_SCHEMA' ;"
                    .replace("DATABASE", configuration.getDataBase())
                    .replace("DS_SCHEMA", configuration.getSchema()));
            break;
        case pg:
            configuration = JsonUtil.parseObject(datasourceRequest.getDatasource().getConfiguration(), Pg.class);
            if (StringUtils.isEmpty(configuration.getSchema())) {
                DEException.throwException(Translator.get("i18n_schema_is_empty"));
            }
            tableSqls.add("SELECT tablename FROM pg_tables WHERE schemaname='SCHEMA' ;".replace("SCHEMA", configuration.getSchema()));
            break;
        case redshift:
            // BUG FIX: redshift was parsed with CK.class; use Redshift.class, consistent with getConnection().
            configuration = JsonUtil.parseObject(datasourceRequest.getDatasource().getConfiguration(), Redshift.class);
            tableSqls.add("SELECT tablename FROM pg_tables WHERE schemaname='SCHEMA' ;".replace("SCHEMA", configuration.getSchema()));
            break;
        case ck:
            configuration = JsonUtil.parseObject(datasourceRequest.getDatasource().getConfiguration(), CK.class);
            tableSqls.add("SELECT name FROM system.tables where database='DATABASE';".replace("DATABASE", configuration.getDataBase()));
            break;
        default:
            tableSqls.add("show tables");
    }
    return tableSqls;
}
/**
 * Returns the dialect-specific SQL used to list schemas for the datasource.
 * NOTE(review): the default branch returns "show tables;", which lists tables
 * rather than schemas — presumably only schema-based types (oracle, sqlServer,
 * db2, pg, redshift) ever call this; confirm before relying on the default.
 */
private String getSchemaSql(CoreDatasource datasource) throws DEException {
    DatasourceConfiguration.DatasourceType datasourceType = DatasourceConfiguration.DatasourceType.valueOf(datasource.getType());
    switch (datasourceType) {
        case oracle:
            return "select * from all_users";
        case sqlServer:
            return "select name from sys.schemas;";
        case db2:
            // DB2 lists only schemas defined by the configured (upper-cased) user.
            DatasourceConfiguration configuration = JsonUtil.parseObject(datasource.getConfiguration(), Db2.class);
            return "select SCHEMANAME from syscat.SCHEMATA WHERE \"DEFINER\" ='USER'".replace("USER", configuration.getUsername().toUpperCase());
        case pg:
            return "SELECT nspname FROM pg_namespace;";
        case redshift:
            return "SELECT nspname FROM pg_namespace;";
        default:
            return "show tables;";
    }
}
/**
 * Opens a direct (non-Calcite) JDBC connection to the datasource by parsing
 * its configuration into the concrete type and instantiating the declared
 * driver through the extended classloader.
 */
public Connection getConnection(CoreDatasource coreDatasource) throws DEException {
    DatasourceConfiguration configuration = null;
    DatasourceConfiguration.DatasourceType datasourceType = DatasourceConfiguration.DatasourceType.valueOf(coreDatasource.getType());
    switch (datasourceType) {
        case mysql:
        case mongo:
        case StarRocks:
        case doris:
        case TiDB:
        case mariadb:
            configuration = JsonUtil.parseObject(coreDatasource.getConfiguration(), Mysql.class);
            break;
        case impala:
            configuration = JsonUtil.parseObject(coreDatasource.getConfiguration(), Impala.class);
            break;
        case sqlServer:
            configuration = JsonUtil.parseObject(coreDatasource.getConfiguration(), Sqlserver.class);
            break;
        case oracle:
            configuration = JsonUtil.parseObject(coreDatasource.getConfiguration(), Oracle.class);
            break;
        case db2:
            configuration = JsonUtil.parseObject(coreDatasource.getConfiguration(), Db2.class);
            break;
        case pg:
            configuration = JsonUtil.parseObject(coreDatasource.getConfiguration(), Pg.class);
            break;
        case redshift:
            configuration = JsonUtil.parseObject(coreDatasource.getConfiguration(), Redshift.class);
            break;
        case ck:
            configuration = JsonUtil.parseObject(coreDatasource.getConfiguration(), CK.class);
            break;
        case h2:
            configuration = JsonUtil.parseObject(coreDatasource.getConfiguration(), H2.class);
            break;
        default:
            configuration = JsonUtil.parseObject(coreDatasource.getConfiguration(), Mysql.class);
    }
    Properties props = new Properties();
    if (StringUtils.isNotBlank(configuration.getUsername())) {
        props.setProperty("user", configuration.getUsername());
        if (StringUtils.isNotBlank(configuration.getPassword())) {
            props.setProperty("password", configuration.getPassword());
        }
    }
    String driverClassName = configuration.getDriver();
    ExtendedJdbcClassLoader jdbcClassLoader = extendedJdbcClassLoader;
    Connection conn = null;
    try {
        // getDeclaredConstructor().newInstance() replaces the deprecated Class.newInstance().
        Driver driverClass = (Driver) jdbcClassLoader.loadClass(driverClassName).getDeclaredConstructor().newInstance();
        conn = driverClass.connect(configuration.getJdbc(), props);
    } catch (Exception e) {
        DEException.throwException(e);
    }
    return conn;
}
/** Creates a statement on the given connection with the supplied query timeout (seconds). */
public Statement getStatement(Connection connection, int queryTimeout) {
    if (connection == null) {
        DEException.throwException("Failed to get connection!");
    }
    Statement statement = null;
    try {
        statement = connection.createStatement();
        statement.setQueryTimeout(queryTimeout);
    } catch (Exception e) {
        DEException.throwException(e);
    }
    return statement;
}
/** A driver spec means "use the default classloader" when blank or literally "default" (any case). */
protected boolean isDefaultClassLoader(String customDriver) {
    return StringUtils.isEmpty(customDriver) || "default".equalsIgnoreCase(customDriver);
}
/**
 * Returns the cached classloader for the custom driver, rebuilding it when the
 * cache is empty or the cached loader was built for a different driver class.
 */
protected ExtendedJdbcClassLoader getCustomJdbcClassLoader(CoreDriver coreDriver) {
    if (coreDriver == null) {
        DEException.throwException("Can not found custom Driver");
    }
    ExtendedJdbcClassLoader cached = customJdbcClassLoaders.get(coreDriver.getId());
    if (cached != null
            && StringUtils.isNotEmpty(cached.getDriver())
            && cached.getDriver().equalsIgnoreCase(coreDriver.getDriverClass())) {
        return cached;
    }
    // Cache miss or the driver class changed: drop the stale entry and rebuild.
    customJdbcClassLoaders.remove(coreDriver.getId());
    return addCustomJdbcClassLoader(coreDriver);
}
/**
 * Builds, caches, and returns a classloader over the custom driver's jar
 * directory ({@code CUSTOM_PATH/<driverId>}). Synchronized so concurrent
 * callers do not build duplicate loaders for the same driver.
 */
private synchronized ExtendedJdbcClassLoader addCustomJdbcClassLoader(CoreDriver coreDriver) {
    ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
    // Walk up to the extension classloader so driver classes do not clash with
    // application classes loaded lower in the hierarchy.
    while (classLoader.getParent() != null) {
        classLoader = classLoader.getParent();
        if (classLoader.toString().contains("ExtClassLoader")) {
            break;
        }
    }
    try {
        ExtendedJdbcClassLoader customJdbcClassLoader = new ExtendedJdbcClassLoader(new URL[]{new File(CUSTOM_PATH + coreDriver.getId()).toURI().toURL()}, classLoader);
        customJdbcClassLoader.setDriver(coreDriver.getDriverClass());
        File file = new File(CUSTOM_PATH + coreDriver.getId());
        File[] array = file.listFiles();
        // Add every jar in the driver directory to the new loader.
        Optional.ofNullable(array).ifPresent(files -> {
            for (File tmp : array) {
                if (tmp.getName().endsWith(".jar")) {
                    try {
                        customJdbcClassLoader.addFile(tmp);
                    } catch (IOException e) {
                        e.printStackTrace();
                    }
                }
            }
        });
        customJdbcClassLoaders.put(coreDriver.getId(), customJdbcClassLoader);
        return customJdbcClassLoader;
    } catch (Exception e) {
        DEException.throwException(e);
    }
    // Reached only if throwException does not propagate.
    return null;
}
private Connection connection = null;
public static int capacity = 10;
/**
 * Collects every JDBC-backed datasource (excluding folder/API/Excel types,
 * plus the internal engine) and asynchronously initializes the shared Calcite
 * connection over them.
 */
public void initConnectionPool() {
    LogUtil.info("Begin to init datasource pool...");
    // Parameterized wrapper instead of a raw type.
    QueryWrapper<CoreDatasource> datasourceQueryWrapper = new QueryWrapper<>();
    List<CoreDatasource> coreDatasources = coreDatasourceMapper.selectList(datasourceQueryWrapper).stream()
            .filter(coreDatasource -> !Arrays.asList("folder", "API", "Excel").contains(coreDatasource.getType()))
            .collect(Collectors.toList());
    CoreDatasource engine = engineServer.deEngine();
    if (engine != null) {
        coreDatasources.add(engine);
    }
    Map<Long, DatasourceSchemaDTO> dsMap = new HashMap<>();
    for (CoreDatasource coreDatasource : coreDatasources) {
        DatasourceSchemaDTO datasourceSchemaDTO = new DatasourceSchemaDTO();
        BeanUtils.copyBean(datasourceSchemaDTO, coreDatasource);
        datasourceSchemaDTO.setSchemaAlias(String.format(SQLConstants.SCHEMA, datasourceSchemaDTO.getId()));
        dsMap.put(datasourceSchemaDTO.getId(), datasourceSchemaDTO);
    }
    LogUtil.info("dsMap size..." + dsMap.size());
    try {
        commonThreadPool.addTask(() -> {
            try {
                connection = initConnection(dsMap);
            } catch (Exception e) {
                // Previously printStackTrace; surface the failure in the application log.
                LogUtil.error("Failed to init calcite connection: " + e.getMessage());
            }
        });
    } catch (Exception e) {
        // Previously swallowed silently.
        LogUtil.error("Failed to submit connection init task: " + e.getMessage());
    }
}
/**
 * Re-registers the given datasource's sub-schema on the shared Calcite
 * connection after its configuration changed.
 */
public void update(DatasourceDTO datasourceDTO) throws DEException {
    DatasourceSchemaDTO dto = new DatasourceSchemaDTO();
    BeanUtils.copyBean(dto, datasourceDTO);
    dto.setSchemaAlias(String.format(SQLConstants.SCHEMA, dto.getId()));
    DatasourceRequest request = new DatasourceRequest();
    request.setDsList(Map.of(dto.getId(), dto));
    try {
        SchemaPlus rootSchema = buildSchema(request, connection.unwrap(CalciteConnection.class));
        addCustomFunctions(rootSchema);
    } catch (Exception e) {
        DEException.throwException(e);
    }
}
/**
 * Detaches the datasource's schema from the shared Calcite connection and
 * closes the DBCP pool backing it. A no-op when the schema is not registered.
 */
public void delete(CoreDatasource datasource) throws DEException {
    DatasourceSchemaDTO schemaDTO = new DatasourceSchemaDTO();
    BeanUtils.copyBean(schemaDTO, datasource);
    schemaDTO.setSchemaAlias(String.format(SQLConstants.SCHEMA, schemaDTO.getId()));
    try {
        CalciteConnection calciteConnection = connection.unwrap(CalciteConnection.class);
        SchemaPlus rootSchema = calciteConnection.getRootSchema();
        SchemaPlus subSchema = rootSchema.getSubSchema(schemaDTO.getSchemaAlias());
        if (subSchema != null) {
            // Close the underlying connection pool before dropping the schema.
            JdbcSchema jdbcSchema = subSchema.unwrap(JdbcSchema.class);
            BasicDataSource basicDataSource = (BasicDataSource) jdbcSchema.getDataSource();
            basicDataSource.close();
            rootSchema.removeSubSchema(schemaDTO.getSchemaAlias());
        }
    } catch (Exception e) {
        DEException.throwException(e);
    }
}
// Returns the shared Calcite connection built asynchronously by initConnectionPool().
// NOTE(review): the original comment said this should be guarded with
// synchronized (or CAS) against races, but no synchronization is present —
// the field is written once by the init task; confirm visibility is acceptable.
public Connection take() {
    if (connection == null) {
        DEException.throwException("初始化连接池失败!");
    }
    return connection;
}
}

View File

@ -0,0 +1,46 @@
package io.dataease.datasource.provider;
import java.sql.*;
import java.util.Properties;
import java.util.logging.Logger;
/**
 * Thin {@link Driver} wrapper used to register drivers loaded through a
 * custom classloader with {@link java.sql.DriverManager} (which rejects
 * drivers not visible to the caller's classloader). Every call delegates to
 * the wrapped driver — the original returned stub values (0 / empty array /
 * null) for the metadata methods, which misreported the real driver.
 */
public class DriverShim implements Driver {
    private final Driver driver;

    public DriverShim(Driver d) {
        this.driver = d;
    }

    @Override
    public boolean acceptsURL(String u) throws SQLException {
        return this.driver.acceptsURL(u);
    }

    @Override
    public DriverPropertyInfo[] getPropertyInfo(String url, Properties info) throws SQLException {
        // Delegate so tooling sees the real driver properties instead of an empty array.
        return this.driver.getPropertyInfo(url, info);
    }

    @Override
    public int getMajorVersion() {
        return this.driver.getMajorVersion();
    }

    @Override
    public int getMinorVersion() {
        return this.driver.getMinorVersion();
    }

    @Override
    public boolean jdbcCompliant() {
        return this.driver.jdbcCompliant();
    }

    @Override
    public Logger getParentLogger() throws SQLFeatureNotSupportedException {
        return this.driver.getParentLogger();
    }

    @Override
    public Connection connect(String u, Properties p) throws SQLException {
        return this.driver.connect(u, p);
    }
}

View File

@ -0,0 +1,31 @@
package io.dataease.datasource.provider;
import io.dataease.datasource.dao.auto.entity.CoreDeEngine;
import io.dataease.api.ds.vo.TableField;
import io.dataease.datasource.request.EngineRequest;
import io.dataease.exception.DEException;
import java.util.List;
/**
 * SQL-generation contract for a storage engine (see the concrete H2/MySQL
 * providers in this package). Implementations return engine-specific DDL/DML
 * strings; {@link #exec(EngineRequest)} actually runs a statement.
 *
 * @Author gin
 * @Date 2021/5/17 4:19 下午
 */
public abstract class EngineProvider extends CalciteProvider {
    /** SQL that creates or replaces view {@code name} defined by {@code viewSQL}. */
    public abstract String createView(String name, String viewSQL);

    /** SQL that drops table {@code name} if it exists. */
    public abstract String dropTable(String name);

    /** SQL that drops view {@code name} if it exists. */
    public abstract String dropView(String name);

    /** SQL that swaps the freshly built temp table into place of {@code name} (see concrete providers). */
    public abstract String replaceTable(String name);

    /** CREATE TABLE statement for {@code name} from the given field definitions. */
    public abstract String createTableSql(String name, List<TableField> tableFields, CoreDeEngine engine);

    /** Multi-row INSERT for one page of data (1-based {@code page}, {@code pageNumber} rows per page). */
    public abstract String insertSql(String name, List<String[]> dataList, int page, int pageNumber);

    /** Executes the request against the engine. No-op here; concrete providers override. */
    public void exec(EngineRequest datasourceRequest) throws Exception {
    }
}

View File

@ -0,0 +1,507 @@
package io.dataease.datasource.provider;
import com.alibaba.excel.EasyExcel;
import com.alibaba.excel.ExcelReader;
import com.alibaba.excel.context.AnalysisContext;
import com.alibaba.excel.event.AnalysisEventListener;
import com.alibaba.excel.metadata.data.ReadCellData;
import com.alibaba.excel.read.metadata.ReadSheet;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import io.dataease.api.dataset.dto.DatasetTableDTO;
import io.dataease.api.ds.vo.ExcelFileData;
import io.dataease.api.ds.vo.ExcelSheetData;
import io.dataease.api.ds.vo.TableField;
import io.dataease.datasource.dao.auto.entity.CoreDatasource;
import io.dataease.datasource.request.DatasourceRequest;
import io.dataease.exception.DEException;
import io.dataease.utils.AuthUtils;
import io.dataease.utils.JsonUtil;
import lombok.Data;
import org.apache.commons.lang3.StringUtils;
import org.springframework.util.CollectionUtils;
import org.springframework.web.multipart.MultipartFile;
import java.io.*;
import java.nio.charset.StandardCharsets;
import java.text.SimpleDateFormat;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
/**
 * Parsing/persistence helpers for Excel (xlsx/xls) and CSV datasources.
 * A datasource's {@code configuration} is a JSON array with one entry per
 * sheet ({@code deTableName}, {@code fileName}, {@code path}, {@code fields}, …).
 */
public class ExcelUtils {
    // Root directory where uploaded files are stored, one sub-directory per user id.
    private static String path = "/opt/dataease/data/excel/";
    private static ObjectMapper objectMapper = new ObjectMapper();
    private static TypeReference<List<TableField>> TableFieldListTypeReference = new TypeReference<List<TableField>>() {
    };
    // One CSV cell: optionally quoted (embedded quotes doubled), terminated by a comma.
    // Hoisted so the pattern is compiled once, not once per CSV line as before.
    private static final Pattern CSV_CELL_PATTERN = Pattern.compile("(\"[^\"]*(\"{2})*[^\"]*\")*[^,]*,");

    /**
     * Lists the logical tables (sheets) recorded in the datasource configuration JSON.
     */
    public static List<DatasetTableDTO> getTables(DatasourceRequest datasourceRequest) throws DEException {
        List<DatasetTableDTO> tableDescs = new ArrayList<>();
        try {
            JsonNode rootNode = objectMapper.readTree(datasourceRequest.getDatasource().getConfiguration());
            for (int i = 0; i < rootNode.size(); i++) {
                DatasetTableDTO datasetTableDTO = new DatasetTableDTO();
                datasetTableDTO.setTableName(rootNode.get(i).get("deTableName").asText());
                datasetTableDTO.setName(rootNode.get(i).get("deTableName").asText());
                datasetTableDTO.setDatasourceId(datasourceRequest.getDatasource().getId());
                // Older configurations lack lastUpdateTime; fall back to the datasource creation time.
                datasetTableDTO.setLastUpdateTime(rootNode.get(i).get("lastUpdateTime") == null ? datasourceRequest.getDatasource().getCreateTime() : rootNode.get(i).get("lastUpdateTime").asLong(0L));
                tableDescs.add(datasetTableDTO);
            }
        } catch (Exception e) {
            DEException.throwException(e);
        }
        return tableDescs;
    }

    /** Returns the file name stored in the first configuration entry, or "" when absent. */
    public static String getFileName(CoreDatasource datasource) throws DEException {
        try {
            JsonNode rootNode = objectMapper.readTree(datasource.getConfiguration());
            for (int i = 0; i < rootNode.size(); i++) {
                return rootNode.get(i).get("fileName").asText();
            }
        } catch (Exception e) {
            DEException.throwException(e);
        }
        return "";
    }

    /** Returns the human-readable size stored in the first configuration entry, or "0 B" when absent. */
    public static String getSize(CoreDatasource datasource) throws DEException {
        try {
            JsonNode rootNode = objectMapper.readTree(datasource.getConfiguration());
            for (int i = 0; i < rootNode.size(); i++) {
                return rootNode.get(i).get("size").asText();
            }
        } catch (Exception e) {
            DEException.throwException(e);
        }
        return "0 B";
    }

    /**
     * Reads every row of the requested table from its backing file
     * (header excluded for CSV). Returns an empty list when the table is not found.
     */
    public List<String[]> fetchDataList(DatasourceRequest datasourceRequest) throws DEException {
        List<String[]> dataList = new ArrayList<>();
        try {
            JsonNode rootNode = objectMapper.readTree(datasourceRequest.getDatasource().getConfiguration());
            for (int i = 0; i < rootNode.size(); i++) {
                if (rootNode.get(i).get("deTableName").asText().equalsIgnoreCase(datasourceRequest.getTable())) {
                    List<TableField> tableFields = JsonUtil.parseList(rootNode.get(i).get("fields").toString(), TableFieldListTypeReference);
                    String filePath = rootNode.get(i).get("path").asText();
                    String suffix = filePath.substring(filePath.lastIndexOf(".") + 1);
                    // try-with-resources: the original leaked the stream on every call.
                    try (InputStream inputStream = new FileInputStream(filePath)) {
                        if (StringUtils.equalsIgnoreCase(suffix, "csv")) {
                            BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream, StandardCharsets.UTF_8));
                            reader.readLine(); // skip the header row
                            dataList = csvData(reader, false, tableFields.size());
                        } else {
                            dataList = fetchExcelDataList(rootNode.get(i).get("tableName").asText(), inputStream);
                        }
                    }
                }
            }
        } catch (Exception e) {
            DEException.throwException(e);
        }
        return dataList;
    }

    /**
     * Reads every row of the named sheet; returns an empty list when no sheet
     * matches. The workbook is always released via {@code finish()}.
     */
    public List<String[]> fetchExcelDataList(String sheetName, InputStream inputStream) {
        NoModelDataListener noModelDataListener = new NoModelDataListener();
        ExcelReader excelReader = EasyExcel.read(inputStream, noModelDataListener).build();
        try {
            for (ReadSheet readSheet : excelReader.excelExecutor().sheetList()) {
                if (!sheetName.equalsIgnoreCase(readSheet.getSheetName())) {
                    continue;
                }
                noModelDataListener.clear();
                excelReader.read(readSheet);
            }
        } finally {
            excelReader.finish(); // the original never released the reader
        }
        return noModelDataListener.getData();
    }

    /** Returns the persisted field definitions for the requested table, or an empty list. */
    public static List<TableField> getTableFields(DatasourceRequest datasourceRequest) throws DEException {
        List<TableField> tableFields = new ArrayList<>();
        try {
            JsonNode rootNode = objectMapper.readTree(datasourceRequest.getDatasource().getConfiguration());
            for (int i = 0; i < rootNode.size(); i++) {
                if (rootNode.get(i).get("deTableName").asText().equalsIgnoreCase(datasourceRequest.getTable())) {
                    tableFields = JsonUtil.parseList(rootNode.get(i).get("fields").toString(), TableFieldListTypeReference);
                }
            }
        } catch (Exception e) {
            DEException.throwException(e);
        }
        return tableFields;
    }

    /**
     * Persists an uploaded Excel/CSV file and parses it into per-sheet
     * metadata. Sheets without any detected fields are dropped; sheet and
     * field names are limited to 40 characters.
     */
    public ExcelFileData excelSaveAndParse(MultipartFile file) throws DEException {
        String filename = file.getOriginalFilename();
        List<ExcelSheetData> excelSheetDataList = null;
        try {
            excelSheetDataList = parseExcel(filename, file.getInputStream(), true);
        } catch (Exception e) {
            DEException.throwException(e);
        }
        List<ExcelSheetData> returnSheetDataList = excelSheetDataList.stream()
                .filter(excelSheetData -> !CollectionUtils.isEmpty(excelSheetData.getFields()))
                .collect(Collectors.toList());
        // save file
        String excelId = UUID.randomUUID().toString();
        String filePath = saveFile(file, excelId);
        for (ExcelSheetData excelSheetData : returnSheetDataList) {
            if (excelSheetData.getExcelLabel().length() > 40) {
                DEException.throwException(excelSheetData.getExcelLabel() + "长度不能大于40");
            }
            excelSheetData.setLastUpdateTime(System.currentTimeMillis());
            excelSheetData.setTableName(excelSheetData.getExcelLabel());
            excelSheetData.setDeTableName("excel_" + excelSheetData.getExcelLabel() + "_" + UUID.randomUUID().toString().replace("-", "").substring(0, 10));
            excelSheetData.setPath(filePath);
            excelSheetData.setSheetId(UUID.randomUUID().toString());
            excelSheetData.setSheetExcelId(excelId);
            excelSheetData.setFileName(filename);
            // DataEase field type codes: 0-text, 1-datetime, 2-integer, 3-float,
            // 4-boolean, 5-location, 6-binary.
            for (TableField field : excelSheetData.getFields()) {
                if (field.getOriginName().length() > 40) {
                    DEException.throwException(excelSheetData.getExcelLabel() + "的字段" + field.getOriginName() + "长度不能大于40");
                }
                // Map the detected type (TEXT / DATETIME / LONG / DOUBLE) onto the codes above.
                if (field.getFieldType().equalsIgnoreCase("TEXT")) {
                    field.setDeType(0);
                    field.setDeExtractType(0);
                }
                if (field.getFieldType().equalsIgnoreCase("DATETIME")) {
                    field.setDeType(1);
                    field.setDeExtractType(1);
                }
                if (field.getFieldType().equalsIgnoreCase("LONG")) {
                    field.setDeType(2);
                    field.setDeExtractType(2);
                }
                if (field.getFieldType().equalsIgnoreCase("DOUBLE")) {
                    field.setDeType(3);
                    field.setDeExtractType(3);
                }
            }
            excelSheetData.setSize(humanReadableSize(file.getSize()));
        }
        ExcelFileData excelFileData = new ExcelFileData();
        excelFileData.setExcelLabel(filename.substring(0, filename.lastIndexOf('.')));
        excelFileData.setId(excelId);
        excelFileData.setPath(filePath);
        excelFileData.setSheets(returnSheetDataList);
        return excelFileData;
    }

    /** Formats a byte count as "N B" / "N KB" / "N MB" (integer division, like the UI expects). */
    private static String humanReadableSize(long bytes) {
        if (bytes / 1024 == 0) {
            return bytes + " B";
        }
        if (bytes / 1024 < 1024) {
            return (bytes / 1024) + " KB";
        }
        return (bytes / 1024 / 1024) + " MB";
    }

    /** Writes the upload to {@code path/<userId>/<uuid>.<suffix>} and returns the full path. */
    private static String saveFile(MultipartFile file, String fileNameUUID) throws DEException {
        String filePath = null;
        try {
            String filename = file.getOriginalFilename();
            String suffix = filename.substring(filename.lastIndexOf(".") + 1);
            String dirPath = path + AuthUtils.getUser().getUserId() + "/";
            File p = new File(dirPath);
            if (!p.exists()) {
                p.mkdirs();
            }
            filePath = dirPath + fileNameUUID + "." + suffix;
            // try-with-resources: the original leaked the stream when write() threw.
            try (FileOutputStream fileOutputStream = new FileOutputStream(new File(filePath))) {
                fileOutputStream.write(file.getBytes());
                fileOutputStream.flush();
            }
        } catch (Exception e) {
            DEException.throwException(e);
        }
        return filePath;
    }

    /** True when the row has no cells or every cell is empty. */
    private static boolean isEmpty(List<String> cells) {
        if (CollectionUtils.isEmpty(cells)) {
            return true;
        }
        for (String cell : cells) {
            if (!StringUtils.isEmpty(cell)) {
                return false;
            }
        }
        return true;
    }

    /**
     * Parses CSV rows. Quoted cells may contain commas; embedded quotes are
     * doubled. When {@code isPreview} is true at most 1000 rows are read.
     * Rows are truncated to {@code size} columns; fully-empty rows are skipped.
     */
    public static List<String[]> csvData(BufferedReader reader, boolean isPreview, int size) throws DEException {
        List<String[]> data = new ArrayList<>();
        try {
            int num = 1;
            String line;
            while ((line = reader.readLine()) != null) {
                if (isPreview && num > 1000) {
                    break;
                }
                String str;
                line += ","; // sentinel comma so the cell regex also matches the last cell
                Matcher mCells = CSV_CELL_PATTERN.matcher(line);
                List<String> cells = new ArrayList<>(); // cells of the current row
                while (mCells.find()) {
                    str = mCells.group();
                    // Strip surrounding quotes and the trailing comma, then un-double embedded quotes.
                    str = str.replaceAll("(?sm)\"?([^\"]*(\"{2})*[^\"]*)\"?.*,", "$1");
                    str = str.replaceAll("(?sm)(\"(\"))", "$2");
                    cells.add(str);
                }
                if (!isEmpty(cells)) {
                    if (cells.size() > size) {
                        cells = cells.subList(0, size);
                    }
                    data.add(cells.toArray(new String[]{}));
                    num++;
                }
            }
        } catch (Exception e) {
            DEException.throwException(e);
        }
        return data;
    }

    /**
     * Infers a single cell's type: DATETIME (yyyy-MM-dd HH:mm:ss), LONG
     * (integral number), DOUBLE (other numbers), otherwise TEXT.
     */
    private String cellType(String value) {
        try {
            // SimpleDateFormat is not thread-safe, hence a fresh instance per call.
            SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
            sdf.parse(value);
            return "DATETIME";
        } catch (Exception e1) {
            try {
                Double d = Double.valueOf(value);
                double eps = 1e-10;
                if (d - Math.floor(d) < eps) {
                    return "LONG";
                } else {
                    return "DOUBLE";
                }
            } catch (Exception e2) {
                return "TEXT";
            }
        }
    }

    /**
     * Accumulates a column's type across preview rows: row 0 seeds the type,
     * later rows only widen it (LONG -> DOUBLE, anything -> TEXT).
     */
    private void cellType(String value, int i, TableField tableFiled) {
        if (StringUtils.isEmpty(value)) {
            return;
        }
        if (i == 0) {
            tableFiled.setFieldType(cellType(value));
        } else {
            String type = cellType(value);
            if (tableFiled.getFieldType() == null) {
                tableFiled.setFieldType(type);
            } else {
                if (type.equalsIgnoreCase("TEXT")) {
                    tableFiled.setFieldType(type);
                }
                if (type.equalsIgnoreCase("DOUBLE") && tableFiled.getFieldType().equalsIgnoreCase("LONG")) {
                    tableFiled.setFieldType(type);
                }
            }
        }
    }

    /**
     * EasyExcel listener that captures the header row and accumulates every
     * data row as a String[] aligned (and padded with nulls) to the header.
     */
    @Data
    public class NoModelDataListener extends AnalysisEventListener<Map<Integer, String>> {
        private List<String[]> data = new ArrayList<>();
        private List<String> header = new ArrayList<>();
        // Column keys seen in the header; data cells under other keys are ignored.
        private List<Integer> headerKey = new ArrayList<>();

        @Override
        public void invokeHead(Map<Integer, ReadCellData<?>> headMap, AnalysisContext context) {
            super.invokeHead(headMap, context);
            for (Integer key : headMap.keySet()) {
                ReadCellData<?> cellData = headMap.get(key);
                String value = cellData.getStringValue();
                if (StringUtils.isEmpty(value)) {
                    DEException.throwException(context.readSheetHolder().getSheetName() + ", 首行行中不允许有空单元格!");
                }
                headerKey.add(key);
                header.add(value);
            }
        }

        @Override
        public void invoke(Map<Integer, String> dataMap, AnalysisContext context) {
            List<String> line = new ArrayList<>();
            for (Integer key : dataMap.keySet()) {
                String value = dataMap.get(key);
                if (StringUtils.isEmpty(value)) {
                    value = null;
                }
                if (headerKey.contains(key)) {
                    line.add(value);
                }
            }
            // Pad short rows so every row has exactly header.size() cells.
            int size = line.size();
            if (size < header.size()) {
                for (int i = 0; i < header.size() - size; i++) {
                    line.add(null);
                }
            }
            data.add(line.toArray(new String[line.size()]));
        }

        @Override
        public void doAfterAllAnalysed(AnalysisContext analysisContext) {
            // No end-of-sheet work: rows were collected incrementally in invoke().
        }

        /** Resets all collected state so the listener can be reused for the next sheet. */
        public void clear() {
            data.clear();
            header.clear();
            // Fix: headerKey was never cleared, so columns from a previously read
            // (wider) sheet leaked into the next sheet's rows.
            headerKey.clear();
        }
    }

    /**
     * Parses an uploaded xlsx/xls/csv stream into per-sheet fields and row
     * data. When {@code isPreview} is true, rows are capped (100 for Excel,
     * 1000 for CSV) and column types are inferred from the preview rows;
     * columns with no inferable type default to TEXT.
     */
    public List<ExcelSheetData> parseExcel(String filename, InputStream inputStream, boolean isPreview) throws IOException {
        List<ExcelSheetData> excelSheetDataList = new ArrayList<>();
        String suffix = filename.substring(filename.lastIndexOf(".") + 1);
        if (StringUtils.equalsIgnoreCase(suffix, "xlsx") || StringUtils.equalsIgnoreCase(suffix, "xls")) {
            NoModelDataListener noModelDataListener = new NoModelDataListener();
            ExcelReader excelReader = EasyExcel.read(inputStream, noModelDataListener).build();
            try {
                List<ReadSheet> sheets = excelReader.excelExecutor().sheetList();
                for (ReadSheet readSheet : sheets) {
                    noModelDataListener.clear();
                    List<TableField> fields = new ArrayList<>();
                    excelReader.read(readSheet);
                    if (CollectionUtils.isEmpty(noModelDataListener.getHeader())) {
                        DEException.throwException(readSheet.getSheetName() + "首行不能为空!");
                    }
                    for (String s : noModelDataListener.getHeader()) {
                        TableField tableFiled = new TableField();
                        tableFiled.setFieldType(null);
                        tableFiled.setName(s);
                        tableFiled.setOriginName(s);
                        fields.add(tableFiled);
                    }
                    List<String[]> data = new ArrayList<>(noModelDataListener.getData());
                    if (isPreview) {
                        if (data.size() > 100) {
                            data = data.subList(0, 100);
                        }
                        // Infer each column's type from the preview rows.
                        for (int i = 0; i < data.size(); i++) {
                            for (int j = 0; j < data.get(i).length; j++) {
                                if (j < fields.size()) {
                                    cellType(data.get(i)[j], i, fields.get(j));
                                }
                            }
                        }
                    }
                    applyTextFallback(fields);
                    ExcelSheetData excelSheetData = new ExcelSheetData();
                    excelSheetData.setFields(fields);
                    excelSheetData.setData(data);
                    excelSheetData.setFileName(filename);
                    excelSheetData.setExcelLabel(readSheet.getSheetName());
                    excelSheetDataList.add(excelSheetData);
                }
            } finally {
                excelReader.finish(); // the original never released the workbook
            }
        }
        if (StringUtils.equalsIgnoreCase(suffix, "csv")) {
            List<TableField> fields = new ArrayList<>();
            BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream, StandardCharsets.UTF_8));
            String s = reader.readLine(); // header line
            // NOTE(review): the header uses a plain split(",") and does not honor
            // quoted commas the way csvData() does — confirm headers never contain commas.
            String[] split = s.split(",");
            for (String filedName : split) {
                if (StringUtils.isEmpty(filedName)) {
                    DEException.throwException("首行行中不允许有空单元格!");
                }
                TableField tableFiled = new TableField();
                tableFiled.setName(filedName);
                tableFiled.setOriginName(filedName);
                tableFiled.setFieldType(null);
                fields.add(tableFiled);
            }
            List<String[]> data = csvData(reader, isPreview, fields.size());
            if (isPreview) {
                for (int i = 0; i < data.size(); i++) {
                    for (int j = 0; j < data.get(i).length; j++) {
                        if (j < fields.size()) {
                            cellType(data.get(i)[j], i, fields.get(j));
                        }
                    }
                }
            }
            applyTextFallback(fields);
            ExcelSheetData excelSheetData = new ExcelSheetData();
            excelSheetData.setFields(fields);
            excelSheetData.setData(data);
            excelSheetData.setFileName(filename);
            excelSheetData.setExcelLabel(filename.substring(0, filename.lastIndexOf('.')));
            excelSheetDataList.add(excelSheetData);
        }
        inputStream.close();
        // Project each sheet's rows into field-name -> value maps for preview rendering.
        for (ExcelSheetData excelSheetData : excelSheetDataList) {
            List<String[]> data = excelSheetData.getData();
            String[] fieldArray = excelSheetData.getFields().stream().map(TableField::getName).toArray(String[]::new);
            List<Map<String, Object>> jsonArray = new ArrayList<>();
            if (data != null) {
                jsonArray = data.stream().map(ele -> {
                    Map<String, Object> map = new HashMap<>();
                    for (int i = 0; i < fieldArray.length; i++) {
                        map.put(fieldArray[i], i < ele.length ? ele[i] : "");
                    }
                    return map;
                }).collect(Collectors.toList());
            }
            excelSheetData.setJsonArray(jsonArray);
        }
        return excelSheetDataList;
    }

    /** Columns whose type could not be inferred default to TEXT. */
    private static void applyTextFallback(List<TableField> fields) {
        for (TableField field : fields) {
            if (StringUtils.isEmpty(field.getFieldType())) {
                field.setFieldType("TEXT");
            }
        }
    }
}

View File

@ -0,0 +1,97 @@
package io.dataease.datasource.provider;
import java.io.File;
import java.io.IOException;
import java.net.URL;
import java.net.URLClassLoader;
/**
 * Child-first (parent-last) {@link URLClassLoader} used to load user-supplied
 * JDBC driver jars, so a driver jar's classes win over same-named classes on
 * the application classpath and different driver versions can coexist.
 */
public class ExtendedJdbcClassLoader extends URLClassLoader {
    // Fully-qualified driver class name associated with this loader.
    private String driver;

    public String getDriver() {
        return driver;
    }

    public void setDriver(String driver) {
        this.driver = driver;
    }

    public ExtendedJdbcClassLoader(URL[] urls, ClassLoader parent) {
        super(urls, parent);
    }

    /**
     * Loads a class child-first: already-loaded -> this loader's URLs ->
     * parent -> system. This deliberately inverts the JVM's default
     * parent-first delegation.
     */
    @Override
    protected Class<?> loadClass(String name, boolean resolve) throws ClassNotFoundException {
        synchronized (getClassLoadingLock(name)) {
            // First, check if the class has already been loaded
            Class<?> c = findLoadedClass(name);
            if (c != null) {
                if (resolve) {
                    resolveClass(c);
                }
                return c;
            }
            // 1) Try this loader's own URLs first (child-first behavior).
            try {
                c = findClass(name);
                if (c != null) {
                    if (resolve) {
                        resolveClass(c);
                    }
                    return c;
                }
            } catch (ClassNotFoundException e) {
                // Ignore — fall through to the parent.
            }
            // 2) Fall back to the parent classloader.
            try {
                if (getParent() != null) {
                    c = super.loadClass(name, resolve);
                    if (c != null) {
                        if (resolve) {
                            resolveClass(c);
                        }
                        return c;
                    }
                }
            } catch (ClassNotFoundException e) {
                // Ignore — fall through to the system classloader.
            }
            // 3) Last resort: the system classloader.
            try {
                c = findSystemClass(name);
                if (c != null) {
                    if (resolve) {
                        resolveClass(c);
                    }
                    return c;
                }
            } catch (ClassNotFoundException e) {
                // Ignore — nothing found anywhere.
            }
            throw new ClassNotFoundException(name);
        }
    }

    /** Adds a jar/class path (by file name) to this loader's search URLs. */
    public void addFile(String s) throws IOException {
        File f = new File(s);
        addFile(f);
    }

    /** Adds a jar/class path (by File) to this loader's search URLs. */
    public void addFile(File f) throws IOException {
        addFile(f.toURI().toURL());
    }

    /** Adds a URL to this loader's search path, wrapping any failure in IOException. */
    public void addFile(URL u) throws IOException {
        try {
            this.addURL(u);
        } catch (Throwable t) {
            t.printStackTrace();
            throw new IOException("Error, could not add URL to system classloader");
        }
    }
}

View File

@ -0,0 +1,131 @@
package io.dataease.datasource.provider;
import io.dataease.api.ds.vo.DatasourceConfiguration;
import io.dataease.api.ds.vo.TableField;
import io.dataease.dataset.utils.TableUtils;
import io.dataease.datasource.dao.auto.entity.CoreDatasource;
import io.dataease.datasource.dao.auto.entity.CoreDeEngine;
import io.dataease.datasource.request.EngineRequest;
import io.dataease.datasource.type.H2;
import io.dataease.utils.BeanUtils;
import io.dataease.utils.JsonUtil;
import org.apache.commons.lang3.StringUtils;
import org.springframework.stereotype.Service;
import java.sql.Connection;
import java.sql.Statement;
import java.util.Arrays;
import java.util.List;
@Service("h2Engine")
public class H2EngineProvider extends EngineProvider {

    /**
     * Executes the request's SQL against the embedded H2 engine. Connection
     * and statement are released by try-with-resources even on failure.
     */
    @Override
    public void exec(EngineRequest engineRequest) throws Exception {
        DatasourceConfiguration configuration = JsonUtil.parseObject(engineRequest.getEngine().getConfiguration(), H2.class);
        int queryTimeout = configuration.getQueryTimeout();
        CoreDatasource datasource = new CoreDatasource();
        BeanUtils.copyBean(datasource, engineRequest.getEngine());
        try (Connection connection = getConnection(datasource); Statement stat = getStatement(connection, queryTimeout)) {
            stat.execute(engineRequest.getQuery());
        }
    }

    // CREATE TABLE template; Column_Fields is replaced by the generated column list.
    private static final String creatTableSql =
            "CREATE TABLE IF NOT EXISTS `TABLE_NAME`" +
                    "Column_Fields;";

    @Override
    public String createView(String name, String viewSQL) {
        return "CREATE or replace view " + name + " AS (" + viewSQL + ")";
    }

    /**
     * Builds a multi-row INSERT for one page of data (1-based {@code page},
     * {@code pageNumber} rows per page). Empty cells become SQL NULL.
     *
     * @throws IllegalArgumentException when the requested page contains no rows
     *         (the original crashed with StringIndexOutOfBoundsException)
     */
    @Override
    public String insertSql(String name, List<String[]> dataList, int page, int pageNumber) {
        String insertSql = "INSERT INTO `TABLE_NAME` VALUES ".replace("TABLE_NAME", name);
        int realSize = Math.min(page * pageNumber, dataList.size());
        List<String[]> rows = dataList.subList((page - 1) * pageNumber, realSize);
        if (rows.isEmpty()) {
            throw new IllegalArgumentException("No rows to insert for page " + page);
        }
        StringBuilder values = new StringBuilder();
        for (String[] row : rows) {
            values.append('(');
            for (int i = 0; i < row.length; i++) {
                if (i > 0) {
                    values.append(',');
                }
                if (StringUtils.isEmpty(row[i])) {
                    // Emit SQL NULL directly; the old replaceAll("'null'","null") trick
                    // also corrupted legitimate cells containing the text "null".
                    values.append("null");
                } else {
                    values.append('\'').append(row[i].replace("'", "\\'")).append('\'');
                }
            }
            values.append("),");
        }
        // Drop the trailing comma after the last tuple.
        return insertSql + values.substring(0, values.length() - 1);
    }

    @Override
    public String dropTable(String name) {
        return "DROP TABLE IF EXISTS `" + name + "`";
    }

    @Override
    public String dropView(String name) {
        return "DROP VIEW IF EXISTS `" + name + "`";
    }

    /** Swaps {@code name} with its temp counterpart via renames, then drops the leftover. */
    @Override
    public String replaceTable(String name) {
        return "ALTER TABLE `FROM_TABLE` rename to `FROM_TABLE_tmp`; ALTER TABLE `TO_TABLE` rename to `FROM_TABLE`; DROP TABLE IF EXISTS `FROM_TABLE_tmp`;".replace("FROM_TABLE", name).replace("TO_TABLE", TableUtils.tmpName(name));
    }

    @Override
    public String createTableSql(String tableName, List<TableField> tableFields, CoreDeEngine engine) {
        String dorisTableColumnSql = createTableSql(tableFields);
        return creatTableSql.replace("TABLE_NAME", tableName).replace("Column_Fields", dorisTableColumnSql);
    }

    /** Renders the "(`col` type, ...)" column list for createTableSql, keyed by deType. */
    private String createTableSql(final List<TableField> tableFields) {
        StringBuilder Column_Fields = new StringBuilder("`");
        for (TableField tableField : tableFields) {
            Column_Fields.append(tableField.getName()).append("` ");
            int size = tableField.getPrecision() * 4;
            switch (tableField.getDeType()) {
                case 0:
                    Column_Fields.append("longtext").append(",`");
                    break;
                case 1:
                    // NOTE(review): size is clamped to >=50 but the varchar length below
                    // still uses the raw precision — confirm which was intended.
                    size = size < 50 ? 50 : size;
                    if (size < 65533) {
                        Column_Fields.append("varchar(length)".replace("length", String.valueOf(tableField.getPrecision()))).append(",`");
                    } else {
                        Column_Fields.append("longtext").append(",`");
                    }
                    break;
                case 2:
                    Column_Fields.append("bigint(20)").append(",`");
                    break;
                case 3:
                    Column_Fields.append("varchar(100)").append(",`");
                    break;
                case 4:
                    Column_Fields.append("TINYINT(length)".replace("length", String.valueOf(tableField.getPrecision()))).append(",`");
                    break;
                default:
                    if (size < 65533) {
                        Column_Fields.append("varchar(length)".replace("length", String.valueOf(tableField.getPrecision()))).append(",`");
                    } else {
                        Column_Fields.append("longtext").append(",`");
                    }
                    break;
            }
        }
        // Trim the trailing ",`" opened for the next (non-existent) column.
        Column_Fields = new StringBuilder(Column_Fields.substring(0, Column_Fields.length() - 2));
        Column_Fields = new StringBuilder("(" + Column_Fields + ")\n");
        return Column_Fields.toString();
    }
}

View File

@ -0,0 +1,129 @@
package io.dataease.datasource.provider;
import io.dataease.api.ds.vo.DatasourceConfiguration;
import io.dataease.api.ds.vo.TableField;
import io.dataease.dataset.utils.TableUtils;
import io.dataease.datasource.dao.auto.entity.CoreDatasource;
import io.dataease.datasource.dao.auto.entity.CoreDeEngine;
import io.dataease.datasource.request.EngineRequest;
import io.dataease.datasource.type.Mysql;
import io.dataease.utils.BeanUtils;
import io.dataease.utils.JsonUtil;
import org.apache.commons.lang3.StringUtils;
import org.springframework.stereotype.Service;
import java.sql.Connection;
import java.sql.Statement;
import java.util.Arrays;
import java.util.List;
/**
 * @Author gin
 * @Date 2021/5/17 4:27 下午
 */
@Service("mysqlEngine")
public class MysqlEngineProvider extends EngineProvider {

    /**
     * Executes the request's SQL against the MySQL engine. Connection and
     * statement are released by try-with-resources even on failure.
     */
    @Override
    public void exec(EngineRequest engineRequest) throws Exception {
        DatasourceConfiguration configuration = JsonUtil.parseObject(engineRequest.getEngine().getConfiguration(), Mysql.class);
        int queryTimeout = configuration.getQueryTimeout();
        CoreDatasource datasource = new CoreDatasource();
        BeanUtils.copyBean(datasource, engineRequest.getEngine());
        try (Connection connection = getConnection(datasource); Statement stat = getStatement(connection, queryTimeout)) {
            stat.execute(engineRequest.getQuery());
        }
    }

    // CREATE TABLE template; Column_Fields is replaced by the generated column list.
    private static final String creatTableSql =
            "CREATE TABLE IF NOT EXISTS `TABLE_NAME`" +
                    "Column_Fields;";

    @Override
    public String createView(String name, String viewSQL) {
        return "CREATE or replace view " + name + " AS (" + viewSQL + ")";
    }

    /**
     * Builds a multi-row INSERT for one page of data (1-based {@code page},
     * {@code pageNumber} rows per page). Empty cells become SQL NULL.
     *
     * @throws IllegalArgumentException when the requested page contains no rows
     *         (the original crashed with StringIndexOutOfBoundsException)
     */
    @Override
    public String insertSql(String name, List<String[]> dataList, int page, int pageNumber) {
        String insertSql = "INSERT INTO `TABLE_NAME` VALUES ".replace("TABLE_NAME", name);
        int realSize = Math.min(page * pageNumber, dataList.size());
        List<String[]> rows = dataList.subList((page - 1) * pageNumber, realSize);
        if (rows.isEmpty()) {
            throw new IllegalArgumentException("No rows to insert for page " + page);
        }
        StringBuilder values = new StringBuilder();
        for (String[] row : rows) {
            values.append('(');
            for (int i = 0; i < row.length; i++) {
                if (i > 0) {
                    values.append(',');
                }
                if (StringUtils.isEmpty(row[i])) {
                    // Emit SQL NULL directly; the old replaceAll("'null'","null") trick
                    // also corrupted legitimate cells containing the text "null".
                    values.append("null");
                } else {
                    values.append('\'').append(row[i].replace("'", "\\'")).append('\'');
                }
            }
            values.append("),");
        }
        // Drop the trailing comma after the last tuple.
        return insertSql + values.substring(0, values.length() - 1);
    }

    @Override
    public String dropTable(String name) {
        return "DROP TABLE IF EXISTS `" + name + "`";
    }

    @Override
    public String dropView(String name) {
        return "DROP VIEW IF EXISTS `" + name + "`";
    }

    /** Atomically swaps {@code name} with its temp counterpart, then drops the leftover. */
    @Override
    public String replaceTable(String name) {
        String replaceTableSql = "rename table `FROM_TABLE` to `FROM_TABLE_tmp`, `TO_TABLE` to `FROM_TABLE`, `FROM_TABLE_tmp` to `TO_TABLE`"
                .replace("FROM_TABLE", name).replace("TO_TABLE", TableUtils.tmpName(name));
        String dropTableSql = "DROP TABLE IF EXISTS `TABLE_NAME`".replace("TABLE_NAME", TableUtils.tmpName(name));
        return replaceTableSql + ";" + dropTableSql;
    }

    @Override
    public String createTableSql(String tableName, List<TableField> tableFields, CoreDeEngine engine) {
        String dorisTableColumnSql = createTableSql(tableFields);
        return creatTableSql.replace("TABLE_NAME", tableName).replace("Column_Fields", dorisTableColumnSql);
    }

    /** Renders the "(`col` type, ...)" column list for createTableSql, keyed by deExtractType. */
    private String createTableSql(final List<TableField> tableFields) {
        StringBuilder Column_Fields = new StringBuilder("`");
        for (TableField tableField : tableFields) {
            Column_Fields.append(tableField.getName()).append("` ");
            int size = tableField.getPrecision() * 4;
            switch (tableField.getDeExtractType()) {
                case 0:
                    Column_Fields.append("longtext").append(",`");
                    break;
                case 1:
                    Column_Fields.append("datetime").append(",`");
                    break;
                case 2:
                    Column_Fields.append("bigint(20)").append(",`");
                    break;
                case 3:
                    Column_Fields.append("decimal(27,8)").append(",`");
                    break;
                case 4:
                    Column_Fields.append("TINYINT(length)".replace("length", String.valueOf(tableField.getPrecision()))).append(",`");
                    break;
                default:
                    Column_Fields.append("longtext").append(",`");
                    break;
            }
        }
        // Trim the trailing ",`" opened for the next (non-existent) column.
        Column_Fields = new StringBuilder(Column_Fields.substring(0, Column_Fields.length() - 2));
        Column_Fields = new StringBuilder("(" + Column_Fields + ")\n");
        return Column_Fields.toString();
    }
}

View File

@ -0,0 +1,17 @@
package io.dataease.datasource.provider;
import io.dataease.utils.CommonBeanFactory;
import io.micrometer.common.util.StringUtils;
public class ProviderUtil {
    /**
     * Resolves the Spring bean implementing the storage engine for the given
     * datasource type (bean name "&lt;type&gt;Engine"); falls back to the MySQL
     * provider when the type is empty.
     */
    public static EngineProvider getEngineProvider(String datasourceType) {
        return StringUtils.isNotEmpty(datasourceType)
                ? (EngineProvider) CommonBeanFactory.getBean(datasourceType + "Engine")
                : CommonBeanFactory.getBean(MysqlEngineProvider.class);
    }
}

View File

@ -0,0 +1,64 @@
package io.dataease.datasource.request;
import io.dataease.dataset.dto.DatasourceSchemaDTO;
import io.dataease.datasource.dao.auto.entity.CoreDatasource;
import lombok.Data;
import org.springframework.util.StringUtils;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
@Data
public class DatasourceRequest {
    // Recognizes a "WITH ... AS (...)" fragment appearing ahead of the main SELECT.
    private final String REG_WITH_SQL_FRAGMENT = "((?i)WITH[\\s\\S]+(?i)AS?\\s*\\([\\s\\S]+\\))\\s*(?i)SELECT";
    private Pattern WITH_SQL_FRAGMENT = Pattern.compile("((?i)WITH[\\s\\S]+(?i)AS?\\s*\\([\\s\\S]+\\))\\s*(?i)SELECT");
    protected String query;
    protected String table;
    protected CoreDatasource datasource;
    private Integer pageSize;
    private Integer page;
    private Integer realSize;
    private Integer fetchSize = 10000;
    private boolean pageable = false;
    private boolean previewData = false;
    private boolean totalPageFlag;
    private Map<Long, DatasourceSchemaDTO> dsList;

    public DatasourceRequest() {
    }

    /** Returns the query with any embedded WITH fragment hoisted to the front. */
    public String getQuery() {
        return this.rebuildSqlWithFragment(this.query);
    }

    public void setQuery(String query) {
        this.query = query;
    }

    /**
     * Moves a WITH fragment found mid-query to the front of the statement so
     * downstream engines see a valid CTE form. Queries already starting with
     * WITH are returned unchanged.
     */
    private String rebuildSqlWithFragment(String sql) {
        if (sql == null) {
            // Fix: getQuery() before setQuery() used to NPE on toLowerCase().
            return null;
        }
        if (!sql.toLowerCase().startsWith("with")) {
            Matcher matcher = this.WITH_SQL_FRAGMENT.matcher(sql);
            if (matcher.find()) {
                String withFragment = matcher.group();
                if (!StringUtils.isEmpty(withFragment)) {
                    if (withFragment.length() > 6) {
                        // The match ends with the main "SELECT" (6 chars): peel it off,
                        // leave it in place, and move only the WITH part to the front.
                        int lastSelectIndex = withFragment.length() - 6;
                        sql = sql.replace(withFragment, withFragment.substring(lastSelectIndex));
                        withFragment = withFragment.substring(0, lastSelectIndex);
                    }
                    sql = withFragment + " " + sql;
                    sql = sql.replaceAll(" {2,}", " ");
                }
            }
        }
        return sql;
    }

    public String getREG_WITH_SQL_FRAGMENT() {
        // Return the constant instead of duplicating the literal (the old body
        // also contained a dead "this.getClass()" call).
        return REG_WITH_SQL_FRAGMENT;
    }
}

View File

@ -0,0 +1,64 @@
package io.dataease.datasource.request;
import io.dataease.dataset.dto.DatasourceSchemaDTO;
import io.dataease.datasource.dao.auto.entity.CoreDatasource;
import io.dataease.datasource.dao.auto.entity.CoreDeEngine;
import lombok.Data;
import org.springframework.util.StringUtils;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
@Data
public class EngineRequest {
    // Recognizes a "WITH ... AS (...)" fragment appearing ahead of the main SELECT.
    private final String REG_WITH_SQL_FRAGMENT = "((?i)WITH[\\s\\S]+(?i)AS?\\s*\\([\\s\\S]+\\))\\s*(?i)SELECT";
    private Pattern WITH_SQL_FRAGMENT = Pattern.compile("((?i)WITH[\\s\\S]+(?i)AS?\\s*\\([\\s\\S]+\\))\\s*(?i)SELECT");
    protected String query;
    protected String table;
    protected CoreDeEngine engine;
    private Integer pageSize;
    private Integer page;
    private Integer realSize;
    private Integer fetchSize = 10000;
    private boolean pageable = false;
    private boolean previewData = false;
    private boolean totalPageFlag;

    public EngineRequest() {
    }

    /** Returns the query with any embedded WITH fragment hoisted to the front. */
    public String getQuery() {
        return this.rebuildSqlWithFragment(this.query);
    }

    public void setQuery(String query) {
        this.query = query;
    }

    /**
     * Moves a WITH fragment found mid-query to the front of the statement so
     * downstream engines see a valid CTE form. Queries already starting with
     * WITH are returned unchanged.
     */
    private String rebuildSqlWithFragment(String sql) {
        if (sql == null) {
            // Fix: getQuery() before setQuery() used to NPE on toLowerCase().
            return null;
        }
        if (!sql.toLowerCase().startsWith("with")) {
            Matcher matcher = this.WITH_SQL_FRAGMENT.matcher(sql);
            if (matcher.find()) {
                String withFragment = matcher.group();
                if (!StringUtils.isEmpty(withFragment)) {
                    if (withFragment.length() > 6) {
                        // The match ends with the main "SELECT" (6 chars): peel it off,
                        // leave it in place, and move only the WITH part to the front.
                        int lastSelectIndex = withFragment.length() - 6;
                        sql = sql.replace(withFragment, withFragment.substring(lastSelectIndex));
                        withFragment = withFragment.substring(0, lastSelectIndex);
                    }
                    sql = withFragment + " " + sql;
                    sql = sql.replaceAll(" {2,}", " ");
                }
            }
        }
        return sql;
    }

    public String getREG_WITH_SQL_FRAGMENT() {
        // Return the constant instead of duplicating the literal (the old body
        // also contained a dead "this.getClass()" call).
        return REG_WITH_SQL_FRAGMENT;
    }
}

View File

@ -0,0 +1,167 @@
package io.dataease.datasource.server;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import io.dataease.api.ds.DatasourceDriverApi;
import io.dataease.api.ds.vo.DatasourceDTO;
import io.dataease.api.ds.vo.DriveDTO;
import io.dataease.api.ds.vo.DriveJarDTO;
import io.dataease.datasource.dao.auto.entity.CoreDriver;
import io.dataease.datasource.dao.auto.entity.CoreDriverJar;
import io.dataease.datasource.dao.auto.mapper.CoreDriverJarMapper;
import io.dataease.datasource.dao.auto.mapper.CoreDriverMapper;
import io.dataease.utils.BeanUtils;
import io.dataease.utils.FileUtils;
import io.dataease.utils.Md5Utils;
import jakarta.annotation.Resource;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.util.CollectionUtils;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.multipart.MultipartFile;
import java.io.File;
import java.io.FileOutputStream;
import java.util.*;
/**
 * REST endpoints for managing custom JDBC driver definitions and their jar
 * files. Jars are stored on disk under {@code DRIVER_PATH/<driverId>/} using an
 * MD5-derived file name; the original name is kept in the core_driver_jar row.
 */
@Transactional(rollbackFor = Exception.class)
@RestController
@RequestMapping("/datasourceDriver")
public class DatasourceDriverServer implements DatasourceDriverApi {
    private final static String DRIVER_PATH = "/opt/dataease/custom-drivers/";
    @Resource
    private CoreDriverMapper coreDriverMapper;
    @Resource
    private CoreDriverJarMapper coreDriverJarMapper;

    // Not implemented yet.
    // NOTE(review): returns null rather than an empty list — confirm callers tolerate this.
    @Override
    public List<DatasourceDTO> query(String keyWord) {
        return null;
    }

    /** Lists all registered drivers. */
    @Override
    public List<DriveDTO> list() {
        List<DriveDTO> driveDTOS = new ArrayList<>();
        List<CoreDriver> coreDrivers = coreDriverMapper.selectList(null);
        coreDrivers.forEach(coreDriver -> {
            DriveDTO datasourceDrive = new DriveDTO();
            BeanUtils.copyBean(datasourceDrive, coreDriver);
            datasourceDrive.setTypeDesc(""); // TODO: set datasource type description
            // Fix: the DTO was built but never added, so the list was always empty.
            driveDTOS.add(datasourceDrive);
        });
        return driveDTOS;
    }

    /** Lists the drivers registered for one datasource type. */
    @Override
    public List<DriveDTO> listByDsType(String dsType) {
        List<DriveDTO> driveDTOS = new ArrayList<>();
        QueryWrapper<CoreDriver> queryWrapper = new QueryWrapper<>();
        queryWrapper.eq("type", dsType);
        List<CoreDriver> coreDrivers = coreDriverMapper.selectList(queryWrapper);
        coreDrivers.forEach(coreDriver -> {
            DriveDTO datasourceDrive = new DriveDTO();
            BeanUtils.copyBean(datasourceDrive, coreDriver);
            // Fix: the DTO was built but never added, so the list was always empty.
            driveDTOS.add(datasourceDrive);
        });
        return driveDTOS;
    }

    @Override
    public DriveDTO save(DriveDTO datasourceDrive) {
        CoreDriver coreDriver = new CoreDriver();
        BeanUtils.copyBean(coreDriver, datasourceDrive);
        coreDriverMapper.insert(coreDriver);
        return datasourceDrive;
    }

    @Override
    public DriveDTO update(DriveDTO datasourceDrive) {
        CoreDriver coreDriver = new CoreDriver();
        BeanUtils.copyBean(coreDriver, datasourceDrive);
        coreDriverMapper.updateById(coreDriver);
        return datasourceDrive;
    }

    /** Deletes a driver and every jar row that references it. */
    @Override
    public void delete(String driverId) {
        coreDriverMapper.deleteById(driverId);
        Map<String, Object> map = new HashMap<>();
        map.put("deDriverId", driverId);
        coreDriverJarMapper.deleteByMap(map);
    }

    /** Lists the jar files registered for one driver. */
    @Override
    public List<DriveJarDTO> listDriverJar(String driverId) {
        List<DriveJarDTO> driveJarDTOS = new ArrayList<>();
        QueryWrapper<CoreDriverJar> queryWrapper = new QueryWrapper<>();
        queryWrapper.eq("deDriverId", driverId);
        coreDriverJarMapper.selectList(queryWrapper).forEach(coreDriverJar -> {
            DriveJarDTO driveJarDTO = new DriveJarDTO();
            BeanUtils.copyBean(driveJarDTO, coreDriverJar);
            driveJarDTOS.add(driveJarDTO);
        });
        return driveJarDTOS;
    }

    /** Deletes a jar row and removes its file from disk. */
    @Override
    public void deleteDriverJar(String jarId) {
        CoreDriverJar driverJar = coreDriverJarMapper.selectById(jarId);
        // Fix: guard against NPE when the jar row is already gone.
        if (driverJar == null) {
            return;
        }
        coreDriverJarMapper.deleteById(jarId);
        // (Removed an unused lookup of the owning CoreDriver and a stray trailing ';'.)
        FileUtils.deleteFile(DRIVER_PATH + driverJar.getDeDriverId() + "/" + driverJar.getTransName());
        // TODO: refresh the driver classloader
    }

    /**
     * Uploads a driver jar for an existing driver.
     *
     * @throws RuntimeException when the driver does not exist or the file is not a .jar
     * @throws Exception        when a jar with the same original name already exists
     */
    @Override
    public DriveJarDTO uploadJar(@RequestParam("deDriverId") String deDriverId, @RequestParam("jarFile") MultipartFile jarFile) throws Exception {
        CoreDriver coreDriver = coreDriverMapper.selectById(deDriverId);
        if (coreDriver == null) {
            throw new RuntimeException("DRIVER_NOT_FOUND");
        }
        String filename = jarFile.getOriginalFilename();
        // Fix: getOriginalFilename() may return null — guard before endsWith().
        if (filename == null || !filename.endsWith(".jar")) {
            throw new RuntimeException("NOT_JAR");
        }
        QueryWrapper<CoreDriverJar> queryWrapper = new QueryWrapper<>();
        queryWrapper.eq("fileName", filename);
        if (!CollectionUtils.isEmpty(coreDriverJarMapper.selectList(queryWrapper))) {
            throw new Exception("A file with the same name already exists" + filename);
        }
        String dirPath = DRIVER_PATH + deDriverId + "/";
        String filePath = dirPath + Md5Utils.md5(filename) + ".jar";
        saveJarFile(jarFile, dirPath, filePath);
        CoreDriverJar coreDriverJar = new CoreDriverJar();
        coreDriverJar.setDeDriverId(deDriverId);
        coreDriverJar.setVersion("");
        coreDriverJar.setFileName(filename);
        coreDriverJar.setDriverClass(String.join(",", new ArrayList<>()));
        coreDriverJar.setIsTransName(true);
        coreDriverJar.setTransName(Md5Utils.md5(filename) + ".jar");
        coreDriverJarMapper.insert(coreDriverJar);
        // TODO: refresh the driver classloader
        DriveJarDTO driveJarDTO = new DriveJarDTO();
        BeanUtils.copyBean(driveJarDTO, coreDriverJar);
        return driveJarDTO;
    }

    /** Writes the uploaded jar to filePath, creating dirPath if needed. */
    private String saveJarFile(MultipartFile file, String dirPath, String filePath) throws Exception {
        File p = new File(dirPath);
        if (!p.exists()) {
            p.mkdirs();
        }
        File f = new File(filePath);
        // Fix: try-with-resources — the stream previously leaked if write() threw.
        try (FileOutputStream fileOutputStream = new FileOutputStream(f)) {
            fileOutputStream.write(file.getBytes());
            fileOutputStream.flush();
        }
        return filePath;
    }
}

View File

@ -0,0 +1,917 @@
package io.dataease.datasource.server;
import cn.hutool.core.collection.CollectionUtil;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
import io.dataease.api.dataset.dto.DatasetTableDTO;
import io.dataease.api.dataset.dto.PreviewSqlDTO;
import io.dataease.api.ds.DatasourceApi;
import io.dataease.api.ds.vo.*;
import io.dataease.api.permissions.auth.api.InteractiveAuthApi;
import io.dataease.api.permissions.auth.dto.BusiResourceEditor;
import io.dataease.api.permissions.user.api.UserApi;
import io.dataease.api.permissions.user.vo.UserFormVO;
import io.dataease.commons.constants.TaskStatus;
import io.dataease.commons.utils.CommonThreadPool;
import io.dataease.constant.DataSourceType;
import io.dataease.dataset.dao.auto.entity.CoreDatasetTable;
import io.dataease.dataset.dao.auto.mapper.CoreDatasetTableMapper;
import io.dataease.dataset.dto.DatasourceSchemaDTO;
import io.dataease.dataset.manage.DatasetDataManage;
import io.dataease.dataset.utils.TableUtils;
import io.dataease.datasource.dao.auto.entity.*;
import io.dataease.datasource.dao.auto.mapper.CoreDatasourceMapper;
import io.dataease.datasource.dao.auto.mapper.CoreDsFinishPageMapper;
import io.dataease.datasource.dao.auto.mapper.QrtzSchedulerStateMapper;
import io.dataease.datasource.dao.ext.mapper.DataSourceExtMapper;
import io.dataease.datasource.dao.ext.mapper.TaskLogExtMapper;
import io.dataease.datasource.manage.DataSourceManage;
import io.dataease.datasource.manage.DatasourceSyncManage;
import io.dataease.datasource.provider.ApiUtils;
import io.dataease.datasource.provider.CalciteProvider;
import io.dataease.datasource.provider.ExcelUtils;
import io.dataease.datasource.request.DatasourceRequest;
import io.dataease.engine.constant.SQLConstants;
import io.dataease.exception.DEException;
import io.dataease.i18n.Translator;
import io.dataease.license.config.XpackInteract;
import io.dataease.model.BusiNodeRequest;
import io.dataease.model.BusiNodeVO;
import io.dataease.utils.*;
import jakarta.annotation.Resource;
import org.apache.commons.lang3.ObjectUtils;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.util.CollectionUtils;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.multipart.MultipartFile;
import java.util.*;
import java.util.stream.Collectors;
import static io.dataease.datasource.server.DatasourceTaskServer.ScheduleType.MANUAL;
import static io.dataease.datasource.server.DatasourceTaskServer.ScheduleType.RIGHTNOW;
@RestController
@RequestMapping("/datasource")
@Transactional
public class DatasourceServer implements DatasourceApi {
// MyBatis mappers and collaborating managers/services, injected by Spring.
@Resource
private CoreDatasourceMapper datasourceMapper;
@Resource
private EngineServer engineServer;
@Resource
private DatasourceTaskServer datasourceTaskServer;
@Resource
private CalciteProvider calciteProvider;
@Resource
private DatasourceSyncManage datasourceSyncManage;
@Resource
private TaskLogExtMapper taskLogExtMapper;
// Shared, thread-safe JSON mapper for parsing status payloads.
private static final ObjectMapper objectMapper = new ObjectMapper();
@Resource
private DataSourceManage dataSourceManage;
@Resource
private QrtzSchedulerStateMapper qrtzSchedulerStateMapper;
@Resource
private DataSourceExtMapper dataSourceExtMapper;
@Resource
private CoreDsFinishPageMapper coreDsFinishPageMapper;
@Resource
private DatasetDataManage datasetDataManage;
// Optional permission/user APIs; absent unless the xpack module is present.
@Autowired(required = false)
private UserApi userApi;
@Autowired(required = false)
private InteractiveAuthApi interactiveAuthApi;
// Not implemented yet.
// NOTE(review): returns null rather than an empty list — confirm callers tolerate this.
@Override
public List<DatasourceDTO> query(String keyWord) {
return null;
}
// Sync scope for extract operations: full re-extract vs. incremental append
// (string forms "all_scope"/"add_scope" are also passed to extractExcelData).
public enum UpdateType {
all_scope, add_scope
}
@Resource
private CommonThreadPool commonThreadPool;
// Guards against overlapping background status-update runs.
// NOTE(review): plain boolean, not volatile/atomic — confirm single-threaded access.
private boolean isUpdatingStatus = false;
/**
 * Handles tree actions embedded in a save request: moving a node to a new
 * parent, renaming a datasource, or creating a node from the tree.
 *
 * @param dataSourceDTO carries the action ("move" | "rename" | "create") plus target fields
 * @throws DEException when a move has no target folder or would target itself
 */
public void move(DatasourceDTO dataSourceDTO) throws DEException {
switch (dataSourceDTO.getAction()) {
case "move" -> {
if (dataSourceDTO.getPid() == null) {
DEException.throwException("目录必选!");
}
if (Objects.equals(dataSourceDTO.getId(), dataSourceDTO.getPid())) {
DEException.throwException("pid can not equal to id.");
}
dataSourceManage.move(dataSourceDTO);
}
case "rename" -> {
// Only the name changes; other columns come from the stored row.
CoreDatasource datasource = datasourceMapper.selectById(dataSourceDTO.getId());
datasource.setName(dataSourceDTO.getName());
dataSourceManage.innerEdit(datasource);
}
case "create" -> {
// NOTE(review): type is taken from nodeType and configuration is blanked —
// presumably this path only creates folder nodes; confirm against the frontend.
CoreDatasource coreDatasource = new CoreDatasource();
BeanUtils.copyBean(coreDatasource, dataSourceDTO);
coreDatasource.setCreateTime(System.currentTimeMillis());
coreDatasource.setUpdateTime(System.currentTimeMillis());
coreDatasource.setTaskStatus(TaskStatus.WaitingForExecution.name());
coreDatasource.setType(dataSourceDTO.getNodeType());
coreDatasource.setId(IDUtils.snowID());
coreDatasource.setConfiguration("");
dataSourceManage.innerSave(coreDatasource);
}
default -> {
}
}
}
/**
 * Recursively collects the ids of all tree nodes of the given datasource type,
 * excluding the datasource currently being edited ({@code id}) so that
 * checkRepeat() never compares a datasource against itself.
 *
 * @param busiNodeVOS tree nodes to scan
 * @param ids         output collector for matching node ids
 * @param type        datasource type to match (case-insensitive)
 * @param id          id of the datasource being edited, or null when creating
 */
private void filterDs(List<BusiNodeVO> busiNodeVOS, List<Long> ids, String type, Long id) {
    for (BusiNodeVO busiNodeVO : busiNodeVOS) {
        if (busiNodeVO.getType() != null && busiNodeVO.getType().equalsIgnoreCase(type)) {
            // Fix: both branches previously added the id, so the node being
            // edited was included and compared against itself downstream.
            if (id == null || !busiNodeVO.getId().equals(id)) {
                ids.add(busiNodeVO.getId());
            }
        }
        if (CollectionUtil.isNotEmpty(busiNodeVO.getChildren())) {
            filterDs(busiNodeVO.getChildren(), ids, type, id);
        }
    }
}
/**
 * Returns true when another datasource of the same type already points at the
 * same host/port/database (and, for schema-scoped databases, the same schema).
 * API/Excel/folder nodes carry no connection info and never collide.
 */
public boolean checkRepeat(@RequestBody DatasourceDTO dataSourceDTO) {
    if (Arrays.asList("API", "Excel", "folder").contains(dataSourceDTO.getType())) {
        return false;
    }
    BusiNodeRequest request = new BusiNodeRequest();
    request.setBusiFlag("datasource");
    List<BusiNodeVO> busiNodeVOS = dataSourceManage.tree(request);
    List<Long> ids = new ArrayList<>();
    filterDs(busiNodeVOS, ids, dataSourceDTO.getType(), dataSourceDTO.getId());
    if (CollectionUtil.isEmpty(ids)) {
        return false;
    }
    QueryWrapper<CoreDatasource> wrapper = new QueryWrapper<>();
    wrapper.in("id", ids);
    List<CoreDatasource> datasources = datasourceMapper.selectList(wrapper);
    if (CollectionUtil.isEmpty(datasources)) {
        return false;
    }
    // The configuration arrives Base64-encoded from the frontend.
    dataSourceDTO.setConfiguration(new String(Base64.getDecoder().decode(dataSourceDTO.getConfiguration())));
    DatasourceConfiguration configuration = JsonUtil.parseObject(dataSourceDTO.getConfiguration(), DatasourceConfiguration.class);
    for (CoreDatasource datasource : datasources) {
        if (Arrays.asList("API", "Excel", "folder").contains(datasource.getType())) {
            continue;
        }
        DatasourceConfiguration compare = JsonUtil.parseObject(datasource.getConfiguration(), DatasourceConfiguration.class);
        boolean sameEndpoint = configuration.getHost().equalsIgnoreCase(compare.getHost())
                && Objects.equals(configuration.getPort(), compare.getPort())
                && configuration.getDataBase().equalsIgnoreCase(compare.getDataBase());
        switch (dataSourceDTO.getType()) {
            // Schema-scoped databases must also match on schema.
            case "sqlServer":
            case "db2":
            case "oracle":
            case "pg":
            case "redshift":
                if (sameEndpoint && configuration.getSchema().equalsIgnoreCase(compare.getSchema())) {
                    // Fix: return on the first match — the flag was previously
                    // overwritten each iteration, so only the LAST candidate counted.
                    return true;
                }
                break;
            default:
                if (sameEndpoint) {
                    return true;
                }
                break;
        }
    }
    return false;
}
/**
 * Creates a datasource, or routes to move()/update() for tree actions and
 * edits. For Excel/API datasources the backing engine tables are created and a
 * data sync is set up; otherwise the Calcite provider's registry is refreshed.
 *
 * @throws DEException on validation failure or when engine tables cannot be created
 */
@Override
public DatasourceDTO save(DatasourceDTO dataSourceDTO) throws DEException {
// Tree actions (move / rename / create) bypass the normal save path.
if (StringUtils.isNotEmpty(dataSourceDTO.getAction())) {
move(dataSourceDTO);
return dataSourceDTO;
}
if (StringUtils.isNotEmpty(dataSourceDTO.getNodeType()) && dataSourceDTO.getNodeType().equalsIgnoreCase("folder")) {
dataSourceDTO.setType("folder");
dataSourceDTO.setConfiguration("");
}
// A positive id means this is an edit, not a create.
if (dataSourceDTO.getId() != null && dataSourceDTO.getId() > 0) {
return update(dataSourceDTO);
}
// The configuration arrives Base64-encoded from the frontend.
if (StringUtils.isNotEmpty(dataSourceDTO.getConfiguration())) {
dataSourceDTO.setConfiguration(new String(Base64.getDecoder().decode(dataSourceDTO.getConfiguration())));
}
preCheckDs(dataSourceDTO);
dataSourceDTO.setId(IDUtils.snowID());
CoreDatasource coreDatasource = new CoreDatasource();
BeanUtils.copyBean(coreDatasource, dataSourceDTO);
coreDatasource.setCreateTime(System.currentTimeMillis());
coreDatasource.setUpdateTime(System.currentTimeMillis());
try {
// Best effort: a failed connectivity check must not block the save.
checkDatasourceStatus(coreDatasource);
} catch (Exception ignore) {
}
coreDatasource.setTaskStatus(TaskStatus.WaitingForExecution.name());
coreDatasource.setCreateBy(AuthUtils.getUser().getUserId().toString());
coreDatasource.setUpdateBy(AuthUtils.getUser().getUserId());
dataSourceManage.innerSave(coreDatasource);
if (dataSourceDTO.getType().equals(DatasourceConfiguration.DatasourceType.Excel.name())) {
// Excel: create one engine table per sheet, then extract all data.
DatasourceRequest datasourceRequest = new DatasourceRequest();
datasourceRequest.setDatasource(coreDatasource);
List<DatasetTableDTO> tables = ExcelUtils.getTables(datasourceRequest);
for (DatasetTableDTO table : tables) {
datasourceRequest.setTable(table.getTableName());
List<TableField> tableFields = ExcelUtils.getTableFields(datasourceRequest);
try {
datasourceSyncManage.createEngineTable(datasourceRequest.getTable(), tableFields);
} catch (Exception e) {
DEException.throwException("Failed to create table " + datasourceRequest.getTable());
}
}
datasourceSyncManage.extractExcelData(coreDatasource, "all_scope");
} else if (dataSourceDTO.getType().equals(DatasourceConfiguration.DatasourceType.API.name())) {
// API: persist the sync task, register its schedule, create engine tables.
CoreDatasourceTask coreDatasourceTask = new CoreDatasourceTask();
BeanUtils.copyBean(coreDatasourceTask, dataSourceDTO.getSyncSetting());
coreDatasourceTask.setName(coreDatasource.getName() + "-task");
coreDatasourceTask.setDsId(coreDatasource.getId());
if (coreDatasourceTask.getStartTime() == null) {
// Default start: slightly in the past so the first run fires promptly.
coreDatasourceTask.setStartTime(System.currentTimeMillis() - 20 * 1000);
}
if (StringUtils.equalsIgnoreCase(coreDatasourceTask.getSyncRate(), RIGHTNOW.toString())) {
coreDatasourceTask.setCron(null);
} else {
if (StringUtils.equalsIgnoreCase(coreDatasourceTask.getEndLimit(), "1") && coreDatasourceTask.getStartTime() > coreDatasourceTask.getEndTime()) {
DEException.throwException("结束时间不能小于开始时间!");
}
}
coreDatasourceTask.setTaskStatus(TaskStatus.WaitingForExecution.name());
datasourceTaskServer.insert(coreDatasourceTask);
datasourceSyncManage.addSchedule(coreDatasourceTask);
DatasourceRequest datasourceRequest = new DatasourceRequest();
datasourceRequest.setDatasource(coreDatasource);
List<DatasetTableDTO> tables = ApiUtils.getTables(datasourceRequest);
// Reject duplicate (case-insensitive) API table names up front.
checkName(tables.stream().map(DatasetTableDTO::getName).collect(Collectors.toList()));
for (DatasetTableDTO api : tables) {
datasourceRequest.setTable(api.getTableName());
List<TableField> tableFields = ApiUtils.getTableFields(datasourceRequest);
try {
datasourceSyncManage.createEngineTable(datasourceRequest.getTable(), tableFields);
} catch (Exception e) {
DEException.throwException("Failed to create table " + datasourceRequest.getTable() + ": " + e.getMessage());
}
}
} else {
// JDBC datasource: register/refresh the Calcite connection.
calciteProvider.update(dataSourceDTO);
}
return dataSourceDTO;
}
/**
 * Rejects the table list when two names collide case-insensitively.
 * The name reported is the earlier element of the first colliding pair.
 */
private static void checkName(List<String> tables) {
    final int count = tables.size();
    for (int first = 0; first + 1 < count; first++) {
        final String candidate = tables.get(first);
        for (int second = first + 1; second < count; second++) {
            if (candidate.equalsIgnoreCase(tables.get(second))) {
                DEException.throwException(Translator.get("i18n_table_name_repeat") + candidate);
            }
        }
    }
}
/**
 * Edits an existing datasource. For API sources the engine-table set is diffed
 * against the stored configuration (tables are created, dropped, or rebuilt
 * when their field set changed) and the sync schedule is replaced; for Excel
 * sources the tables are rebuilt or appended depending on editType; for JDBC
 * sources the Calcite registry is refreshed. Falls back to save() when no id
 * is present.
 */
public DatasourceDTO update(DatasourceDTO dataSourceDTO) throws DEException {
Long pk = null;
if (ObjectUtils.isEmpty(pk = dataSourceDTO.getId())) {
return save(dataSourceDTO);
}
CoreDatasource sourceData = datasourceMapper.selectById(pk);
// The configuration arrives Base64-encoded from the frontend.
dataSourceDTO.setConfiguration(new String(Base64.getDecoder().decode(dataSourceDTO.getConfiguration())));
// The parent folder cannot be changed through update.
dataSourceDTO.setPid(sourceData.getPid());
preCheckDs(dataSourceDTO);
CoreDatasource requestDatasource = new CoreDatasource();
BeanUtils.copyBean(requestDatasource, dataSourceDTO);
requestDatasource.setUpdateTime(System.currentTimeMillis());
requestDatasource.setUpdateBy(AuthUtils.getUser().getUserId());
try {
// Best effort: a failed connectivity check must not block the edit.
checkDatasourceStatus(requestDatasource);
} catch (Exception ignore) {
}
// Two requests: one against the stored config, one against the incoming config.
DatasourceRequest sourceTableRequest = new DatasourceRequest();
sourceTableRequest.setDatasource(sourceData);
DatasourceRequest datasourceRequest = new DatasourceRequest();
datasourceRequest.setDatasource(requestDatasource);
List<String> toCreateTables = new ArrayList<>();
List<String> toDeleteTables = new ArrayList<>();
if (dataSourceDTO.getType().equals(DatasourceConfiguration.DatasourceType.API.name())) {
List<String> sourceTables = ApiUtils.getTables(sourceTableRequest).stream().map(DatasetTableDTO::getTableName).collect(Collectors.toList());
List<DatasetTableDTO> datasetTableDTOS = ApiUtils.getTables(datasourceRequest);
List<String> tables = datasetTableDTOS.stream().map(DatasetTableDTO::getTableName).collect(Collectors.toList());
checkName(datasetTableDTOS.stream().map(DatasetTableDTO::getName).collect(Collectors.toList()));
// Tables only in the new config are created; tables only in the old config are dropped.
toCreateTables = tables.stream().filter(table -> !sourceTables.contains(table)).collect(Collectors.toList());
toDeleteTables = sourceTables.stream().filter(table -> !tables.contains(table)).collect(Collectors.toList());
for (String table : tables) {
for (String sourceTable : sourceTables) {
if (table.equals(sourceTable)) {
datasourceRequest.setTable(table);
List<String> tableFields = ApiUtils.getTableFields(datasourceRequest).stream().map(TableField::getName).sorted().collect(Collectors.toList());
sourceTableRequest.setTable(sourceTable);
List<String> sourceTableFields = ApiUtils.getTableFields(sourceTableRequest).stream().map(TableField::getName).sorted().collect(Collectors.toList());
// A table kept in both configs but with a changed field set is rebuilt.
if (!String.join(",", tableFields).equals(String.join(",", sourceTableFields))) {
toDeleteTables.add(table);
toCreateTables.add(table);
}
}
}
}
CoreDatasourceTask coreDatasourceTask = new CoreDatasourceTask();
BeanUtils.copyBean(coreDatasourceTask, dataSourceDTO.getSyncSetting());
coreDatasourceTask.setName(requestDatasource.getName() + "-task");
coreDatasourceTask.setDsId(requestDatasource.getId());
if (StringUtils.equalsIgnoreCase(coreDatasourceTask.getSyncRate(), RIGHTNOW.toString())) {
// Immediate run: start slightly in the past, no cron schedule.
coreDatasourceTask.setStartTime(System.currentTimeMillis() - 20 * 1000);
coreDatasourceTask.setCron(null);
} else {
if (StringUtils.equalsIgnoreCase(coreDatasourceTask.getEndLimit(), "1") && coreDatasourceTask.getStartTime() > coreDatasourceTask.getEndTime()) {
DEException.throwException("结束时间不能小于开始时间!");
}
}
coreDatasourceTask.setTaskStatus(TaskStatus.WaitingForExecution.toString());
datasourceTaskServer.update(coreDatasourceTask);
for (String deleteTable : toDeleteTables) {
try {
datasourceSyncManage.dropEngineTable(deleteTable);
} catch (Exception e) {
DEException.throwException("Failed to drop table " + deleteTable);
}
}
for (String toCreateTable : toCreateTables) {
datasourceRequest.setTable(toCreateTable);
try {
datasourceSyncManage.createEngineTable(toCreateTable, ApiUtils.getTableFields(datasourceRequest));
} catch (Exception e) {
DEException.throwException("Failed to create table " + toCreateTable);
}
}
// Replace the old schedule with one built from the new task settings.
datasourceSyncManage.deleteSchedule(datasourceTaskServer.selectByDSId(dataSourceDTO.getId()));
datasourceSyncManage.addSchedule(coreDatasourceTask);
dataSourceManage.innerEdit(requestDatasource);
} else if (dataSourceDTO.getType().equals(DatasourceConfiguration.DatasourceType.Excel.name())) {
List<String> sourceTables = ExcelUtils.getTables(sourceTableRequest).stream().map(DatasetTableDTO::getTableName).collect(Collectors.toList());
List<String> tables = ExcelUtils.getTables(datasourceRequest).stream().map(DatasetTableDTO::getTableName).collect(Collectors.toList());
// editType 0: full replace — drop every old table, recreate from the new file.
if (dataSourceDTO.getEditType() == 0) {
toCreateTables = tables;
toDeleteTables = sourceTables;
for (String deleteTable : toDeleteTables) {
try {
datasourceSyncManage.dropEngineTable(deleteTable);
} catch (Exception e) {
DEException.throwException("Failed to drop table " + deleteTable);
}
}
for (String toCreateTable : toCreateTables) {
datasourceRequest.setTable(toCreateTable);
try {
datasourceSyncManage.createEngineTable(toCreateTable, ExcelUtils.getTableFields(datasourceRequest));
} catch (Exception e) {
DEException.throwException("Failed to create table " + toCreateTable);
}
}
datasourceSyncManage.extractExcelData(requestDatasource, "all_scope");
dataSourceManage.innerEdit(requestDatasource);
} else {
// Any other editType: append new data to the existing tables.
datasourceSyncManage.extractExcelData(requestDatasource, "add_scope");
dataSourceManage.innerEdit(requestDatasource);
}
} else {
dataSourceManage.innerEdit(requestDatasource);
calciteProvider.update(dataSourceDTO);
}
return dataSourceDTO;
}
// Strips the decoration from a generated Excel table name (drops the first 6
// and last 11 characters; StringUtils.substring is null/range safe).
// NOTE(review): presumably removes a fixed prefix and a "_<timestamp>"-style
// suffix — confirm against the table-name generator.
private String excelDataTableName(String name) {
return StringUtils.substring(name, 6, name.length() - 11);
}
/** Lists every datasource type supported by this server. */
@Override
public List<DatasourceConfiguration.DatasourceType> datasourceTypes() {
    DatasourceConfiguration.DatasourceType[] allTypes = DatasourceConfiguration.DatasourceType.values();
    return Arrays.asList(allTypes);
}
/**
 * Validates the configuration supplied by the caller (Base64-encoded) without
 * persisting anything; the resulting status is copied back onto the DTO.
 */
@Override
public DatasourceDTO validate(DatasourceDTO dataSourceDTO) throws DEException {
dataSourceDTO.setConfiguration(new String(Base64.getDecoder().decode(dataSourceDTO.getConfiguration())));
CoreDatasource coreDatasource = new CoreDatasource();
BeanUtils.copyBean(coreDatasource, dataSourceDTO);
checkDatasourceStatus(coreDatasource);
dataSourceDTO.setStatus(coreDatasource.getStatus());
return dataSourceDTO;
}
/**
 * Decodes the Base64 configuration and asks the Calcite provider for the
 * schema names available on the target database.
 */
@Override
public List<String> getSchema(DatasourceDTO dataSourceDTO) throws DEException {
    byte[] decodedConfiguration = Base64.getDecoder().decode(dataSourceDTO.getConfiguration());
    dataSourceDTO.setConfiguration(new String(decodedConfiguration));
    CoreDatasource datasource = new CoreDatasource();
    BeanUtils.copyBean(datasource, dataSourceDTO);
    DatasourceRequest request = new DatasourceRequest();
    request.setDatasource(datasource);
    return calciteProvider.getSchema(request);
}
/**
 * Loads a datasource by id and enriches the DTO: for API sources the
 * per-endpoint statuses and last sync times are resolved and an aggregate
 * status (Success/Warning/Error) is computed; for Excel sources the file name
 * and size are attached. The configuration is returned Base64-encoded.
 */
@Override
public DatasourceDTO get(Long datasourceId) throws DEException {
DatasourceDTO datasourceDTO = new DatasourceDTO();
CoreDatasource datasource = datasourceMapper.selectById(datasourceId);
BeanUtils.copyBean(datasourceDTO, datasource);
TypeReference<List<ApiDefinition>> listTypeReference = new TypeReference<List<ApiDefinition>>() {
};
if (datasourceDTO.getType().equalsIgnoreCase(DatasourceConfiguration.DatasourceType.API.toString())) {
// API configuration is a JSON list of endpoint definitions.
List<ApiDefinition> apiDefinitionList = JsonUtil.parseList(datasourceDTO.getConfiguration(), listTypeReference);
List<ApiDefinition> apiDefinitionListWithStatus = new ArrayList<>();
int success = 0;
for (ApiDefinition apiDefinition : apiDefinitionList) {
String status = null;
if (StringUtils.isNotEmpty(datasourceDTO.getStatus())) {
// Stored status is a JSON array of {name, status} entries; find this endpoint's.
JsonNode jsonNode = null;
try {
jsonNode = objectMapper.readTree(datasourceDTO.getStatus());
} catch (Exception e) {
DEException.throwException(e);
}
for (JsonNode node : jsonNode) {
if (node.get("name").asText().equals(apiDefinition.getName())) {
status = node.get("status").asText();
}
}
apiDefinition.setStatus(status);
}
if (StringUtils.isNotEmpty(status) && status.equalsIgnoreCase("Success")) {
success++;
}
CoreDatasourceTaskLog log = datasourceTaskServer.lastSyncLogForTable(datasourceId, apiDefinition.getDeTableName());
if (log != null) {
apiDefinition.setUpdateTime(log.getStartTime());
}
apiDefinitionListWithStatus.add(apiDefinition);
}
datasourceDTO.setApiConfigurationStr(new String(Base64.getEncoder().encode(Objects.requireNonNull(JsonUtil.toJSONString(apiDefinitionListWithStatus)).toString().getBytes())));
// Aggregate: all endpoints ok -> Success, some -> Warning, none -> Error.
if (success == apiDefinitionList.size()) {
datasourceDTO.setStatus("Success");
} else {
if (success > 0 && success < apiDefinitionList.size()) {
datasourceDTO.setStatus("Warning");
} else {
datasourceDTO.setStatus("Error");
}
}
CoreDatasourceTask coreDatasourceTask = datasourceTaskServer.selectByDSId(datasourceDTO.getId());
TaskDTO taskDTO = new TaskDTO();
BeanUtils.copyBean(taskDTO, coreDatasourceTask);
datasourceDTO.setSyncSetting(taskDTO);
// NOTE(review): second identical selectByDSId lookup — could reuse the one above.
CoreDatasourceTask task = datasourceTaskServer.selectByDSId(datasourceDTO.getId());
if (task != null) {
datasourceDTO.setLastSyncTime(task.getStartTime());
}
}
if (datasourceDTO.getType().equalsIgnoreCase(DatasourceConfiguration.DatasourceType.Excel.toString())) {
datasourceDTO.setFileName(ExcelUtils.getFileName(datasource));
datasourceDTO.setSize(ExcelUtils.getSize(datasource));
}
// Configuration is always handed back Base64-encoded.
datasourceDTO.setConfiguration(new String(Base64.getEncoder().encode(datasourceDTO.getConfiguration().getBytes())));
if (userApi != null) {
UserFormVO userFormVO = userApi.queryById(Long.valueOf(datasourceDTO.getCreateBy()));
if (userFormVO != null) {
datasourceDTO.setCreator(userFormVO.getName());
}
}
return datasourceDTO;
}
/**
 * Deletes a datasource (and, for folders, its subtree). Dispatches through the
 * Spring-managed bean fetched from CommonBeanFactory so the proxy (aspects,
 * transaction) applies to the recursive implementation.
 */
@Override
@XpackInteract(value = "datasourceResourceTree", before = false)
public void delete(Long datasourceId) throws DEException {
Objects.requireNonNull(CommonBeanFactory.getBean(DatasourceServer.class)).recursionDel(datasourceId);
}
/**
 * Deletes the datasource row plus its engine tables (Excel/API), its sync
 * tasks (API), its Calcite registration (JDBC), and — for folder nodes —
 * every child datasource recursively. No-op when the id does not exist.
 */
public void recursionDel(Long datasourceId) throws DEException {
CoreDatasource coreDatasource = datasourceMapper.selectById(datasourceId);
if (ObjectUtils.isEmpty(coreDatasource)) {
return;
}
if (coreDatasource.getType().equals(DatasourceConfiguration.DatasourceType.Excel.name())) {
// Drop the engine table backing each Excel sheet.
DatasourceRequest datasourceRequest = new DatasourceRequest();
datasourceRequest.setDatasource(coreDatasource);
List<DatasetTableDTO> tables = ExcelUtils.getTables(datasourceRequest);
for (DatasetTableDTO table : tables) {
datasourceRequest.setTable(table.getTableName());
try {
datasourceSyncManage.dropEngineTable(datasourceRequest.getTable());
} catch (Exception e) {
DEException.throwException("Failed to drop table " + datasourceRequest.getTable());
}
}
}
if (coreDatasource.getType().equals(DatasourceConfiguration.DatasourceType.API.name())) {
// Drop the engine table for each API endpoint, then remove its sync tasks.
DatasourceRequest datasourceRequest = new DatasourceRequest();
datasourceRequest.setDatasource(coreDatasource);
List<DatasetTableDTO> tables = ApiUtils.getTables(datasourceRequest);
for (DatasetTableDTO api : tables) {
datasourceRequest.setTable(api.getTableName());
try {
datasourceSyncManage.dropEngineTable(datasourceRequest.getTable());
} catch (Exception e) {
DEException.throwException("Failed to drop table " + datasourceRequest.getTable());
}
}
datasourceTaskServer.deleteByDSId(datasourceId);
}
datasourceMapper.deleteById(datasourceId);
if (!Arrays.asList("API", "Excel", "folder").contains(coreDatasource.getType())) {
calciteProvider.delete(coreDatasource);
}
if (coreDatasource.getType().equals(DatasourceConfiguration.DatasourceType.folder.name())) {
// Recursively delete every child node of a folder.
QueryWrapper<CoreDatasource> wrapper = new QueryWrapper<>();
wrapper.eq("pid", datasourceId);
List<CoreDatasource> coreDatasources = datasourceMapper.selectList(wrapper);
if (ObjectUtils.isNotEmpty(coreDatasources)) {
for (CoreDatasource record : coreDatasources) {
delete(record.getId());
}
}
}
}
/** Validates connectivity for a stored datasource, looked up by id. */
@Override
public DatasourceDTO validate(Long datasourceId) throws DEException {
    return validate(datasourceMapper.selectById(datasourceId));
}
/**
 * Checks the datasource status, persists the new status column, and (when the
 * permission module is present) pushes the refreshed extra-flag to the
 * resource tree.
 */
private DatasourceDTO validate(CoreDatasource coreDatasource) {
checkDatasourceStatus(coreDatasource);
DatasourceDTO datasourceDTO = new DatasourceDTO();
BeanUtils.copyBean(datasourceDTO, coreDatasource);
// Persist only the status column for this id.
CoreDatasource record = new CoreDatasource();
record.setStatus(coreDatasource.getStatus());
QueryWrapper<CoreDatasource> wrapper = new QueryWrapper<>();
wrapper.eq("id", coreDatasource.getId());
datasourceMapper.update(record, wrapper);
if (interactiveAuthApi != null) {
BusiResourceEditor editor = new BusiResourceEditor();
// NOTE(review): the cast unboxes getId() — NPE if id is ever null; confirm ids are always set here.
editor.setId((long) coreDatasource.getId());
editor.setName(coreDatasource.getName());
editor.setExtraFlag(getExtraFlag(coreDatasource.getType(), coreDatasource.getStatus()));
interactiveAuthApi.editResource(editor);
}
return datasourceDTO;
}
/**
 * Computes the signed resource-tree flag for a datasource: the type's flag
 * value, negated when the datasource is in an error state. For API sources
 * the status is a JSON array of per-endpoint statuses, and a single "Error"
 * entry marks the whole datasource as failed.
 */
private int getExtraFlag(Object typeObj, Object statusObj) {
    boolean failed = false;
    if (ObjectUtils.isNotEmpty(statusObj)) {
        String status = statusObj.toString();
        if (typeObj.toString().equalsIgnoreCase("API")) {
            TypeReference<List<ObjectNode>> listTypeReference = new TypeReference<List<ObjectNode>>() {
            };
            for (ObjectNode jsonNodes : JsonUtil.parseList(status, listTypeReference)) {
                JsonNode nodeStatus = jsonNodes.get("status");
                if (nodeStatus != null && nodeStatus.asText().equalsIgnoreCase("Error")) {
                    failed = true;
                    break;
                }
            }
        } else {
            failed = StringUtils.equalsIgnoreCase(status, "Error");
        }
    }
    int flag = DataSourceType.valueOf(typeObj.toString()).getFlag();
    return failed ? -flag : flag;
}
/** Returns the datasource resource tree built by the datasource manager. */
@Override
public List<BusiNodeVO> tree(BusiNodeRequest request) throws DEException {
return dataSourceManage.tree(request);
}
/**
 * Lists the tables available in a datasource: API endpoints (enriched with
 * last-sync time/status from the task log), Excel sheets, or database tables
 * via the Calcite provider.
 */
@Override
public List<DatasetTableDTO> getTables(DatasetTableDTO datasetTableDTO) throws DEException {
CoreDatasource coreDatasource = datasourceMapper.selectById(datasetTableDTO.getDatasourceId());
DatasourceRequest datasourceRequest = new DatasourceRequest();
datasourceRequest.setDatasource(coreDatasource);
if (coreDatasource.getType().equals("API")) {
List<DatasetTableDTO> datasetTableDTOS = ApiUtils.getTables(datasourceRequest);
// Attach last sync time/status from the task log to each API table.
datasetTableDTOS.forEach(datasetTableDTO1 -> {
CoreDatasourceTaskLog log = datasourceTaskServer.lastSyncLogForTable(datasetTableDTO.getDatasourceId(), datasetTableDTO1.getTableName());
if (log != null) {
datasetTableDTO1.setLastUpdateTime(log.getStartTime());
datasetTableDTO1.setStatus(log.getTaskStatus());
}
});
return datasetTableDTOS;
}
if (coreDatasource.getType().equals("Excel")) {
return ExcelUtils.getTables(datasourceRequest);
}
return calciteProvider.getTables(datasourceRequest);
}
/**
 * Returns the field list of a table belonging to a datasource. API/Excel
 * tables are physically stored in the internal engine, so those are resolved
 * against the engine (with the synthetic "dataease_uuid" row-id column
 * filtered out); other types query the datasource itself.
 *
 * @param req map with "tableName" and "datasourceId" entries
 */
@Override
public List<TableField> getTableField(Map<String, String> req) throws DEException {
    String tableName = req.get("tableName");
    String datasourceId = req.get("datasourceId");
    CoreDatasource coreDatasource = datasourceMapper.selectById(datasourceId);
    if (coreDatasource.getType().equals("API") || coreDatasource.getType().equals("Excel")) {
        // API/Excel data lives in the engine database, not the source itself.
        List<TableField> tableFields = fetchTableFields(engineServer.getDeEngine(), tableName);
        return tableFields.stream()
                .filter(tableField -> !tableField.getOriginName().equalsIgnoreCase("dataease_uuid"))
                .collect(Collectors.toList());
    }
    return fetchTableFields(coreDatasource, tableName);
}

/**
 * Runs a zero-row query (LIMIT 0) against the given datasource to obtain
 * only the field metadata of a table.
 */
private List<TableField> fetchTableFields(CoreDatasource datasource, String tableName) {
    DatasourceRequest datasourceRequest = new DatasourceRequest();
    datasourceRequest.setDatasource(datasource);
    DatasourceSchemaDTO datasourceSchemaDTO = new DatasourceSchemaDTO();
    BeanUtils.copyBean(datasourceSchemaDTO, datasource);
    datasourceSchemaDTO.setSchemaAlias(String.format(SQLConstants.SCHEMA, datasourceSchemaDTO.getId()));
    datasourceRequest.setDsList(Map.of(datasourceSchemaDTO.getId(), datasourceSchemaDTO));
    // LIMIT 0 OFFSET 0: fetch result metadata only, no rows.
    datasourceRequest.setQuery(TableUtils.tableName2Sql(datasourceSchemaDTO, tableName) + " LIMIT 0 OFFSET 0");
    return (List<TableField>) calciteProvider.fetchResultField(datasourceRequest).get("fields");
}
/**
 * Manually triggers extraction of a single API table.
 *
 * @param req map with "tableName" (physical table), "name" (display name)
 *            and "datasourceId"; the datasource task's configured update
 *            type is reused for the extraction.
 */
@Override
public void syncApiTable(Map<String, String> req) throws DEException {
    String tableName = req.get("tableName");
    String name = req.get("name");
    Long datasourceId = Long.valueOf(req.get("datasourceId"));
    datasourceSyncManage.extractDataForTable(datasourceId, name, tableName, datasourceTaskServer.selectByDSId(datasourceId).getUpdateType());
}
/**
 * Manually triggers a full extraction of an API datasource using its
 * configured task's update type; the run is recorded as a MANUAL trigger.
 *
 * @param req map with a "datasourceId" entry
 */
@Override
public void syncApiDs(Map<String, String> req) throws Exception {
    Long datasourceId = Long.valueOf(req.get("datasourceId"));
    CoreDatasourceTask coreDatasourceTask = datasourceTaskServer.selectByDSId(datasourceId);
    CoreDatasource coreDatasource = datasourceMapper.selectById(datasourceId);
    DatasourceServer.UpdateType updateType = DatasourceServer.UpdateType.valueOf(coreDatasourceTask.getUpdateType());
    datasourceSyncManage.extractedData(null, coreDatasource, updateType, MANUAL.toString());
}
/**
 * Saves an uploaded Excel/CSV file and parses its sheets. When editType is
 * 0 or 1 (updating an existing Excel datasource) each parsed sheet is
 * matched against the datasource's existing tables by sheet name (CSV files
 * match any table) and by identical column name/type multisets; sheets that
 * match nothing are rejected. Finally, duplicate (case-insensitive) column
 * names inside any sheet raise an error.
 *
 * @param file         uploaded workbook or CSV
 * @param datasourceId target Excel datasource id (used when editType is 0/1)
 * @param editType     0/1 = update existing datasource, otherwise new upload
 * @return parsed file data restricted to the sheets that may be imported
 */
public ExcelFileData excelUpload(@RequestParam("file") MultipartFile file, @RequestParam("id") long datasourceId, @RequestParam("editType") Integer editType) throws DEException {
    ExcelUtils excelUtils = new ExcelUtils();
    ExcelFileData excelFileData = excelUtils.excelSaveAndParse(file);
    if (editType == 1 || editType == 0) { // match sheets against existing tables by excel sheet name
        CoreDatasource coreDatasource = datasourceMapper.selectById(datasourceId);
        if (coreDatasource != null) {
            DatasourceRequest datasourceRequest = new DatasourceRequest();
            datasourceRequest.setDatasource(coreDatasource);
            List<DatasetTableDTO> datasetTableDTOS = ExcelUtils.getTables(datasourceRequest);
            List<ExcelSheetData> excelSheetDataList = new ArrayList<>();
            for (ExcelSheetData sheet : excelFileData.getSheets()) {
                for (DatasetTableDTO datasetTableDTO : datasetTableDTOS) {
                    if (excelDataTableName(datasetTableDTO.getTableName()).equals(sheet.getTableName()) || isCsv(file.getOriginalFilename())) {
                        List<String> fieldNames = sheet.getFields().stream().map(TableField::getName).collect(Collectors.toList());
                        List<String> fieldTypes = sheet.getFields().stream().map(TableField::getFieldType).collect(Collectors.toList());
                        Collections.sort(fieldNames);
                        Collections.sort(fieldTypes);
                        datasourceRequest.setTable(datasetTableDTO.getTableName());
                        // Fetch the existing table's fields ONCE and derive both
                        // lists from it (the original called getTableFields twice,
                        // doubling the metadata read per candidate table).
                        List<TableField> oldFields = ExcelUtils.getTableFields(datasourceRequest);
                        List<String> oldFieldNames = oldFields.stream().map(TableField::getName).collect(Collectors.toList());
                        List<String> oldFieldTypes = oldFields.stream().map(TableField::getFieldType).collect(Collectors.toList());
                        Collections.sort(oldFieldNames);
                        Collections.sort(oldFieldTypes);
                        // A sheet is compatible when names and types match as sorted lists.
                        if (fieldNames.equals(oldFieldNames) && fieldTypes.equals(oldFieldTypes)) {
                            sheet.setDeTableName(datasetTableDTO.getTableName());
                            excelSheetDataList.add(sheet);
                        }
                    }
                }
            }
            if (CollectionUtils.isEmpty(excelSheetDataList)) {
                DEException.throwException("上传文件与源文件不一致,请检查文件!");
            }
            excelFileData.setSheets(excelSheetDataList);
        }
    }
    // Reject sheets containing duplicate (case-insensitive) column names.
    for (ExcelSheetData sheet : excelFileData.getSheets()) {
        for (int i = 0; i < sheet.getFields().size() - 1; i++) {
            for (int j = i + 1; j < sheet.getFields().size(); j++) {
                if (sheet.getFields().get(i).getName().equalsIgnoreCase(sheet.getFields().get(j).getName())) {
                    DEException.throwException(sheet.getExcelLabel() + Translator.get("i18n_field_name_repeat") + sheet.getFields().get(i).getName());
                }
            }
        }
    }
    return excelFileData;
}
/** Returns true when the file name's extension is "csv" (case-insensitive). */
private boolean isCsv(String fileName) {
    return "csv".equalsIgnoreCase(fileName.substring(fileName.lastIndexOf(".") + 1));
}
/**
 * Validates a single API definition: decodes the Base64 "data" payload,
 * executes the HTTP request with a 10s timeout, and checks the response
 * against the definition. Basic-Auth credentials are Base64-encoded before
 * the definition is returned to the caller.
 *
 * @param req map with "data" (Base64 JSON ApiDefinition) and optional
 *            "type" = "apiStructure" to expose the parsed response structure
 */
public ApiDefinition checkApiDatasource(Map<String, String> request) throws DEException {
    ApiDefinition apiDefinition = JsonUtil.parseObject(new String(java.util.Base64.getDecoder().decode(request.get("data"))), ApiDefinition.class);
    String response = ApiUtils.execHttpRequest(apiDefinition, 10);
    // "apiStructure" requests additionally expose the parsed response structure.
    if (request.keySet().contains("type") && request.get("type").equals("apiStructure")) {
        apiDefinition.setShowApiStructure(true);
    }
    ApiUtils.checkApiDefinition(apiDefinition, response);
    // Re-encode Basic-Auth credentials so they are never returned in plain text.
    if (apiDefinition.getRequest().getAuthManager() != null && StringUtils.isNotBlank(apiDefinition.getRequest().getAuthManager().getUsername()) && StringUtils.isNotBlank(apiDefinition.getRequest().getAuthManager().getPassword()) && apiDefinition.getRequest().getAuthManager().getVerification().equals("Basic Auth")) {
        apiDefinition.getRequest().getAuthManager().setUsername(new String(Base64.getEncoder().encode(apiDefinition.getRequest().getAuthManager().getUsername().getBytes())));
        apiDefinition.getRequest().getAuthManager().setPassword(new String(Base64.getEncoder().encode(apiDefinition.getRequest().getAuthManager().getPassword().getBytes())));
    }
    return apiDefinition;
}
/** Rejects datasource DTOs whose type is not among the registered datasource types. */
private void preCheckDs(DatasourceDTO datasource) throws DEException {
    List<String> supportedTypes = datasourceTypes().stream()
            .map(DatasourceConfiguration.DatasourceType::getType)
            .toList();
    if (!supportedTypes.contains(datasource.getType())) {
        DEException.throwException("Datasource type not supported.");
    }
}
/**
 * Probes a datasource's connectivity and writes the result into its status
 * field. Excel and folder entries have no connection to probe and are
 * skipped. On failure the status is set to "Error" and a DEException with
 * the probe's message is thrown.
 */
public void checkDatasourceStatus(CoreDatasource coreDatasource) throws DEException {
    if (coreDatasource.getType().equals(DatasourceConfiguration.DatasourceType.Excel.name()) || coreDatasource.getType().equals(DatasourceConfiguration.DatasourceType.folder.name())) {
        return;
    }
    try {
        DatasourceRequest datasourceRequest = new DatasourceRequest();
        datasourceRequest.setDatasource(coreDatasource);
        String status = null;
        if (coreDatasource.getType().equals("API")) {
            status = ApiUtils.checkStatus(datasourceRequest);
        } else {
            status = calciteProvider.checkStatus(datasourceRequest);
        }
        coreDatasource.setStatus(status);
    } catch (Exception e) {
        // NOTE(review): the original cause/stack trace is discarded here;
        // only the message is propagated to the caller.
        coreDatasource.setStatus("Error");
        DEException.throwException("校验失败: " + e.getMessage());
    }
}
/**
 * Intentionally a no-op in this edition — presumably an extension hook for
 * refreshing the bundled demo datasource; confirm against other editions.
 */
public void updateDemoDs() {
}
/**
 * Previews the content of a table via the dataset preview pipeline.
 * Returns null when the table name or datasource id is missing.
 *
 * @param req map with keys "table" (table name) and "id" (datasource id)
 */
@Override
public Map<String, Object> previewDataWithLimit(Map<String, Object> req) {
    // Validate BEFORE dereferencing: the original called toString() first
    // and threw an NPE on a missing key instead of returning null as the
    // emptiness check intends.
    Object tableObj = req.get("table");
    Object idObj = req.get("id");
    if (ObjectUtils.isEmpty(tableObj) || ObjectUtils.isEmpty(idObj)) {
        return null;
    }
    String tableName = tableObj.toString();
    Long id = Long.valueOf(idObj.toString());
    // NOTE(review): the table name is concatenated into SQL; it originates
    // from stored metadata here, but validating/escaping it upstream would
    // harden this against injection.
    String sql = "SELECT * FROM `" + tableName + "`";
    sql = new String(Base64.getEncoder().encode(sql.getBytes()));
    PreviewSqlDTO previewSqlDTO = new PreviewSqlDTO();
    previewSqlDTO.setSql(sql);
    previewSqlDTO.setDatasourceId(id);
    return datasetDataManage.previewSql(previewSqlDTO);
}
/**
 * Returns up to five distinct datasource types from the current user's most
 * recently created datasources, excluding folders.
 */
@Override
public List<String> latestUse() {
    QueryWrapper<CoreDatasource> queryWrapper = new QueryWrapper<>();
    queryWrapper.eq("create_by", AuthUtils.getUser().getUserId());
    queryWrapper.orderByDesc("create_time");
    queryWrapper.last(" limit 5");
    List<CoreDatasource> coreDatasources = datasourceMapper.selectList(queryWrapper);
    List<String> types = new ArrayList<>();
    if (CollectionUtils.isEmpty(coreDatasources)) {
        return types;
    }
    for (CoreDatasource ds : coreDatasources) {
        // Keep first occurrence of each non-folder type, preserving recency order.
        if (!ds.getType().equalsIgnoreCase("folder") && !types.contains(ds.getType())) {
            types.add(ds.getType());
        }
    }
    return types;
}
/** Pages through the sync logs of one datasource, newest first. */
public IPage<CoreDatasourceTaskLogDTO> listSyncRecord(int goPage, int pageSize, Long dsId) {
    QueryWrapper<CoreDatasourceTaskLogDTO> wrapper = new QueryWrapper<>();
    wrapper.eq("ds_id", dsId);
    wrapper.orderByDesc("start_time");
    return taskLogExtMapper.pager(new Page<>(goPage, pageSize), wrapper);
}
/**
 * Re-validates every non-Excel/non-folder datasource asynchronously on the
 * shared thread pool, refreshing each one's persisted status. Failures are
 * swallowed per datasource so one broken connection cannot stop the sweep.
 */
public void updateDatasourceStatus() {
    QueryWrapper<CoreDatasource> wrapper = new QueryWrapper<>();
    wrapper.notIn("type", Arrays.asList("Excel", "folder"));
    List<CoreDatasource> datasources = datasourceMapper.selectList(wrapper);
    datasources.forEach(datasource -> {
        commonThreadPool.addTask(() -> {
            try {
                validate(datasource);
            } catch (Exception e) {
                // Best-effort sweep; NOTE(review): prefer a logger over printStackTrace.
                e.printStackTrace();
            }
        });
    });
}
/**
 * Resets sync tasks whose Quartz scheduler instance died while they were
 * marked as executing. Guarded by a flag so overlapping invocations bail out
 * early.
 * NOTE(review): the check-then-set on isUpdatingStatus is not atomic; if
 * this can run on multiple threads, an AtomicBoolean.compareAndSet would be
 * safer — confirm the caller's threading model.
 */
public void updateStopJobStatus() {
    if (this.isUpdatingStatus) {
        return;
    } else {
        this.isUpdatingStatus = true;
    }
    try {
        doUpdate();
    } catch (Exception e) {
        e.printStackTrace();
    } finally {
        // Always release the guard, even when doUpdate() throws.
        this.isUpdatingStatus = false;
    }
}
/**
 * Core of updateStopJobStatus(): finds datasources whose sync is marked
 * UnderExecution but whose owning Quartz scheduler instance is no longer
 * checking in, and flips them (and their tasks) back to WaitingForExecution
 * so they can be rescheduled.
 */
private void doUpdate() {
    List<QrtzSchedulerState> qrtzSchedulerStates = qrtzSchedulerStateMapper.selectList(null);
    // An instance counts as alive when lastCheckin + interval (+1s slack) is
    // still ahead of the DB clock (DB timestamp is seconds, hence * 1000).
    List<String> activeQrtzInstances = qrtzSchedulerStates.stream().filter(qrtzSchedulerState -> qrtzSchedulerState.getLastCheckinTime() + qrtzSchedulerState.getCheckinInterval() + 1000 > dataSourceExtMapper.selectTimestamp().getCurrentTimestamp() * 1000).map(QrtzSchedulerState::getInstanceName).collect(Collectors.toList());
    QueryWrapper<CoreDatasource> queryWrapper = new QueryWrapper<>();
    queryWrapper.eq("task_status", TaskStatus.UnderExecution.name());
    List<CoreDatasource> datasources = datasourceMapper.selectList(queryWrapper);
    List<CoreDatasource> syncCoreDatasources = new ArrayList<>();
    List<CoreDatasource> jobStoppedCoreDatasources = new ArrayList<>();
    datasources.forEach(coreDatasource -> {
        // The stored qrtzInstance carries a 13-char suffix (presumably a
        // millisecond timestamp) that is stripped before the liveness lookup
        // — TODO confirm against how qrtzInstance is written.
        if (StringUtils.isNotEmpty(coreDatasource.getQrtzInstance()) && !activeQrtzInstances.contains(coreDatasource.getQrtzInstance().substring(0, coreDatasource.getQrtzInstance().length() - 13))) {
            jobStoppedCoreDatasources.add(coreDatasource);
        } else {
            syncCoreDatasources.add(coreDatasource);
        }
    });
    if (CollectionUtils.isEmpty(jobStoppedCoreDatasources)) {
        return;
    }
    // Reset the orphaned datasources back to WaitingForExecution ...
    queryWrapper.clear();
    queryWrapper.in("id", jobStoppedCoreDatasources.stream().map(CoreDatasource::getId).collect(Collectors.toList()));
    CoreDatasource record = new CoreDatasource();
    record.setTaskStatus(TaskStatus.WaitingForExecution.name());
    datasourceMapper.update(record, queryWrapper);
    // ... and do the same for their tasks.
    datasourceTaskServer.updateByDsIds(jobStoppedCoreDatasources.stream().map(CoreDatasource::getId).collect(Collectors.toList()));
}
/** True when the current user has not yet dismissed the datasource "finish" page. */
public boolean showFinishPage() throws DEException {
    CoreDsFinishPage record = coreDsFinishPageMapper.selectById(AuthUtils.getUser().getUserId());
    return record == null;
}
/** Records that the current user has dismissed the datasource "finish" page. */
public void setShowFinishPage() throws DEException {
    CoreDsFinishPage finishPage = new CoreDsFinishPage();
    finishPage.setId(AuthUtils.getUser().getUserId());
    coreDsFinishPageMapper.insert(finishPage);
}
}

View File

@ -0,0 +1,197 @@
package io.dataease.datasource.server;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.core.conditions.update.UpdateWrapper;
import io.dataease.commons.constants.TaskStatus;
import io.dataease.datasource.dao.auto.entity.CoreDatasource;
import io.dataease.datasource.dao.auto.entity.CoreDatasourceTask;
import io.dataease.datasource.dao.auto.entity.CoreDatasourceTaskLog;
import io.dataease.datasource.dao.auto.mapper.CoreDatasourceMapper;
import io.dataease.datasource.dao.auto.mapper.CoreDatasourceTaskLogMapper;
import io.dataease.datasource.dao.auto.mapper.CoreDatasourceTaskMapper;
import io.dataease.datasource.dto.CoreDatasourceTaskDTO;
import io.dataease.datasource.dao.ext.mapper.ExtDatasourceTaskMapper;
import io.dataease.datasource.manage.DatasourceSyncManage;
import io.dataease.utils.IDUtils;
import jakarta.annotation.Resource;
import org.apache.commons.lang3.StringUtils;
import org.springframework.stereotype.Component;
import org.springframework.util.CollectionUtils;
import java.util.ArrayList;
import java.util.List;
/**
 * Service around datasource sync tasks and their logs: CRUD on
 * core_datasource_task, status transitions driven by Quartz triggers, and
 * sync-log bookkeeping.
 */
@Component
public class DatasourceTaskServer {
    @Resource
    private CoreDatasourceTaskMapper datasourceTaskMapper;
    @Resource
    private CoreDatasourceMapper coreDatasourceMapper;
    @Resource
    private ExtDatasourceTaskMapper extDatasourceTaskMapper;
    @Resource
    private CoreDatasourceTaskLogMapper coreDatasourceTaskLogMapper;
    @Resource
    private DatasourceSyncManage datasourceSyncManage;

    /** Loads a task by primary key. */
    public CoreDatasourceTask selectById(Long taskId) {
        return datasourceTaskMapper.selectById(taskId);
    }

    /** Returns every task. */
    public List<CoreDatasourceTask> listAll() {
        return datasourceTaskMapper.selectList(null);
    }

    /**
     * Returns the (first) task of a datasource, or an empty task object when
     * none exists.
     */
    public CoreDatasourceTask selectByDSId(Long dsId) {
        QueryWrapper<CoreDatasourceTask> queryWrapper = new QueryWrapper<>();
        queryWrapper.eq("ds_id", dsId);
        List<CoreDatasourceTask> coreDatasourceTasks = datasourceTaskMapper.selectList(queryWrapper);
        return CollectionUtils.isEmpty(coreDatasourceTasks) ? new CoreDatasourceTask() : coreDatasourceTasks.get(0);
    }

    /**
     * Latest sync log entry of one table of a datasource, or null when the
     * table has never been synced.
     */
    public CoreDatasourceTaskLog lastSyncLogForTable(Long dsId, String tableName) {
        QueryWrapper<CoreDatasourceTaskLog> queryWrapper = new QueryWrapper<>();
        queryWrapper.eq("ds_id", dsId);
        queryWrapper.eq("table_name", tableName);
        queryWrapper.orderByDesc("start_time");
        List<CoreDatasourceTaskLog> logs = coreDatasourceTaskLogMapper.selectList(queryWrapper);
        return CollectionUtils.isEmpty(logs) ? null : logs.get(0);
    }

    /** Unschedules the first task of a datasource and deletes all its tasks. */
    public void deleteByDSId(Long dsId) {
        QueryWrapper<CoreDatasourceTask> queryWrapper = new QueryWrapper<>();
        queryWrapper.eq("ds_id", dsId);
        List<CoreDatasourceTask> coreDatasourceTasks = datasourceTaskMapper.selectList(queryWrapper);
        if (!CollectionUtils.isEmpty(coreDatasourceTasks)) {
            datasourceSyncManage.deleteSchedule(coreDatasourceTasks.get(0));
        }
        datasourceTaskMapper.delete(queryWrapper);
    }

    /** Inserts a task with a freshly generated snowflake id. */
    public void insert(CoreDatasourceTask coreDatasourceTask) {
        coreDatasourceTask.setId(IDUtils.snowID());
        datasourceTaskMapper.insert(coreDatasourceTask);
    }

    /** Deletes a task by primary key. */
    public void delete(Long id) {
        datasourceTaskMapper.deleteById(id);
    }

    /** Upserts a task: insert when it has no id, update by id otherwise. */
    public void update(CoreDatasourceTask coreDatasourceTask) {
        if (coreDatasourceTask.getId() == null) {
            datasourceTaskMapper.insert(coreDatasourceTask);
        } else {
            // (The original also built an UpdateWrapper here but never used it.)
            datasourceTaskMapper.updateById(coreDatasourceTask);
        }
    }

    /** Moves the tasks of the given datasources back to WaitingForExecution. */
    public void updateByDsIds(List<Long> dsIds) {
        UpdateWrapper<CoreDatasourceTask> updateWrapper = new UpdateWrapper<>();
        updateWrapper.in("ds_id", dsIds);
        CoreDatasourceTask record = new CoreDatasourceTask();
        record.setTaskStatus(TaskStatus.WaitingForExecution.name());
        datasourceTaskMapper.update(record, updateWrapper);
    }

    /**
     * Stops a time-limited task when its trigger has no future execution, or
     * when the next execution would fall after the configured end time.
     */
    public void checkTaskIsStopped(CoreDatasourceTask coreDatasourceTask) {
        if (coreDatasourceTask.getEndLimit() != null && StringUtils.equalsIgnoreCase(coreDatasourceTask.getEndLimit(), "1")) { // 结束限制 0 无限制 1 设定结束时间'
            List<CoreDatasourceTaskDTO> dataSetTaskDTOS = taskWithTriggers(coreDatasourceTask.getId());
            if (CollectionUtils.isEmpty(dataSetTaskDTOS)) {
                return;
            }
            UpdateWrapper<CoreDatasourceTask> updateWrapper = new UpdateWrapper<>();
            updateWrapper.eq("id", coreDatasourceTask.getId());
            CoreDatasourceTask datasourceTask = new CoreDatasourceTask();
            if (dataSetTaskDTOS.get(0).getNextExecTime() == null || dataSetTaskDTOS.get(0).getNextExecTime() <= 0) {
                datasourceTask.setTaskStatus(TaskStatus.Stopped.name());
                datasourceTaskMapper.update(datasourceTask, updateWrapper);
            }
            if (dataSetTaskDTOS.get(0).getNextExecTime() != null && dataSetTaskDTOS.get(0).getNextExecTime() > coreDatasourceTask.getEndTime()) {
                datasourceTask.setTaskStatus(TaskStatus.Stopped.name());
                datasourceTaskMapper.update(datasourceTask, updateWrapper);
            }
        }
    }

    /** Loads a task joined with its Quartz trigger info. */
    public List<CoreDatasourceTaskDTO> taskWithTriggers(Long taskId) {
        QueryWrapper<CoreDatasourceTaskDTO> wrapper = new QueryWrapper<>();
        wrapper.eq("core_datasource_task.id", taskId);
        return extDatasourceTaskMapper.taskWithTriggers(wrapper);
    }

    /**
     * Atomically claims a datasource for execution. The conditional UPDATE
     * (id = ? AND task_status != UnderExecution) acts as the lock: when it
     * touches 0 rows another sync is already running and true is returned;
     * otherwise the task row is marked UnderExecution with the current time.
     *
     * @return true when another execution is already in progress
     */
    public synchronized boolean existUnderExecutionTask(Long datasourceId, Long taskId) {
        UpdateWrapper<CoreDatasource> updateWrapper = new UpdateWrapper<>();
        updateWrapper.eq("id", datasourceId);
        updateWrapper.ne("task_status", TaskStatus.UnderExecution.name());
        CoreDatasource coreDatasource = new CoreDatasource();
        coreDatasource.setTaskStatus(TaskStatus.UnderExecution.name());
        boolean existSyncTask = coreDatasourceMapper.update(coreDatasource, updateWrapper) == 0;
        if (!existSyncTask) {
            UpdateWrapper<CoreDatasourceTask> updateTaskWrapper = new UpdateWrapper<>();
            // BUGFIX: the id condition was previously added to the datasource
            // wrapper above, leaving this wrapper empty so the update below
            // marked EVERY task row as UnderExecution.
            updateTaskWrapper.eq("id", taskId);
            CoreDatasourceTask record = new CoreDatasourceTask();
            record.setTaskStatus(TaskStatus.UnderExecution.name());
            record.setLastExecTime(System.currentTimeMillis());
            datasourceTaskMapper.update(record, updateTaskWrapper);
        }
        return existSyncTask;
    }

    /** Creates and persists an UnderExecution log row for a task run. */
    public CoreDatasourceTaskLog initTaskLog(Long datasourceId, Long taskId, String tableName, String triggerType) {
        Long startTime = System.currentTimeMillis();
        CoreDatasourceTaskLog coreDatasourceTaskLog = new CoreDatasourceTaskLog();
        coreDatasourceTaskLog.setId(IDUtils.snowID());
        coreDatasourceTaskLog.setDsId(datasourceId);
        coreDatasourceTaskLog.setTaskId(taskId);
        coreDatasourceTaskLog.setTaskStatus(TaskStatus.UnderExecution.name());
        coreDatasourceTaskLog.setTriggerType(triggerType);
        coreDatasourceTaskLog.setStartTime(startTime);
        coreDatasourceTaskLog.setCreateTime(startTime);
        coreDatasourceTaskLog.setTableName(tableName);
        coreDatasourceTaskLog.setInfo("");
        coreDatasourceTaskLogMapper.insert(coreDatasourceTaskLog);
        return coreDatasourceTaskLog;
    }

    /** Persists changes to an existing log row. */
    public void saveLog(CoreDatasourceTaskLog coreDatasourceTaskLog) {
        coreDatasourceTaskLogMapper.updateById(coreDatasourceTaskLog);
    }

    /**
     * Recomputes a task's status after a run: one-shot (RIGHTNOW) tasks stop;
     * time-limited tasks stop once their trigger has no future execution;
     * otherwise the task returns to WaitingForExecution.
     */
    public void updateTaskStatus(CoreDatasourceTask coreDatasourceTask) {
        CoreDatasourceTask record = new CoreDatasourceTask();
        if (coreDatasourceTask.getSyncRate().equalsIgnoreCase(ScheduleType.RIGHTNOW.name())) {
            record.setTaskStatus(TaskStatus.Stopped.name());
        } else {
            if (coreDatasourceTask.getEndLimit() != null && StringUtils.equalsIgnoreCase(coreDatasourceTask.getEndLimit(), "1")) {
                List<CoreDatasourceTaskDTO> dataSetTaskDTOS = taskWithTriggers(coreDatasourceTask.getId());
                if (CollectionUtils.isEmpty(dataSetTaskDTOS)) {
                    return;
                }
                if (dataSetTaskDTOS.get(0).getNextExecTime() == null || dataSetTaskDTOS.get(0).getNextExecTime() <= 0) {
                    record.setTaskStatus(TaskStatus.Stopped.name());
                } else {
                    record.setTaskStatus(TaskStatus.WaitingForExecution.name());
                }
            } else {
                record.setTaskStatus(TaskStatus.WaitingForExecution.name());
            }
        }
        UpdateWrapper<CoreDatasourceTask> updateTaskWrapper = new UpdateWrapper<>();
        updateTaskWrapper.eq("id", coreDatasourceTask.getId());
        datasourceTaskMapper.update(record, updateTaskWrapper);
    }

    /** How a task run was scheduled/triggered. */
    public enum ScheduleType {
        CRON, RIGHTNOW, SIMPLE_CRON, MANUAL
    }
}

View File

@ -0,0 +1,148 @@
package io.dataease.datasource.server;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import io.dataease.datasource.dao.auto.entity.CoreDatasource;
import io.dataease.datasource.dao.auto.entity.CoreDeEngine;
import io.dataease.datasource.dao.auto.mapper.CoreDeEngineMapper;
import io.dataease.datasource.provider.EngineProvider;
import io.dataease.datasource.provider.ProviderUtil;
import io.dataease.datasource.request.DatasourceRequest;
import io.dataease.datasource.type.H2;
import io.dataease.datasource.type.Mysql;
import io.dataease.exception.DEException;
import io.dataease.result.ResultMessage;
import io.dataease.utils.BeanUtils;
import io.dataease.utils.JsonUtil;
import io.dataease.utils.ModelUtils;
import jakarta.annotation.Resource;
import org.apache.commons.lang3.StringUtils;
import org.springframework.core.env.Environment;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.util.CollectionUtils;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import static io.dataease.result.ResultCode.DATA_IS_WRONG;
/**
 * Manages the internal "DE engine" database (embedded H2 in desktop mode,
 * MySQL on servers) that stores extracted API/Excel data.
 */
@Service
@Transactional(rollbackFor = Exception.class)
public class EngineServer {
    @Resource
    private Environment env;
    @Resource
    private CoreDeEngineMapper deEngineMapper;

    /**
     * Returns the configured engine row.
     *
     * @throws DEException when no engine has been configured yet
     */
    public CoreDeEngine info() throws DEException {
        List<CoreDeEngine> deEngines = deEngineMapper.selectList(null);
        if (CollectionUtils.isEmpty(deEngines)) {
            DEException.throwException("未完整设置数据引擎");
        }
        return deEngines.get(0);
    }

    /** Returns the engine as a CoreDatasource; throws when not configured. */
    public CoreDatasource getDeEngine() {
        List<CoreDeEngine> deEngines = deEngineMapper.selectList(null);
        if (CollectionUtils.isEmpty(deEngines)) {
            DEException.throwException("未完整设置数据引擎");
        }
        CoreDatasource coreDatasource = new CoreDatasource();
        BeanUtils.copyBean(coreDatasource, deEngines.get(0));
        return coreDatasource;
    }

    /**
     * Like {@link #getDeEngine()} but returns null instead of throwing when
     * no engine is configured.
     */
    public CoreDatasource deEngine() {
        List<CoreDeEngine> deEngines = deEngineMapper.selectList(null);
        CoreDatasource coreDatasource = new CoreDatasource();
        if (CollectionUtils.isEmpty(deEngines)) {
            return null;
        }
        BeanUtils.copyBean(coreDatasource, deEngines.get(0));
        return coreDatasource;
    }

    /**
     * Probes the connectivity of an engine configuration; returns a failure
     * result (rather than throwing) when the connection check fails.
     */
    public ResultMessage validate(CoreDeEngine engine) throws Exception {
        if (StringUtils.isEmpty(engine.getType()) || StringUtils.isEmpty(engine.getConfiguration())) {
            throw new Exception("未完整设置数据引擎");
        }
        try {
            EngineProvider provider = ProviderUtil.getEngineProvider(engine.getType());
            DatasourceRequest datasourceRequest = new DatasourceRequest();
            CoreDatasource datasource = new CoreDatasource();
            BeanUtils.copyBean(datasource, engine);
            datasourceRequest.setDatasource(datasource);
            provider.checkStatus(datasourceRequest);
            return ResultMessage.success(datasource);
        } catch (Exception e) {
            return ResultMessage.failure(DATA_IS_WRONG, "Engine is invalid: " + e.getMessage());
        }
    }

    /** Inserts or updates the engine row. */
    public ResultMessage save(CoreDeEngine engine) throws Exception {
        if (engine.getId() == null) {
            deEngineMapper.insert(engine);
        } else {
            deEngineMapper.updateById(engine);
        }
        return ResultMessage.success(engine);
    }

    /**
     * Bootstraps a default engine when none of the expected type exists:
     * embedded H2 in desktop mode, otherwise a MySQL engine derived from the
     * application's own spring.datasource.* settings.
     */
    public void initSimpleEngine() throws Exception {
        QueryWrapper<CoreDeEngine> queryWrapper = new QueryWrapper<>();
        if (ModelUtils.isDesktop()) {
            queryWrapper.eq("type", engineType.h2.name());
        } else {
            queryWrapper.eq("type", engineType.mysql.name());
        }
        List<CoreDeEngine> deEngines = deEngineMapper.selectList(queryWrapper);
        if (!CollectionUtils.isEmpty(deEngines)) {
            return;
        }
        CoreDeEngine engine = new CoreDeEngine();
        if (ModelUtils.isDesktop()) {
            engine.setType(engineType.h2.name());
            H2 h2 = new H2();
            h2.setJdbc("jdbc:h2:/opt/dataease2.0/desktop_data;AUTO_SERVER=TRUE;AUTO_RECONNECT=TRUE;MODE=MySQL");
            h2.setDataBase("PUBLIC");
            h2.setUsername(env.getProperty("spring.datasource.username"));
            h2.setPassword(env.getProperty("spring.datasource.password"));
            engine.setConfiguration(JsonUtil.toJSONString(h2).toString());
        } else {
            engine.setType(engineType.mysql.name());
            Mysql mysqlConfiguration = new Mysql();
            Pattern WITH_SQL_FRAGMENT = Pattern.compile("jdbc:mysql://(.*):(\\d+)/(.*)");
            Matcher matcher = WITH_SQL_FRAGMENT.matcher(env.getProperty("spring.datasource.url"));
            if (!matcher.find()) {
                return;
            }
            mysqlConfiguration.setHost(matcher.group(1));
            mysqlConfiguration.setPort(Integer.valueOf(matcher.group(2)));
            // The database segment may carry "?param=...". BUGFIX: the original
            // indexed [1] unconditionally and threw ArrayIndexOutOfBounds for
            // JDBC URLs without extra parameters.
            String[] dbAndParams = matcher.group(3).split("\\?", 2);
            mysqlConfiguration.setDataBase(dbAndParams[0]);
            if (dbAndParams.length > 1) {
                mysqlConfiguration.setExtraParams(dbAndParams[1]);
            }
            mysqlConfiguration.setUsername(env.getProperty("spring.datasource.username"));
            mysqlConfiguration.setPassword(env.getProperty("spring.datasource.password"));
            engine.setConfiguration(JsonUtil.toJSONString(mysqlConfiguration).toString());
        }
        deEngineMapper.insert(engine);
    }

    /** Supported engine backends. */
    public enum engineType {
        mysql("Mysql"),
        h2("h2");

        private String alias;

        private engineType(String alias) {
            this.alias = alias;
        }

        public String getAlias() {
            return alias;
        }
    }
}

View File

@ -0,0 +1,28 @@
package io.dataease.datasource.type;
import io.dataease.api.ds.vo.DatasourceConfiguration;
import lombok.Data;
import org.apache.commons.lang3.StringUtils;
import org.springframework.stereotype.Component;
/** ClickHouse datasource configuration: driver class and JDBC URL template. */
@Data
@Component("ck")
public class CK extends DatasourceConfiguration {
    private String driver = "com.clickhouse.jdbc.ClickHouseDriver";
    private String extraParams = "";

    /**
     * Builds the ClickHouse JDBC URL from host/port/database, appending the
     * extra parameters after '?' when any are configured.
     */
    public String getJdbc() {
        String base = "jdbc:clickhouse://" + getHost().trim() + ":" + getPort().toString().trim() + "/" + getDataBase().trim();
        String params = extraParams.trim();
        if (StringUtils.isEmpty(params)) {
            return base;
        }
        return base + "?" + getExtraParams().trim();
    }
}

View File

@ -0,0 +1,36 @@
package io.dataease.datasource.type;
import io.dataease.api.ds.vo.DatasourceConfiguration;
import lombok.Data;
import org.apache.commons.lang3.StringUtils;
import org.springframework.stereotype.Component;
/** IBM DB2 datasource configuration: driver class and JDBC URL template. */
@Data
@Component("db2")
public class Db2 extends DatasourceConfiguration {
    private String driver = "com.ibm.db2.jcc.DB2Driver";
    private String extraParams = "";

    /**
     * Builds the DB2 JDBC URL. Without extra parameters the current schema
     * (when configured) is appended as ":currentSchema=...;"; configured
     * extra parameters take precedence over the schema.
     */
    public String getJdbc() {
        String base = "jdbc:db2://" + getHost().trim() + ":" + getPort().toString().trim() + "/" + getDataBase().trim();
        if (StringUtils.isEmpty(extraParams.trim())) {
            if (StringUtils.isEmpty(getSchema())) {
                return base;
            }
            return base + ":currentSchema=" + getSchema().trim() + ";";
        }
        return base + ":" + getExtraParams().trim();
    }
}

View File

@ -0,0 +1,15 @@
package io.dataease.datasource.type;
import io.dataease.api.ds.vo.DatasourceConfiguration;
import lombok.Data;
import org.apache.commons.lang3.StringUtils;
import org.springframework.stereotype.Component;
import java.util.Arrays;
import java.util.List;
/**
 * H2 engine/datasource configuration. Only pins the JDBC driver class;
 * connection details come from the inherited DatasourceConfiguration
 * (the full JDBC URL is set explicitly by EngineServer in desktop mode).
 */
@Data
@Component("h2")
public class H2 extends DatasourceConfiguration {
    // JDBC driver for the embedded H2 database.
    private String driver = "org.h2.Driver";
}

View File

@ -0,0 +1,33 @@
package io.dataease.datasource.type;
import io.dataease.api.ds.vo.DatasourceConfiguration;
import lombok.Data;
import org.apache.commons.lang3.StringUtils;
import org.springframework.stereotype.Component;
import java.util.Arrays;
import java.util.List;
/** Apache Impala datasource configuration: driver class and JDBC URL template. */
@Data
@Component("impala")
public class Impala extends DatasourceConfiguration {
    private String driver = "com.cloudera.impala.jdbc.Driver";
    private String extraParams = "";
    // Parameters rejected because they enable known JDBC-connector attacks
    // (e.g. autoDeserialize-based deserialization RCE).
    private List<String> illegalParameters = Arrays.asList("autoDeserialize", "queryInterceptors", "statementInterceptors", "detectCustomCollations");
    private List<String> showTableSqls = Arrays.asList("show tables");

    /**
     * Builds the Impala JDBC URL; extra parameters (';'-separated for this
     * driver) are validated against the illegal-parameter blacklist first.
     * BUGFIX: the blacklist was declared but never enforced here, unlike the
     * other datasource types (see Mongo).
     */
    public String getJdbc() {
        if (StringUtils.isEmpty(extraParams.trim())) {
            return "jdbc:impala://HOSTNAME:PORT/DATABASE"
                    .replace("HOSTNAME", getHost().trim())
                    .replace("PORT", getPort().toString().trim())
                    .replace("DATABASE", getDataBase().trim());
        } else {
            for (String illegalParameter : illegalParameters) {
                if (getExtraParams().contains(illegalParameter)) {
                    throw new RuntimeException("Illegal parameter: " + illegalParameter);
                }
            }
            return "jdbc:impala://HOSTNAME:PORT/DATABASE;EXTRA_PARAMS"
                    .replace("HOSTNAME", getHost().trim())
                    .replace("PORT", getPort().toString().trim())
                    .replace("DATABASE", getDataBase().trim())
                    .replace("EXTRA_PARAMS", getExtraParams().trim());
        }
    }
}

View File

@ -0,0 +1,39 @@
package io.dataease.datasource.type;
import io.dataease.api.ds.vo.DatasourceConfiguration;
import lombok.Data;
import org.apache.commons.lang3.StringUtils;
import org.springframework.stereotype.Component;
import java.util.Arrays;
import java.util.List;
/**
 * MongoDB datasource configuration. Connectivity goes through the MySQL wire
 * protocol — presumably via the MongoDB Connector for BI, which exposes
 * Mongo as a MySQL endpoint (note the MySQL driver below); confirm against
 * the deployment docs.
 */
@Data
@Component("mongo")
public class Mongo extends DatasourceConfiguration {
    // Intentionally the MySQL driver — see class comment.
    private String driver = "com.mysql.cj.jdbc.Driver";
    private String extraParams = "characterEncoding=UTF-8&connectTimeout=5000&useSSL=false&allowPublicKeyRetrieval=true&zeroDateTimeBehavior=convertToNull";
    // Parameters rejected because they enable known MySQL-connector attacks
    // (e.g. autoDeserialize-based deserialization RCE).
    private List<String> illegalParameters = Arrays.asList("autoDeserialize", "queryInterceptors", "statementInterceptors", "detectCustomCollations");
    private List<String> showTableSqls = Arrays.asList("show tables");
    /** Builds the JDBC URL, validating extra parameters against the blacklist. */
    public String getJdbc() {
        if (StringUtils.isEmpty(extraParams.trim())) {
            return "jdbc:mysql://HOSTNAME:PORT/DATABASE"
                    .replace("HOSTNAME", getHost().trim())
                    .replace("PORT", getPort().toString().trim())
                    .replace("DATABASE", getDataBase().trim());
        } else {
            for (String illegalParameter : illegalParameters) {
                if (getExtraParams().contains(illegalParameter)) {
                    throw new RuntimeException("Illegal parameter: " + illegalParameter);
                }
            }
            return "jdbc:mysql://HOSTNAME:PORT/DATABASE?EXTRA_PARAMS"
                    .replace("HOSTNAME", getHost().trim())
                    .replace("PORT", getPort().toString().trim())
                    .replace("DATABASE", getDataBase().trim())
                    .replace("EXTRA_PARAMS", getExtraParams().trim());
        }
    }
}

Some files were not shown because too many files have changed in this diff Show More