From e81138ec5c07cfdaf6d985571edbe6474dc12eea Mon Sep 17 00:00:00 2001 From: taojinlong Date: Mon, 14 Mar 2022 12:26:24 +0800 Subject: [PATCH] =?UTF-8?q?feat:=20=E6=94=AF=E6=8C=81=E7=B2=BE=E7=AE=80?= =?UTF-8?q?=E6=A8=A1=E5=BC=8F?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../controller/engine/EngineController.java | 45 +++ .../io/dataease/provider/DDLProvider.java | 2 + .../io/dataease/provider/DDLProviderImpl.java | 16 ++ .../service/dataset/ExtractDataService.java | 253 ++++++++++------- .../service/datasource/DatasourceService.java | 3 +- .../service/engine/EngineService.java | 62 ++++- frontend/src/api/system/engine.js | 36 +++ frontend/src/lang/en.js | 3 +- frontend/src/lang/tw.js | 3 +- frontend/src/lang/zh.js | 2 +- frontend/src/views/dataset/add/AddApi.vue | 7 +- frontend/src/views/dataset/add/AddDB.vue | 79 +++--- frontend/src/views/dataset/add/AddSQL.vue | 15 +- .../common/DatasetGroupSelectorTree.vue | 8 - frontend/src/views/dataset/group/Group.vue | 7 +- .../system/SysParam/SimpleModeSetting.vue | 263 ++++++++++++++++++ .../SysParam/SimplemodeDatasourceSetting.vue | 239 ---------------- frontend/src/views/system/SysParam/index.vue | 20 +- 18 files changed, 668 insertions(+), 395 deletions(-) create mode 100644 backend/src/main/java/io/dataease/controller/engine/EngineController.java create mode 100644 frontend/src/api/system/engine.js create mode 100644 frontend/src/views/system/SysParam/SimpleModeSetting.vue delete mode 100644 frontend/src/views/system/SysParam/SimplemodeDatasourceSetting.vue diff --git a/backend/src/main/java/io/dataease/controller/engine/EngineController.java b/backend/src/main/java/io/dataease/controller/engine/EngineController.java new file mode 100644 index 0000000000..c032534158 --- /dev/null +++ b/backend/src/main/java/io/dataease/controller/engine/EngineController.java @@ -0,0 +1,45 @@ +package io.dataease.controller.engine; + +import io.dataease.base.domain.DeEngine; +import io.dataease.controller.ResultHolder; +import io.dataease.dto.DatasourceDTO; +import io.dataease.service.engine.EngineService; +import org.springframework.web.bind.annotation.*; +import springfox.documentation.annotations.ApiIgnore; + +import javax.annotation.Resource; + +@ApiIgnore +@RequestMapping("engine") +@RestController +public class EngineController { + + @Resource + private EngineService engineService; + + @ApiIgnore + @GetMapping("/mode") + public String runMode() throws Exception{ + return engineService.mode(); + } + + @ApiIgnore + @GetMapping("/info") + public DeEngine info() throws Exception{ + return engineService.info(); + } + + @ApiIgnore + @PostMapping("/validate") + public ResultHolder validate(@RequestBody DatasourceDTO datasource) throws Exception { + return engineService.validate(datasource); + } + + + @ApiIgnore + @PostMapping("/save") + public ResultHolder save(@RequestBody DeEngine engine) throws Exception { + return engineService.save(engine); + } + +} diff --git a/backend/src/main/java/io/dataease/provider/DDLProvider.java b/backend/src/main/java/io/dataease/provider/DDLProvider.java index b74b7799fd..758e9fc925 100644 --- a/backend/src/main/java/io/dataease/provider/DDLProvider.java +++ b/backend/src/main/java/io/dataease/provider/DDLProvider.java @@ -18,4 +18,6 @@ public abstract class DDLProvider { public abstract String replaceTable(String name); public abstract String createTableSql(String name, List datasetTableFields); + + public abstract String insertSql(String name, List dataList, int page, int 
pageNumber); } diff --git a/backend/src/main/java/io/dataease/provider/DDLProviderImpl.java b/backend/src/main/java/io/dataease/provider/DDLProviderImpl.java index 9831d8013a..ada0dd0294 100644 --- a/backend/src/main/java/io/dataease/provider/DDLProviderImpl.java +++ b/backend/src/main/java/io/dataease/provider/DDLProviderImpl.java @@ -1,7 +1,9 @@ package io.dataease.provider; import io.dataease.base.domain.DatasetTableField; +import io.dataease.commons.utils.Md5Utils; +import java.util.Arrays; import java.util.List; public class DDLProviderImpl extends DDLProvider { @@ -30,5 +32,19 @@ public class DDLProviderImpl extends DDLProvider { return null; } + @Override + public String insertSql(String name, List dataList, int page, int pageNumber) { + String insertSql = "INSERT INTO TABLE_NAME VALUES ".replace("TABLE_NAME", name); + StringBuffer values = new StringBuffer(); + + Integer realSize = page * pageNumber < dataList.size() ? page * pageNumber : dataList.size(); + for (String[] strings : dataList.subList((page - 1) * pageNumber, realSize)) { + values.append("(").append(Md5Utils.md5(String.join(",", Arrays.asList(strings)))) + .append("," ).append(String.join(",", Arrays.asList(strings))) + .append("),"); + } + return insertSql + values.substring(0, values.length() - 1); + } + } diff --git a/backend/src/main/java/io/dataease/service/dataset/ExtractDataService.java b/backend/src/main/java/io/dataease/service/dataset/ExtractDataService.java index ee64d75dcd..9795f86261 100644 --- a/backend/src/main/java/io/dataease/service/dataset/ExtractDataService.java +++ b/backend/src/main/java/io/dataease/service/dataset/ExtractDataService.java @@ -160,7 +160,7 @@ public class ExtractDataService { } UpdateType updateType = UpdateType.valueOf(type); DatasetTableTaskLog datasetTableTaskLog; - if(datasetTableFields == null){ + if (datasetTableFields == null) { datasetTableFields = dataSetTableFieldsService.list(DatasetTableField.builder().tableId(datasetTable.getId()).build()); } datasetTableFields = datasetTableFields.stream().filter(datasetTableField -> datasetTableField.getExtField() == 0).collect(Collectors.toList()); @@ -180,25 +180,29 @@ public class ExtractDataService { datasetTableTaskLog = writeDatasetTableTaskLog(datasetTableId, ops); createEngineTable(TableUtils.tableName(datasetTableId), datasetTableFields); createEngineTable(TableUtils.tmpName(TableUtils.tableName(datasetTableId)), datasetTableFields); - generateTransFile("all_scope", datasetTable, datasource, datasetTableFields, null); - generateJobFile("all_scope", datasetTable, datasetTableFields.stream().map(DatasetTableField::getDataeaseName).collect(Collectors.joining(","))); Long execTime = System.currentTimeMillis(); - extractData(datasetTable, "all_scope"); + if (!engineService.isSimpleMode()) { + generateTransFile("all_scope", datasetTable, datasource, datasetTableFields, null); + generateJobFile("all_scope", datasetTable, datasetTableFields.stream().map(DatasetTableField::getDataeaseName).collect(Collectors.joining(","))); + extractData(datasetTable, "all_scope"); + } else { + extractExcelDataForSimpleMode(datasetTable, "all_scope"); + } replaceTable(TableUtils.tableName(datasetTableId)); saveSuccessLog(datasetTableTaskLog); updateTableStatus(datasetTableId, datasetTable, JobStatus.Completed, execTime); - if(ops.equalsIgnoreCase("替换")){ + if (ops.equalsIgnoreCase("替换")) { List oldFileds = getDatasetTableFields(datasetTable.getId()); List toAdd = new ArrayList<>(); List toDelete = new ArrayList<>(); for (DatasetTableField oldFiled : 
oldFileds) { boolean delete = true; for (DatasetTableField datasetTableField : datasetTableFields) { - if(oldFiled.getDataeaseName().equalsIgnoreCase(datasetTableField.getDataeaseName()) && oldFiled.getDeExtractType().equals(datasetTableField.getDeExtractType())){ + if (oldFiled.getDataeaseName().equalsIgnoreCase(datasetTableField.getDataeaseName()) && oldFiled.getDeExtractType().equals(datasetTableField.getDeExtractType())) { delete = false; } } - if(delete){ + if (delete) { toDelete.add(oldFiled); } } @@ -206,11 +210,11 @@ public class ExtractDataService { for (DatasetTableField datasetTableField : datasetTableFields) { boolean add = true; for (DatasetTableField oldFiled : oldFileds) { - if(oldFiled.getDataeaseName().equalsIgnoreCase(datasetTableField.getDataeaseName()) && oldFiled.getDeExtractType().equals(datasetTableField.getDeExtractType())){ + if (oldFiled.getDataeaseName().equalsIgnoreCase(datasetTableField.getDataeaseName()) && oldFiled.getDeExtractType().equals(datasetTableField.getDeExtractType())) { add = false; } } - if(add){ + if (add) { toAdd.add(datasetTableField); } } @@ -230,10 +234,14 @@ public class ExtractDataService { case add_scope: // 增量更新 try { datasetTableTaskLog = writeDatasetTableTaskLog(datasetTableId, ops); - generateTransFile("incremental_add", datasetTable, datasource, datasetTableFields, null); - generateJobFile("incremental_add", datasetTable, datasetTableFields.stream().map(DatasetTableField::getDataeaseName).collect(Collectors.joining(","))); Long execTime = System.currentTimeMillis(); - extractData(datasetTable, "incremental_add"); + if (!engineService.isSimpleMode()) { + generateTransFile("incremental_add", datasetTable, datasource, datasetTableFields, null); + generateJobFile("incremental_add", datasetTable, datasetTableFields.stream().map(DatasetTableField::getDataeaseName).collect(Collectors.joining(","))); + extractData(datasetTable, "incremental_add"); + } else { + extractExcelDataForSimpleMode(datasetTable, "incremental_add"); + } saveSuccessLog(datasetTableTaskLog); updateTableStatus(datasetTableId, datasetTable, JobStatus.Completed, execTime); } catch (Exception e) { @@ -309,21 +317,39 @@ public class ExtractDataService { lastExecStatus = JobStatus.Error; execTime = null; } finally { - try { deleteFile("all_scope", datasetTableId); }catch (Exception ignore){ System.out.println(ignore.getMessage());} - try { sendWebMsg(datasetTable, datasetTableTask, datasetTableTaskLog, msg); }catch (Exception ignore){ System.out.println(ignore.getMessage());} - try { dataSetTableTaskService.updateTaskStatus(datasetTableTask, lastExecStatus); }catch (Exception ignore){ + try { + deleteFile("all_scope", datasetTableId); + } catch (Exception ignore) { + System.out.println(ignore.getMessage()); + } + try { + sendWebMsg(datasetTable, datasetTableTask, datasetTableTaskLog, msg); + } catch (Exception ignore) { + System.out.println(ignore.getMessage()); + } + try { + dataSetTableTaskService.updateTaskStatus(datasetTableTask, lastExecStatus); + } catch (Exception ignore) { + System.out.println(ignore.getMessage()); + } + try { + updateTableStatus(datasetTableId, datasetTable, lastExecStatus, execTime); + } catch (Exception ignore) { + System.out.println(ignore.getMessage()); + } + try { + dropDorisTable(TableUtils.tmpName(TableUtils.tableName(datasetTableId))); + } catch (Exception ignore) { System.out.println(ignore.getMessage()); } - try { updateTableStatus(datasetTableId, datasetTable, lastExecStatus, execTime); }catch (Exception ignore){ 
System.out.println(ignore.getMessage());} - try { dropDorisTable(TableUtils.tmpName(TableUtils.tableName(datasetTableId))); }catch (Exception ignore){ System.out.println(ignore.getMessage());} } break; case add_scope: // 增量更新 try { - if(datasource.getType().equalsIgnoreCase(DatasourceTypes.api.name())){ + if (datasource.getType().equalsIgnoreCase(DatasourceTypes.api.name())) { extractData(datasetTable, datasource, datasetTableFields, "incremental_add", null); - }else{ + } else { DatasetTableIncrementalConfig datasetTableIncrementalConfig = dataSetTableService.incrementalConfig(datasetTableId); if (datasetTable.getLastUpdateTime() == null || datasetTable.getLastUpdateTime() == 0) { throw new Exception("未进行全量同步"); @@ -351,10 +377,23 @@ public class ExtractDataService { lastExecStatus = JobStatus.Error; execTime = null; } finally { - try { deleteFile("incremental_add", datasetTableId); deleteFile("incremental_delete", datasetTableId); }catch (Exception ignore){} - try { sendWebMsg(datasetTable, datasetTableTask, datasetTableTaskLog, msg); }catch (Exception ignore){} - try { dataSetTableTaskService.updateTaskStatus(datasetTableTask, lastExecStatus); }catch (Exception ignore){} - try { updateTableStatus(datasetTableId, datasetTable, lastExecStatus, execTime); }catch (Exception ignore){} + try { + deleteFile("incremental_add", datasetTableId); + deleteFile("incremental_delete", datasetTableId); + } catch (Exception ignore) { + } + try { + sendWebMsg(datasetTable, datasetTableTask, datasetTableTaskLog, msg); + } catch (Exception ignore) { + } + try { + dataSetTableTaskService.updateTaskStatus(datasetTableTask, lastExecStatus); + } catch (Exception ignore) { + } + try { + updateTableStatus(datasetTableId, datasetTable, lastExecStatus, execTime); + } catch (Exception ignore) { + } } break; } @@ -366,7 +405,7 @@ public class ExtractDataService { } - private List getDatasetTableFields(String datasetTableId){ + private List getDatasetTableFields(String datasetTableId) { List datasetTableFields = dataSetTableFieldsService.list(DatasetTableField.builder().tableId(datasetTableId).build()); datasetTableFields = datasetTableFields.stream().filter(datasetTableField -> datasetTableField.getExtField() == 0).collect(Collectors.toList()); datasetTableFields.sort((o1, o2) -> { @@ -381,53 +420,32 @@ public class ExtractDataService { return datasetTableFields; } - private void extractData(DatasetTable datasetTable, Datasource datasource, List datasetTableFields, String extractType, String selectSQL) throws Exception{ - if(datasource.getType().equalsIgnoreCase(DatasourceTypes.api.name())){ - extractDataByDE(datasetTable, datasource, datasetTableFields, extractType); + private void extractData(DatasetTable datasetTable, Datasource datasource, List datasetTableFields, String extractType, String selectSQL) throws Exception { + if (datasource.getType().equalsIgnoreCase(DatasourceTypes.api.name())) { + extractApiData(datasetTable, datasource, datasetTableFields, extractType); return; } extractDataByKettle(datasetTable, datasource, datasetTableFields, extractType, selectSQL); } - private void extractDataByDEForSimpleMode(List dataList)throws Exception{ - Datasource engine = engineService.getDeEngine(); - JdbcProvider jdbcProvider = CommonBeanFactory.getBean(JdbcProvider.class); - DatasourceRequest datasourceRequest = new DatasourceRequest(); - datasourceRequest.setDatasource(engine); - DDLProvider ddlProvider = ProviderFactory.getDDLProvider(engine.getType()); - int i = 0; - for (String[] strings : dataList) { - - String 
content = ""; - for (int i=0;i< strings.length;i++){ - content = content + strings[i] + separator; - } - - datasourceRequest.setQuery(ddlProvider.createTableSql(tableName, datasetTableFields)); - if(i==100 || ){ - jdbcProvider.exec(datasourceRequest); - } - - } - - - - } - private void extractDataByDE(DatasetTable datasetTable, Datasource datasource, List datasetTableFields, String extractType)throws Exception{ + private void extractApiData(DatasetTable datasetTable, Datasource datasource, List datasetTableFields, String extractType) throws Exception { List lists = JSONObject.parseArray(datasource.getConfiguration(), ApiDefinition.class); lists = lists.stream().filter(item -> item.getName().equalsIgnoreCase(new Gson().fromJson(datasetTable.getInfo(), DataTableInfoDTO.class).getTable())).collect(Collectors.toList()); - if(CollectionUtils.isEmpty(lists)){ + if (CollectionUtils.isEmpty(lists)) { throw new Exception("未找到API数据表"); } - if(lists.size() > 1 ){ + if (lists.size() > 1) { throw new Exception("存在重名的API数据表"); } DatasourceProvider datasourceProvider = ProviderFactory.getProvider(datasource.getType()); DatasourceRequest datasourceRequest = new DatasourceRequest(); datasourceRequest.setDatasource(datasource); datasourceRequest.setTable(new Gson().fromJson(datasetTable.getInfo(), DataTableInfoDTO.class).getTable()); - Map result = datasourceProvider.fetchResultAndField(datasourceRequest); + Map result = datasourceProvider.fetchResultAndField(datasourceRequest); List dataList = result.get("dataList"); + if (engineService.isSimpleMode()) { + extractDataForSimpleMode(extractType, datasetTable.getId(), dataList); + } Datasource engine = engineService.getDeEngine(); DorisConfiguration dorisConfiguration = new Gson().fromJson(engine.getConfiguration(), DorisConfiguration.class); @@ -450,8 +468,8 @@ public class ExtractDataService { BufferedWriter bw = new BufferedWriter(new FileWriter(dataFile)); for (String[] strings : dataList) { String content = ""; - for (int i=0;i< strings.length;i++){ - content = content + strings[i] + separator; + for (int i = 0; i < strings.length; i++) { + content = content + strings[i] + separator; } content = content + Md5Utils.md5(content); bw.write(content); @@ -473,7 +491,7 @@ public class ExtractDataService { try { Process process = Runtime.getRuntime().exec(root_path + datasetTable.getId() + ".sh"); process.waitFor(); - if(process.waitFor() != 0){ + if (process.waitFor() != 0) { BufferedReader input = new BufferedReader(new InputStreamReader(process.getErrorStream())); String errMsg = ""; String line = ""; @@ -486,16 +504,45 @@ public class ExtractDataService { } throw new Exception(errMsg); } - }catch (Exception e){ + } catch (Exception e) { throw e; - }finally { + } finally { File deleteFile = new File(root_path + datasetTable.getId() + ".sh"); FileUtils.forceDelete(deleteFile); } } - private void extractDataByKettle(DatasetTable datasetTable, Datasource datasource, List datasetTableFields, String extractType, String selectSQL)throws Exception{ + private void extractDataForSimpleMode(String extractType, String datasetId, List dataList) throws Exception { + String tableName; + switch (extractType) { + case "all_scope": + tableName = TableUtils.tmpName(TableUtils.tableName(datasetId)); + break; + default: + tableName = TableUtils.tableName(datasetId); + break; + } + Datasource engine = engineService.getDeEngine(); + JdbcProvider jdbcProvider = CommonBeanFactory.getBean(JdbcProvider.class); + DatasourceRequest datasourceRequest = new DatasourceRequest(); + 
datasourceRequest.setDatasource(engine); + DDLProvider ddlProvider = ProviderFactory.getDDLProvider(engine.getType()); + int pageNumber = 100; //一次插入 100条 + int totalPage; + if (dataList.size() % pageNumber > 0) { + totalPage = dataList.size() / pageNumber + 1; + } else { + totalPage = dataList.size() / pageNumber; + } + + for (int page = 1; page <= totalPage; page++) { + datasourceRequest.setQuery(ddlProvider.insertSql(tableName, dataList, page, pageNumber)); + jdbcProvider.exec(datasourceRequest); + } + } + + private void extractDataByKettle(DatasetTable datasetTable, Datasource datasource, List datasetTableFields, String extractType, String selectSQL) throws Exception { generateTransFile(extractType, datasetTable, datasource, datasetTableFields, selectSQL); generateJobFile(extractType, datasetTable, datasetTableFields.stream().map(DatasetTableField::getDataeaseName).collect(Collectors.joining(","))); extractData(datasetTable, extractType); @@ -564,7 +611,6 @@ public class ExtractDataService { } - private void createEngineTable(String tableName, List datasetTableFields) throws Exception { Datasource engine = engineService.getDeEngine(); JdbcProvider jdbcProvider = CommonBeanFactory.getBean(JdbcProvider.class); @@ -595,8 +641,8 @@ public class ExtractDataService { datasourceRequest.setDatasource(engine); DDLProvider ddlProvider = ProviderFactory.getDDLProvider(engine.getType()); String[] replaceTableSql = ddlProvider.replaceTable(dorisTableName).split(";"); - for(int i=0;i data = new ArrayList<>(); + DataTableInfoDTO dataTableInfoDTO = new Gson().fromJson(datasetTable.getInfo(), DataTableInfoDTO.class); + List excelSheetDataList = dataTableInfoDTO.getExcelSheetDataList(); + ExcelXlsxReader excelXlsxReader = new ExcelXlsxReader(); + for (ExcelSheetData excelSheetData : excelSheetDataList) { + excelXlsxReader.process(new FileInputStream(excelSheetData.getPath())); + for (ExcelSheetData sheet : excelXlsxReader.totalSheets) { + if (sheet.getExcelLable().equalsIgnoreCase(excelSheetData.getExcelLable())) { + for (List dataItem : sheet.getData()) { + data.add(dataItem.toArray(new String[dataItem.size()])); + } + } + } + } + extractDataForSimpleMode(extractType, datasetTable.getId(), data); } private void extractData(DatasetTable datasetTable, String extractType) throws Exception { - if(StringUtils.isNotEmpty(datasetTable.getDataSourceId())){ + if (StringUtils.isNotEmpty(datasetTable.getDataSourceId())) { datasourceService.validate(datasetTable.getDataSourceId()); } - if(engineService.isSimpleMode()){ - extractDataForSimpleMode(datasetTable, extractType); - } KettleFileRepository repository = CommonBeanFactory.getBean(KettleFileRepository.class); RepositoryDirectoryInterface repositoryDirectoryInterface = repository.loadRepositoryDirectoryTree(); TransMeta transMeta = null; @@ -743,7 +797,7 @@ public class ExtractDataService { } private void generateJobFile(String extractType, DatasetTable datasetTable, String columnFields) throws Exception { - if(engineService.isSimpleMode()){ + if (engineService.isSimpleMode()) { return; } String outFile; @@ -830,7 +884,7 @@ public class ExtractDataService { } private void generateTransFile(String extractType, DatasetTable datasetTable, Datasource datasource, List datasetTableFields, String selectSQL) throws Exception { - if(engineService.isSimpleMode()){ + if (engineService.isSimpleMode()) { return; } TransMeta transMeta = new TransMeta(); @@ -976,7 +1030,7 @@ public class ExtractDataService { return fromStep; } - private StepMeta excelInputStep(String Info, 
List datasetTableFields){ + private StepMeta excelInputStep(String Info, List datasetTableFields) { DataTableInfoDTO dataTableInfoDTO = new Gson().fromJson(Info, DataTableInfoDTO.class); List excelSheetDataList = dataTableInfoDTO.getExcelSheetDataList(); String suffix = excelSheetDataList.get(0).getPath().substring(excelSheetDataList.get(0).getPath().lastIndexOf(".") + 1); @@ -986,10 +1040,10 @@ public class ExtractDataService { List files = new ArrayList<>(); List filesRequired = new ArrayList<>(); for (ExcelSheetData excelSheetData : excelSheetDataList) { - if(!sheetNames.contains(excelSheetData.getExcelLable())){ + if (!sheetNames.contains(excelSheetData.getExcelLable())) { sheetNames.add(excelSheetData.getExcelLable()); } - if(!files.contains(excelSheetData.getPath())){ + if (!files.contains(excelSheetData.getPath())) { files.add(excelSheetData.getPath()); filesRequired.add("Y"); } @@ -1003,7 +1057,7 @@ public class ExtractDataService { excelInputMeta.setSheetName(sheetNames.toArray(new String[sheetNames.size()])); } excelInputMeta.setPassword("Encrypted"); - excelInputMeta.setFileName( files.toArray(new String[files.size()])); + excelInputMeta.setFileName(files.toArray(new String[files.size()])); excelInputMeta.setFileRequired(filesRequired.toArray(new String[filesRequired.size()])); excelInputMeta.setStartsWithHeader(true); excelInputMeta.setIgnoreEmptyRows(true); @@ -1035,9 +1089,9 @@ public class ExtractDataService { textFileOutputMeta.setSeparator(separator); textFileOutputMeta.setExtension(extention); - if (datasource.getType().equalsIgnoreCase(DatasourceTypes.oracle.name()) ) { + if (datasource.getType().equalsIgnoreCase(DatasourceTypes.oracle.name())) { TextFileField[] outputFields = new TextFileField[datasetTableFields.size() + 1]; - for(int i=0;i< datasetTableFields.size();i++){ + for (int i = 0; i < datasetTableFields.size(); i++) { TextFileField textFileField = new TextFileField(); textFileField.setName(datasetTableFields.get(i).getOriginName()); textFileField.setType("String"); @@ -1049,9 +1103,9 @@ public class ExtractDataService { outputFields[datasetTableFields.size()] = textFileField; textFileOutputMeta.setOutputFields(outputFields); - }else if (datasource.getType().equalsIgnoreCase(DatasourceTypes.sqlServer.name()) || datasource.getType().equalsIgnoreCase(DatasourceTypes.pg.name()) || datasource.getType().equalsIgnoreCase(DatasourceTypes.mysql.name())){ + } else if (datasource.getType().equalsIgnoreCase(DatasourceTypes.sqlServer.name()) || datasource.getType().equalsIgnoreCase(DatasourceTypes.pg.name()) || datasource.getType().equalsIgnoreCase(DatasourceTypes.mysql.name())) { TextFileField[] outputFields = new TextFileField[datasetTableFields.size() + 1]; - for(int i=0;i< datasetTableFields.size();i++){ + for (int i = 0; i < datasetTableFields.size(); i++) { TextFileField textFileField = new TextFileField(); textFileField.setName(datasetTableFields.get(i).getDataeaseName()); if (datasetTableFields.get(i).getDeExtractType().equals(DeTypeConstants.DE_TIME)) { @@ -1069,9 +1123,9 @@ public class ExtractDataService { outputFields[datasetTableFields.size()] = textFileField; textFileOutputMeta.setOutputFields(outputFields); - }else if(datasource.getType().equalsIgnoreCase(DatasourceTypes.excel.name())) { + } else if (datasource.getType().equalsIgnoreCase(DatasourceTypes.excel.name())) { TextFileField[] outputFields = new TextFileField[datasetTableFields.size() + 1]; - for(int i=0;i< datasetTableFields.size();i++){ + for (int i = 0; i < datasetTableFields.size(); i++) { 
TextFileField textFileField = new TextFileField(); textFileField.setName(datasetTableFields.get(i).getDataeaseName()); if (datasetTableFields.get(i).getDeExtractType().equals(DeTypeConstants.DE_INT)) { @@ -1089,7 +1143,7 @@ public class ExtractDataService { outputFields[datasetTableFields.size()] = textFileField; textFileOutputMeta.setOutputFields(outputFields); - }else { + } else { textFileOutputMeta.setOutputFields(new TextFileField[0]); } @@ -1104,7 +1158,7 @@ public class ExtractDataService { String excelCompletion = ""; for (DatasetTableField datasetTableField : datasetTableFields) { - if(datasetTableField.getDeExtractType().equals(DeTypeConstants.DE_BINARY)){ + if (datasetTableField.getDeExtractType().equals(DeTypeConstants.DE_BINARY)) { handleBinaryTypeCode.append("\n").append(handleBinaryType.replace("FIELD", datasetTableField.getDataeaseName())); } } @@ -1145,7 +1199,7 @@ public class ExtractDataService { } public void deleteFile(String type, String dataSetTableId) { - if(kettleFilesKeep){ + if (kettleFilesKeep) { return; } String transName = null; @@ -1176,29 +1230,33 @@ public class ExtractDataService { deleteFile(root_path + transName + ".ktr"); } - private void deleteExcelFile(DatasetTable datasetTable, ListdatasetTableIds){ + private void deleteExcelFile(DatasetTable datasetTable, List datasetTableIds) { List datasetTables = dataSetTableService.list(datasetTableIds); for (ExcelSheetData excelSheetData : new Gson().fromJson(datasetTable.getInfo(), DataTableInfoDTO.class).getExcelSheetDataList()) { Boolean allIsFinished = true; for (DatasetTable table : datasetTables) { - for(ExcelSheetData data : new Gson().fromJson(table.getInfo(), DataTableInfoDTO.class).getExcelSheetDataList()){ - if(data.getPath().equalsIgnoreCase(excelSheetData.getPath())){ - if(StringUtils.isEmpty(table.getSyncStatus()) || table.getSyncStatus().equalsIgnoreCase(JobStatus.Underway.name())){ + for (ExcelSheetData data : new Gson().fromJson(table.getInfo(), DataTableInfoDTO.class).getExcelSheetDataList()) { + if (data.getPath().equalsIgnoreCase(excelSheetData.getPath())) { + if (StringUtils.isEmpty(table.getSyncStatus()) || table.getSyncStatus().equalsIgnoreCase(JobStatus.Underway.name())) { allIsFinished = false; } } } } - if(allIsFinished){ + if (allIsFinished) { deleteFile(excelSheetData.getPath()); - }else { - try { Thread.sleep(5000); }catch (Exception ignore){} + } else { + try { + Thread.sleep(5000); + } catch (Exception ignore) { + } deleteExcelFile(datasetTable, datasetTableIds); } } } - private void deleteFile(String filePath){ - if(StringUtils.isEmpty(filePath)){ + + private void deleteFile(String filePath) { + if (StringUtils.isEmpty(filePath)) { return; } try { @@ -1207,6 +1265,7 @@ public class ExtractDataService { } catch (Exception e) { } } + public boolean isKettleRunning() { try { if (!InetAddress.getByName(carte).isReachable(1000)) { diff --git a/backend/src/main/java/io/dataease/service/datasource/DatasourceService.java b/backend/src/main/java/io/dataease/service/datasource/DatasourceService.java index 01f3f7363e..0f3b597bcc 100644 --- a/backend/src/main/java/io/dataease/service/datasource/DatasourceService.java +++ b/backend/src/main/java/io/dataease/service/datasource/DatasourceService.java @@ -71,7 +71,7 @@ public class DatasourceService { return datasource; } - private void handleConnectionPool(Datasource datasource, String type) { + public void handleConnectionPool(Datasource datasource, String type) { commonThreadPool.addTask(() -> { try { DatasourceProvider datasourceProvider = 
ProviderFactory.getProvider(datasource.getType()); @@ -221,7 +221,6 @@ public class DatasourceService { }catch (Exception e){ return ResultHolder.error("Datasource is invalid: " + e.getMessage()); } - } public ResultHolder validate(String datasourceId) { diff --git a/backend/src/main/java/io/dataease/service/engine/EngineService.java b/backend/src/main/java/io/dataease/service/engine/EngineService.java index f449058db4..627c67e311 100644 --- a/backend/src/main/java/io/dataease/service/engine/EngineService.java +++ b/backend/src/main/java/io/dataease/service/engine/EngineService.java @@ -6,20 +6,32 @@ import io.dataease.base.domain.DeEngine; import io.dataease.base.domain.DeEngineExample; import io.dataease.base.mapper.DeEngineMapper; import io.dataease.commons.utils.BeanUtils; +import io.dataease.controller.ResultHolder; +import io.dataease.controller.request.datasource.DatasourceRequest; +import io.dataease.dto.DatasourceDTO; +import io.dataease.provider.ProviderFactory; +import io.dataease.provider.datasource.DatasourceProvider; +import io.dataease.service.datasource.DatasourceService; import org.apache.commons.collections4.CollectionUtils; +import org.apache.commons.lang3.StringUtils; import org.springframework.core.env.Environment; import org.springframework.stereotype.Service; +import org.springframework.transaction.annotation.Transactional; import javax.annotation.Resource; import java.util.List; +import java.util.UUID; @Service +@Transactional(rollbackFor = Exception.class) public class EngineService { @Resource private Environment env; @Resource private DeEngineMapper deEngineMapper; - static private Datasource ds = null; + @Resource + private DatasourceService datasource; + static private Datasource ds = new Datasource(); public Boolean isLocalMode(){ @@ -29,8 +41,50 @@ public class EngineService { public Boolean isSimpleMode(){ return env.getProperty("engine_mode", "local").equalsIgnoreCase("simple"); } - public Datasource getDeEngine() throws Exception{ - if (this.ds != null) { + + public Boolean isClusterMode(){ + return env.getProperty("engine_mode", "local").equalsIgnoreCase("cluster"); + } + + public String mode(){ + return env.getProperty("engine_mode", "local"); + } + + public DeEngine info(){ + List deEngines = deEngineMapper.selectByExampleWithBLOBs(new DeEngineExample()); + if(CollectionUtils.isEmpty(deEngines)){ + return new DeEngine(); + } + return deEngines.get(0); + } + + public ResultHolder validate(DatasourceDTO datasource) throws Exception { + try { + DatasourceProvider datasourceProvider = ProviderFactory.getProvider(datasource.getType()); + DatasourceRequest datasourceRequest = new DatasourceRequest(); + datasourceRequest.setDatasource(datasource); + datasourceProvider.checkStatus(datasourceRequest); + return ResultHolder.success(datasource); + }catch (Exception e){ + return ResultHolder.error("Datasource is invalid: " + e.getMessage()); + } + } + + public ResultHolder save(DeEngine engine) throws Exception { + if(StringUtils.isEmpty(engine.getId())){ + engine.setId(UUID.randomUUID().toString()); + deEngineMapper.insert(engine); + }else { + deEngineMapper.updateByPrimaryKeyWithBLOBs(engine); + } + datasource.handleConnectionPool(this.ds, "delete"); + BeanUtils.copyBean(this.ds, engine); + datasource.handleConnectionPool(this.ds, "add"); + return ResultHolder.success(engine); + } + + public Datasource getDeEngine() throws Exception{ + if (this.ds != null || StringUtils.isNotEmpty(ds.getType())) { return this.ds; } if(isLocalMode()){ @@ -52,7 +106,7 @@ public 
class EngineService { this.ds = datasource; } if(isSimpleMode()){ - List deEngines = deEngineMapper.selectByExample(new DeEngineExample()); + List deEngines = deEngineMapper.selectByExampleWithBLOBs(new DeEngineExample()); if(CollectionUtils.isEmpty(deEngines)){ throw new Exception("未设置数据引擎"); } diff --git a/frontend/src/api/system/engine.js b/frontend/src/api/system/engine.js new file mode 100644 index 0000000000..a0dd8c9a4d --- /dev/null +++ b/frontend/src/api/system/engine.js @@ -0,0 +1,36 @@ +import request from '@/utils/request' +import {validateDs} from "@/api/system/datasource"; + +export function engineMode() { + return request({ + url: '/engine/mode', + method: 'get', + loading: true + }) +} + +export function engineInfo() { + return request({ + url: '/engine/info', + method: 'get', + loading: true + }) +} + +export function validate(data) { + return request({ + url: '/engine/validate', + method: 'post', + loading: true, + data + }) +} + +export function save(data) { + return request({ + url: '/engine/save', + method: 'post', + loading: true, + data + }) +} diff --git a/frontend/src/lang/en.js b/frontend/src/lang/en.js index 906eedb303..77b002b1c6 100644 --- a/frontend/src/lang/en.js +++ b/frontend/src/lang/en.js @@ -647,7 +647,8 @@ export default { port: 'Port number cannot be empty', account: 'Account cannot be empty', test_recipients: 'Test recipients', - tip: 'Tip: use as test mail recipient only' + tip: 'Tip: use as test mail recipient only', + engine_mode_setting: 'Engine Setting' }, chart: { save_snapshot: 'Save Snapshot', diff --git a/frontend/src/lang/tw.js b/frontend/src/lang/tw.js index 372a8d3ec7..1877ab88a2 100644 --- a/frontend/src/lang/tw.js +++ b/frontend/src/lang/tw.js @@ -649,7 +649,8 @@ export default { port: '端口號不能爲空', account: '賬戶不能爲空', test_recipients: '測試收件人', - tip: '提示:僅用來作爲測試郵件收件人' + tip: '提示:僅用來作爲測試郵件收件人', + engine_mode_setting: '引擎設置' }, chart: { save_snapshot: '保存縮略圖', diff --git a/frontend/src/lang/zh.js b/frontend/src/lang/zh.js index 1f446a898c..49d97b0777 100644 --- a/frontend/src/lang/zh.js +++ b/frontend/src/lang/zh.js @@ -651,7 +651,7 @@ export default { account: '账户不能为空', test_recipients: '测试收件人', tip: '提示:仅用来作为测试邮件收件人', - simple_mode_datasource: '数据设置' + engine_mode_setting: '引擎设置' }, chart: { save_snapshot: '保存缩略图', diff --git a/frontend/src/views/dataset/add/AddApi.vue b/frontend/src/views/dataset/add/AddApi.vue index f15ea8f66c..ca72c61192 100644 --- a/frontend/src/views/dataset/add/AddApi.vue +++ b/frontend/src/views/dataset/add/AddApi.vue @@ -28,7 +28,7 @@ - + @@ -66,6 +66,7 @@ diff --git a/frontend/src/views/dataset/add/AddSQL.vue b/frontend/src/views/dataset/add/AddSQL.vue index 0757a854f4..c778f7c217 100644 --- a/frontend/src/views/dataset/add/AddSQL.vue +++ b/frontend/src/views/dataset/add/AddSQL.vue @@ -35,7 +35,7 @@ + :disabled="disabledSync"/> @@ -124,6 +124,7 @@ import 'codemirror/keymap/emacs.js' import 'codemirror/addon/hint/show-hint.css' import 'codemirror/addon/hint/sql-hint' import 'codemirror/addon/hint/show-hint' +import {engineMode} from "@/api/system/engine"; export default { name: 'AddSQL', @@ -157,7 +158,9 @@ export default { syncType: 'sync_now', height: 500, kettleRunning: false, - selectedDatasource: {} + selectedDatasource: {}, + engineMode: 'local', + disabledSync: true } }, computed: { @@ -187,6 +190,9 @@ export default { }, created() { this.kettleState() + engineMode().then(res => { + this.engineMode = res.data + }) }, methods: { kettleState() { @@ -198,6 +204,11 @@ export default { for (let i = 0; i < 
this.options.length; i++) { if (this.options[i].id === this.dataSource) { this.selectedDatasource = this.options[i] + if (this.engineMode === 'simple' || (!this.kettleRunning || this.disabledSyncDs.indexOf(this.selectedDatasource.type) !== -1 )) { + this.disabledSync = true + } else { + this.disabledSync = false + } } } }, diff --git a/frontend/src/views/dataset/common/DatasetGroupSelectorTree.vue b/frontend/src/views/dataset/common/DatasetGroupSelectorTree.vue index c0feb93984..d3308be7b9 100644 --- a/frontend/src/views/dataset/common/DatasetGroupSelectorTree.vue +++ b/frontend/src/views/dataset/common/DatasetGroupSelectorTree.vue @@ -79,7 +79,6 @@ + + diff --git a/frontend/src/views/system/SysParam/SimplemodeDatasourceSetting.vue b/frontend/src/views/system/SysParam/SimplemodeDatasourceSetting.vue deleted file mode 100644 index 722ce2c6be..0000000000 --- a/frontend/src/views/system/SysParam/SimplemodeDatasourceSetting.vue +++ /dev/null @@ -1,239 +0,0 @@ -