From f9bb4b31421e45c01b37bd688128115b8c13f3df Mon Sep 17 00:00:00 2001
From: taojinlong
Date: Mon, 24 May 2021 17:06:35 +0800
Subject: [PATCH] fix: data format issue when extracting Excel data
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .../datasource/service/DatasourceService.java | 27 ++++++++++-
 .../listener/AppStartReadHBaseListener.java   | 47 -------------------
 .../listener/DataSourceInitStartListener.java | 25 ++++++++++
 .../service/dataset/DataSetGroupService.java  |  2 +
 .../service/dataset/DataSetTableService.java  |  2 +
 .../service/dataset/ExtractDataService.java   | 33 +++++++++----
 .../components/canvas/components/Toolbar.vue  |  2 +-
 frontend/src/lang/en.js                       |  1 +
 frontend/src/lang/tw.js                       |  1 +
 frontend/src/lang/zh.js                       |  1 +
 10 files changed, 84 insertions(+), 57 deletions(-)
 delete mode 100644 backend/src/main/java/io/dataease/listener/AppStartReadHBaseListener.java
 create mode 100644 backend/src/main/java/io/dataease/listener/DataSourceInitStartListener.java

diff --git a/backend/src/main/java/io/dataease/datasource/service/DatasourceService.java b/backend/src/main/java/io/dataease/datasource/service/DatasourceService.java
index 6966b6611c..82c6722302 100644
--- a/backend/src/main/java/io/dataease/datasource/service/DatasourceService.java
+++ b/backend/src/main/java/io/dataease/datasource/service/DatasourceService.java
@@ -7,6 +7,8 @@ import io.dataease.base.mapper.ext.ExtDataSourceMapper;
 import io.dataease.base.mapper.ext.query.GridExample;
 import io.dataease.commons.exception.DEException;
 import io.dataease.commons.utils.AuthUtils;
+import io.dataease.commons.utils.CommonThreadPool;
+import io.dataease.commons.utils.LogUtil;
 import io.dataease.controller.request.DatasourceUnionRequest;
 import io.dataease.controller.sys.base.BaseGridRequest;
 import io.dataease.controller.sys.base.ConditionEntity;
@@ -40,6 +42,8 @@ public class DatasourceService {
     private DatasetTableMapper datasetTableMapper;
     @Resource
     private DataSetGroupService dataSetGroupService;
+    @Resource
+    private CommonThreadPool commonThreadPool;
 
     public Datasource addDatasource(Datasource datasource) {
         DatasourceExample example = new DatasourceExample();
@@ -114,7 +118,6 @@
             DataTableInfoDTO dataTableInfoDTO = new Gson().fromJson(datasetTable.getInfo(), DataTableInfoDTO.class);
             if (StringUtils.equals(name, dataTableInfoDTO.getTable())) {
                 dbTableDTO.setEnableCheck(false);
-
                 List parents = dataSetGroupService.getParents(datasetTable.getSceneId());
                 StringBuilder stringBuilder = new StringBuilder();
                 parents.forEach(ele -> stringBuilder.append(ele.getName()).append("/"));
@@ -131,4 +134,26 @@
     public Datasource get(String id) {
         return datasourceMapper.selectByPrimaryKey(id);
     }
+
+    public void initAllDataSourceConnectionPool() {
+        List<Datasource> datasources = datasourceMapper.selectByExampleWithBLOBs(new DatasourceExample());
+        datasources.forEach(datasource -> {
+            try {
+                commonThreadPool.addTask(() -> {
+                    try {
+                        System.out.println(new Gson().toJson(datasource));
+                        DatasourceProvider datasourceProvider = ProviderFactory.getProvider(datasource.getType());
+                        DatasourceRequest datasourceRequest = new DatasourceRequest();
+                        datasourceRequest.setDatasource(datasource);
+                        datasourceProvider.initDataSource(datasourceRequest);
+                        LogUtil.error("Success to init datasource connection pool: " + datasource.getName());
+                    } catch (Exception e) {
+                        LogUtil.error("Failed to init datasource connection pool: " + datasource.getName(), e);
+                    }
+                });
+            } catch (Exception e) {
+                e.printStackTrace();
+            }
+        });
+    }
 }
diff --git a/backend/src/main/java/io/dataease/listener/AppStartReadHBaseListener.java b/backend/src/main/java/io/dataease/listener/AppStartReadHBaseListener.java
deleted file mode 100644
index 00383b7090..0000000000
--- a/backend/src/main/java/io/dataease/listener/AppStartReadHBaseListener.java
+++ /dev/null
@@ -1,47 +0,0 @@
-//package io.dataease.listener;
-//
-//import io.dataease.base.mapper.DatasetTableMapper;
-//import io.dataease.commons.utils.CommonThreadPool;
-//import io.dataease.service.dataset.DataSetTableFieldsService;
-//import org.springframework.boot.context.event.ApplicationReadyEvent;
-//import org.springframework.context.ApplicationListener;
-//import org.springframework.core.annotation.Order;
-//import org.springframework.core.env.Environment;
-//import org.springframework.stereotype.Component;
-//
-//import javax.annotation.Resource;
-//
-//@Component
-//@Order(value = 2)
-//public class AppStartReadHBaseListener implements ApplicationListener<ApplicationReadyEvent> {
-//    @Resource
-//    private CommonThreadPool commonThreadPool;
-////    @Resource
-////    private SparkCalc sparkCalc;
-//    @Resource
-//    private Environment env; // holds the configuration file settings
-//
-//    @Resource
-//    private DatasetTableMapper datasetTableMapper;
-//    @Resource
-//    private DataSetTableFieldsService dataSetTableFieldsService;
-//
-//    @Override
-//    public void onApplicationEvent(ApplicationReadyEvent applicationReadyEvent) {
-////        System.out.println("================= Read HBase start =================");
-////        // On startup, find the datasets scheduled for extraction and load them from HBase into the cache
-////        DatasetTableExample datasetTableExample = new DatasetTableExample();
-////        datasetTableExample.createCriteria().andModeEqualTo(1);
-////        List<DatasetTable> datasetTables = datasetTableMapper.selectByExampleWithBLOBs(datasetTableExample);
-////        for (DatasetTable table : datasetTables) {
-//////            commonThreadPool.addTask(() -> {
-////            try {
-////                List<DatasetTableField> fields = dataSetTableFieldsService.getFieldsByTableId(table.getId());
-////                sparkCalc.getHBaseDataAndCache(table.getId(), fields);
-////            } catch (Exception e) {
-////                e.printStackTrace();
-////            }
-//////            });
-////        }
-//    }
-//}
diff --git a/backend/src/main/java/io/dataease/listener/DataSourceInitStartListener.java b/backend/src/main/java/io/dataease/listener/DataSourceInitStartListener.java
new file mode 100644
index 0000000000..3ceeb18071
--- /dev/null
+++ b/backend/src/main/java/io/dataease/listener/DataSourceInitStartListener.java
@@ -0,0 +1,25 @@
+package io.dataease.listener;
+
+import io.dataease.base.domain.DatasetTableTask;
+import io.dataease.datasource.service.DatasourceService;
+import io.dataease.service.ScheduleService;
+import io.dataease.service.dataset.DataSetTableTaskService;
+import org.springframework.boot.context.event.ApplicationReadyEvent;
+import org.springframework.context.ApplicationListener;
+import org.springframework.core.annotation.Order;
+import org.springframework.stereotype.Component;
+
+import javax.annotation.Resource;
+import java.util.List;
+
+@Component
+@Order(value = 1)
+public class DataSourceInitStartListener implements ApplicationListener<ApplicationReadyEvent> {
+    @Resource
+    private DatasourceService datasourceService;
+
+    @Override
+    public void onApplicationEvent(ApplicationReadyEvent applicationReadyEvent) {
+        datasourceService.initAllDataSourceConnectionPool();
+    }
+}
diff --git a/backend/src/main/java/io/dataease/service/dataset/DataSetGroupService.java b/backend/src/main/java/io/dataease/service/dataset/DataSetGroupService.java
index 153f68efba..92a88605dd 100644
--- a/backend/src/main/java/io/dataease/service/dataset/DataSetGroupService.java
+++ b/backend/src/main/java/io/dataease/service/dataset/DataSetGroupService.java
@@ -14,6 +14,7 @@ import io.dataease.dto.dataset.DataSetTableDTO;
 import io.dataease.i18n.Translator;
 import org.apache.commons.collections4.CollectionUtils;
 import org.apache.commons.lang3.StringUtils;
+import org.springframework.context.annotation.Lazy;
 import org.springframework.stereotype.Service;
 
 import javax.annotation.Resource;
@@ -32,6 +33,7 @@ public class DataSetGroupService {
     @Resource
     private DatasetGroupMapper datasetGroupMapper;
     @Resource
+    @Lazy
     private DataSetTableService dataSetTableService;
     @Resource
     private ExtDataSetGroupMapper extDataSetGroupMapper;
diff --git a/backend/src/main/java/io/dataease/service/dataset/DataSetTableService.java b/backend/src/main/java/io/dataease/service/dataset/DataSetTableService.java
index 2aafeb24a1..efe1615205 100644
--- a/backend/src/main/java/io/dataease/service/dataset/DataSetTableService.java
+++ b/backend/src/main/java/io/dataease/service/dataset/DataSetTableService.java
@@ -35,6 +35,7 @@ import org.apache.poi.xssf.usermodel.XSSFSheet;
 import org.apache.poi.xssf.usermodel.XSSFWorkbook;
 import org.springframework.beans.factory.annotation.Value;
 import org.springframework.stereotype.Service;
+import org.springframework.transaction.annotation.Transactional;
 import org.springframework.web.multipart.MultipartFile;
 
 import javax.annotation.Resource;
@@ -50,6 +51,7 @@ import java.util.stream.Collectors;
  * @Date 2021/2/23 2:54 下午
  */
 @Service
+@Transactional(rollbackFor = Exception.class)
 public class DataSetTableService {
     @Resource
     private DatasetTableMapper datasetTableMapper;
diff --git a/backend/src/main/java/io/dataease/service/dataset/ExtractDataService.java b/backend/src/main/java/io/dataease/service/dataset/ExtractDataService.java
index 36d1c6220e..fde4cf6ff5 100644
--- a/backend/src/main/java/io/dataease/service/dataset/ExtractDataService.java
+++ b/backend/src/main/java/io/dataease/service/dataset/ExtractDataService.java
@@ -63,6 +63,7 @@ import org.pentaho.di.trans.steps.userdefinedjavaclass.UserDefinedJavaClassDef;
 import org.pentaho.di.trans.steps.userdefinedjavaclass.UserDefinedJavaClassMeta;
 import org.pentaho.di.www.SlaveServerJobStatus;
 import org.springframework.beans.factory.annotation.Value;
+import org.springframework.context.annotation.Lazy;
 import org.springframework.stereotype.Service;
 
 import javax.annotation.Resource;
@@ -82,6 +83,7 @@ import java.util.stream.Collectors;
 public class ExtractDataService {
 
     @Resource
+    @Lazy
     private DataSetTableService dataSetTableService;
     @Resource
     private DataSetTableFieldsService dataSetTableFieldsService;
@@ -91,7 +93,6 @@ public class ExtractDataService {
     private DataSetTableTaskService dataSetTableTaskService;
     @Resource
     private DatasourceMapper datasourceMapper;
-    private static ExecutorService pool = Executors.newScheduledThreadPool(50); // connection pool setup
     private static String lastUpdateTime = "${__last_update_time__}";
     private static String currentUpdateTime = "${__current_update_time__}";
 
@@ -113,7 +114,6 @@
             "UNIQUE KEY(dataease_uuid)\n" +
             "DISTRIBUTED BY HASH(dataease_uuid) BUCKETS 10\n" +
             "PROPERTIES(\"replication_num\" = \"1\");";
-
     private static String shellScript = "curl --location-trusted -u %s:%s -H \"label:%s\" -H \"column_separator:%s\" -H \"columns:%s\" -H \"merge_type: %s\" -T %s -XPUT http://%s:%s/api/%s/%s/_stream_load\n" +
             "rm -rf %s\n";
 
@@ -448,6 +448,7 @@
                     selectSQL = qp.createQuerySQL(tableName, datasetTableFields);
                 }
                 inputStep = inputStep(transMeta, selectSQL);
+                udjcStep = udjc(datasetTableFields, false);
                 break;
             case sqlServer:
                 SqlServerConfigration sqlServerConfigration = new Gson().fromJson(datasource.getConfiguration(), SqlServerConfigration.class);
@@ -459,10 +460,12 @@
                     selectSQL = qp.createQuerySQL(tableName, datasetTableFields);
                 }
                 inputStep = inputStep(transMeta, selectSQL);
+                udjcStep = udjc(datasetTableFields, false);
                 break;
             case excel:
                 String filePath = new Gson().fromJson(datasetTable.getInfo(), DataTableInfoDTO.class).getData();
                 inputStep = excelInputStep(filePath, datasetTableFields);
+                udjcStep = udjc(datasetTableFields, true);
             default:
                 break;
         }
@@ -487,7 +490,7 @@
                 break;
         }
 
-        udjcStep = udjc(datasetTableFields);
+
         outputStep = outputStep(dorisOutputTable);
         hi1 = new TransHopMeta(inputStep, udjcStep);
         hi2 = new TransHopMeta(udjcStep, outputStep);
@@ -569,11 +572,11 @@
         return outputStep;
     }
 
-    private StepMeta udjc(List<DatasetTableField> datasetTableFields) {
-        String needToChangeolumnType = "";
+    private StepMeta udjc(List<DatasetTableField> datasetTableFields, boolean isExcel) {
+        String needToChangeColumnType = "";
         for (DatasetTableField datasetTableField : datasetTableFields) {
             if (datasetTableField.getDeExtractType() != null && datasetTableField.getDeExtractType() == 4) {
-                needToChangeolumnType = needToChangeolumnType + alterColumnTypeCode.replace("FILED", datasetTableField.getOriginName());
+                needToChangeColumnType = needToChangeColumnType + alterColumnTypeCode.replace("FILED", datasetTableField.getOriginName());
             }
         }
 
@@ -583,8 +586,13 @@
         fields.add(fieldInfo);
         userDefinedJavaClassMeta.setFieldInfo(fields);
         List definitions = new ArrayList();
-        UserDefinedJavaClassDef userDefinedJavaClassDef = new UserDefinedJavaClassDef(UserDefinedJavaClassDef.ClassType.TRANSFORM_CLASS, "Processor",
-                code.replace("alterColumnTypeCode", needToChangeolumnType).replace("Column_Fields", String.join(",", datasetTableFields.stream().map(DatasetTableField::getOriginName).collect(Collectors.toList()))));
+        String tmp_code = code.replace("alterColumnTypeCode", needToChangeColumnType).replace("Column_Fields", String.join(",", datasetTableFields.stream().map(DatasetTableField::getOriginName).collect(Collectors.toList())));
+        if (isExcel) {
+            tmp_code = tmp_code.replace("handleExcelIntColumn", handleExcelIntColumn);
+        } else {
+            tmp_code = tmp_code.replace("handleExcelIntColumn", "");
+        }
+        UserDefinedJavaClassDef userDefinedJavaClassDef = new UserDefinedJavaClassDef(UserDefinedJavaClassDef.ClassType.TRANSFORM_CLASS, "Processor", tmp_code);
         userDefinedJavaClassDef.setActive(true);
         definitions.add(userDefinedJavaClassDef);
 
@@ -629,6 +637,14 @@
             "        }\n" +
             "    }\n";
 
+    private static String handleExcelIntColumn = " \t\tif(tmp != null && tmp.endsWith(\".0\")){\n" +
+            "            try {\n" +
+            "                Integer.valueOf(tmp.substring(0, tmp.length()-2));\n" +
+            "                get(Fields.Out, filed).setValue(r, tmp.substring(0, tmp.length()-2));\n" +
+            "                get(Fields.Out, filed).getValueMeta().setType(2);\n" +
+            "            }catch (Exception e){}\n" +
+            "        }";
+
     private static String code = "import org.pentaho.di.core.row.ValueMetaInterface;\n" +
             "import java.util.List;\n" +
             "import java.io.File;\n" +
@@ -659,6 +675,7 @@
             "        for (String filed : fileds) {\n" +
             "            String tmp = get(Fields.In, filed).getString(r);\n" +
             "alterColumnTypeCode \n" +
+            "handleExcelIntColumn \n" +
             "            str = str + tmp;\n" +
             "        }\n" +
             "\n" +
diff --git a/frontend/src/components/canvas/components/Toolbar.vue b/frontend/src/components/canvas/components/Toolbar.vue
index 48aa7af8e0..397f850eb6 100644
--- a/frontend/src/components/canvas/components/Toolbar.vue
+++ b/frontend/src/components/canvas/components/Toolbar.vue
@@ -160,7 +160,7 @@ export default {
     handleFileChange(e) {
       const file = e.target.files[0]
       if (!file.type.includes('image')) {
-        toast('只能插入图片')
+        toast(this.$t('panel.picture_limit'))
         return
       }
 
diff --git a/frontend/src/lang/en.js b/frontend/src/lang/en.js
index 118d1fa692..74e26d4d85 100644
--- a/frontend/src/lang/en.js
+++ b/frontend/src/lang/en.js
@@ -815,6 +815,7 @@ export default {
     input_limit_0_50: '0-50 chars'
   },
   panel: {
+    picture_limit: 'Only pictures can be inserted',
     drag_here: 'Please drag the left field here',
     copy_link_passwd: 'Copy link and password',
     copy_link: 'Copy link',
diff --git a/frontend/src/lang/tw.js b/frontend/src/lang/tw.js
index bf73985ab4..8abe13ebda 100644
--- a/frontend/src/lang/tw.js
+++ b/frontend/src/lang/tw.js
@@ -814,6 +814,7 @@ export default {
     input_limit_0_50: '0-50字符'
   },
   panel: {
+    picture_limit: '只能插入圖片',
     drag_here: '請將左側字段拖至此處',
     copy_link_passwd: '複製鏈接及密碼',
     copy_link: '複製鏈接',
diff --git a/frontend/src/lang/zh.js b/frontend/src/lang/zh.js
index a12f580cee..f1d074c1da 100644
--- a/frontend/src/lang/zh.js
+++ b/frontend/src/lang/zh.js
@@ -816,6 +816,7 @@ export default {
     input_limit_0_50: '0-50字符'
   },
   panel: {
+    picture_limit: '只能插入图片',
     drag_here: '请将左侧字段拖至此处',
     copy_link_passwd: '复制链接及密码',
     copy_link: '复制链接',
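
Note: the handleExcelIntColumn snippet added above is spliced into the generated Kettle user-defined Java class only when the source is an Excel file. As a rough standalone sketch of the string handling it performs (illustration only, not part of the patch; the class and helper names below are invented for the example), a cell value that comes back from the Excel input step as "123.0" is trimmed to "123", while values that are not an integer followed by ".0" pass through unchanged:

// Standalone illustration of the ".0"-stripping logic injected by handleExcelIntColumn.
// Not part of the patch; ExcelIntColumnExample and normalize are hypothetical names.
public class ExcelIntColumnExample {

    // Values like "123.0" (how numeric Excel cells are often read back as strings)
    // are rewritten to "123"; anything else is returned unchanged.
    static String normalize(String tmp) {
        if (tmp != null && tmp.endsWith(".0")) {
            try {
                String candidate = tmp.substring(0, tmp.length() - 2);
                Integer.valueOf(candidate); // only rewrite when the prefix parses as an integer
                return candidate;
            } catch (Exception e) {
                // prefix is not an integer, keep the original value
            }
        }
        return tmp;
    }

    public static void main(String[] args) {
        System.out.println(normalize("123.0"));  // 123
        System.out.println(normalize("123.45")); // 123.45 (unchanged)
        System.out.println(normalize("abc.0"));  // abc.0 (unchanged)
    }
}

In the generated transformation the rewritten value is written back through get(Fields.Out, filed).setValue(r, ...), so the downstream Doris stream load receives "123" rather than "123.0" for integer-typed Excel columns.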