Merge pull request #1924 from dataease/pr@dev@impala
Merge branch 'dev' into pr@dev@impala
commit 19107a7405
@@ -4,6 +4,7 @@ public enum DatasourceTypes {
 excel("excel", "excel", "", "", "", "", ""),
 mysql("mysql", "mysql", "com.mysql.jdbc.Driver", "`", "`", "'", "'"),
 hive("hive", "hive", "org.apache.hive.jdbc.HiveDriver", "`", "`", "'", "'"),
+impala("impala", "impala", "org.apache.hive.jdbc.HiveDriver", "`", "`", "'", "'"),
 mariadb("mariadb", "mariadb", "com.mysql.jdbc.Driver", "`", "`", "'", "'"),
 ds_doris("ds_doris", "ds_doris", "com.mysql.jdbc.Driver", "`", "`", "'", "'"),
 pg("pg", "pg", "org.postgresql.Driver", "\"", "\"", "\"", "\""),
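Note: the DatasourceTypes constructor itself is not part of this diff. A hypothetical reconstruction, inferred from the seven arguments above and the getKeywordPrefix()/getKeywordSuffix() calls in the constants classes below (every field name except the keyword pair is an assumption, not taken from this commit):

    // Hypothetical sketch of the seven-argument enum constructor implied by
    // entries such as impala("impala", "impala", "org.apache.hive.jdbc.HiveDriver", "`", "`", "'", "'").
    public enum DatasourceTypes {
        impala("impala", "impala", "org.apache.hive.jdbc.HiveDriver", "`", "`", "'", "'");

        private final String type;
        private final String name;
        private final String driver;
        private final String keywordPrefix;  // quoting used around table/column keywords
        private final String keywordSuffix;
        private final String aliasPrefix;    // assumed: quoting used around aliases
        private final String aliasSuffix;

        DatasourceTypes(String type, String name, String driver, String keywordPrefix,
                        String keywordSuffix, String aliasPrefix, String aliasSuffix) {
            this.type = type;
            this.name = name;
            this.driver = driver;
            this.keywordPrefix = keywordPrefix;
            this.keywordSuffix = keywordSuffix;
            this.aliasPrefix = aliasPrefix;
            this.aliasSuffix = aliasSuffix;
        }

        public String getKeywordPrefix() { return keywordPrefix; }
        public String getKeywordSuffix() { return keywordSuffix; }
    }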
@@ -0,0 +1,28 @@
+package io.dataease.dto.datasource;
+
+import lombok.Getter;
+import lombok.Setter;
+import org.apache.commons.lang3.StringUtils;
+
+@Getter
+@Setter
+public class ImpalaConfiguration extends JdbcConfiguration {
+
+    private String driver = "com.cloudera.impala.jdbc.Driver";
+    private String extraParams = "";
+
+    public String getJdbc() {
+        if (StringUtils.isEmpty(extraParams.trim())) {
+            return "jdbc:impala://HOSTNAME:PORT/DATABASE"
+                    .replace("HOSTNAME", getHost().trim())
+                    .replace("PORT", getPort().toString().trim())
+                    .replace("DATABASE", getDataBase().trim());
+        } else {
+            return "jdbc:impala://HOSTNAME:PORT/DATABASE;EXTRA_PARAMS"
+                    .replace("HOSTNAME", getHost().trim())
+                    .replace("PORT", getPort().toString().trim())
+                    .replace("DATABASE", getDataBase().trim())
+                    .replace("EXTRA_PARAMS", getExtraParams().trim());
+        }
+    }
+}
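For reference, a minimal sketch of what getJdbc() produces, with illustrative values (the host, port, and database are made up; the auth=noSasl parameter mirrors the frontend default added further down). One caveat: extraParams.trim() assumes the field is non-null, which holds for the field initializer but may not if Gson instantiates the object without running initializers.

    // Illustrative only: the same template logic as getJdbc(), exercised standalone.
    String host = "impala-host";          // assumption
    int port = 21050;                     // assumption: a common Impala JDBC port
    String database = "default";          // assumption
    String extraParams = "auth=noSasl";   // frontend default for Impala (see below)

    String url = "jdbc:impala://HOSTNAME:PORT/DATABASE;EXTRA_PARAMS"
            .replace("HOSTNAME", host)
            .replace("PORT", String.valueOf(port))
            .replace("DATABASE", database)
            .replace("EXTRA_PARAMS", extraParams.trim());
    // url -> "jdbc:impala://impala-host:21050/default;auth=noSasl"
    // with extraParams empty, getJdbc() instead yields "jdbc:impala://impala-host:21050/default"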
@@ -54,6 +54,8 @@ public class ProviderFactory implements ApplicationContextAware {
                 return context.getBean("redshiftQuery", QueryProvider.class);
             case hive:
                 return context.getBean("hiveQuery", QueryProvider.class);
+            case impala:
+                return context.getBean("impalaQuery", QueryProvider.class);
             case db2:
                 return context.getBean("db2Query", QueryProvider.class);
             case api:
@@ -154,7 +154,7 @@ public class JdbcProvider extends DatasourceProvider {
         while (resultSet.next()) {
             String tableName = resultSet.getString("TABLE_NAME");
             String database;
-            if (datasourceRequest.getDatasource().getType().equalsIgnoreCase(DatasourceTypes.ck.name())) {
+            if (datasourceRequest.getDatasource().getType().equalsIgnoreCase(DatasourceTypes.ck.name()) || datasourceRequest.getDatasource().getType().equalsIgnoreCase(DatasourceTypes.impala.name())) {
                 database = resultSet.getString("TABLE_SCHEM");
             } else {
                 database = resultSet.getString("TABLE_CAT");
@@ -485,6 +485,14 @@ public class JdbcProvider extends DatasourceProvider {
                 driver = hiveConfiguration.getDriver();
                 jdbcurl = hiveConfiguration.getJdbc();
                 break;
+            case impala:
+                ImpalaConfiguration impalaConfiguration = new Gson().fromJson(datasourceRequest.getDatasource().getConfiguration(), ImpalaConfiguration.class);
+                System.out.println(new Gson().toJson(impalaConfiguration));
+                username = impalaConfiguration.getUsername();
+                password = impalaConfiguration.getPassword();
+                driver = impalaConfiguration.getDriver();
+                jdbcurl = impalaConfiguration.getJdbc();
+                break;
             case db2:
                 Db2Configuration db2Configuration = new Gson().fromJson(datasourceRequest.getDatasource().getConfiguration(), Db2Configuration.class);
                 username = db2Configuration.getUsername();
@@ -586,6 +594,13 @@ public class JdbcProvider extends DatasourceProvider {
                 dataSource.setUrl(hiveConfiguration.getJdbc());
                 jdbcConfiguration = hiveConfiguration;
                 break;
+            case impala:
+                ImpalaConfiguration impalaConfiguration = new Gson().fromJson(datasourceRequest.getDatasource().getConfiguration(), ImpalaConfiguration.class);
+                dataSource.setPassword(impalaConfiguration.getPassword());
+                dataSource.setDriverClassName(impalaConfiguration.getDriver());
+                dataSource.setUrl(impalaConfiguration.getJdbc());
+                jdbcConfiguration = impalaConfiguration;
+                break;
             case db2:
                 Db2Configuration db2Configuration = new Gson().fromJson(datasourceRequest.getDatasource().getConfiguration(), Db2Configuration.class);
                 dataSource.setPassword(db2Configuration.getPassword());
@@ -614,6 +629,7 @@ public class JdbcProvider extends DatasourceProvider {
             case engine_doris:
             case ds_doris:
             case hive:
+            case impala:
                 return "show tables";
             case sqlServer:
                 SqlServerConfiguration sqlServerConfiguration = new Gson().fromJson(datasourceRequest.getDatasource().getConfiguration(), SqlServerConfiguration.class);
@@ -2,16 +2,16 @@ package io.dataease.provider.query.hive;

 import io.dataease.provider.SQLConstants;

-import static io.dataease.commons.constants.DatasourceTypes.mysql;
+import static io.dataease.commons.constants.DatasourceTypes.hive;

 /**
  * @Author gin
  * @Date 2021/7/8 7:22 PM
  */
 public class HiveConstants extends SQLConstants {
-    public static final String KEYWORD_TABLE = mysql.getKeywordPrefix() + "%s" + mysql.getKeywordSuffix();
+    public static final String KEYWORD_TABLE = hive.getKeywordPrefix() + "%s" + hive.getKeywordSuffix();

-    public static final String KEYWORD_FIX = "%s." + mysql.getKeywordPrefix() + "%s" + mysql.getKeywordSuffix();
+    public static final String KEYWORD_FIX = "%s." + hive.getKeywordPrefix() + "%s" + hive.getKeywordSuffix();

     public static final String UNIX_TIMESTAMP = "unix_timestamp(%s)";
@@ -0,0 +1,39 @@
+package io.dataease.provider.query.impala;
+
+import io.dataease.provider.SQLConstants;
+
+import static io.dataease.commons.constants.DatasourceTypes.mysql;
+
+public class ImpalaConstants extends SQLConstants {
+    public static final String KEYWORD_TABLE = mysql.getKeywordPrefix() + "%s" + mysql.getKeywordSuffix();
+
+    public static final String KEYWORD_FIX = "%s." + mysql.getKeywordPrefix() + "%s" + mysql.getKeywordSuffix();
+
+    public static final String UNIX_TIMESTAMP = "unix_timestamp(%s)";
+
+    public static final String DATE_FORMAT = "from_unixtime(UNIX_TIMESTAMP(%s), '%s')";
+
+    public static final String FROM_UNIXTIME = "FROM_UNIXTIME(%s,'%s')";
+
+    public static final String STR_TO_DATE = "STR_TO_DATE(%s,'%s')";
+
+    public static final String CAST = "CAST(%s AS %s)";
+
+    public static final String DEFAULT_DATE_FORMAT = "yyyy-MM-dd HH:mm:ss";
+
+    public static final String DEFAULT_INT_FORMAT = "BIGINT";
+
+    public static final String DEFAULT_FLOAT_FORMAT = "DECIMAL(20,2)";
+
+    public static final String WHERE_VALUE_NULL = "(NULL,'')";
+
+    public static final String WHERE_VALUE_VALUE = "'%s'";
+
+    public static final String AGG_COUNT = "COUNT(*)";
+
+    public static final String AGG_FIELD = "%s(%s)";
+
+    public static final String WHERE_BETWEEN = "'%s' AND '%s'";
+
+    public static final String BRACKETS = "(%s)";
+}
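These constants are %s templates; presumably they are expanded with String.format when a query provider assembles SQL fragments, the same way the Hive constants are used. An illustrative expansion (the column names are hypothetical):

    // Illustrative only; "price" and "create_time" are made-up column names.
    String castExpr = String.format(ImpalaConstants.CAST, "price", ImpalaConstants.DEFAULT_FLOAT_FORMAT);
    // -> CAST(price AS DECIMAL(20,2))

    String dateExpr = String.format(ImpalaConstants.DATE_FORMAT, "create_time", ImpalaConstants.DEFAULT_DATE_FORMAT);
    // -> from_unixtime(UNIX_TIMESTAMP(create_time), 'yyyy-MM-dd HH:mm:ss')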
File diff suppressed because it is too large.
@@ -292,25 +292,6 @@ public class ChartViewService {
     }

-//    private void checkPermissions(List<? extends ChartViewFieldBaseDTO> chartViewFieldDTOS, List<DatasetTableField> fields, List<String> desensitizationList, Boolean alowDesensitization) throws Exception {
-//        String filedName = "";
-//        for (ChartViewFieldBaseDTO chartViewFieldDTO : chartViewFieldDTOS) {
-//            if (alowDesensitization) {
-//                if (!fields.stream().map(DatasetTableField::getDataeaseName).collect(Collectors.toList()).contains(chartViewFieldDTO.getDataeaseName())) {
-//                    filedName = filedName + chartViewFieldDTO.getName() + " ,";
-//                }
-//            } else {
-//                if (desensitizationList.contains(chartViewFieldDTO.getDataeaseName()) || !fields.stream().map(DatasetTableField::getDataeaseName).collect(Collectors.toList()).contains(chartViewFieldDTO.getDataeaseName())) {
-//                    filedName = filedName + chartViewFieldDTO.getName() + " ,";
-//                }
-//            }
-//        }
-//        filedName = filedName.endsWith(",") ? filedName.substring(0, filedName.length() - 1) : filedName;
-//        if (StringUtils.isNotEmpty(filedName)) {
-//            throw new Exception("The following fields lack permission: " + filedName);
-//        }
-//    }

     public ChartViewDTO calcData(ChartViewDTO view, ChartExtRequest requestList, boolean cache) throws Exception {
         if (ObjectUtils.isEmpty(view)) {
             throw new RuntimeException(Translator.get("i18n_chart_delete"));
@@ -101,6 +101,7 @@ public class DataSetTableService {
     @Resource
     private EngineService engineService;

+    private static boolean isUpdatingDatasetTableStatus = false;
     private static final String lastUpdateTime = "${__last_update_time__}";
     private static final String currentUpdateTime = "${__current_update_time__}";
@@ -2182,15 +2183,30 @@ public class DataSetTableService {
     private UtilMapper utilMapper;

     public void updateDatasetTableStatus() {
+        if (this.isUpdatingDatasetTableStatus) {
+            return;
+        } else {
+            this.isUpdatingDatasetTableStatus = true;
+        }
+
+        try {
+            doUpdate();
+        } catch (Exception e) {
+        } finally {
+            this.isUpdatingDatasetTableStatus = false;
+        }
+    }
+
+    private void doUpdate() {
         List<QrtzSchedulerState> qrtzSchedulerStates = qrtzSchedulerStateMapper.selectByExample(null);
         List<String> activeQrtzInstances = qrtzSchedulerStates.stream()
                 .filter(qrtzSchedulerState -> qrtzSchedulerState.getLastCheckinTime()
                         + qrtzSchedulerState.getCheckinInterval() + 1000 > utilMapper.currentTimestamp())
                 .map(QrtzSchedulerStateKey::getInstanceName).collect(Collectors.toList());
         List<DatasetTable> jobStoppeddDatasetTables = new ArrayList<>();

         DatasetTableExample example = new DatasetTableExample();
         example.createCriteria().andSyncStatusEqualTo(JobStatus.Underway.name());

         datasetTableMapper.selectByExample(example).forEach(datasetTable -> {
             if (StringUtils.isEmpty(datasetTable.getQrtzInstance()) || !activeQrtzInstances.contains(
                     datasetTable.getQrtzInstance().substring(0, datasetTable.getQrtzInstance().length() - 13))) {
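A side note on the guard above: the read-then-write on the static isUpdatingDatasetTableStatus flag is not atomic, so two threads could both pass the check before either sets the flag, and the empty catch block swallows any failure from doUpdate(). A sketch of an atomic variant (an alternative, not what this commit does):

    // Hypothetical alternative using java.util.concurrent.atomic.AtomicBoolean;
    // exceptions are left to propagate rather than being silently discarded.
    private static final AtomicBoolean UPDATING = new AtomicBoolean(false);

    public void updateDatasetTableStatus() {
        if (!UPDATING.compareAndSet(false, true)) {
            return; // another thread is already running the update
        }
        try {
            doUpdate();
        } finally {
            UPDATING.set(false); // always release the guard
        }
    }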
@@ -2202,6 +2218,7 @@ public class DataSetTableService {
             return;
         }

+        //DatasetTable
         DatasetTable record = new DatasetTable();
         record.setSyncStatus(JobStatus.Error.name());
         example.clear();
@@ -2209,6 +2226,14 @@ public class DataSetTableService {
                 .andIdIn(jobStoppeddDatasetTables.stream().map(DatasetTable::getId).collect(Collectors.toList()));
         datasetTableMapper.updateByExampleSelective(record, example);

+        //Task
+        DatasetTableTaskExample datasetTableTaskExample = new DatasetTableTaskExample();
+        DatasetTableTaskExample.Criteria criteria = datasetTableTaskExample.createCriteria();
+        criteria.andTableIdIn(jobStoppeddDatasetTables.stream().map(DatasetTable::getId).collect(Collectors.toList())).andStatusEqualTo(JobStatus.Underway.name());
+        List<DatasetTableTask> datasetTableTasks = dataSetTableTaskService.list(datasetTableTaskExample);
+        dataSetTableTaskService.updateTaskStatus(datasetTableTasks, JobStatus.Error);
+
+        //TaskLog
         DatasetTableTaskLog datasetTableTaskLog = new DatasetTableTaskLog();
         datasetTableTaskLog.setStatus(JobStatus.Error.name());
         datasetTableTaskLog.setInfo("Job stopped due to system error.");
@@ -2216,19 +2241,14 @@ public class DataSetTableService {
         DatasetTableTaskLogExample datasetTableTaskLogExample = new DatasetTableTaskLogExample();
         datasetTableTaskLogExample.createCriteria().andStatusEqualTo(JobStatus.Underway.name())
                 .andTableIdIn(jobStoppeddDatasetTables.stream().map(DatasetTable::getId).collect(Collectors.toList()));
-        List<String> taskIds = datasetTableTaskLogMapper.selectByExample(datasetTableTaskLogExample).stream()
-                .map(DatasetTableTaskLog::getTaskId).collect(Collectors.toList());
         datasetTableTaskLogMapper.updateByExampleSelective(datasetTableTaskLog, datasetTableTaskLogExample);

-        dataSetTableTaskService.updateTaskStatus(taskIds, JobStatus.Error);
-
         for (DatasetTable jobStoppeddDatasetTable : jobStoppeddDatasetTables) {
             extractDataService.deleteFile("all_scope", jobStoppeddDatasetTable.getId());
             extractDataService.deleteFile("incremental_add", jobStoppeddDatasetTable.getId());
             extractDataService.deleteFile("incremental_delete", jobStoppeddDatasetTable.getId());
         }
     }

     /*
      * Check whether the array contains duplicate values
      */
@@ -161,13 +161,13 @@ public class DataSetTableTaskService {
         return datasetTableTaskMapper.selectByPrimaryKey(id);
     }

-    public void updateTaskStatus(List<String> taskIds, JobStatus lastExecStatus) {
-        if (CollectionUtils.isEmpty(taskIds)) {
-            return;
-        }
-        DatasetTableTaskExample example = new DatasetTableTaskExample();
-        example.createCriteria().andIdIn(taskIds);
-        List<DatasetTableTask> datasetTableTasks = datasetTableTaskMapper.selectByExample(example);
+    public List<DatasetTableTask> list(DatasetTableTaskExample example) {
+        return datasetTableTaskMapper.selectByExample(example);
+    }
+
+    public void updateTaskStatus(List<DatasetTableTask> datasetTableTasks, JobStatus lastExecStatus) {
         for (DatasetTableTask tableTask : datasetTableTasks) {
             updateTaskStatus(tableTask, lastExecStatus);
         }
@@ -202,7 +202,7 @@ public class DataSetTableTaskService {
         if (datasetTableTask.getRate().equalsIgnoreCase(ScheduleType.SIMPLE.name())) {
             datasetTableTask.setStatus(TaskStatus.Stopped.name());
         } else {
-            datasetTableTask = datasetTableTaskMapper.selectByPrimaryKey(datasetTableTask.getId());
+//            datasetTableTask = datasetTableTaskMapper.selectByPrimaryKey(datasetTableTask.getId());
             datasetTableTask.setLastExecStatus(lastExecStatus.name());
             if (StringUtils.isNotEmpty(datasetTableTask.getEnd()) && datasetTableTask.getEnd().equalsIgnoreCase("1")) {
                 BaseGridRequest request = new BaseGridRequest();
@@ -140,7 +140,7 @@ public class ExtractDataService {
             datasetTableTaskLog.setTableId(datasetTable.getId());
             datasetTableTaskLog.setStatus(JobStatus.Underway.name());
             List<DatasetTableTaskLog> datasetTableTaskLogs = dataSetTableTaskLogService.select(datasetTableTaskLog);
-            return !CollectionUtils.isNotEmpty(datasetTableTaskLogs) || !datasetTableTaskLogs.get(0).getTriggerType().equalsIgnoreCase(TriggerType.Custom.name());
+            return CollectionUtils.isEmpty(datasetTableTaskLogs) || !datasetTableTaskLogs.get(0).getTriggerType().equalsIgnoreCase(TriggerType.Custom.name());
         } else {
             datasetTableTask.setLastExecTime(startTime);
             datasetTableTask.setLastExecStatus(JobStatus.Underway.name());
@@ -159,7 +159,7 @@ public class ExtractDataService {
             return;
         }
         UpdateType updateType = UpdateType.valueOf(type);
+        DatasetTableTaskLog datasetTableTaskLog;

         if (datasetTableFields == null) {
             datasetTableFields = dataSetTableFieldsService.list(DatasetTableField.builder().tableId(datasetTable.getId()).build());
         }
@@ -174,10 +174,10 @@ public class ExtractDataService {
             return o1.getColumnIndex().compareTo(o2.getColumnIndex());
         });

-        DatasetTableTaskLog datasetTableTaskLog = writeDatasetTableTaskLog(datasetTableId, ops);
         switch (updateType) {
             case all_scope: // full update
                 try {
+                    datasetTableTaskLog = writeDatasetTableTaskLog(datasetTableId, ops);
                     createEngineTable(TableUtils.tableName(datasetTableId), datasetTableFields);
                     createEngineTable(TableUtils.tmpName(TableUtils.tableName(datasetTableId)), datasetTableFields);
                     Long execTime = System.currentTimeMillis();
@@ -222,7 +222,7 @@ public class ExtractDataService {
                         toDelete.forEach(datasetTableField -> dataSetTableFieldsService.delete(datasetTableField.getId()));
                     }
                 } catch (Exception e) {
-                    saveErrorLog(datasetTableId, null, e);
+                    saveErrorLog(datasetTableTaskLog, e);
                     updateTableStatus(datasetTableId, datasetTable, JobStatus.Error, null);
                     dropDorisTable(TableUtils.tmpName(TableUtils.tableName(datasetTableId)));
                 } finally {
@@ -233,7 +233,6 @@ public class ExtractDataService {

             case add_scope: // incremental update
                 try {
-                    datasetTableTaskLog = writeDatasetTableTaskLog(datasetTableId, ops);
                     Long execTime = System.currentTimeMillis();
                     if (!engineService.isSimpleMode()) {
                         generateTransFile("incremental_add", datasetTable, datasource, datasetTableFields, null);
@@ -245,7 +244,7 @@ public class ExtractDataService {
                     saveSuccessLog(datasetTableTaskLog);
                     updateTableStatus(datasetTableId, datasetTable, JobStatus.Completed, execTime);
                 } catch (Exception e) {
-                    saveErrorLog(datasetTableId, null, e);
+                    saveErrorLog(datasetTableTaskLog, e);
                     updateTableStatus(datasetTableId, datasetTable, JobStatus.Error, null);
                 } finally {
                     deleteFile("incremental_add", datasetTableId);
@@ -312,7 +311,7 @@ public class ExtractDataService {
             msg = true;
             lastExecStatus = JobStatus.Completed;
         } catch (Exception e) {
-            saveErrorLog(datasetTableId, taskId, e);
+            saveErrorLog(datasetTableTaskLog, e);
             msg = false;
             lastExecStatus = JobStatus.Error;
             execTime = null;
@@ -372,7 +371,7 @@ public class ExtractDataService {
             msg = true;
             lastExecStatus = JobStatus.Completed;
         } catch (Exception e) {
-            saveErrorLog(datasetTableId, taskId, e);
+            saveErrorLog(datasetTableTaskLog, e);
            msg = false;
            lastExecStatus = JobStatus.Error;
            execTime = null;
@@ -592,26 +591,14 @@ public class ExtractDataService {
         dataSetTableTaskLogService.save(datasetTableTaskLog);
     }

-    private void saveErrorLog(String datasetTableId, String taskId, Exception e) {
-        LogUtil.error("Extract data error: " + datasetTableId, e);
-        DatasetTableTaskLog datasetTableTaskLog = new DatasetTableTaskLog();
-        datasetTableTaskLog.setTableId(datasetTableId);
-        datasetTableTaskLog.setStatus(JobStatus.Underway.name());
-        if (StringUtils.isNotEmpty(taskId)) {
-            datasetTableTaskLog.setTaskId(taskId);
-        }
-        List<DatasetTableTaskLog> datasetTableTaskLogs = dataSetTableTaskLogService.select(datasetTableTaskLog);
-        if (CollectionUtils.isNotEmpty(datasetTableTaskLogs)) {
-            datasetTableTaskLog = datasetTableTaskLogs.get(0);
-            datasetTableTaskLog.setStatus(JobStatus.Error.name());
-            datasetTableTaskLog.setInfo(e.getMessage());
-            datasetTableTaskLog.setEndTime(System.currentTimeMillis());
-            dataSetTableTaskLogService.save(datasetTableTaskLog);
-        }
-    }
+    private void saveErrorLog(DatasetTableTaskLog datasetTableTaskLog, Exception e) {
+        LogUtil.error("Extract data error: " + datasetTableTaskLog.getTaskId(), e);
+        datasetTableTaskLog.setStatus(JobStatus.Error.name());
+        datasetTableTaskLog.setInfo(e.getMessage());
+        datasetTableTaskLog.setEndTime(System.currentTimeMillis());
+        dataSetTableTaskLogService.save(datasetTableTaskLog);
+    }

     private void createEngineTable(String tableName, List<DatasetTableField> datasetTableFields) throws Exception {
         Datasource engine = engineService.getDeEngine();
         JdbcProvider jdbcProvider = CommonBeanFactory.getBean(JdbcProvider.class);
@@ -671,14 +658,9 @@ public class ExtractDataService {
         datasetTableTaskLog.setTaskId(taskId);
         datasetTableTaskLog.setStatus(JobStatus.Underway.name());
         datasetTableTaskLog.setTriggerType(TriggerType.Cron.name());
-        List<DatasetTableTaskLog> datasetTableTaskLogs = dataSetTableTaskLogService.select(datasetTableTaskLog);
-        if (CollectionUtils.isEmpty(datasetTableTaskLogs)) {
-            datasetTableTaskLog.setStartTime(System.currentTimeMillis());
-            dataSetTableTaskLogService.save(datasetTableTaskLog);
-            return datasetTableTaskLog;
-        } else {
-            return datasetTableTaskLogs.get(0);
-        }
+        datasetTableTaskLog.setStartTime(System.currentTimeMillis());
+        dataSetTableTaskLogService.save(datasetTableTaskLog);
+        return datasetTableTaskLog;
     }

     private DatasetTableTaskLog getDatasetTableTaskLog(String datasetTableId, String taskId, Long startTime) {
@@ -687,7 +669,7 @@ public class ExtractDataService {
         datasetTableTaskLog.setTaskId(taskId);
         datasetTableTaskLog.setStatus(JobStatus.Underway.name());
         datasetTableTaskLog.setTriggerType(TriggerType.Custom.name());
-        for (int i = 0; i < 10; i++) {
+        for (int i = 0; i < 5; i++) {
             List<DatasetTableTaskLog> datasetTableTaskLogs = dataSetTableTaskLogService.select(datasetTableTaskLog);
             if (CollectionUtils.isNotEmpty(datasetTableTaskLogs)) {
                 return datasetTableTaskLogs.get(0);
BIN  drivers/ImpalaJDBC41.jar  (new file; binary not shown)
@@ -213,5 +213,5 @@ export function checkCustomDs() {
     loading: true
   })
 }

+export const disabledSyncDs = ['es', 'ck', 'mongo', 'redshift', 'hive', 'impala']
 export default { loadTable, getScene, addGroup, delGroup, addTable, delTable, groupTree, checkCustomDs }
@@ -69,7 +69,7 @@
 </template>

 <script>
-import {listDatasource, post, isKettleRunning} from '@/api/dataset/dataset'
+import {listDatasource, post, isKettleRunning, disabledSyncDs} from '@/api/dataset/dataset'
 import {engineMode} from "@/api/system/engine";

 export default {
@@ -94,7 +94,7 @@ export default {
       selectedDatasource: {},
       engineMode: 'local',
       disabledSync: true,
-      disabledSyncDs: ['es', 'ck', 'mongo', 'redshift', 'hive']
+      disabledSyncDs: disabledSyncDs
     }
   },
   watch: {
@@ -99,7 +99,7 @@
 </template>

 <script>
-import {post, listDatasource, isKettleRunning} from '@/api/dataset/dataset'
+import {post, listDatasource, isKettleRunning, disabledSyncDs} from '@/api/dataset/dataset'
 import {codemirror} from 'vue-codemirror'
 import {getTable} from '@/api/dataset/dataset'
 // core styles
@@ -160,7 +160,8 @@ export default {
       kettleRunning: false,
       selectedDatasource: {},
       engineMode: 'local',
-      disabledSync: true
+      disabledSync: true,
+      disabledSyncDs: disabledSyncDs
     }
   },
   computed: {
@@ -400,6 +400,7 @@ export default {
       allTypes: [
         {name: 'mysql', label: 'MySQL', type: 'jdbc', extraParams: 'characterEncoding=UTF-8&connectTimeout=5000&useSSL=false&allowPublicKeyRetrieval=true'},
         {name: 'hive', label: 'Apache Hive', type: 'jdbc', extraParams: ''},
+        {name: 'impala', label: 'Apache Impala', type: 'jdbc', extraParams: 'auth=noSasl'},
         {name: 'oracle', label: 'Oracle', type: 'jdbc'},
         {name: 'sqlServer', label: 'SQL Server', type: 'jdbc', extraParams: ''},
         {name: 'pg', label: 'PostgreSQL', type: 'jdbc', extraParams: ''},