feat: support impala
commit a3b31a21cb
parent 98dc2436a2
@@ -4,6 +4,7 @@ public enum DatasourceTypes {
     excel("excel", "excel", "", "", "", "", ""),
     mysql("mysql", "mysql", "com.mysql.jdbc.Driver", "`", "`", "'", "'"),
     hive("hive", "hive", "org.apache.hive.jdbc.HiveDriver", "`", "`", "'", "'"),
+    impala("impala", "impala", "org.apache.hive.jdbc.HiveDriver", "`", "`", "'", "'"),
     mariadb("mariadb", "mariadb", "com.mysql.jdbc.Driver", "`", "`", "'", "'"),
     ds_doris("ds_doris", "ds_doris", "com.mysql.jdbc.Driver", "`", "`", "'", "'"),
     pg("pg", "pg", "org.postgresql.Driver", "\"", "\"", "\"", "\""),
@@ -0,0 +1,28 @@
+package io.dataease.dto.datasource;
+
+import lombok.Getter;
+import lombok.Setter;
+import org.apache.commons.lang3.StringUtils;
+
+@Getter
+@Setter
+public class ImpalaConfiguration extends JdbcConfiguration {
+
+    private String driver = "com.cloudera.impala.jdbc.Driver";
+    private String extraParams = "";
+
+    public String getJdbc() {
+        if (StringUtils.isEmpty(extraParams.trim())) {
+            return "jdbc:impala://HOSTNAME:PORT/DATABASE"
+                    .replace("HOSTNAME", getHost().trim())
+                    .replace("PORT", getPort().toString().trim())
+                    .replace("DATABASE", getDataBase().trim());
+        } else {
+            return "jdbc:impala://HOSTNAME:PORT/DATABASE;EXTRA_PARAMS"
+                    .replace("HOSTNAME", getHost().trim())
+                    .replace("PORT", getPort().toString().trim())
+                    .replace("DATABASE", getDataBase().trim())
+                    .replace("EXTRA_PARAMS", getExtraParams().trim());
+        }
+    }
+}
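Note (not part of the commit): the two template strings above expand to URLs like the ones below. This is a minimal standalone sketch; the host, port, and database values are invented for illustration, and the "auth=noSasl" extra parameter mirrors the default the frontend sets for Impala further down in this diff.

public class ImpalaJdbcUrlDemo {
    public static void main(String[] args) {
        // Without extra params, getJdbc() returns the plain URL form.
        String plain = "jdbc:impala://HOSTNAME:PORT/DATABASE"
                .replace("HOSTNAME", "impala-host")   // example host
                .replace("PORT", "21050")             // example port
                .replace("DATABASE", "default");      // example database
        System.out.println(plain);       // jdbc:impala://impala-host:21050/default

        // With extra params, they are appended after a semicolon.
        String withParams = "jdbc:impala://HOSTNAME:PORT/DATABASE;EXTRA_PARAMS"
                .replace("HOSTNAME", "impala-host")
                .replace("PORT", "21050")
                .replace("DATABASE", "default")
                .replace("EXTRA_PARAMS", "auth=noSasl");
        System.out.println(withParams);  // jdbc:impala://impala-host:21050/default;auth=noSasl
    }
}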
@@ -54,6 +54,8 @@ public class ProviderFactory implements ApplicationContextAware {
                 return context.getBean("redshiftQuery", QueryProvider.class);
             case hive:
                 return context.getBean("hiveQuery", QueryProvider.class);
+            case impala:
+                return context.getBean("impalaQuery", QueryProvider.class);
             case db2:
                 return context.getBean("db2Query", QueryProvider.class);
             case api:
@@ -154,7 +154,7 @@ public class JdbcProvider extends DatasourceProvider {
             while (resultSet.next()) {
                 String tableName = resultSet.getString("TABLE_NAME");
                 String database;
-                if (datasourceRequest.getDatasource().getType().equalsIgnoreCase(DatasourceTypes.ck.name())) {
+                if (datasourceRequest.getDatasource().getType().equalsIgnoreCase(DatasourceTypes.ck.name()) || datasourceRequest.getDatasource().getType().equalsIgnoreCase(DatasourceTypes.impala.name())) {
                     database = resultSet.getString("TABLE_SCHEM");
                 } else {
                     database = resultSet.getString("TABLE_CAT");
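Background (not from the commit): the TABLE_SCHEM / TABLE_CAT distinction comes from java.sql.DatabaseMetaData, where some drivers report the database name in the schema column and others in the catalog column. Below is a minimal sketch of that metadata call, assuming the Impala JDBC driver is on the classpath; the connection URL is illustrative only.

import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.DriverManager;
import java.sql.ResultSet;

public class TableCatalogDemo {
    public static void main(String[] args) throws Exception {
        // Hypothetical connection details; the point is the metadata columns, not the URL.
        try (Connection conn = DriverManager.getConnection("jdbc:impala://impala-host:21050/default;auth=noSasl")) {
            DatabaseMetaData meta = conn.getMetaData();
            try (ResultSet rs = meta.getTables(null, null, "%", new String[]{"TABLE", "VIEW"})) {
                while (rs.next()) {
                    String tableName = rs.getString("TABLE_NAME");
                    // Per the hunk above, ClickHouse and Impala expose the database
                    // under TABLE_SCHEM; other drivers use TABLE_CAT.
                    String database = rs.getString("TABLE_SCHEM");
                    if (database == null) {
                        database = rs.getString("TABLE_CAT");
                    }
                    System.out.println(database + "." + tableName);
                }
            }
        }
    }
}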
@@ -485,6 +485,14 @@ public class JdbcProvider extends DatasourceProvider {
                 driver = hiveConfiguration.getDriver();
                 jdbcurl = hiveConfiguration.getJdbc();
                 break;
+            case impala:
+                ImpalaConfiguration impalaConfiguration = new Gson().fromJson(datasourceRequest.getDatasource().getConfiguration(), ImpalaConfiguration.class);
+                System.out.println(new Gson().toJson(impalaConfiguration));
+                username = impalaConfiguration.getUsername();
+                password = impalaConfiguration.getPassword();
+                driver = impalaConfiguration.getDriver();
+                jdbcurl = impalaConfiguration.getJdbc();
+                break;
             case db2:
                 Db2Configuration db2Configuration = new Gson().fromJson(datasourceRequest.getDatasource().getConfiguration(), Db2Configuration.class);
                 username = db2Configuration.getUsername();
@@ -586,6 +594,13 @@ public class JdbcProvider extends DatasourceProvider {
                 dataSource.setUrl(hiveConfiguration.getJdbc());
                 jdbcConfiguration = hiveConfiguration;
                 break;
+            case impala:
+                ImpalaConfiguration impalaConfiguration = new Gson().fromJson(datasourceRequest.getDatasource().getConfiguration(), ImpalaConfiguration.class);
+                dataSource.setPassword(impalaConfiguration.getPassword());
+                dataSource.setDriverClassName(impalaConfiguration.getDriver());
+                dataSource.setUrl(impalaConfiguration.getJdbc());
+                jdbcConfiguration = impalaConfiguration;
+                break;
             case db2:
                 Db2Configuration db2Configuration = new Gson().fromJson(datasourceRequest.getDatasource().getConfiguration(), Db2Configuration.class);
                 dataSource.setPassword(db2Configuration.getPassword());
@@ -614,6 +629,7 @@ public class JdbcProvider extends DatasourceProvider {
             case engine_doris:
             case ds_doris:
             case hive:
+            case impala:
                 return "show tables";
             case sqlServer:
                 SqlServerConfiguration sqlServerConfiguration = new Gson().fromJson(datasourceRequest.getDatasource().getConfiguration(), SqlServerConfiguration.class);
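Putting the pieces above together (driver class, URL template, and the "show tables" check query), a connection against an Impala instance can be smoke-tested roughly as follows. This sketch is not part of the commit: it assumes drivers/ImpalaJDBC41.jar is on the classpath and uses placeholder connection details.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class ImpalaConnectionSmokeTest {
    public static void main(String[] args) throws Exception {
        // Driver class taken from ImpalaConfiguration; host/port/database are placeholders.
        Class.forName("com.cloudera.impala.jdbc.Driver");
        String url = "jdbc:impala://impala-host:21050/default;auth=noSasl";
        try (Connection conn = DriverManager.getConnection(url);
             Statement stmt = conn.createStatement();
             ResultSet rs = stmt.executeQuery("show tables")) {  // same check query the provider uses
            while (rs.next()) {
                System.out.println(rs.getString(1));
            }
        }
    }
}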
@@ -2,16 +2,16 @@ package io.dataease.provider.query.hive;
 
 import io.dataease.provider.SQLConstants;
 
-import static io.dataease.commons.constants.DatasourceTypes.mysql;
+import static io.dataease.commons.constants.DatasourceTypes.hive;
 
 /**
  * @Author gin
  * @Date 2021/7/8 7:22 下午
  */
 public class HiveConstants extends SQLConstants {
-    public static final String KEYWORD_TABLE = mysql.getKeywordPrefix() + "%s" + mysql.getKeywordSuffix();
+    public static final String KEYWORD_TABLE = hive.getKeywordPrefix() + "%s" + hive.getKeywordSuffix();
 
-    public static final String KEYWORD_FIX = "%s." + mysql.getKeywordPrefix() + "%s" + mysql.getKeywordSuffix();
+    public static final String KEYWORD_FIX = "%s." + hive.getKeywordPrefix() + "%s" + hive.getKeywordSuffix();
 
     public static final String UNIX_TIMESTAMP = "unix_timestamp(%s)";
 
@@ -0,0 +1,39 @@
+package io.dataease.provider.query.impala;
+
+import io.dataease.provider.SQLConstants;
+
+import static io.dataease.commons.constants.DatasourceTypes.mysql;
+
+public class ImpalaConstants extends SQLConstants {
+    public static final String KEYWORD_TABLE = mysql.getKeywordPrefix() + "%s" + mysql.getKeywordSuffix();
+
+    public static final String KEYWORD_FIX = "%s." + mysql.getKeywordPrefix() + "%s" + mysql.getKeywordSuffix();
+
+    public static final String UNIX_TIMESTAMP = "unix_timestamp(%s)";
+
+    public static final String DATE_FORMAT = "from_unixtime(UNIX_TIMESTAMP(%s), '%s')";
+
+    public static final String FROM_UNIXTIME = "FROM_UNIXTIME(%s,'%s')";
+
+    public static final String STR_TO_DATE = "STR_TO_DATE(%s,'%s')";
+
+    public static final String CAST = "CAST(%s AS %s)";
+
+    public static final String DEFAULT_DATE_FORMAT = "yyyy-MM-dd HH:mm:ss";
+
+    public static final String DEFAULT_INT_FORMAT = "BIGINT";
+
+    public static final String DEFAULT_FLOAT_FORMAT = "DECIMAL(20,2)";
+
+    public static final String WHERE_VALUE_NULL = "(NULL,'')";
+
+    public static final String WHERE_VALUE_VALUE = "'%s'";
+
+    public static final String AGG_COUNT = "COUNT(*)";
+
+    public static final String AGG_FIELD = "%s(%s)";
+
+    public static final String WHERE_BETWEEN = "'%s' AND '%s'";
+
+    public static final String BRACKETS = "(%s)";
+}
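Aside (not part of the commit): these constants are printf-style templates, presumably expanded with String.format by the Impala query provider, whose own diff was suppressed below as too large. A small illustration, using an invented column name:

public class ImpalaConstantsDemo {
    public static void main(String[] args) {
        // Templates copied from ImpalaConstants above; "order_time" is an invented column name.
        String cast = String.format("CAST(%s AS %s)", "order_time", "BIGINT");
        String dateFmt = String.format("from_unixtime(UNIX_TIMESTAMP(%s), '%s')", "order_time", "yyyy-MM-dd HH:mm:ss");
        System.out.println(cast);     // CAST(order_time AS BIGINT)
        System.out.println(dateFmt);  // from_unixtime(UNIX_TIMESTAMP(order_time), 'yyyy-MM-dd HH:mm:ss')
    }
}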
(File diff suppressed because it is too large.)
@@ -259,25 +259,6 @@ public class ChartViewService {
         return calcData(view, request, request.isCache());
     }
 
-//    private void checkPermissions(List<? extends ChartViewFieldBaseDTO> chartViewFieldDTOS, List<DatasetTableField> fields, List<String> desensitizationList, Boolean alowDesensitization) throws Exception {
-//        String filedName = "";
-//        for (ChartViewFieldBaseDTO chartViewFieldDTO : chartViewFieldDTOS) {
-//            if (alowDesensitization) {
-//                if (!fields.stream().map(DatasetTableField::getDataeaseName).collect(Collectors.toList()).contains(chartViewFieldDTO.getDataeaseName())) {
-//                    filedName = filedName + chartViewFieldDTO.getName() + " ,";
-//                }
-//            } else {
-//                if (desensitizationList.contains(chartViewFieldDTO.getDataeaseName()) || !fields.stream().map(DatasetTableField::getDataeaseName).collect(Collectors.toList()).contains(chartViewFieldDTO.getDataeaseName())) {
-//                    filedName = filedName + chartViewFieldDTO.getName() + " ,";
-//                }
-//            }
-//        }
-//        filedName = filedName.endsWith(",") ? filedName.substring(0, filedName.length() - 1) : filedName;
-//        if (StringUtils.isNotEmpty(filedName)) {
-//            throw new Exception("以下字段没有权限: " + filedName);
-//        }
-//    }
-
     public ChartViewDTO calcData(ChartViewDTO view, ChartExtRequest requestList, boolean cache) throws Exception {
         if (ObjectUtils.isEmpty(view)) {
             throw new RuntimeException(Translator.get("i18n_chart_delete"));
@@ -101,6 +101,7 @@ public class DataSetTableService {
     @Resource
     private EngineService engineService;
 
+    private static boolean isUpdatingDatasetTableStatus = false;
     private static final String lastUpdateTime = "${__last_update_time__}";
     private static final String currentUpdateTime = "${__current_update_time__}";
 
@@ -2182,6 +2183,21 @@ public class DataSetTableService {
     private UtilMapper utilMapper;
 
     public void updateDatasetTableStatus() {
+        if (this.isUpdatingDatasetTableStatus) {
+            return;
+        } else {
+            this.isUpdatingDatasetTableStatus = true;
+        }
+
+        try {
+            doUpdate();
+        } catch (Exception e) {
+        } finally {
+            this.isUpdatingDatasetTableStatus = false;
+        }
+    }
+
+    private void doUpdate() {
         List<QrtzSchedulerState> qrtzSchedulerStates = qrtzSchedulerStateMapper.selectByExample(null);
         List<String> activeQrtzInstances = qrtzSchedulerStates.stream()
                 .filter(qrtzSchedulerState -> qrtzSchedulerState.getLastCheckinTime()
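Side note (illustrative, not part of the commit): the static boolean guard above leaves a small window between the check and the set where two threads could both pass. The same skip-if-already-running intent is often expressed with an AtomicBoolean, sketched below with a placeholder doUpdate().

import java.util.concurrent.atomic.AtomicBoolean;

public class UpdateGuardSketch {
    private static final AtomicBoolean UPDATING = new AtomicBoolean(false);

    public void updateDatasetTableStatus() {
        // compareAndSet makes the check-and-set a single atomic step.
        if (!UPDATING.compareAndSet(false, true)) {
            return; // another thread is already updating
        }
        try {
            doUpdate();
        } finally {
            UPDATING.set(false);
        }
    }

    private void doUpdate() {
        // placeholder for the real status-sync logic
    }
}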
@@ -2233,7 +2249,6 @@ public class DataSetTableService {
             extractDataService.deleteFile("incremental_delete", jobStoppeddDatasetTable.getId());
         }
     }
-
     /*
      * 判断数组中是否有重复的值
      */
BIN  drivers/ImpalaJDBC41.jar (new file; binary file not shown)
@@ -213,5 +213,5 @@ export function checkCustomDs() {
     loading: true
   })
 }
-
+export const disabledSyncDs= ['es', 'ck', 'mongo', 'redshift', 'hive', 'impala']
 export default { loadTable, getScene, addGroup, delGroup, addTable, delTable, groupTree, checkCustomDs }
@@ -69,7 +69,7 @@
 </template>
 
 <script>
-import {listDatasource, post, isKettleRunning} from '@/api/dataset/dataset'
+import {listDatasource, post, isKettleRunning, disabledSyncDs} from '@/api/dataset/dataset'
 import {engineMode} from "@/api/system/engine";
 
 export default {
@@ -94,7 +94,7 @@ export default {
       selectedDatasource: {},
       engineMode: 'local',
       disabledSync: true,
-      disabledSyncDs: ['es', 'ck', 'mongo', 'redshift', 'hive']
+      disabledSyncDs: disabledSyncDs
     }
   },
   watch: {
@@ -99,7 +99,7 @@
 </template>
 
 <script>
-import {post, listDatasource, isKettleRunning} from '@/api/dataset/dataset'
+import {post, listDatasource, isKettleRunning, disabledSyncDs} from '@/api/dataset/dataset'
 import {codemirror} from 'vue-codemirror'
 import {getTable} from '@/api/dataset/dataset'
 // 核心样式
@@ -160,7 +160,8 @@ export default {
       kettleRunning: false,
       selectedDatasource: {},
       engineMode: 'local',
-      disabledSync: true
+      disabledSync: true,
+      disabledSyncDs: disabledSyncDs
     }
   },
   computed: {
@@ -400,6 +400,7 @@ export default {
       allTypes: [
         {name: 'mysql', label: 'MySQL', type: 'jdbc', extraParams: 'characterEncoding=UTF-8&connectTimeout=5000&useSSL=false&allowPublicKeyRetrieval=true'},
         {name: 'hive', label: 'Apache Hive', type: 'jdbc', extraParams: ''},
+        {name: 'impala', label: 'Apache Impala', type: 'jdbc', extraParams: 'auth=noSasl'},
         {name: 'oracle', label: 'Oracle', type: 'jdbc'},
         {name: 'sqlServer', label: 'SQL Server', type: 'jdbc', extraParams: ''},
         {name: 'pg', label: 'PostgreSQL', type: 'jdbc', extraParams: ''},