diff --git a/backend/src/main/java/io/dataease/auth/api/AuthApi.java b/backend/src/main/java/io/dataease/auth/api/AuthApi.java index f3e2a977e0..e4872b8485 100644 --- a/backend/src/main/java/io/dataease/auth/api/AuthApi.java +++ b/backend/src/main/java/io/dataease/auth/api/AuthApi.java @@ -7,6 +7,8 @@ import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.PostMapping; import org.springframework.web.bind.annotation.RequestMapping; +import java.util.Map; + @Api(tags = "权限:权限管理") @RequestMapping("/api/auth") @@ -26,6 +28,9 @@ public interface AuthApi { @PostMapping("/logout") String logout(); + @PostMapping("/validateName") + Boolean validateName(Map nameDto); + @GetMapping("/test") String test(); diff --git a/backend/src/main/java/io/dataease/auth/filter/JWTFilter.java b/backend/src/main/java/io/dataease/auth/filter/JWTFilter.java index 62316b95a9..f7a4b6ea33 100644 --- a/backend/src/main/java/io/dataease/auth/filter/JWTFilter.java +++ b/backend/src/main/java/io/dataease/auth/filter/JWTFilter.java @@ -55,12 +55,19 @@ public class JWTFilter extends BasicHttpAuthenticationFilter { throw new AuthenticationException(expireMessage); } if (JWTUtils.needRefresh(authorization)){ + String oldAuthorization = authorization; authorization = refreshToken(request, response); + JWTUtils.removeTokenExpire(oldAuthorization); } + // 删除老的操作时间 + JWTUtils.removeTokenExpire(authorization); + // 设置新的操作时间 + JWTUtils.addTokenExpire(authorization); JWTToken token = new JWTToken(authorization); Subject subject = getSubject(request, response); // 提交给realm进行登入,如果错误他会抛出异常并被捕获 subject.login(token); + return true; } @@ -98,10 +105,10 @@ public class JWTFilter extends BasicHttpAuthenticationFilter { String password = user.getPassword(); // 删除老token操作时间 - JWTUtils.removeTokenExpire(token); + // JWTUtils.removeTokenExpire(token); String newToken = JWTUtils.sign(tokenInfo, password); // 记录新token操作时间 - JWTUtils.addTokenExpire(newToken); + // JWTUtils.addTokenExpire(newToken); JWTToken jwtToken = new JWTToken(newToken); this.getSubject(request, response).login(jwtToken); diff --git a/backend/src/main/java/io/dataease/auth/server/AuthServer.java b/backend/src/main/java/io/dataease/auth/server/AuthServer.java index a3a1640706..6c454034f7 100644 --- a/backend/src/main/java/io/dataease/auth/server/AuthServer.java +++ b/backend/src/main/java/io/dataease/auth/server/AuthServer.java @@ -82,6 +82,15 @@ public class AuthServer implements AuthApi { return "success"; } + @Override + public Boolean validateName(@RequestBody Map nameDto) { + String userName = nameDto.get("userName"); + if (StringUtils.isEmpty(userName)) return false; + SysUserEntity userEntity = authUserService.getUserByName(userName); + if (ObjectUtils.isEmpty(userEntity)) return false; + return true; + } + @Override public Boolean isLogin() { return null; diff --git a/backend/src/main/java/io/dataease/auth/service/impl/ShiroServiceImpl.java b/backend/src/main/java/io/dataease/auth/service/impl/ShiroServiceImpl.java index b4f005e273..88f361fe18 100644 --- a/backend/src/main/java/io/dataease/auth/service/impl/ShiroServiceImpl.java +++ b/backend/src/main/java/io/dataease/auth/service/impl/ShiroServiceImpl.java @@ -42,9 +42,14 @@ public class ShiroServiceImpl implements ShiroService { //验证链接 filterChainDefinitionMap.put("/api/link/validate**", ANON); + filterChainDefinitionMap.put("/panel/group/findOne/**", ANON); + filterChainDefinitionMap.put("/chart/view/getData/**", ANON); + + 
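Note on the validateName endpoint added above: both the AuthApi interface and the AuthServer implementation declare the body as a raw Map, so nameDto.get("userName") yields an Object and the assignment to String will not compile without a cast. A minimal sketch of the same handler with a parameterized map (logic unchanged from the patch; the /api/auth/validateName route is opened as ANON in ShiroServiceImpl just below):

    // Same behavior as the patch, with Map<String, String> so the lookup
    // compiles without a cast.
    @Override
    public Boolean validateName(@RequestBody Map<String, String> nameDto) {
        String userName = nameDto.get("userName");
        if (StringUtils.isEmpty(userName)) {
            return false;
        }
        SysUserEntity userEntity = authUserService.getUserByName(userName);
        return !ObjectUtils.isEmpty(userEntity);
    }
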
filterChainDefinitionMap.put("/api/auth/login", ANON); + filterChainDefinitionMap.put("/api/auth/validateName", ANON); filterChainDefinitionMap.put("/unauth", ANON); filterChainDefinitionMap.put("/display/**", ANON); filterChainDefinitionMap.put("/tokenExpired", ANON); diff --git a/backend/src/main/java/io/dataease/auth/util/JWTUtils.java b/backend/src/main/java/io/dataease/auth/util/JWTUtils.java index 1b4026dede..9b171a936f 100644 --- a/backend/src/main/java/io/dataease/auth/util/JWTUtils.java +++ b/backend/src/main/java/io/dataease/auth/util/JWTUtils.java @@ -20,9 +20,9 @@ public class JWTUtils { // token过期时间1min (过期会自动刷新续命 目的是避免一直都是同一个token ) - private static final long EXPIRE_TIME = 5*60*1000; + private static final long EXPIRE_TIME = 1*60*1000; // 登录间隔时间10min 超过这个时间强制重新登录 - private static final long Login_Interval = 30*60*1000; + private static final long Login_Interval = 10*60*1000; /** @@ -81,8 +81,17 @@ public class JWTUtils { public static boolean loginExpire(String token){ Long now = System.currentTimeMillis(); Long lastOperateTime = tokenLastOperateTime(token); - if (lastOperateTime == null) return true; - return now - lastOperateTime > Login_Interval; + boolean isExpire = false; + if (lastOperateTime != null) { + isExpire = now - lastOperateTime > Login_Interval; + } + if (isExpire) { + System.out.println("-----------------------"); + System.out.println("-----上次操作时间是["+lastOperateTime+"]-----"); + System.out.println("-----当前操作时间是["+now+"]-----"); + System.out.println("-----------------------"); + } + return isExpire; } public static Date getExp(String token) { diff --git a/backend/src/main/java/io/dataease/config/CommonConfig.java b/backend/src/main/java/io/dataease/config/CommonConfig.java index cfe2d068f9..bb9cb36cbb 100644 --- a/backend/src/main/java/io/dataease/config/CommonConfig.java +++ b/backend/src/main/java/io/dataease/config/CommonConfig.java @@ -2,7 +2,6 @@ package io.dataease.config; import com.fit2cloud.autoconfigure.QuartzAutoConfiguration; import io.dataease.commons.utils.CommonThreadPool; -import org.apache.spark.sql.SparkSession; import org.pentaho.di.core.KettleEnvironment; import org.pentaho.di.repository.filerep.KettleFileRepository; import org.pentaho.di.repository.filerep.KettleFileRepositoryMeta; @@ -32,31 +31,31 @@ public class CommonConfig { // return configuration; // } - @Bean - @ConditionalOnMissingBean - public SparkSession javaSparkSession() { - SparkSession spark = SparkSession.builder() - .appName(env.getProperty("spark.appName", "DataeaseJob")) - .master(env.getProperty("spark.master", "local[*]")) - .config("spark.scheduler.mode", env.getProperty("spark.scheduler.mode", "FAIR")) - .config("spark.serializer", env.getProperty("spark.serializer", "org.apache.spark.serializer.KryoSerializer")) - .config("spark.executor.cores", env.getProperty("spark.executor.cores", "8")) - .config("spark.executor.memory", env.getProperty("spark.executor.memory", "6442450944b")) - .config("spark.locality.wait", env.getProperty("spark.locality.wait", "600000")) - .config("spark.maxRemoteBlockSizeFetchToMem", env.getProperty("spark.maxRemoteBlockSizeFetchToMem", "2000m")) - .config("spark.shuffle.detectCorrupt", env.getProperty("spark.shuffle.detectCorrupt", "false")) - .config("spark.shuffle.service.enabled", env.getProperty("spark.shuffle.service.enabled", "true")) - .config("spark.sql.adaptive.enabled", env.getProperty("spark.sql.adaptive.enabled", "true")) - .config("spark.sql.adaptive.shuffle.targetPostShuffleInputSize", 
env.getProperty("spark.sql.adaptive.shuffle.targetPostShuffleInputSize", "200M")) - .config("spark.sql.broadcastTimeout", env.getProperty("spark.sql.broadcastTimeout", "12000")) - .config("spark.sql.retainGroupColumns", env.getProperty("spark.sql.retainGroupColumns", "false")) - .config("spark.sql.sortMergeJoinExec.buffer.in.memory.threshold", env.getProperty("spark.sql.sortMergeJoinExec.buffer.in.memory.threshold", "100000")) - .config("spark.sql.sortMergeJoinExec.buffer.spill.threshold", env.getProperty("spark.sql.sortMergeJoinExec.buffer.spill.threshold", "100000")) - .config("spark.sql.variable.substitute", env.getProperty("spark.sql.variable.substitute", "false")) - .config("spark.temp.expired.time", env.getProperty("spark.temp.expired.time", "3600")) - .getOrCreate(); - return spark; - } +// @Bean +// @ConditionalOnMissingBean +// public SparkSession javaSparkSession() { +// SparkSession spark = SparkSession.builder() +// .appName(env.getProperty("spark.appName", "DataeaseJob")) +// .master(env.getProperty("spark.master", "local[*]")) +// .config("spark.scheduler.mode", env.getProperty("spark.scheduler.mode", "FAIR")) +//// .config("spark.serializer", env.getProperty("spark.serializer", "org.apache.spark.serializer.KryoSerializer")) +//// .config("spark.executor.cores", env.getProperty("spark.executor.cores", "8")) +//// .config("spark.executor.memory", env.getProperty("spark.executor.memory", "6442450944b")) +//// .config("spark.locality.wait", env.getProperty("spark.locality.wait", "600000")) +//// .config("spark.maxRemoteBlockSizeFetchToMem", env.getProperty("spark.maxRemoteBlockSizeFetchToMem", "2000m")) +//// .config("spark.shuffle.detectCorrupt", env.getProperty("spark.shuffle.detectCorrupt", "false")) +//// .config("spark.shuffle.service.enabled", env.getProperty("spark.shuffle.service.enabled", "true")) +//// .config("spark.sql.adaptive.enabled", env.getProperty("spark.sql.adaptive.enabled", "true")) +//// .config("spark.sql.adaptive.shuffle.targetPostShuffleInputSize", env.getProperty("spark.sql.adaptive.shuffle.targetPostShuffleInputSize", "200M")) +//// .config("spark.sql.broadcastTimeout", env.getProperty("spark.sql.broadcastTimeout", "12000")) +//// .config("spark.sql.retainGroupColumns", env.getProperty("spark.sql.retainGroupColumns", "false")) +//// .config("spark.sql.sortMergeJoinExec.buffer.in.memory.threshold", env.getProperty("spark.sql.sortMergeJoinExec.buffer.in.memory.threshold", "100000")) +//// .config("spark.sql.sortMergeJoinExec.buffer.spill.threshold", env.getProperty("spark.sql.sortMergeJoinExec.buffer.spill.threshold", "100000")) +//// .config("spark.sql.variable.substitute", env.getProperty("spark.sql.variable.substitute", "false")) +//// .config("spark.temp.expired.time", env.getProperty("spark.temp.expired.time", "3600")) +// .getOrCreate(); +// return spark; +// } @Bean @ConditionalOnMissingBean diff --git a/backend/src/main/java/io/dataease/listener/AppStartInitDataSourceListener.java b/backend/src/main/java/io/dataease/listener/AppStartInitDataSourceListener.java index 632cb2e767..245d7c674a 100644 --- a/backend/src/main/java/io/dataease/listener/AppStartInitDataSourceListener.java +++ b/backend/src/main/java/io/dataease/listener/AppStartInitDataSourceListener.java @@ -1,21 +1,12 @@ package io.dataease.listener; -import io.dataease.base.domain.DatasetTable; -import io.dataease.base.domain.DatasetTableExample; -import io.dataease.base.domain.DatasetTableField; -import io.dataease.base.mapper.DatasetTableMapper; -import 
io.dataease.commons.utils.CommonThreadPool; import io.dataease.datasource.service.DatasourceService; -import io.dataease.service.dataset.DataSetTableFieldsService; -import io.dataease.service.spark.SparkCalc; import org.springframework.boot.context.event.ApplicationReadyEvent; import org.springframework.context.ApplicationListener; import org.springframework.core.annotation.Order; -import org.springframework.core.env.Environment; import org.springframework.stereotype.Component; import javax.annotation.Resource; -import java.util.List; @Component @Order(value = 2) diff --git a/backend/src/main/java/io/dataease/listener/AppStartReadHBaseListener.java b/backend/src/main/java/io/dataease/listener/AppStartReadHBaseListener.java index f7ba6a1a7d..00383b7090 100644 --- a/backend/src/main/java/io/dataease/listener/AppStartReadHBaseListener.java +++ b/backend/src/main/java/io/dataease/listener/AppStartReadHBaseListener.java @@ -1,52 +1,47 @@ -package io.dataease.listener; - -import io.dataease.base.domain.DatasetTable; -import io.dataease.base.domain.DatasetTableExample; -import io.dataease.base.domain.DatasetTableField; -import io.dataease.base.mapper.DatasetTableMapper; -import io.dataease.commons.utils.CommonThreadPool; -import io.dataease.service.dataset.DataSetTableFieldsService; -import io.dataease.service.spark.SparkCalc; -import org.springframework.boot.context.event.ApplicationReadyEvent; -import org.springframework.context.ApplicationListener; -import org.springframework.core.annotation.Order; -import org.springframework.core.env.Environment; -import org.springframework.stereotype.Component; - -import javax.annotation.Resource; -import java.util.List; - -@Component -@Order(value = 2) -public class AppStartReadHBaseListener implements ApplicationListener { - @Resource - private CommonThreadPool commonThreadPool; - @Resource - private SparkCalc sparkCalc; - @Resource - private Environment env; // 保存了配置文件的信息 - - @Resource - private DatasetTableMapper datasetTableMapper; - @Resource - private DataSetTableFieldsService dataSetTableFieldsService; - - @Override - public void onApplicationEvent(ApplicationReadyEvent applicationReadyEvent) { -// System.out.println("================= Read HBase start ================="); -// // 项目启动,从数据集中找到定时抽取的表,从HBase中读取放入缓存 -// DatasetTableExample datasetTableExample = new DatasetTableExample(); -// datasetTableExample.createCriteria().andModeEqualTo(1); -// List datasetTables = datasetTableMapper.selectByExampleWithBLOBs(datasetTableExample); -// for (DatasetTable table : datasetTables) { -//// commonThreadPool.addTask(() -> { -// try { -// List fields = dataSetTableFieldsService.getFieldsByTableId(table.getId()); -// sparkCalc.getHBaseDataAndCache(table.getId(), fields); -// } catch (Exception e) { -// e.printStackTrace(); -// } -//// }); -// } - } -} +//package io.dataease.listener; +// +//import io.dataease.base.mapper.DatasetTableMapper; +//import io.dataease.commons.utils.CommonThreadPool; +//import io.dataease.service.dataset.DataSetTableFieldsService; +//import org.springframework.boot.context.event.ApplicationReadyEvent; +//import org.springframework.context.ApplicationListener; +//import org.springframework.core.annotation.Order; +//import org.springframework.core.env.Environment; +//import org.springframework.stereotype.Component; +// +//import javax.annotation.Resource; +// +//@Component +//@Order(value = 2) +//public class AppStartReadHBaseListener implements ApplicationListener { +// @Resource +// private CommonThreadPool commonThreadPool; +//// 
@Resource +//// private SparkCalc sparkCalc; +// @Resource +// private Environment env; // 保存了配置文件的信息 +// +// @Resource +// private DatasetTableMapper datasetTableMapper; +// @Resource +// private DataSetTableFieldsService dataSetTableFieldsService; +// +// @Override +// public void onApplicationEvent(ApplicationReadyEvent applicationReadyEvent) { +//// System.out.println("================= Read HBase start ================="); +//// // 项目启动,从数据集中找到定时抽取的表,从HBase中读取放入缓存 +//// DatasetTableExample datasetTableExample = new DatasetTableExample(); +//// datasetTableExample.createCriteria().andModeEqualTo(1); +//// List datasetTables = datasetTableMapper.selectByExampleWithBLOBs(datasetTableExample); +//// for (DatasetTable table : datasetTables) { +////// commonThreadPool.addTask(() -> { +//// try { +//// List fields = dataSetTableFieldsService.getFieldsByTableId(table.getId()); +//// sparkCalc.getHBaseDataAndCache(table.getId(), fields); +//// } catch (Exception e) { +//// e.printStackTrace(); +//// } +////// }); +//// } +// } +//} diff --git a/backend/src/main/java/io/dataease/service/chart/ChartViewService.java b/backend/src/main/java/io/dataease/service/chart/ChartViewService.java index 72493215a7..6ff458e792 100644 --- a/backend/src/main/java/io/dataease/service/chart/ChartViewService.java +++ b/backend/src/main/java/io/dataease/service/chart/ChartViewService.java @@ -1,5 +1,6 @@ package io.dataease.service.chart; +import com.alibaba.fastjson.JSONObject; import com.google.gson.Gson; import com.google.gson.reflect.TypeToken; import io.dataease.base.domain.*; @@ -20,7 +21,6 @@ import io.dataease.dto.chart.Series; import io.dataease.dto.dataset.DataTableInfoDTO; import io.dataease.service.dataset.DataSetTableFieldsService; import io.dataease.service.dataset.DataSetTableService; -import io.dataease.service.spark.SparkCalc; import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.lang3.ObjectUtils; import org.apache.commons.lang3.StringUtils; @@ -28,6 +28,7 @@ import org.springframework.stereotype.Service; import javax.annotation.Resource; import java.math.BigDecimal; +import java.math.RoundingMode; import java.text.MessageFormat; import java.util.*; @@ -43,8 +44,8 @@ public class ChartViewService { private DataSetTableService dataSetTableService; @Resource private DatasourceService datasourceService; - @Resource - private SparkCalc sparkCalc; + // @Resource +// private SparkCalc sparkCalc; @Resource private DataSetTableFieldsService dataSetTableFieldsService; @@ -97,8 +98,6 @@ public class ChartViewService { List yAxis = new Gson().fromJson(view.getYAxis(), new TypeToken>() { }.getType()); - List x = new ArrayList<>(); - List series = new ArrayList<>(); if (CollectionUtils.isEmpty(xAxis) || CollectionUtils.isEmpty(yAxis)) { ChartViewDTO dto = new ChartViewDTO(); BeanUtils.copyBean(dto, view); @@ -146,11 +145,23 @@ public class ChartViewService { data = datasourceProvider.getData(datasourceRequest); } else if (table.getMode() == 1) {// 抽取 // 获取数据集de字段 - List fields = dataSetTableFieldsService.getFieldsByTableId(table.getId()); - data = sparkCalc.getData(table.getId(), fields, xAxis, yAxis, "tmp_" + view.getId().split("-")[0], extFilterList); +// List fields = dataSetTableFieldsService.getFieldsByTableId(table.getId()); +// data = sparkCalc.getData(table.getId(), fields, xAxis, yAxis, "tmp_" + view.getId().split("-")[0], extFilterList); + + // 连接doris,构建doris数据源查询 + Datasource ds = dorisDatasource(); + DatasourceProvider datasourceProvider = 
ProviderFactory.getProvider(ds.getType()); + DatasourceRequest datasourceRequest = new DatasourceRequest(); + datasourceRequest.setDatasource(ds); + String tableName = "ds_" + table.getId().replaceAll("-", "_"); + datasourceRequest.setTable(tableName); + datasourceRequest.setQuery(getSQL(ds.getType(), tableName, xAxis, yAxis, extFilterList)); + data = datasourceProvider.getData(datasourceRequest); } // 图表组件可再扩展 + List x = new ArrayList<>(); + List series = new ArrayList<>(); for (ChartViewFieldDTO y : yAxis) { Series series1 = new Series(); series1.setName(y.getName()); @@ -177,9 +188,29 @@ public class ChartViewService { } } } + // table组件 + List fields = new ArrayList<>(); + List> tableRow = new ArrayList<>(); + fields.addAll(xAxis); + fields.addAll(yAxis); + data.forEach(ele -> { + Map d = new HashMap<>(); + for (int i = 0; i < fields.size(); i++) { + ChartViewFieldDTO chartViewFieldDTO = fields.get(i); + if (chartViewFieldDTO.getDeType() == 0 || chartViewFieldDTO.getDeType() == 1) { + d.put(fields.get(i).getOriginName(), ele[i]); + } else if (chartViewFieldDTO.getDeType() == 2 || chartViewFieldDTO.getDeType() == 3) { + d.put(fields.get(i).getOriginName(), new BigDecimal(ele[i]).setScale(2, RoundingMode.HALF_UP)); + } + } + tableRow.add(d); + }); + Map map = new HashMap<>(); map.put("x", x); map.put("series", series); + map.put("fields", fields); + map.put("tableRow", tableRow); ChartViewDTO dto = new ChartViewDTO(); BeanUtils.copyBean(dto, view); @@ -214,6 +245,24 @@ public class ChartViewService { return filter.toString(); } + public Datasource dorisDatasource() { + JSONObject jsonObject = new JSONObject(); + jsonObject.put("dataSourceType", "jdbc"); + jsonObject.put("dataBase", "example_db"); + jsonObject.put("username", "root"); + jsonObject.put("password", "dataease"); + jsonObject.put("host", "59.110.64.159"); + jsonObject.put("port", "9030"); + + Datasource datasource = new Datasource(); + datasource.setId("doris"); + datasource.setName("doris"); + datasource.setDesc("doris"); + datasource.setType("mysql"); + datasource.setConfiguration(jsonObject.toJSONString()); + return datasource; + } + public String getSQL(String type, String table, List xAxis, List yAxis, List extFilterRequestList) { DatasourceTypes datasourceType = DatasourceTypes.valueOf(type); switch (datasourceType) { @@ -227,10 +276,10 @@ public class ChartViewService { public String transMysqlSQL(String table, List xAxis, List yAxis, List extFilterRequestList) { // 字段汇总 排序等 - String[] field = yAxis.stream().map(y -> "CAST(" + y.getSummary() + "(" + y.getOriginName() + ") AS DECIMAL(20,2)) AS _" + y.getSummary() + "_" + y.getOriginName()).toArray(String[]::new); + String[] field = yAxis.stream().map(y -> "CAST(" + y.getSummary() + "(" + y.getOriginName() + ") AS DECIMAL(20,2)) AS _" + y.getSummary() + "_" + (StringUtils.equalsIgnoreCase(y.getOriginName(), "*") ? "" : y.getOriginName())).toArray(String[]::new); String[] group = xAxis.stream().map(ChartViewFieldDTO::getOriginName).toArray(String[]::new); String[] order = yAxis.stream().filter(y -> StringUtils.isNotEmpty(y.getSort()) && !StringUtils.equalsIgnoreCase(y.getSort(), "none")) - .map(y -> "_" + y.getSummary() + "_" + y.getOriginName() + " " + y.getSort()).toArray(String[]::new); + .map(y -> "_" + y.getSummary() + "_" + (StringUtils.equalsIgnoreCase(y.getOriginName(), "*") ? 
"" : y.getOriginName()) + " " + y.getSort()).toArray(String[]::new); String sql = MessageFormat.format("SELECT {0},{1} FROM {2} WHERE 1=1 {3} GROUP BY {4} ORDER BY null,{5}", StringUtils.join(group, ","), @@ -245,7 +294,19 @@ public class ChartViewService { // 如果是对结果字段过滤,则再包裹一层sql String[] resultFilter = yAxis.stream().filter(y -> CollectionUtils.isNotEmpty(y.getFilter()) && y.getFilter().size() > 0) .map(y -> { - String[] s = y.getFilter().stream().map(f -> "AND _" + y.getSummary() + "_" + y.getOriginName() + transMysqlFilterTerm(f.getTerm()) + f.getValue()).toArray(String[]::new); + String[] s = y.getFilter().stream().map(f -> { + StringBuilder filter = new StringBuilder(); + filter.append("AND _").append(y.getSummary()).append("_").append(StringUtils.equalsIgnoreCase(y.getOriginName(), "*") ? "" : y.getOriginName()).append(transMysqlFilterTerm(f.getTerm())); + if (StringUtils.containsIgnoreCase(f.getTerm(), "null")) { + } else if (StringUtils.containsIgnoreCase(f.getTerm(), "in")) { + filter.append("('").append(StringUtils.join(f.getValue(), "','")).append("')"); + } else if (StringUtils.containsIgnoreCase(f.getTerm(), "like")) { + filter.append("%").append(f.getValue()).append("%"); + } else { + filter.append(f.getValue()); + } + return filter.toString(); + }).toArray(String[]::new); return StringUtils.join(s, " "); }).toArray(String[]::new); if (resultFilter.length == 0) { @@ -321,7 +382,7 @@ public class ChartViewService { return map; } - public List viewsByIds(List viewIds){ + public List viewsByIds(List viewIds) { ChartViewExample example = new ChartViewExample(); example.createCriteria().andIdIn(viewIds); return chartViewMapper.selectByExample(example); diff --git a/backend/src/main/java/io/dataease/service/dataset/DataSetTableService.java b/backend/src/main/java/io/dataease/service/dataset/DataSetTableService.java index fb74fab3d2..b670fe1b0f 100644 --- a/backend/src/main/java/io/dataease/service/dataset/DataSetTableService.java +++ b/backend/src/main/java/io/dataease/service/dataset/DataSetTableService.java @@ -135,6 +135,19 @@ public class DataSetTableService { dimension.add(field); } }); + // quota add count + DatasetTableField count = DatasetTableField.builder() + .id("count") + .tableId(dataSetTableRequest.getId()) + .originName("*") + .name("记录数*") + .type("INT") + .checked(true) + .columnIndex(999) + .deType(2) + .build(); + quota.add(count); + Map> map = new HashMap<>(); map.put("dimension", dimension); map.put("quota", quota); @@ -637,11 +650,12 @@ public class DataSetTableService { private String saveFile(MultipartFile file) throws Exception { String filename = file.getOriginalFilename(); - File p = new File(path); + String dirPath = path + AuthUtils.getUser().getUsername() + "/"; + File p = new File(dirPath); if (!p.exists()) { p.mkdirs(); } - String filePath = path + AuthUtils.getUser().getUsername() + "/" + filename; + String filePath = dirPath + filename; File f = new File(filePath); FileOutputStream fileOutputStream = new FileOutputStream(f); fileOutputStream.write(file.getBytes()); diff --git a/backend/src/main/java/io/dataease/service/dataset/ExtractDataService.java b/backend/src/main/java/io/dataease/service/dataset/ExtractDataService.java index f9a9c5820b..714521d31b 100644 --- a/backend/src/main/java/io/dataease/service/dataset/ExtractDataService.java +++ b/backend/src/main/java/io/dataease/service/dataset/ExtractDataService.java @@ -1,7 +1,6 @@ package io.dataease.service.dataset; import com.google.gson.Gson; -import 
com.sun.org.apache.bcel.internal.generic.SWITCH; import io.dataease.base.domain.*; import io.dataease.base.mapper.DatasourceMapper; import io.dataease.commons.constants.JobStatus; @@ -13,31 +12,15 @@ import io.dataease.datasource.constants.DatasourceTypes; import io.dataease.datasource.dto.MysqlConfigrationDTO; import io.dataease.dto.dataset.DataSetTaskLogDTO; import io.dataease.dto.dataset.DataTableInfoDTO; -import io.dataease.service.spark.SparkCalc; import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.io.FileUtils; import org.apache.commons.lang3.StringUtils; -import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.TableName; -import org.apache.hadoop.hbase.client.*; -import org.pentaho.big.data.api.cluster.NamedCluster; -import org.pentaho.big.data.api.cluster.NamedClusterService; -import org.pentaho.big.data.api.cluster.service.locator.NamedClusterServiceLocator; -import org.pentaho.big.data.api.cluster.service.locator.impl.NamedClusterServiceLocatorImpl; -import org.pentaho.big.data.api.initializer.ClusterInitializer; -import org.pentaho.big.data.api.initializer.ClusterInitializerProvider; -import org.pentaho.big.data.api.initializer.impl.ClusterInitializerImpl; -import org.pentaho.big.data.impl.cluster.NamedClusterImpl; -import org.pentaho.big.data.impl.cluster.NamedClusterManager; -import org.pentaho.big.data.kettle.plugins.hbase.MappingDefinition; -import org.pentaho.big.data.kettle.plugins.hbase.output.HBaseOutputMeta; +import org.apache.hadoop.hbase.client.Connection; import org.pentaho.di.cluster.SlaveServer; -import org.pentaho.di.core.KettleEnvironment; import org.pentaho.di.core.database.DatabaseMeta; import org.pentaho.di.core.plugins.PluginRegistry; import org.pentaho.di.core.plugins.StepPluginType; -import org.pentaho.di.core.util.EnvUtil; -import org.pentaho.di.engine.configuration.impl.pentaho.DefaultRunConfiguration; import org.pentaho.di.job.Job; import org.pentaho.di.job.JobExecutionConfiguration; import org.pentaho.di.job.JobHopMeta; @@ -45,49 +28,25 @@ import org.pentaho.di.job.JobMeta; import org.pentaho.di.job.entries.special.JobEntrySpecial; import org.pentaho.di.job.entries.success.JobEntrySuccess; import org.pentaho.di.job.entries.trans.JobEntryTrans; -import org.pentaho.di.job.entries.writetolog.JobEntryWriteToLog; import org.pentaho.di.job.entry.JobEntryCopy; import org.pentaho.di.repository.RepositoryDirectoryInterface; import org.pentaho.di.repository.filerep.KettleFileRepository; -import org.pentaho.di.repository.filerep.KettleFileRepositoryMeta; -import org.pentaho.di.trans.TransConfiguration; -import org.pentaho.di.trans.TransExecutionConfiguration; import org.pentaho.di.trans.TransHopMeta; import org.pentaho.di.trans.TransMeta; import org.pentaho.di.trans.step.StepMeta; import org.pentaho.di.trans.steps.tableinput.TableInputMeta; import org.pentaho.di.trans.steps.textfileoutput.TextFileField; -import org.pentaho.di.trans.steps.textfileoutput.TextFileOutput; import org.pentaho.di.trans.steps.textfileoutput.TextFileOutputMeta; -import org.pentaho.di.trans.steps.userdefinedjavaclass.InfoStepDefinition; -import org.pentaho.di.trans.steps.userdefinedjavaclass.UserDefinedJavaClassDef; -import org.pentaho.di.trans.steps.userdefinedjavaclass.UserDefinedJavaClassMeta; import org.pentaho.di.www.SlaveServerJobStatus; -import org.pentaho.runtime.test.RuntimeTest; -import org.pentaho.runtime.test.RuntimeTester; -import org.pentaho.runtime.test.action.RuntimeTestActionHandler; -import 
org.pentaho.runtime.test.action.RuntimeTestActionService; -import org.pentaho.runtime.test.action.impl.RuntimeTestActionServiceImpl; -import org.pentaho.runtime.test.impl.RuntimeTesterImpl; import org.springframework.beans.factory.annotation.Value; import org.springframework.stereotype.Service; -import org.pentaho.di.core.row.ValueMetaInterface; -import scala.annotation.meta.field; import javax.annotation.Resource; -import javax.sound.sampled.Line; import java.io.File; -import java.security.MessageDigest; -import java.sql.ResultSet; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; import java.util.List; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; -import static org.mockito.Mockito.mock; - @Service public class ExtractDataService { @@ -125,8 +84,8 @@ public class ExtractDataService { @Value("${hbase.zookeeper.property.clientPort:2181}") private String zkPort; - @Resource - private SparkCalc sparkCalc; +// @Resource +// private SparkCalc sparkCalc; public void extractData(String datasetTableId, String taskId, String type) { diff --git a/backend/src/main/java/io/dataease/service/panel/PanelGroupService.java b/backend/src/main/java/io/dataease/service/panel/PanelGroupService.java index 6a881798a5..130278b8cc 100644 --- a/backend/src/main/java/io/dataease/service/panel/PanelGroupService.java +++ b/backend/src/main/java/io/dataease/service/panel/PanelGroupService.java @@ -7,6 +7,7 @@ import io.dataease.base.mapper.PanelGroupMapper; import io.dataease.base.mapper.ext.ExtPanelDesignMapper; import io.dataease.base.mapper.ext.ExtPanelGroupMapper; import io.dataease.commons.constants.PanelConstants; +import io.dataease.commons.utils.AuthUtils; import io.dataease.commons.utils.BeanUtils; import io.dataease.controller.request.panel.PanelGroupRequest; import io.dataease.dto.chart.ChartViewDTO; @@ -75,6 +76,7 @@ public class PanelGroupService { if (StringUtils.isEmpty(request.getId())) { request.setId(UUID.randomUUID().toString()); request.setCreateTime(System.currentTimeMillis()); + request.setCreateBy(AuthUtils.getUser().getUsername()); panelGroupMapper.insert(request); } else { panelGroupMapper.updateByPrimaryKeySelective(request); diff --git a/backend/src/main/java/io/dataease/service/spark/SparkCalc.java b/backend/src/main/java/io/dataease/service/spark/SparkCalc.java index 53f8dc0116..f75dd84fb5 100644 --- a/backend/src/main/java/io/dataease/service/spark/SparkCalc.java +++ b/backend/src/main/java/io/dataease/service/spark/SparkCalc.java @@ -1,308 +1,407 @@ -package io.dataease.service.spark; - -import io.dataease.base.domain.DatasetTableField; -import io.dataease.commons.utils.CommonBeanFactory; -import io.dataease.controller.request.chart.ChartExtFilterRequest; -import io.dataease.dto.chart.ChartViewFieldDTO; -import org.apache.commons.collections4.CollectionUtils; -import org.apache.commons.lang3.ObjectUtils; -import org.apache.commons.lang3.StringUtils; -import org.apache.hadoop.hbase.client.Result; -import org.apache.hadoop.hbase.client.Scan; -import org.apache.hadoop.hbase.io.ImmutableBytesWritable; -import org.apache.hadoop.hbase.mapreduce.TableInputFormat; -import org.apache.hadoop.hbase.protobuf.ProtobufUtil; -import org.apache.hadoop.hbase.protobuf.generated.ClientProtos; -import org.apache.hadoop.hbase.util.Bytes; -import org.apache.spark.api.java.JavaPairRDD; -import org.apache.spark.api.java.JavaRDD; -import org.apache.spark.api.java.JavaSparkContext; -import org.apache.spark.api.java.function.FlatMapFunction; 
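Worked example for the "记录数*" (record count) quota field added in DataSetTableService and the "*"-aware aliasing in ChartViewService.transMysqlSQL earlier in this diff: with summary "count" and originName "*", the field expression and the generated query come out as below. The table and group-by names are placeholders, not values from this patch.

    // Illustration only: how transMysqlSQL aliases the new count field.
    import org.apache.commons.lang3.StringUtils;

    public class CountAliasExample {
        public static void main(String[] args) {
            String summary = "count";
            String originName = "*";
            String alias = "_" + summary + "_"
                    + (StringUtils.equalsIgnoreCase(originName, "*") ? "" : originName);
            String field = "CAST(" + summary + "(" + originName + ") AS DECIMAL(20,2)) AS " + alias;
            System.out.println(field);
            // prints: CAST(count(*) AS DECIMAL(20,2)) AS _count_
            // resulting query shape (placeholders "province" and "ds_xxx"):
            //   SELECT province,CAST(count(*) AS DECIMAL(20,2)) AS _count_
            //   FROM ds_xxx WHERE 1=1 GROUP BY province ORDER BY null
        }
    }
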
-import org.apache.spark.sql.*; -import org.apache.spark.sql.types.DataTypes; -import org.apache.spark.sql.types.StructField; -import org.apache.spark.sql.types.StructType; -import org.apache.spark.storage.StorageLevel; -import org.springframework.core.env.Environment; -import org.springframework.stereotype.Service; -import scala.Tuple2; - -import javax.annotation.Resource; -import java.text.MessageFormat; -import java.util.ArrayList; -import java.util.Base64; -import java.util.Iterator; -import java.util.List; - -/** - * @Author gin - * @Date 2021/3/26 3:49 下午 - */ -@Service -public class SparkCalc { - private static String column_family = "dataease"; - private static String data_path = "/opt/dataease/data/db/"; - @Resource - private Environment env; // 保存了配置文件的信息 - - public List getData(String hTable, List fields, List xAxis, List yAxis, String tmpTable, List requestList) throws Exception { - // Spark Context - SparkSession spark = CommonBeanFactory.getBean(SparkSession.class); - JavaSparkContext sparkContext = new JavaSparkContext(spark.sparkContext()); - - // Spark SQL Context - SQLContext sqlContext = new SQLContext(sparkContext); - sqlContext.setConf("spark.sql.shuffle.partitions", env.getProperty("spark.sql.shuffle.partitions", "1")); - sqlContext.setConf("spark.default.parallelism", env.getProperty("spark.default.parallelism", "1")); - - Dataset dataFrame = getData(sparkContext, sqlContext, hTable, fields); +//package io.dataease.service.spark; +// +//import io.dataease.base.domain.DatasetTableField; +//import io.dataease.commons.utils.CommonBeanFactory; +//import io.dataease.controller.request.chart.ChartExtFilterRequest; +//import io.dataease.dto.chart.ChartViewFieldDTO; +//import org.antlr.analysis.MachineProbe; +//import org.apache.commons.collections4.CollectionUtils; +//import org.apache.commons.lang3.ObjectUtils; +//import org.apache.commons.lang3.StringUtils; +//import org.apache.hadoop.hbase.client.Result; +//import org.apache.hadoop.hbase.client.Scan; +//import org.apache.hadoop.hbase.io.ImmutableBytesWritable; +//import org.apache.hadoop.hbase.mapreduce.TableInputFormat; +//import org.apache.hadoop.hbase.protobuf.ProtobufUtil; +//import org.apache.hadoop.hbase.protobuf.generated.ClientProtos; +//import org.apache.hadoop.hbase.util.Bytes; +//import org.apache.spark.api.java.JavaPairRDD; +//import org.apache.spark.api.java.JavaRDD; +//import org.apache.spark.api.java.JavaSparkContext; +//import org.apache.spark.api.java.function.FlatMapFunction; +//import org.apache.spark.api.java.function.Function; +//import org.apache.spark.sql.*; +//import org.apache.spark.sql.types.DataTypes; +//import org.apache.spark.sql.types.StructField; +//import org.apache.spark.sql.types.StructType; +//import org.apache.spark.storage.StorageLevel; +//import org.springframework.core.env.Environment; +//import org.springframework.stereotype.Service; +//import scala.Tuple2; +// +//import javax.annotation.Resource; +//import java.math.BigDecimal; +//import java.text.MessageFormat; +//import java.util.*; +// +///** +// * @Author gin +// * @Date 2021/3/26 3:49 下午 +// */ +//@Service +//public class SparkCalc { +// private static String column_family = "dataease"; +// private static String data_path = "/opt/dataease/data/db/"; +// @Resource +// private Environment env; // 保存了配置文件的信息 +// +// public List getData(String hTable, List fields, List xAxis, List yAxis, String tmpTable, List requestList) throws Exception { +// // Spark Context +// SparkSession spark = 
CommonBeanFactory.getBean(SparkSession.class); +// JavaSparkContext sparkContext = new JavaSparkContext(spark.sparkContext()); +// +// // Spark SQL Context +// SQLContext sqlContext = new SQLContext(sparkContext); +// sqlContext.setConf("spark.sql.shuffle.partitions", env.getProperty("spark.sql.shuffle.partitions", "1")); +// sqlContext.setConf("spark.default.parallelism", env.getProperty("spark.default.parallelism", "1")); +// +// /*Map dataFrame = getData(sparkContext, sqlContext, hTable, fields); +// List data = new ArrayList<>(); +// Iterator> iterator = dataFrame.entrySet().iterator(); +// while (iterator.hasNext()) { +// String[] r = new String[2]; +// Map.Entry next = iterator.next(); +// String key = next.getKey(); +// BigDecimal value = next.getValue(); +// r[0] = key; +// r[1] = value.toString(); +// data.add(r); +// }*/ +// +//// Dataset dataFrame = getData(sparkContext, sqlContext, hTable, fields); // Dataset dataFrame = CacheUtil.getInstance().getCacheData(hTable); // if (ObjectUtils.isEmpty(dataFrame)) { -// dataFrame = getData(sparkContext, sqlContext, hTable, fields); +// dataFrame = getHBaseDataAndCache(sparkContext, sqlContext, hTable, fields); // } - - dataFrame.createOrReplaceTempView( tmpTable); - Dataset sql = sqlContext.sql(getSQL(xAxis, yAxis, tmpTable, requestList)); - // transform - List data = new ArrayList<>(); - List list = sql.collectAsList(); - for (Row row : list) { - String[] r = new String[row.length()]; - for (int i = 0; i < row.length(); i++) { - r[i] = row.get(i) == null ? "null" : row.get(i).toString(); - } - data.add(r); - } - return data; - } - - public Dataset getHBaseDataAndCache(String hTable, List fields) throws Exception { - // Spark Context - SparkSession spark = CommonBeanFactory.getBean(SparkSession.class); - JavaSparkContext sparkContext = new JavaSparkContext(spark.sparkContext()); - - // Spark SQL Context - SQLContext sqlContext = new SQLContext(sparkContext); - sqlContext.setConf("spark.sql.shuffle.partitions", env.getProperty("spark.sql.shuffle.partitions", "1")); - sqlContext.setConf("spark.default.parallelism", env.getProperty("spark.default.parallelism", "1")); - return getHBaseDataAndCache(sparkContext, sqlContext, hTable, fields); - } - - public Dataset getData(JavaSparkContext sparkContext, SQLContext sqlContext, String tableId, List fields) throws Exception { - fields.sort((o1, o2) -> { - if (o1.getOriginName() == null) { - return -1; - } - if (o2.getOriginName() == null) { - return 1; - } - return o1.getOriginName().compareTo(o2.getOriginName()); - }); - - JavaRDD pairRDD = sparkContext.textFile(data_path + tableId + ".txt"); - - JavaRDD rdd = pairRDD.mapPartitions( (FlatMapFunction, Row>) tuple2Iterator -> { - List iterator = new ArrayList<>(); - while (tuple2Iterator.hasNext()) { - String[] items = tuple2Iterator.next().split(";"); - List list = new ArrayList<>(); - for(int i=0; i structFields = new ArrayList<>(); - // struct顺序要与rdd顺序一致 - fields.forEach(x -> { - if (x.getDeType() == 0 || x.getDeType() == 1) { - structFields.add(DataTypes.createStructField(x.getOriginName(), DataTypes.StringType, true)); - } else if (x.getDeType() == 2) { - structFields.add(DataTypes.createStructField(x.getOriginName(), DataTypes.LongType, true)); - } else if (x.getDeType() == 3) { - structFields.add(DataTypes.createStructField(x.getOriginName(), DataTypes.DoubleType, true)); - } - }); - StructType structType = DataTypes.createStructType(structFields); - - Dataset dataFrame = sqlContext.createDataFrame(rdd, structType); - return dataFrame; - } - 
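Context for the disabled SparkCalc code in this file: chart queries over extracted datasets now go through the Doris connection built in ChartViewService.dorisDatasource() (a MySQL-protocol datasource pointing at the Doris FE query port), so the Spark/HBase path here is no longer exercised. One observation on that helper: host, port, and password are hardcoded in the patch. A sketch of the same method reading them from an injected Spring Environment instead; the "doris.*" property keys and the placeholder defaults are hypothetical, not existing configuration.

    // Sketch only, assuming an @Resource-injected Environment "env" as used
    // elsewhere in this codebase; the doris.* keys are illustrative.
    public Datasource dorisDatasource() {
        JSONObject jsonObject = new JSONObject();
        jsonObject.put("dataSourceType", "jdbc");
        jsonObject.put("dataBase", env.getProperty("doris.db", "example_db"));
        jsonObject.put("username", env.getProperty("doris.user", "root"));
        jsonObject.put("password", env.getProperty("doris.password", ""));
        jsonObject.put("host", env.getProperty("doris.host", "127.0.0.1"));
        jsonObject.put("port", env.getProperty("doris.port", "9030"));

        Datasource datasource = new Datasource();
        datasource.setId("doris");
        datasource.setName("doris");
        datasource.setDesc("doris");
        datasource.setType("mysql"); // Doris is queried over the MySQL protocol
        datasource.setConfiguration(jsonObject.toJSONString());
        return datasource;
    }
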
- public Dataset getHBaseDataAndCache(JavaSparkContext sparkContext, SQLContext sqlContext, String hTable, List fields) throws Exception { - Scan scan = new Scan(); - scan.addFamily(Bytes.toBytes(column_family)); - for (DatasetTableField field : fields) { - scan.addColumn(Bytes.toBytes(column_family), Bytes.toBytes(field.getOriginName())); - } - ClientProtos.Scan proto = ProtobufUtil.toScan(scan); - String scanToString = new String(Base64.getEncoder().encode(proto.toByteArray())); - - // HBase config - org.apache.hadoop.conf.Configuration conf = new org.apache.hadoop.conf.Configuration(); - conf.set("hbase.zookeeper.quorum", env.getProperty("hbase.zookeeper.quorum")); - conf.set("hbase.zookeeper.property.clientPort", env.getProperty("hbase.zookeeper.property.clientPort")); - conf.set("hbase.client.retries.number", env.getProperty("hbase.client.retries.number", "1")); - conf.set(TableInputFormat.INPUT_TABLE, hTable); - conf.set(TableInputFormat.SCAN, scanToString); - - JavaPairRDD pairRDD = sparkContext.newAPIHadoopRDD(conf, TableInputFormat.class, ImmutableBytesWritable.class, Result.class); - - JavaRDD rdd = pairRDD.mapPartitions((FlatMapFunction>, Row>) tuple2Iterator -> { - List iterator = new ArrayList<>(); - while (tuple2Iterator.hasNext()) { - Result result = tuple2Iterator.next()._2; - List list = new ArrayList<>(); - fields.forEach(x -> { - String l = Bytes.toString(result.getValue(column_family.getBytes(), x.getOriginName().getBytes())); - if (x.getDeType() == 0 || x.getDeType() == 1) { - list.add(l); - } else if (x.getDeType() == 2) { - if (StringUtils.isEmpty(l)) { - l = "0"; - } - list.add(Long.valueOf(l)); - } else if (x.getDeType() == 3) { - if (StringUtils.isEmpty(l)) { - l = "0.0"; - } - list.add(Double.valueOf(l)); - } - }); - iterator.add(RowFactory.create(list.toArray())); - } - return iterator.iterator(); - }); - - List structFields = new ArrayList<>(); - // struct顺序要与rdd顺序一致 - fields.forEach(x -> { - if (x.getDeType() == 0 || x.getDeType() == 1) { - structFields.add(DataTypes.createStructField(x.getOriginName(), DataTypes.StringType, true)); - } else if (x.getDeType() == 2) { - structFields.add(DataTypes.createStructField(x.getOriginName(), DataTypes.LongType, true)); - } else if (x.getDeType() == 3) { - structFields.add(DataTypes.createStructField(x.getOriginName(), DataTypes.DoubleType, true)); - } - }); - StructType structType = DataTypes.createStructType(structFields); - - Dataset dataFrame = sqlContext.createDataFrame(rdd, structType).persist(StorageLevel.MEMORY_AND_DISK_SER()); +// +// dataFrame.createOrReplaceTempView(tmpTable); +// Dataset sql = sqlContext.sql(getSQL(xAxis, yAxis, tmpTable, requestList)); +// // transform +// List data = new ArrayList<>(); +// List list = sql.collectAsList(); +// for (Row row : list) { +// String[] r = new String[row.length()]; +// for (int i = 0; i < row.length(); i++) { +// r[i] = row.get(i) == null ? 
"null" : row.get(i).toString(); +// } +// data.add(r); +// } +// return data; +// } +// +// public Dataset getHBaseDataAndCache(String hTable, List fields) throws Exception { +// // Spark Context +// SparkSession spark = CommonBeanFactory.getBean(SparkSession.class); +// JavaSparkContext sparkContext = new JavaSparkContext(spark.sparkContext()); +// +// // Spark SQL Context +// SQLContext sqlContext = new SQLContext(sparkContext); +// sqlContext.setConf("spark.sql.shuffle.partitions", env.getProperty("spark.sql.shuffle.partitions", "1")); +// sqlContext.setConf("spark.default.parallelism", env.getProperty("spark.default.parallelism", "1")); +// return getHBaseDataAndCache(sparkContext, sqlContext, hTable, fields); +// } +// +// public Map getData(JavaSparkContext sparkContext, SQLContext sqlContext, String tableId, List fields) throws Exception { +// fields.sort((o1, o2) -> { +// if (o1.getOriginName() == null) { +// return -1; +// } +// if (o2.getOriginName() == null) { +// return 1; +// } +// return o1.getOriginName().compareTo(o2.getOriginName()); +// }); +// +// JavaRDD pairRDD = sparkContext.textFile(data_path + tableId + ".txt"); +//// System.out.println(pairRDD.count()); +// +//// JavaRDD> rdd = pairRDD.map((Function>) v1 -> { +//// Map map = new HashMap<>(); +//// String[] items = v1.split(";"); +//// String day = null; +//// BigDecimal res = new BigDecimal(0); +//// for (int i = 0; i < items.length; i++) { +//// String l = items[i]; +//// DatasetTableField x = fields.get(i); +//// if (x.getOriginName().equalsIgnoreCase("sync_day")) { +//// day = l; +//// } +//// if (x.getOriginName().equalsIgnoreCase("usage_cost")) { +//// res = new BigDecimal(l); +//// } +//// } +//// BigDecimal bigDecimal = map.get(day); +//// if (bigDecimal == null) { +//// map.put(day, res); +//// } else { +//// map.put(day, bigDecimal.add(res)); +//// } +//// return map.entrySet().iterator().next(); +//// }); +// +// JavaRDD> rdd = pairRDD.mapPartitions((FlatMapFunction, Map.Entry>) tuple2Iterator -> { +// Map map = new HashMap<>(); +// while (tuple2Iterator.hasNext()) { +// String[] items = tuple2Iterator.next().split(";"); +// String day = null; +// BigDecimal res = new BigDecimal(0); +// for (int i = 0; i < items.length; i++) { +// String l = items[i]; +// DatasetTableField x = fields.get(i); +// if (x.getOriginName().equalsIgnoreCase("sync_day")) { +// day = l; +// } +// if (x.getOriginName().equalsIgnoreCase("usage_cost")) { +// res = new BigDecimal(l); +// } +// } +// BigDecimal bigDecimal = map.get(day); +// if (bigDecimal == null) { +// map.put(day, res); +// } else { +// map.put(day, bigDecimal.add(res)); +// } +// } +// return map.entrySet().iterator(); +// }); +// +// +//// System.out.println(rdd.count()); +// +// Map map = new HashMap<>(); +// List> collect = rdd.collect(); +//// System.out.println(collect.size()); +// +// collect.forEach(stringBigDecimalEntry -> { +// String key = stringBigDecimalEntry.getKey(); +// BigDecimal value = stringBigDecimalEntry.getValue(); +// +// BigDecimal bigDecimal = map.get(key); +// if (bigDecimal == null) { +// map.put(key, value); +// } else { +// map.put(key, bigDecimal.add(value)); +// } +// }); +// +// return map; +// } +// +//// public Dataset getData(JavaSparkContext sparkContext, SQLContext sqlContext, String tableId, List fields) throws Exception { +//// fields.sort((o1, o2) -> { +//// if (o1.getOriginName() == null) { +//// return -1; +//// } +//// if (o2.getOriginName() == null) { +//// return 1; +//// } +//// return 
o1.getOriginName().compareTo(o2.getOriginName()); +//// }); +//// +//// JavaRDD pairRDD = sparkContext.textFile(data_path + tableId + ".txt"); +//// +//// JavaRDD rdd = pairRDD.mapPartitions((FlatMapFunction, Row>) tuple2Iterator -> { +//// List iterator = new ArrayList<>(); +//// while (tuple2Iterator.hasNext()) { +//// String[] items = tuple2Iterator.next().split(";"); +//// List list = new ArrayList<>(); +//// for (int i = 0; i < items.length; i++) { +//// String l = items[i]; +//// DatasetTableField x = fields.get(i); +//// if (x.getDeType() == 0 || x.getDeType() == 1) { +//// list.add(l); +//// } else if (x.getDeType() == 2) { +//// if (StringUtils.isEmpty(l)) { +//// l = "0"; +//// } +//// if (StringUtils.equalsIgnoreCase(l, "Y")) { +//// l = "1"; +//// } +//// if (StringUtils.equalsIgnoreCase(l, "N")) { +//// l = "0"; +//// } +//// list.add(Long.valueOf(l)); +//// } else if (x.getDeType() == 3) { +//// if (StringUtils.isEmpty(l)) { +//// l = "0.0"; +//// } +//// list.add(Double.valueOf(l)); +//// } +//// } +//// iterator.add(RowFactory.create(list.toArray())); +//// } +//// return iterator.iterator(); +//// }); +//// +//// List structFields = new ArrayList<>(); +//// // struct顺序要与rdd顺序一致 +//// fields.forEach(x -> { +//// if (x.getDeType() == 0 || x.getDeType() == 1) { +//// structFields.add(DataTypes.createStructField(x.getOriginName(), DataTypes.StringType, true)); +//// } else if (x.getDeType() == 2) { +//// structFields.add(DataTypes.createStructField(x.getOriginName(), DataTypes.LongType, true)); +//// } else if (x.getDeType() == 3) { +//// structFields.add(DataTypes.createStructField(x.getOriginName(), DataTypes.DoubleType, true)); +//// } +//// }); +//// StructType structType = DataTypes.createStructType(structFields); +//// +//// Dataset dataFrame = sqlContext.createDataFrame(rdd, structType); +//// return dataFrame; +//// } +// +// public Dataset getHBaseDataAndCache(JavaSparkContext sparkContext, SQLContext sqlContext, String hTable, List fields) throws Exception { +// Scan scan = new Scan(); +// scan.addFamily(Bytes.toBytes(column_family)); +// for (DatasetTableField field : fields) { +// scan.addColumn(Bytes.toBytes(column_family), Bytes.toBytes(field.getOriginName())); +// } +// ClientProtos.Scan proto = ProtobufUtil.toScan(scan); +// String scanToString = new String(Base64.getEncoder().encode(proto.toByteArray())); +// +// // HBase config +// org.apache.hadoop.conf.Configuration conf = new org.apache.hadoop.conf.Configuration(); +// conf.set("hbase.zookeeper.quorum", env.getProperty("hbase.zookeeper.quorum")); +// conf.set("hbase.zookeeper.property.clientPort", env.getProperty("hbase.zookeeper.property.clientPort")); +// conf.set("hbase.client.retries.number", env.getProperty("hbase.client.retries.number", "1")); +// conf.set(TableInputFormat.INPUT_TABLE, hTable); +// conf.set(TableInputFormat.SCAN, scanToString); +// +// JavaPairRDD pairRDD = sparkContext.newAPIHadoopRDD(conf, TableInputFormat.class, ImmutableBytesWritable.class, Result.class); +// +// JavaRDD rdd = pairRDD.mapPartitions((FlatMapFunction>, Row>) tuple2Iterator -> { +// List iterator = new ArrayList<>(); +// while (tuple2Iterator.hasNext()) { +// Result result = tuple2Iterator.next()._2; +// List list = new ArrayList<>(); +// fields.forEach(x -> { +// String l = Bytes.toString(result.getValue(column_family.getBytes(), x.getOriginName().getBytes())); +// if (x.getDeType() == 0 || x.getDeType() == 1) { +// list.add(l); +// } else if (x.getDeType() == 2) { +// if (StringUtils.isEmpty(l)) { +// l = "0"; +// 
} +// list.add(Long.valueOf(l)); +// } else if (x.getDeType() == 3) { +// if (StringUtils.isEmpty(l)) { +// l = "0.0"; +// } +// list.add(Double.valueOf(l)); +// } +// }); +// iterator.add(RowFactory.create(list.toArray())); +// } +// return iterator.iterator(); +// }); +// +// List structFields = new ArrayList<>(); +// // struct顺序要与rdd顺序一致 +// fields.forEach(x -> { +// if (x.getDeType() == 0 || x.getDeType() == 1) { +// structFields.add(DataTypes.createStructField(x.getOriginName(), DataTypes.StringType, true)); +// } else if (x.getDeType() == 2) { +// structFields.add(DataTypes.createStructField(x.getOriginName(), DataTypes.LongType, true)); +// } else if (x.getDeType() == 3) { +// structFields.add(DataTypes.createStructField(x.getOriginName(), DataTypes.DoubleType, true)); +// } +// }); +// StructType structType = DataTypes.createStructType(structFields); +// +// Dataset dataFrame = sqlContext.createDataFrame(rdd, structType).persist(StorageLevel.MEMORY_AND_DISK_SER()); // CacheUtil.getInstance().addCacheData(hTable, dataFrame); - dataFrame.count(); - return dataFrame; - } - - public String getSQL(List xAxis, List yAxis, String table, List extFilterRequestList) { - // 字段汇总 排序等 - String[] field = yAxis.stream().map(y -> "CAST(" + y.getSummary() + "(" + y.getOriginName() + ") AS DECIMAL(20,2)) AS _" + y.getSummary() + "_" + y.getOriginName()).toArray(String[]::new); - String[] group = xAxis.stream().map(ChartViewFieldDTO::getOriginName).toArray(String[]::new); - String[] order = yAxis.stream().filter(y -> StringUtils.isNotEmpty(y.getSort()) && !StringUtils.equalsIgnoreCase(y.getSort(), "none")) - .map(y -> "_" + y.getSummary() + "_" + y.getOriginName() + " " + y.getSort()).toArray(String[]::new); - - String sql = MessageFormat.format("SELECT {0},{1} FROM {2} WHERE 1=1 {3} GROUP BY {4} ORDER BY null,{5}", - StringUtils.join(group, ","), - StringUtils.join(field, ","), - table, - transExtFilter(extFilterRequestList),// origin field filter and panel field filter, - StringUtils.join(group, ","), - StringUtils.join(order, ",")); - if (sql.endsWith(",")) { - sql = sql.substring(0, sql.length() - 1); - } - // 如果是对结果字段过滤,则再包裹一层sql - String[] resultFilter = yAxis.stream().filter(y -> CollectionUtils.isNotEmpty(y.getFilter()) && y.getFilter().size() > 0) - .map(y -> { - String[] s = y.getFilter().stream().map(f -> "AND _" + y.getSummary() + "_" + y.getOriginName() + transFilterTerm(f.getTerm()) + f.getValue()).toArray(String[]::new); - return StringUtils.join(s, " "); - }).toArray(String[]::new); - if (resultFilter.length == 0) { - return sql; - } else { - String filterSql = MessageFormat.format("SELECT * FROM {0} WHERE 1=1 {1}", - "(" + sql + ") AS tmp", - StringUtils.join(resultFilter, " ")); - return filterSql; - } - } - - public String transFilterTerm(String term) { - switch (term) { - case "eq": - return " = "; - case "not_eq": - return " <> "; - case "lt": - return " < "; - case "le": - return " <= "; - case "gt": - return " > "; - case "ge": - return " >= "; - case "in": - return " IN "; - case "not in": - return " NOT IN "; - case "like": - return " LIKE "; - case "not like": - return " NOT LIKE "; - case "null": - return " IS NULL "; - case "not_null": - return " IS NOT NULL "; - default: - return ""; - } - } - - public String transExtFilter(List requestList) { - if (CollectionUtils.isEmpty(requestList)) { - return ""; - } - StringBuilder filter = new StringBuilder(); - for (ChartExtFilterRequest request : requestList) { - List value = request.getValue(); - if 
(CollectionUtils.isEmpty(value)) { - continue; - } - DatasetTableField field = request.getDatasetTableField(); - filter.append(" AND ") - .append(field.getOriginName()) - .append(" ") - .append(transFilterTerm(request.getOperator())) - .append(" "); - if (StringUtils.containsIgnoreCase(request.getOperator(), "in")) { - filter.append("('").append(StringUtils.join(value, "','")).append("')"); - } else if (StringUtils.containsIgnoreCase(request.getOperator(), "like")) { - filter.append("'%").append(value.get(0)).append("%'"); - } else { - filter.append("'").append(value.get(0)).append("'"); - } - } - return filter.toString(); - } -} +// dataFrame.count(); +// return dataFrame; +// } +// +// public String getSQL(List xAxis, List yAxis, String table, List extFilterRequestList) { +// // 字段汇总 排序等 +// String[] field = yAxis.stream().map(y -> "CAST(" + y.getSummary() + "(" + y.getOriginName() + ") AS DECIMAL(20,2)) AS _" + y.getSummary() + "_" + y.getOriginName()).toArray(String[]::new); +// String[] group = xAxis.stream().map(ChartViewFieldDTO::getOriginName).toArray(String[]::new); +// String[] order = yAxis.stream().filter(y -> StringUtils.isNotEmpty(y.getSort()) && !StringUtils.equalsIgnoreCase(y.getSort(), "none")) +// .map(y -> "_" + y.getSummary() + "_" + y.getOriginName() + " " + y.getSort()).toArray(String[]::new); +// +// String sql = MessageFormat.format("SELECT {0},{1} FROM {2} WHERE 1=1 {3} GROUP BY {4} ORDER BY null,{5}", +// StringUtils.join(group, ","), +// StringUtils.join(field, ","), +// table, +// transExtFilter(extFilterRequestList),// origin field filter and panel field filter, +// StringUtils.join(group, ","), +// StringUtils.join(order, ",")); +// if (sql.endsWith(",")) { +// sql = sql.substring(0, sql.length() - 1); +// } +// // 如果是对结果字段过滤,则再包裹一层sql +// String[] resultFilter = yAxis.stream().filter(y -> CollectionUtils.isNotEmpty(y.getFilter()) && y.getFilter().size() > 0) +// .map(y -> { +// String[] s = y.getFilter().stream().map(f -> "AND _" + y.getSummary() + "_" + y.getOriginName() + transFilterTerm(f.getTerm()) + f.getValue()).toArray(String[]::new); +// return StringUtils.join(s, " "); +// }).toArray(String[]::new); +// if (resultFilter.length == 0) { +// return sql; +// } else { +// String filterSql = MessageFormat.format("SELECT * FROM {0} WHERE 1=1 {1}", +// "(" + sql + ") AS tmp", +// StringUtils.join(resultFilter, " ")); +// return filterSql; +// } +// } +// +// public String transFilterTerm(String term) { +// switch (term) { +// case "eq": +// return " = "; +// case "not_eq": +// return " <> "; +// case "lt": +// return " < "; +// case "le": +// return " <= "; +// case "gt": +// return " > "; +// case "ge": +// return " >= "; +// case "in": +// return " IN "; +// case "not in": +// return " NOT IN "; +// case "like": +// return " LIKE "; +// case "not like": +// return " NOT LIKE "; +// case "null": +// return " IS NULL "; +// case "not_null": +// return " IS NOT NULL "; +// default: +// return ""; +// } +// } +// +// public String transExtFilter(List requestList) { +// if (CollectionUtils.isEmpty(requestList)) { +// return ""; +// } +// StringBuilder filter = new StringBuilder(); +// for (ChartExtFilterRequest request : requestList) { +// List value = request.getValue(); +// if (CollectionUtils.isEmpty(value)) { +// continue; +// } +// DatasetTableField field = request.getDatasetTableField(); +// filter.append(" AND ") +// .append(field.getOriginName()) +// .append(" ") +// .append(transFilterTerm(request.getOperator())) +// .append(" "); +// if 
(StringUtils.containsIgnoreCase(request.getOperator(), "in")) { +// filter.append("('").append(StringUtils.join(value, "','")).append("')"); +// } else if (StringUtils.containsIgnoreCase(request.getOperator(), "like")) { +// filter.append("'%").append(value.get(0)).append("%'"); +// } else { +// filter.append("'").append(value.get(0)).append("'"); +// } +// } +// return filter.toString(); +// } +//} diff --git a/backend/src/main/resources/db/migration/V8__system.sql b/backend/src/main/resources/db/migration/V8__system.sql index cf284d82cd..ecfb3a62e4 100644 --- a/backend/src/main/resources/db/migration/V8__system.sql +++ b/backend/src/main/resources/db/migration/V8__system.sql @@ -1,4 +1,5 @@ -DROP TABLE IF EXISTS `sys_dept`; +DROP TABLE IF EXISTS `sys_dept` ; + CREATE TABLE `sys_dept` ( `dept_id` bigint(20) NOT NULL AUTO_INCREMENT COMMENT 'ID', `pid` bigint(20) DEFAULT NULL COMMENT '上级部门', @@ -13,23 +14,18 @@ CREATE TABLE `sys_dept` ( PRIMARY KEY (`dept_id`) USING BTREE, KEY `inx_pid` (`pid`), KEY `inx_enabled` (`enabled`) -) ENGINE=InnoDB AUTO_INCREMENT=24 DEFAULT CHARSET=utf8 ROW_FORMAT=COMPACT COMMENT='部门'; +) ENGINE=InnoDB AUTO_INCREMENT=26 DEFAULT CHARSET=utf8 ROW_FORMAT=COMPACT COMMENT='部门'; --- ---------------------------- --- Records of sys_dept --- ---------------------------- -BEGIN; -INSERT INTO `sys_dept` VALUES (18, 0, 1, '上海飞致云', 1, b'1', NULL, NULL, 1614048906358, 1614048906358); -INSERT INTO `sys_dept` VALUES (19, 0, 1, '北京飞致云', 2, b'1', NULL, NULL, 1614048918465, 1614048918465); -INSERT INTO `sys_dept` VALUES (20, 18, 0, '营销部', 1, b'1', NULL, NULL, 1614048946370, 1614049006759); -INSERT INTO `sys_dept` VALUES (21, 19, 0, '综合部', 1, b'1', NULL, NULL, 1614048963483, 1614048963483); -INSERT INTO `sys_dept` VALUES (22, 0, 0, '深圳飞致云', 3, b'1', NULL, NULL, 1614679834772, 1614679834772); -INSERT INTO `sys_dept` VALUES (23, 0, 0, '南京飞致云', 4, b'1', NULL, NULL, 1614679890462, 1614679890462); -COMMIT; +INSERT INTO `sys_dept` (`dept_id`, `pid`, `sub_count`, `name`, `dept_sort`, `enabled`, `create_by`, `update_by`, `create_time`, `update_time`) VALUES ('18','0','1','上海飞致云','1',b'1',null,null,'1614048906358','1614048906358'); +INSERT INTO `sys_dept` (`dept_id`, `pid`, `sub_count`, `name`, `dept_sort`, `enabled`, `create_by`, `update_by`, `create_time`, `update_time`) VALUES ('19','0','1','北京飞致云','2',b'1',null,null,'1614048918465','1614048918465'); +INSERT INTO `sys_dept` (`dept_id`, `pid`, `sub_count`, `name`, `dept_sort`, `enabled`, `create_by`, `update_by`, `create_time`, `update_time`) VALUES ('20','18','1','营销部','1',b'1',null,null,'1614048946370','1614049006759'); +INSERT INTO `sys_dept` (`dept_id`, `pid`, `sub_count`, `name`, `dept_sort`, `enabled`, `create_by`, `update_by`, `create_time`, `update_time`) VALUES ('21','19','0','综合部','3',b'1',null,null,'1614048963483','1615783363091'); +INSERT INTO `sys_dept` (`dept_id`, `pid`, `sub_count`, `name`, `dept_sort`, `enabled`, `create_by`, `update_by`, `create_time`, `update_time`) VALUES ('25','20','0','售前组','1',b'1',null,null,'1615791706945','1615791706945'); -DROP TABLE IF EXISTS `sys_menu`; +DROP TABLE IF EXISTS `sys_menu` ; + CREATE TABLE `sys_menu` ( `menu_id` bigint(20) NOT NULL AUTO_INCREMENT COMMENT 'ID', `pid` bigint(20) DEFAULT NULL COMMENT '上级菜单ID', @@ -53,114 +49,44 @@ CREATE TABLE `sys_menu` ( UNIQUE KEY `uniq_title` (`title`), UNIQUE KEY `uniq_name` (`name`), KEY `inx_pid` (`pid`) -) ENGINE=InnoDB AUTO_INCREMENT=28 DEFAULT CHARSET=utf8 ROW_FORMAT=COMPACT COMMENT='系统菜单'; +) ENGINE=InnoDB AUTO_INCREMENT=35 DEFAULT 
CHARSET=utf8 ROW_FORMAT=COMPACT COMMENT='系统菜单'; --- ---------------------------- --- Records of sys_menu --- ---------------------------- -BEGIN; -INSERT INTO `sys_menu` VALUES (1, 0, 3, 0, '系统管理', '系统管理', 'Layout', 3, 'system', '/system', NULL, b'0', b'0', 'dir:sys', NULL, NULL, NULL, 1614916695777); -INSERT INTO `sys_menu` VALUES (2, 1, 3, 1, '用户管理', '用户管理', 'system/user/index', 1, 'peoples', 'user', NULL, b'0', b'0', 'user:read', NULL, NULL, NULL, NULL); -INSERT INTO `sys_menu` VALUES (3, 1, 3, 1, '菜单管理', '菜单管理', 'system/menu/index', 2, 'menu', 'menu', NULL, b'0', b'0', 'menu:read', NULL, NULL, NULL, NULL); -INSERT INTO `sys_menu` VALUES (4, 1, 3, 1, '组织管理', '组织管理', 'system/dept/index', 3, 'dept', 'dept', NULL, b'0', b'0', 'dept:read', NULL, NULL, NULL, NULL); -INSERT INTO `sys_menu` VALUES (5, 1, 3, 1, '角色管理', '角色管理', 'system/role/index', 4, 'role', 'role', b'0', b'0', b'0', 'role:read', NULL, NULL, 1614683852133, 1614683852133); -INSERT INTO `sys_menu` VALUES (6, 1, 0, 1, '参数管理', '参数管理', 'system/systemParamSettings/index', 5, 'sys-tools', 'systemParamSettings', NULL, b'0', b'0', 'sysparam:read', NULL, NULL, NULL, 1614916731805); -INSERT INTO `sys_menu` VALUES (7, 0, 1, 0, '数据管理', '数据管理', 'Layout', 2, 'dataset', '/dataset', NULL, b'0', b'0', 'dir:data', NULL, NULL, NULL, 1614916666408); -INSERT INTO `sys_menu` VALUES (8, 7, 0, 1, '数据管理1', '数据管理1', 'dataset/index', 1, 'dataset', 'index', NULL, b'0', b'0', 'data:read', NULL, NULL, NULL, 1614916684821); -INSERT INTO `sys_menu` VALUES (9, 0, 1, 0, '视图管理', '视图管理', 'Layout', 1, 'chart', '/chart', NULL, b'0', b'0', 'dir:chart', NULL, NULL, NULL, 1614916648098); -INSERT INTO `sys_menu` VALUES (10, 9, 0, 1, '视图1', '视图1', 'chart/index', 1, 'chart', 'index', NULL, b'0', b'0', 'chart:read', NULL, NULL, NULL, 1614915491036); -INSERT INTO `sys_menu` VALUES (11, 1, 4, 1, '数据连接', '数据连接', 'system/datasource/index', 0, 'database', 'index', NULL, b'0', b'0', 'datasource:read', NULL, NULL, NULL, 1614916717642); -INSERT INTO `sys_menu` VALUES (12, 3, 0, 2, '创建菜单', NULL, NULL, 999, NULL, NULL, b'0', b'0', b'0', 'menu:add', NULL, NULL, 1614924617327, 1614924617327); -INSERT INTO `sys_menu` VALUES (13, 3, 0, 2, '删除菜单', NULL, NULL, 999, NULL, NULL, b'0', b'0', b'0', 'menu:del', NULL, NULL, 1614924667808, 1614924667808); -INSERT INTO `sys_menu` VALUES (14, 3, 0, 2, '编辑菜单', NULL, NULL, 999, NULL, NULL, b'0', b'0', b'0', 'menu:edit', NULL, NULL, 1614930734224, 1614936429773); -INSERT INTO `sys_menu` VALUES (15, 2, 0, 2, '创建用户', NULL, NULL, 999, NULL, NULL, b'0', b'0', b'0', 'user:add', NULL, NULL, 1614930862373, 1614930862373); -INSERT INTO `sys_menu` VALUES (16, 2, 0, 2, '删除用户', NULL, NULL, 999, NULL, NULL, b'0', b'0', b'0', 'user:del', NULL, NULL, 1614930903502, 1614930903502); -INSERT INTO `sys_menu` VALUES (17, 2, 0, 2, '编辑用户', NULL, NULL, 999, NULL, NULL, b'0', b'0', b'0', 'user:edit', NULL, NULL, 1614930935529, 1614930935529); -INSERT INTO `sys_menu` VALUES (18, 4, 0, 2, '创建组织', NULL, NULL, 999, NULL, NULL, b'0', b'0', b'0', 'dept:add', NULL, NULL, 1614930976297, 1614930976297); -INSERT INTO `sys_menu` VALUES (19, 4, 0, 2, '删除组织', NULL, NULL, 999, NULL, NULL, b'0', b'0', b'0', 'dept:del', NULL, NULL, 1614930997130, 1614930997130); -INSERT INTO `sys_menu` VALUES (20, 4, 0, 2, '编辑组织', NULL, NULL, 999, NULL, NULL, b'0', b'0', b'0', 'dept:edit', NULL, NULL, 1614931022967, 1614931022967); -INSERT INTO `sys_menu` VALUES (21, 5, 0, 2, '创建角色', NULL, NULL, 999, NULL, NULL, b'0', b'0', b'0', 'role:add', NULL, NULL, 1614931069408, 1614931069408); -INSERT INTO 
`sys_menu` VALUES (22, 5, 0, 2, '删除角色', NULL, NULL, 999, NULL, NULL, b'0', b'0', b'0', 'role:del', NULL, NULL, 1614931097720, 1614931097720); -INSERT INTO `sys_menu` VALUES (23, 5, 0, 2, '编辑角色', NULL, NULL, 999, NULL, NULL, b'0', b'0', b'0', 'role:edit', NULL, NULL, 1614931124782, 1614931124782); -INSERT INTO `sys_menu` VALUES (24, 11, 0, 2, '创建连接', NULL, NULL, 999, NULL, NULL, b'0', b'0', b'0', 'datasource:add', NULL, NULL, 1614931168956, 1614931168956); -INSERT INTO `sys_menu` VALUES (25, 11, 0, 2, '删除连接', NULL, NULL, 999, NULL, NULL, b'0', b'0', b'0', 'datasource:del', NULL, NULL, 1614931205899, 1614931205899); -INSERT INTO `sys_menu` VALUES (26, 11, 0, 2, '编辑连接', NULL, NULL, 999, NULL, NULL, b'0', b'0', b'0', 'datasource:edit', NULL, NULL, 1614931234105, 1614931234105); -INSERT INTO `sys_menu` VALUES (27, 11, 0, 2, '校验连接', NULL, NULL, 999, NULL, NULL, b'0', b'0', b'0', 'datasource:validate', NULL, NULL, 1614931268578, 1614931268578); -INSERT INTO `sys_menu` VALUES (28, 2, 0, 2, '修改密码', NULL, NULL, 999, NULL, NULL, b'0', b'0', b'0', 'user:editPwd', NULL, NULL, 1615275128262, 1615275128262); -COMMIT; - -DROP TABLE IF EXISTS `sys_role`; -CREATE TABLE `sys_role` ( - `role_id` bigint(20) NOT NULL AUTO_INCREMENT COMMENT 'ID', - `code` varchar(100) NOT NULL COMMENT '代码', - `name` varchar(255) NOT NULL COMMENT '名称', - `description` varchar(255) DEFAULT NULL COMMENT '描述', - `create_by` varchar(255) DEFAULT NULL COMMENT '创建者', - `update_by` varchar(255) DEFAULT NULL COMMENT '更新者', - `create_time` bigint(13) DEFAULT NULL COMMENT '创建日期', - `update_time` bigint(13) DEFAULT NULL COMMENT '更新时间', - PRIMARY KEY (`role_id`) USING BTREE, - UNIQUE KEY `uniq_name` (`name`), - KEY `role_name_index` (`name`) -) ENGINE=InnoDB AUTO_INCREMENT=5 DEFAULT CHARSET=utf8 ROW_FORMAT=COMPACT COMMENT='角色表'; - --- ---------------------------- --- Records of sys_role --- ---------------------------- -BEGIN; -INSERT INTO `sys_role` VALUES (3, 'admin', '管理员', NULL, NULL, NULL, NULL, NULL); -INSERT INTO `sys_role` VALUES (4, 'emp', '普通员工', NULL, NULL, NULL, NULL, NULL); -COMMIT; +INSERT INTO `sys_menu` (`menu_id`, `pid`, `sub_count`, `type`, `title`, `name`, `component`, `menu_sort`, `icon`, `path`, `i_frame`, `cache`, `hidden`, `permission`, `create_by`, `update_by`, `create_time`, `update_time`) VALUES ('1','0','3','0','系统管理','系统管理','Layout','5','system','/system',null,b'0',b'0','dir:sys',null,null,null,'1614916695777'); +INSERT INTO `sys_menu` (`menu_id`, `pid`, `sub_count`, `type`, `title`, `name`, `component`, `menu_sort`, `icon`, `path`, `i_frame`, `cache`, `hidden`, `permission`, `create_by`, `update_by`, `create_time`, `update_time`) VALUES ('2','1','4','1','用户管理','用户管理','system/user/index','2','peoples','user',null,b'0',b'0','user:read',null,null,null,'1615786052463'); +INSERT INTO `sys_menu` (`menu_id`, `pid`, `sub_count`, `type`, `title`, `name`, `component`, `menu_sort`, `icon`, `path`, `i_frame`, `cache`, `hidden`, `permission`, `create_by`, `update_by`, `create_time`, `update_time`) VALUES ('3','1','3','1','菜单管理','菜单管理','system/menu/index','2','menu','menu',null,b'0',b'0','menu:read',null,null,null,null); +INSERT INTO `sys_menu` (`menu_id`, `pid`, `sub_count`, `type`, `title`, `name`, `component`, `menu_sort`, `icon`, `path`, `i_frame`, `cache`, `hidden`, `permission`, `create_by`, `update_by`, `create_time`, `update_time`) VALUES ('4','1','3','1','组织管理','组织管理','system/dept/index','3','dept','dept',null,b'0',b'0','dept:read',null,null,null,null); +INSERT INTO `sys_menu` (`menu_id`, `pid`, `sub_count`, `type`, 
`title`, `name`, `component`, `menu_sort`, `icon`, `path`, `i_frame`, `cache`, `hidden`, `permission`, `create_by`, `update_by`, `create_time`, `update_time`) VALUES ('5','1','3','1','角色管理','角色管理','system/role/index','4','role','role',b'0',b'0',b'0','role:read',null,null,'1614683852133','1614683852133'); +INSERT INTO `sys_menu` (`menu_id`, `pid`, `sub_count`, `type`, `title`, `name`, `component`, `menu_sort`, `icon`, `path`, `i_frame`, `cache`, `hidden`, `permission`, `create_by`, `update_by`, `create_time`, `update_time`) VALUES ('6','1','0','1','参数管理','参数管理','system/systemParamSettings/index','13','sys-tools','systemParamSettings',null,b'0',b'0','sysparam:read',null,null,null,'1615790294169'); +INSERT INTO `sys_menu` (`menu_id`, `pid`, `sub_count`, `type`, `title`, `name`, `component`, `menu_sort`, `icon`, `path`, `i_frame`, `cache`, `hidden`, `permission`, `create_by`, `update_by`, `create_time`, `update_time`) VALUES ('7','0','1','0','数据集','数据管理','Layout','3','dataset','/dataset',null,b'0',b'0','dir:data',null,null,null,'1619081474697'); +INSERT INTO `sys_menu` (`menu_id`, `pid`, `sub_count`, `type`, `title`, `name`, `component`, `menu_sort`, `icon`, `path`, `i_frame`, `cache`, `hidden`, `permission`, `create_by`, `update_by`, `create_time`, `update_time`) VALUES ('8','7','0','1','数据管理1','数据管理1','dataset/index','1','dataset','index',null,b'0',b'0','data:read',null,null,null,'1614916684821'); +INSERT INTO `sys_menu` (`menu_id`, `pid`, `sub_count`, `type`, `title`, `name`, `component`, `menu_sort`, `icon`, `path`, `i_frame`, `cache`, `hidden`, `permission`, `create_by`, `update_by`, `create_time`, `update_time`) VALUES ('9','0','1','0','视图','视图管理','Layout','2','chart','/chart',null,b'0',b'0','dir:chart',null,null,null,'1619081462127'); +INSERT INTO `sys_menu` (`menu_id`, `pid`, `sub_count`, `type`, `title`, `name`, `component`, `menu_sort`, `icon`, `path`, `i_frame`, `cache`, `hidden`, `permission`, `create_by`, `update_by`, `create_time`, `update_time`) VALUES ('10','9','0','1','视图1','视图1','chart/index','1','chart','index',null,b'0',b'0','chart:read',null,null,null,'1614915491036'); +INSERT INTO `sys_menu` (`menu_id`, `pid`, `sub_count`, `type`, `title`, `name`, `component`, `menu_sort`, `icon`, `path`, `i_frame`, `cache`, `hidden`, `permission`, `create_by`, `update_by`, `create_time`, `update_time`) VALUES ('12','3','0','2','创建菜单',null,null,'999',null,null,b'0',b'0',b'0','menu:add',null,null,'1614924617327','1614924617327'); +INSERT INTO `sys_menu` (`menu_id`, `pid`, `sub_count`, `type`, `title`, `name`, `component`, `menu_sort`, `icon`, `path`, `i_frame`, `cache`, `hidden`, `permission`, `create_by`, `update_by`, `create_time`, `update_time`) VALUES ('13','3','0','2','删除菜单',null,null,'999',null,null,b'0',b'0',b'0','menu:del',null,null,'1614924667808','1614924667808'); +INSERT INTO `sys_menu` (`menu_id`, `pid`, `sub_count`, `type`, `title`, `name`, `component`, `menu_sort`, `icon`, `path`, `i_frame`, `cache`, `hidden`, `permission`, `create_by`, `update_by`, `create_time`, `update_time`) VALUES ('14','3','0','2','编辑菜单',null,null,'999',null,null,b'0',b'0',b'0','menu:edit',null,null,'1614930734224','1614936429773'); +INSERT INTO `sys_menu` (`menu_id`, `pid`, `sub_count`, `type`, `title`, `name`, `component`, `menu_sort`, `icon`, `path`, `i_frame`, `cache`, `hidden`, `permission`, `create_by`, `update_by`, `create_time`, `update_time`) VALUES ('15','2','0','2','创建用户',null,null,'999',null,null,b'0',b'0',b'0','user:add',null,null,'1614930862373','1614930862373'); +INSERT INTO `sys_menu` 
(`menu_id`, `pid`, `sub_count`, `type`, `title`, `name`, `component`, `menu_sort`, `icon`, `path`, `i_frame`, `cache`, `hidden`, `permission`, `create_by`, `update_by`, `create_time`, `update_time`) VALUES ('16','2','0','2','删除用户',null,null,'999',null,null,b'0',b'0',b'0','user:del',null,null,'1614930903502','1614930903502'); +INSERT INTO `sys_menu` (`menu_id`, `pid`, `sub_count`, `type`, `title`, `name`, `component`, `menu_sort`, `icon`, `path`, `i_frame`, `cache`, `hidden`, `permission`, `create_by`, `update_by`, `create_time`, `update_time`) VALUES ('17','2','0','2','编辑用户',null,null,'999',null,null,b'0',b'0',b'0','user:edit',null,null,'1614930935529','1614930935529'); +INSERT INTO `sys_menu` (`menu_id`, `pid`, `sub_count`, `type`, `title`, `name`, `component`, `menu_sort`, `icon`, `path`, `i_frame`, `cache`, `hidden`, `permission`, `create_by`, `update_by`, `create_time`, `update_time`) VALUES ('18','4','0','2','创建组织',null,null,'999',null,null,b'0',b'0',b'0','dept:add',null,null,'1614930976297','1614930976297'); +INSERT INTO `sys_menu` (`menu_id`, `pid`, `sub_count`, `type`, `title`, `name`, `component`, `menu_sort`, `icon`, `path`, `i_frame`, `cache`, `hidden`, `permission`, `create_by`, `update_by`, `create_time`, `update_time`) VALUES ('19','4','0','2','删除组织',null,null,'999',null,null,b'0',b'0',b'0','dept:del',null,null,'1614930997130','1614930997130'); +INSERT INTO `sys_menu` (`menu_id`, `pid`, `sub_count`, `type`, `title`, `name`, `component`, `menu_sort`, `icon`, `path`, `i_frame`, `cache`, `hidden`, `permission`, `create_by`, `update_by`, `create_time`, `update_time`) VALUES ('20','4','0','2','编辑组织',null,null,'999',null,null,b'0',b'0',b'0','dept:edit',null,null,'1614931022967','1614931022967'); +INSERT INTO `sys_menu` (`menu_id`, `pid`, `sub_count`, `type`, `title`, `name`, `component`, `menu_sort`, `icon`, `path`, `i_frame`, `cache`, `hidden`, `permission`, `create_by`, `update_by`, `create_time`, `update_time`) VALUES ('21','5','0','2','创建角色',null,null,'999',null,null,b'0',b'0',b'0','role:add',null,null,'1614931069408','1614931069408'); +INSERT INTO `sys_menu` (`menu_id`, `pid`, `sub_count`, `type`, `title`, `name`, `component`, `menu_sort`, `icon`, `path`, `i_frame`, `cache`, `hidden`, `permission`, `create_by`, `update_by`, `create_time`, `update_time`) VALUES ('22','5','0','2','删除角色',null,null,'999',null,null,b'0',b'0',b'0','role:del',null,null,'1614931097720','1614931097720'); +INSERT INTO `sys_menu` (`menu_id`, `pid`, `sub_count`, `type`, `title`, `name`, `component`, `menu_sort`, `icon`, `path`, `i_frame`, `cache`, `hidden`, `permission`, `create_by`, `update_by`, `create_time`, `update_time`) VALUES ('23','5','0','2','编辑角色',null,null,'999',null,null,b'0',b'0',b'0','role:edit',null,null,'1614931124782','1614931124782'); +INSERT INTO `sys_menu` (`menu_id`, `pid`, `sub_count`, `type`, `title`, `name`, `component`, `menu_sort`, `icon`, `path`, `i_frame`, `cache`, `hidden`, `permission`, `create_by`, `update_by`, `create_time`, `update_time`) VALUES ('24','34','0','2','创建连接',null,null,'997',null,null,b'0',b'0',b'0','datasource:add',null,null,'1614931168956','1615783705537'); +INSERT INTO `sys_menu` (`menu_id`, `pid`, `sub_count`, `type`, `title`, `name`, `component`, `menu_sort`, `icon`, `path`, `i_frame`, `cache`, `hidden`, `permission`, `create_by`, `update_by`, `create_time`, `update_time`) VALUES ('25','34','0','2','删除连接',null,null,'999',null,null,b'0',b'0',b'0','datasource:del',null,null,'1614931205899','1614931205899'); +INSERT INTO `sys_menu` (`menu_id`, `pid`, 
`sub_count`, `type`, `title`, `name`, `component`, `menu_sort`, `icon`, `path`, `i_frame`, `cache`, `hidden`, `permission`, `create_by`, `update_by`, `create_time`, `update_time`) VALUES ('26','34','0','2','编辑连接',null,null,'999',null,null,b'0',b'0',b'0','datasource:edit',null,null,'1614931234105','1614931234105'); +INSERT INTO `sys_menu` (`menu_id`, `pid`, `sub_count`, `type`, `title`, `name`, `component`, `menu_sort`, `icon`, `path`, `i_frame`, `cache`, `hidden`, `permission`, `create_by`, `update_by`, `create_time`, `update_time`) VALUES ('27','34','0','2','校验连接',null,null,'999',null,null,b'0',b'0',b'0','datasource:validate',null,null,'1614931268578','1614931268578'); +INSERT INTO `sys_menu` (`menu_id`, `pid`, `sub_count`, `type`, `title`, `name`, `component`, `menu_sort`, `icon`, `path`, `i_frame`, `cache`, `hidden`, `permission`, `create_by`, `update_by`, `create_time`, `update_time`) VALUES ('28','2','0','2','修改密码',null,null,'999',null,null,b'0',b'0',b'0','user:editPwd',null,null,'1615275128262','1615275128262'); +INSERT INTO `sys_menu` (`menu_id`, `pid`, `sub_count`, `type`, `title`, `name`, `component`, `menu_sort`, `icon`, `path`, `i_frame`, `cache`, `hidden`, `permission`, `create_by`, `update_by`, `create_time`, `update_time`) VALUES ('29','0','1','0','仪表盘','仪表盘管理','Layout','1',null,'/panel',null,b'0',b'0','panel:read',null,null,null,'1619081454146'); +INSERT INTO `sys_menu` (`menu_id`, `pid`, `sub_count`, `type`, `title`, `name`, `component`, `menu_sort`, `icon`, `path`, `i_frame`, `cache`, `hidden`, `permission`, `create_by`, `update_by`, `create_time`, `update_time`) VALUES ('30','29','0','1','仪表盘1','仪表盘','panel/index','1',null,'index',b'0',b'0',b'0','panel:read',null,null,null,'1619081449067'); +INSERT INTO `sys_menu` (`menu_id`, `pid`, `sub_count`, `type`, `title`, `name`, `component`, `menu_sort`, `icon`, `path`, `i_frame`, `cache`, `hidden`, `permission`, `create_by`, `update_by`, `create_time`, `update_time`) VALUES ('33','0','1','0','数据源','数据源','Layout','4',null,'/datasource',b'0',b'0',b'0','dir:datasource',null,null,'1619083205537','1619083205537'); +INSERT INTO `sys_menu` (`menu_id`, `pid`, `sub_count`, `type`, `title`, `name`, `component`, `menu_sort`, `icon`, `path`, `i_frame`, `cache`, `hidden`, `permission`, `create_by`, `update_by`, `create_time`, `update_time`) VALUES ('34','33','4','1','数据源1','数据源1','system/datasource/index','1',null,'index',b'0',b'0',b'0','datasource:read',null,null,null,null); -DROP TABLE IF EXISTS `sys_roles_menus`; -CREATE TABLE `sys_roles_menus` ( - `menu_id` bigint(20) NOT NULL COMMENT '菜单ID', - `role_id` bigint(20) NOT NULL COMMENT '角色ID', - PRIMARY KEY (`menu_id`,`role_id`) USING BTREE, - KEY `FKcngg2qadojhi3a651a5adkvbq` (`role_id`) USING BTREE -) ENGINE=InnoDB DEFAULT CHARSET=utf8 ROW_FORMAT=COMPACT COMMENT='角色菜单关联'; --- ---------------------------- --- Records of sys_roles_menus --- ---------------------------- -BEGIN; -INSERT INTO `sys_roles_menus` VALUES (1, 3); -INSERT INTO `sys_roles_menus` VALUES (2, 3); -INSERT INTO `sys_roles_menus` VALUES (3, 3); -INSERT INTO `sys_roles_menus` VALUES (4, 3); -INSERT INTO `sys_roles_menus` VALUES (5, 3); -INSERT INTO `sys_roles_menus` VALUES (6, 3); -INSERT INTO `sys_roles_menus` VALUES (7, 3); -INSERT INTO `sys_roles_menus` VALUES (8, 3); -INSERT INTO `sys_roles_menus` VALUES (9, 3); -INSERT INTO `sys_roles_menus` VALUES (10, 3); -INSERT INTO `sys_roles_menus` VALUES (11, 3); -INSERT INTO `sys_roles_menus` VALUES (12, 3); -INSERT INTO `sys_roles_menus` VALUES (13, 3); -INSERT INTO 
`sys_roles_menus` VALUES (14, 3); -INSERT INTO `sys_roles_menus` VALUES (15, 3); -INSERT INTO `sys_roles_menus` VALUES (16, 3); -INSERT INTO `sys_roles_menus` VALUES (17, 3); -INSERT INTO `sys_roles_menus` VALUES (18, 3); -INSERT INTO `sys_roles_menus` VALUES (19, 3); -INSERT INTO `sys_roles_menus` VALUES (20, 3); -INSERT INTO `sys_roles_menus` VALUES (21, 3); -INSERT INTO `sys_roles_menus` VALUES (22, 3); -INSERT INTO `sys_roles_menus` VALUES (23, 3); -INSERT INTO `sys_roles_menus` VALUES (24, 3); -INSERT INTO `sys_roles_menus` VALUES (25, 3); -INSERT INTO `sys_roles_menus` VALUES (26, 3); -INSERT INTO `sys_roles_menus` VALUES (27, 3); -INSERT INTO `sys_roles_menus` VALUES (28, 3); -INSERT INTO `sys_roles_menus` VALUES (1, 4); -INSERT INTO `sys_roles_menus` VALUES (2, 4); -INSERT INTO `sys_roles_menus` VALUES (3, 4); -INSERT INTO `sys_roles_menus` VALUES (12, 4); -COMMIT; +DROP TABLE IF EXISTS `sys_user` ; - -DROP TABLE IF EXISTS `sys_user`; CREATE TABLE `sys_user` ( `user_id` bigint(20) NOT NULL AUTO_INCREMENT COMMENT 'ID', `dept_id` bigint(20) DEFAULT NULL COMMENT '部门名称', @@ -184,19 +110,83 @@ CREATE TABLE `sys_user` ( UNIQUE KEY `uniq_email` (`email`), KEY `FK5rwmryny6jthaaxkogownknqp` (`dept_id`) USING BTREE, KEY `inx_enabled` (`enabled`) -) ENGINE=InnoDB AUTO_INCREMENT=7 DEFAULT CHARSET=utf8 ROW_FORMAT=COMPACT COMMENT='系统用户'; +) ENGINE=InnoDB AUTO_INCREMENT=20 DEFAULT CHARSET=utf8 ROW_FORMAT=COMPACT COMMENT='系统用户'; --- ---------------------------- --- Records of sys_user --- ---------------------------- -BEGIN; -INSERT INTO `sys_user` VALUES (4, 0, 'admin', '管理员', '男', NULL, 'admin@fit2cloud.com', 'e10adc3949ba59abbe56e057f20f883e', b'1', 1, NULL, NULL, NULL, NULL, 1615184951534); -COMMIT; +INSERT INTO `sys_user` (`user_id`, `dept_id`, `username`, `nick_name`, `gender`, `phone`, `email`, `password`, `is_admin`, `enabled`, `create_by`, `update_by`, `pwd_reset_time`, `create_time`, `update_time`) VALUES ('4','0','admin','管理员','男',null,'admin@fit2cloud.com','e10adc3949ba59abbe56e057f20f883e',b'1','1',null,null,null,null,'1615184951534'); +INSERT INTO `sys_user` (`user_id`, `dept_id`, `username`, `nick_name`, `gender`, `phone`, `email`, `password`, `is_admin`, `enabled`, `create_by`, `update_by`, `pwd_reset_time`, `create_time`, `update_time`) VALUES ('19','25','demo','demo','男',null,'demo@fit2cloud.com','e10adc3949ba59abbe56e057f20f883e',b'0','1',null,null,null,'1619086036234','1619086036234'); + + + +DROP TABLE IF EXISTS `sys_role` ; + +CREATE TABLE `sys_role` ( + `role_id` bigint(20) NOT NULL AUTO_INCREMENT COMMENT 'ID', + `code` varchar(100) NOT NULL COMMENT '代码', + `name` varchar(255) NOT NULL COMMENT '名称', + `description` varchar(255) DEFAULT NULL COMMENT '描述', + `create_by` varchar(255) DEFAULT NULL COMMENT '创建者', + `update_by` varchar(255) DEFAULT NULL COMMENT '更新者', + `create_time` bigint(13) DEFAULT NULL COMMENT '创建日期', + `update_time` bigint(13) DEFAULT NULL COMMENT '更新时间', + PRIMARY KEY (`role_id`) USING BTREE, + UNIQUE KEY `uniq_name` (`name`), + KEY `role_name_index` (`name`) +) ENGINE=InnoDB AUTO_INCREMENT=5 DEFAULT CHARSET=utf8 ROW_FORMAT=COMPACT COMMENT='角色表'; + +INSERT INTO `sys_role` (`role_id`, `code`, `name`, `description`, `create_by`, `update_by`, `create_time`, `update_time`) VALUES ('3','admin','管理员',null,null,null,null,null); +INSERT INTO `sys_role` (`role_id`, `code`, `name`, `description`, `create_by`, `update_by`, `create_time`, `update_time`) VALUES ('4','emp','普通员工',null,null,null,null,null); + + + +DROP TABLE IF EXISTS `sys_roles_menus` ; + +CREATE TABLE 
`sys_roles_menus` ( + `menu_id` bigint(20) NOT NULL COMMENT '菜单ID', + `role_id` bigint(20) NOT NULL COMMENT '角色ID', + PRIMARY KEY (`menu_id`,`role_id`) USING BTREE, + KEY `FKcngg2qadojhi3a651a5adkvbq` (`role_id`) USING BTREE +) ENGINE=InnoDB DEFAULT CHARSET=utf8 ROW_FORMAT=COMPACT COMMENT='角色菜单关联'; + +INSERT INTO `sys_roles_menus` (`menu_id`, `role_id`) VALUES ('1','3'); +INSERT INTO `sys_roles_menus` (`menu_id`, `role_id`) VALUES ('2','3'); +INSERT INTO `sys_roles_menus` (`menu_id`, `role_id`) VALUES ('3','3'); +INSERT INTO `sys_roles_menus` (`menu_id`, `role_id`) VALUES ('4','3'); +INSERT INTO `sys_roles_menus` (`menu_id`, `role_id`) VALUES ('5','3'); +INSERT INTO `sys_roles_menus` (`menu_id`, `role_id`) VALUES ('6','3'); +INSERT INTO `sys_roles_menus` (`menu_id`, `role_id`) VALUES ('7','3'); +INSERT INTO `sys_roles_menus` (`menu_id`, `role_id`) VALUES ('8','3'); +INSERT INTO `sys_roles_menus` (`menu_id`, `role_id`) VALUES ('9','3'); +INSERT INTO `sys_roles_menus` (`menu_id`, `role_id`) VALUES ('10','3'); +INSERT INTO `sys_roles_menus` (`menu_id`, `role_id`) VALUES ('11','3'); +INSERT INTO `sys_roles_menus` (`menu_id`, `role_id`) VALUES ('14','3'); +INSERT INTO `sys_roles_menus` (`menu_id`, `role_id`) VALUES ('15','3'); +INSERT INTO `sys_roles_menus` (`menu_id`, `role_id`) VALUES ('16','3'); +INSERT INTO `sys_roles_menus` (`menu_id`, `role_id`) VALUES ('17','3'); +INSERT INTO `sys_roles_menus` (`menu_id`, `role_id`) VALUES ('18','3'); +INSERT INTO `sys_roles_menus` (`menu_id`, `role_id`) VALUES ('19','3'); +INSERT INTO `sys_roles_menus` (`menu_id`, `role_id`) VALUES ('20','3'); +INSERT INTO `sys_roles_menus` (`menu_id`, `role_id`) VALUES ('21','3'); +INSERT INTO `sys_roles_menus` (`menu_id`, `role_id`) VALUES ('22','3'); +INSERT INTO `sys_roles_menus` (`menu_id`, `role_id`) VALUES ('23','3'); +INSERT INTO `sys_roles_menus` (`menu_id`, `role_id`) VALUES ('24','3'); +INSERT INTO `sys_roles_menus` (`menu_id`, `role_id`) VALUES ('25','3'); +INSERT INTO `sys_roles_menus` (`menu_id`, `role_id`) VALUES ('26','3'); +INSERT INTO `sys_roles_menus` (`menu_id`, `role_id`) VALUES ('27','3'); +INSERT INTO `sys_roles_menus` (`menu_id`, `role_id`) VALUES ('28','3'); +INSERT INTO `sys_roles_menus` (`menu_id`, `role_id`) VALUES ('29','3'); +INSERT INTO `sys_roles_menus` (`menu_id`, `role_id`) VALUES ('30','3'); +INSERT INTO `sys_roles_menus` (`menu_id`, `role_id`) VALUES ('31','3'); +INSERT INTO `sys_roles_menus` (`menu_id`, `role_id`) VALUES ('32','3'); +INSERT INTO `sys_roles_menus` (`menu_id`, `role_id`) VALUES ('33','3'); +INSERT INTO `sys_roles_menus` (`menu_id`, `role_id`) VALUES ('34','3'); +INSERT INTO `sys_roles_menus` (`menu_id`, `role_id`) VALUES ('101','3'); +INSERT INTO `sys_roles_menus` (`menu_id`, `role_id`) VALUES ('29','4'); +INSERT INTO `sys_roles_menus` (`menu_id`, `role_id`) VALUES ('30','4'); + + + +DROP TABLE IF EXISTS `sys_users_roles` ; --- ---------------------------- --- Table structure for sys_users_roles --- ---------------------------- -DROP TABLE IF EXISTS `sys_users_roles`; CREATE TABLE `sys_users_roles` ( `user_id` bigint(20) NOT NULL COMMENT '用户ID', `role_id` bigint(20) NOT NULL COMMENT '角色ID', @@ -204,9 +194,6 @@ CREATE TABLE `sys_users_roles` ( KEY `FKq4eq273l04bpu4efj0jd0jb98` (`role_id`) USING BTREE ) ENGINE=InnoDB DEFAULT CHARSET=utf8 ROW_FORMAT=COMPACT COMMENT='用户角色关联'; --- ---------------------------- --- Records of sys_users_roles --- ---------------------------- -BEGIN; -INSERT INTO `sys_users_roles` VALUES (4, 3); -COMMIT; \ No newline at end of file +INSERT INTO 
`sys_users_roles` (`user_id`, `role_id`) VALUES ('4','3'); +INSERT INTO `sys_users_roles` (`user_id`, `role_id`) VALUES ('19','4'); + diff --git a/frontend/src/api/user.js b/frontend/src/api/user.js index d4676c5d13..fbe62b88a2 100644 --- a/frontend/src/api/user.js +++ b/frontend/src/api/user.js @@ -21,3 +21,11 @@ export function logout() { method: 'post' }) } + +export function validateUserName(data) { + return request({ + url: '/api/auth/validateName', + method: 'post', + data + }) +} diff --git a/frontend/src/components/Breadcrumb/index.vue b/frontend/src/components/Breadcrumb/index.vue index ec8b63d1c7..f0a2ae2a50 100644 --- a/frontend/src/components/Breadcrumb/index.vue +++ b/frontend/src/components/Breadcrumb/index.vue @@ -3,7 +3,7 @@ 当前位置: - {{item.meta.title }} + {{ item.meta.title }} {{ item.meta.title }} @@ -34,7 +34,7 @@ export default { const first = matched[0] if (!this.isDashboard(first)) { - matched = [{ path: '/dashboard', meta: { title: 'Dashboard' }}].concat(matched) + matched = [{ path: '/panel', meta: { title: '仪表板' }}].concat(matched) } this.levelList = matched.filter(item => item.meta && item.meta.title && item.meta.breadcrumb !== false) @@ -44,7 +44,8 @@ export default { if (!name) { return false } - return name.trim().toLocaleLowerCase() === 'Dashboard'.toLocaleLowerCase() + // return name.trim().toLocaleLowerCase() === 'Dashboard'.toLocaleLowerCase() + return name.trim().toLocaleLowerCase() === '仪表板'.toLocaleLowerCase() }, pathCompile(path) { // To solve this problem https://github.com/PanJiaChen/vue-element-admin/issues/561 diff --git a/frontend/src/icons/svg/table-normal.svg b/frontend/src/icons/svg/table-normal.svg new file mode 100644 index 0000000000..bbe3facfdc --- /dev/null +++ b/frontend/src/icons/svg/table-normal.svg @@ -0,0 +1 @@ + diff --git a/frontend/src/lang/zh.js b/frontend/src/lang/zh.js index b5d313db27..4297f9f0b3 100644 --- a/frontend/src/lang/zh.js +++ b/frontend/src/lang/zh.js @@ -600,8 +600,8 @@ export default { avg: '平均', max: '最大值', min: '最小值', - std: '标准差', - var_samp: '方差', + stddev_pop: '标准差', + var_pop: '方差', quick_calc: '快速计算', show_name_set: '显示名设置', color: '颜色', @@ -699,7 +699,14 @@ export default { area_mode: '面积', rose_radius: '圆角', view_name: '视图名称', - name_can_not_empty: '名称不能为空' + name_can_not_empty: '名称不能为空', + custom_count: '记录数', + table_title_fontsize: '表头字体大小', + table_item_fontsize: '表格字体大小', + table_header_bg: '表头背景', + table_item_bg: '表格背景', + table_item_font_color: '字体颜色', + stripe: '斑马纹' }, dataset: { datalist: '数据集', diff --git a/frontend/src/layout/components/Topbar.vue b/frontend/src/layout/components/Topbar.vue index 10e572326c..0a5f5ddeab 100644 --- a/frontend/src/layout/components/Topbar.vue +++ b/frontend/src/layout/components/Topbar.vue @@ -94,7 +94,7 @@ export default { return meta.activeMenu } // 如果是首页,首页高亮 - if (path === '/dashboard') { + if (path === '/panel') { return '/' } // 如果不是首页,高亮一级菜单 diff --git a/frontend/src/router/index.js b/frontend/src/router/index.js index b3b0fd2fd2..c4eaca82c6 100644 --- a/frontend/src/router/index.js +++ b/frontend/src/router/index.js @@ -82,19 +82,25 @@ export const constantRoutes = [ component: () => import('@/components/canvas/components/Editor/PreviewFullScreen'), hidden: true }, - { path: '/', component: Layout, - redirect: '/dashboard', - children: [{ - path: 'dashboard', - name: 'Dashboard', - component: () => import('@/views/dashboard/index'), - meta: { title: '首页', icon: 'dashboard' } - }] + redirect: '/panel', + hidden: true } + // { + // path: '/', + // component: 
Layout, + // redirect: '/panel', + // children: [{ + // path: 'index', + // name: '仪表板', + // component: () => import('@/views/panel/index'), + // meta: { title: '仪表板', icon: 'dashboard' } + // }] + // } + // { // path: '/example', // component: Layout, diff --git a/frontend/src/views/401.vue b/frontend/src/views/401.vue index 8a3b69e09a..75475d5eb6 100644 --- a/frontend/src/views/401.vue +++ b/frontend/src/views/401.vue @@ -13,7 +13,7 @@
  或者你可以去:
  • @@ -39,7 +39,7 @@ export default { methods: { back() { if (this.$route.query.noGoBack) { - this.$router.push({ path: '/dashboard' }) + this.$router.push({ path: '/panel' }) } else { this.$router.go(-1) } diff --git a/frontend/src/views/chart/chart/chart.js b/frontend/src/views/chart/chart/chart.js index 101290218e..629f4303a8 100644 --- a/frontend/src/views/chart/chart/chart.js +++ b/frontend/src/views/chart/chart/chart.js @@ -1,7 +1,11 @@ export const DEFAULT_COLOR_CASE = { value: 'default', colors: ['#5470c6', '#91cc75', '#fac858', '#ee6666', '#73c0de', '#3ba272', '#fc8452', '#9a60b4', '#ea7ccc'], - alpha: 100 + alpha: 100, + tableHeaderBgColor: '#4e81bb', + tableItemBgColor: '#c6d9f0', + tableFontColor: '#000000', + tableStripe: true } export const DEFAULT_SIZE = { barDefault: true, @@ -18,7 +22,9 @@ export const DEFAULT_SIZE = { pieRoseType: 'radius', pieRoseRadius: 5, funnelWidth: 80, - radarShape: 'polygon' + radarShape: 'polygon', + tableTitleFontSize: 12, + tableItemFontSize: 12 } export const DEFAULT_LABEL = { show: false, diff --git a/frontend/src/views/chart/components/ChartComponent.vue b/frontend/src/views/chart/components/ChartComponent.vue index aeb7addab9..34ec088d9f 100644 --- a/frontend/src/views/chart/components/ChartComponent.vue +++ b/frontend/src/views/chart/components/ChartComponent.vue @@ -21,10 +21,13 @@ export default { type: Object, required: true }, - filter: { - type: Object, - required: false - } + filter: { + type: Object, + required: false, + default: function() { + return {} + } + } }, data() { return { diff --git a/frontend/src/views/chart/components/component-style/TitleSelector.vue b/frontend/src/views/chart/components/component-style/TitleSelector.vue index 400f1b37bc..75747febb3 100644 --- a/frontend/src/views/chart/components/component-style/TitleSelector.vue +++ b/frontend/src/views/chart/components/component-style/TitleSelector.vue @@ -26,7 +26,7 @@ {{ $t('chart.text_pos_right') }} - + {{ $t('chart.text_pos_top') }} {{ $t('chart.text_pos_center') }} diff --git a/frontend/src/views/chart/components/drag-item/QuotaItem.vue b/frontend/src/views/chart/components/drag-item/QuotaItem.vue index ae5a1f5738..df81d00a2d 100644 --- a/frontend/src/views/chart/components/drag-item/QuotaItem.vue +++ b/frontend/src/views/chart/components/drag-item/QuotaItem.vue @@ -3,7 +3,9 @@ - {{ item.name }}{{ $t('chart.'+item.summary) }} + {{ item.name }} + {{ $t('chart.'+item.summary) }} + @@ -17,17 +19,17 @@ - {{ $t('chart.sum') }} - {{ $t('chart.count') }} - {{ $t('chart.avg') }} - {{ $t('chart.max') }} - {{ $t('chart.min') }} - {{ $t('chart.std') }} - {{ $t('chart.var_samp') }} + {{ $t('chart.count') }} + {{ $t('chart.sum') }} + {{ $t('chart.avg') }} + {{ $t('chart.max') }} + {{ $t('chart.min') }} + {{ $t('chart.stddev_pop') }} + {{ $t('chart.var_pop') }} - + diff --git a/frontend/src/views/chart/components/shape-attr/ColorSelector.vue b/frontend/src/views/chart/components/shape-attr/ColorSelector.vue index c3491ca085..da36e6547d 100644 --- a/frontend/src/views/chart/components/shape-attr/ColorSelector.vue +++ b/frontend/src/views/chart/components/shape-attr/ColorSelector.vue @@ -8,8 +8,8 @@ > - - + +
    @@ -18,6 +18,20 @@ + + + + + + + + + + + + {{ $t('chart.stripe') }} + + @@ -31,6 +45,8 @@ + + diff --git a/frontend/src/views/chart/group/Group.vue b/frontend/src/views/chart/group/Group.vue index e15470f6fd..23a4649298 100644 --- a/frontend/src/views/chart/group/Group.vue +++ b/frontend/src/views/chart/group/Group.vue @@ -516,9 +516,9 @@ export default { }, sceneClick(data, node) { - this.$store.dispatch('chart/setViewId', null) - this.$store.dispatch('chart/setViewId', data.id) - this.$emit('switchComponent', { name: 'ChartEdit' }) + // this.$store.dispatch('chart/setViewId', null) + // this.$store.dispatch('chart/setViewId', data.id) + this.$emit('switchComponent', { name: 'ChartEdit', param: { 'id': data.id }}) }, selectTable() { @@ -565,8 +565,8 @@ export default { this.$store.dispatch('chart/setTableId', null) this.$store.dispatch('chart/setTableId', this.table.id) // this.$router.push('/chart/chart-edit') - this.$emit('switchComponent', { name: 'ChartEdit' }) - this.$store.dispatch('chart/setViewId', response.data.id) + this.$emit('switchComponent', { name: 'ChartEdit', param: { 'id': response.data.id }}) + // this.$store.dispatch('chart/setViewId', response.data.id) this.chartTree() }) }, diff --git a/frontend/src/views/chart/view/ChartEdit.vue b/frontend/src/views/chart/view/ChartEdit.vue index e1b3817c0e..2f67e6c948 100644 --- a/frontend/src/views/chart/view/ChartEdit.vue +++ b/frontend/src/views/chart/view/ChartEdit.vue @@ -1,6 +1,6 @@