Merge remote-tracking branch 'origin/main' into main

# Conflicts:
#	frontend/src/components/canvas/components/Toolbar.vue
#	frontend/src/views/panel/edit/index.vue
wangjiahao 2021-04-27 10:30:04 +08:00
commit 0f644e7842
33 changed files with 1119 additions and 663 deletions

View File

@ -7,6 +7,8 @@ import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import java.util.Map;
@Api(tags = "权限:权限管理")
@RequestMapping("/api/auth")
@ -26,6 +28,9 @@ public interface AuthApi {
@PostMapping("/logout")
String logout();
@PostMapping("/validateName")
Boolean validateName(Map<String, String> nameDto);
@GetMapping("/test")
String test();
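For reference, a minimal client-side sketch of calling the new anonymous endpoint (POST /api/auth/validateName returns true when the user name exists). The host, port and payload below are assumptions, not part of this commit:

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class ValidateNameExample {
    public static void main(String[] args) throws Exception {
        // Hypothetical host/port; the JSON key matches the Map<String, String> parameter ("userName").
        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create("http://localhost:8081/api/auth/validateName"))
                .header("Content-Type", "application/json")
                .POST(HttpRequest.BodyPublishers.ofString("{\"userName\":\"admin\"}"))
                .build();
        HttpResponse<String> response = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString());
        System.out.println(response.body()); // expected: "true" or "false"
    }
}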

View File

@ -55,12 +55,19 @@ public class JWTFilter extends BasicHttpAuthenticationFilter {
throw new AuthenticationException(expireMessage);
}
if (JWTUtils.needRefresh(authorization)){
String oldAuthorization = authorization;
authorization = refreshToken(request, response);
JWTUtils.removeTokenExpire(oldAuthorization);
}
// Remove the old operation timestamp
JWTUtils.removeTokenExpire(authorization);
// Record the new operation timestamp
JWTUtils.addTokenExpire(authorization);
JWTToken token = new JWTToken(authorization);
Subject subject = getSubject(request, response);
// Submit to the realm for login; on failure it throws an exception, which is caught upstream
subject.login(token);
return true;
}
@ -98,10 +105,10 @@ public class JWTFilter extends BasicHttpAuthenticationFilter {
String password = user.getPassword();
// Remove the old token's operation timestamp
JWTUtils.removeTokenExpire(token);
// JWTUtils.removeTokenExpire(token);
String newToken = JWTUtils.sign(tokenInfo, password);
// Record the new token's operation timestamp
JWTUtils.addTokenExpire(newToken);
// JWTUtils.addTokenExpire(newToken);
JWTToken jwtToken = new JWTToken(newToken);
this.getSubject(request, response).login(jwtToken);

View File

@ -82,6 +82,15 @@ public class AuthServer implements AuthApi {
return "success";
}
@Override
public Boolean validateName(@RequestBody Map<String, String> nameDto) {
String userName = nameDto.get("userName");
if (StringUtils.isEmpty(userName)) return false;
SysUserEntity userEntity = authUserService.getUserByName(userName);
if (ObjectUtils.isEmpty(userEntity)) return false;
return true;
}
@Override
public Boolean isLogin() {
return null;

View File

@ -42,9 +42,14 @@ public class ShiroServiceImpl implements ShiroService {
// Link validation endpoints
filterChainDefinitionMap.put("/api/link/validate**", ANON);
filterChainDefinitionMap.put("/panel/group/findOne/**", ANON);
filterChainDefinitionMap.put("/chart/view/getData/**", ANON);
filterChainDefinitionMap.put("/api/auth/login", ANON);
filterChainDefinitionMap.put("/api/auth/validateName", ANON);
filterChainDefinitionMap.put("/unauth", ANON);
filterChainDefinitionMap.put("/display/**", ANON);
filterChainDefinitionMap.put("/tokenExpired", ANON);

View File

@ -20,9 +20,9 @@ public class JWTUtils {
// Token expiration: 1 min (an expired token is refreshed automatically, so the same token is never kept for long)
private static final long EXPIRE_TIME = 5*60*1000;
private static final long EXPIRE_TIME = 1*60*1000;
// Login interval: 10 min; beyond this the user is forced to log in again
private static final long Login_Interval = 30*60*1000;
private static final long Login_Interval = 10*60*1000;
/**
@ -81,8 +81,17 @@ public class JWTUtils {
public static boolean loginExpire(String token){
Long now = System.currentTimeMillis();
Long lastOperateTime = tokenLastOperateTime(token);
if (lastOperateTime == null) return true;
return now - lastOperateTime > Login_Interval;
boolean isExpire = false;
if (lastOperateTime != null) {
isExpire = now - lastOperateTime > Login_Interval;
}
if (isExpire) {
System.out.println("-----------------------");
System.out.println("-----上次操作时间是["+lastOperateTime+"]-----");
System.out.println("-----当前操作时间是["+now+"]-----");
System.out.println("-----------------------");
}
return isExpire;
}
public static Date getExp(String token) {
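JWTUtils.addTokenExpire, removeTokenExpire and tokenLastOperateTime are not part of this diff; a minimal sketch of the bookkeeping they imply, assuming an in-memory map from token to last operation time (the real implementation may use a cache instead):

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

public class TokenExpireSketch {
    // token -> timestamp (ms) of the last request made with it
    private static final Map<String, Long> LAST_OPERATE = new ConcurrentHashMap<>();

    public static void addTokenExpire(String token) {
        LAST_OPERATE.put(token, System.currentTimeMillis());
    }

    public static void removeTokenExpire(String token) {
        LAST_OPERATE.remove(token);
    }

    public static Long tokenLastOperateTime(String token) {
        return LAST_OPERATE.get(token);
    }
}

With that model, loginExpire() above simply checks whether the recorded timestamp is older than Login_Interval.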

View File

@ -2,7 +2,6 @@ package io.dataease.config;
import com.fit2cloud.autoconfigure.QuartzAutoConfiguration;
import io.dataease.commons.utils.CommonThreadPool;
import org.apache.spark.sql.SparkSession;
import org.pentaho.di.core.KettleEnvironment;
import org.pentaho.di.repository.filerep.KettleFileRepository;
import org.pentaho.di.repository.filerep.KettleFileRepositoryMeta;
@ -32,31 +31,31 @@ public class CommonConfig {
// return configuration;
// }
@Bean
@ConditionalOnMissingBean
public SparkSession javaSparkSession() {
SparkSession spark = SparkSession.builder()
.appName(env.getProperty("spark.appName", "DataeaseJob"))
.master(env.getProperty("spark.master", "local[*]"))
.config("spark.scheduler.mode", env.getProperty("spark.scheduler.mode", "FAIR"))
.config("spark.serializer", env.getProperty("spark.serializer", "org.apache.spark.serializer.KryoSerializer"))
.config("spark.executor.cores", env.getProperty("spark.executor.cores", "8"))
.config("spark.executor.memory", env.getProperty("spark.executor.memory", "6442450944b"))
.config("spark.locality.wait", env.getProperty("spark.locality.wait", "600000"))
.config("spark.maxRemoteBlockSizeFetchToMem", env.getProperty("spark.maxRemoteBlockSizeFetchToMem", "2000m"))
.config("spark.shuffle.detectCorrupt", env.getProperty("spark.shuffle.detectCorrupt", "false"))
.config("spark.shuffle.service.enabled", env.getProperty("spark.shuffle.service.enabled", "true"))
.config("spark.sql.adaptive.enabled", env.getProperty("spark.sql.adaptive.enabled", "true"))
.config("spark.sql.adaptive.shuffle.targetPostShuffleInputSize", env.getProperty("spark.sql.adaptive.shuffle.targetPostShuffleInputSize", "200M"))
.config("spark.sql.broadcastTimeout", env.getProperty("spark.sql.broadcastTimeout", "12000"))
.config("spark.sql.retainGroupColumns", env.getProperty("spark.sql.retainGroupColumns", "false"))
.config("spark.sql.sortMergeJoinExec.buffer.in.memory.threshold", env.getProperty("spark.sql.sortMergeJoinExec.buffer.in.memory.threshold", "100000"))
.config("spark.sql.sortMergeJoinExec.buffer.spill.threshold", env.getProperty("spark.sql.sortMergeJoinExec.buffer.spill.threshold", "100000"))
.config("spark.sql.variable.substitute", env.getProperty("spark.sql.variable.substitute", "false"))
.config("spark.temp.expired.time", env.getProperty("spark.temp.expired.time", "3600"))
.getOrCreate();
return spark;
}
// @Bean
// @ConditionalOnMissingBean
// public SparkSession javaSparkSession() {
// SparkSession spark = SparkSession.builder()
// .appName(env.getProperty("spark.appName", "DataeaseJob"))
// .master(env.getProperty("spark.master", "local[*]"))
// .config("spark.scheduler.mode", env.getProperty("spark.scheduler.mode", "FAIR"))
//// .config("spark.serializer", env.getProperty("spark.serializer", "org.apache.spark.serializer.KryoSerializer"))
//// .config("spark.executor.cores", env.getProperty("spark.executor.cores", "8"))
//// .config("spark.executor.memory", env.getProperty("spark.executor.memory", "6442450944b"))
//// .config("spark.locality.wait", env.getProperty("spark.locality.wait", "600000"))
//// .config("spark.maxRemoteBlockSizeFetchToMem", env.getProperty("spark.maxRemoteBlockSizeFetchToMem", "2000m"))
//// .config("spark.shuffle.detectCorrupt", env.getProperty("spark.shuffle.detectCorrupt", "false"))
//// .config("spark.shuffle.service.enabled", env.getProperty("spark.shuffle.service.enabled", "true"))
//// .config("spark.sql.adaptive.enabled", env.getProperty("spark.sql.adaptive.enabled", "true"))
//// .config("spark.sql.adaptive.shuffle.targetPostShuffleInputSize", env.getProperty("spark.sql.adaptive.shuffle.targetPostShuffleInputSize", "200M"))
//// .config("spark.sql.broadcastTimeout", env.getProperty("spark.sql.broadcastTimeout", "12000"))
//// .config("spark.sql.retainGroupColumns", env.getProperty("spark.sql.retainGroupColumns", "false"))
//// .config("spark.sql.sortMergeJoinExec.buffer.in.memory.threshold", env.getProperty("spark.sql.sortMergeJoinExec.buffer.in.memory.threshold", "100000"))
//// .config("spark.sql.sortMergeJoinExec.buffer.spill.threshold", env.getProperty("spark.sql.sortMergeJoinExec.buffer.spill.threshold", "100000"))
//// .config("spark.sql.variable.substitute", env.getProperty("spark.sql.variable.substitute", "false"))
//// .config("spark.temp.expired.time", env.getProperty("spark.temp.expired.time", "3600"))
// .getOrCreate();
// return spark;
// }
@Bean
@ConditionalOnMissingBean

View File

@ -1,21 +1,12 @@
package io.dataease.listener;
import io.dataease.base.domain.DatasetTable;
import io.dataease.base.domain.DatasetTableExample;
import io.dataease.base.domain.DatasetTableField;
import io.dataease.base.mapper.DatasetTableMapper;
import io.dataease.commons.utils.CommonThreadPool;
import io.dataease.datasource.service.DatasourceService;
import io.dataease.service.dataset.DataSetTableFieldsService;
import io.dataease.service.spark.SparkCalc;
import org.springframework.boot.context.event.ApplicationReadyEvent;
import org.springframework.context.ApplicationListener;
import org.springframework.core.annotation.Order;
import org.springframework.core.env.Environment;
import org.springframework.stereotype.Component;
import javax.annotation.Resource;
import java.util.List;
@Component
@Order(value = 2)

View File

@ -1,52 +1,47 @@
package io.dataease.listener;
import io.dataease.base.domain.DatasetTable;
import io.dataease.base.domain.DatasetTableExample;
import io.dataease.base.domain.DatasetTableField;
import io.dataease.base.mapper.DatasetTableMapper;
import io.dataease.commons.utils.CommonThreadPool;
import io.dataease.service.dataset.DataSetTableFieldsService;
import io.dataease.service.spark.SparkCalc;
import org.springframework.boot.context.event.ApplicationReadyEvent;
import org.springframework.context.ApplicationListener;
import org.springframework.core.annotation.Order;
import org.springframework.core.env.Environment;
import org.springframework.stereotype.Component;
import javax.annotation.Resource;
import java.util.List;
@Component
@Order(value = 2)
public class AppStartReadHBaseListener implements ApplicationListener<ApplicationReadyEvent> {
@Resource
private CommonThreadPool commonThreadPool;
@Resource
private SparkCalc sparkCalc;
@Resource
private Environment env; // holds values from the configuration files
@Resource
private DatasetTableMapper datasetTableMapper;
@Resource
private DataSetTableFieldsService dataSetTableFieldsService;
@Override
public void onApplicationEvent(ApplicationReadyEvent applicationReadyEvent) {
// System.out.println("================= Read HBase start =================");
// // 项目启动从数据集中找到定时抽取的表从HBase中读取放入缓存
// DatasetTableExample datasetTableExample = new DatasetTableExample();
// datasetTableExample.createCriteria().andModeEqualTo(1);
// List<DatasetTable> datasetTables = datasetTableMapper.selectByExampleWithBLOBs(datasetTableExample);
// for (DatasetTable table : datasetTables) {
//// commonThreadPool.addTask(() -> {
// try {
// List<DatasetTableField> fields = dataSetTableFieldsService.getFieldsByTableId(table.getId());
// sparkCalc.getHBaseDataAndCache(table.getId(), fields);
// } catch (Exception e) {
// e.printStackTrace();
// }
//// });
// }
}
}
//package io.dataease.listener;
//
//import io.dataease.base.mapper.DatasetTableMapper;
//import io.dataease.commons.utils.CommonThreadPool;
//import io.dataease.service.dataset.DataSetTableFieldsService;
//import org.springframework.boot.context.event.ApplicationReadyEvent;
//import org.springframework.context.ApplicationListener;
//import org.springframework.core.annotation.Order;
//import org.springframework.core.env.Environment;
//import org.springframework.stereotype.Component;
//
//import javax.annotation.Resource;
//
//@Component
//@Order(value = 2)
//public class AppStartReadHBaseListener implements ApplicationListener<ApplicationReadyEvent> {
// @Resource
// private CommonThreadPool commonThreadPool;
//// @Resource
//// private SparkCalc sparkCalc;
// @Resource
// private Environment env; // 保存了配置文件的信息
//
// @Resource
// private DatasetTableMapper datasetTableMapper;
// @Resource
// private DataSetTableFieldsService dataSetTableFieldsService;
//
// @Override
// public void onApplicationEvent(ApplicationReadyEvent applicationReadyEvent) {
//// System.out.println("================= Read HBase start =================");
//// // 项目启动从数据集中找到定时抽取的表从HBase中读取放入缓存
//// DatasetTableExample datasetTableExample = new DatasetTableExample();
//// datasetTableExample.createCriteria().andModeEqualTo(1);
//// List<DatasetTable> datasetTables = datasetTableMapper.selectByExampleWithBLOBs(datasetTableExample);
//// for (DatasetTable table : datasetTables) {
////// commonThreadPool.addTask(() -> {
//// try {
//// List<DatasetTableField> fields = dataSetTableFieldsService.getFieldsByTableId(table.getId());
//// sparkCalc.getHBaseDataAndCache(table.getId(), fields);
//// } catch (Exception e) {
//// e.printStackTrace();
//// }
////// });
//// }
// }
//}

View File

@ -1,5 +1,6 @@
package io.dataease.service.chart;
import com.alibaba.fastjson.JSONObject;
import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
import io.dataease.base.domain.*;
@ -20,7 +21,6 @@ import io.dataease.dto.chart.Series;
import io.dataease.dto.dataset.DataTableInfoDTO;
import io.dataease.service.dataset.DataSetTableFieldsService;
import io.dataease.service.dataset.DataSetTableService;
import io.dataease.service.spark.SparkCalc;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.ObjectUtils;
import org.apache.commons.lang3.StringUtils;
@ -28,6 +28,7 @@ import org.springframework.stereotype.Service;
import javax.annotation.Resource;
import java.math.BigDecimal;
import java.math.RoundingMode;
import java.text.MessageFormat;
import java.util.*;
@ -43,8 +44,8 @@ public class ChartViewService {
private DataSetTableService dataSetTableService;
@Resource
private DatasourceService datasourceService;
@Resource
private SparkCalc sparkCalc;
// @Resource
// private SparkCalc sparkCalc;
@Resource
private DataSetTableFieldsService dataSetTableFieldsService;
@ -97,8 +98,6 @@ public class ChartViewService {
List<ChartViewFieldDTO> yAxis = new Gson().fromJson(view.getYAxis(), new TypeToken<List<ChartViewFieldDTO>>() {
}.getType());
List<String> x = new ArrayList<>();
List<Series> series = new ArrayList<>();
if (CollectionUtils.isEmpty(xAxis) || CollectionUtils.isEmpty(yAxis)) {
ChartViewDTO dto = new ChartViewDTO();
BeanUtils.copyBean(dto, view);
@ -146,11 +145,23 @@ public class ChartViewService {
data = datasourceProvider.getData(datasourceRequest);
} else if (table.getMode() == 1) {// extraction mode
// Fetch the dataset fields
List<DatasetTableField> fields = dataSetTableFieldsService.getFieldsByTableId(table.getId());
data = sparkCalc.getData(table.getId(), fields, xAxis, yAxis, "tmp_" + view.getId().split("-")[0], extFilterList);
// List<DatasetTableField> fields = dataSetTableFieldsService.getFieldsByTableId(table.getId());
// data = sparkCalc.getData(table.getId(), fields, xAxis, yAxis, "tmp_" + view.getId().split("-")[0], extFilterList);
// Connect to Doris and build a query against the Doris datasource
Datasource ds = dorisDatasource();
DatasourceProvider datasourceProvider = ProviderFactory.getProvider(ds.getType());
DatasourceRequest datasourceRequest = new DatasourceRequest();
datasourceRequest.setDatasource(ds);
String tableName = "ds_" + table.getId().replaceAll("-", "_");
datasourceRequest.setTable(tableName);
datasourceRequest.setQuery(getSQL(ds.getType(), tableName, xAxis, yAxis, extFilterList));
data = datasourceProvider.getData(datasourceRequest);
}
// Chart components can be extended further
List<String> x = new ArrayList<>();
List<Series> series = new ArrayList<>();
for (ChartViewFieldDTO y : yAxis) {
Series series1 = new Series();
series1.setName(y.getName());
@ -177,9 +188,29 @@ public class ChartViewService {
}
}
}
// Table component
List<ChartViewFieldDTO> fields = new ArrayList<>();
List<Map<String, Object>> tableRow = new ArrayList<>();
fields.addAll(xAxis);
fields.addAll(yAxis);
data.forEach(ele -> {
Map<String, Object> d = new HashMap<>();
for (int i = 0; i < fields.size(); i++) {
ChartViewFieldDTO chartViewFieldDTO = fields.get(i);
if (chartViewFieldDTO.getDeType() == 0 || chartViewFieldDTO.getDeType() == 1) {
d.put(fields.get(i).getOriginName(), ele[i]);
} else if (chartViewFieldDTO.getDeType() == 2 || chartViewFieldDTO.getDeType() == 3) {
d.put(fields.get(i).getOriginName(), new BigDecimal(ele[i]).setScale(2, RoundingMode.HALF_UP));
}
}
tableRow.add(d);
});
Map<String, Object> map = new HashMap<>();
map.put("x", x);
map.put("series", series);
map.put("fields", fields);
map.put("tableRow", tableRow);
ChartViewDTO dto = new ChartViewDTO();
BeanUtils.copyBean(dto, view);
@ -214,6 +245,24 @@ public class ChartViewService {
return filter.toString();
}
public Datasource dorisDatasource() {
JSONObject jsonObject = new JSONObject();
jsonObject.put("dataSourceType", "jdbc");
jsonObject.put("dataBase", "example_db");
jsonObject.put("username", "root");
jsonObject.put("password", "dataease");
jsonObject.put("host", "59.110.64.159");
jsonObject.put("port", "9030");
Datasource datasource = new Datasource();
datasource.setId("doris");
datasource.setName("doris");
datasource.setDesc("doris");
datasource.setType("mysql");
datasource.setConfiguration(jsonObject.toJSONString());
return datasource;
}
public String getSQL(String type, String table, List<ChartViewFieldDTO> xAxis, List<ChartViewFieldDTO> yAxis, List<ChartExtFilterRequest> extFilterRequestList) {
DatasourceTypes datasourceType = DatasourceTypes.valueOf(type);
switch (datasourceType) {
@ -227,10 +276,10 @@ public class ChartViewService {
public String transMysqlSQL(String table, List<ChartViewFieldDTO> xAxis, List<ChartViewFieldDTO> yAxis, List<ChartExtFilterRequest> extFilterRequestList) {
// Field aggregation, sorting, etc.
String[] field = yAxis.stream().map(y -> "CAST(" + y.getSummary() + "(" + y.getOriginName() + ") AS DECIMAL(20,2)) AS _" + y.getSummary() + "_" + y.getOriginName()).toArray(String[]::new);
String[] field = yAxis.stream().map(y -> "CAST(" + y.getSummary() + "(" + y.getOriginName() + ") AS DECIMAL(20,2)) AS _" + y.getSummary() + "_" + (StringUtils.equalsIgnoreCase(y.getOriginName(), "*") ? "" : y.getOriginName())).toArray(String[]::new);
String[] group = xAxis.stream().map(ChartViewFieldDTO::getOriginName).toArray(String[]::new);
String[] order = yAxis.stream().filter(y -> StringUtils.isNotEmpty(y.getSort()) && !StringUtils.equalsIgnoreCase(y.getSort(), "none"))
.map(y -> "_" + y.getSummary() + "_" + y.getOriginName() + " " + y.getSort()).toArray(String[]::new);
.map(y -> "_" + y.getSummary() + "_" + (StringUtils.equalsIgnoreCase(y.getOriginName(), "*") ? "" : y.getOriginName()) + " " + y.getSort()).toArray(String[]::new);
String sql = MessageFormat.format("SELECT {0},{1} FROM {2} WHERE 1=1 {3} GROUP BY {4} ORDER BY null,{5}",
StringUtils.join(group, ","),
@ -245,7 +294,19 @@ public class ChartViewService {
// If there are filters on aggregated result fields, wrap the SQL in another layer
String[] resultFilter = yAxis.stream().filter(y -> CollectionUtils.isNotEmpty(y.getFilter()) && y.getFilter().size() > 0)
.map(y -> {
String[] s = y.getFilter().stream().map(f -> "AND _" + y.getSummary() + "_" + y.getOriginName() + transMysqlFilterTerm(f.getTerm()) + f.getValue()).toArray(String[]::new);
String[] s = y.getFilter().stream().map(f -> {
StringBuilder filter = new StringBuilder();
filter.append("AND _").append(y.getSummary()).append("_").append(StringUtils.equalsIgnoreCase(y.getOriginName(), "*") ? "" : y.getOriginName()).append(transMysqlFilterTerm(f.getTerm()));
if (StringUtils.containsIgnoreCase(f.getTerm(), "null")) {
} else if (StringUtils.containsIgnoreCase(f.getTerm(), "in")) {
filter.append("('").append(StringUtils.join(f.getValue(), "','")).append("')");
} else if (StringUtils.containsIgnoreCase(f.getTerm(), "like")) {
filter.append("%").append(f.getValue()).append("%");
} else {
filter.append(f.getValue());
}
return filter.toString();
}).toArray(String[]::new);
return StringUtils.join(s, " ");
}).toArray(String[]::new);
if (resultFilter.length == 0) {
@ -321,7 +382,7 @@ public class ChartViewService {
return map;
}
public List<ChartView> viewsByIds(List<String> viewIds){
public List<ChartView> viewsByIds(List<String> viewIds) {
ChartViewExample example = new ChartViewExample();
example.createCriteria().andIdIn(viewIds);
return chartViewMapper.selectByExample(example);
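With this change, charts on extracted datasets (mode == 1) no longer go through Spark/HBase: the query is sent to Doris over its MySQL protocol (host, port and credentials hard-coded in dorisDatasource above) against a table named ds_ plus the dataset id with dashes replaced by underscores. A small illustrative sketch of the table-name derivation and the SQL shape transMysqlSQL produces; the dataset id, field names and the "sum" summary value are assumptions:

public class DorisSqlExample {
    public static void main(String[] args) {
        String datasetId = "75a3c210-1d2e-4f3a-9b8c-0a1b2c3d4e5f"; // hypothetical dataset id
        String tableName = "ds_" + datasetId.replaceAll("-", "_");
        // One dimension "province", one measure "sales" summarized with "sum", no filters, no sort:
        String sql = "SELECT province,CAST(sum(sales) AS DECIMAL(20,2)) AS _sum_sales FROM "
                + tableName + " WHERE 1=1  GROUP BY province ORDER BY null";
        System.out.println(sql);
    }
}

The new record-count quota field added in DataSetTableService below uses originName "*", which is why the alias logic now drops the origin name when it equals "*" and produces _count_ rather than the invalid _count_*.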

View File

@ -135,6 +135,19 @@ public class DataSetTableService {
dimension.add(field);
}
});
// quota add count
DatasetTableField count = DatasetTableField.builder()
.id("count")
.tableId(dataSetTableRequest.getId())
.originName("*")
.name("记录数*")
.type("INT")
.checked(true)
.columnIndex(999)
.deType(2)
.build();
quota.add(count);
Map<String, List<DatasetTableField>> map = new HashMap<>();
map.put("dimension", dimension);
map.put("quota", quota);
@ -637,11 +650,12 @@ public class DataSetTableService {
private String saveFile(MultipartFile file) throws Exception {
String filename = file.getOriginalFilename();
File p = new File(path);
String dirPath = path + AuthUtils.getUser().getUsername() + "/";
File p = new File(dirPath);
if (!p.exists()) {
p.mkdirs();
}
String filePath = path + AuthUtils.getUser().getUsername() + "/" + filename;
String filePath = dirPath + filename;
File f = new File(filePath);
FileOutputStream fileOutputStream = new FileOutputStream(f);
fileOutputStream.write(file.getBytes());
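Uploads now land in a per-user subdirectory: assuming path is /opt/dataease/data/upload/ (hypothetical) and the current user is admin, an uploaded sales.xlsx is written to /opt/dataease/data/upload/admin/sales.xlsx, with the directory created on first use.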

View File

@ -1,7 +1,6 @@
package io.dataease.service.dataset;
import com.google.gson.Gson;
import com.sun.org.apache.bcel.internal.generic.SWITCH;
import io.dataease.base.domain.*;
import io.dataease.base.mapper.DatasourceMapper;
import io.dataease.commons.constants.JobStatus;
@ -13,31 +12,15 @@ import io.dataease.datasource.constants.DatasourceTypes;
import io.dataease.datasource.dto.MysqlConfigrationDTO;
import io.dataease.dto.dataset.DataSetTaskLogDTO;
import io.dataease.dto.dataset.DataTableInfoDTO;
import io.dataease.service.spark.SparkCalc;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.pentaho.big.data.api.cluster.NamedCluster;
import org.pentaho.big.data.api.cluster.NamedClusterService;
import org.pentaho.big.data.api.cluster.service.locator.NamedClusterServiceLocator;
import org.pentaho.big.data.api.cluster.service.locator.impl.NamedClusterServiceLocatorImpl;
import org.pentaho.big.data.api.initializer.ClusterInitializer;
import org.pentaho.big.data.api.initializer.ClusterInitializerProvider;
import org.pentaho.big.data.api.initializer.impl.ClusterInitializerImpl;
import org.pentaho.big.data.impl.cluster.NamedClusterImpl;
import org.pentaho.big.data.impl.cluster.NamedClusterManager;
import org.pentaho.big.data.kettle.plugins.hbase.MappingDefinition;
import org.pentaho.big.data.kettle.plugins.hbase.output.HBaseOutputMeta;
import org.apache.hadoop.hbase.client.Connection;
import org.pentaho.di.cluster.SlaveServer;
import org.pentaho.di.core.KettleEnvironment;
import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.plugins.PluginRegistry;
import org.pentaho.di.core.plugins.StepPluginType;
import org.pentaho.di.core.util.EnvUtil;
import org.pentaho.di.engine.configuration.impl.pentaho.DefaultRunConfiguration;
import org.pentaho.di.job.Job;
import org.pentaho.di.job.JobExecutionConfiguration;
import org.pentaho.di.job.JobHopMeta;
@ -45,49 +28,25 @@ import org.pentaho.di.job.JobMeta;
import org.pentaho.di.job.entries.special.JobEntrySpecial;
import org.pentaho.di.job.entries.success.JobEntrySuccess;
import org.pentaho.di.job.entries.trans.JobEntryTrans;
import org.pentaho.di.job.entries.writetolog.JobEntryWriteToLog;
import org.pentaho.di.job.entry.JobEntryCopy;
import org.pentaho.di.repository.RepositoryDirectoryInterface;
import org.pentaho.di.repository.filerep.KettleFileRepository;
import org.pentaho.di.repository.filerep.KettleFileRepositoryMeta;
import org.pentaho.di.trans.TransConfiguration;
import org.pentaho.di.trans.TransExecutionConfiguration;
import org.pentaho.di.trans.TransHopMeta;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.trans.step.StepMeta;
import org.pentaho.di.trans.steps.tableinput.TableInputMeta;
import org.pentaho.di.trans.steps.textfileoutput.TextFileField;
import org.pentaho.di.trans.steps.textfileoutput.TextFileOutput;
import org.pentaho.di.trans.steps.textfileoutput.TextFileOutputMeta;
import org.pentaho.di.trans.steps.userdefinedjavaclass.InfoStepDefinition;
import org.pentaho.di.trans.steps.userdefinedjavaclass.UserDefinedJavaClassDef;
import org.pentaho.di.trans.steps.userdefinedjavaclass.UserDefinedJavaClassMeta;
import org.pentaho.di.www.SlaveServerJobStatus;
import org.pentaho.runtime.test.RuntimeTest;
import org.pentaho.runtime.test.RuntimeTester;
import org.pentaho.runtime.test.action.RuntimeTestActionHandler;
import org.pentaho.runtime.test.action.RuntimeTestActionService;
import org.pentaho.runtime.test.action.impl.RuntimeTestActionServiceImpl;
import org.pentaho.runtime.test.impl.RuntimeTesterImpl;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import org.pentaho.di.core.row.ValueMetaInterface;
import scala.annotation.meta.field;
import javax.annotation.Resource;
import javax.sound.sampled.Line;
import java.io.File;
import java.security.MessageDigest;
import java.sql.ResultSet;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import static org.mockito.Mockito.mock;
@Service
public class ExtractDataService {
@ -125,8 +84,8 @@ public class ExtractDataService {
@Value("${hbase.zookeeper.property.clientPort:2181}")
private String zkPort;
@Resource
private SparkCalc sparkCalc;
// @Resource
// private SparkCalc sparkCalc;
public void extractData(String datasetTableId, String taskId, String type) {

View File

@ -7,6 +7,7 @@ import io.dataease.base.mapper.PanelGroupMapper;
import io.dataease.base.mapper.ext.ExtPanelDesignMapper;
import io.dataease.base.mapper.ext.ExtPanelGroupMapper;
import io.dataease.commons.constants.PanelConstants;
import io.dataease.commons.utils.AuthUtils;
import io.dataease.commons.utils.BeanUtils;
import io.dataease.controller.request.panel.PanelGroupRequest;
import io.dataease.dto.chart.ChartViewDTO;
@ -75,6 +76,7 @@ public class PanelGroupService {
if (StringUtils.isEmpty(request.getId())) {
request.setId(UUID.randomUUID().toString());
request.setCreateTime(System.currentTimeMillis());
request.setCreateBy(AuthUtils.getUser().getUsername());
panelGroupMapper.insert(request);
} else {
panelGroupMapper.updateByPrimaryKeySelective(request);

View File

@ -1,308 +1,407 @@
package io.dataease.service.spark;
import io.dataease.base.domain.DatasetTableField;
import io.dataease.commons.utils.CommonBeanFactory;
import io.dataease.controller.request.chart.ChartExtFilterRequest;
import io.dataease.dto.chart.ChartViewFieldDTO;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.ObjectUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableInputFormat;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.sql.*;
import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.sql.types.StructField;
import org.apache.spark.sql.types.StructType;
import org.apache.spark.storage.StorageLevel;
import org.springframework.core.env.Environment;
import org.springframework.stereotype.Service;
import scala.Tuple2;
import javax.annotation.Resource;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Base64;
import java.util.Iterator;
import java.util.List;
/**
* @Author gin
* @Date 2021/3/26 3:49 下午
*/
@Service
public class SparkCalc {
private static String column_family = "dataease";
private static String data_path = "/opt/dataease/data/db/";
@Resource
private Environment env; // 保存了配置文件的信息
public List<String[]> getData(String hTable, List<DatasetTableField> fields, List<ChartViewFieldDTO> xAxis, List<ChartViewFieldDTO> yAxis, String tmpTable, List<ChartExtFilterRequest> requestList) throws Exception {
// Spark Context
SparkSession spark = CommonBeanFactory.getBean(SparkSession.class);
JavaSparkContext sparkContext = new JavaSparkContext(spark.sparkContext());
// Spark SQL Context
SQLContext sqlContext = new SQLContext(sparkContext);
sqlContext.setConf("spark.sql.shuffle.partitions", env.getProperty("spark.sql.shuffle.partitions", "1"));
sqlContext.setConf("spark.default.parallelism", env.getProperty("spark.default.parallelism", "1"));
Dataset<Row> dataFrame = getData(sparkContext, sqlContext, hTable, fields);
//package io.dataease.service.spark;
//
//import io.dataease.base.domain.DatasetTableField;
//import io.dataease.commons.utils.CommonBeanFactory;
//import io.dataease.controller.request.chart.ChartExtFilterRequest;
//import io.dataease.dto.chart.ChartViewFieldDTO;
//import org.antlr.analysis.MachineProbe;
//import org.apache.commons.collections4.CollectionUtils;
//import org.apache.commons.lang3.ObjectUtils;
//import org.apache.commons.lang3.StringUtils;
//import org.apache.hadoop.hbase.client.Result;
//import org.apache.hadoop.hbase.client.Scan;
//import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
//import org.apache.hadoop.hbase.mapreduce.TableInputFormat;
//import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
//import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
//import org.apache.hadoop.hbase.util.Bytes;
//import org.apache.spark.api.java.JavaPairRDD;
//import org.apache.spark.api.java.JavaRDD;
//import org.apache.spark.api.java.JavaSparkContext;
//import org.apache.spark.api.java.function.FlatMapFunction;
//import org.apache.spark.api.java.function.Function;
//import org.apache.spark.sql.*;
//import org.apache.spark.sql.types.DataTypes;
//import org.apache.spark.sql.types.StructField;
//import org.apache.spark.sql.types.StructType;
//import org.apache.spark.storage.StorageLevel;
//import org.springframework.core.env.Environment;
//import org.springframework.stereotype.Service;
//import scala.Tuple2;
//
//import javax.annotation.Resource;
//import java.math.BigDecimal;
//import java.text.MessageFormat;
//import java.util.*;
//
///**
// * @Author gin
// * @Date 2021/3/26 3:49 下午
// */
//@Service
//public class SparkCalc {
// private static String column_family = "dataease";
// private static String data_path = "/opt/dataease/data/db/";
// @Resource
// private Environment env; // 保存了配置文件的信息
//
// public List<String[]> getData(String hTable, List<DatasetTableField> fields, List<ChartViewFieldDTO> xAxis, List<ChartViewFieldDTO> yAxis, String tmpTable, List<ChartExtFilterRequest> requestList) throws Exception {
// // Spark Context
// SparkSession spark = CommonBeanFactory.getBean(SparkSession.class);
// JavaSparkContext sparkContext = new JavaSparkContext(spark.sparkContext());
//
// // Spark SQL Context
// SQLContext sqlContext = new SQLContext(sparkContext);
// sqlContext.setConf("spark.sql.shuffle.partitions", env.getProperty("spark.sql.shuffle.partitions", "1"));
// sqlContext.setConf("spark.default.parallelism", env.getProperty("spark.default.parallelism", "1"));
//
// /*Map<String, BigDecimal> dataFrame = getData(sparkContext, sqlContext, hTable, fields);
// List<String[]> data = new ArrayList<>();
// Iterator<Map.Entry<String, BigDecimal>> iterator = dataFrame.entrySet().iterator();
// while (iterator.hasNext()) {
// String[] r = new String[2];
// Map.Entry<String, BigDecimal> next = iterator.next();
// String key = next.getKey();
// BigDecimal value = next.getValue();
// r[0] = key;
// r[1] = value.toString();
// data.add(r);
// }*/
//
//// Dataset<Row> dataFrame = getData(sparkContext, sqlContext, hTable, fields);
// Dataset<Row> dataFrame = CacheUtil.getInstance().getCacheData(hTable);
// if (ObjectUtils.isEmpty(dataFrame)) {
// dataFrame = getData(sparkContext, sqlContext, hTable, fields);
// dataFrame = getHBaseDataAndCache(sparkContext, sqlContext, hTable, fields);
// }
dataFrame.createOrReplaceTempView( tmpTable);
Dataset<Row> sql = sqlContext.sql(getSQL(xAxis, yAxis, tmpTable, requestList));
// transform
List<String[]> data = new ArrayList<>();
List<Row> list = sql.collectAsList();
for (Row row : list) {
String[] r = new String[row.length()];
for (int i = 0; i < row.length(); i++) {
r[i] = row.get(i) == null ? "null" : row.get(i).toString();
}
data.add(r);
}
return data;
}
public Dataset<Row> getHBaseDataAndCache(String hTable, List<DatasetTableField> fields) throws Exception {
// Spark Context
SparkSession spark = CommonBeanFactory.getBean(SparkSession.class);
JavaSparkContext sparkContext = new JavaSparkContext(spark.sparkContext());
// Spark SQL Context
SQLContext sqlContext = new SQLContext(sparkContext);
sqlContext.setConf("spark.sql.shuffle.partitions", env.getProperty("spark.sql.shuffle.partitions", "1"));
sqlContext.setConf("spark.default.parallelism", env.getProperty("spark.default.parallelism", "1"));
return getHBaseDataAndCache(sparkContext, sqlContext, hTable, fields);
}
public Dataset<Row> getData(JavaSparkContext sparkContext, SQLContext sqlContext, String tableId, List<DatasetTableField> fields) throws Exception {
fields.sort((o1, o2) -> {
if (o1.getOriginName() == null) {
return -1;
}
if (o2.getOriginName() == null) {
return 1;
}
return o1.getOriginName().compareTo(o2.getOriginName());
});
JavaRDD<String> pairRDD = sparkContext.textFile(data_path + tableId + ".txt");
JavaRDD<Row> rdd = pairRDD.mapPartitions( (FlatMapFunction<java.util.Iterator<String>, Row>) tuple2Iterator -> {
List<Row> iterator = new ArrayList<>();
while (tuple2Iterator.hasNext()) {
String[] items = tuple2Iterator.next().split(";");
List<Object> list = new ArrayList<>();
for(int i=0; i<items.length; i++){
String l = items[i];
DatasetTableField x = fields.get(i);
if (x.getDeType() == 0 || x.getDeType() == 1) {
list.add(l);
} else if (x.getDeType() == 2) {
if (StringUtils.isEmpty(l)) {
l = "0";
}
if (StringUtils.equalsIgnoreCase(l,"Y")) {
l = "1";
}
if (StringUtils.equalsIgnoreCase(l,"N")) {
l = "0";
}
list.add(Long.valueOf(l));
} else if (x.getDeType() == 3) {
if (StringUtils.isEmpty(l)) {
l = "0.0";
}
list.add(Double.valueOf(l));
}
}
iterator.add(RowFactory.create(list.toArray()));
}
return iterator.iterator();
});
List<StructField> structFields = new ArrayList<>();
// struct顺序要与rdd顺序一致
fields.forEach(x -> {
if (x.getDeType() == 0 || x.getDeType() == 1) {
structFields.add(DataTypes.createStructField(x.getOriginName(), DataTypes.StringType, true));
} else if (x.getDeType() == 2) {
structFields.add(DataTypes.createStructField(x.getOriginName(), DataTypes.LongType, true));
} else if (x.getDeType() == 3) {
structFields.add(DataTypes.createStructField(x.getOriginName(), DataTypes.DoubleType, true));
}
});
StructType structType = DataTypes.createStructType(structFields);
Dataset<Row> dataFrame = sqlContext.createDataFrame(rdd, structType);
return dataFrame;
}
public Dataset<Row> getHBaseDataAndCache(JavaSparkContext sparkContext, SQLContext sqlContext, String hTable, List<DatasetTableField> fields) throws Exception {
Scan scan = new Scan();
scan.addFamily(Bytes.toBytes(column_family));
for (DatasetTableField field : fields) {
scan.addColumn(Bytes.toBytes(column_family), Bytes.toBytes(field.getOriginName()));
}
ClientProtos.Scan proto = ProtobufUtil.toScan(scan);
String scanToString = new String(Base64.getEncoder().encode(proto.toByteArray()));
// HBase config
org.apache.hadoop.conf.Configuration conf = new org.apache.hadoop.conf.Configuration();
conf.set("hbase.zookeeper.quorum", env.getProperty("hbase.zookeeper.quorum"));
conf.set("hbase.zookeeper.property.clientPort", env.getProperty("hbase.zookeeper.property.clientPort"));
conf.set("hbase.client.retries.number", env.getProperty("hbase.client.retries.number", "1"));
conf.set(TableInputFormat.INPUT_TABLE, hTable);
conf.set(TableInputFormat.SCAN, scanToString);
JavaPairRDD<ImmutableBytesWritable, Result> pairRDD = sparkContext.newAPIHadoopRDD(conf, TableInputFormat.class, ImmutableBytesWritable.class, Result.class);
JavaRDD<Row> rdd = pairRDD.mapPartitions((FlatMapFunction<Iterator<Tuple2<ImmutableBytesWritable, Result>>, Row>) tuple2Iterator -> {
List<Row> iterator = new ArrayList<>();
while (tuple2Iterator.hasNext()) {
Result result = tuple2Iterator.next()._2;
List<Object> list = new ArrayList<>();
fields.forEach(x -> {
String l = Bytes.toString(result.getValue(column_family.getBytes(), x.getOriginName().getBytes()));
if (x.getDeType() == 0 || x.getDeType() == 1) {
list.add(l);
} else if (x.getDeType() == 2) {
if (StringUtils.isEmpty(l)) {
l = "0";
}
list.add(Long.valueOf(l));
} else if (x.getDeType() == 3) {
if (StringUtils.isEmpty(l)) {
l = "0.0";
}
list.add(Double.valueOf(l));
}
});
iterator.add(RowFactory.create(list.toArray()));
}
return iterator.iterator();
});
List<StructField> structFields = new ArrayList<>();
// struct顺序要与rdd顺序一致
fields.forEach(x -> {
if (x.getDeType() == 0 || x.getDeType() == 1) {
structFields.add(DataTypes.createStructField(x.getOriginName(), DataTypes.StringType, true));
} else if (x.getDeType() == 2) {
structFields.add(DataTypes.createStructField(x.getOriginName(), DataTypes.LongType, true));
} else if (x.getDeType() == 3) {
structFields.add(DataTypes.createStructField(x.getOriginName(), DataTypes.DoubleType, true));
}
});
StructType structType = DataTypes.createStructType(structFields);
Dataset<Row> dataFrame = sqlContext.createDataFrame(rdd, structType).persist(StorageLevel.MEMORY_AND_DISK_SER());
//
// dataFrame.createOrReplaceTempView(tmpTable);
// Dataset<Row> sql = sqlContext.sql(getSQL(xAxis, yAxis, tmpTable, requestList));
// // transform
// List<String[]> data = new ArrayList<>();
// List<Row> list = sql.collectAsList();
// for (Row row : list) {
// String[] r = new String[row.length()];
// for (int i = 0; i < row.length(); i++) {
// r[i] = row.get(i) == null ? "null" : row.get(i).toString();
// }
// data.add(r);
// }
// return data;
// }
//
// public Dataset<Row> getHBaseDataAndCache(String hTable, List<DatasetTableField> fields) throws Exception {
// // Spark Context
// SparkSession spark = CommonBeanFactory.getBean(SparkSession.class);
// JavaSparkContext sparkContext = new JavaSparkContext(spark.sparkContext());
//
// // Spark SQL Context
// SQLContext sqlContext = new SQLContext(sparkContext);
// sqlContext.setConf("spark.sql.shuffle.partitions", env.getProperty("spark.sql.shuffle.partitions", "1"));
// sqlContext.setConf("spark.default.parallelism", env.getProperty("spark.default.parallelism", "1"));
// return getHBaseDataAndCache(sparkContext, sqlContext, hTable, fields);
// }
//
// public Map<String, BigDecimal> getData(JavaSparkContext sparkContext, SQLContext sqlContext, String tableId, List<DatasetTableField> fields) throws Exception {
// fields.sort((o1, o2) -> {
// if (o1.getOriginName() == null) {
// return -1;
// }
// if (o2.getOriginName() == null) {
// return 1;
// }
// return o1.getOriginName().compareTo(o2.getOriginName());
// });
//
// JavaRDD<String> pairRDD = sparkContext.textFile(data_path + tableId + ".txt");
//// System.out.println(pairRDD.count());
//
//// JavaRDD<Map.Entry<String, BigDecimal>> rdd = pairRDD.map((Function<String, Map.Entry<String, BigDecimal>>) v1 -> {
//// Map<String, BigDecimal> map = new HashMap<>();
//// String[] items = v1.split(";");
//// String day = null;
//// BigDecimal res = new BigDecimal(0);
//// for (int i = 0; i < items.length; i++) {
//// String l = items[i];
//// DatasetTableField x = fields.get(i);
//// if (x.getOriginName().equalsIgnoreCase("sync_day")) {
//// day = l;
//// }
//// if (x.getOriginName().equalsIgnoreCase("usage_cost")) {
//// res = new BigDecimal(l);
//// }
//// }
//// BigDecimal bigDecimal = map.get(day);
//// if (bigDecimal == null) {
//// map.put(day, res);
//// } else {
//// map.put(day, bigDecimal.add(res));
//// }
//// return map.entrySet().iterator().next();
//// });
//
// JavaRDD<Map.Entry<String, BigDecimal>> rdd = pairRDD.mapPartitions((FlatMapFunction<java.util.Iterator<String>, Map.Entry<String, BigDecimal>>) tuple2Iterator -> {
// Map<String, BigDecimal> map = new HashMap<>();
// while (tuple2Iterator.hasNext()) {
// String[] items = tuple2Iterator.next().split(";");
// String day = null;
// BigDecimal res = new BigDecimal(0);
// for (int i = 0; i < items.length; i++) {
// String l = items[i];
// DatasetTableField x = fields.get(i);
// if (x.getOriginName().equalsIgnoreCase("sync_day")) {
// day = l;
// }
// if (x.getOriginName().equalsIgnoreCase("usage_cost")) {
// res = new BigDecimal(l);
// }
// }
// BigDecimal bigDecimal = map.get(day);
// if (bigDecimal == null) {
// map.put(day, res);
// } else {
// map.put(day, bigDecimal.add(res));
// }
// }
// return map.entrySet().iterator();
// });
//
//
//// System.out.println(rdd.count());
//
// Map<String, BigDecimal> map = new HashMap<>();
// List<Map.Entry<String, BigDecimal>> collect = rdd.collect();
//// System.out.println(collect.size());
//
// collect.forEach(stringBigDecimalEntry -> {
// String key = stringBigDecimalEntry.getKey();
// BigDecimal value = stringBigDecimalEntry.getValue();
//
// BigDecimal bigDecimal = map.get(key);
// if (bigDecimal == null) {
// map.put(key, value);
// } else {
// map.put(key, bigDecimal.add(value));
// }
// });
//
// return map;
// }
//
//// public Dataset<Row> getData(JavaSparkContext sparkContext, SQLContext sqlContext, String tableId, List<DatasetTableField> fields) throws Exception {
//// fields.sort((o1, o2) -> {
//// if (o1.getOriginName() == null) {
//// return -1;
//// }
//// if (o2.getOriginName() == null) {
//// return 1;
//// }
//// return o1.getOriginName().compareTo(o2.getOriginName());
//// });
////
//// JavaRDD<String> pairRDD = sparkContext.textFile(data_path + tableId + ".txt");
////
//// JavaRDD<Row> rdd = pairRDD.mapPartitions((FlatMapFunction<java.util.Iterator<String>, Row>) tuple2Iterator -> {
//// List<Row> iterator = new ArrayList<>();
//// while (tuple2Iterator.hasNext()) {
//// String[] items = tuple2Iterator.next().split(";");
//// List<Object> list = new ArrayList<>();
//// for (int i = 0; i < items.length; i++) {
//// String l = items[i];
//// DatasetTableField x = fields.get(i);
//// if (x.getDeType() == 0 || x.getDeType() == 1) {
//// list.add(l);
//// } else if (x.getDeType() == 2) {
//// if (StringUtils.isEmpty(l)) {
//// l = "0";
//// }
//// if (StringUtils.equalsIgnoreCase(l, "Y")) {
//// l = "1";
//// }
//// if (StringUtils.equalsIgnoreCase(l, "N")) {
//// l = "0";
//// }
//// list.add(Long.valueOf(l));
//// } else if (x.getDeType() == 3) {
//// if (StringUtils.isEmpty(l)) {
//// l = "0.0";
//// }
//// list.add(Double.valueOf(l));
//// }
//// }
//// iterator.add(RowFactory.create(list.toArray()));
//// }
//// return iterator.iterator();
//// });
////
//// List<StructField> structFields = new ArrayList<>();
//// // struct顺序要与rdd顺序一致
//// fields.forEach(x -> {
//// if (x.getDeType() == 0 || x.getDeType() == 1) {
//// structFields.add(DataTypes.createStructField(x.getOriginName(), DataTypes.StringType, true));
//// } else if (x.getDeType() == 2) {
//// structFields.add(DataTypes.createStructField(x.getOriginName(), DataTypes.LongType, true));
//// } else if (x.getDeType() == 3) {
//// structFields.add(DataTypes.createStructField(x.getOriginName(), DataTypes.DoubleType, true));
//// }
//// });
//// StructType structType = DataTypes.createStructType(structFields);
////
//// Dataset<Row> dataFrame = sqlContext.createDataFrame(rdd, structType);
//// return dataFrame;
//// }
//
// public Dataset<Row> getHBaseDataAndCache(JavaSparkContext sparkContext, SQLContext sqlContext, String hTable, List<DatasetTableField> fields) throws Exception {
// Scan scan = new Scan();
// scan.addFamily(Bytes.toBytes(column_family));
// for (DatasetTableField field : fields) {
// scan.addColumn(Bytes.toBytes(column_family), Bytes.toBytes(field.getOriginName()));
// }
// ClientProtos.Scan proto = ProtobufUtil.toScan(scan);
// String scanToString = new String(Base64.getEncoder().encode(proto.toByteArray()));
//
// // HBase config
// org.apache.hadoop.conf.Configuration conf = new org.apache.hadoop.conf.Configuration();
// conf.set("hbase.zookeeper.quorum", env.getProperty("hbase.zookeeper.quorum"));
// conf.set("hbase.zookeeper.property.clientPort", env.getProperty("hbase.zookeeper.property.clientPort"));
// conf.set("hbase.client.retries.number", env.getProperty("hbase.client.retries.number", "1"));
// conf.set(TableInputFormat.INPUT_TABLE, hTable);
// conf.set(TableInputFormat.SCAN, scanToString);
//
// JavaPairRDD<ImmutableBytesWritable, Result> pairRDD = sparkContext.newAPIHadoopRDD(conf, TableInputFormat.class, ImmutableBytesWritable.class, Result.class);
//
// JavaRDD<Row> rdd = pairRDD.mapPartitions((FlatMapFunction<Iterator<Tuple2<ImmutableBytesWritable, Result>>, Row>) tuple2Iterator -> {
// List<Row> iterator = new ArrayList<>();
// while (tuple2Iterator.hasNext()) {
// Result result = tuple2Iterator.next()._2;
// List<Object> list = new ArrayList<>();
// fields.forEach(x -> {
// String l = Bytes.toString(result.getValue(column_family.getBytes(), x.getOriginName().getBytes()));
// if (x.getDeType() == 0 || x.getDeType() == 1) {
// list.add(l);
// } else if (x.getDeType() == 2) {
// if (StringUtils.isEmpty(l)) {
// l = "0";
// }
// list.add(Long.valueOf(l));
// } else if (x.getDeType() == 3) {
// if (StringUtils.isEmpty(l)) {
// l = "0.0";
// }
// list.add(Double.valueOf(l));
// }
// });
// iterator.add(RowFactory.create(list.toArray()));
// }
// return iterator.iterator();
// });
//
// List<StructField> structFields = new ArrayList<>();
// // struct顺序要与rdd顺序一致
// fields.forEach(x -> {
// if (x.getDeType() == 0 || x.getDeType() == 1) {
// structFields.add(DataTypes.createStructField(x.getOriginName(), DataTypes.StringType, true));
// } else if (x.getDeType() == 2) {
// structFields.add(DataTypes.createStructField(x.getOriginName(), DataTypes.LongType, true));
// } else if (x.getDeType() == 3) {
// structFields.add(DataTypes.createStructField(x.getOriginName(), DataTypes.DoubleType, true));
// }
// });
// StructType structType = DataTypes.createStructType(structFields);
//
// Dataset<Row> dataFrame = sqlContext.createDataFrame(rdd, structType).persist(StorageLevel.MEMORY_AND_DISK_SER());
// CacheUtil.getInstance().addCacheData(hTable, dataFrame);
dataFrame.count();
return dataFrame;
}
public String getSQL(List<ChartViewFieldDTO> xAxis, List<ChartViewFieldDTO> yAxis, String table, List<ChartExtFilterRequest> extFilterRequestList) {
// 字段汇总 排序等
String[] field = yAxis.stream().map(y -> "CAST(" + y.getSummary() + "(" + y.getOriginName() + ") AS DECIMAL(20,2)) AS _" + y.getSummary() + "_" + y.getOriginName()).toArray(String[]::new);
String[] group = xAxis.stream().map(ChartViewFieldDTO::getOriginName).toArray(String[]::new);
String[] order = yAxis.stream().filter(y -> StringUtils.isNotEmpty(y.getSort()) && !StringUtils.equalsIgnoreCase(y.getSort(), "none"))
.map(y -> "_" + y.getSummary() + "_" + y.getOriginName() + " " + y.getSort()).toArray(String[]::new);
String sql = MessageFormat.format("SELECT {0},{1} FROM {2} WHERE 1=1 {3} GROUP BY {4} ORDER BY null,{5}",
StringUtils.join(group, ","),
StringUtils.join(field, ","),
table,
transExtFilter(extFilterRequestList),// origin field filter and panel field filter,
StringUtils.join(group, ","),
StringUtils.join(order, ","));
if (sql.endsWith(",")) {
sql = sql.substring(0, sql.length() - 1);
}
// 如果是对结果字段过滤则再包裹一层sql
String[] resultFilter = yAxis.stream().filter(y -> CollectionUtils.isNotEmpty(y.getFilter()) && y.getFilter().size() > 0)
.map(y -> {
String[] s = y.getFilter().stream().map(f -> "AND _" + y.getSummary() + "_" + y.getOriginName() + transFilterTerm(f.getTerm()) + f.getValue()).toArray(String[]::new);
return StringUtils.join(s, " ");
}).toArray(String[]::new);
if (resultFilter.length == 0) {
return sql;
} else {
String filterSql = MessageFormat.format("SELECT * FROM {0} WHERE 1=1 {1}",
"(" + sql + ") AS tmp",
StringUtils.join(resultFilter, " "));
return filterSql;
}
}
public String transFilterTerm(String term) {
switch (term) {
case "eq":
return " = ";
case "not_eq":
return " <> ";
case "lt":
return " < ";
case "le":
return " <= ";
case "gt":
return " > ";
case "ge":
return " >= ";
case "in":
return " IN ";
case "not in":
return " NOT IN ";
case "like":
return " LIKE ";
case "not like":
return " NOT LIKE ";
case "null":
return " IS NULL ";
case "not_null":
return " IS NOT NULL ";
default:
return "";
}
}
public String transExtFilter(List<ChartExtFilterRequest> requestList) {
if (CollectionUtils.isEmpty(requestList)) {
return "";
}
StringBuilder filter = new StringBuilder();
for (ChartExtFilterRequest request : requestList) {
List<String> value = request.getValue();
if (CollectionUtils.isEmpty(value)) {
continue;
}
DatasetTableField field = request.getDatasetTableField();
filter.append(" AND ")
.append(field.getOriginName())
.append(" ")
.append(transFilterTerm(request.getOperator()))
.append(" ");
if (StringUtils.containsIgnoreCase(request.getOperator(), "in")) {
filter.append("('").append(StringUtils.join(value, "','")).append("')");
} else if (StringUtils.containsIgnoreCase(request.getOperator(), "like")) {
filter.append("'%").append(value.get(0)).append("%'");
} else {
filter.append("'").append(value.get(0)).append("'");
}
}
return filter.toString();
}
}
// dataFrame.count();
// return dataFrame;
// }
//
// public String getSQL(List<ChartViewFieldDTO> xAxis, List<ChartViewFieldDTO> yAxis, String table, List<ChartExtFilterRequest> extFilterRequestList) {
// // 字段汇总 排序等
// String[] field = yAxis.stream().map(y -> "CAST(" + y.getSummary() + "(" + y.getOriginName() + ") AS DECIMAL(20,2)) AS _" + y.getSummary() + "_" + y.getOriginName()).toArray(String[]::new);
// String[] group = xAxis.stream().map(ChartViewFieldDTO::getOriginName).toArray(String[]::new);
// String[] order = yAxis.stream().filter(y -> StringUtils.isNotEmpty(y.getSort()) && !StringUtils.equalsIgnoreCase(y.getSort(), "none"))
// .map(y -> "_" + y.getSummary() + "_" + y.getOriginName() + " " + y.getSort()).toArray(String[]::new);
//
// String sql = MessageFormat.format("SELECT {0},{1} FROM {2} WHERE 1=1 {3} GROUP BY {4} ORDER BY null,{5}",
// StringUtils.join(group, ","),
// StringUtils.join(field, ","),
// table,
// transExtFilter(extFilterRequestList),// origin field filter and panel field filter,
// StringUtils.join(group, ","),
// StringUtils.join(order, ","));
// if (sql.endsWith(",")) {
// sql = sql.substring(0, sql.length() - 1);
// }
// // 如果是对结果字段过滤则再包裹一层sql
// String[] resultFilter = yAxis.stream().filter(y -> CollectionUtils.isNotEmpty(y.getFilter()) && y.getFilter().size() > 0)
// .map(y -> {
// String[] s = y.getFilter().stream().map(f -> "AND _" + y.getSummary() + "_" + y.getOriginName() + transFilterTerm(f.getTerm()) + f.getValue()).toArray(String[]::new);
// return StringUtils.join(s, " ");
// }).toArray(String[]::new);
// if (resultFilter.length == 0) {
// return sql;
// } else {
// String filterSql = MessageFormat.format("SELECT * FROM {0} WHERE 1=1 {1}",
// "(" + sql + ") AS tmp",
// StringUtils.join(resultFilter, " "));
// return filterSql;
// }
// }
//
// public String transFilterTerm(String term) {
// switch (term) {
// case "eq":
// return " = ";
// case "not_eq":
// return " <> ";
// case "lt":
// return " < ";
// case "le":
// return " <= ";
// case "gt":
// return " > ";
// case "ge":
// return " >= ";
// case "in":
// return " IN ";
// case "not in":
// return " NOT IN ";
// case "like":
// return " LIKE ";
// case "not like":
// return " NOT LIKE ";
// case "null":
// return " IS NULL ";
// case "not_null":
// return " IS NOT NULL ";
// default:
// return "";
// }
// }
//
// public String transExtFilter(List<ChartExtFilterRequest> requestList) {
// if (CollectionUtils.isEmpty(requestList)) {
// return "";
// }
// StringBuilder filter = new StringBuilder();
// for (ChartExtFilterRequest request : requestList) {
// List<String> value = request.getValue();
// if (CollectionUtils.isEmpty(value)) {
// continue;
// }
// DatasetTableField field = request.getDatasetTableField();
// filter.append(" AND ")
// .append(field.getOriginName())
// .append(" ")
// .append(transFilterTerm(request.getOperator()))
// .append(" ");
// if (StringUtils.containsIgnoreCase(request.getOperator(), "in")) {
// filter.append("('").append(StringUtils.join(value, "','")).append("')");
// } else if (StringUtils.containsIgnoreCase(request.getOperator(), "like")) {
// filter.append("'%").append(value.get(0)).append("%'");
// } else {
// filter.append("'").append(value.get(0)).append("'");
// }
// }
// return filter.toString();
// }
//}

View File

@ -1,4 +1,5 @@
DROP TABLE IF EXISTS `sys_dept`;
DROP TABLE IF EXISTS `sys_dept` ;
CREATE TABLE `sys_dept` (
`dept_id` bigint(20) NOT NULL AUTO_INCREMENT COMMENT 'ID',
`pid` bigint(20) DEFAULT NULL COMMENT '上级部门',
@ -13,23 +14,18 @@ CREATE TABLE `sys_dept` (
PRIMARY KEY (`dept_id`) USING BTREE,
KEY `inx_pid` (`pid`),
KEY `inx_enabled` (`enabled`)
) ENGINE=InnoDB AUTO_INCREMENT=24 DEFAULT CHARSET=utf8 ROW_FORMAT=COMPACT COMMENT='部门';
) ENGINE=InnoDB AUTO_INCREMENT=26 DEFAULT CHARSET=utf8 ROW_FORMAT=COMPACT COMMENT='部门';
-- ----------------------------
-- Records of sys_dept
-- ----------------------------
BEGIN;
INSERT INTO `sys_dept` VALUES (18, 0, 1, '上海飞致云', 1, b'1', NULL, NULL, 1614048906358, 1614048906358);
INSERT INTO `sys_dept` VALUES (19, 0, 1, '北京飞致云', 2, b'1', NULL, NULL, 1614048918465, 1614048918465);
INSERT INTO `sys_dept` VALUES (20, 18, 0, '营销部', 1, b'1', NULL, NULL, 1614048946370, 1614049006759);
INSERT INTO `sys_dept` VALUES (21, 19, 0, '综合部', 1, b'1', NULL, NULL, 1614048963483, 1614048963483);
INSERT INTO `sys_dept` VALUES (22, 0, 0, '深圳飞致云', 3, b'1', NULL, NULL, 1614679834772, 1614679834772);
INSERT INTO `sys_dept` VALUES (23, 0, 0, '南京飞致云', 4, b'1', NULL, NULL, 1614679890462, 1614679890462);
COMMIT;
INSERT INTO `sys_dept` (`dept_id`, `pid`, `sub_count`, `name`, `dept_sort`, `enabled`, `create_by`, `update_by`, `create_time`, `update_time`) VALUES ('18','0','1','上海飞致云','1',b'1',null,null,'1614048906358','1614048906358');
INSERT INTO `sys_dept` (`dept_id`, `pid`, `sub_count`, `name`, `dept_sort`, `enabled`, `create_by`, `update_by`, `create_time`, `update_time`) VALUES ('19','0','1','北京飞致云','2',b'1',null,null,'1614048918465','1614048918465');
INSERT INTO `sys_dept` (`dept_id`, `pid`, `sub_count`, `name`, `dept_sort`, `enabled`, `create_by`, `update_by`, `create_time`, `update_time`) VALUES ('20','18','1','营销部','1',b'1',null,null,'1614048946370','1614049006759');
INSERT INTO `sys_dept` (`dept_id`, `pid`, `sub_count`, `name`, `dept_sort`, `enabled`, `create_by`, `update_by`, `create_time`, `update_time`) VALUES ('21','19','0','综合部','3',b'1',null,null,'1614048963483','1615783363091');
INSERT INTO `sys_dept` (`dept_id`, `pid`, `sub_count`, `name`, `dept_sort`, `enabled`, `create_by`, `update_by`, `create_time`, `update_time`) VALUES ('25','20','0','售前组','1',b'1',null,null,'1615791706945','1615791706945');
DROP TABLE IF EXISTS `sys_menu`;
DROP TABLE IF EXISTS `sys_menu` ;
CREATE TABLE `sys_menu` (
`menu_id` bigint(20) NOT NULL AUTO_INCREMENT COMMENT 'ID',
`pid` bigint(20) DEFAULT NULL COMMENT '上级菜单ID',
@ -53,114 +49,44 @@ CREATE TABLE `sys_menu` (
UNIQUE KEY `uniq_title` (`title`),
UNIQUE KEY `uniq_name` (`name`),
KEY `inx_pid` (`pid`)
) ENGINE=InnoDB AUTO_INCREMENT=28 DEFAULT CHARSET=utf8 ROW_FORMAT=COMPACT COMMENT='系统菜单';
) ENGINE=InnoDB AUTO_INCREMENT=35 DEFAULT CHARSET=utf8 ROW_FORMAT=COMPACT COMMENT='系统菜单';
-- ----------------------------
-- Records of sys_menu
-- ----------------------------
BEGIN;
INSERT INTO `sys_menu` VALUES (1, 0, 3, 0, '系统管理', '系统管理', 'Layout', 3, 'system', '/system', NULL, b'0', b'0', 'dir:sys', NULL, NULL, NULL, 1614916695777);
INSERT INTO `sys_menu` VALUES (2, 1, 3, 1, '用户管理', '用户管理', 'system/user/index', 1, 'peoples', 'user', NULL, b'0', b'0', 'user:read', NULL, NULL, NULL, NULL);
INSERT INTO `sys_menu` VALUES (3, 1, 3, 1, '菜单管理', '菜单管理', 'system/menu/index', 2, 'menu', 'menu', NULL, b'0', b'0', 'menu:read', NULL, NULL, NULL, NULL);
INSERT INTO `sys_menu` VALUES (4, 1, 3, 1, '组织管理', '组织管理', 'system/dept/index', 3, 'dept', 'dept', NULL, b'0', b'0', 'dept:read', NULL, NULL, NULL, NULL);
INSERT INTO `sys_menu` VALUES (5, 1, 3, 1, '角色管理', '角色管理', 'system/role/index', 4, 'role', 'role', b'0', b'0', b'0', 'role:read', NULL, NULL, 1614683852133, 1614683852133);
INSERT INTO `sys_menu` VALUES (6, 1, 0, 1, '参数管理', '参数管理', 'system/systemParamSettings/index', 5, 'sys-tools', 'systemParamSettings', NULL, b'0', b'0', 'sysparam:read', NULL, NULL, NULL, 1614916731805);
INSERT INTO `sys_menu` VALUES (7, 0, 1, 0, '数据管理', '数据管理', 'Layout', 2, 'dataset', '/dataset', NULL, b'0', b'0', 'dir:data', NULL, NULL, NULL, 1614916666408);
INSERT INTO `sys_menu` VALUES (8, 7, 0, 1, '数据管理1', '数据管理1', 'dataset/index', 1, 'dataset', 'index', NULL, b'0', b'0', 'data:read', NULL, NULL, NULL, 1614916684821);
INSERT INTO `sys_menu` VALUES (9, 0, 1, 0, '视图管理', '视图管理', 'Layout', 1, 'chart', '/chart', NULL, b'0', b'0', 'dir:chart', NULL, NULL, NULL, 1614916648098);
INSERT INTO `sys_menu` VALUES (10, 9, 0, 1, '视图1', '视图1', 'chart/index', 1, 'chart', 'index', NULL, b'0', b'0', 'chart:read', NULL, NULL, NULL, 1614915491036);
INSERT INTO `sys_menu` VALUES (11, 1, 4, 1, '数据连接', '数据连接', 'system/datasource/index', 0, 'database', 'index', NULL, b'0', b'0', 'datasource:read', NULL, NULL, NULL, 1614916717642);
INSERT INTO `sys_menu` VALUES (12, 3, 0, 2, '创建菜单', NULL, NULL, 999, NULL, NULL, b'0', b'0', b'0', 'menu:add', NULL, NULL, 1614924617327, 1614924617327);
INSERT INTO `sys_menu` VALUES (13, 3, 0, 2, '删除菜单', NULL, NULL, 999, NULL, NULL, b'0', b'0', b'0', 'menu:del', NULL, NULL, 1614924667808, 1614924667808);
INSERT INTO `sys_menu` VALUES (14, 3, 0, 2, '编辑菜单', NULL, NULL, 999, NULL, NULL, b'0', b'0', b'0', 'menu:edit', NULL, NULL, 1614930734224, 1614936429773);
INSERT INTO `sys_menu` VALUES (15, 2, 0, 2, '创建用户', NULL, NULL, 999, NULL, NULL, b'0', b'0', b'0', 'user:add', NULL, NULL, 1614930862373, 1614930862373);
INSERT INTO `sys_menu` VALUES (16, 2, 0, 2, '删除用户', NULL, NULL, 999, NULL, NULL, b'0', b'0', b'0', 'user:del', NULL, NULL, 1614930903502, 1614930903502);
INSERT INTO `sys_menu` VALUES (17, 2, 0, 2, '编辑用户', NULL, NULL, 999, NULL, NULL, b'0', b'0', b'0', 'user:edit', NULL, NULL, 1614930935529, 1614930935529);
INSERT INTO `sys_menu` VALUES (18, 4, 0, 2, '创建组织', NULL, NULL, 999, NULL, NULL, b'0', b'0', b'0', 'dept:add', NULL, NULL, 1614930976297, 1614930976297);
INSERT INTO `sys_menu` VALUES (19, 4, 0, 2, '删除组织', NULL, NULL, 999, NULL, NULL, b'0', b'0', b'0', 'dept:del', NULL, NULL, 1614930997130, 1614930997130);
INSERT INTO `sys_menu` VALUES (20, 4, 0, 2, '编辑组织', NULL, NULL, 999, NULL, NULL, b'0', b'0', b'0', 'dept:edit', NULL, NULL, 1614931022967, 1614931022967);
INSERT INTO `sys_menu` VALUES (21, 5, 0, 2, '创建角色', NULL, NULL, 999, NULL, NULL, b'0', b'0', b'0', 'role:add', NULL, NULL, 1614931069408, 1614931069408);
INSERT INTO `sys_menu` VALUES (22, 5, 0, 2, '删除角色', NULL, NULL, 999, NULL, NULL, b'0', b'0', b'0', 'role:del', NULL, NULL, 1614931097720, 1614931097720);
INSERT INTO `sys_menu` VALUES (23, 5, 0, 2, '编辑角色', NULL, NULL, 999, NULL, NULL, b'0', b'0', b'0', 'role:edit', NULL, NULL, 1614931124782, 1614931124782);
INSERT INTO `sys_menu` VALUES (24, 11, 0, 2, '创建连接', NULL, NULL, 999, NULL, NULL, b'0', b'0', b'0', 'datasource:add', NULL, NULL, 1614931168956, 1614931168956);
INSERT INTO `sys_menu` VALUES (25, 11, 0, 2, '删除连接', NULL, NULL, 999, NULL, NULL, b'0', b'0', b'0', 'datasource:del', NULL, NULL, 1614931205899, 1614931205899);
INSERT INTO `sys_menu` VALUES (26, 11, 0, 2, '编辑连接', NULL, NULL, 999, NULL, NULL, b'0', b'0', b'0', 'datasource:edit', NULL, NULL, 1614931234105, 1614931234105);
INSERT INTO `sys_menu` VALUES (27, 11, 0, 2, '校验连接', NULL, NULL, 999, NULL, NULL, b'0', b'0', b'0', 'datasource:validate', NULL, NULL, 1614931268578, 1614931268578);
INSERT INTO `sys_menu` VALUES (28, 2, 0, 2, '修改密码', NULL, NULL, 999, NULL, NULL, b'0', b'0', b'0', 'user:editPwd', NULL, NULL, 1615275128262, 1615275128262);
COMMIT;
DROP TABLE IF EXISTS `sys_role`;
CREATE TABLE `sys_role` (
`role_id` bigint(20) NOT NULL AUTO_INCREMENT COMMENT 'ID',
`code` varchar(100) NOT NULL COMMENT '代码',
`name` varchar(255) NOT NULL COMMENT '名称',
`description` varchar(255) DEFAULT NULL COMMENT '描述',
`create_by` varchar(255) DEFAULT NULL COMMENT '创建者',
`update_by` varchar(255) DEFAULT NULL COMMENT '更新者',
`create_time` bigint(13) DEFAULT NULL COMMENT '创建日期',
`update_time` bigint(13) DEFAULT NULL COMMENT '更新时间',
PRIMARY KEY (`role_id`) USING BTREE,
UNIQUE KEY `uniq_name` (`name`),
KEY `role_name_index` (`name`)
) ENGINE=InnoDB AUTO_INCREMENT=5 DEFAULT CHARSET=utf8 ROW_FORMAT=COMPACT COMMENT='角色表';
-- ----------------------------
-- Records of sys_role
-- ----------------------------
BEGIN;
INSERT INTO `sys_role` VALUES (3, 'admin', '管理员', NULL, NULL, NULL, NULL, NULL);
INSERT INTO `sys_role` VALUES (4, 'emp', '普通员工', NULL, NULL, NULL, NULL, NULL);
COMMIT;
INSERT INTO `sys_menu` (`menu_id`, `pid`, `sub_count`, `type`, `title`, `name`, `component`, `menu_sort`, `icon`, `path`, `i_frame`, `cache`, `hidden`, `permission`, `create_by`, `update_by`, `create_time`, `update_time`) VALUES ('1','0','3','0','系统管理','系统管理','Layout','5','system','/system',null,b'0',b'0','dir:sys',null,null,null,'1614916695777');
INSERT INTO `sys_menu` (`menu_id`, `pid`, `sub_count`, `type`, `title`, `name`, `component`, `menu_sort`, `icon`, `path`, `i_frame`, `cache`, `hidden`, `permission`, `create_by`, `update_by`, `create_time`, `update_time`) VALUES ('2','1','4','1','用户管理','用户管理','system/user/index','2','peoples','user',null,b'0',b'0','user:read',null,null,null,'1615786052463');
INSERT INTO `sys_menu` (`menu_id`, `pid`, `sub_count`, `type`, `title`, `name`, `component`, `menu_sort`, `icon`, `path`, `i_frame`, `cache`, `hidden`, `permission`, `create_by`, `update_by`, `create_time`, `update_time`) VALUES ('3','1','3','1','菜单管理','菜单管理','system/menu/index','2','menu','menu',null,b'0',b'0','menu:read',null,null,null,null);
INSERT INTO `sys_menu` (`menu_id`, `pid`, `sub_count`, `type`, `title`, `name`, `component`, `menu_sort`, `icon`, `path`, `i_frame`, `cache`, `hidden`, `permission`, `create_by`, `update_by`, `create_time`, `update_time`) VALUES ('4','1','3','1','组织管理','组织管理','system/dept/index','3','dept','dept',null,b'0',b'0','dept:read',null,null,null,null);
INSERT INTO `sys_menu` (`menu_id`, `pid`, `sub_count`, `type`, `title`, `name`, `component`, `menu_sort`, `icon`, `path`, `i_frame`, `cache`, `hidden`, `permission`, `create_by`, `update_by`, `create_time`, `update_time`) VALUES ('5','1','3','1','角色管理','角色管理','system/role/index','4','role','role',b'0',b'0',b'0','role:read',null,null,'1614683852133','1614683852133');
INSERT INTO `sys_menu` (`menu_id`, `pid`, `sub_count`, `type`, `title`, `name`, `component`, `menu_sort`, `icon`, `path`, `i_frame`, `cache`, `hidden`, `permission`, `create_by`, `update_by`, `create_time`, `update_time`) VALUES ('6','1','0','1','参数管理','参数管理','system/systemParamSettings/index','13','sys-tools','systemParamSettings',null,b'0',b'0','sysparam:read',null,null,null,'1615790294169');
INSERT INTO `sys_menu` (`menu_id`, `pid`, `sub_count`, `type`, `title`, `name`, `component`, `menu_sort`, `icon`, `path`, `i_frame`, `cache`, `hidden`, `permission`, `create_by`, `update_by`, `create_time`, `update_time`) VALUES ('7','0','1','0','数据集','数据管理','Layout','3','dataset','/dataset',null,b'0',b'0','dir:data',null,null,null,'1619081474697');
INSERT INTO `sys_menu` (`menu_id`, `pid`, `sub_count`, `type`, `title`, `name`, `component`, `menu_sort`, `icon`, `path`, `i_frame`, `cache`, `hidden`, `permission`, `create_by`, `update_by`, `create_time`, `update_time`) VALUES ('8','7','0','1','数据管理1','数据管理1','dataset/index','1','dataset','index',null,b'0',b'0','data:read',null,null,null,'1614916684821');
INSERT INTO `sys_menu` (`menu_id`, `pid`, `sub_count`, `type`, `title`, `name`, `component`, `menu_sort`, `icon`, `path`, `i_frame`, `cache`, `hidden`, `permission`, `create_by`, `update_by`, `create_time`, `update_time`) VALUES ('9','0','1','0','视图','视图管理','Layout','2','chart','/chart',null,b'0',b'0','dir:chart',null,null,null,'1619081462127');
INSERT INTO `sys_menu` (`menu_id`, `pid`, `sub_count`, `type`, `title`, `name`, `component`, `menu_sort`, `icon`, `path`, `i_frame`, `cache`, `hidden`, `permission`, `create_by`, `update_by`, `create_time`, `update_time`) VALUES ('10','9','0','1','视图1','视图1','chart/index','1','chart','index',null,b'0',b'0','chart:read',null,null,null,'1614915491036');
INSERT INTO `sys_menu` (`menu_id`, `pid`, `sub_count`, `type`, `title`, `name`, `component`, `menu_sort`, `icon`, `path`, `i_frame`, `cache`, `hidden`, `permission`, `create_by`, `update_by`, `create_time`, `update_time`) VALUES ('12','3','0','2','创建菜单',null,null,'999',null,null,b'0',b'0',b'0','menu:add',null,null,'1614924617327','1614924617327');
INSERT INTO `sys_menu` (`menu_id`, `pid`, `sub_count`, `type`, `title`, `name`, `component`, `menu_sort`, `icon`, `path`, `i_frame`, `cache`, `hidden`, `permission`, `create_by`, `update_by`, `create_time`, `update_time`) VALUES ('13','3','0','2','删除菜单',null,null,'999',null,null,b'0',b'0',b'0','menu:del',null,null,'1614924667808','1614924667808');
INSERT INTO `sys_menu` (`menu_id`, `pid`, `sub_count`, `type`, `title`, `name`, `component`, `menu_sort`, `icon`, `path`, `i_frame`, `cache`, `hidden`, `permission`, `create_by`, `update_by`, `create_time`, `update_time`) VALUES ('14','3','0','2','编辑菜单',null,null,'999',null,null,b'0',b'0',b'0','menu:edit',null,null,'1614930734224','1614936429773');
INSERT INTO `sys_menu` (`menu_id`, `pid`, `sub_count`, `type`, `title`, `name`, `component`, `menu_sort`, `icon`, `path`, `i_frame`, `cache`, `hidden`, `permission`, `create_by`, `update_by`, `create_time`, `update_time`) VALUES ('15','2','0','2','创建用户',null,null,'999',null,null,b'0',b'0',b'0','user:add',null,null,'1614930862373','1614930862373');
INSERT INTO `sys_menu` (`menu_id`, `pid`, `sub_count`, `type`, `title`, `name`, `component`, `menu_sort`, `icon`, `path`, `i_frame`, `cache`, `hidden`, `permission`, `create_by`, `update_by`, `create_time`, `update_time`) VALUES ('16','2','0','2','删除用户',null,null,'999',null,null,b'0',b'0',b'0','user:del',null,null,'1614930903502','1614930903502');
INSERT INTO `sys_menu` (`menu_id`, `pid`, `sub_count`, `type`, `title`, `name`, `component`, `menu_sort`, `icon`, `path`, `i_frame`, `cache`, `hidden`, `permission`, `create_by`, `update_by`, `create_time`, `update_time`) VALUES ('17','2','0','2','编辑用户',null,null,'999',null,null,b'0',b'0',b'0','user:edit',null,null,'1614930935529','1614930935529');
INSERT INTO `sys_menu` (`menu_id`, `pid`, `sub_count`, `type`, `title`, `name`, `component`, `menu_sort`, `icon`, `path`, `i_frame`, `cache`, `hidden`, `permission`, `create_by`, `update_by`, `create_time`, `update_time`) VALUES ('18','4','0','2','创建组织',null,null,'999',null,null,b'0',b'0',b'0','dept:add',null,null,'1614930976297','1614930976297');
INSERT INTO `sys_menu` (`menu_id`, `pid`, `sub_count`, `type`, `title`, `name`, `component`, `menu_sort`, `icon`, `path`, `i_frame`, `cache`, `hidden`, `permission`, `create_by`, `update_by`, `create_time`, `update_time`) VALUES ('19','4','0','2','删除组织',null,null,'999',null,null,b'0',b'0',b'0','dept:del',null,null,'1614930997130','1614930997130');
INSERT INTO `sys_menu` (`menu_id`, `pid`, `sub_count`, `type`, `title`, `name`, `component`, `menu_sort`, `icon`, `path`, `i_frame`, `cache`, `hidden`, `permission`, `create_by`, `update_by`, `create_time`, `update_time`) VALUES ('20','4','0','2','编辑组织',null,null,'999',null,null,b'0',b'0',b'0','dept:edit',null,null,'1614931022967','1614931022967');
INSERT INTO `sys_menu` (`menu_id`, `pid`, `sub_count`, `type`, `title`, `name`, `component`, `menu_sort`, `icon`, `path`, `i_frame`, `cache`, `hidden`, `permission`, `create_by`, `update_by`, `create_time`, `update_time`) VALUES ('21','5','0','2','创建角色',null,null,'999',null,null,b'0',b'0',b'0','role:add',null,null,'1614931069408','1614931069408');
INSERT INTO `sys_menu` (`menu_id`, `pid`, `sub_count`, `type`, `title`, `name`, `component`, `menu_sort`, `icon`, `path`, `i_frame`, `cache`, `hidden`, `permission`, `create_by`, `update_by`, `create_time`, `update_time`) VALUES ('22','5','0','2','删除角色',null,null,'999',null,null,b'0',b'0',b'0','role:del',null,null,'1614931097720','1614931097720');
INSERT INTO `sys_menu` (`menu_id`, `pid`, `sub_count`, `type`, `title`, `name`, `component`, `menu_sort`, `icon`, `path`, `i_frame`, `cache`, `hidden`, `permission`, `create_by`, `update_by`, `create_time`, `update_time`) VALUES ('23','5','0','2','编辑角色',null,null,'999',null,null,b'0',b'0',b'0','role:edit',null,null,'1614931124782','1614931124782');
INSERT INTO `sys_menu` (`menu_id`, `pid`, `sub_count`, `type`, `title`, `name`, `component`, `menu_sort`, `icon`, `path`, `i_frame`, `cache`, `hidden`, `permission`, `create_by`, `update_by`, `create_time`, `update_time`) VALUES ('24','34','0','2','创建连接',null,null,'997',null,null,b'0',b'0',b'0','datasource:add',null,null,'1614931168956','1615783705537');
INSERT INTO `sys_menu` (`menu_id`, `pid`, `sub_count`, `type`, `title`, `name`, `component`, `menu_sort`, `icon`, `path`, `i_frame`, `cache`, `hidden`, `permission`, `create_by`, `update_by`, `create_time`, `update_time`) VALUES ('25','34','0','2','删除连接',null,null,'999',null,null,b'0',b'0',b'0','datasource:del',null,null,'1614931205899','1614931205899');
INSERT INTO `sys_menu` (`menu_id`, `pid`, `sub_count`, `type`, `title`, `name`, `component`, `menu_sort`, `icon`, `path`, `i_frame`, `cache`, `hidden`, `permission`, `create_by`, `update_by`, `create_time`, `update_time`) VALUES ('26','34','0','2','编辑连接',null,null,'999',null,null,b'0',b'0',b'0','datasource:edit',null,null,'1614931234105','1614931234105');
INSERT INTO `sys_menu` (`menu_id`, `pid`, `sub_count`, `type`, `title`, `name`, `component`, `menu_sort`, `icon`, `path`, `i_frame`, `cache`, `hidden`, `permission`, `create_by`, `update_by`, `create_time`, `update_time`) VALUES ('27','34','0','2','校验连接',null,null,'999',null,null,b'0',b'0',b'0','datasource:validate',null,null,'1614931268578','1614931268578');
INSERT INTO `sys_menu` (`menu_id`, `pid`, `sub_count`, `type`, `title`, `name`, `component`, `menu_sort`, `icon`, `path`, `i_frame`, `cache`, `hidden`, `permission`, `create_by`, `update_by`, `create_time`, `update_time`) VALUES ('28','2','0','2','修改密码',null,null,'999',null,null,b'0',b'0',b'0','user:editPwd',null,null,'1615275128262','1615275128262');
INSERT INTO `sys_menu` (`menu_id`, `pid`, `sub_count`, `type`, `title`, `name`, `component`, `menu_sort`, `icon`, `path`, `i_frame`, `cache`, `hidden`, `permission`, `create_by`, `update_by`, `create_time`, `update_time`) VALUES ('29','0','1','0','仪表盘','仪表盘管理','Layout','1',null,'/panel',null,b'0',b'0','panel:read',null,null,null,'1619081454146');
INSERT INTO `sys_menu` (`menu_id`, `pid`, `sub_count`, `type`, `title`, `name`, `component`, `menu_sort`, `icon`, `path`, `i_frame`, `cache`, `hidden`, `permission`, `create_by`, `update_by`, `create_time`, `update_time`) VALUES ('30','29','0','1','仪表盘1','仪表盘','panel/index','1',null,'index',b'0',b'0',b'0','panel:read',null,null,null,'1619081449067');
INSERT INTO `sys_menu` (`menu_id`, `pid`, `sub_count`, `type`, `title`, `name`, `component`, `menu_sort`, `icon`, `path`, `i_frame`, `cache`, `hidden`, `permission`, `create_by`, `update_by`, `create_time`, `update_time`) VALUES ('33','0','1','0','数据源','数据源','Layout','4',null,'/datasource',b'0',b'0',b'0','dir:datasource',null,null,'1619083205537','1619083205537');
INSERT INTO `sys_menu` (`menu_id`, `pid`, `sub_count`, `type`, `title`, `name`, `component`, `menu_sort`, `icon`, `path`, `i_frame`, `cache`, `hidden`, `permission`, `create_by`, `update_by`, `create_time`, `update_time`) VALUES ('34','33','4','1','数据源1','数据源1','system/datasource/index','1',null,'index',b'0',b'0',b'0','datasource:read',null,null,null,null);
DROP TABLE IF EXISTS `sys_roles_menus`;
CREATE TABLE `sys_roles_menus` (
`menu_id` bigint(20) NOT NULL COMMENT '菜单ID',
`role_id` bigint(20) NOT NULL COMMENT '角色ID',
PRIMARY KEY (`menu_id`,`role_id`) USING BTREE,
KEY `FKcngg2qadojhi3a651a5adkvbq` (`role_id`) USING BTREE
) ENGINE=InnoDB DEFAULT CHARSET=utf8 ROW_FORMAT=COMPACT COMMENT='角色菜单关联';
-- ----------------------------
-- Records of sys_roles_menus
-- ----------------------------
BEGIN;
INSERT INTO `sys_roles_menus` VALUES (1, 3);
INSERT INTO `sys_roles_menus` VALUES (2, 3);
INSERT INTO `sys_roles_menus` VALUES (3, 3);
INSERT INTO `sys_roles_menus` VALUES (4, 3);
INSERT INTO `sys_roles_menus` VALUES (5, 3);
INSERT INTO `sys_roles_menus` VALUES (6, 3);
INSERT INTO `sys_roles_menus` VALUES (7, 3);
INSERT INTO `sys_roles_menus` VALUES (8, 3);
INSERT INTO `sys_roles_menus` VALUES (9, 3);
INSERT INTO `sys_roles_menus` VALUES (10, 3);
INSERT INTO `sys_roles_menus` VALUES (11, 3);
INSERT INTO `sys_roles_menus` VALUES (12, 3);
INSERT INTO `sys_roles_menus` VALUES (13, 3);
INSERT INTO `sys_roles_menus` VALUES (14, 3);
INSERT INTO `sys_roles_menus` VALUES (15, 3);
INSERT INTO `sys_roles_menus` VALUES (16, 3);
INSERT INTO `sys_roles_menus` VALUES (17, 3);
INSERT INTO `sys_roles_menus` VALUES (18, 3);
INSERT INTO `sys_roles_menus` VALUES (19, 3);
INSERT INTO `sys_roles_menus` VALUES (20, 3);
INSERT INTO `sys_roles_menus` VALUES (21, 3);
INSERT INTO `sys_roles_menus` VALUES (22, 3);
INSERT INTO `sys_roles_menus` VALUES (23, 3);
INSERT INTO `sys_roles_menus` VALUES (24, 3);
INSERT INTO `sys_roles_menus` VALUES (25, 3);
INSERT INTO `sys_roles_menus` VALUES (26, 3);
INSERT INTO `sys_roles_menus` VALUES (27, 3);
INSERT INTO `sys_roles_menus` VALUES (28, 3);
INSERT INTO `sys_roles_menus` VALUES (1, 4);
INSERT INTO `sys_roles_menus` VALUES (2, 4);
INSERT INTO `sys_roles_menus` VALUES (3, 4);
INSERT INTO `sys_roles_menus` VALUES (12, 4);
COMMIT;
DROP TABLE IF EXISTS `sys_user` ;
DROP TABLE IF EXISTS `sys_user`;
CREATE TABLE `sys_user` (
`user_id` bigint(20) NOT NULL AUTO_INCREMENT COMMENT 'ID',
`dept_id` bigint(20) DEFAULT NULL COMMENT '部门名称',
@ -184,19 +110,83 @@ CREATE TABLE `sys_user` (
UNIQUE KEY `uniq_email` (`email`),
KEY `FK5rwmryny6jthaaxkogownknqp` (`dept_id`) USING BTREE,
KEY `inx_enabled` (`enabled`)
) ENGINE=InnoDB AUTO_INCREMENT=7 DEFAULT CHARSET=utf8 ROW_FORMAT=COMPACT COMMENT='系统用户';
) ENGINE=InnoDB AUTO_INCREMENT=20 DEFAULT CHARSET=utf8 ROW_FORMAT=COMPACT COMMENT='系统用户';
-- ----------------------------
-- Records of sys_user
-- ----------------------------
BEGIN;
INSERT INTO `sys_user` VALUES (4, 0, 'admin', '管理员', '', NULL, 'admin@fit2cloud.com', 'e10adc3949ba59abbe56e057f20f883e', b'1', 1, NULL, NULL, NULL, NULL, 1615184951534);
COMMIT;
INSERT INTO `sys_user` (`user_id`, `dept_id`, `username`, `nick_name`, `gender`, `phone`, `email`, `password`, `is_admin`, `enabled`, `create_by`, `update_by`, `pwd_reset_time`, `create_time`, `update_time`) VALUES ('4','0','admin','管理员','',null,'admin@fit2cloud.com','e10adc3949ba59abbe56e057f20f883e',b'1','1',null,null,null,null,'1615184951534');
INSERT INTO `sys_user` (`user_id`, `dept_id`, `username`, `nick_name`, `gender`, `phone`, `email`, `password`, `is_admin`, `enabled`, `create_by`, `update_by`, `pwd_reset_time`, `create_time`, `update_time`) VALUES ('19','25','demo','demo','',null,'demo@fit2cloud.com','e10adc3949ba59abbe56e057f20f883e',b'0','1',null,null,null,'1619086036234','1619086036234');
DROP TABLE IF EXISTS `sys_role` ;
CREATE TABLE `sys_role` (
`role_id` bigint(20) NOT NULL AUTO_INCREMENT COMMENT 'ID',
`code` varchar(100) NOT NULL COMMENT '代码',
`name` varchar(255) NOT NULL COMMENT '名称',
`description` varchar(255) DEFAULT NULL COMMENT '描述',
`create_by` varchar(255) DEFAULT NULL COMMENT '创建者',
`update_by` varchar(255) DEFAULT NULL COMMENT '更新者',
`create_time` bigint(13) DEFAULT NULL COMMENT '创建日期',
`update_time` bigint(13) DEFAULT NULL COMMENT '更新时间',
PRIMARY KEY (`role_id`) USING BTREE,
UNIQUE KEY `uniq_name` (`name`),
KEY `role_name_index` (`name`)
) ENGINE=InnoDB AUTO_INCREMENT=5 DEFAULT CHARSET=utf8 ROW_FORMAT=COMPACT COMMENT='角色表';
INSERT INTO `sys_role` (`role_id`, `code`, `name`, `description`, `create_by`, `update_by`, `create_time`, `update_time`) VALUES ('3','admin','管理员',null,null,null,null,null);
INSERT INTO `sys_role` (`role_id`, `code`, `name`, `description`, `create_by`, `update_by`, `create_time`, `update_time`) VALUES ('4','emp','普通员工',null,null,null,null,null);
DROP TABLE IF EXISTS `sys_roles_menus` ;
CREATE TABLE `sys_roles_menus` (
`menu_id` bigint(20) NOT NULL COMMENT '菜单ID',
`role_id` bigint(20) NOT NULL COMMENT '角色ID',
PRIMARY KEY (`menu_id`,`role_id`) USING BTREE,
KEY `FKcngg2qadojhi3a651a5adkvbq` (`role_id`) USING BTREE
) ENGINE=InnoDB DEFAULT CHARSET=utf8 ROW_FORMAT=COMPACT COMMENT='角色菜单关联';
INSERT INTO `sys_roles_menus` (`menu_id`, `role_id`) VALUES ('1','3');
INSERT INTO `sys_roles_menus` (`menu_id`, `role_id`) VALUES ('2','3');
INSERT INTO `sys_roles_menus` (`menu_id`, `role_id`) VALUES ('3','3');
INSERT INTO `sys_roles_menus` (`menu_id`, `role_id`) VALUES ('4','3');
INSERT INTO `sys_roles_menus` (`menu_id`, `role_id`) VALUES ('5','3');
INSERT INTO `sys_roles_menus` (`menu_id`, `role_id`) VALUES ('6','3');
INSERT INTO `sys_roles_menus` (`menu_id`, `role_id`) VALUES ('7','3');
INSERT INTO `sys_roles_menus` (`menu_id`, `role_id`) VALUES ('8','3');
INSERT INTO `sys_roles_menus` (`menu_id`, `role_id`) VALUES ('9','3');
INSERT INTO `sys_roles_menus` (`menu_id`, `role_id`) VALUES ('10','3');
INSERT INTO `sys_roles_menus` (`menu_id`, `role_id`) VALUES ('11','3');
INSERT INTO `sys_roles_menus` (`menu_id`, `role_id`) VALUES ('14','3');
INSERT INTO `sys_roles_menus` (`menu_id`, `role_id`) VALUES ('15','3');
INSERT INTO `sys_roles_menus` (`menu_id`, `role_id`) VALUES ('16','3');
INSERT INTO `sys_roles_menus` (`menu_id`, `role_id`) VALUES ('17','3');
INSERT INTO `sys_roles_menus` (`menu_id`, `role_id`) VALUES ('18','3');
INSERT INTO `sys_roles_menus` (`menu_id`, `role_id`) VALUES ('19','3');
INSERT INTO `sys_roles_menus` (`menu_id`, `role_id`) VALUES ('20','3');
INSERT INTO `sys_roles_menus` (`menu_id`, `role_id`) VALUES ('21','3');
INSERT INTO `sys_roles_menus` (`menu_id`, `role_id`) VALUES ('22','3');
INSERT INTO `sys_roles_menus` (`menu_id`, `role_id`) VALUES ('23','3');
INSERT INTO `sys_roles_menus` (`menu_id`, `role_id`) VALUES ('24','3');
INSERT INTO `sys_roles_menus` (`menu_id`, `role_id`) VALUES ('25','3');
INSERT INTO `sys_roles_menus` (`menu_id`, `role_id`) VALUES ('26','3');
INSERT INTO `sys_roles_menus` (`menu_id`, `role_id`) VALUES ('27','3');
INSERT INTO `sys_roles_menus` (`menu_id`, `role_id`) VALUES ('28','3');
INSERT INTO `sys_roles_menus` (`menu_id`, `role_id`) VALUES ('29','3');
INSERT INTO `sys_roles_menus` (`menu_id`, `role_id`) VALUES ('30','3');
INSERT INTO `sys_roles_menus` (`menu_id`, `role_id`) VALUES ('31','3');
INSERT INTO `sys_roles_menus` (`menu_id`, `role_id`) VALUES ('32','3');
INSERT INTO `sys_roles_menus` (`menu_id`, `role_id`) VALUES ('33','3');
INSERT INTO `sys_roles_menus` (`menu_id`, `role_id`) VALUES ('34','3');
INSERT INTO `sys_roles_menus` (`menu_id`, `role_id`) VALUES ('101','3');
INSERT INTO `sys_roles_menus` (`menu_id`, `role_id`) VALUES ('29','4');
INSERT INTO `sys_roles_menus` (`menu_id`, `role_id`) VALUES ('30','4');
DROP TABLE IF EXISTS `sys_users_roles` ;
-- ----------------------------
-- Table structure for sys_users_roles
-- ----------------------------
DROP TABLE IF EXISTS `sys_users_roles`;
CREATE TABLE `sys_users_roles` (
`user_id` bigint(20) NOT NULL COMMENT '用户ID',
`role_id` bigint(20) NOT NULL COMMENT '角色ID',
@ -204,9 +194,6 @@ CREATE TABLE `sys_users_roles` (
KEY `FKq4eq273l04bpu4efj0jd0jb98` (`role_id`) USING BTREE
) ENGINE=InnoDB DEFAULT CHARSET=utf8 ROW_FORMAT=COMPACT COMMENT='用户角色关联';
-- ----------------------------
-- Records of sys_users_roles
-- ----------------------------
BEGIN;
INSERT INTO `sys_users_roles` VALUES (4, 3);
COMMIT;
INSERT INTO `sys_users_roles` (`user_id`, `role_id`) VALUES ('4','3');
INSERT INTO `sys_users_roles` (`user_id`, `role_id`) VALUES ('19','4');

View File

@ -21,3 +21,11 @@ export function logout() {
method: 'post'
})
}
export function validateUserName(data) {
return request({
url: '/api/auth/validateName',
method: 'post',
data
})
}
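The wrapper above posts the entered name to /api/auth/validateName and resolves with a boolean in res.data. A minimal usage sketch for an async element-ui form rule, assuming validateUserName is imported from this API module (the rule object and error message are illustrative, not part of the commit):

// Hedged sketch: wiring validateUserName into an async form validator.
const usernameRule = {
  trigger: 'blur',
  validator: (rule, value, callback) => {
    validateUserName({ userName: value.trim() })
      .then(res => (res.data ? callback() : callback(new Error('Please enter the correct user name'))))
      .catch(() => callback(new Error('Please enter the correct user name')))
  }
}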

View File

@ -3,7 +3,7 @@
<transition-group name="breadcrumb">
<el-breadcrumb-item v-for="(item,index) in levelList" :key="item.path">
<span v-if="index === 0">当前位置</span>
<span v-if="item.redirect==='noRedirect'||index==levelList.length-1" class="no-redirect">{{item.meta.title }}</span>
<span v-if="item.redirect==='noRedirect'||index==levelList.length-1" class="no-redirect">{{ item.meta.title }}</span>
<a v-else @click.prevent="handleLink(item)">{{ item.meta.title }}</a>
</el-breadcrumb-item>
</transition-group>
@ -34,7 +34,7 @@ export default {
const first = matched[0]
if (!this.isDashboard(first)) {
matched = [{ path: '/dashboard', meta: { title: 'Dashboard' }}].concat(matched)
matched = [{ path: '/panel', meta: { title: '仪表板' }}].concat(matched)
}
this.levelList = matched.filter(item => item.meta && item.meta.title && item.meta.breadcrumb !== false)
@ -44,7 +44,8 @@ export default {
if (!name) {
return false
}
return name.trim().toLocaleLowerCase() === 'Dashboard'.toLocaleLowerCase()
// return name.trim().toLocaleLowerCase() === 'Dashboard'.toLocaleLowerCase()
return name.trim().toLocaleLowerCase() === '仪表板'.toLocaleLowerCase()
},
pathCompile(path) {
// To solve this problem https://github.com/PanJiaChen/vue-element-admin/issues/561

View File

@ -0,0 +1 @@
<?xml version="1.0" standalone="no"?><!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"><svg t="1619335647805" class="icon" viewBox="0 0 1024 1024" version="1.1" xmlns="http://www.w3.org/2000/svg" p-id="859" xmlns:xlink="http://www.w3.org/1999/xlink" width="200" height="200"><defs><style type="text/css"></style></defs><path d="M975.920762 0H72.46019C45.884952 0 24.380952 24.576 24.380952 54.979048v889.660952C24.380952 975.043048 45.884952 999.619048 72.46019 999.619048h903.460572C1002.496 999.619048 1024 975.043048 1024 944.64V54.979048C1024 24.576 1002.496 0 975.920762 0zM338.066286 925.988571H89.965714v-194.243047h248.100572v194.218666z m0-258.925714H89.965714v-194.243047h248.100572v194.218666z m0-258.925714H89.965714V213.991619h248.100572v194.096762z m310.174476 517.851428H400.14019v-194.243047h248.100572v194.218666z m0-258.925714H400.14019v-194.243047h248.100572v194.218666z m0-258.925714H400.14019V213.991619h248.100572v194.096762zM958.415238 925.988571H710.314667v-194.243047H958.415238v194.218666z m0-258.925714H710.314667v-194.243047H958.415238v194.218666z m0-258.925714H710.314667V213.991619H958.415238v194.096762z" p-id="860"></path></svg>


View File

@ -600,8 +600,8 @@ export default {
avg: '平均',
max: '最大值',
min: '最小值',
std: '标准差',
var_samp: '方差',
stddev_pop: '标准差',
var_pop: '方差',
quick_calc: '快速计算',
show_name_set: '显示名设置',
color: '颜色',
@ -699,7 +699,14 @@ export default {
area_mode: '面积',
rose_radius: '圆角',
view_name: '视图名称',
name_can_not_empty: '名称不能为空'
name_can_not_empty: '名称不能为空',
custom_count: '记录数',
table_title_fontsize: '表头字体大小',
table_item_fontsize: '表格字体大小',
table_header_bg: '表头背景',
table_item_bg: '表格背景',
table_item_font_color: '字体颜色',
stripe: '斑马纹'
},
dataset: {
datalist: '数据集',

View File

@ -94,7 +94,7 @@ export default {
return meta.activeMenu
}
//
if (path === '/dashboard') {
if (path === '/panel') {
return '/'
}
//

View File

@ -82,19 +82,25 @@ export const constantRoutes = [
component: () => import('@/components/canvas/components/Editor/PreviewFullScreen'),
hidden: true
},
{
path: '/',
component: Layout,
redirect: '/dashboard',
children: [{
path: 'dashboard',
name: 'Dashboard',
component: () => import('@/views/dashboard/index'),
meta: { title: '首页', icon: 'dashboard' }
}]
redirect: '/panel',
hidden: true
}
// {
// path: '/',
// component: Layout,
// redirect: '/panel',
// children: [{
// path: 'index',
// name: '仪表板',
// component: () => import('@/views/panel/index'),
// meta: { title: '仪表板', icon: 'dashboard' }
// }]
// }
// {
// path: '/example',
// component: Layout,

View File

@ -13,7 +13,7 @@
<ul class="list-unstyled">
<li>或者你可以去:</li>
<li class="link-type">
<router-link to="/dashboard">
<router-link to="/panel">
回首页
</router-link>
</li>
@ -39,7 +39,7 @@ export default {
methods: {
back() {
if (this.$route.query.noGoBack) {
this.$router.push({ path: '/dashboard' })
this.$router.push({ path: '/panel' })
} else {
this.$router.go(-1)
}

View File

@ -1,7 +1,11 @@
export const DEFAULT_COLOR_CASE = {
value: 'default',
colors: ['#5470c6', '#91cc75', '#fac858', '#ee6666', '#73c0de', '#3ba272', '#fc8452', '#9a60b4', '#ea7ccc'],
alpha: 100
alpha: 100,
tableHeaderBgColor: '#4e81bb',
tableItemBgColor: '#c6d9f0',
tableFontColor: '#000000',
tableStripe: true
}
export const DEFAULT_SIZE = {
barDefault: true,
@ -18,7 +22,9 @@ export const DEFAULT_SIZE = {
pieRoseType: 'radius',
pieRoseRadius: 5,
funnelWidth: 80,
radarShape: 'polygon'
radarShape: 'polygon',
tableTitleFontSize: 12,
tableItemFontSize: 12
}
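DEFAULT_COLOR_CASE and DEFAULT_SIZE now carry the table-specific keys consumed by the new table renderer. A rough sketch, assuming the same relative import path that ColorSelector uses further below, of how a fresh table view's customAttr could be seeded from these defaults (the seeding step itself is an assumption, not code from this diff):

// Hedged sketch: building an initial customAttr from the chart defaults.
import { DEFAULT_COLOR_CASE, DEFAULT_SIZE } from '../../chart/chart'

export function defaultTableCustomAttr() {
  return JSON.stringify({
    color: JSON.parse(JSON.stringify(DEFAULT_COLOR_CASE)), // tableHeaderBgColor, tableItemBgColor, tableFontColor, tableStripe, alpha
    size: JSON.parse(JSON.stringify(DEFAULT_SIZE))         // tableTitleFontSize, tableItemFontSize, ...
  })
}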
export const DEFAULT_LABEL = {
show: false,

View File

@ -21,10 +21,13 @@ export default {
type: Object,
required: true
},
filter: {
type: Object,
required: false
}
filter: {
type: Object,
required: false,
default: function() {
return {}
}
}
},
data() {
return {

View File

@ -26,7 +26,7 @@
<el-radio-button label="right">{{ $t('chart.text_pos_right') }}</el-radio-button>
</el-radio-group>
</el-form-item>
<el-form-item :label="$t('chart.text_v_position')" class="form-item">
<el-form-item v-if="chart.type && !chart.type.includes('table')" :label="$t('chart.text_v_position')" class="form-item">
<el-radio-group v-model="titleForm.vPosition" size="mini" @change="changeTitleStyle">
<el-radio-button label="top">{{ $t('chart.text_pos_top') }}</el-radio-button>
<el-radio-button label="center">{{ $t('chart.text_pos_center') }}</el-radio-button>

View File

@ -3,7 +3,9 @@
<el-dropdown trigger="click" size="mini" @command="clickItem">
<span class="el-dropdown-link">
<el-tag size="small" class="item-axis">
{{ item.name }}<span v-if="item.summary" class="summary-span">{{ $t('chart.'+item.summary) }}</span><i class="el-icon-arrow-down el-icon--right" />
<span>{{ item.name }}</span>
<span v-if="item.summary" class="summary-span">{{ $t('chart.'+item.summary) }}</span>
<i class="el-icon-arrow-down el-icon--right" />
</el-tag>
<el-dropdown-menu slot="dropdown">
<el-dropdown-item>
@ -17,17 +19,17 @@
<i class="el-icon-arrow-right el-icon--right" />
</span>
<el-dropdown-menu slot="dropdown">
<el-dropdown-item :command="beforeSummary('sum')">{{ $t('chart.sum') }}</el-dropdown-item>
<el-dropdown-item :command="beforeSummary('count')">{{ $t('chart.count') }}</el-dropdown-item>
<el-dropdown-item :command="beforeSummary('avg')">{{ $t('chart.avg') }}</el-dropdown-item>
<el-dropdown-item :command="beforeSummary('max')">{{ $t('chart.max') }}</el-dropdown-item>
<el-dropdown-item :command="beforeSummary('min')">{{ $t('chart.min') }}</el-dropdown-item>
<el-dropdown-item :command="beforeSummary('std')">{{ $t('chart.std') }}</el-dropdown-item>
<el-dropdown-item :command="beforeSummary('var_samp')">{{ $t('chart.var_samp') }}</el-dropdown-item>
<el-dropdown-item v-if="item.id === 'count'" :command="beforeSummary('count')">{{ $t('chart.count') }}</el-dropdown-item>
<el-dropdown-item v-if="item.id !== 'count'" :command="beforeSummary('sum')">{{ $t('chart.sum') }}</el-dropdown-item>
<el-dropdown-item v-if="item.id !== 'count'" :command="beforeSummary('avg')">{{ $t('chart.avg') }}</el-dropdown-item>
<el-dropdown-item v-if="item.id !== 'count'" :command="beforeSummary('max')">{{ $t('chart.max') }}</el-dropdown-item>
<el-dropdown-item v-if="item.id !== 'count'" :command="beforeSummary('min')">{{ $t('chart.min') }}</el-dropdown-item>
<el-dropdown-item v-if="item.id !== 'count'" :command="beforeSummary('stddev_pop')">{{ $t('chart.stddev_pop') }}</el-dropdown-item>
<el-dropdown-item v-if="item.id !== 'count'" :command="beforeSummary('var_pop')">{{ $t('chart.var_pop') }}</el-dropdown-item>
</el-dropdown-menu>
</el-dropdown>
</el-dropdown-item>
<el-dropdown-item>
<el-dropdown-item v-if="item.id !== 'count'">
<el-dropdown placement="right-start" size="mini" style="width: 100%" @command="quickCalc">
<span class="el-dropdown-link inner-dropdown-menu">
<span>

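The dropdown above now hides the numeric aggregates for the synthetic 'count' field and renames std/var_samp to stddev_pop/var_pop, which line up with the standard SQL aggregate functions. A hypothetical mapping (not taken from this commit) from the summary keys to the SQL expressions a backend could emit:

// Hedged sketch: summary keys as SQL aggregate expressions; COUNT(*) covers the pseudo 'count' field.
const SUMMARY_TO_SQL = {
  sum: f => `SUM(${f})`,
  count: () => 'COUNT(*)',
  avg: f => `AVG(${f})`,
  max: f => `MAX(${f})`,
  min: f => `MIN(${f})`,
  stddev_pop: f => `STDDEV_POP(${f})`,
  var_pop: f => `VAR_POP(${f})`
}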
View File

@ -8,8 +8,8 @@
>
<el-col>
<el-form ref="colorForm" :model="colorForm" label-width="80px" size="mini">
<el-form-item :label="$t('chart.color_case')" class="form-item">
<el-select v-model="colorForm.colorCase" :placeholder="$t('chart.pls_slc_color_case')" size="mini" @change="changeColorCase">
<el-form-item v-if="chart.type && !chart.type.includes('table')" :label="$t('chart.color_case')" class="form-item">
<el-select v-model="colorForm.value" :placeholder="$t('chart.pls_slc_color_case')" size="mini" @change="changeColorCase">
<el-option v-for="option in colorCases" :key="option.value" :label="option.name" :value="option.value" style="display: flex;align-items: center;">
<div style="float: left">
<span v-for="(c,index) in option.colors" :key="index" :style="{width: '20px',height: '20px',float: 'left',backgroundColor: c}" />
@ -18,6 +18,20 @@
</el-option>
</el-select>
</el-form-item>
<el-form-item v-if="chart.type && chart.type.includes('table')" :label="$t('chart.table_header_bg')" class="form-item">
<colorPicker v-model="colorForm.tableHeaderBgColor" style="margin-top: 6px;cursor: pointer;z-index: 1004;border: solid 1px black" @change="changeColorCase" />
</el-form-item>
<el-form-item v-if="chart.type && chart.type.includes('table')" :label="$t('chart.table_item_bg')" class="form-item">
<colorPicker v-model="colorForm.tableItemBgColor" style="margin-top: 6px;cursor: pointer;z-index: 1003;border: solid 1px black" @change="changeColorCase" />
</el-form-item>
<el-form-item v-if="chart.type && chart.type.includes('table')" :label="$t('chart.table_item_font_color')" class="form-item">
<colorPicker v-model="colorForm.tableFontColor" style="margin-top: 6px;cursor: pointer;z-index: 1002;border: solid 1px black" @change="changeColorCase" />
</el-form-item>
<el-form-item v-if="chart.type && chart.type.includes('table')" :label="$t('chart.stripe')" class="form-item">
<el-checkbox v-model="colorForm.tableStripe" @change="changeColorCase">{{ $t('chart.stripe') }}</el-checkbox>
</el-form-item>
<el-form-item :label="$t('chart.not_alpha')" class="form-item form-item-slider">
<el-slider v-model="colorForm.alpha" show-input :show-input-controls="false" input-size="mini" @change="changeColorCase" />
</el-form-item>
@ -31,6 +45,8 @@
</template>
<script>
import { DEFAULT_COLOR_CASE } from '../../chart/chart'
export default {
name: 'ColorSelector',
props: {
@ -88,10 +104,7 @@ export default {
colors: ['#05f8d6', '#0082fc', '#fdd845', '#22ed7c', '#09b0d3', '#1d27c9', '#f9e264', '#f47a75', '#009db2']
}
],
colorForm: {
colorCase: 'default',
alpha: 100
}
colorForm: JSON.parse(JSON.stringify(DEFAULT_COLOR_CASE))
}
},
watch: {
@ -101,8 +114,7 @@ export default {
if (chart.customAttr) {
const customAttr = JSON.parse(chart.customAttr)
if (customAttr.color) {
this.colorForm.colorCase = customAttr.color.value
this.colorForm.alpha = customAttr.color.alpha
this.colorForm = customAttr.color
}
}
}
@ -114,13 +126,12 @@ export default {
changeColorCase() {
const that = this
const items = this.colorCases.filter(ele => {
return ele.value === that.colorForm.colorCase
})
this.$emit('onColorChange', {
value: items[0].value,
colors: items[0].colors,
alpha: this.colorForm.alpha
return ele.value === that.colorForm.value
})
const val = JSON.parse(JSON.stringify(this.colorForm))
val.value = items[0].value
val.colors = items[0].colors
this.$emit('onColorChange', val)
}
}
}

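changeColorCase now deep-copies the whole colorForm (palette, table colors, stripe flag, alpha) before emitting it, so the parent receives one complete color object instead of three scalar fields. A loose sketch of the receiving side in ChartEdit, assuming the handler simply stores the payload and re-renders (its real body is not shown in this diff):

// Hedged sketch of the parent handler; 'calcData' stands in for whatever refresh call ChartEdit actually uses.
methods: {
  onColorChange(val) {
    this.view.customAttr.color = val
    this.calcData() // hypothetical: persist the attribute change and reload chart data
  }
}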
View File

@ -85,6 +85,19 @@
</el-radio-group>
</el-form-item>
</el-form>
<el-form v-if="chart.type && chart.type.includes('table')" ref="sizeFormPie" :model="sizeForm" label-width="100px" size="mini">
<el-form-item :label="$t('chart.table_title_fontsize')" class="form-item">
<el-select v-model="sizeForm.tableTitleFontSize" :placeholder="$t('chart.table_title_fontsize')" @change="changeBarSizeCase">
<el-option v-for="option in fontSize" :key="option.value" :label="option.name" :value="option.value" />
</el-select>
</el-form-item>
<el-form-item :label="$t('chart.table_item_fontsize')" class="form-item">
<el-select v-model="sizeForm.tableItemFontSize" :placeholder="$t('chart.table_item_fontsize')" @change="changeBarSizeCase">
<el-option v-for="option in fontSize" :key="option.value" :label="option.name" :value="option.value" />
</el-select>
</el-form-item>
</el-form>
</el-col>
<el-button slot="reference" size="mini" class="shape-item">{{ $t('chart.size') }}<i class="el-icon-setting el-icon--right" /></el-button>
@ -116,7 +129,8 @@ export default {
{ name: this.$t('chart.line_symbol_diamond'), value: 'diamond' },
{ name: this.$t('chart.line_symbol_pin'), value: 'pin' },
{ name: this.$t('chart.line_symbol_arrow'), value: 'arrow' }
]
],
fontSize: []
}
},
watch: {
@ -133,8 +147,19 @@ export default {
}
},
mounted() {
this.init()
},
methods: {
init() {
const arr = []
for (let i = 10; i <= 30; i = i + 2) {
arr.push({
name: i + '',
value: i + ''
})
}
this.fontSize = arr
},
changeBarSizeCase() {
this.$emit('onSizeChange', this.sizeForm)
}

View File

@ -0,0 +1,212 @@
<template>
<div :style="bg_class">
<p v-show="title_show" ref="title" :style="title_class">{{ chart.title }}</p>
<ux-grid
ref="plxTable"
size="mini"
style="width: 100%;"
:height="height"
:checkbox-config="{highlight: true}"
:width-resize="true"
:header-row-style="table_header_class"
:row-style="getRowStyle"
class="table-class"
show-summary
:summary-method="summaryMethod"
>
<ux-table-column
v-for="field in fields"
:key="field.originName"
min-width="200px"
:field="field.originName"
:resizable="true"
sortable
:title="field.name"
>
<!-- <template slot="header">-->
<!-- <span>{{ field.name }}</span>-->
<!-- </template>-->
</ux-table-column>
</ux-grid>
</div>
</template>
<script>
import { hexColorToRGBA } from '../../chart/util'
export default {
name: 'TableNormal',
props: {
chart: {
type: Object,
required: true
},
filter: {
type: Object,
required: false,
default: function() {
return {}
}
}
},
data() {
return {
fields: [],
height: 'auto',
title_class: {
margin: '8px 0',
width: '100%',
fontSize: '18px',
color: '#303133',
textAlign: 'left',
fontStyle: 'normal'
},
bg_class: {
background: hexColorToRGBA('#ffffff', 0)
},
table_header_class: {
fontSize: '12px',
color: '#606266',
background: '#e8eaec'
},
table_item_class: {
fontSize: '12px',
color: '#606266',
background: '#ffffff'
},
table_item_class_stripe: {
fontSize: '12px',
color: '#606266',
background: '#ffffff'
},
title_show: true
}
},
watch: {
chart() {
this.init()
this.calcHeight()
}
},
mounted() {
this.init()
this.calcHeight()
},
methods: {
init() {
const that = this
let datas = []
if (this.chart.data) {
this.fields = JSON.parse(JSON.stringify(this.chart.data.fields))
datas = JSON.parse(JSON.stringify(this.chart.data.tableRow))
} else {
this.fields = []
datas = []
}
this.$refs.plxTable.reloadData(datas)
this.initStyle()
window.onresize = function() {
that.calcHeight()
}
},
calcHeight() {
const that = this
setTimeout(function() {
const currentHeight = document.documentElement.clientHeight
const tableMaxHeight = currentHeight - 56 - 40 - 84 - that.$refs.title.offsetHeight - 8 * 2 - 20
let tableHeight
if (that.chart.data) {
tableHeight = (that.chart.data.tableRow.length + 2) * 36
} else {
tableHeight = 0
}
if (tableHeight > tableMaxHeight) {
that.height = tableMaxHeight + 'px'
} else {
that.height = 'auto'
}
}, 10)
},
initStyle() {
if (this.chart.customAttr) {
const customAttr = JSON.parse(this.chart.customAttr)
if (customAttr.color) {
this.table_header_class.color = customAttr.color.tableFontColor
this.table_header_class.background = hexColorToRGBA(customAttr.color.tableHeaderBgColor, customAttr.color.alpha)
this.table_item_class.color = customAttr.color.tableFontColor
this.table_item_class.background = hexColorToRGBA(customAttr.color.tableItemBgColor, customAttr.color.alpha)
}
if (customAttr.size) {
this.table_header_class.fontSize = customAttr.size.tableTitleFontSize + 'px'
this.table_item_class.fontSize = customAttr.size.tableItemFontSize + 'px'
}
this.table_item_class_stripe = JSON.parse(JSON.stringify(this.table_item_class))
if (customAttr.color.tableStripe) {
this.table_item_class_stripe.background = hexColorToRGBA(customAttr.color.tableItemBgColor, customAttr.color.alpha - 40)
}
}
if (this.chart.customStyle) {
const customStyle = JSON.parse(this.chart.customStyle)
if (customStyle.text) {
this.title_show = customStyle.text.show
this.title_class.fontSize = customStyle.text.fontSize + 'px'
this.title_class.color = customStyle.text.color
this.title_class.textAlign = customStyle.text.hPosition
this.title_class.fontStyle = customStyle.text.isItalic ? 'italic' : 'normal'
}
if (customStyle.background) {
this.bg_class.background = hexColorToRGBA(customStyle.background.color, customStyle.background.alpha)
}
}
// footer
const s_table = document.getElementsByClassName('elx-table--footer')[0]
// console.log(s_table)
let s = ''
for (const i in this.table_header_class) {
s += i + ':' + this.table_header_class[i] + ';'
}
s_table.setAttribute('style', s)
},
getRowStyle({ row, rowIndex }) {
if (rowIndex % 2 === 0) {
return this.table_item_class_stripe
} else {
return this.table_item_class
}
},
summaryMethod({ columns, data }) {
const means = [] // summary row cells, one per column
columns.forEach((column, columnIndex) => {
if (columnIndex === 0) {
means.push('合计')
} else {
const values = data.map(item => Number(item[column.property]))
// only aggregate a column when it contains at least one numeric value
if (!values.every(value => isNaN(value))) {
means[columnIndex] = values.reduce((prev, curr) => {
const value = Number(curr)
if (!isNaN(value)) {
return prev + curr
} else {
return prev
}
}, 0)
means[columnIndex] = (means[columnIndex] + '').includes('.') ? means[columnIndex].toFixed(2) : means[columnIndex]
} else {
means[columnIndex] = ''
}
}
})
// the summary method expects a two-dimensional array, so wrap the single footer row
return [means]
}
}
}
</script>
<style scoped>
.table-class>>>.body--wrapper{
background: rgba(1,1,1,0);
}
</style>
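TableNormal.vue relies on hexColorToRGBA(hex, alpha) with alpha expressed on a 0–100 scale, matching the slider in ColorSelector. The helper itself is not part of this diff; a plausible stand-in for reference:

// Hedged sketch of the util imported from '../../chart/util'; the real implementation may differ.
export function hexColorToRGBA(hex, alpha) {
  const r = parseInt(hex.slice(1, 3), 16)
  const g = parseInt(hex.slice(3, 5), 16)
  const b = parseInt(hex.slice(5, 7), 16)
  return 'rgba(' + r + ',' + g + ',' + b + ',' + alpha / 100 + ')'
}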

View File

@ -516,9 +516,9 @@ export default {
},
sceneClick(data, node) {
this.$store.dispatch('chart/setViewId', null)
this.$store.dispatch('chart/setViewId', data.id)
this.$emit('switchComponent', { name: 'ChartEdit' })
// this.$store.dispatch('chart/setViewId', null)
// this.$store.dispatch('chart/setViewId', data.id)
this.$emit('switchComponent', { name: 'ChartEdit', param: { 'id': data.id }})
},
selectTable() {
@ -565,8 +565,8 @@ export default {
this.$store.dispatch('chart/setTableId', null)
this.$store.dispatch('chart/setTableId', this.table.id)
// this.$router.push('/chart/chart-edit')
this.$emit('switchComponent', { name: 'ChartEdit' })
this.$store.dispatch('chart/setViewId', response.data.id)
this.$emit('switchComponent', { name: 'ChartEdit', param: { 'id': response.data.id }})
// this.$store.dispatch('chart/setViewId', response.data.id)
this.chartTree()
})
},

View File

@ -1,6 +1,6 @@
<template>
<el-row style="height: 100%;overflow-y: hidden;width: 100%;">
<span v-show="false">{{ vId }}</span>
<!-- <span v-show="false">{{ vId }}</span>-->
<el-row style="height: 40px;background-color: white" class="padding-lr">
<el-popover
placement="right-start"
@ -62,7 +62,7 @@
<svg-icon v-if="item.deType === 0" icon-class="field_text" class="field-icon-text" />
<svg-icon v-if="item.deType === 1" icon-class="field_time" class="field-icon-time" />
<svg-icon v-if="item.deType === 2 || item.deType === 3" icon-class="field_value" class="field-icon-value" />
{{ item.name }}
<span>{{ item.name }}</span>
</span>
</transition-group>
</draggable>
@ -114,7 +114,7 @@
<el-radio value="radar" label="radar"><svg-icon icon-class="radar" class="chart-icon" /></el-radio>
</div>
<div>
<el-radio value="" label="" disabled class="disabled-none-cursor"><svg-icon icon-class="" class="chart-icon" /></el-radio>
<el-radio value="table-normal" label="table-normal"><svg-icon icon-class="table-normal" class="chart-icon" /></el-radio>
<el-radio value="" label="" disabled class="disabled-none-cursor"><svg-icon icon-class="" class="chart-icon" /></el-radio>
<el-radio value="" label="" disabled class="disabled-none-cursor"><svg-icon icon-class="" class="chart-icon" /></el-radio>
<el-radio value="" label="" disabled class="disabled-none-cursor"><svg-icon icon-class="" class="chart-icon" /></el-radio>
@ -129,12 +129,12 @@
<el-tab-pane :label="$t('chart.shape_attr')" class="padding-lr">
<color-selector class="attr-selector" :chart="chart" @onColorChange="onColorChange" />
<size-selector class="attr-selector" :chart="chart" @onSizeChange="onSizeChange" />
<label-selector class="attr-selector" :chart="chart" @onLabelChange="onLabelChange" />
<tooltip-selector class="attr-selector" :chart="chart" @onTooltipChange="onTooltipChange" />
<label-selector v-if="!view.type.includes('table')" class="attr-selector" :chart="chart" @onLabelChange="onLabelChange" />
<tooltip-selector v-if="!view.type.includes('table')" class="attr-selector" :chart="chart" @onTooltipChange="onTooltipChange" />
</el-tab-pane>
<el-tab-pane :label="$t('chart.module_style')" class="padding-lr">
<title-selector class="attr-selector" :chart="chart" @onTextChange="onTextChange" />
<legend-selector class="attr-selector" :chart="chart" @onLegendChange="onLegendChange" />
<legend-selector v-if="!view.type.includes('table')" class="attr-selector" :chart="chart" @onLegendChange="onLegendChange" />
<x-axis-selector v-if="view.type.includes('bar') || view.type.includes('line')" class="attr-selector" :chart="chart" @onChangeXAxisForm="onChangeXAxisForm" />
<y-axis-selector v-if="view.type.includes('bar') || view.type.includes('line')" class="attr-selector" :chart="chart" @onChangeYAxisForm="onChangeYAxisForm" />
<background-color-selector class="attr-selector" :chart="chart" @onChangeBackgroundForm="onChangeBackgroundForm" />
@ -195,7 +195,8 @@
</el-row>
</el-row>
<chart-component :chart-id="chart.id" :chart="chart" class="chart-class" />
<chart-component v-if="chart.type && !chart.type.includes('table')" :chart-id="chart.id" :chart="chart" class="chart-class" />
<table-normal v-if="chart.type && chart.type.includes('table')" :chart="chart" class="table-class" />
</el-row>
</el-col>
</el-row>
@ -262,10 +263,17 @@ import XAxisSelector from '../components/component-style/XAxisSelector'
import YAxisSelector from '../components/component-style/YAxisSelector'
import BackgroundColorSelector from '../components/component-style/BackgroundColorSelector'
import QuotaFilterEditor from '../components/filter/QuotaFilterEditor'
import TableNormal from '../components/table/TableNormal'
export default {
name: 'ChartEdit',
components: { DatasetChartDetail, QuotaFilterEditor, BackgroundColorSelector, FilterItem, XAxisSelector, YAxisSelector, TooltipSelector, LabelSelector, LegendSelector, TitleSelector, SizeSelector, ColorSelector, ChartComponent, QuotaItem, DimensionItem, draggable },
components: { TableNormal, DatasetChartDetail, QuotaFilterEditor, BackgroundColorSelector, FilterItem, XAxisSelector, YAxisSelector, TooltipSelector, LabelSelector, LegendSelector, TitleSelector, SizeSelector, ColorSelector, ChartComponent, QuotaItem, DimensionItem, draggable },
props: {
param: {
type: Object,
required: true
}
},
data() {
return {
table: {},
@ -311,21 +319,25 @@ export default {
}
},
computed: {
vId() {
// console.log(this.$store.state.chart.viewId);
this.getData(this.$store.state.chart.viewId)
return this.$store.state.chart.viewId
}
// vId() {
// // console.log(this.$store.state.chart.viewId);
// this.getData(this.$store.state.chart.viewId)
// return this.$store.state.chart.viewId
// }
},
watch: {
'param': function() {
console.log(this.param)
this.getData(this.param.id)
}
},
created() {
// this.get(this.$store.state.chart.viewId);
},
mounted() {
// this.get(this.$store.state.chart.viewId);
this.getData(this.$store.state.chart.viewId)
this.getData(this.param.id)
// this.myEcharts();
},
activated() {
@ -358,7 +370,11 @@ export default {
// })
view.yaxis.forEach(function(ele) {
if (!ele.summary || ele.summary === '') {
ele.summary = 'sum'
if (ele.id === 'count') {
ele.summary = 'count'
} else {
ele.summary = 'sum'
}
}
if (!ele.sort || ele.sort === '') {
ele.sort = 'none'
@ -746,6 +762,10 @@ export default {
height: calc(100% - 84px);
padding: 10px;
}
.table-class{
height: calc(100% - 104px);
margin: 10px;
}
.dialog-css>>>.el-dialog__title {
font-size: 14px;

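ChartEdit now receives the view id through a param prop and re-fetches when it changes, instead of watching the Vuex viewId. A minimal sketch of the container side, assuming the layout keeps component and param in its data and renders <component :is="component" :param="param" /> as shown further below:

// Hedged sketch: a methods excerpt for the container that swaps the active panel and forwards the param payload emitted by Group.vue.
methods: {
  switchComponent(c) {
    this.param = c.param || {}
    this.component = c.name
  }
}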
View File

@ -49,17 +49,28 @@
</template>
<script>
import { validUsername } from '@/utils/validate'
import { encrypt } from '@/utils/rsaEncrypt'
import { validateUserName } from '@/api/user'
export default {
name: 'Login',
data() {
const validateUsername = (rule, value, callback) => {
if (!validUsername(value)) {
const userName = value.trim()
validateUserName({ userName: userName }).then(res => {
if (res.data) {
callback()
} else {
callback(new Error('Please enter the correct user name'))
}
}).catch(() => {
callback(new Error('Please enter the correct user name'))
} else {
callback()
}
})
// if (!validUsername(value)) {
// callback(new Error('Please enter the correct user name'))
// } else {
// callback()
// }
}
const validatePassword = (rule, value, callback) => {
if (value.length < 6) {

View File

@ -179,7 +179,7 @@
>
<div class="view-container-class">
<el-checkbox-group v-model="checkedViews" @change="checkedViewsChange">
<el-checkbox v-for="(item ) in viewInfos" :key="item.id" :label="item.id" border>
<el-checkbox v-for="(item ) in viewInfos" :key="item.id" :label="item.id">
<span>
<svg-icon :icon-class="item.type" class="chart-icon" />
<span style="margin-left: 6px">{{ item.name }}</span>
@ -658,6 +658,7 @@ export default {
position: relative;
>>> label {
width: 100%;
margin-left: 0px !important;
}
}

View File

@ -1,5 +1,5 @@
<template>
<de-container v-loading="$store.getters.loadingMap[$store.getters.currentPath]" style="background-color: #f7f8fa">
<de-container v-loading="$store.getters.loadingMap[$store.getters.currentPath]">
<de-main-container>
<component :is="component" :param="param" />
</de-main-container>