Fix the exception that occurred when exporting very large data volumes from the comprehensive-browse data table

master
guoxin 1 year ago
parent 0079a889a2
commit 3f4861c867
  1. shandan-browser/src/main/java/com/keyware/shandan/browser/controller/SearchController.java (5 changed lines)
  2. shandan-browser/src/main/java/com/keyware/shandan/browser/service/MetadataDataService.java (4 changed lines)
  3. shandan-browser/src/main/java/com/keyware/shandan/browser/service/impl/MetadataDataServiceImpl.java (9 changed lines)
  4. shandan-system/src/main/java/com/keyware/shandan/bianmu/export/ExportProgress.java (2 changed lines)
  5. shandan-system/src/main/java/com/keyware/shandan/bianmu/export/MetaTableExport.java (76 changed lines)
  6. shandan-system/src/main/java/com/keyware/shandan/datasource/mapper/DynamicDatasourceMapper.java (1 changed line)

@@ -32,7 +32,6 @@ import java.io.File;
import java.io.IOException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* Combined search front-end controller
@@ -233,10 +232,8 @@ public class SearchController {
String userId = auth.getPrincipal().toString();
MetaTableExport export = ExportCache.getCache(userId, null);
if (export == null) {
int count = metadataDataService.count(metadata, condition);
MetaTableExport newExport = new MetaTableExport(userId, metadata);
newExport.queryData((o) -> metadataDataService.queryDataByHandler(metadata, condition, newExport));
newExport.setDataCount(count);
newExport.setQueryHandler((list) -> list.addAll(metadataDataService.list(metadata, condition)));
export = newExport;
ExportCache.startExport(export);
}
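
For clarity, after this change the cache-miss branch appears to reduce to roughly the sketch below; the pre-change count/queryData/setDataCount wiring is replaced by a single query-handler callback (a sketch, not the verbatim file):

    String userId = auth.getPrincipal().toString();
    MetaTableExport export = ExportCache.getCache(userId, null);
    if (export == null) {
        MetaTableExport newExport = new MetaTableExport(userId, metadata);
        // The controller no longer pre-counts rows or wires up a streaming ResultHandler;
        // it hands the export a callback that loads the full result list when the task runs.
        newExport.setQueryHandler(list -> list.addAll(metadataDataService.list(metadata, condition)));
        export = newExport;
        ExportCache.startExport(export);
    }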

@@ -6,6 +6,7 @@ import com.keyware.shandan.browser.entity.SearchConditionVo;
import org.apache.ibatis.session.ResultHandler;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
@@ -19,5 +20,8 @@ public interface MetadataDataService {
int count(MetadataBasicVo metadata, SearchConditionVo condition);
List<Map<String, Object>> list(MetadataBasicVo metadata, SearchConditionVo condition);
void queryDataByHandler(MetadataBasicVo metadata, SearchConditionVo condition, ResultHandler<HashMap<String, Object>> handler);
}

@@ -75,6 +75,15 @@ public class MetadataDataServiceImpl implements MetadataDataService {
return dynamicDatasourceMapper.count(sql);
}
@Override
public List<Map<String, Object>> list(MetadataBasicVo metadata, SearchConditionVo condition) {
String sql = getQuerySql(metadata, condition);
sql += condition.getOrderBySql();
List<Map<String, Object>> list = dynamicDatasourceMapper.list(sql);
list.forEach(item-> item.entrySet().forEach(this::convertBigTextField));
return list;
}
@Override
public void queryDataByHandler(MetadataBasicVo metadata, SearchConditionVo condition, ResultHandler<HashMap<String, Object>> handler) {
String sql = getQuerySql(metadata, condition);
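
The convertBigTextField helper used in the new list() method sits outside this diff. Purely as a hypothetical illustration of what such a per-entry converter usually does (turning CLOB values into plain strings so they can be rendered and exported), it might look like:

    // Hypothetical sketch only - the real convertBigTextField body is not part of this commit.
    private void convertBigTextField(Map.Entry<String, Object> entry) {
        if (entry.getValue() instanceof java.sql.Clob) {
            java.sql.Clob clob = (java.sql.Clob) entry.getValue();
            try {
                // Read the whole CLOB into a String (positions are 1-based for getSubString)
                entry.setValue(clob.getSubString(1, (int) clob.length()));
            } catch (java.sql.SQLException e) {
                entry.setValue(""); // fall back to an empty cell if the CLOB cannot be read
            }
        }
    }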

@@ -60,7 +60,7 @@ public abstract class ExportProgress implements Runnable, Serializable {
* @return
*/
public boolean getIsDone() {
return currentStep == getStepTotal();
return currentStep >= getStepTotal() && this.currentStep > 0 && this.getStepTotal() > 0;
}
protected void setDone() {
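
The one-line change to getIsDone() closes a premature-completion edge case: with the old equality check, an export whose step total was still 0 (nothing set up yet) already reported itself as done. A small illustration with hypothetical values:

    long currentStep = 0, stepTotal = 0;                 // freshly created export, no work done yet
    boolean oldIsDone = currentStep == stepTotal;        // true - reported as done too early
    boolean newIsDone = currentStep >= stepTotal
            && currentStep > 0 && stepTotal > 0;         // false until real progress has been made

The >= comparison also keeps the flag true if currentStep ever overshoots the total.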

@@ -3,37 +3,32 @@ package com.keyware.shandan.bianmu.export;
import cn.hutool.core.collection.CollUtil;
import cn.hutool.poi.excel.BigExcelWriter;
import cn.hutool.poi.excel.ExcelUtil;
import cn.hutool.poi.excel.ExcelWriter;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.keyware.shandan.bianmu.entity.MetadataBasicVo;
import com.keyware.shandan.bianmu.service.MetadataService;
import com.keyware.shandan.common.util.StreamUtil;
import com.keyware.shandan.common.util.StringUtils;
import com.keyware.shandan.frame.config.component.AppContext;
import org.apache.ibatis.session.ResultContext;
import org.apache.ibatis.session.ResultHandler;
import java.io.File;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Consumer;
import java.util.stream.Collectors;
public class MetaTableExport extends ExportProgress implements ResultHandler<HashMap<String, Object>> {
public class MetaTableExport extends ExportProgress {
private final MetadataBasicVo metadata;
private Consumer<Object> queryFunc;
private final String fileName;
private JSONArray colsArray;
private final MetadataService metadataService;
// Collection of all rows
private List<List<String>> rowList = new ArrayList<>();
private final List<List<String>> rowList = new ArrayList<>();
// Collection of field column names
private List<String> colNameList = new ArrayList<>();
private int dataTotal;
private final List<String> colNameList = new ArrayList<>();
private final List<Map<String, Object>> datas = new ArrayList<>();
private Consumer<List<Map<String, Object>>> queryHandler;
private boolean isDone = false;
public MetaTableExport(String userId, MetadataBasicVo metadata) {
super(userId);
@@ -49,7 +44,7 @@ public class MetaTableExport extends ExportProgress implements ResultHandler<Has
@Override
public void run() {
this.setTitle("正在读取数据表信息");
colsArray = metadataService.getColumns(metadata.getId());
JSONArray colsArray = metadataService.getColumns(metadata.getId());
// Add the first row: the column names with their comments, used as the header row of the sheet
rowList.add(colsArray.stream().map((json) -> {
@@ -60,26 +55,37 @@ public class MetaTableExport extends ExportProgress implements ResultHandler<Has
return colName + (StringUtils.hasText(comment) ? "[" + comment + "]" : "");
}).collect(Collectors.toList()));
this.setTitle("正在读取数据到临时文件");
// 执行数据查询
try {
queryFunc.accept(null);
this.setTitle("正在查询数据");
this.autoAddStep();
this.queryHandler.accept(this.datas);
handleResult();
File file = new File(fileName);
if(file.exists()){
if (file.exists()) {
file.delete();
}
List<List<String>> rows = CollUtil.newArrayList(rowList);
// Create the writer via the utility class
ExcelWriter writer = ExcelUtil.getWriter(file);
BigExcelWriter writer = ExcelUtil.getBigWriter(file);
this.setTitle("正在写入缓存");
this.autoAddStep();
// Write out the content in one pass
writer.write(rows);
// Close the writer to release memory
this.setTitle("Writing the file");
this.autoAddStep();
// Close the writer after the file is written to release memory
writer.close();
this.autoAddStep();
this.setTitle("数据文件准备完毕,开始下载");
this.setDone();
isDone = true;
} catch (Exception e) {
e.printStackTrace();
this.error = true;
this.setTitle("准备数据时出现异常");
this.clean();
@@ -88,7 +94,7 @@ public class MetaTableExport extends ExportProgress implements ResultHandler<Has
@Override
public long getStepTotal() {
return this.dataTotal;
return 4;
}
@Override
@@ -96,24 +102,24 @@ public class MetaTableExport extends ExportProgress implements ResultHandler<Has
return this.fileName;
}
@Override
public void handleResult(ResultContext<? extends HashMap<String, Object>> context) {
Map<String, Object> data = context.getResultObject();
// Iterate over the column names, fetch the corresponding values, and assemble them into the list of cells
List<String> cells = colNameList.stream().map(col -> {
Object value = data.get(col);
value = value == null ? "" : value;
return String.valueOf(value);
}).collect(Collectors.toList());
rowList.add(cells);
this.autoAddStep();
public void handleResult() {
for (Map<String, Object> data : this.datas) {
// Iterate over the column names, fetch the corresponding values, and assemble them into the list of cells
List<String> cells = colNameList.stream().map(col -> {
Object value = data.get(col);
value = value == null ? "" : value;
return String.valueOf(value);
}).collect(Collectors.toList());
rowList.add(cells);
}
}
public void queryData(Consumer<Object> query) {
this.queryFunc = query;
public void setQueryHandler(Consumer<List<Map<String, Object>>> handler) {
this.queryHandler = handler;
}
public void setDataCount(int count) {
this.dataTotal = count;
@Override
public boolean getIsDone() {
return isDone;
}
}
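
The switch from ExcelUtil.getWriter to ExcelUtil.getBigWriter looks like the key part of the memory fix: Hutool's BigExcelWriter is backed by POI's streaming SXSSFWorkbook, which keeps only a window of rows in memory and spills earlier rows to temporary files instead of holding the whole workbook on the heap. A minimal standalone sketch of that pattern (file name and sample rows are invented; assumes hutool-poi and poi-ooxml are on the classpath):

    import cn.hutool.poi.excel.BigExcelWriter;
    import cn.hutool.poi.excel.ExcelUtil;

    import java.io.File;
    import java.util.Arrays;
    import java.util.List;

    public class BigWriterSketch {
        public static void main(String[] args) {
            File target = new File("export-sketch.xlsx"); // hypothetical path, analogous to fileName
            BigExcelWriter writer = ExcelUtil.getBigWriter(target);
            List<List<String>> rows = Arrays.asList(
                    Arrays.asList("col1[Comment 1]", "col2[Comment 2]"), // header row, like rowList's first entry
                    Arrays.asList("a", "b"),
                    Arrays.asList("c", "d"));
            writer.write(rows);  // stream all rows out
            writer.close();      // flush the workbook to the file and clean up the temporary files
        }
    }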

@@ -25,6 +25,7 @@ import java.util.Map;
public interface DynamicDatasourceMapper {
@Select("${sql}")
@ResultType(HashMap.class)
List<Map<String, Object>> list(String sqlStr);
@Select("${sql}")
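
For context, a hedged usage sketch of the new list() mapper method (the SQL and table name are invented; in the real flow the statement comes from getQuerySql() plus the order-by clause, dynamicDatasourceMapper is assumed to be an injected mapper instance, and the dynamic datasource is expected to already point at the metadata's source):

    // ${sql} is plain string substitution, so the complete SELECT statement is passed in as-is.
    String sql = "SELECT * FROM demo_table WHERE 1 = 1 ORDER BY id"; // hypothetical table
    List<Map<String, Object>> rows = dynamicDatasourceMapper.list(sql);
    // Each row arrives as a HashMap keyed by column name, matching @ResultType(HashMap.class).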