Implement the file upload feature (chunked upload).

This commit is contained in:
chenxudong 2024-11-22 17:17:15 +08:00
parent 8619126d16
commit 516e81ba91
7 changed files with 298 additions and 45 deletions

View File

@@ -2,6 +2,7 @@ package com.electromagnetic.industry.software.data.manage.controller;
import com.electromagnetic.industry.software.data.manage.facade.EDDataFacade;
import com.electromagnetic.industry.software.data.manage.request.indicator.EDDataRequest;
import com.electromagnetic.industry.software.data.manage.request.indicator.FileChunkDTO;
import electromagnetic.data.framework.share.model.ElectromagneticResult;
import electromagnetic.data.framework.share.model.ElectromagneticResultUtil;
import io.swagger.annotations.ApiOperation;
@@ -9,8 +10,8 @@ import org.springframework.web.bind.annotation.*;
import org.springframework.web.multipart.MultipartFile;
import javax.annotation.Resource;
import javax.servlet.http.HttpServletRequest;
import java.util.Date;
import java.util.List;
@RequestMapping("/data/ed/file")
@RestController
@@ -73,5 +74,30 @@ public class EDDataController {
return ElectromagneticResultUtil.success("复制");
}
@ApiOperation(value = "获取已经上传的分片",notes = "")
@RequestMapping(value = "/getUploadedChunkNums",method = RequestMethod.GET)
public ElectromagneticResult<?> getUploadedChunkNums(@RequestParam String identifier) {
return edDataFacade.getUploadedChunkNums(identifier);
}
@ApiOperation(value = "合并分片",notes = "")
@RequestMapping(value = "/mergeChunks",method = RequestMethod.GET)
public ElectromagneticResult<?> mergeChunks(@RequestParam String identifier,
@RequestParam String fileName,
@RequestParam Integer totalChunks) {
return edDataFacade.mergeChunks(identifier, fileName, totalChunks);
}
@ApiOperation(value = "分片上传",notes = "")
@RequestMapping(value = "/batchImport",method = RequestMethod.POST)
public ElectromagneticResult<?> batchImport(FileChunkDTO fileChunkDTO) {
return edDataFacade.batchImport(fileChunkDTO);
}
@ApiOperation(value = "检查分片是否存在",notes = "")
@RequestMapping(value = "/batchImport",method = RequestMethod.GET)
public ElectromagneticResult<?> checkChunkExist(FileChunkDTO fileChunkDTO) {
return edDataFacade.checkChunkExist(fileChunkDTO);
}
}
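
A minimal client-side sketch of the chunked-upload flow exposed by the controller above, assuming the server runs on localhost with the server.port=12352 configured below; the identifier, file name, and chunk count are illustrative, and the multipart POST to /batchImport is only outlined in a comment because java.net.http has no built-in multipart encoder.

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class ChunkUploadClientSketch {
    public static void main(String[] args) throws Exception {
        HttpClient client = HttpClient.newHttpClient();
        // Base URL assumed from application.properties (server.port=12352) and @RequestMapping("/data/ed/file")
        String base = "http://localhost:12352/data/ed/file";
        // Illustrative identifier; in practice the front end typically derives it from the file (e.g. a hash)
        String identifier = "d41d8cd98f00b204e9800998ecf8427e";

        // 1. Ask the server which chunk numbers it already has, so an interrupted upload can resume
        HttpRequest uploaded = HttpRequest.newBuilder()
                .uri(URI.create(base + "/getUploadedChunkNums?identifier=" + identifier))
                .GET()
                .build();
        System.out.println(client.send(uploaded, HttpResponse.BodyHandlers.ofString()).body());

        // 2. POST every missing chunk to /batchImport as multipart/form-data carrying the FileChunkDTO
        //    fields (identifier, chunkNumber, file, ...). Omitted here: java.net.http has no multipart
        //    encoder, so a real client would use a library such as OkHttp for this step.

        // 3. Once all chunks are present, ask the server to merge them into the final file
        HttpRequest merge = HttpRequest.newBuilder()
                .uri(URI.create(base + "/mergeChunks?identifier=" + identifier
                        + "&fileName=data.zip&totalChunks=10"))
                .GET()
                .build();
        System.out.println(client.send(merge, HttpResponse.BodyHandlers.ofString()).body());
    }
}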

View File

@@ -15,15 +15,17 @@ spring.datasource.typd=com.alibaba.druid.pool.DruidDataSource
#spring.datasource.url=jdbc:mysql://${DATASOURCE_URL:obproxy-0c63.ops.cloud.cic.inter}:3306/${DATASOURCE_HOST:dataplatform_ptst}?serverTimezone=UTC&useUnicode=true&characterEncoding=utf-8&useSSL=true&serverTimezone=GMT%2B8&allowMultiQueries=true&rewriteBatchedStatements=true
#spring.datasource.username=${DATASOURCE_NAME:dataplatform@ProjectTest_tenant5#testdb}
#spring.datasource.password=${DATASOURCE_PASSWORD:Y7G6v4_3ijn77K_Kbl_U85}
spring.datasource.url=jdbc:mysql://139.196.179.195:3306/em_data?serverTimezone=UTC&useUnicode=true&characterEncoding=utf-8&useSSL=true&serverTimezone=GMT%2B8&allowMultiQueries=true&rewriteBatchedStatements=true
spring.datasource.url=jdbc:mysql://139.224.43.89:3306/em_data?serverTimezone=UTC&useUnicode=true&characterEncoding=utf-8&useSSL=true&serverTimezone=GMT%2B8&allowMultiQueries=true&rewriteBatchedStatements=true
spring.datasource.username=comac
spring.datasource.password=2024*Comac
spring.servlet.multipart.max-file-size=500MB
spring.servlet.multipart.max-request-size=500MB
spring.datasource.driver-class-name=com.mysql.jdbc.Driver
spring.datasource.type=com.alibaba.druid.pool.DruidDataSource
mybatis.mapper-locations=classpath:sqlmapper/*.xml
pagehelper.helperDialect=mysql
pagehelper.reasonable=false
server.port=8888
server.port=12352
upload.file.dest.path=G:/data/

View File

@@ -1,6 +1,8 @@
package com.electromagnetic.industry.software.data.manage.facade;
import com.electromagnetic.industry.software.data.manage.request.indicator.EDDataRequest;
import com.electromagnetic.industry.software.data.manage.request.indicator.FileChunkDTO;
import com.electromagnetic.industry.software.data.manage.request.indicator.FileChunkResultDTO;
import com.electromagnetic.industry.software.data.manage.response.indicator.EDDataPageResponse;
import electromagnetic.data.framework.share.model.ElectromagneticResult;
@@ -61,9 +63,34 @@ public interface EDDataFacade {
/**
* Import
* @param request
* @param fileChunkDTO
* @return
*/
ElectromagneticResult<String> batchImport(EDDataRequest request);
ElectromagneticResult<?> batchImport(FileChunkDTO fileChunkDTO);
/**
* Get the chunk numbers that have already been uploaded
* @param identifier
* @return
*/
ElectromagneticResult<List<Integer>> getUploadedChunkNums(String identifier);
/**
* Merge the uploaded chunks into the final file
* @param identifier
* @param fileName
* @param totalChunks
* @return
*/
ElectromagneticResult<?> mergeChunks(String identifier, String fileName, Integer totalChunks);
/**
* Check whether the chunk already exists
* @param fileChunkDTO
* @return
*/
ElectromagneticResult<FileChunkResultDTO> checkChunkExist(FileChunkDTO fileChunkDTO);
}

View File

@@ -0,0 +1,18 @@
package com.electromagnetic.industry.software.data.manage.request.indicator;
import lombok.Data;
import org.springframework.web.multipart.MultipartFile;
@Data
public class FileChunkDTO {
private String identifier;
private long totalSize;
private MultipartFile file;
private String fileName;
private Integer chunkNumber;
private Long chunkSize;
private Long currentChunkSize;
private Integer totalChunks;
}

View File

@@ -0,0 +1,15 @@
package com.electromagnetic.industry.software.data.manage.request.indicator;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.util.Set;
@Data
@AllArgsConstructor
@NoArgsConstructor
public class FileChunkResultDTO {
private Boolean skipUpload;
private Set<Integer> uploaded;
}

View File

@@ -1,18 +1,21 @@
package com.electromagnetic.industry.software.data.manage.service.aop;
import com.alibaba.fastjson.JSON;
import electromagnetic.data.framework.share.exception.LoggerConstant;
import electromagnetic.data.framework.share.log.LogUtils;
import electromagnetic.data.framework.share.log.ServiceResultHandleUtil;
import lombok.extern.slf4j.Slf4j;
import org.apache.catalina.connector.ResponseFacade;
import org.aspectj.lang.ProceedingJoinPoint;
import org.aspectj.lang.Signature;
import org.aspectj.lang.annotation.Around;
import org.aspectj.lang.annotation.Aspect;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.http.ResponseEntity;
import org.springframework.stereotype.Component;
import org.springframework.util.StopWatch;
import org.springframework.web.bind.annotation.ExceptionHandler;
import org.springframework.web.multipart.MultipartFile;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import java.util.ArrayList;
import java.util.List;
/**
* Generic logging aspect for the service layer
*
@@ -21,33 +24,56 @@ import org.springframework.stereotype.Component;
*/
@Aspect
@Component
@Slf4j
public class ServiceAspect {
private static final Logger LOGGER = LoggerFactory.getLogger(LoggerConstant.DOMAIN_SERVICE);
/**
* Logging aspect
*/
@Around("execution(* com.electromagnetic.industry.software.data.manage.facade..*.*(..)))")
public Object logAspect(ProceedingJoinPoint pjd) {
long startTime = System.currentTimeMillis();
Object[] args = pjd.getArgs();
Object arg = args.length < 1 ? "" : args[0];
Signature signature = pjd.getSignature();
String declaringTypeName = signature.getDeclaringTypeName();
String logPattern = declaringTypeName.substring(declaringTypeName.lastIndexOf(".") + 1) + "@" + signature.getName() + " request={0}, response={1}, cost={2}ms";
Object proceed = null;
public Object process(ProceedingJoinPoint jp) throws Throwable {
String methodInfo = jp.getTarget().getClass().getSimpleName() + "."
+ jp.getSignature().getName();
Object[] args = jp.getArgs();
String paramInfo = "";
if (args != null && args.length > 0) {
if (args[0] != null && args[0].getClass() != ResponseFacade.class) {
try {
proceed = pjd.proceed(args);
} catch (Throwable t) {
LogUtils.error(t,LOGGER, "Executing " + logPattern+",Throwable:"+t,
arg == null ? "" : JSON.toJSONString(arg), JSON.toJSONString(proceed), System.currentTimeMillis() - startTime);
return ServiceResultHandleUtil.serviceError(t);
List<Object> list = new ArrayList<>();
for (Object obj : jp.getArgs()) {
if (obj instanceof ServletResponse) {
log.info("参数中有response");
} else if (obj instanceof ServletRequest) {
log.info("参数中有request");
} else if (obj instanceof MultipartFile) {
// do not log file contents
MultipartFile obj1 = (MultipartFile) obj;
log.info("参数中文件;文件名:{},文件大小:{}", obj1.getName(), obj1.getSize());
break;
} else {
list.add(obj);
}
}
// log the normal return
LogUtils.info(LOGGER, "End " + logPattern,
arg == null ? "" : JSON.toJSONString(arg), JSON.toJSONString(proceed), System.currentTimeMillis() - startTime);
return proceed;
paramInfo = JSON.toJSONString(list);
} catch (Exception e) {
log.warn("切面异常", e);
}
}
}
log.info("请求接口开始:{},参数:{}", methodInfo, paramInfo);
long startTime = System.currentTimeMillis();
StopWatch stopwatch = new StopWatch();
stopwatch.start("接口:" + methodInfo);
Object rvt = jp.proceed();
if (rvt instanceof ResponseEntity) {
return rvt;
}
String returnInfo = JSON.toJSONString(rvt);
log.info("请求接口结束:{},返回参数:{},接口耗时:{}", methodInfo, returnInfo, (System.currentTimeMillis() - startTime) + "毫秒");
stopwatch.stop();
log.debug(stopwatch.prettyPrint());
return rvt;
}
}

View File

@@ -1,5 +1,9 @@
package com.electromagnetic.industry.software.data.manage.service.facade;
import cn.hutool.core.io.FileUtil;
import cn.hutool.core.io.IoUtil;
import cn.hutool.core.io.file.FileMode;
import cn.hutool.core.util.ZipUtil;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.electromagnetic.industry.software.data.manage.domain.boardservice.category.model.Category;
@@ -10,29 +14,41 @@ import com.electromagnetic.industry.software.data.manage.domain.boardservice.ind
import com.electromagnetic.industry.software.data.manage.domain.boardservice.user.service.CategoryService;
import com.electromagnetic.industry.software.data.manage.facade.EDDataFacade;
import com.electromagnetic.industry.software.data.manage.request.indicator.EDDataRequest;
import com.electromagnetic.industry.software.data.manage.request.indicator.FileChunkDTO;
import com.electromagnetic.industry.software.data.manage.request.indicator.FileChunkResultDTO;
import com.electromagnetic.industry.software.data.manage.response.indicator.EDDataPageResponse;
import com.electromagnetic.industry.software.data.manage.service.mappers.EDDataMappers;
import electromagnetic.data.framework.share.id.IdWorker;
import electromagnetic.data.framework.share.model.ElectromagneticResult;
import electromagnetic.data.framework.share.model.ElectromagneticResultUtil;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.http.HttpStatus;
import org.springframework.stereotype.Service;
import org.springframework.web.multipart.MultipartFile;
import javax.annotation.Resource;
import java.io.File;
import java.io.*;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.*;
import java.util.stream.Collectors;
@Service
@Slf4j
public class EDDataFacadeImpl implements EDDataFacade {
@Resource
private EDDataService edDataService;
private CategoryService categoryService;
@Value("${upload.file.dest.path}")
private String uploadFilePath;
private static final String UPLOAD_FILE_CHUNK_SUFFIX = ".part";
/**
* Create a folder
* WISDOM_CREATE_FOLDER
@@ -431,19 +447,142 @@ public class EDDataFacadeImpl implements EDDataFacade {
/**
* Import
* @param request
* @param fileChunkDTO
* @return
*/
@Override
public ElectromagneticResult<String> batchImport(EDDataRequest request) {
// 1. Upload the file to the designated directory and rename it
// 2. Receive the zip archive
// 3. Unzip the folder and scan all of its files
// 4. Loop over the files: read each file name, query the database by the naming rule, and match it against the category tree (folders and files) to decide whether to skip it
// 5. Create a new file record if it does not exist
// 6. Move the file to the upload directory
// 7. Return success when processing is complete
return null;
public ElectromagneticResult<?> batchImport(FileChunkDTO fileChunkDTO) {
String identifier = fileChunkDTO.getIdentifier();
String fileName = fileChunkDTO.getFileName();
// First check whether the target file already exists; duplicate uploads are not allowed
String destZipPath = uploadFilePath + identifier + File.separator + fileName;
boolean existFile = FileUtil.exist(new File(destZipPath));
if (existFile) {
return ElectromagneticResultUtil.fail("-1", "文件已经存在,请勿重复上传。");
}
// Check whether this chunk has already been uploaded
String destChunkPath = uploadFilePath + identifier + File.separator + fileChunkDTO.getChunkNumber() + UPLOAD_FILE_CHUNK_SUFFIX;
boolean existChunk = FileUtil.exist(new File(destChunkPath));
if (existChunk) {
return ElectromagneticResultUtil.success(true);
}
File dir = new File(uploadFilePath + identifier + File.separator);
if (!dir.exists()) {
dir.mkdirs();
}
try (
InputStream inputStream = fileChunkDTO.getFile().getInputStream();
FileOutputStream fileOutputStream = new FileOutputStream(destChunkPath);
) {
IoUtil.copy(inputStream, fileOutputStream);
} catch (IOException ioException) {
log.error("上传文件错误...", ioException);
return ElectromagneticResultUtil.fail("-1", "分片上传失败");
}
return ElectromagneticResultUtil.success(fileChunkDTO.getIdentifier());
}
@Override
public ElectromagneticResult<List<Integer>> getUploadedChunkNums(String identifier) {
return ElectromagneticResultUtil.success(getUploadedChunks(identifier));
}
private List<Integer> getUploadedChunks(String identifier) {
String destPath = uploadFilePath + identifier;
if (!FileUtil.exist(new File(destPath))) {
return new ArrayList<>();
}
List<Integer> uploadedFileChunkNums = FileUtil.listFileNames(destPath)
.stream()
.filter(e -> e.endsWith(UPLOAD_FILE_CHUNK_SUFFIX))
.map(e -> e.replace(UPLOAD_FILE_CHUNK_SUFFIX, ""))
.map(Integer::parseInt)
.collect(Collectors.toList());
return uploadedFileChunkNums;
}
// TODO: verify what happens to a chunk's stored data if the network drops halfway through its upload
@Override
public ElectromagneticResult<?> mergeChunks(String identifier, String fileName, Integer totalChunks) {
// Check that every chunk has been uploaded (chunk numbers start at 1)
for (int i = 1; i <= totalChunks; i++) {
String tmpPath = uploadFilePath + identifier + File.separator + i + UPLOAD_FILE_CHUNK_SUFFIX;
if (!FileUtil.exist(new File(tmpPath))) {
log.error("第{}个分片没有上传完成,请上传完成后再合并。", i);
return ElectromagneticResultUtil.fail("-1", "文件尚未上传完成。");
}
}
// Merge the chunks in order into the target file
String destZipPath = uploadFilePath + identifier + File.separator + fileName;
File mergedFile = new File(destZipPath);
try (RandomAccessFile targetFile = new RandomAccessFile(mergedFile, "rw")) {
byte[] buffer = new byte[1024];
for (int i = 1; i <= totalChunks; i++) {
String tmpPath = uploadFilePath + identifier + File.separator + i + UPLOAD_FILE_CHUNK_SUFFIX;
try (RandomAccessFile tmp = new RandomAccessFile(new File(tmpPath), "r")) {
int len;
while ((len = tmp.read(buffer)) != -1) {
targetFile.write(buffer, 0, len);
}
}
}
} catch (IOException ioException) {
log.error("文件合并失败", ioException);
return ElectromagneticResultUtil.fail("-1", "文件合并失败");
}
// Delete the chunk files after a successful merge
for (int i = 1; i <= totalChunks; i++) {
String tmpPath = uploadFilePath + identifier + File.separator + i + UPLOAD_FILE_CHUNK_SUFFIX;
FileUtil.del(tmpPath);
}
// Verify the file's MD5 checksum (not yet implemented)
// Unzip the merged archive
ZipUtil.unzip(destZipPath, uploadFilePath + identifier + File.separator);
// Persist the file information to the database (not yet implemented)
return ElectromagneticResultUtil.success(true);
}
@Override
public ElectromagneticResult<FileChunkResultDTO> checkChunkExist(FileChunkDTO fileChunkDTO) {
// First check whether the merged zip file exists; if it does not, the upload has not finished yet
String identifier = fileChunkDTO.getIdentifier();
String fileName = fileChunkDTO.getFileName();
String destZipPath = uploadFilePath + identifier + File.separator + fileName;
boolean existFile = FileUtil.exist(new File(destZipPath));
if (existFile) {
return ElectromagneticResultUtil.success(new FileChunkResultDTO(false, new HashSet<>()));
}
List<Integer> uploadedChunks = getUploadedChunks(identifier);
return ElectromagneticResultUtil.success(new FileChunkResultDTO(true, new HashSet<>(uploadedChunks)));
}
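
The mergeChunks implementation above leaves MD5 verification as a placeholder comment. Below is a minimal sketch of such a check, assuming (this commit does not confirm it) that the identifier sent by the client is the MD5 hex digest of the whole file; the class and method names are hypothetical.

import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.security.MessageDigest;

public final class Md5CheckSketch {

    // Computes the MD5 of the merged file and compares it with the digest supplied by the client.
    static boolean md5Matches(String mergedFilePath, String expectedMd5Hex) throws Exception {
        MessageDigest md5 = MessageDigest.getInstance("MD5");
        try (InputStream in = Files.newInputStream(Paths.get(mergedFilePath))) {
            byte[] buffer = new byte[8192];
            int len;
            // Stream the file through the digest so large merged archives do not need to fit in memory
            while ((len = in.read(buffer)) != -1) {
                md5.update(buffer, 0, len);
            }
        }
        StringBuilder hex = new StringBuilder();
        for (byte b : md5.digest()) {
            hex.append(String.format("%02x", b));
        }
        return hex.toString().equalsIgnoreCase(expectedMd5Hex);
    }
}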