diff --git a/backend/services/data-management-service/src/main/java/com/dataengine/datamanagement/application/service/DatasetFileApplicationService.java b/backend/services/data-management-service/src/main/java/com/dataengine/datamanagement/application/service/DatasetFileApplicationService.java
index e8141e1a..132e9f7d 100644
--- a/backend/services/data-management-service/src/main/java/com/dataengine/datamanagement/application/service/DatasetFileApplicationService.java
+++ b/backend/services/data-management-service/src/main/java/com/dataengine/datamanagement/application/service/DatasetFileApplicationService.java
@@ -16,6 +16,7 @@
 import com.dataengine.datamanagement.interfaces.dto.UploadFilesPreRequest;
 import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.databind.ObjectMapper;
+import lombok.extern.slf4j.Slf4j;
 import org.apache.ibatis.session.RowBounds;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Value;
@@ -43,6 +44,7 @@
 /**
  * 数据集文件应用服务(UUID 模式)
  */
+@Slf4j
 @Service
 @Transactional
 public class DatasetFileApplicationService {
@@ -58,7 +60,7 @@ public class DatasetFileApplicationService {
     @Autowired
     public DatasetFileApplicationService(DatasetFileMapper datasetFileMapper, DatasetMapper datasetMapper,
                                          FileService fileService,
-                                         @Value("${app.file.upload-dir:./uploads}") String uploadDir) {
+                                         @Value("${app.file.upload-dir:./dataset}") String uploadDir) {
         this.datasetFileMapper = datasetFileMapper;
         this.datasetMapper = datasetMapper;
         this.fileStorageLocation = Paths.get(uploadDir).toAbsolutePath().normalize();
@@ -80,10 +82,12 @@ public DatasetFile uploadFile(String datasetId, MultipartFile file, String descr
         }
 
         String originalFilename = file.getOriginalFilename();
-        String fileName = System.currentTimeMillis() + "_" + (originalFilename != null ? originalFilename : "file");
+        String fileName = originalFilename != null ? originalFilename : "file";
originalFilename : "file"; try { // 保存文件到磁盘 - Path targetLocation = this.fileStorageLocation.resolve(fileName); + Path targetLocation = this.fileStorageLocation.resolve(datasetId + File.separator + fileName); + // 确保目标目录存在 + Files.createDirectories(targetLocation); Files.copy(file.getInputStream(), targetLocation, StandardCopyOption.REPLACE_EXISTING); // 创建文件实体(UUID 主键) @@ -108,6 +112,7 @@ public DatasetFile uploadFile(String datasetId, MultipartFile file, String descr return datasetFileMapper.findByDatasetIdAndFileName(datasetId, fileName); } catch (IOException ex) { + log.error("Could not store file {}", fileName, ex); throw new RuntimeException("Could not store file " + fileName, ex); } } diff --git a/backend/services/data-management-service/src/main/java/com/dataengine/datamanagement/interfaces/converter/DatasetConverter.java b/backend/services/data-management-service/src/main/java/com/dataengine/datamanagement/interfaces/converter/DatasetConverter.java index baf3b9fe..d03b3ed6 100644 --- a/backend/services/data-management-service/src/main/java/com/dataengine/datamanagement/interfaces/converter/DatasetConverter.java +++ b/backend/services/data-management-service/src/main/java/com/dataengine/datamanagement/interfaces/converter/DatasetConverter.java @@ -24,6 +24,8 @@ public interface DatasetConverter { /** * 将数据集转换为响应 */ + @Mapping(source = "sizeBytes", target = "totalSize") + @Mapping(source = "path", target = "targetLocation") DatasetResponse convertToResponse(Dataset dataset); /** diff --git a/backend/services/data-management-service/src/main/java/com/dataengine/datamanagement/interfaces/rest/DatasetFileController.java b/backend/services/data-management-service/src/main/java/com/dataengine/datamanagement/interfaces/rest/DatasetFileController.java index 8ef7b8e9..1eae6db6 100644 --- a/backend/services/data-management-service/src/main/java/com/dataengine/datamanagement/interfaces/rest/DatasetFileController.java +++ b/backend/services/data-management-service/src/main/java/com/dataengine/datamanagement/interfaces/rest/DatasetFileController.java @@ -78,6 +78,7 @@ public ResponseEntity> uploadDatasetFile( } catch (IllegalArgumentException e) { return ResponseEntity.badRequest().body(Response.error("参数错误", null)); } catch (Exception e) { + log.error("upload fail", e); return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).body(Response.error("服务器错误", null)); } } diff --git a/backend/services/data-management-service/src/main/resources/mappers/DatasetMapper.xml b/backend/services/data-management-service/src/main/resources/mappers/DatasetMapper.xml index ae5781b2..482229e3 100644 --- a/backend/services/data-management-service/src/main/resources/mappers/DatasetMapper.xml +++ b/backend/services/data-management-service/src/main/resources/mappers/DatasetMapper.xml @@ -206,7 +206,7 @@ diff --git a/frontend/src/pages/DataCollection/Home/components/TaskManagement.tsx b/frontend/src/pages/DataCollection/Home/components/TaskManagement.tsx index 36563e70..444e7793 100644 --- a/frontend/src/pages/DataCollection/Home/components/TaskManagement.tsx +++ b/frontend/src/pages/DataCollection/Home/components/TaskManagement.tsx @@ -4,7 +4,7 @@ import { SearchControls } from "@/components/SearchControls"; import { deleteTaskByIdUsingDelete, executeTaskByIdUsingPost, - queryTasksUsingPost, + queryTasksUsingGet, stopTaskByIdUsingPost, } from "../../collection.apis"; import { TaskStatus, type CollectionTask } from "../../collection.model"; @@ -32,7 +32,7 @@ export default function TaskManagement() { 
     setSearchParams,
     fetchData,
     handleFiltersChange,
-  } = useFetchData(queryTasksUsingPost);
+  } = useFetchData(queryTasksUsingGet);
 
   const handleStartTask = async (taskId: string) => {
     await executeTaskByIdUsingPost(taskId);
diff --git a/frontend/src/pages/DataCollection/collection.apis.ts b/frontend/src/pages/DataCollection/collection.apis.ts
index 0ae34144..0206cef8 100644
--- a/frontend/src/pages/DataCollection/collection.apis.ts
+++ b/frontend/src/pages/DataCollection/collection.apis.ts
@@ -1,12 +1,12 @@
 import { get, post, put, del } from "@/utils/request";
 
 // 数据源任务相关接口
-export function queryTasksUsingPost(params?: any) {
-  return post("/api/data-collection/tasks", params);
+export function queryTasksUsingGet(params?: any) {
+  return get("/api/data-collection/tasks", params);
 }
 
 export function createTaskUsingPost(data: any) {
-  return post("/api/data-collection/tasks/create", data);
+  return post("/api/data-collection/tasks", data);
 }
 
 export function queryTaskByIdUsingGet(id: string | number) {
diff --git a/frontend/src/pages/DataManagement/Create/CreateDataset.tsx b/frontend/src/pages/DataManagement/Create/CreateDataset.tsx
index 9887f131..3d9cc832 100644
--- a/frontend/src/pages/DataManagement/Create/CreateDataset.tsx
+++ b/frontend/src/pages/DataManagement/Create/CreateDataset.tsx
@@ -13,7 +13,7 @@ import {
   updateDatasetByIdUsingPut,
 } from "../dataset.api";
 import { DatasetSubType, DatasetType, DataSource } from "../dataset.model";
-import { queryTasksUsingPost } from "@/pages/DataCollection/collection.apis";
+import { queryTasksUsingGet } from "@/pages/DataCollection/collection.apis";
 import { mockPreparedTags } from "@/components/TagManagement";
 
 export default function DatasetCreate() {
@@ -77,7 +77,7 @@ export default function DatasetCreate() {
   // 获取归集任务列表
   const fetchCollectionTasks = async () => {
     try {
-      const { data } = await queryTasksUsingPost({ pageNum: 1, pageSize: 100 });
+      const { data } = await queryTasksUsingGet({ pageNum: 1, pageSize: 100 });
       const options = data.map((task: any) => ({
         label: task.name,
         value: task.id,