diff --git a/dinky-admin/src/main/java/org/dinky/controller/CatalogueController.java b/dinky-admin/src/main/java/org/dinky/controller/CatalogueController.java index 2e318b139e..2ed07d9aaf 100644 --- a/dinky-admin/src/main/java/org/dinky/controller/CatalogueController.java +++ b/dinky-admin/src/main/java/org/dinky/controller/CatalogueController.java @@ -26,10 +26,12 @@ import org.dinky.data.constant.DirConstant; import org.dinky.data.dto.CatalogueTaskDTO; import org.dinky.data.dto.CatalogueTreeQueryDTO; +import org.dinky.data.dto.ImportCatalogueDTO; import org.dinky.data.enums.BusinessType; import org.dinky.data.enums.Status; import org.dinky.data.model.Catalogue; import org.dinky.data.result.Result; +import org.dinky.data.vo.ExportCatalogueVO; import org.dinky.data.vo.TreeVo; import org.dinky.service.TaskService; import org.dinky.service.catalogue.CatalogueService; @@ -37,7 +39,10 @@ import java.io.File; import java.util.List; +import org.springframework.http.HttpHeaders; +import org.springframework.http.ResponseEntity; import org.springframework.web.bind.annotation.DeleteMapping; +import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.PostMapping; import org.springframework.web.bind.annotation.PutMapping; @@ -46,6 +51,7 @@ import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.RestController; import org.springframework.web.multipart.MultipartFile; +import org.springframework.web.multipart.MultipartHttpServletRequest; import cn.dev33.satoken.annotation.SaCheckLogin; import cn.hutool.core.io.FileUtil; @@ -250,4 +256,38 @@ public Result copyTask(@RequestBody Catalogue catalogue) { public Result deleteCatalogueById(@CatalogueId @RequestParam Integer id) { return catalogueService.deleteCatalogueById(id); } + + /** + * export catalogue by id + * + * @param id catalogue id + * @return {@link ResponseEntity} + */ + @GetMapping("/export") + @Log(title = "Export Catalogue", businessType = BusinessType.EXPORT) + @ApiOperation("Export Catalogue") + @ApiImplicitParam(name = "id", value = "id", required = true, dataType = "Integer", dataTypeClass = Integer.class) + public ResponseEntity exportCatalogue(@RequestParam Integer id) { + ExportCatalogueVO exportCatalogueVo = catalogueService.exportCatalogue(id); + // convert the return value to file at the interface level + HttpHeaders headers = new HttpHeaders(); + headers.add(HttpHeaders.CONTENT_DISPOSITION, "attachment; filename=" + exportCatalogueVo.getFileName()); + headers.add(HttpHeaders.CONTENT_TYPE, "application/json"); + return ResponseEntity.ok().headers(headers).body(exportCatalogueVo.getDataJson()); + } + + /** + * import catalogue by parent id + * + * @return {@link Result}< {@link Void}>} + */ + @PostMapping("/import") + @Log(title = "Import Catalogue", businessType = BusinessType.IMPORT) + @ApiOperation("Import Catalogue") + public Result importCatalogue(MultipartHttpServletRequest request) { + // assemble dto objects and shield service requests + ImportCatalogueDTO importCatalogueDto = ImportCatalogueDTO.build(request); + catalogueService.importCatalogue(importCatalogueDto); + return Result.succeed(); + } } diff --git a/dinky-admin/src/main/java/org/dinky/data/bo/catalogue/export/ExportCatalogueBO.java b/dinky-admin/src/main/java/org/dinky/data/bo/catalogue/export/ExportCatalogueBO.java new file mode 100644 index 0000000000..47e4a2a927 --- /dev/null +++ 
b/dinky-admin/src/main/java/org/dinky/data/bo/catalogue/export/ExportCatalogueBO.java @@ -0,0 +1,46 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package org.dinky.data.bo.catalogue.export; + +import java.util.List; + +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; +import lombok.NoArgsConstructor; + +@Data +@Builder +@NoArgsConstructor +@AllArgsConstructor +public class ExportCatalogueBO { + + private String name; + + private Boolean enabled; + + private Boolean isLeaf; + + private ExportTaskBO task; + + private String type; + + private List children; +} diff --git a/dinky-admin/src/main/java/org/dinky/data/bo/catalogue/export/ExportTaskBO.java b/dinky-admin/src/main/java/org/dinky/data/bo/catalogue/export/ExportTaskBO.java new file mode 100644 index 0000000000..3c14719065 --- /dev/null +++ b/dinky-admin/src/main/java/org/dinky/data/bo/catalogue/export/ExportTaskBO.java @@ -0,0 +1,66 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +package org.dinky.data.bo.catalogue.export; + +import org.dinky.data.model.ext.TaskExtConfig; + +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; +import lombok.NoArgsConstructor; + +@Data +@Builder +@NoArgsConstructor +@AllArgsConstructor +public class ExportTaskBO { + + private String name; + + private String dialect; + + private String type; + + private Integer checkPoint; + + private Integer savePointStrategy; + + private Integer parallelism; + + private Boolean fragment; + + private Boolean statementSet; + + private Boolean batchModel; + + private Integer envId; + + private Integer alertGroupId; + + private TaskExtConfig configJson; + + private String note; + + private Integer step; + + private Boolean enabled; + + private String statement; +} diff --git a/dinky-admin/src/main/java/org/dinky/data/dto/ImportCatalogueDTO.java b/dinky-admin/src/main/java/org/dinky/data/dto/ImportCatalogueDTO.java new file mode 100644 index 0000000000..4f04ee5954 --- /dev/null +++ b/dinky-admin/src/main/java/org/dinky/data/dto/ImportCatalogueDTO.java @@ -0,0 +1,68 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +package org.dinky.data.dto; + +import org.dinky.data.bo.catalogue.export.ExportCatalogueBO; + +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStreamReader; +import java.util.Objects; + +import org.springframework.web.multipart.MultipartFile; +import org.springframework.web.multipart.MultipartHttpServletRequest; + +import cn.hutool.json.JSONUtil; +import lombok.Builder; +import lombok.Getter; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +@Getter +@Builder +public class ImportCatalogueDTO { + + private Integer parentCatalogueId; + + private ExportCatalogueBO exportCatalogue; + + public static ImportCatalogueDTO build(MultipartHttpServletRequest request) { + int parentCatalogueId = Integer.parseInt(request.getParameter("pid")); + ExportCatalogueBO exportCatalogue = null; + MultipartFile file = request.getFile("file"); + if (Objects.isNull(file)) { + return null; + } + try (BufferedReader reader = new BufferedReader(new InputStreamReader(file.getInputStream()))) { + StringBuilder content = new StringBuilder(); + String line; + while ((line = reader.readLine()) != null) { + content.append(line); + } + exportCatalogue = JSONUtil.toBean(content.toString(), ExportCatalogueBO.class); + } catch (IOException e) { + log.error("Convert MultipartHttpServletRequest to ExportCatalogueBO failed", e); + } + return ImportCatalogueDTO.builder() + .parentCatalogueId(parentCatalogueId) + .exportCatalogue(exportCatalogue) + .build(); + } +} diff --git a/dinky-admin/src/main/java/org/dinky/data/vo/ExportCatalogueVO.java b/dinky-admin/src/main/java/org/dinky/data/vo/ExportCatalogueVO.java new file mode 100644 index 0000000000..d578b9fb1d --- /dev/null +++ b/dinky-admin/src/main/java/org/dinky/data/vo/ExportCatalogueVO.java @@ -0,0 +1,41 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +package org.dinky.data.vo; + +import io.swagger.annotations.ApiModel; +import io.swagger.annotations.ApiModelProperty; +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; +import lombok.NoArgsConstructor; + +@Data +@Builder +@NoArgsConstructor +@AllArgsConstructor +@ApiModel(value = "ExportCatalogueVO", description = "The return value of export catalogue") +public class ExportCatalogueVO { + + @ApiModelProperty(value = "FileName", dataType = "String", example = "data.json") + private String fileName; + + @ApiModelProperty(value = "DataJson", dataType = "String") + private String dataJson; +} diff --git a/dinky-admin/src/main/java/org/dinky/service/catalogue/CatalogueService.java b/dinky-admin/src/main/java/org/dinky/service/catalogue/CatalogueService.java index 6d166d0b7a..5a9fa7e5bb 100644 --- a/dinky-admin/src/main/java/org/dinky/service/catalogue/CatalogueService.java +++ b/dinky-admin/src/main/java/org/dinky/service/catalogue/CatalogueService.java @@ -21,8 +21,10 @@ import org.dinky.data.dto.CatalogueTaskDTO; import org.dinky.data.dto.CatalogueTreeQueryDTO; +import org.dinky.data.dto.ImportCatalogueDTO; import org.dinky.data.model.Catalogue; import org.dinky.data.result.Result; +import org.dinky.data.vo.ExportCatalogueVO; import org.dinky.data.vo.TreeVo; import org.dinky.mybatis.service.ISuperService; @@ -157,4 +159,19 @@ public interface CatalogueService extends ISuperService { * @return */ Boolean checkTaskOperatePermission(Integer catalogueId); + + /** + * Export catalogue by id + * + * @param catalogueId catalogue id + * @return export catalogue vo + */ + ExportCatalogueVO exportCatalogue(Integer catalogueId); + + /** + * Import catalogue + * + * @param importCatalogueDto ImportCatalogueDTO + */ + void importCatalogue(ImportCatalogueDTO importCatalogueDto); } diff --git a/dinky-admin/src/main/java/org/dinky/service/catalogue/factory/CatalogueFactory.java b/dinky-admin/src/main/java/org/dinky/service/catalogue/factory/CatalogueFactory.java index 7036fe0f9b..2ffd533b16 100644 --- a/dinky-admin/src/main/java/org/dinky/service/catalogue/factory/CatalogueFactory.java +++ b/dinky-admin/src/main/java/org/dinky/service/catalogue/factory/CatalogueFactory.java @@ -19,6 +19,11 @@ package org.dinky.service.catalogue.factory; +import org.dinky.config.Dialect; +import org.dinky.data.bo.catalogue.export.ExportCatalogueBO; +import org.dinky.data.bo.catalogue.export.ExportTaskBO; +import org.dinky.data.constant.CommonConstant; +import org.dinky.data.enums.GatewayType; import org.dinky.data.enums.JobLifeCycle; import org.dinky.data.model.Catalogue; import org.dinky.data.model.Task; @@ -62,6 +67,28 @@ public Task getNewTask(Task oldTask, String newTaskName) { return newTask; } + public Task getTask(ExportTaskBO exportTaskBO, Integer firstLevelOwner) { + Task task = new Task(); + task.setName(exportTaskBO.getName()); + task.setDialect(exportTaskBO.getDialect()); + task.setType(exportTaskBO.getType()); + task.setCheckPoint(exportTaskBO.getCheckPoint()); + task.setSavePointStrategy(exportTaskBO.getSavePointStrategy()); + task.setParallelism(exportTaskBO.getParallelism()); + task.setFragment(exportTaskBO.getFragment()); + task.setStatementSet(exportTaskBO.getStatementSet()); + task.setBatchModel(exportTaskBO.getBatchModel()); + task.setEnvId(exportTaskBO.getEnvId()); + task.setAlertGroupId(exportTaskBO.getAlertGroupId()); + task.setConfigJson(exportTaskBO.getConfigJson()); + task.setNote(exportTaskBO.getNote()); + task.setStep(exportTaskBO.getStep()); + 
task.setEnabled(exportTaskBO.getEnabled()); + task.setStatement(exportTaskBO.getStatement()); + task.setFirstLevelOwner(firstLevelOwner); + return task; + } + public Catalogue getNewCatalogue(Catalogue paramCatalogue, Catalogue oldCatalogue, Task newTask) { Catalogue newCatalogue = new Catalogue(); BeanUtil.copyProperties(paramCatalogue, newCatalogue); @@ -80,4 +107,71 @@ public Catalogue getNewCatalogue(Catalogue paramCatalogue, Catalogue oldCatalogu newCatalogue.setTenantId(null); return newCatalogue; } + + public Catalogue getCatalogue(ExportCatalogueBO exportCatalogueBO, Integer parentId, Integer taskId) { + Catalogue catalogue = new Catalogue(); + catalogue.setParentId(parentId); + catalogue.setTaskId(taskId); + catalogue.setName(exportCatalogueBO.getName()); + catalogue.setType(exportCatalogueBO.getType()); + catalogue.setEnabled(exportCatalogueBO.getEnabled()); + catalogue.setIsLeaf(exportCatalogueBO.getIsLeaf()); + return catalogue; + } + + /** + * Reset Task value + * + * @param task Task + */ + public void resetTask(Task task, String dialect) { + task.setStep(JobLifeCycle.DEVELOP.getValue()); + task.setEnabled(Boolean.TRUE); + task.setVersionId(null); + task.setJobInstanceId(null); + if (Dialect.isFlinkSql(dialect, false)) { + task.setType(GatewayType.LOCAL.getLongValue()); + task.setParallelism(1); + task.setSavePointStrategy(CommonConstant.SAVE_POINT_STRATEGY_DISABLE); + task.setEnvId(CommonConstant.ENV_DISABLE); + task.setAlertGroupId(CommonConstant.ALERT_GROUP_DISABLE); + task.setFragment(Boolean.FALSE); + } + } + + public ExportCatalogueBO getExportCatalogueBo(Catalogue catalogue, Task task) { + return ExportCatalogueBO.builder() + .name(catalogue.getName()) + .enabled(catalogue.getEnabled()) + .isLeaf(catalogue.getIsLeaf()) + .type(catalogue.getType()) + .task(getExportTaskBo(task)) + .build(); + } + + private ExportTaskBO getExportTaskBo(Task task) { + if (Objects.isNull(task)) { + return null; + } + // Reset task + resetTask(task, task.getDialect()); + return ExportTaskBO.builder() + .name(task.getName()) + .dialect(task.getDialect()) + .type(task.getType()) + .checkPoint(task.getCheckPoint()) + .savePointStrategy(task.getSavePointStrategy()) + .parallelism(task.getParallelism()) + .fragment(task.getFragment()) + .statementSet(task.getStatementSet()) + .batchModel(task.getBatchModel()) + .envId(task.getEnvId()) + .alertGroupId(task.getAlertGroupId()) + .configJson(task.getConfigJson()) + .note(task.getNote()) + .step(task.getStep()) + .enabled(task.getEnabled()) + .statement(task.getStatement()) + .build(); + } } diff --git a/dinky-admin/src/main/java/org/dinky/service/catalogue/impl/CatalogueServiceImpl.java b/dinky-admin/src/main/java/org/dinky/service/catalogue/impl/CatalogueServiceImpl.java index d990bc04cc..1fc37cac67 100644 --- a/dinky-admin/src/main/java/org/dinky/service/catalogue/impl/CatalogueServiceImpl.java +++ b/dinky-admin/src/main/java/org/dinky/service/catalogue/impl/CatalogueServiceImpl.java @@ -22,11 +22,12 @@ import static org.dinky.assertion.Asserts.isNull; import org.dinky.assertion.Asserts; -import org.dinky.config.Dialect; +import org.dinky.data.bo.catalogue.export.ExportCatalogueBO; +import org.dinky.data.bo.catalogue.export.ExportTaskBO; import org.dinky.data.dto.CatalogueTaskDTO; import org.dinky.data.dto.CatalogueTreeQueryDTO; +import org.dinky.data.dto.ImportCatalogueDTO; import org.dinky.data.enums.CatalogueSortValueEnum; -import org.dinky.data.enums.GatewayType; import org.dinky.data.enums.JobLifeCycle; import org.dinky.data.enums.JobStatus; 
import org.dinky.data.enums.SortTypeEnum; @@ -40,6 +41,7 @@ import org.dinky.data.model.job.JobHistory; import org.dinky.data.model.job.JobInstance; import org.dinky.data.result.Result; +import org.dinky.data.vo.ExportCatalogueVO; import org.dinky.data.vo.TreeVo; import org.dinky.mapper.CatalogueMapper; import org.dinky.mybatis.service.impl.SuperServiceImpl; @@ -53,6 +55,8 @@ import org.dinky.service.catalogue.factory.CatalogueTreeSortFactory; import org.dinky.service.catalogue.strategy.CatalogueTreeSortStrategy; +import org.apache.commons.lang3.StringUtils; + import java.io.BufferedReader; import java.io.File; import java.io.InputStreamReader; @@ -61,6 +65,7 @@ import java.util.Arrays; import java.util.Comparator; import java.util.List; +import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.UUID; @@ -71,13 +76,17 @@ import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper; import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper; +import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.Lists; +import com.google.common.collect.Maps; +import cn.dev33.satoken.stp.StpUtil; import cn.hutool.core.bean.BeanUtil; import cn.hutool.core.collection.CollUtil; import cn.hutool.core.collection.CollectionUtil; import cn.hutool.core.lang.Opt; import cn.hutool.core.util.ObjectUtil; +import cn.hutool.json.JSONUtil; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; @@ -264,15 +273,8 @@ private Task initTaskValue(CatalogueTaskDTO catalogueTask) { if (Opt.ofNullable(catalogueTask.getTask()).isPresent()) { task = catalogueTask.getTask().buildTask(); } else { - task.setStep(JobLifeCycle.DEVELOP.getValue()); - task.setEnabled(true); - if (Dialect.isFlinkSql(catalogueTask.getType(), false)) { - task.setType(GatewayType.LOCAL.getLongValue()); - task.setParallelism(1); - task.setSavePointStrategy(0); // 0 is disabled - task.setEnvId(-1); // -1 is disabled - task.setAlertGroupId(-1); // -1 is disabled - } + String dialect = catalogueTask.getType(); + catalogueFactory.resetTask(task, dialect); } if (!Opt.ofNullable(task.getStep()).isPresent()) { task.setStep(JobLifeCycle.DEVELOP.getValue()); @@ -596,4 +598,185 @@ public Boolean checkTaskOperatePermission(Integer catalogueId) { } return null; } + + /** + * Export catalogue by id + * + * @param catalogueId catalogue id + * @return export catalogue vo + */ + @Override + public ExportCatalogueVO exportCatalogue(Integer catalogueId) { + log.info("Export catalogue: {}", catalogueId); + ExportCatalogueBO exportCatalogueBo = getAllCatalogue(catalogueId); + String dataJson = JSONUtil.toJsonPrettyStr(exportCatalogueBo); + return ExportCatalogueVO.builder() + .fileName(getExportCatalogueFileName(catalogueId)) + .dataJson(dataJson) + .build(); + } + + /** + * Import catalogue + * + * @param importCatalogueDto ImportCatalogueDTO + */ + @Override + @Transactional(rollbackFor = Exception.class) + public void importCatalogue(ImportCatalogueDTO importCatalogueDto) { + log.info("Import Catalogue start. 
importCatalogueDto: {}", importCatalogueDto); + Integer parentCatalogueId = importCatalogueDto.getParentCatalogueId(); + ExportCatalogueBO exportCatalogue = importCatalogueDto.getExportCatalogue(); + if (Objects.isNull(exportCatalogue)) { + throw new BusException(Status.FAILED); + } + Catalogue parentCatalogue = this.getById(parentCatalogueId); + // check param + checkImportCatalogueParam(parentCatalogue, exportCatalogue); + + // create catalogue and task + List createTasks = Lists.newArrayList(); + List createCatalogues = Lists.newArrayList(); + List searchCatalogues = Lists.newArrayList(exportCatalogue); + + // ExportCatalogueBO -> Task mapping + Map exportCatalogueTaskMap = Maps.newHashMap(); + // ExportCatalogueBO -> Catalogue mapping + Map exportCatalogueMap = Maps.newHashMap(); + // ExportCatalogueBO -> Parent Catalogue ID mapping + Map exportCatalogueParentIdMap = Maps.newHashMap(); + exportCatalogueParentIdMap.put(exportCatalogue, parentCatalogueId); + + Integer currentUserId = getCurrentUserId(); + while (CollectionUtil.isNotEmpty(searchCatalogues)) { + List nextSearchCatalogues = Lists.newArrayList(); + // create task + for (ExportCatalogueBO searchCatalogue : searchCatalogues) { + ExportTaskBO exportTaskBO = searchCatalogue.getTask(); + if (Objects.nonNull(exportTaskBO)) { + Task task = catalogueFactory.getTask(exportTaskBO, currentUserId); + createTasks.add(task); + exportCatalogueTaskMap.put(searchCatalogue, task); + } + } + taskService.saveBatch(createTasks); + // create catalogue + for (ExportCatalogueBO searchCatalogue : searchCatalogues) { + Task task = exportCatalogueTaskMap.get(searchCatalogue); + Integer taskId = Objects.nonNull(task) ? task.getId() : null; + Integer parentId = exportCatalogueParentIdMap.get(searchCatalogue); + if (Objects.isNull(parentId)) { + log.error("Not found parent id. searchCatalogue: {}", searchCatalogue); + throw new BusException(Status.FAILED); + } + Catalogue catalogue = catalogueFactory.getCatalogue(searchCatalogue, parentId, taskId); + createCatalogues.add(catalogue); + exportCatalogueMap.put(searchCatalogue, catalogue); + List children = searchCatalogue.getChildren(); + if (CollectionUtil.isNotEmpty(children)) { + nextSearchCatalogues.addAll(children); + } + } + this.saveBatch(createCatalogues); + // put parent id + for (ExportCatalogueBO searchCatalogue : searchCatalogues) { + List children = searchCatalogue.getChildren(); + if (CollectionUtil.isEmpty(children)) { + continue; + } + Catalogue catalogue = exportCatalogueMap.get(searchCatalogue); + for (ExportCatalogueBO child : children) { + exportCatalogueParentIdMap.put(child, catalogue.getId()); + } + } + createTasks.clear(); + createCatalogues.clear(); + searchCatalogues = nextSearchCatalogues; + } + log.info("Import Catalogue success. 
The number of Catalogue created is: {}", exportCatalogueMap.size()); + } + + @VisibleForTesting + protected Integer getCurrentUserId() { + return StpUtil.getLoginIdAsInt(); + } + + private void checkImportCatalogueParam(Catalogue parentCatalogue, ExportCatalogueBO exportCatalogue) { + // verify that the parent directory exists + if (Objects.isNull(parentCatalogue)) { + throw new BusException(Status.CATALOGUE_NOT_EXIST); + } + // check if a catalogue with the same name exists + List catalogueNames = getCatalogueNames(exportCatalogue); + List existCatalogues = + this.list(new LambdaQueryWrapper().in(Catalogue::getName, catalogueNames)); + if (CollectionUtil.isNotEmpty(existCatalogues)) { + throw new BusException( + Status.CATALOGUE_IS_EXIST, + existCatalogues.stream().map(Catalogue::getName).collect(Collectors.joining(","))); + } + // verify that the task name and parent catalogue name are consistent + List searchExportCatalogues = Lists.newArrayList(exportCatalogue); + while (CollectionUtil.isNotEmpty(searchExportCatalogues)) { + List nextSearchExportCatalogues = Lists.newArrayList(); + for (ExportCatalogueBO searchExportCatalogue : searchExportCatalogues) { + List children = searchExportCatalogue.getChildren(); + if (CollectionUtil.isNotEmpty(children)) { + nextSearchExportCatalogues.addAll(children); + } + ExportTaskBO task = searchExportCatalogue.getTask(); + if (Objects.isNull(task)) { + continue; + } + String catalogueName = searchExportCatalogue.getName(); + String taskName = task.getName(); + if (!StringUtils.equals(catalogueName, taskName)) { + throw new BusException(Status.TASK_NAME_NOT_MATCH_CATALOGUE_NAME, catalogueName, taskName); + } + } + searchExportCatalogues = nextSearchExportCatalogues; + } + } + + private List getCatalogueNames(ExportCatalogueBO exportCatalogue) { + if (Objects.isNull(exportCatalogue)) { + return Lists.newArrayList(); + } + List catalogueNameList = Lists.newArrayList(); + String catalogueName = exportCatalogue.getName(); + catalogueNameList.add(catalogueName); + List children = exportCatalogue.getChildren(); + if (CollectionUtil.isEmpty(children)) { + return catalogueNameList; + } + for (ExportCatalogueBO child : children) { + catalogueNameList.addAll(getCatalogueNames(child)); + } + return catalogueNameList; + } + + private String getExportCatalogueFileName(Integer catalogueId) { + return String.format("export_catalogue_%s_%s.json", catalogueId, System.currentTimeMillis()); + } + + private ExportCatalogueBO getAllCatalogue(Integer catalogueId) { + Catalogue catalogue = this.getById(catalogueId); + if (Objects.isNull(catalogue)) { + return null; + } + Boolean isLeaf = catalogue.getIsLeaf(); + // only leaf nodes have tasks + Task task = isLeaf ? 
taskService.getById(catalogue.getTaskId()) : null; + ExportCatalogueBO exportCatalogueBo = catalogueFactory.getExportCatalogueBo(catalogue, task); + List subCatalogues = + this.list(new LambdaQueryWrapper().eq(Catalogue::getParentId, catalogueId)); + if (CollectionUtil.isNotEmpty(subCatalogues)) { + List subExportCatalogueBo = subCatalogues.stream() + .map(Catalogue::getId) + .map(this::getAllCatalogue) + .collect(Collectors.toList()); + exportCatalogueBo.setChildren(subExportCatalogueBo); + } + return exportCatalogueBo; + } } diff --git a/dinky-admin/src/test/java/org/dinky/service/catalogue/impl/CatalogueServiceImplTest.java b/dinky-admin/src/test/java/org/dinky/service/catalogue/impl/CatalogueServiceImplTest.java new file mode 100644 index 0000000000..64051fff32 --- /dev/null +++ b/dinky-admin/src/test/java/org/dinky/service/catalogue/impl/CatalogueServiceImplTest.java @@ -0,0 +1,180 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package org.dinky.service.catalogue.impl; + +import static org.junit.Assert.*; +import static org.mockito.ArgumentMatchers.*; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.powermock.api.mockito.PowerMockito.doAnswer; +import static org.powermock.api.mockito.PowerMockito.when; + +import org.dinky.data.bo.catalogue.export.ExportCatalogueBO; +import org.dinky.data.dto.ImportCatalogueDTO; +import org.dinky.data.model.Catalogue; +import org.dinky.data.vo.ExportCatalogueVO; +import org.dinky.mapper.CatalogueMapper; +import org.dinky.service.TaskService; +import org.dinky.service.catalogue.factory.CatalogueFactory; + +import java.lang.reflect.Field; +import java.util.concurrent.atomic.AtomicInteger; + +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.powermock.core.classloader.annotations.PowerMockIgnore; +import org.powermock.modules.junit4.PowerMockRunner; + +import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper; +import com.google.common.collect.Lists; + +import cn.hutool.json.JSONUtil; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +@RunWith(PowerMockRunner.class) +@PowerMockIgnore({"com.sun.org.apache.xerces.*", "javax.xml.*", "org.xml.*", "javax.management.*", "org.w3c.*"}) +public class CatalogueServiceImplTest { + + @Mock + private CatalogueMapper catalogueMapper; + + @Mock + private TaskService taskService; + + @Mock + private CatalogueFactory catalogueFactory; + + @InjectMocks + private MockCatalogueServiceImpl catalogueServiceImplTest; + + @Before + public void init() throws IllegalAccessException, NoSuchFieldException { + // mock catalogueMapper + Field baseMapperField = catalogueServiceImplTest + .getClass() + 
.getSuperclass() + .getSuperclass() + .getSuperclass() + .getDeclaredField("baseMapper"); + baseMapperField.setAccessible(true); + baseMapperField.set(catalogueServiceImplTest, catalogueMapper); + } + + @Test + public void exportCatalogueTest() { + int catalogueId = 1; + + // result + ExportCatalogueBO subExportCatalogueBo = ExportCatalogueBO.builder() + .name("234") + .enabled(true) + .isLeaf(false) + .type("ttt") + .task(null) + .children(Lists.newArrayList()) + .build(); + ExportCatalogueBO exportCatalogueBo = ExportCatalogueBO.builder() + .name("123") + .enabled(true) + .isLeaf(false) + .type("ttt") + .task(null) + .children(Lists.newArrayList(subExportCatalogueBo)) + .build(); + String expectDataJson = JSONUtil.toJsonPrettyStr(exportCatalogueBo); + + // mock + Catalogue catalogue = new Catalogue(); + catalogue.setId(catalogueId); + catalogue.setName("123"); + catalogue.setIsLeaf(false); + catalogue.setEnabled(true); + catalogue.setType("ttt"); + Catalogue subCatalogue = new Catalogue(); + subCatalogue.setId(222); + subCatalogue.setParentId(catalogueId); + subCatalogue.setName("234"); + subCatalogue.setIsLeaf(false); + subCatalogue.setEnabled(true); + subCatalogue.setType("ttt"); + when(catalogueMapper.selectById(eq(catalogueId))).thenReturn(catalogue); + when(catalogueMapper.selectById(eq(222))).thenReturn(subCatalogue); + + AtomicInteger cnt = new AtomicInteger(); + doAnswer(invocationOnMock -> { + if (cnt.getAndIncrement() > 0) { + return Lists.newArrayList(); + } + return Lists.newArrayList(subCatalogue); + }) + .when(catalogueMapper) + .selectList(any(LambdaQueryWrapper.class)); + when(catalogueFactory.getExportCatalogueBo(eq(catalogue), eq(null))).thenReturn(exportCatalogueBo); + when(catalogueFactory.getExportCatalogueBo(eq(subCatalogue), eq(null))).thenReturn(subExportCatalogueBo); + + // execute and verify + ExportCatalogueVO exportCatalogueVo = catalogueServiceImplTest.exportCatalogue(catalogueId); + assertEquals(expectDataJson, exportCatalogueVo.getDataJson()); + } + + @Test + public void importCatalogueTest() { + ExportCatalogueBO subExportCatalogueBo = ExportCatalogueBO.builder() + .name("234") + .enabled(true) + .isLeaf(false) + .type("ttt") + .task(null) + .children(Lists.newArrayList()) + .build(); + ExportCatalogueBO exportCatalogueBo = ExportCatalogueBO.builder() + .name("123") + .enabled(true) + .isLeaf(false) + .type("ttt") + .task(null) + .children(Lists.newArrayList(subExportCatalogueBo)) + .build(); + int parentCatalogueId = 1; + ImportCatalogueDTO importCatalogueDto = ImportCatalogueDTO.builder() + .parentCatalogueId(parentCatalogueId) + .exportCatalogue(exportCatalogueBo) + .build(); + + // mock + Catalogue parentCatalogue = new Catalogue(); + parentCatalogue.setId(parentCatalogueId); + parentCatalogue.setName("111"); + parentCatalogue.setIsLeaf(false); + parentCatalogue.setEnabled(true); + parentCatalogue.setType("ttt"); + when(catalogueMapper.selectById(eq(parentCatalogueId))).thenReturn(parentCatalogue); + when(catalogueMapper.selectList(any(LambdaQueryWrapper.class))).thenReturn(Lists.newArrayList()); + when(catalogueFactory.getCatalogue(any(), anyInt(), eq(null))).thenCallRealMethod(); + + // execute and verify + catalogueServiceImplTest.importCatalogue(importCatalogueDto); + verify(taskService, times(2)).saveBatch(anyList()); + } +} diff --git a/dinky-admin/src/test/java/org/dinky/service/catalogue/impl/MockCatalogueServiceImpl.java b/dinky-admin/src/test/java/org/dinky/service/catalogue/impl/MockCatalogueServiceImpl.java new file mode 100644 index 
0000000000..85b4a121cd --- /dev/null +++ b/dinky-admin/src/test/java/org/dinky/service/catalogue/impl/MockCatalogueServiceImpl.java @@ -0,0 +1,75 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package org.dinky.service.catalogue.impl; + +import org.dinky.data.model.Catalogue; +import org.dinky.service.HistoryService; +import org.dinky.service.JobHistoryService; +import org.dinky.service.JobInstanceService; +import org.dinky.service.MonitorService; +import org.dinky.service.TaskService; +import org.dinky.service.catalogue.factory.CatalogueFactory; +import org.dinky.service.catalogue.factory.CatalogueTreeSortFactory; + +import java.util.Collection; +import java.util.Optional; +import java.util.concurrent.atomic.AtomicInteger; + +import com.google.common.collect.Lists; + +public class MockCatalogueServiceImpl extends CatalogueServiceImpl { + + public MockCatalogueServiceImpl( + TaskService taskService, + JobInstanceService jobInstanceService, + HistoryService historyService, + JobHistoryService jobHistoryService, + MonitorService monitorService, + CatalogueTreeSortFactory catalogueTreeSortFactory, + CatalogueFactory catalogueFactory) { + super( + taskService, + jobInstanceService, + historyService, + jobHistoryService, + monitorService, + catalogueTreeSortFactory, + catalogueFactory); + } + + /** + * Save batch + * + * @param entityList ignore + * @param batchSize ignore + * @return ignore + */ + @Override + public boolean saveBatch(Collection entityList, int batchSize) { + AtomicInteger id = new AtomicInteger(11111); + Optional.ofNullable(entityList).orElse(Lists.newArrayList()).forEach(e -> e.setId(id.getAndIncrement())); + return true; + } + + @Override + protected Integer getCurrentUserId() { + return 222; + } +} diff --git a/dinky-common/src/main/java/org/dinky/data/constant/CommonConstant.java b/dinky-common/src/main/java/org/dinky/data/constant/CommonConstant.java index 5e1d7e8afd..57797850c7 100644 --- a/dinky-common/src/main/java/org/dinky/data/constant/CommonConstant.java +++ b/dinky-common/src/main/java/org/dinky/data/constant/CommonConstant.java @@ -44,4 +44,12 @@ public final class CommonConstant { "cn.hutool.core.util.IdUtil", "cn.hutool.core.util.RandomUtil", "cn.hutool.core.util.StrUtil")); + + /** + * Task init param + */ + public static final Integer ENV_DISABLE = -1; + + public static final Integer SAVE_POINT_STRATEGY_DISABLE = 0; + public static final Integer ALERT_GROUP_DISABLE = -1; } diff --git a/dinky-common/src/main/java/org/dinky/data/enums/Status.java b/dinky-common/src/main/java/org/dinky/data/enums/Status.java index 85ff1a18dd..e9d2cee6f5 100644 --- a/dinky-common/src/main/java/org/dinky/data/enums/Status.java +++ b/dinky-common/src/main/java/org/dinky/data/enums/Status.java @@ -193,6 +193,9 @@ 
public enum Status { MODE_IS_NOT_ALLOW_SELECT(12014, "mode.is.not.allow.select"), OPERATE_NOT_SUPPORT_QUERY(12015, "operate.not.support.query"), TASK_NOT_OPERATE_PERMISSION(12016, "task.not.operate.permission"), + CATALOGUE_NOT_EXIST(12017, "catalogue.not.exist"), + CATALOGUE_IS_EXIST(12018, "catalogue.is.exist"), + TASK_NAME_NOT_MATCH_CATALOGUE_NAME(12019, "task.name.not.match.catalogue.name"), /** * alert instance diff --git a/dinky-common/src/main/resources/i18n/messages_en_US.properties b/dinky-common/src/main/resources/i18n/messages_en_US.properties index 1d24fd9618..7ca751290e 100644 --- a/dinky-common/src/main/resources/i18n/messages_en_US.properties +++ b/dinky-common/src/main/resources/i18n/messages_en_US.properties @@ -290,6 +290,9 @@ task.update.failed=Task Update failed mode.is.not.allow.select=Application / Pre-Job mode does not allow executing select statements. To perform this operation, please switch to Local, Standalone, or Yarn session modes. operate.not.support.query=The [Run] button does not support select statements, please switch to the [Query] button. task.not.operate.permission=No operation permission for the task +catalogue.not.exist=The catalogue does not exist +catalogue.is.exist=The following directory/task already exists: {} +task.name.not.match.catalogue.name=The task name[{}] does not match the catalogue name[{}] # process process.submit.submitTask= Submit the job diff --git a/dinky-common/src/main/resources/i18n/messages_zh_CN.properties b/dinky-common/src/main/resources/i18n/messages_zh_CN.properties index 0046e567dd..321f0ff425 100644 --- a/dinky-common/src/main/resources/i18n/messages_zh_CN.properties +++ b/dinky-common/src/main/resources/i18n/messages_zh_CN.properties @@ -290,6 +290,9 @@ task.update.failed=Task更新失败 mode.is.not.allow.select=Application / Pre-Job 模式不允许执行 select 语句, 如需执行此操作, 请切换至 Local、Standalone、Yarn session等模式 operate.not.support.query=[运行] 按钮不支持 select 语句,请切换至 [查询] 按钮 task.not.operate.permission=任务无操作权限 +catalogue.not.exist=目录不存在 +catalogue.is.exist=以下目录/任务已存在: {} +task.name.not.match.catalogue.name=目录名称[{}]与任务名称[{}]不一致 # process process.submit.submitTask=提交作业 diff --git a/dinky-web/src/locales/en-US/pages.ts b/dinky-web/src/locales/en-US/pages.ts index ab05bc0ff9..ac6bd8b55f 100644 --- a/dinky-web/src/locales/en-US/pages.ts +++ b/dinky-web/src/locales/en-US/pages.ts @@ -152,6 +152,8 @@ export default { 'datastudio.project.delete.job': 'Delete [{type}] Job [{name}]', 'datastudio.project.delete.job.confirm': 'This operation will delete the execution history of the task and all information of the task. \nPlease operate with caution! This operation is irreversible!!! \n\t\t\t\tConfirm to delete?', + 'datastudio.project.import.title': 'Import json file', + 'datastudio.project.import.tip': 'Click or drag file to this area to upload', /** * * devops diff --git a/dinky-web/src/locales/en-US/request.ts b/dinky-web/src/locales/en-US/request.ts index b8f159926c..0c3ba66a49 100644 --- a/dinky-web/src/locales/en-US/request.ts +++ b/dinky-web/src/locales/en-US/request.ts @@ -47,6 +47,7 @@ export default { ' The heartbeat of the data source is abnormal, and the detection time is: {time} ', 'app.request.upload.failed': ' Upload failed. 
', 'app.request.update.setting.success': 'Modify the configuration successfully!', + 'app.request.download.failed': 'Download failed, please try again', 'app.request.error.try': 'Failed, please try again', 'app.request.geterror.try': 'Get failed, please try again', 'app.request.delete.error': 'Delete failed, please try again', diff --git a/dinky-web/src/locales/zh-CN/pages.ts b/dinky-web/src/locales/zh-CN/pages.ts index 8d3378f06a..3f9080716c 100644 --- a/dinky-web/src/locales/zh-CN/pages.ts +++ b/dinky-web/src/locales/zh-CN/pages.ts @@ -140,6 +140,8 @@ export default { 'datastudio.project.delete.job': '删除 [{type}] 作业 [{name}]', 'datastudio.project.delete.job.confirm': '此操作会将该任务的执行历史, 以及任务的所有信息全部删除.\n\t\t\t\t请谨慎操作! 该操作不可逆!!!\n\t\t\t\t\t确认删除吗?', + 'datastudio.project.import.title': '导入 json 文件', + 'datastudio.project.import.tip': '点击或拖拽文件到此区域上传', /** * * devops diff --git a/dinky-web/src/locales/zh-CN/request.ts b/dinky-web/src/locales/zh-CN/request.ts index d62e8c5fe0..d9e2b6153c 100644 --- a/dinky-web/src/locales/zh-CN/request.ts +++ b/dinky-web/src/locales/zh-CN/request.ts @@ -43,6 +43,7 @@ export default { 'app.request.heartbeat.connection.failed': '数据源连接异常,原因为: {error}', 'app.request.upload.failed': '上传失败。', 'app.request.update.setting.success': '修改配置成功!', + 'app.request.download.failed': '下载失败,请重试', 'app.request.error.try': '失败,请重试', 'app.request.geterror.try': '获取失败,请重试', 'app.request.delete.error': '删除失败,请重试', diff --git a/dinky-web/src/pages/DataStudio/LeftContainer/Project/JobTree/components/JobImportModal/index.tsx b/dinky-web/src/pages/DataStudio/LeftContainer/Project/JobTree/components/JobImportModal/index.tsx new file mode 100644 index 0000000000..47cc6850af --- /dev/null +++ b/dinky-web/src/pages/DataStudio/LeftContainer/Project/JobTree/components/JobImportModal/index.tsx @@ -0,0 +1,103 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ *
+ */
+
+import { l } from '@/utils/intl';
+import { ErrorMessageAsync, SuccessMessageAsync } from '@/utils/messages';
+import { InboxOutlined } from '@ant-design/icons';
+import type { UploadProps } from 'antd';
+import { Modal, Upload } from 'antd';
+import React, { useEffect, useState } from 'react';
+import { UploadFile } from 'antd/es/upload/interface';
+
+const { Dragger } = Upload;
+
+type JobImportModalProps = {
+  onUpload: { url: string; pid: string };
+  visible: boolean;
+  onClose: () => void;
+  onOk: () => void;
+};
+
+const JobImportModal: React.FC<JobImportModalProps> = (props) => {
+  const { onUpload, onClose, onOk, visible } = props;
+  const { url, pid } = onUpload;
+
+  const [file_list, setFileList] = useState<UploadFile[]>([]);
+
+  useEffect(() => {
+    if (!visible) {
+      setFileList([]);
+    }
+  }, [visible]);
+
+  const uploadProps: UploadProps = {
+    name: 'file',
+    multiple: true,
+    action: url + '?pid=' + pid,
+    fileList: file_list,
+    onChange: async (info) => {
+      const { status, response, uid } = info.file;
+      const uploadFileList = info.fileList;
+      if (status === 'done') {
+        if (response.success) {
+          await SuccessMessageAsync(
+            l('rc.resource.upload.success', '', { fileName: info.file.name })
+          );
+        } else {
+          uploadFileList.forEach((f) => {
+            if (f.uid === uid) {
+              f.status = 'error';
+            }
+          });
+          await ErrorMessageAsync(response.msg);
+        }
+      } else if (status === 'error') {
+        uploadFileList.forEach((f) => {
+          if (f.uid === uid) {
+            f.status = 'error';
+          }
+        });
+        await ErrorMessageAsync(l('rc.resource.upload.fail', '', { fileName: info.file.name }));
+      }
+      setFileList(uploadFileList);
+    },
+    onDrop(e) {
+      console.log('Dropped files', e.dataTransfer.files);
+    }
+  };
+
+  return (
+    <Modal
+      title={l('datastudio.project.import.title')}
+      open={visible}
+      onOk={onOk}
+      onCancel={onClose}
+    >
+      <Dragger {...uploadProps}>
+        <p className={'ant-upload-drag-icon'}>
+          <InboxOutlined />
+        </p>
+        <p className={'ant-upload-text'}>{l('datastudio.project.import.tip')}</p>
+      </Dragger>
+    </Modal>
+ ); +}; + +export default JobImportModal; diff --git a/dinky-web/src/pages/DataStudio/LeftContainer/Project/constants.tsx b/dinky-web/src/pages/DataStudio/LeftContainer/Project/constants.tsx index 9728e52a3e..d54d0dc682 100644 --- a/dinky-web/src/pages/DataStudio/LeftContainer/Project/constants.tsx +++ b/dinky-web/src/pages/DataStudio/LeftContainer/Project/constants.tsx @@ -23,6 +23,7 @@ import { CopyrightTwoTone, CopyTwoTone, DeleteTwoTone, + DownCircleTwoTone, EditTwoTone, PlusCircleTwoTone, UpCircleTwoTone @@ -67,6 +68,16 @@ export const FOLDER_RIGHT_MENU = (disabled = false): MenuItemType[] => [ icon: , label: l('right.menu.paste'), disabled: !disabled + }, + { + key: 'importJson', + icon: , + label: l('right.menu.importJson') + }, + { + key: 'exportJson', + icon: , + label: l('right.menu.exportJson') } ]; @@ -86,12 +97,6 @@ export const JOB_RIGHT_MENU = (disabled = false): MenuItemType[] => [ icon: , label: l('button.edit') }, - { - key: 'exportJson', - icon: , - label: l('right.menu.exportJson'), - disabled: true // todo: 此功能暂时不实现 先禁用掉 - }, { key: 'copy', icon: , @@ -107,6 +112,11 @@ export const JOB_RIGHT_MENU = (disabled = false): MenuItemType[] => [ key: 'delete', icon: , label: l('button.delete') + }, + { + key: 'exportJson', + icon: , + label: l('right.menu.exportJson') } ]; diff --git a/dinky-web/src/pages/DataStudio/LeftContainer/Project/index.tsx b/dinky-web/src/pages/DataStudio/LeftContainer/Project/index.tsx index 383e3dbd38..4906ce0f73 100644 --- a/dinky-web/src/pages/DataStudio/LeftContainer/Project/index.tsx +++ b/dinky-web/src/pages/DataStudio/LeftContainer/Project/index.tsx @@ -40,6 +40,7 @@ import { } from '@/pages/DataStudio/model'; import { handleAddOrUpdate, + handleDownloadOption, handleOption, handlePutDataByParams, handleRemoveById @@ -54,6 +55,7 @@ import { Modal, Typography } from 'antd'; import { MenuInfo } from 'rc-menu/es/interface'; import React, { Key, useEffect, useState } from 'react'; import { connect } from 'umi'; +import JobImportModal from '@/pages/DataStudio/LeftContainer/Project/JobTree/components/JobImportModal'; const { Text } = Typography; @@ -68,6 +70,12 @@ const Project: React.FC = (props: connect) => { } = props; const [projectState, setProjectState] = useState(InitProjectState); + const [importVisible, setImportVisible] = useState(false); + const [uploadValue] = useState({ + url: API_CONSTANTS.IMPORT_CATALOGUE_URL, + pid: '' + }); + const btnDispatch = useTasksDispatch(); useEffect(() => { @@ -353,6 +361,29 @@ const Project: React.FC = (props: connect) => { handleContextCancel(); }; + const handleUploadCancel = () => { + setImportVisible(false); + dispatch({ + type: STUDIO_MODEL_ASYNC.queryProject, + payload: { ...selectCatalogueSortTypeData } + }); + handleContextCancel(); + }; + + const handleImportJson = () => { + uploadValue.pid = projectState.value.id; + setImportVisible(true); + handleContextCancel(); + }; + + const handleExportJson = async () => { + const catalogue_id = projectState.value.id; + await handleDownloadOption(API_CONSTANTS.EXPORT_CATALOGUE_URL, l('right.menu.exportJson'), { + id: catalogue_id + }); + handleContextCancel(); + }; + /** * cut task handle */ @@ -414,8 +445,10 @@ const Project: React.FC = (props: connect) => { handleEdit(); break; case 'exportJson': - // todo: 导出 json - // await handleCancel(); + await handleExportJson(); + break; + case 'importJson': + handleImportJson(); break; case 'copy': await handleCopy(); @@ -513,6 +546,14 @@ const Project: React.FC = (props: connect) => { onSubmit={handleSubmit} /> )} 
+ + {/* import task json */} + <JobImportModal onUpload={uploadValue} visible={importVisible} onOk={handleUploadCancel} onClose={handleUploadCancel} /> ); }; diff --git a/dinky-web/src/services/BusinessCrud.ts index 76626e3b84..d1131f64a7 100644 --- a/dinky-web/src/services/BusinessCrud.ts +++ b/dinky-web/src/services/BusinessCrud.ts @@ -19,6 +19,7 @@ import { addOrUpdateData, + download, getData, getDataByRequestBody, getInfoById, @@ -222,6 +223,15 @@ } }; +export const handleDownloadOption = async (url: string, title: string, param: any) => { + await LoadingMessageAsync(l('app.request.running') + title); + try { + await download(url, param); + } catch (error) { + WarningMessage(l('app.request.download.failed')); + } +}; + export const handleGetOption = async (url: string, title: string, param: any) => { await LoadingMessageAsync(l('app.request.running') + title); try { diff --git a/dinky-web/src/services/api.ts index fb6e38aee9..895024613b 100644 --- a/dinky-web/src/services/api.ts +++ b/dinky-web/src/services/api.ts @@ -150,3 +150,31 @@ export async function getDataByRequestBody(url: string, body: any) { data: { ...body } }); } + +export async function download(url: string, params?: any) { + return request(url, { + method: METHOD_CONSTANTS.GET, + params: { + ...params + }, + responseType: 'blob', + getResponse: true + }).then((res) => { + const { headers, data } = res; + const disposition = headers['content-disposition']; + const file_name = + disposition + .split(';') + .map((item) => item.trim()) + .filter((item) => item.startsWith('filename=')) + .map((item) => item.replaceAll('filename=', '')) + .shift() || ''; + const blob = new Blob([data]); + const objectURL = URL.createObjectURL(blob); + let btn = document.createElement('a'); + btn.download = file_name; + btn.href = objectURL; + btn.click(); + URL.revokeObjectURL(objectURL); + }); +} diff --git a/dinky-web/src/services/endpoints.tsx index 7ba67b7707..b42731dbd5 100644 --- a/dinky-web/src/services/endpoints.tsx +++ b/dinky-web/src/services/endpoints.tsx @@ -264,6 +264,8 @@ export enum API_CONSTANTS { SAVE_OR_UPDATE_CATALOGUE_URL = '/api/catalogue/saveOrUpdateCatalogue', COPY_TASK_URL = '/api/catalogue/copyTask', MOVE_CATALOGUE_URL = '/api/catalogue/moveCatalogue', + EXPORT_CATALOGUE_URL = '/api/catalogue/export', + IMPORT_CATALOGUE_URL = '/api/catalogue/import', // ------------------------------------ task ------------------------------------ TASK = '/api/task',
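
Not part of the patch: a minimal standalone sketch of how the two endpoints added above could be exercised outside the Dinky UI, assuming an authenticated browser session (Sa-Token cookie already present). The request shapes mirror what the controller and ImportCatalogueDTO.build expect ('id' for export, a 'pid' parameter plus a 'file' part for import); the helper names themselves are illustrative only.

// Hypothetical helpers, not part of the PR.
const EXPORT_CATALOGUE_URL = '/api/catalogue/export';
const IMPORT_CATALOGUE_URL = '/api/catalogue/import';

// Export: GET /api/catalogue/export?id=... answers with the pretty-printed JSON tree;
// the generated file name travels in the Content-Disposition header.
async function exportCatalogueAsJson(id: number): Promise<string> {
  const response = await fetch(`${EXPORT_CATALOGUE_URL}?id=${id}`);
  if (!response.ok) {
    throw new Error(`Export failed with HTTP ${response.status}`);
  }
  return response.text();
}

// Import: POST /api/catalogue/import as multipart form data; the backend reads the
// 'pid' request parameter and the 'file' part (see ImportCatalogueDTO.build).
async function importCatalogueJson(pid: number, file: File): Promise<void> {
  const form = new FormData();
  form.append('file', file);
  const response = await fetch(`${IMPORT_CATALOGUE_URL}?pid=${pid}`, {
    method: 'POST',
    body: form
  });
  if (!response.ok) {
    throw new Error(`Import failed with HTTP ${response.status}`);
  }
}

// Round-trip example:
//   const json = await exportCatalogueAsJson(5);
//   await importCatalogueJson(12, new File([json], 'export_catalogue_5.json'));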