
update dubhe-server

tags/v2.0
Zhejiang Lab (之江实验室) 4 years ago
commit 30e2a4ad37
54 changed files with 189 additions and 1676 deletions
  1. +2 -2    dubhe-server/common/src/main/java/org/dubhe/aspect/LogAspect.java
  2. +4 -2    dubhe-server/common/src/main/java/org/dubhe/enums/LogEnum.java
  3. +5 -0    dubhe-server/common/src/main/java/org/dubhe/filter/BaseLogFilter.java
  4. +1 -1    dubhe-server/common/src/main/java/org/dubhe/utils/JwtUtils.java
  5. +0 -28   dubhe-server/dubhe-admin/src/main/java/org/dubhe/dao/PtDatasetMapper.java
  6. +0 -29   dubhe-server/dubhe-admin/src/main/java/org/dubhe/dao/PtDevEnvsMapper.java
  7. +0 -28   dubhe-server/dubhe-admin/src/main/java/org/dubhe/dao/PtStorageMapper.java
  8. +0 -71   dubhe-server/dubhe-admin/src/main/java/org/dubhe/domain/PtDataset.java
  9. +0 -114  dubhe-server/dubhe-admin/src/main/java/org/dubhe/domain/PtDevEnvs.java
  10. +0 -69   dubhe-server/dubhe-admin/src/main/java/org/dubhe/domain/PtStorage.java
  11. +0 -28   dubhe-server/dubhe-admin/src/main/java/org/dubhe/domain/dto/PtDatasetQueryCriteria.java
  12. +0 -41   dubhe-server/dubhe-admin/src/main/java/org/dubhe/domain/dto/PtStorageDTO.java
  13. +0 -28   dubhe-server/dubhe-admin/src/main/java/org/dubhe/domain/dto/PtStorageQueryCriteria.java
  14. +1 -2    dubhe-server/dubhe-admin/src/main/java/org/dubhe/domain/dto/PtTrainJobCreateDTO.java
  15. +0 -89   dubhe-server/dubhe-admin/src/main/java/org/dubhe/rest/PtDatasetController.java
  16. +0 -88   dubhe-server/dubhe-admin/src/main/java/org/dubhe/rest/PtDevEnvsController.java
  17. +0 -87   dubhe-server/dubhe-admin/src/main/java/org/dubhe/rest/PtStorageController.java
  18. +7 -0    dubhe-server/dubhe-admin/src/main/java/org/dubhe/rest/PtTrainJobController.java
  19. +0 -91   dubhe-server/dubhe-admin/src/main/java/org/dubhe/service/PtDatasetService.java
  20. +0 -91   dubhe-server/dubhe-admin/src/main/java/org/dubhe/service/PtDevEnvsService.java
  21. +0 -92   dubhe-server/dubhe-admin/src/main/java/org/dubhe/service/PtStorageService.java
  22. +2 -2    dubhe-server/dubhe-admin/src/main/java/org/dubhe/service/abstracts/AbstractPodCallback.java
  23. +0 -33   dubhe-server/dubhe-admin/src/main/java/org/dubhe/service/convert/PtDatasetConvert.java
  24. +0 -34   dubhe-server/dubhe-admin/src/main/java/org/dubhe/service/convert/PtDevEnvsConvert.java
  25. +0 -34   dubhe-server/dubhe-admin/src/main/java/org/dubhe/service/convert/PtImageConvert.java
  26. +0 -33   dubhe-server/dubhe-admin/src/main/java/org/dubhe/service/convert/PtStorageConvert.java
  27. +0 -34   dubhe-server/dubhe-admin/src/main/java/org/dubhe/service/convert/PtTrainJobConvert.java
  28. +1 -1    dubhe-server/dubhe-admin/src/main/java/org/dubhe/service/impl/NoteBookAsyncServiceImpl.java
  29. +5 -5    dubhe-server/dubhe-admin/src/main/java/org/dubhe/service/impl/NoteBookServiceImpl.java
  30. +0 -121  dubhe-server/dubhe-admin/src/main/java/org/dubhe/service/impl/PtDatasetServiceImpl.java
  31. +0 -123  dubhe-server/dubhe-admin/src/main/java/org/dubhe/service/impl/PtDevEnvsServiceImpl.java
  32. +1 -1    dubhe-server/dubhe-admin/src/main/java/org/dubhe/service/impl/PtImageServiceImpl.java
  33. +0 -122  dubhe-server/dubhe-admin/src/main/java/org/dubhe/service/impl/PtStorageServiceImpl.java
  34. +19 -16  dubhe-server/dubhe-admin/src/main/resources/config/application-dev.yml
  35. +14 -12  dubhe-server/dubhe-admin/src/main/resources/config/application.yml
  36. +0 -19   dubhe-server/dubhe-admin/src/main/resources/kubeconfig-prod
  37. +3 -1    dubhe-server/dubhe-admin/src/test/java/org/dubhe/BaseTest.java
  38. +1 -1    dubhe-server/dubhe-data/src/main/java/org/dubhe/data/machine/statemachine/GlobalStateMachine.java
  39. +1 -13   dubhe-server/dubhe-data/src/main/java/org/dubhe/data/service/impl/DatasetServiceImpl.java
  40. +5 -4    dubhe-server/dubhe-data/src/main/java/org/dubhe/data/service/impl/LabelServiceImpl.java
  41. +1 -1    dubhe-server/dubhe-k8s/src/main/java/org/dubhe/harbor/api/impl/HarborApiImpl.java
  42. +3 -3    dubhe-server/dubhe-k8s/src/main/java/org/dubhe/k8s/api/impl/JupyterResourceApiImpl.java
  43. +0 -1    dubhe-server/dubhe-k8s/src/main/java/org/dubhe/k8s/api/impl/TrainJobApiImpl.java
  44. +0 -19   dubhe-server/dubhe-k8s/src/main/java/org/dubhe/k8s/domain/bo/K8sTaskBO.java
  45. +9 -0    dubhe-server/dubhe-k8s/src/main/java/org/dubhe/k8s/domain/entity/K8sTask.java
  46. +1 -1    dubhe-server/dubhe-k8s/src/main/java/org/dubhe/k8s/event/watcher/PodWatcher.java
  47. +1 -1    dubhe-server/dubhe-k8s/src/main/java/org/dubhe/k8s/observer/TrainJobObserver.java
  48. +2 -1    dubhe-server/dubhe-system/src/main/java/org/dubhe/service/RecycleTaskService.java
  49. +30 -21  dubhe-server/dubhe-system/src/main/java/org/dubhe/service/impl/RecycleTaskServiceImpl.java
  50. +3 -1    dubhe-server/dubhe-system/src/test/java/org/dubhe/BaseTest.java
  51. +5 -0    dubhe-server/dubhe-task/src/main/java/org/dubhe/task/k8s/DelayCudResourceTask.java
  52. +22 -19  dubhe-server/dubhe-task/src/main/resources/config/application-dev.yml
  53. +39 -17  dubhe-server/dubhe-task/src/main/resources/config/application.yml
  54. +1 -1    dubhe-server/sql/v1/09-Dubhe-Patch.sql

+2 -2  dubhe-server/common/src/main/java/org/dubhe/aspect/LogAspect.java

@@ -75,11 +75,11 @@ public class LogAspect {
private Object combineLogInfo(JoinPoint joinPoint) throws Throwable {
Object[] param = joinPoint.getArgs();
LogUtil.info(LogEnum.REST_REQ, "uri:{},input:{},==>begin", joinPoint.getSignature(), param);
LogUtil.info(LogEnum.LOG_ASPECT, "uri:{},input:{},==>begin", joinPoint.getSignature(), param);
long start = System.currentTimeMillis();
Object result = ((ProceedingJoinPoint) joinPoint).proceed();
long end = System.currentTimeMillis();
LogUtil.info(LogEnum.REST_REQ, "uri:{},output:{},proc_time:{},<==end", joinPoint.getSignature().toString(),
LogUtil.info(LogEnum.LOG_ASPECT, "uri:{},output:{},proc_time:{},<==end", joinPoint.getSignature().toString(),
result, end - start);
return result;
}


+4 -2  dubhe-server/common/src/main/java/org/dubhe/enums/LogEnum.java

@@ -59,8 +59,10 @@ public enum LogEnum {
//DATA_SEQUENCE
DATA_SEQUENCE,
//IO UTIL
IO_UTIL;

IO_UTIL,
// 日志切面
LOG_ASPECT
;
/**
* 判断日志类型不能为空
*
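
The two hunks above introduce a dedicated LOG_ASPECT category and switch the aspect's request logging from REST_REQ to it. As a rough, self-contained illustration of this enum-keyed logging pattern (DemoLogCategory, DemoLogUtil and AroundAdviceSketch are hypothetical stand-ins, not part of the commit; the project's LogUtil and LogEnum are only partially visible in this diff):

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/** Hypothetical stand-in for the LogEnum categories shown above. */
enum DemoLogCategory { REST_REQ, LOG_ASPECT }

/** Hypothetical stand-in for LogUtil: prefixes each message with its business category. */
final class DemoLogUtil {
    private static final Logger LOG = LoggerFactory.getLogger(DemoLogUtil.class);

    static void info(DemoLogCategory category, String format, Object... args) {
        LOG.info("[" + category + "] " + format, args);
    }
}

class AroundAdviceSketch {
    /** Mirrors combineLogInfo: log begin, measure elapsed time, log end under LOG_ASPECT. */
    Object timeAndLog(String signature, Object[] args) {
        DemoLogUtil.info(DemoLogCategory.LOG_ASPECT, "uri:{},input:{},==>begin", signature, args);
        long start = System.currentTimeMillis();
        Object result = null; // a real aspect would call ProceedingJoinPoint.proceed() here
        long end = System.currentTimeMillis();
        DemoLogUtil.info(DemoLogCategory.LOG_ASPECT, "uri:{},output:{},proc_time:{},<==end", signature, result, end - start);
        return result;
    }
}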


+5 -0  dubhe-server/common/src/main/java/org/dubhe/filter/BaseLogFilter.java

@@ -55,6 +55,11 @@ public class BaseLogFilter extends AbstractMatcherFilter<ILoggingEvent> {
return onMismatch;
}

/**
* 检测日志级别
* @param iLoggingEvent 日志事件
* @return true 过滤当前级别 false 不过滤当前级别
*/
protected boolean checkLevel(ILoggingEvent iLoggingEvent) {
return this.level != null
&& iLoggingEvent.getLevel() != null


+1 -1  dubhe-server/common/src/main/java/org/dubhe/utils/JwtUtils.java

@@ -41,7 +41,7 @@ import java.util.Objects;
import java.util.concurrent.TimeUnit;

/**
* @description JWT
* @description JWT
* @date 2020-03-14
*/
@Component


+0 -28  dubhe-server/dubhe-admin/src/main/java/org/dubhe/dao/PtDatasetMapper.java

@@ -1,28 +0,0 @@
/**
* Copyright 2020 Zhejiang Lab. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* =============================================================
*/

package org.dubhe.dao;

import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import org.dubhe.domain.PtDataset;

/**
* @description 数据集
* @date 2020-03-30
*/
public interface PtDatasetMapper extends BaseMapper<PtDataset> {
}

+0 -29  dubhe-server/dubhe-admin/src/main/java/org/dubhe/dao/PtDevEnvsMapper.java

@@ -1,29 +0,0 @@
/**
* Copyright 2020 Zhejiang Lab. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* =============================================================
*/

package org.dubhe.dao;

import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import org.dubhe.domain.PtDevEnvs;

/**
* @description 开发环境
* @date 2020-03-30
*/
public interface PtDevEnvsMapper extends BaseMapper<PtDevEnvs> {

}

+0 -28  dubhe-server/dubhe-admin/src/main/java/org/dubhe/dao/PtStorageMapper.java

@@ -1,28 +0,0 @@
/**
* Copyright 2020 Zhejiang Lab. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* =============================================================
*/

package org.dubhe.dao;

import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import org.dubhe.domain.PtStorage;

/**
* @description 存储管理
* @date 2020-03-30
*/
public interface PtStorageMapper extends BaseMapper<PtStorage> {
}

+0 -71  dubhe-server/dubhe-admin/src/main/java/org/dubhe/domain/PtDataset.java

@@ -1,71 +0,0 @@
/**
* Copyright 2020 Zhejiang Lab. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* =============================================================
*/

package org.dubhe.domain;

import cn.hutool.core.bean.BeanUtil;
import cn.hutool.core.bean.copier.CopyOptions;
import com.baomidou.mybatisplus.annotation.IdType;
import com.baomidou.mybatisplus.annotation.TableField;
import com.baomidou.mybatisplus.annotation.TableId;
import com.baomidou.mybatisplus.annotation.TableName;
import lombok.Data;
import org.dubhe.base.BaseEntity;
import org.dubhe.domain.entity.Team;
import org.dubhe.domain.entity.User;

import javax.validation.constraints.NotBlank;

/**
* @description 数据集
* @date 2020-03-17
*/
@Data
@TableName("pt_dataset")
public class PtDataset extends BaseEntity {

@TableId(value = "id", type = IdType.AUTO)
private Long id;

@TableField(value = "name")
@NotBlank
private String name;

@TableField(value = "remark")
private String remark;

@TableField(value = "type")
@NotBlank
private String type;
/**
* 团队
*/
@TableField(exist = false)
private Team team;
/**
* 创建用户
*/
@TableField(exist = false)
private User createUser;

public void copy(PtDataset source) {
BeanUtil.copyProperties(source, this, CopyOptions.create().setIgnoreNullValue(true));
}

public @interface Update {
}
}

+0 -114  dubhe-server/dubhe-admin/src/main/java/org/dubhe/domain/PtDevEnvs.java

@@ -1,114 +0,0 @@
/**
* Copyright 2020 Zhejiang Lab. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* =============================================================
*/

package org.dubhe.domain;

import cn.hutool.core.bean.BeanUtil;
import cn.hutool.core.bean.copier.CopyOptions;
import com.baomidou.mybatisplus.annotation.IdType;
import com.baomidou.mybatisplus.annotation.TableField;
import com.baomidou.mybatisplus.annotation.TableId;
import com.baomidou.mybatisplus.annotation.TableName;
import lombok.Data;
import org.dubhe.base.BaseEntity;
import org.dubhe.domain.entity.PtImage;
import org.dubhe.domain.entity.Team;
import org.dubhe.domain.entity.User;

import javax.validation.constraints.NotBlank;
import java.sql.Timestamp;

/**
* @description 开发环境
* @date 2020-03-17
*/

@Data
@TableName("pt_dev_envs")
public class PtDevEnvs extends BaseEntity {
@TableId(value = "id", type = IdType.AUTO)
private Long id;

@NotBlank
private String name;

@TableField(value = "remark")
private String remark;

@TableField(value = "type")
@NotBlank
private String type;

@TableField(value = "pod_num")
private Integer podNum;

@TableField(value = "gpu_num")
private Integer gpuNum;

@TableField(value = "mem_num")
private Integer memNum;

@TableField(value = "cpu_num")
private Integer cpuNum;

@TableField(value = "duration")
private Integer duration;

@TableField(value = "start_time")
private Timestamp startTime;

@TableField(value = "close_time")
private Timestamp closeTime;

/**
* 数据集
*/
@TableField(exist = false)
private PtDataset dataset;

/**
* 镜像
*/
@TableField(exist = false)
private PtImage image;

/**
* 存储
*/
@TableField(exist = false)
private PtStorage storage;

/**
* 团队
*/
@TableField(exist = false)
private Team team;

/**
* 创建用户
*/
@TableField(exist = false)
private User createUser;


public void copy(PtDevEnvs source) {
BeanUtil.copyProperties(source, this, CopyOptions.create().setIgnoreNullValue(true));
}

public @interface Update {
}
}

+0 -69  dubhe-server/dubhe-admin/src/main/java/org/dubhe/domain/PtStorage.java

@@ -1,69 +0,0 @@
/**
* Copyright 2020 Zhejiang Lab. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* =============================================================
*/

package org.dubhe.domain;

import cn.hutool.core.bean.BeanUtil;
import cn.hutool.core.bean.copier.CopyOptions;
import com.baomidou.mybatisplus.annotation.IdType;
import com.baomidou.mybatisplus.annotation.TableField;
import com.baomidou.mybatisplus.annotation.TableId;
import com.baomidou.mybatisplus.annotation.TableName;
import lombok.Data;
import org.dubhe.base.BaseEntity;
import org.dubhe.domain.entity.Team;
import org.dubhe.domain.entity.User;

import javax.validation.constraints.NotBlank;
import javax.validation.constraints.NotNull;

/**
* @description 存储
* @date 2020-03-17
*/
@Data
@TableName("pt_storage")
public class PtStorage extends BaseEntity {
@TableId(value = "id", type = IdType.AUTO)
@NotNull(groups = {Update.class})
private Long id;

@TableField(value = "name")
@NotBlank
private String name;

@TableField(value = "size")
private Integer size;

@TableField(value = "storageclass")
@NotBlank
private String storageclass;

@TableField(exist = false)
private Team team;

@TableField(exist = false)
private User createUser;


public void copy(PtStorage source) {
BeanUtil.copyProperties(source, this, CopyOptions.create().setIgnoreNullValue(true));
}

public @interface Update {
}
}

+0 -28  dubhe-server/dubhe-admin/src/main/java/org/dubhe/domain/dto/PtDatasetQueryCriteria.java

@@ -1,28 +0,0 @@
/**
* Copyright 2020 Zhejiang Lab. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* =============================================================
*/

package org.dubhe.domain.dto;

import lombok.Data;

/**
* @description 数据集查询条件
* @date 2020-03-17
*/
@Data
public class PtDatasetQueryCriteria {
}

+0 -41  dubhe-server/dubhe-admin/src/main/java/org/dubhe/domain/dto/PtStorageDTO.java

@@ -1,41 +0,0 @@
/**
* Copyright 2020 Zhejiang Lab. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* =============================================================
*/

package org.dubhe.domain.dto;

import lombok.Data;

import java.io.Serializable;
import java.sql.Timestamp;

/**
* @description 存储
* @date 2020-03-17
*/
@Data
public class PtStorageDTO implements Serializable {

private Long id;
private String name;
private Integer size;
private String storageclass;
private TeamSmallDTO team;
private UserSmallDTO createUser;
private Timestamp createTime;
private Timestamp updateTime;
private Boolean deleted;
}

+0 -28  dubhe-server/dubhe-admin/src/main/java/org/dubhe/domain/dto/PtStorageQueryCriteria.java

@@ -1,28 +0,0 @@
/**
* Copyright 2020 Zhejiang Lab. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* =============================================================
*/

package org.dubhe.domain.dto;

import lombok.Data;

/**
* @description 存储
* @date 2020-03-17
*/
@Data
public class PtStorageQueryCriteria {
}

+1 -2  dubhe-server/dubhe-admin/src/main/java/org/dubhe/domain/dto/PtTrainJobCreateDTO.java

@@ -103,8 +103,7 @@ public class PtTrainJobCreateDTO extends BaseImageDTO {
private Integer trainType;

@ApiModelProperty(value = "节点个数", required = true)
@Min(value = TrainUtil.NUMBER_ONE, message = "节点个数在1~8之间")
@Max(value = TrainUtil.NUMBER_EIGHT, message = "节点个数在1~8之间")
@Min(value = TrainUtil.NUMBER_ONE, message = "节点个数至少1个")
@NotNull(message = "节点个数")
private Integer resourcesPoolNode;



+0 -89  dubhe-server/dubhe-admin/src/main/java/org/dubhe/rest/PtDatasetController.java

@@ -1,89 +0,0 @@
/**
* Copyright 2020 Zhejiang Lab. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* =============================================================
*/

package org.dubhe.rest;


import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.dubhe.base.DataResponseBody;
import org.dubhe.domain.PtDataset;
import org.dubhe.domain.dto.PtDatasetQueryCriteria;
import org.dubhe.service.PtDatasetService;
import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.*;
import springfox.documentation.annotations.ApiIgnore;

import javax.servlet.http.HttpServletResponse;
import java.io.IOException;

/**
* @description dataset管理
* @date 2020-03-17
*/
@Api(tags = "dataset管理")
@ApiIgnore
@RestController
@RequestMapping("/api/{version}/pt_dataset")
public class PtDatasetController {

private final PtDatasetService ptDatasetService;

public PtDatasetController(PtDatasetService ptDatasetService) {
this.ptDatasetService = ptDatasetService;
}


@ApiOperation("导出数据")
@GetMapping(value = "/download")
@PreAuthorize("@el.check('ptDataset:list')")
public void download(HttpServletResponse response, PtDatasetQueryCriteria criteria) throws IOException {
ptDatasetService.download(ptDatasetService.queryAll(criteria), response);
}

@GetMapping
@ApiOperation("查询dataset")
@PreAuthorize("@el.check('ptDataset:list')")
public DataResponseBody getPtDatasets(PtDatasetQueryCriteria criteria, Page page) {
return new DataResponseBody(ptDatasetService.queryAll(criteria, page));
}

@PostMapping
@ApiOperation("新增dataset")
@PreAuthorize("@el.check('ptDataset:add')")
public DataResponseBody create(@Validated @RequestBody PtDataset resources) {
return new DataResponseBody(ptDatasetService.create(resources));
}

@PutMapping
@ApiOperation("修改dataset")
@PreAuthorize("@el.check('ptDataset:edit')")
public DataResponseBody update(@Validated @RequestBody PtDataset resources) {
ptDatasetService.update(resources);
return new DataResponseBody();
}

@ApiOperation("删除dataset")
@PreAuthorize("@el.check('ptDataset:del')")
@DeleteMapping
public DataResponseBody deleteAll(@RequestBody Long[] ids) {
ptDatasetService.deleteAll(ids);
return new DataResponseBody();
}
}

+0 -88  dubhe-server/dubhe-admin/src/main/java/org/dubhe/rest/PtDevEnvsController.java

@@ -1,88 +0,0 @@
/**
* Copyright 2020 Zhejiang Lab. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* =============================================================
*/

package org.dubhe.rest;


import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.dubhe.base.DataResponseBody;
import org.dubhe.domain.PtDevEnvs;
import org.dubhe.domain.dto.PtDevEnvsQueryCriteria;
import org.dubhe.service.PtDevEnvsService;
import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.*;
import springfox.documentation.annotations.ApiIgnore;

import javax.servlet.http.HttpServletResponse;
import java.io.IOException;

/**
* @description devEnvs管理
* @date 2020-03-17
*/
@Api(tags = "devEnvs管理")
@ApiIgnore
@RestController
@RequestMapping("/api/{version}/pt_ev_envs")
public class PtDevEnvsController {

private final PtDevEnvsService ptDevEnvsService;

public PtDevEnvsController(PtDevEnvsService ptDevEnvsService) {
this.ptDevEnvsService = ptDevEnvsService;
}

@ApiOperation("导出数据")
@GetMapping(value = "/download")
@PreAuthorize("@el.check('ptDevEnvs:list')")
public void download(HttpServletResponse response, PtDevEnvsQueryCriteria criteria) throws IOException {
ptDevEnvsService.download(ptDevEnvsService.queryAll(criteria), response);
}

@GetMapping
@ApiOperation("查询devEnvs")
@PreAuthorize("@el.check('ptDevEnvs:list')")
public DataResponseBody getPtDevEnvss(PtDevEnvsQueryCriteria criteria, Page page) {
return new DataResponseBody(ptDevEnvsService.queryAll(criteria, page));
}

@PostMapping
@ApiOperation("新增devEnvs")
@PreAuthorize("@el.check('ptDevEnvs:add')")
public DataResponseBody create(@Validated @RequestBody PtDevEnvs resources) {
return new DataResponseBody(ptDevEnvsService.create(resources));
}

@PutMapping
@ApiOperation("修改devEnvs")
@PreAuthorize("@el.check('ptDevEnvs:edit')")
public DataResponseBody update(@Validated @RequestBody PtDevEnvs resources) {
ptDevEnvsService.update(resources);
return new DataResponseBody();
}

@ApiOperation("删除devEnvs")
@PreAuthorize("@el.check('ptDevEnvs:del')")
@DeleteMapping
public DataResponseBody deleteAll(@RequestBody Long[] ids) {
ptDevEnvsService.deleteAll(ids);
return new DataResponseBody();
}
}

+0 -87  dubhe-server/dubhe-admin/src/main/java/org/dubhe/rest/PtStorageController.java

@@ -1,87 +0,0 @@
/**
* Copyright 2020 Zhejiang Lab. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* =============================================================
*/

package org.dubhe.rest;

import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.dubhe.base.DataResponseBody;
import org.dubhe.domain.PtStorage;
import org.dubhe.domain.dto.PtStorageQueryCriteria;
import org.dubhe.service.PtStorageService;
import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.*;
import springfox.documentation.annotations.ApiIgnore;

import javax.servlet.http.HttpServletResponse;
import java.io.IOException;

/**
* @description storage管理
* @date 2020-03-17
*/
@Api(tags = "storage管理")
@ApiIgnore
@RestController
@RequestMapping("/api/{version}/pt_storage")
public class PtStorageController {

private final PtStorageService ptStorageService;

public PtStorageController(PtStorageService ptStorageService) {
this.ptStorageService = ptStorageService;
}

@ApiOperation("导出数据")
@GetMapping(value = "/download")
@PreAuthorize("@el.check('ptStorage:list')")
public void download(HttpServletResponse response, PtStorageQueryCriteria criteria) throws IOException {
ptStorageService.download(ptStorageService.queryAll(criteria), response);
}

@GetMapping
@ApiOperation("查询storage")
@PreAuthorize("@el.check('ptStorage:list')")
public DataResponseBody getPtStorages(PtStorageQueryCriteria criteria, Page page) {
return new DataResponseBody(ptStorageService.queryAll(criteria, page));
}

@PostMapping
@ApiOperation("新增storage")
@PreAuthorize("@el.check('ptStorage:add')")
public DataResponseBody create(@Validated @RequestBody PtStorage resources) {
return new DataResponseBody(ptStorageService.create(resources));
}

@PutMapping
@ApiOperation("修改storage")
@PreAuthorize("@el.check('ptStorage:edit')")
public DataResponseBody update(@Validated @RequestBody PtStorage resources) {
ptStorageService.update(resources);
return new DataResponseBody();
}

@ApiOperation("删除storage")
@PreAuthorize("@el.check('ptStorage:del')")
@DeleteMapping
public DataResponseBody deleteAll(@RequestBody Long[] ids) {
ptStorageService.deleteAll(ids);
return new DataResponseBody();
}
}

+7 -0  dubhe-server/dubhe-admin/src/main/java/org/dubhe/rest/PtTrainJobController.java

@@ -25,6 +25,8 @@ import org.dubhe.annotation.ApiVersion;
import org.dubhe.base.DataResponseBody;
import org.dubhe.constant.Permissions;
import org.dubhe.domain.dto.*;
import org.dubhe.enums.TrainTypeEnum;
import org.dubhe.factory.DataResponseFactory;
import org.dubhe.service.PtTrainJobService;
import org.dubhe.service.PtTrainJobSpecsService;
import org.springframework.beans.factory.annotation.Autowired;
@@ -94,6 +96,11 @@ public class PtTrainJobController {
@ApiOperation("创建训练任务")
@RequiresPermissions(Permissions.TRAINING_JOB)
public DataResponseBody createTrainJob(@Validated @RequestBody PtTrainJobCreateDTO ptTrainJobCreateDTO) {
if (TrainTypeEnum.isDistributeTrain(ptTrainJobCreateDTO.getTrainType())
&& ptTrainJobCreateDTO.getResourcesPoolNode() < 2) {
// 分布式训练节点数校验补偿
return DataResponseFactory.failed("分布式训练节点个数至少2个");
}
return new DataResponseBody(ptTrainJobService.createTrainJobVersion(ptTrainJobCreateDTO));
}
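
The added guard enforces at least two nodes for distributed training, complementing the relaxed @Min/@Max annotations in PtTrainJobCreateDTO above (the diff comment labels it a validation compensation). TrainTypeEnum itself is not shown in this commit; a purely illustrative shape for the helper it relies on could be:

/** Hypothetical sketch only; the real TrainTypeEnum is not part of this diff. */
public enum TrainTypeEnum {
    SINGLE(0),
    DISTRIBUTED(1);

    private final int value;

    TrainTypeEnum(int value) {
        this.value = value;
    }

    /** True when the given train type code denotes a distributed job (null-safe). */
    public static boolean isDistributeTrain(Integer trainType) {
        return trainType != null && trainType == DISTRIBUTED.value;
    }
}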



+0 -91  dubhe-server/dubhe-admin/src/main/java/org/dubhe/service/PtDatasetService.java

@@ -1,91 +0,0 @@
/**
* Copyright 2020 Zhejiang Lab. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* =============================================================
*/

package org.dubhe.service;

import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import org.dubhe.domain.PtDataset;
import org.dubhe.domain.dto.PtDatasetDTO;
import org.dubhe.domain.dto.PtDatasetQueryCriteria;

import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.util.List;
import java.util.Map;

/**
* @description 查询数据
* @date 2020-03-17
*/
public interface PtDatasetService {

/**
* 查询数据分页
*
* @param criteria 条件
* @param page 分页参数
* @return Map<String, Object>
*/
Map<String, Object> queryAll(PtDatasetQueryCriteria criteria, Page page);

/**
* 查询所有数据不分页
*
* @param criteria 条件参数
* @return List<PtDatasetDto>
*/
List<PtDatasetDTO> queryAll(PtDatasetQueryCriteria criteria);

/**
* 根据ID查询
*
* @param id ID
* @return PtDatasetDto
*/
PtDatasetDTO findById(Long id);

/**
* 创建
*
* @param resources /
* @return PtDatasetDto
*/
PtDatasetDTO create(PtDataset resources);

/**
* 编辑
*
* @param resources /
*/
void update(PtDataset resources);

/**
* 多选删除
*
* @param ids /
*/
void deleteAll(Long[] ids);

/**
* 导出数据
*
* @param all 待导出的数据
* @param response /
* @throws IOException /
*/
void download(List<PtDatasetDTO> all, HttpServletResponse response) throws IOException;
}

+0 -91  dubhe-server/dubhe-admin/src/main/java/org/dubhe/service/PtDevEnvsService.java

@@ -1,91 +0,0 @@
/**
* Copyright 2020 Zhejiang Lab. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* =============================================================
*/

package org.dubhe.service;

import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import org.dubhe.domain.PtDevEnvs;
import org.dubhe.domain.dto.PtDevEnvsDTO;
import org.dubhe.domain.dto.PtDevEnvsQueryCriteria;

import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.util.List;
import java.util.Map;

/**
* @description 查询数据
* @date 2020-03-17
*/
public interface PtDevEnvsService {

/**
* 查询数据分页
*
* @param criteria 条件
* @param page 分页参数
* @return Map<String, Object>
*/
Map<String, Object> queryAll(PtDevEnvsQueryCriteria criteria, Page page);

/**
* 查询所有数据不分页
*
* @param criteria 条件参数
* @return List<PtDevEnvsDto>
*/
List<PtDevEnvsDTO> queryAll(PtDevEnvsQueryCriteria criteria);

/**
* 根据ID查询
*
* @param id ID
* @return PtDevEnvsDto
*/
PtDevEnvsDTO findById(Long id);

/**
* 创建
*
* @param resources /
* @return PtDevEnvsDto
*/
PtDevEnvsDTO create(PtDevEnvs resources);

/**
* 编辑
*
* @param resources /
*/
void update(PtDevEnvs resources);

/**
* 多选删除
*
* @param ids /
*/
void deleteAll(Long[] ids);

/**
* 导出数据
*
* @param all 待导出的数据
* @param response /
* @throws IOException /
*/
void download(List<PtDevEnvsDTO> all, HttpServletResponse response) throws IOException;
}

+0 -92  dubhe-server/dubhe-admin/src/main/java/org/dubhe/service/PtStorageService.java

@@ -1,92 +0,0 @@
/**
* Copyright 2020 Zhejiang Lab. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* =============================================================
*/

package org.dubhe.service;


import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import org.dubhe.domain.PtStorage;
import org.dubhe.domain.dto.PtStorageDTO;
import org.dubhe.domain.dto.PtStorageQueryCriteria;

import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.util.List;
import java.util.Map;

/**
* @description 查询数据
* @date 2020-03-17
*/
public interface PtStorageService {

/**
* 查询数据分页
*
* @param criteria 条件
* @param page 分页参数
* @return Map<String, Object>
*/
Map<String, Object> queryAll(PtStorageQueryCriteria criteria, Page page);

/**
* 查询所有数据不分页
*
* @param criteria 条件参数
* @return List<PtStorageDto>
*/
List<PtStorageDTO> queryAll(PtStorageQueryCriteria criteria);

/**
* 根据ID查询
*
* @param id ID
* @return PtStorageDto
*/
PtStorageDTO findById(Long id);

/**
* 创建
*
* @param resources /
* @return PtStorageDto
*/
PtStorageDTO create(PtStorage resources);

/**
* 编辑
*
* @param resources /
*/
void update(PtStorage resources);

/**
* 多选删除
*
* @param ids /
*/
void deleteAll(Long[] ids);

/**
* 导出数据
*
* @param all 待导出的数据
* @param response /
* @throws IOException /
*/
void download(List<PtStorageDTO> all, HttpServletResponse response) throws IOException;
}

+2 -2  dubhe-server/dubhe-admin/src/main/java/org/dubhe/service/abstracts/AbstractPodCallback.java

@@ -49,7 +49,7 @@ public abstract class AbstractPodCallback implements PodCallbackAsyncService {
Thread.sleep(tryTime * 1000);
continue;
} catch (InterruptedException e) {
LogUtil.error(LogEnum.NOTE_BOOK,"AbstractPodCallback podCallBack InterruptedException", e);
LogUtil.error(LogEnum.NOTE_BOOK,"AbstractPodCallback podCallBack InterruptedException : {}", e);
// Restore interrupted state...      
Thread.currentThread().interrupt();
}
@@ -61,7 +61,7 @@ public abstract class AbstractPodCallback implements PodCallbackAsyncService {
}
}

/**
/**
* pod 异步回调具体实现处理类
* @param times 第n次处理
* @param k8sPodCallbackCreateDTO k8s回调实体类


+0 -33  dubhe-server/dubhe-admin/src/main/java/org/dubhe/service/convert/PtDatasetConvert.java

@@ -1,33 +0,0 @@
/**
* Copyright 2020 Zhejiang Lab. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* =============================================================
*/

package org.dubhe.service.convert;

import org.dubhe.base.BaseConvert;
import org.dubhe.domain.PtDataset;
import org.dubhe.domain.dto.PtDatasetDTO;
import org.mapstruct.Mapper;
import org.mapstruct.ReportingPolicy;

/**
* @description 数据集
* @date 2020-03-17
*/
@Mapper(componentModel = "spring", unmappedTargetPolicy = ReportingPolicy.IGNORE)
public interface PtDatasetConvert extends BaseConvert<PtDatasetDTO, PtDataset> {

}

+0 -34  dubhe-server/dubhe-admin/src/main/java/org/dubhe/service/convert/PtDevEnvsConvert.java

@@ -1,34 +0,0 @@
/**
* Copyright 2020 Zhejiang Lab. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* =============================================================
*/

package org.dubhe.service.convert;


import org.dubhe.base.BaseConvert;
import org.dubhe.domain.PtDevEnvs;
import org.dubhe.domain.dto.PtDevEnvsDTO;
import org.mapstruct.Mapper;
import org.mapstruct.ReportingPolicy;

/**
* @description 开发环境
* @date 2020-03-17
*/
@Mapper(componentModel = "spring", unmappedTargetPolicy = ReportingPolicy.IGNORE)
public interface PtDevEnvsConvert extends BaseConvert<PtDevEnvsDTO, PtDevEnvs> {

}

+0 -34  dubhe-server/dubhe-admin/src/main/java/org/dubhe/service/convert/PtImageConvert.java

@@ -1,34 +0,0 @@
/**
* Copyright 2020 Zhejiang Lab. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* =============================================================
*/

package org.dubhe.service.convert;


import org.dubhe.base.BaseConvert;
import org.dubhe.domain.entity.PtImage;
import org.dubhe.domain.dto.PtImageDTO;
import org.mapstruct.Mapper;
import org.mapstruct.ReportingPolicy;

/**
* @description 镜像
* @date 2020-03-17
*/
@Mapper(componentModel = "spring", unmappedTargetPolicy = ReportingPolicy.IGNORE)
public interface PtImageConvert extends BaseConvert<PtImageDTO, PtImage> {

}

+0 -33  dubhe-server/dubhe-admin/src/main/java/org/dubhe/service/convert/PtStorageConvert.java

@@ -1,33 +0,0 @@
/**
* Copyright 2020 Zhejiang Lab. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* =============================================================
*/

package org.dubhe.service.convert;


import org.dubhe.base.BaseConvert;
import org.dubhe.domain.PtStorage;
import org.dubhe.domain.dto.PtStorageDTO;
import org.mapstruct.Mapper;
import org.mapstruct.ReportingPolicy;

/**
* @description 存储类转化
* @date 2020-03-17
*/
@Mapper(componentModel = "spring", unmappedTargetPolicy = ReportingPolicy.IGNORE)
public interface PtStorageConvert extends BaseConvert<PtStorageDTO, PtStorage> {
}

+0 -34  dubhe-server/dubhe-admin/src/main/java/org/dubhe/service/convert/PtTrainJobConvert.java

@@ -1,34 +0,0 @@
/**
* Copyright 2020 Zhejiang Lab. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* =============================================================
*/

package org.dubhe.service.convert;


import org.dubhe.base.BaseConvert;
import org.dubhe.domain.entity.PtTrainJob;
import org.dubhe.domain.dto.PtTrainJobDTO;
import org.mapstruct.Mapper;
import org.mapstruct.ReportingPolicy;

/**
* @description 训练任务转化
* @date 2020-03-17
*/
@Mapper(componentModel = "spring", unmappedTargetPolicy = ReportingPolicy.IGNORE)
public interface PtTrainJobConvert extends BaseConvert<PtTrainJobDTO, PtTrainJob> {

}

+1 -1  dubhe-server/dubhe-admin/src/main/java/org/dubhe/service/impl/NoteBookAsyncServiceImpl.java

@@ -65,7 +65,7 @@ public class NoteBookAsyncServiceImpl extends AbstractPodCallback implements Pod
noteBookService.updateById(notebook);
return true;
} catch (Exception e) {
LogUtil.error(LogEnum.NOTE_BOOK, "NoteBook doCallback error!", e);
LogUtil.error(LogEnum.NOTE_BOOK, "NoteBook doCallback error!{}", e);
return false;
}
}


+5 -5  dubhe-server/dubhe-admin/src/main/java/org/dubhe/service/impl/NoteBookServiceImpl.java

@@ -222,7 +222,7 @@ public class NoteBookServiceImpl implements NoteBookService {
return (HttpUtils.isSuccess(result.getCode())
|| K8sResponseEnum.EXISTS.getCode().equals(result.getCode()));
} catch (Exception e) {
LogUtil.error(LogEnum.NOTE_BOOK, "createNoteBook调用jupyterResourceApi.createWithPvc异常!", e);
LogUtil.error(LogEnum.NOTE_BOOK, "createNoteBook调用jupyterResourceApi.createWithPvc异常!{}", e);
noteBook.setK8sStatusCode(BLANK);
noteBook.setK8sStatusInfo(NotebookUtil.getK8sStatusInfo(e));
return false;
@@ -305,7 +305,7 @@ public class NoteBookServiceImpl implements NoteBookService {
returnStr = "删除失败";
}
} catch (Exception e) {
LogUtil.error(LogEnum.NOTE_BOOK, "deleteNoteBook调用jupyterResourceApi.delete异常!", e);
LogUtil.error(LogEnum.NOTE_BOOK, "deleteNoteBook调用jupyterResourceApi.delete异常!{}", e);
noteBook.setK8sStatusCode(BLANK);
noteBook.setK8sStatusInfo(NotebookUtil.getK8sStatusInfo(e));
returnStr = "删除失败";
@@ -392,7 +392,7 @@ public class NoteBookServiceImpl implements NoteBookService {
noteBook.setK8sStatusInfo(NotebookUtil.getK8sStatusInfo(result));
return HttpUtils.isSuccess(result.getCode());
} catch (Exception e) {
LogUtil.error(LogEnum.NOTE_BOOK, "notebook调用jupyterResourceApi.createWithPvc异常!", e);
LogUtil.error(LogEnum.NOTE_BOOK, "notebook调用jupyterResourceApi.createWithPvc异常!{}", e);
noteBook.setK8sStatusCode(BLANK);
noteBook.setK8sStatusInfo(NotebookUtil.getK8sStatusInfo(e));
return false;
@@ -447,7 +447,7 @@ public class NoteBookServiceImpl implements NoteBookService {
returnStr = "停止" + NotebookUtil.FAILED;
}
} catch (Exception e) {
LogUtil.error(LogEnum.NOTE_BOOK, "停止notebook调用jupyterResourceApi.delete异常!", e);
LogUtil.error(LogEnum.NOTE_BOOK, "停止notebook调用jupyterResourceApi.delete异常!{}", e);
noteBook.setK8sStatusCode(BLANK);
noteBook.setK8sStatusInfo(NotebookUtil.getK8sStatusInfo(e));
returnStr = "停止" + NotebookUtil.FAILED;
@@ -527,7 +527,7 @@ public class NoteBookServiceImpl implements NoteBookService {
}
return NoteBookStatusEnum.convert(result.getPhase());
} catch (Exception e) {
LogUtil.error(LogEnum.NOTE_BOOK, "notebook nameSpace:{} resourceName:{} 查询异常!", noteBook.getK8sNamespace(), noteBook.getK8sResourceName(), e);
LogUtil.error(LogEnum.NOTE_BOOK, "notebook nameSpace:{} resourceName:{} 查询异常!{}", noteBook.getK8sNamespace(), noteBook.getK8sResourceName(), e);
noteBook.setK8sStatusCode(BLANK);
noteBook.setK8sStatusInfo(NotebookUtil.getK8sStatusInfo(e));
return null;


+0 -121  dubhe-server/dubhe-admin/src/main/java/org/dubhe/service/impl/PtDatasetServiceImpl.java

@@ -1,121 +0,0 @@
/**
* Copyright 2020 Zhejiang Lab. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* =============================================================
*/

package org.dubhe.service.impl;

import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import org.dubhe.dao.PtDatasetMapper;
import org.dubhe.domain.PtDataset;
import org.dubhe.domain.dto.PtDatasetDTO;
import org.dubhe.domain.dto.PtDatasetQueryCriteria;
import org.dubhe.service.PtDatasetService;
import org.dubhe.service.convert.PtDatasetConvert;
import org.dubhe.utils.FileUtil;
import org.dubhe.utils.PageUtil;
import org.dubhe.utils.WrapperHelp;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.cache.annotation.CacheConfig;
import org.springframework.cache.annotation.CacheEvict;
import org.springframework.cache.annotation.Cacheable;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;

import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

/**
* @description 数据集管理
* @date 2020-03-17
*/
@Service
@CacheConfig(cacheNames = "ptDataset")
@Transactional(propagation = Propagation.SUPPORTS, readOnly = true, rollbackFor = Exception.class)
public class PtDatasetServiceImpl implements PtDatasetService {

@Autowired
private PtDatasetMapper ptDatasetMapper;
@Autowired
private PtDatasetConvert ptDatasetConvert;


@Override
@Cacheable
public Map<String, Object> queryAll(PtDatasetQueryCriteria criteria, Page page) {
IPage<PtDataset> ptDatasets = ptDatasetMapper.selectPage(page, WrapperHelp.getWrapper(criteria));
return PageUtil.toPage(ptDatasets, ptDatasetConvert::toDto);
}

@Override
@Cacheable
public List<PtDatasetDTO> queryAll(PtDatasetQueryCriteria criteria) {
return ptDatasetConvert.toDto(ptDatasetMapper.selectList(WrapperHelp.getWrapper(criteria)));
}

@Override
@Cacheable(key = "#p0")
public PtDatasetDTO findById(Long id) {
PtDataset ptDataset = ptDatasetMapper.selectById(id);
return ptDatasetConvert.toDto(ptDataset);
}

@Override
@CacheEvict(allEntries = true)
@Transactional(rollbackFor = Exception.class)
public PtDatasetDTO create(PtDataset resources) {
ptDatasetMapper.insert(resources);
return ptDatasetConvert.toDto(resources);
}

@Override
@CacheEvict(allEntries = true)
@Transactional(rollbackFor = Exception.class)
public void update(PtDataset resources) {
PtDataset ptDataset = ptDatasetMapper.selectById(resources.getId());
ptDataset.copy(resources);
ptDatasetMapper.updateById(ptDataset);
}

@Override
@CacheEvict(allEntries = true)
public void deleteAll(Long[] ids) {
for (Long id : ids) {
ptDatasetMapper.deleteById(id);
}
}

@Override
public void download(List<PtDatasetDTO> all, HttpServletResponse response) throws IOException {
List<Map<String, Object>> list = new ArrayList<>();
for (PtDatasetDTO ptDataset : all) {
Map<String, Object> map = new LinkedHashMap<>();
map.put(" name", ptDataset.getName());
map.put(" remark", ptDataset.getRemark());
map.put(" type", ptDataset.getType());
map.put(" team", ptDataset.getTeam());
map.put(" createUser", ptDataset.getCreateUser());
map.put(" createTime", ptDataset.getCreateTime());
list.add(map);
}
FileUtil.downloadExcel(list, response);
}
}

+0 -123  dubhe-server/dubhe-admin/src/main/java/org/dubhe/service/impl/PtDevEnvsServiceImpl.java

@@ -1,123 +0,0 @@
/**
* Copyright 2020 Zhejiang Lab. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* =============================================================
*/

package org.dubhe.service.impl;

import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import org.dubhe.dao.PtDevEnvsMapper;
import org.dubhe.domain.PtDevEnvs;
import org.dubhe.domain.dto.PtDevEnvsDTO;
import org.dubhe.domain.dto.PtDevEnvsQueryCriteria;
import org.dubhe.service.PtDevEnvsService;
import org.dubhe.service.convert.PtDevEnvsConvert;
import org.dubhe.utils.FileUtil;
import org.dubhe.utils.PageUtil;
import org.dubhe.utils.WrapperHelp;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.cache.annotation.CacheConfig;
import org.springframework.cache.annotation.CacheEvict;
import org.springframework.cache.annotation.Cacheable;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;

import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

/**
* @description 开发环境
* @date 2020-03-17
*/
@Service
@CacheConfig(cacheNames = "ptDevEnvs")
@Transactional(propagation = Propagation.SUPPORTS, readOnly = true, rollbackFor = Exception.class)
public class PtDevEnvsServiceImpl implements PtDevEnvsService {

@Autowired
private PtDevEnvsMapper ptDevEnvsMapper;
private PtDevEnvsConvert ptDevEnvsConvert;


@Override
@Cacheable
public Map<String, Object> queryAll(PtDevEnvsQueryCriteria criteria, Page page) {
IPage<PtDevEnvs> ptDevEnvss = ptDevEnvsMapper.selectPage(page, WrapperHelp.getWrapper(criteria));
return PageUtil.toPage(ptDevEnvss, ptDevEnvsConvert::toDto);
}

@Override
@Cacheable
public List<PtDevEnvsDTO> queryAll(PtDevEnvsQueryCriteria criteria) {
return ptDevEnvsConvert.toDto(ptDevEnvsMapper.selectList(WrapperHelp.getWrapper(criteria)));
}

@Override
@Cacheable(key = "#p0")
public PtDevEnvsDTO findById(Long id) {
PtDevEnvs ptDevEnvs = ptDevEnvsMapper.selectById(id);
return ptDevEnvsConvert.toDto(ptDevEnvs);
}

@Override
@CacheEvict(allEntries = true)
@Transactional(rollbackFor = Exception.class)
public PtDevEnvsDTO create(PtDevEnvs resources) {
ptDevEnvsMapper.insert(resources);
return ptDevEnvsConvert.toDto(resources);
}

@Override
@CacheEvict(allEntries = true)
@Transactional(rollbackFor = Exception.class)
public void update(PtDevEnvs resources) {
PtDevEnvs ptDevEnvs = ptDevEnvsMapper.selectById(resources.getId());
ptDevEnvs.copy(resources);
ptDevEnvsMapper.updateById(ptDevEnvs);
}

@Override
@CacheEvict(allEntries = true)
public void deleteAll(Long[] ids) {
for (Long id : ids) {
ptDevEnvsMapper.deleteById(id);
}
}

@Override
public void download(List<PtDevEnvsDTO> all, HttpServletResponse response) throws IOException {
List<Map<String, Object>> list = new ArrayList<>();
for (PtDevEnvsDTO ptDevEnvs : all) {
Map<String, Object> map = new LinkedHashMap<>();
map.put(" name", ptDevEnvs.getName());
map.put(" remark", ptDevEnvs.getRemark());
map.put(" podnum", ptDevEnvs.getPodNum());
map.put(" gpunum", ptDevEnvs.getGpuNum());
map.put(" memnum", ptDevEnvs.getMemNum());
map.put(" cpunum", ptDevEnvs.getCpuNum());
map.put(" duration", ptDevEnvs.getDuration());
map.put(" startTime", ptDevEnvs.getStartTime());
map.put(" closeTime", ptDevEnvs.getCloseTime());
list.add(map);
}
FileUtil.downloadExcel(list, response);
}
}

+1 -1  dubhe-server/dubhe-admin/src/main/java/org/dubhe/service/impl/PtImageServiceImpl.java

@@ -121,7 +121,7 @@ public class PtImageServiceImpl implements PtImageService {
}
ptImages = ptImageMapper.selectPage(page, query);
} catch (Exception e) {
LogUtil.error(LogEnum.BIZ_TRAIN, "User {} query mirror list display exception :{}, request information :{}", e, ptImageQueryDTO);
LogUtil.error(LogEnum.BIZ_TRAIN, "User {} query mirror list display exception :{}, request information :{}",user.getId() ,e, ptImageQueryDTO);
throw new BusinessException("查询镜像列表展示异常");
}
List<PtImageQueryVO> ptImageQueryResult = ptImages.getRecords().stream().map(x -> {
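
The one-line fix above supplies user.getId() for the first {} placeholder, which previously had no matching argument, so the exception value was shifted into the user slot; the same placeholder-argument pairing applies to the NoteBook logging hunks earlier in this diff. LogUtil's formatter is not shown here, but assuming standard SLF4J placeholder semantics (an assumption about LogUtil's behavior), the difference looks like this:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

class PlaceholderSketch {
    private static final Logger LOG = LoggerFactory.getLogger(PlaceholderSketch.class);

    void sketch(Long userId, Exception e, Object queryDto) {
        // Too few arguments: values shift left, the user-id slot prints the exception's toString()
        // and the last {} stays literal; no stack trace is written.
        LOG.error("User {} query mirror list display exception :{}, request information :{}", e, queryDto);

        // One argument per placeholder, as in the fixed line: every {} is filled as intended.
        LOG.error("User {} query mirror list display exception :{}, request information :{}", userId, e, queryDto);

        // Trailing Throwable with no placeholder left over for it: the message is formatted and
        // the stack trace is appended as well.
        LOG.error("User {} query mirror list display exception, request information :{}", userId, queryDto, e);
    }
}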


+0 -122  dubhe-server/dubhe-admin/src/main/java/org/dubhe/service/impl/PtStorageServiceImpl.java

@@ -1,122 +0,0 @@
/**
* Copyright 2020 Zhejiang Lab. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* =============================================================
*/

package org.dubhe.service.impl;


import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import org.dubhe.dao.PtStorageMapper;
import org.dubhe.domain.PtStorage;
import org.dubhe.domain.dto.PtStorageDTO;
import org.dubhe.domain.dto.PtStorageQueryCriteria;
import org.dubhe.service.PtStorageService;
import org.dubhe.service.convert.PtStorageConvert;
import org.dubhe.utils.FileUtil;
import org.dubhe.utils.PageUtil;
import org.dubhe.utils.WrapperHelp;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.cache.annotation.CacheConfig;
import org.springframework.cache.annotation.CacheEvict;
import org.springframework.cache.annotation.Cacheable;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;

import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

/**
* @description 存储服务类
* @date 2020-03-17
*/
@Service
@CacheConfig(cacheNames = "ptStorage")
@Transactional(propagation = Propagation.SUPPORTS, readOnly = true, rollbackFor = Exception.class)
public class PtStorageServiceImpl implements PtStorageService {

@Autowired
private PtStorageMapper ptStorageMapper;

@Autowired
private PtStorageConvert ptStorageConvert;


@Override
@Cacheable
public Map<String, Object> queryAll(PtStorageQueryCriteria criteria, Page page) {
IPage<PtStorage> ptStorages = ptStorageMapper.selectPage(page, WrapperHelp.getWrapper(criteria));
return PageUtil.toPage(ptStorages, ptStorageConvert::toDto);
}

@Override
@Cacheable
public List<PtStorageDTO> queryAll(PtStorageQueryCriteria criteria) {
return ptStorageConvert.toDto(ptStorageMapper.selectList(WrapperHelp.getWrapper(criteria)));
}

@Override
@Cacheable(key = "#p0")
public PtStorageDTO findById(Long id) {
PtStorage ptStorage = ptStorageMapper.selectById(id);
return ptStorageConvert.toDto(ptStorage);
}

@Override
@CacheEvict(allEntries = true)
@Transactional(rollbackFor = Exception.class)
public PtStorageDTO create(PtStorage resources) {
ptStorageMapper.insert(resources);
return ptStorageConvert.toDto(resources);
}

@Override
@CacheEvict(allEntries = true)
@Transactional(rollbackFor = Exception.class)
public void update(PtStorage resources) {
PtStorage ptStorage = ptStorageMapper.selectById(resources.getId());
ptStorage.copy(resources);
ptStorageMapper.updateById(ptStorage);
}

@Override
@CacheEvict(allEntries = true)
public void deleteAll(Long[] ids) {
for (Long id : ids) {
ptStorageMapper.deleteById(id);
}
}

@Override
public void download(List<PtStorageDTO> all, HttpServletResponse response) throws IOException {
List<Map<String, Object>> list = new ArrayList<>();
for (PtStorageDTO ptStorage : all) {
Map<String, Object> map = new LinkedHashMap<>();
map.put(" name", ptStorage.getName());
map.put(" size", ptStorage.getSize());
map.put(" storageclass", ptStorage.getStorageclass());
map.put(" createUser", ptStorage.getCreateUser());
map.put(" createTime", ptStorage.getCreateTime());
list.add(map);
}
FileUtil.downloadExcel(list, response);
}
}
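
The two deleted service implementations above (PtDevEnvsServiceImpl and PtStorageServiceImpl) follow the same Spring Cache pattern: reads are cached under the name declared in @CacheConfig, and every write clears the whole cache with @CacheEvict(allEntries = true). A minimal sketch of that pattern, using a hypothetical DemoService rather than Dubhe classes (it assumes @EnableCaching is present on a configuration class):

import org.springframework.cache.annotation.CacheConfig;
import org.springframework.cache.annotation.CacheEvict;
import org.springframework.cache.annotation.Cacheable;
import org.springframework.stereotype.Service;

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

@Service
@CacheConfig(cacheNames = "demo")
public class DemoService {

    private final Map<Long, String> store = new ConcurrentHashMap<>();

    // First call hits the store; later calls with the same id are served from the "demo" cache.
    @Cacheable(key = "#p0")
    public String findById(Long id) {
        return store.get(id);
    }

    // Any write wipes the whole cache, so stale findById entries cannot survive an update.
    @CacheEvict(allEntries = true)
    public void update(Long id, String value) {
        store.put(id, value);
    }
}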

+ 19
- 16
dubhe-server/dubhe-admin/src/main/resources/config/application-dev.yml View File

@@ -8,7 +8,7 @@ spring:
redis:
#数据库索引
database: 0
host:
host: 127.0.0.1
port: 6379
password:
#连接超时时间
@@ -18,8 +18,8 @@ spring:
db-type: com.alibaba.druid.pool.DruidDataSource
driverClassName: net.sf.log4jdbc.sql.jdbcapi.DriverSpy
url: jdbc:log4jdbc:mysql://127.0.0.1:3306/dubhe-dev?serverTimezone=Asia/Shanghai&characterEncoding=utf8&useSSL=false&allowMultiQueries=true&useInformationSchema=true
username:
password:
username: test
password: test

# 初始化配置
initial-size: 3
@@ -59,8 +59,8 @@ spring:
type: com.alibaba.druid.pool.DruidDataSource
driver-class-name: net.sf.log4jdbc.sql.jdbcapi.DriverSpy
url: jdbc:log4jdbc:mysql://127.0.0.1:3306/dubhe-dev?serverTimezone=Asia/Shanghai&characterEncoding=utf8&useSSL=false&allowMultiQueries=true&useInformationSchema=true
username:
password:
username: test
password: test

#指定data_dataset表 主键id 生成策略
sharding:
@@ -86,19 +86,19 @@ k8s:
# k8s集群配置文件
kubeconfig: kubeconfig
# nfs服务暴露的IP地址 如需测试需修改为合适的地址
nfs:
nfs: 127.0.0.1
#nfs服务端 共享目录
nfs-root-path: /nfs/
nfs-root-windows-path: "Z:"
# 命名空间关键字
namespace: namespace
# k8s ingress域名 如需测试需修改为合适的域名
host:
# k8s ingress-controller 对外port
port:
# elasticsearch暴露的服务地址
host: notebook.test.com
# External port of the k8s ingress-controller. How to obtain it: after deploying ingress-controller, run kubectl get svc -A | grep 'ingress-nginx-controller' on the k8s master node and take the external port mapped to 80
port: 33334
# Service address exposed by elasticsearch. How to obtain it: after deploying the cluster log management component, run kubectl get svc -A | grep 'elasticsearch' on the k8s master node and take the external port mapped to 9200
elasticsearch:
hostlist: ${eshostlist::}
hostlist: ${eshostlist:127.0.0.1:33333}
# 日志采集配置信息
log:
type: _doc
@@ -120,9 +120,10 @@ k8s:
nfs-storage-class-name: zjlab-nfs-storage
#配置harbor
harbor:
address:
username:
password:
# Harbor service domain name; resolve a domain you own to the IP of the server running the harbor service
address: harbor.test.com
username: admin
password: Harbor12345
model-name: train
# data模块配置
data:
@@ -157,8 +158,10 @@ data:
# minio配置
minio:
url: http://127.0.0.1:9000/
accessKey:
secretKey:
# MINIO_ACCESS_KEY specified when deploying minio
accessKey: admin
# MINIO_SECRET_KEY specified when deploying minio
secretKey: 123@abc.com
bucketName: dubhe-dev
presignedUrlExpiryTime: 300
annotation: /annotation/
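
For reference, a minimal sketch of connecting with the minio settings above, assuming the MinIO Java SDK 7.x builder API; the endpoint and credentials are the dev defaults from this file, not production values:

import io.minio.BucketExistsArgs;
import io.minio.MinioClient;

public class MinioSmokeTest {
    public static void main(String[] args) throws Exception {
        // Dev defaults from application-dev.yml; override for real deployments.
        MinioClient client = MinioClient.builder()
                .endpoint("http://127.0.0.1:9000/")
                .credentials("admin", "123@abc.com")
                .build();

        // Check that the configured bucket exists before the platform tries to use it.
        boolean exists = client.bucketExists(
                BucketExistsArgs.builder().bucket("dubhe-dev").build());
        System.out.println("bucket dubhe-dev exists: " + exists);
    }
}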


+ 14
- 12
dubhe-server/dubhe-admin/src/main/resources/config/application.yml View File

@@ -9,13 +9,13 @@ spring:
repositories:
enabled: false

# 邮箱配置
# Mail configuration, used to send email verification codes for user registration
mail:
host:
host: smtp.163.com
# 邮件的发送者 163邮箱(开发测试时使用 发送延时 20秒,每天发送量限制 50)
username:
username: test@163.com
# SMTP授权密码
password:
password: AAAAAAAAAAAAAAAA
protocol: smtp
properties.mail.smtp.auth: true
properties.mail.smtp.port: 465 #465或者994
@@ -32,10 +32,10 @@ loginCode:
height: 28
length: 4

#密码加密传输,前端公钥加密,后端私钥解密
# Passwords are transmitted encrypted: the front end encrypts with the public key and the back end decrypts with the private key; used together with publicKey in the front end's src/settings.js; the key pair can be generated with the genKeyPair method
rsa:
private_key:
public_key:
private_key: MIIBUwIBADANBgkqhkiG9w0BAQEFAASCAT0wggE5AgEAAkEA0vfvyTdGJkdbHkB8mp0f3FE0GYP3AYPaJF7jUd1M0XxFSE2ceK3k2kw20YvQ09NJKk+OMjWQl9WitG9pB6tSCQIDAQABAkA2SimBrWC2/wvauBuYqjCFwLvYiRYqZKThUS3MZlebXJiLB+Ue/gUifAAKIg1avttUZsHBHrop4qfJCwAI0+YRAiEA+W3NK/RaXtnRqmoUUkb59zsZUBLpvZgQPfj1MhyHDz0CIQDYhsAhPJ3mgS64NbUZmGWuuNKp5coY2GIj/zYDMJp6vQIgUueLFXv/eZ1ekgz2Oi67MNCk5jeTF2BurZqNLR3MSmUCIFT3Q6uHMtsB9Eha4u7hS31tj1UWE+D+ADzp59MGnoftAiBeHT7gDMuqeJHPL4b+kC+gzV4FGTfhR9q3tTbklZkD2A==
public_key: MFwwDQYJKoZIhvcNAQEBBQADSwAwSAJBANL378k3RiZHWx5AfJqdH9xRNBmD9wGD2iRe41HdTNF8RUhNnHit5NpMNtGL0NPTSSpPjjI1kJfVorRvaQerUgkCAwEAAQ==

#jwt
jwt:
@@ -43,7 +43,7 @@ jwt:
# 令牌前缀
token-start-with: Bearer
# 必须使用最少88位的Base64对该令牌进行编码
base64-secret:
base64-secret: ZmQ0ZGI5NjQ0MDQwY2I4MjMxY2Y3ZmI3MjdhN2ZmMjNhODViOTg1ZGE0NTBjMGM4NDA5NzYxMjdjOWMwYWRmZTBlZjlhNGY3ZTg4Y2U3YTE1ODVkZDU5Y2Y3OGYwZWE1NzUzNWQ2YjFjZDc0NGMxZWU2MmQ3MjY1NzJmMTIzNDU=
# 令牌过期时间 此处单位/毫秒 ,默认24小时,可在此网站生成 https://www.convertworld.com/zh-hans/time/milliseconds.html
token-validity-in-seconds: 86400000
# 在线用户key
@@ -56,7 +56,7 @@ swagger:
enabled: true

# 后台添加用户的初始密码
initial_password: ""
initial_password: "123456"

train-job:
namespace: "namespace-"
@@ -123,12 +123,14 @@ logging:
dubhe-proxy:
visual:
keyword: visual
server:
port:
# Backend IP of the visualization service
server: 127.0.0.1
# Backend port of the visualization service
port: 9898
refine:
keyword: refine
server: localhost
port:
port: 9797

# 延时全局配置
delay:
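
The rsa and jwt entries above now carry concrete dev values. A hedged sketch of how comparable values can be produced with plain JDK APIs; this is not the project's genKeyPair utility, and the 512-bit key size merely mirrors the short sample keys in this file:

import java.security.KeyPair;
import java.security.KeyPairGenerator;
import java.security.SecureRandom;
import java.util.Base64;

public class DevSecretsGenerator {
    public static void main(String[] args) throws Exception {
        // RSA key pair for rsa.private_key (PKCS#8) and rsa.public_key (X.509), Base64 encoded.
        KeyPairGenerator generator = KeyPairGenerator.getInstance("RSA");
        generator.initialize(512); // dev-only size; use 2048+ outside of samples
        KeyPair pair = generator.generateKeyPair();
        System.out.println("private_key: " + Base64.getEncoder().encodeToString(pair.getPrivate().getEncoded()));
        System.out.println("public_key:  " + Base64.getEncoder().encodeToString(pair.getPublic().getEncoded()));

        // Random jwt.base64-secret: 96 raw bytes encode to 128 Base64 characters, well over the 88 minimum.
        byte[] secret = new byte[96];
        new SecureRandom().nextBytes(secret);
        System.out.println("base64-secret: " + Base64.getEncoder().encodeToString(secret));
    }
}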


+ 0
- 19
dubhe-server/dubhe-admin/src/main/resources/kubeconfig-prod View File

@@ -1,19 +0,0 @@
apiVersion: v1
clusters:
- cluster:
certificate-authority-data:
server:
name: kubernetes
contexts:
- context:
cluster: kubernetes
user: kubernetes-admin
name: kubernetes-admin@kubernetes
current-context: kubernetes-admin@kubernetes
kind: Config
preferences: {}
users:
- name: kubernetes-admin
user:
client-certificate-data:
client-key-data:

+ 3
- 1
dubhe-server/dubhe-admin/src/test/java/org/dubhe/BaseTest.java View File

@@ -19,6 +19,7 @@ package org.dubhe;

import org.apache.shiro.SecurityUtils;
import org.apache.shiro.subject.Subject;
import org.apache.shiro.util.ThreadContext;
import org.apache.shiro.web.mgt.DefaultWebSecurityManager;
import org.dubhe.support.login.UsernamePasswordCaptchaToken;
import org.junit.Assert;
@@ -65,8 +66,9 @@ public class BaseTest {
public void setup() {
this.mockMvc = MockMvcBuilders.webAppContextSetup(this.wac).build();
SecurityUtils.setSecurityManager(defaultWebSecurityManager);
ThreadContext.bind(defaultWebSecurityManager);
Subject subject = SecurityUtils.getSubject();
UsernamePasswordCaptchaToken token = new UsernamePasswordCaptchaToken("admin", "admin");
UsernamePasswordCaptchaToken token = new UsernamePasswordCaptchaToken("admin", "123456");
token.setRememberMe(true);
subject.login(token);
}


+ 1
- 1
dubhe-server/dubhe-data/src/main/java/org/dubhe/data/machine/statemachine/GlobalStateMachine.java View File

@@ -37,4 +37,4 @@ public class GlobalStateMachine {
*/
private FileStateMachine fileStateMachine;

}
}

+ 1
- 13
dubhe-server/dubhe-data/src/main/java/org/dubhe/data/service/impl/DatasetServiceImpl.java View File

@@ -35,7 +35,6 @@ import org.dubhe.base.MagicNumConstant;
import org.dubhe.constant.NumberConstant;
import org.dubhe.data.constant.*;
import org.dubhe.data.dao.DatasetMapper;
import org.dubhe.data.dao.DatasetVersionFileMapper;
import org.dubhe.data.dao.DatasetVersionMapper;
import org.dubhe.data.dao.TaskMapper;
import org.dubhe.data.domain.dto.*;
@@ -137,12 +136,6 @@ public class DatasetServiceImpl extends ServiceImpl<DatasetMapper, Dataset> impl
public FileService fileService;

/**
* 数据集转换
*/
@Autowired
private DatasetConvert datasetConvert;

/**
* 数据集标签服务类
*/
@Autowired
@@ -201,9 +194,6 @@ public class DatasetServiceImpl extends ServiceImpl<DatasetMapper, Dataset> impl
@Autowired
private LabelGroupServiceImpl labelGroupService;

@Autowired
private DatasetVersionFileMapper datasetVersionFileMapper;

/**
* 检测是否为公共数据集
*
@@ -434,12 +424,10 @@ public class DatasetServiceImpl extends ServiceImpl<DatasetMapper, Dataset> impl
Map<String, Long> labelNameMap = labelList.stream().collect(Collectors.toMap(Label::getName, Label::getId));
if(!Objects.isNull(labelNameMap.get(label.getName()))){
datasetLabelService.insert(DatasetLabel.builder().datasetId(datasetId).labelId(labelNameMap.get(label.getName())).build());
datasetGroupLabelService.insert(DatasetGroupLabel.builder().labelGroupId(dataset.getLabelGroupId()).labelId(labelNameMap.get(label.getName())).build());
}else {
insertLabelData(label,datasetId);
}
if(!Objects.isNull(dataset.getLabelGroupId()) && COCO_ID.compareTo(dataset.getLabelGroupId()) != 0){
datasetGroupLabelService.insert(DatasetGroupLabel.builder().labelGroupId(dataset.getLabelGroupId()).labelId(label.getId()).build());
}
}else {
insertLabelData(label,datasetId);
}


+ 5
- 4
dubhe-server/dubhe-data/src/main/java/org/dubhe/data/service/impl/LabelServiceImpl.java View File

@@ -27,13 +27,11 @@ import org.dubhe.data.constant.ErrorEnum;
import org.dubhe.data.dao.LabelMapper;
import org.dubhe.data.domain.dto.LabelCreateDTO;
import org.dubhe.data.domain.dto.LabelDTO;
import org.dubhe.data.domain.entity.Dataset;
import org.dubhe.data.domain.entity.DatasetGroupLabel;
import org.dubhe.data.domain.entity.DatasetLabel;
import org.dubhe.data.domain.entity.Label;
import org.dubhe.data.service.DatasetGroupLabelService;
import org.dubhe.data.service.DatasetLabelService;
import org.dubhe.data.service.DatasetService;
import org.dubhe.data.service.LabelService;
import org.dubhe.enums.LogEnum;
import org.dubhe.exception.BusinessException;
@@ -92,12 +90,15 @@ public class LabelServiceImpl extends ServiceImpl<LabelMapper, Label> implements
List<Label> labels = getBaseMapper().listLabelByDatasetId(datasetId);
List<Long> pubLabelIds = getPubLabelIds();
if(!CollectionUtils.isEmpty(labels)){
//Query label group IDs by dataset ID
List<LabelDTO> labelDTOS = baseMapper.listByDatesetId(datasetId);
Map<Long, Long> labelMap = labelDTOS.stream().collect(Collectors.toMap(LabelDTO::getId, LabelDTO::getLabelGroupId));
//查询数据集所属标签组下标签
return labels.stream().map(a -> {
LabelDTO dto = new LabelDTO();
dto.setName(a.getName());
dto.setColor(a.getColor());
dto.setLabelGroupId(pubLabelIds.contains(a.getId()) ? COCO_ID : null);
dto.setLabelGroupId(pubLabelIds.contains(a.getId()) ? COCO_ID : labelMap.get(a.getId()));
dto.setType(a.getType());
dto.setId(a.getId());
return dto;
@@ -403,7 +404,7 @@ public class LabelServiceImpl extends ServiceImpl<LabelMapper, Label> implements
*/
@Override
public int selectCount(Long id) {
return baseMapper.listByGroupId(id).size();
return datasetGroupLabelService.listByGroupId(id).size();
}

/**
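
One detail of the labelMap change above: Collectors.toMap without a merge function throws IllegalStateException on duplicate keys, so it assumes listByDatesetId returns at most one row per label id. A standalone illustration of the two forms (plain JDK streams, not Dubhe code):

import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

public class ToMapDemo {
    public static void main(String[] args) {
        // (labelId, labelGroupId) pairs; label id 2 appears twice on purpose.
        List<long[]> rows = Arrays.asList(new long[]{1, 10}, new long[]{2, 10}, new long[]{2, 11});

        // Strict form, same shape as labelMap in the diff: this would throw
        // IllegalStateException (duplicate key) because id 2 repeats.
        // Map<Long, Long> strict = rows.stream().collect(Collectors.toMap(r -> r[0], r -> r[1]));

        // Defensive variant: keep the first mapping when a key repeats.
        Map<Long, Long> tolerant = rows.stream()
                .collect(Collectors.toMap(r -> r[0], r -> r[1], (first, second) -> first));
        System.out.println(tolerant); // {1=10, 2=10}
    }
}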


+ 1
- 1
dubhe-server/dubhe-k8s/src/main/java/org/dubhe/harbor/api/impl/HarborApiImpl.java View File

@@ -298,7 +298,7 @@ public class HarborApiImpl implements HarborApi {
//获取harbor中所有项目的名称
Set<String> names = projectIdMap.keySet();
//判断harbor中是否具有改项目
names.stream().forEach(name->{
names.forEach(name->{
if(urlSplits[MagicNumConstant.ONE].equals(name)){
//发送删除请求
HttpClientUtils.sendHttpsDelete(tagSearchUrl+dataRep+TAG_SEARCH_PARAMS+SymbolConstant.SLASH+tagUrls[MagicNumConstant.ONE],harborName,harborPassword);


+ 3
- 3
dubhe-server/dubhe-k8s/src/main/java/org/dubhe/k8s/api/impl/JupyterResourceApiImpl.java View File

@@ -583,7 +583,7 @@ public class JupyterResourceApiImpl implements JupyterResourceApi {
if (delayDelete != null && delayDelete > 0){
taskYamlBO.append(statefulSet);
}
LogUtil.info(LogEnum.BIZ_K8S, "Ready to deploy {}, yaml信息为{}", statefulSetName, YamlUtils.dumpAsYaml(statefulSet));
LogUtil.info(LogEnum.BIZ_K8S, "Ready to deploy {}, yaml info is : {}", statefulSetName, YamlUtils.dumpAsYaml(statefulSet));
statefulSet = client.apps().statefulSets().create(statefulSet);
LogUtil.info(LogEnum.BIZ_K8S, "{} deployed successfully", statefulSetName);
return statefulSet;
@@ -625,7 +625,7 @@ public class JupyterResourceApiImpl implements JupyterResourceApi {
if (delayDelete != null && delayDelete > 0){
taskYamlBO.append(svc);
}
LogUtil.info(LogEnum.BIZ_K8S, "Ready to deploy {}, yaml信息为{}", svcName, YamlUtils.dumpAsYaml(svc));
LogUtil.info(LogEnum.BIZ_K8S, "Ready to deploy {}, yaml info is : {}", svcName, YamlUtils.dumpAsYaml(svc));
svc = client.services().create(svc);
LogUtil.info(LogEnum.BIZ_K8S, "{} deployed successfully", svcName);
return svc;
@@ -674,7 +674,7 @@ public class JupyterResourceApiImpl implements JupyterResourceApi {
if (delayDelete != null && delayDelete > 0){
taskYamlBO.append(ingress);
}
LogUtil.info(LogEnum.BIZ_K8S, "Ready to deploy {}, yaml信息为{}", ingressName, YamlUtils.dumpAsYaml(ingress));
LogUtil.info(LogEnum.BIZ_K8S, "Ready to deploy {}, yaml info is : {}", ingressName, YamlUtils.dumpAsYaml(ingress));
ingress = client.extensions().ingresses().create(ingress);
LogUtil.info(LogEnum.BIZ_K8S, "{} deployed successfully", ingressName);
return ingress;


+ 0
- 1
dubhe-server/dubhe-k8s/src/main/java/org/dubhe/k8s/api/impl/TrainJobApiImpl.java View File

@@ -54,7 +54,6 @@ import org.dubhe.k8s.domain.bo.TaskYamlBO;
import org.dubhe.k8s.domain.entity.K8sTask;
import org.dubhe.k8s.domain.resource.BizJob;
import org.dubhe.k8s.domain.resource.BizPersistentVolumeClaim;
import org.dubhe.k8s.domain.resource.BizPod;
import org.dubhe.k8s.domain.vo.PtJupyterJobVO;
import org.dubhe.k8s.enums.ImagePullPolicyEnum;
import org.dubhe.k8s.enums.K8sKindEnum;


+ 0
- 19
dubhe-server/dubhe-k8s/src/main/java/org/dubhe/k8s/domain/bo/K8sTaskBO.java View File

@@ -37,23 +37,4 @@ public class K8sTaskBO extends K8sTask {
*/
private Long maxStopUnixTime;

public K8sTaskBO(K8sTask k8sTask){
this.setId(k8sTask.getId());
this.setNamespace(k8sTask.getNamespace());
this.setResourceName(k8sTask.getResourceName());
this.setTaskYaml(k8sTask.getTaskYaml());
this.setBusiness(k8sTask.getBusiness());
this.setApplyUnixTime(k8sTask.getApplyUnixTime());
this.setApplyDisplayTime(k8sTask.getApplyDisplayTime());
this.setApplyStatus(k8sTask.getApplyStatus());
this.setStopUnixTime(k8sTask.getStopUnixTime());
this.setStopDisplayTime(k8sTask.getStopDisplayTime());
this.setStopStatus(k8sTask.getStopStatus());
this.setCreateTime(k8sTask.getCreateTime());
this.setCreateUserId(k8sTask.getCreateUserId());
this.setUpdateTime(k8sTask.getUpdateTime());
this.setUpdateUserId(k8sTask.getUpdateUserId());
this.setDeleted(k8sTask.getDeleted());
}

}

+ 9
- 0
dubhe-server/dubhe-k8s/src/main/java/org/dubhe/k8s/domain/entity/K8sTask.java View File

@@ -98,4 +98,13 @@ public class K8sTask extends BaseEntity{
boolean needDelete = stopUnixTime < time && K8sTaskStatusEnum.UNEXECUTED.getStatus().equals(stopStatus);
return needDelete && (needCreate ^ needDelete);
}

/**
* Check whether the task has timed out
* @param time current unix time, in seconds
* @return true if both the scheduled apply and stop times are already past and both are still unexecuted
*/
public boolean overtime(Long time){
return applyUnixTime < time && K8sTaskStatusEnum.UNEXECUTED.getStatus().equals(applyStatus) && stopUnixTime < time && K8sTaskStatusEnum.UNEXECUTED.getStatus().equals(stopStatus);
}
}
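
A quick usage sketch of the new overtime check (a scratch snippet, not project code; it assumes the setters K8sTask already exposes elsewhere in this diff, with unix times in seconds):

// Both scheduled times in the past, both actions still unexecuted -> overtime.
K8sTask task = new K8sTask();
task.setApplyUnixTime(100L);
task.setStopUnixTime(200L);
task.setApplyStatus(K8sTaskStatusEnum.UNEXECUTED.getStatus());
task.setStopStatus(K8sTaskStatusEnum.UNEXECUTED.getStatus());

boolean late = task.overtime(300L);   // true: apply (100) and stop (200) are both before 300
boolean early = task.overtime(150L);  // false: stopUnixTime (200) is still in the future at 150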

+ 1
- 1
dubhe-server/dubhe-k8s/src/main/java/org/dubhe/k8s/event/watcher/PodWatcher.java View File

@@ -97,7 +97,7 @@ public class PodWatcher implements CommandLineRunner, Watcher<Pod> {
*/
@Override
public void onClose(KubernetesClientException cause) {
LogUtil.warn(LogEnum.BIZ_K8S, cause.getMessage());
LogUtil.warn(LogEnum.BIZ_K8S," onClose=>cause : {}", cause.getMessage());
k8sUtils.getClient().pods().inAnyNamespace().watch(this);
}



+ 1
- 1
dubhe-server/dubhe-k8s/src/main/java/org/dubhe/k8s/observer/TrainJobObserver.java View File

@@ -55,7 +55,7 @@ public class TrainJobObserver implements Observer {
boolean trainJobFailed = PodPhaseEnum.FAILED.getPhase().equals(pod.getPhase()) && BizEnum.ALGORITHM.getBizCode().equals(pod.getBusinessLabel()) && SpringContextHolder.getActiveProfile().equals(pod.getLabel(K8sLabelConstants.PLATFORM_RUNTIME_ENV));
if (trainJobFailed){
LogUtil.warn(LogEnum.BIZ_K8S,"delete failed train job resourceName {};phase {};podName {}",pod.getLabel(K8sLabelConstants.BASE_TAG_SOURCE),pod.getPhase(),pod.getName());
//trainJobApi.delete(pod.getNamespace(),pod.getLabel(K8sLabelConstants.BASE_TAG_SOURCE));
trainJobApi.delete(pod.getNamespace(),pod.getLabel(K8sLabelConstants.BASE_TAG_SOURCE));
}
}
}


+ 2
- 1
dubhe-server/dubhe-system/src/main/java/org/dubhe/service/RecycleTaskService.java View File

@@ -73,8 +73,9 @@ public interface RecycleTaskService {
* 回收文件资源
*
* @param recycleTask 回收任务
* @return String failure message returned when the recycle task fails
*/
void deleteFileByCMD(RecycleTask recycleTask);
String deleteFileByCMD(RecycleTask recycleTask);

/**
* 修改回收任务状态


+ 30
- 21
dubhe-server/dubhe-system/src/main/java/org/dubhe/service/impl/RecycleTaskServiceImpl.java View File

@@ -17,10 +17,12 @@
package org.dubhe.service.impl;

import cn.hutool.core.date.DateUtil;
import cn.hutool.core.util.RandomUtil;
import cn.hutool.core.util.StrUtil;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import org.dubhe.base.MagicNumConstant;
import org.dubhe.base.ResponseCode;
import org.dubhe.config.NfsConfig;
import org.dubhe.config.RecycleConfig;
import org.dubhe.constatnts.UserConstant;
@@ -31,7 +33,6 @@ import org.dubhe.domain.dto.UserDTO;
import org.dubhe.domain.entity.RecycleTask;
import org.dubhe.enums.LogEnum;
import org.dubhe.enums.RecycleStatusEnum;
import org.dubhe.enums.RecycleTypeEnum;
import org.dubhe.exception.BusinessException;
import org.dubhe.service.RecycleTaskService;
import org.dubhe.utils.*;
@@ -122,13 +123,6 @@ public class RecycleTaskServiceImpl implements RecycleTaskService {
recycleTaskCreateDTO.setRecycleDelayDate(recycleConfig.getDate());
}

//如果是删除文件任务,校验根目录及系统环境
if (Objects.equals(recycleTaskCreateDTO.getRecycleType(), RecycleTypeEnum.FILE.getCode()) &&
recycleTaskCreateDTO.getRecycleCondition().startsWith(nfsConfig.getRootDir() + nfsConfig.getBucket())) {
LogUtil.error(LogEnum.GARBAGE_RECYCLE, "User {} created recycle task failed,file sourcePath :{} invalid", currentUser.getUsername(), recycleTaskCreateDTO.getRecycleCondition());
throw new BusinessException("创建回收文件任务失败");
}

RecycleTask recycleTask = new RecycleTask();
BeanUtils.copyProperties(recycleTaskCreateDTO, recycleTask);

@@ -153,11 +147,14 @@ public class RecycleTaskServiceImpl implements RecycleTaskService {
public void delTempInvalidResources(String sourcePath) {
UserDTO currentUser = JwtUtils.getCurrentUserDto();
if (currentUser.getId() != UserConstant.ADMIN_USER_ID) {
throw new BusinessException("不支持普通用户操作");
throw new BusinessException(ResponseCode.UNAUTHORIZED, "不支持普通用户操作");
}
RecycleTask recycleTask = new RecycleTask();
recycleTask.setRecycleCondition(sourcePath);
deleteFileByCMD(recycleTask);
String resMsg = deleteFileByCMD(recycleTask);
if (StrUtil.isNotEmpty(resMsg)) {
throw new BusinessException(ResponseCode.ERROR, resMsg);
}
}

/**
@@ -195,34 +192,47 @@ public class RecycleTaskServiceImpl implements RecycleTaskService {

List<RecycleTask> recycleTaskList = recycleTaskMapper.selectList(new LambdaQueryWrapper<RecycleTask>()
.ne(RecycleTask::getRecycleStatus, RecycleStatusEnum.SUCCEEDED.getCode())
.le(RecycleTask::getRecycleDelayDate, new Date()));
.le(RecycleTask::getRecycleDelayDate, DateUtil.format(new Date(), "yyyy-MM-dd")));
return recycleTaskList;

}

/**
* 回收文件资源
*
* Recycle invalid file resources in the Dubhe one-stop platform
* How it works: read the invalid file path from the recycle task table and delete it with a linux command
* The file path must follow a format such as /nfs/<current environment>/<file or directory to delete> (at least three directory levels)
* @param recycleTask recycle task
* @return String failure message returned when the recycle task fails
*/
@Override
public void deleteFileByCMD(RecycleTask recycleTask) {
public String deleteFileByCMD(RecycleTask recycleTask) {
String sourcePath = nfsUtil.formatPath(recycleTask.getRecycleCondition());
//判断该路径是否存在文件或文件夹
String emptyDir = "";
if (!nfsUtil.fileOrDirIsEmpty(sourcePath) && sourcePath.startsWith(nfsUtil.formatPath(nfsConfig.getRootDir() + nfsConfig.getBucket()))) {
try {
sourcePath = sourcePath.endsWith(StrUtil.SLASH) ? sourcePath : sourcePath + StrUtil.SLASH;
emptyDir = "/tmp/empty_" + recycleTask.getId() + StrUtil.SLASH;
String errMsg = "";
String nfsBucket = nfsUtil.formatPath(nfsConfig.getRootDir() + nfsConfig.getBucket() + StrUtil.SLASH);
sourcePath = sourcePath.endsWith(StrUtil.SLASH) ? sourcePath : sourcePath + StrUtil.SLASH;
try {
//Check that the file to recycle exists and that it sits at least one directory level below the current environment directory, e.g. /nfs/dubhe-test/xxxx/
if (!nfsUtil.fileOrDirIsEmpty(sourcePath)
&& sourcePath.startsWith((nfsBucket))
&& sourcePath.length() > nfsBucket.length()) {
emptyDir = "/tmp/empty_" + (recycleTask.getId() == null ? RandomUtil.randomString(MagicNumConstant.TWO) : recycleTask.getId()) + StrUtil.SLASH;
LogUtil.info(LogEnum.GARBAGE_RECYCLE, "recycle task sourcePath:{},emptyDir:{}", sourcePath, emptyDir);
Process process = Runtime.getRuntime().exec(new String[]{"/bin/sh", "-c", String.format(RecycleConfig.DEL_COMMAND, userName, nfsIp, emptyDir, emptyDir, sourcePath, emptyDir, sourcePath)});
//资源回收完毕修改回收表状态
if (recycleTask.getId() != null) {
updateRecycleStatus(recycleTask, recycleSourceIsOk(process));
}
} catch (Exception e) {
LogUtil.error(LogEnum.GARBAGE_RECYCLE, "recycle task id:{} Run failed,fail Exception:{}", recycleTask.getId(), e);
} else {
LogUtil.info(LogEnum.GARBAGE_RECYCLE, "recycle task failure!!! sourcePath:{}", sourcePath);
errMsg = "recycle task failure!!! sourcePath:" + sourcePath;
}
} catch (Exception e) {
LogUtil.error(LogEnum.GARBAGE_RECYCLE, "recycle task id:{} Run failed, fail Exception:{}", recycleTask.getId(), e);
errMsg = "recycle task failure!!! sourcePath:" + sourcePath + "and exception message:" + e.getMessage();
}
return errMsg;
}

/**
@@ -316,5 +326,4 @@ public class RecycleTaskServiceImpl implements RecycleTaskService {
}
}
}

}
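
The rewritten deleteFileByCMD above validates the path, then formats RecycleConfig.DEL_COMMAND with an empty temporary directory and runs it through /bin/sh. The template itself does not appear in this diff; the sketch below only illustrates the empty-directory rsync deletion pattern such a command typically uses, so the command string, user and host are assumptions rather than the project's actual DEL_COMMAND:

public class FastDeleteSketch {

    /**
     * Build a shell command that empties sourcePath by rsync-ing an empty directory over it,
     * which is usually much faster than rm -rf for directories with many small files.
     * All parameters are illustrative; the real Dubhe DEL_COMMAND may differ.
     */
    static String buildCommand(String user, String host, String emptyDir, String sourcePath) {
        return String.format(
                "ssh %s@%s \"mkdir -p %s && rsync -a --delete %s %s && rmdir %s\"",
                user, host, emptyDir, emptyDir, sourcePath, emptyDir);
    }

    public static void main(String[] args) {
        // Paths mirror the service code: both end with a trailing slash.
        String cmd = buildCommand("root", "127.0.0.1", "/tmp/empty_42/", "/nfs/dubhe-dev/some-job/");
        System.out.println(cmd);
        // Executed the same way as in the service implementation:
        // Runtime.getRuntime().exec(new String[]{"/bin/sh", "-c", cmd});
    }
}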

+ 3
- 1
dubhe-server/dubhe-system/src/test/java/org/dubhe/BaseTest.java View File

@@ -19,6 +19,7 @@ package org.dubhe;

import org.apache.shiro.SecurityUtils;
import org.apache.shiro.subject.Subject;
import org.apache.shiro.util.ThreadContext;
import org.apache.shiro.web.mgt.DefaultWebSecurityManager;
import org.dubhe.support.login.UsernamePasswordCaptchaToken;
import org.junit.Assert;
@@ -65,8 +66,9 @@ public class BaseTest {
public void setup() {
this.mockMvc = MockMvcBuilders.webAppContextSetup(this.wac).build();
SecurityUtils.setSecurityManager(defaultWebSecurityManager);
ThreadContext.bind(defaultWebSecurityManager);
Subject subject = SecurityUtils.getSubject();
UsernamePasswordCaptchaToken token = new UsernamePasswordCaptchaToken("admin", "admin");
UsernamePasswordCaptchaToken token = new UsernamePasswordCaptchaToken("admin", "123456");
token.setRememberMe(true);
subject.login(token);
}


+ 5
- 0
dubhe-server/dubhe-task/src/main/java/org/dubhe/task/k8s/DelayCudResourceTask.java View File

@@ -116,6 +116,11 @@ public class DelayCudResourceTask {
k8sTaskService.update(k8sTask);
});
}
if (k8sTask.overtime(curUnixTime)){
k8sTask.setApplyStatus(K8sTaskStatusEnum.EXECUTED.getStatus());
k8sTask.setStopStatus(K8sTaskStatusEnum.EXECUTED.getStatus());
k8sTaskService.update(k8sTask);
}
}
}catch (Exception e){
LogUtil.error(LogEnum.BIZ_K8S,"delayCudResource error {}",e);


+ 22
- 19
dubhe-server/dubhe-task/src/main/resources/config/application-dev.yml View File

@@ -8,7 +8,7 @@ spring:
redis:
#数据库索引
database: 0
host:
host: 127.0.0.1
port: 6379
password:
#连接超时时间
@@ -17,9 +17,9 @@ spring:
druid:
db-type: com.alibaba.druid.pool.DruidDataSource
driverClassName: net.sf.log4jdbc.sql.jdbcapi.DriverSpy
url: jdbc:log4jdbc:mysql://127.0.0.1:3306/dubhe-dev?serverTimezone=Asia/Shanghai&characterEncoding=utf8&useSSL=false&allowMultiQueries=true
username:
password:
url: jdbc:log4jdbc:mysql://127.0.0.1:3306/dubhe-dev?serverTimezone=Asia/Shanghai&characterEncoding=utf8&useSSL=false&allowMultiQueries=true&useInformationSchema=true
username: test
password: test

# 初始化配置
initial-size: 3
@@ -59,8 +59,8 @@ spring:
type: com.alibaba.druid.pool.DruidDataSource
driver-class-name: net.sf.log4jdbc.sql.jdbcapi.DriverSpy
url: jdbc:log4jdbc:mysql://127.0.0.1:3306/dubhe-dev?serverTimezone=Asia/Shanghai&characterEncoding=utf8&useSSL=false&allowMultiQueries=true&useInformationSchema=true
username:
password:
username: test
password: test

#指定data_dataset表 主键id 生成策略
sharding:
@@ -83,22 +83,22 @@ spring:
show: true

k8s:
# k8s集群配置文件
# k8s cluster config file: copy $HOME/.kube/config from the k8s master node into dubhe-task/src/main/resources/ and rename it kubeconfig
kubeconfig: kubeconfig
# nfs服务暴露的IP地址 如需测试需修改为合适的地址
nfs:
nfs: 127.0.0.1
#nfs服务端 共享目录
nfs-root-path: /nfs/
nfs-root-windows-path: "Z:"
# 命名空间关键字
namespace: namespace
# k8s ingress域名 如需测试需修改为合适的域名
host:
# k8s ingress-controller 对外port
port: 32493
# elasticsearch暴露的服务地址
host: notebook.test.com
# External port of the k8s ingress-controller. How to obtain it: after deploying ingress-controller, run kubectl get svc -A | grep 'ingress-nginx-controller' on the k8s master node and take the external port mapped to 80
port: 33334
# Service address exposed by elasticsearch. How to obtain it: after deploying the cluster log management component, run kubectl get svc -A | grep 'elasticsearch' on the k8s master node and take the external port mapped to 9200
elasticsearch:
hostlist: ${eshostlist::32321}
hostlist: ${eshostlist:127.0.0.1:33333}
# 日志采集配置信息
log:
type: _doc
@@ -116,13 +116,14 @@ k8s:
# 展示Pod的CPU使用率,Memory使用量,GPU使用率的grafana地址
pod:
metrics:
grafanaUrl: http://127.0.0.1:30006/d/Sx0_4-WGk/jian-kong-xin-xi?orgId=1&refresh=5s&kiosk&var-pod=
grafanaUrl: http://127.0.0.1:30006/d/job/monitor?orgId=1&refresh=5s&kiosk&var-pod=
nfs-storage-class-name: zjlab-nfs-storage
#配置harbor
harbor:
address:
username:
password:
# Harbor service domain name; resolve a domain you own to the IP of the server running the harbor service
address: harbor.test.com
username: admin
password: Harbor12345
model-name: train
# data模块配置
data:
@@ -157,8 +158,10 @@ data:
# minio配置
minio:
url: http://127.0.0.1:9000/
accessKey:
secretKey:
# MINIO_ACCESS_KEY specified when deploying minio
accessKey: admin
# MINIO_SECRET_KEY specified when deploying minio
secretKey: 123@abc.com
bucketName: dubhe-dev
presignedUrlExpiryTime: 300
annotation: /annotation/
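
The eshostlist change above also fixes the placeholder default: ${eshostlist:127.0.0.1:33333} resolves to the eshostlist property when it is set and to 127.0.0.1:33333 otherwise, whereas the old ${eshostlist::32321} fell back to ':32321' with no host. A minimal sketch of the same Spring placeholder semantics via @Value (field and class names are illustrative):

import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;

@Component
public class EsHostConfig {

    // Everything after the first ':' is the default, so this resolves to
    // "127.0.0.1:33333" unless an eshostlist property or environment variable is set.
    @Value("${eshostlist:127.0.0.1:33333}")
    private String hostlist;

    public String[] hosts() {
        // hostlist may hold several "host:port" entries separated by commas.
        return hostlist.split(",");
    }
}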


+ 39
- 17
dubhe-server/dubhe-task/src/main/resources/config/application.yml View File

@@ -8,13 +8,14 @@ spring:
redis:
repositories:
enabled: false
# 邮箱配置

# Mail configuration, used to send email verification codes for user registration
mail:
host:
host: smtp.163.com
# 邮件的发送者 163邮箱(开发测试时使用 发送延时 20秒,每天发送量限制 50)
username:
username: test@163.com
# SMTP授权密码
password:
password: AAAAAAAAAAAAAAAA
protocol: smtp
properties.mail.smtp.auth: true
properties.mail.smtp.port: 465 #465或者994
@@ -31,10 +32,10 @@ loginCode:
height: 28
length: 4

#密码加密传输,前端公钥加密,后端私钥解密
# Passwords are transmitted encrypted: the front end encrypts with the public key and the back end decrypts with the private key; used together with publicKey in the front end's src/settings.js; the key pair can be generated with the genKeyPair method
rsa:
private_key:
public_key:
private_key: MIIBUwIBADANBgkqhkiG9w0BAQEFAASCAT0wggE5AgEAAkEA0vfvyTdGJkdbHkB8mp0f3FE0GYP3AYPaJF7jUd1M0XxFSE2ceK3k2kw20YvQ09NJKk+OMjWQl9WitG9pB6tSCQIDAQABAkA2SimBrWC2/wvauBuYqjCFwLvYiRYqZKThUS3MZlebXJiLB+Ue/gUifAAKIg1avttUZsHBHrop4qfJCwAI0+YRAiEA+W3NK/RaXtnRqmoUUkb59zsZUBLpvZgQPfj1MhyHDz0CIQDYhsAhPJ3mgS64NbUZmGWuuNKp5coY2GIj/zYDMJp6vQIgUueLFXv/eZ1ekgz2Oi67MNCk5jeTF2BurZqNLR3MSmUCIFT3Q6uHMtsB9Eha4u7hS31tj1UWE+D+ADzp59MGnoftAiBeHT7gDMuqeJHPL4b+kC+gzV4FGTfhR9q3tTbklZkD2A==
public_key: MFwwDQYJKoZIhvcNAQEBBQADSwAwSAJBANL378k3RiZHWx5AfJqdH9xRNBmD9wGD2iRe41HdTNF8RUhNnHit5NpMNtGL0NPTSSpPjjI1kJfVorRvaQerUgkCAwEAAQ==

#jwt
jwt:
@@ -42,7 +43,7 @@ jwt:
# 令牌前缀
token-start-with: Bearer
# 必须使用最少88位的Base64对该令牌进行编码
base64-secret:
base64-secret: ZmQ0ZGI5NjQ0MDQwY2I4MjMxY2Y3ZmI3MjdhN2ZmMjNhODViOTg1ZGE0NTBjMGM4NDA5NzYxMjdjOWMwYWRmZTBlZjlhNGY3ZTg4Y2U3YTE1ODVkZDU5Y2Y3OGYwZWE1NzUzNWQ2YjFjZDc0NGMxZWU2MmQ3MjY1NzJmMTIzNDU=
# 令牌过期时间 此处单位/毫秒 ,默认24小时,可在此网站生成 https://www.convertworld.com/zh-hans/time/milliseconds.html
token-validity-in-seconds: 86400000
# 在线用户key
@@ -55,7 +56,7 @@ swagger:
enabled: true

# 后台添加用户的初始密码
initial_password: ""
initial_password: "123456"

train-job:
namespace: "namespace-"
@@ -68,15 +69,23 @@ train-job:
log-path: "log"
load-path: "load"
load-key: "model_load_dir"
load-val-dataset-key: "val_data_url"
visualized-log-path: "visualizedlog"
docker-dataset-path: "/dataset"
docker-train-path: "/train"
docker-train-path: "/workspace"
docker-model-path: "/modeldir"
docker-val-dataset-path: "/valdataset"
docker-out-path: "train_out=${train-job.docker-train-path}/${train-job.out-path}"
docker-log-path: "train_log=${train-job.docker-train-path}/${train-job.log-path}"
docker-visualized-log-path: "train_visualized_log=${train-job.docker-train-path}/${train-job.visualized-log-path}"
docker-dataset: "data_url=${train-job.docker-dataset-path}"
eight: "8"
plus-eight: "+8"
node-ips: "node_ips"
node-num: "num_nodes"
gpu-num-per-node: "gpu_num_per_node"



minioweb:
GetToken:
@@ -88,6 +97,7 @@ minioweb:
zip:
url: minio/zip?token=


train-algorithm:
#是否输出训练结果
is-train-out: true
@@ -99,12 +109,7 @@ train-algorithm:
algorithm-source: 1
#设置fork默认值
fork: false
#上传算法文件路径名
upload-algorithm-path: "upload-temp/algorithm-manage"

docker:
host:
port:

# 配置slq打印日志
logging:
@@ -118,7 +123,9 @@ logging:
dubhe-proxy:
visual:
keyword: visual
server: 10.5.18.239
# Backend IP of the visualization service
server: 127.0.0.1
# Backend port of the visualization service
port: 9898
refine:
keyword: refine
@@ -129,4 +136,19 @@ dubhe-proxy:
delay:
notebook:
#模型开发延时关闭时间
delete: 240
delete: 240

# Default garbage-recycling configuration
recycle:
# Expiry time settings
timeout:
# Max lifetime, in hours, of files after a user uploads them to the temporary path
file-valid: 24
# Default max lifetime, in days, of a user's files after the user deletes the data
date: 7
# Max lifetime, in days, of algorithm files after a user deletes an algorithm
algorithm-valid: 3
# Max lifetime, in days, of model files after a user deletes a model
model-valid: 3
# Max lifetime, in days, of training-management files after a user deletes a training job
train-valid: 3
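
The recycle block above is ordinary Spring configuration; one way it could be bound is sketched below with a hypothetical properties class (the project's actual RecycleConfig is not shown in this diff):

import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.stereotype.Component;

@Component
@ConfigurationProperties(prefix = "recycle.timeout")
public class RecycleTimeoutProperties {

    /** Max lifetime, in hours, of files uploaded to the temporary path. */
    private int fileValid = 24;

    /** Default max lifetime, in days, of a user's files after deletion. */
    private int date = 7;

    /** Max lifetime, in days, of algorithm files after an algorithm is deleted. */
    private int algorithmValid = 3;

    /** Max lifetime, in days, of model files after a model is deleted. */
    private int modelValid = 3;

    /** Max lifetime, in days, of training-management files after a training job is deleted. */
    private int trainValid = 3;

    // Relaxed binding maps file-valid -> fileValid, algorithm-valid -> algorithmValid, and so on.
    public int getFileValid() { return fileValid; }
    public void setFileValid(int fileValid) { this.fileValid = fileValid; }

    public int getDate() { return date; }
    public void setDate(int date) { this.date = date; }

    public int getAlgorithmValid() { return algorithmValid; }
    public void setAlgorithmValid(int algorithmValid) { this.algorithmValid = algorithmValid; }

    public int getModelValid() { return modelValid; }
    public void setModelValid(int modelValid) { this.modelValid = modelValid; }

    public int getTrainValid() { return trainValid; }
    public void setTrainValid(int trainValid) { this.trainValid = trainValid; }
}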

+ 1
- 1
dubhe-server/sql/v1/09-Dubhe-Patch.sql View File

@@ -225,7 +225,7 @@ start transaction; -- 整个存储过程指定为一个事务
apply_status tinyint(1) default 0 not null comment '状态(0无需操作,1未创建,2已创建)',
stop_unix_time bigint default 0 not null comment '资源停止unix时间(精确到秒)',
stop_display_time timestamp null comment '资源停止展示时间',
stop_status tinyint(1) default 0 not null comment '状态(0无需操作,1已停止,2已创建)',
stop_status tinyint(1) default 0 not null comment '状态(0无需操作,1未停止,2已停止)',
create_time timestamp default CURRENT_TIMESTAMP null comment '创建时间',
create_user_id bigint(20) default 0 null comment '创建用户ID',
update_time timestamp default CURRENT_TIMESTAMP null on update CURRENT_TIMESTAMP comment '更新时间',

