Commit 030437f3 authored by qq389401879, committed by dailidong

Add workflow export/import; change the default seconds and minutes of the frontend timer cron expression from * to 0 (#709)

* Add workflow export/import; change the default seconds and minutes of the frontend timer cron expression from * to 0

* Switch the workflow export/import format from Excel to JSON to reduce dependencies
Parent f00ab67c
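
For context, the change to the frontend timing default matters because "* * * * * ? *" matches every second, while "0 0 * * * ? *" fires only at second 0 of minute 0, i.e. once per hour. A minimal sketch (not part of this commit) that illustrates the difference, assuming Quartz's CronExpression is available on the classpath:

import org.quartz.CronExpression;

import java.text.ParseException;
import java.util.Date;

public class CrontabDefaultDemo {
    public static void main(String[] args) throws ParseException {
        Date now = new Date();
        // old frontend default: seconds and minutes are both "*", so the trigger matches every second
        CronExpression oldDefault = new CronExpression("* * * * * ? *");
        // new frontend default: seconds and minutes are fixed to 0, so the trigger matches once per hour
        CronExpression newDefault = new CronExpression("0 0 * * * ? *");
        System.out.println("old default next fire time: " + oldDefault.getNextValidTimeAfter(now));
        System.out.println("new default next fire time: " + newDefault.getNextValidTimeAfter(now));
    }
}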
......@@ -30,9 +30,11 @@ import org.springframework.http.HttpStatus;
import org.springframework.web.bind.annotation.*;
import springfox.documentation.annotations.ApiIgnore;
import javax.servlet.http.HttpServletResponse;
import java.util.Map;
import static cn.escheduler.api.enums.Status.*;
import static cn.escheduler.api.enums.Status.EXPORT_PROCESS_DEFINE_BY_ID_ERROR;
/**
......@@ -430,4 +432,31 @@ public class ProcessDefinitionController extends BaseController{
}
}
/**
* export process definition by id
*
* @param loginUser login user
* @param projectName project name
* @param processDefinitionId process definition id
* @param response http response used to return the exported json file
*/
@ApiOperation(value = "exportProcessDefinitionById", notes= "EXPORT_PROCCESS_DEFINITION_BY_ID_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "processDefinitionId", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100")
})
@GetMapping(value="/export")
@ResponseBody
public void exportProcessDefinitionById(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@PathVariable String projectName,
@RequestParam("processDefinitionId") Integer processDefinitionId,
HttpServletResponse response){
try{
logger.info("export process definition by id, login user:{}, project name:{}, process definition id:{}",
loginUser.getUserName(), projectName, processDefinitionId);
processDefinitionService.exportProcessDefinitionById(loginUser, projectName, processDefinitionId,response);
}catch (Exception e){
logger.error(EXPORT_PROCESS_DEFINE_BY_ID_ERROR.getMsg(),e);
}
}
}
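
For reference, the exported definition can be fetched by any HTTP client as a plain GET against this endpoint; the controller streams the result as an attachment named <definition name>.json. A minimal client-side sketch (not part of this commit); the host, context path, project name, definition id and session cookie below are placeholders:

import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;

public class ExportProcessDefinitionClient {
    public static void main(String[] args) throws Exception {
        // placeholder base url and query parameters, adjust to the actual deployment
        String url = "http://localhost:12345/escheduler/projects/demo-project/process/export"
                + "?processDefinitionId=100";
        HttpURLConnection conn = (HttpURLConnection) new URL(url).openConnection();
        conn.setRequestMethod("GET");
        conn.setRequestProperty("Cookie", "sessionId=<your-session-id>"); // placeholder authentication
        try (InputStream in = conn.getInputStream()) {
            // save the attachment written by exportProcessDefinitionById
            Files.copy(in, Paths.get("process-definition.json"), StandardCopyOption.REPLACE_EXISTING);
        }
        conn.disconnect();
    }
}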
......@@ -18,6 +18,7 @@ package cn.escheduler.api.controller;
import cn.escheduler.api.enums.Status;
import cn.escheduler.api.service.ProcessDefinitionService;
import cn.escheduler.api.service.ProjectService;
import cn.escheduler.api.utils.Constants;
import cn.escheduler.api.utils.Result;
......@@ -32,6 +33,7 @@ import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.web.bind.annotation.*;
import org.springframework.web.multipart.MultipartFile;
import springfox.documentation.annotations.ApiIgnore;
import java.util.Map;
......@@ -51,6 +53,9 @@ public class ProjectController extends BaseController {
@Autowired
private ProjectService projectService;
@Autowired
private ProcessDefinitionService processDefinitionService;
/**
* create project
*
......@@ -249,5 +254,30 @@ public class ProjectController extends BaseController {
}
}
/**
* import process definition
*
* @param loginUser login user
* @param file uploaded process definition json file
* @return import result
*/
@ApiOperation(value = "importProcessDefinition", notes= "EXPORT_PROCCESS_DEFINITION_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "file", value = "RESOURCE_FILE", required = true, dataType = "MultipartFile")
})
@PostMapping(value="/importProcessDefinition")
public Result importProcessDefinition(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam("file") MultipartFile file){
try{
logger.info("import process definition by id, login user:{}",
loginUser.getUserName());
Map<String, Object> result = processDefinitionService.importProcessDefinition(loginUser,file);
return returnDataList(result);
}catch (Exception e){
logger.error(IMPORT_PROCESS_DEFINE_ERROR.getMsg(),e);
return error(IMPORT_PROCESS_DEFINE_ERROR.getCode(), IMPORT_PROCESS_DEFINE_ERROR.getMsg());
}
}
}
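
Similarly, an exported json file can be re-imported by POSTing it as multipart form data to this endpoint; the part name must be "file" to match @RequestParam("file"). A minimal client-side sketch (not part of this commit), assuming Apache HttpClient plus its mime module; the host, context path and session cookie are placeholders:

import java.io.File;

import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.entity.mime.MultipartEntityBuilder;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.util.EntityUtils;

public class ImportProcessDefinitionClient {
    public static void main(String[] args) throws Exception {
        try (CloseableHttpClient client = HttpClients.createDefault()) {
            // placeholder base url, adjust to the actual deployment
            HttpPost post = new HttpPost("http://localhost:12345/escheduler/projects/importProcessDefinition");
            post.setHeader("Cookie", "sessionId=<your-session-id>"); // placeholder authentication
            post.setEntity(MultipartEntityBuilder.create()
                    .addBinaryBody("file", new File("process-definition.json")) // part name matches @RequestParam("file")
                    .build());
            try (CloseableHttpResponse response = client.execute(post)) {
                System.out.println(EntityUtils.toString(response.getEntity()));
            }
        }
    }
}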
......@@ -213,6 +213,8 @@ public enum Status {
BATCH_DELETE_PROCESS_DEFINE_ERROR(50025,"batch delete process definition error"),
BATCH_DELETE_PROCESS_DEFINE_BY_IDS_ERROR(50026,"batch delete process definition by ids {0} error"),
TENANT_NOT_SUITABLE(50027,"there is not any tenant suitable, please choose a tenant available."),
EXPORT_PROCESS_DEFINE_BY_ID_ERROR(50028,"export process definition by id error"),
IMPORT_PROCESS_DEFINE_ERROR(50029,"import process definition error"),
HDFS_NOT_STARTUP(60001,"hdfs not startup"),
HDFS_TERANT_RESOURCES_FILE_EXISTS(60002,"resource file exists,please delete resource first"),
......
......@@ -21,10 +21,7 @@ import cn.escheduler.api.dto.treeview.TreeViewDto;
import cn.escheduler.api.enums.Status;
import cn.escheduler.api.utils.Constants;
import cn.escheduler.api.utils.PageInfo;
import cn.escheduler.common.enums.Flag;
import cn.escheduler.common.enums.ReleaseState;
import cn.escheduler.common.enums.TaskType;
import cn.escheduler.common.enums.UserType;
import cn.escheduler.common.enums.*;
import cn.escheduler.common.graph.DAG;
import cn.escheduler.common.model.TaskNode;
import cn.escheduler.common.model.TaskNodeRelation;
......@@ -38,14 +35,25 @@ import cn.escheduler.dao.ProcessDao;
import cn.escheduler.dao.mapper.*;
import cn.escheduler.dao.model.*;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.fasterxml.jackson.core.JsonProcessingException;
import org.apache.commons.lang3.ObjectUtils;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.MediaType;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.web.multipart.MultipartFile;
import javax.servlet.ServletOutputStream;
import javax.servlet.http.HttpServletResponse;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
......@@ -86,6 +94,12 @@ public class ProcessDefinitionService extends BaseDAGService {
@Autowired
private ProcessDao processDao;
@Autowired
private DataSourceMapper dataSourceMapper;
@Autowired
private WorkerGroupMapper workerGroupMapper;
/**
* create process definition
*
......@@ -142,7 +156,7 @@ public class ProcessDefinitionService extends BaseDAGService {
processDefine.setFlag(Flag.YES);
processDefineMapper.insert(processDefine);
putMsg(result, Status.SUCCESS);
result.put("processDefinitionId",processDefine.getId());
return result;
}
......@@ -504,6 +518,239 @@ public class ProcessDefinitionService extends BaseDAGService {
return result;
}
/**
* export process definition by id
*
* @param loginUser login user
* @param projectName project name
* @param processDefinitionId process definition id
* @param response http response used to return the exported json file
*/
public void exportProcessDefinitionById(User loginUser, String projectName, Integer processDefinitionId, HttpServletResponse response) {
Project project = projectMapper.queryByName(projectName);
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
Status resultStatus = (Status) checkResult.get(Constants.STATUS);
if (resultStatus == Status.SUCCESS) {
ProcessDefinition processDefinition = processDefineMapper.queryByDefineId(processDefinitionId);
if (processDefinition != null) {
JSONObject jsonObject = JSONUtils.parseObject(processDefinition.getProcessDefinitionJson());
JSONArray jsonArray = (JSONArray) jsonObject.get("tasks");
for (int i = 0; i < jsonArray.size(); i++) {
JSONObject taskNode = jsonArray.getJSONObject(i);
if (taskNode.get("type") != null && taskNode.get("type") != "") {
String taskType = taskNode.getString("type");
if(taskType.equals(TaskType.SQL.name()) || taskType.equals(TaskType.PROCEDURE.name())){
JSONObject sqlParameters = JSONUtils.parseObject(taskNode.getString("params"));
DataSource dataSource = dataSourceMapper.queryById((Integer) sqlParameters.get("datasource"));
if (dataSource != null) {
sqlParameters.put("datasourceName", dataSource.getName());
}
taskNode.put("params", sqlParameters);
}
}
}
jsonObject.put("tasks", jsonArray);
processDefinition.setProcessDefinitionJson(jsonObject.toString());
Map<String, Object> row = new LinkedHashMap<>();
row.put("projectName", processDefinition.getProjectName());
row.put("processDefinitionName", processDefinition.getName());
row.put("processDefinitionJson", processDefinition.getProcessDefinitionJson());
row.put("processDefinitionDesc", processDefinition.getDesc());
row.put("processDefinitionLocations", processDefinition.getLocations());
row.put("processDefinitionConnects", processDefinition.getConnects());
List<Schedule> schedules = scheduleMapper.queryByProcessDefinitionId(processDefinitionId);
if (schedules.size() > 0) {
Schedule schedule = schedules.get(0);
row.put("scheduleWarningType", schedule.getWarningType());
row.put("scheduleWarningGroupId", schedule.getWarningGroupId());
row.put("scheduleStartTime", schedule.getStartTime());
row.put("scheduleEndTime", schedule.getEndTime());
row.put("scheduleCrontab", schedule.getCrontab());
row.put("scheduleFailureStrategy", schedule.getFailureStrategy());
row.put("scheduleReleaseState", schedule.getReleaseState());
row.put("scheduleProcessInstancePriority", schedule.getProcessInstancePriority());
if (schedule.getWorkerGroupId() == -1) {
row.put("scheduleWorkerGroupId", -1);
} else {
// look up the worker group by its id, not by the schedule id
WorkerGroup workerGroup = workerGroupMapper.queryById(schedule.getWorkerGroupId());
if (workerGroup != null) {
row.put("scheduleWorkerGroupName", workerGroup.getName());
}
}
}
String rowsJson = JSONUtils.toJsonString(row);
response.setContentType(MediaType.APPLICATION_JSON_UTF8_VALUE);
response.setHeader("Content-Disposition", "attachment;filename="+processDefinition.getName()+".json");
// try-with-resources closes the streams exactly once and logs write failures
try (ServletOutputStream out = response.getOutputStream();
     BufferedOutputStream buff = new BufferedOutputStream(out)) {
    buff.write(rowsJson.getBytes("UTF-8"));
    buff.flush();
} catch (IOException e) {
    logger.error("export process definition failed to write the response", e);
}
}
}
}
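/**
 * import process definition from an uploaded json file (the format produced by exportProcessDefinitionById)
 *
 * @param loginUser login user
 * @param file uploaded process definition json file
 * @return import result
 */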
@Transactional(value = "TransactionManager", rollbackFor = Exception.class)
public Map<String, Object> importProcessDefinition(User loginUser, MultipartFile file) {
Map<String, Object> result = new HashMap<>(5);
JSONObject json = null;
try(InputStreamReader inputStreamReader = new InputStreamReader( file.getInputStream(), "UTF-8" )) {
BufferedReader streamReader = new BufferedReader(inputStreamReader);
StringBuilder responseStrBuilder = new StringBuilder();
String inputStr;
while ((inputStr = streamReader.readLine()) != null) {
responseStrBuilder.append(inputStr);
}
json = JSONObject.parseObject(responseStrBuilder.toString());
if(json != null){
String projectName = null;
String processDefinitionName = null;
String processDefinitionJson = null;
String processDefinitionDesc = null;
String processDefinitionLocations = null;
String processDefinitionConnects = null;
String scheduleWarningType = null;
String scheduleWarningGroupId = null;
String scheduleStartTime = null;
String scheduleEndTime = null;
String scheduleCrontab = null;
String scheduleFailureStrategy = null;
String scheduleReleaseState = null;
String scheduleProcessInstancePriority = null;
String scheduleWorkerGroupId = null;
String scheduleWorkerGroupName = null;
if (ObjectUtils.allNotNull(json.get("projectName"))) {
projectName = json.get("projectName").toString();
} else {
putMsg(result, Status.DATA_IS_NULL, "processDefinitionName");
}
if (ObjectUtils.allNotNull(json.get("processDefinitionName"))) {
processDefinitionName = json.get("processDefinitionName").toString();
} else {
putMsg(result, Status.DATA_IS_NULL, "processDefinitionName");
}
if (ObjectUtils.allNotNull(json.get("processDefinitionJson"))) {
processDefinitionJson = json.get("processDefinitionJson").toString();
} else {
putMsg(result, Status.DATA_IS_NULL, "processDefinitionJson");
}
if (ObjectUtils.allNotNull(json.get("processDefinitionDesc"))) {
processDefinitionDesc = json.get("processDefinitionDesc").toString();
}
if (ObjectUtils.allNotNull(json.get("processDefinitionLocations"))) {
processDefinitionLocations = json.get("processDefinitionLocations").toString();
}
if (ObjectUtils.allNotNull(json.get("processDefinitionConnects"))) {
processDefinitionConnects = json.get("processDefinitionConnects").toString();
}
JSONObject jsonObject = JSONUtils.parseObject(processDefinitionJson);
JSONArray jsonArray = (JSONArray) jsonObject.get("tasks");
for (int j = 0; j < jsonArray.size(); j++) {
JSONObject taskNode = jsonArray.getJSONObject(j);
String taskType = taskNode.getString("type");
// only SQL and PROCEDURE tasks carry a datasource, mirroring the export logic above
if (TaskType.SQL.name().equals(taskType) || TaskType.PROCEDURE.name().equals(taskType)) {
JSONObject sqlParameters = JSONUtils.parseObject(taskNode.getString("params"));
List<DataSource> dataSources = dataSourceMapper.queryDataSourceByName(sqlParameters.getString("datasourceName"));
if (dataSources.size() > 0) {
DataSource dataSource = dataSources.get(0);
sqlParameters.put("datasource", dataSource.getId());
}
taskNode.put("params", sqlParameters);
}
}
jsonObject.put("tasks", jsonArray);
Map<String, Object> createProcessDefinitionResult = createProcessDefinition(loginUser, projectName, processDefinitionName, jsonObject.toString(), processDefinitionDesc, processDefinitionLocations, processDefinitionConnects);
Integer processDefinitionId = null;
if (ObjectUtils.allNotNull(createProcessDefinitionResult.get("processDefinitionId"))) {
processDefinitionId = Integer.parseInt(createProcessDefinitionResult.get("processDefinitionId").toString());
}
if (ObjectUtils.allNotNull(json.get("scheduleCrontab")) && processDefinitionId != null) {
Date now = new Date();
Schedule scheduleObj = new Schedule();
scheduleObj.setProjectName(projectName);
scheduleObj.setProcessDefinitionId(processDefinitionId);
scheduleObj.setProcessDefinitionName(processDefinitionName);
scheduleObj.setCreateTime(now);
scheduleObj.setUpdateTime(now);
scheduleObj.setUserId(loginUser.getId());
scheduleObj.setUserName(loginUser.getUserName());
scheduleCrontab = json.get("scheduleCrontab").toString();
scheduleObj.setCrontab(scheduleCrontab);
if (ObjectUtils.allNotNull(json.get("scheduleStartTime"))) {
scheduleStartTime = json.get("scheduleStartTime").toString();
scheduleObj.setStartTime(DateUtils.stringToDate(scheduleStartTime));
}
if (ObjectUtils.allNotNull(json.get("scheduleEndTime"))) {
scheduleEndTime = json.get("scheduleEndTime").toString();
scheduleObj.setEndTime(DateUtils.stringToDate(scheduleEndTime));
}
if (ObjectUtils.allNotNull(json.get("scheduleWarningType"))) {
scheduleWarningType = json.get("scheduleWarningType").toString();
scheduleObj.setWarningType(WarningType.valueOf(scheduleWarningType));
}
if (ObjectUtils.allNotNull(json.get("scheduleWarningGroupId"))) {
scheduleWarningGroupId = json.get("scheduleWarningGroupId").toString();
scheduleObj.setWarningGroupId(Integer.parseInt(scheduleWarningGroupId));
}
if (ObjectUtils.allNotNull(json.get("scheduleFailureStrategy"))) {
scheduleFailureStrategy = json.get("scheduleFailureStrategy").toString();
scheduleObj.setFailureStrategy(FailureStrategy.valueOf(scheduleFailureStrategy));
}
if (ObjectUtils.allNotNull(json.get("scheduleReleaseState"))) {
scheduleReleaseState = json.get("scheduleReleaseState").toString();
scheduleObj.setReleaseState(ReleaseState.valueOf(scheduleReleaseState));
}
if (ObjectUtils.allNotNull(json.get("scheduleProcessInstancePriority"))) {
scheduleProcessInstancePriority = json.get("scheduleProcessInstancePriority").toString();
scheduleObj.setProcessInstancePriority(Priority.valueOf(scheduleProcessInstancePriority));
}
if (ObjectUtils.allNotNull(json.get("scheduleWorkerGroupId"))) {
scheduleWorkerGroupId = json.get("scheduleWorkerGroupId").toString();
if(scheduleWorkerGroupId != null){
scheduleObj.setWorkerGroupId(Integer.parseInt(scheduleWorkerGroupId));
}else{
if (ObjectUtils.allNotNull(json.get("scheduleWorkerGroupName"))) {
scheduleWorkerGroupName = json.get("scheduleWorkerGroupName").toString();
List<WorkerGroup> workerGroups = workerGroupMapper.queryWorkerGroupByName(scheduleWorkerGroupName);
if(workerGroups.size() > 0){
scheduleObj.setWorkerGroupId(workerGroups.get(0).getId());
}
}
}
}
scheduleMapper.insert(scheduleObj);
}
}else{
putMsg(result, Status.IMPORT_PROCESS_DEFINE_ERROR);
return result;
}
} catch (IOException e) {
throw new RuntimeException(e.getMessage(), e);
}
putMsg(result, Status.SUCCESS);
return result;
}
/**
* check the process definition node meets the specifications
......
......@@ -226,6 +226,22 @@ public class JSONUtils {
}
}
/**
 * deserialize a json string into a fastjson JSONObject
 */
public static JSONObject parseObject(String text) {
try {
return JSONObject.parseObject(text);
} catch (Exception e) {
throw new RuntimeException("Json deserialization exception.", e);
}
}

/**
 * deserialize a json string into a fastjson JSONArray
 */
public static JSONArray parseArray(String text) {
try {
return JSONObject.parseArray(text);
} catch (Exception e) {
throw new RuntimeException("Json deserialization exception.", e);
}
}
/**
......
......@@ -15,10 +15,10 @@
<th width="50">
<span>{{$t('State')}}</span>
</th>
<th width="140">
<th width="130">
<span>{{$t('Create Time')}}</span>
</th>
<th width="140">
<th width="130">
<span>{{$t('Update Time')}}</span>
</th>
<th>
......@@ -27,7 +27,7 @@
<th width="90">
<span>{{$t('Timing state')}}</span>
</th>
<th width="220">
<th width="240">
<span>{{$t('Operation')}}</span>
</th>
</tr>
......@@ -86,6 +86,8 @@
</template>
</x-poptip>
<x-button type="info" shape="circle" size="xsmall" data-toggle="tooltip" :title="$t('TreeView')" @click="_treeView(item)" icon="iconfont icon-juxingkaobei"><!--{{$t('树形图')}}--></x-button>
<x-button type="info" shape="circle" size="xsmall" data-toggle="tooltip" :title="$t('Export')" @click="_export(item)" icon="iconfont icon-download"><!--{{$t('导出')}}--></x-button>
</td>
</tr>
</table>
......@@ -129,7 +131,7 @@
pageSize: Number
},
methods: {
...mapActions('dag', ['editProcessState', 'getStartCheck', 'getReceiver', 'deleteDefinition', 'batchDeleteDefinition']),
...mapActions('dag', ['editProcessState', 'getStartCheck', 'getReceiver', 'deleteDefinition', 'batchDeleteDefinition','exportDefinition']),
_rtPublishStatus (code) {
return _.filter(publishStatus, v => v.code === code)[0].desc
},
......@@ -276,6 +278,14 @@
releaseState: 1
})
},
_export (item) {
this.exportDefinition({
processDefinitionId: item.id,
processDefinitionName: item.name
}).catch(e => {
this.$message.error(e.msg || '')
})
},
/**
* Edit state
*/
......
......@@ -165,7 +165,7 @@
warningGroupId: {},
spinnerLoading: false,
scheduleTime: '',
crontab: '* * * * * ? *',
crontab: '0 0 * * * ? *',
cronPopover: false,
receivers: [],
receiversCc: [],
......
......@@ -4,6 +4,8 @@
<m-conditions @on-conditions="_onConditions">
<template slot="button-group">
<x-button type="ghost" size="small" @click="() => this.$router.push({name: 'definition-create'})">{{$t('Create process')}}</x-button>
<x-button type="ghost" size="small" @click="_uploading">{{$t('Import process')}}</x-button>
</template>
</m-conditions>
</template>
......@@ -32,6 +34,7 @@
import mConditions from '@/module/components/conditions/conditions'
import mSecondaryMenu from '@/module/components/secondaryMenu/secondaryMenu'
import mListConstruction from '@/module/components/listConstruction/listConstruction'
import { findComponentDownward } from '@/module/util/'
export default {
name: 'definition-list-index',
......@@ -53,6 +56,12 @@
},
methods: {
...mapActions('dag', ['getProcessListP']),
/**
* File Upload
*/
_uploading () {
findComponentDownward(this.$root, 'roof-nav')._fileUpdate('DEFINITION')
},
/**
* page
*/
......@@ -82,6 +91,11 @@
},
_onUpdate () {
this._debounceGET('false')
},
_updateList () {
this.searchParams.pageNo = 1
this.searchParams.searchVal = ''
this._debounceGET()
}
},
watch: {
......
......@@ -496,6 +496,39 @@ export default {
})
})
},
/**
* export definition
*/
exportDefinition ({ state }, payload) {
const downloadBlob = (data, fileNameS = 'json') => {
if (!data) {
return
}
let blob = new Blob([data])
let fileName = `${fileNameS}.json`
if ('download' in document.createElement('a')) { // not IE
let url = window.URL.createObjectURL(blob)
let link = document.createElement('a')
link.style.display = 'none'
link.href = url
link.setAttribute('download', fileName)
document.body.appendChild(link)
link.click()
document.body.removeChild(link) // remove the temporary link once the download starts
window.URL.revokeObjectURL(url) // release the blob object URL
} else { // IE 10+
window.navigator.msSaveBlob(blob, fileName)
}
}
io.get(`projects/${state.projectName}/process/export`, { processDefinitionId: payload.processDefinitionId }, res => {
downloadBlob(res, payload.processDefinitionName)
}, e => {
}, {
responseType: 'blob'
})
},
/**
* Process instance get variable
*/
......
<template>
<m-popup
ref="popup"
:ok-text="$t('Upload')"
:nameText="$t('File Upload')"
@ok="_ok"
:disabled="progress === 0 ? false : true">
<template slot="content">
<form name="files" enctype="multipart/form-data" method="post">
<div class="file-update-model"
@drop.prevent="_onDrop"
@dragover.prevent="dragOver = true"
@dragleave.prevent="dragOver = false"
id="file-update-model">
<div class="tooltip-info">
<i class="fa fa-info-circle"></i>
<span>{{$t('Drag the file into the current upload window')}}</span>
</div>
<!--<div class="hide-archive" v-if="progress !== 0" @click="_ckArchive">
<i class="fa fa-minus" data-toggle="tooltip" title="关闭窗口 继续上传" data-container="body" ></i>
</div>-->
<div class="update-popup" v-if="dragOver">
<div class="icon-box">
<i class="fa fa-cloud-upload"></i>
</div>
<p class="p1">
<span>{{$t('Drag area upload')}}</span>
</p>
</div>
<m-list-box-f>
<template slot="name"><b>*</b>{{$t('Upload Files')}}</template>
<template slot="content">
<div class="file-update-box">
<template v-if="progress === 0">
<input name="file" id="file" type="file" class="file-update">
<x-button type="dashed" size="xsmall"> {{$t('Upload')}} </x-button>
</template>
<div class="progress-box" v-if="progress !== 0">
<m-progress-bar :value="progress" text-placement="left-right"></m-progress-bar>
</div>
</div>
</template>
</m-list-box-f>
<m-list-box-f>
<template slot="name">{{$t('File Name')}}</template>
<template slot="content">
<x-input
type="input"
v-model="name"
:disabled="progress !== 0"
:placeholder="$t('Please enter name')"
autocomplete="off">
</x-input>
</template>
</m-list-box-f>
</div>
</form>
</template>
</m-popup>
</template>
<script>
import io from '@/module/io'
import i18n from '@/module/i18n'
import store from '@/conf/home/store'
import mPopup from '@/module/components/popup/popup'
import mListBoxF from '@/module/components/listBoxF/listBoxF'
import mProgressBar from '@/module/components/progressBar/progressBar'
export default {
name: 'file-update',
data () {
return {
store,
// name
name: '',
// desc
desc: '',
// progress
progress: 0,
// file
file: '',
// Whether to drag upload
dragOver: false
}
},
watch: {
},
props: {
type: String
},
methods: {
/**
* submit
*/
_ok () {
this.$refs['popup'].spinnerLoading = true
if (this._validation()) {
this._formDataUpdate().then(res => {
setTimeout(() => {
this.$refs['popup'].spinnerLoading = false
}, 800)
}).catch(e => {
this.$refs['popup'].spinnerLoading = false
})
} else {
this.$refs['popup'].spinnerLoading = false
}
},
/**
* validation
*/
_validation () {
if (!this.file) {
this.$message.warning(`${i18n.$t('Please select the file to upload')}`)
return false
}
return true
},
/**
* update file
*/
_formDataUpdate () {
return new Promise((resolve, reject) => {
let self = this
let formData = new FormData()
formData.append('file', this.file)
io.post(`projects/importProcessDefinition`, res => {
this.$message.success(res.msg)
resolve()
self.$emit('onUpdate')
}, e => {
reject(e)
self.$emit('close')
this.$message.error(e.msg || '')
}, {
data: formData,
emulateJSON: false,
onUploadProgress (progressEvent) {
// Size has been uploaded
let loaded = progressEvent.loaded
// Total attachment size
let total = progressEvent.total
self.progress = Math.floor(100 * loaded / total)
self.$emit('onProgress', self.progress)
}
})
})
},
/**
* Archive to the top right corner Continue uploading
*/
_ckArchive () {
$('.update-file-modal').hide()
this.$emit('onArchive')
},
/**
* Drag and drop upload
*/
_onDrop (e) {
let file = e.dataTransfer.files[0]
this.file = file
this.name = file.name
this.dragOver = false
}
},
mounted () {
$('#file').change(() => {
let file = $('#file')[0].files[0]
this.file = file
this.name = file.name
})
},
components: { mPopup, mListBoxF, mProgressBar }
}
</script>
<style lang="scss" rel="stylesheet/scss">
.file-update-model {
.tooltip-info {
position: absolute;
left: 20px;
bottom: 26px;
span {
font-size: 12px;
color: #666;
vertical-align: middle;
}
.fa {
color: #0097e0;
font-size: 14px;
vertical-align: middle;
}
}
.hide-archive {
position: absolute;
right: 22px;
top: 17px;
.fa{
font-size: 16px;
color: #333;
font-weight: normal;
cursor: pointer;
&:hover {
color: #0097e0;
}
}
}
.file-update-box {
padding-top: 4px;
position: relative;
.file-update {
width: 70px;
height: 40px;
position: absolute;
left: 0;
top: 0;
cursor: pointer;
filter: alpha(opacity=0);
-moz-opacity: 0;
opacity: 0;
}
&:hover {
.v-btn-dashed {
background-color: transparent;
border-color: #47c3ff;
color: #47c3ff;
cursor: pointer;
}
}
.progress-box {
width: 200px;
position: absolute;
left: 70px;
top: 14px;
}
}
.update-popup {
width: calc(100% - 20px);
height: calc(100% - 20px);
background: rgba(255,253,239,.7);
position: absolute;
top: 10px;
left: 10px;
border-radius: 3px;
z-index: 1;
border: .18rem dashed #cccccc;
.icon-box {
text-align: center;
margin-top: 96px;
.fa {
font-size: 50px;
color: #2d8cf0;
}
}
.p1 {
text-align: center;
font-size: 16px;
color: #333;
padding-top: 8px;
}
}
}
</style>
......@@ -139,6 +139,7 @@
import { mapState, mapActions } from 'vuex'
import { findComponentDownward } from '@/module/util/'
import mFileUpdate from '@/module/components/fileUpdate/fileUpdate'
import mDefinitionUpdate from '@/module/components/fileUpdate/definitionUpdate'
import mProgressBar from '@/module/components/progressBar/progressBar'
import { findLocale, localeList } from '@/module/i18n/config'
......@@ -191,29 +192,55 @@
className: 'update-file-modal',
transitionName: 'opacityp',
render (h) {
return h(mFileUpdate, {
on: {
onProgress (val) {
self.progress = val
if(type === 'DEFINITION'){
return h(mDefinitionUpdate, {
on: {
onProgress (val) {
self.progress = val
},
onUpdate () {
findComponentDownward(self.$root, `definition-list-index`)._updateList()
self.isUpdate = false
self.progress = 0
modal.remove()
},
onArchive () {
self.isUpdate = true
},
close () {
self.progress = 0
modal.remove()
}
},
onUpdate () {
findComponentDownward(self.$root, `resource-list-index-${type}`)._updateList()
self.isUpdate = false
self.progress = 0
modal.remove()
},
onArchive () {
self.isUpdate = true
props: {
type: type
}
})
}else{
return h(mFileUpdate, {
on: {
onProgress (val) {
self.progress = val
},
onUpdate () {
findComponentDownward(self.$root, `resource-list-index-${type}`)._updateList()
self.isUpdate = false
self.progress = 0
modal.remove()
},
onArchive () {
self.isUpdate = true
},
close () {
self.progress = 0
modal.remove()
}
},
close () {
self.progress = 0
modal.remove()
props: {
type: type
}
},
props: {
type: type
}
})
})
}
}
})
},
......@@ -247,7 +274,7 @@
computed: {
...mapState('user', ['userInfo'])
},
components: { mFileUpdate, mProgressBar }
components: { mFileUpdate, mProgressBar, mDefinitionUpdate }
}
</script>
......
......@@ -253,6 +253,7 @@ export default {
'Size': 'Size',
'Rename': 'Rename',
'Download': 'Download',
'Export': 'Export',
'Submit': 'Submit',
'Edit UDF Function': 'Edit UDF Function',
'type': 'type',
......@@ -324,6 +325,7 @@ export default {
'Edit password': 'Edit password',
'Ordinary users': 'Ordinary users',
'Create process': 'Create process',
'Import process': 'Import process',
'Timing state': 'Timing state',
'Timing': 'Timing',
'TreeView': 'TreeView',
......
......@@ -253,6 +253,7 @@ export default {
'Size': '大小',
'Rename': '重命名',
'Download': '下载',
'Export': '导出',
'Submit': '提交',
'Edit UDF Function': '编辑UDF函数',
'type': '类型',
......@@ -324,6 +325,7 @@ export default {
'Edit password': '修改密码',
'Ordinary users': '普通用户',
'Create process': '创建工作流',
'Import process': '导入工作流',
'Timing state': '定时状态',
'Timing': '定时',
'TreeView': '树形图',
......