Commit 7ecea109 authored by gongzijian's avatar gongzijian
Browse files

Merge remote-tracking branch 'upstream/dev-20190415' into dev-20190415

parents bda4bcc0 92e26b40
Loading
Loading
Loading
Loading
+12 −0
Original line number Diff line number Diff line
@@ -21,6 +21,7 @@ Lodash 高性能的 JavaScript 实用工具库
- #### Node安装
Node包下载 (注意版本 8.9.4) `https://nodejs.org/download/release/v8.9.4/` 


- #### 前端项目构建
用命令行模式 `cd`  进入 `escheduler-ui`项目目录并执行 `npm install` 拉取项目依赖包

@@ -31,6 +32,16 @@ Node包下载 (注意版本 8.9.4) `https://nodejs.org/download/release/v8.9.4/`
> 运行 `cnpm install` 


- 新建一个`.env`文件,用于跟后端交互的接口

`escheduler-ui`目录下新建一个`.env`文件,在文件里添加后端服务的ip地址和端口,用于跟后端交互,`.env`文件内容如下:
```
# 代理的接口地址(自行修改)
API_BASE = http://192.168.xx.xx:12345

# 如果您需要用ip访问项目可以把 "#" 号去掉(例)
#DEV_HOST = 192.168.xx.xx
```

> #####  !!!这里特别注意 项目如果在拉取依赖包的过程中报 " node-sass error " 错误,请在执行完后再次执行以下命令
```
@@ -49,6 +60,7 @@ npm install node-sass --unsafe-perm //单独安装node-sass依赖

再拷贝到服务器对应的目录下(前端服务静态页面存放目录)


访问地址 `http://localhost:8888/#/` 


+1 −10
Original line number Diff line number Diff line
@@ -3,23 +3,14 @@
前端有3种部署方式,分别为自动化部署,手动部署和编译源码部署

## 1、准备工作
#### 准备一:下载安装包
#### 下载安装包

目前最新安装包版本是1.0.1,下载地址: [码云下载](https://gitee.com/easyscheduler/EasyScheduler/attach_files/)

下载escheduler-ui-1.0.1.tar.gz后,解压后会产生dist目录,进入dist目录
> cd dist  

#### 准备二:新建一个`.env`文件

在dist目录下新建一个`.env`文件,在文件里添加后端服务的ip地址和端口,用于跟后端交互,`.env`文件内容如下:
```
# 代理的接口地址(自行修改)
API_BASE = http://192.168.xx.xx:12345

# 如果您需要用ip访问项目可以把 "#" 号去掉(例)
#DEV_HOST = 192.168.xx.xx
```

## 2、部署
以下两种方式任选其一部署即可,推荐自动化部署
+16 −3
Original line number Diff line number Diff line
@@ -57,7 +57,7 @@ escheduler ALL=(ALL) NOPASSWD: ALL
    flush privileges;
    ```

* 创建表和导入基础数据
* 1.0.0和1.0.1版本创建表和导入基础数据
    说明:在escheduler-backend/sql/escheduler.sql和quartz.sql

    ```sql
@@ -66,6 +66,12 @@ escheduler ALL=(ALL) NOPASSWD: ALL
    mysql -h {host} -u {user} -p{password} -D {db} < quartz.sql
    ```

* 1.0.2版本创建表和导入基础数据

    ```
    sh ./script/create_escheduler.sh
    ```

#### 准备五: 修改部署目录权限及运行参数

我们先来大体了解下解压后escheduler-backend目录下的文件(夹)的作用
@@ -148,11 +154,11 @@ install.sh : 一键部署脚本

* 一键停止集群所有服务
   
   ` sh ./script/stop_all.sh`
   ` sh ./bin/stop_all.sh`
   
* 一键开启集群所有服务
   
   ` sh ./script/start_all.sh`
   ` sh ./bin/start_all.sh`

* 启停Master

@@ -186,3 +192,9 @@ sh ./bin/escheduler-daemon.sh stop logger-server
sh ./bin/escheduler-daemon.sh start alert-server
sh ./bin/escheduler-daemon.sh stop alert-server
```

## 3、数据库升级
数据库升级是在1.0.2版本增加的功能,执行以下命令即可自动升级数据库
```
sh ./script/upgrade_escheduler.sh
```
 No newline at end of file
+2 −0
Original line number Diff line number Diff line
@@ -125,6 +125,7 @@ public class ProcessDefinitionService extends BaseDAGService {
        processDefine.setDesc(desc);
        processDefine.setLocations(locations);
        processDefine.setConnects(connects);
        processDefine.setTimeout(processData.getTimeout());

        //custom global params
        List<Property> globalParamsList = processData.getGlobalParams();
@@ -288,6 +289,7 @@ public class ProcessDefinitionService extends BaseDAGService {
        processDefine.setDesc(desc);
        processDefine.setLocations(locations);
        processDefine.setConnects(connects);
        processDefine.setTimeout(processData.getTimeout());

        //custom global params
        List<Property> globalParamsList = processData.getGlobalParams();
+15 −4
Original line number Diff line number Diff line
@@ -346,7 +346,8 @@ public class ProcessInstanceService extends BaseDAGService {

        //check process instance status
        if (!processInstance.getState().typeIsFinished()) {
            putMsg(result, Status.PROCESS_INSTANCE_STATE_OPERATION_ERROR, "update");
            putMsg(result, Status.PROCESS_INSTANCE_STATE_OPERATION_ERROR,
                    processInstance.getName(), processInstance.getState().toString(), "update");
            return result;
        }
        Date schedule = null;
@@ -355,8 +356,12 @@ public class ProcessInstanceService extends BaseDAGService {
        } else {
            schedule = processInstance.getScheduleTime();
        }
        processInstance.setScheduleTime(schedule);
        processInstance.setLocations(locations);
        processInstance.setConnects(connects);
        String globalParams = null;
        String originDefParams = null;
        int timeout = processInstance.getTimeout();
        if (StringUtils.isNotEmpty(processInstanceJson)) {
            ProcessData processData = JSONUtils.parseObject(processInstanceJson, ProcessData.class);
            //check workflow json is valid
@@ -370,9 +375,14 @@ public class ProcessInstanceService extends BaseDAGService {
            Map<String, String> globalParamMap = globalParamList.stream().collect(Collectors.toMap(Property::getProp, Property::getValue));
            globalParams = ParameterUtils.curingGlobalParams(globalParamMap, globalParamList,
                    processInstance.getCmdTypeIfComplement(), schedule);
        }
        int update = processDao.updateProcessInstance(processInstanceId, processInstanceJson,
                globalParams, schedule, flag, locations, connects);
            timeout = processData.getTimeout();
            processInstance.setTimeout(timeout);
            processInstance.setProcessInstanceJson(processInstanceJson);
            processInstance.setGlobalParams(globalParams);
        }
//        int update = processDao.updateProcessInstance(processInstanceId, processInstanceJson,
//                globalParams, schedule, flag, locations, connects);
        int update = processDao.updateProcessInstance(processInstance);
        int updateDefine = 1;
        if (syncDefine && StringUtils.isNotEmpty(processInstanceJson)) {
            ProcessDefinition processDefinition = processDao.findProcessDefineById(processInstance.getProcessDefinitionId());
@@ -380,6 +390,7 @@ public class ProcessInstanceService extends BaseDAGService {
            processDefinition.setGlobalParams(originDefParams);
            processDefinition.setLocations(locations);
            processDefinition.setConnects(connects);
            processDefinition.setTimeout(timeout);
            updateDefine = processDefineMapper.update(processDefinition);
        }
        if (update > 0 && updateDefine > 0) {
Loading