Commit b770077f authored by zhang.xin

1. Improve the shell startup script.

2. Add analysis modes; currently there are two: 1) accumulate 2) rewrite.
parent 252e50cd
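For reference, the two modes this commit wires up: accumulate merges new data into the minute/hour/day/month summaries already stored in HBase, while rewrite starts each summary from an empty object and recomputes the previous month. A condensed sketch of the two resulting invocations (jar name, property name, and time format are taken from the script below; the timestamps are purely illustrative and the HADOOP_CLASSPATH/path prefixes are omitted):

    # accumulate (default): start from the last recorded accumulate time (or 3 months back)
    hadoop jar skywalking-analysis-1.0-SNAPSHOT.jar -Dskywalking.analysis.mode=ACCUMULATE 2015-08-01/00:00:00 2015-11-01/00:00:00

    # rewrite (only on the configured days of the month): start from the first day of the previous month
    hadoop jar skywalking-analysis-1.0-SNAPSHOT.jar -Dskywalking.analysis.mode=REWRITE 2015-10-01/00:00:00 2015-11-01/00:00:00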
+76 −54
#!/bin/bash

# check environment variables
SW_ANALYSIS_HOME=/tmp/skywalking-analysis

# check Java home
if [ -z "$JAVA_HOME" -a -z "$JRE_HOME" ]; then
  if $darwin; then
    # Bugzilla 54390
    if [ -x '/usr/libexec/java_home' ] ; then
      export JAVA_HOME=`/usr/libexec/java_home`
    # Bugzilla 37284 (reviewed).
    elif [ -d "/System/Library/Frameworks/JavaVM.framework/Versions/CurrentJDK/Home" ]; then
      export JAVA_HOME="/System/Library/Frameworks/JavaVM.framework/Versions/CurrentJDK/Home"
    fi
  else
    JAVA_PATH=`which java 2>/dev/null`
    if [ "x$JAVA_PATH" != "x" ]; then
      JAVA_PATH=`dirname $JAVA_PATH 2>/dev/null`
      JRE_HOME=`dirname $JAVA_PATH 2>/dev/null`
    fi
    if [ "x$JRE_HOME" = "x" ]; then
      # XXX: Should we try other locations?
      if [ -x /usr/bin/java ]; then
        JRE_HOME=/usr
      fi
    fi
  fi
  if [ -z "$JAVA_HOME" -a -z "$JRE_HOME" ]; then
    echo "Neither the JAVA_HOME nor the JRE_HOME environment variable is defined"
    echo "At least one of these environment variable is needed to run this program"
    exit 1
  fi
fi
if [ -z "$JAVA_HOME" -a "$1" = "debug" ]; then
  echo "JAVA_HOME should point to a JDK in order to run in debug mode."
  exit 1
fi
if [ -z "$JRE_HOME" ]; then
  JRE_HOME="$JAVA_HOME"
fi
# check environment variables
SW_ANALYSIS_HOME=${HOME}/skywalking-analysis

#check hbase home
HBASE_HOME=${HOME}/hbase-1.1.2
@@ -46,30 +9,89 @@ HBASE_HOME=${HOME}/hbase-1.1.2
#check hadoop home
HADOOP_HOME=${HOME}/hadoop-2.6.0

# check that the skywalking runtime config directory exists; if not, create it
SW_RT_CONF_DIR="${SW_ANALYSIS_HOME}/runtime-conf"
if [ ! -d "$SW_RT_CONF_DIR" ]; then
    mkdir -p $SW_RT_CONF_DIR
fi

PID_FILES="${PID_DIR}/analysis.pid"
# get the previous process id
PID_FILES="${SW_RT_CONF_DIR}/analysis.pid"

if [ ! -f "$FILE_PROVIOUS_EXECUTE_TIME" ]; then
if [ ! -f "$FILE_PREVIOUS_EXECUTE_TIME" ]; then
  touch "$PID_FILES"
fi

SW_ANALYSIS_PID=`cat ${PID_FILES}`
# check if the skywalking analysis process is running
if [ "$SW_ANALYSIS_PID" != "" ]; then
    PS_OUT=`ps -ef | grep $SW_ANALYSIS_PID | grep -v 'grep' | grep -v $0`
    result=$(echo $PS_OUT | grep $SW_ANALYSIS_PID)
    if [ "$result" != "" ]; then
        echo "The skywalking analysis process is running. Will delay the analysis."
        exit 1
    fi
fi

# skywalking analysis mode: 1) accumulate (default) 2) rewrite
SW_ANALYSIS_MODE=ACCUMULATE
# days of the month on which rewrite mode should run
REWRITE_EXECUTIVE_DAY_ARR=(5 10)

#Get the previous execute time of rewrite mode
PRE_TIME_OF_REWRITE_FILE="${SW_RT_CONF_DIR}/rewrite_pre_time.conf"
if [ ! -f "$PRE_TIME_OF_REWRITE_FILE" ]; then
    echo "skywalking rewrite time file is not exists, create it"
    touch $PRE_TIME_OF_REWRITE_FILE
fi

PRE_TIME_OF_REWRITE_TIME=`cat $PRE_TIME_OF_REWRITE_FILE`
# check whether today is one of the rewrite-mode days and rewrite has not yet run today
if [ "$PRE_TIME_OF_REWRITE_TIME" != "" ]; then
    TODAY=$(date "+%d")
    if [ "$PRE_TIME_OF_REWRITE_TIME" != "$TODAY" ]; then
        THE_DAY_OF_MONTH=$(date "+%d")
        for THE_DAY in ${REWRITE_EXECUTIVE_DAY_ARR[@]}
        do
            if [ ${THE_DAY} -eq ${THE_DAY_OF_MONTH} ]; then
                SW_ANALYSIS_MODE=REWRITE
                START_TIME=$(date --date='1 month ago' '+%Y-%m-01/00:00:00')
                echo "skywalking analysis will execute rewrite mode. Start time:${START_TIME}"
                break
            fi
        done
    else
        echo "${TODAY} has been execute rewrite analysis process.Will not execute rewrite mode!!"
    fi
fi
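# Note: rewrite mode only triggers when rewrite_pre_time.conf is non-empty, records a day other
# than today, and today appears in REWRITE_EXECUTIVE_DAY_ARR; in that case START_TIME is reset to
# 00:00:00 on the first day of the previous month so that last month's data is re-analyzed.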

if [ "${SW_ANALYSIS_MODE}" != "REWRITE" ]; then
    # check the file that records the previous accumulate-mode execution time
    PRE_TIME_OF_ACCUMULATE_FILE="${SW_RT_CONF_DIR}/accumulate_pre_time.conf"
    if [ ! -f "${PRE_TIME_OF_ACCUMULATE_FILE}" ]; then
        echo "skywalking accumulate time file is not exists, create it."
        touch $PRE_TIME_OF_ACCUMULATE_FILE
    fi

    START_TIME=`cat ${PRE_TIME_OF_ACCUMULATE_FILE}`
    if [ "$START_TIME" = "" ]; then
        START_TIME=`date --date='3 month ago' "+%Y-%m-%d/%H:%M:%S"`
    fi
#echo $START_TIME
    SW_ANALYSIS_MODE=ACCUMULATE
    echo "skywalking analysis process will execute accumulate mode. start time: ${START_TIME}."
fi

# Get the end time of this analysis run (10 minutes before now)
END_TIME=`date --date='10 minute ago' "+%Y-%m-%d/%H:%M:%S"`
#echo $END_TIME

## execute the analysis job
echo "Begin to analyze the buried point data between ${START_TIME} and ${END_TIME}."
HADOOP_CLASSPATH=`${HBASE_HOME}/bin/hbase classpath` ${HADOOP_HOME}/bin/hadoop jar skywalking-analysis-1.0-SNAPSHOT.jar -Dskywalking.analysis.mode=${SW_ANALYSIS_MODE} ${START_TIME} ${END_TIME}

echo $END_TIME > ${PID_FILES}

if [ "${SW_ANALYSIS_MODE}" = "REWRITE" ]; then
    echo $(date "+%d") > ${PRE_TIME_OF_REWRITE_FILE}
else
    echo $END_TIME > ${PRE_TIME_OF_ACCUMULATE_FILE}
fi
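Because the script exits early when a previous run is still active and records its last run times under runtime-conf, it is clearly meant to run on a schedule; a hypothetical crontab entry (the schedule, script path, and log path are assumptions, not part of this commit):

    # hypothetical: run every 30 minutes; the script itself decides between accumulate and rewrite
    */30 * * * * ${HOME}/skywalking-analysis/analysis.sh >> ${HOME}/skywalking-analysis/analysis.log 2>&1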
+12 −0
@@ -32,7 +32,19 @@ public class AnalysisServerDriver extends Configured implements Tool {

    public static void main(String[] args) throws Exception {
        logger.info("Begin to analysis call chain.");

        String analysisMode = System.getenv("skywalking.analysis.mode");

        if ("rewrite".equalsIgnoreCase(analysisMode)){
            logger.info("Skywalking analysis mode will switch to [REWRITE] mode");
            Config.AnalysisServer.IS_ACCUMULATE_MODE = false;
        }else{
            logger.info("Skywalking analysis mode will switch to [ACCUMULATE] mode");
            Config.AnalysisServer.IS_ACCUMULATE_MODE = true;
        }

        int res = ToolRunner.run(new AnalysisServerDriver(), args);

        System.exit(res);
    }

+68 −7
@@ -2,6 +2,7 @@ package com.ai.cloud.skywalking.analysis.chainbuild.entity;

import com.ai.cloud.skywalking.analysis.chainbuild.po.ChainNode;
import com.ai.cloud.skywalking.analysis.chainbuild.util.HBaseUtil;
import com.ai.cloud.skywalking.analysis.config.Config;
import com.ai.cloud.skywalking.analysis.config.HBaseTableMetaData;
import com.google.gson.GsonBuilder;
import com.google.gson.annotations.Expose;
@@ -69,31 +70,43 @@ public class CallChainTreeNode {
        summaryMonthResult(treeId, node, calendar);
    }

    private void summaryMonthResult(String treeId, ChainNode node, Calendar calendar) throws IOException {
        String keyOfMonthSummaryTable = generateKeyOfMonthSummaryTable(treeId, calendar);
        ChainNodeSpecificMonthSummary monthSummary = chainNodeSpecificMonthSummaryContainer.get(keyOfMonthSummaryTable);
        if (monthSummary == null) {
            if (Config.AnalysisServer.IS_ACCUMULATE_MODE) {
                monthSummary = HBaseUtil.loadSpecificMonthSummary(keyOfMonthSummaryTable, getTreeNodeId());
            } else {
                monthSummary = new ChainNodeSpecificMonthSummary();
            }
            chainNodeSpecificMonthSummaryContainer.put(keyOfMonthSummaryTable, monthSummary);
        }
        monthSummary.summary(String.valueOf(calendar.get(Calendar.YEAR)), node);
    }

    private void summaryDayResult(String treeId, ChainNode node, Calendar calendar) throws IOException {
        String keyOfDaySummaryTable = generateKeyOfDaySummaryTable(treeId, calendar);
        ChainNodeSpecificDaySummary daySummary = chainNodeSpecificDaySummaryContainer.get(keyOfDaySummaryTable);
        if (daySummary == null) {
            if (Config.AnalysisServer.IS_ACCUMULATE_MODE) {
                daySummary = HBaseUtil.loadSpecificDaySummary(keyOfDaySummaryTable, getTreeNodeId());
            } else {
                daySummary = new ChainNodeSpecificDaySummary();
            }
            chainNodeSpecificDaySummaryContainer.put(keyOfDaySummaryTable, daySummary);
        }
        daySummary.summary(String.valueOf(calendar.get(Calendar.DAY_OF_MONTH)), node);
    }

    private void summaryHourResult(String treeId, ChainNode node, Calendar calendar) throws IOException {
        String keyOfHourSummaryTable = generateKeyOfHourSummaryTable(treeId, calendar);
        ChainNodeSpecificHourSummary hourSummary = chainNodeSpecificHourSummaryContainer.get(keyOfHourSummaryTable);
        if (hourSummary == null) {
            if (Config.AnalysisServer.IS_ACCUMULATE_MODE) {
                hourSummary = HBaseUtil.loadSpecificHourSummary(keyOfHourSummaryTable, getTreeNodeId());
            } else {
                hourSummary = new ChainNodeSpecificHourSummary();
            }
            chainNodeSpecificHourSummaryContainer.put(keyOfHourSummaryTable, hourSummary);
        }
        hourSummary.summary(String.valueOf(calendar.get(Calendar.HOUR)), node);
@@ -107,7 +120,11 @@ public class CallChainTreeNode {
        String keyOfMinSummaryTable = generateKeyOfMinSummaryTable(treeId, calendar);
        ChainNodeSpecificMinSummary minSummary = chainNodeSpecificMinSummaryContainer.get(keyOfMinSummaryTable);
        if (minSummary == null) {
            if (Config.AnalysisServer.IS_ACCUMULATE_MODE) {
                minSummary = HBaseUtil.loadSpecificMinSummary(keyOfMinSummaryTable, getTreeNodeId());
            } else {
                minSummary = new ChainNodeSpecificMinSummary();
            }
            chainNodeSpecificMinSummaryContainer.put(keyOfMinSummaryTable, minSummary);
        }
        minSummary.summary(String.valueOf(calendar.get(Calendar.MINUTE)), node);
@@ -146,6 +163,50 @@ public class CallChainTreeNode {
     * @throws InterruptedException
     */
    public void saveSummaryResultToHBase() throws IOException, InterruptedException {
        batchSaveMinSummaryResult();
        batchSaveHourSummaryResult();
        batchSaveDaySummaryResult();
        batchSaveMonthSummaryResult();
    }

    private void batchSaveMonthSummaryResult() throws IOException, InterruptedException {
        List<Put> puts = new ArrayList<Put>();
        for (Map.Entry<String, ChainNodeSpecificMonthSummary> entry : chainNodeSpecificMonthSummaryContainer.entrySet()) {
            Put put = new Put(entry.getKey().getBytes());
            put.addColumn(HBaseTableMetaData.TABLE_CHAIN_ONE_MONTH_SUMMARY.COLUMN_FAMILY_NAME.getBytes()
                    , getTreeNodeId().getBytes(), entry.getValue().toString().getBytes());
            puts.add(put);
        }

        HBaseUtil.batchSaveMonthSummaryResult(puts);
    }


    private void batchSaveDaySummaryResult() throws IOException, InterruptedException {
        List<Put> puts = new ArrayList<Put>();
        for (Map.Entry<String, ChainNodeSpecificDaySummary> entry : chainNodeSpecificDaySummaryContainer.entrySet()) {
            Put put = new Put(entry.getKey().getBytes());
            put.addColumn(HBaseTableMetaData.TABLE_CHAIN_ONE_DAY_SUMMARY.COLUMN_FAMILY_NAME.getBytes()
                    , getTreeNodeId().getBytes(), entry.getValue().toString().getBytes());
            puts.add(put);
        }

        HBaseUtil.batchSaveDaySummaryResult(puts);
    }

    private void batchSaveHourSummaryResult() throws IOException, InterruptedException {
        List<Put> puts = new ArrayList<Put>();
        for (Map.Entry<String, ChainNodeSpecificHourSummary> entry : chainNodeSpecificHourSummaryContainer.entrySet()) {
            Put put = new Put(entry.getKey().getBytes());
            put.addColumn(HBaseTableMetaData.TABLE_CHAIN_ONE_HOUR_SUMMARY.COLUMN_FAMILY_NAME.getBytes()
                    , getTreeNodeId().getBytes(), entry.getValue().toString().getBytes());
            puts.add(put);
        }

        HBaseUtil.batchSaveHourSummaryResult(puts);
    }

    private void batchSaveMinSummaryResult() throws IOException, InterruptedException {
        List<Put> puts = new ArrayList<Put>();
        for (Map.Entry<String, ChainNodeSpecificMinSummary> entry : chainNodeSpecificMinSummaryContainer.entrySet()) {
            Put put = new Put(entry.getKey().getBytes());
+110 −49
package com.ai.cloud.skywalking.analysis.chainbuild.util;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import com.ai.cloud.skywalking.analysis.chainbuild.entity.ChainNodeSpecificDaySummary;
import com.ai.cloud.skywalking.analysis.chainbuild.entity.ChainNodeSpecificHourSummary;
import com.ai.cloud.skywalking.analysis.chainbuild.entity.ChainNodeSpecificMinSummary;
import com.ai.cloud.skywalking.analysis.chainbuild.entity.ChainNodeSpecificMonthSummary;
import com.ai.cloud.skywalking.analysis.config.Config;
import com.ai.cloud.skywalking.analysis.config.HBaseTableMetaData;
import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class HBaseUtil {
    private static Logger logger = LoggerFactory.getLogger(HBaseUtil.class.getName());
@@ -48,6 +37,16 @@ public class HBaseUtil {

            createTableIfNeed(HBaseTableMetaData.TABLE_CHAIN_DETAIL.TABLE_NAME,
                    HBaseTableMetaData.TABLE_CHAIN_DETAIL.COLUMN_FAMILY_NAME);
            // 1 hour summary table
            createTableIfNeed(HBaseTableMetaData.TABLE_CHAIN_ONE_HOUR_SUMMARY.TABLE_NAME,
                    HBaseTableMetaData.TABLE_CHAIN_ONE_HOUR_SUMMARY.COLUMN_FAMILY_NAME);
            // 1 day summary table
            createTableIfNeed(HBaseTableMetaData.TABLE_CHAIN_ONE_DAY_SUMMARY.TABLE_NAME,
                    HBaseTableMetaData.TABLE_CHAIN_ONE_DAY_SUMMARY.COLUMN_FAMILY_NAME);
            // 1 month summary table
            createTableIfNeed(HBaseTableMetaData.TABLE_CHAIN_ONE_MONTH_SUMMARY.TABLE_NAME,
                    HBaseTableMetaData.TABLE_CHAIN_ONE_MONTH_SUMMARY.COLUMN_FAMILY_NAME);

        } catch (IOException e) {
            logger.error("Create tables failed", e);
        }
@@ -99,6 +98,75 @@ public class HBaseUtil {
        return result;
    }

    public static ChainNodeSpecificHourSummary loadSpecificHourSummary(String keyOfHourSummaryTable, String treeNodeId) throws IOException {
        ChainNodeSpecificHourSummary result = null;
        Table table = connection.getTable(TableName.valueOf(HBaseTableMetaData.TABLE_CHAIN_ONE_HOUR_SUMMARY.TABLE_NAME));
        Get g = new Get(Bytes.toBytes(keyOfHourSummaryTable));
        Result r = table.get(g);

        if (r.rawCells().length == 0) {
            return new ChainNodeSpecificHourSummary();
        }

        Cell cell = r.getColumnLatestCell(HBaseTableMetaData.TABLE_CHAIN_ONE_HOUR_SUMMARY.COLUMN_FAMILY_NAME.getBytes(),
                treeNodeId.getBytes());

        if (cell != null && cell.getValueArray().length > 0) {
            result = new ChainNodeSpecificHourSummary(Bytes.toString(cell.getValueArray(),
                    cell.getValueOffset(), cell.getValueLength()));
        } else {
            result = new ChainNodeSpecificHourSummary();
        }

        return result;
    }

    public static ChainNodeSpecificDaySummary loadSpecificDaySummary(String keyOfDaySummaryTable, String treeNodeId) throws IOException {
        ChainNodeSpecificDaySummary result = null;
        Table table = connection.getTable(TableName.valueOf(HBaseTableMetaData.TABLE_CHAIN_ONE_DAY_SUMMARY.TABLE_NAME));
        Get g = new Get(Bytes.toBytes(keyOfDaySummaryTable));
        Result r = table.get(g);

        if (r.rawCells().length == 0) {
            return new ChainNodeSpecificDaySummary();
        }

        Cell cell = r.getColumnLatestCell(HBaseTableMetaData.TABLE_CHAIN_ONE_DAY_SUMMARY.COLUMN_FAMILY_NAME.getBytes(),
                treeNodeId.getBytes());

        if (cell != null && cell.getValueArray().length > 0) {
            result = new ChainNodeSpecificDaySummary(Bytes.toString(cell.getValueArray(),
                    cell.getValueOffset(), cell.getValueLength()));
        } else {
            result = new ChainNodeSpecificDaySummary();
        }

        return result;
    }

    public static ChainNodeSpecificMonthSummary loadSpecificMonthSummary(String keyOfMonthSummaryTable, String treeNodeId) throws IOException {
        ChainNodeSpecificMonthSummary result = null;
        Table table = connection.getTable(TableName.valueOf(HBaseTableMetaData.TABLE_CHAIN_ONE_MONTH_SUMMARY.TABLE_NAME));
        Get g = new Get(Bytes.toBytes(keyOfMonthSummaryTable));
        Result r = table.get(g);

        if (r.rawCells().length == 0) {
            return new ChainNodeSpecificMonthSummary();
        }

        Cell cell = r.getColumnLatestCell(HBaseTableMetaData.TABLE_CHAIN_ONE_MONTH_SUMMARY.COLUMN_FAMILY_NAME.getBytes(),
                treeNodeId.getBytes());

        if (cell != null && cell.getValueArray().length > 0) {
            result = new ChainNodeSpecificMonthSummary(Bytes.toString(cell.getValueArray(),
                    cell.getValueOffset(), cell.getValueLength()));
        } else {
            result = new ChainNodeSpecificMonthSummary();
        }

        return result;
    }

    public static List<String> loadHasBeenMergeChainIds(String treeId) throws IOException {
        List<String> result = new ArrayList<String>();
        Table table = connection.getTable(TableName.valueOf(HBaseTableMetaData.TABLE_CALL_CHAIN_TREE_ID_AND_CID_MAPPING.TABLE_NAME));
@@ -122,6 +190,25 @@ public class HBaseUtil {

    public static void batchSaveMinSummaryResult(List<Put> puts) throws IOException, InterruptedException {
        Table table = connection.getTable(TableName.valueOf(HBaseTableMetaData.TABLE_CHAIN_ONE_MINUTE_SUMMARY.TABLE_NAME));
        batchSaveData(puts, table);
    }

    public static void batchSaveMonthSummaryResult(List<Put> puts) throws IOException, InterruptedException {
        Table table = connection.getTable(TableName.valueOf(HBaseTableMetaData.TABLE_CHAIN_ONE_MONTH_SUMMARY.TABLE_NAME));
        batchSaveData(puts, table);
    }

    public static void batchSaveDaySummaryResult(List<Put> puts) throws IOException, InterruptedException {
        Table table = connection.getTable(TableName.valueOf(HBaseTableMetaData.TABLE_CHAIN_ONE_DAY_SUMMARY.TABLE_NAME));
        batchSaveData(puts, table);
    }

    public static void batchSaveHourSummaryResult(List<Put> puts) throws IOException, InterruptedException {
        Table table = connection.getTable(TableName.valueOf(HBaseTableMetaData.TABLE_CHAIN_ONE_HOUR_SUMMARY.TABLE_NAME));
        batchSaveData(puts, table);
    }

    private static void batchSaveData(List<Put> puts, Table table) throws IOException, InterruptedException {
        Object[] resultArray = new Object[puts.size()];
        table.batch(puts, resultArray);
        int index = 0;
@@ -135,39 +222,13 @@ public class HBaseUtil {

    public static void batchSaveChainInfo(List<Put> chainInfoPuts) throws IOException, InterruptedException {
        Table table = connection.getTable(TableName.valueOf(HBaseTableMetaData.TABLE_CHAIN_DETAIL.TABLE_NAME));
        batchSaveData(chainInfoPuts, table);
    }

    public static void batchSaveHasBeenMergedCID(List<Put> chainIdPuts) throws IOException, InterruptedException {
        Table table = connection.getTable(TableName.valueOf(HBaseTableMetaData.TABLE_CALL_CHAIN_TREE_ID_AND_CID_MAPPING.TABLE_NAME));
        batchSaveData(chainIdPuts, table);
    }

}
+4 −0
@@ -25,4 +25,8 @@ public class Config {
    public static class Filter {
        public static String FILTER_PACKAGE_NAME;
    }

    public static class AnalysisServer{
        public static boolean IS_ACCUMULATE_MODE = true;
    }
}