Commit dac6ec24 authored by zhang.xin's avatar zhang.xin
Browse files

1. 增加打印日志
2. 修复 Mapper 传出的 key 不是 CallEntrance 而是 EntranceNodeToken 的问题
3. 增加 log4j 的依赖，便于输出日志
parent ec6501b0
Loading
Loading
Loading
Loading
+5 −0
Original line number Diff line number Diff line
@@ -54,6 +54,11 @@
            <artifactId>mysql-connector-java</artifactId>
            <version>5.1.38</version>
        </dependency>
        <dependency>
            <groupId>org.apache.logging.log4j</groupId>
            <artifactId>log4j-core</artifactId>
            <version>2.2</version>
        </dependency>
    </dependencies>

    <build>
+5 −5
Original line number Diff line number Diff line
@@ -16,15 +16,16 @@ import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.Text;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;


import java.io.IOException;
import java.util.*;

public class ChainBuildMapper extends TableMapper<Text, Text> {

    private Logger logger = LoggerFactory.getLogger(ChainBuildMapper.class);
    private Logger logger = LogManager.getLogger(ChainBuildMapper.class);

    @Override
    protected void setup(Context context) throws IOException,
@@ -56,9 +57,8 @@ public class ChainBuildMapper extends TableMapper<Text, Text> {
            logger.debug("convert tid[" + Bytes.toString(key.get())
                    + "] to chain with cid[" + chainInfo.getCID() + "].");
            context.write(
                    new Text(chainInfo.getEntranceNodeToken()), new Text(new Gson().toJson(chainInfo)));
                    new Text(chainInfo.getCallEntrance()), new Text(new Gson().toJson(chainInfo)));
        } catch (Exception e) {
            e.printStackTrace();
            logger.error("Failed to mapper call chain[" + key.toString() + "]",
                    e);
        }
+5 −5
Original line number Diff line number Diff line
@@ -9,14 +9,14 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

import java.io.IOException;
import java.util.Iterator;

public class ChainBuildReducer extends Reducer<Text, Text, Text, IntWritable> {
    private Logger logger = LoggerFactory.getLogger(ChainBuildReducer.class);
    private Logger logger = LogManager.getLogger(ChainBuildReducer.class);

    @Override
    protected void setup(Context context) throws IOException,
@@ -33,7 +33,8 @@ public class ChainBuildReducer extends Reducer<Text, Text, Text, IntWritable> {
    public void doReduceAction(String key, Iterator<Text> chainInfoIterator)
            throws IOException, InterruptedException {
        CallChainTree chainTree = CallChainTree.load(key);
        SpecificTimeCallTreeMergedChainIdContainer container = new SpecificTimeCallTreeMergedChainIdContainer(chainTree.getTreeToken());
        SpecificTimeCallTreeMergedChainIdContainer container
                = new SpecificTimeCallTreeMergedChainIdContainer(chainTree.getTreeToken());
        while (chainInfoIterator.hasNext()) {
            String callChainData = chainInfoIterator.next().toString();
            ChainInfo chainInfo = null;
@@ -42,7 +43,6 @@ public class ChainBuildReducer extends Reducer<Text, Text, Text, IntWritable> {
                container.addMergedChainIfNotContain(chainInfo);
                chainTree.summary(chainInfo);
            } catch (Exception e) {
                e.printStackTrace();
                logger.error(
                        "Failed to summary call chain, maybe illegal data:"
                                + callChainData, e);
+5 −0
Original line number Diff line number Diff line
@@ -7,8 +7,12 @@ import java.util.Map;
import com.ai.cloud.skywalking.analysis.chainbuild.po.ChainInfo;
import com.ai.cloud.skywalking.analysis.chainbuild.po.ChainNode;
import com.ai.cloud.skywalking.analysis.chainbuild.util.TokenGenerator;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

public class CallChainTree {
    private Logger logger = LogManager.getLogger(CallChainTree.class);

    private String callEntrance;

    private String treeToken;
@@ -23,6 +27,7 @@ public class CallChainTree {
        nodes = new HashMap<String, CallChainTreeNode>();
        this.callEntrance = callEntrance;
        this.treeToken = TokenGenerator.generateTreeToken(callEntrance);
        logger.info("CallEntrance:[{}] == TreeToken[{}]",callEntrance, treeToken);
    }

    public static CallChainTree load(String callEntrance) throws IOException {
+4 −0
Original line number Diff line number Diff line
@@ -6,6 +6,8 @@ import com.ai.cloud.skywalking.analysis.config.HBaseTableMetaData;
import com.google.gson.GsonBuilder;
import com.google.gson.annotations.Expose;
import org.apache.hadoop.hbase.client.Put;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

import java.io.IOException;
import java.util.*;
@@ -17,6 +19,8 @@ import java.util.*;
 * @author wusheng
 */
public class CallChainTreeNode {
    private Logger logger = LogManager.getLogger(CallChainTreeNode.class);

    @Expose
    private String traceLevelId;
    @Expose
Loading