Commit d8f84f2a authored by wu-sheng

Remove the counting logic from the analysis job; also remove the related redis dependency.

Parent 4b2fb51f
@@ -38,12 +38,6 @@
<artifactId>skywalking-protocol</artifactId>
<version>1.0-SNAPSHOT</version>
</dependency>
- <dependency>
- <groupId>junit</groupId>
- <artifactId>junit</artifactId>
- <version>4.12</version>
- <scope>test</scope>
- </dependency>
<dependency>
<groupId>com.google.code.gson</groupId>
<artifactId>gson</artifactId>
@@ -59,10 +53,12 @@
<artifactId>log4j-core</artifactId>
<version>2.2</version>
</dependency>
<dependency>
- <groupId>redis.clients</groupId>
- <artifactId>jedis</artifactId>
- <version>2.8.1</version>
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ <version>4.12</version>
+ <scope>test</scope>
</dependency>
</dependencies>
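
With jedis removed from the POM, anything still importing redis.clients.jedis no longer compiles, which is why the RedisUtil class at the end of this commit is deleted. As a sanity check (illustrative only, not part of this commit), a reflective lookup confirms the jar is really off the runtime classpath:

// Illustrative check, not part of this commit: confirms jedis is gone
// from the runtime classpath after the dependency removal.
public class JedisClasspathCheck {
    public static void main(String[] args) {
        try {
            Class.forName("redis.clients.jedis.Jedis");
            System.out.println("jedis is still on the classpath");
        } catch (ClassNotFoundException expected) {
            System.out.println("jedis removed, as intended");
        }
    }
}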
......
package com.ai.cloud.skywalking.analysis.chainbuild;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.Text;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import com.ai.cloud.skywalking.analysis.chainbuild.exception.Tid2CidECovertException;
import com.ai.cloud.skywalking.analysis.chainbuild.filter.SpanNodeProcessChain;
import com.ai.cloud.skywalking.analysis.chainbuild.filter.SpanNodeProcessFilter;
import com.ai.cloud.skywalking.analysis.chainbuild.po.ChainInfo;
import com.ai.cloud.skywalking.analysis.chainbuild.po.ChainNode;
import com.ai.cloud.skywalking.analysis.chainbuild.po.SummaryType;
import com.ai.cloud.skywalking.analysis.chainbuild.util.*;
import com.ai.cloud.skywalking.analysis.config.Config;
import com.ai.cloud.skywalking.analysis.chainbuild.util.HBaseUtil;
import com.ai.cloud.skywalking.analysis.chainbuild.util.SubLevelSpanCostCounter;
import com.ai.cloud.skywalking.analysis.chainbuild.util.TokenGenerator;
import com.ai.cloud.skywalking.analysis.chainbuild.util.VersionIdentifier;
import com.ai.cloud.skywalking.analysis.config.ConfigInitializer;
import com.ai.cloud.skywalking.protocol.Span;
import com.ai.cloud.skywalking.util.SpanLevelIdComparators;
import com.google.gson.Gson;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.Text;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.*;
public class ChainBuildMapper extends TableMapper<Text, Text> {
@@ -46,7 +54,6 @@ public class ChainBuildMapper extends TableMapper<Text, Text> {
return;
}
- RedisUtil.autoIncrement(Config.Redis.MAPPER_COUNT_KEY);
List<Span> spanList = new ArrayList<Span>();
ChainInfo chainInfo = null;
try {
@@ -121,11 +128,9 @@ public class ChainBuildMapper extends TableMapper<Text, Text> {
+ ":" + chainInfo.getCallEntrance()),
new Text(new Gson().toJson(chainInfo)));
}
- RedisUtil.autoIncrement(Config.Redis.SUCCESS_MAPPER_COUNT_KEY);
} catch (Exception e) {
logger.error("Failed to mapper call chain[" + key.toString() + "]",
e);
- RedisUtil.autoIncrement(Config.Redis.FAILED_MAPPER_COUNT_KEY);
}
}
@@ -162,10 +167,4 @@ public class ChainBuildMapper extends TableMapper<Text, Text> {
}
return spanEntryMap;
}
- private void clearData(){
-     RedisUtil.clearData(Config.Redis.MAPPER_COUNT_KEY);
-     RedisUtil.clearData(Config.Redis.FAILED_MAPPER_COUNT_KEY);
-     RedisUtil.clearData(Config.Redis.SUCCESS_MAPPER_COUNT_KEY);
- }
}
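
The three removed RedisUtil.autoIncrement calls counted mapped, succeeded, and failed records. If those numbers are still wanted, Hadoop's built-in job counters provide them without an external Redis; a minimal sketch, assuming a counter enum of our own invention (nothing below is added by this commit):

import java.io.IOException;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.io.Text;

// Hypothetical sketch: the bookkeeping the removed RedisUtil calls did,
// expressed with Hadoop job counters instead of Redis keys.
public abstract class CountingMapperSketch extends TableMapper<Text, Text> {

    // Illustrative counter names, not part of the actual commit.
    enum ChainBuildCounter { MAPPED, MAP_SUCCESS, MAP_FAILURE }

    @Override
    protected void map(ImmutableBytesWritable key, Result value, Context context)
            throws IOException, InterruptedException {
        context.getCounter(ChainBuildCounter.MAPPED).increment(1);
        try {
            doMap(key, value, context);
            context.getCounter(ChainBuildCounter.MAP_SUCCESS).increment(1);
        } catch (Exception e) {
            context.getCounter(ChainBuildCounter.MAP_FAILURE).increment(1);
        }
    }

    protected abstract void doMap(ImmutableBytesWritable key, Result value, Context context)
            throws Exception;
}

Counters are aggregated by the framework and reported in the job history UI, so nothing like the removed clearData() is needed between runs.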
package com.ai.cloud.skywalking.analysis.chainbuild;
import com.ai.cloud.skywalking.analysis.chainbuild.action.IStatisticsAction;
import com.ai.cloud.skywalking.analysis.chainbuild.po.SummaryType;
import com.ai.cloud.skywalking.analysis.config.Config;
import com.ai.cloud.skywalking.analysis.config.ConfigInitializer;
import org.apache.hadoop.hbase.util.Bytes;
import java.io.IOException;
import java.util.Iterator;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.io.IOException;
import java.util.Iterator;
import com.ai.cloud.skywalking.analysis.chainbuild.action.IStatisticsAction;
import com.ai.cloud.skywalking.analysis.chainbuild.po.SummaryType;
import com.ai.cloud.skywalking.analysis.config.Config;
import com.ai.cloud.skywalking.analysis.config.ConfigInitializer;
public class ChainBuildReducer extends Reducer<Text, Text, Text, IntWritable> {
private Logger logger = LogManager.getLogger(ChainBuildReducer.class);
......
package com.ai.cloud.skywalking.analysis.chainbuild;
import com.ai.cloud.skywalking.analysis.chainbuild.entity.CallChainDetailForMysql;
import com.ai.cloud.skywalking.analysis.chainbuild.po.ChainNode;
import com.ai.cloud.skywalking.analysis.config.Config;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Timestamp;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.sql.*;
import java.util.Map;
import com.ai.cloud.skywalking.analysis.chainbuild.entity.CallChainDetailForMysql;
import com.ai.cloud.skywalking.analysis.chainbuild.po.ChainNode;
import com.ai.cloud.skywalking.analysis.config.Config;
public class DBCallChainInfoDao {
private static Logger logger = LoggerFactory
......
package com.ai.cloud.skywalking.analysis.chainbuild.filter.impl;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import com.ai.cloud.skywalking.analysis.chainbuild.SpanEntry;
import com.ai.cloud.skywalking.analysis.chainbuild.filter.SpanNodeProcessFilter;
import com.ai.cloud.skywalking.analysis.chainbuild.po.ChainNode;
import com.ai.cloud.skywalking.analysis.chainbuild.util.SubLevelSpanCostCounter;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
public class JDBCBusinessKeyHandleFilter extends SpanNodeProcessFilter {
private Logger logger = LogManager.getLogger(JDBCBusinessKeyHandleFilter.class);
......
package com.ai.cloud.skywalking.analysis.chainbuild.util;
import com.ai.cloud.skywalking.analysis.config.Config;
import org.apache.commons.pool2.impl.GenericObjectPoolConfig;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import redis.clients.jedis.Jedis;
import redis.clients.jedis.JedisPool;
/**
* Created by xin on 16-5-13.
*/
public class RedisUtil {
private static Logger logger = LogManager.getLogger(RedisUtil.class);
private static JedisPool jedisPool;
private static boolean turn_on = true;
static {
try {
GenericObjectPoolConfig genericObjectPoolConfig = new GenericObjectPoolConfig();
jedisPool = new JedisPool(genericObjectPoolConfig, Config.Redis.HOST, Config.Redis.PORT);
} catch (Exception e) {
logger.error("Failed to create jedis pool", e);
turn_on = false;
}
}
public static void autoIncrement(String key) {
if (!turn_on) {
return;
}
Jedis jedis = null;
try {
jedis = jedisPool.getResource();
jedis.incrBy(key, 1);
} catch (Exception e) {
logger.error("Failed to auto increment .", e);
} finally {
if (jedis != null) {
jedis.close();
}
}
}
public static void clearData(String key) {
if (!turn_on) {
return;
}
Jedis jedis = null;
try {
jedis = jedisPool.getResource();
jedis.setnx(key, "0");
} catch (Exception e) {
logger.error("Failed to auto increment .", e);
} finally {
if (jedis != null) {
jedis.close();
}
}
}
}
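
RedisUtil is shown in full because the commit deletes the class wholesale; with jedis gone from the POM it could not compile anyway. Two details in the removed code are worth noting: clearData's catch block logs "Failed to auto increment ." (a copy-paste slip), and setnx only writes when the key is absent, so it never actually reset an existing counter. Also, in jedis 2.8.x Jedis implements Closeable and close() returns a pooled connection to its pool, so the manual finally blocks could have been try-with-resources. A sketch of that idiom (illustrative only, since the class is being deleted):

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import redis.clients.jedis.Jedis;
import redis.clients.jedis.JedisPool;

// Illustrative rewrite of the deleted helper, not part of the commit.
public class RedisCounterSketch {
    private static final Logger logger = LogManager.getLogger(RedisCounterSketch.class);

    // try-with-resources: close() returns the connection to the pool.
    public static void autoIncrement(JedisPool pool, String key) {
        try (Jedis jedis = pool.getResource()) {
            jedis.incrBy(key, 1);
        } catch (Exception e) {
            logger.error("Failed to increment key " + key, e);
        }
    }

    // An unconditional reset; set overwrites, unlike the original setnx.
    public static void reset(JedisPool pool, String key) {
        try (Jedis jedis = pool.getResource()) {
            jedis.set(key, "0");
        } catch (Exception e) {
            logger.error("Failed to reset key " + key, e);
        }
    }
}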