From 4941ef38b8797f184c197c01759423d9a77e05bd Mon Sep 17 00:00:00 2001 From: JThink Date: Thu, 27 Jul 2017 10:57:30 +0800 Subject: [PATCH 01/27] upgrade the version from 0.0.1 to 1.0.0 --- skyeye-alarm/build.gradle | 6 +++--- skyeye-base/build.gradle | 2 +- skyeye-benchmark/dubbo-service/build.gradle | 6 +++--- skyeye-benchmark/log-generater/build.gradle | 6 +++--- skyeye-client/build.gradle | 6 +++--- skyeye-collector/build.gradle | 6 +++--- skyeye-data/build.gradle | 4 ++-- skyeye-monitor/build.gradle | 6 +++--- skyeye-trace/build.gradle | 6 +++--- skyeye-web/build.gradle | 6 +++--- 10 files changed, 27 insertions(+), 27 deletions(-) diff --git a/skyeye-alarm/build.gradle b/skyeye-alarm/build.gradle index 6930ac9..b40ef6d 100644 --- a/skyeye-alarm/build.gradle +++ b/skyeye-alarm/build.gradle @@ -6,7 +6,7 @@ apply plugin: 'application' group = 'skyeye' applicationName = 'skyeye-alarm' -version = '0.0.1' +version = '1.0.0' sourceCompatibility = 1.7 targetCompatibility = 1.7 @@ -22,8 +22,8 @@ repositories { ext { logbackVersion = '1.1.6' - baseVersion = '0.0.1' - dataVersion = '0.0.1' + baseVersion = '1.0.0' + dataVersion = '1.0.0' } dependencies { diff --git a/skyeye-base/build.gradle b/skyeye-base/build.gradle index 96b7633..06ecb08 100644 --- a/skyeye-base/build.gradle +++ b/skyeye-base/build.gradle @@ -3,7 +3,7 @@ apply plugin: 'maven' apply plugin: 'eclipse' group = 'skyeye' -version = '0.0.1' +version = '1.0.0' sourceCompatibility = '1.7' targetCompatibility = '1.7' diff --git a/skyeye-benchmark/dubbo-service/build.gradle b/skyeye-benchmark/dubbo-service/build.gradle index f84a097..e5e22d3 100644 --- a/skyeye-benchmark/dubbo-service/build.gradle +++ b/skyeye-benchmark/dubbo-service/build.gradle @@ -3,7 +3,7 @@ allprojects { apply plugin: 'eclipse' group = 'skyeye' - version = '0.0.1' + version = '1.0.0' sourceCompatibility = 1.7 targetCompatibility = 1.7 compileJava.options.encoding = 'UTF-8' @@ -38,8 +38,8 @@ subprojects { slf4jVersion = '1.7.21' 
logbackVersion = '1.1.6' dubboVersion = '2.8.4-skyeye-trace' - dataVersion = '0.0.1' - clientVersion = '0.0.1' + dataVersion = '1.0.0' + clientVersion = '1.0.0' zookeeperVerison = '3.4.6' zkClientVersion = '0.9.1-up' } diff --git a/skyeye-benchmark/log-generater/build.gradle b/skyeye-benchmark/log-generater/build.gradle index bab18b0..d75d8ff 100644 --- a/skyeye-benchmark/log-generater/build.gradle +++ b/skyeye-benchmark/log-generater/build.gradle @@ -6,7 +6,7 @@ apply plugin: 'application' group = 'skyeye' applicationName = 'log-generater' -version = '0.0.1' +version = '1.0.0' sourceCompatibility = 1.7 targetCompatibility = 1.7 @@ -22,8 +22,8 @@ repositories { ext { logbackVersion = '1.1.6' - clientVersion = '0.0.1' - dataVersion = '0.0.1' + clientVersion = '1.0.0' + dataVersion = '1.0.0' } dependencies { diff --git a/skyeye-client/build.gradle b/skyeye-client/build.gradle index 15b1c2e..7b31063 100644 --- a/skyeye-client/build.gradle +++ b/skyeye-client/build.gradle @@ -3,7 +3,7 @@ apply plugin: 'maven' apply plugin: 'eclipse' group = 'skyeye' -version = '0.0.1' +version = '1.0.0' sourceCompatibility = '1.7' targetCompatibility = '1.7' @@ -19,8 +19,8 @@ ext { log4jVersion = '1.2.17' zookeeperVersion = '3.4.6' zkclientVersion = '0.9.1-up' - baseVersion = '0.0.1' - traceVersion = '0.0.1' + baseVersion = '1.0.0' + traceVersion = '1.0.0' } repositories { diff --git a/skyeye-collector/build.gradle b/skyeye-collector/build.gradle index a39aee7..5407c16 100644 --- a/skyeye-collector/build.gradle +++ b/skyeye-collector/build.gradle @@ -6,7 +6,7 @@ apply plugin: 'application' group = 'skyeye' applicationName = 'skyeye-collector' -version = '0.0.1' +version = '1.0.0' sourceCompatibility = 1.7 targetCompatibility = 1.7 @@ -25,8 +25,8 @@ ext { logbackVersion = '1.1.6' kafkaVersion = '0.10.0.1' esVersion = '2.3.3' - traceVersion = '0.0.1' - dataVersion = '0.0.1' + traceVersion = '1.0.0' + dataVersion = '1.0.0' zkclientVersion = '0.9.1-up' hadoopVersion = '2.6.0-cdh5.4.0' 
fastJsonVersion = '1.2.28' diff --git a/skyeye-data/build.gradle b/skyeye-data/build.gradle index 813afb7..07ac39e 100644 --- a/skyeye-data/build.gradle +++ b/skyeye-data/build.gradle @@ -4,7 +4,7 @@ allprojects { apply plugin: 'eclipse' group = 'skyeye' - version = '0.0.1' + version = '1.0.0' sourceCompatibility = 1.7 targetCompatibility = 1.7 @@ -55,7 +55,7 @@ subprojects { ext { slf4jVersion = '1.7.21' - baseVersion = '0.0.1' + baseVersion = '1.0.0' } [compileJava, compileTestJava]*.options*.encoding = 'UTF-8' diff --git a/skyeye-monitor/build.gradle b/skyeye-monitor/build.gradle index d2aaf68..9c14a22 100644 --- a/skyeye-monitor/build.gradle +++ b/skyeye-monitor/build.gradle @@ -6,7 +6,7 @@ apply plugin: 'application' group = 'skyeye' applicationName = 'skyeye-monitor' -version = '0.0.1' +version = '1.0.0' sourceCompatibility = 1.7 targetCompatibility = 1.7 @@ -24,8 +24,8 @@ ext { logbackVersion = '1.1.6' zookeeperVersion = '3.4.6' curatorVersion = '2.11.0' - baseVersion = '0.0.1' - dataVersion = '0.0.1' + baseVersion = '1.0.0' + dataVersion = '1.0.0' jacksonVersion = '1.9.13' zkclientVersion = '0.9.1-up' } diff --git a/skyeye-trace/build.gradle b/skyeye-trace/build.gradle index 442dd45..45f8d18 100644 --- a/skyeye-trace/build.gradle +++ b/skyeye-trace/build.gradle @@ -3,7 +3,7 @@ apply plugin: 'maven' apply plugin: 'eclipse' group = 'skyeye' -version = '0.0.1' +version = '1.0.0' sourceCompatibility = '1.7' targetCompatibility = '1.7' @@ -14,8 +14,8 @@ ext { mavenPublicUrl = 'http://192.168.88.8:8081/nexus/content/repositories/public' mavenReleaseUrl = 'http://192.168.88.8:8081/nexus/content/repositories/releases' mavenSnapshotUrl = "http://192.168.88.8:8081/nexus/content/repositories/snapshots" - baseVersion = '0.0.1' - dataVersion = '0.0.1' + baseVersion = '1.0.0' + dataVersion = '1.0.0' slf4jVersion = '1.7.21' fastJsonVersion = '1.2.28' zookeeperVersion = '3.4.6' diff --git a/skyeye-web/build.gradle b/skyeye-web/build.gradle index 3117a1a..a634e98 100644 --- 
a/skyeye-web/build.gradle +++ b/skyeye-web/build.gradle @@ -6,7 +6,7 @@ apply plugin: 'application' group = 'skyeye' applicationName = 'skyeye-web' -version = '0.0.1' +version = '1.0.0' sourceCompatibility = 1.7 targetCompatibility = 1.7 @@ -25,8 +25,8 @@ repositories { ext { logbackVersion = '1.1.6' - baseVersion = '0.0.1' - dataVersion = '0.0.1' + baseVersion = '1.0.0' + dataVersion = '1.0.0' jacksonVersion = '1.9.13' httpclientVersion = '4.5.2' fastjsonVersion = '1.2.28' From 202ae58c14aebefc887ffebfc08dd71c0da80134 Mon Sep 17 00:00:00 2001 From: JThink Date: Thu, 27 Jul 2017 14:08:44 +0800 Subject: [PATCH 02/27] upgrade spring-boot version from 1.3.6 to 1.5.4 --- skyeye-alarm/build.gradle | 2 +- skyeye-benchmark/dubbo-service/dubbo-service-a/build.gradle | 2 +- skyeye-benchmark/dubbo-service/dubbo-service-b/build.gradle | 2 +- skyeye-benchmark/dubbo-service/dubbo-service-c/build.gradle | 2 +- skyeye-benchmark/dubbo-service/dubbo-service-d/build.gradle | 2 +- skyeye-benchmark/dubbo-service/dubbo-service-e/build.gradle | 2 +- skyeye-benchmark/log-generater/build.gradle | 2 +- skyeye-collector/build.gradle | 2 +- skyeye-data/skyeye-data-dubbox/build.gradle | 2 +- skyeye-data/skyeye-data-hbase/build.gradle | 2 +- skyeye-data/skyeye-data-jpa/build.gradle | 2 +- skyeye-data/skyeye-data-rabbitmq/build.gradle | 2 +- skyeye-monitor/build.gradle | 2 +- skyeye-web/build.gradle | 2 +- 14 files changed, 14 insertions(+), 14 deletions(-) diff --git a/skyeye-alarm/build.gradle b/skyeye-alarm/build.gradle index b40ef6d..588c8d5 100644 --- a/skyeye-alarm/build.gradle +++ b/skyeye-alarm/build.gradle @@ -53,7 +53,7 @@ mainClassName = 'com.jthink.skyeye.alarm.launcher.Launcher' buildscript { ext { - springBootVersion = '1.3.6.RELEASE' + springBootVersion = '1.5.4.RELEASE' } repositories { diff --git a/skyeye-benchmark/dubbo-service/dubbo-service-a/build.gradle b/skyeye-benchmark/dubbo-service/dubbo-service-a/build.gradle index 2295385..e213803 100644 --- 
a/skyeye-benchmark/dubbo-service/dubbo-service-a/build.gradle +++ b/skyeye-benchmark/dubbo-service/dubbo-service-a/build.gradle @@ -28,7 +28,7 @@ mainClassName = "com.jthink.skyeye.benchmark.dubbo.service.a.application.Applica buildscript { ext { - springBootVersion = '1.3.6.RELEASE' + springBootVersion = '1.5.4.RELEASE' } repositories { diff --git a/skyeye-benchmark/dubbo-service/dubbo-service-b/build.gradle b/skyeye-benchmark/dubbo-service/dubbo-service-b/build.gradle index 09f6946..66cc084 100644 --- a/skyeye-benchmark/dubbo-service/dubbo-service-b/build.gradle +++ b/skyeye-benchmark/dubbo-service/dubbo-service-b/build.gradle @@ -28,7 +28,7 @@ mainClassName = "com.jthink.skyeye.benchmark.dubbo.service.a.application.Applica buildscript { ext { - springBootVersion = '1.3.6.RELEASE' + springBootVersion = '1.5.4.RELEASE' } repositories { diff --git a/skyeye-benchmark/dubbo-service/dubbo-service-c/build.gradle b/skyeye-benchmark/dubbo-service/dubbo-service-c/build.gradle index 09f6946..66cc084 100644 --- a/skyeye-benchmark/dubbo-service/dubbo-service-c/build.gradle +++ b/skyeye-benchmark/dubbo-service/dubbo-service-c/build.gradle @@ -28,7 +28,7 @@ mainClassName = "com.jthink.skyeye.benchmark.dubbo.service.a.application.Applica buildscript { ext { - springBootVersion = '1.3.6.RELEASE' + springBootVersion = '1.5.4.RELEASE' } repositories { diff --git a/skyeye-benchmark/dubbo-service/dubbo-service-d/build.gradle b/skyeye-benchmark/dubbo-service/dubbo-service-d/build.gradle index 09f6946..66cc084 100644 --- a/skyeye-benchmark/dubbo-service/dubbo-service-d/build.gradle +++ b/skyeye-benchmark/dubbo-service/dubbo-service-d/build.gradle @@ -28,7 +28,7 @@ mainClassName = "com.jthink.skyeye.benchmark.dubbo.service.a.application.Applica buildscript { ext { - springBootVersion = '1.3.6.RELEASE' + springBootVersion = '1.5.4.RELEASE' } repositories { diff --git a/skyeye-benchmark/dubbo-service/dubbo-service-e/build.gradle 
b/skyeye-benchmark/dubbo-service/dubbo-service-e/build.gradle index 09f6946..66cc084 100644 --- a/skyeye-benchmark/dubbo-service/dubbo-service-e/build.gradle +++ b/skyeye-benchmark/dubbo-service/dubbo-service-e/build.gradle @@ -28,7 +28,7 @@ mainClassName = "com.jthink.skyeye.benchmark.dubbo.service.a.application.Applica buildscript { ext { - springBootVersion = '1.3.6.RELEASE' + springBootVersion = '1.5.4.RELEASE' } repositories { diff --git a/skyeye-benchmark/log-generater/build.gradle b/skyeye-benchmark/log-generater/build.gradle index d75d8ff..424f634 100644 --- a/skyeye-benchmark/log-generater/build.gradle +++ b/skyeye-benchmark/log-generater/build.gradle @@ -51,7 +51,7 @@ mainClassName = 'com.unionpaysmart.alch.launcher.Launcher' buildscript { ext { - springBootVersion = '1.3.6.RELEASE' + springBootVersion = '1.5.4.RELEASE' } repositories { diff --git a/skyeye-collector/build.gradle b/skyeye-collector/build.gradle index 5407c16..766e4d0 100644 --- a/skyeye-collector/build.gradle +++ b/skyeye-collector/build.gradle @@ -82,7 +82,7 @@ mainClassName = 'com.jthink.skyeye.collector.launcher.Launcher' buildscript { ext { - springBootVersion = '1.3.6.RELEASE' + springBootVersion = '1.5.4.RELEASE' } repositories { diff --git a/skyeye-data/skyeye-data-dubbox/build.gradle b/skyeye-data/skyeye-data-dubbox/build.gradle index 8ed9452..c7abdb3 100644 --- a/skyeye-data/skyeye-data-dubbox/build.gradle +++ b/skyeye-data/skyeye-data-dubbox/build.gradle @@ -8,7 +8,7 @@ compileJava.options.encoding = 'UTF-8' buildDir = 'target' ext { - springBootVersion = '1.3.6.RELEASE' + springBootVersion = '1.5.4.RELEASE' springVersion = '4.2.7.RELEASE' dubboVersion = '2.8.4-skyeye-trace' slf4jVersion = '1.7.21' diff --git a/skyeye-data/skyeye-data-hbase/build.gradle b/skyeye-data/skyeye-data-hbase/build.gradle index cd809ea..5abf3d2 100644 --- a/skyeye-data/skyeye-data-hbase/build.gradle +++ b/skyeye-data/skyeye-data-hbase/build.gradle @@ -17,7 +17,7 @@ repositories { } ext { - 
springBootVersion = '1.3.6.RELEASE' + springBootVersion = '1.5.4.RELEASE' springVersion = '4.2.7.RELEASE' hbaseVersion = '1.0.0-cdh5.4.0' } diff --git a/skyeye-data/skyeye-data-jpa/build.gradle b/skyeye-data/skyeye-data-jpa/build.gradle index c40a209..9e6891e 100644 --- a/skyeye-data/skyeye-data-jpa/build.gradle +++ b/skyeye-data/skyeye-data-jpa/build.gradle @@ -8,7 +8,7 @@ compileJava.options.encoding = 'UTF-8' buildDir = 'target' ext { - springBootVersion = '1.3.6.RELEASE' + springBootVersion = '1.5.4.RELEASE' } dependencies { diff --git a/skyeye-data/skyeye-data-rabbitmq/build.gradle b/skyeye-data/skyeye-data-rabbitmq/build.gradle index e44cf50..fec355b 100644 --- a/skyeye-data/skyeye-data-rabbitmq/build.gradle +++ b/skyeye-data/skyeye-data-rabbitmq/build.gradle @@ -8,7 +8,7 @@ compileJava.options.encoding = 'UTF-8' buildDir = 'target' ext { - springBootVersion = '1.3.6.RELEASE' + springBootVersion = '1.5.4.RELEASE' jacksonVersion = '1.9.13' } diff --git a/skyeye-monitor/build.gradle b/skyeye-monitor/build.gradle index 9c14a22..72bcc88 100644 --- a/skyeye-monitor/build.gradle +++ b/skyeye-monitor/build.gradle @@ -68,7 +68,7 @@ mainClassName = 'com.jthink.skyeye.monitor.launcher.Launcher' buildscript { ext { - springBootVersion = '1.3.6.RELEASE' + springBootVersion = '1.5.4.RELEASE' } repositories { diff --git a/skyeye-web/build.gradle b/skyeye-web/build.gradle index a634e98..f0a8f4c 100644 --- a/skyeye-web/build.gradle +++ b/skyeye-web/build.gradle @@ -76,7 +76,7 @@ mainClassName = 'com.jthink.skyeye.web.application.Application' buildscript { ext { - springBootVersion = '1.3.6.RELEASE' + springBootVersion = '1.5.4.RELEASE' } repositories { From aedb2a0622b680470f252258179e75933c83a27c Mon Sep 17 00:00:00 2001 From: JThink Date: Thu, 27 Jul 2017 14:10:56 +0800 Subject: [PATCH 03/27] upgrade the jdk from 1.7 to 1.8 --- skyeye-alarm/build.gradle | 4 ++-- skyeye-benchmark/dubbo-service/build.gradle | 4 ++-- skyeye-benchmark/log-generater/build.gradle | 4 ++-- 
skyeye-collector/build.gradle | 4 ++-- skyeye-data/build.gradle | 4 ++-- skyeye-data/skyeye-data-dubbox/build.gradle | 4 ++-- skyeye-data/skyeye-data-hbase/build.gradle | 4 ++-- skyeye-data/skyeye-data-http/build.gradle | 4 ++-- skyeye-data/skyeye-data-jpa/build.gradle | 4 ++-- skyeye-data/skyeye-data-rabbitmq/build.gradle | 4 ++-- skyeye-monitor/build.gradle | 4 ++-- skyeye-web/build.gradle | 4 ++-- 12 files changed, 24 insertions(+), 24 deletions(-) diff --git a/skyeye-alarm/build.gradle b/skyeye-alarm/build.gradle index 588c8d5..a988916 100644 --- a/skyeye-alarm/build.gradle +++ b/skyeye-alarm/build.gradle @@ -8,8 +8,8 @@ group = 'skyeye' applicationName = 'skyeye-alarm' version = '1.0.0' -sourceCompatibility = 1.7 -targetCompatibility = 1.7 +sourceCompatibility = 1.8 +targetCompatibility = 1.8 compileJava.options.encoding = 'UTF-8' buildDir = 'target' diff --git a/skyeye-benchmark/dubbo-service/build.gradle b/skyeye-benchmark/dubbo-service/build.gradle index e5e22d3..08e2217 100644 --- a/skyeye-benchmark/dubbo-service/build.gradle +++ b/skyeye-benchmark/dubbo-service/build.gradle @@ -4,8 +4,8 @@ allprojects { group = 'skyeye' version = '1.0.0' - sourceCompatibility = 1.7 - targetCompatibility = 1.7 + sourceCompatibility = 1.8 + targetCompatibility = 1.8 compileJava.options.encoding = 'UTF-8' buildDir = 'target' diff --git a/skyeye-benchmark/log-generater/build.gradle b/skyeye-benchmark/log-generater/build.gradle index 424f634..621b378 100644 --- a/skyeye-benchmark/log-generater/build.gradle +++ b/skyeye-benchmark/log-generater/build.gradle @@ -8,8 +8,8 @@ group = 'skyeye' applicationName = 'log-generater' version = '1.0.0' -sourceCompatibility = 1.7 -targetCompatibility = 1.7 +sourceCompatibility = 1.8 +targetCompatibility = 1.8 compileJava.options.encoding = 'UTF-8' buildDir = 'target' diff --git a/skyeye-collector/build.gradle b/skyeye-collector/build.gradle index 766e4d0..00c6485 100644 --- a/skyeye-collector/build.gradle +++ b/skyeye-collector/build.gradle 
@@ -8,8 +8,8 @@ group = 'skyeye' applicationName = 'skyeye-collector' version = '1.0.0' -sourceCompatibility = 1.7 -targetCompatibility = 1.7 +sourceCompatibility = 1.8 +targetCompatibility = 1.8 compileJava.options.encoding = 'UTF-8' buildDir = 'target' diff --git a/skyeye-data/build.gradle b/skyeye-data/build.gradle index 07ac39e..1eff079 100644 --- a/skyeye-data/build.gradle +++ b/skyeye-data/build.gradle @@ -6,8 +6,8 @@ allprojects { group = 'skyeye' version = '1.0.0' - sourceCompatibility = 1.7 - targetCompatibility = 1.7 + sourceCompatibility = 1.8 + targetCompatibility = 1.8 compileJava.options.encoding = 'UTF-8' buildDir = 'target' diff --git a/skyeye-data/skyeye-data-dubbox/build.gradle b/skyeye-data/skyeye-data-dubbox/build.gradle index c7abdb3..c50c9d9 100644 --- a/skyeye-data/skyeye-data-dubbox/build.gradle +++ b/skyeye-data/skyeye-data-dubbox/build.gradle @@ -2,8 +2,8 @@ apply plugin: 'java' apply plugin: 'maven' apply plugin: 'eclipse' -sourceCompatibility = 1.7 -targetCompatibility = 1.7 +sourceCompatibility = 1.8 +targetCompatibility = 1.8 compileJava.options.encoding = 'UTF-8' buildDir = 'target' diff --git a/skyeye-data/skyeye-data-hbase/build.gradle b/skyeye-data/skyeye-data-hbase/build.gradle index 5abf3d2..e75ddbb 100644 --- a/skyeye-data/skyeye-data-hbase/build.gradle +++ b/skyeye-data/skyeye-data-hbase/build.gradle @@ -2,8 +2,8 @@ apply plugin: 'java' apply plugin: 'maven' apply plugin: 'eclipse' -sourceCompatibility = 1.7 -targetCompatibility = 1.7 +sourceCompatibility = 1.8 +targetCompatibility = 1.8 compileJava.options.encoding = 'UTF-8' buildDir = 'target' diff --git a/skyeye-data/skyeye-data-http/build.gradle b/skyeye-data/skyeye-data-http/build.gradle index 090e6e7..861488a 100644 --- a/skyeye-data/skyeye-data-http/build.gradle +++ b/skyeye-data/skyeye-data-http/build.gradle @@ -2,8 +2,8 @@ apply plugin: 'java' apply plugin: 'maven' apply plugin: 'eclipse' -sourceCompatibility = 1.7 -targetCompatibility = 1.7 +sourceCompatibility = 1.8 
+targetCompatibility = 1.8 compileJava.options.encoding = 'UTF-8' buildDir = 'target' diff --git a/skyeye-data/skyeye-data-jpa/build.gradle b/skyeye-data/skyeye-data-jpa/build.gradle index 9e6891e..023a689 100644 --- a/skyeye-data/skyeye-data-jpa/build.gradle +++ b/skyeye-data/skyeye-data-jpa/build.gradle @@ -2,8 +2,8 @@ apply plugin: 'java' apply plugin: 'maven' apply plugin: 'eclipse' -sourceCompatibility = 1.7 -targetCompatibility = 1.7 +sourceCompatibility = 1.8 +targetCompatibility = 1.8 compileJava.options.encoding = 'UTF-8' buildDir = 'target' diff --git a/skyeye-data/skyeye-data-rabbitmq/build.gradle b/skyeye-data/skyeye-data-rabbitmq/build.gradle index fec355b..466f12f 100644 --- a/skyeye-data/skyeye-data-rabbitmq/build.gradle +++ b/skyeye-data/skyeye-data-rabbitmq/build.gradle @@ -2,8 +2,8 @@ apply plugin: 'java' apply plugin: 'maven' apply plugin: 'eclipse' -sourceCompatibility = 1.7 -targetCompatibility = 1.7 +sourceCompatibility = 1.8 +targetCompatibility = 1.8 compileJava.options.encoding = 'UTF-8' buildDir = 'target' diff --git a/skyeye-monitor/build.gradle b/skyeye-monitor/build.gradle index 72bcc88..1dcd80d 100644 --- a/skyeye-monitor/build.gradle +++ b/skyeye-monitor/build.gradle @@ -8,8 +8,8 @@ group = 'skyeye' applicationName = 'skyeye-monitor' version = '1.0.0' -sourceCompatibility = 1.7 -targetCompatibility = 1.7 +sourceCompatibility = 1.8 +targetCompatibility = 1.8 compileJava.options.encoding = 'UTF-8' buildDir = 'target' diff --git a/skyeye-web/build.gradle b/skyeye-web/build.gradle index f0a8f4c..6c09ac1 100644 --- a/skyeye-web/build.gradle +++ b/skyeye-web/build.gradle @@ -8,8 +8,8 @@ group = 'skyeye' applicationName = 'skyeye-web' version = '1.0.0' -sourceCompatibility = 1.7 -targetCompatibility = 1.7 +sourceCompatibility = 1.8 +targetCompatibility = 1.8 compileJava.options.encoding = 'UTF-8' buildDir = 'target' From 1cf63bcabad81e6fa7c031fed68f16b1970733be Mon Sep 17 00:00:00 2001 From: JThink Date: Thu, 27 Jul 2017 15:19:56 +0800 
Subject: [PATCH 04/27] update jdk from 1.7 to 1.8 --- skyeye-alarm/build.gradle | 2 +- skyeye-base/build.gradle | 4 +- .../dubbo-service-a/build.gradle | 2 +- .../dubbo-service-b/build.gradle | 2 +- .../dubbo-service-c/build.gradle | 2 +- .../dubbo-service-d/build.gradle | 2 +- .../dubbo-service-e/build.gradle | 2 +- skyeye-benchmark/log-generater/build.gradle | 2 +- skyeye-client/build.gradle | 4 +- skyeye-collector/build.gradle | 4 +- .../jpa/configuration/JpaConfiguration.java | 2 +- skyeye-monitor/build.gradle | 2 +- skyeye-statistics/pom.xml | 10 +- .../context/annotation/PropertySource.java | 181 ------------------ .../configuration/SparkConfiguration.scala | 2 +- skyeye-trace/build.gradle | 4 +- skyeye-web/build.gradle | 2 +- 17 files changed, 24 insertions(+), 205 deletions(-) delete mode 100644 skyeye-statistics/src/main/java/org/springframework/context/annotation/PropertySource.java diff --git a/skyeye-alarm/build.gradle b/skyeye-alarm/build.gradle index a988916..8b8c226 100644 --- a/skyeye-alarm/build.gradle +++ b/skyeye-alarm/build.gradle @@ -1,7 +1,7 @@ apply plugin: 'java' apply plugin: 'eclipse' apply plugin: 'maven' -apply plugin: 'spring-boot' +apply plugin: 'org.springframework.boot' apply plugin: 'application' group = 'skyeye' diff --git a/skyeye-base/build.gradle b/skyeye-base/build.gradle index 06ecb08..f1cd62e 100644 --- a/skyeye-base/build.gradle +++ b/skyeye-base/build.gradle @@ -5,8 +5,8 @@ apply plugin: 'eclipse' group = 'skyeye' version = '1.0.0' -sourceCompatibility = '1.7' -targetCompatibility = '1.7' +sourceCompatibility = 1.8 +targetCompatibility = 1.8 compileJava.options.encoding = 'UTF-8' buildDir = 'target' diff --git a/skyeye-benchmark/dubbo-service/dubbo-service-a/build.gradle b/skyeye-benchmark/dubbo-service/dubbo-service-a/build.gradle index e213803..8ae84d2 100644 --- a/skyeye-benchmark/dubbo-service/dubbo-service-a/build.gradle +++ b/skyeye-benchmark/dubbo-service/dubbo-service-a/build.gradle @@ -1,7 +1,7 @@ apply plugin: 
'java' apply plugin: 'eclipse' apply plugin: 'maven' -apply plugin: 'spring-boot' +apply plugin: 'org.springframework.boot' apply plugin: 'application' dependencies { diff --git a/skyeye-benchmark/dubbo-service/dubbo-service-b/build.gradle b/skyeye-benchmark/dubbo-service/dubbo-service-b/build.gradle index 66cc084..0877f1a 100644 --- a/skyeye-benchmark/dubbo-service/dubbo-service-b/build.gradle +++ b/skyeye-benchmark/dubbo-service/dubbo-service-b/build.gradle @@ -1,7 +1,7 @@ apply plugin: 'java' apply plugin: 'eclipse' apply plugin: 'maven' -apply plugin: 'spring-boot' +apply plugin: 'org.springframework.boot' apply plugin: 'application' dependencies { diff --git a/skyeye-benchmark/dubbo-service/dubbo-service-c/build.gradle b/skyeye-benchmark/dubbo-service/dubbo-service-c/build.gradle index 66cc084..0877f1a 100644 --- a/skyeye-benchmark/dubbo-service/dubbo-service-c/build.gradle +++ b/skyeye-benchmark/dubbo-service/dubbo-service-c/build.gradle @@ -1,7 +1,7 @@ apply plugin: 'java' apply plugin: 'eclipse' apply plugin: 'maven' -apply plugin: 'spring-boot' +apply plugin: 'org.springframework.boot' apply plugin: 'application' dependencies { diff --git a/skyeye-benchmark/dubbo-service/dubbo-service-d/build.gradle b/skyeye-benchmark/dubbo-service/dubbo-service-d/build.gradle index 66cc084..0877f1a 100644 --- a/skyeye-benchmark/dubbo-service/dubbo-service-d/build.gradle +++ b/skyeye-benchmark/dubbo-service/dubbo-service-d/build.gradle @@ -1,7 +1,7 @@ apply plugin: 'java' apply plugin: 'eclipse' apply plugin: 'maven' -apply plugin: 'spring-boot' +apply plugin: 'org.springframework.boot' apply plugin: 'application' dependencies { diff --git a/skyeye-benchmark/dubbo-service/dubbo-service-e/build.gradle b/skyeye-benchmark/dubbo-service/dubbo-service-e/build.gradle index 66cc084..0877f1a 100644 --- a/skyeye-benchmark/dubbo-service/dubbo-service-e/build.gradle +++ b/skyeye-benchmark/dubbo-service/dubbo-service-e/build.gradle @@ -1,7 +1,7 @@ apply plugin: 'java' apply plugin: 
'eclipse' apply plugin: 'maven' -apply plugin: 'spring-boot' +apply plugin: 'org.springframework.boot' apply plugin: 'application' dependencies { diff --git a/skyeye-benchmark/log-generater/build.gradle b/skyeye-benchmark/log-generater/build.gradle index 621b378..5eec60c 100644 --- a/skyeye-benchmark/log-generater/build.gradle +++ b/skyeye-benchmark/log-generater/build.gradle @@ -1,7 +1,7 @@ apply plugin: 'java' apply plugin: 'eclipse' apply plugin: 'maven' -apply plugin: 'spring-boot' +apply plugin: 'org.springframework.boot' apply plugin: 'application' group = 'skyeye' diff --git a/skyeye-client/build.gradle b/skyeye-client/build.gradle index 7b31063..5b0d3ac 100644 --- a/skyeye-client/build.gradle +++ b/skyeye-client/build.gradle @@ -5,8 +5,8 @@ apply plugin: 'eclipse' group = 'skyeye' version = '1.0.0' -sourceCompatibility = '1.7' -targetCompatibility = '1.7' +sourceCompatibility = 1.8 +targetCompatibility = 1.8 compileJava.options.encoding = 'UTF-8' buildDir = 'target' diff --git a/skyeye-collector/build.gradle b/skyeye-collector/build.gradle index 00c6485..c78d2b6 100644 --- a/skyeye-collector/build.gradle +++ b/skyeye-collector/build.gradle @@ -1,7 +1,7 @@ apply plugin: 'java' apply plugin: 'eclipse' apply plugin: 'maven' -apply plugin: 'spring-boot' +apply plugin: 'org.springframework.boot' apply plugin: 'application' group = 'skyeye' @@ -48,7 +48,7 @@ dependencies { compile "org.elasticsearch:elasticsearch:$esVersion" - compile "org.springframework.boot:spring-boot-starter-redis" + compile "org.springframework.boot:spring-boot-starter-data-redis" compile "com.101tec:zkclient:$zkclientVersion" diff --git a/skyeye-data/skyeye-data-jpa/src/main/java/com/jthink/skyeye/data/jpa/configuration/JpaConfiguration.java b/skyeye-data/skyeye-data-jpa/src/main/java/com/jthink/skyeye/data/jpa/configuration/JpaConfiguration.java index a932fba..7fc582c 100644 --- a/skyeye-data/skyeye-data-jpa/src/main/java/com/jthink/skyeye/data/jpa/configuration/JpaConfiguration.java +++ 
b/skyeye-data/skyeye-data-jpa/src/main/java/com/jthink/skyeye/data/jpa/configuration/JpaConfiguration.java @@ -1,6 +1,6 @@ package com.jthink.skyeye.data.jpa.configuration; -import org.springframework.boot.orm.jpa.EntityScan; +import org.springframework.boot.autoconfigure.domain.EntityScan; import org.springframework.context.annotation.Configuration; import org.springframework.data.jpa.repository.config.EnableJpaRepositories; import org.springframework.transaction.annotation.EnableTransactionManagement; diff --git a/skyeye-monitor/build.gradle b/skyeye-monitor/build.gradle index 1dcd80d..7cb6f67 100644 --- a/skyeye-monitor/build.gradle +++ b/skyeye-monitor/build.gradle @@ -1,7 +1,7 @@ apply plugin: 'java' apply plugin: 'eclipse' apply plugin: 'maven' -apply plugin: 'spring-boot' +apply plugin: 'org.springframework.boot' apply plugin: 'application' group = 'skyeye' diff --git a/skyeye-statistics/pom.xml b/skyeye-statistics/pom.xml index f1bf94e..93da6fe 100644 --- a/skyeye-statistics/pom.xml +++ b/skyeye-statistics/pom.xml @@ -5,24 +5,24 @@ skyeye skyeye-statistics - 0.0.1 + 1.0.0 jar http://maven.apache.org UTF-8 - 1.7 + 1.8 2.10.4 2.10 1.3.0-cdh5.4.0 - 0.0.1 - 1.2.28 + 1.0.0 + 1.2.35 org.springframework.boot spring-boot-starter-parent - 1.3.6.RELEASE + 1.5.4.RELEASE diff --git a/skyeye-statistics/src/main/java/org/springframework/context/annotation/PropertySource.java b/skyeye-statistics/src/main/java/org/springframework/context/annotation/PropertySource.java deleted file mode 100644 index b9da310..0000000 --- a/skyeye-statistics/src/main/java/org/springframework/context/annotation/PropertySource.java +++ /dev/null @@ -1,181 +0,0 @@ -/* - * Copyright 2002-2015 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.context.annotation; - -import java.lang.annotation.*; - -/** - * Annotation providing a convenient and declarative mechanism for adding a - * {@link org.springframework.core.env.PropertySource PropertySource} to Spring's - * {@link org.springframework.core.env.Environment Environment}. To be used in - * conjunction with @{@link Configuration} classes. - * - *

Example usage

- * - *

Given a file {@code app.properties} containing the key/value pair - * {@code testbean.name=myTestBean}, the following {@code @Configuration} class - * uses {@code @PropertySource} to contribute {@code app.properties} to the - * {@code Environment}'s set of {@code PropertySources}. - * - *

- * @Configuration
- * @PropertySource("classpath:/com/myco/app.properties")
- * public class AppConfig {
- *     @Autowired
- *     Environment env;
- *
- *     @Bean
- *     public TestBean testBean() {
- *         TestBean testBean = new TestBean();
- *         testBean.setName(env.getProperty("testbean.name"));
- *         return testBean;
- *     }
- * }
- * - * Notice that the {@code Environment} object is @{@link - * org.springframework.beans.factory.annotation.Autowired Autowired} into the - * configuration class and then used when populating the {@code TestBean} object. Given - * the configuration above, a call to {@code testBean.getName()} will return "myTestBean". - * - *

Resolving ${...} placeholders in {@code } and {@code @Value} annotations

- * - * In order to resolve ${...} placeholders in {@code } definitions or {@code @Value} - * annotations using properties from a {@code PropertySource}, one must register - * a {@code PropertySourcesPlaceholderConfigurer}. This happens automatically when using - * {@code } in XML, but must be explicitly registered using - * a {@code static} {@code @Bean} method when using {@code @Configuration} classes. See - * the "Working with externalized values" section of @{@link Configuration}'s javadoc and - * "a note on BeanFactoryPostProcessor-returning @Bean methods" of @{@link Bean}'s javadoc - * for details and examples. - * - *

Resolving ${...} placeholders within {@code @PropertySource} resource locations

- * - * Any ${...} placeholders present in a {@code @PropertySource} {@linkplain #value() - * resource location} will be resolved against the set of property sources already - * registered against the environment. For example: - * - *
- * @Configuration
- * @PropertySource("classpath:/com/${my.placeholder:default/path}/app.properties")
- * public class AppConfig {
- *     @Autowired
- *     Environment env;
- *
- *     @Bean
- *     public TestBean testBean() {
- *         TestBean testBean = new TestBean();
- *         testBean.setName(env.getProperty("testbean.name"));
- *         return testBean;
- *     }
- * }
- * - * Assuming that "my.placeholder" is present in one of the property sources already - * registered, e.g. system properties or environment variables, the placeholder will - * be resolved to the corresponding value. If not, then "default/path" will be used as a - * default. Expressing a default value (delimited by colon ":") is optional. If no - * default is specified and a property cannot be resolved, an {@code - * IllegalArgumentException} will be thrown. - * - *

A note on property overriding with @PropertySource

- * - * In cases where a given property key exists in more than one {@code .properties} - * file, the last {@code @PropertySource} annotation processed will 'win' and override. - * - * For example, given two properties files {@code a.properties} and - * {@code b.properties}, consider the following two configuration classes - * that reference them with {@code @PropertySource} annotations: - * - *
- * @Configuration
- * @PropertySource("classpath:/com/myco/a.properties")
- * public class ConfigA { }
- *
- * @Configuration
- * @PropertySource("classpath:/com/myco/b.properties")
- * public class ConfigB { }
- * 
- * - * The override ordering depends on the order in which these classes are registered - * with the application context. - *
- * AnnotationConfigApplicationContext ctx =
- *     new AnnotationConfigApplicationContext();
- * ctx.register(ConfigA.class);
- * ctx.register(ConfigB.class);
- * ctx.refresh();
- * 
- * - * In the scenario above, the properties in {@code b.properties} will override any - * duplicates that exist in {@code a.properties}, because {@code ConfigB} was registered - * last. - * - *

In certain situations, it may not be possible or practical to tightly control - * property source ordering when using {@code @ProperySource} annotations. For example, - * if the {@code @Configuration} classes above were registered via component-scanning, - * the ordering is difficult to predict. In such cases - and if overriding is important - - * it is recommended that the user fall back to using the programmatic PropertySource API. - * See {@link org.springframework.core.env.ConfigurableEnvironment ConfigurableEnvironment} - * and {@link org.springframework.core.env.MutablePropertySources MutablePropertySources} - * javadocs for details. - * - * @author Chris Beams - * @author Phillip Webb - * @since 3.1 - * @see PropertySources - * @see Configuration - * @see org.springframework.core.env.PropertySource - * @see org.springframework.core.env.ConfigurableEnvironment#getPropertySources() - * @see org.springframework.core.env.MutablePropertySources - */ -@Target(ElementType.TYPE) -@Retention(RetentionPolicy.RUNTIME) -@Documented -public @interface PropertySource { - - /** - * Indicate the name of this property source. If omitted, a name - * will be generated based on the description of the underlying - * resource. - * @see org.springframework.core.env.PropertySource#getName() - * @see org.springframework.core.io.Resource#getDescription() - */ - String name() default ""; - - /** - * Indicate the resource location(s) of the properties file to be loaded. - * For example, {@code "classpath:/com/myco/app.properties"} or - * {@code "file:/path/to/file"}. - *

Resource location wildcards (e.g. **/*.properties) are not permitted; - * each location must evaluate to exactly one {@code .properties} resource. - *

${...} placeholders will be resolved against any/all property sources already - * registered with the {@code Environment}. See {@linkplain org.springframework.context.annotation.PropertySource above} - * for examples. - *

Each location will be added to the enclosing {@code Environment} as its own - * property source, and in the order declared. - */ - String[] value(); - - /** - * Indicate if failure to find the a {@link #value() property resource} should be - * ignored. - *

{@code true} is appropriate if the properties file is completely optional. - * Default is {@code false}. - * @since 4.0 - */ - boolean ignoreResourceNotFound() default false; - -} diff --git a/skyeye-statistics/src/main/scala/com/jthink/skyeye/statistics/configuration/SparkConfiguration.scala b/skyeye-statistics/src/main/scala/com/jthink/skyeye/statistics/configuration/SparkConfiguration.scala index 15b87df..13f0855 100644 --- a/skyeye-statistics/src/main/scala/com/jthink/skyeye/statistics/configuration/SparkConfiguration.scala +++ b/skyeye-statistics/src/main/scala/com/jthink/skyeye/statistics/configuration/SparkConfiguration.scala @@ -20,7 +20,7 @@ class SparkConfiguration { @Bean def sparkContext(): SparkContext = { - val conf = new SparkConf().setAppName(this.taskProperties.getName) + val conf = new SparkConf().setAppName(this.taskProperties.getRpcJobName) val sc = new SparkContext(conf) sc } diff --git a/skyeye-trace/build.gradle b/skyeye-trace/build.gradle index 45f8d18..f6f0541 100644 --- a/skyeye-trace/build.gradle +++ b/skyeye-trace/build.gradle @@ -5,8 +5,8 @@ apply plugin: 'eclipse' group = 'skyeye' version = '1.0.0' -sourceCompatibility = '1.7' -targetCompatibility = '1.7' +sourceCompatibility = 1.8 +targetCompatibility = 1.8 compileJava.options.encoding = 'UTF-8' buildDir = 'target' diff --git a/skyeye-web/build.gradle b/skyeye-web/build.gradle index 6c09ac1..495d7c5 100644 --- a/skyeye-web/build.gradle +++ b/skyeye-web/build.gradle @@ -1,7 +1,7 @@ apply plugin: 'java' apply plugin: 'eclipse' apply plugin: 'maven' -apply plugin: 'spring-boot' +apply plugin: 'org.springframework.boot' apply plugin: 'application' group = 'skyeye' From 38aa07d18273734ce66ba50d010e529513b6071d Mon Sep 17 00:00:00 2001 From: JThink Date: Fri, 28 Jul 2017 20:53:05 +0800 Subject: [PATCH 05/27] =?UTF-8?q?=E4=BF=AE=E6=94=B9client=E6=9E=B6?= =?UTF-8?q?=E6=9E=84=EF=BC=8C=E5=87=86=E5=A4=87=E6=94=AF=E6=8C=81log4j2?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 
Content-Transfer-Encoding: 8bit --- .gitignore | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index 6c898f0..5d6bd37 100644 --- a/.gitignore +++ b/.gitignore @@ -18,4 +18,7 @@ nohup.out /.apt_generated/ .idea/ disconf/ -/target/ \ No newline at end of file +/target/ +.DS_Store +/.DS_Store + From 7cd1d4bd9c1a2b675342e7535a9f22b3e07c36c9 Mon Sep 17 00:00:00 2001 From: JThink Date: Fri, 28 Jul 2017 20:53:19 +0800 Subject: [PATCH 06/27] =?UTF-8?q?=E4=BF=AE=E6=94=B9client=E6=9E=B6?= =?UTF-8?q?=E6=9E=84=EF=BC=8C=E5=87=86=E5=A4=87=E6=94=AF=E6=8C=81log4j2?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- skyeye-client/skyeye-client-core/todo.md | 2 ++ skyeye-client/skyeye-client-log4j/todo.md | 2 ++ skyeye-client/skyeye-client-log4j2/todo.md | 2 ++ skyeye-client/skyeye-client-logback/todo.md | 2 ++ 4 files changed, 8 insertions(+) create mode 100644 skyeye-client/skyeye-client-core/todo.md create mode 100644 skyeye-client/skyeye-client-log4j/todo.md create mode 100644 skyeye-client/skyeye-client-log4j2/todo.md create mode 100644 skyeye-client/skyeye-client-logback/todo.md diff --git a/skyeye-client/skyeye-client-core/todo.md b/skyeye-client/skyeye-client-core/todo.md new file mode 100644 index 0000000..22e6689 --- /dev/null +++ b/skyeye-client/skyeye-client-core/todo.md @@ -0,0 +1,2 @@ +TODO: + diff --git a/skyeye-client/skyeye-client-log4j/todo.md b/skyeye-client/skyeye-client-log4j/todo.md new file mode 100644 index 0000000..22e6689 --- /dev/null +++ b/skyeye-client/skyeye-client-log4j/todo.md @@ -0,0 +1,2 @@ +TODO: + diff --git a/skyeye-client/skyeye-client-log4j2/todo.md b/skyeye-client/skyeye-client-log4j2/todo.md new file mode 100644 index 0000000..22e6689 --- /dev/null +++ b/skyeye-client/skyeye-client-log4j2/todo.md @@ -0,0 +1,2 @@ +TODO: + diff --git a/skyeye-client/skyeye-client-logback/todo.md b/skyeye-client/skyeye-client-logback/todo.md new file mode 100644 index 
0000000..22e6689 --- /dev/null +++ b/skyeye-client/skyeye-client-logback/todo.md @@ -0,0 +1,2 @@ +TODO: + From 5023f0e9e631bad9459eb746c06b7185f2be4ba2 Mon Sep 17 00:00:00 2001 From: JThink Date: Sun, 30 Jul 2017 11:54:28 +0800 Subject: [PATCH 07/27] upgrade spring boot and some plugin version to latest --- skyeye-alarm/build.gradle | 8 +- skyeye-benchmark/dubbo-service/build.gradle | 1 - .../dubbo-service-a/build.gradle | 4 +- .../dubbo-service-b/build.gradle | 4 +- .../dubbo-service-c/build.gradle | 4 +- .../dubbo-service-d/build.gradle | 4 +- .../dubbo-service-e/build.gradle | 4 +- skyeye-benchmark/log-generater/build.gradle | 8 +- skyeye-client/build.gradle | 113 ++++++++++-------- skyeye-client/settings.gradle | 2 +- skyeye-collector/build.gradle | 8 +- skyeye-data/settings.gradle | 2 +- skyeye-data/skyeye-data-dubbox/build.gradle | 2 +- skyeye-data/skyeye-data-hbase/build.gradle | 2 +- skyeye-data/skyeye-data-jpa/build.gradle | 2 +- skyeye-data/skyeye-data-rabbitmq/build.gradle | 2 +- skyeye-monitor/build.gradle | 8 +- skyeye-web/build.gradle | 8 +- 18 files changed, 87 insertions(+), 99 deletions(-) diff --git a/skyeye-alarm/build.gradle b/skyeye-alarm/build.gradle index 8b8c226..31ff94b 100644 --- a/skyeye-alarm/build.gradle +++ b/skyeye-alarm/build.gradle @@ -21,7 +21,6 @@ repositories { } ext { - logbackVersion = '1.1.6' baseVersion = '1.0.0' dataVersion = '1.0.0' } @@ -35,9 +34,6 @@ dependencies { exclude group: 'ch.qos.logback', module: 'logback-core' } - compile "ch.qos.logback:logback-classic:$logbackVersion" - compile "ch.qos.logback:logback-core:$logbackVersion" - compile "javax.mail:mail:1.4.7" testCompile "org.springframework.boot:spring-boot-starter-test" @@ -53,7 +49,7 @@ mainClassName = 'com.jthink.skyeye.alarm.launcher.Launcher' buildscript { ext { - springBootVersion = '1.5.4.RELEASE' + springBootVersion = '1.5.6.RELEASE' } repositories { @@ -64,7 +60,7 @@ buildscript { dependencies { 
classpath("org.springframework.boot:spring-boot-gradle-plugin:${springBootVersion}") - classpath("io.spring.gradle:dependency-management-plugin:0.6.0.RELEASE") + classpath("io.spring.gradle:dependency-management-plugin:1.0.3.RELEASE") } } diff --git a/skyeye-benchmark/dubbo-service/build.gradle b/skyeye-benchmark/dubbo-service/build.gradle index 08e2217..a132430 100644 --- a/skyeye-benchmark/dubbo-service/build.gradle +++ b/skyeye-benchmark/dubbo-service/build.gradle @@ -36,7 +36,6 @@ subprojects { ext { slf4jVersion = '1.7.21' - logbackVersion = '1.1.6' dubboVersion = '2.8.4-skyeye-trace' dataVersion = '1.0.0' clientVersion = '1.0.0' diff --git a/skyeye-benchmark/dubbo-service/dubbo-service-a/build.gradle b/skyeye-benchmark/dubbo-service/dubbo-service-a/build.gradle index 8ae84d2..ec484fe 100644 --- a/skyeye-benchmark/dubbo-service/dubbo-service-a/build.gradle +++ b/skyeye-benchmark/dubbo-service/dubbo-service-a/build.gradle @@ -28,7 +28,7 @@ mainClassName = "com.jthink.skyeye.benchmark.dubbo.service.a.application.Applica buildscript { ext { - springBootVersion = '1.5.4.RELEASE' + springBootVersion = '1.5.6.RELEASE' } repositories { @@ -39,7 +39,7 @@ buildscript { dependencies { classpath("org.springframework.boot:spring-boot-gradle-plugin:${springBootVersion}") - classpath("io.spring.gradle:dependency-management-plugin:0.6.0.RELEASE") + classpath("io.spring.gradle:dependency-management-plugin:1.0.3.RELEASE") } } diff --git a/skyeye-benchmark/dubbo-service/dubbo-service-b/build.gradle b/skyeye-benchmark/dubbo-service/dubbo-service-b/build.gradle index 0877f1a..50beb60 100644 --- a/skyeye-benchmark/dubbo-service/dubbo-service-b/build.gradle +++ b/skyeye-benchmark/dubbo-service/dubbo-service-b/build.gradle @@ -28,7 +28,7 @@ mainClassName = "com.jthink.skyeye.benchmark.dubbo.service.a.application.Applica buildscript { ext { - springBootVersion = '1.5.4.RELEASE' + springBootVersion = '1.5.6.RELEASE' } repositories { @@ -39,7 +39,7 @@ buildscript { dependencies { 
classpath("org.springframework.boot:spring-boot-gradle-plugin:${springBootVersion}") - classpath("io.spring.gradle:dependency-management-plugin:0.6.0.RELEASE") + classpath("io.spring.gradle:dependency-management-plugin:1.0.3.RELEASE") } } diff --git a/skyeye-benchmark/dubbo-service/dubbo-service-c/build.gradle b/skyeye-benchmark/dubbo-service/dubbo-service-c/build.gradle index 0877f1a..50beb60 100644 --- a/skyeye-benchmark/dubbo-service/dubbo-service-c/build.gradle +++ b/skyeye-benchmark/dubbo-service/dubbo-service-c/build.gradle @@ -28,7 +28,7 @@ mainClassName = "com.jthink.skyeye.benchmark.dubbo.service.a.application.Applica buildscript { ext { - springBootVersion = '1.5.4.RELEASE' + springBootVersion = '1.5.6.RELEASE' } repositories { @@ -39,7 +39,7 @@ buildscript { dependencies { classpath("org.springframework.boot:spring-boot-gradle-plugin:${springBootVersion}") - classpath("io.spring.gradle:dependency-management-plugin:0.6.0.RELEASE") + classpath("io.spring.gradle:dependency-management-plugin:1.0.3.RELEASE") } } diff --git a/skyeye-benchmark/dubbo-service/dubbo-service-d/build.gradle b/skyeye-benchmark/dubbo-service/dubbo-service-d/build.gradle index 0877f1a..50beb60 100644 --- a/skyeye-benchmark/dubbo-service/dubbo-service-d/build.gradle +++ b/skyeye-benchmark/dubbo-service/dubbo-service-d/build.gradle @@ -28,7 +28,7 @@ mainClassName = "com.jthink.skyeye.benchmark.dubbo.service.a.application.Applica buildscript { ext { - springBootVersion = '1.5.4.RELEASE' + springBootVersion = '1.5.6.RELEASE' } repositories { @@ -39,7 +39,7 @@ buildscript { dependencies { classpath("org.springframework.boot:spring-boot-gradle-plugin:${springBootVersion}") - classpath("io.spring.gradle:dependency-management-plugin:0.6.0.RELEASE") + classpath("io.spring.gradle:dependency-management-plugin:1.0.3.RELEASE") } } diff --git a/skyeye-benchmark/dubbo-service/dubbo-service-e/build.gradle b/skyeye-benchmark/dubbo-service/dubbo-service-e/build.gradle index 0877f1a..50beb60 100644 --- 
a/skyeye-benchmark/dubbo-service/dubbo-service-e/build.gradle +++ b/skyeye-benchmark/dubbo-service/dubbo-service-e/build.gradle @@ -28,7 +28,7 @@ mainClassName = "com.jthink.skyeye.benchmark.dubbo.service.a.application.Applica buildscript { ext { - springBootVersion = '1.5.4.RELEASE' + springBootVersion = '1.5.6.RELEASE' } repositories { @@ -39,7 +39,7 @@ buildscript { dependencies { classpath("org.springframework.boot:spring-boot-gradle-plugin:${springBootVersion}") - classpath("io.spring.gradle:dependency-management-plugin:0.6.0.RELEASE") + classpath("io.spring.gradle:dependency-management-plugin:1.0.3.RELEASE") } } diff --git a/skyeye-benchmark/log-generater/build.gradle b/skyeye-benchmark/log-generater/build.gradle index 5eec60c..201aed4 100644 --- a/skyeye-benchmark/log-generater/build.gradle +++ b/skyeye-benchmark/log-generater/build.gradle @@ -21,7 +21,6 @@ repositories { } ext { - logbackVersion = '1.1.6' clientVersion = '1.0.0' dataVersion = '1.0.0' } @@ -35,9 +34,6 @@ dependencies { exclude group: 'ch.qos.logback', module: 'logback-core' } - compile "ch.qos.logback:logback-classic:$logbackVersion" - compile "ch.qos.logback:logback-core:$logbackVersion" - testCompile "org.springframework.boot:spring-boot-starter-test" } @@ -51,7 +47,7 @@ mainClassName = 'com.unionpaysmart.alch.launcher.Launcher' buildscript { ext { - springBootVersion = '1.5.4.RELEASE' + springBootVersion = '1.5.6.RELEASE' } repositories { @@ -62,7 +58,7 @@ buildscript { dependencies { classpath("org.springframework.boot:spring-boot-gradle-plugin:${springBootVersion}") - classpath("io.spring.gradle:dependency-management-plugin:0.6.0.RELEASE") + classpath("io.spring.gradle:dependency-management-plugin:1.0.3.RELEASE") } } diff --git a/skyeye-client/build.gradle b/skyeye-client/build.gradle index 5b0d3ac..5c96b76 100644 --- a/skyeye-client/build.gradle +++ b/skyeye-client/build.gradle @@ -1,64 +1,73 @@ -apply plugin: 'java' -apply plugin: 'maven' -apply plugin: 'eclipse' +allprojects { + 
apply plugin: 'java' + apply plugin: 'maven' + apply plugin: 'eclipse' -group = 'skyeye' -version = '1.0.0' + group = 'skyeye' + version = '1.0.0' -sourceCompatibility = 1.8 -targetCompatibility = 1.8 -compileJava.options.encoding = 'UTF-8' -buildDir = 'target' + sourceCompatibility = 1.8 + targetCompatibility = 1.8 + compileJava.options.encoding = 'UTF-8' + buildDir = 'target' -ext { - mavenPublicUrl = 'http://192.168.88.8:8081/nexus/content/repositories/public' - mavenReleaseUrl = 'http://192.168.88.8:8081/nexus/content/repositories/releases' - mavenSnapshotUrl = "http://192.168.88.8:8081/nexus/content/repositories/snapshots" - kafkaVersion = '0.10.0.1' - logbackVersion = '1.1.6' - log4jVersion = '1.2.17' - zookeeperVersion = '3.4.6' - zkclientVersion = '0.9.1-up' - baseVersion = '1.0.0' - traceVersion = '1.0.0' -} + ext { + mavenPublicUrl = 'http://192.168.88.8:8081/nexus/content/repositories/public' + mavenReleaseUrl = 'http://192.168.88.8:8081/nexus/content/repositories/releases' + mavenSnapshotUrl = 'http://192.168.88.8:8081/nexus/content/repositories/snapshots' + } -repositories { - mavenLocal() - maven { url mavenPublicUrl } - maven { url mavenReleaseUrl } - maven { url mavenSnapshotUrl } - mavenCentral() -} + repositories { + mavenLocal() + maven { url mavenPublicUrl } + maven { url mavenReleaseUrl } + maven { url mavenSnapshotUrl } + mavenCentral() + } -task sourcesJar(type: Jar, dependsOn: classes) { - classifier = 'sources' - from sourceSets.main.allSource -} + task sourcesJar(type: Jar, dependsOn: classes) { + classifier = 'sources' + from sourceSets.main.allSource + } -artifacts { - archives sourcesJar -} + artifacts { + archives sourcesJar + } -uploadArchives { - repositories { - mavenDeployer { - repository(url: mavenReleaseUrl) { - authentication(userName: 'admin', password: 'admin123') - } - snapshotRepository(url: mavenSnapshotUrl) { - authentication(userName: 'admin', password: 'admin123') + uploadArchives { + repositories { + mavenDeployer { + 
repository(url: mavenReleaseUrl) { + authentication(userName: 'admin', password: 'admin123') + } + snapshotRepository(url: mavenSnapshotUrl) { + authentication(userName: 'admin', password: 'admin123') + } } } } } -dependencies { - compile "skyeye:skyeye-base:$baseVersion" - compile "skyeye:skyeye-trace:$traceVersion" - compile "org.apache.kafka:kafka-clients:$kafkaVersion" - compile "ch.qos.logback:logback-classic:$logbackVersion" - compile "log4j:log4j:$log4jVersion" - compile "org.apache.zookeeper:zookeeper:$zookeeperVersion" - compile "com.101tec:zkclient:$zkclientVersion" -} \ No newline at end of file +subprojects { + apply plugin: 'java' + apply plugin: 'maven' + apply plugin: 'eclipse' + + ext { + kafkaVersion = '0.10.0.1' + zookeeperVersion = '3.4.6' + zkclientVersion = '0.9.1-up' + baseVersion = '1.0.0' + traceVersion = '1.0.0' + } + + [compileJava, compileTestJava]*.options*.encoding = 'UTF-8' + + dependencies { + compile "skyeye:skyeye-base:$baseVersion" + compile "skyeye:skyeye-trace:$traceVersion" + compile "org.apache.kafka:kafka-clients:$kafkaVersion" + compile "org.apache.zookeeper:zookeeper:$zookeeperVersion" + compile "com.101tec:zkclient:$zkclientVersion" + } +} diff --git a/skyeye-client/settings.gradle b/skyeye-client/settings.gradle index 7fd3f0e..9e3f39d 100644 --- a/skyeye-client/settings.gradle +++ b/skyeye-client/settings.gradle @@ -1 +1 @@ -rootProject.name = 'skyeye-client' \ No newline at end of file +include 'skyeye-client-core', 'skyeye-client-log4j', 'skyeye-client-log4j2', 'skyeye-client-logback' diff --git a/skyeye-collector/build.gradle b/skyeye-collector/build.gradle index c78d2b6..96462bc 100644 --- a/skyeye-collector/build.gradle +++ b/skyeye-collector/build.gradle @@ -22,7 +22,6 @@ repositories { } ext { - logbackVersion = '1.1.6' kafkaVersion = '0.10.0.1' esVersion = '2.3.3' traceVersion = '1.0.0' @@ -43,9 +42,6 @@ dependencies { compile "org.apache.kafka:kafka-clients:$kafkaVersion" - compile 
"ch.qos.logback:logback-classic:$logbackVersion" - compile "ch.qos.logback:logback-core:$logbackVersion" - compile "org.elasticsearch:elasticsearch:$esVersion" compile "org.springframework.boot:spring-boot-starter-data-redis" @@ -82,7 +78,7 @@ mainClassName = 'com.jthink.skyeye.collector.launcher.Launcher' buildscript { ext { - springBootVersion = '1.5.4.RELEASE' + springBootVersion = '1.5.6.RELEASE' } repositories { @@ -93,7 +89,7 @@ buildscript { dependencies { classpath("org.springframework.boot:spring-boot-gradle-plugin:${springBootVersion}") - classpath("io.spring.gradle:dependency-management-plugin:0.6.0.RELEASE") + classpath("io.spring.gradle:dependency-management-plugin:1.0.3.RELEASE") } } diff --git a/skyeye-data/settings.gradle b/skyeye-data/settings.gradle index fb754be..5edf8cd 100644 --- a/skyeye-data/settings.gradle +++ b/skyeye-data/settings.gradle @@ -1 +1 @@ -include 'skyeye-data-dubbox', 'skyeye-data-hbase', 'skyeye-data-http', 'skyeye-data-jpa', 'skyeye-data-rabbitmq' \ No newline at end of file +include 'skyeye-data-dubbox', 'skyeye-data-hbase', 'skyeye-data-http', 'skyeye-data-jpa', 'skyeye-data-rabbitmq' diff --git a/skyeye-data/skyeye-data-dubbox/build.gradle b/skyeye-data/skyeye-data-dubbox/build.gradle index c50c9d9..8ea1ed6 100644 --- a/skyeye-data/skyeye-data-dubbox/build.gradle +++ b/skyeye-data/skyeye-data-dubbox/build.gradle @@ -8,7 +8,7 @@ compileJava.options.encoding = 'UTF-8' buildDir = 'target' ext { - springBootVersion = '1.5.4.RELEASE' + springBootVersion = '1.5.6.RELEASE' springVersion = '4.2.7.RELEASE' dubboVersion = '2.8.4-skyeye-trace' slf4jVersion = '1.7.21' diff --git a/skyeye-data/skyeye-data-hbase/build.gradle b/skyeye-data/skyeye-data-hbase/build.gradle index e75ddbb..7611a47 100644 --- a/skyeye-data/skyeye-data-hbase/build.gradle +++ b/skyeye-data/skyeye-data-hbase/build.gradle @@ -17,7 +17,7 @@ repositories { } ext { - springBootVersion = '1.5.4.RELEASE' + springBootVersion = '1.5.6.RELEASE' springVersion = 
'4.2.7.RELEASE' hbaseVersion = '1.0.0-cdh5.4.0' } diff --git a/skyeye-data/skyeye-data-jpa/build.gradle b/skyeye-data/skyeye-data-jpa/build.gradle index 023a689..effe116 100644 --- a/skyeye-data/skyeye-data-jpa/build.gradle +++ b/skyeye-data/skyeye-data-jpa/build.gradle @@ -8,7 +8,7 @@ compileJava.options.encoding = 'UTF-8' buildDir = 'target' ext { - springBootVersion = '1.5.4.RELEASE' + springBootVersion = '1.5.6.RELEASE' } dependencies { diff --git a/skyeye-data/skyeye-data-rabbitmq/build.gradle b/skyeye-data/skyeye-data-rabbitmq/build.gradle index 466f12f..df2add3 100644 --- a/skyeye-data/skyeye-data-rabbitmq/build.gradle +++ b/skyeye-data/skyeye-data-rabbitmq/build.gradle @@ -8,7 +8,7 @@ compileJava.options.encoding = 'UTF-8' buildDir = 'target' ext { - springBootVersion = '1.5.4.RELEASE' + springBootVersion = '1.5.6.RELEASE' jacksonVersion = '1.9.13' } diff --git a/skyeye-monitor/build.gradle b/skyeye-monitor/build.gradle index 7cb6f67..768ed59 100644 --- a/skyeye-monitor/build.gradle +++ b/skyeye-monitor/build.gradle @@ -21,7 +21,6 @@ repositories { } ext { - logbackVersion = '1.1.6' zookeeperVersion = '3.4.6' curatorVersion = '2.11.0' baseVersion = '1.0.0' @@ -42,9 +41,6 @@ dependencies { compile("org.apache.commons:commons-dbcp2:2.1.1") compile("mysql:mysql-connector-java:5.1.39") - compile "ch.qos.logback:logback-classic:$logbackVersion" - compile "ch.qos.logback:logback-core:$logbackVersion" - compile "org.apache.zookeeper:zookeeper:$zookeeperVersion" compile "org.codehaus.jackson:jackson-core-asl:$jacksonVersion" @@ -68,7 +64,7 @@ mainClassName = 'com.jthink.skyeye.monitor.launcher.Launcher' buildscript { ext { - springBootVersion = '1.5.4.RELEASE' + springBootVersion = '1.5.6.RELEASE' } repositories { @@ -79,7 +75,7 @@ buildscript { dependencies { classpath("org.springframework.boot:spring-boot-gradle-plugin:${springBootVersion}") - classpath("io.spring.gradle:dependency-management-plugin:0.6.0.RELEASE") + 
classpath("io.spring.gradle:dependency-management-plugin:1.0.3.RELEASE") } } diff --git a/skyeye-web/build.gradle b/skyeye-web/build.gradle index 495d7c5..0bd3d82 100644 --- a/skyeye-web/build.gradle +++ b/skyeye-web/build.gradle @@ -24,7 +24,6 @@ repositories { } ext { - logbackVersion = '1.1.6' baseVersion = '1.0.0' dataVersion = '1.0.0' jacksonVersion = '1.9.13' @@ -53,9 +52,6 @@ dependencies { compile "org.apache.httpcomponents:httpclient:$httpclientVersion" - compile "ch.qos.logback:logback-classic:$logbackVersion" - compile "ch.qos.logback:logback-core:$logbackVersion" - compile "com.101tec:zkclient:$zkclientVersion" compile "org.codehaus.jackson:jackson-core-asl:$jacksonVersion" @@ -76,7 +72,7 @@ mainClassName = 'com.jthink.skyeye.web.application.Application' buildscript { ext { - springBootVersion = '1.5.4.RELEASE' + springBootVersion = '1.5.6.RELEASE' } repositories { @@ -87,7 +83,7 @@ buildscript { dependencies { classpath("org.springframework.boot:spring-boot-gradle-plugin:${springBootVersion}") - classpath("io.spring.gradle:dependency-management-plugin:0.6.0.RELEASE") + classpath("io.spring.gradle:dependency-management-plugin:1.0.3.RELEASE") } } From f665b0d45ba8d9b8099cd20d5d8f45e8356e11f7 Mon Sep 17 00:00:00 2001 From: JThink Date: Sun, 30 Jul 2017 11:58:13 +0800 Subject: [PATCH 08/27] modify the launcher class --- skyeye-benchmark/dubbo-service/dubbo-service-b/build.gradle | 2 +- skyeye-benchmark/dubbo-service/dubbo-service-c/build.gradle | 2 +- skyeye-benchmark/dubbo-service/dubbo-service-d/build.gradle | 2 +- skyeye-benchmark/dubbo-service/dubbo-service-e/build.gradle | 2 +- skyeye-benchmark/log-generater/build.gradle | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/skyeye-benchmark/dubbo-service/dubbo-service-b/build.gradle b/skyeye-benchmark/dubbo-service/dubbo-service-b/build.gradle index 50beb60..2f36a6a 100644 --- a/skyeye-benchmark/dubbo-service/dubbo-service-b/build.gradle +++ 
b/skyeye-benchmark/dubbo-service/dubbo-service-b/build.gradle @@ -24,7 +24,7 @@ configurations { compile.exclude group: "org.slf4j", module: "slf4j-log4j12" } -mainClassName = "com.jthink.skyeye.benchmark.dubbo.service.a.application.Application" +mainClassName = "com.jthink.skyeye.benchmark.dubbo.service.b.launcher.Launcher" buildscript { ext { diff --git a/skyeye-benchmark/dubbo-service/dubbo-service-c/build.gradle b/skyeye-benchmark/dubbo-service/dubbo-service-c/build.gradle index 50beb60..0ead6ac 100644 --- a/skyeye-benchmark/dubbo-service/dubbo-service-c/build.gradle +++ b/skyeye-benchmark/dubbo-service/dubbo-service-c/build.gradle @@ -24,7 +24,7 @@ configurations { compile.exclude group: "org.slf4j", module: "slf4j-log4j12" } -mainClassName = "com.jthink.skyeye.benchmark.dubbo.service.a.application.Application" +mainClassName = "com.jthink.skyeye.benchmark.dubbo.service.c.launcher.Launcher" buildscript { ext { diff --git a/skyeye-benchmark/dubbo-service/dubbo-service-d/build.gradle b/skyeye-benchmark/dubbo-service/dubbo-service-d/build.gradle index 50beb60..354d53a 100644 --- a/skyeye-benchmark/dubbo-service/dubbo-service-d/build.gradle +++ b/skyeye-benchmark/dubbo-service/dubbo-service-d/build.gradle @@ -24,7 +24,7 @@ configurations { compile.exclude group: "org.slf4j", module: "slf4j-log4j12" } -mainClassName = "com.jthink.skyeye.benchmark.dubbo.service.a.application.Application" +mainClassName = "com.jthink.skyeye.benchmark.dubbo.service.d.launcher.Launcher" buildscript { ext { diff --git a/skyeye-benchmark/dubbo-service/dubbo-service-e/build.gradle b/skyeye-benchmark/dubbo-service/dubbo-service-e/build.gradle index 50beb60..189839f 100644 --- a/skyeye-benchmark/dubbo-service/dubbo-service-e/build.gradle +++ b/skyeye-benchmark/dubbo-service/dubbo-service-e/build.gradle @@ -24,7 +24,7 @@ configurations { compile.exclude group: "org.slf4j", module: "slf4j-log4j12" } -mainClassName = "com.jthink.skyeye.benchmark.dubbo.service.a.application.Application" 
+mainClassName = "com.jthink.skyeye.benchmark.dubbo.service.e.launcher.Launcher" buildscript { ext { diff --git a/skyeye-benchmark/log-generater/build.gradle b/skyeye-benchmark/log-generater/build.gradle index 201aed4..f652e82 100644 --- a/skyeye-benchmark/log-generater/build.gradle +++ b/skyeye-benchmark/log-generater/build.gradle @@ -43,7 +43,7 @@ configurations { compile.exclude group: "org.springframework", module: "spring-web" } -mainClassName = 'com.unionpaysmart.alch.launcher.Launcher' +mainClassName = 'com.jthink.skyeye.benchmark.log.generater.launcher.Launcher' buildscript { ext { From 531fb6d565d260e828bb1ae496130c452ff02ec7 Mon Sep 17 00:00:00 2001 From: JThink Date: Mon, 31 Jul 2017 09:31:46 +0800 Subject: [PATCH 09/27] upgrade --- .gitignore | 1 + skyeye-benchmark/dubbo-service/dubbo-service-a/build.gradle | 2 -- skyeye-benchmark/dubbo-service/dubbo-service-b/build.gradle | 2 -- skyeye-benchmark/dubbo-service/dubbo-service-c/build.gradle | 2 -- skyeye-benchmark/dubbo-service/dubbo-service-d/build.gradle | 2 -- skyeye-benchmark/dubbo-service/dubbo-service-e/build.gradle | 2 -- .../java/com/jthink/skyeye/trace/test/id/UniqueIdGenTest.java | 2 +- 7 files changed, 2 insertions(+), 11 deletions(-) diff --git a/.gitignore b/.gitignore index 5d6bd37..2db9752 100644 --- a/.gitignore +++ b/.gitignore @@ -21,4 +21,5 @@ disconf/ /target/ .DS_Store /.DS_Store +/out/ diff --git a/skyeye-benchmark/dubbo-service/dubbo-service-a/build.gradle b/skyeye-benchmark/dubbo-service/dubbo-service-a/build.gradle index ec484fe..b8430c8 100644 --- a/skyeye-benchmark/dubbo-service/dubbo-service-a/build.gradle +++ b/skyeye-benchmark/dubbo-service/dubbo-service-a/build.gradle @@ -13,8 +13,6 @@ dependencies { } compile "org.apache.zookeeper:zookeeper:$zookeeperVerison" compile "com.101tec:zkclient:$zkClientVersion" - compile "ch.qos.logback:logback-classic:$logbackVersion" - compile "ch.qos.logback:logback-core:$logbackVersion" testCompile 
"org.springframework.boot:spring-boot-starter-test" } diff --git a/skyeye-benchmark/dubbo-service/dubbo-service-b/build.gradle b/skyeye-benchmark/dubbo-service/dubbo-service-b/build.gradle index 2f36a6a..92300c8 100644 --- a/skyeye-benchmark/dubbo-service/dubbo-service-b/build.gradle +++ b/skyeye-benchmark/dubbo-service/dubbo-service-b/build.gradle @@ -13,8 +13,6 @@ dependencies { } compile "org.apache.zookeeper:zookeeper:$zookeeperVerison" compile "com.101tec:zkclient:$zkClientVersion" - compile "ch.qos.logback:logback-classic:$logbackVersion" - compile "ch.qos.logback:logback-core:$logbackVersion" testCompile "org.springframework.boot:spring-boot-starter-test" } diff --git a/skyeye-benchmark/dubbo-service/dubbo-service-c/build.gradle b/skyeye-benchmark/dubbo-service/dubbo-service-c/build.gradle index 0ead6ac..5bf69f0 100644 --- a/skyeye-benchmark/dubbo-service/dubbo-service-c/build.gradle +++ b/skyeye-benchmark/dubbo-service/dubbo-service-c/build.gradle @@ -13,8 +13,6 @@ dependencies { } compile "org.apache.zookeeper:zookeeper:$zookeeperVerison" compile "com.101tec:zkclient:$zkClientVersion" - compile "ch.qos.logback:logback-classic:$logbackVersion" - compile "ch.qos.logback:logback-core:$logbackVersion" testCompile "org.springframework.boot:spring-boot-starter-test" } diff --git a/skyeye-benchmark/dubbo-service/dubbo-service-d/build.gradle b/skyeye-benchmark/dubbo-service/dubbo-service-d/build.gradle index 354d53a..ae31489 100644 --- a/skyeye-benchmark/dubbo-service/dubbo-service-d/build.gradle +++ b/skyeye-benchmark/dubbo-service/dubbo-service-d/build.gradle @@ -13,8 +13,6 @@ dependencies { } compile "org.apache.zookeeper:zookeeper:$zookeeperVerison" compile "com.101tec:zkclient:$zkClientVersion" - compile "ch.qos.logback:logback-classic:$logbackVersion" - compile "ch.qos.logback:logback-core:$logbackVersion" testCompile "org.springframework.boot:spring-boot-starter-test" } diff --git a/skyeye-benchmark/dubbo-service/dubbo-service-e/build.gradle 
b/skyeye-benchmark/dubbo-service/dubbo-service-e/build.gradle index 189839f..4fd0826 100644 --- a/skyeye-benchmark/dubbo-service/dubbo-service-e/build.gradle +++ b/skyeye-benchmark/dubbo-service/dubbo-service-e/build.gradle @@ -13,8 +13,6 @@ dependencies { } compile "org.apache.zookeeper:zookeeper:$zookeeperVerison" compile "com.101tec:zkclient:$zkClientVersion" - compile "ch.qos.logback:logback-classic:$logbackVersion" - compile "ch.qos.logback:logback-core:$logbackVersion" testCompile "org.springframework.boot:spring-boot-starter-test" } diff --git a/skyeye-trace/src/test/java/com/jthink/skyeye/trace/test/id/UniqueIdGenTest.java b/skyeye-trace/src/test/java/com/jthink/skyeye/trace/test/id/UniqueIdGenTest.java index c2d1627..4522c55 100644 --- a/skyeye-trace/src/test/java/com/jthink/skyeye/trace/test/id/UniqueIdGenTest.java +++ b/skyeye-trace/src/test/java/com/jthink/skyeye/trace/test/id/UniqueIdGenTest.java @@ -18,7 +18,7 @@ public static void main(String[] args) { } public static void testTimeConsume() { - UniqueIdGen idGen = new UniqueIdGen(5); + UniqueIdGen idGen = UniqueIdGen.getInstance(5); long start = System.currentTimeMillis(); for (int i = 0; i < 1024000; ++i) { System.out.println(idGen.nextId()); From 4ed87a718e1ea7cc65b7de3b2c56a4300606aa5f Mon Sep 17 00:00:00 2001 From: JThink Date: Mon, 31 Jul 2017 10:38:13 +0800 Subject: [PATCH 10/27] =?UTF-8?q?=E5=AE=8C=E6=88=90skyeye-client-core?= =?UTF-8?q?=E6=A0=B8=E5=BF=83=E4=BB=A3=E7=A0=81?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../jthink/skyeye/base/constant/RpcType.java | 1 + skyeye-client/settings.gradle | 3 +- skyeye-client/skyeye-client-core/build.gradle | 29 ++ .../skyeye-client-core/settings.gradle | 1 + .../client/core}/constant/KafkaConfig.java | 2 +- .../client/core}/constant/NodeMode.java | 2 +- .../client/core/register/ZkRegister.java | 116 +++++ .../skyeye/client/core}/util/SysUtil.java | 2 +- .../kafka/logback/test/KafkaProducerDemo.java | 
2 +- .../kafka/partition/test/PartitionTest.java | 2 +- .../skyeye/client/core}/zk/test/ZkTest.java | 2 +- skyeye-client/skyeye-client-core/todo.md | 2 - .../client/kafka/LazySingletonProducer.java | 50 --- .../client/kafka/log4j/KafkaAppender.java | 425 ------------------ .../client/kafka/logback/KafkaAppender.java | 356 --------------- .../logback/encoder/KafkaLayoutEncoder.java | 64 --- .../kafka/partitioner/AppHostKeyBuilder.java | 49 -- .../client/kafka/partitioner/KeyBuilder.java | 20 - .../kafka/partitioner/KeyModPartitioner.java | 44 -- .../skyeye/client/register/ZkRegister.java | 77 ---- 20 files changed, 155 insertions(+), 1094 deletions(-) create mode 100644 skyeye-client/skyeye-client-core/build.gradle create mode 100644 skyeye-client/skyeye-client-core/settings.gradle rename skyeye-client/{src/main/java/com/jthink/skyeye/client => skyeye-client-core/src/main/java/com/jthink/skyeye/client/core}/constant/KafkaConfig.java (97%) rename skyeye-client/{src/main/java/com/jthink/skyeye/client => skyeye-client-core/src/main/java/com/jthink/skyeye/client/core}/constant/NodeMode.java (92%) create mode 100644 skyeye-client/skyeye-client-core/src/main/java/com/jthink/skyeye/client/core/register/ZkRegister.java rename skyeye-client/{src/main/java/com/jthink/skyeye/client => skyeye-client-core/src/main/java/com/jthink/skyeye/client/core}/util/SysUtil.java (95%) rename skyeye-client/{src/test/java/com/jthink/skyeye/client => skyeye-client-core/src/test/java/com/jthink/skyeye/client/core}/kafka/logback/test/KafkaProducerDemo.java (97%) rename skyeye-client/{src/test/java/com/jthink/skyeye/client => skyeye-client-core/src/test/java/com/jthink/skyeye/client/core}/kafka/partition/test/PartitionTest.java (92%) rename skyeye-client/{src/test/java/com/jthink/skyeye/client => skyeye-client-core/src/test/java/com/jthink/skyeye/client/core}/zk/test/ZkTest.java (98%) delete mode 100644 skyeye-client/skyeye-client-core/todo.md delete mode 100644 
skyeye-client/src/main/java/com/jthink/skyeye/client/kafka/LazySingletonProducer.java delete mode 100644 skyeye-client/src/main/java/com/jthink/skyeye/client/kafka/log4j/KafkaAppender.java delete mode 100644 skyeye-client/src/main/java/com/jthink/skyeye/client/kafka/logback/KafkaAppender.java delete mode 100644 skyeye-client/src/main/java/com/jthink/skyeye/client/kafka/logback/encoder/KafkaLayoutEncoder.java delete mode 100644 skyeye-client/src/main/java/com/jthink/skyeye/client/kafka/partitioner/AppHostKeyBuilder.java delete mode 100644 skyeye-client/src/main/java/com/jthink/skyeye/client/kafka/partitioner/KeyBuilder.java delete mode 100644 skyeye-client/src/main/java/com/jthink/skyeye/client/kafka/partitioner/KeyModPartitioner.java delete mode 100644 skyeye-client/src/main/java/com/jthink/skyeye/client/register/ZkRegister.java diff --git a/skyeye-base/src/main/java/com/jthink/skyeye/base/constant/RpcType.java b/skyeye-base/src/main/java/com/jthink/skyeye/base/constant/RpcType.java index 322a635..c36091b 100644 --- a/skyeye-base/src/main/java/com/jthink/skyeye/base/constant/RpcType.java +++ b/skyeye-base/src/main/java/com/jthink/skyeye/base/constant/RpcType.java @@ -12,6 +12,7 @@ public enum RpcType { none(Constants.RPC_TYPE_NONE, "none"), dubbo(Constants.RPC_TYPE_DUBBO, "dubbo"), + springCloud(Constants.RPC_TYPE_SC, "spring-cloud"), thrift(Constants.RPC_TYPE_THRIFT, "thrift"); private String symbol; diff --git a/skyeye-client/settings.gradle b/skyeye-client/settings.gradle index 9e3f39d..03a1a85 100644 --- a/skyeye-client/settings.gradle +++ b/skyeye-client/settings.gradle @@ -1 +1,2 @@ -include 'skyeye-client-core', 'skyeye-client-log4j', 'skyeye-client-log4j2', 'skyeye-client-logback' +include 'skyeye-client-core' +// , 'skyeye-client-log4j', 'skyeye-client-log4j2', 'skyeye-client-logback' diff --git a/skyeye-client/skyeye-client-core/build.gradle b/skyeye-client/skyeye-client-core/build.gradle new file mode 100644 index 0000000..e6ef5a3 --- /dev/null +++ 
b/skyeye-client/skyeye-client-core/build.gradle @@ -0,0 +1,29 @@ +apply plugin: 'java' +apply plugin: 'maven' +apply plugin: 'eclipse' + +sourceCompatibility = 1.8 +targetCompatibility = 1.8 +compileJava.options.encoding = 'UTF-8' +buildDir = 'target' + +ext { +} + +repositories { + mavenLocal() + maven { url mavenPublicUrl } + maven { url mavenReleaseUrl } + maven { url mavenSnapshotUrl } + mavenCentral() +} + +dependencies { +} + +configurations { + compile.exclude group: "log4j", module: "log4j" + compile.exclude group: "org.slf4j", module: "slf4j-log4j12" + compile.exclude group: "ch.qos.logback", module: "logback-classic" + compile.exclude group: "ch.qos.logback", module: "logback-core" +} diff --git a/skyeye-client/skyeye-client-core/settings.gradle b/skyeye-client/skyeye-client-core/settings.gradle new file mode 100644 index 0000000..f8470e2 --- /dev/null +++ b/skyeye-client/skyeye-client-core/settings.gradle @@ -0,0 +1 @@ +rootProject.name = 'skyeye-client-core' diff --git a/skyeye-client/src/main/java/com/jthink/skyeye/client/constant/KafkaConfig.java b/skyeye-client/skyeye-client-core/src/main/java/com/jthink/skyeye/client/core/constant/KafkaConfig.java similarity index 97% rename from skyeye-client/src/main/java/com/jthink/skyeye/client/constant/KafkaConfig.java rename to skyeye-client/skyeye-client-core/src/main/java/com/jthink/skyeye/client/core/constant/KafkaConfig.java index 44b4924..c299db7 100644 --- a/skyeye-client/src/main/java/com/jthink/skyeye/client/constant/KafkaConfig.java +++ b/skyeye-client/skyeye-client-core/src/main/java/com/jthink/skyeye/client/core/constant/KafkaConfig.java @@ -1,4 +1,4 @@ -package com.jthink.skyeye.client.constant; +package com.jthink.skyeye.client.core.constant; import java.util.HashSet; import java.util.Set; diff --git a/skyeye-client/src/main/java/com/jthink/skyeye/client/constant/NodeMode.java b/skyeye-client/skyeye-client-core/src/main/java/com/jthink/skyeye/client/core/constant/NodeMode.java similarity index 92% 
rename from skyeye-client/src/main/java/com/jthink/skyeye/client/constant/NodeMode.java rename to skyeye-client/skyeye-client-core/src/main/java/com/jthink/skyeye/client/core/constant/NodeMode.java index b6a20cc..481d58d 100644 --- a/skyeye-client/src/main/java/com/jthink/skyeye/client/constant/NodeMode.java +++ b/skyeye-client/skyeye-client-core/src/main/java/com/jthink/skyeye/client/core/constant/NodeMode.java @@ -1,4 +1,4 @@ -package com.jthink.skyeye.client.constant; +package com.jthink.skyeye.client.core.constant; import com.jthink.skyeye.base.constant.Constants; diff --git a/skyeye-client/skyeye-client-core/src/main/java/com/jthink/skyeye/client/core/register/ZkRegister.java b/skyeye-client/skyeye-client-core/src/main/java/com/jthink/skyeye/client/core/register/ZkRegister.java new file mode 100644 index 0000000..788bbbd --- /dev/null +++ b/skyeye-client/skyeye-client-core/src/main/java/com/jthink/skyeye/client/core/register/ZkRegister.java @@ -0,0 +1,116 @@ +package com.jthink.skyeye.client.core.register; + +import com.jthink.skyeye.base.constant.Constants; +import com.jthink.skyeye.base.constant.RpcType; +import com.jthink.skyeye.client.core.constant.NodeMode; +import com.jthink.skyeye.client.core.util.SysUtil; +import com.jthink.skyeye.trace.dto.RegisterDto; +import com.jthink.skyeye.trace.generater.IncrementIdGen; +import com.jthink.skyeye.trace.registry.Registry; +import com.jthink.skyeye.trace.registry.ZookeeperRegistry; +import org.I0Itec.zkclient.ZkClient; + +/** + * JThink@JThink + * + * @author JThink + * @version 0.0.1 + * @desc zookeeper注册中心 + * @date 2016-09-21 14:03:46 + */ +public class ZkRegister { + + // zkClient + private ZkClient client; + + public ZkRegister(ZkClient client) { + this.client = client; + } + + /** + * 向注册中心注册节点信息 + * @param host + * @param app + * @param mail + */ + public void registerNode(String host, String app, String mail) { + // 注册永久节点用于历史日志查询 + this.create(Constants.SLASH + app + Constants.SLASH + host, 
NodeMode.PERSISTENT); + this.getClient().writeData(Constants.ROOT_PATH_PERSISTENT + Constants.SLASH + app + Constants.SLASH + host, + mail + Constants.SEMICOLON + SysUtil.userDir); + // 注册临时节点用于日志滚屏 + this.getClient().createPersistent(Constants.ROOT_PATH_EPHEMERAL + Constants.SLASH + app, true); + this.create(Constants.SLASH + app + Constants.SLASH + host, NodeMode.EPHEMERAL, + Constants.APPENDER_INIT_DATA + Constants.SEMICOLON + SysUtil.userDir); + } + + /** + * rpc trace注册中心 + * @param host + * @param app + * @param rpc + */ + public void registerRpc(String host, String app, String rpc) { + if (rpc.equals(RpcType.dubbo.symbol())) { + // TODO: 目前仅支持dubbo作为rpc/soa框架 + RegisterDto dto = new RegisterDto(app, host, this.client); + Registry registry = new ZookeeperRegistry(); + IncrementIdGen.setId(registry.register(dto)); + } + } + + /** + * 创建节点 + * @param path + * @param nodeMode + */ + private void create(String path, NodeMode nodeMode) { + if (nodeMode.symbol().equals(NodeMode.PERSISTENT.symbol())) { + // 创建永久节点 + this.client.createPersistent(nodeMode.label() + path, true); + } else if (nodeMode.symbol().equals(NodeMode.EPHEMERAL.symbol())) { + // 创建临时节点 + this.client.createEphemeral(nodeMode.label() + path); + } + } + + /** + * 创建带data的节点 + * @param path + * @param nodeMode + * @param data + */ + private void create(String path, NodeMode nodeMode, String data) { + if (nodeMode.symbol().equals(NodeMode.PERSISTENT.symbol())) { + // 创建永久节点,加入数据 + this.client.createPersistent(nodeMode.label() + path, true); + } else if (nodeMode.symbol().equals(NodeMode.EPHEMERAL.symbol())) { + // 创建临时节点,加入数据 + this.client.createEphemeral(nodeMode.label() + path, data); + } + } + + /** + * 写节点数据 + * @param path + * @param nodeMode + * @param data + */ + private void write(String path, NodeMode nodeMode, String data) { + if (nodeMode.symbol().equals(NodeMode.PERSISTENT.symbol())) { + // 写入永久节点数据 + this.client.writeData(nodeMode.label() + path, data); + } else if
(nodeMode.symbol().equals(NodeMode.EPHEMERAL.symbol())) { + // 创建临时节点,加入数据 + this.client.writeData(nodeMode.label() + path, data); + } + } + + public ZkClient getClient() { + return client; + } + + public void setClient(ZkClient client) { + this.client = client; + } +} diff --git a/skyeye-client/src/main/java/com/jthink/skyeye/client/util/SysUtil.java b/skyeye-client/skyeye-client-core/src/main/java/com/jthink/skyeye/client/core/util/SysUtil.java similarity index 95% rename from skyeye-client/src/main/java/com/jthink/skyeye/client/util/SysUtil.java rename to skyeye-client/skyeye-client-core/src/main/java/com/jthink/skyeye/client/core/util/SysUtil.java index 02175f5..5272635 100644 --- a/skyeye-client/src/main/java/com/jthink/skyeye/client/util/SysUtil.java +++ b/skyeye-client/skyeye-client-core/src/main/java/com/jthink/skyeye/client/core/util/SysUtil.java @@ -1,4 +1,4 @@ -package com.jthink.skyeye.client.util; +package com.jthink.skyeye.client.core.util; import com.jthink.skyeye.base.constant.Constants; diff --git a/skyeye-client/src/test/java/com/jthink/skyeye/client/kafka/logback/test/KafkaProducerDemo.java b/skyeye-client/skyeye-client-core/src/test/java/com/jthink/skyeye/client/core/kafka/logback/test/KafkaProducerDemo.java similarity index 97% rename from skyeye-client/src/test/java/com/jthink/skyeye/client/kafka/logback/test/KafkaProducerDemo.java rename to skyeye-client/skyeye-client-core/src/test/java/com/jthink/skyeye/client/core/kafka/logback/test/KafkaProducerDemo.java index 5c9c056..afc33f9 100644 --- a/skyeye-client/src/test/java/com/jthink/skyeye/client/kafka/logback/test/KafkaProducerDemo.java +++ b/skyeye-client/skyeye-client-core/src/test/java/com/jthink/skyeye/client/core/kafka/logback/test/KafkaProducerDemo.java @@ -1,4 +1,4 @@ -package com.jthink.skyeye.client.kafka.logback.test; +package com.jthink.skyeye.client.core.kafka.logback.test; import org.apache.kafka.clients.producer.Callback; import org.apache.kafka.clients.producer.KafkaProducer; 
diff --git a/skyeye-client/src/test/java/com/jthink/skyeye/client/kafka/partition/test/PartitionTest.java b/skyeye-client/skyeye-client-core/src/test/java/com/jthink/skyeye/client/core/kafka/partition/test/PartitionTest.java similarity index 92% rename from skyeye-client/src/test/java/com/jthink/skyeye/client/kafka/partition/test/PartitionTest.java rename to skyeye-client/skyeye-client-core/src/test/java/com/jthink/skyeye/client/core/kafka/partition/test/PartitionTest.java index e1c0e1c..8bd91c7 100644 --- a/skyeye-client/src/test/java/com/jthink/skyeye/client/kafka/partition/test/PartitionTest.java +++ b/skyeye-client/skyeye-client-core/src/test/java/com/jthink/skyeye/client/core/kafka/partition/test/PartitionTest.java @@ -1,4 +1,4 @@ -package com.jthink.skyeye.client.kafka.partition.test; +package com.jthink.skyeye.client.core.kafka.partition.test; import org.apache.kafka.common.utils.Utils; diff --git a/skyeye-client/src/test/java/com/jthink/skyeye/client/zk/test/ZkTest.java b/skyeye-client/skyeye-client-core/src/test/java/com/jthink/skyeye/client/core/zk/test/ZkTest.java similarity index 98% rename from skyeye-client/src/test/java/com/jthink/skyeye/client/zk/test/ZkTest.java rename to skyeye-client/skyeye-client-core/src/test/java/com/jthink/skyeye/client/core/zk/test/ZkTest.java index 902edb4..2835692 100644 --- a/skyeye-client/src/test/java/com/jthink/skyeye/client/zk/test/ZkTest.java +++ b/skyeye-client/skyeye-client-core/src/test/java/com/jthink/skyeye/client/core/zk/test/ZkTest.java @@ -1,4 +1,4 @@ -package com.jthink.skyeye.client.zk.test; +package com.jthink.skyeye.client.core.zk.test; import com.jthink.skyeye.base.constant.Constants; import org.I0Itec.zkclient.IZkChildListener; diff --git a/skyeye-client/skyeye-client-core/todo.md b/skyeye-client/skyeye-client-core/todo.md deleted file mode 100644 index 22e6689..0000000 --- a/skyeye-client/skyeye-client-core/todo.md +++ /dev/null @@ -1,2 +0,0 @@ -TODO: - diff --git 
a/skyeye-client/src/main/java/com/jthink/skyeye/client/kafka/LazySingletonProducer.java b/skyeye-client/src/main/java/com/jthink/skyeye/client/kafka/LazySingletonProducer.java deleted file mode 100644 index 0ac1565..0000000 --- a/skyeye-client/src/main/java/com/jthink/skyeye/client/kafka/LazySingletonProducer.java +++ /dev/null @@ -1,50 +0,0 @@ -package com.jthink.skyeye.client.kafka; - -import org.apache.kafka.clients.producer.KafkaProducer; -import org.apache.kafka.clients.producer.Producer; - -import java.util.Map; - -/** - * JThink@JThink - * - * @author JThink - * @version 0.0.1 - * @desc double check实现KafkaProducer的懒加载 - * @date 2016-09-09 09:02:34 - */ -public class LazySingletonProducer { - - private static volatile Producer producer; - - /** - * 私有化构造方法 - */ - private LazySingletonProducer() { - - } - - /** - * 实例化 - * @param config - * @return - */ - public static Producer getInstance(Map config) { - if (producer == null) { - synchronized(LazySingletonProducer.class) { - if (producer == null) { - producer = new KafkaProducer(config); - } - } - } - return producer; - } - - /** - * 是否初始化 - * @return - */ - public static boolean isInstanced() { - return producer != null; - } -} diff --git a/skyeye-client/src/main/java/com/jthink/skyeye/client/kafka/log4j/KafkaAppender.java b/skyeye-client/src/main/java/com/jthink/skyeye/client/kafka/log4j/KafkaAppender.java deleted file mode 100644 index 3d5effb..0000000 --- a/skyeye-client/src/main/java/com/jthink/skyeye/client/kafka/log4j/KafkaAppender.java +++ /dev/null @@ -1,425 +0,0 @@ -package com.jthink.skyeye.client.kafka.log4j; - -import com.jthink.skyeye.base.constant.RpcType; -import com.jthink.skyeye.client.constant.KafkaConfig; -import com.jthink.skyeye.client.constant.NodeMode; -import com.jthink.skyeye.client.kafka.LazySingletonProducer; -import com.jthink.skyeye.client.kafka.partitioner.KeyModPartitioner; -import com.jthink.skyeye.client.register.ZkRegister; -import com.jthink.skyeye.client.util.SysUtil; 
-import com.jthink.skyeye.base.constant.Constants; -import com.jthink.skyeye.base.util.StringUtil; -import com.jthink.skyeye.trace.dto.RegisterDto; -import com.jthink.skyeye.trace.generater.IncrementIdGen; -import com.jthink.skyeye.trace.registry.Registry; -import com.jthink.skyeye.trace.registry.ZookeeperRegistry; -import org.I0Itec.zkclient.ZkClient; -import org.apache.kafka.clients.producer.*; -import org.apache.kafka.common.serialization.ByteArraySerializer; -import org.apache.kafka.common.serialization.StringSerializer; -import org.apache.log4j.AppenderSkeleton; -import org.apache.log4j.helpers.LogLog; -import org.apache.log4j.spi.Filter; -import org.apache.log4j.spi.LoggingEvent; - -import java.nio.ByteBuffer; -import java.util.HashMap; -import java.util.Map; -import java.util.concurrent.ConcurrentLinkedQueue; -import java.util.concurrent.atomic.AtomicBoolean; - -/** - * JThink@JThink - * - * @author JThink - * @version 0.0.1 - * @desc KafkaAppender, 包含log4j kafka appender的配置 - * @date 2016-09-27 09:30:45 - */ -public class KafkaAppender extends AppenderSkeleton { - - // kafka topic - private String topic; - // 生产日志的host - private String host = SysUtil.host; - // 生产日志的app,多节点部署会使日志有序 - private String app; - // zookeeper的地址 - private String zkServers; - // 接受报警邮件的接收方 - private String mail; - // 标记是否为rpc服务, 取值为RpcType.java - private String rpc; - // KafkaProducer类的配置 - private Map config = new HashMap(); - // zk注册器 - private ZkRegister zkRegister; - // kafka producer是否正在初始化 - private volatile AtomicBoolean isInitializing = new AtomicBoolean(false); - // kafka producer未完成初始化之前的消息存放的队列 - private ConcurrentLinkedQueue msgQueue = new ConcurrentLinkedQueue(); - - // kafka server - private String bootstrapServers; - // 消息确认模式 - private String acks; - // linger.ms - private String lingerMs; - // max.block.ms - private String maxBlockMs; - // kafkaAppender遇到异常需要向zk进行写入数据,由于onCompletion()的调用在kafka集群完全挂掉时会有很多阻塞的日志会调用,所以我们需要保证只向zk写一次数据,监控中心只会发生一次报警 - private volatile 
AtomicBoolean flag = new AtomicBoolean(true); - - /** - * 构造方法 - */ - public KafkaAppender() { - this.checkAndSetConfig(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class.getName()); - this.checkAndSetConfig(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()); - // 设置分区类, 使用自定义的KeyModPartitioner,同样的key进入相同的partition - this.checkAndSetConfig(ProducerConfig.PARTITIONER_CLASS_CONFIG, KeyModPartitioner.class.getName()); - - // 添加hook - Runtime.getRuntime().addShutdownHook(new Thread() { - @Override - public void run() { - close(); - } - }); - } - - /** - * 覆写doAppend, 去掉closed的log日志 - * @param event - */ - @Override - public synchronized void doAppend(LoggingEvent event) { - if (closed) { - return; - } - - if (!isAsSevereAsThreshold(event.getLevel())) { - return; - } - - Filter f = this.headFilter; - - FILTER_LOOP: - while(f != null) { - switch(f.decide(event)) { - case Filter.DENY: return; - case Filter.ACCEPT: break FILTER_LOOP; - case Filter.NEUTRAL: f = f.getNext(); - } - } - - this.append(event); - } - - @Override - protected void append(LoggingEvent event) { - if (closed) { - return; - } - this.sendMessage(this.getMessage(event)); - } - - /** - * 向kafka send - * @param value - */ - private void send(String value) { - final byte[] key = ByteBuffer.allocate(4).putInt(new StringBuilder(app).append(host).toString().hashCode()).array(); - - final ProducerRecord record = new ProducerRecord(this.topic, key, value); - LazySingletonProducer.getInstance(this.config).send(record, new Callback() { - @Override - public void onCompletion(RecordMetadata recordMetadata, Exception e) { - // TODO: 异常发生如何处理(直接停掉appender) - if (null != e) { - closed = true; - LogLog.error("kafka send error in appender", e); - // 发生异常,kafkaAppender 停止收集,向节点写入数据(监控系统会感知进行报警) - if (flag.get() == true) { - zkRegister.write(Constants.SLASH + app + Constants.SLASH + host, NodeMode.EPHEMERAL, - String.valueOf(System.currentTimeMillis()) + Constants.SEMICOLON + 
SysUtil.userDir); - flag.compareAndSet(true, false); - } - } - } - }); - } - - /** - * 发送msg - * @param msg - */ - private void sendMessage(String msg) { - if (!LazySingletonProducer.isInstanced()) { - if (this.isInitializing.get() != true) { - this.isInitializing.compareAndSet(false, true); - this.initKafkaConfig(); - this.isInitializing.compareAndSet(true, false); - this.send(msg); - } else { - this.msgQueue.add(msg); - } - } else if (this.msgQueue.size() > 0) { - if (LazySingletonProducer.isInstanced() ) { - this.msgQueue.add(msg); - while (this.msgQueue.size() > 0) { - this.send(this.msgQueue.remove()); - } - } - } else { - this.send(msg); - } - } - - /** - * 初始化kafka config - */ - private void initKafkaConfig() { - - if (!LazySingletonProducer.isInstanced()) { - - // app配置 - if (StringUtil.isBlank(this.host)) { - // host未获取到 - LogLog.error("can't get the host"); - closed = true; - return; - } - - if (StringUtil.isBlank(this.app)) { - // app name未设置 - LogLog.error("log4j.xml is not set the app"); - closed = true; - return; - } - - // zk配置 - if (StringUtil.isBlank(this.zkServers)) { - // zk地址未设置 - LogLog.error("can't get zkServers"); - closed = true; - return; - } - - if (StringUtil.isBlank(this.topic)) { - // topic未设置(或者设置成了""),无法写入kafka - LogLog.error("topic is not set, appender: " + name); - closed = true; - return; - } - - if (StringUtil.isBlank(this.mail)) { - // 报警mail未设置 - LogLog.error("mail is not set, appender: " + name); - closed = true; - return; - } - - if (StringUtil.isBlank(this.rpc) || !this.checkRpcType(this.rpc)) { - // rpc未设置或者rpc值不对 - LogLog.error("rpc is not set or value not right, appender: " + name); - closed = true; - return; - } - - new Thread() { - @Override - public void run() { - // 初始化zk - zkRegister = new ZkRegister(new ZkClient(zkServers, 60000, 5000)); - // 注册永久节点用于历史日志查询 - zkRegister.create(Constants.SLASH + app + Constants.SLASH + host, NodeMode.PERSISTENT); - zkRegister.getClient().writeData(Constants.ROOT_PATH_PERSISTENT + 
Constants.SLASH + app + Constants.SLASH + host, - mail + Constants.SEMICOLON + SysUtil.userDir); - // 注册临时节点用于日志滚屏 - zkRegister.getClient().createPersistent(Constants.ROOT_PATH_EPHEMERAL + Constants.SLASH + app, true); - zkRegister.create(Constants.SLASH + app + Constants.SLASH + host, NodeMode.EPHEMERAL, - Constants.APPENDER_INIT_DATA + Constants.SEMICOLON + SysUtil.userDir); - - // rpc trace注册中心 - if (rpc.equals(RpcType.dubbo.symbol())) { - register(app, host, zkRegister.getClient()); - } - } - }.start(); - - if (StringUtil.isNotBlank(this.bootstrapServers)) { - this.config.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, this.bootstrapServers); - } - if (StringUtil.isNotBlank(this.acks)) { - this.config.put(ProducerConfig.ACKS_CONFIG, this.acks); - } - if (StringUtil.isNotBlank(this.lingerMs)) { - this.config.put(ProducerConfig.LINGER_MS_CONFIG, this.lingerMs); - } - if (StringUtil.isNotBlank(this.maxBlockMs)) { - this.config.put(ProducerConfig.MAX_BLOCK_MS_CONFIG, this.maxBlockMs); - } - if (StringUtil.isNotBlank(this.app) && StringUtil.isNotBlank(this.host)) { - this.config.put(ProducerConfig.CLIENT_ID_CONFIG, this.app + Constants.MIDDLE_LINE + this.host + Constants.MIDDLE_LINE + "log4j"); - } - - LazySingletonProducer.getInstance(this.config); - } - } - - /** - * 进行rpc trace注册 - * @param app - * @param host - * @param zkClient - */ - private void register(String app, String host, ZkClient zkClient) { - RegisterDto dto = new RegisterDto(app, host, zkClient); - Registry registry = new ZookeeperRegistry(); - IncrementIdGen.setId(registry.register(dto)); - } - - /** - * 监察rpc type是否正确 - * @param rpcType - * @return - */ - private boolean checkRpcType(String rpcType) { - try { - RpcType.valueOf(rpcType); - return true; - } catch (Exception e) { - return false; - } - } - - /** - * 获得message - * @param event - * @return - */ - private String getMessage(LoggingEvent event) { - if (this.layout == null) { - return event.getRenderedMessage(); - } else { - // 获取host和app - 
String msg = System.nanoTime() + Constants.SEMICOLON + this.layout.format(event); - return msg.replaceFirst(Constants.APP_NAME, this.app).replaceFirst(Constants.HOSTNAME, this.host); - } - } - - @Override - public void close() { - closed = true; - // 关闭KafkaProuder - if (LazySingletonProducer.isInstanced()) { - // producer实际上已经初始化 - LazySingletonProducer.getInstance(this.config).close(); - } - - // 关闭client,临时节点消失,监控系统进行感知报警 - ZkClient client = this.zkRegister.getClient(); - if (null != client) { - client.close(); - } - } - - @Override - public boolean requiresLayout() { - return true; - } - - /** - * 进行kafka配置设置 - * @param key - * @param value - */ - public void checkAndSetConfig(String key, String value) { - if (!KafkaConfig.PRODUCER_CONFIG_KEYS.contains(key)) { - // 当前kafka版本没有该配置项 - LogLog.warn("in this kafka version don't has this config: " + key); - } - this.config.put(key, value); - } - - public String getTopic() { - return topic; - } - - public void setTopic(String topic) { - this.topic = topic; - } - - public String getHost() { - return host; - } - - public void setHost(String host) { - this.host = host; - } - - public String getApp() { - return app; - } - - public void setApp(String app) { - this.app = app; - } - - public String getZkServers() { - return zkServers; - } - - public void setZkServers(String zkServers) { - this.zkServers = zkServers; - } - - public String getMail() { - return mail; - } - - public void setMail(String mail) { - this.mail = mail; - } - - public String getBootstrapServers() { - return bootstrapServers; - } - - public void setBootstrapServers(String bootstrapServers) { - this.bootstrapServers = bootstrapServers; - } - - public String getAcks() { - return acks; - } - - public void setAcks(String acks) { - this.acks = acks; - } - - public String getLingerMs() { - return lingerMs; - } - - public void setLingerMs(String lingerMs) { - this.lingerMs = lingerMs; - } - - public String getMaxBlockMs() { - return maxBlockMs; - } - - public 
void setMaxBlockMs(String maxBlockMs) { - this.maxBlockMs = maxBlockMs; - } - - public String getRpc() { - return rpc; - } - - public void setRpc(String rpc) { - this.rpc = rpc; - } -} diff --git a/skyeye-client/src/main/java/com/jthink/skyeye/client/kafka/logback/KafkaAppender.java b/skyeye-client/src/main/java/com/jthink/skyeye/client/kafka/logback/KafkaAppender.java deleted file mode 100644 index 1513ffc..0000000 --- a/skyeye-client/src/main/java/com/jthink/skyeye/client/kafka/logback/KafkaAppender.java +++ /dev/null @@ -1,356 +0,0 @@ -package com.jthink.skyeye.client.kafka.logback; - -import ch.qos.logback.core.Context; -import ch.qos.logback.core.CoreConstants; -import ch.qos.logback.core.UnsynchronizedAppenderBase; -import ch.qos.logback.core.hook.DelayingShutdownHook; -import ch.qos.logback.core.status.ErrorStatus; -import com.jthink.skyeye.base.constant.RpcType; -import com.jthink.skyeye.client.constant.KafkaConfig; -import com.jthink.skyeye.client.constant.NodeMode; -import com.jthink.skyeye.client.kafka.LazySingletonProducer; -import com.jthink.skyeye.client.kafka.logback.encoder.KafkaLayoutEncoder; -import com.jthink.skyeye.client.kafka.partitioner.KeyBuilder; -import com.jthink.skyeye.client.kafka.partitioner.KeyModPartitioner; -import com.jthink.skyeye.client.register.ZkRegister; -import com.jthink.skyeye.client.util.SysUtil; -import com.jthink.skyeye.base.util.StringUtil; -import com.jthink.skyeye.base.constant.Constants; -import com.jthink.skyeye.trace.dto.RegisterDto; -import com.jthink.skyeye.trace.generater.IncrementIdGen; -import com.jthink.skyeye.trace.registry.Registry; -import com.jthink.skyeye.trace.registry.ZookeeperRegistry; -import org.I0Itec.zkclient.ZkClient; -import org.apache.kafka.clients.producer.Callback; -import org.apache.kafka.clients.producer.ProducerConfig; -import org.apache.kafka.clients.producer.ProducerRecord; -import org.apache.kafka.clients.producer.RecordMetadata; -import 
org.apache.kafka.common.serialization.ByteArraySerializer; -import org.apache.kafka.common.serialization.StringSerializer; - -import java.util.HashMap; -import java.util.Map; -import java.util.concurrent.atomic.AtomicBoolean; - -/** - * JThink@JThink - * - * @author JThink - * @version 0.0.1 - * @desc KafkaAppender, 包含logback kafka appender的配置 - * @date 2016-09-08 20:10:21 - */ -public class KafkaAppender extends UnsynchronizedAppenderBase { - - // kafka topic - private String topic; - // 生产日志的host - private String host; - // 生产日志的app,多节点部署会使日志有序 - private String app; - // zookeeper的地址 - private String zkServers; - // 接受报警邮件的接收方 - private String mail; - // 标记是否为rpc服务, 取值为RpcType.java - private String rpc; - // KafkaProducer类的配置 - private Map config = new HashMap(); - // key生成器 - private KeyBuilder keyBuilder; - // 编码器 - private KafkaLayoutEncoder encoder; - // zk注册器 - private ZkRegister zkRegister; - // hook - private DelayingShutdownHook shutdownHook; - // kafkaAppender遇到异常需要向zk进行写入数据,由于onCompletion()的调用在kafka集群完全挂掉时会有很多阻塞的日志会调用,所以我们需要保证只向zk写一次数据,监控中心只会发生一次报警 - private volatile AtomicBoolean flag = new AtomicBoolean(true); - - /** - * 构造方法 - */ - public KafkaAppender() { - this.checkAndSetConfig(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class.getName()); - this.checkAndSetConfig(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()); - // 设置分区类, 使用自定义的KeyModPartitioner,同样的key进入相同的partition - this.checkAndSetConfig(ProducerConfig.PARTITIONER_CLASS_CONFIG, KeyModPartitioner.class.getName()); - - shutdownHook = new DelayingShutdownHook(); - } - - @Override - public void start() { - // xml配置校验 - if (!this.checkNecessaryConfig()) { - addError("necessary config is not set, kafka appender is not started"); - return; - } - - super.start(); - - // 添加logback shutdown hook, 关闭所有的appender, 调用stop()方法 - shutdownHook.setContext(this.getContext()); - Runtime.getRuntime().addShutdownHook(new Thread(this.shutdownHook)); - - // 初始化zk - 
this.zkRegister = new ZkRegister(new ZkClient(this.zkServers, 60000, 5000)); - // 注册永久节点用于历史日志查询 - this.zkRegister.create(Constants.SLASH + this.app + Constants.SLASH + this.host, NodeMode.PERSISTENT); - this.zkRegister.getClient().writeData(Constants.ROOT_PATH_PERSISTENT + Constants.SLASH + this.app + Constants.SLASH + this.host, - this.mail + Constants.SEMICOLON + SysUtil.userDir); - // 注册临时节点用于日志滚屏 - this.zkRegister.getClient().createPersistent(Constants.ROOT_PATH_EPHEMERAL + Constants.SLASH + this.app, true); - this.zkRegister.create(Constants.SLASH + this.app + Constants.SLASH + this.host, NodeMode.EPHEMERAL, - Constants.APPENDER_INIT_DATA + Constants.SEMICOLON + SysUtil.userDir); - - // rpc trace注册中心 - if (this.rpc.equals(RpcType.dubbo.symbol())) { - this.register(this.app, this.host, this.zkRegister.getClient()); - } - } - - /** - * 进行rpc trace注册 - * @param app - * @param host - * @param zkClient - */ - private void register(String app, String host, ZkClient zkClient) { - RegisterDto dto = new RegisterDto(app, host, zkClient); - Registry registry = new ZookeeperRegistry(); - IncrementIdGen.setId(registry.register(dto)); - } - - @Override - public void stop() { - super.stop(); - - // 关闭KafkaProuder - if (LazySingletonProducer.isInstanced()) { - // producer实际上已经初始化 - LazySingletonProducer.getInstance(this.config).close(); - } - - // 关闭client,临时节点消失,监控系统进行感知报警 - ZkClient client = this.zkRegister.getClient(); - if (null != client) { - client.close(); - } - } - - @Override - protected void append(E e) { - if (!isStarted()) { - return; - } - final String value = System.nanoTime() + Constants.SEMICOLON + this.encoder.doEncode(e); - final byte[] key = this.keyBuilder.build(e); - final ProducerRecord record = new ProducerRecord(this.topic, key, value); - LazySingletonProducer.getInstance(this.config).send(record, new Callback() { - @Override - public void onCompletion(RecordMetadata recordMetadata, Exception e) { - // TODO: 异常发生如何处理(目前使用RollingFileAppender.java中的方法) 
- if (null != e) { - // 如果发生异常, 将开始状态设置为false, 并每次append的时候都先check该状态 - started = false; - addStatus(new ErrorStatus("kafka send error in appender", this, e)); - // 发生异常,kafkaAppender 停止收集,向节点写入数据(监控系统会感知进行报警) - if (flag.get() == true) { - zkRegister.write(Constants.SLASH + app + Constants.SLASH + host, NodeMode.EPHEMERAL, - String.valueOf(System.currentTimeMillis()) + Constants.SEMICOLON + SysUtil.userDir); - flag.compareAndSet(true, false); - } - } - } - }); - } - - @Override - public void setContext(Context context) { - super.setContext(context); - - this.host = context.getProperty(CoreConstants.HOSTNAME_KEY); - this.app = context.getName(); - } - - /** - * 校验最基本的配置是否在logback.xml进行配置 - * @return - */ - private boolean checkNecessaryConfig() { - - boolean flag = true; - - // app配置 - if (StringUtil.isBlank(this.host)) { - // host未获取到 - addError("can't get the host"); - flag = false; - } - - if (StringUtil.isBlank(this.app)) { - // app name未设置 - addError("logback.xml is not set the node"); - flag = false; - } - - // zk配置 - if (StringUtil.isBlank(this.zkServers)) { - // zk地址未设置 - addError("can't get zkServers"); - flag = false; - } - - // kafka配置 - if (null == config.get(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG)) { - // kafka的bootstrap.servers未设置 - addError("kafka's " + ProducerConfig.BOOTSTRAP_SERVERS_CONFIG + " do not set, appender: " + name); - flag = false; - } - - if (StringUtil.isBlank(this.topic)) { - // topic未设置(或者设置成了""),无法写入kafka - addError("topic is not set, appender: " + name); - flag = false; - } - - if (StringUtil.isBlank(this.mail)) { - // 报警mail未设置 - addError("mail is not set, appender: " + name); - flag = false; - } - - if (StringUtil.isBlank(this.rpc) || !this.checkRpcType(this.rpc)) { - // rpc未设置或者rpc值不对 - addError("rpc is not set or value not right, appender: " + name); - flag = false; - } - - if (null == this.keyBuilder) { - // key生成器为设置 - addError("key builder is not set, appender: " + name); - flag = false; - } - - if (null == this.encoder) { - 
// 编码器未设置 - addError("encoder is not set, appender: " + name); - flag = false; - } - return flag; - } - - /** - * 监察rpc type是否正确 - * @param rpcType - * @return - */ - private boolean checkRpcType(String rpcType) { - try { - RpcType.valueOf(rpcType); - return true; - } catch (Exception e) { - return false; - } - } - - /** - * 将logback配置文件中节点中的值读入Map config - * @param kv - */ - public void addConfig(String kv) { - String[] keyValue = kv.split(Constants.EQUAL, 2); - if (keyValue.length == 2) { - this.checkAndSetConfig(keyValue[0], keyValue[1]); - } else { - // 值设置得不对 - addError("config item value is wrong, appender: " + name); - } - } - - /** - * 进行kafka配置设置 - * @param key - * @param value - */ - public void checkAndSetConfig(String key, String value) { - if (!KafkaConfig.PRODUCER_CONFIG_KEYS.contains(key)) { - // 当前kafka版本没有该配置项 - addWarn("in this kafka version don't has this config: " + key); - } - this.config.put(key, value); - } - - public String getTopic() { - return topic; - } - - public void setTopic(String topic) { - this.topic = topic; - } - - public Map getConfig() { - return config; - } - - public void setConfig(Map config) { - this.config = config; - } - - public String getHost() { - return host; - } - - public void setHost(String host) { - this.host = host; - } - - public String getApp() { - return app; - } - - public void setApp(String app) { - this.app = app; - } - - public KeyBuilder getKeyBuilder() { - return keyBuilder; - } - - public void setKeyBuilder(KeyBuilder keyBuilder) { - this.keyBuilder = keyBuilder; - } - - public KafkaLayoutEncoder getEncoder() { - return encoder; - } - - public void setEncoder(KafkaLayoutEncoder encoder) { - this.encoder = encoder; - } - - public String getZkServers() { - return zkServers; - } - - public void setZkServers(String zkServers) { - this.zkServers = zkServers; - } - - public String getMail() { - return mail; - } - - public void setMail(String mail) { - this.mail = mail; - } - - public String getRpc() { - return 
rpc; - } - - public void setRpc(String rpc) { - this.rpc = rpc; - } -} diff --git a/skyeye-client/src/main/java/com/jthink/skyeye/client/kafka/logback/encoder/KafkaLayoutEncoder.java b/skyeye-client/src/main/java/com/jthink/skyeye/client/kafka/logback/encoder/KafkaLayoutEncoder.java deleted file mode 100644 index 9263b8e..0000000 --- a/skyeye-client/src/main/java/com/jthink/skyeye/client/kafka/logback/encoder/KafkaLayoutEncoder.java +++ /dev/null @@ -1,64 +0,0 @@ -package com.jthink.skyeye.client.kafka.logback.encoder; - -import ch.qos.logback.core.Layout; -import ch.qos.logback.core.spi.ContextAwareBase; -import ch.qos.logback.core.spi.LifeCycle; - -import java.nio.charset.Charset; - -/** - * JThink@JThink - * - * @author JThink - * @version 0.0.1 - * @desc kafka的encoder - * @date 2016-09-09 16:51:18 - */ -public class KafkaLayoutEncoder extends ContextAwareBase implements LifeCycle { - - // layout - private Layout layout; - // 编码,默认utf-8 - private Charset charset; - private boolean started = false; - private static final Charset UTF8 = Charset.forName("UTF-8"); - - public String doEncode(E event) { - return this.layout.doLayout(event); - } - - @Override - public void start() { - if (charset == null) { - addInfo("no set charset, set the default charset is utf-8"); - charset = UTF8; - } - started = true; - } - - @Override - public void stop() { - started = false; - } - - @Override - public boolean isStarted() { - return started; - } - - public Layout getLayout() { - return layout; - } - - public void setLayout(Layout layout) { - this.layout = layout; - } - - public Charset getCharset() { - return charset; - } - - public void setCharset(Charset charset) { - this.charset = charset; - } -} diff --git a/skyeye-client/src/main/java/com/jthink/skyeye/client/kafka/partitioner/AppHostKeyBuilder.java b/skyeye-client/src/main/java/com/jthink/skyeye/client/kafka/partitioner/AppHostKeyBuilder.java deleted file mode 100644 index 933d1cb..0000000 --- 
a/skyeye-client/src/main/java/com/jthink/skyeye/client/kafka/partitioner/AppHostKeyBuilder.java +++ /dev/null @@ -1,49 +0,0 @@ -package com.jthink.skyeye.client.kafka.partitioner; - -import ch.qos.logback.core.Context; -import ch.qos.logback.core.CoreConstants; -import ch.qos.logback.core.spi.ContextAwareBase; - -import java.nio.ByteBuffer; - -/** - * JThink@JThink - * - * @author JThink - * @version 0.0.1 - * @desc 根据app名字和host值生成key(如果kafka有多个partition,由于kafka的限制,每个partition只能由相等数量的每个cosumer group - * 中的consumer进行消费,并且consumer group中只能有一个consumer消费,为了保证后续logstash进行消费能够保证每个应用的日志 - * 有序,key加上app,相同的app进入相同的partition,由于app有可能是部署多个节点,所以key在加上host可以保证每个app在不同的 - * 节点上的日志能够有序得进行消费) - * @date 2016-09-09 13:27:35 - */ -public class AppHostKeyBuilder extends ContextAwareBase implements KeyBuilder { - - private byte[] appHost; - - @Override - public void setContext(Context context) { - super.setContext(context); - String host = context.getProperty(CoreConstants.HOSTNAME_KEY); - String app = context.getName(); - appHost = ByteBuffer.allocate(4).putInt(new StringBuilder(app).append(host).toString().hashCode()).array(); - } - - /** - * 生成key,key规则app+host的byte[] - * @param e log event, ch.qos.logback.classic.spi.ILoggingEvent - * @return - */ - @Override - public byte[] build(E e) { - return appHost; - } - - public byte[] getAppHost() { - return appHost; - } - - public void setAppHost(byte[] appHost) { - this.appHost = appHost; - } -} diff --git a/skyeye-client/src/main/java/com/jthink/skyeye/client/kafka/partitioner/KeyBuilder.java b/skyeye-client/src/main/java/com/jthink/skyeye/client/kafka/partitioner/KeyBuilder.java deleted file mode 100644 index bc1b4f2..0000000 --- a/skyeye-client/src/main/java/com/jthink/skyeye/client/kafka/partitioner/KeyBuilder.java +++ /dev/null @@ -1,20 +0,0 @@ -package com.jthink.skyeye.client.kafka.partitioner; - -/** - * JThink@JThink - * - * @author JThink - * @version 0.0.1 - * @desc ProducerRecord需要的key参数,根据该值进行分区 - * @date 2016-09-09 
13:23:18 - */ -public interface KeyBuilder { - - /** - * 生成ProducerRecord需要的key参数 - * @param e log event, ch.qos.logback.classic.spi.ILoggingEvent - * @return - */ - byte[] build(E e); - -} diff --git a/skyeye-client/src/main/java/com/jthink/skyeye/client/kafka/partitioner/KeyModPartitioner.java b/skyeye-client/src/main/java/com/jthink/skyeye/client/kafka/partitioner/KeyModPartitioner.java deleted file mode 100644 index 0f331c6..0000000 --- a/skyeye-client/src/main/java/com/jthink/skyeye/client/kafka/partitioner/KeyModPartitioner.java +++ /dev/null @@ -1,44 +0,0 @@ -package com.jthink.skyeye.client.kafka.partitioner; - -import org.apache.kafka.clients.producer.Partitioner; -import org.apache.kafka.common.Cluster; -import org.apache.kafka.common.PartitionInfo; -import org.apache.kafka.common.utils.Utils; - -import java.util.List; -import java.util.Map; - -/** - * JThink@JThink - * - * @author JThink - * @version 0.0.1 - * @desc 自定义kafka的partitioner - * @date 2016-09-09 11:23:49 - */ -public class KeyModPartitioner implements Partitioner { - - @Override - public int partition(String topic, Object key, byte[] keyBytes, Object value, byte[] valueBytes, Cluster cluster) { - List partitions = cluster.partitionsForTopic(topic); - int numPartitions = partitions.size(); - int partitionNum = 0; - try { - partitionNum = Utils.murmur2(keyBytes); - } catch (Exception e) { - partitionNum = key.hashCode(); - } - - return Math.abs(partitionNum % numPartitions); - } - - @Override - public void close() { - - } - - @Override - public void configure(Map configs) { - - } -} diff --git a/skyeye-client/src/main/java/com/jthink/skyeye/client/register/ZkRegister.java b/skyeye-client/src/main/java/com/jthink/skyeye/client/register/ZkRegister.java deleted file mode 100644 index f143215..0000000 --- a/skyeye-client/src/main/java/com/jthink/skyeye/client/register/ZkRegister.java +++ /dev/null @@ -1,77 +0,0 @@ -package com.jthink.skyeye.client.register; - -import 
com.jthink.skyeye.client.constant.NodeMode; -import org.I0Itec.zkclient.ZkClient; - -/** - * JThink@JThink - * - * @author JThink - * @version 0.0.1 - * @desc zookeeper注册中心 - * @date 2016-09-21 14:03:46 - */ -public class ZkRegister { - - // zkClient - private ZkClient client; - - public ZkRegister(ZkClient client) { - this.client = client; - } - - /** - * 创建节点 - * @param path - * @param nodeMode - */ - public void create(String path, NodeMode nodeMode) { - if (nodeMode.symbol().equals(NodeMode.PERSISTENT.symbol())) { - // 创建永久节点 - this.client.createPersistent(nodeMode.label() + path, true); - } else if (nodeMode.symbol().equals(NodeMode.EPHEMERAL.symbol())) { - // 创建临时节点 - this.client.createEphemeral(nodeMode.label() + path); - } - } - - /** - * 创建带data的节点 - * @param path - * @param nodeMode - * @param data - */ - public void create(String path, NodeMode nodeMode, String data) { - if (nodeMode.symbol().equals(NodeMode.PERSISTENT.symbol())) { - // 创建永久节点,加入数据 - this.client.createPersistent(nodeMode.label() + path, true); - } else if (nodeMode.symbol().equals(NodeMode.EPHEMERAL.symbol())) { - // 创建临时节点,加入数据 - this.client.createEphemeral(nodeMode.label() + path, data); - } - } - - /** - * 写节点数据 - * @param path - * @param nodeMode - * @param data - */ - public void write(String path, NodeMode nodeMode, String data) { - if (nodeMode.symbol().equals(NodeMode.PERSISTENT.symbol())) { - // 创建永久节点,加入数据 - this.client.writeData(nodeMode.label() + path, true); - } else if (nodeMode.symbol().equals(NodeMode.EPHEMERAL.symbol())) { - // 创建临时节点,加入数据 - this.client.writeData(nodeMode.label() + path, data); - } - } - - public ZkClient getClient() { - return client; - } - - public void setClient(ZkClient client) { - this.client = client; - } -} From 28db330fb0071b57e4f2a722208def9968fbd80a Mon Sep 17 00:00:00 2001 From: JThink Date: Tue, 1 Aug 2017 09:49:44 +0800 Subject: [PATCH 11/27] re arch log4j --- skyeye-client/settings.gradle | 4 +- .../kafka/partitioner/KeyModPartitioner.java 
| 44 ++ .../core/producer/LazySingletonProducer.java | 50 +++ .../client/core/register/ZkRegister.java | 2 +- .../skyeye-client-log4j/build.gradle | 17 + .../skyeye-client-log4j/settings.gradle | 1 + .../client/log4j/appender/KafkaAppender.java | 417 ++++++++++++++++++ skyeye-client/skyeye-client-log4j/todo.md | 2 - 8 files changed, 532 insertions(+), 5 deletions(-) create mode 100644 skyeye-client/skyeye-client-core/src/main/java/com/jthink/skyeye/client/core/kafka/partitioner/KeyModPartitioner.java create mode 100644 skyeye-client/skyeye-client-core/src/main/java/com/jthink/skyeye/client/core/producer/LazySingletonProducer.java create mode 100644 skyeye-client/skyeye-client-log4j/build.gradle create mode 100644 skyeye-client/skyeye-client-log4j/settings.gradle create mode 100644 skyeye-client/skyeye-client-log4j/src/main/java/com/jthink/skyeye/client/log4j/appender/KafkaAppender.java delete mode 100644 skyeye-client/skyeye-client-log4j/todo.md diff --git a/skyeye-client/settings.gradle b/skyeye-client/settings.gradle index 03a1a85..abffb77 100644 --- a/skyeye-client/settings.gradle +++ b/skyeye-client/settings.gradle @@ -1,2 +1,2 @@ -include 'skyeye-client-core' -// , 'skyeye-client-log4j', 'skyeye-client-log4j2', 'skyeye-client-logback' +include 'skyeye-client-core' , 'skyeye-client-log4j' +//, 'skyeye-client-log4j2', 'skyeye-client-logback' diff --git a/skyeye-client/skyeye-client-core/src/main/java/com/jthink/skyeye/client/core/kafka/partitioner/KeyModPartitioner.java b/skyeye-client/skyeye-client-core/src/main/java/com/jthink/skyeye/client/core/kafka/partitioner/KeyModPartitioner.java new file mode 100644 index 0000000..3eb678b --- /dev/null +++ b/skyeye-client/skyeye-client-core/src/main/java/com/jthink/skyeye/client/core/kafka/partitioner/KeyModPartitioner.java @@ -0,0 +1,44 @@ +package com.jthink.skyeye.client.core.kafka.partitioner; + +import org.apache.kafka.clients.producer.Partitioner; +import org.apache.kafka.common.Cluster; +import 
org.apache.kafka.common.PartitionInfo; +import org.apache.kafka.common.utils.Utils; + +import java.util.List; +import java.util.Map; + +/** + * JThink@JThink + * + * @author JThink + * @version 0.0.1 + * @desc 自定义kafka的partitioner + * @date 2016-09-09 11:23:49 + */ +public class KeyModPartitioner implements Partitioner { + + @Override + public int partition(String topic, Object key, byte[] keyBytes, Object value, byte[] valueBytes, Cluster cluster) { + List partitions = cluster.partitionsForTopic(topic); + int numPartitions = partitions.size(); + int partitionNum = 0; + try { + partitionNum = Utils.murmur2(keyBytes); + } catch (Exception e) { + partitionNum = key.hashCode(); + } + + return Math.abs(partitionNum % numPartitions); + } + + @Override + public void close() { + + } + + @Override + public void configure(Map configs) { + + } +} diff --git a/skyeye-client/skyeye-client-core/src/main/java/com/jthink/skyeye/client/core/producer/LazySingletonProducer.java b/skyeye-client/skyeye-client-core/src/main/java/com/jthink/skyeye/client/core/producer/LazySingletonProducer.java new file mode 100644 index 0000000..bfddd09 --- /dev/null +++ b/skyeye-client/skyeye-client-core/src/main/java/com/jthink/skyeye/client/core/producer/LazySingletonProducer.java @@ -0,0 +1,50 @@ +package com.jthink.skyeye.client.core.producer; + +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.Producer; + +import java.util.Map; + +/** + * JThink@JThink + * + * @author JThink + * @version 0.0.1 + * @desc double check实现KafkaProducer的懒加载 + * @date 2016-09-09 09:02:34 + */ +public class LazySingletonProducer { + + private static volatile Producer producer; + + /** + * 私有化构造方法 + */ + private LazySingletonProducer() { + + } + + /** + * 实例化 + * @param config + * @return + */ + public static Producer getInstance(Map config) { + if (producer == null) { + synchronized(LazySingletonProducer.class) { + if (producer == null) { + producer = new 
KafkaProducer(config); + } + } + } + return producer; + } + + /** + * 是否初始化 + * @return + */ + public static boolean isInstanced() { + return producer != null; + } +} diff --git a/skyeye-client/skyeye-client-core/src/main/java/com/jthink/skyeye/client/core/register/ZkRegister.java b/skyeye-client/skyeye-client-core/src/main/java/com/jthink/skyeye/client/core/register/ZkRegister.java index 788bbbd..445b799 100644 --- a/skyeye-client/skyeye-client-core/src/main/java/com/jthink/skyeye/client/core/register/ZkRegister.java +++ b/skyeye-client/skyeye-client-core/src/main/java/com/jthink/skyeye/client/core/register/ZkRegister.java @@ -96,7 +96,7 @@ private void create(String path, NodeMode nodeMode, String data) { * @param nodeMode * @param data */ - private void write(String path, NodeMode nodeMode, String data) { + public void write(String path, NodeMode nodeMode, String data) { if (nodeMode.symbol().equals(NodeMode.PERSISTENT.symbol())) { // 创建永久节点,加入数据 this.client.writeData(nodeMode.label() + path, true); diff --git a/skyeye-client/skyeye-client-log4j/build.gradle b/skyeye-client/skyeye-client-log4j/build.gradle new file mode 100644 index 0000000..0719938 --- /dev/null +++ b/skyeye-client/skyeye-client-log4j/build.gradle @@ -0,0 +1,17 @@ +apply plugin: 'java' +apply plugin: 'maven' +apply plugin: 'eclipse' + +sourceCompatibility = 1.8 +targetCompatibility = 1.8 +compileJava.options.encoding = 'UTF-8' +buildDir = 'target' + +ext { + log4jVersion = '1.2.17' +} + +dependencies { + compile project(':skyeye-client-core') + compile "log4j:log4j:$log4jVersion" +} diff --git a/skyeye-client/skyeye-client-log4j/settings.gradle b/skyeye-client/skyeye-client-log4j/settings.gradle new file mode 100644 index 0000000..0c35f7c --- /dev/null +++ b/skyeye-client/skyeye-client-log4j/settings.gradle @@ -0,0 +1 @@ +rootProject.name = 'skyeye-client-log4j' diff --git a/skyeye-client/skyeye-client-log4j/src/main/java/com/jthink/skyeye/client/log4j/appender/KafkaAppender.java 
b/skyeye-client/skyeye-client-log4j/src/main/java/com/jthink/skyeye/client/log4j/appender/KafkaAppender.java new file mode 100644 index 0000000..e9833b0 --- /dev/null +++ b/skyeye-client/skyeye-client-log4j/src/main/java/com/jthink/skyeye/client/log4j/appender/KafkaAppender.java @@ -0,0 +1,417 @@ +package com.jthink.skyeye.client.log4j.appender; + +import com.jthink.skyeye.base.constant.RpcType; +import com.jthink.skyeye.base.constant.Constants; +import com.jthink.skyeye.base.util.StringUtil; +import com.jthink.skyeye.client.core.constant.KafkaConfig; +import com.jthink.skyeye.client.core.constant.NodeMode; +import com.jthink.skyeye.client.core.kafka.partitioner.KeyModPartitioner; +import com.jthink.skyeye.client.core.producer.LazySingletonProducer; +import com.jthink.skyeye.client.core.register.ZkRegister; +import com.jthink.skyeye.client.core.util.SysUtil; +import com.jthink.skyeye.trace.dto.RegisterDto; +import com.jthink.skyeye.trace.generater.IncrementIdGen; +import com.jthink.skyeye.trace.registry.Registry; +import com.jthink.skyeye.trace.registry.ZookeeperRegistry; +import org.I0Itec.zkclient.ZkClient; +import org.apache.kafka.clients.producer.*; +import org.apache.kafka.common.serialization.ByteArraySerializer; +import org.apache.kafka.common.serialization.StringSerializer; +import org.apache.log4j.AppenderSkeleton; +import org.apache.log4j.helpers.LogLog; +import org.apache.log4j.spi.Filter; +import org.apache.log4j.spi.LoggingEvent; + +import java.nio.ByteBuffer; +import java.util.HashMap; +import java.util.Map; +import java.util.concurrent.ConcurrentLinkedQueue; +import java.util.concurrent.atomic.AtomicBoolean; + +/** + * JThink@JThink + * + * @author JThink + * @version 0.0.1 + * @desc KafkaAppender, 包含log4j kafka appender的配置 + * @date 2016-09-27 09:30:45 + */ +public class KafkaAppender extends AppenderSkeleton { + + // kafka topic + private String topic; + // 生产日志的host + private String host = SysUtil.host; + // 生产日志的app,多节点部署会使日志有序 + private String 
app; + // zookeeper的地址 + private String zkServers; + // 接受报警邮件的接收方 + private String mail; + // 标记是否为rpc服务, 取值为RpcType.java + private String rpc; + // KafkaProducer类的配置 + private Map config = new HashMap(); + // zk注册器 + private ZkRegister zkRegister; + // kafka producer是否正在初始化 + private volatile AtomicBoolean isInitializing = new AtomicBoolean(false); + // kafka producer未完成初始化之前的消息存放的队列 + private ConcurrentLinkedQueue msgQueue = new ConcurrentLinkedQueue(); + + // kafka server + private String bootstrapServers; + // 消息确认模式 + private String acks; + // linger.ms + private String lingerMs; + // max.block.ms + private String maxBlockMs; + // kafkaAppender遇到异常需要向zk进行写入数据,由于onCompletion()的调用在kafka集群完全挂掉时会有很多阻塞的日志会调用,所以我们需要保证只向zk写一次数据,监控中心只会发生一次报警 + private volatile AtomicBoolean flag = new AtomicBoolean(true); + + /** + * 构造方法 + */ + public KafkaAppender() { + this.checkAndSetConfig(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class.getName()); + this.checkAndSetConfig(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()); + // 设置分区类, 使用自定义的KeyModPartitioner,同样的key进入相同的partition + this.checkAndSetConfig(ProducerConfig.PARTITIONER_CLASS_CONFIG, KeyModPartitioner.class.getName()); + + // 添加hook + Runtime.getRuntime().addShutdownHook(new Thread() { + @Override + public void run() { + close(); + } + }); + } + + /** + * 覆写doAppend, 去掉closed的log日志 + * @param event + */ + @Override + public synchronized void doAppend(LoggingEvent event) { + if (closed) { + return; + } + + if (!isAsSevereAsThreshold(event.getLevel())) { + return; + } + + Filter f = this.headFilter; + + FILTER_LOOP: + while(f != null) { + switch(f.decide(event)) { + case Filter.DENY: return; + case Filter.ACCEPT: break FILTER_LOOP; + case Filter.NEUTRAL: f = f.getNext(); + } + } + + this.append(event); + } + + @Override + protected void append(LoggingEvent event) { + if (closed) { + return; + } + this.sendMessage(this.getMessage(event)); + } + + /** + * 向kafka send + * 
@param value + */ + private void send(String value) { + final byte[] key = ByteBuffer.allocate(4).putInt(new StringBuilder(app).append(host).toString().hashCode()).array(); + + final ProducerRecord record = new ProducerRecord(this.topic, key, value); + LazySingletonProducer.getInstance(this.config).send(record, new Callback() { + @Override + public void onCompletion(RecordMetadata recordMetadata, Exception e) { + // TODO: 异常发生如何处理(直接停掉appender) + if (null != e) { + closed = true; + LogLog.error("kafka send error in appender", e); + // 发生异常,kafkaAppender 停止收集,向节点写入数据(监控系统会感知进行报警) + if (flag.get() == true) { + zkRegister.write(Constants.SLASH + app + Constants.SLASH + host, NodeMode.EPHEMERAL, + String.valueOf(System.currentTimeMillis()) + Constants.SEMICOLON + SysUtil.userDir); + flag.compareAndSet(true, false); + } + } + } + }); + } + + /** + * 发送msg + * @param msg + */ + private void sendMessage(String msg) { + if (!LazySingletonProducer.isInstanced()) { + if (this.isInitializing.get() != true) { + this.isInitializing.compareAndSet(false, true); + this.initKafkaConfig(); + this.isInitializing.compareAndSet(true, false); + this.send(msg); + } else { + this.msgQueue.add(msg); + } + } else if (this.msgQueue.size() > 0) { + if (LazySingletonProducer.isInstanced() ) { + this.msgQueue.add(msg); + while (this.msgQueue.size() > 0) { + this.send(this.msgQueue.remove()); + } + } + } else { + this.send(msg); + } + } + + /** + * 初始化kafka config + */ + private void initKafkaConfig() { + + if (!LazySingletonProducer.isInstanced()) { + + // app配置 + if (StringUtil.isBlank(this.host)) { + // host未获取到 + LogLog.error("can't get the host"); + closed = true; + return; + } + + if (StringUtil.isBlank(this.app)) { + // app name未设置 + LogLog.error("log4j.xml is not set the app"); + closed = true; + return; + } + + // zk配置 + if (StringUtil.isBlank(this.zkServers)) { + // zk地址未设置 + LogLog.error("can't get zkServers"); + closed = true; + return; + } + + if (StringUtil.isBlank(this.topic)) { + 
// topic未设置(或者设置成了""),无法写入kafka + LogLog.error("topic is not set, appender: " + name); + closed = true; + return; + } + + if (StringUtil.isBlank(this.mail)) { + // 报警mail未设置 + LogLog.error("mail is not set, appender: " + name); + closed = true; + return; + } + + if (StringUtil.isBlank(this.rpc) || !this.checkRpcType(this.rpc)) { + // rpc未设置或者rpc值不对 + LogLog.error("rpc is not set or value not right, appender: " + name); + closed = true; + return; + } + + new Thread() { + @Override + public void run() { + // 初始化zk + KafkaAppender.this.zkRegister = new ZkRegister(new ZkClient(zkServers, 60000, 5000)); + // 注册节点 + KafkaAppender.this.zkRegister.registerNode(KafkaAppender.this.host, KafkaAppender.this.app, KafkaAppender.this.mail); + + // rpc trace注册中心 + KafkaAppender.this.zkRegister.registerRpc(KafkaAppender.this.host, KafkaAppender.this.app, KafkaAppender.this.rpc); + } + }.start(); + + if (StringUtil.isNotBlank(this.bootstrapServers)) { + this.config.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, this.bootstrapServers); + } + if (StringUtil.isNotBlank(this.acks)) { + this.config.put(ProducerConfig.ACKS_CONFIG, this.acks); + } + if (StringUtil.isNotBlank(this.lingerMs)) { + this.config.put(ProducerConfig.LINGER_MS_CONFIG, this.lingerMs); + } + if (StringUtil.isNotBlank(this.maxBlockMs)) { + this.config.put(ProducerConfig.MAX_BLOCK_MS_CONFIG, this.maxBlockMs); + } + if (StringUtil.isNotBlank(this.app) && StringUtil.isNotBlank(this.host)) { + this.config.put(ProducerConfig.CLIENT_ID_CONFIG, this.app + Constants.MIDDLE_LINE + this.host + Constants.MIDDLE_LINE + "log4j"); + } + + LazySingletonProducer.getInstance(this.config); + } + } + + /** + * 进行rpc trace注册 + * @param app + * @param host + * @param zkClient + */ + private void register(String app, String host, ZkClient zkClient) { + RegisterDto dto = new RegisterDto(app, host, zkClient); + Registry registry = new ZookeeperRegistry(); + IncrementIdGen.setId(registry.register(dto)); + } + + /** + * 监察rpc type是否正确 + * @param 
rpcType + * @return + */ + private boolean checkRpcType(String rpcType) { + try { + RpcType.valueOf(rpcType); + return true; + } catch (Exception e) { + return false; + } + } + + /** + * 获得message + * @param event + * @return + */ + private String getMessage(LoggingEvent event) { + if (this.layout == null) { + return event.getRenderedMessage(); + } else { + // 获取host和app + String msg = System.nanoTime() + Constants.SEMICOLON + this.layout.format(event); + return msg.replaceFirst(Constants.APP_NAME, this.app).replaceFirst(Constants.HOSTNAME, this.host); + } + } + + @Override + public void close() { + closed = true; + // 关闭KafkaProuder + if (LazySingletonProducer.isInstanced()) { + // producer实际上已经初始化 + LazySingletonProducer.getInstance(this.config).close(); + } + + // 关闭client,临时节点消失,监控系统进行感知报警 + ZkClient client = this.zkRegister.getClient(); + if (null != client) { + client.close(); + } + } + + @Override + public boolean requiresLayout() { + return true; + } + + /** + * 进行kafka配置设置 + * @param key + * @param value + */ + public void checkAndSetConfig(String key, String value) { + if (!KafkaConfig.PRODUCER_CONFIG_KEYS.contains(key)) { + // 当前kafka版本没有该配置项 + LogLog.warn("in this kafka version don't has this config: " + key); + } + this.config.put(key, value); + } + + public String getTopic() { + return topic; + } + + public void setTopic(String topic) { + this.topic = topic; + } + + public String getHost() { + return host; + } + + public void setHost(String host) { + this.host = host; + } + + public String getApp() { + return app; + } + + public void setApp(String app) { + this.app = app; + } + + public String getZkServers() { + return zkServers; + } + + public void setZkServers(String zkServers) { + this.zkServers = zkServers; + } + + public String getMail() { + return mail; + } + + public void setMail(String mail) { + this.mail = mail; + } + + public String getBootstrapServers() { + return bootstrapServers; + } + + public void setBootstrapServers(String 
bootstrapServers) { + this.bootstrapServers = bootstrapServers; + } + + public String getAcks() { + return acks; + } + + public void setAcks(String acks) { + this.acks = acks; + } + + public String getLingerMs() { + return lingerMs; + } + + public void setLingerMs(String lingerMs) { + this.lingerMs = lingerMs; + } + + public String getMaxBlockMs() { + return maxBlockMs; + } + + public void setMaxBlockMs(String maxBlockMs) { + this.maxBlockMs = maxBlockMs; + } + + public String getRpc() { + return rpc; + } + + public void setRpc(String rpc) { + this.rpc = rpc; + } +} diff --git a/skyeye-client/skyeye-client-log4j/todo.md b/skyeye-client/skyeye-client-log4j/todo.md deleted file mode 100644 index 22e6689..0000000 --- a/skyeye-client/skyeye-client-log4j/todo.md +++ /dev/null @@ -1,2 +0,0 @@ -TODO: - From c242506b6d91ecb40bc83da77ca1ecf2bc0d98c6 Mon Sep 17 00:00:00 2001 From: JThink Date: Tue, 1 Aug 2017 10:28:12 +0800 Subject: [PATCH 12/27] re arch logbck kafka appender --- skyeye-client/settings.gradle | 2 +- .../skyeye-client-log4j2/build.gradle | 17 + .../skyeye-client-log4j2/settings.gradle | 1 + skyeye-client/skyeye-client-log4j2/todo.md | 2 - .../skyeye-client-logback/build.gradle | 22 ++ .../skyeye-client-logback/settings.gradle | 1 + .../logback/appender/KafkaAppender.java | 348 ++++++++++++++++++ .../logback/builder/AppHostKeyBuilder.java | 49 +++ .../client/logback/builder/KeyBuilder.java | 20 + .../logback/encoder/KafkaLayoutEncoder.java | 64 ++++ skyeye-client/skyeye-client-logback/todo.md | 2 - skyeye-collector/build.gradle | 2 +- skyeye-trace/build.gradle | 2 +- skyeye-web/build.gradle | 2 +- 14 files changed, 526 insertions(+), 8 deletions(-) create mode 100644 skyeye-client/skyeye-client-log4j2/build.gradle create mode 100644 skyeye-client/skyeye-client-log4j2/settings.gradle delete mode 100644 skyeye-client/skyeye-client-log4j2/todo.md create mode 100644 skyeye-client/skyeye-client-logback/build.gradle create mode 100644 
skyeye-client/skyeye-client-logback/settings.gradle create mode 100644 skyeye-client/skyeye-client-logback/src/main/java/com/jthink/skyeye/client/logback/appender/KafkaAppender.java create mode 100644 skyeye-client/skyeye-client-logback/src/main/java/com/jthink/skyeye/client/logback/builder/AppHostKeyBuilder.java create mode 100644 skyeye-client/skyeye-client-logback/src/main/java/com/jthink/skyeye/client/logback/builder/KeyBuilder.java create mode 100644 skyeye-client/skyeye-client-logback/src/main/java/com/jthink/skyeye/client/logback/encoder/KafkaLayoutEncoder.java delete mode 100644 skyeye-client/skyeye-client-logback/todo.md diff --git a/skyeye-client/settings.gradle b/skyeye-client/settings.gradle index abffb77..14a9deb 100644 --- a/skyeye-client/settings.gradle +++ b/skyeye-client/settings.gradle @@ -1,2 +1,2 @@ -include 'skyeye-client-core' , 'skyeye-client-log4j' +include 'skyeye-client-core' , 'skyeye-client-log4j', 'skyeye-client-logback' //, 'skyeye-client-log4j2', 'skyeye-client-logback' diff --git a/skyeye-client/skyeye-client-log4j2/build.gradle b/skyeye-client/skyeye-client-log4j2/build.gradle new file mode 100644 index 0000000..e74068c --- /dev/null +++ b/skyeye-client/skyeye-client-log4j2/build.gradle @@ -0,0 +1,17 @@ +apply plugin: 'java' +apply plugin: 'maven' +apply plugin: 'eclipse' + +sourceCompatibility = 1.8 +targetCompatibility = 1.8 +compileJava.options.encoding = 'UTF-8' +buildDir = 'target' + +ext { +// log4jVersion = '1.2.17' +} + +dependencies { + compile project(':skyeye-client-core') +// compile "log4j:log4j:$log4jVersion" +} diff --git a/skyeye-client/skyeye-client-log4j2/settings.gradle b/skyeye-client/skyeye-client-log4j2/settings.gradle new file mode 100644 index 0000000..809c829 --- /dev/null +++ b/skyeye-client/skyeye-client-log4j2/settings.gradle @@ -0,0 +1 @@ +rootProject.name = 'skyeye-client-log4j2' diff --git a/skyeye-client/skyeye-client-log4j2/todo.md b/skyeye-client/skyeye-client-log4j2/todo.md deleted file mode 100644 
index 22e6689..0000000 --- a/skyeye-client/skyeye-client-log4j2/todo.md +++ /dev/null @@ -1,2 +0,0 @@ -TODO: - diff --git a/skyeye-client/skyeye-client-logback/build.gradle b/skyeye-client/skyeye-client-logback/build.gradle new file mode 100644 index 0000000..700744c --- /dev/null +++ b/skyeye-client/skyeye-client-logback/build.gradle @@ -0,0 +1,22 @@ +apply plugin: 'java' +apply plugin: 'maven' +apply plugin: 'eclipse' + +sourceCompatibility = 1.8 +targetCompatibility = 1.8 +compileJava.options.encoding = 'UTF-8' +buildDir = 'target' + +ext { + logbackVersion = '1.1.11' +} + +dependencies { + compile project(':skyeye-client-core') + compile "ch.qos.logback:logback-classic:$logbackVersion" +} + +configurations { + compile.exclude group: "log4j", module: "log4j" + compile.exclude group: "org.slf4j", module: "slf4j-log4j12" +} diff --git a/skyeye-client/skyeye-client-logback/settings.gradle b/skyeye-client/skyeye-client-logback/settings.gradle new file mode 100644 index 0000000..479589a --- /dev/null +++ b/skyeye-client/skyeye-client-logback/settings.gradle @@ -0,0 +1 @@ +rootProject.name = 'skyeye-client-logback' diff --git a/skyeye-client/skyeye-client-logback/src/main/java/com/jthink/skyeye/client/logback/appender/KafkaAppender.java b/skyeye-client/skyeye-client-logback/src/main/java/com/jthink/skyeye/client/logback/appender/KafkaAppender.java new file mode 100644 index 0000000..ad32647 --- /dev/null +++ b/skyeye-client/skyeye-client-logback/src/main/java/com/jthink/skyeye/client/logback/appender/KafkaAppender.java @@ -0,0 +1,348 @@ +package com.jthink.skyeye.client.logback.appender; + +import ch.qos.logback.core.Context; +import ch.qos.logback.core.CoreConstants; +import ch.qos.logback.core.UnsynchronizedAppenderBase; +import ch.qos.logback.core.hook.DelayingShutdownHook; +import ch.qos.logback.core.status.ErrorStatus; +import com.jthink.skyeye.base.constant.RpcType; +import com.jthink.skyeye.base.util.StringUtil; +import 
com.jthink.skyeye.base.constant.Constants; +import com.jthink.skyeye.client.core.constant.KafkaConfig; +import com.jthink.skyeye.client.core.constant.NodeMode; +import com.jthink.skyeye.client.core.kafka.partitioner.KeyModPartitioner; +import com.jthink.skyeye.client.core.producer.LazySingletonProducer; +import com.jthink.skyeye.client.core.register.ZkRegister; +import com.jthink.skyeye.client.core.util.SysUtil; +import com.jthink.skyeye.client.logback.builder.KeyBuilder; +import com.jthink.skyeye.client.logback.encoder.KafkaLayoutEncoder; +import com.jthink.skyeye.trace.dto.RegisterDto; +import com.jthink.skyeye.trace.generater.IncrementIdGen; +import com.jthink.skyeye.trace.registry.Registry; +import com.jthink.skyeye.trace.registry.ZookeeperRegistry; +import org.I0Itec.zkclient.ZkClient; +import org.apache.kafka.clients.producer.Callback; +import org.apache.kafka.clients.producer.ProducerConfig; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.apache.kafka.clients.producer.RecordMetadata; +import org.apache.kafka.common.serialization.ByteArraySerializer; +import org.apache.kafka.common.serialization.StringSerializer; + +import java.util.HashMap; +import java.util.Map; +import java.util.concurrent.atomic.AtomicBoolean; + +/** + * JThink@JThink + * + * @author JThink + * @version 0.0.1 + * @desc KafkaAppender, 包含logback kafka appender的配置 + * @date 2016-09-08 20:10:21 + */ +public class KafkaAppender extends UnsynchronizedAppenderBase { + + // kafka topic + private String topic; + // 生产日志的host + private String host; + // 生产日志的app,多节点部署会使日志有序 + private String app; + // zookeeper的地址 + private String zkServers; + // 接受报警邮件的接收方 + private String mail; + // 标记是否为rpc服务, 取值为RpcType.java + private String rpc; + // KafkaProducer类的配置 + private Map config = new HashMap(); + // key生成器 + private KeyBuilder keyBuilder; + // 编码器 + private KafkaLayoutEncoder encoder; + // zk注册器 + private ZkRegister zkRegister; + // hook + private DelayingShutdownHook 
shutdownHook; + // kafkaAppender遇到异常需要向zk进行写入数据,由于onCompletion()的调用在kafka集群完全挂掉时会有很多阻塞的日志会调用,所以我们需要保证只向zk写一次数据,监控中心只会发生一次报警 + private volatile AtomicBoolean flag = new AtomicBoolean(true); + + /** + * 构造方法 + */ + public KafkaAppender() { + this.checkAndSetConfig(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class.getName()); + this.checkAndSetConfig(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()); + // 设置分区类, 使用自定义的KeyModPartitioner,同样的key进入相同的partition + this.checkAndSetConfig(ProducerConfig.PARTITIONER_CLASS_CONFIG, KeyModPartitioner.class.getName()); + + shutdownHook = new DelayingShutdownHook(); + } + + @Override + public void start() { + // xml配置校验 + if (!this.checkNecessaryConfig()) { + addError("necessary config is not set, kafka appender is not started"); + return; + } + + super.start(); + + // 添加logback shutdown hook, 关闭所有的appender, 调用stop()方法 + shutdownHook.setContext(this.getContext()); + Runtime.getRuntime().addShutdownHook(new Thread(this.shutdownHook)); + + // 初始化zk + this.zkRegister = new ZkRegister(new ZkClient(this.zkServers, 60000, 5000)); + // 注册节点 + this.zkRegister.registerNode(KafkaAppender.this.host, KafkaAppender.this.app, KafkaAppender.this.mail); + + // rpc trace注册中心 + this.zkRegister.registerRpc(KafkaAppender.this.host, KafkaAppender.this.app, KafkaAppender.this.rpc); + } + + /** + * 进行rpc trace注册 + * @param app + * @param host + * @param zkClient + */ + private void register(String app, String host, ZkClient zkClient) { + RegisterDto dto = new RegisterDto(app, host, zkClient); + Registry registry = new ZookeeperRegistry(); + IncrementIdGen.setId(registry.register(dto)); + } + + @Override + public void stop() { + super.stop(); + + // 关闭KafkaProuder + if (LazySingletonProducer.isInstanced()) { + // producer实际上已经初始化 + LazySingletonProducer.getInstance(this.config).close(); + } + + // 关闭client,临时节点消失,监控系统进行感知报警 + ZkClient client = this.zkRegister.getClient(); + if (null != client) { + 
client.close(); + } + } + + @Override + protected void append(E e) { + if (!isStarted()) { + return; + } + final String value = System.nanoTime() + Constants.SEMICOLON + this.encoder.doEncode(e); + final byte[] key = this.keyBuilder.build(e); + final ProducerRecord record = new ProducerRecord(this.topic, key, value); + LazySingletonProducer.getInstance(this.config).send(record, new Callback() { + @Override + public void onCompletion(RecordMetadata recordMetadata, Exception e) { + // TODO: 异常发生如何处理(目前使用RollingFileAppender.java中的方法) + if (null != e) { + // 如果发生异常, 将开始状态设置为false, 并每次append的时候都先check该状态 + started = false; + addStatus(new ErrorStatus("kafka send error in appender", this, e)); + // 发生异常,kafkaAppender 停止收集,向节点写入数据(监控系统会感知进行报警) + if (flag.get() == true) { + zkRegister.write(Constants.SLASH + app + Constants.SLASH + host, NodeMode.EPHEMERAL, + String.valueOf(System.currentTimeMillis()) + Constants.SEMICOLON + SysUtil.userDir); + flag.compareAndSet(true, false); + } + } + } + }); + } + + @Override + public void setContext(Context context) { + super.setContext(context); + + this.host = context.getProperty(CoreConstants.HOSTNAME_KEY); + this.app = context.getName(); + } + + /** + * 校验最基本的配置是否在logback.xml进行配置 + * @return + */ + private boolean checkNecessaryConfig() { + + boolean flag = true; + + // app配置 + if (StringUtil.isBlank(this.host)) { + // host未获取到 + addError("can't get the host"); + flag = false; + } + + if (StringUtil.isBlank(this.app)) { + // app name未设置 + addError("logback.xml is not set the node"); + flag = false; + } + + // zk配置 + if (StringUtil.isBlank(this.zkServers)) { + // zk地址未设置 + addError("can't get zkServers"); + flag = false; + } + + // kafka配置 + if (null == config.get(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG)) { + // kafka的bootstrap.servers未设置 + addError("kafka's " + ProducerConfig.BOOTSTRAP_SERVERS_CONFIG + " do not set, appender: " + name); + flag = false; + } + + if (StringUtil.isBlank(this.topic)) { + // topic未设置(或者设置成了""),无法写入kafka 
+ addError("topic is not set, appender: " + name); + flag = false; + } + + if (StringUtil.isBlank(this.mail)) { + // 报警mail未设置 + addError("mail is not set, appender: " + name); + flag = false; + } + + if (StringUtil.isBlank(this.rpc) || !this.checkRpcType(this.rpc)) { + // rpc未设置或者rpc值不对 + addError("rpc is not set or value not right, appender: " + name); + flag = false; + } + + if (null == this.keyBuilder) { + // key生成器为设置 + addError("key builder is not set, appender: " + name); + flag = false; + } + + if (null == this.encoder) { + // 编码器未设置 + addError("encoder is not set, appender: " + name); + flag = false; + } + return flag; + } + + /** + * 监察rpc type是否正确 + * @param rpcType + * @return + */ + private boolean checkRpcType(String rpcType) { + try { + RpcType.valueOf(rpcType); + return true; + } catch (Exception e) { + return false; + } + } + + /** + * 将logback配置文件中节点中的值读入Map config + * @param kv + */ + public void addConfig(String kv) { + String[] keyValue = kv.split(Constants.EQUAL, 2); + if (keyValue.length == 2) { + this.checkAndSetConfig(keyValue[0], keyValue[1]); + } else { + // 值设置得不对 + addError("config item value is wrong, appender: " + name); + } + } + + /** + * 进行kafka配置设置 + * @param key + * @param value + */ + public void checkAndSetConfig(String key, String value) { + if (!KafkaConfig.PRODUCER_CONFIG_KEYS.contains(key)) { + // 当前kafka版本没有该配置项 + addWarn("in this kafka version don't has this config: " + key); + } + this.config.put(key, value); + } + + public String getTopic() { + return topic; + } + + public void setTopic(String topic) { + this.topic = topic; + } + + public Map getConfig() { + return config; + } + + public void setConfig(Map config) { + this.config = config; + } + + public String getHost() { + return host; + } + + public void setHost(String host) { + this.host = host; + } + + public String getApp() { + return app; + } + + public void setApp(String app) { + this.app = app; + } + + public KeyBuilder getKeyBuilder() { + return keyBuilder; + 
} + + public void setKeyBuilder(KeyBuilder keyBuilder) { + this.keyBuilder = keyBuilder; + } + + public KafkaLayoutEncoder getEncoder() { + return encoder; + } + + public void setEncoder(KafkaLayoutEncoder encoder) { + this.encoder = encoder; + } + + public String getZkServers() { + return zkServers; + } + + public void setZkServers(String zkServers) { + this.zkServers = zkServers; + } + + public String getMail() { + return mail; + } + + public void setMail(String mail) { + this.mail = mail; + } + + public String getRpc() { + return rpc; + } + + public void setRpc(String rpc) { + this.rpc = rpc; + } +} diff --git a/skyeye-client/skyeye-client-logback/src/main/java/com/jthink/skyeye/client/logback/builder/AppHostKeyBuilder.java b/skyeye-client/skyeye-client-logback/src/main/java/com/jthink/skyeye/client/logback/builder/AppHostKeyBuilder.java new file mode 100644 index 0000000..ec81653 --- /dev/null +++ b/skyeye-client/skyeye-client-logback/src/main/java/com/jthink/skyeye/client/logback/builder/AppHostKeyBuilder.java @@ -0,0 +1,49 @@ +package com.jthink.skyeye.client.logback.builder; + +import ch.qos.logback.core.Context; +import ch.qos.logback.core.CoreConstants; +import ch.qos.logback.core.spi.ContextAwareBase; + +import java.nio.ByteBuffer; + +/** + * JThink@JThink + * + * @author JThink + * @version 0.0.1 + * @desc 根据app名字和host值生成key(如果kafka有多个partition,由于kafka的限制,每个partition只能由相等数量的每个cosumer group + * 中的consumer进行消费,并且consumer group中只能有一个consumer消费,为了保证后续logstash进行消费能够保证每个应用的日志 + * 有序,key加上app,相同的app进入相同的partition,由于app有可能是部署多个节点,所以key在加上host可以保证每个app在不同的 + * 节点上的日志能够有序得进行消费) + * @date 2016-09-09 13:27:35 + */ +public class AppHostKeyBuilder extends ContextAwareBase implements KeyBuilder { + + private byte[] appHost; + + @Override + public void setContext(Context context) { + super.setContext(context); + String host = context.getProperty(CoreConstants.HOSTNAME_KEY); + String app = context.getName(); + appHost = ByteBuffer.allocate(4).putInt(new 
StringBuilder(app).append(host).toString().hashCode()).array(); + } + + /** + * 生成key,key规则app+host的byte[] + * @param e log event, ch.qos.logback.classic.spi.ILoggingEvent + * @return + */ + @Override + public byte[] build(E e) { + return appHost; + } + + public byte[] getAppHost() { + return appHost; + } + + public void setAppHost(byte[] appHost) { + this.appHost = appHost; + } +} diff --git a/skyeye-client/skyeye-client-logback/src/main/java/com/jthink/skyeye/client/logback/builder/KeyBuilder.java b/skyeye-client/skyeye-client-logback/src/main/java/com/jthink/skyeye/client/logback/builder/KeyBuilder.java new file mode 100644 index 0000000..6503f4e --- /dev/null +++ b/skyeye-client/skyeye-client-logback/src/main/java/com/jthink/skyeye/client/logback/builder/KeyBuilder.java @@ -0,0 +1,20 @@ +package com.jthink.skyeye.client.logback.builder; + +/** + * JThink@JThink + * + * @author JThink + * @version 0.0.1 + * @desc ProducerRecord需要的key参数,根据该值进行分区 + * @date 2016-09-09 13:23:18 + */ +public interface KeyBuilder { + + /** + * 生成ProducerRecord需要的key参数 + * @param e log event, ch.qos.logback.classic.spi.ILoggingEvent + * @return + */ + byte[] build(E e); + +} diff --git a/skyeye-client/skyeye-client-logback/src/main/java/com/jthink/skyeye/client/logback/encoder/KafkaLayoutEncoder.java b/skyeye-client/skyeye-client-logback/src/main/java/com/jthink/skyeye/client/logback/encoder/KafkaLayoutEncoder.java new file mode 100644 index 0000000..489882e --- /dev/null +++ b/skyeye-client/skyeye-client-logback/src/main/java/com/jthink/skyeye/client/logback/encoder/KafkaLayoutEncoder.java @@ -0,0 +1,64 @@ +package com.jthink.skyeye.client.logback.encoder; + +import ch.qos.logback.core.Layout; +import ch.qos.logback.core.spi.ContextAwareBase; +import ch.qos.logback.core.spi.LifeCycle; + +import java.nio.charset.Charset; + +/** + * JThink@JThink + * + * @author JThink + * @version 0.0.1 + * @desc kafka的encoder + * @date 2016-09-09 16:51:18 + */ +public class KafkaLayoutEncoder extends 
ContextAwareBase implements LifeCycle { + + // layout + private Layout layout; + // 编码,默认utf-8 + private Charset charset; + private boolean started = false; + private static final Charset UTF8 = Charset.forName("UTF-8"); + + public String doEncode(E event) { + return this.layout.doLayout(event); + } + + @Override + public void start() { + if (charset == null) { + addInfo("no set charset, set the default charset is utf-8"); + charset = UTF8; + } + started = true; + } + + @Override + public void stop() { + started = false; + } + + @Override + public boolean isStarted() { + return started; + } + + public Layout getLayout() { + return layout; + } + + public void setLayout(Layout layout) { + this.layout = layout; + } + + public Charset getCharset() { + return charset; + } + + public void setCharset(Charset charset) { + this.charset = charset; + } +} diff --git a/skyeye-client/skyeye-client-logback/todo.md b/skyeye-client/skyeye-client-logback/todo.md deleted file mode 100644 index 22e6689..0000000 --- a/skyeye-client/skyeye-client-logback/todo.md +++ /dev/null @@ -1,2 +0,0 @@ -TODO: - diff --git a/skyeye-collector/build.gradle b/skyeye-collector/build.gradle index 96462bc..d9713b2 100644 --- a/skyeye-collector/build.gradle +++ b/skyeye-collector/build.gradle @@ -28,7 +28,7 @@ ext { dataVersion = '1.0.0' zkclientVersion = '0.9.1-up' hadoopVersion = '2.6.0-cdh5.4.0' - fastJsonVersion = '1.2.28' + fastJsonVersion = '1.2.35' } dependencies { diff --git a/skyeye-trace/build.gradle b/skyeye-trace/build.gradle index f6f0541..ee3056e 100644 --- a/skyeye-trace/build.gradle +++ b/skyeye-trace/build.gradle @@ -17,7 +17,7 @@ ext { baseVersion = '1.0.0' dataVersion = '1.0.0' slf4jVersion = '1.7.21' - fastJsonVersion = '1.2.28' + fastJsonVersion = '1.2.35' zookeeperVersion = '3.4.6' zkclientVersion = '0.9.1-up' } diff --git a/skyeye-web/build.gradle b/skyeye-web/build.gradle index 0bd3d82..e86c9d5 100644 --- a/skyeye-web/build.gradle +++ b/skyeye-web/build.gradle @@ -28,7 +28,7 @@ 
ext { dataVersion = '1.0.0' jacksonVersion = '1.9.13' httpclientVersion = '4.5.2' - fastjsonVersion = '1.2.28' + fastjsonVersion = '1.2.35' zkclientVersion = '0.9.1-up' } From e4e6e505c524deeefc0140e44dbf1d1bf2cfde4a Mon Sep 17 00:00:00 2001 From: JThink Date: Tue, 1 Aug 2017 14:44:20 +0800 Subject: [PATCH 13/27] change statistics use gradle --- skyeye-alarm/build.gradle | 5 +- .../dubbo-service-a/build.gradle | 5 +- .../dubbo-service-b/build.gradle | 5 +- .../dubbo-service-c/build.gradle | 5 +- .../dubbo-service-d/build.gradle | 5 +- .../dubbo-service-e/build.gradle | 5 +- skyeye-benchmark/log-generater/build.gradle | 5 +- skyeye-collector/build.gradle | 5 +- skyeye-monitor/build.gradle | 5 +- skyeye-statistics/build.gradle | 84 +++++++++++++++++++ skyeye-statistics/settings.gradle | 1 + skyeye-web/build.gradle | 5 +- 12 files changed, 95 insertions(+), 40 deletions(-) create mode 100644 skyeye-statistics/build.gradle create mode 100644 skyeye-statistics/settings.gradle diff --git a/skyeye-alarm/build.gradle b/skyeye-alarm/build.gradle index 31ff94b..8ce3737 100644 --- a/skyeye-alarm/build.gradle +++ b/skyeye-alarm/build.gradle @@ -29,10 +29,7 @@ dependencies { compile "skyeye:skyeye-base:$baseVersion" compile "skyeye:skyeye-data-http:$dataVersion" compile "skyeye:skyeye-data-rabbitmq:$dataVersion" - compile ("org.springframework.boot:spring-boot-starter") { - exclude group: 'ch.qos.logback', module: 'logback-classic' - exclude group: 'ch.qos.logback', module: 'logback-core' - } + compile "org.springframework.boot:spring-boot-starter" compile "javax.mail:mail:1.4.7" diff --git a/skyeye-benchmark/dubbo-service/dubbo-service-a/build.gradle b/skyeye-benchmark/dubbo-service/dubbo-service-a/build.gradle index b8430c8..176c0a7 100644 --- a/skyeye-benchmark/dubbo-service/dubbo-service-a/build.gradle +++ b/skyeye-benchmark/dubbo-service/dubbo-service-a/build.gradle @@ -7,10 +7,7 @@ apply plugin: 'application' dependencies { compile project(":dubbo-service-client") compile 
"skyeye:skyeye-data-dubbox:$dataVersion" - compile("org.springframework.boot:spring-boot-starter-web") { - exclude group: 'ch.qos.logback', module: 'logback-classic' - exclude group: 'ch.qos.logback', module: 'logback-core' - } + compile "org.springframework.boot:spring-boot-starter-web" compile "org.apache.zookeeper:zookeeper:$zookeeperVerison" compile "com.101tec:zkclient:$zkClientVersion" diff --git a/skyeye-benchmark/dubbo-service/dubbo-service-b/build.gradle b/skyeye-benchmark/dubbo-service/dubbo-service-b/build.gradle index 92300c8..440ad33 100644 --- a/skyeye-benchmark/dubbo-service/dubbo-service-b/build.gradle +++ b/skyeye-benchmark/dubbo-service/dubbo-service-b/build.gradle @@ -7,10 +7,7 @@ apply plugin: 'application' dependencies { compile project(":dubbo-service-client") compile "skyeye:skyeye-data-dubbox:$dataVersion" - compile("org.springframework.boot:spring-boot-starter") { - exclude group: 'ch.qos.logback', module: 'logback-classic' - exclude group: 'ch.qos.logback', module: 'logback-core' - } + compile "org.springframework.boot:spring-boot-starter" compile "org.apache.zookeeper:zookeeper:$zookeeperVerison" compile "com.101tec:zkclient:$zkClientVersion" diff --git a/skyeye-benchmark/dubbo-service/dubbo-service-c/build.gradle b/skyeye-benchmark/dubbo-service/dubbo-service-c/build.gradle index 5bf69f0..8cdae00 100644 --- a/skyeye-benchmark/dubbo-service/dubbo-service-c/build.gradle +++ b/skyeye-benchmark/dubbo-service/dubbo-service-c/build.gradle @@ -7,10 +7,7 @@ apply plugin: 'application' dependencies { compile project(":dubbo-service-client") compile "skyeye:skyeye-data-dubbox:$dataVersion" - compile("org.springframework.boot:spring-boot-starter") { - exclude group: 'ch.qos.logback', module: 'logback-classic' - exclude group: 'ch.qos.logback', module: 'logback-core' - } + compile "org.springframework.boot:spring-boot-starter" compile "org.apache.zookeeper:zookeeper:$zookeeperVerison" compile "com.101tec:zkclient:$zkClientVersion" diff --git 
a/skyeye-benchmark/dubbo-service/dubbo-service-d/build.gradle b/skyeye-benchmark/dubbo-service/dubbo-service-d/build.gradle index ae31489..a6e2840 100644 --- a/skyeye-benchmark/dubbo-service/dubbo-service-d/build.gradle +++ b/skyeye-benchmark/dubbo-service/dubbo-service-d/build.gradle @@ -7,10 +7,7 @@ apply plugin: 'application' dependencies { compile project(":dubbo-service-client") compile "skyeye:skyeye-data-dubbox:$dataVersion" - compile("org.springframework.boot:spring-boot-starter") { - exclude group: 'ch.qos.logback', module: 'logback-classic' - exclude group: 'ch.qos.logback', module: 'logback-core' - } + compile "org.springframework.boot:spring-boot-starter" compile "org.apache.zookeeper:zookeeper:$zookeeperVerison" compile "com.101tec:zkclient:$zkClientVersion" diff --git a/skyeye-benchmark/dubbo-service/dubbo-service-e/build.gradle b/skyeye-benchmark/dubbo-service/dubbo-service-e/build.gradle index 4fd0826..47820ae 100644 --- a/skyeye-benchmark/dubbo-service/dubbo-service-e/build.gradle +++ b/skyeye-benchmark/dubbo-service/dubbo-service-e/build.gradle @@ -7,10 +7,7 @@ apply plugin: 'application' dependencies { compile project(":dubbo-service-client") compile "skyeye:skyeye-data-dubbox:$dataVersion" - compile("org.springframework.boot:spring-boot-starter") { - exclude group: 'ch.qos.logback', module: 'logback-classic' - exclude group: 'ch.qos.logback', module: 'logback-core' - } + compile "org.springframework.boot:spring-boot-starter" compile "org.apache.zookeeper:zookeeper:$zookeeperVerison" compile "com.101tec:zkclient:$zkClientVersion" diff --git a/skyeye-benchmark/log-generater/build.gradle b/skyeye-benchmark/log-generater/build.gradle index f652e82..3ff608f 100644 --- a/skyeye-benchmark/log-generater/build.gradle +++ b/skyeye-benchmark/log-generater/build.gradle @@ -29,10 +29,7 @@ dependencies { compile ("skyeye:skyeye-client:$clientVersion") { exclude group: 'log4j', module: 'log4j' } - compile ("org.springframework.boot:spring-boot-starter") { - 
exclude group: 'ch.qos.logback', module: 'logback-classic' - exclude group: 'ch.qos.logback', module: 'logback-core' - } + compile "org.springframework.boot:spring-boot-starter" testCompile "org.springframework.boot:spring-boot-starter-test" } diff --git a/skyeye-collector/build.gradle b/skyeye-collector/build.gradle index d9713b2..8e58053 100644 --- a/skyeye-collector/build.gradle +++ b/skyeye-collector/build.gradle @@ -35,10 +35,7 @@ dependencies { compile "skyeye:skyeye-data-jpa:$dataVersion" compile "skyeye:skyeye-data-rabbitmq:$dataVersion" compile "skyeye:skyeye-data-hbase:$dataVersion" - compile ("org.springframework.boot:spring-boot-starter") { - exclude group: 'ch.qos.logback', module: 'logback-classic' - exclude group: 'ch.qos.logback', module: 'logback-core' - } + compile "org.springframework.boot:spring-boot-starter" compile "org.apache.kafka:kafka-clients:$kafkaVersion" diff --git a/skyeye-monitor/build.gradle b/skyeye-monitor/build.gradle index 768ed59..b8a83bd 100644 --- a/skyeye-monitor/build.gradle +++ b/skyeye-monitor/build.gradle @@ -33,10 +33,7 @@ dependencies { compile "skyeye:skyeye-base:$baseVersion" compile "skyeye:skyeye-data-jpa:$dataVersion" compile "skyeye:skyeye-data-rabbitmq:$dataVersion" - compile ("org.springframework.boot:spring-boot-starter") { - exclude group: 'ch.qos.logback', module: 'logback-classic' - exclude group: 'ch.qos.logback', module: 'logback-core' - } + compile "org.springframework.boot:spring-boot-starter" compile("org.apache.commons:commons-dbcp2:2.1.1") compile("mysql:mysql-connector-java:5.1.39") diff --git a/skyeye-statistics/build.gradle b/skyeye-statistics/build.gradle new file mode 100644 index 0000000..385c31c --- /dev/null +++ b/skyeye-statistics/build.gradle @@ -0,0 +1,84 @@ +apply plugin: 'java' +apply plugin: 'eclipse' +apply plugin: 'maven' +apply plugin: 'org.springframework.boot' +apply plugin: 'application' +apply plugin: 'war' + +group = 'skyeye' +applicationName = 'skyeye-statistics' +version = 
'1.0.0' + +sourceCompatibility = 1.8 +targetCompatibility = 1.8 +compileJava.options.encoding = 'UTF-8' +buildDir = 'target' + +repositories { + mavenLocal() + maven { url "https://repository.cloudera.com/artifactory/cloudera-repos" } + maven { url "http://192.168.88.8:8081/nexus/content/repositories/cloudera"} + maven { url "http://192.168.88.8:8081/nexus/content/groups/public" } + maven { url "http://192.168.88.8:8081/nexus/content/repositories/releases" } + maven { url "http://192.168.88.8:8081/nexus/content/repositories/snapshots" } + mavenCentral() +} + +ext { + scalaVersion = '2.10.4' + scalaBinaryVersion = '2.10' + sparkVersion = '1.3.0-cdh5.4.0' + baseVersion = '1.0.0' + fastjsonVersion = '1.2.35' +} + +dependencies { + compile "skyeye:skyeye-base:$baseVersion" + compile "org.springframework.boot:spring-boot-starter" + + providedCompile "org.scala-lang:scala-library:$scalaVersion" + providedCompile "org.scala-lang:scala-compiler:$scalaVersion" + + providedCompile "org.apache.spark:spark-core_$scalaBinaryVersion:$sparkVersion" + + providedCompile "org.apache.spark:spark-streaming-kafka_$scalaBinaryVersion:$sparkVersion" + providedCompile "org.apache.spark:spark-streaming_$scalaBinaryVersion:$sparkVersion" + + compile "com.alibaba:fastjson:$fastjsonVersion" + + testCompile "org.springframework.boot:spring-boot-starter-test" +} + +configurations { + compile.exclude group: 'ch.qos.logback', module: 'logback-classic' + compile.exclude group: 'ch.qos.logback', module: 'logback-core' +} + +mainClassName = 'com.jthink.skyeye.statistics.launcher.Launcher' + +jar { + manifest {abstractHDXDTokenTransactionHandler + attributes "Main-Class": "$mainClassName" + } + + from { + configurations.compile.collect { it.isDirectory() ? 
it : zipTree(it) } + } +} + +buildscript { + ext { + springBootVersion = '1.5.6.RELEASE' + } + + repositories { + mavenLocal() + maven { url "http://192.168.88.8:8081/nexus/content/groups/public" } + mavenCentral() + } + + dependencies { + classpath("org.springframework.boot:spring-boot-gradle-plugin:${springBootVersion}") + classpath("io.spring.gradle:dependency-management-plugin:1.0.3.RELEASE") + } +} diff --git a/skyeye-statistics/settings.gradle b/skyeye-statistics/settings.gradle new file mode 100644 index 0000000..c816356 --- /dev/null +++ b/skyeye-statistics/settings.gradle @@ -0,0 +1 @@ +rootProject.name = 'skyeye-statistics' diff --git a/skyeye-web/build.gradle b/skyeye-web/build.gradle index e86c9d5..9111428 100644 --- a/skyeye-web/build.gradle +++ b/skyeye-web/build.gradle @@ -38,10 +38,7 @@ dependencies { compile "skyeye:skyeye-data-http:$dataVersion" compile "skyeye:skyeye-data-hbase:$dataVersion" compile "skyeye:skyeye-data-rabbitmq:$dataVersion" - compile ("org.springframework.boot:spring-boot-starter") { - exclude group: 'ch.qos.logback', module: 'logback-classic' - exclude group: 'ch.qos.logback', module: 'logback-core' - } + compile "org.springframework.boot:spring-boot-starter" compile "org.springframework.boot:spring-boot-starter-aop" compile("org.springframework.boot:spring-boot-starter-web") { exclude group: 'org.springframework.boot', module: 'spring-boot-starter-validation' From 1d7e500fbb7cb79d25bcb87b30b8eab070825944 Mon Sep 17 00:00:00 2001 From: JThink Date: Wed, 2 Aug 2017 09:17:49 +0800 Subject: [PATCH 14/27] modify the scope --- skyeye-statistics/build.gradle | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/skyeye-statistics/build.gradle b/skyeye-statistics/build.gradle index 385c31c..854a9c1 100644 --- a/skyeye-statistics/build.gradle +++ b/skyeye-statistics/build.gradle @@ -36,13 +36,13 @@ dependencies { compile "skyeye:skyeye-base:$baseVersion" compile "org.springframework.boot:spring-boot-starter" - 
providedCompile "org.scala-lang:scala-library:$scalaVersion" - providedCompile "org.scala-lang:scala-compiler:$scalaVersion" + compileOnly "org.scala-lang:scala-library:$scalaVersion" + compileOnly "org.scala-lang:scala-compiler:$scalaVersion" - providedCompile "org.apache.spark:spark-core_$scalaBinaryVersion:$sparkVersion" + compileOnly "org.apache.spark:spark-core_$scalaBinaryVersion:$sparkVersion" - providedCompile "org.apache.spark:spark-streaming-kafka_$scalaBinaryVersion:$sparkVersion" - providedCompile "org.apache.spark:spark-streaming_$scalaBinaryVersion:$sparkVersion" + compileOnly "org.apache.spark:spark-streaming-kafka_$scalaBinaryVersion:$sparkVersion" + compileOnly "org.apache.spark:spark-streaming_$scalaBinaryVersion:$sparkVersion" compile "com.alibaba:fastjson:$fastjsonVersion" @@ -57,7 +57,7 @@ configurations { mainClassName = 'com.jthink.skyeye.statistics.launcher.Launcher' jar { - manifest {abstractHDXDTokenTransactionHandler + manifest { attributes "Main-Class": "$mainClassName" } @@ -82,3 +82,5 @@ buildscript { classpath("io.spring.gradle:dependency-management-plugin:1.0.3.RELEASE") } } + + From 6c413b2f06f9586d039ebe259549def14a733975 Mon Sep 17 00:00:00 2001 From: JThink Date: Thu, 3 Aug 2017 11:17:30 +0800 Subject: [PATCH 15/27] re arch log indexer collector --- skyeye-alarm/build.gradle | 2 +- .../dubbo-service-b/build.gradle | 2 +- .../dubbo-service-c/build.gradle | 2 +- .../dubbo-service-d/build.gradle | 2 +- .../dubbo-service-e/build.gradle | 2 +- .../performance-test/build.gradle | 2 +- skyeye-benchmark/log-generater/build.gradle | 2 +- skyeye-collector/build.gradle | 121 +++++++----------- skyeye-collector/settings.gradle | 3 +- .../skyeye-collector-core/build.gradle | 29 +++++ .../skyeye-collector-core/settings.gradle | 1 + .../callback/KafkaOffsetCommitCallback.java | 30 +++++ .../configuration/es/EsConfiguration.java | 57 +++++++++ .../core/configuration/es/EsProperties.java | 76 +++++++++++ .../kafka/KafkaConfiguration.java | 47 
+++++++ .../configuration/kafka/KafkaProperties.java | 55 ++++++++ .../core/hook/ShutdownHookRunner.java | 42 ++++++ .../skyeye/collector/core/task/Task.java | 19 +++ .../skyeye-collector-indexer/build.gradle | 41 ++++++ .../skyeye-collector-indexer/settings.gradle | 1 + .../indexer/balancer/HandleRebalance.java} | 10 +- .../collector/indexer/launcher/Launcher.java | 58 +++++++++ .../collector/indexer}/task/IndexerTask.java | 24 ++-- .../skyeye/collector/launcher/Launcher.java | 1 - .../skyeye/collector/task/TaskExecutor.java | 61 --------- skyeye-monitor/build.gradle | 2 +- skyeye-statistics/build.gradle | 89 +++++++------ 27 files changed, 571 insertions(+), 210 deletions(-) create mode 100644 skyeye-collector/skyeye-collector-core/build.gradle create mode 100644 skyeye-collector/skyeye-collector-core/settings.gradle create mode 100644 skyeye-collector/skyeye-collector-core/src/main/java/com/jthink/skyeye/collector/core/callback/KafkaOffsetCommitCallback.java create mode 100644 skyeye-collector/skyeye-collector-core/src/main/java/com/jthink/skyeye/collector/core/configuration/es/EsConfiguration.java create mode 100644 skyeye-collector/skyeye-collector-core/src/main/java/com/jthink/skyeye/collector/core/configuration/es/EsProperties.java create mode 100644 skyeye-collector/skyeye-collector-core/src/main/java/com/jthink/skyeye/collector/core/configuration/kafka/KafkaConfiguration.java create mode 100644 skyeye-collector/skyeye-collector-core/src/main/java/com/jthink/skyeye/collector/core/configuration/kafka/KafkaProperties.java create mode 100644 skyeye-collector/skyeye-collector-core/src/main/java/com/jthink/skyeye/collector/core/hook/ShutdownHookRunner.java create mode 100644 skyeye-collector/skyeye-collector-core/src/main/java/com/jthink/skyeye/collector/core/task/Task.java create mode 100644 skyeye-collector/skyeye-collector-indexer/build.gradle create mode 100644 skyeye-collector/skyeye-collector-indexer/settings.gradle rename 
skyeye-collector/{src/main/java/com/jthink/skyeye/collector/listener/HandleRebalanceForApp.java => skyeye-collector-indexer/src/main/java/com/jthink/skyeye/collector/indexer/balancer/HandleRebalance.java} (80%) create mode 100644 skyeye-collector/skyeye-collector-indexer/src/main/java/com/jthink/skyeye/collector/indexer/launcher/Launcher.java rename skyeye-collector/{src/main/java/com/jthink/skyeye/collector => skyeye-collector-indexer/src/main/java/com/jthink/skyeye/collector/indexer}/task/IndexerTask.java (85%) delete mode 100644 skyeye-collector/src/main/java/com/jthink/skyeye/collector/task/TaskExecutor.java diff --git a/skyeye-alarm/build.gradle b/skyeye-alarm/build.gradle index 8ce3737..9e6d26b 100644 --- a/skyeye-alarm/build.gradle +++ b/skyeye-alarm/build.gradle @@ -42,7 +42,7 @@ configurations { compile.exclude group: "org.springframework", module: "spring-web" } -mainClassName = 'com.jthink.skyeye.alarm.launcher.Launcher' +mainClassName = 'com.jthink.skyeye.alarm.launcher.launcher' buildscript { ext { diff --git a/skyeye-benchmark/dubbo-service/dubbo-service-b/build.gradle b/skyeye-benchmark/dubbo-service/dubbo-service-b/build.gradle index 440ad33..b74765d 100644 --- a/skyeye-benchmark/dubbo-service/dubbo-service-b/build.gradle +++ b/skyeye-benchmark/dubbo-service/dubbo-service-b/build.gradle @@ -19,7 +19,7 @@ configurations { compile.exclude group: "org.slf4j", module: "slf4j-log4j12" } -mainClassName = "com.jthink.skyeye.benchmark.dubbo.service.b.launcher.Launcher" +mainClassName = "com.jthink.skyeye.benchmark.dubbo.service.b.launcher.launcher" buildscript { ext { diff --git a/skyeye-benchmark/dubbo-service/dubbo-service-c/build.gradle b/skyeye-benchmark/dubbo-service/dubbo-service-c/build.gradle index 8cdae00..8d19933 100644 --- a/skyeye-benchmark/dubbo-service/dubbo-service-c/build.gradle +++ b/skyeye-benchmark/dubbo-service/dubbo-service-c/build.gradle @@ -19,7 +19,7 @@ configurations { compile.exclude group: "org.slf4j", module: "slf4j-log4j12" } 
-mainClassName = "com.jthink.skyeye.benchmark.dubbo.service.c.launcher.Launcher" +mainClassName = "com.jthink.skyeye.benchmark.dubbo.service.c.launcher.launcher" buildscript { ext { diff --git a/skyeye-benchmark/dubbo-service/dubbo-service-d/build.gradle b/skyeye-benchmark/dubbo-service/dubbo-service-d/build.gradle index a6e2840..b874a87 100644 --- a/skyeye-benchmark/dubbo-service/dubbo-service-d/build.gradle +++ b/skyeye-benchmark/dubbo-service/dubbo-service-d/build.gradle @@ -19,7 +19,7 @@ configurations { compile.exclude group: "org.slf4j", module: "slf4j-log4j12" } -mainClassName = "com.jthink.skyeye.benchmark.dubbo.service.d.launcher.Launcher" +mainClassName = "com.jthink.skyeye.benchmark.dubbo.service.d.launcher.launcher" buildscript { ext { diff --git a/skyeye-benchmark/dubbo-service/dubbo-service-e/build.gradle b/skyeye-benchmark/dubbo-service/dubbo-service-e/build.gradle index 47820ae..cc3a998 100644 --- a/skyeye-benchmark/dubbo-service/dubbo-service-e/build.gradle +++ b/skyeye-benchmark/dubbo-service/dubbo-service-e/build.gradle @@ -19,7 +19,7 @@ configurations { compile.exclude group: "org.slf4j", module: "slf4j-log4j12" } -mainClassName = "com.jthink.skyeye.benchmark.dubbo.service.e.launcher.Launcher" +mainClassName = "com.jthink.skyeye.benchmark.dubbo.service.e.launcher.launcher" buildscript { ext { diff --git a/skyeye-benchmark/dubbo-service/performance-test/build.gradle b/skyeye-benchmark/dubbo-service/performance-test/build.gradle index 4688c7f..3b14a2c 100644 --- a/skyeye-benchmark/dubbo-service/performance-test/build.gradle +++ b/skyeye-benchmark/dubbo-service/performance-test/build.gradle @@ -13,7 +13,7 @@ configurations { compile.exclude group: "org.slf4j", module: "slf4j-log4j12" } -mainClassName = "com.jthink.skyeye.benchmark.performance.test.launcher.Launcher" +mainClassName = "com.jthink.skyeye.benchmark.performance.test.launcher.launcher" buildscript { repositories { diff --git a/skyeye-benchmark/log-generater/build.gradle 
b/skyeye-benchmark/log-generater/build.gradle index 3ff608f..62823e0 100644 --- a/skyeye-benchmark/log-generater/build.gradle +++ b/skyeye-benchmark/log-generater/build.gradle @@ -40,7 +40,7 @@ configurations { compile.exclude group: "org.springframework", module: "spring-web" } -mainClassName = 'com.jthink.skyeye.benchmark.log.generater.launcher.Launcher' +mainClassName = 'com.jthink.skyeye.benchmark.log.generater.launcher.launcher' buildscript { ext { diff --git a/skyeye-collector/build.gradle b/skyeye-collector/build.gradle index 8e58053..7881909 100644 --- a/skyeye-collector/build.gradle +++ b/skyeye-collector/build.gradle @@ -1,97 +1,62 @@ -apply plugin: 'java' -apply plugin: 'eclipse' -apply plugin: 'maven' -apply plugin: 'org.springframework.boot' -apply plugin: 'application' - -group = 'skyeye' -applicationName = 'skyeye-collector' -version = '1.0.0' - -sourceCompatibility = 1.8 -targetCompatibility = 1.8 -compileJava.options.encoding = 'UTF-8' -buildDir = 'target' - -repositories { - mavenLocal() - maven { url "https://repository.cloudera.com/artifactory/cloudera-repos" } - maven { url "http://192.168.88.8:8081/nexus/content/groups/public" } - maven { url "http://192.168.88.8:8081/nexus/content/repositories/releases" } - mavenCentral() -} - -ext { - kafkaVersion = '0.10.0.1' - esVersion = '2.3.3' - traceVersion = '1.0.0' - dataVersion = '1.0.0' - zkclientVersion = '0.9.1-up' - hadoopVersion = '2.6.0-cdh5.4.0' - fastJsonVersion = '1.2.35' -} - -dependencies { - compile "skyeye:skyeye-data-jpa:$dataVersion" - compile "skyeye:skyeye-data-rabbitmq:$dataVersion" - compile "skyeye:skyeye-data-hbase:$dataVersion" - compile "org.springframework.boot:spring-boot-starter" - - compile "org.apache.kafka:kafka-clients:$kafkaVersion" - - compile "org.elasticsearch:elasticsearch:$esVersion" - - compile "org.springframework.boot:spring-boot-starter-data-redis" - - compile "com.101tec:zkclient:$zkclientVersion" - - compile "commons-lang:commons-lang:2.6" - - compile 
("org.apache.hadoop:hadoop-common:$hadoopVersion") { - exclude group: 'tomcat', module: 'jasper-runtime' - exclude group: 'tomcat', module: 'jasper-compiler' +allprojects { + apply plugin: 'java' + apply plugin: 'eclipse' + + group = 'skyeye' + version = '1.0.0' + sourceCompatibility = 1.8 + targetCompatibility = 1.8 + compileJava.options.encoding = 'UTF-8' + buildDir = 'target' + + ext { + mavenClouderaUrl = 'https://repository.cloudera.com/artifactory/cloudera-repos' + mavenPublicUrl = 'http://192.168.88.8:8081/nexus/content/repositories/public' + mavenReleaseUrl = 'http://192.168.88.8:8081/nexus/content/repositories/releases' + mavenSnapshotUrl = 'http://192.168.88.8:8081/nexus/content/repositories/snapshots' } - compile ("org.apache.hadoop:hadoop-hdfs:$hadoopVersion") { - exclude group: 'tomcat', module: 'jasper-runtime' - exclude group: 'tomcat', module: 'jasper-compiler' + task sourcesJar(type: Jar, dependsOn: classes) { + classifier = 'sources' + from sourceSets.main.allSource } - compile ("org.apache.commons:commons-dbcp2:2.1.1") - compile ("mysql:mysql-connector-java:5.1.39") - - compile "com.alibaba:fastjson:$fastJsonVersion" - - testCompile "org.springframework.boot:spring-boot-starter-test" -} + // task javadocJar(type: Jar, dependsOn: javadoc) { + // classifier = 'javadoc' + // from javadoc.destinationDir + // } -configurations { - compile.exclude group: "log4j", module: "log4j" - compile.exclude group: "org.slf4j", module: "slf4j-log4j12" - compile.exclude group: "org.springframework", module: "spring-web" + artifacts { + archives sourcesJar + // archives javadocJar + } } +subprojects { + apply plugin: 'java' + apply plugin: 'maven' + apply plugin: 'eclipse' -mainClassName = 'com.jthink.skyeye.collector.launcher.Launcher' - -buildscript { ext { + baseVersion = '1.0.0' + kafkaVersion = '0.10.0.1' springBootVersion = '1.5.6.RELEASE' + esVersion = '2.3.3' } + [compileJava, compileTestJava]*.options*.encoding = 'UTF-8' + repositories { mavenLocal() - 
maven { url "http://192.168.88.8:8081/nexus/content/groups/public" } + maven { url mavenPublicUrl } + maven { url mavenReleaseUrl } + maven { url mavenSnapshotUrl } mavenCentral() } dependencies { - classpath("org.springframework.boot:spring-boot-gradle-plugin:${springBootVersion}") - classpath("io.spring.gradle:dependency-management-plugin:1.0.3.RELEASE") + compile "org.apache.kafka:kafka-clients:$kafkaVersion" + compile "org.springframework.boot:spring-boot-starter:$springBootVersion" + compile "skyeye:skyeye-base:$baseVersion" + compile "org.elasticsearch:elasticsearch:$esVersion" } } - -startScripts { - doLast { - unixScript.text = unixScript.text.replaceAll("lib/(.*)\n", "lib/\\*") - } -} \ No newline at end of file diff --git a/skyeye-collector/settings.gradle b/skyeye-collector/settings.gradle index 8c049b0..5b43fdc 100644 --- a/skyeye-collector/settings.gradle +++ b/skyeye-collector/settings.gradle @@ -1 +1,2 @@ -rootProject.name = 'skyeye-collector' \ No newline at end of file +include 'skyeye-collector-core', 'skyeye-collector-indexer' +//, 'skyeye-collector-backup', , 'skyeye-collector-trace', ''skyeye-collector-metrics diff --git a/skyeye-collector/skyeye-collector-core/build.gradle b/skyeye-collector/skyeye-collector-core/build.gradle new file mode 100644 index 0000000..786f78b --- /dev/null +++ b/skyeye-collector/skyeye-collector-core/build.gradle @@ -0,0 +1,29 @@ +apply plugin: 'java' +apply plugin: 'maven' +apply plugin: 'eclipse' + +sourceCompatibility = 1.8 +targetCompatibility = 1.8 +compileJava.options.encoding = 'UTF-8' +buildDir = 'target' + +ext { + +} + +repositories { + mavenLocal() + maven { url mavenPublicUrl } + maven { url mavenReleaseUrl } + maven { url mavenSnapshotUrl } + mavenCentral() +} + +dependencies { + +} + +configurations { + compile.exclude group: "log4j", module: "log4j" + compile.exclude group: "org.slf4j", module: "slf4j-log4j12" +} diff --git a/skyeye-collector/skyeye-collector-core/settings.gradle 
b/skyeye-collector/skyeye-collector-core/settings.gradle new file mode 100644 index 0000000..ff62ecc --- /dev/null +++ b/skyeye-collector/skyeye-collector-core/settings.gradle @@ -0,0 +1 @@ +rootProject.name = 'skyeye-collector-core' diff --git a/skyeye-collector/skyeye-collector-core/src/main/java/com/jthink/skyeye/collector/core/callback/KafkaOffsetCommitCallback.java b/skyeye-collector/skyeye-collector-core/src/main/java/com/jthink/skyeye/collector/core/callback/KafkaOffsetCommitCallback.java new file mode 100644 index 0000000..bce22b3 --- /dev/null +++ b/skyeye-collector/skyeye-collector-core/src/main/java/com/jthink/skyeye/collector/core/callback/KafkaOffsetCommitCallback.java @@ -0,0 +1,30 @@ +package com.jthink.skyeye.collector.core.callback; + +import org.apache.kafka.clients.consumer.OffsetAndMetadata; +import org.apache.kafka.clients.consumer.OffsetCommitCallback; +import org.apache.kafka.common.TopicPartition; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.Map; + +/** + * JThink@JThink + * + * @author JThink + * @version 0.0.1 + * @desc + * @date 2016-09-20 10:22:54 + */ +public class KafkaOffsetCommitCallback implements OffsetCommitCallback { + + private static final Logger LOGGER = LoggerFactory.getLogger(KafkaOffsetCommitCallback.class); + + @Override + public void onComplete(Map<TopicPartition, OffsetAndMetadata> offsets, Exception exception) { + if (null != exception) { + // 如果异步提交发生了异常 + LOGGER.error("commit failed for offsets {}, and exception is {}", offsets, exception); + } + } +} diff --git a/skyeye-collector/skyeye-collector-core/src/main/java/com/jthink/skyeye/collector/core/configuration/es/EsConfiguration.java b/skyeye-collector/skyeye-collector-core/src/main/java/com/jthink/skyeye/collector/core/configuration/es/EsConfiguration.java new file mode 100644 index 0000000..e538623 --- /dev/null +++ b/skyeye-collector/skyeye-collector-core/src/main/java/com/jthink/skyeye/collector/core/configuration/es/EsConfiguration.java @@ -0,0 +1,57 @@ +package
com.jthink.skyeye.collector.core.configuration.es; + +import com.jthink.skyeye.base.constant.Constants; +import org.elasticsearch.client.transport.TransportClient; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.transport.InetSocketTransportAddress; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.autoconfigure.condition.ConditionalOnClass; +import org.springframework.boot.context.properties.EnableConfigurationProperties; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; + +import java.net.InetAddress; +import java.net.UnknownHostException; + +/** + * JThink@JThink + * + * @author JThink + * @version 0.0.1 + * @desc elasticsearch 配置 + * @date 2016-09-20 09:13:32 + */ +@ConditionalOnClass({ Settings.class, TransportClient.class }) +@Configuration +@EnableConfigurationProperties(EsProperties.class) +public class EsConfiguration { + + private static Logger LOGGER = LoggerFactory.getLogger(EsConfiguration.class); + + @Autowired + private EsProperties esProperties; + + @Bean + public Settings settings() { + Settings settings = Settings.settingsBuilder().put("cluster.name", this.esProperties.getCluster()) + .put("client.transport.sniff", this.esProperties.isSniff()).build(); + + return settings; + } + + @Bean + public TransportClient transportClient(Settings settings) { + TransportClient client = TransportClient.builder().settings(settings).build(); + for (String ip : this.esProperties.getIps().split(Constants.COMMA)) { + try { + client.addTransportAddress(new InetSocketTransportAddress(InetAddress.getByName(ip), this.esProperties.getPort())); + } catch (UnknownHostException e) { + LOGGER.error("es集群主机名错误, ip: {}", ip); + } + } + return client; + } + +} diff --git 
a/skyeye-collector/skyeye-collector-core/src/main/java/com/jthink/skyeye/collector/core/configuration/es/EsProperties.java b/skyeye-collector/skyeye-collector-core/src/main/java/com/jthink/skyeye/collector/core/configuration/es/EsProperties.java new file mode 100644 index 0000000..c0d22d1 --- /dev/null +++ b/skyeye-collector/skyeye-collector-core/src/main/java/com/jthink/skyeye/collector/core/configuration/es/EsProperties.java @@ -0,0 +1,76 @@ +package com.jthink.skyeye.collector.core.configuration.es; + +import org.springframework.boot.context.properties.ConfigurationProperties; + +/** + * JThink@JThink + * + * @author JThink + * @version 0.0.1 + * @desc es的配置项 + * @date 2016-09-20 10:44:54 + */ +@ConfigurationProperties(prefix = "spring.indexer.es") +public class EsProperties { + + private String ips; + + private String cluster; + + private int port; + + private boolean sniff; + + private String index; + + private String doc; + + public String getIps() { + return ips; + } + + public void setIps(String ips) { + this.ips = ips; + } + + public String getCluster() { + return cluster; + } + + public void setCluster(String cluster) { + this.cluster = cluster; + } + + public int getPort() { + return port; + } + + public void setPort(int port) { + this.port = port; + } + + public boolean isSniff() { + return sniff; + } + + public void setSniff(boolean sniff) { + this.sniff = sniff; + } + + public String getIndex() { + return index; + } + + public void setIndex(String index) { + this.index = index; + } + + public String getDoc() { + return doc; + } + + public void setDoc(String doc) { + this.doc = doc; + } + +} diff --git a/skyeye-collector/skyeye-collector-core/src/main/java/com/jthink/skyeye/collector/core/configuration/kafka/KafkaConfiguration.java b/skyeye-collector/skyeye-collector-core/src/main/java/com/jthink/skyeye/collector/core/configuration/kafka/KafkaConfiguration.java new file mode 100644 index 0000000..a81c340 --- /dev/null +++ 
b/skyeye-collector/skyeye-collector-core/src/main/java/com/jthink/skyeye/collector/core/configuration/kafka/KafkaConfiguration.java @@ -0,0 +1,47 @@ +package com.jthink.skyeye.collector.core.configuration.kafka; + +import org.apache.kafka.clients.consumer.ConsumerConfig; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.apache.kafka.common.serialization.ByteArrayDeserializer; +import org.apache.kafka.common.serialization.StringDeserializer; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.context.properties.EnableConfigurationProperties; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; + +import java.util.HashMap; +import java.util.Map; + +/** + * JThink@JThink + * + * @author JThink + * @version 0.0.1 + * @desc kafka配置 + * @date 2016-09-18 11:12:07 + */ +@Configuration +@EnableConfigurationProperties({KafkaProperties.class}) +public class KafkaConfiguration { + + @Autowired + private KafkaProperties kafkaProperties; + + // kafka consumer + @Bean + public KafkaConsumer kafkaConsumerApp() { + Map config = new HashMap(); + config.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, this.kafkaProperties.getBrokers()); + config.put(ConsumerConfig.GROUP_ID_CONFIG, this.kafkaProperties.getConsumeGroup()); + // 手动commit offset到kafka(该版本不将offset保存到zk) + config.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, false); + config.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, 15000); + config.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class.getName()); + config.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName()); + + KafkaConsumer kafkaConsumer = new KafkaConsumer(config); + + return kafkaConsumer; + } + +} diff --git a/skyeye-collector/skyeye-collector-core/src/main/java/com/jthink/skyeye/collector/core/configuration/kafka/KafkaProperties.java 
b/skyeye-collector/skyeye-collector-core/src/main/java/com/jthink/skyeye/collector/core/configuration/kafka/KafkaProperties.java new file mode 100644 index 0000000..4766fa4 --- /dev/null +++ b/skyeye-collector/skyeye-collector-core/src/main/java/com/jthink/skyeye/collector/core/configuration/kafka/KafkaProperties.java @@ -0,0 +1,55 @@ +package com.jthink.skyeye.collector.core.configuration.kafka; + +import org.springframework.boot.context.properties.ConfigurationProperties; + +/** + * JThink@JThink + * + * @author JThink + * @version 0.0.1 + * @desc kafka配置项 + * @date 2016-09-20 10:15:05 + */ +@ConfigurationProperties(prefix = "spring.message.kafka") +public class KafkaProperties { + + private String brokers; + + private String consumeGroup; + + private long pollTimeout; + + private String topic; + + public String getBrokers() { + return brokers; + } + + public void setBrokers(String brokers) { + this.brokers = brokers; + } + + public String getConsumeGroup() { + return consumeGroup; + } + + public void setConsumeGroup(String consumeGroup) { + this.consumeGroup = consumeGroup; + } + + public long getPollTimeout() { + return pollTimeout; + } + + public void setPollTimeout(long pollTimeout) { + this.pollTimeout = pollTimeout; + } + + public String getTopic() { + return topic; + } + + public void setTopic(String topic) { + this.topic = topic; + } +} diff --git a/skyeye-collector/skyeye-collector-core/src/main/java/com/jthink/skyeye/collector/core/hook/ShutdownHookRunner.java b/skyeye-collector/skyeye-collector-core/src/main/java/com/jthink/skyeye/collector/core/hook/ShutdownHookRunner.java new file mode 100644 index 0000000..87a0417 --- /dev/null +++ b/skyeye-collector/skyeye-collector-core/src/main/java/com/jthink/skyeye/collector/core/hook/ShutdownHookRunner.java @@ -0,0 +1,42 @@ +package com.jthink.skyeye.collector.core.hook; + +import com.jthink.skyeye.collector.core.task.Task; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.slf4j.Logger; 
+import org.slf4j.LoggerFactory; + +/** + * JThink@JThink + * + * @author JThink + * @version 0.0.1 + * @desc 项目启动器 + * @date 2017-08-03 18:31:48 + * @since 1.0.0 + */ +public class ShutdownHookRunner extends Thread { + + private static final Logger LOGGER = LoggerFactory.getLogger(ShutdownHookRunner.class); + + private KafkaConsumer kafkaConsumer; + + private Task task; + + public ShutdownHookRunner(KafkaConsumer kafkaConsumer, Task task) { + this.kafkaConsumer = kafkaConsumer; + this.task = task; + } + + @Override + public void run() { + LOGGER.info("starting to exit"); + + this.kafkaConsumer.wakeup(); + + try { + this.task.executeThread().join(); + } catch (InterruptedException e) { + LOGGER.error("interrupted, ", e); + } + } +} diff --git a/skyeye-collector/skyeye-collector-core/src/main/java/com/jthink/skyeye/collector/core/task/Task.java b/skyeye-collector/skyeye-collector-core/src/main/java/com/jthink/skyeye/collector/core/task/Task.java new file mode 100644 index 0000000..b739996 --- /dev/null +++ b/skyeye-collector/skyeye-collector-core/src/main/java/com/jthink/skyeye/collector/core/task/Task.java @@ -0,0 +1,19 @@ +package com.jthink.skyeye.collector.core.task; + +/** + * JThink@JThink + * + * @author JThink + * @version 0.0.1 + * @desc kafka消费task + * @date 2016-09-20 10:24:24 + */ +public interface Task extends Runnable { + + /** + * 执行task + */ + void doTask(); + + Thread executeThread(); +} diff --git a/skyeye-collector/skyeye-collector-indexer/build.gradle b/skyeye-collector/skyeye-collector-indexer/build.gradle new file mode 100644 index 0000000..6b6058d --- /dev/null +++ b/skyeye-collector/skyeye-collector-indexer/build.gradle @@ -0,0 +1,41 @@ +apply plugin: 'java' +apply plugin: 'eclipse' +apply plugin: 'maven' +apply plugin: 'org.springframework.boot' +apply plugin: 'application' + +dependencies { + compile project(":skyeye-collector-core") + + testCompile "org.springframework.boot:spring-boot-starter-test" +} + +configurations { + compile.exclude 
group: "log4j", module: "log4j" + compile.exclude group: "org.slf4j", module: "slf4j-log4j12" +} + +mainClassName = "com.jthink.skyeye.collector.indexer.launcher.Launcher" + +buildscript { + ext { + springBootVersion = '1.5.6.RELEASE' + } + + repositories { + mavenLocal() + maven { url mavenPublicUrl } + mavenCentral() + } + + dependencies { + classpath("org.springframework.boot:spring-boot-gradle-plugin:${springBootVersion}") + classpath("io.spring.gradle:dependency-management-plugin:1.0.3.RELEASE") + } +} + +startScripts { + doLast { + unixScript.text = unixScript.text.replaceAll("lib/(.*)\n", "lib/\\*") + } +} diff --git a/skyeye-collector/skyeye-collector-indexer/settings.gradle b/skyeye-collector/skyeye-collector-indexer/settings.gradle new file mode 100644 index 0000000..501d0e5 --- /dev/null +++ b/skyeye-collector/skyeye-collector-indexer/settings.gradle @@ -0,0 +1 @@ +rootProject.name = 'skyeye-collector-indexer' diff --git a/skyeye-collector/src/main/java/com/jthink/skyeye/collector/listener/HandleRebalanceForApp.java b/skyeye-collector/skyeye-collector-indexer/src/main/java/com/jthink/skyeye/collector/indexer/balancer/HandleRebalance.java similarity index 80% rename from skyeye-collector/src/main/java/com/jthink/skyeye/collector/listener/HandleRebalanceForApp.java rename to skyeye-collector/skyeye-collector-indexer/src/main/java/com/jthink/skyeye/collector/indexer/balancer/HandleRebalance.java index 46aeed3..2bf98a5 100644 --- a/skyeye-collector/src/main/java/com/jthink/skyeye/collector/listener/HandleRebalanceForApp.java +++ b/skyeye-collector/skyeye-collector-indexer/src/main/java/com/jthink/skyeye/collector/indexer/balancer/HandleRebalance.java @@ -1,7 +1,7 @@ -package com.jthink.skyeye.collector.listener; +package com.jthink.skyeye.collector.indexer.balancer; -import com.jthink.skyeye.collector.configuration.kafka.KafkaProperties; -import com.jthink.skyeye.collector.task.IndexerTask; +import 
com.jthink.skyeye.collector.core.configuration.kafka.KafkaProperties; +import com.jthink.skyeye.collector.indexer.task.IndexerTask; import org.apache.kafka.clients.consumer.ConsumerRebalanceListener; import org.apache.kafka.clients.consumer.KafkaConsumer; import org.apache.kafka.common.TopicPartition; @@ -23,9 +23,9 @@ * @date 2016-09-20 11:14:27 */ @Component -public class HandleRebalanceForApp implements ConsumerRebalanceListener, InitializingBean { +public class HandleRebalance implements ConsumerRebalanceListener, InitializingBean { - private static final Logger LOGGER = LoggerFactory.getLogger(HandleRebalanceForApp.class); + private static final Logger LOGGER = LoggerFactory.getLogger(HandleRebalance.class); @Autowired private KafkaConsumer kafkaConsumerApp; diff --git a/skyeye-collector/skyeye-collector-indexer/src/main/java/com/jthink/skyeye/collector/indexer/launcher/Launcher.java b/skyeye-collector/skyeye-collector-indexer/src/main/java/com/jthink/skyeye/collector/indexer/launcher/Launcher.java new file mode 100644 index 0000000..a328914 --- /dev/null +++ b/skyeye-collector/skyeye-collector-indexer/src/main/java/com/jthink/skyeye/collector/indexer/launcher/Launcher.java @@ -0,0 +1,58 @@ +package com.jthink.skyeye.collector.indexer.launcher; + +import com.jthink.skyeye.collector.core.hook.ShutdownHookRunner; +import com.jthink.skyeye.collector.core.task.Task; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.boot.autoconfigure.EnableAutoConfiguration; +import org.springframework.boot.autoconfigure.SpringBootApplication; +import org.springframework.boot.builder.SpringApplicationBuilder; +import org.springframework.boot.logging.LoggingApplicationListener; +import org.springframework.context.ApplicationListener; +import org.springframework.context.ConfigurableApplicationContext; +import org.springframework.context.annotation.ComponentScan; +import 
org.springframework.context.annotation.PropertySource; + +import java.util.Iterator; +import java.util.Set; + +/** + * JThink@JThink + * + * @author JThink + * @version 0.0.1 + * @desc 项目启动器 + * @date 2016-08-24 18:31:48 + */ +@SpringBootApplication +@EnableAutoConfiguration +@ComponentScan(basePackages={"com.jthink.skyeye.collector.core", "com.jthink.skyeye.collector.indexer"}) +@PropertySource("file:/opt/jthink/jthink-config/skyeye/collector/collector-indexer.properties") +//@PropertySource("classpath:properties/collector-indexer.properties") +public class Launcher { + + private static final Logger LOGGER = LoggerFactory.getLogger(Launcher.class); + + public static void main(String[] args) { + SpringApplicationBuilder builder = new SpringApplicationBuilder(Launcher.class); + Set<ApplicationListener<?>> listeners = builder.application().getListeners(); + for (Iterator<ApplicationListener<?>> it = listeners.iterator(); it.hasNext();) { + ApplicationListener<?> listener = it.next(); + if (listener instanceof LoggingApplicationListener) { + it.remove(); + } + } + builder.application().setListeners(listeners); + ConfigurableApplicationContext context = builder.run(args); + LOGGER.info("collector indexer start successfully"); + + KafkaConsumer kafkaConsumer = (KafkaConsumer) context.getBean("kafkaConsumer"); + Task task = (Task) context.getBean("indexerTask"); + + // 优雅停止项目 + Runtime.getRuntime().addShutdownHook(new ShutdownHookRunner(kafkaConsumer, task)); + task.doTask(); + } + +} diff --git a/skyeye-collector/src/main/java/com/jthink/skyeye/collector/task/IndexerTask.java b/skyeye-collector/skyeye-collector-indexer/src/main/java/com/jthink/skyeye/collector/indexer/task/IndexerTask.java similarity index 85% rename from skyeye-collector/src/main/java/com/jthink/skyeye/collector/task/IndexerTask.java rename to skyeye-collector/skyeye-collector-indexer/src/main/java/com/jthink/skyeye/collector/indexer/task/IndexerTask.java index 2a3d999..47b44e7 100644 ---
a/skyeye-collector/src/main/java/com/jthink/skyeye/collector/task/IndexerTask.java +++ b/skyeye-collector/skyeye-collector-indexer/src/main/java/com/jthink/skyeye/collector/indexer/task/IndexerTask.java @@ -1,11 +1,15 @@ -package com.jthink.skyeye.collector.task; +package com.jthink.skyeye.collector.indexer.task; -import com.jthink.skyeye.collector.callback.KafkaOffsetCommitCallback; -import com.jthink.skyeye.collector.configuration.es.EsProperties; -import com.jthink.skyeye.collector.configuration.kafka.KafkaProperties; import com.jthink.skyeye.base.constant.Constants; import com.jthink.skyeye.base.dto.LogDto; -import org.apache.kafka.clients.consumer.*; +import com.jthink.skyeye.collector.core.callback.KafkaOffsetCommitCallback; +import com.jthink.skyeye.collector.core.configuration.es.EsProperties; +import com.jthink.skyeye.collector.core.configuration.kafka.KafkaProperties; +import com.jthink.skyeye.collector.core.task.Task; +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.apache.kafka.clients.consumer.OffsetAndMetadata; import org.apache.kafka.common.TopicPartition; import org.apache.kafka.common.errors.WakeupException; import org.elasticsearch.action.bulk.BulkRequestBuilder; @@ -35,7 +39,7 @@ public class IndexerTask implements Task { private static final Logger LOGGER = LoggerFactory.getLogger(IndexerTask.class); @Autowired - private KafkaConsumer kafkaConsumerApp; + private KafkaConsumer kafkaConsumer; @Autowired private KafkaProperties kafkaProperties; @Autowired @@ -62,7 +66,7 @@ public void doTask() { int count = 0; try { while (true) { - ConsumerRecords records = this.kafkaConsumerApp.poll(this.kafkaProperties.getPollTimeout()); + ConsumerRecords records = this.kafkaConsumer.poll(this.kafkaProperties.getPollTimeout()); if (!records.isEmpty()) { for (ConsumerRecord record : records) { String value = record.value(); @@ 
-78,7 +82,7 @@ public void doTask() { count++; if (count >= 1000) { // 当达到了1000触发向kafka提交offset - kafkaConsumerApp.commitAsync(currentOffsets, new KafkaOffsetCommitCallback()); + kafkaConsumer.commitAsync(currentOffsets, new KafkaOffsetCommitCallback()); count = 0; } } @@ -88,7 +92,7 @@ public void doTask() { } LOGGER.info("total record: {}, indexed {} records to es", records.count(), size); bulkRequest = transportClient.prepareBulk(); - kafkaConsumerApp.commitAsync(currentOffsets, new KafkaOffsetCommitCallback()); + kafkaConsumer.commitAsync(currentOffsets, new KafkaOffsetCommitCallback()); } } } catch (WakeupException e) { @@ -97,7 +101,7 @@ public void doTask() { } catch (Exception e) { LOGGER.error("process records error, {}", e); } finally { - kafkaConsumerApp.commitSync(currentOffsets); + kafkaConsumer.commitSync(currentOffsets); LOGGER.info("finally commit the offset"); // 不需要主动调kafkaConsumer.close(), spring bean容器会调用 } diff --git a/skyeye-collector/src/main/java/com/jthink/skyeye/collector/launcher/Launcher.java b/skyeye-collector/src/main/java/com/jthink/skyeye/collector/launcher/Launcher.java index 9fa0514..d5574b7 100644 --- a/skyeye-collector/src/main/java/com/jthink/skyeye/collector/launcher/Launcher.java +++ b/skyeye-collector/src/main/java/com/jthink/skyeye/collector/launcher/Launcher.java @@ -1,6 +1,5 @@ package com.jthink.skyeye.collector.launcher; -import com.jthink.skyeye.collector.task.TaskExecutor; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.boot.autoconfigure.EnableAutoConfiguration; diff --git a/skyeye-collector/src/main/java/com/jthink/skyeye/collector/task/TaskExecutor.java b/skyeye-collector/src/main/java/com/jthink/skyeye/collector/task/TaskExecutor.java deleted file mode 100644 index f480946..0000000 --- a/skyeye-collector/src/main/java/com/jthink/skyeye/collector/task/TaskExecutor.java +++ /dev/null @@ -1,61 +0,0 @@ -package com.jthink.skyeye.collector.task; - -import 
org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Component; - -import java.util.ArrayList; -import java.util.List; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; - -/** - * JThink@JThink - * - * @author JThink - * @version 0.0.1 - * @desc 所有的task执行器 - * @date 2016-09-20 10:30:11 - */ -@Component -public class TaskExecutor { - - @Autowired - private Task indexerTask; - @Autowired - private Task collectTask; - @Autowired - private Task backupTask; - @Autowired - private Task rpcTraceTask; - - private List tasks = null; - - public void addTask() { - if (null == tasks) { - tasks = new ArrayList(); - } - this.tasks.add(this.indexerTask); - this.tasks.add(this.collectTask); - this.tasks.add(this.backupTask); - this.tasks.add(this.rpcTraceTask); - } - - /** - * 执行 - */ - public void execute() { - ExecutorService pool = Executors.newFixedThreadPool(4); - for (Task task : tasks) { - pool.execute(task); - } - } - - /** - * 等待执行完成 - */ - public void join() throws InterruptedException { - for (Task task : tasks) { - task.executeThread().join(); - } - } -} diff --git a/skyeye-monitor/build.gradle b/skyeye-monitor/build.gradle index b8a83bd..144b406 100644 --- a/skyeye-monitor/build.gradle +++ b/skyeye-monitor/build.gradle @@ -57,7 +57,7 @@ configurations { compile.exclude group: "org.springframework", module: "spring-web" } -mainClassName = 'com.jthink.skyeye.monitor.launcher.Launcher' +mainClassName = 'com.jthink.skyeye.monitor.launcher.launcher' buildscript { ext { diff --git a/skyeye-statistics/build.gradle b/skyeye-statistics/build.gradle index 854a9c1..3aed438 100644 --- a/skyeye-statistics/build.gradle +++ b/skyeye-statistics/build.gradle @@ -3,7 +3,6 @@ apply plugin: 'eclipse' apply plugin: 'maven' apply plugin: 'org.springframework.boot' apply plugin: 'application' -apply plugin: 'war' group = 'skyeye' applicationName = 'skyeye-statistics' @@ -15,72 +14,70 @@ 
compileJava.options.encoding = 'UTF-8' buildDir = 'target' repositories { - mavenLocal() - maven { url "https://repository.cloudera.com/artifactory/cloudera-repos" } - maven { url "http://192.168.88.8:8081/nexus/content/repositories/cloudera"} - maven { url "http://192.168.88.8:8081/nexus/content/groups/public" } - maven { url "http://192.168.88.8:8081/nexus/content/repositories/releases" } - maven { url "http://192.168.88.8:8081/nexus/content/repositories/snapshots" } - mavenCentral() + mavenLocal() + maven { url "https://repository.cloudera.com/artifactory/cloudera-repos" } + maven { url "http://192.168.88.8:8081/nexus/content/repositories/cloudera"} + maven { url "http://192.168.88.8:8081/nexus/content/groups/public" } + maven { url "http://192.168.88.8:8081/nexus/content/repositories/releases" } + maven { url "http://192.168.88.8:8081/nexus/content/repositories/snapshots" } + mavenCentral() } ext { - scalaVersion = '2.10.4' - scalaBinaryVersion = '2.10' - sparkVersion = '1.3.0-cdh5.4.0' - baseVersion = '1.0.0' - fastjsonVersion = '1.2.35' + scalaVersion = '2.10.4' + scalaBinaryVersion = '2.10' + sparkVersion = '1.3.0-cdh5.4.0' + baseVersion = '1.0.0' + fastjsonVersion = '1.2.35' } dependencies { - compile "skyeye:skyeye-base:$baseVersion" - compile "org.springframework.boot:spring-boot-starter" + compile "skyeye:skyeye-base:$baseVersion" + compile "org.springframework.boot:spring-boot-starter" - compileOnly "org.scala-lang:scala-library:$scalaVersion" - compileOnly "org.scala-lang:scala-compiler:$scalaVersion" + compileOnly "org.scala-lang:scala-library:$scalaVersion" + compileOnly "org.scala-lang:scala-compiler:$scalaVersion" - compileOnly "org.apache.spark:spark-core_$scalaBinaryVersion:$sparkVersion" + compileOnly "org.apache.spark:spark-core_$scalaBinaryVersion:$sparkVersion" - compileOnly "org.apache.spark:spark-streaming-kafka_$scalaBinaryVersion:$sparkVersion" - compileOnly "org.apache.spark:spark-streaming_$scalaBinaryVersion:$sparkVersion" + 
compileOnly "org.apache.spark:spark-streaming-kafka_$scalaBinaryVersion:$sparkVersion" + compileOnly "org.apache.spark:spark-streaming_$scalaBinaryVersion:$sparkVersion" - compile "com.alibaba:fastjson:$fastjsonVersion" + compile "com.alibaba:fastjson:$fastjsonVersion" - testCompile "org.springframework.boot:spring-boot-starter-test" + testCompile "org.springframework.boot:spring-boot-starter-test" } configurations { - compile.exclude group: 'ch.qos.logback', module: 'logback-classic' - compile.exclude group: 'ch.qos.logback', module: 'logback-core' + compile.exclude group: 'ch.qos.logback', module: 'logback-classic' + compile.exclude group: 'ch.qos.logback', module: 'logback-core' } -mainClassName = 'com.jthink.skyeye.statistics.launcher.Launcher' +mainClassName = 'com.jthink.skyeye.statistics.launcher.launcher' jar { - manifest { - attributes "Main-Class": "$mainClassName" - } + manifest { + attributes "Main-Class": "$mainClassName" + } - from { - configurations.compile.collect { it.isDirectory() ? it : zipTree(it) } - } + from { + configurations.compile.collect { it.isDirectory() ? 
it : zipTree(it) } + } } buildscript { - ext { - springBootVersion = '1.5.6.RELEASE' - } - - repositories { - mavenLocal() - maven { url "http://192.168.88.8:8081/nexus/content/groups/public" } - mavenCentral() - } - - dependencies { - classpath("org.springframework.boot:spring-boot-gradle-plugin:${springBootVersion}") - classpath("io.spring.gradle:dependency-management-plugin:1.0.3.RELEASE") - } -} + ext { + springBootVersion = '1.5.6.RELEASE' + } + repositories { + mavenLocal() + maven { url "http://192.168.88.8:8081/nexus/content/groups/public" } + mavenCentral() + } + dependencies { + classpath("org.springframework.boot:spring-boot-gradle-plugin:${springBootVersion}") + classpath("io.spring.gradle:dependency-management-plugin:1.0.3.RELEASE") + } +} From 0e074c1339956de59694a9eb0c04ce641274aa12 Mon Sep 17 00:00:00 2001 From: JThink Date: Thu, 3 Aug 2017 14:07:48 +0800 Subject: [PATCH 16/27] update properties file in collecttor indexer --- .../skyeye-collector-indexer/build.gradle | 42 +++++++------- .../src/main/resources/application.properties | 13 +++++ .../src/main/resources/banner.txt | 6 ++ .../src/main/resources/logback.xml | 58 +++++++++++++++++++ .../properties/collector-indexer.properties | 13 +++++ .../src/main/resources/banner.txt | 6 -- skyeye-web/src/main/resources/logback.xml | 2 +- 7 files changed, 112 insertions(+), 28 deletions(-) create mode 100644 skyeye-collector/skyeye-collector-indexer/src/main/resources/application.properties create mode 100644 skyeye-collector/skyeye-collector-indexer/src/main/resources/banner.txt create mode 100644 skyeye-collector/skyeye-collector-indexer/src/main/resources/logback.xml create mode 100644 skyeye-collector/skyeye-collector-indexer/src/main/resources/properties/collector-indexer.properties diff --git a/skyeye-collector/skyeye-collector-indexer/build.gradle b/skyeye-collector/skyeye-collector-indexer/build.gradle index 6b6058d..d1c0bcb 100644 --- a/skyeye-collector/skyeye-collector-indexer/build.gradle +++ 
b/skyeye-collector/skyeye-collector-indexer/build.gradle @@ -5,37 +5,37 @@ apply plugin: 'org.springframework.boot' apply plugin: 'application' dependencies { - compile project(":skyeye-collector-core") + compile project(":skyeye-collector-core") - testCompile "org.springframework.boot:spring-boot-starter-test" + testCompile "org.springframework.boot:spring-boot-starter-test" } configurations { - compile.exclude group: "log4j", module: "log4j" - compile.exclude group: "org.slf4j", module: "slf4j-log4j12" + compile.exclude group: "log4j", module: "log4j" + compile.exclude group: "org.slf4j", module: "slf4j-log4j12" } mainClassName = "com.jthink.skyeye.collector.indexer.launcher.Launcher" buildscript { - ext { - springBootVersion = '1.5.6.RELEASE' - } - - repositories { - mavenLocal() - maven { url mavenPublicUrl } - mavenCentral() - } - - dependencies { - classpath("org.springframework.boot:spring-boot-gradle-plugin:${springBootVersion}") - classpath("io.spring.gradle:dependency-management-plugin:1.0.3.RELEASE") - } + ext { + springBootVersion = '1.5.6.RELEASE' + } + + repositories { + mavenLocal() + maven { url mavenPublicUrl } + mavenCentral() + } + + dependencies { + classpath("org.springframework.boot:spring-boot-gradle-plugin:${springBootVersion}") + classpath("io.spring.gradle:dependency-management-plugin:1.0.3.RELEASE") + } } startScripts { - doLast { - unixScript.text = unixScript.text.replaceAll("lib/(.*)\n", "lib/\\*") - } + doLast { + unixScript.text = unixScript.text.replaceAll("lib/(.*)\n", "lib/\\*") + } } diff --git a/skyeye-collector/skyeye-collector-indexer/src/main/resources/application.properties b/skyeye-collector/skyeye-collector-indexer/src/main/resources/application.properties new file mode 100644 index 0000000..d2c392e --- /dev/null +++ b/skyeye-collector/skyeye-collector-indexer/src/main/resources/application.properties @@ -0,0 +1,13 @@ +# kafka config +spring.message.kafka.brokers=${kafka.brokers} +spring.message.kafka.topic=${kafka.topic} 
+spring.message.kafka.consumeGroup=${kafka.consume.group} +spring.message.kafka.pollTimeout=${kafka.poll.timeout} + +# es config +spring.indexer.es.ips=${es.ips} +spring.indexer.es.cluster=${es.cluster} +spring.indexer.es.port=${es.port} +spring.indexer.es.sniff=${es.sniff} +spring.indexer.es.index=${es.index} +spring.indexer.es.doc=${es.doc} diff --git a/skyeye-collector/skyeye-collector-indexer/src/main/resources/banner.txt b/skyeye-collector/skyeye-collector-indexer/src/main/resources/banner.txt new file mode 100644 index 0000000..fbe7ade --- /dev/null +++ b/skyeye-collector/skyeye-collector-indexer/src/main/resources/banner.txt @@ -0,0 +1,6 @@ +███████╗██╗ ██╗██╗ ██╗███████╗██╗ ██╗███████╗ ██████╗ ██████╗ ██╗ ██╗ ███████╗ ██████╗████████╗ ██████╗ ██████╗ +██╔════╝██║ ██╔╝╚██╗ ██╔╝██╔════╝╚██╗ ██╔╝██╔════╝ ██╔════╝██╔═══██╗██║ ██║ ██╔════╝██╔════╝╚══██╔══╝██╔═══██╗██╔══██╗ +███████╗█████╔╝ ╚████╔╝ █████╗ ╚████╔╝ █████╗█████╗██║ ██║ ██║██║ ██║ █████╗ ██║ ██║ ██║ ██║██████╔╝ +╚════██║██╔═██╗ ╚██╔╝ ██╔══╝ ╚██╔╝ ██╔══╝╚════╝██║ ██║ ██║██║ ██║ ██╔══╝ ██║ ██║ ██║ ██║██╔══██╗ +███████║██║ ██╗ ██║ ███████╗ ██║ ███████╗ ╚██████╗╚██████╔╝███████╗███████╗███████╗╚██████╗ ██║ ╚██████╔╝██║ ██║ +╚══════╝╚═╝ ╚═╝ ╚═╝ ╚══════╝ ╚═╝ ╚══════╝ ╚═════╝ ╚═════╝ ╚══════╝╚══════╝╚══════╝ ╚═════╝ ╚═╝ ╚═════╝ ╚═╝ ╚═╝ diff --git a/skyeye-collector/skyeye-collector-indexer/src/main/resources/logback.xml b/skyeye-collector/skyeye-collector-indexer/src/main/resources/logback.xml new file mode 100644 index 0000000..54e3c31 --- /dev/null +++ b/skyeye-collector/skyeye-collector-indexer/src/main/resources/logback.xml @@ -0,0 +1,58 @@ + + + + + + + ${APP_NAME} + + + + + + %d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{96}[%line]: %msg%n + + + + + + true + + ${LOG_HOME}/info/${APP_NAME}_%d{yyyy-MM-dd}.%i.log + + 128 MB + + + + INFO + + + + %d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{96}[%line]: %msg%n + + + + + true + + ${LOG_HOME}/error/${APP_NAME}_%d{yyyy-MM-dd}.%i.log + + 128 
MB + + + + ERROR + + + + %d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{96}[%line]: %msg%n + + + + + + + + + + \ No newline at end of file diff --git a/skyeye-collector/skyeye-collector-indexer/src/main/resources/properties/collector-indexer.properties b/skyeye-collector/skyeye-collector-indexer/src/main/resources/properties/collector-indexer.properties new file mode 100644 index 0000000..59b5c41 --- /dev/null +++ b/skyeye-collector/skyeye-collector-indexer/src/main/resources/properties/collector-indexer.properties @@ -0,0 +1,13 @@ +# kafka config +kafka.brokers=riot01:9092,riot02:9092,riot03:9092 +kafka.topic=app-log +kafka.consume.group=es-indexer-consume-group +kafka.poll.timeout=100 + +# es config +es.ips=riot01,riot02,riot03 +es.cluster=mondeo +es.port=9300 +es.sniff=true +es.index=app-log +es.doc=log diff --git a/skyeye-collector/src/main/resources/banner.txt b/skyeye-collector/src/main/resources/banner.txt index fbe7ade..e69de29 100644 --- a/skyeye-collector/src/main/resources/banner.txt +++ b/skyeye-collector/src/main/resources/banner.txt @@ -1,6 +0,0 @@ -███████╗██╗ ██╗██╗ ██╗███████╗██╗ ██╗███████╗ ██████╗ ██████╗ ██╗ ██╗ ███████╗ ██████╗████████╗ ██████╗ ██████╗ -██╔════╝██║ ██╔╝╚██╗ ██╔╝██╔════╝╚██╗ ██╔╝██╔════╝ ██╔════╝██╔═══██╗██║ ██║ ██╔════╝██╔════╝╚══██╔══╝██╔═══██╗██╔══██╗ -███████╗█████╔╝ ╚████╔╝ █████╗ ╚████╔╝ █████╗█████╗██║ ██║ ██║██║ ██║ █████╗ ██║ ██║ ██║ ██║██████╔╝ -╚════██║██╔═██╗ ╚██╔╝ ██╔══╝ ╚██╔╝ ██╔══╝╚════╝██║ ██║ ██║██║ ██║ ██╔══╝ ██║ ██║ ██║ ██║██╔══██╗ -███████║██║ ██╗ ██║ ███████╗ ██║ ███████╗ ╚██████╗╚██████╔╝███████╗███████╗███████╗╚██████╗ ██║ ╚██████╔╝██║ ██║ -╚══════╝╚═╝ ╚═╝ ╚═╝ ╚══════╝ ╚═╝ ╚══════╝ ╚═════╝ ╚═════╝ ╚══════╝╚══════╝╚══════╝ ╚═════╝ ╚═╝ ╚═════╝ ╚═╝ ╚═╝ diff --git a/skyeye-web/src/main/resources/logback.xml b/skyeye-web/src/main/resources/logback.xml index 275b5c9..f5f8c6c 100644 --- a/skyeye-web/src/main/resources/logback.xml +++ b/skyeye-web/src/main/resources/logback.xml @@ -2,7 +2,7 @@ - + 
${APP_NAME} From 3735e0946b435bb8f84f22413508ed5ea20039ef Mon Sep 17 00:00:00 2001 From: JThink Date: Thu, 3 Aug 2017 16:02:26 +0800 Subject: [PATCH 17/27] re arch log backup collector --- skyeye-collector/settings.gradle | 2 +- .../skyeye-collector-backup/build.gradle | 58 +++++++ .../skyeye-collector-backup/settings.gradle | 1 + .../backup/balancer/HandleRebalance.java} | 16 +- .../hadoop/HadoopConfiguration.java | 2 +- .../hadoop/HadoopProperties.java | 12 +- .../collector/backup/launcher/Launcher.java | 61 ++++++++ .../collector/backup}/task/BackupTask.java | 24 +-- .../collector/backup/task/UploadTask.java | 54 +++++++ .../collector/backup/util/FileUtil.java | 112 +++++++++++++ .../src/main/resources/application.properties | 13 ++ .../src/main/resources/banner.txt | 6 + .../src/main/resources/logback.xml | 58 +++++++ .../properties/collector-backup.properties | 13 ++ .../indexer/balancer/HandleRebalance.java | 6 +- .../callback/KafkaOffsetCommitCallback.java | 30 ---- .../configuration/es/EsConfiguration.java | 55 ------- .../configuration/es/EsProperties.java | 95 ----------- .../kafka/KafkaConfiguration.java | 148 ------------------ .../configuration/kafka/KafkaProperties.java | 105 ------------- .../skyeye/collector/launcher/Launcher.java | 87 ---------- .../listener/HandleRebalanceForRpcTrace.java | 1 - skyeye-web/build.gradle | 2 +- 23 files changed, 415 insertions(+), 546 deletions(-) create mode 100644 skyeye-collector/skyeye-collector-backup/build.gradle create mode 100644 skyeye-collector/skyeye-collector-backup/settings.gradle rename skyeye-collector/{src/main/java/com/jthink/skyeye/collector/listener/HandleRebalanceForBackup.java => skyeye-collector-backup/src/main/java/com/jthink/skyeye/collector/backup/balancer/HandleRebalance.java} (67%) rename skyeye-collector/{src/main/java/com/jthink/skyeye/collector => skyeye-collector-backup/src/main/java/com/jthink/skyeye/collector/backup}/configuration/hadoop/HadoopConfiguration.java (96%) rename 
skyeye-collector/{src/main/java/com/jthink/skyeye/collector => skyeye-collector-backup/src/main/java/com/jthink/skyeye/collector/backup}/configuration/hadoop/HadoopProperties.java (79%) create mode 100644 skyeye-collector/skyeye-collector-backup/src/main/java/com/jthink/skyeye/collector/backup/launcher/Launcher.java rename skyeye-collector/{src/main/java/com/jthink/skyeye/collector => skyeye-collector-backup/src/main/java/com/jthink/skyeye/collector/backup}/task/BackupTask.java (84%) create mode 100644 skyeye-collector/skyeye-collector-backup/src/main/java/com/jthink/skyeye/collector/backup/task/UploadTask.java create mode 100644 skyeye-collector/skyeye-collector-backup/src/main/java/com/jthink/skyeye/collector/backup/util/FileUtil.java create mode 100644 skyeye-collector/skyeye-collector-backup/src/main/resources/application.properties create mode 100644 skyeye-collector/skyeye-collector-backup/src/main/resources/banner.txt create mode 100644 skyeye-collector/skyeye-collector-backup/src/main/resources/logback.xml create mode 100644 skyeye-collector/skyeye-collector-backup/src/main/resources/properties/collector-backup.properties delete mode 100644 skyeye-collector/src/main/java/com/jthink/skyeye/collector/callback/KafkaOffsetCommitCallback.java delete mode 100644 skyeye-collector/src/main/java/com/jthink/skyeye/collector/configuration/es/EsConfiguration.java delete mode 100644 skyeye-collector/src/main/java/com/jthink/skyeye/collector/configuration/es/EsProperties.java delete mode 100644 skyeye-collector/src/main/java/com/jthink/skyeye/collector/configuration/kafka/KafkaConfiguration.java delete mode 100644 skyeye-collector/src/main/java/com/jthink/skyeye/collector/configuration/kafka/KafkaProperties.java delete mode 100644 skyeye-collector/src/main/java/com/jthink/skyeye/collector/launcher/Launcher.java diff --git a/skyeye-collector/settings.gradle b/skyeye-collector/settings.gradle index 5b43fdc..50f74dc 100644 --- a/skyeye-collector/settings.gradle +++ 
b/skyeye-collector/settings.gradle @@ -1,2 +1,2 @@ -include 'skyeye-collector-core', 'skyeye-collector-indexer' +include 'skyeye-collector-core', 'skyeye-collector-indexer', 'skyeye-collector-backup' //, 'skyeye-collector-trace', 'skyeye-collector-metrics' diff --git a/skyeye-collector/skyeye-collector-backup/build.gradle b/skyeye-collector/skyeye-collector-backup/build.gradle new file mode 100644 index 0000000..0142a50 --- /dev/null +++ b/skyeye-collector/skyeye-collector-backup/build.gradle @@ -0,0 +1,58 @@ +apply plugin: 'java' +apply plugin: 'eclipse' +apply plugin: 'maven' +apply plugin: 'org.springframework.boot' +apply plugin: 'application' + +ext { + hadoopVersion = '2.6.0-cdh5.4.0' +} + +dependencies { + compile project(":skyeye-collector-core") + + compile "joda-time:joda-time:2.9.9" + + compile ("org.apache.hadoop:hadoop-common:$hadoopVersion") { + exclude group: 'tomcat', module: 'jasper-runtime' + exclude group: 'tomcat', module: 'jasper-compiler' + } + + compile ("org.apache.hadoop:hadoop-hdfs:$hadoopVersion") { + exclude group: 'tomcat', module: 'jasper-runtime' + exclude group: 'tomcat', module: 'jasper-compiler' + } + + testCompile "org.springframework.boot:spring-boot-starter-test" +} + +configurations { + compile.exclude group: "log4j", module: "log4j" + compile.exclude group: "org.elasticsearch", module: "elasticsearch" + compile.exclude group: "org.slf4j", module: "slf4j-log4j12" +} + +mainClassName = "com.jthink.skyeye.collector.backup.launcher.Launcher" + +buildscript { + ext { + springBootVersion = '1.5.6.RELEASE' + } + + repositories { + mavenLocal() + maven { url mavenPublicUrl } + mavenCentral() + } + + dependencies { + classpath("org.springframework.boot:spring-boot-gradle-plugin:${springBootVersion}") + classpath("io.spring.gradle:dependency-management-plugin:1.0.3.RELEASE") + } +} + +startScripts { + doLast { + unixScript.text = unixScript.text.replaceAll("lib/(.*)\n", "lib/\\*") + } +} diff --git 
a/skyeye-collector/skyeye-collector-backup/settings.gradle b/skyeye-collector/skyeye-collector-backup/settings.gradle new file mode 100644 index 0000000..9aa407e --- /dev/null +++ b/skyeye-collector/skyeye-collector-backup/settings.gradle @@ -0,0 +1 @@ +rootProject.name = 'skyeye-collector-backup' diff --git a/skyeye-collector/src/main/java/com/jthink/skyeye/collector/listener/HandleRebalanceForBackup.java b/skyeye-collector/skyeye-collector-backup/src/main/java/com/jthink/skyeye/collector/backup/balancer/HandleRebalance.java similarity index 67% rename from skyeye-collector/src/main/java/com/jthink/skyeye/collector/listener/HandleRebalanceForBackup.java rename to skyeye-collector/skyeye-collector-backup/src/main/java/com/jthink/skyeye/collector/backup/balancer/HandleRebalance.java index b50ddaa..7b0a599 100644 --- a/skyeye-collector/src/main/java/com/jthink/skyeye/collector/listener/HandleRebalanceForBackup.java +++ b/skyeye-collector/skyeye-collector-backup/src/main/java/com/jthink/skyeye/collector/backup/balancer/HandleRebalance.java @@ -1,7 +1,7 @@ -package com.jthink.skyeye.collector.listener; +package com.jthink.skyeye.collector.backup.balancer; -import com.jthink.skyeye.collector.configuration.kafka.KafkaProperties; -import com.jthink.skyeye.collector.task.BackupTask; +import com.jthink.skyeye.collector.backup.task.BackupTask; +import com.jthink.skyeye.collector.core.configuration.kafka.KafkaProperties; import org.apache.kafka.clients.consumer.ConsumerRebalanceListener; import org.apache.kafka.clients.consumer.KafkaConsumer; import org.apache.kafka.common.TopicPartition; @@ -23,19 +23,19 @@ * @date 2016-09-20 11:14:27 */ @Component -public class HandleRebalanceForBackup implements ConsumerRebalanceListener, InitializingBean { +public class HandleRebalance implements ConsumerRebalanceListener, InitializingBean { - private static final Logger LOGGER = LoggerFactory.getLogger(HandleRebalanceForBackup.class); + private static final Logger LOGGER = 
LoggerFactory.getLogger(HandleRebalance.class); @Autowired - private KafkaConsumer kafkaConsumerBackup; + private KafkaConsumer kafkaConsumer; @Autowired private KafkaProperties kafkaProperties; @Override public void onPartitionsRevoked(Collection partitions) { - this.kafkaConsumerBackup.commitSync(BackupTask.currentOffsets); + this.kafkaConsumer.commitSync(BackupTask.currentOffsets); LOGGER.info("before rebalance, commit offset once"); } @@ -46,6 +46,6 @@ public void onPartitionsAssigned(Collection partitions) { @Override public void afterPropertiesSet() throws Exception { - this.kafkaConsumerBackup.subscribe(Arrays.asList(this.kafkaProperties.getTopic()), this); + this.kafkaConsumer.subscribe(Arrays.asList(this.kafkaProperties.getTopic()), this); } } \ No newline at end of file diff --git a/skyeye-collector/src/main/java/com/jthink/skyeye/collector/configuration/hadoop/HadoopConfiguration.java b/skyeye-collector/skyeye-collector-backup/src/main/java/com/jthink/skyeye/collector/backup/configuration/hadoop/HadoopConfiguration.java similarity index 96% rename from skyeye-collector/src/main/java/com/jthink/skyeye/collector/configuration/hadoop/HadoopConfiguration.java rename to skyeye-collector/skyeye-collector-backup/src/main/java/com/jthink/skyeye/collector/backup/configuration/hadoop/HadoopConfiguration.java index 86e6f65..4d8ea41 100644 --- a/skyeye-collector/src/main/java/com/jthink/skyeye/collector/configuration/hadoop/HadoopConfiguration.java +++ b/skyeye-collector/skyeye-collector-backup/src/main/java/com/jthink/skyeye/collector/backup/configuration/hadoop/HadoopConfiguration.java @@ -1,4 +1,4 @@ -package com.jthink.skyeye.collector.configuration.hadoop; +package com.jthink.skyeye.collector.backup.configuration.hadoop; import com.jthink.skyeye.base.constant.Constants; import org.apache.hadoop.conf.Configuration; diff --git a/skyeye-collector/src/main/java/com/jthink/skyeye/collector/configuration/hadoop/HadoopProperties.java 
b/skyeye-collector/skyeye-collector-backup/src/main/java/com/jthink/skyeye/collector/backup/configuration/hadoop/HadoopProperties.java similarity index 79% rename from skyeye-collector/src/main/java/com/jthink/skyeye/collector/configuration/hadoop/HadoopProperties.java rename to skyeye-collector/skyeye-collector-backup/src/main/java/com/jthink/skyeye/collector/backup/configuration/hadoop/HadoopProperties.java index 995bce0..b894ba4 100644 --- a/skyeye-collector/src/main/java/com/jthink/skyeye/collector/configuration/hadoop/HadoopProperties.java +++ b/skyeye-collector/skyeye-collector-backup/src/main/java/com/jthink/skyeye/collector/backup/configuration/hadoop/HadoopProperties.java @@ -1,4 +1,4 @@ -package com.jthink.skyeye.collector.configuration.hadoop; +package com.jthink.skyeye.collector.backup.configuration.hadoop; import org.springframework.boot.context.properties.ConfigurationProperties; @@ -21,6 +21,8 @@ public class HadoopProperties { private String baseDir; + private String fileRoot; + public String getHost() { return host; } @@ -52,4 +54,12 @@ public String getBaseDir() { public void setBaseDir(String baseDir) { this.baseDir = baseDir; } + + public String getFileRoot() { + return fileRoot; + } + + public void setFileRoot(String fileRoot) { + this.fileRoot = fileRoot; + } } diff --git a/skyeye-collector/skyeye-collector-backup/src/main/java/com/jthink/skyeye/collector/backup/launcher/Launcher.java b/skyeye-collector/skyeye-collector-backup/src/main/java/com/jthink/skyeye/collector/backup/launcher/Launcher.java new file mode 100644 index 0000000..1c0ace7 --- /dev/null +++ b/skyeye-collector/skyeye-collector-backup/src/main/java/com/jthink/skyeye/collector/backup/launcher/Launcher.java @@ -0,0 +1,61 @@ +package com.jthink.skyeye.collector.backup.launcher; + +import com.jthink.skyeye.collector.core.hook.ShutdownHookRunner; +import com.jthink.skyeye.collector.core.task.Task; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.slf4j.Logger; 
+import org.slf4j.LoggerFactory; +import org.springframework.boot.autoconfigure.EnableAutoConfiguration; +import org.springframework.boot.autoconfigure.SpringBootApplication; +import org.springframework.boot.builder.SpringApplicationBuilder; +import org.springframework.boot.logging.LoggingApplicationListener; +import org.springframework.context.ApplicationListener; +import org.springframework.context.ConfigurableApplicationContext; +import org.springframework.context.annotation.ComponentScan; +import org.springframework.context.annotation.PropertySource; + +import java.util.Iterator; +import java.util.Set; +import java.util.UUID; + +/** + * JThink@JThink + * + * @author JThink + * @version 0.0.1 + * @desc 项目启动器 + * @date 2016-08-24 18:31:48 + */ +@SpringBootApplication +@EnableAutoConfiguration +@ComponentScan(basePackages={"com.jthink.skyeye.collector.core", "com.jthink.skyeye.collector.backup"}) +@PropertySource("file:/opt/jthink/jthink-config/skyeye/collector/collector-backup.properties") +//@PropertySource("classpath:properties/collector-backup.properties") +public class Launcher { + + private static final Logger LOGGER = LoggerFactory.getLogger(Launcher.class); + + public static final String SERVER_ID = UUID.randomUUID().toString(); + + public static void main(String[] args) { + SpringApplicationBuilder builder = new SpringApplicationBuilder(Launcher.class); + Set> listeners = builder.application().getListeners(); + for (Iterator> it = listeners.iterator(); it.hasNext();) { + ApplicationListener listener = it.next(); + if (listener instanceof LoggingApplicationListener) { + it.remove(); + } + } + builder.application().setListeners(listeners); + ConfigurableApplicationContext context = builder.run(args); + LOGGER.info("collector backup start successfully"); + + KafkaConsumer kafkaConsumer = (KafkaConsumer) context.getBean("kafkaConsumer"); + Task task = (Task) context.getBean("backupTask"); + + // 优雅停止项目 + Runtime.getRuntime().addShutdownHook(new 
ShutdownHookRunner(kafkaConsumer, task)); + task.doTask(); + } + +} diff --git a/skyeye-collector/src/main/java/com/jthink/skyeye/collector/task/BackupTask.java b/skyeye-collector/skyeye-collector-backup/src/main/java/com/jthink/skyeye/collector/backup/task/BackupTask.java similarity index 84% rename from skyeye-collector/src/main/java/com/jthink/skyeye/collector/task/BackupTask.java rename to skyeye-collector/skyeye-collector-backup/src/main/java/com/jthink/skyeye/collector/backup/task/BackupTask.java index a8e3e2a..43fb7bb 100644 --- a/skyeye-collector/src/main/java/com/jthink/skyeye/collector/task/BackupTask.java +++ b/skyeye-collector/skyeye-collector-backup/src/main/java/com/jthink/skyeye/collector/backup/task/BackupTask.java @@ -1,10 +1,11 @@ -package com.jthink.skyeye.collector.task; +package com.jthink.skyeye.collector.backup.task; -import com.jthink.skyeye.collector.callback.KafkaOffsetCommitCallback; -import com.jthink.skyeye.collector.configuration.kafka.KafkaProperties; -import com.jthink.skyeye.collector.util.FileUtil; import com.jthink.skyeye.base.constant.LogLevel; import com.jthink.skyeye.base.dto.LogDto; +import com.jthink.skyeye.collector.backup.util.FileUtil; +import com.jthink.skyeye.collector.core.callback.KafkaOffsetCommitCallback; +import com.jthink.skyeye.collector.core.configuration.kafka.KafkaProperties; +import com.jthink.skyeye.collector.core.task.Task; import org.apache.kafka.clients.consumer.ConsumerRecord; import org.apache.kafka.clients.consumer.ConsumerRecords; import org.apache.kafka.clients.consumer.KafkaConsumer; @@ -16,7 +17,10 @@ import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; -import java.util.*; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; /** * JThink@JThink @@ -32,7 +36,7 @@ public class BackupTask implements Task { private static final Logger LOGGER = LoggerFactory.getLogger(BackupTask.class); 
@Autowired - private KafkaConsumer kafkaConsumerBackup; + private KafkaConsumer kafkaConsumer; @Autowired private KafkaProperties kafkaProperties; @Autowired @@ -47,7 +51,7 @@ public void doTask() { int count = 0; try { while (true) { - ConsumerRecords records = this.kafkaConsumerBackup.poll(this.kafkaProperties.getPollTimeout()); + ConsumerRecords records = this.kafkaConsumer.poll(this.kafkaProperties.getPollTimeout()); if (!records.isEmpty()) { Map> lines = new HashMap>(); for (ConsumerRecord record : records) { @@ -75,14 +79,14 @@ public void doTask() { count++; if (count >= 1000) { // 当达到了1000触发向kafka提交offset - this.kafkaConsumerBackup.commitAsync(currentOffsets, new KafkaOffsetCommitCallback()); + this.kafkaConsumer.commitAsync(currentOffsets, new KafkaOffsetCommitCallback()); count = 0; } } // save to file int size = this.fileUtil.save(lines); - this.kafkaConsumerBackup.commitAsync(currentOffsets, new KafkaOffsetCommitCallback()); + this.kafkaConsumer.commitAsync(currentOffsets, new KafkaOffsetCommitCallback()); LOGGER.info("total record: {}, saved {} records to file", records.count(), size); } } @@ -92,7 +96,7 @@ public void doTask() { } catch (Exception e) { LOGGER.error("process records error, {}", e); } finally { - this.kafkaConsumerBackup.commitSync(currentOffsets); + this.kafkaConsumer.commitSync(currentOffsets); LOGGER.info("finally commit the offset"); // 不需要主动调kafkaConsumer.close(), spring bean容器会调用 } diff --git a/skyeye-collector/skyeye-collector-backup/src/main/java/com/jthink/skyeye/collector/backup/task/UploadTask.java b/skyeye-collector/skyeye-collector-backup/src/main/java/com/jthink/skyeye/collector/backup/task/UploadTask.java new file mode 100644 index 0000000..ce84ccd --- /dev/null +++ b/skyeye-collector/skyeye-collector-backup/src/main/java/com/jthink/skyeye/collector/backup/task/UploadTask.java @@ -0,0 +1,54 @@ +package com.jthink.skyeye.collector.backup.task; + +import com.jthink.skyeye.base.util.DateUtil; +import 
com.jthink.skyeye.collector.backup.util.FileUtil; +import org.joda.time.DateTime; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.scheduling.annotation.EnableScheduling; +import org.springframework.scheduling.annotation.Scheduled; +import org.springframework.stereotype.Component; +import org.springframework.util.StopWatch; + +/** + * JThink@JThink + * + * @author JThink + * @version 0.0.1 + * @desc 数据上传hdfs任务 + * @date 2016-12-06 17:51:33 + */ +@Component +@EnableScheduling +public class UploadTask { + + private static final Logger LOGGER = LoggerFactory.getLogger(UploadTask.class); + + @Autowired + private FileUtil fileUtil; + + /** + * 上传到hdfs并删除相应的文件 + */ + @Scheduled(cron = "${spring.upload.log.cron}") + private void upload() { + String yesterday = this.getYesterday(); + LOGGER.info("开始上传到hdfs, 时间: {}", yesterday); + StopWatch sw = new StopWatch(); + sw.start(); + this.fileUtil.uploadToHDFS(yesterday); + sw.stop(); + LOGGER.info("上传到hdfs结束, 耗时: {} ms", sw.getTotalTimeMillis()); + } + + /** + * 返回昨天的字符串 + * @return + */ + private String getYesterday() { + DateTime yesterday = new DateTime(System.currentTimeMillis()).minusDays(1); + return yesterday.toString(DateUtil.YYYYMMDD); + } + +} diff --git a/skyeye-collector/skyeye-collector-backup/src/main/java/com/jthink/skyeye/collector/backup/util/FileUtil.java b/skyeye-collector/skyeye-collector-backup/src/main/java/com/jthink/skyeye/collector/backup/util/FileUtil.java new file mode 100644 index 0000000..b975ba2 --- /dev/null +++ b/skyeye-collector/skyeye-collector-backup/src/main/java/com/jthink/skyeye/collector/backup/util/FileUtil.java @@ -0,0 +1,112 @@ +package com.jthink.skyeye.collector.backup.util; + +import com.jthink.skyeye.base.constant.Constants; +import com.jthink.skyeye.collector.backup.configuration.hadoop.HadoopProperties; +import com.jthink.skyeye.collector.backup.launcher.Launcher; +import 
org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; + +import java.io.BufferedWriter; +import java.io.File; +import java.io.FileWriter; +import java.io.IOException; +import java.util.List; +import java.util.Map; + +/** + * JThink@JThink + * + * @author JThink + * @version 0.0.1 + * @desc file 相关的util + * @date 2016-12-06 11:26:33 + */ +@Component +public class FileUtil { + + private static final Logger LOGGER = LoggerFactory.getLogger(FileUtil.class); + + @Autowired + private HadoopProperties hadoopProperties; + @Autowired + private FileSystem fileSystem; + + /** + * 将数据写入文件 + * @param lines + */ + public int save(Map> lines) { + int sum = 0; + for (Map.Entry> entry : lines.entrySet()) { + this.writeTofile(entry.getKey(), entry.getValue()); + sum += entry.getValue().size(); + } + return sum; + } + + /** + * 写入文件 + * @param fileName + * @param lines + */ + private void writeTofile(String fileName, List lines) { + BufferedWriter bw = null; + try { + File file = new File(this.hadoopProperties.getFileRoot() + this.getFileName(fileName)); + if (!file.getParentFile().exists()) { + if (!file.getParentFile().mkdirs()) { + LOGGER.info("创建父文件夹失败"); + } + } + bw = new BufferedWriter(new FileWriter(file, true)); + StringBuilder sb = new StringBuilder(); + for (String line : lines) { + sb.append(line); + } + bw.write(sb.toString()); + } catch (IOException e) { + LOGGER.error("写文件报错, ", e); + } finally { + if (bw != null) { + try { + bw.flush(); + bw.close(); + } catch (IOException e) { + LOGGER.error("写文件报错, ", e); + } + } + } + } + + /** + * 上传至hdfs + */ + public void uploadToHDFS(String yesterday) { + try { + String fileName = this.getFileName(yesterday); + File file = new File(this.hadoopProperties.getFileRoot() + fileName); + if (!file.exists()) { + LOGGER.info("当天没有可上传的文件"); + return; + 
} + this.fileSystem.copyFromLocalFile(true, false, new Path(this.hadoopProperties.getFileRoot() + fileName), + new Path(this.hadoopProperties.getBaseDir() + yesterday + Constants.SLASH + fileName)); + } catch (IOException e) { + LOGGER.error("上传至hdfs失败, ", e); + } + } + + /** + * 返回具体的名字 + * @param fileName + * @return + */ + private String getFileName(String fileName) { + return fileName + Constants.POINT + Launcher.SERVER_ID; + } + +} diff --git a/skyeye-collector/skyeye-collector-backup/src/main/resources/application.properties b/skyeye-collector/skyeye-collector-backup/src/main/resources/application.properties new file mode 100644 index 0000000..190b8fe --- /dev/null +++ b/skyeye-collector/skyeye-collector-backup/src/main/resources/application.properties @@ -0,0 +1,13 @@ +# kafka config +spring.message.kafka.brokers=${kafka.brokers} +spring.message.kafka.topic=${kafka.topic} +spring.message.kafka.consumeGroup=${kafka.consume.group} +spring.message.kafka.pollTimeout=${kafka.poll.timeout} + +# hdfs +spring.bigdata.hadoop.hdfs.port=${hadoop.hdfs.namenode.port} +spring.bigdata.hadoop.hdfs.host=${hadoop.hdfs.namenode.host} +spring.bigdata.hadoop.hdfs.user=${hadoop.hdfs.user} +spring.bigdata.hadoop.hdfs.baseDir=${hadoop.hdfs.baseDir} +spring.bigdata.hadoop.hdfs.fileRoot=${hadoop.hdfs.fileRoot} +spring.upload.log.cron=${upload.log.cron} diff --git a/skyeye-collector/skyeye-collector-backup/src/main/resources/banner.txt b/skyeye-collector/skyeye-collector-backup/src/main/resources/banner.txt new file mode 100644 index 0000000..fbe7ade --- /dev/null +++ b/skyeye-collector/skyeye-collector-backup/src/main/resources/banner.txt @@ -0,0 +1,6 @@ +███████╗██╗ ██╗██╗ ██╗███████╗██╗ ██╗███████╗ ██████╗ ██████╗ ██╗ ██╗ ███████╗ ██████╗████████╗ ██████╗ ██████╗ +██╔════╝██║ ██╔╝╚██╗ ██╔╝██╔════╝╚██╗ ██╔╝██╔════╝ ██╔════╝██╔═══██╗██║ ██║ ██╔════╝██╔════╝╚══██╔══╝██╔═══██╗██╔══██╗ +███████╗█████╔╝ ╚████╔╝ █████╗ ╚████╔╝ █████╗█████╗██║ ██║ ██║██║ ██║ █████╗ ██║ ██║ ██║ ██║██████╔╝ 
+╚════██║██╔═██╗ ╚██╔╝ ██╔══╝ ╚██╔╝ ██╔══╝╚════╝██║ ██║ ██║██║ ██║ ██╔══╝ ██║ ██║ ██║ ██║██╔══██╗ +███████║██║ ██╗ ██║ ███████╗ ██║ ███████╗ ╚██████╗╚██████╔╝███████╗███████╗███████╗╚██████╗ ██║ ╚██████╔╝██║ ██║ +╚══════╝╚═╝ ╚═╝ ╚═╝ ╚══════╝ ╚═╝ ╚══════╝ ╚═════╝ ╚═════╝ ╚══════╝╚══════╝╚══════╝ ╚═════╝ ╚═╝ ╚═════╝ ╚═╝ ╚═╝ diff --git a/skyeye-collector/skyeye-collector-backup/src/main/resources/logback.xml b/skyeye-collector/skyeye-collector-backup/src/main/resources/logback.xml new file mode 100644 index 0000000..b66120e --- /dev/null +++ b/skyeye-collector/skyeye-collector-backup/src/main/resources/logback.xml @@ -0,0 +1,58 @@ + + + + + + + ${APP_NAME} + + + + + + %d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{96}[%line]: %msg%n + + + + + + true + + ${LOG_HOME}/info/${APP_NAME}_%d{yyyy-MM-dd}.%i.log + + 128 MB + + + + INFO + + + + %d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{96}[%line]: %msg%n + + + + + true + + ${LOG_HOME}/error/${APP_NAME}_%d{yyyy-MM-dd}.%i.log + + 128 MB + + + + ERROR + + + + %d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{96}[%line]: %msg%n + + + + + + + + + + \ No newline at end of file diff --git a/skyeye-collector/skyeye-collector-backup/src/main/resources/properties/collector-backup.properties b/skyeye-collector/skyeye-collector-backup/src/main/resources/properties/collector-backup.properties new file mode 100644 index 0000000..6b13e1c --- /dev/null +++ b/skyeye-collector/skyeye-collector-backup/src/main/resources/properties/collector-backup.properties @@ -0,0 +1,13 @@ +kafka config +kafka.brokers=riot01:9092,riot02:9092,riot03:9092 +kafka.topic=app-log +kafka.consume.group=log-backup-consume-group +kafka.poll.timeout=100 + +# hdfs +hadoop.hdfs.namenode.port=8020 +hadoop.hdfs.namenode.host=192.168.88.131 +hadoop.hdfs.user=qianjicheng +hadoop.hdfs.baseDir=/user/qianjicheng/JThink/ +hadoop.hdfs.fileRoot=/tmp/monitor-center/ +upload.log.cron=0 30 0 * * ? 
diff --git a/skyeye-collector/skyeye-collector-indexer/src/main/java/com/jthink/skyeye/collector/indexer/balancer/HandleRebalance.java b/skyeye-collector/skyeye-collector-indexer/src/main/java/com/jthink/skyeye/collector/indexer/balancer/HandleRebalance.java index 2bf98a5..7bbd7a5 100644 --- a/skyeye-collector/skyeye-collector-indexer/src/main/java/com/jthink/skyeye/collector/indexer/balancer/HandleRebalance.java +++ b/skyeye-collector/skyeye-collector-indexer/src/main/java/com/jthink/skyeye/collector/indexer/balancer/HandleRebalance.java @@ -28,14 +28,14 @@ public class HandleRebalance implements ConsumerRebalanceListener, InitializingB private static final Logger LOGGER = LoggerFactory.getLogger(HandleRebalance.class); @Autowired - private KafkaConsumer kafkaConsumerApp; + private KafkaConsumer kafkaConsumer; @Autowired private KafkaProperties kafkaProperties; @Override public void onPartitionsRevoked(Collection partitions) { - this.kafkaConsumerApp.commitSync(IndexerTask.currentOffsets); + this.kafkaConsumer.commitSync(IndexerTask.currentOffsets); LOGGER.info("before rebalance, commit offset once"); } @@ -46,6 +46,6 @@ public void onPartitionsAssigned(Collection partitions) { @Override public void afterPropertiesSet() throws Exception { - this.kafkaConsumerApp.subscribe(Arrays.asList(this.kafkaProperties.getTopic()), this); + this.kafkaConsumer.subscribe(Arrays.asList(this.kafkaProperties.getTopic()), this); } } \ No newline at end of file diff --git a/skyeye-collector/src/main/java/com/jthink/skyeye/collector/callback/KafkaOffsetCommitCallback.java b/skyeye-collector/src/main/java/com/jthink/skyeye/collector/callback/KafkaOffsetCommitCallback.java deleted file mode 100644 index e0af22a..0000000 --- a/skyeye-collector/src/main/java/com/jthink/skyeye/collector/callback/KafkaOffsetCommitCallback.java +++ /dev/null @@ -1,30 +0,0 @@ -package com.jthink.skyeye.collector.callback; - -import org.apache.kafka.clients.consumer.OffsetAndMetadata; -import 
org.apache.kafka.clients.consumer.OffsetCommitCallback; -import org.apache.kafka.common.TopicPartition; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.Map; - -/** - * JThink@JThink - * - * @author JThink - * @version 0.0.1 - * @desc - * @date 2016-09-20 10:22:54 - */ -public class KafkaOffsetCommitCallback implements OffsetCommitCallback { - - private static final Logger LOGGER = LoggerFactory.getLogger(KafkaOffsetCommitCallback.class); - - @Override - public void onComplete(Map offsets, Exception exception) { - if (null != exception) { - // 如果异步提交发生了异常 - LOGGER.error("commit failed for offsets {}, and exception is {}", offsets, exception); - } - } -} diff --git a/skyeye-collector/src/main/java/com/jthink/skyeye/collector/configuration/es/EsConfiguration.java b/skyeye-collector/src/main/java/com/jthink/skyeye/collector/configuration/es/EsConfiguration.java deleted file mode 100644 index 44dd263..0000000 --- a/skyeye-collector/src/main/java/com/jthink/skyeye/collector/configuration/es/EsConfiguration.java +++ /dev/null @@ -1,55 +0,0 @@ -package com.jthink.skyeye.collector.configuration.es; - -import com.jthink.skyeye.base.constant.Constants; -import org.elasticsearch.client.transport.TransportClient; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.InetSocketTransportAddress; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.boot.context.properties.EnableConfigurationProperties; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; - -import java.net.InetAddress; -import java.net.UnknownHostException; - -/** - * JThink@JThink - * - * @author JThink - * @version 0.0.1 - * @desc elasticsearch 配置 - * @date 2016-09-20 09:13:32 - */ -@Configuration -@EnableConfigurationProperties(EsProperties.class) -public class EsConfiguration { - - 
private static Logger LOGGER = LoggerFactory.getLogger(EsConfiguration.class); - - @Autowired - private EsProperties esProperties; - - @Bean - public Settings settings() { - Settings settings = Settings.settingsBuilder().put("cluster.name", this.esProperties.getCluster()) - .put("client.transport.sniff", this.esProperties.isSniff()).build(); - - return settings; - } - - @Bean - public TransportClient transportClient(Settings settings) { - TransportClient client = TransportClient.builder().settings(settings).build(); - for (String ip : this.esProperties.getIps().split(Constants.COMMA)) { - try { - client.addTransportAddress(new InetSocketTransportAddress(InetAddress.getByName(ip), this.esProperties.getPort())); - } catch (UnknownHostException e) { - LOGGER.error("es集群主机名错误, ip: {}", ip); - } - } - return client; - } - -} diff --git a/skyeye-collector/src/main/java/com/jthink/skyeye/collector/configuration/es/EsProperties.java b/skyeye-collector/src/main/java/com/jthink/skyeye/collector/configuration/es/EsProperties.java deleted file mode 100644 index 1c5327f..0000000 --- a/skyeye-collector/src/main/java/com/jthink/skyeye/collector/configuration/es/EsProperties.java +++ /dev/null @@ -1,95 +0,0 @@ -package com.jthink.skyeye.collector.configuration.es; - -import org.springframework.boot.context.properties.ConfigurationProperties; - -/** - * JThink@JThink - * - * @author JThink - * @version 0.0.1 - * @desc es的配置项 - * @date 2016-09-20 10:44:54 - */ -@ConfigurationProperties(prefix = "spring.indexer.es") -public class EsProperties { - - private String ips; - - private String cluster; - - private int port; - - private boolean sniff; - - private String index; - - private String doc; - - private String indexEvent; - - private String docEvent; - - public String getIps() { - return ips; - } - - public void setIps(String ips) { - this.ips = ips; - } - - public String getCluster() { - return cluster; - } - - public void setCluster(String cluster) { - this.cluster = cluster; - } 
- - public int getPort() { - return port; - } - - public void setPort(int port) { - this.port = port; - } - - public boolean isSniff() { - return sniff; - } - - public void setSniff(boolean sniff) { - this.sniff = sniff; - } - - public String getIndex() { - return index; - } - - public void setIndex(String index) { - this.index = index; - } - - public String getDoc() { - return doc; - } - - public void setDoc(String doc) { - this.doc = doc; - } - - public String getIndexEvent() { - return indexEvent; - } - - public void setIndexEvent(String indexEvent) { - this.indexEvent = indexEvent; - } - - public String getDocEvent() { - return docEvent; - } - - public void setDocEvent(String docEvent) { - this.docEvent = docEvent; - } -} diff --git a/skyeye-collector/src/main/java/com/jthink/skyeye/collector/configuration/kafka/KafkaConfiguration.java b/skyeye-collector/src/main/java/com/jthink/skyeye/collector/configuration/kafka/KafkaConfiguration.java deleted file mode 100644 index 8550ad2..0000000 --- a/skyeye-collector/src/main/java/com/jthink/skyeye/collector/configuration/kafka/KafkaConfiguration.java +++ /dev/null @@ -1,148 +0,0 @@ -package com.jthink.skyeye.collector.configuration.kafka; - -import com.jthink.skyeye.data.rabbitmq.service.RabbitmqService; -import com.jthink.skyeye.collector.configuration.es.EsProperties; -import com.jthink.skyeye.collector.service.CacheService; -import com.jthink.skyeye.collector.task.job.ExceptionProcessor; -import com.jthink.skyeye.collector.task.job.Indexer; -import com.jthink.skyeye.collector.task.job.NameCollector; -import com.jthink.skyeye.base.constant.EventType; -import org.I0Itec.zkclient.ZkClient; -import org.apache.kafka.clients.consumer.ConsumerConfig; -import org.apache.kafka.clients.consumer.KafkaConsumer; -import org.apache.kafka.common.serialization.ByteArrayDeserializer; -import org.apache.kafka.common.serialization.StringDeserializer; -import org.elasticsearch.client.transport.TransportClient; -import 
org.springframework.beans.factory.annotation.Autowired; -import org.springframework.boot.context.properties.EnableConfigurationProperties; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; - -import java.util.Arrays; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -/** - * JThink@JThink - * - * @author JThink - * @version 0.0.1 - * @desc kafka配置 - * @date 2016-09-18 11:12:07 - */ -@Configuration -@EnableConfigurationProperties({KafkaProperties.class, EsProperties.class}) -public class KafkaConfiguration { - - @Autowired - private KafkaProperties kafkaProperties; - @Autowired - private CacheService cacheService; - @Autowired - private RabbitmqService rabbitmqService; - @Autowired - private TransportClient transportClient; - @Autowired - private EsProperties esProperties; - @Autowired - private ZkClient zkClient; - - // 消费入es的消费组 - @Bean(name = "kafkaConsumerApp") - public KafkaConsumer kafkaConsumerApp() { - Map config = new HashMap(); - config.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, this.kafkaProperties.getBrokers()); - config.put(ConsumerConfig.GROUP_ID_CONFIG, this.kafkaProperties.getIndexerGroup()); - // 手动commit offset到kafka(该版本不将offset保存到zk) - config.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, false); - config.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, 15000); - config.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class.getName()); - config.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName()); - - KafkaConsumer kafkaConsumer = new KafkaConsumer(config); - - return kafkaConsumer; - } - - // 消费采集api name、account name、third name、第三方系统异常、任务调度异常、入新es的索引(for kibana)的消费组 - @Bean(name = "kafkaConsumerEvent") - public KafkaConsumer kafkaConsumerEvent() { - Map config = new HashMap(); - config.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, this.kafkaProperties.getBrokers()); - 
config.put(ConsumerConfig.GROUP_ID_CONFIG, this.kafkaProperties.getCollectGroup()); - // 手动commit offset到kafka(该版本不将offset保存到zk) - config.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, false); - config.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, 15000); - config.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class.getName()); - config.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName()); - - KafkaConsumer kafkaConsumer = new KafkaConsumer(config); - - return kafkaConsumer; - } - - // 消费入hdfs备份的消费组 - @Bean(name = "kafkaConsumerBackup") - public KafkaConsumer kafkaConsumerBackup() { - Map config = new HashMap(); - config.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, this.kafkaProperties.getBrokers()); - config.put(ConsumerConfig.GROUP_ID_CONFIG, this.kafkaProperties.getBackupGroup()); - // 手动commit offset到kafka(该版本不将offset保存到zk) - config.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, false); - config.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, 15000); - config.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class.getName()); - config.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName()); - - KafkaConsumer kafkaConsumer = new KafkaConsumer(config); - - return kafkaConsumer; - } - - // rpc trace跟踪入库的消费组 - @Bean(name = "kafkaConsumerRpcTrace") - public KafkaConsumer kafkaConsumerRpcTrace() { - Map config = new HashMap(); - config.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, this.kafkaProperties.getBrokers()); - config.put(ConsumerConfig.GROUP_ID_CONFIG, this.kafkaProperties.getRpcTraceGroup()); - // 手动commit offset到kafka(该版本不将offset保存到zk) - config.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, false); - config.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, 15000); - config.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class.getName()); - config.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, 
StringDeserializer.class.getName()); - - KafkaConsumer kafkaConsumer = new KafkaConsumer(config); - - return kafkaConsumer; - } - - // 以下3个bean进行责任链的生成和组装 - @Bean - public ExceptionProcessor exceptionProcessor() { - List exceptionProcesses = Arrays.asList(EventType.job_execute, EventType.thirdparty_call, EventType.middleware_opt, EventType.invoke_interface); - ExceptionProcessor exceptionProcessor = new ExceptionProcessor(exceptionProcesses); - exceptionProcessor.setRabbitmqService(this.rabbitmqService); - exceptionProcessor.setZkClient(this.zkClient); - return exceptionProcessor; - } - - @Bean - public NameCollector nameCollector(ExceptionProcessor exceptionProcessor) { - List names = Arrays.asList(EventType.invoke_interface, EventType.thirdparty_call); - NameCollector nameCollector = new NameCollector(names); - nameCollector.setNextJob(exceptionProcessor); - nameCollector.setCacheService(this.cacheService); - return nameCollector; - } - - @Bean - public Indexer indexer(NameCollector nameCollector) { - List indexes = Arrays.asList(EventType.job_execute, EventType.thirdparty_call, EventType.middleware_opt, EventType.invoke_interface); - Indexer indexer = new Indexer(indexes); - indexer.setNextJob(nameCollector); - indexer.setEsProperties(this.esProperties); - indexer.setTransportClient(this.transportClient); - return indexer; - } - -} diff --git a/skyeye-collector/src/main/java/com/jthink/skyeye/collector/configuration/kafka/KafkaProperties.java b/skyeye-collector/src/main/java/com/jthink/skyeye/collector/configuration/kafka/KafkaProperties.java deleted file mode 100644 index 0e261a2..0000000 --- a/skyeye-collector/src/main/java/com/jthink/skyeye/collector/configuration/kafka/KafkaProperties.java +++ /dev/null @@ -1,105 +0,0 @@ -package com.jthink.skyeye.collector.configuration.kafka; - -import org.springframework.boot.context.properties.ConfigurationProperties; - -/** - * JThink@JThink - * - * @author JThink - * @version 0.0.1 - * @desc kafka配置项 - * @date 
2016-09-20 10:15:05 - */ -@ConfigurationProperties(prefix = "spring.message.kafka") -public class KafkaProperties { - - private String topic; - - private String brokers; - - private String indexerGroup; - - private long pollTimeout; - - private String collectGroup; - - private String backupGroup; - - private String rpcTraceGroup; - - private String fileRoot; - - private String serverId; - - public String getFileRoot() { - return fileRoot; - } - - public void setFileRoot(String fileRoot) { - this.fileRoot = fileRoot; - } - - public String getBackupGroup() { - return backupGroup; - } - - public void setBackupGroup(String backupGroup) { - this.backupGroup = backupGroup; - } - - public String getTopic() { - return topic; - } - - public void setTopic(String topic) { - this.topic = topic; - } - - public String getBrokers() { - return brokers; - } - - public void setBrokers(String brokers) { - this.brokers = brokers; - } - - public String getIndexerGroup() { - return indexerGroup; - } - - public void setIndexerGroup(String indexerGroup) { - this.indexerGroup = indexerGroup; - } - - public long getPollTimeout() { - return pollTimeout; - } - - public void setPollTimeout(long pollTimeout) { - this.pollTimeout = pollTimeout; - } - - public String getCollectGroup() { - return collectGroup; - } - - public void setCollectGroup(String collectGroup) { - this.collectGroup = collectGroup; - } - - public String getServerId() { - return serverId; - } - - public void setServerId(String serverId) { - this.serverId = serverId; - } - - public String getRpcTraceGroup() { - return rpcTraceGroup; - } - - public void setRpcTraceGroup(String rpcTraceGroup) { - this.rpcTraceGroup = rpcTraceGroup; - } -} diff --git a/skyeye-collector/src/main/java/com/jthink/skyeye/collector/launcher/Launcher.java b/skyeye-collector/src/main/java/com/jthink/skyeye/collector/launcher/Launcher.java deleted file mode 100644 index d5574b7..0000000 --- 
a/skyeye-collector/src/main/java/com/jthink/skyeye/collector/launcher/Launcher.java +++ /dev/null @@ -1,87 +0,0 @@ -package com.jthink.skyeye.collector.launcher; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.boot.autoconfigure.EnableAutoConfiguration; -import org.springframework.boot.autoconfigure.SpringBootApplication; -import org.springframework.boot.builder.SpringApplicationBuilder; -import org.springframework.boot.logging.LoggingApplicationListener; -import org.springframework.context.ApplicationListener; -import org.springframework.context.ConfigurableApplicationContext; -import org.springframework.context.annotation.ComponentScan; -import org.springframework.context.annotation.PropertySource; -import org.apache.kafka.clients.consumer.KafkaConsumer; - -import java.util.Arrays; -import java.util.Iterator; -import java.util.List; -import java.util.Set; - -/** - * JThink@JThink - * - * @author JThink - * @version 0.0.1 - * @desc 项目启动器 - * @date 2016-08-24 18:31:48 - */ -@SpringBootApplication -@EnableAutoConfiguration -@ComponentScan(basePackages={"com.jthink.skyeye.collector", "com.jthink.skyeye.data.jpa", "com.jthink.skyeye.data.rabbitmq"}) -@PropertySource("file:/opt/jthink/jthink-config/skyeye/collector/collector.properties") -//@PropertySource("classpath:properties/collector.properties") -public class Launcher { - - private static final Logger LOGGER = LoggerFactory.getLogger(Launcher.class); - - public static void main(String[] args) { - SpringApplicationBuilder builder = new SpringApplicationBuilder(Launcher.class); - Set> listeners = builder.application().getListeners(); - for (Iterator> it = listeners.iterator(); it.hasNext();) { - ApplicationListener listener = it.next(); - if (listener instanceof LoggingApplicationListener) { - it.remove(); - } - } - builder.application().setListeners(listeners); - ConfigurableApplicationContext context = builder.run(args); - LOGGER.info("collector start successfully"); - - 
KafkaConsumer kafkaConsumerApp = (KafkaConsumer) context.getBean("kafkaConsumerApp"); - KafkaConsumer kafkaConsumerEvent = (KafkaConsumer) context.getBean("kafkaConsumerEvent"); - KafkaConsumer kafkaConsumerBackup = (KafkaConsumer) context.getBean("kafkaConsumerBackup"); - KafkaConsumer kafkaConsumerRpcTrace = (KafkaConsumer) context.getBean("kafkaConsumerRpcTrace"); - TaskExecutor taskExecutor = context.getBean(TaskExecutor.class); - - // 优雅停止项目 - Runtime.getRuntime().addShutdownHook(new ShutdownHookRunner(Arrays.asList(kafkaConsumerApp, kafkaConsumerEvent, - kafkaConsumerBackup, kafkaConsumerRpcTrace), taskExecutor)); - - taskExecutor.addTask(); - taskExecutor.execute(); - } - - private static class ShutdownHookRunner extends Thread { - - private List kafkaConsumers; - private TaskExecutor taskExecutor; - - public ShutdownHookRunner(List kafkaConsumers, TaskExecutor taskExecutor) { - this.kafkaConsumers = kafkaConsumers; - this.taskExecutor = taskExecutor; - } - - @Override - public void run() { - LOGGER.info("starting to exit"); - for (KafkaConsumer kafkaConsumer : kafkaConsumers) { - kafkaConsumer.wakeup(); - } - try { - taskExecutor.join(); - } catch (InterruptedException e) { - LOGGER.error("interrupted, ", e); - } - } - } -} diff --git a/skyeye-collector/src/main/java/com/jthink/skyeye/collector/listener/HandleRebalanceForRpcTrace.java b/skyeye-collector/src/main/java/com/jthink/skyeye/collector/listener/HandleRebalanceForRpcTrace.java index 9fa3db9..5698e88 100644 --- a/skyeye-collector/src/main/java/com/jthink/skyeye/collector/listener/HandleRebalanceForRpcTrace.java +++ b/skyeye-collector/src/main/java/com/jthink/skyeye/collector/listener/HandleRebalanceForRpcTrace.java @@ -1,7 +1,6 @@ package com.jthink.skyeye.collector.listener; import com.jthink.skyeye.collector.configuration.kafka.KafkaProperties; -import com.jthink.skyeye.collector.task.BackupTask; import com.jthink.skyeye.collector.task.RpcTraceTask; import 
org.apache.kafka.clients.consumer.ConsumerRebalanceListener; import org.apache.kafka.clients.consumer.KafkaConsumer; diff --git a/skyeye-web/build.gradle b/skyeye-web/build.gradle index 9111428..b003998 100644 --- a/skyeye-web/build.gradle +++ b/skyeye-web/build.gradle @@ -55,7 +55,7 @@ dependencies { compile "org.codehaus.jackson:jackson-mapper-asl:$jacksonVersion" compile "com.alibaba:fastjson:$fastjsonVersion" - compile "joda-time:joda-time:2.9.4" + compile "joda-time:joda-time:2.9.9" testCompile "org.springframework.boot:spring-boot-starter-test" } From 05d62b58c6a39bf2e8a325de16687cafc3675df3 Mon Sep 17 00:00:00 2001 From: JThink Date: Thu, 3 Aug 2017 16:09:20 +0800 Subject: [PATCH 18/27] add gitignore --- skyeye-collector/build.gradle | 1 + 1 file changed, 1 insertion(+) diff --git a/skyeye-collector/build.gradle b/skyeye-collector/build.gradle index 7881909..ff65ae1 100644 --- a/skyeye-collector/build.gradle +++ b/skyeye-collector/build.gradle @@ -47,6 +47,7 @@ subprojects { repositories { mavenLocal() + maven { url mavenClouderaUrl } maven { url mavenPublicUrl } maven { url mavenReleaseUrl } maven { url mavenSnapshotUrl } From 7419259ca0522d918900b9ee5201061f0ec76b07 Mon Sep 17 00:00:00 2001 From: JThink Date: Thu, 3 Aug 2017 16:10:17 +0800 Subject: [PATCH 19/27] add gitignore --- skyeye-client/skyeye-client-core/.gitignore | 22 +++++++++++++++++++ skyeye-client/skyeye-client-log4j/.gitignore | 22 +++++++++++++++++++ skyeye-client/skyeye-client-log4j2/.gitignore | 22 +++++++++++++++++++ .../skyeye-client-logback/.gitignore | 22 +++++++++++++++++++ .../skyeye-collector-backup/.gitignore | 22 +++++++++++++++++++ .../skyeye-collector-core/.gitignore | 22 +++++++++++++++++++ .../skyeye-collector-indexer/.gitignore | 22 +++++++++++++++++++ .../skyeye-collector-metrics/.gitignore | 22 +++++++++++++++++++ .../skyeye-collector-trace/.gitignore | 22 +++++++++++++++++++ 9 files changed, 198 insertions(+) create mode 100644 skyeye-client/skyeye-client-core/.gitignore 
create mode 100644 skyeye-client/skyeye-client-log4j/.gitignore create mode 100644 skyeye-client/skyeye-client-log4j2/.gitignore create mode 100644 skyeye-client/skyeye-client-logback/.gitignore create mode 100644 skyeye-collector/skyeye-collector-backup/.gitignore create mode 100644 skyeye-collector/skyeye-collector-core/.gitignore create mode 100644 skyeye-collector/skyeye-collector-indexer/.gitignore create mode 100644 skyeye-collector/skyeye-collector-metrics/.gitignore create mode 100644 skyeye-collector/skyeye-collector-trace/.gitignore diff --git a/skyeye-client/skyeye-client-core/.gitignore b/skyeye-client/skyeye-client-core/.gitignore new file mode 100644 index 0000000..f94d1be --- /dev/null +++ b/skyeye-client/skyeye-client-core/.gitignore @@ -0,0 +1,22 @@ +bin/ +target/ +file/ +logs/ +gen-java/ +.externalToolBuilders/ +.settings/ +.gradle/ +.classpath +.gradletasknamecache +.buildpath +.project +.springBeans +dependency-reduced-pom.xml +*.iml +nohup.out +/tmp +/.apt_generated/ +.idea/ +disconf/ +/target/ +/build/ diff --git a/skyeye-client/skyeye-client-log4j/.gitignore b/skyeye-client/skyeye-client-log4j/.gitignore new file mode 100644 index 0000000..f94d1be --- /dev/null +++ b/skyeye-client/skyeye-client-log4j/.gitignore @@ -0,0 +1,22 @@ +bin/ +target/ +file/ +logs/ +gen-java/ +.externalToolBuilders/ +.settings/ +.gradle/ +.classpath +.gradletasknamecache +.buildpath +.project +.springBeans +dependency-reduced-pom.xml +*.iml +nohup.out +/tmp +/.apt_generated/ +.idea/ +disconf/ +/target/ +/build/ diff --git a/skyeye-client/skyeye-client-log4j2/.gitignore b/skyeye-client/skyeye-client-log4j2/.gitignore new file mode 100644 index 0000000..f94d1be --- /dev/null +++ b/skyeye-client/skyeye-client-log4j2/.gitignore @@ -0,0 +1,22 @@ +bin/ +target/ +file/ +logs/ +gen-java/ +.externalToolBuilders/ +.settings/ +.gradle/ +.classpath +.gradletasknamecache +.buildpath +.project +.springBeans +dependency-reduced-pom.xml +*.iml +nohup.out +/tmp +/.apt_generated/ 
+.idea/ +disconf/ +/target/ +/build/ diff --git a/skyeye-client/skyeye-client-logback/.gitignore b/skyeye-client/skyeye-client-logback/.gitignore new file mode 100644 index 0000000..f94d1be --- /dev/null +++ b/skyeye-client/skyeye-client-logback/.gitignore @@ -0,0 +1,22 @@ +bin/ +target/ +file/ +logs/ +gen-java/ +.externalToolBuilders/ +.settings/ +.gradle/ +.classpath +.gradletasknamecache +.buildpath +.project +.springBeans +dependency-reduced-pom.xml +*.iml +nohup.out +/tmp +/.apt_generated/ +.idea/ +disconf/ +/target/ +/build/ diff --git a/skyeye-collector/skyeye-collector-backup/.gitignore b/skyeye-collector/skyeye-collector-backup/.gitignore new file mode 100644 index 0000000..f94d1be --- /dev/null +++ b/skyeye-collector/skyeye-collector-backup/.gitignore @@ -0,0 +1,22 @@ +bin/ +target/ +file/ +logs/ +gen-java/ +.externalToolBuilders/ +.settings/ +.gradle/ +.classpath +.gradletasknamecache +.buildpath +.project +.springBeans +dependency-reduced-pom.xml +*.iml +nohup.out +/tmp +/.apt_generated/ +.idea/ +disconf/ +/target/ +/build/ diff --git a/skyeye-collector/skyeye-collector-core/.gitignore b/skyeye-collector/skyeye-collector-core/.gitignore new file mode 100644 index 0000000..f94d1be --- /dev/null +++ b/skyeye-collector/skyeye-collector-core/.gitignore @@ -0,0 +1,22 @@ +bin/ +target/ +file/ +logs/ +gen-java/ +.externalToolBuilders/ +.settings/ +.gradle/ +.classpath +.gradletasknamecache +.buildpath +.project +.springBeans +dependency-reduced-pom.xml +*.iml +nohup.out +/tmp +/.apt_generated/ +.idea/ +disconf/ +/target/ +/build/ diff --git a/skyeye-collector/skyeye-collector-indexer/.gitignore b/skyeye-collector/skyeye-collector-indexer/.gitignore new file mode 100644 index 0000000..f94d1be --- /dev/null +++ b/skyeye-collector/skyeye-collector-indexer/.gitignore @@ -0,0 +1,22 @@ +bin/ +target/ +file/ +logs/ +gen-java/ +.externalToolBuilders/ +.settings/ +.gradle/ +.classpath +.gradletasknamecache +.buildpath +.project +.springBeans +dependency-reduced-pom.xml 
+*.iml +nohup.out +/tmp +/.apt_generated/ +.idea/ +disconf/ +/target/ +/build/ diff --git a/skyeye-collector/skyeye-collector-metrics/.gitignore b/skyeye-collector/skyeye-collector-metrics/.gitignore new file mode 100644 index 0000000..f94d1be --- /dev/null +++ b/skyeye-collector/skyeye-collector-metrics/.gitignore @@ -0,0 +1,22 @@ +bin/ +target/ +file/ +logs/ +gen-java/ +.externalToolBuilders/ +.settings/ +.gradle/ +.classpath +.gradletasknamecache +.buildpath +.project +.springBeans +dependency-reduced-pom.xml +*.iml +nohup.out +/tmp +/.apt_generated/ +.idea/ +disconf/ +/target/ +/build/ diff --git a/skyeye-collector/skyeye-collector-trace/.gitignore b/skyeye-collector/skyeye-collector-trace/.gitignore new file mode 100644 index 0000000..f94d1be --- /dev/null +++ b/skyeye-collector/skyeye-collector-trace/.gitignore @@ -0,0 +1,22 @@ +bin/ +target/ +file/ +logs/ +gen-java/ +.externalToolBuilders/ +.settings/ +.gradle/ +.classpath +.gradletasknamecache +.buildpath +.project +.springBeans +dependency-reduced-pom.xml +*.iml +nohup.out +/tmp +/.apt_generated/ +.idea/ +disconf/ +/target/ +/build/ From a304022fff10dcf038c0e8d4f42a4574a70a1a82 Mon Sep 17 00:00:00 2001 From: JThink Date: Fri, 4 Aug 2017 09:39:04 +0800 Subject: [PATCH 20/27] re arch rpc trace data collector --- skyeye-collector/settings.gradle | 4 +- .../skyeye-collector-backup/build.gradle | 2 +- .../properties/collector-backup.properties | 2 +- .../skyeye-collector-trace/build.gradle | 52 +++++++ .../skyeye-collector-trace/settings.gradle | 1 + .../trace/balancer/HandleRebalance.java} | 16 +-- .../collector/trace/cache/CacheService.java | 128 ++++++++++++++++++ .../collector/trace/launcher/Launcher.java | 59 ++++++++ .../collector/trace}/store/HbaseStore.java | 7 +- .../skyeye/collector/trace}/store/Store.java | 2 +- .../collector/trace}/task/RpcTraceTask.java | 25 ++-- .../src/main/resources/application.properties | 47 +++++++ .../src/main/resources/banner.txt | 6 + .../config/collector-trace.properties 
| 21 +++ .../src/main/resources/logback.xml | 58 ++++++++ .../skyeye/collector/task/UploadTask.java | 54 -------- .../skyeye/collector/util/FileUtil.java | 113 ---------------- .../hbase/boot/HbaseAutoConfiguration.java | 5 + .../data/hbase/boot/HbaseProperties.java | 20 +++ 19 files changed, 428 insertions(+), 194 deletions(-) create mode 100644 skyeye-collector/skyeye-collector-trace/build.gradle create mode 100644 skyeye-collector/skyeye-collector-trace/settings.gradle rename skyeye-collector/{src/main/java/com/jthink/skyeye/collector/listener/HandleRebalanceForRpcTrace.java => skyeye-collector-trace/src/main/java/com/jthink/skyeye/collector/trace/balancer/HandleRebalance.java} (66%) create mode 100644 skyeye-collector/skyeye-collector-trace/src/main/java/com/jthink/skyeye/collector/trace/cache/CacheService.java create mode 100644 skyeye-collector/skyeye-collector-trace/src/main/java/com/jthink/skyeye/collector/trace/launcher/Launcher.java rename skyeye-collector/{src/main/java/com/jthink/skyeye/collector/dapper => skyeye-collector-trace/src/main/java/com/jthink/skyeye/collector/trace}/store/HbaseStore.java (97%) rename skyeye-collector/{src/main/java/com/jthink/skyeye/collector/dapper => skyeye-collector-trace/src/main/java/com/jthink/skyeye/collector/trace}/store/Store.java (95%) rename skyeye-collector/{src/main/java/com/jthink/skyeye/collector => skyeye-collector-trace/src/main/java/com/jthink/skyeye/collector/trace}/task/RpcTraceTask.java (92%) create mode 100644 skyeye-collector/skyeye-collector-trace/src/main/resources/application.properties create mode 100644 skyeye-collector/skyeye-collector-trace/src/main/resources/banner.txt create mode 100644 skyeye-collector/skyeye-collector-trace/src/main/resources/config/collector-trace.properties create mode 100644 skyeye-collector/skyeye-collector-trace/src/main/resources/logback.xml delete mode 100644 skyeye-collector/src/main/java/com/jthink/skyeye/collector/task/UploadTask.java delete mode 100644 
skyeye-collector/src/main/java/com/jthink/skyeye/collector/util/FileUtil.java diff --git a/skyeye-collector/settings.gradle b/skyeye-collector/settings.gradle index 50f74dc..bf49417 100644 --- a/skyeye-collector/settings.gradle +++ b/skyeye-collector/settings.gradle @@ -1,2 +1,2 @@ -include 'skyeye-collector-core', 'skyeye-collector-indexer', 'skyeye-collector-backup' -//, 'skyeye-collector-backup', , 'skyeye-collector-trace', ''skyeye-collector-metrics +include 'skyeye-collector-core', 'skyeye-collector-indexer', 'skyeye-collector-backup', 'skyeye-collector-trace' +//, 'skyeye-collector-metrics diff --git a/skyeye-collector/skyeye-collector-backup/build.gradle b/skyeye-collector/skyeye-collector-backup/build.gradle index 0142a50..0c5dbbf 100644 --- a/skyeye-collector/skyeye-collector-backup/build.gradle +++ b/skyeye-collector/skyeye-collector-backup/build.gradle @@ -32,7 +32,7 @@ configurations { compile.exclude group: "org.slf4j", module: "slf4j-log4j12" } -mainClassName = "com.jthink.skyeye.collector.indexer.launcher.Launcher" +mainClassName = "com.jthink.skyeye.collector.backup.launcher.Launcher" buildscript { ext { diff --git a/skyeye-collector/skyeye-collector-backup/src/main/resources/properties/collector-backup.properties b/skyeye-collector/skyeye-collector-backup/src/main/resources/properties/collector-backup.properties index 6b13e1c..ca1b47e 100644 --- a/skyeye-collector/skyeye-collector-backup/src/main/resources/properties/collector-backup.properties +++ b/skyeye-collector/skyeye-collector-backup/src/main/resources/properties/collector-backup.properties @@ -1,4 +1,4 @@ -kafka config +# kafka config kafka.brokers=riot01:9092,riot02:9092,riot03:9092 kafka.topic=app-log kafka.consume.group=log-backup-consume-group diff --git a/skyeye-collector/skyeye-collector-trace/build.gradle b/skyeye-collector/skyeye-collector-trace/build.gradle new file mode 100644 index 0000000..89edcf2 --- /dev/null +++ b/skyeye-collector/skyeye-collector-trace/build.gradle @@ -0,0 
+1,52 @@ +apply plugin: 'java' +apply plugin: 'eclipse' +apply plugin: 'maven' +apply plugin: 'org.springframework.boot' +apply plugin: 'application' + +ext { + dataVersion = '1.0.0' + fastJsonVersion = '1.2.35' +} + +dependencies { + compile project(":skyeye-collector-core") + + compile "org.springframework.boot:spring-boot-starter-data-redis:$springBootVersion" + compile "skyeye:skyeye-data-hbase:$dataVersion" + compile "skyeye:skyeye-data-jpa:$dataVersion" + compile "com.alibaba:fastjson:$fastJsonVersion" + + testCompile "org.springframework.boot:spring-boot-starter-test" +} + +configurations { + compile.exclude group: "log4j", module: "log4j" + compile.exclude group: "org.elasticsearch", module: "elasticsearch" + compile.exclude group: "org.slf4j", module: "slf4j-log4j12" +} + +mainClassName = "com.jthink.skyeye.collector.backup.launcher.Launcher" + +buildscript { + ext { + springBootVersion = '1.5.6.RELEASE' + } + + repositories { + mavenLocal() + maven { url mavenPublicUrl } + mavenCentral() + } + + dependencies { + classpath("org.springframework.boot:spring-boot-gradle-plugin:${springBootVersion}") + classpath("io.spring.gradle:dependency-management-plugin:1.0.3.RELEASE") + } +} + +startScripts { + doLast { + unixScript.text = unixScript.text.replaceAll("lib/(.*)\n", "lib/\\*") + } +} diff --git a/skyeye-collector/skyeye-collector-trace/settings.gradle b/skyeye-collector/skyeye-collector-trace/settings.gradle new file mode 100644 index 0000000..8768488 --- /dev/null +++ b/skyeye-collector/skyeye-collector-trace/settings.gradle @@ -0,0 +1 @@ +rootProject.name = 'skyeye-collector-trace' diff --git a/skyeye-collector/src/main/java/com/jthink/skyeye/collector/listener/HandleRebalanceForRpcTrace.java b/skyeye-collector/skyeye-collector-trace/src/main/java/com/jthink/skyeye/collector/trace/balancer/HandleRebalance.java similarity index 66% rename from skyeye-collector/src/main/java/com/jthink/skyeye/collector/listener/HandleRebalanceForRpcTrace.java rename to 
skyeye-collector/skyeye-collector-trace/src/main/java/com/jthink/skyeye/collector/trace/balancer/HandleRebalance.java index 5698e88..73e0dbf 100644 --- a/skyeye-collector/src/main/java/com/jthink/skyeye/collector/listener/HandleRebalanceForRpcTrace.java +++ b/skyeye-collector/skyeye-collector-trace/src/main/java/com/jthink/skyeye/collector/trace/balancer/HandleRebalance.java @@ -1,7 +1,7 @@ -package com.jthink.skyeye.collector.listener; +package com.jthink.skyeye.collector.trace.balancer; -import com.jthink.skyeye.collector.configuration.kafka.KafkaProperties; -import com.jthink.skyeye.collector.task.RpcTraceTask; +import com.jthink.skyeye.collector.core.configuration.kafka.KafkaProperties; +import com.jthink.skyeye.collector.trace.task.RpcTraceTask; import org.apache.kafka.clients.consumer.ConsumerRebalanceListener; import org.apache.kafka.clients.consumer.KafkaConsumer; import org.apache.kafka.common.TopicPartition; @@ -23,19 +23,19 @@ * @date 2016-09-20 11:14:27 */ @Component -public class HandleRebalanceForRpcTrace implements ConsumerRebalanceListener, InitializingBean { +public class HandleRebalance implements ConsumerRebalanceListener, InitializingBean { - private static final Logger LOGGER = LoggerFactory.getLogger(HandleRebalanceForRpcTrace.class); + private static final Logger LOGGER = LoggerFactory.getLogger(HandleRebalance.class); @Autowired - private KafkaConsumer kafkaConsumerRpcTrace; + private KafkaConsumer kafkaConsumer; @Autowired private KafkaProperties kafkaProperties; @Override public void onPartitionsRevoked(Collection partitions) { - this.kafkaConsumerRpcTrace.commitSync(RpcTraceTask.currentOffsets); + this.kafkaConsumer.commitSync(RpcTraceTask.currentOffsets); LOGGER.info("before rebalance, commit offset once"); } @@ -46,6 +46,6 @@ public void onPartitionsAssigned(Collection partitions) { @Override public void afterPropertiesSet() throws Exception { - this.kafkaConsumerRpcTrace.subscribe(Arrays.asList(this.kafkaProperties.getTopic()), this); 
+ this.kafkaConsumer.subscribe(Arrays.asList(this.kafkaProperties.getTopic()), this); } } \ No newline at end of file diff --git a/skyeye-collector/skyeye-collector-trace/src/main/java/com/jthink/skyeye/collector/trace/cache/CacheService.java b/skyeye-collector/skyeye-collector-trace/src/main/java/com/jthink/skyeye/collector/trace/cache/CacheService.java new file mode 100644 index 0000000..181eb9d --- /dev/null +++ b/skyeye-collector/skyeye-collector-trace/src/main/java/com/jthink/skyeye/collector/trace/cache/CacheService.java @@ -0,0 +1,128 @@ +package com.jthink.skyeye.collector.trace.cache; + +import com.jthink.skyeye.base.constant.Constants; +import com.jthink.skyeye.base.constant.NameInfoType; +import com.jthink.skyeye.data.jpa.domain.NameInfo; +import com.jthink.skyeye.data.jpa.domain.ServiceInfo; +import com.jthink.skyeye.data.jpa.repository.NameInfoRepository; +import com.jthink.skyeye.data.jpa.repository.ServiceInfoRepository; +import org.apache.commons.lang.time.StopWatch; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.InitializingBean; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.data.redis.core.SetOperations; +import org.springframework.data.redis.core.StringRedisTemplate; +import org.springframework.stereotype.Service; + +import java.util.HashMap; +import java.util.Iterator; +import java.util.Map; +import java.util.Set; + +/** + * JThink@JThink + * + * @author JThink + * @version 0.0.1 + * @desc 缓存名称采集相关的数据 + * @date 2016-11-22 09:23:19 + */ +@Service +public class CacheService implements InitializingBean { + + private static Logger LOGGER = LoggerFactory.getLogger(CacheService.class); + + @Autowired + private ServiceInfoRepository serviceInfoRepository; + + @Autowired + private StringRedisTemplate redisTemplate; + private SetOperations setOps; + + private static final String CONFIG_PREFIX = "jthink_monitor_collector"; + private static final String 
SERVICE_INFO_PREFIX = "jthink_monitor_collector_service_info"; + public static final String SERVICE_INFO_TYPE = "service"; + + private static final Map mapping = new HashMap() { + { + put(SERVICE_INFO_TYPE, SERVICE_INFO_PREFIX); + } + }; + + /** + * 保存 + * @param serviceInfo + */ + public void save(ServiceInfo serviceInfo) { + this.serviceInfoRepository.save(serviceInfo); + } + + /** + * 根据采集的类型和值存入redis + * @param type + * @param value + * @return + */ + public void add(String type, String value) { + this.setOps.add(mapping.get(type), value); + } + + /** + * 根据采集的类型和值判断是否存在 + * @param type + * @param value + * @return + */ + public boolean isExists(String type, String value) { + return this.setOps.isMember(mapping.get(type), value); + } + + /** + * 将数据库中的配置表进行缓存 + */ + private void loadCache() { + StopWatch sw = new StopWatch(); + sw.start(); + LOGGER.info("start load config to cache"); + + Iterable serviceInfos = this.serviceInfoRepository.findAll(); + + for (Iterator it = serviceInfos.iterator(); it.hasNext();) { + ServiceInfo serviceInfo = it.next(); + this.setOps.add(SERVICE_INFO_PREFIX, serviceInfo.getSid()); + } + + sw.stop(); + LOGGER.info("load config to cache end, cost {} ms", sw.getTime()); + } + + /** + * 将redis中的配置信息清除 + */ + private void clearCache() { + StopWatch sw = new StopWatch(); + sw.start(); + LOGGER.info("start clear config cache"); + + Set keys = this.redisTemplate.keys(CONFIG_PREFIX + Constants.XING_HAO); + this.redisTemplate.delete(keys); + + sw.stop(); + LOGGER.info("clear config cache end, cost {} ms", sw.getTime()); + } + + /** + * 缓存数据初始化 + */ + public void load() { + this.clearCache(); + this.loadCache(); + } + + @Override + public void afterPropertiesSet() throws Exception { + this.setOps = this.redisTemplate.opsForSet(); + this.load(); + } +} diff --git a/skyeye-collector/skyeye-collector-trace/src/main/java/com/jthink/skyeye/collector/trace/launcher/Launcher.java 
b/skyeye-collector/skyeye-collector-trace/src/main/java/com/jthink/skyeye/collector/trace/launcher/Launcher.java new file mode 100644 index 0000000..65630f2 --- /dev/null +++ b/skyeye-collector/skyeye-collector-trace/src/main/java/com/jthink/skyeye/collector/trace/launcher/Launcher.java @@ -0,0 +1,59 @@ +package com.jthink.skyeye.collector.trace.launcher; + +import com.jthink.skyeye.collector.core.hook.ShutdownHookRunner; +import com.jthink.skyeye.collector.core.task.Task; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.boot.autoconfigure.EnableAutoConfiguration; +import org.springframework.boot.autoconfigure.SpringBootApplication; +import org.springframework.boot.builder.SpringApplicationBuilder; +import org.springframework.boot.logging.LoggingApplicationListener; +import org.springframework.context.ApplicationListener; +import org.springframework.context.ConfigurableApplicationContext; +import org.springframework.context.annotation.ComponentScan; +import org.springframework.context.annotation.PropertySource; + +import java.util.Iterator; +import java.util.Set; +import java.util.UUID; + +/** + * JThink@JThink + * + * @author JThink + * @version 0.0.1 + * @desc 项目启动器 + * @date 2016-08-24 18:31:48 + */ +@SpringBootApplication +@EnableAutoConfiguration +@ComponentScan(basePackages={"com.jthink.skyeye.collector.core", "com.jthink.skyeye.collector.trace"}) +@PropertySource("file:/opt/jthink/jthink-config/skyeye/collector/collector-trace.properties") +//@PropertySource("classpath:properties/collector-trace.properties") +public class Launcher { + + private static final Logger LOGGER = LoggerFactory.getLogger(Launcher.class); + + public static void main(String[] args) { + SpringApplicationBuilder builder = new SpringApplicationBuilder(Launcher.class); + Set> listeners = builder.application().getListeners(); + for (Iterator> it = listeners.iterator(); it.hasNext();) { + 
ApplicationListener listener = it.next(); + if (listener instanceof LoggingApplicationListener) { + it.remove(); + } + } + builder.application().setListeners(listeners); + ConfigurableApplicationContext context = builder.run(args); + LOGGER.info("collector trace start successfully"); + + KafkaConsumer kafkaConsumer = (KafkaConsumer) context.getBean("kafkaConsumer"); + Task task = (Task) context.getBean("rpcTraceTask"); + + // 优雅停止项目 + Runtime.getRuntime().addShutdownHook(new ShutdownHookRunner(kafkaConsumer, task)); + task.doTask(); + } + +} diff --git a/skyeye-collector/src/main/java/com/jthink/skyeye/collector/dapper/store/HbaseStore.java b/skyeye-collector/skyeye-collector-trace/src/main/java/com/jthink/skyeye/collector/trace/store/HbaseStore.java similarity index 97% rename from skyeye-collector/src/main/java/com/jthink/skyeye/collector/dapper/store/HbaseStore.java rename to skyeye-collector/skyeye-collector-trace/src/main/java/com/jthink/skyeye/collector/trace/store/HbaseStore.java index a994709..9209385 100644 --- a/skyeye-collector/src/main/java/com/jthink/skyeye/collector/dapper/store/HbaseStore.java +++ b/skyeye-collector/skyeye-collector-trace/src/main/java/com/jthink/skyeye/collector/trace/store/HbaseStore.java @@ -1,4 +1,4 @@ -package com.jthink.skyeye.collector.dapper.store; +package com.jthink.skyeye.collector.trace.store; import com.google.common.collect.Lists; import com.jthink.skyeye.base.constant.Constants; @@ -9,7 +9,10 @@ import org.slf4j.LoggerFactory; import org.springframework.stereotype.Component; -import java.util.*; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; /** * JThink@JThink diff --git a/skyeye-collector/src/main/java/com/jthink/skyeye/collector/dapper/store/Store.java b/skyeye-collector/skyeye-collector-trace/src/main/java/com/jthink/skyeye/collector/trace/store/Store.java similarity index 95% rename from 
skyeye-collector/src/main/java/com/jthink/skyeye/collector/dapper/store/Store.java rename to skyeye-collector/skyeye-collector-trace/src/main/java/com/jthink/skyeye/collector/trace/store/Store.java index e8d54dc..7725f8e 100644 --- a/skyeye-collector/src/main/java/com/jthink/skyeye/collector/dapper/store/Store.java +++ b/skyeye-collector/skyeye-collector-trace/src/main/java/com/jthink/skyeye/collector/trace/store/Store.java @@ -1,4 +1,4 @@ -package com.jthink.skyeye.collector.dapper.store; +package com.jthink.skyeye.collector.trace.store; import com.jthink.skyeye.base.dapper.Annotation; import com.jthink.skyeye.base.dapper.Span; diff --git a/skyeye-collector/src/main/java/com/jthink/skyeye/collector/task/RpcTraceTask.java b/skyeye-collector/skyeye-collector-trace/src/main/java/com/jthink/skyeye/collector/trace/task/RpcTraceTask.java similarity index 92% rename from skyeye-collector/src/main/java/com/jthink/skyeye/collector/task/RpcTraceTask.java rename to skyeye-collector/skyeye-collector-trace/src/main/java/com/jthink/skyeye/collector/trace/task/RpcTraceTask.java index 9c8455c..d6053c4 100644 --- a/skyeye-collector/src/main/java/com/jthink/skyeye/collector/task/RpcTraceTask.java +++ b/skyeye-collector/skyeye-collector-trace/src/main/java/com/jthink/skyeye/collector/trace/task/RpcTraceTask.java @@ -1,17 +1,18 @@ -package com.jthink.skyeye.collector.task; +package com.jthink.skyeye.collector.trace.task; import com.alibaba.fastjson.JSON; -import com.jthink.skyeye.base.dapper.Span; -import com.jthink.skyeye.collector.service.CacheService; -import com.jthink.skyeye.data.hbase.api.HbaseTemplate; -import com.jthink.skyeye.collector.callback.KafkaOffsetCommitCallback; -import com.jthink.skyeye.collector.configuration.kafka.KafkaProperties; -import com.jthink.skyeye.collector.dapper.store.Store; import com.jthink.skyeye.base.constant.Constants; import com.jthink.skyeye.base.constant.EventType; +import com.jthink.skyeye.base.dapper.Span; import 
com.jthink.skyeye.base.dto.EventLog; import com.jthink.skyeye.base.dto.LogDto; import com.jthink.skyeye.base.dto.RpcTraceLog; +import com.jthink.skyeye.collector.core.callback.KafkaOffsetCommitCallback; +import com.jthink.skyeye.collector.core.configuration.kafka.KafkaProperties; +import com.jthink.skyeye.collector.core.task.Task; +import com.jthink.skyeye.collector.trace.cache.CacheService; +import com.jthink.skyeye.collector.trace.store.Store; +import com.jthink.skyeye.data.hbase.api.HbaseTemplate; import com.jthink.skyeye.data.jpa.domain.ServiceInfo; import com.jthink.skyeye.data.jpa.pk.ServiceInfoPK; import org.apache.hadoop.hbase.client.Mutation; @@ -47,7 +48,7 @@ public class RpcTraceTask implements Task { private static final Logger LOGGER = LoggerFactory.getLogger(RpcTraceTask.class); @Autowired - private KafkaConsumer kafkaConsumerRpcTrace; + private KafkaConsumer kafkaConsumer; @Autowired private KafkaProperties kafkaProperties; @Autowired @@ -71,7 +72,7 @@ public void doTask() { int count = 0; try { while (true) { - ConsumerRecords records = this.kafkaConsumerRpcTrace.poll(this.kafkaProperties.getPollTimeout()); + ConsumerRecords records = this.kafkaConsumer.poll(this.kafkaProperties.getPollTimeout()); if (!records.isEmpty()) { List spanPuts = new ArrayList(); List annotationPuts = new ArrayList(); @@ -122,7 +123,7 @@ public void doTask() { count++; if (count >= 1000) { // 当达到了1000触发向kafka提交offset - kafkaConsumerRpcTrace.commitAsync(currentOffsets, new KafkaOffsetCommitCallback()); + kafkaConsumer.commitAsync(currentOffsets, new KafkaOffsetCommitCallback()); count = 0; } } @@ -132,7 +133,7 @@ public void doTask() { this.storeToHbase(Constants.TABLE_TIME_CONSUME, tracePuts); this.storeToHbase(Constants.TABLE_ANNOTATION, annotationPuts); - kafkaConsumerRpcTrace.commitAsync(currentOffsets, new KafkaOffsetCommitCallback()); + kafkaConsumer.commitAsync(currentOffsets, new KafkaOffsetCommitCallback()); LOGGER.info("processed {} records, " + "{} span records 
stored in hbase table trace, " + "{} annotation records stored in hbase table annotation, " + @@ -146,7 +147,7 @@ public void doTask() { } catch (Exception e) { LOGGER.error("process records error, {}", e); } finally { - kafkaConsumerRpcTrace.commitSync(currentOffsets); + kafkaConsumer.commitSync(currentOffsets); LOGGER.info("finally commit the offset"); // 不需要主动调kafkaConsumer.close(), spring bean容器会调用 } diff --git a/skyeye-collector/skyeye-collector-trace/src/main/resources/application.properties b/skyeye-collector/skyeye-collector-trace/src/main/resources/application.properties new file mode 100644 index 0000000..e2ba23d --- /dev/null +++ b/skyeye-collector/skyeye-collector-trace/src/main/resources/application.properties @@ -0,0 +1,47 @@ +# kafka config +spring.message.kafka.brokers=${kafka.brokers} +spring.message.kafka.topic=${kafka.topic} +spring.message.kafka.consumeGroup=${kafka.consume.group} +spring.message.kafka.pollTimeout=${kafka.poll.timeout} + +# redis config +spring.redis.database=6 +spring.redis.host=${redis.host} +spring.redis.port=${redis.port} +spring.redis.password=${redis.password} +spring.redis.pool.max-active=5 +spring.redis.pool.max-idle=5 +spring.redis.pool.max-wait=-1 +spring.redis.pool.min-idle=2 +spring.redis.timeout=0 + +# jpa config +spring.datasource.driver-class-name=com.mysql.jdbc.Driver +spring.datasource.url=jdbc:mysql://${database.address}/${database.name}?characterEncoding=utf8 +spring.datasource.name=${database.name} +spring.datasource.username=${database.username} +spring.datasource.password=${database.password} + +spring.datasource.initial-size=5 +spring.datasource.min-idle=5 +spring.datasource.max-idle= 10 +spring.datasource.max-active=100 +spring.datasource.max-wait=10000 +spring.datasource.validation-query=SELECT 1 + +spring.datasource.time-between-eviction-runs-millis=18800 +spring.datasource.min-evictable-idle-time-millis=18800 +spring.datasource.test-while-idle=true +spring.datasource.test-on-borrow=false 
+spring.datasource.test-on-return=false + +spring.data.jpa.repositories.enabled=true +spring.jpa.hibernate.ddl-auto=validate +spring.jpa.generate-ddl=false +spring.jpa.database=org.hibernate.dialect.MySQL5InnoDBDialect +spring.jpa.show-sql=false + +# hbase config +spring.data.hbase.quorum=${hbase.quorum} +spring.data.hbase.rootDir=${hbase.rootDir} +spring.data.hbase.nodeParent=${hbase.zookeeper.znode.parent} diff --git a/skyeye-collector/skyeye-collector-trace/src/main/resources/banner.txt b/skyeye-collector/skyeye-collector-trace/src/main/resources/banner.txt new file mode 100644 index 0000000..fbe7ade --- /dev/null +++ b/skyeye-collector/skyeye-collector-trace/src/main/resources/banner.txt @@ -0,0 +1,6 @@ +███████╗██╗ ██╗██╗ ██╗███████╗██╗ ██╗███████╗ ██████╗ ██████╗ ██╗ ██╗ ███████╗ ██████╗████████╗ ██████╗ ██████╗ +██╔════╝██║ ██╔╝╚██╗ ██╔╝██╔════╝╚██╗ ██╔╝██╔════╝ ██╔════╝██╔═══██╗██║ ██║ ██╔════╝██╔════╝╚══██╔══╝██╔═══██╗██╔══██╗ +███████╗█████╔╝ ╚████╔╝ █████╗ ╚████╔╝ █████╗█████╗██║ ██║ ██║██║ ██║ █████╗ ██║ ██║ ██║ ██║██████╔╝ +╚════██║██╔═██╗ ╚██╔╝ ██╔══╝ ╚██╔╝ ██╔══╝╚════╝██║ ██║ ██║██║ ██║ ██╔══╝ ██║ ██║ ██║ ██║██╔══██╗ +███████║██║ ██╗ ██║ ███████╗ ██║ ███████╗ ╚██████╗╚██████╔╝███████╗███████╗███████╗╚██████╗ ██║ ╚██████╔╝██║ ██║ +╚══════╝╚═╝ ╚═╝ ╚═╝ ╚══════╝ ╚═╝ ╚══════╝ ╚═════╝ ╚═════╝ ╚══════╝╚══════╝╚══════╝ ╚═════╝ ╚═╝ ╚═════╝ ╚═╝ ╚═╝ diff --git a/skyeye-collector/skyeye-collector-trace/src/main/resources/config/collector-trace.properties b/skyeye-collector/skyeye-collector-trace/src/main/resources/config/collector-trace.properties new file mode 100644 index 0000000..a550840 --- /dev/null +++ b/skyeye-collector/skyeye-collector-trace/src/main/resources/config/collector-trace.properties @@ -0,0 +1,21 @@ +# kafka config +kafka.brokers=riot01:9092,riot02:9092,riot03:9092 +kafka.topic=app-log +kafka.consume.group=rpc-trace-consume-group +kafka.poll.timeout=100 + +# redis config +redis.host=localhost +redis.port=6379 +redis.password= + +# mysql config 
+database.address=localhost:3306 +database.name=monitor-center +database.username=root +database.password=root + +# hbase config +hbase.quorum=panda-01,panda-01,panda-03 +hbase.rootDir=hdfs://panda-01:8020/hbase +hbase.zookeeper.znode.parent=/hbase diff --git a/skyeye-collector/skyeye-collector-trace/src/main/resources/logback.xml b/skyeye-collector/skyeye-collector-trace/src/main/resources/logback.xml new file mode 100644 index 0000000..5fed6b7 --- /dev/null +++ b/skyeye-collector/skyeye-collector-trace/src/main/resources/logback.xml @@ -0,0 +1,58 @@ + + + + + + + ${APP_NAME} + + + + + + %d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{96}[%line]: %msg%n + + + + + + true + + ${LOG_HOME}/info/${APP_NAME}_%d{yyyy-MM-dd}.%i.log + + 128 MB + + + + INFO + + + + %d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{96}[%line]: %msg%n + + + + + true + + ${LOG_HOME}/error/${APP_NAME}_%d{yyyy-MM-dd}.%i.log + + 128 MB + + + + ERROR + + + + %d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{96}[%line]: %msg%n + + + + + + + + + + \ No newline at end of file diff --git a/skyeye-collector/src/main/java/com/jthink/skyeye/collector/task/UploadTask.java b/skyeye-collector/src/main/java/com/jthink/skyeye/collector/task/UploadTask.java deleted file mode 100644 index 50fac0a..0000000 --- a/skyeye-collector/src/main/java/com/jthink/skyeye/collector/task/UploadTask.java +++ /dev/null @@ -1,54 +0,0 @@ -package com.jthink.skyeye.collector.task; - -import com.jthink.skyeye.collector.util.FileUtil; -import com.jthink.skyeye.base.util.DateUtil; -import org.joda.time.DateTime; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.scheduling.annotation.EnableScheduling; -import org.springframework.scheduling.annotation.Scheduled; -import org.springframework.stereotype.Component; -import org.springframework.util.StopWatch; - -/** - * JThink@JThink - * - * @author JThink - * @version 
0.0.1 - * @desc 数据上传hdfs任务 - * @date 2016-12-06 17:51:33 - */ -@Component -@EnableScheduling -public class UploadTask { - - private static final Logger LOGGER = LoggerFactory.getLogger(UploadTask.class); - - @Autowired - private FileUtil fileUtil; - - /** - * 上传到hdfs并删除相应的文件 - */ - @Scheduled(cron = "${spring.upload.log.cron}") - private void upload() { - String yesterday = this.getYesterday(); - LOGGER.info("开始上传到hdfs, 时间: {}", yesterday); - StopWatch sw = new StopWatch(); - sw.start(); - this.fileUtil.uploadToHDFS(yesterday); - sw.stop(); - LOGGER.info("上传到hdfs结束, 耗时: {} ms", sw.getTotalTimeMillis()); - } - - /** - * 返回昨天的字符串 - * @return - */ - private String getYesterday() { - DateTime yesterday = new DateTime(System.currentTimeMillis()).minusDays(1); - return yesterday.toString(DateUtil.YYYYMMDD); - } - -} diff --git a/skyeye-collector/src/main/java/com/jthink/skyeye/collector/util/FileUtil.java b/skyeye-collector/src/main/java/com/jthink/skyeye/collector/util/FileUtil.java deleted file mode 100644 index 2e1b5c0..0000000 --- a/skyeye-collector/src/main/java/com/jthink/skyeye/collector/util/FileUtil.java +++ /dev/null @@ -1,113 +0,0 @@ -package com.jthink.skyeye.collector.util; - -import com.jthink.skyeye.collector.configuration.hadoop.HadoopProperties; -import com.jthink.skyeye.collector.configuration.kafka.KafkaProperties; -import com.jthink.skyeye.base.constant.Constants; -import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.fs.Path; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Component; - -import java.io.BufferedWriter; -import java.io.File; -import java.io.FileWriter; -import java.io.IOException; -import java.util.List; -import java.util.Map; - -/** - * JThink@JThink - * - * @author JThink - * @version 0.0.1 - * @desc file 相关的util - * @date 2016-12-06 11:26:33 - */ -@Component -public class FileUtil { - - private static final 
Logger LOGGER = LoggerFactory.getLogger(FileUtil.class); - - @Autowired - private KafkaProperties kafkaProperties; - @Autowired - private HadoopProperties hadoopProperties; - @Autowired - private FileSystem fileSystem; - - /** - * 将数据写入文件 - * @param lines - */ - public int save(Map> lines) { - int sum = 0; - for (Map.Entry> entry : lines.entrySet()) { - this.writeTofile(entry.getKey(), entry.getValue()); - sum += entry.getValue().size(); - } - return sum; - } - - /** - * 写入文件 - * @param fileName - * @param lines - */ - private void writeTofile(String fileName, List lines) { - BufferedWriter bw = null; - try { - File file = new File(this.kafkaProperties.getFileRoot() + this.getFileName(fileName)); - if (!file.getParentFile().exists()) { - if (!file.getParentFile().mkdirs()) { - LOGGER.info("创建父文件夹失败"); - } - } - bw = new BufferedWriter(new FileWriter(file, true)); - for (String line : lines) { - bw.write(line); - } - } catch (IOException e) { - LOGGER.error("写文件报错, ", e); - } finally { - if (bw != null) { - try { - bw.flush(); - bw.close(); - } catch (IOException e) { - LOGGER.error("写文件报错, ", e); - } - } - } - } - - /** - * 上传至hdfs - */ - public void uploadToHDFS(String yesterday) { - try { - String fileName = this.getFileName(yesterday); - File file = new File(this.kafkaProperties.getFileRoot() + fileName); - if (!file.exists()) { - LOGGER.info("当天没有可上传的文件"); - return; - } - this.fileSystem.copyFromLocalFile(true, false, new Path(this.kafkaProperties.getFileRoot() + fileName), - new Path(this.hadoopProperties.getBaseDir() + yesterday + Constants.SLASH + fileName)); - } catch (IOException e) { - LOGGER.error("上传至hdfs失败, ", e); - } - } - - /** - * 返回具体的名字 - * @param fileName - * @return - */ - private String getFileName(String fileName) { - return fileName + Constants.POINT + this.kafkaProperties.getServerId(); - } - - -} diff --git a/skyeye-data/skyeye-data-hbase/src/main/java/com/jthink/skyeye/data/hbase/boot/HbaseAutoConfiguration.java 
b/skyeye-data/skyeye-data-hbase/src/main/java/com/jthink/skyeye/data/hbase/boot/HbaseAutoConfiguration.java index eb2260a..17b56e9 100644 --- a/skyeye-data/skyeye-data-hbase/src/main/java/com/jthink/skyeye/data/hbase/boot/HbaseAutoConfiguration.java +++ b/skyeye-data/skyeye-data-hbase/src/main/java/com/jthink/skyeye/data/hbase/boot/HbaseAutoConfiguration.java @@ -23,6 +23,9 @@ public class HbaseAutoConfiguration { private static final String HBASE_QUORUM = "hbase.zookeeper.quorum"; + private static final String HBASE_ROOTDIR = "hbase.rootdir"; + private static final String HBASE_ZNODE_PARENT = "zookeeper.znode.parent"; + @Autowired private HbaseProperties hbaseProperties; @@ -32,6 +35,8 @@ public class HbaseAutoConfiguration { public HbaseTemplate hbaseTemplate() { Configuration configuration = HBaseConfiguration.create(); configuration.set(HBASE_QUORUM, this.hbaseProperties.getQuorum()); + configuration.set(HBASE_ROOTDIR, hbaseProperties.getRootDir()); + configuration.set(HBASE_ZNODE_PARENT, hbaseProperties.getNodeParent()); return new HbaseTemplate(configuration); } } diff --git a/skyeye-data/skyeye-data-hbase/src/main/java/com/jthink/skyeye/data/hbase/boot/HbaseProperties.java b/skyeye-data/skyeye-data-hbase/src/main/java/com/jthink/skyeye/data/hbase/boot/HbaseProperties.java index bbc34fe..b72840d 100644 --- a/skyeye-data/skyeye-data-hbase/src/main/java/com/jthink/skyeye/data/hbase/boot/HbaseProperties.java +++ b/skyeye-data/skyeye-data-hbase/src/main/java/com/jthink/skyeye/data/hbase/boot/HbaseProperties.java @@ -15,6 +15,10 @@ public class HbaseProperties { private String quorum; + private String rootDir; + + private String nodeParent; + public String getQuorum() { return quorum; } @@ -22,4 +26,20 @@ public String getQuorum() { public void setQuorum(String quorum) { this.quorum = quorum; } + + public String getRootDir() { + return rootDir; + } + + public void setRootDir(String rootDir) { + this.rootDir = rootDir; + } + + public String getNodeParent() { + 
return nodeParent; + } + + public void setNodeParent(String nodeParent) { + this.nodeParent = nodeParent; + } } From aa4e8aa86b8565ef78432f249ab23325a758aedd Mon Sep 17 00:00:00 2001 From: JThink Date: Fri, 4 Aug 2017 10:46:19 +0800 Subject: [PATCH 21/27] re arch metrics data collector --- skyeye-collector/settings.gradle | 3 +- .../core}/test/KafkaConsumerTest.java | 6 +- .../skyeye-collector-metrics/build.gradle | 52 ++++ .../skyeye-collector-metrics/settings.gradle | 1 + .../metrics/balancer/HandleRebalance.java} | 16 +- .../metrics/cache}/CacheService.java | 26 +- .../zookeeper/ZookeeperConfiguration.java | 2 +- .../collector/metrics/launcher/Launcher.java | 58 ++++ .../collector/metrics/task/MetricsTask.java} | 16 +- .../metrics}/task/job/ExceptionProcessor.java | 4 +- .../collector/metrics}/task/job/Indexer.java | 6 +- .../collector/metrics}/task/job/Job.java | 2 +- .../metrics/task/job/JobConfiguration.java | 66 +++++ .../metrics}/task/job/NameCollector.java | 8 +- .../src/main/resources/application.properties | 19 +- .../src/main/resources/banner.txt | 6 + .../config/collector-metrics.properties} | 19 +- .../src/main/resources/logback.xml | 4 +- .../skyeye-collector-trace/build.gradle | 2 +- .../com/google/common/base/Stopwatch.java | 267 ------------------ .../jthink/skyeye/collector/task/Task.java | 19 -- .../src/main/resources/banner.txt | 0 .../shell/es/app-log/create-index.py | 59 ---- .../main/resources/shell/es/app-log/start.sh | 12 - .../shell/es/event-log/create-index.py | 60 ---- .../resources/shell/es/event-log/start.sh | 12 - .../src/main/resources/shell/es/install.sh | 6 - .../src/main/resources/shell/hbase/hbase | 3 - .../resources/shell/kafka/create-topic.sh | 6 - 29 files changed, 224 insertions(+), 536 deletions(-) rename skyeye-collector/{src/test/java/com/jthink/skyeye/collector => skyeye-collector-core/src/test/java/com/jthink/skyeye/collector/core}/test/KafkaConsumerTest.java (99%) create mode 100644 
skyeye-collector/skyeye-collector-metrics/build.gradle create mode 100644 skyeye-collector/skyeye-collector-metrics/settings.gradle rename skyeye-collector/{src/main/java/com/jthink/skyeye/collector/listener/HandleRebalanceForEvent.java => skyeye-collector-metrics/src/main/java/com/jthink/skyeye/collector/metrics/balancer/HandleRebalance.java} (67%) rename skyeye-collector/{src/main/java/com/jthink/skyeye/collector/service => skyeye-collector-metrics/src/main/java/com/jthink/skyeye/collector/metrics/cache}/CacheService.java (83%) rename skyeye-collector/{src/main/java/com/jthink/skyeye/collector => skyeye-collector-metrics/src/main/java/com/jthink/skyeye/collector/metrics}/configuration/zookeeper/ZookeeperConfiguration.java (94%) create mode 100644 skyeye-collector/skyeye-collector-metrics/src/main/java/com/jthink/skyeye/collector/metrics/launcher/Launcher.java rename skyeye-collector/{src/main/java/com/jthink/skyeye/collector/task/CollectTask.java => skyeye-collector-metrics/src/main/java/com/jthink/skyeye/collector/metrics/task/MetricsTask.java} (92%) rename skyeye-collector/{src/main/java/com/jthink/skyeye/collector => skyeye-collector-metrics/src/main/java/com/jthink/skyeye/collector/metrics}/task/job/ExceptionProcessor.java (98%) rename skyeye-collector/{src/main/java/com/jthink/skyeye/collector => skyeye-collector-metrics/src/main/java/com/jthink/skyeye/collector/metrics}/task/job/Indexer.java (95%) rename skyeye-collector/{src/main/java/com/jthink/skyeye/collector => skyeye-collector-metrics/src/main/java/com/jthink/skyeye/collector/metrics}/task/job/Job.java (95%) create mode 100644 skyeye-collector/skyeye-collector-metrics/src/main/java/com/jthink/skyeye/collector/metrics/task/job/JobConfiguration.java rename skyeye-collector/{src/main/java/com/jthink/skyeye/collector => skyeye-collector-metrics/src/main/java/com/jthink/skyeye/collector/metrics}/task/job/NameCollector.java (97%) rename skyeye-collector/{ => 
skyeye-collector-metrics}/src/main/resources/application.properties (75%) create mode 100644 skyeye-collector/skyeye-collector-metrics/src/main/resources/banner.txt rename skyeye-collector/{src/main/resources/properties/collector.properties => skyeye-collector-metrics/src/main/resources/config/collector-metrics.properties} (61%) rename skyeye-collector/{ => skyeye-collector-metrics}/src/main/resources/logback.xml (97%) delete mode 100644 skyeye-collector/src/main/java/com/google/common/base/Stopwatch.java delete mode 100644 skyeye-collector/src/main/java/com/jthink/skyeye/collector/task/Task.java delete mode 100644 skyeye-collector/src/main/resources/banner.txt delete mode 100644 skyeye-collector/src/main/resources/shell/es/app-log/create-index.py delete mode 100644 skyeye-collector/src/main/resources/shell/es/app-log/start.sh delete mode 100644 skyeye-collector/src/main/resources/shell/es/event-log/create-index.py delete mode 100644 skyeye-collector/src/main/resources/shell/es/event-log/start.sh delete mode 100644 skyeye-collector/src/main/resources/shell/es/install.sh delete mode 100644 skyeye-collector/src/main/resources/shell/hbase/hbase delete mode 100644 skyeye-collector/src/main/resources/shell/kafka/create-topic.sh diff --git a/skyeye-collector/settings.gradle b/skyeye-collector/settings.gradle index bf49417..28c071f 100644 --- a/skyeye-collector/settings.gradle +++ b/skyeye-collector/settings.gradle @@ -1,2 +1 @@ -include 'skyeye-collector-core', 'skyeye-collector-indexer', 'skyeye-collector-backup', 'skyeye-collector-trace' -//, 'skyeye-collector-metrics +include 'skyeye-collector-core', 'skyeye-collector-indexer', 'skyeye-collector-backup', 'skyeye-collector-trace' , 'skyeye-collector-metrics' diff --git a/skyeye-collector/src/test/java/com/jthink/skyeye/collector/test/KafkaConsumerTest.java b/skyeye-collector/skyeye-collector-core/src/test/java/com/jthink/skyeye/collector/core/test/KafkaConsumerTest.java similarity index 99% rename from 
skyeye-collector/src/test/java/com/jthink/skyeye/collector/test/KafkaConsumerTest.java rename to skyeye-collector/skyeye-collector-core/src/test/java/com/jthink/skyeye/collector/core/test/KafkaConsumerTest.java index 1c77d07..99d0128 100644 --- a/skyeye-collector/src/test/java/com/jthink/skyeye/collector/test/KafkaConsumerTest.java +++ b/skyeye-collector/skyeye-collector-core/src/test/java/com/jthink/skyeye/collector/core/test/KafkaConsumerTest.java @@ -1,4 +1,4 @@ -package com.jthink.skyeye.collector.test; +package com.jthink.skyeye.collector.core.test; import org.apache.kafka.clients.consumer.*; import org.apache.kafka.common.TopicPartition; @@ -9,8 +9,6 @@ import org.elasticsearch.client.transport.TransportClient; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.InetSocketTransportAddress; -import static org.elasticsearch.common.xcontent.XContentFactory.*; - import org.elasticsearch.common.xcontent.XContentBuilder; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -23,6 +21,8 @@ import java.util.HashMap; import java.util.Map; +import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; + /** * JThink@JThink * diff --git a/skyeye-collector/skyeye-collector-metrics/build.gradle b/skyeye-collector/skyeye-collector-metrics/build.gradle new file mode 100644 index 0000000..3d548d3 --- /dev/null +++ b/skyeye-collector/skyeye-collector-metrics/build.gradle @@ -0,0 +1,52 @@ +apply plugin: 'java' +apply plugin: 'eclipse' +apply plugin: 'maven' +apply plugin: 'org.springframework.boot' +apply plugin: 'application' + +ext { + dataVersion = '1.0.0' + zkclientVersion = '0.9.1-up' +} + +dependencies { + compile project(":skyeye-collector-core") + + compile "org.springframework.boot:spring-boot-starter-data-redis:$springBootVersion" + compile "skyeye:skyeye-data-jpa:$dataVersion" + compile "skyeye:skyeye-data-rabbitmq:$dataVersion" + compile "com.101tec:zkclient:$zkclientVersion" + compile 
"commons-lang:commons-lang:2.6" + + testCompile "org.springframework.boot:spring-boot-starter-test" +} + +configurations { + compile.exclude group: "log4j", module: "log4j" + compile.exclude group: "org.slf4j", module: "slf4j-log4j12" +} + +mainClassName = "com.jthink.skyeye.collector.metrics.launcher.Launcher" + +buildscript { + ext { + springBootVersion = '1.5.6.RELEASE' + } + + repositories { + mavenLocal() + maven { url mavenPublicUrl } + mavenCentral() + } + + dependencies { + classpath("org.springframework.boot:spring-boot-gradle-plugin:${springBootVersion}") + classpath("io.spring.gradle:dependency-management-plugin:1.0.3.RELEASE") + } +} + +startScripts { + doLast { + unixScript.text = unixScript.text.replaceAll("lib/(.*)\n", "lib/\\*") + } +} diff --git a/skyeye-collector/skyeye-collector-metrics/settings.gradle b/skyeye-collector/skyeye-collector-metrics/settings.gradle new file mode 100644 index 0000000..508d401 --- /dev/null +++ b/skyeye-collector/skyeye-collector-metrics/settings.gradle @@ -0,0 +1 @@ +rootProject.name = 'skyeye-collector-metrics' diff --git a/skyeye-collector/src/main/java/com/jthink/skyeye/collector/listener/HandleRebalanceForEvent.java b/skyeye-collector/skyeye-collector-metrics/src/main/java/com/jthink/skyeye/collector/metrics/balancer/HandleRebalance.java similarity index 67% rename from skyeye-collector/src/main/java/com/jthink/skyeye/collector/listener/HandleRebalanceForEvent.java rename to skyeye-collector/skyeye-collector-metrics/src/main/java/com/jthink/skyeye/collector/metrics/balancer/HandleRebalance.java index ee14a75..cd48de9 100644 --- a/skyeye-collector/src/main/java/com/jthink/skyeye/collector/listener/HandleRebalanceForEvent.java +++ b/skyeye-collector/skyeye-collector-metrics/src/main/java/com/jthink/skyeye/collector/metrics/balancer/HandleRebalance.java @@ -1,7 +1,7 @@ -package com.jthink.skyeye.collector.listener; +package com.jthink.skyeye.collector.metrics.balancer; -import 
com.jthink.skyeye.collector.configuration.kafka.KafkaProperties; -import com.jthink.skyeye.collector.task.CollectTask; +import com.jthink.skyeye.collector.core.configuration.kafka.KafkaProperties; +import com.jthink.skyeye.collector.metrics.task.MetricsTask; import org.apache.kafka.clients.consumer.ConsumerRebalanceListener; import org.apache.kafka.clients.consumer.KafkaConsumer; import org.apache.kafka.common.TopicPartition; @@ -23,19 +23,19 @@ * @date 2016-09-20 11:14:27 */ @Component -public class HandleRebalanceForEvent implements ConsumerRebalanceListener, InitializingBean { +public class HandleRebalance implements ConsumerRebalanceListener, InitializingBean { - private static final Logger LOGGER = LoggerFactory.getLogger(HandleRebalanceForEvent.class); + private static final Logger LOGGER = LoggerFactory.getLogger(HandleRebalance.class); @Autowired - private KafkaConsumer kafkaConsumerEvent; + private KafkaConsumer kafkaConsumer; @Autowired private KafkaProperties kafkaProperties; @Override public void onPartitionsRevoked(Collection partitions) { - this.kafkaConsumerEvent.commitSync(CollectTask.currentOffsets); + this.kafkaConsumer.commitSync(MetricsTask.currentOffsets); LOGGER.info("before rebalance, commit offset once"); } @@ -46,6 +46,6 @@ public void onPartitionsAssigned(Collection partitions) { @Override public void afterPropertiesSet() throws Exception { - this.kafkaConsumerEvent.subscribe(Arrays.asList(this.kafkaProperties.getTopic()), this); + this.kafkaConsumer.subscribe(Arrays.asList(this.kafkaProperties.getTopic()), this); } } \ No newline at end of file diff --git a/skyeye-collector/src/main/java/com/jthink/skyeye/collector/service/CacheService.java b/skyeye-collector/skyeye-collector-metrics/src/main/java/com/jthink/skyeye/collector/metrics/cache/CacheService.java similarity index 83% rename from skyeye-collector/src/main/java/com/jthink/skyeye/collector/service/CacheService.java rename to 
skyeye-collector/skyeye-collector-metrics/src/main/java/com/jthink/skyeye/collector/metrics/cache/CacheService.java index 354b9f0..b3f9057 100644 --- a/skyeye-collector/src/main/java/com/jthink/skyeye/collector/service/CacheService.java +++ b/skyeye-collector/skyeye-collector-metrics/src/main/java/com/jthink/skyeye/collector/metrics/cache/CacheService.java @@ -1,10 +1,10 @@ -package com.jthink.skyeye.collector.service; +package com.jthink.skyeye.collector.metrics.cache; +import com.jthink.skyeye.base.constant.Constants; +import com.jthink.skyeye.base.constant.NameInfoType; import com.jthink.skyeye.data.jpa.domain.NameInfo; import com.jthink.skyeye.data.jpa.domain.ServiceInfo; import com.jthink.skyeye.data.jpa.repository.NameInfoRepository; -import com.jthink.skyeye.base.constant.Constants; -import com.jthink.skyeye.base.constant.NameInfoType; import com.jthink.skyeye.data.jpa.repository.ServiceInfoRepository; import org.apache.commons.lang.time.StopWatch; import org.slf4j.Logger; @@ -35,8 +35,6 @@ public class CacheService implements InitializingBean { @Autowired private NameInfoRepository nameInfoRepository; - @Autowired - private ServiceInfoRepository serviceInfoRepository; @Autowired private StringRedisTemplate redisTemplate; @@ -46,15 +44,12 @@ public class CacheService implements InitializingBean { private static final String API_NAME_PREFIX = "jthink_monitor_collector_api_name"; private static final String ACCOUNT_NAME_PREFIX = "jthink_monitor_collector_account_name"; private static final String THIRD_NAME_PREFIX = "jthink_monitor_collector_third_name"; - private static final String SERVICE_INFO_PREFIX = "jthink_monitor_collector_service_info"; - public static final String SERVICE_INFO_TYPE = "service"; private static final Map mapping = new HashMap() { { put(NameInfoType.API.symbol(), API_NAME_PREFIX); put(NameInfoType.ACCOUNT.symbol(), ACCOUNT_NAME_PREFIX); put(NameInfoType.THIRD.symbol(), THIRD_NAME_PREFIX); - put(SERVICE_INFO_TYPE, SERVICE_INFO_PREFIX); } 
}; @@ -66,14 +61,6 @@ public void save(NameInfo nameInfo) { this.nameInfoRepository.save(nameInfo); } - /** - * 保存 - * @param serviceInfo - */ - public void save(ServiceInfo serviceInfo) { - this.serviceInfoRepository.save(serviceInfo); - } - /** * 根据采集的类型和值存入redis * @param type @@ -109,13 +96,6 @@ private void loadCache() { this.setOps.add(mapping.get(nameInfo.getNameInfoPK().getType()), nameInfo.getNameInfoPK().getName()); } - Iterable serviceInfos = this.serviceInfoRepository.findAll(); - - for (Iterator it = serviceInfos.iterator(); it.hasNext();) { - ServiceInfo serviceInfo = it.next(); - this.setOps.add(SERVICE_INFO_PREFIX, serviceInfo.getSid()); - } - sw.stop(); LOGGER.info("load config to cache end, cost {} ms", sw.getTime()); } diff --git a/skyeye-collector/src/main/java/com/jthink/skyeye/collector/configuration/zookeeper/ZookeeperConfiguration.java b/skyeye-collector/skyeye-collector-metrics/src/main/java/com/jthink/skyeye/collector/metrics/configuration/zookeeper/ZookeeperConfiguration.java similarity index 94% rename from skyeye-collector/src/main/java/com/jthink/skyeye/collector/configuration/zookeeper/ZookeeperConfiguration.java rename to skyeye-collector/skyeye-collector-metrics/src/main/java/com/jthink/skyeye/collector/metrics/configuration/zookeeper/ZookeeperConfiguration.java index 0a392fb..e0a8af0 100644 --- a/skyeye-collector/src/main/java/com/jthink/skyeye/collector/configuration/zookeeper/ZookeeperConfiguration.java +++ b/skyeye-collector/skyeye-collector-metrics/src/main/java/com/jthink/skyeye/collector/metrics/configuration/zookeeper/ZookeeperConfiguration.java @@ -1,4 +1,4 @@ -package com.jthink.skyeye.collector.configuration.zookeeper; +package com.jthink.skyeye.collector.metrics.configuration.zookeeper; import org.I0Itec.zkclient.ZkClient; import org.springframework.boot.context.properties.ConfigurationProperties; diff --git 
a/skyeye-collector/skyeye-collector-metrics/src/main/java/com/jthink/skyeye/collector/metrics/launcher/Launcher.java b/skyeye-collector/skyeye-collector-metrics/src/main/java/com/jthink/skyeye/collector/metrics/launcher/Launcher.java new file mode 100644 index 0000000..a16aeca --- /dev/null +++ b/skyeye-collector/skyeye-collector-metrics/src/main/java/com/jthink/skyeye/collector/metrics/launcher/Launcher.java @@ -0,0 +1,58 @@ +package com.jthink.skyeye.collector.metrics.launcher; + +import com.jthink.skyeye.collector.core.hook.ShutdownHookRunner; +import com.jthink.skyeye.collector.core.task.Task; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.boot.autoconfigure.EnableAutoConfiguration; +import org.springframework.boot.autoconfigure.SpringBootApplication; +import org.springframework.boot.builder.SpringApplicationBuilder; +import org.springframework.boot.logging.LoggingApplicationListener; +import org.springframework.context.ApplicationListener; +import org.springframework.context.ConfigurableApplicationContext; +import org.springframework.context.annotation.ComponentScan; +import org.springframework.context.annotation.PropertySource; + +import java.util.Iterator; +import java.util.Set; + +/** + * JThink@JThink + * + * @author JThink + * @version 0.0.1 + * @desc 项目启动器 + * @date 2016-08-24 18:31:48 + */ +@SpringBootApplication +@EnableAutoConfiguration +@ComponentScan(basePackages={"com.jthink.skyeye.collector.core", "com.jthink.skyeye.collector.metrics"}) +@PropertySource("file:/opt/jthink/jthink-config/skyeye/collector/collector-metrics.properties") +//@PropertySource("classpath:properties/collector-metrics.properties") +public class Launcher { + + private static final Logger LOGGER = LoggerFactory.getLogger(Launcher.class); + + public static void main(String[] args) { + SpringApplicationBuilder builder = new SpringApplicationBuilder(Launcher.class); + Set> listeners = 
builder.application().getListeners(); + for (Iterator> it = listeners.iterator(); it.hasNext();) { + ApplicationListener listener = it.next(); + if (listener instanceof LoggingApplicationListener) { + it.remove(); + } + } + builder.application().setListeners(listeners); + ConfigurableApplicationContext context = builder.run(args); + LOGGER.info("collector metrics start successfully"); + + KafkaConsumer kafkaConsumer = (KafkaConsumer) context.getBean("kafkaConsumer"); + Task task = (Task) context.getBean("metricsTask"); + + // 优雅停止项目 + Runtime.getRuntime().addShutdownHook(new ShutdownHookRunner(kafkaConsumer, task)); + task.doTask(); + } + +} diff --git a/skyeye-collector/src/main/java/com/jthink/skyeye/collector/task/CollectTask.java b/skyeye-collector/skyeye-collector-metrics/src/main/java/com/jthink/skyeye/collector/metrics/task/MetricsTask.java similarity index 92% rename from skyeye-collector/src/main/java/com/jthink/skyeye/collector/task/CollectTask.java rename to skyeye-collector/skyeye-collector-metrics/src/main/java/com/jthink/skyeye/collector/metrics/task/MetricsTask.java index a8d26b4..70fb5fa 100644 --- a/skyeye-collector/src/main/java/com/jthink/skyeye/collector/task/CollectTask.java +++ b/skyeye-collector/skyeye-collector-metrics/src/main/java/com/jthink/skyeye/collector/metrics/task/MetricsTask.java @@ -1,12 +1,13 @@ -package com.jthink.skyeye.collector.task; +package com.jthink.skyeye.collector.metrics.task; -import com.jthink.skyeye.collector.callback.KafkaOffsetCommitCallback; -import com.jthink.skyeye.collector.configuration.kafka.KafkaProperties; -import com.jthink.skyeye.collector.task.job.Indexer; import com.jthink.skyeye.base.constant.EventType; import com.jthink.skyeye.base.dto.ApiLog; import com.jthink.skyeye.base.dto.EventLog; import com.jthink.skyeye.base.dto.LogDto; +import com.jthink.skyeye.collector.core.callback.KafkaOffsetCommitCallback; +import com.jthink.skyeye.collector.core.configuration.kafka.KafkaProperties; +import 
com.jthink.skyeye.collector.core.task.Task; +import com.jthink.skyeye.collector.metrics.task.job.Indexer; import org.apache.kafka.clients.consumer.ConsumerRecord; import org.apache.kafka.clients.consumer.ConsumerRecords; import org.apache.kafka.clients.consumer.KafkaConsumer; @@ -20,7 +21,8 @@ import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; -import java.util.*; +import java.util.HashMap; +import java.util.Map; /** * JThink@JThink @@ -31,9 +33,9 @@ * @date 2016-11-21 15:33:55 */ @Component -public class CollectTask implements Task { +public class MetricsTask implements Task { - private static final Logger LOGGER = LoggerFactory.getLogger(CollectTask.class); + private static final Logger LOGGER = LoggerFactory.getLogger(MetricsTask.class); @Autowired private KafkaConsumer kafkaConsumerEvent; diff --git a/skyeye-collector/src/main/java/com/jthink/skyeye/collector/task/job/ExceptionProcessor.java b/skyeye-collector/skyeye-collector-metrics/src/main/java/com/jthink/skyeye/collector/metrics/task/job/ExceptionProcessor.java similarity index 98% rename from skyeye-collector/src/main/java/com/jthink/skyeye/collector/task/job/ExceptionProcessor.java rename to skyeye-collector/skyeye-collector-metrics/src/main/java/com/jthink/skyeye/collector/metrics/task/job/ExceptionProcessor.java index cadeddd..29807b6 100644 --- a/skyeye-collector/src/main/java/com/jthink/skyeye/collector/task/job/ExceptionProcessor.java +++ b/skyeye-collector/skyeye-collector-metrics/src/main/java/com/jthink/skyeye/collector/metrics/task/job/ExceptionProcessor.java @@ -1,12 +1,12 @@ -package com.jthink.skyeye.collector.task.job; +package com.jthink.skyeye.collector.metrics.task.job; -import com.jthink.skyeye.data.rabbitmq.service.RabbitmqService; import com.jthink.skyeye.base.constant.Constants; import com.jthink.skyeye.base.constant.EventType; import com.jthink.skyeye.base.dto.AlertDto; import com.jthink.skyeye.base.dto.ApiLog; import 
com.jthink.skyeye.base.dto.EventLog; import com.jthink.skyeye.base.dto.LogDto; +import com.jthink.skyeye.data.rabbitmq.service.RabbitmqService; import org.I0Itec.zkclient.ZkClient; import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.slf4j.Logger; diff --git a/skyeye-collector/src/main/java/com/jthink/skyeye/collector/task/job/Indexer.java b/skyeye-collector/skyeye-collector-metrics/src/main/java/com/jthink/skyeye/collector/metrics/task/job/Indexer.java similarity index 95% rename from skyeye-collector/src/main/java/com/jthink/skyeye/collector/task/job/Indexer.java rename to skyeye-collector/skyeye-collector-metrics/src/main/java/com/jthink/skyeye/collector/metrics/task/job/Indexer.java index 43682ba..5e9529f 100644 --- a/skyeye-collector/src/main/java/com/jthink/skyeye/collector/task/job/Indexer.java +++ b/skyeye-collector/skyeye-collector-metrics/src/main/java/com/jthink/skyeye/collector/metrics/task/job/Indexer.java @@ -1,12 +1,12 @@ -package com.jthink.skyeye.collector.task.job; +package com.jthink.skyeye.collector.metrics.task.job; -import com.jthink.skyeye.collector.configuration.es.EsProperties; import com.jthink.skyeye.base.constant.Constants; import com.jthink.skyeye.base.constant.EventType; import com.jthink.skyeye.base.dto.ApiLog; import com.jthink.skyeye.base.dto.EventLog; import com.jthink.skyeye.base.dto.LogDto; import com.jthink.skyeye.base.util.DateUtil; +import com.jthink.skyeye.collector.core.configuration.es.EsProperties; import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.client.transport.TransportClient; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -42,7 +42,7 @@ public void doJob(EventLog log, LogDto logDto, BulkRequestBuilder bulkRequest) { // 进行索引(for kibana),包含api调用、第三方调用、中间件调用 if (this.getTypes().indexOf(log.getEventType()) != -1 && (log.getStatus().equals(EventLog.MONITOR_STATUS_FAILED) || log.getStatus().equals(EventLog.MONITOR_STATUS_SUCCESS))) { try { - 
bulkRequest.add(transportClient.prepareIndex(this.esProperties.getIndexEvent(), this.esProperties.getDocEvent()) + bulkRequest.add(transportClient.prepareIndex(this.esProperties.getIndex(), this.esProperties.getDoc()) .setSource(this.buildXContentBuilder(log, logDto))); } catch (IOException e) { LOGGER.error("构造一条es入库数据失败, {]", logDto); diff --git a/skyeye-collector/src/main/java/com/jthink/skyeye/collector/task/job/Job.java b/skyeye-collector/skyeye-collector-metrics/src/main/java/com/jthink/skyeye/collector/metrics/task/job/Job.java similarity index 95% rename from skyeye-collector/src/main/java/com/jthink/skyeye/collector/task/job/Job.java rename to skyeye-collector/skyeye-collector-metrics/src/main/java/com/jthink/skyeye/collector/metrics/task/job/Job.java index f705b6b..ba3b6e1 100644 --- a/skyeye-collector/src/main/java/com/jthink/skyeye/collector/task/job/Job.java +++ b/skyeye-collector/skyeye-collector-metrics/src/main/java/com/jthink/skyeye/collector/metrics/task/job/Job.java @@ -1,4 +1,4 @@ -package com.jthink.skyeye.collector.task.job; +package com.jthink.skyeye.collector.metrics.task.job; import com.jthink.skyeye.base.constant.EventType; import com.jthink.skyeye.base.dto.EventLog; diff --git a/skyeye-collector/skyeye-collector-metrics/src/main/java/com/jthink/skyeye/collector/metrics/task/job/JobConfiguration.java b/skyeye-collector/skyeye-collector-metrics/src/main/java/com/jthink/skyeye/collector/metrics/task/job/JobConfiguration.java new file mode 100644 index 0000000..03912a3 --- /dev/null +++ b/skyeye-collector/skyeye-collector-metrics/src/main/java/com/jthink/skyeye/collector/metrics/task/job/JobConfiguration.java @@ -0,0 +1,66 @@ +package com.jthink.skyeye.collector.metrics.task.job; + +import com.jthink.skyeye.base.constant.EventType; +import com.jthink.skyeye.collector.core.configuration.es.EsProperties; +import com.jthink.skyeye.collector.metrics.cache.CacheService; +import com.jthink.skyeye.data.rabbitmq.service.RabbitmqService; +import 
org.I0Itec.zkclient.ZkClient; +import org.elasticsearch.client.transport.TransportClient; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; + +import java.util.Arrays; +import java.util.List; + +/** + * JThink@JThink + * + * @author JThink + * @version 0.0.1 + * @desc 责任链组装3个job + * @date 2017-08-04 09:23:57 + */ +@Configuration +public class JobConfiguration { + + @Autowired + private RabbitmqService rabbitmqService; + @Autowired + private ZkClient zkClient; + @Autowired + private CacheService cacheService; + @Autowired + private EsProperties esProperties; + @Autowired + private TransportClient transportClient; + + // 以下3个bean进行责任链的生成和组装 + @Bean + public ExceptionProcessor exceptionProcessor() { + List exceptionProcesses = Arrays.asList(EventType.job_execute, EventType.thirdparty_call, EventType.middleware_opt, EventType.invoke_interface); + ExceptionProcessor exceptionProcessor = new ExceptionProcessor(exceptionProcesses); + exceptionProcessor.setRabbitmqService(this.rabbitmqService); + exceptionProcessor.setZkClient(this.zkClient); + return exceptionProcessor; + } + + @Bean + public NameCollector nameCollector(ExceptionProcessor exceptionProcessor) { + List names = Arrays.asList(EventType.invoke_interface, EventType.thirdparty_call); + NameCollector nameCollector = new NameCollector(names); + nameCollector.setNextJob(exceptionProcessor); + nameCollector.setCacheService(this.cacheService); + return nameCollector; + } + + @Bean + public Indexer indexer(NameCollector nameCollector) { + List indexes = Arrays.asList(EventType.job_execute, EventType.thirdparty_call, EventType.middleware_opt, EventType.invoke_interface); + Indexer indexer = new Indexer(indexes); + indexer.setNextJob(nameCollector); + indexer.setEsProperties(this.esProperties); + indexer.setTransportClient(this.transportClient); + return indexer; + } +} diff --git 
a/skyeye-collector/src/main/java/com/jthink/skyeye/collector/task/job/NameCollector.java b/skyeye-collector/skyeye-collector-metrics/src/main/java/com/jthink/skyeye/collector/metrics/task/job/NameCollector.java similarity index 97% rename from skyeye-collector/src/main/java/com/jthink/skyeye/collector/task/job/NameCollector.java rename to skyeye-collector/skyeye-collector-metrics/src/main/java/com/jthink/skyeye/collector/metrics/task/job/NameCollector.java index ef958cf..b3a7c81 100644 --- a/skyeye-collector/src/main/java/com/jthink/skyeye/collector/task/job/NameCollector.java +++ b/skyeye-collector/skyeye-collector-metrics/src/main/java/com/jthink/skyeye/collector/metrics/task/job/NameCollector.java @@ -1,13 +1,13 @@ -package com.jthink.skyeye.collector.task.job; +package com.jthink.skyeye.collector.metrics.task.job; -import com.jthink.skyeye.data.jpa.domain.NameInfo; -import com.jthink.skyeye.data.jpa.pk.NameInfoPK; -import com.jthink.skyeye.collector.service.CacheService; import com.jthink.skyeye.base.constant.EventType; import com.jthink.skyeye.base.constant.NameInfoType; import com.jthink.skyeye.base.dto.ApiLog; import com.jthink.skyeye.base.dto.EventLog; import com.jthink.skyeye.base.dto.LogDto; +import com.jthink.skyeye.collector.metrics.cache.CacheService; +import com.jthink.skyeye.data.jpa.domain.NameInfo; +import com.jthink.skyeye.data.jpa.pk.NameInfoPK; import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/skyeye-collector/src/main/resources/application.properties b/skyeye-collector/skyeye-collector-metrics/src/main/resources/application.properties similarity index 75% rename from skyeye-collector/src/main/resources/application.properties rename to skyeye-collector/skyeye-collector-metrics/src/main/resources/application.properties index 2b1b0c2..833707c 100644 --- a/skyeye-collector/src/main/resources/application.properties +++ 
b/skyeye-collector/skyeye-collector-metrics/src/main/resources/application.properties @@ -1,13 +1,8 @@ # kafka config spring.message.kafka.brokers=${kafka.brokers} spring.message.kafka.topic=${kafka.topic} -spring.message.kafka.indexerGroup=${kafka.group.indexer} +spring.message.kafka.consumeGroup=${kafka.consume.group} spring.message.kafka.pollTimeout=${kafka.poll.timeout} -spring.message.kafka.collectGroup=${kafka.group.collect} -spring.message.kafka.backupGroup=${kafka.group.backup} -spring.message.kafka.rpcTraceGroup=${kafka.group.rpc.trace} -spring.message.kafka.fileRoot=${kafka.hdfs.file.root} -spring.message.kafka.serverId=${kafka.hdfs.file.server.id} # es config spring.indexer.es.ips=${es.ips} @@ -16,8 +11,6 @@ spring.indexer.es.port=${es.port} spring.indexer.es.sniff=${es.sniff} spring.indexer.es.index=${es.index} spring.indexer.es.doc=${es.doc} -spring.indexer.es.indexEvent=${es.index.event} -spring.indexer.es.docEvent=${es.doc.event} # redis config spring.redis.database=6 @@ -71,13 +64,3 @@ spring.queue.rabbitmq.routingKey=${rabbit.request.routingKey} spring.coordinate.zookeeper.zkServers=${zookeeper.zkServers} spring.coordinate.zookeeper.sessionTimeout=${zookeeper.sessionTimeout} spring.coordinate.zookeeper.connectionTimeout=${zookeeper.connectionTimeout} - -# hdfs -spring.bigdata.hadoop.hdfs.port=${hadoop.hdfs.namenode.port} -spring.bigdata.hadoop.hdfs.host=${hadoop.hdfs.namenode.host} -spring.bigdata.hadoop.hdfs.user=${hadoop.hdfs.user} -spring.bigdata.hadoop.hdfs.baseDir=${hadoop.hdfs.baseDir} -spring.upload.log.cron=${upload.log.cron} - -# hbase config -spring.data.hbase.quorum=${hbase.quorum} \ No newline at end of file diff --git a/skyeye-collector/skyeye-collector-metrics/src/main/resources/banner.txt b/skyeye-collector/skyeye-collector-metrics/src/main/resources/banner.txt new file mode 100644 index 0000000..fbe7ade --- /dev/null +++ b/skyeye-collector/skyeye-collector-metrics/src/main/resources/banner.txt @@ -0,0 +1,6 @@ +███████╗██╗ ██╗██╗ 
██╗███████╗██╗ ██╗███████╗ ██████╗ ██████╗ ██╗ ██╗ ███████╗ ██████╗████████╗ ██████╗ ██████╗ +██╔════╝██║ ██╔╝╚██╗ ██╔╝██╔════╝╚██╗ ██╔╝██╔════╝ ██╔════╝██╔═══██╗██║ ██║ ██╔════╝██╔════╝╚══██╔══╝██╔═══██╗██╔══██╗ +███████╗█████╔╝ ╚████╔╝ █████╗ ╚████╔╝ █████╗█████╗██║ ██║ ██║██║ ██║ █████╗ ██║ ██║ ██║ ██║██████╔╝ +╚════██║██╔═██╗ ╚██╔╝ ██╔══╝ ╚██╔╝ ██╔══╝╚════╝██║ ██║ ██║██║ ██║ ██╔══╝ ██║ ██║ ██║ ██║██╔══██╗ +███████║██║ ██╗ ██║ ███████╗ ██║ ███████╗ ╚██████╗╚██████╔╝███████╗███████╗███████╗╚██████╗ ██║ ╚██████╔╝██║ ██║ +╚══════╝╚═╝ ╚═╝ ╚═╝ ╚══════╝ ╚═╝ ╚══════╝ ╚═════╝ ╚═════╝ ╚══════╝╚══════╝╚══════╝ ╚═════╝ ╚═╝ ╚═════╝ ╚═╝ ╚═╝ diff --git a/skyeye-collector/src/main/resources/properties/collector.properties b/skyeye-collector/skyeye-collector-metrics/src/main/resources/config/collector-metrics.properties similarity index 61% rename from skyeye-collector/src/main/resources/properties/collector.properties rename to skyeye-collector/skyeye-collector-metrics/src/main/resources/config/collector-metrics.properties index 1b86e61..98920ae 100644 --- a/skyeye-collector/src/main/resources/properties/collector.properties +++ b/skyeye-collector/skyeye-collector-metrics/src/main/resources/config/collector-metrics.properties @@ -1,13 +1,8 @@ # kafka config kafka.brokers=riot01:9092,riot02:9092,riot03:9092 kafka.topic=app-log -kafka.group.indexer=es-indexer-consume-group +kafka.consume.group=info-collect-consume-group kafka.poll.timeout=100 -kafka.group.collect=info-collect-consume-group -kafka.group.backup=log-backup-consume-group -kafka.group.rpc.trace=rpc-trace-consume-group -kafka.hdfs.file.root=/tmp/monitor-center/ -kafka.hdfs.file.server.id=0 # es config es.ips=riot01,riot02,riot03 @@ -16,8 +11,6 @@ es.port=9300 es.sniff=true es.index=app-log es.doc=log -es.index.event=event-log -es.doc.event=log # redis config redis.host=localhost @@ -44,13 +37,3 @@ rabbit.request.routingKey=log.key zookeeper.zkServers=riot01:2181,riot02:2181,riot03:2181 zookeeper.sessionTimeout=60000 
zookeeper.connectionTimeout=5000 - -# hdfs -hadoop.hdfs.namenode.port=8020 -hadoop.hdfs.namenode.host=192.168.88.131 -hadoop.hdfs.user=qianjicheng -hadoop.hdfs.baseDir=/user/qianjicheng/JThink/ -upload.log.cron=0 30 0 * * ? - -# hbase config -hbase.quorum=panda-01 \ No newline at end of file diff --git a/skyeye-collector/src/main/resources/logback.xml b/skyeye-collector/skyeye-collector-metrics/src/main/resources/logback.xml similarity index 97% rename from skyeye-collector/src/main/resources/logback.xml rename to skyeye-collector/skyeye-collector-metrics/src/main/resources/logback.xml index 4096abc..ad6dff2 100644 --- a/skyeye-collector/src/main/resources/logback.xml +++ b/skyeye-collector/skyeye-collector-metrics/src/main/resources/logback.xml @@ -1,7 +1,8 @@ - + + ${APP_NAME} @@ -53,4 +54,5 @@ + \ No newline at end of file diff --git a/skyeye-collector/skyeye-collector-trace/build.gradle b/skyeye-collector/skyeye-collector-trace/build.gradle index 89edcf2..ed7f2cf 100644 --- a/skyeye-collector/skyeye-collector-trace/build.gradle +++ b/skyeye-collector/skyeye-collector-trace/build.gradle @@ -26,7 +26,7 @@ configurations { compile.exclude group: "org.slf4j", module: "slf4j-log4j12" } -mainClassName = "com.jthink.skyeye.collector.backup.launcher.Launcher" +mainClassName = "com.jthink.skyeye.collector.trace.launcher.Launcher" buildscript { ext { diff --git a/skyeye-collector/src/main/java/com/google/common/base/Stopwatch.java b/skyeye-collector/src/main/java/com/google/common/base/Stopwatch.java deleted file mode 100644 index b5c20a2..0000000 --- a/skyeye-collector/src/main/java/com/google/common/base/Stopwatch.java +++ /dev/null @@ -1,267 +0,0 @@ -/* - * Copyright (C) 2008 The Guava Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.google.common.base; - -import static com.google.common.base.Preconditions.checkNotNull; -import static com.google.common.base.Preconditions.checkState; -import static java.util.concurrent.TimeUnit.DAYS; -import static java.util.concurrent.TimeUnit.HOURS; -import static java.util.concurrent.TimeUnit.MICROSECONDS; -import static java.util.concurrent.TimeUnit.MILLISECONDS; -import static java.util.concurrent.TimeUnit.MINUTES; -import static java.util.concurrent.TimeUnit.NANOSECONDS; -import static java.util.concurrent.TimeUnit.SECONDS; - -import com.google.common.annotations.Beta; -import com.google.common.annotations.GwtCompatible; -import com.google.common.annotations.GwtIncompatible; - -import java.util.concurrent.TimeUnit; - -/** - * An object that measures elapsed time in nanoseconds. It is useful to measure - * elapsed time using this class instead of direct calls to {@link - * System#nanoTime} for a few reasons: - * - *

    - *
  • An alternate time source can be substituted, for testing or performance - * reasons. - *
  • As documented by {@code nanoTime}, the value returned has no absolute - * meaning, and can only be interpreted as relative to another timestamp - * returned by {@code nanoTime} at a different time. {@code Stopwatch} is a - * more effective abstraction because it exposes only these relative values, - * not the absolute ones. - *
- * - *

Basic usage: - *

- *   Stopwatch stopwatch = Stopwatch.{@link #createStarted createStarted}();
- *   doSomething();
- *   stopwatch.{@link #stop stop}(); // optional
- *
- *   long millis = stopwatch.elapsed(MILLISECONDS);
- *
- *   log.info("time: " + stopwatch); // formatted string like "12.3 ms"
- * - *

Stopwatch methods are not idempotent; it is an error to start or stop a - * stopwatch that is already in the desired state. - * - *

When testing code that uses this class, use - * {@link #createUnstarted(Ticker)} or {@link #createStarted(Ticker)} to - * supply a fake or mock ticker. - * This allows you to - * simulate any valid behavior of the stopwatch. - * - *

Note: This class is not thread-safe. - * - * @author Kevin Bourrillion - * @since 10.0 - */ -@Beta -@GwtCompatible(emulated = true) -public final class Stopwatch { - private final Ticker ticker; - private boolean isRunning; - private long elapsedNanos; - private long startTick; - - /** - * Creates (but does not start) a new stopwatch using {@link System#nanoTime} - * as its time source. - * - * @since 15.0 - */ - public static Stopwatch createUnstarted() { - return new Stopwatch(); - } - - /** - * Creates (but does not start) a new stopwatch, using the specified time - * source. - * - * @since 15.0 - */ - public static Stopwatch createUnstarted(Ticker ticker) { - return new Stopwatch(ticker); - } - - /** - * Creates (and starts) a new stopwatch using {@link System#nanoTime} - * as its time source. - * - * @since 15.0 - */ - public static Stopwatch createStarted() { - return new Stopwatch().start(); - } - - /** - * Creates (and starts) a new stopwatch, using the specified time - * source. - * - * @since 15.0 - */ - public static Stopwatch createStarted(Ticker ticker) { - return new Stopwatch(ticker).start(); - } - - /** - * Creates (but does not start) a new stopwatch using {@link System#nanoTime} - * as its time source. - * - * @deprecated Use {@link Stopwatch#createUnstarted()} instead. - */ - @Deprecated - public Stopwatch() { - this(Ticker.systemTicker()); - } - - /** - * Creates (but does not start) a new stopwatch, using the specified time - * source. - * - * @deprecated Use {@link Stopwatch#createUnstarted(Ticker)} instead. - */ - @Deprecated - Stopwatch(Ticker ticker) { - this.ticker = checkNotNull(ticker, "ticker"); - } - - /** - * Returns {@code true} if {@link #start()} has been called on this stopwatch, - * and {@link #stop()} has not been called since the last call to {@code - * start()}. - */ - public boolean isRunning() { - return isRunning; - } - - /** - * Starts the stopwatch. 
- * - * @return this {@code Stopwatch} instance - * @throws IllegalStateException if the stopwatch is already running. - */ - public Stopwatch start() { - checkState(!isRunning, "This stopwatch is already running."); - isRunning = true; - startTick = ticker.read(); - return this; - } - - /** - * Stops the stopwatch. Future reads will return the fixed duration that had - * elapsed up to this point. - * - * @return this {@code Stopwatch} instance - * @throws IllegalStateException if the stopwatch is already stopped. - */ - public Stopwatch stop() { - long tick = ticker.read(); - checkState(isRunning, "This stopwatch is already stopped."); - isRunning = false; - elapsedNanos += tick - startTick; - return this; - } - - /** - * Sets the elapsed time for this stopwatch to zero, - * and places it in a stopped state. - * - * @return this {@code Stopwatch} instance - */ - public Stopwatch reset() { - elapsedNanos = 0; - isRunning = false; - return this; - } - - private long elapsedNanos() { - return isRunning ? ticker.read() - startTick + elapsedNanos : elapsedNanos; - } - - /** - * Returns the current elapsed time shown on this stopwatch, expressed - * in the desired time unit, with any fraction rounded down. - * - *

Note that the overhead of measurement can be more than a microsecond, so - * it is generally not useful to specify {@link TimeUnit#NANOSECONDS} - * precision here. - * - * @since 14.0 (since 10.0 as {@code elapsedTime()}) - */ - public long elapsed(TimeUnit desiredUnit) { - return desiredUnit.convert(elapsedNanos(), NANOSECONDS); - } - - /** - * Returns a string representation of the current elapsed time. - */ - @GwtIncompatible("String.format()") - @Override public String toString() { - long nanos = elapsedNanos(); - - TimeUnit unit = chooseUnit(nanos); - double value = (double) nanos / NANOSECONDS.convert(1, unit); - - // Too bad this functionality is not exposed as a regular method call - return String.format("%.4g %s", value, abbreviate(unit)); - } - - private static TimeUnit chooseUnit(long nanos) { - if (DAYS.convert(nanos, NANOSECONDS) > 0) { - return DAYS; - } - if (HOURS.convert(nanos, NANOSECONDS) > 0) { - return HOURS; - } - if (MINUTES.convert(nanos, NANOSECONDS) > 0) { - return MINUTES; - } - if (SECONDS.convert(nanos, NANOSECONDS) > 0) { - return SECONDS; - } - if (MILLISECONDS.convert(nanos, NANOSECONDS) > 0) { - return MILLISECONDS; - } - if (MICROSECONDS.convert(nanos, NANOSECONDS) > 0) { - return MICROSECONDS; - } - return NANOSECONDS; - } - - private static String abbreviate(TimeUnit unit) { - switch (unit) { - case NANOSECONDS: - return "ns"; - case MICROSECONDS: - return "\u03bcs"; // μs - case MILLISECONDS: - return "ms"; - case SECONDS: - return "s"; - case MINUTES: - return "min"; - case HOURS: - return "h"; - case DAYS: - return "d"; - default: - throw new AssertionError(); - } - } -} diff --git a/skyeye-collector/src/main/java/com/jthink/skyeye/collector/task/Task.java b/skyeye-collector/src/main/java/com/jthink/skyeye/collector/task/Task.java deleted file mode 100644 index 99cf817..0000000 --- a/skyeye-collector/src/main/java/com/jthink/skyeye/collector/task/Task.java +++ /dev/null @@ -1,19 +0,0 @@ -package 
com.jthink.skyeye.collector.task; - -/** - * JThink@JThink - * - * @author JThink - * @version 0.0.1 - * @desc kafka消费task - * @date 2016-09-20 10:24:24 - */ -public interface Task extends Runnable { - - /** - * 执行task - */ - void doTask(); - - Thread executeThread(); -} diff --git a/skyeye-collector/src/main/resources/banner.txt b/skyeye-collector/src/main/resources/banner.txt deleted file mode 100644 index e69de29..0000000 diff --git a/skyeye-collector/src/main/resources/shell/es/app-log/create-index.py b/skyeye-collector/src/main/resources/shell/es/app-log/create-index.py deleted file mode 100644 index 5b05503..0000000 --- a/skyeye-collector/src/main/resources/shell/es/app-log/create-index.py +++ /dev/null @@ -1,59 +0,0 @@ -#!/usr/bin/python198 -# -*- coding: UTF-8 -*- - -import sys -import datetime -from pyelasticsearch import ElasticSearch -from pyelasticsearch import bulk_chunks - -def main(argv): - index = argv[1] - doc_type = 'log' - url = [] - urls = argv[2].strip().split(',') - for u in urls: - url.append(u) - - es = ElasticSearch(urls = url, timeout = 60, max_retries = 0) - create_mapping(es, index, doc_type) - -def create_mapping(es, index, doc_type): - mapping = { - 'settings': { - 'index': { - 'number_of_replicas': 1, - 'number_of_shards': 6, - 'refresh_interval': '5s' - } - }, - 'mappings': { - '_default_': { - '_all': { - 'enabled': False - } - }, - doc_type : { - 'properties' : { - 'day': { 'type': 'string', 'index': 'not_analyzed'}, - 'time': { 'type': 'string', 'index': 'not_analyzed'}, - 'nanoTime': { 'type': 'string', 'index': 'not_analyzed'}, - 'created': { 'type': 'date', 'index': 'not_analyzed'}, - 'app': { 'type': 'string', 'index': 'not_analyzed'}, - 'host': { 'type': 'string', 'index': 'not_analyzed'}, - 'thread': { 'type': 'string', 'index': 'not_analyzed'}, - 'level': { 'type': 'string', 'index': 'not_analyzed'}, - 'eventType': { 'type': 'string', 'index': 'not_analyzed'}, - 'pack': { 'type': 'string', 'index': 'not_analyzed'}, - 
'clazz': { 'type': 'string', 'index': 'not_analyzed'}, - 'line': { 'type': 'string', 'index': 'not_analyzed'}, - 'messageSmart': { 'type': 'string', 'analyzer': 'ik_smart', 'search_analyzer': 'ik_smart', 'include_in_all': 'true', 'boost': 8}, - 'messageMax': { 'type': 'string', 'analyzer': 'ik_max_word', 'search_analyzer': 'ik_max_word', 'include_in_all': 'true', 'boost': 8} - } - } - } - } - es.create_index(index = index, settings = mapping) - - -if __name__ == '__main__': - main(sys.argv) \ No newline at end of file diff --git a/skyeye-collector/src/main/resources/shell/es/app-log/start.sh b/skyeye-collector/src/main/resources/shell/es/app-log/start.sh deleted file mode 100644 index d38d58b..0000000 --- a/skyeye-collector/src/main/resources/shell/es/app-log/start.sh +++ /dev/null @@ -1,12 +0,0 @@ -#!/bin/bash - -# 索引 -# urls http://192.168.88.70:9200,http://192.168.88.71:9200,http://192.168.88.72:9200 -# call: bash start.sh app-log http://192.168.88.70:9200,http://192.168.88.71:9200,http://192.168.88.72:9200 - -index=$1 -urls=$2 - -echo '创建索引开始' -python create-index.py ${index} ${urls} -echo '创建索引结束' \ No newline at end of file diff --git a/skyeye-collector/src/main/resources/shell/es/event-log/create-index.py b/skyeye-collector/src/main/resources/shell/es/event-log/create-index.py deleted file mode 100644 index bada3c6..0000000 --- a/skyeye-collector/src/main/resources/shell/es/event-log/create-index.py +++ /dev/null @@ -1,60 +0,0 @@ -#!/usr/bin/python198 -# -*- coding: UTF-8 -*- - -import sys -import datetime -from pyelasticsearch import ElasticSearch -from pyelasticsearch import bulk_chunks - -def main(argv): - index = argv[1] - doc_type = 'log' - url = [] - urls = argv[2].strip().split(',') - for u in urls: - url.append(u) - - es = ElasticSearch(urls = url, timeout = 60, max_retries = 0) - create_mapping(es, index, doc_type) - -def create_mapping(es, index, doc_type): - mapping = { - 'settings': { - 'index': { - 'number_of_replicas': 1, - 'number_of_shards': 
6, - 'refresh_interval': '5s' - } - }, - 'mappings': { - '_default_': { - '_all': { - 'enabled': False - } - }, - doc_type : { - 'properties' : { - 'created': { 'type': 'date', 'index': 'not_analyzed'}, - 'time': { 'type': 'string', 'index': 'not_analyzed'}, - 'day': { 'type': 'string', 'index': 'not_analyzed'}, - 'week': { 'type': 'string', 'index': 'not_analyzed'}, - 'month': { 'type': 'string', 'index': 'not_analyzed'}, - 'year': { 'type': 'string', 'index': 'not_analyzed'}, - 'app': { 'type': 'string', 'index': 'not_analyzed'}, - 'host': { 'type': 'string', 'index': 'not_analyzed'}, - 'eventType': { 'type': 'string', 'index': 'not_analyzed'}, - 'account': { 'type': 'string', 'index': 'not_analyzed'}, - 'uniqueName': { 'type': 'string', 'index': 'not_analyzed'}, - 'cost': { 'type': 'long', 'index': 'not_analyzed'}, - 'status': { 'type': 'string', 'index': 'not_analyzed'}, - 'messageSmart': { 'type': 'string', 'analyzer': 'ik_smart', 'search_analyzer': 'ik_smart', 'include_in_all': 'true', 'boost': 8}, - 'messageMax': { 'type': 'string', 'analyzer': 'ik_max_word', 'search_analyzer': 'ik_max_word', 'include_in_all': 'true', 'boost': 8} - } - } - } - } - es.create_index(index = index, settings = mapping) - - -if __name__ == '__main__': - main(sys.argv) \ No newline at end of file diff --git a/skyeye-collector/src/main/resources/shell/es/event-log/start.sh b/skyeye-collector/src/main/resources/shell/es/event-log/start.sh deleted file mode 100644 index 4ef4c2e..0000000 --- a/skyeye-collector/src/main/resources/shell/es/event-log/start.sh +++ /dev/null @@ -1,12 +0,0 @@ -#!/bin/bash - -# 索引 -# urls http://192.168.88.70:9200,http://192.168.88.71:9200,http://192.168.88.72:9200 -# call: bash start.sh event-log http://192.168.88.70:9200,http://192.168.88.71:9200,http://192.168.88.72:9200 - -index=$1 -urls=$2 - -echo '创建索引开始' -python create-index.py ${index} ${urls} -echo '创建索引结束' \ No newline at end of file diff --git 
a/skyeye-collector/src/main/resources/shell/es/install.sh b/skyeye-collector/src/main/resources/shell/es/install.sh deleted file mode 100644 index 11a6e96..0000000 --- a/skyeye-collector/src/main/resources/shell/es/install.sh +++ /dev/null @@ -1,6 +0,0 @@ -#!/bin/bash - -echo "--> install python-pip module" -sudo apt-get install python-pip -echo "--> install elasticsearch module" -sudo pip install pyelasticsearch \ No newline at end of file diff --git a/skyeye-collector/src/main/resources/shell/hbase/hbase b/skyeye-collector/src/main/resources/shell/hbase/hbase deleted file mode 100644 index a75fc56..0000000 --- a/skyeye-collector/src/main/resources/shell/hbase/hbase +++ /dev/null @@ -1,3 +0,0 @@ -create 'trace', {NAME => 'span', VERSIONS => 1, COMPRESSION => 'SNAPPY'}, {NUMREGIONS => 3, SPLITALGO => 'HexStringSplit'} -create 'time_consume', {NAME => 'trace', VERSIONS => 1, COMPRESSION => 'SNAPPY'}, {NUMREGIONS => 3, SPLITALGO => 'HexStringSplit'} -create 'annotation', {NAME => 'trace', VERSIONS => 1, COMPRESSION => 'SNAPPY'}, {NUMREGIONS => 3, SPLITALGO => 'HexStringSplit'} diff --git a/skyeye-collector/src/main/resources/shell/kafka/create-topic.sh b/skyeye-collector/src/main/resources/shell/kafka/create-topic.sh deleted file mode 100644 index a6ae5d3..0000000 --- a/skyeye-collector/src/main/resources/shell/kafka/create-topic.sh +++ /dev/null @@ -1,6 +0,0 @@ -#!/bin/sh - -# zk servers, 192.168.88.70:2181,192.168.88.71:2181,192.168.88.72:2181/kafka/0.10.0.1 -urls=$1 - -bin/kafka-topics.sh --create --zookeeper ${urls} --replication-factor 3 --partitions 3 --topic app-log \ No newline at end of file From 33c89beb5332a89649f80ef818a9d331c5f512c5 Mon Sep 17 00:00:00 2001 From: JThink Date: Mon, 7 Aug 2017 09:44:00 +0800 Subject: [PATCH 22/27] update constants --- .../main/java/com/jthink/skyeye/base/constant/Constants.java | 1 + .../src/main/java/com/jthink/skyeye/base/constant/RpcType.java | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git 
a/skyeye-base/src/main/java/com/jthink/skyeye/base/constant/Constants.java b/skyeye-base/src/main/java/com/jthink/skyeye/base/constant/Constants.java index 845e395..24299be 100644 --- a/skyeye-base/src/main/java/com/jthink/skyeye/base/constant/Constants.java +++ b/skyeye-base/src/main/java/com/jthink/skyeye/base/constant/Constants.java @@ -209,6 +209,7 @@ public class Constants { public static final String RPC_TYPE_NONE = "none"; public static final String RPC_TYPE_DUBBO = "dubbo"; public static final String RPC_TYPE_THRIFT = "thrift"; + public static final String RPC_TYPE_SC = "sc"; // rpc trace 统计指标相关 public static final String TRACE_SUCCESS = "success"; diff --git a/skyeye-base/src/main/java/com/jthink/skyeye/base/constant/RpcType.java b/skyeye-base/src/main/java/com/jthink/skyeye/base/constant/RpcType.java index c36091b..c16b920 100644 --- a/skyeye-base/src/main/java/com/jthink/skyeye/base/constant/RpcType.java +++ b/skyeye-base/src/main/java/com/jthink/skyeye/base/constant/RpcType.java @@ -12,7 +12,7 @@ public enum RpcType { none(Constants.RPC_TYPE_NONE, "none"), dubbo(Constants.RPC_TYPE_DUBBO, "dubbo"), - dubbo(Constants.RPC_TYPE_SC, "spring-cloud"), + sc(Constants.RPC_TYPE_SC, "spring-cloud"), thrift(Constants.RPC_TYPE_THRIFT, "thrift"); private String symbol; From 11b8d1260bfae5535e7b196c3855d73e256445e6 Mon Sep 17 00:00:00 2001 From: JThink Date: Tue, 8 Aug 2017 10:10:10 +0800 Subject: [PATCH 23/27] fix bug --- skyeye-alarm/build.gradle | 2 +- skyeye-benchmark/log-generater/build.gradle | 6 ++--- .../generater/service/GenerateLogService.java | 25 +++++++++---------- .../src/main/resources/logback.xml | 14 +++++------ .../client/log4j/appender/KafkaAppender.java | 2 +- .../logback/appender/KafkaAppender.java | 2 +- .../kafka/KafkaConfiguration.java | 2 +- .../collector/metrics/launcher/Launcher.java | 2 +- .../src/main/resources/application.properties | 2 +- .../config/collector-metrics.properties | 2 +- .../collector/trace/launcher/Launcher.java | 2 +- 
.../src/main/resources/application.properties | 2 +- skyeye-data/skyeye-data-jpa/build.gradle | 4 +++ skyeye-monitor/build.gradle | 5 +--- .../src/main/resources/application.properties | 2 +- skyeye-statistics/build.gradle | 5 +++- skyeye-web/build.gradle | 3 --- .../src/main/resources/application.properties | 6 +++-- .../main/resources/properties/web.properties | 6 +++-- 19 files changed, 48 insertions(+), 46 deletions(-) diff --git a/skyeye-alarm/build.gradle b/skyeye-alarm/build.gradle index 9e6d26b..8ce3737 100644 --- a/skyeye-alarm/build.gradle +++ b/skyeye-alarm/build.gradle @@ -42,7 +42,7 @@ configurations { compile.exclude group: "org.springframework", module: "spring-web" } -mainClassName = 'com.jthink.skyeye.alarm.launcher.launcher' +mainClassName = 'com.jthink.skyeye.alarm.launcher.Launcher' buildscript { ext { diff --git a/skyeye-benchmark/log-generater/build.gradle b/skyeye-benchmark/log-generater/build.gradle index 62823e0..bfc8099 100644 --- a/skyeye-benchmark/log-generater/build.gradle +++ b/skyeye-benchmark/log-generater/build.gradle @@ -26,9 +26,7 @@ ext { } dependencies { - compile ("skyeye:skyeye-client:$clientVersion") { - exclude group: 'log4j', module: 'log4j' - } + compile "skyeye:skyeye-client-logback:$clientVersion" compile "org.springframework.boot:spring-boot-starter" testCompile "org.springframework.boot:spring-boot-starter-test" @@ -40,7 +38,7 @@ configurations { compile.exclude group: "org.springframework", module: "spring-web" } -mainClassName = 'com.jthink.skyeye.benchmark.log.generater.launcher.launcher' +mainClassName = 'com.jthink.skyeye.benchmark.log.generater.launcher.Launcher' buildscript { ext { diff --git a/skyeye-benchmark/log-generater/src/main/java/com/jthink/skyeye/benchmark/log/generater/service/GenerateLogService.java b/skyeye-benchmark/log-generater/src/main/java/com/jthink/skyeye/benchmark/log/generater/service/GenerateLogService.java index 9db291a..df0ac46 100644 --- 
a/skyeye-benchmark/log-generater/src/main/java/com/jthink/skyeye/benchmark/log/generater/service/GenerateLogService.java +++ b/skyeye-benchmark/log-generater/src/main/java/com/jthink/skyeye/benchmark/log/generater/service/GenerateLogService.java @@ -4,6 +4,7 @@ import com.jthink.skyeye.base.constant.MiddleWare; import com.jthink.skyeye.base.dto.ApiLog; import com.jthink.skyeye.base.dto.EventLog; +import org.I0Itec.zkclient.ZkClient; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.stereotype.Service; @@ -49,21 +50,19 @@ public void generateCoverLog() { // 生成正常入库日志 LOGGER.info("我是mock normal日志"); // 生成api日志 - LOGGER.info(ApiLog.buildApiLog(EventType.invoke_interface, "/app/status", "800001", 100, EventLog.MONITOR_STATUS_SUCCESS, "我是mock api成功日志").toString()); - LOGGER.info(ApiLog.buildApiLog(EventType.invoke_interface, "/app/status", "800001", 10, EventLog.MONITOR_STATUS_FAILED, "我是mock api失败日志").toString()); - LOGGER.info(ApiLog.buildApiLog(EventType.invoke_interface, "/log/realtime", "800002", 100, EventLog.MONITOR_STATUS_SUCCESS, "我是mock api成功日志").toString()); + LOGGER.info(ApiLog.buildApiLog(EventType.invoke_interface, "/app/status", "800001", 2000, EventLog.MONITOR_STATUS_SUCCESS, "我是mock api成功日志").toString()); + LOGGER.info(ApiLog.buildApiLog(EventType.invoke_interface, "/app/status", "800001", 2000, EventLog.MONITOR_STATUS_FAILED, "我是mock api失败日志").toString()); + LOGGER.info(ApiLog.buildApiLog(EventType.invoke_interface, "/log/realtime", "800002", 2000, EventLog.MONITOR_STATUS_SUCCESS, "我是mock api成功日志").toString()); // 生成中间件日志 - LOGGER.info(EventLog.buildEventLog(EventType.middleware_opt, MiddleWare.HBASE.symbol(), 100, EventLog.MONITOR_STATUS_SUCCESS, "我是mock middle ware成功日志").toString()); - LOGGER.info(EventLog.buildEventLog(EventType.middleware_opt, MiddleWare.MONGO.symbol(), 10, EventLog.MONITOR_STATUS_FAILED, "我是mock middle ware失败日志").toString()); + LOGGER.info(EventLog.buildEventLog(EventType.middleware_opt, 
MiddleWare.HBASE.symbol(), 2000, EventLog.MONITOR_STATUS_SUCCESS, "我是mock middle ware成功日志").toString()); + LOGGER.info(EventLog.buildEventLog(EventType.middleware_opt, MiddleWare.MONGO.symbol(), 2000, EventLog.MONITOR_STATUS_FAILED, "我是mock middle ware失败日志").toString()); // 生成任务执行日志 - LOGGER.info(EventLog.buildEventLog(EventType.job_execute, "application_1477705439920_0544", 10, EventLog.MONITOR_STATUS_FAILED, "我是mock job exec失败日志").toString()); - LOGGER.info(EventLog.buildEventLog(EventType.job_execute, "application_1477705439920_0545", 100, EventLog.MONITOR_STATUS_SUCCESS, "我是mock job exec成功日志").toString()); + LOGGER.info(EventLog.buildEventLog(EventType.job_execute, "application_1477705439920_0544", 2000, EventLog.MONITOR_STATUS_FAILED, "我是mock job exec失败日志").toString()); + LOGGER.info(EventLog.buildEventLog(EventType.job_execute, "application_1477705439920_0545", 2000, EventLog.MONITOR_STATUS_SUCCESS, "我是mock job exec成功日志").toString()); // 生成第三方日志 - LOGGER.info(EventLog.buildEventLog(EventType.thirdparty_call, "信联", 100, EventLog.MONITOR_STATUS_FAILED, "我是mock third 失败日志").toString()); - LOGGER.info(EventLog.buildEventLog(EventType.thirdparty_call, "信联", 100, EventLog.MONITOR_STATUS_SUCCESS, "我是mock third 成功日志").toString()); - LOGGER.info(EventLog.buildEventLog(EventType.thirdparty_call, "百付", 100, EventLog.MONITOR_STATUS_SUCCESS, "我是mock third 成功日志").toString()); - LOGGER.info(EventLog.buildEventLog(EventType.thirdparty_call, "百付", 100, EventLog.MONITOR_STATUS_FAILED, "我是mock third 失败日志").toString()); + LOGGER.info(EventLog.buildEventLog(EventType.thirdparty_call, "信联", 2000, EventLog.MONITOR_STATUS_FAILED, "我是mock third 失败日志").toString()); + LOGGER.info(EventLog.buildEventLog(EventType.thirdparty_call, "信联", 2000, EventLog.MONITOR_STATUS_SUCCESS, "我是mock third 成功日志").toString()); + LOGGER.info(EventLog.buildEventLog(EventType.thirdparty_call, "百付", 2000, EventLog.MONITOR_STATUS_SUCCESS, "我是mock third 成功日志").toString()); + 
LOGGER.info(EventLog.buildEventLog(EventType.thirdparty_call, "百付", 2000, EventLog.MONITOR_STATUS_FAILED, "我是mock third 失败日志").toString()); } - - } diff --git a/skyeye-benchmark/log-generater/src/main/resources/logback.xml b/skyeye-benchmark/log-generater/src/main/resources/logback.xml index 0e7fa83..755aa0d 100644 --- a/skyeye-benchmark/log-generater/src/main/resources/logback.xml +++ b/skyeye-benchmark/log-generater/src/main/resources/logback.xml @@ -50,19 +50,19 @@ - - + + %d{yyyy-MM-dd HH:mm:ss.SSS};${CONTEXT_NAME};${HOSTNAME};%thread;%-5level;%logger{96};%line;%msg%n app-log - dubbo - riot01:2181,riot02:2181,riot03:2181 - xxx@xxx.com - + none + riot01.jthink.com:2181,riot02.jthink.com:2181,riot03.jthink.com:2181 + qianjc@unionpaysmart.com + - bootstrap.servers=riot01:9092,riot02:9092,riot03:9092 + bootstrap.servers=riot01.jthink.com:9092,riot02.jthink.com:9092,riot03.jthink.com:9092 acks=0 linger.ms=100 max.block.ms=5000 diff --git a/skyeye-client/skyeye-client-log4j/src/main/java/com/jthink/skyeye/client/log4j/appender/KafkaAppender.java b/skyeye-client/skyeye-client-log4j/src/main/java/com/jthink/skyeye/client/log4j/appender/KafkaAppender.java index e9833b0..3f37e5c 100644 --- a/skyeye-client/skyeye-client-log4j/src/main/java/com/jthink/skyeye/client/log4j/appender/KafkaAppender.java +++ b/skyeye-client/skyeye-client-log4j/src/main/java/com/jthink/skyeye/client/log4j/appender/KafkaAppender.java @@ -311,7 +311,7 @@ public void close() { } // 关闭client,临时节点消失,监控系统进行感知报警 - ZkClient client = this.zkRegister.getClient(); + ZkClient client = this.zkRegister == null ? 
null : this.zkRegister.getClient(); if (null != client) { client.close(); } diff --git a/skyeye-client/skyeye-client-logback/src/main/java/com/jthink/skyeye/client/logback/appender/KafkaAppender.java b/skyeye-client/skyeye-client-logback/src/main/java/com/jthink/skyeye/client/logback/appender/KafkaAppender.java index ad32647..9a55318 100644 --- a/skyeye-client/skyeye-client-logback/src/main/java/com/jthink/skyeye/client/logback/appender/KafkaAppender.java +++ b/skyeye-client/skyeye-client-logback/src/main/java/com/jthink/skyeye/client/logback/appender/KafkaAppender.java @@ -125,7 +125,7 @@ public void stop() { } // 关闭client,临时节点消失,监控系统进行感知报警 - ZkClient client = this.zkRegister.getClient(); + ZkClient client = this.zkRegister == null ? null : this.zkRegister.getClient(); if (null != client) { client.close(); } diff --git a/skyeye-collector/skyeye-collector-core/src/main/java/com/jthink/skyeye/collector/core/configuration/kafka/KafkaConfiguration.java b/skyeye-collector/skyeye-collector-core/src/main/java/com/jthink/skyeye/collector/core/configuration/kafka/KafkaConfiguration.java index a81c340..37c8a35 100644 --- a/skyeye-collector/skyeye-collector-core/src/main/java/com/jthink/skyeye/collector/core/configuration/kafka/KafkaConfiguration.java +++ b/skyeye-collector/skyeye-collector-core/src/main/java/com/jthink/skyeye/collector/core/configuration/kafka/KafkaConfiguration.java @@ -29,7 +29,7 @@ public class KafkaConfiguration { // kafka consumer @Bean - public KafkaConsumer kafkaConsumerApp() { + public KafkaConsumer kafkaConsumer() { Map config = new HashMap(); config.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, this.kafkaProperties.getBrokers()); config.put(ConsumerConfig.GROUP_ID_CONFIG, this.kafkaProperties.getConsumeGroup()); diff --git a/skyeye-collector/skyeye-collector-metrics/src/main/java/com/jthink/skyeye/collector/metrics/launcher/Launcher.java 
b/skyeye-collector/skyeye-collector-metrics/src/main/java/com/jthink/skyeye/collector/metrics/launcher/Launcher.java index a16aeca..15ea0ef 100644 --- a/skyeye-collector/skyeye-collector-metrics/src/main/java/com/jthink/skyeye/collector/metrics/launcher/Launcher.java +++ b/skyeye-collector/skyeye-collector-metrics/src/main/java/com/jthink/skyeye/collector/metrics/launcher/Launcher.java @@ -27,7 +27,7 @@ */ @SpringBootApplication @EnableAutoConfiguration -@ComponentScan(basePackages={"com.jthink.skyeye.collector.core", "com.jthink.skyeye.collector.metrics"}) +@ComponentScan(basePackages={"com.jthink.skyeye.collector.core", "com.jthink.skyeye.collector.metrics", "com.jthink.skyeye.data.rabbitmq", "com.jthink.skyeye.data.jpa"}) @PropertySource("file:/opt/jthink/jthink-config/skyeye/collector/collector-metrics.properties") //@PropertySource("classpath:properties/collector-metrics.properties") public class Launcher { diff --git a/skyeye-collector/skyeye-collector-metrics/src/main/resources/application.properties b/skyeye-collector/skyeye-collector-metrics/src/main/resources/application.properties index 833707c..bba8be2 100644 --- a/skyeye-collector/skyeye-collector-metrics/src/main/resources/application.properties +++ b/skyeye-collector/skyeye-collector-metrics/src/main/resources/application.properties @@ -46,7 +46,7 @@ spring.datasource.test-on-return=false spring.data.jpa.repositories.enabled=true spring.jpa.hibernate.ddl-auto=validate spring.jpa.generate-ddl=false -spring.jpa.database=org.hibernate.dialect.MySQL5InnoDBDialect +spring.jpa.database=mysql spring.jpa.show-sql=false # rabbitmq diff --git a/skyeye-collector/skyeye-collector-metrics/src/main/resources/config/collector-metrics.properties b/skyeye-collector/skyeye-collector-metrics/src/main/resources/config/collector-metrics.properties index 98920ae..9839bae 100644 --- a/skyeye-collector/skyeye-collector-metrics/src/main/resources/config/collector-metrics.properties +++ 
b/skyeye-collector/skyeye-collector-metrics/src/main/resources/config/collector-metrics.properties @@ -9,7 +9,7 @@ es.ips=riot01,riot02,riot03 es.cluster=mondeo es.port=9300 es.sniff=true -es.index=app-log +es.index=event-log es.doc=log # redis config diff --git a/skyeye-collector/skyeye-collector-trace/src/main/java/com/jthink/skyeye/collector/trace/launcher/Launcher.java b/skyeye-collector/skyeye-collector-trace/src/main/java/com/jthink/skyeye/collector/trace/launcher/Launcher.java index 65630f2..e830f20 100644 --- a/skyeye-collector/skyeye-collector-trace/src/main/java/com/jthink/skyeye/collector/trace/launcher/Launcher.java +++ b/skyeye-collector/skyeye-collector-trace/src/main/java/com/jthink/skyeye/collector/trace/launcher/Launcher.java @@ -28,7 +28,7 @@ */ @SpringBootApplication @EnableAutoConfiguration -@ComponentScan(basePackages={"com.jthink.skyeye.collector.core", "com.jthink.skyeye.collector.trace"}) +@ComponentScan(basePackages={"com.jthink.skyeye.collector.core", "com.jthink.skyeye.collector.trace", "com.jthink.skyeye.data.jpa"}) @PropertySource("file:/opt/jthink/jthink-config/skyeye/collector/collector-trace.properties") //@PropertySource("classpath:properties/collector-trace.properties") public class Launcher { diff --git a/skyeye-collector/skyeye-collector-trace/src/main/resources/application.properties b/skyeye-collector/skyeye-collector-trace/src/main/resources/application.properties index e2ba23d..50fcf8a 100644 --- a/skyeye-collector/skyeye-collector-trace/src/main/resources/application.properties +++ b/skyeye-collector/skyeye-collector-trace/src/main/resources/application.properties @@ -38,7 +38,7 @@ spring.datasource.test-on-return=false spring.data.jpa.repositories.enabled=true spring.jpa.hibernate.ddl-auto=validate spring.jpa.generate-ddl=false -spring.jpa.database=org.hibernate.dialect.MySQL5InnoDBDialect +spring.jpa.database=mysql spring.jpa.show-sql=false # hbase config diff --git a/skyeye-data/skyeye-data-jpa/build.gradle 
b/skyeye-data/skyeye-data-jpa/build.gradle index effe116..365369a 100644 --- a/skyeye-data/skyeye-data-jpa/build.gradle +++ b/skyeye-data/skyeye-data-jpa/build.gradle @@ -14,5 +14,9 @@ ext { dependencies { compile("org.springframework.boot:spring-boot-starter-data-jpa:$springBootVersion") + compile ("org.apache.commons:commons-dbcp2:2.1.1") + compile ("mysql:mysql-connector-java:5.1.43") + + testCompile "junit:junit:4.12" } diff --git a/skyeye-monitor/build.gradle b/skyeye-monitor/build.gradle index 144b406..a4d4de0 100644 --- a/skyeye-monitor/build.gradle +++ b/skyeye-monitor/build.gradle @@ -35,9 +35,6 @@ dependencies { compile "skyeye:skyeye-data-rabbitmq:$dataVersion" compile "org.springframework.boot:spring-boot-starter" - compile("org.apache.commons:commons-dbcp2:2.1.1") - compile("mysql:mysql-connector-java:5.1.39") - compile "org.apache.zookeeper:zookeeper:$zookeeperVersion" compile "org.codehaus.jackson:jackson-core-asl:$jacksonVersion" @@ -57,7 +54,7 @@ configurations { compile.exclude group: "org.springframework", module: "spring-web" } -mainClassName = 'com.jthink.skyeye.monitor.launcher.launcher' +mainClassName = 'com.jthink.skyeye.monitor.launcher.Launcher' buildscript { ext { diff --git a/skyeye-monitor/src/main/resources/application.properties b/skyeye-monitor/src/main/resources/application.properties index 0f04df9..8c9cefe 100644 --- a/skyeye-monitor/src/main/resources/application.properties +++ b/skyeye-monitor/src/main/resources/application.properties @@ -39,5 +39,5 @@ spring.datasource.test-on-return=false spring.data.jpa.repositories.enabled=true spring.jpa.hibernate.ddl-auto=validate spring.jpa.generate-ddl=false -spring.jpa.database=org.hibernate.dialect.MySQL5InnoDBDialect +spring.jpa.database=mysql spring.jpa.show-sql=false diff --git a/skyeye-statistics/build.gradle b/skyeye-statistics/build.gradle index 3aed438..dd724cc 100644 --- a/skyeye-statistics/build.gradle +++ b/skyeye-statistics/build.gradle @@ -29,6 +29,7 @@ ext { sparkVersion = 
'1.3.0-cdh5.4.0' baseVersion = '1.0.0' fastjsonVersion = '1.2.35' + zkClientVersion = '0.9.1-up' } dependencies { @@ -45,6 +46,8 @@ dependencies { compile "com.alibaba:fastjson:$fastjsonVersion" + compile "com.101tec:zkclient:$zkClientVersion" + testCompile "org.springframework.boot:spring-boot-starter-test" } @@ -53,7 +56,7 @@ configurations { compile.exclude group: 'ch.qos.logback', module: 'logback-core' } -mainClassName = 'com.jthink.skyeye.statistics.launcher.launcher' +mainClassName = 'com.jthink.skyeye.statistics.launcher.Launcher' jar { manifest { diff --git a/skyeye-web/build.gradle b/skyeye-web/build.gradle index b003998..1de233e 100644 --- a/skyeye-web/build.gradle +++ b/skyeye-web/build.gradle @@ -44,9 +44,6 @@ dependencies { exclude group: 'org.springframework.boot', module: 'spring-boot-starter-validation' } - compile("org.apache.commons:commons-dbcp2:2.1.1") - compile("mysql:mysql-connector-java:5.1.39") - compile "org.apache.httpcomponents:httpclient:$httpclientVersion" compile "com.101tec:zkclient:$zkclientVersion" diff --git a/skyeye-web/src/main/resources/application.properties b/skyeye-web/src/main/resources/application.properties index 741e418..1f78dd1 100644 --- a/skyeye-web/src/main/resources/application.properties +++ b/skyeye-web/src/main/resources/application.properties @@ -38,7 +38,7 @@ spring.datasource.test-on-return=false spring.data.jpa.repositories.enabled=true spring.jpa.hibernate.ddl-auto=validate spring.jpa.generate-ddl=false -spring.jpa.database=org.hibernate.dialect.MySQL5InnoDBDialect +spring.jpa.database=mysql spring.jpa.show-sql=false # es @@ -81,5 +81,7 @@ spring.monitor.es.totalTemplate=select * from event-log/log where eventType='EVE spring.monitor.es.template=select * from event-log/log where eventType='EVENTTYPE' and time>='BEGIN' and day<='END' and cost>=COST group by SCOPE spring.monitor.es.delay=${spring.data.es.delay} -# hbase +# hbase config spring.data.hbase.quorum=${hbase.quorum} 
+spring.data.hbase.rootDir=${hbase.rootDir} +spring.data.hbase.nodeParent=${hbase.zookeeper.znode.parent} diff --git a/skyeye-web/src/main/resources/properties/web.properties b/skyeye-web/src/main/resources/properties/web.properties index bef0df6..176fee9 100644 --- a/skyeye-web/src/main/resources/properties/web.properties +++ b/skyeye-web/src/main/resources/properties/web.properties @@ -35,5 +35,7 @@ monitor.es.apiThreshold=0.1 monitor.es.thirdResponseTime=1000 monitor.es.thirdThreshold=0.1 -#hbase -hbase.quorum=panda-01 \ No newline at end of file +# hbase config +hbase.quorum=panda-01,panda-01,panda-03 +hbase.rootDir=hdfs://panda-01:8020/hbase +hbase.zookeeper.znode.parent=/hbase From a60e3fe095cc62cb82f7476194c100f98e714bc6 Mon Sep 17 00:00:00 2001 From: JThink Date: Tue, 8 Aug 2017 10:58:33 +0800 Subject: [PATCH 24/27] update gradle --- skyeye-benchmark/log-generater/build.gradle | 1 - 1 file changed, 1 deletion(-) diff --git a/skyeye-benchmark/log-generater/build.gradle b/skyeye-benchmark/log-generater/build.gradle index bfc8099..9509c9f 100644 --- a/skyeye-benchmark/log-generater/build.gradle +++ b/skyeye-benchmark/log-generater/build.gradle @@ -22,7 +22,6 @@ repositories { ext { clientVersion = '1.0.0' - dataVersion = '1.0.0' } dependencies { From 10d50c91627467ef6ce6e305c2398eb1b332f2bf Mon Sep 17 00:00:00 2001 From: JThink Date: Tue, 8 Aug 2017 11:32:27 +0800 Subject: [PATCH 25/27] test trace module --- skyeye-benchmark/dubbo-service/build.gradle | 4 ++-- .../dubbo-service-a/src/main/resources/logback.xml | 12 ++++++------ .../dubbo-service/dubbo-service-b/build.gradle | 2 +- .../dubbo-service-b/src/main/resources/logback.xml | 12 ++++++------ .../dubbo-service/dubbo-service-c/build.gradle | 2 +- .../dubbo-service-c/src/main/resources/logback.xml | 12 ++++++------ .../dubbo-service/dubbo-service-d/build.gradle | 2 +- .../dubbo-service-d/src/main/resources/logback.xml | 12 ++++++------ .../dubbo-service/dubbo-service-e/build.gradle | 2 +- 
.../dubbo-service-e/src/main/resources/logback.xml | 12 ++++++------ .../dubbo-service/performance-test/build.gradle | 2 +- 11 files changed, 37 insertions(+), 37 deletions(-) diff --git a/skyeye-benchmark/dubbo-service/build.gradle b/skyeye-benchmark/dubbo-service/build.gradle index a132430..7db0572 100644 --- a/skyeye-benchmark/dubbo-service/build.gradle +++ b/skyeye-benchmark/dubbo-service/build.gradle @@ -36,7 +36,7 @@ subprojects { ext { slf4jVersion = '1.7.21' - dubboVersion = '2.8.4-skyeye-trace' + dubboVersion = '2.8.4-skyeye-trace-1.0.0' dataVersion = '1.0.0' clientVersion = '1.0.0' zookeeperVerison = '3.4.6' @@ -55,6 +55,6 @@ subprojects { dependencies { compile "org.slf4j:slf4j-api:$slf4jVersion" - compile "skyeye:skyeye-client:$clientVersion" + compile "skyeye:skyeye-client-logback:$clientVersion" } } diff --git a/skyeye-benchmark/dubbo-service/dubbo-service-a/src/main/resources/logback.xml b/skyeye-benchmark/dubbo-service/dubbo-service-a/src/main/resources/logback.xml index 26f47be..5f33eb6 100644 --- a/skyeye-benchmark/dubbo-service/dubbo-service-a/src/main/resources/logback.xml +++ b/skyeye-benchmark/dubbo-service/dubbo-service-a/src/main/resources/logback.xml @@ -49,19 +49,19 @@ - - + + %d{yyyy-MM-dd HH:mm:ss.SSS};${CONTEXT_NAME};${HOSTNAME};%thread;%-5level;%logger{96};%line;%msg%n app-log dubbo - riot01:2181,riot02:2181,riot03:2181 - xxx@xxx.com - + riot01.jthink.com:2181,riot02.jthink.com:2181,riot03.jthink.com:2181 + qianjc@unionpaysmart.com + - bootstrap.servers=riot01:9092,riot02:9092,riot03:9092 + bootstrap.servers=riot01.jthink.com:9092,riot02.jthink.com:9092,riot03.jthink.com:9092 acks=0 linger.ms=100 max.block.ms=5000 diff --git a/skyeye-benchmark/dubbo-service/dubbo-service-b/build.gradle b/skyeye-benchmark/dubbo-service/dubbo-service-b/build.gradle index b74765d..440ad33 100644 --- a/skyeye-benchmark/dubbo-service/dubbo-service-b/build.gradle +++ b/skyeye-benchmark/dubbo-service/dubbo-service-b/build.gradle @@ -19,7 +19,7 @@ 
configurations { compile.exclude group: "org.slf4j", module: "slf4j-log4j12" } -mainClassName = "com.jthink.skyeye.benchmark.dubbo.service.b.launcher.launcher" +mainClassName = "com.jthink.skyeye.benchmark.dubbo.service.b.launcher.Launcher" buildscript { ext { diff --git a/skyeye-benchmark/dubbo-service/dubbo-service-b/src/main/resources/logback.xml b/skyeye-benchmark/dubbo-service/dubbo-service-b/src/main/resources/logback.xml index bc10ddf..10c1e31 100644 --- a/skyeye-benchmark/dubbo-service/dubbo-service-b/src/main/resources/logback.xml +++ b/skyeye-benchmark/dubbo-service/dubbo-service-b/src/main/resources/logback.xml @@ -49,19 +49,19 @@ - - + + %d{yyyy-MM-dd HH:mm:ss.SSS};${CONTEXT_NAME};${HOSTNAME};%thread;%-5level;%logger{96};%line;%msg%n app-log dubbo - riot01:2181,riot02:2181,riot03:2181 - xxx@xxx.com - + riot01.jthink.com:2181,riot02.jthink.com:2181,riot03.jthink.com:2181 + qianjc@unionpaysmart.com + - bootstrap.servers=riot01:9092,riot02:9092,riot03:9092 + bootstrap.servers=riot01.jthink.com:9092,riot02.jthink.com:9092,riot03.jthink.com:9092 acks=0 linger.ms=100 max.block.ms=5000 diff --git a/skyeye-benchmark/dubbo-service/dubbo-service-c/build.gradle b/skyeye-benchmark/dubbo-service/dubbo-service-c/build.gradle index 8d19933..8cdae00 100644 --- a/skyeye-benchmark/dubbo-service/dubbo-service-c/build.gradle +++ b/skyeye-benchmark/dubbo-service/dubbo-service-c/build.gradle @@ -19,7 +19,7 @@ configurations { compile.exclude group: "org.slf4j", module: "slf4j-log4j12" } -mainClassName = "com.jthink.skyeye.benchmark.dubbo.service.c.launcher.launcher" +mainClassName = "com.jthink.skyeye.benchmark.dubbo.service.c.launcher.Launcher" buildscript { ext { diff --git a/skyeye-benchmark/dubbo-service/dubbo-service-c/src/main/resources/logback.xml b/skyeye-benchmark/dubbo-service/dubbo-service-c/src/main/resources/logback.xml index 9a1d9ba..a93a7b7 100644 --- a/skyeye-benchmark/dubbo-service/dubbo-service-c/src/main/resources/logback.xml +++ 
b/skyeye-benchmark/dubbo-service/dubbo-service-c/src/main/resources/logback.xml @@ -49,19 +49,19 @@ - - + + %d{yyyy-MM-dd HH:mm:ss.SSS};${CONTEXT_NAME};${HOSTNAME};%thread;%-5level;%logger{96};%line;%msg%n app-log dubbo - riot01:2181,riot02:2181,riot03:2181 - xxx@xxx.com - + riot01.jthink.com:2181,riot02.jthink.com:2181,riot03.jthink.com:2181 + qianjc@unionpaysmart.com + - bootstrap.servers=riot01:9092,riot02:9092,riot03:9092 + bootstrap.servers=riot01.jthink.com:9092,riot02.jthink.com:9092,riot03.jthink.com:9092 acks=0 linger.ms=100 max.block.ms=5000 diff --git a/skyeye-benchmark/dubbo-service/dubbo-service-d/build.gradle b/skyeye-benchmark/dubbo-service/dubbo-service-d/build.gradle index b874a87..a6e2840 100644 --- a/skyeye-benchmark/dubbo-service/dubbo-service-d/build.gradle +++ b/skyeye-benchmark/dubbo-service/dubbo-service-d/build.gradle @@ -19,7 +19,7 @@ configurations { compile.exclude group: "org.slf4j", module: "slf4j-log4j12" } -mainClassName = "com.jthink.skyeye.benchmark.dubbo.service.d.launcher.launcher" +mainClassName = "com.jthink.skyeye.benchmark.dubbo.service.d.launcher.Launcher" buildscript { ext { diff --git a/skyeye-benchmark/dubbo-service/dubbo-service-d/src/main/resources/logback.xml b/skyeye-benchmark/dubbo-service/dubbo-service-d/src/main/resources/logback.xml index 46f5f90..d114e20 100644 --- a/skyeye-benchmark/dubbo-service/dubbo-service-d/src/main/resources/logback.xml +++ b/skyeye-benchmark/dubbo-service/dubbo-service-d/src/main/resources/logback.xml @@ -49,19 +49,19 @@ - - + + %d{yyyy-MM-dd HH:mm:ss.SSS};${CONTEXT_NAME};${HOSTNAME};%thread;%-5level;%logger{96};%line;%msg%n app-log dubbo - riot01:2181,riot02:2181,riot03:2181 - xxx@xxx.com - + riot01.jthink.com:2181,riot02.jthink.com:2181,riot03.jthink.com:2181 + qianjc@unionpaysmart.com + - bootstrap.servers=riot01:9092,riot02:9092,riot03:9092 + bootstrap.servers=riot01.jthink.com:9092,riot02.jthink.com:9092,riot03.jthink.com:9092 acks=0 linger.ms=100 max.block.ms=5000 diff --git 
a/skyeye-benchmark/dubbo-service/dubbo-service-e/build.gradle b/skyeye-benchmark/dubbo-service/dubbo-service-e/build.gradle index cc3a998..47820ae 100644 --- a/skyeye-benchmark/dubbo-service/dubbo-service-e/build.gradle +++ b/skyeye-benchmark/dubbo-service/dubbo-service-e/build.gradle @@ -19,7 +19,7 @@ configurations { compile.exclude group: "org.slf4j", module: "slf4j-log4j12" } -mainClassName = "com.jthink.skyeye.benchmark.dubbo.service.e.launcher.launcher" +mainClassName = "com.jthink.skyeye.benchmark.dubbo.service.e.launcher.Launcher" buildscript { ext { diff --git a/skyeye-benchmark/dubbo-service/dubbo-service-e/src/main/resources/logback.xml b/skyeye-benchmark/dubbo-service/dubbo-service-e/src/main/resources/logback.xml index 5260b60..be4b741 100644 --- a/skyeye-benchmark/dubbo-service/dubbo-service-e/src/main/resources/logback.xml +++ b/skyeye-benchmark/dubbo-service/dubbo-service-e/src/main/resources/logback.xml @@ -49,19 +49,19 @@ - - + + %d{yyyy-MM-dd HH:mm:ss.SSS};${CONTEXT_NAME};${HOSTNAME};%thread;%-5level;%logger{96};%line;%msg%n app-log dubbo - riot01:2181,riot02:2181,riot03:2181 - xxx@xxx.com - + riot01.jthink.com:2181,riot02.jthink.com:2181,riot03.jthink.com:2181 + qianjc@unionpaysmart.com + - bootstrap.servers=riot01:9092,riot02:9092,riot03:9092 + bootstrap.servers=riot01.jthink.com:9092,riot02.jthink.com:9092,riot03.jthink.com:9092 acks=0 linger.ms=100 max.block.ms=5000 diff --git a/skyeye-benchmark/dubbo-service/performance-test/build.gradle b/skyeye-benchmark/dubbo-service/performance-test/build.gradle index 3b14a2c..4688c7f 100644 --- a/skyeye-benchmark/dubbo-service/performance-test/build.gradle +++ b/skyeye-benchmark/dubbo-service/performance-test/build.gradle @@ -13,7 +13,7 @@ configurations { compile.exclude group: "org.slf4j", module: "slf4j-log4j12" } -mainClassName = "com.jthink.skyeye.benchmark.performance.test.launcher.launcher" +mainClassName = "com.jthink.skyeye.benchmark.performance.test.launcher.Launcher" buildscript { 
repositories { From 68a59624fd9cdb75097b2941f31bcf2de4eb43a1 Mon Sep 17 00:00:00 2001 From: JThink Date: Tue, 8 Aug 2017 15:47:46 +0800 Subject: [PATCH 26/27] update the readme --- README.md | 299 ++++++++++++------ skyeye-client/README.md | 100 +++--- .../collector-metrics.properties | 0 .../collector-trace.properties | 0 4 files changed, 256 insertions(+), 143 deletions(-) rename skyeye-collector/skyeye-collector-metrics/src/main/resources/{config => properties}/collector-metrics.properties (100%) rename skyeye-collector/skyeye-collector-trace/src/main/resources/{config => properties}/collector-trace.properties (100%) diff --git a/README.md b/README.md index 298d960..dfd5e5a 100644 --- a/README.md +++ b/README.md @@ -14,11 +14,20 @@ - alert: 具体报警手段,包括邮件和微信 # 项目介绍 -日志生产系统,包括应用系统的日志入队列、埋点、应用系统注册等 -- 自定义一些log框架的appender,包含logback和log4j -- 向注册中心注册应用 -- 应用埋点进行监控报警 -- rpc trace数据产生器 +对java、scala等运行于jvm的程序进行实时日志采集、索引和可视化,对系统进行进程级别的监控,对系统内部的操作进行策略性的报警、对分布式的rpc调用进行trace跟踪以便于进行性能分析 + +- 日志实时采集(支持log4j和logback) +- 日志实时页面实时展示(支持关键字过滤) +- 历史日志查询(支持多种条件过滤,支持sql语句查询) +- app实时部署位置展示(机器和文件夹) +- app实时日志采集状态展示 +- app历史部署位置展示 +- api请求实时统计和历史统计 +- 第三方请求实时统计和历史统计 +- 基于dubbox的rpc调用数据收集和调用链展示(支持多种条件检索) +- 系统上下线报警 +- 系统内嵌采集器报警 +- 中间件、api、第三方、job执行异常报警(策略报警和异常报警) # 部署步骤 @@ -77,12 +86,12 @@ gradle clean install uploadArchives ### dubbox -由于使用dubbox,为了能够采集到dubbox里面的rpc数据,需要修改dubbox的源码,见我修改的dubbox项目:[dubbox](https://github.com/JThink/dubbox/tree/skyeye-trace),该项目主要实现了rpc跟踪的具体实现,需要单独打包。 +由于使用dubbox,为了能够采集到dubbox里面的rpc数据,需要修改dubbox的源码,见我修改的dubbox项目:[dubbox](https://github.com/JThink/dubbox/tree/skyeye-trace-1.0.0),该项目主要实现了rpc跟踪的具体实现,需要单独打包。 ```shell git clone https://github.com/JThink/dubbox.git cd dubbox -git checkout skyeye-trace +git checkout skyeye-trace-1.0.0 修改相关pom中的私服地址 mvn clean install deploy -Dmaven.test.skip=true ``` @@ -100,7 +109,7 @@ mvn clean install deploy -Dmaven.test.skip=true | zookeeper | 3.4.6 | | | rabbitmq | 3.5.7 | | | hbase | 1.0.0-cdh5.4.0 | 不支持1.x以下的版本,比如0.9x.x | -| 
gradle | 3.0 | | +| gradle | 3.0+ | | | hadoop | 2.6.0-cdh5.4.0 | | | spark | 1.3.0-cdh5.4.0 | | | redis | 3.x | 单机版即可 | @@ -120,7 +129,7 @@ source skyeye-data/skyeye-data-jpa/src/main/resources/sql/init.sql ```Shell hbase shell -执行skyeye-collector/src/main/resources/shell/hbase/hbase这个文件里面的内容 +执行skyeye-collector/skyeye-collector-trace/src/main/resources/shell/hbase这个文件里面的内容 ``` ### elasticsearch @@ -128,11 +137,10 @@ hbase shell 首先安装相应的es python的module,然后再创建索引,根据需要修改es的的ip、端口 ```shell -cd skyeye-collector/src/main/resources/shell/es/ +cd skyeye-collector/skyeye-collector-indexer/src/main/resources/shell ./install.sh -cd app-log bash start.sh app-log http://192.168.xx.xx:9200,http://192.168.xx.xx:9200,...... -cd event-log +cd skyeye-collector/skyeye-collector-metrics/src/main/resources/shell bash start.sh event-log http://192.168.xx.xx:9200,http://192.168.xx.xx:9200,...... 注意点:如果es版本为5.x,那么需要修改skyeye-collector/src/main/resources/shell/es/app-log/create-index.py的49和50行为下面内容: @@ -206,35 +214,120 @@ nohup bin/skyeye-alarm & ## skyeye-collector -### 配置文件 +本项目从v1.0.0版本开始按不同的kafka消费group组织子module以实现可插拔的功能模块,主要包含如下5个module: + +- skyeye-collector-core: 收集项目的所有公用的配置和公用代码,改module不需要部署 +- skyeye-collector-backup: 对采集的所有日志进行备份 +- skyeye-collector-indexer: 对采集的所有日志进行索引存入es +- kyeye-collector-metrics: 对事件日志进行meta data的采集和相关报警metrics进行索引存入es +- skyeye-collector-trace: 对rpc跟踪数据进行采集入hbase + +## 打包 + +```shell +cd skyeye-collector +gradle clean build -x test +``` + +### skyeye-collector-backup + +#### 配置文件 配置文件外部化,需要在机器上创建配置文件,根据对接系统的个数和产生日志的量进行部署,最好部署3个节点(每个节点消费3个partition的数据) ```shell ssh 到部署节点 mkdir -p /opt/jthink/jthink-config/skyeye/collector -vim collector.properties +vim collector-backup.properties # kafka config kafka.brokers=riot01:9092,riot02:9092,riot03:9092 kafka.topic=app-log -kafka.group.indexer=es-indexer-consume-group +kafka.consume.group=log-backup-consume-group +kafka.poll.timeout=100 + +# hdfs +hadoop.hdfs.namenode.port=8020 
+hadoop.hdfs.namenode.host=192.168.88.131 +hadoop.hdfs.user=xxx +hadoop.hdfs.baseDir=/user/xxx/JThink/ +hadoop.hdfs.fileRoot=/tmp/monitor-center/ +upload.log.cron=0 30 0 * * ? +``` + +### 部署 + +多个节点部署需要部署多次 + +```shell +cd skyeye-collector-backup/target/distributions +unzip skyeye-collector-backup-x.x.x.zip(替换相应的x为自己的版本) + +cd skyeye-collector-backup-x.x.x +nohup bin/skyeye-collector-backup & +``` +### skyeye-collector-indexer + +#### 配置文件 + +配置文件外部化,需要在机器上创建配置文件,根据对接系统的个数和产生日志的量进行部署,最好部署3个节点(每个节点消费3个partition的数据) + +```shell +ssh 到部署节点 +mkdir -p /opt/jthink/jthink-config/skyeye/collector +vim collector-indexer.properties + +# kafka config +kafka.brokers=riot01:9092,riot02:9092,riot03:9092 +kafka.topic=app-log +kafka.consume.group=es-indexer-consume-group kafka.poll.timeout=100 -kafka.group.collect=info-collect-consume-group -kafka.group.backup=log-backup-consume-group -kafka.group.rpc.trace=rpc-trace-consume-group -kafka.hdfs.file.root=/tmp/monitor-center/ -kafka.hdfs.file.server.id=0 # 如果部署多个节点,第一个节点值为0,第二个节点就是1,第三个节点是2,以此类推 # es config es.ips=riot01,riot02,riot03 -es.cluster=mondeo # 需要修改成搭建es的时候那个值 +es.cluster=mondeo es.port=9300 es.sniff=true es.index=app-log es.doc=log -es.index.event=event-log -es.doc.event=log +``` + +### 部署 + +多个节点部署需要部署多次 + +```shell +cd skyeye-collector-indexer/target/distributions +unzip skyeye-collector-indexer-x.x.x.zip(替换相应的x为自己的版本) + +cd skyeye-collector-indexer-x.x.x +nohup bin/skyeye-collector-indexer & +``` + +### skyeye-collector-metrics + +#### 配置文件 + +配置文件外部化,需要在机器上创建配置文件,根据对接系统的个数和产生日志的量进行部署,最好部署3个节点(每个节点消费3个partition的数据) + +```shell +ssh 到部署节点 +mkdir -p /opt/jthink/jthink-config/skyeye/collector +vim collector-metrics.properties + +# kafka config +kafka.brokers=riot01:9092,riot02:9092,riot03:9092 +kafka.topic=app-log +kafka.consume.group=info-collect-consume-group +kafka.poll.timeout=100 + +# es config +es.ips=riot01,riot02,riot03 +es.cluster=mondeo +es.port=9300 +es.sniff=true +es.index=event-log +es.doc=log # redis config 
redis.host=localhost @@ -261,33 +354,66 @@ rabbit.request.routingKey=log.key zookeeper.zkServers=riot01:2181,riot02:2181,riot03:2181 zookeeper.sessionTimeout=60000 zookeeper.connectionTimeout=5000 +``` -# hdfs -hadoop.hdfs.namenode.port=8020 -hadoop.hdfs.namenode.host=192.168.88.131 -hadoop.hdfs.user=qianjicheng -hadoop.hdfs.baseDir=/user/qianjicheng/JThink/ -upload.log.cron=0 30 0 * * ? # 按需修改,每天零点30分上传前一天的日志到hdfs,建议不改 +### 部署 -# hbase config -hbase.quorum=panda-01 +多个节点部署需要部署多次 + +```shell +cd skyeye-collector-metrics/target/distributions +unzip skyeye-collector-metrics-x.x.x.zip(替换相应的x为自己的版本) + +cd skyeye-collector-metrics-x.x.x +nohup bin/skyeye-collector-metrics & ``` -需要修改相关的配置,注释过的是要注意的,别的ip和端口根据需要进行修改(rabbitmq的配置需和alarm一致) +### skyeye-collector-trace -### 打包部署 +#### 配置文件 + +配置文件外部化,需要在机器上创建配置文件,根据对接系统的个数和产生日志的量进行部署,最好部署3个节点(每个节点消费3个partition的数据) + +```shell +ssh 到部署节点 +mkdir -p /opt/jthink/jthink-config/skyeye/collector +vim collector-trace.properties + +# kafka config +kafka.brokers=riot01:9092,riot02:9092,riot03:9092 +kafka.topic=app-log +kafka.consume.group=rpc-trace-consume-group +kafka.poll.timeout=100 + +# redis config +redis.host=localhost +redis.port=6379 +redis.password= + +# mysql config +database.address=localhost:3306 +database.name=monitor-center +database.username=root +database.password=root + +# hbase config +hbase.quorum=panda-01,panda-01,panda-03 +hbase.rootDir=hdfs://panda-01:8020/hbase +hbase.zookeeper.znode.parent=/hbase +``` + +### 部署 多个节点部署需要部署多次 ```shell -cd skyeye-collector -gradle clean distZip -x test -cd target/distributions -unzip skyeye-collector-x.x.x.zip(替换相应的x为自己的版本) +cd skyeye-collector-trace/target/distributions +unzip skyeye-collectortracemetrics-x.x.x.zip(替换相应的x为自己的版本) -cd skyeye-collector-x.x.x -nohup bin/skyeye-collector & +cd skyeye-collector-trace-x.x.x +nohup bin/skyeye-collector-trace & ``` + ## skyeye-monitor ### 配置文件 @@ -383,6 +509,11 @@ monitor.es.apiResponseTime=1000 monitor.es.apiThreshold=0.1 
monitor.es.thirdResponseTime=1000 monitor.es.thirdThreshold=0.1 + +# hbase config +hbase.quorum=panda-01,panda-01,panda-03 +hbase.rootDir=hdfs://panda-01:8020/hbase +hbase.zookeeper.znode.parent=/hbase ``` 需要修改相关的配置(rabbitmq的配置需和alarm一致,es也需要前后一致),注释过的是要注意的 @@ -408,9 +539,7 @@ nohup bin/skyeye-web & gradle或者pom中加入skyeye-client的依赖 ``` xml -compile ("skyeye:skyeye-client:0.0.1") { - exclude group: 'log4j', module: 'log4j' -} +compile "skyeye:skyeye-client-logback:1.0.0" ``` ### 配置 在logback.xml中加入一个kafkaAppender,并在properties中配置好相关的值,如下(rpc这个项目前支持none和dubbo,所以如果项目中有dubbo服务的配置成dubbo,没有dubbo服务的配置成none,以后会支持其他的rpc框架,如:thrift、spring cloud等): @@ -418,78 +547,66 @@ compile ("skyeye:skyeye-client:0.0.1") { ``` xml - - - - %d{yyyy-MM-dd HH:mm:ss.SSS};${CONTEXT_NAME};${HOSTNAME};%thread;%-5level;%logger{96};%line;%msg%n - - - ${kafka.topic} - none - ${zookeeper.servers} - ${mail} - - - bootstrap.servers=${kafka.bootstrap.servers} - acks=0 - linger.ms=100 - max.block.ms=5000 - client.id=${CONTEXT_NAME}-${HOSTNAME}-logback - + + + + %d{yyyy-MM-dd HH:mm:ss.SSS};${CONTEXT_NAME};${HOSTNAME};%thread;%-5level;%logger{96};%line;%msg%n + + + app-log + none + riot01.jthink.com:2181,riot02.jthink.com:2181,riot03.jthink.com:2181 + xxx@xxx.com + + + bootstrap.servers=riot01.jthink.com:9092,riot02.jthink.com:9092,riot03.jthink.com:9092 + acks=0 + linger.ms=100 + max.block.ms=5000 + client.id=${CONTEXT_NAME}-${HOSTNAME}-logback + ``` ## log4j ### 依赖 gradle或者pom中加入skyeye-client的依赖 ``` xml -compile ("skyeye:skyeye-client:0.0.1") { - exclude group: 'ch.qos.logback', module: 'logback-classic' -} +compile "skyeye:skyeye-client-log4j:1.0.0" ``` ### 配置 在log4j.xml中加入一个kafkaAppender,并在properties中配置好相关的值,如下(rpc这个项目前支持none和dubbo,所以如果项目中有dubbo服务的配置成dubbo,没有dubbo服务的配置成none,以后会支持其他的rpc框架,如:thrift、spring cloud等): ``` xml - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + ``` ## 注意点 ## logback -- 目前公司很多项目采用的是spring-boot,版本为1.3.6.RELEASE,该版本自带logback版本为1.1.7,该版本结合kafka有bug,需要降低一个版本 -- logback bug: 
[logback bug](http://jira.qos.ch/browse/LOGBACK-1158), 1.1.8版本会fix -- 示例: +- logback在对接kafka的时候有个bug,[jira bug](https://jira.qos.ch/browse/LOGBACK-1328),所以需要将肉root level设置为INFO(不能是DEBUG) -``` shell -compile ("org.springframework.boot:spring-boot-starter") { - exclude group: 'ch.qos.logback', module: 'logback-classic' - exclude group: 'ch.qos.logback', module: 'logback-core' -} -compile "ch.qos.logback:logback-classic:1.1.6" -compile "ch.qos.logback:logback-core:1.1.6" -``` ### log4j 由于log4j本身的appender比较复杂难写,所以在稳定性和性能上没有logback支持得好,应用能使用logback请尽量使用logback ### 中间件 如果项目中有使用到zkClient、,统一使用自己打包的版本,以防日志收集出错或者异常(PS:zk必须为3.4.6版本,尽量使用gradle进行打包部署) ### rpc trace -使用自己打包的dubbox([dubbox](https://github.com/JThink/dubbox/tree/skyeye-trace)),在soa中间件dubbox中封装了rpc的跟踪 +使用自己打包的dubbox([dubbox](https://github.com/JThink/dubbox/tree/skyeye-trace-1.0.0)),在soa中间件dubbox中封装了rpc的跟踪 ``` shell compile "com.101tec:zkclient:0.9.1-up" -compile ("com.alibaba:dubbo:2.8.4-skyeye-trace") { +compile ("com.alibaba:dubbo:2.8.4-skyeye-trace-1.0.0") { exclude group: 'org.springframework', module: 'spring' } ``` diff --git a/skyeye-client/README.md b/skyeye-client/README.md index 4a550a9..e933e5c 100644 --- a/skyeye-client/README.md +++ b/skyeye-client/README.md @@ -11,9 +11,7 @@ gradle或者pom中加入skyeye-client的依赖 ``` xml -compile ("skyeye:skyeye-client:0.0.1") { - exclude group: 'log4j', module: 'log4j' -} +compile "skyeye:skyeye-client-logback:1.0.0" ``` ### 配置 在logback.xml中加入一个kafkaAppender,并在properties中配置好相关的值,如下(rpc这个项目前支持none和dubbo,所以如果项目中有dubbo服务的配置成dubbo,没有dubbo服务的配置成none,以后会支持其他的rpc框架,如:thrift、spring cloud等): @@ -21,81 +19,79 @@ compile ("skyeye:skyeye-client:0.0.1") { ``` xml - - - - %d{yyyy-MM-dd HH:mm:ss.SSS};${CONTEXT_NAME};${HOSTNAME};%thread;%-5level;%logger{96};%line;%msg%n - - - ${kafka.topic} - none - ${zookeeper.servers} - ${mail} - - - bootstrap.servers=${kafka.bootstrap.servers} - acks=0 - linger.ms=100 - max.block.ms=5000 - client.id=${CONTEXT_NAME}-${HOSTNAME}-logback - + + + + 
%d{yyyy-MM-dd HH:mm:ss.SSS};${CONTEXT_NAME};${HOSTNAME};%thread;%-5level;%logger{96};%line;%msg%n + + + app-log + none + riot01.jthink.com:2181,riot02.jthink.com:2181,riot03.jthink.com:2181 + xxx@xxx.com + + + bootstrap.servers=riot01.jthink.com:9092,riot02.jthink.com:9092,riot03.jthink.com:9092 + acks=0 + linger.ms=100 + max.block.ms=5000 + client.id=${CONTEXT_NAME}-${HOSTNAME}-logback + ``` ## log4j ### 依赖 gradle或者pom中加入skyeye-client的依赖 ``` xml -compile ("skyeye:skyeye-client:0.0.1") { - exclude group: 'ch.qos.logback', module: 'logback-classic' -} +compile "skyeye:skyeye-client-log4j:1.0.0" ``` ### 配置 在log4j.xml中加入一个kafkaAppender,并在properties中配置好相关的值,如下(rpc这个项目前支持none和dubbo,所以如果项目中有dubbo服务的配置成dubbo,没有dubbo服务的配置成none,以后会支持其他的rpc框架,如:thrift、spring cloud等): ``` xml - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + ``` ## 注意点 ## logback -- 目前公司很多项目采用的是spring-boot,版本为1.3.6.RELEASE,该版本自带logback版本为1.1.7,该版本结合kafka有bug,需要降低一个版本 -- logback bug: http://jira.qos.ch/browse/LOGBACK-1158, 1.1.8版本会fix -- 示例: +- logback在对接kafka的时候有个bug,[jira bug](https://jira.qos.ch/browse/LOGBACK-1328),所以需要将肉root level设置为INFO(不能是DEBUG) -``` shell -compile ("org.springframework.boot:spring-boot-starter") { - exclude group: 'ch.qos.logback', module: 'logback-classic' - exclude group: 'ch.qos.logback', module: 'logback-core' -} -compile "ch.qos.logback:logback-classic:1.1.6" -compile "ch.qos.logback:logback-core:1.1.6" -``` ### log4j 由于log4j本身的appender比较复杂难写,所以在稳定性和性能上没有logback支持得好,应用能使用logback请尽量使用logback ### 中间件 如果项目中有使用到zkClient、,统一使用自己打包的版本,以防日志收集出错或者异常(PS:zk必须为3.4.6版本,尽量使用gradle进行打包部署) ### rpc trace -使用自己打包的dubbox(https://github.com/JThink/dubbox/tree/skyeye-trace),在soa中间件dubbox中封装了rpc的跟踪 +使用自己打包的dubbox([dubbox](https://github.com/JThink/dubbox/tree/skyeye-trace-1.0.0)),在soa中间件dubbox中封装了rpc的跟踪 ``` shell compile "com.101tec:zkclient:0.9.1-up" -compile ("com.alibaba:dubbo:2.8.4-skyeye-trace") { +compile ("com.alibaba:dubbo:2.8.4-skyeye-trace-1.0.0") { exclude group: 
'org.springframework', module: 'spring' } ``` +### spring boot + +如果项目使用的是spring-boot+logback,那么需要将spring-boot对logback的初始化去掉,防止初始化的时候在zk注册两次而报错,具体见我的几篇博客就可以解决: + +http://blog.csdn.net/jthink_/article/details/52513963 + +http://blog.csdn.net/jthink_/article/details/52613953 + +http://blog.csdn.net/jthink_/article/details/73106745 + ## 应用注册中心设计 ### zookeeper注册中心节点tree ![](zknode.png) diff --git a/skyeye-collector/skyeye-collector-metrics/src/main/resources/config/collector-metrics.properties b/skyeye-collector/skyeye-collector-metrics/src/main/resources/properties/collector-metrics.properties similarity index 100% rename from skyeye-collector/skyeye-collector-metrics/src/main/resources/config/collector-metrics.properties rename to skyeye-collector/skyeye-collector-metrics/src/main/resources/properties/collector-metrics.properties diff --git a/skyeye-collector/skyeye-collector-trace/src/main/resources/config/collector-trace.properties b/skyeye-collector/skyeye-collector-trace/src/main/resources/properties/collector-trace.properties similarity index 100% rename from skyeye-collector/skyeye-collector-trace/src/main/resources/config/collector-trace.properties rename to skyeye-collector/skyeye-collector-trace/src/main/resources/properties/collector-trace.properties From aeb304d2136f904da8ef9cf0a4634f64c3a1148b Mon Sep 17 00:00:00 2001 From: JThink Date: Tue, 8 Aug 2017 15:48:14 +0800 Subject: [PATCH 27/27] add file --- .../src/main/resources/shell/create-topic.sh | 6 ++ .../src/main/resources/shell/create-index.py | 59 ++++++++++++++++++ .../src/main/resources/shell/install.sh | 6 ++ .../src/main/resources/shell/start.sh | 12 ++++ .../src/main/resources/shell/create-index.py | 60 +++++++++++++++++++ .../src/main/resources/shell/install.sh | 6 ++ .../src/main/resources/shell/start.sh | 12 ++++ .../src/main/resources/shell/hbase | 3 + 8 files changed, 164 insertions(+) create mode 100644 skyeye-collector/skyeye-collector-core/src/main/resources/shell/create-topic.sh create 
mode 100644 skyeye-collector/skyeye-collector-indexer/src/main/resources/shell/create-index.py create mode 100644 skyeye-collector/skyeye-collector-indexer/src/main/resources/shell/install.sh create mode 100644 skyeye-collector/skyeye-collector-indexer/src/main/resources/shell/start.sh create mode 100644 skyeye-collector/skyeye-collector-metrics/src/main/resources/shell/create-index.py create mode 100644 skyeye-collector/skyeye-collector-metrics/src/main/resources/shell/install.sh create mode 100644 skyeye-collector/skyeye-collector-metrics/src/main/resources/shell/start.sh create mode 100644 skyeye-collector/skyeye-collector-trace/src/main/resources/shell/hbase diff --git a/skyeye-collector/skyeye-collector-core/src/main/resources/shell/create-topic.sh b/skyeye-collector/skyeye-collector-core/src/main/resources/shell/create-topic.sh new file mode 100644 index 0000000..a6ae5d3 --- /dev/null +++ b/skyeye-collector/skyeye-collector-core/src/main/resources/shell/create-topic.sh @@ -0,0 +1,6 @@ +#!/bin/sh + +# zk servers, 192.168.88.70:2181,192.168.88.71:2181,192.168.88.72:2181/kafka/0.10.0.1 +urls=$1 + +bin/kafka-topics.sh --create --zookeeper ${urls} --replication-factor 3 --partitions 3 --topic app-log \ No newline at end of file diff --git a/skyeye-collector/skyeye-collector-indexer/src/main/resources/shell/create-index.py b/skyeye-collector/skyeye-collector-indexer/src/main/resources/shell/create-index.py new file mode 100644 index 0000000..5b05503 --- /dev/null +++ b/skyeye-collector/skyeye-collector-indexer/src/main/resources/shell/create-index.py @@ -0,0 +1,59 @@ +#!/usr/bin/python198 +# -*- coding: UTF-8 -*- + +import sys +import datetime +from pyelasticsearch import ElasticSearch +from pyelasticsearch import bulk_chunks + +def main(argv): + index = argv[1] + doc_type = 'log' + url = [] + urls = argv[2].strip().split(',') + for u in urls: + url.append(u) + + es = ElasticSearch(urls = url, timeout = 60, max_retries = 0) + create_mapping(es, index, doc_type) + 
+def create_mapping(es, index, doc_type): + mapping = { + 'settings': { + 'index': { + 'number_of_replicas': 1, + 'number_of_shards': 6, + 'refresh_interval': '5s' + } + }, + 'mappings': { + '_default_': { + '_all': { + 'enabled': False + } + }, + doc_type : { + 'properties' : { + 'day': { 'type': 'string', 'index': 'not_analyzed'}, + 'time': { 'type': 'string', 'index': 'not_analyzed'}, + 'nanoTime': { 'type': 'string', 'index': 'not_analyzed'}, + 'created': { 'type': 'date', 'index': 'not_analyzed'}, + 'app': { 'type': 'string', 'index': 'not_analyzed'}, + 'host': { 'type': 'string', 'index': 'not_analyzed'}, + 'thread': { 'type': 'string', 'index': 'not_analyzed'}, + 'level': { 'type': 'string', 'index': 'not_analyzed'}, + 'eventType': { 'type': 'string', 'index': 'not_analyzed'}, + 'pack': { 'type': 'string', 'index': 'not_analyzed'}, + 'clazz': { 'type': 'string', 'index': 'not_analyzed'}, + 'line': { 'type': 'string', 'index': 'not_analyzed'}, + 'messageSmart': { 'type': 'string', 'analyzer': 'ik_smart', 'search_analyzer': 'ik_smart', 'include_in_all': 'true', 'boost': 8}, + 'messageMax': { 'type': 'string', 'analyzer': 'ik_max_word', 'search_analyzer': 'ik_max_word', 'include_in_all': 'true', 'boost': 8} + } + } + } + } + es.create_index(index = index, settings = mapping) + + +if __name__ == '__main__': + main(sys.argv) \ No newline at end of file diff --git a/skyeye-collector/skyeye-collector-indexer/src/main/resources/shell/install.sh b/skyeye-collector/skyeye-collector-indexer/src/main/resources/shell/install.sh new file mode 100644 index 0000000..11a6e96 --- /dev/null +++ b/skyeye-collector/skyeye-collector-indexer/src/main/resources/shell/install.sh @@ -0,0 +1,6 @@ +#!/bin/bash + +echo "--> install python-pip module" +sudo apt-get install python-pip +echo "--> install elasticsearch module" +sudo pip install pyelasticsearch \ No newline at end of file diff --git a/skyeye-collector/skyeye-collector-indexer/src/main/resources/shell/start.sh 
b/skyeye-collector/skyeye-collector-indexer/src/main/resources/shell/start.sh new file mode 100644 index 0000000..d38d58b --- /dev/null +++ b/skyeye-collector/skyeye-collector-indexer/src/main/resources/shell/start.sh @@ -0,0 +1,12 @@ +#!/bin/bash + +# 索引 +# urls http://192.168.88.70:9200,http://192.168.88.71:9200,http://192.168.88.72:9200 +# call: bash start.sh app-log http://192.168.88.70:9200,http://192.168.88.71:9200,http://192.168.88.72:9200 + +index=$1 +urls=$2 + +echo '创建索引开始' +python create-index.py ${index} ${urls} +echo '创建索引结束' \ No newline at end of file diff --git a/skyeye-collector/skyeye-collector-metrics/src/main/resources/shell/create-index.py b/skyeye-collector/skyeye-collector-metrics/src/main/resources/shell/create-index.py new file mode 100644 index 0000000..bada3c6 --- /dev/null +++ b/skyeye-collector/skyeye-collector-metrics/src/main/resources/shell/create-index.py @@ -0,0 +1,60 @@ +#!/usr/bin/python198 +# -*- coding: UTF-8 -*- + +import sys +import datetime +from pyelasticsearch import ElasticSearch +from pyelasticsearch import bulk_chunks + +def main(argv): + index = argv[1] + doc_type = 'log' + url = [] + urls = argv[2].strip().split(',') + for u in urls: + url.append(u) + + es = ElasticSearch(urls = url, timeout = 60, max_retries = 0) + create_mapping(es, index, doc_type) + +def create_mapping(es, index, doc_type): + mapping = { + 'settings': { + 'index': { + 'number_of_replicas': 1, + 'number_of_shards': 6, + 'refresh_interval': '5s' + } + }, + 'mappings': { + '_default_': { + '_all': { + 'enabled': False + } + }, + doc_type : { + 'properties' : { + 'created': { 'type': 'date', 'index': 'not_analyzed'}, + 'time': { 'type': 'string', 'index': 'not_analyzed'}, + 'day': { 'type': 'string', 'index': 'not_analyzed'}, + 'week': { 'type': 'string', 'index': 'not_analyzed'}, + 'month': { 'type': 'string', 'index': 'not_analyzed'}, + 'year': { 'type': 'string', 'index': 'not_analyzed'}, + 'app': { 'type': 'string', 'index': 'not_analyzed'}, + 
'host': { 'type': 'string', 'index': 'not_analyzed'}, + 'eventType': { 'type': 'string', 'index': 'not_analyzed'}, + 'account': { 'type': 'string', 'index': 'not_analyzed'}, + 'uniqueName': { 'type': 'string', 'index': 'not_analyzed'}, + 'cost': { 'type': 'long', 'index': 'not_analyzed'}, + 'status': { 'type': 'string', 'index': 'not_analyzed'}, + 'messageSmart': { 'type': 'string', 'analyzer': 'ik_smart', 'search_analyzer': 'ik_smart', 'include_in_all': 'true', 'boost': 8}, + 'messageMax': { 'type': 'string', 'analyzer': 'ik_max_word', 'search_analyzer': 'ik_max_word', 'include_in_all': 'true', 'boost': 8} + } + } + } + } + es.create_index(index = index, settings = mapping) + + +if __name__ == '__main__': + main(sys.argv) \ No newline at end of file diff --git a/skyeye-collector/skyeye-collector-metrics/src/main/resources/shell/install.sh b/skyeye-collector/skyeye-collector-metrics/src/main/resources/shell/install.sh new file mode 100644 index 0000000..11a6e96 --- /dev/null +++ b/skyeye-collector/skyeye-collector-metrics/src/main/resources/shell/install.sh @@ -0,0 +1,6 @@ +#!/bin/bash + +echo "--> install python-pip module" +sudo apt-get install python-pip +echo "--> install elasticsearch module" +sudo pip install pyelasticsearch \ No newline at end of file diff --git a/skyeye-collector/skyeye-collector-metrics/src/main/resources/shell/start.sh b/skyeye-collector/skyeye-collector-metrics/src/main/resources/shell/start.sh new file mode 100644 index 0000000..4ef4c2e --- /dev/null +++ b/skyeye-collector/skyeye-collector-metrics/src/main/resources/shell/start.sh @@ -0,0 +1,12 @@ +#!/bin/bash + +# 索引 +# urls http://192.168.88.70:9200,http://192.168.88.71:9200,http://192.168.88.72:9200 +# call: bash start.sh event-log http://192.168.88.70:9200,http://192.168.88.71:9200,http://192.168.88.72:9200 + +index=$1 +urls=$2 + +echo '创建索引开始' +python create-index.py ${index} ${urls} +echo '创建索引结束' \ No newline at end of file diff --git 
a/skyeye-collector/skyeye-collector-trace/src/main/resources/shell/hbase b/skyeye-collector/skyeye-collector-trace/src/main/resources/shell/hbase new file mode 100644 index 0000000..a75fc56 --- /dev/null +++ b/skyeye-collector/skyeye-collector-trace/src/main/resources/shell/hbase @@ -0,0 +1,3 @@ +create 'trace', {NAME => 'span', VERSIONS => 1, COMPRESSION => 'SNAPPY'}, {NUMREGIONS => 3, SPLITALGO => 'HexStringSplit'} +create 'time_consume', {NAME => 'trace', VERSIONS => 1, COMPRESSION => 'SNAPPY'}, {NUMREGIONS => 3, SPLITALGO => 'HexStringSplit'} +create 'annotation', {NAME => 'trace', VERSIONS => 1, COMPRESSION => 'SNAPPY'}, {NUMREGIONS => 3, SPLITALGO => 'HexStringSplit'}