Skip to content

Commit

Permalink
modified task descriptions for clarity. Upgraded dependencies includi…
Browse files Browse the repository at this point in the history
…ng gradle-analytics. Using docker-compose now for integration testing
  • Loading branch information
stewartbryson committed Feb 7, 2019
1 parent f42e029 commit eac29c0
Show file tree
Hide file tree
Showing 7 changed files with 160 additions and 35 deletions.
1 change: 0 additions & 1 deletion Jenkinsfile
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,6 @@ pipeline {

environment {
GOOGLE_APPLICATION_CREDENTIALS = '/var/lib/jenkins/.gcp/gradle-analytics-build-user.json'
JENKINS_NODE_COOKIE = 'dontKillMe'
}

stages {
Expand Down
19 changes: 6 additions & 13 deletions build.gradle
Original file line number Diff line number Diff line change
@@ -1,22 +1,13 @@
// added because the github-release plugin used implementation instead of api/compile for plugin dependencies
// An issue exists for this, so hopefully it will be corrected soon
buildscript {
dependencies {
classpath 'com.squareup.okhttp3:okhttp:3.12.0'
classpath 'com.j256.simplemagic:simplemagic:1.10'
classpath 'org.zeroturnaround:zt-exec:1.10'
}
}

plugins {
id 'groovy'
id 'java-gradle-plugin'
id "com.gradle.plugin-publish" version "0.10.0"
id "com.gradle.plugin-publish" version "0.10.1"
id "pl.allegro.tech.build.axion-release" version "1.10.0"
id "com.github.breadmoirai.github-release" version "2.2.3"
id "com.github.breadmoirai.github-release" version "2.2.4"
id 'org.unbroken-dome.test-sets' version '2.0.3'
id "com.avast.gradle.docker-compose" version "0.8.13"
id "com.github.ben-manes.versions" version "0.20.0"
id "com.redpillanalytics.gradle-analytics" version "1.1.19"
id "com.redpillanalytics.gradle-analytics" version "1.1.21"
}

// send analytics
Expand Down Expand Up @@ -151,6 +142,8 @@ tasks.publishPlugins.dependsOn publishDocs
task runAllTests {
description 'Run all defined tests.'
group 'verification'
dependsOn composeUp
finalizedBy composeDown
}

tasks.withType(Test) {
Expand Down
146 changes: 146 additions & 0 deletions docker-compose.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,146 @@
---
# Confluent Platform 5.1 stack brought up by the Gradle docker-compose plugin
# (composeUp / composeDown) for integration testing.
# Containers talk to the broker on the internal listener kafka:29092; only the
# Kafka Connect REST port (8083) and the KSQL server (8088) are published to
# the host. All replication factors are 1 because this is a single-broker
# cluster.
version: '2'
services:
  zookeeper:
    container_name: zookeeper
    image: confluentinc/cp-zookeeper:5.1.0
    environment:
      ZOOKEEPER_CLIENT_PORT: 2181
      ZOOKEEPER_TICK_TIME: 2000

  kafka:
    container_name: kafka
    image: confluentinc/cp-enterprise-kafka:5.1.0
    depends_on:
      - zookeeper
    #ports:
      # This would expose 9092 for external connections to the broker
      # Use kafka:29092 for connections internal on the docker network
      # See https://rmoff.net/2018/08/02/kafka-listeners-explained/ for details
      # - "9092:9092"
    environment:
      KAFKA_BROKER_ID: 1
      KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT
      KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT
      KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:29092,PLAINTEXT_HOST://localhost:9092
      # quoted so compose passes the literal string "true", not a YAML boolean
      KAFKA_AUTO_CREATE_TOPICS_ENABLE: "true"
      KAFKA_METRIC_REPORTERS: io.confluent.metrics.reporter.ConfluentMetricsReporter
      # single broker => replication factor must stay at 1
      KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
      KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 100
      CONFLUENT_METRICS_REPORTER_BOOTSTRAP_SERVERS: kafka:29092
      CONFLUENT_METRICS_REPORTER_ZOOKEEPER_CONNECT: zookeeper:2181
      CONFLUENT_METRICS_REPORTER_TOPIC_REPLICAS: 1
      CONFLUENT_METRICS_ENABLE: "true"
      CONFLUENT_SUPPORT_CUSTOMER_ID: "anonymous"

  schema-registry:
    container_name: schema-registry
    image: confluentinc/cp-schema-registry:5.1.0
    depends_on:
      - zookeeper
      - kafka
    environment:
      SCHEMA_REGISTRY_HOST_NAME: schema-registry
      SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: zookeeper:2181

  kafka-connect:
    container_name: kafka-connect
    image: confluentinc/cp-kafka-connect:5.1.0
    ports:
      # quoted per compose convention so the mapping is never misparsed
      - "8083:8083"
    depends_on:
      - zookeeper
      - kafka
      - schema-registry
    environment:
      CONNECT_BOOTSTRAP_SERVERS: "kafka:29092"
      CONNECT_REST_PORT: 8083
      CONNECT_GROUP_ID: compose-connect-group
      CONNECT_CONFIG_STORAGE_TOPIC: docker-connect-configs
      CONNECT_OFFSET_STORAGE_TOPIC: docker-connect-offsets
      CONNECT_STATUS_STORAGE_TOPIC: docker-connect-status
      CONNECT_KEY_CONVERTER: io.confluent.connect.avro.AvroConverter
      CONNECT_KEY_CONVERTER_SCHEMA_REGISTRY_URL: 'http://schema-registry:8081'
      CONNECT_VALUE_CONVERTER: io.confluent.connect.avro.AvroConverter
      CONNECT_VALUE_CONVERTER_SCHEMA_REGISTRY_URL: 'http://schema-registry:8081'
      CONNECT_INTERNAL_KEY_CONVERTER: "org.apache.kafka.connect.json.JsonConverter"
      CONNECT_INTERNAL_VALUE_CONVERTER: "org.apache.kafka.connect.json.JsonConverter"
      CONNECT_REST_ADVERTISED_HOST_NAME: "kafka-connect"
      CONNECT_LOG4J_ROOT_LOGLEVEL: "INFO"
      CONNECT_LOG4J_LOGGERS: "org.apache.kafka.connect.runtime.rest=WARN,org.reflections=ERROR"
      # single broker => internal topic replication factor of 1
      CONNECT_CONFIG_STORAGE_REPLICATION_FACTOR: "1"
      CONNECT_OFFSET_STORAGE_REPLICATION_FACTOR: "1"
      CONNECT_STATUS_STORAGE_REPLICATION_FACTOR: "1"
      CONNECT_PLUGIN_PATH: '/usr/share/java'
      CONNECT_HOST: kafka-connect
    volumes:
      # $PWD is interpolated by docker-compose from the host environment;
      # assumes compose is run from the repository root — TODO confirm
      - $PWD/ksql/ksql-clickstream-demo/demo/:/scripts
      - $PWD/ksql/ksql-clickstream-demo/demo/connect-config:/usr/share/java/null-smt

  ksql-server:
    container_name: ksql-server
    image: confluentinc/cp-ksql-server:5.1.0
    ports:
      - "8088:8088"
    depends_on:
      - kafka
      - schema-registry
    environment:
      # wait up to 300s for the broker before giving up
      KSQL_CUB_KAFKA_TIMEOUT: 300
      KSQL_BOOTSTRAP_SERVERS: kafka:29092
      KSQL_LISTENERS: http://0.0.0.0:8088
      KSQL_KSQL_SCHEMA_REGISTRY_URL: http://schema-registry:8081
      KSQL_KSQL_SERVICE_ID: confluent_rmoff_01

  # interactive KSQL CLI; kept alive via tty so tests can exec into it
  ksql-cli:
    container_name: ksql-cli
    image: confluentinc/cp-ksql-cli:5.1.0
    depends_on:
      - ksql-server
    entrypoint: /bin/sh
    tty: true
    volumes:
      - $PWD/ksql/ksql-clickstream-demo/demo:/usr/share/doc/clickstream

  # utility container for inspecting topics; kept alive via tty
  kafkacat:
    container_name: kafkacat
    # NOTE(review): unpinned `latest` tag — consider pinning to a 5.1.x tag
    # to match the rest of the stack
    image: confluentinc/cp-kafkacat:latest
    depends_on:
      - kafka
    entrypoint: /bin/sh
    tty: true

  # Runs the Kafka KSQL data generator
  datagen:
    container_name: datagen
    # NOTE(review): 5.0.1 differs from the 5.1.0 used elsewhere — presumably
    # intentional (ksql-examples tags lag the platform); verify before bumping
    image: confluentinc/ksql-examples:5.0.1
    depends_on:
      - kafka
      - schema-registry
    # cub waits for broker/schema-registry readiness, then seeds the three
    # clickstream demo topics used by the integration tests
    command: "bash -c 'echo Waiting for Kafka to be ready... && \
              cub kafka-ready -b kafka:29092 1 300 && \
              echo Waiting for Confluent Schema Registry to be ready... && \
              cub sr-ready schema-registry 8081 300 && \
              echo Waiting a few seconds for topic creation to finish... && \
              sleep 20 && \
              ksql-datagen \
                  bootstrap-server=kafka:29092 \
                  quickstart=clickstream_codes \
                  format=json \
                  topic=clickstream_codes \
                  maxInterval=20 \
                  iterations=100 && \
              ksql-datagen \
                  bootstrap-server=kafka:29092 \
                  quickstart=clickstream_users \
                  format=json \
                  topic=clickstream_users \
                  maxInterval=10 \
                  iterations=1000 && \
              ksql-datagen \
                  quickstart=clickstream \
                  format=json \
                  topic=clickstream \
                  maxInterval=100 \
                  bootstrap-server=kafka:29092'"
3 changes: 1 addition & 2 deletions gradle/wrapper/gradle-wrapper.properties
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
#Fri Jan 18 10:16:40 EST 2019
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-5.2-bin.zip
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-5.1.1-all.zip
8 changes: 1 addition & 7 deletions settings.gradle
Original file line number Diff line number Diff line change
@@ -1,9 +1,3 @@
rootProject.name = 'gradle-confluent'

if (file('../gradle-analytics').exists()) {
includeBuild('../gradle-analytics') {
dependencySubstitution {
substitute module('gradle.plugin.com.redpillanalytics:gradle-analytics') with project(':')
}
}
includeBuild('../gradle-analytics')
}
Original file line number Diff line number Diff line change
Expand Up @@ -43,35 +43,32 @@ class PipelineExecuteTask extends PipelineTask {
boolean fromBeginning = false

/**
* When defined, required TERMINATE statements are not auto-generated and executed for all currently running queries.
* When defined, applicable TERMINATE statements are not auto-generated and executed.
*/
@Input
@Option(option = 'no-terminate',
description = 'When defined, required TERMINATE statements are not auto-generated and executed for all currently running queries.'
description = 'When defined, applicable TERMINATE statements are not auto-generated and executed.'
)
boolean noTerminate

/**
* When defined, applicable DROP statements are not auto-generated and executed for all existing tables and streams.
* When defined, applicable DROP statements are not auto-generated and executed.
*/
@Input
@Option(option = 'no-drop',
description = 'When defined, applicable DROP statements are not auto-generated and executed for all existing tables and streams.'
description = 'When defined, applicable DROP statements are not auto-generated and executed.'
)
boolean noDrop

/**
* When defined, CREATE TABLE or STREAM statements found in SQL scripts are not executed. Used primarily for auto-generating and executing associated DROP and/or TERMINATE statements.
* When defined, CREATE statements found in SQL scripts are not executed. Used primarily for auto-generating and executing applicable DROP and/or TERMINATE statements.
*/
@Input
@Option(option = 'no-create',
description = 'When defined, CREATE TABLE or STREAM statements found in SQL scripts are not executed. Used primarily for auto-generating and executing associated DROP and/or TERMINATE statements.'
description = 'When defined, CREATE statements in SQL scripts are not executed. Used primarily for auto-generating and executing applicable DROP and/or TERMINATE statements.'
)
boolean noCreate

/**
* The main Gradle Task method.
*/
@TaskAction
def executePipelines() {

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,9 +23,6 @@ class PipelineScriptTask extends PipelineTask {
return createScript
}

/**
* The main Gradle Task method.
*/
@TaskAction
def pipelineScript() {
createScript()
Expand Down

0 comments on commit eac29c0

Please sign in to comment.