diff --git a/.github/dependabot.yml b/.github/dependabot.yml
index 329a54799eb..c1a05085d85 100644
--- a/.github/dependabot.yml
+++ b/.github/dependabot.yml
@@ -109,3 +109,24 @@ updates:
     directory: /plugin/storage/cassandra
     schedule:
       interval: daily
+  - package-ecosystem: docker
+    directory: /docker-compose/elasticsearch
+    schedule:
+      interval: daily
+    allow:
+      - dependency-name: "docker.elastic.co/elasticsearch/elasticsearch"
+        update-types: ["version-update:semver-minor"]
+  - package-ecosystem: docker
+    directory: /docker-compose/opensearch
+    schedule:
+      interval: daily
+    allow:
+      - dependency-name: "docker.elastic.co/opensearch/opensearch"
+        update-types: ["version-update:semver-minor"]
+  - package-ecosystem: docker
+    directory: /docker-compose/kafka-integration-test
+    schedule:
+      interval: daily
+    allow:
+      - dependency-name: "bitnami/kafka"
+        update-types: ["version-update:semver-minor"]
\ No newline at end of file
diff --git a/.github/workflows/ci-elasticsearch.yml b/.github/workflows/ci-elasticsearch.yml
index 5547cc2c694..b05c0ab9d5c 100644
--- a/.github/workflows/ci-elasticsearch.yml
+++ b/.github/workflows/ci-elasticsearch.yml
@@ -22,13 +22,13 @@ jobs:
       fail-fast: false
       matrix:
         version:
-        - major: 7.x
+        - major: 7
           distribution: elasticsearch
           jaeger: v1
-        - major: 8.x
+        - major: 8
           distribution: elasticsearch
           jaeger: v1
-        - major: 8.x
+        - major: 8
           distribution: elasticsearch
           jaeger: v2
     name: ${{ matrix.version.distribution }} ${{ matrix.version.major }} ${{ matrix.version.jaeger }}
@@ -60,7 +60,7 @@
 
     - name: Output ${{ matrix.version.distribution }} logs
-      run: docker logs ${{ steps.test-execution.outputs.cid }}
+      run: docker compose -f ${{ steps.test-execution.outputs.docker_compose_file }} logs
       if: ${{ failure() }}
 
     - name: Upload coverage to codecov
diff --git a/.github/workflows/ci-kafka.yml b/.github/workflows/ci-kafka.yml
index 042c9e04c26..a3a8619b212 100644
--- a/.github/workflows/ci-kafka.yml
+++ b/.github/workflows/ci-kafka.yml
@@ -31,10 +31,11 @@ jobs:
         go-version: 1.22.x
 
     - name: Run kafka integration tests
+      id: test-execution
       run: bash scripts/kafka-integration-test.sh -k
 
-    - name: Output Kafka logs
-      run: docker logs kafka
+    - name: Output Kafka logs on failure
+      run: docker compose -f ${{ steps.test-execution.outputs.docker_compose_file }} logs
       if: ${{ failure() }}
 
     - name: Upload coverage to codecov
diff --git a/.github/workflows/ci-opensearch.yml b/.github/workflows/ci-opensearch.yml
index 0547b6e91b5..c3b0747f132 100644
--- a/.github/workflows/ci-opensearch.yml
+++ b/.github/workflows/ci-opensearch.yml
@@ -22,13 +22,13 @@ jobs:
       fail-fast: false
       matrix:
         version:
-        - major: 1.x
+        - major: 1
          distribution: opensearch
          jaeger: v1
-        - major: 2.x
+        - major: 2
          distribution: opensearch
          jaeger: v1
-        - major: 2.x
+        - major: 2
          distribution: opensearch
          jaeger: v2
     name: ${{ matrix.version.distribution }} ${{ matrix.version.major }} ${{ matrix.version.jaeger }}
@@ -61,7 +61,7 @@
 
     - name: Output ${{ matrix.version.distribution }} logs
-      run: docker logs ${{ steps.test-execution.outputs.cid }}
+      run: docker compose -f ${{ steps.test-execution.outputs.docker_compose_file }} logs
       if: ${{ failure() }}
 
     - name: Upload coverage to codecov
diff --git a/docker-compose/kafka-integration-test/v3.yml b/docker-compose/kafka-integration-test/v3.yml
new file mode 100644
index 00000000000..4e96f051b2c
--- /dev/null
+++ b/docker-compose/kafka-integration-test/v3.yml
@@ -0,0 +1,16 @@
+version: '3.8'
+
+services:
+  kafka:
+    image: bitnami/kafka:3.6.0
+    ports:
+      - "9092:9092"
+    environment:
+      - KAFKA_CFG_NODE_ID=0
+      - KAFKA_CFG_PROCESS_ROLES=controller,broker
+      - KAFKA_CFG_CONTROLLER_QUORUM_VOTERS=0@localhost:9093
+      - KAFKA_CFG_LISTENERS=PLAINTEXT://:9092,CONTROLLER://:9093
+      - KAFKA_CFG_ADVERTISED_LISTENERS=PLAINTEXT://localhost:9092
+      - KAFKA_CFG_LISTENER_SECURITY_PROTOCOL_MAP=CONTROLLER:PLAINTEXT,PLAINTEXT:PLAINTEXT
+      - KAFKA_CFG_CONTROLLER_LISTENER_NAMES=CONTROLLER
+      - KAFKA_CFG_INTER_BROKER_LISTENER_NAME=PLAINTEXT
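
The compose file above replaces the previous docker run invocation with a single-node KRaft broker (no ZooKeeper) exposing PLAINTEXT on 9092. As a rough local sanity check (a sketch based only on the file above, not something this diff adds), the service can be brought up and probed with the same kafka-topics.sh call the test script below uses:

# Sketch: manual bring-up and readiness probe for the broker defined in v3.yml.
docker compose -f docker-compose/kafka-integration-test/v3.yml up -d kafka

# The bitnami image ships the Kafka CLI under /opt/bitnami/kafka/bin; listing
# topics only succeeds once the broker accepts client connections on 9092.
docker compose -f docker-compose/kafka-integration-test/v3.yml \
  exec kafka /opt/bitnami/kafka/bin/kafka-topics.sh \
  --list --bootstrap-server localhost:9092

# Tear the broker back down when finished.
docker compose -f docker-compose/kafka-integration-test/v3.yml down
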
diff --git a/scripts/es-integration-test.sh b/scripts/es-integration-test.sh
index 0539d08ea3a..3e6fee6b626 100755
--- a/scripts/es-integration-test.sh
+++ b/scripts/es-integration-test.sh
@@ -13,20 +13,19 @@ usage() {
 
 check_arg() {
   if [ ! $# -eq 3 ]; then
-    echo "ERROR: need exactly two arguments, "
+    echo "ERROR: need exactly three arguments, "
     usage
   fi
 }
 
+# start the elasticsearch/opensearch container
 setup_db() {
-  local distro=$1
-  local compose_file=$2
+  local compose_file=$1
   docker compose -f "${compose_file}" up -d
-  local cid
-  cid=$(docker compose -f "${compose_file}" ps -q "${distro}")
-  echo "cid=${cid}" >> "$GITHUB_OUTPUT"
+  echo "docker_compose_file=${compose_file}" >> "${GITHUB_OUTPUT:-/dev/null}"
}
 
+# check if the storage is up and running
 wait_for_storage() {
   local distro=$1
   local url=$2
@@ -47,6 +46,7 @@ wait_for_storage() {
     sleep 10
   done
 
+  # if after all the attempts the storage is not accessible, terminate it and exit
   if [[ "$(curl "${params[@]}" "${url}")" != "200" ]]; then
     echo "ERROR: ${distro} is not ready at ${url} after $(( attempt * 10 )) seconds"
     echo "::group::${distro} logs"
@@ -71,7 +71,7 @@ bring_up_storage() {
   do
     echo "attempt $retry"
     if [ "${distro}" = "elasticsearch" ] || [ "${distro}" = "opensearch" ]; then
-      setup_db "${distro}" "${compose_file}"
+      setup_db "${compose_file}"
     else
       echo "Unknown distribution $distro. Valid options are opensearch or elasticsearch"
       usage
@@ -82,7 +82,7 @@
     fi
   done
   if [ ${db_is_up} = "1" ]; then
-  # shellcheck disable=SC2064
+    # shellcheck disable=SC2064
     trap "teardown_storage ${compose_file}" EXIT
   else
     echo "ERROR: unable to start ${distro}"
@@ -90,6 +90,7 @@
   fi
 }
 
+# terminate the elasticsearch/opensearch container
 teardown_storage() {
   local compose_file=$1
   docker compose -f "${compose_file}" down
@@ -112,4 +113,4 @@ main() {
   fi
 }
 
-main "$@"
\ No newline at end of file
+main "$@"
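
One detail worth noting in setup_db above: writing to "${GITHUB_OUTPUT:-/dev/null}" keeps the script usable outside CI. On a GitHub Actions runner the variable points at the step's output file, so docker_compose_file becomes readable by later steps as ${{ steps.test-execution.outputs.docker_compose_file }}; in a local shell the variable is unset and the write is simply discarded. A minimal sketch of both cases (the compose file path here is illustrative, not taken from this diff):

# Local run: GITHUB_OUTPUT is unset, so the redirect falls back to /dev/null.
unset GITHUB_OUTPUT
echo "docker_compose_file=some/compose/file.yml" >> "${GITHUB_OUTPUT:-/dev/null}"

# CI-style run: GitHub Actions pre-creates and exports GITHUB_OUTPUT; emulate
# that with a scratch file and the key=value line becomes a step output.
export GITHUB_OUTPUT=/tmp/github_output.txt   # illustrative path
: > "$GITHUB_OUTPUT"
echo "docker_compose_file=some/compose/file.yml" >> "${GITHUB_OUTPUT:-/dev/null}"
cat "$GITHUB_OUTPUT"   # prints: docker_compose_file=some/compose/file.yml
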
diff --git a/scripts/kafka-integration-test.sh b/scripts/kafka-integration-test.sh
index 5c000338ae5..43f7a47f16f 100644
--- a/scripts/kafka-integration-test.sh
+++ b/scripts/kafka-integration-test.sh
@@ -3,51 +3,42 @@
 set -e
 
 export STORAGE=kafka
+compose_file="docker-compose/kafka-integration-test/v3.yml"
 
-# Function to start Kafka
-start_kafka() {
-  echo "Starting Kafka..."
-
-  docker run --name kafka -d \
-    -p 9092:9092 \
-    -e KAFKA_CFG_NODE_ID=0 \
-    -e KAFKA_CFG_PROCESS_ROLES=controller,broker \
-    -e KAFKA_CFG_CONTROLLER_QUORUM_VOTERS=0@localhost:9093 \
-    -e KAFKA_CFG_LISTENERS=PLAINTEXT://:9092,CONTROLLER://:9093 \
-    -e KAFKA_CFG_ADVERTISED_LISTENERS=PLAINTEXT://localhost:9092 \
-    -e KAFKA_CFG_LISTENER_SECURITY_PROTOCOL_MAP=CONTROLLER:PLAINTEXT,PLAINTEXT:PLAINTEXT \
-    -e KAFKA_CFG_CONTROLLER_LISTENER_NAMES=CONTROLLER \
-    -e KAFKA_CFG_INTER_BROKER_LISTENER_NAME=PLAINTEXT \
-    bitnami/kafka:3.6.0
-}
-
-# Check if the -k parameter is provided or not
+# Check if the -k parameter is provided and start Kafka if it was
 if [ "$1" == "-k" ]; then
-  start_kafka
+  echo "Starting Kafka using Docker Compose..."
+  docker compose -f "${compose_file}" up -d kafka
+  echo "docker_compose_file=${compose_file}" >> "${GITHUB_OUTPUT:-/dev/null}"
 fi
 
+# Check if Kafka is ready by attempting to list topics
+is_kafka_ready() {
+  docker compose -f "${compose_file}" \
+    exec kafka /opt/bitnami/kafka/bin/kafka-topics.sh \
+    --list \
+    --bootstrap-server localhost:9092 \
+    >/dev/null 2>&1
+}
+
 # Set the timeout in seconds
 timeout=180
 # Set the interval between checks in seconds
 interval=5
-
 # Calculate the end time
 end_time=$((SECONDS + timeout))
 
 while [ $SECONDS -lt $end_time ]; do
-  # Check if Kafka is ready by attempting to describe a topic
-  if docker exec kafka /opt/bitnami/kafka/bin/kafka-topics.sh --list --bootstrap-server localhost:9092 >/dev/null 2>&1; then
+  if is_kafka_ready; then
     break
   fi
   echo "Kafka broker not ready, waiting ${interval} seconds"
   sleep $interval
 done
 
-# Check if Kafka is still not available after the timeout
-if ! docker exec kafka /opt/bitnami/kafka/bin/kafka-topics.sh --list --bootstrap-server localhost:9092 >/dev/null 2>&1; then
+if ! is_kafka_ready; then
   echo "Timed out waiting for Kafka to start"
   exit 1
 fi
 
-# Continue with the integration tests
 make storage-integration-test
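
Taken together with the compose file and workflow changes above, the Kafka flow can be exercised end to end on a workstation roughly like this (a sketch assuming Docker Compose v2 and a checkout of the repo; the logs and down commands are ordinary compose usage, not part of the script):

# Start the broker via the new compose file, wait for readiness, run the tests.
bash scripts/kafka-integration-test.sh -k

# On failure, inspect the same compose service the CI "Output Kafka logs" step reads.
docker compose -f docker-compose/kafka-integration-test/v3.yml logs kafka

# The script does not tear the broker down itself, so clean up manually.
docker compose -f docker-compose/kafka-integration-test/v3.yml down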