From 40337c52fbf578c8a7f31d5efc2cd3c797b472b5 Mon Sep 17 00:00:00 2001 From: Eric Fu Date: Mon, 13 Mar 2023 06:55:45 +0000 Subject: [PATCH 01/12] Migrate demo project here as integration tests --- integration_test/README.md | 46 + integration_test/ad-click/create_mv.sql | 13 + integration_test/ad-click/create_source.sql | 13 + integration_test/ad-click/data_check | 1 + integration_test/ad-click/docker-compose.yml | 60 ++ integration_test/ad-click/query.sql | 6 + integration_test/ad-ctr/create_mv.sql | 64 ++ integration_test/ad-ctr/create_source.sql | 20 + integration_test/ad-ctr/data_check | 1 + integration_test/ad-ctr/docker-compose.yml | 60 ++ integration_test/ad-ctr/query.sql | 6 + integration_test/cdn-metrics/create_mv.sql | 79 ++ .../cdn-metrics/create_source.sql | 26 + integration_test/cdn-metrics/data_check | 1 + .../cdn-metrics/docker-compose.yml | 60 ++ integration_test/cdn-metrics/query.sql | 8 + integration_test/clickstream/create_mv.sql | 34 + .../clickstream/create_source.sql | 14 + integration_test/clickstream/data_check | 1 + .../clickstream/docker-compose.yml | 60 ++ integration_test/clickstream/query.sql | 19 + integration_test/datagen/.gitignore | 1 + integration_test/datagen/.goreleaser.yaml | 21 + integration_test/datagen/Dockerfile | 8 + integration_test/datagen/ad_click/ad_click.go | 58 ++ integration_test/datagen/ad_ctr/ad_ctr.go | 111 +++ .../datagen/cdn_metrics/cdn_metrics.go | 32 + integration_test/datagen/cdn_metrics/nics.go | 108 +++ integration_test/datagen/cdn_metrics/tcp.go | 88 ++ .../datagen/clickstream/clickstream.go | 153 ++++ integration_test/datagen/delivery/delivery.go | 73 ++ .../datagen/ecommerce/ecommerce.go | 139 +++ integration_test/datagen/gen/generator.go | 96 ++ integration_test/datagen/go.mod | 78 ++ integration_test/datagen/go.sum | 835 ++++++++++++++++++ .../datagen/livestream/livestream.go | 147 +++ .../datagen/livestream/proto/livestream.pb.go | 256 ++++++ integration_test/datagen/load_gen.go | 133 +++ integration_test/datagen/main.go | 214 +++++ integration_test/datagen/nexmark/auction.go | 67 ++ integration_test/datagen/sink/kafka/kafka.go | 139 +++ .../datagen/sink/kinesis/kinesis.go | 52 ++ integration_test/datagen/sink/mysql/mysql.go | 51 ++ .../datagen/sink/postgres/postgres.go | 47 + .../datagen/sink/pulsar/pulsar.go | 60 ++ integration_test/datagen/sink/sink.go | 63 ++ integration_test/datagen/twitter/avro.go | 45 + .../datagen/twitter/proto/twitter.pb.go | 347 ++++++++ integration_test/datagen/twitter/twitter.go | 173 ++++ .../datagen/twitter/twitter_example.json | 14 + integration_test/delivery/delivery.sql | 25 + integration_test/delivery/docker-compose.yml | 60 ++ integration_test/ecommerce/ecommerce.sql | 60 ++ integration_test/iceberg-sink/README.md | 36 + integration_test/iceberg-sink/create_mv.sql | 7 + integration_test/iceberg-sink/create_sink.sql | 10 + .../iceberg-sink/create_source.sql | 19 + .../iceberg-sink/docker-compose.yml | 98 ++ .../iceberg-sink/iceberg-query.sql | 1 + .../iceberg-sink/mysql_prepare.sql | 15 + .../presto-with-iceberg/Dockerfile | 5 + .../presto-with-iceberg/hadoop-catalog.xml | 22 + .../presto-with-iceberg/iceberg.properties | 6 + .../presto-with-iceberg/log.properties | 2 + .../iceberg-sink/spark-script/.gitignore | 3 + .../spark-script/create-table.sql | 11 + .../iceberg-sink/spark-script/query-table.sql | 1 + .../iceberg-sink/spark-script/run-sql-file.sh | 13 + integration_test/livestream/create_mv.sql | 69 ++ integration_test/livestream/create_source.sql | 26 + 
integration_test/livestream/data_check | 1 + .../livestream/docker-compose.yml | 62 ++ integration_test/livestream/livestream.proto | 19 + integration_test/livestream/pb/create_mv.sql | 62 ++ .../livestream/pb/create_source.sql | 6 + integration_test/livestream/query.sql | 19 + integration_test/livestream/schema | 18 + integration_test/mysql-cdc/create_mv.sql | 8 + integration_test/mysql-cdc/create_source.sql | 18 + integration_test/mysql-cdc/data_check | 1 + integration_test/mysql-cdc/docker-compose.yml | 78 ++ integration_test/mysql-cdc/mysql_prepare.sql | 28 + integration_test/mysql-cdc/query.sql | 6 + integration_test/mysql-sink/create_mv.sql | 16 + integration_test/mysql-sink/create_source.sql | 14 + integration_test/mysql-sink/data_check | 1 + .../mysql-sink/docker-compose.yml | 91 ++ integration_test/mysql-sink/mysql_prepare.sql | 4 + integration_test/mysql-sink/query.sql | 6 + integration_test/postgres-cdc/create_mv.sql | 28 + .../postgres-cdc/create_source.sql | 41 + integration_test/postgres-cdc/data_check | 1 + .../postgres-cdc/docker-compose.yml | 94 ++ .../postgres-cdc/postgres_prepare.sql | 112 +++ integration_test/postgres-cdc/query.sql | 6 + integration_test/postgres-sink/README.md | 16 + integration_test/postgres-sink/create_mv.sql | 16 + .../postgres-sink/create_source.sql | 14 + integration_test/postgres-sink/data_check | 1 + .../postgres-sink/docker-compose.yml | 94 ++ .../postgres-sink/postgres_prepare.sql | 4 + integration_test/postgres-sink/query.sql | 6 + integration_test/prometheus/create_mv.sql | 16 + integration_test/prometheus/create_source.sql | 13 + integration_test/prometheus/create_user.sql | 6 + integration_test/prometheus/data_check | 1 + .../prometheus/docker-compose.yml | 94 ++ integration_test/prometheus/prometheus.yaml | 37 + integration_test/prometheus/query.sql | 8 + .../schema-registry/create_mv.sql | 10 + .../schema-registry/create_source.sql | 8 + integration_test/schema-registry/data_check | 1 + integration_test/schema-registry/datagen.py | 144 +++ .../schema-registry/docker-compose.yml | 66 ++ integration_test/schema-registry/query.sql | 6 + integration_test/schema-registry/readme.md | 96 ++ integration_test/superset/create_mv.sql | 13 + integration_test/superset/create_source.sql | 26 + integration_test/superset/docker-compose.yml | 135 +++ integration_test/superset/docker/.env-non-dev | 46 + .../superset/docker/docker-bootstrap.sh | 51 ++ .../superset/docker/docker-init.sh | 79 ++ .../superset/docker/pythonpath_dev/.gitignore | 23 + .../docker/pythonpath_dev/superset_config.py | 124 +++ .../superset/docker/requirements-local.txt | 1 + .../superset/docker/run-server.sh | 33 + integration_test/superset/query.sql | 6 + integration_test/twitter-pulsar/create_mv.sql | 34 + .../twitter-pulsar/create_source.sql | 19 + .../twitter-pulsar/docker-compose.yml | 67 ++ integration_test/twitter-pulsar/query.sql | 8 + integration_test/twitter/avro.json | 27 + integration_test/twitter/avro/create_mv.sql | 21 + .../twitter/avro/create_source.sql | 6 + integration_test/twitter/create_mv.sql | 21 + integration_test/twitter/create_source.sql | 19 + integration_test/twitter/data_check | 1 + integration_test/twitter/docker-compose.yml | 60 ++ integration_test/twitter/pb/create_mv.sql | 21 + integration_test/twitter/pb/create_source.sql | 6 + integration_test/twitter/query.sql | 8 + integration_test/twitter/schema | 19 + integration_test/twitter/twitter.proto | 24 + 143 files changed, 6985 insertions(+) create mode 100644 integration_test/README.md create mode 
100644 integration_test/ad-click/create_mv.sql create mode 100644 integration_test/ad-click/create_source.sql create mode 100644 integration_test/ad-click/data_check create mode 100644 integration_test/ad-click/docker-compose.yml create mode 100644 integration_test/ad-click/query.sql create mode 100644 integration_test/ad-ctr/create_mv.sql create mode 100644 integration_test/ad-ctr/create_source.sql create mode 100644 integration_test/ad-ctr/data_check create mode 100644 integration_test/ad-ctr/docker-compose.yml create mode 100644 integration_test/ad-ctr/query.sql create mode 100644 integration_test/cdn-metrics/create_mv.sql create mode 100644 integration_test/cdn-metrics/create_source.sql create mode 100644 integration_test/cdn-metrics/data_check create mode 100644 integration_test/cdn-metrics/docker-compose.yml create mode 100644 integration_test/cdn-metrics/query.sql create mode 100644 integration_test/clickstream/create_mv.sql create mode 100644 integration_test/clickstream/create_source.sql create mode 100644 integration_test/clickstream/data_check create mode 100644 integration_test/clickstream/docker-compose.yml create mode 100644 integration_test/clickstream/query.sql create mode 100644 integration_test/datagen/.gitignore create mode 100644 integration_test/datagen/.goreleaser.yaml create mode 100644 integration_test/datagen/Dockerfile create mode 100644 integration_test/datagen/ad_click/ad_click.go create mode 100644 integration_test/datagen/ad_ctr/ad_ctr.go create mode 100644 integration_test/datagen/cdn_metrics/cdn_metrics.go create mode 100644 integration_test/datagen/cdn_metrics/nics.go create mode 100644 integration_test/datagen/cdn_metrics/tcp.go create mode 100644 integration_test/datagen/clickstream/clickstream.go create mode 100644 integration_test/datagen/delivery/delivery.go create mode 100644 integration_test/datagen/ecommerce/ecommerce.go create mode 100644 integration_test/datagen/gen/generator.go create mode 100644 integration_test/datagen/go.mod create mode 100644 integration_test/datagen/go.sum create mode 100644 integration_test/datagen/livestream/livestream.go create mode 100644 integration_test/datagen/livestream/proto/livestream.pb.go create mode 100644 integration_test/datagen/load_gen.go create mode 100644 integration_test/datagen/main.go create mode 100644 integration_test/datagen/nexmark/auction.go create mode 100644 integration_test/datagen/sink/kafka/kafka.go create mode 100644 integration_test/datagen/sink/kinesis/kinesis.go create mode 100644 integration_test/datagen/sink/mysql/mysql.go create mode 100644 integration_test/datagen/sink/postgres/postgres.go create mode 100644 integration_test/datagen/sink/pulsar/pulsar.go create mode 100644 integration_test/datagen/sink/sink.go create mode 100644 integration_test/datagen/twitter/avro.go create mode 100644 integration_test/datagen/twitter/proto/twitter.pb.go create mode 100644 integration_test/datagen/twitter/twitter.go create mode 100644 integration_test/datagen/twitter/twitter_example.json create mode 100644 integration_test/delivery/delivery.sql create mode 100644 integration_test/delivery/docker-compose.yml create mode 100644 integration_test/ecommerce/ecommerce.sql create mode 100644 integration_test/iceberg-sink/README.md create mode 100644 integration_test/iceberg-sink/create_mv.sql create mode 100644 integration_test/iceberg-sink/create_sink.sql create mode 100644 integration_test/iceberg-sink/create_source.sql create mode 100644 integration_test/iceberg-sink/docker-compose.yml create mode 100644 
integration_test/iceberg-sink/iceberg-query.sql create mode 100644 integration_test/iceberg-sink/mysql_prepare.sql create mode 100644 integration_test/iceberg-sink/presto-with-iceberg/Dockerfile create mode 100644 integration_test/iceberg-sink/presto-with-iceberg/hadoop-catalog.xml create mode 100644 integration_test/iceberg-sink/presto-with-iceberg/iceberg.properties create mode 100644 integration_test/iceberg-sink/presto-with-iceberg/log.properties create mode 100644 integration_test/iceberg-sink/spark-script/.gitignore create mode 100644 integration_test/iceberg-sink/spark-script/create-table.sql create mode 100644 integration_test/iceberg-sink/spark-script/query-table.sql create mode 100644 integration_test/iceberg-sink/spark-script/run-sql-file.sh create mode 100644 integration_test/livestream/create_mv.sql create mode 100644 integration_test/livestream/create_source.sql create mode 100644 integration_test/livestream/data_check create mode 100644 integration_test/livestream/docker-compose.yml create mode 100644 integration_test/livestream/livestream.proto create mode 100644 integration_test/livestream/pb/create_mv.sql create mode 100644 integration_test/livestream/pb/create_source.sql create mode 100644 integration_test/livestream/query.sql create mode 100644 integration_test/livestream/schema create mode 100644 integration_test/mysql-cdc/create_mv.sql create mode 100644 integration_test/mysql-cdc/create_source.sql create mode 100644 integration_test/mysql-cdc/data_check create mode 100644 integration_test/mysql-cdc/docker-compose.yml create mode 100644 integration_test/mysql-cdc/mysql_prepare.sql create mode 100644 integration_test/mysql-cdc/query.sql create mode 100644 integration_test/mysql-sink/create_mv.sql create mode 100644 integration_test/mysql-sink/create_source.sql create mode 100644 integration_test/mysql-sink/data_check create mode 100644 integration_test/mysql-sink/docker-compose.yml create mode 100644 integration_test/mysql-sink/mysql_prepare.sql create mode 100644 integration_test/mysql-sink/query.sql create mode 100644 integration_test/postgres-cdc/create_mv.sql create mode 100644 integration_test/postgres-cdc/create_source.sql create mode 100644 integration_test/postgres-cdc/data_check create mode 100644 integration_test/postgres-cdc/docker-compose.yml create mode 100644 integration_test/postgres-cdc/postgres_prepare.sql create mode 100644 integration_test/postgres-cdc/query.sql create mode 100644 integration_test/postgres-sink/README.md create mode 100644 integration_test/postgres-sink/create_mv.sql create mode 100644 integration_test/postgres-sink/create_source.sql create mode 100644 integration_test/postgres-sink/data_check create mode 100644 integration_test/postgres-sink/docker-compose.yml create mode 100644 integration_test/postgres-sink/postgres_prepare.sql create mode 100644 integration_test/postgres-sink/query.sql create mode 100644 integration_test/prometheus/create_mv.sql create mode 100644 integration_test/prometheus/create_source.sql create mode 100644 integration_test/prometheus/create_user.sql create mode 100644 integration_test/prometheus/data_check create mode 100644 integration_test/prometheus/docker-compose.yml create mode 100644 integration_test/prometheus/prometheus.yaml create mode 100644 integration_test/prometheus/query.sql create mode 100644 integration_test/schema-registry/create_mv.sql create mode 100644 integration_test/schema-registry/create_source.sql create mode 100644 integration_test/schema-registry/data_check create mode 100644 
integration_test/schema-registry/datagen.py create mode 100644 integration_test/schema-registry/docker-compose.yml create mode 100644 integration_test/schema-registry/query.sql create mode 100644 integration_test/schema-registry/readme.md create mode 100644 integration_test/superset/create_mv.sql create mode 100644 integration_test/superset/create_source.sql create mode 100644 integration_test/superset/docker-compose.yml create mode 100644 integration_test/superset/docker/.env-non-dev create mode 100755 integration_test/superset/docker/docker-bootstrap.sh create mode 100755 integration_test/superset/docker/docker-init.sh create mode 100644 integration_test/superset/docker/pythonpath_dev/.gitignore create mode 100644 integration_test/superset/docker/pythonpath_dev/superset_config.py create mode 100644 integration_test/superset/docker/requirements-local.txt create mode 100644 integration_test/superset/docker/run-server.sh create mode 100644 integration_test/superset/query.sql create mode 100644 integration_test/twitter-pulsar/create_mv.sql create mode 100644 integration_test/twitter-pulsar/create_source.sql create mode 100644 integration_test/twitter-pulsar/docker-compose.yml create mode 100644 integration_test/twitter-pulsar/query.sql create mode 100644 integration_test/twitter/avro.json create mode 100644 integration_test/twitter/avro/create_mv.sql create mode 100644 integration_test/twitter/avro/create_source.sql create mode 100644 integration_test/twitter/create_mv.sql create mode 100644 integration_test/twitter/create_source.sql create mode 100644 integration_test/twitter/data_check create mode 100644 integration_test/twitter/docker-compose.yml create mode 100644 integration_test/twitter/pb/create_mv.sql create mode 100644 integration_test/twitter/pb/create_source.sql create mode 100644 integration_test/twitter/query.sql create mode 100644 integration_test/twitter/schema create mode 100644 integration_test/twitter/twitter.proto diff --git a/integration_test/README.md b/integration_test/README.md new file mode 100644 index 0000000000000..0e09cf6f0dba7 --- /dev/null +++ b/integration_test/README.md @@ -0,0 +1,46 @@
+# RisingWave Demos
+
+Here is a gallery of demos that show how to use RisingWave along with its ecosystem tools.
+
+- `ad-click/`: [Build and Maintain Real-time Applications Faster and Easier with Redpanda and RisingWave](https://singularity-data.com/blog/build-with-Redpanda-and-RisingWave)
+- `ad-ctr`: [Perform real-time ad performance analysis](https://www.risingwave.dev/docs/latest/real-time-ad-performance-analysis/)
+- `cdn-metrics`: [Server performance anomaly detection](https://www.risingwave.dev/docs/latest/server-performance-anomaly-detection/)
+- `clickstream`: [Clickstream analysis](https://www.risingwave.dev/docs/latest/clickstream-analysis/)
+- `twitter`: [Fast Twitter events processing](https://www.risingwave.dev/docs/latest/fast-twitter-events-processing/)
+- `twitter-pulsar`: [Tutorial: Pulsar + RisingWave for Fast Twitter Event Processing](https://www.risingwave.com/blog/tutorial-pulsar-risingwave-for-fast-twitter-events-processing/)
+- `livestream`: [Live stream metrics analysis](https://www.risingwave.dev/docs/latest/live-stream-metrics-analysis/)
+
+## Demo Runnability Testing
+
+All of the demos listed above run through a series of tests whenever a PR is merged, including:
+
+- Run the queries mentioned in the demos.
+- Ingest the data in various formats, including Protobuf, Avro, and JSON. Each format is tested individually.
+- For each demo, check that the sources and MVs have successfully ingested data, i.e., that they hold >0 records (see the example query below).
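+
+For instance, for the `ad-click` demo the check boils down to queries like these (a sketch; the test runner reads the relation names from each demo's `data_check` file):
+
+```sql
+-- Every relation listed in data_check must contain at least one record.
+SELECT COUNT(*) FROM ad_source;
+SELECT COUNT(*) FROM m_click_statistic;
+```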
+
+## Workload Generator
+
+The workloads presented in the demos are produced by a Go program in `/datagen`. You can get this tool in multiple ways:
+
+- Download pre-built binaries from [Releases](https://github.com/risingwavelabs/risingwave-demo/releases)
+- Pull the latest Docker image via `docker pull ghcr.io/risingwavelabs/demo-datagen:v1.0.9`.
+- Build the binary from source:
+  ```sh
+  cd datagen && go build
+  ```
+
+To use this tool, run one of the following commands:
+
+```sh
+./datagen --mode clickstream --qps 10 kafka --brokers 127.0.0.1:57801
+```
+
+or
+
+```sh
+./datagen --mode ecommerce --qps 10000000 postgres --port 6875 --user materialize --db materialize
+```
+
+- `--mode clickstream` indicates that it will produce random clickstream data.
+- `--qps 10` sets the QPS limit to 10.
+- `kafka | postgres` chooses the destination. For Kafka, you will need to specify the brokers.
 diff --git a/integration_test/ad-click/create_mv.sql b/integration_test/ad-click/create_mv.sql new file mode 100644 index 0000000000000..828cb6dcff16e --- /dev/null +++ b/integration_test/ad-click/create_mv.sql @@ -0,0 +1,13 @@
+-- The number of clicks on the ad within one minute after the ad was shown.
+create materialized view m_click_statistic as
+select
+    count(user_id) as clicks_count,
+    ad_id
+from
+    ad_source
+where
+    click_timestamp is not null
+    and impression_timestamp < click_timestamp
+    and impression_timestamp + interval '1' minute >= click_timestamp
+group by
+    ad_id;
\ No newline at end of file
 diff --git a/integration_test/ad-click/create_source.sql b/integration_test/ad-click/create_source.sql new file mode 100644 index 0000000000000..532f980ad52ed --- /dev/null +++ b/integration_test/ad-click/create_source.sql @@ -0,0 +1,13 @@
+-- impression_timestamp: The time when the ad was shown.
+-- click_timestamp: The time when the ad was clicked.
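+-- A sample JSON event from the `ad_clicks` topic (illustrative values):
+-- {"user_id": 93427, "ad_id": 7, "click_timestamp": "2023-03-13 06:55:45.07+01:00", "impression_timestamp": "2023-03-13 06:55:44.95+01:00"}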
+create source ad_source ( + user_id bigint, + ad_id bigint, + click_timestamp timestamptz, + impression_timestamp timestamptz +) with ( + connector = 'kafka', + topic = 'ad_clicks', + properties.bootstrap.server = 'message_queue:29092', + scan.startup.mode = 'earliest' +) row format json; \ No newline at end of file diff --git a/integration_test/ad-click/data_check b/integration_test/ad-click/data_check new file mode 100644 index 0000000000000..77a90db334156 --- /dev/null +++ b/integration_test/ad-click/data_check @@ -0,0 +1 @@ +ad_source,m_click_statistic \ No newline at end of file diff --git a/integration_test/ad-click/docker-compose.yml b/integration_test/ad-click/docker-compose.yml new file mode 100644 index 0000000000000..a6cd2f4fe1433 --- /dev/null +++ b/integration_test/ad-click/docker-compose.yml @@ -0,0 +1,60 @@ +--- +version: "3" +services: + compactor-0: + extends: + file: ../../docker/docker-compose.yml + service: compactor-0 + compute-node-0: + extends: + file: ../../docker/docker-compose.yml + service: compute-node-0 + etcd-0: + extends: + file: ../../docker/docker-compose.yml + service: etcd-0 + frontend-node-0: + extends: + file: ../../docker/docker-compose.yml + service: frontend-node-0 + grafana-0: + extends: + file: ../../docker/docker-compose.yml + service: grafana-0 + meta-node-0: + extends: + file: ../../docker/docker-compose.yml + service: meta-node-0 + minio-0: + extends: + file: ../../docker/docker-compose.yml + service: minio-0 + prometheus-0: + extends: + file: ../../docker/docker-compose.yml + service: prometheus-0 + message_queue: + extends: + file: ../../docker/docker-compose.yml + service: message_queue + datagen: + image: ghcr.io/risingwavelabs/demo-datagen:v1.0.9 + depends_on: [message_queue] + command: + - /bin/sh + - -c + - /datagen --mode ad-click --qps 2 kafka --brokers message_queue:29092 + restart: always + container_name: datagen +volumes: + compute-node-0: + external: false + etcd-0: + external: false + grafana-0: + external: false + minio-0: + external: false + prometheus-0: + external: false +name: risingwave-compose diff --git a/integration_test/ad-click/query.sql b/integration_test/ad-click/query.sql new file mode 100644 index 0000000000000..01a1a7af3cba1 --- /dev/null +++ b/integration_test/ad-click/query.sql @@ -0,0 +1,6 @@ +select + * +from + m_click_statistic +limit + 10; \ No newline at end of file diff --git a/integration_test/ad-ctr/create_mv.sql b/integration_test/ad-ctr/create_mv.sql new file mode 100644 index 0000000000000..7c1cbe1cdeac7 --- /dev/null +++ b/integration_test/ad-ctr/create_mv.sql @@ -0,0 +1,64 @@ +CREATE MATERIALIZED VIEW ad_ctr AS +SELECT + ad_clicks.ad_id AS ad_id, + ad_clicks.clicks_count :: NUMERIC / ad_impressions.impressions_count AS ctr +FROM + ( + SELECT + ad_impression.ad_id AS ad_id, + COUNT(*) AS impressions_count + FROM + ad_impression + GROUP BY + ad_id + ) AS ad_impressions + JOIN ( + SELECT + ai.ad_id, + COUNT(*) AS clicks_count + FROM + ad_click AS ac + LEFT JOIN ad_impression AS ai ON ac.bid_id = ai.bid_id + GROUP BY + ai.ad_id + ) AS ad_clicks ON ad_impressions.ad_id = ad_clicks.ad_id; + +CREATE MATERIALIZED VIEW ad_ctr_5min AS +SELECT + ac.ad_id AS ad_id, + ac.clicks_count :: NUMERIC / ai.impressions_count AS ctr, + ai.window_end AS window_end +FROM + ( + SELECT + ad_id, + COUNT(*) AS impressions_count, + window_end + FROM + TUMBLE( + ad_impression, + impression_timestamp, + INTERVAL '5' MINUTE + ) + GROUP BY + ad_id, + window_end + ) AS ai + JOIN ( + SELECT + ai.ad_id, + COUNT(*) AS clicks_count, + 
ai.window_end AS window_end + FROM + TUMBLE(ad_click, click_timestamp, INTERVAL '5' MINUTE) AS ac + INNER JOIN TUMBLE( + ad_impression, + impression_timestamp, + INTERVAL '5' MINUTE + ) AS ai ON ai.bid_id = ac.bid_id + AND ai.window_end = ac.window_end + GROUP BY + ai.ad_id, + ai.window_end + ) AS ac ON ai.ad_id = ac.ad_id + AND ai.window_end = ac.window_end; \ No newline at end of file diff --git a/integration_test/ad-ctr/create_source.sql b/integration_test/ad-ctr/create_source.sql new file mode 100644 index 0000000000000..70a4ca6400981 --- /dev/null +++ b/integration_test/ad-ctr/create_source.sql @@ -0,0 +1,20 @@ +CREATE SOURCE ad_impression ( + bid_id BIGINT, + ad_id BIGINT, + impression_timestamp TIMESTAMPTZ +) WITH ( + connector = 'kafka', + topic = 'ad_impression', + properties.bootstrap.server = 'message_queue:29092', + scan.startup.mode = 'earliest' +) ROW FORMAT JSON; + +CREATE SOURCE ad_click ( + bid_id BIGINT, + click_timestamp TIMESTAMPTZ +) WITH ( + connector = 'kafka', + topic = 'ad_click', + properties.bootstrap.server = 'message_queue:29092', + scan.startup.mode = 'earliest' +) ROW FORMAT JSON; \ No newline at end of file diff --git a/integration_test/ad-ctr/data_check b/integration_test/ad-ctr/data_check new file mode 100644 index 0000000000000..9708f5cb617c3 --- /dev/null +++ b/integration_test/ad-ctr/data_check @@ -0,0 +1 @@ +ad_impression,ad_click,ad_ctr,ad_ctr_5min \ No newline at end of file diff --git a/integration_test/ad-ctr/docker-compose.yml b/integration_test/ad-ctr/docker-compose.yml new file mode 100644 index 0000000000000..bcabd91987077 --- /dev/null +++ b/integration_test/ad-ctr/docker-compose.yml @@ -0,0 +1,60 @@ +--- +version: "3" +services: + compactor-0: + extends: + file: ../../docker/docker-compose.yml + service: compactor-0 + compute-node-0: + extends: + file: ../../docker/docker-compose.yml + service: compute-node-0 + etcd-0: + extends: + file: ../../docker/docker-compose.yml + service: etcd-0 + frontend-node-0: + extends: + file: ../../docker/docker-compose.yml + service: frontend-node-0 + grafana-0: + extends: + file: ../../docker/docker-compose.yml + service: grafana-0 + meta-node-0: + extends: + file: ../../docker/docker-compose.yml + service: meta-node-0 + minio-0: + extends: + file: ../../docker/docker-compose.yml + service: minio-0 + prometheus-0: + extends: + file: ../../docker/docker-compose.yml + service: prometheus-0 + message_queue: + extends: + file: ../../docker/docker-compose.yml + service: message_queue + datagen: + image: ghcr.io/risingwavelabs/demo-datagen:v1.0.9 + depends_on: [message_queue] + command: + - /bin/sh + - -c + - /datagen --mode ad-ctr --qps 2 kafka --brokers message_queue:29092 + restart: always + container_name: datagen +volumes: + compute-node-0: + external: false + etcd-0: + external: false + grafana-0: + external: false + minio-0: + external: false + prometheus-0: + external: false +name: risingwave-compose diff --git a/integration_test/ad-ctr/query.sql b/integration_test/ad-ctr/query.sql new file mode 100644 index 0000000000000..4bd10cc551ee4 --- /dev/null +++ b/integration_test/ad-ctr/query.sql @@ -0,0 +1,6 @@ +SELECT + * +FROM + ad_ctr_5min +limit + 10; \ No newline at end of file diff --git a/integration_test/cdn-metrics/create_mv.sql b/integration_test/cdn-metrics/create_mv.sql new file mode 100644 index 0000000000000..cd2c2a7195158 --- /dev/null +++ b/integration_test/cdn-metrics/create_mv.sql @@ -0,0 +1,79 @@ +CREATE MATERIALIZED VIEW high_util_tcp_metrics AS +SELECT + tcp.device_id AS device_id, + 
tcp.window_end AS window_end, + tcp.metric_name AS metric_name, + tcp.metric_value AS metric_value, + nic.avg_util AS tcp_avg_bandwidth_util +FROM + ( + SELECT + device_id, + window_end, + metric_name, + AVG(metric_value) AS metric_value + FROM + TUMBLE( + tcp_metrics, + report_time, + INTERVAL '1' MINUTE + ) + GROUP BY + device_id, + window_end, + metric_name + ) AS tcp + JOIN ( + SELECT + device_id, + window_end, + AVG((metric_value) / bandwidth) * 100 AS avg_util + FROM + TUMBLE( + nics_metrics, + report_time, + INTERVAL '1' MINUTE + ) + WHERE + metric_name = 'tx_bytes' + AND aggregation = 'avg' + GROUP BY + device_id, + window_end + ) AS nic ON tcp.device_id = nic.device_id + AND tcp.window_end = nic.window_end +WHERE + avg_util >= 50; + +CREATE MATERIALIZED VIEW retrans_incidents AS +SELECT + device_id, + window_end AS trigger_time, + metric_value AS trigger_value +FROM + high_util_tcp_metrics +WHERE + metric_name = 'retrans_rate' + AND metric_value > 0.15; + +CREATE MATERIALIZED VIEW srtt_incidents AS +SELECT + device_id, + window_end AS trigger_time, + metric_value AS trigger_value +FROM + high_util_tcp_metrics +WHERE + metric_name = 'srtt' + AND metric_value > 500.0; + +CREATE MATERIALIZED VIEW download_incidents AS +SELECT + device_id, + window_end AS trigger_time, + metric_value AS trigger_value +FROM + high_util_tcp_metrics +WHERE + metric_name = 'download_speed' + AND metric_value < 200.0; \ No newline at end of file diff --git a/integration_test/cdn-metrics/create_source.sql b/integration_test/cdn-metrics/create_source.sql new file mode 100644 index 0000000000000..3ef8c61af64ba --- /dev/null +++ b/integration_test/cdn-metrics/create_source.sql @@ -0,0 +1,26 @@ +CREATE SOURCE nics_metrics ( + device_id VARCHAR, + metric_name VARCHAR, + aggregation VARCHAR, + nic_name VARCHAR, + report_time TIMESTAMPTZ, + bandwidth DOUBLE PRECISION, + metric_value DOUBLE PRECISION +) WITH ( + connector = 'kafka', + topic = 'nics_metrics', + properties.bootstrap.server = 'message_queue:29092', + scan.startup.mode = 'earliest' +) ROW FORMAT JSON; + +CREATE SOURCE tcp_metrics ( + device_id VARCHAR, + metric_name VARCHAR, + report_time TIMESTAMPTZ, + metric_value DOUBLE PRECISION +) WITH ( + connector = 'kafka', + topic = 'tcp_metrics', + properties.bootstrap.server = 'message_queue:29092', + scan.startup.mode = 'earliest' +) ROW FORMAT JSON; \ No newline at end of file diff --git a/integration_test/cdn-metrics/data_check b/integration_test/cdn-metrics/data_check new file mode 100644 index 0000000000000..b3847bdc60b77 --- /dev/null +++ b/integration_test/cdn-metrics/data_check @@ -0,0 +1 @@ +nics_metrics,tcp_metrics \ No newline at end of file diff --git a/integration_test/cdn-metrics/docker-compose.yml b/integration_test/cdn-metrics/docker-compose.yml new file mode 100644 index 0000000000000..bf1622c1bce32 --- /dev/null +++ b/integration_test/cdn-metrics/docker-compose.yml @@ -0,0 +1,60 @@ +--- +version: "3" +services: + compactor-0: + extends: + file: ../../docker/docker-compose.yml + service: compactor-0 + compute-node-0: + extends: + file: ../../docker/docker-compose.yml + service: compute-node-0 + etcd-0: + extends: + file: ../../docker/docker-compose.yml + service: etcd-0 + frontend-node-0: + extends: + file: ../../docker/docker-compose.yml + service: frontend-node-0 + grafana-0: + extends: + file: ../../docker/docker-compose.yml + service: grafana-0 + meta-node-0: + extends: + file: ../../docker/docker-compose.yml + service: meta-node-0 + minio-0: + extends: + file: 
../../docker/docker-compose.yml + service: minio-0 + prometheus-0: + extends: + file: ../../docker/docker-compose.yml + service: prometheus-0 + message_queue: + extends: + file: ../../docker/docker-compose.yml + service: message_queue + datagen: + image: ghcr.io/risingwavelabs/demo-datagen:v1.0.9 + depends_on: [message_queue] + command: + - /bin/sh + - -c + - /datagen --heavytail --mode cdn-metrics --qps 1000 kafka --brokers message_queue:29092 + restart: always + container_name: datagen +volumes: + compute-node-0: + external: false + etcd-0: + external: false + grafana-0: + external: false + minio-0: + external: false + prometheus-0: + external: false +name: risingwave-compose diff --git a/integration_test/cdn-metrics/query.sql b/integration_test/cdn-metrics/query.sql new file mode 100644 index 0000000000000..7fb778a38640a --- /dev/null +++ b/integration_test/cdn-metrics/query.sql @@ -0,0 +1,8 @@ +SELECT + * +FROM + srtt_incidents +limit + 10; + +SELECT * FROM high_util_tcp_metrics limit 5; diff --git a/integration_test/clickstream/create_mv.sql b/integration_test/clickstream/create_mv.sql new file mode 100644 index 0000000000000..4d1e033470f57 --- /dev/null +++ b/integration_test/clickstream/create_mv.sql @@ -0,0 +1,34 @@ +CREATE MATERIALIZED VIEW thread_view_count AS WITH t AS ( + SELECT + target_id, + COUNT() AS view_count, + window_start as window_time + FROM + TUMBLE( + user_behaviors, + event_timestamp, + INTERVAL '10 minutes' + ) + WHERE + target_type = 'thread' + AND behavior_type = 'show' + GROUP BY + target_id, + window_start +) +SELECT + target_id, + SUM(t.view_count) AS view_count, + window_start as window_time, + window_end +FROM + HOP( + t, + t.window_time, + INTERVAL '10 minutes', + INTERVAL '1440 minutes' + ) +GROUP BY + target_id, + window_start, + window_end; \ No newline at end of file diff --git a/integration_test/clickstream/create_source.sql b/integration_test/clickstream/create_source.sql new file mode 100644 index 0000000000000..7a9e3d3add4c8 --- /dev/null +++ b/integration_test/clickstream/create_source.sql @@ -0,0 +1,14 @@ +CREATE SOURCE user_behaviors ( + user_id VARCHAR, + target_id VARCHAR, + target_type VARCHAR, + event_timestamp TIMESTAMPTZ, + behavior_type VARCHAR, + parent_target_type VARCHAR, + parent_target_id VARCHAR +) WITH ( + connector = 'kafka', + topic = 'user_behaviors', + properties.bootstrap.server = 'message_queue:29092', + scan.startup.mode = 'earliest' +) ROW FORMAT JSON; \ No newline at end of file diff --git a/integration_test/clickstream/data_check b/integration_test/clickstream/data_check new file mode 100644 index 0000000000000..da8bdf62ada77 --- /dev/null +++ b/integration_test/clickstream/data_check @@ -0,0 +1 @@ +user_behaviors,thread_view_count \ No newline at end of file diff --git a/integration_test/clickstream/docker-compose.yml b/integration_test/clickstream/docker-compose.yml new file mode 100644 index 0000000000000..9a8c28447137f --- /dev/null +++ b/integration_test/clickstream/docker-compose.yml @@ -0,0 +1,60 @@ +--- +version: "3" +services: + compactor-0: + extends: + file: ../../docker/docker-compose.yml + service: compactor-0 + compute-node-0: + extends: + file: ../../docker/docker-compose.yml + service: compute-node-0 + etcd-0: + extends: + file: ../../docker/docker-compose.yml + service: etcd-0 + frontend-node-0: + extends: + file: ../../docker/docker-compose.yml + service: frontend-node-0 + grafana-0: + extends: + file: ../../docker/docker-compose.yml + service: grafana-0 + meta-node-0: + extends: + file: 
../../docker/docker-compose.yml + service: meta-node-0 + minio-0: + extends: + file: ../../docker/docker-compose.yml + service: minio-0 + prometheus-0: + extends: + file: ../../docker/docker-compose.yml + service: prometheus-0 + message_queue: + extends: + file: ../../docker/docker-compose.yml + service: message_queue + datagen: + image: ghcr.io/risingwavelabs/demo-datagen:v1.0.9 + depends_on: [message_queue] + command: + - /bin/sh + - -c + - /datagen --mode clickstream --qps 2 kafka --brokers message_queue:29092 + restart: always + container_name: datagen +volumes: + compute-node-0: + external: false + etcd-0: + external: false + grafana-0: + external: false + minio-0: + external: false + prometheus-0: + external: false +name: risingwave-compose diff --git a/integration_test/clickstream/query.sql b/integration_test/clickstream/query.sql new file mode 100644 index 0000000000000..917bdfb717fc9 --- /dev/null +++ b/integration_test/clickstream/query.sql @@ -0,0 +1,19 @@ +--- TODO: we need now() for ad-hoc mode. +-- SELECT +-- * +-- FROM +-- thread_view_count +-- WHERE +-- window_time > ( +-- '2022-7-22 18:43' :: TIMESTAMP - INTERVAL '1 day' +-- ) +-- AND window_time < ( +-- '2022-7-22 18:43' :: TIMESTAMP - INTERVAL '1 day' + INTERVAL '10 minutes' +-- ) +-- AND target_id = 'thread83 +SELECT + * +FROM + thread_view_count +LIMIT + 10; \ No newline at end of file diff --git a/integration_test/datagen/.gitignore b/integration_test/datagen/.gitignore new file mode 100644 index 0000000000000..503f832e84296 --- /dev/null +++ b/integration_test/datagen/.gitignore @@ -0,0 +1 @@ +datagen diff --git a/integration_test/datagen/.goreleaser.yaml b/integration_test/datagen/.goreleaser.yaml new file mode 100644 index 0000000000000..bbfdb6483d5c7 --- /dev/null +++ b/integration_test/datagen/.goreleaser.yaml @@ -0,0 +1,21 @@ +before: + hooks: + - go mod tidy +builds: + - env: + - CGO_ENABLED=0 + goos: + - linux + - windows + - darwin + binary: datagen +checksum: + name_template: "checksums.txt" +snapshot: + name_template: "{{ incpatch .Version }}-next" +changelog: + sort: asc + filters: + exclude: + - "^docs:" + - "^test:" diff --git a/integration_test/datagen/Dockerfile b/integration_test/datagen/Dockerfile new file mode 100644 index 0000000000000..59354cbecab59 --- /dev/null +++ b/integration_test/datagen/Dockerfile @@ -0,0 +1,8 @@ +FROM golang as builder + +ADD . /datagen-src +RUN cd /datagen-src && gofmt -s -w . 
&& go build + +FROM ubuntu:20.04 + +COPY --from=builder /datagen-src/datagen / diff --git a/integration_test/datagen/ad_click/ad_click.go b/integration_test/datagen/ad_click/ad_click.go new file mode 100644 index 0000000000000..eb97467784d31 --- /dev/null +++ b/integration_test/datagen/ad_click/ad_click.go @@ -0,0 +1,58 @@ +package ad_click + +import ( + "context" + "datagen/gen" + "datagen/sink" + "encoding/json" + "fmt" + "math/rand" + "time" +) + +type clickEvent struct { + sink.BaseSinkRecord + + UserId int64 `json:"user_id"` + AdId int64 `json:"ad_id"` + ClickTimestamp string `json:"click_timestamp"` + ImpressionTimestamp string `json:"impression_timestamp"` +} + +func (r *clickEvent) ToPostgresSql() string { + return fmt.Sprintf("INSERT INTO %s (user_id, ad_id, click_timestamp, impression_timestamp) values ('%d', '%d', '%s', '%s')", + "ad_source", r.UserId, r.AdId, r.ClickTimestamp, r.ImpressionTimestamp) +} + +func (r *clickEvent) ToJson() (topic string, key string, data []byte) { + data, _ = json.Marshal(r) + return "ad_clicks", fmt.Sprint(r.UserId), data +} + +type adClickGen struct { +} + +func NewAdClickGen() gen.LoadGenerator { + return &adClickGen{} +} + +func (g *adClickGen) KafkaTopics() []string { + return []string{"ad_clicks"} +} + +func (g *adClickGen) Load(ctx context.Context, outCh chan<- sink.SinkRecord) { + for { + now := time.Now() + record := &clickEvent{ + UserId: rand.Int63n(100000), + AdId: rand.Int63n(10), + ClickTimestamp: now.Add(time.Duration(rand.Intn(1000)) * time.Millisecond).Format(gen.RwTimestampLayout), + ImpressionTimestamp: now.Format(gen.RwTimestampLayout), + } + select { + case <-ctx.Done(): + return + case outCh <- record: + } + } +} diff --git a/integration_test/datagen/ad_ctr/ad_ctr.go b/integration_test/datagen/ad_ctr/ad_ctr.go new file mode 100644 index 0000000000000..a180c0c267e27 --- /dev/null +++ b/integration_test/datagen/ad_ctr/ad_ctr.go @@ -0,0 +1,111 @@ +package ad_ctr + +import ( + "context" + "datagen/gen" + "datagen/sink" + "encoding/json" + "fmt" + "strconv" + "time" + + "github.com/brianvoe/gofakeit/v6" +) + +type adImpressionEvent struct { + sink.BaseSinkRecord + + BidId int64 `json:"bid_id"` + AdId int64 `json:"ad_id"` + ImpressionTimestamp string `json:"impression_timestamp"` +} + +func (r *adImpressionEvent) ToPostgresSql() string { + return fmt.Sprintf("INSERT INTO %s (bid_id, ad_id, impression_timestamp) values ('%d', '%d', '%s')", + "ad_impression", r.BidId, r.AdId, r.ImpressionTimestamp) +} + +func (r *adImpressionEvent) ToJson() (topic string, key string, data []byte) { + data, _ = json.Marshal(r) + return "ad_impression", fmt.Sprint(r.BidId), data +} + +type adClickEvent struct { + sink.BaseSinkRecord + + BidId int64 `json:"bid_id"` + ClickTimestamp string `json:"click_timestamp"` +} + +func (r *adClickEvent) ToPostgresSql() string { + return fmt.Sprintf("INSERT INTO %s (bid_id, click_timestamp) values ('%d', '%s')", + "ad_click", r.BidId, r.ClickTimestamp) +} + +func (r *adClickEvent) ToJson() (topic string, key string, data []byte) { + data, _ = json.Marshal(r) + return "ad_click", fmt.Sprint(r.BidId), data +} + +type adCtrGen struct { + faker *gofakeit.Faker + ctr map[int64]float64 +} + +func NewAdCtrGen() gen.LoadGenerator { + return &adCtrGen{ + ctr: make(map[int64]float64), + faker: gofakeit.New(0), + } +} + +func (g *adCtrGen) getCtr(adId int64) float64 { + if ctr, ok := g.ctr[adId]; ok { + return ctr + } + ctr := g.faker.Float64Range(0, 1) + g.ctr[adId] = ctr + return ctr +} + +func (g *adCtrGen) hasClick(adId 
int64) bool {
+    return g.faker.Float64Range(0, 1) < g.getCtr(adId)
+}
+
+func (g *adCtrGen) generate() []sink.SinkRecord {
+    bidId, _ := strconv.ParseInt(g.faker.DigitN(8), 10, 64)
+    adId := int64(g.faker.IntRange(1, 10))
+
+    events := []sink.SinkRecord{
+        &adImpressionEvent{
+            BidId:               bidId,
+            AdId:                adId,
+            ImpressionTimestamp: time.Now().Format(gen.RwTimestampLayout),
+        },
+    }
+    if g.hasClick(adId) {
+        randomDelay := time.Duration(g.faker.IntRange(1, 10) * int(time.Second))
+        events = append(events, &adClickEvent{
+            BidId:          bidId,
+            ClickTimestamp: time.Now().Add(randomDelay).Format(gen.RwTimestampLayout),
+        })
+    }
+    return events
+}
+
+func (g *adCtrGen) KafkaTopics() []string {
+    return []string{"ad_impression", "ad_click"}
+}
+
+func (g *adCtrGen) Load(ctx context.Context, outCh chan<- sink.SinkRecord) {
+    for {
+        records := g.generate()
+        for _, record := range records {
+            select {
+            case outCh <- record:
+            case <-ctx.Done():
+                return
+            }
+        }
+    }
+}
 diff --git a/integration_test/datagen/cdn_metrics/cdn_metrics.go b/integration_test/datagen/cdn_metrics/cdn_metrics.go new file mode 100644 index 0000000000000..aff55f87a80f3 --- /dev/null +++ b/integration_test/datagen/cdn_metrics/cdn_metrics.go @@ -0,0 +1,32 @@
+package cdn_metrics
+
+import (
+    "context"
+    "datagen/gen"
+    "datagen/sink"
+)
+
+type cdnMetricsGen struct {
+    cfg gen.GeneratorConfig
+}
+
+func NewCdnMetricsGen(cfg gen.GeneratorConfig) gen.LoadGenerator {
+    return &cdnMetricsGen{cfg: cfg}
+}
+
+func (g *cdnMetricsGen) KafkaTopics() []string {
+    return []string{"tcp_metrics", "nics_metrics"}
+}
+
+func (g *cdnMetricsGen) Load(ctx context.Context, outCh chan<- sink.SinkRecord) {
+    for i := 0; i < 10; i++ { // Assume there are 10 devices.
+        go func(i int) {
+            m := newDeviceTcpMonitor(i, g.cfg)
+            m.emulate(ctx, outCh)
+        }(i)
+        go func(i int) {
+            m := newDeviceNicsMonitor(i, g.cfg)
+            m.emulate(ctx, outCh)
+        }(i)
+    }
+}
 diff --git a/integration_test/datagen/cdn_metrics/nics.go b/integration_test/datagen/cdn_metrics/nics.go new file mode 100644 index 0000000000000..dc280eab1468e --- /dev/null +++ b/integration_test/datagen/cdn_metrics/nics.go @@ -0,0 +1,108 @@
+package cdn_metrics
+
+import (
+    "context"
+    "crypto/md5"
+    "datagen/gen"
+    "datagen/sink"
+    "encoding/hex"
+    "encoding/json"
+    "fmt"
+    "strconv"
+    "time"
+)
+
+const (
+    // The NIC's link bandwidth in bytes per second.
+    maxBandwidth = float64(10 * 1024 * 1024 * 1024 / 8) // 10 Gbit/s
+)
+
+type nicsMetric struct {
+    sink.BaseSinkRecord
+
+    DeviceId    string  `json:"device_id"`
+    MetricName  string  `json:"metric_name"`
+    Aggregation string  `json:"aggregation"`
+    NicName     string  `json:"nic_name"`
+    ReportTime  string  `json:"report_time"`
+    Bandwidth   float64 `json:"bandwidth"`
+    Value       float64 `json:"metric_value"`
+}
+
+func (r *nicsMetric) ToPostgresSql() string {
+    return fmt.Sprintf(
+        `INSERT INTO %s
+(device_id, metric_name, aggregation, nic_name, report_time, link_bandwidth, metric_value)
+values ('%s', '%s', '%s', '%s', '%s', '%f', '%f')`,
+        "nics_metrics", r.DeviceId, r.MetricName, r.Aggregation, r.NicName, r.ReportTime, r.Bandwidth, r.Value)
+}
+
+func (r *nicsMetric) ToJson() (topic string, key string, data []byte) {
+    data, _ = json.Marshal(r)
+    return "nics_metrics", r.DeviceId, data
+}
+
+// Each device has a NICs monitor.
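+// Every 10 seconds it reports the average and peak tx_bytes of each of the
+// device's four NICs, relative to the fixed link bandwidth maxBandwidth.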
+type deviceNicsMonitor struct { + deviceId string + randDist gen.RandDist +} + +func newDeviceNicsMonitor(id int, cfg gen.GeneratorConfig) deviceNicsMonitor { + hash := md5.Sum([]byte(strconv.Itoa(id))) + return deviceNicsMonitor{ + deviceId: hex.EncodeToString(hash[:]), + randDist: gen.NewRandDist(cfg), + } +} + +func (m *deviceNicsMonitor) emulate(ctx context.Context, outCh chan<- sink.SinkRecord) { + for { + metrics := m.generate() + for _, metric := range metrics { + select { + case outCh <- metric: + case <-ctx.Done(): + return + } + } + select { + case <-ctx.Done(): + case <-time.NewTicker(10 * time.Second).C: + } + } +} + +func (impl *deviceNicsMonitor) generate() []*nicsMetric { + curTime := time.Now() + var metrics []*nicsMetric + for nicId := 0; nicId < 4; nicId++ { + // Median value is 480MB/s. + txBytesAvg := impl.randDist.Rand(maxBandwidth / 4 * 3) + // Peak value must be larger than average but lower than maxBandwidth. + txBytesPeak := (impl.randDist.Rand(0.3) + 1) * txBytesAvg + metrics = append(metrics, + impl.newMetrics(nicId, "tx_bytes", "avg", curTime, int64(txBytesAvg)), + impl.newMetrics(nicId, "tx_bytes", "peak", curTime, int64(txBytesPeak)), + ) + } + return metrics +} + +func (impl *deviceNicsMonitor) newMetrics( + NicId int, + metricName string, + aggregation string, + reportTime time.Time, + value int64) *nicsMetric { + + return &nicsMetric{ + DeviceId: impl.deviceId, + MetricName: metricName, + Aggregation: aggregation, + NicName: "eth" + strconv.Itoa(NicId), + ReportTime: reportTime.Format(gen.RwTimestampLayout), + Bandwidth: maxBandwidth, + Value: float64(value), + } +} diff --git a/integration_test/datagen/cdn_metrics/tcp.go b/integration_test/datagen/cdn_metrics/tcp.go new file mode 100644 index 0000000000000..909c0ebf07058 --- /dev/null +++ b/integration_test/datagen/cdn_metrics/tcp.go @@ -0,0 +1,88 @@ +package cdn_metrics + +import ( + "context" + "crypto/md5" + "datagen/gen" + "datagen/sink" + "encoding/hex" + "encoding/json" + "fmt" + "strconv" + "time" +) + +type tcpMetric struct { + sink.BaseSinkRecord + + DeviceId string `json:"device_id"` + ReportTime string `json:"report_time"` + MetricName string `json:"metric_name"` + Value float64 `json:"metric_value"` +} + +func (r *tcpMetric) ToPostgresSql() string { + return fmt.Sprintf("INSERT INTO %s (device_id, report_time, metric_name, metric_value) values ('%s', '%s', '%s', '%f')", + "tcp_metrics", r.DeviceId, r.ReportTime, r.MetricName, r.Value) +} + +func (r *tcpMetric) ToJson() (topic string, key string, data []byte) { + data, _ = json.Marshal(r) + return "tcp_metrics", r.DeviceId, data +} + +// Each device has a TCP monitor. +type deviceTcpMonitor struct { + deviceId string + randDist gen.RandDist +} + +func newDeviceTcpMonitor(id int, cfg gen.GeneratorConfig) deviceTcpMonitor { + hash := md5.Sum([]byte(strconv.Itoa(id))) + return deviceTcpMonitor{ + deviceId: hex.EncodeToString(hash[:]), + randDist: gen.NewRandDist(cfg), + } +} + +func (m *deviceTcpMonitor) emulate(ctx context.Context, outCh chan<- sink.SinkRecord) { + for { + metrics := m.generate() + for _, metric := range metrics { + select { + case <-ctx.Done(): + return + case outCh <- metric: + } + } + // Produce tcp metrics every 1s. 
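+        // Block until the next 1-second tick; a cancelled context also ends
+        // the wait, and the send loop above will then return on the next pass.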
+        select {
+        case <-ctx.Done():
+        case <-time.NewTicker(1 * time.Second).C:
+        }
+    }
+}
+
+func (m *deviceTcpMonitor) generate() []*tcpMetric {
+    curTime := time.Now()
+
+    retransRate := m.randDist.Rand(0.6)
+    srtt := m.randDist.Rand(1400)
+    downloadSpeed := m.randDist.Rand(2000)
+
+    return []*tcpMetric{
+        m.newMetrics("retrans_rate", curTime, retransRate),
+        // Smoothed Round-Trip Time (SRTT).
+        m.newMetrics("srtt", curTime, srtt),
+        m.newMetrics("download_speed", curTime, downloadSpeed),
+    }
+}
+
+func (m *deviceTcpMonitor) newMetrics(metricName string, reportTime time.Time, value float64) *tcpMetric {
+    return &tcpMetric{
+        DeviceId:   m.deviceId,
+        MetricName: metricName,
+        ReportTime: reportTime.Format(gen.RwTimestampLayout),
+        Value:      value,
+    }
+}
 diff --git a/integration_test/datagen/clickstream/clickstream.go b/integration_test/datagen/clickstream/clickstream.go new file mode 100644 index 0000000000000..e157e21e1f59a --- /dev/null +++ b/integration_test/datagen/clickstream/clickstream.go @@ -0,0 +1,153 @@
+package clickstream
+
+import (
+    "context"
+    "datagen/gen"
+    "datagen/sink"
+    "encoding/json"
+    "fmt"
+    "time"
+
+    "github.com/brianvoe/gofakeit/v6"
+)
+
+type userBehavior struct {
+    sink.BaseSinkRecord
+
+    UserId         string `json:"user_id"`
+    TargetId       string `json:"target_id"`
+    TargetType     string `json:"target_type"`
+    EventTimestamp string `json:"event_timestamp"`
+    BehaviorType   string `json:"behavior_type"`
+
+    // These two fields are used to express the following behaviors:
+    // - Comment on a thread
+    // - Comment on a comment.
+    // Otherwise, the fields will be empty.
+    ParentTargetType string `json:"parent_target_type"`
+    ParentTargetId   string `json:"parent_target_id"`
+}
+
+func (r *userBehavior) ToPostgresSql() string {
+    return fmt.Sprintf(`INSERT INTO %s
+(user_id, target_id, target_type, event_timestamp, behavior_type, parent_target_type, parent_target_id)
+values ('%s', '%s', '%s', '%s', '%s', '%s', '%s')`,
+        "user_behaviors", r.UserId, r.TargetId, r.TargetType, r.EventTimestamp, r.BehaviorType, r.ParentTargetType, r.ParentTargetId)
+}
+
+func (r *userBehavior) ToJson() (topic string, key string, data []byte) {
+    data, _ = json.Marshal(r)
+    return "user_behaviors", r.UserId, data
+}
+
+type targetType string
+
+type clickStreamGen struct {
+    faker *gofakeit.Faker
+}
+
+func NewClickStreamGen() gen.LoadGenerator {
+    return &clickStreamGen{
+        faker: gofakeit.New(0),
+    }
+}
+
+func (g *clickStreamGen) randTargetType() targetType {
+    switch p := g.faker.IntRange(0, 9); {
+    case p < 7:
+        return "thread"
+    case p >= 7 && p < 9:
+        return "comment"
+    case p >= 9:
+        return "user"
+    default:
+        panic(fmt.Sprintf("unreachable: %d", p))
+    }
+}
+
+func (g *clickStreamGen) randBehaviorType(t targetType) string {
+    switch t {
+    case "thread":
+        switch p := g.faker.IntRange(0, 99); {
+        case p < 40:
+            return "show"
+        case p >= 40 && p < 65:
+            return "upvote"
+        case p >= 65 && p < 70:
+            return "downvote"
+        case p >= 70 && p < 75:
+            return "share"
+        case p >= 75 && p < 80:
+            return "award"
+        case p >= 80 && p < 90:
+            return "save"
+        case p >= 90:
+            return "publish" // Publish a thread.
+        default:
+            panic(fmt.Sprintf("unreachable: %d", p))
+        }
+    case "comment":
+        behaviors := []string{
+            "publish", // Publish a comment; the parent target type can be a comment or a thread.
+            "upvote",
+            "downvote",
+            "share",
+            "award",
+            "save",
+        }
+        return behaviors[g.faker.IntRange(0, len(behaviors)-1)]
+    case "user":
+        behaviors := []string{
+            "show", // View the user profile.
+ "follow", + "share", + "unfollow", + } + return behaviors[g.faker.IntRange(0, len(behaviors)-1)] + default: + panic("unexpected target type") + } +} + +func (g *clickStreamGen) generate() sink.SinkRecord { + // TODO: The overall throughput can be further controlled by a scale factor. + userId := g.faker.IntRange(0, 10) + targetId := g.faker.IntRange(0, 100) + target := g.randTargetType() + behavior := g.randBehaviorType(target) + // NOTE: The generated event might not be realistic, for example, a user is allowed to follow itself, + // and a user can upvote a not existed thread. Anyway, it's just a simple demo. + + var parentTargetId string + var parentTargetType string + if target == "comment" && behavior == "publish" { + possibleTargets := []string{"thread", "comment"} + parentTargetType = possibleTargets[g.faker.IntRange(0, len(possibleTargets)-1)] + parentTargetId = parentTargetType + fmt.Sprint(g.faker.IntRange(0, 100)) + } + + return &userBehavior{ + UserId: fmt.Sprint(userId), + TargetId: string(target) + fmt.Sprint(targetId), + TargetType: string(target), + EventTimestmap: time.Now().Format(gen.RwTimestampLayout), + BehaviorType: behavior, + ParentTargetType: parentTargetType, + ParentTargetId: parentTargetId, + } +} + +func (g *clickStreamGen) KafkaTopics() []string { + return []string{"user_behaviors"} +} + +func (g *clickStreamGen) Load(ctx context.Context, outCh chan<- sink.SinkRecord) { + for { + record := g.generate() + select { + case <-ctx.Done(): + return + case outCh <- record: + } + } +} diff --git a/integration_test/datagen/delivery/delivery.go b/integration_test/datagen/delivery/delivery.go new file mode 100644 index 0000000000000..167d911d5295f --- /dev/null +++ b/integration_test/datagen/delivery/delivery.go @@ -0,0 +1,73 @@ +package delivery + +import ( + "context" + "datagen/gen" + "datagen/sink" + "encoding/json" + "fmt" + "math/rand" + "time" +) + +type orderEvent struct { + sink.BaseSinkRecord + + OrderId int64 `json:"order_id"` + RestaurantId int64 `json:"restaurant_id"` + OrderState string `json:"order_state"` + OrderTimestamp string `json:"order_timestamp"` +} + +func (r *orderEvent) ToPostgresSql() string { + return fmt.Sprintf("INSERT INTO %s (order_id, restaurant_id, order_state, order_timestamp) values ('%d', '%d', '%s', '%s')", + "delivery_orders_source", r.OrderId, r.RestaurantId, r.OrderState, r.OrderTimestamp) +} + +func (r *orderEvent) ToJson() (topic string, key string, data []byte) { + data, _ = json.Marshal(r) + return "delivery_orders", fmt.Sprint(r.OrderId), data +} + +type orderEventGen struct { + seqOrderId int64 + cfg gen.GeneratorConfig +} + +func NewOrderEventGen(cfg gen.GeneratorConfig) gen.LoadGenerator { + return &orderEventGen{ + seqOrderId: 0, + cfg: cfg, + } +} + +func (g *orderEventGen) KafkaTopics() []string { + return []string{"delivery_orders"} +} + +func (g *orderEventGen) Load(ctx context.Context, outCh chan<- sink.SinkRecord) { + order_states := []string{ + "CREATED", + "PENDING", + "DELIVERED", + } + + var num_of_restaurants int64 = 3 + var total_minutes = 30 + + for { + now := time.Now() + record := &orderEvent{ + OrderId: g.seqOrderId, + RestaurantId: rand.Int63n(num_of_restaurants), + OrderState: order_states[rand.Intn(len(order_states))], + OrderTimestamp: now.Add(time.Duration(rand.Intn(total_minutes)) * time.Minute).Format(gen.RwTimestampLayout), + } + g.seqOrderId++ + select { + case <-ctx.Done(): + return + case outCh <- record: + } + } +} diff --git a/integration_test/datagen/ecommerce/ecommerce.go 
new file mode 100644 index 0000000000000..cd27049d9d559 --- /dev/null +++ b/integration_test/datagen/ecommerce/ecommerce.go @@ -0,0 +1,139 @@
+package ecommerce
+
+import (
+    "context"
+    "datagen/gen"
+    "datagen/sink"
+    "encoding/json"
+    "fmt"
+    "math/rand"
+    "time"
+
+    "github.com/brianvoe/gofakeit/v6"
+)
+
+// The order details.
+type orderEvent struct {
+    sink.BaseSinkRecord
+
+    OrderId        int64   `json:"order_id"`
+    ItemId         int64   `json:"item_id"`
+    ItemPrice      float64 `json:"item_price"`
+    EventTimestamp string  `json:"event_timestamp"`
+}
+
+func (r *orderEvent) ToPostgresSql() string {
+    return fmt.Sprintf(`INSERT INTO %s
+(order_id, item_id, item_price, event_timestamp)
+values ('%d', '%d', %f, '%s')`,
+        "order_events", r.OrderId, r.ItemId, r.ItemPrice, r.EventTimestamp)
+}
+
+func (r *orderEvent) ToJson() (topic string, key string, data []byte) {
+    data, _ = json.Marshal(r)
+    return "order_events", fmt.Sprint(r.OrderId), data
+}
+
+// Each order/trade will be composed of two events:
+// an 'order_created' event and a 'parcel_shipped' event.
+type parcelEvent struct {
+    sink.BaseSinkRecord
+
+    OrderId        int64  `json:"order_id"`
+    EventTimestamp string `json:"event_timestamp"`
+    EventType      string `json:"event_type"`
+}
+
+func (r *parcelEvent) ToPostgresSql() string {
+    return fmt.Sprintf(`INSERT INTO %s
+(order_id, event_timestamp, event_type)
+values ('%d', '%s', '%s')`,
+        "parcel_events", r.OrderId, r.EventTimestamp, r.EventType)
+}
+
+func (r *parcelEvent) ToJson() (topic string, key string, data []byte) {
+    data, _ = json.Marshal(r)
+    return "parcel_events", fmt.Sprint(r.OrderId), data
+}
+
+type ecommerceGen struct {
+    faker *gofakeit.Faker
+
+    // We simply model orders as a sliding window. `seqOrderId` advances as new orders are created.
+    // `seqShipId` is always smaller than `seqOrderId` and is advanced when a new order is shipped.
+    seqOrderId int64
+    seqShipId  int64
+
+    // Item ID -> Item Price
+    items []float64
+}
+
+func NewEcommerceGen() gen.LoadGenerator {
+    const numItems = 1000
+    items := make([]float64, numItems)
+    for i := 0; i < numItems; i++ {
+        items[i] = gofakeit.Float64Range(0, 10000)
+    }
+    return &ecommerceGen{
+        faker:      gofakeit.New(0),
+        seqOrderId: 0,
+        seqShipId:  0,
+        items:      items,
+    }
+}
+
+func (g *ecommerceGen) KafkaTopics() []string {
+    return []string{"order_events", "parcel_events"}
+}
+
+func (g *ecommerceGen) generate() []sink.SinkRecord {
+    ts := time.Now().Format(gen.RwTimestampLayout)
+
+    if g.faker.Bool() || g.seqShipId >= g.seqOrderId {
+        // New order: either the coin flip says so, or every existing order
+        // has already been shipped.
+        g.seqOrderId++
+        itemsNum := g.faker.IntRange(1, 4)
+        orders := make([]sink.SinkRecord, itemsNum)
+        for i := 0; i < itemsNum; i++ {
+            itemId := rand.Intn(len(g.items))
+            itemPrice := g.items[itemId]
+            orders[i] = &orderEvent{
+                OrderId:        g.seqOrderId,
+                ItemId:         int64(itemId),
+                ItemPrice:      itemPrice,
+                EventTimestamp: ts,
+            }
+        }
+        var records []sink.SinkRecord
+        records = append(records, orders...)
+        records = append(records, &parcelEvent{
+            OrderId:        g.seqOrderId,
+            EventTimestamp: ts,
+            EventType:      "order_created",
+        })
+        return records
+    } else {
+        // Ship order.
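+        // seqShipId trails seqOrderId here (see the guard above), so the
+        // order being shipped has already been created.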
+        g.seqShipId++
+        return []sink.SinkRecord{
+            &parcelEvent{
+                OrderId:        g.seqShipId,
+                EventType:      "parcel_shipped",
+                EventTimestamp: ts,
+            },
+        }
+    }
+}
+
+func (g *ecommerceGen) Load(ctx context.Context, outCh chan<- sink.SinkRecord) {
+    for {
+        records := g.generate()
+        for _, record := range records {
+            select {
+            case <-ctx.Done():
+                return
+            case outCh <- record:
+            }
+        }
+    }
+}
 diff --git a/integration_test/datagen/gen/generator.go b/integration_test/datagen/gen/generator.go new file mode 100644 index 0000000000000..af9fa9db96d8e --- /dev/null +++ b/integration_test/datagen/gen/generator.go @@ -0,0 +1,96 @@
+package gen
+
+import (
+    "context"
+    "datagen/sink"
+    "datagen/sink/kafka"
+    "datagen/sink/kinesis"
+    "datagen/sink/mysql"
+    "datagen/sink/postgres"
+    "datagen/sink/pulsar"
+
+    "gonum.org/v1/gonum/stat/distuv"
+)
+
+type GeneratorConfig struct {
+    Postgres postgres.PostgresConfig
+    Mysql    mysql.MysqlConfig
+    Kafka    kafka.KafkaConfig
+    Pulsar   pulsar.PulsarConfig
+    Kinesis  kinesis.KinesisConfig
+
+    // Whether to print the content of every event.
+    PrintInsert bool
+    // The datagen mode, e.g. "ad-ctr".
+    Mode string
+    // The sink type.
+    Sink string
+    // The throttled requests-per-second.
+    Qps int
+
+    // Whether the tail probability is high.
+    // If true, we will use a uniform distribution for randomizing values.
+    HeavyTail bool
+
+    // The record format, used when the sink is a message queue.
+    Format string
+}
+
+type LoadGenerator interface {
+    KafkaTopics() []string
+
+    Load(ctx context.Context, outCh chan<- sink.SinkRecord)
+}
+
+const RwTimestampLayout = "2006-01-02 15:04:05.07+01:00"
+
+type RandDist interface {
+    // Rand returns a random number roughly in [0, max]. The Poisson-based
+    // implementation may occasionally return values above max.
+    Rand(max float64) float64
+}
+
+func NewRandDist(cfg GeneratorConfig) RandDist {
+    // Create the cache maps up front: the Rand methods below use value
+    // receivers, so a map created lazily inside Rand would not persist
+    // across calls.
+    if cfg.HeavyTail {
+        return UniformDist{u: make(map[float64]distuv.Uniform)}
+    } else {
+        return PoissonDist{ps: make(map[float64]distuv.Poisson)}
+    }
+}
+
+type UniformDist struct {
+    u map[float64]distuv.Uniform
+}
+
+func (ud UniformDist) Rand(max float64) float64 {
+    if ud.u == nil {
+        ud.u = make(map[float64]distuv.Uniform)
+    }
+    _, ok := ud.u[max]
+    if !ok {
+        ud.u[max] = distuv.Uniform{
+            Min: 0,
+            Max: max,
+        }
+    }
+    gen_num := ud.u[max].Rand()
+    return gen_num
+}
+
+// A more realistic distribution: the tail has lower probability.
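+// With Lambda = max/2, samples concentrate around max/2, so values near 0 or
+// max are rare (a Poisson variable is unbounded above, though, so a sample
+// can occasionally exceed max).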
diff --git a/integration_test/datagen/go.mod b/integration_test/datagen/go.mod
new file mode 100644
index 0000000000000..315ee66ad50cc
--- /dev/null
+++ b/integration_test/datagen/go.mod
@@ -0,0 +1,78 @@
+module datagen
+
+go 1.18
+
+require (
+    github.com/Shopify/sarama v1.37.2
+    github.com/apache/pulsar-client-go v0.8.1
+    github.com/aws/aws-sdk-go v1.44.126
+    github.com/brianvoe/gofakeit/v6 v6.16.0
+    github.com/go-sql-driver/mysql v1.7.0
+    github.com/lib/pq v1.10.7
+    github.com/linkedin/goavro/v2 v2.9.8
+    github.com/urfave/cli v1.22.10
+    go.uber.org/ratelimit v0.2.0
+    gonum.org/v1/gonum v0.12.0
+    google.golang.org/protobuf v1.28.1
+)
+
+require (
+    github.com/99designs/keyring v1.1.6 // indirect
+    github.com/AthenZ/athenz v1.10.39 // indirect
+    github.com/DataDog/zstd v1.5.0 // indirect
+    github.com/andres-erbsen/clock v0.0.0-20160526145045-9e14626cd129 // indirect
+    github.com/apache/pulsar-client-go/oauth2 v0.0.0-20220120090717-25e59572242e // indirect
+    github.com/ardielle/ardielle-go v1.5.2 // indirect
+    github.com/beorn7/perks v1.0.1 // indirect
+    github.com/cespare/xxhash/v2 v2.1.2 // indirect
+    github.com/cpuguy83/go-md2man/v2 v2.0.2 // indirect
+    github.com/danieljoos/wincred v1.0.2 // indirect
+    github.com/davecgh/go-spew v1.1.1 // indirect
+    github.com/dvsekhvalnov/jose2go v0.0.0-20200901110807-248326c1351b // indirect
+    github.com/eapache/go-resiliency v1.3.0 // indirect
+    github.com/eapache/go-xerial-snappy v0.0.0-20180814174437-776d5712da21 // indirect
+    github.com/eapache/queue v1.1.0 // indirect
+    github.com/godbus/dbus v0.0.0-20190726142602-4481cbc300e2 // indirect
+    github.com/gogo/protobuf v1.3.2 // indirect
+    github.com/golang-jwt/jwt v3.2.2+incompatible // indirect
+    github.com/golang/protobuf v1.5.2 // indirect
+    github.com/golang/snappy v0.0.4 // indirect
+    github.com/gsterjov/go-libsecret v0.0.0-20161001094733-a6f4afe4910c // indirect
+    github.com/hashicorp/errwrap v1.0.0 // indirect
+    github.com/hashicorp/go-multierror v1.1.1 // indirect
+    github.com/hashicorp/go-uuid v1.0.3 // indirect
+    github.com/jcmturner/aescts/v2 v2.0.0 // indirect
+    github.com/jcmturner/dnsutils/v2 v2.0.0 // indirect
+    github.com/jcmturner/gofork v1.7.6 // indirect
+    github.com/jcmturner/gokrb5/v8 v8.4.3 // indirect
+    github.com/jcmturner/rpc/v2 v2.0.3 // indirect
+    github.com/jmespath/go-jmespath v0.4.0 // indirect
+    github.com/keybase/go-keychain v0.0.0-20190712205309-48d3d31d256d // indirect
+    github.com/klauspost/compress v1.15.11 // indirect
+    github.com/matttproud/golang_protobuf_extensions v1.0.1 // indirect
+    github.com/mitchellh/go-homedir v1.1.0 // indirect
+    github.com/mtibben/percent v0.2.1 // indirect
+    github.com/pierrec/lz4 v2.0.5+incompatible // indirect
+    github.com/pierrec/lz4/v4 v4.1.17 // indirect
+    github.com/pkg/errors v0.9.1 // indirect
+    github.com/pmezard/go-difflib v1.0.0 // indirect
+    github.com/prometheus/client_golang v1.12.1 // indirect
+    github.com/prometheus/client_model v0.2.0 // indirect
+    github.com/prometheus/common v0.32.1 // indirect
+    github.com/prometheus/procfs v0.7.3 // indirect
+    github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475 // indirect
+    github.com/rogpeppe/go-internal v1.9.0 // indirect
+
github.com/russross/blackfriday/v2 v2.1.0 // indirect + github.com/sirupsen/logrus v1.8.1 // indirect + github.com/spaolacci/murmur3 v1.1.0 // indirect + github.com/stretchr/testify v1.8.0 // indirect + go.uber.org/atomic v1.7.0 // indirect + golang.org/x/crypto v0.0.0-20220722155217-630584e8d5aa // indirect + golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6 // indirect + golang.org/x/net v0.0.0-20220927171203-f486391704dc // indirect + golang.org/x/oauth2 v0.0.0-20210514164344-f6687ab2804c // indirect + golang.org/x/sys v0.0.0-20220728004956-3c1f35247d10 // indirect + golang.org/x/term v0.0.0-20210927222741-03fcf44c2211 // indirect + google.golang.org/appengine v1.6.7 // indirect + gopkg.in/yaml.v3 v3.0.1 // indirect +) diff --git a/integration_test/datagen/go.sum b/integration_test/datagen/go.sum new file mode 100644 index 0000000000000..02af8451039e8 --- /dev/null +++ b/integration_test/datagen/go.sum @@ -0,0 +1,835 @@ +cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU= +cloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6AU= +cloud.google.com/go v0.44.2/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY= +cloud.google.com/go v0.45.1/go.mod h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc= +cloud.google.com/go v0.46.3/go.mod h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg0= +cloud.google.com/go v0.50.0/go.mod h1:r9sluTvynVuxRIOHXQEHMFffphuXHOMZMycpNR5e6To= +cloud.google.com/go v0.52.0/go.mod h1:pXajvRH/6o3+F9jDHZWQ5PbGhn+o8w9qiu/CffaVdO4= +cloud.google.com/go v0.53.0/go.mod h1:fp/UouUEsRkN6ryDKNW/Upv/JBKnv6WDthjR6+vze6M= +cloud.google.com/go v0.54.0/go.mod h1:1rq2OEkV3YMf6n/9ZvGWI3GWw0VoqH/1x2nd8Is/bPc= +cloud.google.com/go v0.56.0/go.mod h1:jr7tqZxxKOVYizybht9+26Z/gUq7tiRzu+ACVAMbKVk= +cloud.google.com/go v0.57.0/go.mod h1:oXiQ6Rzq3RAkkY7N6t3TcE6jE+CIBBbA36lwQ1JyzZs= +cloud.google.com/go v0.62.0/go.mod h1:jmCYTdRCQuc1PHIIJ/maLInMho30T/Y0M4hTdTShOYc= +cloud.google.com/go v0.65.0/go.mod h1:O5N8zS7uWy9vkA9vayVHs65eM1ubvY4h553ofrNHObY= +cloud.google.com/go v0.72.0/go.mod h1:M+5Vjvlc2wnp6tjzE102Dw08nGShTscUx2nZMufOKPI= +cloud.google.com/go v0.74.0/go.mod h1:VV1xSbzvo+9QJOxLDaJfTjx5e+MePCpCWwvftOeQmWk= +cloud.google.com/go v0.78.0/go.mod h1:QjdrLG0uq+YwhjoVOLsS1t7TW8fs36kLs4XO5R5ECHg= +cloud.google.com/go v0.79.0/go.mod h1:3bzgcEeQlzbuEAYu4mrWhKqWjmpprinYgKJLgKHnbb8= +cloud.google.com/go v0.81.0/go.mod h1:mk/AM35KwGk/Nm2YSeZbxXdrNK3KZOYHmLkOqC2V6E0= +cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= +cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE= +cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc= +cloud.google.com/go/bigquery v1.5.0/go.mod h1:snEHRnqQbz117VIFhE8bmtwIDY80NLUZUMb4Nv6dBIg= +cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4gLoIoXIAPc= +cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ= +cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= +cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk= +cloud.google.com/go/firestore v1.1.0/go.mod h1:ulACoGHTpvq5r8rxGJ4ddJZBZqakUQqClKRT5SZwBmk= +cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= 
+cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw= +cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA= +cloud.google.com/go/pubsub v1.3.1/go.mod h1:i+ucay31+CNRpDW4Lu78I4xXG+O1r/MAHgjpRVR+TSU= +cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw= +cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos= +cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk= +cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs= +cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0= +dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= +github.com/99designs/keyring v1.1.6 h1:kVDC2uCgVwecxCk+9zoCt2uEL6dt+dfVzMvGgnVcIuM= +github.com/99designs/keyring v1.1.6/go.mod h1:16e0ds7LGQQcT59QqkTg72Hh5ShM51Byv5PEmW6uoRU= +github.com/AthenZ/athenz v1.10.39 h1:mtwHTF/v62ewY2Z5KWhuZgVXftBej1/Tn80zx4DcawY= +github.com/AthenZ/athenz v1.10.39/go.mod h1:3Tg8HLsiQZp81BJY58JBeU2BR6B/H4/0MQGfCwhHNEA= +github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= +github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= +github.com/DataDog/zstd v1.5.0 h1:+K/VEwIAaPcHiMtQvpLD4lqW7f0Gk3xdYZmI1hD+CXo= +github.com/DataDog/zstd v1.5.0/go.mod h1:g4AWEaM3yOg3HYfnJ3YIawPnVdXJh9QME85blwSAmyw= +github.com/Shopify/sarama v1.37.2 h1:LoBbU0yJPte0cE5TZCGdlzZRmMgMtZU/XgnUKZg9Cv4= +github.com/Shopify/sarama v1.37.2/go.mod h1:Nxye/E+YPru//Bpaorfhc3JsSGYwCaDDj+R4bK52U5o= +github.com/Shopify/toxiproxy/v2 v2.5.0 h1:i4LPT+qrSlKNtQf5QliVjdP08GyAH8+BUIc9gT0eahc= +github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= +github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= +github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= +github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= +github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho= +github.com/andres-erbsen/clock v0.0.0-20160526145045-9e14626cd129 h1:MzBOUgng9orim59UnfUTLRjMpd09C5uEVQ6RPGeCaVI= +github.com/andres-erbsen/clock v0.0.0-20160526145045-9e14626cd129/go.mod h1:rFgpPQZYZ8vdbc+48xibu8ALc3yeyd64IhHS+PU6Yyg= +github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= +github.com/apache/pulsar-client-go v0.8.1 h1:UZINLbH3I5YtNzqkju7g9vrl4CKrEgYSx2rbpvGufrE= +github.com/apache/pulsar-client-go v0.8.1/go.mod h1:yJNcvn/IurarFDxwmoZvb2Ieylg630ifxeO/iXpk27I= +github.com/apache/pulsar-client-go/oauth2 v0.0.0-20220120090717-25e59572242e h1:EqiJ0Xil8NmcXyupNqXV9oYDBeWntEIegxLahrTr8DY= +github.com/apache/pulsar-client-go/oauth2 v0.0.0-20220120090717-25e59572242e/go.mod h1:Xee4tgYLFpYcPMcTfBYWE1uKRzeciodGTSEDMzsR6i8= +github.com/ardielle/ardielle-go v1.5.2 h1:TilHTpHIQJ27R1Tl/iITBzMwiUGSlVfiVhwDNGM3Zj4= +github.com/ardielle/ardielle-go v1.5.2/go.mod h1:I4hy1n795cUhaVt/ojz83SNVCYIGsAFAONtv2Dr7HUI= +github.com/ardielle/ardielle-tools v1.5.4/go.mod h1:oZN+JRMnqGiIhrzkRN9l26Cej9dEx4jeNG6A+AdkShk= +github.com/armon/circbuf 
v0.0.0-20150827004946-bbbad097214e/go.mod h1:3U/XgcO3hCbHZ8TKRvWD2dDTCfh9M9ya+I9JpbB7O8o= +github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmVTwzkszR9V5SSuryQ31EELlFMUz1kKyl939pY= +github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= +github.com/aws/aws-sdk-go v1.32.6/go.mod h1:5zCpMtNQVjRREroY7sYe8lOMRSxkhG6MZveU8YkpAk0= +github.com/aws/aws-sdk-go v1.44.126 h1:7HQJw2DNiwpxqMe2H7odGNT2rhO4SRrUe5/8dYXl0Jk= +github.com/aws/aws-sdk-go v1.44.126/go.mod h1:y4AeaBuwd2Lk+GepC1E9v0qOiTws0MIWAX4oIKwKHZo= +github.com/beefsack/go-rate v0.0.0-20220214233405-116f4ca011a0/go.mod h1:6YNgTHLutezwnBvyneBbwvB8C82y3dcoOj5EQJIdGXA= +github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= +github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8= +github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM= +github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= +github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs= +github.com/bketelsen/crypt v0.0.4/go.mod h1:aI6NrJ0pMGgvZKL1iVgXLnfIFJtfV+bKCoqOes/6LfM= +github.com/bmizerany/perks v0.0.0-20141205001514-d9a9656a3a4b/go.mod h1:ac9efd0D1fsDb3EJvhqgXRbFx7bs2wqZ10HQPeU8U/Q= +github.com/brianvoe/gofakeit/v6 v6.16.0 h1:EelCqtfArd8ppJ0z+TpOxXH8sVWNPBadPNdCDSMMw7k= +github.com/brianvoe/gofakeit/v6 v6.16.0/go.mod h1:Ow6qC71xtwm79anlwKRlWZW6zVq9D2XHE4QSSMP/rU8= +github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= +github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/cespare/xxhash/v2 v2.1.2 h1:YRXhKfTDauu4ajMg1TPgFO5jnlC2HCbmLXMcTG5cbYE= +github.com/cespare/xxhash/v2 v2.1.2/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= +github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= +github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= +github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= +github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= +github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= +github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= +github.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= +github.com/coreos/go-systemd/v22 v22.3.2/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= +github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= +github.com/cpuguy83/go-md2man/v2 v2.0.0/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= +github.com/cpuguy83/go-md2man/v2 v2.0.2 h1:p1EgwI/C7NhT0JmVkwCD2ZBK8j4aeHQX2pMHHBfMQ6w= +github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= +github.com/danieljoos/wincred v1.0.2 h1:zf4bhty2iLuwgjgpraD2E9UbvO+fe54XXGJbOwe23fU= +github.com/danieljoos/wincred v1.0.2/go.mod h1:SnuYRW9lp1oJrZX/dXJqr0cPK5gYXqx3EJbmjhLdK9U= +github.com/davecgh/go-spew v1.1.0/go.mod 
h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/dimfeld/httptreemux v5.0.1+incompatible h1:Qj3gVcDNoOthBAqftuD596rm4wg/adLLz5xh5CmpiCA= +github.com/dimfeld/httptreemux v5.0.1+incompatible/go.mod h1:rbUlSV+CCpv/SuqUTP/8Bk2O3LyUV436/yaRGkhP6Z0= +github.com/dvsekhvalnov/jose2go v0.0.0-20200901110807-248326c1351b h1:HBah4D48ypg3J7Np4N+HY/ZR76fx3HEUGxDU6Uk39oQ= +github.com/dvsekhvalnov/jose2go v0.0.0-20200901110807-248326c1351b/go.mod h1:7BvyPhdbLxMXIYTFPLsyJRFMsKmOZnQmzh6Gb+uquuM= +github.com/eapache/go-resiliency v1.3.0 h1:RRL0nge+cWGlxXbUzJ7yMcq6w2XBEr19dCN6HECGaT0= +github.com/eapache/go-resiliency v1.3.0/go.mod h1:5yPzW0MIvSe0JDsv0v+DvcjEv2FyD6iZYSs1ZI+iQho= +github.com/eapache/go-xerial-snappy v0.0.0-20180814174437-776d5712da21 h1:YEetp8/yCZMuEPMUDHG0CW/brkkEp8mzqk2+ODEitlw= +github.com/eapache/go-xerial-snappy v0.0.0-20180814174437-776d5712da21/go.mod h1:+020luEh2TKB4/GOp8oxxtq0Daoen/Cii55CzbTV6DU= +github.com/eapache/queue v1.1.0 h1:YOEu7KNc61ntiQlcEeUIoDTJ2o8mQznoNvUhiigpIqc= +github.com/eapache/queue v1.1.0/go.mod h1:6eCeP0CKFpHLu8blIFXhExK/dRa7WDZfr6jVFPTqq+I= +github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= +github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= +github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= +github.com/envoyproxy/go-control-plane v0.9.7/go.mod h1:cwu0lG7PUMfa9snN8LXBig5ynNVH9qI8YYLbd1fK2po= +github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= +github.com/envoyproxy/go-control-plane v0.9.9-0.20210217033140-668b12f5399d/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= +github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= +github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= +github.com/fortytw2/leaktest v1.3.0 h1:u8491cBMTQ8ft8aeV+adlcytMZylmA5nnwwkRZjI8vw= +github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= +github.com/fsnotify/fsnotify v1.4.9 h1:hsms1Qyu0jgnwNXIxa+/V/PDsU6CfLf6CNO8H7IWoS4= +github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ= +github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= +github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= +github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= +github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= +github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= +github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= +github.com/go-kit/log v0.1.0/go.mod h1:zbhenjAZHb184qTLMA9ZjW7ThYL0H2mk7Q6pNt4vbaY= +github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE= +github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk= +github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A= +github.com/go-sql-driver/mysql v1.5.0/go.mod 
h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg= +github.com/go-sql-driver/mysql v1.7.0 h1:ueSltNNllEqE3qcWBTD0iQd3IpL/6U+mJxLkazJ7YPc= +github.com/go-sql-driver/mysql v1.7.0/go.mod h1:OXbVy3sEdcQ2Doequ6Z5BW6fXNQTmx+9S1MCJN5yJMI= +github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= +github.com/godbus/dbus v0.0.0-20190726142602-4481cbc300e2 h1:ZpnhV/YsD2/4cESfV5+Hoeu/iUR3ruzNvZ+yQfO03a0= +github.com/godbus/dbus v0.0.0-20190726142602-4481cbc300e2/go.mod h1:bBOAhwG1umN6/6ZUMtDFBMQR8jRg9O75tm9K00oMsK4= +github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= +github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= +github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= +github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= +github.com/golang-jwt/jwt v3.2.1+incompatible/go.mod h1:8pz2t5EyA70fFQQSrl6XZXzqecmYZeUEB8OUGHkxJ+I= +github.com/golang-jwt/jwt v3.2.2+incompatible h1:IfV12K8xAKAnZqdXVzCZ+TOjboZ2keLg81eXfW3O+oY= +github.com/golang-jwt/jwt v3.2.2+incompatible/go.mod h1:8pz2t5EyA70fFQQSrl6XZXzqecmYZeUEB8OUGHkxJ+I= +github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= +github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= +github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= +github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y= +github.com/golang/mock v1.4.0/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.1/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.4/go.mod h1:l3mdAwkq5BuhzHwde/uurv3sEJeZMXNpwsxVWU71h+4= +github.com/golang/mock v1.5.0/go.mod h1:CWnOUgYIOo4TcNZ0wHX3YZCqsaM1I1Jvs6v3mP3KVu8= +github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= +github.com/golang/protobuf v1.3.4/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= +github.com/golang/protobuf v1.3.5/go.mod h1:6O5/vntMXwX2lRkT1hjjk0nAC1IDOTvTlVgjlRvqsdk= +github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= +github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= +github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= +github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w= +github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0= +github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8= +github.com/golang/protobuf v1.4.2/go.mod 
h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= +github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= +github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= +github.com/golang/protobuf v1.5.1/go.mod h1:DopwsBzvsk0Fs44TXzsVbJyPhcCPeIwnvohx4u74HPM= +github.com/golang/protobuf v1.5.2 h1:ROPKBNFfQgOUMifHyP+KYbvpjbdoFNs+aK7DXlji0Tw= +github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= +github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= +github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM= +github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= +github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= +github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= +github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= +github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.4.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.7 h1:81/ik6ipDQS2aGcBfIN5dHDB36BwrStyeAQquSYCV4o= +github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= +github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= +github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= +github.com/google/martian/v3 v3.1.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= +github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= +github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= +github.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200212024743-f11f1df84d12/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200229191704-1ebb73c60ed3/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200430221834-fc25d7d30c6d/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20201023163331-3e6fc7fc9c4c/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20201203190320-1bf35d6f28c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20210122040257-d980be63207e/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof 
v0.0.0-20210226084205-cbba55b83ad5/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= +github.com/google/uuid v1.1.2 h1:EVhdT+1Kseyi1/pUmXKaFxYsDNy9RQYkMWRH68J/W7Y= +github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= +github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= +github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= +github.com/gorilla/mux v1.7.4/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So= +github.com/gorilla/securecookie v1.1.1/go.mod h1:ra0sb63/xPlUeL+yeDciTfxMRAA+MP+HVt/4epWDjd4= +github.com/gorilla/sessions v1.2.1/go.mod h1:dk2InVEVJ0sfLlnXv9EAgkf6ecYs/i80K/zI+bUmuGM= +github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw= +github.com/gsterjov/go-libsecret v0.0.0-20161001094733-a6f4afe4910c h1:6rhixN/i8ZofjG1Y75iExal34USq5p+wiN1tpie8IrU= +github.com/gsterjov/go-libsecret v0.0.0-20161001094733-a6f4afe4910c/go.mod h1:NMPJylDgVpX0MLRlPy15sqSwOFv/U1GZ2m21JhFfek0= +github.com/hashicorp/consul/api v1.1.0/go.mod h1:VmuI/Lkw1nC05EYQWNKwWGbkg+FbDBtguAZLlVdkD9Q= +github.com/hashicorp/consul/sdk v0.1.1/go.mod h1:VKf9jXwCTEY1QZP2MOLRhb5i/I/ssyNV1vwHyQBF0x8= +github.com/hashicorp/errwrap v1.0.0 h1:hLrqtEDnRye3+sgx6z4qVLNuviH3MR5aQ0ykNJa/UYA= +github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= +github.com/hashicorp/go-cleanhttp v0.5.1/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= +github.com/hashicorp/go-immutable-radix v1.0.0/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60= +github.com/hashicorp/go-msgpack v0.5.3/go.mod h1:ahLV/dePpqEmjfWmKiqvPkv/twdG7iPBM1vqhUKIvfM= +github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHhCYQXV3UM06sGGrk= +github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo= +github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM= +github.com/hashicorp/go-rootcerts v1.0.0/go.mod h1:K6zTfqpRlCUIjkwsN4Z+hiSfzSTQa6eBIzfwKfwNnHU= +github.com/hashicorp/go-sockaddr v1.0.0/go.mod h1:7Xibr9yA9JjQq1JpNB2Vw7kxv8xerXegt+ozgdvDeDU= +github.com/hashicorp/go-syslog v1.0.0/go.mod h1:qPfqrKkXGihmCqbJM2mZgkZGvKG1dFdvsLplgctolz4= +github.com/hashicorp/go-uuid v1.0.0/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= +github.com/hashicorp/go-uuid v1.0.1/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= +github.com/hashicorp/go-uuid v1.0.2/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= +github.com/hashicorp/go-uuid v1.0.3 h1:2gKiV6YVmrJ1i2CKKa9obLvRieoRGviZFL26PcT/Co8= +github.com/hashicorp/go-uuid v1.0.3/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= +github.com/hashicorp/go.net v0.0.1/go.mod h1:hjKkEWcCURg++eb33jQU7oqQcI9XDCnUzHA0oac0k90= +github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= +github.com/hashicorp/logutils v1.0.0/go.mod h1:QIAnNjmIWmVIIkWDTG1z5v++HQmx9WQRO+LraFDTW64= +github.com/hashicorp/mdns v1.0.0/go.mod h1:tL+uN++7HEJ6SQLQ2/p+z2pH24WQKWjBPkE0mNTz8vQ= +github.com/hashicorp/memberlist 
v0.1.3/go.mod h1:ajVTdAv/9Im8oMAAj5G31PhhMCZJV2pPBoIllUwCN7I= +github.com/hashicorp/serf v0.8.2/go.mod h1:6hOLApaqBFA1NXqRQAsxw9QxuDEvNxSQRwA/JwenrHc= +github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= +github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= +github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= +github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= +github.com/jawher/mow.cli v1.0.4/go.mod h1:5hQj2V8g+qYmLUVWqu4Wuja1pI57M83EChYLVZ0sMKk= +github.com/jawher/mow.cli v1.2.0/go.mod h1:y+pcA3jBAdo/GIZx/0rFjw/K2bVEODP9rfZOfaiq8Ko= +github.com/jcmturner/aescts/v2 v2.0.0 h1:9YKLH6ey7H4eDBXW8khjYslgyqG2xZikXP0EQFKrle8= +github.com/jcmturner/aescts/v2 v2.0.0/go.mod h1:AiaICIRyfYg35RUkr8yESTqvSy7csK90qZ5xfvvsoNs= +github.com/jcmturner/dnsutils/v2 v2.0.0 h1:lltnkeZGL0wILNvrNiVCR6Ro5PGU/SeBvVO/8c/iPbo= +github.com/jcmturner/dnsutils/v2 v2.0.0/go.mod h1:b0TnjGOvI/n42bZa+hmXL+kFJZsFT7G4t3HTlQ184QM= +github.com/jcmturner/gofork v1.7.6 h1:QH0l3hzAU1tfT3rZCnW5zXl+orbkNMMRGJfdJjHVETg= +github.com/jcmturner/gofork v1.7.6/go.mod h1:1622LH6i/EZqLloHfE7IeZ0uEJwMSUyQ/nDd82IeqRo= +github.com/jcmturner/goidentity/v6 v6.0.1 h1:VKnZd2oEIMorCTsFBnJWbExfNN7yZr3EhJAxwOkZg6o= +github.com/jcmturner/goidentity/v6 v6.0.1/go.mod h1:X1YW3bgtvwAXju7V3LCIMpY0Gbxyjn/mY9zx4tFonSg= +github.com/jcmturner/gokrb5/v8 v8.4.3 h1:iTonLeSJOn7MVUtyMT+arAn5AKAPrkilzhGw8wE/Tq8= +github.com/jcmturner/gokrb5/v8 v8.4.3/go.mod h1:dqRwJGXznQrzw6cWmyo6kH+E7jksEQG/CyVWsJEsJO0= +github.com/jcmturner/rpc/v2 v2.0.3 h1:7FXXj8Ti1IaVFpSAziCZWNzbNuZmnvw/i6CqLNdWfZY= +github.com/jcmturner/rpc/v2 v2.0.3/go.mod h1:VUJYCIDm3PVOEHw8sgt091/20OJjskO/YJki3ELg/Hc= +github.com/jmespath/go-jmespath v0.3.0/go.mod h1:9QtRXoHjLGCJ5IBSaohpXITPlowMeeYCZ7fLUTSywik= +github.com/jmespath/go-jmespath v0.4.0 h1:BEgLn5cpjn8UN1mAw4NjwDrS35OdebyEtFe+9YPoQUg= +github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo= +github.com/jmespath/go-jmespath/internal/testify v1.5.1 h1:shLQSRRSCCPj3f2gpwzGwWFoC7ycTf1rcQZHOlsJ6N8= +github.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod h1:L3OGu8Wl2/fWfCI6z80xFu9LTZmf1ZRjMHUOPmWr69U= +github.com/jpillora/backoff v1.0.0/go.mod h1:J/6gKK9jxlEcS3zixgDgUAsiuZ7yrSoa/FX5e0EB2j4= +github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= +github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/json-iterator/go v1.1.11/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= +github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= +github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= +github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU= +github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= +github.com/julienschmidt/httprouter v1.3.0/go.mod h1:JR6WtHb+2LUe8TCKY3cZOxFyyO8IZAc4RVcycCCAKdM= +github.com/keybase/go-keychain v0.0.0-20190712205309-48d3d31d256d h1:Z+RDyXzjKE0i2sTjZ/b1uxiGtPhFy34Ou/Tk0qwN0kM= +github.com/keybase/go-keychain v0.0.0-20190712205309-48d3d31d256d/go.mod 
h1:JJNrCn9otv/2QP4D7SMJBgaleKpOf66PnW6F5WGNRIc= +github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= +github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= +github.com/klauspost/compress v1.10.8/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= +github.com/klauspost/compress v1.15.11 h1:Lcadnb3RKGin4FYM/orgq0qde+nc15E5Cbqg4B9Sx9c= +github.com/klauspost/compress v1.15.11/go.mod h1:QPwzmACJjUTFsnSHH934V6woptycfrDDJnH7hvFVbGM= +github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/konsorten/go-windows-terminal-sequences v1.0.3/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg= +github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= +github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= +github.com/kr/pretty v0.2.0/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= +github.com/kr/pretty v0.3.0 h1:WgNl7dwNpEZ6jJ9k1snq4pZsg7DOEN8hP9Xw0Tsjwk0= +github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= +github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE= +github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= +github.com/lib/pq v1.10.7 h1:p7ZhMD+KsSRozJr34udlUrhboJwWAgCg34+/ZZNvZZw= +github.com/lib/pq v1.10.7/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= +github.com/linkedin/goavro/v2 v2.9.8 h1:jN50elxBsGBDGVDEKqUlDuU1cFwJ11K/yrJCBMe/7Wg= +github.com/linkedin/goavro/v2 v2.9.8/go.mod h1:UgQUb2N/pmueQYH9bfqFioWxzYCZXSfF8Jw03O5sjqA= +github.com/magiconair/properties v1.8.5/go.mod h1:y3VJvCyxH9uVvJTWEGAELF3aiYNyPKd5NZ3oSwXrF60= +github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU= +github.com/mattn/go-isatty v0.0.3/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= +github.com/matttproud/golang_protobuf_extensions v1.0.1 h1:4hp9jkHxhMHkqkrB3Ix0jegS5sx/RkqARlsWZ6pIwiU= +github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= +github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg= +github.com/mitchellh/cli v1.0.0/go.mod h1:hNIlj7HEI86fIcpObd7a0FcrxTWetlwJDGcceTlRvqc= +github.com/mitchellh/go-homedir v1.0.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= +github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y= +github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= +github.com/mitchellh/go-testing-interface v1.0.0/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI= +github.com/mitchellh/gox v0.4.0/go.mod h1:Sd9lOJ0+aimLBi73mGofS1ycjY8lL3uZM3JPS42BGNg= +github.com/mitchellh/iochan v1.0.0/go.mod h1:JwYml1nuB7xOzsp52dPpHFffvOCDupsG0QubkSMEySY= +github.com/mitchellh/mapstructure v0.0.0-20160808181253-ca63d7c062ee/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= +github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= +github.com/mitchellh/mapstructure v1.4.1/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= +github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= 
+github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= +github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= +github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= +github.com/mtibben/percent v0.2.1 h1:5gssi8Nqo8QU/r2pynCm+hBQHpkB/uNK7BJCFogWdzs= +github.com/mtibben/percent v0.2.1/go.mod h1:KG9uO+SZkUp+VkRHsCdYQV3XSZrrSpR3O9ibNBTZrns= +github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= +github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= +github.com/nxadm/tail v1.4.4 h1:DQuhQpB1tVlglWS2hLQ5OV6B5r8aGxSrPc5Qo6uTN78= +github.com/nxadm/tail v1.4.4/go.mod h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A= +github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= +github.com/onsi/ginkgo v1.12.1/go.mod h1:zj2OWP4+oCPe1qIXoGWkgMRwljMUYCdkwsT2108oapk= +github.com/onsi/ginkgo v1.14.0 h1:2mOpI4JVVPBN+WQRa0WKH2eXR+Ey+uK4n7Zj0aYpIQA= +github.com/onsi/ginkgo v1.14.0/go.mod h1:iSB4RoI2tjJc9BBv4NKIKWKya62Rps+oPG/Lv9klQyY= +github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY= +github.com/onsi/gomega v1.10.1 h1:o0+MgICZLuZ7xjH7Vx6zS/zcu93/BEp1VwkIW1mEXCE= +github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo= +github.com/opentracing/opentracing-go v1.2.0/go.mod h1:GxEUsuufX4nBwe+T+Wl9TAgYrxe9dPLANfrWvHYVTgc= +github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc= +github.com/pelletier/go-toml v1.9.3/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= +github.com/pierrec/lz4 v2.0.5+incompatible h1:2xWsjqPFWcplujydGg4WmhC/6fZqK42wMM8aXeqhl0I= +github.com/pierrec/lz4 v2.0.5+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY= +github.com/pierrec/lz4/v4 v4.1.17 h1:kV4Ip+/hUBC+8T6+2EgburRtkE9ef4nbY3f4dFhGjMc= +github.com/pierrec/lz4/v4 v4.1.17/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4= +github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= +github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/sftp v1.10.1/go.mod h1:lYOWFsE0bwd1+KfKJaKeuokY15vzFx25BLbzYYoAxZI= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/posener/complete v1.1.1/go.mod h1:em0nMJCgc9GFtwrmVmEMR/ZL6WyhyjMBndrE9hABlRI= +github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= +github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo= +github.com/prometheus/client_golang v1.7.1/go.mod h1:PY5Wy2awLA44sXw4AOSfFBetzPP4j5+D6mVACh+pe2M= +github.com/prometheus/client_golang v1.11.0/go.mod h1:Z6t4BnS23TR94PD6BsDNk8yVqroYurpAkEiz0P2BEV0= +github.com/prometheus/client_golang v1.11.1/go.mod h1:Z6t4BnS23TR94PD6BsDNk8yVqroYurpAkEiz0P2BEV0= +github.com/prometheus/client_golang v1.12.1 h1:ZiaPsmm9uiBeaSMRznKsCDNtPCS0T3JVDGF+06gjBzk= +github.com/prometheus/client_golang v1.12.1/go.mod h1:3Z9XVyYiZYEO+YQWt3RD2R3jrbd179Rt297l4aS6nDY= 
+github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= +github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/client_model v0.2.0 h1:uq5h0d+GuxiXLJLNABMgp2qUWDPiLvgCzz2dUR+/W/M= +github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= +github.com/prometheus/common v0.10.0/go.mod h1:Tlit/dnDKsSWFlCLTWaA1cyBgKHSMdTB80sz/V91rCo= +github.com/prometheus/common v0.26.0/go.mod h1:M7rCNAaPfAosfx8veZJCuw84e35h3Cfd9VFqTh1DIvc= +github.com/prometheus/common v0.32.1 h1:hWIdL3N2HoUx3B8j3YN9mWor0qhY/NlEKZEaXxuIRh4= +github.com/prometheus/common v0.32.1/go.mod h1:vu+V0TpY+O6vW9J44gczi3Ap/oXXR10b+M/gUGO4Hls= +github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= +github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= +github.com/prometheus/procfs v0.1.3/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4OA4YeYWdaU= +github.com/prometheus/procfs v0.6.0/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA= +github.com/prometheus/procfs v0.7.3 h1:4jVXhlkAyzOScmCkXBTOLRLTz8EeU+eyjrwB/EPq0VU= +github.com/prometheus/procfs v0.7.3/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA= +github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475 h1:N/ElC8H3+5XpJzTSTfLsJV/mx9Q9g7kxmchpfZyxgzM= +github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4= +github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ= +github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= +github.com/rogpeppe/go-internal v1.9.0 h1:73kH8U+JUqXU8lRuOHeVHaa/SZPifC7BkcraZVejAe8= +github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs= +github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= +github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk= +github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= +github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts= +github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc= +github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc= +github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= +github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= +github.com/sirupsen/logrus v1.6.0/go.mod h1:7uNnSEd1DgxDLC74fIahvMZmmYsHGZGEOFrfsX/uA88= +github.com/sirupsen/logrus v1.8.1 h1:dJKuHgqk1NNQlqoA6BTlM1Wf9DOH3NBjQyu0h9+AZZE= +github.com/sirupsen/logrus v1.8.1/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0= +github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc= +github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= +github.com/spaolacci/murmur3 v1.1.0 
h1:7c1g84S4BPRrfL5Xrdp6fOJ206sU9y293DDHaoy0bLI= +github.com/spaolacci/murmur3 v1.1.0/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= +github.com/spf13/afero v1.6.0/go.mod h1:Ai8FlHk4v/PARR026UzYexafAt9roJ7LcLMAmO6Z93I= +github.com/spf13/cast v1.3.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= +github.com/spf13/cobra v1.2.1/go.mod h1:ExllRjgxM/piMAM+3tAZvg8fsklGAf3tPfi+i8t68Nk= +github.com/spf13/jwalterweatherman v1.1.0/go.mod h1:aNWZUN0dPAAO/Ljvb5BEdw96iTZ0EXowPYD95IqWIGo= +github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= +github.com/spf13/viper v1.8.1/go.mod h1:o0Pch8wJ9BVSWGQMbra6iw0oQ5oktSIBaujf1rJH9Ns= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE= +github.com/stretchr/objx v0.4.0 h1:M2gUjqZET1qApGOWNSnZ49BAIMX4F/1plDv3+l31EJ4= +github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= +github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= +github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= +github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= +github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.8.0 h1:pSgiaMZlXftHpm5L7V1+rVB+AZJydKsMxsQBIJw4PKk= +github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= +github.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69rRypqCw= +github.com/urfave/cli v1.22.10 h1:p8Fspmz3iTctJstry1PYS3HVdllxnEzTEsgIgtxTrCk= +github.com/urfave/cli v1.22.10/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0= +github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= +go.etcd.io/etcd/api/v3 v3.5.0/go.mod h1:cbVKeC6lCfl7j/8jBhAK6aIYO9XOjdptoxU/nLQcPvs= +go.etcd.io/etcd/client/pkg/v3 v3.5.0/go.mod h1:IJHfcCEKxYu1Os13ZdwCwIUTUVGYTSAM3YSwc9/Ac1g= +go.etcd.io/etcd/client/v2 v2.305.0/go.mod h1:h9puh54ZTgAKtEbut2oe9P4L/oqKCVB6xsXlzd7alYQ= +go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= +go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= +go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk= +go.opencensus.io v0.23.0/go.mod h1:XItmlyltB5F7CS4xOC1DcqMoFqwtC6OG2xF7mCv7P7E= +go.uber.org/atomic v1.7.0 h1:ADUqmZGgLDDfbSL9ZmPxKTybcoEYHgpYfELNoN+7hsw= +go.uber.org/atomic 
v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= +go.uber.org/multierr v1.6.0/go.mod h1:cdWPpRnG4AhwMwsgIHip0KRBQjJy5kYEpYjJxpXp9iU= +go.uber.org/ratelimit v0.2.0 h1:UQE2Bgi7p2B85uP5dC2bbRtig0C+OeNRnNEafLjsLPA= +go.uber.org/ratelimit v0.2.0/go.mod h1:YYBV4e4naJvhpitQrWJu1vCpgB7CboMe0qhltKt6mUg= +go.uber.org/zap v1.17.0/go.mod h1:MXVU+bhUf/A7Xi2HNOnopQOrmycQ5Ih87HtOu4q5SSo= +golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +golang.org/x/crypto v0.0.0-20181029021203-45a5f77698d3/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190701094942-4def268fd1a4/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20220722155217-630584e8d5aa h1:zuSxTR4o9y82ebqCUJYNGJbGPo6sKVl54f/TVDObg1c= +golang.org/x/crypto v0.0.0-20220722155217-630584e8d5aa/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= +golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= +golang.org/x/exp v0.0.0-20190829153037-c13cbed26979/go.mod h1:86+5VVa7VpoJ4kLfm080zCjGlMRFzhUhsZKEZO7MGek= +golang.org/x/exp v0.0.0-20191030013958-a1ab85dbe136/go.mod h1:JXzH8nQsPlswgeRAPE3MuO9GYsAcnJvJ4vnMwN/5qkY= +golang.org/x/exp v0.0.0-20191129062945-2f5052295587/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM= +golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6 h1:QE6XYQK6naiK1EPAe1g/ILLxN5RBoH5xkJk3CqlMI/Y= +golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU= +golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= +golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= +golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= +golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= +golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= +golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= 
+golang.org/x/lint v0.0.0-20190909230951-414d861bb4ac/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRuDixDT3tpyyb+LUpUlRWLxfhWrs= +golang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/lint v0.0.0-20201208152925-83fdc39ff7b5/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/lint v0.0.0-20210508222113-6edffad5e616/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE= +golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o= +golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= +golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY= +golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= +golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= +golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20181023162649-9b4f9f5ad519/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20181201002055-351d144fa1fc/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= +golang.org/x/net v0.0.0-20190613194153-d28f0bde5980/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190628185345-da137c7871d7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net 
v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200222125558-5a598a2470a0/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200506145744-7e3656a0809f/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200520004742-59133d7f0dd7/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200520182314-0ba52f642ac2/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20201031054903-ff519b6c9102/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20201209123823-ac852fbbde11/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20210119194325-5f4716e94777/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20210316092652-d523dce5a7f4/go.mod h1:RBQZq4jEuRlivfhVLdyRGr576XBO4/greRjx4P4O3yc= +golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= +golang.org/x/net v0.0.0-20210525063256-abc453219eb5/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20210726213435-c6fcb2dbf985/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20220127200216-cd36cc0744dd/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= +golang.org/x/net v0.0.0-20220725212005-46097bf591d3/go.mod h1:AaygXjzTFtRAg2ttMY5RMuhpJ3cNnI0XpyFJD1iQRSM= +golang.org/x/net v0.0.0-20220927171203-f486391704dc h1:FxpXZdoBqT8RjqTy6i1E8nXHhW21wK7ptQ/EPIGxzPQ= +golang.org/x/net v0.0.0-20220927171203-f486391704dc/go.mod h1:YDH+HFinaLZZlnHAfSS6ZXJJ9M9t4Dl22yv3iI2vPwk= +golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= +golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= 
+golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20200902213428-5d25da1a8d43/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20201109201403-9fd604954f58/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20201208152858-08078c50e5b5/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210218202405-ba52d332ba99/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210220000619-9bb904979d93/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210313182246-cd4f82c27b84/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210402161424-2e8d93401602/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210514164344-f6687ab2804c h1:pkQiBZBvdos9qq4wBAHqlzuZHEXo07pqV06ef90u1WI= +golang.org/x/oauth2 v0.0.0-20210514164344-f6687ab2804c/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20220923202941-7f9b1623fab7 h1:ZrnxWX62AgTKOSagEqxvb3ffipvEDX2pl7E1TdqLqIc= +golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181026203630-95b1ffbd15a5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod 
h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190712062909-fae7ac547cb7/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190904154756-749cb33beabd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191120155948-bd437916bb0e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200106162015-b016eb3dc98e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200331124033-c3d80250170d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200501052902-10377860bb8e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200515095857-1151b9dac4a9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200519105757-fe76b779f299/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200615200032-f1bc736245b1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200625212154-ddb9806d33ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200905004654-be1d3432aa8f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys 
v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210104204734-6f8348627aad/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210220050731-9a76102bfb43/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210305230114-8fe3ee5dd75b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210315160823-c6e025ad8005/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210403161142-5e06dd20ab57/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210603081109-ebe580a85c40/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220114195835-da31bd327af9/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220728004956-3c1f35247d10 h1:WIoqL4EROvwiPdUtaip4VcDdpZ4kha7wBWZrbVKCIZg= +golang.org/x/sys v0.0.0-20220728004956-3c1f35247d10/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/term v0.0.0-20210927222741-03fcf44c2211 h1:JGgROgKl9N8DuW20oFS5gxc+lE67/N3FcwmBPMe7ArY= +golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= +golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= +golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.7 h1:olpwvP2KacW1ZWvsR7uQhoyTYvKAupfQrRGBFM352Gk= +golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= +golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time 
v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= +golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191112195655-aa38f8e97acc/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20191227053925-7b8e75db28f4/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200117161641-43d50277825c/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200122220014-bf1340f18c4a/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200204074204-1cc6d1ef6c74/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200207183749-b753a1ba74fa/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200212150539-ea181f53ac56/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools 
v0.0.0-20200224181240-023911ca70b2/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200227222343-706bc42d1f0d/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200304193943-95d2e580d8eb/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= +golang.org/x/tools v0.0.0-20200312045724-11d5b4c81c7d/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= +golang.org/x/tools v0.0.0-20200331025713-a30bf2db82d4/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= +golang.org/x/tools v0.0.0-20200501065659-ab2804fb9c9d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200512131952-2bc93b1c0c88/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200515010526-7d3b6ebf133d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200618134242-20370b0cb4b2/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200904185747-39188db58858/go.mod h1:Cj7w3i3Rnn0Xh82ur9kSqwfTHTeVxaDqrfMjpcNT6bE= +golang.org/x/tools v0.0.0-20201110124207-079ba7bd75cd/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20201201161351-ac6f37ff4c2a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20201208233053-a543418bbed2/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= +golang.org/x/tools v0.1.2/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +gonum.org/v1/gonum v0.12.0 h1:xKuo6hzt+gMav00meVPUlXwSdoEJP46BR+wdxQEFK2o= +gonum.org/v1/gonum v0.12.0/go.mod h1:73TDxJfAAHeA8Mk9mf8NlIppyhQNo5GLTcYeqgo2lvY= +google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= +google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M= +google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= +google.golang.org/api v0.9.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= +google.golang.org/api v0.13.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.14.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.15.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= 
+google.golang.org/api v0.17.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.18.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.19.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.20.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.22.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.24.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= +google.golang.org/api v0.28.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= +google.golang.org/api v0.29.0/go.mod h1:Lcubydp8VUV7KeIHD9z2Bys/sm/vGKnG1UHuDBSrHWM= +google.golang.org/api v0.30.0/go.mod h1:QGmEvQ87FHZNiUVJkT14jQNYJ4ZJjdRF23ZXz5138Fc= +google.golang.org/api v0.35.0/go.mod h1:/XrVsuzM0rZmrsbjJutiuftIzeuTQcEeaYcSk/mQ1dg= +google.golang.org/api v0.36.0/go.mod h1:+z5ficQTmoYpPn8LCUNVpK5I7hwkpjbcgqA7I34qYtE= +google.golang.org/api v0.40.0/go.mod h1:fYKFpnQN0DsDSKRVRcQSDQNtqWPfM9i+zNPxepjRCQ8= +google.golang.org/api v0.41.0/go.mod h1:RkxM5lITDfTzmyKFPt+wGrCJbVfniCr2ool8kTBzRTU= +google.golang.org/api v0.43.0/go.mod h1:nQsDGjRXMo4lvh5hP0TKqF244gqhGcr/YSIykhUk/94= +google.golang.org/api v0.44.0/go.mod h1:EBOGZqzyhtvMDoxwS97ctnh0zUmYY6CxqXsc1AvkYD8= +google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= +google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0= +google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/appengine v1.6.7 h1:FZR1q0exgwxzPzp/aF+VccGrSfxfPpkBqjIIEq3ru6c= +google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= +google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= +google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= +google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8= +google.golang.org/genproto v0.0.0-20191108220845-16a3f7862a1a/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191115194625-c23dd37a84c9/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191216164720-4f79533eabd1/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191230161307-f3c370f40bfb/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200115191322-ca5a22157cba/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= 
+google.golang.org/genproto v0.0.0-20200122232147-0452cf42e150/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200204135345-fa8e72b47b90/go.mod h1:GmwEX6Z4W5gMy59cAlVYjN9JhxgbQH6Gn+gFDQe2lzA= +google.golang.org/genproto v0.0.0-20200212174721-66ed5ce911ce/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200224152610-e50cd9704f63/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200228133532-8c2c7df3a383/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200305110556-506484158171/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200312145019-da6875a35672/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200331122359-1ee6d9798940/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200430143042-b979b6f78d84/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200511104702-f5ebc3bea380/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200513103714-09dca8ec2884/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200515170657-fc4c6c6a6587/go.mod h1:YsZOwe1myG/8QRHRsmBRE1LrgQY60beZKjly0O1fX9U= +google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= +google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7FcilCzHH/e9qn6dsT145K34l5v+OpcnNgKAAA= +google.golang.org/genproto v0.0.0-20200729003335-053ba62fc06f/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200804131852-c06518451d9c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200825200019-8632dd797987/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200904004341-0bd0a958aa1d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201109203340-2640f1f9cdfb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201201144952-b05cb90ed32e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201210142538-e3217bee35cc/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201214200347-8c77b98c765d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210222152913-aa3ee6e6a81c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210303154014-9728d6b83eeb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210310155132-4ce2db91004e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210319143718-93e7006c17a6/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210402141018-6c239bbf2bb1/go.mod h1:9lPAdzaEmUacj36I+k7YKbEc5CXzPIeORRgDAUOu28A= +google.golang.org/genproto v0.0.0-20210602131652-f16073e35f0c/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= +google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= +google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= +google.golang.org/grpc v1.21.1/go.mod 
h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= +google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= +google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= +google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.27.1/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.28.0/go.mod h1:rpkK4SK4GF4Ach/+MFLZUBavHOvF2JJB5uozKKal+60= +google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk= +google.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.31.1/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.33.1/go.mod h1:fr5YgcSWrqhRRxogOsw7RzIpsmvOZ6IcH4kBYTpR3n0= +google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc= +google.golang.org/grpc v1.34.0/go.mod h1:WotjhfgOW/POjDeRt8vscBtXq+2VjORFy659qA51WJ8= +google.golang.org/grpc v1.35.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= +google.golang.org/grpc v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= +google.golang.org/grpc v1.36.1/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= +google.golang.org/grpc v1.38.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= +google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= +google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= +google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= +google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE= +google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo= +google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4= +google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= +google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= +google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= +google.golang.org/protobuf v1.28.1 h1:d0NfwRgPtno5B1Wa6L2DAG+KivqkdutMf1UhdNx175w= +google.golang.org/protobuf v1.28.1/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= +gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= +gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= 
+gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys= +gopkg.in/ini.v1 v1.62.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= +gopkg.in/natefinch/lumberjack.v2 v2.0.0/go.mod h1:l0ndWWf7gzL7RNwBG7wST/UCcT4T24xpD6X8LsfU/+k= +gopkg.in/square/go-jose.v2 v2.4.1/go.mod h1:M9dMgbHiYLoDGQrXy7OpJDJWiKiU//h+vD76mk0e1AI= +gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ= +gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw= +gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.5/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= +gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= +honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= +honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= +rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= +rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= +rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA= diff --git a/integration_test/datagen/livestream/livestream.go b/integration_test/datagen/livestream/livestream.go new file mode 100644 index 0000000000000..a5b68075f0391 --- /dev/null +++ b/integration_test/datagen/livestream/livestream.go @@ -0,0 +1,147 @@ +package livestream + +import ( + "context" + "datagen/gen" + "datagen/livestream/proto" + "datagen/sink" + "encoding/json" + "fmt" + "math" + "time" + + "github.com/brianvoe/gofakeit/v6" + protobuf "google.golang.org/protobuf/proto" +) + +type liveClient struct { + faker *gofakeit.Faker + + ip string + agent string + id string + country string + roomId string +} + +func (c *liveClient) emulate() *liveMetric { + longestFreezeDuration := int64(c.faker.UintRange(0, 100)) + return &liveMetric{ + Ip: c.ip, + Agent: c.agent, + Id: c.id, + RoomId: c.roomId, + Country: c.country, + VideoBps: int64(c.faker.UintRange(1000, 1000000)), + VideoFps: int64(c.faker.UintRange(30, 40)), + VideoRtt: 
int64(c.faker.UintRange(100, 300)),
+		VideoLostPps:               int64(c.faker.UintRange(0, 10)),
+		VideoLongestFreezeDuration: longestFreezeDuration,
+		VideoTotalFreezeDuration:   longestFreezeDuration + int64(c.faker.UintRange(0, 20)),
+		ReportTimestamp:            time.Now().Format(time.RFC3339),
+	}
+}
+
+func (c *liveClient) reportMetric(ctx context.Context, outCh chan<- sink.SinkRecord) {
+	// Create the ticker once and stop it on exit, instead of allocating a new
+	// ticker on every loop iteration.
+	ticker := time.NewTicker(10 * time.Second)
+	defer ticker.Stop()
+	for {
+		select {
+		case <-ticker.C:
+			record := c.emulate()
+			select {
+			case outCh <- record:
+			case <-ctx.Done():
+				return
+			}
+		case <-ctx.Done():
+			return
+		}
+	}
+}
+
+type liveMetric struct {
+	sink.BaseSinkRecord
+
+	Ip                         string `json:"client_ip"`
+	Agent                      string `json:"user_agent"`
+	Id                         string `json:"user_id"`
+	RoomId                     string `json:"room_id"`
+	VideoBps                   int64  `json:"video_bps"`
+	VideoFps                   int64  `json:"video_fps"`
+	VideoRtt                   int64  `json:"video_rtt"`
+	VideoLostPps               int64  `json:"video_lost_pps"`
+	VideoLongestFreezeDuration int64  `json:"video_longest_freeze_duration"`
+	VideoTotalFreezeDuration   int64  `json:"video_total_freeze_duration"`
+	ReportTimestamp            string `json:"report_timestamp"`
+	Country                    string `json:"country"`
+}
+
+func (r *liveMetric) ToPostgresSql() string {
+	return fmt.Sprintf(
+		`
+INSERT INTO %s (client_ip, user_agent, user_id, room_id, video_bps, video_fps, video_rtt, video_lost_pps, video_longest_freeze_duration, video_total_freeze_duration, report_timestamp, country)
+VALUES ('%s', '%s', '%s', '%s', %d, %d, %d, %d, %d, %d, '%s', '%s')
+`,
+		"live_stream_metrics",
+		r.Ip, r.Agent, r.Id, r.RoomId, r.VideoBps, r.VideoFps, r.VideoRtt, r.VideoLostPps, r.VideoLongestFreezeDuration, r.VideoTotalFreezeDuration, r.ReportTimestamp, r.Country)
+}
+
+func (r *liveMetric) ToJson() (topic string, key string, data []byte) {
+	data, _ = json.Marshal(r)
+	return "live_stream_metrics", fmt.Sprint(r.Id), data
+}
+
+func (r *liveMetric) ToProtobuf() (topic string, key string, data []byte) {
+	m := proto.LiveStreamMetrics{
+		ClientIp:                   r.Ip,
+		UserAgent:                  r.Agent,
+		UserId:                     r.Id,
+		RoomId:                     r.RoomId,
+		VideoBps:                   r.VideoBps,
+		VideoFps:                   r.VideoFps,
+		VideoRtt:                   r.VideoRtt,
+		VideoLostPps:               r.VideoLostPps,
+		VideoLongestFreezeDuration: r.VideoLongestFreezeDuration,
+		VideoTotalFreezeDuration:   r.VideoTotalFreezeDuration,
+		ReportTimestamp:            time.Now().Unix(),
+		Country:                    r.Country,
+	}
+	data, err := protobuf.Marshal(&m)
+	if err != nil {
+		panic(err)
+	}
+	return "live_stream_metrics", fmt.Sprint(r.Id), data
+}
+
+type liveStreamMetricsGen struct {
+	faker *gofakeit.Faker
+	cfg   gen.GeneratorConfig
+}
+
+func NewLiveStreamMetricsGen(cfg gen.GeneratorConfig) gen.LoadGenerator {
+	return &liveStreamMetricsGen{
+		faker: gofakeit.New(0),
+		cfg:   cfg,
+	}
+}
+
+func (g *liveStreamMetricsGen) KafkaTopics() []string {
+	return []string{"live_stream_metrics"}
+}
+
+func (g *liveStreamMetricsGen) Load(ctx context.Context, outCh chan<- sink.SinkRecord) {
+	// The number of clients is roughly the QPS.
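+	// Each client reports one metric every 10 seconds on its own goroutine;
+	// the overall send rate is additionally capped by the rate limiter in
+	// generateLoad.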
+ clients := int(math.Min(float64(g.cfg.Qps), 1000)) + for i := 0; i < clients; i++ { + go func(i int) { + c := &liveClient{ + faker: g.faker, + id: fmt.Sprint(i), + agent: g.faker.UserAgent(), + ip: fmt.Sprintf("%s:%d", g.faker.IPv4Address(), g.faker.Uint16()), + country: g.faker.Country(), + roomId: fmt.Sprint(g.faker.Uint32()), + } + c.reportMetric(ctx, outCh) + }(i) + } +} diff --git a/integration_test/datagen/livestream/proto/livestream.pb.go b/integration_test/datagen/livestream/proto/livestream.pb.go new file mode 100644 index 0000000000000..3c098ca70fbbe --- /dev/null +++ b/integration_test/datagen/livestream/proto/livestream.pb.go @@ -0,0 +1,256 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.28.1 +// protoc v3.21.9 +// source: livestream.proto + +package proto + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + reflect "reflect" + sync "sync" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type LiveStreamMetrics struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + ClientIp string `protobuf:"bytes,1,opt,name=client_ip,json=clientIp,proto3" json:"client_ip,omitempty"` + UserAgent string `protobuf:"bytes,2,opt,name=user_agent,json=userAgent,proto3" json:"user_agent,omitempty"` + UserId string `protobuf:"bytes,3,opt,name=user_id,json=userId,proto3" json:"user_id,omitempty"` + RoomId string `protobuf:"bytes,4,opt,name=room_id,json=roomId,proto3" json:"room_id,omitempty"` + VideoBps int64 `protobuf:"varint,5,opt,name=video_bps,json=videoBps,proto3" json:"video_bps,omitempty"` + VideoFps int64 `protobuf:"varint,6,opt,name=video_fps,json=videoFps,proto3" json:"video_fps,omitempty"` + VideoRtt int64 `protobuf:"varint,7,opt,name=video_rtt,json=videoRtt,proto3" json:"video_rtt,omitempty"` + VideoLostPps int64 `protobuf:"varint,8,opt,name=video_lost_pps,json=videoLostPps,proto3" json:"video_lost_pps,omitempty"` + VideoLongestFreezeDuration int64 `protobuf:"varint,9,opt,name=video_longest_freeze_duration,json=videoLongestFreezeDuration,proto3" json:"video_longest_freeze_duration,omitempty"` + VideoTotalFreezeDuration int64 `protobuf:"varint,10,opt,name=video_total_freeze_duration,json=videoTotalFreezeDuration,proto3" json:"video_total_freeze_duration,omitempty"` + ReportTimestamp int64 `protobuf:"varint,11,opt,name=report_timestamp,json=reportTimestamp,proto3" json:"report_timestamp,omitempty"` + Country string `protobuf:"bytes,12,opt,name=country,proto3" json:"country,omitempty"` +} + +func (x *LiveStreamMetrics) Reset() { + *x = LiveStreamMetrics{} + if protoimpl.UnsafeEnabled { + mi := &file_livestream_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *LiveStreamMetrics) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*LiveStreamMetrics) ProtoMessage() {} + +func (x *LiveStreamMetrics) ProtoReflect() protoreflect.Message { + mi := &file_livestream_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use 
LiveStreamMetrics.ProtoReflect.Descriptor instead. +func (*LiveStreamMetrics) Descriptor() ([]byte, []int) { + return file_livestream_proto_rawDescGZIP(), []int{0} +} + +func (x *LiveStreamMetrics) GetClientIp() string { + if x != nil { + return x.ClientIp + } + return "" +} + +func (x *LiveStreamMetrics) GetUserAgent() string { + if x != nil { + return x.UserAgent + } + return "" +} + +func (x *LiveStreamMetrics) GetUserId() string { + if x != nil { + return x.UserId + } + return "" +} + +func (x *LiveStreamMetrics) GetRoomId() string { + if x != nil { + return x.RoomId + } + return "" +} + +func (x *LiveStreamMetrics) GetVideoBps() int64 { + if x != nil { + return x.VideoBps + } + return 0 +} + +func (x *LiveStreamMetrics) GetVideoFps() int64 { + if x != nil { + return x.VideoFps + } + return 0 +} + +func (x *LiveStreamMetrics) GetVideoRtt() int64 { + if x != nil { + return x.VideoRtt + } + return 0 +} + +func (x *LiveStreamMetrics) GetVideoLostPps() int64 { + if x != nil { + return x.VideoLostPps + } + return 0 +} + +func (x *LiveStreamMetrics) GetVideoLongestFreezeDuration() int64 { + if x != nil { + return x.VideoLongestFreezeDuration + } + return 0 +} + +func (x *LiveStreamMetrics) GetVideoTotalFreezeDuration() int64 { + if x != nil { + return x.VideoTotalFreezeDuration + } + return 0 +} + +func (x *LiveStreamMetrics) GetReportTimestamp() int64 { + if x != nil { + return x.ReportTimestamp + } + return 0 +} + +func (x *LiveStreamMetrics) GetCountry() string { + if x != nil { + return x.Country + } + return "" +} + +var File_livestream_proto protoreflect.FileDescriptor + +var file_livestream_proto_rawDesc = []byte{ + 0x0a, 0x10, 0x6c, 0x69, 0x76, 0x65, 0x73, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x2e, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x12, 0x06, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x22, 0xc5, 0x03, 0x0a, 0x11, 0x4c, + 0x69, 0x76, 0x65, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, + 0x12, 0x1b, 0x0a, 0x09, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x5f, 0x69, 0x70, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x08, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x49, 0x70, 0x12, 0x1d, 0x0a, + 0x0a, 0x75, 0x73, 0x65, 0x72, 0x5f, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x09, 0x75, 0x73, 0x65, 0x72, 0x41, 0x67, 0x65, 0x6e, 0x74, 0x12, 0x17, 0x0a, 0x07, + 0x75, 0x73, 0x65, 0x72, 0x5f, 0x69, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x75, + 0x73, 0x65, 0x72, 0x49, 0x64, 0x12, 0x17, 0x0a, 0x07, 0x72, 0x6f, 0x6f, 0x6d, 0x5f, 0x69, 0x64, + 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x72, 0x6f, 0x6f, 0x6d, 0x49, 0x64, 0x12, 0x1b, + 0x0a, 0x09, 0x76, 0x69, 0x64, 0x65, 0x6f, 0x5f, 0x62, 0x70, 0x73, 0x18, 0x05, 0x20, 0x01, 0x28, + 0x03, 0x52, 0x08, 0x76, 0x69, 0x64, 0x65, 0x6f, 0x42, 0x70, 0x73, 0x12, 0x1b, 0x0a, 0x09, 0x76, + 0x69, 0x64, 0x65, 0x6f, 0x5f, 0x66, 0x70, 0x73, 0x18, 0x06, 0x20, 0x01, 0x28, 0x03, 0x52, 0x08, + 0x76, 0x69, 0x64, 0x65, 0x6f, 0x46, 0x70, 0x73, 0x12, 0x1b, 0x0a, 0x09, 0x76, 0x69, 0x64, 0x65, + 0x6f, 0x5f, 0x72, 0x74, 0x74, 0x18, 0x07, 0x20, 0x01, 0x28, 0x03, 0x52, 0x08, 0x76, 0x69, 0x64, + 0x65, 0x6f, 0x52, 0x74, 0x74, 0x12, 0x24, 0x0a, 0x0e, 0x76, 0x69, 0x64, 0x65, 0x6f, 0x5f, 0x6c, + 0x6f, 0x73, 0x74, 0x5f, 0x70, 0x70, 0x73, 0x18, 0x08, 0x20, 0x01, 0x28, 0x03, 0x52, 0x0c, 0x76, + 0x69, 0x64, 0x65, 0x6f, 0x4c, 0x6f, 0x73, 0x74, 0x50, 0x70, 0x73, 0x12, 0x41, 0x0a, 0x1d, 0x76, + 0x69, 0x64, 0x65, 0x6f, 0x5f, 0x6c, 0x6f, 0x6e, 0x67, 0x65, 0x73, 0x74, 0x5f, 0x66, 0x72, 0x65, + 0x65, 0x7a, 0x65, 0x5f, 0x64, 0x75, 0x72, 
0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x09, 0x20, 0x01, + 0x28, 0x03, 0x52, 0x1a, 0x76, 0x69, 0x64, 0x65, 0x6f, 0x4c, 0x6f, 0x6e, 0x67, 0x65, 0x73, 0x74, + 0x46, 0x72, 0x65, 0x65, 0x7a, 0x65, 0x44, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x3d, + 0x0a, 0x1b, 0x76, 0x69, 0x64, 0x65, 0x6f, 0x5f, 0x74, 0x6f, 0x74, 0x61, 0x6c, 0x5f, 0x66, 0x72, + 0x65, 0x65, 0x7a, 0x65, 0x5f, 0x64, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x0a, 0x20, + 0x01, 0x28, 0x03, 0x52, 0x18, 0x76, 0x69, 0x64, 0x65, 0x6f, 0x54, 0x6f, 0x74, 0x61, 0x6c, 0x46, + 0x72, 0x65, 0x65, 0x7a, 0x65, 0x44, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x29, 0x0a, + 0x10, 0x72, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, + 0x70, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x03, 0x52, 0x0f, 0x72, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x54, + 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x12, 0x18, 0x0a, 0x07, 0x63, 0x6f, 0x75, 0x6e, + 0x74, 0x72, 0x79, 0x18, 0x0c, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x63, 0x6f, 0x75, 0x6e, 0x74, + 0x72, 0x79, 0x42, 0x12, 0x5a, 0x10, 0x6c, 0x69, 0x76, 0x65, 0x73, 0x74, 0x72, 0x65, 0x61, 0x6d, + 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, +} + +var ( + file_livestream_proto_rawDescOnce sync.Once + file_livestream_proto_rawDescData = file_livestream_proto_rawDesc +) + +func file_livestream_proto_rawDescGZIP() []byte { + file_livestream_proto_rawDescOnce.Do(func() { + file_livestream_proto_rawDescData = protoimpl.X.CompressGZIP(file_livestream_proto_rawDescData) + }) + return file_livestream_proto_rawDescData +} + +var file_livestream_proto_msgTypes = make([]protoimpl.MessageInfo, 1) +var file_livestream_proto_goTypes = []interface{}{ + (*LiveStreamMetrics)(nil), // 0: schema.LiveStreamMetrics +} +var file_livestream_proto_depIdxs = []int32{ + 0, // [0:0] is the sub-list for method output_type + 0, // [0:0] is the sub-list for method input_type + 0, // [0:0] is the sub-list for extension type_name + 0, // [0:0] is the sub-list for extension extendee + 0, // [0:0] is the sub-list for field type_name +} + +func init() { file_livestream_proto_init() } +func file_livestream_proto_init() { + if File_livestream_proto != nil { + return + } + if !protoimpl.UnsafeEnabled { + file_livestream_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*LiveStreamMetrics); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_livestream_proto_rawDesc, + NumEnums: 0, + NumMessages: 1, + NumExtensions: 0, + NumServices: 0, + }, + GoTypes: file_livestream_proto_goTypes, + DependencyIndexes: file_livestream_proto_depIdxs, + MessageInfos: file_livestream_proto_msgTypes, + }.Build() + File_livestream_proto = out.File + file_livestream_proto_rawDesc = nil + file_livestream_proto_goTypes = nil + file_livestream_proto_depIdxs = nil +} diff --git a/integration_test/datagen/load_gen.go b/integration_test/datagen/load_gen.go new file mode 100644 index 0000000000000..69cd71687d184 --- /dev/null +++ b/integration_test/datagen/load_gen.go @@ -0,0 +1,133 @@ +package main + +import ( + "context" + "datagen/ad_click" + "datagen/ad_ctr" + "datagen/cdn_metrics" + "datagen/clickstream" + "datagen/delivery" + "datagen/ecommerce" + "datagen/gen" + "datagen/livestream" + "datagen/nexmark" + "datagen/sink" + 
"datagen/sink/kafka" + "datagen/sink/kinesis" + "datagen/sink/mysql" + "datagen/sink/postgres" + "datagen/sink/pulsar" + "datagen/twitter" + "fmt" + "log" + "time" + + "go.uber.org/ratelimit" +) + +func createSink(ctx context.Context, cfg gen.GeneratorConfig) (sink.Sink, error) { + if cfg.Sink == "postgres" { + return postgres.OpenPostgresSink(cfg.Postgres) + } else if cfg.Sink == "mysql" { + return mysql.OpenMysqlSink(cfg.Mysql) + } else if cfg.Sink == "kafka" { + return kafka.OpenKafkaSink(ctx, cfg.Kafka) + } else if cfg.Sink == "pulsar" { + return pulsar.OpenPulsarSink(ctx, cfg.Pulsar) + } else if cfg.Sink == "kinesis" { + return kinesis.OpenKinesisSink(cfg.Kinesis) + } else { + return nil, fmt.Errorf("invalid sink type: %s", cfg.Sink) + } +} + +// newgen creates a new generator based on the given config. +func newGen(cfg gen.GeneratorConfig) (gen.LoadGenerator, error) { + if cfg.Mode == "ad-click" { + return ad_click.NewAdClickGen(), nil + } else if cfg.Mode == "ad-ctr" { + return ad_ctr.NewAdCtrGen(), nil + } else if cfg.Mode == "twitter" { + return twitter.NewTwitterGen(), nil + } else if cfg.Mode == "cdn-metrics" { + return cdn_metrics.NewCdnMetricsGen(cfg), nil + } else if cfg.Mode == "clickstream" { + return clickstream.NewClickStreamGen(), nil + } else if cfg.Mode == "ecommerce" { + return ecommerce.NewEcommerceGen(), nil + } else if cfg.Mode == "delivery" { + return delivery.NewOrderEventGen(cfg), nil + } else if cfg.Mode == "livestream" || cfg.Mode == "superset" { + return livestream.NewLiveStreamMetricsGen(cfg), nil + } else if cfg.Mode == "nexmark" { + return nexmark.NewNexmarkGen(cfg), nil + } else { + return nil, fmt.Errorf("invalid mode: %s", cfg.Mode) + } +} + +// spawnGen spawns one or more goroutines to generate data and send it to outCh. +func spawnGen(ctx context.Context, cfg gen.GeneratorConfig, outCh chan<- sink.SinkRecord) (gen.LoadGenerator, error) { + gen, err := newGen(cfg) + if err != nil { + return nil, err + } + go gen.Load(ctx, outCh) + return gen, nil +} + +// generateLoad generates data and sends it to the given sink. +func generateLoad(ctx context.Context, cfg gen.GeneratorConfig) error { + sinkImpl, err := createSink(ctx, cfg) + if err != nil { + return err + } + defer func() { + if err = sinkImpl.Close(); err != nil { + log.Print(err) + } + }() + + outCh := make(chan sink.SinkRecord, 1000) + gen, err := spawnGen(ctx, cfg, outCh) + if err != nil { + return err + } + + err = sinkImpl.Prepare(gen.KafkaTopics()) + if err != nil { + return err + } + + count := int64(0) + initTime := time.Now() + prevTime := time.Now() + ticker := time.NewTicker(time.Second) + defer ticker.Stop() + rl := ratelimit.New(cfg.Qps, ratelimit.WithoutSlack) // per second + for { + select { + case <-ctx.Done(): + return nil + case <-ticker.C: + if time.Since(prevTime) >= 10*time.Second { + log.Printf("Sent %d records in total (Elasped: %s)", count, time.Since(initTime).String()) + prevTime = time.Now() + } + case record := <-outCh: + if cfg.PrintInsert { + fmt.Println(record.ToPostgresSql()) + } + // Consume records from the channel and send to sink. 
+			if err := sinkImpl.WriteRecord(ctx, cfg.Format, record); err != nil {
+				return err
+			}
+			_ = rl.Take()
+			count++
+			if time.Since(prevTime) >= 10*time.Second {
+				log.Printf("Sent %d records in total (Elapsed: %s)", count, time.Since(initTime).String())
+				prevTime = time.Now()
+			}
+		}
+	}
+}
diff --git a/integration_test/datagen/main.go b/integration_test/datagen/main.go
new file mode 100644
index 0000000000000..404934464868d
--- /dev/null
+++ b/integration_test/datagen/main.go
@@ -0,0 +1,214 @@
+package main
+
+import (
+	"context"
+	"datagen/gen"
+	"log"
+	"os"
+	"os/signal"
+	"syscall"
+
+	"github.com/urfave/cli"
+)
+
+var cfg gen.GeneratorConfig = gen.GeneratorConfig{}
+
+func runCommand() error {
+	terminateCh := make(chan os.Signal, 1)
+	signal.Notify(terminateCh, os.Interrupt, syscall.SIGTERM)
+
+	ctx, cancel := context.WithCancel(context.Background())
+	go func() {
+		<-terminateCh
+		log.Println("Cancelled")
+		cancel()
+	}()
+	return generateLoad(ctx, cfg)
+}
+
+func main() {
+
+	app := &cli.App{
+		Commands: []cli.Command{
+			{
+				Name: "postgres",
+				Flags: []cli.Flag{
+					cli.StringFlag{
+						Name:        "host",
+						Usage:       "The host address of the PostgreSQL server",
+						Required:    false,
+						Value:       "localhost",
+						Destination: &cfg.Postgres.DbHost,
+					},
+					cli.StringFlag{
+						Name:        "db",
+						Usage:       "The database where the target table is located",
+						Required:    false,
+						Value:       "dev",
+						Destination: &cfg.Postgres.Database,
+					},
+					cli.IntFlag{
+						Name:        "port",
+						Usage:       "The port of the PostgreSQL server",
+						Required:    false,
+						Value:       4566,
+						Destination: &cfg.Postgres.DbPort,
+					},
+					cli.StringFlag{
+						Name:        "user",
+						Usage:       "The user name for the PostgreSQL server",
+						Required:    false,
+						Value:       "root",
+						Destination: &cfg.Postgres.DbUser,
+					},
+				},
+				Action: func(c *cli.Context) error {
+					cfg.Sink = "postgres"
+					return runCommand()
+				},
+			},
+			{
+				Name: "mysql",
+				Flags: []cli.Flag{
+					cli.StringFlag{
+						Name:        "host",
+						Usage:       "The host address of the MySQL server",
+						Required:    false,
+						Value:       "localhost",
+						Destination: &cfg.Mysql.DbHost,
+					},
+					cli.StringFlag{
+						Name:        "db",
+						Usage:       "The database where the target table is located",
+						Required:    false,
+						Value:       "mydb",
+						Destination: &cfg.Mysql.Database,
+					},
+					cli.IntFlag{
+						Name:        "port",
+						Usage:       "The port of the MySQL server",
+						Required:    false,
+						Value:       3306,
+						Destination: &cfg.Mysql.DbPort,
+					},
+					cli.StringFlag{
+						Name:        "user",
+						Usage:       "The user name for the MySQL server",
+						Required:    false,
+						Value:       "mysqluser",
+						Destination: &cfg.Mysql.DbUser,
+					},
+					cli.StringFlag{
+						Name:        "password",
+						Usage:       "The password for the MySQL server",
+						Required:    false,
+						Value:       "mysqlpw",
+						Destination: &cfg.Mysql.DbPassword,
+					},
+				},
+				Action: func(c *cli.Context) error {
+					cfg.Sink = "mysql"
+					return runCommand()
+				},
+			},
+			{
+				Name: "kafka",
+				Flags: []cli.Flag{
+					cli.StringFlag{
+						Name:        "brokers",
+						Usage:       "Kafka bootstrap brokers to connect to, as a comma-separated list",
+						Required:    true,
+						Destination: &cfg.Kafka.Brokers,
+					},
+					cli.BoolFlag{
+						Name:        "no-recreate",
+						Usage:       "Do not recreate the Kafka topic when it exists.",
+						Required:    false,
+						Destination: &cfg.Kafka.NoRecreateIfExists,
+					},
+				},
+				Action: func(c *cli.Context) error {
+					cfg.Sink = "kafka"
+					return runCommand()
+				},
+				HelpName: "datagen kafka",
+			},
+			{
+				Name: "pulsar",
+				Flags: []cli.Flag{
+					cli.StringFlag{
+						Name:        "brokers",
+						Usage:       "Pulsar brokers to connect to, as a comma-separated list",
+						Required:    true,
+						Destination: &cfg.Pulsar.Brokers,
+					},
+				},
+				Action: func(c *cli.Context) error {
+					cfg.Sink = "pulsar"
+					return runCommand()
+				},
+				HelpName: "datagen pulsar",
+			},
+			{
+				Name: "kinesis",
+				Flags: []cli.Flag{
+					cli.StringFlag{
+						Name:        "region",
+						Usage:       "The region where the Kinesis stream resides",
+						Required:    true,
+						Destination: &cfg.Kinesis.Region,
+					},
+					cli.StringFlag{
+						Name:        "name",
+						Usage:       "The Kinesis stream name",
+						Required:    true,
+						Destination: &cfg.Kinesis.StreamName,
+					},
+				},
+				Action: func(c *cli.Context) error {
+					cfg.Sink = "kinesis"
+					return runCommand()
+				},
+				HelpName: "datagen kinesis",
+			},
+		},
+		Flags: []cli.Flag{
+			cli.BoolFlag{
+				Name:        "print",
+				Usage:       "Whether to print the content of every event",
+				Required:    false,
+				Destination: &cfg.PrintInsert,
+			},
+			cli.IntFlag{
+				Name:        "qps",
+				Usage:       "Number of messages to send per second",
+				Required:    false,
+				Value:       1,
+				Destination: &cfg.Qps,
+			},
+			cli.StringFlag{
+				Name:        "mode",
+				Usage:       "ad-click | ad-ctr | twitter | cdn-metrics | clickstream | ecommerce | delivery | livestream | nexmark",
+				Required:    true,
+				Destination: &cfg.Mode,
+			},
+			cli.StringFlag{
+				Name:        "format",
+				Usage:       "The output record format: json | protobuf. Used when the sink is a message queue.",
+				Value:       "json",
+				Required:    false,
+				Destination: &cfg.Format,
+			},
+			cli.BoolFlag{
+				Name:        "heavytail",
+				Usage:       "Whether the tail probability is high. If true, we will use a uniform distribution for randomizing values.",
+				Required:    false,
+				Destination: &cfg.HeavyTail,
+			},
+		},
+	}
+	err := app.Run(os.Args)
+	if err != nil {
+		log.Fatalln(err)
+	}
+}
diff --git a/integration_test/datagen/nexmark/auction.go b/integration_test/datagen/nexmark/auction.go
new file mode 100644
index 0000000000000..5bc293268da41
--- /dev/null
+++ b/integration_test/datagen/nexmark/auction.go
@@ -0,0 +1,67 @@
+package nexmark
+
+import (
+	"context"
+	"datagen/gen"
+	"datagen/sink"
+	"encoding/json"
+	"fmt"
+	"time"
+
+	"github.com/brianvoe/gofakeit/v6"
+)
+
+type auction struct {
+	sink.BaseSinkRecord
+
+	Id       int    `json:"id"`
+	ItemName string `json:"item_name"`
+	DateTime int64  `json:"date_time"`
+	Seller   int    `json:"seller"`
+	Category int    `json:"category"`
+}
+
+func (r *auction) ToJson() (topic string, key string, data []byte) {
+	data, _ = json.Marshal(r)
+	return "auction", fmt.Sprint(r.Id), data
+}
+
+type auctionGen struct {
+	faker *gofakeit.Faker
+
+	nextAuctionId int
+}
+
+func NewNexmarkGen(cfg gen.GeneratorConfig) gen.LoadGenerator {
+	return &auctionGen{
+		faker:         gofakeit.New(0),
+		nextAuctionId: 1000,
+	}
+}
+
+func (g *auctionGen) generate() sink.SinkRecord {
+	g.nextAuctionId++
+	return &auction{
+		Id:       g.nextAuctionId,
+		ItemName: g.faker.FarmAnimal(),
+		DateTime: time.Now().Unix(),
+		Seller:   g.faker.Number(1000, 1099),
+		Category: g.faker.Number(1, 20),
+	}
+}
+
+func (g *auctionGen) KafkaTopics() []string {
+	// We generate the auction table only.
+	return []string{"auction"}
+}
+
+func (g *auctionGen) Load(ctx context.Context, outCh chan<- sink.SinkRecord) {
+	for {
+		record := g.generate()
+		select {
+		case outCh <- record:
+		case <-ctx.Done():
+			return
+		}
+	}
+}
diff --git a/integration_test/datagen/sink/kafka/kafka.go b/integration_test/datagen/sink/kafka/kafka.go
new file mode 100644
index 0000000000000..f01a116629e4f
--- /dev/null
+++ b/integration_test/datagen/sink/kafka/kafka.go
@@ -0,0 +1,139 @@
+package kafka
+
+import (
+	"context"
+	"datagen/sink"
+	"fmt"
+	"log"
+	"strings"
+	"time"
+
+	"github.com/Shopify/sarama"
+)
+
+type KafkaConfig struct {
+	Brokers string
+
+	// Do not recreate the Kafka topic when it exists. The default value is false.
+	// It can be enabled if datagen is not authorized to create topics.
+	NoRecreateIfExists bool
+}
+
+type KafkaSink struct {
+	admin  sarama.ClusterAdmin
+	cfg    KafkaConfig
+	client sarama.AsyncProducer
+}
+
+func newKafkaConfig() *sarama.Config {
+	version, err := sarama.ParseKafkaVersion("1.1.1")
+	if err != nil {
+		panic(fmt.Sprintf("failed to parse Kafka version: %v", err))
+	}
+	config := sarama.NewConfig()
+	config.Version = version
+	config.Net.DialTimeout = 3 * time.Second
+	config.Admin.Timeout = 5 * time.Second
+	config.Producer.Timeout = 5 * time.Second
+	return config
+}
+
+func OpenKafkaSink(ctx context.Context, cfg KafkaConfig) (*KafkaSink, error) {
+	admin, err := sarama.NewClusterAdmin(strings.Split(cfg.Brokers, ","), newKafkaConfig())
+	if err != nil {
+		return nil, err
+	}
+	topics, err := admin.ListTopics()
+	if err != nil {
+		return nil, err
+	}
+	var topicNames []string
+	for k := range topics {
+		topicNames = append(topicNames, k)
+	}
+	log.Printf("Existing topics: %s", topicNames)
+	client, err := sarama.NewAsyncProducer(strings.Split(cfg.Brokers, ","), newKafkaConfig())
+	if err != nil {
+		return nil, fmt.Errorf("NewAsyncProducer failed: %v", err)
+	}
+	p := &KafkaSink{
+		admin:  admin,
+		cfg:    cfg,
+		client: client,
+	}
+	go func() {
+		p.consumeSuccesses(ctx)
+	}()
+	return p, nil
+}
+
+func (p *KafkaSink) consumeSuccesses(ctx context.Context) {
+	for {
+		select {
+		case <-ctx.Done():
+			return
+		case <-p.client.Successes():
+		}
+	}
+}
+
+func (p *KafkaSink) createRequiredTopics(admin sarama.ClusterAdmin, keys []string) error {
+	topics, err := admin.ListTopics()
+	if err != nil {
+		return err
+	}
+	for _, t := range keys {
+		if err := p.createTopic(admin, t, topics); err != nil {
+			return err
+		}
+	}
+	return nil
+}
+
+func (p *KafkaSink) createTopic(admin sarama.ClusterAdmin, key string, topics map[string]sarama.TopicDetail) error {
+	_, exists := topics[key]
+	if p.cfg.NoRecreateIfExists {
+		if exists {
+			// The topic already exists, and we don't want to recreate it.
+			return nil
+		} else {
+			return fmt.Errorf("topic \"%s\" does not exist", key)
+		}
+	}
+	if exists {
+		// Recreate the topic if it exists.
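+		// Delete-then-create gives every datagen run a fresh topic with the
+		// partition count below. Note that topic deletion may complete
+		// asynchronously on the broker, so the CreateTopic call can
+		// transiently fail while the old topic is still being removed.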
+		if err := admin.DeleteTopic(key); err != nil {
+			return err
+		}
+		log.Printf("Deleted an existing topic: %s", key)
+	}
+	log.Printf("Creating topic: %s", key)
+	return admin.CreateTopic(key, &sarama.TopicDetail{
+		NumPartitions:     16,
+		ReplicationFactor: 1,
+	}, false)
+}
+
+func (p *KafkaSink) Prepare(topics []string) error {
+	return p.createRequiredTopics(p.admin, topics)
+}
+
+func (p *KafkaSink) Close() error {
+	p.client.AsyncClose()
+	return nil
+}
+
+func (p *KafkaSink) WriteRecord(ctx context.Context, format string, record sink.SinkRecord) error {
+	topic, key, data := sink.RecordToKafka(record, format)
+	msg := &sarama.ProducerMessage{}
+	msg.Topic = topic
+	msg.Key = sarama.StringEncoder(key)
+	msg.Value = sarama.ByteEncoder(data)
+	select {
+	case <-ctx.Done():
+	case p.client.Input() <- msg:
+	case err := <-p.client.Errors():
+		log.Printf("failed to produce message: %s", err)
+	}
+	return nil
+}
diff --git a/integration_test/datagen/sink/kinesis/kinesis.go b/integration_test/datagen/sink/kinesis/kinesis.go
new file mode 100644
index 0000000000000..139736f24c276
--- /dev/null
+++ b/integration_test/datagen/sink/kinesis/kinesis.go
@@ -0,0 +1,52 @@
+package kinesis
+
+import (
+	"context"
+	"datagen/sink"
+	"fmt"
+
+	"github.com/aws/aws-sdk-go/aws"
+	"github.com/aws/aws-sdk-go/aws/session"
+	"github.com/aws/aws-sdk-go/service/kinesis"
+)
+
+type KinesisConfig struct {
+	StreamName string
+	Region     string
+}
+
+type KinesisSink struct {
+	client *kinesis.Kinesis
+	cfg    KinesisConfig
+}
+
+func OpenKinesisSink(cfg KinesisConfig) (*KinesisSink, error) {
+	ss := session.Must(session.NewSession())
+	client := kinesis.New(ss, aws.NewConfig().WithRegion(cfg.Region))
+	return &KinesisSink{
+		client: client,
+		cfg:    cfg,
+	}, nil
+}
+
+func (p *KinesisSink) Prepare(topics []string) error {
+	return nil
+}
+
+func (p *KinesisSink) Close() error {
+	return nil
+}
+
+func (p *KinesisSink) WriteRecord(ctx context.Context, format string, record sink.SinkRecord) error {
+	_, key, data := sink.RecordToKafka(record, format)
+	_, err := p.client.PutRecordWithContext(ctx, &kinesis.PutRecordInput{
+		Data:         data,
+		PartitionKey: aws.String(key),
+		StreamName:   aws.String(p.cfg.StreamName),
+	})
+	if err != nil {
+		return fmt.Errorf("failed to write record to kinesis: %s", err)
+	} else {
+		return nil
+	}
+}
diff --git a/integration_test/datagen/sink/mysql/mysql.go b/integration_test/datagen/sink/mysql/mysql.go
new file mode 100644
index 0000000000000..5d1370b245970
--- /dev/null
+++ b/integration_test/datagen/sink/mysql/mysql.go
@@ -0,0 +1,51 @@
+package mysql
+
+import (
+	"context"
+	"database/sql"
+	"datagen/sink"
+	"fmt"
+
+	_ "github.com/go-sql-driver/mysql"
+)
+
+type MysqlConfig struct {
+	DbHost     string
+	Database   string
+	DbPort     int
+	DbUser     string
+	DbPassword string
+}
+
+type MysqlSink struct {
+	db *sql.DB
+}
+
+func OpenMysqlSink(cfg MysqlConfig) (*MysqlSink, error) {
+	fmt.Printf("Opening MySQL sink: %+v\n", cfg)
+
+	db, err := sql.Open("mysql", fmt.Sprintf("%s:%s@tcp(%s:%d)/%s",
+		cfg.DbUser, cfg.DbPassword, cfg.DbHost, cfg.DbPort, cfg.Database))
+	if err != nil {
+		return nil, err
+	}
+	return &MysqlSink{db}, nil
+}
+
+func (p *MysqlSink) Prepare(topics []string) error {
+	return nil
+}
+
+func (p *MysqlSink) Close() error {
+	return p.db.Close()
+}
+
+func (p *MysqlSink) WriteRecord(ctx context.Context, format string, record sink.SinkRecord) error {
+	// MySQL's INSERT INTO is compatible with Postgres's.
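+	// Illustrative example (livestream mode, values hypothetical):
+	//   INSERT INTO live_stream_metrics (client_ip, user_agent, user_id, ...)
+	//   VALUES ('203.0.113.10:443', 'Mozilla/5.0 ...', '42', ...)
+	// The same statement text is accepted by both dialects.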
diff --git a/integration_test/datagen/sink/mysql/mysql.go b/integration_test/datagen/sink/mysql/mysql.go
new file mode 100644
index 0000000000000..5d1370b245970
--- /dev/null
+++ b/integration_test/datagen/sink/mysql/mysql.go
@@ -0,0 +1,51 @@
+package mysql
+
+import (
+    "context"
+    "database/sql"
+    "datagen/sink"
+    "fmt"
+
+    _ "github.com/go-sql-driver/mysql"
+)
+
+type MysqlConfig struct {
+    DbHost     string
+    Database   string
+    DbPort     int
+    DbUser     string
+    DbPassword string
+}
+
+type MysqlSink struct {
+    db *sql.DB
+}
+
+func OpenMysqlSink(cfg MysqlConfig) (*MysqlSink, error) {
+    fmt.Printf("Opening MySQL sink: %+v\n", cfg)
+
+    db, err := sql.Open("mysql", fmt.Sprintf("%s:%s@tcp(%s:%d)/%s",
+        cfg.DbUser, cfg.DbPassword, cfg.DbHost, cfg.DbPort, cfg.Database))
+    if err != nil {
+        return nil, err
+    }
+    return &MysqlSink{db}, nil
+}
+
+func (p *MysqlSink) Prepare(topics []string) error {
+    return nil
+}
+
+func (p *MysqlSink) Close() error {
+    return p.db.Close()
+}
+
+func (p *MysqlSink) WriteRecord(ctx context.Context, format string, record sink.SinkRecord) error {
+    // MySQL's INSERT INTO syntax is compatible with Postgres's.
+    query := record.ToPostgresSql()
+    _, err := p.db.ExecContext(ctx, query)
+    if err != nil {
+        err = fmt.Errorf("failed to execute query '%s': %s", query, err)
+    }
+    return err
+}
diff --git a/integration_test/datagen/sink/postgres/postgres.go b/integration_test/datagen/sink/postgres/postgres.go
new file mode 100644
index 0000000000000..f3698a93258bb
--- /dev/null
+++ b/integration_test/datagen/sink/postgres/postgres.go
@@ -0,0 +1,47 @@
+package postgres
+
+import (
+    "context"
+    "database/sql"
+    "datagen/sink"
+    "fmt"
+
+    _ "github.com/lib/pq"
+)
+
+type PostgresConfig struct {
+    DbHost   string
+    Database string
+    DbPort   int
+    DbUser   string
+}
+
+type PostgresSink struct {
+    db *sql.DB
+}
+
+func OpenPostgresSink(cfg PostgresConfig) (*PostgresSink, error) {
+    db, err := sql.Open("postgres", fmt.Sprintf("postgresql://%s:@%s:%d/%s?sslmode=disable",
+        cfg.DbUser, cfg.DbHost, cfg.DbPort, cfg.Database))
+    if err != nil {
+        return nil, err
+    }
+    return &PostgresSink{db}, nil
+}
+
+func (p *PostgresSink) Prepare(topics []string) error {
+    return nil
+}
+
+func (p *PostgresSink) Close() error {
+    return p.db.Close()
+}
+
+func (p *PostgresSink) WriteRecord(ctx context.Context, format string, record sink.SinkRecord) error {
+    query := record.ToPostgresSql()
+    _, err := p.db.ExecContext(ctx, query)
+    if err != nil {
+        err = fmt.Errorf("failed to execute query '%s': %s", query, err)
+    }
+    return err
+}
diff --git a/integration_test/datagen/sink/pulsar/pulsar.go b/integration_test/datagen/sink/pulsar/pulsar.go
new file mode 100644
index 0000000000000..2566ce993c1db
--- /dev/null
+++ b/integration_test/datagen/sink/pulsar/pulsar.go
@@ -0,0 +1,60 @@
+package pulsar
+
+import (
+    "context"
+    "datagen/sink"
+    "fmt"
+
+    "github.com/apache/pulsar-client-go/pulsar"
+)
+
+type PulsarConfig struct {
+    Brokers string
+}
+
+type PulsarSink struct {
+    client    pulsar.Client
+    producers map[string]pulsar.Producer
+}
+
+func OpenPulsarSink(ctx context.Context, cfg PulsarConfig) (*PulsarSink, error) {
+    client, err := pulsar.NewClient(pulsar.ClientOptions{
+        URL: fmt.Sprintf("pulsar://%s", cfg.Brokers),
+    })
+    if err != nil {
+        return nil, err
+    }
+    return &PulsarSink{
+        client:    client,
+        producers: make(map[string]pulsar.Producer),
+    }, nil
+}
+
+func (p *PulsarSink) Prepare(topics []string) error {
+    return nil
+}
+
+func (p *PulsarSink) Close() error {
+    p.client.Close()
+    return nil
+}
+
+func (p *PulsarSink) WriteRecord(ctx context.Context, format string, record sink.SinkRecord) error {
+    var err error
+    topic, key, data := sink.RecordToKafka(record, format)
+    // Producers are created lazily and cached per topic.
+    producer, ok := p.producers[topic]
+    if !ok {
+        producer, err = p.client.CreateProducer(pulsar.ProducerOptions{
+            Topic: topic,
+        })
+        if err != nil {
+            return err
+        }
+        p.producers[topic] = producer
+    }
+    _, err = producer.Send(ctx, &pulsar.ProducerMessage{
+        Value: data,
+        Key:   key,
+    })
+    return err
+}
diff --git a/integration_test/datagen/sink/sink.go b/integration_test/datagen/sink/sink.go
new file mode 100644
index 0000000000000..4d4116c1f4153
--- /dev/null
+++ b/integration_test/datagen/sink/sink.go
@@ -0,0 +1,63 @@
+package sink
+
+import (
+    "context"
+)
+
+type SinkRecord interface {
+    // Convert the event to an INSERT INTO command.
+    ToPostgresSql() string
+
+    // Convert the event to a Kafka message in JSON format.
+    // This interface will also be used for Pulsar and Kinesis.
+    ToJson() (topic string, key string, data []byte)
+
+    // Convert the event to a Kafka message in Protobuf format.
+    // This interface will also be used for Pulsar and Kinesis.
+    ToProtobuf() (topic string, key string, data []byte)
+
+    // Convert the event to a Kafka message in Avro format.
+    // This interface will also be used for Pulsar and Kinesis.
+    ToAvro() (topic string, key string, data []byte)
+}
+
+type BaseSinkRecord struct{}
+
+func (r BaseSinkRecord) ToPostgresSql() string {
+    panic("not implemented")
+}
+
+func (r BaseSinkRecord) ToJson() (topic string, key string, data []byte) {
+    panic("not implemented")
+}
+
+func (r BaseSinkRecord) ToProtobuf() (topic string, key string, data []byte) {
+    panic("not implemented")
+}
+
+func (r BaseSinkRecord) ToAvro() (topic string, key string, data []byte) {
+    panic("not implemented")
+}
+
+// Convert the event to a Kafka message in the given format.
+// This function is also used for Pulsar and Kinesis.
+func RecordToKafka(r SinkRecord, format string) (topic string, key string, data []byte) {
+    switch format {
+    case "json":
+        return r.ToJson()
+    case "protobuf":
+        return r.ToProtobuf()
+    case "avro":
+        return r.ToAvro()
+    default:
+        panic("unsupported format")
+    }
+}
+
+type Sink interface {
+    Prepare(topics []string) error
+
+    WriteRecord(ctx context.Context, format string, record SinkRecord) error
+
+    Close() error
+}
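To add a new event type, a struct embeds `BaseSinkRecord` and overrides only the encodings it supports; unsupported formats fall through to the panicking defaults. A minimal sketch, written as if it lived inside the `sink` package; the `pingRecord` type and its `ping` topic are made up for illustration:

```go
package sink

import (
	"encoding/json"
	"fmt"
)

// pingRecord is a hypothetical event type used only to illustrate the
// SinkRecord contract. It supports the Postgres and JSON encodings and
// inherits the panicking ToProtobuf/ToAvro defaults from BaseSinkRecord.
type pingRecord struct {
	BaseSinkRecord
	Id int64 `json:"id"`
}

// ToPostgresSql renders the event as a plain INSERT statement.
func (r *pingRecord) ToPostgresSql() string {
	return fmt.Sprintf("INSERT INTO ping (id) values (%d);", r.Id)
}

// ToJson returns the topic, the partitioning key, and the JSON payload.
func (r *pingRecord) ToJson() (topic string, key string, data []byte) {
	data, _ = json.Marshal(r)
	return "ping", fmt.Sprint(r.Id), data
}
```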
diff --git a/integration_test/datagen/twitter/avro.go b/integration_test/datagen/twitter/avro.go
new file mode 100644
index 0000000000000..df20780c3e0bd
--- /dev/null
+++ b/integration_test/datagen/twitter/avro.go
@@ -0,0 +1,45 @@
+package twitter
+
+import (
+    "github.com/linkedin/goavro/v2"
+)
+
+var AvroSchema string = `
+{
+    "type": "record",
+    "name": "Event",
+    "fields": [
+        {
+            "name": "data",
+            "type": "record",
+            "fields": [
+                { "name": "id", "type": "string" },
+                { "name": "text", "type": "string" },
+                { "name": "lang", "type": "string" },
+                { "name": "created_at", "type": "string" }
+            ]
+        },
+        {
+            "name": "author",
+            "type": "record",
+            "fields": [
+                { "name": "id", "type": "string" },
+                { "name": "name", "type": "string" },
+                { "name": "username", "type": "string" },
+                { "name": "created_at", "type": "string" },
+                { "name": "followers", "type": "long" }
+            ]
+        }
+    ]
+}
+`
+
+var AvroCodec *goavro.Codec = nil
+
+func init() {
+    var err error
+    AvroCodec, err = goavro.NewCodec(AvroSchema)
+    if err != nil {
+        panic(err)
+    }
+}
diff --git a/integration_test/datagen/twitter/proto/twitter.pb.go b/integration_test/datagen/twitter/proto/twitter.pb.go
new file mode 100644
index 0000000000000..9938c8dd46725
--- /dev/null
+++ b/integration_test/datagen/twitter/proto/twitter.pb.go
@@ -0,0 +1,347 @@
+// Code generated by protoc-gen-go. DO NOT EDIT.
+// versions:
+//     protoc-gen-go v1.28.1
+//     protoc        v3.21.9
+// source: twitter.proto
+
+package proto
+
+import (
+    protoreflect "google.golang.org/protobuf/reflect/protoreflect"
+    protoimpl "google.golang.org/protobuf/runtime/protoimpl"
+    reflect "reflect"
+    sync "sync"
+)
+
+const (
+    // Verify that this generated code is sufficiently up-to-date.
+    _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion)
+    // Verify that runtime/protoimpl is sufficiently up-to-date.
+ _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type Event struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Data *TweetData `protobuf:"bytes,1,opt,name=data,proto3" json:"data,omitempty"` + Author *User `protobuf:"bytes,2,opt,name=author,proto3" json:"author,omitempty"` +} + +func (x *Event) Reset() { + *x = Event{} + if protoimpl.UnsafeEnabled { + mi := &file_twitter_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Event) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Event) ProtoMessage() {} + +func (x *Event) ProtoReflect() protoreflect.Message { + mi := &file_twitter_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Event.ProtoReflect.Descriptor instead. +func (*Event) Descriptor() ([]byte, []int) { + return file_twitter_proto_rawDescGZIP(), []int{0} +} + +func (x *Event) GetData() *TweetData { + if x != nil { + return x.Data + } + return nil +} + +func (x *Event) GetAuthor() *User { + if x != nil { + return x.Author + } + return nil +} + +type TweetData struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` + Text string `protobuf:"bytes,2,opt,name=text,proto3" json:"text,omitempty"` + Lang string `protobuf:"bytes,3,opt,name=lang,proto3" json:"lang,omitempty"` + CreatedAt string `protobuf:"bytes,4,opt,name=created_at,json=createdAt,proto3" json:"created_at,omitempty"` +} + +func (x *TweetData) Reset() { + *x = TweetData{} + if protoimpl.UnsafeEnabled { + mi := &file_twitter_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *TweetData) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*TweetData) ProtoMessage() {} + +func (x *TweetData) ProtoReflect() protoreflect.Message { + mi := &file_twitter_proto_msgTypes[1] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use TweetData.ProtoReflect.Descriptor instead. 
+func (*TweetData) Descriptor() ([]byte, []int) { + return file_twitter_proto_rawDescGZIP(), []int{1} +} + +func (x *TweetData) GetId() string { + if x != nil { + return x.Id + } + return "" +} + +func (x *TweetData) GetText() string { + if x != nil { + return x.Text + } + return "" +} + +func (x *TweetData) GetLang() string { + if x != nil { + return x.Lang + } + return "" +} + +func (x *TweetData) GetCreatedAt() string { + if x != nil { + return x.CreatedAt + } + return "" +} + +type User struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` + Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"` + UserName string `protobuf:"bytes,3,opt,name=user_name,json=userName,proto3" json:"user_name,omitempty"` + CreatedAt string `protobuf:"bytes,4,opt,name=created_at,json=createdAt,proto3" json:"created_at,omitempty"` + Followers int64 `protobuf:"varint,5,opt,name=followers,proto3" json:"followers,omitempty"` +} + +func (x *User) Reset() { + *x = User{} + if protoimpl.UnsafeEnabled { + mi := &file_twitter_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *User) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*User) ProtoMessage() {} + +func (x *User) ProtoReflect() protoreflect.Message { + mi := &file_twitter_proto_msgTypes[2] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use User.ProtoReflect.Descriptor instead. +func (*User) Descriptor() ([]byte, []int) { + return file_twitter_proto_rawDescGZIP(), []int{2} +} + +func (x *User) GetId() string { + if x != nil { + return x.Id + } + return "" +} + +func (x *User) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *User) GetUserName() string { + if x != nil { + return x.UserName + } + return "" +} + +func (x *User) GetCreatedAt() string { + if x != nil { + return x.CreatedAt + } + return "" +} + +func (x *User) GetFollowers() int64 { + if x != nil { + return x.Followers + } + return 0 +} + +var File_twitter_proto protoreflect.FileDescriptor + +var file_twitter_proto_rawDesc = []byte{ + 0x0a, 0x0d, 0x74, 0x77, 0x69, 0x74, 0x74, 0x65, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, + 0x0e, 0x74, 0x77, 0x69, 0x74, 0x74, 0x65, 0x72, 0x2e, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x22, + 0x64, 0x0a, 0x05, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x12, 0x2d, 0x0a, 0x04, 0x64, 0x61, 0x74, 0x61, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x74, 0x77, 0x69, 0x74, 0x74, 0x65, 0x72, + 0x2e, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x54, 0x77, 0x65, 0x65, 0x74, 0x44, 0x61, 0x74, + 0x61, 0x52, 0x04, 0x64, 0x61, 0x74, 0x61, 0x12, 0x2c, 0x0a, 0x06, 0x61, 0x75, 0x74, 0x68, 0x6f, + 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x74, 0x77, 0x69, 0x74, 0x74, 0x65, + 0x72, 0x2e, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x55, 0x73, 0x65, 0x72, 0x52, 0x06, 0x61, + 0x75, 0x74, 0x68, 0x6f, 0x72, 0x22, 0x62, 0x0a, 0x09, 0x54, 0x77, 0x65, 0x65, 0x74, 0x44, 0x61, + 0x74, 0x61, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, + 0x69, 0x64, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x65, 0x78, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x04, 0x74, 0x65, 0x78, 0x74, 0x12, 0x12, 0x0a, 0x04, 
0x6c, 0x61, 0x6e, 0x67, 0x18, 0x03, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6c, 0x61, 0x6e, 0x67, 0x12, 0x1d, 0x0a, 0x0a, 0x63, 0x72, + 0x65, 0x61, 0x74, 0x65, 0x64, 0x5f, 0x61, 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, + 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x41, 0x74, 0x22, 0x84, 0x01, 0x0a, 0x04, 0x55, 0x73, + 0x65, 0x72, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, + 0x69, 0x64, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x1b, 0x0a, 0x09, 0x75, 0x73, 0x65, 0x72, 0x5f, 0x6e, + 0x61, 0x6d, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x75, 0x73, 0x65, 0x72, 0x4e, + 0x61, 0x6d, 0x65, 0x12, 0x1d, 0x0a, 0x0a, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x5f, 0x61, + 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, + 0x41, 0x74, 0x12, 0x1c, 0x0a, 0x09, 0x66, 0x6f, 0x6c, 0x6c, 0x6f, 0x77, 0x65, 0x72, 0x73, 0x18, + 0x05, 0x20, 0x01, 0x28, 0x03, 0x52, 0x09, 0x66, 0x6f, 0x6c, 0x6c, 0x6f, 0x77, 0x65, 0x72, 0x73, + 0x42, 0x0f, 0x5a, 0x0d, 0x74, 0x77, 0x69, 0x74, 0x74, 0x65, 0x72, 0x2f, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, +} + +var ( + file_twitter_proto_rawDescOnce sync.Once + file_twitter_proto_rawDescData = file_twitter_proto_rawDesc +) + +func file_twitter_proto_rawDescGZIP() []byte { + file_twitter_proto_rawDescOnce.Do(func() { + file_twitter_proto_rawDescData = protoimpl.X.CompressGZIP(file_twitter_proto_rawDescData) + }) + return file_twitter_proto_rawDescData +} + +var file_twitter_proto_msgTypes = make([]protoimpl.MessageInfo, 3) +var file_twitter_proto_goTypes = []interface{}{ + (*Event)(nil), // 0: twitter.schema.Event + (*TweetData)(nil), // 1: twitter.schema.TweetData + (*User)(nil), // 2: twitter.schema.User +} +var file_twitter_proto_depIdxs = []int32{ + 1, // 0: twitter.schema.Event.data:type_name -> twitter.schema.TweetData + 2, // 1: twitter.schema.Event.author:type_name -> twitter.schema.User + 2, // [2:2] is the sub-list for method output_type + 2, // [2:2] is the sub-list for method input_type + 2, // [2:2] is the sub-list for extension type_name + 2, // [2:2] is the sub-list for extension extendee + 0, // [0:2] is the sub-list for field type_name +} + +func init() { file_twitter_proto_init() } +func file_twitter_proto_init() { + if File_twitter_proto != nil { + return + } + if !protoimpl.UnsafeEnabled { + file_twitter_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Event); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_twitter_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*TweetData); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_twitter_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*User); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_twitter_proto_rawDesc, + NumEnums: 0, + NumMessages: 3, + NumExtensions: 0, + NumServices: 0, + }, + GoTypes: file_twitter_proto_goTypes, + DependencyIndexes: 
file_twitter_proto_depIdxs, + MessageInfos: file_twitter_proto_msgTypes, + }.Build() + File_twitter_proto = out.File + file_twitter_proto_rawDesc = nil + file_twitter_proto_goTypes = nil + file_twitter_proto_depIdxs = nil +} diff --git a/integration_test/datagen/twitter/twitter.go b/integration_test/datagen/twitter/twitter.go new file mode 100644 index 0000000000000..5a4aefb152b5f --- /dev/null +++ b/integration_test/datagen/twitter/twitter.go @@ -0,0 +1,173 @@ +package twitter + +import ( + "context" + "datagen/gen" + "datagen/sink" + "datagen/twitter/proto" + "encoding/json" + "fmt" + "math/rand" + "time" + + "github.com/brianvoe/gofakeit/v6" + protobuf "google.golang.org/protobuf/proto" +) + +type tweetData struct { + CreatedAt string `json:"created_at"` + Id string `json:"id"` + Text string `json:"text"` + Lang string `json:"lang"` +} + +type twitterEvent struct { + sink.BaseSinkRecord + + Data tweetData `json:"data"` + Author twitterUser `json:"author"` +} + +type twitterUser struct { + CreatedAt string `json:"created_at"` + Id string `json:"id"` + Name string `json:"name"` + UserName string `json:"username"` + Followers int `json:"followers"` +} + +func (r *twitterEvent) ToPostgresSql() string { + return fmt.Sprintf("INSERT INTO %s (data, author) values (%s, %s);", + "twitter", r.Data.objectString(), r.Author.objectString()) +} + +func (r *twitterUser) objectString() string { + return fmt.Sprintf("('%s'::TIMESTAMP, '%s', '%s', '%s')", r.CreatedAt, r.Id, r.Name, r.UserName) +} + +func (r *tweetData) objectString() string { + return fmt.Sprintf("('%s'::TIMESTAMP, '%s', '%s', '%s')", r.CreatedAt, r.Id, r.Text, r.Lang) +} + +func (r *twitterEvent) ToJson() (topic string, key string, data []byte) { + data, _ = json.Marshal(r) + return "twitter", r.Data.Id, data +} + +func (r *twitterEvent) ToProtobuf() (topic string, key string, data []byte) { + m := proto.Event{ + Data: &proto.TweetData{ + CreatedAt: r.Data.CreatedAt, + Id: r.Data.Id, + Text: r.Data.Text, + Lang: r.Data.Lang, + }, + Author: &proto.User{ + CreatedAt: r.Author.CreatedAt, + Id: r.Author.Id, + Name: r.Author.Name, + UserName: r.Author.UserName, + Followers: int64(r.Author.Followers), + }, + } + data, err := protobuf.Marshal(&m) + if err != nil { + panic(err) + } + return "twitter", r.Data.Id, data +} + +func (r *twitterEvent) ToAvro() (topic string, key string, data []byte) { + obj := map[string]interface{}{ + "data": map[string]interface{}{ + "created_at": r.Data.CreatedAt, + "id": r.Data.Id, + "text": r.Data.Text, + "lang": r.Data.Lang, + }, + "author": map[string]interface{}{ + "created_at": r.Author.CreatedAt, + "id": r.Author.Id, + "name": r.Author.Name, + "username": r.Author.UserName, + "followers": r.Author.Followers, + }, + } + binary, err := AvroCodec.BinaryFromNative(nil, obj) + if err != nil { + panic(err) + } + return "twitter", r.Data.Id, binary +} + +type twitterGen struct { + faker *gofakeit.Faker + users []*twitterUser +} + +func NewTwitterGen() gen.LoadGenerator { + faker := gofakeit.New(0) + users := make(map[string]*twitterUser) + for len(users) < 100000 { + id := faker.DigitN(10) + if _, ok := users[id]; !ok { + endYear := time.Now().Year() - 1 + startYear := endYear - rand.Intn(8) + + endTime, _ := time.Parse("2006-01-01", fmt.Sprintf("%d-01-01", endYear)) + startTime, _ := time.Parse("2006-01-01", fmt.Sprintf("%d-01-01", startYear)) + users[id] = &twitterUser{ + CreatedAt: faker.DateRange(startTime, endTime).Format(gen.RwTimestampLayout), + Id: id, + Name: fmt.Sprintf("%s %s", faker.Name(), 
faker.Adverb()), + UserName: faker.Username(), + Followers: gofakeit.IntRange(1, 100000), + } + } + } + usersList := []*twitterUser{} + for _, u := range users { + usersList = append(usersList, u) + } + return &twitterGen{ + faker: faker, + users: usersList, + } +} + +func (t *twitterGen) generate() twitterEvent { + id := t.faker.DigitN(19) + author := t.users[rand.Intn(len(t.users))] + + wordsCnt := t.faker.IntRange(10, 20) + hashTagsCnt := t.faker.IntRange(0, 2) + hashTags := "" + for i := 0; i < hashTagsCnt; i++ { + hashTags += fmt.Sprintf("#%s ", t.faker.BuzzWord()) + } + sentence := fmt.Sprintf("%s%s", hashTags, t.faker.Sentence(wordsCnt)) + return twitterEvent{ + Data: tweetData{ + Id: id, + CreatedAt: time.Now().Format(gen.RwTimestampLayout), + Text: sentence, + Lang: gofakeit.Language(), + }, + Author: *author, + } +} + +func (t *twitterGen) KafkaTopics() []string { + return []string{"twitter"} +} + +func (t *twitterGen) Load(ctx context.Context, outCh chan<- sink.SinkRecord) { + for { + record := t.generate() + select { + case <-ctx.Done(): + return + case outCh <- &record: + } + } +} diff --git a/integration_test/datagen/twitter/twitter_example.json b/integration_test/datagen/twitter/twitter_example.json new file mode 100644 index 0000000000000..14c050eb3a847 --- /dev/null +++ b/integration_test/datagen/twitter/twitter_example.json @@ -0,0 +1,14 @@ +{ + "data": { + "created_at": "2020-02-12T17:09:56.000Z", + "id": "1227640996038684673", + "text": "Doctors: Googling stuff online does not make you a doctor\n\nDevelopers: https://t.co/mrju5ypPkb", + "lang": "English" + }, + "author": { + "created_at": "2013-12-14T04:35:55.000Z", + "id": "2244994945", + "name": "singularity", + "username": "singular ritty" + } +} \ No newline at end of file diff --git a/integration_test/delivery/delivery.sql b/integration_test/delivery/delivery.sql new file mode 100644 index 0000000000000..157e0ba6e2ad2 --- /dev/null +++ b/integration_test/delivery/delivery.sql @@ -0,0 +1,25 @@ +CREATE SOURCE delivery_orders_source ( + order_id BIGINT, + restaurant_id BIGINT, + order_state VARCHAR, + order_timestamp TIMESTAMP +) WITH ( + connector = 'kafka', + topic = 'delivery_orders', + properties.bootstrap.server = 'message_queue:29092', + scan.startup.mode = 'earliest' +) ROW FORMAT JSON; + + +CREATE MATERIALIZED VIEW restaurant_orders AS +SELECT + window_start, + restaurant_id, + COUNT(*) AS total_order +FROM + HOP(delivery_orders_source, order_timestamp, INTERVAL '1' MINUTE, INTERVAL '15' MINUTE) +WHERE + order_state = 'CREATED' +GROUP BY + restaurant_id, + window_start; diff --git a/integration_test/delivery/docker-compose.yml b/integration_test/delivery/docker-compose.yml new file mode 100644 index 0000000000000..937209a68a275 --- /dev/null +++ b/integration_test/delivery/docker-compose.yml @@ -0,0 +1,60 @@ +--- +version: "3" +services: + compactor-0: + extends: + file: ../../docker/docker-compose.yml + service: compactor-0 + compute-node-0: + extends: + file: ../../docker/docker-compose.yml + service: compute-node-0 + etcd-0: + extends: + file: ../../docker/docker-compose.yml + service: etcd-0 + frontend-node-0: + extends: + file: ../../docker/docker-compose.yml + service: frontend-node-0 + grafana-0: + extends: + file: ../../docker/docker-compose.yml + service: grafana-0 + meta-node-0: + extends: + file: ../../docker/docker-compose.yml + service: meta-node-0 + minio-0: + extends: + file: ../../docker/docker-compose.yml + service: minio-0 + prometheus-0: + extends: + file: ../../docker/docker-compose.yml + 
service: prometheus-0
+  message_queue:
+    extends:
+      file: ../../docker/docker-compose.yml
+      service: message_queue
+  datagen:
+    image: ghcr.io/risingwavelabs/demo-datagen:v1.0.9
+    depends_on: [message_queue]
+    command:
+      - /bin/sh
+      - -c
+      - /datagen --mode delivery --qps 2 kafka --brokers message_queue:29092
+    restart: always
+    container_name: datagen
+volumes:
+  etcd-0:
+    external: false
+  grafana-0:
+    external: false
+  minio-0:
+    external: false
+  prometheus-0:
+    external: false
+  message_queue:
+    external: false
+name: risingwave-compose
diff --git a/integration_test/ecommerce/ecommerce.sql b/integration_test/ecommerce/ecommerce.sql
new file mode 100644
index 0000000000000..434801743b13c
--- /dev/null
+++ b/integration_test/ecommerce/ecommerce.sql
@@ -0,0 +1,54 @@
+CREATE SOURCE order_events (
+    order_id VARCHAR,
+    item_id VARCHAR,
+    item_price DOUBLE PRECISION,
+    event_timestamp TIMESTAMP
+) WITH (
+    connector = 'kafka',
+    topic = 'order_events',
+    properties.bootstrap.server = 'message_queue:29092',
+    scan.startup.mode = 'earliest'
+) ROW FORMAT JSON;
+
+CREATE TABLE parcel_events (
+    order_id VARCHAR,
+    event_timestamp TIMESTAMP,
+    event_type VARCHAR
+);
+
+CREATE MATERIALIZED VIEW order_details AS
+SELECT
+    order_id,
+    SUM(item_price) AS total_price,
+    AVG(item_price) AS avg_price
+FROM
+    order_events
+GROUP BY
+    order_id;
+
+-- Orders that took more than 7 days from creation to shipment.
+CREATE MATERIALIZED VIEW order_delivery_time AS
+SELECT
+    t1.order_id AS order_id,
+    (t2.event_timestamp - t1.event_timestamp) as delivery_time
+FROM
+    (
+        SELECT
+            order_id,
+            event_timestamp
+        FROM
+            parcel_events
+        WHERE
+            event_type = 'order_created'
+    ) AS t1
+    JOIN (
+        SELECT
+            order_id,
+            event_timestamp
+        FROM
+            parcel_events
+        WHERE
+            event_type = 'parcel_shipped'
+    ) t2 ON t1.order_id = t2.order_id
+WHERE
+    t2.event_timestamp - t1.event_timestamp > INTERVAL '7 days';
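Because RisingWave speaks the Postgres wire protocol, materialized views like the ones above can be read from any Postgres driver. A hedged Go sketch follows; the connection string (port 4566, database `dev`, user `root` are RisingWave's defaults) describes an assumed local setup, not anything configured by this patch:

```go
package main

import (
	"database/sql"
	"fmt"
	"log"

	_ "github.com/lib/pq" // Postgres wire protocol, which RisingWave speaks.
)

func main() {
	// Assumed DSN: RisingWave frontend on localhost:4566, database "dev".
	db, err := sql.Open("postgres", "postgresql://root:@localhost:4566/dev?sslmode=disable")
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	rows, err := db.Query("SELECT order_id, delivery_time FROM order_delivery_time LIMIT 10;")
	if err != nil {
		log.Fatal(err)
	}
	defer rows.Close()
	for rows.Next() {
		var orderId, deliveryTime string
		if err := rows.Scan(&orderId, &deliveryTime); err != nil {
			log.Fatal(err)
		}
		fmt.Println(orderId, deliveryTime)
	}
	if err := rows.Err(); err != nil {
		log.Fatal(err)
	}
}
```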
diff --git a/integration_test/iceberg-sink/README.md b/integration_test/iceberg-sink/README.md
new file mode 100644
index 0000000000000..add6d72125673
--- /dev/null
+++ b/integration_test/iceberg-sink/README.md
@@ -0,0 +1,36 @@
+# Demo: Sinking to Iceberg
+
+RisingWave provides only limited capabilities for serving complex ad-hoc queries, which typically require optimizations such as columnar storage and code generation (https://www.vldb.org/pvldb/vol4/p539-neumann.pdf). RisingWave's internal storage format is row-based, and improving its batch-processing capability has not been a priority. Therefore, we recommend sinking the stream into Iceberg or another data lake to build a "streaming data warehouse" solution.
+
+This demo showcases how RisingWave can sink data to Iceberg for big data analytics.
+
+1. Launch the cluster:
+
+```sh
+docker compose up -d
+```
+
+The cluster contains a RisingWave cluster and its necessary dependencies, a Spark instance used to create the Iceberg table, a datagen service that generates the data, and a Presto instance for querying Iceberg.
+
+2. Create the Iceberg table:
+
+```sh
+docker compose exec spark bash /spark-script/run-sql-file.sh create-table
+```
+
+3. Execute the SQL queries in sequence:
+
+- create_source.sql
+- create_mv.sql
+- create_sink.sql
+
+4. Connect to the Presto instance pre-installed in the Docker Compose cluster and execute a simple query:
+
+```
+docker compose exec presto presto-cli --server localhost:8080
+```
+
+```sql
+select user_id, count(*) from iceberg.demo_db.demo_table group by user_id;
+```
diff --git a/integration_test/iceberg-sink/create_mv.sql b/integration_test/iceberg-sink/create_mv.sql
new file mode 100644
index 0000000000000..0a803f8a2762d
--- /dev/null
+++ b/integration_test/iceberg-sink/create_mv.sql
@@ -0,0 +1,7 @@
+CREATE MATERIALIZED VIEW bhv_mv AS
+SELECT
+    user_id,
+    target_id,
+    event_timestamp
+FROM
+    user_behaviors;
\ No newline at end of file
diff --git a/integration_test/iceberg-sink/create_sink.sql b/integration_test/iceberg-sink/create_sink.sql
new file mode 100644
index 0000000000000..6c984c5cc83b2
--- /dev/null
+++ b/integration_test/iceberg-sink/create_sink.sql
@@ -0,0 +1,10 @@
+CREATE SINK bhv_iceberg_sink
+FROM
+    bhv_mv WITH (
+        connector = 'iceberg',
+        sink.mode='upsert',
+        location.type='minio',
+        warehouse.path='minio://hummockadmin:hummockadmin@minio-0:9301/hummock001/iceberg-data',
+        database.name='demo_db',
+        table.name='demo_table'
+    );
\ No newline at end of file
diff --git a/integration_test/iceberg-sink/create_source.sql b/integration_test/iceberg-sink/create_source.sql
new file mode 100644
index 0000000000000..efb1319cc3544
--- /dev/null
+++ b/integration_test/iceberg-sink/create_source.sql
@@ -0,0 +1,19 @@
+CREATE TABLE user_behaviors (
+    user_id VARCHAR,
+    target_id VARCHAR,
+    target_type VARCHAR,
+    event_timestamp VARCHAR,
+    behavior_type VARCHAR,
+    parent_target_type VARCHAR,
+    parent_target_id VARCHAR,
+    PRIMARY KEY(user_id, target_id, event_timestamp)
+) with (
+    connector = 'mysql-cdc',
+    hostname = 'mysql',
+    port = '3306',
+    username = 'root',
+    password = '123456',
+    database.name = 'mydb',
+    table.name = 'user_behaviors',
+    server.id = '1'
+);
\ No newline at end of file
diff --git a/integration_test/iceberg-sink/docker-compose.yml b/integration_test/iceberg-sink/docker-compose.yml
new file mode 100644
index 0000000000000..16ed06e332f54
--- /dev/null
+++ b/integration_test/iceberg-sink/docker-compose.yml
@@ -0,0 +1,98 @@
+---
+version: "3"
+services:
+  spark:
+    image: apache/spark:3.3.1
+    command: tail -f /dev/null
+    depends_on:
+      - minio-0
+    volumes:
+      - "./spark-script:/spark-script"
+    container_name: spark
+  presto:
+    build: ./presto-with-iceberg
+    container_name: presto
+  compactor-0:
+    extends:
+      file: ../../docker/docker-compose.yml
+      service: compactor-0
+  compute-node-0:
+    extends:
+      file: ../../docker/docker-compose.yml
+      service: compute-node-0
+  etcd-0:
+    extends:
+      file: ../../docker/docker-compose.yml
+      service: etcd-0
+  frontend-node-0:
+    extends:
+      file: ../../docker/docker-compose.yml
+      service: frontend-node-0
+  grafana-0:
+    extends:
+      file: ../../docker/docker-compose.yml
+      service: grafana-0
+  meta-node-0:
+    extends:
+      file: ../../docker/docker-compose.yml
+      service: meta-node-0
+  minio-0:
+    extends:
+      file: ../../docker/docker-compose.yml
+      service: minio-0
+  prometheus-0:
+    extends:
+      file: ../../docker/docker-compose.yml
+      service: prometheus-0
+  mysql:
+    image: mysql:8.0
+    ports:
+      - "3306:3306"
+    environment:
+      - MYSQL_ROOT_PASSWORD=123456
+      - MYSQL_USER=mysqluser
+      - MYSQL_PASSWORD=mysqlpw
+      - MYSQL_DATABASE=mydb
+    healthcheck:
+      test: [ "CMD-SHELL", "mysqladmin ping -h 127.0.0.1 -u root -p123456" ]
+      interval: 5s
+      timeout: 5s
+      retries: 5
+    container_name: mysql
+  connector-node:
+    extends:
+      file: ../../docker/docker-compose.yml
+      service:
connector-node + prepare_mysql: + image: mysql:8.0 + depends_on: + - mysql + command: + - /bin/sh + - -c + - "mysql -p123456 -h mysql mydb < mysql_prepare.sql" + volumes: + - "./mysql_prepare.sql:/mysql_prepare.sql" + container_name: prepare_mysql + restart: on-failure + datagen: + image: ghcr.io/risingwavelabs/demo-datagen:v1.1.1 + depends_on: [mysql] + command: + - /bin/sh + - -c + - /datagen --mode clickstream --qps 1 mysql --user mysqluser --password mysqlpw --host mysql --port 3306 --db mydb + container_name: datagen + restart: on-failure +volumes: + compute-node-0: + external: false + etcd-0: + external: false + grafana-0: + external: false + minio-0: + external: false + prometheus-0: + external: false +name: risingwave-compose \ No newline at end of file diff --git a/integration_test/iceberg-sink/iceberg-query.sql b/integration_test/iceberg-sink/iceberg-query.sql new file mode 100644 index 0000000000000..67b7d9e831fa8 --- /dev/null +++ b/integration_test/iceberg-sink/iceberg-query.sql @@ -0,0 +1 @@ +select user_id, count(*) from iceberg.demo_db.demo_table group by user_id \ No newline at end of file diff --git a/integration_test/iceberg-sink/mysql_prepare.sql b/integration_test/iceberg-sink/mysql_prepare.sql new file mode 100644 index 0000000000000..3e5a236a41205 --- /dev/null +++ b/integration_test/iceberg-sink/mysql_prepare.sql @@ -0,0 +1,15 @@ +-- mysql -p123456 -uroot -h 127.0.0.1 mydb < mysql_prepare.sql +-- +-- Mysql +USE mydb; + +CREATE TABLE user_behaviors ( + user_id VARCHAR(60), + target_id VARCHAR(60), + target_type VARCHAR(60), + event_timestamp VARCHAR(100), + behavior_type VARCHAR(60), + parent_target_type VARCHAR(60), + parent_target_id VARCHAR(60), + PRIMARY KEY(user_id, target_id, event_timestamp) +); diff --git a/integration_test/iceberg-sink/presto-with-iceberg/Dockerfile b/integration_test/iceberg-sink/presto-with-iceberg/Dockerfile new file mode 100644 index 0000000000000..bb6fea50243c4 --- /dev/null +++ b/integration_test/iceberg-sink/presto-with-iceberg/Dockerfile @@ -0,0 +1,5 @@ +FROM ahanaio/prestodb-sandbox + +COPY ./iceberg.properties /opt/presto-server/etc/catalog +COPY ./log.properties /opt/presto-server/etc +COPY ./hadoop-catalog.xml /etc/iceberg/conf/hadoop-catalog.xml \ No newline at end of file diff --git a/integration_test/iceberg-sink/presto-with-iceberg/hadoop-catalog.xml b/integration_test/iceberg-sink/presto-with-iceberg/hadoop-catalog.xml new file mode 100644 index 0000000000000..460072cfb47b0 --- /dev/null +++ b/integration_test/iceberg-sink/presto-with-iceberg/hadoop-catalog.xml @@ -0,0 +1,22 @@ + + + presto.s3.endpoint + http://minio-0:9301 + + + presto.s3.path-style-access + true + + + presto.s3.access-key + hummockadmin + + + presto.s3.secret-key + hummockadmin + + + fs.s3a.impl + com.facebook.presto.hive.s3.PrestoS3FileSystem + + \ No newline at end of file diff --git a/integration_test/iceberg-sink/presto-with-iceberg/iceberg.properties b/integration_test/iceberg-sink/presto-with-iceberg/iceberg.properties new file mode 100644 index 0000000000000..ecccfc7fc8275 --- /dev/null +++ b/integration_test/iceberg-sink/presto-with-iceberg/iceberg.properties @@ -0,0 +1,6 @@ +connector.name=iceberg +hive.metastore.uri=thrift://192.0.2.3:9083 +iceberg.catalog.type=hadoop +iceberg.catalog.warehouse=s3a://hummock001/iceberg-data +iceberg.hadoop.config.resources=/etc/iceberg/conf/hadoop-catalog.xml +hive.config.resources=/etc/iceberg/conf/hadoop-catalog.xml \ No newline at end of file diff --git 
a/integration_test/iceberg-sink/presto-with-iceberg/log.properties b/integration_test/iceberg-sink/presto-with-iceberg/log.properties new file mode 100644 index 0000000000000..208a8faf4176b --- /dev/null +++ b/integration_test/iceberg-sink/presto-with-iceberg/log.properties @@ -0,0 +1,2 @@ +com.facebook.presto=DEBUG +org.apache.hadoop=DEBUG \ No newline at end of file diff --git a/integration_test/iceberg-sink/spark-script/.gitignore b/integration_test/iceberg-sink/spark-script/.gitignore new file mode 100644 index 0000000000000..51dcf07222856 --- /dev/null +++ b/integration_test/iceberg-sink/spark-script/.gitignore @@ -0,0 +1,3 @@ +derby.log +metastore_db +.ivy \ No newline at end of file diff --git a/integration_test/iceberg-sink/spark-script/create-table.sql b/integration_test/iceberg-sink/spark-script/create-table.sql new file mode 100644 index 0000000000000..e609784d849c7 --- /dev/null +++ b/integration_test/iceberg-sink/spark-script/create-table.sql @@ -0,0 +1,11 @@ +drop table if exists demo.demo_db.demo_table; + +CREATE TABLE demo.demo_db.demo_table +( + user_id string, + target_id string, + event_timestamp string +) TBLPROPERTIES ('format-version'='2'); + + + diff --git a/integration_test/iceberg-sink/spark-script/query-table.sql b/integration_test/iceberg-sink/spark-script/query-table.sql new file mode 100644 index 0000000000000..d305c6c51c72d --- /dev/null +++ b/integration_test/iceberg-sink/spark-script/query-table.sql @@ -0,0 +1 @@ +SELECT user_id, count(*) from demo.demo_db.demo_table group by user_id; \ No newline at end of file diff --git a/integration_test/iceberg-sink/spark-script/run-sql-file.sh b/integration_test/iceberg-sink/spark-script/run-sql-file.sh new file mode 100644 index 0000000000000..15a9c9ffedd0b --- /dev/null +++ b/integration_test/iceberg-sink/spark-script/run-sql-file.sh @@ -0,0 +1,13 @@ +set -ex + +/opt/spark/bin/spark-sql --packages org.apache.iceberg:iceberg-spark-runtime-3.2_2.12:1.1.0,org.apache.hadoop:hadoop-aws:3.3.2\ + --conf spark.jars.ivy=${HOME}/work-dir/.ivy2 \ + --conf spark.sql.catalog.demo=org.apache.iceberg.spark.SparkCatalog \ + --conf spark.sql.catalog.demo.type=hadoop \ + --conf spark.sql.catalog.demo.warehouse=s3a://hummock001/iceberg-data \ + --conf spark.sql.catalog.demo.hadoop.fs.s3a.endpoint=http://minio-0:9301 \ + --conf spark.sql.catalog.demo.hadoop.fs.s3a.path.style.access=true \ + --conf spark.sql.catalog.demo.hadoop.fs.s3a.access.key=hummockadmin \ + --conf spark.sql.catalog.demo.hadoop.fs.s3a.secret.key=hummockadmin \ + --conf spark.sql.defaultCatalog=demo \ + -f /spark-script/$1.sql \ No newline at end of file diff --git a/integration_test/livestream/create_mv.sql b/integration_test/livestream/create_mv.sql new file mode 100644 index 0000000000000..32dbd6f4a8c9b --- /dev/null +++ b/integration_test/livestream/create_mv.sql @@ -0,0 +1,69 @@ +CREATE MATERIALIZED VIEW live_video_qos_10min AS +SELECT + window_start AS report_ts, + room_id, + SUM(video_total_freeze_duration) AS video_total_freeze_duration, + AVG(video_lost_pps) as video_lost_pps, + AVG(video_rtt) as video_rtt +FROM + TUMBLE( + live_stream_metrics, + report_timestamp, + INTERVAL '10' MINUTE + ) +GROUP BY + window_start, + room_id; + +-- +-- +-- -- Unsupported yet. 
+-- CREATE MATERIALIZED VIEW blocked_user_ratio_10min AS +-- SELECT +-- window_start AS report_ts, +-- ( +-- COUNT() FILTER ( +-- WHERE +-- video_total_freeze_duration > 0 +-- ) / COUNT(DISTINCT user_id) :: DOUBLE PRECISION +-- ) AS blocked_user_ratio, +-- FROM +-- TUMBLE( +-- live_stream_metrics, +-- report_timestamp, +-- INTERVAL '10' MINUTE +-- ) +-- GROUP BY +-- window_start, +-- room_id; +-- +-- +-- +-- A real-time dashboard of the total UV. +CREATE MATERIALIZED VIEW total_user_visit_1min AS +SELECT + window_start AS report_ts, + COUNT(DISTINCT user_id) as uv +FROM + TUMBLE( + live_stream_metrics, + report_timestamp, + INTERVAL '1' MINUTE + ) +GROUP BY + window_start; + +CREATE MATERIALIZED VIEW room_user_visit_1min AS +SELECT + window_start AS report_ts, + COUNT(DISTINCT user_id) as uv, + room_id +FROM + TUMBLE( + live_stream_metrics, + report_timestamp, + INTERVAL '1' MINUTE + ) +GROUP BY + window_start, + room_id; \ No newline at end of file diff --git a/integration_test/livestream/create_source.sql b/integration_test/livestream/create_source.sql new file mode 100644 index 0000000000000..3e230af07f1b0 --- /dev/null +++ b/integration_test/livestream/create_source.sql @@ -0,0 +1,26 @@ +CREATE SOURCE live_stream_metrics ( + client_ip VARCHAR, + user_agent VARCHAR, + user_id VARCHAR, + -- The live room. + room_id VARCHAR, + -- Sent bits per second. + video_bps BIGINT, + -- Sent frames per second. Typically 30 fps. + video_fps BIGINT, + -- Round-trip time (in ms). 200ms is recommended. + video_rtt BIGINT, + -- Lost packets per second. + video_lost_pps BIGINT, + -- How long was the longest freeze (in ms). + video_longest_freeze_duration BIGINT, + -- Total freeze duration. + video_total_freeze_duration BIGINT, + report_timestamp TIMESTAMPTZ, + country VARCHAR +) WITH ( + connector = 'kafka', + topic = 'live_stream_metrics', + properties.bootstrap.server = 'message_queue:29092', + scan.startup.mode = 'earliest' +) ROW FORMAT JSON; \ No newline at end of file diff --git a/integration_test/livestream/data_check b/integration_test/livestream/data_check new file mode 100644 index 0000000000000..3aaf674ab9add --- /dev/null +++ b/integration_test/livestream/data_check @@ -0,0 +1 @@ +live_stream_metrics,live_video_qos_10min,total_user_visit_1min,room_user_visit_1min \ No newline at end of file diff --git a/integration_test/livestream/docker-compose.yml b/integration_test/livestream/docker-compose.yml new file mode 100644 index 0000000000000..8e3eb3a870e3f --- /dev/null +++ b/integration_test/livestream/docker-compose.yml @@ -0,0 +1,62 @@ +--- +version: "3" +services: + compactor-0: + extends: + file: ../../docker/docker-compose.yml + service: compactor-0 + compute-node-0: + extends: + file: ../../docker/docker-compose.yml + service: compute-node-0 + etcd-0: + extends: + file: ../../docker/docker-compose.yml + service: etcd-0 + frontend-node-0: + extends: + file: ../../docker/docker-compose.yml + service: frontend-node-0 + grafana-0: + extends: + file: ../../docker/docker-compose.yml + service: grafana-0 + meta-node-0: + extends: + file: ../../docker/docker-compose.yml + service: meta-node-0 + minio-0: + extends: + file: ../../docker/docker-compose.yml + service: minio-0 + prometheus-0: + extends: + file: ../../docker/docker-compose.yml + service: prometheus-0 + message_queue: + extends: + file: ../../docker/docker-compose.yml + service: message_queue + datagen: + image: ghcr.io/risingwavelabs/demo-datagen:v1.0.9 + depends_on: [message_queue] + command: + - /bin/sh + - -c + - /datagen --mode 
livestream --qps 2 kafka --brokers message_queue:29092 + restart: always + container_name: datagen +volumes: + compute-node-0: + external: false + etcd-0: + external: false + grafana-0: + external: false + minio-0: + external: false + prometheus-0: + external: false + message_queue: + external: false +name: risingwave-compose diff --git a/integration_test/livestream/livestream.proto b/integration_test/livestream/livestream.proto new file mode 100644 index 0000000000000..125b66a744c1d --- /dev/null +++ b/integration_test/livestream/livestream.proto @@ -0,0 +1,19 @@ +syntax = "proto3"; + +package livestream.schema; +option go_package = "livestream/proto"; + +message LiveStreamMetrics { + string client_ip = 1; + string user_agent = 2; + string user_id = 3; + string room_id = 4; + int64 video_bps = 5; + int64 video_fps = 6; + int64 video_rtt = 7; + int64 video_lost_pps = 8; + int64 video_longest_freeze_duration = 9; + int64 video_total_freeze_duration = 10; + int64 report_timestamp = 11; + string country = 12; +} diff --git a/integration_test/livestream/pb/create_mv.sql b/integration_test/livestream/pb/create_mv.sql new file mode 100644 index 0000000000000..4a2739d6921bb --- /dev/null +++ b/integration_test/livestream/pb/create_mv.sql @@ -0,0 +1,62 @@ +CREATE MATERIALIZED VIEW live_stream_metrics AS +SELECT + client_ip, + user_agent, + user_id, + room_id, + video_bps, + video_fps, + video_rtt, + video_lost_pps, + video_longest_freeze_duration, + video_total_freeze_duration, + to_timestamp(report_timestamp) as report_timestamp, + country +FROM + live_stream_metrics_pb; + +CREATE MATERIALIZED VIEW live_video_qos_10min AS +SELECT + window_start AS report_ts, + room_id, + SUM(video_total_freeze_duration) AS video_total_freeze_duration, + AVG(video_lost_pps) as video_lost_pps, + AVG(video_rtt) as video_rtt +FROM + TUMBLE( + live_stream_metrics, + report_timestamp, + INTERVAL '10' MINUTE + ) +GROUP BY + window_start, + room_id; + +-- A real-time dashboard of the total UV. +CREATE MATERIALIZED VIEW total_user_visit_1min AS +SELECT + window_start AS report_ts, + COUNT(DISTINCT user_id) as uv +FROM + TUMBLE( + live_stream_metrics, + report_timestamp, + INTERVAL '1' MINUTE + ) +GROUP BY + window_start; + +CREATE MATERIALIZED VIEW room_user_visit_1min AS +SELECT + window_start AS report_ts, + COUNT(DISTINCT user_id) as uv, + room_id +FROM + TUMBLE( + live_stream_metrics, + report_timestamp, + INTERVAL '1' MINUTE + ) +GROUP BY + window_start, + room_id; \ No newline at end of file diff --git a/integration_test/livestream/pb/create_source.sql b/integration_test/livestream/pb/create_source.sql new file mode 100644 index 0000000000000..5664a8502141f --- /dev/null +++ b/integration_test/livestream/pb/create_source.sql @@ -0,0 +1,6 @@ +CREATE SOURCE live_stream_metrics_pb WITH ( + connector = 'kafka', + topic = 'live_stream_metrics', + properties.bootstrap.server = 'message_queue:29092', + scan.startup.mode = 'earliest' +) ROW FORMAT PROTOBUF MESSAGE 'livestream.schema.LiveStreamMetrics' ROW SCHEMA LOCATION 'http://file_server:8080/schema'; diff --git a/integration_test/livestream/query.sql b/integration_test/livestream/query.sql new file mode 100644 index 0000000000000..193c87cf4991d --- /dev/null +++ b/integration_test/livestream/query.sql @@ -0,0 +1,19 @@ +--- TODO: we need now() for ad-hoc mode. 
+-- SELECT +-- * +-- FROM +-- thread_view_count +-- WHERE +-- window_time > ( +-- '2022-7-22 18:43' :: TIMESTAMP - INTERVAL '1 day' +-- ) +-- AND window_time < ( +-- '2022-7-22 18:43' :: TIMESTAMP - INTERVAL '1 day' + INTERVAL '10 minutes' +-- ) +-- AND target_id = 'thread83 +SELECT + * +FROM + live_video_qos_10min +LIMIT + 10; \ No newline at end of file diff --git a/integration_test/livestream/schema b/integration_test/livestream/schema new file mode 100644 index 0000000000000..08b5cd4852c7e --- /dev/null +++ b/integration_test/livestream/schema @@ -0,0 +1,18 @@ + +‰ +livestream.protolivestream.schema"Å +LiveStreamMetrics + client_ip ( RclientIp + +user_agent ( R userAgent +user_id ( RuserId +room_id ( RroomId + video_bps (RvideoBps + video_fps (RvideoFps + video_rtt (RvideoRtt$ +video_lost_pps (R videoLostPpsA +video_longest_freeze_duration (RvideoLongestFreezeDuration= +video_total_freeze_duration + (RvideoTotalFreezeDuration) +report_timestamp (RreportTimestamp +country ( RcountryBZlivestream/protobproto3 \ No newline at end of file diff --git a/integration_test/mysql-cdc/create_mv.sql b/integration_test/mysql-cdc/create_mv.sql new file mode 100644 index 0000000000000..86de17ee12e04 --- /dev/null +++ b/integration_test/mysql-cdc/create_mv.sql @@ -0,0 +1,8 @@ +CREATE MATERIALIZED VIEW product_count AS +SELECT + product_id, + COUNT(*) as product_count +FROM + orders +GROUP BY + product_id; \ No newline at end of file diff --git a/integration_test/mysql-cdc/create_source.sql b/integration_test/mysql-cdc/create_source.sql new file mode 100644 index 0000000000000..1a35f6dae9855 --- /dev/null +++ b/integration_test/mysql-cdc/create_source.sql @@ -0,0 +1,18 @@ +create table orders ( + order_id int, + order_date bigint, + customer_name varchar, + price decimal, + product_id int, + order_status smallint, + PRIMARY KEY (order_id) +) with ( + connector = 'mysql-cdc', + hostname = 'mysql', + port = '3306', + username = 'root', + password = '123456', + database.name = 'mydb', + table.name = 'orders', + server.id = '1' +); \ No newline at end of file diff --git a/integration_test/mysql-cdc/data_check b/integration_test/mysql-cdc/data_check new file mode 100644 index 0000000000000..1f4c99fed9650 --- /dev/null +++ b/integration_test/mysql-cdc/data_check @@ -0,0 +1 @@ +orders,product_count \ No newline at end of file diff --git a/integration_test/mysql-cdc/docker-compose.yml b/integration_test/mysql-cdc/docker-compose.yml new file mode 100644 index 0000000000000..d25a91d8e9cfe --- /dev/null +++ b/integration_test/mysql-cdc/docker-compose.yml @@ -0,0 +1,78 @@ +--- +version: "3" +services: + compactor-0: + extends: + file: ../../docker/docker-compose.yml + service: compactor-0 + compute-node-0: + extends: + file: ../../docker/docker-compose.yml + service: compute-node-0 + etcd-0: + extends: + file: ../../docker/docker-compose.yml + service: etcd-0 + frontend-node-0: + extends: + file: ../../docker/docker-compose.yml + service: frontend-node-0 + grafana-0: + extends: + file: ../../docker/docker-compose.yml + service: grafana-0 + meta-node-0: + extends: + file: ../../docker/docker-compose.yml + service: meta-node-0 + minio-0: + extends: + file: ../../docker/docker-compose.yml + service: minio-0 + prometheus-0: + extends: + file: ../../docker/docker-compose.yml + service: prometheus-0 + mysql: + image: mysql:8.0 + ports: + - "3306:3306" + environment: + - MYSQL_ROOT_PASSWORD=123456 + - MYSQL_USER=mysqluser + - MYSQL_PASSWORD=mysqlpw + - MYSQL_DATABASE=mydb + healthcheck: + test: [ "CMD-SHELL", "mysqladmin 
ping -h 127.0.0.1 -u root -p123456" ] + interval: 5s + timeout: 5s + retries: 5 + container_name: mysql + connector-node: + extends: + file: ../../docker/docker-compose.yml + service: connector-node + datagen: + image: mysql:8.0 + depends_on: + - mysql + command: + - /bin/sh + - -c + - "mysql -p123456 -h mysql mydb < mysql_prepare.sql" + volumes: + - "./mysql_prepare.sql:/mysql_prepare.sql" + container_name: datagen + restart: on-failure +volumes: + compute-node-0: + external: false + etcd-0: + external: false + grafana-0: + external: false + minio-0: + external: false + prometheus-0: + external: false +name: risingwave-compose diff --git a/integration_test/mysql-cdc/mysql_prepare.sql b/integration_test/mysql-cdc/mysql_prepare.sql new file mode 100644 index 0000000000000..65758e11f3af7 --- /dev/null +++ b/integration_test/mysql-cdc/mysql_prepare.sql @@ -0,0 +1,28 @@ +-- mysql -p123456 -uroot -h 127.0.0.1 mydb < mysql_prepare.sql +-- +-- Mysql +USE mydb; + +create table orders ( + order_id int, + order_date bigint, + customer_name varchar(200), + price decimal, + product_id int, + order_status smallint, + PRIMARY KEY (order_id) +); + +insert into + orders +values + (1, 1558430840000, 'Bob', 10.50, 1, 1), + (2, 1558430840001, 'Alice', 20.50, 2, 1), + ( + 3, + 1558430840002, + 'Alice', + 18.50, + 2, + 1 + ); \ No newline at end of file diff --git a/integration_test/mysql-cdc/query.sql b/integration_test/mysql-cdc/query.sql new file mode 100644 index 0000000000000..a66e5c24f78e2 --- /dev/null +++ b/integration_test/mysql-cdc/query.sql @@ -0,0 +1,6 @@ +SELECT + * +FROM + orders +LIMIT + 10; \ No newline at end of file diff --git a/integration_test/mysql-sink/create_mv.sql b/integration_test/mysql-sink/create_mv.sql new file mode 100644 index 0000000000000..ea3ab9d71272a --- /dev/null +++ b/integration_test/mysql-sink/create_mv.sql @@ -0,0 +1,16 @@ +CREATE MATERIALIZED VIEW target_count AS +SELECT + target_id, + COUNT(*) AS target_count +FROM + user_behaviors +GROUP BY + target_id; + +CREATE SINK target_count_mysql_sink +FROM + target_count WITH ( + connector = 'jdbc', + jdbc.url = 'jdbc:mysql://mysql:3306/mydb?user=root&password=123456', + table.name = 'target_count' + ); \ No newline at end of file diff --git a/integration_test/mysql-sink/create_source.sql b/integration_test/mysql-sink/create_source.sql new file mode 100644 index 0000000000000..7a9e3d3add4c8 --- /dev/null +++ b/integration_test/mysql-sink/create_source.sql @@ -0,0 +1,14 @@ +CREATE SOURCE user_behaviors ( + user_id VARCHAR, + target_id VARCHAR, + target_type VARCHAR, + event_timestamp TIMESTAMPTZ, + behavior_type VARCHAR, + parent_target_type VARCHAR, + parent_target_id VARCHAR +) WITH ( + connector = 'kafka', + topic = 'user_behaviors', + properties.bootstrap.server = 'message_queue:29092', + scan.startup.mode = 'earliest' +) ROW FORMAT JSON; \ No newline at end of file diff --git a/integration_test/mysql-sink/data_check b/integration_test/mysql-sink/data_check new file mode 100644 index 0000000000000..3835eb979b86e --- /dev/null +++ b/integration_test/mysql-sink/data_check @@ -0,0 +1 @@ +user_behaviors,target_count \ No newline at end of file diff --git a/integration_test/mysql-sink/docker-compose.yml b/integration_test/mysql-sink/docker-compose.yml new file mode 100644 index 0000000000000..1c187a62eb3a0 --- /dev/null +++ b/integration_test/mysql-sink/docker-compose.yml @@ -0,0 +1,91 @@ +--- +version: "3" +services: + compactor-0: + extends: + file: ../../docker/docker-compose.yml + service: compactor-0 + compute-node-0: + 
extends: + file: ../../docker/docker-compose.yml + service: compute-node-0 + etcd-0: + extends: + file: ../../docker/docker-compose.yml + service: etcd-0 + frontend-node-0: + extends: + file: ../../docker/docker-compose.yml + service: frontend-node-0 + grafana-0: + extends: + file: ../../docker/docker-compose.yml + service: grafana-0 + meta-node-0: + extends: + file: ../../docker/docker-compose.yml + service: meta-node-0 + minio-0: + extends: + file: ../../docker/docker-compose.yml + service: minio-0 + prometheus-0: + extends: + file: ../../docker/docker-compose.yml + service: prometheus-0 + message_queue: + extends: + file: ../../docker/docker-compose.yml + service: message_queue + mysql: + image: mysql:8.0 + ports: + - "3306:3306" + environment: + - MYSQL_ROOT_PASSWORD=123456 + - MYSQL_USER=mysqluser + - MYSQL_PASSWORD=mysqlpw + - MYSQL_DATABASE=mydb + healthcheck: + test: [ "CMD-SHELL", "mysqladmin ping -h 127.0.0.1 -u root -p123456" ] + interval: 5s + timeout: 5s + retries: 5 + container_name: mysql + connector-node: + extends: + file: ../../docker/docker-compose.yml + service: connector-node + datagen: + image: ghcr.io/risingwavelabs/demo-datagen:v1.0.9 + depends_on: [message_queue] + command: + - /bin/sh + - -c + - /datagen --mode clickstream --qps 2 kafka --brokers message_queue:29092 + restart: always + container_name: datagen + prepare_mysql: + image: mysql:8.0 + depends_on: + - mysql + command: + - /bin/sh + - -c + - "mysql -p123456 -h mysql mydb < mysql_prepare.sql" + volumes: + - "./mysql_prepare.sql:/mysql_prepare.sql" + container_name: prepare_mysql + restart: on-failure +volumes: + compute-node-0: + external: false + etcd-0: + external: false + grafana-0: + external: false + minio-0: + external: false + prometheus-0: + external: false +name: risingwave-compose diff --git a/integration_test/mysql-sink/mysql_prepare.sql b/integration_test/mysql-sink/mysql_prepare.sql new file mode 100644 index 0000000000000..cac57c699a154 --- /dev/null +++ b/integration_test/mysql-sink/mysql_prepare.sql @@ -0,0 +1,4 @@ +CREATE TABLE target_count ( + target_id VARCHAR(128), + target_count BIGINT +); \ No newline at end of file diff --git a/integration_test/mysql-sink/query.sql b/integration_test/mysql-sink/query.sql new file mode 100644 index 0000000000000..e09c66a255f10 --- /dev/null +++ b/integration_test/mysql-sink/query.sql @@ -0,0 +1,6 @@ +SELECT + * +FROM + target_count +LIMIT + 10; \ No newline at end of file diff --git a/integration_test/postgres-cdc/create_mv.sql b/integration_test/postgres-cdc/create_mv.sql new file mode 100644 index 0000000000000..ec31f79c941b7 --- /dev/null +++ b/integration_test/postgres-cdc/create_mv.sql @@ -0,0 +1,28 @@ +CREATE MATERIALIZED VIEW city_population AS +SELECT + city, + COUNT(*) as population +FROM + person +GROUP BY + city; + +CREATE MATERIALIZED VIEW nexmark_q8 AS +SELECT + P.id, + P.name, + A.starttime +FROM + person as P + JOIN ( + SELECT + seller, + window_start AS starttime, + window_end AS endtime + FROM + TUMBLE(auction, date_time, INTERVAL '10' SECOND) + GROUP BY + seller, + window_start, + window_end + ) A ON P.id = A.seller; \ No newline at end of file diff --git a/integration_test/postgres-cdc/create_source.sql b/integration_test/postgres-cdc/create_source.sql new file mode 100644 index 0000000000000..cc08684f5f365 --- /dev/null +++ b/integration_test/postgres-cdc/create_source.sql @@ -0,0 +1,41 @@ +create table person ( + "id" int, + "name" varchar, + "email_address" varchar, + "credit_card" varchar, + "city" varchar, + PRIMARY KEY ("id") 
+) with ( + connector = 'postgres-cdc', + hostname = 'postgres', + port = '5432', + username = 'myuser', + password = '123456', + database.name = 'mydb', + schema.name = 'public', + table.name = 'person', + slot.name = 'person' +); + +CREATE SOURCE t_auction ( + id BIGINT, + item_name VARCHAR, + date_time BIGINT, + seller INT, + category INT +) WITH ( + connector = 'kafka', + topic = 'auction', + properties.bootstrap.server = 'message_queue:29092', + scan.startup.mode = 'earliest' +) ROW FORMAT JSON; + +CREATE VIEW auction as +SELECT + id, + item_name, + to_timestamp(date_time) as date_time, + seller, + category +FROM + t_auction; \ No newline at end of file diff --git a/integration_test/postgres-cdc/data_check b/integration_test/postgres-cdc/data_check new file mode 100644 index 0000000000000..4e00aba632aaa --- /dev/null +++ b/integration_test/postgres-cdc/data_check @@ -0,0 +1 @@ +person,city_population,nexmark_q8 \ No newline at end of file diff --git a/integration_test/postgres-cdc/docker-compose.yml b/integration_test/postgres-cdc/docker-compose.yml new file mode 100644 index 0000000000000..c3583ea7d7e71 --- /dev/null +++ b/integration_test/postgres-cdc/docker-compose.yml @@ -0,0 +1,94 @@ +--- +version: "3" +services: + compactor-0: + extends: + file: ../../docker/docker-compose.yml + service: compactor-0 + compute-node-0: + extends: + file: ../../docker/docker-compose.yml + service: compute-node-0 + etcd-0: + extends: + file: ../../docker/docker-compose.yml + service: etcd-0 + frontend-node-0: + extends: + file: ../../docker/docker-compose.yml + service: frontend-node-0 + grafana-0: + extends: + file: ../../docker/docker-compose.yml + service: grafana-0 + meta-node-0: + extends: + file: ../../docker/docker-compose.yml + service: meta-node-0 + minio-0: + extends: + file: ../../docker/docker-compose.yml + service: minio-0 + prometheus-0: + extends: + file: ../../docker/docker-compose.yml + service: prometheus-0 + # Use this command to connect to the DB from outside the container: + # docker exec postgres psql --username=myuser --dbname=mydb + postgres: + image: postgres + environment: + - POSTGRES_USER=myuser + - POSTGRES_PASSWORD=123456 + - POSTGRES_DB=mydb + ports: + - 5432:5432 + healthcheck: + test: [ "CMD-SHELL", "pg_isready --username=myuser --dbname=mydb" ] + interval: 5s + timeout: 5s + retries: 5 + command: [ "postgres", "-c", "wal_level=logical" ] + restart: always + container_name: postgres + connector-node: + extends: + file: ../../docker/docker-compose.yml + service: connector-node + postgres_prepare: + image: postgres + depends_on: + - postgres + command: + - /bin/sh + - -c + - "psql postgresql://myuser:123456@postgres:5432/mydb < postgres_prepare.sql" + volumes: + - "./postgres_prepare.sql:/postgres_prepare.sql" + container_name: postgres_prepare + restart: on-failure + datagen: + image: ghcr.io/risingwavelabs/demo-datagen:v1.1.0 + depends_on: [message_queue] + command: + - /bin/sh + - -c + - /datagen --mode nexmark --qps 2 kafka --brokers message_queue:29092 + restart: always + container_name: datagen + message_queue: + extends: + file: ../../docker/docker-compose.yml + service: message_queue +volumes: + compute-node-0: + external: false + etcd-0: + external: false + grafana-0: + external: false + minio-0: + external: false + prometheus-0: + external: false +name: risingwave-compose diff --git a/integration_test/postgres-cdc/postgres_prepare.sql b/integration_test/postgres-cdc/postgres_prepare.sql new file mode 100644 index 0000000000000..5a7c638011183 --- /dev/null +++ 
b/integration_test/postgres-cdc/postgres_prepare.sql @@ -0,0 +1,112 @@ +create table person ( + "id" int, + "name" varchar(64), + "email_address" varchar(200), + "credit_card" varchar(200), + "city" varchar(200), + PRIMARY KEY ("id") +); + +ALTER TABLE + public.person REPLICA IDENTITY FULL; + +INSERT INTO person VALUES (1000, 'vicky noris', 'yplkvgz@qbxfg.com', '7878 5821 1864 2539', 'cheyenne'); +INSERT INTO person VALUES (1001, 'peter white', 'myckhsp@xpmpe.com', '1781 2313 8157 6974', 'boise'); +INSERT INTO person VALUES (1002, 'sarah spencer', 'wipvdbm@dkaap.com', '3453 4987 9481 6270', 'los angeles'); +INSERT INTO person VALUES (1003, 'vicky jones', 'kedmrpz@xiauh.com', '5536 1959 5460 2096', 'portland'); +INSERT INTO person VALUES (1004, 'julie white', 'egpemle@lrhcg.com', '0052 8113 1582 4430', 'seattle'); +INSERT INTO person VALUES (1005, 'sarah smith', 'siqjtnt@tjjek.com', '4591 5419 7260 8350', 'los angeles'); +INSERT INTO person VALUES (1006, 'walter white', 'fwdbytp@zepzq.com', '1327 3245 1956 8200', 'san francisco'); +INSERT INTO person VALUES (1007, 'walter spencer', 'ktncerj@jlikw.com', '5136 7504 2879 7886', 'los angeles'); +INSERT INTO person VALUES (1008, 'john abrams', 'jmvgsrq@nyfud.com', '6064 8548 6057 2021', 'redmond'); +INSERT INTO person VALUES (1009, 'peter noris', 'bhjbkpk@svzrx.com', '1063 2940 2119 8587', 'cheyenne'); +INSERT INTO person VALUES (1010, 'kate smith', 'fvsmqlb@grtho.com', '9474 6887 6463 6972', 'bend'); +INSERT INTO person VALUES (1011, 'vicky noris', 'chyakdh@acjjz.com', '9959 4034 5717 6729', 'boise'); +INSERT INTO person VALUES (1012, 'walter jones', 'utfqxal@sfxco.com', '8793 6517 3085 0542', 'boise'); +INSERT INTO person VALUES (1013, 'sarah walton', 'xdybqki@xrvkt.com', '2280 4209 8743 0735', 'kent'); +INSERT INTO person VALUES (1014, 'walter abrams', 'cbujzko@ehffe.com', '1235 3048 6067 9304', 'phoenix'); +INSERT INTO person VALUES (1015, 'vicky jones', 'xyygoyf@msejb.com', '3148 5012 3225 2870', 'los angeles'); +INSERT INTO person VALUES (1016, 'john walton', 'yzbccmz@hdnvm.com', '0426 2682 6145 8371', 'seattle'); +INSERT INTO person VALUES (1017, 'luke jones', 'yozosta@nzewf.com', '9641 9352 0248 2749', 'redmond'); +INSERT INTO person VALUES (1018, 'julie white', 'clhqozw@gioov.com', '3622 5461 2365 3624', 'bend'); +INSERT INTO person VALUES (1019, 'paul abrams', 'fshovpk@ayoej.com', '4433 7863 9751 7878', 'redmond'); +INSERT INTO person VALUES (1020, 'deiter smith', 'nqgdcpx@sumai.com', '0908 3870 4528 4710', 'boise'); +INSERT INTO person VALUES (1021, 'john walton', 'zzjwizw@skwdx.com', '2404 5072 3429 2483', 'phoenix'); +INSERT INTO person VALUES (1022, 'paul walton', 'zwhnjwb@ojuft.com', '0402 5453 9709 8030', 'portland'); +INSERT INTO person VALUES (1023, 'peter bartels', 'gwlteve@aikvf.com', '6555 8884 1360 0295', 'redmond'); +INSERT INTO person VALUES (1024, 'saul shultz', 'mghpttp@sxihm.com', '7987 2816 9818 8727', 'cheyenne'); +INSERT INTO person VALUES (1025, 'julie bartels', 'cxjfsuu@uwcpw.com', '0352 3457 2885 0266', 'san francisco'); +INSERT INTO person VALUES (1026, 'paul spencer', 'plcully@qwfas.com', '2017 1897 0926 6328', 'los angeles'); +INSERT INTO person VALUES (1027, 'luke white', 'jtatgee@wjaok.com', '2465 7541 1015 4655', 'portland'); +INSERT INTO person VALUES (1028, 'kate white', 'mmcqrfk@fldvr.com', '3696 3808 1329 0692', 'seattle'); +INSERT INTO person VALUES (1029, 'kate spencer', 'wkixktk@nqzin.com', '8540 3588 4648 5329', 'portland'); +INSERT INTO person VALUES (1030, 'sarah walton', 'bhinrlm@itvuw.com', 
'1009 7742 8888 9596', 'portland'); +INSERT INTO person VALUES (1031, 'luke abrams', 'tmoomlm@umwjm.com', '1161 4093 8361 3851', 'redmond'); +INSERT INTO person VALUES (1032, 'saul bartels', 'kkxmkbp@sjldo.com', '5311 2081 6147 8292', 'cheyenne'); +INSERT INTO person VALUES (1033, 'sarah smith', 'gixszyd@ikahc.com', '0654 0143 9916 7419', 'cheyenne'); +INSERT INTO person VALUES (1034, 'sarah spencer', 'wazwjxh@giysr.com', '8093 7447 4488 2464', 'los angeles'); +INSERT INTO person VALUES (1035, 'kate smith', 'xdtubdc@eoqat.com', '1880 7605 7505 3038', 'seattle'); +INSERT INTO person VALUES (1036, 'deiter white', 'lzxmcig@pfyrp.com', '8336 1080 3823 2249', 'los angeles'); +INSERT INTO person VALUES (1037, 'john jones', 'qdolslh@pzlry.com', '4394 1929 0794 1731', 'los angeles'); +INSERT INTO person VALUES (1038, 'walter spencer', 'ljboats@roguq.com', '5990 9981 6050 5247', 'bend'); +INSERT INTO person VALUES (1039, 'luke jones', 'sobojsi@vhqkh.com', '1406 2686 9359 7086', 'cheyenne'); +INSERT INTO person VALUES (1040, 'luke bartels', 'qtlduro@zijhv.com', '6662 1330 8131 8426', 'cheyenne'); +INSERT INTO person VALUES (1041, 'deiter jones', 'chmequx@mkfof.com', '2941 9597 1592 6346', 'phoenix'); +INSERT INTO person VALUES (1042, 'john smith', 'odilagg@ckwuo.com', '7919 0755 1682 9068', 'portland'); +INSERT INTO person VALUES (1043, 'vicky walton', 'nhcbcvg@kkqvz.com', '0031 6046 4743 7296', 'cheyenne'); +INSERT INTO person VALUES (1044, 'peter white', 'bigajpm@tslez.com', '6077 8921 3999 7697', 'bend'); +INSERT INTO person VALUES (1045, 'walter shultz', 'vaefysn@unvsg.com', '3638 3193 7385 6193', 'boise'); +INSERT INTO person VALUES (1046, 'saul abrams', 'zxfjtbp@fgwli.com', '4031 2701 7554 5688', 'cheyenne'); +INSERT INTO person VALUES (1047, 'saul jones', 'xyeymyt@otocr.com', '5732 1968 8707 8446', 'redmond'); +INSERT INTO person VALUES (1048, 'peter bartels', 'ysmazaq@rnpky.com', '4696 0667 3826 9971', 'san francisco'); +INSERT INTO person VALUES (1049, 'walter noris', 'zeeibrx@aljnm.com', '1484 3392 4739 2098', 'redmond'); +INSERT INTO person VALUES (1050, 'peter smith', 'kabfpld@fhfis.com', '5179 0198 7232 1932', 'boise'); +INSERT INTO person VALUES (1051, 'julie abrams', 'knmtfvw@lyiyz.com', '3687 0788 3300 6960', 'cheyenne'); +INSERT INTO person VALUES (1052, 'peter abrams', 'uweavbw@ijmcd.com', '9341 0308 6833 3448', 'portland'); +INSERT INTO person VALUES (1053, 'paul noris', 'hnijvou@zawwc.com', '1502 1867 0969 4737', 'seattle'); +INSERT INTO person VALUES (1054, 'sarah jones', 'kmhnjtg@cetsb.com', '3145 3266 2116 5290', 'cheyenne'); +INSERT INTO person VALUES (1055, 'kate abrams', 'gyocmgj@uimwr.com', '0552 0064 4476 2409', 'cheyenne'); +INSERT INTO person VALUES (1056, 'julie abrams', 'ckmoalu@ndgaj.com', '9479 9270 0678 6846', 'boise'); +INSERT INTO person VALUES (1057, 'julie white', 'chxvkez@djjaa.com', '3522 2797 5148 3246', 'cheyenne'); +INSERT INTO person VALUES (1058, 'walter abrams', 'rmfqwms@pvttk.com', '8478 3866 5662 6467', 'seattle'); +INSERT INTO person VALUES (1059, 'julie spencer', 'nykvghm@kdhpt.com', '9138 9947 8873 7763', 'kent'); +INSERT INTO person VALUES (1060, 'kate abrams', 'wqxypwn@jrafo.com', '5422 1018 4333 0049', 'portland'); +INSERT INTO person VALUES (1061, 'kate white', 'njkweqw@qlinl.com', '3254 1815 6422 1716', 'san francisco'); +INSERT INTO person VALUES (1062, 'luke bartels', 'emoramu@tkqmj.com', '7655 7679 5909 2251', 'portland'); +INSERT INTO person VALUES (1063, 'julie spencer', 'acpybcy@fygni.com', '0523 2583 3342 5588', 'portland'); +INSERT 
INTO person VALUES (1064, 'luke spencer', 'rxlzmbi@ftvjh.com', '3989 4985 1721 9240', 'los angeles'); +INSERT INTO person VALUES (1065, 'john jones', 'sdjpica@sfddi.com', '7716 1367 0259 3889', 'bend'); +INSERT INTO person VALUES (1066, 'paul white', 'gclssac@cjcqr.com', '2708 5518 8447 8022', 'kent'); +INSERT INTO person VALUES (1067, 'vicky bartels', 'qsurdwa@zcyxz.com', '9332 8313 3113 1752', 'cheyenne'); +INSERT INTO person VALUES (1068, 'john spencer', 'rvdbxjj@thhat.com', '2065 0039 4966 7017', 'phoenix'); +INSERT INTO person VALUES (1069, 'luke white', 'rlnjujw@yajij.com', '8511 7005 7854 1288', 'portland'); +INSERT INTO person VALUES (1070, 'sarah jones', 'hpuddzw@zqxub.com', '4625 1520 6481 1767', 'bend'); +INSERT INTO person VALUES (1071, 'luke shultz', 'uhlejag@whmqq.com', '3427 8456 9076 1714', 'kent'); +INSERT INTO person VALUES (1072, 'julie shultz', 'xzwbhur@otviv.com', '6404 5841 0949 2641', 'boise'); +INSERT INTO person VALUES (1073, 'vicky walton', 'ercndev@gequo.com', '8807 4321 6973 6085', 'boise'); +INSERT INTO person VALUES (1074, 'julie noris', 'jytjumk@fddus.com', '7463 7084 1696 8892', 'kent'); +INSERT INTO person VALUES (1075, 'julie bartels', 'hugijat@huhob.com', '4530 8776 7942 5085', 'los angeles'); +INSERT INTO person VALUES (1076, 'kate spencer', 'snqygzv@tsnwb.com', '2522 9594 4307 9831', 'boise'); +INSERT INTO person VALUES (1077, 'kate jones', 'lsshriy@aknvv.com', '7065 2545 7960 0041', 'portland'); +INSERT INTO person VALUES (1078, 'saul walton', 'xveffme@gcplt.com', '5848 5246 7319 1450', 'phoenix'); +INSERT INTO person VALUES (1079, 'vicky smith', 'fhcdtoq@aemjt.com', '3071 1822 6864 8221', 'los angeles'); +INSERT INTO person VALUES (1080, 'luke shultz', 'zlrbrav@pynxn.com', '2038 4905 4566 6031', 'phoenix'); +INSERT INTO person VALUES (1081, 'john shultz', 'giradrs@mavun.com', '3344 8962 5224 8904', 'portland'); +INSERT INTO person VALUES (1082, 'john bartels', 'nqxjwrg@ppebb.com', '7144 4781 7168 6500', 'los angeles'); +INSERT INTO person VALUES (1083, 'john white', 'kkcnemc@wcdej.com', '6683 7670 7530 0890', 'bend'); +INSERT INTO person VALUES (1084, 'walter abrams', 'bmjdpec@ynwal.com', '3594 8838 1244 9650', 'bend'); +INSERT INTO person VALUES (1085, 'deiter jones', 'xquhjkv@azyxm.com', '6385 5861 0188 6728', 'los angeles'); +INSERT INTO person VALUES (1086, 'vicky shultz', 'lwmmeqx@rvddr.com', '5916 6762 6797 4669', 'los angeles'); +INSERT INTO person VALUES (1087, 'vicky walton', 'askxzha@lachv.com', '2178 8782 4988 7051', 'bend'); +INSERT INTO person VALUES (1088, 'kate noris', 'tbalnld@nmxkq.com', '3240 6224 1233 7005', 'boise'); +INSERT INTO person VALUES (1089, 'vicky noris', 'grjawpy@zkyds.com', '2009 4332 9634 9823', 'boise'); +INSERT INTO person VALUES (1090, 'sarah bartels', 'hrpmxnr@rvzgq.com', '0733 1934 0398 7793', 'redmond'); +INSERT INTO person VALUES (1091, 'saul walton', 'ntqrfhp@oumoz.com', '8923 8221 6882 0275', 'bend'); +INSERT INTO person VALUES (1092, 'paul noris', 'qevgjyo@wubwo.com', '9303 3741 8490 6300', 'portland'); +INSERT INTO person VALUES (1093, 'peter white', 'cjbkbke@rtbye.com', '1188 2449 6471 5253', 'boise'); +INSERT INTO person VALUES (1094, 'kate smith', 'pbjnaxm@fbgld.com', '3054 4394 5921 6700', 'bend'); +INSERT INTO person VALUES (1095, 'luke spencer', 'iamwwkv@cujlu.com', '6643 2101 9195 1615', 'seattle'); +INSERT INTO person VALUES (1096, 'luke noris', 'amsxmdf@znzqj.com', '7291 3287 8055 7550', 'kent'); +INSERT INTO person VALUES (1097, 'walter abrams', 'djjgtgv@gdhku.com', '9089 0787 4194 7095', 'san 
francisco'); +INSERT INTO person VALUES (1098, 'kate spencer', 'suadlvi@makbh.com', '0823 4419 7875 1675', 'phoenix'); +INSERT INTO person VALUES (1099, 'sarah white', 'ynsyxew@rjjmk.com', '4049 9641 0911 0158', 'redmond'); diff --git a/integration_test/postgres-cdc/query.sql b/integration_test/postgres-cdc/query.sql new file mode 100644 index 0000000000000..6ea3da992e59c --- /dev/null +++ b/integration_test/postgres-cdc/query.sql @@ -0,0 +1,6 @@ +SELECT + * +FROM + city_population +LIMIT + 10; \ No newline at end of file diff --git a/integration_test/postgres-sink/README.md b/integration_test/postgres-sink/README.md new file mode 100644 index 0000000000000..3066b8d37e3f9 --- /dev/null +++ b/integration_test/postgres-sink/README.md @@ -0,0 +1,16 @@ +# HOW-TO + +This demo showcases how to sink data from RisingWave to an external Postgres. The data loader is included in the Docker Compose file, so data is loaded into Postgres automatically once the cluster is up. + +Here's what this demo does: + +1. `docker compose up -d`: Start the cluster. +2. After 20-30s: `create_source.sql`. +3. After 10s: `create_mv.sql`. +4. After another 10s, the tester checks that the source has ingested data by creating a materialized view on top of it, and that the MV created in step 3 contains data. + +To connect to the Postgres instance from your local machine: + +```sh +psql postgresql://myuser:123456@127.0.0.1:5432/mydb +``` + +A sample query for verifying the sink output is sketched as a comment in `docker-compose.yml`. diff --git a/integration_test/postgres-sink/create_mv.sql b/integration_test/postgres-sink/create_mv.sql new file mode 100644 index 0000000000000..e9bdce80d7749 --- /dev/null +++ b/integration_test/postgres-sink/create_mv.sql @@ -0,0 +1,16 @@ +CREATE MATERIALIZED VIEW target_count AS +SELECT + target_id, + COUNT(*) AS target_count +FROM + user_behaviors +GROUP BY + target_id; + +CREATE SINK target_count_postgres_sink +FROM + target_count WITH ( + connector = 'jdbc', + jdbc.url = 'jdbc:postgresql://postgres:5432/mydb?user=myuser&password=123456', + table.name = 'target_count' + ); \ No newline at end of file diff --git a/integration_test/postgres-sink/create_source.sql b/integration_test/postgres-sink/create_source.sql new file mode 100644 index 0000000000000..7a9e3d3add4c8 --- /dev/null +++ b/integration_test/postgres-sink/create_source.sql @@ -0,0 +1,14 @@ +CREATE SOURCE user_behaviors ( + user_id VARCHAR, + target_id VARCHAR, + target_type VARCHAR, + event_timestamp TIMESTAMPTZ, + behavior_type VARCHAR, + parent_target_type VARCHAR, + parent_target_id VARCHAR +) WITH ( + connector = 'kafka', + topic = 'user_behaviors', + properties.bootstrap.server = 'message_queue:29092', + scan.startup.mode = 'earliest' +) ROW FORMAT JSON; \ No newline at end of file diff --git a/integration_test/postgres-sink/data_check b/integration_test/postgres-sink/data_check new file mode 100644 index 0000000000000..3835eb979b86e --- /dev/null +++ b/integration_test/postgres-sink/data_check @@ -0,0 +1 @@ +user_behaviors,target_count \ No newline at end of file diff --git a/integration_test/postgres-sink/docker-compose.yml b/integration_test/postgres-sink/docker-compose.yml new file mode 100644 index 0000000000000..f0c619ced4d72 --- /dev/null +++ b/integration_test/postgres-sink/docker-compose.yml @@ -0,0 +1,94 @@ +--- +version: "3" +services: + compactor-0: + extends: + file: ../../docker/docker-compose.yml + service: compactor-0 + compute-node-0: + extends: + file: ../../docker/docker-compose.yml + service: compute-node-0 + etcd-0: + extends: + file: ../../docker/docker-compose.yml + service: etcd-0 + 
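+  # To verify that the sink is writing into Postgres, query the sink table
+  # directly (a sketch; `target_count` is created by postgres_prepare.sql):
+  #   docker exec postgres psql --username=myuser --dbname=mydb \
+  #     -c 'SELECT * FROM target_count ORDER BY target_count DESC LIMIT 10;'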
frontend-node-0: + extends: + file: ../../docker/docker-compose.yml + service: frontend-node-0 + grafana-0: + extends: + file: ../../docker/docker-compose.yml + service: grafana-0 + meta-node-0: + extends: + file: ../../docker/docker-compose.yml + service: meta-node-0 + minio-0: + extends: + file: ../../docker/docker-compose.yml + service: minio-0 + prometheus-0: + extends: + file: ../../docker/docker-compose.yml + service: prometheus-0 + message_queue: + extends: + file: ../../docker/docker-compose.yml + service: message_queue + datagen: + image: ghcr.io/risingwavelabs/demo-datagen:v1.0.9 + depends_on: [message_queue] + command: + - /bin/sh + - -c + - /datagen --mode clickstream --qps 2 kafka --brokers message_queue:29092 + restart: always + container_name: datagen + # Use this command to connect to the DB from outside the container: + # docker exec postgres psql --username=myuser --dbname=mydb + postgres: + image: postgres + environment: + - POSTGRES_USER=myuser + - POSTGRES_PASSWORD=123456 + - POSTGRES_DB=mydb + ports: + - 5432:5432 + healthcheck: + test: [ "CMD-SHELL", "pg_isready --username=myuser --dbname=mydb" ] + interval: 5s + timeout: 5s + retries: 5 + command: [ "postgres", "-c", "wal_level=logical" ] + restart: always + container_name: postgres + connector-node: + extends: + file: ../../docker/docker-compose.yml + service: connector-node + prepare_postgres: + image: postgres + depends_on: + - postgres + command: + - /bin/sh + - -c + - "psql postgresql://myuser:123456@postgres:5432/mydb < postgres_prepare.sql" + volumes: + - "./postgres_prepare.sql:/postgres_prepare.sql" + container_name: prepare_postgres + restart: on-failure +volumes: + compute-node-0: + external: false + etcd-0: + external: false + grafana-0: + external: false + minio-0: + external: false + prometheus-0: + external: false +name: risingwave-compose diff --git a/integration_test/postgres-sink/postgres_prepare.sql b/integration_test/postgres-sink/postgres_prepare.sql new file mode 100644 index 0000000000000..cac57c699a154 --- /dev/null +++ b/integration_test/postgres-sink/postgres_prepare.sql @@ -0,0 +1,4 @@ +CREATE TABLE target_count ( + target_id VARCHAR(128), + target_count BIGINT +); \ No newline at end of file diff --git a/integration_test/postgres-sink/query.sql b/integration_test/postgres-sink/query.sql new file mode 100644 index 0000000000000..e09c66a255f10 --- /dev/null +++ b/integration_test/postgres-sink/query.sql @@ -0,0 +1,6 @@ +SELECT + * +FROM + target_count +LIMIT + 10; \ No newline at end of file diff --git a/integration_test/prometheus/create_mv.sql b/integration_test/prometheus/create_mv.sql new file mode 100644 index 0000000000000..81be629ccdbea --- /dev/null +++ b/integration_test/prometheus/create_mv.sql @@ -0,0 +1,16 @@ +create materialized view metric_avg_30s as +select + name as metric_name, + window_start as metric_time, + avg(value :: decimal) as metric_value +from + tumble( + prometheus, + timestamp, + interval '30 s' + ) +group by + name, + window_start +order by + window_start; \ No newline at end of file diff --git a/integration_test/prometheus/create_source.sql b/integration_test/prometheus/create_source.sql new file mode 100644 index 0000000000000..b67e8676e00b4 --- /dev/null +++ b/integration_test/prometheus/create_source.sql @@ -0,0 +1,13 @@ +CREATE SOURCE prometheus ( + labels STRUCT < __name__ VARCHAR, + instance VARCHAR, + job VARCHAR >, + name VARCHAR, + timestamp TIMESTAMPTZ, + value VARCHAR +) WITH ( + connector = 'kafka', + topic = 'prometheus', + 
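+    -- The JSON records produced by prometheus-kafka-adapter carry `timestamp`,
+    -- `value`, `name`, and `labels` fields, matching the columns declared above.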
properties.bootstrap.server = 'message_queue:29092', + scan.startup.mode = 'earliest' +) ROW FORMAT JSON; \ No newline at end of file diff --git a/integration_test/prometheus/create_user.sql b/integration_test/prometheus/create_user.sql new file mode 100644 index 0000000000000..791e376a90916 --- /dev/null +++ b/integration_test/prometheus/create_user.sql @@ -0,0 +1,6 @@ +create user grafanareader with password 'password'; + +-- It is recommended to use a dedicated read-only user when querying the database using Grafana. +grant +select + on materialized view metric_avg_30s to grafanareader; \ No newline at end of file diff --git a/integration_test/prometheus/data_check b/integration_test/prometheus/data_check new file mode 100644 index 0000000000000..6a39c46f26f9c --- /dev/null +++ b/integration_test/prometheus/data_check @@ -0,0 +1 @@ +prometheus,metric_avg_30s \ No newline at end of file diff --git a/integration_test/prometheus/docker-compose.yml b/integration_test/prometheus/docker-compose.yml new file mode 100644 index 0000000000000..e716e567e2fd4 --- /dev/null +++ b/integration_test/prometheus/docker-compose.yml @@ -0,0 +1,94 @@ +--- +version: "3" +services: + compactor-0: + extends: + file: ../../docker/docker-compose.yml + service: compactor-0 + compute-node-0: + extends: + file: ../../docker/docker-compose.yml + service: compute-node-0 + etcd-0: + extends: + file: ../../docker/docker-compose.yml + service: etcd-0 + frontend-node-0: + extends: + file: ../../docker/docker-compose.yml + service: frontend-node-0 + grafana-0: + extends: + file: ../../docker/docker-compose.yml + service: grafana-0 + meta-node-0: + extends: + file: ../../docker/docker-compose.yml + service: meta-node-0 + minio-0: + extends: + file: ../../docker/docker-compose.yml + service: minio-0 + prometheus-0: + image: "prom/prometheus:latest" + command: + - "--config.file=/etc/prometheus/prometheus.yml" + - "--storage.tsdb.path=/prometheus" + - "--web.console.libraries=/usr/share/prometheus/console_libraries" + - "--web.console.templates=/usr/share/prometheus/consoles" + - "--web.listen-address=0.0.0.0:9500" + - "--storage.tsdb.retention.time=5m" # Use prometheus for short-term storage. 
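+      # Prometheus only buffers samples here for a short while; long-term
+      # storage is handled downstream. prometheus.yaml configures remote_write
+      # to prometheus-kafka-adapter, so samples flow into Kafka and from there
+      # into RisingWave.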
+ expose: + - "9500" + ports: + - "9500:9500" + depends_on: [] + volumes: + - "prometheus-0:/prometheus" + - "./prometheus.yaml:/etc/prometheus/prometheus.yml" + environment: {} + container_name: prometheus-0 + healthcheck: + test: + - CMD + - printf + - "" + - /dev/tcp/127.0.0.1/9500 + interval: 1s + timeout: 5s + retries: 5 + message_queue: + extends: + file: ../../docker/docker-compose.yml + service: message_queue + prometheus-kafka-adaptor: + image: "telefonica/prometheus-kafka-adapter:1.8.0" + expose: + - "9501" + ports: + - "9501:9501" + environment: + - KAFKA_BROKER_LIST=message_queue:29092 + - KAFKA_TOPIC=prometheus + - PORT=9501 + - GIN_MODE=release + - LOG_LEVEL=info + - SERIALIZATION_FORMAT=json + container_name: prometheus-kafka-adaptor + depends_on: + - prometheus-0 + - message_queue +volumes: + compute-node-0: + external: false + etcd-0: + external: false + grafana-0: + external: false + minio-0: + external: false + prometheus-0: + external: false + message_queue: + external: false +name: risingwave-compose diff --git a/integration_test/prometheus/prometheus.yaml b/integration_test/prometheus/prometheus.yaml new file mode 100644 index 0000000000000..8fef25a62dc17 --- /dev/null +++ b/integration_test/prometheus/prometheus.yaml @@ -0,0 +1,37 @@ +# --- THIS FILE IS AUTO GENERATED BY RISEDEV --- +global: + scrape_interval: 1s + evaluation_interval: 5s + +scrape_configs: + - job_name: prometheus + static_configs: + - targets: ["prometheus-0:9500"] + + - job_name: compute + static_configs: + - targets: ["compute-node-0:1222"] + + - job_name: meta + static_configs: + - targets: ["meta-node-0:1250"] + + - job_name: minio + metrics_path: /minio/v2/metrics/cluster + static_configs: + - targets: ["minio-0:9301"] + + - job_name: compactor + static_configs: + - targets: ["compactor-0:1260"] + + - job_name: etcd + static_configs: + - targets: ["etcd-0:2379"] + + - job_name: redpanda + static_configs: + - targets: ["redpanda:9644"] + +remote_write: + - url: http://prometheus-kafka-adaptor:9501/receive diff --git a/integration_test/prometheus/query.sql b/integration_test/prometheus/query.sql new file mode 100644 index 0000000000000..e81b4b4ec74f7 --- /dev/null +++ b/integration_test/prometheus/query.sql @@ -0,0 +1,8 @@ +select + * +from + metric_avg_30s +where + metric_name = 'object_store_read_bytes' +order by + metric_time; \ No newline at end of file diff --git a/integration_test/schema-registry/create_mv.sql b/integration_test/schema-registry/create_mv.sql new file mode 100644 index 0000000000000..7a02f5803cd4a --- /dev/null +++ b/integration_test/schema-registry/create_mv.sql @@ -0,0 +1,10 @@ +CREATE MATERIALIZED VIEW student_view AS +SELECT + id, + name, + avg_score, + age, + schema_version +FROM + student +WHERE age > 10; \ No newline at end of file diff --git a/integration_test/schema-registry/create_source.sql b/integration_test/schema-registry/create_source.sql new file mode 100644 index 0000000000000..d1e17540dd5df --- /dev/null +++ b/integration_test/schema-registry/create_source.sql @@ -0,0 +1,8 @@ +CREATE SOURCE student WITH ( + connector = 'kafka', + topic = 'sr-test', + properties.bootstrap.server = 'message_queue:29092', + scan.startup.mode = 'earliest' +) +ROW FORMAT avro message 'student' +row schema location confluent schema registry 'http://message_queue:8081'; \ No newline at end of file diff --git a/integration_test/schema-registry/data_check b/integration_test/schema-registry/data_check new file mode 100644 index 0000000000000..de11e3bcc0c0c --- /dev/null +++ 
b/integration_test/schema-registry/data_check @@ -0,0 +1 @@ +student_view \ No newline at end of file diff --git a/integration_test/schema-registry/datagen.py b/integration_test/schema-registry/datagen.py new file mode 100644 index 0000000000000..4ac7bd1537536 --- /dev/null +++ b/integration_test/schema-registry/datagen.py @@ -0,0 +1,144 @@ +from confluent_kafka import Producer +from confluent_kafka.admin import AdminClient, NewTopic +from confluent_kafka.serialization import StringSerializer, SerializationContext, MessageField +from confluent_kafka.schema_registry import SchemaRegistryClient +from confluent_kafka.schema_registry.avro import AvroSerializer +import sys +import random +import time + +# the two versions of the schema are compatible +schema_v1 = r''' +{ + "name": "student", + "type": "record", + "fields": [ + { + "name": "id", + "type": "int", + "default": 0 + }, + { + "name": "name", + "type": "string", + "default": "" + }, + { + "name": "avg_score", + "type": "double", + "default": 0.0 + }, + { + "name": "age", + "type": "int", + "default": 0 + }, + { + "name": "schema_version", + "type": "string", + "default": "" + } + ] +} +''' + +schema_v2 = r''' +{ + "name": "student", + "type": "record", + "fields": [ + { + "name": "id", + "type": "int", + "default": 0 + }, + { + "name": "name", + "type": "string", + "default": "" + }, + { + "name": "avg_score", + "type": "double", + "default": 0.0 + }, + { + "name": "age", + "type": "int", + "default": 0 + }, + { + "name": "facebook_id", + "type": "string", + "default": "" + }, + { + "name": "schema_version", + "type": "string", + "default": "" + } + ] +} +''' + +schemas = {'v1': schema_v1, 'v2': schema_v2} + + +def create_topic(kafka_conf, topic_name): + client = AdminClient(kafka_conf) + topic_list = [] + topic_list.append( + NewTopic(topic_name, num_partitions=1, replication_factor=1)) + client.create_topics(topic_list) + + +def get_basic_value(id): + return {'id': id, 'name': ''.join(random.sample('zyxwvutsrqponmlkjihgfedcba', 7)), 'avg_score': random.random() * 100, 'age': random.randint(10, 100)} + + +def get_value_and_serializer(id, version, schema_registry_client): + value = get_basic_value(id) + value['schema_version'] = version + if version == 'v2': + value['facebook_id'] = "12345678" + return value, AvroSerializer(schema_registry_client=schema_registry_client, schema_str=schemas[version]) + + +def delivery_report(err, msg): + if err is not None: + print("Delivery failed for User record {}: {}".format(msg.value(), err)) + return + + +if __name__ == '__main__': + if len(sys.argv) < 4: + print("datagen.py <broker_list> <schema_registry_url> <topic>") + sys.exit(1) + broker_list = sys.argv[1] + schema_registry_url = sys.argv[2] + topic = sys.argv[3] + + print("broker_list: {}".format(broker_list)) + print("schema_registry_url: {}".format(schema_registry_url)) + print("topic: {}".format(topic)) + + schema_registry_conf = {'url': schema_registry_url} + kafka_conf = {'bootstrap.servers': broker_list} + schema_registry_client = SchemaRegistryClient(schema_registry_conf) + + create_topic(kafka_conf=kafka_conf, topic_name=topic) + + id = 0 + while True: + for version in schemas.keys(): + id += 1 + value, avro_serializer = get_value_and_serializer( + id, version, schema_registry_client) + producer = Producer(kafka_conf) + producer.produce(topic=topic, partition=0, + value=avro_serializer( + value, SerializationContext(topic, MessageField.VALUE)), + on_delivery=delivery_report) + producer.flush() + if id % 100 == 0: + print("Sent {} records".format(id)) + time.sleep(0.05)
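A quick way to spot-check what datagen.py writes to the topic is a small consumer sketch. This is an assumption-laden example, not part of the demo: it assumes the same broker, registry, and topic as above, and a recent confluent_kafka where `AvroDeserializer` may omit `schema_str` and fall back to each record's writer schema fetched from the registry, so both v1 and v2 records decode:

```python
from confluent_kafka import Consumer
from confluent_kafka.serialization import SerializationContext, MessageField
from confluent_kafka.schema_registry import SchemaRegistryClient
from confluent_kafka.schema_registry.avro import AvroDeserializer

# Deserializer without an explicit reader schema: the writer's schema is
# looked up in the registry for each record.
sr_client = SchemaRegistryClient({'url': 'http://message_queue:8081'})
deserializer = AvroDeserializer(schema_registry_client=sr_client)

consumer = Consumer({
    'bootstrap.servers': 'message_queue:29092',
    'group.id': 'sr-test-spot-check',
    'auto.offset.reset': 'earliest',
})
consumer.subscribe(['sr-test'])

while True:
    msg = consumer.poll(1.0)
    if msg is None or msg.error():
        continue
    value = deserializer(msg.value(),
                         SerializationContext(msg.topic(), MessageField.VALUE))
    print(value)  # dicts with id, name, avg_score, age, schema_version, ...
```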
diff --git a/integration_test/schema-registry/docker-compose.yml b/integration_test/schema-registry/docker-compose.yml new file mode 100644 index 0000000000000..df36671a4a336 --- /dev/null +++ b/integration_test/schema-registry/docker-compose.yml @@ -0,0 +1,66 @@ +--- +version: "3" +services: + compactor-0: + extends: + file: ../../docker/docker-compose.yml + service: compactor-0 + compute-node-0: + extends: + file: ../../docker/docker-compose.yml + service: compute-node-0 + etcd-0: + extends: + file: ../../docker/docker-compose.yml + service: etcd-0 + frontend-node-0: + extends: + file: ../../docker/docker-compose.yml + service: frontend-node-0 + grafana-0: + extends: + file: ../../docker/docker-compose.yml + service: grafana-0 + meta-node-0: + extends: + file: ../../docker/docker-compose.yml + service: meta-node-0 + minio-0: + extends: + file: ../../docker/docker-compose.yml + service: minio-0 + prometheus-0: + extends: + file: ../../docker/docker-compose.yml + service: prometheus-0 + message_queue: + extends: + file: ../../docker/docker-compose.yml + service: message_queue + datagen: + image: python:3.10 + depends_on: [ message_queue ] + volumes: + - type: bind + source: ./datagen.py + target: /datagen.py + command: + - /bin/sh + - -c + - | + pip install requests fastavro confluent_kafka + python /datagen.py message_queue:29092 http://message_queue:8081 sr-test + restart: always + container_name: datagen +volumes: + compute-node-0: + external: false + etcd-0: + external: false + grafana-0: + external: false + minio-0: + external: false + prometheus-0: + external: false +name: risingwave-compose diff --git a/integration_test/schema-registry/query.sql b/integration_test/schema-registry/query.sql new file mode 100644 index 0000000000000..95e60697428f1 --- /dev/null +++ b/integration_test/schema-registry/query.sql @@ -0,0 +1,6 @@ +SELECT + * +FROM + student_view +LIMIT + 10; diff --git a/integration_test/schema-registry/readme.md b/integration_test/schema-registry/readme.md new file mode 100644 index 0000000000000..6c1c53c26e0a4 --- /dev/null +++ b/integration_test/schema-registry/readme.md @@ -0,0 +1,96 @@ +This demo shows how to ingest Avro data into RisingWave with [Schema Registry](https://github.com/confluentinc/schema-registry), which manages multiple versions of Avro schemas. + +At the beginning, there's a datagen process that ingests Avro data into Redpanda (a Kafka-compatible message queue). The Avro schema is as follows: + +- **Version 1** + + ```json + { + "name": "student", + "type": "record", + "fields": [ + { + "name": "id", + "type": "int", + "default": 0 + }, + { + "name": "name", + "type": "string", + "default": "" + }, + { + "name": "avg_score", + "type": "double", + "default": 0.0 + }, + { + "name": "age", + "type": "int", + "default": 0 + }, + { + "name": "schema_version", + "type": "string", + "default": "" + } + ] + } + ``` + + +- **Version 2** + + ```json + { + "name": "student", + "type": "record", + "fields": [ + { + "name": "id", + "type": "int", + "default": 0 + }, + { + "name": "name", + "type": "string", + "default": "" + }, + { + "name": "avg_score", + "type": "double", + "default": 0.0 + }, + { + "name": "age", + "type": "int", + "default": 0 + }, + { + "name": "facebook_id", + "type": "string", + "default": "" + }, + { + "name": "schema_version", + "type": "string", + "default": "" + } + ] + } + ``` + +As shown above, there are two versions of the schema. The new version adds a field `facebook_id` with a default value, so it is backward-compatible with the old version. The generator picks one of the two versions at random (50/50) for each record. 
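+
+This compatibility claim can also be checked against the registry itself. The following is a sketch, not part of the demo: it assumes the demo's registry address, the default `<topic>-value` subject naming, that `schema_v1`/`schema_v2` hold the two JSON documents above, and a recent `confluent_kafka` client that provides `test_compatibility`:
+
+```python
+from confluent_kafka.schema_registry import Schema, SchemaRegistryClient
+
+client = SchemaRegistryClient({'url': 'http://message_queue:8081'})
+
+# Register v1 under the subject, then ask whether v2 would be accepted
+# under the subject's compatibility rules (BACKWARD by default).
+client.register_schema('sr-test-value', Schema(schema_v1, 'AVRO'))
+ok = client.test_compatibility('sr-test-value', Schema(schema_v2, 'AVRO'))
+print(ok)  # True: the added facebook_id field has a default
+```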
+ +Then, this demo will connect RisingWave to the message queue. Here we specify the address as `confluent schema registry 'http://message_queue:8081'`. The final CREATE SOURCE statement is as follows: + +```sql +CREATE SOURCE student WITH ( + connector = 'kafka', + topic = 'sr-test', + properties.bootstrap.server = 'message_queue:29092', + scan.startup.mode = 'earliest' +) +ROW FORMAT avro message 'student' +row schema location confluent schema registry 'http://message_queue:8081'; +``` diff --git a/integration_test/superset/create_mv.sql b/integration_test/superset/create_mv.sql new file mode 100644 index 0000000000000..99f4f09cff42f --- /dev/null +++ b/integration_test/superset/create_mv.sql @@ -0,0 +1,13 @@ +-- A real-time dashboard of the total UV. +CREATE MATERIALIZED VIEW total_user_visit_1min AS +SELECT + window_start AS report_ts, + COUNT(DISTINCT user_id) as uv +FROM + TUMBLE( + live_stream_metrics, + report_timestamp, + INTERVAL '1' MINUTE + ) +GROUP BY + window_start; \ No newline at end of file diff --git a/integration_test/superset/create_source.sql b/integration_test/superset/create_source.sql new file mode 100644 index 0000000000000..3e230af07f1b0 --- /dev/null +++ b/integration_test/superset/create_source.sql @@ -0,0 +1,26 @@ +CREATE SOURCE live_stream_metrics ( + client_ip VARCHAR, + user_agent VARCHAR, + user_id VARCHAR, + -- The live room. + room_id VARCHAR, + -- Sent bits per second. + video_bps BIGINT, + -- Sent frames per second. Typically 30 fps. + video_fps BIGINT, + -- Round-trip time (in ms). 200ms is recommended. + video_rtt BIGINT, + -- Lost packets per second. + video_lost_pps BIGINT, + -- How long was the longest freeze (in ms). + video_longest_freeze_duration BIGINT, + -- Total freeze duration. 
+ video_total_freeze_duration BIGINT, + report_timestamp TIMESTAMPTZ, + country VARCHAR +) WITH ( + connector = 'kafka', + topic = 'live_stream_metrics', + properties.bootstrap.server = 'message_queue:29092', + scan.startup.mode = 'earliest' +) ROW FORMAT JSON; \ No newline at end of file diff --git a/integration_test/superset/docker-compose.yml b/integration_test/superset/docker-compose.yml new file mode 100644 index 0000000000000..34b1f14330c7b --- /dev/null +++ b/integration_test/superset/docker-compose.yml @@ -0,0 +1,135 @@ +x-superset-image: &superset-image apache/superset:${TAG:-latest-dev} +x-superset-depends-on: + &superset-depends-on + - db + - redis +x-superset-volumes: + # /app/pythonpath_docker will be appended to the PYTHONPATH in the final container + &superset-volumes + - ./docker:/app/docker + - superset_home:/app/superset_home + +version: "3.7" +services: + compactor-0: + extends: + file: ../../docker/docker-compose.yml + service: compactor-0 + compute-node-0: + extends: + file: ../../docker/docker-compose.yml + service: compute-node-0 + etcd-0: + extends: + file: ../../docker/docker-compose.yml + service: etcd-0 + frontend-node-0: + extends: + file: ../../docker/docker-compose.yml + service: frontend-node-0 + grafana-0: + extends: + file: ../../docker/docker-compose.yml + service: grafana-0 + meta-node-0: + extends: + file: ../../docker/docker-compose.yml + service: meta-node-0 + minio-0: + extends: + file: ../../docker/docker-compose.yml + service: minio-0 + prometheus-0: + extends: + file: ../../docker/docker-compose.yml + service: prometheus-0 + message_queue: + extends: + file: ../../docker/docker-compose.yml + service: message_queue + datagen: + image: ghcr.io/risingwavelabs/demo-datagen:v1.0.9 + depends_on: [message_queue] + command: + - /bin/sh + - -c + - /datagen --mode livestream --qps 2 kafka --brokers message_queue:29092 + restart: always + container_name: datagen + + # Superset-related services # + + redis: + image: redis:latest + container_name: superset_cache + restart: unless-stopped + volumes: + - redis:/data + + db: + env_file: docker/.env-non-dev + image: postgres:10 + container_name: superset_db + restart: unless-stopped + volumes: + - db_home:/var/lib/postgresql/data + + superset: + env_file: docker/.env-non-dev + image: *superset-image + container_name: superset_app + command: [ "/app/docker/docker-bootstrap.sh", "app-gunicorn" ] + user: "root" + restart: unless-stopped + ports: + - 8088:8088 + depends_on: *superset-depends-on + volumes: *superset-volumes + + superset-init: + image: *superset-image + container_name: superset_init + command: [ "/app/docker/docker-init.sh" ] + env_file: docker/.env-non-dev + depends_on: *superset-depends-on + user: "root" + volumes: *superset-volumes + + superset-worker: + image: *superset-image + container_name: superset_worker + command: [ "/app/docker/docker-bootstrap.sh", "worker" ] + env_file: docker/.env-non-dev + restart: unless-stopped + depends_on: *superset-depends-on + user: "root" + volumes: *superset-volumes + + superset-worker-beat: + image: *superset-image + container_name: superset_worker_beat + command: [ "/app/docker/docker-bootstrap.sh", "beat" ] + env_file: docker/.env-non-dev + restart: unless-stopped + depends_on: *superset-depends-on + user: "root" + volumes: *superset-volumes + +volumes: + superset_home: + external: false + db_home: + external: false + redis: + external: false + compute-node-0: + external: false + etcd-0: + external: false + grafana-0: + external: false + minio-0: + external: 
false + prometheus-0: + external: false +name: risingwave-compose diff --git a/integration_test/superset/docker/.env-non-dev b/integration_test/superset/docker/.env-non-dev new file mode 100644 index 0000000000000..c6e4739e90a9e --- /dev/null +++ b/integration_test/superset/docker/.env-non-dev @@ -0,0 +1,46 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +COMPOSE_PROJECT_NAME=superset + +# database configurations (do not modify) +DATABASE_DB=superset +DATABASE_HOST=db +DATABASE_PASSWORD=superset +DATABASE_USER=superset + +# database engine specific environment variables +# change the below if you prefer another database engine +DATABASE_PORT=5432 +DATABASE_DIALECT=postgresql +POSTGRES_DB=superset +POSTGRES_USER=superset +POSTGRES_PASSWORD=superset +#MYSQL_DATABASE=superset +#MYSQL_USER=superset +#MYSQL_PASSWORD=superset +#MYSQL_RANDOM_ROOT_PASSWORD=yes + +# Add the mapped-in /app/pythonpath_docker, which allows devs to override stuff +PYTHONPATH=/app/pythonpath:/app/docker/pythonpath_dev +REDIS_HOST=redis +REDIS_PORT=6379 + +FLASK_ENV=production +SUPERSET_ENV=production +SUPERSET_LOAD_EXAMPLES=no +CYPRESS_CONFIG=false +SUPERSET_PORT=8088 diff --git a/integration_test/superset/docker/docker-bootstrap.sh b/integration_test/superset/docker/docker-bootstrap.sh new file mode 100755 index 0000000000000..67e5294be5fdc --- /dev/null +++ b/integration_test/superset/docker/docker-bootstrap.sh @@ -0,0 +1,51 @@ +#!/usr/bin/env bash +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +set -eo pipefail + +REQUIREMENTS_LOCAL="/app/docker/requirements-local.txt" +# If Cypress run – overwrite the password for admin and export env variables +if [ "$CYPRESS_CONFIG" == "true" ]; then + export SUPERSET_CONFIG=tests.integration_tests.superset_test_config + export SUPERSET_TESTENV=true + export ENABLE_REACT_CRUD_VIEWS=true + export SUPERSET__SQLALCHEMY_DATABASE_URI=postgresql+psycopg2://superset:superset@db:5432/superset +fi +# +# Make sure we have dev requirements installed +# +if [ -f "${REQUIREMENTS_LOCAL}" ]; then + echo "Installing local overrides at ${REQUIREMENTS_LOCAL}" + pip install -r "${REQUIREMENTS_LOCAL}" +else + echo "Skipping local overrides" +fi + +if [[ "${1}" == "worker" ]]; then + echo "Starting Celery worker..." + celery --app=superset.tasks.celery_app:app worker -Ofair -l INFO +elif [[ "${1}" == "beat" ]]; then + echo "Starting Celery beat..." + celery --app=superset.tasks.celery_app:app beat --pidfile /tmp/celerybeat.pid -l INFO -s "${SUPERSET_HOME}"/celerybeat-schedule +elif [[ "${1}" == "app" ]]; then + echo "Starting web app..." + flask run -p 8088 --with-threads --reload --debugger --host=0.0.0.0 +elif [[ "${1}" == "app-gunicorn" ]]; then + echo "Starting web app..." + /usr/bin/run-server.sh +fi diff --git a/integration_test/superset/docker/docker-init.sh b/integration_test/superset/docker/docker-init.sh new file mode 100755 index 0000000000000..c928c1ba505af --- /dev/null +++ b/integration_test/superset/docker/docker-init.sh @@ -0,0 +1,79 @@ +#!/usr/bin/env bash +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +set -e + +# +# Always install local overrides first +# +/app/docker/docker-bootstrap.sh + +STEP_CNT=4 + +echo_step() { +cat <<EOF + +###################################################################### + +Init Step ${1}/${STEP_CNT} [${2}] -- ${3} + +###################################################################### + +EOF +} +ADMIN_PASSWORD="admin" +# If Cypress run – overwrite the password for admin and export env variables +if [ "$CYPRESS_CONFIG" == "true" ]; then + ADMIN_PASSWORD="general" + export SUPERSET_CONFIG=tests.integration_tests.superset_test_config + export SUPERSET_TESTENV=true + export ENABLE_REACT_CRUD_VIEWS=true + export SUPERSET__SQLALCHEMY_DATABASE_URI=postgresql+psycopg2://superset:superset@db:5432/superset +fi +# Initialize the database +echo_step "1" "Starting" "Applying DB migrations" +superset db upgrade +echo_step "1" "Complete" "Applying DB migrations" + +# Create an admin user +echo_step "2" "Starting" "Setting up admin user ( admin / $ADMIN_PASSWORD )" +superset fab create-admin \ + --username admin \ + --firstname Superset \ + --lastname Admin \ + --email admin@superset.com \ + --password $ADMIN_PASSWORD +echo_step "2" "Complete" "Setting up admin user" +# Create default roles and permissions +echo_step "3" "Starting" "Setting up roles and perms" +superset init +echo_step "3" "Complete" "Setting up roles and perms" + +if [ "$SUPERSET_LOAD_EXAMPLES" = "yes" ]; then + # Load some data to play with + echo_step "4" "Starting" "Loading examples" + if [ "$CYPRESS_CONFIG" == "true" ]; then + superset load_test_users + superset load_examples --load-test-data + else + superset load_examples --force + fi + echo_step "4" "Complete" "Loading examples" +fi diff --git a/integration_test/superset/docker/pythonpath_dev/superset_config.py b/integration_test/superset/docker/pythonpath_dev/superset_config.py new file mode 100644 --- /dev/null +++ b/integration_test/superset/docker/pythonpath_dev/superset_config.py @@ -0,0 +1,124 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import logging +import os +from typing import Optional + +from cachelib.file import FileSystemCache +from celery.schedules import crontab + +logger = logging.getLogger() + + +def get_env_variable(var_name: str, default: Optional[str] = None) -> str: + """Get the environment variable or raise exception.""" + try: + return os.environ[var_name] + except KeyError: + if default is not None: + return default + else: + error_msg = "The environment variable {} was missing, abort...".format( + var_name + ) + raise EnvironmentError(error_msg) + + +DATABASE_DIALECT = get_env_variable("DATABASE_DIALECT") +DATABASE_USER = get_env_variable("DATABASE_USER") +DATABASE_PASSWORD = get_env_variable("DATABASE_PASSWORD") +DATABASE_HOST = get_env_variable("DATABASE_HOST") +DATABASE_PORT = get_env_variable("DATABASE_PORT") +DATABASE_DB = get_env_variable("DATABASE_DB") + +# The SQLAlchemy connection string. 
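+# For example, with the values from docker/.env-non-dev this resolves to
+# postgresql://superset:superset@db:5432/superset.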
+SQLALCHEMY_DATABASE_URI = "%s://%s:%s@%s:%s/%s" % ( + DATABASE_DIALECT, + DATABASE_USER, + DATABASE_PASSWORD, + DATABASE_HOST, + DATABASE_PORT, + DATABASE_DB, +) + +REDIS_HOST = get_env_variable("REDIS_HOST") +REDIS_PORT = get_env_variable("REDIS_PORT") +REDIS_CELERY_DB = get_env_variable("REDIS_CELERY_DB", "0") +REDIS_RESULTS_DB = get_env_variable("REDIS_RESULTS_DB", "1") + +RESULTS_BACKEND = FileSystemCache("/app/superset_home/sqllab") + +CACHE_CONFIG = { + "CACHE_TYPE": "redis", + "CACHE_DEFAULT_TIMEOUT": 300, + "CACHE_KEY_PREFIX": "superset_", + "CACHE_REDIS_HOST": REDIS_HOST, + "CACHE_REDIS_PORT": REDIS_PORT, + "CACHE_REDIS_DB": REDIS_RESULTS_DB, +} +DATA_CACHE_CONFIG = CACHE_CONFIG + + +class CeleryConfig(object): + BROKER_URL = f"redis://{REDIS_HOST}:{REDIS_PORT}/{REDIS_CELERY_DB}" + CELERY_IMPORTS = ("superset.sql_lab", "superset.tasks") + CELERY_RESULT_BACKEND = f"redis://{REDIS_HOST}:{REDIS_PORT}/{REDIS_RESULTS_DB}" + CELERYD_LOG_LEVEL = "DEBUG" + CELERYD_PREFETCH_MULTIPLIER = 1 + CELERY_ACKS_LATE = False + CELERYBEAT_SCHEDULE = { + "reports.scheduler": { + "task": "reports.scheduler", + "schedule": crontab(minute="*", hour="*"), + }, + "reports.prune_log": { + "task": "reports.prune_log", + "schedule": crontab(minute=10, hour=0), + }, + } + + +CELERY_CONFIG = CeleryConfig + +FEATURE_FLAGS = {"ALERT_REPORTS": True} +ALERT_REPORTS_NOTIFICATION_DRY_RUN = True +WEBDRIVER_BASEURL = "http://superset:8088/" +# The base URL for the email report hyperlinks. +WEBDRIVER_BASEURL_USER_FRIENDLY = WEBDRIVER_BASEURL + +SQLLAB_CTAS_NO_LIMIT = True + +# +# Optionally import superset_config_docker.py (which will have been included on +# the PYTHONPATH) in order to allow for local settings to be overridden +# +try: + import superset_config_docker + from superset_config_docker import * # noqa + + logger.info( + f"Loaded your Docker configuration at " f"[{superset_config_docker.__file__}]" + ) +except ImportError: + logger.info("Using default Docker config...") diff --git a/integration_test/superset/docker/requirements-local.txt b/integration_test/superset/docker/requirements-local.txt new file mode 100644 index 0000000000000..8af33f481367f --- /dev/null +++ b/integration_test/superset/docker/requirements-local.txt @@ -0,0 +1 @@ +sqlalchemy-risingwave diff --git a/integration_test/superset/docker/run-server.sh b/integration_test/superset/docker/run-server.sh new file mode 100644 index 0000000000000..064f47b9c2cbc --- /dev/null +++ b/integration_test/superset/docker/run-server.sh @@ -0,0 +1,33 @@ +#!/usr/bin/env bash +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# +HYPHEN_SYMBOL='-' + +gunicorn \ + --bind "${SUPERSET_BIND_ADDRESS:-0.0.0.0}:${SUPERSET_PORT:-8088}" \ + --access-logfile "${ACCESS_LOG_FILE:-$HYPHEN_SYMBOL}" \ + --error-logfile "${ERROR_LOG_FILE:-$HYPHEN_SYMBOL}" \ + --workers ${SERVER_WORKER_AMOUNT:-1} \ + --worker-class ${SERVER_WORKER_CLASS:-gthread} \ + --threads ${SERVER_THREADS_AMOUNT:-20} \ + --timeout ${GUNICORN_TIMEOUT:-60} \ + --keep-alive ${GUNICORN_KEEPALIVE:-2} \ + --limit-request-line ${SERVER_LIMIT_REQUEST_LINE:-0} \ + --limit-request-field_size ${SERVER_LIMIT_REQUEST_FIELD_SIZE:-0} \ + "${FLASK_APP}" diff --git a/integration_test/superset/query.sql b/integration_test/superset/query.sql new file mode 100644 index 0000000000000..16a7e1efa27a6 --- /dev/null +++ b/integration_test/superset/query.sql @@ -0,0 +1,6 @@ +SELECT + * +FROM + total_user_visit_1min +LIMIT + 1; \ No newline at end of file diff --git a/integration_test/twitter-pulsar/create_mv.sql b/integration_test/twitter-pulsar/create_mv.sql new file mode 100644 index 0000000000000..4f776516bd215 --- /dev/null +++ b/integration_test/twitter-pulsar/create_mv.sql @@ -0,0 +1,34 @@ +-- +-- Find the influencers +-- +CREATE MATERIALIZED VIEW influencer_tweets AS +SELECT + (author).id as author_id, + (data).text as tweet +FROM + twitter +WHERE + (author).followers > 5000 + AND (data).lang = 'English'; + +-- +-- Find the top 10 hottest hashtags. +-- +CREATE MATERIALIZED VIEW hot_hashtags AS WITH tags AS ( + SELECT + unnest(regexp_matches((data).text, '#\w+', 'g')) AS hashtag, + (data).created_at AS created_at + FROM + twitter +) +SELECT + hashtag, + COUNT(*) AS hashtag_occurrences, + window_start +FROM + TUMBLE(tags, created_at, INTERVAL '1 day') +GROUP BY + hashtag, + window_start +ORDER BY + hashtag_occurrences; diff --git a/integration_test/twitter-pulsar/create_source.sql b/integration_test/twitter-pulsar/create_source.sql new file mode 100644 index 0000000000000..98492b15f4e1b --- /dev/null +++ b/integration_test/twitter-pulsar/create_source.sql @@ -0,0 +1,19 @@ +-- +-- The Pulsar source version +-- +CREATE SOURCE twitter ( + data STRUCT < created_at TIMESTAMPTZ, + id VARCHAR, + text VARCHAR, + lang VARCHAR >, + author STRUCT < created_at TIMESTAMPTZ, + id VARCHAR, + name VARCHAR, + username VARCHAR, + followers INT > +) WITH ( + connector = 'pulsar', + pulsar.topic = 'twitter', + pulsar.admin.url = 'http://message_queue:8080', + pulsar.service.url = 'pulsar://message_queue:6650' +) ROW FORMAT JSON; diff --git a/integration_test/twitter-pulsar/docker-compose.yml b/integration_test/twitter-pulsar/docker-compose.yml new file mode 100644 index 0000000000000..ca2ee7e705685 --- /dev/null +++ b/integration_test/twitter-pulsar/docker-compose.yml @@ -0,0 +1,67 @@ +--- +version: "3" +services: + compactor-0: + extends: + file: ../../docker/docker-compose.yml + service: compactor-0 + compute-node-0: + extends: + file: ../../docker/docker-compose.yml + service: compute-node-0 + etcd-0: + extends: + file: ../../docker/docker-compose.yml + service: etcd-0 + frontend-node-0: + extends: + file: ../../docker/docker-compose.yml + service: frontend-node-0 + grafana-0: + extends: + file: ../../docker/docker-compose.yml + service: grafana-0 + meta-node-0: + extends: + file: ../../docker/docker-compose.yml + service: meta-node-0 + minio-0: + extends: + file: ../../docker/docker-compose.yml + service: minio-0 + prometheus-0: + extends: + file: ../../docker/docker-compose.yml + service: prometheus-0 + message_queue: + image: "apachepulsar/pulsar:2.9.1" + command: bin/pulsar standalone + 
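+    # Pulsar runs in standalone mode: port 6650 serves the binary protocol
+    # (pulsar://) used by the datagen below, and 8080 serves the admin/HTTP
+    # API referenced in create_source.sql.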
ports: + - 8080:8080 + - 6650:6650 + hostname: message_queue + container_name: message_queue + stop_grace_period: 2s + datagen: + image: ghcr.io/risingwavelabs/demo-datagen:v1.0.9 + depends_on: [message_queue] + command: + - /bin/sh + - -c + - /datagen --mode twitter --qps 2 pulsar --brokers message_queue:6650 + restart: always + container_name: datagen +volumes: + compute-node-0: + external: false + etcd-0: + external: false + grafana-0: + external: false + minio-0: + external: false + prometheus-0: + external: false + message_queue: + external: false +name: risingwave-compose diff --git a/integration_test/twitter-pulsar/query.sql b/integration_test/twitter-pulsar/query.sql new file mode 100644 index 0000000000000..3e64784a04aa7 --- /dev/null +++ b/integration_test/twitter-pulsar/query.sql @@ -0,0 +1,8 @@ +SELECT + * +FROM + hot_hashtags +ORDER BY + hashtag_occurrences DESC +LIMIT + 10; \ No newline at end of file diff --git a/integration_test/twitter/avro.json b/integration_test/twitter/avro.json new file mode 100644 index 0000000000000..650685d3ee03a --- /dev/null +++ b/integration_test/twitter/avro.json @@ -0,0 +1,27 @@ +{ + "type": "record", + "name": "Event", + "fields": [ + { + "name": "data", + "type": "record", + "fields": [ + { "name": "id", "type": "string" }, + { "name": "text", "type": "string" }, + { "name": "lang", "type": "string" }, + { "name": "created_at", "type": "string" } + ] + }, + { + "name": "author", + "type": "record", + "fields": [ + { "name": "id", "type": "string" }, + { "name": "name", "type": "string" }, + { "name": "username", "type": "string" }, + { "name": "created_at", "type": "string" }, + { "name": "followers", "type": "long" } + ] + } + ] +} diff --git a/integration_test/twitter/avro/create_mv.sql b/integration_test/twitter/avro/create_mv.sql new file mode 100644 index 0000000000000..06d2eb14e4074 --- /dev/null +++ b/integration_test/twitter/avro/create_mv.sql @@ -0,0 +1,21 @@ +-- +-- Find the top 10 hottest hashtags. +-- +CREATE MATERIALIZED VIEW hot_hashtags AS WITH tags AS ( + SELECT + unnest(regexp_matches((data).text, '#\w+', 'g')) AS hashtag, + (data).created_at :: timestamptz AS created_at + FROM + twitter +) +SELECT + hashtag, + COUNT(*) AS hashtag_occurrences, + window_start +FROM + TUMBLE(tags, created_at, INTERVAL '1 day') +GROUP BY + hashtag, + window_start +ORDER BY + hashtag_occurrences; \ No newline at end of file diff --git a/integration_test/twitter/avro/create_source.sql b/integration_test/twitter/avro/create_source.sql new file mode 100644 index 0000000000000..b0e53b3c1a54a --- /dev/null +++ b/integration_test/twitter/avro/create_source.sql @@ -0,0 +1,6 @@ +CREATE SOURCE twitter WITH ( + connector = 'kafka', + topic = 'twitter', + properties.bootstrap.server = 'message_queue:29092', + scan.startup.mode = 'earliest' +) ROW FORMAT AVRO MESSAGE 'Event' ROW SCHEMA LOCATION 'http://file_server:8080/avro.json'; \ No newline at end of file diff --git a/integration_test/twitter/create_mv.sql b/integration_test/twitter/create_mv.sql new file mode 100644 index 0000000000000..5ad3f4fc3d92e --- /dev/null +++ b/integration_test/twitter/create_mv.sql @@ -0,0 +1,21 @@ +-- +-- Find the top 10 hottest hashtags. 
+-- +CREATE MATERIALIZED VIEW hot_hashtags AS WITH tags AS ( + SELECT + unnest(regexp_matches((data).text, '#\w+', 'g')) AS hashtag, + (data).created_at AS created_at + FROM + twitter +) +SELECT + hashtag, + COUNT(*) AS hashtag_occurrences, + window_start +FROM + TUMBLE(tags, created_at, INTERVAL '1 day') +GROUP BY + hashtag, + window_start +ORDER BY + hashtag_occurrences; \ No newline at end of file diff --git a/integration_test/twitter/create_source.sql b/integration_test/twitter/create_source.sql new file mode 100644 index 0000000000000..943ca51fd01f6 --- /dev/null +++ b/integration_test/twitter/create_source.sql @@ -0,0 +1,19 @@ +-- +-- The Kafka source version +-- +CREATE SOURCE twitter ( + data STRUCT < created_at TIMESTAMPTZ, + id VARCHAR, + text VARCHAR, + lang VARCHAR >, + author STRUCT < created_at TIMESTAMPTZ, + id VARCHAR, + name VARCHAR, + username VARCHAR, + followers INT > +) WITH ( + connector = 'kafka', + topic = 'twitter', + properties.bootstrap.server = 'message_queue:29092', + scan.startup.mode = 'earliest' +) ROW FORMAT JSON; \ No newline at end of file diff --git a/integration_test/twitter/data_check b/integration_test/twitter/data_check new file mode 100644 index 0000000000000..fae1c0be8dc4b --- /dev/null +++ b/integration_test/twitter/data_check @@ -0,0 +1 @@ +twitter,hot_hashtags \ No newline at end of file diff --git a/integration_test/twitter/docker-compose.yml b/integration_test/twitter/docker-compose.yml new file mode 100644 index 0000000000000..3cf397ba6fab8 --- /dev/null +++ b/integration_test/twitter/docker-compose.yml @@ -0,0 +1,60 @@ +--- +version: "3" +services: + compactor-0: + extends: + file: ../../docker/docker-compose.yml + service: compactor-0 + compute-node-0: + extends: + file: ../../docker/docker-compose.yml + service: compute-node-0 + etcd-0: + extends: + file: ../../docker/docker-compose.yml + service: etcd-0 + frontend-node-0: + extends: + file: ../../docker/docker-compose.yml + service: frontend-node-0 + grafana-0: + extends: + file: ../../docker/docker-compose.yml + service: grafana-0 + meta-node-0: + extends: + file: ../../docker/docker-compose.yml + service: meta-node-0 + minio-0: + extends: + file: ../../docker/docker-compose.yml + service: minio-0 + prometheus-0: + extends: + file: ../../docker/docker-compose.yml + service: prometheus-0 + message_queue: + extends: + file: ../../docker/docker-compose.yml + service: message_queue + datagen: + image: ghcr.io/risingwavelabs/demo-datagen:v1.0.9 + depends_on: [message_queue] + command: + - /bin/sh + - -c + - /datagen --mode twitter --qps 2 kafka --brokers message_queue:29092 + restart: always + container_name: datagen +volumes: + compute-node-0: + external: false + etcd-0: + external: false + grafana-0: + external: false + minio-0: + external: false + prometheus-0: + external: false +name: risingwave-compose diff --git a/integration_test/twitter/pb/create_mv.sql b/integration_test/twitter/pb/create_mv.sql new file mode 100644 index 0000000000000..c08722bacdbb3 --- /dev/null +++ b/integration_test/twitter/pb/create_mv.sql @@ -0,0 +1,21 @@ +-- +-- Find the top 10 hottest hashtags. 
+-- +CREATE MATERIALIZED VIEW hot_hashtags AS WITH tags AS ( + SELECT + unnest(regexp_matches((data).text, '#\w+', 'g')) AS hashtag, + (data).created_at :: timestamp AS created_at + FROM + twitter +) +SELECT + hashtag, + COUNT(*) AS hashtag_occurrences, + window_start +FROM + TUMBLE(tags, created_at, INTERVAL '1 day') +GROUP BY + hashtag, + window_start +ORDER BY + hashtag_occurrences; \ No newline at end of file diff --git a/integration_test/twitter/pb/create_source.sql b/integration_test/twitter/pb/create_source.sql new file mode 100644 index 0000000000000..de6e2c2320bf0 --- /dev/null +++ b/integration_test/twitter/pb/create_source.sql @@ -0,0 +1,6 @@ +CREATE SOURCE twitter WITH ( + connector = 'kafka', + topic = 'twitter', + properties.bootstrap.server = 'message_queue:29092', + scan.startup.mode = 'earliest' +) ROW FORMAT PROTOBUF MESSAGE 'twitter.schema.Event' ROW SCHEMA LOCATION 'http://file_server:8080/schema'; \ No newline at end of file diff --git a/integration_test/twitter/query.sql b/integration_test/twitter/query.sql new file mode 100644 index 0000000000000..3e64784a04aa7 --- /dev/null +++ b/integration_test/twitter/query.sql @@ -0,0 +1,8 @@ +SELECT + * +FROM + hot_hashtags +ORDER BY + hashtag_occurrences DESC +LIMIT + 10; \ No newline at end of file diff --git a/integration_test/twitter/schema b/integration_test/twitter/schema new file mode 100644 index 0000000000000..e4c57548e9a34 --- /dev/null +++ b/integration_test/twitter/schema @@ -0,0 +1,19 @@ + +‰ + twitter.prototwitter.schema"d +Event- +data ( 2.twitter.schema.TweetDataRdata, +author ( 2.twitter.schema.UserRauthor"b + TweetData +id ( Rid +text ( Rtext +lang ( Rlang + +created_at ( R createdAt"„ +User +id ( Rid +name ( Rname + user_name ( RuserName + +created_at ( R createdAt + followers (R followersBZ twitter/protobproto3 \ No newline at end of file diff --git a/integration_test/twitter/twitter.proto b/integration_test/twitter/twitter.proto new file mode 100644 index 0000000000000..14df4a932c714 --- /dev/null +++ b/integration_test/twitter/twitter.proto @@ -0,0 +1,24 @@ +syntax = "proto3"; + +package twitter.schema; +option go_package = "twitter/proto"; + +message Event { + TweetData data = 1; + User author = 2; +} + +message TweetData { + string id = 1; + string text = 2; + string lang = 3; + string created_at = 4; +} + +message User { + string id = 1; + string name = 2; + string user_name = 3; + string created_at = 4; + int64 followers = 5; +} From 690d97523ec6bdd0e017e6ce5644a667e613ade2 Mon Sep 17 00:00:00 2001 From: Eric Fu Date: Mon, 13 Mar 2023 06:58:12 +0000 Subject: [PATCH 02/12] build datagen image --- integration_test/ad-click/docker-compose.yml | 2 +- integration_test/ad-ctr/docker-compose.yml | 2 +- integration_test/cdn-metrics/docker-compose.yml | 2 +- integration_test/clickstream/docker-compose.yml | 2 +- integration_test/delivery/docker-compose.yml | 2 +- integration_test/livestream/docker-compose.yml | 2 +- integration_test/mysql-sink/docker-compose.yml | 2 +- integration_test/postgres-sink/docker-compose.yml | 2 +- integration_test/superset/docker-compose.yml | 2 +- integration_test/twitter-pulsar/docker-compose.yml | 2 +- integration_test/twitter/docker-compose.yml | 2 +- 11 files changed, 11 insertions(+), 11 deletions(-) diff --git a/integration_test/ad-click/docker-compose.yml b/integration_test/ad-click/docker-compose.yml index a6cd2f4fe1433..6f0f2f6a2cd9e 100644 --- a/integration_test/ad-click/docker-compose.yml +++ b/integration_test/ad-click/docker-compose.yml @@ -38,7 +38,7 @@ services: file: 
../../docker/docker-compose.yml service: message_queue datagen: - image: ghcr.io/risingwavelabs/demo-datagen:v1.0.9 + build: ../datagen depends_on: [message_queue] command: - /bin/sh diff --git a/integration_test/ad-ctr/docker-compose.yml b/integration_test/ad-ctr/docker-compose.yml index bcabd91987077..0105f5dcf683a 100644 --- a/integration_test/ad-ctr/docker-compose.yml +++ b/integration_test/ad-ctr/docker-compose.yml @@ -38,7 +38,7 @@ services: file: ../../docker/docker-compose.yml service: message_queue datagen: - image: ghcr.io/risingwavelabs/demo-datagen:v1.0.9 + build: ../datagen depends_on: [message_queue] command: - /bin/sh diff --git a/integration_test/cdn-metrics/docker-compose.yml b/integration_test/cdn-metrics/docker-compose.yml index bf1622c1bce32..4e2fb50d88168 100644 --- a/integration_test/cdn-metrics/docker-compose.yml +++ b/integration_test/cdn-metrics/docker-compose.yml @@ -38,7 +38,7 @@ services: file: ../../docker/docker-compose.yml service: message_queue datagen: - image: ghcr.io/risingwavelabs/demo-datagen:v1.0.9 + build: ../datagen depends_on: [message_queue] command: - /bin/sh diff --git a/integration_test/clickstream/docker-compose.yml b/integration_test/clickstream/docker-compose.yml index 9a8c28447137f..3a9d288b89c00 100644 --- a/integration_test/clickstream/docker-compose.yml +++ b/integration_test/clickstream/docker-compose.yml @@ -38,7 +38,7 @@ services: file: ../../docker/docker-compose.yml service: message_queue datagen: - image: ghcr.io/risingwavelabs/demo-datagen:v1.0.9 + build: ../datagen depends_on: [message_queue] command: - /bin/sh diff --git a/integration_test/delivery/docker-compose.yml b/integration_test/delivery/docker-compose.yml index 937209a68a275..dc93d884bc1b8 100644 --- a/integration_test/delivery/docker-compose.yml +++ b/integration_test/delivery/docker-compose.yml @@ -38,7 +38,7 @@ services: file: ../../docker/docker-compose.yml service: message_queue datagen: - image: ghcr.io/risingwavelabs/demo-datagen:v1.0.9 + build: ../datagen depends_on: [message_queue] command: - /bin/sh diff --git a/integration_test/livestream/docker-compose.yml b/integration_test/livestream/docker-compose.yml index 8e3eb3a870e3f..df1f39773d070 100644 --- a/integration_test/livestream/docker-compose.yml +++ b/integration_test/livestream/docker-compose.yml @@ -38,7 +38,7 @@ services: file: ../../docker/docker-compose.yml service: message_queue datagen: - image: ghcr.io/risingwavelabs/demo-datagen:v1.0.9 + build: ../datagen depends_on: [message_queue] command: - /bin/sh diff --git a/integration_test/mysql-sink/docker-compose.yml b/integration_test/mysql-sink/docker-compose.yml index 1c187a62eb3a0..c1aa340d4f2ff 100644 --- a/integration_test/mysql-sink/docker-compose.yml +++ b/integration_test/mysql-sink/docker-compose.yml @@ -57,7 +57,7 @@ services: file: ../../docker/docker-compose.yml service: connector-node datagen: - image: ghcr.io/risingwavelabs/demo-datagen:v1.0.9 + build: ../datagen depends_on: [message_queue] command: - /bin/sh diff --git a/integration_test/postgres-sink/docker-compose.yml b/integration_test/postgres-sink/docker-compose.yml index f0c619ced4d72..3d9e08d03a59d 100644 --- a/integration_test/postgres-sink/docker-compose.yml +++ b/integration_test/postgres-sink/docker-compose.yml @@ -38,7 +38,7 @@ services: file: ../../docker/docker-compose.yml service: message_queue datagen: - image: ghcr.io/risingwavelabs/demo-datagen:v1.0.9 + build: ../datagen depends_on: [message_queue] command: - /bin/sh diff --git 
a/integration_test/superset/docker-compose.yml b/integration_test/superset/docker-compose.yml index 34b1f14330c7b..21491a122e749 100644 --- a/integration_test/superset/docker-compose.yml +++ b/integration_test/superset/docker-compose.yml @@ -48,7 +48,7 @@ services: file: ../../docker/docker-compose.yml service: message_queue datagen: - image: ghcr.io/risingwavelabs/demo-datagen:v1.0.9 + build: ../datagen depends_on: [message_queue] command: - /bin/sh diff --git a/integration_test/twitter-pulsar/docker-compose.yml b/integration_test/twitter-pulsar/docker-compose.yml index ca2ee7e705685..cbc3df529533d 100644 --- a/integration_test/twitter-pulsar/docker-compose.yml +++ b/integration_test/twitter-pulsar/docker-compose.yml @@ -43,7 +43,7 @@ services: container_name: message_queue stop_grace_period: 2s datagen: - image: ghcr.io/risingwavelabs/demo-datagen:v1.0.9 + build: ../datagen depends_on: [message_queue] command: - /bin/sh diff --git a/integration_test/twitter/docker-compose.yml b/integration_test/twitter/docker-compose.yml index 3cf397ba6fab8..9887ba3818274 100644 --- a/integration_test/twitter/docker-compose.yml +++ b/integration_test/twitter/docker-compose.yml @@ -38,7 +38,7 @@ services: file: ../../docker/docker-compose.yml service: message_queue datagen: - image: ghcr.io/risingwavelabs/demo-datagen:v1.0.9 + build: ../datagen depends_on: [message_queue] command: - /bin/sh From 6806113ab4e3c0d870f84ee8e7689e63bc008ccc Mon Sep 17 00:00:00 2001 From: Eric Fu Date: Mon, 13 Mar 2023 07:00:37 +0000 Subject: [PATCH 03/12] rename folder --- {integration_test => integration_tests}/README.md | 0 {integration_test => integration_tests}/ad-click/create_mv.sql | 0 .../ad-click/create_source.sql | 0 {integration_test => integration_tests}/ad-click/data_check | 0 .../ad-click/docker-compose.yml | 0 {integration_test => integration_tests}/ad-click/query.sql | 0 {integration_test => integration_tests}/ad-ctr/create_mv.sql | 0 {integration_test => integration_tests}/ad-ctr/create_source.sql | 0 {integration_test => integration_tests}/ad-ctr/data_check | 0 {integration_test => integration_tests}/ad-ctr/docker-compose.yml | 0 {integration_test => integration_tests}/ad-ctr/query.sql | 0 {integration_test => integration_tests}/cdn-metrics/create_mv.sql | 0 .../cdn-metrics/create_source.sql | 0 {integration_test => integration_tests}/cdn-metrics/data_check | 0 .../cdn-metrics/docker-compose.yml | 0 {integration_test => integration_tests}/cdn-metrics/query.sql | 0 {integration_test => integration_tests}/clickstream/create_mv.sql | 0 .../clickstream/create_source.sql | 0 {integration_test => integration_tests}/clickstream/data_check | 0 .../clickstream/docker-compose.yml | 0 {integration_test => integration_tests}/clickstream/query.sql | 0 {integration_test => integration_tests}/datagen/.gitignore | 0 {integration_test => integration_tests}/datagen/.goreleaser.yaml | 0 {integration_test => integration_tests}/datagen/Dockerfile | 0 .../datagen/ad_click/ad_click.go | 0 {integration_test => integration_tests}/datagen/ad_ctr/ad_ctr.go | 0 .../datagen/cdn_metrics/cdn_metrics.go | 0 .../datagen/cdn_metrics/nics.go | 0 .../datagen/cdn_metrics/tcp.go | 0 .../datagen/clickstream/clickstream.go | 0 .../datagen/delivery/delivery.go | 0 .../datagen/ecommerce/ecommerce.go | 0 {integration_test => integration_tests}/datagen/gen/generator.go | 0 {integration_test => integration_tests}/datagen/go.mod | 0 {integration_test => integration_tests}/datagen/go.sum | 0 .../datagen/livestream/livestream.go | 0 
.../datagen/livestream/proto/livestream.pb.go | 0 {integration_test => integration_tests}/datagen/load_gen.go | 0 {integration_test => integration_tests}/datagen/main.go | 0 .../datagen/nexmark/auction.go | 0 .../datagen/sink/kafka/kafka.go | 0 .../datagen/sink/kinesis/kinesis.go | 0 .../datagen/sink/mysql/mysql.go | 0 .../datagen/sink/postgres/postgres.go | 0 .../datagen/sink/pulsar/pulsar.go | 0 {integration_test => integration_tests}/datagen/sink/sink.go | 0 {integration_test => integration_tests}/datagen/twitter/avro.go | 0 .../datagen/twitter/proto/twitter.pb.go | 0 .../datagen/twitter/twitter.go | 0 .../datagen/twitter/twitter_example.json | 0 {integration_test => integration_tests}/delivery/delivery.sql | 0 .../delivery/docker-compose.yml | 0 {integration_test => integration_tests}/ecommerce/ecommerce.sql | 0 {integration_test => integration_tests}/iceberg-sink/README.md | 0 .../iceberg-sink/create_mv.sql | 0 .../iceberg-sink/create_sink.sql | 0 .../iceberg-sink/create_source.sql | 0 .../iceberg-sink/docker-compose.yml | 0 .../iceberg-sink/iceberg-query.sql | 0 .../iceberg-sink/mysql_prepare.sql | 0 .../iceberg-sink/presto-with-iceberg/Dockerfile | 0 .../iceberg-sink/presto-with-iceberg/hadoop-catalog.xml | 0 .../iceberg-sink/presto-with-iceberg/iceberg.properties | 0 .../iceberg-sink/presto-with-iceberg/log.properties | 0 .../iceberg-sink/spark-script/.gitignore | 0 .../iceberg-sink/spark-script/create-table.sql | 0 .../iceberg-sink/spark-script/query-table.sql | 0 .../iceberg-sink/spark-script/run-sql-file.sh | 0 {integration_test => integration_tests}/livestream/create_mv.sql | 0 .../livestream/create_source.sql | 0 {integration_test => integration_tests}/livestream/data_check | 0 .../livestream/docker-compose.yml | 0 .../livestream/livestream.proto | 0 .../livestream/pb/create_mv.sql | 0 .../livestream/pb/create_source.sql | 0 {integration_test => integration_tests}/livestream/query.sql | 0 {integration_test => integration_tests}/livestream/schema | 0 {integration_test => integration_tests}/mysql-cdc/create_mv.sql | 0 .../mysql-cdc/create_source.sql | 0 {integration_test => integration_tests}/mysql-cdc/data_check | 0 .../mysql-cdc/docker-compose.yml | 0 .../mysql-cdc/mysql_prepare.sql | 0 {integration_test => integration_tests}/mysql-cdc/query.sql | 0 {integration_test => integration_tests}/mysql-sink/create_mv.sql | 0 .../mysql-sink/create_source.sql | 0 {integration_test => integration_tests}/mysql-sink/data_check | 0 .../mysql-sink/docker-compose.yml | 0 .../mysql-sink/mysql_prepare.sql | 0 {integration_test => integration_tests}/mysql-sink/query.sql | 0 .../postgres-cdc/create_mv.sql | 0 .../postgres-cdc/create_source.sql | 0 {integration_test => integration_tests}/postgres-cdc/data_check | 0 .../postgres-cdc/docker-compose.yml | 0 .../postgres-cdc/postgres_prepare.sql | 0 {integration_test => integration_tests}/postgres-cdc/query.sql | 0 {integration_test => integration_tests}/postgres-sink/README.md | 0 .../postgres-sink/create_mv.sql | 0 .../postgres-sink/create_source.sql | 0 {integration_test => integration_tests}/postgres-sink/data_check | 0 .../postgres-sink/docker-compose.yml | 0 .../postgres-sink/postgres_prepare.sql | 0 {integration_test => integration_tests}/postgres-sink/query.sql | 0 {integration_test => integration_tests}/prometheus/create_mv.sql | 0 .../prometheus/create_source.sql | 0 .../prometheus/create_user.sql | 0 {integration_test => integration_tests}/prometheus/data_check | 0 .../prometheus/docker-compose.yml | 0 .../prometheus/prometheus.yaml | 0 
{integration_test => integration_tests}/prometheus/query.sql | 0 .../schema-registry/create_mv.sql | 0 .../schema-registry/create_source.sql | 0 .../schema-registry/data_check | 0 .../schema-registry/datagen.py | 0 .../schema-registry/docker-compose.yml | 0 {integration_test => integration_tests}/schema-registry/query.sql | 0 {integration_test => integration_tests}/schema-registry/readme.md | 0 {integration_test => integration_tests}/superset/create_mv.sql | 0 .../superset/create_source.sql | 0 .../superset/docker-compose.yml | 0 .../superset/docker/.env-non-dev | 0 .../superset/docker/docker-bootstrap.sh | 0 .../superset/docker/docker-init.sh | 0 .../superset/docker/pythonpath_dev/.gitignore | 0 .../superset/docker/pythonpath_dev/superset_config.py | 0 .../superset/docker/requirements-local.txt | 0 .../superset/docker/run-server.sh | 0 {integration_test => integration_tests}/superset/query.sql | 0 .../twitter-pulsar/create_mv.sql | 0 .../twitter-pulsar/create_source.sql | 0 .../twitter-pulsar/docker-compose.yml | 0 {integration_test => integration_tests}/twitter-pulsar/query.sql | 0 {integration_test => integration_tests}/twitter/avro.json | 0 .../twitter/avro/create_mv.sql | 0 .../twitter/avro/create_source.sql | 0 {integration_test => integration_tests}/twitter/create_mv.sql | 0 {integration_test => integration_tests}/twitter/create_source.sql | 0 {integration_test => integration_tests}/twitter/data_check | 0 .../twitter/docker-compose.yml | 0 {integration_test => integration_tests}/twitter/pb/create_mv.sql | 0 .../twitter/pb/create_source.sql | 0 {integration_test => integration_tests}/twitter/query.sql | 0 {integration_test => integration_tests}/twitter/schema | 0 {integration_test => integration_tests}/twitter/twitter.proto | 0 143 files changed, 0 insertions(+), 0 deletions(-) rename {integration_test => integration_tests}/README.md (100%) rename {integration_test => integration_tests}/ad-click/create_mv.sql (100%) rename {integration_test => integration_tests}/ad-click/create_source.sql (100%) rename {integration_test => integration_tests}/ad-click/data_check (100%) rename {integration_test => integration_tests}/ad-click/docker-compose.yml (100%) rename {integration_test => integration_tests}/ad-click/query.sql (100%) rename {integration_test => integration_tests}/ad-ctr/create_mv.sql (100%) rename {integration_test => integration_tests}/ad-ctr/create_source.sql (100%) rename {integration_test => integration_tests}/ad-ctr/data_check (100%) rename {integration_test => integration_tests}/ad-ctr/docker-compose.yml (100%) rename {integration_test => integration_tests}/ad-ctr/query.sql (100%) rename {integration_test => integration_tests}/cdn-metrics/create_mv.sql (100%) rename {integration_test => integration_tests}/cdn-metrics/create_source.sql (100%) rename {integration_test => integration_tests}/cdn-metrics/data_check (100%) rename {integration_test => integration_tests}/cdn-metrics/docker-compose.yml (100%) rename {integration_test => integration_tests}/cdn-metrics/query.sql (100%) rename {integration_test => integration_tests}/clickstream/create_mv.sql (100%) rename {integration_test => integration_tests}/clickstream/create_source.sql (100%) rename {integration_test => integration_tests}/clickstream/data_check (100%) rename {integration_test => integration_tests}/clickstream/docker-compose.yml (100%) rename {integration_test => integration_tests}/clickstream/query.sql (100%) rename {integration_test => integration_tests}/datagen/.gitignore (100%) rename {integration_test => 
integration_tests}/datagen/.goreleaser.yaml (100%) rename {integration_test => integration_tests}/datagen/Dockerfile (100%) rename {integration_test => integration_tests}/datagen/ad_click/ad_click.go (100%) rename {integration_test => integration_tests}/datagen/ad_ctr/ad_ctr.go (100%) rename {integration_test => integration_tests}/datagen/cdn_metrics/cdn_metrics.go (100%) rename {integration_test => integration_tests}/datagen/cdn_metrics/nics.go (100%) rename {integration_test => integration_tests}/datagen/cdn_metrics/tcp.go (100%) rename {integration_test => integration_tests}/datagen/clickstream/clickstream.go (100%) rename {integration_test => integration_tests}/datagen/delivery/delivery.go (100%) rename {integration_test => integration_tests}/datagen/ecommerce/ecommerce.go (100%) rename {integration_test => integration_tests}/datagen/gen/generator.go (100%) rename {integration_test => integration_tests}/datagen/go.mod (100%) rename {integration_test => integration_tests}/datagen/go.sum (100%) rename {integration_test => integration_tests}/datagen/livestream/livestream.go (100%) rename {integration_test => integration_tests}/datagen/livestream/proto/livestream.pb.go (100%) rename {integration_test => integration_tests}/datagen/load_gen.go (100%) rename {integration_test => integration_tests}/datagen/main.go (100%) rename {integration_test => integration_tests}/datagen/nexmark/auction.go (100%) rename {integration_test => integration_tests}/datagen/sink/kafka/kafka.go (100%) rename {integration_test => integration_tests}/datagen/sink/kinesis/kinesis.go (100%) rename {integration_test => integration_tests}/datagen/sink/mysql/mysql.go (100%) rename {integration_test => integration_tests}/datagen/sink/postgres/postgres.go (100%) rename {integration_test => integration_tests}/datagen/sink/pulsar/pulsar.go (100%) rename {integration_test => integration_tests}/datagen/sink/sink.go (100%) rename {integration_test => integration_tests}/datagen/twitter/avro.go (100%) rename {integration_test => integration_tests}/datagen/twitter/proto/twitter.pb.go (100%) rename {integration_test => integration_tests}/datagen/twitter/twitter.go (100%) rename {integration_test => integration_tests}/datagen/twitter/twitter_example.json (100%) rename {integration_test => integration_tests}/delivery/delivery.sql (100%) rename {integration_test => integration_tests}/delivery/docker-compose.yml (100%) rename {integration_test => integration_tests}/ecommerce/ecommerce.sql (100%) rename {integration_test => integration_tests}/iceberg-sink/README.md (100%) rename {integration_test => integration_tests}/iceberg-sink/create_mv.sql (100%) rename {integration_test => integration_tests}/iceberg-sink/create_sink.sql (100%) rename {integration_test => integration_tests}/iceberg-sink/create_source.sql (100%) rename {integration_test => integration_tests}/iceberg-sink/docker-compose.yml (100%) rename {integration_test => integration_tests}/iceberg-sink/iceberg-query.sql (100%) rename {integration_test => integration_tests}/iceberg-sink/mysql_prepare.sql (100%) rename {integration_test => integration_tests}/iceberg-sink/presto-with-iceberg/Dockerfile (100%) rename {integration_test => integration_tests}/iceberg-sink/presto-with-iceberg/hadoop-catalog.xml (100%) rename {integration_test => integration_tests}/iceberg-sink/presto-with-iceberg/iceberg.properties (100%) rename {integration_test => integration_tests}/iceberg-sink/presto-with-iceberg/log.properties (100%) rename {integration_test => 
integration_tests}/iceberg-sink/spark-script/.gitignore (100%) rename {integration_test => integration_tests}/iceberg-sink/spark-script/create-table.sql (100%) rename {integration_test => integration_tests}/iceberg-sink/spark-script/query-table.sql (100%) rename {integration_test => integration_tests}/iceberg-sink/spark-script/run-sql-file.sh (100%) rename {integration_test => integration_tests}/livestream/create_mv.sql (100%) rename {integration_test => integration_tests}/livestream/create_source.sql (100%) rename {integration_test => integration_tests}/livestream/data_check (100%) rename {integration_test => integration_tests}/livestream/docker-compose.yml (100%) rename {integration_test => integration_tests}/livestream/livestream.proto (100%) rename {integration_test => integration_tests}/livestream/pb/create_mv.sql (100%) rename {integration_test => integration_tests}/livestream/pb/create_source.sql (100%) rename {integration_test => integration_tests}/livestream/query.sql (100%) rename {integration_test => integration_tests}/livestream/schema (100%) rename {integration_test => integration_tests}/mysql-cdc/create_mv.sql (100%) rename {integration_test => integration_tests}/mysql-cdc/create_source.sql (100%) rename {integration_test => integration_tests}/mysql-cdc/data_check (100%) rename {integration_test => integration_tests}/mysql-cdc/docker-compose.yml (100%) rename {integration_test => integration_tests}/mysql-cdc/mysql_prepare.sql (100%) rename {integration_test => integration_tests}/mysql-cdc/query.sql (100%) rename {integration_test => integration_tests}/mysql-sink/create_mv.sql (100%) rename {integration_test => integration_tests}/mysql-sink/create_source.sql (100%) rename {integration_test => integration_tests}/mysql-sink/data_check (100%) rename {integration_test => integration_tests}/mysql-sink/docker-compose.yml (100%) rename {integration_test => integration_tests}/mysql-sink/mysql_prepare.sql (100%) rename {integration_test => integration_tests}/mysql-sink/query.sql (100%) rename {integration_test => integration_tests}/postgres-cdc/create_mv.sql (100%) rename {integration_test => integration_tests}/postgres-cdc/create_source.sql (100%) rename {integration_test => integration_tests}/postgres-cdc/data_check (100%) rename {integration_test => integration_tests}/postgres-cdc/docker-compose.yml (100%) rename {integration_test => integration_tests}/postgres-cdc/postgres_prepare.sql (100%) rename {integration_test => integration_tests}/postgres-cdc/query.sql (100%) rename {integration_test => integration_tests}/postgres-sink/README.md (100%) rename {integration_test => integration_tests}/postgres-sink/create_mv.sql (100%) rename {integration_test => integration_tests}/postgres-sink/create_source.sql (100%) rename {integration_test => integration_tests}/postgres-sink/data_check (100%) rename {integration_test => integration_tests}/postgres-sink/docker-compose.yml (100%) rename {integration_test => integration_tests}/postgres-sink/postgres_prepare.sql (100%) rename {integration_test => integration_tests}/postgres-sink/query.sql (100%) rename {integration_test => integration_tests}/prometheus/create_mv.sql (100%) rename {integration_test => integration_tests}/prometheus/create_source.sql (100%) rename {integration_test => integration_tests}/prometheus/create_user.sql (100%) rename {integration_test => integration_tests}/prometheus/data_check (100%) rename {integration_test => integration_tests}/prometheus/docker-compose.yml (100%) rename {integration_test => 
integration_tests}/prometheus/prometheus.yaml (100%) rename {integration_test => integration_tests}/prometheus/query.sql (100%) rename {integration_test => integration_tests}/schema-registry/create_mv.sql (100%) rename {integration_test => integration_tests}/schema-registry/create_source.sql (100%) rename {integration_test => integration_tests}/schema-registry/data_check (100%) rename {integration_test => integration_tests}/schema-registry/datagen.py (100%) rename {integration_test => integration_tests}/schema-registry/docker-compose.yml (100%) rename {integration_test => integration_tests}/schema-registry/query.sql (100%) rename {integration_test => integration_tests}/schema-registry/readme.md (100%) rename {integration_test => integration_tests}/superset/create_mv.sql (100%) rename {integration_test => integration_tests}/superset/create_source.sql (100%) rename {integration_test => integration_tests}/superset/docker-compose.yml (100%) rename {integration_test => integration_tests}/superset/docker/.env-non-dev (100%) rename {integration_test => integration_tests}/superset/docker/docker-bootstrap.sh (100%) rename {integration_test => integration_tests}/superset/docker/docker-init.sh (100%) rename {integration_test => integration_tests}/superset/docker/pythonpath_dev/.gitignore (100%) rename {integration_test => integration_tests}/superset/docker/pythonpath_dev/superset_config.py (100%) rename {integration_test => integration_tests}/superset/docker/requirements-local.txt (100%) rename {integration_test => integration_tests}/superset/docker/run-server.sh (100%) rename {integration_test => integration_tests}/superset/query.sql (100%) rename {integration_test => integration_tests}/twitter-pulsar/create_mv.sql (100%) rename {integration_test => integration_tests}/twitter-pulsar/create_source.sql (100%) rename {integration_test => integration_tests}/twitter-pulsar/docker-compose.yml (100%) rename {integration_test => integration_tests}/twitter-pulsar/query.sql (100%) rename {integration_test => integration_tests}/twitter/avro.json (100%) rename {integration_test => integration_tests}/twitter/avro/create_mv.sql (100%) rename {integration_test => integration_tests}/twitter/avro/create_source.sql (100%) rename {integration_test => integration_tests}/twitter/create_mv.sql (100%) rename {integration_test => integration_tests}/twitter/create_source.sql (100%) rename {integration_test => integration_tests}/twitter/data_check (100%) rename {integration_test => integration_tests}/twitter/docker-compose.yml (100%) rename {integration_test => integration_tests}/twitter/pb/create_mv.sql (100%) rename {integration_test => integration_tests}/twitter/pb/create_source.sql (100%) rename {integration_test => integration_tests}/twitter/query.sql (100%) rename {integration_test => integration_tests}/twitter/schema (100%) rename {integration_test => integration_tests}/twitter/twitter.proto (100%) diff --git a/integration_test/README.md b/integration_tests/README.md similarity index 100% rename from integration_test/README.md rename to integration_tests/README.md diff --git a/integration_test/ad-click/create_mv.sql b/integration_tests/ad-click/create_mv.sql similarity index 100% rename from integration_test/ad-click/create_mv.sql rename to integration_tests/ad-click/create_mv.sql diff --git a/integration_test/ad-click/create_source.sql b/integration_tests/ad-click/create_source.sql similarity index 100% rename from integration_test/ad-click/create_source.sql rename to integration_tests/ad-click/create_source.sql diff 
--git a/integration_test/ad-click/data_check b/integration_tests/ad-click/data_check similarity index 100% rename from integration_test/ad-click/data_check rename to integration_tests/ad-click/data_check diff --git a/integration_test/ad-click/docker-compose.yml b/integration_tests/ad-click/docker-compose.yml similarity index 100% rename from integration_test/ad-click/docker-compose.yml rename to integration_tests/ad-click/docker-compose.yml diff --git a/integration_test/ad-click/query.sql b/integration_tests/ad-click/query.sql similarity index 100% rename from integration_test/ad-click/query.sql rename to integration_tests/ad-click/query.sql diff --git a/integration_test/ad-ctr/create_mv.sql b/integration_tests/ad-ctr/create_mv.sql similarity index 100% rename from integration_test/ad-ctr/create_mv.sql rename to integration_tests/ad-ctr/create_mv.sql diff --git a/integration_test/ad-ctr/create_source.sql b/integration_tests/ad-ctr/create_source.sql similarity index 100% rename from integration_test/ad-ctr/create_source.sql rename to integration_tests/ad-ctr/create_source.sql diff --git a/integration_test/ad-ctr/data_check b/integration_tests/ad-ctr/data_check similarity index 100% rename from integration_test/ad-ctr/data_check rename to integration_tests/ad-ctr/data_check diff --git a/integration_test/ad-ctr/docker-compose.yml b/integration_tests/ad-ctr/docker-compose.yml similarity index 100% rename from integration_test/ad-ctr/docker-compose.yml rename to integration_tests/ad-ctr/docker-compose.yml diff --git a/integration_test/ad-ctr/query.sql b/integration_tests/ad-ctr/query.sql similarity index 100% rename from integration_test/ad-ctr/query.sql rename to integration_tests/ad-ctr/query.sql diff --git a/integration_test/cdn-metrics/create_mv.sql b/integration_tests/cdn-metrics/create_mv.sql similarity index 100% rename from integration_test/cdn-metrics/create_mv.sql rename to integration_tests/cdn-metrics/create_mv.sql diff --git a/integration_test/cdn-metrics/create_source.sql b/integration_tests/cdn-metrics/create_source.sql similarity index 100% rename from integration_test/cdn-metrics/create_source.sql rename to integration_tests/cdn-metrics/create_source.sql diff --git a/integration_test/cdn-metrics/data_check b/integration_tests/cdn-metrics/data_check similarity index 100% rename from integration_test/cdn-metrics/data_check rename to integration_tests/cdn-metrics/data_check diff --git a/integration_test/cdn-metrics/docker-compose.yml b/integration_tests/cdn-metrics/docker-compose.yml similarity index 100% rename from integration_test/cdn-metrics/docker-compose.yml rename to integration_tests/cdn-metrics/docker-compose.yml diff --git a/integration_test/cdn-metrics/query.sql b/integration_tests/cdn-metrics/query.sql similarity index 100% rename from integration_test/cdn-metrics/query.sql rename to integration_tests/cdn-metrics/query.sql diff --git a/integration_test/clickstream/create_mv.sql b/integration_tests/clickstream/create_mv.sql similarity index 100% rename from integration_test/clickstream/create_mv.sql rename to integration_tests/clickstream/create_mv.sql diff --git a/integration_test/clickstream/create_source.sql b/integration_tests/clickstream/create_source.sql similarity index 100% rename from integration_test/clickstream/create_source.sql rename to integration_tests/clickstream/create_source.sql diff --git a/integration_test/clickstream/data_check b/integration_tests/clickstream/data_check similarity index 100% rename from integration_test/clickstream/data_check rename 
to integration_tests/clickstream/data_check diff --git a/integration_test/clickstream/docker-compose.yml b/integration_tests/clickstream/docker-compose.yml similarity index 100% rename from integration_test/clickstream/docker-compose.yml rename to integration_tests/clickstream/docker-compose.yml diff --git a/integration_test/clickstream/query.sql b/integration_tests/clickstream/query.sql similarity index 100% rename from integration_test/clickstream/query.sql rename to integration_tests/clickstream/query.sql diff --git a/integration_test/datagen/.gitignore b/integration_tests/datagen/.gitignore similarity index 100% rename from integration_test/datagen/.gitignore rename to integration_tests/datagen/.gitignore diff --git a/integration_test/datagen/.goreleaser.yaml b/integration_tests/datagen/.goreleaser.yaml similarity index 100% rename from integration_test/datagen/.goreleaser.yaml rename to integration_tests/datagen/.goreleaser.yaml diff --git a/integration_test/datagen/Dockerfile b/integration_tests/datagen/Dockerfile similarity index 100% rename from integration_test/datagen/Dockerfile rename to integration_tests/datagen/Dockerfile diff --git a/integration_test/datagen/ad_click/ad_click.go b/integration_tests/datagen/ad_click/ad_click.go similarity index 100% rename from integration_test/datagen/ad_click/ad_click.go rename to integration_tests/datagen/ad_click/ad_click.go diff --git a/integration_test/datagen/ad_ctr/ad_ctr.go b/integration_tests/datagen/ad_ctr/ad_ctr.go similarity index 100% rename from integration_test/datagen/ad_ctr/ad_ctr.go rename to integration_tests/datagen/ad_ctr/ad_ctr.go diff --git a/integration_test/datagen/cdn_metrics/cdn_metrics.go b/integration_tests/datagen/cdn_metrics/cdn_metrics.go similarity index 100% rename from integration_test/datagen/cdn_metrics/cdn_metrics.go rename to integration_tests/datagen/cdn_metrics/cdn_metrics.go diff --git a/integration_test/datagen/cdn_metrics/nics.go b/integration_tests/datagen/cdn_metrics/nics.go similarity index 100% rename from integration_test/datagen/cdn_metrics/nics.go rename to integration_tests/datagen/cdn_metrics/nics.go diff --git a/integration_test/datagen/cdn_metrics/tcp.go b/integration_tests/datagen/cdn_metrics/tcp.go similarity index 100% rename from integration_test/datagen/cdn_metrics/tcp.go rename to integration_tests/datagen/cdn_metrics/tcp.go diff --git a/integration_test/datagen/clickstream/clickstream.go b/integration_tests/datagen/clickstream/clickstream.go similarity index 100% rename from integration_test/datagen/clickstream/clickstream.go rename to integration_tests/datagen/clickstream/clickstream.go diff --git a/integration_test/datagen/delivery/delivery.go b/integration_tests/datagen/delivery/delivery.go similarity index 100% rename from integration_test/datagen/delivery/delivery.go rename to integration_tests/datagen/delivery/delivery.go diff --git a/integration_test/datagen/ecommerce/ecommerce.go b/integration_tests/datagen/ecommerce/ecommerce.go similarity index 100% rename from integration_test/datagen/ecommerce/ecommerce.go rename to integration_tests/datagen/ecommerce/ecommerce.go diff --git a/integration_test/datagen/gen/generator.go b/integration_tests/datagen/gen/generator.go similarity index 100% rename from integration_test/datagen/gen/generator.go rename to integration_tests/datagen/gen/generator.go diff --git a/integration_test/datagen/go.mod b/integration_tests/datagen/go.mod similarity index 100% rename from integration_test/datagen/go.mod rename to 
integration_tests/datagen/go.mod diff --git a/integration_test/datagen/go.sum b/integration_tests/datagen/go.sum similarity index 100% rename from integration_test/datagen/go.sum rename to integration_tests/datagen/go.sum diff --git a/integration_test/datagen/livestream/livestream.go b/integration_tests/datagen/livestream/livestream.go similarity index 100% rename from integration_test/datagen/livestream/livestream.go rename to integration_tests/datagen/livestream/livestream.go diff --git a/integration_test/datagen/livestream/proto/livestream.pb.go b/integration_tests/datagen/livestream/proto/livestream.pb.go similarity index 100% rename from integration_test/datagen/livestream/proto/livestream.pb.go rename to integration_tests/datagen/livestream/proto/livestream.pb.go diff --git a/integration_test/datagen/load_gen.go b/integration_tests/datagen/load_gen.go similarity index 100% rename from integration_test/datagen/load_gen.go rename to integration_tests/datagen/load_gen.go diff --git a/integration_test/datagen/main.go b/integration_tests/datagen/main.go similarity index 100% rename from integration_test/datagen/main.go rename to integration_tests/datagen/main.go diff --git a/integration_test/datagen/nexmark/auction.go b/integration_tests/datagen/nexmark/auction.go similarity index 100% rename from integration_test/datagen/nexmark/auction.go rename to integration_tests/datagen/nexmark/auction.go diff --git a/integration_test/datagen/sink/kafka/kafka.go b/integration_tests/datagen/sink/kafka/kafka.go similarity index 100% rename from integration_test/datagen/sink/kafka/kafka.go rename to integration_tests/datagen/sink/kafka/kafka.go diff --git a/integration_test/datagen/sink/kinesis/kinesis.go b/integration_tests/datagen/sink/kinesis/kinesis.go similarity index 100% rename from integration_test/datagen/sink/kinesis/kinesis.go rename to integration_tests/datagen/sink/kinesis/kinesis.go diff --git a/integration_test/datagen/sink/mysql/mysql.go b/integration_tests/datagen/sink/mysql/mysql.go similarity index 100% rename from integration_test/datagen/sink/mysql/mysql.go rename to integration_tests/datagen/sink/mysql/mysql.go diff --git a/integration_test/datagen/sink/postgres/postgres.go b/integration_tests/datagen/sink/postgres/postgres.go similarity index 100% rename from integration_test/datagen/sink/postgres/postgres.go rename to integration_tests/datagen/sink/postgres/postgres.go diff --git a/integration_test/datagen/sink/pulsar/pulsar.go b/integration_tests/datagen/sink/pulsar/pulsar.go similarity index 100% rename from integration_test/datagen/sink/pulsar/pulsar.go rename to integration_tests/datagen/sink/pulsar/pulsar.go diff --git a/integration_test/datagen/sink/sink.go b/integration_tests/datagen/sink/sink.go similarity index 100% rename from integration_test/datagen/sink/sink.go rename to integration_tests/datagen/sink/sink.go diff --git a/integration_test/datagen/twitter/avro.go b/integration_tests/datagen/twitter/avro.go similarity index 100% rename from integration_test/datagen/twitter/avro.go rename to integration_tests/datagen/twitter/avro.go diff --git a/integration_test/datagen/twitter/proto/twitter.pb.go b/integration_tests/datagen/twitter/proto/twitter.pb.go similarity index 100% rename from integration_test/datagen/twitter/proto/twitter.pb.go rename to integration_tests/datagen/twitter/proto/twitter.pb.go diff --git a/integration_test/datagen/twitter/twitter.go b/integration_tests/datagen/twitter/twitter.go similarity index 100% rename from 
integration_test/datagen/twitter/twitter.go rename to integration_tests/datagen/twitter/twitter.go diff --git a/integration_test/datagen/twitter/twitter_example.json b/integration_tests/datagen/twitter/twitter_example.json similarity index 100% rename from integration_test/datagen/twitter/twitter_example.json rename to integration_tests/datagen/twitter/twitter_example.json diff --git a/integration_test/delivery/delivery.sql b/integration_tests/delivery/delivery.sql similarity index 100% rename from integration_test/delivery/delivery.sql rename to integration_tests/delivery/delivery.sql diff --git a/integration_test/delivery/docker-compose.yml b/integration_tests/delivery/docker-compose.yml similarity index 100% rename from integration_test/delivery/docker-compose.yml rename to integration_tests/delivery/docker-compose.yml diff --git a/integration_test/ecommerce/ecommerce.sql b/integration_tests/ecommerce/ecommerce.sql similarity index 100% rename from integration_test/ecommerce/ecommerce.sql rename to integration_tests/ecommerce/ecommerce.sql diff --git a/integration_test/iceberg-sink/README.md b/integration_tests/iceberg-sink/README.md similarity index 100% rename from integration_test/iceberg-sink/README.md rename to integration_tests/iceberg-sink/README.md diff --git a/integration_test/iceberg-sink/create_mv.sql b/integration_tests/iceberg-sink/create_mv.sql similarity index 100% rename from integration_test/iceberg-sink/create_mv.sql rename to integration_tests/iceberg-sink/create_mv.sql diff --git a/integration_test/iceberg-sink/create_sink.sql b/integration_tests/iceberg-sink/create_sink.sql similarity index 100% rename from integration_test/iceberg-sink/create_sink.sql rename to integration_tests/iceberg-sink/create_sink.sql diff --git a/integration_test/iceberg-sink/create_source.sql b/integration_tests/iceberg-sink/create_source.sql similarity index 100% rename from integration_test/iceberg-sink/create_source.sql rename to integration_tests/iceberg-sink/create_source.sql diff --git a/integration_test/iceberg-sink/docker-compose.yml b/integration_tests/iceberg-sink/docker-compose.yml similarity index 100% rename from integration_test/iceberg-sink/docker-compose.yml rename to integration_tests/iceberg-sink/docker-compose.yml diff --git a/integration_test/iceberg-sink/iceberg-query.sql b/integration_tests/iceberg-sink/iceberg-query.sql similarity index 100% rename from integration_test/iceberg-sink/iceberg-query.sql rename to integration_tests/iceberg-sink/iceberg-query.sql diff --git a/integration_test/iceberg-sink/mysql_prepare.sql b/integration_tests/iceberg-sink/mysql_prepare.sql similarity index 100% rename from integration_test/iceberg-sink/mysql_prepare.sql rename to integration_tests/iceberg-sink/mysql_prepare.sql diff --git a/integration_test/iceberg-sink/presto-with-iceberg/Dockerfile b/integration_tests/iceberg-sink/presto-with-iceberg/Dockerfile similarity index 100% rename from integration_test/iceberg-sink/presto-with-iceberg/Dockerfile rename to integration_tests/iceberg-sink/presto-with-iceberg/Dockerfile diff --git a/integration_test/iceberg-sink/presto-with-iceberg/hadoop-catalog.xml b/integration_tests/iceberg-sink/presto-with-iceberg/hadoop-catalog.xml similarity index 100% rename from integration_test/iceberg-sink/presto-with-iceberg/hadoop-catalog.xml rename to integration_tests/iceberg-sink/presto-with-iceberg/hadoop-catalog.xml diff --git a/integration_test/iceberg-sink/presto-with-iceberg/iceberg.properties 
b/integration_tests/iceberg-sink/presto-with-iceberg/iceberg.properties similarity index 100% rename from integration_test/iceberg-sink/presto-with-iceberg/iceberg.properties rename to integration_tests/iceberg-sink/presto-with-iceberg/iceberg.properties diff --git a/integration_test/iceberg-sink/presto-with-iceberg/log.properties b/integration_tests/iceberg-sink/presto-with-iceberg/log.properties similarity index 100% rename from integration_test/iceberg-sink/presto-with-iceberg/log.properties rename to integration_tests/iceberg-sink/presto-with-iceberg/log.properties diff --git a/integration_test/iceberg-sink/spark-script/.gitignore b/integration_tests/iceberg-sink/spark-script/.gitignore similarity index 100% rename from integration_test/iceberg-sink/spark-script/.gitignore rename to integration_tests/iceberg-sink/spark-script/.gitignore diff --git a/integration_test/iceberg-sink/spark-script/create-table.sql b/integration_tests/iceberg-sink/spark-script/create-table.sql similarity index 100% rename from integration_test/iceberg-sink/spark-script/create-table.sql rename to integration_tests/iceberg-sink/spark-script/create-table.sql diff --git a/integration_test/iceberg-sink/spark-script/query-table.sql b/integration_tests/iceberg-sink/spark-script/query-table.sql similarity index 100% rename from integration_test/iceberg-sink/spark-script/query-table.sql rename to integration_tests/iceberg-sink/spark-script/query-table.sql diff --git a/integration_test/iceberg-sink/spark-script/run-sql-file.sh b/integration_tests/iceberg-sink/spark-script/run-sql-file.sh similarity index 100% rename from integration_test/iceberg-sink/spark-script/run-sql-file.sh rename to integration_tests/iceberg-sink/spark-script/run-sql-file.sh diff --git a/integration_test/livestream/create_mv.sql b/integration_tests/livestream/create_mv.sql similarity index 100% rename from integration_test/livestream/create_mv.sql rename to integration_tests/livestream/create_mv.sql diff --git a/integration_test/livestream/create_source.sql b/integration_tests/livestream/create_source.sql similarity index 100% rename from integration_test/livestream/create_source.sql rename to integration_tests/livestream/create_source.sql diff --git a/integration_test/livestream/data_check b/integration_tests/livestream/data_check similarity index 100% rename from integration_test/livestream/data_check rename to integration_tests/livestream/data_check diff --git a/integration_test/livestream/docker-compose.yml b/integration_tests/livestream/docker-compose.yml similarity index 100% rename from integration_test/livestream/docker-compose.yml rename to integration_tests/livestream/docker-compose.yml diff --git a/integration_test/livestream/livestream.proto b/integration_tests/livestream/livestream.proto similarity index 100% rename from integration_test/livestream/livestream.proto rename to integration_tests/livestream/livestream.proto diff --git a/integration_test/livestream/pb/create_mv.sql b/integration_tests/livestream/pb/create_mv.sql similarity index 100% rename from integration_test/livestream/pb/create_mv.sql rename to integration_tests/livestream/pb/create_mv.sql diff --git a/integration_test/livestream/pb/create_source.sql b/integration_tests/livestream/pb/create_source.sql similarity index 100% rename from integration_test/livestream/pb/create_source.sql rename to integration_tests/livestream/pb/create_source.sql diff --git a/integration_test/livestream/query.sql b/integration_tests/livestream/query.sql similarity index 100% rename from 
integration_test/livestream/query.sql rename to integration_tests/livestream/query.sql diff --git a/integration_test/livestream/schema b/integration_tests/livestream/schema similarity index 100% rename from integration_test/livestream/schema rename to integration_tests/livestream/schema diff --git a/integration_test/mysql-cdc/create_mv.sql b/integration_tests/mysql-cdc/create_mv.sql similarity index 100% rename from integration_test/mysql-cdc/create_mv.sql rename to integration_tests/mysql-cdc/create_mv.sql diff --git a/integration_test/mysql-cdc/create_source.sql b/integration_tests/mysql-cdc/create_source.sql similarity index 100% rename from integration_test/mysql-cdc/create_source.sql rename to integration_tests/mysql-cdc/create_source.sql diff --git a/integration_test/mysql-cdc/data_check b/integration_tests/mysql-cdc/data_check similarity index 100% rename from integration_test/mysql-cdc/data_check rename to integration_tests/mysql-cdc/data_check diff --git a/integration_test/mysql-cdc/docker-compose.yml b/integration_tests/mysql-cdc/docker-compose.yml similarity index 100% rename from integration_test/mysql-cdc/docker-compose.yml rename to integration_tests/mysql-cdc/docker-compose.yml diff --git a/integration_test/mysql-cdc/mysql_prepare.sql b/integration_tests/mysql-cdc/mysql_prepare.sql similarity index 100% rename from integration_test/mysql-cdc/mysql_prepare.sql rename to integration_tests/mysql-cdc/mysql_prepare.sql diff --git a/integration_test/mysql-cdc/query.sql b/integration_tests/mysql-cdc/query.sql similarity index 100% rename from integration_test/mysql-cdc/query.sql rename to integration_tests/mysql-cdc/query.sql diff --git a/integration_test/mysql-sink/create_mv.sql b/integration_tests/mysql-sink/create_mv.sql similarity index 100% rename from integration_test/mysql-sink/create_mv.sql rename to integration_tests/mysql-sink/create_mv.sql diff --git a/integration_test/mysql-sink/create_source.sql b/integration_tests/mysql-sink/create_source.sql similarity index 100% rename from integration_test/mysql-sink/create_source.sql rename to integration_tests/mysql-sink/create_source.sql diff --git a/integration_test/mysql-sink/data_check b/integration_tests/mysql-sink/data_check similarity index 100% rename from integration_test/mysql-sink/data_check rename to integration_tests/mysql-sink/data_check diff --git a/integration_test/mysql-sink/docker-compose.yml b/integration_tests/mysql-sink/docker-compose.yml similarity index 100% rename from integration_test/mysql-sink/docker-compose.yml rename to integration_tests/mysql-sink/docker-compose.yml diff --git a/integration_test/mysql-sink/mysql_prepare.sql b/integration_tests/mysql-sink/mysql_prepare.sql similarity index 100% rename from integration_test/mysql-sink/mysql_prepare.sql rename to integration_tests/mysql-sink/mysql_prepare.sql diff --git a/integration_test/mysql-sink/query.sql b/integration_tests/mysql-sink/query.sql similarity index 100% rename from integration_test/mysql-sink/query.sql rename to integration_tests/mysql-sink/query.sql diff --git a/integration_test/postgres-cdc/create_mv.sql b/integration_tests/postgres-cdc/create_mv.sql similarity index 100% rename from integration_test/postgres-cdc/create_mv.sql rename to integration_tests/postgres-cdc/create_mv.sql diff --git a/integration_test/postgres-cdc/create_source.sql b/integration_tests/postgres-cdc/create_source.sql similarity index 100% rename from integration_test/postgres-cdc/create_source.sql rename to integration_tests/postgres-cdc/create_source.sql diff 
--git a/integration_test/postgres-cdc/data_check b/integration_tests/postgres-cdc/data_check similarity index 100% rename from integration_test/postgres-cdc/data_check rename to integration_tests/postgres-cdc/data_check diff --git a/integration_test/postgres-cdc/docker-compose.yml b/integration_tests/postgres-cdc/docker-compose.yml similarity index 100% rename from integration_test/postgres-cdc/docker-compose.yml rename to integration_tests/postgres-cdc/docker-compose.yml diff --git a/integration_test/postgres-cdc/postgres_prepare.sql b/integration_tests/postgres-cdc/postgres_prepare.sql similarity index 100% rename from integration_test/postgres-cdc/postgres_prepare.sql rename to integration_tests/postgres-cdc/postgres_prepare.sql diff --git a/integration_test/postgres-cdc/query.sql b/integration_tests/postgres-cdc/query.sql similarity index 100% rename from integration_test/postgres-cdc/query.sql rename to integration_tests/postgres-cdc/query.sql diff --git a/integration_test/postgres-sink/README.md b/integration_tests/postgres-sink/README.md similarity index 100% rename from integration_test/postgres-sink/README.md rename to integration_tests/postgres-sink/README.md diff --git a/integration_test/postgres-sink/create_mv.sql b/integration_tests/postgres-sink/create_mv.sql similarity index 100% rename from integration_test/postgres-sink/create_mv.sql rename to integration_tests/postgres-sink/create_mv.sql diff --git a/integration_test/postgres-sink/create_source.sql b/integration_tests/postgres-sink/create_source.sql similarity index 100% rename from integration_test/postgres-sink/create_source.sql rename to integration_tests/postgres-sink/create_source.sql diff --git a/integration_test/postgres-sink/data_check b/integration_tests/postgres-sink/data_check similarity index 100% rename from integration_test/postgres-sink/data_check rename to integration_tests/postgres-sink/data_check diff --git a/integration_test/postgres-sink/docker-compose.yml b/integration_tests/postgres-sink/docker-compose.yml similarity index 100% rename from integration_test/postgres-sink/docker-compose.yml rename to integration_tests/postgres-sink/docker-compose.yml diff --git a/integration_test/postgres-sink/postgres_prepare.sql b/integration_tests/postgres-sink/postgres_prepare.sql similarity index 100% rename from integration_test/postgres-sink/postgres_prepare.sql rename to integration_tests/postgres-sink/postgres_prepare.sql diff --git a/integration_test/postgres-sink/query.sql b/integration_tests/postgres-sink/query.sql similarity index 100% rename from integration_test/postgres-sink/query.sql rename to integration_tests/postgres-sink/query.sql diff --git a/integration_test/prometheus/create_mv.sql b/integration_tests/prometheus/create_mv.sql similarity index 100% rename from integration_test/prometheus/create_mv.sql rename to integration_tests/prometheus/create_mv.sql diff --git a/integration_test/prometheus/create_source.sql b/integration_tests/prometheus/create_source.sql similarity index 100% rename from integration_test/prometheus/create_source.sql rename to integration_tests/prometheus/create_source.sql diff --git a/integration_test/prometheus/create_user.sql b/integration_tests/prometheus/create_user.sql similarity index 100% rename from integration_test/prometheus/create_user.sql rename to integration_tests/prometheus/create_user.sql diff --git a/integration_test/prometheus/data_check b/integration_tests/prometheus/data_check similarity index 100% rename from integration_test/prometheus/data_check rename 
to integration_tests/prometheus/data_check diff --git a/integration_test/prometheus/docker-compose.yml b/integration_tests/prometheus/docker-compose.yml similarity index 100% rename from integration_test/prometheus/docker-compose.yml rename to integration_tests/prometheus/docker-compose.yml diff --git a/integration_test/prometheus/prometheus.yaml b/integration_tests/prometheus/prometheus.yaml similarity index 100% rename from integration_test/prometheus/prometheus.yaml rename to integration_tests/prometheus/prometheus.yaml diff --git a/integration_test/prometheus/query.sql b/integration_tests/prometheus/query.sql similarity index 100% rename from integration_test/prometheus/query.sql rename to integration_tests/prometheus/query.sql diff --git a/integration_test/schema-registry/create_mv.sql b/integration_tests/schema-registry/create_mv.sql similarity index 100% rename from integration_test/schema-registry/create_mv.sql rename to integration_tests/schema-registry/create_mv.sql diff --git a/integration_test/schema-registry/create_source.sql b/integration_tests/schema-registry/create_source.sql similarity index 100% rename from integration_test/schema-registry/create_source.sql rename to integration_tests/schema-registry/create_source.sql diff --git a/integration_test/schema-registry/data_check b/integration_tests/schema-registry/data_check similarity index 100% rename from integration_test/schema-registry/data_check rename to integration_tests/schema-registry/data_check diff --git a/integration_test/schema-registry/datagen.py b/integration_tests/schema-registry/datagen.py similarity index 100% rename from integration_test/schema-registry/datagen.py rename to integration_tests/schema-registry/datagen.py diff --git a/integration_test/schema-registry/docker-compose.yml b/integration_tests/schema-registry/docker-compose.yml similarity index 100% rename from integration_test/schema-registry/docker-compose.yml rename to integration_tests/schema-registry/docker-compose.yml diff --git a/integration_test/schema-registry/query.sql b/integration_tests/schema-registry/query.sql similarity index 100% rename from integration_test/schema-registry/query.sql rename to integration_tests/schema-registry/query.sql diff --git a/integration_test/schema-registry/readme.md b/integration_tests/schema-registry/readme.md similarity index 100% rename from integration_test/schema-registry/readme.md rename to integration_tests/schema-registry/readme.md diff --git a/integration_test/superset/create_mv.sql b/integration_tests/superset/create_mv.sql similarity index 100% rename from integration_test/superset/create_mv.sql rename to integration_tests/superset/create_mv.sql diff --git a/integration_test/superset/create_source.sql b/integration_tests/superset/create_source.sql similarity index 100% rename from integration_test/superset/create_source.sql rename to integration_tests/superset/create_source.sql diff --git a/integration_test/superset/docker-compose.yml b/integration_tests/superset/docker-compose.yml similarity index 100% rename from integration_test/superset/docker-compose.yml rename to integration_tests/superset/docker-compose.yml diff --git a/integration_test/superset/docker/.env-non-dev b/integration_tests/superset/docker/.env-non-dev similarity index 100% rename from integration_test/superset/docker/.env-non-dev rename to integration_tests/superset/docker/.env-non-dev diff --git a/integration_test/superset/docker/docker-bootstrap.sh b/integration_tests/superset/docker/docker-bootstrap.sh similarity index 100% 
rename from integration_test/superset/docker/docker-bootstrap.sh rename to integration_tests/superset/docker/docker-bootstrap.sh diff --git a/integration_test/superset/docker/docker-init.sh b/integration_tests/superset/docker/docker-init.sh similarity index 100% rename from integration_test/superset/docker/docker-init.sh rename to integration_tests/superset/docker/docker-init.sh diff --git a/integration_test/superset/docker/pythonpath_dev/.gitignore b/integration_tests/superset/docker/pythonpath_dev/.gitignore similarity index 100% rename from integration_test/superset/docker/pythonpath_dev/.gitignore rename to integration_tests/superset/docker/pythonpath_dev/.gitignore diff --git a/integration_test/superset/docker/pythonpath_dev/superset_config.py b/integration_tests/superset/docker/pythonpath_dev/superset_config.py similarity index 100% rename from integration_test/superset/docker/pythonpath_dev/superset_config.py rename to integration_tests/superset/docker/pythonpath_dev/superset_config.py diff --git a/integration_test/superset/docker/requirements-local.txt b/integration_tests/superset/docker/requirements-local.txt similarity index 100% rename from integration_test/superset/docker/requirements-local.txt rename to integration_tests/superset/docker/requirements-local.txt diff --git a/integration_test/superset/docker/run-server.sh b/integration_tests/superset/docker/run-server.sh similarity index 100% rename from integration_test/superset/docker/run-server.sh rename to integration_tests/superset/docker/run-server.sh diff --git a/integration_test/superset/query.sql b/integration_tests/superset/query.sql similarity index 100% rename from integration_test/superset/query.sql rename to integration_tests/superset/query.sql diff --git a/integration_test/twitter-pulsar/create_mv.sql b/integration_tests/twitter-pulsar/create_mv.sql similarity index 100% rename from integration_test/twitter-pulsar/create_mv.sql rename to integration_tests/twitter-pulsar/create_mv.sql diff --git a/integration_test/twitter-pulsar/create_source.sql b/integration_tests/twitter-pulsar/create_source.sql similarity index 100% rename from integration_test/twitter-pulsar/create_source.sql rename to integration_tests/twitter-pulsar/create_source.sql diff --git a/integration_test/twitter-pulsar/docker-compose.yml b/integration_tests/twitter-pulsar/docker-compose.yml similarity index 100% rename from integration_test/twitter-pulsar/docker-compose.yml rename to integration_tests/twitter-pulsar/docker-compose.yml diff --git a/integration_test/twitter-pulsar/query.sql b/integration_tests/twitter-pulsar/query.sql similarity index 100% rename from integration_test/twitter-pulsar/query.sql rename to integration_tests/twitter-pulsar/query.sql diff --git a/integration_test/twitter/avro.json b/integration_tests/twitter/avro.json similarity index 100% rename from integration_test/twitter/avro.json rename to integration_tests/twitter/avro.json diff --git a/integration_test/twitter/avro/create_mv.sql b/integration_tests/twitter/avro/create_mv.sql similarity index 100% rename from integration_test/twitter/avro/create_mv.sql rename to integration_tests/twitter/avro/create_mv.sql diff --git a/integration_test/twitter/avro/create_source.sql b/integration_tests/twitter/avro/create_source.sql similarity index 100% rename from integration_test/twitter/avro/create_source.sql rename to integration_tests/twitter/avro/create_source.sql diff --git a/integration_test/twitter/create_mv.sql b/integration_tests/twitter/create_mv.sql similarity index 100% 
rename from integration_test/twitter/create_mv.sql rename to integration_tests/twitter/create_mv.sql diff --git a/integration_test/twitter/create_source.sql b/integration_tests/twitter/create_source.sql similarity index 100% rename from integration_test/twitter/create_source.sql rename to integration_tests/twitter/create_source.sql diff --git a/integration_test/twitter/data_check b/integration_tests/twitter/data_check similarity index 100% rename from integration_test/twitter/data_check rename to integration_tests/twitter/data_check diff --git a/integration_test/twitter/docker-compose.yml b/integration_tests/twitter/docker-compose.yml similarity index 100% rename from integration_test/twitter/docker-compose.yml rename to integration_tests/twitter/docker-compose.yml diff --git a/integration_test/twitter/pb/create_mv.sql b/integration_tests/twitter/pb/create_mv.sql similarity index 100% rename from integration_test/twitter/pb/create_mv.sql rename to integration_tests/twitter/pb/create_mv.sql diff --git a/integration_test/twitter/pb/create_source.sql b/integration_tests/twitter/pb/create_source.sql similarity index 100% rename from integration_test/twitter/pb/create_source.sql rename to integration_tests/twitter/pb/create_source.sql diff --git a/integration_test/twitter/query.sql b/integration_tests/twitter/query.sql similarity index 100% rename from integration_test/twitter/query.sql rename to integration_tests/twitter/query.sql diff --git a/integration_test/twitter/schema b/integration_tests/twitter/schema similarity index 100% rename from integration_test/twitter/schema rename to integration_tests/twitter/schema diff --git a/integration_test/twitter/twitter.proto b/integration_tests/twitter/twitter.proto similarity index 100% rename from integration_test/twitter/twitter.proto rename to integration_tests/twitter/twitter.proto From 8ed96b1014946938e89ee008ace88408483a2839 Mon Sep 17 00:00:00 2001 From: Eric Fu Date: Mon, 13 Mar 2023 07:13:49 +0000 Subject: [PATCH 04/12] check in CI pipeline and scripts --- .github/workflows/intergration_tests.yml | 108 +++++++++++++++++++ integration_tests/scripts/.gitignore | 4 + integration_tests/scripts/check_data.py | 49 +++++++++ integration_tests/scripts/gen_pb_compose.py | 48 +++++++++ integration_tests/scripts/run_demos.py | 111 ++++++++++++++++++++ 5 files changed, 320 insertions(+) create mode 100644 .github/workflows/intergration_tests.yml create mode 100644 integration_tests/scripts/.gitignore create mode 100644 integration_tests/scripts/check_data.py create mode 100644 integration_tests/scripts/gen_pb_compose.py create mode 100644 integration_tests/scripts/run_demos.py diff --git a/.github/workflows/intergration_tests.yml b/.github/workflows/intergration_tests.yml new file mode 100644 index 0000000000000..b8032c03f2c7c --- /dev/null +++ b/.github/workflows/intergration_tests.yml @@ -0,0 +1,108 @@ +name: Integration Tests CI + +on: + pull_request: + +jobs: + golangci: + name: lint + runs-on: ubuntu-latest + steps: + - uses: actions/setup-go@v3 + with: + go-version: 1.18 + - uses: actions/checkout@v3 + - name: golangci-lint + uses: golangci/golangci-lint-action@v3 + with: + working-directory: integration_tests/datagen + args: --timeout=120s + - name: Go build + run: | + go mod tidy + git diff --exit-code go.mod go.sum + go build . 
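+ # `git diff --exit-code go.mod go.sum` above fails this step if `go mod tidy` changed either file, i.e. committed module files must already be tidy.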
+ working-directory: integration_tests/datagen + run-demos: + strategy: + matrix: + testcase: + - ad-click + - ad-ctr + - cdn-metrics + - clickstream + - docker + - livestream + - twitter + - prometheus + - schema-registry + - mysql-cdc + - postgres-cdc + #- mysql-sink + - postgres-sink + - iceberg-sink + format: ["json", "protobuf"] + exclude: + - testcase: ad-click + format: protobuf + - testcase: ad-ctr + format: protobuf + - testcase: cdn-metrics + format: protobuf + - testcase: clickstream + format: protobuf + - testcase: docker + format: protobuf + - testcase: prometheus + format: protobuf + # This demo showcases Avro + Schema Registry, so there's no file server for the schema file. + - testcase: schema-registry + format: protobuf + - testcase: mysql-cdc + format: protobuf + - testcase: postgres-cdc + format: protobuf + - testcase: mysql-sink + format: protobuf + - testcase: postgres-sink + format: protobuf + - testcase: iceberg-sink + format: protobuf + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v2 + with: + fetch-depth: 0 + + # In this step, this action saves a list of existing images; + # the cache is created without them in the post-run step. + # It also restores the cache if it exists. + - uses: satackey/action-docker-layer-caching@v0.0.11 + # Ignore the failure of a step and avoid terminating the job. + continue-on-error: true + + - name: Rewrite docker compose for protobuf + working-directory: integration_tests/scripts + if: ${{ matrix.format == 'protobuf' }} + run: | + python3 gen_pb_compose.py ${{ matrix.testcase }} ${{ matrix.format }} + + - name: Run Demos + working-directory: integration_tests/scripts + run: | + python3 run_demos.py --case ${{ matrix.testcase }} --format ${{ matrix.format }} + + - name: Check if the ingestion is successful + working-directory: integration_tests/scripts + run: | + python3 check_data.py ${{ matrix.testcase }} + + - name: Dump logs on failure + if: ${{ failure() }} + working-directory: integration_tests/${{ matrix.testcase }} + run: | + docker compose logs + + - uses: satackey/action-docker-layer-caching@v0.0.11 + continue-on-error: true diff --git a/integration_tests/scripts/.gitignore b/integration_tests/scripts/.gitignore new file mode 100644 index 0000000000000..31e3ea5413583 --- /dev/null +++ b/integration_tests/scripts/.gitignore @@ -0,0 +1,4 @@ +bin/ +include/ +lib/ +pyvenv.cfg diff --git a/integration_tests/scripts/check_data.py b/integration_tests/scripts/check_data.py new file mode 100644 index 0000000000000..1d0a4b52c00d6 --- /dev/null +++ b/integration_tests/scripts/check_data.py @@ -0,0 +1,49 @@ +#!/usr/bin/python3 + +# Every demo directory contains a 'data_check' file that lists the relations (either source or mv) +# that are expected to have >= 1 rows. This script runs the checks by creating a materialized view +# over each relation and verifying the row count in the view.
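+# +# For example, the clickstream demo's data_check might contain just `user_behaviors` (hypothetical +# contents; the file is a comma-separated list of relation names). Each listed relation is wrapped in +# a `<relation>_mv` materialized view below, and its row count is asserted to be >= 1.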
+ +import os +from posixpath import abspath +import subprocess +import sys + +from os.path import dirname +import time + + +def create_mv(rel: str): + if "_mv" in rel: + raise Exception('relation "{}" must not contain "_mv"'.format(rel)) + run_sql("CREATE MATERIALIZED VIEW {0}_mv AS SELECT * FROM {0}".format(rel)) + + +def check_mv(rel: str): + rows = run_sql("SELECT COUNT(*) FROM {}_mv".format(rel)) + rows = int(rows.decode('utf8').strip()) + print("{} rows in {}".format(rows, rel)) + assert rows >= 1 + + +def run_sql(sql): + print("Running SQL: {}".format(sql)) + return subprocess.check_output(["psql", "-h", "localhost", "-p", "4566", + "-d", "dev", "-U", "root", "--tuples-only", "-c", sql]) + + +demo = sys.argv[1] +if demo in ['docker', 'iceberg-sink']: + print('Skipping test for `%s`' % demo) + sys.exit(0) +file_dir = dirname(abspath(__file__)) +project_dir = dirname(file_dir) +demo_dir = os.path.join(project_dir, demo) +data_check = os.path.join(demo_dir, 'data_check') +with open(data_check) as f: + relations = f.read().split(",") + for rel in relations: + create_mv(rel) + time.sleep(20) + for rel in relations: + check_mv(rel) diff --git a/integration_tests/scripts/gen_pb_compose.py b/integration_tests/scripts/gen_pb_compose.py new file mode 100644 index 0000000000000..6d3d35fe5fe23 --- /dev/null +++ b/integration_tests/scripts/gen_pb_compose.py @@ -0,0 +1,48 @@ +#!/usr/bin/python3 + +import argparse +import os +import sys +from os.path import (dirname, abspath) + + +file_server = """ file_server: + image: halverneus/static-file-server:latest + volumes: + - "./:/demo" + restart: always + environment: + FOLDER: /demo + container_name: file_server +""" + + +def gen_docker_compose(demo_compose: str, format: str): + content = "" + with open(demo_compose) as file: + for line in file: + line = line.replace(" - /datagen", + " - /datagen --format {}".format(format)) + if line == 'volumes:\n': + content += file_server + content += line + with open(demo_compose, 'w') as file: + file.write(content) + + +demo = sys.argv[1] +if demo == 'docker': + print('Will not generate a docker-compose file for `docker`') + sys.exit(0) + +format = sys.argv[2] +if format not in ["json", "protobuf", "avro"]: + print('Invalid format: {}'.format(format)) + sys.exit(1) + +file_dir = dirname(abspath(__file__)) +project_dir = dirname(dirname(file_dir)) +demo_dir = os.path.join(project_dir, demo) +demo_compose = os.path.join(demo_dir, 'docker-compose.yml') + +gen_docker_compose(demo_compose, format) diff --git a/integration_tests/scripts/run_demos.py b/integration_tests/scripts/run_demos.py new file mode 100644 index 0000000000000..4f104c0a90b1d --- /dev/null +++ b/integration_tests/scripts/run_demos.py @@ -0,0 +1,111 @@ +#!/usr/bin/python3 + +from os.path import (dirname, abspath) +import os +import sys +import subprocess +from time import sleep +import argparse + + +def run_sql_file(f: str, dir: str): + print("Running SQL file: {}".format(f)) + # ON_ERROR_STOP=1 makes psql return a non-zero exit code when a query fails.
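+ # Without it, psql keeps executing after a failed statement and still exits 0; see: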
+ # https://stackoverflow.com/questions/37072245/check-return-status-of-psql-command-in-unix-shell-scripting + proc = subprocess.run(["psql", "-h", "localhost", "-p", "4566", + "-d", "dev", "-U", "root", "-f", f, "-v", "ON_ERROR_STOP=1"], check=True, + cwd=dir) + if proc.returncode != 0: + sys.exit(1) + + +def run_demo(demo: str, format: str): + file_dir = dirname(abspath(__file__)) + project_dir = dirname(file_dir) + demo_dir = os.path.join(project_dir, demo) + print("Running demo: {}".format(demo)) + + subprocess.run(["docker", "compose", "up", "-d"], + cwd=demo_dir, check=True) + sleep(40) + + sql_files = ['create_source.sql', 'create_mv.sql', 'query.sql'] + for fname in sql_files: + if format == 'protobuf': + sql_file = os.path.join(demo_dir, "pb", fname) + if os.path.isfile(sql_file): + # Try to run the protobuf version first. + run_sql_file(sql_file, demo_dir) + sleep(10) + continue + # Fall back to the default version when the protobuf version doesn't exist. + sql_file = os.path.join(demo_dir, fname) + run_sql_file(sql_file, demo_dir) + sleep(10) +def run_iceberg_demo(): + demo = "iceberg-sink" + file_dir = dirname(abspath(__file__)) + project_dir = dirname(dirname(file_dir)) + demo_dir = os.path.join(project_dir, demo) + print("Running demo: iceberg-sink") + + subprocess.run(["docker", "compose", "up", "-d"], + cwd=demo_dir, check=True) + sleep(40) + + subprocess.run(["docker", "compose", "exec", "spark", "bash", "/spark-script/run-sql-file.sh", "create-table"], + cwd=demo_dir, check=True) + + sql_files = ['create_source.sql', 'create_mv.sql', 'create_sink.sql'] + for fname in sql_files: + sql_file = os.path.join(demo_dir, fname) + print("Executing SQL: ", open(sql_file).read()) + run_sql_file(sql_file, demo_dir) + sleep(10) + + print("Sink created.
Waiting 2 minutes for ingestion.") + + # wait two minutes for ingestion + sleep(120) + + query_sql = open(os.path.join(demo_dir, "iceberg-query.sql")).read() + + print("Querying Iceberg with Presto SQL: %s" % query_sql) + + query_output_file_name = "query_output.txt" + + query_output_file = open(query_output_file_name, "wb") + + subprocess.run(["docker", "compose", "exec", "presto", "presto-cli", "--server", "localhost:8080", "--execute", query_sql], + cwd=demo_dir, check=True, stdout=query_output_file) + query_output_file.close() + + output_content = open(query_output_file_name).read() + + print(output_content) + + assert len(output_content.strip()) > 0 + + + + arg_parser = argparse.ArgumentParser(description='Run the demo') +arg_parser.add_argument('--format', + metavar='format', + type=str, + help='the format of output data', + default='json') +arg_parser.add_argument('--case', + metavar='case', + type=str, + help='the test case') +args = arg_parser.parse_args() + +if args.case == "iceberg-sink": + if args.format == "protobuf": + print("Skipping protobuf test for iceberg-sink") + else: + run_iceberg_demo() +else: + run_demo(args.case, args.format) From 3980d4dcfe9f3bfe8ae8a8d4d5fcb287813459f4 Mon Sep 17 00:00:00 2001 From: Eric Fu Date: Mon, 13 Mar 2023 07:31:18 +0000 Subject: [PATCH 05/12] fix bugs --- .github/workflows/intergration_tests.yml | 3 --- docker/docker-compose.yml | 4 ++-- integration_tests/scripts/gen_pb_compose.py | 2 +- 3 files changed, 3 insertions(+), 6 deletions(-) diff --git a/.github/workflows/intergration_tests.yml b/.github/workflows/intergration_tests.yml index b8032c03f2c7c..5bcf809cc01c7 100644 --- a/.github/workflows/intergration_tests.yml +++ b/.github/workflows/intergration_tests.yml @@ -31,7 +31,6 @@ jobs: - ad-ctr - cdn-metrics - clickstream - - docker - livestream - twitter - prometheus @@ -51,8 +50,6 @@ format: protobuf - testcase: clickstream format: protobuf - - testcase: docker - format: protobuf - testcase: prometheus format: protobuf # This demo showcases Avro + Schema Registry, so there's no file server for the schema file.
diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml index 734c313b821e0..461cad59cb2ab 100644 --- a/docker/docker-compose.yml +++ b/docker/docker-compose.yml @@ -327,7 +327,7 @@ services: - "9644:9644" depends_on: [] volumes: - - "redpanda:/var/lib/redpanda/data" + - "message_queue:/var/lib/redpanda/data" environment: {} container_name: message_queue healthcheck: @@ -348,6 +348,6 @@ volumes: external: false prometheus-0: external: false - redpanda: + message_queue: external: false name: risingwave-compose diff --git a/integration_tests/scripts/gen_pb_compose.py b/integration_tests/scripts/gen_pb_compose.py index 6d3d35fe5fe23..4687c7688852d 100644 --- a/integration_tests/scripts/gen_pb_compose.py +++ b/integration_tests/scripts/gen_pb_compose.py @@ -41,7 +41,7 @@ def gen_docker_compose(demo_compose: str, format: str): sys.exit(1) file_dir = dirname(abspath(__file__)) -project_dir = dirname(dirname(file_dir)) +project_dir = dirname(file_dir) demo_dir = os.path.join(project_dir, demo) demo_compose = os.path.join(demo_dir, 'docker-compose.yml') From 288b33299965394d4f044fb73b082c3155919083 Mon Sep 17 00:00:00 2001 From: Eric Fu Date: Mon, 13 Mar 2023 07:45:32 +0000 Subject: [PATCH 06/12] fix docker volumes --- integration_tests/ad-click/docker-compose.yml | 2 ++ integration_tests/ad-ctr/docker-compose.yml | 2 ++ integration_tests/cdn-metrics/docker-compose.yml | 2 ++ integration_tests/clickstream/docker-compose.yml | 2 ++ integration_tests/mysql-sink/docker-compose.yml | 2 ++ integration_tests/postgres-cdc/docker-compose.yml | 2 ++ integration_tests/postgres-sink/docker-compose.yml | 2 ++ integration_tests/schema-registry/docker-compose.yml | 2 ++ integration_tests/superset/docker-compose.yml | 2 ++ integration_tests/twitter/docker-compose.yml | 2 ++ 10 files changed, 20 insertions(+) diff --git a/integration_tests/ad-click/docker-compose.yml b/integration_tests/ad-click/docker-compose.yml index 6f0f2f6a2cd9e..ab8f175db1252 100644 --- a/integration_tests/ad-click/docker-compose.yml +++ b/integration_tests/ad-click/docker-compose.yml @@ -57,4 +57,6 @@ volumes: external: false prometheus-0: external: false + message_queue: + external: false name: risingwave-compose diff --git a/integration_tests/ad-ctr/docker-compose.yml b/integration_tests/ad-ctr/docker-compose.yml index 0105f5dcf683a..bd12b521d53b5 100644 --- a/integration_tests/ad-ctr/docker-compose.yml +++ b/integration_tests/ad-ctr/docker-compose.yml @@ -57,4 +57,6 @@ volumes: external: false prometheus-0: external: false + message_queue: + external: false name: risingwave-compose diff --git a/integration_tests/cdn-metrics/docker-compose.yml b/integration_tests/cdn-metrics/docker-compose.yml index 4e2fb50d88168..617566382b7f7 100644 --- a/integration_tests/cdn-metrics/docker-compose.yml +++ b/integration_tests/cdn-metrics/docker-compose.yml @@ -57,4 +57,6 @@ volumes: external: false prometheus-0: external: false + message_queue: + external: false name: risingwave-compose diff --git a/integration_tests/clickstream/docker-compose.yml b/integration_tests/clickstream/docker-compose.yml index 3a9d288b89c00..b7a1573d25253 100644 --- a/integration_tests/clickstream/docker-compose.yml +++ b/integration_tests/clickstream/docker-compose.yml @@ -57,4 +57,6 @@ volumes: external: false prometheus-0: external: false + message_queue: + external: false name: risingwave-compose diff --git a/integration_tests/mysql-sink/docker-compose.yml b/integration_tests/mysql-sink/docker-compose.yml index c1aa340d4f2ff..21651f2ac49b2 100644 --- 
a/integration_tests/mysql-sink/docker-compose.yml +++ b/integration_tests/mysql-sink/docker-compose.yml @@ -88,4 +88,6 @@ volumes: external: false prometheus-0: external: false + message_queue: + external: false name: risingwave-compose diff --git a/integration_tests/postgres-cdc/docker-compose.yml b/integration_tests/postgres-cdc/docker-compose.yml index c3583ea7d7e71..0ab26badc2cef 100644 --- a/integration_tests/postgres-cdc/docker-compose.yml +++ b/integration_tests/postgres-cdc/docker-compose.yml @@ -91,4 +91,6 @@ volumes: external: false prometheus-0: external: false + message_queue: + external: false name: risingwave-compose diff --git a/integration_tests/postgres-sink/docker-compose.yml b/integration_tests/postgres-sink/docker-compose.yml index 3d9e08d03a59d..cd8033ad2221b 100644 --- a/integration_tests/postgres-sink/docker-compose.yml +++ b/integration_tests/postgres-sink/docker-compose.yml @@ -91,4 +91,6 @@ volumes: external: false prometheus-0: external: false + message_queue: + external: false name: risingwave-compose diff --git a/integration_tests/schema-registry/docker-compose.yml b/integration_tests/schema-registry/docker-compose.yml index df36671a4a336..ac9b6b26aeaf7 100644 --- a/integration_tests/schema-registry/docker-compose.yml +++ b/integration_tests/schema-registry/docker-compose.yml @@ -63,4 +63,6 @@ volumes: external: false prometheus-0: external: false + message_queue: + external: false name: risingwave-compose diff --git a/integration_tests/superset/docker-compose.yml b/integration_tests/superset/docker-compose.yml index 21491a122e749..f8309c4ca2abb 100644 --- a/integration_tests/superset/docker-compose.yml +++ b/integration_tests/superset/docker-compose.yml @@ -132,4 +132,6 @@ volumes: external: false prometheus-0: external: false + message_queue: + external: false name: risingwave-compose diff --git a/integration_tests/twitter/docker-compose.yml b/integration_tests/twitter/docker-compose.yml index 9887ba3818274..7ac085e0a17ee 100644 --- a/integration_tests/twitter/docker-compose.yml +++ b/integration_tests/twitter/docker-compose.yml @@ -57,4 +57,6 @@ volumes: external: false prometheus-0: external: false + message_queue: + external: false name: risingwave-compose From 15b631ad6ca2b8b0b502ded2480cf327f6c8eb7a Mon Sep 17 00:00:00 2001 From: Eric Fu Date: Mon, 13 Mar 2023 08:11:07 +0000 Subject: [PATCH 07/12] fix --- integration_tests/scripts/run_demos.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/integration_tests/scripts/run_demos.py b/integration_tests/scripts/run_demos.py index 4f104c0a90b1d..330ebee1065b7 100644 --- a/integration_tests/scripts/run_demos.py +++ b/integration_tests/scripts/run_demos.py @@ -46,7 +46,7 @@ def run_demo(demo: str, format: str): def run_iceberg_demo(): demo = "iceberg-sink" file_dir = dirname(abspath(__file__)) - project_dir = dirname(dirname(file_dir)) + project_dir = dirname(file_dir) demo_dir = os.path.join(project_dir, demo) print("Running demo: iceberg-sink") From faa85145c6cdfb853627471659f745df058fda14 Mon Sep 17 00:00:00 2001 From: Eric Fu Date: Mon, 13 Mar 2023 08:24:09 +0000 Subject: [PATCH 08/12] fix schema-registry test. 
see #8124 --- integration_tests/schema-registry/create_source.sql | 2 +- integration_tests/schema-registry/readme.md | 7 +++---- 2 files changed, 4 insertions(+), 5 deletions(-) diff --git a/integration_tests/schema-registry/create_source.sql b/integration_tests/schema-registry/create_source.sql index d1e17540dd5df..09c078f99f432 100644 --- a/integration_tests/schema-registry/create_source.sql +++ b/integration_tests/schema-registry/create_source.sql @@ -4,5 +4,5 @@ CREATE SOURCE student WITH ( properties.bootstrap.server = 'message_queue:29092', scan.startup.mode = 'earliest' ) -ROW FORMAT avro message 'student' +ROW FORMAT avro row schema location confluent schema registry 'http://message_queue:8081'; \ No newline at end of file diff --git a/integration_tests/schema-registry/readme.md b/integration_tests/schema-registry/readme.md index 6c1c53c26e0a4..dd1e9cac1d989 100644 --- a/integration_tests/schema-registry/readme.md +++ b/integration_tests/schema-registry/readme.md @@ -2,7 +2,7 @@ This demo shows how to ingest Avro data into RisingWave with [Schema Registry](h At the beginning, there's a datagen process that ingests Avro data into Redpanda (a Kafka-compatible message queue). The Avro schema is as follows: -- ** Version 1 ** +- **Version 1** ```json { @@ -38,8 +38,7 @@ At the beginning, there's a datagen process that ingests Avro data into Redpanda } ``` - -- ** Version 2 ** +- **Version 2** ```json { @@ -91,6 +90,6 @@ CREATE SOURCE student WITH ( properties.bootstrap.server = 'message_queue:29092', scan.startup.mode = 'earliest' ) -ROW FORMAT avro message 'student' +ROW FORMAT avro row schema location confluent schema registry 'http://message_queue:8081'; ``` From 14c742c0f6de27f7d0545080da293d5236c42b99 Mon Sep 17 00:00:00 2001 From: Eric Fu Date: Mon, 13 Mar 2023 08:30:53 +0000 Subject: [PATCH 09/12] fix typo: Timestamp --- .../datagen/clickstream/clickstream.go | 6 +++--- integration_tests/datagen/ecommerce/ecommerce.go | 14 +++++++------- 2 files changed, 10 insertions(+), 10 deletions(-) diff --git a/integration_tests/datagen/clickstream/clickstream.go b/integration_tests/datagen/clickstream/clickstream.go index e157e21e1f59a..8a81f48a5430d 100644 --- a/integration_tests/datagen/clickstream/clickstream.go +++ b/integration_tests/datagen/clickstream/clickstream.go @@ -17,7 +17,7 @@ type userBehavior struct { UserId string `json:"user_id"` TargetId string `json:"target_id"` TargetType string `json:"target_type"` - EventTimestmap string `json:"event_timestamp"` + EventTimestamp string `json:"event_timestamp"` BehaviorType string `json:"behavior_type"` // The two fields are used to express the following behaviors: @@ -32,7 +32,7 @@ func (r *userBehavior) ToPostgresSql() string { return fmt.Sprintf(`INSERT INTO %s (user_id, target_id, target_type, event_timestamp, behavior_type, parent_target_type, parent_target_id) values ('%s', '%s', '%s', '%s', '%s', '%s', '%s')`, - "user_behaviors", r.UserId, r.TargetId, r.TargetType, r.EventTimestmap, r.BehaviorType, r.ParentTargetType, r.ParentTargetId) + "user_behaviors", r.UserId, r.TargetId, r.TargetType, r.EventTimestamp, r.BehaviorType, r.ParentTargetType, r.ParentTargetId) } func (r *userBehavior) ToJson() (topic string, key string, data []byte) { @@ -130,7 +130,7 @@ func (g *clickStreamGen) generate() sink.SinkRecord { UserId: fmt.Sprint(userId), TargetId: string(target) + fmt.Sprint(targetId), TargetType: string(target), - EventTimestmap: time.Now().Format(gen.RwTimestampLayout), + EventTimestamp: 
time.Now().Format(gen.RwTimestampLayout), BehaviorType: behavior, ParentTargetType: parentTargetType, ParentTargetId: parentTargetId, diff --git a/integration_tests/datagen/ecommerce/ecommerce.go b/integration_tests/datagen/ecommerce/ecommerce.go index cd27049d9d559..b43b084d96631 100644 --- a/integration_tests/datagen/ecommerce/ecommerce.go +++ b/integration_tests/datagen/ecommerce/ecommerce.go @@ -19,14 +19,14 @@ type orderEvent struct { OrderId int64 `json:"order_id"` ItemId int64 `json:"item_id"` ItemPrice float64 `json:"item_price"` - EventTimestmap string `json:"event_timestamp"` + EventTimestamp string `json:"event_timestamp"` } func (r *orderEvent) ToPostgresSql() string { return fmt.Sprintf(`INSERT INTO %s (order_id, item_id, item_price, event_timestamp) values ('%d', '%d', %f, '%s')`, - "order_events", r.OrderId, r.ItemId, r.ItemPrice, r.EventTimestmap) + "order_events", r.OrderId, r.ItemId, r.ItemPrice, r.EventTimestamp) } func (r *orderEvent) ToJson() (topic string, key string, data []byte) { @@ -40,7 +40,7 @@ type parcelEvent struct { sink.BaseSinkRecord OrderId int64 `json:"order_id"` - EventTimestmap string `json:"event_timestamp"` + EventTimestamp string `json:"event_timestamp"` EventType string `json:"event_type"` } @@ -48,7 +48,7 @@ func (r *parcelEvent) ToPostgresSql() string { return fmt.Sprintf(`INSERT INTO %s (order_id, event_timestamp, event_type) values ('%d', '%s', '%s')`, - "parcel_events", r.OrderId, r.EventTimestmap, r.EventType) + "parcel_events", r.OrderId, r.EventTimestamp, r.EventType) } func (r *parcelEvent) ToJson() (topic string, key string, data []byte) { @@ -101,14 +101,14 @@ func (g *ecommerceGen) generate() []sink.SinkRecord { OrderId: g.seqOrderId, ItemId: int64(itemId), ItemPrice: itemPrice, - EventTimestmap: ts, + EventTimestamp: ts, } } var records []sink.SinkRecord records = append(records, orders...) 
records = append(records, &parcelEvent{ OrderId: g.seqOrderId, - EventTimestmap: ts, + EventTimestamp: ts, EventType: "order_created", }) return records @@ -119,7 +119,7 @@ func (g *ecommerceGen) generate() []sink.SinkRecord { &parcelEvent{ OrderId: g.seqShipId, EventType: "parcel_shipped", - EventTimestmap: ts, + EventTimestamp: ts, }, } } From d18c064fd38cccccd5646ae48d766a07f71fe12d Mon Sep 17 00:00:00 2001 From: Eric Fu Date: Mon, 13 Mar 2023 08:37:37 +0000 Subject: [PATCH 10/12] fix typo: Elapsed & upgrade typo checker --- .github/workflows/typo.yml | 2 +- integration_tests/datagen/load_gen.go | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/typo.yml b/.github/workflows/typo.yml index 51f1f221b4fba..67684745cf2a4 100644 --- a/.github/workflows/typo.yml +++ b/.github/workflows/typo.yml @@ -10,4 +10,4 @@ jobs: uses: actions/checkout@v3 - name: Check spelling of the entire repository - uses: crate-ci/typos@v1.11.1 + uses: crate-ci/typos@v1.13.20 diff --git a/integration_tests/datagen/load_gen.go b/integration_tests/datagen/load_gen.go index 69cd71687d184..12548e34df8c9 100644 --- a/integration_tests/datagen/load_gen.go +++ b/integration_tests/datagen/load_gen.go @@ -111,7 +111,7 @@ func generateLoad(ctx context.Context, cfg gen.GeneratorConfig) error { return nil case <-ticker.C: if time.Since(prevTime) >= 10*time.Second { - log.Printf("Sent %d records in total (Elasped: %s)", count, time.Since(initTime).String()) + log.Printf("Sent %d records in total (Elapsed: %s)", count, time.Since(initTime).String()) prevTime = time.Now() } case record := <-outCh: @@ -125,7 +125,7 @@ func generateLoad(ctx context.Context, cfg gen.GeneratorConfig) error { _ = rl.Take() count++ if time.Since(prevTime) >= 10*time.Second { - log.Printf("Sent %d records in total (Elasped: %s)", count, time.Since(initTime).String()) + log.Printf("Sent %d records in total (Elapsed: %s)", count, time.Since(initTime).String()) prevTime = time.Now() } } From bd42e6b48b6cde549f2f27d11852c42814d2fa3a Mon Sep 17 00:00:00 2001 From: Eric Fu Date: Mon, 13 Mar 2023 08:41:20 +0000 Subject: [PATCH 11/12] fix 2 more datagen builds --- integration_tests/iceberg-sink/docker-compose.yml | 2 +- integration_tests/postgres-cdc/docker-compose.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/integration_tests/iceberg-sink/docker-compose.yml b/integration_tests/iceberg-sink/docker-compose.yml index 16ed06e332f54..89368e90cb505 100644 --- a/integration_tests/iceberg-sink/docker-compose.yml +++ b/integration_tests/iceberg-sink/docker-compose.yml @@ -76,7 +76,7 @@ services: container_name: prepare_mysql restart: on-failure datagen: - image: ghcr.io/risingwavelabs/demo-datagen:v1.1.1 + build: ../datagen depends_on: [mysql] command: - /bin/sh diff --git a/integration_tests/postgres-cdc/docker-compose.yml b/integration_tests/postgres-cdc/docker-compose.yml index 0ab26badc2cef..59a9f86f13cb3 100644 --- a/integration_tests/postgres-cdc/docker-compose.yml +++ b/integration_tests/postgres-cdc/docker-compose.yml @@ -68,7 +68,7 @@ services: container_name: postgres_prepare restart: on-failure datagen: - image: ghcr.io/risingwavelabs/demo-datagen:v1.1.0 + build: ../datagen depends_on: [message_queue] command: - /bin/sh From 69773736fcf8017e3a5602a7de6124ef44d799af Mon Sep 17 00:00:00 2001 From: Eric Fu Date: Mon, 13 Mar 2023 09:06:34 +0000 Subject: [PATCH 12/12] nightly running --- .github/workflows/intergration_tests.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git
a/.github/workflows/intergration_tests.yml b/.github/workflows/intergration_tests.yml index 5bcf809cc01c7..74b5d09a81191 100644 --- a/.github/workflows/intergration_tests.yml +++ b/.github/workflows/intergration_tests.yml @@ -1,7 +1,9 @@ name: Integration Tests CI on: - pull_request: + schedule: + # Currently we build docker images at 12:00 (UTC), so run this at 13:00 + - cron: '0 13 * * *' jobs: golangci: