From 6cf37317d5a5d92f1ead98359eb4af3b37bc50b4 Mon Sep 17 00:00:00 2001
From: kamille <34352236+Rachelint@users.noreply.github.com>
Date: Fri, 16 Jun 2023 14:40:28 +0800
Subject: [PATCH] chore: add integration test about recovery (#996)

## Rationale
Part of #799
Until now the recovery test has been run by hand, which is tedious. This PR
turns it into an integration test so that it runs automatically in CI.

## Detailed Changes
+ Add an integration test for recovery.
+ Run the new test in CI.

## Test Plan
None.
---
 .github/workflows/ci.yml                            | 42 ++++++++++
 integration_tests/Makefile                          |  3 +
 .../config/shard-based-recovery.toml                | 21 +++++
 integration_tests/recovery/check.py                 | 84 +++++++++++++++++++
 integration_tests/recovery/run.sh                   | 35 ++++++++
 5 files changed, 185 insertions(+)
 create mode 100644 integration_tests/config/shard-based-recovery.toml
 create mode 100644 integration_tests/recovery/check.py
 create mode 100755 integration_tests/recovery/run.sh

diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 8a3d825fe1..e682ae5b55 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -232,3 +232,45 @@ jobs:
           name: sdk-test-${{ github.head_ref }}.${{ github.sha }}
           path: |
             /tmp/ceresdb-stdout.log
+
+  recovery-test:
+    name: recovery-test
+    runs-on: ubuntu-latest
+    timeout-minutes: 60
+    steps:
+      - uses: actions/checkout@v3
+        with:
+          submodules: true
+      - name: Cache Rust Dependencies
+        uses: actions/cache@v3
+        with:
+          path: |
+            ~/.cargo
+            ./target
+          key: debug-${{ runner.os }}-${{ hashFiles('rust-toolchain') }}-${{ hashFiles('Cargo.lock') }}
+          restore-keys: |
+            debug-${{ runner.os }}-${{ hashFiles('rust-toolchain') }}-
+            debug-${{ runner.os }}-
+            debug-
+      - run: |
+          rustup set auto-self-update disable
+          rustup toolchain install ${RUST_VERSION} --profile minimal
+      - name: Release Disk Quota
+        run: |
+          sudo rm -rf /usr/local/lib/android # releases about 10 GB
+          sudo rm -rf /usr/share/dotnet # releases about 20 GB
+      - name: Setup Build Environment
+        run: |
+          sudo apt update
+          sudo apt install --yes protobuf-compiler
+      - name: Run recovery tests
+        working-directory: integration_tests
+        run: |
+          make run-recovery
+      - name: Upload Logs
+        if: always()
+        uses: actions/upload-artifact@v3
+        with:
+          name: recovery-test-${{ github.head_ref }}.${{ github.sha }}
+          path: |
+            /tmp/ceresdb-stdout.log
diff --git a/integration_tests/Makefile b/integration_tests/Makefile
index 49ff4ba9c6..84bb3454e1 100644
--- a/integration_tests/Makefile
+++ b/integration_tests/Makefile
@@ -69,3 +69,6 @@ run-mysql:
 
 run-prom:
 	cd prom && ./run-tests.sh
+
+run-recovery: clean build-ceresdb kill-old-process
+	cd recovery && ./run.sh && ./run.sh shard_based
diff --git a/integration_tests/config/shard-based-recovery.toml b/integration_tests/config/shard-based-recovery.toml
new file mode 100644
index 0000000000..3ad980df0b
--- /dev/null
+++ b/integration_tests/config/shard-based-recovery.toml
[server]
bind_addr = "0.0.0.0"
http_port = 5440
grpc_port = 8831

[logger]
level = "info"

[tracing]
dir = "/tmp/ceresdb"

[analytic]
recover_mode = "ShardBased"

[analytic.storage.object_store]
type = "Local"
data_dir = "/tmp/ceresdb"

[analytic.wal]
type = "RocksDB"
data_dir = "/tmp/ceresdb"
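
Aside: run.sh below waits a fixed `sleep 10` after every server start before
running the checks. If the fixed sleep ever turns out to be flaky in CI, one
alternative is to poll the HTTP port configured above until the server
answers. This is only a sketch, not part of the patch: it assumes the `/sql`
endpoint (the same one check.py below talks to) returns HTTP 200 for a
trivial probe query once the server is ready, and the probe query and timeout
values are illustrative.

#!/usr/bin/env python3
# Poll the server's HTTP port until it accepts a trivial query, instead of
# sleeping for a fixed interval. Probe query and limits are assumptions.
import time

import requests

API_ROOT = 'http://localhost:5440'  # http_port from the config above


def wait_until_ready(timeout_s=30, interval_s=1):
    deadline = time.time() + timeout_s
    while time.time() < deadline:
        try:
            # Same request shape as execute_sql() in check.py below.
            r = requests.post('{}/sql'.format(API_ROOT),
                              json={'query': 'SELECT 1'}, timeout=2)
            if r.status_code == 200:
                return
        except requests.ConnectionError:
            pass  # server is not listening yet
        time.sleep(interval_s)
    raise TimeoutError('server not ready within {}s'.format(timeout_s))


if __name__ == '__main__':
    wait_until_ready()
    print('Server is ready.')
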
diff --git a/integration_tests/recovery/check.py b/integration_tests/recovery/check.py
new file mode 100644
index 0000000000..73b7495b14
--- /dev/null
+++ b/integration_tests/recovery/check.py
#!/usr/bin/env python3
# coding: utf-8

import requests
import argparse

api_root = 'http://localhost:5440'
headers = {
    'Content-Type': 'application/json'
}

def get_test_tables(ts):
    table = 'sql_test' + str(ts)
    table2 = 'SQL_TEST' + str(ts)
    return [table, table2]

def get_args():
    parser = argparse.ArgumentParser(description='cmd args')
    parser.add_argument('--timestamp', '-ts', type=int, help='timestamp')
    parser.add_argument('--init_before_check', '-i', help='init_before_check', action="store_true")
    args = vars(parser.parse_args())
    return args


def execute_sql(sql):
    r = requests.post('{}/sql'.format(api_root), json={'query': sql}, headers=headers)
    assert r.status_code == 200, r.text
    return r.json()

def prepare_data(ts, tables):
    for t in tables:
        execute_sql("""
CREATE TABLE if not exists `{}` (
    `t` timestamp NOT NULL,
    `tag1` string TAG,
    `tag2` string TAG,
    `value` double NOT NULL,
    `VALUE2` double NOT NULL,
    timestamp KEY (t)
);
        """.format(t))

    execute_sql("""
insert into {}(t, tag1, tag2, value, VALUE2)
values
({}, "v1", "v2", 1, 2),
({}, "v1", "v2", 11, 22)
;
    """.format(tables[0], ts-5000, ts))

    execute_sql("""
insert into {}(t, tag1, tag2, value, VALUE2)
values
({}, "v1", "v2", 10, 20),
({}, "v1", "v2", 110, 220)
;
    """.format(tables[1], ts-5000, ts))

def query_and_check(ts, tables):
    expected = {'rows': [{'tsid': 7518337278486593135, 't': ts - 5000, 'tag1': 'v1', 'tag2': 'v2', 'value': 1.0, 'VALUE2': 2.0},
                         {'tsid': 7518337278486593135, 't': ts, 'tag1': 'v1', 'tag2': 'v2', 'value': 11.0, 'VALUE2': 22.0}]}
    expected2 = {'rows': [{'tsid': 7518337278486593135, 't': ts - 5000, 'tag1': 'v1', 'tag2': 'v2', 'value': 10.0, 'VALUE2': 20.0},
                          {'tsid': 7518337278486593135, 't': ts, 'tag1': 'v1', 'tag2': 'v2', 'value': 110.0, 'VALUE2': 220.0}]}
    expecteds = [expected, expected2]

    for idx, t in enumerate(tables):
        r = execute_sql("select * from {}".format(t))
        assert r == expecteds[idx], 'unexpected rows in {}: {}'.format(t, r)

    print('Restart test passed...')

def main():
    args = get_args()
    init_before_check = args['init_before_check']
    ts = args['timestamp']
    test_tables = get_test_tables(args['timestamp'])

    if init_before_check:
        print("Init before check")
        prepare_data(ts, test_tables)
    query_and_check(ts, test_tables)

if __name__ == '__main__':
    main()
diff --git a/integration_tests/recovery/run.sh b/integration_tests/recovery/run.sh
new file mode 100755
index 0000000000..83295244cd
--- /dev/null
+++ b/integration_tests/recovery/run.sh
#!/usr/bin/env bash

set -e

ROOT=$(pwd)
# macOS `date` cannot print sub-second precision, so fake milliseconds by
# appending three zeros to the epoch seconds.
NOW=$(date +%s000)
BINARY_PATH=${ROOT}/../../target/debug/ceresdb-server
SERVER_HTTP_ENDPOINT=127.0.0.1:5440
# The CI job uploads this file as the test artifact.
STDOUT_LOG=/tmp/ceresdb-stdout.log

CONFIG_FILE=${ROOT}/../../docs/minimal.toml
if [ "${1:-}" == 'shard_based' ]; then
    CONFIG_FILE=${ROOT}/../config/shard-based-recovery.toml
fi

echo "Run with config: ${CONFIG_FILE}"
echo "First check..."
nohup ${BINARY_PATH} --config ${CONFIG_FILE} >> ${STDOUT_LOG} 2>&1 &
sleep 10
python3 ./check.py -ts ${NOW} -i

echo "Restart and check..."
killall ceresdb-server || true
nohup ${BINARY_PATH} --config ${CONFIG_FILE} >> ${STDOUT_LOG} 2>&1 &
sleep 10
python3 ./check.py -ts ${NOW}

echo "Flush, restart and check..."
curl -XPOST ${SERVER_HTTP_ENDPOINT}/debug/flush_memtable
echo -e "\nFlush finished..."
killall ceresdb-server || true
nohup ${BINARY_PATH} --config ${CONFIG_FILE} >> ${STDOUT_LOG} 2>&1 &
sleep 10
python3 ./check.py -ts ${NOW}
echo "All finished..."
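
Aside: the script above exercises two distinct recovery paths. After the
first restart the inserted rows presumably live only in the WAL, so recovery
replays it; the POST to /debug/flush_memtable then persists the memtable, so
the final restart has to recover from the flushed files instead. For
reference, the same three-phase sequence can be driven from Python. This is
only a sketch under the same assumptions run.sh makes (debug binary path,
fixed 10-second waits); terminating the child process stands in for
`killall ceresdb-server`, and it must be run from integration_tests/recovery
so that check.py and the relative paths resolve.

#!/usr/bin/env python3
# Drive the prepare / restart / flush-and-restart sequence of run.sh from
# Python. Paths, sleeps, and process handling are illustrative assumptions.
import subprocess
import time

import requests

BINARY = '../../target/debug/ceresdb-server'
CONFIG = '../../docs/minimal.toml'
HTTP = 'http://127.0.0.1:5440'


def start_server():
    # run.sh uses nohup; inheriting stdio is enough for a local sketch.
    proc = subprocess.Popen([BINARY, '--config', CONFIG])
    time.sleep(10)  # run.sh also waits a fixed 10 seconds
    return proc


def restart(proc):
    proc.terminate()  # stands in for `killall ceresdb-server`
    proc.wait()
    return start_server()


def check(ts, init=False):
    cmd = ['python3', './check.py', '-ts', str(ts)]
    if init:
        cmd.append('-i')
    subprocess.run(cmd, check=True)


ts = int(time.time() * 1000)  # same unit as NOW=$(date +%s000)

proc = start_server()
check(ts, init=True)   # create tables, insert rows, verify

proc = restart(proc)   # recovery replays the WAL
check(ts)

# Force the memtable to disk so the next recovery reads flushed data.
requests.post('{}/debug/flush_memtable'.format(HTTP)).raise_for_status()
proc = restart(proc)
check(ts)

proc.terminate()
proc.wait()
print('All checks passed.')
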