chore: add integration test about recovery (#996)
## Rationale
Part of #799 
Currently the recovery test has to be run manually, which is tedious. This PR
adds it to the integration tests so it runs automatically in CI.

## Detailed Changes
+ Add an integration test covering recovery (see the local-run sketch below).
+ Run the new test automatically in CI.
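
To run the suite locally, a minimal sketch (assuming the `clean`, `build-ceresdb`, and `kill-old-process` targets referenced by the new Makefile rule behave as in the existing integration tests):

```bash
# From the repository root; exercises both recovery modes back to back.
cd integration_tests
make run-recovery   # i.e. cd recovery && ./run.sh && ./run.sh shard_based
```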

## Test Plan
None.
Rachelint committed Jun 16, 2023
1 parent 2c1025a commit 6cf3731
Showing 5 changed files with 185 additions and 0 deletions.
42 changes: 42 additions & 0 deletions .github/workflows/ci.yml
@@ -232,3 +232,45 @@ jobs:
          name: sdk-test-${{ github.head_ref }}.${{ github.sha }}
          path: |
            /tmp/ceresdb-stdout.log
  recovery-test:
    name: recovery-test
    runs-on: ubuntu-latest
    timeout-minutes: 60
    steps:
      - uses: actions/checkout@v3
        with:
          submodules: true
      - name: Cache Rust Dependencies
        uses: actions/cache@v3
        with:
          path: |
            ~/.cargo
            ./target
          key: debug-${{ runner.os }}-${{ hashFiles('rust-toolchain') }}-${{ hashFiles('Cargo.lock') }}
          restore-keys: |
            debug-${{ runner.os }}-${{ hashFiles('rust-toolchain') }}-
            debug-${{ runner.os }}-
            debug-
      - run: |
          rustup set auto-self-update disable
          rustup toolchain install ${RUST_VERSION} --profile minimal
      - name: Release Disk Quota
        run: |
          sudo rm -rf /usr/local/lib/android # release about 10 GB
          sudo rm -rf /usr/share/dotnet # release about 20 GB
      - name: Setup Build Environment
        run: |
          sudo apt update
          sudo apt install --yes protobuf-compiler
      - name: Run recovery tests
        working-directory: integration_tests
        run: |
          make run-recovery
      - name: Upload Logs
        if: always()
        uses: actions/upload-artifact@v3
        with:
          name: recovery-test-${{ github.head_ref }}.${{ github.sha }}
          path: |
            /tmp/ceresdb-stdout.log
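
Note that the `Upload Logs` step is guarded with `if: always()`, so the server log is uploaded as an artifact even when the test step fails, which is exactly when it is needed.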
3 changes: 3 additions & 0 deletions integration_tests/Makefile
@@ -69,3 +69,6 @@ run-mysql:

run-prom:
	cd prom && ./run-tests.sh

run-recovery: clean build-ceresdb kill-old-process
	cd recovery && ./run.sh && ./run.sh shard_based
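
The target runs the suite twice: `./run.sh` without arguments uses the default config (`docs/minimal.toml`), while `./run.sh shard_based` switches to the shard-based recovery config added below.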
21 changes: 21 additions & 0 deletions integration_tests/config/shard-based-recovery.toml
@@ -0,0 +1,21 @@
[server]
bind_addr = "0.0.0.0"
http_port = 5440
grpc_port = 8831

[logger]
level = "info"

[tracing]
dir = "/tmp/ceresdb"

[analytic]
recover_mode = "ShardBased"

[analytic.storage.object_store]
type = "Local"
data_dir = "/tmp/ceresdb"

[analytic.wal]
type = "RocksDB"
data_dir = "/tmp/ceresdb"
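
This config differs from the default `docs/minimal.toml` mainly in setting `recover_mode = "ShardBased"`, which, as the name suggests, recovers tables shard by shard rather than one table at a time; running the same checks under both settings covers both recovery paths.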
84 changes: 84 additions & 0 deletions integration_tests/recovery/check.py
@@ -0,0 +1,84 @@
#!/usr/bin/env python
# coding: utf-8

import argparse

import requests

api_root = 'http://localhost:5440'
headers = {
    'Content-Type': 'application/json'
}

def get_test_tables(ts):
    # Two tables whose names differ only by case, to cover case-sensitive handling.
    table = 'sql_test' + str(ts)
    table2 = 'SQL_TEST' + str(ts)
    return [table, table2]

def get_args():
    parser = argparse.ArgumentParser(description='cmd args')
    parser.add_argument('--timestamp', '-ts', type=int, help='timestamp')
    parser.add_argument('--init_before_check', '-i', help='init_before_check', action="store_true")
    args = vars(parser.parse_args())
    return args


def execute_sql(sql):
    r = requests.post('{}/sql'.format(api_root), json={'query': sql}, headers=headers)
    assert r.status_code == 200, r.text
    return r.json()

def prepare_data(ts, tables):
    for t in tables:
        execute_sql("""
CREATE TABLE if not exists `{}` (
    `t` timestamp NOT NULL,
    `tag1` string TAG,
    `tag2` string TAG,
    `value` double NOT NULL,
    `VALUE2` double NOT NULL,
    timestamp KEY (t)
);
""".format(t))

    execute_sql("""
insert into {}(t, tag1, tag2, value, VALUE2)
values
    ({}, "v1", "v2", 1, 2),
    ({}, "v1", "v2", 11, 22)
;
""".format(tables[0], ts - 5000, ts))

    execute_sql("""
insert into {}(t, tag1, tag2, value, VALUE2)
values
    ({}, "v1", "v2", 10, 20),
    ({}, "v1", "v2", 110, 220)
;
""".format(tables[1], ts - 5000, ts))

def query_and_check(ts, tables):
    # The tsid is derived from the tag values, so it is stable across restarts.
    expected = {'rows': [{'tsid': 7518337278486593135, 't': ts - 5000, 'tag1': 'v1', 'tag2': 'v2', 'value': 1.0, 'VALUE2': 2.0},
                         {'tsid': 7518337278486593135, 't': ts, 'tag1': 'v1', 'tag2': 'v2', 'value': 11.0, 'VALUE2': 22.0}]}
    expected2 = {'rows': [{'tsid': 7518337278486593135, 't': ts - 5000, 'tag1': 'v1', 'tag2': 'v2', 'value': 10.0, 'VALUE2': 20.0},
                          {'tsid': 7518337278486593135, 't': ts, 'tag1': 'v1', 'tag2': 'v2', 'value': 110.0, 'VALUE2': 220.0}]}
    expecteds = [expected, expected2]

    for idx, t in enumerate(tables):
        r = execute_sql("select * from {}".format(t))
        assert r == expecteds[idx], r

    print('Restart test pass...')

def main():
    args = get_args()
    init_before_check = args['init_before_check']
    ts = args['timestamp']
    test_tables = get_test_tables(ts)

    if init_before_check:
        print("Init before check")
        prepare_data(ts, test_tables)
    query_and_check(ts, test_tables)

if __name__ == '__main__':
    main()
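
The checker can also be driven by hand; a sketch of manual use (the timestamp must be in milliseconds, and the same value must be passed to both invocations so the generated table names match):

```bash
TS=$(date +%s000)                # epoch milliseconds
python3 ./check.py -ts ${TS} -i  # create tables, write rows, verify
# ...restart ceresdb-server...
python3 ./check.py -ts ${TS}     # verify the rows survived recovery
```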
35 changes: 35 additions & 0 deletions integration_tests/recovery/run.sh
@@ -0,0 +1,35 @@
#!/usr/bin/env bash

set -e

ROOT=$(pwd)
# `date +%N` is not supported on macOS, so build a millisecond timestamp by
# appending three zeros to the epoch seconds.
NOW=$(date +%s000)
BINARY_PATH=${ROOT}/../../target/debug/ceresdb-server
SERVER_HTTP_ENDPOINT=127.0.0.1:5440

CONFIG_FILE=${ROOT}/../../docs/minimal.toml
if [ "${1:-}" == 'shard_based' ]; then
CONFIG_FILE=${ROOT}/../config/shard-based-recovery.toml
fi

echo "Run with config: ${CONFIG_FILE}"
echo "First check..."
nohup ${BINARY_PATH} --config ${CONFIG_FILE} &
sleep 10
python3 ./check.py -ts ${NOW} -i

echo "Restart and check..."
killall ceresdb-server || true
nohup ${BINARY_PATH} --config ${CONFIG_FILE} &
sleep 10
python3 ./check.py -ts ${NOW}

echo "Flush, restart and check..."
curl -XPOST ${SERVER_HTTP_ENDPOINT}/debug/flush_memtable
echo "\nFlush finish..."
killall ceresdb-server || true
nohup ${BINARY_PATH} --config ${CONFIG_FILE} &
sleep 10
python3 ./check.py -ts ${NOW}
echo "All finish..."
