docs(autonomi): fix timeout doc #10505
Workflow file for this run
name: Check before merge | |
on: | |
# tests must run for a PR to be valid and to pass the merge queue
# on main, we want to see at a glance that every commit is passing; any deviation should help when bisecting errors
# the merge-group checks should show on main and provide this clear test/passing history
merge_group: | |
branches: [main, alpha*, beta*, rc*] | |
pull_request: | |
branches: ["*"] | |
env: | |
CARGO_INCREMENTAL: 0 # bookkeeping for incremental builds has overhead, not useful in CI. | |
WINSW_URL: https://github.com/winsw/winsw/releases/download/v3.0.0-alpha.11/WinSW-x64.exe | |
GENESIS_PK: 9377ab39708a59d02d09bfd3c9bc7548faab9e0c2a2700b9ac7d5c14f0842f0b4bb0df411b6abd3f1a92b9aa1ebf5c3d | |
GENESIS_SK: 5ec88891c1098a0fede5b98b07f8abc931d7247b7aa310d21ab430cc957f9f02 | |
jobs: | |
build: | |
runs-on: ubuntu-latest | |
steps: | |
- uses: actions/checkout@v4 | |
- uses: actions/setup-python@v5 | |
with: | |
python-version: '3.x' | |
- name: Display Python version | |
run: python --version | |
cargo-udeps: | |
if: "!startsWith(github.event.head_commit.message, 'chore(release):')" | |
name: Unused dependency check | |
runs-on: ubuntu-latest | |
steps: | |
- uses: actions/checkout@v4 | |
- name: Install Rust | |
uses: dtolnay/rust-toolchain@nightly # Needs nightly to distinguish between deps of different versions | |
with: | |
# we need rustfmt here while we have a build step | |
components: rustfmt | |
- name: Install cargo-udeps | |
run: cargo install cargo-udeps --locked | |
- name: Run cargo-udeps | |
run: cargo +nightly udeps --all-targets | |
# ignore the error caused by the latest nightly changes.
# should be fixed by https://github.com/dalek-cryptography/curve25519-dalek/pull/619 | |
continue-on-error: true | |
lint: | |
runs-on: ubuntu-latest | |
env: | |
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} | |
steps: | |
- uses: actions/checkout@v4 | |
with: | |
fetch-depth: 0 | |
- uses: wagoid/commitlint-github-action@7f0a61df502599e1f1f50880aaa7ec1e2c0592f2 | |
checks: | |
if: "!startsWith(github.event.head_commit.message, 'chore(release):')" | |
name: various checks | |
runs-on: ubuntu-latest | |
steps: | |
- uses: actions/checkout@v4 | |
- name: Install Rust | |
uses: dtolnay/rust-toolchain@stable | |
with: | |
components: rustfmt, clippy | |
- uses: Swatinem/rust-cache@v2 | |
- name: Check formatting | |
run: cargo fmt --all -- --check | |
- shell: bash | |
run: cargo clippy --all-targets --all-features -- -Dwarnings | |
- name: Check documentation | |
# Use `RUSTDOCFLAGS` to deny unwanted `rustdoc` lints. See
# https://doc.rust-lang.org/rustdoc/lints.html for lints that are 'warning' by default.
# | |
# We exclude autonomi-cli because it is not published and conflicts with the `autonomi` crate name, | |
# resulting in an error when building docs. | |
run: RUSTDOCFLAGS="--deny=warnings" cargo doc --no-deps --workspace --exclude=autonomi-cli | |
- name: Check local is not a default feature
shell: bash
run: |
if [[ ! $(cargo metadata --no-deps --format-version 1 | jq -r '.packages[].features.default[]? | select(. == "local")') ]]; then
echo "local is not a default feature in any package."
else
echo "local is a default feature in at least one package."
exit 1
fi
- name: Clean out the target directory | |
run: cargo clean | |
# In a cargo workspace, feature unification can allow a crate to build successfully even if it
# doesn't explicitly declare a feature it uses, provided another crate in the workspace enables that feature.
# To detect such cases, we must build each crate with the `--package` flag; building all packages at once does not work.
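# Hypothetical illustration of the failure mode (crate names are made up): if crate_a enables
# `serde = { version = "1", features = ["derive"] }` and crate_b uses `#[derive(serde::Serialize)]`
# without declaring the `derive` feature itself, `cargo build --workspace` may still pass through
# feature unification, while `cargo build -p crate_b` fails; that is what the per-package loop below surfaces.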
- name: Check the whole workspace can build | |
shell: bash | |
run: | | |
for package in $(cargo metadata --no-deps --format-version=1 | jq -r '.packages[].name'); do | |
cargo build -p "$package" --all-targets --all-features | |
done | |
echo "All packages built successfully. Cleaning up..." | |
cargo clean | |
unit: | |
if: "!startsWith(github.event.head_commit.message, 'chore(release):')" | |
name: Unit Tests | |
runs-on: ${{ matrix.os }} | |
strategy: | |
matrix: | |
os: [ubuntu-latest, windows-latest, macos-latest] | |
steps: | |
- uses: actions/checkout@v4 | |
- name: Check we're on the right commit | |
run: git log -1 --oneline | |
- name: Install Rust | |
uses: dtolnay/rust-toolchain@stable | |
- uses: Swatinem/rust-cache@v2 | |
- name: Run autonomi tests | |
timeout-minutes: 25 | |
run: cargo test --release --package autonomi --lib --features="full,fs" | |
- name: Run node tests | |
timeout-minutes: 25 | |
run: cargo test --release --package sn_node --lib | |
# The `can_store_after_restart` test passes when executed together with the other package tests
# on a local machine, but it keeps failing on CI machines when executed together with them.
# This is most likely due to test setup and concurrency issues.
# As the `record_store` is used in a single-threaded style, having the test pass when
# executed standalone is sufficient.
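# For local debugging, the same split used below can be reproduced with:
#   cargo test --release --package sn_networking --features="open-metrics" -- --skip can_store_after_restart
#   cargo test --release --package sn_networking --features="open-metrics" can_store_after_restart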
- name: Run network tests (with encrypt-records) | |
timeout-minutes: 25 | |
run: cargo test --release --package sn_networking --features="open-metrics, encrypt-records" -- --skip can_store_after_restart | |
- name: Run can_store_after_restart standalone (with encrypt-records)
timeout-minutes: 5 | |
run: cargo test --release --package sn_networking --features="open-metrics, encrypt-records" can_store_after_restart | |
- name: Run network tests (without encrypt-records) | |
timeout-minutes: 25 | |
run: cargo test --release --package sn_networking --features="open-metrics" -- --skip can_store_after_restart | |
- name: Run can_store_after_restart standalone (without encrypt-records)
timeout-minutes: 5 | |
run: cargo test --release --package sn_networking --features="open-metrics" can_store_after_restart | |
- name: Run protocol tests | |
timeout-minutes: 25 | |
run: cargo test --release --package sn_protocol | |
- name: Run transfers tests | |
timeout-minutes: 25 | |
run: cargo test --release --package sn_transfers | |
- name: Run logging tests | |
timeout-minutes: 25 | |
run: cargo test --release --package sn_logging | |
- name: Run register tests | |
timeout-minutes: 25 | |
run: cargo test --release --package sn_registers | |
env: | |
# this will speed up PR merge flows, while giving us a modicum | |
# of proptesting | |
# we do many more runs on the nightly run | |
PROPTEST_CASES: 50 | |
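# PROPTEST_CASES is read by the `proptest` crate's env-driven Config (assumption: the property
# tests here use the default Config). A hypothetical test such as
#   proptest! { #[test] fn roundtrip(x in any::<u64>()) { prop_assert_eq!(decode(encode(x)), x); } }
# runs 50 cases here instead of proptest's default of 256; `encode`/`decode` are illustrative names.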
e2e: | |
if: "!startsWith(github.event.head_commit.message, 'chore(release):')" | |
name: E2E tests | |
runs-on: ${{ matrix.os }} | |
strategy: | |
matrix: | |
include: | |
- os: ubuntu-latest | |
safe_path: /home/runner/.local/share/safe | |
- os: windows-latest | |
safe_path: C:\\Users\\runneradmin\\AppData\\Roaming\\safe | |
- os: macos-latest | |
safe_path: /Users/runner/Library/Application\ Support/safe | |
steps: | |
- uses: actions/checkout@v4 | |
- name: Install Rust | |
uses: dtolnay/rust-toolchain@stable | |
- uses: Swatinem/rust-cache@v2 | |
- name: Build binaries | |
run: cargo build --release --features local --bin safenode --bin autonomi | |
timeout-minutes: 30 | |
- name: Start a local network | |
uses: maidsafe/sn-local-testnet-action@main | |
with: | |
action: start | |
enable-evm-testnet: true | |
node-path: target/release/safenode | |
platform: ${{ matrix.os }} | |
build: true | |
- name: Check if SAFE_PEERS and EVM_NETWORK are set | |
shell: bash | |
run: | | |
if [[ -z "$SAFE_PEERS" ]]; then | |
echo "The SAFE_PEERS variable has not been set" | |
exit 1 | |
elif [[ -z "$EVM_NETWORK" ]]; then | |
echo "The EVM_NETWORK variable has not been set" | |
exit 1 | |
else | |
echo "SAFE_PEERS has been set to $SAFE_PEERS" | |
echo "EVM_NETWORK has been set to $EVM_NETWORK" | |
fi | |
# only these tests require a network; the rest are run above in the unit job
- name: Run autonomi --tests | |
run: cargo test --package autonomi --tests -- --nocapture | |
env: | |
SN_LOG: "v" | |
# only set the target dir for windows, to bypass the linker issue that
# happens if we build the node manager via the testnet action
CARGO_TARGET_DIR: ${{ matrix.os == 'windows-latest' && './test-target' || '.' }} | |
timeout-minutes: 15 | |
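# `${{ matrix.os == 'windows-latest' && './test-target' || '.' }}` is the usual GitHub-expressions
# stand-in for a ternary: it evaluates to './test-target' on Windows and '.' elsewhere.
# The same pattern is reused wherever CARGO_TARGET_DIR is set in this file.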
# FIXME: do this in a generic way for local testnets
- name: export default secret key | |
if: matrix.os != 'windows-latest' | |
run: echo "SECRET_KEY=0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80" >> $GITHUB_ENV | |
shell: bash | |
- name: Set secret key for Windows | |
if: matrix.os == 'windows-latest' | |
run: echo "SECRET_KEY=0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80" | Out-File -FilePath $env:GITHUB_ENV -Encoding utf8 -Append | |
shell: pwsh | |
- name: Get file cost | |
run: ./target/release/autonomi --log-output-dest=data-dir file cost "./resources" | |
env: | |
SN_LOG: "v" | |
timeout-minutes: 15 | |
- name: File upload | |
run: ./target/release/autonomi --log-output-dest=data-dir file upload "./resources" > ./upload_output 2>&1 | |
env: | |
SN_LOG: "v" | |
timeout-minutes: 15 | |
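# The parse steps below use `rg -o -r '$1'` to print only the regex capture group,
# i.e. the hex address that follows "At address: " in the upload output.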
- name: parse address (unix) | |
if: matrix.os != 'windows-latest' | |
run: | | |
UPLOAD_ADDRESS=$(rg "At address: ([0-9a-f]*)" -o -r '$1' ./upload_output) | |
echo "UPLOAD_ADDRESS=$UPLOAD_ADDRESS" >> $GITHUB_ENV | |
shell: bash | |
- name: parse address (win) | |
if: matrix.os == 'windows-latest' | |
run: | | |
$UPLOAD_ADDRESS = rg "At address: ([0-9a-f]*)" -o -r '$1' ./upload_output | |
echo "UPLOAD_ADDRESS=$UPLOAD_ADDRESS" | Out-File -FilePath $env:GITHUB_ENV -Encoding utf8 -Append | |
shell: pwsh | |
- name: File Download | |
run: ./target/release/autonomi --log-output-dest=data-dir file download ${{ env.UPLOAD_ADDRESS }} ./downloaded_resources | |
env: | |
SN_LOG: "v" | |
timeout-minutes: 5 | |
- name: Generate register signing key | |
run: ./target/release/autonomi --log-output-dest=data-dir register generate-key | |
- name: Create register (writeable by owner) | |
run: ./target/release/autonomi --log-output-dest=data-dir register create baobao 123 > ./register_create_output 2>&1 | |
env: | |
SN_LOG: "v" | |
timeout-minutes: 10 | |
- name: parse register address (unix) | |
if: matrix.os != 'windows-latest' | |
run: | | |
REGISTER_ADDRESS=$(rg "Register created at address: ([0-9a-f]*)" -o -r '$1' ./register_create_output) | |
echo "REGISTER_ADDRESS=$REGISTER_ADDRESS" >> $GITHUB_ENV | |
shell: bash | |
- name: parse register address (win) | |
if: matrix.os == 'windows-latest' | |
run: | | |
$REGISTER_ADDRESS = rg "Register created at address: ([0-9a-f]*)" -o -r '$1' ./register_create_output | |
echo "REGISTER_ADDRESS=$REGISTER_ADDRESS" | Out-File -FilePath $env:GITHUB_ENV -Encoding utf8 -Append | |
shell: pwsh | |
- name: Get register | |
run: ./target/release/autonomi --log-output-dest=data-dir register get ${{ env.REGISTER_ADDRESS }} | |
env: | |
SN_LOG: "v" | |
timeout-minutes: 5 | |
- name: Edit register | |
run: ./target/release/autonomi --log-output-dest=data-dir register edit ${{ env.REGISTER_ADDRESS }} 456 | |
env: | |
SN_LOG: "v" | |
timeout-minutes: 10 | |
- name: Get register (after edit) | |
run: ./target/release/autonomi --log-output-dest=data-dir register get ${{ env.REGISTER_ADDRESS }} | |
env: | |
SN_LOG: "v" | |
timeout-minutes: 5 | |
- name: Create Public Register (writeable by anyone) | |
run: ./target/release/autonomi --log-output-dest=data-dir register create bao 111 --public > ./register_public_create_output 2>&1 | |
env: | |
SN_LOG: "v" | |
timeout-minutes: 5 | |
- name: parse public register address (unix) | |
if: matrix.os != 'windows-latest' | |
run: | | |
PUBLIC_REGISTER_ADDRESS=$(rg "Register created at address: ([0-9a-f]*)" -o -r '$1' ./register_public_create_output) | |
echo "PUBLIC_REGISTER_ADDRESS=$PUBLIC_REGISTER_ADDRESS" >> $GITHUB_ENV | |
shell: bash | |
- name: parse public register address (win) | |
if: matrix.os == 'windows-latest' | |
run: | | |
$PUBLIC_REGISTER_ADDRESS = rg "Register created at address: ([0-9a-f]*)" -o -r '$1' ./register_public_create_output | |
echo "PUBLIC_REGISTER_ADDRESS=$PUBLIC_REGISTER_ADDRESS" | Out-File -FilePath $env:GITHUB_ENV -Encoding utf8 -Append | |
shell: pwsh | |
- name: Get Public Register (current key is the owner) | |
run: ./target/release/autonomi --log-output-dest=data-dir register get ${{ env.PUBLIC_REGISTER_ADDRESS }} | |
env: | |
SN_LOG: "v" | |
timeout-minutes: 5 | |
- name: Edit Public Register (current key is the owner) | |
run: ./target/release/autonomi --log-output-dest=data-dir register edit ${{ env.PUBLIC_REGISTER_ADDRESS }} 222 | |
env: | |
SN_LOG: "v" | |
timeout-minutes: 10 | |
- name: Delete current register signing key | |
shell: bash | |
run: rm -rf ${{ matrix.safe_path }}/autonomi | |
- name: Generate new register signing key | |
run: ./target/release/autonomi --log-output-dest=data-dir register generate-key | |
- name: Get Public Register (new signing key is not the owner) | |
run: ./target/release/autonomi --log-output-dest=data-dir register get ${{ env.PUBLIC_REGISTER_ADDRESS }} | |
env: | |
SN_LOG: "v" | |
timeout-minutes: 2 | |
- name: Edit Public Register (new signing key is not the owner) | |
run: ./target/release/autonomi --log-output-dest=data-dir register edit ${{ env.PUBLIC_REGISTER_ADDRESS }} 333 | |
env: | |
SN_LOG: "v" | |
timeout-minutes: 10 | |
- name: Get Public Register (new signing key is not the owner) | |
run: ./target/release/autonomi --log-output-dest=data-dir register get ${{ env.PUBLIC_REGISTER_ADDRESS }} | |
env: | |
SN_LOG: "v" | |
timeout-minutes: 2 | |
- name: create local user file | |
run: echo random > random.txt | |
env: | |
SN_LOG: "v" | |
timeout-minutes: 2 | |
- name: file upload | |
run: ./target/release/autonomi --log-output-dest=data-dir file upload random.txt | |
env: | |
SN_LOG: "v" | |
timeout-minutes: 2 | |
- name: create a local register | |
run: ./target/release/autonomi --log-output-dest=data-dir register create sample_new_register 1234 | |
env: | |
SN_LOG: "v" | |
timeout-minutes: 2 | |
- name: Estimate cost to create a vault | |
run: ./target/release/autonomi --log-output-dest=data-dir vault cost | |
env: | |
SN_LOG: "v" | |
timeout-minutes: 2 | |
- name: create a vault with existing user data as above | |
run: ./target/release/autonomi --log-output-dest=data-dir vault create | |
env: | |
SN_LOG: "v" | |
timeout-minutes: 2 | |
- name: add more files - linux/macos | |
if: matrix.os != 'windows-latest' | |
run: | | |
set -e | |
for i in {1..100}; do | |
dd if=/dev/urandom of=random_file_$i.bin bs=1M count=1 status=none | |
./target/release/autonomi --log-output-dest=data-dir file upload random_file_$i.bin --public | |
./target/release/autonomi --log-output-dest=data-dir file upload random_file_$i.bin | |
./target/release/autonomi --log-output-dest=data-dir register create $i random_file_$i.bin | |
done | |
env: | |
SN_LOG: "v" | |
timeout-minutes: 25 | |
- name: add more files - windows | |
if: matrix.os == 'windows-latest' | |
shell: pwsh | |
run: | | |
$ErrorActionPreference = "Stop" | |
for ($i = 1; $i -le 100; $i++) { | |
$fileName = "random_file_$i.bin" | |
$byteArray = [byte[]]@(0xFF) * (1MB) # Create a 1 MB array filled with 0xFF | |
[System.IO.File]::WriteAllBytes($fileName, $byteArray) | |
# Run autonomi commands | |
./target/release/autonomi --log-output-dest=data-dir file upload "random_file_$i.bin" --public | |
./target/release/autonomi --log-output-dest=data-dir file upload "random_file_$i.bin" | |
./target/release/autonomi --log-output-dest=data-dir register create $i "random_file_$i.bin" | |
} | |
env: | |
SN_LOG: "v" | |
timeout-minutes: 25 | |
- name: sync the vault | |
run: ./target/release/autonomi --log-output-dest=data-dir vault sync | |
env: | |
SN_LOG: "v" | |
timeout-minutes: 2 | |
- name: load the vault from network | |
run: ./target/release/autonomi --log-output-dest=data-dir vault load | |
env: | |
SN_LOG: "v" | |
timeout-minutes: 2 | |
- name: vault sync validation | |
if: matrix.os != 'windows-latest' | |
shell: bash | |
run: | | |
set -e | |
NUM_OF_PUBLIC_FILES="" | |
NUM_OF_PRIVATE_FILES="" | |
NUM_OF_REGISTERS="" | |
NUM_OF_PUBLIC_FILES_IN_VAULT="" | |
NUM_OF_PRIVATE_FILES_IN_VAULT="" | |
NUM_OF_REGISTERS_IN_VAULT="" | |
./target/release/autonomi --log-output-dest=data-dir file list 2>&1 > file_list.txt | |
./target/release/autonomi register list | grep register > register_list.txt | |
NUM_OF_PUBLIC_FILES=`cat file_list.txt | grep "public" | grep -o '[0-9]\+'` | |
NUM_OF_PRIVATE_FILES=`cat file_list.txt | grep "private" | grep -o '[0-9]\+'` | |
NUM_OF_REGISTERS=`cat register_list.txt | grep "register" | grep -o '[0-9]\+'` | |
# the register listing output can contain extra matches; keep only the first number
NUM_OF_REGISTERS_first=${NUM_OF_REGISTERS%%[ $'\n']*} | |
echo "NUM_OF_REGISTERS is $NUM_OF_REGISTERS_first" | |
./target/release/autonomi --log-output-dest=data-dir vault load 2>&1 > vault_data.txt | |
NUM_OF_PUBLIC_FILES_IN_VAULT=`cat vault_data.txt | grep "public" | grep -o '[0-9]\+'` | |
NUM_OF_PRIVATE_FILES_IN_VAULT=`cat vault_data.txt| grep "private" | grep -o '[0-9]\+'` | |
NUM_OF_REGISTERS_IN_VAULT=`cat vault_data.txt | grep "register" | grep -o '[0-9]\+'` | |
echo "Total Num of local public files is $NUM_OF_PUBLIC_FILES and in vault is $NUM_OF_PUBLIC_FILES_IN_VAULT" | |
echo "Total Num of local private files is $NUM_OF_PRIVATE_FILES and in vault is $NUM_OF_PRIVATE_FILES_IN_VAULT" | |
echo "Total Num of local registers is $NUM_OF_REGISTERS_first and in vault is $NUM_OF_REGISTERS_IN_VAULT" | |
rm -rf file_list.txt register_list.txt vault_data.txt | |
python3 -c 'import sys; assert sys.argv[1] == sys.argv[2], f"Error: local data and the vault on the network do not match, local public files: {sys.argv[1]} and vault public files: {sys.argv[2]} are not equal"' $NUM_OF_PUBLIC_FILES $NUM_OF_PUBLIC_FILES_IN_VAULT
python3 -c 'import sys; assert sys.argv[1] == sys.argv[2], f"Error: local data and the vault on the network do not match, local private files: {sys.argv[1]} and vault private files: {sys.argv[2]} are not equal"' $NUM_OF_PRIVATE_FILES $NUM_OF_PRIVATE_FILES_IN_VAULT
python3 -c 'import sys; assert sys.argv[1] == sys.argv[2], f"Error: local data and the vault on the network do not match, local registers: {sys.argv[1]} and vault registers: {sys.argv[2]} are not equal"' $NUM_OF_REGISTERS_first $NUM_OF_REGISTERS_IN_VAULT
echo "vault synced successfully!" | |
env: | |
SN_LOG: "v" | |
timeout-minutes: 15 | |
- name: Set up variables - vault sync - windows | |
if: matrix.os == 'windows-latest' | |
shell: pwsh | |
run: | | |
$ErrorActionPreference = "Stop" | |
./target/release/autonomi --log-output-dest=data-dir file list > file_list.txt 2>&1 | |
./target/release/autonomi register list > register_list.txt 2>&1 | |
./target/release/autonomi --log-output-dest=data-dir vault load > vault_data.txt 2>&1 | |
env: | |
SN_LOG: "v" | |
timeout-minutes: 15 | |
- name: Vault sync validation | |
if: matrix.os == 'windows-latest' | |
shell: python | |
run: | | |
import re | |
def find_number_before_word(file_name, search_word): | |
""" | |
Reads a file and finds the number immediately preceding a specified word in a line. | |
:param file_name: Name of the file to read. | |
:param search_word: Word to search for in the file. | |
:return: The number before the word as an integer, or None if not found. | |
""" | |
try: | |
with open(file_name, 'r') as file: | |
for line in file: | |
if search_word in line: | |
match = re.search(r'(\d+)\s+' + re.escape(search_word), line) | |
if match: | |
return int(match.group(1)) # Convert to integer | |
return None # Return None if no match is found | |
except FileNotFoundError: | |
print(f"Error: File '{file_name}' not found.") | |
return None | |
NUM_OF_PUBLIC_FILES = find_number_before_word("file_list.txt", "public") | |
print("NUM_OF_PUBLIC_FILES:", NUM_OF_PUBLIC_FILES) | |
NUM_OF_PRIVATE_FILES = find_number_before_word("file_list.txt", "private") | |
print("NUM_OF_PRIVATE_FILES:", NUM_OF_PRIVATE_FILES) | |
NUM_OF_REGISTERS_FILES = find_number_before_word("register_list.txt", "register") | |
print("NUM_OF_REGISTERS_FILES:", NUM_OF_REGISTERS_FILES) | |
NUM_OF_PUBLIC_FILES_IN_VAULT = find_number_before_word("vault_data.txt", "public") | |
print("NUM_OF_PUBLIC_FILES_IN_VAULT:", NUM_OF_PUBLIC_FILES_IN_VAULT) | |
NUM_OF_PRIVATE_FILES_IN_VAULT = find_number_before_word("vault_data.txt", "private") | |
print("NUM_OF_PRIVATE_FILES_IN_VAULT:", NUM_OF_PRIVATE_FILES_IN_VAULT) | |
NUM_OF_REGISTERS_IN_VAULT = find_number_before_word("vault_data.txt", "register") | |
print("NUM_OF_PRIVATE_FILES_IN_VAULT:", NUM_OF_PRIVATE_FILES_IN_VAULT) | |
# Assertions | |
assert NUM_OF_PUBLIC_FILES == NUM_OF_PUBLIC_FILES_IN_VAULT, f"Error: local data and the vault on the network do not match, local public files: {NUM_OF_PUBLIC_FILES} and vault public files: {NUM_OF_PUBLIC_FILES_IN_VAULT} are not equal"
assert NUM_OF_PRIVATE_FILES == NUM_OF_PRIVATE_FILES_IN_VAULT, f"Error: local data and the vault on the network do not match, local private files: {NUM_OF_PRIVATE_FILES} and vault private files: {NUM_OF_PRIVATE_FILES_IN_VAULT} are not equal"
assert NUM_OF_REGISTERS_FILES == NUM_OF_REGISTERS_IN_VAULT, f"Error: local data and the vault on the network do not match, local registers: {NUM_OF_REGISTERS_FILES} and vault registers: {NUM_OF_REGISTERS_IN_VAULT} are not equal"
print("Vault synced successfully!") | |
env: | |
SN_LOG: "v" | |
timeout-minutes: 2 | |
- name: load an existing vault from the network | |
run: ./target/release/autonomi --log-output-dest=data-dir vault load | |
env: | |
SN_LOG: "v" | |
timeout-minutes: 2 | |
- name: Time profiling for different file sizes
if: matrix.os != 'windows-latest' | |
run: | | |
set -e | |
# 1 MB | |
python3 -c "with open('random_1MB.bin', 'wb') as f: f.write(bytearray([0xff] * 1 * 1024 * 1024))" | |
# 10 MB | |
python3 -c "with open('random_10MB.bin', 'wb') as f: f.write(bytearray([0xff] * 10 * 1024 * 1024))" | |
# 100 MB | |
python3 -c "with open('random_100MB.bin', 'wb') as f: f.write(bytearray([0xff] * 100 * 1024 * 1024))" | |
# 1 GB | |
python3 -c "with open('random_1GB.bin', 'wb') as f: f.write(bytearray([0xff] * 1000 * 1024 * 1024))" | |
./target/release/autonomi --log-output-dest=data-dir file list | |
time ./target/release/autonomi --log-output-dest=data-dir file upload random_1MB.bin | |
time ./target/release/autonomi --log-output-dest=data-dir file upload random_10MB.bin | |
time ./target/release/autonomi --log-output-dest=data-dir file upload random_100MB.bin | |
time ./target/release/autonomi --log-output-dest=data-dir file upload random_1GB.bin | |
./target/release/autonomi --log-output-dest=data-dir vault sync | |
rm -rf random*.bin | |
rm -rf ${{ matrix.safe_path }}/autonomi | |
env: | |
SN_LOG: "v" | |
timeout-minutes: 15 | |
- name: Stop the local network and upload logs | |
if: always() | |
uses: maidsafe/sn-local-testnet-action@main | |
with: | |
action: stop | |
log_file_prefix: safe_test_logs_e2e | |
platform: ${{ matrix.os }} | |
# spend_test: | |
# if: "!startsWith(github.event.head_commit.message, 'chore(release):')" | |
# name: spend tests against network | |
# runs-on: ${{ matrix.os }} | |
# strategy: | |
# matrix: | |
# os: [ubuntu-latest, windows-latest, macos-latest] | |
# steps: | |
# - uses: actions/checkout@v4 | |
# - name: Install Rust | |
# uses: dtolnay/rust-toolchain@stable | |
# - uses: Swatinem/rust-cache@v2 | |
# - name: Build binaries | |
# run: cargo build --release --features=local --bin safenode | |
# timeout-minutes: 30 | |
# - name: Build faucet binary | |
# run: cargo build --release --bin faucet --features="local,gifting" | |
# timeout-minutes: 30 | |
# - name: Build testing executable | |
# run: cargo test --release -p sn_node --features=local --test sequential_transfers --test storage_payments --test double_spend --no-run | |
# env: | |
# # only set the target dir for windows to bypass the linker issue. | |
# # happens if we build the node manager via testnet action | |
# CARGO_TARGET_DIR: ${{ matrix.os == 'windows-latest' && './test-target' || '.' }} | |
# timeout-minutes: 30 | |
# - name: Start a local network | |
# uses: maidsafe/sn-local-testnet-action@main | |
# with: | |
# action: start | |
# interval: 2000 | |
# node-path: target/release/safenode | |
# faucet-path: target/release/faucet | |
# platform: ${{ matrix.os }} | |
# build: true | |
# - name: Check SAFE_PEERS was set | |
# shell: bash | |
# run: | | |
# if [[ -z "$SAFE_PEERS" ]]; then | |
# echo "The SAFE_PEERS variable has not been set" | |
# exit 1 | |
# else | |
# echo "SAFE_PEERS has been set to $SAFE_PEERS" | |
# fi | |
# - name: execute the sequential transfers tests | |
# run: cargo test --release -p sn_node --features="local" --test sequential_transfers -- --nocapture --test-threads=1 | |
# env: | |
# SN_LOG: "all" | |
# CARGO_TARGET_DIR: ${{ matrix.os == 'windows-latest' && './test-target' || '.' }} | |
# timeout-minutes: 25 | |
# - name: execute the storage payment tests | |
# run: cargo test --release -p sn_node --features="local" --test storage_payments -- --nocapture --test-threads=1 | |
# env: | |
# SN_LOG: "all" | |
# CARGO_TARGET_DIR: ${{ matrix.os == 'windows-latest' && './test-target' || '.' }} | |
# timeout-minutes: 25 | |
# - name: execute the double spend tests | |
# run: cargo test --release -p sn_node --features="local" --test double_spend -- --nocapture --test-threads=1 | |
# env: | |
# CARGO_TARGET_DIR: ${{ matrix.os == 'windows-latest' && './test-target' || '.' }} | |
# timeout-minutes: 25 | |
# - name: Stop the local network and upload logs | |
# if: always() | |
# uses: maidsafe/sn-local-testnet-action@main | |
# with: | |
# action: stop | |
# log_file_prefix: safe_test_logs_spend | |
# platform: ${{ matrix.os }} | |
# # runs with increased node count | |
# spend_simulation: | |
# if: "!startsWith(github.event.head_commit.message, 'chore(release):')" | |
# name: spend simulation | |
# runs-on: ${{ matrix.os }} | |
# strategy: | |
# matrix: | |
# os: [ ubuntu-latest, windows-latest, macos-latest ] | |
# steps: | |
# - uses: actions/checkout@v4 | |
# - name: Install Rust | |
# uses: dtolnay/rust-toolchain@stable | |
# - uses: Swatinem/rust-cache@v2 | |
# - name: Build binaries | |
# run: cargo build --release --features=local --bin safenode | |
# timeout-minutes: 30 | |
# - name: Build faucet binary | |
# run: cargo build --release --bin faucet --features="local,gifting" | |
# timeout-minutes: 30 | |
# - name: Build testing executable | |
# run: cargo test --release -p sn_node --features=local --test spend_simulation --no-run | |
# env: | |
# # only set the target dir for windows to bypass the linker issue. | |
# # happens if we build the node manager via testnet action | |
# CARGO_TARGET_DIR: ${{ matrix.os == 'windows-latest' && './test-target' || '.' }} | |
# timeout-minutes: 30 | |
# - name: Start a local network | |
# uses: maidsafe/sn-local-testnet-action@main | |
# with: | |
# action: start | |
# interval: 2000 | |
# node-count: 50 | |
# node-path: target/release/safenode | |
# faucet-path: target/release/faucet | |
# platform: ${{ matrix.os }} | |
# build: true | |
# - name: Check SAFE_PEERS was set | |
# shell: bash | |
# run: | | |
# if [[ -z "$SAFE_PEERS" ]]; then | |
# echo "The SAFE_PEERS variable has not been set" | |
# exit 1 | |
# else | |
# echo "SAFE_PEERS has been set to $SAFE_PEERS" | |
# fi | |
# - name: execute the spend simulation | |
# run: cargo test --release -p sn_node --features="local" --test spend_simulation -- --nocapture | |
# env: | |
# CARGO_TARGET_DIR: ${{ matrix.os == 'windows-latest' && './test-target' || '.' }} | |
# timeout-minutes: 25 | |
# - name: Stop the local network and upload logs | |
# if: always() | |
# uses: maidsafe/sn-local-testnet-action@main | |
# with: | |
# action: stop | |
# log_file_prefix: safe_test_logs_spend_simulation | |
# platform: ${{ matrix.os }} | |
# token_distribution_test: | |
# if: "!startsWith(github.event.head_commit.message, 'chore(release):')" | |
# name: token distribution test | |
# runs-on: ${{ matrix.os }} | |
# strategy: | |
# matrix: | |
# os: [ubuntu-latest, windows-latest, macos-latest] | |
# steps: | |
# - uses: actions/checkout@v4 | |
# - name: Install Rust | |
# uses: dtolnay/rust-toolchain@stable | |
# - uses: Swatinem/rust-cache@v2 | |
# - name: Build binaries | |
# run: cargo build --release --features=local,distribution --bin safenode | |
# timeout-minutes: 35 | |
# - name: Build faucet binary | |
# run: cargo build --release --features=local,distribution,gifting --bin faucet | |
# timeout-minutes: 35 | |
# - name: Build testing executable | |
# run: cargo test --release --features=local,distribution --no-run | |
# env: | |
# # only set the target dir for windows to bypass the linker issue. | |
# # happens if we build the node manager via testnet action | |
# CARGO_TARGET_DIR: ${{ matrix.os == 'windows-latest' && './test-target' || '.' }} | |
# timeout-minutes: 35 | |
# - name: Start a local network | |
# uses: maidsafe/sn-local-testnet-action@main | |
# with: | |
# action: start | |
# interval: 2000 | |
# node-path: target/release/safenode | |
# faucet-path: target/release/faucet | |
# platform: ${{ matrix.os }} | |
# build: true | |
# - name: Check SAFE_PEERS was set | |
# shell: bash | |
# run: | | |
# if [[ -z "$SAFE_PEERS" ]]; then | |
# echo "The SAFE_PEERS variable has not been set" | |
# exit 1 | |
# else | |
# echo "SAFE_PEERS has been set to $SAFE_PEERS" | |
# fi | |
# - name: execute token_distribution tests | |
# run: cargo test --release --features=local,distribution token_distribution -- --nocapture --test-threads=1 | |
# env: | |
# SN_LOG: "all" | |
# CARGO_TARGET_DIR: ${{ matrix.os == 'windows-latest' && './test-target' || '.' }} | |
# timeout-minutes: 25 | |
# - name: Stop the local network and upload logs | |
# if: always() | |
# uses: maidsafe/sn-local-testnet-action@main | |
# with: | |
# action: stop | |
# log_file_prefix: safe_test_logs_token_distribution | |
# platform: ${{ matrix.os }} | |
churn: | |
if: "!startsWith(github.event.head_commit.message, 'chore(release):')" | |
name: Network churning tests | |
runs-on: ${{ matrix.os }} | |
strategy: | |
matrix: | |
include: | |
- os: ubuntu-latest | |
node_data_path: /home/runner/.local/share/safe/node | |
safe_path: /home/runner/.local/share/safe | |
- os: windows-latest | |
node_data_path: C:\\Users\\runneradmin\\AppData\\Roaming\\safe\\node | |
safe_path: C:\\Users\\runneradmin\\AppData\\Roaming\\safe | |
- os: macos-latest | |
node_data_path: /Users/runner/Library/Application Support/safe/node | |
safe_path: /Users/runner/Library/Application Support/safe | |
steps: | |
- uses: actions/checkout@v4 | |
- uses: dtolnay/rust-toolchain@stable | |
- uses: Swatinem/rust-cache@v2 | |
- name: Build binaries | |
run: cargo build --release --features local --bin safenode | |
timeout-minutes: 30 | |
- name: Build churn tests | |
run: cargo test --release -p sn_node --features=local --test data_with_churn --no-run | |
env: | |
# only set the target dir for windows, to bypass the linker issue that
# happens if we build the node manager via the testnet action
CARGO_TARGET_DIR: ${{ matrix.os == 'windows-latest' && './test-target' || '.' }} | |
timeout-minutes: 30 | |
- name: Start a local network | |
uses: maidsafe/sn-local-testnet-action@main | |
with: | |
action: start | |
enable-evm-testnet: true | |
node-path: target/release/safenode | |
platform: ${{ matrix.os }} | |
build: true | |
- name: Check if SAFE_PEERS and EVM_NETWORK are set | |
shell: bash | |
run: | | |
if [[ -z "$SAFE_PEERS" ]]; then | |
echo "The SAFE_PEERS variable has not been set" | |
exit 1 | |
elif [[ -z "$EVM_NETWORK" ]]; then | |
echo "The EVM_NETWORK variable has not been set" | |
exit 1 | |
else | |
echo "SAFE_PEERS has been set to $SAFE_PEERS" | |
echo "EVM_NETWORK has been set to $EVM_NETWORK" | |
fi | |
- name: Chunks data integrity during nodes churn | |
run: cargo test --release -p sn_node --features=local --test data_with_churn -- --nocapture | |
env: | |
TEST_DURATION_MINS: 5 | |
TEST_TOTAL_CHURN_CYCLES: 15 | |
SN_LOG: "all" | |
CARGO_TARGET_DIR: ${{ matrix.os == 'windows-latest' && './test-target' || '.' }} | |
timeout-minutes: 30 | |
# Sleep for a while so that restarted nodes can be detected by others
- name: Sleep a while | |
run: sleep 300 | |
- name: Stop the local network and upload logs | |
if: always() | |
uses: maidsafe/sn-local-testnet-action@main | |
with: | |
action: stop | |
log_file_prefix: safe_test_logs_churn | |
platform: ${{ matrix.os }} | |
- name: Get total node count | |
shell: bash | |
timeout-minutes: 1 | |
run: | | |
node_count=$(ls "${{ matrix.node_data_path }}" | wc -l) | |
echo "Node dir count is $node_count" | |
- name: Get restart count of nodes using rg
shell: bash | |
timeout-minutes: 1 | |
# get the counts, then the specific line, and then the digit count only | |
# then check we have an expected level of restarts | |
# TODO: make this use an env var, or relate to testnet size | |
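# `rg -c --stats` prints per-file match counts plus a summary block; the "<N> matches" line in
# that summary is what the second and third rg invocations below extract as a single number.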
run: | | |
restart_count=$(rg "Node is restarting in" "${{ matrix.node_data_path }}" -c --stats | \ | |
rg "(\d+) matches" | rg "\d+" -o) | |
echo "Restarted $restart_count nodes" | |
# `PeerRemovedFromRoutingTable` now only happens when a peer is reported as a `BadNode`.
# Otherwise kad will remove a `dropped out node` directly from the RT.
# So explicitly detecting the removal is now much less likely,
# due to the removal of connection_issue tracking.
- name: Get peers removed from nodes using rg | |
shell: bash | |
timeout-minutes: 1 | |
run: | | |
peer_removed=$(rg "PeerRemovedFromRoutingTable" "${{ matrix.node_data_path }}" -c --stats | \ | |
rg "(\d+) matches" | rg "\d+" -o) || { echo "Failed to extract peer removal count"; exit 0; } | |
if [ -z "$peer_removed" ]; then | |
echo "No peer removal count found" | |
exit 1 | |
fi | |
echo "PeerRemovedFromRoutingTable $peer_removed times" | |
# TODO: reenable this once the testnet dir creation is tidied up to avoid a large count here | |
# if [ $restart_count -lt $node_count ]; then | |
# echo "Restart count of: $restart_count is less than the node count of: $node_count" | |
# exit 1 | |
# fi | |
- name: Verify data replication using rg | |
shell: bash | |
timeout-minutes: 1 | |
# get the counts, then the specific line, and then the digit count only | |
# then check we have an expected level of replication | |
# TODO: make this use an env var, or relate to testnet size | |
run: | | |
fetching_attempt_count=$(rg "FetchingKeysForReplication" "${{ matrix.node_data_path }}" -c --stats | \ | |
rg "(\d+) matches" | rg "\d+" -o) | |
echo "Carried out $fetching_attempt_count fetching attempts" | |
node_count=$(ls "${{ matrix.node_data_path }}" | wc -l) | |
if [ $fetching_attempt_count -lt $node_count ]; then | |
echo "Replication fetching attempts of: $fetching_attempt_count is less than the node count of: $node_count" | |
exit 1 | |
fi | |
# Only error out after uploading the logs | |
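# The check below fails the job if any single log line exceeds 15000 characters,
# which is taken as a sign that raw record data ended up in the logs.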
- name: Don't log raw data | |
if: matrix.os != 'windows-latest' # causes error | |
shell: bash | |
timeout-minutes: 10 | |
run: | | |
if ! rg '^' "${{ matrix.safe_path }}"/*/*/logs | awk 'length($0) > 15000 { print; exit 1 }' | |
then | |
echo "We are logging an extremely large data" | |
exit 1 | |
fi | |
verify_data_location_routing_table: | |
if: "!startsWith(github.event.head_commit.message, 'chore(release):')" | |
name: Verify data location and Routing Table | |
runs-on: ${{ matrix.os }} | |
strategy: | |
matrix: | |
include: | |
- os: ubuntu-latest | |
node_data_path: /home/runner/.local/share/safe/node | |
safe_path: /home/runner/.local/share/safe | |
- os: windows-latest | |
node_data_path: C:\\Users\\runneradmin\\AppData\\Roaming\\safe\\node | |
safe_path: C:\\Users\\runneradmin\\AppData\\Roaming\\safe | |
- os: macos-latest | |
node_data_path: /Users/runner/Library/Application Support/safe/node | |
safe_path: /Users/runner/Library/Application Support/safe | |
steps: | |
- uses: actions/checkout@v4 | |
- uses: dtolnay/rust-toolchain@stable | |
- uses: Swatinem/rust-cache@v2 | |
- name: Build binaries | |
run: cargo build --release --features local --bin safenode | |
timeout-minutes: 30 | |
- name: Build data location and routing table tests | |
run: cargo test --release -p sn_node --features=local --test verify_data_location --test verify_routing_table --no-run | |
env: | |
# only set the target dir for windows, to bypass the linker issue that
# happens if we build the node manager via the testnet action
CARGO_TARGET_DIR: ${{ matrix.os == 'windows-latest' && './test-target' || '.' }} | |
timeout-minutes: 30 | |
- name: Start a local network | |
uses: maidsafe/sn-local-testnet-action@main | |
with: | |
action: start | |
enable-evm-testnet: true | |
node-path: target/release/safenode | |
platform: ${{ matrix.os }} | |
build: true | |
- name: Check if SAFE_PEERS and EVM_NETWORK are set | |
shell: bash | |
run: | | |
if [[ -z "$SAFE_PEERS" ]]; then | |
echo "The SAFE_PEERS variable has not been set" | |
exit 1 | |
elif [[ -z "$EVM_NETWORK" ]]; then | |
echo "The EVM_NETWORK variable has not been set" | |
exit 1 | |
else | |
echo "SAFE_PEERS has been set to $SAFE_PEERS" | |
echo "EVM_NETWORK has been set to $EVM_NETWORK" | |
fi | |
- name: Verify the routing tables of the nodes | |
run: cargo test --release -p sn_node --features="local" --test verify_routing_table -- --nocapture | |
env: | |
CARGO_TARGET_DIR: ${{ matrix.os == 'windows-latest' && './test-target' || '.' }} | |
timeout-minutes: 5 | |
- name: Verify the location of the data on the network | |
run: cargo test --release -p sn_node --features="local" --test verify_data_location -- --nocapture | |
env: | |
CHURN_COUNT: 6 | |
SN_LOG: "all" | |
CARGO_TARGET_DIR: ${{ matrix.os == 'windows-latest' && './test-target' || '.' }} | |
timeout-minutes: 25 | |
- name: Verify the routing tables of the nodes | |
run: cargo test --release -p sn_node --features="local" --test verify_routing_table -- --nocapture | |
env: | |
CARGO_TARGET_DIR: ${{ matrix.os == 'windows-latest' && './test-target' || '.' }} | |
timeout-minutes: 5 | |
# Sleep for a while so that restarted nodes can be detected by others
- name: Sleep a while | |
run: sleep 300 | |
- name: Stop the local network and upload logs | |
if: always() | |
uses: maidsafe/sn-local-testnet-action@main | |
with: | |
action: stop | |
log_file_prefix: safe_test_logs_data_location | |
platform: ${{ matrix.os }} | |
- name: Verify restart of nodes using rg | |
shell: bash | |
timeout-minutes: 1 | |
# get the counts, then the specific line, and then the digit count only | |
# then check we have an expected level of restarts | |
# | |
# `PeerRemovedFromRoutingTable` now only happens when a peer is reported as a `BadNode`.
# Otherwise kad will remove a `dropped out node` directly from the RT.
# So explicitly detecting the removal is now much less likely,
# due to the removal of connection_issue tracking.
#
# With the further reduction of replication frequency,
# it is now harder to detect a `dropped out node` as a `failed to replicate` node.
# Hence the assertion check has been removed and replaced with a print-out only.
run: | | |
node_count=$(ls "${{ matrix.node_data_path }}" | wc -l) | |
echo "Node dir count is $node_count" | |
restart_count=$(rg "Node is restarting in" "${{ matrix.node_data_path }}" -c --stats | \ | |
rg "(\d+) matches" | rg "\d+" -o) | |
echo "Restart $restart_count nodes" | |
if ! rg "PeerRemovedFromRoutingTable" "${{ matrix.node_data_path }}" -c --stats | |
then | |
echo "No peer removal count found" | |
exit 0 | |
fi | |
peer_removed=$(rg "PeerRemovedFromRoutingTable" "${{ matrix.node_data_path }}" -c --stats | \ | |
rg "(\d+) matches" | rg "\d+" -o) | |
echo "PeerRemovedFromRoutingTable $peer_removed times" | |
# Only error out after uploading the logs | |
- name: Don't log raw data | |
if: matrix.os != 'windows-latest' # causes error | |
shell: bash | |
timeout-minutes: 10 | |
run: | | |
if ! rg '^' "${{ matrix.safe_path }}"/*/*/logs | awk 'length($0) > 15000 { print; exit 1 }' | |
then | |
echo "We are logging an extremely large data" | |
exit 1 | |
fi | |
# faucet_test: | |
# if: "!startsWith(github.event.head_commit.message, 'chore(release):')" | |
# name: Faucet test | |
# runs-on: ubuntu-latest | |
# steps: | |
# - uses: actions/checkout@v4 | |
# - name: Install Rust | |
# uses: dtolnay/rust-toolchain@stable | |
# - uses: Swatinem/rust-cache@v2 | |
# - name: install ripgrep | |
# shell: bash | |
# run: sudo apt-get install -y ripgrep | |
# - name: Build binaries | |
# run: cargo build --release --bin safenode --bin safe | |
# timeout-minutes: 30 | |
# - name: Build faucet binary | |
# run: cargo build --release --bin faucet --features gifting | |
# timeout-minutes: 30 | |
# - name: Start a local network | |
# uses: maidsafe/sn-local-testnet-action@main | |
# with: | |
# action: start | |
# interval: 2000 | |
# node-path: target/release/safenode | |
# faucet-path: target/release/faucet | |
# platform: ubuntu-latest | |
# build: true | |
# - name: Check we're _not_ warned about using default genesis | |
# run: | | |
# if rg "USING DEFAULT" "${{ matrix.safe_path }}"/*/*/logs; then | |
# exit 1 | |
# fi | |
# shell: bash | |
# - name: Move built binaries and clear out target dir | |
# shell: bash | |
# run: | | |
# mv target/release/faucet ~/faucet | |
# mv target/release/safe ~/safe | |
# rm -rf target | |
# - name: Check SAFE_PEERS was set | |
# shell: bash | |
# run: | | |
# if [[ -z "$SAFE_PEERS" ]]; then | |
# echo "The SAFE_PEERS variable has not been set" | |
# exit 1 | |
# else | |
# echo "SAFE_PEERS has been set to $SAFE_PEERS" | |
# fi | |
# - name: Create and fund a wallet first time | |
# run: | | |
# ~/safe --log-output-dest=data-dir wallet create --no-password | |
# ~/faucet --log-output-dest=data-dir send 100000000 $(~/safe --log-output-dest=data-dir wallet address | tail -n 1) | tail -n 1 1>first.txt | |
# echo "----------" | |
# cat first.txt | |
# env: | |
# SN_LOG: "all" | |
# timeout-minutes: 5 | |
# - name: Move faucet log to the working folder | |
# run: | | |
# echo "SAFE_DATA_PATH has: " | |
# ls -l $SAFE_DATA_PATH | |
# echo "test_faucet foder has: " | |
# ls -l $SAFE_DATA_PATH/test_faucet | |
# echo "logs folder has: " | |
# ls -l $SAFE_DATA_PATH/test_faucet/logs | |
# mv $SAFE_DATA_PATH/test_faucet/logs/faucet.log ./faucet_log.log | |
# env: | |
# SN_LOG: "all" | |
# SAFE_DATA_PATH: /home/runner/.local/share/safe | |
# continue-on-error: true | |
# if: always() | |
# timeout-minutes: 1 | |
# - name: Upload faucet log | |
# uses: actions/upload-artifact@main | |
# with: | |
# name: faucet_test_first_faucet_log | |
# path: faucet_log.log | |
# continue-on-error: true | |
# if: always() | |
# - name: Cleanup prior faucet and cashnotes | |
# run: | | |
# ls -l /home/runner/.local/share | |
# ls -l /home/runner/.local/share/safe | |
# rm -rf /home/runner/.local/share/safe/test_faucet | |
# rm -rf /home/runner/.local/share/safe/test_genesis | |
# rm -rf /home/runner/.local/share/safe/autonomi | |
# env: | |
# SN_LOG: "all" | |
# timeout-minutes: 5 | |
# - name: Create a new wallet | |
# run: ~/safe --log-output-dest=data-dir wallet create --no-password | |
# env: | |
# SN_LOG: "all" | |
# timeout-minutes: 5 | |
# - name: Attempt second faucet genesis disbursement | |
# run: ~/faucet --log-output-dest=data-dir send 100000000 $(~/safe --log-output-dest=data-dir wallet address | tail -n 1) > second.txt 2>&1 || true | |
# env: | |
# SN_LOG: "all" | |
# timeout-minutes: 5 | |
# - name: cat second.txt | |
# run: cat second.txt | |
# env: | |
# SN_LOG: "all" | |
# timeout-minutes: 5 | |
# - name: Verify a second disbursement is rejected | |
# run: | | |
# if grep "Faucet disbursement has already occured" second.txt; then | |
# echo "Duplicated faucet rejected" | |
# else | |
# echo "Duplicated faucet not rejected!" | |
# exit 1 | |
# fi | |
# env: | |
# SN_LOG: "all" | |
# timeout-minutes: 5 | |
# - name: Create and fund a wallet with different keypair | |
# run: | | |
# ls -l /home/runner/.local/share | |
# ls -l /home/runner/.local/share/safe | |
# rm -rf /home/runner/.local/share/safe/test_faucet | |
# rm -rf /home/runner/.local/share/safe/test_genesis | |
# rm -rf /home/runner/.local/share/safe/autonomi | |
# ~/safe --log-output-dest=data-dir wallet create --no-password | |
# if GENESIS_PK=a9925296499299fdbf4412509d342a92e015f5b996e9acd1d2ab7f2326e3ad05934326efdc345345a95e973ac1bb6637 GENESIS_SK=40f6bbc870355c68138ac70b450b6425af02b49874df3f141b7018378ceaac66 nohup ~/faucet --log-output-dest=data-dir send 100000000 $(~/safe --log-output-dest=data-dir wallet address | tail -n 1); then | |
# echo "Faucet with different genesis key not rejected!" | |
# exit 1 | |
# else | |
# echo "Faucet with different genesis key rejected" | |
# fi | |
# env: | |
# SN_LOG: "all" | |
# timeout-minutes: 5 | |
# - name: Build faucet binary again without the gifting feature | |
# run: cargo build --release --bin faucet | |
# timeout-minutes: 30 | |
# - name: Start up a faucet in server mode | |
# run: | | |
# ls -l /home/runner/.local/share | |
# ls -l /home/runner/.local/share/safe | |
# rm -rf /home/runner/.local/share/safe/test_faucet | |
# rm -rf /home/runner/.local/share/safe/test_genesis | |
# rm -rf /home/runner/.local/share/safe/autonomi | |
# target/release/faucet server & | |
# sleep 60 | |
# env: | |
# SN_LOG: "all" | |
# timeout-minutes: 5 | |
# - name: check there is no upload happens | |
# shell: bash | |
# run: | | |
# if grep -r "NanoTokens(10) }, Output" $NODE_DATA_PATH | |
# then | |
# echo "We find ongoing upload !" | |
# exit 1 | |
# fi | |
# env: | |
# NODE_DATA_PATH: /home/runner/.local/share/safe/node | |
# timeout-minutes: 1 | |
# - name: Stop the local network and upload logs | |
# if: always() | |
# uses: maidsafe/sn-local-testnet-action@main | |
# with: | |
# action: stop | |
# platform: ubuntu-latest | |
# log_file_prefix: safe_test_logs_faucet | |
large_file_upload_test: | |
if: "!startsWith(github.event.head_commit.message, 'chore(release):')" | |
name: Large file upload | |
runs-on: ubuntu-latest | |
steps: | |
- uses: actions/checkout@v4 | |
- name: Install Rust | |
uses: dtolnay/rust-toolchain@stable | |
- uses: Swatinem/rust-cache@v2 | |
- name: install ripgrep | |
shell: bash | |
run: sudo apt-get install -y ripgrep | |
- name: Check the available space | |
run: | | |
df | |
echo "Home dir:" | |
du -sh /home/runner/ | |
echo "Home subdirs:" | |
du -sh /home/runner/*/ | |
echo "PWD:" | |
du -sh . | |
echo "PWD subdirs:" | |
du -sh */ | |
- name: Download material (135MB) | |
shell: bash | |
run: | | |
mkdir test_data_1 | |
cd test_data_1 | |
wget https://sn-node.s3.eu-west-2.amazonaws.com/joshuef/Qi930/safe-qiWithListeners-x86_64.tar.gz | |
wget https://sn-node.s3.eu-west-2.amazonaws.com/joshuef/Qi930/safenode-qiWithListeners-x86_64.tar.gz | |
ls -l | |
cd .. | |
tar -cvzf test_data_1.tar.gz test_data_1 | |
ls -l | |
- name: Build binaries | |
run: cargo build --release --features local --bin safenode --bin autonomi | |
timeout-minutes: 30 | |
- name: Start a local network | |
uses: maidsafe/sn-local-testnet-action@main | |
with: | |
action: start | |
enable-evm-testnet: true | |
node-path: target/release/safenode | |
platform: ubuntu-latest | |
build: true | |
- name: Check if SAFE_PEERS and EVM_NETWORK are set | |
shell: bash | |
run: | | |
if [[ -z "$SAFE_PEERS" ]]; then | |
echo "The SAFE_PEERS variable has not been set" | |
exit 1 | |
elif [[ -z "$EVM_NETWORK" ]]; then | |
echo "The EVM_NETWORK variable has not been set" | |
exit 1 | |
else | |
echo "SAFE_PEERS has been set to $SAFE_PEERS" | |
echo "EVM_NETWORK has been set to $EVM_NETWORK" | |
fi | |
- name: Check the available space post download | |
run: | | |
df | |
echo "Home dir:" | |
du -sh /home/runner/ | |
echo "Home subdirs:" | |
du -sh /home/runner/*/ | |
echo "PWD:" | |
du -sh . | |
echo "PWD subdirs:" | |
du -sh */ | |
- name: export default secret key | |
run: echo "SECRET_KEY=0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80" >> $GITHUB_ENV | |
shell: bash | |
- name: File upload | |
run: ./target/release/autonomi --log-output-dest=data-dir file upload "./test_data_1.tar.gz" > ./upload_output 2>&1 | |
env: | |
SN_LOG: "v" | |
timeout-minutes: 5 | |
- name: Show the upload terminal output
run: cat upload_output | |
shell: bash | |
if: always() | |
- name: parse address | |
run: | | |
UPLOAD_ADDRESS=$(rg "At address: ([0-9a-f]*)" -o -r '$1' ./upload_output) | |
echo "UPLOAD_ADDRESS=$UPLOAD_ADDRESS" >> $GITHUB_ENV | |
shell: bash | |
- name: File Download | |
run: ./target/release/autonomi --log-output-dest=data-dir file download ${{ env.UPLOAD_ADDRESS }} ./downloaded_resources > ./download_output 2>&1 | |
env: | |
SN_LOG: "v" | |
timeout-minutes: 5 | |
- name: Show the download terminal output
run: | | |
cat download_output | |
ls -l | |
cd downloaded_resources | |
ls -l | |
shell: bash | |
if: always() | |
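# The step below counts IncomingConnectionError occurrences for information only;
# the job fails only if UnexpectedEof errors are found in the node logs.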
- name: Check for connection errors
shell: bash | |
timeout-minutes: 1 | |
env: | |
NODE_DATA_PATH: /home/runner/.local/share/safe/node | |
run: | | |
incoming_connection_errors=$(rg "IncomingConnectionError" $NODE_DATA_PATH -c --stats | \ | |
rg "(\d+) matches" | rg "\d+" -o) || { echo "Failed to find IncomingConnectionError error"; exit 0; } | |
if [ -z "$incoming_connection_errors" ]; then | |
echo "Doesn't find any IncomingConnectionError error !" | |
else | |
echo "Found $incoming_connection_errors IncomingConnectionError errors." | |
fi | |
if ! rg "UnexpectedEof" $NODE_DATA_PATH -c --stats; then | |
echo "Doesn't find any UnexpectedEof error !" | |
else | |
echo "Found errors." | |
exit 1 | |
fi | |
- name: Stop the local network and upload logs | |
if: always() | |
uses: maidsafe/sn-local-testnet-action@main | |
with: | |
action: stop | |
platform: ubuntu-latest | |
log_file_prefix: safe_test_logs_large_file_upload_no_ws | |
build: true | |
# replication_bench_with_heavy_upload: | |
# if: "!startsWith(github.event.head_commit.message, 'chore(release):')" | |
# name: Replication bench with heavy upload | |
# runs-on: ubuntu-latest | |
# env: | |
# CLIENT_DATA_PATH: /home/runner/.local/share/safe/autonomi | |
# steps: | |
# - uses: actions/checkout@v4 | |
# - name: Install Rust | |
# uses: dtolnay/rust-toolchain@stable | |
# - uses: Swatinem/rust-cache@v2 | |
# - name: install ripgrep | |
# shell: bash | |
# run: sudo apt-get install -y ripgrep | |
# - name: Download materials to create two 300MB test_files to be uploaded by client | |
# shell: bash | |
# run: | | |
# mkdir test_data_1 | |
# cd test_data_1 | |
# wget https://sn-node.s3.eu-west-2.amazonaws.com/joshuef/Qi930/safe-qiWithListeners-x86_64.tar.gz | |
# wget https://sn-node.s3.eu-west-2.amazonaws.com/joshuef/Qi930/safenode-qiWithListeners-x86_64.tar.gz | |
# wget https://sn-node.s3.eu-west-2.amazonaws.com/joshuef/Qi930/safenode_rpc_client-qiWithListeners-x86_64.tar.gz | |
# wget https://sn-node.s3.eu-west-2.amazonaws.com/joshuef/QiSubdivisionBranch/faucet-qilesssubs-x86_64.tar.gz | |
# wget https://sn-node.s3.eu-west-2.amazonaws.com/joshuef/QiSubdivisionBranch/safe-qilesssubs-x86_64.tar.gz | |
# ls -l | |
# cd .. | |
# tar -cvzf test_data_1.tar.gz test_data_1 | |
# mkdir test_data_2 | |
# cd test_data_2 | |
# wget https://sn-node.s3.eu-west-2.amazonaws.com/joshuef/QiSubdivisionBranch/safenode-qilesssubs-x86_64.tar.gz | |
# wget https://sn-node.s3.eu-west-2.amazonaws.com/joshuef/QiSubdivisionBranch/safenode_rpc_client-qilesssubs-x86_64.tar.gz | |
# wget https://sn-node.s3.eu-west-2.amazonaws.com/joshuef/RemoveArtificalReplPush/faucet-DebugMem-x86_64.tar.gz | |
# wget https://sn-node.s3.eu-west-2.amazonaws.com/joshuef/RemoveArtificalReplPush/safe-DebugMem-x86_64.tar.gz | |
# wget https://sn-node.s3.eu-west-2.amazonaws.com/joshuef/RemoveArtificalReplPush/safenode-DebugMem-x86_64.tar.gz | |
# ls -l | |
# cd .. | |
# tar -cvzf test_data_2.tar.gz test_data_2 | |
# ls -l | |
# mkdir test_data_3 | |
# cd test_data_3 | |
# wget https://sn-node.s3.eu-west-2.amazonaws.com/joshuef/RemoveArtificalReplPush/safenode_rpc_client-DebugMem-x86_64.tar.gz | |
# wget https://sn-node.s3.eu-west-2.amazonaws.com/joshuef/RemoveArtificalReplPush2/faucet-DebugMem-x86_64.tar.gz | |
# wget https://sn-node.s3.eu-west-2.amazonaws.com/joshuef/RemoveArtificalReplPush2/safe-DebugMem-x86_64.tar.gz | |
# wget https://sn-node.s3.eu-west-2.amazonaws.com/joshuef/RemoveArtificalReplPush2/safenode-DebugMem-x86_64.tar.gz | |
# wget https://sn-node.s3.eu-west-2.amazonaws.com/joshuef/RemoveArtificalReplPush2/safenode_rpc_client-DebugMem-x86_64.tar.gz | |
# ls -l | |
# cd .. | |
# tar -cvzf test_data_3.tar.gz test_data_3 | |
# ls -l | |
# df | |
# - name: Build binaries | |
# run: cargo build --release --bin safenode --bin safe | |
# timeout-minutes: 30 | |
# - name: Build faucet binary | |
# run: cargo build --release --bin faucet --features gifting | |
# timeout-minutes: 30 | |
# - name: Start a local network | |
# uses: maidsafe/sn-local-testnet-action@main | |
# with: | |
# action: start | |
# interval: 2000 | |
# node-path: target/release/safenode | |
# faucet-path: target/release/faucet | |
# platform: ubuntu-latest | |
# build: true | |
# - name: Check SAFE_PEERS was set | |
# shell: bash | |
# run: | | |
# if [[ -z "$SAFE_PEERS" ]]; then | |
# echo "The SAFE_PEERS variable has not been set" | |
# exit 1 | |
# else | |
# echo "SAFE_PEERS has been set to $SAFE_PEERS" | |
# fi | |
# - name: Create and fund a wallet to pay for files storage | |
# run: | | |
# ./target/release/safe --log-output-dest=data-dir wallet create --no-password | |
# ./target/release/faucet --log-output-dest=data-dir send 100000000 $(./target/release/safe --log-output-dest=data-dir wallet address | tail -n 1) | tail -n 1 > transfer_hex | |
# ./target/release/safe --log-output-dest=data-dir wallet receive --file transfer_hex | |
# env: | |
# SN_LOG: "all" | |
# timeout-minutes: 5 | |
# - name: Start a client to upload first file | |
# run: ./target/release/safe --log-output-dest=data-dir files upload "./test_data_1.tar.gz" --retry-strategy quick | |
# env: | |
# SN_LOG: "all" | |
# timeout-minutes: 5 | |
# - name: Ensure no leftover cash_notes and payment files | |
# run: | | |
# expected_cash_notes_files="1" | |
# expected_payment_files="0" | |
# pwd | |
# ls $CLIENT_DATA_PATH/ -l | |
# ls $CLIENT_DATA_PATH/wallet -l | |
# ls $CLIENT_DATA_PATH/wallet/cash_notes -l | |
# cash_note_files=$(ls $CLIENT_DATA_PATH/wallet/cash_notes | wc -l) | |
# echo "Find $cash_note_files cash_note files" | |
# if [ $expected_cash_notes_files -lt $cash_note_files ]; then | |
# echo "Got too many cash_note files leftover: $cash_note_files" | |
# exit 1 | |
# fi | |
# ls $CLIENT_DATA_PATH/wallet/payments -l | |
# payment_files=$(ls $CLIENT_DATA_PATH/wallet/payments | wc -l) | |
# if [ $expected_payment_files -lt $payment_files ]; then | |
# echo "Got too many payment files leftover: $payment_files" | |
# exit 1 | |
# fi | |
# env: | |
# CLIENT_DATA_PATH: /home/runner/.local/share/safe/client | |
# timeout-minutes: 10 | |
# - name: Wait for certain period | |
# run: sleep 300 | |
# timeout-minutes: 6 | |
# - name: Use same client to upload second file | |
# run: ./target/release/safe --log-output-dest=data-dir files upload "./test_data_2.tar.gz" --retry-strategy quick | |
# env: | |
# SN_LOG: "all" | |
# timeout-minutes: 10 | |
# - name: Ensure no leftover cash_notes and payment files | |
# run: | | |
# expected_cash_notes_files="1" | |
# expected_payment_files="0" | |
# pwd | |
# ls $CLIENT_DATA_PATH/ -l | |
# ls $CLIENT_DATA_PATH/wallet -l | |
# ls $CLIENT_DATA_PATH/wallet/cash_notes -l | |
# cash_note_files=$(find $CLIENT_DATA_PATH/wallet/cash_notes -type f | wc -l) | |
# if (( $(echo "$cash_note_files > $expected_cash_notes_files" | bc -l) )); then | |
# echo "Got too many cash_note files leftover: $cash_note_files when we expected $expected_cash_notes_files" | |
# exit 1 | |
# fi | |
# ls $CLIENT_DATA_PATH/wallet/payments -l | |
# payment_files=$(find $CLIENT_DATA_PATH/wallet/payments -type f | wc -l) | |
# if (( $(echo "$payment_files > $expected_payment_files" | bc -l) )); then | |
# echo "Got too many payment files leftover: $payment_files" | |
# exit 1 | |
# fi | |
# env: | |
# CLIENT_DATA_PATH: /home/runner/.local/share/safe/client | |
# timeout-minutes: 10 | |
# - name: Wait for certain period | |
# run: sleep 300 | |
# timeout-minutes: 6 | |
# # Start a different client to avoid local wallet slow down with more payments handled. | |
# - name: Start a different client | |
# run: | | |
# pwd | |
# mv $CLIENT_DATA_PATH $SAFE_DATA_PATH/client_first | |
# ls -l $SAFE_DATA_PATH | |
# ls -l $SAFE_DATA_PATH/client_first | |
# mkdir $SAFE_DATA_PATH/client | |
# ls -l $SAFE_DATA_PATH | |
# mv $SAFE_DATA_PATH/client_first/logs $CLIENT_DATA_PATH/logs | |
# ls -l $CLIENT_DATA_PATH | |
# ./target/release/safe --log-output-dest=data-dir wallet create --no-password | |
# ./target/release/faucet --log-output-dest=data-dir send 100000000 $(./target/release/safe --log-output-dest=data-dir wallet address | tail -n 1) | tail -n 1 > transfer_hex | |
# ./target/release/safe --log-output-dest=data-dir wallet receive --file transfer_hex | |
# env: | |
# SN_LOG: "all" | |
# SAFE_DATA_PATH: /home/runner/.local/share/safe | |
# CLIENT_DATA_PATH: /home/runner/.local/share/safe/client | |
# timeout-minutes: 25 | |
# - name: Use second client to upload third file | |
# run: ./target/release/safe --log-output-dest=data-dir files upload "./test_data_3.tar.gz" --retry-strategy quick | |
# env: | |
# SN_LOG: "all" | |
# timeout-minutes: 10 | |
# - name: Ensure no leftover cash_notes and payment files | |
# run: | | |
# expected_cash_notes_files="1" | |
# expected_payment_files="0" | |
# pwd | |
# ls $CLIENT_DATA_PATH/ -l | |
# ls $CLIENT_DATA_PATH/wallet -l | |
# ls $CLIENT_DATA_PATH/wallet/cash_notes -l | |
# cash_note_files=$(ls $CLIENT_DATA_PATH/wallet/cash_notes | wc -l) | |
# echo "Find $cash_note_files cash_note files" | |
# if [ $expected_cash_notes_files -lt $cash_note_files ]; then | |
# echo "Got too many cash_note files leftover: $cash_note_files" | |
# exit 1 | |
# fi | |
# ls $CLIENT_DATA_PATH/wallet/payments -l | |
# payment_files=$(ls $CLIENT_DATA_PATH/wallet/payments | wc -l) | |
# if [ $expected_payment_files -lt $payment_files ]; then | |
# echo "Got too many payment files leftover: $payment_files" | |
# exit 1 | |
# fi | |
# env: | |
# CLIENT_DATA_PATH: /home/runner/.local/share/safe/client | |
# timeout-minutes: 10 | |
# - name: Stop the local network and upload logs | |
# if: always() | |
# uses: maidsafe/sn-local-testnet-action@main | |
# with: | |
# action: stop | |
# log_file_prefix: safe_test_logs_heavy_replicate_bench | |
# platform: ubuntu-latest |