allow rerunning test without tearing down kind cluster
samos123 committed Nov 12, 2023
1 parent 9fb43f3 commit 0f5888e
Showing 2 changed files with 18 additions and 12 deletions.
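The upshot of the change is that tests/system-test-kind.sh becomes safe to invoke repeatedly while iterating: cluster creation, the skaffold deploy, and the Python venv setup are each guarded so a second run reuses what already exists. A minimal sketch of the intended workflow (the script path comes from this commit; the rest is illustrative):

# First run creates the kind cluster, deploys the proxy, and sets up the venv.
bash tests/system-test-kind.sh
# Subsequent runs reuse the existing cluster, deployment, and venv.
bash tests/system-test-kind.sh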
20 changes: 13 additions & 7 deletions tests/system-test-kind.sh
@@ -1,11 +1,16 @@
 #!/usr/bin/env bash
 
-set -e
+set -xe
 
-kind create cluster --name=substratus-test
-# trap "kind delete cluster --name=substratus-test" EXIT
+if kind get clusters | grep -q substratus-test; then
+  echo "Cluster substratus-tests already exists.. reusing it"
+else
+  kind create cluster --name substratus-test
+fi
 
-skaffold run
+if ! kubectl get deployment proxy-controller; then
+  skaffold run
+fi
 
 kubectl wait --for=condition=available --timeout=30s deployment/proxy-controller
 
@@ -30,7 +35,9 @@ fi
 SCRIPT_DIR=$(dirname "$0")
 VENV_DIR=$SCRIPT_DIR/.venv
 
-python3 -m venv "$VENV_DIR"
+if [ ! -d "$VENV_DIR" ]; then
+  python3 -m venv "$VENV_DIR"
+fi
 source "$VENV_DIR/bin/activate"
 pip3 install openai==1.2.3
 
@@ -51,8 +58,7 @@ requests=500
 echo "Send $requests requests in parallel to stapi backend using openai python client and threading"
 python3 $SCRIPT_DIR/test_openai_embedding.py \
   --requests $requests \
-  --model text-embedding-ada-002 \
-  --client-per-thread False
+  --model text-embedding-ada-002
 
 replicas=$(kubectl get deployment stapi-minilm-l6-v2 -o jsonpath='{.spec.replicas}')
 if [ "$replicas" -ge 2 ]; then
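With the EXIT trap gone, the kind cluster deliberately outlives the test run, so teardown becomes a manual step. A one-line sketch of the cleanup, using the cluster name from the script above (the same command the now-removed trap line referenced):

# Delete the reusable test cluster once you are done iterating.
kind delete cluster --name=substratus-test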
10 changes: 5 additions & 5 deletions tests/test_openai_embedding.py
@@ -7,7 +7,7 @@
 parser.add_argument("--requests", type=int, default=60)
 parser.add_argument("--model", type=str, default="text-embedding-ada-002")
 parser.add_argument("--text", type=str, default="Generate an embedding for me")
-parser.add_argument("--client-per-thread", type=bool, default=False)
+# parser.add_argument("--client-per-thread", type=bool, default=False)
 args = parser.parse_args()
 
 def create_client():
@@ -16,17 +16,17 @@ def create_client():
         base_url=args.base_url,
     )
 
-client = create_client()
+
+# client = create_client()
 
 def embedding_request(index: int):
     print (f"Request {index} of {args.requests}")
-    if args.client_per_thread:
-        client = create_client()
+    client = create_client()
     embedding = client.embeddings.create(model=args.model, input=args.text)
     print (f"Finished {index} of {args.requests}")
     return embedding
 
 with concurrent.futures.ThreadPoolExecutor(max_workers=args.requests) as executor:
     futures = [executor.submit(embedding_request, i+1) for i in range(args.requests)]
-    results = [future.result() for future in concurrent.futures.as_completed(futures, timeout=600)]
+    results = [future.result() for future in concurrent.futures.as_completed(futures, timeout=900)]
 assert len(results) == args.requests
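For reference, the load test can also be invoked on its own against a reachable endpoint. A sketch of the standalone call, assuming a --base-url flag is defined earlier in the parser (only args.base_url is visible in this hunk) and using a placeholder URL:

# Hypothetical standalone run; --requests and --model mirror the flags used
# by tests/system-test-kind.sh, and the endpoint below is a placeholder.
python3 tests/test_openai_embedding.py \
  --requests 500 \
  --model text-embedding-ada-002 \
  --base-url http://localhost:8080/v1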
