Merge pull request #1361 from msimberg/shfmt-all-files
Check that all shell scripts are formatted with `shfmt`
msimberg authored Nov 29, 2024
2 parents ff5f0cf + 00c1e83 commit ed4eef5
Showing 6 changed files with 41 additions and 41 deletions.
8 changes: 4 additions & 4 deletions .circleci/config.yml
@@ -170,13 +170,13 @@ jobs:
           at: /pika
       - run:
           name: Check that shell scripts are correctly formatted with shfmt
+          working_directory: /pika/source
           command: |
-            cd /pika/source && shopt -s globstar # to activate the ** globbing
             shfmt --version
-            shfmt -w **/*.sh
-            git diff --exit-code > /tmp/modified_black_files.txt
+            fd --hidden --extension sh --exec shfmt --write
+            git diff --exit-code > /tmp/modified_shfmt_files.txt
       - store_artifacts:
-          path: /tmp/modified_black_files.txt
+          path: /tmp/modified_shfmt_files.txt
           destination: /pika/artifacts/modified_black_files.txt
 
   configure:
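The new CI step uses fd to find every *.sh file (including ones in hidden directories) and rewrites it in place with shfmt, then fails if the rewrite modified anything. A minimal sketch of running the same check locally, assuming fd and shfmt are installed and the repository root is the current directory (the artifact upload only matters in CI):

  # Rewrite all shell scripts in place, then fail if any file changed.
  fd --hidden --extension sh --exec shfmt --write
  git diff --exit-code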
2 changes: 1 addition & 1 deletion .gitlab/scripts/collect_file_sizes.sh
@@ -32,7 +32,7 @@ function submit_filesizes {
 
 json_merge "${metadata_file}" "${result_file}" "${result_file}"
 submit_logstash "${result_file}"
-done
+done
 }
 
 # Submit file name and size for files under lib and bin in the build directory
56 changes: 28 additions & 28 deletions .gitlab/scripts/run_performance_benchmarks.sh
@@ -24,85 +24,85 @@ metadata_file=$(mktemp --tmpdir metadata.XXXXXXXXXX.json)
 create_metadata_file "${metadata_file}"
 
 pika_targets=(
-"task_overhead_report_test"
-"task_size_test"
-"task_size_test"
-"task_size_test"
-"task_size_test"
-"task_size_test"
-"task_latency_test"
-"task_latency_test"
-"task_latency_test"
-"task_latency_test"
-"task_yield_test"
-"task_yield_test"
-"condition_variable_overhead_test"
+"task_overhead_report_test"
+"task_size_test"
+"task_size_test"
+"task_size_test"
+"task_size_test"
+"task_size_test"
+"task_latency_test"
+"task_latency_test"
+"task_latency_test"
+"task_latency_test"
+"task_yield_test"
+"task_yield_test"
+"condition_variable_overhead_test"
 )
 pika_test_options=(
-"--pika:ini=pika.thread_queue.init_threads_count=100 \
+"--pika:ini=pika.thread_queue.init_threads_count=100 \
 --pika:queuing=local-priority \
 --repetitions=100 \
 --tasks=500000"
 
-"--method=task
+"--method=task
 --tasks-per-thread=1000 \
 --task-size-growth-factor=1.05 \
 --target-efficiency=0.9 \
 --perftest-json"
 
-"--method=task-hierarchical
+"--method=task-hierarchical
 --tasks-per-thread=1000 \
 --task-size-growth-factor=1.05 \
 --target-efficiency=0.9 \
 --perftest-json"
 
-"--method=task-yield
+"--method=task-yield
 --tasks-per-thread=1000 \
 --task-size-growth-factor=1.05 \
 --target-efficiency=0.9 \
 --perftest-json"
 
-"--method=barrier
+"--method=barrier
 --tasks-per-thread=1000 \
 --task-size-growth-factor=1.05 \
 --target-efficiency=0.9 \
 --perftest-json"
 
-"--method=bulk
+"--method=bulk
 --tasks-per-thread=1000 \
 --task-size-growth-factor=1.05 \
 --target-efficiency=0.5 \
 --perftest-json"
 
-"--repetitions=1000000
+"--repetitions=1000000
 --pika:threads=1
 --perftest-json"
 
-"--repetitions=1000000
+"--repetitions=1000000
 --nostack
 --pika:threads=1
 --perftest-json"
 
-"--repetitions=1000000
+"--repetitions=1000000
 --pika:threads=2
 --perftest-json"
 
-"--repetitions=1000000
+"--repetitions=1000000
 --nostack
 --pika:threads=2
 --perftest-json"
 
-"--repetitions=100
+"--repetitions=100
 --num-yields=100000
 --pika:threads=1
 --perftest-json"
 
-"--repetitions=100
+"--repetitions=100
 --num-yields=100000
 --pika:threads=2
 --perftest-json"
 
-"--loops=1000000
+"--loops=1000000
 --repetitions=3
 --pika:threads=2
 --perftest-json"
@@ -114,8 +114,8 @@ failures=0
 for executable in "${pika_targets[@]}"; do
 test_opts=${pika_test_options[$index]}
 raw_result_file=$(mktemp --tmpdir "${executable}_raw.XXXXXXXXXX.json")
-result_file=$(mktemp --tmpdir "${executable}_raw.XXXXXXXXXX.json")
-echo '{}' > "${result_file}"
+result_file=$(mktemp --tmpdir "${executable}.XXXXXXXXXX.json")
+echo '{}' >"${result_file}"
 
 echo
 echo
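For orientation, the loop shown in the second hunk pairs each pika_targets entry with the pika_test_options string at the same index, so the second pair corresponds to an invocation roughly like the sketch below (the exact binary path and how the script captures the JSON output are not visible in this hunk and are omitted):

  # Hypothetical expansion of index 1: task_size_test with its option string
  task_size_test --method=task --tasks-per-thread=1000 \
      --task-size-growth-factor=1.05 --target-efficiency=0.9 --perftest-json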
4 changes: 2 additions & 2 deletions .gitlab/scripts/utilities.sh
@@ -16,8 +16,8 @@ function submit_logstash {
 --request POST \
 --header "Content-Type: application/json" \
 --data "@${1}" \
-"${CSCS_LOGSTASH_URL}" \
-|| true
+"${CSCS_LOGSTASH_URL}" ||
+true
 }
 
 function json_merge {
2 changes: 1 addition & 1 deletion .jenkins/cscs-perftests/batch.sh
@@ -30,7 +30,7 @@ build_dir="/dev/shm/pika/build"
 mkdir -p ${build_dir}/tools
 # Copy source directory to /dev/shm for faster builds and copy the perftest
 # utility in the build dir
-cp -r "${orig_src_dir}" "${src_dir}" && \
+cp -r "${orig_src_dir}" "${src_dir}" &&
 cp -r ${src_dir}/tools/perftests_ci ${build_dir}/tools &
 
 # Variables
10 changes: 5 additions & 5 deletions .jenkins/cscs-perftests/comment_github.sh
@@ -14,15 +14,15 @@ pushd perftests-reports/reference-comparison
 # - Remove the image as does not display in github comments (section Details in the report)
 # - Escape double quotes for JSON compatibility
 # - Escape slashes for JSON compatibility
-report=$(cat index.html | \
+report=$(cat index.html |
 sed -e 's:<section class="grid-section"><h2>Details[-a-z0-9<>/"=\ \.]*</section>::Ig' \
 -e 's/"/\\"/g' \
 -e 's/\//\\\//g')
 
 curl \
--X POST \
--H "Authorization: token ${GITHUB_TOKEN}" \
-https://api.github.com/repos/pika-org/pika/issues/${ghprbPullId}/comments \
--d "{\"body\": \"<details><summary>Performance test report</summary>${report}<\/details>\"}"
+-X POST \
+-H "Authorization: token ${GITHUB_TOKEN}" \
+https://api.github.com/repos/pika-org/pika/issues/${ghprbPullId}/comments \
+-d "{\"body\": \"<details><summary>Performance test report</summary>${report}<\/details>\"}"
 
 popd
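Taken together, the shell-script hunks above are the mechanical rewrites shfmt applies: a trailing backslash that follows &&, | or || is dropped because the operator already continues the command, a leading || is moved to the end of the previous line, the space between a redirection operator and its target is removed, and lines are re-indented, which is why several removed/added pairs show identical text (only their whitespace differs). A hedged before/after sketch on a made-up snippet, not taken from the repository (the indentation shown is illustrative; shfmt's actual indentation depends on its flags and any .editorconfig):

  # Before formatting (hypothetical input)
  cp -r "${src}" "${dst}" && \
      echo done
  submit "${url}" \
      || true
  echo '{}' > "${out}"

  # After `shfmt --write`
  cp -r "${src}" "${dst}" &&
      echo done
  submit "${url}" ||
      true
  echo '{}' >"${out}"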
