Skip to content

Commit

Permalink
Perf test under CTest conf to exclude from default test set (microsoft#6422)
Browse files Browse the repository at this point in the history

Co-authored-by: Amaury Chamayou <[email protected]>
Co-authored-by: Amaury Chamayou <[email protected]>
  • Loading branch information
3 people authored Aug 6, 2024
1 parent 20965eb commit 1a97e95
Show file tree
Hide file tree
Showing 9 changed files with 15 additions and 13 deletions.
12 changes: 6 additions & 6 deletions .azure-pipelines-templates/daily-matrix.yml
Original file line number Diff line number Diff line change
Expand Up @@ -69,7 +69,7 @@ jobs:
cmake_args: "${{ parameters.build.common.cmake_args }} ${{ parameters.build.debug.cmake_args }} ${{ parameters.build.ASAN.cmake_args }} ${{ parameters.build.Virtual.cmake_args }}"
suffix: "ASAN"
artifact_name: "Virtual_ASAN"
ctest_filter: '-LE "benchmark|perf"'
ctest_filter: '-LE "benchmark"'
ctest_timeout: "1600"
depends_on: configure
installExtendedTestingTools: true
Expand All @@ -82,7 +82,7 @@ jobs:
cmake_args: "${{ parameters.build.common.cmake_args }} ${{ parameters.build.debug.cmake_args }} ${{ parameters.build.TSAN.cmake_args }} ${{ parameters.build.Virtual.cmake_args }}"
suffix: "TSAN"
artifact_name: "Virtual_TSAN"
ctest_filter: '-LE "benchmark|perf"'
ctest_filter: '-LE "benchmark"'
ctest_timeout: "1600"
depends_on: configure
installExtendedTestingTools: true
Expand All @@ -104,7 +104,7 @@ jobs:
cmake_args: "${{ parameters.build.common.cmake_args }} ${{ parameters.build.SGX.cmake_args }} -DCLIENT_PROTOCOLS_TEST=ON -DSHUFFLE_SUITE=ON"
suffix: "Release"
artifact_name: "SGX_Release"
ctest_filter: '-LE "benchmark|perf|rotation"'
ctest_filter: '-LE "benchmark|rotation"'
depends_on: configure
installExtendedTestingTools: true

Expand All @@ -116,7 +116,7 @@ jobs:
cmake_env: "${{ parameters.build.SNPCC.cmake_env }}"
suffix: "Release"
artifact_name: "SNPCC_Release"
ctest_filter: '-LE "benchmark|perf|rotation"'
ctest_filter: '-LE "benchmark|rotation"'
depends_on: configure
installExtendedTestingTools: true

Expand All @@ -127,7 +127,7 @@ jobs:
cmake_args: "${{ parameters.build.common.cmake_args }} ${{ parameters.build.Virtual.cmake_args }} -DCLIENT_PROTOCOLS_TEST=ON -DSHUFFLE_SUITE=ON"
suffix: "Release"
artifact_name: "Virtual_Release"
ctest_filter: '-LE "benchmark|perf|rotation"'
ctest_filter: '-LE "benchmark|rotation"'
depends_on: configure
installExtendedTestingTools: true

Expand All @@ -138,6 +138,6 @@ jobs:
cmake_args: "${{ parameters.build.common.cmake_args }} ${{ parameters.build.SGX.cmake_args }} ${{ parameters.build.unsafe.cmake_args }}"
suffix: "Unsafe"
artifact_name: "SGX_Unsafe"
ctest_filter: '-LE "benchmark|perf|rotation"'
ctest_filter: '-LE "benchmark|rotation"'
depends_on: configure
installExtendedTestingTools: true
2 changes: 1 addition & 1 deletion .azure-pipelines-templates/deploy_aci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -101,7 +101,7 @@ jobs:
set -ex
cd /CCF/build
npm config set cache /ccfci/workspace_$(Build.BuildNumber)/.npm
WORKSPACE=/ccfci/workspace_$(Build.BuildNumber) ELECTION_TIMEOUT_MS=10000 ./tests.sh -VV -T Test -LE "benchmark|perf|tlstest|suite|snp_flaky" -E "lts_compatibility"
WORKSPACE=/ccfci/workspace_$(Build.BuildNumber) ELECTION_TIMEOUT_MS=10000 ./tests.sh -VV -T Test -LE "benchmark|tlstest|suite|snp_flaky" -E "lts_compatibility"
# Remove irrelevant and bulky data from workspace before uploading
find /ccfci/workspace_$(Build.BuildNumber) -type f -name cchost -delete
find /ccfci/workspace_$(Build.BuildNumber) -type f -name "*.so" -delete
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/bencher.yml
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@ jobs:
# Microbenchmarks
./tests.sh -VV -L benchmark
# End to end performance tests
./tests.sh -VV -L perf
./tests.sh -VV -L perf -C perf
# Convert microbenchmark output to bencher json
source env/bin/activate
PYTHONPATH=../tests python convert_pico_to_bencher.py
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -90,7 +90,7 @@ jobs:
# Unit tests
./tests.sh --output-on-failure -L unit -j$(nproc --all)
# All other acceptably fast tests, mostly end-to-end
./tests.sh --timeout 360 --output-on-failure -LE "benchmark|perf|protocolstest|suite|unit"
./tests.sh --timeout 360 --output-on-failure -LE "benchmark|protocolstest|suite|unit"
# Partitions tests
./tests.sh --timeout 240 --output-on-failure -L partitions -C partitions
shell: bash
Expand Down
4 changes: 2 additions & 2 deletions .github/workflows/long-test.yml
Original file line number Diff line number Diff line change
Expand Up @@ -52,7 +52,7 @@ jobs:
run: |
set +x
cd build
./tests.sh --output-on-failure --timeout 1600 -LE "benchmark|perf"
./tests.sh --output-on-failure --timeout 1600 -LE "benchmark"
- name: "Upload logs"
if: success() || failure()
Expand Down Expand Up @@ -88,7 +88,7 @@ jobs:
run: |
set +x
cd build
./tests.sh --output-on-failure --timeout 1600 -LE "benchmark|perf"
./tests.sh --output-on-failure --timeout 1600 -LE "benchmark"
- name: "Upload logs"
if: success() || failure()
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/release.yml
Original file line number Diff line number Diff line change
Expand Up @@ -123,7 +123,7 @@ jobs:
export ASAN_SYMBOLIZER_PATH=$(realpath /usr/bin/llvm-symbolizer-15)
# Unit tests
./tests.sh --output-on-failure -L unit -j$(nproc --all)
./tests.sh --timeout 360 --output-on-failure -LE "benchmark|perf|unit"
./tests.sh --timeout 360 --output-on-failure -LE "benchmark|unit"
shell: bash
if: "${{ matrix.platform.name != 'snp' }}"

Expand Down
1 change: 1 addition & 0 deletions CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -1561,6 +1561,7 @@ if(BUILD_TESTS)
NAME historical_query_perf_test
PYTHON_SCRIPT ${CMAKE_SOURCE_DIR}/tests/historical_query_perf.py
LABEL perf PERF_LABEL "Historical Queries"
CONFIGURATIONS perf
)

add_e2e_test(
Expand Down
1 change: 1 addition & 0 deletions cmake/common.cmake
Original file line number Diff line number Diff line change
Expand Up @@ -306,6 +306,7 @@ function(add_piccolo_test)
--label ${TEST_NAME} --perf-label ${PARSED_ARGS_PERF_LABEL}
--snapshot-tx-interval 10000 ${PARSED_ARGS_ADDITIONAL_ARGS} -e
${ENCLAVE_TYPE} -t ${ENCLAVE_PLATFORM} ${NODES}
CONFIGURATIONS perf
)

# Make python test client framework importable
Expand Down
2 changes: 1 addition & 1 deletion doc/overview/performance.rst
Original file line number Diff line number Diff line change
Expand Up @@ -97,7 +97,7 @@ The end-to-end service performance tests can also be from the CCF build director

.. code-block:: bash
./tests.sh -VV -L "perf"
./tests.sh -VV -L "perf" -C "perf"
Each of these tests creates a temporary CCF service on the local machine, then sends a high volume of transactions to measure peak and average throughput. The python test wrappers will print summary statistics including a transaction rate histogram when the test completes. These statistics can be retrieved from any CCF service via the ``getMetrics`` RPC.

Expand Down

0 comments on commit 1a97e95

Please sign in to comment.