chore: Add benchmark workflow (#758)

* Add benchmark workflow

* Do not check for marine artifact

* Cache rust things

* Fix check

* Run on ubuntu latest

* fix

* Fix?

* No timeout

* Run bench on release of air

* Fix

---------

Co-authored-by: raftedproc <71657594+raftedproc@users.noreply.github.com>
Anatolios Laskaris, 2023-11-25 13:38:02 +02:00 (committed by GitHub)
commit 97be08e900 (parent 524c30243b)
3 changed files with 90 additions and 0 deletions

.github/workflows/benchmark.yml (new file, +73)

@@ -0,0 +1,73 @@
name: Run benchmark with workflow_call
on:
  workflow_call:
    inputs:
      ref:
        description: "git ref to checkout to"
        type: string
        default: "master"
      tag:
        description: "Release tag to upload results to"
        type: string
        default: "null"

jobs:
  benchmark:
    name: Run benchmark
    runs-on: ubuntu-latest

    steps:
      - name: Checkout AquaVM
        uses: actions/checkout@v4
        with:
          repository: fluencelabs/aquavm
          ref: ${{ inputs.ref }}

      - name: Setup Rust toolchain
        uses: dsherret/rust-toolchain-file@v1

      - name: Setup cache
        uses: Swatinem/rust-cache@v2
        with:
          shared-key: aquavm
          save-if: false

      - name: Setup marine
        uses: fluencelabs/setup-marine@v1

      - name: Generate benchmark data
        working-directory: junk/gen-bench-data
        run: ./gen_benchmark_data.sh

      - name: Setup python
        uses: actions/setup-python@v4
        with:
          python-version: "3.x"

      - name: Install python module
        run: pip install tools/cli/performance_metering

      - name: Run benchmark
        run: aquavm_performance_metering run

      - name: Upload benchmark results to artifacts
        uses: actions/upload-artifact@v3
        with:
          # artifact name assumed; the path points at the generated report
          name: benchmark-results
          path: ./benches/PERFORMANCE.txt

      - name: Print benchmark results to checks
        run: |
          {
            echo '```';
            cat ./benches/PERFORMANCE.txt;
            echo '```';
          } >> $GITHUB_STEP_SUMMARY

      - name: Upload benchmark results to release
        if: inputs.tag != 'null'
        uses: softprops/action-gh-release@v1
        with:
          files: ./benches/PERFORMANCE.txt
          tag_name: ${{ inputs.tag }}
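For reference, a minimal caller of this reusable workflow could look like the following sketch. The workflow name and the workflow_dispatch trigger are assumptions for illustration; the `uses:` path and the `ref`/`tag` inputs come from the interface above.

name: Benchmark on demand
on:
  workflow_dispatch:
jobs:
  benchmark:
    uses: ./.github/workflows/benchmark.yml
    with:
      ref: master   # same value the input defaults to anyway
      tag: "null"   # keeping the "null" sentinel skips the release upload step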

(second changed file: the release workflow)

@@ -16,6 +16,9 @@ jobs:
      releases-created: ${{ steps.release.outputs.releases_created }}
      pr: ${{ steps.release.outputs['pr'] }}
      air-release-created: ${{ steps.release.outputs['air--release_created'] }}
      air-tag-name: ${{ steps.release.outputs['air--tag_name'] }}
      air-interpreter-release-created: ${{ steps.release.outputs['air-interpreter--release_created'] }}
      air-interpreter-version: ${{ steps.release.outputs['air-interpreter--version'] }}
      air-interpreter-tag-name: ${{ steps.release.outputs['air-interpreter--tag_name'] }}
@@ -297,6 +300,14 @@ jobs:
        run: npm publish --access public
        working-directory: tools/wasm/air-beautify-wasm/pkg

  benchmark:
    needs: release-please
    if: needs.release-please.outputs.air-release-created
    uses: ./.github/workflows/benchmark.yml
    with:
      ref: ${{ github.ref }}
      tag: ${{ needs.release-please.outputs.air-tag-name }}

  slack:
    if: always()
    name: "Notify"

(third changed file)

@@ -18,6 +18,12 @@ jobs:
    with:
      ref: ${{ github.ref }}

  aquavm-benchmark:
    name: "aquavm"
    uses: ./.github/workflows/benchmark.yml
    with:
      ref: ${{ github.ref }}

  lints:
    runs-on: builder
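Note that this caller passes only `ref`, so the `tag` input keeps its "null" default and the final "Upload benchmark results to release" step in benchmark.yml is skipped by its `if: inputs.tag != 'null'` guard; results are still published to the job artifacts and the step summary.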