diff --git a/.github/actions/run-hey-load-test/action.yml b/.github/actions/run-hey-load-test/action.yml new file mode 100644 index 000000000..5364cb04b --- /dev/null +++ b/.github/actions/run-hey-load-test/action.yml @@ -0,0 +1,46 @@ +name: "Run hey load test" +description: "Composite action to run hey load test" + +inputs: + github-token: + description: "GitHub token" + required: true + +runs: + using: "composite" + steps: + - name: Install hey + shell: bash + run: | + curl -LO https://hey-release.s3.us-east-2.amazonaws.com/hey_linux_amd64 + chmod +x hey_linux_amd64 + sudo mv hey_linux_amd64 /usr/local/bin/hey + hey --help + - name: Run hey benchmark + shell: bash + run: | + hey -n 100000 -c 50 http://127.0.0.1:8080 > raw-output.txt + python3 ./scripts/parse-hey.py raw-output.txt > benchmark-results.json + cat benchmark-results.json + - name: Report Throughput results + uses: benchmark-action/github-action-benchmark@v1.20.4 + with: + name: "HTTP Throughput" + tool: "customBiggerIsBetter" + output-file-path: benchmark-results.json + github-token: ${{ inputs.github-token }} + external-data-json-path: ./cache/benchmark-data.json + summary-always: true + alert-threshold: "120%" + fail-on-alert: true + - name: Report Latency results + uses: benchmark-action/github-action-benchmark@v1.20.4 + with: + name: "HTTP Latency" + tool: "customSmallerIsBetter" + output-file-path: benchmark-results.json + github-token: ${{ inputs.github-token }} + external-data-json-path: ./cache/benchmark-data.json + summary-always: true + alert-threshold: "130%" + fail-on-alert: true \ No newline at end of file diff --git a/.github/action-sign/action.yml b/.github/actions/sign/action.yml similarity index 100% rename from .github/action-sign/action.yml rename to .github/actions/sign/action.yml diff --git a/.github/workflows/action-build.yml b/.github/workflows/action-build.yml index 589e7ae0c..02281a420 100644 --- a/.github/workflows/action-build.yml +++ b/.github/workflows/action-build.yml @@ 
-66,7 +66,7 @@ jobs: if: ${{ inputs.arch == 'x86_64' }} - name: Sign the binary if: ${{ inputs.runtime != 'common' && inputs.slug != 'windows' && inputs.sign }} - uses: ./.github/action-sign + uses: ./.github/actions/sign with: runtime: ${{ inputs.runtime }} - name: Package artifacts diff --git a/.github/workflows/benchmarks.yml b/.github/workflows/benchmarks.yml index f524a1a4f..1fe67489c 100644 --- a/.github/workflows/benchmarks.yml +++ b/.github/workflows/benchmarks.yml @@ -4,33 +4,29 @@ on: schedule: - cron: '0 0 * * *' # Runs daily at midnight pull_request: - + jobs: benchmark: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - - name: fetch submodules + - name: Fetch submodules run: git submodule update --init --recursive - uses: actions-rust-lang/setup-rust-toolchain@v1 with: rustflags: '' #Disable. By default this action sets environment variable is set to -D warnings. We manage this in the Makefile - - name: Setup build env + - name: Setup build environment + shell: bash run: | os=$(echo "$RUNNER_OS" | tr '[:upper:]' '[:lower:]') ./scripts/setup-$os.sh - shell: bash - name: Build and load shims and wasi-demo-app shell: bash run: | - make build - make install - make test-image - make load - make test-image/oci - make load/oci + make build install test-image load test-image/oci load/oci test-image/http load/http - name: Run Benchmarks + shell: bash run: | set -o pipefail cargo bench -p containerd-shim-benchmarks -- --output-format bencher | tee output.txt @@ -56,4 +52,25 @@ jobs: # Enable Job Summary summary-always: true # Where the previous data file is stored - external-data-json-path: ./cache/benchmark-data.json \ No newline at end of file + external-data-json-path: ./cache/benchmark-data.json + - name: Start wasmtime shim + shell: bash + run: | + sudo ctr run --rm --net-host --runtime=io.containerd.wasmtime.v1 ghcr.io/containerd/runwasi/wasi-http:latest wasi-http /wasi-http.wasm & + sleep 1 + - name: Wait for wasmtime shim to start + shell: bash 
+ run: | + while ! curl -s http://127.0.0.1:8080 > /dev/null; do + sleep 1 + done + - name: Run HTTP throughput and latency benchmarks + if: success() + uses: ./.github/actions/run-hey-load-test + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + - name: Stop wasmtime shim + if: success() + shell: bash + run: | + sudo ctr task kill -s SIGKILL wasi-http \ No newline at end of file diff --git a/Makefile b/Makefile index bd03bc633..36216f7ff 100644 --- a/Makefile +++ b/Makefile @@ -174,6 +174,9 @@ test-image: dist/img.tar .PHONY: test-image/oci test-image/oci: dist/img-oci.tar dist/img-oci-artifact.tar +.PHONY: test-image/http +test-image/http: dist/http-img-oci.tar + .PHONY: test-image/clean test-image/clean: rm -rf target/wasm32-wasip1/$(OPT_PROFILE)/ @@ -218,6 +221,10 @@ load/oci: dist/img-oci.tar dist/img-oci-artifact.tar sudo ctr -n $(CONTAINERD_NAMESPACE) image import --all-platforms $< sudo ctr -n $(CONTAINERD_NAMESPACE) image import --all-platforms dist/img-oci-artifact.tar +.PHONY: load/http +load/http: dist/http-img-oci.tar + sudo ctr -n $(CONTAINERD_NAMESPACE) image import --all-platforms $< + target/wasm32-wasip1/$(OPT_PROFILE)/img-oci.tar: target/wasm32-wasip1/$(OPT_PROFILE)/wasi-demo-app.wasm mkdir -p ${CURDIR}/bin/$(OPT_PROFILE)/ cargo run --bin oci-tar-builder -- --name wasi-demo-oci --repo ghcr.io/containerd/runwasi --tag latest --module ./target/wasm32-wasip1/$(OPT_PROFILE)/wasi-demo-app.wasm -o target/wasm32-wasip1/$(OPT_PROFILE)/img-oci.tar @@ -227,6 +234,16 @@ target/wasm32-wasip1/$(OPT_PROFILE)/img-oci-artifact.tar: target/wasm32-wasip1/$ mkdir -p ${CURDIR}/bin/$(OPT_PROFILE)/ cargo run --bin oci-tar-builder -- --name wasi-demo-oci-artifact --as-artifact --repo ghcr.io/containerd/runwasi --tag latest --module ./target/wasm32-wasip1/$(OPT_PROFILE)/wasi-demo-app.wasm -o target/wasm32-wasip1/$(OPT_PROFILE)/img-oci-artifact.tar +.PHONY: +dist/http-img-oci.tar: crates/containerd-shim-wasm-test-modules/src/modules/hello_wasi_http.wasm + @mkdir -p 
"dist/" + cargo run --bin oci-tar-builder -- \ + --name wasi-http \ + --repo ghcr.io/containerd/runwasi \ + --tag latest \ + --module $< \ + -o $@ + bin/kind: test/k8s/Dockerfile $(DOCKER_BUILD) --output=bin/ -f test/k8s/Dockerfile --target=kind . diff --git a/crates/containerd-shim-wasmtime/README.md b/crates/containerd-shim-wasmtime/README.md index 05cd85748..21d6f7dbb 100644 --- a/crates/containerd-shim-wasmtime/README.md +++ b/crates/containerd-shim-wasmtime/README.md @@ -67,7 +67,7 @@ sudo ctr image import --all-platforms ./dist/wasi-http-img-oci.tar ```shell sudo ctr run --rm --net-host --runtime=io.containerd.wasmtime.v1 \ - ghcr.io/containerd/runwasi/wasi-js:latest wasi-http /wasi-http.wasm + ghcr.io/containerd/runwasi/wasi-http:latest wasi-http /wasi-http.wasm ``` - Finally, assuming our handler will respond to `GET` requests at `/`, we can diff --git a/scripts/parse-hey.py b/scripts/parse-hey.py new file mode 100644 index 000000000..d946441da --- /dev/null +++ b/scripts/parse-hey.py @@ -0,0 +1,75 @@ +# Usage: +# +# First run: hey http://127.0.0.1:8080 > raw-output.txt +# Then run: python ./scripts/parse-hey.py ./raw-output.txt +# Output: +# [ +# { +# "name": "HTTP RPS", +# "unit": "req/s", +# "value": 13334.5075 +# }, +# { +# "name": "HTTP p95 Latency", +# "unit": "ms", +# "value": 4.1000000000000005 +# } +# ] + + +import json +import re +import sys + +def parse_hey_output(file_path): + with open(file_path, 'r') as f: + lines = f.readlines() + + rps = None + lat = None + + for i, line in enumerate(lines): + line = line.strip() + + if line.startswith("Requests/sec:"): + parts = line.split() + if len(parts) >= 2: + rps = float(parts[1]) + + if "Latency distribution:" in line: + for j in range(i+1, len(lines)): + if "% in" in lines[j] and "95%" in lines[j]: + match = re.search(r'95% in ([0-9.]+) secs', lines[j]) + if match: + lat = float(match.group(1)) + break + + return rps, lat + + +if __name__ == "__main__": + if len(sys.argv) != 2: + print("Usage: 
python parse_hey.py <file_path>")
+        sys.exit(1)
+
+    file_path = sys.argv[1]
+    rps, latency = parse_hey_output(file_path)
+
+    latency = latency * 1000 if latency is not None else None
+
+    results = []
+    if rps is not None:
+        results.append({
+            "name": "HTTP RPS",
+            "unit": "req/s",
+            "value": rps
+        })
+
+    if latency is not None:
+        results.append({
+            "name": "HTTP p95 Latency",
+            "unit": "ms",
+            "value": latency
+        })
+
+    print(json.dumps(results, indent=2))