feat: Track the binary size for all the different platforms (#34744)

* feat: Track the binary size for all the different platforms

Signed-off-by: DK Liao <dklassic@gmail.com>

* Add target to bencher job name

Signed-off-by: DK Liao <dklassic@gmail.com>

* Update .github/workflows/bencher.yml

Co-authored-by: Martin Robinson <mrobinson@igalia.com>
Signed-off-by: DK Liao <dklassic@gmail.com>

---------

Signed-off-by: DK Liao <dklassic@gmail.com>
Co-authored-by: Martin Robinson <mrobinson@igalia.com>
This commit is contained in:
DK Liao 2025-01-20 17:41:23 +08:00 committed by GitHub
parent c070372d1e
commit 9ceb957dd8
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
11 changed files with 365 additions and 153 deletions

View file

@ -13,6 +13,11 @@ on:
github-release-id:
required: false
type: string
bencher:
required: false
default: false
type: boolean
workflow_dispatch:
inputs:
profile:
@ -20,6 +25,11 @@ on:
default: "release"
type: choice
options: ["release", "debug", "production"]
bencher:
required: false
default: false
type: boolean
env:
RUST_BACKTRACE: 1
@ -104,7 +114,7 @@ jobs:
- name: Archive build timing
uses: actions/upload-artifact@v4
with:
name: cargo-timings-android-${{ matrix.target }}
name: cargo-timings-android-${{ matrix.target }}-${{ inputs.profile }}
# Using a wildcard here ensures that the archive includes the path.
path: target/cargo-timings-*
- name: Upload nightly
@ -137,3 +147,21 @@ jobs:
with:
name: ${{ inputs.profile }}-library-android-${{ matrix.target }}
path: target/android/${{ matrix.target }}/${{ inputs.profile }}/servoview.aar
bencher:
needs: ["build"]
strategy:
matrix:
target: ['aarch64-linux-android', 'armv7-linux-androideabi', 'i686-linux-android', 'x86_64-linux-android']
if: ${{ inputs.bencher && inputs.profile != 'debug' && github.event_name != 'workflow_dispatch' && github.event_name != 'merge_group' }}
name: 'Bencher (${{ matrix.target }})'
uses: ./.github/workflows/bencher.yml
with:
target: android-${{ matrix.target }}
profile: ${{ inputs.profile }}
compressed-file-path: ${{ inputs.profile }}-binary-android-${{ matrix.target }}/servoapp.apk
binary-path: lib/${{ matrix.target == 'aarch64-linux-android' && 'arm64-v8a' || matrix.target == 'armv7-linux-androideabi' && 'armeabi-v7a' || matrix.target == 'i686-linux-android' && 'x86' || matrix.target == 'x86_64-linux-android' && 'x86_64'}}/libservoshell.so
file-size: true
speedometer: false
dromaeo: false
secrets: inherit

147
.github/workflows/bencher.yml vendored Normal file
View file

@ -0,0 +1,147 @@
name: Bencher
on:
workflow_call:
inputs:
target:
required: false
default: "linux"
type: string
profile:
required: false
default: "release"
type: string
compressed-file-path:
required: false
default: ""
type: string
binary-path:
required: false
default: ""
type: string
file-size:
required: false
default: false
type: boolean
stripped-file-size:
required: false
default: false
type: boolean
speedometer:
required: false
default: false
type: boolean
dromaeo:
required: false
default: false
type: boolean
permissions:
checks: write
pull-requests: write
env:
RUST_BACKTRACE: 1
SHELL: /bin/bash
# allows overriding bencher project for pushes
BENCHER_PROJECT: ${{ vars.BENCHER_PROJECT || 'servo' }}
jobs:
bencher:
name: Bencher (${{ inputs.target }})
# This needs to be kept in sync with the `--testbed` argument sent to bencher.
runs-on: ubuntu-22.04
steps:
- uses: actions/checkout@v4
if: github.event_name != 'pull_request_target'
with:
fetch-depth: 0
# This is necessary to checkout the pull request if this run was triggered via a
# `pull_request_target` event.
- uses: actions/checkout@v4
if: github.event_name == 'pull_request_target'
with:
ref: refs/pull/${{ github.event.number }}/head
fetch-depth: 0
- uses: actions/download-artifact@v4
with:
name: ${{ inputs.profile }}-binary-${{ inputs.target }}
path: ${{ inputs.profile }}-binary-${{ inputs.target }}
# Linux and macOS uploads compressed binary, need to extract first
- name: unPackage binary (tar)
if: ${{ inputs.compressed-file-path != '' && contains(inputs.compressed-file-path, '.tar.gz') }}
run: tar -xzf ${{ inputs.compressed-file-path }}
- name: unPackage binary (unzip)
if: ${{ inputs.compressed-file-path != '' && !contains(inputs.compressed-file-path, '.tar.gz') }}
run: unzip ${{ inputs.compressed-file-path }}
- name: Setup Python
uses: ./.github/actions/setup-python
- name: Bootstrap dependencies
if: ${{ inputs.speedometer == true || inputs.dromaeo == true }}
run: |
sudo apt update
sudo apt install -qy --no-install-recommends mesa-vulkan-drivers
python3 ./mach bootstrap --skip-lints
- uses: bencherdev/bencher@main
- name: File size
if: ${{ inputs.file-size == true }}
run: |
./etc/ci/bencher.py filesize ${{ inputs.binary-path }} ${{ inputs.target }}-${{ inputs.profile }} --bmf-output size.json
echo "SERVO_FILE_SIZE_RESULT=size.json" >> "$GITHUB_ENV"
# We'll additionally strip and measure the size of the binary when using production profile
- name: Install LLVM
if: ${{ inputs.file-size == true && inputs.profile == 'production' }}
uses: KyleMayes/install-llvm-action@v2
with:
version: "17"
- name: Install llvm-strip dependency
if: ${{ inputs.file-size == true && inputs.profile == 'production' }}
run: sudo apt install libncurses5
- name: File size (llvm stripped)
if: ${{ inputs.file-size == true && inputs.profile == 'production' }}
run: |
llvm-strip ${{ inputs.binary-path }}
./etc/ci/bencher.py filesize ${{ inputs.binary-path }} ${{ inputs.target }}-${{ inputs.profile }}-stripped --bmf-output size-stripped.json
echo "SERVO_STRIPPED_FILE_SIZE_RESULT=size-stripped.json" >> "$GITHUB_ENV"
- name: Speedometer
if: ${{ inputs.speedometer == true }}
run: |
python3 ./mach test-speedometer -r --bmf-output speedometer.json
echo "SERVO_SPEEDOMETER_RESULT=speedometer.json" >> "$GITHUB_ENV"
- name: Dromaeo
if: ${{ inputs.dromaeo == true }}
run: |
python3 ./mach test-dromaeo -r dom --bmf-output dromaeo.json
echo "SERVO_DROMAEO_RESULT=dromaeo.json" >> "$GITHUB_ENV"
# set options
- name: Set bencher opts for PRs (label try run)
if: github.event_name == 'pull_request_target'
run: |
echo "RUN_BENCHER_OPTIONS=--branch ${{ github.event.number }}/PR \
--branch-start-point ${{ github.base_ref }} \
--branch-start-point-hash ${{ github.event.pull_request.base.sha }} \
--branch-reset \
--github-actions ${{ secrets.GITHUB_TOKEN }}" >> "$GITHUB_ENV"
- name: Set bencher opts for main
if: ${{ github.event_name == 'push' && github.ref_name == 'main' }}
run: |
echo "RUN_BENCHER_OPTIONS=--branch main \
--github-actions ${{ secrets.GITHUB_TOKEN }}" >> "$GITHUB_ENV"
- name: Set bencher opts for try branch
if: ${{ github.event_name == 'push' && github.ref_name == 'try' }}
run: |
git remote add upstream https://github.com/servo/servo
git fetch upstream main
echo "RUN_BENCHER_OPTIONS=--branch try \
--github-actions ${{ secrets.GITHUB_TOKEN }} \
--hash $(git rev-parse HEAD~1) \
--branch-start-point main \
--branch-start-point-hash $(git merge-base upstream/main HEAD) \
--branch-reset" >> "$GITHUB_ENV"
# we join results and send all data once to have it all in one report
- name: Send results
continue-on-error: true
run: |
./etc/ci/bencher.py merge ${{ env.SERVO_FILE_SIZE_RESULT }} ${{ env.SERVO_STRIPPED_FILE_SIZE_RESULT }} ${{ env.SERVO_SPEEDOMETER_RESULT }} ${{ env.SERVO_DROMAEO_RESULT }} --bmf-output b.json
bencher run --adapter json --file b.json \
--project ${{ env.BENCHER_PROJECT }} --token ${{ secrets.BENCHER_API_TOKEN }} --testbed ubuntu-22.04 \
$RUN_BENCHER_OPTIONS

View file

@ -30,6 +30,7 @@ jobs:
with:
profile: ${{ inputs.profile }}
unit-tests: ${{ inputs.unit-tests }}
bencher: ${{ inputs.bencher }}
macos:
if: ${{ inputs.workflow == 'macos' }}
@ -41,6 +42,7 @@ jobs:
wpt-layout: ${{ inputs.wpt-layout }}
unit-tests: ${{ inputs.unit-tests }}
wpt-args: ${{ inputs.wpt-args }}
bencher: ${{ inputs.bencher }}
linux:
if: ${{ inputs.workflow == 'linux' }}
@ -67,10 +69,13 @@ jobs:
secrets: inherit
with:
profile: ${{ inputs.profile }}
bencher: ${{ inputs.bencher }}
ohos:
if: ${{ inputs.workflow == 'ohos' }}
name: OpenHarmony
uses: ./.github/workflows/ohos.yml
secrets: inherit
with:
profile: ${{ inputs.profile }}
bencher: ${{ inputs.bencher }}

View file

@ -1,86 +0,0 @@
name: Linux WPT Tests
on:
workflow_call:
permissions:
checks: write
pull-requests: write
env:
RUST_BACKTRACE: 1
SHELL: /bin/bash
# allows overriding bencher project for pushes
BENCHER_PROJECT: ${{ vars.BENCHER_PROJECT || 'servo' }}
jobs:
linux-bencher:
name: Bencher
# keep this in sync with testbed
runs-on: ubuntu-22.04
steps:
- uses: actions/checkout@v4
if: github.event_name != 'pull_request_target'
with:
fetch-depth: 0
# This is necessary to checkout the pull request if this run was triggered via a
# `pull_request_target` event.
- uses: actions/checkout@v4
if: github.event_name == 'pull_request_target'
with:
ref: refs/pull/${{ github.event.number }}/head
fetch-depth: 0
- uses: actions/download-artifact@v4
with:
name: release-binary-linux
path: release-binary-linux
- name: unPackage binary
run: tar -xzf release-binary-linux/target.tar.gz
- name: Setup Python
uses: ./.github/actions/setup-python
- name: Bootstrap dependencies
run: |
sudo apt update
sudo apt install -qy --no-install-recommends mesa-vulkan-drivers
python3 ./mach bootstrap --skip-lints
- uses: bencherdev/bencher@main
- name: File size
run: ./etc/ci/bencher.py filesize target/release/servo --bmf-output size.json
- name: Speedometer
run: |
python3 ./mach test-speedometer -r --bmf-output speedometer.json
- name: Dromaeo
run: |
python3 ./mach test-dromaeo -r dom --bmf-output dromaeo.json
# set options
- name: Set bencher opts for PRs (label try run)
if: github.event_name == 'pull_request_target'
run: |
echo "RUN_BENCHER_OPTIONS=--branch ${{ github.event.number }}/PR \
--branch-start-point ${{ github.base_ref }} \
--branch-start-point-hash ${{ github.event.pull_request.base.sha }} \
--branch-reset \
--github-actions ${{ secrets.GITHUB_TOKEN }}" >> "$GITHUB_ENV"
- name: Set bencher opts for main
if: ${{ github.event_name == 'push' && github.ref_name == 'main' }}
run: |
echo "RUN_BENCHER_OPTIONS=--branch main \
--github-actions ${{ secrets.GITHUB_TOKEN }}" >> "$GITHUB_ENV"
- name: Set bencher opts for try branch
if: ${{ github.event_name == 'push' && github.ref_name == 'try' }}
run: |
git remote add upstream https://github.com/servo/servo
git fetch upstream main
echo "RUN_BENCHER_OPTIONS=--branch try \
--github-actions ${{ secrets.GITHUB_TOKEN }} \
--hash $(git rev-parse HEAD~1) \
--branch-start-point main \
--branch-start-point-hash $(git merge-base upstream/main HEAD) \
--branch-reset" >> "$GITHUB_ENV"
# we join results and send all data once to have it all in one report
- name: Send results
continue-on-error: true
run: |
./etc/ci/bencher.py merge size.json speedometer.json dromaeo.json --bmf-output b.json
bencher run --adapter json --file b.json \
--project ${{ env.BENCHER_PROJECT }} --token ${{ secrets.BENCHER_API_TOKEN }} --testbed ubuntu-22.04 \
$RUN_BENCHER_OPTIONS

View file

@ -168,7 +168,7 @@ jobs:
- name: Archive build timing
uses: actions/upload-artifact@v4
with:
name: cargo-timings-linux
name: cargo-timings-linux-${{ inputs.profile }}
# Using a wildcard here ensures that the archive includes the path.
path: target/cargo-timings-*
- name: Build mach package
@ -176,7 +176,7 @@ jobs:
- name: Upload artifact for mach package
uses: actions/upload-artifact@v4
with:
name: linux
name: linux-${{ inputs.profile }}
path: target/${{ inputs.profile }}/servo-tech-demo.tar.gz
- name: Upload nightly
if: ${{ inputs.upload }}
@ -222,7 +222,15 @@ jobs:
bencher:
needs: ["build"]
# benches must be release (we will do benches for production profile in servo/servo-nightly-builds)
if: ${{ inputs.bencher && inputs.profile == 'release' && github.event_name != 'workflow_dispatch' && github.event_name != 'merge_group' }}
uses: ./.github/workflows/linux-bencher.yml
if: ${{ inputs.bencher && inputs.profile != 'debug' && github.event_name != 'workflow_dispatch' && github.event_name != 'merge_group' }}
uses: ./.github/workflows/bencher.yml
with:
target: 'linux'
profile: ${{ inputs.profile }}
compressed-file-path: ${{ inputs.profile }}-binary-linux/target.tar.gz
binary-path: target/${{ inputs.profile }}/servo
file-size: true
# We only evaluate speedometer and dromaeo score in release
speedometer: ${{ inputs.profile == 'release' }}
dromaeo: ${{ inputs.profile == 'release' }}
secrets: inherit

View file

@ -26,9 +26,13 @@ on:
required: false
type: string
force-github-hosted-runner:
required: false
type: boolean
default: false
bencher:
required: false
type: boolean
default: false
type: boolean
workflow_dispatch:
inputs:
profile:
@ -53,9 +57,13 @@ on:
default: false
type: boolean
force-github-hosted-runner:
required: false
type: boolean
default: false
bencher:
required: false
type: boolean
default: false
type: boolean
env:
RUST_BACKTRACE: 1
@ -155,13 +163,13 @@ jobs:
- name: Archive build timing
uses: actions/upload-artifact@v4
with:
name: cargo-timings-macos
name: cargo-timings-macos-${{ inputs.profile }}
# Using a wildcard here ensures that the archive includes the path.
path: target/cargo-timings-*
- name: Upload artifact for mach package
uses: actions/upload-artifact@v4
with:
name: mac
name: ${{ inputs.profile }}-binary-mac
path: target/${{ inputs.profile }}/servo-tech-demo.dmg
- name: Upload nightly
if: ${{ inputs.upload }}
@ -202,3 +210,17 @@ jobs:
wpt-layout: "layout-2013"
wpt-args: ${{ inputs.wpt-args }}
secrets: inherit
bencher:
needs: ["build"]
if: ${{ inputs.bencher && inputs.profile != 'debug' && github.event_name != 'workflow_dispatch' && github.event_name != 'merge_group' }}
uses: ./.github/workflows/bencher.yml
with:
target: 'macos'
profile: ${{ inputs.profile }}
compressed-file-path: ${{ inputs.profile }}-binary-macos/target.tar.gz
binary-path: target/${{ inputs.profile }}/servo
file-size: true
speedometer: false
dromaeo: false
secrets: inherit

View file

@ -35,7 +35,7 @@ jobs:
run: |
{
echo 'result<<EOF'
python ./python/servo/try_parser.py ${{ github.event_name == 'pull_request' && 'linux lint' || 'fail-fast full' }}
python ./python/servo/try_parser.py ${{ github.event_name == 'pull_request' && 'linux-unit-tests lint' || github.event_name == 'push' && 'fail-fast full bencher production-bencher' || 'fail-fast full' }}
echo EOF
} >> $GITHUB_OUTPUT

View file

@ -13,6 +13,11 @@ on:
github-release-id:
required: false
type: string
bencher:
required: false
default: false
type: boolean
workflow_dispatch:
inputs:
profile:
@ -21,6 +26,11 @@ on:
type: choice
description: "Cargo build profile"
options: [ "release", "debug", "production"]
bencher:
required: false
default: false
type: boolean
env:
RUST_BACKTRACE: 1
@ -34,6 +44,8 @@ jobs:
strategy:
matrix:
target: ['aarch64-unknown-linux-ohos', 'x86_64-unknown-linux-ohos']
outputs:
signed: ${{ steps.signing-config.outputs.signed }}
steps:
- uses: actions/checkout@v4
if: github.event_name != 'pull_request_target'
@ -70,6 +82,7 @@ jobs:
npm install "@ohos/hvigor@5" "@ohos/hvigor-ohos-plugin@5"
echo "HVIGOR_PATH=$PWD" >> $GITHUB_ENV
- name: "Setup HAP signing config"
id: signing-config
env:
SIGNING_MATERIAL: ${{ secrets.SERVO_OHOS_SIGNING_MATERIAL }}
if: ${{ inputs.upload || env.SIGNING_MATERIAL != '' }} # Allows the build to pass on forks.
@ -78,6 +91,7 @@ jobs:
echo "${SIGNING_MATERIAL}" | base64 -d > servo-ohos-material.zip
unzip servo-ohos-material.zip
echo "SERVO_OHOS_SIGNING_CONFIG=${PWD}/servo-ohos-material/signing-configs.json" >> $GITHUB_ENV
echo "signed=true" >> "$GITHUB_OUTPUT"
- name: Build (arch ${{ matrix.target }} profile ${{ inputs.profile }})
env:
OHOS_SDK_NATIVE: ${{ steps.setup_sdk.outputs.ohos_sdk_native }}
@ -88,7 +102,7 @@ jobs:
- name: Archive build timing
uses: actions/upload-artifact@v4
with:
name: cargo-timings-ohos-${{ matrix.target }}
name: cargo-timings-ohos-${{ matrix.target }}-${{ inputs.profile }}
# Using a wildcard here ensures that the archive includes the path.
path: target/cargo-timings-*
- name: Upload nightly
@ -118,3 +132,20 @@ jobs:
with:
name: ${{ inputs.profile }}-binary-ohos-${{ matrix.target }}
path: target/openharmony/${{ matrix.target }}/${{ inputs.profile }}/entry/build/default/outputs/default/servoshell-default-unsigned.hap
bencher:
needs: ["build"]
strategy:
matrix:
target: ['aarch64-unknown-linux-ohos', 'x86_64-unknown-linux-ohos']
if: ${{ inputs.bencher && inputs.profile != 'debug' && github.event_name != 'workflow_dispatch' && github.event_name != 'merge_group' }}
uses: ./.github/workflows/bencher.yml
with:
target: ohos-${{ matrix.target }}
profile: ${{ inputs.profile }}
compressed-file-path: ${{ inputs.profile }}-binary-ohos-${{ matrix.target }}/servoshell-default-${{ needs.build.outputs.signed == 'true' && 'signed' || 'unsigned' }}.hap
binary-path: libs/${{ matrix.target == 'aarch64-unknown-linux-ohos' && 'arm64-v8a' || matrix.target == 'x86_64-unknown-linux-ohos' && 'x86_64' }}/libservoshell.so
file-size: true
speedometer: false
dromaeo: false
secrets: inherit

View file

@ -22,6 +22,10 @@ on:
required: false
type: boolean
default: false
bencher:
required: false
default: false
type: boolean
workflow_dispatch:
inputs:
profile:
@ -41,6 +45,10 @@ on:
required: false
type: boolean
default: false
bencher:
required: false
default: false
type: boolean
env:
RUST_BACKTRACE: 1
@ -163,7 +171,7 @@ jobs:
- name: Archive build timing
uses: actions/upload-artifact@v4
with:
name: cargo-timings-windows
name: cargo-timings-windows-${{ inputs.profile }}
# Using a wildcard here ensures that the archive includes the path.
path: C:\\a\\servo\\servo\\target\\cargo-timings-*
- name: Build mach package
@ -171,7 +179,7 @@ jobs:
- name: Upload artifact for mach package
uses: actions/upload-artifact@v4
with:
name: win
name: ${{ inputs.profile }}-binary-windows
# These files are available
# MSI Installer: C:\a\servo\servo\target\${{ inputs.profile }}\msi\Installer.msi
# Bundle: C:\a\servo\servo\target\${{ inputs.profile }}\msi\Servo.exe
@ -186,3 +194,16 @@ jobs:
S3_UPLOAD_CREDENTIALS: ${{ secrets.S3_UPLOAD_CREDENTIALS }}
NIGHTLY_REPO_TOKEN: ${{ secrets.NIGHTLY_REPO_TOKEN }}
NIGHTLY_REPO: ${{ github.repository_owner }}/servo-nightly-builds
bencher:
needs: ["build"]
if: ${{ inputs.bencher && inputs.profile != 'debug' && github.event_name != 'workflow_dispatch' && github.event_name != 'merge_group' }}
uses: ./.github/workflows/bencher.yml
with:
target: 'windows'
profile: ${{ inputs.profile }}
compressed-file-path: ''
binary-path: ${{ inputs.profile }}-binary-windows/Servo.exe
file-size: true
speedometer: false
dromaeo: false
secrets: inherit

View file

@ -19,7 +19,7 @@ def size(args):
print(size)
with open(args.bmf_output, 'w', encoding='utf-8') as f:
json.dump({
'servo': {
args.variant: {
'file-size': {
'value': float(size),
}
@ -46,6 +46,7 @@ parser = argparse.ArgumentParser("Helper commands for bencher")
subparser = parser.add_subparsers()
size_parser = subparser.add_parser("filesize", help="Returns BMF for filesize")
size_parser.add_argument("binary", help="Servo binary file")
size_parser.add_argument("variant", help="variant of the binary")
size_parser.add_argument("--bmf-output", help="BMF JSON output file", default=None)
size_parser.set_defaults(func=size)

View file

@ -73,54 +73,78 @@ class JobConfig(object):
self.wpt_layout |= other.wpt_layout
self.unit_tests |= other.unit_tests
self.bencher |= other.bencher
common = min([self.name, other.name], key=len)
p1 = self.name.strip(common).strip()
p2 = other.name.strip(common).strip()
self.name = common.strip()
if p1:
self.name += f" {p1}"
if p2:
self.name += f" {p2}"
self.update_name()
return True
def update_name(self):
if self.workflow is Workflow.LINUX:
self.name = "Linux"
elif self.workflow is Workflow.MACOS:
self.name = "MacOS"
elif self.workflow is Workflow.WINDOWS:
self.name = "Windows"
elif self.workflow is Workflow.ANDROID:
self.name = "Android"
elif self.workflow is Workflow.OHOS:
self.name = "OpenHarmony"
modifier = []
if self.profile != "release":
modifier.append(self.profile.title())
if self.unit_tests:
modifier.append("Unit Tests")
if self.wpt_layout != Layout.none:
modifier.append("WPT")
if self.bencher:
modifier.append("Bencher")
if modifier:
self.name += " (" + ", ".join(modifier) + ")"
def handle_preset(s: str) -> Optional[JobConfig]:
s = s.lower()
if s == "linux":
return JobConfig("Linux", Workflow.LINUX, unit_tests=True)
elif s in ["perf", "linux-perf", "bencher"]:
return JobConfig("Linux perf", Workflow.LINUX, bencher=True)
elif s in ["mac", "macos"]:
return JobConfig("MacOS", Workflow.MACOS, unit_tests=True)
elif s in ["win", "windows"]:
return JobConfig("Windows", Workflow.WINDOWS, unit_tests=True)
elif s in ["wpt-2013", "linux-wpt-2013"]:
return JobConfig("Linux WPT", Workflow.LINUX, wpt_layout=Layout.layout2013)
elif s in ["wpt-2020", "linux-wpt-2020", "wpt", "linux-wpt"]:
return JobConfig("Linux WPT", Workflow.LINUX, wpt_layout=Layout.layout2020)
elif s in ["mac-wpt", "wpt-mac"]:
return JobConfig("MacOS WPT", Workflow.MACOS, wpt_layout=Layout.all())
elif s == "mac-wpt-2013":
return JobConfig("MacOS WPT", Workflow.MACOS, wpt_layout=Layout.layout2013)
elif s == "mac-wpt-2020":
return JobConfig("MacOS WPT", Workflow.MACOS, wpt_layout=Layout.layout2020)
elif s == "android":
if any(word in s for word in ["linux"]):
return JobConfig("Linux", Workflow.LINUX)
elif any(word in s for word in ["mac", "macos"]):
return JobConfig("MacOS", Workflow.MACOS)
elif any(word in s for word in ["win", "windows"]):
return JobConfig("Windows", Workflow.WINDOWS)
elif any(word in s for word in ["android"]):
return JobConfig("Android", Workflow.ANDROID)
elif s in ["ohos", "openharmony"]:
elif any(word in s for word in ["ohos", "openharmony"]):
return JobConfig("OpenHarmony", Workflow.OHOS)
elif s == "webgpu":
elif any(word in s for word in ["webgpu"]):
return JobConfig("WebGPU CTS", Workflow.LINUX,
wpt_layout=Layout.layout2020, # reftests are made for the new layout
wpt_args="_webgpu", # run only webgpu cts
profile="production", # WebGPU runs too slowly with debug assertions
unit_tests=False) # production profile does not work with unit-tests
elif s in ["lint", "tidy"]:
elif any(word in s for word in ["lint", "tidy"]):
return JobConfig("Lint", Workflow.LINT)
else:
return None
def handle_modifier(config: JobConfig, s: str) -> Optional[JobConfig]:
if config is None:
return None
s = s.lower()
if "unit-tests" in s:
config.unit_tests = True
if "production" in s:
config.profile = "production"
if "bencher" in s:
config.bencher = True
if "wpt-2013" in s:
config.wpt_layout = Layout.layout2013
elif "wpt-2020" in s:
config.wpt_layout = Layout.layout2020
elif "wpt" in s:
config.wpt_layout = Layout.all()
config.update_name()
return config
class Encoder(json.JSONEncoder):
def default(self, o):
if isinstance(o, (Config, JobConfig)):
@ -151,10 +175,18 @@ class Config(object):
self.fail_fast = True
continue # skip over keyword
if word == "full":
words.extend(["linux", "linux-wpt", "linux-perf", "macos", "windows", "android", "ohos", "lint"])
words.extend(["linux-unit-tests", "linux-wpt-2020", "linux-bencher"])
words.extend(["macos-unit-tests", "windows-unit-tests", "android", "ohos", "lint"])
continue # skip over keyword
if word == "bencher":
words.extend(["linux-bencher", "macos-bencher", "windows-bencher", "android-bencher", "ohos-bencher"])
continue # skip over keyword
if word == "production-bencher":
words.extend(["linux-production-bencher", "macos-production-bencher", "windows-production-bencher"])
words.extend(["ohos-production-bencher"])
continue # skip over keyword
job = handle_preset(word)
job = handle_modifier(job, word)
if job is None:
print(f"Ignoring unknown preset {word}")
else:
@ -181,11 +213,11 @@ if __name__ == "__main__":
class TestParser(unittest.TestCase):
def test_string(self):
self.assertDictEqual(json.loads(Config("linux fail-fast").to_json()),
self.assertDictEqual(json.loads(Config("linux-unit-tests fail-fast").to_json()),
{'fail_fast': True,
'matrix': [{
'bencher': False,
'name': 'Linux',
'name': 'Linux (Unit Tests)',
'profile': 'release',
'unit_tests': True,
'workflow': 'linux',
@ -198,7 +230,7 @@ class TestParser(unittest.TestCase):
self.assertDictEqual(json.loads(Config("").to_json()),
{"fail_fast": False, "matrix": [
{
"name": "Linux WPT perf",
"name": "Linux (Unit Tests, WPT, Bencher)",
"workflow": "linux",
"wpt_layout": "2020",
"profile": "release",
@ -207,7 +239,7 @@ class TestParser(unittest.TestCase):
"wpt_args": ""
},
{
"name": "MacOS",
"name": "MacOS (Unit Tests)",
"workflow": "macos",
"wpt_layout": "none",
"profile": "release",
@ -216,7 +248,7 @@ class TestParser(unittest.TestCase):
"wpt_args": ""
},
{
"name": "Windows",
"name": "Windows (Unit Tests)",
"workflow": "windows",
"wpt_layout": "none",
"profile": "release",
@ -253,11 +285,11 @@ class TestParser(unittest.TestCase):
]})
def test_job_merging(self):
self.assertDictEqual(json.loads(Config("wpt-2020 wpt-2013").to_json()),
self.assertDictEqual(json.loads(Config("linux-wpt-2020 linux-wpt-2013").to_json()),
{'fail_fast': False,
'matrix': [{
'bencher': False,
'name': 'Linux WPT',
'name': 'Linux (WPT)',
'profile': 'release',
'unit_tests': False,
'workflow': 'linux',
@ -266,30 +298,33 @@ class TestParser(unittest.TestCase):
}]
})
a = JobConfig("Linux", Workflow.LINUX, unit_tests=True)
a = JobConfig("Linux (Unit Tests)", Workflow.LINUX, unit_tests=True)
b = JobConfig("Linux", Workflow.LINUX, unit_tests=False)
self.assertTrue(a.merge(b), "Should merge jobs that have different unit test configurations.")
self.assertEqual(a, JobConfig("Linux", Workflow.LINUX, unit_tests=True))
self.assertEqual(a, JobConfig("Linux (Unit Tests)", Workflow.LINUX, unit_tests=True))
a = handle_preset("linux")
b = handle_preset("linux-wpt")
a = handle_preset("linux-unit-tests")
a = handle_modifier(a, "linux-unit-tests")
b = handle_preset("linux-wpt-2020")
b = handle_modifier(b, "linux-wpt-2020")
self.assertTrue(a.merge(b), "Should merge jobs that have different unit test configurations.")
self.assertEqual(a, JobConfig("Linux WPT", Workflow.LINUX, unit_tests=True, wpt_layout=Layout.layout2020))
self.assertEqual(a, JobConfig("Linux (Unit Tests, WPT)", Workflow.LINUX,
unit_tests=True, wpt_layout=Layout.layout2020))
a = JobConfig("Linux", Workflow.LINUX, unit_tests=True)
a = JobConfig("Linux (Unit Tests)", Workflow.LINUX, unit_tests=True)
b = JobConfig("Mac", Workflow.MACOS, unit_tests=True)
self.assertFalse(a.merge(b), "Should not merge jobs with different workflows.")
self.assertEqual(a, JobConfig("Linux", Workflow.LINUX, unit_tests=True))
self.assertEqual(a, JobConfig("Linux (Unit Tests)", Workflow.LINUX, unit_tests=True))
a = JobConfig("Linux", Workflow.LINUX, unit_tests=True)
b = JobConfig("Linux", Workflow.LINUX, unit_tests=True, profile="production")
a = JobConfig("Linux (Unit Tests)", Workflow.LINUX, unit_tests=True)
b = JobConfig("Linux (Unit Tests, Production)", Workflow.LINUX, unit_tests=True, profile="production")
self.assertFalse(a.merge(b), "Should not merge jobs with different profiles.")
self.assertEqual(a, JobConfig("Linux", Workflow.LINUX, unit_tests=True))
self.assertEqual(a, JobConfig("Linux (Unit Tests)", Workflow.LINUX, unit_tests=True))
a = JobConfig("Linux", Workflow.LINUX, unit_tests=True)
b = JobConfig("Linux", Workflow.LINUX, unit_tests=True, wpt_args="/css")
a = JobConfig("Linux (Unit Tests)", Workflow.LINUX, unit_tests=True)
b = JobConfig("Linux (Unit Tests)", Workflow.LINUX, unit_tests=True, wpt_args="/css")
self.assertFalse(a.merge(b), "Should not merge jobs that run different WPT tests.")
self.assertEqual(a, JobConfig("Linux", Workflow.LINUX, unit_tests=True))
self.assertEqual(a, JobConfig("Linux (Unit Tests)", Workflow.LINUX, unit_tests=True))
def test_full(self):
self.assertDictEqual(json.loads(Config("full").to_json()),