From e0d6cb8a60da72544358e2b1aeb22222107101af Mon Sep 17 00:00:00 2001 From: Simon Sapin Date: Fri, 28 Sep 2018 15:26:33 +0200 Subject: [PATCH 01/25] Initial Windows AMI-building script --- etc/taskcluster/decisionlib.py | 2 +- etc/taskcluster/windows/.gitignore | 1 + etc/taskcluster/windows/README.md | 88 +++++++++++++++++++ etc/taskcluster/windows/base-ami.txt | 1 + etc/taskcluster/windows/build-ami.py | 116 +++++++++++++++++++++++++ etc/taskcluster/windows/first-boot.ps1 | 45 ++++++++++ 6 files changed, 252 insertions(+), 1 deletion(-) create mode 100644 etc/taskcluster/windows/.gitignore create mode 100644 etc/taskcluster/windows/README.md create mode 100644 etc/taskcluster/windows/base-ami.txt create mode 100755 etc/taskcluster/windows/build-ami.py create mode 100644 etc/taskcluster/windows/first-boot.ps1 diff --git a/etc/taskcluster/decisionlib.py b/etc/taskcluster/decisionlib.py index 53778221b07..30d01d65c4f 100644 --- a/etc/taskcluster/decisionlib.py +++ b/etc/taskcluster/decisionlib.py @@ -35,7 +35,7 @@ class DecisionTask: "0a7d012ce444d62ffb9e7f06f0c52fedc24b68c2060711b313263367f7272d9d" def __init__(self, *, index_prefix="garbage.servo-decisionlib", task_name_template="%s", - worker_type="github-worker", docker_image_cache_expiry="1 year", + worker_type="github-worker", docker_image_cache_expiry="1 month", routes_for_all_subtasks=None, scopes_for_all_subtasks=None): self.task_name_template = task_name_template self.index_prefix = index_prefix diff --git a/etc/taskcluster/windows/.gitignore b/etc/taskcluster/windows/.gitignore new file mode 100644 index 00000000000..5570b425ee1 --- /dev/null +++ b/etc/taskcluster/windows/.gitignore @@ -0,0 +1 @@ +*.id_rsa diff --git a/etc/taskcluster/windows/README.md b/etc/taskcluster/windows/README.md new file mode 100644 index 00000000000..89271d9e043 --- /dev/null +++ b/etc/taskcluster/windows/README.md @@ -0,0 +1,88 @@ +# Windows AMIs for Servo on Taskcluster + +Unlike Linux tasks on `docker-worker` where each tasks 
is executed in a container
based on a Docker image provided with the task,
Windows tasks on Taskcluster are typically run by `generic-worker`
where tasks are executed directly in the worker’s environment.
So we may want to install some tools globally on the system, to make them available to tasks.

With the [AWS provisioner], this means building a custom AMI.
We need to boot an instance on a base Windows AMI,
install what we need (including `generic-worker` itself),
then take an image of that instance.
The [`worker_types`] directory in `generic-worker`’s repository
has some scripts that automate this,
in order to make it more reproducible than clicking around.
The trick is that a PowerShell script to run on boot can be provided
when starting a Windows instance on EC2, and of course AWS has an API.

[AWS provisioner]: https://docs.taskcluster.net/docs/reference/integrations/aws-provisioner/references/api
[`worker_types`]: https://github.com/taskcluster/generic-worker/blob/master/worker_types/


## Building and deploying a new image

* Install and configure the [AWS command-line tool].
* Make your changes to `first-boot.ps1` and/or `base-ami.txt`.
* Run `python3 build-ami.py`. Note that it can take many minutes to complete.
* Save the administrator password together with the image ID
  in Servo’s shared 1Password account, in the *Taskcluster Windows AMIs* note.
* In the [worker type definition], edit `ImageId` and `DeploymentId`.

Note that the new worker type definition will only apply to newly-provisioned workers.

`DeploymentId` can be any string. It can for example include the image ID.
Workers check it between tasks (if `checkForNewDeploymentEverySecs` since the last check).
If it has changed, they shut down in order to leave room for new workers with the new definition.

The [EC2 Resources] page has a red *Terminate All Instances* button,
but that will make any running task fail. 

[AWS command-line tool]: https://docs.aws.amazon.com/cli/latest/userguide/cli-chap-welcome.html
[worker type definition]: https://tools.taskcluster.net/aws-provisioner/servo-win2016/edit
[EC2 Resources]: https://tools.taskcluster.net/aws-provisioner/servo-win2016/resources


## FIXME: possible improvements

* Have a separate staging worker type to try new AMIs without affecting the production CI
* Automate cleaning up old, unused AMIs
* Use multiple AWS regions
* Use the Taskcluster API to automate updating worker type definitions?


## Picking a base AMI

Amazon provides an overwhelming number of different Windows images,
so it’s hard to find what’s relevant.
Their console might show a paginated view like this:

> ⇤ ← 1 to 50 of 13,914 AMIs → ⇥

Let’s grep through this with the API:

```sh
aws ec2 describe-images --owners amazon --filters 'Name=platform,Values=windows' \
    --query 'Images[*].[ImageId,Name,Description]' --output table > /tmp/images
< /tmp/images less -S
```

It turns out that these images are all based on Windows Server,
but their number is explained by the presence of many (all?) combinations of:

* Multiple OS versions
* Many available locales
* *Full* (a.k.a. *with Desktop Experience*), or *Core*
* *Base* with only the OS, or multiple flavors with tools like SQL Server pre-installed

If we make some choices and filter the list:

```sh
< /tmp/images grep 2016-English-Full-Base | less -S
```

… we get a much more manageable handful of images with names like
`Windows_Server-2016-English-Full-Base-2018.09.15` or other dates.

Let’s set `base-ami.txt` to `Windows_Server-2016-English-Full-Base-*`,
and have `build-ami.py` pick the most recently-created AMI whose name matches that pattern. 
\ No newline at end of file diff --git a/etc/taskcluster/windows/base-ami.txt b/etc/taskcluster/windows/base-ami.txt new file mode 100644 index 00000000000..0ed695728fa --- /dev/null +++ b/etc/taskcluster/windows/base-ami.txt @@ -0,0 +1 @@ +Windows_Server-2016-English-Full-Base-* \ No newline at end of file diff --git a/etc/taskcluster/windows/build-ami.py b/etc/taskcluster/windows/build-ami.py new file mode 100755 index 00000000000..2c7da96793f --- /dev/null +++ b/etc/taskcluster/windows/build-ami.py @@ -0,0 +1,116 @@ +#!/usr/bin/python3 + +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +import os +import json +import datetime +import subprocess + + +REGION = "us-west-2" +WORKER_TYPE = "servo-win2016" +AWS_PROVISIONER_USER_ID = "692406183521" + + +def main(): + base_ami_pattern = read_file("base-ami.txt").strip() + base_ami = most_recent_ami(base_ami_pattern) + print("Starting an instance with base image:", base_ami["ImageId"], base_ami["Name"]) + + key_name = "%s_%s" % (WORKER_TYPE, REGION) + key_filename = key_name + ".id_rsa" + ec2("delete-key-pair", "--key-name", key_name) + result = ec2("create-key-pair", "--key-name", key_name) + write_file(key_filename, result["KeyMaterial"].encode("utf-8")) + + user_data = b"\n%s\n" % read_file("first-boot.ps1") + result = ec2( + "run-instances", "--image-id", base_ami["ImageId"], + "--key-name", key_name, + "--user-data", user_data, + "--instance-type", "c4.xlarge", + "--block-device-mappings", + "DeviceName=/dev/sda1,Ebs={VolumeSize=75,DeleteOnTermination=true,VolumeType=gp2}", + "--instance-initiated-shutdown-behavior", "stop" + ) + assert len(result["Instances"]) == 1 + instance_id = result["Instances"][0]["InstanceId"] + + ec2("create-tags", "--resources", instance_id, "--tags", + "Key=Name,Value=TC %s base instance" % WORKER_TYPE) + + print("Waiting for password 
data to be available…") + ec2_wait("password-data-available", "--instance-id", instance_id) + result = ec2("get-password-data", "--instance-id", instance_id, + "--priv-launch-key", here(key_filename)) + print("Administrator password:", result["PasswordData"]) + + print("Waiting for the instance to finish executing first-boot.ps1 and shut down…") + ec2_wait("instance-stopped", "--instance-id", instance_id) + + now = datetime.datetime.utcnow().strftime("%Y-%m-%d_%H.%M.%S") + image_id = ec2("create-image", "--instance-id", instance_id, + "--name", "TC %s %s" % (WORKER_TYPE, now))["ImageId"] + print("Started creating image with ID %s …" % image_id) + + ec2_wait("image-available", "--image-ids", image_id) + ec2("modify-image-attribute", "--image-id", image_id, + "--launch-permission", "Add=[{UserId=%s}]" % AWS_PROVISIONER_USER_ID) + + print("Image available. Terminating the temporary instance…") + ec2("terminate-instances", "--instance-ids", instance_id) + + +def most_recent_ami(name_pattern): + result = ec2( + "describe-images", "--owners", "amazon", + "--filters", "Name=platform,Values=windows", b"Name=name,Values=" + name_pattern, + ) + return max(result["Images"], key=lambda x: x["CreationDate"]) + + +def ec2_wait(*args): + # https://docs.aws.amazon.com/cli/latest/reference/ec2/wait/password-data-available.html + # “It will poll every 15 seconds until a successful state has been reached. 
+ # This will exit with a return code of 255 after 40 failed checks.” + while True: + try: + return ec2("wait", *args) + except subprocess.CalledProcessError as err: + if err.returncode != 255: + raise + + +def try_ec2(*args): + try: + return ec2(*args) + except subprocess.CalledProcessError: + return None + + +def ec2(*args): + args = ["aws", "ec2", "--region", REGION, "--output", "json"] + list(args) + output = subprocess.check_output(args) + if output: + return json.loads(output) + + +def read_file(filename): + with open(here(filename), "rb") as f: + return f.read() + + +def write_file(filename, contents): + with open(here(filename), "wb") as f: + f.write(contents) + + +def here(filename, base=os.path.dirname(__file__)): + return os.path.join(base, filename) + + +if __name__ == "__main__": + main() diff --git a/etc/taskcluster/windows/first-boot.ps1 b/etc/taskcluster/windows/first-boot.ps1 new file mode 100644 index 00000000000..b0ba9d43b55 --- /dev/null +++ b/etc/taskcluster/windows/first-boot.ps1 @@ -0,0 +1,45 @@ +Start-Transcript -Path "C:\first_boot.txt" + +Get-ChildItem Env: | Out-File "C:\install_env.txt" + +# use TLS 1.2 (see bug 1443595) +[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12 + +# For making http requests +$client = New-Object system.net.WebClient +$shell = new-object -com shell.application + +# Download a zip file and extract it +function Expand-ZIPFile($file, $destination, $url) +{ + $client.DownloadFile($url, $file) + $zip = $shell.NameSpace($file) + foreach($item in $zip.items()) + { + $shell.Namespace($destination).copyhere($item) + } +} + +# Open up firewall for livelog (both PUT and GET interfaces) +New-NetFirewallRule -DisplayName "Allow livelog PUT requests" ` + -Direction Inbound -LocalPort 60022 -Protocol TCP -Action Allow +New-NetFirewallRule -DisplayName "Allow livelog GET requests" ` + -Direction Inbound -LocalPort 60023 -Protocol TCP -Action Allow + +# Install generic-worker and dependencies +md 
C:\generic-worker +$client.DownloadFile("https://github.com/taskcluster/generic-worker/releases/download" + + "/v10.11.3/generic-worker-windows-amd64.exe", "C:\generic-worker\generic-worker.exe") +$client.DownloadFile("https://github.com/taskcluster/livelog/releases/download" + + "/v1.1.0/livelog-windows-amd64.exe", "C:\generic-worker\livelog.exe") +Expand-ZIPFile -File "C:\nssm-2.24.zip" -Destination "C:\" ` + -Url "http://www.nssm.cc/release/nssm-2.24.zip" +Start-Process C:\generic-worker\generic-worker.exe -ArgumentList ( + "install service --nssm C:\nssm-2.24\win64\nssm.exe " + + "--config C:\generic-worker\generic-worker.config" + ) -Wait -NoNewWindow -PassThru ` + -RedirectStandardOutput C:\generic-worker\install.log ` + -RedirectStandardError C:\generic-worker\install.err + +# Now shutdown, in preparation for creating an image +shutdown -s \ No newline at end of file From bb7157ec592aeea2de70e086f9fef25f6d247776 Mon Sep 17 00:00:00 2001 From: Simon Sapin Date: Sun, 30 Sep 2018 21:59:13 +0200 Subject: [PATCH 02/25] Windows AMI: add missing signinig key for generic-worker --- etc/taskcluster/windows/first-boot.ps1 | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/etc/taskcluster/windows/first-boot.ps1 b/etc/taskcluster/windows/first-boot.ps1 index b0ba9d43b55..e96b7d5cda2 100644 --- a/etc/taskcluster/windows/first-boot.ps1 +++ b/etc/taskcluster/windows/first-boot.ps1 @@ -34,6 +34,11 @@ $client.DownloadFile("https://github.com/taskcluster/livelog/releases/download" "/v1.1.0/livelog-windows-amd64.exe", "C:\generic-worker\livelog.exe") Expand-ZIPFile -File "C:\nssm-2.24.zip" -Destination "C:\" ` -Url "http://www.nssm.cc/release/nssm-2.24.zip" +Start-Process C:\generic-worker\generic-worker.exe -ArgumentList ` + "new-openpgp-keypair --file C:\generic-worker\generic-worker-gpg-signing-key.key" ` + -Wait -NoNewWindow -PassThru ` + -RedirectStandardOutput C:\generic-worker\generate-signing-key.log ` + -RedirectStandardError 
C:\generic-worker\generate-signing-key.err Start-Process C:\generic-worker\generic-worker.exe -ArgumentList ( "install service --nssm C:\nssm-2.24\win64\nssm.exe " + "--config C:\generic-worker\generic-worker.config" From c2d868f213fe2a84bf8df29bda34da99c06555f9 Mon Sep 17 00:00:00 2001 From: Simon Sapin Date: Sun, 30 Sep 2018 22:02:11 +0200 Subject: [PATCH 03/25] =?UTF-8?q?(Disabled)=20Exfiltrate=20IP=C2=A0address?= =?UTF-8?q?es=20for=20new=20generic-worker=20instances?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- etc/taskcluster/windows/first-boot.ps1 | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/etc/taskcluster/windows/first-boot.ps1 b/etc/taskcluster/windows/first-boot.ps1 index e96b7d5cda2..e4986c4ddc3 100644 --- a/etc/taskcluster/windows/first-boot.ps1 +++ b/etc/taskcluster/windows/first-boot.ps1 @@ -46,5 +46,17 @@ Start-Process C:\generic-worker\generic-worker.exe -ArgumentList ( -RedirectStandardOutput C:\generic-worker\install.log ` -RedirectStandardError C:\generic-worker\install.err +# # For debugging, let us know the worker’s IP address through: +# # ssh servo-master.servo.org tail -f /var/log/nginx/access.log | grep ping +# Start-Process C:\nssm-2.24\win64\nssm.exe -ArgumentList ` +# "install", "servo-ping", "powershell", "-Command", @" +# (New-Object system.net.WebClient).DownloadData( +# 'http://servo-master.servo.org/ping/generic-worker') +# "@ + +# # This "service" isn’t a long-running service: it runs once on boot and then terminates. 
+# Start-Process C:\nssm-2.24\win64\nssm.exe -ArgumentList ` +# "set", "servo-ping", "AppExit", "Default", "Exit" + # Now shutdown, in preparation for creating an image shutdown -s \ No newline at end of file From 1bab9fb64f66982419cfaff5ea59fa54685869df Mon Sep 17 00:00:00 2001 From: Simon Sapin Date: Sun, 30 Sep 2018 23:12:12 +0200 Subject: [PATCH 04/25] mach.bat: exit with an error code --- mach.bat | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mach.bat b/mach.bat index f81fc6a7e9b..077bd574083 100644 --- a/mach.bat +++ b/mach.bat @@ -25,13 +25,13 @@ IF EXIST "%VS_VCVARS%" ( call "%VS_VCVARS%" x64 ) ELSE ( ECHO 32-bit Windows is currently unsupported. - EXIT /B + EXIT /B 1 ) ) ) ELSE ( ECHO Visual Studio 2015 or 2017 is not installed. ECHO Download and install Visual Studio 2015 or 2017 from https://www.visualstudio.com/ - EXIT /B + EXIT /B 1 ) popd From f9038a78dfd4f80fa7412a98a2c87e84052d8bf7 Mon Sep 17 00:00:00 2001 From: Simon Sapin Date: Sun, 30 Sep 2018 22:32:47 +0200 Subject: [PATCH 05/25] Taskcluster: add initial Windows task --- etc/taskcluster/decision-task.py | 41 ++++++++++-- etc/taskcluster/decisionlib.py | 107 ++++++++++++++++++------------- 2 files changed, 96 insertions(+), 52 deletions(-) diff --git a/etc/taskcluster/decision-task.py b/etc/taskcluster/decision-task.py index ded4b6e479c..95a2edc6f1a 100644 --- a/etc/taskcluster/decision-task.py +++ b/etc/taskcluster/decision-task.py @@ -16,6 +16,7 @@ def main(): linux_tidy_unit() #linux_wpt() android_arm32() + windows_dev() # https://tools.taskcluster.net/hooks/project-servo/daily elif task_for == "daily": @@ -56,7 +57,7 @@ def linux_tidy_unit(): ./etc/ci/lockfile_changed.sh ./etc/ci/check_no_panic.sh """, - **build_kwargs + **linux_build_kwargs ) @@ -68,7 +69,7 @@ def with_rust_nightly(): ./mach build --dev ./mach test-unit """, - **build_kwargs + **linux_build_kwargs ) @@ -90,6 +91,31 @@ def android_arm32(): "/repo/target/armv7-linux-androideabi/release/servoapp.apk", 
"/repo/target/armv7-linux-androideabi/release/servoview.aar", ], + **linux_build_kwargs + ) + + +def windows_dev(): + return decision.create_task( + task_name="Windows x86_64: clone only", + worker_type="servo-win2016", + script=""" + dir + """, + mounts=[ + { + "directory": "git", + "format": "zip", + "content": { + "url": "https://github.com/git-for-windows/git/releases/download/" + + "v2.19.0.windows.1/MinGit-2.19.0-64-bit.zip", + "sha256": "424d24b5fc185a9c5488d7872262464f2facab4f1d4693ea8008196f14a3c19b", + } + }, + ], + homedir_path=[ + "git\\cmd", + ], **build_kwargs ) @@ -120,7 +146,7 @@ def linux_release_build(): artifacts=[ "/target.tar.gz", ], - **build_kwargs + **linux_build_kwargs ) @@ -219,7 +245,7 @@ def dockerfile_path(name): decision = DecisionTask( task_name_template="Servo: %s", index_prefix="project.servo.servo", - worker_type="servo-docker-worker", + default_worker_type="servo-docker-worker", ) # https://docs.taskcluster.net/docs/reference/workers/docker-worker/docs/caches @@ -234,11 +260,14 @@ build_caches = { } build_kwargs = { "max_run_time_minutes": 60, - "dockerfile": dockerfile_path("build"), "env": build_env, +} +linux_build_kwargs = dict(**build_kwargs, **{ + "worker_type": "servo-docker-worker", + "dockerfile": dockerfile_path("build"), "scopes": cache_scopes, "cache": build_caches, -} +}) if __name__ == "__main__": diff --git a/etc/taskcluster/decisionlib.py b/etc/taskcluster/decisionlib.py index 30d01d65c4f..2ae6d14c7b4 100644 --- a/etc/taskcluster/decisionlib.py +++ b/etc/taskcluster/decisionlib.py @@ -35,11 +35,11 @@ class DecisionTask: "0a7d012ce444d62ffb9e7f06f0c52fedc24b68c2060711b313263367f7272d9d" def __init__(self, *, index_prefix="garbage.servo-decisionlib", task_name_template="%s", - worker_type="github-worker", docker_image_cache_expiry="1 month", + default_worker_type="github-worker", docker_image_cache_expiry="1 month", routes_for_all_subtasks=None, scopes_for_all_subtasks=None): self.task_name_template = 
task_name_template self.index_prefix = index_prefix - self.worker_type = worker_type + self.default_worker_type = default_worker_type self.docker_image_cache_expiry = docker_image_cache_expiry self.routes_for_all_subtasks = routes_for_all_subtasks or [] self.scopes_for_all_subtasks = scopes_for_all_subtasks or [] @@ -133,37 +133,21 @@ class DecisionTask: def create_task(self, *, task_name, script, max_run_time_minutes, docker_image=None, dockerfile=None, # One of these is required artifacts=None, dependencies=None, env=None, cache=None, scopes=None, - routes=None, extra=None, features=None, - with_repo=True): + routes=None, extra=None, features=None, mounts=None, homedir_path=None, + worker_type=None, with_repo=True): """ - Schedule a new task. Only supports `docker-worker` for now. - - Returns the new task ID. + Schedule a new task. Returns the new task ID. One of `docker_image` or `dockerfile` (but not both) must be given. If `dockerfile` is given, the corresponding Docker image is built as needed and cached. `with_repo` indicates whether `script` should start in a clone of the git repository. 
""" - if docker_image and dockerfile: - raise TypeError("cannot use both `docker_image` or `dockerfile`") - if not docker_image and not dockerfile: - raise TypeError("need one of `docker_image` or `dockerfile`") - # https://docs.taskcluster.net/docs/reference/workers/docker-worker/docs/environment decision_task_id = os.environ["TASK_ID"] dependencies = [decision_task_id] + (dependencies or []) - if dockerfile: - image_build_task = self.find_or_build_docker_image(dockerfile) - dependencies.append(image_build_task) - docker_image = { - "type": "task-image", - "taskId": image_build_task, - "path": "public/" + self.DOCKER_IMAGE_ARTIFACT_FILENAME, - } - # Set in .taskcluster.yml task_owner = os.environ["TASK_OWNER"] task_source = os.environ["TASK_SOURCE"] @@ -175,19 +159,72 @@ class DecisionTask: for k in ["GIT_URL", "GIT_REF", "GIT_SHA"]: env[k] = os.environ[k] - script = """ + worker_type = worker_type or self.default_worker_type + if "docker" in worker_type: + if docker_image and dockerfile: + raise TypeError("cannot use both `docker_image` or `dockerfile`") + if not docker_image and not dockerfile: + raise TypeError("need one of `docker_image` or `dockerfile`") + + if dockerfile: + image_build_task = self.find_or_build_docker_image(dockerfile) + dependencies.append(image_build_task) + docker_image = { + "type": "task-image", + "taskId": image_build_task, + "path": "public/" + self.DOCKER_IMAGE_ARTIFACT_FILENAME, + } + + if with_repo: + script = """ git init repo cd repo git fetch --depth 1 "$GIT_URL" "$GIT_REF" git reset --hard "$GIT_SHA" """ + script + command = ["/bin/bash", "--login", "-x", "-e", "-c", deindent(script)] + else: + command = [ + "set PATH=%CD%\\{};%PATH%".format(p) + for p in reversed(homedir_path or []) + ] + if with_repo: + command.append(deindent(""" + git init repo + cd repo + git fetch --depth 1 %GIT_URL% %GIT_REF% + git reset --hard %GIT_SHA% + """)) + command.append(deindent(script)) + worker_payload = { + "maxRunTime": max_run_time_minutes 
* 60, + "command": command, + "env": env, + } + if docker_image: + worker_payload["image"] = docker_image + if cache: + worker_payload["cache"] = cache + if features: + worker_payload["features"] = features + if mounts: + worker_payload["mounts"] = mounts + if artifacts: + worker_payload["artifacts"] = { + "public/" + os.path.basename(path): { + "type": "file", + "path": path, + "expires": self.from_now_json(expires), + } + for path, expires in artifacts + } payload = { "taskGroupId": decision_task_id, "dependencies": dependencies or [], "schedulerId": "taskcluster-github", "provisionerId": "aws-provisioner-v1", - "workerType": self.worker_type, + "workerType": worker_type, "created": self.from_now_json(""), "deadline": self.from_now_json("1 day"), @@ -200,29 +237,7 @@ class DecisionTask: "scopes": (scopes or []) + self.scopes_for_all_subtasks, "routes": (routes or []) + self.routes_for_all_subtasks, "extra": extra or {}, - "payload": { - "cache": cache or {}, - "maxRunTime": max_run_time_minutes * 60, - "image": docker_image, - "command": [ - "/bin/bash", - "--login", - "-x", - "-e", - "-c", - deindent(script) - ], - "env": env, - "artifacts": { - "public/" + os.path.basename(path): { - "type": "file", - "path": path, - "expires": self.from_now_json(expires), - } - for path, expires in artifacts or [] - }, - "features": features or {}, - }, + "payload": worker_payload, } task_id = taskcluster.slugId().decode("utf8") From 522445f609eb9397a5d37ad4b712ebb6a153d0c7 Mon Sep 17 00:00:00 2001 From: Simon Sapin Date: Sun, 30 Sep 2018 23:54:51 +0200 Subject: [PATCH 06/25] Convert python-2.7.15.amd64.msi to a ZIP file, for generic-worker mount --- etc/taskcluster/decision-task.py | 56 +++++++++++++++++++- etc/taskcluster/docker/msiextract.dockerfile | 47 ++++++++++++++++ 2 files changed, 101 insertions(+), 2 deletions(-) create mode 100644 etc/taskcluster/docker/msiextract.dockerfile diff --git a/etc/taskcluster/decision-task.py b/etc/taskcluster/decision-task.py index 
95a2edc6f1a..843872f3393 100644 --- a/etc/taskcluster/decision-task.py +++ b/etc/taskcluster/decision-task.py @@ -30,6 +30,7 @@ def main(): ping_on_daily_task_failure = "SimonSapin, nox, emilio" build_artifacts_expiry = "1 week" +build_dependencies_artifacts_expiry = "1 month" log_artifacts_expiry = "1 year" build_env = { @@ -96,12 +97,20 @@ def android_arm32(): def windows_dev(): + python27_task = extract_msi( + "https://www.python.org/ftp/python/2.7.15/python-2.7.15.amd64.msi", + sha256="5e85f3c4c209de98480acbf2ba2e71a907fd5567a838ad4b6748c76deb286ad7", + ) return decision.create_task( - task_name="Windows x86_64: clone only", + task_name="Windows x86_64: clone only (for now)", worker_type="servo-win2016", + # script=""" + # python mach --help + # """, script=""" - dir + python -c "import sys; print(sys.path)" """, + with_repo=False, mounts=[ { "directory": "git", @@ -112,10 +121,20 @@ def windows_dev(): "sha256": "424d24b5fc185a9c5488d7872262464f2facab4f1d4693ea8008196f14a3c19b", } }, + { + "directory": "python2", + "format": "tar.gz", + "content": { + "artifact": "public/extracted.tar.gz", + "taskId": python27_task, + } + }, ], homedir_path=[ "git\\cmd", + "python2", ], + dependencies=[python27_task], **build_kwargs ) @@ -238,6 +257,38 @@ def daily_tasks_setup(): decision.task_name_template = "Servo daily: %s. 
On failure, ping: " + ping_on_daily_task_failure +def extract_msi(url, sha256): + return decision.find_or_create_task( + index_bucket="extract-msi.v4", + index_key=sha256, + index_expiry=build_dependencies_artifacts_expiry, + + task_name="Extracting MSI file " + url, + dockerfile=dockerfile_path("msiextract"), + script=""" + curl --retry 5 --connect-timeout 10 --location --fail "$MSI_URL" -o input.msi + echo "$EXPECTED_SHA256 input.msi" | sha256sum --check + msiextract input.msi -C output + + # May contains directories with names too long for Windows to even create: + # https://gitlab.gnome.org/GNOME/msitools/issues/5 + rm -rf output/Windows/winsxs + + ls output/ + tar -czf /extracted.tar.gz -C output . + """, + env={ + "MSI_URL": url, + "EXPECTED_SHA256": sha256, + }, + artifacts=[ + "/extracted.tar.gz" + ], + max_run_time_minutes=20, + with_repo=False, + ) + + def dockerfile_path(name): return os.path.join(os.path.dirname(__file__), "docker", name + ".dockerfile") @@ -246,6 +297,7 @@ decision = DecisionTask( task_name_template="Servo: %s", index_prefix="project.servo.servo", default_worker_type="servo-docker-worker", + docker_image_cache_expiry=build_dependencies_artifacts_expiry, ) # https://docs.taskcluster.net/docs/reference/workers/docker-worker/docs/caches diff --git a/etc/taskcluster/docker/msiextract.dockerfile b/etc/taskcluster/docker/msiextract.dockerfile new file mode 100644 index 00000000000..67cda9239bf --- /dev/null +++ b/etc/taskcluster/docker/msiextract.dockerfile @@ -0,0 +1,47 @@ +# Build a version of libgcal that includes commit +# https://gitlab.gnome.org/GNOME/gcab/commit/3365b4bd58dd7f13e786caf3c7234cf8116263d9 +# which fixes "Invalid cabinet chunk" errors: +# https://gitlab.gnome.org/GNOME/msitools/issues/4#note_336695 +FROM ubuntu:bionic-20180821 +RUN \ + apt-get update -q && \ + apt-get install -qy --no-install-recommends \ + curl \ + ca-certificates \ + # + # Build dependencies for libgcab + meson \ + git \ + pkg-config \ + libc6-dev \ + 
libglib2.0-dev \ + libgirepository1.0-dev \ + gobject-introspection \ + valac \ + intltool \ + && \ + export REV=3365b4bd58dd7f13e786caf3c7234cf8116263d9 && \ + curl -L https://gitlab.gnome.org/GNOME/gcab/-/archive/$REV/gcab-$REV.tar.gz | tar -xz && \ + mv gcab-$REV gcab && \ + cd gcab && \ + meson build && \ + cd build && \ + # UTF-8 locale to work around https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=870310 + export LANG=C.UTF-8 && \ + ninja && \ + cp -v libgcab/libgcab* /usr/local/lib + + +# FIXME: uncomment this after we upgrade docker-worker +# to a version of Docker that supports multi-stage builds: + +# # Start a new image without the build dependencies, only the compiled library +# FROM ubuntu:bionic-20180821 +# COPY --from=0 /usr/local/lib/libgcab* /usr/local/lib/ + +RUN \ + apt-get update -q && \ + apt-get install -qy --no-install-recommends \ + curl \ + ca-certificates \ + msitools \ No newline at end of file From 3b2076fdc3db3871688c5979ea67e0ee75e5bf57 Mon Sep 17 00:00:00 2001 From: Simon Sapin Date: Mon, 1 Oct 2018 22:55:50 +0200 Subject: [PATCH 07/25] =?UTF-8?q?Windows:=20don=E2=80=99t=20check=20out=20?= =?UTF-8?q?tests/wpt?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Creating many files is slow, on Windows --- etc/taskcluster/decision-task.py | 1 + etc/taskcluster/decisionlib.py | 28 ++++++++++++++++++++++------ 2 files changed, 23 insertions(+), 6 deletions(-) diff --git a/etc/taskcluster/decision-task.py b/etc/taskcluster/decision-task.py index 843872f3393..65f98b5fab2 100644 --- a/etc/taskcluster/decision-task.py +++ b/etc/taskcluster/decision-task.py @@ -135,6 +135,7 @@ def windows_dev(): "python2", ], dependencies=[python27_task], + sparse_checkout_exclude=["tests/wpt"], **build_kwargs ) diff --git a/etc/taskcluster/decisionlib.py b/etc/taskcluster/decisionlib.py index 2ae6d14c7b4..db1682e77f5 100644 --- a/etc/taskcluster/decisionlib.py +++ b/etc/taskcluster/decisionlib.py @@ -13,6 +13,7 @@ 
Project-independent library for Taskcluster decision tasks """ +import base64 import datetime import hashlib import json @@ -134,7 +135,7 @@ class DecisionTask: docker_image=None, dockerfile=None, # One of these is required artifacts=None, dependencies=None, env=None, cache=None, scopes=None, routes=None, extra=None, features=None, mounts=None, homedir_path=None, - worker_type=None, with_repo=True): + worker_type=None, with_repo=True, sparse_checkout_exclude=None): """ Schedule a new task. Returns the new task ID. @@ -176,12 +177,13 @@ class DecisionTask: } if with_repo: - script = """ + git = """ git init repo cd repo git fetch --depth 1 "$GIT_URL" "$GIT_REF" git reset --hard "$GIT_SHA" - """ + script + """ + script = git + script command = ["/bin/bash", "--login", "-x", "-e", "-c", deindent(script)] else: command = [ @@ -189,9 +191,23 @@ class DecisionTask: for p in reversed(homedir_path or []) ] if with_repo: - command.append(deindent(""" - git init repo - cd repo + if with_repo: + git = """ + git init repo + cd repo + """ + if sparse_checkout_exclude: + git += """ + git config core.sparsecheckout true + echo %SPARSE_CHECKOUT_BASE64% > .git\\info\\sparse.b64 + certutil -decode .git\\info\\sparse.b64 .git\\info\\sparse-checkout + type .git\\info\\sparse-checkout + """ + env["SPARSE_CHECKOUT_BASE64"] = base64.b64encode( + b"/*" + + "".join("\n!" 
+ p for p in sparse_checkout_exclude).encode("utf-8") + ) + command.append(deindent(git + """ git fetch --depth 1 %GIT_URL% %GIT_REF% git reset --hard %GIT_SHA% """)) From a7fc7629ff12e40969022a01dc2937dda39a54d0 Mon Sep 17 00:00:00 2001 From: Simon Sapin Date: Mon, 1 Oct 2018 22:14:20 +0200 Subject: [PATCH 08/25] msiextract corrupts files :( Try lessmsi instead https://gitlab.gnome.org/GNOME/msitools/issues/6 --- etc/taskcluster/decision-task.py | 71 +++++++------------- etc/taskcluster/docker/msiextract.dockerfile | 47 ------------- 2 files changed, 23 insertions(+), 95 deletions(-) delete mode 100644 etc/taskcluster/docker/msiextract.dockerfile diff --git a/etc/taskcluster/decision-task.py b/etc/taskcluster/decision-task.py index 65f98b5fab2..105c1dc9aa9 100644 --- a/etc/taskcluster/decision-task.py +++ b/etc/taskcluster/decision-task.py @@ -97,20 +97,19 @@ def android_arm32(): def windows_dev(): - python27_task = extract_msi( - "https://www.python.org/ftp/python/2.7.15/python-2.7.15.amd64.msi", - sha256="5e85f3c4c209de98480acbf2ba2e71a907fd5567a838ad4b6748c76deb286ad7", - ) + def extract_msi(*names): + return "".join( + "lessmsi x %HOMEDRIVE%%HOMEPATH%\\{name}.msi %HOMEDRIVE%%HOMEPATH%\\{name}\\\n" + .format(name=name) + for name in names + ) + return decision.create_task( task_name="Windows x86_64: clone only (for now)", worker_type="servo-win2016", - # script=""" - # python mach --help - # """, - script=""" - python -c "import sys; print(sys.path)" + script=extract_msi("python2") + """ + python -c "import os; print(os.listdir('.'))" """, - with_repo=False, mounts=[ { "directory": "git", @@ -122,19 +121,27 @@ def windows_dev(): } }, { - "directory": "python2", - "format": "tar.gz", + "file": "python2.msi", "content": { - "artifact": "public/extracted.tar.gz", - "taskId": python27_task, + "url": "https://www.python.org/ftp/python/2.7.15/python-2.7.15.amd64.msi", + "sha256": "5e85f3c4c209de98480acbf2ba2e71a907fd5567a838ad4b6748c76deb286ad7", + } + }, + { + 
"directory": "lessmsi", + "format": "zip", + "content": { + "url": "https://github.com/activescott/lessmsi/releases/download/" + + "v1.6.1/lessmsi-v1.6.1.zip", + "sha256": "540b8801e08ec39ba26a100c855898f455410cecbae4991afae7bb2b4df026c7", } }, ], homedir_path=[ "git\\cmd", - "python2", + "lessmsi", + "python2\\SourceDir", ], - dependencies=[python27_task], sparse_checkout_exclude=["tests/wpt"], **build_kwargs ) @@ -258,38 +265,6 @@ def daily_tasks_setup(): decision.task_name_template = "Servo daily: %s. On failure, ping: " + ping_on_daily_task_failure -def extract_msi(url, sha256): - return decision.find_or_create_task( - index_bucket="extract-msi.v4", - index_key=sha256, - index_expiry=build_dependencies_artifacts_expiry, - - task_name="Extracting MSI file " + url, - dockerfile=dockerfile_path("msiextract"), - script=""" - curl --retry 5 --connect-timeout 10 --location --fail "$MSI_URL" -o input.msi - echo "$EXPECTED_SHA256 input.msi" | sha256sum --check - msiextract input.msi -C output - - # May contains directories with names too long for Windows to even create: - # https://gitlab.gnome.org/GNOME/msitools/issues/5 - rm -rf output/Windows/winsxs - - ls output/ - tar -czf /extracted.tar.gz -C output . 
- """, - env={ - "MSI_URL": url, - "EXPECTED_SHA256": sha256, - }, - artifacts=[ - "/extracted.tar.gz" - ], - max_run_time_minutes=20, - with_repo=False, - ) - - def dockerfile_path(name): return os.path.join(os.path.dirname(__file__), "docker", name + ".dockerfile") diff --git a/etc/taskcluster/docker/msiextract.dockerfile b/etc/taskcluster/docker/msiextract.dockerfile deleted file mode 100644 index 67cda9239bf..00000000000 --- a/etc/taskcluster/docker/msiextract.dockerfile +++ /dev/null @@ -1,47 +0,0 @@ -# Build a version of libgcal that includes commit -# https://gitlab.gnome.org/GNOME/gcab/commit/3365b4bd58dd7f13e786caf3c7234cf8116263d9 -# which fixes "Invalid cabinet chunk" errors: -# https://gitlab.gnome.org/GNOME/msitools/issues/4#note_336695 -FROM ubuntu:bionic-20180821 -RUN \ - apt-get update -q && \ - apt-get install -qy --no-install-recommends \ - curl \ - ca-certificates \ - # - # Build dependencies for libgcab - meson \ - git \ - pkg-config \ - libc6-dev \ - libglib2.0-dev \ - libgirepository1.0-dev \ - gobject-introspection \ - valac \ - intltool \ - && \ - export REV=3365b4bd58dd7f13e786caf3c7234cf8116263d9 && \ - curl -L https://gitlab.gnome.org/GNOME/gcab/-/archive/$REV/gcab-$REV.tar.gz | tar -xz && \ - mv gcab-$REV gcab && \ - cd gcab && \ - meson build && \ - cd build && \ - # UTF-8 locale to work around https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=870310 - export LANG=C.UTF-8 && \ - ninja && \ - cp -v libgcab/libgcab* /usr/local/lib - - -# FIXME: uncomment this after we upgrade docker-worker -# to a version of Docker that supports multi-stage builds: - -# # Start a new image without the build dependencies, only the compiled library -# FROM ubuntu:bionic-20180821 -# COPY --from=0 /usr/local/lib/libgcab* /usr/local/lib/ - -RUN \ - apt-get update -q && \ - apt-get install -qy --no-install-recommends \ - curl \ - ca-certificates \ - msitools \ No newline at end of file From 63e244ebab825c66f3549b4771dcb231f1ae910d Mon Sep 17 00:00:00 2001 From: 
Simon Sapin Date: Tue, 2 Oct 2018 08:39:49 +0200 Subject: [PATCH 09/25] Cache repackaged Python 2 for Windows --- etc/taskcluster/decision-task.py | 95 ++++++++++++++++++++++++-------- etc/taskcluster/decisionlib.py | 33 +++++++---- 2 files changed, 93 insertions(+), 35 deletions(-) diff --git a/etc/taskcluster/decision-task.py b/etc/taskcluster/decision-task.py index 105c1dc9aa9..2286130de2e 100644 --- a/etc/taskcluster/decision-task.py +++ b/etc/taskcluster/decision-task.py @@ -4,6 +4,8 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. +import hashlib +import json import os.path import subprocess from decisionlib import DecisionTask @@ -97,29 +99,16 @@ def android_arm32(): def windows_dev(): - def extract_msi(*names): - return "".join( - "lessmsi x %HOMEDRIVE%%HOMEPATH%\\{name}.msi %HOMEDRIVE%%HOMEPATH%\\{name}\\\n" - .format(name=name) - for name in names - ) - - return decision.create_task( - task_name="Windows x86_64: clone only (for now)", + python2_task_definition = dict( + task_name="Windows x86_64: repackage Python 2", worker_type="servo-win2016", - script=extract_msi("python2") + """ - python -c "import os; print(os.listdir('.'))" + with_repo=False, + script=""" + lessmsi x python2.msi python2\\ + cd python2\\SourceDir + 7za a python2.zip * """, mounts=[ - { - "directory": "git", - "format": "zip", - "content": { - "url": "https://github.com/git-for-windows/git/releases/download/" + - "v2.19.0.windows.1/MinGit-2.19.0-64-bit.zip", - "sha256": "424d24b5fc185a9c5488d7872262464f2facab4f1d4693ea8008196f14a3c19b", - } - }, { "file": "python2.msi", "content": { @@ -136,13 +125,73 @@ def windows_dev(): "sha256": "540b8801e08ec39ba26a100c855898f455410cecbae4991afae7bb2b4df026c7", } }, + { + "directory": "7zip", + "format": "zip", + "content": { + "url": "https://www.7-zip.org/a/7za920.zip", + "sha256": "2a3afe19c180f8373fa02ff00254d5394fec0349f5804e0ad2f6067854ff28ac", + } + } + ], + 
homedir_path=[ + "lessmsi", + "7zip", + ], + artifacts=[ + "python2/SourceDir/python2.zip", + ], + max_run_time_minutes=20, + ) + index_by = json.dumps(python2_task_definition).encode("utf-8") + python2_task = decision.find_or_create_task( + index_bucket="by-task-definition", + index_key=hashlib.sha256(index_by).hexdigest(), + index_expiry=build_artifacts_expiry, + **python2_task_definition + ) + + return decision.create_task( + task_name="Windows x86_64: clone only (for now)", + worker_type="servo-win2016", + script=""" + python -m ensurepip + pip install virtualenv==16.0.0 + python mach --help + """, + mounts=[ + { + "directory": "git", + "format": "zip", + "content": { + "url": "https://github.com/git-for-windows/git/releases/download/" + + "v2.19.0.windows.1/MinGit-2.19.0-64-bit.zip", + "sha256": "424d24b5fc185a9c5488d7872262464f2facab4f1d4693ea8008196f14a3c19b", + } + }, + { + "directory": "python2", + "format": "zip", + "content": { + "taskId": python2_task, + "artifact": "public/python2.zip", + } + }, ], homedir_path=[ "git\\cmd", - "lessmsi", - "python2\\SourceDir", + "python2", + "python2\\Scripts", + ], + dependencies=[python2_task], + sparse_checkout=[ + "/*", + "!/tests/wpt/metadata", + "!/tests/wpt/mozilla", + "!/tests/wpt/webgl", + "!/tests/wpt/web-platform-tests", + "/tests/wpt/web-platform-tests/tools", ], - sparse_checkout_exclude=["tests/wpt"], **build_kwargs ) diff --git a/etc/taskcluster/decisionlib.py b/etc/taskcluster/decisionlib.py index db1682e77f5..3e325f3ed44 100644 --- a/etc/taskcluster/decisionlib.py +++ b/etc/taskcluster/decisionlib.py @@ -135,7 +135,7 @@ class DecisionTask: docker_image=None, dockerfile=None, # One of these is required artifacts=None, dependencies=None, env=None, cache=None, scopes=None, routes=None, extra=None, features=None, mounts=None, homedir_path=None, - worker_type=None, with_repo=True, sparse_checkout_exclude=None): + worker_type=None, with_repo=True, sparse_checkout=None): """ Schedule a new task. 
Returns the new task ID. @@ -196,7 +196,7 @@ class DecisionTask: git init repo cd repo """ - if sparse_checkout_exclude: + if sparse_checkout: git += """ git config core.sparsecheckout true echo %SPARSE_CHECKOUT_BASE64% > .git\\info\\sparse.b64 @@ -204,9 +204,7 @@ class DecisionTask: type .git\\info\\sparse-checkout """ env["SPARSE_CHECKOUT_BASE64"] = base64.b64encode( - b"/*" + - "".join("\n!" + p for p in sparse_checkout_exclude).encode("utf-8") - ) + "\n".join(sparse_checkout).encode("utf-8")) command.append(deindent(git + """ git fetch --depth 1 %GIT_URL% %GIT_REF% git reset --hard %GIT_SHA% @@ -227,14 +225,25 @@ class DecisionTask: if mounts: worker_payload["mounts"] = mounts if artifacts: - worker_payload["artifacts"] = { - "public/" + os.path.basename(path): { - "type": "file", - "path": path, - "expires": self.from_now_json(expires), + if "docker" in worker_type: + worker_payload["artifacts"] = { + "public/" + os.path.basename(path): { + "type": "file", + "path": path, + "expires": self.from_now_json(expires), + } + for path, expires in artifacts } - for path, expires in artifacts - } + else: + worker_payload["artifacts"] = [ + { + "type": "file", + "name": "public/" + os.path.basename(path), + "path": path, + "expires": self.from_now_json(expires), + } + for path, expires in artifacts + ] payload = { "taskGroupId": decision_task_id, "dependencies": dependencies or [], From 233c9116394fe2460024cf950db35b8f3831eb03 Mon Sep 17 00:00:00 2001 From: Simon Sapin Date: Tue, 2 Oct 2018 10:46:30 +0200 Subject: [PATCH 10/25] Taskcluster Windows: install rustup --- etc/taskcluster/decision-task.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/etc/taskcluster/decision-task.py b/etc/taskcluster/decision-task.py index 2286130de2e..eaf260bf473 100644 --- a/etc/taskcluster/decision-task.py +++ b/etc/taskcluster/decision-task.py @@ -155,6 +155,7 @@ def windows_dev(): task_name="Windows x86_64: clone only (for now)", worker_type="servo-win2016", script=""" + 
..\\rustup-init.exe --default-toolchain none -y python -m ensurepip pip install virtualenv==16.0.0 python mach --help @@ -177,11 +178,20 @@ def windows_dev(): "artifact": "public/python2.zip", } }, + { + "file": "rustup-init.exe", + "content": { + "url": "https://static.rust-lang.org/rustup/archive/" + + "1.13.0/i686-pc-windows-gnu/rustup-init.exe", + "sha256": "43072fbe6b38ab38cd872fa51a33ebd781f83a2d5e83013857fab31fc06e4bf0", + } + } ], homedir_path=[ "git\\cmd", "python2", "python2\\Scripts", + ".cargo\\bin", ], dependencies=[python2_task], sparse_checkout=[ From b5cf1f87cfc399fa40051ae12d918746dc1001e8 Mon Sep 17 00:00:00 2001 From: Simon Sapin Date: Wed, 3 Oct 2018 09:18:23 +0200 Subject: [PATCH 11/25] Taskcluster Windows: add gstreamer --- etc/taskcluster/decision-task.py | 159 +++++++++++++++++------------- etc/taskcluster/windows/README.md | 2 +- 2 files changed, 93 insertions(+), 68 deletions(-) diff --git a/etc/taskcluster/decision-task.py b/etc/taskcluster/decision-task.py index eaf260bf473..12cd7a9be67 100644 --- a/etc/taskcluster/decision-task.py +++ b/etc/taskcluster/decision-task.py @@ -99,21 +99,100 @@ def android_arm32(): def windows_dev(): - python2_task_definition = dict( - task_name="Windows x86_64: repackage Python 2", + python2_task = repack_msi( + url="https://www.python.org/ftp/python/2.7.15/python-2.7.15.amd64.msi", + sha256="5e85f3c4c209de98480acbf2ba2e71a907fd5567a838ad4b6748c76deb286ad7", + ) + gstreamer_task = repack_msi( + url="https://gstreamer.freedesktop.org/data/pkg/windows/" + + "1.14.3/gstreamer-1.0-devel-x86_64-1.14.3.msi", + sha256="b13ea68c1365098c66871f0acab7fd3daa2f2795b5e893fcbb5cd7253f2c08fa", + ) + return decision.create_task( + task_name="Windows x86_64: clone only (for now)", worker_type="servo-win2016", - with_repo=False, script=""" - lessmsi x python2.msi python2\\ - cd python2\\SourceDir - 7za a python2.zip * + python -m ensurepip + pip install virtualenv==16.0.0 + python mach --help + ..\\rustup-init.exe 
--default-toolchain none -y + + set LIB=%HOMEDRIVE%%HOMEPATH%\\gst\\gstreamer\\1.0\\x86_64\\lib;%LIB% + """, mounts=[ { - "file": "python2.msi", + "directory": "git", + "format": "zip", "content": { - "url": "https://www.python.org/ftp/python/2.7.15/python-2.7.15.amd64.msi", - "sha256": "5e85f3c4c209de98480acbf2ba2e71a907fd5567a838ad4b6748c76deb286ad7", + "url": "https://github.com/git-for-windows/git/releases/download/" + + "v2.19.0.windows.1/MinGit-2.19.0-64-bit.zip", + "sha256": "424d24b5fc185a9c5488d7872262464f2facab4f1d4693ea8008196f14a3c19b", + } + }, + { + "directory": "python2", + "format": "zip", + "content": { + "taskId": python2_task, + "artifact": "public/repacked.zip", + } + }, + { + "directory": "gst", + "format": "zip", + "content": { + "taskId": gstreamer_task, + "artifact": "public/repacked.zip", + } + }, + { + "file": "rustup-init.exe", + "content": { + "url": "https://static.rust-lang.org/rustup/archive/" + + "1.13.0/i686-pc-windows-gnu/rustup-init.exe", + "sha256": "43072fbe6b38ab38cd872fa51a33ebd781f83a2d5e83013857fab31fc06e4bf0", + } + } + ], + homedir_path=[ + "git\\cmd", + "python2", + "python2\\Scripts", + ".cargo\\bin", + ], + dependencies=[ + python2_task, + gstreamer_task, + ], + sparse_checkout=[ + "/*", + "!/tests/wpt/metadata", + "!/tests/wpt/mozilla", + "!/tests/wpt/webgl", + "!/tests/wpt/web-platform-tests", + "/tests/wpt/web-platform-tests/tools", + ], + **build_kwargs + ) + + +def repack_msi(url, sha256): + task_definition = dict( + task_name="Windows x86_64: repackage " + url.rpartition("/")[-1], + worker_type="servo-win2016", + with_repo=False, + script=""" + lessmsi x input.msi extracted\\ + cd extracted\\SourceDir + 7za a repacked.zip * + """, + mounts=[ + { + "file": "input.msi", + "content": { + "url": url, + "sha256": sha256, } }, { @@ -139,70 +218,16 @@ def windows_dev(): "7zip", ], artifacts=[ - "python2/SourceDir/python2.zip", + "extracted/SourceDir/repacked.zip", ], max_run_time_minutes=20, ) - index_by = 
json.dumps(python2_task_definition).encode("utf-8") - python2_task = decision.find_or_create_task( + index_by = json.dumps(task_definition).encode("utf-8") + return decision.find_or_create_task( index_bucket="by-task-definition", index_key=hashlib.sha256(index_by).hexdigest(), index_expiry=build_artifacts_expiry, - **python2_task_definition - ) - - return decision.create_task( - task_name="Windows x86_64: clone only (for now)", - worker_type="servo-win2016", - script=""" - ..\\rustup-init.exe --default-toolchain none -y - python -m ensurepip - pip install virtualenv==16.0.0 - python mach --help - """, - mounts=[ - { - "directory": "git", - "format": "zip", - "content": { - "url": "https://github.com/git-for-windows/git/releases/download/" + - "v2.19.0.windows.1/MinGit-2.19.0-64-bit.zip", - "sha256": "424d24b5fc185a9c5488d7872262464f2facab4f1d4693ea8008196f14a3c19b", - } - }, - { - "directory": "python2", - "format": "zip", - "content": { - "taskId": python2_task, - "artifact": "public/python2.zip", - } - }, - { - "file": "rustup-init.exe", - "content": { - "url": "https://static.rust-lang.org/rustup/archive/" + - "1.13.0/i686-pc-windows-gnu/rustup-init.exe", - "sha256": "43072fbe6b38ab38cd872fa51a33ebd781f83a2d5e83013857fab31fc06e4bf0", - } - } - ], - homedir_path=[ - "git\\cmd", - "python2", - "python2\\Scripts", - ".cargo\\bin", - ], - dependencies=[python2_task], - sparse_checkout=[ - "/*", - "!/tests/wpt/metadata", - "!/tests/wpt/mozilla", - "!/tests/wpt/webgl", - "!/tests/wpt/web-platform-tests", - "/tests/wpt/web-platform-tests/tools", - ], - **build_kwargs + **task_definition ) diff --git a/etc/taskcluster/windows/README.md b/etc/taskcluster/windows/README.md index 89271d9e043..705bf1189aa 100644 --- a/etc/taskcluster/windows/README.md +++ b/etc/taskcluster/windows/README.md @@ -46,7 +46,7 @@ but that will make any running task fail. 
## FIXME: possible improvement * Have a separate staging worker type to try new AMIs without affecting the production CI -* Automate cleaning up old, unused AMIs +* Automate cleaning up old, unused AMIs and their backing EBS snapshots * Use multiple AWS regions * Use the Taskcluster API to automate updating worker type definitions? From a88fb7ed4013c7c5e01299ef3042a6d082cdf714 Mon Sep 17 00:00:00 2001 From: Simon Sapin Date: Tue, 2 Oct 2018 14:10:09 +0200 Subject: [PATCH 12/25] Taskcluster Windows: build --- etc/taskcluster/decision-task.py | 22 +++++++++++++++------- etc/taskcluster/windows/first-boot.ps1 | 12 ++++++++++++ 2 files changed, 27 insertions(+), 7 deletions(-) diff --git a/etc/taskcluster/decision-task.py b/etc/taskcluster/decision-task.py index 12cd7a9be67..de44401eb38 100644 --- a/etc/taskcluster/decision-task.py +++ b/etc/taskcluster/decision-task.py @@ -40,10 +40,12 @@ build_env = { "RUSTFLAGS": "-Dwarnings", "CARGO_INCREMENTAL": "0", "SCCACHE_IDLE_TIMEOUT": "1200", +} +linux_build_env = dict(**build_env, **{ "CCACHE": "sccache", "RUSTC_WRAPPER": "sccache", "SHELL": "/bin/dash", # For SpiderMonkey’s build system -} +}) def linux_tidy_unit(): @@ -109,16 +111,17 @@ def windows_dev(): sha256="b13ea68c1365098c66871f0acab7fd3daa2f2795b5e893fcbb5cd7253f2c08fa", ) return decision.create_task( - task_name="Windows x86_64: clone only (for now)", - worker_type="servo-win2016", + task_name="Windows x86_64: dev build + unit tests", script=""" python -m ensurepip pip install virtualenv==16.0.0 - python mach --help + ..\\rustup-init.exe --default-toolchain none -y set LIB=%HOMEDRIVE%%HOMEPATH%\\gst\\gstreamer\\1.0\\x86_64\\lib;%LIB% + call mach.bat build --dev + call mach.bat test-unit """, mounts=[ { @@ -173,7 +176,7 @@ def windows_dev(): "!/tests/wpt/web-platform-tests", "/tests/wpt/web-platform-tests/tools", ], - **build_kwargs + **windows_build_kwargs ) @@ -362,6 +365,7 @@ decision = DecisionTask( # 
https://docs.taskcluster.net/docs/reference/workers/docker-worker/docs/caches cache_scopes = [ + # FIMXE: move to servo-* cache names "docker-worker:cache:cargo-*", ] build_caches = { @@ -372,15 +376,19 @@ build_caches = { } build_kwargs = { "max_run_time_minutes": 60, - "env": build_env, } linux_build_kwargs = dict(**build_kwargs, **{ "worker_type": "servo-docker-worker", "dockerfile": dockerfile_path("build"), "scopes": cache_scopes, "cache": build_caches, + "env": linux_build_env, +}) +windows_build_kwargs = dict(**build_kwargs, **{ + "worker_type": "servo-win2016", + "env": build_env, }) if __name__ == "__main__": - main() + main() \ No newline at end of file diff --git a/etc/taskcluster/windows/first-boot.ps1 b/etc/taskcluster/windows/first-boot.ps1 index e4986c4ddc3..be330cdf3a0 100644 --- a/etc/taskcluster/windows/first-boot.ps1 +++ b/etc/taskcluster/windows/first-boot.ps1 @@ -58,5 +58,17 @@ Start-Process C:\generic-worker\generic-worker.exe -ArgumentList ( # Start-Process C:\nssm-2.24\win64\nssm.exe -ArgumentList ` # "set", "servo-ping", "AppExit", "Default", "Exit" + +# Visual C++ Build Tools +# https://blogs.msdn.microsoft.com/vcblog/2016/11/16/introducing-the-visual-studio-build-tools/ +$client.DownloadFile("https://aka.ms/vs/15/release/vs_buildtools.exe", "C:\vs_buildtools.exe") +Start-Process C:\vs_buildtools.exe -ArgumentList (` + "--passive --norestart --includeRecommended " + + "--add Microsoft.VisualStudio.Workload.VCTools " + + "--add Microsoft.VisualStudio.Component.VC.ATL " + + "--add Microsoft.VisualStudio.Component.VC.ATLMFC" + ) -Wait + + # Now shutdown, in preparation for creating an image shutdown -s \ No newline at end of file From 7be1c2c8999c6bd6a680a39caced70896627b3b6 Mon Sep 17 00:00:00 2001 From: Simon Sapin Date: Wed, 3 Oct 2018 17:14:23 +0200 Subject: [PATCH 13/25] Windows AMI: disable indexing and anti-virus --- etc/taskcluster/windows/first-boot.ps1 | 7 +++++++ 1 file changed, 7 insertions(+) diff --git 
a/etc/taskcluster/windows/first-boot.ps1 b/etc/taskcluster/windows/first-boot.ps1 index be330cdf3a0..bbf1cdb6449 100644 --- a/etc/taskcluster/windows/first-boot.ps1 +++ b/etc/taskcluster/windows/first-boot.ps1 @@ -2,6 +2,13 @@ Start-Transcript -Path "C:\first_boot.txt" Get-ChildItem Env: | Out-File "C:\install_env.txt" +# DisableIndexing: Disable indexing on all disk volumes (for performance) +Get-WmiObject Win32_Volume -Filter "IndexingEnabled=$true" | Set-WmiInstance -Arguments @{IndexingEnabled=$false} + +# Disable Windows Defender +# https://docs.microsoft.com/en-us/windows/security/threat-protection/windows-defender-antivirus/windows-defender-antivirus-on-windows-server-2016#install-or-uninstall-windows-defender-av-on-windows-server-2016 +Uninstall-WindowsFeature -Name Windows-Defender + # use TLS 1.2 (see bug 1443595) [Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12 From 545d54704a6f273dffa83a4191653dbe1a1e5d5f Mon Sep 17 00:00:00 2001 From: Simon Sapin Date: Wed, 3 Oct 2018 17:40:41 +0200 Subject: [PATCH 14/25] Add bootstrap script for messing with a manually-started instance --- etc/taskcluster/windows/bootstrap.ps1 | 55 +++++++++++++++++++++++++++ 1 file changed, 55 insertions(+) create mode 100644 etc/taskcluster/windows/bootstrap.ps1 diff --git a/etc/taskcluster/windows/bootstrap.ps1 b/etc/taskcluster/windows/bootstrap.ps1 new file mode 100644 index 00000000000..4b64c19e39a --- /dev/null +++ b/etc/taskcluster/windows/bootstrap.ps1 @@ -0,0 +1,55 @@ +# Use this script is to get a build environment +# when booting a Windows EC2 instance outside of Taskcluster. 
+ + +[Environment]::SetEnvironmentVariable("Path", $env:Path + + ";C:\git\cmd;C:\python2;C:\python2\Scripts;C:\Users\Administrator\.cargo\bin", + [EnvironmentVariableTarget]::Machine) +[Environment]::SetEnvironmentVariable("Lib", $env:Lib + + ";C:\gstreamer\1.0\x86_64\lib", + [EnvironmentVariableTarget]::Machine) + + +# Optional +$client.DownloadFile( + "http://download.tuxfamily.org/dvorak/windows/bepo.exe", + "C:\bepo.exe" +) + + +# use TLS 1.2 (see bug 1443595) +[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12 + +# For making http requests +$client = New-Object system.net.WebClient +$shell = new-object -com shell.application + +# Download a zip file and extract it +function Expand-ZIPFile($file, $destination, $url) +{ + $client.DownloadFile($url, $file) + $zip = $shell.NameSpace($file) + foreach($item in $zip.items()) + { + $shell.Namespace($destination).copyhere($item) + } +} + +md C:\git +Expand-ZIPFile -File "C:\git.zip" -Destination "C:\git" -Url ` + "https://github.com/git-for-windows/git/releases/download/v2.19.0.windows.1/MinGit-2.19.0-64-bit.zip" + +$client.DownloadFile( + "https://static.rust-lang.org/rustup/archive/1.13.0/i686-pc-windows-gnu/rustup-init.exe", + "C:\rustup-init.exe" +) + +Start-Process C:\rustup-init.exe -Wait -NoNewWindow -ArgumentList ` + "--default-toolchain none -y" + +md C:\python2 +Expand-ZIPFile -File "C:\python2.zip" -Destination "C:\python2" -Url ` + "https://queue.taskcluster.net/v1/task/RIuts6jOQtCSjMbuaOU6yw/runs/0/artifacts/public/repacked.zip" + +Expand-ZIPFile -File "C:\gst.zip" -Destination "C:\" -Url ` + "https://queue.taskcluster.net/v1/task/KAzPF1ZYSFmg2BQKLt0LwA/runs/0/artifacts/public/repacked.zip" \ No newline at end of file From 95150280bf6aecf1f68523453b93ce3c875e17ba Mon Sep 17 00:00:00 2001 From: Simon Sapin Date: Sat, 6 Oct 2018 15:42:59 +0200 Subject: [PATCH 15/25] Rename decision-task.py to make it importable --- .taskcluster.yml | 2 +- etc/taskcluster/README.md | 8 ++++---- 
etc/taskcluster/{decision-task.py => decision_task.py} | 0 servo-tidy.toml | 2 +- 4 files changed, 6 insertions(+), 6 deletions(-) rename etc/taskcluster/{decision-task.py => decision_task.py} (100%) diff --git a/.taskcluster.yml b/.taskcluster.yml index 2749d11ada0..69b8ef41446 100644 --- a/.taskcluster.yml +++ b/.taskcluster.yml @@ -53,4 +53,4 @@ tasks: cd repo && git fetch --depth 1 "$GIT_URL" "$GIT_REF" && git reset --hard "$GIT_SHA" && - python3 etc/taskcluster/decision-task.py + python3 etc/taskcluster/decision_task.py diff --git a/etc/taskcluster/README.md b/etc/taskcluster/README.md index d423020a54f..e89b10a4cd1 100644 --- a/etc/taskcluster/README.md +++ b/etc/taskcluster/README.md @@ -40,7 +40,7 @@ to build an arbitrary [task graph]. ## Servo’s decision task This repository’s [`.taskcluster.yml`][tc.yml] schedules a single task -that runs the Python 3 script [`etc/taskcluster/decision-task.py`](decision-task.py). +that runs the Python 3 script [`etc/taskcluster/decision_task.py`](decision_task.py). It is called a *decision task* as it is responsible for deciding what other tasks to schedule. The Docker image that runs the decision task @@ -101,7 +101,7 @@ together with multiple testing tasks that each depend on the build task (wait until it successfully finishes before they can start) and start by downloading the artifact that was saved earlier. -The logic for all this is in [`decision-task.py`](decision-task.py) +The logic for all this is in [`decision_task.py`](decision_task.py) and can be modified in any pull request. [web-platform-tests]: https://github.com/web-platform-tests/wpt @@ -162,7 +162,7 @@ to edit that role in the web UI and grant more scopes to these tasks The [`project-servo/daily`] hook in Taskcluster’s [Hooks service] is used to run some tasks automatically ever 24 hours. In this case as well we use a decision task. 
-The `decision-task.py` script can differenciate this from a GitHub push +The `decision_task.py` script can differenciate this from a GitHub push based on the `$TASK_FOR` environment variable. Daily tasks can also be triggered manually. @@ -221,7 +221,7 @@ To modify those, submit a pull request. * The [`.taskcluster.yml`][tc.yml] file, for starting decision tasks in reaction to GitHub events -* The [`etc/ci/decision-task.py`](decision-task.py) file, +* The [`etc/ci/decision_task.py`](decision_task.py) file, defining what other tasks to schedule However some configuration needs to be handled separately. diff --git a/etc/taskcluster/decision-task.py b/etc/taskcluster/decision_task.py similarity index 100% rename from etc/taskcluster/decision-task.py rename to etc/taskcluster/decision_task.py diff --git a/servo-tidy.toml b/servo-tidy.toml index 471bb629dcd..1777f195d50 100644 --- a/servo-tidy.toml +++ b/servo-tidy.toml @@ -69,7 +69,7 @@ files = [ "./tests/wpt/mozilla/tests/css/pre_with_tab.html", "./tests/wpt/mozilla/tests/mozilla/textarea_placeholder.html", # Python 3 syntax causes "E901 SyntaxError" when flake8 runs in Python 2 - "./etc/taskcluster/decision-task.py", + "./etc/taskcluster/decision_task.py", "./etc/taskcluster/decisionlib.py", ] # Directories that are ignored for the non-WPT tidy check. 
From eaee801e646d3b5623b891efdc468d42d9ff03d9 Mon Sep 17 00:00:00 2001 From: Simon Sapin Date: Sun, 7 Oct 2018 00:28:40 +0200 Subject: [PATCH 16/25] Add try-taskcluster branch --- .taskcluster.yml | 2 +- etc/taskcluster/decision_task.py | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/.taskcluster.yml b/.taskcluster.yml index 69b8ef41446..098a1414fe7 100644 --- a/.taskcluster.yml +++ b/.taskcluster.yml @@ -5,7 +5,7 @@ policy: tasks: - $if: 'tasks_for == "github-push"' then: - $if: 'event.ref in ["refs/heads/auto", "refs/heads/try"]' + $if: 'event.ref in ["refs/heads/auto", "refs/heads/try", "refs/heads/try-taskcluster"]' then: # NOTE: when updating this consider whether the daily hook needs similar changes: diff --git a/etc/taskcluster/decision_task.py b/etc/taskcluster/decision_task.py index de44401eb38..87e01acf030 100644 --- a/etc/taskcluster/decision_task.py +++ b/etc/taskcluster/decision_task.py @@ -388,6 +388,7 @@ windows_build_kwargs = dict(**build_kwargs, **{ "worker_type": "servo-win2016", "env": build_env, }) + .with_index_and_artifacts_expire_in(build_artifacts_expire_in) if __name__ == "__main__": From c0b132a2e08d44d0e57552842ea11d1e6078bb1e Mon Sep 17 00:00:00 2001 From: Simon Sapin Date: Sat, 6 Oct 2018 16:10:27 +0200 Subject: [PATCH 17/25] Rewrite decisionlib with a builder pattern to be more composable --- etc/taskcluster/decision_task.py | 419 +++++++------------- etc/taskcluster/decisionlib.py | 653 ++++++++++++++++++++----------- etc/taskcluster/mock.py | 19 +- 3 files changed, 585 insertions(+), 506 deletions(-) diff --git a/etc/taskcluster/decision_task.py b/etc/taskcluster/decision_task.py index 87e01acf030..67e8329bb84 100644 --- a/etc/taskcluster/decision_task.py +++ b/etc/taskcluster/decision_task.py @@ -4,21 +4,18 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-import hashlib -import json import os.path -import subprocess -from decisionlib import DecisionTask +from decisionlib import * -def main(): - task_for = os.environ["TASK_FOR"] - +def main(task_for, mock=False): if task_for == "github-push": - linux_tidy_unit() - #linux_wpt() - android_arm32() - windows_dev() + if CONFIG.git_ref in ["refs/heads/auto", "refs/heads/try", "refs/heads/try-taskcluster"]: + linux_tidy_unit() + android_arm32() + windows_dev() + if mock: + linux_wpt() # https://tools.taskcluster.net/hooks/project-servo/daily elif task_for == "daily": @@ -31,206 +28,89 @@ def main(): ping_on_daily_task_failure = "SimonSapin, nox, emilio" -build_artifacts_expiry = "1 week" -build_dependencies_artifacts_expiry = "1 month" -log_artifacts_expiry = "1 year" +build_artifacts_expire_in = "1 week" +build_dependencies_artifacts_expire_in = "1 month" +log_artifacts_expire_in = "1 year" build_env = { "RUST_BACKTRACE": "1", "RUSTFLAGS": "-Dwarnings", "CARGO_INCREMENTAL": "0", - "SCCACHE_IDLE_TIMEOUT": "1200", } -linux_build_env = dict(**build_env, **{ +linux_build_env = { "CCACHE": "sccache", "RUSTC_WRAPPER": "sccache", + "SCCACHE_IDLE_TIMEOUT": "1200", "SHELL": "/bin/dash", # For SpiderMonkey’s build system -}) +} +windows_build_env = {} +windows_sparse_checkout = [ + "/*", + "!/tests/wpt/metadata", + "!/tests/wpt/mozilla", + "!/tests/wpt/webgl", + "!/tests/wpt/web-platform-tests", + "/tests/wpt/web-platform-tests/tools", +] def linux_tidy_unit(): - return decision.create_task( - task_name="Linux x86_64: tidy + dev build + unit tests", - script=""" - ./mach test-tidy --no-progress --all - ./mach build --dev - ./mach test-unit - ./mach package --dev - ./mach test-tidy --no-progress --self-test - python2.7 ./etc/memory_reports_over_time.py --test - python3 ./etc/taskcluster/mock.py - ./etc/ci/lockfile_changed.sh - ./etc/ci/check_no_panic.sh - """, - **linux_build_kwargs - ) + return linux_build_task("Linux x86_64: tidy + dev build + unit tests").with_script(""" + ./mach 
test-tidy --no-progress --all + ./mach build --dev + ./mach test-unit + ./mach package --dev + ./mach test-tidy --no-progress --self-test + python2.7 ./etc/memory_reports_over_time.py --test + python3 ./etc/taskcluster/mock.py + ./etc/ci/lockfile_changed.sh + ./etc/ci/check_no_panic.sh + """).create() def with_rust_nightly(): - return decision.create_task( - task_name="Linux x86_64: with Rust Nightly", - script=""" - echo "nightly" > rust-toolchain - ./mach build --dev - ./mach test-unit - """, - **linux_build_kwargs - ) + return linux_build_task("Linux x86_64: with Rust Nightly").with_script(""" + echo "nightly" > rust-toolchain + ./mach build --dev + ./mach test-unit + """).create() def android_arm32(): - return decision.find_or_create_task( - index_bucket="build.android_armv7_release", - index_key=os.environ["GIT_SHA"], # Set in .taskcluster.yml - index_expiry=build_artifacts_expiry, - - task_name="Android ARMv7: build", + return ( + linux_build_task("Android ARMv7: build") # file: NDK parses $(file $SHELL) to tell x86_64 from x86 # wget: servo-media-gstreamer’s build script - script=""" + .with_script(""" apt-get install -y --no-install-recommends openjdk-8-jdk-headless file wget ./etc/ci/bootstrap-android-and-accept-licences.sh ./mach build --android --release - """, - artifacts=[ + """) + .with_artifacts( "/repo/target/armv7-linux-androideabi/release/servoapp.apk", "/repo/target/armv7-linux-androideabi/release/servoview.aar", - ], - **linux_build_kwargs + ) + .find_or_create("build.android_armv7_release." 
+ CONFIG.git_sha) ) def windows_dev(): - python2_task = repack_msi( - url="https://www.python.org/ftp/python/2.7.15/python-2.7.15.amd64.msi", - sha256="5e85f3c4c209de98480acbf2ba2e71a907fd5567a838ad4b6748c76deb286ad7", - ) - gstreamer_task = repack_msi( - url="https://gstreamer.freedesktop.org/data/pkg/windows/" + - "1.14.3/gstreamer-1.0-devel-x86_64-1.14.3.msi", - sha256="b13ea68c1365098c66871f0acab7fd3daa2f2795b5e893fcbb5cd7253f2c08fa", - ) - return decision.create_task( - task_name="Windows x86_64: dev build + unit tests", - script=""" - python -m ensurepip - pip install virtualenv==16.0.0 - - ..\\rustup-init.exe --default-toolchain none -y - - set LIB=%HOMEDRIVE%%HOMEPATH%\\gst\\gstreamer\\1.0\\x86_64\\lib;%LIB% - - call mach.bat build --dev - call mach.bat test-unit - """, - mounts=[ - { - "directory": "git", - "format": "zip", - "content": { - "url": "https://github.com/git-for-windows/git/releases/download/" + - "v2.19.0.windows.1/MinGit-2.19.0-64-bit.zip", - "sha256": "424d24b5fc185a9c5488d7872262464f2facab4f1d4693ea8008196f14a3c19b", - } - }, - { - "directory": "python2", - "format": "zip", - "content": { - "taskId": python2_task, - "artifact": "public/repacked.zip", - } - }, - { - "directory": "gst", - "format": "zip", - "content": { - "taskId": gstreamer_task, - "artifact": "public/repacked.zip", - } - }, - { - "file": "rustup-init.exe", - "content": { - "url": "https://static.rust-lang.org/rustup/archive/" + - "1.13.0/i686-pc-windows-gnu/rustup-init.exe", - "sha256": "43072fbe6b38ab38cd872fa51a33ebd781f83a2d5e83013857fab31fc06e4bf0", - } - } - ], - homedir_path=[ - "git\\cmd", - "python2", - "python2\\Scripts", - ".cargo\\bin", - ], - dependencies=[ - python2_task, - gstreamer_task, - ], - sparse_checkout=[ - "/*", - "!/tests/wpt/metadata", - "!/tests/wpt/mozilla", - "!/tests/wpt/webgl", - "!/tests/wpt/web-platform-tests", - "/tests/wpt/web-platform-tests/tools", - ], - **windows_build_kwargs - ) - - -def repack_msi(url, sha256): - task_definition = 
dict( - task_name="Windows x86_64: repackage " + url.rpartition("/")[-1], - worker_type="servo-win2016", - with_repo=False, - script=""" - lessmsi x input.msi extracted\\ - cd extracted\\SourceDir - 7za a repacked.zip * - """, - mounts=[ - { - "file": "input.msi", - "content": { - "url": url, - "sha256": sha256, - } - }, - { - "directory": "lessmsi", - "format": "zip", - "content": { - "url": "https://github.com/activescott/lessmsi/releases/download/" + - "v1.6.1/lessmsi-v1.6.1.zip", - "sha256": "540b8801e08ec39ba26a100c855898f455410cecbae4991afae7bb2b4df026c7", - } - }, - { - "directory": "7zip", - "format": "zip", - "content": { - "url": "https://www.7-zip.org/a/7za920.zip", - "sha256": "2a3afe19c180f8373fa02ff00254d5394fec0349f5804e0ad2f6067854ff28ac", - } - } - ], - homedir_path=[ - "lessmsi", - "7zip", - ], - artifacts=[ - "extracted/SourceDir/repacked.zip", - ], - max_run_time_minutes=20, - ) - index_by = json.dumps(task_definition).encode("utf-8") - return decision.find_or_create_task( - index_bucket="by-task-definition", - index_key=hashlib.sha256(index_by).hexdigest(), - index_expiry=build_artifacts_expiry, - **task_definition + return ( + windows_build_task("Windows x86_64: dev build + unit tests") + .with_python2() + .with_rustup() + .with_repacked_msi( + url="https://gstreamer.freedesktop.org/data/pkg/windows/" + + "1.14.3/gstreamer-1.0-devel-x86_64-1.14.3.msi", + sha256="b13ea68c1365098c66871f0acab7fd3daa2f2795b5e893fcbb5cd7253f2c08fa", + path="gst", + ) + .with_script( + "set LIB=%HOMEDRIVE%%HOMEPATH%\\gst\\gstreamer\\1.0\\x86_64\\lib;%LIB%", + "call mach.bat build --dev", + "call mach.bat test-unit", + ) + .create() ) @@ -243,40 +123,23 @@ def linux_wpt(): def linux_release_build(): - return decision.find_or_create_task( - index_bucket="build.linux_x86-64_release", - index_key=os.environ["GIT_SHA"], # Set in .taskcluster.yml - index_expiry=build_artifacts_expiry, - - task_name="Linux x86_64: release build", - script=""" + return ( + 
linux_build_task("Linux x86_64: release build") + .with_script(""" ./mach build --release --with-debug-assertions -p servo ./etc/ci/lockfile_changed.sh tar -czf /target.tar.gz \ target/release/servo \ target/release/build/osmesa-src-*/output \ target/release/build/osmesa-src-*/out/lib/gallium - """, - artifacts=[ - "/target.tar.gz", - ], - **linux_build_kwargs + """) + .with_artifacts("/target.tar.gz") + .find_or_create("build.linux_x86-64_release." + CONFIG.git_sha) ) def wpt_chunk(release_build_task, total_chunks, this_chunk, extra): - if extra: - name_extra = " + extra" - script_extra = """ - ./mach test-wpt-failure - ./mach test-wpt --release --binary-arg=--multiprocess --processes 24 \ - --log-raw test-wpt-mp.log \ - --log-errorsummary wpt-mp-errorsummary.log \ - eventsource - """ - else: - name_extra = "" - script_extra = "" + name = "Linux x86_64: WPT chunk %s / %s" % (this_chunk, total_chunks) script = """ ./mach test-wpt \ --release \ @@ -296,100 +159,114 @@ def wpt_chunk(release_build_task, total_chunks, this_chunk, extra): # IndexError: list index out of range # File "/repo/python/servo/testing_commands.py", line 533, in filter_intermittents # pull_request = int(last_merge.split(' ')[4][1:]) - create_run_task( - build_task=release_build_task, - task_name="Linux x86_64: WPT chunk %s / %s%s" % (this_chunk, total_chunks, name_extra), - script=script_extra + script, - env={ - "TOTAL_CHUNKS": total_chunks, - "THIS_CHUNK": this_chunk, - }, + if extra: + name += " + extra" + script += """ + ./mach test-wpt-failure + ./mach test-wpt --release --binary-arg=--multiprocess --processes 24 \ + --log-raw test-wpt-mp.log \ + --log-errorsummary wpt-mp-errorsummary.log \ + eventsource + """ + return ( + linux_run_task(name, release_build_task, script) + .with_env(TOTAL_CHUNKS=total_chunks, THIS_CHUNK=this_chunk) + .create() ) -def create_run_task(*, build_task, script, **kwargs): - fetch_build = """ - ./etc/taskcluster/curl-artifact.sh ${BUILD_TASK_ID} target.tar.gz | 
tar -xz - """ - kwargs.setdefault("env", {})["BUILD_TASK_ID"] = build_task - kwargs.setdefault("dependencies", []).append(build_task) - kwargs.setdefault("artifacts", []).extend( - ("/repo/" + word, log_artifacts_expiry) - for word in script.split() if word.endswith(".log") - ) - return decision.create_task( - script=fetch_build + script, - max_run_time_minutes=60, - dockerfile=dockerfile_path("run"), - **kwargs +def linux_run_task(name, build_task, script): + return ( + linux_task(name) + .with_dockerfile(dockerfile_path("run")) + .with_early_script(""" + ./etc/taskcluster/curl-artifact.sh ${BUILD_TASK_ID} target.tar.gz | tar -xz + """) + .with_env(BUILD_TASK_ID=build_task) + .with_dependencies(build_task) + .with_script(script) + .with_index_and_artifacts_expire_in(log_artifacts_expire_in) + .with_artifacts(*[ + "/repo/" + word + for word in script.split() if word.endswith(".log") + ]) + .with_max_run_time_minutes(60) ) def daily_tasks_setup(): # ':' is not accepted in an index namepspace: # https://docs.taskcluster.net/docs/reference/core/taskcluster-index/references/api - now = decision.now.strftime("%Y-%m-%d_%H-%M-%S") - index_path = "%s.daily.%s" % (decision.index_prefix, now) + now = SHARED.now.strftime("%Y-%m-%d_%H-%M-%S") + index_path = "%s.daily.%s" % (CONFIG.index_prefix, now) # Index this task manually rather than with a route, # so that it is indexed even if it fails. - decision.index_service.insertTask(index_path, { - "taskId": os.environ["TASK_ID"], + SHARED.index_service.insertTask(index_path, { + "taskId": CONFIG.decision_task_id, "rank": 0, "data": {}, - "expires": decision.from_now_json(log_artifacts_expiry), + "expires": SHARED.from_now_json(log_artifacts_expire_in), }) # Unlike when reacting to a GitHub event, # the commit hash is not known until we clone the repository. 
- os.environ["GIT_SHA"] = \ - subprocess.check_output(["git", "rev-parse", "HEAD"]).decode("utf8").strip() + CONFIG.git_sha_is_current_head() # On failure, notify a few people on IRC # https://docs.taskcluster.net/docs/reference/core/taskcluster-notify/docs/usage notify_route = "notify.irc-channel.#servo.on-failed" - decision.routes_for_all_subtasks.append(notify_route) - decision.scopes_for_all_subtasks.append("queue:route:" + notify_route) - decision.task_name_template = "Servo daily: %s. On failure, ping: " + ping_on_daily_task_failure + CONFIG.routes_for_all_subtasks.append(notify_route) + CONFIG.scopes_for_all_subtasks.append("queue:route:" + notify_route) + CONFIG.task_name_template = "Servo daily: %s. On failure, ping: " + ping_on_daily_task_failure def dockerfile_path(name): return os.path.join(os.path.dirname(__file__), "docker", name + ".dockerfile") -decision = DecisionTask( - task_name_template="Servo: %s", - index_prefix="project.servo.servo", - default_worker_type="servo-docker-worker", - docker_image_cache_expiry=build_dependencies_artifacts_expiry, -) +def linux_task(name): + return DockerWorkerTask(name).with_worker_type("servo-docker-worker") -# https://docs.taskcluster.net/docs/reference/workers/docker-worker/docs/caches -cache_scopes = [ - # FIMXE: move to servo-* cache names - "docker-worker:cache:cargo-*", -] -build_caches = { - "cargo-registry-cache": "/root/.cargo/registry", - "cargo-git-cache": "/root/.cargo/git", - "cargo-rustup": "/root/.rustup", - "cargo-sccache": "/root/.cache/sccache", -} -build_kwargs = { - "max_run_time_minutes": 60, -} -linux_build_kwargs = dict(**build_kwargs, **{ - "worker_type": "servo-docker-worker", - "dockerfile": dockerfile_path("build"), - "scopes": cache_scopes, - "cache": build_caches, - "env": linux_build_env, -}) -windows_build_kwargs = dict(**build_kwargs, **{ - "worker_type": "servo-win2016", - "env": build_env, -}) + +def windows_task(name): + return 
WindowsGenericWorkerTask(name).with_worker_type("servo-win2016") + + +def linux_build_task(name): + return ( + linux_task(name) + # https://docs.taskcluster.net/docs/reference/workers/docker-worker/docs/caches + # FIMXE: move to servo-* cache names + .with_scopes("docker-worker:cache:cargo-*") + .with_caches(**{ + "cargo-registry-cache": "/root/.cargo/registry", + "cargo-git-cache": "/root/.cargo/git", + "cargo-rustup": "/root/.rustup", + "cargo-sccache": "/root/.cache/sccache", + }) .with_index_and_artifacts_expire_in(build_artifacts_expire_in) + .with_max_run_time_minutes(60) + .with_dockerfile(dockerfile_path("build")) + .with_env(**build_env, **linux_build_env) + .with_repo() + .with_index_and_artifacts_expire_in(build_artifacts_expire_in) + ) + + +def windows_build_task(name): + return ( + windows_task(name) + .with_max_run_time_minutes(60) + .with_env(**build_env, **windows_build_env) + .with_repo(sparse_checkout=windows_sparse_checkout) + ) + + +CONFIG.task_name_template = "Servo: %s" +CONFIG.index_prefix = "project.servo.servo" +CONFIG.docker_images_expire_in = build_dependencies_artifacts_expire_in +CONFIG.repacked_msi_files_expire_in = build_dependencies_artifacts_expire_in if __name__ == "__main__": - main() \ No newline at end of file + main(task_for=os.environ["TASK_FOR"]) \ No newline at end of file diff --git a/etc/taskcluster/decisionlib.py b/etc/taskcluster/decisionlib.py index 3e325f3ed44..2cfbd42d4ee 100644 --- a/etc/taskcluster/decisionlib.py +++ b/etc/taskcluster/decisionlib.py @@ -19,270 +19,450 @@ import hashlib import json import os import re +import subprocess import sys import taskcluster -class DecisionTask: +# Public API +__all__ = [ + "CONFIG", "SHARED", "Task", "DockerWorkerTask", + "GenericWorkerTask", "WindowsGenericWorkerTask", +] + + +class Config: """ - Holds some project-specific configuration and provides higher-level functionality - on top of the `taskcluster` package a.k.a. `taskcluster-client.py`. 
+ Global configuration, for users of the library to modify. """ + def __init__(self): + self.task_name_template = "%s" + self.index_prefix = "garbage.servo-decisionlib" + self.scopes_for_all_subtasks = [] + self.routes_for_all_subtasks = [] + self.docker_images_expire_in = "1 month" + self.repacked_msi_files_expire_in = "1 month" - DOCKER_IMAGE_ARTIFACT_FILENAME = "image.tar.lz4" + # Set by docker-worker: + # https://docs.taskcluster.net/docs/reference/workers/docker-worker/docs/environment + self.decision_task_id = os.environ.get("TASK_ID") - # https://github.com/servo/taskcluster-bootstrap-docker-images#image-builder - DOCKER_IMAGE_BUILDER_IMAGE = "servobrowser/taskcluster-bootstrap:image-builder@sha256:" \ - "0a7d012ce444d62ffb9e7f06f0c52fedc24b68c2060711b313263367f7272d9d" + # Set in the decision task’s payload, such as defined in .taskcluster.yml + self.task_owner = os.environ.get("TASK_OWNER") + self.task_source = os.environ.get("TASK_SOURCE") + self.git_url = os.environ.get("GIT_URL") + self.git_ref = os.environ.get("GIT_REF") + self.git_sha = os.environ.get("GIT_SHA") - def __init__(self, *, index_prefix="garbage.servo-decisionlib", task_name_template="%s", - default_worker_type="github-worker", docker_image_cache_expiry="1 month", - routes_for_all_subtasks=None, scopes_for_all_subtasks=None): - self.task_name_template = task_name_template - self.index_prefix = index_prefix - self.default_worker_type = default_worker_type - self.docker_image_cache_expiry = docker_image_cache_expiry - self.routes_for_all_subtasks = routes_for_all_subtasks or [] - self.scopes_for_all_subtasks = scopes_for_all_subtasks or [] + def git_sha_is_current_head(self): + output = subprocess.check_output(["git", "rev-parse", "HEAD"]) + self.git_sha = output.decode("utf8").strip() - # https://docs.taskcluster.net/docs/reference/workers/docker-worker/docs/features#feature-taskclusterproxy + + +class Shared: + """ + Global shared state. 
+ """ + def __init__(self): + self.now = datetime.datetime.utcnow() + self.found_or_created_indexed_tasks = {} + + # taskclusterProxy URLs: + # https://docs.taskcluster.net/docs/reference/workers/docker-worker/docs/features self.queue_service = taskcluster.Queue(options={"baseUrl": "http://taskcluster/queue/v1/"}) self.index_service = taskcluster.Index(options={"baseUrl": "http://taskcluster/index/v1/"}) - self.now = datetime.datetime.utcnow() - self.found_or_created_indices = {} - def from_now_json(self, offset): """ Same as `taskcluster.fromNowJSON`, but uses the creation time of `self` for “now”. """ return taskcluster.stringDate(taskcluster.fromNow(offset, dateObj=self.now)) - def find_or_create_task(self, *, index_bucket, index_key, index_expiry, artifacts, **kwargs): - """ - Find a task indexed in the given bucket (kind, category, …) and cache key, - on schedule a new one if there isn’t one yet. - Returns the task ID. - """ - index_path = "%s.%s.%s" % (self.index_prefix, index_bucket, index_key) +CONFIG = Config() +SHARED = Shared() +from_now_json = SHARED.from_now_json +now = SHARED.now - task_id = self.found_or_created_indices.get(index_path) + +def chaining(op, attr): + def method(self, *args, **kwargs): + op(self, attr, *args, **kwargs) + return self + return method + + +def append_to_attr(self, attr, *args): getattr(self, attr).extend(args) +def prepend_to_attr(self, attr, *args): getattr(self, attr)[0:0] = list(args) +def update_attr(self, attr, **kwargs): getattr(self, attr).update(kwargs) + + +class Task: + def __init__(self, name): + self.name = name + self.description = "" + self.scheduler_id = "taskcluster-github" + self.provisioner_id = "aws-provisioner-v1" + self.worker_type = "github-worker" + self.deadline_in = "1 day" + self.expires_in = "1 year" + self.index_and_artifacts_expire_in = self.expires_in + self.dependencies = [] + self.scopes = [] + self.routes = [] + self.extra = {} + + with_description = chaining(setattr, "description") + 
with_scheduler_id = chaining(setattr, "scheduler_id") + with_provisioner_id = chaining(setattr, "provisioner_id") + with_worker_type = chaining(setattr, "worker_type") + with_deadline_in = chaining(setattr, "deadline_in") + with_expires_in = chaining(setattr, "expires_in") + with_index_and_artifacts_expire_in = chaining(setattr, "index_and_artifacts_expire_in") + + with_dependencies = chaining(append_to_attr, "dependencies") + with_scopes = chaining(append_to_attr, "scopes") + with_routes = chaining(append_to_attr, "routes") + + with_extra = chaining(update_attr, "extra") + + def build_worker_payload(self): + raise NotImplementedError + + def create(self): + worker_payload = self.build_worker_payload() + + assert CONFIG.decision_task_id + assert CONFIG.task_owner + assert CONFIG.task_source + queue_payload = { + "taskGroupId": CONFIG.decision_task_id, + "dependencies": [CONFIG.decision_task_id] + self.dependencies, + "schedulerId": self.scheduler_id, + "provisionerId": self.provisioner_id, + "workerType": self.worker_type, + + "created": SHARED.from_now_json(""), + "deadline": SHARED.from_now_json(self.deadline_in), + "expires": SHARED.from_now_json(self.expires_in), + "metadata": { + "name": CONFIG.task_name_template % self.name, + "description": self.description, + "owner": CONFIG.task_owner, + "source": CONFIG.task_source, + }, + + "payload": worker_payload, + } + scopes = self.scopes + CONFIG.scopes_for_all_subtasks + routes = self.routes + CONFIG.routes_for_all_subtasks + if any(r.startswith("index.") for r in routes): + self.extra.setdefault("index", {})["expires"] = \ + SHARED.from_now_json(self.index_and_artifacts_expire_in) + dict_update_if_truthy( + queue_payload, + scopes=scopes, + routes=routes, + extra=self.extra, + ) + + task_id = taskcluster.slugId().decode("utf8") + SHARED.queue_service.createTask(task_id, queue_payload) + print("Scheduled %s" % self.name) + return task_id + + def find_or_create(self, index_path=None): + if not index_path: + 
worker_type = self.worker_type + index_by = json.dumps([worker_type, self.build_worker_payload()]).encode("utf-8") + index_path = "by-task-definition." + hashlib.sha256(index_by).hexdigest() + index_path = "%s.%s" % (CONFIG.index_prefix, index_path) + + task_id = SHARED.found_or_created_indexed_tasks.get(index_path) if task_id is not None: return task_id try: - result = self.index_service.findTask(index_path) + result = SHARED.index_service.findTask(index_path) task_id = result["taskId"] except taskcluster.TaskclusterRestFailure as e: - if e.status_code == 404: - task_id = self.create_task( - routes=[ - "index." + index_path, - ], - extra={ - "index": { - "expires": self.from_now_json(self.docker_image_cache_expiry), - }, - }, - artifacts=[ - (artifact, index_expiry) - for artifact in artifacts - ], - **kwargs - ) - else: + if e.status_code != 404: raise + self.routes.append("index." + index_path) + task_id = self.create() - self.found_or_created_indices[index_path] = task_id + SHARED.found_or_created_indexed_tasks[index_path] = task_id return task_id - def find_or_build_docker_image(self, dockerfile): - """ - Find a task that built a Docker image based on this `dockerfile`, - or schedule a new image-building task if needed. - Returns the task ID. 
+class GenericWorkerTask(Task): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.max_run_time_minutes = 30 + self.env = {} + self.mounts = [] + self.artifacts = [] + + with_max_run_time_minutes = chaining(setattr, "max_run_time_minutes") + with_artifacts = chaining(append_to_attr, "artifacts") + with_mounts = chaining(append_to_attr, "mounts") + with_env = chaining(update_attr, "env") + + def build_command(self): + raise NotImplementedError + + def build_worker_payload(self): + worker_payload = { + "command": self.build_command(), + "maxRunTime": self.max_run_time_minutes * 60 + } + return dict_update_if_truthy( + worker_payload, + env=self.env, + mounts=self.mounts, + artifacts=[ + { + "type": "file", + "path": path, + "name": "public/" + url_basename(path), + "expires": SHARED.from_now_json(self.index_and_artifacts_expire_in), + } + for path in self.artifacts + ], + ) + + def _mount_content(self, url_or_artifact_name, task_id, sha256): + if task_id: + content = {"taskId": task_id, "artifact": url_or_artifact_name} + else: + content = {"url": url_or_artifact_name} + if sha256: + content["sha256"] = sha256 + return content + + def with_file_mount(self, url_or_artifact_name, task_id=None, sha256=None, path=None): + return self.with_mounts({ + "file": path or url_basename(url_or_artifact_name), + "content": self._mount_content(url_or_artifact_name, task_id, sha256), + }) + + def with_directory_mount(self, url_or_artifact_name, task_id=None, sha256=None, path=None): + supported_formats = ["rar", "tar.bz2", "tar.gz", "zip"] + for fmt in supported_formats: + suffix = "." 
+ fmt + if url_or_artifact_name.endswith(suffix): + return self.with_mounts({ + "directory": path or url_basename(url_or_artifact_name[:-len(suffix)]), + "content": self._mount_content(url_or_artifact_name, task_id, sha256), + "format": fmt, + }) + raise ValueError( + "%r does not appear to be in one of the supported formats: %r" + % (url_or_artifact_name, ", ".join(supported_formats)) + ) + + +class WindowsGenericWorkerTask(GenericWorkerTask): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.scripts = [] + + with_script = chaining(append_to_attr, "scripts") + with_early_script = chaining(prepend_to_attr, "scripts") + + def build_command(self): + return [deindent(s) for s in self.scripts] + + def with_path_from_homedir(self, *paths): + for p in paths: + self.with_early_script("set PATH=%HOMEDRIVE%%HOMEPATH%\\{};%PATH%".format(p)) + return self + + def with_repo(self, sparse_checkout=None): + git = """ + git init repo + cd repo """ + if sparse_checkout: + git += """ + git config core.sparsecheckout true + echo %SPARSE_CHECKOUT_BASE64% > .git\\info\\sparse.b64 + certutil -decode .git\\info\\sparse.b64 .git\\info\\sparse-checkout + type .git\\info\\sparse-checkout + """ + self.env["SPARSE_CHECKOUT_BASE64"] = base64.b64encode( + "\n".join(sparse_checkout).encode("utf-8")) + git += """ + git fetch --depth 1 %GIT_URL% %GIT_REF% + git reset --hard %GIT_SHA% + """ + return self \ + .with_git() \ + .with_script(git) \ + .with_env(**git_env()) + + def with_git(self): + return self \ + .with_path_from_homedir("git\\cmd") \ + .with_directory_mount( + "https://github.com/git-for-windows/git/releases/download/" + + "v2.19.0.windows.1/MinGit-2.19.0-64-bit.zip", + sha256="424d24b5fc185a9c5488d7872262464f2facab4f1d4693ea8008196f14a3c19b", + path="git", + ) + + def with_rustup(self): + return self \ + .with_path_from_homedir(".cargo\\bin") \ + .with_early_script( + "%HOMEDRIVE%%HOMEPATH%\\rustup-init.exe --default-toolchain none -y" + ) \ + 
.with_file_mount( + "https://static.rust-lang.org/rustup/archive/" + + "1.13.0/i686-pc-windows-gnu/rustup-init.exe", + sha256="43072fbe6b38ab38cd872fa51a33ebd781f83a2d5e83013857fab31fc06e4bf0", + ) + + def with_repacked_msi(self, url, sha256, path): + repack_task = ( + WindowsGenericWorkerTask("MSI repack: " + url) + .with_worker_type(self.worker_type) + .with_max_run_time_minutes(20) + .with_file_mount(url, sha256=sha256, path="input.msi") + .with_directory_mount( + "https://github.com/activescott/lessmsi/releases/download/" + + "v1.6.1/lessmsi-v1.6.1.zip", + sha256="540b8801e08ec39ba26a100c855898f455410cecbae4991afae7bb2b4df026c7", + path="lessmsi" + ) + .with_directory_mount( + "https://www.7-zip.org/a/7za920.zip", + sha256="2a3afe19c180f8373fa02ff00254d5394fec0349f5804e0ad2f6067854ff28ac", + path="7zip", + ) + .with_path_from_homedir("lessmsi", "7zip") + .with_script(""" + lessmsi x input.msi extracted\\ + cd extracted\\SourceDir + 7za a repacked.zip * + """) + .with_artifacts("extracted/SourceDir/repacked.zip") + .with_index_and_artifacts_expire_in(CONFIG.repacked_msi_files_expire_in) + .find_or_create("repacked-msi." 
+ sha256) + ) + return self \ + .with_dependencies(repack_task) \ + .with_directory_mount("public/repacked.zip", task_id=repack_task, path=path) + + def with_python2(self): + return self \ + .with_repacked_msi( + "https://www.python.org/ftp/python/2.7.15/python-2.7.15.amd64.msi", + sha256="5e85f3c4c209de98480acbf2ba2e71a907fd5567a838ad4b6748c76deb286ad7", + path="python2" + ) \ + .with_early_script(""" + python -m ensurepip + pip install virtualenv==16.0.0 + """) \ + .with_path_from_homedir("python2", "python2\\Scripts") + + + +class DockerWorkerTask(Task): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.docker_image = "ubuntu:bionic-20180821" + self.max_run_time_minutes = 30 + self.scripts = [] + self.env = {} + self.caches = {} + self.features = {} + self.artifacts = [] + + with_docker_image = chaining(setattr, "docker_image") + with_max_run_time_minutes = chaining(setattr, "max_run_time_minutes") + with_artifacts = chaining(append_to_attr, "artifacts") + with_script = chaining(append_to_attr, "scripts") + with_early_script = chaining(prepend_to_attr, "scripts") + with_caches = chaining(update_attr, "caches") + with_env = chaining(update_attr, "env") + + def build_worker_payload(self): + worker_payload = { + "image": self.docker_image, + "maxRunTime": self.max_run_time_minutes * 60, + "command": [ + "/bin/bash", "--login", "-x", "-e", "-c", + deindent("\n".join(self.scripts)) + ], + } + return dict_update_if_truthy( + worker_payload, + env=self.env, + cache=self.caches, + features=self.features, + artifacts={ + "public/" + url_basename(path): { + "type": "file", + "path": path, + "expires": SHARED.from_now_json(self.index_and_artifacts_expire_in), + } + for path in self.artifacts + }, + ) + + def with_features(self, *names): + self.features.update({name: True for name in names}) + return self + + def with_repo(self): + return self \ + .with_env(**git_env()) \ + .with_early_script(""" + git init repo + cd repo + git fetch --depth 
1 "$GIT_URL" "$GIT_REF" + git reset --hard "$GIT_SHA" + """) + + def with_dockerfile(self, dockerfile): + basename = os.path.basename(dockerfile) + suffix = ".dockerfile" + assert basename.endswith(suffix) + image_name = basename[:-len(suffix)] + dockerfile_contents = expand_dockerfile(dockerfile) digest = hashlib.sha256(dockerfile_contents).hexdigest() - return self.find_or_create_task( - index_bucket="docker-image", - index_key=digest, - index_expiry=self.docker_image_cache_expiry, - - task_name="Docker image: " + image_name(dockerfile), - script=""" + image_build_task = ( + DockerWorkerTask("Docker image: " + image_name) + .with_worker_type(self.worker_type) + .with_max_run_time_minutes(30) + .with_index_and_artifacts_expire_in(CONFIG.docker_images_expire_in) + .with_features("dind") + .with_env(DOCKERFILE=dockerfile_contents) + .with_artifacts("/image.tar.lz4") + .with_script(""" echo "$DOCKERFILE" | docker build -t taskcluster-built - - docker save taskcluster-built | lz4 > /%s - """ % self.DOCKER_IMAGE_ARTIFACT_FILENAME, - env={ - "DOCKERFILE": dockerfile_contents, - }, - artifacts=[ - "/" + self.DOCKER_IMAGE_ARTIFACT_FILENAME, - ], - max_run_time_minutes=20, - docker_image=self.DOCKER_IMAGE_BUILDER_IMAGE, - features={ - "dind": True, # docker-in-docker - }, - with_repo=False, + docker save taskcluster-built | lz4 > /image.tar.lz4 + """) + .with_docker_image( + # https://github.com/servo/taskcluster-bootstrap-docker-images#image-builder + "servobrowser/taskcluster-bootstrap:image-builder@sha256:" \ + "0a7d012ce444d62ffb9e7f06f0c52fedc24b68c2060711b313263367f7272d9d" + ) + .find_or_create("docker-image." 
+ digest) ) - def create_task(self, *, task_name, script, max_run_time_minutes, - docker_image=None, dockerfile=None, # One of these is required - artifacts=None, dependencies=None, env=None, cache=None, scopes=None, - routes=None, extra=None, features=None, mounts=None, homedir_path=None, - worker_type=None, with_repo=True, sparse_checkout=None): - """ - Schedule a new task. Returns the new task ID. - - One of `docker_image` or `dockerfile` (but not both) must be given. - If `dockerfile` is given, the corresponding Docker image is built as needed and cached. - - `with_repo` indicates whether `script` should start in a clone of the git repository. - """ - # https://docs.taskcluster.net/docs/reference/workers/docker-worker/docs/environment - decision_task_id = os.environ["TASK_ID"] - - dependencies = [decision_task_id] + (dependencies or []) - - # Set in .taskcluster.yml - task_owner = os.environ["TASK_OWNER"] - task_source = os.environ["TASK_SOURCE"] - - env = env or {} - - if with_repo: - # Set in .taskcluster.yml - for k in ["GIT_URL", "GIT_REF", "GIT_SHA"]: - env[k] = os.environ[k] - - worker_type = worker_type or self.default_worker_type - if "docker" in worker_type: - if docker_image and dockerfile: - raise TypeError("cannot use both `docker_image` or `dockerfile`") - if not docker_image and not dockerfile: - raise TypeError("need one of `docker_image` or `dockerfile`") - - if dockerfile: - image_build_task = self.find_or_build_docker_image(dockerfile) - dependencies.append(image_build_task) - docker_image = { - "type": "task-image", - "taskId": image_build_task, - "path": "public/" + self.DOCKER_IMAGE_ARTIFACT_FILENAME, - } - - if with_repo: - git = """ - git init repo - cd repo - git fetch --depth 1 "$GIT_URL" "$GIT_REF" - git reset --hard "$GIT_SHA" - """ - script = git + script - command = ["/bin/bash", "--login", "-x", "-e", "-c", deindent(script)] - else: - command = [ - "set PATH=%CD%\\{};%PATH%".format(p) - for p in reversed(homedir_path or []) - ] - 
if with_repo: - if with_repo: - git = """ - git init repo - cd repo - """ - if sparse_checkout: - git += """ - git config core.sparsecheckout true - echo %SPARSE_CHECKOUT_BASE64% > .git\\info\\sparse.b64 - certutil -decode .git\\info\\sparse.b64 .git\\info\\sparse-checkout - type .git\\info\\sparse-checkout - """ - env["SPARSE_CHECKOUT_BASE64"] = base64.b64encode( - "\n".join(sparse_checkout).encode("utf-8")) - command.append(deindent(git + """ - git fetch --depth 1 %GIT_URL% %GIT_REF% - git reset --hard %GIT_SHA% - """)) - command.append(deindent(script)) - - worker_payload = { - "maxRunTime": max_run_time_minutes * 60, - "command": command, - "env": env, - } - if docker_image: - worker_payload["image"] = docker_image - if cache: - worker_payload["cache"] = cache - if features: - worker_payload["features"] = features - if mounts: - worker_payload["mounts"] = mounts - if artifacts: - if "docker" in worker_type: - worker_payload["artifacts"] = { - "public/" + os.path.basename(path): { - "type": "file", - "path": path, - "expires": self.from_now_json(expires), - } - for path, expires in artifacts - } - else: - worker_payload["artifacts"] = [ - { - "type": "file", - "name": "public/" + os.path.basename(path), - "path": path, - "expires": self.from_now_json(expires), - } - for path, expires in artifacts - ] - payload = { - "taskGroupId": decision_task_id, - "dependencies": dependencies or [], - "schedulerId": "taskcluster-github", - "provisionerId": "aws-provisioner-v1", - "workerType": worker_type, - - "created": self.from_now_json(""), - "deadline": self.from_now_json("1 day"), - "metadata": { - "name": self.task_name_template % task_name, - "description": "", - "owner": task_owner, - "source": task_source, - }, - "scopes": (scopes or []) + self.scopes_for_all_subtasks, - "routes": (routes or []) + self.routes_for_all_subtasks, - "extra": extra or {}, - "payload": worker_payload, - } - - task_id = taskcluster.slugId().decode("utf8") - 
self.queue_service.createTask(task_id, payload) - print("Scheduled %s" % task_name) - return task_id - - -def image_name(dockerfile): - """ - Guess a short name based on the path `dockerfile`. - """ - basename = os.path.basename(dockerfile) - suffix = ".dockerfile" - if basename == "Dockerfile": - return os.path.basename(os.path.dirname(os.path.abspath(dockerfile))) - elif basename.endswith(suffix): - return basename[:-len(suffix)] - else: - return basename + return self \ + .with_dependencies(image_build_task) \ + .with_docker_image({ + "type": "task-image", + "path": "public/image.tar.lz4", + "taskId": image_build_task, + }) def expand_dockerfile(dockerfile): @@ -303,5 +483,26 @@ def expand_dockerfile(dockerfile): return b"\n".join([expand_dockerfile(path), rest]) +def git_env(): + assert CONFIG.git_url + assert CONFIG.git_ref + assert CONFIG.git_sha + return { + "GIT_URL": CONFIG.git_url, + "GIT_REF": CONFIG.git_ref, + "GIT_SHA": CONFIG.git_sha, + } + +def dict_update_if_truthy(d, **kwargs): + for key, value in kwargs.items(): + if value: + d[key] = value + return d + + def deindent(string): - return re.sub("\n +", " \n ", string).strip() + return re.sub("\n +", "\n ", string).strip() + + +def url_basename(url): + return url.rpartition("/")[-1] \ No newline at end of file diff --git a/etc/taskcluster/mock.py b/etc/taskcluster/mock.py index 085c82c2917..02d55beb1a6 100755 --- a/etc/taskcluster/mock.py +++ b/etc/taskcluster/mock.py @@ -32,14 +32,15 @@ class Index: Queue = stringDate = fromNow = slugId = MagicMock() sys.modules["taskcluster"] = sys.modules[__name__] sys.dont_write_bytecode = True -code = open(os.path.join(os.path.dirname(__file__), "decision-task.py"), "rb").read() -for k in "TASK_ID TASK_OWNER TASK_SOURCE GIT_URL GIT_REF GIT_SHA".split(): - os.environ[k] = k +os.environ.update(**{k: k for k in "TASK_ID TASK_OWNER TASK_SOURCE GIT_URL GIT_SHA".split()}) +os.environ["GIT_REF"] = "refs/heads/auto" +import decision_task -print("Push:") 
-os.environ["TASK_FOR"] = "github-push" -exec(code) +print("\n# Push:") +decision_task.main("github-push", mock=True) -print("Daily:") -os.environ["TASK_FOR"] = "daily" -exec(code) +print("\n# Push with hot caches:") +decision_task.main("github-push", mock=True) + +print("\n# Daily:") +decision_task.main("daily", mock=True) From 0a7811111e3330c44b8d0d1dc2fdd280b1ab1d1c Mon Sep 17 00:00:00 2001 From: Simon Sapin Date: Sun, 7 Oct 2018 12:28:12 +0200 Subject: [PATCH 18/25] Time fetching dependencies on Windows separately from build --- etc/taskcluster/decision_task.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/etc/taskcluster/decision_task.py b/etc/taskcluster/decision_task.py index 67e8329bb84..8281eb299ad 100644 --- a/etc/taskcluster/decision_task.py +++ b/etc/taskcluster/decision_task.py @@ -105,10 +105,14 @@ def windows_dev(): sha256="b13ea68c1365098c66871f0acab7fd3daa2f2795b5e893fcbb5cd7253f2c08fa", path="gst", ) + .with_env(LIB="%HOMEDRIVE%%HOMEPATH%\\gst\\gstreamer\\1.0\\x86_64\\lib;%LIB%") .with_script( - "set LIB=%HOMEDRIVE%%HOMEPATH%\\gst\\gstreamer\\1.0\\x86_64\\lib;%LIB%", - "call mach.bat build --dev", - "call mach.bat test-unit", + # Not necessary as this would be done at the start of `build`, + # but this allows timing it separately. 
+ "mach fetch", + + "mach build --dev", + "mach test-unit", ) .create() ) From 2c7abbb2bf6eea23a37ffab981dff90b4e479e58 Mon Sep 17 00:00:00 2001 From: Simon Sapin Date: Sun, 7 Oct 2018 14:50:36 +0200 Subject: [PATCH 19/25] Taskcluster: package the Windows build --- etc/taskcluster/decision_task.py | 9 +++++++++ etc/taskcluster/decisionlib.py | 9 ++++++--- 2 files changed, 15 insertions(+), 3 deletions(-) diff --git a/etc/taskcluster/decision_task.py b/etc/taskcluster/decision_task.py index 8281eb299ad..1c9dcdc1867 100644 --- a/etc/taskcluster/decision_task.py +++ b/etc/taskcluster/decision_task.py @@ -114,6 +114,15 @@ def windows_dev(): "mach build --dev", "mach test-unit", ) + .with_directory_mount( + "https://github.com/wixtoolset/wix3/releases/download/wix3111rtm/wix311-binaries.zip", + sha256="37f0a533b0978a454efb5dc3bd3598becf9660aaf4287e55bf68ca6b527d051d", + path="wix", + ) + .with_path_from_homedir("wix") + .with_script("mach package --dev") + .with_artifacts("repo/target/debug/msi/Servo.exe", + "repo/target/debug/msi/Servo.zip") .create() ) diff --git a/etc/taskcluster/decisionlib.py b/etc/taskcluster/decisionlib.py index 2cfbd42d4ee..7303a9ba552 100644 --- a/etc/taskcluster/decisionlib.py +++ b/etc/taskcluster/decisionlib.py @@ -205,7 +205,6 @@ class GenericWorkerTask(Task): self.artifacts = [] with_max_run_time_minutes = chaining(setattr, "max_run_time_minutes") - with_artifacts = chaining(append_to_attr, "artifacts") with_mounts = chaining(append_to_attr, "mounts") with_env = chaining(update_attr, "env") @@ -223,15 +222,19 @@ class GenericWorkerTask(Task): mounts=self.mounts, artifacts=[ { - "type": "file", + "type": type_, "path": path, "name": "public/" + url_basename(path), "expires": SHARED.from_now_json(self.index_and_artifacts_expire_in), } - for path in self.artifacts + for type_, path in self.artifacts ], ) + def with_artifacts(self, *paths, type="file"): + self.artifacts.extend((type, path) for path in paths) + return self + def 
_mount_content(self, url_or_artifact_name, task_id, sha256): if task_id: content = {"taskId": task_id, "artifact": url_or_artifact_name} From 652ff714823383fc827dc59a2e4252a0a6e51f89 Mon Sep 17 00:00:00 2001 From: Simon Sapin Date: Sun, 7 Oct 2018 16:48:55 +0200 Subject: [PATCH 20/25] Taskcluster: Add Windows release build --- etc/taskcluster/decision_task.py | 47 ++++++++++++++++++++------------ 1 file changed, 30 insertions(+), 17 deletions(-) diff --git a/etc/taskcluster/decision_task.py b/etc/taskcluster/decision_task.py index 1c9dcdc1867..81c352c11e9 100644 --- a/etc/taskcluster/decision_task.py +++ b/etc/taskcluster/decision_task.py @@ -15,6 +15,7 @@ def main(task_for, mock=False): android_arm32() windows_dev() if mock: + windows_release() linux_wpt() # https://tools.taskcluster.net/hooks/project-servo/daily @@ -43,7 +44,9 @@ linux_build_env = { "SCCACHE_IDLE_TIMEOUT": "1200", "SHELL": "/bin/dash", # For SpiderMonkey’s build system } -windows_build_env = {} +windows_build_env = { + "LIB": "%HOMEDRIVE%%HOMEPATH%\\gst\\gstreamer\\1.0\\x86_64\\lib;%LIB%", +} windows_sparse_checkout = [ "/*", "!/tests/wpt/metadata", @@ -97,15 +100,6 @@ def android_arm32(): def windows_dev(): return ( windows_build_task("Windows x86_64: dev build + unit tests") - .with_python2() - .with_rustup() - .with_repacked_msi( - url="https://gstreamer.freedesktop.org/data/pkg/windows/" + - "1.14.3/gstreamer-1.0-devel-x86_64-1.14.3.msi", - sha256="b13ea68c1365098c66871f0acab7fd3daa2f2795b5e893fcbb5cd7253f2c08fa", - path="gst", - ) - .with_env(LIB="%HOMEDRIVE%%HOMEPATH%\\gst\\gstreamer\\1.0\\x86_64\\lib;%LIB%") .with_script( # Not necessary as this would be done at the start of `build`, # but this allows timing it separately. 
@@ -113,20 +107,25 @@ def windows_dev(): "mach build --dev", "mach test-unit", + "mach package --dev", ) - .with_directory_mount( - "https://github.com/wixtoolset/wix3/releases/download/wix3111rtm/wix311-binaries.zip", - sha256="37f0a533b0978a454efb5dc3bd3598becf9660aaf4287e55bf68ca6b527d051d", - path="wix", - ) - .with_path_from_homedir("wix") - .with_script("mach package --dev") .with_artifacts("repo/target/debug/msi/Servo.exe", "repo/target/debug/msi/Servo.zip") .create() ) +def windows_release(): + return ( + windows_build_task("Windows x86_64: release build") + .with_script("mach build --release", + "mach package --release") + .with_artifacts("repo/target/release/msi/Servo.exe", + "repo/target/release/msi/Servo.zip") + .create() + ) + + def linux_wpt(): release_build_task = linux_release_build() total_chunks = 2 @@ -272,6 +271,20 @@ def windows_build_task(name): .with_max_run_time_minutes(60) .with_env(**build_env, **windows_build_env) .with_repo(sparse_checkout=windows_sparse_checkout) + .with_python2() + .with_rustup() + .with_repacked_msi( + url="https://gstreamer.freedesktop.org/data/pkg/windows/" + + "1.14.3/gstreamer-1.0-devel-x86_64-1.14.3.msi", + sha256="b13ea68c1365098c66871f0acab7fd3daa2f2795b5e893fcbb5cd7253f2c08fa", + path="gst", + ) + .with_directory_mount( + "https://github.com/wixtoolset/wix3/releases/download/wix3111rtm/wix311-binaries.zip", + sha256="37f0a533b0978a454efb5dc3bd3598becf9660aaf4287e55bf68ca6b527d051d", + path="wix", + ) + .with_path_from_homedir("wix") ) From 2d12d50b54e2093102c73e6e90421eed18facc1f Mon Sep 17 00:00:00 2001 From: Simon Sapin Date: Sun, 7 Oct 2018 16:50:03 +0200 Subject: [PATCH 21/25] Tascluster: shorten x86_64 to x64 --- etc/taskcluster/decision_task.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/etc/taskcluster/decision_task.py b/etc/taskcluster/decision_task.py index 81c352c11e9..dfdcfa126ae 100644 --- a/etc/taskcluster/decision_task.py +++ 
b/etc/taskcluster/decision_task.py @@ -58,7 +58,7 @@ windows_sparse_checkout = [ def linux_tidy_unit(): - return linux_build_task("Linux x86_64: tidy + dev build + unit tests").with_script(""" + return linux_build_task("Linux x64: tidy + dev build + unit tests").with_script(""" ./mach test-tidy --no-progress --all ./mach build --dev ./mach test-unit @@ -72,7 +72,7 @@ def linux_tidy_unit(): def with_rust_nightly(): - return linux_build_task("Linux x86_64: with Rust Nightly").with_script(""" + return linux_build_task("Linux x64: with Rust Nightly").with_script(""" echo "nightly" > rust-toolchain ./mach build --dev ./mach test-unit @@ -82,7 +82,7 @@ def with_rust_nightly(): def android_arm32(): return ( linux_build_task("Android ARMv7: build") - # file: NDK parses $(file $SHELL) to tell x86_64 from x86 + # file: NDK parses $(file $SHELL) to tell x64 host from x86 # wget: servo-media-gstreamer’s build script .with_script(""" apt-get install -y --no-install-recommends openjdk-8-jdk-headless file wget @@ -99,7 +99,7 @@ def android_arm32(): def windows_dev(): return ( - windows_build_task("Windows x86_64: dev build + unit tests") + windows_build_task("Windows x64: dev build + unit tests") .with_script( # Not necessary as this would be done at the start of `build`, # but this allows timing it separately. 
@@ -117,7 +117,7 @@ def windows_dev(): def windows_release(): return ( - windows_build_task("Windows x86_64: release build") + windows_build_task("Windows x64: release build") .with_script("mach build --release", "mach package --release") .with_artifacts("repo/target/release/msi/Servo.exe", @@ -136,7 +136,7 @@ def linux_wpt(): def linux_release_build(): return ( - linux_build_task("Linux x86_64: release build") + linux_build_task("Linux x64: release build") .with_script(""" ./mach build --release --with-debug-assertions -p servo ./etc/ci/lockfile_changed.sh @@ -146,12 +146,12 @@ def linux_release_build(): target/release/build/osmesa-src-*/out/lib/gallium """) .with_artifacts("/target.tar.gz") - .find_or_create("build.linux_x86-64_release." + CONFIG.git_sha) + .find_or_create("build.linux_x64_release." + CONFIG.git_sha) ) def wpt_chunk(release_build_task, total_chunks, this_chunk, extra): - name = "Linux x86_64: WPT chunk %s / %s" % (this_chunk, total_chunks) + name = "Linux x64: WPT chunk %s / %s" % (this_chunk, total_chunks) script = """ ./mach test-wpt \ --release \ From 829b44e9f0a30bad4527544ba57bcbb7b7343ff9 Mon Sep 17 00:00:00 2001 From: Simon Sapin Date: Sun, 7 Oct 2018 16:51:35 +0200 Subject: [PATCH 22/25] Taskcluster: index Windows builds --- etc/taskcluster/decision_task.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/etc/taskcluster/decision_task.py b/etc/taskcluster/decision_task.py index dfdcfa126ae..2a2a2ea4505 100644 --- a/etc/taskcluster/decision_task.py +++ b/etc/taskcluster/decision_task.py @@ -111,7 +111,7 @@ def windows_dev(): ) .with_artifacts("repo/target/debug/msi/Servo.exe", "repo/target/debug/msi/Servo.zip") - .create() + .find_or_create("build.windows_x64_dev." + CONFIG.git_sha) ) @@ -122,7 +122,7 @@ def windows_release(): "mach package --release") .with_artifacts("repo/target/release/msi/Servo.exe", "repo/target/release/msi/Servo.zip") - .create() + .find_or_create("build.windows_x64_release." 
+ CONFIG.git_sha) ) From 62e4f7072b6a2a9245324fead0c78f70c16234ae Mon Sep 17 00:00:00 2001 From: Simon Sapin Date: Tue, 9 Oct 2018 10:57:04 +0200 Subject: [PATCH 23/25] Taskcluster WPT: inline `extra` variable --- etc/taskcluster/decision_task.py | 8 ++++---- etc/taskcluster/windows/first-boot.ps1 | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/etc/taskcluster/decision_task.py b/etc/taskcluster/decision_task.py index 2a2a2ea4505..dd637859eaa 100644 --- a/etc/taskcluster/decision_task.py +++ b/etc/taskcluster/decision_task.py @@ -131,7 +131,7 @@ def linux_wpt(): total_chunks = 2 for i in range(total_chunks): this_chunk = i + 1 - wpt_chunk(release_build_task, total_chunks, this_chunk, extra=(this_chunk == 1)) + wpt_chunk(release_build_task, total_chunks, this_chunk) def linux_release_build(): @@ -150,7 +150,7 @@ def linux_release_build(): ) -def wpt_chunk(release_build_task, total_chunks, this_chunk, extra): +def wpt_chunk(release_build_task, total_chunks, this_chunk): name = "Linux x64: WPT chunk %s / %s" % (this_chunk, total_chunks) script = """ ./mach test-wpt \ @@ -171,7 +171,7 @@ def wpt_chunk(release_build_task, total_chunks, this_chunk, extra): # IndexError: list index out of range # File "/repo/python/servo/testing_commands.py", line 533, in filter_intermittents # pull_request = int(last_merge.split(' ')[4][1:]) - if extra: + if this_chunk == 1: name += " + extra" script += """ ./mach test-wpt-failure @@ -295,4 +295,4 @@ CONFIG.repacked_msi_files_expire_in = build_dependencies_artifacts_expire_in if __name__ == "__main__": - main(task_for=os.environ["TASK_FOR"]) \ No newline at end of file + main(task_for=os.environ["TASK_FOR"]) diff --git a/etc/taskcluster/windows/first-boot.ps1 b/etc/taskcluster/windows/first-boot.ps1 index bbf1cdb6449..6c3106d7a0c 100644 --- a/etc/taskcluster/windows/first-boot.ps1 +++ b/etc/taskcluster/windows/first-boot.ps1 @@ -78,4 +78,4 @@ Start-Process C:\vs_buildtools.exe -ArgumentList (` # Now shutdown, in 
preparation for creating an image -shutdown -s \ No newline at end of file +shutdown -s From 09d8339b465f3171c4b24ce8b94e43e09d03a176 Mon Sep 17 00:00:00 2001 From: Simon Sapin Date: Tue, 9 Oct 2018 11:37:38 +0200 Subject: [PATCH 24/25] Code coverage for decisionlib --- .gitignore | 1 + etc/memory_reports_over_time.py | 0 etc/taskcluster/decision_task.py | 9 +++++---- etc/taskcluster/decisionlib.py | 13 +++++-------- etc/taskcluster/docker/base.dockerfile | 2 +- etc/taskcluster/docker/build.dockerfile | 3 +++ etc/taskcluster/mock.py | 12 ++++++++++-- 7 files changed, 25 insertions(+), 15 deletions(-) mode change 100644 => 100755 etc/memory_reports_over_time.py diff --git a/.gitignore b/.gitignore index 94acd5b1d03..55bd1fc1ce4 100644 --- a/.gitignore +++ b/.gitignore @@ -19,6 +19,7 @@ *.csv *.rej *.orig +.coverage .DS_Store Servo.app .config.mk.last diff --git a/etc/memory_reports_over_time.py b/etc/memory_reports_over_time.py old mode 100644 new mode 100755 diff --git a/etc/taskcluster/decision_task.py b/etc/taskcluster/decision_task.py index dd637859eaa..24aa16ac629 100644 --- a/etc/taskcluster/decision_task.py +++ b/etc/taskcluster/decision_task.py @@ -17,6 +17,7 @@ def main(task_for, mock=False): if mock: windows_release() linux_wpt() + linux_build_task("Indexed by task definition").find_or_create() # https://tools.taskcluster.net/hooks/project-servo/daily elif task_for == "daily": @@ -24,7 +25,7 @@ def main(task_for, mock=False): with_rust_nightly() android_arm32() - else: + else: # pragma: no cover raise ValueError("Unrecognized $TASK_FOR value: %r", task_for) @@ -64,8 +65,8 @@ def linux_tidy_unit(): ./mach test-unit ./mach package --dev ./mach test-tidy --no-progress --self-test - python2.7 ./etc/memory_reports_over_time.py --test - python3 ./etc/taskcluster/mock.py + ./etc/memory_reports_over_time.py --test + ./etc/taskcluster/mock.py ./etc/ci/lockfile_changed.sh ./etc/ci/check_no_panic.sh """).create() @@ -294,5 +295,5 @@ CONFIG.docker_images_expire_in = 
build_dependencies_artifacts_expire_in CONFIG.repacked_msi_files_expire_in = build_dependencies_artifacts_expire_in -if __name__ == "__main__": +if __name__ == "__main__": # pragma: no cover main(task_for=os.environ["TASK_FOR"]) diff --git a/etc/taskcluster/decisionlib.py b/etc/taskcluster/decisionlib.py index 7303a9ba552..bdd5f1aebb8 100644 --- a/etc/taskcluster/decisionlib.py +++ b/etc/taskcluster/decisionlib.py @@ -82,8 +82,6 @@ class Shared: CONFIG = Config() SHARED = Shared() -from_now_json = SHARED.from_now_json -now = SHARED.now def chaining(op, attr): @@ -127,7 +125,7 @@ class Task: with_extra = chaining(update_attr, "extra") - def build_worker_payload(self): + def build_worker_payload(self): # pragma: no cover raise NotImplementedError def create(self): @@ -184,10 +182,9 @@ class Task: return task_id try: - result = SHARED.index_service.findTask(index_path) - task_id = result["taskId"] + task_id = SHARED.index_service.findTask(index_path)["taskId"] except taskcluster.TaskclusterRestFailure as e: - if e.status_code != 404: + if e.status_code != 404: # pragma: no cover raise self.routes.append("index." 
+ index_path) task_id = self.create() @@ -208,7 +205,7 @@ class GenericWorkerTask(Task): with_mounts = chaining(append_to_attr, "mounts") with_env = chaining(update_attr, "env") - def build_command(self): + def build_command(self): # pragma: no cover raise NotImplementedError def build_worker_payload(self): @@ -263,7 +260,7 @@ class GenericWorkerTask(Task): raise ValueError( "%r does not appear to be in one of the supported formats: %r" % (url_or_artifact_name, ", ".join(supported_formats)) - ) + ) # pragma: no cover class WindowsGenericWorkerTask(GenericWorkerTask): diff --git a/etc/taskcluster/docker/base.dockerfile b/etc/taskcluster/docker/base.dockerfile index 891469e8df4..0d9385f4054 100644 --- a/etc/taskcluster/docker/base.dockerfile +++ b/etc/taskcluster/docker/base.dockerfile @@ -15,7 +15,7 @@ RUN \ ca-certificates \ # # Running mach - python2.7 \ + python \ virtualenv \ # # Installing rustup and sccache (build dockerfile) or fetching build artifacts (run tasks) diff --git a/etc/taskcluster/docker/build.dockerfile b/etc/taskcluster/docker/build.dockerfile index 8b7b3f75f6f..294adb44796 100644 --- a/etc/taskcluster/docker/build.dockerfile +++ b/etc/taskcluster/docker/build.dockerfile @@ -2,6 +2,9 @@ RUN \ apt-get install -qy --no-install-recommends \ + # + # Testing decisionlib (see etc/taskcluster/mock.py) + python3-coverage \ # # Multiple C/C++ dependencies built from source g++ \ diff --git a/etc/taskcluster/mock.py b/etc/taskcluster/mock.py index 02d55beb1a6..d3e19d88080 100755 --- a/etc/taskcluster/mock.py +++ b/etc/taskcluster/mock.py @@ -1,4 +1,4 @@ -#!/usr/bin/python3 +#!/bin/bash # Copyright 2018 The Servo Project Developers. See the COPYRIGHT # file at the top-level directory of this distribution. @@ -9,6 +9,12 @@ # option. This file may not be copied, modified, or distributed # except according to those terms. 
+''''set -e +python3 -m coverage run $0 +python3 -m coverage report -m --fail-under 100 +exit +''' + """ Run the decision task with fake Taskcluster APIs, to catch Python errors before pushing. """ @@ -29,7 +35,9 @@ class Index: raise TaskclusterRestFailure -Queue = stringDate = fromNow = slugId = MagicMock() +stringDate = str +slugId = b"id".lower +Queue = fromNow = MagicMock() sys.modules["taskcluster"] = sys.modules[__name__] sys.dont_write_bytecode = True os.environ.update(**{k: k for k in "TASK_ID TASK_OWNER TASK_SOURCE GIT_URL GIT_SHA".split()}) From bd6762314ff84d0bd83585a3e17a8e8909df75c0 Mon Sep 17 00:00:00 2001 From: Simon Sapin Date: Tue, 9 Oct 2018 13:58:29 +0200 Subject: [PATCH 25/25] decisionlib docs --- etc/taskcluster/decisionlib.py | 163 +++++++++++++++++++++++++++++++++ 1 file changed, 163 insertions(+) diff --git a/etc/taskcluster/decisionlib.py b/etc/taskcluster/decisionlib.py index bdd5f1aebb8..34902dc9bfc 100644 --- a/etc/taskcluster/decisionlib.py +++ b/etc/taskcluster/decisionlib.py @@ -97,6 +97,14 @@ def update_attr(self, attr, **kwargs): getattr(self, attr).update(kwargs) class Task: + """ + A task definition, waiting to be created. + + Typical is to use chain the `with_*` methods to set or extend this object’s attributes, + then call the `crate` or `find_or_create` method to schedule a task. + + This is an abstract class that needs to be specialized for different worker implementations. + """ def __init__(self, name): self.name = name self.description = "" @@ -111,6 +119,7 @@ class Task: self.routes = [] self.extra = {} + # All `with_*` methods return `self`, so multiple method calls can be chained. 
with_description = chaining(setattr, "description") with_scheduler_id = chaining(setattr, "scheduler_id") with_provisioner_id = chaining(setattr, "provisioner_id") @@ -126,9 +135,21 @@ class Task: with_extra = chaining(update_attr, "extra") def build_worker_payload(self): # pragma: no cover + """ + Overridden by sub-classes to return a dictionary in a worker-specific format, + which is used as the `payload` property in a task definition request + passed to the Queue’s `createTask` API. + + + """ raise NotImplementedError def create(self): + """ + Call the Queue’s `createTask` API to schedule a new task, and return its ID. + + + """ worker_payload = self.build_worker_payload() assert CONFIG.decision_task_id @@ -171,6 +192,18 @@ class Task: return task_id def find_or_create(self, index_path=None): + """ + Try to find a task in the Index and return its ID. + + The index path used is `{CONFIG.index_prefix}.{index_path}`. + `index_path` defaults to `by-task-definition.{sha256}` + with a hash of the worker payload and worker type. + + If no task is found in the index, + it is created with a route to add it to the index at that same path if it succeeds. + + + """ if not index_path: worker_type = self.worker_type index_by = json.dumps([worker_type, self.build_worker_payload()]).encode("utf-8") @@ -194,6 +227,13 @@ class Task: class GenericWorkerTask(Task): + """ + Task definition for a worker type that runs the `generic-worker` implementation. + + This is an abstract class that needs to be specialized for different operating systems. + + + """ def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.max_run_time_minutes = 30 @@ -206,9 +246,18 @@ class GenericWorkerTask(Task): with_env = chaining(update_attr, "env") def build_command(self): # pragma: no cover + """ + Overridden by sub-classes to return the `command` property of the worker payload, + in the format appropriate for the operating system. 
+ """ raise NotImplementedError def build_worker_payload(self): + """ + Return a `generic-worker` worker payload. + + + """ worker_payload = { "command": self.build_command(), "maxRunTime": self.max_run_time_minutes * 60 @@ -229,6 +278,14 @@ class GenericWorkerTask(Task): ) def with_artifacts(self, *paths, type="file"): + """ + Add each path in `paths` as a task artifact + that expires in `self.index_and_artifacts_expire_in`. + + `type` can be `"file"` or `"directory"`. + + Paths are relative to the task’s home directory. + """ self.artifacts.extend((type, path) for path in paths) return self @@ -242,12 +299,35 @@ class GenericWorkerTask(Task): return content def with_file_mount(self, url_or_artifact_name, task_id=None, sha256=None, path=None): + """ + Make `generic-worker` download a file before the task starts + and make it available at `path` (which is relative to the task’s home directory). + + If `sha256` is provided, `generic-worker` will hash the downloaded file + and check it against the provided signature. + + If `task_id` is provided, this task will depend on that task + and `url_or_artifact_name` is the name of an artifact of that task. + """ return self.with_mounts({ "file": path or url_basename(url_or_artifact_name), "content": self._mount_content(url_or_artifact_name, task_id, sha256), }) def with_directory_mount(self, url_or_artifact_name, task_id=None, sha256=None, path=None): + """ + Make `generic-worker` download an archive before the task starts, + and uncompress it at `path` (which is relative to the task’s home directory). + + `url_or_artifact_name` must end in one of `.rar`, `.tar.bz2`, `.tar.gz`, or `.zip`. + The archive must be in the corresponding format. + + If `sha256` is provided, `generic-worker` will hash the downloaded archive + and check it against the provided signature. + + If `task_id` is provided, this task will depend on that task + and `url_or_artifact_name` is the name of an artifact of that task. 
+ """ supported_formats = ["rar", "tar.bz2", "tar.gz", "zip"] for fmt in supported_formats: suffix = "." + fmt @@ -264,6 +344,11 @@ class GenericWorkerTask(Task): class WindowsGenericWorkerTask(GenericWorkerTask): + """ + Task definition for a `generic-worker` task running on Windows. + + Scripts are written as `.bat` files executed with `cmd.exe`. + """ def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.scripts = [] @@ -275,11 +360,24 @@ class WindowsGenericWorkerTask(GenericWorkerTask): return [deindent(s) for s in self.scripts] def with_path_from_homedir(self, *paths): + """ + Interpret each path in `paths` as relative to the task’s home directory, + and add it to the `PATH` environment variable. + """ for p in paths: self.with_early_script("set PATH=%HOMEDRIVE%%HOMEPATH%\\{};%PATH%".format(p)) return self def with_repo(self, sparse_checkout=None): + """ + Make a shallow clone the git repository at the start of the task. + This uses `CONFIG.git_url`, `CONFIG.git_ref`, and `CONFIG.git_sha`, + and creates the clone in a `repo` directory in the task’s home directory. + + If `sparse_checkout` is given, it must be a list of path patterns + to be used in `.git/info/sparse-checkout`. + See . + """ git = """ git init repo cd repo @@ -303,6 +401,11 @@ class WindowsGenericWorkerTask(GenericWorkerTask): .with_env(**git_env()) def with_git(self): + """ + Make the task download `git-for-windows` and make it available for `git` commands. + + This is implied by `with_repo`. + """ return self \ .with_path_from_homedir("git\\cmd") \ .with_directory_mount( @@ -313,6 +416,10 @@ class WindowsGenericWorkerTask(GenericWorkerTask): ) def with_rustup(self): + """ + Download rustup.rs and make it available to task commands, + but does not download any default toolchain. 
+ """ return self \ .with_path_from_homedir(".cargo\\bin") \ .with_early_script( @@ -325,6 +432,17 @@ class WindowsGenericWorkerTask(GenericWorkerTask): ) def with_repacked_msi(self, url, sha256, path): + """ + Download an MSI file from `url`, extract the files in it with `lessmsi`, + and make them available in the directory at `path` (relative to the task’s home directory). + + `sha256` is required and the MSI file must have that hash. + + The file extraction (and recompression in a ZIP file) is done in a separate task, + wich is indexed based on `sha256` and cached for `CONFIG.repacked_msi_files_expire_in`. + + + """ repack_task = ( WindowsGenericWorkerTask("MSI repack: " + url) .with_worker_type(self.worker_type) @@ -356,6 +474,14 @@ class WindowsGenericWorkerTask(GenericWorkerTask): .with_directory_mount("public/repacked.zip", task_id=repack_task, path=path) def with_python2(self): + """ + Make Python 2, pip, and virtualenv accessible to the task’s commands. + + For Python 3, use `with_directory_mount` and the "embeddable zip file" distribution + from python.org. + You may need to remove `python37._pth` from the ZIP in order to work around + . + """ return self \ .with_repacked_msi( "https://www.python.org/ftp/python/2.7.15/python-2.7.15.amd64.msi", @@ -371,6 +497,13 @@ class WindowsGenericWorkerTask(GenericWorkerTask): class DockerWorkerTask(Task): + """ + Task definition for a worker type that runs the `generic-worker` implementation. + + Scripts are interpreted with `bash`. + + + """ def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.docker_image = "ubuntu:bionic-20180821" @@ -390,6 +523,11 @@ class DockerWorkerTask(Task): with_env = chaining(update_attr, "env") def build_worker_payload(self): + """ + Return a `docker-worker` worker payload. 
+ + + """ worker_payload = { "image": self.docker_image, "maxRunTime": self.max_run_time_minutes * 60, @@ -414,10 +552,23 @@ class DockerWorkerTask(Task): ) def with_features(self, *names): + """ + Enable the give `docker-worker` features. + + + """ self.features.update({name: True for name in names}) return self def with_repo(self): + """ + Make a shallow clone the git repository at the start of the task. + This uses `CONFIG.git_url`, `CONFIG.git_ref`, and `CONFIG.git_sha`, + and creates the clone in a `/repo` directory + at the root of the Docker container’s filesystem. + + `git` and `ca-certificate` need to be installed in the Docker image. + """ return self \ .with_env(**git_env()) \ .with_early_script(""" @@ -428,6 +579,18 @@ class DockerWorkerTask(Task): """) def with_dockerfile(self, dockerfile): + """ + Build a Docker image based on the given `Dockerfile`, and use it for this task. + + `dockerfile` is a path in the filesystem where this code is running. + Some non-standard syntax is supported, see `expand_dockerfile`. + + The image is indexed based on a hash of the expanded `Dockerfile`, + and cached for `CONFIG.docker_images_expire_in`. + + Images are built without any *context*. + + """ basename = os.path.basename(dockerfile) suffix = ".dockerfile" assert basename.endswith(suffix)