Simon Sapin 2018-09-21 12:38:10 +02:00
commit 17a0061219
7 changed files with 529 additions and 0 deletions

@@ -0,0 +1,50 @@
version: 1
policy:
  # https://docs.taskcluster.net/docs/reference/integrations/taskcluster-github/docs/taskcluster-yml-v1#pull-requests
  pullRequests: collaborators
tasks:
  - $if: 'tasks_for == "github-push"'
    then:
      $if: 'event.ref == "refs/heads/master"'
      then:
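        # `as_slugid` returns the same slug each time it is called with the
        # same label while rendering this file, so the decision task is used
        # as its own task group.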
        taskGroupId: {$eval: as_slugid("decision_task")}
        taskId: {$eval: as_slugid("decision_task")}
        provisionerId: aws-provisioner-v1
        workerType: servo-docker-worker
        created: {$fromNow: ''}
        deadline: {$fromNow: '1 day'}
        metadata:
          name: "Taskcluster experiments for Servo: decision task"
          description: ""
          owner: &task_owner ${event.pusher.name}@users.noreply.github.com
          source: &task_source ${event.compare}
        scopes:
          - "queue:scheduler-id:taskcluster-github"
          # Granted to role "repo:github.com/servo/servo-taskcluster-experiments:branch:master"
          - "queue:create-task:highest:aws-provisioner-v1/servo-*"
          - "docker-worker:cache:cargo-*"
          - "queue:route:index.project.servo.servo-taskcluster-experiments.*"
        payload:
          maxRunTime: {$eval: '20 * 60'}
          # https://github.com/servo/taskcluster-bootstrap-docker-images#decision-task
          image: "servobrowser/taskcluster-bootstrap:decision-task@sha256:28045b7ec0485ef363f8cb14f194008b47e9ede99f2ea40a1e945e921fce976e"
          features:
            taskclusterProxy: true
          env:
            GIT_URL: ${event.repository.clone_url}
            GIT_REF: ${event.ref}
            GIT_SHA: ${event.after}
            TASK_OWNER: *task_owner
            TASK_SOURCE: *task_source
          command:
            - /bin/bash
            - '--login'
            - '-c'
            - >-
                git init repo &&
                cd repo &&
                git fetch --depth 1 "$GIT_URL" "$GIT_REF" &&
                git reset --hard "$GIT_SHA" &&
                python3 decision-task.py

@@ -0,0 +1,13 @@
#!/bin/sh
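# Fetch an artifact of a given Taskcluster task, forwarding any extra
# arguments (e.g. `-o <file>`) to curl.
# Usage: <this script> <task-id> <artifact, e.g. public/target.tar.gz> [curl args...]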
task_id="$1"
artifact="$2"
shift 2
url="https://queue.taskcluster.net/v1/task/${task_id}/artifacts/${artifact}"
echo "Fetching $url" >&2
curl \
    --retry 5 \
    --connect-timeout 10 \
    --location \
    --fail \
    "$url" \
    "$@"

@@ -0,0 +1,164 @@
# coding: utf8

# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

import os.path
from decisionlib import DecisionTask


def main():
    decision = DecisionTask(
        project_name="Servo",  # Used in task names
        route_prefix="project.servo.servo",
        worker_type="servo-docker-worker",
    )

    # FIXME: remove this before merging in servo/servo
    os.environ["GIT_URL"] = "https://github.com/SimonSapin/servo"
    os.environ["GIT_REF"] = "refs/heads/taskcluster-experiments-20180920"
    os.environ["GIT_SHA"] = "a6dbfdd29f9b3f0ce0c13adc79fad99538a9a44b"
    decision.docker_image_cache_expiry = "1 week"
    decision.route_prefix = "project.servo.servo-taskcluster-experiments"
    # ~

    build_artifacts_expiry = "1 week"
    log_artifacts_expiry = "1 year"

    # https://docs.taskcluster.net/docs/reference/workers/docker-worker/docs/caches
    cache_scopes = [
        "docker-worker:cache:cargo-*",
    ]
    build_caches = {
        "cargo-registry-cache": "/root/.cargo/registry",
        "cargo-git-cache": "/root/.cargo/git",
        "cargo-rustup": "/root/.rustup",
        "cargo-sccache": "/root/.cache/sccache",
    }
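    # docker-worker persists these caches on each worker between tasks;
    # mounting them requires the matching "docker-worker:cache:*" scope above.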
    build_env = {
        "RUST_BACKTRACE": "1",
        "RUSTFLAGS": "-Dwarnings",
        "CARGO_INCREMENTAL": "0",
        "SCCACHE_IDLE_TIMEOUT": "1200",
        "CCACHE": "sccache",
        "RUSTC_WRAPPER": "sccache",
    }
    build_kwargs = {
        "max_run_time_minutes": 60,
        "dockerfile": dockerfile_path("build"),
        "env": build_env,
        "scopes": cache_scopes,
        "cache": build_caches,
    }

    decision.create_task(
        task_name="Linux x86_64: tidy + dev build + unit tests",
        script="""
            ./mach test-tidy --no-progress --all
            ./mach build --dev
            ./mach test-unit
            ./mach package --dev
            ./mach test-tidy --no-progress --self-test
            python2.7 ./etc/memory_reports_over_time.py --test
            ./etc/ci/lockfile_changed.sh
            ./etc/ci/check_no_panic.sh
        """,
        **build_kwargs
    )

    release_build_task = decision.find_or_create_task(
        route_bucket="build.linux_x86-64_release",
        route_key=os.environ["GIT_SHA"],
        route_expiry=build_artifacts_expiry,
        task_name="Linux x86_64: release build",
        script="""
            ./mach build --release --with-debug-assertions -p servo
            ./etc/ci/lockfile_changed.sh
            tar -czf /target.tar.gz \
                target/release/servo \
                target/release/build/osmesa-src-*/output \
                target/release/build/osmesa-src-*/out/lib/gallium
        """,
        artifacts=[
            "/target.tar.gz",
        ],
        **build_kwargs
    )
    def create_run_task(*, script, env=None, **kwargs):
        fetch_build = """
            curl \
                "https://queue.taskcluster.net/v1/task/${BUILD_TASK_ID}/artifacts/public/target.tar.gz" \
                --retry 5 \
                --connect-timeout 10 \
                --location \
                --fail \
            | tar -xz
        """
        kwargs.setdefault("artifacts", []).extend(
            ("/repo/" + word, log_artifacts_expiry)
            for word in script.split() if word.endswith(".log")
        )
        decision.create_task(
            script=fetch_build + script,
            env=dict(**env or {}, BUILD_TASK_ID=release_build_task),
            dependencies=[release_build_task],
            max_run_time_minutes=60,
            dockerfile=dockerfile_path("run"),
            **kwargs
        )
    total_chunks = 2
    for i in range(total_chunks):
        chunk = i + 1
        if chunk == 1:
            name_extra = " + extra"
            script_extra = """
                ./mach test-wpt-failure
                ./mach test-wpt --release --binary-arg=--multiprocess --processes 24 \
                    --log-raw test-wpt-mp.log \
                    --log-errorsummary wpt-mp-errorsummary.log \
                    eventsource
            """
        else:
            name_extra = ""
            script_extra = ""
        script = """
            ./mach test-wpt \
                --release \
                --processes 24 \
                --total-chunks "$TOTAL_CHUNKS" \
                --this-chunk "$THIS_CHUNK" \
                --log-raw test-wpt.log \
                --log-errorsummary wpt-errorsummary.log \
                --always-succeed
            ./mach filter-intermittents \
                wpt-errorsummary.log \
                --log-intermittents intermittents.log \
                --log-filteredsummary filtered-wpt-errorsummary.log \
                --tracker-api default
        """
        # FIXME: --reporter-api default
        # IndexError: list index out of range
        #   File "/repo/python/servo/testing_commands.py", line 533, in filter_intermittents
        #     pull_request = int(last_merge.split(' ')[4][1:])
        create_run_task(
            task_name="Linux x86_64: WPT chunk %s / %s%s" % (chunk, total_chunks, name_extra),
            script=script_extra + script,
            env={
                "TOTAL_CHUNKS": total_chunks,
                "THIS_CHUNK": chunk,
            },
        )


def dockerfile_path(name):
    return os.path.join(os.path.dirname(__file__), "docker", name + ".dockerfile")
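# e.g. dockerfile_path("build") -> "<directory of this script>/docker/build.dockerfile"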


if __name__ == "__main__":
    main()

@@ -0,0 +1,227 @@
# coding: utf8

# Copyright 2018 The Servo Project Developers. See the COPYRIGHT
# file at the top-level directory of this distribution.
#
# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
# option. This file may not be copied, modified, or distributed
# except according to those terms.

"""
Project-independent library for Taskcluster decision tasks
"""

import datetime
import hashlib
import json
import os
import re
import sys
import taskcluster


class DecisionTask:
    DOCKER_IMAGE_ARTIFACT_FILENAME = "image.tar.lz4"

    # https://github.com/servo/taskcluster-bootstrap-docker-images#image-builder
    DOCKER_IMAGE_BUILDER_IMAGE = "servobrowser/taskcluster-bootstrap:image-builder@sha256:" \
        "0a7d012ce444d62ffb9e7f06f0c52fedc24b68c2060711b313263367f7272d9d"

    def __init__(self, project_name, *, route_prefix,
                 worker_type="github-worker", docker_image_cache_expiry="1 year"):
        self.project_name = project_name
        self.route_prefix = route_prefix
        self.worker_type = worker_type
        self.docker_image_cache_expiry = docker_image_cache_expiry

        # https://docs.taskcluster.net/docs/reference/workers/docker-worker/docs/features#feature-taskclusterproxy
        self.queue_service = taskcluster.Queue(options={"baseUrl": "http://taskcluster/queue/v1/"})
        self.index_service = taskcluster.Index(options={"baseUrl": "http://taskcluster/index/v1/"})

        self.now = datetime.datetime.utcnow()
        self.found_or_created_routes = {}
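
    # Offsets use taskcluster's notation, e.g. from_now_json("1 day") yields
    # an ISO 8601 timestamp 24 hours after the decision task started.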
    def from_now_json(self, offset):
        return taskcluster.stringDate(taskcluster.fromNow(offset, dateObj=self.now))

    def find_or_create_task(self, *, route_bucket, route_key, route_expiry, artifacts, **kwargs):
        route = "%s.%s.%s" % (self.route_prefix, route_bucket, route_key)

        task_id = self.found_or_created_routes.get(route)
        if task_id is not None:
            return task_id

        try:
            result = self.index_service.findTask(route)
            task_id = result["taskId"]
        except taskcluster.TaskclusterRestFailure as e:
            if e.status_code == 404:
                task_id = self.create_task(
                    routes=[
                        "index." + route,
                    ],
                    extra={
                        "index": {
                            "expires": self.from_now_json(route_expiry),
                        },
                    },
                    artifacts=[
                        (artifact, route_expiry)
                        for artifact in artifacts
                    ],
                    **kwargs
                )
            else:
                raise
        self.found_or_created_routes[route] = task_id
        return task_id
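
    # Intended use, mirroring decision-task.py: key the route on the task's
    # inputs (here the Git commit) so that re-running the decision task for
    # the same commit reuses one build instead of scheduling another:
    #
    #     release_build_task = decision.find_or_create_task(
    #         route_bucket="build.linux_x86-64_release",
    #         route_key=os.environ["GIT_SHA"],
    #         route_expiry="1 week",
    #         task_name="Linux x86_64: release build",
    #         script="./mach build --release ...",
    #         artifacts=["/target.tar.gz"],
    #     )  # plus the usual create_task kwargs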

    def find_or_build_docker_image(self, dockerfile):
        dockerfile_contents = expand_dockerfile(dockerfile)
        digest = hashlib.sha256(dockerfile_contents).hexdigest()
        return self.find_or_create_task(
            route_bucket="docker-image",
            route_key=digest,
            route_expiry=self.docker_image_cache_expiry,
            task_name="Docker image: " + image_name(dockerfile),
            script="""
                echo "$DOCKERFILE" | docker build -t taskcluster-built -
                docker save taskcluster-built | lz4 > /%s
            """ % self.DOCKER_IMAGE_ARTIFACT_FILENAME,
            env={
                "DOCKERFILE": dockerfile_contents,
            },
            artifacts=[
                "/" + self.DOCKER_IMAGE_ARTIFACT_FILENAME,
            ],
            max_run_time_minutes=20,
            docker_image=self.DOCKER_IMAGE_BUILDER_IMAGE,
            features={
                "dind": True,  # docker-in-docker
            },
            with_repo=False,
        )

    def create_task(self, *, task_name, script, max_run_time_minutes,
                    docker_image=None, dockerfile=None,  # One of these is required
                    artifacts=None, dependencies=None, env=None, cache=None, scopes=None,
                    routes=None, extra=None, features=None,
                    with_repo=True):
        if docker_image and dockerfile:
            raise TypeError("cannot use both `docker_image` and `dockerfile`")
        if not docker_image and not dockerfile:
            raise TypeError("need one of `docker_image` or `dockerfile`")

        # https://docs.taskcluster.net/docs/reference/workers/docker-worker/docs/environment
        decision_task_id = os.environ["TASK_ID"]

        dependencies = [decision_task_id] + (dependencies or [])

        if dockerfile:
            image_build_task = self.find_or_build_docker_image(dockerfile)
            dependencies.append(image_build_task)
            docker_image = {
                "type": "task-image",
                "taskId": image_build_task,
                "path": "public/" + self.DOCKER_IMAGE_ARTIFACT_FILENAME,
            }
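            # docker-worker then loads the image from that task's
            # lz4-compressed artifact instead of pulling from a registry.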

        # Set in .taskcluster.yml
        task_owner = os.environ["TASK_OWNER"]
        task_source = os.environ["TASK_SOURCE"]

        env = env or {}

        if with_repo:
            # Set in .taskcluster.yml
            for k in ["GIT_URL", "GIT_REF", "GIT_SHA"]:
                env[k] = os.environ[k]

            script = """
                git init repo
                cd repo
                git fetch --depth 1 "$GIT_URL" "$GIT_REF"
                git reset --hard "$GIT_SHA"
            """ + script

        payload = {
            "taskGroupId": decision_task_id,
            "dependencies": dependencies or [],
            "schedulerId": "taskcluster-github",
            "provisionerId": "aws-provisioner-v1",
            "workerType": self.worker_type,

            "created": self.from_now_json(""),
            "deadline": self.from_now_json("1 day"),
            "metadata": {
                "name": "%s: %s" % (self.project_name, task_name),
                "description": "",
                "owner": task_owner,
                "source": task_source,
            },

            "scopes": scopes or [],
            "routes": routes or [],
            "extra": extra or {},
            "payload": {
                "cache": cache or {},
                "maxRunTime": max_run_time_minutes * 60,
                "image": docker_image,
                "command": [
                    "/bin/bash",
                    "--login",
                    "-x",
                    "-e",
                    "-c",
                    deindent(script)
                ],
                "env": env,
                "artifacts": {
                    "public/" + os.path.basename(path): {
                        "type": "file",
                        "path": path,
                        "expires": self.from_now_json(expires),
                    }
                    for path, expires in artifacts or []
                },
                "features": features or {},
            },
        }

        task_id = taskcluster.slugId().decode("utf8")
        self.queue_service.createTask(task_id, payload)
        print("Scheduled %s: %s" % (task_name, task_id))
        return task_id


def image_name(dockerfile):
    basename = os.path.basename(dockerfile)
    suffix = ".dockerfile"
    if basename == "Dockerfile":
        return os.path.basename(os.path.dirname(os.path.abspath(dockerfile)))
    elif basename.endswith(suffix):
        return basename[:-len(suffix)]
    else:
        return basename
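# e.g. image_name("docker/build.dockerfile") == "build",
# and image_name("some/dir/Dockerfile") == "dir".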


def expand_dockerfile(dockerfile):
    with open(dockerfile, "rb") as f:
        dockerfile_contents = f.read()

    include_marker = b"% include"
    if not dockerfile_contents.startswith(include_marker):
        return dockerfile_contents

    include_line, _, rest = dockerfile_contents.partition(b"\n")
    included = include_line[len(include_marker):].strip().decode("utf8")
    path = os.path.join(os.path.dirname(dockerfile), included)
    return b"\n".join([expand_dockerfile(path), rest])
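# A dockerfile whose first line is "% include <path>" has that line replaced,
# recursively, by the expanded contents of <path>; see docker/build.dockerfile
# and docker/run.dockerfile below, which both include base.dockerfile.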


def deindent(string):
    return re.sub("\n +", "\n ", string)
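# e.g. deindent("a\n        b") == "a\n b": scripts embedded as indented
# triple-quoted strings keep their newlines but lose the Python-source
# indentation before being handed to bash.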

@@ -0,0 +1,23 @@
FROM ubuntu:bionic-20180821

ENV \
    #
    # Some APT packages like 'tzdata' wait for user input on install by default.
    # https://stackoverflow.com/questions/44331836/apt-get-install-tzdata-noninteractive
    DEBIAN_FRONTEND=noninteractive

RUN \
    apt-get update -q && \
    apt-get install -qy --no-install-recommends \
        #
        # Cloning the repository
        git \
        ca-certificates \
        #
        # Running mach
        python2.7 \
        virtualenv \
        #
        # Installing rustup and sccache (build dockerfile) or fetching build artifacts (run tasks)
        curl

@@ -0,0 +1,43 @@
% include base.dockerfile
RUN \
    apt-get install -qy --no-install-recommends \
        #
        # Multiple C/C++ dependencies built from source
        g++ \
        make \
        cmake \
        #
        # ANGLE
        xorg-dev \
        #
        # mozjs (SpiderMonkey)
        autoconf2.13 \
        #
        # Bindgen (for SpiderMonkey bindings)
        clang \
        #
        # GStreamer
        libgstreamer-plugins-bad1.0-dev \
        #
        # OpenSSL
        libssl1.0-dev \
        #
        # blurz
        libdbus-1-dev \
        #
        # Skia
        libglu1-mesa-dev \
        libbz2-dev \
        #
        #
    && \
    #
    #
    curl https://sh.rustup.rs -sSf | sh -s -- --default-toolchain none -y && \
    #
    #
    curl -sSfL \
        https://github.com/mozilla/sccache/releases/download/0.2.7/sccache-0.2.7-x86_64-unknown-linux-musl.tar.gz \
        | tar -xz --strip-components=1 -C /usr/local/bin/ \
            sccache-0.2.7-x86_64-unknown-linux-musl/sccache

@@ -0,0 +1,9 @@
% include base.dockerfile
# Servo's runtime dependencies
RUN apt-get install -qy --no-install-recommends \
    libgl1 \
    libssl1.0.0 \
    libdbus-1-3 \
    libgstreamer-plugins-bad1.0-0 \
    gstreamer1.0-plugins-good