From 0cd734fd7a8f531f721914f990540e31485a8e2a Mon Sep 17 00:00:00 2001 From: Simon Sapin Date: Thu, 19 Mar 2020 17:30:29 +0100 Subject: [PATCH 1/4] Remove unused task indexing by task definition hash --- etc/taskcluster/decision_task.py | 1 - etc/taskcluster/decisionlib.py | 7 +------ 2 files changed, 1 insertion(+), 7 deletions(-) diff --git a/etc/taskcluster/decision_task.py b/etc/taskcluster/decision_task.py index 7a7784292bb..bdd10e643d3 100644 --- a/etc/taskcluster/decision_task.py +++ b/etc/taskcluster/decision_task.py @@ -105,7 +105,6 @@ def mocked_only(): windows_release() magicleap_dev() magicleap_nightly() - decisionlib.DockerWorkerTask("Indexed by task definition").find_or_create() ping_on_daily_task_failure = "SimonSapin, nox, emilio" diff --git a/etc/taskcluster/decisionlib.py b/etc/taskcluster/decisionlib.py index ecbdc54cfe8..2ac0d4569b4 100644 --- a/etc/taskcluster/decisionlib.py +++ b/etc/taskcluster/decisionlib.py @@ -279,7 +279,7 @@ class Task: print("Found task %s indexed at %s" % (task_id, full_index_path)) return task_id - def find_or_create(self, index_path=None): + def find_or_create(self, index_path): """ Try to find a task in the Index and return its ID. @@ -292,11 +292,6 @@ class Task: """ - if not index_path: - worker_type = self.worker_type - index_by = json.dumps([worker_type, self.build_worker_payload()]).encode("utf-8") - index_path = "by-task-definition." + hashlib.sha256(index_by).hexdigest() - task_id = SHARED.found_or_created_indexed_tasks.get(index_path) if task_id is not None: return task_id From 0c10e3ab8e5afeaa40ff23d1a230598a86a9ce55 Mon Sep 17 00:00:00 2001 From: Simon Sapin Date: Thu, 19 Mar 2020 17:34:10 +0100 Subject: [PATCH 2/4] Remove dead code for disabled CI tasks --- etc/taskcluster/decision_task.py | 81 -------------------------------- etc/taskcluster/mock.py | 3 -- 2 files changed, 84 deletions(-) diff --git a/etc/taskcluster/decision_task.py b/etc/taskcluster/decision_task.py index bdd10e643d3..b3eb6cede4d 100644 --- a/etc/taskcluster/decision_task.py +++ b/etc/taskcluster/decision_task.py @@ -27,10 +27,6 @@ def tasks(task_for): # https://github.com/taskcluster/taskcluster/blob/21f257dc8/services/github/config.yml#L14 CONFIG.routes_for_all_subtasks.append("statuses") - # The magicleap build is broken until there's a surfman back end - magicleap_dev = lambda: None - magicleap_nightly = lambda: None - if task_for == "github-push": all_tests = [ linux_tidy_unit, @@ -39,7 +35,6 @@ def tasks(task_for): windows_arm64, windows_uwp_x64, macos_unit, - magicleap_dev, linux_wpt, linux_wpt_layout_2020, linux_release, @@ -61,7 +56,6 @@ def tasks(task_for): "try-mac": [macos_unit], "try-linux": [linux_tidy_unit, linux_docs_check, linux_release], "try-windows": [windows_unit, windows_arm64, windows_uwp_x64], - "try-magicleap": [magicleap_dev], "try-arm": [windows_arm64], "try-wpt": [linux_wpt], "try-wpt-2020": [linux_wpt_layout_2020], @@ -95,18 +89,9 @@ def tasks(task_for): windows_nightly() macos_nightly() update_wpt() - magicleap_nightly() uwp_nightly() -# These are disabled in a "real" decision task, -# but should still run when testing this Python code. (See `mock.py`.) 
-def mocked_only(): - windows_release() - magicleap_dev() - magicleap_nightly() - - ping_on_daily_task_failure = "SimonSapin, nox, emilio" build_artifacts_expire_in = "1 week" build_dependencies_artifacts_expire_in = "1 month" @@ -389,18 +374,6 @@ def windows_unit(cached=True): return task.create() -def windows_release(): - return ( - windows_build_task("Release build") - .with_treeherder("Windows x64", "Release") - .with_script("mach build --release", - "mach package --release") - .with_artifacts("repo/target/release/msi/Servo.exe", - "repo/target/release/msi/Servo.zip") - .find_or_create("build.windows_x64_release." + CONFIG.task_id()) - ) - - def windows_nightly(): return ( windows_build_task("Nightly build and upload") @@ -877,60 +850,6 @@ def macos_build_task(name): ) -def magicleap_build_task(name, build_type): - return ( - macos_build_task(name) - .with_treeherder("MagicLeap aarch64", build_type) - .with_directory_mount( - "https://servo-deps.s3.amazonaws.com/magicleap/macos-sdk-v0.20.0%2Bndk19c.tar.gz", - sha256="d5890cc7612694d79e60247a5d5fe4d8bdeb797095f098d56f3069be33426781", - path="magicleap" - ) - .with_directory_mount( - "https://servo-deps.s3.amazonaws.com/magicleap/ServoCICert-expires-2020-08-25.zip", - sha256="33f9d07b89c206e671f6a5020e52265b131e83aede8fa474be323a8e3345d760", - path="magicleap" - ) - # Early script in order to run with the initial $PWD - .with_early_script(""" - export MAGICLEAP_SDK="$PWD/magicleap/v0.20.0+ndk19c" - export MLCERT="$PWD/magicleap/servocimlcert.cert" - """) - .with_script(""" - unset OPENSSL_INCLUDE_DIR - unset OPENSSL_LIB_DIR - export HOST_CC=$(brew --prefix llvm)/bin/clang - export HOST_CXX=$(brew --prefix llvm)/bin/clang++ - """) - ) - - -def magicleap_dev(): - return ( - magicleap_build_task("Dev build", "Dev") - .with_script(""" - ./mach build --magicleap --dev - env -u DYLD_LIBRARY_PATH ./mach package --magicleap --dev - """) - .find_or_create("build.magicleap_dev." + CONFIG.task_id()) - ) - - -def magicleap_nightly(): - return ( - magicleap_build_task("Nightly build and upload", "Release") - .with_features("taskclusterProxy") - .with_scopes("secrets:get:project/servo/s3-upload-credentials") - .with_script(""" - ./mach build --magicleap --release - env -u DYLD_LIBRARY_PATH ./mach package --magicleap --release - ./mach upload-nightly magicleap --secret-from-taskcluster - """) - .with_artifacts("repo/target/magicleap/aarch64-linux-android/release/Servo.mpk") - .find_or_create("build.magicleap_nightly." 
+ CONFIG.task_id()) - ) - - CONFIG.task_name_template = "Servo: %s" CONFIG.docker_images_expire_in = build_dependencies_artifacts_expire_in CONFIG.repacked_msi_files_expire_in = build_dependencies_artifacts_expire_in diff --git a/etc/taskcluster/mock.py b/etc/taskcluster/mock.py index cbc495ee5b5..f06f7bcb147 100755 --- a/etc/taskcluster/mock.py +++ b/etc/taskcluster/mock.py @@ -56,9 +56,6 @@ decision_task.main("github-push") print("\n# Push with hot caches:") decision_task.main("github-push") -print("\n# Mocked only:") -decision_task.mocked_only() - print("\n# Push to master:") decision_task.CONFIG.git_ref = "refs/heads/master" decision_task.main("github-push") From 8fff3e206f0f8a5bb59fc9d96fed48cfa17135f1 Mon Sep 17 00:00:00 2001 From: Simon Sapin Date: Thu, 19 Mar 2020 17:39:08 +0100 Subject: [PATCH 3/4] Remove indexing of daily decision tasks by date Treeherder does this job better --- etc/taskcluster/decision_task.py | 13 ------------- 1 file changed, 13 deletions(-) diff --git a/etc/taskcluster/decision_task.py b/etc/taskcluster/decision_task.py index b3eb6cede4d..0d3a4fbe174 100644 --- a/etc/taskcluster/decision_task.py +++ b/etc/taskcluster/decision_task.py @@ -664,19 +664,6 @@ def wpt_chunks(platform, make_chunk_task, build_task, total_chunks, processes, def daily_tasks_setup(): - # ':' is not accepted in an index namepspace: - # https://docs.taskcluster.net/docs/reference/core/taskcluster-index/references/api - now = SHARED.now.strftime("%Y-%m-%d_%H-%M-%S") - index_path = "%s.daily.%s" % (CONFIG.index_prefix, now) - # Index this task manually rather than with a route, - # so that it is indexed even if it fails. - SHARED.index_service.insertTask(index_path, { - "taskId": CONFIG.decision_task_id, - "rank": 0, - "data": {}, - "expires": SHARED.from_now_json(log_artifacts_expire_in), - }) - # Unlike when reacting to a GitHub push event, # the commit hash is not known until we clone the repository. CONFIG.git_sha_is_current_head() From fa625a73882abbbf6f521a9d19eb84217dc99f68 Mon Sep 17 00:00:00 2001 From: Simon Sapin Date: Thu, 19 Mar 2020 18:26:49 +0100 Subject: [PATCH 4/4] Index tasks by git tree hash instead of parent commits hashes --- etc/taskcluster/decisionlib.py | 23 ++++++----------------- 1 file changed, 6 insertions(+), 17 deletions(-) diff --git a/etc/taskcluster/decisionlib.py b/etc/taskcluster/decisionlib.py index 2ac0d4569b4..34cc634e5c2 100644 --- a/etc/taskcluster/decisionlib.py +++ b/etc/taskcluster/decisionlib.py @@ -65,23 +65,12 @@ class Config: def task_id(self): - if hasattr(self, "_task_id"): - return self._task_id - # If the head commit is a merge, we want to generate a unique task id which incorporates - # the merge parents rather that the actual sha of the merge commit. This ensures that tasks - # can be reused if the tree is in an identical state. Otherwise, if the head commit is - # not a merge, we can rely on the head commit sha for that purpose. - raw_commit = subprocess.check_output(["git", "cat-file", "commit", "HEAD"]) - parent_commits = [ - value.decode("utf8") - for line in raw_commit.split(b"\n") - for key, _, value in [line.partition(b" ")] - if key == b"parent" - ] - if len(parent_commits) > 1: - self._task_id = "-".join(parent_commits) # pragma: no cover - else: - self._task_id = self.git_sha # pragma: no cover + if not hasattr(self, "_task_id"): + # Use the SHA-1 hash of the git "tree" object rather than the commit. + # A `@bors-servo retry` command creates a new merge commit with a different commit hash + # but with the same tree hash. 
+ output = subprocess.check_output(["git", "show", "-s", "--format=%T", "HEAD"]) + self._task_id = output.decode("utf-8").strip() return self._task_id def git_sha_is_current_head(self):
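
The last patch keys task reuse on the git tree hash: a `@bors-servo retry` creates a new merge commit with a different commit hash but an identical tree, so a task indexed by tree hash can be found again instead of being rebuilt. Below is a minimal standalone sketch of that lookup key, assuming only that `git` is on PATH and the process runs inside a checkout; the helper name `tree_hash_task_id` is illustrative and is not part of decisionlib.

    import subprocess

    def tree_hash_task_id():
        # "%T" prints the SHA-1 of the tree object that HEAD points at.
        # A re-created merge commit over the same tree shares this value
        # even though its commit hash differs.
        output = subprocess.check_output(["git", "show", "-s", "--format=%T", "HEAD"])
        return output.decode("utf-8").strip()

    if __name__ == "__main__":
        # decision_task.py concatenates this value onto its "build.…" index
        # prefixes before calling Task.find_or_create(), so an unchanged tree
        # resolves to the same index entry and the existing task is reused.
        print(tree_hash_task_id())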