Auto merge of #24811 - servo:fail-fast, r=jdm

Fix updating the GitHub Status as soon as any TC task fails

… rather than only when the entire task group is resolved. This lets Homu be notified of a failure more quickly and move on to the next PR in the queue sooner.

(Plus drive-by Brewfile fix.)
bors-servo 2019-11-22 14:58:40 -05:00 committed by GitHub
commit ef192c6b36
8 changed files with 154 additions and 93 deletions
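
For context: the fail-fast behaviour comes from a one-line route change in the decision task. Every subtask gets the `statuses` route, so taskcluster-github posts a GitHub Status as each individual task resolves instead of only when the whole task group does. A minimal sketch of the idea in isolation (a simplified stand-in for the real `CONFIG` / `decision_task.py` code shown in the diffs below, not the actual implementation):

    class Config:
        def __init__(self):
            # Routes listed here are added to every task scheduled by the decision task.
            self.routes_for_all_subtasks = []

    CONFIG = Config()

    def tasks(task_for):
        # For GitHub-triggered events, route each task's resolution message to
        # taskcluster-github's "statuses" handler, so a single failing task flips
        # the commit status (and Homu can give up on the PR) without waiting for
        # the rest of the task group.
        if task_for.startswith("github"):
            CONFIG.routes_for_all_subtasks.append("statuses")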

.taskcluster.yml

@@ -1,4 +1,8 @@
 version: 1
+# If and when switching to `reporting: checks-v1` here, also change the `statuses` route to `checks`
+# in `CONFIG.routes_for_all_subtasks` in `etc/taskcluster/decision_task.py`
 policy:
   # https://docs.taskcluster.net/docs/reference/integrations/taskcluster-github/docs/taskcluster-yml-v1#pull-requests
   pullRequests: public
@@ -27,6 +31,11 @@ tasks:
             sha256:7471a998e4462638c8d3e2cf0b4a99c9a5c8ca9f2ec0ae01cc069473b35cde10"
           features:
             taskclusterProxy: true
+          artifacts:
+            public/repo.bundle:
+              type: file
+              path: /repo.bundle
+              expires: {$fromNow: '1 day'}
           env:
             GIT_URL: ${event.repository.clone_url}
             TASK_FOR: ${tasks_for}

etc/taskcluster/decision_task.py

@@ -10,12 +10,23 @@ from decisionlib import CONFIG, SHARED
 def main(task_for):
+    with decisionlib.make_repo_bundle():
+        tasks(task_for)
+
+
+def tasks(task_for):
     if CONFIG.git_ref.startswith("refs/heads/"):
         branch = CONFIG.git_ref[len("refs/heads/"):]
         CONFIG.treeherder_repository_name = "servo-" + (
             branch if not branch.startswith("try-") else "try"
         )

+    # Work around a tc-github bug/limitation:
+    # https://bugzilla.mozilla.org/show_bug.cgi?id=1548781#c4
+    if task_for.startswith("github"):
+        # https://github.com/taskcluster/taskcluster/blob/21f257dc8/services/github/config.yml#L14
+        CONFIG.routes_for_all_subtasks.append("statuses")
+
     # The magicleap build is broken until there's a surfman back end
     magicleap_dev = lambda: None
     magicleap_nightly = lambda: None
@@ -166,7 +177,7 @@ def linux_tidy_unit_untrusted():
         .with_max_run_time_minutes(60)
         .with_dockerfile(dockerfile_path("build"))
         .with_env(**build_env, **unix_build_env, **linux_build_env)
-        .with_repo()
+        .with_repo_bundle()
         .with_script("rustup set profile minimal")
         # required by components/script_plugins:
         .with_script("rustup component add rustc-dev")
@@ -378,7 +389,7 @@ def android_x86_release():
 def android_x86_wpt():
     build_task = android_x86_release()
-    return (
+    task = (
         linux_task("WPT")
         .with_treeherder("Android x86")
         .with_provisioner_id("proj-servo")
@@ -386,8 +397,13 @@ def android_x86_wpt():
         .with_capabilities(privileged=True)
         .with_scopes("project:servo:docker-worker-kvm:capability:privileged")
         .with_dockerfile(dockerfile_path("run-android-emulator"))
-        .with_repo()
-        .with_curl_artifact_script(build_task, "servoapp.apk", "target/android/i686-linux-android/release")
+        .with_repo_bundle()
+    )
+    apk_dir = "target/android/i686-linux-android/release"
+    return (
+        task
+        .with_script("mkdir -p " + apk_dir)
+        .with_curl_artifact_script(build_task, "servoapp.apk", apk_dir)
         .with_script("""
             ./mach bootstrap-android --accept-all-licences --emulator-x86
             ./mach test-android-startup --release
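
A note on why the explicit `mkdir -p` appears in the hunk above: the old Unix-only `with_curl_script()` (removed in decisionlib.py further down) created the destination directory before downloading, while the new shared `Task.with_curl_script()` only runs curl, so callers that download into a subdirectory now create it themselves. A stand-alone, paraphrased illustration of the difference (the shell lines come from the decisionlib diff below; the function names here are only for illustration):

    def old_unix_curl_script(url, file_path):
        # Old behaviour (removed below): ensure the destination directory exists first.
        return """
            mkdir -p $(dirname "{path}")
            curl --retry 5 --connect-timeout 10 -Lf "{url}" -o "{path}"
        """.format(url=url, path=file_path)

    def new_shared_curl_script(url, file_path):
        # New shared behaviour: curl only, no mkdir.
        return """
            curl --retry 5 --connect-timeout 10 -Lf "%s" -o "%s"
        """ % (url, file_path)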
@@ -551,7 +567,7 @@ def linux_wpt():
         .find_or_create("build.linux_x64_release_w_assertions" + CONFIG.task_id())
     )

     def linux_run_task(name):
-        return linux_task(name).with_dockerfile(dockerfile_path("run"))
+        return linux_task(name).with_dockerfile(dockerfile_path("run")).with_repo_bundle()
     wpt_chunks("Linux x64", linux_run_task, release_build_task, repo_dir="/repo",
                total_chunks=4, processes=12)
@@ -585,14 +601,14 @@ def update_wpt():
         .with_scopes("secrets:get:project/servo/wpt-sync")
         .with_index_and_artifacts_expire_in(log_artifacts_expire_in)
         .with_max_run_time_minutes(6 * 60)
+        # Not using the bundle, pushing the new changes to the git remote requires a full repo.
+        .with_repo(alternate_object_dir="/var/cache/servo.git/objects")
     )
     return (
         with_homebrew(update_task, [
-            "etc/taskcluster/macos/Brewfile-wpt",
-            "etc/taskcluster/macos/Brewfile-gstreamer",
+            "etc/taskcluster/macos/Brewfile-wpt-update",
+            "etc/taskcluster/macos/Brewfile",
         ])
-        # Pushing the new changes to the git remote requires a full repo clone.
-        .with_repo(shallow=False, alternate_object_dir="/var/cache/servo.git/objects")
         .with_curl_artifact_script(build_task, "target.tar.gz")
         .with_script("""
             export PKG_CONFIG_PATH="$(brew --prefix libffi)/lib/pkgconfig/"
@@ -628,21 +644,21 @@ def macos_wpt():
     priority = "high" if CONFIG.git_ref == "refs/heads/auto" else None
     build_task = macos_release_build_with_debug_assertions(priority=priority)
     def macos_run_task(name):
-        task = macos_task(name).with_python2()
-        return with_homebrew(task, ["etc/taskcluster/macos/Brewfile-gstreamer"])
+        task = macos_task(name).with_python2() \
+            .with_repo_bundle(alternate_object_dir="/var/cache/servo.git/objects")
+        return with_homebrew(task, ["etc/taskcluster/macos/Brewfile"])
     wpt_chunks(
         "macOS x64",
         macos_run_task,
         build_task,
         repo_dir="repo",
-        repo_kwargs=dict(alternate_object_dir="/var/cache/servo.git/objects"),
         total_chunks=30,
         processes=4,
     )


 def wpt_chunks(platform, make_chunk_task, build_task, total_chunks, processes,
-               repo_dir, chunks="all", repo_kwargs={}):
+               repo_dir, chunks="all"):
     if chunks == "all":
         chunks = range(total_chunks + 1)
     for this_chunk in chunks:
@@ -651,7 +667,6 @@ def wpt_chunks(platform, make_chunk_task, build_task, total_chunks, processes,
                 this_chunk, total_chunks, width=len(str(total_chunks)),
             ))
             .with_treeherder(platform, "WPT-%s" % this_chunk)
-            .with_repo(**repo_kwargs)
             .with_curl_artifact_script(build_task, "target.tar.gz")
             .with_script("tar -xzf target.tar.gz")
             .with_index_and_artifacts_expire_in(log_artifacts_expire_in)
@@ -798,7 +813,7 @@ def linux_build_task(name, *, build_env=build_env, install_rustc_dev=True):
         .with_max_run_time_minutes(60)
         .with_dockerfile(dockerfile_path("build"))
         .with_env(**build_env, **unix_build_env, **linux_build_env)
-        .with_repo()
+        .with_repo_bundle()
         .with_script("rustup set profile minimal")
     )
     if install_rustc_dev:
@@ -841,7 +856,7 @@ def windows_build_task(name, package=True, arch="x86_64"):
             **windows_build_env[arch],
             **windows_build_env["all"]
         )
-        .with_repo(sparse_checkout=windows_sparse_checkout)
+        .with_repo_bundle(sparse_checkout=windows_sparse_checkout)
         .with_python2()
         .with_directory_mount(
             "https://www.python.org/ftp/python/3.7.3/python-3.7.3-embed-amd64.zip",
@@ -894,7 +909,7 @@ def macos_build_task(name):
         # https://github.com/servo/servo/issues/24735
         .with_max_run_time_minutes(60 * 2)
         .with_env(**build_env, **unix_build_env, **macos_build_env)
-        .with_repo(alternate_object_dir="/var/cache/servo.git/objects")
+        .with_repo_bundle(alternate_object_dir="/var/cache/servo.git/objects")
         .with_python2()
         .with_rustup()
         # Since macOS workers are long-lived and ~/.rustup kept across tasks:
@@ -910,7 +925,7 @@ def macos_build_task(name):
     return (
         with_homebrew(build_task, [
             "etc/taskcluster/macos/Brewfile",
-            "etc/taskcluster/macos/Brewfile-gstreamer",
+            "etc/taskcluster/macos/Brewfile-build",
         ])
         .with_script("""
             export OPENSSL_INCLUDE_DIR="$(brew --prefix openssl)/include"

etc/taskcluster/decisionlib.py

@@ -14,6 +14,7 @@ Project-independent library for Taskcluster decision tasks
 """

 import base64
+import contextlib
 import datetime
 import hashlib
 import json
@@ -28,6 +29,7 @@ import taskcluster
 __all__ = [
     "CONFIG", "SHARED", "Task", "DockerWorkerTask",
     "GenericWorkerTask", "WindowsGenericWorkerTask", "MacOsGenericWorkerTask",
+    "make_repo_bundle",
 ]
@@ -56,6 +58,7 @@ class Config:
         self.git_url = os.environ.get("GIT_URL")
         self.git_ref = os.environ.get("GIT_REF")
         self.git_sha = os.environ.get("GIT_SHA")
+        self.git_bundle_shallow_ref = "refs/heads/shallow"

         self.tc_root_url = os.environ.get("TASKCLUSTER_ROOT_URL")
         self.default_provisioner_id = "proj-example"
@@ -145,6 +148,9 @@ class Task:
         self.extra = {}
         self.treeherder_required = False
         self.priority = None  # Defaults to 'lowest'
+        self.git_fetch_url = CONFIG.git_url
+        self.git_fetch_ref = CONFIG.git_ref
+        self.git_checkout_sha = CONFIG.git_sha

     # All `with_*` methods return `self`, so multiple method calls can be chained.
     with_description = chaining(setattr, "description")
@@ -221,9 +227,14 @@ class Task:
         assert CONFIG.decision_task_id
         assert CONFIG.task_owner
         assert CONFIG.task_source
+
+        def dedup(xs):
+            seen = set()
+            return [x for x in xs if not (x in seen or seen.add(x))]
+
         queue_payload = {
             "taskGroupId": CONFIG.decision_task_id,
-            "dependencies": [CONFIG.decision_task_id] + self.dependencies,
+            "dependencies": dedup([CONFIG.decision_task_id] + self.dependencies),
             "schedulerId": self.scheduler_id,
             "provisionerId": self.provisioner_id,
             "workerType": self.worker_type,
@@ -299,6 +310,29 @@ class Task:
         SHARED.found_or_created_indexed_tasks[index_path] = task_id
         return task_id

+    def with_curl_script(self, url, file_path):
+        return self \
+            .with_script("""
+                curl --retry 5 --connect-timeout 10 -Lf "%s" -o "%s"
+            """ % (url, file_path))
+
+    def with_curl_artifact_script(self, task_id, artifact_name, out_directory=""):
+        queue_service = CONFIG.tc_root_url + "/api/queue"
+        return self \
+            .with_dependencies(task_id) \
+            .with_curl_script(
+                queue_service + "/v1/task/%s/artifacts/public/%s" % (task_id, artifact_name),
+                os.path.join(out_directory, url_basename(artifact_name)),
+            )
+
+    def with_repo_bundle(self, **kwargs):
+        self.git_fetch_url = "../repo.bundle"
+        self.git_fetch_ref = CONFIG.git_bundle_shallow_ref
+        self.git_checkout_sha = "FETCH_HEAD"
+        return self \
+            .with_curl_artifact_script(CONFIG.decision_task_id, "repo.bundle") \
+            .with_repo(**kwargs)
+

 class GenericWorkerTask(Task):
     """
@@ -453,9 +487,9 @@ class WindowsGenericWorkerTask(GenericWorkerTask):
         self.with_early_script("set PATH=%HOMEDRIVE%%HOMEPATH%\\{};%PATH%".format(p))
         return self

-    def with_repo(self, sparse_checkout=None, shallow=True):
+    def with_repo(self, sparse_checkout=None):
         """
-        Make a shallow clone the git repository at the start of the task.
+        Make a clone the git repository at the start of the task.

         This uses `CONFIG.git_url`, `CONFIG.git_ref`, and `CONFIG.git_sha`,
         and creates the clone in a `repo` directory in the tasks home directory.
@@ -478,13 +512,16 @@ class WindowsGenericWorkerTask(GenericWorkerTask):
                 type .git\\info\\sparse-checkout
             """
         git += """
-            git fetch --no-tags {depth} %GIT_URL% %GIT_REF%
-            git reset --hard %GIT_SHA%
-        """.format(depth="--depth 30" if shallow else "")
+            git fetch --no-tags {} {}
+            git reset --hard {}
+        """.format(
+            assert_truthy(self.git_fetch_url),
+            assert_truthy(self.git_fetch_ref),
+            assert_truthy(self.git_checkout_sha),
+        )
         return self \
             .with_git() \
-            .with_script(git) \
-            .with_env(**git_env())
+            .with_script(git)

     def with_git(self):
         """
@@ -501,6 +538,19 @@ class WindowsGenericWorkerTask(GenericWorkerTask):
             path="git",
         )

+    def with_curl_script(self, url, file_path):
+        self.with_curl()
+        return super().with_curl_script(url, file_path)
+
+    def with_curl(self):
+        return self \
+            .with_path_from_homedir("curl\\curl-7.67.0-win64-mingw\\bin") \
+            .with_directory_mount(
+                "https://curl.haxx.se/windows/dl-7.67.0_4/curl-7.67.0_4-win64-mingw.zip",
+                sha256="1d50deeac7f945ed75149e6300f6d21f007a6b942ab851a119ed76cdef27d714",
+                path="curl",
+            )
+
     def with_rustup(self):
         """
         Download rustup.rs and make it available to task commands,
@@ -578,13 +628,9 @@ class WindowsGenericWorkerTask(GenericWorkerTask):

 class UnixTaskMixin(Task):
-    def __init__(self, *args, **kwargs):
-        super().__init__(*args, **kwargs)
-        self.curl_scripts_count = 0
-
-    def with_repo(self, shallow=True, alternate_object_dir=None):
+    def with_repo(self, alternate_object_dir=""):
         """
-        Make a shallow clone the git repository at the start of the task.
+        Make a clone the git repository at the start of the task.

         This uses `CONFIG.git_url`, `CONFIG.git_ref`, and `CONFIG.git_sha`

         * generic-worker: creates the clone in a `repo` directory
@@ -597,46 +643,20 @@ class UnixTaskMixin(Task):
         """
         # Not using $GIT_ALTERNATE_OBJECT_DIRECTORIES since it causes
         # "object not found - no match for id" errors when Cargo fetches git dependencies
-        if alternate_object_dir:
-            self.with_env(ALTERNATE_OBJDIR=alternate_object_dir)
-
         return self \
-            .with_env(**git_env()) \
-            .with_early_script("""
+            .with_script("""
                 git init repo
                 cd repo
-                {alternate}
-                time git fetch --no-tags {depth} "$GIT_URL" "$GIT_REF"
-                time git reset --hard "$GIT_SHA"
+                echo "{alternate}" > .git/objects/info/alternates
+                time git fetch --no-tags {} {}
+                time git reset --hard {}
             """.format(
-                depth="--depth 30" if shallow else "",
-                alternate=(
-                    """echo "$ALTERNATE_OBJDIR" > .git/objects/info/alternates"""
-                    if alternate_object_dir else ""
-                )
+                assert_truthy(self.git_fetch_url),
+                assert_truthy(self.git_fetch_ref),
+                assert_truthy(self.git_checkout_sha),
+                alternate=alternate_object_dir,
             ))

-    def with_curl_script(self, url, file_path):
-        self.curl_scripts_count += 1
-        n = self.curl_scripts_count
-        return self \
-            .with_env(**{
-                "CURL_%s_URL" % n: url,
-                "CURL_%s_PATH" % n: file_path,
-            }) \
-            .with_script("""
-                mkdir -p $(dirname "$CURL_{n}_PATH")
-                curl --retry 5 --connect-timeout 10 -Lf "$CURL_{n}_URL" -o "$CURL_{n}_PATH"
-            """.format(n=n))
-
-    def with_curl_artifact_script(self, task_id, artifact_name, out_directory=""):
-        queue_service = CONFIG.tc_root_url + "/api/queue"
-        return self \
-            .with_dependencies(task_id) \
-            .with_curl_script(
-                queue_service + "/v1/task/%s/artifacts/public/%s" % (task_id, artifact_name),
-                os.path.join(out_directory, url_basename(artifact_name)),
-            )
-

 class MacOsGenericWorkerTask(UnixTaskMixin, GenericWorkerTask):
     """
@@ -812,15 +832,10 @@ def expand_dockerfile(dockerfile):
     return b"\n".join([expand_dockerfile(path), rest])


-def git_env():
-    assert CONFIG.git_url
-    assert CONFIG.git_ref
-    assert CONFIG.git_sha
-    return {
-        "GIT_URL": CONFIG.git_url,
-        "GIT_REF": CONFIG.git_ref,
-        "GIT_SHA": CONFIG.git_sha,
-    }
+def assert_truthy(x):
+    assert x
+    return x


 def dict_update_if_truthy(d, **kwargs):
     for key, value in kwargs.items():
@@ -835,3 +850,20 @@ def deindent(string):

 def url_basename(url):
     return url.rpartition("/")[-1]
+
+
+@contextlib.contextmanager
+def make_repo_bundle():
+    subprocess.check_call(["git", "config", "user.name", "Decision task"])
+    subprocess.check_call(["git", "config", "user.email", "nobody@mozilla.com"])
+    tree = subprocess.check_output(["git", "show", CONFIG.git_sha, "--pretty=%T", "--no-patch"])
+    message = "Shallow version of commit " + CONFIG.git_sha
+    commit = subprocess.check_output(["git", "commit-tree", tree.strip(), "-m", message])
+    subprocess.check_call(["git", "update-ref", CONFIG.git_bundle_shallow_ref, commit.strip()])
+    subprocess.check_call(["git", "show-ref"])
+    create = ["git", "bundle", "create", "../repo.bundle", CONFIG.git_bundle_shallow_ref]
+    with subprocess.Popen(create) as p:
+        yield
+        exit_code = p.wait()
+        if exit_code:
+            sys.exit(exit_code)
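
Taken together, make_repo_bundle() and Task.with_repo_bundle() replace many independent shallow clones from the git remote with a single shallow bundle built once by the decision task and published as the public/repo.bundle artifact. A hedged sketch of roughly what a Unix task ends up running after .with_repo_bundle(), pieced together from with_curl_artifact_script() and with_repo() above (DECISION_TASK_ID and the root URL are placeholder values; the optional alternate-object-dir line is omitted):

    # Approximate setup script for a task that called .with_repo_bundle().
    QUEUE = "https://community-tc.services.mozilla.com/api/queue"  # example root URL
    SETUP_SCRIPT = """
        curl --retry 5 --connect-timeout 10 -Lf "{queue}/v1/task/{task_id}/artifacts/public/repo.bundle" -o "repo.bundle"
        git init repo
        cd repo
        time git fetch --no-tags ../repo.bundle refs/heads/shallow
        time git reset --hard FETCH_HEAD
    """.format(queue=QUEUE, task_id="DECISION_TASK_ID")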

etc/taskcluster/macos/Brewfile

@@ -1,11 +1,10 @@
-brew "autoconf@2.13"
-brew "automake"
-brew "cmake"
-brew "openssl"
-brew "pkg-config"
-brew "llvm"
-brew "yasm"
-brew "zlib"
-
-# For sccache
-brew "openssl@1.1"
+# Runtime dependencies
+brew "gnutls"
+brew "gstreamer"
+brew "gst-plugins-base"
+brew "gst-libav"
+brew "gst-plugins-bad"
+brew "gst-plugins-good"
+brew "gst-rtsp-server"
+
+brew "openssl"

etc/taskcluster/macos/Brewfile-build

@@ -0,0 +1,12 @@
+# Build dependencies (that are not also runtime dependencies)
+brew "autoconf@2.13"
+brew "automake"
+brew "cmake"
+brew "pkg-config"
+brew "llvm"
+brew "yasm"
+brew "zlib"
+
+# For sccache
+brew "openssl@1.1"

etc/taskcluster/macos/Brewfile-gstreamer

@ -1,7 +0,0 @@
brew "gnutls"
brew "gstreamer"
brew "gst-plugins-base"
brew "gst-libav"
brew "gst-plugins-bad"
brew "gst-plugins-good"
brew "gst-rtsp-server"

etc/taskcluster/mock.py

@@ -39,7 +39,7 @@ class Index:
 stringDate = str
 slugId = b"<new id>".lower
-Queue = fromNow = MagicMock()
+sys.exit = Queue = fromNow = MagicMock()
 sys.modules["taskcluster"] = sys.modules[__name__]
 sys.dont_write_bytecode = True
 os.environ.update(**{k: k for k in "TASK_ID TASK_OWNER TASK_SOURCE GIT_URL GIT_SHA".split()})
@@ -48,6 +48,7 @@ os.environ["TASKCLUSTER_ROOT_URL"] = "https://community-tc.services.mozilla.com"
 os.environ["TASKCLUSTER_PROXY_URL"] = "http://taskcluster"
 os.environ["NEW_AMI_WORKER_TYPE"] = "-"
 import decision_task
+decision_task.decisionlib.subprocess = MagicMock()

 print("\n# Push:")
 decision_task.main("github-push")