Upload nightly builds to Github Releases

This change extends the `mach upload-nightly` command to
publish the nightly builds for all platforms as GH Release
assets.

The GH releases are made on a separate repository so
that we can persist older nightly builds without having
to accumulate git tags for them.

Some design tradeoffs in this approach are:
1. To allow the 'latest' link from servo.org to remain stable,
the release assets are named 'servo-latest.{ext}' instead of
containing the release tag/date.
2. The release is created as draft and published atomically
when all platforms have been built successfully. This allows
us to link to the 'latest' alias from servo.org while
guaranteeing that it contains builds for all platforms.
The other option here would be to have code in servo.org UI
that uses GH API to find the most recent release with a
successful build for a given platform.
3. The tags in the nightly repo are all based on the same
commit, which has no relation to the Servo code base.

Signed-off-by: Mukilan Thiyagarajan <me@mukilan.in>
This commit is contained in:
Mukilan Thiyagarajan 2023-04-09 10:14:45 +05:30
parent 1f3837dd43
commit f1ba708cf7
6 changed files with 129 additions and 17 deletions

View file

@ -23,6 +23,7 @@ colorama == 0.3.7
# For package uploading
boto3 == 1.17.27
PyGithub == 1.58.1
# Default root CAs on Windows CI do not trust CloudFront certificates,
# connecting to https://static.rust-lang.org would fail:

View file

@ -10,6 +10,8 @@
from __future__ import absolute_import, print_function, unicode_literals
from datetime import datetime
from github import Github
import base64
import hashlib
import io
@ -594,7 +596,11 @@ class PackageCommands(CommandBase):
@CommandArgument('--secret-from-environment',
action='store_true',
help='Retrieve the appropriate secrets from the environment.')
def upload_nightly(self, platform, secret_from_environment):
@CommandArgument('--github-release-id',
default=None,
type=int,
help='The github release to upload the nightly builds.')
def upload_nightly(self, platform, secret_from_environment, github_release_id):
import boto3
def get_s3_secret():
@ -612,7 +618,24 @@ class PackageCommands(CommandBase):
path.basename(package)
)
def upload_to_s3(platform, package, timestamp):
def upload_to_github_release(platform, package, package_hash_fileobj):
    """Attach `package` and its sha256 digest to a draft GitHub release.

    No-op when no `--github-release-id` was supplied on the command
    line. Requires NIGHTLY_REPO_TOKEN and NIGHTLY_REPO in the
    environment.
    """
    if not github_release_id:
        return

    gh_client = Github(os.environ['NIGHTLY_REPO_TOKEN'])
    nightly_repo = gh_client.get_repo(os.environ['NIGHTLY_REPO'])
    release = nightly_repo.get_release(github_release_id)

    # Asset names are kept stable ('servo-latest.*') rather than dated
    # so the 'latest' download links on servo.org never change.
    extension = path.basename(package).partition('.')[2]
    asset_name = (
        f'servo-latest-layout-2020.{extension}'
        if '2020' in platform
        else f'servo-latest.{extension}'
    )

    release.upload_asset(package, name=asset_name)
    # The digest already lives in an in-memory BytesIO, so upload it
    # directly from the buffer instead of writing a temp file.
    release.upload_asset_from_memory(
        package_hash_fileobj,
        package_hash_fileobj.getbuffer().nbytes,
        name=f'{asset_name}.sha256')
def upload_to_s3(platform, package, package_hash_fileobj, timestamp):
(aws_access_key, aws_secret_access_key) = get_s3_secret()
s3 = boto3.client(
's3',
@ -635,17 +658,6 @@ class PackageCommands(CommandBase):
extension = path.basename(package).partition('.')[2]
latest_upload_key = '{}/servo-latest.{}'.format(nightly_dir, extension)
# Compute the hash
SHA_BUF_SIZE = 1048576 # read in 1 MiB chunks
sha256_digest = hashlib.sha256()
with open(package, 'rb') as package_file:
while True:
data = package_file.read(SHA_BUF_SIZE)
if not data:
break
sha256_digest.update(data)
package_hash = sha256_digest.hexdigest()
package_hash_fileobj = io.BytesIO(package_hash.encode('utf-8'))
latest_hash_upload_key = f'{latest_upload_key}.sha256'
s3.upload_file(package, BUCKET, package_upload_key)
@ -763,7 +775,21 @@ class PackageCommands(CommandBase):
package
), file=sys.stderr)
return 1
upload_to_s3(platform, package, timestamp)
# Compute the hash
SHA_BUF_SIZE = 1048576 # read in 1 MiB chunks
sha256_digest = hashlib.sha256()
with open(package, 'rb') as package_file:
while True:
data = package_file.read(SHA_BUF_SIZE)
if not data:
break
sha256_digest.update(data)
package_hash = sha256_digest.hexdigest()
package_hash_fileobj = io.BytesIO(package_hash.encode('utf-8'))
upload_to_s3(platform, package, package_hash_fileobj, timestamp)
upload_to_github_release(platform, package, package_hash_fileobj)
if platform == 'maven':
for package in PACKAGES[platform]: