Auto merge of #26510 - camelid:checksum, r=jdm

Add integrity hash for nightly builds

<!-- Please describe your changes on the following line: -->
Compute a SHA-256 integrity hash for nightly builds on the download.servo.org page.

---
<!-- Thank you for contributing to Servo! Please replace each `[ ]` by `[X]` when the step is complete, and replace `___` with appropriate data: -->
- [ ] `./mach build -d` does not report any errors (it does report errors, but they are unrelated to this change)
- [X] `./mach test-tidy` does not report any errors
- [X] These changes fix servo/download.servo.org#14 (GitHub issue number if applicable)

<!-- Either: -->
- [ ] There are tests for these changes OR
- [X] These changes do not require tests because they're in the infrastructure

<!-- Also, please make sure that "Allow edits from maintainers" checkbox is checked, so that we can help you if you get stuck somewhere along the way.-->

<!-- Pull requests that do not address these steps are welcome, but they will require additional verification as part of the review process. -->
This commit is contained in:
bors-servo 2020-05-13 21:25:36 -04:00 committed by GitHub
commit 6c506ba260
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23

View file

@ -12,6 +12,7 @@ from __future__ import absolute_import, print_function, unicode_literals
from datetime import datetime
import base64
import hashlib
import io
import json
import os
import os.path as path
@ -630,12 +631,27 @@ class PackageCommands(CommandBase):
extension = path.splitext(path.basename(package))[1]
latest_upload_key = '{}/servo-latest{}'.format(nightly_dir, extension)

# Compute a SHA-256 digest of the package so that downloads from
# download.servo.org can be integrity-checked.
SHA_BUF_SIZE = 1048576  # read in 1 MiB chunks to bound memory use
sha256_digest = hashlib.sha256()
with open(package, 'rb') as package_file:
    while True:
        data = package_file.read(SHA_BUF_SIZE)
        if not data:
            break
        sha256_digest.update(data)
package_hash = sha256_digest.hexdigest()
# hexdigest() returns text; io.BytesIO requires bytes on Python 3
# (BytesIO(str) raises TypeError), so encode the hex digest first.
package_hash_fileobj = io.BytesIO(package_hash.encode('utf-8'))
latest_hash_upload_key = '{}/servo-latest{}.sha256'.format(nightly_dir, extension)

s3.upload_file(package, BUCKET, package_upload_key)
# Copy the versioned upload to the stable "servo-latest" key, then
# publish the hash alongside it as "servo-latest<ext>.sha256".
copy_source = {
    'Bucket': BUCKET,
    'Key': package_upload_key,
}
s3.copy(copy_source, BUCKET, latest_upload_key)
s3.upload_fileobj(package_hash_fileobj, BUCKET, latest_hash_upload_key)
def update_maven(directory):
(aws_access_key, aws_secret_access_key) = get_s3_secret()