Mirror of https://github.com/servo/servo.git
Fix nightly upload to GH release logic.
The boto3 S3 client automatically closes the given fileobj after the transfer is complete. This prevents us from reusing the package_hash_fileobj between the S3 and GitHub upload methods. This PR fixes the issue by creating fresh instances of io.BytesIO within the upload_to_* methods.

Signed-off-by: Mukilan Thiyagarajan <me@mukilan.in>
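A minimal stdlib-only repro of the symptom described above, assuming the closing behaviour the commit message attributes to boto3; no AWS calls are involved and the names are illustrative:

import io

# Tiny repro of the symptom (pure stdlib, no boto3): once a BytesIO has been
# closed, as the commit message says boto3 does with the fileobj it is handed,
# any later read fails. This is what broke the second (GitHub) upload when it
# reused the shared package_hash_fileobj.
shared_fileobj = io.BytesIO(b'0' * 64)  # stand-in for the sha256 hex digest

shared_fileobj.close()  # stand-in for boto3 closing the fileobj after the S3 transfer

try:
    shared_fileobj.getbuffer().nbytes  # the GitHub upload path makes this same call
except ValueError as exc:
    print(exc)  # "I/O operation on closed file."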
This commit is contained in:
parent 4a9b80382c
commit 76a5a1081b

1 changed file with 9 additions and 5 deletions
@@ -618,24 +618,28 @@ class PackageCommands(CommandBase):
                 path.basename(package)
             )

-        def upload_to_github_release(platform, package, package_hash_fileobj):
+        def upload_to_github_release(platform, package, package_hash):
             if not github_release_id:
                 return

             extension = path.basename(package).partition('.')[2]
             g = Github(os.environ['NIGHTLY_REPO_TOKEN'])
             nightly_repo = g.get_repo(os.environ['NIGHTLY_REPO'])
             release = nightly_repo.get_release(github_release_id)
+            package_hash_fileobj = io.BytesIO(package_hash.encode('utf-8'))

             if '2020' in platform:
                 asset_name = f'servo-latest-layout-2020.{extension}'
             else:
                 asset_name = f'servo-latest.{extension}'

             release.upload_asset(package, name=asset_name)
             release.upload_asset_from_memory(
                 package_hash_fileobj,
                 package_hash_fileobj.getbuffer().nbytes,
                 name=f'{asset_name}.sha256')

-        def upload_to_s3(platform, package, package_hash_fileobj, timestamp):
+        def upload_to_s3(platform, package, package_hash, timestamp):
             (aws_access_key, aws_secret_access_key) = get_s3_secret()
             s3 = boto3.client(
                 's3',
@@ -658,6 +662,7 @@ class PackageCommands(CommandBase):
             extension = path.basename(package).partition('.')[2]
             latest_upload_key = '{}/servo-latest.{}'.format(nightly_dir, extension)

+            package_hash_fileobj = io.BytesIO(package_hash.encode('utf-8'))
             latest_hash_upload_key = f'{latest_upload_key}.sha256'

             s3.upload_file(package, BUCKET, package_upload_key)
@@ -786,10 +791,9 @@ class PackageCommands(CommandBase):
                         break
                     sha256_digest.update(data)
             package_hash = sha256_digest.hexdigest()
-            package_hash_fileobj = io.BytesIO(package_hash.encode('utf-8'))

-            upload_to_s3(platform, package, package_hash_fileobj, timestamp)
-            upload_to_github_release(platform, package, package_hash_fileobj)
+            upload_to_s3(platform, package, package_hash, timestamp)
+            upload_to_github_release(platform, package, package_hash)

         if platform == 'maven':
             for package in PACKAGES[platform]:
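For context, a minimal sketch of the pattern the diff adopts, with hypothetical helper names standing in for the real upload_to_s3 / upload_to_github_release closures: the SHA-256 hex digest is passed around as a plain string and each uploader builds its own io.BytesIO, so no buffer is shared across transfers.

import io

# Illustrative helpers only; the real code lives in the upload_to_s3 and
# upload_to_github_release closures shown in the diff above.

def upload_hash_to_s3(package_hash):
    # Fresh BytesIO per call; boto3 may close it after the transfer,
    # which is fine because nothing else holds a reference to it.
    fileobj = io.BytesIO(package_hash.encode('utf-8'))
    assert fileobj.getbuffer().nbytes == len(package_hash)


def upload_hash_to_github(package_hash):
    # The GitHub path gets its own, still-open buffer regardless of
    # what happened during the S3 upload.
    fileobj = io.BytesIO(package_hash.encode('utf-8'))
    assert not fileobj.closed


package_hash = '0' * 64  # stand-in for sha256_digest.hexdigest()
upload_hash_to_s3(package_hash)
upload_hash_to_github(package_hash)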