Auto merge of #19524 - asajeffrey:build-perf-urllib, r=jdm

Use urllib rather than httplib2 when downloading build perf data.

<!-- Please describe your changes on the following line: -->

Fixes http://build.servo.org/builders/linux-nightly/builds/534/steps/shell__3/logs/stdio
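
For reviewers, the gist of the change: the script's httplib2 `http.request(...)` / status-code handling is replaced with `urllib.request.urlopen` plus `HTTPError` handling, and the buildbot step now invokes the script with `python3`, since `urllib.request` is Python 3 only. A minimal sketch of the new download pattern (the `fetch_json` helper is illustrative only, not part of this patch):

```python
import json
from urllib.error import HTTPError
from urllib.request import urlopen


def fetch_json(url):
    # Illustrative helper (not part of this patch): fetch a buildbot JSON
    # document, treating a 404 as "no data for this build" and re-raising
    # any other HTTP error.
    try:
        with urlopen(url) as response:
            return json.loads(response.read().decode('utf-8'))
    except HTTPError as e:
        if e.code == 404:
            return {}
        raise
```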

---
<!-- Thank you for contributing to Servo! Please replace each `[ ]` by `[X]` when the step is complete, and replace `__` with appropriate data: -->
- [X] `./mach build -d` does not report any errors
- [X] `./mach test-tidy` does not report any errors
- [X] These changes do not require tests because this is test infrastructure

<!-- Also, please make sure that "Allow edits from maintainers" checkbox is checked, so that we can help you if you get stuck somewhere along the way.-->

<!-- Pull requests that do not address these steps are welcome, but they will require additional verification as part of the review process. -->

<!-- Reviewable:start -->
---
This change is [<img src="https://reviewable.io/review_button.svg" height="34" align="absmiddle" alt="Reviewable"/>](https://reviewable.io/reviews/servo/servo/19524)
<!-- Reviewable:end -->
bors-servo 2017-12-08 14:30:46 -06:00 committed by GitHub
commit ab7187cb66
2 changed files with 18 additions and 36 deletions


@@ -114,7 +114,7 @@ linux-nightly:
   - ./mach package --release
   - ./mach upload-nightly linux
   - ./mach test-perf
-  - python ./etc/ci/performance/download_buildbot_timings.py --verbose
+  - python3 ./etc/ci/performance/download_buildbot_timings.py --verbose
   - aws s3 sync --size-only --acl public-read ./etc/ci/performance/output s3://servo-perf
 android:

etc/ci/performance/download_buildbot_timings.py

@@ -7,10 +7,10 @@
 import argparse
 import csv
 from datetime import datetime, date
-import httplib2
 import json
 from math import floor
 import os
+from urllib.request import urlopen, HTTPError

 SCRIPT_PATH = os.path.split(__file__)[0]
@@ -55,8 +55,6 @@ def main():
                         help="print every HTTP request")
     args = parser.parse_args()

-    http = httplib2.Http()
-
     os.makedirs(args.cache_dir, exist_ok=True)
     os.makedirs(args.output_dir, exist_ok=True)
@@ -64,29 +62,21 @@ def main():
     # Note: this isn't cached
     if args.verbose:
         print("Downloading index {}.".format(args.index_url))
-    (index_headers, index_data) = http.request(args.index_url, "GET", headers={'cache-control': 'no-cache'})
-    if args.verbose:
-        print("Response {}.".format(index_headers))
-    index = json.loads(index_data.decode('utf-8'))
+    with urlopen(args.index_url) as response:
+        index = json.loads(response.read().decode('utf-8'))

     builds = []
-    for builder in index["builders"]:
+    for builder in sorted(index["builders"]):
         # The most recent build is at offset -1
         # Fetch it to find out the build number
         # Note: this isn't cached
         recent_build_url = args.build_url.format(builder, -1)
         if args.verbose:
             print("Downloading recent build {}.".format(recent_build_url))
-        (recent_build_headers, recent_build_data) = http.request(
-            recent_build_url,
-            "GET",
-            headers={'cache-control': 'no-cache'}
-        )
-        if args.verbose:
-            print("Respose {}.".format(recent_build_headers))
-        recent_build = json.loads(recent_build_data.decode('utf-8'))
-        recent_build_number = recent_build["number"]
+        with urlopen(recent_build_url) as response:
+            recent_build = json.loads(response.read().decode('utf-8'))
+            recent_build_number = recent_build["number"]

         # Download each build, and convert to CSV
         for build_number in range(0, recent_build_number):
@@ -104,25 +94,17 @@ def main():
             build_url = args.build_url.format(builder, build_number)
             if args.verbose:
                 print("Downloading build {}.".format(build_url))
-            (build_headers, build_data) = http.request(
-                build_url,
-                "GET",
-                headers={'cache-control': 'no=cache'}
-            )
-            if args.verbose:
-                print("Response {}.".format(build_headers))
-
-            # Only parse the JSON if we got back a 200 response.
-            if build_headers.status == 200:
-                build = json.loads(build_data.decode('utf-8'))
-
-                # Don't cache current builds.
-                if build.get('currentStep'):
-                    continue
-            elif build_headers.status == 404:
-                build = {}
-            else:
+            try:
+                with urlopen(build_url) as response:
+                    build = json.loads(response.read().decode('utf-8'))
+            except HTTPError as e:
+                if e.code == 404:
+                    build = {}
+                else:
+                    raise
+
+            # Don't cache current builds.
+            if build.get('currentStep'):
                 continue

             with open(cache_json, 'w+') as f: